From 679a30e02ee864b3af7d964d8135fbb9e99dd52a Mon Sep 17 00:00:00 2001
From: frisitano
Date: Fri, 16 Jun 2023 11:42:49 +0100
Subject: [PATCH] feat: introduce recorder objects

---
 src/data.rs                  | 307 +++++++++++++++++++++++++++++++++++
 src/lib.rs                   |   1 +
 src/merkle/mmr/full.rs       |   1 +
 src/merkle/mod.rs            |   6 +-
 src/merkle/partial_mt/mod.rs |   3 +-
 src/merkle/path.rs           |   6 +
 src/merkle/store/mod.rs      | 240 ++++++++++++++++++---------
 src/merkle/store/tests.rs    |  61 ++++++-
 src/utils.rs                 |   2 +-
 9 files changed, 541 insertions(+), 86 deletions(-)
 create mode 100644 src/data.rs

diff --git a/src/data.rs b/src/data.rs
new file mode 100644
index 0000000..cd82502
--- /dev/null
+++ b/src/data.rs
@@ -0,0 +1,307 @@
+use super::utils::{
+    collections::{btree_map::IntoIter, BTreeMap, BTreeSet},
+    Box,
+};
+use core::{
+    cell::RefCell,
+    iter::{Chain, Filter},
+};
+
+// KEY-VALUE MAP TRAIT
+// ================================================================================================
+/// A trait that defines the interface for a key-value map.
+pub trait KvMap<K, V> {
+    fn get(&self, key: &K) -> Option<&V>;
+    fn contains_key(&self, key: &K) -> bool;
+    fn len(&self) -> usize;
+    fn is_empty(&self) -> bool {
+        self.len() == 0
+    }
+    fn iter(&self) -> Box<dyn Iterator<Item = (&K, &V)> + '_>;
+    fn insert(&mut self, key: K, value: V) -> Option<V>;
+}
+
+// RECORDING MAP
+// ================================================================================================
+
+/// A [RecordingMap] records read requests to the underlying key-value map. The recorded data
+/// is used to generate a proof for those read requests.
+///
+/// The [RecordingMap] is composed of four parts:
+/// - `data`: which contains the initial key-value pairs from the underlying data set.
+/// - `delta`: which contains key-value pairs which have been created after instantiation.
+/// - `updated_keys`: which tracks keys from `data` which have been updated in `delta`.
+/// - `trace`: which contains the keys from the initial data set (`data`) that are read.
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct RecordingMap<K, V> {
+    data: BTreeMap<K, V>,
+    delta: BTreeMap<K, V>,
+    updated_keys: BTreeSet<K>,
+    trace: RefCell<BTreeSet<K>>,
+}
+
+impl<K: Ord + Clone, V: Clone> RecordingMap<K, V> {
+    // CONSTRUCTOR
+    // --------------------------------------------------------------------------------------------
+    /// Returns a new [RecordingMap] instance initialized with the provided key-value pairs
+    /// ([BTreeMap]).
+    pub fn new(init: impl IntoIterator<Item = (K, V)>) -> Self {
+        RecordingMap {
+            data: init.into_iter().collect(),
+            delta: BTreeMap::new(),
+            updated_keys: BTreeSet::new(),
+            trace: RefCell::new(BTreeSet::new()),
+        }
+    }
+
+    // FINALIZER
+    // --------------------------------------------------------------------------------------------
+    /// Consumes the [RecordingMap] and returns a [BTreeMap] containing the key-value pairs from
+    /// the initial data set that were read during recording.
+    pub fn into_proof(self) -> BTreeMap<K, V> {
+        self.data
+            .into_iter()
+            .filter(|(k, _)| self.trace.borrow().contains(k))
+            .collect::<BTreeMap<_, _>>()
+    }
+}
+
+impl<K: Ord + Clone, V: Clone> KvMap<K, V> for RecordingMap<K, V> {
+    // ACCESSORS
+    // --------------------------------------------------------------------------------------------
+    /// Returns a reference to the value associated with the given key if the value exists. If the
+    /// key is part of the initial data set, the key access is recorded.
+    fn get(&self, key: &K) -> Option<&V> {
+        if let Some(value) = self.delta.get(key) {
+            return Some(value);
+        }
+
+        match self.data.get(key) {
+            None => None,
+            Some(value) => {
+                self.trace.borrow_mut().insert(key.clone());
+                Some(value)
+            }
+        }
+    }
+
+    /// Returns a boolean to indicate whether the given key exists in the data set. If the key is
+    /// part of the initial data set, the key access is recorded.
+    fn contains_key(&self, key: &K) -> bool {
+        if self.delta.contains_key(key) {
+            return true;
+        }
+
+        match self.data.contains_key(key) {
+            true => {
+                self.trace.borrow_mut().insert(key.clone());
+                true
+            }
+            false => false,
+        }
+    }
+
+    /// Returns the number of key-value pairs in the data set.
+    fn len(&self) -> usize {
+        self.data.len() + self.delta.len() - self.updated_keys.len()
+    }
+
+    /// Returns an iterator over the key-value pairs in the data set.
+    fn iter(&self) -> Box<dyn Iterator<Item = (&K, &V)> + '_> {
+        Box::new(
+            self.data
+                .iter()
+                .filter(|(k, _)| !self.updated_keys.contains(*k))
+                .chain(self.delta.iter()),
+        )
+    }
+
+    // MUTATORS
+    // --------------------------------------------------------------------------------------------
+
+    /// Inserts a key-value pair into the data set. If the key already exists in the data set, the
+    /// value is updated and the old value is returned.
+    fn insert(&mut self, key: K, value: V) -> Option<V> {
+        if let Some(value) = self.delta.insert(key.clone(), value) {
+            return Some(value);
+        }
+
+        match self.data.get(&key) {
+            None => None,
+            Some(value) => {
+                self.trace.borrow_mut().insert(key.clone());
+                self.updated_keys.insert(key);
+                Some(value.clone())
+            }
+        }
+    }
+}
+
+// RECORDING MAP TRAIT IMPLS
+// ================================================================================================
+
+impl<K: Ord + Clone, V: Clone> Extend<(K, V)> for RecordingMap<K, V> {
+    fn extend<T: IntoIterator<Item = (K, V)>>(&mut self, iter: T) {
+        iter.into_iter().for_each(move |(k, v)| {
+            self.insert(k, v);
+        });
+    }
+}
+
+impl<K: Ord + Clone, V: Clone> Default for RecordingMap<K, V> {
+    fn default() -> Self {
+        RecordingMap::new(BTreeMap::new())
+    }
+}
+
+impl<K: Ord + Clone, V: Clone> IntoIterator for RecordingMap<K, V> {
+    type Item = (K, V);
+    type IntoIter =
+        Chain<Filter<IntoIter<K, V>, Box<dyn Fn(&(K, V)) -> bool>>, IntoIter<K, V>>;
+
+    fn into_iter(self) -> Self::IntoIter {
+        #[allow(clippy::type_complexity)]
+        let filter_updated: Box<dyn Fn(&(K, V)) -> bool> =
+            Box::new(move |(k, _)| !self.updated_keys.contains(k));
+        let data_iter = self.data.into_iter().filter(filter_updated);
+        let updates_iter = self.delta.into_iter();
+
+        data_iter.chain(updates_iter)
+    }
+}
+
+// BTREE MAP `KvMap` IMPLEMENTATION
+// ================================================================================================
+impl<K: Ord + Clone, V: Clone> KvMap<K, V> for BTreeMap<K, V> {
+    fn get(&self, key: &K) -> Option<&V> {
+        self.get(key)
+    }
+
+    fn contains_key(&self, key: &K) -> bool {
+        self.contains_key(key)
+    }
+
+    fn len(&self) -> usize {
+        self.len()
+    }
+
+    fn iter(&self) -> Box<dyn Iterator<Item = (&K, &V)> + '_> {
+        Box::new(self.iter())
+    }
+
+    fn insert(&mut self, key: K, value: V) -> Option<V> {
+        self.insert(key, value)
+    }
+}
+
+// TESTS
+// ================================================================================================
+#[cfg(test)]
+mod test_recorder {
+    use super::*;
+
+    const ITEMS: [(u64, u64); 5] = [(0, 0), (1, 1), (2, 2), (3, 3), (4, 4)];
+
+    #[test]
+    fn test_get_item() {
+        // instantiate a recording map
+        let map = RecordingMap::new(ITEMS.to_vec());
+
+        // get a few items
+        let get_items = [0, 1, 2];
+        for key in get_items.iter() {
+            map.get(key);
+        }
+
+        // convert the map into a proof
+        let proof = map.into_proof();
+
+        // check that the proof contains the expected values
+        for (key, value) in ITEMS.iter() {
+            match get_items.contains(key) {
+                true => assert_eq!(proof.get(key), Some(value)),
+                false => assert_eq!(proof.get(key), None),
+            }
+        }
+    }
+
+    #[test]
+    fn test_contains_key() {
+        // instantiate a recording map
+        let map = RecordingMap::new(ITEMS.to_vec());
+
+        // check if the map contains a few items
+        let get_items = [0, 1, 2];
+        for key in get_items.iter() {
+            map.contains_key(key);
+        }
+
+        // convert the map into a proof
+        let proof = map.into_proof();
+
+        // check that the proof contains the expected values
+        for (key, _) in ITEMS.iter() {
+            match get_items.contains(key) {
+                true => assert_eq!(proof.contains_key(key), true),
+                false => assert_eq!(proof.contains_key(key), false),
+            }
+        }
+    }
+
+    #[test]
+    fn test_len() {
+        // instantiate a recording map
+        let mut map = RecordingMap::new(ITEMS.to_vec());
+        // length of the map should be equal to the number of items
+        assert_eq!(map.len(), ITEMS.len());
+
+        // inserting an entry with a key that already exists should not change the length
+        map.insert(4, 5);
+        assert_eq!(map.len(), ITEMS.len());
+
+        // inserting an entry with a new key should increase the length
+        map.insert(5, 5);
+        assert_eq!(map.len(), ITEMS.len() + 1);
+
+        // get some items so that they are saved in the trace
+        let get_items = [0, 1, 2];
+        for key in get_items.iter() {
+            map.contains_key(key);
+        }
+
+        // Note: the length reported by the proof will be different from the length originally
+        // reported by the map.
+        let proof = map.into_proof();
+
+        // length of the proof should be equal to get_items + 1. The extra item is the original
+        // value at key = 4u64
+        assert_eq!(proof.len(), get_items.len() + 1);
+    }
+
+    #[test]
+    fn test_iter() {
+        let mut map = RecordingMap::new(ITEMS.to_vec());
+        assert!(map.iter().all(|(x, y)| ITEMS.contains(&(*x, *y))));
+
+        // when inserting an entry with a key that already exists, the iterator should return
+        // the new value
+        let new_value = 5;
+        map.insert(4, new_value);
+        assert_eq!(map.iter().count(), ITEMS.len());
+        assert!(map.iter().all(|(x, y)| if x == &4 {
+            y == &new_value
+        } else {
+            ITEMS.contains(&(*x, *y))
+        }));
+    }
+
+    #[test]
+    fn test_is_empty() {
+        // instantiate an empty recording map
+        let empty_map: RecordingMap<u64, u64> = RecordingMap::default();
+        assert!(empty_map.is_empty());
+
+        // instantiate a non-empty recording map
+        let map = RecordingMap::new(ITEMS.to_vec());
+        assert!(!map.is_empty());
+    }
+}
diff --git a/src/lib.rs b/src/lib.rs
index 7c7d753..f0dc7ee 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -4,6 +4,7 @@
 #[cfg_attr(test, macro_use)]
 extern crate alloc;
 
+pub mod data;
 pub mod hash;
 pub mod merkle;
 pub mod utils;
diff --git a/src/merkle/mmr/full.rs b/src/merkle/mmr/full.rs
index 7ceace3..d2fbbeb 100644
--- a/src/merkle/mmr/full.rs
+++ b/src/merkle/mmr/full.rs
@@ -28,6 +28,7 @@ use std::error::Error;
 ///
 /// Since this is a full representation of the MMR, elements are never removed and the MMR will
 /// grow roughly `O(2n)` in number of leaf elements.
+#[derive(Debug, Clone)]
 pub struct Mmr {
     /// Refer to the `forest` method documentation for details of the semantics of this value.
     pub(super) forest: usize,
diff --git a/src/merkle/mod.rs b/src/merkle/mod.rs
index 2617787..00e8ffc 100644
--- a/src/merkle/mod.rs
+++ b/src/merkle/mod.rs
@@ -1,4 +1,5 @@
 use super::{
+    data::{KvMap, RecordingMap},
     hash::rpo::{Rpo256, RpoDigest},
     utils::collections::{vec, BTreeMap, BTreeSet, Vec},
     Felt, StarkField, Word, WORD_SIZE, ZERO,
@@ -33,7 +34,10 @@ mod mmr;
 pub use mmr::{Mmr, MmrPeaks, MmrProof};
 
 mod store;
-pub use store::MerkleStore;
+pub use store::{
+    GenericMerkleStore, MerkleMap, MerkleMapT, MerkleStore, RecordingMerkleMap,
+    RecordingMerkleStore,
+};
 
 mod node;
 pub use node::InnerNodeInfo;
diff --git a/src/merkle/partial_mt/mod.rs b/src/merkle/partial_mt/mod.rs
index 8800662..3558c9f 100644
--- a/src/merkle/partial_mt/mod.rs
+++ b/src/merkle/partial_mt/mod.rs
@@ -154,7 +154,8 @@ impl PartialMerkleTree {
         self.leaves.iter().map(|&leaf| {
             (
                 leaf,
-                self.get_node(leaf).expect(&format!("Leaf with {leaf} is not in the nodes map")),
+                self.get_node(leaf)
+                    .unwrap_or_else(|_| panic!("Leaf with {leaf} is not in the nodes map")),
             )
         })
     }
diff --git a/src/merkle/path.rs b/src/merkle/path.rs
index 1842915..975bc68 100644
--- a/src/merkle/path.rs
+++ b/src/merkle/path.rs
@@ -68,6 +68,12 @@ impl MerklePath {
     }
 }
 
+impl From<MerklePath> for Vec<RpoDigest> {
+    fn from(path: MerklePath) -> Self {
+        path.nodes
+    }
+}
+
 impl From<Vec<RpoDigest>> for MerklePath {
     fn from(path: Vec<RpoDigest>) -> Self {
         Self::new(path)
diff --git a/src/merkle/store/mod.rs b/src/merkle/store/mod.rs
index a412092..c85d4fd 100644
--- a/src/merkle/store/mod.rs
+++ b/src/merkle/store/mod.rs
@@ -1,6 +1,7 @@
 use super::{
-    mmr::Mmr, BTreeMap, EmptySubtreeRoots, InnerNodeInfo, MerkleError, MerklePath, MerklePathSet,
-    MerkleTree, NodeIndex, RootPath, Rpo256, RpoDigest, SimpleSmt, TieredSmt, ValuePath, Vec,
+    mmr::Mmr, BTreeMap, EmptySubtreeRoots, InnerNodeInfo, KvMap, MerkleError, MerklePath,
+    MerklePathSet, MerkleTree, NodeIndex, RecordingMap, RootPath, Rpo256, RpoDigest, SimpleSmt,
+    TieredSmt, ValuePath, Vec,
 };
 use crate::utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable};
 use core::borrow::Borrow;
@@ -8,12 +9,56 @@ use core::borrow::Borrow;
 #[cfg(test)]
 mod tests;
 
+// TRAIT / TYPE DECLARATIONS
+// ================================================================================================
+/// A supertrait that defines the required traits for a type to be used as a data map backend for
+/// the [GenericMerkleStore].
+pub trait MerkleMapT:
+    KvMap<RpoDigest, Node>
+    + Extend<(RpoDigest, Node)>
+    + FromIterator<(RpoDigest, Node)>
+    + IntoIterator<Item = (RpoDigest, Node)>
+{
+}
+
+// MERKLE STORE
+// ------------------------------------------------------------------------------------------------
+
+/// Type that represents a standard MerkleStore.
+pub type MerkleStore = GenericMerkleStore<MerkleMap>;
+
+/// Declaration of a BTreeMap that uses a [RpoDigest] as a key and a [Node] as the value. This type
+/// is used as a data backend for the standard [GenericMerkleStore].
+pub type MerkleMap = BTreeMap<RpoDigest, Node>;
+
+/// Implementation of [MerkleMapT] trait on [MerkleMap].
+impl MerkleMapT for MerkleMap {}
+
+// RECORDING MERKLE STORE
+// ------------------------------------------------------------------------------------------------
+
+/// Type that represents a MerkleStore with recording capabilities.
+pub type RecordingMerkleStore = GenericMerkleStore<RecordingMerkleMap>;
+
+/// Declaration of a [RecordingMap] that uses a [RpoDigest] as a key and a [Node] as the value.
+/// This type is used as a data backend for the recording [GenericMerkleStore].
+pub type RecordingMerkleMap = RecordingMap<RpoDigest, Node>;
+
+/// Implementation of [MerkleMapT] on [RecordingMerkleMap].
+impl MerkleMapT for RecordingMerkleMap {}
+
+// NODE DEFINITION
+// ================================================================================================
+
 #[derive(Debug, Default, Copy, Clone, Eq, PartialEq)]
 pub struct Node {
     left: RpoDigest,
     right: RpoDigest,
 }
 
+// MERKLE STORE IMPLEMENTATION
+// ================================================================================================
+
 /// An in-memory data store for Merkelized data.
 ///
 /// This is a in memory data store for Merkle trees, this store allows all the nodes of multiple
@@ -51,9 +96,8 @@ pub struct Node {
 /// let tree2 = MerkleTree::new(vec![A, B, C, D, E, F, G, H1]).unwrap();
 ///
 /// // populates the store with two merkle trees, common nodes are shared
-/// store
-///     .extend(tree1.inner_nodes())
-///     .extend(tree2.inner_nodes());
+/// store.extend(tree1.inner_nodes());
+/// store.extend(tree2.inner_nodes());
 ///
 /// // every leaf except the last are the same
 /// for i in 0..7 {
@@ -78,41 +122,25 @@ pub struct Node {
 /// assert_eq!(store.num_internal_nodes() - 255, 10);
 /// ```
 #[derive(Debug, Clone, Eq, PartialEq)]
-pub struct MerkleStore {
-    nodes: BTreeMap<RpoDigest, Node>,
+pub struct GenericMerkleStore<T: MerkleMapT> {
+    nodes: T,
 }
 
-impl Default for MerkleStore {
+impl<T: MerkleMapT> Default for GenericMerkleStore<T> {
     fn default() -> Self {
         Self::new()
     }
 }
 
-impl MerkleStore {
+impl<T: MerkleMapT> GenericMerkleStore<T> {
     // CONSTRUCTORS
     // --------------------------------------------------------------------------------------------
-    /// Creates an empty `MerkleStore` instance.
-    pub fn new() -> MerkleStore {
+    /// Creates an empty `GenericMerkleStore` instance.
+    pub fn new() -> GenericMerkleStore<T> {
         // pre-populate the store with the empty hashes
-        let subtrees = EmptySubtreeRoots::empty_hashes(255);
-        let nodes = subtrees
-            .iter()
-            .rev()
-            .copied()
-            .zip(subtrees.iter().rev().skip(1).copied())
-            .map(|(child, parent)| {
-                (
-                    parent,
-                    Node {
-                        left: child,
-                        right: child,
-                    },
-                )
-            })
-            .collect();
-
-        MerkleStore { nodes }
+        let nodes = empty_hashes().into_iter().collect();
+        GenericMerkleStore { nodes }
     }
 
     // PUBLIC ACCESSORS
@@ -261,12 +289,12 @@ impl MerkleStore {
     /// nodes which are descendants of the specified roots.
     ///
     /// The roots for which no descendants exist in this Merkle store are ignored.
-    pub fn subset<I, R>(&self, roots: I) -> MerkleStore
+    pub fn subset<I, R>(&self, roots: I) -> GenericMerkleStore<T>
     where
        I: Iterator<Item = R>,
        R: Borrow<RpoDigest>,
     {
-        let mut store = MerkleStore::new();
+        let mut store = GenericMerkleStore::new();
        for root in roots {
            let root = *root.borrow();
            store.clone_tree_from(root, self);
@@ -274,7 +302,7 @@ impl MerkleStore {
        }
        store
     }
 
-    /// Iterator over the inner nodes of the [MerkleStore].
+    /// Iterator over the inner nodes of the [GenericMerkleStore].
     pub fn inner_nodes(&self) -> impl Iterator<Item = InnerNodeInfo> + '_ {
        self.nodes.iter().map(|(r, n)| InnerNodeInfo {
            value: *r,
@@ -286,23 +314,6 @@ impl MerkleStore {
     // STATE MUTATORS
     // --------------------------------------------------------------------------------------------
 
-    /// Adds a sequence of nodes yielded by the provided iterator into the store.
-    pub fn extend<I>(&mut self, iter: I) -> &mut MerkleStore
-    where
-        I: Iterator<Item = InnerNodeInfo>,
-    {
-        for node in iter {
-            let value: RpoDigest = node.value;
-            let left: RpoDigest = node.left;
-            let right: RpoDigest = node.right;
-
-            debug_assert_eq!(Rpo256::merge(&[left, right]), value);
-            self.nodes.insert(value, Node { left, right });
-        }
-
-        self
-    }
-
     /// Adds all the nodes of a Merkle path represented by `path`, opening to `node`. Returns the
     /// new root.
     ///
@@ -332,7 +343,7 @@ impl MerkleStore {
     /// This will compute the sibling elements for each Merkle `path` and include all the nodes
     /// into the store.
     ///
-    /// For further reference, check [MerkleStore::add_merkle_path].
+    /// For further reference, check [GenericMerkleStore::add_merkle_path].
     pub fn add_merkle_paths<I>(&mut self, paths: I) -> Result<(), MerkleError>
     where
         I: IntoIterator<Item = (u64, Word, MerklePath)>,
@@ -345,7 +356,7 @@ impl MerkleStore {
 
     /// Appends the provided [MerklePathSet] into the store.
     ///
-    /// For further reference, check [MerkleStore::add_merkle_path].
+    /// For further reference, check [GenericMerkleStore::add_merkle_path].
     pub fn add_merkle_path_set(
         &mut self,
         path_set: &MerklePathSet,
@@ -420,55 +431,126 @@ impl MerkleStore {
     }
 }
 
+// RECORDING MERKLE STORE FINALIZER
+// ===============================================================================================
+
+impl RecordingMerkleStore {
+    /// Consumes the [RecordingMerkleStore] and returns a [MerkleMap] containing the key-value
+    /// pairs from the initial data set that were read during recording.
+    pub fn into_proof(self) -> MerkleMap {
+        self.nodes.into_proof()
+    }
+}
+
+// EMPTY HASHES
+// ================================================================================================
+/// Creates empty hashes for all the subtrees of a tree with a max depth of 255.
+fn empty_hashes() -> impl IntoIterator<Item = (RpoDigest, Node)> {
+    let subtrees = EmptySubtreeRoots::empty_hashes(255);
+    subtrees.iter().rev().copied().zip(subtrees.iter().rev().skip(1).copied()).map(
+        |(child, parent)| {
+            (
+                parent,
+                Node {
+                    left: child,
+                    right: child,
+                },
+            )
+        },
+    )
+}
+
+/// Consumes an iterator of [InnerNodeInfo] and returns an iterator of `(value, node)` tuples
+/// which includes the nodes associated with roots of empty subtrees up to a depth of 255.
+fn combine_nodes_with_empty_hashes(
+    nodes: impl IntoIterator<Item = InnerNodeInfo>,
+) -> impl Iterator<Item = (RpoDigest, Node)> {
+    nodes
+        .into_iter()
+        .map(|info| {
+            (
+                info.value,
+                Node {
+                    left: info.left,
+                    right: info.right,
+                },
+            )
+        })
+        .chain(empty_hashes().into_iter())
+}
+
 // CONVERSIONS
 // ================================================================================================
-impl From<&MerkleTree> for MerkleStore {
+impl<T: MerkleMapT> From<&MerkleTree> for GenericMerkleStore<T> {
     fn from(value: &MerkleTree) -> Self {
-        let mut store = MerkleStore::new();
-        store.extend(value.inner_nodes());
-        store
+        let nodes = combine_nodes_with_empty_hashes(value.inner_nodes()).collect();
+        GenericMerkleStore { nodes }
     }
 }
 
-impl From<&SimpleSmt> for MerkleStore {
+impl<T: MerkleMapT> From<&SimpleSmt> for GenericMerkleStore<T> {
     fn from(value: &SimpleSmt) -> Self {
-        let mut store = MerkleStore::new();
-        store.extend(value.inner_nodes());
-        store
+        let nodes = combine_nodes_with_empty_hashes(value.inner_nodes()).collect();
+        GenericMerkleStore { nodes }
     }
 }
 
-impl From<&Mmr> for MerkleStore {
+impl<T: MerkleMapT> From<&Mmr> for GenericMerkleStore<T> {
     fn from(value: &Mmr) -> Self {
-        let mut store = MerkleStore::new();
-        store.extend(value.inner_nodes());
-        store
+        let nodes = combine_nodes_with_empty_hashes(value.inner_nodes()).collect();
+        GenericMerkleStore { nodes }
     }
 }
 
-impl From<&TieredSmt> for MerkleStore {
+impl<T: MerkleMapT> From<&TieredSmt> for GenericMerkleStore<T> {
     fn from(value: &TieredSmt) -> Self {
-        let mut store = MerkleStore::new();
-        store.extend(value.inner_nodes());
-        store
+        let nodes = combine_nodes_with_empty_hashes(value.inner_nodes()).collect();
+        GenericMerkleStore { nodes }
     }
 }
 
-impl FromIterator<InnerNodeInfo> for MerkleStore {
-    fn from_iter<T: IntoIterator<Item = InnerNodeInfo>>(iter: T) -> Self {
-        let mut store = MerkleStore::new();
-        store.extend(iter.into_iter());
-        store
+impl<T: MerkleMapT> FromIterator<InnerNodeInfo> for GenericMerkleStore<T> {
+    fn from_iter<I: IntoIterator<Item = InnerNodeInfo>>(iter: I) -> Self {
+        let nodes = combine_nodes_with_empty_hashes(iter).collect();
+        GenericMerkleStore { nodes }
+    }
+}
+
+impl From<MerkleStore> for RecordingMerkleStore {
+    fn from(value: MerkleStore) -> Self {
+        GenericMerkleStore {
+            nodes: RecordingMerkleMap::new(value.nodes.into_iter()),
+        }
+    }
+}
+
+impl FromIterator<(RpoDigest, Node)> for RecordingMerkleMap {
+    fn from_iter<T: IntoIterator<Item = (RpoDigest, Node)>>(iter: T) -> Self {
+        RecordingMerkleMap::new(iter)
+    }
+}
+
+impl From<MerkleMap> for MerkleStore {
+    fn from(value: MerkleMap) -> Self {
+        GenericMerkleStore { nodes: value }
     }
 }
 
 // ITERATORS
 // ================================================================================================
-impl Extend<InnerNodeInfo> for MerkleStore {
-    fn extend<T: IntoIterator<Item = InnerNodeInfo>>(&mut self, iter: T) {
-        self.extend(iter.into_iter());
+impl<T: MerkleMapT> Extend<InnerNodeInfo> for GenericMerkleStore<T> {
+    fn extend<I: IntoIterator<Item = InnerNodeInfo>>(&mut self, iter: I) {
+        self.nodes.extend(iter.into_iter().map(|info| {
+            (
+                info.value,
+                Node {
+                    left: info.left,
+                    right: info.right,
+                },
+            )
+        }));
     }
 }
 
@@ -490,7 +572,7 @@ impl Deserializable for Node {
     }
 }
 
-impl Serializable for MerkleStore {
+impl<T: MerkleMapT> Serializable for GenericMerkleStore<T> {
     fn write_into<W: ByteWriter>(&self, target: &mut W) {
         target.write_u64(self.nodes.len() as u64);
 
@@ -501,10 +583,10 @@ impl Serializable for MerkleStore {
     }
 }
 
-impl Deserializable for MerkleStore {
+impl Deserializable for GenericMerkleStore<MerkleMap> {
     fn read_from<R: ByteReader>(source: &mut R) -> Result<Self, DeserializationError> {
         let len = source.read_u64()?;
-        let mut nodes: BTreeMap<RpoDigest, Node> = BTreeMap::new();
+        let mut nodes: MerkleMap = BTreeMap::new();
 
         for _ in 0..len {
             let key = RpoDigest::read_from(source)?;
@@ -512,6 +594,6 @@ impl Deserializable for MerkleStore {
             nodes.insert(key, value);
         }
 
-        Ok(MerkleStore { nodes })
+        Ok(GenericMerkleStore { nodes })
     }
 }
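[Editorial note, not part of the patch: a minimal usage sketch of the recording workflow wired up above. It assumes the crate is consumed under the `miden_crypto` library name and uses only APIs that appear in this diff — `From<&MerkleTree>`, `From<MerkleStore> for RecordingMerkleStore`, `get_node`, `into_proof`, and `From<MerkleMap> for MerkleStore`.]

```rust
use miden_crypto::merkle::{MerkleStore, MerkleTree, NodeIndex, RecordingMerkleStore};
use miden_crypto::{Felt, Word, ZERO};

fn main() {
    // build a small tree and load it into a regular (non-recording) store
    // NOTE: crate name `miden_crypto` is assumed; adjust to your dependency name
    let leaves: Vec<Word> = (1..=8u64).map(|v| [Felt::new(v), ZERO, ZERO, ZERO]).collect();
    let tree = MerkleTree::new(leaves).unwrap();
    let store = MerkleStore::from(&tree);

    // wrap the store in a recording store; reads against the initial data set are traced
    let recorder: RecordingMerkleStore = store.into();
    let index = NodeIndex::new(tree.depth(), 3).unwrap();
    let node = recorder.get_node(tree.root(), index).unwrap();
    assert_eq!(node, tree.get_node(index).unwrap());

    // the proof keeps only the nodes touched by the reads above; it can back a much
    // smaller regular store that still serves the same reads
    let partial_store: MerkleStore = recorder.into_proof().into();
    assert_eq!(partial_store.get_node(tree.root(), index).unwrap(), node);
}
```

This is a sketch of the intended flow rather than a definitive example; the `test_recorder` test added below exercises the same path against both a `MerkleTree` and a `SimpleSmt`.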
diff --git a/src/merkle/store/tests.rs b/src/merkle/store/tests.rs
index 1bc5db9..56b124f 100644
--- a/src/merkle/store/tests.rs
+++ b/src/merkle/store/tests.rs
@@ -1,13 +1,16 @@
 use super::{
-    Deserializable, EmptySubtreeRoots, MerkleError, MerklePath, MerkleStore, NodeIndex, RpoDigest,
-    Serializable,
+    EmptySubtreeRoots, MerkleError, MerklePath, MerkleStore, NodeIndex, RecordingMerkleStore,
+    RpoDigest,
 };
 use crate::{
     hash::rpo::Rpo256,
     merkle::{digests_to_words, int_to_leaf, int_to_node, MerklePathSet, MerkleTree, SimpleSmt},
-    Felt, Word, WORD_SIZE,
+    Felt, Word, ONE, WORD_SIZE, ZERO,
 };
 
+#[cfg(feature = "std")]
+use super::{Deserializable, Serializable};
+
 #[cfg(feature = "std")]
 use std::error::Error;
 
@@ -17,6 +20,7 @@ use std::error::Error;
 const KEYS4: [u64; 4] = [0, 1, 2, 3];
 const VALUES4: [RpoDigest; 4] = [int_to_node(1), int_to_node(2), int_to_node(3), int_to_node(4)];
 
+const KEYS8: [u64; 8] = [0, 1, 2, 3, 4, 5, 6, 7];
 const VALUES8: [RpoDigest; 8] = [
     int_to_node(1),
     int_to_node(2),
@@ -34,7 +38,7 @@ const VALUES8: [RpoDigest; 8] = [
 #[test]
 fn test_root_not_in_store() -> Result<(), MerkleError> {
     let mtree = MerkleTree::new(digests_to_words(&VALUES4))?;
-    let store = MerkleStore::from(&mtree);
+    let store = MerkleStore::default();
     assert_eq!(
         store.get_node(VALUES4[0], NodeIndex::make(mtree.depth(), 0)),
         Err(MerkleError::RootNotInStore(VALUES4[0])),
@@ -810,3 +814,52 @@ fn test_serialization() -> Result<(), Box<dyn Error>> {
     assert_eq!(store, decoded);
     Ok(())
 }
+
+// MERKLE RECORDER
+// ================================================================================================
+#[test]
+fn test_recorder() {
+    // instantiate recorder from MerkleTree and SimpleSmt
+    let mtree = MerkleTree::new(digests_to_words(&VALUES4)).unwrap();
+    let smtree = SimpleSmt::with_leaves(
+        64,
+        KEYS8.into_iter().zip(VALUES8.into_iter().map(|x| x.into()).rev()),
+    )
+    .unwrap();
+
+    let mut recorder: RecordingMerkleStore =
+        mtree.inner_nodes().chain(smtree.inner_nodes()).collect();
+
+    // get nodes from both trees and make sure they are correct
+    let index_0 = NodeIndex::new(mtree.depth(), 0).unwrap();
+    let node = recorder.get_node(mtree.root(), index_0).unwrap();
+    assert_eq!(node, mtree.get_node(index_0).unwrap());
+
+    let index_1 = NodeIndex::new(smtree.depth(), 1).unwrap();
+    let node = recorder.get_node(smtree.root(), index_1).unwrap();
+    assert_eq!(node, smtree.get_node(index_1).unwrap());
+
+    // insert a value and assert that when we request it next time it is accurate
+    let new_value = [ZERO, ZERO, ONE, ONE].into();
+    let index_2 = NodeIndex::new(smtree.depth(), 2).unwrap();
+    let root = recorder.set_node(smtree.root(), index_2, new_value).unwrap().root;
+    assert_eq!(recorder.get_node(root, index_2).unwrap(), new_value);
+
+    // construct the proof
+    let proof = recorder.into_proof();
+    let merkle_store: MerkleStore = proof.into();
+
+    // make sure the proof contains all nodes from both trees
+    let node = merkle_store.get_node(mtree.root(), index_0).unwrap();
+    assert_eq!(node, mtree.get_node(index_0).unwrap());
+
+    let node = merkle_store.get_node(smtree.root(), index_1).unwrap();
+    assert_eq!(node, smtree.get_node(index_1).unwrap());
+
+    let node = merkle_store.get_node(smtree.root(), index_2).unwrap();
+    assert_eq!(node, smtree.get_leaf(index_2.value()).unwrap().into());
+
+    // assert that it doesn't contain nodes that were not recorded
+    let not_recorded_index = NodeIndex::new(smtree.depth(), 4).unwrap();
+    assert!(merkle_store.get_node(smtree.root(), not_recorded_index).is_err());
+    assert!(smtree.get_node(not_recorded_index).is_ok());
+}
diff --git a/src/utils.rs b/src/utils.rs
index b9b7849..e350b69 100644
--- a/src/utils.rs
+++ b/src/utils.rs
@@ -11,7 +11,7 @@ pub use std::format;
 
 // RE-EXPORTS
 // ================================================================================================
 pub use winter_utils::{
-    collections, string, uninit_vector, ByteReader, ByteWriter, Deserializable,
+    collections, string, uninit_vector, Box, ByteReader, ByteWriter, Deserializable,
     DeserializationError, Serializable, SliceReader,
 };
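[Editorial note, not part of the patch: a self-contained sketch of the `RecordingMap` added in `src/data.rs` used on its own. It assumes the `miden_crypto` crate name and that `KvMap` is imported so its `get`/`contains_key`/`insert` methods are in scope; in a `std` build the returned proof is a plain `std::collections::BTreeMap`.]

```rust
use std::collections::BTreeMap;

use miden_crypto::data::{KvMap, RecordingMap}; // crate name assumed

fn main() {
    // seed the map with an initial data set
    let mut map = RecordingMap::new([(1u64, 10u64), (2, 20), (3, 30)]);

    // reads against the initial data set are traced ...
    assert_eq!(map.get(&1), Some(&10));
    // ... including reads made through `contains_key`
    assert!(map.contains_key(&2));

    // writes land in `delta` and do not show up in the proof
    map.insert(4, 40);

    // the proof keeps only the initial entries that were actually read
    let proof: BTreeMap<u64, u64> = map.into_proof();
    assert_eq!(proof.len(), 2);
    assert!(proof.contains_key(&1) && proof.contains_key(&2));
    assert!(!proof.contains_key(&3) && !proof.contains_key(&4));
}
```

The sketch mirrors the `test_get_item`, `test_contains_key`, and `test_len` tests above: only keys from the initial data set that were read end up in the proof, while keys that were only written or never touched are excluded.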