
feat: introduce recorder objects

al-gkr-basic-workflow
frisitano committed 1 year ago · commit 679a30e02e
9 changed files with 541 additions and 86 deletions:
  1. src/data.rs (+307 -0)
  2. src/lib.rs (+1 -0)
  3. src/merkle/mmr/full.rs (+1 -0)
  4. src/merkle/mod.rs (+5 -1)
  5. src/merkle/partial_mt/mod.rs (+2 -1)
  6. src/merkle/path.rs (+6 -0)
  7. src/merkle/store/mod.rs (+161 -79)
  8. src/merkle/store/tests.rs (+57 -4)
  9. src/utils.rs (+1 -1)

src/data.rs (+307 -0)

@ -0,0 +1,307 @@
use super::utils::{
collections::{btree_map::IntoIter, BTreeMap, BTreeSet},
Box,
};
use core::{
cell::RefCell,
iter::{Chain, Filter},
};
// KEY-VALUE MAP TRAIT
// ================================================================================================
/// A trait that defines the interface for a key-value map.
pub trait KvMap<K, V> {
fn get(&self, key: &K) -> Option<&V>;
fn contains_key(&self, key: &K) -> bool;
fn len(&self) -> usize;
fn is_empty(&self) -> bool {
self.len() == 0
}
fn iter(&self) -> Box<dyn Iterator<Item = (&K, &V)> + '_>;
fn insert(&mut self, key: K, value: V) -> Option<V>;
}
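Both RecordingMap (below) and BTreeMap (further down in this file) implement this trait, so downstream code can stay generic over the backing map. A minimal sketch of such a consumer; `count_present` is a hypothetical helper, not part of this commit:
/// Hypothetical helper: counts how many of the given keys are present in a `KvMap` backend.
fn count_present<K, V>(map: &impl KvMap<K, V>, keys: &[K]) -> usize {
    keys.iter().filter(|key| map.contains_key(key)).count()
}
When the backend is a RecordingMap, every hit against the initial data set is also recorded in its trace as a side effect.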
// RECORDING MAP
// ================================================================================================
/// A [RecordingMap] that records read requests to the underlying key-value map.
/// The data recorder is used to generate a proof for read requests.
///
/// The [RecordingMap] is composed of four parts:
/// - `data`: which contains the initial key-value pairs from the underlying data set.
/// - `delta`: which contains key-value pairs which have been created after instantiation.
/// - `updated_keys`: which tracks keys from `data` which have been updated in `delta`.
/// - `trace`: which contains the keys from the initial data set (`data`) that are read.
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct RecordingMap<K, V> {
data: BTreeMap<K, V>,
delta: BTreeMap<K, V>,
updated_keys: BTreeSet<K>,
trace: RefCell<BTreeSet<K>>,
}
impl<K: Ord + Clone, V: Clone> RecordingMap<K, V> {
// CONSTRUCTOR
// --------------------------------------------------------------------------------------------
/// Returns a new [RecordingMap] instance initialized with the provided key-value pairs.
pub fn new(init: impl IntoIterator<Item = (K, V)>) -> Self {
RecordingMap {
data: init.into_iter().collect(),
delta: BTreeMap::new(),
updated_keys: BTreeSet::new(),
trace: RefCell::new(BTreeSet::new()),
}
}
// FINALIZER
// --------------------------------------------------------------------------------------------
/// Consumes the [RecordingMap] and returns a [BTreeMap] containing the key-value pairs from
/// the initial data set that were read during recording.
pub fn into_proof(self) -> BTreeMap<K, V> {
self.data
.into_iter()
.filter(|(k, _)| self.trace.borrow().contains(k))
.collect::<BTreeMap<_, _>>()
}
}
impl<K: Ord + Clone, V: Clone> KvMap<K, V> for RecordingMap<K, V> {
// ACCESSORS
// --------------------------------------------------------------------------------------------
/// Returns a reference to the value associated with the given key if the value exists. If the
/// key is part of the initial data set, the key access is recorded.
fn get(&self, key: &K) -> Option<&V> {
if let Some(value) = self.delta.get(key) {
return Some(value);
}
match self.data.get(key) {
None => None,
Some(value) => {
self.trace.borrow_mut().insert(key.clone());
Some(value)
}
}
}
/// Returns a boolean to indicate whether the given key exists in the data set. If the key is
/// part of the initial data set, the key access is recorded.
fn contains_key(&self, key: &K) -> bool {
if self.delta.contains_key(key) {
return true;
}
match self.data.contains_key(key) {
true => {
self.trace.borrow_mut().insert(key.clone());
true
}
false => false,
}
}
/// Returns the number of key-value pairs in the data set.
fn len(&self) -> usize {
self.data.len() + self.delta.len() - self.updated_keys.len()
}
/// Returns an iterator over the key-value pairs in the data set.
fn iter(&self) -> Box<dyn Iterator<Item = (&K, &V)> + '_> {
Box::new(
self.data
.iter()
.filter(|(k, _)| !self.updated_keys.contains(k))
.chain(self.delta.iter()),
)
}
// MUTATORS
// --------------------------------------------------------------------------------------------
/// Inserts a key-value pair into the data set. If the key already exists in the data set, the
/// value is updated and the old value is returned.
fn insert(&mut self, key: K, value: V) -> Option<V> {
if let Some(value) = self.delta.insert(key.clone(), value) {
return Some(value);
}
match self.data.get(&key) {
None => None,
Some(value) => {
self.trace.borrow_mut().insert(key.clone());
self.updated_keys.insert(key);
Some(value.clone())
}
}
}
}
// RECORDING MAP TRAIT IMPLS
// ================================================================================================
impl<K: Clone + Ord, V: Clone> Extend<(K, V)> for RecordingMap<K, V> {
fn extend<T: IntoIterator<Item = (K, V)>>(&mut self, iter: T) {
iter.into_iter().for_each(move |(k, v)| {
self.insert(k, v);
});
}
}
impl<K: Ord + Clone, V: Clone> Default for RecordingMap<K, V> {
fn default() -> Self {
RecordingMap::new(BTreeMap::new())
}
}
impl<K: Ord + 'static, V> IntoIterator for RecordingMap<K, V> {
type Item = (K, V);
type IntoIter =
Chain<Filter<IntoIter<K, V>, Box<dyn FnMut(&Self::Item) -> bool>>, IntoIter<K, V>>;
fn into_iter(self) -> Self::IntoIter {
#[allow(clippy::type_complexity)]
let filter_updated: Box<dyn FnMut(&Self::Item) -> bool> =
Box::new(move |(k, _)| !self.updated_keys.contains(k));
let data_iter = self.data.into_iter().filter(filter_updated);
let updates_iter = self.delta.into_iter();
data_iter.chain(updates_iter)
}
}
// BTREE MAP `KvMap` IMPLEMENTATION
// ================================================================================================
impl<K: Ord, V> KvMap<K, V> for BTreeMap<K, V> {
fn get(&self, key: &K) -> Option<&V> {
self.get(key)
}
fn contains_key(&self, key: &K) -> bool {
self.contains_key(key)
}
fn len(&self) -> usize {
self.len()
}
fn iter(&self) -> Box<dyn Iterator<Item = (&K, &V)> + '_> {
Box::new(self.iter())
}
fn insert(&mut self, key: K, value: V) -> Option<V> {
self.insert(key, value)
}
}
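With both implementations in place, the same generic code accepts either backend. Continuing the hypothetical `count_present` helper sketched after the trait above:
let plain: BTreeMap<u64, u64> = [(1u64, 10u64)].into_iter().collect();
let recording = RecordingMap::new([(1u64, 10u64)]);
assert_eq!(count_present(&plain, &[1, 2]), 1);
assert_eq!(count_present(&recording, &[1, 2]), 1);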
// TESTS
// ================================================================================================
#[cfg(test)]
mod test_recorder {
use super::*;
const ITEMS: [(u64, u64); 5] = [(0, 0), (1, 1), (2, 2), (3, 3), (4, 4)];
#[test]
fn test_get_item() {
// instantiate a recording map
let map = RecordingMap::new(ITEMS.to_vec());
// get a few items
let get_items = [0, 1, 2];
for key in get_items.iter() {
map.get(key);
}
// convert the map into a proof
let proof = map.into_proof();
// check that the proof contains the expected values
for (key, value) in ITEMS.iter() {
match get_items.contains(key) {
true => assert_eq!(proof.get(key), Some(value)),
false => assert_eq!(proof.get(key), None),
}
}
}
#[test]
fn test_contains_key() {
// instantiate a recording map
let map = RecordingMap::new(ITEMS.to_vec());
// check if the map contains a few items
let get_items = [0, 1, 2];
for key in get_items.iter() {
map.contains_key(key);
}
// convert the map into a proof
let proof = map.into_proof();
// check that the proof contains the expected values
for (key, _) in ITEMS.iter() {
match get_items.contains(key) {
true => assert_eq!(proof.contains_key(key), true),
false => assert_eq!(proof.contains_key(key), false),
}
}
}
#[test]
fn test_len() {
// instantiate a recording map
let mut map = RecordingMap::new(ITEMS.to_vec());
// length of the map should be equal to the number of items
assert_eq!(map.len(), ITEMS.len());
// inserting entry with key that already exists should not change the length
map.insert(4, 5);
assert_eq!(map.len(), ITEMS.len());
// inserting entry with new key should increase the length
map.insert(5, 5);
assert_eq!(map.len(), ITEMS.len() + 1);
// get some items so that they are saved in the trace
let get_items = [0, 1, 2];
for key in get_items.iter() {
map.contains_key(key);
}
// Note: The length reported by the proof will be different to the length originally
// reported by the map.
let proof = map.into_proof();
// length of the proof should be equal to get_items + 1. The extra item is the original
// value at key = 4u64
assert_eq!(proof.len(), get_items.len() + 1);
}
#[test]
fn test_iter() {
let mut map = RecordingMap::new(ITEMS.to_vec());
assert!(map.iter().all(|(x, y)| ITEMS.contains(&(*x, *y))));
// when inserting an entry with a key that already exists, the iterator should return the new value
let new_value = 5;
map.insert(4, new_value);
assert_eq!(map.iter().count(), ITEMS.len());
assert!(map.iter().all(|(x, y)| if x == &4 {
y == &new_value
} else {
ITEMS.contains(&(*x, *y))
}));
}
#[test]
fn test_is_empty() {
// instantiate an empty recording map
let empty_map: RecordingMap<u64, u64> = RecordingMap::default();
assert!(empty_map.is_empty());
// instantiate a non-empty recording map
let map = RecordingMap::new(ITEMS.to_vec());
assert!(!map.is_empty());
}
}
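For orientation, the end-to-end flow these tests exercise piecewise can be summarized in one sketch (not part of the commit, written in the style of the tests above):
#[test]
fn recording_flow_sketch() {
    // seed the map with a small initial data set
    let mut map = RecordingMap::new([(0u64, 0u64), (1, 1), (2, 2)]);
    // reads against the initial data set are recorded in the trace
    assert_eq!(map.get(&0), Some(&0));
    // inserts of new keys land in `delta` and do not show up in the proof
    map.insert(3, 3);
    // the proof contains exactly the initial entries that were read
    let proof = map.into_proof();
    assert_eq!(proof.len(), 1);
    assert!(proof.contains_key(&0));
}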

src/lib.rs (+1 -0)

@ -4,6 +4,7 @@
#[cfg_attr(test, macro_use)]
extern crate alloc;
pub mod data;
pub mod hash;
pub mod merkle;
pub mod utils;

src/merkle/mmr/full.rs (+1 -0)

@ -28,6 +28,7 @@ use std::error::Error;
///
/// Since this is a full representation of the MMR, elements are never removed and the MMR will
/// grow roughly `O(2n)` in number of leaf elements.
#[derive(Debug, Clone)]
pub struct Mmr {
/// Refer to the `forest` method documentation for details of the semantics of this value.
pub(super) forest: usize,

src/merkle/mod.rs (+5 -1)

@ -1,4 +1,5 @@
use super::{
data::{KvMap, RecordingMap},
hash::rpo::{Rpo256, RpoDigest},
utils::collections::{vec, BTreeMap, BTreeSet, Vec},
Felt, StarkField, Word, WORD_SIZE, ZERO,
@ -33,7 +34,10 @@ mod mmr;
pub use mmr::{Mmr, MmrPeaks, MmrProof};
mod store;
pub use store::MerkleStore;
pub use store::{
GenericMerkleStore, MerkleMap, MerkleMapT, MerkleStore, RecordingMerkleMap,
RecordingMerkleStore,
};
mod node;
pub use node::InnerNodeInfo;

src/merkle/partial_mt/mod.rs (+2 -1)

@ -154,7 +154,8 @@ impl PartialMerkleTree {
self.leaves.iter().map(|&leaf| {
(
leaf,
self.get_node(leaf).expect(&format!("Leaf with {leaf} is not in the nodes map")),
self.get_node(leaf)
.unwrap_or_else(|_| panic!("Leaf with {leaf} is not in the nodes map")),
)
})
}

src/merkle/path.rs (+6 -0)

@ -68,6 +68,12 @@ impl MerklePath {
}
}
impl From<MerklePath> for Vec<RpoDigest> {
fn from(path: MerklePath) -> Self {
path.nodes
}
}
impl From<Vec<RpoDigest>> for MerklePath {
fn from(path: Vec<RpoDigest>) -> Self {
Self::new(path)

src/merkle/store/mod.rs (+161 -79)

@ -1,6 +1,7 @@
use super::{
mmr::Mmr, BTreeMap, EmptySubtreeRoots, InnerNodeInfo, MerkleError, MerklePath, MerklePathSet,
MerkleTree, NodeIndex, RootPath, Rpo256, RpoDigest, SimpleSmt, TieredSmt, ValuePath, Vec,
mmr::Mmr, BTreeMap, EmptySubtreeRoots, InnerNodeInfo, KvMap, MerkleError, MerklePath,
MerklePathSet, MerkleTree, NodeIndex, RecordingMap, RootPath, Rpo256, RpoDigest, SimpleSmt,
TieredSmt, ValuePath, Vec,
};
use crate::utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable};
use core::borrow::Borrow;
@ -8,12 +9,56 @@ use core::borrow::Borrow;
#[cfg(test)]
mod tests;
// TRAIT / TYPE DECLARATIONS
// ================================================================================================
/// A supertrait that defines the required traits for a type to be used as a data map backend for
/// the [GenericMerkleStore]
pub trait MerkleMapT:
KvMap<RpoDigest, Node>
+ Extend<(RpoDigest, Node)>
+ FromIterator<(RpoDigest, Node)>
+ IntoIterator<Item = (RpoDigest, Node)>
{
}
// MERKLE STORE
// ------------------------------------------------------------------------------------------------
/// Type that represents a standard MerkleStore.
pub type MerkleStore = GenericMerkleStore<MerkleMap>;
/// Declaration of a BTreeMap that uses an [RpoDigest] as a key and a [Node] as the value. This type
/// is used as a data backend for the standard [GenericMerkleStore].
pub type MerkleMap = BTreeMap<RpoDigest, Node>;
/// Implementation of [MerkleMapT] trait on [MerkleMap].
impl MerkleMapT for MerkleMap {}
// RECORDING MERKLE STORE
// ------------------------------------------------------------------------------------------------
/// Type that represents a MerkleStore with recording capabilities.
pub type RecordingMerkleStore = GenericMerkleStore<RecordingMerkleMap>;
/// Declaration of a [RecordingMap] that uses an [RpoDigest] as a key and a [Node] as the value.
/// This type is used as a data backend for the recording [GenericMerkleStore].
pub type RecordingMerkleMap = RecordingMap<RpoDigest, Node>;
/// Implementation of [MerkleMapT] on [RecordingMerkleMap].
impl MerkleMapT for RecordingMerkleMap {}
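Putting the two flavors side by side: a standard store can be switched into recording mode and reduced back to a proof afterwards. A minimal sketch based on the conversions added further down in this file:
// start from a standard store and switch it into recording mode
let store = MerkleStore::default();
let recorder: RecordingMerkleStore = store.into();
// ... reads against `recorder` are traced internally ...
// the recorded subset comes back as a plain `MerkleMap`, which can be
// turned into a standard `MerkleStore` again for verification
let proof: MerkleMap = recorder.into_proof();
let _proof_store: MerkleStore = proof.into();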
// NODE DEFINITION
// ================================================================================================
#[derive(Debug, Default, Copy, Clone, Eq, PartialEq)]
pub struct Node {
left: RpoDigest,
right: RpoDigest,
}
// MERKLE STORE IMPLEMENTATION
// ================================================================================================
/// An in-memory data store for Merkleized data.
///
/// This is an in-memory data store for Merkle trees; this store allows all the nodes of multiple
@ -51,9 +96,8 @@ pub struct Node {
/// let tree2 = MerkleTree::new(vec![A, B, C, D, E, F, G, H1]).unwrap();
///
/// // populates the store with two merkle trees, common nodes are shared
/// store
/// .extend(tree1.inner_nodes())
/// .extend(tree2.inner_nodes());
/// store.extend(tree1.inner_nodes());
/// store.extend(tree2.inner_nodes());
///
/// // every leaf except the last are the same
/// for i in 0..7 {
@ -78,41 +122,25 @@ pub struct Node {
/// assert_eq!(store.num_internal_nodes() - 255, 10);
/// ```
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct MerkleStore {
nodes: BTreeMap<RpoDigest, Node>,
pub struct GenericMerkleStore<T: MerkleMapT> {
nodes: T,
}
impl Default for MerkleStore {
impl<T: MerkleMapT> Default for GenericMerkleStore<T> {
fn default() -> Self {
Self::new()
}
}
impl MerkleStore {
impl<T: MerkleMapT> GenericMerkleStore<T> {
// CONSTRUCTORS
// --------------------------------------------------------------------------------------------
/// Creates an empty `MerkleStore` instance.
pub fn new() -> MerkleStore {
/// Creates an empty `GenericMerkleStore` instance.
pub fn new() -> GenericMerkleStore<T> {
// pre-populate the store with the empty hashes
let subtrees = EmptySubtreeRoots::empty_hashes(255);
let nodes = subtrees
.iter()
.rev()
.copied()
.zip(subtrees.iter().rev().skip(1).copied())
.map(|(child, parent)| {
(
parent,
Node {
left: child,
right: child,
},
)
})
.collect();
MerkleStore { nodes }
let nodes = empty_hashes().into_iter().collect();
GenericMerkleStore { nodes }
}
// PUBLIC ACCESSORS
@ -261,12 +289,12 @@ impl MerkleStore {
/// nodes which are descendants of the specified roots.
///
/// The roots for which no descendants exist in this Merkle store are ignored.
pub fn subset<I, R>(&self, roots: I) -> MerkleStore
pub fn subset<I, R>(&self, roots: I) -> GenericMerkleStore<T>
where
I: Iterator<Item = R>,
R: Borrow<RpoDigest>,
{
let mut store = MerkleStore::new();
let mut store = GenericMerkleStore::new();
for root in roots {
let root = *root.borrow();
store.clone_tree_from(root, self);
@ -274,7 +302,7 @@ impl MerkleStore {
store
}
/// Iterator over the inner nodes of the [MerkleStore].
/// Iterator over the inner nodes of the [GenericMerkleStore].
pub fn inner_nodes(&self) -> impl Iterator<Item = InnerNodeInfo> + '_ {
self.nodes.iter().map(|(r, n)| InnerNodeInfo {
value: *r,
@ -286,23 +314,6 @@ impl MerkleStore {
// STATE MUTATORS
// --------------------------------------------------------------------------------------------
/// Adds a sequence of nodes yielded by the provided iterator into the store.
pub fn extend<I>(&mut self, iter: I) -> &mut MerkleStore
where
I: Iterator<Item = InnerNodeInfo>,
{
for node in iter {
let value: RpoDigest = node.value;
let left: RpoDigest = node.left;
let right: RpoDigest = node.right;
debug_assert_eq!(Rpo256::merge(&[left, right]), value);
self.nodes.insert(value, Node { left, right });
}
self
}
/// Adds all the nodes of a Merkle path represented by `path`, opening to `node`. Returns the
/// new root.
///
@ -332,7 +343,7 @@ impl MerkleStore {
/// This will compute the sibling elements for each Merkle `path` and include all the nodes
/// into the store.
///
/// For further reference, check [MerkleStore::add_merkle_path].
/// For further reference, check [GenericMerkleStore::add_merkle_path].
pub fn add_merkle_paths<I>(&mut self, paths: I) -> Result<(), MerkleError>
where
I: IntoIterator<Item = (u64, RpoDigest, MerklePath)>,
@ -345,7 +356,7 @@ impl MerkleStore {
/// Appends the provided [MerklePathSet] into the store.
///
/// For further reference, check [MerkleStore::add_merkle_path].
/// For further reference, check [GenericMerkleStore::add_merkle_path].
pub fn add_merkle_path_set(
&mut self,
path_set: &MerklePathSet,
@ -420,55 +431,126 @@ impl MerkleStore {
}
}
// RECORDING MERKLE STORE FINALIZER
// ===============================================================================================
impl RecordingMerkleStore {
/// Consumes the [RecordingMerkleStore] and returns a [MerkleMap] containing the key-value pairs from
/// the initial data set that were read during recording.
pub fn into_proof(self) -> MerkleMap {
self.nodes.into_proof()
}
}
// EMPTY HASHES
// ================================================================================================
/// Creates empty hashes for all the subtrees of a tree with a max depth of 255.
fn empty_hashes() -> impl IntoIterator<Item = (RpoDigest, Node)> {
let subtrees = EmptySubtreeRoots::empty_hashes(255);
subtrees.iter().rev().copied().zip(subtrees.iter().rev().skip(1).copied()).map(
|(child, parent)| {
(
parent,
Node {
left: child,
right: child,
},
)
},
)
}
/// Consumes an iterator of [InnerNodeInfo] and returns an iterator of `(value, node)` tuples
/// which include the nodes associated with the roots of empty subtrees up to a depth of 255.
fn combine_nodes_with_empty_hashes(
nodes: impl IntoIterator<Item = InnerNodeInfo>,
) -> impl Iterator<Item = (RpoDigest, Node)> {
nodes
.into_iter()
.map(|info| {
(
info.value,
Node {
left: info.left,
right: info.right,
},
)
})
.chain(empty_hashes().into_iter())
}
// CONVERSIONS
// ================================================================================================
impl From<&MerkleTree> for MerkleStore {
impl<T: MerkleMapT> From<&MerkleTree> for GenericMerkleStore<T> {
fn from(value: &MerkleTree) -> Self {
let mut store = MerkleStore::new();
store.extend(value.inner_nodes());
store
let nodes = combine_nodes_with_empty_hashes(value.inner_nodes()).collect();
GenericMerkleStore { nodes }
}
}
impl From<&SimpleSmt> for MerkleStore {
impl<T: MerkleMapT> From<&SimpleSmt> for GenericMerkleStore<T> {
fn from(value: &SimpleSmt) -> Self {
let mut store = MerkleStore::new();
store.extend(value.inner_nodes());
store
let nodes = combine_nodes_with_empty_hashes(value.inner_nodes()).collect();
GenericMerkleStore { nodes }
}
}
impl From<&Mmr> for MerkleStore {
impl<T: MerkleMapT> From<&Mmr> for GenericMerkleStore<T> {
fn from(value: &Mmr) -> Self {
let mut store = MerkleStore::new();
store.extend(value.inner_nodes());
store
let nodes = combine_nodes_with_empty_hashes(value.inner_nodes()).collect();
GenericMerkleStore { nodes }
}
}
impl From<&TieredSmt> for MerkleStore {
impl<T: MerkleMapT> From<&TieredSmt> for GenericMerkleStore<T> {
fn from(value: &TieredSmt) -> Self {
let mut store = MerkleStore::new();
store.extend(value.inner_nodes());
store
let nodes = combine_nodes_with_empty_hashes(value.inner_nodes()).collect();
GenericMerkleStore { nodes }
}
}
impl FromIterator<InnerNodeInfo> for MerkleStore {
fn from_iter<T: IntoIterator<Item = InnerNodeInfo>>(iter: T) -> Self {
let mut store = MerkleStore::new();
store.extend(iter.into_iter());
store
impl<T: MerkleMapT> FromIterator<InnerNodeInfo> for GenericMerkleStore<T> {
fn from_iter<I: IntoIterator<Item = InnerNodeInfo>>(iter: I) -> Self {
let nodes = combine_nodes_with_empty_hashes(iter).collect();
GenericMerkleStore { nodes }
}
}
impl From<MerkleStore> for RecordingMerkleStore {
fn from(value: MerkleStore) -> Self {
GenericMerkleStore {
nodes: RecordingMerkleMap::new(value.nodes.into_iter()),
}
}
}
impl FromIterator<(RpoDigest, Node)> for RecordingMerkleMap {
fn from_iter<T: IntoIterator<Item = (RpoDigest, Node)>>(iter: T) -> Self {
RecordingMerkleMap::new(iter)
}
}
impl From<MerkleMap> for MerkleStore {
fn from(value: MerkleMap) -> Self {
GenericMerkleStore { nodes: value }
}
}
// ITERATORS
// ================================================================================================
impl Extend<InnerNodeInfo> for MerkleStore {
fn extend<T: IntoIterator<Item = InnerNodeInfo>>(&mut self, iter: T) {
self.extend(iter.into_iter());
impl<T: MerkleMapT> Extend<InnerNodeInfo> for GenericMerkleStore<T> {
fn extend<I: IntoIterator<Item = InnerNodeInfo>>(&mut self, iter: I) {
self.nodes.extend(iter.into_iter().map(|info| {
(
info.value,
Node {
left: info.left,
right: info.right,
},
)
}));
}
}
@ -490,7 +572,7 @@ impl Deserializable for Node {
}
}
impl Serializable for MerkleStore {
impl<T: MerkleMapT> Serializable for GenericMerkleStore<T> {
fn write_into<W: ByteWriter>(&self, target: &mut W) {
target.write_u64(self.nodes.len() as u64);
@ -501,10 +583,10 @@ impl Serializable for MerkleStore {
}
}
impl Deserializable for MerkleStore {
impl Deserializable for GenericMerkleStore<MerkleMap> {
fn read_from<R: ByteReader>(source: &mut R) -> Result<Self, DeserializationError> {
let len = source.read_u64()?;
let mut nodes: BTreeMap<RpoDigest, Node> = BTreeMap::new();
let mut nodes: MerkleMap = BTreeMap::new();
for _ in 0..len {
let key = RpoDigest::read_from(source)?;
@ -512,6 +594,6 @@ impl Deserializable for MerkleStore {
nodes.insert(key, value);
}
Ok(MerkleStore { nodes })
Ok(GenericMerkleStore { nodes })
}
}
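Since `Serializable` is implemented for any backend but `Deserializable` only for the `MerkleMap`-backed store, a round trip looks roughly as follows (a sketch assuming the `to_bytes` / `read_from_bytes` provided methods of the winter_utils traits re-exported in `src/utils.rs`):
let store = MerkleStore::default();
let bytes = store.to_bytes();
let decoded = MerkleStore::read_from_bytes(&bytes).expect("valid encoding");
assert_eq!(store, decoded);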

src/merkle/store/tests.rs (+57 -4)

@ -1,13 +1,16 @@
use super::{
Deserializable, EmptySubtreeRoots, MerkleError, MerklePath, MerkleStore, NodeIndex, RpoDigest,
Serializable,
EmptySubtreeRoots, MerkleError, MerklePath, MerkleStore, NodeIndex, RecordingMerkleStore,
RpoDigest,
};
use crate::{
hash::rpo::Rpo256,
merkle::{digests_to_words, int_to_leaf, int_to_node, MerklePathSet, MerkleTree, SimpleSmt},
Felt, Word, WORD_SIZE,
Felt, Word, ONE, WORD_SIZE, ZERO,
};
#[cfg(feature = "std")]
use super::{Deserializable, Serializable};
#[cfg(feature = "std")]
use std::error::Error;
@ -17,6 +20,7 @@ use std::error::Error;
const KEYS4: [u64; 4] = [0, 1, 2, 3];
const VALUES4: [RpoDigest; 4] = [int_to_node(1), int_to_node(2), int_to_node(3), int_to_node(4)];
const KEYS8: [u64; 8] = [0, 1, 2, 3, 4, 5, 6, 7];
const VALUES8: [RpoDigest; 8] = [
int_to_node(1),
int_to_node(2),
@ -34,7 +38,7 @@ const VALUES8: [RpoDigest; 8] = [
#[test]
fn test_root_not_in_store() -> Result<(), MerkleError> {
let mtree = MerkleTree::new(digests_to_words(&VALUES4))?;
let store = MerkleStore::from(&mtree);
let store = MerkleStore::default();
assert_eq!(
store.get_node(VALUES4[0], NodeIndex::make(mtree.depth(), 0)),
Err(MerkleError::RootNotInStore(VALUES4[0])),
@ -810,3 +814,52 @@ fn test_serialization() -> Result<(), Box<dyn Error>> {
assert_eq!(store, decoded);
Ok(())
}
// MERKLE RECORDER
// ================================================================================================
#[test]
fn test_recorder() {
// instantiate recorder from MerkleTree and SimpleSmt
let mtree = MerkleTree::new(digests_to_words(&VALUES4)).unwrap();
let smtree = SimpleSmt::with_leaves(
64,
KEYS8.into_iter().zip(VALUES8.into_iter().map(|x| x.into()).rev()),
)
.unwrap();
let mut recorder: RecordingMerkleStore =
mtree.inner_nodes().chain(smtree.inner_nodes()).collect();
// get nodes from both trees and make sure they are correct
let index_0 = NodeIndex::new(mtree.depth(), 0).unwrap();
let node = recorder.get_node(mtree.root(), index_0).unwrap();
assert_eq!(node, mtree.get_node(index_0).unwrap());
let index_1 = NodeIndex::new(smtree.depth(), 1).unwrap();
let node = recorder.get_node(smtree.root(), index_1).unwrap();
assert_eq!(node, smtree.get_node(index_1).unwrap());
// insert a value and assert that when we request it next time it is accurate
let new_value = [ZERO, ZERO, ONE, ONE].into();
let index_2 = NodeIndex::new(smtree.depth(), 2).unwrap();
let root = recorder.set_node(smtree.root(), index_2, new_value).unwrap().root;
assert_eq!(recorder.get_node(root, index_2).unwrap(), new_value);
// construct the proof
let proof = recorder.into_proof();
let merkle_store: MerkleStore = proof.into();
// make sure the proof contains all nodes from both trees
let node = merkle_store.get_node(mtree.root(), index_0).unwrap();
assert_eq!(node, mtree.get_node(index_0).unwrap());
let node = merkle_store.get_node(smtree.root(), index_1).unwrap();
assert_eq!(node, smtree.get_node(index_1).unwrap());
let node = merkle_store.get_node(smtree.root(), index_2).unwrap();
assert_eq!(node, smtree.get_leaf(index_2.value()).unwrap().into());
// assert that it doesn't contain nodes that were not recorded
let not_recorded_index = NodeIndex::new(smtree.depth(), 4).unwrap();
assert!(merkle_store.get_node(smtree.root(), not_recorded_index).is_err());
assert!(smtree.get_node(not_recorded_index).is_ok());
}

src/utils.rs (+1 -1)

@ -11,7 +11,7 @@ pub use std::format;
// RE-EXPORTS
// ================================================================================================
pub use winter_utils::{
collections, string, uninit_vector, ByteReader, ByteWriter, Deserializable,
collections, string, uninit_vector, Box, ByteReader, ByteWriter, Deserializable,
DeserializationError, Serializable, SliceReader,
};
