Mirror of https://github.com/arnaucube/miden-crypto.git (synced 2026-01-12 00:51:29 +01:00)
Compare commits
78 Commits
Commit SHA1s (author and date columns were not captured):
18302d68e0, 858f95d4a1, b2d6866d41, f52ac29a02, f08644e4df, 679a30e02e, cede2e57da, 4215e83ae5, fe5cac9edc, 53d52b8adc,
1be64fc43d, 049ae32cbf, b9def61e28, 0e0a3fda4f, fe9aa8c28c, 766702e37a, 218a64b5c7, 2708a23649, 43f1a4cb64, 55cc71dadf,
ebf71c2dc7, b4324475b6, 23f448fb33, 59f7723221, 2ed880d976, daa27f49f2, dcda57f71a, d9e3211418, 21e7a5c07d, 02673ff87e,
b768eade4d, 51ce07cc34, 550738bd94, 629494b601, 13aeda5a27, e5aba870a2, fcf03478ba, 0ddd0db89b, 2100d6c861, 52409ac039,
4555fc918f, 52db23cd42, 09025b4014, e983e940b2, ae4e27b6c7, 130ae3d12a, 22c9f382c4, 9be4253f19, 59595a2e04, eb316f51bc,
8161477d6a, 158167356d, 3996374a8b, 7fa03c7967, 79915cc346, 45412b5cec, bbb1e641a3, e02507d11e, b5eb68e46c, 61db888b2c,
051167f2e5, 498bc93c15, 00ffc1568a, cbf51dd3e2, ab903a2229, 86dba195b4, bd557bc68c, cf94ac07b7, d873866f52, 9275dd00ad,
429d3bab6f, f19fe6e739, 1df4318399, 433b467953, f46d913b20, f8a62dae76, 49b9029b46, d37f3f5e84
CHANGELOG.md (27)
@@ -1,3 +1,30 @@
+## 0.6.0 (2023-06-25)
+
+* [BREAKING] Added support for recording capabilities for `MerkleStore` (#162).
+* [BREAKING] Refactored Merkle struct APIs to use `RpoDigest` instead of `Word` (#157).
+* Added initial implementation of `PartialMerkleTree` (#156).
+
+## 0.5.0 (2023-05-26)
+
+* Implemented `TieredSmt` (#152, #153).
+* Implemented ability to extract a subset of a `MerkleStore` (#151).
+* Cleaned up `SimpleSmt` interface (#149).
+* Decoupled hashing and padding of peaks in `Mmr` (#148).
+* Added `inner_nodes()` to `MerkleStore` (#146).
+
+## 0.4.0 (2023-04-21)
+
+- Exported `MmrProof` from the crate (#137).
+- Allowed merging of leaves in `MerkleStore` (#138).
+- [BREAKING] Refactored how existing data structures are added to `MerkleStore` (#139).
+
+## 0.3.0 (2023-04-08)
+
+- Added `depth` parameter to SMT constructors in `MerkleStore` (#115).
+- Optimized MMR peak hashing for Miden VM (#120).
+- Added `get_leaf_depth` method to `MerkleStore` (#119).
+- Added inner node iterators to `MerkleTree`, `SimpleSmt`, and `Mmr` (#117, #118, #121).
+
 ## 0.2.0 (2023-03-24)
 
 - Implemented `Mmr` and related structs (#67).
Cargo.toml
@@ -1,12 +1,12 @@
 [package]
 name = "miden-crypto"
-version = "0.2.0"
+version = "0.6.0"
 description = "Miden Cryptographic primitives"
 authors = ["miden contributors"]
 readme = "README.md"
 license = "MIT"
 repository = "https://github.com/0xPolygonMiden/crypto"
-documentation = "https://docs.rs/miden-crypto/0.2.0"
+documentation = "https://docs.rs/miden-crypto/0.6.0"
 categories = ["cryptography", "no-std"]
 keywords = ["miden", "crypto", "hash", "merkle"]
 edition = "2021"
@@ -35,6 +35,6 @@ winter_math = { version = "0.6", package = "winter-math", default-features = fal
 winter_utils = { version = "0.6", package = "winter-utils", default-features = false }
 
 [dev-dependencies]
-criterion = { version = "0.4", features = ["html_reports"] }
+criterion = { version = "0.5", features = ["html_reports"] }
 proptest = "1.1.0"
 rand_utils = { version = "0.6", package = "winter-rand-utils" }
README.md (13)
@@ -12,17 +12,16 @@ For performance benchmarks of these hash functions and their comparison to other
 ## Merkle
 [Merkle module](./src/merkle/) provides a set of data structures related to Merkle trees. All these data structures are implemented using the RPO hash function described above. The data structures are:
 
-* `MerkleTree`: a regular fully-balanced binary Merkle tree. The depth of this tree can be at most 64.
-* `SimpleSmt`: a Sparse Merkle Tree, mapping 63-bit keys to 4-element leaf values.
-* `MerklePathSet`: a collection of Merkle authentication paths all resolving to the same root. The length of the paths can be at most 64.
-* `MerkleStore`: a collection of Merkle trees of different heights designed to efficiently store trees with common subtrees.
 * `Mmr`: a Merkle mountain range structure designed to function as an append-only log.
+* `MerkleTree`: a regular fully-balanced binary Merkle tree. The depth of this tree can be at most 64.
+* `MerklePathSet`: a collection of Merkle authentication paths all resolving to the same root. The length of the paths can be at most 64.
+* `MerkleStore`: a collection of Merkle trees of different heights designed to efficiently store trees with common subtrees. When instantiated with `RecordingMap`, a Merkle store records all accesses to the original data.
+* `PartialMerkleTree`: a partial view of a Merkle tree where some sub-trees may not be known. This is similar to a collection of Merkle paths all resolving to the same root. The length of the paths can be at most 64.
+* `SimpleSmt`: a Sparse Merkle Tree (with no compaction), mapping 64-bit keys to 4-element values.
+* `TieredSmt`: a Sparse Merkle tree (with compaction), mapping 4-element keys to 4-element values.
 
 The module also contains additional supporting components such as `NodeIndex`, `MerklePath`, and `MerkleError` to assist with tree indexation, opening proofs, and reporting inconsistent arguments/state.
 
-## Extra
-[Root module](./src/lib.rs) provides a set of constants, types, aliases, and utils required to use the primitives of this library.
-
 ## Crate features
 This crate can be compiled with the following features:
 
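To make the README's list of Merkle structures concrete, here is a minimal, hypothetical usage sketch. It is not taken from the repository; it only uses calls that appear elsewhere in this diff (`MerkleTree::new`, `SimpleSmt::with_leaves`, `DefaultMerkleStore::from`, `NodeIndex::new`, `get_path`), and the leaf values are made up for illustration.

```rust
use miden_crypto::merkle::{DefaultMerkleStore as MerkleStore, MerkleTree, NodeIndex, SimpleSmt};
use miden_crypto::{Felt, Word};

fn main() {
    // four made-up leaves, each a Word (4 field elements)
    let leaves: Vec<Word> = (0..4u64)
        .map(|i| [Felt::new(i), Felt::new(0), Felt::new(0), Felt::new(0)])
        .collect();

    // a fully-balanced binary Merkle tree over the leaves
    let mtree = MerkleTree::new(leaves.clone()).unwrap();

    // a sparse Merkle tree holding the same values at keys 0..4
    let entries: Vec<(u64, Word)> =
        leaves.iter().enumerate().map(|(i, v)| (i as u64, *v)).collect();
    let smt = SimpleSmt::with_leaves(SimpleSmt::MAX_DEPTH, entries).unwrap();
    let _smt_root = smt.root();

    // a MerkleStore built from the tree can serve openings against its root
    let store = MerkleStore::from(&mtree);
    let index = NodeIndex::new(mtree.depth(), 3).unwrap();
    let _path = store.get_path(mtree.root(), index);
}
```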
@@ -28,7 +28,7 @@ The second scenario is that of sequential hashing where we take a sequence of le
 
 | Function | BLAKE3 | SHA3 | Poseidon | Rp64_256 | RPO_256 |
 | ------------------- | -------| ------- | --------- | --------- | ------- |
-| Apple M1 Pro | 1.1 us | 1.5 us | 19.4 us | 118 us | 70 us |
+| Apple M1 Pro | 1.0 us | 1.5 us | 19.4 us | 118 us | 70 us |
 | Apple M2 | 1.0 us | 1.5 us | 17.4 us | 103 us | 65 us |
 | Amazon Graviton 3 | 1.4 us | | | | 114 us |
 | AMD Ryzen 9 5950X | 0.8 us | 1.7 us | 15.7 us | 120 us | 72 us |
@@ -106,11 +106,5 @@ fn blake3_sequential(c: &mut Criterion) {
     });
 }
 
-criterion_group!(
-    hash_group,
-    rpo256_2to1,
-    rpo256_sequential,
-    blake3_2to1,
-    blake3_sequential
-);
+criterion_group!(hash_group, rpo256_2to1, rpo256_sequential, blake3_2to1, blake3_sequential);
 criterion_main!(hash_group);
@@ -18,8 +18,8 @@ fn smt_rpo(c: &mut Criterion) {
             (i, word)
         })
         .collect();
-    let tree = SimpleSmt::new(depth).unwrap().with_leaves(entries).unwrap();
-    trees.push(tree);
+    let tree = SimpleSmt::with_leaves(depth, entries).unwrap();
+    trees.push((tree, count));
     }
 }
 
@@ -29,10 +29,9 @@ fn smt_rpo(c: &mut Criterion) {
 
     let mut insert = c.benchmark_group(format!("smt update_leaf"));
 
-    for tree in trees.iter_mut() {
+    for (tree, count) in trees.iter_mut() {
         let depth = tree.depth();
-        let count = tree.leaves_count() as u64;
-        let key = count >> 2;
+        let key = *count >> 2;
         insert.bench_with_input(
             format!("simple smt(depth:{depth},count:{count})"),
             &(key, leaf),
@@ -48,10 +47,9 @@ fn smt_rpo(c: &mut Criterion) {
 
     let mut path = c.benchmark_group(format!("smt get_leaf_path"));
 
-    for tree in trees.iter_mut() {
+    for (tree, count) in trees.iter_mut() {
         let depth = tree.depth();
-        let count = tree.leaves_count() as u64;
-        let key = count >> 2;
+        let key = *count >> 2;
         path.bench_with_input(
             format!("simple smt(depth:{depth},count:{count})"),
             &key,
@@ -75,10 +73,5 @@ criterion_main!(smt_group);
 fn generate_word(seed: &mut [u8; 32]) -> Word {
     swap(seed, &mut prng_array(*seed));
     let nums: [u64; 4] = prng_array(*seed);
-    [
-        Felt::new(nums[0]),
-        Felt::new(nums[1]),
-        Felt::new(nums[2]),
-        Felt::new(nums[3]),
-    ]
+    [Felt::new(nums[0]), Felt::new(nums[1]), Felt::new(nums[2]), Felt::new(nums[3])]
 }
benches/store.rs (179)
@@ -1,5 +1,5 @@
 use criterion::{black_box, criterion_group, criterion_main, BatchSize, BenchmarkId, Criterion};
-use miden_crypto::merkle::{MerkleStore, MerkleTree, NodeIndex, SimpleSmt};
+use miden_crypto::merkle::{DefaultMerkleStore as MerkleStore, MerkleTree, NodeIndex, SimpleSmt};
 use miden_crypto::Word;
 use miden_crypto::{hash::rpo::RpoDigest, Felt};
 use rand_utils::{rand_array, rand_value};
@@ -18,36 +18,37 @@ fn random_word() -> Word {
     rand_array::<Felt, 4>().into()
 }
 
-/// Generates a u64 in `0..range`.
-fn random_index(range: u64) -> u64 {
-    rand_value::<u64>() % range
+/// Generates an index at the specified depth in `0..range`.
+fn random_index(range: u64, depth: u8) -> NodeIndex {
+    let value = rand_value::<u64>() % range;
+    NodeIndex::new(depth, value).unwrap()
 }
 
 /// Benchmarks getting an empty leaf from the SMT and MerkleStore backends.
 fn get_empty_leaf_simplesmt(c: &mut Criterion) {
     let mut group = c.benchmark_group("get_empty_leaf_simplesmt");
 
-    let depth = 63u8;
-    let size = 2u64.pow(depth as u32);
+    let depth = SimpleSmt::MAX_DEPTH;
+    let size = u64::MAX;
 
     // both SMT and the store are pre-populated with empty hashes, accessing these values is what is
     // being benchmarked here, so no values are inserted into the backends
     let smt = SimpleSmt::new(depth).unwrap();
-    let store = MerkleStore::new();
+    let store = MerkleStore::from(&smt);
     let root = smt.root();
 
     group.bench_function(BenchmarkId::new("SimpleSmt", depth), |b| {
         b.iter_batched(
-            || random_index(size),
-            |value| black_box(smt.get_node(&NodeIndex::new(depth, value))),
+            || random_index(size, depth),
+            |index| black_box(smt.get_node(index)),
             BatchSize::SmallInput,
         )
     });
 
     group.bench_function(BenchmarkId::new("MerkleStore", depth), |b| {
         b.iter_batched(
-            || random_index(size),
-            |value| black_box(store.get_node(root, NodeIndex::new(depth, value))),
+            || random_index(size, depth),
+            |index| black_box(store.get_node(root, index)),
             BatchSize::SmallInput,
         )
     });
@@ -65,23 +66,23 @@ fn get_leaf_merkletree(c: &mut Criterion) {
 
     let mtree_leaves: Vec<Word> = leaves.iter().map(|v| v.into()).collect();
     let mtree = MerkleTree::new(mtree_leaves.clone()).unwrap();
-    let store = MerkleStore::new().with_merkle_tree(mtree_leaves).unwrap();
+    let store = MerkleStore::from(&mtree);
     let depth = mtree.depth();
     let root = mtree.root();
     let size_u64 = size as u64;
 
     group.bench_function(BenchmarkId::new("MerkleTree", size), |b| {
         b.iter_batched(
-            || random_index(size_u64),
-            |value| black_box(mtree.get_node(NodeIndex::new(depth, value))),
+            || random_index(size_u64, depth),
+            |index| black_box(mtree.get_node(index)),
             BatchSize::SmallInput,
         )
     });
 
     group.bench_function(BenchmarkId::new("MerkleStore", size), |b| {
         b.iter_batched(
-            || random_index(size_u64),
-            |value| black_box(store.get_node(root, NodeIndex::new(depth, value))),
+            || random_index(size_u64, depth),
+            |index| black_box(store.get_node(root, index)),
             BatchSize::SmallInput,
         )
     });
@@ -103,29 +104,24 @@ fn get_leaf_simplesmt(c: &mut Criterion) {
         .enumerate()
         .map(|(c, v)| (c.try_into().unwrap(), v.into()))
         .collect::<Vec<(u64, Word)>>();
-    let smt = SimpleSmt::new(63)
-        .unwrap()
-        .with_leaves(smt_leaves.clone())
-        .unwrap();
-    let store = MerkleStore::new()
-        .with_sparse_merkle_tree(smt_leaves)
-        .unwrap();
+    let smt = SimpleSmt::with_leaves(SimpleSmt::MAX_DEPTH, smt_leaves.clone()).unwrap();
+    let store = MerkleStore::from(&smt);
     let depth = smt.depth();
     let root = smt.root();
     let size_u64 = size as u64;
 
     group.bench_function(BenchmarkId::new("SimpleSmt", size), |b| {
         b.iter_batched(
-            || random_index(size_u64),
-            |value| black_box(smt.get_node(&NodeIndex::new(depth, value))),
+            || random_index(size_u64, depth),
+            |index| black_box(smt.get_node(index)),
            BatchSize::SmallInput,
         )
     });
 
     group.bench_function(BenchmarkId::new("MerkleStore", size), |b| {
         b.iter_batched(
-            || random_index(size_u64),
-            |value| black_box(store.get_node(root, NodeIndex::new(depth, value))),
+            || random_index(size_u64, depth),
+            |index| black_box(store.get_node(root, index)),
             BatchSize::SmallInput,
         )
     });
@@ -136,29 +132,29 @@ fn get_leaf_simplesmt(c: &mut Criterion) {
 fn get_node_of_empty_simplesmt(c: &mut Criterion) {
     let mut group = c.benchmark_group("get_node_of_empty_simplesmt");
 
-    let depth = 63u8;
-    let size = 2u64.pow(depth as u32);
+    let depth = SimpleSmt::MAX_DEPTH;
 
     // both SMT and the store are pre-populated with the empty hashes, accessing the internal nodes
     // of these values is what is being benchmarked here, so no values are inserted into the
     // backends.
     let smt = SimpleSmt::new(depth).unwrap();
-    let store = MerkleStore::new();
+    let store = MerkleStore::from(&smt);
     let root = smt.root();
     let half_depth = depth / 2;
+    let half_size = 2_u64.pow(half_depth as u32);
 
     group.bench_function(BenchmarkId::new("SimpleSmt", depth), |b| {
         b.iter_batched(
-            || random_index(size),
-            |value| black_box(smt.get_node(&NodeIndex::new(half_depth, value))),
+            || random_index(half_size, half_depth),
+            |index| black_box(smt.get_node(index)),
             BatchSize::SmallInput,
         )
     });
 
     group.bench_function(BenchmarkId::new("MerkleStore", depth), |b| {
         b.iter_batched(
-            || random_index(size),
-            |value| black_box(store.get_node(root, NodeIndex::new(half_depth, value))),
+            || random_index(half_size, half_depth),
+            |index| black_box(store.get_node(root, index)),
             BatchSize::SmallInput,
         )
     });
@@ -177,23 +173,23 @@ fn get_node_merkletree(c: &mut Criterion) {
 
     let mtree_leaves: Vec<Word> = leaves.iter().map(|v| v.into()).collect();
     let mtree = MerkleTree::new(mtree_leaves.clone()).unwrap();
-    let store = MerkleStore::new().with_merkle_tree(mtree_leaves).unwrap();
-    let half_depth = mtree.depth() / 2;
+    let store = MerkleStore::from(&mtree);
     let root = mtree.root();
-    let size_u64 = size as u64;
+    let half_depth = mtree.depth() / 2;
+    let half_size = 2_u64.pow(half_depth as u32);
 
     group.bench_function(BenchmarkId::new("MerkleTree", size), |b| {
         b.iter_batched(
-            || random_index(size_u64),
-            |value| black_box(mtree.get_node(NodeIndex::new(half_depth, value))),
+            || random_index(half_size, half_depth),
+            |index| black_box(mtree.get_node(index)),
             BatchSize::SmallInput,
         )
     });
 
     group.bench_function(BenchmarkId::new("MerkleStore", size), |b| {
         b.iter_batched(
-            || random_index(size_u64),
-            |value| black_box(store.get_node(root, NodeIndex::new(half_depth, value))),
+            || random_index(half_size, half_depth),
+            |index| black_box(store.get_node(root, index)),
             BatchSize::SmallInput,
         )
     });
@@ -216,29 +212,24 @@ fn get_node_simplesmt(c: &mut Criterion) {
         .enumerate()
         .map(|(c, v)| (c.try_into().unwrap(), v.into()))
         .collect::<Vec<(u64, Word)>>();
-    let smt = SimpleSmt::new(63)
-        .unwrap()
-        .with_leaves(smt_leaves.clone())
-        .unwrap();
-    let store = MerkleStore::new()
-        .with_sparse_merkle_tree(smt_leaves)
-        .unwrap();
+    let smt = SimpleSmt::with_leaves(SimpleSmt::MAX_DEPTH, smt_leaves.clone()).unwrap();
+    let store = MerkleStore::from(&smt);
     let root = smt.root();
-    let size_u64 = size as u64;
     let half_depth = smt.depth() / 2;
+    let half_size = 2_u64.pow(half_depth as u32);
 
     group.bench_function(BenchmarkId::new("SimpleSmt", size), |b| {
         b.iter_batched(
-            || random_index(size_u64),
-            |value| black_box(smt.get_node(&NodeIndex::new(half_depth, value))),
+            || random_index(half_size, half_depth),
+            |index| black_box(smt.get_node(index)),
             BatchSize::SmallInput,
         )
     });
 
     group.bench_function(BenchmarkId::new("MerkleStore", size), |b| {
         b.iter_batched(
-            || random_index(size_u64),
-            |value| black_box(store.get_node(root, NodeIndex::new(half_depth, value))),
+            || random_index(half_size, half_depth),
+            |index| black_box(store.get_node(root, index)),
             BatchSize::SmallInput,
         )
     });
@@ -257,23 +248,23 @@ fn get_leaf_path_merkletree(c: &mut Criterion) {
 
     let mtree_leaves: Vec<Word> = leaves.iter().map(|v| v.into()).collect();
     let mtree = MerkleTree::new(mtree_leaves.clone()).unwrap();
-    let store = MerkleStore::new().with_merkle_tree(mtree_leaves).unwrap();
+    let store = MerkleStore::from(&mtree);
     let depth = mtree.depth();
     let root = mtree.root();
     let size_u64 = size as u64;
 
     group.bench_function(BenchmarkId::new("MerkleTree", size), |b| {
         b.iter_batched(
-            || random_index(size_u64),
-            |value| black_box(mtree.get_path(NodeIndex::new(depth, value))),
+            || random_index(size_u64, depth),
+            |index| black_box(mtree.get_path(index)),
             BatchSize::SmallInput,
         )
     });
 
     group.bench_function(BenchmarkId::new("MerkleStore", size), |b| {
         b.iter_batched(
-            || random_index(size_u64),
-            |value| black_box(store.get_path(root, NodeIndex::new(depth, value))),
+            || random_index(size_u64, depth),
+            |index| black_box(store.get_path(root, index)),
             BatchSize::SmallInput,
         )
     });
@@ -295,29 +286,24 @@ fn get_leaf_path_simplesmt(c: &mut Criterion) {
         .enumerate()
         .map(|(c, v)| (c.try_into().unwrap(), v.into()))
         .collect::<Vec<(u64, Word)>>();
-    let smt = SimpleSmt::new(63)
-        .unwrap()
-        .with_leaves(smt_leaves.clone())
-        .unwrap();
-    let store = MerkleStore::new()
-        .with_sparse_merkle_tree(smt_leaves)
-        .unwrap();
+    let smt = SimpleSmt::with_leaves(SimpleSmt::MAX_DEPTH, smt_leaves.clone()).unwrap();
+    let store = MerkleStore::from(&smt);
     let depth = smt.depth();
     let root = smt.root();
     let size_u64 = size as u64;
 
     group.bench_function(BenchmarkId::new("SimpleSmt", size), |b| {
         b.iter_batched(
-            || random_index(size_u64),
-            |value| black_box(smt.get_path(NodeIndex::new(depth, value))),
+            || random_index(size_u64, depth),
+            |index| black_box(smt.get_path(index)),
             BatchSize::SmallInput,
         )
     });
 
     group.bench_function(BenchmarkId::new("MerkleStore", size), |b| {
         b.iter_batched(
-            || random_index(size_u64),
-            |value| black_box(store.get_path(root, NodeIndex::new(depth, value))),
+            || random_index(size_u64, depth),
+            |index| black_box(store.get_path(root, index)),
             BatchSize::SmallInput,
         )
     });
@@ -346,16 +332,16 @@ fn new(c: &mut Criterion) {
 
     // This could be done with `bench_with_input`, however to remove variables while comparing
    // with MerkleTree it is using `iter_batched`
-    group.bench_function(
-        BenchmarkId::new("MerkleStore::with_merkle_tree", size),
-        |b| {
+    group.bench_function(BenchmarkId::new("MerkleStore::extend::MerkleTree", size), |b| {
         b.iter_batched(
             || leaves.iter().map(|v| v.into()).collect::<Vec<Word>>(),
-            |l| black_box(MerkleStore::new().with_merkle_tree(l)),
+            |l| {
+                let mtree = MerkleTree::new(l).unwrap();
+                black_box(MerkleStore::from(&mtree));
+            },
             BatchSize::SmallInput,
         )
-        },
-    );
+    });
 
     group.bench_function(BenchmarkId::new("SimpleSmt::new", size), |b| {
         b.iter_batched(
@@ -366,14 +352,12 @@ fn new(c: &mut Criterion) {
                 .map(|(c, v)| (c.try_into().unwrap(), v.into()))
                 .collect::<Vec<(u64, Word)>>()
             },
-            |l| black_box(SimpleSmt::new(63).unwrap().with_leaves(l)),
+            |l| black_box(SimpleSmt::with_leaves(SimpleSmt::MAX_DEPTH, l)),
             BatchSize::SmallInput,
         )
     });
 
-    group.bench_function(
-        BenchmarkId::new("MerkleStore::with_sparse_merkle_tree", size),
-        |b| {
+    group.bench_function(BenchmarkId::new("MerkleStore::extend::SimpleSmt", size), |b| {
         b.iter_batched(
             || {
                 leaves
@@ -382,11 +366,13 @@ fn new(c: &mut Criterion) {
                 .map(|(c, v)| (c.try_into().unwrap(), v.into()))
                 .collect::<Vec<(u64, Word)>>()
             },
-            |l| black_box(MerkleStore::new().with_sparse_merkle_tree(l)),
+            |l| {
+                let smt = SimpleSmt::with_leaves(SimpleSmt::MAX_DEPTH, l).unwrap();
+                black_box(MerkleStore::from(&smt));
+            },
             BatchSize::SmallInput,
         )
-        },
-    );
+    });
     }
 }
 
@@ -402,14 +388,14 @@ fn update_leaf_merkletree(c: &mut Criterion) {
 
     let mtree_leaves: Vec<Word> = leaves.iter().map(|v| v.into()).collect();
     let mut mtree = MerkleTree::new(mtree_leaves.clone()).unwrap();
-    let mut store = MerkleStore::new().with_merkle_tree(mtree_leaves).unwrap();
+    let mut store = MerkleStore::from(&mtree);
     let depth = mtree.depth();
     let root = mtree.root();
     let size_u64 = size as u64;
 
     group.bench_function(BenchmarkId::new("MerkleTree", size), |b| {
         b.iter_batched(
-            || (random_index(size_u64), random_word()),
+            || (rand_value::<u64>() % size_u64, random_word()),
             |(index, value)| black_box(mtree.update_leaf(index, value)),
             BatchSize::SmallInput,
         )
@@ -418,15 +404,12 @@ fn update_leaf_merkletree(c: &mut Criterion) {
     let mut store_root = root;
     group.bench_function(BenchmarkId::new("MerkleStore", size), |b| {
         b.iter_batched(
-            || (random_index(size_u64), random_word()),
+            || (random_index(size_u64, depth), random_word()),
             |(index, value)| {
                 // The MerkleTree automatically updates its internal root, the Store maintains
                 // the old root and adds the new one. Here we update the root to have a fair
                 // comparison
-                store_root = store
-                    .set_node(root, NodeIndex::new(depth, index), value)
-                    .unwrap()
-                    .root;
+                store_root = store.set_node(root, index, value.into()).unwrap().root;
                 black_box(store_root)
             },
             BatchSize::SmallInput,
@@ -450,20 +433,15 @@ fn update_leaf_simplesmt(c: &mut Criterion) {
         .enumerate()
         .map(|(c, v)| (c.try_into().unwrap(), v.into()))
         .collect::<Vec<(u64, Word)>>();
-    let mut smt = SimpleSmt::new(63)
-        .unwrap()
-        .with_leaves(smt_leaves.clone())
-        .unwrap();
-    let mut store = MerkleStore::new()
-        .with_sparse_merkle_tree(smt_leaves)
-        .unwrap();
+    let mut smt = SimpleSmt::with_leaves(SimpleSmt::MAX_DEPTH, smt_leaves.clone()).unwrap();
+    let mut store = MerkleStore::from(&smt);
     let depth = smt.depth();
     let root = smt.root();
     let size_u64 = size as u64;
 
     group.bench_function(BenchmarkId::new("SimpleSMT", size), |b| {
         b.iter_batched(
-            || (random_index(size_u64), random_word()),
+            || (rand_value::<u64>() % size_u64, random_word()),
             |(index, value)| black_box(smt.update_leaf(index, value)),
             BatchSize::SmallInput,
         )
@@ -472,15 +450,12 @@ fn update_leaf_simplesmt(c: &mut Criterion) {
     let mut store_root = root;
     group.bench_function(BenchmarkId::new("MerkleStore", size), |b| {
         b.iter_batched(
-            || (random_index(size_u64), random_word()),
+            || (random_index(size_u64, depth), random_word()),
             |(index, value)| {
                 // The MerkleTree automatically updates its internal root, the Store maintains
                 // the old root and adds the new one. Here we update the root to have a fair
                 // comparison
-                store_root = store
-                    .set_node(root, NodeIndex::new(depth, index), value)
-                    .unwrap()
-                    .root;
+                store_root = store.set_node(root, index, value.into()).unwrap().root;
                 black_box(store_root)
             },
             BatchSize::SmallInput,
rustfmt.toml (20) — new file
@@ -0,0 +1,20 @@
+edition = "2021"
+array_width = 80
+attr_fn_like_width = 80
+chain_width = 80
+#condense_wildcard_suffixes = true
+#enum_discrim_align_threshold = 40
+fn_call_width = 80
+#fn_single_line = true
+#format_code_in_doc_comments = true
+#format_macro_matchers = true
+#format_strings = true
+#group_imports = "StdExternalCrate"
+#hex_literal_case = "Lower"
+#imports_granularity = "Crate"
+newline_style = "Unix"
+#normalize_doc_attributes = true
+#reorder_impl_items = true
+single_line_if_else_max_width = 60
+use_field_init_shorthand = true
+use_try_shorthand = true
src/bit.rs (169) — file deleted
@@ -1,169 +0,0 @@
-/// Yields the bits of a `u64`.
-pub struct BitIterator {
-    /// The value that is being iterated bit-wise
-    value: u64,
-    /// True bits in the `mask` are the bits that have been visited.
-    mask: u64,
-}
-
-impl BitIterator {
-    pub fn new(value: u64) -> BitIterator {
-        BitIterator { value, mask: 0 }
-    }
-
-    /// An efficient skip implementation.
-    ///
-    /// Note: The compiler is smart enough to translate a `skip(n)` into a single shift instruction
-    /// if the code is inlined, however inlining does not always happen.
-    pub fn skip_front(mut self, n: u32) -> Self {
-        let mask = bitmask(n);
-        let ones = self.mask.trailing_ones();
-        let mask_position = ones;
-        self.mask ^= mask.checked_shl(mask_position).unwrap_or(0);
-        self
-    }
-
-    /// An efficient skip from the back.
-    ///
-    /// Note: The compiler is smart enough to translate a `skip(n)` into a single shift instruction
-    /// if the code is inlined, however inlining does not always happen.
-    pub fn skip_back(mut self, n: u32) -> Self {
-        let mask = bitmask(n);
-        let ones = self.mask.leading_ones();
-        let mask_position = u64::BITS - ones - n;
-        self.mask ^= mask.checked_shl(mask_position).unwrap_or(0);
-        self
-    }
-}
-
-impl Iterator for BitIterator {
-    type Item = bool;
-
-    fn next(&mut self) -> Option<<Self as Iterator>::Item> {
-        // trailing_ones is implemented with trailing_zeros, and the zeros are computed with the
-        // intrinsic cttz. [Rust 1.67.0] x86 uses the `bsf` instruction. AArch64 uses the `rbit
-        // clz` instructions.
-        let ones = self.mask.trailing_ones();
-
-        if ones == u64::BITS {
-            None
-        } else {
-            let bit_position = ones;
-            let mask = 1 << bit_position;
-            self.mask ^= mask;
-            let bit = self.value & mask;
-            Some(bit != 0)
-        }
-    }
-}
-
-impl DoubleEndedIterator for BitIterator {
-    fn next_back(&mut self) -> Option<<Self as Iterator>::Item> {
-        // leading_ones is implemented with leading_zeros, and the zeros are computed with the
-        // intrinsic ctlz. [Rust 1.67.0] x86 uses the `bsr` instruction. AArch64 uses the `clz`
-        // instruction.
-        let ones = self.mask.leading_ones();
-
-        if ones == u64::BITS {
-            None
-        } else {
-            let bit_position = u64::BITS - ones - 1;
-            let mask = 1 << bit_position;
-            self.mask ^= mask;
-            let bit = self.value & mask;
-            Some(bit != 0)
-        }
-    }
-}
-
-#[cfg(test)]
-mod test {
-    use super::BitIterator;
-
-    #[test]
-    fn test_bit_iterator() {
-        let v = 0b1;
-        let mut it = BitIterator::new(v);
-        assert!(it.next().unwrap(), "first bit is true");
-        assert!(it.all(|v| v == false), "every other value is false");
-
-        let v = 0b10;
-        let mut it = BitIterator::new(v);
-        assert!(!it.next().unwrap(), "first bit is false");
-        assert!(it.next().unwrap(), "first bit is true");
-        assert!(it.all(|v| v == false), "every other value is false");
-
-        let v = 0b10;
-        let mut it = BitIterator::new(v);
-        assert!(!it.next_back().unwrap(), "last bit is false");
-        assert!(!it.next().unwrap(), "first bit is false");
-        assert!(it.next().unwrap(), "first bit is true");
-        assert!(it.all(|v| v == false), "every other value is false");
-    }
-
-    #[test]
-    fn test_bit_iterator_skip() {
-        let v = 0b1;
-        let mut it = BitIterator::new(v).skip_front(1);
-        assert!(it.all(|v| v == false), "every other value is false");
-
-        let v = 0b10;
-        let mut it = BitIterator::new(v).skip_front(1);
-        assert!(it.next().unwrap(), "first bit is true");
-        assert!(it.all(|v| v == false), "every other value is false");
-
-        let high_bit = 0b1 << (u64::BITS - 1);
-        let mut it = BitIterator::new(high_bit).skip_back(1);
-        assert!(it.all(|v| v == false), "every other value is false");
-
-        let v = 0b10;
-        let mut it = BitIterator::new(v).skip_back(1);
-        assert!(!it.next_back().unwrap(), "last bit is false");
-        assert!(!it.next().unwrap(), "first bit is false");
-        assert!(it.next().unwrap(), "first bit is true");
-        assert!(it.all(|v| v == false), "every other value is false");
-    }
-
-    #[test]
-    fn test_skip_all() {
-        let v = 0b1;
-        let mut it = BitIterator::new(v).skip_front(u64::BITS);
-        assert!(it.next().is_none(), "iterator must be exhausted");
-
-        let v = 0b1;
-        let mut it = BitIterator::new(v).skip_back(u64::BITS);
-        assert!(it.next().is_none(), "iterator must be exhausted");
-    }
-
-    #[test]
-    fn test_bit_iterator_count_bits_after_skip() {
-        let any_value = 0b1;
-        for s in 0..u64::BITS {
-            let it = BitIterator::new(any_value).skip_front(s);
-            assert_eq!(it.count() as u32, u64::BITS - s)
-        }
-
-        let any_value = 0b1;
-        for s in 1..u64::BITS {
-            let it = BitIterator::new(any_value).skip_back(s);
-            assert_eq!(it.count() as u32, u64::BITS - s)
-        }
-    }
-
-    #[test]
-    fn test_bit_iterator_rev() {
-        let v = 0b1;
-        let mut it = BitIterator::new(v).rev();
-        assert!(it.nth(63).unwrap(), "the last value is true");
-    }
-}
-
-// UTILITIES
-// ===============================================================================================
-
-fn bitmask(s: u32) -> u64 {
-    match 1u64.checked_shl(s) {
-        Some(r) => r - 1,
-        None => u64::MAX,
-    }
-}
@@ -1,7 +1,5 @@
 use super::{Digest, ElementHasher, Felt, FieldElement, Hasher, StarkField};
-use crate::utils::{
-    uninit_vector, ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable,
-};
+use crate::utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable};
 use core::{
     mem::{size_of, transmute, transmute_copy},
     ops::Deref,
@@ -272,10 +270,7 @@ impl Blake3_160 {
 /// Zero-copy ref shrink to array.
 fn shrink_bytes<const M: usize, const N: usize>(bytes: &[u8; M]) -> &[u8; N] {
     // compile-time assertion
-    assert!(
-        M >= N,
-        "N should fit in M so it can be safely transmuted into a smaller slice!"
-    );
+    assert!(M >= N, "N should fit in M so it can be safely transmuted into a smaller slice!");
     // safety: bytes len is asserted
     unsafe { transmute(bytes) }
 }
@@ -290,15 +285,25 @@ where
     let digest = if Felt::IS_CANONICAL {
         blake3::hash(E::elements_as_bytes(elements))
     } else {
-        let base_elements = E::slice_as_base_elements(elements);
-        let blen = base_elements.len() << 3;
+        let mut hasher = blake3::Hasher::new();
 
-        let mut bytes = unsafe { uninit_vector(blen) };
-        for (idx, element) in base_elements.iter().enumerate() {
-            bytes[idx * 8..(idx + 1) * 8].copy_from_slice(&element.as_int().to_le_bytes());
+        // BLAKE3 state is 64 bytes - so, we can absorb 64 bytes into the state in a single
+        // permutation. we move the elements into the hasher via the buffer to give the CPU
+        // a chance to process multiple element-to-byte conversions in parallel
+        let mut buf = [0_u8; 64];
+        let mut chunk_iter = E::slice_as_base_elements(elements).chunks_exact(8);
+        for chunk in chunk_iter.by_ref() {
+            for i in 0..8 {
+                buf[i * 8..(i + 1) * 8].copy_from_slice(&chunk[i].as_int().to_le_bytes());
+            }
+            hasher.update(&buf);
         }
 
-        blake3::hash(&bytes)
+        for element in chunk_iter.remainder() {
+            hasher.update(&element.as_int().to_le_bytes());
+        }
+
+        hasher.finalize()
     };
     *shrink_bytes(&digest.into())
 }
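The rewritten `hash_elements` body above buffers eight field elements (eight bytes each) into a 64-byte scratch buffer before each `Hasher::update` call, so a whole BLAKE3 block is absorbed at once. As a standalone, hedged illustration of that same buffering pattern, here is a sketch that uses plain `u64` values in place of `Felt` and only the public `blake3` crate API:

```rust
// Illustration only: serialize 8 values (8 bytes each) into a 64-byte buffer,
// feed whole buffers to the hasher, then handle any remainder element by element.
fn hash_u64s(elements: &[u64]) -> blake3::Hash {
    let mut hasher = blake3::Hasher::new();
    let mut buf = [0u8; 64];
    let mut chunks = elements.chunks_exact(8);
    for chunk in chunks.by_ref() {
        for (i, e) in chunk.iter().enumerate() {
            buf[i * 8..(i + 1) * 8].copy_from_slice(&e.to_le_bytes());
        }
        hasher.update(&buf);
    }
    for e in chunks.remainder() {
        hasher.update(&e.to_le_bytes());
    }
    hasher.finalize()
}
```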
@@ -1,6 +1,22 @@
 use super::*;
 use crate::utils::collections::Vec;
 use proptest::prelude::*;
+use rand_utils::rand_vector;
+
+#[test]
+fn blake3_hash_elements() {
+    // test multiple of 8
+    let elements = rand_vector::<Felt>(16);
+    let expected = compute_expected_element_hash(&elements);
+    let actual: [u8; 32] = hash_elements(&elements);
+    assert_eq!(&expected, &actual);
+
+    // test not multiple of 8
+    let elements = rand_vector::<Felt>(17);
+    let expected = compute_expected_element_hash(&elements);
+    let actual: [u8; 32] = hash_elements(&elements);
+    assert_eq!(&expected, &actual);
+}
 
 proptest! {
     #[test]
@@ -18,3 +34,14 @@ proptest! {
         Blake3_256::hash(vec);
     }
 }
+
+// HELPER FUNCTIONS
+// ================================================================================================
+
+fn compute_expected_element_hash(elements: &[Felt]) -> blake3::Hash {
+    let mut bytes = Vec::new();
+    for element in elements.iter() {
+        bytes.extend_from_slice(&element.as_int().to_le_bytes());
+    }
+    blake3::hash(&bytes)
+}
@@ -2,7 +2,7 @@ use super::{Digest, Felt, StarkField, DIGEST_SIZE, ZERO};
 use crate::utils::{
     string::String, ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable,
 };
-use core::{cmp::Ordering, ops::Deref};
+use core::{cmp::Ordering, fmt::Display, ops::Deref};
 
 // DIGEST TRAIT IMPLEMENTATIONS
 // ================================================================================================
@@ -85,6 +85,28 @@ impl From<RpoDigest> for [Felt; DIGEST_SIZE] {
     }
 }
 
+impl From<&RpoDigest> for [u64; DIGEST_SIZE] {
+    fn from(value: &RpoDigest) -> Self {
+        [
+            value.0[0].as_int(),
+            value.0[1].as_int(),
+            value.0[2].as_int(),
+            value.0[3].as_int(),
+        ]
+    }
+}
+
+impl From<RpoDigest> for [u64; DIGEST_SIZE] {
+    fn from(value: RpoDigest) -> Self {
+        [
+            value.0[0].as_int(),
+            value.0[1].as_int(),
+            value.0[2].as_int(),
+            value.0[3].as_int(),
+        ]
+    }
+}
+
 impl From<&RpoDigest> for [u8; 32] {
     fn from(value: &RpoDigest) -> Self {
         value.as_bytes()
@@ -118,14 +140,13 @@ impl Ord for RpoDigest {
         // finally, we use `Felt::inner` instead of `Felt::as_int` so we avoid performing a
         // montgomery reduction for every limb. that is safe because every inner element of the
         // digest is guaranteed to be in its canonical form (that is, `x in [0,p)`).
-        self.0
-            .iter()
-            .map(Felt::inner)
-            .zip(other.0.iter().map(Felt::inner))
-            .fold(Ordering::Equal, |ord, (a, b)| match ord {
+        self.0.iter().map(Felt::inner).zip(other.0.iter().map(Felt::inner)).fold(
+            Ordering::Equal,
+            |ord, (a, b)| match ord {
                 Ordering::Equal => a.cmp(&b),
                 _ => ord,
-            })
+            },
+        )
     }
 }
 
@@ -135,6 +156,15 @@ impl PartialOrd for RpoDigest {
     }
 }
 
+impl Display for RpoDigest {
+    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+        for byte in self.as_bytes() {
+            write!(f, "{byte:02x}")?;
+        }
+        Ok(())
+    }
+}
+
 // TESTS
 // ================================================================================================
 
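A small, hypothetical usage sketch for the conversions and `Display` impl added above. It assumes `RpoDigest` and `Felt` are exported as in the benchmark imports earlier in this diff, and the element values are arbitrary:

```rust
use miden_crypto::{hash::rpo::RpoDigest, Felt};

fn main() {
    // build a digest from 4 field elements (RpoDigest::new also appears in this diff)
    let digest = RpoDigest::new([Felt::new(1), Felt::new(2), Felt::new(3), Felt::new(4)]);
    // new conversion: the digest limbs as canonical u64 values
    let limbs: [u64; 4] = (&digest).into();
    // new Display impl: the digest rendered as lowercase hex bytes
    println!("{digest} -> {limbs:?}");
}
```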
@@ -2,7 +2,10 @@ use super::{
     Felt, FieldElement, Hasher, Rpo256, RpoDigest, StarkField, ALPHA, INV_ALPHA, ONE, STATE_WIDTH,
     ZERO,
 };
-use crate::utils::collections::{BTreeSet, Vec};
+use crate::{
+    utils::collections::{BTreeSet, Vec},
+    Word,
+};
 use core::convert::TryInto;
 use proptest::prelude::*;
 use rand_utils::rand_value;
@@ -203,7 +206,7 @@ fn sponge_bytes_with_remainder_length_wont_panic() {
     // size.
     //
     // this is a preliminary test to the fuzzy-stress of proptest.
-    Rpo256::hash(&vec![0; 113]);
+    Rpo256::hash(&[0; 113]);
 }
 
 #[test]
@@ -227,12 +230,12 @@ fn sponge_zeroes_collision() {
 
 proptest! {
     #[test]
-    fn rpo256_wont_panic_with_arbitrary_input(ref vec in any::<Vec<u8>>()) {
-        Rpo256::hash(&vec);
+    fn rpo256_wont_panic_with_arbitrary_input(ref bytes in any::<Vec<u8>>()) {
+        Rpo256::hash(bytes);
     }
 }
 
-const EXPECTED: [[Felt; 4]; 19] = [
+const EXPECTED: [Word; 19] = [
     [
         Felt::new(1502364727743950833),
         Felt::new(5880949717274681448),
@@ -4,7 +4,6 @@
 #[cfg_attr(test, macro_use)]
 extern crate alloc;
 
-mod bit;
 pub mod hash;
 pub mod merkle;
 pub mod utils;
@@ -1,6 +1,12 @@
-use super::{Felt, RpoDigest, WORD_SIZE, ZERO};
+use super::{Felt, RpoDigest, Word, WORD_SIZE, ZERO};
 use core::slice;
 
+// CONSTANTS
+// ================================================================================================
+
+/// A word consisting of 4 ZERO elements.
+pub const EMPTY_WORD: Word = [ZERO; WORD_SIZE];
+
 // EMPTY NODES SUBTREES
 // ================================================================================================
 
@@ -1570,7 +1576,7 @@ fn all_depths_opens_to_zero() {
         assert_eq!(depth as usize + 1, subtree.len());
 
         // assert the opening is zero
-        let initial = RpoDigest::new([ZERO; WORD_SIZE]);
+        let initial = RpoDigest::new(EMPTY_WORD);
         assert_eq!(initial, subtree.remove(0));
 
         // compute every node of the path manually and compare with the output
|
|||||||
@@ -1,13 +1,24 @@
|
|||||||
use super::{Felt, MerkleError, RpoDigest, StarkField};
|
use super::{Felt, MerkleError, RpoDigest, StarkField};
|
||||||
use crate::bit::BitIterator;
|
use core::fmt::Display;
|
||||||
|
|
||||||
// NODE INDEX
|
// NODE INDEX
|
||||||
// ================================================================================================
|
// ================================================================================================
|
||||||
|
|
||||||
/// A Merkle tree address to an arbitrary node.
|
/// Address to an arbitrary node in a binary tree using level order form.
|
||||||
///
|
///
|
||||||
/// The position is relative to a tree in level order, where for a given depth `d` elements are
|
/// The position is represented by the pair `(depth, pos)`, where for a given depth `d` elements
|
||||||
/// numbered from $0..2^d$.
|
/// are numbered from $0..(2^d)-1$. Example:
|
||||||
|
///
|
||||||
|
/// ```ignore
|
||||||
|
/// depth
|
||||||
|
/// 0 0
|
||||||
|
/// 1 0 1
|
||||||
|
/// 2 0 1 2 3
|
||||||
|
/// 3 0 1 2 3 4 5 6 7
|
||||||
|
/// ```
|
||||||
|
///
|
||||||
|
/// The root is represented by the pair $(0, 0)$, its left child is $(1, 0)$ and its right child
|
||||||
|
/// $(1, 1)$.
|
||||||
#[derive(Debug, Default, Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Hash)]
|
#[derive(Debug, Default, Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Hash)]
|
||||||
pub struct NodeIndex {
|
pub struct NodeIndex {
|
||||||
depth: u8,
|
depth: u8,
|
||||||
@@ -19,20 +30,43 @@ impl NodeIndex {
     // --------------------------------------------------------------------------------------------

     /// Creates a new node index.
-    pub const fn new(depth: u8, value: u64) -> Self {
+    ///
+    /// # Errors
+    /// Returns an error if the `value` is greater than or equal to 2^{depth}.
+    pub const fn new(depth: u8, value: u64) -> Result<Self, MerkleError> {
+        if (64 - value.leading_zeros()) > depth as u32 {
+            Err(MerkleError::InvalidIndex { depth, value })
+        } else {
+            Ok(Self { depth, value })
+        }
+    }
+
+    /// Creates a new node index without checking its validity.
+    pub const fn new_unchecked(depth: u8, value: u64) -> Self {
+        debug_assert!((64 - value.leading_zeros()) <= depth as u32);
         Self { depth, value }
     }

+    /// Creates a new node index for testing purposes.
+    ///
+    /// # Panics
+    /// Panics if the `value` is greater than or equal to 2^{depth}.
+    #[cfg(test)]
+    pub fn make(depth: u8, value: u64) -> Self {
+        Self::new(depth, value).unwrap()
+    }
+
     /// Creates a node index from a pair of field elements representing the depth and value.
     ///
     /// # Errors
-    ///
-    /// Will error if the `u64` representation of the depth doesn't fit a `u8`.
+    /// Returns an error if:
+    /// - `depth` doesn't fit in a `u8`.
+    /// - `value` is greater than or equal to 2^{depth}.
     pub fn from_elements(depth: &Felt, value: &Felt) -> Result<Self, MerkleError> {
         let depth = depth.as_int();
         let depth = u8::try_from(depth).map_err(|_| MerkleError::DepthTooBig(depth))?;
         let value = value.as_int();
-        Ok(Self::new(depth, value))
+        Self::new(depth, value)
     }

     /// Creates a new node index pointing to the root of the tree.
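The fallible constructor introduced above validates `value` by comparing its bit length against `depth` instead of computing `1 << depth`. A stand-alone sketch of that check (our helper, not the crate's API), under the assumption that the bit-length form is used to avoid the shift overflow at depth 64, which the new `test_node_index_can_represent_depth_64` test exercises:

```rust
// Sketch of the validity check: value must have at most `depth` significant bits.
fn fits_depth(depth: u8, value: u64) -> bool {
    (64 - value.leading_zeros()) <= depth as u32
}

fn main() {
    assert!(fits_depth(0, 0)); // only value 0 exists at the root level
    assert!(!fits_depth(0, 1));
    assert!(fits_depth(64, u64::MAX)); // the whole u64 range fits at depth 64
    assert!(!fits_depth(63, u64::MAX));
}
```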
@@ -40,15 +74,23 @@ impl NodeIndex {
         Self { depth: 0, value: 0 }
     }

-    /// Mutates the instance and returns it, replacing the depth.
-    pub const fn with_depth(mut self, depth: u8) -> Self {
-        self.depth = depth;
+    /// Computes sibling index of the current node.
+    pub const fn sibling(mut self) -> Self {
+        self.value ^= 1;
         self
     }

-    /// Computes the value of the sibling of the current node.
-    pub fn sibling(mut self) -> Self {
-        self.value ^= 1;
+    /// Returns left child index of the current node.
+    pub const fn left_child(mut self) -> Self {
+        self.depth += 1;
+        self.value <<= 1;
+        self
+    }
+
+    /// Returns right child index of the current node.
+    pub const fn right_child(mut self) -> Self {
+        self.depth += 1;
+        self.value = (self.value << 1) + 1;
         self
     }

@@ -83,11 +125,6 @@ impl NodeIndex {
         self.value
     }

-    /// Returns true if the current value fits the current depth for a binary tree.
-    pub const fn is_valid(&self) -> bool {
-        self.value < (1 << self.depth as u64)
-    }
-
     /// Returns true if the current instance points to a right sibling node.
     pub const fn is_value_odd(&self) -> bool {
         (self.value & 1) == 1

@@ -98,27 +135,29 @@ impl NodeIndex {
         self.depth == 0
     }

-    /// Returns a bit iterator for the `value`.
-    ///
-    /// Bits read from left-to-right represent which internal node's child should be visited to
-    /// arrive at the leaf. From the right-to-left the bit represent the position the hash of the
-    /// current element should go.
-    ///
-    /// Additionally, the value that is not visited are the sibling values necessary for a Merkle
-    /// opening.
-    pub fn bit_iterator(&self) -> BitIterator {
-        let depth: u32 = self.depth.into();
-        BitIterator::new(self.value).skip_back(u64::BITS - depth)
-    }
-
     // STATE MUTATORS
     // --------------------------------------------------------------------------------------------

-    /// Traverse one level towards the root, decrementing the depth by `1`.
-    pub fn move_up(&mut self) -> &mut Self {
+    /// Traverses one level towards the root, decrementing the depth by `1`.
+    pub fn move_up(&mut self) {
         self.depth = self.depth.saturating_sub(1);
         self.value >>= 1;
-        self
+    }
+
+    /// Traverses towards the root until the specified depth is reached.
+    ///
+    /// Assumes that the specified depth is smaller than the current depth.
+    pub fn move_up_to(&mut self, depth: u8) {
+        debug_assert!(depth < self.depth);
+        let delta = self.depth.saturating_sub(depth);
+        self.depth = self.depth.saturating_sub(delta);
+        self.value >>= delta as u32;
+    }
+}
+
+impl Display for NodeIndex {
+    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+        write!(f, "depth={}, value={}", self.depth, self.value)
     }
 }

@@ -127,14 +166,40 @@ mod tests {
     use super::*;
     use proptest::prelude::*;

+    #[test]
+    fn test_node_index_value_too_high() {
+        assert_eq!(NodeIndex::new(0, 0).unwrap(), NodeIndex { depth: 0, value: 0 });
+        match NodeIndex::new(0, 1) {
+            Err(MerkleError::InvalidIndex { depth, value }) => {
+                assert_eq!(depth, 0);
+                assert_eq!(value, 1);
+            }
+            _ => unreachable!(),
+        }
+    }
+
+    #[test]
+    fn test_node_index_can_represent_depth_64() {
+        assert!(NodeIndex::new(64, u64::MAX).is_ok());
+    }
+
+    prop_compose! {
+        fn node_index()(value in 0..2u64.pow(u64::BITS - 1)) -> NodeIndex {
+            // unwrap never panics because the range of depth is 0..u64::BITS
+            let mut depth = value.ilog2() as u8;
+            if value > (1 << depth) { // round up
+                depth += 1;
+            }
+            NodeIndex::new(depth, value).unwrap()
+        }
+    }
+
     proptest! {
         #[test]
         fn arbitrary_index_wont_panic_on_move_up(
-            depth in prop::num::u8::ANY,
-            value in prop::num::u64::ANY,
+            mut index in node_index(),
             count in prop::num::u8::ANY,
         ) {
-            let mut index = NodeIndex::new(depth, value);
             for _ in 0..count {
                 index.move_up();
             }
@@ -1,9 +1,6 @@
-use super::{Felt, MerkleError, MerklePath, NodeIndex, Rpo256, RpoDigest, Vec, Word};
-use crate::{
-    utils::{string::String, uninit_vector, word_to_hex},
-    FieldElement,
-};
-use core::{fmt, slice};
+use super::{InnerNodeInfo, MerkleError, MerklePath, NodeIndex, Rpo256, RpoDigest, Vec, Word};
+use crate::utils::{string::String, uninit_vector, word_to_hex};
+use core::{fmt, ops::Deref, slice};
 use winter_math::log2;

 // MERKLE TREE

@@ -12,7 +9,7 @@ use winter_math::log2;
 /// A fully-balanced binary Merkle tree (i.e., a tree where the number of leaves is a power of two).
 #[derive(Debug, Clone, PartialEq, Eq)]
 pub struct MerkleTree {
-    pub(crate) nodes: Vec<Word>,
+    nodes: Vec<RpoDigest>,
 }

 impl MerkleTree {

@@ -32,10 +29,12 @@ impl MerkleTree {

         // create un-initialized vector to hold all tree nodes
         let mut nodes = unsafe { uninit_vector(2 * n) };
-        nodes[0] = [Felt::ZERO; 4];
+        nodes[0] = RpoDigest::default();

         // copy leaves into the second part of the nodes vector
-        nodes[n..].copy_from_slice(&leaves);
+        nodes[n..].iter_mut().zip(leaves).for_each(|(node, leaf)| {
+            *node = RpoDigest::from(leaf);
+        });

         // re-interpret nodes as an array of two nodes fused together
         // Safety: `nodes` will never move here as it is not bound to an external lifetime (i.e.

@@ -45,7 +44,7 @@ impl MerkleTree {

         // calculate all internal tree nodes
         for i in (1..n).rev() {
-            nodes[i] = Rpo256::merge(&pairs[i]).into();
+            nodes[i] = Rpo256::merge(&pairs[i]);
         }

         Ok(Self { nodes })
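The constructor in the hunk above stores the tree as an implicit binary heap: leaves fill the second half of a `2n` vector, node `i` has children `2i` and `2i + 1`, and internal nodes are computed from the back so both children are always ready. A minimal stand-alone sketch of that layout, with a toy hash standing in for `Rpo256::merge`:

```rust
// Stand-alone sketch of the implicit-heap layout; the hash is a stand-in.
fn build(leaves: &[u64]) -> Vec<u64> {
    let n = leaves.len(); // assumed to be a power of two, as the real constructor enforces
    let merge = |l: u64, r: u64| l ^ r.rotate_left(1); // stand-in for Rpo256::merge
    let mut nodes = vec![0u64; 2 * n];
    nodes[n..].copy_from_slice(leaves); // leaves live in the second half
    for i in (1..n).rev() {
        nodes[i] = merge(nodes[2 * i], nodes[2 * i + 1]); // children are already filled
    }
    nodes // nodes[1] is the root; nodes[0] is unused padding
}

fn main() {
    let tree = build(&[1, 2, 3, 4]);
    assert_eq!(tree.len(), 8);
    assert_ne!(tree[1], 0); // root computed from the four leaves
}
```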
@@ -55,7 +54,7 @@ impl MerkleTree {
     // --------------------------------------------------------------------------------------------

     /// Returns the root of this Merkle tree.
-    pub fn root(&self) -> Word {
+    pub fn root(&self) -> RpoDigest {
         self.nodes[1]
     }

@@ -72,13 +71,11 @@ impl MerkleTree {
     /// Returns an error if:
     /// * The specified depth is greater than the depth of the tree.
     /// * The specified index is not valid for the specified depth.
-    pub fn get_node(&self, index: NodeIndex) -> Result<Word, MerkleError> {
+    pub fn get_node(&self, index: NodeIndex) -> Result<RpoDigest, MerkleError> {
         if index.is_root() {
             return Err(MerkleError::DepthTooSmall(index.depth()));
         } else if index.depth() > self.depth() {
             return Err(MerkleError::DepthTooBig(index.depth() as u64));
-        } else if !index.is_valid() {
-            return Err(MerkleError::InvalidIndex(index));
         }

         let pos = index.to_scalar_index() as usize;

@@ -97,8 +94,6 @@ impl MerkleTree {
             return Err(MerkleError::DepthTooSmall(index.depth()));
         } else if index.depth() > self.depth() {
             return Err(MerkleError::DepthTooBig(index.depth() as u64));
-        } else if !index.is_valid() {
-            return Err(MerkleError::InvalidIndex(index));
         }

         // TODO should we create a helper in `NodeIndex` that will encapsulate traversal to root so

@@ -111,24 +106,43 @@ impl MerkleTree {
             index.move_up();
         }

-        debug_assert!(
-            index.is_root(),
-            "the path walk must go all the way to the root"
-        );
+        debug_assert!(index.is_root(), "the path walk must go all the way to the root");

         Ok(path.into())
     }

+    // ITERATORS
+    // --------------------------------------------------------------------------------------------
+
+    /// Returns an iterator over the leaves of this [MerkleTree].
+    pub fn leaves(&self) -> impl Iterator<Item = (u64, &Word)> {
+        let leaves_start = self.nodes.len() / 2;
+        self.nodes
+            .iter()
+            .skip(leaves_start)
+            .enumerate()
+            .map(|(i, v)| (i as u64, v.deref()))
+    }
+
+    /// Returns n iterator over every inner node of this [MerkleTree].
+    ///
+    /// The iterator order is unspecified.
+    pub fn inner_nodes(&self) -> InnerNodeIterator {
+        InnerNodeIterator {
+            nodes: &self.nodes,
+            index: 1, // index 0 is just padding, start at 1
+        }
+    }
+
+    // STATE MUTATORS
+    // --------------------------------------------------------------------------------------------
+
     /// Replaces the leaf at the specified index with the provided value.
     ///
     /// # Errors
     /// Returns an error if the specified index value is not a valid leaf value for this tree.
     pub fn update_leaf<'a>(&'a mut self, index_value: u64, value: Word) -> Result<(), MerkleError> {
-        let depth = self.depth();
-        let mut index = NodeIndex::new(depth, index_value);
-        if !index.is_valid() {
-            return Err(MerkleError::InvalidIndex(index));
-        }
+        let mut index = NodeIndex::new(self.depth(), index_value)?;

         // we don't need to copy the pairs into a new address as we are logically guaranteed to not
         // overlap write instructions. however, it's important to bind the lifetime of pairs to

@@ -146,13 +160,13 @@ impl MerkleTree {

         // update the current node
         let pos = index.to_scalar_index() as usize;
-        self.nodes[pos] = value;
+        self.nodes[pos] = value.into();

         // traverse to the root, updating each node with the merged values of its parents
         for _ in 0..index.depth() {
             index.move_up();
             let pos = index.to_scalar_index() as usize;
-            let value = Rpo256::merge(&pairs[pos]).into();
+            let value = Rpo256::merge(&pairs[pos]);
             self.nodes[pos] = value;
         }

@@ -160,7 +174,43 @@ impl MerkleTree {
         }
     }

-/// Utility to vizualize a [MerkleTree] in text.
+// ITERATORS
+// ================================================================================================
+
+/// An iterator over every inner node of the [MerkleTree].
+///
+/// Use this to extract the data of the tree, there is no guarantee on the order of the elements.
+pub struct InnerNodeIterator<'a> {
+    nodes: &'a Vec<RpoDigest>,
+    index: usize,
+}
+
+impl<'a> Iterator for InnerNodeIterator<'a> {
+    type Item = InnerNodeInfo;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        if self.index < self.nodes.len() / 2 {
+            let value = self.index;
+            let left = self.index * 2;
+            let right = left + 1;
+
+            self.index += 1;
+
+            Some(InnerNodeInfo {
+                value: self.nodes[value],
+                left: self.nodes[left],
+                right: self.nodes[right],
+            })
+        } else {
+            None
+        }
+    }
+}
+
+// UTILITY FUNCTIONS
+// ================================================================================================
+
+/// Utility to visualize a [MerkleTree] in text.
 pub fn tree_to_text(tree: &MerkleTree) -> Result<String, fmt::Error> {
     let indent = " ";
     let mut s = String::new();
@@ -169,11 +219,8 @@ pub fn tree_to_text(tree: &MerkleTree) -> Result<String, fmt::Error> {
     for d in 1..=tree.depth() {
         let entries = 2u64.pow(d.into());
         for i in 0..entries {
-            let index = NodeIndex::new(d, i);
-            let node = tree
-                .get_node(index)
-                .expect("The index must always be valid");
+            let index = NodeIndex::new(d, i).expect("The index must always be valid");
+            let node = tree.get_node(index).expect("The node must always be found");

             for _ in 0..d {
                 s.push_str(indent);

@@ -186,7 +233,7 @@ pub fn tree_to_text(tree: &MerkleTree) -> Result<String, fmt::Error> {
     Ok(s)
 }

-/// Utility to vizualize a [MerklePath] in text.
+/// Utility to visualize a [MerklePath] in text.
 pub fn path_to_text(path: &MerklePath) -> Result<String, fmt::Error> {
     let mut s = String::new();
     s.push('[');

@@ -212,18 +259,17 @@ pub fn path_to_text(path: &MerklePath) -> Result<String, fmt::Error> {
 #[cfg(test)]
 mod tests {
     use super::*;
-    use crate::merkle::int_to_node;
+    use crate::{
+        merkle::{digests_to_words, int_to_leaf, int_to_node, InnerNodeInfo},
+        Felt, Word, WORD_SIZE,
+    };
     use core::mem::size_of;
     use proptest::prelude::*;

-    const LEAVES4: [Word; 4] = [
-        int_to_node(1),
-        int_to_node(2),
-        int_to_node(3),
-        int_to_node(4),
-    ];
+    const LEAVES4: [RpoDigest; WORD_SIZE] =
+        [int_to_node(1), int_to_node(2), int_to_node(3), int_to_node(4)];

-    const LEAVES8: [Word; 8] = [
+    const LEAVES8: [RpoDigest; 8] = [
         int_to_node(1),
         int_to_node(2),
         int_to_node(3),

@@ -236,7 +282,7 @@ mod tests {

     #[test]
     fn build_merkle_tree() {
-        let tree = super::MerkleTree::new(LEAVES4.to_vec()).unwrap();
+        let tree = super::MerkleTree::new(digests_to_words(&LEAVES4)).unwrap();
         assert_eq!(8, tree.nodes.len());

         // leaves were copied correctly
@@ -255,58 +301,46 @@ mod tests {
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn get_leaf() {
|
fn get_leaf() {
|
||||||
let tree = super::MerkleTree::new(LEAVES4.to_vec()).unwrap();
|
let tree = super::MerkleTree::new(digests_to_words(&LEAVES4)).unwrap();
|
||||||
|
|
||||||
// check depth 2
|
// check depth 2
|
||||||
assert_eq!(LEAVES4[0], tree.get_node(NodeIndex::new(2, 0)).unwrap());
|
assert_eq!(LEAVES4[0], tree.get_node(NodeIndex::make(2, 0)).unwrap());
|
||||||
assert_eq!(LEAVES4[1], tree.get_node(NodeIndex::new(2, 1)).unwrap());
|
assert_eq!(LEAVES4[1], tree.get_node(NodeIndex::make(2, 1)).unwrap());
|
||||||
assert_eq!(LEAVES4[2], tree.get_node(NodeIndex::new(2, 2)).unwrap());
|
assert_eq!(LEAVES4[2], tree.get_node(NodeIndex::make(2, 2)).unwrap());
|
||||||
assert_eq!(LEAVES4[3], tree.get_node(NodeIndex::new(2, 3)).unwrap());
|
assert_eq!(LEAVES4[3], tree.get_node(NodeIndex::make(2, 3)).unwrap());
|
||||||
|
|
||||||
// check depth 1
|
// check depth 1
|
||||||
let (_, node2, node3) = compute_internal_nodes();
|
let (_, node2, node3) = compute_internal_nodes();
|
||||||
|
|
||||||
assert_eq!(node2, tree.get_node(NodeIndex::new(1, 0)).unwrap());
|
assert_eq!(node2, tree.get_node(NodeIndex::make(1, 0)).unwrap());
|
||||||
assert_eq!(node3, tree.get_node(NodeIndex::new(1, 1)).unwrap());
|
assert_eq!(node3, tree.get_node(NodeIndex::make(1, 1)).unwrap());
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn get_path() {
|
fn get_path() {
|
||||||
let tree = super::MerkleTree::new(LEAVES4.to_vec()).unwrap();
|
let tree = super::MerkleTree::new(digests_to_words(&LEAVES4)).unwrap();
|
||||||
|
|
||||||
let (_, node2, node3) = compute_internal_nodes();
|
let (_, node2, node3) = compute_internal_nodes();
|
||||||
|
|
||||||
// check depth 2
|
// check depth 2
|
||||||
assert_eq!(
|
assert_eq!(vec![LEAVES4[1], node3], *tree.get_path(NodeIndex::make(2, 0)).unwrap());
|
||||||
vec![LEAVES4[1], node3],
|
assert_eq!(vec![LEAVES4[0], node3], *tree.get_path(NodeIndex::make(2, 1)).unwrap());
|
||||||
*tree.get_path(NodeIndex::new(2, 0)).unwrap()
|
assert_eq!(vec![LEAVES4[3], node2], *tree.get_path(NodeIndex::make(2, 2)).unwrap());
|
||||||
);
|
assert_eq!(vec![LEAVES4[2], node2], *tree.get_path(NodeIndex::make(2, 3)).unwrap());
|
||||||
assert_eq!(
|
|
||||||
vec![LEAVES4[0], node3],
|
|
||||||
*tree.get_path(NodeIndex::new(2, 1)).unwrap()
|
|
||||||
);
|
|
||||||
assert_eq!(
|
|
||||||
vec![LEAVES4[3], node2],
|
|
||||||
*tree.get_path(NodeIndex::new(2, 2)).unwrap()
|
|
||||||
);
|
|
||||||
assert_eq!(
|
|
||||||
vec![LEAVES4[2], node2],
|
|
||||||
*tree.get_path(NodeIndex::new(2, 3)).unwrap()
|
|
||||||
);
|
|
||||||
|
|
||||||
// check depth 1
|
// check depth 1
|
||||||
assert_eq!(vec![node3], *tree.get_path(NodeIndex::new(1, 0)).unwrap());
|
assert_eq!(vec![node3], *tree.get_path(NodeIndex::make(1, 0)).unwrap());
|
||||||
assert_eq!(vec![node2], *tree.get_path(NodeIndex::new(1, 1)).unwrap());
|
assert_eq!(vec![node2], *tree.get_path(NodeIndex::make(1, 1)).unwrap());
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn update_leaf() {
|
fn update_leaf() {
|
||||||
let mut tree = super::MerkleTree::new(LEAVES8.to_vec()).unwrap();
|
let mut tree = super::MerkleTree::new(digests_to_words(&LEAVES8)).unwrap();
|
||||||
|
|
||||||
// update one leaf
|
// update one leaf
|
||||||
let value = 3;
|
let value = 3;
|
||||||
let new_node = int_to_node(9);
|
let new_node = int_to_leaf(9);
|
||||||
let mut expected_leaves = LEAVES8.to_vec();
|
let mut expected_leaves = digests_to_words(&LEAVES8);
|
||||||
expected_leaves[value as usize] = new_node;
|
expected_leaves[value as usize] = new_node;
|
||||||
let expected_tree = super::MerkleTree::new(expected_leaves.clone()).unwrap();
|
let expected_tree = super::MerkleTree::new(expected_leaves.clone()).unwrap();
|
||||||
|
|
||||||
@@ -315,7 +349,7 @@ mod tests {
|
|||||||
|
|
||||||
// update another leaf
|
// update another leaf
|
||||||
let value = 6;
|
let value = 6;
|
||||||
let new_node = int_to_node(10);
|
let new_node = int_to_leaf(10);
|
||||||
expected_leaves[value as usize] = new_node;
|
expected_leaves[value as usize] = new_node;
|
||||||
let expected_tree = super::MerkleTree::new(expected_leaves.clone()).unwrap();
|
let expected_tree = super::MerkleTree::new(expected_leaves.clone()).unwrap();
|
||||||
|
|
||||||
@@ -323,6 +357,40 @@ mod tests {
|
|||||||
assert_eq!(expected_tree.nodes, tree.nodes);
|
assert_eq!(expected_tree.nodes, tree.nodes);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn nodes() -> Result<(), MerkleError> {
|
||||||
|
let tree = super::MerkleTree::new(digests_to_words(&LEAVES4)).unwrap();
|
||||||
|
let root = tree.root();
|
||||||
|
let l1n0 = tree.get_node(NodeIndex::make(1, 0))?;
|
||||||
|
let l1n1 = tree.get_node(NodeIndex::make(1, 1))?;
|
||||||
|
let l2n0 = tree.get_node(NodeIndex::make(2, 0))?;
|
||||||
|
let l2n1 = tree.get_node(NodeIndex::make(2, 1))?;
|
||||||
|
let l2n2 = tree.get_node(NodeIndex::make(2, 2))?;
|
||||||
|
let l2n3 = tree.get_node(NodeIndex::make(2, 3))?;
|
||||||
|
|
||||||
|
let nodes: Vec<InnerNodeInfo> = tree.inner_nodes().collect();
|
||||||
|
let expected = vec![
|
||||||
|
InnerNodeInfo {
|
||||||
|
value: root,
|
||||||
|
left: l1n0,
|
||||||
|
right: l1n1,
|
||||||
|
},
|
||||||
|
InnerNodeInfo {
|
||||||
|
value: l1n0,
|
||||||
|
left: l2n0,
|
||||||
|
right: l2n1,
|
||||||
|
},
|
||||||
|
InnerNodeInfo {
|
||||||
|
value: l1n1,
|
||||||
|
left: l2n2,
|
||||||
|
right: l2n3,
|
||||||
|
},
|
||||||
|
];
|
||||||
|
assert_eq!(nodes, expected);
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
proptest! {
|
proptest! {
|
||||||
#[test]
|
#[test]
|
||||||
fn arbitrary_word_can_be_represented_as_digest(
|
fn arbitrary_word_can_be_represented_as_digest(
|
||||||
@@ -340,8 +408,8 @@ mod tests {
|
|||||||
let digest = RpoDigest::from(word);
|
let digest = RpoDigest::from(word);
|
||||||
|
|
||||||
// assert the addresses are different
|
// assert the addresses are different
|
||||||
let word_ptr = (&word).as_ptr() as *const u8;
|
let word_ptr = word.as_ptr() as *const u8;
|
||||||
let digest_ptr = (&digest).as_ptr() as *const u8;
|
let digest_ptr = digest.as_ptr() as *const u8;
|
||||||
assert_ne!(word_ptr, digest_ptr);
|
assert_ne!(word_ptr, digest_ptr);
|
||||||
|
|
||||||
// compare the bytes representation
|
// compare the bytes representation
|
||||||
@@ -354,11 +422,13 @@ mod tests {
|
|||||||
// HELPER FUNCTIONS
|
// HELPER FUNCTIONS
|
||||||
// --------------------------------------------------------------------------------------------
|
// --------------------------------------------------------------------------------------------
|
||||||
|
|
||||||
fn compute_internal_nodes() -> (Word, Word, Word) {
|
fn compute_internal_nodes() -> (RpoDigest, RpoDigest, RpoDigest) {
|
||||||
let node2 = Rpo256::hash_elements(&[LEAVES4[0], LEAVES4[1]].concat());
|
let node2 =
|
||||||
let node3 = Rpo256::hash_elements(&[LEAVES4[2], LEAVES4[3]].concat());
|
Rpo256::hash_elements(&[Word::from(LEAVES4[0]), Word::from(LEAVES4[1])].concat());
|
||||||
|
let node3 =
|
||||||
|
Rpo256::hash_elements(&[Word::from(LEAVES4[2]), Word::from(LEAVES4[3])].concat());
|
||||||
let root = Rpo256::merge(&[node2, node3]);
|
let root = Rpo256::merge(&[node2, node3]);
|
||||||
|
|
||||||
(root.into(), node2.into(), node3.into())
|
(root, node2, node3)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,4 +1,7 @@
-use super::{super::Vec, MmrProof, Rpo256, Word};
+use super::{
+    super::{RpoDigest, Vec, ZERO},
+    Felt, MmrProof, Rpo256, Word,
+};

 #[derive(Debug, Clone, PartialEq)]
 pub struct MmrPeaks {
@@ -8,37 +11,80 @@ pub struct MmrPeaks {
     /// the MMR has a power-of-two number of leaves there is a single peak.
     ///
     /// Every tree in the MMR forest has a distinct power-of-two size, this means only the right
-    /// most tree can have an odd number of elements (1). Additionally this means that the bits in
+    /// most tree can have an odd number of elements (e.g. `1`). Additionally this means that the bits in
     /// `num_leaves` conveniently encode the size of each individual tree.
     ///
     /// Examples:
     ///
-    /// Example 1: With 5 leaves, the binary 0b101. The number of set bits is equal the number
+    /// - With 5 leaves, the binary `0b101`. The number of set bits is equal the number
     /// of peaks, in this case there are 2 peaks. The 0-indexed least-significant position of
-    /// the bit determines the number of elements of a tree, so the rightmost tree has 2**0
-    /// elements and the left most has 2**2.
-    ///
-    /// Example 2: With 12 leaves, the binary is 0b1100, this case also has 2 peaks, the
-    /// leftmost tree has 2**3=8 elements, and the right most has 2**2=4 elements.
+    /// the bit determines the number of elements of a tree, so the rightmost tree has `2**0`
+    /// elements and the left most has `2**2`.
+    /// - With 12 leaves, the binary is `0b1100`, this case also has 2 peaks, the
+    /// leftmost tree has `2**3=8` elements, and the right most has `2**2=4` elements.
     pub num_leaves: usize,

     /// All the peaks of every tree in the MMR forest. The peaks are always ordered by number of
     /// leaves, starting from the peak with most children, to the one with least.
     ///
     /// Invariant: The length of `peaks` must be equal to the number of true bits in `num_leaves`.
-    pub peaks: Vec<Word>,
+    pub peaks: Vec<RpoDigest>,
 }
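The `num_leaves` encoding documented above can be unpacked mechanically: each set bit is one peak and the bit position gives that tree's leaf count. A small stand-alone illustration (the helper name is ours, not the crate's):

```rust
// Decode the forest shape from the leaf count: one peak per set bit,
// largest tree first, with 2^bit leaves per tree.
fn peak_leaf_counts(num_leaves: usize) -> Vec<usize> {
    (0..usize::BITS)
        .rev()
        .filter(|&bit| num_leaves & (1 << bit) != 0)
        .map(|bit| 1 << bit)
        .collect()
}

fn main() {
    assert_eq!(peak_leaf_counts(0b101), vec![4, 1]); // 5 leaves: trees of 4 and 1 leaves
    assert_eq!(peak_leaf_counts(0b1100), vec![8, 4]); // 12 leaves: trees of 8 and 4 leaves
}
```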

 impl MmrPeaks {
-    /// Hashes the peaks sequentially, compacting it to a single digest
+    /// Hashes the peaks.
+    ///
+    /// The procedure will:
+    /// - Flatten and pad the peaks to a vector of Felts.
+    /// - Hash the vector of Felts.
     pub fn hash_peaks(&self) -> Word {
-        Rpo256::hash_elements(&self.peaks.as_slice().concat()).into()
+        Rpo256::hash_elements(&self.flatten_and_pad_peaks()).into()
     }

-    pub fn verify(&self, value: Word, opening: MmrProof) -> bool {
+    pub fn verify(&self, value: RpoDigest, opening: MmrProof) -> bool {
         let root = &self.peaks[opening.peak_index()];
-        opening
-            .merkle_path
-            .verify(opening.relative_pos() as u64, value, root)
+        opening.merkle_path.verify(opening.relative_pos() as u64, value, root)
+    }
+
+    /// Flattens and pads the peaks to make hashing inside of the Miden VM easier.
+    ///
+    /// The procedure will:
+    /// - Flatten the vector of Words into a vector of Felts.
+    /// - Pad the peaks with ZERO to an even number of words, this removes the need to handle RPO
+    ///   padding.
+    /// - Pad the peaks to a minimum length of 16 words, which reduces the constant cost of
+    ///   hashing.
+    pub fn flatten_and_pad_peaks(&self) -> Vec<Felt> {
+        let num_peaks = self.peaks.len();
+
+        // To achieve the padding rules above we calculate the length of the final vector.
+        // This is calculated as the number of field elements. Each peak is 4 field elements.
+        // The length is calculated as follows:
+        // - If there are less than 16 peaks, the data is padded to 16 peaks and as such requires
+        //   64 field elements.
+        // - If there are more than 16 peaks and the number of peaks is odd, the data is padded to
+        //   an even number of peaks and as such requires `(num_peaks + 1) * 4` field elements.
+        // - If there are more than 16 peaks and the number of peaks is even, the data is not padded
+        //   and as such requires `num_peaks * 4` field elements.
+        let len = if num_peaks < 16 {
+            64
+        } else if num_peaks % 2 == 1 {
+            (num_peaks + 1) * 4
+        } else {
+            num_peaks * 4
+        };
+
+        let mut elements = Vec::with_capacity(len);
+        elements.extend_from_slice(
+            &self
+                .peaks
+                .as_slice()
+                .iter()
+                .map(|digest| digest.into())
+                .collect::<Vec<Word>>()
+                .concat(),
+        );
+        elements.resize(len, ZERO);
+        elements
     }
 }
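The three padding cases spelled out in the comments above reduce to a short length computation (4 field elements per peak, a 16-peak minimum, and an even number of peaks otherwise). A stand-alone sketch of just that rule:

```rust
// Number of field elements produced by the padding rule described above.
fn padded_len(num_peaks: usize) -> usize {
    if num_peaks < 16 {
        64 // fewer than 16 peaks: pad to 16 peaks, i.e. 64 field elements
    } else if num_peaks % 2 == 1 {
        (num_peaks + 1) * 4 // odd: pad to an even number of peaks
    } else {
        num_peaks * 4 // even and >= 16: no padding needed
    }
}

fn main() {
    assert_eq!(padded_len(3), 64);
    assert_eq!(padded_len(16), 64);
    assert_eq!(padded_len(17), 72);
}
```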
@@ -10,9 +10,11 @@
 //! depths, i.e. as part of adding adding a new element to the forest the trees with same depth are
 //! merged, creating a new tree with depth d+1, this process is continued until the property is
 //! restabilished.
-use super::bit::TrueBitPositionIterator;
-use super::{super::Vec, MmrPeaks, MmrProof, Rpo256, Word};
-use crate::merkle::MerklePath;
+use super::{
+    super::{InnerNodeInfo, MerklePath, RpoDigest, Vec},
+    bit::TrueBitPositionIterator,
+    MmrPeaks, MmrProof, Rpo256,
+};
 use core::fmt::{Display, Formatter};

 #[cfg(feature = "std")]

@@ -26,6 +28,7 @@ use std::error::Error;
 ///
 /// Since this is a full representation of the MMR, elements are never removed and the MMR will
 /// grow roughly `O(2n)` in number of leaf elements.
+#[derive(Debug, Clone)]
 pub struct Mmr {
     /// Refer to the `forest` method documentation for details of the semantics of this value.
     pub(super) forest: usize,

@@ -36,7 +39,7 @@ pub struct Mmr {
     /// the elements of every tree in the forest to be stored in the same sequential buffer. It
     /// also means new elements can be added to the forest, and merging of trees is very cheap with
     /// no need to copy elements.
-    pub(super) nodes: Vec<Word>,
+    pub(super) nodes: Vec<RpoDigest>,
 }

 #[derive(Debug, PartialEq, Eq, Copy, Clone)]
@@ -127,7 +130,7 @@ impl Mmr {
     /// Note: The leaf position is the 0-indexed number corresponding to the order the leaves were
     /// added, this corresponds to the MMR size _prior_ to adding the element. So the 1st element
     /// has position 0, the second position 1, and so on.
-    pub fn get(&self, pos: usize) -> Result<Word, MmrError> {
+    pub fn get(&self, pos: usize) -> Result<RpoDigest, MmrError> {
         // find the target tree responsible for the MMR position
         let tree_bit =
             leaf_to_corresponding_tree(pos, self.forest).ok_or(MmrError::InvalidPosition(pos))?;

@@ -151,7 +154,7 @@ impl Mmr {
     }

     /// Adds a new element to the MMR.
-    pub fn add(&mut self, el: Word) {
+    pub fn add(&mut self, el: RpoDigest) {
         // Note: every node is also a tree of size 1, adding an element to the forest creates a new
         // rooted-tree of size 1. This may temporarily break the invariant that every tree in the
         // forest has different sizes, the loop below will eagerly merge trees of same size and

@@ -162,7 +165,7 @@ impl Mmr {
         let mut right = el;
         let mut left_tree = 1;
         while self.forest & left_tree != 0 {
-            right = *Rpo256::merge(&[self.nodes[left_offset].into(), right.into()]);
+            right = Rpo256::merge(&[self.nodes[left_offset], right]);
             self.nodes.push(right);

             left_offset = left_offset.saturating_sub(nodes_in_forest(left_tree));

@@ -172,9 +175,9 @@ impl Mmr {
         self.forest += 1;
     }

-    /// Returns an accumulator representing the current state of the MMMR.
+    /// Returns an accumulator representing the current state of the MMR.
     pub fn accumulator(&self) -> MmrPeaks {
-        let peaks: Vec<Word> = TrueBitPositionIterator::new(self.forest)
+        let peaks: Vec<RpoDigest> = TrueBitPositionIterator::new(self.forest)
             .rev()
             .map(|bit| nodes_in_forest(1 << bit))
             .scan(0, |offset, el| {
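The eager-merge loop in `add` shown above behaves like incrementing a binary counter: `forest` counts leaves, and every carry bit corresponds to one merge of two equal-sized trees. A stand-alone illustration of that bookkeeping (not the crate's API):

```rust
// Adding a leaf to a forest of `forest` leaves performs one merge per carry
// bit, i.e. one merge per trailing one-bit of the current leaf count.
fn merges_on_add(forest: usize) -> u32 {
    forest.trailing_ones()
}

fn main() {
    assert_eq!(merges_on_add(0b0), 0); // first leaf: no merge
    assert_eq!(merges_on_add(0b1), 1); // two size-1 trees merge into a size-2 tree
    assert_eq!(merges_on_add(0b11), 2); // carries propagate: two merges
    assert_eq!(merges_on_add(0b100), 0);
}
```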
@@ -190,6 +193,16 @@ impl Mmr {
         }
     }

+    /// An iterator over inner nodes in the MMR. The order of iteration is unspecified.
+    pub fn inner_nodes(&self) -> MmrNodes {
+        MmrNodes {
+            mmr: self,
+            forest: 0,
+            last_right: 0,
+            index: 0,
+        }
+    }
+
     // UTILITIES
     // ============================================================================================

@@ -200,7 +213,7 @@ impl Mmr {
         relative_pos: usize,
         index_offset: usize,
         mut index: usize,
-    ) -> (Word, Vec<Word>) {
+    ) -> (RpoDigest, Vec<RpoDigest>) {
         // collect the Merkle path
         let mut tree_depth = tree_bit as usize;
         let mut path = Vec::with_capacity(tree_depth + 1);

@@ -235,7 +248,7 @@ impl Mmr {

 impl<T> From<T> for Mmr
 where
-    T: IntoIterator<Item = Word>,
+    T: IntoIterator<Item = RpoDigest>,
 {
     fn from(values: T) -> Self {
         let mut mmr = Mmr::new();
@@ -246,6 +259,84 @@ where
     }
 }

+// ITERATOR
+// ===============================================================================================
+
+/// Yields inner nodes of the [Mmr].
+pub struct MmrNodes<'a> {
+    /// [Mmr] being yielded, when its `forest` value is matched, the iterations is finished.
+    mmr: &'a Mmr,
+    /// Keeps track of the left nodes yielded so far waiting for a right pair, this matches the
+    /// semantics of the [Mmr]'s forest attribute, since that too works as a buffer of left nodes
+    /// waiting for a pair to be hashed together.
+    forest: usize,
+    /// Keeps track of the last right node yielded, after this value is set, the next iteration
+    /// will be its parent with its corresponding left node that has been yield already.
+    last_right: usize,
+    /// The current index in the `nodes` vector.
+    index: usize,
+}
+
+impl<'a> Iterator for MmrNodes<'a> {
+    type Item = InnerNodeInfo;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        debug_assert!(self.last_right.count_ones() <= 1, "last_right tracks zero or one element");
+
+        // only parent nodes are emitted, remove the single node tree from the forest
+        let target = self.mmr.forest & (usize::MAX << 1);
+
+        if self.forest < target {
+            if self.last_right == 0 {
+                // yield the left leaf
+                debug_assert!(self.last_right == 0, "left must be before right");
+                self.forest |= 1;
+                self.index += 1;
+
+                // yield the right leaf
+                debug_assert!((self.forest & 1) == 1, "right must be after left");
+                self.last_right |= 1;
+                self.index += 1;
+            };
+
+            debug_assert!(
+                self.forest & self.last_right != 0,
+                "parent requires both a left and right",
+            );
+
+            // compute the number of nodes in the right tree, this is the offset to the
+            // previous left parent
+            let right_nodes = nodes_in_forest(self.last_right);
+            // the next parent position is one above the position of the pair
+            let parent = self.last_right << 1;
+
+            // the left node has been paired and the current parent yielded, removed it from the forest
+            self.forest ^= self.last_right;
+            if self.forest & parent == 0 {
+                // this iteration yielded the left parent node
+                debug_assert!(self.forest & 1 == 0, "next iteration yields a left leaf");
+                self.last_right = 0;
+                self.forest ^= parent;
+            } else {
+                // the left node of the parent level has been yielded already, this iteration
+                // was the right parent. Next iteration yields their parent.
+                self.last_right = parent;
+            }
+
+            // yields a parent
+            let value = self.mmr.nodes[self.index];
+            let right = self.mmr.nodes[self.index - 1];
+            let left = self.mmr.nodes[self.index - 1 - right_nodes];
+            self.index += 1;
+            let node = InnerNodeInfo { value, left, right };
+
+            Some(node)
+        } else {
+            None
+        }
+    }
+}
+
 // UTILITIES
 // ===============================================================================================

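A sanity check for the iterator added above: a forest with `n` leaves contains `2n - popcount(n)` nodes in total, so an iterator that yields only parents produces `n - popcount(n)` items. A stand-alone sketch of that count (our helper, not part of the crate):

```rust
// Expected number of items produced by an inner-node iterator over an MMR
// with `num_leaves` leaves.
fn inner_node_count(num_leaves: usize) -> usize {
    num_leaves - num_leaves.count_ones() as usize
}

fn main() {
    assert_eq!(inner_node_count(7), 4); // trees of 4, 2, and 1 leaves: 3 + 1 + 0 parents
    assert_eq!(inner_node_count(8), 7); // a single perfect tree with 8 leaves
}
```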
@@ -6,7 +6,7 @@ mod proof;
 #[cfg(test)]
 mod tests;

-use super::{Rpo256, Word};
+use super::{Felt, Rpo256, Word};

 // REEXPORTS
 // ================================================================================================
|
|||||||
@@ -1,7 +1,14 @@
|
|||||||
use super::bit::TrueBitPositionIterator;
|
use super::{
|
||||||
use super::full::{high_bitmask, leaf_to_corresponding_tree, nodes_in_forest};
|
super::{InnerNodeInfo, Vec},
|
||||||
use super::{super::Vec, Mmr, Rpo256, Word};
|
bit::TrueBitPositionIterator,
|
||||||
use crate::merkle::{int_to_node, MerklePath};
|
full::{high_bitmask, leaf_to_corresponding_tree, nodes_in_forest},
|
||||||
|
Mmr, MmrPeaks, Rpo256,
|
||||||
|
};
|
||||||
|
use crate::{
|
||||||
|
hash::rpo::RpoDigest,
|
||||||
|
merkle::{int_to_node, MerklePath},
|
||||||
|
Felt, Word,
|
||||||
|
};
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_position_equal_or_higher_than_leafs_is_never_contained() {
|
fn test_position_equal_or_higher_than_leafs_is_never_contained() {
|
||||||
@@ -96,7 +103,7 @@ fn test_nodes_in_forest_single_bit() {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
const LEAVES: [Word; 7] = [
|
const LEAVES: [RpoDigest; 7] = [
|
||||||
int_to_node(0),
|
int_to_node(0),
|
||||||
int_to_node(1),
|
int_to_node(1),
|
||||||
int_to_node(2),
|
int_to_node(2),
|
||||||
@@ -111,16 +118,14 @@ fn test_mmr_simple() {
|
|||||||
let mut postorder = Vec::new();
|
let mut postorder = Vec::new();
|
||||||
postorder.push(LEAVES[0]);
|
postorder.push(LEAVES[0]);
|
||||||
postorder.push(LEAVES[1]);
|
postorder.push(LEAVES[1]);
|
||||||
postorder.push(*Rpo256::hash_elements(&[LEAVES[0], LEAVES[1]].concat()));
|
postorder.push(Rpo256::merge(&[LEAVES[0], LEAVES[1]]));
|
||||||
postorder.push(LEAVES[2]);
|
postorder.push(LEAVES[2]);
|
||||||
postorder.push(LEAVES[3]);
|
postorder.push(LEAVES[3]);
|
||||||
postorder.push(*Rpo256::hash_elements(&[LEAVES[2], LEAVES[3]].concat()));
|
postorder.push(Rpo256::merge(&[LEAVES[2], LEAVES[3]]));
|
||||||
postorder.push(*Rpo256::hash_elements(
|
postorder.push(Rpo256::merge(&[postorder[2], postorder[5]]));
|
||||||
&[postorder[2], postorder[5]].concat(),
|
|
||||||
));
|
|
||||||
postorder.push(LEAVES[4]);
|
postorder.push(LEAVES[4]);
|
||||||
postorder.push(LEAVES[5]);
|
postorder.push(LEAVES[5]);
|
||||||
postorder.push(*Rpo256::hash_elements(&[LEAVES[4], LEAVES[5]].concat()));
|
postorder.push(Rpo256::merge(&[LEAVES[4], LEAVES[5]]));
|
||||||
postorder.push(LEAVES[6]);
|
postorder.push(LEAVES[6]);
|
||||||
|
|
||||||
let mut mmr = Mmr::new();
|
let mut mmr = Mmr::new();
|
||||||
@@ -194,14 +199,11 @@ fn test_mmr_simple() {
|
|||||||
#[test]
|
#[test]
|
||||||
fn test_mmr_open() {
|
fn test_mmr_open() {
|
||||||
let mmr: Mmr = LEAVES.into();
|
let mmr: Mmr = LEAVES.into();
|
||||||
let h01: Word = Rpo256::hash_elements(&LEAVES[0..2].concat()).into();
|
let h01 = Rpo256::merge(&[LEAVES[0], LEAVES[1]]);
|
||||||
let h23: Word = Rpo256::hash_elements(&LEAVES[2..4].concat()).into();
|
let h23 = Rpo256::merge(&[LEAVES[2], LEAVES[3]]);
|
||||||
|
|
||||||
// node at pos 7 is the root
|
// node at pos 7 is the root
|
||||||
assert!(
|
assert!(mmr.open(7).is_err(), "Element 7 is not in the tree, result should be None");
|
||||||
mmr.open(7).is_err(),
|
|
||||||
"Element 7 is not in the tree, result should be None"
|
|
||||||
);
|
|
||||||
|
|
||||||
// node at pos 6 is the root
|
// node at pos 6 is the root
|
||||||
let empty: MerklePath = MerklePath::new(vec![]);
|
let empty: MerklePath = MerklePath::new(vec![]);
|
||||||
@@ -216,7 +218,7 @@ fn test_mmr_open() {
|
|||||||
"MmrProof should be valid for the current accumulator."
|
"MmrProof should be valid for the current accumulator."
|
||||||
);
|
);
|
||||||
|
|
||||||
// nodes 4,5 are detph 1
|
// nodes 4,5 are depth 1
|
||||||
let root_to_path = MerklePath::new(vec![LEAVES[4]]);
|
let root_to_path = MerklePath::new(vec![LEAVES[4]]);
|
||||||
let opening = mmr
|
let opening = mmr
|
||||||
.open(5)
|
.open(5)
|
||||||
@@ -294,41 +296,13 @@ fn test_mmr_open() {
|
|||||||
#[test]
|
#[test]
|
||||||
fn test_mmr_get() {
|
fn test_mmr_get() {
|
||||||
let mmr: Mmr = LEAVES.into();
|
let mmr: Mmr = LEAVES.into();
|
||||||
assert_eq!(
|
assert_eq!(mmr.get(0).unwrap(), LEAVES[0], "value at pos 0 must correspond");
|
||||||
mmr.get(0).unwrap(),
|
assert_eq!(mmr.get(1).unwrap(), LEAVES[1], "value at pos 1 must correspond");
|
||||||
LEAVES[0],
|
assert_eq!(mmr.get(2).unwrap(), LEAVES[2], "value at pos 2 must correspond");
|
||||||
"value at pos 0 must correspond"
|
assert_eq!(mmr.get(3).unwrap(), LEAVES[3], "value at pos 3 must correspond");
|
||||||
);
|
assert_eq!(mmr.get(4).unwrap(), LEAVES[4], "value at pos 4 must correspond");
|
||||||
assert_eq!(
|
assert_eq!(mmr.get(5).unwrap(), LEAVES[5], "value at pos 5 must correspond");
|
||||||
mmr.get(1).unwrap(),
|
assert_eq!(mmr.get(6).unwrap(), LEAVES[6], "value at pos 6 must correspond");
|
||||||
LEAVES[1],
|
|
||||||
"value at pos 1 must correspond"
|
|
||||||
);
|
|
||||||
assert_eq!(
|
|
||||||
mmr.get(2).unwrap(),
|
|
||||||
LEAVES[2],
|
|
||||||
"value at pos 2 must correspond"
|
|
||||||
);
|
|
||||||
assert_eq!(
|
|
||||||
mmr.get(3).unwrap(),
|
|
||||||
LEAVES[3],
|
|
||||||
"value at pos 3 must correspond"
|
|
||||||
);
|
|
||||||
assert_eq!(
|
|
||||||
mmr.get(4).unwrap(),
|
|
||||||
LEAVES[4],
|
|
||||||
"value at pos 4 must correspond"
|
|
||||||
);
|
|
||||||
assert_eq!(
|
|
||||||
mmr.get(5).unwrap(),
|
|
||||||
LEAVES[5],
|
|
||||||
"value at pos 5 must correspond"
|
|
||||||
);
|
|
||||||
assert_eq!(
|
|
||||||
mmr.get(6).unwrap(),
|
|
||||||
LEAVES[6],
|
|
||||||
"value at pos 6 must correspond"
|
|
||||||
);
|
|
||||||
assert!(mmr.get(7).is_err());
|
assert!(mmr.get(7).is_err());
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -338,11 +312,7 @@ fn test_mmr_invariants() {
|
|||||||
for v in 1..=1028 {
|
for v in 1..=1028 {
|
||||||
mmr.add(int_to_node(v));
|
mmr.add(int_to_node(v));
|
||||||
let accumulator = mmr.accumulator();
|
let accumulator = mmr.accumulator();
|
||||||
assert_eq!(
|
assert_eq!(v as usize, mmr.forest(), "MMR leaf count must increase by one on every add");
|
||||||
v as usize,
|
|
||||||
mmr.forest(),
|
|
||||||
"MMR leaf count must increase by one on every add"
|
|
||||||
);
|
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
v as usize, accumulator.num_leaves,
|
v as usize, accumulator.num_leaves,
|
||||||
"MMR and its accumulator must match leaves count"
|
"MMR and its accumulator must match leaves count"
|
||||||
@@ -371,45 +341,120 @@ fn test_bit_position_iterator() {
|
|||||||
assert_eq!(TrueBitPositionIterator::new(0).count(), 0);
|
assert_eq!(TrueBitPositionIterator::new(0).count(), 0);
|
||||||
assert_eq!(TrueBitPositionIterator::new(0).rev().count(), 0);
|
assert_eq!(TrueBitPositionIterator::new(0).rev().count(), 0);
|
||||||
|
|
||||||
assert_eq!(
|
assert_eq!(TrueBitPositionIterator::new(1).collect::<Vec<u32>>(), vec![0]);
|
||||||
TrueBitPositionIterator::new(1).collect::<Vec<u32>>(),
|
assert_eq!(TrueBitPositionIterator::new(1).rev().collect::<Vec<u32>>(), vec![0],);
|
||||||
vec![0]
|
|
||||||
);
|
|
||||||
assert_eq!(
|
|
||||||
TrueBitPositionIterator::new(1).rev().collect::<Vec<u32>>(),
|
|
||||||
vec![0],
|
|
||||||
);
|
|
||||||
|
|
||||||
assert_eq!(
|
assert_eq!(TrueBitPositionIterator::new(2).collect::<Vec<u32>>(), vec![1]);
|
||||||
TrueBitPositionIterator::new(2).collect::<Vec<u32>>(),
|
assert_eq!(TrueBitPositionIterator::new(2).rev().collect::<Vec<u32>>(), vec![1],);
|
||||||
vec![1]
|
|
||||||
);
|
|
||||||
assert_eq!(
|
|
||||||
TrueBitPositionIterator::new(2).rev().collect::<Vec<u32>>(),
|
|
||||||
vec![1],
|
|
||||||
);
|
|
||||||
|
|
||||||
assert_eq!(
|
assert_eq!(TrueBitPositionIterator::new(3).collect::<Vec<u32>>(), vec![0, 1],);
|
||||||
TrueBitPositionIterator::new(3).collect::<Vec<u32>>(),
|
assert_eq!(TrueBitPositionIterator::new(3).rev().collect::<Vec<u32>>(), vec![1, 0],);
|
||||||
vec![0, 1],
|
|
||||||
);
|
|
||||||
assert_eq!(
|
|
||||||
TrueBitPositionIterator::new(3).rev().collect::<Vec<u32>>(),
|
|
||||||
vec![1, 0],
|
|
||||||
);
|
|
||||||
|
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
TrueBitPositionIterator::new(0b11010101).collect::<Vec<u32>>(),
|
TrueBitPositionIterator::new(0b11010101).collect::<Vec<u32>>(),
|
||||||
vec![0, 2, 4, 6, 7],
|
vec![0, 2, 4, 6, 7],
|
||||||
);
|
);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
TrueBitPositionIterator::new(0b11010101)
|
TrueBitPositionIterator::new(0b11010101).rev().collect::<Vec<u32>>(),
|
||||||
.rev()
|
|
||||||
.collect::<Vec<u32>>(),
|
|
||||||
vec![7, 6, 4, 2, 0],
|
vec![7, 6, 4, 2, 0],
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_mmr_inner_nodes() {
|
||||||
|
let mmr: Mmr = LEAVES.into();
|
||||||
|
let nodes: Vec<InnerNodeInfo> = mmr.inner_nodes().collect();
|
||||||
|
|
||||||
|
let h01 = Rpo256::merge(&[LEAVES[0], LEAVES[1]]);
|
||||||
|
let h23 = Rpo256::merge(&[LEAVES[2], LEAVES[3]]);
|
||||||
|
let h0123 = Rpo256::merge(&[h01, h23]);
|
||||||
|
let h45 = Rpo256::merge(&[LEAVES[4], LEAVES[5]]);
|
||||||
|
let postorder = vec![
|
||||||
|
InnerNodeInfo {
|
||||||
|
value: h01,
|
||||||
|
left: LEAVES[0],
|
||||||
|
right: LEAVES[1],
|
||||||
|
},
|
||||||
|
InnerNodeInfo {
|
||||||
|
value: h23,
|
||||||
|
left: LEAVES[2],
|
||||||
|
right: LEAVES[3],
|
||||||
|
},
|
||||||
|
InnerNodeInfo {
|
||||||
|
value: h0123,
|
||||||
|
left: h01,
|
||||||
|
right: h23,
|
||||||
|
},
|
||||||
|
InnerNodeInfo {
|
||||||
|
value: h45,
|
||||||
|
left: LEAVES[4],
|
||||||
|
right: LEAVES[5],
|
||||||
|
},
|
||||||
|
];
|
||||||
|
|
||||||
|
assert_eq!(postorder, nodes);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_mmr_hash_peaks() {
|
||||||
|
let mmr: Mmr = LEAVES.into();
|
||||||
|
let peaks = mmr.accumulator();
|
||||||
|
|
||||||
|
let first_peak = Rpo256::merge(&[
|
||||||
|
Rpo256::merge(&[LEAVES[0], LEAVES[1]]),
|
||||||
|
Rpo256::merge(&[LEAVES[2], LEAVES[3]]),
|
||||||
|
]);
|
||||||
|
let second_peak = Rpo256::merge(&[LEAVES[4], LEAVES[5]]);
|
||||||
|
let third_peak = LEAVES[6];
|
||||||
|
|
||||||
|
// minimum length is 16
|
||||||
|
let mut expected_peaks = [first_peak, second_peak, third_peak].to_vec();
|
||||||
|
expected_peaks.resize(16, RpoDigest::default());
|
||||||
|
assert_eq!(
|
||||||
|
peaks.hash_peaks(),
|
||||||
|
*Rpo256::hash_elements(&digests_to_elements(&expected_peaks))
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_mmr_peaks_hash_less_than_16() {
|
||||||
|
let mut peaks = Vec::new();
|
||||||
|
|
||||||
|
for i in 0..16 {
|
||||||
|
peaks.push(int_to_node(i));
|
||||||
|
let accumulator = MmrPeaks {
|
||||||
|
num_leaves: (1 << peaks.len()) - 1,
|
||||||
|
peaks: peaks.clone(),
|
||||||
|
};
|
||||||
|
|
||||||
|
// minimum length is 16
|
||||||
|
let mut expected_peaks = peaks.clone();
|
||||||
|
expected_peaks.resize(16, RpoDigest::default());
|
||||||
|
assert_eq!(
|
||||||
|
accumulator.hash_peaks(),
|
||||||
|
*Rpo256::hash_elements(&digests_to_elements(&expected_peaks))
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_mmr_peaks_hash_odd() {
|
||||||
|
let peaks: Vec<_> = (0..=17).map(int_to_node).collect();
|
||||||
|
|
||||||
|
let accumulator = MmrPeaks {
|
||||||
|
num_leaves: (1 << peaks.len()) - 1,
|
||||||
|
peaks: peaks.clone(),
|
||||||
|
};
|
||||||
|
|
||||||
|
// odd length bigger than 16 is padded to the next even number
|
||||||
|
let mut expected_peaks = peaks;
|
||||||
|
expected_peaks.resize(18, RpoDigest::default());
|
||||||
|
assert_eq!(
|
||||||
|
accumulator.hash_peaks(),
|
||||||
|
*Rpo256::hash_elements(&digests_to_elements(&expected_peaks))
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
mod property_tests {
|
mod property_tests {
|
||||||
use super::leaf_to_corresponding_tree;
|
use super::leaf_to_corresponding_tree;
|
||||||
use proptest::prelude::*;
|
use proptest::prelude::*;
|
||||||
@@ -438,3 +483,10 @@ mod property_tests {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// HELPER FUNCTIONS
|
||||||
|
// ================================================================================================
|
||||||
|
|
||||||
|
fn digests_to_elements(digests: &[RpoDigest]) -> Vec<Felt> {
|
||||||
|
digests.iter().flat_map(Word::from).collect()
|
||||||
|
}
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
 use super::{
     hash::rpo::{Rpo256, RpoDigest},
-    utils::collections::{vec, BTreeMap, BTreeSet, Vec},
+    utils::collections::{vec, BTreeMap, BTreeSet, KvMap, RecordingMap, Vec},
     Felt, StarkField, Word, WORD_SIZE, ZERO,
 };
 use core::fmt;
@@ -26,28 +26,39 @@ pub use path_set::MerklePathSet;
 mod simple_smt;
 pub use simple_smt::SimpleSmt;

+mod tiered_smt;
+pub use tiered_smt::TieredSmt;
+
 mod mmr;
-pub use mmr::{Mmr, MmrPeaks};
+pub use mmr::{Mmr, MmrPeaks, MmrProof};

 mod store;
-pub use store::MerkleStore;
+pub use store::{DefaultMerkleStore, MerkleStore, RecordingMerkleStore, StoreNode};
+
+mod node;
+pub use node::InnerNodeInfo;
+
+mod partial_mt;
+pub use partial_mt::PartialMerkleTree;

 // ERRORS
 // ================================================================================================

 #[derive(Clone, Debug, PartialEq, Eq)]
 pub enum MerkleError {
-    ConflictingRoots(Vec<Word>),
+    ConflictingRoots(Vec<RpoDigest>),
     DepthTooSmall(u8),
     DepthTooBig(u64),
-    NodeNotInStore(Word, NodeIndex),
-    NumLeavesNotPowerOfTwo(usize),
-    InvalidIndex(NodeIndex),
+    DuplicateValuesForIndex(u64),
+    DuplicateValuesForKey(RpoDigest),
+    InvalidIndex { depth: u8, value: u64 },
     InvalidDepth { expected: u8, provided: u8 },
     InvalidPath(MerklePath),
-    InvalidEntriesCount(usize, usize),
-    NodeNotInSet(u64),
-    RootNotInStore(Word),
+    InvalidNumEntries(usize, usize),
+    NodeNotInSet(NodeIndex),
+    NodeNotInStore(RpoDigest, NodeIndex),
+    NumLeavesNotPowerOfTwo(usize),
+    RootNotInStore(RpoDigest),
 }

 impl fmt::Display for MerkleError {
@@ -57,21 +68,23 @@ impl fmt::Display for MerkleError {
|
|||||||
ConflictingRoots(roots) => write!(f, "the merkle paths roots do not match {roots:?}"),
|
ConflictingRoots(roots) => write!(f, "the merkle paths roots do not match {roots:?}"),
|
||||||
DepthTooSmall(depth) => write!(f, "the provided depth {depth} is too small"),
|
DepthTooSmall(depth) => write!(f, "the provided depth {depth} is too small"),
|
||||||
DepthTooBig(depth) => write!(f, "the provided depth {depth} is too big"),
|
DepthTooBig(depth) => write!(f, "the provided depth {depth} is too big"),
|
||||||
NumLeavesNotPowerOfTwo(leaves) => {
|
DuplicateValuesForIndex(key) => write!(f, "multiple values provided for key {key}"),
|
||||||
write!(f, "the leaves count {leaves} is not a power of 2")
|
DuplicateValuesForKey(key) => write!(f, "multiple values provided for key {key}"),
|
||||||
}
|
InvalidIndex{ depth, value} => write!(
|
||||||
InvalidIndex(index) => write!(
|
|
||||||
f,
|
f,
|
||||||
"the index value {} is not valid for the depth {}", index.value(), index.depth()
|
"the index value {value} is not valid for the depth {depth}"
|
||||||
),
|
),
|
||||||
InvalidDepth { expected, provided } => write!(
|
InvalidDepth { expected, provided } => write!(
|
||||||
f,
|
f,
|
||||||
"the provided depth {provided} is not valid for {expected}"
|
"the provided depth {provided} is not valid for {expected}"
|
||||||
),
|
),
|
||||||
InvalidPath(_path) => write!(f, "the provided path is not valid"),
|
InvalidPath(_path) => write!(f, "the provided path is not valid"),
|
||||||
InvalidEntriesCount(max, provided) => write!(f, "the provided number of entries is {provided}, but the maximum for the given depth is {max}"),
|
InvalidNumEntries(max, provided) => write!(f, "the provided number of entries is {provided}, but the maximum for the given depth is {max}"),
|
||||||
NodeNotInSet(index) => write!(f, "the node indexed by {index} is not in the set"),
|
NodeNotInSet(index) => write!(f, "the node with index ({index}) is not in the set"),
|
||||||
NodeNotInStore(hash, index) => write!(f, "the node {:?} indexed by {} and depth {} is not in the store", hash, index.value(), index.depth(),),
|
NodeNotInStore(hash, index) => write!(f, "the node {hash:?} with index ({index}) is not in the store"),
|
||||||
|
NumLeavesNotPowerOfTwo(leaves) => {
|
||||||
|
write!(f, "the leaves count {leaves} is not a power of 2")
|
||||||
|
}
|
||||||
RootNotInStore(root) => write!(f, "the root {:?} is not in the store", root),
|
RootNotInStore(root) => write!(f, "the root {:?} is not in the store", root),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -84,6 +97,16 @@ impl std::error::Error for MerkleError {}
|
|||||||
// ================================================================================================
|
// ================================================================================================
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
const fn int_to_node(value: u64) -> Word {
|
const fn int_to_node(value: u64) -> RpoDigest {
|
||||||
|
RpoDigest::new([Felt::new(value), ZERO, ZERO, ZERO])
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
const fn int_to_leaf(value: u64) -> Word {
|
||||||
[Felt::new(value), ZERO, ZERO, ZERO]
|
[Felt::new(value), ZERO, ZERO, ZERO]
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
fn digests_to_words(digests: &[RpoDigest]) -> Vec<Word> {
|
||||||
|
digests.iter().map(|d| d.into()).collect()
|
||||||
|
}
|
||||||
|
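// Illustrative sketch (not part of the diff above): how calling code might report the
// reshaped error variants. `describe` is a hypothetical helper; `MerkleError` and
// `NodeIndex` are the module's own types, and `format!`/`to_string` are assumed to be
// available via the crate's utils re-exports.
fn describe(err: &MerkleError) -> String {
    match err {
        MerkleError::InvalidIndex { depth, value } => {
            format!("index {value} is out of range for depth {depth}")
        }
        MerkleError::NodeNotInSet(index) => {
            format!("no node at depth {}, value {}", index.depth(), index.value())
        }
        other => other.to_string(),
    }
}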
9 src/merkle/node.rs (new file)
@@ -0,0 +1,9 @@
use crate::hash::rpo::RpoDigest;

/// Representation of a node with two children used for iterating over containers.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct InnerNodeInfo {
    pub value: RpoDigest,
    pub left: RpoDigest,
    pub right: RpoDigest,
}
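// Illustrative sketch (an assumption, not part of the new file): the `value` of an
// `InnerNodeInfo` is expected to be the RPO merge of its two children, which is how
// the inner-node iterators elsewhere in this crate produce it. `make_inner_node` is a
// hypothetical helper and assumes `Rpo256` is in scope.
fn make_inner_node(left: RpoDigest, right: RpoDigest) -> InnerNodeInfo {
    InnerNodeInfo {
        value: Rpo256::merge(&[left, right]),
        left,
        right,
    }
}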
329 src/merkle/partial_mt/mod.rs (new file)
@@ -0,0 +1,329 @@
|
|||||||
|
use super::{
|
||||||
|
BTreeMap, BTreeSet, MerkleError, MerklePath, NodeIndex, Rpo256, RpoDigest, ValuePath, Vec, ZERO,
|
||||||
|
};
|
||||||
|
use crate::utils::{format, string::String, word_to_hex};
|
||||||
|
use core::fmt;
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests;
|
||||||
|
|
||||||
|
// CONSTANTS
|
||||||
|
// ================================================================================================
|
||||||
|
|
||||||
|
/// Index of the root node.
|
||||||
|
const ROOT_INDEX: NodeIndex = NodeIndex::root();
|
||||||
|
|
||||||
|
/// An RpoDigest consisting of 4 ZERO elements.
|
||||||
|
const EMPTY_DIGEST: RpoDigest = RpoDigest::new([ZERO; 4]);
|
||||||
|
|
||||||
|
// PARTIAL MERKLE TREE
|
||||||
|
// ================================================================================================
|
||||||
|
|
||||||
|
/// A partial Merkle tree with NodeIndex keys and 4-element RpoDigest leaf values. A partial
/// Merkle tree allows building a Merkle tree from Merkle paths of different lengths.
|
||||||
|
///
|
||||||
|
/// The root of the tree is recomputed on each new leaf update.
|
||||||
|
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||||
|
pub struct PartialMerkleTree {
|
||||||
|
max_depth: u8,
|
||||||
|
nodes: BTreeMap<NodeIndex, RpoDigest>,
|
||||||
|
leaves: BTreeSet<NodeIndex>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Default for PartialMerkleTree {
|
||||||
|
fn default() -> Self {
|
||||||
|
Self::new()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PartialMerkleTree {
|
||||||
|
// CONSTANTS
|
||||||
|
// --------------------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Minimum supported depth.
|
||||||
|
pub const MIN_DEPTH: u8 = 1;
|
||||||
|
|
||||||
|
/// Maximum supported depth.
|
||||||
|
pub const MAX_DEPTH: u8 = 64;
|
||||||
|
|
||||||
|
// CONSTRUCTORS
|
||||||
|
// --------------------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Returns a new empty [PartialMerkleTree].
|
||||||
|
pub fn new() -> Self {
|
||||||
|
PartialMerkleTree {
|
||||||
|
max_depth: 0,
|
||||||
|
nodes: BTreeMap::new(),
|
||||||
|
leaves: BTreeSet::new(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns a new [PartialMerkleTree] with all paths from the provided iterator added to it.
|
||||||
|
///
|
||||||
|
/// Analogous to [Self::add_path].
|
||||||
|
pub fn with_paths<I>(paths: I) -> Result<Self, MerkleError>
|
||||||
|
where
|
||||||
|
I: IntoIterator<Item = (u64, RpoDigest, MerklePath)>,
|
||||||
|
{
|
||||||
|
// create an empty tree
|
||||||
|
let tree = PartialMerkleTree::new();
|
||||||
|
|
||||||
|
paths.into_iter().try_fold(tree, |mut tree, (index, value, path)| {
|
||||||
|
tree.add_path(index, value, path)?;
|
||||||
|
Ok(tree)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// PUBLIC ACCESSORS
|
||||||
|
// --------------------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Returns the root of this Merkle tree.
|
||||||
|
pub fn root(&self) -> RpoDigest {
|
||||||
|
self.nodes.get(&ROOT_INDEX).cloned().unwrap_or(EMPTY_DIGEST)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns the depth of this Merkle tree.
|
||||||
|
pub fn max_depth(&self) -> u8 {
|
||||||
|
self.max_depth
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns a node at the specified NodeIndex.
|
||||||
|
///
|
||||||
|
/// # Errors
|
||||||
|
/// Returns an error if the specified NodeIndex is not contained in the nodes map.
|
||||||
|
pub fn get_node(&self, index: NodeIndex) -> Result<RpoDigest, MerkleError> {
|
||||||
|
self.nodes.get(&index).ok_or(MerkleError::NodeNotInSet(index)).map(|hash| *hash)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns true if the provided index is contained in the leaves set, false otherwise.
|
||||||
|
pub fn is_leaf(&self, index: NodeIndex) -> bool {
|
||||||
|
self.leaves.contains(&index)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns a vector of paths from every leaf to the root.
|
||||||
|
pub fn paths(&self) -> Vec<(NodeIndex, ValuePath)> {
|
||||||
|
let mut paths = Vec::new();
|
||||||
|
self.leaves.iter().for_each(|&leaf| {
|
||||||
|
paths.push((
|
||||||
|
leaf,
|
||||||
|
ValuePath {
|
||||||
|
value: self.get_node(leaf).expect("Failed to get leaf node"),
|
||||||
|
path: self.get_path(leaf).expect("Failed to get path"),
|
||||||
|
},
|
||||||
|
));
|
||||||
|
});
|
||||||
|
paths
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns a Merkle path from the node at the specified index to the root.
|
||||||
|
///
|
||||||
|
/// The node itself is not included in the path.
|
||||||
|
///
|
||||||
|
/// # Errors
|
||||||
|
/// Returns an error if:
|
||||||
|
/// - the specified index has depth set to 0 or the depth is greater than the depth of this
|
||||||
|
/// Merkle tree.
|
||||||
|
/// - the specified index is not contained in the nodes map.
|
||||||
|
pub fn get_path(&self, mut index: NodeIndex) -> Result<MerklePath, MerkleError> {
|
||||||
|
if index.is_root() {
|
||||||
|
return Err(MerkleError::DepthTooSmall(index.depth()));
|
||||||
|
} else if index.depth() > self.max_depth() {
|
||||||
|
return Err(MerkleError::DepthTooBig(index.depth() as u64));
|
||||||
|
}
|
||||||
|
|
||||||
|
if !self.nodes.contains_key(&index) {
|
||||||
|
return Err(MerkleError::NodeNotInSet(index));
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut path = Vec::new();
|
||||||
|
for _ in 0..index.depth() {
|
||||||
|
let sibling_index = index.sibling();
|
||||||
|
index.move_up();
|
||||||
|
let sibling =
|
||||||
|
self.nodes.get(&sibling_index).cloned().expect("Sibling node not in the map");
|
||||||
|
path.push(sibling);
|
||||||
|
}
|
||||||
|
Ok(MerklePath::new(path))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ITERATORS
|
||||||
|
// --------------------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Returns an iterator over the leaves of this [PartialMerkleTree].
|
||||||
|
pub fn leaves(&self) -> impl Iterator<Item = (NodeIndex, RpoDigest)> + '_ {
|
||||||
|
self.leaves.iter().map(|&leaf| {
|
||||||
|
(
|
||||||
|
leaf,
|
||||||
|
self.get_node(leaf)
|
||||||
|
.unwrap_or_else(|_| panic!("Leaf with {leaf} is not in the nodes map")),
|
||||||
|
)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// STATE MUTATORS
|
||||||
|
// --------------------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Adds the nodes of the specified Merkle path to this [PartialMerkleTree]. The `index_value`
|
||||||
|
/// and `value` parameters specify the leaf node at which the path starts.
|
||||||
|
///
|
||||||
|
/// # Errors
|
||||||
|
/// Returns an error if:
|
||||||
|
/// - The depth of the specified node_index is greater than 64 or smaller than 1.
|
||||||
|
/// - The specified path is not consistent with other paths in the set (i.e., resolves to a
|
||||||
|
/// different root).
|
||||||
|
pub fn add_path(
|
||||||
|
&mut self,
|
||||||
|
index_value: u64,
|
||||||
|
value: RpoDigest,
|
||||||
|
path: MerklePath,
|
||||||
|
) -> Result<(), MerkleError> {
|
||||||
|
let index_value = NodeIndex::new(path.len() as u8, index_value)?;
|
||||||
|
|
||||||
|
Self::check_depth(index_value.depth())?;
|
||||||
|
self.update_depth(index_value.depth());
|
||||||
|
|
||||||
|
// add provided node and its sibling to the leaves set
|
||||||
|
self.leaves.insert(index_value);
|
||||||
|
let sibling_node_index = index_value.sibling();
|
||||||
|
self.leaves.insert(sibling_node_index);
|
||||||
|
|
||||||
|
// add provided node and its sibling to the nodes map
|
||||||
|
self.nodes.insert(index_value, value);
|
||||||
|
self.nodes.insert(sibling_node_index, path[0]);
|
||||||
|
|
||||||
|
// traverse to the root, updating the nodes
|
||||||
|
let mut index_value = index_value;
|
||||||
|
let node = Rpo256::merge(&index_value.build_node(value, path[0]));
|
||||||
|
let root = path.iter().skip(1).copied().fold(node, |node, hash| {
|
||||||
|
index_value.move_up();
|
||||||
|
// insert calculated node to the nodes map
|
||||||
|
self.nodes.insert(index_value, node);
|
||||||
|
|
||||||
|
// if the calculated node was a leaf, remove it from leaves set.
|
||||||
|
self.leaves.remove(&index_value);
|
||||||
|
|
||||||
|
let sibling_node = index_value.sibling();
|
||||||
|
|
||||||
|
// Insert node from Merkle path to the nodes map. This sibling node becomes a leaf only
|
||||||
|
// if it is a new node (it wasn't in nodes map).
|
||||||
|
// Node can be in 3 states: internal node, leaf of the tree and not a tree node at all.
|
||||||
|
// - Internal node can only stay in this state -- addition of a new path can't make it
|
||||||
|
// a leaf or remove it from the tree.
|
||||||
|
// - Leaf node can stay in the same state (remain a leaf) or can become an internal
|
||||||
|
// node. In the first case we don't need to do anything, and the second case is handled
|
||||||
|
// by the call of `self.leaves.remove(&index_value);`
|
||||||
|
// - New node can be a calculated node or a "sibling" node from a Merkle Path:
|
||||||
|
// --- Calculated node, obviously, never can be a leaf.
|
||||||
|
// --- Sibling node can be only a leaf, because otherwise it is not a new node.
|
||||||
|
if self.nodes.insert(sibling_node, hash).is_none() {
|
||||||
|
self.leaves.insert(sibling_node);
|
||||||
|
}
|
||||||
|
|
||||||
|
Rpo256::merge(&index_value.build_node(node, hash))
|
||||||
|
});
|
||||||
|
|
||||||
|
// if the path set is empty (the root is all ZEROs), set the root to the root of the added
|
||||||
|
// path; otherwise, the root of the added path must be identical to the current root
|
||||||
|
if self.root() == EMPTY_DIGEST {
|
||||||
|
self.nodes.insert(ROOT_INDEX, root);
|
||||||
|
} else if self.root() != root {
|
||||||
|
return Err(MerkleError::ConflictingRoots([self.root(), root].to_vec()));
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Updates the value of the leaf at the specified index, returning the old leaf value.
|
||||||
|
///
|
||||||
|
/// This also recomputes all hashes between the leaf and the root, updating the root itself.
|
||||||
|
///
|
||||||
|
/// # Errors
|
||||||
|
/// Returns an error if:
|
||||||
|
/// - The depth of the specified node_index is greater than 64 or smaller than 1.
|
||||||
|
/// - The specified node index does not correspond to a leaf.
|
||||||
|
pub fn update_leaf(
|
||||||
|
&mut self,
|
||||||
|
node_index: NodeIndex,
|
||||||
|
value: RpoDigest,
|
||||||
|
) -> Result<RpoDigest, MerkleError> {
|
||||||
|
// check correctness of the depth and update it
|
||||||
|
Self::check_depth(node_index.depth())?;
|
||||||
|
self.update_depth(node_index.depth());
|
||||||
|
|
||||||
|
// insert NodeIndex to the leaves Set
|
||||||
|
self.leaves.insert(node_index);
|
||||||
|
|
||||||
|
// add node value to the nodes Map
|
||||||
|
let old_value = self
|
||||||
|
.nodes
|
||||||
|
.insert(node_index, value)
|
||||||
|
.ok_or(MerkleError::NodeNotInSet(node_index))?;
|
||||||
|
|
||||||
|
// if the old value and new value are the same, there is nothing to update
|
||||||
|
if value == old_value {
|
||||||
|
return Ok(old_value);
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut node_index = node_index;
|
||||||
|
let mut value = value;
|
||||||
|
for _ in 0..node_index.depth() {
|
||||||
|
let sibling = self.nodes.get(&node_index.sibling()).expect("sibling should exist");
|
||||||
|
value = Rpo256::merge(&node_index.build_node(value, *sibling));
|
||||||
|
node_index.move_up();
|
||||||
|
self.nodes.insert(node_index, value);
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(old_value)
|
||||||
|
}
|
||||||
|
|
||||||
|
// UTILITY FUNCTIONS
|
||||||
|
// --------------------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Utility to visualize a [PartialMerkleTree] in text.
|
||||||
|
pub fn print(&self) -> Result<String, fmt::Error> {
|
||||||
|
let indent = " ";
|
||||||
|
let mut s = String::new();
|
||||||
|
s.push_str("root: ");
|
||||||
|
s.push_str(&word_to_hex(&self.root())?);
|
||||||
|
s.push('\n');
|
||||||
|
for d in 1..=self.max_depth() {
|
||||||
|
let entries = 2u64.pow(d.into());
|
||||||
|
for i in 0..entries {
|
||||||
|
let index = NodeIndex::new(d, i).expect("The index must always be valid");
|
||||||
|
let node = self.get_node(index);
|
||||||
|
let node = match node {
|
||||||
|
Err(_) => continue,
|
||||||
|
Ok(node) => node,
|
||||||
|
};
|
||||||
|
|
||||||
|
for _ in 0..d {
|
||||||
|
s.push_str(indent);
|
||||||
|
}
|
||||||
|
s.push_str(&format!("({}, {}): ", index.depth(), index.value()));
|
||||||
|
s.push_str(&word_to_hex(&node)?);
|
||||||
|
s.push('\n');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(s)
|
||||||
|
}
|
||||||
|
|
||||||
|
// HELPER METHODS
|
||||||
|
// --------------------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Updates depth value with the maximum of current and provided depth.
|
||||||
|
fn update_depth(&mut self, new_depth: u8) {
|
||||||
|
self.max_depth = new_depth.max(self.max_depth);
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns an error if the depth is 0 or is greater than 64.
|
||||||
|
fn check_depth(depth: u8) -> Result<(), MerkleError> {
|
||||||
|
// validate the range of the depth.
|
||||||
|
if depth < Self::MIN_DEPTH {
|
||||||
|
return Err(MerkleError::DepthTooSmall(depth));
|
||||||
|
} else if Self::MAX_DEPTH < depth {
|
||||||
|
return Err(MerkleError::DepthTooBig(depth as u64));
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
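// Illustrative usage sketch (not part of the diff; the tests in the next file exercise
// the same flow in full). Assumptions: `store` is a DefaultMerkleStore holding a
// depth-3 tree with root `root`, and `new_value` is the replacement leaf digest.
fn partial_tree_demo(
    store: &DefaultMerkleStore,
    root: RpoDigest,
    new_value: RpoDigest,
) -> Result<RpoDigest, MerkleError> {
    // open the leaf at (depth 3, index 3) against the full store ...
    let opening = store.get_path(root, NodeIndex::new(3, 3)?)?;

    // ... and rebuild a partial tree that tracks only that path
    let mut pmt = PartialMerkleTree::with_paths([(3, opening.value, opening.path)])?;
    debug_assert_eq!(pmt.root(), root);

    // updating a tracked leaf recomputes all hashes up to the root
    pmt.update_leaf(NodeIndex::new(3, 3)?, new_value)?;
    Ok(pmt.root())
}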
313 src/merkle/partial_mt/tests.rs (new file)
@@ -0,0 +1,313 @@
|
|||||||
|
use super::{
|
||||||
|
super::{
|
||||||
|
digests_to_words, int_to_node, DefaultMerkleStore as MerkleStore, MerkleTree, NodeIndex,
|
||||||
|
PartialMerkleTree,
|
||||||
|
},
|
||||||
|
RpoDigest, ValuePath, Vec,
|
||||||
|
};
|
||||||
|
|
||||||
|
// TEST DATA
|
||||||
|
// ================================================================================================
|
||||||
|
|
||||||
|
const NODE10: NodeIndex = NodeIndex::new_unchecked(1, 0);
|
||||||
|
const NODE11: NodeIndex = NodeIndex::new_unchecked(1, 1);
|
||||||
|
|
||||||
|
const NODE20: NodeIndex = NodeIndex::new_unchecked(2, 0);
|
||||||
|
const NODE22: NodeIndex = NodeIndex::new_unchecked(2, 2);
|
||||||
|
const NODE23: NodeIndex = NodeIndex::new_unchecked(2, 3);
|
||||||
|
|
||||||
|
const NODE30: NodeIndex = NodeIndex::new_unchecked(3, 0);
|
||||||
|
const NODE31: NodeIndex = NodeIndex::new_unchecked(3, 1);
|
||||||
|
const NODE32: NodeIndex = NodeIndex::new_unchecked(3, 2);
|
||||||
|
const NODE33: NodeIndex = NodeIndex::new_unchecked(3, 3);
|
||||||
|
|
||||||
|
const VALUES8: [RpoDigest; 8] = [
|
||||||
|
int_to_node(30),
|
||||||
|
int_to_node(31),
|
||||||
|
int_to_node(32),
|
||||||
|
int_to_node(33),
|
||||||
|
int_to_node(34),
|
||||||
|
int_to_node(35),
|
||||||
|
int_to_node(36),
|
||||||
|
int_to_node(37),
|
||||||
|
];
|
||||||
|
|
||||||
|
// TESTS
|
||||||
|
// ================================================================================================
|
||||||
|
|
||||||
|
// For the Partial Merkle Tree tests we will use parts of the Merkle Tree whose full form is
// illustrated below:
//
//              __________ root __________
//             /                          \
//       ____ 10 ____                ____ 11 ____
//      /            \              /            \
//    20              21          22              23
//   /  \            /  \        /  \            /  \
// (30) (31)      (32) (33)    (34) (35)      (36) (37)
//
// Where a node's number is a concatenation of its depth and index. For example, the node with
// NodeIndex(3, 5) is labeled `35`. Leaves of the tree are shown in parentheses, e.g. (33).
|
||||||
|
|
||||||
|
/// Checks that root returned by `root()` function is equal to the expected one.
|
||||||
|
#[test]
|
||||||
|
fn get_root() {
|
||||||
|
let mt = MerkleTree::new(digests_to_words(&VALUES8)).unwrap();
|
||||||
|
let expected_root = mt.root();
|
||||||
|
|
||||||
|
let ms = MerkleStore::from(&mt);
|
||||||
|
let path33 = ms.get_path(expected_root, NODE33).unwrap();
|
||||||
|
|
||||||
|
let pmt = PartialMerkleTree::with_paths([(3, path33.value, path33.path)]).unwrap();
|
||||||
|
|
||||||
|
assert_eq!(pmt.root(), expected_root);
|
||||||
|
}
|
||||||
|
|
||||||
|
/// This test checks correctness of the `add_path()` and `get_path()` functions. First it creates a
|
||||||
|
/// PMT using `add_path()` by adding Merkle Paths from node 33 and node 22 to the empty PMT. Then
|
||||||
|
/// it checks that paths returned by `get_path()` function are equal to the expected ones.
|
||||||
|
#[test]
|
||||||
|
fn add_and_get_paths() {
|
||||||
|
let mt = MerkleTree::new(digests_to_words(&VALUES8)).unwrap();
|
||||||
|
let expected_root = mt.root();
|
||||||
|
|
||||||
|
let ms = MerkleStore::from(&mt);
|
||||||
|
|
||||||
|
let expected_path33 = ms.get_path(expected_root, NODE33).unwrap();
|
||||||
|
let expected_path22 = ms.get_path(expected_root, NODE22).unwrap();
|
||||||
|
|
||||||
|
let mut pmt = PartialMerkleTree::new();
|
||||||
|
pmt.add_path(3, expected_path33.value, expected_path33.path.clone()).unwrap();
|
||||||
|
pmt.add_path(2, expected_path22.value, expected_path22.path.clone()).unwrap();
|
||||||
|
|
||||||
|
let path33 = pmt.get_path(NODE33).unwrap();
|
||||||
|
let path22 = pmt.get_path(NODE22).unwrap();
|
||||||
|
let actual_root = pmt.root();
|
||||||
|
|
||||||
|
assert_eq!(expected_path33.path, path33);
|
||||||
|
assert_eq!(expected_path22.path, path22);
|
||||||
|
assert_eq!(expected_root, actual_root);
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Checks that function `get_node` used on nodes 10 and 32 returns expected values.
|
||||||
|
#[test]
|
||||||
|
fn get_node() {
|
||||||
|
let mt = MerkleTree::new(digests_to_words(&VALUES8)).unwrap();
|
||||||
|
let expected_root = mt.root();
|
||||||
|
|
||||||
|
let ms = MerkleStore::from(&mt);
|
||||||
|
|
||||||
|
let path33 = ms.get_path(expected_root, NODE33).unwrap();
|
||||||
|
|
||||||
|
let pmt = PartialMerkleTree::with_paths([(3, path33.value, path33.path)]).unwrap();
|
||||||
|
|
||||||
|
assert_eq!(ms.get_node(expected_root, NODE32).unwrap(), pmt.get_node(NODE32).unwrap());
|
||||||
|
assert_eq!(ms.get_node(expected_root, NODE10).unwrap(), pmt.get_node(NODE10).unwrap());
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Updates leaves of the PMT using `update_leaf()` function and checks that new root of the tree
|
||||||
|
/// is equal to the expected one.
|
||||||
|
#[test]
|
||||||
|
fn update_leaf() {
|
||||||
|
let mt = MerkleTree::new(digests_to_words(&VALUES8)).unwrap();
|
||||||
|
let root = mt.root();
|
||||||
|
|
||||||
|
let mut ms = MerkleStore::from(&mt);
|
||||||
|
let path33 = ms.get_path(root, NODE33).unwrap();
|
||||||
|
|
||||||
|
let mut pmt = PartialMerkleTree::with_paths([(3, path33.value, path33.path)]).unwrap();
|
||||||
|
|
||||||
|
let new_value32 = int_to_node(132);
|
||||||
|
let expected_root = ms.set_node(root, NODE32, new_value32).unwrap().root;
|
||||||
|
|
||||||
|
pmt.update_leaf(NODE32, new_value32).unwrap();
|
||||||
|
let actual_root = pmt.root();
|
||||||
|
|
||||||
|
assert_eq!(expected_root, actual_root);
|
||||||
|
|
||||||
|
let new_value20 = int_to_node(120);
|
||||||
|
let expected_root = ms.set_node(expected_root, NODE20, new_value20).unwrap().root;
|
||||||
|
|
||||||
|
pmt.update_leaf(NODE20, new_value20).unwrap();
|
||||||
|
let actual_root = pmt.root();
|
||||||
|
|
||||||
|
assert_eq!(expected_root, actual_root);
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Checks that paths of the PMT returned by `paths()` function are equal to the expected ones.
|
||||||
|
#[test]
|
||||||
|
fn get_paths() {
|
||||||
|
let mt = MerkleTree::new(digests_to_words(&VALUES8)).unwrap();
|
||||||
|
let expected_root = mt.root();
|
||||||
|
|
||||||
|
let ms = MerkleStore::from(&mt);
|
||||||
|
|
||||||
|
let path33 = ms.get_path(expected_root, NODE33).unwrap();
|
||||||
|
let path22 = ms.get_path(expected_root, NODE22).unwrap();
|
||||||
|
|
||||||
|
let mut pmt = PartialMerkleTree::new();
|
||||||
|
pmt.add_path(3, path33.value, path33.path).unwrap();
|
||||||
|
pmt.add_path(2, path22.value, path22.path).unwrap();
|
||||||
|
// After PMT creation with path33 (33; 32, 20, 11) and path22 (22; 23, 10) we will have this
// tree:
//
//             ______root______
//            /                \
//       ___10___          ___11___
//      /         \       /        \
//   (20)         21    (22)      (23)
//               /  \
//            (32)  (33)
//
// which has leaf nodes 20, 22, 23, 32 and 33. Hence overall we will have 5 paths -- one path
// for each leaf.
|
||||||
|
|
||||||
|
let leaves = vec![NODE20, NODE22, NODE23, NODE32, NODE33];
|
||||||
|
let expected_paths: Vec<(NodeIndex, ValuePath)> = leaves
|
||||||
|
.iter()
|
||||||
|
.map(|&leaf| {
|
||||||
|
(
|
||||||
|
leaf,
|
||||||
|
ValuePath {
|
||||||
|
value: mt.get_node(leaf).unwrap(),
|
||||||
|
path: mt.get_path(leaf).unwrap(),
|
||||||
|
},
|
||||||
|
)
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
let actual_paths = pmt.paths();
|
||||||
|
|
||||||
|
assert_eq!(expected_paths, actual_paths);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Checks correctness of leaves determination when using the `leaves()` function.
|
||||||
|
#[test]
|
||||||
|
fn leaves() {
|
||||||
|
let mt = MerkleTree::new(digests_to_words(&VALUES8)).unwrap();
|
||||||
|
let expected_root = mt.root();
|
||||||
|
|
||||||
|
let ms = MerkleStore::from(&mt);
|
||||||
|
|
||||||
|
let path33 = ms.get_path(expected_root, NODE33).unwrap();
|
||||||
|
let path22 = ms.get_path(expected_root, NODE22).unwrap();
|
||||||
|
|
||||||
|
let mut pmt = PartialMerkleTree::with_paths([(3, path33.value, path33.path)]).unwrap();
|
||||||
|
// After PMT creation with path33 (33; 32, 20, 11) we will have this tree:
//
//             ______root______
//            /                \
//       ___10___              (11)
//      /         \
//   (20)         21
//               /  \
//            (32)  (33)
//
// which has leaf nodes 11, 20, 32 and 33.
|
||||||
|
|
||||||
|
let value11 = mt.get_node(NODE11).unwrap();
|
||||||
|
let value20 = mt.get_node(NODE20).unwrap();
|
||||||
|
let value32 = mt.get_node(NODE32).unwrap();
|
||||||
|
let value33 = mt.get_node(NODE33).unwrap();
|
||||||
|
|
||||||
|
let leaves = vec![(NODE11, value11), (NODE20, value20), (NODE32, value32), (NODE33, value33)];
|
||||||
|
|
||||||
|
let expected_leaves = leaves.iter().copied();
|
||||||
|
assert!(expected_leaves.eq(pmt.leaves()));
|
||||||
|
|
||||||
|
pmt.add_path(2, path22.value, path22.path).unwrap();
|
||||||
|
// After adding the path22 (22; 23, 10) to the existing PMT we will have this tree:
//
//             ______root______
//            /                \
//       ___10___          ___11___
//      /         \       /        \
//   (20)         21    (22)      (23)
//               /  \
//            (32)  (33)
//
// which has leaf nodes 20, 22, 23, 32 and 33.
|
||||||
|
|
||||||
|
let value20 = mt.get_node(NODE20).unwrap();
|
||||||
|
let value22 = mt.get_node(NODE22).unwrap();
|
||||||
|
let value23 = mt.get_node(NODE23).unwrap();
|
||||||
|
let value32 = mt.get_node(NODE32).unwrap();
|
||||||
|
let value33 = mt.get_node(NODE33).unwrap();
|
||||||
|
|
||||||
|
let leaves = vec![
|
||||||
|
(NODE20, value20),
|
||||||
|
(NODE22, value22),
|
||||||
|
(NODE23, value23),
|
||||||
|
(NODE32, value32),
|
||||||
|
(NODE33, value33),
|
||||||
|
];
|
||||||
|
|
||||||
|
let expected_leaves = leaves.iter().copied();
|
||||||
|
assert!(expected_leaves.eq(pmt.leaves()));
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Checks that addition of the path with different root will cause an error.
|
||||||
|
#[test]
|
||||||
|
fn err_add_path() {
|
||||||
|
let path33 = vec![int_to_node(1), int_to_node(2), int_to_node(3)].into();
|
||||||
|
let path22 = vec![int_to_node(4), int_to_node(5)].into();
|
||||||
|
|
||||||
|
let mut pmt = PartialMerkleTree::new();
|
||||||
|
pmt.add_path(3, int_to_node(6), path33).unwrap();
|
||||||
|
|
||||||
|
assert!(pmt.add_path(2, int_to_node(7), path22).is_err());
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Checks that the request of the node which is not in the PMT will cause an error.
|
||||||
|
#[test]
|
||||||
|
fn err_get_node() {
|
||||||
|
let mt = MerkleTree::new(digests_to_words(&VALUES8)).unwrap();
|
||||||
|
let expected_root = mt.root();
|
||||||
|
|
||||||
|
let ms = MerkleStore::from(&mt);
|
||||||
|
|
||||||
|
let path33 = ms.get_path(expected_root, NODE33).unwrap();
|
||||||
|
|
||||||
|
let pmt = PartialMerkleTree::with_paths([(3, path33.value, path33.path)]).unwrap();
|
||||||
|
|
||||||
|
assert!(pmt.get_node(NODE22).is_err());
|
||||||
|
assert!(pmt.get_node(NODE23).is_err());
|
||||||
|
assert!(pmt.get_node(NODE30).is_err());
|
||||||
|
assert!(pmt.get_node(NODE31).is_err());
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Checks that the request of the path from the leaf which is not in the PMT will cause an error.
|
||||||
|
#[test]
|
||||||
|
fn err_get_path() {
|
||||||
|
let mt = MerkleTree::new(digests_to_words(&VALUES8)).unwrap();
|
||||||
|
let expected_root = mt.root();
|
||||||
|
|
||||||
|
let ms = MerkleStore::from(&mt);
|
||||||
|
|
||||||
|
let path33 = ms.get_path(expected_root, NODE33).unwrap();
|
||||||
|
|
||||||
|
let pmt = PartialMerkleTree::with_paths([(3, path33.value, path33.path)]).unwrap();
|
||||||
|
|
||||||
|
assert!(pmt.get_path(NODE22).is_err());
|
||||||
|
assert!(pmt.get_path(NODE23).is_err());
|
||||||
|
assert!(pmt.get_path(NODE30).is_err());
|
||||||
|
assert!(pmt.get_path(NODE31).is_err());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn err_update_leaf() {
|
||||||
|
let mt = MerkleTree::new(digests_to_words(&VALUES8)).unwrap();
|
||||||
|
let expected_root = mt.root();
|
||||||
|
|
||||||
|
let ms = MerkleStore::from(&mt);
|
||||||
|
|
||||||
|
let path33 = ms.get_path(expected_root, NODE33).unwrap();
|
||||||
|
|
||||||
|
let mut pmt = PartialMerkleTree::with_paths([(3, path33.value, path33.path)]).unwrap();
|
||||||
|
|
||||||
|
assert!(pmt.update_leaf(NODE22, int_to_node(22)).is_err());
|
||||||
|
assert!(pmt.update_leaf(NODE23, int_to_node(23)).is_err());
|
||||||
|
assert!(pmt.update_leaf(NODE30, int_to_node(30)).is_err());
|
||||||
|
assert!(pmt.update_leaf(NODE31, int_to_node(31)).is_err());
|
||||||
|
}
|
||||||
@@ -1,4 +1,4 @@
|
|||||||
use super::{vec, NodeIndex, Rpo256, Vec, Word};
|
use super::{vec, InnerNodeInfo, MerkleError, NodeIndex, Rpo256, RpoDigest, Vec};
|
||||||
use core::ops::{Deref, DerefMut};
|
use core::ops::{Deref, DerefMut};
|
||||||
|
|
||||||
// MERKLE PATH
|
// MERKLE PATH
|
||||||
@@ -7,7 +7,7 @@ use core::ops::{Deref, DerefMut};
|
|||||||
/// A merkle path container, composed of a sequence of nodes of a Merkle tree.
|
/// A merkle path container, composed of a sequence of nodes of a Merkle tree.
|
||||||
#[derive(Clone, Debug, Default, PartialEq, Eq)]
|
#[derive(Clone, Debug, Default, PartialEq, Eq)]
|
||||||
pub struct MerklePath {
|
pub struct MerklePath {
|
||||||
nodes: Vec<Word>,
|
nodes: Vec<RpoDigest>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl MerklePath {
|
impl MerklePath {
|
||||||
@@ -15,39 +15,67 @@ impl MerklePath {
|
|||||||
// --------------------------------------------------------------------------------------------
|
// --------------------------------------------------------------------------------------------
|
||||||
|
|
||||||
/// Creates a new Merkle path from a list of nodes.
|
/// Creates a new Merkle path from a list of nodes.
|
||||||
pub fn new(nodes: Vec<Word>) -> Self {
|
pub fn new(nodes: Vec<RpoDigest>) -> Self {
|
||||||
Self { nodes }
|
Self { nodes }
|
||||||
}
|
}
|
||||||
|
|
||||||
// PROVIDERS
|
// PROVIDERS
|
||||||
// --------------------------------------------------------------------------------------------
|
// --------------------------------------------------------------------------------------------
|
||||||
|
|
||||||
/// Computes the merkle root for this opening.
|
|
||||||
pub fn compute_root(&self, index_value: u64, node: Word) -> Word {
|
|
||||||
let mut index = NodeIndex::new(self.depth(), index_value);
|
|
||||||
self.nodes.iter().copied().fold(node, |node, sibling| {
|
|
||||||
// compute the node and move to the next iteration.
|
|
||||||
let input = index.build_node(node.into(), sibling.into());
|
|
||||||
index.move_up();
|
|
||||||
Rpo256::merge(&input).into()
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns the depth in which this Merkle path proof is valid.
|
/// Returns the depth in which this Merkle path proof is valid.
|
||||||
pub fn depth(&self) -> u8 {
|
pub fn depth(&self) -> u8 {
|
||||||
self.nodes.len() as u8
|
self.nodes.len() as u8
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Computes the merkle root for this opening.
|
||||||
|
pub fn compute_root(&self, index: u64, node: RpoDigest) -> Result<RpoDigest, MerkleError> {
|
||||||
|
let mut index = NodeIndex::new(self.depth(), index)?;
|
||||||
|
let root = self.nodes.iter().copied().fold(node, |node, sibling| {
|
||||||
|
// compute the node and move to the next iteration.
|
||||||
|
let input = index.build_node(node, sibling);
|
||||||
|
index.move_up();
|
||||||
|
Rpo256::merge(&input)
|
||||||
|
});
|
||||||
|
Ok(root)
|
||||||
|
}
|
||||||
|
|
||||||
/// Verifies the Merkle opening proof towards the provided root.
|
/// Verifies the Merkle opening proof towards the provided root.
|
||||||
///
|
///
|
||||||
/// Returns `true` if `node` exists at `index` in a Merkle tree with `root`.
|
/// Returns `true` if `node` exists at `index` in a Merkle tree with `root`.
|
||||||
pub fn verify(&self, index: u64, node: Word, root: &Word) -> bool {
|
pub fn verify(&self, index: u64, node: RpoDigest, root: &RpoDigest) -> bool {
|
||||||
root == &self.compute_root(index, node)
|
match self.compute_root(index, node) {
|
||||||
|
Ok(computed_root) => root == &computed_root,
|
||||||
|
Err(_) => false,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns an iterator over every inner node of this [MerklePath].
|
||||||
|
///
|
||||||
|
/// The iteration order is unspecified.
|
||||||
|
///
|
||||||
|
/// # Errors
|
||||||
|
/// Returns an error if the specified index is not valid for this path.
|
||||||
|
pub fn inner_nodes(
|
||||||
|
&self,
|
||||||
|
index: u64,
|
||||||
|
node: RpoDigest,
|
||||||
|
) -> Result<InnerNodeIterator, MerkleError> {
|
||||||
|
Ok(InnerNodeIterator {
|
||||||
|
nodes: &self.nodes,
|
||||||
|
index: NodeIndex::new(self.depth(), index)?,
|
||||||
|
value: node,
|
||||||
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
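// Usage sketch (not part of the diff): checking an opening with the now-fallible
// `compute_root`. `check_opening` is a hypothetical helper; it mirrors what `verify`
// above does internally, treating an invalid index as a failed check.
fn check_opening(path: &MerklePath, index: u64, leaf: RpoDigest, root: RpoDigest) -> bool {
    match path.compute_root(index, leaf) {
        Ok(computed) => computed == root,
        Err(_) => false,
    }
}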
|
|
||||||
impl From<Vec<Word>> for MerklePath {
|
impl From<MerklePath> for Vec<RpoDigest> {
|
||||||
fn from(path: Vec<Word>) -> Self {
|
fn from(path: MerklePath) -> Self {
|
||||||
|
path.nodes
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<Vec<RpoDigest>> for MerklePath {
|
||||||
|
fn from(path: Vec<RpoDigest>) -> Self {
|
||||||
Self::new(path)
|
Self::new(path)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -55,7 +83,7 @@ impl From<Vec<Word>> for MerklePath {
|
|||||||
impl Deref for MerklePath {
|
impl Deref for MerklePath {
|
||||||
// we use `Vec` here instead of slice so we can call vector mutation methods directly from the
|
// we use `Vec` here instead of slice so we can call vector mutation methods directly from the
|
||||||
// merkle path (example: `Vec::remove`).
|
// merkle path (example: `Vec::remove`).
|
||||||
type Target = Vec<Word>;
|
type Target = Vec<RpoDigest>;
|
||||||
|
|
||||||
fn deref(&self) -> &Self::Target {
|
fn deref(&self) -> &Self::Target {
|
||||||
&self.nodes
|
&self.nodes
|
||||||
@@ -68,21 +96,57 @@ impl DerefMut for MerklePath {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl FromIterator<Word> for MerklePath {
|
// ITERATORS
|
||||||
fn from_iter<T: IntoIterator<Item = Word>>(iter: T) -> Self {
|
// ================================================================================================
|
||||||
|
|
||||||
|
impl FromIterator<RpoDigest> for MerklePath {
|
||||||
|
fn from_iter<T: IntoIterator<Item = RpoDigest>>(iter: T) -> Self {
|
||||||
Self::new(iter.into_iter().collect())
|
Self::new(iter.into_iter().collect())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl IntoIterator for MerklePath {
|
impl IntoIterator for MerklePath {
|
||||||
type Item = Word;
|
type Item = RpoDigest;
|
||||||
type IntoIter = vec::IntoIter<Word>;
|
type IntoIter = vec::IntoIter<RpoDigest>;
|
||||||
|
|
||||||
fn into_iter(self) -> Self::IntoIter {
|
fn into_iter(self) -> Self::IntoIter {
|
||||||
self.nodes.into_iter()
|
self.nodes.into_iter()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// An iterator over internal nodes of a [MerklePath].
|
||||||
|
pub struct InnerNodeIterator<'a> {
|
||||||
|
nodes: &'a Vec<RpoDigest>,
|
||||||
|
index: NodeIndex,
|
||||||
|
value: RpoDigest,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a> Iterator for InnerNodeIterator<'a> {
|
||||||
|
type Item = InnerNodeInfo;
|
||||||
|
|
||||||
|
fn next(&mut self) -> Option<Self::Item> {
|
||||||
|
if !self.index.is_root() {
|
||||||
|
let sibling_pos = self.nodes.len() - self.index.depth() as usize;
|
||||||
|
let (left, right) = if self.index.is_value_odd() {
|
||||||
|
(self.nodes[sibling_pos], self.value)
|
||||||
|
} else {
|
||||||
|
(self.value, self.nodes[sibling_pos])
|
||||||
|
};
|
||||||
|
|
||||||
|
self.value = Rpo256::merge(&[left, right]);
|
||||||
|
self.index.move_up();
|
||||||
|
|
||||||
|
Some(InnerNodeInfo {
|
||||||
|
value: self.value,
|
||||||
|
left,
|
||||||
|
right,
|
||||||
|
})
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
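// Illustrative sketch (an assumption, not part of the diff): collecting every parent
// node computed along an opening via the new `inner_nodes` iterator. The last collected
// value equals the root, as the test at the bottom of this file checks.
fn collect_parents(
    path: &MerklePath,
    index: u64,
    leaf: RpoDigest,
) -> Result<Vec<RpoDigest>, MerkleError> {
    Ok(path.inner_nodes(index, leaf)?.map(|info| info.value).collect())
}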
|
|
||||||
// MERKLE PATH CONTAINERS
|
// MERKLE PATH CONTAINERS
|
||||||
// ================================================================================================
|
// ================================================================================================
|
||||||
|
|
||||||
@@ -90,7 +154,7 @@ impl IntoIterator for MerklePath {
|
|||||||
#[derive(Clone, Debug, Default, PartialEq, Eq)]
|
#[derive(Clone, Debug, Default, PartialEq, Eq)]
|
||||||
pub struct ValuePath {
|
pub struct ValuePath {
|
||||||
/// The node value opening for `path`.
|
/// The node value opening for `path`.
|
||||||
pub value: Word,
|
pub value: RpoDigest,
|
||||||
/// The path from `value` to `root` (exclusive).
|
/// The path from `value` to `root` (exclusive).
|
||||||
pub path: MerklePath,
|
pub path: MerklePath,
|
||||||
}
|
}
|
||||||
@@ -102,7 +166,29 @@ pub struct ValuePath {
|
|||||||
#[derive(Clone, Debug, Default, PartialEq, Eq)]
|
#[derive(Clone, Debug, Default, PartialEq, Eq)]
|
||||||
pub struct RootPath {
|
pub struct RootPath {
|
||||||
/// The root of the Merkle path, i.e. the value to which `path` resolves.
|
||||||
pub root: Word,
|
pub root: RpoDigest,
|
||||||
/// The path from `value` to `root` (exclusive).
|
/// The path from `value` to `root` (exclusive).
|
||||||
pub path: MerklePath,
|
pub path: MerklePath,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// TESTS
|
||||||
|
// ================================================================================================
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use crate::merkle::{int_to_node, MerklePath};
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_inner_nodes() {
|
||||||
|
let nodes = vec![int_to_node(1), int_to_node(2), int_to_node(3), int_to_node(4)];
|
||||||
|
let merkle_path = MerklePath::new(nodes);
|
||||||
|
|
||||||
|
let index = 6;
|
||||||
|
let node = int_to_node(5);
|
||||||
|
let root = merkle_path.compute_root(index, node).unwrap();
|
||||||
|
|
||||||
|
let inner_root = merkle_path.inner_nodes(index, node).unwrap().last().unwrap().value;
|
||||||
|
|
||||||
|
assert_eq!(root, inner_root);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|||||||
@@ -1,4 +1,5 @@
|
|||||||
use super::{BTreeMap, MerkleError, MerklePath, NodeIndex, Rpo256, ValuePath, Vec, Word, ZERO};
|
use super::{BTreeMap, MerkleError, MerklePath, NodeIndex, Rpo256, ValuePath, Vec};
|
||||||
|
use crate::{hash::rpo::RpoDigest, Word};
|
||||||
|
|
||||||
// MERKLE PATH SET
|
// MERKLE PATH SET
|
||||||
// ================================================================================================
|
// ================================================================================================
|
||||||
@@ -6,7 +7,7 @@ use super::{BTreeMap, MerkleError, MerklePath, NodeIndex, Rpo256, ValuePath, Vec
|
|||||||
/// A set of Merkle paths.
|
/// A set of Merkle paths.
|
||||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||||
pub struct MerklePathSet {
|
pub struct MerklePathSet {
|
||||||
root: Word,
|
root: RpoDigest,
|
||||||
total_depth: u8,
|
total_depth: u8,
|
||||||
paths: BTreeMap<u64, MerklePath>,
|
paths: BTreeMap<u64, MerklePath>,
|
||||||
}
|
}
|
||||||
@@ -17,7 +18,7 @@ impl MerklePathSet {
|
|||||||
|
|
||||||
/// Returns an empty MerklePathSet.
|
/// Returns an empty MerklePathSet.
|
||||||
pub fn new(depth: u8) -> Self {
|
pub fn new(depth: u8) -> Self {
|
||||||
let root = [ZERO; 4];
|
let root = RpoDigest::default();
|
||||||
let paths = BTreeMap::new();
|
let paths = BTreeMap::new();
|
||||||
|
|
||||||
Self {
|
Self {
|
||||||
@@ -32,12 +33,10 @@ impl MerklePathSet {
|
|||||||
/// Analogous to `[Self::add_path]`.
|
/// Analogous to `[Self::add_path]`.
|
||||||
pub fn with_paths<I>(self, paths: I) -> Result<Self, MerkleError>
|
pub fn with_paths<I>(self, paths: I) -> Result<Self, MerkleError>
|
||||||
where
|
where
|
||||||
I: IntoIterator<Item = (u64, Word, MerklePath)>,
|
I: IntoIterator<Item = (u64, RpoDigest, MerklePath)>,
|
||||||
{
|
{
|
||||||
paths
|
paths.into_iter().try_fold(self, |mut set, (index, value, path)| {
|
||||||
.into_iter()
|
set.add_path(index, value.into(), path)?;
|
||||||
.try_fold(self, |mut set, (index, value, path)| {
|
|
||||||
set.add_path(index, value, path)?;
|
|
||||||
Ok(set)
|
Ok(set)
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
@@ -46,7 +45,7 @@ impl MerklePathSet {
|
|||||||
// --------------------------------------------------------------------------------------------
|
// --------------------------------------------------------------------------------------------
|
||||||
|
|
||||||
/// Returns the root to which all paths in this set resolve.
|
/// Returns the root to which all paths in this set resolve.
|
||||||
pub const fn root(&self) -> Word {
|
pub const fn root(&self) -> RpoDigest {
|
||||||
self.root
|
self.root
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -63,12 +62,7 @@ impl MerklePathSet {
|
|||||||
/// Returns an error if:
|
/// Returns an error if:
|
||||||
/// * The specified index is not valid for the depth of structure.
|
/// * The specified index is not valid for the depth of structure.
|
||||||
/// * Requested node does not exist in the set.
|
/// * Requested node does not exist in the set.
|
||||||
pub fn get_node(&self, index: NodeIndex) -> Result<Word, MerkleError> {
|
pub fn get_node(&self, index: NodeIndex) -> Result<RpoDigest, MerkleError> {
|
||||||
if !index.with_depth(self.total_depth).is_valid() {
|
|
||||||
return Err(MerkleError::InvalidIndex(
|
|
||||||
index.with_depth(self.total_depth),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
if index.depth() != self.total_depth {
|
if index.depth() != self.total_depth {
|
||||||
return Err(MerkleError::InvalidDepth {
|
return Err(MerkleError::InvalidDepth {
|
||||||
expected: self.total_depth,
|
expected: self.total_depth,
|
||||||
@@ -80,7 +74,7 @@ impl MerklePathSet {
|
|||||||
let path_key = index.value() - parity;
|
let path_key = index.value() - parity;
|
||||||
self.paths
|
self.paths
|
||||||
.get(&path_key)
|
.get(&path_key)
|
||||||
.ok_or(MerkleError::NodeNotInSet(path_key))
|
.ok_or(MerkleError::NodeNotInSet(index))
|
||||||
.map(|path| path[parity as usize])
|
.map(|path| path[parity as usize])
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -90,7 +84,8 @@ impl MerklePathSet {
|
|||||||
/// * The specified index is not valid for the depth of the structure.
|
/// * The specified index is not valid for the depth of the structure.
|
||||||
/// * Leaf with the requested path does not exist in the set.
|
/// * Leaf with the requested path does not exist in the set.
|
||||||
pub fn get_leaf(&self, index: u64) -> Result<Word, MerkleError> {
|
pub fn get_leaf(&self, index: u64) -> Result<Word, MerkleError> {
|
||||||
self.get_node(NodeIndex::new(self.depth(), index))
|
let index = NodeIndex::new(self.depth(), index)?;
|
||||||
|
Ok(self.get_node(index)?.into())
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Returns a Merkle path to the node at the specified index. The node itself is
|
/// Returns a Merkle path to the node at the specified index. The node itself is
|
||||||
@@ -101,9 +96,6 @@ impl MerklePathSet {
|
|||||||
/// * The specified index is not valid for the depth of structure.
|
/// * The specified index is not valid for the depth of structure.
|
||||||
/// * Node of the requested path does not exist in the set.
|
/// * Node of the requested path does not exist in the set.
|
||||||
pub fn get_path(&self, index: NodeIndex) -> Result<MerklePath, MerkleError> {
|
pub fn get_path(&self, index: NodeIndex) -> Result<MerklePath, MerkleError> {
|
||||||
if !index.with_depth(self.total_depth).is_valid() {
|
|
||||||
return Err(MerkleError::InvalidIndex(index));
|
|
||||||
}
|
|
||||||
if index.depth() != self.total_depth {
|
if index.depth() != self.total_depth {
|
||||||
return Err(MerkleError::InvalidDepth {
|
return Err(MerkleError::InvalidDepth {
|
||||||
expected: self.total_depth,
|
expected: self.total_depth,
|
||||||
@@ -113,11 +105,8 @@ impl MerklePathSet {
|
|||||||
|
|
||||||
let parity = index.value() & 1;
|
let parity = index.value() & 1;
|
||||||
let path_key = index.value() - parity;
|
let path_key = index.value() - parity;
|
||||||
let mut path = self
|
let mut path =
|
||||||
.paths
|
self.paths.get(&path_key).cloned().ok_or(MerkleError::NodeNotInSet(index))?;
|
||||||
.get(&path_key)
|
|
||||||
.cloned()
|
|
||||||
.ok_or(MerkleError::NodeNotInSet(index.value()))?;
|
|
||||||
path.remove(parity as usize);
|
path.remove(parity as usize);
|
||||||
Ok(path)
|
Ok(path)
|
||||||
}
|
}
|
||||||
@@ -165,8 +154,7 @@ impl MerklePathSet {
|
|||||||
value: Word,
|
value: Word,
|
||||||
mut path: MerklePath,
|
mut path: MerklePath,
|
||||||
) -> Result<(), MerkleError> {
|
) -> Result<(), MerkleError> {
|
||||||
let depth = path.len() as u8;
|
let mut index = NodeIndex::new(path.len() as u8, index_value)?;
|
||||||
let mut index = NodeIndex::new(depth, index_value);
|
|
||||||
if index.depth() != self.total_depth {
|
if index.depth() != self.total_depth {
|
||||||
return Err(MerkleError::InvalidDepth {
|
return Err(MerkleError::InvalidDepth {
|
||||||
expected: self.total_depth,
|
expected: self.total_depth,
|
||||||
@@ -176,21 +164,21 @@ impl MerklePathSet {
|
|||||||
|
|
||||||
// update the current path
|
// update the current path
|
||||||
let parity = index_value & 1;
|
let parity = index_value & 1;
|
||||||
path.insert(parity as usize, value);
|
path.insert(parity as usize, value.into());
|
||||||
|
|
||||||
// traverse to the root, updating the nodes
|
// traverse to the root, updating the nodes
|
||||||
let root: Word = Rpo256::merge(&[path[0].into(), path[1].into()]).into();
|
let root = Rpo256::merge(&[path[0], path[1]]);
|
||||||
let root = path.iter().skip(2).copied().fold(root, |root, hash| {
|
let root = path.iter().skip(2).copied().fold(root, |root, hash| {
|
||||||
index.move_up();
|
index.move_up();
|
||||||
Rpo256::merge(&index.build_node(root.into(), hash.into())).into()
|
Rpo256::merge(&index.build_node(root, hash))
|
||||||
});
|
});
|
||||||
|
|
||||||
// if the path set is empty (the root is all ZEROs), set the root to the root of the added
|
// if the path set is empty (the root is all ZEROs), set the root to the root of the added
|
||||||
// path; otherwise, the root of the added path must be identical to the current root
|
// path; otherwise, the root of the added path must be identical to the current root
|
||||||
if self.root == [ZERO; 4] {
|
if self.root == RpoDigest::default() {
|
||||||
self.root = root;
|
self.root = root;
|
||||||
} else if self.root != root {
|
} else if self.root != root {
|
||||||
return Err(MerkleError::InvalidPath(path));
|
return Err(MerkleError::ConflictingRoots([self.root, root].to_vec()));
|
||||||
}
|
}
|
||||||
|
|
||||||
// finish updating the path
|
// finish updating the path
|
||||||
@@ -205,40 +193,35 @@ impl MerklePathSet {
|
|||||||
/// Returns an error if:
|
/// Returns an error if:
|
||||||
/// * Requested node does not exist in the set.
|
/// * Requested node does not exist in the set.
|
||||||
pub fn update_leaf(&mut self, base_index_value: u64, value: Word) -> Result<(), MerkleError> {
|
pub fn update_leaf(&mut self, base_index_value: u64, value: Word) -> Result<(), MerkleError> {
|
||||||
let depth = self.depth();
|
let mut index = NodeIndex::new(self.depth(), base_index_value)?;
|
||||||
let mut index = NodeIndex::new(depth, base_index_value);
|
|
||||||
if !index.is_valid() {
|
|
||||||
return Err(MerkleError::InvalidIndex(index));
|
|
||||||
}
|
|
||||||
|
|
||||||
let parity = index.value() & 1;
|
let parity = index.value() & 1;
|
||||||
let path_key = index.value() - parity;
|
let path_key = index.value() - parity;
|
||||||
let path = match self.paths.get_mut(&path_key) {
|
let path = match self.paths.get_mut(&path_key) {
|
||||||
Some(path) => path,
|
Some(path) => path,
|
||||||
None => return Err(MerkleError::NodeNotInSet(base_index_value)),
|
None => return Err(MerkleError::NodeNotInSet(index)),
|
||||||
};
|
};
|
||||||
|
|
||||||
// Fill old_hashes vector -----------------------------------------------------------------
|
// Fill old_hashes vector -----------------------------------------------------------------
|
||||||
let mut current_index = index;
|
let mut current_index = index;
|
||||||
let mut old_hashes = Vec::with_capacity(path.len().saturating_sub(2));
|
let mut old_hashes = Vec::with_capacity(path.len().saturating_sub(2));
|
||||||
let mut root: Word = Rpo256::merge(&[path[0].into(), path[1].into()]).into();
|
let mut root = Rpo256::merge(&[path[0], path[1]]);
|
||||||
for hash in path.iter().skip(2).copied() {
|
for hash in path.iter().skip(2).copied() {
|
||||||
old_hashes.push(root);
|
old_hashes.push(root);
|
||||||
current_index.move_up();
|
current_index.move_up();
|
||||||
let input = current_index.build_node(hash.into(), root.into());
|
let input = current_index.build_node(hash, root);
|
||||||
root = Rpo256::merge(&input).into();
|
root = Rpo256::merge(&input);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Fill new_hashes vector -----------------------------------------------------------------
|
// Fill new_hashes vector -----------------------------------------------------------------
|
||||||
path[index.is_value_odd() as usize] = value;
|
path[index.is_value_odd() as usize] = value.into();
|
||||||
|
|
||||||
let mut new_hashes = Vec::with_capacity(path.len().saturating_sub(2));
|
let mut new_hashes = Vec::with_capacity(path.len().saturating_sub(2));
|
||||||
let mut new_root: Word = Rpo256::merge(&[path[0].into(), path[1].into()]).into();
|
let mut new_root = Rpo256::merge(&[path[0], path[1]]);
|
||||||
for path_hash in path.iter().skip(2).copied() {
|
for path_hash in path.iter().skip(2).copied() {
|
||||||
new_hashes.push(new_root);
|
new_hashes.push(new_root);
|
||||||
index.move_up();
|
index.move_up();
|
||||||
let input = current_index.build_node(path_hash.into(), new_root.into());
|
let input = current_index.build_node(path_hash, new_root);
|
||||||
new_root = Rpo256::merge(&input).into();
|
new_root = Rpo256::merge(&input);
|
||||||
}
|
}
|
||||||
|
|
||||||
self.root = new_root;
|
self.root = new_root;
|
||||||
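// Usage sketch (not part of the diff): with the refactor above, `MerklePathSet` stores
// `RpoDigest` values internally while leaves are still passed in and read back as
// `Word`s. `path_set_demo` is a hypothetical helper over assumed inputs; the set depth
// must match the path length, as `add_path` enforces.
fn path_set_demo(index: u64, leaf: Word, path: MerklePath) -> Result<Word, MerkleError> {
    let depth = path.len() as u8;
    let set = MerklePathSet::new(depth).with_paths([(index, leaf.into(), path)])?;
    // reading the leaf back converts the stored digest into a `Word`
    set.get_leaf(index)
}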
@@ -263,7 +246,7 @@ impl MerklePathSet {
|
|||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests {
|
mod tests {
|
||||||
use super::*;
|
use super::*;
|
||||||
use crate::merkle::int_to_node;
|
use crate::merkle::{int_to_leaf, int_to_node};
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn get_root() {
|
fn get_root() {
|
||||||
@@ -293,10 +276,9 @@ mod tests {
|
|||||||
let set = super::MerklePathSet::new(depth)
|
let set = super::MerklePathSet::new(depth)
|
||||||
.with_paths([(index, hash_6, path_6.clone().into())])
|
.with_paths([(index, hash_6, path_6.clone().into())])
|
||||||
.unwrap();
|
.unwrap();
|
||||||
let stored_path_6 = set.get_path(NodeIndex::new(depth, index)).unwrap();
|
let stored_path_6 = set.get_path(NodeIndex::make(depth, index)).unwrap();
|
||||||
|
|
||||||
assert_eq!(path_6, *stored_path_6);
|
assert_eq!(path_6, *stored_path_6);
|
||||||
assert!(set.get_path(NodeIndex::new(depth, 15_u64)).is_err())
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
@@ -305,15 +287,9 @@ mod tests {
|
|||||||
let hash_6 = int_to_node(6);
|
let hash_6 = int_to_node(6);
|
||||||
let index = 6_u64;
|
let index = 6_u64;
|
||||||
let depth = 3_u8;
|
let depth = 3_u8;
|
||||||
let set = MerklePathSet::new(depth)
|
let set = MerklePathSet::new(depth).with_paths([(index, hash_6, path_6.into())]).unwrap();
|
||||||
.with_paths([(index, hash_6, path_6.into())])
|
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
assert_eq!(
|
assert_eq!(int_to_node(6u64), set.get_node(NodeIndex::make(depth, index)).unwrap());
|
||||||
int_to_node(6u64),
|
|
||||||
set.get_node(NodeIndex::new(depth, index)).unwrap()
|
|
||||||
);
|
|
||||||
assert!(set.get_node(NodeIndex::new(depth, 15_u64)).is_err());
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
@@ -343,20 +319,20 @@ mod tests {
|
|||||||
])
|
])
|
||||||
.unwrap();
|
.unwrap();
|
||||||
|
|
||||||
let new_hash_6 = int_to_node(100);
|
let new_hash_6 = int_to_leaf(100);
|
||||||
let new_hash_5 = int_to_node(55);
|
let new_hash_5 = int_to_leaf(55);
|
||||||
|
|
||||||
set.update_leaf(index_6, new_hash_6).unwrap();
|
set.update_leaf(index_6, new_hash_6).unwrap();
|
||||||
let new_path_4 = set.get_path(NodeIndex::new(depth, index_4)).unwrap();
|
let new_path_4 = set.get_path(NodeIndex::make(depth, index_4)).unwrap();
|
||||||
let new_hash_67 = calculate_parent_hash(new_hash_6, 14_u64, hash_7);
|
let new_hash_67 = calculate_parent_hash(new_hash_6.into(), 14_u64, hash_7);
|
||||||
assert_eq!(new_hash_67, new_path_4[1]);
|
assert_eq!(new_hash_67, new_path_4[1]);
|
||||||
|
|
||||||
set.update_leaf(index_5, new_hash_5).unwrap();
|
set.update_leaf(index_5, new_hash_5).unwrap();
|
||||||
let new_path_4 = set.get_path(NodeIndex::new(depth, index_4)).unwrap();
|
let new_path_4 = set.get_path(NodeIndex::make(depth, index_4)).unwrap();
|
||||||
let new_path_6 = set.get_path(NodeIndex::new(depth, index_6)).unwrap();
|
let new_path_6 = set.get_path(NodeIndex::make(depth, index_6)).unwrap();
|
||||||
let new_hash_45 = calculate_parent_hash(new_hash_5, 13_u64, hash_4);
|
let new_hash_45 = calculate_parent_hash(new_hash_5.into(), 13_u64, hash_4);
|
||||||
assert_eq!(new_hash_45, new_path_6[1]);
|
assert_eq!(new_hash_45, new_path_6[1]);
|
||||||
assert_eq!(new_hash_5, new_path_4[0]);
|
assert_eq!(RpoDigest::from(new_hash_5), new_path_4[0]);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
@@ -370,45 +346,45 @@ mod tests {
|
|||||||
let g = int_to_node(7);
|
let g = int_to_node(7);
|
||||||
let h = int_to_node(8);
|
let h = int_to_node(8);
|
||||||
|
|
||||||
let i = Rpo256::merge(&[a.into(), b.into()]);
|
let i = Rpo256::merge(&[a, b]);
|
||||||
let j = Rpo256::merge(&[c.into(), d.into()]);
|
let j = Rpo256::merge(&[c, d]);
|
||||||
let k = Rpo256::merge(&[e.into(), f.into()]);
|
let k = Rpo256::merge(&[e, f]);
|
||||||
let l = Rpo256::merge(&[g.into(), h.into()]);
|
let l = Rpo256::merge(&[g, h]);
|
||||||
|
|
||||||
let m = Rpo256::merge(&[i.into(), j.into()]);
|
let m = Rpo256::merge(&[i, j]);
|
||||||
let n = Rpo256::merge(&[k.into(), l.into()]);
|
let n = Rpo256::merge(&[k, l]);
|
||||||
|
|
||||||
let root = Rpo256::merge(&[m.into(), n.into()]);
|
let root = Rpo256::merge(&[m, n]);
|
||||||
|
|
||||||
let mut set = MerklePathSet::new(3);
|
let mut set = MerklePathSet::new(3);
|
||||||
|
|
||||||
let value = b;
|
let value = b;
|
||||||
let index = 1;
|
let index = 1;
|
||||||
let path = MerklePath::new([a.into(), j.into(), n.into()].to_vec());
|
let path = MerklePath::new([a, j, n].to_vec());
|
||||||
set.add_path(index, value, path.clone()).unwrap();
|
set.add_path(index, value.into(), path).unwrap();
|
||||||
assert_eq!(value, set.get_leaf(index).unwrap());
|
assert_eq!(*value, set.get_leaf(index).unwrap());
|
||||||
assert_eq!(Word::from(root), set.root());
|
assert_eq!(root, set.root());
|
||||||
|
|
||||||
let value = e;
|
let value = e;
|
||||||
let index = 4;
|
let index = 4;
|
||||||
let path = MerklePath::new([f.into(), l.into(), m.into()].to_vec());
|
let path = MerklePath::new([f, l, m].to_vec());
|
||||||
set.add_path(index, value, path.clone()).unwrap();
|
set.add_path(index, value.into(), path).unwrap();
|
||||||
assert_eq!(value, set.get_leaf(index).unwrap());
|
assert_eq!(*value, set.get_leaf(index).unwrap());
|
||||||
assert_eq!(Word::from(root), set.root());
|
assert_eq!(root, set.root());
|
||||||
|
|
||||||
let value = a;
|
let value = a;
|
||||||
let index = 0;
|
let index = 0;
|
||||||
let path = MerklePath::new([b.into(), j.into(), n.into()].to_vec());
|
let path = MerklePath::new([b, j, n].to_vec());
|
||||||
set.add_path(index, value, path.clone()).unwrap();
|
set.add_path(index, value.into(), path).unwrap();
|
||||||
assert_eq!(value, set.get_leaf(index).unwrap());
|
assert_eq!(*value, set.get_leaf(index).unwrap());
|
||||||
assert_eq!(Word::from(root), set.root());
|
assert_eq!(root, set.root());
|
||||||
|
|
||||||
let value = h;
|
let value = h;
|
||||||
let index = 7;
|
let index = 7;
|
||||||
let path = MerklePath::new([g.into(), k.into(), m.into()].to_vec());
|
let path = MerklePath::new([g, k, m].to_vec());
|
||||||
set.add_path(index, value, path.clone()).unwrap();
|
set.add_path(index, value.into(), path).unwrap();
|
||||||
assert_eq!(value, set.get_leaf(index).unwrap());
|
assert_eq!(*value, set.get_leaf(index).unwrap());
|
||||||
assert_eq!(Word::from(root), set.root());
|
assert_eq!(root, set.root());
|
||||||
}
|
}
|
||||||
|
|
||||||
// HELPER FUNCTIONS
|
// HELPER FUNCTIONS
|
||||||
@@ -422,11 +398,11 @@ mod tests {
|
|||||||
/// - node — current node
|
/// - node — current node
|
||||||
/// - node_pos — position of the current node
|
/// - node_pos — position of the current node
|
||||||
/// - sibling — neighboring vertex in the tree
|
/// - sibling — neighboring vertex in the tree
|
||||||
fn calculate_parent_hash(node: Word, node_pos: u64, sibling: Word) -> Word {
|
fn calculate_parent_hash(node: RpoDigest, node_pos: u64, sibling: RpoDigest) -> RpoDigest {
|
||||||
if is_even(node_pos) {
|
if is_even(node_pos) {
|
||||||
Rpo256::merge(&[node.into(), sibling.into()]).into()
|
Rpo256::merge(&[node, sibling])
|
||||||
} else {
|
} else {
|
||||||
Rpo256::merge(&[sibling.into(), node.into()]).into()
|
Rpo256::merge(&[sibling, node])
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
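A note on the `calculate_parent_hash` test helper above: the parity convention is that an even node position places the node on the left when it is merged with its sibling. The following standalone sketch restates that rule outside the diff; it is illustrative only and simply assumes the public `Rpo256`/`RpoDigest` types from this crate.

use miden_crypto::hash::rpo::{Rpo256, RpoDigest};

// Recompute a parent digest from a node, its position within its level, and its sibling.
// An even position means the node is the left child; an odd position means it is the right child.
fn parent_hash(node: RpoDigest, node_pos: u64, sibling: RpoDigest) -> RpoDigest {
    if node_pos % 2 == 0 {
        Rpo256::merge(&[node, sibling])
    } else {
        Rpo256::merge(&[sibling, node])
    }
}
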
@@ -1,5 +1,6 @@
use super::{
-    BTreeMap, EmptySubtreeRoots, MerkleError, MerklePath, NodeIndex, Rpo256, RpoDigest, Vec, Word,
+    BTreeMap, BTreeSet, EmptySubtreeRoots, InnerNodeInfo, MerkleError, MerklePath, NodeIndex,
+    Rpo256, RpoDigest, Vec, Word,
};

#[cfg(test)]
@@ -8,14 +9,16 @@ mod tests;
// SPARSE MERKLE TREE
// ================================================================================================

-/// A sparse Merkle tree with 63-bit keys and 4-element leaf values, without compaction.
-/// Manipulation and retrieval of leaves and internal nodes is provided by its internal `Store`.
+/// A sparse Merkle tree with 64-bit keys and 4-element leaf values, without compaction.
+///
/// The root of the tree is recomputed on each new leaf update.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct SimpleSmt {
-    root: Word,
    depth: u8,
-    pub(crate) store: Store,
+    root: RpoDigest,
+    leaves: BTreeMap<u64, Word>,
+    branches: BTreeMap<NodeIndex, BranchNode>,
+    empty_hashes: Vec<RpoDigest>,
}

impl SimpleSmt {
@@ -26,12 +29,20 @@ impl SimpleSmt {
    pub const MIN_DEPTH: u8 = 1;

    /// Maximum supported depth.
-    pub const MAX_DEPTH: u8 = 63;
+    pub const MAX_DEPTH: u8 = 64;

+    /// Value of an empty leaf.
+    pub const EMPTY_VALUE: Word = super::empty_roots::EMPTY_WORD;
+
    // CONSTRUCTORS
    // --------------------------------------------------------------------------------------------

-    /// Creates a new simple SMT with the provided depth.
+    /// Returns a new [SimpleSmt] instantiated with the specified depth.
+    ///
+    /// All leaves in the returned tree are set to [ZERO; 4].
+    ///
+    /// # Errors
+    /// Returns an error if the depth is 0 or is greater than 64.
    pub fn new(depth: u8) -> Result<Self, MerkleError> {
        // validate the range of the depth.
        if depth < Self::MIN_DEPTH {
@@ -40,48 +51,66 @@ impl SimpleSmt {
            return Err(MerkleError::DepthTooBig(depth as u64));
        }

-        let (store, root) = Store::new(depth);
-        Ok(Self { root, depth, store })
+        let empty_hashes = EmptySubtreeRoots::empty_hashes(depth).to_vec();
+        let root = empty_hashes[0];
+
+        Ok(Self {
+            root,
+            depth,
+            empty_hashes,
+            leaves: BTreeMap::new(),
+            branches: BTreeMap::new(),
+        })
    }

-    /// Appends the provided entries as leaves of the tree.
+    /// Returns a new [SimpleSmt] instantiated with the specified depth and with leaves
+    /// set as specified by the provided entries.
+    ///
+    /// All leaves omitted from the entries list are set to [ZERO; 4].
    ///
    /// # Errors
-    ///
-    /// The function will fail if the provided entries count exceed the maximum tree capacity, that
-    /// is `2^{depth}`.
-    pub fn with_leaves<R, I>(mut self, entries: R) -> Result<Self, MerkleError>
+    /// Returns an error if:
+    /// - If the depth is 0 or is greater than 64.
+    /// - The number of entries exceeds the maximum tree capacity, that is 2^{depth}.
+    /// - The provided entries contain multiple values for the same key.
+    pub fn with_leaves<R, I>(depth: u8, entries: R) -> Result<Self, MerkleError>
    where
        R: IntoIterator<IntoIter = I>,
        I: Iterator<Item = (u64, Word)> + ExactSizeIterator,
    {
-        // check if the leaves count will fit the depth setup
-        let mut entries = entries.into_iter();
-        let max = 1 << self.depth;
+        // create an empty tree
+        let mut tree = Self::new(depth)?;
+
+        // check if the number of leaves can be accommodated by the tree's depth; we use a min
+        // depth of 63 because we consider passing in a vector of size 2^64 infeasible.
+        let entries = entries.into_iter();
+        let max = 1 << tree.depth.min(63);
        if entries.len() > max {
-            return Err(MerkleError::InvalidEntriesCount(max, entries.len()));
+            return Err(MerkleError::InvalidNumEntries(max, entries.len()));
        }

-        // append leaves and return
-        entries.try_for_each(|(key, leaf)| self.insert_leaf(key, leaf))?;
-        Ok(self)
-    }
-
-    /// Replaces the internal empty digests used when a given depth doesn't contain a node.
-    pub fn with_empty_subtrees<I>(mut self, hashes: I) -> Self
-    where
-        I: IntoIterator<Item = RpoDigest>,
-    {
-        self.store
-            .replace_empty_subtrees(hashes.into_iter().collect());
-        self
+        // append leaves to the tree returning an error if a duplicate entry for the same key
+        // is found
+        let mut empty_entries = BTreeSet::new();
+        for (key, value) in entries {
+            let old_value = tree.update_leaf(key, value)?;
+            if old_value != Self::EMPTY_VALUE || empty_entries.contains(&key) {
+                return Err(MerkleError::DuplicateValuesForIndex(key));
+            }
+            // if we've processed an empty entry, add the key to the set of empty entry keys, and
+            // if this key was already in the set, return an error
+            if value == Self::EMPTY_VALUE && !empty_entries.insert(key) {
+                return Err(MerkleError::DuplicateValuesForIndex(key));
+            }
+        }
+        Ok(tree)
    }

    // PUBLIC ACCESSORS
    // --------------------------------------------------------------------------------------------

    /// Returns the root of this Merkle tree.
-    pub const fn root(&self) -> Word {
+    pub const fn root(&self) -> RpoDigest {
        self.root
    }

@@ -90,47 +119,44 @@ impl SimpleSmt {
        self.depth
    }

-    // PROVIDERS
-    // --------------------------------------------------------------------------------------------
-
-    /// Returns the set count of the keys of the leaves.
-    pub fn leaves_count(&self) -> usize {
-        self.store.leaves_count()
-    }
-
-    /// Returns a node at the specified key
+    /// Returns a node at the specified index.
    ///
    /// # Errors
-    /// Returns an error if:
-    /// * The specified depth is greater than the depth of the tree.
-    pub fn get_node(&self, index: &NodeIndex) -> Result<Word, MerkleError> {
+    /// Returns an error if the specified index has depth set to 0 or the depth is greater than
+    /// the depth of this Merkle tree.
+    pub fn get_node(&self, index: NodeIndex) -> Result<RpoDigest, MerkleError> {
        if index.is_root() {
            Err(MerkleError::DepthTooSmall(index.depth()))
        } else if index.depth() > self.depth() {
            Err(MerkleError::DepthTooBig(index.depth() as u64))
        } else if index.depth() == self.depth() {
-            self.store
-                .get_leaf_node(index.value())
-                .or_else(|| {
-                    self.store
-                        .empty_hashes
-                        .get(index.depth() as usize)
-                        .copied()
-                        .map(Word::from)
-                })
-                .ok_or(MerkleError::InvalidIndex(*index))
+            // the lookup in empty_hashes could fail only if empty_hashes were not built correctly
+            // by the constructor as we check the depth of the lookup above.
+            Ok(RpoDigest::from(
+                self.get_leaf_node(index.value())
+                    .unwrap_or_else(|| *self.empty_hashes[index.depth() as usize]),
+            ))
        } else {
-            let branch_node = self.store.get_branch_node(index);
-            Ok(Rpo256::merge(&[branch_node.left, branch_node.right]).into())
+            Ok(self.get_branch_node(&index).parent())
        }
    }

-    /// Returns a Merkle path from the node at the specified key to the root. The node itself is
-    /// not included in the path.
+    /// Returns a value of the leaf at the specified index.
    ///
    /// # Errors
-    /// Returns an error if:
-    /// * The specified depth is greater than the depth of the tree.
+    /// Returns an error if the index is greater than the maximum tree capacity, that is 2^{depth}.
+    pub fn get_leaf(&self, index: u64) -> Result<Word, MerkleError> {
+        let index = NodeIndex::new(self.depth, index)?;
+        Ok(self.get_node(index)?.into())
+    }
+
+    /// Returns a Merkle path from the node at the specified index to the root.
+    ///
+    /// The node itself is not included in the path.
+    ///
+    /// # Errors
+    /// Returns an error if the specified index has depth set to 0 or the depth is greater than
+    /// the depth of this Merkle tree.
    pub fn get_path(&self, mut index: NodeIndex) -> Result<MerklePath, MerkleError> {
        if index.is_root() {
            return Err(MerkleError::DepthTooSmall(index.depth()));
@@ -142,117 +168,81 @@ impl SimpleSmt {
        for _ in 0..index.depth() {
            let is_right = index.is_value_odd();
            index.move_up();
-            let BranchNode { left, right } = self.store.get_branch_node(&index);
+            let BranchNode { left, right } = self.get_branch_node(&index);
            let value = if is_right { left } else { right };
-            path.push(*value);
+            path.push(value);
        }
-        Ok(path.into())
+        Ok(MerklePath::new(path))
    }

-    /// Return a Merkle path from the leaf at the specified key to the root. The leaf itself is not
-    /// included in the path.
+    /// Return a Merkle path from the leaf at the specified index to the root.
+    ///
+    /// The leaf itself is not included in the path.
    ///
    /// # Errors
-    /// Returns an error if:
-    /// * The specified key does not exist as a leaf node.
-    pub fn get_leaf_path(&self, key: u64) -> Result<MerklePath, MerkleError> {
-        self.get_path(NodeIndex::new(self.depth(), key))
+    /// Returns an error if the index is greater than the maximum tree capacity, that is 2^{depth}.
+    pub fn get_leaf_path(&self, index: u64) -> Result<MerklePath, MerkleError> {
+        let index = NodeIndex::new(self.depth(), index)?;
+        self.get_path(index)
+    }
+
+    // ITERATORS
+    // --------------------------------------------------------------------------------------------
+
+    /// Returns an iterator over the leaves of this [SimpleSmt].
+    pub fn leaves(&self) -> impl Iterator<Item = (u64, &Word)> {
+        self.leaves.iter().map(|(i, w)| (*i, w))
+    }
+
+    /// Returns an iterator over the inner nodes of this Merkle tree.
+    pub fn inner_nodes(&self) -> impl Iterator<Item = InnerNodeInfo> + '_ {
+        self.branches.values().map(|e| InnerNodeInfo {
+            value: e.parent(),
+            left: e.left,
+            right: e.right,
+        })
    }

    // STATE MUTATORS
    // --------------------------------------------------------------------------------------------

-    /// Replaces the leaf located at the specified key, and recomputes hashes by walking up the tree
+    /// Updates value of the leaf at the specified index returning the old leaf value.
+    ///
+    /// This also recomputes all hashes between the leaf and the root, updating the root itself.
    ///
    /// # Errors
-    /// Returns an error if the specified key is not a valid leaf index for this tree.
-    pub fn update_leaf(&mut self, key: u64, value: Word) -> Result<(), MerkleError> {
-        if !self.store.check_leaf_node_exists(key) {
-            return Err(MerkleError::InvalidIndex(NodeIndex::new(self.depth(), key)));
-        }
-        self.insert_leaf(key, value)?;
-
-        Ok(())
-    }
-
-    /// Inserts a leaf located at the specified key, and recomputes hashes by walking up the tree
-    pub fn insert_leaf(&mut self, key: u64, value: Word) -> Result<(), MerkleError> {
-        self.store.insert_leaf_node(key, value);
-
-        // TODO consider using a map `index |-> word` instead of `index |-> (word, word)`
-        let mut index = NodeIndex::new(self.depth(), key);
+    /// Returns an error if the index is greater than the maximum tree capacity, that is 2^{depth}.
+    pub fn update_leaf(&mut self, index: u64, value: Word) -> Result<Word, MerkleError> {
+        let old_value = self.insert_leaf_node(index, value).unwrap_or(Self::EMPTY_VALUE);
+
+        // if the old value and new value are the same, there is nothing to update
+        if value == old_value {
+            return Ok(value);
+        }
+
+        let mut index = NodeIndex::new(self.depth(), index)?;
        let mut value = RpoDigest::from(value);
        for _ in 0..index.depth() {
            let is_right = index.is_value_odd();
            index.move_up();
-            let BranchNode { left, right } = self.store.get_branch_node(&index);
-            let (left, right) = if is_right {
-                (left, value)
-            } else {
-                (value, right)
-            };
-            self.store.insert_branch_node(index, left, right);
+            let BranchNode { left, right } = self.get_branch_node(&index);
+            let (left, right) = if is_right { (left, value) } else { (value, right) };
+            self.insert_branch_node(index, left, right);
            value = Rpo256::merge(&[left, right]);
        }
-        self.root = value.into();
-        Ok(())
-    }
-}
-
-// STORE
-// ================================================================================================
-
-/// A data store for sparse Merkle tree key-value pairs.
-/// Leaves and branch nodes are stored separately in B-tree maps, indexed by key and (key, depth)
-/// respectively. Hashes for blank subtrees at each layer are stored in `empty_hashes`, beginning
-/// with the root hash of an empty tree, and ending with the zero value of a leaf node.
-#[derive(Debug, Clone, PartialEq, Eq)]
-pub(crate) struct Store {
-    pub(crate) branches: BTreeMap<NodeIndex, BranchNode>,
-    leaves: BTreeMap<u64, Word>,
-    pub(crate) empty_hashes: Vec<RpoDigest>,
-    depth: u8,
-}
-
-#[derive(Debug, Default, Clone, PartialEq, Eq)]
-pub(crate) struct BranchNode {
-    pub(crate) left: RpoDigest,
-    pub(crate) right: RpoDigest,
-}
-
-impl Store {
-    fn new(depth: u8) -> (Self, Word) {
-        let branches = BTreeMap::new();
-        let leaves = BTreeMap::new();
-
-        // Construct empty node digests for each layer of the tree
-        let empty_hashes = EmptySubtreeRoots::empty_hashes(depth).to_vec();
-
-        let root = empty_hashes[0].into();
-        let store = Self {
-            branches,
-            leaves,
-            empty_hashes,
-            depth,
-        };
-
-        (store, root)
+        self.root = value;
+        Ok(old_value)
    }

-    fn replace_empty_subtrees(&mut self, hashes: Vec<RpoDigest>) {
-        self.empty_hashes = hashes;
-    }
-
-    fn check_leaf_node_exists(&self, key: u64) -> bool {
-        self.leaves.contains_key(&key)
-    }
-
+    // HELPER METHODS
+    // --------------------------------------------------------------------------------------------
+
    fn get_leaf_node(&self, key: u64) -> Option<Word> {
        self.leaves.get(&key).copied()
    }

-    fn insert_leaf_node(&mut self, key: u64, node: Word) {
-        self.leaves.insert(key, node);
+    fn insert_leaf_node(&mut self, key: u64, node: Word) -> Option<Word> {
+        self.leaves.insert(key, node)
    }

    fn get_branch_node(&self, index: &NodeIndex) -> BranchNode {
@@ -269,8 +259,19 @@ impl Store {
        let branch = BranchNode { left, right };
        self.branches.insert(index, branch);
    }
+}

-    fn leaves_count(&self) -> usize {
-        self.leaves.len()
+// BRANCH NODE
+// ================================================================================================
+
+#[derive(Debug, Default, Clone, PartialEq, Eq)]
+struct BranchNode {
+    left: RpoDigest,
+    right: RpoDigest,
+}
+
+impl BranchNode {
+    fn parent(&self) -> RpoDigest {
+        Rpo256::merge(&[self.left, self.right])
    }
}

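For orientation, here is a minimal usage sketch of the reworked `SimpleSmt` API shown in the hunks above. It is illustrative only and not part of the diff; it assumes the surface visible in this changeset: `with_leaves(depth, entries)`, `update_leaf` returning the previous leaf value, and a fallible `NodeIndex::new`.

use miden_crypto::merkle::{NodeIndex, SimpleSmt};
use miden_crypto::{Felt, Word};

fn main() {
    // helper: embed a u64 into a 4-element leaf value
    let leaf = |v: u64| -> Word { [Felt::new(v), Felt::new(0), Felt::new(0), Felt::new(0)] };

    // depth-2 tree with two of its four leaves populated
    let mut smt = SimpleSmt::with_leaves(2, [(0_u64, leaf(1)), (3_u64, leaf(2))]).unwrap();

    // update_leaf now returns the value that was replaced
    let old = smt.update_leaf(0, leaf(9)).unwrap();
    assert_eq!(old, leaf(1));

    // NodeIndex construction is fallible; open leaf 3 against the new root
    let opening = smt.get_path(NodeIndex::new(2, 3).unwrap()).unwrap();
    println!("root = {:?}, siblings in opening = {}", smt.root(), opening.len());
}
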
@@ -1,21 +1,21 @@
use super::{
-    super::{int_to_node, MerkleTree, RpoDigest, SimpleSmt},
-    NodeIndex, Rpo256, Vec, Word,
+    super::{InnerNodeInfo, MerkleError, MerkleTree, RpoDigest, SimpleSmt},
+    NodeIndex, Rpo256, Vec,
};
-use proptest::prelude::*;
-use rand_utils::prng_array;
+use crate::{
+    merkle::{digests_to_words, empty_roots::EMPTY_WORD, int_to_leaf, int_to_node},
+    Word,
+};
+
+// TEST DATA
+// ================================================================================================

const KEYS4: [u64; 4] = [0, 1, 2, 3];
const KEYS8: [u64; 8] = [0, 1, 2, 3, 4, 5, 6, 7];

-const VALUES4: [Word; 4] = [
-    int_to_node(1),
-    int_to_node(2),
-    int_to_node(3),
-    int_to_node(4),
-];
+const VALUES4: [RpoDigest; 4] = [int_to_node(1), int_to_node(2), int_to_node(3), int_to_node(4)];

-const VALUES8: [Word; 8] = [
+const VALUES8: [RpoDigest; 8] = [
    int_to_node(1),
    int_to_node(2),
    int_to_node(3),
@@ -26,27 +26,19 @@ const VALUES8: [Word; 8] = [
    int_to_node(8),
];

-const ZERO_VALUES8: [Word; 8] = [int_to_node(0); 8];
+const ZERO_VALUES8: [Word; 8] = [int_to_leaf(0); 8];

+// TESTS
+// ================================================================================================
+
#[test]
fn build_empty_tree() {
+    // tree of depth 3
    let smt = SimpleSmt::new(3).unwrap();
    let mt = MerkleTree::new(ZERO_VALUES8.to_vec()).unwrap();
    assert_eq!(mt.root(), smt.root());
}

-#[test]
-fn empty_digests_are_consistent() {
-    let depth = 5;
-    let root = SimpleSmt::new(depth).unwrap().root();
-    let computed: [RpoDigest; 2] = (0..depth).fold([Default::default(); 2], |state, _| {
-        let digest = Rpo256::merge(&state);
-        [digest; 2]
-    });
-
-    assert_eq!(Word::from(computed[0]), root);
-}
-
#[test]
fn build_sparse_tree() {
    let mut smt = SimpleSmt::new(3).unwrap();
@@ -54,121 +46,130 @@ fn build_sparse_tree() {

    // insert single value
    let key = 6;
-    let new_node = int_to_node(7);
+    let new_node = int_to_leaf(7);
    values[key as usize] = new_node;
-    smt.insert_leaf(key, new_node)
-        .expect("Failed to insert leaf");
+    let old_value = smt.update_leaf(key, new_node).expect("Failed to update leaf");
    let mt2 = MerkleTree::new(values.clone()).unwrap();
    assert_eq!(mt2.root(), smt.root());
    assert_eq!(
-        mt2.get_path(NodeIndex::new(3, 6)).unwrap(),
-        smt.get_path(NodeIndex::new(3, 6)).unwrap()
+        mt2.get_path(NodeIndex::make(3, 6)).unwrap(),
+        smt.get_path(NodeIndex::make(3, 6)).unwrap()
    );
+    assert_eq!(old_value, EMPTY_WORD);

    // insert second value at distinct leaf branch
    let key = 2;
-    let new_node = int_to_node(3);
+    let new_node = int_to_leaf(3);
    values[key as usize] = new_node;
-    smt.insert_leaf(key, new_node)
-        .expect("Failed to insert leaf");
+    let old_value = smt.update_leaf(key, new_node).expect("Failed to update leaf");
    let mt3 = MerkleTree::new(values).unwrap();
    assert_eq!(mt3.root(), smt.root());
    assert_eq!(
-        mt3.get_path(NodeIndex::new(3, 2)).unwrap(),
-        smt.get_path(NodeIndex::new(3, 2)).unwrap()
+        mt3.get_path(NodeIndex::make(3, 2)).unwrap(),
+        smt.get_path(NodeIndex::make(3, 2)).unwrap()
    );
+    assert_eq!(old_value, EMPTY_WORD);
}

#[test]
-fn build_full_tree() {
-    let tree = SimpleSmt::new(2)
-        .unwrap()
-        .with_leaves(KEYS4.into_iter().zip(VALUES4.into_iter()))
+fn test_depth2_tree() {
+    let tree =
+        SimpleSmt::with_leaves(2, KEYS4.into_iter().zip(digests_to_words(&VALUES4).into_iter()))
        .unwrap();

+    // check internal structure
    let (root, node2, node3) = compute_internal_nodes();
    assert_eq!(root, tree.root());
-    assert_eq!(node2, tree.get_node(&NodeIndex::new(1, 0)).unwrap());
-    assert_eq!(node3, tree.get_node(&NodeIndex::new(1, 1)).unwrap());
+    assert_eq!(node2, tree.get_node(NodeIndex::make(1, 0)).unwrap());
+    assert_eq!(node3, tree.get_node(NodeIndex::make(1, 1)).unwrap());

+    // check get_node()
+    assert_eq!(VALUES4[0], tree.get_node(NodeIndex::make(2, 0)).unwrap());
+    assert_eq!(VALUES4[1], tree.get_node(NodeIndex::make(2, 1)).unwrap());
+    assert_eq!(VALUES4[2], tree.get_node(NodeIndex::make(2, 2)).unwrap());
+    assert_eq!(VALUES4[3], tree.get_node(NodeIndex::make(2, 3)).unwrap());
+
+    // check get_path(): depth 2
+    assert_eq!(vec![VALUES4[1], node3], *tree.get_path(NodeIndex::make(2, 0)).unwrap());
+    assert_eq!(vec![VALUES4[0], node3], *tree.get_path(NodeIndex::make(2, 1)).unwrap());
+    assert_eq!(vec![VALUES4[3], node2], *tree.get_path(NodeIndex::make(2, 2)).unwrap());
+    assert_eq!(vec![VALUES4[2], node2], *tree.get_path(NodeIndex::make(2, 3)).unwrap());
+
+    // check get_path(): depth 1
+    assert_eq!(vec![node3], *tree.get_path(NodeIndex::make(1, 0)).unwrap());
+    assert_eq!(vec![node2], *tree.get_path(NodeIndex::make(1, 1)).unwrap());
}

#[test]
-fn get_values() {
-    let tree = SimpleSmt::new(2)
-        .unwrap()
-        .with_leaves(KEYS4.into_iter().zip(VALUES4.into_iter()))
+fn test_inner_node_iterator() -> Result<(), MerkleError> {
+    let tree =
+        SimpleSmt::with_leaves(2, KEYS4.into_iter().zip(digests_to_words(&VALUES4).into_iter()))
        .unwrap();

    // check depth 2
-    assert_eq!(VALUES4[0], tree.get_node(&NodeIndex::new(2, 0)).unwrap());
-    assert_eq!(VALUES4[1], tree.get_node(&NodeIndex::new(2, 1)).unwrap());
-    assert_eq!(VALUES4[2], tree.get_node(&NodeIndex::new(2, 2)).unwrap());
-    assert_eq!(VALUES4[3], tree.get_node(&NodeIndex::new(2, 3)).unwrap());
-}
-
-#[test]
-fn get_path() {
-    let tree = SimpleSmt::new(2)
-        .unwrap()
-        .with_leaves(KEYS4.into_iter().zip(VALUES4.into_iter()))
-        .unwrap();
-
-    let (_, node2, node3) = compute_internal_nodes();
-
-    // check depth 2
-    assert_eq!(
-        vec![VALUES4[1], node3],
-        *tree.get_path(NodeIndex::new(2, 0)).unwrap()
-    );
-    assert_eq!(
-        vec![VALUES4[0], node3],
-        *tree.get_path(NodeIndex::new(2, 1)).unwrap()
-    );
-    assert_eq!(
-        vec![VALUES4[3], node2],
-        *tree.get_path(NodeIndex::new(2, 2)).unwrap()
-    );
-    assert_eq!(
-        vec![VALUES4[2], node2],
-        *tree.get_path(NodeIndex::new(2, 3)).unwrap()
-    );
-
-    // check depth 1
-    assert_eq!(vec![node3], *tree.get_path(NodeIndex::new(1, 0)).unwrap());
-    assert_eq!(vec![node2], *tree.get_path(NodeIndex::new(1, 1)).unwrap());
+    assert_eq!(VALUES4[0], tree.get_node(NodeIndex::make(2, 0)).unwrap());
+    assert_eq!(VALUES4[1], tree.get_node(NodeIndex::make(2, 1)).unwrap());
+    assert_eq!(VALUES4[2], tree.get_node(NodeIndex::make(2, 2)).unwrap());
+    assert_eq!(VALUES4[3], tree.get_node(NodeIndex::make(2, 3)).unwrap());
+
+    // get parent nodes
+    let root = tree.root();
+    let l1n0 = tree.get_node(NodeIndex::make(1, 0))?;
+    let l1n1 = tree.get_node(NodeIndex::make(1, 1))?;
+    let l2n0 = tree.get_node(NodeIndex::make(2, 0))?;
+    let l2n1 = tree.get_node(NodeIndex::make(2, 1))?;
+    let l2n2 = tree.get_node(NodeIndex::make(2, 2))?;
+    let l2n3 = tree.get_node(NodeIndex::make(2, 3))?;
+
+    let nodes: Vec<InnerNodeInfo> = tree.inner_nodes().collect();
+    let expected = vec![
+        InnerNodeInfo {
+            value: root,
+            left: l1n0,
+            right: l1n1,
+        },
+        InnerNodeInfo {
+            value: l1n0,
+            left: l2n0,
+            right: l2n1,
+        },
+        InnerNodeInfo {
+            value: l1n1,
+            left: l2n2,
+            right: l2n3,
+        },
+    ];
+    assert_eq!(nodes, expected);
+
+    Ok(())
}

#[test]
fn update_leaf() {
-    let mut tree = SimpleSmt::new(3)
-        .unwrap()
-        .with_leaves(KEYS8.into_iter().zip(VALUES8.into_iter()))
+    let mut tree =
+        SimpleSmt::with_leaves(3, KEYS8.into_iter().zip(digests_to_words(&VALUES8).into_iter()))
        .unwrap();

    // update one value
    let key = 3;
-    let new_node = int_to_node(9);
-    let mut expected_values = VALUES8.to_vec();
+    let new_node = int_to_leaf(9);
+    let mut expected_values = digests_to_words(&VALUES8);
    expected_values[key] = new_node;
-    let expected_tree = SimpleSmt::new(3)
-        .unwrap()
-        .with_leaves(KEYS8.into_iter().zip(expected_values.clone().into_iter()))
-        .unwrap();
+    let expected_tree = MerkleTree::new(expected_values.clone()).unwrap();

-    tree.update_leaf(key as u64, new_node).unwrap();
-    assert_eq!(expected_tree.root, tree.root);
+    let old_leaf = tree.update_leaf(key as u64, new_node).unwrap();
+    assert_eq!(expected_tree.root(), tree.root);
+    assert_eq!(old_leaf, *VALUES8[key]);

    // update another value
    let key = 6;
-    let new_node = int_to_node(10);
+    let new_node = int_to_leaf(10);
    expected_values[key] = new_node;
-    let expected_tree = SimpleSmt::new(3)
-        .unwrap()
-        .with_leaves(KEYS8.into_iter().zip(expected_values.into_iter()))
-        .unwrap();
+    let expected_tree = MerkleTree::new(expected_values.clone()).unwrap();

-    tree.update_leaf(key as u64, new_node).unwrap();
-    assert_eq!(expected_tree.root, tree.root);
+    let old_leaf = tree.update_leaf(key as u64, new_node).unwrap();
+    assert_eq!(expected_tree.root(), tree.root);
+    assert_eq!(old_leaf, *VALUES8[key]);
}

#[test]
@@ -181,34 +182,34 @@ fn small_tree_opening_is_consistent() {
    //  / \   / \   / \   / \
    // a   b 0   0 c   0 0   d

-    let z = Word::from(RpoDigest::default());
+    let z = EMPTY_WORD;

    let a = Word::from(Rpo256::merge(&[z.into(); 2]));
    let b = Word::from(Rpo256::merge(&[a.into(); 2]));
    let c = Word::from(Rpo256::merge(&[b.into(); 2]));
    let d = Word::from(Rpo256::merge(&[c.into(); 2]));

-    let e = Word::from(Rpo256::merge(&[a.into(), b.into()]));
-    let f = Word::from(Rpo256::merge(&[z.into(), z.into()]));
-    let g = Word::from(Rpo256::merge(&[c.into(), z.into()]));
-    let h = Word::from(Rpo256::merge(&[z.into(), d.into()]));
+    let e = Rpo256::merge(&[a.into(), b.into()]);
+    let f = Rpo256::merge(&[z.into(), z.into()]);
+    let g = Rpo256::merge(&[c.into(), z.into()]);
+    let h = Rpo256::merge(&[z.into(), d.into()]);

-    let i = Word::from(Rpo256::merge(&[e.into(), f.into()]));
-    let j = Word::from(Rpo256::merge(&[g.into(), h.into()]));
+    let i = Rpo256::merge(&[e, f]);
+    let j = Rpo256::merge(&[g, h]);

-    let k = Word::from(Rpo256::merge(&[i.into(), j.into()]));
+    let k = Rpo256::merge(&[i, j]);

    let depth = 3;
    let entries = vec![(0, a), (1, b), (4, c), (7, d)];
-    let tree = SimpleSmt::new(depth).unwrap().with_leaves(entries).unwrap();
+    let tree = SimpleSmt::with_leaves(depth, entries).unwrap();

-    assert_eq!(tree.root(), Word::from(k));
+    assert_eq!(tree.root(), k);

-    let cases: Vec<(u8, u64, Vec<Word>)> = vec![
-        (3, 0, vec![b, f, j]),
-        (3, 1, vec![a, f, j]),
-        (3, 4, vec![z, h, i]),
-        (3, 7, vec![z, g, i]),
+    let cases: Vec<(u8, u64, Vec<RpoDigest>)> = vec![
+        (3, 0, vec![b.into(), f, j]),
+        (3, 1, vec![a.into(), f, j]),
+        (3, 4, vec![z.into(), h, i]),
+        (3, 7, vec![z.into(), g, i]),
        (2, 0, vec![f, j]),
        (2, 1, vec![e, j]),
        (2, 2, vec![h, i]),
@@ -218,71 +219,45 @@ fn small_tree_opening_is_consistent() {
    ];

    for (depth, key, path) in cases {
-        let opening = tree.get_path(NodeIndex::new(depth, key)).unwrap();
+        let opening = tree.get_path(NodeIndex::make(depth, key)).unwrap();

        assert_eq!(path, *opening);
    }
}

-proptest! {
-    #[test]
-    fn arbitrary_openings_single_leaf(
-        depth in SimpleSmt::MIN_DEPTH..SimpleSmt::MAX_DEPTH,
-        key in prop::num::u64::ANY,
-        leaf in prop::num::u64::ANY,
-    ) {
-        let mut tree = SimpleSmt::new(depth).unwrap();
-
-        let key = key % (1 << depth as u64);
-        let leaf = int_to_node(leaf);
-
-        tree.insert_leaf(key, leaf.into()).unwrap();
-        tree.get_leaf_path(key).unwrap();
-
-        // traverse to root, fetching all paths
-        for d in 1..depth {
-            let k = key >> (depth - d);
-            tree.get_path(NodeIndex::new(d, k)).unwrap();
-        }
-    }
+#[test]
+fn fail_on_duplicates() {
+    let entries = [(1_u64, int_to_leaf(1)), (5, int_to_leaf(2)), (1_u64, int_to_leaf(3))];
+    let smt = SimpleSmt::with_leaves(64, entries);
+    assert!(smt.is_err());
+
+    let entries = [(1_u64, int_to_leaf(0)), (5, int_to_leaf(2)), (1_u64, int_to_leaf(0))];
+    let smt = SimpleSmt::with_leaves(64, entries);
+    assert!(smt.is_err());
+
+    let entries = [(1_u64, int_to_leaf(0)), (5, int_to_leaf(2)), (1_u64, int_to_leaf(1))];
+    let smt = SimpleSmt::with_leaves(64, entries);
+    assert!(smt.is_err());
+
+    let entries = [(1_u64, int_to_leaf(1)), (5, int_to_leaf(2)), (1_u64, int_to_leaf(0))];
+    let smt = SimpleSmt::with_leaves(64, entries);
+    assert!(smt.is_err());
+}

#[test]
-    fn arbitrary_openings_multiple_leaves(
-        depth in SimpleSmt::MIN_DEPTH..SimpleSmt::MAX_DEPTH,
-        count in 2u8..10u8,
-        ref seed in any::<[u8; 32]>()
-    ) {
-        let mut tree = SimpleSmt::new(depth).unwrap();
-        let mut seed = *seed;
-        let leaves = (1 << depth) - 1;
-
-        for _ in 0..count {
-            seed = prng_array(seed);
-
-            let mut key = [0u8; 8];
-            let mut leaf = [0u8; 8];
-
-            key.copy_from_slice(&seed[..8]);
-            leaf.copy_from_slice(&seed[8..16]);
-
-            let key = u64::from_le_bytes(key);
-            let key = key % leaves;
-            let leaf = u64::from_le_bytes(leaf);
-            let leaf = int_to_node(leaf);
-
-            tree.insert_leaf(key, leaf).unwrap();
-            tree.get_leaf_path(key).unwrap();
-        }
-    }
+fn with_no_duplicates_empty_node() {
+    let entries = [(1_u64, int_to_leaf(0)), (5, int_to_leaf(2))];
+    let smt = SimpleSmt::with_leaves(64, entries);
+    assert!(smt.is_ok());
}

// HELPER FUNCTIONS
// --------------------------------------------------------------------------------------------

-fn compute_internal_nodes() -> (Word, Word, Word) {
-    let node2 = Rpo256::hash_elements(&[VALUES4[0], VALUES4[1]].concat());
-    let node3 = Rpo256::hash_elements(&[VALUES4[2], VALUES4[3]].concat());
+fn compute_internal_nodes() -> (RpoDigest, RpoDigest, RpoDigest) {
+    let node2 = Rpo256::merge(&[VALUES4[0], VALUES4[1]]);
+    let node3 = Rpo256::merge(&[VALUES4[2], VALUES4[3]]);
    let root = Rpo256::merge(&[node2, node3]);

-    (root.into(), node2.into(), node3.into())
+    (root, node2, node3)
}

|
|||||||
use super::{
|
use super::{
|
||||||
BTreeMap, BTreeSet, EmptySubtreeRoots, MerkleError, MerklePath, MerklePathSet, MerkleTree,
|
mmr::Mmr, BTreeMap, EmptySubtreeRoots, InnerNodeInfo, KvMap, MerkleError, MerklePath,
|
||||||
NodeIndex, RootPath, Rpo256, RpoDigest, SimpleSmt, ValuePath, Vec, Word,
|
MerklePathSet, MerkleTree, NodeIndex, RecordingMap, RootPath, Rpo256, RpoDigest, SimpleSmt,
|
||||||
|
TieredSmt, ValuePath, Vec,
|
||||||
};
|
};
|
||||||
use crate::utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable};
|
use crate::utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable};
|
||||||
|
use core::borrow::Borrow;
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests;
|
mod tests;
|
||||||
|
|
||||||
|
// MERKLE STORE
|
||||||
|
// ================================================================================================
|
||||||
|
|
||||||
|
/// A default [MerkleStore] which uses a simple [BTreeMap] as the backing storage.
|
||||||
|
pub type DefaultMerkleStore = MerkleStore<BTreeMap<RpoDigest, StoreNode>>;
|
||||||
|
|
||||||
|
/// A [MerkleStore] with recording capabilities which uses [RecordingMap] as the backing storage.
|
||||||
|
pub type RecordingMerkleStore = MerkleStore<RecordingMap<RpoDigest, StoreNode>>;
|
||||||
|
|
||||||
#[derive(Debug, Default, Copy, Clone, Eq, PartialEq)]
|
#[derive(Debug, Default, Copy, Clone, Eq, PartialEq)]
|
||||||
pub struct Node {
|
pub struct StoreNode {
|
||||||
left: RpoDigest,
|
left: RpoDigest,
|
||||||
right: RpoDigest,
|
right: RpoDigest,
|
||||||
}
|
}
|
||||||
|
|
||||||
/// An in-memory data store for Merkle-lized data.
|
/// An in-memory data store for Merkelized data.
|
||||||
///
|
///
|
||||||
/// This is a in memory data store for Merkle trees, this store allows all the nodes of multiple
|
/// This is a in memory data store for Merkle trees, this store allows all the nodes of multiple
|
||||||
/// trees to live as long as necessary and without duplication, this allows the implementation of
|
/// trees to live as long as necessary and without duplication, this allows the implementation of
|
||||||
@@ -41,26 +52,33 @@ pub struct Node {
|
|||||||
/// # let T1 = MerkleTree::new([A, B, C, D, E, F, G, H1].to_vec()).expect("even number of leaves provided");
|
/// # let T1 = MerkleTree::new([A, B, C, D, E, F, G, H1].to_vec()).expect("even number of leaves provided");
|
||||||
/// # let ROOT0 = T0.root();
|
/// # let ROOT0 = T0.root();
|
||||||
/// # let ROOT1 = T1.root();
|
/// # let ROOT1 = T1.root();
|
||||||
/// let mut store = MerkleStore::new();
|
/// let mut store: MerkleStore = MerkleStore::new();
|
||||||
///
|
///
|
||||||
/// // the store is initialized with the SMT empty nodes
|
/// // the store is initialized with the SMT empty nodes
|
||||||
/// assert_eq!(store.num_internal_nodes(), 255);
|
/// assert_eq!(store.num_internal_nodes(), 255);
|
||||||
///
|
///
|
||||||
|
/// let tree1 = MerkleTree::new(vec![A, B, C, D, E, F, G, H0]).unwrap();
|
||||||
|
/// let tree2 = MerkleTree::new(vec![A, B, C, D, E, F, G, H1]).unwrap();
|
||||||
|
///
|
||||||
/// // populates the store with two merkle trees, common nodes are shared
|
/// // populates the store with two merkle trees, common nodes are shared
|
||||||
/// store.add_merkle_tree([A, B, C, D, E, F, G, H0]);
|
/// store.extend(tree1.inner_nodes());
|
||||||
/// store.add_merkle_tree([A, B, C, D, E, F, G, H1]);
|
/// store.extend(tree2.inner_nodes());
|
||||||
///
|
///
|
||||||
/// // every leaf except the last are the same
|
/// // every leaf except the last are the same
|
||||||
/// for i in 0..7 {
|
/// for i in 0..7 {
|
||||||
/// let d0 = store.get_node(ROOT0, NodeIndex::new(3, i)).unwrap();
|
/// let idx0 = NodeIndex::new(3, i).unwrap();
|
||||||
/// let d1 = store.get_node(ROOT1, NodeIndex::new(3, i)).unwrap();
|
/// let d0 = store.get_node(ROOT0, idx0).unwrap();
|
||||||
|
/// let idx1 = NodeIndex::new(3, i).unwrap();
|
||||||
|
/// let d1 = store.get_node(ROOT1, idx1).unwrap();
|
||||||
/// assert_eq!(d0, d1, "Both trees have the same leaf at pos {i}");
|
/// assert_eq!(d0, d1, "Both trees have the same leaf at pos {i}");
|
||||||
/// }
|
/// }
|
||||||
///
|
///
|
||||||
/// // The leafs A-B-C-D are the same for both trees, so are their 2 immediate parents
|
/// // The leafs A-B-C-D are the same for both trees, so are their 2 immediate parents
|
||||||
/// for i in 0..4 {
|
/// for i in 0..4 {
|
||||||
/// let d0 = store.get_path(ROOT0, NodeIndex::new(3, i)).unwrap();
|
/// let idx0 = NodeIndex::new(3, i).unwrap();
|
||||||
/// let d1 = store.get_path(ROOT1, NodeIndex::new(3, i)).unwrap();
|
/// let d0 = store.get_path(ROOT0, idx0).unwrap();
|
||||||
|
/// let idx1 = NodeIndex::new(3, i).unwrap();
|
||||||
|
/// let d1 = store.get_path(ROOT1, idx1).unwrap();
|
||||||
/// assert_eq!(d0.path[0..2], d1.path[0..2], "Both sub-trees are equal up to two levels");
|
/// assert_eq!(d0.path[0..2], d1.path[0..2], "Both sub-trees are equal up to two levels");
|
||||||
/// }
|
/// }
|
||||||
///
|
///
|
||||||
@@ -69,82 +87,27 @@ pub struct Node {
|
|||||||
/// assert_eq!(store.num_internal_nodes() - 255, 10);
|
/// assert_eq!(store.num_internal_nodes() - 255, 10);
|
||||||
/// ```
|
/// ```
|
||||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||||
pub struct MerkleStore {
|
pub struct MerkleStore<T: KvMap<RpoDigest, StoreNode> = BTreeMap<RpoDigest, StoreNode>> {
|
||||||
nodes: BTreeMap<RpoDigest, Node>,
|
nodes: T,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Default for MerkleStore {
|
impl<T: KvMap<RpoDigest, StoreNode>> Default for MerkleStore<T> {
|
||||||
fn default() -> Self {
|
fn default() -> Self {
|
||||||
Self::new()
|
Self::new()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl MerkleStore {
|
impl<T: KvMap<RpoDigest, StoreNode>> MerkleStore<T> {
|
||||||
// CONSTRUCTORS
|
// CONSTRUCTORS
|
||||||
// --------------------------------------------------------------------------------------------
|
// --------------------------------------------------------------------------------------------
|
||||||
|
|
||||||
/// Creates an empty `MerkleStore` instance.
|
/// Creates an empty `MerkleStore` instance.
|
||||||
pub fn new() -> MerkleStore {
|
pub fn new() -> MerkleStore<T> {
|
||||||
// pre-populate the store with the empty hashes
|
// pre-populate the store with the empty hashes
|
||||||
let subtrees = EmptySubtreeRoots::empty_hashes(255);
|
let nodes = empty_hashes().into_iter().collect();
|
||||||
let nodes = subtrees
|
|
||||||
.iter()
|
|
||||||
.rev()
|
|
||||||
.copied()
|
|
||||||
.zip(subtrees.iter().rev().skip(1).copied())
|
|
||||||
.map(|(child, parent)| {
|
|
||||||
(
|
|
||||||
parent,
|
|
||||||
Node {
|
|
||||||
left: child,
|
|
||||||
right: child,
|
|
||||||
},
|
|
||||||
)
|
|
||||||
})
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
MerkleStore { nodes }
|
MerkleStore { nodes }
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Appends the provided merkle tree represented by its `leaves` to the set.
|
|
||||||
pub fn with_merkle_tree<I>(mut self, leaves: I) -> Result<Self, MerkleError>
|
|
||||||
where
|
|
||||||
I: IntoIterator<Item = Word>,
|
|
||||||
{
|
|
||||||
self.add_merkle_tree(leaves)?;
|
|
||||||
Ok(self)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Appends the provided sparse merkle tree represented by its `entries` to the set.
|
|
||||||
pub fn with_sparse_merkle_tree<R, I>(mut self, entries: R) -> Result<Self, MerkleError>
|
|
||||||
where
|
|
||||||
R: IntoIterator<IntoIter = I>,
|
|
||||||
I: Iterator<Item = (u64, Word)> + ExactSizeIterator,
|
|
||||||
{
|
|
||||||
self.add_sparse_merkle_tree(entries)?;
|
|
||||||
Ok(self)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Appends the provided merkle path set.
|
|
||||||
pub fn with_merkle_path(
|
|
||||||
mut self,
|
|
||||||
index_value: u64,
|
|
||||||
node: Word,
|
|
||||||
path: MerklePath,
|
|
||||||
) -> Result<Self, MerkleError> {
|
|
||||||
self.add_merkle_path(index_value, node, path)?;
|
|
||||||
Ok(self)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Appends the provided merkle path set.
|
|
||||||
pub fn with_merkle_paths<I>(mut self, paths: I) -> Result<Self, MerkleError>
|
|
||||||
where
|
|
||||||
I: IntoIterator<Item = (u64, Word, MerklePath)>,
|
|
||||||
{
|
|
||||||
self.add_merkle_paths(paths)?;
|
|
||||||
Ok(self)
|
|
||||||
}
|
|
||||||
|
|
||||||
// PUBLIC ACCESSORS
|
// PUBLIC ACCESSORS
|
||||||
// --------------------------------------------------------------------------------------------
|
// --------------------------------------------------------------------------------------------
|
||||||
|
|
||||||
@@ -156,27 +119,24 @@ impl MerkleStore {
|
|||||||
/// Returns the node at `index` rooted on the tree `root`.
|
/// Returns the node at `index` rooted on the tree `root`.
|
||||||
///
|
///
|
||||||
/// # Errors
|
/// # Errors
|
||||||
///
|
|
||||||
/// This method can return the following errors:
|
/// This method can return the following errors:
|
||||||
/// - `RootNotInStore` if the `root` is not present in the store.
|
/// - `RootNotInStore` if the `root` is not present in the store.
|
||||||
/// - `NodeNotInStore` if a node needed to traverse from `root` to `index` is not present in the store.
|
/// - `NodeNotInStore` if a node needed to traverse from `root` to `index` is not present in
|
||||||
pub fn get_node(&self, root: Word, index: NodeIndex) -> Result<Word, MerkleError> {
|
/// the store.
|
||||||
let mut hash: RpoDigest = root.into();
|
pub fn get_node(&self, root: RpoDigest, index: NodeIndex) -> Result<RpoDigest, MerkleError> {
|
||||||
|
let mut hash = root;
|
||||||
|
|
||||||
// corner case: check the root is in the store when called with index `NodeIndex::root()`
|
// corner case: check the root is in the store when called with index `NodeIndex::root()`
|
||||||
self.nodes
|
self.nodes.get(&hash).ok_or(MerkleError::RootNotInStore(hash))?;
|
||||||
.get(&hash)
|
|
||||||
.ok_or(MerkleError::RootNotInStore(hash.into()))?;
|
|
||||||
|
|
||||||
for bit in index.bit_iterator().rev() {
|
for i in (0..index.depth()).rev() {
|
||||||
let node = self
|
let node = self.nodes.get(&hash).ok_or(MerkleError::NodeNotInStore(hash, index))?;
|
||||||
.nodes
|
|
||||||
.get(&hash)
|
let bit = (index.value() >> i) & 1;
|
||||||
.ok_or(MerkleError::NodeNotInStore(hash.into(), index))?;
|
hash = if bit == 0 { node.left } else { node.right }
|
||||||
hash = if bit { node.right } else { node.left }
|
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(hash.into())
|
Ok(hash)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Returns the node at the specified `index` and its opening to the `root`.
|
/// Returns the node at the specified `index` and its opening to the `root`.
|
||||||
@@ -184,31 +144,27 @@ impl MerkleStore {
|
|||||||
/// The path starts at the sibling of the target leaf.
|
/// The path starts at the sibling of the target leaf.
|
||||||
///
|
///
|
||||||
/// # Errors
|
/// # Errors
|
||||||
///
|
|
||||||
/// This method can return the following errors:
|
/// This method can return the following errors:
|
||||||
/// - `RootNotInStore` if the `root` is not present in the store.
|
/// - `RootNotInStore` if the `root` is not present in the store.
|
||||||
/// - `NodeNotInStore` if a node needed to traverse from `root` to `index` is not present in the store.
|
/// - `NodeNotInStore` if a node needed to traverse from `root` to `index` is not present in
|
||||||
pub fn get_path(&self, root: Word, index: NodeIndex) -> Result<ValuePath, MerkleError> {
|
/// the store.
|
||||||
let mut hash: RpoDigest = root.into();
|
pub fn get_path(&self, root: RpoDigest, index: NodeIndex) -> Result<ValuePath, MerkleError> {
|
||||||
|
let mut hash = root;
|
||||||
let mut path = Vec::with_capacity(index.depth().into());
|
let mut path = Vec::with_capacity(index.depth().into());
|
||||||
|
|
||||||
// corner case: check the root is in the store when called with index `NodeIndex::root()`
|
// corner case: check the root is in the store when called with index `NodeIndex::root()`
|
||||||
self.nodes
|
self.nodes.get(&hash).ok_or(MerkleError::RootNotInStore(hash))?;
|
||||||
.get(&hash)
|
|
||||||
.ok_or(MerkleError::RootNotInStore(hash.into()))?;
|
|
||||||
|
|
||||||
for bit in index.bit_iterator().rev() {
|
for i in (0..index.depth()).rev() {
|
||||||
let node = self
|
let node = self.nodes.get(&hash).ok_or(MerkleError::NodeNotInStore(hash, index))?;
|
||||||
.nodes
|
|
||||||
.get(&hash)
|
|
||||||
.ok_or(MerkleError::NodeNotInStore(hash.into(), index))?;
|
|
||||||
|
|
||||||
hash = if bit {
|
let bit = (index.value() >> i) & 1;
|
||||||
path.push(node.left.into());
|
hash = if bit == 0 {
|
||||||
node.right
|
path.push(node.right);
|
||||||
} else {
|
|
||||||
path.push(node.right.into());
|
|
||||||
node.left
|
node.left
|
||||||
|
} else {
|
||||||
|
path.push(node.left);
|
||||||
|
node.right
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -216,90 +172,114 @@ impl MerkleStore {
|
|||||||
path.reverse();
|
path.reverse();
|
||||||
|
|
||||||
Ok(ValuePath {
|
Ok(ValuePath {
|
||||||
value: hash.into(),
|
value: hash,
|
||||||
path: MerklePath::new(path),
|
path: MerklePath::new(path),
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Reconstructs a path from the root until a leaf or empty node and returns its depth.
|
||||||
|
///
|
||||||
|
/// The `tree_depth` parameter defines up to which depth the tree will be traversed, starting
|
||||||
|
/// from `root`. The maximum value the argument accepts is [u64::BITS].
|
||||||
|
///
|
||||||
|
/// The traversed path from leaf to root will start at the least significant bit of `index`,
|
||||||
|
/// and will be executed for `tree_depth` bits.
|
||||||
|
///
|
||||||
|
/// # Errors
|
||||||
|
/// Will return an error if:
|
||||||
|
/// - The provided root is not found.
|
||||||
|
/// - The path from the root continues to a depth greater than `tree_depth`.
|
||||||
|
/// - The provided `tree_depth` is greater than `64.
|
||||||
|
/// - The provided `index` is not valid for a depth equivalent to `tree_depth`. For more
|
||||||
|
/// information, check [NodeIndex::new].
|
||||||
|
pub fn get_leaf_depth(
|
||||||
|
&self,
|
||||||
|
root: RpoDigest,
|
||||||
|
tree_depth: u8,
|
||||||
|
index: u64,
|
||||||
|
) -> Result<u8, MerkleError> {
|
||||||
|
// validate depth and index
|
||||||
|
if tree_depth > 64 {
|
||||||
|
return Err(MerkleError::DepthTooBig(tree_depth as u64));
|
||||||
|
}
|
||||||
|
NodeIndex::new(tree_depth, index)?;
|
||||||
|
|
||||||
|
// it's not illegal to have a maximum depth of `0`; we should just return the root in that
|
||||||
|
// case. this check will simplify the implementation as we could overflow bits for depth
|
||||||
|
// `0`.
|
||||||
|
if tree_depth == 0 {
|
||||||
|
return Ok(0);
|
||||||
|
}
|
||||||
|
|
||||||
|
// check if the root exists, providing the proper error report if it doesn't
|
||||||
|
let empty = EmptySubtreeRoots::empty_hashes(tree_depth);
|
||||||
|
let mut hash = root;
|
||||||
|
if !self.nodes.contains_key(&hash) {
|
||||||
|
return Err(MerkleError::RootNotInStore(hash));
|
||||||
|
}
|
||||||
|
|
||||||
|
// we traverse from root to leaf, so the path is reversed
|
||||||
|
let mut path = (index << (64 - tree_depth)).reverse_bits();
|
||||||
|
|
||||||
|
// iterate every depth and reconstruct the path from root to leaf
|
||||||
|
for depth in 0..tree_depth {
|
||||||
|
// we short-circuit if an empty node has been found
|
||||||
|
if hash == empty[depth as usize] {
|
||||||
|
return Ok(depth);
|
||||||
|
}
|
||||||
|
|
||||||
|
// fetch the children pair, mapped by its parent hash
|
||||||
|
let children = match self.nodes.get(&hash) {
|
||||||
|
Some(node) => node,
|
||||||
|
None => return Ok(depth),
|
||||||
|
};
|
||||||
|
|
||||||
|
// traverse down
|
||||||
|
hash = if path & 1 == 0 { children.left } else { children.right };
|
||||||
|
path >>= 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
// at max depth assert it doesn't have sub-trees
|
||||||
|
if self.nodes.contains_key(&hash) {
|
||||||
|
return Err(MerkleError::DepthTooBig(tree_depth as u64 + 1));
|
||||||
|
}
|
||||||
|
|
||||||
|
// depleted bits; return max depth
|
||||||
|
Ok(tree_depth)
|
||||||
|
}
|
||||||
|
|
||||||
|
// DATA EXTRACTORS
|
||||||
|
// --------------------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Returns a subset of this Merkle store such that the returned Merkle store contains all
|
||||||
|
/// nodes which are descendants of the specified roots.
|
||||||
|
///
|
||||||
|
/// The roots for which no descendants exist in this Merkle store are ignored.
|
||||||
|
pub fn subset<I, R>(&self, roots: I) -> MerkleStore<T>
|
||||||
|
where
|
||||||
|
I: Iterator<Item = R>,
|
||||||
|
R: Borrow<RpoDigest>,
|
||||||
|
{
|
||||||
|
let mut store = MerkleStore::new();
|
||||||
|
for root in roots {
|
||||||
|
let root = *root.borrow();
|
||||||
|
store.clone_tree_from(root, self);
|
||||||
|
}
|
||||||
|
store
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Iterator over the inner nodes of the [MerkleStore].
|
||||||
|
pub fn inner_nodes(&self) -> impl Iterator<Item = InnerNodeInfo> + '_ {
|
||||||
|
self.nodes.iter().map(|(r, n)| InnerNodeInfo {
|
||||||
|
value: *r,
|
||||||
|
left: n.left,
|
||||||
|
right: n.right,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
// STATE MUTATORS
|
// STATE MUTATORS
|
||||||
// --------------------------------------------------------------------------------------------
|
// --------------------------------------------------------------------------------------------
|
||||||
|
|
||||||
/// Adds all the nodes of a Merkle tree represented by `leaves`.
|
|
||||||
///
|
|
||||||
/// This will instantiate a Merkle tree using `leaves` and include all the nodes into the
|
|
||||||
/// store.
|
|
||||||
///
|
|
||||||
/// # Errors
|
|
||||||
///
|
|
||||||
/// This method may return the following errors:
|
|
||||||
/// - `DepthTooSmall` if leaves is empty or contains only 1 element
|
|
||||||
/// - `NumLeavesNotPowerOfTwo` if the number of leaves is not a power-of-two
|
|
||||||
pub fn add_merkle_tree<I>(&mut self, leaves: I) -> Result<Word, MerkleError>
|
|
||||||
where
|
|
||||||
I: IntoIterator<Item = Word>,
|
|
||||||
{
|
|
||||||
let leaves: Vec<_> = leaves.into_iter().collect();
|
|
||||||
if leaves.len() < 2 {
|
|
||||||
return Err(MerkleError::DepthTooSmall(leaves.len() as u8));
|
|
||||||
}
|
|
||||||
|
|
||||||
let layers = leaves.len().ilog2();
|
|
||||||
let tree = MerkleTree::new(leaves)?;
|
|
||||||
|
|
||||||
let mut depth = 0;
|
|
||||||
let mut parent_offset = 1;
|
|
||||||
let mut child_offset = 2;
|
|
||||||
while depth < layers {
|
|
||||||
let layer_size = 1usize << depth;
|
|
||||||
for _ in 0..layer_size {
|
|
||||||
// merkle tree is using level form representation, so left and right siblings are
|
|
||||||
// next to each other
|
|
||||||
let left = tree.nodes[child_offset];
|
|
||||||
let right = tree.nodes[child_offset + 1];
|
|
||||||
self.nodes.insert(
|
|
||||||
tree.nodes[parent_offset].into(),
|
|
||||||
Node {
|
|
||||||
left: left.into(),
|
|
||||||
right: right.into(),
|
|
||||||
},
|
|
||||||
);
|
|
||||||
parent_offset += 1;
|
|
||||||
child_offset += 2;
|
|
||||||
}
|
|
||||||
depth += 1;
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(tree.nodes[1])
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Adds all the nodes of a Sparse Merkle tree represented by `entries`.
|
|
||||||
///
|
|
||||||
/// This will instantiate a Sparse Merkle tree using `entries` and include all the nodes into
|
|
||||||
/// the store.
|
|
||||||
///
|
|
||||||
/// # Errors
|
|
||||||
///
|
|
||||||
/// This will return `InvalidEntriesCount` if the length of `entries` is not `63`.
|
|
||||||
pub fn add_sparse_merkle_tree<R, I>(&mut self, entries: R) -> Result<Word, MerkleError>
|
|
||||||
where
|
|
||||||
R: IntoIterator<IntoIter = I>,
|
|
||||||
I: Iterator<Item = (u64, Word)> + ExactSizeIterator,
|
|
||||||
{
|
|
||||||
let smt = SimpleSmt::new(SimpleSmt::MAX_DEPTH)?.with_leaves(entries)?;
|
|
||||||
for branch in smt.store.branches.values() {
|
|
||||||
let parent = Rpo256::merge(&[branch.left, branch.right]);
|
|
||||||
self.nodes.insert(
|
|
||||||
parent,
|
|
||||||
Node {
|
|
||||||
left: branch.left,
|
|
||||||
right: branch.right,
|
|
||||||
},
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(smt.root())
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Adds all the nodes of a Merkle path represented by `path`, opening to `node`. Returns the
|
/// Adds all the nodes of a Merkle path represented by `path`, opening to `node`. Returns the
|
||||||
/// new root.
|
/// new root.
|
||||||
///
|
///
|
||||||
@@ -307,31 +287,21 @@ impl MerkleStore {
|
|||||||
/// include all the nodes into the store.
|
/// include all the nodes into the store.
|
||||||
pub fn add_merkle_path(
|
pub fn add_merkle_path(
|
||||||
&mut self,
|
&mut self,
|
||||||
index_value: u64,
|
index: u64,
|
||||||
mut node: Word,
|
node: RpoDigest,
|
||||||
path: MerklePath,
|
path: MerklePath,
|
||||||
) -> Result<Word, MerkleError> {
|
) -> Result<RpoDigest, MerkleError> {
|
||||||
let mut index = NodeIndex::new(self.nodes.len() as u8, index_value);
|
let root = path.inner_nodes(index, node)?.fold(RpoDigest::default(), |_, node| {
|
||||||
|
let value: RpoDigest = node.value;
|
||||||
|
let left: RpoDigest = node.left;
|
||||||
|
let right: RpoDigest = node.right;
|
||||||
|
|
||||||
for sibling in path {
|
debug_assert_eq!(Rpo256::merge(&[left, right]), value);
|
||||||
let (left, right) = match index.is_value_odd() {
|
self.nodes.insert(value, StoreNode { left, right });
|
||||||
true => (sibling, node),
|
|
||||||
false => (node, sibling),
|
|
||||||
};
|
|
||||||
let parent = Rpo256::merge(&[left.into(), right.into()]);
|
|
||||||
self.nodes.insert(
|
|
||||||
parent,
|
|
||||||
Node {
|
|
||||||
left: left.into(),
|
|
||||||
right: right.into(),
|
|
||||||
},
|
|
||||||
);
|
|
||||||
|
|
||||||
index.move_up();
|
node.value
|
||||||
node = parent.into();
|
});
|
||||||
}
|
Ok(root)
|
||||||
|
|
||||||
Ok(node)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Adds all the nodes of multiple Merkle paths into the store.
|
/// Adds all the nodes of multiple Merkle paths into the store.
|
||||||
@@ -340,39 +310,23 @@ impl MerkleStore {
|
|||||||
/// into the store.
|
/// into the store.
|
||||||
///
|
///
|
||||||
/// For further reference, check [MerkleStore::add_merkle_path].
|
/// For further reference, check [MerkleStore::add_merkle_path].
|
||||||
///
|
pub fn add_merkle_paths<I>(&mut self, paths: I) -> Result<(), MerkleError>
|
||||||
/// # Errors
|
|
||||||
///
|
|
||||||
/// Every path must resolve to the same root, otherwise this will return an `ConflictingRoots`
|
|
||||||
/// error.
|
|
||||||
pub fn add_merkle_paths<I>(&mut self, paths: I) -> Result<Word, MerkleError>
|
|
||||||
where
|
where
|
||||||
I: IntoIterator<Item = (u64, Word, MerklePath)>,
|
I: IntoIterator<Item = (u64, RpoDigest, MerklePath)>,
|
||||||
{
|
{
|
||||||
let paths: Vec<(u64, Word, MerklePath)> = paths.into_iter().collect();
|
for (index_value, node, path) in paths.into_iter() {
|
||||||
|
|
||||||
let roots: BTreeSet<RpoDigest> = paths
|
|
||||||
.iter()
|
|
||||||
.map(|(index, node, path)| path.compute_root(*index, *node).into())
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
if roots.len() != 1 {
|
|
||||||
return Err(MerkleError::ConflictingRoots(
|
|
||||||
roots.iter().map(|v| Word::from(*v)).collect(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
|
|
||||||
for (index_value, node, path) in paths {
|
|
||||||
self.add_merkle_path(index_value, node, path)?;
|
self.add_merkle_path(index_value, node, path)?;
|
||||||
}
|
}
|
||||||
|
Ok(())
|
||||||
Ok(roots.iter().next().unwrap().into())
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Appends the provided [MerklePathSet] into the store.
|
/// Appends the provided [MerklePathSet] into the store.
|
||||||
///
|
///
|
||||||
/// For further reference, check [MerkleStore::add_merkle_path].
|
/// For further reference, check [MerkleStore::add_merkle_path].
|
||||||
pub fn add_merkle_path_set(&mut self, path_set: &MerklePathSet) -> Result<Word, MerkleError> {
|
pub fn add_merkle_path_set(
|
||||||
|
&mut self,
|
||||||
|
path_set: &MerklePathSet,
|
||||||
|
) -> Result<RpoDigest, MerkleError> {
|
||||||
let root = path_set.root();
|
let root = path_set.root();
|
||||||
for (index, path) in path_set.to_paths() {
|
for (index, path) in path_set.to_paths() {
|
||||||
self.add_merkle_path(index, path.value, path.path)?;
|
self.add_merkle_path(index, path.value, path.path)?;
|
||||||
@@ -383,15 +337,15 @@ impl MerkleStore {
|
|||||||
/// Sets a node to `value`.
|
/// Sets a node to `value`.
|
||||||
///
|
///
|
||||||
/// # Errors
|
/// # Errors
|
||||||
///
|
|
||||||
/// This method can return the following errors:
|
/// This method can return the following errors:
|
||||||
/// - `RootNotInStore` if the `root` is not present in the store.
|
/// - `RootNotInStore` if the `root` is not present in the store.
|
||||||
/// - `NodeNotInStore` if a node needed to traverse from `root` to `index` is not present in the store.
|
/// - `NodeNotInStore` if a node needed to traverse from `root` to `index` is not present in
|
||||||
|
/// the store.
|
||||||
pub fn set_node(
|
pub fn set_node(
|
||||||
&mut self,
|
&mut self,
|
||||||
mut root: Word,
|
mut root: RpoDigest,
|
||||||
index: NodeIndex,
|
index: NodeIndex,
|
||||||
value: Word,
|
value: RpoDigest,
|
||||||
) -> Result<RootPath, MerkleError> {
|
) -> Result<RootPath, MerkleError> {
|
||||||
let node = value;
|
let node = value;
|
||||||
let ValuePath { value, path } = self.get_path(root, index)?;
|
let ValuePath { value, path } = self.get_path(root, index)?;
|
||||||
@@ -404,54 +358,141 @@ impl MerkleStore {
|
|||||||
Ok(RootPath { root, path })
|
Ok(RootPath { root, path })
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn merge_roots(&mut self, root1: Word, root2: Word) -> Result<Word, MerkleError> {
|
/// Merges two elements and adds the resulting node into the store.
|
||||||
let root1: RpoDigest = root1.into();
|
///
|
||||||
let root2: RpoDigest = root2.into();
|
/// Merges arbitrary values. They may be leafs, nodes, or a mixture of both.
|
||||||
|
pub fn merge_roots(
|
||||||
if !self.nodes.contains_key(&root1) {
|
&mut self,
|
||||||
Err(MerkleError::NodeNotInStore(
|
left_root: RpoDigest,
|
||||||
root1.into(),
|
right_root: RpoDigest,
|
||||||
NodeIndex::new(0, 0),
|
) -> Result<RpoDigest, MerkleError> {
|
||||||
))
|
let parent = Rpo256::merge(&[left_root, right_root]);
|
||||||
} else if !self.nodes.contains_key(&root1) {
|
|
||||||
Err(MerkleError::NodeNotInStore(
|
|
||||||
root2.into(),
|
|
||||||
NodeIndex::new(0, 0),
|
|
||||||
))
|
|
||||||
} else {
|
|
||||||
let parent: Word = Rpo256::merge(&[root1, root2]).into();
|
|
||||||
self.nodes.insert(
|
self.nodes.insert(
|
||||||
parent.into(),
|
parent,
|
||||||
Node {
|
StoreNode {
|
||||||
left: root1,
|
left: left_root,
|
||||||
right: root2,
|
right: right_root,
|
||||||
},
|
},
|
||||||
);
|
);
|
||||||
|
|
||||||
Ok(parent)
|
Ok(parent)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// DESTRUCTURING
|
||||||
|
// --------------------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Returns the inner storage of this MerkleStore while consuming `self`.
|
||||||
|
pub fn into_inner(self) -> T {
|
||||||
|
self.nodes
|
||||||
|
}
|
||||||
|
|
||||||
|
// HELPER METHODS
|
||||||
|
// --------------------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Recursively clones a tree with the specified root from the specified source into self.
|
||||||
|
///
|
||||||
|
/// If the source store does not contain a tree with the specified root, this is a noop.
|
||||||
|
fn clone_tree_from(&mut self, root: RpoDigest, source: &Self) {
|
||||||
|
// process the node only if it is in the source
|
||||||
|
if let Some(node) = source.nodes.get(&root) {
|
||||||
|
// if the node has already been inserted, no need to process it further as all of its
|
||||||
|
// descendants should be already cloned from the source store
|
||||||
|
if self.nodes.insert(root, *node).is_none() {
|
||||||
|
self.clone_tree_from(node.left, source);
|
||||||
|
self.clone_tree_from(node.right, source);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// CONVERSIONS
|
||||||
|
// ================================================================================================
|
||||||
|
|
||||||
|
impl<T: KvMap<RpoDigest, StoreNode>> From<&MerkleTree> for MerkleStore<T> {
|
||||||
|
fn from(value: &MerkleTree) -> Self {
|
||||||
|
let nodes = combine_nodes_with_empty_hashes(value.inner_nodes()).collect();
|
||||||
|
Self { nodes }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<T: KvMap<RpoDigest, StoreNode>> From<&SimpleSmt> for MerkleStore<T> {
|
||||||
|
fn from(value: &SimpleSmt) -> Self {
|
||||||
|
let nodes = combine_nodes_with_empty_hashes(value.inner_nodes()).collect();
|
||||||
|
Self { nodes }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<T: KvMap<RpoDigest, StoreNode>> From<&Mmr> for MerkleStore<T> {
|
||||||
|
fn from(value: &Mmr) -> Self {
|
||||||
|
let nodes = combine_nodes_with_empty_hashes(value.inner_nodes()).collect();
|
||||||
|
Self { nodes }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<T: KvMap<RpoDigest, StoreNode>> From<&TieredSmt> for MerkleStore<T> {
|
||||||
|
fn from(value: &TieredSmt) -> Self {
|
||||||
|
let nodes = combine_nodes_with_empty_hashes(value.inner_nodes()).collect();
|
||||||
|
Self { nodes }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<T: KvMap<RpoDigest, StoreNode>> From<T> for MerkleStore<T> {
|
||||||
|
fn from(values: T) -> Self {
|
||||||
|
let nodes = values.into_iter().chain(empty_hashes().into_iter()).collect();
|
||||||
|
Self { nodes }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<T: KvMap<RpoDigest, StoreNode>> FromIterator<InnerNodeInfo> for MerkleStore<T> {
|
||||||
|
fn from_iter<I: IntoIterator<Item = InnerNodeInfo>>(iter: I) -> Self {
|
||||||
|
let nodes = combine_nodes_with_empty_hashes(iter.into_iter()).collect();
|
||||||
|
Self { nodes }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<T: KvMap<RpoDigest, StoreNode>> FromIterator<(RpoDigest, StoreNode)> for MerkleStore<T> {
|
||||||
|
fn from_iter<I: IntoIterator<Item = (RpoDigest, StoreNode)>>(iter: I) -> Self {
|
||||||
|
let nodes = iter.into_iter().chain(empty_hashes().into_iter()).collect();
|
||||||
|
Self { nodes }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ITERATORS
|
||||||
|
// ================================================================================================
|
||||||
|
|
||||||
|
impl<T: KvMap<RpoDigest, StoreNode>> Extend<InnerNodeInfo> for MerkleStore<T> {
|
||||||
|
fn extend<I: IntoIterator<Item = InnerNodeInfo>>(&mut self, iter: I) {
|
||||||
|
self.nodes.extend(iter.into_iter().map(|info| {
|
||||||
|
(
|
||||||
|
info.value,
|
||||||
|
StoreNode {
|
||||||
|
left: info.left,
|
||||||
|
right: info.right,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
}));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// SERIALIZATION
|
// SERIALIZATION
|
||||||
// ================================================================================================
|
// ================================================================================================
|
||||||
|
|
||||||
impl Serializable for Node {
|
impl Serializable for StoreNode {
|
||||||
fn write_into<W: ByteWriter>(&self, target: &mut W) {
|
fn write_into<W: ByteWriter>(&self, target: &mut W) {
|
||||||
self.left.write_into(target);
|
self.left.write_into(target);
|
||||||
self.right.write_into(target);
|
self.right.write_into(target);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Deserializable for Node {
|
impl Deserializable for StoreNode {
|
||||||
fn read_from<R: ByteReader>(source: &mut R) -> Result<Self, DeserializationError> {
|
fn read_from<R: ByteReader>(source: &mut R) -> Result<Self, DeserializationError> {
|
||||||
let left = RpoDigest::read_from(source)?;
|
let left = RpoDigest::read_from(source)?;
|
||||||
let right = RpoDigest::read_from(source)?;
|
let right = RpoDigest::read_from(source)?;
|
||||||
Ok(Node { left, right })
|
Ok(StoreNode { left, right })
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Serializable for MerkleStore {
|
impl<T: KvMap<RpoDigest, StoreNode>> Serializable for MerkleStore<T> {
|
||||||
fn write_into<W: ByteWriter>(&self, target: &mut W) {
|
fn write_into<W: ByteWriter>(&self, target: &mut W) {
|
||||||
target.write_u64(self.nodes.len() as u64);
|
target.write_u64(self.nodes.len() as u64);
|
||||||
|
|
||||||
@@ -462,17 +503,55 @@ impl Serializable for MerkleStore {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Deserializable for MerkleStore {
|
impl<T: KvMap<RpoDigest, StoreNode>> Deserializable for MerkleStore<T> {
|
||||||
fn read_from<R: ByteReader>(source: &mut R) -> Result<Self, DeserializationError> {
|
fn read_from<R: ByteReader>(source: &mut R) -> Result<Self, DeserializationError> {
|
||||||
let len = source.read_u64()?;
|
let len = source.read_u64()?;
|
||||||
let mut nodes: BTreeMap<RpoDigest, Node> = BTreeMap::new();
|
let mut nodes: Vec<(RpoDigest, StoreNode)> = Vec::with_capacity(len as usize);
|
||||||
|
|
||||||
for _ in 0..len {
|
for _ in 0..len {
|
||||||
let key = RpoDigest::read_from(source)?;
|
let key = RpoDigest::read_from(source)?;
|
||||||
let value = Node::read_from(source)?;
|
let value = StoreNode::read_from(source)?;
|
||||||
nodes.insert(key, value);
|
nodes.push((key, value));
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(MerkleStore { nodes })
|
Ok(nodes.into_iter().collect())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// HELPER FUNCTIONS
|
||||||
|
// ================================================================================================
|
||||||
|
|
||||||
|
/// Creates empty hashes for all the subtrees of a tree with a max depth of 255.
|
||||||
|
fn empty_hashes() -> impl IntoIterator<Item = (RpoDigest, StoreNode)> {
|
||||||
|
let subtrees = EmptySubtreeRoots::empty_hashes(255);
|
||||||
|
subtrees.iter().rev().copied().zip(subtrees.iter().rev().skip(1).copied()).map(
|
||||||
|
|(child, parent)| {
|
||||||
|
(
|
||||||
|
parent,
|
||||||
|
StoreNode {
|
||||||
|
left: child,
|
||||||
|
right: child,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
},
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Consumes an iterator of [InnerNodeInfo] and returns an iterator of `(value, node)` tuples
|
||||||
|
/// which includes the nodes associate with roots of empty subtrees up to a depth of 255.
|
||||||
|
fn combine_nodes_with_empty_hashes(
|
||||||
|
nodes: impl IntoIterator<Item = InnerNodeInfo>,
|
||||||
|
) -> impl Iterator<Item = (RpoDigest, StoreNode)> {
|
||||||
|
nodes
|
||||||
|
.into_iter()
|
||||||
|
.map(|info| {
|
||||||
|
(
|
||||||
|
info.value,
|
||||||
|
StoreNode {
|
||||||
|
left: info.left,
|
||||||
|
right: info.right,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
})
|
||||||
|
.chain(empty_hashes().into_iter())
|
||||||
|
}
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
485
src/merkle/tiered_smt/mod.rs
Normal file
485
src/merkle/tiered_smt/mod.rs
Normal file
@@ -0,0 +1,485 @@
|
|||||||
|
use super::{
|
||||||
|
BTreeMap, BTreeSet, EmptySubtreeRoots, Felt, InnerNodeInfo, MerkleError, MerklePath, NodeIndex,
|
||||||
|
Rpo256, RpoDigest, StarkField, Vec, Word, ZERO,
|
||||||
|
};
|
||||||
|
use core::cmp;
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests;
|
||||||
|
|
||||||
|
// TIERED SPARSE MERKLE TREE
|
||||||
|
// ================================================================================================
|
||||||
|
|
||||||
|
/// Tiered (compacted) Sparse Merkle tree mapping 256-bit keys to 256-bit values. Both keys and
|
||||||
|
/// values are represented by 4 field elements.
|
||||||
|
///
|
||||||
|
/// Leaves in the tree can exist only on specific depths called "tiers". These depths are: 16, 32,
|
||||||
|
/// 48, and 64. Initially, when a tree is empty, it is equivalent to an empty Sparse Merkle tree
|
||||||
|
/// of depth 64 (i.e., leaves at depth 64 are set to [ZERO; 4]). As non-empty values are inserted
|
||||||
|
/// into the tree they are added to the first available tier.
|
||||||
|
///
|
||||||
|
/// For example, when the first key-value is inserted, it will be stored in a node at depth 16
|
||||||
|
/// such that the first 16 bits of the key determine the position of the node at depth 16. If
|
||||||
|
/// another value with a key sharing the same 16-bit prefix is inserted, both values move into
|
||||||
|
/// the next tier (depth 32). This process is repeated until values end up at tier 64. If multiple
|
||||||
|
/// values have keys with a common 64-bit prefix, such key-value pairs are stored in a sorted list
|
||||||
|
/// at the last tier (depth = 64).
|
||||||
|
///
|
||||||
|
/// To differentiate between internal and leaf nodes, node values are computed as follows:
|
||||||
|
/// - Internal nodes: hash(left_child, right_child).
|
||||||
|
/// - Leaf node at depths 16, 32, or 64: hash(rem_key, value, domain=depth).
|
||||||
|
/// - Leaf node at depth 64: hash([rem_key_0, value_0, ..., rem_key_n, value_n, domain=64]).
|
||||||
|
///
|
||||||
|
/// Where rem_key is computed by replacing d most significant bits of the key with zeros where d
|
||||||
|
/// is depth (i.e., for a leaf at depth 16, we replace 16 most significant bits of the key with 0).
|
||||||
|
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||||
|
pub struct TieredSmt {
|
||||||
|
root: RpoDigest,
|
||||||
|
nodes: BTreeMap<NodeIndex, RpoDigest>,
|
||||||
|
upper_leaves: BTreeMap<NodeIndex, RpoDigest>, // node_index |-> key map
|
||||||
|
bottom_leaves: BTreeMap<u64, BottomLeaf>, // leaves of depth 64
|
||||||
|
values: BTreeMap<RpoDigest, Word>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl TieredSmt {
|
||||||
|
// CONSTANTS
|
||||||
|
// --------------------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// The number of levels between tiers.
|
||||||
|
const TIER_SIZE: u8 = 16;
|
||||||
|
|
||||||
|
/// Depths at which leaves can exist in a tiered SMT.
|
||||||
|
const TIER_DEPTHS: [u8; 4] = [16, 32, 48, 64];
|
||||||
|
|
||||||
|
/// Maximum node depth. This is also the bottom tier of the tree.
|
||||||
|
const MAX_DEPTH: u8 = 64;
|
||||||
|
|
||||||
|
/// Value of an empty leaf.
|
||||||
|
pub const EMPTY_VALUE: Word = super::empty_roots::EMPTY_WORD;
|
||||||
|
|
||||||
|
// CONSTRUCTORS
|
||||||
|
// --------------------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Returns a new [TieredSmt] instantiated with the specified key-value pairs.
|
||||||
|
///
|
||||||
|
/// # Errors
|
||||||
|
/// Returns an error if the provided entries contain multiple values for the same key.
|
||||||
|
pub fn with_leaves<R, I>(entries: R) -> Result<Self, MerkleError>
|
||||||
|
where
|
||||||
|
R: IntoIterator<IntoIter = I>,
|
||||||
|
I: Iterator<Item = (RpoDigest, Word)> + ExactSizeIterator,
|
||||||
|
{
|
||||||
|
// create an empty tree
|
||||||
|
let mut tree = Self::default();
|
||||||
|
|
||||||
|
// append leaves to the tree returning an error if a duplicate entry for the same key
|
||||||
|
// is found
|
||||||
|
let mut empty_entries = BTreeSet::new();
|
||||||
|
for (key, value) in entries {
|
||||||
|
let old_value = tree.insert(key, value);
|
||||||
|
if old_value != Self::EMPTY_VALUE || empty_entries.contains(&key) {
|
||||||
|
return Err(MerkleError::DuplicateValuesForKey(key));
|
||||||
|
}
|
||||||
|
// if we've processed an empty entry, add the key to the set of empty entry keys, and
|
||||||
|
// if this key was already in the set, return an error
|
||||||
|
if value == Self::EMPTY_VALUE && !empty_entries.insert(key) {
|
||||||
|
return Err(MerkleError::DuplicateValuesForKey(key));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Ok(tree)
|
||||||
|
}
|
||||||
|
|
||||||
|
// PUBLIC ACCESSORS
|
||||||
|
// --------------------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Returns the root of this Merkle tree.
|
||||||
|
pub const fn root(&self) -> RpoDigest {
|
||||||
|
self.root
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns a node at the specified index.
|
||||||
|
///
|
||||||
|
/// # Errors
|
||||||
|
/// Returns an error if:
|
||||||
|
/// - The specified index depth is 0 or greater than 64.
|
||||||
|
/// - The node with the specified index does not exists in the Merkle tree. This is possible
|
||||||
|
/// when a leaf node with the same index prefix exists at a tier higher than the requested
|
||||||
|
/// node.
|
||||||
|
pub fn get_node(&self, index: NodeIndex) -> Result<RpoDigest, MerkleError> {
|
||||||
|
self.validate_node_access(index)?;
|
||||||
|
Ok(self.get_node_unchecked(&index))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns a Merkle path from the node at the specified index to the root.
|
||||||
|
///
|
||||||
|
/// The node itself is not included in the path.
|
||||||
|
///
|
||||||
|
/// # Errors
|
||||||
|
/// Returns an error if:
|
||||||
|
/// - The specified index depth is 0 or greater than 64.
|
||||||
|
/// - The node with the specified index does not exists in the Merkle tree. This is possible
|
||||||
|
/// when a leaf node with the same index prefix exists at a tier higher than the node to
|
||||||
|
/// which the path is requested.
|
||||||
|
pub fn get_path(&self, mut index: NodeIndex) -> Result<MerklePath, MerkleError> {
|
||||||
|
self.validate_node_access(index)?;
|
||||||
|
|
||||||
|
let mut path = Vec::with_capacity(index.depth() as usize);
|
||||||
|
for _ in 0..index.depth() {
|
||||||
|
let node = self.get_node_unchecked(&index.sibling());
|
||||||
|
path.push(node);
|
||||||
|
index.move_up();
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(path.into())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns the value associated with the specified key.
|
||||||
|
///
|
||||||
|
/// If nothing was inserted into this tree for the specified key, [ZERO; 4] is returned.
|
||||||
|
pub fn get_value(&self, key: RpoDigest) -> Word {
|
||||||
|
match self.values.get(&key) {
|
||||||
|
Some(value) => *value,
|
||||||
|
None => Self::EMPTY_VALUE,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// STATE MUTATORS
|
||||||
|
// --------------------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Inserts the provided value into the tree under the specified key and returns the value
|
||||||
|
/// previously stored under this key.
|
||||||
|
///
|
||||||
|
/// If the value for the specified key was not previously set, [ZERO; 4] is returned.
|
||||||
|
pub fn insert(&mut self, key: RpoDigest, value: Word) -> Word {
|
||||||
|
// insert the value into the key-value map, and if nothing has changed, return
|
||||||
|
let old_value = self.values.insert(key, value).unwrap_or(Self::EMPTY_VALUE);
|
||||||
|
if old_value == value {
|
||||||
|
return old_value;
|
||||||
|
}
|
||||||
|
|
||||||
|
// determine the index for the value node; this index could have 3 different meanings:
|
||||||
|
// - it points to a root of an empty subtree (excluding depth = 64); in this case, we can
|
||||||
|
// replace the node with the value node immediately.
|
||||||
|
// - it points to a node at the bottom tier (i.e., depth = 64); in this case, we need to
|
||||||
|
// process bottom-tier insertion which will be handled by insert_node().
|
||||||
|
// - it points to a leaf node; this node could be a node with the same key or a different
|
||||||
|
// key with a common prefix; in the latter case, we'll need to move the leaf to a lower
|
||||||
|
// tier; for this scenario the `leaf_key` will contain the key of the leaf node
|
||||||
|
let (mut index, leaf_key) = self.get_insert_location(&key);
|
||||||
|
|
||||||
|
// if the returned index points to a leaf, and this leaf is for a different key, we need
|
||||||
|
// to move the leaf to a lower tier
|
||||||
|
if let Some(other_key) = leaf_key {
|
||||||
|
if other_key != key {
|
||||||
|
// determine how far down the tree should we move the existing leaf
|
||||||
|
let common_prefix_len = get_common_prefix_tier(&key, &other_key);
|
||||||
|
let depth = cmp::min(common_prefix_len + Self::TIER_SIZE, Self::MAX_DEPTH);
|
||||||
|
|
||||||
|
// move the leaf to the new location; this requires first removing the existing
|
||||||
|
// index, re-computing node value, and inserting the node at a new location
|
||||||
|
let other_index = key_to_index(&other_key, depth);
|
||||||
|
let other_value = *self.values.get(&other_key).expect("no value for other key");
|
||||||
|
self.upper_leaves.remove(&index).expect("other node key not in map");
|
||||||
|
self.insert_node(other_index, other_key, other_value);
|
||||||
|
|
||||||
|
// the new leaf also needs to move down to the same tier
|
||||||
|
index = key_to_index(&key, depth);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// insert the node and return the old value
|
||||||
|
self.insert_node(index, key, value);
|
||||||
|
old_value
|
||||||
|
}
|
||||||
|
|
||||||
|
// ITERATORS
|
||||||
|
// --------------------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Returns an iterator over all inner nodes of this [TieredSmt] (i.e., nodes not at depths 16
|
||||||
|
/// 32, 48, or 64).
|
||||||
|
///
|
||||||
|
/// The iterator order is unspecified.
|
||||||
|
pub fn inner_nodes(&self) -> impl Iterator<Item = InnerNodeInfo> + '_ {
|
||||||
|
self.nodes.iter().filter_map(|(index, node)| {
|
||||||
|
if is_inner_node(index) {
|
||||||
|
Some(InnerNodeInfo {
|
||||||
|
value: *node,
|
||||||
|
left: self.get_node_unchecked(&index.left_child()),
|
||||||
|
right: self.get_node_unchecked(&index.right_child()),
|
||||||
|
})
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns an iterator over upper leaves (i.e., depth = 16, 32, or 48) for this [TieredSmt].
|
||||||
|
///
|
||||||
|
/// Each yielded item is a (node, key, value) tuple where key is a full un-truncated key (i.e.,
|
||||||
|
/// with key[3] element unmodified).
|
||||||
|
///
|
||||||
|
/// The iterator order is unspecified.
|
||||||
|
pub fn upper_leaves(&self) -> impl Iterator<Item = (RpoDigest, RpoDigest, Word)> + '_ {
|
||||||
|
self.upper_leaves.iter().map(|(index, key)| {
|
||||||
|
let node = self.get_node_unchecked(index);
|
||||||
|
let value = self.get_value(*key);
|
||||||
|
(node, *key, value)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns an iterator over bottom leaves (i.e., depth = 64) of this [TieredSmt].
|
||||||
|
///
|
||||||
|
/// Each yielded item consists of the hash of the leaf and its contents, where contents is
|
||||||
|
/// a vector containing key-value pairs of entries storied in this leaf. Note that keys are
|
||||||
|
/// un-truncated keys (i.e., with key[3] element unmodified).
|
||||||
|
///
|
||||||
|
/// The iterator order is unspecified.
|
||||||
|
pub fn bottom_leaves(&self) -> impl Iterator<Item = (RpoDigest, Vec<(RpoDigest, Word)>)> + '_ {
|
||||||
|
self.bottom_leaves.values().map(|leaf| (leaf.hash(), leaf.contents()))
|
||||||
|
}
|
||||||
|
|
||||||
|
// HELPER METHODS
|
||||||
|
// --------------------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Checks if the specified index is valid in the context of this Merkle tree.
|
||||||
|
///
|
||||||
|
/// # Errors
|
||||||
|
/// Returns an error if:
|
||||||
|
/// - The specified index depth is 0 or greater than 64.
|
||||||
|
/// - The node for the specified index does not exists in the Merkle tree. This is possible
|
||||||
|
/// when an ancestors of the specified index is a leaf node.
|
||||||
|
fn validate_node_access(&self, index: NodeIndex) -> Result<(), MerkleError> {
|
||||||
|
if index.is_root() {
|
||||||
|
return Err(MerkleError::DepthTooSmall(index.depth()));
|
||||||
|
} else if index.depth() > Self::MAX_DEPTH {
|
||||||
|
return Err(MerkleError::DepthTooBig(index.depth() as u64));
|
||||||
|
} else {
|
||||||
|
// make sure that there are no leaf nodes in the ancestors of the index; since leaf
|
||||||
|
// nodes can live at specific depth, we just need to check these depths.
|
||||||
|
let tier = get_index_tier(&index);
|
||||||
|
let mut tier_index = index;
|
||||||
|
for &depth in Self::TIER_DEPTHS[..tier].iter().rev() {
|
||||||
|
tier_index.move_up_to(depth);
|
||||||
|
if self.upper_leaves.contains_key(&tier_index) {
|
||||||
|
return Err(MerkleError::NodeNotInSet(index));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns a node at the specified index. If the node does not exist at this index, a root
|
||||||
|
/// for an empty subtree at the index's depth is returned.
|
||||||
|
///
|
||||||
|
/// Unlike [TieredSmt::get_node()] this does not perform any checks to verify that the returned
|
||||||
|
/// node is valid in the context of this tree.
|
||||||
|
fn get_node_unchecked(&self, index: &NodeIndex) -> RpoDigest {
|
||||||
|
match self.nodes.get(index) {
|
||||||
|
Some(node) => *node,
|
||||||
|
None => EmptySubtreeRoots::empty_hashes(Self::MAX_DEPTH)[index.depth() as usize],
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns an index at which a node for the specified key should be inserted. If a leaf node
|
||||||
|
/// already exists at that index, returns the key associated with that leaf node.
|
||||||
|
///
|
||||||
|
/// In case the index falls into the bottom tier (depth = 64), leaf node key is not returned
|
||||||
|
/// as the bottom tier may contain multiple key-value pairs in the same leaf.
|
||||||
|
fn get_insert_location(&self, key: &RpoDigest) -> (NodeIndex, Option<RpoDigest>) {
|
||||||
|
// traverse the tree from the root down checking nodes at tiers 16, 32, and 48. Return if
|
||||||
|
// a node at any of the tiers is either a leaf or a root of an empty subtree.
|
||||||
|
let mse = Word::from(key)[3].as_int();
|
||||||
|
for depth in (Self::TIER_DEPTHS[0]..Self::MAX_DEPTH).step_by(Self::TIER_SIZE as usize) {
|
||||||
|
let index = NodeIndex::new_unchecked(depth, mse >> (Self::MAX_DEPTH - depth));
|
||||||
|
if let Some(leaf_key) = self.upper_leaves.get(&index) {
|
||||||
|
return (index, Some(*leaf_key));
|
||||||
|
} else if !self.nodes.contains_key(&index) {
|
||||||
|
return (index, None);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// if we got here, that means all of the nodes checked so far are internal nodes, and
|
||||||
|
// the new node would need to be inserted in the bottom tier.
|
||||||
|
let index = NodeIndex::new_unchecked(Self::MAX_DEPTH, mse);
|
||||||
|
(index, None)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Inserts the provided key-value pair at the specified index and updates the root of this
|
||||||
|
/// Merkle tree by recomputing the path to the root.
|
||||||
|
fn insert_node(&mut self, mut index: NodeIndex, key: RpoDigest, value: Word) {
|
||||||
|
let depth = index.depth();
|
||||||
|
|
||||||
|
// insert the key into index-key map and compute the new value of the node
|
||||||
|
let mut node = if index.depth() == Self::MAX_DEPTH {
|
||||||
|
// for the bottom tier, we add the key-value pair to the existing leaf, or create a
|
||||||
|
// new leaf with this key-value pair
|
||||||
|
self.bottom_leaves
|
||||||
|
.entry(index.value())
|
||||||
|
.and_modify(|leaves| leaves.add_value(key, value))
|
||||||
|
.or_insert(BottomLeaf::new(key, value))
|
||||||
|
.hash()
|
||||||
|
} else {
|
||||||
|
// for the upper tiers, we just update the index-key map and compute the value of the
|
||||||
|
// node
|
||||||
|
self.upper_leaves.insert(index, key);
|
||||||
|
// the node value is computed as: hash(remaining_key || value, domain = depth)
|
||||||
|
let remaining_path = get_remaining_path(key, depth.into());
|
||||||
|
Rpo256::merge_in_domain(&[remaining_path, value.into()], depth.into())
|
||||||
|
};
|
||||||
|
|
||||||
|
// insert the node and update the path from the node to the root
|
||||||
|
for _ in 0..index.depth() {
|
||||||
|
self.nodes.insert(index, node);
|
||||||
|
let sibling = self.get_node_unchecked(&index.sibling());
|
||||||
|
node = Rpo256::merge(&index.build_node(node, sibling));
|
||||||
|
index.move_up();
|
||||||
|
}
|
||||||
|
|
||||||
|
// update the root
|
||||||
|
self.nodes.insert(NodeIndex::root(), node);
|
||||||
|
self.root = node;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Default for TieredSmt {
|
||||||
|
fn default() -> Self {
|
||||||
|
Self {
|
||||||
|
root: EmptySubtreeRoots::empty_hashes(Self::MAX_DEPTH)[0],
|
||||||
|
nodes: BTreeMap::new(),
|
||||||
|
upper_leaves: BTreeMap::new(),
|
||||||
|
bottom_leaves: BTreeMap::new(),
|
||||||
|
values: BTreeMap::new(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// HELPER FUNCTIONS
|
||||||
|
// ================================================================================================
|
||||||
|
|
||||||
|
/// Returns the remaining path for the specified key at the specified depth.
|
||||||
|
///
|
||||||
|
/// Remaining path is computed by setting n most significant bits of the key to zeros, where n is
|
||||||
|
/// the specified depth.
|
||||||
|
fn get_remaining_path(key: RpoDigest, depth: u32) -> RpoDigest {
|
||||||
|
let mut key = Word::from(key);
|
||||||
|
key[3] = if depth == 64 {
|
||||||
|
ZERO
|
||||||
|
} else {
|
||||||
|
// remove `depth` bits from the most significant key element
|
||||||
|
((key[3].as_int() << depth) >> depth).into()
|
||||||
|
};
|
||||||
|
key.into()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns index for the specified key inserted at the specified depth.
|
||||||
|
///
|
||||||
|
/// The value for the key is computed by taking n most significant bits from the most significant
|
||||||
|
/// element of the key, where n is the specified depth.
|
||||||
|
fn key_to_index(key: &RpoDigest, depth: u8) -> NodeIndex {
|
||||||
|
let mse = Word::from(key)[3].as_int();
|
||||||
|
let value = match depth {
|
||||||
|
16 | 32 | 48 | 64 => mse >> ((TieredSmt::MAX_DEPTH - depth) as u32),
|
||||||
|
_ => unreachable!("invalid depth: {depth}"),
|
||||||
|
};
|
||||||
|
NodeIndex::new_unchecked(depth, value)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns tiered common prefix length between the most significant elements of the provided keys.
|
||||||
|
///
|
||||||
|
/// Specifically:
|
||||||
|
/// - returns 64 if the most significant elements are equal.
|
||||||
|
/// - returns 48 if the common prefix is between 48 and 63 bits.
|
||||||
|
/// - returns 32 if the common prefix is between 32 and 47 bits.
|
||||||
|
/// - returns 16 if the common prefix is between 16 and 31 bits.
|
||||||
|
/// - returns 0 if the common prefix is fewer than 16 bits.
|
||||||
|
fn get_common_prefix_tier(key1: &RpoDigest, key2: &RpoDigest) -> u8 {
|
||||||
|
let e1 = Word::from(key1)[3].as_int();
|
||||||
|
let e2 = Word::from(key2)[3].as_int();
|
||||||
|
let ex = (e1 ^ e2).leading_zeros() as u8;
|
||||||
|
(ex / 16) * 16
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns a tier for the specified index.
|
||||||
|
///
|
||||||
|
/// The tiers are defined as follows:
|
||||||
|
/// - Tier 0: depth 0 through 16 (inclusive).
|
||||||
|
/// - Tier 1: depth 17 through 32 (inclusive).
|
||||||
|
/// - Tier 2: depth 33 through 48 (inclusive).
|
||||||
|
/// - Tier 3: depth 49 through 64 (inclusive).
|
||||||
|
const fn get_index_tier(index: &NodeIndex) -> usize {
|
||||||
|
debug_assert!(index.depth() <= TieredSmt::MAX_DEPTH, "invalid depth");
|
||||||
|
match index.depth() {
|
||||||
|
0..=16 => 0,
|
||||||
|
17..=32 => 1,
|
||||||
|
33..=48 => 2,
|
||||||
|
_ => 3,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns true if the specified index is an index for an inner node (i.e., the depth is not 16,
|
||||||
|
/// 32, 48, or 64).
|
||||||
|
const fn is_inner_node(index: &NodeIndex) -> bool {
|
||||||
|
!matches!(index.depth(), 16 | 32 | 48 | 64)
|
||||||
|
}
|
||||||
|
|
||||||
|
// BOTTOM LEAF
|
||||||
|
// ================================================================================================
|
||||||
|
|
||||||
|
/// Stores contents of the bottom leaf (i.e., leaf at depth = 64) in a [TieredSmt].
|
||||||
|
///
|
||||||
|
/// Bottom leaf can contain one or more key-value pairs all sharing the same 64-bit key prefix.
|
||||||
|
/// The values are sorted by key to make sure the structure of the leaf is independent of the
|
||||||
|
/// insertion order. This guarantees that a leaf with the same set of key-value pairs always has
|
||||||
|
/// the same hash value.
|
||||||
|
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||||
|
struct BottomLeaf {
|
||||||
|
prefix: u64,
|
||||||
|
values: BTreeMap<[u64; 4], Word>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl BottomLeaf {
|
||||||
|
/// Returns a new [BottomLeaf] with a single key-value pair added.
|
||||||
|
pub fn new(key: RpoDigest, value: Word) -> Self {
|
||||||
|
let prefix = Word::from(key)[3].as_int();
|
||||||
|
let mut values = BTreeMap::new();
|
||||||
|
let key = get_remaining_path(key, TieredSmt::MAX_DEPTH as u32);
|
||||||
|
values.insert(key.into(), value);
|
||||||
|
Self { prefix, values }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Adds a new key-value pair to this leaf.
|
||||||
|
pub fn add_value(&mut self, key: RpoDigest, value: Word) {
|
||||||
|
let key = get_remaining_path(key, TieredSmt::MAX_DEPTH as u32);
|
||||||
|
self.values.insert(key.into(), value);
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Computes a hash of this leaf.
|
||||||
|
pub fn hash(&self) -> RpoDigest {
|
||||||
|
let mut elements = Vec::with_capacity(self.values.len() * 2);
|
||||||
|
for (key, val) in self.values.iter() {
|
||||||
|
key.iter().for_each(|&v| elements.push(Felt::new(v)));
|
||||||
|
elements.extend_from_slice(val.as_slice());
|
||||||
|
}
|
||||||
|
// TODO: hash in domain
|
||||||
|
Rpo256::hash_elements(&elements)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns contents of this leaf as a vector of (key, value) pairs.
|
||||||
|
///
|
||||||
|
/// The keys are returned in their un-truncated form.
|
||||||
|
pub fn contents(&self) -> Vec<(RpoDigest, Word)> {
|
||||||
|
self.values
|
||||||
|
.iter()
|
||||||
|
.map(|(key, val)| {
|
||||||
|
let key = RpoDigest::from([
|
||||||
|
Felt::new(key[0]),
|
||||||
|
Felt::new(key[1]),
|
||||||
|
Felt::new(key[2]),
|
||||||
|
Felt::new(self.prefix),
|
||||||
|
]);
|
||||||
|
(key, *val)
|
||||||
|
})
|
||||||
|
.collect()
|
||||||
|
}
|
||||||
|
}
|
||||||
441
src/merkle/tiered_smt/tests.rs
Normal file
441
src/merkle/tiered_smt/tests.rs
Normal file
@@ -0,0 +1,441 @@
|
|||||||
|
use super::{
|
||||||
|
super::{super::ONE, Felt, MerkleStore, WORD_SIZE, ZERO},
|
||||||
|
get_remaining_path, EmptySubtreeRoots, InnerNodeInfo, NodeIndex, Rpo256, RpoDigest, TieredSmt,
|
||||||
|
Vec, Word,
|
||||||
|
};
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn tsmt_insert_one() {
|
||||||
|
let mut smt = TieredSmt::default();
|
||||||
|
let mut store = MerkleStore::default();
|
||||||
|
|
||||||
|
let raw = 0b_01101001_01101100_00011111_11111111_10010110_10010011_11100000_00000000_u64;
|
||||||
|
let key = RpoDigest::from([ONE, ONE, ONE, Felt::new(raw)]);
|
||||||
|
let value = [ONE; WORD_SIZE];
|
||||||
|
|
||||||
|
// since the tree is empty, the first node will be inserted at depth 16 and the index will be
|
||||||
|
// 16 most significant bits of the key
|
||||||
|
let index = NodeIndex::make(16, raw >> 48);
|
||||||
|
let leaf_node = build_leaf_node(key, value, 16);
|
||||||
|
let tree_root = store.set_node(smt.root(), index, leaf_node).unwrap().root;
|
||||||
|
|
||||||
|
smt.insert(key, value);
|
||||||
|
|
||||||
|
assert_eq!(smt.root(), tree_root);
|
||||||
|
|
||||||
|
// make sure the value was inserted, and the node is at the expected index
|
||||||
|
assert_eq!(smt.get_value(key), value);
|
||||||
|
assert_eq!(smt.get_node(index).unwrap(), leaf_node);
|
||||||
|
|
||||||
|
// make sure the paths we get from the store and the tree match
|
||||||
|
let expected_path = store.get_path(tree_root, index).unwrap();
|
||||||
|
assert_eq!(smt.get_path(index).unwrap(), expected_path.path);
|
||||||
|
|
||||||
|
// make sure inner nodes match
|
||||||
|
let expected_nodes = get_non_empty_nodes(&store);
|
||||||
|
let actual_nodes = smt.inner_nodes().collect::<Vec<_>>();
|
||||||
|
assert_eq!(actual_nodes.len(), expected_nodes.len());
|
||||||
|
actual_nodes.iter().for_each(|node| assert!(expected_nodes.contains(node)));
|
||||||
|
|
||||||
|
// make sure leaves are returned correctly
|
||||||
|
let mut leaves = smt.upper_leaves();
|
||||||
|
assert_eq!(leaves.next(), Some((leaf_node, key, value)));
|
||||||
|
assert_eq!(leaves.next(), None);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn tsmt_insert_two_16() {
|
||||||
|
let mut smt = TieredSmt::default();
|
||||||
|
let mut store = MerkleStore::default();
|
||||||
|
|
||||||
|
// --- insert the first value ---------------------------------------------
|
||||||
|
let raw_a = 0b_10101010_10101010_00011111_11111111_10010110_10010011_11100000_00000000_u64;
|
||||||
|
let key_a = RpoDigest::from([ONE, ONE, ONE, Felt::new(raw_a)]);
|
||||||
|
let val_a = [ONE; WORD_SIZE];
|
||||||
|
smt.insert(key_a, val_a);
|
||||||
|
|
||||||
|
// --- insert the second value --------------------------------------------
|
||||||
|
// the key for this value has the same 16-bit prefix as the key for the first value,
|
||||||
|
// thus, on insertions, both values should be pushed to depth 32 tier
|
||||||
|
let raw_b = 0b_10101010_10101010_10011111_11111111_10010110_10010011_11100000_00000000_u64;
|
||||||
|
let key_b = RpoDigest::from([ONE, ONE, ONE, Felt::new(raw_b)]);
|
||||||
|
let val_b = [Felt::new(2); WORD_SIZE];
|
||||||
|
smt.insert(key_b, val_b);
|
||||||
|
|
||||||
|
// --- build Merkle store with equivalent data ----------------------------
|
||||||
|
let mut tree_root = get_init_root();
|
||||||
|
let index_a = NodeIndex::make(32, raw_a >> 32);
|
||||||
|
let leaf_node_a = build_leaf_node(key_a, val_a, 32);
|
||||||
|
tree_root = store.set_node(tree_root, index_a, leaf_node_a).unwrap().root;
|
||||||
|
|
||||||
|
let index_b = NodeIndex::make(32, raw_b >> 32);
|
||||||
|
let leaf_node_b = build_leaf_node(key_b, val_b, 32);
|
||||||
|
tree_root = store.set_node(tree_root, index_b, leaf_node_b).unwrap().root;
|
||||||
|
|
||||||
|
// --- verify that data is consistent between store and tree --------------
|
||||||
|
|
||||||
|
assert_eq!(smt.root(), tree_root);
|
||||||
|
|
||||||
|
assert_eq!(smt.get_value(key_a), val_a);
|
||||||
|
assert_eq!(smt.get_node(index_a).unwrap(), leaf_node_a);
|
||||||
|
let expected_path = store.get_path(tree_root, index_a).unwrap().path;
|
||||||
|
assert_eq!(smt.get_path(index_a).unwrap(), expected_path);
|
||||||
|
|
||||||
|
assert_eq!(smt.get_value(key_b), val_b);
|
||||||
|
assert_eq!(smt.get_node(index_b).unwrap(), leaf_node_b);
|
||||||
|
let expected_path = store.get_path(tree_root, index_b).unwrap().path;
|
||||||
|
assert_eq!(smt.get_path(index_b).unwrap(), expected_path);
|
||||||
|
|
||||||
|
// make sure inner nodes match - the store contains more entries because it keeps track of
|
||||||
|
// all prior state - so, we don't check that the number of inner nodes is the same in both
|
||||||
|
let expected_nodes = get_non_empty_nodes(&store);
|
||||||
|
let actual_nodes = smt.inner_nodes().collect::<Vec<_>>();
|
||||||
|
actual_nodes.iter().for_each(|node| assert!(expected_nodes.contains(node)));
|
||||||
|
|
||||||
|
// make sure leaves are returned correctly
|
||||||
|
let mut leaves = smt.upper_leaves();
|
||||||
|
assert_eq!(leaves.next(), Some((leaf_node_a, key_a, val_a)));
|
||||||
|
assert_eq!(leaves.next(), Some((leaf_node_b, key_b, val_b)));
|
||||||
|
assert_eq!(leaves.next(), None);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn tsmt_insert_two_32() {
|
||||||
|
let mut smt = TieredSmt::default();
|
||||||
|
let mut store = MerkleStore::default();
|
||||||
|
|
||||||
|
// --- insert the first value ---------------------------------------------
|
||||||
|
let raw_a = 0b_10101010_10101010_00011111_11111111_10010110_10010011_11100000_00000000_u64;
|
||||||
|
let key_a = RpoDigest::from([ONE, ONE, ONE, Felt::new(raw_a)]);
|
||||||
|
let val_a = [ONE; WORD_SIZE];
|
||||||
|
smt.insert(key_a, val_a);
|
||||||
|
|
||||||
|
// --- insert the second value --------------------------------------------
|
||||||
|
// the key for this value has the same 32-bit prefix as the key for the first value,
|
||||||
|
// thus, on insertions, both values should be pushed to depth 48 tier
|
||||||
|
let raw_b = 0b_10101010_10101010_00011111_11111111_00010110_10010011_11100000_00000000_u64;
|
||||||
|
let key_b = RpoDigest::from([ONE, ONE, ONE, Felt::new(raw_b)]);
|
||||||
|
let val_b = [Felt::new(2); WORD_SIZE];
|
||||||
|
smt.insert(key_b, val_b);
|
||||||
|
|
||||||
|
// --- build Merkle store with equivalent data ----------------------------
|
||||||
|
let mut tree_root = get_init_root();
|
||||||
|
let index_a = NodeIndex::make(48, raw_a >> 16);
|
||||||
|
let leaf_node_a = build_leaf_node(key_a, val_a, 48);
|
||||||
|
tree_root = store.set_node(tree_root, index_a, leaf_node_a).unwrap().root;
|
||||||
|
|
||||||
|
let index_b = NodeIndex::make(48, raw_b >> 16);
|
||||||
|
let leaf_node_b = build_leaf_node(key_b, val_b, 48);
|
||||||
|
tree_root = store.set_node(tree_root, index_b, leaf_node_b).unwrap().root;
|
||||||
|
|
||||||
|
// --- verify that data is consistent between store and tree --------------
|
||||||
|
|
||||||
|
assert_eq!(smt.root(), tree_root);
|
||||||
|
|
||||||
|
assert_eq!(smt.get_value(key_a), val_a);
|
||||||
|
assert_eq!(smt.get_node(index_a).unwrap(), leaf_node_a);
|
||||||
|
let expected_path = store.get_path(tree_root, index_a).unwrap().path;
|
||||||
|
assert_eq!(smt.get_path(index_a).unwrap(), expected_path);
|
||||||
|
|
||||||
|
assert_eq!(smt.get_value(key_b), val_b);
|
||||||
|
assert_eq!(smt.get_node(index_b).unwrap(), leaf_node_b);
|
||||||
|
let expected_path = store.get_path(tree_root, index_b).unwrap().path;
|
||||||
|
assert_eq!(smt.get_path(index_b).unwrap(), expected_path);
|
||||||
|
|
||||||
|
// make sure inner nodes match - the store contains more entries because it keeps track of
|
||||||
|
// all prior state - so, we don't check that the number of inner nodes is the same in both
|
||||||
|
let expected_nodes = get_non_empty_nodes(&store);
|
||||||
|
let actual_nodes = smt.inner_nodes().collect::<Vec<_>>();
|
||||||
|
actual_nodes.iter().for_each(|node| assert!(expected_nodes.contains(node)));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn tsmt_insert_three() {
|
||||||
|
let mut smt = TieredSmt::default();
|
||||||
|
let mut store = MerkleStore::default();
|
||||||
|
|
||||||
|
// --- insert the first value ---------------------------------------------
|
||||||
|
let raw_a = 0b_10101010_10101010_00011111_11111111_10010110_10010011_11100000_00000000_u64;
|
||||||
|
let key_a = RpoDigest::from([ONE, ONE, ONE, Felt::new(raw_a)]);
|
||||||
|
let val_a = [ONE; WORD_SIZE];
|
||||||
|
smt.insert(key_a, val_a);
|
||||||
|
|
||||||
|
// --- insert the second value --------------------------------------------
|
||||||
|
// the key for this value has the same 16-bit prefix as the key for the first value,
|
||||||
|
// thus, on insertions, both values should be pushed to depth 32 tier
|
||||||
|
let raw_b = 0b_10101010_10101010_10011111_11111111_10010110_10010011_11100000_00000000_u64;
|
||||||
|
let key_b = RpoDigest::from([ONE, ONE, ONE, Felt::new(raw_b)]);
|
||||||
|
let val_b = [Felt::new(2); WORD_SIZE];
|
||||||
|
smt.insert(key_b, val_b);
|
||||||
|
|
||||||
|
// --- insert the third value ---------------------------------------------
|
||||||
|
// the key for this value has the same 16-bit prefix as the keys for the first two,
|
||||||
|
// values; thus, on insertions, it will be inserted into depth 32 tier, but will not
|
||||||
|
// affect locations of the other two values
|
||||||
|
let raw_c = 0b_10101010_10101010_11011111_11111111_10010110_10010011_11100000_00000000_u64;
|
||||||
|
let key_c = RpoDigest::from([ONE, ONE, ONE, Felt::new(raw_c)]);
|
||||||
|
let val_c = [Felt::new(3); WORD_SIZE];
|
||||||
|
smt.insert(key_c, val_c);
|
||||||
|
|
||||||
|
// --- build Merkle store with equivalent data ----------------------------
|
||||||
|
let mut tree_root = get_init_root();
|
||||||
|
let index_a = NodeIndex::make(32, raw_a >> 32);
|
||||||
|
let leaf_node_a = build_leaf_node(key_a, val_a, 32);
|
||||||
|
tree_root = store.set_node(tree_root, index_a, leaf_node_a).unwrap().root;
|
||||||
|
|
||||||
|
let index_b = NodeIndex::make(32, raw_b >> 32);
|
||||||
|
let leaf_node_b = build_leaf_node(key_b, val_b, 32);
|
    tree_root = store.set_node(tree_root, index_b, leaf_node_b).unwrap().root;

    let index_c = NodeIndex::make(32, raw_c >> 32);
    let leaf_node_c = build_leaf_node(key_c, val_c, 32);
    tree_root = store.set_node(tree_root, index_c, leaf_node_c).unwrap().root;

    // --- verify that data is consistent between store and tree --------------

    assert_eq!(smt.root(), tree_root);

    assert_eq!(smt.get_value(key_a), val_a);
    assert_eq!(smt.get_node(index_a).unwrap(), leaf_node_a);
    let expected_path = store.get_path(tree_root, index_a).unwrap().path;
    assert_eq!(smt.get_path(index_a).unwrap(), expected_path);

    assert_eq!(smt.get_value(key_b), val_b);
    assert_eq!(smt.get_node(index_b).unwrap(), leaf_node_b);
    let expected_path = store.get_path(tree_root, index_b).unwrap().path;
    assert_eq!(smt.get_path(index_b).unwrap(), expected_path);

    assert_eq!(smt.get_value(key_c), val_c);
    assert_eq!(smt.get_node(index_c).unwrap(), leaf_node_c);
    let expected_path = store.get_path(tree_root, index_c).unwrap().path;
    assert_eq!(smt.get_path(index_c).unwrap(), expected_path);

    // make sure inner nodes match - the store contains more entries because it keeps track of
    // all prior state - so, we don't check that the number of inner nodes is the same in both
    let expected_nodes = get_non_empty_nodes(&store);
    let actual_nodes = smt.inner_nodes().collect::<Vec<_>>();
    actual_nodes.iter().for_each(|node| assert!(expected_nodes.contains(node)));
}

#[test]
fn tsmt_update() {
    let mut smt = TieredSmt::default();
    let mut store = MerkleStore::default();

    // --- insert a value into the tree ---------------------------------------
    let raw = 0b_01101001_01101100_00011111_11111111_10010110_10010011_11100000_00000000_u64;
    let key = RpoDigest::from([ONE, ONE, ONE, Felt::new(raw)]);
    let value_a = [ONE; WORD_SIZE];
    smt.insert(key, value_a);

    // --- update the value ---------------------------------------------------
    let value_b = [Felt::new(2); WORD_SIZE];
    smt.insert(key, value_b);

    // --- verify consistency -------------------------------------------------
    let mut tree_root = get_init_root();
    let index = NodeIndex::make(16, raw >> 48);
    let leaf_node = build_leaf_node(key, value_b, 16);
    tree_root = store.set_node(tree_root, index, leaf_node).unwrap().root;

    assert_eq!(smt.root(), tree_root);

    assert_eq!(smt.get_value(key), value_b);
    assert_eq!(smt.get_node(index).unwrap(), leaf_node);
    let expected_path = store.get_path(tree_root, index).unwrap().path;
    assert_eq!(smt.get_path(index).unwrap(), expected_path);

    // make sure inner nodes match - the store contains more entries because it keeps track of
    // all prior state - so, we don't check that the number of inner nodes is the same in both
    let expected_nodes = get_non_empty_nodes(&store);
    let actual_nodes = smt.inner_nodes().collect::<Vec<_>>();
    actual_nodes.iter().for_each(|node| assert!(expected_nodes.contains(node)));
}

// BOTTOM TIER TESTS
// ================================================================================================

#[test]
fn tsmt_bottom_tier() {
    let mut smt = TieredSmt::default();
    let mut store = MerkleStore::default();

    // common prefix for the keys
    let prefix = 0b_10101010_10101010_00011111_11111111_10010110_10010011_11100000_00000000_u64;

    // --- insert the first value ---------------------------------------------
    let key_a = RpoDigest::from([ONE, ONE, ONE, Felt::new(prefix)]);
    let val_a = [ONE; WORD_SIZE];
    smt.insert(key_a, val_a);

    // --- insert the second value --------------------------------------------
    // this key has the same 64-bit prefix and thus both values should end up in the same
    // node at depth 64
    let key_b = RpoDigest::from([ZERO, ONE, ONE, Felt::new(prefix)]);
    let val_b = [Felt::new(2); WORD_SIZE];
    smt.insert(key_b, val_b);

    // --- build Merkle store with equivalent data ----------------------------
    let index = NodeIndex::make(64, prefix);
    // to build bottom leaf we sort by key starting with the least significant element, thus
    // key_b is smaller than key_a.
    let leaf_node = build_bottom_leaf_node(&[key_b, key_a], &[val_b, val_a]);
    let mut tree_root = get_init_root();
    tree_root = store.set_node(tree_root, index, leaf_node).unwrap().root;

    // --- verify that data is consistent between store and tree --------------

    assert_eq!(smt.root(), tree_root);

    assert_eq!(smt.get_value(key_a), val_a);
    assert_eq!(smt.get_value(key_b), val_b);

    assert_eq!(smt.get_node(index).unwrap(), leaf_node);
    let expected_path = store.get_path(tree_root, index).unwrap().path;
    assert_eq!(smt.get_path(index).unwrap(), expected_path);

    // make sure inner nodes match - the store contains more entries because it keeps track of
    // all prior state - so, we don't check that the number of inner nodes is the same in both
    let expected_nodes = get_non_empty_nodes(&store);
    let actual_nodes = smt.inner_nodes().collect::<Vec<_>>();
    actual_nodes.iter().for_each(|node| assert!(expected_nodes.contains(node)));

    // make sure leaves are returned correctly
    let mut leaves = smt.bottom_leaves();
    assert_eq!(leaves.next(), Some((leaf_node, vec![(key_b, val_b), (key_a, val_a)])));
    assert_eq!(leaves.next(), None);
}

#[test]
fn tsmt_bottom_tier_two() {
    let mut smt = TieredSmt::default();
    let mut store = MerkleStore::default();

    // --- insert the first value ---------------------------------------------
    let raw_a = 0b_10101010_10101010_00011111_11111111_10010110_10010011_11100000_00000000_u64;
    let key_a = RpoDigest::from([ONE, ONE, ONE, Felt::new(raw_a)]);
    let val_a = [ONE; WORD_SIZE];
    smt.insert(key_a, val_a);

    // --- insert the second value --------------------------------------------
    // the key for this value has the same 48-bit prefix as the key for the first value,
    // thus, on insertion, both should end up in different nodes at depth 64
    let raw_b = 0b_10101010_10101010_00011111_11111111_10010110_10010011_01100000_00000000_u64;
    let key_b = RpoDigest::from([ONE, ONE, ONE, Felt::new(raw_b)]);
    let val_b = [Felt::new(2); WORD_SIZE];
    smt.insert(key_b, val_b);

    // --- build Merkle store with equivalent data ----------------------------
    let mut tree_root = get_init_root();
    let index_a = NodeIndex::make(64, raw_a);
    let leaf_node_a = build_bottom_leaf_node(&[key_a], &[val_a]);
    tree_root = store.set_node(tree_root, index_a, leaf_node_a).unwrap().root;

    let index_b = NodeIndex::make(64, raw_b);
    let leaf_node_b = build_bottom_leaf_node(&[key_b], &[val_b]);
    tree_root = store.set_node(tree_root, index_b, leaf_node_b).unwrap().root;

    // --- verify that data is consistent between store and tree --------------

    assert_eq!(smt.root(), tree_root);

    assert_eq!(smt.get_value(key_a), val_a);
    assert_eq!(smt.get_node(index_a).unwrap(), leaf_node_a);
    let expected_path = store.get_path(tree_root, index_a).unwrap().path;
    assert_eq!(smt.get_path(index_a).unwrap(), expected_path);

    assert_eq!(smt.get_value(key_b), val_b);
    assert_eq!(smt.get_node(index_b).unwrap(), leaf_node_b);
    let expected_path = store.get_path(tree_root, index_b).unwrap().path;
    assert_eq!(smt.get_path(index_b).unwrap(), expected_path);

    // make sure inner nodes match - the store contains more entries because it keeps track of
    // all prior state - so, we don't check that the number of inner nodes is the same in both
    let expected_nodes = get_non_empty_nodes(&store);
    let actual_nodes = smt.inner_nodes().collect::<Vec<_>>();
    actual_nodes.iter().for_each(|node| assert!(expected_nodes.contains(node)));

    // make sure leaves are returned correctly
    let mut leaves = smt.bottom_leaves();
    assert_eq!(leaves.next(), Some((leaf_node_b, vec![(key_b, val_b)])));
    assert_eq!(leaves.next(), Some((leaf_node_a, vec![(key_a, val_a)])));
    assert_eq!(leaves.next(), None);
}

// ERROR TESTS
// ================================================================================================

#[test]
fn tsmt_node_not_available() {
    let mut smt = TieredSmt::default();

    let raw = 0b_10101010_10101010_00011111_11111111_10010110_10010011_11100000_00000000_u64;
    let key = RpoDigest::from([ONE, ONE, ONE, Felt::new(raw)]);
    let value = [ONE; WORD_SIZE];

    // build an index which is just below the inserted leaf node
    let index = NodeIndex::make(17, raw >> 47);

    // since we haven't inserted the node yet, we should be able to get node and path to this index
    assert!(smt.get_node(index).is_ok());
    assert!(smt.get_path(index).is_ok());

    smt.insert(key, value);

    // but once the node is inserted, everything under it should be unavailable
    assert!(smt.get_node(index).is_err());
    assert!(smt.get_path(index).is_err());

    let index = NodeIndex::make(32, raw >> 32);
    assert!(smt.get_node(index).is_err());
    assert!(smt.get_path(index).is_err());

    let index = NodeIndex::make(34, raw >> 30);
    assert!(smt.get_node(index).is_err());
    assert!(smt.get_path(index).is_err());

    let index = NodeIndex::make(50, raw >> 14);
    assert!(smt.get_node(index).is_err());
    assert!(smt.get_path(index).is_err());

    let index = NodeIndex::make(64, raw);
    assert!(smt.get_node(index).is_err());
    assert!(smt.get_path(index).is_err());
}

// HELPER FUNCTIONS
// ================================================================================================

fn get_init_root() -> RpoDigest {
    EmptySubtreeRoots::empty_hashes(64)[0]
}

fn build_leaf_node(key: RpoDigest, value: Word, depth: u8) -> RpoDigest {
    let remaining_path = get_remaining_path(key, depth as u32);
    Rpo256::merge_in_domain(&[remaining_path, value.into()], depth.into())
}

fn build_bottom_leaf_node(keys: &[RpoDigest], values: &[Word]) -> RpoDigest {
    assert_eq!(keys.len(), values.len());

    let mut elements = Vec::with_capacity(keys.len());
    for (key, val) in keys.iter().zip(values.iter()) {
        let mut key = Word::from(key);
        key[3] = ZERO;
        elements.extend_from_slice(&key);
        elements.extend_from_slice(val.as_slice());
    }

    Rpo256::hash_elements(&elements)
}

fn get_non_empty_nodes(store: &MerkleStore) -> Vec<InnerNodeInfo> {
    store
        .inner_nodes()
        .filter(|node| !is_empty_subtree(&node.value))
        .collect::<Vec<_>>()
}

fn is_empty_subtree(node: &RpoDigest) -> bool {
    EmptySubtreeRoots::empty_hashes(255).contains(node)
}
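// Illustrative sketch (not part of the tests above): the index arithmetic used throughout these
// tests maps the most significant key element (`raw`) to a node at a given tier depth by taking
// the top `depth` bits of `raw`. For example, depth 16 uses `raw >> 48`, depth 32 uses
// `raw >> 32`, and depth 64 uses the full value. The helper name below is hypothetical and only
// summarizes that pattern.
#[allow(dead_code)]
fn tier_index(raw: u64, depth: u8) -> NodeIndex {
    assert!(depth > 0 && depth <= 64);
    NodeIndex::make(depth, raw >> (64 - depth as u32))
}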
21 src/utils.rs
@@ -1,21 +0,0 @@
use super::Word;
use crate::utils::string::String;
use core::fmt::{self, Write};

// RE-EXPORTS
// ================================================================================================
pub use winter_utils::{
    collections, string, uninit_vector, ByteReader, ByteWriter, Deserializable,
    DeserializationError, Serializable, SliceReader,
};

/// Converts a [Word] into hex.
pub fn word_to_hex(w: &Word) -> Result<String, fmt::Error> {
    let mut s = String::new();

    for byte in w.iter().flat_map(|e| e.to_bytes()) {
        write!(s, "{byte:02x}")?;
    }

    Ok(s)
}
324 src/utils/kv_map.rs (new file)
@@ -0,0 +1,324 @@
use core::cell::RefCell;
use winter_utils::{
    collections::{btree_map::IntoIter, BTreeMap, BTreeSet},
    Box,
};

// KEY-VALUE MAP TRAIT
// ================================================================================================

/// A trait that defines the interface for a key-value map.
pub trait KvMap<K: Ord + Clone, V: Clone>:
    Extend<(K, V)> + FromIterator<(K, V)> + IntoIterator<Item = (K, V)>
{
    fn get(&self, key: &K) -> Option<&V>;
    fn contains_key(&self, key: &K) -> bool;
    fn len(&self) -> usize;
    fn is_empty(&self) -> bool {
        self.len() == 0
    }
    fn insert(&mut self, key: K, value: V) -> Option<V>;

    fn iter(&self) -> Box<dyn Iterator<Item = (&K, &V)> + '_>;
}

// BTREE MAP `KvMap` IMPLEMENTATION
// ================================================================================================

impl<K: Ord + Clone, V: Clone> KvMap<K, V> for BTreeMap<K, V> {
    fn get(&self, key: &K) -> Option<&V> {
        self.get(key)
    }

    fn contains_key(&self, key: &K) -> bool {
        self.contains_key(key)
    }

    fn len(&self) -> usize {
        self.len()
    }

    fn insert(&mut self, key: K, value: V) -> Option<V> {
        self.insert(key, value)
    }

    fn iter(&self) -> Box<dyn Iterator<Item = (&K, &V)> + '_> {
        Box::new(self.iter())
    }
}
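// Illustrative sketch (not part of the source above): because `KvMap` is implemented both for
// `BTreeMap` and for the `RecordingMap` defined below, code that only needs generic key-value
// access can be written once against the trait. The function name below is hypothetical.
#[allow(dead_code)]
fn count_present<K: Ord + Clone, V: Clone>(map: &impl KvMap<K, V>, keys: &[K]) -> usize {
    // `contains_key` comes from the `KvMap` trait, so the same call works for a plain
    // `BTreeMap` and for a `RecordingMap` (where it also records the access).
    keys.iter().filter(|&key| map.contains_key(key)).count()
}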
// RECORDING MAP
// ================================================================================================

/// A [RecordingMap] records read requests made against the underlying key-value map.
///
/// The recorded data can then be used to generate a proof for those read requests.
///
/// The [RecordingMap] is composed of three parts:
/// - `data`: which contains the current set of key-value pairs in the map.
/// - `updates`: which tracks keys for which values have been changed since the map was
///   instantiated; updates include both insertions and updates of values under existing keys.
/// - `trace`: which contains the key-value pairs from the original data which have been accessed
///   since the map was instantiated.
#[derive(Debug, Default, Clone, Eq, PartialEq)]
pub struct RecordingMap<K, V> {
    data: BTreeMap<K, V>,
    updates: BTreeSet<K>,
    trace: RefCell<BTreeMap<K, V>>,
}

impl<K: Ord + Clone, V: Clone> RecordingMap<K, V> {
    // CONSTRUCTOR
    // --------------------------------------------------------------------------------------------
    /// Returns a new [RecordingMap] instance initialized with the provided key-value pairs
    /// (stored internally in a [BTreeMap]).
    pub fn new(init: impl IntoIterator<Item = (K, V)>) -> Self {
        RecordingMap {
            data: init.into_iter().collect(),
            updates: BTreeSet::new(),
            trace: RefCell::new(BTreeMap::new()),
        }
    }

    // FINALIZER
    // --------------------------------------------------------------------------------------------

    /// Consumes the [RecordingMap] and returns a [BTreeMap] containing the key-value pairs from
    /// the initial data set that were read during recording.
    pub fn into_proof(self) -> BTreeMap<K, V> {
        self.trace.take()
    }

    // TEST HELPERS
    // --------------------------------------------------------------------------------------------

    #[cfg(test)]
    pub fn trace_len(&self) -> usize {
        self.trace.borrow().len()
    }

    #[cfg(test)]
    pub fn updates_len(&self) -> usize {
        self.updates.len()
    }
}
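// Illustrative sketch (not part of the source above): a typical read-then-prove flow. Wrap an
// existing data set, serve reads through the map, then call `into_proof()` to obtain only the
// initial entries that were actually touched. The function name is hypothetical.
#[allow(dead_code)]
fn example_proof_flow() {
    let map = RecordingMap::new([(1u64, 10u64), (2, 20), (3, 30)]);

    // only key 2 is read, so only (2, 20) ends up in the recorded trace
    let _ = map.get(&2);

    let proof = map.into_proof();
    assert_eq!(proof.len(), 1);
    assert_eq!(proof.get(&2), Some(&20));
}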
impl<K: Ord + Clone, V: Clone> KvMap<K, V> for RecordingMap<K, V> {
    // PUBLIC ACCESSORS
    // --------------------------------------------------------------------------------------------

    /// Returns a reference to the value associated with the given key if the value exists.
    ///
    /// If the key is part of the initial data set, the key access is recorded.
    fn get(&self, key: &K) -> Option<&V> {
        self.data.get(key).map(|value| {
            if !self.updates.contains(key) {
                self.trace.borrow_mut().insert(key.clone(), value.clone());
            }
            value
        })
    }

    /// Returns a boolean to indicate whether the given key exists in the data set.
    ///
    /// If the key is part of the initial data set, the key access is recorded.
    fn contains_key(&self, key: &K) -> bool {
        self.get(key).is_some()
    }

    /// Returns the number of key-value pairs in the data set.
    fn len(&self) -> usize {
        self.data.len()
    }

    // MUTATORS
    // --------------------------------------------------------------------------------------------

    /// Inserts a key-value pair into the data set.
    ///
    /// If the key already exists in the data set, the value is updated and the old value is
    /// returned.
    fn insert(&mut self, key: K, value: V) -> Option<V> {
        let new_update = self.updates.insert(key.clone());
        self.data.insert(key.clone(), value).map(|old_value| {
            if new_update {
                self.trace.borrow_mut().insert(key, old_value.clone());
            }
            old_value
        })
    }

    // ITERATION
    // --------------------------------------------------------------------------------------------

    /// Returns an iterator over the key-value pairs in the data set.
    fn iter(&self) -> Box<dyn Iterator<Item = (&K, &V)> + '_> {
        Box::new(self.data.iter())
    }
}

impl<K: Clone + Ord, V: Clone> Extend<(K, V)> for RecordingMap<K, V> {
    fn extend<T: IntoIterator<Item = (K, V)>>(&mut self, iter: T) {
        iter.into_iter().for_each(move |(k, v)| {
            self.insert(k, v);
        });
    }
}

impl<K: Clone + Ord, V: Clone> FromIterator<(K, V)> for RecordingMap<K, V> {
    fn from_iter<T: IntoIterator<Item = (K, V)>>(iter: T) -> Self {
        Self::new(iter)
    }
}

impl<K: Clone + Ord, V: Clone> IntoIterator for RecordingMap<K, V> {
    type Item = (K, V);
    type IntoIter = IntoIter<K, V>;

    fn into_iter(self) -> Self::IntoIter {
        self.data.into_iter()
    }
}

// TESTS
// ================================================================================================

#[cfg(test)]
mod tests {
    use super::*;

    const ITEMS: [(u64, u64); 5] = [(0, 0), (1, 1), (2, 2), (3, 3), (4, 4)];

    #[test]
    fn test_get_item() {
        // instantiate a recording map
        let map = RecordingMap::new(ITEMS.to_vec());

        // get a few items
        let get_items = [0, 1, 2];
        for key in get_items.iter() {
            map.get(key);
        }

        // convert the map into a proof
        let proof = map.into_proof();

        // check that the proof contains the expected values
        for (key, value) in ITEMS.iter() {
            match get_items.contains(key) {
                true => assert_eq!(proof.get(key), Some(value)),
                false => assert_eq!(proof.get(key), None),
            }
        }
    }

    #[test]
    fn test_contains_key() {
        // instantiate a recording map
        let map = RecordingMap::new(ITEMS.to_vec());

        // check if the map contains a few items
        let get_items = [0, 1, 2];
        for key in get_items.iter() {
            map.contains_key(key);
        }

        // convert the map into a proof
        let proof = map.into_proof();

        // check that the proof contains the expected values
        for (key, _) in ITEMS.iter() {
            match get_items.contains(key) {
                true => assert_eq!(proof.contains_key(key), true),
                false => assert_eq!(proof.contains_key(key), false),
            }
        }
    }

    #[test]
    fn test_len() {
        // instantiate a recording map
        let mut map = RecordingMap::new(ITEMS.to_vec());
        // length of the map should be equal to the number of items
        assert_eq!(map.len(), ITEMS.len());

        // inserting entry with key that already exists should not change the length, but it does
        // add entries to the trace and update sets
        map.insert(4, 5);
        assert_eq!(map.len(), ITEMS.len());
        assert_eq!(map.trace_len(), 1);
        assert_eq!(map.updates_len(), 1);

        // inserting entry with new key should increase the length; it should also record the key
        // as an updated key, but the trace length does not change since old values were not touched
        map.insert(5, 5);
        assert_eq!(map.len(), ITEMS.len() + 1);
        assert_eq!(map.trace_len(), 1);
        assert_eq!(map.updates_len(), 2);

        // get some items so that they are saved in the trace; this should record original items
        // in the trace, but should not affect the set of updates
        let get_items = [0, 1, 2];
        for key in get_items.iter() {
            map.contains_key(key);
        }
        assert_eq!(map.trace_len(), 4);
        assert_eq!(map.updates_len(), 2);

        // read the same items again, this should not have any effect on either length, trace, or
        // the set of updates
        let get_items = [0, 1, 2];
        for key in get_items.iter() {
            map.contains_key(key);
        }
        assert_eq!(map.trace_len(), 4);
        assert_eq!(map.updates_len(), 2);

        // read a newly inserted item; this should not affect either length, trace, or the set of
        // updates
        let _val = map.get(&5).unwrap();
        assert_eq!(map.trace_len(), 4);
        assert_eq!(map.updates_len(), 2);

        // update a newly inserted item; this should not affect either length, trace, or the set
        // of updates
        map.insert(5, 11);
        assert_eq!(map.trace_len(), 4);
        assert_eq!(map.updates_len(), 2);

        // Note: The length reported by the proof will be different to the length originally
        // reported by the map.
        let proof = map.into_proof();

        // length of the proof should be equal to get_items + 1. The extra item is the original
        // value at key = 4u64
        assert_eq!(proof.len(), get_items.len() + 1);
    }

    #[test]
    fn test_iter() {
        let mut map = RecordingMap::new(ITEMS.to_vec());
        assert!(map.iter().all(|(x, y)| ITEMS.contains(&(*x, *y))));

        // when inserting entry with key that already exists the iterator should return the new value
        let new_value = 5;
        map.insert(4, new_value);
        assert_eq!(map.iter().count(), ITEMS.len());
        assert!(map.iter().all(|(x, y)| if x == &4 {
            y == &new_value
        } else {
            ITEMS.contains(&(*x, *y))
        }));
    }

    #[test]
    fn test_is_empty() {
        // instantiate an empty recording map
        let empty_map: RecordingMap<u64, u64> = RecordingMap::default();
        assert!(empty_map.is_empty());

        // instantiate a non-empty recording map
        let map = RecordingMap::new(ITEMS.to_vec());
        assert!(!map.is_empty());
    }
}
36 src/utils/mod.rs (new file)
@@ -0,0 +1,36 @@
use super::{utils::string::String, Word};
use core::fmt::{self, Write};

#[cfg(not(feature = "std"))]
pub use alloc::format;

#[cfg(feature = "std")]
pub use std::format;

mod kv_map;

// RE-EXPORTS
// ================================================================================================
pub use winter_utils::{
    string, uninit_vector, Box, ByteReader, ByteWriter, Deserializable, DeserializationError,
    Serializable, SliceReader,
};

pub mod collections {
    pub use super::kv_map::*;
    pub use winter_utils::collections::*;
}

// UTILITY FUNCTIONS
// ================================================================================================

/// Converts a [Word] into hex.
pub fn word_to_hex(w: &Word) -> Result<String, fmt::Error> {
    let mut s = String::new();

    for byte in w.iter().flat_map(|e| e.to_bytes()) {
        write!(s, "{byte:02x}")?;
    }

    Ok(s)
}
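// Illustrative sketch (not part of the source above): assuming `Word` is four field elements,
// each serializing to 8 bytes, the resulting hex string is 64 characters long. The function name
// and the `use super::Felt` import are assumptions made for this example only.
#[allow(dead_code)]
fn example_word_to_hex() {
    use super::Felt;

    let word: Word = [Felt::new(1), Felt::new(2), Felt::new(3), Felt::new(4)];
    let hex = word_to_hex(&word).unwrap();
    assert_eq!(hex.len(), 64);
}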