Mirror of https://github.com/arnaucube/miden-crypto.git (synced 2026-01-12 00:51:29 +01:00)
Compare commits
70 Commits
Commit SHA1s:
9389f2fb40, 703692553d, d68be83bc4, 80171af872, 75af3d474b, 9e6c8ff700, a58922756a, bf15e1331a,
7957cc929a, 854892ba9d, ce38ee388d, 4d1b3628d3, 2d1bc3ba34, 2ff96f40cb, 9531d2bd34, c79351be99,
b7678619b0, 0375f31035, c96047af9d, b250752883, 482dab94c5, d6cbd178e1, ef342cec23, 7305a72295,
84086bdb95, a681952982, 78e82f2ee6, f07ed69d2f, 17eb8d78d3, 8cb245dc1f, 867b772d9a, 33d37d82e2,
5703fef226, 669ebb49fb, 931bcc3cc3, 91667fd7de, e4ddf6ffaf, 88a646031f, 2871e4eb27, 3a6a4fcce6,
7ffa0cd97d, 32d37f1591, bc12fcafe9, 8c08243f7a, 956e4c6fad, efa39e5ce0, ae3f14e0ff, 962a07292f,
dfb073f784, 41c38b4b5d, c4eb4a6b98, 35b255b5eb, e94b0c70a9, e6bf497500, 835142d432, 85ba3f1a34,
6aa226e9bb, 0af45b75f4, 822c52a1d2, 3c9a5235a0, 2d97153fd0, 325b3abf8b, b1a5ed6b5d, 9307178873,
3af53e63cf, 0799b1bb9d, 0c242d2c51, 21a8cbcb45, 66da469ec4, ed36ebc542
.git-blame-ignore-revs (new file, 2 lines)
@@ -0,0 +1,2 @@
# initial run of pre-commit
956e4c6fad779ef15eaa27702b26f05f65d31494
.pre-commit-config.yaml (new file, 43 lines)
@@ -0,0 +1,43 @@
# See https://pre-commit.com for more information
# See https://pre-commit.com/hooks.html for more hooks
repos:
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v3.2.0
    hooks:
      - id: trailing-whitespace
      - id: end-of-file-fixer
      - id: check-yaml
      - id: check-json
      - id: check-toml
      - id: pretty-format-json
      - id: check-added-large-files
      - id: check-case-conflict
      - id: check-executables-have-shebangs
      - id: check-merge-conflict
      - id: detect-private-key
  - repo: https://github.com/hackaugusto/pre-commit-cargo
    rev: v1.0.0
    hooks:
      # Allows cargo fmt to modify the source code prior to the commit
      - id: cargo
        name: Cargo fmt
        args: ["+stable", "fmt", "--all"]
        stages: [commit]
      # Requires code to be properly formatted prior to pushing upstream
      - id: cargo
        name: Cargo fmt --check
        args: ["+stable", "fmt", "--all", "--check"]
        stages: [push, manual]
      - id: cargo
        name: Cargo check --all-targets
        args: ["+stable", "check", "--all-targets"]
      - id: cargo
        name: Cargo check --all-targets --no-default-features
        args: ["+stable", "check", "--all-targets", "--no-default-features"]
      - id: cargo
        name: Cargo check --all-targets --all-features
        args: ["+stable", "check", "--all-targets", "--all-features"]
      # Unlike fmt, clippy will not be automatically applied
      - id: cargo
        name: Cargo clippy
        args: ["+nightly", "clippy", "--workspace", "--", "--deny", "clippy::all", "--deny", "warnings"]
CHANGELOG.md (23 changed lines)
@@ -1,3 +1,26 @@
+## 0.2.0 (2023-03-24)
+
+- Implemented `Mmr` and related structs (#67).
+- Implemented `MerkleStore` (#93, #94, #95, #107 #112).
+- Added benchmarks for `MerkleStore` vs. other structs (#97).
+- Added Merkle path containers (#99).
+- Fixed depth handling in `MerklePathSet` (#110).
+- Updated Winterfell dependency to v0.6.
+
+## 0.1.4 (2023-02-22)
+
+- Re-export winter-crypto Hasher, Digest & ElementHasher (#72)
+
+## 0.1.3 (2023-02-20)
+
+- Updated Winterfell dependency to v0.5.1 (#68)
+
+## 0.1.2 (2023-02-17)
+
+- Fixed `Rpo256::hash` pad that was panicking on input (#44)
+- Added `MerklePath` wrapper to encapsulate Merkle opening verification and root computation (#53)
+- Added `NodeIndex` Merkle wrapper to encapsulate Merkle tree traversal and mappings (#54)
+
 ## 0.1.1 (2023-02-06)

 - Introduced `merge_in_domain` for the RPO hash function, to allow using a specified domain value in the second capacity register when hashing two digests together.
Cargo.toml (24 changed lines)
@@ -1,14 +1,16 @@
 [package]
 name = "miden-crypto"
-version = "0.1.1"
+version = "0.2.0"
-description="Miden Cryptographic primitives"
+description = "Miden Cryptographic primitives"
 authors = ["miden contributors"]
-readme="README.md"
+readme = "README.md"
 license = "MIT"
 repository = "https://github.com/0xPolygonMiden/crypto"
+documentation = "https://docs.rs/miden-crypto/0.2.0"
 categories = ["cryptography", "no-std"]
 keywords = ["miden", "crypto", "hash", "merkle"]
 edition = "2021"
+rust-version = "1.67"

 [[bench]]
 name = "hash"
@@ -18,17 +20,21 @@ harness = false
 name = "smt"
 harness = false

+[[bench]]
+name = "store"
+harness = false
+
 [features]
 default = ["blake3/default", "std", "winter_crypto/default", "winter_math/default", "winter_utils/default"]
 std = ["blake3/std", "winter_crypto/std", "winter_math/std", "winter_utils/std"]

 [dependencies]
-blake3 = { version = "1.0", default-features = false }
+blake3 = { version = "1.3", default-features = false }
-winter_crypto = { version = "0.4.1", package = "winter-crypto", default-features = false }
+winter_crypto = { version = "0.6", package = "winter-crypto", default-features = false }
-winter_math = { version = "0.4.1", package = "winter-math", default-features = false }
+winter_math = { version = "0.6", package = "winter-math", default-features = false }
-winter_utils = { version = "0.4.1", package = "winter-utils", default-features = false }
+winter_utils = { version = "0.6", package = "winter-utils", default-features = false }

 [dev-dependencies]
 criterion = { version = "0.4", features = ["html_reports"] }
-proptest = "1.0.0"
+proptest = "1.1.0"
-rand_utils = { version = "0.4", package = "winter-rand-utils" }
+rand_utils = { version = "0.6", package = "winter-rand-utils" }
LICENSE (2 changed lines)
@@ -1,6 +1,6 @@
 MIT License

-Copyright (c) 2022 Polygon Miden
+Copyright (c) 2023 Polygon Miden

 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
README.md (24 changed lines)
@@ -13,7 +13,15 @@ For performance benchmarks of these hash functions and their comparison to other
 [Merkle module](./src/merkle/) provides a set of data structures related to Merkle trees. All these data structures are implemented using the RPO hash function described above. The data structures are:

 * `MerkleTree`: a regular fully-balanced binary Merkle tree. The depth of this tree can be at most 64.
+* `SimpleSmt`: a Sparse Merkle Tree, mapping 63-bit keys to 4-element leaf values.
 * `MerklePathSet`: a collection of Merkle authentication paths all resolving to the same root. The length of the paths can be at most 64.
+* `MerkleStore`: a collection of Merkle trees of different heights designed to efficiently store trees with common subtrees.
+* `Mmr`: a Merkle mountain range structure designed to function as an append-only log.
+
+The module also contains additional supporting components such as `NodeIndex`, `MerklePath`, and `MerkleError` to assist with tree indexation, opening proofs, and reporting inconsistent arguments/state.
+
+## Extra
+[Root module](./src/lib.rs) provides a set of constants, types, aliases, and utils required to use the primitives of this library.

 ## Crate features
 This crate can be compiled with the following features:

@@ -25,5 +33,21 @@ Both of these features imply the use of [alloc](https://doc.rust-lang.org/alloc/

 To compile with `no_std`, disable default features via `--no-default-features` flag.
+
+## Testing
+
+You can use cargo defaults to test the library:
+
+```shell
+cargo test
+```
+
+However, some of the functions are heavy and might take a while for the tests to complete. In order to test in release mode, we have to replicate the test conditions of the development mode so all debug assertions can be verified.
+
+We do that by enabling some special [flags](https://doc.rust-lang.org/cargo/reference/profiles.html) for the compilation.
+
+```shell
+RUSTFLAGS="-C debug-assertions -C overflow-checks -C debuginfo=2" cargo test --release
+```

 ## License
 This project is [MIT licensed](./LICENSE).
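The Merkle structures introduced by these commits can be combined roughly as follows. This is an illustrative sketch only: it mirrors the constructors and accessors exercised in `benches/store.rs` below, the leaf values and depths are made up, and `Word` is assumed to be the 4-element word alias exported from the crate root.

```rust
use miden_crypto::merkle::{MerkleStore, MerkleTree, NodeIndex, SimpleSmt};
use miden_crypto::{Felt, Word};

fn main() {
    // MerkleTree expects a power-of-two number of leaves.
    let leaves: Vec<Word> = (0..8u64)
        .map(|i| [Felt::new(i), Felt::new(0), Felt::new(0), Felt::new(0)])
        .collect();

    // Fully-balanced binary Merkle tree and an opening for leaf 3.
    let mtree = MerkleTree::new(leaves.clone()).unwrap();
    let _path = mtree.get_path(NodeIndex::new(mtree.depth(), 3));

    // Sparse Merkle tree keyed by u64 indices.
    let entries: Vec<(u64, Word)> =
        leaves.iter().enumerate().map(|(i, v)| (i as u64, *v)).collect();
    let smt = SimpleSmt::new(63).unwrap().with_leaves(entries.clone()).unwrap();
    let _leaf = smt.get_node(&NodeIndex::new(smt.depth(), 3));

    // MerkleStore holding both trees, addressed by their roots.
    let store = MerkleStore::new()
        .with_merkle_tree(leaves)
        .unwrap()
        .with_sparse_merkle_tree(entries)
        .unwrap();
    let _node = store.get_node(mtree.root(), NodeIndex::new(mtree.depth(), 3));
}
```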
SMT benchmark (`fn smt_rpo`):
@@ -18,7 +18,7 @@ fn smt_rpo(c: &mut Criterion) {
                 (i, word)
             })
             .collect();
-        let tree = SimpleSmt::new(entries, depth).unwrap();
+        let tree = SimpleSmt::new(depth).unwrap().with_leaves(entries).unwrap();
         trees.push(tree);
     }
 }
benches/store.rs (new file, 506 lines)
@@ -0,0 +1,506 @@
use criterion::{black_box, criterion_group, criterion_main, BatchSize, BenchmarkId, Criterion};
use miden_crypto::merkle::{MerkleStore, MerkleTree, NodeIndex, SimpleSmt};
use miden_crypto::Word;
use miden_crypto::{hash::rpo::RpoDigest, Felt};
use rand_utils::{rand_array, rand_value};

/// Since MerkleTree can only be created when a power-of-two number of elements is used, the sample
/// sizes are limited to that.
static BATCH_SIZES: [usize; 3] = [2usize.pow(4), 2usize.pow(7), 2usize.pow(10)];

/// Generates a random `RpoDigest`.
fn random_rpo_digest() -> RpoDigest {
    rand_array::<Felt, 4>().into()
}

/// Generates a random `Word`.
fn random_word() -> Word {
    rand_array::<Felt, 4>().into()
}

/// Generates a u64 in `0..range`.
fn random_index(range: u64) -> u64 {
    rand_value::<u64>() % range
}

/// Benchmarks getting an empty leaf from the SMT and MerkleStore backends.
fn get_empty_leaf_simplesmt(c: &mut Criterion) {
    let mut group = c.benchmark_group("get_empty_leaf_simplesmt");

    let depth = 63u8;
    let size = 2u64.pow(depth as u32);

    // both SMT and the store are pre-populated with empty hashes, accessing these values is what is
    // being benchmarked here, so no values are inserted into the backends
    let smt = SimpleSmt::new(depth).unwrap();
    let store = MerkleStore::new();
    let root = smt.root();

    group.bench_function(BenchmarkId::new("SimpleSmt", depth), |b| {
        b.iter_batched(
            || random_index(size),
            |value| black_box(smt.get_node(&NodeIndex::new(depth, value))),
            BatchSize::SmallInput,
        )
    });

    group.bench_function(BenchmarkId::new("MerkleStore", depth), |b| {
        b.iter_batched(
            || random_index(size),
            |value| black_box(store.get_node(root, NodeIndex::new(depth, value))),
            BatchSize::SmallInput,
        )
    });
}

/// Benchmarks getting a leaf on Merkle trees and Merkle stores of varying power-of-two sizes.
fn get_leaf_merkletree(c: &mut Criterion) {
    let mut group = c.benchmark_group("get_leaf_merkletree");

    let random_data_size = BATCH_SIZES.into_iter().max().unwrap();
    let random_data: Vec<RpoDigest> = (0..random_data_size).map(|_| random_rpo_digest()).collect();

    for size in BATCH_SIZES {
        let leaves = &random_data[..size];

        let mtree_leaves: Vec<Word> = leaves.iter().map(|v| v.into()).collect();
        let mtree = MerkleTree::new(mtree_leaves.clone()).unwrap();
        let store = MerkleStore::new().with_merkle_tree(mtree_leaves).unwrap();
        let depth = mtree.depth();
        let root = mtree.root();
        let size_u64 = size as u64;

        group.bench_function(BenchmarkId::new("MerkleTree", size), |b| {
            b.iter_batched(
                || random_index(size_u64),
                |value| black_box(mtree.get_node(NodeIndex::new(depth, value))),
                BatchSize::SmallInput,
            )
        });

        group.bench_function(BenchmarkId::new("MerkleStore", size), |b| {
            b.iter_batched(
                || random_index(size_u64),
                |value| black_box(store.get_node(root, NodeIndex::new(depth, value))),
                BatchSize::SmallInput,
            )
        });
    }
}

/// Benchmarks getting a leaf on SMT and Merkle stores of varying power-of-two sizes.
fn get_leaf_simplesmt(c: &mut Criterion) {
    let mut group = c.benchmark_group("get_leaf_simplesmt");

    let random_data_size = BATCH_SIZES.into_iter().max().unwrap();
    let random_data: Vec<RpoDigest> = (0..random_data_size).map(|_| random_rpo_digest()).collect();

    for size in BATCH_SIZES {
        let leaves = &random_data[..size];

        let smt_leaves = leaves
            .iter()
            .enumerate()
            .map(|(c, v)| (c.try_into().unwrap(), v.into()))
            .collect::<Vec<(u64, Word)>>();
        let smt = SimpleSmt::new(63)
            .unwrap()
            .with_leaves(smt_leaves.clone())
            .unwrap();
        let store = MerkleStore::new()
            .with_sparse_merkle_tree(smt_leaves)
            .unwrap();
        let depth = smt.depth();
        let root = smt.root();
        let size_u64 = size as u64;

        group.bench_function(BenchmarkId::new("SimpleSmt", size), |b| {
            b.iter_batched(
                || random_index(size_u64),
                |value| black_box(smt.get_node(&NodeIndex::new(depth, value))),
                BatchSize::SmallInput,
            )
        });

        group.bench_function(BenchmarkId::new("MerkleStore", size), |b| {
            b.iter_batched(
                || random_index(size_u64),
                |value| black_box(store.get_node(root, NodeIndex::new(depth, value))),
                BatchSize::SmallInput,
            )
        });
    }
}

/// Benchmarks getting a node at half of the depth of an empty SMT and an empty Merkle store.
fn get_node_of_empty_simplesmt(c: &mut Criterion) {
    let mut group = c.benchmark_group("get_node_of_empty_simplesmt");

    let depth = 63u8;
    let size = 2u64.pow(depth as u32);

    // both SMT and the store are pre-populated with the empty hashes, accessing the internal nodes
    // of these values is what is being benchmarked here, so no values are inserted into the
    // backends.
    let smt = SimpleSmt::new(depth).unwrap();
    let store = MerkleStore::new();
    let root = smt.root();
    let half_depth = depth / 2;

    group.bench_function(BenchmarkId::new("SimpleSmt", depth), |b| {
        b.iter_batched(
            || random_index(size),
            |value| black_box(smt.get_node(&NodeIndex::new(half_depth, value))),
            BatchSize::SmallInput,
        )
    });

    group.bench_function(BenchmarkId::new("MerkleStore", depth), |b| {
        b.iter_batched(
            || random_index(size),
            |value| black_box(store.get_node(root, NodeIndex::new(half_depth, value))),
            BatchSize::SmallInput,
        )
    });
}

/// Benchmarks getting a node at half of the depth of a Merkle tree and Merkle store of varying
/// power-of-two sizes.
fn get_node_merkletree(c: &mut Criterion) {
    let mut group = c.benchmark_group("get_node_merkletree");

    let random_data_size = BATCH_SIZES.into_iter().max().unwrap();
    let random_data: Vec<RpoDigest> = (0..random_data_size).map(|_| random_rpo_digest()).collect();

    for size in BATCH_SIZES {
        let leaves = &random_data[..size];

        let mtree_leaves: Vec<Word> = leaves.iter().map(|v| v.into()).collect();
        let mtree = MerkleTree::new(mtree_leaves.clone()).unwrap();
        let store = MerkleStore::new().with_merkle_tree(mtree_leaves).unwrap();
        let half_depth = mtree.depth() / 2;
        let root = mtree.root();
        let size_u64 = size as u64;

        group.bench_function(BenchmarkId::new("MerkleTree", size), |b| {
            b.iter_batched(
                || random_index(size_u64),
                |value| black_box(mtree.get_node(NodeIndex::new(half_depth, value))),
                BatchSize::SmallInput,
            )
        });

        group.bench_function(BenchmarkId::new("MerkleStore", size), |b| {
            b.iter_batched(
                || random_index(size_u64),
                |value| black_box(store.get_node(root, NodeIndex::new(half_depth, value))),
                BatchSize::SmallInput,
            )
        });
    }
}

/// Benchmarks getting a node at half the depth on SMT and Merkle stores of varying power-of-two
/// sizes.
fn get_node_simplesmt(c: &mut Criterion) {
    let mut group = c.benchmark_group("get_node_simplesmt");

    let random_data_size = BATCH_SIZES.into_iter().max().unwrap();
    let random_data: Vec<RpoDigest> = (0..random_data_size).map(|_| random_rpo_digest()).collect();

    for size in BATCH_SIZES {
        let leaves = &random_data[..size];

        let smt_leaves = leaves
            .iter()
            .enumerate()
            .map(|(c, v)| (c.try_into().unwrap(), v.into()))
            .collect::<Vec<(u64, Word)>>();
        let smt = SimpleSmt::new(63)
            .unwrap()
            .with_leaves(smt_leaves.clone())
            .unwrap();
        let store = MerkleStore::new()
            .with_sparse_merkle_tree(smt_leaves)
            .unwrap();
        let root = smt.root();
        let size_u64 = size as u64;
        let half_depth = smt.depth() / 2;

        group.bench_function(BenchmarkId::new("SimpleSmt", size), |b| {
            b.iter_batched(
                || random_index(size_u64),
                |value| black_box(smt.get_node(&NodeIndex::new(half_depth, value))),
                BatchSize::SmallInput,
            )
        });

        group.bench_function(BenchmarkId::new("MerkleStore", size), |b| {
            b.iter_batched(
                || random_index(size_u64),
                |value| black_box(store.get_node(root, NodeIndex::new(half_depth, value))),
                BatchSize::SmallInput,
            )
        });
    }
}

/// Benchmarks getting a path of a leaf on the Merkle tree and Merkle store backends.
fn get_leaf_path_merkletree(c: &mut Criterion) {
    let mut group = c.benchmark_group("get_leaf_path_merkletree");

    let random_data_size = BATCH_SIZES.into_iter().max().unwrap();
    let random_data: Vec<RpoDigest> = (0..random_data_size).map(|_| random_rpo_digest()).collect();

    for size in BATCH_SIZES {
        let leaves = &random_data[..size];

        let mtree_leaves: Vec<Word> = leaves.iter().map(|v| v.into()).collect();
        let mtree = MerkleTree::new(mtree_leaves.clone()).unwrap();
        let store = MerkleStore::new().with_merkle_tree(mtree_leaves).unwrap();
        let depth = mtree.depth();
        let root = mtree.root();
        let size_u64 = size as u64;

        group.bench_function(BenchmarkId::new("MerkleTree", size), |b| {
            b.iter_batched(
                || random_index(size_u64),
                |value| black_box(mtree.get_path(NodeIndex::new(depth, value))),
                BatchSize::SmallInput,
            )
        });

        group.bench_function(BenchmarkId::new("MerkleStore", size), |b| {
            b.iter_batched(
                || random_index(size_u64),
                |value| black_box(store.get_path(root, NodeIndex::new(depth, value))),
                BatchSize::SmallInput,
            )
        });
    }
}

/// Benchmarks getting a path of a leaf on the SMT and Merkle store backends.
fn get_leaf_path_simplesmt(c: &mut Criterion) {
    let mut group = c.benchmark_group("get_leaf_path_simplesmt");

    let random_data_size = BATCH_SIZES.into_iter().max().unwrap();
    let random_data: Vec<RpoDigest> = (0..random_data_size).map(|_| random_rpo_digest()).collect();

    for size in BATCH_SIZES {
        let leaves = &random_data[..size];

        let smt_leaves = leaves
            .iter()
            .enumerate()
            .map(|(c, v)| (c.try_into().unwrap(), v.into()))
            .collect::<Vec<(u64, Word)>>();
        let smt = SimpleSmt::new(63)
            .unwrap()
            .with_leaves(smt_leaves.clone())
            .unwrap();
        let store = MerkleStore::new()
            .with_sparse_merkle_tree(smt_leaves)
            .unwrap();
        let depth = smt.depth();
        let root = smt.root();
        let size_u64 = size as u64;

        group.bench_function(BenchmarkId::new("SimpleSmt", size), |b| {
            b.iter_batched(
                || random_index(size_u64),
                |value| black_box(smt.get_path(NodeIndex::new(depth, value))),
                BatchSize::SmallInput,
            )
        });

        group.bench_function(BenchmarkId::new("MerkleStore", size), |b| {
            b.iter_batched(
                || random_index(size_u64),
                |value| black_box(store.get_path(root, NodeIndex::new(depth, value))),
                BatchSize::SmallInput,
            )
        });
    }
}

/// Benchmarks creation of the different storage backends
fn new(c: &mut Criterion) {
    let mut group = c.benchmark_group("new");

    let random_data_size = BATCH_SIZES.into_iter().max().unwrap();
    let random_data: Vec<RpoDigest> = (0..random_data_size).map(|_| random_rpo_digest()).collect();

    for size in BATCH_SIZES {
        let leaves = &random_data[..size];

        // MerkleTree constructor is optimized to work with vectors. Create a new copy of the data
        // and pass it to the benchmark function
        group.bench_function(BenchmarkId::new("MerkleTree::new", size), |b| {
            b.iter_batched(
                || leaves.iter().map(|v| v.into()).collect::<Vec<Word>>(),
                |l| black_box(MerkleTree::new(l)),
                BatchSize::SmallInput,
            )
        });

        // This could be done with `bench_with_input`, however to remove variables while comparing
        // with MerkleTree it is using `iter_batched`
        group.bench_function(
            BenchmarkId::new("MerkleStore::with_merkle_tree", size),
            |b| {
                b.iter_batched(
                    || leaves.iter().map(|v| v.into()).collect::<Vec<Word>>(),
                    |l| black_box(MerkleStore::new().with_merkle_tree(l)),
                    BatchSize::SmallInput,
                )
            },
        );

        group.bench_function(BenchmarkId::new("SimpleSmt::new", size), |b| {
            b.iter_batched(
                || {
                    leaves
                        .iter()
                        .enumerate()
                        .map(|(c, v)| (c.try_into().unwrap(), v.into()))
                        .collect::<Vec<(u64, Word)>>()
                },
                |l| black_box(SimpleSmt::new(63).unwrap().with_leaves(l)),
                BatchSize::SmallInput,
            )
        });

        group.bench_function(
            BenchmarkId::new("MerkleStore::with_sparse_merkle_tree", size),
            |b| {
                b.iter_batched(
                    || {
                        leaves
                            .iter()
                            .enumerate()
                            .map(|(c, v)| (c.try_into().unwrap(), v.into()))
                            .collect::<Vec<(u64, Word)>>()
                    },
                    |l| black_box(MerkleStore::new().with_sparse_merkle_tree(l)),
                    BatchSize::SmallInput,
                )
            },
        );
    }
}

/// Benchmarks updating a leaf on MerkleTree and MerkleStore backends.
fn update_leaf_merkletree(c: &mut Criterion) {
    let mut group = c.benchmark_group("update_leaf_merkletree");

    let random_data_size = BATCH_SIZES.into_iter().max().unwrap();
    let random_data: Vec<RpoDigest> = (0..random_data_size).map(|_| random_rpo_digest()).collect();

    for size in BATCH_SIZES {
        let leaves = &random_data[..size];

        let mtree_leaves: Vec<Word> = leaves.iter().map(|v| v.into()).collect();
        let mut mtree = MerkleTree::new(mtree_leaves.clone()).unwrap();
        let mut store = MerkleStore::new().with_merkle_tree(mtree_leaves).unwrap();
        let depth = mtree.depth();
        let root = mtree.root();
        let size_u64 = size as u64;

        group.bench_function(BenchmarkId::new("MerkleTree", size), |b| {
            b.iter_batched(
                || (random_index(size_u64), random_word()),
                |(index, value)| black_box(mtree.update_leaf(index, value)),
                BatchSize::SmallInput,
            )
        });

        let mut store_root = root;
        group.bench_function(BenchmarkId::new("MerkleStore", size), |b| {
            b.iter_batched(
                || (random_index(size_u64), random_word()),
                |(index, value)| {
                    // The MerkleTree automatically updates its internal root, the Store maintains
                    // the old root and adds the new one. Here we update the root to have a fair
                    // comparison
                    store_root = store
                        .set_node(root, NodeIndex::new(depth, index), value)
                        .unwrap()
                        .root;
                    black_box(store_root)
                },
                BatchSize::SmallInput,
            )
        });
    }
}

/// Benchmarks updating a leaf on SMT and MerkleStore backends.
fn update_leaf_simplesmt(c: &mut Criterion) {
    let mut group = c.benchmark_group("update_leaf_simplesmt");

    let random_data_size = BATCH_SIZES.into_iter().max().unwrap();
    let random_data: Vec<RpoDigest> = (0..random_data_size).map(|_| random_rpo_digest()).collect();

    for size in BATCH_SIZES {
        let leaves = &random_data[..size];

        let smt_leaves = leaves
            .iter()
            .enumerate()
            .map(|(c, v)| (c.try_into().unwrap(), v.into()))
            .collect::<Vec<(u64, Word)>>();
        let mut smt = SimpleSmt::new(63)
            .unwrap()
            .with_leaves(smt_leaves.clone())
            .unwrap();
        let mut store = MerkleStore::new()
            .with_sparse_merkle_tree(smt_leaves)
            .unwrap();
        let depth = smt.depth();
        let root = smt.root();
        let size_u64 = size as u64;

        group.bench_function(BenchmarkId::new("SimpleSMT", size), |b| {
            b.iter_batched(
                || (random_index(size_u64), random_word()),
                |(index, value)| black_box(smt.update_leaf(index, value)),
                BatchSize::SmallInput,
            )
        });

        let mut store_root = root;
        group.bench_function(BenchmarkId::new("MerkleStore", size), |b| {
            b.iter_batched(
                || (random_index(size_u64), random_word()),
                |(index, value)| {
                    // The MerkleTree automatically updates its internal root, the Store maintains
                    // the old root and adds the new one. Here we update the root to have a fair
                    // comparison
                    store_root = store
                        .set_node(root, NodeIndex::new(depth, index), value)
                        .unwrap()
                        .root;
                    black_box(store_root)
                },
                BatchSize::SmallInput,
            )
        });
    }
}

criterion_group!(
    store_group,
    get_empty_leaf_simplesmt,
    get_leaf_merkletree,
    get_leaf_path_merkletree,
    get_leaf_path_simplesmt,
    get_leaf_simplesmt,
    get_node_merkletree,
    get_node_of_empty_simplesmt,
    get_node_simplesmt,
    new,
    update_leaf_merkletree,
    update_leaf_simplesmt,
);
criterion_main!(store_group);
src/bit.rs (new file, 169 lines)
@@ -0,0 +1,169 @@
/// Yields the bits of a `u64`.
pub struct BitIterator {
    /// The value that is being iterated bit-wise
    value: u64,
    /// True bits in the `mask` are the bits that have been visited.
    mask: u64,
}

impl BitIterator {
    pub fn new(value: u64) -> BitIterator {
        BitIterator { value, mask: 0 }
    }

    /// An efficient skip implementation.
    ///
    /// Note: The compiler is smart enough to translate a `skip(n)` into a single shift instruction
    /// if the code is inlined, however inlining does not always happen.
    pub fn skip_front(mut self, n: u32) -> Self {
        let mask = bitmask(n);
        let ones = self.mask.trailing_ones();
        let mask_position = ones;
        self.mask ^= mask.checked_shl(mask_position).unwrap_or(0);
        self
    }

    /// An efficient skip from the back.
    ///
    /// Note: The compiler is smart enough to translate a `skip(n)` into a single shift instruction
    /// if the code is inlined, however inlining does not always happen.
    pub fn skip_back(mut self, n: u32) -> Self {
        let mask = bitmask(n);
        let ones = self.mask.leading_ones();
        let mask_position = u64::BITS - ones - n;
        self.mask ^= mask.checked_shl(mask_position).unwrap_or(0);
        self
    }
}

impl Iterator for BitIterator {
    type Item = bool;

    fn next(&mut self) -> Option<<Self as Iterator>::Item> {
        // trailing_ones is implemented with trailing_zeros, and the zeros are computed with the
        // intrinsic cttz. [Rust 1.67.0] x86 uses the `bsf` instruction. AArch64 uses the `rbit
        // clz` instructions.
        let ones = self.mask.trailing_ones();

        if ones == u64::BITS {
            None
        } else {
            let bit_position = ones;
            let mask = 1 << bit_position;
            self.mask ^= mask;
            let bit = self.value & mask;
            Some(bit != 0)
        }
    }
}

impl DoubleEndedIterator for BitIterator {
    fn next_back(&mut self) -> Option<<Self as Iterator>::Item> {
        // leading_ones is implemented with leading_zeros, and the zeros are computed with the
        // intrinsic ctlz. [Rust 1.67.0] x86 uses the `bsr` instruction. AArch64 uses the `clz`
        // instruction.
        let ones = self.mask.leading_ones();

        if ones == u64::BITS {
            None
        } else {
            let bit_position = u64::BITS - ones - 1;
            let mask = 1 << bit_position;
            self.mask ^= mask;
            let bit = self.value & mask;
            Some(bit != 0)
        }
    }
}

#[cfg(test)]
mod test {
    use super::BitIterator;

    #[test]
    fn test_bit_iterator() {
        let v = 0b1;
        let mut it = BitIterator::new(v);
        assert!(it.next().unwrap(), "first bit is true");
        assert!(it.all(|v| v == false), "every other value is false");

        let v = 0b10;
        let mut it = BitIterator::new(v);
        assert!(!it.next().unwrap(), "first bit is false");
        assert!(it.next().unwrap(), "first bit is true");
        assert!(it.all(|v| v == false), "every other value is false");

        let v = 0b10;
        let mut it = BitIterator::new(v);
        assert!(!it.next_back().unwrap(), "last bit is false");
        assert!(!it.next().unwrap(), "first bit is false");
        assert!(it.next().unwrap(), "first bit is true");
        assert!(it.all(|v| v == false), "every other value is false");
    }

    #[test]
    fn test_bit_iterator_skip() {
        let v = 0b1;
        let mut it = BitIterator::new(v).skip_front(1);
        assert!(it.all(|v| v == false), "every other value is false");

        let v = 0b10;
        let mut it = BitIterator::new(v).skip_front(1);
        assert!(it.next().unwrap(), "first bit is true");
        assert!(it.all(|v| v == false), "every other value is false");

        let high_bit = 0b1 << (u64::BITS - 1);
        let mut it = BitIterator::new(high_bit).skip_back(1);
        assert!(it.all(|v| v == false), "every other value is false");

        let v = 0b10;
        let mut it = BitIterator::new(v).skip_back(1);
        assert!(!it.next_back().unwrap(), "last bit is false");
        assert!(!it.next().unwrap(), "first bit is false");
        assert!(it.next().unwrap(), "first bit is true");
        assert!(it.all(|v| v == false), "every other value is false");
    }

    #[test]
    fn test_skip_all() {
        let v = 0b1;
        let mut it = BitIterator::new(v).skip_front(u64::BITS);
        assert!(it.next().is_none(), "iterator must be exhausted");

        let v = 0b1;
        let mut it = BitIterator::new(v).skip_back(u64::BITS);
        assert!(it.next().is_none(), "iterator must be exhausted");
    }

    #[test]
    fn test_bit_iterator_count_bits_after_skip() {
        let any_value = 0b1;
        for s in 0..u64::BITS {
            let it = BitIterator::new(any_value).skip_front(s);
            assert_eq!(it.count() as u32, u64::BITS - s)
        }

        let any_value = 0b1;
        for s in 1..u64::BITS {
            let it = BitIterator::new(any_value).skip_back(s);
            assert_eq!(it.count() as u32, u64::BITS - s)
        }
    }

    #[test]
    fn test_bit_iterator_rev() {
        let v = 0b1;
        let mut it = BitIterator::new(v).rev();
        assert!(it.nth(63).unwrap(), "the last value is true");
    }
}

// UTILITIES
// ===============================================================================================

fn bitmask(s: u32) -> u64 {
    match 1u64.checked_shl(s) {
        Some(r) => r - 1,
        None => u64::MAX,
    }
}
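A quick sketch of the iteration order of `BitIterator`, written as an extra test against the module above; the module path matches the `use crate::bit::BitIterator` import that appears in `src/merkle/index.rs` further down, and the values are illustrative only.

```rust
#[test]
fn bit_iterator_order_sketch() {
    use crate::bit::BitIterator;

    // bits are yielded least-significant first: 0b1011 -> true, true, false, true, then zeros
    let bits: Vec<bool> = BitIterator::new(0b1011).take(4).collect();
    assert_eq!(bits, vec![true, true, false, true]);

    // skip_front(2) marks the two lowest bits as visited, so iteration resumes at bit 2
    let bits: Vec<bool> = BitIterator::new(0b1011).skip_front(2).take(2).collect();
    assert_eq!(bits, vec![false, true]);
}
```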
Blake3 hashers:
@@ -56,13 +56,13 @@ impl<const N: usize> From<[u8; N]> for Blake3Digest<N> {

 impl<const N: usize> Serializable for Blake3Digest<N> {
     fn write_into<W: ByteWriter>(&self, target: &mut W) {
-        target.write_u8_slice(&self.0);
+        target.write_bytes(&self.0);
     }
 }

 impl<const N: usize> Deserializable for Blake3Digest<N> {
     fn read_from<R: ByteReader>(source: &mut R) -> Result<Self, DeserializationError> {
-        source.read_u8_array().map(Self)
+        source.read_array().map(Self)
     }
 }

@@ -78,9 +78,13 @@ impl<const N: usize> Digest for Blake3Digest<N> {
 // ================================================================================================

 /// 256-bit output blake3 hasher.
+#[derive(Debug, Copy, Clone, Eq, PartialEq)]
 pub struct Blake3_256;

 impl Hasher for Blake3_256 {
+    /// Blake3 collision resistance is 128-bits for 32-bytes output.
+    const COLLISION_RESISTANCE: u32 = 128;
+
     type Digest = Blake3Digest<32>;

     fn hash(bytes: &[u8]) -> Self::Digest {

@@ -138,9 +142,13 @@ impl Blake3_256 {
 // ================================================================================================

 /// 192-bit output blake3 hasher.
+#[derive(Debug, Copy, Clone, Eq, PartialEq)]
 pub struct Blake3_192;

 impl Hasher for Blake3_192 {
+    /// Blake3 collision resistance is 96-bits for 24-bytes output.
+    const COLLISION_RESISTANCE: u32 = 96;
+
     type Digest = Blake3Digest<24>;

     fn hash(bytes: &[u8]) -> Self::Digest {

@@ -198,9 +206,13 @@ impl Blake3_192 {
 // ================================================================================================

 /// 160-bit output blake3 hasher.
+#[derive(Debug, Copy, Clone, Eq, PartialEq)]
 pub struct Blake3_160;

 impl Hasher for Blake3_160 {
+    /// Blake3 collision resistance is 80-bits for 20-bytes output.
+    const COLLISION_RESISTANCE: u32 = 80;
+
     type Digest = Blake3Digest<20>;

     fn hash(bytes: &[u8]) -> Self::Digest {

@@ -278,7 +290,7 @@ where
     let digest = if Felt::IS_CANONICAL {
         blake3::hash(E::elements_as_bytes(elements))
     } else {
-        let base_elements = E::as_base_elements(elements);
+        let base_elements = E::slice_as_base_elements(elements);
         let blen = base_elements.len() << 3;

         let mut bytes = unsafe { uninit_vector(blen) };
Hash module re-exports:
@@ -1,5 +1,9 @@
 use super::{Felt, FieldElement, StarkField, ONE, ZERO};
-use winter_crypto::{Digest, ElementHasher, Hasher};

 pub mod blake;
 pub mod rpo;
+
+// RE-EXPORTS
+// ================================================================================================
+
+pub use winter_crypto::{Digest, ElementHasher, Hasher};
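A small, hypothetical sketch of what the re-export buys downstream users: the hashing traits can now be imported from `miden_crypto::hash` instead of from `winter-crypto` directly. The `Blake3_256` path and the input bytes are assumptions made for illustration.

```rust
// The Hasher and Digest traits come from miden_crypto::hash after this change.
use miden_crypto::hash::{blake::Blake3_256, Digest, Hasher};

fn main() {
    let digest = Blake3_256::hash(b"some input bytes");
    // 32 bytes of output for the 256-bit variant
    assert_eq!(digest.as_bytes().len(), 32);
}
```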
RPO digest (`RpoDigest`):
@@ -11,7 +11,7 @@ use core::{cmp::Ordering, ops::Deref};
 pub struct RpoDigest([Felt; DIGEST_SIZE]);

 impl RpoDigest {
-    pub fn new(value: [Felt; DIGEST_SIZE]) -> Self {
+    pub const fn new(value: [Felt; DIGEST_SIZE]) -> Self {
         Self(value)
     }

@@ -46,7 +46,7 @@ impl Digest for RpoDigest {

 impl Serializable for RpoDigest {
     fn write_into<W: ByteWriter>(&self, target: &mut W) {
-        target.write_u8_slice(&self.as_bytes());
+        target.write_bytes(&self.as_bytes());
     }
 }

@@ -73,12 +73,24 @@ impl From<[Felt; DIGEST_SIZE]> for RpoDigest {
     }
 }

+impl From<&RpoDigest> for [Felt; DIGEST_SIZE] {
+    fn from(value: &RpoDigest) -> Self {
+        value.0
+    }
+}
+
 impl From<RpoDigest> for [Felt; DIGEST_SIZE] {
     fn from(value: RpoDigest) -> Self {
         value.0
     }
 }

+impl From<&RpoDigest> for [u8; 32] {
+    fn from(value: &RpoDigest) -> Self {
+        value.as_bytes()
+    }
+}
+
 impl From<RpoDigest> for [u8; 32] {
     fn from(value: RpoDigest) -> Self {
         value.as_bytes()
RPO hasher (`Rpo256`):
@@ -88,67 +88,80 @@ const INV_ALPHA: u64 = 10540996611094048183;
 /// to deserialize them into field elements and then hash them using
 /// [hash_elements()](Rpo256::hash_elements) function rather then hashing the serialized bytes
 /// using [hash()](Rpo256::hash) function.
+#[derive(Debug, Copy, Clone, Eq, PartialEq)]
 pub struct Rpo256();

 impl Hasher for Rpo256 {
+    /// Rpo256 collision resistance is the same as the security level, that is 128-bits.
+    ///
+    /// #### Collision resistance
+    ///
+    /// However, our setup of the capacity registers might drop it to 126.
+    ///
+    /// Related issue: [#69](https://github.com/0xPolygonMiden/crypto/issues/69)
+    const COLLISION_RESISTANCE: u32 = 128;
+
     type Digest = RpoDigest;

     fn hash(bytes: &[u8]) -> Self::Digest {
-        // compute the number of elements required to represent the string; we will be processing
-        // the string in BINARY_CHUNK_SIZE-byte chunks, thus the number of elements will be equal
-        // to the number of such chunks (including a potential partial chunk at the end).
-        let num_elements = if bytes.len() % BINARY_CHUNK_SIZE == 0 {
-            bytes.len() / BINARY_CHUNK_SIZE
-        } else {
-            bytes.len() / BINARY_CHUNK_SIZE + 1
-        };
-
-        // initialize state to all zeros, except for the first element of the capacity part, which
-        // is set to the number of elements to be hashed. this is done so that adding zero elements
-        // at the end of the list always results in a different hash.
+        // initialize the state with zeroes
         let mut state = [ZERO; STATE_WIDTH];
-        state[CAPACITY_RANGE.start] = Felt::new(num_elements as u64);

-        // break the string into BINARY_CHUNK_SIZE-byte chunks, convert each chunk into a field
-        // element, and absorb the element into the rate portion of the state. we use
-        // BINARY_CHUNK_SIZE-byte chunks because every BINARY_CHUNK_SIZE-byte chunk is guaranteed
-        // to map to some field element.
-        let mut i = 0;
-        let mut buf = [0_u8; 8];
-        for chunk in bytes.chunks(BINARY_CHUNK_SIZE) {
-            if i < num_elements - 1 {
-                buf[..BINARY_CHUNK_SIZE].copy_from_slice(chunk);
-            } else {
-                // if we are dealing with the last chunk, it may be smaller than BINARY_CHUNK_SIZE
-                // bytes long, so we need to handle it slightly differently. We also append a byte
-                // with value 1 to the end of the string; this pads the string in such a way that
-                // adding trailing zeros results in different hash
-                let chunk_len = chunk.len();
-                buf = [0_u8; 8];
-                buf[..chunk_len].copy_from_slice(chunk);
-                buf[chunk_len] = 1;
-            }
-
-            // convert the bytes into a field element and absorb it into the rate portion of the
-            // state; if the rate is filled up, apply the Rescue permutation and start absorbing
-            // again from zero index.
-            state[RATE_RANGE.start + i] = Felt::new(u64::from_le_bytes(buf));
-            i += 1;
-            if i % RATE_WIDTH == 0 {
-                Self::apply_permutation(&mut state);
-                i = 0;
-            }
+        // set the capacity (first element) to a flag on whether or not the input length is evenly
+        // divided by the rate. this will prevent collisions between padded and non-padded inputs,
+        // and will rule out the need to perform an extra permutation in case of evenly divided
+        // inputs.
+        let is_rate_multiple = bytes.len() % RATE_WIDTH == 0;
+        if !is_rate_multiple {
+            state[CAPACITY_RANGE.start] = ONE;
         }

+        // initialize a buffer to receive the little-endian elements.
+        let mut buf = [0_u8; 8];
+
+        // iterate the chunks of bytes, creating a field element from each chunk and copying it
+        // into the state.
+        //
+        // every time the rate range is filled, a permutation is performed. if the final value of
+        // `i` is not zero, then the chunks count wasn't enough to fill the state range, and an
+        // additional permutation must be performed.
+        let i = bytes.chunks(BINARY_CHUNK_SIZE).fold(0, |i, chunk| {
+            // the last element of the iteration may or may not be a full chunk. if it's not, then
+            // we need to pad the remainder bytes of the chunk with zeroes, separated by a `1`.
+            // this will avoid collisions.
+            if chunk.len() == BINARY_CHUNK_SIZE {
+                buf[..BINARY_CHUNK_SIZE].copy_from_slice(chunk);
+            } else {
+                buf.fill(0);
+                buf[..chunk.len()].copy_from_slice(chunk);
+                buf[chunk.len()] = 1;
+            }
+
+            // set the current rate element to the input. since we take at most 7 bytes, we are
+            // guaranteed that the inputs data will fit into a single field element.
+            state[RATE_RANGE.start + i] = Felt::new(u64::from_le_bytes(buf));
+
+            // proceed filling the range. if it's full, then we apply a permutation and reset the
+            // counter to the beginning of the range.
+            if i == RATE_WIDTH - 1 {
+                Self::apply_permutation(&mut state);
+                0
+            } else {
+                i + 1
+            }
+        });
+
         // if we absorbed some elements but didn't apply a permutation to them (would happen when
-        // the number of elements is not a multiple of RATE_WIDTH), apply the RPO permutation.
-        // we don't need to apply any extra padding because we injected total number of elements
-        // in the input list into the capacity portion of the state during initialization.
-        if i > 0 {
+        // the number of elements is not a multiple of RATE_WIDTH), apply the RPO permutation. we
+        // don't need to apply any extra padding because the first capacity element containts a
+        // flag indicating whether the input is evenly divisible by the rate.
+        if i != 0 {
+            state[RATE_RANGE.start + i..RATE_RANGE.end].fill(ZERO);
+            state[RATE_RANGE.start + i] = ONE;
             Self::apply_permutation(&mut state);
         }

-        // return the first 4 elements of the state as hash result
+        // return the first 4 elements of the rate as hash result.
         RpoDigest::new(state[DIGEST_RANGE].try_into().unwrap())
     }

@@ -199,7 +212,7 @@ impl ElementHasher for Rpo256 {

     fn hash_elements<E: FieldElement<BaseField = Self::BaseField>>(elements: &[E]) -> Self::Digest {
         // convert the elements into a list of base field elements
-        let elements = E::as_base_elements(elements);
+        let elements = E::slice_as_base_elements(elements);

         // initialize state to all zeros, except for the first element of the capacity part, which
         // is set to 1 if the number of elements is not a multiple of RATE_WIDTH.
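To make the doc-comment guidance above concrete, here is a hedged sketch contrasting the two entry points. The element values are arbitrary, and the little-endian serialization is just one way to feed comparable data to the byte-oriented `hash()`; it is not part of the library's API.

```rust
use miden_crypto::hash::rpo::Rpo256;
use miden_crypto::hash::{Digest, ElementHasher, Hasher};
use miden_crypto::{Felt, StarkField};

fn main() {
    let elements = [Felt::new(1), Felt::new(2), Felt::new(3), Felt::new(4)];

    // preferred when the data already consists of field elements
    let from_elements = Rpo256::hash_elements(&elements[..]);

    // byte-oriented sponge over the little-endian serialization of the same values;
    // this goes through the 7-byte chunking and padding described above, so the two
    // digests are not interchangeable
    let mut bytes = Vec::new();
    for e in &elements {
        bytes.extend_from_slice(&e.as_int().to_le_bytes());
    }
    let from_bytes = Rpo256::hash(&bytes);

    assert_ne!(from_elements.as_bytes(), from_bytes.as_bytes());
}
```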
RPO hasher tests:
@@ -2,7 +2,9 @@ use super::{
     Felt, FieldElement, Hasher, Rpo256, RpoDigest, StarkField, ALPHA, INV_ALPHA, ONE, STATE_WIDTH,
     ZERO,
 };
+use crate::utils::collections::{BTreeSet, Vec};
 use core::convert::TryInto;
+use proptest::prelude::*;
 use rand_utils::rand_value;

 #[test]

@@ -193,6 +195,43 @@ fn hash_test_vectors() {
     }
 }

+#[test]
+fn sponge_bytes_with_remainder_length_wont_panic() {
+    // this test targets to assert that no panic will happen with the edge case of having an inputs
+    // with length that is not divisible by the used binary chunk size. 113 is a non-negligible
+    // input length that is prime; hence guaranteed to not be divisible by any choice of chunk
+    // size.
+    //
+    // this is a preliminary test to the fuzzy-stress of proptest.
+    Rpo256::hash(&vec![0; 113]);
+}
+
+#[test]
+fn sponge_collision_for_wrapped_field_element() {
+    let a = Rpo256::hash(&[0; 8]);
+    let b = Rpo256::hash(&Felt::MODULUS.to_le_bytes());
+    assert_ne!(a, b);
+}
+
+#[test]
+fn sponge_zeroes_collision() {
+    let mut zeroes = Vec::with_capacity(255);
+    let mut set = BTreeSet::new();
+    (0..255).for_each(|_| {
+        let hash = Rpo256::hash(&zeroes);
+        zeroes.push(0);
+        // panic if a collision was found
+        assert!(set.insert(hash));
+    });
+}
+
+proptest! {
+    #[test]
+    fn rpo256_wont_panic_with_arbitrary_input(ref vec in any::<Vec<u8>>()) {
+        Rpo256::hash(&vec);
+    }
+}
+
 const EXPECTED: [[Felt; 4]; 19] = [
     [
         Felt::new(1502364727743950833),
39
src/lib.rs
39
src/lib.rs
@@ -4,23 +4,17 @@
|
|||||||
#[cfg_attr(test, macro_use)]
|
#[cfg_attr(test, macro_use)]
|
||||||
extern crate alloc;
|
extern crate alloc;
|
||||||
|
|
||||||
|
mod bit;
|
||||||
pub mod hash;
|
pub mod hash;
|
||||||
pub mod merkle;
|
pub mod merkle;
|
||||||
|
pub mod utils;
|
||||||
|
|
||||||
// RE-EXPORTS
|
// RE-EXPORTS
|
||||||
// ================================================================================================
|
// ================================================================================================
|
||||||
|
|
||||||
pub use winter_crypto::{RandomCoin, RandomCoinError};
|
pub use winter_crypto::{RandomCoin, RandomCoinError};
|
||||||
|
|
||||||
pub use winter_math::{fields::f64::BaseElement as Felt, FieldElement, StarkField};
|
pub use winter_math::{fields::f64::BaseElement as Felt, FieldElement, StarkField};
|
||||||
|
|
||||||
pub mod utils {
|
|
||||||
pub use winter_utils::{
|
|
||||||
collections, string, uninit_vector, ByteReader, ByteWriter, Deserializable,
|
|
||||||
DeserializationError, Serializable, SliceReader,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
// TYPE ALIASES
|
// TYPE ALIASES
|
||||||
// ================================================================================================
|
// ================================================================================================
|
||||||
|
|
||||||
@@ -38,3 +32,32 @@ pub const ZERO: Felt = Felt::ZERO;
|
|||||||
|
|
||||||
/// Field element representing ONE in the Miden base filed.
|
/// Field element representing ONE in the Miden base filed.
|
||||||
pub const ONE: Felt = Felt::ONE;
|
pub const ONE: Felt = Felt::ONE;
|
||||||
|
|
||||||
|
// TESTS
|
||||||
|
// ================================================================================================
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
#[should_panic]
|
||||||
|
fn debug_assert_is_checked() {
|
||||||
|
// enforce the release checks to always have `RUSTFLAGS="-C debug-assertions"`.
|
||||||
|
//
|
||||||
|
// some upstream tests are performed with `debug_assert`, and we want to assert its correctness
|
||||||
|
// downstream.
|
||||||
|
//
|
||||||
|
// for reference, check
|
||||||
|
// https://github.com/0xPolygonMiden/miden-vm/issues/433
|
||||||
|
debug_assert!(false);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
#[should_panic]
|
||||||
|
#[allow(arithmetic_overflow)]
|
||||||
|
fn overflow_panics_for_test() {
|
||||||
|
// overflows might be disabled if tests are performed in release mode. these are critical,
|
||||||
|
// mandatory checks as overflows might be attack vectors.
|
||||||
|
//
|
||||||
|
// to enable overflow checks in release mode, ensure `RUSTFLAGS="-C overflow-checks"`
|
||||||
|
let a = 1_u64;
|
||||||
|
let b = 64;
|
||||||
|
assert_ne!(a << b, 0);
|
||||||
|
}
|
||||||
|
src/merkle/empty_roots.rs (new file, 1585 lines): file diff suppressed because it is too large
src/merkle/index.rs (new file, 143 lines)
@@ -0,0 +1,143 @@
|
|||||||
|
use super::{Felt, MerkleError, RpoDigest, StarkField};
|
||||||
|
use crate::bit::BitIterator;
|
||||||
|
|
||||||
|
// NODE INDEX
|
||||||
|
// ================================================================================================
|
||||||
|
|
||||||
|
/// A Merkle tree address to an arbitrary node.
|
||||||
|
///
|
||||||
|
/// The position is relative to a tree in level order, where for a given depth `d` elements are
|
||||||
|
/// numbered from $0..2^d$.
|
||||||
|
#[derive(Debug, Default, Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Hash)]
|
||||||
|
pub struct NodeIndex {
|
||||||
|
depth: u8,
|
||||||
|
value: u64,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl NodeIndex {
|
||||||
|
// CONSTRUCTORS
|
||||||
|
// --------------------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Creates a new node index.
|
||||||
|
pub const fn new(depth: u8, value: u64) -> Self {
|
||||||
|
Self { depth, value }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Creates a node index from a pair of field elements representing the depth and value.
|
||||||
|
///
|
||||||
|
/// # Errors
|
||||||
|
///
|
||||||
|
/// Will error if the `u64` representation of the depth doesn't fit a `u8`.
|
||||||
|
pub fn from_elements(depth: &Felt, value: &Felt) -> Result<Self, MerkleError> {
|
||||||
|
let depth = depth.as_int();
|
||||||
|
let depth = u8::try_from(depth).map_err(|_| MerkleError::DepthTooBig(depth))?;
|
||||||
|
let value = value.as_int();
|
||||||
|
Ok(Self::new(depth, value))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Creates a new node index pointing to the root of the tree.
|
||||||
|
pub const fn root() -> Self {
|
||||||
|
Self { depth: 0, value: 0 }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Mutates the instance and returns it, replacing the depth.
|
||||||
|
pub const fn with_depth(mut self, depth: u8) -> Self {
|
||||||
|
self.depth = depth;
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Computes the value of the sibling of the current node.
|
||||||
|
pub fn sibling(mut self) -> Self {
|
||||||
|
self.value ^= 1;
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
// PROVIDERS
|
||||||
|
// --------------------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Builds a node to be used as input of a hash function when computing a Merkle path.
|
||||||
|
///
|
||||||
|
/// Will evaluate the parity of the current instance to define the result.
|
||||||
|
pub const fn build_node(&self, slf: RpoDigest, sibling: RpoDigest) -> [RpoDigest; 2] {
|
||||||
|
if self.is_value_odd() {
|
||||||
|
[sibling, slf]
|
||||||
|
} else {
|
||||||
|
[slf, sibling]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns the scalar representation of the depth/value pair.
|
||||||
|
///
|
||||||
|
/// It is computed as `2^depth + value`.
|
||||||
|
pub const fn to_scalar_index(&self) -> u64 {
|
||||||
|
(1 << self.depth as u64) + self.value
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns the depth of the current instance.
|
||||||
|
pub const fn depth(&self) -> u8 {
|
||||||
|
self.depth
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns the value of this index.
|
||||||
|
pub const fn value(&self) -> u64 {
|
||||||
|
self.value
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns true if the current value fits the current depth for a binary tree.
|
||||||
|
pub const fn is_valid(&self) -> bool {
|
||||||
|
self.value < (1 << self.depth as u64)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns true if the current instance points to a right sibling node.
|
||||||
|
pub const fn is_value_odd(&self) -> bool {
|
||||||
|
(self.value & 1) == 1
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns `true` if the depth is `0`.
|
||||||
|
pub const fn is_root(&self) -> bool {
|
||||||
|
self.depth == 0
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns a bit iterator for the `value`.
|
||||||
|
///
|
||||||
|
/// Bits read from left to right represent which internal node's child should be visited to
/// arrive at the leaf. Read from right to left, each bit gives the position at which the hash of
/// the current element should be placed.
///
/// Additionally, the nodes that are not visited are the sibling values necessary for a Merkle
/// opening.
|
||||||
|
pub fn bit_iterator(&self) -> BitIterator {
|
||||||
|
let depth: u32 = self.depth.into();
|
||||||
|
BitIterator::new(self.value).skip_back(u64::BITS - depth)
|
||||||
|
}
|
||||||
|
|
||||||
|
// STATE MUTATORS
|
||||||
|
// --------------------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Traverse one level towards the root, decrementing the depth by `1`.
|
||||||
|
pub fn move_up(&mut self) -> &mut Self {
|
||||||
|
self.depth = self.depth.saturating_sub(1);
|
||||||
|
self.value >>= 1;
|
||||||
|
self
|
||||||
|
}
|
||||||
|
}
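
// A small illustrative sketch (added for clarity, not part of the original file); it relies only
// on the constructors and accessors defined in the `impl` block above.
#[test]
fn node_index_navigation_example() {
    // leaf 5 at depth 3, i.e. the tree has positions 0..8 at that depth
    let mut index = NodeIndex::new(3, 5);
    assert!(index.is_valid());
    assert_eq!(index.to_scalar_index(), (1 << 3) + 5);
    // the sibling of an odd value is the even value directly to its left
    assert_eq!(index.sibling().value(), 4);
    // moving up halves the value and decrements the depth
    index.move_up();
    assert_eq!((index.depth(), index.value()), (2, 2));
}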
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
use proptest::prelude::*;
|
||||||
|
|
||||||
|
proptest! {
|
||||||
|
#[test]
|
||||||
|
fn arbitrary_index_wont_panic_on_move_up(
|
||||||
|
depth in prop::num::u8::ANY,
|
||||||
|
value in prop::num::u64::ANY,
|
||||||
|
count in prop::num::u8::ANY,
|
||||||
|
) {
|
||||||
|
let mut index = NodeIndex::new(depth, value);
|
||||||
|
for _ in 0..count {
|
||||||
|
index.move_up();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,344 +0,0 @@
|
|||||||
use super::{BTreeMap, MerkleError, Rpo256, Vec, Word, ZERO};
|
|
||||||
|
|
||||||
// MERKLE PATH SET
|
|
||||||
// ================================================================================================
|
|
||||||
|
|
||||||
/// A set of Merkle paths.
|
|
||||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
|
||||||
pub struct MerklePathSet {
|
|
||||||
root: Word,
|
|
||||||
total_depth: u32,
|
|
||||||
paths: BTreeMap<u64, Vec<Word>>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl MerklePathSet {
|
|
||||||
// CONSTRUCTOR
|
|
||||||
// --------------------------------------------------------------------------------------------
|
|
||||||
|
|
||||||
/// Returns an empty MerklePathSet.
|
|
||||||
pub fn new(depth: u32) -> Result<Self, MerkleError> {
|
|
||||||
let root = [ZERO; 4];
|
|
||||||
let paths = BTreeMap::<u64, Vec<Word>>::new();
|
|
||||||
|
|
||||||
Ok(Self {
|
|
||||||
root,
|
|
||||||
total_depth: depth,
|
|
||||||
paths,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
// PUBLIC ACCESSORS
|
|
||||||
// --------------------------------------------------------------------------------------------
|
|
||||||
|
|
||||||
/// Adds the specified Merkle path to this [MerklePathSet]. The `index` and `value` parameters
|
|
||||||
/// specify the leaf node at which the path starts.
|
|
||||||
///
|
|
||||||
/// # Errors
|
|
||||||
/// Returns an error if:
|
|
||||||
/// - The specified index is not valid in the context of this Merkle path set (i.e., the index
|
|
||||||
/// implies a greater depth than is specified for this set).
|
|
||||||
/// - The specified path is not consistent with other paths in the set (i.e., resolves to a
|
|
||||||
/// different root).
|
|
||||||
pub fn add_path(
|
|
||||||
&mut self,
|
|
||||||
index: u64,
|
|
||||||
value: Word,
|
|
||||||
path: Vec<Word>,
|
|
||||||
) -> Result<(), MerkleError> {
|
|
||||||
let depth = (path.len() + 1) as u32;
|
|
||||||
if depth != self.total_depth {
|
|
||||||
return Err(MerkleError::InvalidDepth(self.total_depth, depth));
|
|
||||||
}
|
|
||||||
|
|
||||||
// Actual number of node in tree
|
|
||||||
let pos = 2u64.pow(self.total_depth) + index;
|
|
||||||
|
|
||||||
// Index of the leaf path in map. Paths of neighboring leaves are stored in one key-value pair
|
|
||||||
let half_pos = pos / 2;
|
|
||||||
|
|
||||||
let mut extended_path = path;
|
|
||||||
if is_even(pos) {
|
|
||||||
extended_path.insert(0, value);
|
|
||||||
} else {
|
|
||||||
extended_path.insert(1, value);
|
|
||||||
}
|
|
||||||
|
|
||||||
let root_of_current_path = compute_path_root(&extended_path, depth, index);
|
|
||||||
if self.root == [ZERO; 4] {
|
|
||||||
self.root = root_of_current_path;
|
|
||||||
} else if self.root != root_of_current_path {
|
|
||||||
return Err(MerkleError::InvalidPath(extended_path));
|
|
||||||
}
|
|
||||||
self.paths.insert(half_pos, extended_path);
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns the root to which all paths in this set resolve.
|
|
||||||
pub fn root(&self) -> Word {
|
|
||||||
self.root
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns the depth of the Merkle tree implied by the paths stored in this set.
|
|
||||||
///
|
|
||||||
/// Merkle tree of depth 1 has two leaves, depth 2 has four leaves etc.
|
|
||||||
pub fn depth(&self) -> u32 {
|
|
||||||
self.total_depth
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns a node at the specified index.
|
|
||||||
///
|
|
||||||
/// # Errors
|
|
||||||
/// Returns an error if:
|
|
||||||
/// * The specified index not valid for the depth of structure.
|
|
||||||
/// * Requested node does not exist in the set.
|
|
||||||
pub fn get_node(&self, depth: u32, index: u64) -> Result<Word, MerkleError> {
|
|
||||||
if index >= 2u64.pow(self.total_depth) {
|
|
||||||
return Err(MerkleError::InvalidIndex(self.total_depth, index));
|
|
||||||
}
|
|
||||||
if depth != self.total_depth {
|
|
||||||
return Err(MerkleError::InvalidDepth(self.total_depth, depth));
|
|
||||||
}
|
|
||||||
|
|
||||||
let pos = 2u64.pow(depth) + index;
|
|
||||||
let index = pos / 2;
|
|
||||||
|
|
||||||
match self.paths.get(&index) {
|
|
||||||
None => Err(MerkleError::NodeNotInSet(index)),
|
|
||||||
Some(path) => {
|
|
||||||
if is_even(pos) {
|
|
||||||
Ok(path[0])
|
|
||||||
} else {
|
|
||||||
Ok(path[1])
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns a Merkle path to the node at the specified index. The node itself is
|
|
||||||
/// not included in the path.
|
|
||||||
///
|
|
||||||
/// # Errors
|
|
||||||
/// Returns an error if:
|
|
||||||
/// * The specified index not valid for the depth of structure.
|
|
||||||
/// * Node of the requested path does not exist in the set.
|
|
||||||
pub fn get_path(&self, depth: u32, index: u64) -> Result<Vec<Word>, MerkleError> {
|
|
||||||
if index >= 2u64.pow(self.total_depth) {
|
|
||||||
return Err(MerkleError::InvalidIndex(self.total_depth, index));
|
|
||||||
}
|
|
||||||
if depth != self.total_depth {
|
|
||||||
return Err(MerkleError::InvalidDepth(self.total_depth, depth));
|
|
||||||
}
|
|
||||||
|
|
||||||
let pos = 2u64.pow(depth) + index;
|
|
||||||
let index = pos / 2;
|
|
||||||
|
|
||||||
match self.paths.get(&index) {
|
|
||||||
None => Err(MerkleError::NodeNotInSet(index)),
|
|
||||||
Some(path) => {
|
|
||||||
let mut local_path = path.clone();
|
|
||||||
if is_even(pos) {
|
|
||||||
local_path.remove(0);
|
|
||||||
Ok(local_path)
|
|
||||||
} else {
|
|
||||||
local_path.remove(1);
|
|
||||||
Ok(local_path)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Replaces the leaf at the specified index with the provided value.
|
|
||||||
///
|
|
||||||
/// # Errors
|
|
||||||
/// Returns an error if:
|
|
||||||
/// * Requested node does not exist in the set.
|
|
||||||
pub fn update_leaf(&mut self, index: u64, value: Word) -> Result<(), MerkleError> {
|
|
||||||
let depth = self.depth();
|
|
||||||
if index >= 2u64.pow(depth) {
|
|
||||||
return Err(MerkleError::InvalidIndex(depth, index));
|
|
||||||
}
|
|
||||||
let pos = 2u64.pow(depth) + index;
|
|
||||||
|
|
||||||
let path = match self.paths.get_mut(&(pos / 2)) {
|
|
||||||
None => return Err(MerkleError::NodeNotInSet(index)),
|
|
||||||
Some(path) => path,
|
|
||||||
};
|
|
||||||
|
|
||||||
// Fill old_hashes vector -----------------------------------------------------------------
|
|
||||||
let (old_hashes, _) = compute_path_trace(path, depth, index);
|
|
||||||
|
|
||||||
// Fill new_hashes vector -----------------------------------------------------------------
|
|
||||||
if is_even(pos) {
|
|
||||||
path[0] = value;
|
|
||||||
} else {
|
|
||||||
path[1] = value;
|
|
||||||
}
|
|
||||||
|
|
||||||
let (new_hashes, new_root) = compute_path_trace(path, depth, index);
|
|
||||||
self.root = new_root;
|
|
||||||
|
|
||||||
// update paths ---------------------------------------------------------------------------
|
|
||||||
for path in self.paths.values_mut() {
|
|
||||||
for i in (0..old_hashes.len()).rev() {
|
|
||||||
if path[i + 2] == old_hashes[i] {
|
|
||||||
path[i + 2] = new_hashes[i];
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// HELPER FUNCTIONS
|
|
||||||
// --------------------------------------------------------------------------------------------
|
|
||||||
|
|
||||||
fn is_even(pos: u64) -> bool {
|
|
||||||
pos & 1 == 0
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Calculates the hash of the parent node by two sibling ones
|
|
||||||
/// - node — current node
|
|
||||||
/// - node_pos — position of the current node
|
|
||||||
/// - sibling — neighboring vertex in the tree
|
|
||||||
fn calculate_parent_hash(node: Word, node_pos: u64, sibling: Word) -> Word {
|
|
||||||
if is_even(node_pos) {
|
|
||||||
Rpo256::merge(&[node.into(), sibling.into()]).into()
|
|
||||||
} else {
|
|
||||||
Rpo256::merge(&[sibling.into(), node.into()]).into()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns vector of hashes from current to the root
|
|
||||||
fn compute_path_trace(path: &[Word], depth: u32, index: u64) -> (Vec<Word>, Word) {
|
|
||||||
let mut pos = 2u64.pow(depth) + index;
|
|
||||||
|
|
||||||
let mut computed_hashes = Vec::<Word>::new();
|
|
||||||
|
|
||||||
let mut comp_hash = Rpo256::merge(&[path[0].into(), path[1].into()]).into();
|
|
||||||
|
|
||||||
if path.len() != 2 {
|
|
||||||
for path_hash in path.iter().skip(2) {
|
|
||||||
computed_hashes.push(comp_hash);
|
|
||||||
pos /= 2;
|
|
||||||
comp_hash = calculate_parent_hash(comp_hash, pos, *path_hash);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
(computed_hashes, comp_hash)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns hash of the root
|
|
||||||
fn compute_path_root(path: &[Word], depth: u32, index: u64) -> Word {
|
|
||||||
let mut pos = 2u64.pow(depth) + index;
|
|
||||||
|
|
||||||
// hash that is obtained after calculating the current hash and path hash
|
|
||||||
let mut comp_hash = Rpo256::merge(&[path[0].into(), path[1].into()]).into();
|
|
||||||
|
|
||||||
for path_hash in path.iter().skip(2) {
|
|
||||||
pos /= 2;
|
|
||||||
comp_hash = calculate_parent_hash(comp_hash, pos, *path_hash);
|
|
||||||
}
|
|
||||||
|
|
||||||
comp_hash
|
|
||||||
}
|
|
||||||
|
|
||||||
// TESTS
|
|
||||||
// ================================================================================================
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod tests {
|
|
||||||
use super::calculate_parent_hash;
|
|
||||||
use crate::merkle::int_to_node;
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn get_root() {
|
|
||||||
let leaf0 = int_to_node(0);
|
|
||||||
let leaf1 = int_to_node(1);
|
|
||||||
let leaf2 = int_to_node(2);
|
|
||||||
let leaf3 = int_to_node(3);
|
|
||||||
|
|
||||||
let parent0 = calculate_parent_hash(leaf0, 0, leaf1);
|
|
||||||
let parent1 = calculate_parent_hash(leaf2, 2, leaf3);
|
|
||||||
|
|
||||||
let root_exp = calculate_parent_hash(parent0, 0, parent1);
|
|
||||||
|
|
||||||
let mut set = super::MerklePathSet::new(3).unwrap();
|
|
||||||
|
|
||||||
set.add_path(0, leaf0, vec![leaf1, parent1]).unwrap();
|
|
||||||
|
|
||||||
assert_eq!(set.root(), root_exp);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn add_and_get_path() {
|
|
||||||
let path_6 = vec![int_to_node(7), int_to_node(45), int_to_node(123)];
|
|
||||||
let hash_6 = int_to_node(6);
|
|
||||||
let index = 6u64;
|
|
||||||
let depth = 4u32;
|
|
||||||
let mut set = super::MerklePathSet::new(depth).unwrap();
|
|
||||||
|
|
||||||
set.add_path(index, hash_6, path_6.clone()).unwrap();
|
|
||||||
let stored_path_6 = set.get_path(depth, index).unwrap();
|
|
||||||
|
|
||||||
assert_eq!(path_6, stored_path_6);
|
|
||||||
assert!(set.get_path(depth, 15u64).is_err())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn get_node() {
|
|
||||||
let path_6 = vec![int_to_node(7), int_to_node(45), int_to_node(123)];
|
|
||||||
let hash_6 = int_to_node(6);
|
|
||||||
let index = 6u64;
|
|
||||||
let depth = 4u32;
|
|
||||||
let mut set = super::MerklePathSet::new(depth).unwrap();
|
|
||||||
|
|
||||||
set.add_path(index, hash_6, path_6).unwrap();
|
|
||||||
|
|
||||||
assert_eq!(int_to_node(6u64), set.get_node(depth, index).unwrap());
|
|
||||||
assert!(set.get_node(depth, 15u64).is_err());
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn update_leaf() {
|
|
||||||
let hash_4 = int_to_node(4);
|
|
||||||
let hash_5 = int_to_node(5);
|
|
||||||
let hash_6 = int_to_node(6);
|
|
||||||
let hash_7 = int_to_node(7);
|
|
||||||
let hash_45 = calculate_parent_hash(hash_4, 12u64, hash_5);
|
|
||||||
let hash_67 = calculate_parent_hash(hash_6, 14u64, hash_7);
|
|
||||||
|
|
||||||
let hash_0123 = int_to_node(123);
|
|
||||||
|
|
||||||
let path_6 = vec![hash_7, hash_45, hash_0123];
|
|
||||||
let path_5 = vec![hash_4, hash_67, hash_0123];
|
|
||||||
let path_4 = vec![hash_5, hash_67, hash_0123];
|
|
||||||
|
|
||||||
let index_6 = 6u64;
|
|
||||||
let index_5 = 5u64;
|
|
||||||
let index_4 = 4u64;
|
|
||||||
let depth = 4u32;
|
|
||||||
let mut set = super::MerklePathSet::new(depth).unwrap();
|
|
||||||
|
|
||||||
set.add_path(index_6, hash_6, path_6).unwrap();
|
|
||||||
set.add_path(index_5, hash_5, path_5).unwrap();
|
|
||||||
set.add_path(index_4, hash_4, path_4).unwrap();
|
|
||||||
|
|
||||||
let new_hash_6 = int_to_node(100);
|
|
||||||
let new_hash_5 = int_to_node(55);
|
|
||||||
|
|
||||||
set.update_leaf(index_6, new_hash_6).unwrap();
|
|
||||||
let new_path_4 = set.get_path(depth, index_4).unwrap();
|
|
||||||
let new_hash_67 = calculate_parent_hash(new_hash_6, 14u64, hash_7);
|
|
||||||
assert_eq!(new_hash_67, new_path_4[1]);
|
|
||||||
|
|
||||||
set.update_leaf(index_5, new_hash_5).unwrap();
|
|
||||||
let new_path_4 = set.get_path(depth, index_4).unwrap();
|
|
||||||
let new_path_6 = set.get_path(depth, index_6).unwrap();
|
|
||||||
let new_hash_45 = calculate_parent_hash(new_hash_5, 13u64, hash_4);
|
|
||||||
assert_eq!(new_hash_45, new_path_6[1]);
|
|
||||||
assert_eq!(new_hash_5, new_path_4[0]);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,6 +1,9 @@
|
|||||||
use super::{Felt, MerkleError, Rpo256, RpoDigest, Vec, Word};
|
use super::{Felt, MerkleError, MerklePath, NodeIndex, Rpo256, RpoDigest, Vec, Word};
|
||||||
use crate::{utils::uninit_vector, FieldElement};
|
use crate::{
|
||||||
use core::slice;
|
utils::{string::String, uninit_vector, word_to_hex},
|
||||||
|
FieldElement,
|
||||||
|
};
|
||||||
|
use core::{fmt, slice};
|
||||||
use winter_math::log2;
|
use winter_math::log2;
|
||||||
|
|
||||||
// MERKLE TREE
|
// MERKLE TREE
|
||||||
@@ -9,7 +12,7 @@ use winter_math::log2;
|
|||||||
/// A fully-balanced binary Merkle tree (i.e., a tree where the number of leaves is a power of two).
|
/// A fully-balanced binary Merkle tree (i.e., a tree where the number of leaves is a power of two).
|
||||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||||
pub struct MerkleTree {
|
pub struct MerkleTree {
|
||||||
nodes: Vec<Word>,
|
pub(crate) nodes: Vec<Word>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl MerkleTree {
|
impl MerkleTree {
|
||||||
@@ -22,7 +25,7 @@ impl MerkleTree {
|
|||||||
pub fn new(leaves: Vec<Word>) -> Result<Self, MerkleError> {
|
pub fn new(leaves: Vec<Word>) -> Result<Self, MerkleError> {
|
||||||
let n = leaves.len();
|
let n = leaves.len();
|
||||||
if n <= 1 {
|
if n <= 1 {
|
||||||
return Err(MerkleError::DepthTooSmall(n as u32));
|
return Err(MerkleError::DepthTooSmall(n as u8));
|
||||||
} else if !n.is_power_of_two() {
|
} else if !n.is_power_of_two() {
|
||||||
return Err(MerkleError::NumLeavesNotPowerOfTwo(n));
|
return Err(MerkleError::NumLeavesNotPowerOfTwo(n));
|
||||||
}
|
}
|
||||||
@@ -35,12 +38,14 @@ impl MerkleTree {
|
|||||||
nodes[n..].copy_from_slice(&leaves);
|
nodes[n..].copy_from_slice(&leaves);
|
||||||
|
|
||||||
// re-interpret nodes as an array of two nodes fused together
|
// re-interpret nodes as an array of two nodes fused together
|
||||||
let two_nodes =
|
// Safety: `nodes` will never move here as it is not bound to an external lifetime (i.e.
|
||||||
unsafe { slice::from_raw_parts(nodes.as_ptr() as *const [RpoDigest; 2], n) };
|
// `self`).
|
||||||
|
let ptr = nodes.as_ptr() as *const [RpoDigest; 2];
|
||||||
|
let pairs = unsafe { slice::from_raw_parts(ptr, n) };
|
||||||
|
|
||||||
// calculate all internal tree nodes
|
// calculate all internal tree nodes
|
||||||
for i in (1..n).rev() {
|
for i in (1..n).rev() {
|
||||||
nodes[i] = Rpo256::merge(&two_nodes[i]).into();
|
nodes[i] = Rpo256::merge(&pairs[i]).into();
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(Self { nodes })
|
Ok(Self { nodes })
|
||||||
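// Layout note (added for clarity, not part of the original diff): `nodes` uses the classic
// implicit binary-heap layout. `nodes[0]` is unused, `nodes[1]` is the root, and the children of
// node `i` live at `2 * i` and `2 * i + 1`. This is why the constructor above can reinterpret the
// buffer as `[RpoDigest; 2]` pairs and fill the internal nodes from the back: `pairs[i]` is
// exactly the pair of children of node `i`. For example, with 4 leaves the leaves occupy
// `nodes[4..8]` and the root is `Rpo256::merge(&pairs[1])`.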
@@ -57,93 +62,159 @@ impl MerkleTree {
|
|||||||
/// Returns the depth of this Merkle tree.
|
/// Returns the depth of this Merkle tree.
|
||||||
///
|
///
|
||||||
/// Merkle tree of depth 1 has two leaves, depth 2 has four leaves etc.
|
/// Merkle tree of depth 1 has two leaves, depth 2 has four leaves etc.
|
||||||
pub fn depth(&self) -> u32 {
|
pub fn depth(&self) -> u8 {
|
||||||
log2(self.nodes.len() / 2)
|
log2(self.nodes.len() / 2) as u8
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Returns a node at the specified depth and index.
|
/// Returns a node at the specified depth and index value.
|
||||||
///
|
///
|
||||||
/// # Errors
|
/// # Errors
|
||||||
/// Returns an error if:
|
/// Returns an error if:
|
||||||
/// * The specified depth is greater than the depth of the tree.
|
/// * The specified depth is greater than the depth of the tree.
|
||||||
/// * The specified index not valid for the specified depth.
|
/// * The specified index is not valid for the specified depth.
|
||||||
pub fn get_node(&self, depth: u32, index: u64) -> Result<Word, MerkleError> {
|
pub fn get_node(&self, index: NodeIndex) -> Result<Word, MerkleError> {
|
||||||
if depth == 0 {
|
if index.is_root() {
|
||||||
return Err(MerkleError::DepthTooSmall(depth));
|
return Err(MerkleError::DepthTooSmall(index.depth()));
|
||||||
} else if depth > self.depth() {
|
} else if index.depth() > self.depth() {
|
||||||
return Err(MerkleError::DepthTooBig(depth));
|
return Err(MerkleError::DepthTooBig(index.depth() as u64));
|
||||||
}
|
} else if !index.is_valid() {
|
||||||
if index >= 2u64.pow(depth) {
|
return Err(MerkleError::InvalidIndex(index));
|
||||||
return Err(MerkleError::InvalidIndex(depth, index));
|
|
||||||
}
|
}
|
||||||
|
|
||||||
let pos = 2_usize.pow(depth) + (index as usize);
|
let pos = index.to_scalar_index() as usize;
|
||||||
Ok(self.nodes[pos])
|
Ok(self.nodes[pos])
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Returns a Merkle path to the node at the specified depth and index. The node itself is
|
/// Returns a Merkle path to the node at the specified depth and index value. The node itself
|
||||||
/// not included in the path.
|
/// is not included in the path.
|
||||||
///
|
///
|
||||||
/// # Errors
|
/// # Errors
|
||||||
/// Returns an error if:
|
/// Returns an error if:
|
||||||
/// * The specified depth is greater than the depth of the tree.
|
/// * The specified depth is greater than the depth of the tree.
|
||||||
/// * The specified index not valid for the specified depth.
|
/// * The specified value is not valid for the specified depth.
|
||||||
pub fn get_path(&self, depth: u32, index: u64) -> Result<Vec<Word>, MerkleError> {
|
pub fn get_path(&self, mut index: NodeIndex) -> Result<MerklePath, MerkleError> {
|
||||||
if depth == 0 {
|
if index.is_root() {
|
||||||
return Err(MerkleError::DepthTooSmall(depth));
|
return Err(MerkleError::DepthTooSmall(index.depth()));
|
||||||
} else if depth > self.depth() {
|
} else if index.depth() > self.depth() {
|
||||||
return Err(MerkleError::DepthTooBig(depth));
|
return Err(MerkleError::DepthTooBig(index.depth() as u64));
|
||||||
}
|
} else if !index.is_valid() {
|
||||||
if index >= 2u64.pow(depth) {
|
return Err(MerkleError::InvalidIndex(index));
|
||||||
return Err(MerkleError::InvalidIndex(depth, index));
|
|
||||||
}
|
}
|
||||||
|
|
||||||
let mut path = Vec::with_capacity(depth as usize);
|
// TODO should we create a helper in `NodeIndex` that will encapsulate traversal to root so
|
||||||
let mut pos = 2_usize.pow(depth) + (index as usize);
|
// we always use inlined `for` instead of `while`? the reason to use `for` is because it's
|
||||||
|
// easier for the compiler to vectorize.
|
||||||
while pos > 1 {
|
let mut path = Vec::with_capacity(index.depth() as usize);
|
||||||
path.push(self.nodes[pos ^ 1]);
|
for _ in 0..index.depth() {
|
||||||
pos >>= 1;
|
let sibling = index.sibling().to_scalar_index() as usize;
|
||||||
|
path.push(self.nodes[sibling]);
|
||||||
|
index.move_up();
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(path)
|
debug_assert!(
|
||||||
|
index.is_root(),
|
||||||
|
"the path walk must go all the way to the root"
|
||||||
|
);
|
||||||
|
|
||||||
|
Ok(path.into())
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Replaces the leaf at the specified index with the provided value.
|
/// Replaces the leaf at the specified index with the provided value.
|
||||||
///
|
///
|
||||||
/// # Errors
|
/// # Errors
|
||||||
/// Returns an error if the specified index is not a valid leaf index for this tree.
|
/// Returns an error if the specified index value is not a valid leaf value for this tree.
|
||||||
pub fn update_leaf(&mut self, index: u64, value: Word) -> Result<(), MerkleError> {
|
pub fn update_leaf<'a>(&'a mut self, index_value: u64, value: Word) -> Result<(), MerkleError> {
|
||||||
let depth = self.depth();
|
let depth = self.depth();
|
||||||
if index >= 2u64.pow(depth) {
|
let mut index = NodeIndex::new(depth, index_value);
|
||||||
return Err(MerkleError::InvalidIndex(depth, index));
|
if !index.is_valid() {
|
||||||
|
return Err(MerkleError::InvalidIndex(index));
|
||||||
}
|
}
|
||||||
|
|
||||||
let mut index = 2usize.pow(depth) + index as usize;
|
// we don't need to copy the pairs into a new address as we are logically guaranteed to not
|
||||||
self.nodes[index] = value;
|
// overlap write instructions. however, it's important to bind the lifetime of pairs to
|
||||||
|
// `self.nodes` so the compiler will never move one without moving the other.
|
||||||
|
debug_assert_eq!(self.nodes.len() & 1, 0);
|
||||||
let n = self.nodes.len() / 2;
|
let n = self.nodes.len() / 2;
|
||||||
let two_nodes =
|
|
||||||
unsafe { slice::from_raw_parts(self.nodes.as_ptr() as *const [RpoDigest; 2], n) };
|
|
||||||
|
|
||||||
for _ in 0..depth {
|
// Safety: the length of nodes is guaranteed to contain pairs of words; hence, pairs of
|
||||||
index /= 2;
|
// digests. we explicitly bind the lifetime here so we add an extra layer of guarantee that
|
||||||
self.nodes[index] = Rpo256::merge(&two_nodes[index]).into();
|
// `self.nodes` will be moved only if `pairs` is moved as well. also, the algorithm is
|
||||||
|
// logically guaranteed to not overlap write positions as the write index is always half
|
||||||
|
// the index from which we read the digest input.
|
||||||
|
let ptr = self.nodes.as_ptr() as *const [RpoDigest; 2];
|
||||||
|
let pairs: &'a [[RpoDigest; 2]] = unsafe { slice::from_raw_parts(ptr, n) };
|
||||||
|
|
||||||
|
// update the current node
|
||||||
|
let pos = index.to_scalar_index() as usize;
|
||||||
|
self.nodes[pos] = value;
|
||||||
|
|
||||||
|
// traverse to the root, updating each node with the merged values of its parents
|
||||||
|
for _ in 0..index.depth() {
|
||||||
|
index.move_up();
|
||||||
|
let pos = index.to_scalar_index() as usize;
|
||||||
|
let value = Rpo256::merge(&pairs[pos]).into();
|
||||||
|
self.nodes[pos] = value;
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
}
|
}
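
// A usage sketch (added for illustration, not part of the original source): build a small tree,
// fetch an opening with `get_path`, and re-derive the root by folding the siblings upwards with
// `NodeIndex::build_node`. It relies only on items already imported in this module; the
// `leaves` argument is assumed to hold a power-of-two number of leaves (at least two).
#[cfg(test)]
fn get_path_roundtrip_sketch(leaves: Vec<Word>) -> Result<(), MerkleError> {
    let tree = MerkleTree::new(leaves.clone())?;
    let mut index = NodeIndex::new(tree.depth(), 1);
    let path = tree.get_path(index)?;

    // fold the siblings from the leaf up to the root
    let mut node = leaves[1];
    for sibling in path.iter() {
        let input = index.build_node(node.into(), (*sibling).into());
        node = Rpo256::merge(&input).into();
        index.move_up();
    }
    assert_eq!(node, tree.root());
    Ok(())
}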
|
||||||
|
|
||||||
|
/// Utility to visualize a [MerkleTree] in text.
|
||||||
|
pub fn tree_to_text(tree: &MerkleTree) -> Result<String, fmt::Error> {
|
||||||
|
let indent = " ";
|
||||||
|
let mut s = String::new();
|
||||||
|
s.push_str(&word_to_hex(&tree.root())?);
|
||||||
|
s.push('\n');
|
||||||
|
for d in 1..=tree.depth() {
|
||||||
|
let entries = 2u64.pow(d.into());
|
||||||
|
for i in 0..entries {
|
||||||
|
let index = NodeIndex::new(d, i);
|
||||||
|
|
||||||
|
let node = tree
|
||||||
|
.get_node(index)
|
||||||
|
.expect("The index must always be valid");
|
||||||
|
|
||||||
|
for _ in 0..d {
|
||||||
|
s.push_str(indent);
|
||||||
|
}
|
||||||
|
s.push_str(&word_to_hex(&node)?);
|
||||||
|
s.push('\n');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(s)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Utility to visualize a [MerklePath] in text.
|
||||||
|
pub fn path_to_text(path: &MerklePath) -> Result<String, fmt::Error> {
|
||||||
|
let mut s = String::new();
|
||||||
|
s.push('[');
|
||||||
|
|
||||||
|
for el in path.iter() {
|
||||||
|
s.push_str(&word_to_hex(el)?);
|
||||||
|
s.push_str(", ");
|
||||||
|
}
|
||||||
|
|
||||||
|
// remove the last ", "
|
||||||
|
if path.len() != 0 {
|
||||||
|
s.pop();
|
||||||
|
s.pop();
|
||||||
|
}
|
||||||
|
s.push(']');
|
||||||
|
|
||||||
|
Ok(s)
|
||||||
|
}
|
||||||
|
|
||||||
// TESTS
|
// TESTS
|
||||||
// ================================================================================================
|
// ================================================================================================
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests {
|
mod tests {
|
||||||
use super::{
|
use super::*;
|
||||||
super::{int_to_node, Rpo256},
|
use crate::merkle::int_to_node;
|
||||||
Word,
|
use core::mem::size_of;
|
||||||
};
|
use proptest::prelude::*;
|
||||||
|
|
||||||
const LEAVES4: [Word; 4] = [
|
const LEAVES4: [Word; 4] = [
|
||||||
int_to_node(1),
|
int_to_node(1),
|
||||||
@@ -187,16 +258,16 @@ mod tests {
|
|||||||
let tree = super::MerkleTree::new(LEAVES4.to_vec()).unwrap();
|
let tree = super::MerkleTree::new(LEAVES4.to_vec()).unwrap();
|
||||||
|
|
||||||
// check depth 2
|
// check depth 2
|
||||||
assert_eq!(LEAVES4[0], tree.get_node(2, 0).unwrap());
|
assert_eq!(LEAVES4[0], tree.get_node(NodeIndex::new(2, 0)).unwrap());
|
||||||
assert_eq!(LEAVES4[1], tree.get_node(2, 1).unwrap());
|
assert_eq!(LEAVES4[1], tree.get_node(NodeIndex::new(2, 1)).unwrap());
|
||||||
assert_eq!(LEAVES4[2], tree.get_node(2, 2).unwrap());
|
assert_eq!(LEAVES4[2], tree.get_node(NodeIndex::new(2, 2)).unwrap());
|
||||||
assert_eq!(LEAVES4[3], tree.get_node(2, 3).unwrap());
|
assert_eq!(LEAVES4[3], tree.get_node(NodeIndex::new(2, 3)).unwrap());
|
||||||
|
|
||||||
// check depth 1
|
// check depth 1
|
||||||
let (_, node2, node3) = compute_internal_nodes();
|
let (_, node2, node3) = compute_internal_nodes();
|
||||||
|
|
||||||
assert_eq!(node2, tree.get_node(1, 0).unwrap());
|
assert_eq!(node2, tree.get_node(NodeIndex::new(1, 0)).unwrap());
|
||||||
assert_eq!(node3, tree.get_node(1, 1).unwrap());
|
assert_eq!(node3, tree.get_node(NodeIndex::new(1, 1)).unwrap());
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
@@ -206,14 +277,26 @@ mod tests {
|
|||||||
let (_, node2, node3) = compute_internal_nodes();
|
let (_, node2, node3) = compute_internal_nodes();
|
||||||
|
|
||||||
// check depth 2
|
// check depth 2
|
||||||
assert_eq!(vec![LEAVES4[1], node3], tree.get_path(2, 0).unwrap());
|
assert_eq!(
|
||||||
assert_eq!(vec![LEAVES4[0], node3], tree.get_path(2, 1).unwrap());
|
vec![LEAVES4[1], node3],
|
||||||
assert_eq!(vec![LEAVES4[3], node2], tree.get_path(2, 2).unwrap());
|
*tree.get_path(NodeIndex::new(2, 0)).unwrap()
|
||||||
assert_eq!(vec![LEAVES4[2], node2], tree.get_path(2, 3).unwrap());
|
);
|
||||||
|
assert_eq!(
|
||||||
|
vec![LEAVES4[0], node3],
|
||||||
|
*tree.get_path(NodeIndex::new(2, 1)).unwrap()
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
vec![LEAVES4[3], node2],
|
||||||
|
*tree.get_path(NodeIndex::new(2, 2)).unwrap()
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
vec![LEAVES4[2], node2],
|
||||||
|
*tree.get_path(NodeIndex::new(2, 3)).unwrap()
|
||||||
|
);
|
||||||
|
|
||||||
// check depth 1
|
// check depth 1
|
||||||
assert_eq!(vec![node3], tree.get_path(1, 0).unwrap());
|
assert_eq!(vec![node3], *tree.get_path(NodeIndex::new(1, 0)).unwrap());
|
||||||
assert_eq!(vec![node2], tree.get_path(1, 1).unwrap());
|
assert_eq!(vec![node2], *tree.get_path(NodeIndex::new(1, 1)).unwrap());
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
@@ -221,25 +304,53 @@ mod tests {
|
|||||||
let mut tree = super::MerkleTree::new(LEAVES8.to_vec()).unwrap();
|
let mut tree = super::MerkleTree::new(LEAVES8.to_vec()).unwrap();
|
||||||
|
|
||||||
// update one leaf
|
// update one leaf
|
||||||
let index = 3;
|
let value = 3;
|
||||||
let new_node = int_to_node(9);
|
let new_node = int_to_node(9);
|
||||||
let mut expected_leaves = LEAVES8.to_vec();
|
let mut expected_leaves = LEAVES8.to_vec();
|
||||||
expected_leaves[index as usize] = new_node;
|
expected_leaves[value as usize] = new_node;
|
||||||
let expected_tree = super::MerkleTree::new(expected_leaves.clone()).unwrap();
|
let expected_tree = super::MerkleTree::new(expected_leaves.clone()).unwrap();
|
||||||
|
|
||||||
tree.update_leaf(index, new_node).unwrap();
|
tree.update_leaf(value, new_node).unwrap();
|
||||||
assert_eq!(expected_tree.nodes, tree.nodes);
|
assert_eq!(expected_tree.nodes, tree.nodes);
|
||||||
|
|
||||||
// update another leaf
|
// update another leaf
|
||||||
let index = 6;
|
let value = 6;
|
||||||
let new_node = int_to_node(10);
|
let new_node = int_to_node(10);
|
||||||
expected_leaves[index as usize] = new_node;
|
expected_leaves[value as usize] = new_node;
|
||||||
let expected_tree = super::MerkleTree::new(expected_leaves.clone()).unwrap();
|
let expected_tree = super::MerkleTree::new(expected_leaves.clone()).unwrap();
|
||||||
|
|
||||||
tree.update_leaf(index, new_node).unwrap();
|
tree.update_leaf(value, new_node).unwrap();
|
||||||
assert_eq!(expected_tree.nodes, tree.nodes);
|
assert_eq!(expected_tree.nodes, tree.nodes);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
proptest! {
|
||||||
|
#[test]
|
||||||
|
fn arbitrary_word_can_be_represented_as_digest(
|
||||||
|
a in prop::num::u64::ANY,
|
||||||
|
b in prop::num::u64::ANY,
|
||||||
|
c in prop::num::u64::ANY,
|
||||||
|
d in prop::num::u64::ANY,
|
||||||
|
) {
|
||||||
|
// this test will assert the memory equivalence between word and digest.
|
||||||
|
// it is used to safeguard the `[MerkleTree::update_leaf]` implementation
|
||||||
|
// that assumes this equivalence.
|
||||||
|
|
||||||
|
// build a word and copy it to another address as digest
|
||||||
|
let word = [Felt::new(a), Felt::new(b), Felt::new(c), Felt::new(d)];
|
||||||
|
let digest = RpoDigest::from(word);
|
||||||
|
|
||||||
|
// assert the addresses are different
|
||||||
|
let word_ptr = (&word).as_ptr() as *const u8;
|
||||||
|
let digest_ptr = (&digest).as_ptr() as *const u8;
|
||||||
|
assert_ne!(word_ptr, digest_ptr);
|
||||||
|
|
||||||
|
// compare the bytes representation
|
||||||
|
let word_bytes = unsafe { slice::from_raw_parts(word_ptr, size_of::<Word>()) };
|
||||||
|
let digest_bytes = unsafe { slice::from_raw_parts(digest_ptr, size_of::<RpoDigest>()) };
|
||||||
|
assert_eq!(word_bytes, digest_bytes);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// HELPER FUNCTIONS
|
// HELPER FUNCTIONS
|
||||||
// --------------------------------------------------------------------------------------------
|
// --------------------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
src/merkle/mmr/accumulator.rs (new file, 44 lines)
@@ -0,0 +1,44 @@
|
|||||||
|
use super::{super::Vec, MmrProof, Rpo256, Word};
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, PartialEq)]
|
||||||
|
pub struct MmrPeaks {
|
||||||
|
/// The number of leaves is used to differentiate accumulators that have the same number of
|
||||||
|
/// peaks. This happens because the number of peaks goes up and down as the structure is used,
/// causing existing trees to be merged and new ones to be created. As an example, every time
|
||||||
|
/// the MMR has a power-of-two number of leaves there is a single peak.
|
||||||
|
///
|
||||||
|
/// Every tree in the MMR forest has a distinct power-of-two size; this means only the rightmost
/// tree can have an odd number of elements (1). Additionally, this means that the bits in
/// `num_leaves` conveniently encode the size of each individual tree.
|
||||||
|
///
|
||||||
|
/// Examples:
|
||||||
|
///
|
||||||
|
/// Example 1: With 5 leaves, the binary representation is 0b101. The number of set bits equals
/// the number of peaks; in this case there are 2 peaks. The 0-indexed position of each set bit
/// determines the number of elements of a tree, so the rightmost tree has 2**0 elements and
/// the leftmost has 2**2.
|
||||||
|
///
|
||||||
|
/// Example 2: With 12 leaves, the binary is 0b1100. This case also has 2 peaks: the leftmost
/// tree has 2**3=8 elements, and the rightmost has 2**2=4 elements.
|
||||||
|
pub num_leaves: usize,
|
||||||
|
|
||||||
|
/// All the peaks of every tree in the MMR forest. The peaks are always ordered by number of
|
||||||
|
/// leaves, starting from the peak with most children, to the one with least.
|
||||||
|
///
|
||||||
|
/// Invariant: The length of `peaks` must be equal to the number of true bits in `num_leaves`.
|
||||||
|
pub peaks: Vec<Word>,
|
||||||
|
}
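
// Illustrative sketch (added for clarity, not part of the original file) of the bit-encoding
// described above: every set bit of `num_leaves` corresponds to one peak, and the bit position
// gives the number of leaves of that tree.
#[test]
fn peak_sizes_example() {
    // example 2 from the comment above: 12 leaves == 0b1100
    let num_leaves: usize = 0b1100;
    let mut tree_sizes = Vec::new();
    for bit in (0..usize::BITS).rev() {
        if num_leaves & (1usize << bit) != 0 {
            tree_sizes.push(1usize << bit);
        }
    }
    // two peaks: the leftmost tree has 2^3 leaves and the rightmost has 2^2
    assert_eq!(tree_sizes, [8, 4]);
}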
|
||||||
|
|
||||||
|
impl MmrPeaks {
|
||||||
|
/// Hashes the peaks sequentially, compacting them into a single digest.
|
||||||
|
pub fn hash_peaks(&self) -> Word {
|
||||||
|
Rpo256::hash_elements(&self.peaks.as_slice().concat()).into()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn verify(&self, value: Word, opening: MmrProof) -> bool {
|
||||||
|
let root = &self.peaks[opening.peak_index()];
|
||||||
|
opening
|
||||||
|
.merkle_path
|
||||||
|
.verify(opening.relative_pos() as u64, value, root)
|
||||||
|
}
|
||||||
|
}
|
||||||
src/merkle/mmr/bit.rs (new file, 46 lines)
@@ -0,0 +1,46 @@
|
|||||||
|
/// Iterates over the bits of a `usize` and yields the bit positions of the true bits.
|
||||||
|
pub struct TrueBitPositionIterator {
|
||||||
|
value: usize,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl TrueBitPositionIterator {
|
||||||
|
pub fn new(value: usize) -> TrueBitPositionIterator {
|
||||||
|
TrueBitPositionIterator { value }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Iterator for TrueBitPositionIterator {
|
||||||
|
type Item = u32;
|
||||||
|
|
||||||
|
fn next(&mut self) -> Option<<Self as Iterator>::Item> {
|
||||||
|
// trailing_zeros is computed with the intrinsic cttz. [Rust 1.67.0] x86 uses the `bsf`
|
||||||
|
// instruction. AArch64 uses the `rbit clz` instructions.
|
||||||
|
let zeros = self.value.trailing_zeros();
|
||||||
|
|
||||||
|
if zeros == usize::BITS {
|
||||||
|
None
|
||||||
|
} else {
|
||||||
|
let bit_position = zeros;
|
||||||
|
let mask = 1 << bit_position;
|
||||||
|
self.value ^= mask;
|
||||||
|
Some(bit_position)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl DoubleEndedIterator for TrueBitPositionIterator {
|
||||||
|
fn next_back(&mut self) -> Option<<Self as Iterator>::Item> {
|
||||||
|
// leading_zeros is computed with the intrinsic ctlz. [Rust 1.67.0] x86 uses the `bsr`
|
||||||
|
// instruction. AArch64 uses the `clz` instruction.
|
||||||
|
let zeros = self.value.leading_zeros();
|
||||||
|
|
||||||
|
if zeros == usize::BITS {
|
||||||
|
None
|
||||||
|
} else {
|
||||||
|
let bit_position = usize::BITS - zeros - 1;
|
||||||
|
let mask = 1 << bit_position;
|
||||||
|
self.value ^= mask;
|
||||||
|
Some(bit_position)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
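
// A small usage sketch (added for clarity, not part of the original file): forward iteration
// yields the set-bit positions from least to most significant, while `next_back` starts from the
// most significant set bit, which is how the MMR walks its peaks from the largest tree down.
#[test]
fn true_bit_positions_example() {
    let mut iter = TrueBitPositionIterator::new(0b1101);
    assert_eq!(iter.next(), Some(0));
    assert_eq!(iter.next(), Some(2));
    assert_eq!(iter.next(), Some(3));
    assert_eq!(iter.next(), None);

    let mut rev = TrueBitPositionIterator::new(0b1101);
    assert_eq!(rev.next_back(), Some(3));
    assert_eq!(rev.next_back(), Some(2));
    assert_eq!(rev.next_back(), Some(0));
    assert_eq!(rev.next_back(), None);
}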
|
||||||
src/merkle/mmr/full.rs (new file, 299 lines)
@@ -0,0 +1,299 @@
|
|||||||
|
//! A fully materialized Merkle mountain range (MMR).
|
||||||
|
//!
|
||||||
|
//! An MMR is a forest structure, i.e. it is an ordered set of disjoint rooted trees. The trees are
//! ordered by size, from the most to the least number of leaves. Every tree is a perfect binary
//! tree, meaning a tree has all its leaves at the same depth, and every inner node has a
//! branching factor of 2 with both children set.
//!
//! Additionally, the structure only supports adding leaves to the right-most tree, the one with
//! the least number of leaves. The structure preserves the invariant that each tree has a
//! different depth: as part of adding a new element to the forest, trees with the same depth are
//! merged, creating a new tree with depth d+1; this process continues until the property is
//! re-established.
|
||||||
|
use super::bit::TrueBitPositionIterator;
|
||||||
|
use super::{super::Vec, MmrPeaks, MmrProof, Rpo256, Word};
|
||||||
|
use crate::merkle::MerklePath;
|
||||||
|
use core::fmt::{Display, Formatter};
|
||||||
|
|
||||||
|
#[cfg(feature = "std")]
|
||||||
|
use std::error::Error;
|
||||||
|
|
||||||
|
// MMR
|
||||||
|
// ===============================================================================================
|
||||||
|
|
||||||
|
/// A fully materialized Merkle Mountain Range, with every tree in the forest and all their
|
||||||
|
/// elements.
|
||||||
|
///
|
||||||
|
/// Since this is a full representation of the MMR, elements are never removed and the MMR will
|
||||||
|
/// grow roughly `O(2n)` in number of leaf elements.
|
||||||
|
pub struct Mmr {
|
||||||
|
/// Refer to the `forest` method documentation for details of the semantics of this value.
|
||||||
|
pub(super) forest: usize,
|
||||||
|
|
||||||
|
/// Contains every element of the forest.
|
||||||
|
///
|
||||||
|
/// The trees are in postorder sequential representation. This representation allows for all
|
||||||
|
/// the elements of every tree in the forest to be stored in the same sequential buffer. It
|
||||||
|
/// also means new elements can be added to the forest, and merging of trees is very cheap with
|
||||||
|
/// no need to copy elements.
|
||||||
|
pub(super) nodes: Vec<Word>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, PartialEq, Eq, Copy, Clone)]
|
||||||
|
pub enum MmrError {
|
||||||
|
InvalidPosition(usize),
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Display for MmrError {
|
||||||
|
fn fmt(&self, fmt: &mut Formatter<'_>) -> Result<(), core::fmt::Error> {
|
||||||
|
match self {
|
||||||
|
MmrError::InvalidPosition(pos) => write!(fmt, "Mmr does not contain position {pos}"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(feature = "std")]
|
||||||
|
impl Error for MmrError {}
|
||||||
|
|
||||||
|
impl Default for Mmr {
|
||||||
|
fn default() -> Self {
|
||||||
|
Self::new()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Mmr {
|
||||||
|
// CONSTRUCTORS
|
||||||
|
// ============================================================================================
|
||||||
|
|
||||||
|
/// Constructor for an empty `Mmr`.
|
||||||
|
pub fn new() -> Mmr {
|
||||||
|
Mmr {
|
||||||
|
forest: 0,
|
||||||
|
nodes: Vec::new(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ACCESSORS
|
||||||
|
// ============================================================================================
|
||||||
|
|
||||||
|
/// Returns the MMR forest representation.
|
||||||
|
///
|
||||||
|
/// The forest value has the following interpretations:
|
||||||
|
/// - its value is the number of elements in the forest
|
||||||
|
/// - bit count corresponds to the number of trees in the forest
|
||||||
|
/// - each true bit position determines the depth of a tree in the forest
|
||||||
|
pub const fn forest(&self) -> usize {
|
||||||
|
self.forest
|
||||||
|
}
|
||||||
|
|
||||||
|
// FUNCTIONALITY
|
||||||
|
// ============================================================================================
|
||||||
|
|
||||||
|
/// Given a leaf position, returns the Merkle path to its corresponding peak. If the position
|
||||||
|
/// is greater than or equal to the tree size, an error is returned.
|
||||||
|
///
|
||||||
|
/// Note: The leaf position is the 0-indexed number corresponding to the order the leaves were
|
||||||
|
/// added, this corresponds to the MMR size _prior_ to adding the element. So the 1st element
|
||||||
|
/// has position 0, the second position 1, and so on.
|
||||||
|
pub fn open(&self, pos: usize) -> Result<MmrProof, MmrError> {
|
||||||
|
// find the target tree responsible for the MMR position
|
||||||
|
let tree_bit =
|
||||||
|
leaf_to_corresponding_tree(pos, self.forest).ok_or(MmrError::InvalidPosition(pos))?;
|
||||||
|
let forest_target = 1usize << tree_bit;
|
||||||
|
|
||||||
|
// isolate the trees before the target
|
||||||
|
let forest_before = self.forest & high_bitmask(tree_bit + 1);
|
||||||
|
let index_offset = nodes_in_forest(forest_before);
|
||||||
|
|
||||||
|
// find the root
|
||||||
|
let index = nodes_in_forest(forest_target) - 1;
|
||||||
|
|
||||||
|
// update the value position from global to the target tree
|
||||||
|
let relative_pos = pos - forest_before;
|
||||||
|
|
||||||
|
// collect the path and the final index of the target value
|
||||||
|
let (_, path) =
|
||||||
|
self.collect_merkle_path_and_value(tree_bit, relative_pos, index_offset, index);
|
||||||
|
|
||||||
|
Ok(MmrProof {
|
||||||
|
forest: self.forest,
|
||||||
|
position: pos,
|
||||||
|
merkle_path: MerklePath::new(path),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns the leaf value at position `pos`.
|
||||||
|
///
|
||||||
|
/// Note: The leaf position is the 0-indexed number corresponding to the order the leaves were
|
||||||
|
/// added, this corresponds to the MMR size _prior_ to adding the element. So the 1st element
|
||||||
|
/// has position 0, the second position 1, and so on.
|
||||||
|
pub fn get(&self, pos: usize) -> Result<Word, MmrError> {
|
||||||
|
// find the target tree responsible for the MMR position
|
||||||
|
let tree_bit =
|
||||||
|
leaf_to_corresponding_tree(pos, self.forest).ok_or(MmrError::InvalidPosition(pos))?;
|
||||||
|
let forest_target = 1usize << tree_bit;
|
||||||
|
|
||||||
|
// isolate the trees before the target
|
||||||
|
let forest_before = self.forest & high_bitmask(tree_bit + 1);
|
||||||
|
let index_offset = nodes_in_forest(forest_before);
|
||||||
|
|
||||||
|
// find the root
|
||||||
|
let index = nodes_in_forest(forest_target) - 1;
|
||||||
|
|
||||||
|
// update the value position from global to the target tree
|
||||||
|
let relative_pos = pos - forest_before;
|
||||||
|
|
||||||
|
// collect the path and the final index of the target value
|
||||||
|
let (value, _) =
|
||||||
|
self.collect_merkle_path_and_value(tree_bit, relative_pos, index_offset, index);
|
||||||
|
|
||||||
|
Ok(value)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Adds a new element to the MMR.
|
||||||
|
pub fn add(&mut self, el: Word) {
|
||||||
|
// Note: every node is also a tree of size 1, so adding an element to the forest creates a
// new rooted tree of size 1. This may temporarily break the invariant that every tree in the
// forest has a different size; the loop below eagerly merges trees of the same size and
// restores the invariant.
|
||||||
|
self.nodes.push(el);
|
||||||
|
|
||||||
|
let mut left_offset = self.nodes.len().saturating_sub(2);
|
||||||
|
let mut right = el;
|
||||||
|
let mut left_tree = 1;
|
||||||
|
while self.forest & left_tree != 0 {
|
||||||
|
right = *Rpo256::merge(&[self.nodes[left_offset].into(), right.into()]);
|
||||||
|
self.nodes.push(right);
|
||||||
|
|
||||||
|
left_offset = left_offset.saturating_sub(nodes_in_forest(left_tree));
|
||||||
|
left_tree <<= 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
self.forest += 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns an accumulator representing the current state of the MMR.
|
||||||
|
pub fn accumulator(&self) -> MmrPeaks {
|
||||||
|
let peaks: Vec<Word> = TrueBitPositionIterator::new(self.forest)
|
||||||
|
.rev()
|
||||||
|
.map(|bit| nodes_in_forest(1 << bit))
|
||||||
|
.scan(0, |offset, el| {
|
||||||
|
*offset += el;
|
||||||
|
Some(*offset)
|
||||||
|
})
|
||||||
|
.map(|offset| self.nodes[offset - 1])
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
MmrPeaks {
|
||||||
|
num_leaves: self.forest,
|
||||||
|
peaks,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// UTILITIES
|
||||||
|
// ============================================================================================
|
||||||
|
|
||||||
|
/// Internal function used to collect the Merkle path of a value.
|
||||||
|
fn collect_merkle_path_and_value(
|
||||||
|
&self,
|
||||||
|
tree_bit: u32,
|
||||||
|
relative_pos: usize,
|
||||||
|
index_offset: usize,
|
||||||
|
mut index: usize,
|
||||||
|
) -> (Word, Vec<Word>) {
|
||||||
|
// collect the Merkle path
|
||||||
|
let mut tree_depth = tree_bit as usize;
|
||||||
|
let mut path = Vec::with_capacity(tree_depth + 1);
|
||||||
|
while tree_depth > 0 {
|
||||||
|
let bit = relative_pos & tree_depth;
|
||||||
|
let right_offset = index - 1;
|
||||||
|
let left_offset = right_offset - nodes_in_forest(tree_depth);
|
||||||
|
|
||||||
|
// Elements to the right have a higher position because they were
|
||||||
|
// added later. Therefore when the bit is true the node's path is
|
||||||
|
// to the right, and its sibling to the left.
|
||||||
|
let sibling = if bit != 0 {
|
||||||
|
index = right_offset;
|
||||||
|
self.nodes[index_offset + left_offset]
|
||||||
|
} else {
|
||||||
|
index = left_offset;
|
||||||
|
self.nodes[index_offset + right_offset]
|
||||||
|
};
|
||||||
|
|
||||||
|
tree_depth >>= 1;
|
||||||
|
path.push(sibling);
|
||||||
|
}
|
||||||
|
|
||||||
|
// the rest of the codebase has the elements going from leaf to root, so adjust it here for
// ease of use and consistency
|
||||||
|
path.reverse();
|
||||||
|
|
||||||
|
let value = self.nodes[index_offset + index];
|
||||||
|
(value, path)
|
||||||
|
}
|
||||||
|
}
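
// End-to-end usage sketch (added for illustration, not part of the original file). It relies on
// the `Mmr`, `MmrPeaks` and `MmrProof` APIs defined in this module and on the `int_to_node` test
// helper used elsewhere in the merkle tests; the peak count follows from `forest = 7 = 0b111`,
// i.e. three trees of 4, 2 and 1 leaves.
#[cfg(test)]
mod usage_sketch {
    use super::*;
    use crate::merkle::int_to_node;

    #[test]
    fn open_and_verify() {
        let mmr: Mmr = (0..7u64).map(int_to_node).collect::<Vec<Word>>().into();
        let peaks = mmr.accumulator();
        assert_eq!(peaks.peaks.len(), 3);

        // open leaf 3 (the last leaf of the 4-leaf tree) and check it against its peak
        let proof = mmr.open(3).unwrap();
        assert!(peaks.verify(int_to_node(3), proof));
    }
}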
|
||||||
|
|
||||||
|
impl<T> From<T> for Mmr
|
||||||
|
where
|
||||||
|
T: IntoIterator<Item = Word>,
|
||||||
|
{
|
||||||
|
fn from(values: T) -> Self {
|
||||||
|
let mut mmr = Mmr::new();
|
||||||
|
for v in values {
|
||||||
|
mmr.add(v)
|
||||||
|
}
|
||||||
|
mmr
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// UTILITIES
|
||||||
|
// ===============================================================================================
|
||||||
|
|
||||||
|
/// Given a 0-indexed leaf position and the current forest, return the tree number responsible for
|
||||||
|
/// the position.
|
||||||
|
///
|
||||||
|
/// Note:
|
||||||
|
/// The result is a tree position `p`; it has the following interpretations: $p+1$ is the depth of
/// the tree, which corresponds to the size of a Merkle proof for that tree; $2^p$ is equal to the
/// number of leaves in this particular tree; and $2^{p+1}-1$ corresponds to the size of the tree.
|
||||||
|
pub(crate) const fn leaf_to_corresponding_tree(pos: usize, forest: usize) -> Option<u32> {
|
||||||
|
if pos >= forest {
|
||||||
|
None
|
||||||
|
} else {
|
||||||
|
// - each bit in the forest is a unique tree and the bit position its power-of-two size
|
||||||
|
// - each tree owns a consecutive range of positions equal to its size from left-to-right
|
||||||
|
// - this means the first tree owns from `0` up to the `2^k_0` first positions, where `k_0`
|
||||||
|
// is the highest true bit position, the second tree from `2^k_0 + 1` up to `2^k_1` where
|
||||||
|
// `k_1` is the second highest bit, and so on.
|
||||||
|
// - this means the highest bits work as a category marker, and the position is owned by
|
||||||
|
// the first tree which doesn't share a high bit with the position
|
||||||
|
let before = forest & pos;
|
||||||
|
let after = forest ^ before;
|
||||||
|
let tree = after.ilog2();
|
||||||
|
|
||||||
|
Some(tree)
|
||||||
|
}
|
||||||
|
}
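
// Worked example (added for clarity, not part of the original file): with `forest = 0b1011`
// (trees of 8, 2 and 1 leaves) and `pos = 9`, `before = 0b1011 & 0b1001 = 0b1001` and
// `after = 0b1011 ^ 0b1001 = 0b0010`, so the owning tree is `after.ilog2() == 1`, i.e. the
// 2-leaf tree that covers positions 8 and 9.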
|
||||||
|
|
||||||
|
/// Return a bitmask for the bits including and above the given position.
|
||||||
|
pub(crate) const fn high_bitmask(bit: u32) -> usize {
|
||||||
|
if bit > usize::BITS - 1 {
|
||||||
|
0
|
||||||
|
} else {
|
||||||
|
usize::MAX << bit
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Return the total number of nodes of a given forest
|
||||||
|
///
|
||||||
|
/// Panics:
|
||||||
|
///
|
||||||
|
/// This will panic if the forest has size greater than `usize::MAX / 2`
|
||||||
|
pub(crate) const fn nodes_in_forest(forest: usize) -> usize {
|
||||||
|
// - the size of a perfect binary tree is $2^{k+1}-1$ or $2*2^k-1$
|
||||||
|
// - the forest represents the sum of $2^k$ so a single multiplication is necessary
|
||||||
|
// - the number of `-1` is the same as the number of trees, which is the same as the number
|
||||||
|
// of bits set
|
||||||
|
let tree_count = forest.count_ones() as usize;
|
||||||
|
forest * 2 - tree_count
|
||||||
|
}
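
// Worked example (added for clarity, not part of the original file): `forest = 0b1010` holds two
// trees, with 8 and 2 leaves, whose sizes are 2*8 - 1 = 15 and 2*2 - 1 = 3 nodes respectively,
// so `nodes_in_forest(0b1010) == 0b1010 * 2 - 2 == 18`.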
|
||||||
src/merkle/mmr/mod.rs (new file, 15 lines)
@@ -0,0 +1,15 @@
mod accumulator;
mod bit;
mod full;
mod proof;

#[cfg(test)]
mod tests;

use super::{Rpo256, Word};

// REEXPORTS
// ================================================================================================
pub use accumulator::MmrPeaks;
pub use full::Mmr;
pub use proof::MmrProof;
||||||
src/merkle/mmr/proof.rs (new file, 33 lines)
@@ -0,0 +1,33 @@
|
|||||||
|
/// The representation of a single Merkle path.
|
||||||
|
use super::super::MerklePath;
|
||||||
|
use super::full::{high_bitmask, leaf_to_corresponding_tree};
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, PartialEq)]
|
||||||
|
pub struct MmrProof {
|
||||||
|
/// The state of the MMR when the MmrProof was created.
|
||||||
|
pub forest: usize,
|
||||||
|
|
||||||
|
/// The position of the leaf value on this MmrProof.
|
||||||
|
pub position: usize,
|
||||||
|
|
||||||
|
/// The Merkle opening, starting from the value's sibling up to and excluding the root of the
|
||||||
|
/// responsible tree.
|
||||||
|
pub merkle_path: MerklePath,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl MmrProof {
|
||||||
|
/// Converts the leaf global position into a local position that can be used to verify the
|
||||||
|
/// merkle_path.
|
||||||
|
pub fn relative_pos(&self) -> usize {
|
||||||
|
let tree_bit = leaf_to_corresponding_tree(self.position, self.forest)
|
||||||
|
.expect("position must be part of the forest");
|
||||||
|
let forest_before = self.forest & high_bitmask(tree_bit + 1);
|
||||||
|
self.position - forest_before
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn peak_index(&self) -> usize {
|
||||||
|
let root = leaf_to_corresponding_tree(self.position, self.forest)
|
||||||
|
.expect("position must be part of the forest");
|
||||||
|
(self.forest.count_ones() - root - 1) as usize
|
||||||
|
}
|
||||||
|
}
|
||||||
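The index arithmetic above can be hard to follow; the following standalone sketch (not part of the diff) replays `relative_pos` and `peak_index` by hand for a 7-leaf forest and global position 5:

fn main() {
    let forest: usize = 0b111; // trees of 4, 2 and 1 leaves
    let position: usize = 5; // the second leaf of the 2-leaf tree
    let tree_bit = 1u32; // what leaf_to_corresponding_tree(5, 0b111) returns

    // leaves owned by the larger trees to the left of the responsible tree
    let forest_before = forest & (usize::MAX << (tree_bit + 1)); // 0b100 = 4
    assert_eq!(position - forest_before, 1); // relative_pos: local index inside the 2-leaf tree

    // peaks are ordered from the largest tree to the smallest, so the 2-leaf tree is peak 1 of 3
    assert_eq!((forest.count_ones() - tree_bit - 1) as usize, 1); // peak_index
}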
440  src/merkle/mmr/tests.rs  Normal file
@@ -0,0 +1,440 @@
use super::bit::TrueBitPositionIterator;
use super::full::{high_bitmask, leaf_to_corresponding_tree, nodes_in_forest};
use super::{super::Vec, Mmr, Rpo256, Word};
use crate::merkle::{int_to_node, MerklePath};

#[test]
fn test_position_equal_or_higher_than_leafs_is_never_contained() {
    let empty_forest = 0;
    for pos in 1..1024 {
        // pos is an index, 0-based
        // tree is a length counter, 1-based
        // so a valid pos is always smaller than, never equal to, tree
        assert_eq!(leaf_to_corresponding_tree(pos, pos), None);
        assert_eq!(leaf_to_corresponding_tree(pos, pos - 1), None);
        // an empty forest has no trees, so no position is valid
        assert_eq!(leaf_to_corresponding_tree(pos, empty_forest), None);
    }
}

#[test]
fn test_position_zero_is_always_contained_by_the_highest_tree() {
    for leaves in 1..1024usize {
        let tree = leaves.ilog2();
        assert_eq!(leaf_to_corresponding_tree(0, leaves), Some(tree));
    }
}

#[test]
fn test_leaf_to_corresponding_tree() {
    assert_eq!(leaf_to_corresponding_tree(0, 0b0001), Some(0));
    assert_eq!(leaf_to_corresponding_tree(0, 0b0010), Some(1));
    assert_eq!(leaf_to_corresponding_tree(0, 0b0011), Some(1));
    assert_eq!(leaf_to_corresponding_tree(0, 0b1011), Some(3));

    // position one is always owned by the left-most tree
    assert_eq!(leaf_to_corresponding_tree(1, 0b0010), Some(1));
    assert_eq!(leaf_to_corresponding_tree(1, 0b0011), Some(1));
    assert_eq!(leaf_to_corresponding_tree(1, 0b1011), Some(3));

    // position two starts as its own root, and then it is merged with the left-most tree
    assert_eq!(leaf_to_corresponding_tree(2, 0b0011), Some(0));
    assert_eq!(leaf_to_corresponding_tree(2, 0b0100), Some(2));
    assert_eq!(leaf_to_corresponding_tree(2, 0b1011), Some(3));

    // position three is merged on the left-most tree
    assert_eq!(leaf_to_corresponding_tree(3, 0b0011), None);
    assert_eq!(leaf_to_corresponding_tree(3, 0b0100), Some(2));
    assert_eq!(leaf_to_corresponding_tree(3, 0b1011), Some(3));

    assert_eq!(leaf_to_corresponding_tree(4, 0b0101), Some(0));
    assert_eq!(leaf_to_corresponding_tree(4, 0b0110), Some(1));
    assert_eq!(leaf_to_corresponding_tree(4, 0b0111), Some(1));
    assert_eq!(leaf_to_corresponding_tree(4, 0b1000), Some(3));

    assert_eq!(leaf_to_corresponding_tree(12, 0b01101), Some(0));
    assert_eq!(leaf_to_corresponding_tree(12, 0b01110), Some(1));
    assert_eq!(leaf_to_corresponding_tree(12, 0b01111), Some(1));
    assert_eq!(leaf_to_corresponding_tree(12, 0b10000), Some(4));
}

#[test]
fn test_high_bitmask() {
    assert_eq!(high_bitmask(0), usize::MAX);
    assert_eq!(high_bitmask(1), usize::MAX << 1);
    assert_eq!(high_bitmask(usize::BITS - 2), 0b11usize.rotate_right(2));
    assert_eq!(high_bitmask(usize::BITS - 1), 0b1usize.rotate_right(1));
    assert_eq!(high_bitmask(usize::BITS), 0, "overflow should be handled");
}

#[test]
fn test_nodes_in_forest() {
    assert_eq!(nodes_in_forest(0b0000), 0);
    assert_eq!(nodes_in_forest(0b0001), 1);
    assert_eq!(nodes_in_forest(0b0010), 3);
    assert_eq!(nodes_in_forest(0b0011), 4);
    assert_eq!(nodes_in_forest(0b0100), 7);
    assert_eq!(nodes_in_forest(0b0101), 8);
    assert_eq!(nodes_in_forest(0b0110), 10);
    assert_eq!(nodes_in_forest(0b0111), 11);
    assert_eq!(nodes_in_forest(0b1000), 15);
    assert_eq!(nodes_in_forest(0b1001), 16);
    assert_eq!(nodes_in_forest(0b1010), 18);
    assert_eq!(nodes_in_forest(0b1011), 19);
}

#[test]
fn test_nodes_in_forest_single_bit() {
    assert_eq!(nodes_in_forest(2usize.pow(0)), 2usize.pow(1) - 1);
    assert_eq!(nodes_in_forest(2usize.pow(1)), 2usize.pow(2) - 1);
    assert_eq!(nodes_in_forest(2usize.pow(2)), 2usize.pow(3) - 1);
    assert_eq!(nodes_in_forest(2usize.pow(3)), 2usize.pow(4) - 1);

    for bit in 0..(usize::BITS - 1) {
        let size = 2usize.pow(bit + 1) - 1;
        assert_eq!(nodes_in_forest(1usize << bit), size);
    }
}

const LEAVES: [Word; 7] = [
    int_to_node(0),
    int_to_node(1),
    int_to_node(2),
    int_to_node(3),
    int_to_node(4),
    int_to_node(5),
    int_to_node(6),
];

#[test]
fn test_mmr_simple() {
    let mut postorder = Vec::new();
    postorder.push(LEAVES[0]);
    postorder.push(LEAVES[1]);
    postorder.push(*Rpo256::hash_elements(&[LEAVES[0], LEAVES[1]].concat()));
    postorder.push(LEAVES[2]);
    postorder.push(LEAVES[3]);
    postorder.push(*Rpo256::hash_elements(&[LEAVES[2], LEAVES[3]].concat()));
    postorder.push(*Rpo256::hash_elements(&[postorder[2], postorder[5]].concat()));
    postorder.push(LEAVES[4]);
    postorder.push(LEAVES[5]);
    postorder.push(*Rpo256::hash_elements(&[LEAVES[4], LEAVES[5]].concat()));
    postorder.push(LEAVES[6]);

    let mut mmr = Mmr::new();
    assert_eq!(mmr.forest(), 0);
    assert_eq!(mmr.nodes.len(), 0);

    mmr.add(LEAVES[0]);
    assert_eq!(mmr.forest(), 1);
    assert_eq!(mmr.nodes.len(), 1);
    assert_eq!(mmr.nodes.as_slice(), &postorder[0..mmr.nodes.len()]);

    let acc = mmr.accumulator();
    assert_eq!(acc.num_leaves, 1);
    assert_eq!(acc.peaks, &[postorder[0]]);

    mmr.add(LEAVES[1]);
    assert_eq!(mmr.forest(), 2);
    assert_eq!(mmr.nodes.len(), 3);
    assert_eq!(mmr.nodes.as_slice(), &postorder[0..mmr.nodes.len()]);

    let acc = mmr.accumulator();
    assert_eq!(acc.num_leaves, 2);
    assert_eq!(acc.peaks, &[postorder[2]]);

    mmr.add(LEAVES[2]);
    assert_eq!(mmr.forest(), 3);
    assert_eq!(mmr.nodes.len(), 4);
    assert_eq!(mmr.nodes.as_slice(), &postorder[0..mmr.nodes.len()]);

    let acc = mmr.accumulator();
    assert_eq!(acc.num_leaves, 3);
    assert_eq!(acc.peaks, &[postorder[2], postorder[3]]);

    mmr.add(LEAVES[3]);
    assert_eq!(mmr.forest(), 4);
    assert_eq!(mmr.nodes.len(), 7);
    assert_eq!(mmr.nodes.as_slice(), &postorder[0..mmr.nodes.len()]);

    let acc = mmr.accumulator();
    assert_eq!(acc.num_leaves, 4);
    assert_eq!(acc.peaks, &[postorder[6]]);

    mmr.add(LEAVES[4]);
    assert_eq!(mmr.forest(), 5);
    assert_eq!(mmr.nodes.len(), 8);
    assert_eq!(mmr.nodes.as_slice(), &postorder[0..mmr.nodes.len()]);

    let acc = mmr.accumulator();
    assert_eq!(acc.num_leaves, 5);
    assert_eq!(acc.peaks, &[postorder[6], postorder[7]]);

    mmr.add(LEAVES[5]);
    assert_eq!(mmr.forest(), 6);
    assert_eq!(mmr.nodes.len(), 10);
    assert_eq!(mmr.nodes.as_slice(), &postorder[0..mmr.nodes.len()]);

    let acc = mmr.accumulator();
    assert_eq!(acc.num_leaves, 6);
    assert_eq!(acc.peaks, &[postorder[6], postorder[9]]);

    mmr.add(LEAVES[6]);
    assert_eq!(mmr.forest(), 7);
    assert_eq!(mmr.nodes.len(), 11);
    assert_eq!(mmr.nodes.as_slice(), &postorder[0..mmr.nodes.len()]);

    let acc = mmr.accumulator();
    assert_eq!(acc.num_leaves, 7);
    assert_eq!(acc.peaks, &[postorder[6], postorder[9], postorder[10]]);
}

#[test]
fn test_mmr_open() {
    let mmr: Mmr = LEAVES.into();
    let h01: Word = Rpo256::hash_elements(&LEAVES[0..2].concat()).into();
    let h23: Word = Rpo256::hash_elements(&LEAVES[2..4].concat()).into();

    // position 7 is beyond the last leaf, so it cannot be opened
    assert!(mmr.open(7).is_err(), "Element 7 is not in the tree, result should be None");

    // node at pos 6 is the root of its own single-leaf tree
    let empty: MerklePath = MerklePath::new(vec![]);
    let opening = mmr
        .open(6)
        .expect("Element 6 is contained in the tree, expected an opening result.");
    assert_eq!(opening.merkle_path, empty);
    assert_eq!(opening.forest, mmr.forest);
    assert_eq!(opening.position, 6);
    assert!(
        mmr.accumulator().verify(LEAVES[6], opening),
        "MmrProof should be valid for the current accumulator."
    );

    // nodes 4,5 are at depth 1
    let root_to_path = MerklePath::new(vec![LEAVES[4]]);
    let opening = mmr
        .open(5)
        .expect("Element 5 is contained in the tree, expected an opening result.");
    assert_eq!(opening.merkle_path, root_to_path);
    assert_eq!(opening.forest, mmr.forest);
    assert_eq!(opening.position, 5);
    assert!(
        mmr.accumulator().verify(LEAVES[5], opening),
        "MmrProof should be valid for the current accumulator."
    );

    let root_to_path = MerklePath::new(vec![LEAVES[5]]);
    let opening = mmr
        .open(4)
        .expect("Element 4 is contained in the tree, expected an opening result.");
    assert_eq!(opening.merkle_path, root_to_path);
    assert_eq!(opening.forest, mmr.forest);
    assert_eq!(opening.position, 4);
    assert!(
        mmr.accumulator().verify(LEAVES[4], opening),
        "MmrProof should be valid for the current accumulator."
    );

    // nodes 0,1,2,3 are at depth 2
    let root_to_path = MerklePath::new(vec![LEAVES[2], h01]);
    let opening = mmr
        .open(3)
        .expect("Element 3 is contained in the tree, expected an opening result.");
    assert_eq!(opening.merkle_path, root_to_path);
    assert_eq!(opening.forest, mmr.forest);
    assert_eq!(opening.position, 3);
    assert!(
        mmr.accumulator().verify(LEAVES[3], opening),
        "MmrProof should be valid for the current accumulator."
    );

    let root_to_path = MerklePath::new(vec![LEAVES[3], h01]);
    let opening = mmr
        .open(2)
        .expect("Element 2 is contained in the tree, expected an opening result.");
    assert_eq!(opening.merkle_path, root_to_path);
    assert_eq!(opening.forest, mmr.forest);
    assert_eq!(opening.position, 2);
    assert!(
        mmr.accumulator().verify(LEAVES[2], opening),
        "MmrProof should be valid for the current accumulator."
    );

    let root_to_path = MerklePath::new(vec![LEAVES[0], h23]);
    let opening = mmr
        .open(1)
        .expect("Element 1 is contained in the tree, expected an opening result.");
    assert_eq!(opening.merkle_path, root_to_path);
    assert_eq!(opening.forest, mmr.forest);
    assert_eq!(opening.position, 1);
    assert!(
        mmr.accumulator().verify(LEAVES[1], opening),
        "MmrProof should be valid for the current accumulator."
    );

    let root_to_path = MerklePath::new(vec![LEAVES[1], h23]);
    let opening = mmr
        .open(0)
        .expect("Element 0 is contained in the tree, expected an opening result.");
    assert_eq!(opening.merkle_path, root_to_path);
    assert_eq!(opening.forest, mmr.forest);
    assert_eq!(opening.position, 0);
    assert!(
        mmr.accumulator().verify(LEAVES[0], opening),
        "MmrProof should be valid for the current accumulator."
    );
}

#[test]
fn test_mmr_get() {
    let mmr: Mmr = LEAVES.into();
    assert_eq!(mmr.get(0).unwrap(), LEAVES[0], "value at pos 0 must correspond");
    assert_eq!(mmr.get(1).unwrap(), LEAVES[1], "value at pos 1 must correspond");
    assert_eq!(mmr.get(2).unwrap(), LEAVES[2], "value at pos 2 must correspond");
    assert_eq!(mmr.get(3).unwrap(), LEAVES[3], "value at pos 3 must correspond");
    assert_eq!(mmr.get(4).unwrap(), LEAVES[4], "value at pos 4 must correspond");
    assert_eq!(mmr.get(5).unwrap(), LEAVES[5], "value at pos 5 must correspond");
    assert_eq!(mmr.get(6).unwrap(), LEAVES[6], "value at pos 6 must correspond");
    assert!(mmr.get(7).is_err());
}

#[test]
fn test_mmr_invariants() {
    let mut mmr = Mmr::new();
    for v in 1..=1028 {
        mmr.add(int_to_node(v));
        let accumulator = mmr.accumulator();
        assert_eq!(v as usize, mmr.forest(), "MMR leaf count must increase by one on every add");
        assert_eq!(
            v as usize, accumulator.num_leaves,
            "MMR and its accumulator must match leaves count"
        );
        assert_eq!(
            accumulator.num_leaves.count_ones() as usize,
            accumulator.peaks.len(),
            "bits on leaves must match the number of peaks"
        );

        let expected_nodes: usize = TrueBitPositionIterator::new(mmr.forest())
            .map(|bit_pos| nodes_in_forest(1 << bit_pos))
            .sum();

        assert_eq!(
            expected_nodes,
            mmr.nodes.len(),
            "the sum of every tree size must be equal to the number of nodes in the MMR (forest: {:b})",
            mmr.forest(),
        );
    }
}

#[test]
fn test_bit_position_iterator() {
    assert_eq!(TrueBitPositionIterator::new(0).count(), 0);
    assert_eq!(TrueBitPositionIterator::new(0).rev().count(), 0);

    assert_eq!(TrueBitPositionIterator::new(1).collect::<Vec<u32>>(), vec![0]);
    assert_eq!(TrueBitPositionIterator::new(1).rev().collect::<Vec<u32>>(), vec![0]);

    assert_eq!(TrueBitPositionIterator::new(2).collect::<Vec<u32>>(), vec![1]);
    assert_eq!(TrueBitPositionIterator::new(2).rev().collect::<Vec<u32>>(), vec![1]);

    assert_eq!(TrueBitPositionIterator::new(3).collect::<Vec<u32>>(), vec![0, 1]);
    assert_eq!(TrueBitPositionIterator::new(3).rev().collect::<Vec<u32>>(), vec![1, 0]);

    assert_eq!(
        TrueBitPositionIterator::new(0b11010101).collect::<Vec<u32>>(),
        vec![0, 2, 4, 6, 7],
    );
    assert_eq!(
        TrueBitPositionIterator::new(0b11010101).rev().collect::<Vec<u32>>(),
        vec![7, 6, 4, 2, 0],
    );
}

mod property_tests {
    use super::leaf_to_corresponding_tree;
    use proptest::prelude::*;

    proptest! {
        #[test]
        fn test_last_position_is_always_contained_in_the_last_tree(leaves in any::<usize>().prop_filter("cant have an empty tree", |v| *v != 0)) {
            let last_pos = leaves - 1;
            let lowest_bit = leaves.trailing_zeros();

            assert_eq!(leaf_to_corresponding_tree(last_pos, leaves), Some(lowest_bit));
        }
    }

    proptest! {
        #[test]
        fn test_contained_tree_is_always_power_of_two((leaves, pos) in any::<usize>().prop_flat_map(|v| (Just(v), 0..v))) {
            let tree = leaf_to_corresponding_tree(pos, leaves).expect("pos is smaller than leaves, there should always be a corresponding tree");
            let mask = 1usize << tree;

            assert!(tree < usize::BITS, "the result must be a bit in usize");
            assert!(mask & leaves != 0, "the result should be a tree in leaves");
        }
    }
}
src/merkle/mod.rs
@@ -1,54 +1,78 @@
 use super::{
     hash::rpo::{Rpo256, RpoDigest},
-    utils::collections::{BTreeMap, Vec},
-    Felt, Word, ZERO,
+    utils::collections::{vec, BTreeMap, BTreeSet, Vec},
+    Felt, StarkField, Word, WORD_SIZE, ZERO,
 };
 use core::fmt;
 
-mod merkle_tree;
-pub use merkle_tree::MerkleTree;
+// REEXPORTS
+// ================================================================================================
 
-mod merkle_path_set;
-pub use merkle_path_set::MerklePathSet;
+mod empty_roots;
+pub use empty_roots::EmptySubtreeRoots;
+
+mod index;
+pub use index::NodeIndex;
+
+mod merkle_tree;
+pub use merkle_tree::{path_to_text, tree_to_text, MerkleTree};
+
+mod path;
+pub use path::{MerklePath, RootPath, ValuePath};
+
+mod path_set;
+pub use path_set::MerklePathSet;
 
 mod simple_smt;
 pub use simple_smt::SimpleSmt;
 
+mod mmr;
+pub use mmr::{Mmr, MmrPeaks};
+
+mod store;
+pub use store::MerkleStore;
+
 // ERRORS
 // ================================================================================================
 
-#[derive(Clone, Debug)]
+#[derive(Clone, Debug, PartialEq, Eq)]
 pub enum MerkleError {
-    DepthTooSmall(u32),
-    DepthTooBig(u32),
+    ConflictingRoots(Vec<Word>),
+    DepthTooSmall(u8),
+    DepthTooBig(u64),
+    NodeNotInStore(Word, NodeIndex),
     NumLeavesNotPowerOfTwo(usize),
-    InvalidIndex(u32, u64),
-    InvalidDepth(u32, u32),
-    InvalidPath(Vec<Word>),
+    InvalidIndex(NodeIndex),
+    InvalidDepth { expected: u8, provided: u8 },
+    InvalidPath(MerklePath),
     InvalidEntriesCount(usize, usize),
     NodeNotInSet(u64),
+    RootNotInStore(Word),
 }
 
 impl fmt::Display for MerkleError {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         use MerkleError::*;
         match self {
+            ConflictingRoots(roots) => write!(f, "the merkle paths roots do not match {roots:?}"),
             DepthTooSmall(depth) => write!(f, "the provided depth {depth} is too small"),
             DepthTooBig(depth) => write!(f, "the provided depth {depth} is too big"),
             NumLeavesNotPowerOfTwo(leaves) => {
                 write!(f, "the leaves count {leaves} is not a power of 2")
             }
-            InvalidIndex(depth, index) => write!(
+            InvalidIndex(index) => write!(
                 f,
-                "the leaf index {index} is not valid for the depth {depth}"
+                "the index value {} is not valid for the depth {}", index.value(), index.depth()
             ),
-            InvalidDepth(expected, provided) => write!(
+            InvalidDepth { expected, provided } => write!(
                 f,
                 "the provided depth {provided} is not valid for {expected}"
             ),
             InvalidPath(_path) => write!(f, "the provided path is not valid"),
             InvalidEntriesCount(max, provided) => write!(f, "the provided number of entries is {provided}, but the maximum for the given depth is {max}"),
             NodeNotInSet(index) => write!(f, "the node indexed by {index} is not in the set"),
+            NodeNotInStore(hash, index) => write!(f, "the node {:?} indexed by {} and depth {} is not in the store", hash, index.value(), index.depth(),),
+            RootNotInStore(root) => write!(f, "the root {:?} is not in the store", root),
         }
     }
 }
108  src/merkle/path.rs  Normal file
@@ -0,0 +1,108 @@
use super::{vec, NodeIndex, Rpo256, Vec, Word};
use core::ops::{Deref, DerefMut};

// MERKLE PATH
// ================================================================================================

/// A merkle path container, composed of a sequence of nodes of a Merkle tree.
#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub struct MerklePath {
    nodes: Vec<Word>,
}

impl MerklePath {
    // CONSTRUCTORS
    // --------------------------------------------------------------------------------------------

    /// Creates a new Merkle path from a list of nodes.
    pub fn new(nodes: Vec<Word>) -> Self {
        Self { nodes }
    }

    // PROVIDERS
    // --------------------------------------------------------------------------------------------

    /// Computes the merkle root for this opening.
    pub fn compute_root(&self, index_value: u64, node: Word) -> Word {
        let mut index = NodeIndex::new(self.depth(), index_value);
        self.nodes.iter().copied().fold(node, |node, sibling| {
            // compute the node and move to the next iteration.
            let input = index.build_node(node.into(), sibling.into());
            index.move_up();
            Rpo256::merge(&input).into()
        })
    }

    /// Returns the depth in which this Merkle path proof is valid.
    pub fn depth(&self) -> u8 {
        self.nodes.len() as u8
    }

    /// Verifies the Merkle opening proof towards the provided root.
    ///
    /// Returns `true` if `node` exists at `index` in a Merkle tree with `root`.
    pub fn verify(&self, index: u64, node: Word, root: &Word) -> bool {
        root == &self.compute_root(index, node)
    }
}

impl From<Vec<Word>> for MerklePath {
    fn from(path: Vec<Word>) -> Self {
        Self::new(path)
    }
}

impl Deref for MerklePath {
    // we use `Vec` here instead of slice so we can call vector mutation methods directly from the
    // merkle path (example: `Vec::remove`).
    type Target = Vec<Word>;

    fn deref(&self) -> &Self::Target {
        &self.nodes
    }
}

impl DerefMut for MerklePath {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.nodes
    }
}

impl FromIterator<Word> for MerklePath {
    fn from_iter<T: IntoIterator<Item = Word>>(iter: T) -> Self {
        Self::new(iter.into_iter().collect())
    }
}

impl IntoIterator for MerklePath {
    type Item = Word;
    type IntoIter = vec::IntoIter<Word>;

    fn into_iter(self) -> Self::IntoIter {
        self.nodes.into_iter()
    }
}

// MERKLE PATH CONTAINERS
// ================================================================================================

/// A container for a [Word] value and its [MerklePath] opening.
#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub struct ValuePath {
    /// The node value opening for `path`.
    pub value: Word,
    /// The path from `value` to `root` (exclusive).
    pub path: MerklePath,
}

/// A container for a [MerklePath] and its [Word] root.
///
/// This structure does not provide any guarantees regarding the correctness of the path to the
/// root. For more information, check [MerklePath::verify].
#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub struct RootPath {
    /// The node value opening for `path`.
    pub root: Word,
    /// The path from `value` to `root` (exclusive).
    pub path: MerklePath,
}
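A short usage sketch of the path API above (illustrative only; it assumes `Rpo256`, `MerklePath`, `Felt`, `Word`, and `ZERO` are reachable at the paths shown, as the re-exports in this diff suggest):

use miden_crypto::hash::rpo::Rpo256;
use miden_crypto::merkle::MerklePath;
use miden_crypto::{Felt, Word, ZERO};

fn main() {
    let leaf = |v: u64| -> Word { [Felt::new(v), ZERO, ZERO, ZERO] };
    let (a, b, c, d) = (leaf(1), leaf(2), leaf(3), leaf(4));

    // a depth-2 tree over the four leaves
    let ab = Rpo256::merge(&[a.into(), b.into()]);
    let cd = Rpo256::merge(&[c.into(), d.into()]);
    let root: Word = Rpo256::merge(&[ab, cd]).into();

    // opening for `c` (index 2): its sibling `d`, then the sibling subtree root `ab`
    let path = MerklePath::new(vec![d, ab.into()]);
    assert_eq!(path.compute_root(2, c), root);
    assert!(path.verify(2, c, &root));
}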
432  src/merkle/path_set.rs  Normal file
@@ -0,0 +1,432 @@
use super::{BTreeMap, MerkleError, MerklePath, NodeIndex, Rpo256, ValuePath, Vec, Word, ZERO};

// MERKLE PATH SET
// ================================================================================================

/// A set of Merkle paths.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct MerklePathSet {
    root: Word,
    total_depth: u8,
    paths: BTreeMap<u64, MerklePath>,
}

impl MerklePathSet {
    // CONSTRUCTOR
    // --------------------------------------------------------------------------------------------

    /// Returns an empty MerklePathSet.
    pub fn new(depth: u8) -> Self {
        let root = [ZERO; 4];
        let paths = BTreeMap::new();

        Self {
            root,
            total_depth: depth,
            paths,
        }
    }

    /// Appends the provided paths iterator into the set.
    ///
    /// Analogous to `[Self::add_path]`.
    pub fn with_paths<I>(self, paths: I) -> Result<Self, MerkleError>
    where
        I: IntoIterator<Item = (u64, Word, MerklePath)>,
    {
        paths
            .into_iter()
            .try_fold(self, |mut set, (index, value, path)| {
                set.add_path(index, value, path)?;
                Ok(set)
            })
    }

    // PUBLIC ACCESSORS
    // --------------------------------------------------------------------------------------------

    /// Returns the root to which all paths in this set resolve.
    pub const fn root(&self) -> Word {
        self.root
    }

    /// Returns the depth of the Merkle tree implied by the paths stored in this set.
    ///
    /// Merkle tree of depth 1 has two leaves, depth 2 has four leaves etc.
    pub const fn depth(&self) -> u8 {
        self.total_depth
    }

    /// Returns a node at the specified index.
    ///
    /// # Errors
    /// Returns an error if:
    /// * The specified index is not valid for the depth of structure.
    /// * Requested node does not exist in the set.
    pub fn get_node(&self, index: NodeIndex) -> Result<Word, MerkleError> {
        if !index.with_depth(self.total_depth).is_valid() {
            return Err(MerkleError::InvalidIndex(
                index.with_depth(self.total_depth),
            ));
        }
        if index.depth() != self.total_depth {
            return Err(MerkleError::InvalidDepth {
                expected: self.total_depth,
                provided: index.depth(),
            });
        }

        let parity = index.value() & 1;
        let path_key = index.value() - parity;
        self.paths
            .get(&path_key)
            .ok_or(MerkleError::NodeNotInSet(path_key))
            .map(|path| path[parity as usize])
    }

    /// Returns a leaf at the specified index.
    ///
    /// # Errors
    /// * The specified index is not valid for the depth of the structure.
    /// * Leaf with the requested path does not exist in the set.
    pub fn get_leaf(&self, index: u64) -> Result<Word, MerkleError> {
        self.get_node(NodeIndex::new(self.depth(), index))
    }

    /// Returns a Merkle path to the node at the specified index. The node itself is
    /// not included in the path.
    ///
    /// # Errors
    /// Returns an error if:
    /// * The specified index is not valid for the depth of structure.
    /// * Node of the requested path does not exist in the set.
    pub fn get_path(&self, index: NodeIndex) -> Result<MerklePath, MerkleError> {
        if !index.with_depth(self.total_depth).is_valid() {
            return Err(MerkleError::InvalidIndex(index));
        }
        if index.depth() != self.total_depth {
            return Err(MerkleError::InvalidDepth {
                expected: self.total_depth,
                provided: index.depth(),
            });
        }

        let parity = index.value() & 1;
        let path_key = index.value() - parity;
        let mut path = self
            .paths
            .get(&path_key)
            .cloned()
            .ok_or(MerkleError::NodeNotInSet(index.value()))?;
        path.remove(parity as usize);
        Ok(path)
    }

    /// Returns all paths in this path set together with their indexes.
    pub fn to_paths(&self) -> Vec<(u64, ValuePath)> {
        let mut result = Vec::with_capacity(self.paths.len() * 2);

        for (&index, path) in self.paths.iter() {
            // push path for the even index into the result
            let path1 = ValuePath {
                value: path[0],
                path: MerklePath::new(path[1..].to_vec()),
            };
            result.push((index, path1));

            // push path for the odd index into the result
            let mut path2 = path.clone();
            let leaf2 = path2.remove(1);
            let path2 = ValuePath {
                value: leaf2,
                path: path2,
            };
            result.push((index + 1, path2));
        }

        result
    }

    // STATE MUTATORS
    // --------------------------------------------------------------------------------------------

    /// Adds the specified Merkle path to this [MerklePathSet]. The `index` and `value` parameters
    /// specify the leaf node at which the path starts.
    ///
    /// # Errors
    /// Returns an error if:
    /// - The specified index is not valid in the context of this Merkle path set (i.e., the
    ///   index implies a greater depth than is specified for this set).
    /// - The specified path is not consistent with other paths in the set (i.e., resolves to a
    ///   different root).
    pub fn add_path(
        &mut self,
        index_value: u64,
        value: Word,
        mut path: MerklePath,
    ) -> Result<(), MerkleError> {
        let depth = path.len() as u8;
        let mut index = NodeIndex::new(depth, index_value);
        if index.depth() != self.total_depth {
            return Err(MerkleError::InvalidDepth {
                expected: self.total_depth,
                provided: index.depth(),
            });
        }

        // update the current path
        let parity = index_value & 1;
        path.insert(parity as usize, value);

        // traverse to the root, updating the nodes
        let root: Word = Rpo256::merge(&[path[0].into(), path[1].into()]).into();
        let root = path.iter().skip(2).copied().fold(root, |root, hash| {
            index.move_up();
            Rpo256::merge(&index.build_node(root.into(), hash.into())).into()
        });

        // if the path set is empty (the root is all ZEROs), set the root to the root of the added
        // path; otherwise, the root of the added path must be identical to the current root
        if self.root == [ZERO; 4] {
            self.root = root;
        } else if self.root != root {
            return Err(MerkleError::InvalidPath(path));
        }

        // finish updating the path
        let path_key = index_value - parity;
        self.paths.insert(path_key, path);
        Ok(())
    }

    /// Replaces the leaf at the specified index with the provided value.
    ///
    /// # Errors
    /// Returns an error if:
    /// * Requested node does not exist in the set.
    pub fn update_leaf(&mut self, base_index_value: u64, value: Word) -> Result<(), MerkleError> {
        let depth = self.depth();
        let mut index = NodeIndex::new(depth, base_index_value);
        if !index.is_valid() {
            return Err(MerkleError::InvalidIndex(index));
        }

        let parity = index.value() & 1;
        let path_key = index.value() - parity;
        let path = match self.paths.get_mut(&path_key) {
            Some(path) => path,
            None => return Err(MerkleError::NodeNotInSet(base_index_value)),
        };

        // Fill old_hashes vector -----------------------------------------------------------------
        let mut current_index = index;
        let mut old_hashes = Vec::with_capacity(path.len().saturating_sub(2));
        let mut root: Word = Rpo256::merge(&[path[0].into(), path[1].into()]).into();
        for hash in path.iter().skip(2).copied() {
            old_hashes.push(root);
            current_index.move_up();
            let input = current_index.build_node(hash.into(), root.into());
            root = Rpo256::merge(&input).into();
        }

        // Fill new_hashes vector -----------------------------------------------------------------
        path[index.is_value_odd() as usize] = value;

        let mut new_hashes = Vec::with_capacity(path.len().saturating_sub(2));
        let mut new_root: Word = Rpo256::merge(&[path[0].into(), path[1].into()]).into();
        for path_hash in path.iter().skip(2).copied() {
            new_hashes.push(new_root);
            index.move_up();
            let input = current_index.build_node(path_hash.into(), new_root.into());
            new_root = Rpo256::merge(&input).into();
        }

        self.root = new_root;

        // update paths ---------------------------------------------------------------------------
        for path in self.paths.values_mut() {
            for i in (0..old_hashes.len()).rev() {
                if path[i + 2] == old_hashes[i] {
                    path[i + 2] = new_hashes[i];
                    break;
                }
            }
        }

        Ok(())
    }
}

// TESTS
// ================================================================================================

#[cfg(test)]
mod tests {
    use super::*;
    use crate::merkle::int_to_node;

    #[test]
    fn get_root() {
        let leaf0 = int_to_node(0);
        let leaf1 = int_to_node(1);
        let leaf2 = int_to_node(2);
        let leaf3 = int_to_node(3);

        let parent0 = calculate_parent_hash(leaf0, 0, leaf1);
        let parent1 = calculate_parent_hash(leaf2, 2, leaf3);

        let root_exp = calculate_parent_hash(parent0, 0, parent1);

        let set = super::MerklePathSet::new(2)
            .with_paths([(0, leaf0, vec![leaf1, parent1].into())])
            .unwrap();

        assert_eq!(set.root(), root_exp);
    }

    #[test]
    fn add_and_get_path() {
        let path_6 = vec![int_to_node(7), int_to_node(45), int_to_node(123)];
        let hash_6 = int_to_node(6);
        let index = 6_u64;
        let depth = 3_u8;
        let set = super::MerklePathSet::new(depth)
            .with_paths([(index, hash_6, path_6.clone().into())])
            .unwrap();
        let stored_path_6 = set.get_path(NodeIndex::new(depth, index)).unwrap();

        assert_eq!(path_6, *stored_path_6);
        assert!(set.get_path(NodeIndex::new(depth, 15_u64)).is_err())
    }

    #[test]
    fn get_node() {
        let path_6 = vec![int_to_node(7), int_to_node(45), int_to_node(123)];
        let hash_6 = int_to_node(6);
        let index = 6_u64;
        let depth = 3_u8;
        let set = MerklePathSet::new(depth)
            .with_paths([(index, hash_6, path_6.into())])
            .unwrap();

        assert_eq!(int_to_node(6u64), set.get_node(NodeIndex::new(depth, index)).unwrap());
        assert!(set.get_node(NodeIndex::new(depth, 15_u64)).is_err());
    }

    #[test]
    fn update_leaf() {
        let hash_4 = int_to_node(4);
        let hash_5 = int_to_node(5);
        let hash_6 = int_to_node(6);
        let hash_7 = int_to_node(7);
        let hash_45 = calculate_parent_hash(hash_4, 12u64, hash_5);
        let hash_67 = calculate_parent_hash(hash_6, 14u64, hash_7);

        let hash_0123 = int_to_node(123);

        let path_6 = vec![hash_7, hash_45, hash_0123];
        let path_5 = vec![hash_4, hash_67, hash_0123];
        let path_4 = vec![hash_5, hash_67, hash_0123];

        let index_6 = 6_u64;
        let index_5 = 5_u64;
        let index_4 = 4_u64;
        let depth = 3_u8;
        let mut set = MerklePathSet::new(depth)
            .with_paths([
                (index_6, hash_6, path_6.into()),
                (index_5, hash_5, path_5.into()),
                (index_4, hash_4, path_4.into()),
            ])
            .unwrap();

        let new_hash_6 = int_to_node(100);
        let new_hash_5 = int_to_node(55);

        set.update_leaf(index_6, new_hash_6).unwrap();
        let new_path_4 = set.get_path(NodeIndex::new(depth, index_4)).unwrap();
        let new_hash_67 = calculate_parent_hash(new_hash_6, 14_u64, hash_7);
        assert_eq!(new_hash_67, new_path_4[1]);

        set.update_leaf(index_5, new_hash_5).unwrap();
        let new_path_4 = set.get_path(NodeIndex::new(depth, index_4)).unwrap();
        let new_path_6 = set.get_path(NodeIndex::new(depth, index_6)).unwrap();
        let new_hash_45 = calculate_parent_hash(new_hash_5, 13_u64, hash_4);
        assert_eq!(new_hash_45, new_path_6[1]);
        assert_eq!(new_hash_5, new_path_4[0]);
    }

    #[test]
    fn depth_3_is_correct() {
        let a = int_to_node(1);
        let b = int_to_node(2);
        let c = int_to_node(3);
        let d = int_to_node(4);
        let e = int_to_node(5);
        let f = int_to_node(6);
        let g = int_to_node(7);
        let h = int_to_node(8);

        let i = Rpo256::merge(&[a.into(), b.into()]);
        let j = Rpo256::merge(&[c.into(), d.into()]);
        let k = Rpo256::merge(&[e.into(), f.into()]);
        let l = Rpo256::merge(&[g.into(), h.into()]);

        let m = Rpo256::merge(&[i.into(), j.into()]);
        let n = Rpo256::merge(&[k.into(), l.into()]);

        let root = Rpo256::merge(&[m.into(), n.into()]);

        let mut set = MerklePathSet::new(3);

        let value = b;
        let index = 1;
        let path = MerklePath::new([a.into(), j.into(), n.into()].to_vec());
        set.add_path(index, value, path.clone()).unwrap();
        assert_eq!(value, set.get_leaf(index).unwrap());
        assert_eq!(Word::from(root), set.root());

        let value = e;
        let index = 4;
        let path = MerklePath::new([f.into(), l.into(), m.into()].to_vec());
        set.add_path(index, value, path.clone()).unwrap();
        assert_eq!(value, set.get_leaf(index).unwrap());
        assert_eq!(Word::from(root), set.root());

        let value = a;
        let index = 0;
        let path = MerklePath::new([b.into(), j.into(), n.into()].to_vec());
        set.add_path(index, value, path.clone()).unwrap();
        assert_eq!(value, set.get_leaf(index).unwrap());
        assert_eq!(Word::from(root), set.root());

        let value = h;
        let index = 7;
        let path = MerklePath::new([g.into(), k.into(), m.into()].to_vec());
        set.add_path(index, value, path.clone()).unwrap();
        assert_eq!(value, set.get_leaf(index).unwrap());
        assert_eq!(Word::from(root), set.root());
    }

    // HELPER FUNCTIONS
    // --------------------------------------------------------------------------------------------

    const fn is_even(pos: u64) -> bool {
        pos & 1 == 0
    }

    /// Calculates the hash of the parent node from two sibling ones
    /// - node — current node
    /// - node_pos — position of the current node
    /// - sibling — neighboring vertex in the tree
    fn calculate_parent_hash(node: Word, node_pos: u64, sibling: Word) -> Word {
        if is_even(node_pos) {
            Rpo256::merge(&[node.into(), sibling.into()]).into()
        } else {
            Rpo256::merge(&[sibling.into(), node.into()]).into()
        }
    }
}
@@ -1,4 +1,6 @@
|
|||||||
use super::{BTreeMap, MerkleError, Rpo256, RpoDigest, Vec, Word};
|
use super::{
|
||||||
|
BTreeMap, EmptySubtreeRoots, MerkleError, MerklePath, NodeIndex, Rpo256, RpoDigest, Vec, Word,
|
||||||
|
};
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests;
|
mod tests;
|
||||||
@@ -12,8 +14,8 @@ mod tests;
|
|||||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||||
pub struct SimpleSmt {
|
pub struct SimpleSmt {
|
||||||
root: Word,
|
root: Word,
|
||||||
depth: u32,
|
depth: u8,
|
||||||
store: Store,
|
pub(crate) store: Store,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl SimpleSmt {
|
impl SimpleSmt {
|
||||||
@@ -21,56 +23,76 @@ impl SimpleSmt {
|
|||||||
// --------------------------------------------------------------------------------------------
|
// --------------------------------------------------------------------------------------------
|
||||||
|
|
||||||
/// Minimum supported depth.
|
/// Minimum supported depth.
|
||||||
pub const MIN_DEPTH: u32 = 1;
|
pub const MIN_DEPTH: u8 = 1;
|
||||||
|
|
||||||
/// Maximum supported depth.
|
/// Maximum supported depth.
|
||||||
pub const MAX_DEPTH: u32 = 63;
|
pub const MAX_DEPTH: u8 = 63;
|
||||||
|
|
||||||
// CONSTRUCTORS
|
// CONSTRUCTORS
|
||||||
// --------------------------------------------------------------------------------------------
|
// --------------------------------------------------------------------------------------------
|
||||||
|
|
||||||
/// Creates a new simple SMT.
|
/// Creates a new simple SMT with the provided depth.
|
||||||
///
|
pub fn new(depth: u8) -> Result<Self, MerkleError> {
|
||||||
/// The provided entries will be tuples of the leaves and their corresponding keys.
|
// validate the range of the depth.
|
||||||
|
if depth < Self::MIN_DEPTH {
|
||||||
|
return Err(MerkleError::DepthTooSmall(depth));
|
||||||
|
} else if Self::MAX_DEPTH < depth {
|
||||||
|
return Err(MerkleError::DepthTooBig(depth as u64));
|
||||||
|
}
|
||||||
|
|
||||||
|
let (store, root) = Store::new(depth);
|
||||||
|
Ok(Self { root, depth, store })
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Appends the provided entries as leaves of the tree.
|
||||||
///
|
///
|
||||||
/// # Errors
|
/// # Errors
|
||||||
///
|
///
|
||||||
/// The function will fail if the provided entries count exceed the maximum tree capacity, that
|
/// The function will fail if the provided entries count exceed the maximum tree capacity, that
|
||||||
/// is `2^{depth}`.
|
/// is `2^{depth}`.
|
||||||
pub fn new<R, I>(entries: R, depth: u32) -> Result<Self, MerkleError>
|
pub fn with_leaves<R, I>(mut self, entries: R) -> Result<Self, MerkleError>
|
||||||
where
|
where
|
||||||
R: IntoIterator<IntoIter = I>,
|
R: IntoIterator<IntoIter = I>,
|
||||||
I: Iterator<Item = (u64, Word)> + ExactSizeIterator,
|
I: Iterator<Item = (u64, Word)> + ExactSizeIterator,
|
||||||
{
|
{
|
||||||
|
// check if the leaves count will fit the depth setup
|
||||||
let mut entries = entries.into_iter();
|
let mut entries = entries.into_iter();
|
||||||
|
let max = 1 << self.depth;
|
||||||
// validate the range of the depth.
|
if entries.len() > max {
|
||||||
let max = 1 << depth;
|
|
||||||
if depth < Self::MIN_DEPTH {
|
|
||||||
return Err(MerkleError::DepthTooSmall(depth));
|
|
||||||
} else if Self::MAX_DEPTH < depth {
|
|
||||||
return Err(MerkleError::DepthTooBig(depth));
|
|
||||||
} else if entries.len() > max {
|
|
||||||
return Err(MerkleError::InvalidEntriesCount(max, entries.len()));
|
return Err(MerkleError::InvalidEntriesCount(max, entries.len()));
|
||||||
}
|
}
|
||||||
|
|
||||||
let (store, root) = Store::new(depth);
|
// append leaves and return
|
||||||
let mut tree = Self { root, depth, store };
|
entries.try_for_each(|(key, leaf)| self.insert_leaf(key, leaf))?;
|
||||||
entries.try_for_each(|(key, leaf)| tree.insert_leaf(key, leaf))?;
|
Ok(self)
|
||||||
|
|
||||||
Ok(tree)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Replaces the internal empty digests used when a given depth doesn't contain a node.
|
||||||
|
pub fn with_empty_subtrees<I>(mut self, hashes: I) -> Self
|
||||||
|
where
|
||||||
|
I: IntoIterator<Item = RpoDigest>,
|
||||||
|
{
|
||||||
|
self.store
|
||||||
|
.replace_empty_subtrees(hashes.into_iter().collect());
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
// PUBLIC ACCESSORS
|
||||||
|
// --------------------------------------------------------------------------------------------
|
||||||
|
|
||||||
/// Returns the root of this Merkle tree.
|
/// Returns the root of this Merkle tree.
|
||||||
pub const fn root(&self) -> Word {
|
pub const fn root(&self) -> Word {
|
||||||
self.root
|
self.root
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Returns the depth of this Merkle tree.
|
/// Returns the depth of this Merkle tree.
|
||||||
pub const fn depth(&self) -> u32 {
|
pub const fn depth(&self) -> u8 {
|
||||||
self.depth
|
self.depth
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// PROVIDERS
|
||||||
|
// --------------------------------------------------------------------------------------------
|
||||||
|
|
||||||
/// Returns the set count of the keys of the leaves.
|
/// Returns the set count of the keys of the leaves.
|
||||||
pub fn leaves_count(&self) -> usize {
|
pub fn leaves_count(&self) -> usize {
|
||||||
self.store.leaves_count()
|
self.store.leaves_count()
|
||||||
@@ -81,16 +103,24 @@ impl SimpleSmt {
|
|||||||
/// # Errors
|
/// # Errors
|
||||||
/// Returns an error if:
|
/// Returns an error if:
|
||||||
/// * The specified depth is greater than the depth of the tree.
|
/// * The specified depth is greater than the depth of the tree.
|
||||||
/// * The specified key does not exist
|
pub fn get_node(&self, index: &NodeIndex) -> Result<Word, MerkleError> {
|
||||||
pub fn get_node(&self, depth: u32, key: u64) -> Result<Word, MerkleError> {
|
if index.is_root() {
|
||||||
if depth == 0 {
|
Err(MerkleError::DepthTooSmall(index.depth()))
|
||||||
Err(MerkleError::DepthTooSmall(depth))
|
} else if index.depth() > self.depth() {
|
||||||
} else if depth > self.depth() {
|
Err(MerkleError::DepthTooBig(index.depth() as u64))
|
||||||
Err(MerkleError::DepthTooBig(depth))
|
} else if index.depth() == self.depth() {
|
||||||
} else if depth == self.depth() {
|
self.store
|
||||||
self.store.get_leaf_node(key)
|
.get_leaf_node(index.value())
|
||||||
|
.or_else(|| {
|
||||||
|
self.store
|
||||||
|
.empty_hashes
|
||||||
|
.get(index.depth() as usize)
|
||||||
|
.copied()
|
||||||
|
.map(Word::from)
|
||||||
|
})
|
||||||
|
.ok_or(MerkleError::InvalidIndex(*index))
|
||||||
} else {
|
} else {
|
||||||
let branch_node = self.store.get_branch_node(key, depth)?;
|
let branch_node = self.store.get_branch_node(index);
|
||||||
Ok(Rpo256::merge(&[branch_node.left, branch_node.right]).into())
|
Ok(Rpo256::merge(&[branch_node.left, branch_node.right]).into())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -100,31 +130,23 @@ impl SimpleSmt {
|
|||||||
///
|
///
|
||||||
/// # Errors
|
/// # Errors
|
||||||
/// Returns an error if:
|
/// Returns an error if:
|
||||||
/// * The specified key does not exist as a branch or leaf node
|
|
||||||
/// * The specified depth is greater than the depth of the tree.
|
/// * The specified depth is greater than the depth of the tree.
|
||||||
pub fn get_path(&self, depth: u32, key: u64) -> Result<Vec<Word>, MerkleError> {
|
pub fn get_path(&self, mut index: NodeIndex) -> Result<MerklePath, MerkleError> {
|
||||||
if depth == 0 {
|
if index.is_root() {
|
||||||
return Err(MerkleError::DepthTooSmall(depth));
|
return Err(MerkleError::DepthTooSmall(index.depth()));
|
||||||
} else if depth > self.depth() {
|
} else if index.depth() > self.depth() {
|
||||||
return Err(MerkleError::DepthTooBig(depth));
|
return Err(MerkleError::DepthTooBig(index.depth() as u64));
|
||||||
} else if depth == self.depth() && !self.store.check_leaf_node_exists(key) {
|
|
||||||
return Err(MerkleError::InvalidIndex(self.depth(), key));
|
|
||||||
}
|
}
|
||||||
|
|
||||||
let mut path = Vec::with_capacity(depth as usize);
|
let mut path = Vec::with_capacity(index.depth() as usize);
|
||||||
let mut curr_key = key;
|
for _ in 0..index.depth() {
|
||||||
for n in (0..depth).rev() {
|
let is_right = index.is_value_odd();
|
||||||
let parent_key = curr_key >> 1;
|
index.move_up();
|
||||||
let parent_node = self.store.get_branch_node(parent_key, n)?;
|
let BranchNode { left, right } = self.store.get_branch_node(&index);
|
||||||
let sibling_node = if curr_key & 1 == 1 {
|
let value = if is_right { left } else { right };
|
||||||
parent_node.left
|
path.push(*value);
|
||||||
} else {
|
|
||||||
parent_node.right
|
|
||||||
};
|
|
||||||
path.push(sibling_node.into());
|
|
||||||
curr_key >>= 1;
|
|
||||||
}
|
}
|
||||||
Ok(path)
|
Ok(path.into())
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Return a Merkle path from the leaf at the specified key to the root. The leaf itself is not
|
/// Return a Merkle path from the leaf at the specified key to the root. The leaf itself is not
|
||||||
@@ -133,17 +155,20 @@ impl SimpleSmt {
|
|||||||
/// # Errors
|
/// # Errors
|
||||||
/// Returns an error if:
|
/// Returns an error if:
|
||||||
/// * The specified key does not exist as a leaf node.
|
/// * The specified key does not exist as a leaf node.
|
||||||
pub fn get_leaf_path(&self, key: u64) -> Result<Vec<Word>, MerkleError> {
|
pub fn get_leaf_path(&self, key: u64) -> Result<MerklePath, MerkleError> {
|
||||||
self.get_path(self.depth(), key)
|
self.get_path(NodeIndex::new(self.depth(), key))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// STATE MUTATORS
|
||||||
|
// --------------------------------------------------------------------------------------------
|
||||||
|
|
||||||
/// Replaces the leaf located at the specified key, and recomputes hashes by walking up the tree
|
/// Replaces the leaf located at the specified key, and recomputes hashes by walking up the tree
|
||||||
///
|
///
|
||||||
/// # Errors
|
/// # Errors
|
||||||
/// Returns an error if the specified key is not a valid leaf index for this tree.
|
/// Returns an error if the specified key is not a valid leaf index for this tree.
|
||||||
pub fn update_leaf(&mut self, key: u64, value: Word) -> Result<(), MerkleError> {
|
pub fn update_leaf(&mut self, key: u64, value: Word) -> Result<(), MerkleError> {
|
||||||
if !self.store.check_leaf_node_exists(key) {
|
if !self.store.check_leaf_node_exists(key) {
|
||||||
return Err(MerkleError::InvalidIndex(self.depth(), key));
|
return Err(MerkleError::InvalidIndex(NodeIndex::new(self.depth(), key)));
|
||||||
}
|
}
|
||||||
self.insert_leaf(key, value)?;
|
self.insert_leaf(key, value)?;
|
||||||
|
|
||||||
@@ -154,27 +179,22 @@ impl SimpleSmt {
|
|||||||
pub fn insert_leaf(&mut self, key: u64, value: Word) -> Result<(), MerkleError> {
|
pub fn insert_leaf(&mut self, key: u64, value: Word) -> Result<(), MerkleError> {
|
||||||
self.store.insert_leaf_node(key, value);
|
self.store.insert_leaf_node(key, value);
|
||||||
|
|
||||||
let depth = self.depth();
|
// TODO consider using a map `index |-> word` instead of `index |-> (word, word)`
|
||||||
let mut curr_key = key;
|
let mut index = NodeIndex::new(self.depth(), key);
|
||||||
let mut curr_node: RpoDigest = value.into();
|
let mut value = RpoDigest::from(value);
|
||||||
for n in (0..depth).rev() {
|
for _ in 0..index.depth() {
|
||||||
let parent_key = curr_key >> 1;
|
let is_right = index.is_value_odd();
|
||||||
let parent_node = self
|
index.move_up();
|
||||||
.store
|
let BranchNode { left, right } = self.store.get_branch_node(&index);
|
||||||
.get_branch_node(parent_key, n)
|
let (left, right) = if is_right {
|
||||||
.unwrap_or_else(|_| self.store.get_empty_node((n + 1) as usize));
|
(left, value)
|
||||||
let (left, right) = if curr_key & 1 == 1 {
|
|
||||||
(parent_node.left, curr_node)
|
|
||||||
} else {
|
} else {
|
||||||
(curr_node, parent_node.right)
|
(value, right)
|
||||||
};
|
};
|
||||||
|
self.store.insert_branch_node(index, left, right);
|
||||||
self.store.insert_branch_node(parent_key, n, left, right);
|
value = Rpo256::merge(&[left, right]);
|
||||||
curr_key = parent_key;
|
|
||||||
curr_node = Rpo256::merge(&[left, right]);
|
|
||||||
}
|
}
|
||||||
self.root = curr_node.into();
|
self.root = value.into();
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -187,35 +207,26 @@ impl SimpleSmt {
 /// respectively. Hashes for blank subtrees at each layer are stored in `empty_hashes`, beginning
 /// with the root hash of an empty tree, and ending with the zero value of a leaf node.
 #[derive(Debug, Clone, PartialEq, Eq)]
-struct Store {
-    branches: BTreeMap<(u64, u32), BranchNode>,
+pub(crate) struct Store {
+    pub(crate) branches: BTreeMap<NodeIndex, BranchNode>,
     leaves: BTreeMap<u64, Word>,
-    empty_hashes: Vec<RpoDigest>,
-    depth: u32,
+    pub(crate) empty_hashes: Vec<RpoDigest>,
+    depth: u8,
 }
 
 #[derive(Debug, Default, Clone, PartialEq, Eq)]
-struct BranchNode {
-    left: RpoDigest,
-    right: RpoDigest,
+pub(crate) struct BranchNode {
+    pub(crate) left: RpoDigest,
+    pub(crate) right: RpoDigest,
 }
 
 impl Store {
-    fn new(depth: u32) -> (Self, Word) {
+    fn new(depth: u8) -> (Self, Word) {
         let branches = BTreeMap::new();
         let leaves = BTreeMap::new();
 
         // Construct empty node digests for each layer of the tree
-        let empty_hashes: Vec<RpoDigest> = (0..depth + 1)
-            .scan(Word::default().into(), |state, _| {
-                let value = *state;
-                *state = Rpo256::merge(&[value, value]);
-                Some(value)
-            })
-            .collect::<Vec<_>>()
-            .into_iter()
-            .rev()
-            .collect();
+        let empty_hashes = EmptySubtreeRoots::empty_hashes(depth).to_vec();
 
         let root = empty_hashes[0].into();
         let store = Self {
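The removed iterator chain and the new `EmptySubtreeRoots::empty_hashes` call should produce the same table of digests. Below is a hedged sketch of that computation, using only APIs that already appear in this diff (`Word::default()`, `RpoDigest::from`, `Rpo256::merge`); it is not the crate's implementation.

```rust
use miden_crypto::hash::rpo::{Rpo256, RpoDigest};
use miden_crypto::Word;

// digests of empty subtrees for every layer, ordered from the empty-tree root
// (index 0) down to the digest of an empty leaf (last entry)
fn empty_hashes(depth: u8) -> Vec<RpoDigest> {
    let mut hashes = vec![RpoDigest::from(Word::default())];
    for _ in 0..depth {
        let previous = *hashes.last().unwrap();
        hashes.push(Rpo256::merge(&[previous, previous]));
    }
    hashes.reverse();
    hashes
}
```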
@@ -228,39 +239,35 @@ impl Store {
         (store, root)
     }
 
-    fn get_empty_node(&self, depth: usize) -> BranchNode {
-        let digest = self.empty_hashes[depth];
-        BranchNode {
-            left: digest,
-            right: digest,
-        }
+    fn replace_empty_subtrees(&mut self, hashes: Vec<RpoDigest>) {
+        self.empty_hashes = hashes;
     }
 
     fn check_leaf_node_exists(&self, key: u64) -> bool {
         self.leaves.contains_key(&key)
     }
 
-    fn get_leaf_node(&self, key: u64) -> Result<Word, MerkleError> {
-        self.leaves
-            .get(&key)
-            .cloned()
-            .ok_or(MerkleError::InvalidIndex(self.depth, key))
+    fn get_leaf_node(&self, key: u64) -> Option<Word> {
+        self.leaves.get(&key).copied()
     }
 
     fn insert_leaf_node(&mut self, key: u64, node: Word) {
         self.leaves.insert(key, node);
     }
 
-    fn get_branch_node(&self, key: u64, depth: u32) -> Result<BranchNode, MerkleError> {
-        self.branches
-            .get(&(key, depth))
-            .cloned()
-            .ok_or(MerkleError::InvalidIndex(depth, key))
+    fn get_branch_node(&self, index: &NodeIndex) -> BranchNode {
+        self.branches.get(index).cloned().unwrap_or_else(|| {
+            let node = self.empty_hashes[index.depth() as usize + 1];
+            BranchNode {
+                left: node,
+                right: node,
+            }
+        })
     }
 
-    fn insert_branch_node(&mut self, key: u64, depth: u32, left: RpoDigest, right: RpoDigest) {
-        let node = BranchNode { left, right };
-        self.branches.insert((key, depth), node);
+    fn insert_branch_node(&mut self, index: NodeIndex, left: RpoDigest, right: RpoDigest) {
+        let branch = BranchNode { left, right };
+        self.branches.insert(index, branch);
     }
 
     fn leaves_count(&self) -> usize {
src/merkle/simple_smt/tests.rs
@@ -1,9 +1,7 @@
 use super::{
-    super::{MerkleTree, RpoDigest, SimpleSmt},
-    Rpo256, Vec, Word,
+    super::{int_to_node, MerkleTree, RpoDigest, SimpleSmt},
+    NodeIndex, Rpo256, Vec, Word,
 };
-use crate::{Felt, FieldElement};
-use core::iter;
 use proptest::prelude::*;
 use rand_utils::prng_array;
 
@@ -32,7 +30,7 @@ const ZERO_VALUES8: [Word; 8] = [int_to_node(0); 8];
 
 #[test]
 fn build_empty_tree() {
-    let smt = SimpleSmt::new(iter::empty(), 3).unwrap();
+    let smt = SimpleSmt::new(3).unwrap();
     let mt = MerkleTree::new(ZERO_VALUES8.to_vec()).unwrap();
     assert_eq!(mt.root(), smt.root());
 }
@@ -40,7 +38,7 @@ fn build_empty_tree() {
 #[test]
 fn empty_digests_are_consistent() {
     let depth = 5;
-    let root = SimpleSmt::new(iter::empty(), depth).unwrap().root();
+    let root = SimpleSmt::new(depth).unwrap().root();
     let computed: [RpoDigest; 2] = (0..depth).fold([Default::default(); 2], |state, _| {
         let digest = Rpo256::merge(&state);
         [digest; 2]
@@ -51,7 +49,7 @@ fn empty_digests_are_consistent() {
 
 #[test]
 fn build_sparse_tree() {
-    let mut smt = SimpleSmt::new(iter::empty(), 3).unwrap();
+    let mut smt = SimpleSmt::new(3).unwrap();
     let mut values = ZERO_VALUES8.to_vec();
 
     // insert single value
@@ -62,7 +60,10 @@ fn build_sparse_tree() {
         .expect("Failed to insert leaf");
     let mt2 = MerkleTree::new(values.clone()).unwrap();
     assert_eq!(mt2.root(), smt.root());
-    assert_eq!(mt2.get_path(3, 6).unwrap(), smt.get_path(3, 6).unwrap());
+    assert_eq!(
+        mt2.get_path(NodeIndex::new(3, 6)).unwrap(),
+        smt.get_path(NodeIndex::new(3, 6)).unwrap()
+    );
 
     // insert second value at distinct leaf branch
     let key = 2;
@@ -72,61 +73,87 @@ fn build_sparse_tree() {
         .expect("Failed to insert leaf");
     let mt3 = MerkleTree::new(values).unwrap();
     assert_eq!(mt3.root(), smt.root());
-    assert_eq!(mt3.get_path(3, 2).unwrap(), smt.get_path(3, 2).unwrap());
+    assert_eq!(
+        mt3.get_path(NodeIndex::new(3, 2)).unwrap(),
+        smt.get_path(NodeIndex::new(3, 2)).unwrap()
+    );
 }
 
 #[test]
 fn build_full_tree() {
-    let tree = SimpleSmt::new(KEYS4.into_iter().zip(VALUES4.into_iter()), 2).unwrap();
+    let tree = SimpleSmt::new(2)
+        .unwrap()
+        .with_leaves(KEYS4.into_iter().zip(VALUES4.into_iter()))
+        .unwrap();
 
     let (root, node2, node3) = compute_internal_nodes();
     assert_eq!(root, tree.root());
-    assert_eq!(node2, tree.get_node(1, 0).unwrap());
-    assert_eq!(node3, tree.get_node(1, 1).unwrap());
+    assert_eq!(node2, tree.get_node(&NodeIndex::new(1, 0)).unwrap());
+    assert_eq!(node3, tree.get_node(&NodeIndex::new(1, 1)).unwrap());
 }
 
 #[test]
 fn get_values() {
-    let tree = SimpleSmt::new(KEYS4.into_iter().zip(VALUES4.into_iter()), 2).unwrap();
+    let tree = SimpleSmt::new(2)
+        .unwrap()
+        .with_leaves(KEYS4.into_iter().zip(VALUES4.into_iter()))
+        .unwrap();
 
     // check depth 2
-    assert_eq!(VALUES4[0], tree.get_node(2, 0).unwrap());
-    assert_eq!(VALUES4[1], tree.get_node(2, 1).unwrap());
-    assert_eq!(VALUES4[2], tree.get_node(2, 2).unwrap());
-    assert_eq!(VALUES4[3], tree.get_node(2, 3).unwrap());
+    assert_eq!(VALUES4[0], tree.get_node(&NodeIndex::new(2, 0)).unwrap());
+    assert_eq!(VALUES4[1], tree.get_node(&NodeIndex::new(2, 1)).unwrap());
+    assert_eq!(VALUES4[2], tree.get_node(&NodeIndex::new(2, 2)).unwrap());
+    assert_eq!(VALUES4[3], tree.get_node(&NodeIndex::new(2, 3)).unwrap());
 }
 
 #[test]
 fn get_path() {
-    let tree = SimpleSmt::new(KEYS4.into_iter().zip(VALUES4.into_iter()), 2).unwrap();
+    let tree = SimpleSmt::new(2)
+        .unwrap()
+        .with_leaves(KEYS4.into_iter().zip(VALUES4.into_iter()))
+        .unwrap();
 
     let (_, node2, node3) = compute_internal_nodes();
 
     // check depth 2
-    assert_eq!(vec![VALUES4[1], node3], tree.get_path(2, 0).unwrap());
-    assert_eq!(vec![VALUES4[0], node3], tree.get_path(2, 1).unwrap());
-    assert_eq!(vec![VALUES4[3], node2], tree.get_path(2, 2).unwrap());
-    assert_eq!(vec![VALUES4[2], node2], tree.get_path(2, 3).unwrap());
+    assert_eq!(
+        vec![VALUES4[1], node3],
+        *tree.get_path(NodeIndex::new(2, 0)).unwrap()
+    );
+    assert_eq!(
+        vec![VALUES4[0], node3],
+        *tree.get_path(NodeIndex::new(2, 1)).unwrap()
+    );
+    assert_eq!(
+        vec![VALUES4[3], node2],
+        *tree.get_path(NodeIndex::new(2, 2)).unwrap()
+    );
+    assert_eq!(
+        vec![VALUES4[2], node2],
+        *tree.get_path(NodeIndex::new(2, 3)).unwrap()
+    );
 
     // check depth 1
-    assert_eq!(vec![node3], tree.get_path(1, 0).unwrap());
-    assert_eq!(vec![node2], tree.get_path(1, 1).unwrap());
+    assert_eq!(vec![node3], *tree.get_path(NodeIndex::new(1, 0)).unwrap());
+    assert_eq!(vec![node2], *tree.get_path(NodeIndex::new(1, 1)).unwrap());
 }
 
 #[test]
 fn update_leaf() {
-    let mut tree = SimpleSmt::new(KEYS8.into_iter().zip(VALUES8.into_iter()), 3).unwrap();
+    let mut tree = SimpleSmt::new(3)
+        .unwrap()
+        .with_leaves(KEYS8.into_iter().zip(VALUES8.into_iter()))
+        .unwrap();
 
     // update one value
     let key = 3;
     let new_node = int_to_node(9);
     let mut expected_values = VALUES8.to_vec();
     expected_values[key] = new_node;
-    let expected_tree = SimpleSmt::new(
-        KEYS8.into_iter().zip(expected_values.clone().into_iter()),
-        3,
-    )
-    .unwrap();
+    let expected_tree = SimpleSmt::new(3)
+        .unwrap()
+        .with_leaves(KEYS8.into_iter().zip(expected_values.clone().into_iter()))
+        .unwrap();
 
     tree.update_leaf(key as u64, new_node).unwrap();
     assert_eq!(expected_tree.root, tree.root);
@@ -135,8 +162,10 @@ fn update_leaf() {
     let key = 6;
     let new_node = int_to_node(10);
     expected_values[key] = new_node;
-    let expected_tree =
-        SimpleSmt::new(KEYS8.into_iter().zip(expected_values.into_iter()), 3).unwrap();
+    let expected_tree = SimpleSmt::new(3)
+        .unwrap()
+        .with_leaves(KEYS8.into_iter().zip(expected_values.into_iter()))
+        .unwrap();
 
     tree.update_leaf(key as u64, new_node).unwrap();
     assert_eq!(expected_tree.root, tree.root);
@@ -171,11 +200,11 @@ fn small_tree_opening_is_consistent() {
 
     let depth = 3;
     let entries = vec![(0, a), (1, b), (4, c), (7, d)];
-    let tree = SimpleSmt::new(entries, depth).unwrap();
+    let tree = SimpleSmt::new(depth).unwrap().with_leaves(entries).unwrap();
 
     assert_eq!(tree.root(), Word::from(k));
 
-    let cases: Vec<(u32, u64, Vec<Word>)> = vec![
+    let cases: Vec<(u8, u64, Vec<Word>)> = vec![
         (3, 0, vec![b, f, j]),
         (3, 1, vec![a, f, j]),
         (3, 4, vec![z, h, i]),
@@ -189,9 +218,9 @@ fn small_tree_opening_is_consistent() {
     ];
 
     for (depth, key, path) in cases {
-        let opening = tree.get_path(depth, key).unwrap();
+        let opening = tree.get_path(NodeIndex::new(depth, key)).unwrap();
 
-        assert_eq!(path, opening);
+        assert_eq!(path, *opening);
     }
 }
 
@@ -202,7 +231,7 @@ proptest! {
         key in prop::num::u64::ANY,
         leaf in prop::num::u64::ANY,
     ) {
-        let mut tree = SimpleSmt::new(iter::empty(), depth).unwrap();
+        let mut tree = SimpleSmt::new(depth).unwrap();
 
         let key = key % (1 << depth as u64);
         let leaf = int_to_node(leaf);
@@ -213,7 +242,7 @@ proptest! {
        // traverse to root, fetching all paths
        for d in 1..depth {
            let k = key >> (depth - d);
-            tree.get_path(d, k).unwrap();
+            tree.get_path(NodeIndex::new(d, k)).unwrap();
        }
    }
 
@@ -223,7 +252,7 @@ proptest! {
        count in 2u8..10u8,
        ref seed in any::<[u8; 32]>()
    ) {
-        let mut tree = SimpleSmt::new(iter::empty(), depth).unwrap();
+        let mut tree = SimpleSmt::new(depth).unwrap();
        let mut seed = *seed;
        let leaves = (1 << depth) - 1;
 
@@ -257,7 +286,3 @@ fn compute_internal_nodes() -> (Word, Word, Word) {
 
     (root.into(), node2.into(), node3.into())
 }
-
-const fn int_to_node(value: u64) -> Word {
-    [Felt::new(value), Felt::ZERO, Felt::ZERO, Felt::ZERO]
-}
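The tests above now build trees with `SimpleSmt::new(depth)?.with_leaves(entries)?` and address openings with `NodeIndex::new(depth, value)`. A compact sketch of that flow, assuming `MerklePath` dereferences to its inner `Vec<Word>` (as the `*tree.get_path(..)` comparisons suggest) and using a hypothetical `int_to_node` helper:

```rust
use miden_crypto::merkle::{MerkleError, NodeIndex, SimpleSmt};
use miden_crypto::{Felt, Word, ZERO};

// hypothetical helper mirroring the tests' `int_to_node`
const fn int_to_node(value: u64) -> Word {
    [Felt::new(value), ZERO, ZERO, ZERO]
}

fn opening_example() -> Result<(), MerkleError> {
    let tree = SimpleSmt::new(3)?
        .with_leaves([(0_u64, int_to_node(1)), (1_u64, int_to_node(2))])?;

    // openings are addressed with `NodeIndex::new(depth, value)`
    let opening = tree.get_path(NodeIndex::new(3, 0))?;
    assert_eq!(opening.len(), 3, "one sibling per level between the leaf and the root");
    Ok(())
}
```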
478  src/merkle/store/mod.rs  Normal file
@@ -0,0 +1,478 @@
use super::{
    BTreeMap, BTreeSet, EmptySubtreeRoots, MerkleError, MerklePath, MerklePathSet, MerkleTree,
    NodeIndex, RootPath, Rpo256, RpoDigest, SimpleSmt, ValuePath, Vec, Word,
};
use crate::utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable};

#[cfg(test)]
mod tests;

#[derive(Debug, Default, Copy, Clone, Eq, PartialEq)]
pub struct Node {
    left: RpoDigest,
    right: RpoDigest,
}

/// An in-memory data store for Merkleized data.
///
/// This is an in-memory data store for Merkle trees: it allows the nodes of multiple trees to
/// live as long as necessary and without duplication, which enables the implementation of
/// space-efficient persistent data structures.
///
/// Example usage:
///
/// ```rust
/// # use miden_crypto::{ZERO, Felt, Word};
/// # use miden_crypto::merkle::{NodeIndex, MerkleStore, MerkleTree};
/// # use miden_crypto::hash::rpo::Rpo256;
/// # const fn int_to_node(value: u64) -> Word {
/// #     [Felt::new(value), ZERO, ZERO, ZERO]
/// # }
/// # let A = int_to_node(1);
/// # let B = int_to_node(2);
/// # let C = int_to_node(3);
/// # let D = int_to_node(4);
/// # let E = int_to_node(5);
/// # let F = int_to_node(6);
/// # let G = int_to_node(7);
/// # let H0 = int_to_node(8);
/// # let H1 = int_to_node(9);
/// # let T0 = MerkleTree::new([A, B, C, D, E, F, G, H0].to_vec()).expect("even number of leaves provided");
/// # let T1 = MerkleTree::new([A, B, C, D, E, F, G, H1].to_vec()).expect("even number of leaves provided");
/// # let ROOT0 = T0.root();
/// # let ROOT1 = T1.root();
/// let mut store = MerkleStore::new();
///
/// // the store is initialized with the SMT empty nodes
/// assert_eq!(store.num_internal_nodes(), 255);
///
/// // populates the store with two merkle trees, common nodes are shared
/// store.add_merkle_tree([A, B, C, D, E, F, G, H0]);
/// store.add_merkle_tree([A, B, C, D, E, F, G, H1]);
///
/// // every leaf except the last one is the same
/// for i in 0..7 {
///     let d0 = store.get_node(ROOT0, NodeIndex::new(3, i)).unwrap();
///     let d1 = store.get_node(ROOT1, NodeIndex::new(3, i)).unwrap();
///     assert_eq!(d0, d1, "Both trees have the same leaf at pos {i}");
/// }
///
/// // The leaves A-B-C-D are the same for both trees, and so are their 2 immediate parents
/// for i in 0..4 {
///     let d0 = store.get_path(ROOT0, NodeIndex::new(3, i)).unwrap();
///     let d1 = store.get_path(ROOT1, NodeIndex::new(3, i)).unwrap();
///     assert_eq!(d0.path[0..2], d1.path[0..2], "Both sub-trees are equal up to two levels");
/// }
///
/// // Common internal nodes are shared: the two added trees have a total of 30, but the store has
/// // only 10 new entries, corresponding to the 10 unique internal nodes of these trees.
/// assert_eq!(store.num_internal_nodes() - 255, 10);
/// ```
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct MerkleStore {
    nodes: BTreeMap<RpoDigest, Node>,
}

impl Default for MerkleStore {
    fn default() -> Self {
        Self::new()
    }
}

impl MerkleStore {
    // CONSTRUCTORS
    // --------------------------------------------------------------------------------------------

    /// Creates an empty `MerkleStore` instance.
    pub fn new() -> MerkleStore {
        // pre-populate the store with the empty hashes
        let subtrees = EmptySubtreeRoots::empty_hashes(255);
        let nodes = subtrees
            .iter()
            .rev()
            .copied()
            .zip(subtrees.iter().rev().skip(1).copied())
            .map(|(child, parent)| {
                (
                    parent,
                    Node {
                        left: child,
                        right: child,
                    },
                )
            })
            .collect();

        MerkleStore { nodes }
    }

    /// Appends the Merkle tree represented by its `leaves` to the store.
    pub fn with_merkle_tree<I>(mut self, leaves: I) -> Result<Self, MerkleError>
    where
        I: IntoIterator<Item = Word>,
    {
        self.add_merkle_tree(leaves)?;
        Ok(self)
    }

    /// Appends the sparse Merkle tree represented by its `entries` to the store.
    pub fn with_sparse_merkle_tree<R, I>(mut self, entries: R) -> Result<Self, MerkleError>
    where
        R: IntoIterator<IntoIter = I>,
        I: Iterator<Item = (u64, Word)> + ExactSizeIterator,
    {
        self.add_sparse_merkle_tree(entries)?;
        Ok(self)
    }

    /// Appends the provided Merkle path to the store.
    pub fn with_merkle_path(
        mut self,
        index_value: u64,
        node: Word,
        path: MerklePath,
    ) -> Result<Self, MerkleError> {
        self.add_merkle_path(index_value, node, path)?;
        Ok(self)
    }

    /// Appends the provided Merkle paths to the store.
    pub fn with_merkle_paths<I>(mut self, paths: I) -> Result<Self, MerkleError>
    where
        I: IntoIterator<Item = (u64, Word, MerklePath)>,
    {
        self.add_merkle_paths(paths)?;
        Ok(self)
    }

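The `with_*` constructors above are chainable counterparts of the `add_*` mutators defined later in this file. A hedged sketch of how a store might be assembled from several sources (the `int_to_node` helper is hypothetical, mirroring the doc example):

```rust
use miden_crypto::merkle::{MerkleError, MerkleStore};
use miden_crypto::{Felt, Word, ZERO};

// hypothetical leaf helper, mirroring `int_to_node` from the doc example above
const fn int_to_node(value: u64) -> Word {
    [Felt::new(value), ZERO, ZERO, ZERO]
}

fn build_store() -> Result<MerkleStore, MerkleError> {
    let leaves = [int_to_node(1), int_to_node(2), int_to_node(3), int_to_node(4)];

    // each `with_*` call adds one structure and returns the store for further chaining
    MerkleStore::new()
        .with_merkle_tree(leaves)?
        .with_sparse_merkle_tree([(0_u64, int_to_node(5)), (1_u64, int_to_node(6))])
}
```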
    // PUBLIC ACCESSORS
    // --------------------------------------------------------------------------------------------

    /// Return a count of the non-leaf nodes in the store.
    pub fn num_internal_nodes(&self) -> usize {
        self.nodes.len()
    }

    /// Returns the node at `index` rooted on the tree `root`.
    ///
    /// # Errors
    ///
    /// This method can return the following errors:
    /// - `RootNotInStore` if the `root` is not present in the store.
    /// - `NodeNotInStore` if a node needed to traverse from `root` to `index` is not present in the store.
    pub fn get_node(&self, root: Word, index: NodeIndex) -> Result<Word, MerkleError> {
        let mut hash: RpoDigest = root.into();

        // corner case: check the root is in the store when called with index `NodeIndex::root()`
        self.nodes
            .get(&hash)
            .ok_or(MerkleError::RootNotInStore(hash.into()))?;

        for bit in index.bit_iterator().rev() {
            let node = self
                .nodes
                .get(&hash)
                .ok_or(MerkleError::NodeNotInStore(hash.into(), index))?;
            hash = if bit { node.right } else { node.left }
        }

        Ok(hash.into())
    }

    /// Returns the node at the specified `index` and its opening to the `root`.
    ///
    /// The path starts at the sibling of the target leaf.
    ///
    /// # Errors
    ///
    /// This method can return the following errors:
    /// - `RootNotInStore` if the `root` is not present in the store.
    /// - `NodeNotInStore` if a node needed to traverse from `root` to `index` is not present in the store.
    pub fn get_path(&self, root: Word, index: NodeIndex) -> Result<ValuePath, MerkleError> {
        let mut hash: RpoDigest = root.into();
        let mut path = Vec::with_capacity(index.depth().into());

        // corner case: check the root is in the store when called with index `NodeIndex::root()`
        self.nodes
            .get(&hash)
            .ok_or(MerkleError::RootNotInStore(hash.into()))?;

        for bit in index.bit_iterator().rev() {
            let node = self
                .nodes
                .get(&hash)
                .ok_or(MerkleError::NodeNotInStore(hash.into(), index))?;

            hash = if bit {
                path.push(node.left.into());
                node.right
            } else {
                path.push(node.right.into());
                node.left
            }
        }

        // the path is computed from root to leaf, so it must be reversed
        path.reverse();

        Ok(ValuePath {
            value: hash.into(),
            path: MerklePath::new(path),
        })
    }

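`get_path` returns a `ValuePath` holding both the leaf value and its opening. A hedged sketch of how a caller might verify such an opening, assuming `MerklePath::compute_root(index, value)` behaves as the `add_merkle_paths` code below uses it:

```rust
use miden_crypto::merkle::{MerkleError, MerkleStore, MerkleTree, NodeIndex};
use miden_crypto::{Felt, Word, ZERO};

// hypothetical leaf helper, mirroring `int_to_node` from the doc example above
const fn int_to_node(value: u64) -> Word {
    [Felt::new(value), ZERO, ZERO, ZERO]
}

fn verify_opening() -> Result<(), MerkleError> {
    let leaves = [int_to_node(1), int_to_node(2), int_to_node(3), int_to_node(4)];
    let tree = MerkleTree::new(leaves.to_vec())?;
    let store = MerkleStore::new().with_merkle_tree(leaves)?;

    // `get_path` returns the leaf value together with its opening ...
    let value_path = store.get_path(tree.root(), NodeIndex::new(2, 1))?;
    assert_eq!(value_path.value, leaves[1]);

    // ... and the opening recomputes the root it was fetched from
    assert_eq!(value_path.path.compute_root(1, value_path.value), tree.root());
    Ok(())
}
```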
    // STATE MUTATORS
    // --------------------------------------------------------------------------------------------

    /// Adds all the nodes of a Merkle tree represented by `leaves`.
    ///
    /// This will instantiate a Merkle tree using `leaves` and include all the nodes into the
    /// store.
    ///
    /// # Errors
    ///
    /// This method may return the following errors:
    /// - `DepthTooSmall` if `leaves` is empty or contains only 1 element
    /// - `NumLeavesNotPowerOfTwo` if the number of leaves is not a power-of-two
    pub fn add_merkle_tree<I>(&mut self, leaves: I) -> Result<Word, MerkleError>
    where
        I: IntoIterator<Item = Word>,
    {
        let leaves: Vec<_> = leaves.into_iter().collect();
        if leaves.len() < 2 {
            return Err(MerkleError::DepthTooSmall(leaves.len() as u8));
        }

        let layers = leaves.len().ilog2();
        let tree = MerkleTree::new(leaves)?;

        let mut depth = 0;
        let mut parent_offset = 1;
        let mut child_offset = 2;
        while depth < layers {
            let layer_size = 1usize << depth;
            for _ in 0..layer_size {
                // the merkle tree uses a level-order representation, so left and right siblings
                // are next to each other
                let left = tree.nodes[child_offset];
                let right = tree.nodes[child_offset + 1];
                self.nodes.insert(
                    tree.nodes[parent_offset].into(),
                    Node {
                        left: left.into(),
                        right: right.into(),
                    },
                );
                parent_offset += 1;
                child_offset += 2;
            }
            depth += 1;
        }

        Ok(tree.nodes[1])
    }

    /// Adds all the nodes of a sparse Merkle tree represented by `entries`.
    ///
    /// This will instantiate a sparse Merkle tree using `entries` and include all the nodes into
    /// the store.
    ///
    /// # Errors
    ///
    /// This will return `InvalidEntriesCount` if the length of `entries` is not `63`.
    pub fn add_sparse_merkle_tree<R, I>(&mut self, entries: R) -> Result<Word, MerkleError>
    where
        R: IntoIterator<IntoIter = I>,
        I: Iterator<Item = (u64, Word)> + ExactSizeIterator,
    {
        let smt = SimpleSmt::new(SimpleSmt::MAX_DEPTH)?.with_leaves(entries)?;
        for branch in smt.store.branches.values() {
            let parent = Rpo256::merge(&[branch.left, branch.right]);
            self.nodes.insert(
                parent,
                Node {
                    left: branch.left,
                    right: branch.right,
                },
            );
        }

        Ok(smt.root())
    }

    /// Adds all the nodes of a Merkle path represented by `path`, opening to `node`. Returns the
    /// new root.
    ///
    /// This will compute the sibling elements determined by the Merkle `path` and `node`, and
    /// include all the nodes into the store.
    pub fn add_merkle_path(
        &mut self,
        index_value: u64,
        mut node: Word,
        path: MerklePath,
    ) -> Result<Word, MerkleError> {
        let mut index = NodeIndex::new(self.nodes.len() as u8, index_value);

        for sibling in path {
            let (left, right) = match index.is_value_odd() {
                true => (sibling, node),
                false => (node, sibling),
            };
            let parent = Rpo256::merge(&[left.into(), right.into()]);
            self.nodes.insert(
                parent,
                Node {
                    left: left.into(),
                    right: right.into(),
                },
            );

            index.move_up();
            node = parent.into();
        }

        Ok(node)
    }

    /// Adds all the nodes of multiple Merkle paths into the store.
    ///
    /// This will compute the sibling elements for each Merkle `path` and include all the nodes
    /// into the store.
    ///
    /// For further reference, check [MerkleStore::add_merkle_path].
    ///
    /// # Errors
    ///
    /// Every path must resolve to the same root, otherwise this will return a `ConflictingRoots`
    /// error.
    pub fn add_merkle_paths<I>(&mut self, paths: I) -> Result<Word, MerkleError>
    where
        I: IntoIterator<Item = (u64, Word, MerklePath)>,
    {
        let paths: Vec<(u64, Word, MerklePath)> = paths.into_iter().collect();

        let roots: BTreeSet<RpoDigest> = paths
            .iter()
            .map(|(index, node, path)| path.compute_root(*index, *node).into())
            .collect();

        if roots.len() != 1 {
            return Err(MerkleError::ConflictingRoots(
                roots.iter().map(|v| Word::from(*v)).collect(),
            ));
        }

        for (index_value, node, path) in paths {
            self.add_merkle_path(index_value, node, path)?;
        }

        Ok(roots.iter().next().unwrap().into())
    }

    /// Appends the provided [MerklePathSet] into the store.
    ///
    /// For further reference, check [MerkleStore::add_merkle_path].
    pub fn add_merkle_path_set(&mut self, path_set: &MerklePathSet) -> Result<Word, MerkleError> {
        let root = path_set.root();
        for (index, path) in path_set.to_paths() {
            self.add_merkle_path(index, path.value, path.path)?;
        }
        Ok(root)
    }

    /// Sets a node to `value`.
    ///
    /// # Errors
    ///
    /// This method can return the following errors:
    /// - `RootNotInStore` if the `root` is not present in the store.
    /// - `NodeNotInStore` if a node needed to traverse from `root` to `index` is not present in the store.
    pub fn set_node(
        &mut self,
        mut root: Word,
        index: NodeIndex,
        value: Word,
    ) -> Result<RootPath, MerkleError> {
        let node = value;
        let ValuePath { value, path } = self.get_path(root, index)?;

        // performs the update only if the node value differs from the opening
        if node != value {
            root = self.add_merkle_path(index.value(), node, path.clone())?;
        }

        Ok(RootPath { root, path })
    }

    pub fn merge_roots(&mut self, root1: Word, root2: Word) -> Result<Word, MerkleError> {
        let root1: RpoDigest = root1.into();
        let root2: RpoDigest = root2.into();

        if !self.nodes.contains_key(&root1) {
            Err(MerkleError::NodeNotInStore(
                root1.into(),
                NodeIndex::new(0, 0),
            ))
        } else if !self.nodes.contains_key(&root2) {
            Err(MerkleError::NodeNotInStore(
                root2.into(),
                NodeIndex::new(0, 0),
            ))
        } else {
            let parent: Word = Rpo256::merge(&[root1, root2]).into();
            self.nodes.insert(
                parent.into(),
                Node {
                    left: root1,
                    right: root2,
                },
            );

            Ok(parent)
        }
    }
}

// SERIALIZATION
// ================================================================================================

impl Serializable for Node {
    fn write_into<W: ByteWriter>(&self, target: &mut W) {
        self.left.write_into(target);
        self.right.write_into(target);
    }
}

impl Deserializable for Node {
    fn read_from<R: ByteReader>(source: &mut R) -> Result<Self, DeserializationError> {
        let left = RpoDigest::read_from(source)?;
        let right = RpoDigest::read_from(source)?;
        Ok(Node { left, right })
    }
}

impl Serializable for MerkleStore {
    fn write_into<W: ByteWriter>(&self, target: &mut W) {
        target.write_u64(self.nodes.len() as u64);

        for (k, v) in self.nodes.iter() {
            k.write_into(target);
            v.write_into(target);
        }
    }
}

impl Deserializable for MerkleStore {
    fn read_from<R: ByteReader>(source: &mut R) -> Result<Self, DeserializationError> {
        let len = source.read_u64()?;
        let mut nodes: BTreeMap<RpoDigest, Node> = BTreeMap::new();

        for _ in 0..len {
            let key = RpoDigest::read_from(source)?;
            let value = Node::read_from(source)?;
            nodes.insert(key, value);
        }

        Ok(MerkleStore { nodes })
    }
}
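Since `Node` and `MerkleStore` implement `Serializable`/`Deserializable`, a store can be round-tripped through bytes. A minimal sketch, assuming the provided trait methods `to_bytes` and `read_from_bytes` are re-exported under `miden_crypto::utils` (as the new `src/utils.rs` below suggests):

```rust
use miden_crypto::merkle::MerkleStore;
use miden_crypto::utils::{Deserializable, Serializable};

// encode the store and decode it back; both methods are provided by the traits
fn roundtrip(store: &MerkleStore) -> MerkleStore {
    let bytes = store.to_bytes();
    MerkleStore::read_from_bytes(&bytes).expect("a freshly serialized store must deserialize")
}
```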
637  src/merkle/store/tests.rs  Normal file
@@ -0,0 +1,637 @@
|
use super::*;
|
||||||
|
use crate::{
|
||||||
|
hash::rpo::Rpo256,
|
||||||
|
merkle::{int_to_node, MerklePathSet},
|
||||||
|
Felt, Word, WORD_SIZE, ZERO,
|
||||||
|
};
|
||||||
|
|
||||||
|
#[cfg(std)]
|
||||||
|
use std::error::Error;
|
||||||
|
|
||||||
|
const KEYS4: [u64; 4] = [0, 1, 2, 3];
|
||||||
|
const LEAVES4: [Word; 4] = [
|
||||||
|
int_to_node(1),
|
||||||
|
int_to_node(2),
|
||||||
|
int_to_node(3),
|
||||||
|
int_to_node(4),
|
||||||
|
];
|
||||||
|
const EMPTY: Word = [ZERO; WORD_SIZE];
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_root_not_in_store() -> Result<(), MerkleError> {
|
||||||
|
let mtree = MerkleTree::new(LEAVES4.to_vec())?;
|
||||||
|
let store = MerkleStore::default().with_merkle_tree(LEAVES4)?;
|
||||||
|
assert_eq!(
|
||||||
|
store.get_node(LEAVES4[0], NodeIndex::new(mtree.depth(), 0)),
|
||||||
|
Err(MerkleError::RootNotInStore(LEAVES4[0])),
|
||||||
|
"Leaf 0 is not a root"
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
store.get_path(LEAVES4[0], NodeIndex::new(mtree.depth(), 0)),
|
||||||
|
Err(MerkleError::RootNotInStore(LEAVES4[0])),
|
||||||
|
"Leaf 0 is not a root"
|
||||||
|
);
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_merkle_tree() -> Result<(), MerkleError> {
|
||||||
|
let mut store = MerkleStore::default();
|
||||||
|
|
||||||
|
let mtree = MerkleTree::new(LEAVES4.to_vec())?;
|
||||||
|
store.add_merkle_tree(LEAVES4.to_vec())?;
|
||||||
|
|
||||||
|
// STORE LEAVES ARE CORRECT ==============================================================
|
||||||
|
// checks the leaves in the store corresponds to the expected values
|
||||||
|
assert_eq!(
|
||||||
|
store.get_node(mtree.root(), NodeIndex::new(mtree.depth(), 0)),
|
||||||
|
Ok(LEAVES4[0]),
|
||||||
|
"node 0 must be in the tree"
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
store.get_node(mtree.root(), NodeIndex::new(mtree.depth(), 1)),
|
||||||
|
Ok(LEAVES4[1]),
|
||||||
|
"node 1 must be in the tree"
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
store.get_node(mtree.root(), NodeIndex::new(mtree.depth(), 2)),
|
||||||
|
Ok(LEAVES4[2]),
|
||||||
|
"node 2 must be in the tree"
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
store.get_node(mtree.root(), NodeIndex::new(mtree.depth(), 3)),
|
||||||
|
Ok(LEAVES4[3]),
|
||||||
|
"node 3 must be in the tree"
|
||||||
|
);
|
||||||
|
|
||||||
|
// STORE LEAVES MATCH TREE ===============================================================
|
||||||
|
// sanity check the values returned by the store and the tree
|
||||||
|
assert_eq!(
|
||||||
|
mtree.get_node(NodeIndex::new(mtree.depth(), 0)),
|
||||||
|
store.get_node(mtree.root(), NodeIndex::new(mtree.depth(), 0)),
|
||||||
|
"node 0 must be the same for both MerkleTree and MerkleStore"
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
mtree.get_node(NodeIndex::new(mtree.depth(), 1)),
|
||||||
|
store.get_node(mtree.root(), NodeIndex::new(mtree.depth(), 1)),
|
||||||
|
"node 1 must be the same for both MerkleTree and MerkleStore"
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
mtree.get_node(NodeIndex::new(mtree.depth(), 2)),
|
||||||
|
store.get_node(mtree.root(), NodeIndex::new(mtree.depth(), 2)),
|
||||||
|
"node 2 must be the same for both MerkleTree and MerkleStore"
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
mtree.get_node(NodeIndex::new(mtree.depth(), 3)),
|
||||||
|
store.get_node(mtree.root(), NodeIndex::new(mtree.depth(), 3)),
|
||||||
|
"node 3 must be the same for both MerkleTree and MerkleStore"
|
||||||
|
);
|
||||||
|
|
||||||
|
// STORE MERKLE PATH MATCHS ==============================================================
|
||||||
|
// assert the merkle path returned by the store is the same as the one in the tree
|
||||||
|
let result = store
|
||||||
|
.get_path(mtree.root(), NodeIndex::new(mtree.depth(), 0))
|
||||||
|
.unwrap();
|
||||||
|
assert_eq!(
|
||||||
|
LEAVES4[0], result.value,
|
||||||
|
"Value for merkle path at index 0 must match leaf value"
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
mtree.get_path(NodeIndex::new(mtree.depth(), 0)),
|
||||||
|
Ok(result.path),
|
||||||
|
"merkle path for index 0 must be the same for the MerkleTree and MerkleStore"
|
||||||
|
);
|
||||||
|
|
||||||
|
let result = store
|
||||||
|
.get_path(mtree.root(), NodeIndex::new(mtree.depth(), 1))
|
||||||
|
.unwrap();
|
||||||
|
assert_eq!(
|
||||||
|
LEAVES4[1], result.value,
|
||||||
|
"Value for merkle path at index 0 must match leaf value"
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
mtree.get_path(NodeIndex::new(mtree.depth(), 1)),
|
||||||
|
Ok(result.path),
|
||||||
|
"merkle path for index 1 must be the same for the MerkleTree and MerkleStore"
|
||||||
|
);
|
||||||
|
|
||||||
|
let result = store
|
||||||
|
.get_path(mtree.root(), NodeIndex::new(mtree.depth(), 2))
|
||||||
|
.unwrap();
|
||||||
|
assert_eq!(
|
||||||
|
LEAVES4[2], result.value,
|
||||||
|
"Value for merkle path at index 0 must match leaf value"
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
mtree.get_path(NodeIndex::new(mtree.depth(), 2)),
|
||||||
|
Ok(result.path),
|
||||||
|
"merkle path for index 0 must be the same for the MerkleTree and MerkleStore"
|
||||||
|
);
|
||||||
|
|
||||||
|
let result = store
|
||||||
|
.get_path(mtree.root(), NodeIndex::new(mtree.depth(), 3))
|
||||||
|
.unwrap();
|
||||||
|
assert_eq!(
|
||||||
|
LEAVES4[3], result.value,
|
||||||
|
"Value for merkle path at index 0 must match leaf value"
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
mtree.get_path(NodeIndex::new(mtree.depth(), 3)),
|
||||||
|
Ok(result.path),
|
||||||
|
"merkle path for index 0 must be the same for the MerkleTree and MerkleStore"
|
||||||
|
);
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_empty_roots() {
|
||||||
|
let store = MerkleStore::default();
|
||||||
|
let mut root = RpoDigest::new(EMPTY);
|
||||||
|
|
||||||
|
for depth in 0..255 {
|
||||||
|
root = Rpo256::merge(&[root; 2]);
|
||||||
|
assert!(
|
||||||
|
store.get_node(root.into(), NodeIndex::new(0, 0)).is_ok(),
|
||||||
|
"The root of the empty tree of depth {depth} must be registered"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_leaf_paths_for_empty_trees() -> Result<(), MerkleError> {
|
||||||
|
let store = MerkleStore::default();
|
||||||
|
|
||||||
|
// Starts at 1 because leafs are not included in the store.
|
||||||
|
// Ends at 64 because it is not possible to represent an index of a depth greater than 64,
|
||||||
|
// because a u64 is used to index the leaf.
|
||||||
|
for depth in 1..64 {
|
||||||
|
let smt = SimpleSmt::new(depth)?;
|
||||||
|
|
||||||
|
let index = NodeIndex::new(depth, 0);
|
||||||
|
let store_path = store.get_path(smt.root(), index)?;
|
||||||
|
let smt_path = smt.get_path(index)?;
|
||||||
|
assert_eq!(
|
||||||
|
store_path.value, EMPTY,
|
||||||
|
"the leaf of an empty tree is always ZERO"
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
store_path.path, smt_path,
|
||||||
|
"the returned merkle path does not match the computed values"
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
store_path.path.compute_root(depth.into(), EMPTY),
|
||||||
|
smt.root(),
|
||||||
|
"computed root from the path must match the empty tree root"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_get_invalid_node() {
|
||||||
|
let mut store = MerkleStore::default();
|
||||||
|
let mtree = MerkleTree::new(LEAVES4.to_vec()).expect("creating a merkle tree must work");
|
||||||
|
store
|
||||||
|
.add_merkle_tree(LEAVES4.to_vec())
|
||||||
|
.expect("adding a merkle tree to the store must work");
|
||||||
|
let _ = store.get_node(mtree.root(), NodeIndex::new(mtree.depth(), 3));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_add_sparse_merkle_tree_one_level() -> Result<(), MerkleError> {
|
||||||
|
let mut store = MerkleStore::default();
|
||||||
|
let keys2: [u64; 2] = [0, 1];
|
||||||
|
let leaves2: [Word; 2] = [int_to_node(1), int_to_node(2)];
|
||||||
|
store.add_sparse_merkle_tree(keys2.into_iter().zip(leaves2.into_iter()))?;
|
||||||
|
let smt = SimpleSmt::new(1)
|
||||||
|
.unwrap()
|
||||||
|
.with_leaves(keys2.into_iter().zip(leaves2.into_iter()))
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
|
let idx = NodeIndex::new(1, 0);
|
||||||
|
assert_eq!(smt.get_node(&idx).unwrap(), leaves2[0]);
|
||||||
|
assert_eq!(
|
||||||
|
store.get_node(smt.root(), idx).unwrap(),
|
||||||
|
smt.get_node(&idx).unwrap()
|
||||||
|
);
|
||||||
|
|
||||||
|
let idx = NodeIndex::new(1, 1);
|
||||||
|
assert_eq!(smt.get_node(&idx).unwrap(), leaves2[1]);
|
||||||
|
assert_eq!(
|
||||||
|
store.get_node(smt.root(), idx).unwrap(),
|
||||||
|
smt.get_node(&idx).unwrap()
|
||||||
|
);
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_sparse_merkle_tree() -> Result<(), MerkleError> {
|
||||||
|
let mut store = MerkleStore::default();
|
||||||
|
store.add_sparse_merkle_tree(KEYS4.into_iter().zip(LEAVES4.into_iter()))?;
|
||||||
|
|
||||||
|
let smt = SimpleSmt::new(SimpleSmt::MAX_DEPTH)
|
||||||
|
.unwrap()
|
||||||
|
.with_leaves(KEYS4.into_iter().zip(LEAVES4.into_iter()))
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
|
// STORE LEAVES ARE CORRECT ==============================================================
|
||||||
|
// checks the leaves in the store corresponds to the expected values
|
||||||
|
assert_eq!(
|
||||||
|
store.get_node(smt.root(), NodeIndex::new(smt.depth(), 0)),
|
||||||
|
Ok(LEAVES4[0]),
|
||||||
|
"node 0 must be in the tree"
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
store.get_node(smt.root(), NodeIndex::new(smt.depth(), 1)),
|
||||||
|
Ok(LEAVES4[1]),
|
||||||
|
"node 1 must be in the tree"
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
store.get_node(smt.root(), NodeIndex::new(smt.depth(), 2)),
|
||||||
|
Ok(LEAVES4[2]),
|
||||||
|
"node 2 must be in the tree"
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
store.get_node(smt.root(), NodeIndex::new(smt.depth(), 3)),
|
||||||
|
Ok(LEAVES4[3]),
|
||||||
|
"node 3 must be in the tree"
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
store.get_node(smt.root(), NodeIndex::new(smt.depth(), 4)),
|
||||||
|
Ok(EMPTY),
|
||||||
|
"unmodified node 4 must be ZERO"
|
||||||
|
);
|
||||||
|
|
||||||
|
// STORE LEAVES MATCH TREE ===============================================================
|
||||||
|
// sanity check the values returned by the store and the tree
|
||||||
|
assert_eq!(
|
||||||
|
smt.get_node(&NodeIndex::new(smt.depth(), 0)),
|
||||||
|
store.get_node(smt.root(), NodeIndex::new(smt.depth(), 0)),
|
||||||
|
"node 0 must be the same for both SparseMerkleTree and MerkleStore"
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
smt.get_node(&NodeIndex::new(smt.depth(), 1)),
|
||||||
|
store.get_node(smt.root(), NodeIndex::new(smt.depth(), 1)),
|
||||||
|
"node 1 must be the same for both SparseMerkleTree and MerkleStore"
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
smt.get_node(&NodeIndex::new(smt.depth(), 2)),
|
||||||
|
store.get_node(smt.root(), NodeIndex::new(smt.depth(), 2)),
|
||||||
|
"node 2 must be the same for both SparseMerkleTree and MerkleStore"
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
smt.get_node(&NodeIndex::new(smt.depth(), 3)),
|
||||||
|
store.get_node(smt.root(), NodeIndex::new(smt.depth(), 3)),
|
||||||
|
"node 3 must be the same for both SparseMerkleTree and MerkleStore"
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
smt.get_node(&NodeIndex::new(smt.depth(), 4)),
|
||||||
|
store.get_node(smt.root(), NodeIndex::new(smt.depth(), 4)),
|
||||||
|
"node 4 must be the same for both SparseMerkleTree and MerkleStore"
|
||||||
|
);
|
||||||
|
|
||||||
|
// STORE MERKLE PATH MATCHS ==============================================================
|
||||||
|
// assert the merkle path returned by the store is the same as the one in the tree
|
||||||
|
let result = store
|
||||||
|
.get_path(smt.root(), NodeIndex::new(smt.depth(), 0))
|
||||||
|
.unwrap();
|
||||||
|
assert_eq!(
|
||||||
|
LEAVES4[0], result.value,
|
||||||
|
"Value for merkle path at index 0 must match leaf value"
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
smt.get_path(NodeIndex::new(smt.depth(), 0)),
|
||||||
|
Ok(result.path),
|
||||||
|
"merkle path for index 0 must be the same for the MerkleTree and MerkleStore"
|
||||||
|
);
|
||||||
|
|
||||||
|
let result = store
|
||||||
|
.get_path(smt.root(), NodeIndex::new(smt.depth(), 1))
|
||||||
|
.unwrap();
|
||||||
|
assert_eq!(
|
||||||
|
LEAVES4[1], result.value,
|
||||||
|
"Value for merkle path at index 1 must match leaf value"
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
smt.get_path(NodeIndex::new(smt.depth(), 1)),
|
||||||
|
Ok(result.path),
|
||||||
|
"merkle path for index 1 must be the same for the MerkleTree and MerkleStore"
|
||||||
|
);
|
||||||
|
|
||||||
|
let result = store
|
||||||
|
.get_path(smt.root(), NodeIndex::new(smt.depth(), 2))
|
||||||
|
.unwrap();
|
||||||
|
assert_eq!(
|
||||||
|
LEAVES4[2], result.value,
|
||||||
|
"Value for merkle path at index 2 must match leaf value"
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
smt.get_path(NodeIndex::new(smt.depth(), 2)),
|
||||||
|
Ok(result.path),
|
||||||
|
"merkle path for index 2 must be the same for the MerkleTree and MerkleStore"
|
||||||
|
);
|
||||||
|
|
||||||
|
let result = store
|
||||||
|
.get_path(smt.root(), NodeIndex::new(smt.depth(), 3))
|
||||||
|
.unwrap();
|
||||||
|
assert_eq!(
|
||||||
|
LEAVES4[3], result.value,
|
||||||
|
"Value for merkle path at index 3 must match leaf value"
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
smt.get_path(NodeIndex::new(smt.depth(), 3)),
|
||||||
|
Ok(result.path),
|
||||||
|
"merkle path for index 3 must be the same for the MerkleTree and MerkleStore"
|
||||||
|
);
|
||||||
|
|
||||||
|
let result = store
|
||||||
|
.get_path(smt.root(), NodeIndex::new(smt.depth(), 4))
|
||||||
|
.unwrap();
|
||||||
|
assert_eq!(
|
||||||
|
EMPTY, result.value,
|
||||||
|
"Value for merkle path at index 4 must match leaf value"
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
smt.get_path(NodeIndex::new(smt.depth(), 4)),
|
||||||
|
Ok(result.path),
|
||||||
|
"merkle path for index 4 must be the same for the MerkleTree and MerkleStore"
|
||||||
|
);
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_add_merkle_paths() -> Result<(), MerkleError> {
|
||||||
|
let mtree = MerkleTree::new(LEAVES4.to_vec())?;
|
||||||
|
|
||||||
|
let i0 = 0;
|
||||||
|
let p0 = mtree.get_path(NodeIndex::new(2, i0)).unwrap();
|
||||||
|
|
||||||
|
let i1 = 1;
|
||||||
|
let p1 = mtree.get_path(NodeIndex::new(2, i1)).unwrap();
|
||||||
|
|
||||||
|
let i2 = 2;
|
||||||
|
let p2 = mtree.get_path(NodeIndex::new(2, i2)).unwrap();
|
||||||
|
|
||||||
|
let i3 = 3;
|
||||||
|
let p3 = mtree.get_path(NodeIndex::new(2, i3)).unwrap();
|
||||||
|
|
||||||
|
let paths = [
|
||||||
|
(i0, LEAVES4[i0 as usize], p0),
|
||||||
|
(i1, LEAVES4[i1 as usize], p1),
|
||||||
|
(i2, LEAVES4[i2 as usize], p2),
|
||||||
|
(i3, LEAVES4[i3 as usize], p3),
|
||||||
|
];
|
||||||
|
|
||||||
|
let mut store = MerkleStore::default();
|
||||||
|
store
|
||||||
|
.add_merkle_paths(paths.clone())
|
||||||
|
.expect("the valid paths must work");
|
||||||
|
|
||||||
|
let depth = 2;
|
||||||
|
let set = MerklePathSet::new(depth).with_paths(paths).unwrap();
|
||||||
|
|
||||||
|
// STORE LEAVES ARE CORRECT ==============================================================
|
||||||
|
// checks the leaves in the store corresponds to the expected values
|
||||||
|
assert_eq!(
|
||||||
|
store.get_node(set.root(), NodeIndex::new(set.depth(), 0)),
|
||||||
|
Ok(LEAVES4[0]),
|
||||||
|
"node 0 must be in the set"
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
store.get_node(set.root(), NodeIndex::new(set.depth(), 1)),
|
||||||
|
Ok(LEAVES4[1]),
|
||||||
|
"node 1 must be in the set"
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
store.get_node(set.root(), NodeIndex::new(set.depth(), 2)),
|
||||||
|
Ok(LEAVES4[2]),
|
||||||
|
"node 2 must be in the set"
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
store.get_node(set.root(), NodeIndex::new(set.depth(), 3)),
|
||||||
|
Ok(LEAVES4[3]),
|
||||||
|
"node 3 must be in the set"
|
||||||
|
);
|
||||||
|
|
||||||
|
// STORE LEAVES MATCH SET ================================================================
|
||||||
|
// sanity check the values returned by the store and the set
|
||||||
|
assert_eq!(
|
||||||
|
set.get_node(NodeIndex::new(set.depth(), 0)),
|
||||||
|
store.get_node(set.root(), NodeIndex::new(set.depth(), 0)),
|
||||||
|
"node 0 must be the same for both SparseMerkleTree and MerkleStore"
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
set.get_node(NodeIndex::new(set.depth(), 1)),
|
||||||
|
store.get_node(set.root(), NodeIndex::new(set.depth(), 1)),
|
||||||
|
"node 1 must be the same for both SparseMerkleTree and MerkleStore"
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
set.get_node(NodeIndex::new(set.depth(), 2)),
|
||||||
|
store.get_node(set.root(), NodeIndex::new(set.depth(), 2)),
|
||||||
|
"node 2 must be the same for both SparseMerkleTree and MerkleStore"
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
set.get_node(NodeIndex::new(set.depth(), 3)),
|
||||||
|
store.get_node(set.root(), NodeIndex::new(set.depth(), 3)),
|
||||||
|
"node 3 must be the same for both SparseMerkleTree and MerkleStore"
|
||||||
|
);
|
||||||
|
|
||||||
|
// STORE MERKLE PATH MATCHS ==============================================================
|
||||||
|
// assert the merkle path returned by the store is the same as the one in the set
|
||||||
|
let result = store
|
||||||
|
.get_path(set.root(), NodeIndex::new(set.depth(), 0))
|
||||||
|
.unwrap();
|
||||||
|
assert_eq!(
|
||||||
|
LEAVES4[0], result.value,
|
||||||
|
"Value for merkle path at index 0 must match leaf value"
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
set.get_path(NodeIndex::new(set.depth(), 0)),
|
||||||
|
Ok(result.path),
|
||||||
|
"merkle path for index 0 must be the same for the MerkleTree and MerkleStore"
|
||||||
|
);
|
||||||
|
|
||||||
|
let result = store
|
||||||
|
.get_path(set.root(), NodeIndex::new(set.depth(), 1))
|
||||||
|
.unwrap();
|
||||||
|
assert_eq!(
|
||||||
|
LEAVES4[1], result.value,
|
||||||
|
"Value for merkle path at index 0 must match leaf value"
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
set.get_path(NodeIndex::new(set.depth(), 1)),
|
||||||
|
Ok(result.path),
|
||||||
|
"merkle path for index 1 must be the same for the MerkleTree and MerkleStore"
|
||||||
|
);
|
||||||
|
|
||||||
|
let result = store
|
||||||
|
.get_path(set.root(), NodeIndex::new(set.depth(), 2))
|
||||||
|
.unwrap();
|
||||||
|
assert_eq!(
|
||||||
|
LEAVES4[2], result.value,
|
||||||
|
"Value for merkle path at index 0 must match leaf value"
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
set.get_path(NodeIndex::new(set.depth(), 2)),
|
||||||
|
Ok(result.path),
|
||||||
|
"merkle path for index 0 must be the same for the MerkleTree and MerkleStore"
|
||||||
|
);
|
||||||
|
|
||||||
|
let result = store
|
||||||
|
.get_path(set.root(), NodeIndex::new(set.depth(), 3))
|
||||||
|
.unwrap();
|
||||||
|
assert_eq!(
|
||||||
|
LEAVES4[3], result.value,
|
||||||
|
"Value for merkle path at index 0 must match leaf value"
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
set.get_path(NodeIndex::new(set.depth(), 3)),
|
||||||
|
Ok(result.path),
|
||||||
|
"merkle path for index 0 must be the same for the MerkleTree and MerkleStore"
|
||||||
|
);
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn wont_open_to_different_depth_root() {
|
||||||
|
let empty = EmptySubtreeRoots::empty_hashes(64);
|
||||||
|
let a = [Felt::new(1); 4];
|
||||||
|
let b = [Felt::new(2); 4];
|
||||||
|
|
||||||
|
// Compute the root for a different depth. We cherry-pick this specific depth to prevent a
|
||||||
|
// regression to a bug in the past that allowed the user to fetch a node at a depth lower than
|
||||||
|
// the inserted path of a Merkle tree.
|
||||||
|
let mut root = Rpo256::merge(&[a.into(), b.into()]);
|
||||||
|
for depth in (1..=63).rev() {
|
||||||
|
root = Rpo256::merge(&[root, empty[depth]]);
|
||||||
|
}
|
||||||
|
let root = Word::from(root);
|
||||||
|
|
||||||
|
// For this example, the depth of the Merkle tree is 1, as we have only two leaves. Here we
|
||||||
|
// attempt to fetch a node on the maximum depth, and it should fail because the root shouldn't
|
||||||
|
// exist for the set.
|
||||||
|
let store = MerkleStore::default().with_merkle_tree([a, b]).unwrap();
|
||||||
|
let index = NodeIndex::root();
|
||||||
|
let err = store.get_node(root, index).err().unwrap();
|
||||||
|
assert_eq!(err, MerkleError::RootNotInStore(root));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn store_path_opens_from_leaf() {
|
||||||
|
let a = [Felt::new(1); 4];
|
||||||
|
let b = [Felt::new(2); 4];
|
||||||
|
let c = [Felt::new(3); 4];
|
||||||
|
let d = [Felt::new(4); 4];
|
||||||
|
let e = [Felt::new(5); 4];
|
||||||
|
let f = [Felt::new(6); 4];
|
||||||
|
let g = [Felt::new(7); 4];
|
||||||
|
let h = [Felt::new(8); 4];
|
||||||
|
|
||||||
|
let i = Rpo256::merge(&[a.into(), b.into()]);
|
||||||
|
let j = Rpo256::merge(&[c.into(), d.into()]);
|
||||||
|
let k = Rpo256::merge(&[e.into(), f.into()]);
|
||||||
|
let l = Rpo256::merge(&[g.into(), h.into()]);
|
||||||
|
|
||||||
|
let m = Rpo256::merge(&[i.into(), j.into()]);
|
||||||
|
let n = Rpo256::merge(&[k.into(), l.into()]);
|
||||||
|
|
||||||
|
let root = Rpo256::merge(&[m.into(), n.into()]);
|
||||||
|
|
||||||
|
let store = MerkleStore::default()
|
||||||
|
.with_merkle_tree([a, b, c, d, e, f, g, h])
|
||||||
|
.unwrap();
|
||||||
|
let path = store
|
||||||
|
.get_path(root.into(), NodeIndex::new(3, 1))
|
||||||
|
.unwrap()
|
||||||
|
.path;
|
||||||
|
|
||||||
|
let expected = MerklePath::new([a.into(), j.into(), n.into()].to_vec());
|
||||||
|
assert_eq!(path, expected);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_set_node() -> Result<(), MerkleError> {
|
||||||
|
let mtree = MerkleTree::new(LEAVES4.to_vec())?;
|
||||||
|
let mut store = MerkleStore::default().with_merkle_tree(LEAVES4)?;
|
||||||
|
let value = int_to_node(42);
|
||||||
|
let index = NodeIndex::new(mtree.depth(), 0);
|
||||||
|
let new_root = store.set_node(mtree.root(), index, value)?.root;
|
||||||
|
assert_eq!(
|
||||||
|
store.get_node(new_root, index),
|
||||||
|
Ok(value),
|
||||||
|
"Value must have changed"
|
||||||
|
);
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_constructors() -> Result<(), MerkleError> {
|
||||||
|
let store = MerkleStore::new().with_merkle_tree(LEAVES4)?;
|
||||||
|
let mtree = MerkleTree::new(LEAVES4.to_vec())?;
|
||||||
|
|
||||||
|
let depth = mtree.depth();
|
||||||
|
let leaves = 2u64.pow(depth.into());
|
||||||
|
for index in 0..leaves {
|
||||||
|
let index = NodeIndex::new(depth, index);
|
||||||
|
let value_path = store.get_path(mtree.root(), index)?;
|
||||||
|
assert_eq!(mtree.get_path(index)?, value_path.path);
|
||||||
|
}
|
||||||
|
|
||||||
|
let store = MerkleStore::default()
|
||||||
|
.with_sparse_merkle_tree(KEYS4.into_iter().zip(LEAVES4.into_iter()))?;
|
||||||
|
let smt = SimpleSmt::new(SimpleSmt::MAX_DEPTH)
|
||||||
|
.unwrap()
|
||||||
|
.with_leaves(KEYS4.into_iter().zip(LEAVES4.into_iter()))
|
||||||
|
.unwrap();
|
||||||
|
let depth = smt.depth();
|
||||||
|
|
||||||
|
for key in KEYS4 {
|
||||||
|
let index = NodeIndex::new(depth, key);
|
||||||
|
let value_path = store.get_path(smt.root(), index)?;
|
||||||
|
assert_eq!(smt.get_path(index)?, value_path.path);
|
||||||
|
}
|
||||||
|
|
||||||
|
let d = 2;
|
||||||
|
let paths = [
|
||||||
|
(0, LEAVES4[0], mtree.get_path(NodeIndex::new(d, 0)).unwrap()),
|
||||||
|
(1, LEAVES4[1], mtree.get_path(NodeIndex::new(d, 1)).unwrap()),
|
||||||
|
(2, LEAVES4[2], mtree.get_path(NodeIndex::new(d, 2)).unwrap()),
|
||||||
|
(3, LEAVES4[3], mtree.get_path(NodeIndex::new(d, 3)).unwrap()),
|
||||||
|
];
|
||||||
|
|
||||||
|
let store1 = MerkleStore::default().with_merkle_paths(paths.clone())?;
|
||||||
|
let store2 = MerkleStore::default()
|
||||||
|
.with_merkle_path(0, LEAVES4[0], mtree.get_path(NodeIndex::new(d, 0))?)?
|
||||||
|
.with_merkle_path(1, LEAVES4[1], mtree.get_path(NodeIndex::new(d, 1))?)?
|
||||||
|
.with_merkle_path(2, LEAVES4[2], mtree.get_path(NodeIndex::new(d, 2))?)?
|
||||||
|
.with_merkle_path(3, LEAVES4[3], mtree.get_path(NodeIndex::new(d, 3))?)?;
|
||||||
|
let set = MerklePathSet::new(d).with_paths(paths).unwrap();
|
||||||
|
|
||||||
|
for key in [0, 1, 2, 3] {
|
||||||
|
let index = NodeIndex::new(d, key);
|
||||||
|
let value_path1 = store1.get_path(set.root(), index)?;
|
||||||
|
let value_path2 = store2.get_path(set.root(), index)?;
|
||||||
|
assert_eq!(value_path1, value_path2);
|
||||||
|
|
||||||
|
let index = NodeIndex::new(d, key);
|
||||||
|
assert_eq!(set.get_path(index)?, value_path1.path);
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(std)]
|
||||||
|
#[test]
|
||||||
|
fn test_serialization() -> Result<(), Box<dyn Error>> {
|
||||||
|
let original = MerkleStore::new().with_merkle_tree(LEAVES4)?;
|
||||||
|
let decoded = MerkleStore::read_from_bytes(&original.to_bytes())?;
|
||||||
|
assert_eq!(original, decoded);
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
21  src/utils.rs  Normal file
@@ -0,0 +1,21 @@
use super::Word;
use crate::utils::string::String;
use core::fmt::{self, Write};

// RE-EXPORTS
// ================================================================================================
pub use winter_utils::{
    collections, string, uninit_vector, ByteReader, ByteWriter, Deserializable,
    DeserializationError, Serializable, SliceReader,
};

/// Converts a [Word] into hex.
pub fn word_to_hex(w: &Word) -> Result<String, fmt::Error> {
    let mut s = String::new();

    for byte in w.iter().flat_map(|e| e.to_bytes()) {
        write!(s, "{byte:02x}")?;
    }

    Ok(s)
}
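A small usage sketch for `word_to_hex`, assuming the `utils` module and this function are publicly reachable from the crate root and that each `Felt` serializes to 8 bytes (so a `Word` yields 64 hex characters):

```rust
use miden_crypto::utils::word_to_hex;
use miden_crypto::{Word, ZERO};

fn example() {
    // the zero word: four field elements, each encoding to 8 zero bytes
    let w: Word = [ZERO; 4];
    let hex = word_to_hex(&w).expect("formatting into a String cannot fail");
    assert_eq!(hex.len(), 64);
}
```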