chore: clean up crate interfaces

Bobbin Threadbare
2022-11-30 21:17:52 -08:00
parent 0257538a1e
commit ee1d5dc2e1
13 changed files with 248 additions and 324 deletions

src/merkle/merkle_path_set.rs

@@ -1,12 +1,9 @@
-use super::{MerkleError, Word};
-use crate::{hash::merge, BTreeMap, Vec, ZERO};
+use super::{BTreeMap, MerkleError, Rpo256, Vec, Word, ZERO};
 
 // MERKLE PATH SET
 // ================================================================================================
 
 /// A set of Merkle paths.
-///
-/// This struct is intended to be used as one of the variants of the MerkleSet enum.
 #[derive(Clone, Debug)]
 pub struct MerklePathSet {
     root: Word,
@@ -208,9 +205,9 @@ fn is_even(pos: u64) -> bool {
 /// - sibling — neighboring vertex in the tree
 fn calculate_parent_hash(node: Word, node_pos: u64, sibling: Word) -> Word {
     if is_even(node_pos) {
-        merge(&[node.into(), sibling.into()]).into()
+        Rpo256::merge(&[node.into(), sibling.into()]).into()
     } else {
-        merge(&[sibling.into(), node.into()]).into()
+        Rpo256::merge(&[sibling.into(), node.into()]).into()
     }
 }
@@ -220,7 +217,7 @@ fn compute_path_trace(path: &[Word], depth: u32, index: u64) -> (Vec<Word>, Word
     let mut computed_hashes = Vec::<Word>::new();
 
-    let mut comp_hash = merge(&[path[0].into(), path[1].into()]).into();
+    let mut comp_hash = Rpo256::merge(&[path[0].into(), path[1].into()]).into();
 
     if path.len() != 2 {
         for path_hash in path.iter().skip(2) {
@@ -238,7 +235,7 @@ fn compute_path_root(path: &[Word], depth: u32, index: u64) -> Word {
     let mut pos = 2u64.pow(depth) + index;
 
     // hash that is obtained after calculating the current hash and path hash
-    let mut comp_hash = merge(&[path[0].into(), path[1].into()]).into();
+    let mut comp_hash = Rpo256::merge(&[path[0].into(), path[1].into()]).into();
 
     for path_hash in path.iter().skip(2) {
         pos /= 2;
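The hunks above swap the free-standing `merge` import for the associated function `Rpo256::merge`; the ordering rule itself is unchanged: a node at an even position is a left child and is hashed first, otherwise it is hashed second. A minimal sketch of the same folding pattern against the public API (assuming the crate is consumed as `miden_crypto`; `fold_path` is a hypothetical helper, not part of this commit):

```rust
use miden_crypto::{hash::rpo::Rpo256, Word};

// Hypothetical helper mirroring calculate_parent_hash()/compute_path_root():
// folds a node and its siblings up to the root, hashing the node on the left
// when its position is even and on the right when it is odd.
fn fold_path(mut node: Word, mut pos: u64, siblings: &[Word]) -> Word {
    for sibling in siblings {
        node = if pos % 2 == 0 {
            Rpo256::merge(&[node.into(), (*sibling).into()]).into()
        } else {
            Rpo256::merge(&[(*sibling).into(), node.into()]).into()
        };
        pos /= 2;
    }
    node
}
```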

src/merkle/merkle_tree.rs

@@ -1,16 +1,12 @@
-use super::MerkleError;
-use crate::{
-    hash::{merge, Digest},
-    log2, uninit_vector, Felt, FieldElement, Vec, Word,
-};
+use super::{Digest, Felt, MerkleError, Rpo256, Vec, Word};
+use crate::{utils::uninit_vector, FieldElement};
 use core::slice;
+use winter_math::log2;
 
 // MERKLE TREE
 // ================================================================================================
 
 /// A fully-balanced binary Merkle tree (i.e., a tree where the number of leaves is a power of two).
-///
-/// This struct is intended to be used as one of the variants of the MerkleSet enum.
 #[derive(Clone, Debug)]
 pub struct MerkleTree {
     nodes: Vec<Word>,
@@ -43,7 +39,7 @@ impl MerkleTree {
 
         // calculate all internal tree nodes
        for i in (1..n).rev() {
-            nodes[i] = merge(&two_nodes[i]).into();
+            nodes[i] = Rpo256::merge(&two_nodes[i]).into();
        }
 
        Ok(Self { nodes })
@@ -131,7 +127,7 @@ impl MerkleTree {
 
        for _ in 0..depth {
            index /= 2;
-            self.nodes[index] = merge(&two_nodes[index]).into();
+            self.nodes[index] = Rpo256::merge(&two_nodes[index]).into();
        }
 
        Ok(())
@@ -143,7 +139,10 @@ impl MerkleTree {
 
 #[cfg(test)]
 mod tests {
-    use crate::{hash::Hasher, merkle::int_to_node, ElementHasher, HashFn, Word};
+    use super::{
+        super::{int_to_node, Rpo256},
+        Word,
+    };
 
     const LEAVES4: [Word; 4] = [
         int_to_node(1),
@@ -244,9 +243,9 @@ mod tests {
     // --------------------------------------------------------------------------------------------
 
     fn compute_internal_nodes() -> (Word, Word, Word) {
-        let node2 = Hasher::hash_elements(&[LEAVES4[0], LEAVES4[1]].concat());
-        let node3 = Hasher::hash_elements(&[LEAVES4[2], LEAVES4[3]].concat());
-        let root = Hasher::merge(&[node2, node3]);
+        let node2 = Rpo256::hash_elements(&[LEAVES4[0], LEAVES4[1]].concat());
+        let node3 = Rpo256::hash_elements(&[LEAVES4[2], LEAVES4[3]].concat());
+        let root = Rpo256::merge(&[node2, node3]);
 
         (root.into(), node2.into(), node3.into())
     }
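The updated test still pins down the tree's structure: each internal node is the RPO hash of its two children, and hashing the two concatenated child words with `hash_elements` must agree with `merge` on their digests. A sketch of the same check through the public interface (assuming the crate is consumed as `miden_crypto` and that `MerkleTree::new` takes the leaves as a `Vec<Word>`):

```rust
use miden_crypto::{hash::rpo::Rpo256, merkle::MerkleTree, Felt, Word, ZERO};

fn main() {
    // Four leaves, each a Word ([Felt; 4]); the values are arbitrary.
    let leaves: Vec<Word> = (1..=4u64)
        .map(|i| [Felt::new(i), ZERO, ZERO, ZERO])
        .collect();

    let tree = MerkleTree::new(leaves.clone()).expect("leaf count must be a power of two");

    // Recompute the internal nodes by hand, as compute_internal_nodes() does.
    let node2 = Rpo256::hash_elements(&[leaves[0], leaves[1]].concat());
    let node3 = Rpo256::hash_elements(&[leaves[2], leaves[3]].concat());
    let root: Word = Rpo256::merge(&[node2, node3]).into();

    assert_eq!(tree.root(), root);
}
```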

src/merkle/mod.rs

@@ -1,10 +1,14 @@
-use crate::{Vec, Word};
+use super::{
+    hash::rpo::{Rpo256, RpoDigest as Digest},
+    utils::collections::{BTreeMap, Vec},
+    Felt, Word, ZERO,
+};
 
-#[cfg(test)]
-use crate::{Felt, ZERO};
+mod merkle_tree;
+pub use merkle_tree::MerkleTree;
 
-pub mod merkle_path_set;
-pub mod merkle_tree;
-
+mod merkle_path_set;
 pub use merkle_path_set::MerklePathSet;
-pub use merkle_tree::MerkleTree;
+
+// ERRORS
+// ================================================================================================
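Net effect of the module changes: `merkle_tree` and `merkle_path_set` are now private modules, so downstream code reaches the types only through the re-exports. Assuming the crate is consumed as `miden_crypto`:

```rust
// `use miden_crypto::merkle::merkle_tree::MerkleTree;` no longer compiles;
// only the re-exported paths remain public.
use miden_crypto::merkle::{MerklePathSet, MerkleTree};
```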