
chore: clean up crate interfaces

Branch: al-gkr-basic-workflow
Author: Bobbin Threadbare, 2 years ago
Commit: ee1d5dc2e1
13 changed files with 248 additions and 324 deletions:

1. Cargo.toml (+1 -1)
2. benches/hash.rs (+9 -9)
3. src/hash/blake/mod.rs (+96 -7)
4. src/hash/blake/tests.rs (+1 -1)
5. src/hash/mod.rs (+4 -20)
6. src/hash/rpo/digest.rs (+25 -28)
7. src/hash/rpo/mds_freq.rs (+1 -3)
8. src/hash/rpo/mod.rs (+61 -183)
9. src/hash/rpo/tests.rs (+9 -32)
10. src/lib.rs (+15 -14)
11. src/merkle/merkle_path_set.rs (+5 -8)
12. src/merkle/merkle_tree.rs (+12 -13)
13. src/merkle/mod.rs (+9 -5)

Cargo.toml (+1 -1)

@@ -25,6 +25,6 @@ winter_math = { version = "0.4.1", package = "winter-math", default-features = f
winter_utils = { version = "0.4.1", package = "winter-utils", default-features = false }
[dev-dependencies]
criterion = "0.4"
proptest = "1.0.0"
rand_utils = { version = "0.4", package = "winter-rand-utils" }
criterion = "0.4"

benches/hash.rs (+9 -9)

@@ -1,25 +1,25 @@
use criterion::{black_box, criterion_group, criterion_main, BatchSize, Criterion};
use miden_crypto::{
hash::{Digest, Hasher},
ElementHasher, Felt, HashFn,
hash::rpo::{Rpo256, RpoDigest},
Felt,
};
use rand_utils::rand_value;
fn rpo256_2to1(c: &mut Criterion) {
let v: [Digest; 2] = [Hasher::hash(&[1_u8]), Hasher::hash(&[2_u8])];
let v: [RpoDigest; 2] = [Rpo256::hash(&[1_u8]), Rpo256::hash(&[2_u8])];
c.bench_function("RPO256 2-to-1 hashing (cached)", |bench| {
bench.iter(|| Hasher::merge(black_box(&v)))
bench.iter(|| Rpo256::merge(black_box(&v)))
});
c.bench_function("RPO256 2-to-1 hashing (random)", |bench| {
bench.iter_batched(
|| {
[
Hasher::hash(&rand_value::<u64>().to_le_bytes()),
Hasher::hash(&rand_value::<u64>().to_le_bytes()),
Rpo256::hash(&rand_value::<u64>().to_le_bytes()),
Rpo256::hash(&rand_value::<u64>().to_le_bytes()),
]
},
|state| Hasher::merge(&state),
|state| Rpo256::merge(&state),
BatchSize::SmallInput,
)
});
@@ -33,7 +33,7 @@ fn rpo256_sequential(c: &mut Criterion) {
.try_into()
.expect("should not fail");
c.bench_function("RPO256 sequential hashing (cached)", |bench| {
bench.iter(|| Hasher::hash_elements(black_box(&v)))
bench.iter(|| Rpo256::hash_elements(black_box(&v)))
});
c.bench_function("RPO256 sequential hashing (random)", |bench| {
@@ -47,7 +47,7 @@ fn rpo256_sequential(c: &mut Criterion) {
.expect("should not fail");
v
},
|state| Hasher::hash_elements(&state),
|state| Rpo256::hash_elements(&state),
BatchSize::SmallInput,
)
});

src/hash/blake/mod.rs (+96 -7)

@@ -1,7 +1,5 @@
use crate::{
ByteReader, ByteWriter, Deserializable, DeserializationError, Digest, ElementHasher, Felt,
FieldElement, HashFn, Serializable, StarkField,
};
use super::{Digest, ElementHasher, Felt, FieldElement, Hasher, StarkField};
use crate::utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable};
use core::{
mem::{size_of, transmute, transmute_copy},
ops::Deref,
@@ -11,6 +9,13 @@ use core::{
#[cfg(test)]
mod tests;
// CONSTANTS
// ================================================================================================
const DIGEST32_BYTES: usize = 32;
const DIGEST24_BYTES: usize = 24;
const DIGEST20_BYTES: usize = 20;
// BLAKE3 N-BIT OUTPUT
// ================================================================================================
@@ -35,6 +40,18 @@ impl Deref for Blake3Digest {
}
}
impl<const N: usize> From<Blake3Digest<N>> for [u8; N] {
fn from(value: Blake3Digest<N>) -> Self {
value.0
}
}
impl<const N: usize> From<[u8; N]> for Blake3Digest<N> {
fn from(value: [u8; N]) -> Self {
Self(value)
}
}
impl<const N: usize> Serializable for Blake3Digest<N> {
fn write_into<W: ByteWriter>(&self, target: &mut W) {
target.write_u8_slice(&self.0);
@@ -61,7 +78,7 @@ impl Digest for Blake3Digest {
/// 256-bit output blake3 hasher.
pub struct Blake3_256;
impl HashFn for Blake3_256 {
impl Hasher for Blake3_256 {
type Digest = Blake3Digest<32>;
fn hash(bytes: &[u8]) -> Self::Digest {
@@ -91,13 +108,37 @@ impl ElementHasher for Blake3_256 {
}
}
impl Blake3_256 {
/// Returns a hash of the provided sequence of bytes.
#[inline(always)]
pub fn hash(bytes: &[u8]) -> Blake3Digest<DIGEST32_BYTES> {
<Self as Hasher>::hash(bytes)
}
/// Returns a hash of two digests. This method is intended for use in construction of
/// Merkle trees and verification of Merkle paths.
#[inline(always)]
pub fn merge(values: &[Blake3Digest<DIGEST32_BYTES>; 2]) -> Blake3Digest<DIGEST32_BYTES> {
<Self as Hasher>::merge(values)
}
/// Returns a hash of the provided field elements.
#[inline(always)]
pub fn hash_elements<E>(elements: &[E]) -> Blake3Digest<DIGEST32_BYTES>
where
E: FieldElement<BaseField = Felt>,
{
<Self as ElementHasher>::hash_elements(elements)
}
}
// BLAKE3 192-BIT OUTPUT
// ================================================================================================
/// 192-bit output blake3 hasher.
pub struct Blake3_192;
impl HashFn for Blake3_192 {
impl Hasher for Blake3_192 {
type Digest = Blake3Digest<24>;
fn hash(bytes: &[u8]) -> Self::Digest {
@@ -127,13 +168,37 @@ impl ElementHasher for Blake3_192 {
}
}
impl Blake3_192 {
/// Returns a hash of the provided sequence of bytes.
#[inline(always)]
pub fn hash(bytes: &[u8]) -> Blake3Digest<DIGEST24_BYTES> {
<Self as Hasher>::hash(bytes)
}
/// Returns a hash of two digests. This method is intended for use in construction of
/// Merkle trees and verification of Merkle paths.
#[inline(always)]
pub fn merge(values: &[Blake3Digest<DIGEST24_BYTES>; 2]) -> Blake3Digest<DIGEST24_BYTES> {
<Self as Hasher>::merge(values)
}
/// Returns a hash of the provided field elements.
#[inline(always)]
pub fn hash_elements<E>(elements: &[E]) -> Blake3Digest<DIGEST24_BYTES>
where
E: FieldElement<BaseField = Felt>,
{
<Self as ElementHasher>::hash_elements(elements)
}
}
// BLAKE3 160-BIT OUTPUT
// ================================================================================================
/// 160-bit output blake3 hasher.
pub struct Blake3_160;
impl HashFn for Blake3_160 {
impl Hasher for Blake3_160 {
type Digest = Blake3Digest<20>;
fn hash(bytes: &[u8]) -> Self::Digest {
@@ -163,6 +228,30 @@ impl ElementHasher for Blake3_160 {
}
}
impl Blake3_160 {
/// Returns a hash of the provided sequence of bytes.
#[inline(always)]
pub fn hash(bytes: &[u8]) -> Blake3Digest<DIGEST20_BYTES> {
<Self as Hasher>::hash(bytes)
}
/// Returns a hash of two digests. This method is intended for use in construction of
/// Merkle trees and verification of Merkle paths.
#[inline(always)]
pub fn merge(values: &[Blake3Digest<DIGEST20_BYTES>; 2]) -> Blake3Digest<DIGEST20_BYTES> {
<Self as Hasher>::merge(values)
}
/// Returns a hash of the provided field elements.
#[inline(always)]
pub fn hash_elements<E>(elements: &[E]) -> Blake3Digest<DIGEST20_BYTES>
where
E: FieldElement<BaseField = Felt>,
{
<Self as ElementHasher>::hash_elements(elements)
}
}
// HELPER FUNCTIONS
// ================================================================================================
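The three Blake3 wrappers now expose hash, merge, and hash_elements as inherent methods, so callers no longer need the Hasher or ElementHasher traits in scope. A minimal sketch of the resulting call sites, with module paths assumed from this diff:

```rust
// Sketch only: paths and digest sizes are taken from the diff above.
use miden_crypto::hash::blake::{Blake3Digest, Blake3_160, Blake3_192, Blake3_256};

fn blake_usage() {
    // Inherent methods: no `Hasher`/`ElementHasher` trait imports required.
    let a: Blake3Digest<32> = Blake3_256::hash(b"some data");
    let b: Blake3Digest<32> = Blake3_256::hash(b"other data");
    let parent = Blake3_256::merge(&[a, b]);

    // The new `From` impls convert a digest to and from a plain byte array.
    let bytes: [u8; 32] = parent.into();
    let _roundtrip: Blake3Digest<32> = bytes.into();

    // The shorter-output variants follow the same pattern.
    let _d24: Blake3Digest<24> = Blake3_192::hash(b"some data");
    let _d20: Blake3Digest<20> = Blake3_160::hash(b"some data");
}
```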

src/hash/blake/tests.rs (+1 -1)

@@ -1,5 +1,5 @@
use super::*;
use crate::Vec;
use crate::utils::collections::Vec;
use proptest::prelude::*;
proptest! {

src/hash/mod.rs (+4 -20)

@@ -1,21 +1,5 @@
use crate::{ElementHasher, HashFn};
use super::{Felt, FieldElement, StarkField, ONE, ZERO};
use winter_crypto::{Digest, ElementHasher, Hasher};
mod blake;
pub use blake::{Blake3Digest, Blake3_160, Blake3_192, Blake3_256};
mod rpo;
pub use rpo::Rpo256 as Hasher;
pub use rpo::{INV_MDS, MDS};
// TYPE ALIASES
// ================================================================================================
pub type Digest = <Hasher as HashFn>::Digest;
// HELPER FUNCTIONS
// ================================================================================================
#[inline(always)]
pub fn merge(values: &[Digest; 2]) -> Digest {
Hasher::merge(values)
}
pub mod blake;
pub mod rpo;
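With the old crate-level Hasher/Digest aliases and the free merge helper gone, the hash module simply exposes blake and rpo as public submodules. A hedged sketch of what imports look like after the change:

```rust
// Concrete hashers are now named explicitly instead of going through the
// removed `hash::Hasher` alias and `hash::merge` helper.
use miden_crypto::hash::blake::Blake3_256;
use miden_crypto::hash::rpo::{Rpo256, RpoDigest};

fn hash_module_usage() {
    let d: RpoDigest = Rpo256::hash(b"hello");
    let _parent = Rpo256::merge(&[d, d]);
    let _b = Blake3_256::hash(b"hello");
}
```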

src/hash/rpo/digest.rs (+25 -28)

@@ -1,17 +1,16 @@
use super::DIGEST_SIZE;
use crate::{
ByteReader, ByteWriter, Deserializable, DeserializationError, Digest, Felt, Serializable,
StarkField, String, ZERO,
use super::{Digest, Felt, StarkField, DIGEST_SIZE, ZERO};
use crate::utils::{
string::String, ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable,
};
use core::{cmp::Ordering, ops::Deref};
// DIGEST TRAIT IMPLEMENTATIONS
// ================================================================================================
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub struct RpoDigest256([Felt; DIGEST_SIZE]);
#[derive(Debug, Default, Copy, Clone, Eq, PartialEq)]
pub struct RpoDigest([Felt; DIGEST_SIZE]);
impl RpoDigest256 {
impl RpoDigest {
pub fn new(value: [Felt; DIGEST_SIZE]) -> Self {
Self(value)
}
@@ -20,6 +19,10 @@ impl RpoDigest256 {
self.as_ref()
}
pub fn as_bytes(&self) -> [u8; 32] {
<Self as Digest>::as_bytes(self)
}
pub fn digests_as_elements<'a, I>(digests: I) -> impl Iterator<Item = &'a Felt>
where
I: Iterator<Item = &'a Self>,
@@ -28,7 +31,7 @@ impl RpoDigest256 {
}
}
impl Digest for RpoDigest256 {
impl Digest for RpoDigest {
fn as_bytes(&self) -> [u8; 32] {
let mut result = [0; 32];
@@ -41,19 +44,13 @@ impl Digest for RpoDigest256 {
}
}
impl Default for RpoDigest256 {
fn default() -> Self {
RpoDigest256([Felt::default(); DIGEST_SIZE])
}
}
impl Serializable for RpoDigest256 {
impl Serializable for RpoDigest {
fn write_into<W: ByteWriter>(&self, target: &mut W) {
target.write_u8_slice(&self.as_bytes());
}
}
impl Deserializable for RpoDigest256 {
impl Deserializable for RpoDigest {
fn read_from<R: ByteReader>(source: &mut R) -> Result<Self, DeserializationError> {
let mut inner: [Felt; DIGEST_SIZE] = [ZERO; DIGEST_SIZE];
for inner in inner.iter_mut() {
@@ -70,25 +67,25 @@ impl Deserializable for RpoDigest256 {
}
}
impl From<[Felt; DIGEST_SIZE]> for RpoDigest256 {
impl From<[Felt; DIGEST_SIZE]> for RpoDigest {
fn from(value: [Felt; DIGEST_SIZE]) -> Self {
Self(value)
}
}
impl From<RpoDigest256> for [Felt; DIGEST_SIZE] {
fn from(value: RpoDigest256) -> Self {
impl From<RpoDigest> for [Felt; DIGEST_SIZE] {
fn from(value: RpoDigest) -> Self {
value.0
}
}
impl From<RpoDigest256> for [u8; 32] {
fn from(value: RpoDigest256) -> Self {
impl From<RpoDigest> for [u8; 32] {
fn from(value: RpoDigest) -> Self {
value.as_bytes()
}
}
impl Deref for RpoDigest256 {
impl Deref for RpoDigest {
type Target = [Felt; DIGEST_SIZE];
fn deref(&self) -> &Self::Target {
@@ -96,7 +93,7 @@ impl Deref for RpoDigest256 {
}
}
impl Ord for RpoDigest256 {
impl Ord for RpoDigest {
fn cmp(&self, other: &Self) -> Ordering {
// compare the inner u64 of both elements.
//
@@ -120,7 +117,7 @@ impl Ord for RpoDigest256 {
}
}
impl PartialOrd for RpoDigest256 {
impl PartialOrd for RpoDigest {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.cmp(other))
}
@@ -132,8 +129,8 @@ impl PartialOrd for RpoDigest256 {
#[cfg(test)]
mod tests {
use super::RpoDigest256;
use crate::{Deserializable, Felt, Serializable, SliceReader};
use super::{Deserializable, Felt, RpoDigest, Serializable};
use crate::utils::SliceReader;
use rand_utils::rand_value;
#[test]
@@ -143,14 +140,14 @@ mod tests {
let e3 = Felt::new(rand_value());
let e4 = Felt::new(rand_value());
let d1 = RpoDigest256([e1, e2, e3, e4]);
let d1 = RpoDigest([e1, e2, e3, e4]);
let mut bytes = vec![];
d1.write_into(&mut bytes);
assert_eq!(32, bytes.len());
let mut reader = SliceReader::new(&bytes);
let d2 = RpoDigest256::read_from(&mut reader).unwrap();
let d2 = RpoDigest::read_from(&mut reader).unwrap();
assert_eq!(d1, d2);
}
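Besides the rename from RpoDigest256 to RpoDigest, the digest now derives Default and gains an inherent as_bytes next to the existing From conversions. A small sketch of the conversions and of the serialization round-trip mirrored by the unit test above, with utility paths assumed from this diff:

```rust
use miden_crypto::hash::rpo::{Rpo256, RpoDigest};
use miden_crypto::utils::{Deserializable, Serializable, SliceReader};
use miden_crypto::Felt;

fn digest_conversions() {
    let d: RpoDigest = Rpo256::hash(b"example");

    // Inherent byte access plus `From` conversions in both directions.
    let bytes: [u8; 32] = d.as_bytes();
    let elements: [Felt; 4] = d.into();
    let _back: RpoDigest = elements.into();

    // `Default` is now derived, so an all-zero digest needs no manual loop.
    let _zeroed = RpoDigest::default();

    // Serialization round-trip, as in the test at the end of this file.
    let mut buf = Vec::new();
    d.write_into(&mut buf);
    let mut reader = SliceReader::new(&buf);
    let decoded = RpoDigest::read_from(&mut reader).unwrap();
    assert_eq!(bytes, decoded.as_bytes());
}
```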

src/hash/rpo/mds_freq.rs (+1 -3)

@@ -156,9 +156,7 @@ const fn block3(x: [i64; 3], y: [i64; 3]) -> [i64; 3] {
#[cfg(test)]
mod tests {
use super::super::Rpo256;
use crate::hash::rpo::MDS;
use crate::{Felt, FieldElement};
use super::super::{Felt, FieldElement, Rpo256, MDS};
use proptest::prelude::*;
const STATE_WIDTH: usize = 12;

src/hash/rpo/mod.rs (+61 -183)

@@ -1,9 +1,8 @@
use super::{ElementHasher, HashFn};
use crate::{Felt, FieldElement, StarkField, ONE, ZERO};
use super::{Digest, ElementHasher, Felt, FieldElement, Hasher, StarkField, ONE, ZERO};
use core::{convert::TryInto, ops::Range};
mod digest;
pub use digest::RpoDigest256;
pub use digest::RpoDigest;
mod mds_freq;
use mds_freq::mds_multiply_freq;
@@ -53,7 +52,7 @@ const INV_ALPHA: u64 = 10540996611094048183;
// HASHER IMPLEMENTATION
// ================================================================================================
/// Implementation of [Hasher] trait for Rescue Prime Optimized (Rpo256) hash function with 256-bit output.
/// Implementation of the Rescue Prime Optimized hash function with 256-bit output.
///
/// The hash function is implemented according to the Rescue Prime Optimized
/// [specifications](https://github.com/ASDiscreteMathematics/rpo)
@@ -91,8 +90,8 @@ const INV_ALPHA: u64 = 10540996611094048183;
/// using [hash()](Rpo256::hash) function.
pub struct Rpo256();
impl HashFn for Rpo256 {
type Digest = RpoDigest256;
impl Hasher for Rpo256 {
type Digest = RpoDigest;
fn hash(bytes: &[u8]) -> Self::Digest {
// compute the number of elements required to represent the string; we will be processing
@@ -150,7 +149,7 @@ impl HashFn for Rpo256 {
}
// return the first 4 elements of the state as hash result
RpoDigest256::new(state[DIGEST_RANGE].try_into().unwrap())
RpoDigest::new(state[DIGEST_RANGE].try_into().unwrap())
}
fn merge(values: &[Self::Digest; 2]) -> Self::Digest {
@@ -164,7 +163,7 @@ impl HashFn for Rpo256 {
// apply the RPO permutation and return the first four elements of the state
Self::apply_permutation(&mut state);
RpoDigest256::new(state[DIGEST_RANGE].try_into().unwrap())
RpoDigest::new(state[DIGEST_RANGE].try_into().unwrap())
}
fn merge_with_int(seed: Self::Digest, value: u64) -> Self::Digest {
@@ -191,7 +190,7 @@ impl HashFn for Rpo256 {
// apply the RPO permutation and return the first four elements of the state
Self::apply_permutation(&mut state);
RpoDigest256::new(state[DIGEST_RANGE].try_into().unwrap())
RpoDigest::new(state[DIGEST_RANGE].try_into().unwrap())
}
}
@@ -237,7 +236,7 @@ impl ElementHasher for Rpo256 {
}
// return the first 4 elements of the state as hash result
RpoDigest256::new(state[DIGEST_RANGE].try_into().unwrap())
RpoDigest::new(state[DIGEST_RANGE].try_into().unwrap())
}
}
@@ -245,10 +244,61 @@ impl ElementHasher for Rpo256 {
// ================================================================================================
impl Rpo256 {
// CONSTANTS
// --------------------------------------------------------------------------------------------
/// The number of rounds is set to 7 to target 128-bit security level.
pub const NUM_ROUNDS: usize = NUM_ROUNDS;
/// Sponge state is set to 12 field elements or 96 bytes; 8 elements are reserved for rate and
/// the remaining 4 elements are reserved for capacity.
pub const STATE_WIDTH: usize = STATE_WIDTH;
/// The rate portion of the state is located in elements 4 through 11 (inclusive).
pub const RATE_RANGE: Range<usize> = RATE_RANGE;
/// The capacity portion of the state is located in elements 0, 1, 2, and 3.
pub const CAPACITY_RANGE: Range<usize> = CAPACITY_RANGE;
/// The output of the hash function can be read from state elements 4, 5, 6, and 7.
pub const DIGEST_RANGE: Range<usize> = DIGEST_RANGE;
/// MDS matrix used for computing the linear layer in an RPO round.
pub const MDS: [[Felt; STATE_WIDTH]; STATE_WIDTH] = MDS;
/// Round constants added to the hasher state in the first half of the RPO round.
pub const ARK1: [[Felt; STATE_WIDTH]; NUM_ROUNDS] = ARK1;
/// Round constants added to the hasher state in the second half of the RPO round.
pub const ARK2: [[Felt; STATE_WIDTH]; NUM_ROUNDS] = ARK2;
// TRAIT PASS-THROUGH FUNCTIONS
// --------------------------------------------------------------------------------------------
/// Returns a hash of the provided sequence of bytes.
#[inline(always)]
pub fn hash(bytes: &[u8]) -> RpoDigest {
<Self as Hasher>::hash(bytes)
}
/// Returns a hash of two digests. This method is intended for use in construction of
/// Merkle trees and verification of Merkle paths.
#[inline(always)]
pub fn merge(values: &[RpoDigest; 2]) -> RpoDigest {
<Self as Hasher>::merge(values)
}
/// Returns a hash of the provided field elements.
#[inline(always)]
pub fn hash_elements<E: FieldElement<BaseField = Felt>>(elements: &[E]) -> RpoDigest {
<Self as ElementHasher>::hash_elements(elements)
}
// RESCUE PERMUTATION
// --------------------------------------------------------------------------------------------
/// Applies RPO permutation to the provided state.
#[inline(always)]
pub fn apply_permutation(state: &mut [Felt; STATE_WIDTH]) {
for i in 0..NUM_ROUNDS {
Self::apply_round(state, i);
@@ -378,7 +428,7 @@ impl Rpo256 {
// MDS
// ================================================================================================
/// RPO MDS matrix
pub const MDS: [[Felt; STATE_WIDTH]; STATE_WIDTH] = [
const MDS: [[Felt; STATE_WIDTH]; STATE_WIDTH] = [
[
Felt::new(7),
Felt::new(23),
@@ -549,178 +599,6 @@ pub const MDS: [[Felt; STATE_WIDTH]; STATE_WIDTH] = [
],
];
/// RPO Inverse MDS matrix
pub const INV_MDS: [[Felt; STATE_WIDTH]; STATE_WIDTH] = [
[
Felt::new(14868391535953158196),
Felt::new(13278298489594233127),
Felt::new(389999932707070822),
Felt::new(9782021734907796003),
Felt::new(4829905704463175582),
Felt::new(7567822018949214430),
Felt::new(14205019324568680367),
Felt::new(15489674211196160593),
Felt::new(17636013826542227504),
Felt::new(16254215311946436093),
Felt::new(3641486184877122796),
Felt::new(11069068059762973582),
],
[
Felt::new(11069068059762973582),
Felt::new(14868391535953158196),
Felt::new(13278298489594233127),
Felt::new(389999932707070822),
Felt::new(9782021734907796003),
Felt::new(4829905704463175582),
Felt::new(7567822018949214430),
Felt::new(14205019324568680367),
Felt::new(15489674211196160593),
Felt::new(17636013826542227504),
Felt::new(16254215311946436093),
Felt::new(3641486184877122796),
],
[
Felt::new(3641486184877122796),
Felt::new(11069068059762973582),
Felt::new(14868391535953158196),
Felt::new(13278298489594233127),
Felt::new(389999932707070822),
Felt::new(9782021734907796003),
Felt::new(4829905704463175582),
Felt::new(7567822018949214430),
Felt::new(14205019324568680367),
Felt::new(15489674211196160593),
Felt::new(17636013826542227504),
Felt::new(16254215311946436093),
],
[
Felt::new(16254215311946436093),
Felt::new(3641486184877122796),
Felt::new(11069068059762973582),
Felt::new(14868391535953158196),
Felt::new(13278298489594233127),
Felt::new(389999932707070822),
Felt::new(9782021734907796003),
Felt::new(4829905704463175582),
Felt::new(7567822018949214430),
Felt::new(14205019324568680367),
Felt::new(15489674211196160593),
Felt::new(17636013826542227504),
],
[
Felt::new(17636013826542227504),
Felt::new(16254215311946436093),
Felt::new(3641486184877122796),
Felt::new(11069068059762973582),
Felt::new(14868391535953158196),
Felt::new(13278298489594233127),
Felt::new(389999932707070822),
Felt::new(9782021734907796003),
Felt::new(4829905704463175582),
Felt::new(7567822018949214430),
Felt::new(14205019324568680367),
Felt::new(15489674211196160593),
],
[
Felt::new(15489674211196160593),
Felt::new(17636013826542227504),
Felt::new(16254215311946436093),
Felt::new(3641486184877122796),
Felt::new(11069068059762973582),
Felt::new(14868391535953158196),
Felt::new(13278298489594233127),
Felt::new(389999932707070822),
Felt::new(9782021734907796003),
Felt::new(4829905704463175582),
Felt::new(7567822018949214430),
Felt::new(14205019324568680367),
],
[
Felt::new(14205019324568680367),
Felt::new(15489674211196160593),
Felt::new(17636013826542227504),
Felt::new(16254215311946436093),
Felt::new(3641486184877122796),
Felt::new(11069068059762973582),
Felt::new(14868391535953158196),
Felt::new(13278298489594233127),
Felt::new(389999932707070822),
Felt::new(9782021734907796003),
Felt::new(4829905704463175582),
Felt::new(7567822018949214430),
],
[
Felt::new(7567822018949214430),
Felt::new(14205019324568680367),
Felt::new(15489674211196160593),
Felt::new(17636013826542227504),
Felt::new(16254215311946436093),
Felt::new(3641486184877122796),
Felt::new(11069068059762973582),
Felt::new(14868391535953158196),
Felt::new(13278298489594233127),
Felt::new(389999932707070822),
Felt::new(9782021734907796003),
Felt::new(4829905704463175582),
],
[
Felt::new(4829905704463175582),
Felt::new(7567822018949214430),
Felt::new(14205019324568680367),
Felt::new(15489674211196160593),
Felt::new(17636013826542227504),
Felt::new(16254215311946436093),
Felt::new(3641486184877122796),
Felt::new(11069068059762973582),
Felt::new(14868391535953158196),
Felt::new(13278298489594233127),
Felt::new(389999932707070822),
Felt::new(9782021734907796003),
],
[
Felt::new(9782021734907796003),
Felt::new(4829905704463175582),
Felt::new(7567822018949214430),
Felt::new(14205019324568680367),
Felt::new(15489674211196160593),
Felt::new(17636013826542227504),
Felt::new(16254215311946436093),
Felt::new(3641486184877122796),
Felt::new(11069068059762973582),
Felt::new(14868391535953158196),
Felt::new(13278298489594233127),
Felt::new(389999932707070822),
],
[
Felt::new(389999932707070822),
Felt::new(9782021734907796003),
Felt::new(4829905704463175582),
Felt::new(7567822018949214430),
Felt::new(14205019324568680367),
Felt::new(15489674211196160593),
Felt::new(17636013826542227504),
Felt::new(16254215311946436093),
Felt::new(3641486184877122796),
Felt::new(11069068059762973582),
Felt::new(14868391535953158196),
Felt::new(13278298489594233127),
],
[
Felt::new(13278298489594233127),
Felt::new(389999932707070822),
Felt::new(9782021734907796003),
Felt::new(4829905704463175582),
Felt::new(7567822018949214430),
Felt::new(14205019324568680367),
Felt::new(15489674211196160593),
Felt::new(17636013826542227504),
Felt::new(16254215311946436093),
Felt::new(3641486184877122796),
Felt::new(11069068059762973582),
Felt::new(14868391535953158196),
],
];
// ROUND CONSTANTS
// ================================================================================================
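INV_MDS and most other tables are no longer free pub items; the remaining public surface lives on Rpo256 as associated constants, pass-through hash/merge/hash_elements methods, and the raw permutation. A sketch of how that surface might be exercised, with paths and values taken from the constants documented above:

```rust
use miden_crypto::hash::rpo::Rpo256;
use miden_crypto::{Felt, ZERO};

fn rpo_surface() {
    // Parameters that used to be free constants are now associated with the hasher.
    assert_eq!(Rpo256::NUM_ROUNDS, 7);
    assert_eq!(Rpo256::STATE_WIDTH, 12);
    assert_eq!(Rpo256::DIGEST_RANGE, 4..8);
    let _mds_entry: Felt = Rpo256::MDS[0][0];

    // The permutation can still be driven directly on a raw sponge state.
    let mut state = [ZERO; 12];
    Rpo256::apply_permutation(&mut state);

    // Pass-through methods avoid importing the Hasher/ElementHasher traits.
    let digest = Rpo256::hash_elements(&state);
    let _merged = Rpo256::merge(&[digest, digest]);
}
```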

src/hash/rpo/tests.rs (+9 -32)

@@ -1,32 +1,9 @@
use super::{
ElementHasher, Felt, FieldElement, HashFn, Rpo256, RpoDigest256, StarkField, ALPHA, INV_ALPHA,
INV_MDS, MDS, STATE_WIDTH, ZERO,
Felt, FieldElement, Hasher, Rpo256, RpoDigest, StarkField, ALPHA, INV_ALPHA, STATE_WIDTH, ZERO,
};
use core::convert::TryInto;
use rand_utils::rand_value;
#[test]
#[allow(clippy::needless_range_loop)]
fn mds_inv_test() {
let mut mul_result = [[Felt::new(0); STATE_WIDTH]; STATE_WIDTH];
for i in 0..STATE_WIDTH {
for j in 0..STATE_WIDTH {
let result = {
let mut result = Felt::new(0);
for k in 0..STATE_WIDTH {
result += MDS[i][k] * INV_MDS[k][j]
}
result
};
mul_result[i][j] = result;
if i == j {
assert_eq!(result, Felt::new(1));
} else {
assert_eq!(result, Felt::new(0));
}
}
}
}
#[test]
fn test_alphas() {
let e: Felt = Felt::new(rand_value());
@@ -64,9 +41,9 @@ fn test_inv_sbox() {
fn hash_elements_vs_merge() {
let elements = [Felt::new(rand_value()); 8];
let digests: [RpoDigest256; 2] = [
RpoDigest256::new(elements[..4].try_into().unwrap()),
RpoDigest256::new(elements[4..].try_into().unwrap()),
let digests: [RpoDigest; 2] = [
RpoDigest::new(elements[..4].try_into().unwrap()),
RpoDigest::new(elements[4..].try_into().unwrap()),
];
let m_result = Rpo256::merge(&digests);
@@ -77,7 +54,7 @@ fn hash_elements_vs_merge() {
#[test]
fn hash_elements_vs_merge_with_int() {
let tmp = [Felt::new(rand_value()); 4];
let seed = RpoDigest256::new(tmp);
let seed = RpoDigest::new(tmp);
// ----- value fits into a field element ------------------------------------------------------
let val: Felt = Felt::new(rand_value());
@@ -147,9 +124,9 @@ fn hash_elements() {
Felt::new(7),
];
let digests: [RpoDigest256; 2] = [
RpoDigest256::new(elements[..4].try_into().unwrap()),
RpoDigest256::new(elements[4..8].try_into().unwrap()),
let digests: [RpoDigest; 2] = [
RpoDigest::new(elements[..4].try_into().unwrap()),
RpoDigest::new(elements[4..8].try_into().unwrap()),
];
let m_result = Rpo256::merge(&digests);
@@ -182,7 +159,7 @@ fn hash_test_vectors() {
];
for i in 0..elements.len() {
let expected = RpoDigest256::new(EXPECTED[i]);
let expected = RpoDigest::new(EXPECTED[i]);
let result = Rpo256::hash_elements(&elements[..(i + 1)]);
assert_eq!(result, expected);
}

src/lib.rs (+15 -14)

@@ -4,31 +4,32 @@
#[cfg_attr(test, macro_use)]
extern crate alloc;
pub use winter_crypto::{Digest, ElementHasher, Hasher as HashFn};
pub use winter_math::{
fields::{f64::BaseElement as Felt, QuadExtension},
log2, ExtensionOf, FieldElement, StarkField,
};
pub use winter_utils::{
collections::{BTreeMap, Vec},
string::String,
uninit_vector, ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable,
SliceReader,
};
pub mod hash;
pub mod merkle;
// RE-EXPORTS
// ================================================================================================
pub use winter_math::{fields::f64::BaseElement as Felt, FieldElement, StarkField};
pub mod utils {
pub use winter_utils::{
collections, string, uninit_vector, ByteReader, ByteWriter, Deserializable,
DeserializationError, Serializable, SliceReader,
};
}
// TYPE ALIASES
// ================================================================================================
/// A group of four field elements in the Miden base field.
pub type Word = [Felt; 4];
// CONSTANTS
// ================================================================================================
/// Field element representing ZERO in the base field of the VM.
/// Field element representing ZERO in the Miden base field.
pub const ZERO: Felt = Felt::ZERO;
/// Field element representing ONE in the base field of the VM.
/// Field element representing ONE in the Miden base field.
pub const ONE: Felt = Felt::ONE;
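The crate root now re-exports only the field types, the Word alias, and the ZERO/ONE constants, while the winter-utils items move behind a utils module. A hedged sketch of downstream imports after this change, assuming a std build for brevity (the crate itself stays no_std-friendly):

```rust
// Field and word types come from the crate root; everything winter-utils
// related is now reached through `miden_crypto::utils`.
use miden_crypto::utils::{Deserializable, Serializable, SliceReader};
use miden_crypto::{Felt, Word, ONE, ZERO};

fn lib_reexports() {
    // `Word` remains a crate-root alias for four base field elements.
    let word: Word = [ZERO, ONE, Felt::new(2), Felt::new(3)];

    // Serialization traits work as before, just one module deeper.
    let mut bytes = Vec::new();
    for felt in word.iter() {
        felt.write_into(&mut bytes);
    }
    let mut reader = SliceReader::new(&bytes);
    let first = Felt::read_from(&mut reader).unwrap();
    assert_eq!(first, ZERO);
}
```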

src/merkle/merkle_path_set.rs (+5 -8)

@@ -1,12 +1,9 @@
use super::{MerkleError, Word};
use crate::{hash::merge, BTreeMap, Vec, ZERO};
use super::{BTreeMap, MerkleError, Rpo256, Vec, Word, ZERO};
// MERKLE PATH SET
// ================================================================================================
/// A set of Merkle paths.
///
/// This struct is intended to be used as one of the variants of the MerkleSet enum.
#[derive(Clone, Debug)]
pub struct MerklePathSet {
root: Word,
@@ -208,9 +205,9 @@ fn is_even(pos: u64) -> bool {
/// - sibling — neighboring vertex in the tree
fn calculate_parent_hash(node: Word, node_pos: u64, sibling: Word) -> Word {
if is_even(node_pos) {
merge(&[node.into(), sibling.into()]).into()
Rpo256::merge(&[node.into(), sibling.into()]).into()
} else {
merge(&[sibling.into(), node.into()]).into()
Rpo256::merge(&[sibling.into(), node.into()]).into()
}
}
@@ -220,7 +217,7 @@ fn compute_path_trace(path: &[Word], depth: u32, index: u64) -> (Vec<Word>, Word) {
let mut computed_hashes = Vec::<Word>::new();
let mut comp_hash = merge(&[path[0].into(), path[1].into()]).into();
let mut comp_hash = Rpo256::merge(&[path[0].into(), path[1].into()]).into();
if path.len() != 2 {
for path_hash in path.iter().skip(2) {
@@ -238,7 +235,7 @@ fn compute_path_root(path: &[Word], depth: u32, index: u64) -> Word {
let mut pos = 2u64.pow(depth) + index;
// hash that is obtained after calculating the current hash and path hash
let mut comp_hash = merge(&[path[0].into(), path[1].into()]).into();
let mut comp_hash = Rpo256::merge(&[path[0].into(), path[1].into()]).into();
for path_hash in path.iter().skip(2) {
pos /= 2;
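The path-set helpers now call Rpo256::merge directly, converting between the Word alias and RpoDigest with Into on the way in and out. The private calculate_parent_hash helper above follows this pattern; a standalone sketch of the same idea:

```rust
use miden_crypto::hash::rpo::{Rpo256, RpoDigest};
use miden_crypto::Word;

// Mirrors the private `calculate_parent_hash` helper: hash a node together with
// its sibling, ordering the pair by the node's position in the tree.
fn parent_hash(node: Word, node_pos: u64, sibling: Word) -> Word {
    let pair: [RpoDigest; 2] = if node_pos % 2 == 0 {
        [node.into(), sibling.into()]
    } else {
        [sibling.into(), node.into()]
    };
    // `Word` and `RpoDigest` convert both ways through the digest's `From` impls.
    Rpo256::merge(&pair).into()
}
```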

src/merkle/merkle_tree.rs (+12 -13)

@@ -1,16 +1,12 @@
use super::MerkleError;
use crate::{
hash::{merge, Digest},
log2, uninit_vector, Felt, FieldElement, Vec, Word,
};
use super::{Digest, Felt, MerkleError, Rpo256, Vec, Word};
use crate::{utils::uninit_vector, FieldElement};
use core::slice;
use winter_math::log2;
// MERKLE TREE
// ================================================================================================
/// A fully-balanced binary Merkle tree (i.e., a tree where the number of leaves is a power of two).
///
/// This struct is intended to be used as one of the variants of the MerkleSet enum.
#[derive(Clone, Debug)]
pub struct MerkleTree {
nodes: Vec<Word>,
@@ -43,7 +39,7 @@ impl MerkleTree {
// calculate all internal tree nodes
for i in (1..n).rev() {
nodes[i] = merge(&two_nodes[i]).into();
nodes[i] = Rpo256::merge(&two_nodes[i]).into();
}
Ok(Self { nodes })
@@ -131,7 +127,7 @@ impl MerkleTree {
for _ in 0..depth {
index /= 2;
self.nodes[index] = merge(&two_nodes[index]).into();
self.nodes[index] = Rpo256::merge(&two_nodes[index]).into();
}
Ok(())
@@ -143,7 +139,10 @@ impl MerkleTree {
#[cfg(test)]
mod tests {
use crate::{hash::Hasher, merkle::int_to_node, ElementHasher, HashFn, Word};
use super::{
super::{int_to_node, Rpo256},
Word,
};
const LEAVES4: [Word; 4] = [
int_to_node(1),
@@ -244,9 +243,9 @@ mod tests {
// --------------------------------------------------------------------------------------------
fn compute_internal_nodes() -> (Word, Word, Word) {
let node2 = Hasher::hash_elements(&[LEAVES4[0], LEAVES4[1]].concat());
let node3 = Hasher::hash_elements(&[LEAVES4[2], LEAVES4[3]].concat());
let root = Hasher::merge(&[node2, node3]);
let node2 = Rpo256::hash_elements(&[LEAVES4[0], LEAVES4[1]].concat());
let node3 = Rpo256::hash_elements(&[LEAVES4[2], LEAVES4[3]].concat());
let root = Rpo256::merge(&[node2, node3]);
(root.into(), node2.into(), node3.into())
}
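The tree and its tests now use Rpo256 directly instead of the removed crate-level Hasher alias. A minimal sketch of recomputing a four-leaf root the same way the updated compute_internal_nodes test helper does:

```rust
use miden_crypto::hash::rpo::Rpo256;
use miden_crypto::Word;

// Internal nodes are computed with `Rpo256` directly, matching the updated
// test helper: leaves are hashed pairwise, then the two digests are merged.
fn root_of_four(leaves: &[Word; 4]) -> Word {
    let left = Rpo256::hash_elements(&[leaves[0], leaves[1]].concat());
    let right = Rpo256::hash_elements(&[leaves[2], leaves[3]].concat());
    Rpo256::merge(&[left, right]).into()
}
```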

src/merkle/mod.rs (+9 -5)

@@ -1,10 +1,14 @@
use crate::{Vec, Word};
use super::{
hash::rpo::{Rpo256, RpoDigest as Digest},
utils::collections::{BTreeMap, Vec},
Felt, Word, ZERO,
};
#[cfg(test)]
use crate::{Felt, ZERO};
mod merkle_tree;
pub use merkle_tree::MerkleTree;
pub mod merkle_path_set;
pub mod merkle_tree;
mod merkle_path_set;
pub use merkle_path_set::MerklePathSet;
// ERRORS
// ================================================================================================
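The merkle module now pulls Rpo256, its digest (aliased as Digest), the collection types, and the field constants from the crate root in a single use super import, and the test-only imports of Felt and ZERO disappear. The module tests build leaves with an int_to_node helper whose body is not shown in this diff; the sketch below assumes a simple packing for illustration only:

```rust
use miden_crypto::{Felt, Word, ZERO};

// Hypothetical stand-in for the crate's `int_to_node` test helper; its real
// body is not part of this diff, so the packing below (value in the first
// element, ZERO padding elsewhere) is an assumption for illustration.
fn int_to_node(value: u64) -> Word {
    [Felt::new(value), ZERO, ZERO, ZERO]
}

fn example_leaves() -> [Word; 4] {
    [int_to_node(1), int_to_node(2), int_to_node(3), int_to_node(4)]
}
```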
