Browse Source

Merge pull request #16 from 0xPolygonMiden/next

Tracking PR for v0.1 release
al-gkr-basic-workflow
Bobbin Threadbare 2 years ago
committed by GitHub
parent
commit
ed07f89fe7
No known key found for this signature in database GPG Key ID: 4AEE18F83AFDEB23
15 changed files with 654 additions and 319 deletions
  1. +35
    -6
      .github/workflows/ci.yml
  2. +13
    -3
      Cargo.toml
  3. +27
    -2
      README.md
  4. +57
    -0
      benches/hash.rs
  5. +319
    -0
      src/hash/blake/mod.rs
  6. +20
    -0
      src/hash/blake/tests.rs
  7. +4
    -17
      src/hash/mod.rs
  8. +57
    -30
      src/hash/rpo/digest.rs
  9. +1
    -3
      src/hash/rpo/mds_freq.rs
  10. +62
    -184
      src/hash/rpo/mod.rs
  11. +9
    -32
      src/hash/rpo/tests.rs
  12. +20
    -12
      src/lib.rs
  13. +7
    -10
      src/merkle/merkle_path_set.rs
  14. +14
    -15
      src/merkle/merkle_tree.rs
  15. +9
    -5
      src/merkle/mod.rs

+ 35
- 6
.github/workflows/ci.yml

@ -7,14 +7,39 @@ on:
types: [opened, reopened, synchronize]
jobs:
build:
name: Build ${{matrix.toolchain}} on ${{matrix.os}} with ${{matrix.args}}
runs-on: ${{matrix.os}}-latest
strategy:
fail-fast: false
matrix:
toolchain: [stable, nightly]
os: [ubuntu]
target: [wasm32-unknown-unknown]
args: [--no-default-features --target wasm32-unknown-unknown]
steps:
- uses: actions/checkout@main
- name: Install rust
uses: actions-rs/toolchain@v1
with:
toolchain: ${{matrix.toolchain}}
override: true
- run: rustup target add ${{matrix.target}}
- name: Test
uses: actions-rs/cargo@v1
with:
command: build
args: ${{matrix.args}}
test:
name: Test Rust ${{matrix.toolchain}} on ${{matrix.os}}
name: Test ${{matrix.toolchain}} on ${{matrix.os}} with ${{matrix.features}}
runs-on: ${{matrix.os}}-latest
strategy:
fail-fast: false
matrix:
toolchain: [stable, nightly]
os: [ubuntu]
features: [--all-features, --no-default-features]
steps:
- uses: actions/checkout@main
- name: Install rust
@ -26,25 +51,29 @@ jobs:
uses: actions-rs/cargo@v1
with:
command: test
args: ${{matrix.features}}
clippy:
name: Clippy
name: Clippy with ${{matrix.features}}
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
features: [--all-features, --no-default-features]
steps:
- uses: actions/checkout@main
- name: Install minimal stable with clippy
- name: Install minimal nightly with clippy
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
toolchain: nightly
components: clippy
override: true
- name: Clippy
uses: actions-rs/cargo@v1
with:
command: clippy
args: --all -- -D clippy::all -D warnings
args: --all ${{matrix.features}} -- -D clippy::all -D warnings
rustfmt:
name: rustfmt

+ 13
- 3
Cargo.toml

@ -10,11 +10,21 @@ categories = ["cryptography", "no-std"]
keywords = ["miden", "crypto", "hash", "merkle"]
edition = "2021"
[[bench]]
name = "hash"
harness = false
[features]
default = ["blake3/default", "std", "winter_crypto/default", "winter_math/default", "winter_utils/default"]
std = ["blake3/std", "winter_crypto/std", "winter_math/std", "winter_utils/std"]
[dependencies]
winter_crypto = { version = "0.4.1", package = "winter-crypto" }
winter_math = { version = "0.4.1", package = "winter-math" }
winter_utils = { version = "0.4.1", package = "winter-utils" }
blake3 = { version = "1.0", default-features = false }
winter_crypto = { version = "0.4.1", package = "winter-crypto", default-features = false }
winter_math = { version = "0.4.1", package = "winter-math", default-features = false }
winter_utils = { version = "0.4.1", package = "winter-utils", default-features = false }
[dev-dependencies]
criterion = "0.4"
proptest = "1.0.0"
rand_utils = { version = "0.4", package = "winter-rand-utils" }

+ 27
- 2
README.md

@ -1,2 +1,27 @@
# crypto
Cryptographic primitives used in Polygon Miden rollup
# Miden Crypto
This crate contains cryptographic primitives used in Polygon Miden.
## Hash
[Hash module](./src/hash) provides a set of cryptographic hash functions which are used by Miden VM and Miden Rollup. Currently, these functions are:
* [BLAKE3](https://github.com/BLAKE3-team/BLAKE3) hash function with 256-bit, 192-bit, or 160-bit output. The 192-bit and 160-bit outputs are obtained by truncating the 256-bit output of the standard BLAKE3.
* [RPO](https://eprint.iacr.org/2022/1577) hash function with 256-bit output. This hash function is an algebraic hash function suitable for recursive STARKs.
## Merkle
[Merkle module](./src/merkle/) provides a set of data structures related to Merkle tree. All these data structures are implemented using RPO hash function described above. The data structures are:
* `MerkleTree`: a regular fully-balanced binary Merkle tree. The depth of this tree can be at most 64.
* `MerklePathSet`: a collection of Merkle authentication paths all resolving to the same root. The length of the paths can be at most 64.
## Crate features
This crate can be compiled with the following features:
* `std` - enabled by default and relies on the Rust standard library.
* `no_std` does not rely on the Rust standard library and enables compilation to WebAssembly.
Both of these features imply use of [alloc](https://doc.rust-lang.org/alloc/) to support heap-allocated collections.
To compile with `no_std`, disable default features via `--no-default-features` flag.
## License
This project is [MIT licensed](./LICENSE).

+ 57
- 0
benches/hash.rs

@ -0,0 +1,57 @@
use criterion::{black_box, criterion_group, criterion_main, BatchSize, Criterion};
use miden_crypto::{
hash::rpo::{Rpo256, RpoDigest},
Felt,
};
use rand_utils::rand_value;
/// Benchmarks 2-to-1 RPO hashing (digest merging), once against a fixed,
/// cached input pair and once against freshly randomized pairs per batch.
fn rpo256_2to1(c: &mut Criterion) {
    let digest_pair: [RpoDigest; 2] = [Rpo256::hash(&[1_u8]), Rpo256::hash(&[2_u8])];
    c.bench_function("RPO256 2-to-1 hashing (cached)", |bench| {
        bench.iter(|| Rpo256::merge(black_box(&digest_pair)))
    });

    // a new pair of random digests is produced for every batch so the
    // measurement is not skewed by a warm, constant input
    let random_digest = || Rpo256::hash(&rand_value::<u64>().to_le_bytes());
    c.bench_function("RPO256 2-to-1 hashing (random)", |bench| {
        bench.iter_batched(
            || [random_digest(), random_digest()],
            |pair| Rpo256::merge(&pair),
            BatchSize::SmallInput,
        )
    });
}
/// Benchmarks sequential RPO hashing of 100 field elements, once against a
/// fixed, cached input and once against freshly randomized inputs per batch.
fn rpo256_sequential(c: &mut Criterion) {
    // note: `.into_iter()` on a range is redundant — a range is already an
    // iterator (clippy::useless_conversion)
    let v: [Felt; 100] = (0..100)
        .map(Felt::new)
        .collect::<Vec<Felt>>()
        .try_into()
        .expect("should not fail");
    c.bench_function("RPO256 sequential hashing (cached)", |bench| {
        bench.iter(|| Rpo256::hash_elements(black_box(&v)))
    });

    c.bench_function("RPO256 sequential hashing (random)", |bench| {
        bench.iter_batched(
            // build a fresh random input for every batch; the explicit return
            // type drives the `try_into` target array size
            || -> [Felt; 100] {
                (0..100)
                    .map(|_| Felt::new(rand_value()))
                    .collect::<Vec<Felt>>()
                    .try_into()
                    .expect("should not fail")
            },
            |state| Rpo256::hash_elements(&state),
            BatchSize::SmallInput,
        )
    });
}
// register the benchmark functions with criterion and generate the benchmark binary's `main`
criterion_group!(hash_group, rpo256_sequential, rpo256_2to1);
criterion_main!(hash_group);

+ 319
- 0
src/hash/blake/mod.rs

@ -0,0 +1,319 @@
use super::{Digest, ElementHasher, Felt, FieldElement, Hasher, StarkField};
use crate::utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable};
use core::{
mem::{size_of, transmute, transmute_copy},
ops::Deref,
slice::from_raw_parts,
};
#[cfg(test)]
mod tests;
// CONSTANTS
// ================================================================================================

// Byte lengths of the 256-bit, 192-bit, and 160-bit digest variants, respectively.
const DIGEST32_BYTES: usize = 32;
const DIGEST24_BYTES: usize = 24;
const DIGEST20_BYTES: usize = 20;
// BLAKE3 N-BIT OUTPUT
// ================================================================================================

/// N-bytes output of a blake3 function.
///
/// Note: `N` can't be greater than `32` because [`Digest::as_bytes`] currently supports only 32
/// bytes.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub struct Blake3Digest<const N: usize>([u8; N]);

impl<const N: usize> Default for Blake3Digest<N> {
    // the all-zero digest
    fn default() -> Self {
        Self([0; N])
    }
}

impl<const N: usize> Deref for Blake3Digest<N> {
    type Target = [u8];

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl<const N: usize> From<Blake3Digest<N>> for [u8; N] {
    fn from(value: Blake3Digest<N>) -> Self {
        value.0
    }
}

impl<const N: usize> From<[u8; N]> for Blake3Digest<N> {
    fn from(value: [u8; N]) -> Self {
        Self(value)
    }
}

impl<const N: usize> Serializable for Blake3Digest<N> {
    // a digest serializes as its raw bytes
    fn write_into<W: ByteWriter>(&self, target: &mut W) {
        target.write_u8_slice(&self.0);
    }
}

impl<const N: usize> Deserializable for Blake3Digest<N> {
    fn read_from<R: ByteReader>(source: &mut R) -> Result<Self, DeserializationError> {
        source.read_u8_array().map(Self)
    }
}

impl<const N: usize> Digest for Blake3Digest<N> {
    fn as_bytes(&self) -> [u8; 32] {
        // NOTE(review): `assert!` in a non-const fn is a runtime check, not a compile-time one,
        // though it is expected to be optimized away since `N` is a constant
        assert!(N <= 32, "digest currently supports only 32 bytes!");
        // zero-pad the digest up to 32 bytes
        expand_bytes(&self.0)
    }
}
// BLAKE3 256-BIT OUTPUT
// ================================================================================================

/// 256-bit output blake3 hasher.
pub struct Blake3_256;

impl Hasher for Blake3_256 {
    type Digest = Blake3Digest<32>;

    fn hash(bytes: &[u8]) -> Self::Digest {
        Blake3Digest(blake3::hash(bytes).into())
    }

    fn merge(values: &[Self::Digest; 2]) -> Self::Digest {
        // the two digests are hashed as one contiguous byte slice
        Self::hash(prepare_merge(values))
    }

    fn merge_with_int(seed: Self::Digest, value: u64) -> Self::Digest {
        // hash the seed followed by the little-endian encoding of the value
        let mut hasher = blake3::Hasher::new();
        hasher.update(&seed.0);
        hasher.update(&value.to_le_bytes());
        Blake3Digest(hasher.finalize().into())
    }
}

impl ElementHasher for Blake3_256 {
    type BaseField = Felt;

    fn hash_elements<E>(elements: &[E]) -> Self::Digest
    where
        E: FieldElement<BaseField = Self::BaseField>,
    {
        Blake3Digest(hash_elements(elements))
    }
}

// convenience pass-throughs so callers don't need the traits in scope
impl Blake3_256 {
    /// Returns a hash of the provided sequence of bytes.
    #[inline(always)]
    pub fn hash(bytes: &[u8]) -> Blake3Digest<DIGEST32_BYTES> {
        <Self as Hasher>::hash(bytes)
    }

    /// Returns a hash of two digests. This method is intended for use in construction of
    /// Merkle trees and verification of Merkle paths.
    #[inline(always)]
    pub fn merge(values: &[Blake3Digest<DIGEST32_BYTES>; 2]) -> Blake3Digest<DIGEST32_BYTES> {
        <Self as Hasher>::merge(values)
    }

    /// Returns a hash of the provided field elements.
    #[inline(always)]
    pub fn hash_elements<E>(elements: &[E]) -> Blake3Digest<DIGEST32_BYTES>
    where
        E: FieldElement<BaseField = Felt>,
    {
        <Self as ElementHasher>::hash_elements(elements)
    }
}
// BLAKE3 192-BIT OUTPUT
// ================================================================================================

/// 192-bit output blake3 hasher.
pub struct Blake3_192;

impl Hasher for Blake3_192 {
    type Digest = Blake3Digest<24>;

    fn hash(bytes: &[u8]) -> Self::Digest {
        // truncate the 32-byte blake3 output down to 24 bytes
        Blake3Digest(*shrink_bytes(&blake3::hash(bytes).into()))
    }

    fn merge(values: &[Self::Digest; 2]) -> Self::Digest {
        // the two digests are hashed as one contiguous byte slice
        Self::hash(prepare_merge(values))
    }

    fn merge_with_int(seed: Self::Digest, value: u64) -> Self::Digest {
        // hash the seed followed by the little-endian encoding of the value,
        // then truncate the output to 24 bytes
        let mut hasher = blake3::Hasher::new();
        hasher.update(&seed.0);
        hasher.update(&value.to_le_bytes());
        Blake3Digest(*shrink_bytes(&hasher.finalize().into()))
    }
}

impl ElementHasher for Blake3_192 {
    type BaseField = Felt;

    fn hash_elements<E>(elements: &[E]) -> Self::Digest
    where
        E: FieldElement<BaseField = Self::BaseField>,
    {
        Blake3Digest(hash_elements(elements))
    }
}

// convenience pass-throughs so callers don't need the traits in scope
impl Blake3_192 {
    /// Returns a hash of the provided sequence of bytes.
    #[inline(always)]
    pub fn hash(bytes: &[u8]) -> Blake3Digest<DIGEST24_BYTES> {
        <Self as Hasher>::hash(bytes)
    }

    /// Returns a hash of two digests. This method is intended for use in construction of
    /// Merkle trees and verification of Merkle paths.
    #[inline(always)]
    pub fn merge(values: &[Blake3Digest<DIGEST24_BYTES>; 2]) -> Blake3Digest<DIGEST24_BYTES> {
        <Self as Hasher>::merge(values)
    }

    /// Returns a hash of the provided field elements.
    #[inline(always)]
    pub fn hash_elements<E>(elements: &[E]) -> Blake3Digest<DIGEST24_BYTES>
    where
        E: FieldElement<BaseField = Felt>,
    {
        <Self as ElementHasher>::hash_elements(elements)
    }
}
// BLAKE3 160-BIT OUTPUT
// ================================================================================================

/// 160-bit output blake3 hasher.
pub struct Blake3_160;

impl Hasher for Blake3_160 {
    type Digest = Blake3Digest<20>;

    fn hash(bytes: &[u8]) -> Self::Digest {
        // truncate the 32-byte blake3 output down to 20 bytes
        Blake3Digest(*shrink_bytes(&blake3::hash(bytes).into()))
    }

    fn merge(values: &[Self::Digest; 2]) -> Self::Digest {
        // the two digests are hashed as one contiguous byte slice
        Self::hash(prepare_merge(values))
    }

    fn merge_with_int(seed: Self::Digest, value: u64) -> Self::Digest {
        // hash the seed followed by the little-endian encoding of the value,
        // then truncate the output to 20 bytes
        let mut hasher = blake3::Hasher::new();
        hasher.update(&seed.0);
        hasher.update(&value.to_le_bytes());
        Blake3Digest(*shrink_bytes(&hasher.finalize().into()))
    }
}

impl ElementHasher for Blake3_160 {
    type BaseField = Felt;

    fn hash_elements<E>(elements: &[E]) -> Self::Digest
    where
        E: FieldElement<BaseField = Self::BaseField>,
    {
        Blake3Digest(hash_elements(elements))
    }
}

// convenience pass-throughs so callers don't need the traits in scope
impl Blake3_160 {
    /// Returns a hash of the provided sequence of bytes.
    #[inline(always)]
    pub fn hash(bytes: &[u8]) -> Blake3Digest<DIGEST20_BYTES> {
        <Self as Hasher>::hash(bytes)
    }

    /// Returns a hash of two digests. This method is intended for use in construction of
    /// Merkle trees and verification of Merkle paths.
    #[inline(always)]
    pub fn merge(values: &[Blake3Digest<DIGEST20_BYTES>; 2]) -> Blake3Digest<DIGEST20_BYTES> {
        <Self as Hasher>::merge(values)
    }

    /// Returns a hash of the provided field elements.
    #[inline(always)]
    pub fn hash_elements<E>(elements: &[E]) -> Blake3Digest<DIGEST20_BYTES>
    where
        E: FieldElement<BaseField = Felt>,
    {
        <Self as ElementHasher>::hash_elements(elements)
    }
}
// HELPER FUNCTIONS
// ================================================================================================

/// Zero-copy ref shrink to array.
///
/// Returns a reference to the first `N` bytes of `bytes`.
///
/// # Panics
/// Panics if `N > M`. The check is performed at runtime, but since both sizes are compile-time
/// constants it is expected to be optimized away.
fn shrink_bytes<const M: usize, const N: usize>(bytes: &[u8; M]) -> &[u8; N] {
    assert!(
        M >= N,
        "N should fit in M so it can be safely transmuted into a smaller slice!"
    );
    // a safe slice-to-array conversion replaces the original `unsafe { transmute(bytes) }`;
    // the size check above guarantees the conversion cannot fail
    bytes[..N].try_into().expect("length checked above")
}
/// Hash the elements into bytes and shrink the output.
///
/// When `Felt`'s internal representation is already canonical, elements are hashed directly as a
/// contiguous byte slice; otherwise each base element is converted via `as_int` and hashed limb
/// by limb in little-endian order.
fn hash_elements<const N: usize, E>(elements: &[E]) -> [u8; N]
where
    E: FieldElement<BaseField = Felt>,
{
    // don't leak assumptions from felt and check its actual implementation.
    // this is a compile-time branch so it is for free
    let digest = if Felt::IS_CANONICAL {
        blake3::hash(E::elements_as_bytes(elements))
    } else {
        E::as_base_elements(elements)
            .iter()
            .fold(blake3::Hasher::new(), |mut hasher, felt| {
                hasher.update(&felt.as_int().to_le_bytes());
                hasher
            })
            .finalize()
    };
    // truncate the 32-byte blake3 output to `N` bytes
    *shrink_bytes(&digest.into())
}
/// Owned bytes expansion.
///
/// Copies the `M` input bytes into the low end of a zero-initialized `N`-byte array.
///
/// # Panics
/// Panics if `M > N`. The check is performed at runtime, but since both sizes are compile-time
/// constants it is expected to be optimized away.
fn expand_bytes<const M: usize, const N: usize>(bytes: &[u8; M]) -> [u8; N] {
    assert!(M <= N, "M should fit in N so M can be expanded!");
    // the copy below already handles the `M == N` case correctly, so the original unsafe
    // `transmute_copy` fast path was removed; for equal sizes `copy_from_slice` compiles down
    // to the same full-width memcpy
    let mut expanded = [0u8; N];
    expanded[..M].copy_from_slice(bytes);
    expanded
}
// Cast the slice into contiguous bytes.
//
// NOTE(review): soundness relies on `D` being a plain, padding-free wrapper around its byte
// payload (as `Blake3Digest<N>` is). For any other `D`, the raw byte view below would include
// non-payload struct bytes — the debug assertion checks the first element against this in debug
// builds only. Confirm before reusing with other digest types.
fn prepare_merge<const N: usize, D>(args: &[D; N]) -> &[u8]
where
    D: Deref<Target = [u8]>,
{
    // reject empty input; with `N` a compile-time constant this check is expected to be free
    assert!(N > 0, "N shouldn't represent an empty slice!");
    let values = args.as_ptr() as *const u8;
    let len = size_of::<D>() * N;
    // safety: the values are tested to be contiguous
    let bytes = unsafe { from_raw_parts(values, len) };
    debug_assert_eq!(args[0].deref(), &bytes[..len / N]);
    bytes
}

+ 20
- 0
src/hash/blake/tests.rs

@ -0,0 +1,20 @@
use super::*;
use crate::utils::collections::Vec;
use proptest::prelude::*;
proptest! {
    // each property asserts only that hashing an arbitrary byte vector does not panic,
    // for each of the three supported digest widths
    #[test]
    fn blake160_wont_panic_with_arbitrary_input(ref vec in any::<Vec<u8>>()) {
        Blake3_160::hash(vec);
    }

    #[test]
    fn blake192_wont_panic_with_arbitrary_input(ref vec in any::<Vec<u8>>()) {
        Blake3_192::hash(vec);
    }

    #[test]
    fn blake256_wont_panic_with_arbitrary_input(ref vec in any::<Vec<u8>>()) {
        Blake3_256::hash(vec);
    }
}

+ 4
- 17
src/hash/mod.rs

@ -1,18 +1,5 @@
use crate::{ElementHasher, HashFn};
use super::{Felt, FieldElement, StarkField, ONE, ZERO};
use winter_crypto::{Digest, ElementHasher, Hasher};
mod rpo;
pub use rpo::Rpo256 as Hasher;
pub use rpo::{INV_MDS, MDS};
// TYPE ALIASES
// ================================================================================================
pub type Digest = <Hasher as HashFn>::Digest;
// HELPER FUNCTIONS
// ================================================================================================
#[inline(always)]
pub fn merge(values: &[Digest; 2]) -> Digest {
Hasher::merge(values)
}
pub mod blake;
pub mod rpo;

+ 57
- 30
src/hash/rpo/digest.rs

@ -1,17 +1,16 @@
use super::DIGEST_SIZE;
use crate::{
ByteReader, ByteWriter, Deserializable, DeserializationError, Digest, Felt, Serializable,
StarkField, ZERO,
use super::{Digest, Felt, StarkField, DIGEST_SIZE, ZERO};
use crate::utils::{
string::String, ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable,
};
use core::ops::Deref;
use core::{cmp::Ordering, ops::Deref};
// DIGEST TRAIT IMPLEMENTATIONS
// ================================================================================================
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub struct RpoDigest256([Felt; DIGEST_SIZE]);
#[derive(Debug, Default, Copy, Clone, Eq, PartialEq)]
pub struct RpoDigest([Felt; DIGEST_SIZE]);
impl RpoDigest256 {
impl RpoDigest {
pub fn new(value: [Felt; DIGEST_SIZE]) -> Self {
Self(value)
}
@ -20,6 +19,10 @@ impl RpoDigest256 {
self.as_ref()
}
pub fn as_bytes(&self) -> [u8; 32] {
<Self as Digest>::as_bytes(self)
}
pub fn digests_as_elements<'a, I>(digests: I) -> impl Iterator<Item = &'a Felt>
where
I: Iterator<Item = &'a Self>,
@ -28,7 +31,7 @@ impl RpoDigest256 {
}
}
impl Digest for RpoDigest256 {
impl Digest for RpoDigest {
fn as_bytes(&self) -> [u8; 32] {
let mut result = [0; 32];
@ -41,27 +44,21 @@ impl Digest for RpoDigest256 {
}
}
impl Default for RpoDigest256 {
fn default() -> Self {
RpoDigest256([Felt::default(); DIGEST_SIZE])
}
}
impl Serializable for RpoDigest256 {
impl Serializable for RpoDigest {
fn write_into<W: ByteWriter>(&self, target: &mut W) {
target.write_u8_slice(&self.as_bytes());
}
}
impl Deserializable for RpoDigest256 {
impl Deserializable for RpoDigest {
fn read_from<R: ByteReader>(source: &mut R) -> Result<Self, DeserializationError> {
let mut inner: [Felt; DIGEST_SIZE] = [ZERO; DIGEST_SIZE];
for inner in inner.iter_mut() {
let e = source.read_u64()?;
if e >= Felt::MODULUS {
return Err(DeserializationError::InvalidValue(
"Value not in the appropriate range".to_owned(),
));
return Err(DeserializationError::InvalidValue(String::from(
"Value not in the appropriate range",
)));
}
*inner = Felt::new(e);
}
@ -70,25 +67,25 @@ impl Deserializable for RpoDigest256 {
}
}
impl From<[Felt; DIGEST_SIZE]> for RpoDigest256 {
impl From<[Felt; DIGEST_SIZE]> for RpoDigest {
fn from(value: [Felt; DIGEST_SIZE]) -> Self {
Self(value)
}
}
impl From<RpoDigest256> for [Felt; DIGEST_SIZE] {
fn from(value: RpoDigest256) -> Self {
impl From<RpoDigest> for [Felt; DIGEST_SIZE] {
fn from(value: RpoDigest) -> Self {
value.0
}
}
impl From<RpoDigest256> for [u8; 32] {
fn from(value: RpoDigest256) -> Self {
impl From<RpoDigest> for [u8; 32] {
fn from(value: RpoDigest) -> Self {
value.as_bytes()
}
}
impl Deref for RpoDigest256 {
impl Deref for RpoDigest {
type Target = [Felt; DIGEST_SIZE];
fn deref(&self) -> &Self::Target {
@ -96,14 +93,44 @@ impl Deref for RpoDigest256 {
}
}
impl Ord for RpoDigest {
    /// Lexicographic comparison of the digests' inner limbs.
    ///
    /// The endianness is irrelevant here because, this being a cryptographically secure hash
    /// computation, the digest shouldn't have any ordered property of its input.
    ///
    /// `Felt::inner` is used instead of `Felt::as_int` to avoid a montgomery reduction for every
    /// limb; that is safe because every inner element of the digest is guaranteed to be in its
    /// canonical form (that is, `x in [0,p)`).
    fn cmp(&self, other: &Self) -> Ordering {
        // `Iterator::cmp` performs the same lexicographic comparison as the original
        // element-wise fold, but short-circuits on the first non-equal limb
        self.0
            .iter()
            .map(Felt::inner)
            .cmp(other.0.iter().map(Felt::inner))
    }
}
impl PartialOrd for RpoDigest {
    // delegate to `Ord`: the ordering is total, so this always returns `Some`
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
// TESTS
// ================================================================================================
#[cfg(test)]
mod tests {
use super::RpoDigest256;
use crate::{Deserializable, Felt, Serializable, SliceReader};
use super::{Deserializable, Felt, RpoDigest, Serializable};
use crate::utils::SliceReader;
use rand_utils::rand_value;
#[test]
@ -113,14 +140,14 @@ mod tests {
let e3 = Felt::new(rand_value());
let e4 = Felt::new(rand_value());
let d1 = RpoDigest256([e1, e2, e3, e4]);
let d1 = RpoDigest([e1, e2, e3, e4]);
let mut bytes = vec![];
d1.write_into(&mut bytes);
assert_eq!(32, bytes.len());
let mut reader = SliceReader::new(&bytes);
let d2 = RpoDigest256::read_from(&mut reader).unwrap();
let d2 = RpoDigest::read_from(&mut reader).unwrap();
assert_eq!(d1, d2);
}

+ 1
- 3
src/hash/rpo/mds_freq.rs

@ -156,9 +156,7 @@ const fn block3(x: [i64; 3], y: [i64; 3]) -> [i64; 3] {
#[cfg(test)]
mod tests {
use super::super::Rpo256;
use crate::hash::rpo::MDS;
use crate::{Felt, FieldElement};
use super::super::{Felt, FieldElement, Rpo256, MDS};
use proptest::prelude::*;
const STATE_WIDTH: usize = 12;

+ 62
- 184
src/hash/rpo/mod.rs

@ -1,9 +1,8 @@
use super::{ElementHasher, HashFn};
use crate::{Felt, FieldElement, StarkField, ONE, ZERO};
use super::{Digest, ElementHasher, Felt, FieldElement, Hasher, StarkField, ONE, ZERO};
use core::{convert::TryInto, ops::Range};
mod digest;
pub use digest::RpoDigest256;
pub use digest::RpoDigest;
mod mds_freq;
use mds_freq::mds_multiply_freq;
@ -53,10 +52,10 @@ const INV_ALPHA: u64 = 10540996611094048183;
// HASHER IMPLEMENTATION
// ================================================================================================
/// Implementation of [Hasher] trait for Rescue Prime Optimized (Rpo256) hash function with 256-bit output.
/// Implementation of the Rescue Prime Optimized hash function with 256-bit output.
///
/// The hash function is implemented according to the Rescue Prime Optimized
/// [specifications](https://github.com/ASDiscreteMathematics/rpo)
/// [specifications](https://eprint.iacr.org/2022/1577)
///
/// The parameters used to instantiate the function are:
/// * Field: 64-bit prime field with modulus 2^64 - 2^32 + 1.
@ -91,8 +90,8 @@ const INV_ALPHA: u64 = 10540996611094048183;
/// using [hash()](Rpo256::hash) function.
pub struct Rpo256();
impl HashFn for Rpo256 {
type Digest = RpoDigest256;
impl Hasher for Rpo256 {
type Digest = RpoDigest;
fn hash(bytes: &[u8]) -> Self::Digest {
// compute the number of elements required to represent the string; we will be processing
@ -150,7 +149,7 @@ impl HashFn for Rpo256 {
}
// return the first 4 elements of the state as hash result
RpoDigest256::new(state[DIGEST_RANGE].try_into().unwrap())
RpoDigest::new(state[DIGEST_RANGE].try_into().unwrap())
}
fn merge(values: &[Self::Digest; 2]) -> Self::Digest {
@ -164,7 +163,7 @@ impl HashFn for Rpo256 {
// apply the RPO permutation and return the first four elements of the state
Self::apply_permutation(&mut state);
RpoDigest256::new(state[DIGEST_RANGE].try_into().unwrap())
RpoDigest::new(state[DIGEST_RANGE].try_into().unwrap())
}
fn merge_with_int(seed: Self::Digest, value: u64) -> Self::Digest {
@ -191,7 +190,7 @@ impl HashFn for Rpo256 {
// apply the RPO permutation and return the first four elements of the state
Self::apply_permutation(&mut state);
RpoDigest256::new(state[DIGEST_RANGE].try_into().unwrap())
RpoDigest::new(state[DIGEST_RANGE].try_into().unwrap())
}
}
@ -237,7 +236,7 @@ impl ElementHasher for Rpo256 {
}
// return the first 4 elements of the state as hash result
RpoDigest256::new(state[DIGEST_RANGE].try_into().unwrap())
RpoDigest::new(state[DIGEST_RANGE].try_into().unwrap())
}
}
@ -245,10 +244,61 @@ impl ElementHasher for Rpo256 {
// ================================================================================================
impl Rpo256 {
// CONSTANTS
// --------------------------------------------------------------------------------------------
/// The number of rounds is set to 7 to target 128-bit security level.
pub const NUM_ROUNDS: usize = NUM_ROUNDS;
/// Sponge state is set to 12 field elements or 768 bytes; 8 elements are reserved for rate and
/// the remaining 4 elements are reserved for capacity.
pub const STATE_WIDTH: usize = STATE_WIDTH;
/// The rate portion of the state is located in elements 4 through 11 (inclusive).
pub const RATE_RANGE: Range<usize> = RATE_RANGE;
/// The capacity portion of the state is located in elements 0, 1, 2, and 3.
pub const CAPACITY_RANGE: Range<usize> = CAPACITY_RANGE;
/// The output of the hash function can be read from state elements 4, 5, 6, and 7.
pub const DIGEST_RANGE: Range<usize> = DIGEST_RANGE;
/// MDS matrix used for computing the linear layer in a RPO round.
pub const MDS: [[Felt; STATE_WIDTH]; STATE_WIDTH] = MDS;
/// Round constants added to the hasher state in the first half of the RPO round.
pub const ARK1: [[Felt; STATE_WIDTH]; NUM_ROUNDS] = ARK1;
/// Round constants added to the hasher state in the second half of the RPO round.
pub const ARK2: [[Felt; STATE_WIDTH]; NUM_ROUNDS] = ARK2;
// TRAIT PASS-THROUGH FUNCTIONS
// --------------------------------------------------------------------------------------------
/// Returns a hash of the provided sequence of bytes.
#[inline(always)]
pub fn hash(bytes: &[u8]) -> RpoDigest {
<Self as Hasher>::hash(bytes)
}
/// Returns a hash of two digests. This method is intended for use in construction of
/// Merkle trees and verification of Merkle paths.
#[inline(always)]
pub fn merge(values: &[RpoDigest; 2]) -> RpoDigest {
<Self as Hasher>::merge(values)
}
/// Returns a hash of the provided field elements.
#[inline(always)]
pub fn hash_elements<E: FieldElement<BaseField = Felt>>(elements: &[E]) -> RpoDigest {
<Self as ElementHasher>::hash_elements(elements)
}
// RESCUE PERMUTATION
// --------------------------------------------------------------------------------------------
/// Applies RPO permutation to the provided state.
#[inline(always)]
pub fn apply_permutation(state: &mut [Felt; STATE_WIDTH]) {
for i in 0..NUM_ROUNDS {
Self::apply_round(state, i);
@ -378,7 +428,7 @@ impl Rpo256 {
// MDS
// ================================================================================================
/// RPO MDS matrix
pub const MDS: [[Felt; STATE_WIDTH]; STATE_WIDTH] = [
const MDS: [[Felt; STATE_WIDTH]; STATE_WIDTH] = [
[
Felt::new(7),
Felt::new(23),
@ -549,178 +599,6 @@ pub const MDS: [[Felt; STATE_WIDTH]; STATE_WIDTH] = [
],
];
/// RPO Inverse MDS matrix
pub const INV_MDS: [[Felt; STATE_WIDTH]; STATE_WIDTH] = [
[
Felt::new(14868391535953158196),
Felt::new(13278298489594233127),
Felt::new(389999932707070822),
Felt::new(9782021734907796003),
Felt::new(4829905704463175582),
Felt::new(7567822018949214430),
Felt::new(14205019324568680367),
Felt::new(15489674211196160593),
Felt::new(17636013826542227504),
Felt::new(16254215311946436093),
Felt::new(3641486184877122796),
Felt::new(11069068059762973582),
],
[
Felt::new(11069068059762973582),
Felt::new(14868391535953158196),
Felt::new(13278298489594233127),
Felt::new(389999932707070822),
Felt::new(9782021734907796003),
Felt::new(4829905704463175582),
Felt::new(7567822018949214430),
Felt::new(14205019324568680367),
Felt::new(15489674211196160593),
Felt::new(17636013826542227504),
Felt::new(16254215311946436093),
Felt::new(3641486184877122796),
],
[
Felt::new(3641486184877122796),
Felt::new(11069068059762973582),
Felt::new(14868391535953158196),
Felt::new(13278298489594233127),
Felt::new(389999932707070822),
Felt::new(9782021734907796003),
Felt::new(4829905704463175582),
Felt::new(7567822018949214430),
Felt::new(14205019324568680367),
Felt::new(15489674211196160593),
Felt::new(17636013826542227504),
Felt::new(16254215311946436093),
],
[
Felt::new(16254215311946436093),
Felt::new(3641486184877122796),
Felt::new(11069068059762973582),
Felt::new(14868391535953158196),
Felt::new(13278298489594233127),
Felt::new(389999932707070822),
Felt::new(9782021734907796003),
Felt::new(4829905704463175582),
Felt::new(7567822018949214430),
Felt::new(14205019324568680367),
Felt::new(15489674211196160593),
Felt::new(17636013826542227504),
],
[
Felt::new(17636013826542227504),
Felt::new(16254215311946436093),
Felt::new(3641486184877122796),
Felt::new(11069068059762973582),
Felt::new(14868391535953158196),
Felt::new(13278298489594233127),
Felt::new(389999932707070822),
Felt::new(9782021734907796003),
Felt::new(4829905704463175582),
Felt::new(7567822018949214430),
Felt::new(14205019324568680367),
Felt::new(15489674211196160593),
],
[
Felt::new(15489674211196160593),
Felt::new(17636013826542227504),
Felt::new(16254215311946436093),
Felt::new(3641486184877122796),
Felt::new(11069068059762973582),
Felt::new(14868391535953158196),
Felt::new(13278298489594233127),
Felt::new(389999932707070822),
Felt::new(9782021734907796003),
Felt::new(4829905704463175582),
Felt::new(7567822018949214430),
Felt::new(14205019324568680367),
],
[
Felt::new(14205019324568680367),
Felt::new(15489674211196160593),
Felt::new(17636013826542227504),
Felt::new(16254215311946436093),
Felt::new(3641486184877122796),
Felt::new(11069068059762973582),
Felt::new(14868391535953158196),
Felt::new(13278298489594233127),
Felt::new(389999932707070822),
Felt::new(9782021734907796003),
Felt::new(4829905704463175582),
Felt::new(7567822018949214430),
],
[
Felt::new(7567822018949214430),
Felt::new(14205019324568680367),
Felt::new(15489674211196160593),
Felt::new(17636013826542227504),
Felt::new(16254215311946436093),
Felt::new(3641486184877122796),
Felt::new(11069068059762973582),
Felt::new(14868391535953158196),
Felt::new(13278298489594233127),
Felt::new(389999932707070822),
Felt::new(9782021734907796003),
Felt::new(4829905704463175582),
],
[
Felt::new(4829905704463175582),
Felt::new(7567822018949214430),
Felt::new(14205019324568680367),
Felt::new(15489674211196160593),
Felt::new(17636013826542227504),
Felt::new(16254215311946436093),
Felt::new(3641486184877122796),
Felt::new(11069068059762973582),
Felt::new(14868391535953158196),
Felt::new(13278298489594233127),
Felt::new(389999932707070822),
Felt::new(9782021734907796003),
],
[
Felt::new(9782021734907796003),
Felt::new(4829905704463175582),
Felt::new(7567822018949214430),
Felt::new(14205019324568680367),
Felt::new(15489674211196160593),
Felt::new(17636013826542227504),
Felt::new(16254215311946436093),
Felt::new(3641486184877122796),
Felt::new(11069068059762973582),
Felt::new(14868391535953158196),
Felt::new(13278298489594233127),
Felt::new(389999932707070822),
],
[
Felt::new(389999932707070822),
Felt::new(9782021734907796003),
Felt::new(4829905704463175582),
Felt::new(7567822018949214430),
Felt::new(14205019324568680367),
Felt::new(15489674211196160593),
Felt::new(17636013826542227504),
Felt::new(16254215311946436093),
Felt::new(3641486184877122796),
Felt::new(11069068059762973582),
Felt::new(14868391535953158196),
Felt::new(13278298489594233127),
],
[
Felt::new(13278298489594233127),
Felt::new(389999932707070822),
Felt::new(9782021734907796003),
Felt::new(4829905704463175582),
Felt::new(7567822018949214430),
Felt::new(14205019324568680367),
Felt::new(15489674211196160593),
Felt::new(17636013826542227504),
Felt::new(16254215311946436093),
Felt::new(3641486184877122796),
Felt::new(11069068059762973582),
Felt::new(14868391535953158196),
],
];
// ROUND CONSTANTS
// ================================================================================================

+ 9
- 32
src/hash/rpo/tests.rs

@ -1,32 +1,9 @@
use super::{
ElementHasher, Felt, FieldElement, HashFn, Rpo256, RpoDigest256, StarkField, ALPHA, INV_ALPHA,
INV_MDS, MDS, STATE_WIDTH, ZERO,
Felt, FieldElement, Hasher, Rpo256, RpoDigest, StarkField, ALPHA, INV_ALPHA, STATE_WIDTH, ZERO,
};
use core::convert::TryInto;
use rand_utils::rand_value;
#[test]
#[allow(clippy::needless_range_loop)]
// Verifies that INV_MDS is the true inverse of MDS: their product must be the
// STATE_WIDTH x STATE_WIDTH identity matrix over the base field.
fn mds_inv_test() {
// accumulator for the full product matrix MDS * INV_MDS
let mut mul_result = [[Felt::new(0); STATE_WIDTH]; STATE_WIDTH];
for i in 0..STATE_WIDTH {
for j in 0..STATE_WIDTH {
// dot product of row i of MDS with column j of INV_MDS
let result = {
let mut result = Felt::new(0);
for k in 0..STATE_WIDTH {
result += MDS[i][k] * INV_MDS[k][j]
}
result
};
mul_result[i][j] = result;
// identity matrix check: 1 on the diagonal, 0 everywhere else
if i == j {
assert_eq!(result, Felt::new(1));
} else {
assert_eq!(result, Felt::new(0));
}
}
}
}
#[test]
fn test_alphas() {
let e: Felt = Felt::new(rand_value());
@ -64,9 +41,9 @@ fn test_inv_sbox() {
fn hash_elements_vs_merge() {
let elements = [Felt::new(rand_value()); 8];
let digests: [RpoDigest256; 2] = [
RpoDigest256::new(elements[..4].try_into().unwrap()),
RpoDigest256::new(elements[4..].try_into().unwrap()),
let digests: [RpoDigest; 2] = [
RpoDigest::new(elements[..4].try_into().unwrap()),
RpoDigest::new(elements[4..].try_into().unwrap()),
];
let m_result = Rpo256::merge(&digests);
@ -77,7 +54,7 @@ fn hash_elements_vs_merge() {
#[test]
fn hash_elements_vs_merge_with_int() {
let tmp = [Felt::new(rand_value()); 4];
let seed = RpoDigest256::new(tmp);
let seed = RpoDigest::new(tmp);
// ----- value fits into a field element ------------------------------------------------------
let val: Felt = Felt::new(rand_value());
@ -147,9 +124,9 @@ fn hash_elements() {
Felt::new(7),
];
let digests: [RpoDigest256; 2] = [
RpoDigest256::new(elements[..4].try_into().unwrap()),
RpoDigest256::new(elements[4..8].try_into().unwrap()),
let digests: [RpoDigest; 2] = [
RpoDigest::new(elements[..4].try_into().unwrap()),
RpoDigest::new(elements[4..8].try_into().unwrap()),
];
let m_result = Rpo256::merge(&digests);
@ -182,7 +159,7 @@ fn hash_test_vectors() {
];
for i in 0..elements.len() {
let expected = RpoDigest256::new(EXPECTED[i]);
let expected = RpoDigest::new(EXPECTED[i]);
let result = Rpo256::hash_elements(&elements[..(i + 1)]);
assert_eq!(result, expected);
}

+ 20
- 12
src/lib.rs

@ -1,27 +1,35 @@
pub use winter_crypto::{Digest, ElementHasher, Hasher as HashFn};
pub use winter_math::{
fields::{f64::BaseElement as Felt, QuadExtension},
log2, ExtensionOf, FieldElement, StarkField,
};
pub use winter_utils::{
collections::{BTreeMap, Vec},
uninit_vector, ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable,
SliceReader,
};
#![cfg_attr(not(feature = "std"), no_std)]
#[cfg(not(feature = "std"))]
#[cfg_attr(test, macro_use)]
extern crate alloc;
pub mod hash;
pub mod merkle;
// RE-EXPORTS
// ================================================================================================
pub use winter_math::{fields::f64::BaseElement as Felt, FieldElement, StarkField};
pub mod utils {
pub use winter_utils::{
collections, string, uninit_vector, ByteReader, ByteWriter, Deserializable,
DeserializationError, Serializable, SliceReader,
};
}
// TYPE ALIASES
// ================================================================================================
/// A group of four field elements in the Miden base field.
pub type Word = [Felt; 4];
// CONSTANTS
// ================================================================================================
/// Field element representing ZERO in the base field of the VM.
/// Field element representing ZERO in the Miden base field.
pub const ZERO: Felt = Felt::ZERO;
/// Field element representing ONE in the base field of the VM.
/// Field element representing ONE in the Miden base field.
pub const ONE: Felt = Felt::ONE;

+ 7
- 10
src/merkle/merkle_path_set.rs

@ -1,12 +1,9 @@
use super::{MerkleError, Word};
use crate::{hash::merge, BTreeMap, Vec, ZERO};
use super::{BTreeMap, MerkleError, Rpo256, Vec, Word, ZERO};
// MERKLE PATH SET
// ================================================================================================
/// A set of Merkle paths.
///
/// This struct is intended to be used as one of the variants of the MerkleSet enum.
#[derive(Clone, Debug)]
pub struct MerklePathSet {
root: Word,
@ -57,7 +54,7 @@ impl MerklePathSet {
let pos = 2u64.pow(self.total_depth) + index;
// Index of the leaf path in map. Paths of neighboring leaves are stored in one key-value pair
let half_pos = (pos / 2) as u64;
let half_pos = pos / 2;
let mut extended_path = path;
if is_even(pos) {
@ -104,7 +101,7 @@ impl MerklePathSet {
}
let pos = 2u64.pow(depth) + index;
let index = (pos / 2) as u64;
let index = pos / 2;
match self.paths.get(&index) {
None => Err(MerkleError::NodeNotInSet(index)),
@ -208,9 +205,9 @@ fn is_even(pos: u64) -> bool {
/// - sibling — neighboring vertex in the tree
fn calculate_parent_hash(node: Word, node_pos: u64, sibling: Word) -> Word {
if is_even(node_pos) {
merge(&[node.into(), sibling.into()]).into()
Rpo256::merge(&[node.into(), sibling.into()]).into()
} else {
merge(&[sibling.into(), node.into()]).into()
Rpo256::merge(&[sibling.into(), node.into()]).into()
}
}
@ -220,7 +217,7 @@ fn compute_path_trace(path: &[Word], depth: u32, index: u64) -> (Vec, Word
let mut computed_hashes = Vec::<Word>::new();
let mut comp_hash = merge(&[path[0].into(), path[1].into()]).into();
let mut comp_hash = Rpo256::merge(&[path[0].into(), path[1].into()]).into();
if path.len() != 2 {
for path_hash in path.iter().skip(2) {
@ -238,7 +235,7 @@ fn compute_path_root(path: &[Word], depth: u32, index: u64) -> Word {
let mut pos = 2u64.pow(depth) + index;
// hash that is obtained after calculating the current hash and path hash
let mut comp_hash = merge(&[path[0].into(), path[1].into()]).into();
let mut comp_hash = Rpo256::merge(&[path[0].into(), path[1].into()]).into();
for path_hash in path.iter().skip(2) {
pos /= 2;

+ 14
- 15
src/merkle/merkle_tree.rs

@ -1,16 +1,12 @@
use super::MerkleError;
use crate::{
hash::{merge, Digest},
log2, uninit_vector, Felt, FieldElement, Word,
};
use super::{Digest, Felt, MerkleError, Rpo256, Vec, Word};
use crate::{utils::uninit_vector, FieldElement};
use core::slice;
use winter_math::log2;
// MERKLE TREE
// ================================================================================================
/// A fully-balanced binary Merkle tree (i.e., a tree where the number of leaves is a power of two).
///
/// This struct is intended to be used as one of the variants of the MerkleSet enum.
#[derive(Clone, Debug)]
pub struct MerkleTree {
nodes: Vec<Word>,
@ -43,7 +39,7 @@ impl MerkleTree {
// calculate all internal tree nodes
for i in (1..n).rev() {
nodes[i] = merge(&two_nodes[i]).into();
nodes[i] = Rpo256::merge(&two_nodes[i]).into();
}
Ok(Self { nodes })
@ -80,7 +76,7 @@ impl MerkleTree {
return Err(MerkleError::InvalidIndex(depth, index));
}
let pos = 2usize.pow(depth as u32) + (index as usize);
let pos = 2_usize.pow(depth) + (index as usize);
Ok(self.nodes[pos])
}
@ -102,7 +98,7 @@ impl MerkleTree {
}
let mut path = Vec::with_capacity(depth as usize);
let mut pos = 2usize.pow(depth as u32) + (index as usize);
let mut pos = 2_usize.pow(depth) + (index as usize);
while pos > 1 {
path.push(self.nodes[pos ^ 1]);
@ -131,7 +127,7 @@ impl MerkleTree {
for _ in 0..depth {
index /= 2;
self.nodes[index] = merge(&two_nodes[index]).into();
self.nodes[index] = Rpo256::merge(&two_nodes[index]).into();
}
Ok(())
@ -143,7 +139,10 @@ impl MerkleTree {
#[cfg(test)]
mod tests {
use crate::{hash::Hasher, merkle::int_to_node, ElementHasher, HashFn, Word};
use super::{
super::{int_to_node, Rpo256},
Word,
};
const LEAVES4: [Word; 4] = [
int_to_node(1),
@ -244,9 +243,9 @@ mod tests {
// --------------------------------------------------------------------------------------------
fn compute_internal_nodes() -> (Word, Word, Word) {
let node2 = Hasher::hash_elements(&[LEAVES4[0], LEAVES4[1]].concat());
let node3 = Hasher::hash_elements(&[LEAVES4[2], LEAVES4[3]].concat());
let root = Hasher::merge(&[node2, node3]);
let node2 = Rpo256::hash_elements(&[LEAVES4[0], LEAVES4[1]].concat());
let node3 = Rpo256::hash_elements(&[LEAVES4[2], LEAVES4[3]].concat());
let root = Rpo256::merge(&[node2, node3]);
(root.into(), node2.into(), node3.into())
}

+ 9
- 5
src/merkle/mod.rs

@ -1,10 +1,14 @@
use crate::Word;
use super::{
hash::rpo::{Rpo256, RpoDigest as Digest},
utils::collections::{BTreeMap, Vec},
Felt, Word, ZERO,
};
#[cfg(test)]
use crate::{Felt, ZERO};
mod merkle_tree;
pub use merkle_tree::MerkleTree;
pub mod merkle_path_set;
pub mod merkle_tree;
mod merkle_path_set;
pub use merkle_path_set::MerklePathSet;
// ERRORS
// ================================================================================================

Loading…
Cancel
Save