Tracking PR for v0.1 release
@ -1,2 +1,27 @@ |
|||
# crypto |
|||
Cryptographic primitives used in Polygon Miden rollup |
|||
# Miden Crypto |
|||
This crate contains cryptographic primitives used in Polygon Miden. |
|||
|
|||
## Hash |
|||
[Hash module](./src/hash) provides a set of cryptographic hash functions which are used by Miden VM and Miden Rollup. Currently, these functions are: |
|||
|
|||
* [BLAKE3](https://github.com/BLAKE3-team/BLAKE3) hash function with 256-bit, 192-bit, or 160-bit output. The 192-bit and 160-bit outputs are obtained by truncating the 256-bit output of the standard BLAKE3. |
|||
* [RPO](https://eprint.iacr.org/2022/1577) hash function with 256-bit output. This hash function is an algebraic hash function suitable for recursive STARKs. |
|||
|
|||
## Merkle |
|||
[Merkle module](./src/merkle/) provides a set of data structures related to Merkle trees. All these data structures are implemented using the RPO hash function described above. These data structures are: |
|||
|
|||
* `MerkleTree`: a regular fully-balanced binary Merkle tree. The depth of this tree can be at most 64. |
|||
* `MerklePathSet`: a collection of Merkle authentication paths all resolving to the same root. The length of the paths can be at most 64. |
|||
|
|||
## Crate features |
|||
This crate can be compiled with the following features: |
|||
|
|||
* `std` - enabled by default and relies on the Rust standard library. |
|||
* `no_std` - does not rely on the Rust standard library and enables compilation to WebAssembly. |
|||
|
|||
Both of these features imply use of [alloc](https://doc.rust-lang.org/alloc/) to support heap-allocated collections. |
|||
|
|||
To compile with `no_std`, disable default features via `--no-default-features` flag. |
|||
|
|||
## License |
|||
This project is [MIT licensed](./LICENSE). |
@ -0,0 +1,57 @@ |
|||
use criterion::{black_box, criterion_group, criterion_main, BatchSize, Criterion};
|
|||
use miden_crypto::{
|
|||
hash::rpo::{Rpo256, RpoDigest},
|
|||
Felt,
|
|||
};
|
|||
use rand_utils::rand_value;
|
|||
|
|||
fn rpo256_2to1(c: &mut Criterion) {
|
|||
let v: [RpoDigest; 2] = [Rpo256::hash(&[1_u8]), Rpo256::hash(&[2_u8])];
|
|||
c.bench_function("RPO256 2-to-1 hashing (cached)", |bench| {
|
|||
bench.iter(|| Rpo256::merge(black_box(&v)))
|
|||
});
|
|||
|
|||
c.bench_function("RPO256 2-to-1 hashing (random)", |bench| {
|
|||
bench.iter_batched(
|
|||
|| {
|
|||
[
|
|||
Rpo256::hash(&rand_value::<u64>().to_le_bytes()),
|
|||
Rpo256::hash(&rand_value::<u64>().to_le_bytes()),
|
|||
]
|
|||
},
|
|||
|state| Rpo256::merge(&state),
|
|||
BatchSize::SmallInput,
|
|||
)
|
|||
});
|
|||
}
|
|||
|
|||
fn rpo256_sequential(c: &mut Criterion) {
|
|||
let v: [Felt; 100] = (0..100)
|
|||
.into_iter()
|
|||
.map(Felt::new)
|
|||
.collect::<Vec<Felt>>()
|
|||
.try_into()
|
|||
.expect("should not fail");
|
|||
c.bench_function("RPO256 sequential hashing (cached)", |bench| {
|
|||
bench.iter(|| Rpo256::hash_elements(black_box(&v)))
|
|||
});
|
|||
|
|||
c.bench_function("RPO256 sequential hashing (random)", |bench| {
|
|||
bench.iter_batched(
|
|||
|| {
|
|||
let v: [Felt; 100] = (0..100)
|
|||
.into_iter()
|
|||
.map(|_| Felt::new(rand_value()))
|
|||
.collect::<Vec<Felt>>()
|
|||
.try_into()
|
|||
.expect("should not fail");
|
|||
v
|
|||
},
|
|||
|state| Rpo256::hash_elements(&state),
|
|||
BatchSize::SmallInput,
|
|||
)
|
|||
});
|
|||
}
|
|||
|
|||
// Register both benchmark functions in one group and generate the benchmark binary's
// entry point.
criterion_group!(hash_group, rpo256_sequential, rpo256_2to1);
criterion_main!(hash_group);
|
@ -0,0 +1,319 @@ |
|||
use super::{Digest, ElementHasher, Felt, FieldElement, Hasher, StarkField};
|
|||
use crate::utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable};
|
|||
use core::{
|
|||
mem::{size_of, transmute, transmute_copy},
|
|||
ops::Deref,
|
|||
slice::from_raw_parts,
|
|||
};
|
|||
|
|||
#[cfg(test)]
|
|||
mod tests;
|
|||
|
|||
// CONSTANTS
// ================================================================================================

// Byte sizes of the supported blake3 digest outputs: 256-, 192-, and 160-bit.
const DIGEST32_BYTES: usize = 32;
const DIGEST24_BYTES: usize = 24;
const DIGEST20_BYTES: usize = 20;
|
|||
|
|||
// BLAKE3 N-BIT OUTPUT
|
|||
// ================================================================================================
|
|||
|
|||
/// N-bytes output of a blake3 function.
///
/// Note: `N` can't be greater than `32` because [`Digest::as_bytes`] currently supports only 32
/// bytes. Digests shorter than 32 bytes are zero-padded by `as_bytes`.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub struct Blake3Digest<const N: usize>([u8; N]);
|
|||
|
|||
impl<const N: usize> Default for Blake3Digest<N> {
    /// Returns the all-zero digest.
    fn default() -> Self {
        Self([0; N])
    }
}

impl<const N: usize> Deref for Blake3Digest<N> {
    type Target = [u8];

    /// Exposes the digest as a byte slice (this bound is relied on by `prepare_merge`).
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

// Owned conversions to and from the raw byte array.
impl<const N: usize> From<Blake3Digest<N>> for [u8; N] {
    fn from(value: Blake3Digest<N>) -> Self {
        value.0
    }
}

impl<const N: usize> From<[u8; N]> for Blake3Digest<N> {
    fn from(value: [u8; N]) -> Self {
        Self(value)
    }
}
|
|||
|
|||
impl<const N: usize> Serializable for Blake3Digest<N> {
    /// Writes the raw digest bytes into the target writer.
    fn write_into<W: ByteWriter>(&self, target: &mut W) {
        target.write_u8_slice(&self.0);
    }
}

impl<const N: usize> Deserializable for Blake3Digest<N> {
    /// Reads exactly `N` bytes from the source and wraps them in a digest.
    fn read_from<R: ByteReader>(source: &mut R) -> Result<Self, DeserializationError> {
        source.read_u8_array().map(Self)
    }
}
|
|||
|
|||
impl<const N: usize> Digest for Blake3Digest<N> {
    /// Returns the digest zero-padded on the right to 32 bytes.
    fn as_bytes(&self) -> [u8; 32] {
        // Runtime assertion on a const generic: since `N` is known at compile time the
        // optimizer folds this into either a no-op or an unconditional panic.
        assert!(N <= 32, "digest currently supports only 32 bytes!");
        expand_bytes(&self.0)
    }
}
|
|||
|
|||
// BLAKE3 256-BIT OUTPUT
|
|||
// ================================================================================================
|
|||
|
|||
/// 256-bit output blake3 hasher.
|
|||
pub struct Blake3_256;
|
|||
|
|||
impl Hasher for Blake3_256 {
|
|||
type Digest = Blake3Digest<32>;
|
|||
|
|||
fn hash(bytes: &[u8]) -> Self::Digest {
|
|||
Blake3Digest(blake3::hash(bytes).into())
|
|||
}
|
|||
|
|||
fn merge(values: &[Self::Digest; 2]) -> Self::Digest {
|
|||
Self::hash(prepare_merge(values))
|
|||
}
|
|||
|
|||
fn merge_with_int(seed: Self::Digest, value: u64) -> Self::Digest {
|
|||
let mut hasher = blake3::Hasher::new();
|
|||
hasher.update(&seed.0);
|
|||
hasher.update(&value.to_le_bytes());
|
|||
Blake3Digest(hasher.finalize().into())
|
|||
}
|
|||
}
|
|||
|
|||
impl ElementHasher for Blake3_256 {
|
|||
type BaseField = Felt;
|
|||
|
|||
fn hash_elements<E>(elements: &[E]) -> Self::Digest
|
|||
where
|
|||
E: FieldElement<BaseField = Self::BaseField>,
|
|||
{
|
|||
Blake3Digest(hash_elements(elements))
|
|||
}
|
|||
}
|
|||
|
|||
impl Blake3_256 {
|
|||
/// Returns a hash of the provided sequence of bytes.
|
|||
#[inline(always)]
|
|||
pub fn hash(bytes: &[u8]) -> Blake3Digest<DIGEST32_BYTES> {
|
|||
<Self as Hasher>::hash(bytes)
|
|||
}
|
|||
|
|||
/// Returns a hash of two digests. This method is intended for use in construction of
|
|||
/// Merkle trees and verification of Merkle paths.
|
|||
#[inline(always)]
|
|||
pub fn merge(values: &[Blake3Digest<DIGEST32_BYTES>; 2]) -> Blake3Digest<DIGEST32_BYTES> {
|
|||
<Self as Hasher>::merge(values)
|
|||
}
|
|||
|
|||
/// Returns a hash of the provided field elements.
|
|||
#[inline(always)]
|
|||
pub fn hash_elements<E>(elements: &[E]) -> Blake3Digest<DIGEST32_BYTES>
|
|||
where
|
|||
E: FieldElement<BaseField = Felt>,
|
|||
{
|
|||
<Self as ElementHasher>::hash_elements(elements)
|
|||
}
|
|||
}
|
|||
|
|||
// BLAKE3 192-BIT OUTPUT
|
|||
// ================================================================================================
|
|||
|
|||
/// 192-bit output blake3 hasher.
|
|||
pub struct Blake3_192;
|
|||
|
|||
impl Hasher for Blake3_192 {
|
|||
type Digest = Blake3Digest<24>;
|
|||
|
|||
fn hash(bytes: &[u8]) -> Self::Digest {
|
|||
Blake3Digest(*shrink_bytes(&blake3::hash(bytes).into()))
|
|||
}
|
|||
|
|||
fn merge(values: &[Self::Digest; 2]) -> Self::Digest {
|
|||
Self::hash(prepare_merge(values))
|
|||
}
|
|||
|
|||
fn merge_with_int(seed: Self::Digest, value: u64) -> Self::Digest {
|
|||
let mut hasher = blake3::Hasher::new();
|
|||
hasher.update(&seed.0);
|
|||
hasher.update(&value.to_le_bytes());
|
|||
Blake3Digest(*shrink_bytes(&hasher.finalize().into()))
|
|||
}
|
|||
}
|
|||
|
|||
impl ElementHasher for Blake3_192 {
|
|||
type BaseField = Felt;
|
|||
|
|||
fn hash_elements<E>(elements: &[E]) -> Self::Digest
|
|||
where
|
|||
E: FieldElement<BaseField = Self::BaseField>,
|
|||
{
|
|||
Blake3Digest(hash_elements(elements))
|
|||
}
|
|||
}
|
|||
|
|||
impl Blake3_192 {
|
|||
/// Returns a hash of the provided sequence of bytes.
|
|||
#[inline(always)]
|
|||
pub fn hash(bytes: &[u8]) -> Blake3Digest<DIGEST24_BYTES> {
|
|||
<Self as Hasher>::hash(bytes)
|
|||
}
|
|||
|
|||
/// Returns a hash of two digests. This method is intended for use in construction of
|
|||
/// Merkle trees and verification of Merkle paths.
|
|||
#[inline(always)]
|
|||
pub fn merge(values: &[Blake3Digest<DIGEST24_BYTES>; 2]) -> Blake3Digest<DIGEST24_BYTES> {
|
|||
<Self as Hasher>::merge(values)
|
|||
}
|
|||
|
|||
/// Returns a hash of the provided field elements.
|
|||
#[inline(always)]
|
|||
pub fn hash_elements<E>(elements: &[E]) -> Blake3Digest<DIGEST24_BYTES>
|
|||
where
|
|||
E: FieldElement<BaseField = Felt>,
|
|||
{
|
|||
<Self as ElementHasher>::hash_elements(elements)
|
|||
}
|
|||
}
|
|||
|
|||
// BLAKE3 160-BIT OUTPUT
|
|||
// ================================================================================================
|
|||
|
|||
/// 160-bit output blake3 hasher.
|
|||
pub struct Blake3_160;
|
|||
|
|||
impl Hasher for Blake3_160 {
|
|||
type Digest = Blake3Digest<20>;
|
|||
|
|||
fn hash(bytes: &[u8]) -> Self::Digest {
|
|||
Blake3Digest(*shrink_bytes(&blake3::hash(bytes).into()))
|
|||
}
|
|||
|
|||
fn merge(values: &[Self::Digest; 2]) -> Self::Digest {
|
|||
Self::hash(prepare_merge(values))
|
|||
}
|
|||
|
|||
fn merge_with_int(seed: Self::Digest, value: u64) -> Self::Digest {
|
|||
let mut hasher = blake3::Hasher::new();
|
|||
hasher.update(&seed.0);
|
|||
hasher.update(&value.to_le_bytes());
|
|||
Blake3Digest(*shrink_bytes(&hasher.finalize().into()))
|
|||
}
|
|||
}
|
|||
|
|||
impl ElementHasher for Blake3_160 {
|
|||
type BaseField = Felt;
|
|||
|
|||
fn hash_elements<E>(elements: &[E]) -> Self::Digest
|
|||
where
|
|||
E: FieldElement<BaseField = Self::BaseField>,
|
|||
{
|
|||
Blake3Digest(hash_elements(elements))
|
|||
}
|
|||
}
|
|||
|
|||
impl Blake3_160 {
|
|||
/// Returns a hash of the provided sequence of bytes.
|
|||
#[inline(always)]
|
|||
pub fn hash(bytes: &[u8]) -> Blake3Digest<DIGEST20_BYTES> {
|
|||
<Self as Hasher>::hash(bytes)
|
|||
}
|
|||
|
|||
/// Returns a hash of two digests. This method is intended for use in construction of
|
|||
/// Merkle trees and verification of Merkle paths.
|
|||
#[inline(always)]
|
|||
pub fn merge(values: &[Blake3Digest<DIGEST20_BYTES>; 2]) -> Blake3Digest<DIGEST20_BYTES> {
|
|||
<Self as Hasher>::merge(values)
|
|||
}
|
|||
|
|||
/// Returns a hash of the provided field elements.
|
|||
#[inline(always)]
|
|||
pub fn hash_elements<E>(elements: &[E]) -> Blake3Digest<DIGEST20_BYTES>
|
|||
where
|
|||
E: FieldElement<BaseField = Felt>,
|
|||
{
|
|||
<Self as ElementHasher>::hash_elements(elements)
|
|||
}
|
|||
}
|
|||
|
|||
// HELPER FUNCTIONS
|
|||
// ================================================================================================
|
|||
|
|||
/// Zero-copy ref shrink to array.
///
/// Returns a reference to the first `N` bytes of `bytes`.
fn shrink_bytes<const M: usize, const N: usize>(bytes: &[u8; M]) -> &[u8; N] {
    // Runtime assertion on const generics; folded away (or into an unconditional
    // panic) in optimized builds since `M` and `N` are known at compile time.
    assert!(
        M >= N,
        "N should fit in M so it can be safely transmuted into a smaller slice!"
    );
    // Safe replacement for the previous pointer transmute: the assert above
    // guarantees the prefix slice has exactly `N` bytes, so this cannot fail.
    <&[u8; N]>::try_from(&bytes[..N]).expect("length checked above")
}
|
|||
|
|||
/// Hash the elements into bytes and shrink the output.
fn hash_elements<const N: usize, E>(elements: &[E]) -> [u8; N]
where
    E: FieldElement<BaseField = Felt>,
{
    // don't leak assumptions from felt and check its actual implementation.
    // this is a compile-time branch so it is for free
    let digest = if Felt::IS_CANONICAL {
        // Canonical in-memory representation: the raw element bytes are already the
        // canonical encoding, so the whole slice can be hashed in a single call.
        blake3::hash(E::elements_as_bytes(elements))
    } else {
        // Non-canonical representation: normalize each base element through
        // `as_int()` so equal field elements always produce the same digest.
        E::as_base_elements(elements)
            .iter()
            .fold(blake3::Hasher::new(), |mut hasher, felt| {
                hasher.update(&felt.as_int().to_le_bytes());
                hasher
            })
            .finalize()
    };
    // Truncate the 32-byte blake3 digest down to the caller's requested `N` bytes.
    *shrink_bytes(&digest.into())
}
|
|||
|
|||
/// Owned bytes expansion.
///
/// Copies `bytes` into the first `M` positions of an `N`-byte array, zero-padding the rest.
fn expand_bytes<const M: usize, const N: usize>(bytes: &[u8; M]) -> [u8; N] {
    // Runtime assertion on const generics; folded away in optimized builds since
    // `M` and `N` are known at compile time.
    assert!(M <= N, "M should fit in N so M can be expanded!");
    // The plain copy covers both `M == N` and `M < N` identically, so the previous
    // unsafe `transmute_copy` fast path is unnecessary: `copy_from_slice` compiles
    // to a `memcpy` of the whole array when `M == N`.
    let mut expanded = [0u8; N];
    expanded[..M].copy_from_slice(bytes);
    expanded
}
|
|||
|
|||
// Cast the slice into contiguous bytes.
//
// NOTE(review): soundness relies on `D` being a thin wrapper around its byte array with no
// padding or extra fields, so that `size_of::<D>() * N` equals the total byte content (holds
// for `Blake3Digest<N>`); the `debug_assert` below only spot-checks the first element —
// confirm this invariant before using with any new digest type.
fn prepare_merge<const N: usize, D>(args: &[D; N]) -> &[u8]
where
    D: Deref<Target = [u8]>,
{
    // Runtime assertion on a const generic `N`; folded away in optimized builds.
    assert!(N > 0, "N shouldn't represent an empty slice!");
    let values = args.as_ptr() as *const u8;
    let len = size_of::<D>() * N;
    // safety: the values are tested to be contiguous
    let bytes = unsafe { from_raw_parts(values, len) };
    debug_assert_eq!(args[0].deref(), &bytes[..len / N]);
    bytes
}
|
@ -0,0 +1,20 @@ |
|||
use super::*;
|
|||
use crate::utils::collections::Vec;
|
|||
use proptest::prelude::*;
|
|||
|
|||
proptest! {
    // Exercise each truncated-output hasher on arbitrary byte vectors to verify that no
    // input length can trigger a panic (e.g. in the internal shrink/expand helpers).
    #[test]
    fn blake160_wont_panic_with_arbitrary_input(ref vec in any::<Vec<u8>>()) {
        Blake3_160::hash(vec);
    }

    #[test]
    fn blake192_wont_panic_with_arbitrary_input(ref vec in any::<Vec<u8>>()) {
        Blake3_192::hash(vec);
    }

    #[test]
    fn blake256_wont_panic_with_arbitrary_input(ref vec in any::<Vec<u8>>()) {
        Blake3_256::hash(vec);
    }
}
|
@ -1,18 +1,5 @@ |
|||
use crate::{ElementHasher, HashFn};
|
|||
use super::{Felt, FieldElement, StarkField, ONE, ZERO};
|
|||
use winter_crypto::{Digest, ElementHasher, Hasher};
|
|||
|
|||
mod rpo;
|
|||
pub use rpo::Rpo256 as Hasher;
|
|||
pub use rpo::{INV_MDS, MDS};
|
|||
|
|||
// TYPE ALIASES
|
|||
// ================================================================================================
|
|||
|
|||
pub type Digest = <Hasher as HashFn>::Digest;
|
|||
|
|||
// HELPER FUNCTIONS
|
|||
// ================================================================================================
|
|||
|
|||
#[inline(always)]
|
|||
pub fn merge(values: &[Digest; 2]) -> Digest {
|
|||
Hasher::merge(values)
|
|||
}
|
|||
// Hash-function submodules: BLAKE3 (with truncated variants) and RPO (algebraic,
// suitable for recursive STARKs).
pub mod blake;
pub mod rpo;
|
@ -1,27 +1,35 @@ |
|||
pub use winter_crypto::{Digest, ElementHasher, Hasher as HashFn};
|
|||
pub use winter_math::{
|
|||
fields::{f64::BaseElement as Felt, QuadExtension},
|
|||
log2, ExtensionOf, FieldElement, StarkField,
|
|||
};
|
|||
pub use winter_utils::{
|
|||
collections::{BTreeMap, Vec},
|
|||
uninit_vector, ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable,
|
|||
SliceReader,
|
|||
};
|
|||
#![cfg_attr(not(feature = "std"), no_std)]
|
|||
|
|||
#[cfg(not(feature = "std"))]
|
|||
#[cfg_attr(test, macro_use)]
|
|||
extern crate alloc;
|
|||
|
|||
pub mod hash;
|
|||
pub mod merkle;
|
|||
|
|||
// RE-EXPORTS
|
|||
// ================================================================================================
|
|||
|
|||
pub use winter_math::{fields::f64::BaseElement as Felt, FieldElement, StarkField};
|
|||
|
|||
/// Re-exports of `winter_utils` items so downstream crates do not need a direct
/// dependency on that crate.
pub mod utils {
    pub use winter_utils::{
        collections, string, uninit_vector, ByteReader, ByteWriter, Deserializable,
        DeserializationError, Serializable, SliceReader,
    };
}
|
|||
|
|||
// TYPE ALIASES
|
|||
// ================================================================================================
|
|||
|
|||
/// A group of four field elements in the Miden base field.
|
|||
pub type Word = [Felt; 4];
|
|||
|
|||
// CONSTANTS
|
|||
// ================================================================================================
|
|||
|
|||
/// Field element representing ZERO in the base field of the VM.
|
|||
/// Field element representing ZERO in the Miden base filed.
|
|||
pub const ZERO: Felt = Felt::ZERO;
|
|||
|
|||
/// Field element representing ONE in the base field of the VM.
|
|||
/// Field element representing ONE in the Miden base filed.
|
|||
pub const ONE: Felt = Felt::ONE;
|