Browse Source

Merge pull request #18 from 0xPolygonMiden/add-blake3

feat: add blake3 160, 192 & 256 bits
al-gkr-basic-workflow
Bobbin Threadbare 2 years ago
committed by GitHub
parent
commit
0257538a1e
No known key found for this signature in database GPG Key ID: 4AEE18F83AFDEB23
4 changed files with 256 additions and 2 deletions
  1. +3
    -2
      Cargo.toml
  2. +230
    -0
      src/hash/blake/mod.rs
  3. +20
    -0
      src/hash/blake/tests.rs
  4. +3
    -0
      src/hash/mod.rs

+ 3
- 2
Cargo.toml

@ -15,10 +15,11 @@ name = "hash"
harness = false
[features]
default = ["std", "winter_crypto/default", "winter_math/default", "winter_utils/default"]
std = ["winter_crypto/std", "winter_math/std", "winter_utils/std"]
default = ["blake3/default", "std", "winter_crypto/default", "winter_math/default", "winter_utils/default"]
std = ["blake3/std", "winter_crypto/std", "winter_math/std", "winter_utils/std"]
[dependencies]
blake3 = { version = "1.0", default-features = false }
winter_crypto = { version = "0.4.1", package = "winter-crypto", default-features = false }
winter_math = { version = "0.4.1", package = "winter-math", default-features = false }
winter_utils = { version = "0.4.1", package = "winter-utils", default-features = false }

+ 230
- 0
src/hash/blake/mod.rs

@ -0,0 +1,230 @@
use crate::{
    ByteReader, ByteWriter, Deserializable, DeserializationError, Digest, ElementHasher, Felt,
    FieldElement, HashFn, Serializable, StarkField,
};
use core::{
    convert::TryInto,
    mem::{size_of, transmute, transmute_copy},
    ops::Deref,
    slice::from_raw_parts,
};
#[cfg(test)]
mod tests;
// BLAKE3 N-BIT OUTPUT
// ================================================================================================
/// N-bytes output of a blake3 function.
///
/// The single field holds the raw digest bytes; it is private, so digests can only be
/// produced by the hashers in this module (or deserialization).
///
/// Note: `N` can't be greater than `32` because [`Digest::as_bytes`] currently supports only 32
/// bytes.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub struct Blake3Digest<const N: usize>([u8; N]);
impl<const N: usize> Default for Blake3Digest<N> {
fn default() -> Self {
Self([0; N])
}
}
impl<const N: usize> Deref for Blake3Digest<N> {
    type Target = [u8];

    /// Exposes the digest as a plain byte slice.
    fn deref(&self) -> &Self::Target {
        &self.0[..]
    }
}
impl<const N: usize> Serializable for Blake3Digest<N> {
    /// Writes the raw digest bytes into the target writer.
    fn write_into<W: ByteWriter>(&self, target: &mut W) {
        target.write_u8_slice(&self.0);
    }
}
impl<const N: usize> Deserializable for Blake3Digest<N> {
    /// Reads exactly `N` bytes from the source and wraps them in a digest.
    /// Fails with a [`DeserializationError`] if the source has fewer than `N` bytes left.
    fn read_from<R: ByteReader>(source: &mut R) -> Result<Self, DeserializationError> {
        source.read_u8_array().map(Self)
    }
}
impl<const N: usize> Digest for Blake3Digest<N> {
    /// Returns the digest expanded (zero-padded at the tail) to the fixed 32-byte array
    /// required by the [`Digest`] trait.
    fn as_bytes(&self) -> [u8; 32] {
        // guard: `N` must fit in the 32-byte return array. This is a runtime `assert!`,
        // but since `N` is a const generic the branch is resolved by the optimizer at
        // compile time in practice.
        assert!(N <= 32, "digest currently supports only 32 bytes!");
        expand_bytes(&self.0)
    }
}
// BLAKE3 256-BIT OUTPUT
// ================================================================================================

/// 256-bit output blake3 hasher; unit struct carrying the [`HashFn`] and [`ElementHasher`]
/// implementations below.
pub struct Blake3_256;
impl HashFn for Blake3_256 {
    type Digest = Blake3Digest<32>;

    /// Hashes the provided bytes, returning the full 256-bit blake3 output.
    fn hash(bytes: &[u8]) -> Self::Digest {
        let digest: [u8; 32] = blake3::hash(bytes).into();
        Blake3Digest(digest)
    }

    /// Merges two digests by hashing their concatenated bytes.
    fn merge(values: &[Self::Digest; 2]) -> Self::Digest {
        Self::hash(prepare_merge(values))
    }

    /// Hashes `seed` followed by the little-endian encoding of `value`.
    fn merge_with_int(seed: Self::Digest, value: u64) -> Self::Digest {
        let mut hasher = blake3::Hasher::new();
        hasher.update(&seed.0).update(&value.to_le_bytes());
        Blake3Digest(hasher.finalize().into())
    }
}
impl ElementHasher for Blake3_256 {
    type BaseField = Felt;

    /// Hashes a slice of field elements into a 256-bit digest via the shared
    /// `hash_elements` helper.
    fn hash_elements<E>(elements: &[E]) -> Self::Digest
    where
        E: FieldElement<BaseField = Self::BaseField>,
    {
        Blake3Digest(hash_elements(elements))
    }
}
// BLAKE3 192-BIT OUTPUT
// ================================================================================================

/// 192-bit output blake3 hasher: the 256-bit blake3 output shrunk to its first 24 bytes.
pub struct Blake3_192;
impl HashFn for Blake3_192 {
    type Digest = Blake3Digest<24>;

    /// Hashes the provided bytes and shrinks the 256-bit blake3 output to 192 bits.
    fn hash(bytes: &[u8]) -> Self::Digest {
        let full: [u8; 32] = blake3::hash(bytes).into();
        Blake3Digest(*shrink_bytes(&full))
    }

    /// Merges two digests by hashing their concatenated bytes.
    fn merge(values: &[Self::Digest; 2]) -> Self::Digest {
        Self::hash(prepare_merge(values))
    }

    /// Hashes `seed` followed by the little-endian encoding of `value`, shrunk to 192 bits.
    fn merge_with_int(seed: Self::Digest, value: u64) -> Self::Digest {
        let mut hasher = blake3::Hasher::new();
        hasher.update(&seed.0).update(&value.to_le_bytes());
        let full: [u8; 32] = hasher.finalize().into();
        Blake3Digest(*shrink_bytes(&full))
    }
}
impl ElementHasher for Blake3_192 {
    type BaseField = Felt;

    /// Hashes a slice of field elements into a 192-bit digest via the shared
    /// `hash_elements` helper.
    fn hash_elements<E>(elements: &[E]) -> Self::Digest
    where
        E: FieldElement<BaseField = Self::BaseField>,
    {
        Blake3Digest(hash_elements(elements))
    }
}
// BLAKE3 160-BIT OUTPUT
// ================================================================================================

/// 160-bit output blake3 hasher: the 256-bit blake3 output shrunk to its first 20 bytes.
pub struct Blake3_160;
impl HashFn for Blake3_160 {
    type Digest = Blake3Digest<20>;

    /// Hashes the provided bytes and shrinks the 256-bit blake3 output to 160 bits.
    fn hash(bytes: &[u8]) -> Self::Digest {
        let full: [u8; 32] = blake3::hash(bytes).into();
        Blake3Digest(*shrink_bytes(&full))
    }

    /// Merges two digests by hashing their concatenated bytes.
    fn merge(values: &[Self::Digest; 2]) -> Self::Digest {
        Self::hash(prepare_merge(values))
    }

    /// Hashes `seed` followed by the little-endian encoding of `value`, shrunk to 160 bits.
    fn merge_with_int(seed: Self::Digest, value: u64) -> Self::Digest {
        let mut hasher = blake3::Hasher::new();
        hasher.update(&seed.0).update(&value.to_le_bytes());
        let full: [u8; 32] = hasher.finalize().into();
        Blake3Digest(*shrink_bytes(&full))
    }
}
impl ElementHasher for Blake3_160 {
    type BaseField = Felt;

    /// Hashes a slice of field elements into a 160-bit digest via the shared
    /// `hash_elements` helper.
    fn hash_elements<E>(elements: &[E]) -> Self::Digest
    where
        E: FieldElement<BaseField = Self::BaseField>,
    {
        Blake3Digest(hash_elements(elements))
    }
}
// HELPER FUNCTIONS
// ================================================================================================
/// Zero-copy shrink of a byte-array reference: re-borrows the first `N` bytes of an
/// `M`-byte array as an `N`-byte array reference.
///
/// # Panics
/// Panics if `N > M`.
fn shrink_bytes<const M: usize, const N: usize>(bytes: &[u8; M]) -> &[u8; N] {
    // `N` must fit within `M`. This is a runtime `assert!`, but since both are const
    // generics the branch is resolved by the optimizer at compile time in practice.
    assert!(
        M >= N,
        "N should fit in M so it can be safely transmuted into a smaller slice!"
    );
    // Safe replacement for the previous `unsafe transmute`: the slice-to-array-reference
    // conversion is infallible here because the length was checked above.
    let head: &[u8; N] = (&bytes[..N]).try_into().expect("length checked above");
    head
}
/// Hash the elements into bytes and shrink the output to `N` bytes.
fn hash_elements<const N: usize, E>(elements: &[E]) -> [u8; N]
where
    E: FieldElement<BaseField = Felt>,
{
    // don't leak assumptions from felt and check its actual implementation.
    // this is a compile-time branch so it is for free
    let digest = if Felt::IS_CANONICAL {
        // canonical in-memory representation: the raw bytes of the elements can be
        // hashed directly in one call, with no per-element conversion.
        blake3::hash(E::elements_as_bytes(elements))
    } else {
        // non-canonical representation: feed each base element's integer value (via
        // `as_int`), serialized little-endian, into the hasher one at a time.
        E::as_base_elements(elements)
            .iter()
            .fold(blake3::Hasher::new(), |mut hasher, felt| {
                hasher.update(&felt.as_int().to_le_bytes());
                hasher
            })
            .finalize()
    };
    // shrink the 32-byte blake3 output to the requested `N` bytes
    *shrink_bytes(&digest.into())
}
/// Owned bytes expansion: copies the `M` input bytes into the front of a zero-filled
/// `N`-byte array.
///
/// # Panics
/// Panics if `M > N`.
fn expand_bytes<const M: usize, const N: usize>(bytes: &[u8; M]) -> [u8; N] {
    // `M` must fit within `N`. This is a runtime `assert!`, but since both are const
    // generics the branch is resolved by the optimizer at compile time in practice.
    assert!(M <= N, "M should fit in N so M can be expanded!");
    // A single zero-init + copy covers both the `M == N` and `M < N` cases identically,
    // so no `unsafe transmute_copy` special case is needed; `copy_from_slice` compiles
    // down to a plain memcpy.
    let mut expanded = [0u8; N];
    expanded[..M].copy_from_slice(bytes);
    expanded
}
/// Casts an array of digests into one contiguous byte slice covering all of them.
///
/// NOTE(review): soundness relies on `D` being a plain byte-array wrapper with no
/// padding or extra fields (as `Blake3Digest<N>` is), so that `N` consecutive `D`
/// values occupy exactly `size_of::<D>() * N` contiguous bytes — confirm this holds
/// for any new `D` used here.
fn prepare_merge<const N: usize, D>(args: &[D; N]) -> &[u8]
where
    D: Deref<Target = [u8]>,
{
    // `N == 0` would make the `args[0]` sanity check below invalid; reject it.
    // (runtime assert; resolved by the optimizer for const `N`)
    assert!(N > 0, "N shouldn't represent an empty slice!");
    let values = args.as_ptr() as *const u8;
    let len = size_of::<D>() * N;
    // safety: the values are tested to be contiguous
    let bytes = unsafe { from_raw_parts(values, len) };
    // sanity check (debug builds only): the first digest's bytes must line up with the
    // head of the reinterpreted slice.
    debug_assert_eq!(args[0].deref(), &bytes[..len / N]);
    bytes
}

+ 20
- 0
src/hash/blake/tests.rs

@ -0,0 +1,20 @@
use super::*;
use crate::Vec;
use proptest::prelude::*;
proptest! {
    // Property tests: hashing must not panic for inputs of arbitrary length and content
    // (including the empty vector).
    #[test]
    fn blake160_wont_panic_with_arbitrary_input(ref vec in any::<Vec<u8>>()) {
        Blake3_160::hash(vec);
    }

    #[test]
    fn blake192_wont_panic_with_arbitrary_input(ref vec in any::<Vec<u8>>()) {
        Blake3_192::hash(vec);
    }

    #[test]
    fn blake256_wont_panic_with_arbitrary_input(ref vec in any::<Vec<u8>>()) {
        Blake3_256::hash(vec);
    }
}

+ 3
- 0
src/hash/mod.rs

@ -1,5 +1,8 @@
use crate::{ElementHasher, HashFn};
mod blake;
pub use blake::{Blake3Digest, Blake3_160, Blake3_192, Blake3_256};
mod rpo;
pub use rpo::Rpo256 as Hasher;
pub use rpo::{INV_MDS, MDS};

Loading…
Cancel
Save