
Merge pull request #28 from 0xPolygonMiden/bobbin-blake3-improv

Improve BLAKE3 sequential hashing performance
Bobbin Threadbare committed 2 years ago (committed by GitHub)
commit 4ed0611463
1 changed file with 12 additions and 8 deletions

src/hash/blake/mod.rs  +12 -8

@@ -1,5 +1,7 @@
 use super::{Digest, ElementHasher, Felt, FieldElement, Hasher, StarkField};
-use crate::utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable};
+use crate::utils::{
+    uninit_vector, ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable,
+};
 use core::{
     mem::{size_of, transmute, transmute_copy},
     ops::Deref,
@@ -276,13 +278,15 @@ where
         let digest = if Felt::IS_CANONICAL {
             blake3::hash(E::elements_as_bytes(elements))
         } else {
-            E::as_base_elements(elements)
-                .iter()
-                .fold(blake3::Hasher::new(), |mut hasher, felt| {
-                    hasher.update(&felt.as_int().to_le_bytes());
-                    hasher
-                })
-                .finalize()
+            let base_elements = E::as_base_elements(elements);
+            let blen = base_elements.len() << 3;
+
+            let mut bytes = unsafe { uninit_vector(blen) };
+            for (idx, element) in base_elements.iter().enumerate() {
+                bytes[idx * 8..(idx + 1) * 8].copy_from_slice(&element.as_int().to_le_bytes());
+            }
+
+            blake3::hash(&bytes)
         };
         *shrink_bytes(&digest.into())
     }
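
For context, here is a minimal standalone sketch (not the crate's code) of the optimization this diff applies: instead of feeding many 8-byte slices to an incremental blake3::Hasher, the elements are serialized into one contiguous buffer and hashed with a single blake3::hash call. The u64 values and the vec![0u8; ..] buffer below are illustrative stand-ins for the base field elements and the crate's unsafe uninit_vector.

fn hash_incremental(values: &[u64]) -> blake3::Hash {
    // one short update per element; the hasher re-enters its
    // buffering logic for every 8-byte chunk
    let mut hasher = blake3::Hasher::new();
    for v in values {
        hasher.update(&v.to_le_bytes());
    }
    hasher.finalize()
}

fn hash_batched(values: &[u64]) -> blake3::Hash {
    // serialize everything up front: len << 3 == len * 8 bytes
    let mut bytes = vec![0u8; values.len() << 3];
    for (idx, v) in values.iter().enumerate() {
        bytes[idx * 8..(idx + 1) * 8].copy_from_slice(&v.to_le_bytes());
    }
    // a single hash call over the whole buffer lets BLAKE3 process
    // full chunks at once instead of accumulating tiny updates
    blake3::hash(&bytes)
}

fn main() {
    let values: Vec<u64> = (0..1024).collect();
    // BLAKE3 hashes the concatenated byte stream, so both
    // approaches produce the same digest
    assert_eq!(hash_incremental(&values), hash_batched(&values));
}

The crate's version additionally avoids the zero-initialization cost by using uninit_vector, which is sound in the patched code because the loop overwrites every byte of the buffer before blake3::hash reads it.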
