mirror of https://github.com/arnaucube/miden-crypto.git (synced 2026-01-12 00:51:29 +01:00)
feat: RPX (xHash12) hash function implementation
committed by Bobbin Threadbare
parent f33a982f29
commit 3125144445
src/hash/rescue/rpo/digest.rs (new file, 348 lines)
@@ -0,0 +1,348 @@
use super::{Digest, Felt, StarkField, DIGEST_SIZE, ZERO};
use crate::utils::{
    bytes_to_hex_string, hex_to_bytes, string::String, ByteReader, ByteWriter, Deserializable,
    DeserializationError, HexParseError, Serializable,
};
use core::{cmp::Ordering, fmt::Display, ops::Deref};
use winter_utils::Randomizable;

/// The number of bytes needed to encode a digest.
pub const DIGEST_BYTES: usize = 32;

// DIGEST TRAIT IMPLEMENTATIONS
// ================================================================================================

#[derive(Debug, Default, Copy, Clone, Eq, PartialEq)]
#[cfg_attr(feature = "serde", derive(serde::Deserialize, serde::Serialize))]
#[cfg_attr(feature = "serde", serde(into = "String", try_from = "&str"))]
pub struct RpoDigest([Felt; DIGEST_SIZE]);

impl RpoDigest {
    pub const fn new(value: [Felt; DIGEST_SIZE]) -> Self {
        Self(value)
    }

    pub fn as_elements(&self) -> &[Felt] {
        self.as_ref()
    }

    pub fn as_bytes(&self) -> [u8; DIGEST_BYTES] {
        <Self as Digest>::as_bytes(self)
    }

    pub fn digests_as_elements<'a, I>(digests: I) -> impl Iterator<Item = &'a Felt>
    where
        I: Iterator<Item = &'a Self>,
    {
        digests.flat_map(|d| d.0.iter())
    }
}

impl Digest for RpoDigest {
    fn as_bytes(&self) -> [u8; DIGEST_BYTES] {
        let mut result = [0; DIGEST_BYTES];

        result[..8].copy_from_slice(&self.0[0].as_int().to_le_bytes());
        result[8..16].copy_from_slice(&self.0[1].as_int().to_le_bytes());
        result[16..24].copy_from_slice(&self.0[2].as_int().to_le_bytes());
        result[24..].copy_from_slice(&self.0[3].as_int().to_le_bytes());

        result
    }
}

impl Deref for RpoDigest {
    type Target = [Felt; DIGEST_SIZE];

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl Ord for RpoDigest {
    fn cmp(&self, other: &Self) -> Ordering {
        // compare the inner u64 of both elements.
        //
        // this iterates the elements and returns the first comparison different from `Equal`.
        // Otherwise, the ordering is equal.
        //
        // the endianness is irrelevant here because, this being a cryptographically secure hash,
        // the digest shouldn't preserve any ordering property of its input.
        //
        // finally, we use `Felt::inner` instead of `Felt::as_int` so we avoid performing a
        // Montgomery reduction for every limb. that is safe because every inner element of the
        // digest is guaranteed to be in its canonical form (that is, `x in [0,p)`).
        self.0.iter().map(Felt::inner).zip(other.0.iter().map(Felt::inner)).fold(
            Ordering::Equal,
            |ord, (a, b)| match ord {
                Ordering::Equal => a.cmp(&b),
                _ => ord,
            },
        )
    }
}

impl PartialOrd for RpoDigest {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

impl Display for RpoDigest {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        let encoded: String = self.into();
        write!(f, "{}", encoded)?;
        Ok(())
    }
}

impl Randomizable for RpoDigest {
    const VALUE_SIZE: usize = DIGEST_BYTES;

    fn from_random_bytes(bytes: &[u8]) -> Option<Self> {
        let bytes_array: Option<[u8; 32]> = bytes.try_into().ok();
        if let Some(bytes_array) = bytes_array {
            Self::try_from(bytes_array).ok()
        } else {
            None
        }
    }
}

// CONVERSIONS: FROM RPO DIGEST
// ================================================================================================

impl From<&RpoDigest> for [Felt; DIGEST_SIZE] {
    fn from(value: &RpoDigest) -> Self {
        value.0
    }
}

impl From<RpoDigest> for [Felt; DIGEST_SIZE] {
    fn from(value: RpoDigest) -> Self {
        value.0
    }
}

impl From<&RpoDigest> for [u64; DIGEST_SIZE] {
    fn from(value: &RpoDigest) -> Self {
        [
            value.0[0].as_int(),
            value.0[1].as_int(),
            value.0[2].as_int(),
            value.0[3].as_int(),
        ]
    }
}

impl From<RpoDigest> for [u64; DIGEST_SIZE] {
    fn from(value: RpoDigest) -> Self {
        [
            value.0[0].as_int(),
            value.0[1].as_int(),
            value.0[2].as_int(),
            value.0[3].as_int(),
        ]
    }
}

impl From<&RpoDigest> for [u8; DIGEST_BYTES] {
    fn from(value: &RpoDigest) -> Self {
        value.as_bytes()
    }
}

impl From<RpoDigest> for [u8; DIGEST_BYTES] {
    fn from(value: RpoDigest) -> Self {
        value.as_bytes()
    }
}

impl From<RpoDigest> for String {
    /// The returned string starts with `0x`.
    fn from(value: RpoDigest) -> Self {
        bytes_to_hex_string(value.as_bytes())
    }
}

impl From<&RpoDigest> for String {
    /// The returned string starts with `0x`.
    fn from(value: &RpoDigest) -> Self {
        (*value).into()
    }
}

// CONVERSIONS: TO DIGEST
// ================================================================================================

impl From<[Felt; DIGEST_SIZE]> for RpoDigest {
    fn from(value: [Felt; DIGEST_SIZE]) -> Self {
        Self(value)
    }
}

impl TryFrom<[u8; DIGEST_BYTES]> for RpoDigest {
    type Error = HexParseError;

    fn try_from(value: [u8; DIGEST_BYTES]) -> Result<Self, Self::Error> {
        // Note: the input length is known, so the conversions from slice to array must succeed
        // and the `unwrap`s below are safe
        let a = u64::from_le_bytes(value[0..8].try_into().unwrap());
        let b = u64::from_le_bytes(value[8..16].try_into().unwrap());
        let c = u64::from_le_bytes(value[16..24].try_into().unwrap());
        let d = u64::from_le_bytes(value[24..32].try_into().unwrap());

        if [a, b, c, d].iter().any(|v| *v >= Felt::MODULUS) {
            return Err(HexParseError::OutOfRange);
        }

        Ok(RpoDigest([Felt::new(a), Felt::new(b), Felt::new(c), Felt::new(d)]))
    }
}

impl TryFrom<&str> for RpoDigest {
    type Error = HexParseError;

    /// Expects the string to start with `0x`.
    fn try_from(value: &str) -> Result<Self, Self::Error> {
        hex_to_bytes(value).and_then(|v| v.try_into())
    }
}

impl TryFrom<String> for RpoDigest {
    type Error = HexParseError;

    /// Expects the string to start with `0x`.
    fn try_from(value: String) -> Result<Self, Self::Error> {
        value.as_str().try_into()
    }
}

impl TryFrom<&String> for RpoDigest {
    type Error = HexParseError;

    /// Expects the string to start with `0x`.
    fn try_from(value: &String) -> Result<Self, Self::Error> {
        value.as_str().try_into()
    }
}

// SERIALIZATION / DESERIALIZATION
// ================================================================================================

impl Serializable for RpoDigest {
    fn write_into<W: ByteWriter>(&self, target: &mut W) {
        target.write_bytes(&self.as_bytes());
    }
}

impl Deserializable for RpoDigest {
    fn read_from<R: ByteReader>(source: &mut R) -> Result<Self, DeserializationError> {
        let mut inner: [Felt; DIGEST_SIZE] = [ZERO; DIGEST_SIZE];
        for inner in inner.iter_mut() {
            let e = source.read_u64()?;
            if e >= Felt::MODULUS {
                return Err(DeserializationError::InvalidValue(String::from(
                    "Value not in the appropriate range",
                )));
            }
            *inner = Felt::new(e);
        }

        Ok(Self(inner))
    }
}

// TESTS
// ================================================================================================

#[cfg(test)]
mod tests {
    use super::{Deserializable, Felt, RpoDigest, Serializable, DIGEST_BYTES, DIGEST_SIZE};
    use crate::utils::SliceReader;
    use rand_utils::rand_value;

    #[test]
    fn digest_serialization() {
        let e1 = Felt::new(rand_value());
        let e2 = Felt::new(rand_value());
        let e3 = Felt::new(rand_value());
        let e4 = Felt::new(rand_value());

        let d1 = RpoDigest([e1, e2, e3, e4]);

        let mut bytes = vec![];
        d1.write_into(&mut bytes);
        assert_eq!(DIGEST_BYTES, bytes.len());

        let mut reader = SliceReader::new(&bytes);
        let d2 = RpoDigest::read_from(&mut reader).unwrap();

        assert_eq!(d1, d2);
    }

    #[test]
    fn digest_encoding() {
        let digest = RpoDigest([
            Felt::new(rand_value()),
            Felt::new(rand_value()),
            Felt::new(rand_value()),
            Felt::new(rand_value()),
        ]);

        let string: String = digest.into();
        let round_trip: RpoDigest = string.try_into().expect("decoding failed");

        assert_eq!(digest, round_trip);
    }

    #[test]
    fn test_conversions() {
        let digest = RpoDigest([
            Felt::new(rand_value()),
            Felt::new(rand_value()),
            Felt::new(rand_value()),
            Felt::new(rand_value()),
        ]);

        let v: [Felt; DIGEST_SIZE] = digest.into();
        let v2: RpoDigest = v.into();
        assert_eq!(digest, v2);

        let v: [Felt; DIGEST_SIZE] = (&digest).into();
        let v2: RpoDigest = v.into();
        assert_eq!(digest, v2);

        let v: [u64; DIGEST_SIZE] = digest.into();
        let v2: RpoDigest = v.try_into().unwrap();
        assert_eq!(digest, v2);

        let v: [u64; DIGEST_SIZE] = (&digest).into();
        let v2: RpoDigest = v.try_into().unwrap();
        assert_eq!(digest, v2);

        let v: [u8; DIGEST_BYTES] = digest.into();
        let v2: RpoDigest = v.try_into().unwrap();
        assert_eq!(digest, v2);

        let v: [u8; DIGEST_BYTES] = (&digest).into();
        let v2: RpoDigest = v.try_into().unwrap();
        assert_eq!(digest, v2);

        let v: String = digest.into();
        let v2: RpoDigest = v.try_into().unwrap();
        assert_eq!(digest, v2);

        let v: String = (&digest).into();
        let v2: RpoDigest = v.try_into().unwrap();
        assert_eq!(digest, v2);

        let v: [u8; DIGEST_BYTES] = digest.into();
        let v2: RpoDigest = (&v).try_into().unwrap();
        assert_eq!(digest, v2);

        let v: [u8; DIGEST_BYTES] = (&digest).into();
        let v2: RpoDigest = (&v).try_into().unwrap();
        assert_eq!(digest, v2);
    }
}
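For readers skimming the diff, here is a short usage sketch of the conversion surface defined above. It is not part of the commit; the import paths (miden_crypto::hash::rpo::RpoDigest, miden_crypto::Felt) are assumed re-exports and may differ from the crate's actual module layout.

// A minimal sketch (not part of the diff) exercising the RpoDigest conversions above.
// Assumption: the crate re-exports RpoDigest and Felt at these paths.
use miden_crypto::{hash::rpo::RpoDigest, Felt};

fn main() {
    // build a digest from four field elements
    let digest = RpoDigest::new([Felt::new(1), Felt::new(2), Felt::new(3), Felt::new(4)]);

    // canonical little-endian byte encoding (32 bytes)
    let bytes: [u8; 32] = digest.as_bytes();

    // the hex string starts with "0x"; decoding validates that every limb is < Felt::MODULUS
    let hex: String = digest.into();
    let decoded = RpoDigest::try_from(hex.as_str()).expect("valid hex should round-trip");

    assert_eq!(decoded.as_bytes(), bytes);
}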
src/hash/rescue/rpo/mod.rs (new file, 324 lines)
@@ -0,0 +1,324 @@
use super::{
    add_constants, apply_inv_sbox, apply_mds, apply_sbox,
    optimized_add_constants_and_apply_inv_sbox, optimized_add_constants_and_apply_sbox, Digest,
    ElementHasher, Felt, FieldElement, Hasher, StarkField, ARK1, ARK2, BINARY_CHUNK_SIZE,
    CAPACITY_RANGE, DIGEST_RANGE, DIGEST_SIZE, INPUT1_RANGE, INPUT2_RANGE, MDS, NUM_ROUNDS, ONE,
    RATE_RANGE, RATE_WIDTH, STATE_WIDTH, ZERO,
};
use core::{convert::TryInto, ops::Range};

mod digest;
pub use digest::RpoDigest;

#[cfg(test)]
mod tests;

// HASHER IMPLEMENTATION
// ================================================================================================

/// Implementation of the Rescue Prime Optimized hash function with 256-bit output.
///
/// The hash function is implemented according to the Rescue Prime Optimized
/// [specifications](https://eprint.iacr.org/2022/1577).
///
/// The parameters used to instantiate the function are:
/// * Field: 64-bit prime field with modulus 2^64 - 2^32 + 1.
/// * State width: 12 field elements.
/// * Capacity size: 4 field elements.
/// * Number of rounds: 7.
/// * S-Box degree: 7.
///
/// The above parameters target a 128-bit security level. The digest consists of four field
/// elements and it can be serialized into 32 bytes (256 bits).
///
/// ## Hash output consistency
/// Functions [hash_elements()](Rpo256::hash_elements), [merge()](Rpo256::merge), and
/// [merge_with_int()](Rpo256::merge_with_int) are internally consistent. That is, computing
/// a hash for the same set of elements using these functions will always produce the same
/// result. For example, merging two digests using [merge()](Rpo256::merge) will produce the
/// same result as hashing the 8 elements which make up these digests using the
/// [hash_elements()](Rpo256::hash_elements) function.
///
/// However, the [hash()](Rpo256::hash) function is not consistent with the functions mentioned
/// above. For example, if we take two field elements, serialize them to bytes and hash them using
/// [hash()](Rpo256::hash), the result will differ from the result obtained by hashing these
/// elements directly using the [hash_elements()](Rpo256::hash_elements) function. The reason for
/// this difference is that the [hash()](Rpo256::hash) function needs to be able to handle
/// arbitrary binary strings, which may or may not encode valid field elements - and thus, the
/// deserialization procedure used by this function is different from the procedure used to
/// deserialize valid field elements.
///
/// Thus, if the underlying data consists of valid field elements, it might make more sense
/// to deserialize them into field elements and then hash them using the
/// [hash_elements()](Rpo256::hash_elements) function rather than hashing the serialized bytes
/// using the [hash()](Rpo256::hash) function.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub struct Rpo256();

impl Hasher for Rpo256 {
    /// #### Collision resistance
    ///
    /// Rpo256 collision resistance is the same as the security level, that is, 128 bits.
    ///
    /// However, our setup of the capacity registers might drop it to 126.
    ///
    /// Related issue: [#69](https://github.com/0xPolygonMiden/crypto/issues/69)
    const COLLISION_RESISTANCE: u32 = 128;

    type Digest = RpoDigest;

    fn hash(bytes: &[u8]) -> Self::Digest {
        // initialize the state with zeroes
        let mut state = [ZERO; STATE_WIDTH];

        // set the capacity (first element) to a flag indicating whether or not the input length
        // is evenly divisible by the rate. this will prevent collisions between padded and
        // non-padded inputs, and will rule out the need to perform an extra permutation in case
        // of evenly divisible inputs.
        let is_rate_multiple = bytes.len() % RATE_WIDTH == 0;
        if !is_rate_multiple {
            state[CAPACITY_RANGE.start] = ONE;
        }

        // initialize a buffer to receive the little-endian elements.
        let mut buf = [0_u8; 8];

        // iterate the chunks of bytes, creating a field element from each chunk and copying it
        // into the state.
        //
        // every time the rate range is filled, a permutation is performed. if the final value of
        // `i` is not zero, then the chunks count wasn't enough to fill the rate range, and an
        // additional permutation must be performed.
        let i = bytes.chunks(BINARY_CHUNK_SIZE).fold(0, |i, chunk| {
            // the last element of the iteration may or may not be a full chunk. if it's not, then
            // we need to pad the remainder bytes of the chunk with zeroes, separated by a `1`.
            // this will avoid collisions.
            if chunk.len() == BINARY_CHUNK_SIZE {
                buf[..BINARY_CHUNK_SIZE].copy_from_slice(chunk);
            } else {
                buf.fill(0);
                buf[..chunk.len()].copy_from_slice(chunk);
                buf[chunk.len()] = 1;
            }

            // set the current rate element to the input. since we take at most 7 bytes, we are
            // guaranteed that the input data will fit into a single field element.
            state[RATE_RANGE.start + i] = Felt::new(u64::from_le_bytes(buf));

            // proceed filling the range. if it's full, then we apply a permutation and reset the
            // counter to the beginning of the range.
            if i == RATE_WIDTH - 1 {
                Self::apply_permutation(&mut state);
                0
            } else {
                i + 1
            }
        });

        // if we absorbed some elements but didn't apply a permutation to them (would happen when
        // the number of elements is not a multiple of RATE_WIDTH), apply the RPO permutation. we
        // don't need to apply any extra padding because the first capacity element contains a
        // flag indicating whether the input is evenly divisible by the rate.
        if i != 0 {
            state[RATE_RANGE.start + i..RATE_RANGE.end].fill(ZERO);
            state[RATE_RANGE.start + i] = ONE;
            Self::apply_permutation(&mut state);
        }

        // return the first 4 elements of the rate as hash result.
        RpoDigest::new(state[DIGEST_RANGE].try_into().unwrap())
    }

    fn merge(values: &[Self::Digest; 2]) -> Self::Digest {
        // initialize the state by copying the digest elements into the rate portion of the state
        // (8 total elements), and set the capacity elements to 0.
        let mut state = [ZERO; STATE_WIDTH];
        let it = Self::Digest::digests_as_elements(values.iter());
        for (i, v) in it.enumerate() {
            state[RATE_RANGE.start + i] = *v;
        }

        // apply the RPO permutation and return the first four elements of the state
        Self::apply_permutation(&mut state);
        RpoDigest::new(state[DIGEST_RANGE].try_into().unwrap())
    }

    fn merge_with_int(seed: Self::Digest, value: u64) -> Self::Digest {
        // initialize the state as follows:
        // - seed is copied into the first 4 elements of the rate portion of the state.
        // - if the value fits into a single field element, copy it into the fifth rate element
        //   and set the sixth rate element to 1.
        // - if the value doesn't fit into a single field element, split it into two field
        //   elements, copy them into rate elements 5 and 6, and set the seventh rate element
        //   to 1.
        // - set the first capacity element to 1
        let mut state = [ZERO; STATE_WIDTH];
        state[INPUT1_RANGE].copy_from_slice(seed.as_elements());
        state[INPUT2_RANGE.start] = Felt::new(value);
        if value < Felt::MODULUS {
            state[INPUT2_RANGE.start + 1] = ONE;
        } else {
            state[INPUT2_RANGE.start + 1] = Felt::new(value / Felt::MODULUS);
            state[INPUT2_RANGE.start + 2] = ONE;
        }

        // common padding for both cases
        state[CAPACITY_RANGE.start] = ONE;

        // apply the RPO permutation and return the first four elements of the state
        Self::apply_permutation(&mut state);
        RpoDigest::new(state[DIGEST_RANGE].try_into().unwrap())
    }
}

impl ElementHasher for Rpo256 {
    type BaseField = Felt;

    fn hash_elements<E: FieldElement<BaseField = Self::BaseField>>(elements: &[E]) -> Self::Digest {
        // convert the elements into a list of base field elements
        let elements = E::slice_as_base_elements(elements);

        // initialize state to all zeros, except for the first element of the capacity part, which
        // is set to 1 if the number of elements is not a multiple of RATE_WIDTH.
        let mut state = [ZERO; STATE_WIDTH];
        if elements.len() % RATE_WIDTH != 0 {
            state[CAPACITY_RANGE.start] = ONE;
        }

        // absorb elements into the state one by one until the rate portion of the state is filled
        // up; then apply the Rescue permutation and start absorbing again; repeat until all
        // elements have been absorbed
        let mut i = 0;
        for &element in elements.iter() {
            state[RATE_RANGE.start + i] = element;
            i += 1;
            if i % RATE_WIDTH == 0 {
                Self::apply_permutation(&mut state);
                i = 0;
            }
        }

        // if we absorbed some elements but didn't apply a permutation to them (would happen when
        // the number of elements is not a multiple of RATE_WIDTH), apply the RPO permutation after
        // padding by appending a 1 followed by as many 0 as necessary to make the input length a
        // multiple of the RATE_WIDTH.
        if i > 0 {
            state[RATE_RANGE.start + i] = ONE;
            i += 1;
            while i != RATE_WIDTH {
                state[RATE_RANGE.start + i] = ZERO;
                i += 1;
            }
            Self::apply_permutation(&mut state);
        }

        // return the first 4 elements of the state as hash result
        RpoDigest::new(state[DIGEST_RANGE].try_into().unwrap())
    }
}

// HASH FUNCTION IMPLEMENTATION
// ================================================================================================

impl Rpo256 {
    // CONSTANTS
    // --------------------------------------------------------------------------------------------

    /// The number of rounds is set to 7 to target 128-bit security level.
    pub const NUM_ROUNDS: usize = NUM_ROUNDS;

    /// Sponge state is set to 12 field elements or 96 bytes; 8 elements are reserved for rate and
    /// the remaining 4 elements are reserved for capacity.
    pub const STATE_WIDTH: usize = STATE_WIDTH;

    /// The rate portion of the state is located in elements 4 through 11 (inclusive).
    pub const RATE_RANGE: Range<usize> = RATE_RANGE;

    /// The capacity portion of the state is located in elements 0, 1, 2, and 3.
    pub const CAPACITY_RANGE: Range<usize> = CAPACITY_RANGE;

    /// The output of the hash function can be read from state elements 4, 5, 6, and 7.
    pub const DIGEST_RANGE: Range<usize> = DIGEST_RANGE;

    /// MDS matrix used for computing the linear layer in an RPO round.
    pub const MDS: [[Felt; STATE_WIDTH]; STATE_WIDTH] = MDS;

    /// Round constants added to the hasher state in the first half of the RPO round.
    pub const ARK1: [[Felt; STATE_WIDTH]; NUM_ROUNDS] = ARK1;

    /// Round constants added to the hasher state in the second half of the RPO round.
    pub const ARK2: [[Felt; STATE_WIDTH]; NUM_ROUNDS] = ARK2;

    // TRAIT PASS-THROUGH FUNCTIONS
    // --------------------------------------------------------------------------------------------

    /// Returns a hash of the provided sequence of bytes.
    #[inline(always)]
    pub fn hash(bytes: &[u8]) -> RpoDigest {
        <Self as Hasher>::hash(bytes)
    }

    /// Returns a hash of two digests. This method is intended for use in construction of
    /// Merkle trees and verification of Merkle paths.
    #[inline(always)]
    pub fn merge(values: &[RpoDigest; 2]) -> RpoDigest {
        <Self as Hasher>::merge(values)
    }

    /// Returns a hash of the provided field elements.
    #[inline(always)]
    pub fn hash_elements<E: FieldElement<BaseField = Felt>>(elements: &[E]) -> RpoDigest {
        <Self as ElementHasher>::hash_elements(elements)
    }

    // DOMAIN IDENTIFIER
    // --------------------------------------------------------------------------------------------

    /// Returns a hash of two digests and a domain identifier.
    pub fn merge_in_domain(values: &[RpoDigest; 2], domain: Felt) -> RpoDigest {
        // initialize the state by copying the digest elements into the rate portion of the state
        // (8 total elements), and set the capacity elements to 0.
        let mut state = [ZERO; STATE_WIDTH];
        let it = RpoDigest::digests_as_elements(values.iter());
        for (i, v) in it.enumerate() {
            state[RATE_RANGE.start + i] = *v;
        }

        // set the second capacity element to the domain value. The first capacity element is used
        // for padding purposes.
        state[CAPACITY_RANGE.start + 1] = domain;

        // apply the RPO permutation and return the first four elements of the state
        Self::apply_permutation(&mut state);
        RpoDigest::new(state[DIGEST_RANGE].try_into().unwrap())
    }

    // RESCUE PERMUTATION
    // --------------------------------------------------------------------------------------------

    /// Applies the RPO permutation to the provided state.
    #[inline(always)]
    pub fn apply_permutation(state: &mut [Felt; STATE_WIDTH]) {
        for i in 0..NUM_ROUNDS {
            Self::apply_round(state, i);
        }
    }

    /// RPO round function.
    #[inline(always)]
    pub fn apply_round(state: &mut [Felt; STATE_WIDTH], round: usize) {
        // apply first half of RPO round
        apply_mds(state);
        if !optimized_add_constants_and_apply_sbox(state, &ARK1[round]) {
            add_constants(state, &ARK1[round]);
            apply_sbox(state);
        }

        // apply second half of RPO round
        apply_mds(state);
        if !optimized_add_constants_and_apply_inv_sbox(state, &ARK2[round]) {
            add_constants(state, &ARK2[round]);
            apply_inv_sbox(state);
        }
    }
}
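The doc comments above describe how the byte-oriented hash(), the element-oriented hash_elements(), and the 2-to-1 merge functions relate. A minimal usage sketch follows; it is not part of the commit, and the import path miden_crypto::hash::rpo::{Rpo256, RpoDigest} is an assumed re-export.

// A minimal sketch (not part of the diff) of the Rpo256 API defined above.
// Assumption: the crate re-exports Rpo256, RpoDigest, and Felt at these paths.
use miden_crypto::{
    hash::rpo::{Rpo256, RpoDigest},
    Felt,
};

fn main() {
    // hashing raw bytes uses the chunking/padding scheme described in hash() above
    let d1: RpoDigest = Rpo256::hash(b"some arbitrary bytes");

    // hashing field elements directly (consistent with merge() for 8-element inputs)
    let elements = [Felt::new(1), Felt::new(2), Felt::new(3), Felt::new(4)];
    let d2 = Rpo256::hash_elements(&elements);

    // 2-to-1 merge, e.g. for Merkle tree nodes
    let parent = Rpo256::merge(&[d1, d2]);

    // domain-separated merge: a ZERO domain matches plain merge, any other domain does not
    assert_eq!(parent, Rpo256::merge_in_domain(&[d1, d2], Felt::new(0)));
    assert_ne!(parent, Rpo256::merge_in_domain(&[d1, d2], Felt::new(1)));
}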
src/hash/rescue/rpo/tests.rs (new file, 346 lines)
@@ -0,0 +1,346 @@
use super::{
    super::{apply_inv_sbox, apply_sbox, ALPHA, INV_ALPHA},
    Felt, FieldElement, Hasher, Rpo256, RpoDigest, StarkField, ONE, STATE_WIDTH, ZERO,
};
use crate::{
    utils::collections::{BTreeSet, Vec},
    Word,
};
use core::convert::TryInto;
use proptest::prelude::*;
use rand_utils::rand_value;

#[test]
fn test_sbox() {
    let state = [Felt::new(rand_value()); STATE_WIDTH];

    let mut expected = state;
    expected.iter_mut().for_each(|v| *v = v.exp(ALPHA));

    let mut actual = state;
    apply_sbox(&mut actual);

    assert_eq!(expected, actual);
}

#[test]
fn test_inv_sbox() {
    let state = [Felt::new(rand_value()); STATE_WIDTH];

    let mut expected = state;
    expected.iter_mut().for_each(|v| *v = v.exp(INV_ALPHA));

    let mut actual = state;
    apply_inv_sbox(&mut actual);

    assert_eq!(expected, actual);
}

#[test]
fn hash_elements_vs_merge() {
    let elements = [Felt::new(rand_value()); 8];

    let digests: [RpoDigest; 2] = [
        RpoDigest::new(elements[..4].try_into().unwrap()),
        RpoDigest::new(elements[4..].try_into().unwrap()),
    ];

    let m_result = Rpo256::merge(&digests);
    let h_result = Rpo256::hash_elements(&elements);
    assert_eq!(m_result, h_result);
}

#[test]
fn merge_vs_merge_in_domain() {
    let elements = [Felt::new(rand_value()); 8];

    let digests: [RpoDigest; 2] = [
        RpoDigest::new(elements[..4].try_into().unwrap()),
        RpoDigest::new(elements[4..].try_into().unwrap()),
    ];
    let merge_result = Rpo256::merge(&digests);

    // ------------- merge with domain = 0 ----------------------------------------------------------

    // set domain to ZERO. This should not change the result.
    let domain = ZERO;

    let merge_in_domain_result = Rpo256::merge_in_domain(&digests, domain);
    assert_eq!(merge_result, merge_in_domain_result);

    // ------------- merge with domain = 1 ----------------------------------------------------------

    // set domain to ONE. This should change the result.
    let domain = ONE;

    let merge_in_domain_result = Rpo256::merge_in_domain(&digests, domain);
    assert_ne!(merge_result, merge_in_domain_result);
}

#[test]
fn hash_elements_vs_merge_with_int() {
    let tmp = [Felt::new(rand_value()); 4];
    let seed = RpoDigest::new(tmp);

    // ----- value fits into a field element ------------------------------------------------------
    let val: Felt = Felt::new(rand_value());
    let m_result = Rpo256::merge_with_int(seed, val.as_int());

    let mut elements = seed.as_elements().to_vec();
    elements.push(val);
    let h_result = Rpo256::hash_elements(&elements);

    assert_eq!(m_result, h_result);

    // ----- value does not fit into a field element ----------------------------------------------
    let val = Felt::MODULUS + 2;
    let m_result = Rpo256::merge_with_int(seed, val);

    let mut elements = seed.as_elements().to_vec();
    elements.push(Felt::new(val));
    elements.push(ONE);
    let h_result = Rpo256::hash_elements(&elements);

    assert_eq!(m_result, h_result);
}

#[test]
fn hash_padding() {
    // adding a zero byte at the end of a byte string should result in a different hash
    let r1 = Rpo256::hash(&[1_u8, 2, 3]);
    let r2 = Rpo256::hash(&[1_u8, 2, 3, 0]);
    assert_ne!(r1, r2);

    // same as above but with bigger inputs
    let r1 = Rpo256::hash(&[1_u8, 2, 3, 4, 5, 6]);
    let r2 = Rpo256::hash(&[1_u8, 2, 3, 4, 5, 6, 0]);
    assert_ne!(r1, r2);

    // same as above but with input splitting over two elements
    let r1 = Rpo256::hash(&[1_u8, 2, 3, 4, 5, 6, 7]);
    let r2 = Rpo256::hash(&[1_u8, 2, 3, 4, 5, 6, 7, 0]);
    assert_ne!(r1, r2);

    // same as above but with multiple zeros
    let r1 = Rpo256::hash(&[1_u8, 2, 3, 4, 5, 6, 7, 0, 0]);
    let r2 = Rpo256::hash(&[1_u8, 2, 3, 4, 5, 6, 7, 0, 0, 0, 0]);
    assert_ne!(r1, r2);
}

#[test]
fn hash_elements_padding() {
    let e1 = [Felt::new(rand_value()); 2];
    let e2 = [e1[0], e1[1], ZERO];

    let r1 = Rpo256::hash_elements(&e1);
    let r2 = Rpo256::hash_elements(&e2);
    assert_ne!(r1, r2);
}

#[test]
fn hash_elements() {
    let elements = [
        ZERO,
        ONE,
        Felt::new(2),
        Felt::new(3),
        Felt::new(4),
        Felt::new(5),
        Felt::new(6),
        Felt::new(7),
    ];

    let digests: [RpoDigest; 2] = [
        RpoDigest::new(elements[..4].try_into().unwrap()),
        RpoDigest::new(elements[4..8].try_into().unwrap()),
    ];

    let m_result = Rpo256::merge(&digests);
    let h_result = Rpo256::hash_elements(&elements);
    assert_eq!(m_result, h_result);
}

#[test]
fn hash_test_vectors() {
    let elements = [
        ZERO,
        ONE,
        Felt::new(2),
        Felt::new(3),
        Felt::new(4),
        Felt::new(5),
        Felt::new(6),
        Felt::new(7),
        Felt::new(8),
        Felt::new(9),
        Felt::new(10),
        Felt::new(11),
        Felt::new(12),
        Felt::new(13),
        Felt::new(14),
        Felt::new(15),
        Felt::new(16),
        Felt::new(17),
        Felt::new(18),
    ];

    for i in 0..elements.len() {
        let expected = RpoDigest::new(EXPECTED[i]);
        let result = Rpo256::hash_elements(&elements[..(i + 1)]);
        assert_eq!(result, expected);
    }
}

#[test]
fn sponge_bytes_with_remainder_length_wont_panic() {
    // this test asserts that no panic happens in the edge case of an input whose length is not
    // divisible by the binary chunk size. 113 is a non-negligible input length that is prime,
    // and hence guaranteed not to be divisible by any choice of chunk size.
    //
    // this is a preliminary check ahead of the proptest fuzz/stress test below.
    Rpo256::hash(&[0; 113]);
}

#[test]
fn sponge_collision_for_wrapped_field_element() {
    let a = Rpo256::hash(&[0; 8]);
    let b = Rpo256::hash(&Felt::MODULUS.to_le_bytes());
    assert_ne!(a, b);
}

#[test]
fn sponge_zeroes_collision() {
    let mut zeroes = Vec::with_capacity(255);
    let mut set = BTreeSet::new();
    (0..255).for_each(|_| {
        let hash = Rpo256::hash(&zeroes);
        zeroes.push(0);
        // panic if a collision was found
        assert!(set.insert(hash));
    });
}

proptest! {
    #[test]
    fn rpo256_wont_panic_with_arbitrary_input(ref bytes in any::<Vec<u8>>()) {
        Rpo256::hash(bytes);
    }
}

const EXPECTED: [Word; 19] = [
    [
        Felt::new(1502364727743950833),
        Felt::new(5880949717274681448),
        Felt::new(162790463902224431),
        Felt::new(6901340476773664264),
    ],
    [
        Felt::new(7478710183745780580),
        Felt::new(3308077307559720969),
        Felt::new(3383561985796182409),
        Felt::new(17205078494700259815),
    ],
    [
        Felt::new(17439912364295172999),
        Felt::new(17979156346142712171),
        Felt::new(8280795511427637894),
        Felt::new(9349844417834368814),
    ],
    [
        Felt::new(5105868198472766874),
        Felt::new(13090564195691924742),
        Felt::new(1058904296915798891),
        Felt::new(18379501748825152268),
    ],
    [
        Felt::new(9133662113608941286),
        Felt::new(12096627591905525991),
        Felt::new(14963426595993304047),
        Felt::new(13290205840019973377),
    ],
    [
        Felt::new(3134262397541159485),
        Felt::new(10106105871979362399),
        Felt::new(138768814855329459),
        Felt::new(15044809212457404677),
    ],
    [
        Felt::new(162696376578462826),
        Felt::new(4991300494838863586),
        Felt::new(660346084748120605),
        Felt::new(13179389528641752698),
    ],
    [
        Felt::new(2242391899857912644),
        Felt::new(12689382052053305418),
        Felt::new(235236990017815546),
        Felt::new(5046143039268215739),
    ],
    [
        Felt::new(9585630502158073976),
        Felt::new(1310051013427303477),
        Felt::new(7491921222636097758),
        Felt::new(9417501558995216762),
    ],
    [
        Felt::new(1994394001720334744),
        Felt::new(10866209900885216467),
        Felt::new(13836092831163031683),
        Felt::new(10814636682252756697),
    ],
    [
        Felt::new(17486854790732826405),
        Felt::new(17376549265955727562),
        Felt::new(2371059831956435003),
        Felt::new(17585704935858006533),
    ],
    [
        Felt::new(11368277489137713825),
        Felt::new(3906270146963049287),
        Felt::new(10236262408213059745),
        Felt::new(78552867005814007),
    ],
    [
        Felt::new(17899847381280262181),
        Felt::new(14717912805498651446),
        Felt::new(10769146203951775298),
        Felt::new(2774289833490417856),
    ],
    [
        Felt::new(3794717687462954368),
        Felt::new(4386865643074822822),
        Felt::new(8854162840275334305),
        Felt::new(7129983987107225269),
    ],
    [
        Felt::new(7244773535611633983),
        Felt::new(19359923075859320),
        Felt::new(10898655967774994333),
        Felt::new(9319339563065736480),
    ],
    [
        Felt::new(4935426252518736883),
        Felt::new(12584230452580950419),
        Felt::new(8762518969632303998),
        Felt::new(18159875708229758073),
    ],
    [
        Felt::new(14871230873837295931),
        Felt::new(11225255908868362971),
        Felt::new(18100987641405432308),
        Felt::new(1559244340089644233),
    ],
    [
        Felt::new(8348203744950016968),
        Felt::new(4041411241960726733),
        Felt::new(17584743399305468057),
        Felt::new(16836952610803537051),
    ],
    [
        Felt::new(16139797453633030050),
        Felt::new(1090233424040889412),
        Felt::new(10770255347785669036),
        Felt::new(16982398877290254028),
    ],
];
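The hash_padding and sponge tests above rely on the byte-to-element packing used by Rpo256::hash(): bytes are consumed in 7-byte chunks (BINARY_CHUNK_SIZE, per the "at most 7 bytes" comment in hash()), and a partial final chunk is terminated with a single 1 byte before being read as a little-endian u64. The standalone sketch below (not part of the diff) illustrates why appending a zero byte changes the resulting element, and hence the hash.

// A standalone sketch (not part of the diff) of the packing step inside Rpo256::hash().
fn pack_chunk(chunk: &[u8]) -> u64 {
    // assumed to match the crate's BINARY_CHUNK_SIZE for a 64-bit field element
    const BINARY_CHUNK_SIZE: usize = 7;

    let mut buf = [0u8; 8];
    if chunk.len() == BINARY_CHUNK_SIZE {
        buf[..BINARY_CHUNK_SIZE].copy_from_slice(chunk);
    } else {
        buf[..chunk.len()].copy_from_slice(chunk);
        buf[chunk.len()] = 1; // padding marker separating data bytes from trailing zeroes
    }
    u64::from_le_bytes(buf)
}

fn main() {
    // [1, 2, 3] packs to a different u64 than [1, 2, 3, 0], so the corresponding field
    // elements differ, which is exactly what the hash_padding test asserts at the digest level.
    assert_ne!(pack_chunk(&[1, 2, 3]), pack_chunk(&[1, 2, 3, 0]));
}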