From b4e2d63c10910db9daabc0459cdc4e05f1ae70a6 Mon Sep 17 00:00:00 2001
From: Al-Kindi-0 <82364884+Al-Kindi-0@users.noreply.github.com>
Date: Thu, 26 Oct 2023 16:10:17 +0200
Subject: [PATCH] docs: added RPX benchmarks

---
 benches/README.md             |  33 +++++-----
 src/hash/rescue/mod.rs        |   3 +
 src/hash/rescue/rpo/digest.rs |  63 ++++++++++++++---
 src/hash/rescue/rpo/mod.rs    |   4 +-
 src/hash/rescue/rpx/digest.rs | 111 ++++++++++++++++++++++++++++++++--
 src/hash/rescue/rpx/mod.rs    |   4 +-
 6 files changed, 185 insertions(+), 33 deletions(-)

diff --git a/benches/README.md b/benches/README.md
index a1dddd0..d6b7253 100644
--- a/benches/README.md
+++ b/benches/README.md
@@ -6,6 +6,7 @@ In the Miden VM, we make use of different hash functions. Some of these are "tra
 * **Poseidon** as specified [here](https://eprint.iacr.org/2019/458.pdf) and implemented [here](https://github.com/mir-protocol/plonky2/blob/806b88d7d6e69a30dc0b4775f7ba275c45e8b63b/plonky2/src/hash/poseidon_goldilocks.rs) (but in pure Rust, without vectorized instructions).
 * **Rescue Prime (RP)** as specified [here](https://eprint.iacr.org/2020/1143) and implemented [here](https://github.com/novifinancial/winterfell/blob/46dce1adf0/crypto/src/hash/rescue/rp64_256/mod.rs).
 * **Rescue Prime Optimized (RPO)** as specified [here](https://eprint.iacr.org/2022/1577) and implemented in this crate.
+* **Rescue Prime Extended (RPX)**, a variant of the [xHash](https://eprint.iacr.org/2023/1045) hash function, as implemented in this crate.
 
 ## Comparison and Instructions
 
@@ -15,25 +16,25 @@ The second scenario is that of sequential hashing where we take a sequence of le
 
 #### Scenario 1: 2-to-1 hashing `h(a,b)`
 
-| Function            | BLAKE3 | SHA3    | Poseidon  | Rp64_256  | RPO_256 |
-| ------------------- | ------ | --------| --------- | --------- | ------- |
-| Apple M1 Pro        | 80 ns  | 245 ns  | 1.5 us    | 9.1 us    | 5.4 us  |
-| Apple M2            | 76 ns  | 233 ns  | 1.3 us    | 7.9 us    | 5.0 us  |
-| Amazon Graviton 3   | 108 ns |         |           |           | 5.3 us  |
-| AMD Ryzen 9 5950X   | 64 ns  | 273 ns  | 1.2 us    | 9.1 us    | 5.5 us  |
-| Intel Core i5-8279U | 80 ns  |         |           |           | 8.7 us  |
-| Intel Xeon 8375C    | 67 ns  |         |           |           | 8.2 us  |
+| Function            | BLAKE3 | SHA3    | Poseidon  | Rp64_256  | RPO_256 | RPX_256 |
+| ------------------- | ------ | ------- | --------- | --------- | ------- | ------- |
+| Apple M1 Pro        | 76 ns  | 245 ns  | 1.5 µs    | 9.1 µs    | 5.2 µs  | 2.7 µs  |
+| Apple M2 Max        | 71 ns  | 233 ns  | 1.3 µs    | 7.9 µs    | 4.6 µs  | 2.4 µs  |
+| Amazon Graviton 3   | 108 ns |         |           |           | 5.3 µs  | 3.1 µs  |
+| AMD Ryzen 9 5950X   | 64 ns  | 273 ns  | 1.2 µs    | 9.1 µs    | 5.5 µs  |         |
+| Intel Core i5-8279U | 68 ns  | 536 ns  | 2.0 µs    | 13.6 µs   | 8.5 µs  | 4.4 µs  |
+| Intel Xeon 8375C    | 67 ns  |         |           |           | 8.2 µs  |         |
 
 #### Scenario 2: Sequential hashing of 100 elements `h([a_0,...,a_99])`
 
-| Function            | BLAKE3 | SHA3    | Poseidon  | Rp64_256  | RPO_256 |
-| ------------------- | -------| ------- | --------- | --------- | ------- |
-| Apple M1 Pro        | 1.0 us | 1.5 us  | 19.4 us   | 118 us    | 70 us   |
-| Apple M2            | 1.0 us | 1.5 us  | 17.4 us   | 103 us    | 65 us   |
-| Amazon Graviton 3   | 1.4 us |         |           |           | 69 us   |
-| AMD Ryzen 9 5950X   | 0.8 us | 1.7 us  | 15.7 us   | 120 us    | 72 us   |
-| Intel Core i5-8279U | 1.0 us |         |           |           | 116 us  |
-| Intel Xeon 8375C    | 0.8 ns |         |           |           | 110 us  |
+| Function            | BLAKE3 | SHA3    | Poseidon  | Rp64_256  | RPO_256 | RPX_256 |
+| ------------------- | -------| ------- | --------- | --------- | ------- | ------- |
+| Apple M1 Pro        | 1.0 µs | 1.5 µs  | 19.4 µs   | 118 µs    | 69 µs   | 35 µs   |
+| Apple M2 Max        | 0.9 µs | 1.5 µs  | 17.4 µs   | 103 µs    | 60 µs   | 31 µs   |
+| Amazon Graviton 3   | 1.4 µs |         |           |           | 69 µs   | 41 µs   |
+| AMD Ryzen 9 5950X   | 0.8 µs | 1.7 µs  | 15.7 µs   | 120 µs    | 72 µs   |         |
+| Intel Core i5-8279U | 0.9 µs |         |           |           | 107 µs  | 56 µs   |
+| Intel Xeon 8375C    | 0.8 µs |         |           |           | 110 µs  |         |
 
 Notes:
 - On Graviton 3, RPO256 is run with SVE acceleration enabled.
diff --git a/src/hash/rescue/mod.rs b/src/hash/rescue/mod.rs
index 3d76203..2fa942a 100644
--- a/src/hash/rescue/mod.rs
+++ b/src/hash/rescue/mod.rs
@@ -43,6 +43,9 @@ const CAPACITY_RANGE: Range<usize> = 0..4;
 const DIGEST_RANGE: Range<usize> = 4..8;
 const DIGEST_SIZE: usize = DIGEST_RANGE.end - DIGEST_RANGE.start;
 
+/// The number of bytes needed to encode a digest
+const DIGEST_BYTES: usize = 32;
+
 /// The number of byte chunks defining a field element when hashing a sequence of bytes
 const BINARY_CHUNK_SIZE: usize = 7;
 
diff --git a/src/hash/rescue/rpo/digest.rs b/src/hash/rescue/rpo/digest.rs
index 8252bef..9bed097 100644
--- a/src/hash/rescue/rpo/digest.rs
+++ b/src/hash/rescue/rpo/digest.rs
@@ -1,4 +1,4 @@
-use super::{Digest, Felt, StarkField, DIGEST_SIZE, ZERO};
+use super::{Digest, Felt, StarkField, DIGEST_BYTES, DIGEST_SIZE, ZERO};
 use crate::utils::{
     bytes_to_hex_string, hex_to_bytes, string::String, ByteReader, ByteWriter, Deserializable,
     DeserializationError, HexParseError, Serializable,
@@ -6,9 +6,6 @@ use crate::utils::{
 use core::{cmp::Ordering, fmt::Display, ops::Deref};
 use winter_utils::Randomizable;
 
-/// The number of bytes needed to encoded a digest
-pub const DIGEST_BYTES: usize = 32;
-
 // DIGEST TRAIT IMPLEMENTATIONS
 // ================================================================================================
 
@@ -172,9 +169,21 @@ impl From<&RpoDigest> for String {
     }
 }
 
-// CONVERSIONS: TO DIGEST
+// CONVERSIONS: TO RPO DIGEST
 // ================================================================================================
 
+#[derive(Copy, Clone, Debug)]
+pub enum RpoDigestError {
+    /// The provided u64 integer does not fit in the field's modulus.
+    InvalidInteger,
+}
+
+impl From<&[Felt; DIGEST_SIZE]> for RpoDigest {
+    fn from(value: &[Felt; DIGEST_SIZE]) -> Self {
+        Self(*value)
+    }
+}
+
 impl From<[Felt; DIGEST_SIZE]> for RpoDigest {
     fn from(value: [Felt; DIGEST_SIZE]) -> Self {
         Self(value)
@@ -200,6 +209,46 @@ impl TryFrom<[u8; DIGEST_BYTES]> for RpoDigest {
     }
 }
 
+impl TryFrom<&[u8; DIGEST_BYTES]> for RpoDigest {
+    type Error = HexParseError;
+
+    fn try_from(value: &[u8; DIGEST_BYTES]) -> Result<Self, Self::Error> {
+        (*value).try_into()
+    }
+}
+
+impl TryFrom<&[u8]> for RpoDigest {
+    type Error = HexParseError;
+
+    fn try_from(value: &[u8]) -> Result<Self, Self::Error> {
+        (*value).try_into()
+    }
+}
+
+impl TryFrom<[u64; DIGEST_SIZE]> for RpoDigest {
+    type Error = RpoDigestError;
+
+    fn try_from(value: [u64; DIGEST_SIZE]) -> Result<Self, Self::Error> {
+        if value[0] >= Felt::MODULUS
+            || value[1] >= Felt::MODULUS
+            || value[2] >= Felt::MODULUS
+            || value[3] >= Felt::MODULUS
+        {
+            return Err(RpoDigestError::InvalidInteger);
+        }
+
+        Ok(Self([value[0].into(), value[1].into(), value[2].into(), value[3].into()]))
+    }
+}
+
+impl TryFrom<&[u64; DIGEST_SIZE]> for RpoDigest {
+    type Error = RpoDigestError;
+
+    fn try_from(value: &[u64; DIGEST_SIZE]) -> Result<Self, Self::Error> {
+        (*value).try_into()
+    }
+}
+
 impl TryFrom<&str> for RpoDigest {
     type Error = HexParseError;
 
@@ -258,8 +307,8 @@ impl Deserializable for RpoDigest {
 
 #[cfg(test)]
 mod tests {
-    use super::{Deserializable, Felt, RpoDigest, Serializable, DIGEST_BYTES};
-    use crate::utils::SliceReader;
+    use super::{Deserializable, Felt, RpoDigest, Serializable, DIGEST_BYTES, DIGEST_SIZE};
+    use crate::utils::{string::String, SliceReader};
     use rand_utils::rand_value;
 
     #[test]
diff --git a/src/hash/rescue/rpo/mod.rs b/src/hash/rescue/rpo/mod.rs
index a708629..c28f87d 100644
--- a/src/hash/rescue/rpo/mod.rs
+++ b/src/hash/rescue/rpo/mod.rs
@@ -2,8 +2,8 @@ use super::{
     add_constants, apply_inv_sbox, apply_mds, apply_sbox,
     optimized_add_constants_and_apply_inv_sbox, optimized_add_constants_and_apply_sbox, Digest,
     ElementHasher, Felt, FieldElement, Hasher, StarkField, ARK1, ARK2, BINARY_CHUNK_SIZE,
-    CAPACITY_RANGE, DIGEST_RANGE, DIGEST_SIZE, INPUT1_RANGE, INPUT2_RANGE, MDS, NUM_ROUNDS, ONE,
-    RATE_RANGE, RATE_WIDTH, STATE_WIDTH, ZERO,
+    CAPACITY_RANGE, DIGEST_BYTES, DIGEST_RANGE, DIGEST_SIZE, INPUT1_RANGE, INPUT2_RANGE, MDS,
+    NUM_ROUNDS, ONE, RATE_RANGE, RATE_WIDTH, STATE_WIDTH, ZERO,
 };
 use core::{convert::TryInto, ops::Range};
 
diff --git a/src/hash/rescue/rpx/digest.rs b/src/hash/rescue/rpx/digest.rs
index 26a9bee..a9a236a 100644
--- a/src/hash/rescue/rpx/digest.rs
+++ b/src/hash/rescue/rpx/digest.rs
@@ -1,4 +1,4 @@
-use super::{Digest, Felt, StarkField, DIGEST_SIZE, ZERO};
+use super::{Digest, Felt, StarkField, DIGEST_BYTES, DIGEST_SIZE, ZERO};
 use crate::utils::{
     bytes_to_hex_string, hex_to_bytes, string::String, ByteReader, ByteWriter, Deserializable,
     DeserializationError, HexParseError, Serializable,
@@ -6,9 +6,6 @@ use crate::utils::{
 use core::{cmp::Ordering, fmt::Display, ops::Deref};
 use winter_utils::Randomizable;
 
-/// The number of bytes needed to encoded a digest
-pub const DIGEST_BYTES: usize = 32;
-
 // DIGEST TRAIT IMPLEMENTATIONS
 // ================================================================================================
 
@@ -175,6 +172,18 @@ impl From<&RpxDigest> for String {
 // CONVERSIONS: TO RPX DIGEST
 // ================================================================================================
 
+#[derive(Copy, Clone, Debug)]
+pub enum RpxDigestError {
+    /// The provided u64 integer does not fit in the field's modulus.
+    InvalidInteger,
+}
+
+impl From<&[Felt; DIGEST_SIZE]> for RpxDigest {
+    fn from(value: &[Felt; DIGEST_SIZE]) -> Self {
+        Self(*value)
+    }
+}
+
 impl From<[Felt; DIGEST_SIZE]> for RpxDigest {
     fn from(value: [Felt; DIGEST_SIZE]) -> Self {
         Self(value)
@@ -200,6 +209,46 @@ impl TryFrom<[u8; DIGEST_BYTES]> for RpxDigest {
     }
 }
 
+impl TryFrom<&[u8; DIGEST_BYTES]> for RpxDigest {
+    type Error = HexParseError;
+
+    fn try_from(value: &[u8; DIGEST_BYTES]) -> Result<Self, Self::Error> {
+        (*value).try_into()
+    }
+}
+
+impl TryFrom<&[u8]> for RpxDigest {
+    type Error = HexParseError;
+
+    fn try_from(value: &[u8]) -> Result<Self, Self::Error> {
+        (*value).try_into()
+    }
+}
+
+impl TryFrom<[u64; DIGEST_SIZE]> for RpxDigest {
+    type Error = RpxDigestError;
+
+    fn try_from(value: [u64; DIGEST_SIZE]) -> Result<Self, Self::Error> {
+        if value[0] >= Felt::MODULUS
+            || value[1] >= Felt::MODULUS
+            || value[2] >= Felt::MODULUS
+            || value[3] >= Felt::MODULUS
+        {
+            return Err(RpxDigestError::InvalidInteger);
+        }
+
+        Ok(Self([value[0].into(), value[1].into(), value[2].into(), value[3].into()]))
+    }
+}
+
+impl TryFrom<&[u64; DIGEST_SIZE]> for RpxDigest {
+    type Error = RpxDigestError;
+
+    fn try_from(value: &[u64; DIGEST_SIZE]) -> Result<Self, Self::Error> {
+        (*value).try_into()
+    }
+}
+
 impl TryFrom<&str> for RpxDigest {
     type Error = HexParseError;
 
@@ -258,8 +307,8 @@ impl Deserializable for RpxDigest {
 
 #[cfg(test)]
 mod tests {
-    use super::{Deserializable, Felt, RpxDigest, Serializable, DIGEST_BYTES};
-    use crate::utils::SliceReader;
+    use super::{Deserializable, Felt, RpxDigest, Serializable, DIGEST_BYTES, DIGEST_SIZE};
+    use crate::utils::{string::String, SliceReader};
     use rand_utils::rand_value;
 
     #[test]
@@ -296,4 +345,54 @@ mod tests {
 
         assert_eq!(digest, round_trip);
     }
+
+    #[test]
+    fn test_conversions() {
+        let digest = RpxDigest([
+            Felt::new(rand_value()),
+            Felt::new(rand_value()),
+            Felt::new(rand_value()),
+            Felt::new(rand_value()),
+        ]);
+
+        let v: [Felt; DIGEST_SIZE] = digest.into();
+        let v2: RpxDigest = v.into();
+        assert_eq!(digest, v2);
+
+        let v: [Felt; DIGEST_SIZE] = (&digest).into();
+        let v2: RpxDigest = v.into();
+        assert_eq!(digest, v2);
+
+        let v: [u64; DIGEST_SIZE] = digest.into();
+        let v2: RpxDigest = v.try_into().unwrap();
+        assert_eq!(digest, v2);
+
+        let v: [u64; DIGEST_SIZE] = (&digest).into();
+        let v2: RpxDigest = v.try_into().unwrap();
+        assert_eq!(digest, v2);
+
+        let v: [u8; DIGEST_BYTES] = digest.into();
+        let v2: RpxDigest = v.try_into().unwrap();
+        assert_eq!(digest, v2);
+
+        let v: [u8; DIGEST_BYTES] = (&digest).into();
+        let v2: RpxDigest = v.try_into().unwrap();
+        assert_eq!(digest, v2);
+
+        let v: String = digest.into();
+        let v2: RpxDigest = v.try_into().unwrap();
+        assert_eq!(digest, v2);
+
+        let v: String = (&digest).into();
+        let v2: RpxDigest = v.try_into().unwrap();
+        assert_eq!(digest, v2);
+
+        let v: [u8; DIGEST_BYTES] = digest.into();
+        let v2: RpxDigest = (&v).try_into().unwrap();
+        assert_eq!(digest, v2);
+
+        let v: [u8; DIGEST_BYTES] = (&digest).into();
+        let v2: RpxDigest = (&v).try_into().unwrap();
+        assert_eq!(digest, v2);
+    }
 }
diff --git a/src/hash/rescue/rpx/mod.rs b/src/hash/rescue/rpx/mod.rs
index 38161eb..541310e 100644
--- a/src/hash/rescue/rpx/mod.rs
+++ b/src/hash/rescue/rpx/mod.rs
@@ -2,8 +2,8 @@ use super::{
     add_constants, apply_inv_sbox, apply_mds, apply_sbox,
     optimized_add_constants_and_apply_inv_sbox, optimized_add_constants_and_apply_sbox,
     CubeExtension, Digest, ElementHasher, Felt, FieldElement, Hasher, StarkField, ARK1, ARK2,
-    BINARY_CHUNK_SIZE, CAPACITY_RANGE, DIGEST_RANGE, DIGEST_SIZE, INPUT1_RANGE, INPUT2_RANGE, MDS,
-    NUM_ROUNDS, ONE, RATE_RANGE, RATE_WIDTH, STATE_WIDTH, ZERO,
+    BINARY_CHUNK_SIZE, CAPACITY_RANGE, DIGEST_BYTES, DIGEST_RANGE, DIGEST_SIZE, INPUT1_RANGE,
+    INPUT2_RANGE, MDS, NUM_ROUNDS, ONE, RATE_RANGE, RATE_WIDTH, STATE_WIDTH, ZERO,
 };
 use core::{convert::TryInto, ops::Range};
 
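
For reference, the two benchmark scenarios described in the README portion of this patch map onto the hasher API roughly as shown below. This is a minimal sketch and not part of the patch: the module paths (`miden_crypto::hash::rpo::Rpo256`, `miden_crypto::hash::rpx::Rpx256`, `miden_crypto::Felt`) and the assumption that `Rpx256` exposes the same inherent `hash`/`merge`/`hash_elements` helpers as `Rpo256` should be checked against the crate version you build.

```rust
// Sketch only: the two benchmark scenarios expressed against the assumed hasher API.
use miden_crypto::{
    hash::{rpo::Rpo256, rpx::Rpx256},
    Felt,
};

fn main() {
    // Scenario 1: 2-to-1 hashing h(a, b), i.e. merging two digests into one.
    let a = Rpo256::hash(b"input a");
    let b = Rpo256::hash(b"input b");
    let merged = Rpo256::merge(&[a, b]);
    println!("RPO 2-to-1:     {merged:?}");

    // Scenario 2: sequential hashing of 100 field elements h([a_0, ..., a_99]).
    let elements: Vec<Felt> = (0..100u64).map(Felt::new).collect();
    println!("RPO sequential: {:?}", Rpo256::hash_elements(&elements));
    println!("RPX sequential: {:?}", Rpx256::hash_elements(&elements));
}
```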
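The `TryFrom<[u64; DIGEST_SIZE]>` conversions added above are fallible because a `u64` limb may lie outside the field. The sketch below exercises only conversions that appear in this patch (plus the pre-existing `From` impls its test relies on); the `miden_crypto::hash::rpx` path and crate-root `Felt` re-export are assumptions.

```rust
// Sketch only: round-tripping a digest through u64 limbs via the new conversions.
use miden_crypto::{hash::rpx::RpxDigest, Felt};

fn main() {
    // Build a digest from four field elements via the existing From<[Felt; 4]> impl.
    let digest: RpxDigest = [Felt::new(1), Felt::new(2), Felt::new(3), Felt::new(4)].into();

    // Flattening a digest into u64 limbs is infallible.
    let limbs: [u64; 4] = digest.into();

    // The reverse direction is fallible: each limb must be smaller than the Goldilocks
    // modulus 2^64 - 2^32 + 1, otherwise RpxDigestError::InvalidInteger is returned.
    let round_trip: RpxDigest = limbs.try_into().expect("limbs came from a valid digest");
    assert_eq!(digest, round_trip);

    // u64::MAX is larger than the modulus, so this conversion must fail.
    assert!(RpxDigest::try_from([u64::MAX, 0, 0, 0]).is_err());
}
```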