Mirror of https://github.com/arnaucube/ark-curves-cherry-picked.git, synced 2026-01-08 15:01:29 +01:00
Non-canonical infinity point & bad flags in BLS12-381 serialization should fail (#176)
Co-authored-by: Kevaundray Wedderburn <kevtheappdev@gmail.com>
@@ -14,6 +14,8 @@
 ### Bugfixes
 
+- [\#176](https://github.com/arkworks-rs/curves/pull/176) Non-canonical infinity point and bad flags in BLS12-381 serialization should fail.
+
 ## v0.4.0
 
 - [\#76](https://github.com/arkworks-rs/curves/pull/76) twisted Edwards parameters for bls12-377
 - Fixed curve benches
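A minimal sketch of the behavior this entry describes, assuming an `ark-bls12-381` release that includes this fix:

use ark_bls12_381::G1Affine;
use ark_serialize::CanonicalDeserialize;

fn main() {
    // Canonical compressed identity: compressed+infinity flags (0xc0), all other bytes zero.
    let mut canonical = [0u8; 48];
    canonical[0] = 0xc0;
    assert!(G1Affine::deserialize_compressed(&canonical[..]).is_ok());

    // Non-canonical identity: same flags, but a stray non-zero byte in the x window.
    // Before this fix it silently decoded to the identity; now it must error.
    let mut non_canonical = canonical;
    non_canonical[1] = 0x10;
    assert!(G1Affine::deserialize_compressed(&non_canonical[..]).is_err());
}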
@@ -180,6 +180,7 @@ mod test {
     use super::*;
    use crate::g1;
+    use ark_serialize::CanonicalDeserialize;
     use ark_std::{rand::Rng, UniformRand};
 
     fn sample_unchecked() -> Affine<g1::Config> {
@@ -204,4 +205,55 @@ mod test {
         assert!(p.is_in_correct_subgroup_assuming_on_curve());
     }
+
+    #[test]
+    fn non_canonical_identity_point() {
+        let non_canonical_hex = "c01000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000";
+        let non_canonical_bytes = hex::decode(non_canonical_hex).unwrap();
+        assert_eq!(non_canonical_bytes.len(), 48);
+
+        let maybe_affine_point: Result<G1Affine, ark_serialize::SerializationError> =
+            CanonicalDeserialize::deserialize_compressed(&non_canonical_bytes[..]);
+
+        assert!(maybe_affine_point.is_err());
+
+        let non_canonical_hex_uncompressed = "c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001";
+        let non_canonical_bytes = hex::decode(non_canonical_hex_uncompressed).unwrap();
+        assert_eq!(non_canonical_bytes.len(), 96);
+
+        let maybe_affine_point: Result<G1Affine, ark_serialize::SerializationError> =
+            CanonicalDeserialize::deserialize_uncompressed(&non_canonical_bytes[..]);
+
+        assert!(maybe_affine_point.is_err())
+    }
+
+    #[test]
+    fn bad_flag_combination() {
+        // See https://github.com/zkcrypto/pairing/tree/fa8103764a07bd273927447d434de18aace252d3/src/bls12_381#serialization
+        // - Bit 1 is compressed/uncompressed
+        // - Bit 2 is infinity
+        // - Bit 3 is lexicographical order for compressed point deserialization
+        // Hence `0b1110` ("e" in hex) or `0b0110` ("6" in hex) are both nonsensical.
+
+        // uncompressed, but lexicographically largest flag is set
+        let non_canonical_hex = "600000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000";
+        let non_canonical_bytes = hex::decode(non_canonical_hex).unwrap();
+        assert_eq!(non_canonical_bytes.len(), 48);
+
+        let maybe_affine_point: Result<G1Affine, ark_serialize::SerializationError> =
+            CanonicalDeserialize::deserialize_compressed(&non_canonical_bytes[..]);
+
+        assert!(maybe_affine_point.is_err());
+
+        // compressed, but infinity flag is set and lexicographically largest flag is
+        // set
+        let non_canonical_hex_2 = "e00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000";
+        let non_canonical_bytes = hex::decode(non_canonical_hex_2).unwrap();
+        assert_eq!(non_canonical_bytes.len(), 48);
+
+        let maybe_affine_point: Result<G1Affine, ark_serialize::SerializationError> =
+            CanonicalDeserialize::deserialize_compressed(&non_canonical_bytes[..]);
+        assert!(maybe_affine_point.is_err());
+    }
 }
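For reference, the flag-byte arithmetic behind the hex prefixes used in these tests — a minimal sketch in plain Rust, no dependencies:

const COMPRESSED: u8 = 1 << 7; // 0b1000_0000, "bit 1" in the comment above
const INFINITY: u8 = 1 << 6;   // 0b0100_0000, "bit 2"
const LARGEST: u8 = 1 << 5;    // 0b0010_0000, "bit 3"

fn main() {
    assert_eq!(COMPRESSED | INFINITY, 0xc0);           // "c0...": compressed point at infinity
    assert_eq!(INFINITY | LARGEST, 0x60);              // "60...": largest flag without compression (invalid)
    assert_eq!(COMPRESSED | INFINITY | LARGEST, 0xe0); // "e0...": largest flag on infinity (invalid)
}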
@@ -14,17 +14,28 @@ pub struct EncodingFlags {
 }
 
 impl EncodingFlags {
-    pub fn get_flags(bytes: &[u8]) -> Self {
+    /// Fetches the flags from the byte-string
+    pub fn get_flags(bytes: &[u8]) -> Result<Self, SerializationError> {
         let compression_flag_set = (bytes[0] >> 7) & 1;
         let infinity_flag_set = (bytes[0] >> 6) & 1;
         let sort_flag_set = (bytes[0] >> 5) & 1;
 
-        Self {
-            is_compressed: compression_flag_set == 1,
-            is_infinity: infinity_flag_set == 1,
-            is_lexographically_largest: sort_flag_set == 1,
-        }
+        let is_compressed = compression_flag_set == 1;
+        let is_infinity = infinity_flag_set == 1;
+        let is_lexographically_largest = sort_flag_set == 1;
+
+        if is_lexographically_largest && (!is_compressed || is_infinity) {
+            return Err(SerializationError::InvalidData);
+        }
+
+        Ok(Self {
+            is_compressed,
+            is_infinity,
+            is_lexographically_largest,
+        })
     }
 
+    /// Encodes the flags into the byte-string
     pub fn encode_flags(&self, bytes: &mut [u8]) {
         if self.is_compressed {
             bytes[0] |= 1 << 7;
@@ -38,6 +49,13 @@ impl EncodingFlags {
             bytes[0] |= 1 << 5;
         }
     }
+
+    /// Removes the flags from the byte-string.
+    ///
+    /// This reverses the effects of `encode_flags`.
+    pub fn remove_flags(bytes: &mut [u8]) {
+        bytes[0] &= 0b0001_1111;
+    }
 }
 
 pub(crate) fn deserialize_fq(bytes: [u8; 48]) -> Option<Fq> {
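A self-contained sketch of the rule `get_flags` now enforces (illustrative names, not the crate's internal API): the "lexicographically largest" bit is only meaningful for a compressed, non-infinity point, so any other combination is rejected up front.

fn check_flag_byte(byte0: u8) -> Result<(bool, bool, bool), ()> {
    let is_compressed = (byte0 >> 7) & 1 == 1;
    let is_infinity = (byte0 >> 6) & 1 == 1;
    let is_largest = (byte0 >> 5) & 1 == 1;
    // The combinations this PR starts rejecting:
    if is_largest && (!is_compressed || is_infinity) {
        return Err(());
    }
    Ok((is_compressed, is_infinity, is_largest))
}

fn main() {
    assert!(check_flag_byte(0xa0).is_ok());  // compressed + largest: meaningful
    assert!(check_flag_byte(0x60).is_err()); // largest without compression
    assert!(check_flag_byte(0xe0).is_err()); // largest on a point at infinity
}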
@@ -71,20 +89,15 @@ pub(crate) fn serialize_fq(field: Fq) -> [u8; 48] {
     result
 }
 
-pub(crate) fn read_fq_with_offset(
-    bytes: &[u8],
-    offset: usize,
-    mask: bool,
-) -> Result<Fq, ark_serialize::SerializationError> {
+fn read_bytes_with_offset(bytes: &[u8], offset: usize, mask: bool) -> [u8; G1_SERIALIZED_SIZE] {
     let mut tmp = [0; G1_SERIALIZED_SIZE];
     // read `G1_SERIALIZED_SIZE` bytes
     tmp.copy_from_slice(&bytes[offset * G1_SERIALIZED_SIZE..G1_SERIALIZED_SIZE * (offset + 1)]);
 
     if mask {
         // Mask away the flag bits
-        tmp[0] &= 0b0001_1111;
+        EncodingFlags::remove_flags(&mut tmp);
     }
-    deserialize_fq(tmp).ok_or(SerializationError::InvalidData)
+    tmp
 }
 
 pub(crate) fn read_g1_compressed<R: ark_serialize::Read>(
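The slicing arithmetic used by `read_bytes_with_offset`, in isolation — a minimal sketch; each base-field element occupies one 48-byte window, and only window 0 carries flag bits:

const G1_SERIALIZED_SIZE: usize = 48;

// (start, end) byte range of the `offset`-th field-element window.
fn window(offset: usize) -> (usize, usize) {
    (offset * G1_SERIALIZED_SIZE, (offset + 1) * G1_SERIALIZED_SIZE)
}

fn main() {
    assert_eq!(window(0), (0, 48));    // x (masked: flag bits live in byte 0)
    assert_eq!(window(1), (48, 96));   // y, or the second x component for G2
    assert_eq!(window(3), (144, 192)); // last window of an uncompressed G2 point
}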
@@ -97,20 +110,26 @@ pub(crate) fn read_g1_compressed<R: ark_serialize::Read>(
         .ok_or(SerializationError::InvalidData)?;
 
     // Obtain the three flags from the start of the byte sequence
-    let flags = EncodingFlags::get_flags(&bytes[..]);
+    let flags = EncodingFlags::get_flags(&bytes[..])?;
 
-    // we expect to be deserializing a compressed point
+    // We expect to be deserializing a compressed point
     if !flags.is_compressed {
         return Err(SerializationError::UnexpectedFlags);
     }
 
+    // Attempt to obtain the x-coordinate
+    let x_bytes = read_bytes_with_offset(&bytes, 0, true);
+
     if flags.is_infinity {
+        // Check that the `x` co-ordinate was `0`
+        if x_bytes != [0u8; 48] {
+            return Err(SerializationError::InvalidData);
+        }
+
         return Ok(G1Affine::zero());
     }
 
-    // Attempt to obtain the x-coordinate
-    let x = read_fq_with_offset(&bytes, 0, true)?;
-
+    let x = deserialize_fq(x_bytes).ok_or(SerializationError::InvalidData)?;
     let p = G1Affine::get_point_from_x_unchecked(x, flags.is_lexographically_largest)
         .ok_or(SerializationError::InvalidData)?;
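Note that the infinity check above compares raw masked bytes rather than a decoded field element: a buffer such as `c010…00` still decodes to a perfectly valid field element after masking, so only a byte-level all-zero test catches the non-canonical encoding. A minimal sketch of that check:

// Canonical infinity requires the masked x window to be identically zero.
fn is_canonical_infinity(x_window: &[u8; 48]) -> bool {
    x_window.iter().all(|&b| b == 0)
}

fn main() {
    let mut x = [0u8; 48];
    assert!(is_canonical_infinity(&x));
    x[1] = 0x10; // the stray byte from the "c010..." test vector
    assert!(!is_canonical_infinity(&x));
}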
@@ -126,22 +145,27 @@ pub(crate) fn read_g1_uncompressed<R: ark_serialize::Read>(
         .map_err(|_| SerializationError::InvalidData)?;
 
     // Obtain the three flags from the start of the byte sequence
-    let flags = EncodingFlags::get_flags(&bytes[..]);
+    let flags = EncodingFlags::get_flags(&bytes[..])?;
 
     // we expect to be deserializing an uncompressed point
     if flags.is_compressed {
         return Err(SerializationError::UnexpectedFlags);
     }
 
+    let x_bytes = read_bytes_with_offset(&bytes, 0, true);
+    let y_bytes = read_bytes_with_offset(&bytes, 1, false);
+
     if flags.is_infinity {
+        if x_bytes != [0u8; 48] || y_bytes != [0u8; 48] {
+            return Err(SerializationError::InvalidData);
+        }
         return Ok(G1Affine::zero());
     }
 
     // Attempt to obtain the x-coordinate
-    let x = read_fq_with_offset(&bytes, 0, true)?;
+    let x = deserialize_fq(x_bytes).ok_or(SerializationError::InvalidData)?;
     // Attempt to obtain the y-coordinate
-    let y = read_fq_with_offset(&bytes, 1, false)?;
-
+    let y = deserialize_fq(y_bytes).ok_or(SerializationError::InvalidData)?;
     let p = G1Affine::new_unchecked(x, y);
 
     Ok(p)
@@ -156,21 +180,26 @@ pub(crate) fn read_g2_compressed<R: ark_serialize::Read>(
         .map_err(|_| SerializationError::InvalidData)?;
 
     // Obtain the three flags from the start of the byte sequence
-    let flags = EncodingFlags::get_flags(&bytes);
+    let flags = EncodingFlags::get_flags(&bytes)?;
 
     // we expect to be deserializing a compressed point
     if !flags.is_compressed {
         return Err(SerializationError::UnexpectedFlags);
     }
 
+    let xc1_bytes = read_bytes_with_offset(&bytes, 0, true);
+    let xc0_bytes = read_bytes_with_offset(&bytes, 1, false);
+
     if flags.is_infinity {
+        if xc1_bytes != [0u8; 48] || xc0_bytes != [0u8; 48] {
+            return Err(SerializationError::InvalidData);
+        }
         return Ok(G2Affine::zero());
     }
 
-    // Attempt to obtain the x-coordinate
-    let xc1 = read_fq_with_offset(&bytes, 0, true)?;
-    let xc0 = read_fq_with_offset(&bytes, 1, false)?;
-
+    let xc1 = deserialize_fq(xc1_bytes).ok_or(SerializationError::InvalidData)?;
+    let xc0 = deserialize_fq(xc0_bytes).ok_or(SerializationError::InvalidData)?;
     let x = Fq2::new(xc0, xc1);
 
     let p = G2Affine::get_point_from_x_unchecked(x, flags.is_lexographically_largest)
@@ -188,25 +217,39 @@ pub(crate) fn read_g2_uncompressed<R: ark_serialize::Read>(
         .map_err(|_| SerializationError::InvalidData)?;
 
     // Obtain the three flags from the start of the byte sequence
-    let flags = EncodingFlags::get_flags(&bytes);
+    let flags = EncodingFlags::get_flags(&bytes)?;
 
     // we expect to be deserializing an uncompressed point
     if flags.is_compressed {
         return Err(SerializationError::UnexpectedFlags);
    }
 
+    let xc1_bytes = read_bytes_with_offset(&bytes, 0, true);
+    let xc0_bytes = read_bytes_with_offset(&bytes, 1, false);
+
+    let yc1_bytes = read_bytes_with_offset(&bytes, 2, false);
+    let yc0_bytes = read_bytes_with_offset(&bytes, 3, false);
+
+    if flags.is_infinity {
+        if xc1_bytes != [0u8; 48]
+            || xc0_bytes != [0u8; 48]
+            || yc1_bytes != [0u8; 48]
+            || yc0_bytes != [0u8; 48]
+        {
+            return Err(SerializationError::InvalidData);
+        }
+        return Ok(G2Affine::zero());
+    }
+
+    let xc1 = deserialize_fq(xc1_bytes).ok_or(SerializationError::InvalidData)?;
+    let xc0 = deserialize_fq(xc0_bytes).ok_or(SerializationError::InvalidData)?;
+    let yc1 = deserialize_fq(yc1_bytes).ok_or(SerializationError::InvalidData)?;
+    let yc0 = deserialize_fq(yc0_bytes).ok_or(SerializationError::InvalidData)?;
+
     // Attempt to obtain the x-coordinate
-    let xc1 = read_fq_with_offset(&bytes, 0, true)?;
-    let xc0 = read_fq_with_offset(&bytes, 1, false)?;
     let x = Fq2::new(xc0, xc1);
 
     // Attempt to obtain the y-coordinate
-    let yc1 = read_fq_with_offset(&bytes, 2, false)?;
-    let yc0 = read_fq_with_offset(&bytes, 3, false)?;
     let y = Fq2::new(yc0, yc1);
 
     let p = G2Affine::new_unchecked(x, y);
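Finally, an end-to-end sketch of the G2 path, again assuming an `ark-bls12-381` release that includes this fix:

use ark_bls12_381::G2Affine;
use ark_ec::AffineRepr;
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};

fn main() {
    // The identity serializes uncompressed to 192 bytes: the infinity flag plus zeros.
    let id = G2Affine::zero();
    let mut buf = Vec::new();
    id.serialize_uncompressed(&mut buf).unwrap();
    assert_eq!(buf.len(), 192);
    assert!(G2Affine::deserialize_uncompressed(&buf[..]).is_ok());

    // Corrupting any coordinate byte of the identity is non-canonical and must now fail.
    buf[191] = 1;
    assert!(G2Affine::deserialize_uncompressed(&buf[..]).is_err());
}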