
Zcash-style serialization for BLS12-381 (#129)

Co-authored-by: kevaundray <37423678+kevaundray@users.noreply.github.com>
Co-authored-by: Pratyush Mishra <pratyushmishra@berkeley.edu>
mmagician committed 2 years ago (via GitHub)
commit 138b23f2fa
11 changed files with 480 additions and 64 deletions

  1. bls12_381/Cargo.toml (+2, -1)
  2. bls12_381/src/curves/g1.rs (+66, -0)
  3. bls12_381/src/curves/g2.rs (+75, -1)
  4. bls12_381/src/curves/mod.rs (+1, -0)
  5. bls12_381/src/curves/tests.rs (+0, -62)
  6. bls12_381/src/curves/tests/g1_compressed_valid_test_vectors.dat (BIN)
  7. bls12_381/src/curves/tests/g1_uncompressed_valid_test_vectors.dat (BIN)
  8. bls12_381/src/curves/tests/g2_compressed_valid_test_vectors.dat (BIN)
  9. bls12_381/src/curves/tests/g2_uncompressed_valid_test_vectors.dat (BIN)
  10. bls12_381/src/curves/tests/mod.rs (+119, -0)
  11. bls12_381/src/curves/util.rs (+217, -0)

bls12_381/Cargo.toml (+2, -1)

@@ -16,11 +16,12 @@ edition = "2021"
 ark-ff = { version="^0.3.0", default-features = false }
 ark-ec = { version="^0.3.0", default-features = false }
 ark-std = { version="^0.3.0", default-features = false }
+ark-serialize = { version="^0.3.0", default-features = false }

 [dev-dependencies]
-ark-serialize = { version="^0.3.0", default-features = false }
 ark-algebra-test-templates = { version="^0.3.0", default-features = false }
 ark-algebra-bench-templates = { version = "^0.3.0", default-features = false }
+hex = "^0.4.0"

 [features]
 default = [ "curve" ]

bls12_381/src/curves/g1.rs (+66, -0)

@@ -7,7 +7,13 @@ use ark_ec::{
     AffineRepr, Group,
 };
 use ark_ff::{Field, MontFp, PrimeField, Zero};
+use ark_serialize::{Compress, SerializationError};
 use ark_std::{ops::Neg, One};
+
+use crate::util::{
+    read_g1_compressed, read_g1_uncompressed, serialize_fq, EncodingFlags, G1_SERIALIZED_SIZE,
+};

 pub type G1Affine = bls12::G1Affine<crate::Parameters>;
 pub type G1Projective = bls12::G1Projective<crate::Parameters>;
@@ -70,6 +76,66 @@ impl SWCurveConfig for Parameters {
         let h_eff = one_minus_x().into_bigint();
         Parameters::mul_affine(&p, h_eff.as_ref()).into()
     }
+
+    fn deserialize_with_mode<R: ark_serialize::Read>(
+        mut reader: R,
+        compress: ark_serialize::Compress,
+        validate: ark_serialize::Validate,
+    ) -> Result<Affine<Self>, ark_serialize::SerializationError> {
+        let p = if compress == ark_serialize::Compress::Yes {
+            read_g1_compressed(&mut reader)?
+        } else {
+            read_g1_uncompressed(&mut reader)?
+        };
+        if validate == ark_serialize::Validate::Yes && !p.is_in_correct_subgroup_assuming_on_curve()
+        {
+            return Err(SerializationError::InvalidData);
+        }
+        Ok(p)
+    }
+
+    fn serialize_with_mode<W: ark_serialize::Write>(
+        item: &Affine<Self>,
+        mut writer: W,
+        compress: ark_serialize::Compress,
+    ) -> Result<(), SerializationError> {
+        let encoding = EncodingFlags {
+            is_compressed: compress == ark_serialize::Compress::Yes,
+            is_infinity: item.is_zero(),
+            is_lexographically_largest: item.y > -item.y,
+        };
+        let mut p = *item;
+        if encoding.is_infinity {
+            p = G1Affine::zero();
+        }
+        // We read the field element `x` directly from the struct, since `xy()`
+        // returns `None` for the point at infinity.
+        let x_bytes = serialize_fq(p.x);
+        if encoding.is_compressed {
+            let mut bytes: [u8; G1_SERIALIZED_SIZE] = x_bytes;
+            encoding.encode_flags(&mut bytes);
+            writer.write_all(&bytes)?;
+        } else {
+            let mut bytes = [0u8; 2 * G1_SERIALIZED_SIZE];
+            bytes[0..G1_SERIALIZED_SIZE].copy_from_slice(&x_bytes[..]);
+            bytes[G1_SERIALIZED_SIZE..].copy_from_slice(&serialize_fq(p.y)[..]);
+            encoding.encode_flags(&mut bytes);
+            writer.write_all(&bytes)?;
+        };
+        Ok(())
+    }
+
+    fn serialized_size(compress: Compress) -> usize {
+        if compress == Compress::Yes {
+            G1_SERIALIZED_SIZE
+        } else {
+            G1_SERIALIZED_SIZE * 2
+        }
+    }
 }

 fn one_minus_x() -> Fr {
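Not part of the commit, but a minimal sketch of what the new G1 methods give you: round-tripping the generator through the compressed Zcash-style encoding and checking the flag bit. It assumes the crate name ark_bls12_381 and the same ark_serialize API used in this diff (serialize_with_mode/deserialize_with_mode via CanonicalSerialize/CanonicalDeserialize).

// Sketch only; assumes the ark_serialize API used in this diff.
use ark_bls12_381::G1Affine;
use ark_ec::AffineRepr;
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, Compress, Validate};

fn main() {
    let g = G1Affine::generator();

    // Compressed form: 48 bytes, flags packed into the three high bits of byte 0.
    let mut buf = Vec::new();
    g.serialize_with_mode(&mut buf, Compress::Yes).unwrap();
    assert_eq!(buf.len(), 48);
    assert_eq!(buf[0] >> 7, 1); // the compression flag is set

    let back = G1Affine::deserialize_with_mode(&buf[..], Compress::Yes, Validate::Yes).unwrap();
    assert_eq!(g, back);
}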

bls12_381/src/curves/g2.rs (+75, -1)

@@ -8,8 +8,13 @@ use ark_ec::{
     AffineRepr, CurveGroup, Group,
 };
 use ark_ff::{Field, MontFp, Zero};
+use ark_serialize::{Compress, SerializationError};

-use crate::*;
+use super::util::{serialize_fq, EncodingFlags, G2_SERIALIZED_SIZE};
+use crate::{
+    util::{read_g2_compressed, read_g2_uncompressed},
+    *,
+};

 pub type G2Affine = bls12::G2Affine<crate::Parameters>;
 pub type G2Projective = bls12::G2Projective<crate::Parameters>;
@@ -105,6 +110,75 @@ impl SWCurveConfig for Parameters {
         psi2_p2 += &-psi_p;
         (psi2_p2 - p_projective).into_affine()
     }
+
+    fn deserialize_with_mode<R: ark_serialize::Read>(
+        mut reader: R,
+        compress: ark_serialize::Compress,
+        validate: ark_serialize::Validate,
+    ) -> Result<Affine<Self>, ark_serialize::SerializationError> {
+        let p = if compress == ark_serialize::Compress::Yes {
+            read_g2_compressed(&mut reader)?
+        } else {
+            read_g2_uncompressed(&mut reader)?
+        };
+        if validate == ark_serialize::Validate::Yes && !p.is_in_correct_subgroup_assuming_on_curve()
+        {
+            return Err(SerializationError::InvalidData);
+        }
+        Ok(p)
+    }
+
+    fn serialize_with_mode<W: ark_serialize::Write>(
+        item: &Affine<Self>,
+        mut writer: W,
+        compress: ark_serialize::Compress,
+    ) -> Result<(), SerializationError> {
+        let encoding = EncodingFlags {
+            is_compressed: compress == ark_serialize::Compress::Yes,
+            is_infinity: item.is_zero(),
+            is_lexographically_largest: item.y > -item.y,
+        };
+        let mut p = *item;
+        if encoding.is_infinity {
+            p = G2Affine::zero();
+        }
+
+        let mut x_bytes = [0u8; G2_SERIALIZED_SIZE];
+        let c1_bytes = serialize_fq(p.x.c1);
+        let c0_bytes = serialize_fq(p.x.c0);
+        x_bytes[0..48].copy_from_slice(&c1_bytes[..]);
+        x_bytes[48..96].copy_from_slice(&c0_bytes[..]);
+        if encoding.is_compressed {
+            let mut bytes: [u8; G2_SERIALIZED_SIZE] = x_bytes;
+            encoding.encode_flags(&mut bytes);
+            writer.write_all(&bytes)?;
+        } else {
+            let mut bytes = [0u8; 2 * G2_SERIALIZED_SIZE];
+
+            let mut y_bytes = [0u8; G2_SERIALIZED_SIZE];
+            let c1_bytes = serialize_fq(p.y.c1);
+            let c0_bytes = serialize_fq(p.y.c0);
+            y_bytes[0..48].copy_from_slice(&c1_bytes[..]);
+            y_bytes[48..96].copy_from_slice(&c0_bytes[..]);
+            bytes[0..G2_SERIALIZED_SIZE].copy_from_slice(&x_bytes);
+            bytes[G2_SERIALIZED_SIZE..].copy_from_slice(&y_bytes);
+            encoding.encode_flags(&mut bytes);
+            writer.write_all(&bytes)?;
+        };
+        Ok(())
+    }
+
+    fn serialized_size(compress: ark_serialize::Compress) -> usize {
+        if compress == Compress::Yes {
+            G2_SERIALIZED_SIZE
+        } else {
+            2 * G2_SERIALIZED_SIZE
+        }
+    }
 }

 pub const G2_GENERATOR_X: Fq2 = Fq2::new(G2_GENERATOR_X_C0, G2_GENERATOR_X_C1);
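Again not part of the commit: a sketch of the G2 byte layout under the same assumptions as the G1 sketch above. The x-coordinate is written as x.c1 || x.c0 (each a 48-byte big-endian Fq), so the compressed encoding is exactly the first half of the uncompressed one, differing only in the flag bits of byte 0.

// Sketch only; assumes the ark_serialize API used in this diff.
use ark_bls12_381::G2Affine;
use ark_ec::AffineRepr;
use ark_serialize::{CanonicalSerialize, Compress};

fn main() {
    let g = G2Affine::generator();

    let mut uncompressed = Vec::new();
    g.serialize_with_mode(&mut uncompressed, Compress::No).unwrap();
    assert_eq!(uncompressed.len(), 192); // x.c1 || x.c0 || y.c1 || y.c0

    let mut compressed = Vec::new();
    g.serialize_with_mode(&mut compressed, Compress::Yes).unwrap();
    assert_eq!(compressed.len(), 96);

    // Only byte 0 differs (flag bits); the remaining x bytes coincide.
    assert_eq!(&compressed[1..], &uncompressed[1..96]);
}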

bls12_381/src/curves/mod.rs (+1, -0)

@@ -4,6 +4,7 @@ use crate::{Fq, Fq12Config, Fq2Config, Fq6Config};
 pub mod g1;
 pub mod g2;
+pub(crate) mod util;

 #[cfg(test)]
 mod tests;

bls12_381/src/curves/tests.rs (+0, -62)

@@ -1,62 +0,0 @@
use ark_algebra_test_templates::*;
use ark_ec::{AffineRepr, CurveGroup};
use ark_ff::{fields::Field, One, UniformRand, Zero};
use ark_std::{rand::Rng, test_rng};

use crate::{Bls12_381, Fq, Fq2, Fr, G1Affine, G1Projective, G2Affine, G2Projective};

test_group!(g1; G1Projective; sw);
test_group!(g2; G2Projective; sw);
test_group!(pairing_output; ark_ec::pairing::PairingOutput<Bls12_381>; msm);
test_pairing!(pairing; crate::Bls12_381);

#[test]
fn test_g1_endomorphism_beta() {
    assert!(crate::g1::BETA.pow(&[3u64]).is_one());
}

#[test]
fn test_g1_subgroup_membership_via_endomorphism() {
    let mut rng = test_rng();
    let generator = G1Projective::rand(&mut rng).into_affine();
    assert!(generator.is_in_correct_subgroup_assuming_on_curve());
}

#[test]
fn test_g1_subgroup_non_membership_via_endomorphism() {
    let mut rng = test_rng();
    loop {
        let x = Fq::rand(&mut rng);
        let greatest = rng.gen();

        if let Some(p) = G1Affine::get_point_from_x_unchecked(x, greatest) {
            if !p.mul_bigint(Fr::characteristic()).is_zero() {
                assert!(!p.is_in_correct_subgroup_assuming_on_curve());
                return;
            }
        }
    }
}

#[test]
fn test_g2_subgroup_membership_via_endomorphism() {
    let mut rng = test_rng();
    let generator = G2Projective::rand(&mut rng).into_affine();
    assert!(generator.is_in_correct_subgroup_assuming_on_curve());
}

#[test]
fn test_g2_subgroup_non_membership_via_endomorphism() {
    let mut rng = test_rng();
    loop {
        let x = Fq2::rand(&mut rng);
        let greatest = rng.gen();

        if let Some(p) = G2Affine::get_point_from_x_unchecked(x, greatest) {
            if !p.mul_bigint(Fr::characteristic()).is_zero() {
                assert!(!p.is_in_correct_subgroup_assuming_on_curve());
                return;
            }
        }
    }
}

bls12_381/src/curves/tests/g1_compressed_valid_test_vectors.dat (BIN)

bls12_381/src/curves/tests/g1_uncompressed_valid_test_vectors.dat (BIN)

bls12_381/src/curves/tests/g2_compressed_valid_test_vectors.dat (BIN)

bls12_381/src/curves/tests/g2_uncompressed_valid_test_vectors.dat (BIN)

bls12_381/src/curves/tests/mod.rs (+119, -0)

@@ -0,0 +1,119 @@
use ark_algebra_test_templates::*;
use ark_ec::{AffineRepr, CurveGroup, Group};
use ark_ff::{fields::Field, One, UniformRand, Zero};
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, Compress, Validate};
use ark_std::{rand::Rng, test_rng, vec};

use crate::{Bls12_381, Fq, Fq2, Fr, G1Affine, G1Projective, G2Affine, G2Projective};

test_group!(g1; G1Projective; sw);
test_group!(g2; G2Projective; sw);
test_group!(pairing_output; ark_ec::pairing::PairingOutput<Bls12_381>; msm);
test_pairing!(pairing; crate::Bls12_381);

#[test]
fn test_g1_endomorphism_beta() {
    assert!(crate::g1::BETA.pow(&[3u64]).is_one());
}

#[test]
fn test_g1_subgroup_membership_via_endomorphism() {
    let mut rng = test_rng();
    let generator = G1Projective::rand(&mut rng).into_affine();
    assert!(generator.is_in_correct_subgroup_assuming_on_curve());
}

#[test]
fn test_g1_subgroup_non_membership_via_endomorphism() {
    let mut rng = test_rng();
    loop {
        let x = Fq::rand(&mut rng);
        let greatest = rng.gen();

        if let Some(p) = G1Affine::get_point_from_x_unchecked(x, greatest) {
            if !p.mul_bigint(Fr::characteristic()).is_zero() {
                assert!(!p.is_in_correct_subgroup_assuming_on_curve());
                return;
            }
        }
    }
}

#[test]
fn test_g2_subgroup_membership_via_endomorphism() {
    let mut rng = test_rng();
    let generator = G2Projective::rand(&mut rng).into_affine();
    assert!(generator.is_in_correct_subgroup_assuming_on_curve());
}

#[test]
fn test_g2_subgroup_non_membership_via_endomorphism() {
    let mut rng = test_rng();
    loop {
        let x = Fq2::rand(&mut rng);
        let greatest = rng.gen();

        if let Some(p) = G2Affine::get_point_from_x_unchecked(x, greatest) {
            if !p.mul_bigint(Fr::characteristic()).is_zero() {
                assert!(!p.is_in_correct_subgroup_assuming_on_curve());
                return;
            }
        }
    }
}

// Test vectors and macro adapted from https://github.com/zkcrypto/bls12_381/blob/e224ad4ea1babfc582ccd751c2bf128611d10936/src/tests/mod.rs
macro_rules! test_vectors {
    ($projective:ident, $affine:ident, $compress:expr, $expected:ident) => {
        let mut e = $projective::zero();

        let mut v = vec![];
        {
            let mut expected = $expected;
            for _ in 0..1000 {
                let e_affine = $affine::from(e);

                let mut serialized = vec![0u8; e.serialized_size($compress)];
                e_affine
                    .serialize_with_mode(serialized.as_mut_slice(), $compress)
                    .unwrap();
                v.extend_from_slice(&serialized[..]);

                let mut decoded = serialized;
                let len_of_encoding = decoded.len();
                (&mut decoded[..]).copy_from_slice(&expected[0..len_of_encoding]);
                expected = &expected[len_of_encoding..];
                let decoded =
                    $affine::deserialize_with_mode(&decoded[..], $compress, Validate::Yes).unwrap();
                assert_eq!(e_affine, decoded);

                e += &$projective::generator();
            }
        }

        assert_eq!(&v[..], $expected);
    };
}

#[test]
fn g1_compressed_valid_test_vectors() {
    let bytes: &'static [u8] = include_bytes!("g1_compressed_valid_test_vectors.dat");
    test_vectors!(G1Projective, G1Affine, Compress::Yes, bytes);
}

#[test]
fn g1_uncompressed_valid_test_vectors() {
    let bytes: &'static [u8] = include_bytes!("g1_uncompressed_valid_test_vectors.dat");
    test_vectors!(G1Projective, G1Affine, Compress::No, bytes);
}

#[test]
fn g2_compressed_valid_test_vectors() {
    let bytes: &'static [u8] = include_bytes!("g2_compressed_valid_test_vectors.dat");
    test_vectors!(G2Projective, G2Affine, Compress::Yes, bytes);
}

#[test]
fn g2_uncompressed_valid_test_vectors() {
    let bytes: &'static [u8] = include_bytes!("g2_uncompressed_valid_test_vectors.dat");
    test_vectors!(G2Projective, G2Affine, Compress::No, bytes);
}
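Since e starts at zero, the first entry of each vector file encodes the point at infinity. A sketch of a sanity check (mine, not in the commit) that the compressed G1 file starts with the infinity encoding: compression and infinity flags set, every other bit zero.

// Sketch only; follows from the macro above, which asserts the serialized
// identity matches the first 48-byte entry of the vector file.
#[test]
fn first_g1_compressed_entry_is_infinity() {
    let bytes: &'static [u8] = include_bytes!("g1_compressed_valid_test_vectors.dat");
    assert_eq!(bytes[0], 0b1100_0000); // compression + infinity flags
    assert!(bytes[1..48].iter().all(|&b| b == 0));
}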

bls12_381/src/curves/util.rs (+217, -0)

@@ -0,0 +1,217 @@
use ark_ec::{short_weierstrass::Affine, AffineRepr};
use ark_ff::{BigInteger384, PrimeField};
use ark_serialize::SerializationError;

use crate::{
    g1::Parameters as G1Parameters, g2::Parameters as G2Parameters, Fq, Fq2, G1Affine, G2Affine,
};

pub const G1_SERIALIZED_SIZE: usize = 48;
pub const G2_SERIALIZED_SIZE: usize = 96;

pub struct EncodingFlags {
    pub is_compressed: bool,
    pub is_infinity: bool,
    pub is_lexographically_largest: bool,
}

impl EncodingFlags {
    pub fn get_flags(bytes: &[u8]) -> Self {
        let compression_flag_set = (bytes[0] >> 7) & 1;
        let infinity_flag_set = (bytes[0] >> 6) & 1;
        let sort_flag_set = (bytes[0] >> 5) & 1;

        Self {
            is_compressed: compression_flag_set == 1,
            is_infinity: infinity_flag_set == 1,
            is_lexographically_largest: sort_flag_set == 1,
        }
    }

    pub fn encode_flags(&self, bytes: &mut [u8]) {
        if self.is_compressed {
            bytes[0] |= 1 << 7;
        }

        if self.is_infinity {
            bytes[0] |= 1 << 6;
        }

        if self.is_compressed && !self.is_infinity && self.is_lexographically_largest {
            bytes[0] |= 1 << 5;
        }
    }
}

pub(crate) fn deserialize_fq(bytes: [u8; 48]) -> Option<Fq> {
    let mut tmp = BigInteger384::new([0, 0, 0, 0, 0, 0]);
    // Note: the following `unwrap`s can only fail if a slice cannot be
    // converted into `[u8; 8]`; this is infallible here, since the indices
    // are compile-time constants and `bytes` has a fixed size.
    tmp.0[5] = u64::from_be_bytes(<[u8; 8]>::try_from(&bytes[0..8]).unwrap());
    tmp.0[4] = u64::from_be_bytes(<[u8; 8]>::try_from(&bytes[8..16]).unwrap());
    tmp.0[3] = u64::from_be_bytes(<[u8; 8]>::try_from(&bytes[16..24]).unwrap());
    tmp.0[2] = u64::from_be_bytes(<[u8; 8]>::try_from(&bytes[24..32]).unwrap());
    tmp.0[1] = u64::from_be_bytes(<[u8; 8]>::try_from(&bytes[32..40]).unwrap());
    tmp.0[0] = u64::from_be_bytes(<[u8; 8]>::try_from(&bytes[40..48]).unwrap());

    Fq::from_bigint(tmp)
}

pub(crate) fn serialize_fq(field: Fq) -> [u8; 48] {
    let mut result = [0u8; 48];

    let rep = field.into_bigint();

    result[0..8].copy_from_slice(&rep.0[5].to_be_bytes());
    result[8..16].copy_from_slice(&rep.0[4].to_be_bytes());
    result[16..24].copy_from_slice(&rep.0[3].to_be_bytes());
    result[24..32].copy_from_slice(&rep.0[2].to_be_bytes());
    result[32..40].copy_from_slice(&rep.0[1].to_be_bytes());
    result[40..48].copy_from_slice(&rep.0[0].to_be_bytes());

    result
}

pub(crate) fn read_fq_with_offset(
    bytes: &[u8],
    offset: usize,
    mask: bool,
) -> Result<Fq, ark_serialize::SerializationError> {
    let mut tmp = [0; G1_SERIALIZED_SIZE];
    // read `G1_SERIALIZED_SIZE` bytes
    tmp.copy_from_slice(&bytes[offset * G1_SERIALIZED_SIZE..G1_SERIALIZED_SIZE * (offset + 1)]);

    if mask {
        // Mask away the flag bits
        tmp[0] &= 0b0001_1111;
    }
    deserialize_fq(tmp).ok_or(SerializationError::InvalidData)
}
pub(crate) fn read_g1_compressed<R: ark_serialize::Read>(
    mut reader: R,
) -> Result<Affine<G1Parameters>, ark_serialize::SerializationError> {
    let mut bytes = [0u8; G1_SERIALIZED_SIZE];
    reader
        .read_exact(&mut bytes)
        .map_err(|_| SerializationError::InvalidData)?;

    // Obtain the three flags from the start of the byte sequence
    let flags = EncodingFlags::get_flags(&bytes[..]);

    // we expect to be deserializing a compressed point
    if !flags.is_compressed {
        return Err(SerializationError::UnexpectedFlags);
    }

    if flags.is_infinity {
        return Ok(G1Affine::zero());
    }

    // Attempt to obtain the x-coordinate
    let x = read_fq_with_offset(&bytes, 0, true)?;

    let p = G1Affine::get_point_from_x_unchecked(x, flags.is_lexographically_largest)
        .ok_or(SerializationError::InvalidData)?;

    Ok(p)
}

pub(crate) fn read_g1_uncompressed<R: ark_serialize::Read>(
    mut reader: R,
) -> Result<Affine<G1Parameters>, ark_serialize::SerializationError> {
    let mut bytes = [0u8; 2 * G1_SERIALIZED_SIZE];
    reader
        .read_exact(&mut bytes)
        .map_err(|_| SerializationError::InvalidData)?;

    // Obtain the three flags from the start of the byte sequence
    let flags = EncodingFlags::get_flags(&bytes[..]);

    // we expect to be deserializing an uncompressed point
    if flags.is_compressed {
        return Err(SerializationError::UnexpectedFlags);
    }

    if flags.is_infinity {
        return Ok(G1Affine::zero());
    }

    // Attempt to obtain the x-coordinate
    let x = read_fq_with_offset(&bytes, 0, true)?;
    // Attempt to obtain the y-coordinate
    let y = read_fq_with_offset(&bytes, 1, false)?;
    let p = G1Affine::new_unchecked(x, y);

    Ok(p)
}

pub(crate) fn read_g2_compressed<R: ark_serialize::Read>(
    mut reader: R,
) -> Result<Affine<G2Parameters>, ark_serialize::SerializationError> {
    let mut bytes = [0u8; G2_SERIALIZED_SIZE];
    reader
        .read_exact(&mut bytes)
        .map_err(|_| SerializationError::InvalidData)?;

    // Obtain the three flags from the start of the byte sequence
    let flags = EncodingFlags::get_flags(&bytes);

    // we expect to be deserializing a compressed point
    if !flags.is_compressed {
        return Err(SerializationError::UnexpectedFlags);
    }

    if flags.is_infinity {
        return Ok(G2Affine::zero());
    }

    // Attempt to obtain the x-coordinate
    let xc1 = read_fq_with_offset(&bytes, 0, true)?;
    let xc0 = read_fq_with_offset(&bytes, 1, false)?;
    let x = Fq2::new(xc0, xc1);

    let p = G2Affine::get_point_from_x_unchecked(x, flags.is_lexographically_largest)
        .ok_or(SerializationError::InvalidData)?;

    Ok(p)
}

pub(crate) fn read_g2_uncompressed<R: ark_serialize::Read>(
    mut reader: R,
) -> Result<Affine<G2Parameters>, ark_serialize::SerializationError> {
    let mut bytes = [0u8; 2 * G2_SERIALIZED_SIZE];
    reader
        .read_exact(&mut bytes)
        .map_err(|_| SerializationError::InvalidData)?;

    // Obtain the three flags from the start of the byte sequence
    let flags = EncodingFlags::get_flags(&bytes);

    // we expect to be deserializing an uncompressed point
    if flags.is_compressed {
        return Err(SerializationError::UnexpectedFlags);
    }

    if flags.is_infinity {
        return Ok(G2Affine::zero());
    }

    // Attempt to obtain the x-coordinate
    let xc1 = read_fq_with_offset(&bytes, 0, true)?;
    let xc0 = read_fq_with_offset(&bytes, 1, false)?;
    let x = Fq2::new(xc0, xc1);

    // Attempt to obtain the y-coordinate
    let yc1 = read_fq_with_offset(&bytes, 2, false)?;
    let yc0 = read_fq_with_offset(&bytes, 3, false)?;
    let y = Fq2::new(yc0, yc1);

    let p = G2Affine::new_unchecked(x, y);

    Ok(p)
}
