Bump to arkworks-0.4.0 (#126)

* Bump to arkworks-0.4.0

* Replace remaining usages of `msm_bigint` with `msm_unchecked`

Using `msm_unchecked` instead of `msm_bigint` allows us to delete the BigInt conversion code by letting the library take care of it.
This commit is contained in:
Ivan Mikushin
2023-02-21 05:53:49 -08:00
committed by GitHub
parent f64bfe6c2a
commit 56b1085c11
26 changed files with 443 additions and 483 deletions

View File

@@ -6,11 +6,11 @@ edition = "2021"
[dependencies]
arithmetic = { path = "../arithmetic" }
ark-ec = { version = "^0.3.0", default-features = false }
ark-ff = { version = "^0.3.0", default-features = false }
ark-poly = { version = "^0.3.0", default-features = false }
ark-serialize = { version = "^0.3.0", default-features = false, features = [ "derive" ] }
ark-std = { version = "^0.3.0", default-features = false }
ark-ec = { version = "^0.4.0", default-features = false }
ark-ff = { version = "^0.4.0", default-features = false }
ark-poly = { version = "^0.4.0", default-features = false }
ark-serialize = { version = "^0.4.0", default-features = false, features = [ "derive" ] }
ark-std = { version = "^0.4.0", default-features = false }
displaydoc = { version = "0.2.3", default-features = false }
rayon = { version = "1.5.2", default-features = false, optional = true }
subroutines = { path = "../subroutines" }
@@ -18,7 +18,7 @@ transcript = { path = "../transcript" }
util = { path = "../util" }
[dev-dependencies]
ark-bls12-381 = { version = "0.3.0", default-features = false, features = [ "curve" ] }
ark-bls12-381 = { version = "0.4.0", default-features = false, features = [ "curve" ] }
# Benchmarks
[[bench]]
name = "hyperplonk-benches"

View File

@@ -58,7 +58,7 @@ fn main() -> Result<(), HyperPlonkErrors> {
fn read_srs() -> Result<MultilinearUniversalParams<Bls12_381>, io::Error> {
let mut f = File::open("srs.params")?;
Ok(MultilinearUniversalParams::<Bls12_381>::deserialize_unchecked(&mut f).unwrap())
Ok(MultilinearUniversalParams::<Bls12_381>::deserialize_compressed_unchecked(&mut f).unwrap())
}
fn write_srs(pcs_srs: &MultilinearUniversalParams<Bls12_381>) {
@@ -74,7 +74,7 @@ fn bench_vanilla_plonk(
let mut file = File::create(filename).unwrap();
for nv in MIN_NUM_VARS..=MAX_NUM_VARS {
let vanilla_gate = CustomizedGates::vanilla_plonk_gate();
bench_mock_circuit_zkp_helper(&mut file, nv, &vanilla_gate, &pcs_srs)?;
bench_mock_circuit_zkp_helper(&mut file, nv, &vanilla_gate, pcs_srs)?;
}
Ok(())
@@ -88,7 +88,7 @@ fn bench_jellyfish_plonk(
let mut file = File::create(filename).unwrap();
for nv in MIN_NUM_VARS..=MAX_NUM_VARS {
let jf_gate = CustomizedGates::jellyfish_turbo_plonk_gate();
bench_mock_circuit_zkp_helper(&mut file, nv, &jf_gate, &pcs_srs)?;
bench_mock_circuit_zkp_helper(&mut file, nv, &jf_gate, pcs_srs)?;
}
Ok(())
@@ -103,7 +103,7 @@ fn bench_high_degree_plonk(
let mut file = File::create(filename).unwrap();
println!("custom gate of degree {}", degree);
let vanilla_gate = CustomizedGates::mock_gate(2, degree);
bench_mock_circuit_zkp_helper(&mut file, HIGH_DEGREE_TEST_NV, &vanilla_gate, &pcs_srs)?;
bench_mock_circuit_zkp_helper(&mut file, HIGH_DEGREE_TEST_NV, &vanilla_gate, pcs_srs)?;
Ok(())
}
@@ -133,7 +133,7 @@ fn bench_mock_circuit_zkp_helper(
let (_pk, _vk) = <PolyIOP<Fr> as HyperPlonkSNARK<
Bls12_381,
MultilinearKzgPCS<Bls12_381>,
>>::preprocess(&index, &pcs_srs)?;
>>::preprocess(&index, pcs_srs)?;
}
println!(
"key extraction for {} variables: {} us",
@@ -142,7 +142,7 @@ fn bench_mock_circuit_zkp_helper(
);
let (pk, vk) =
<PolyIOP<Fr> as HyperPlonkSNARK<Bls12_381, MultilinearKzgPCS<Bls12_381>>>::preprocess(
&index, &pcs_srs,
&index, pcs_srs,
)?;
//==========================================================
// generate a proof

View File

@@ -6,7 +6,7 @@
//! Main module for the HyperPlonk SNARK.
use ark_ec::PairingEngine;
use ark_ec::pairing::Pairing;
use errors::HyperPlonkErrors;
use subroutines::{pcs::prelude::PolynomialCommitmentScheme, poly_iop::prelude::PermutationCheck};
use witness::WitnessColumn;
@@ -25,7 +25,7 @@ mod witness;
/// A HyperPlonk is derived from ZeroChecks and PermutationChecks.
pub trait HyperPlonkSNARK<E, PCS>: PermutationCheck<E, PCS>
where
E: PairingEngine,
E: Pairing,
PCS: PolynomialCommitmentScheme<E>,
{
type Index;
@@ -58,8 +58,8 @@ where
/// - The HyperPlonk SNARK proof.
fn prove(
pk: &Self::ProvingKey,
pub_input: &[E::Fr],
witnesses: &[WitnessColumn<E::Fr>],
pub_input: &[E::ScalarField],
witnesses: &[WitnessColumn<E::ScalarField>],
) -> Result<Self::Proof, HyperPlonkErrors>;
/// Verify the HyperPlonk proof.
@@ -72,7 +72,7 @@ where
/// - Return a boolean on whether the verification is successful
fn verify(
vk: &Self::VerifyingKey,
pub_input: &[E::Fr],
pub_input: &[E::ScalarField],
proof: &Self::Proof,
) -> Result<bool, HyperPlonkErrors>;
}

View File

@@ -193,7 +193,7 @@ mod test {
// generate pk and vks
let (pk, vk) =
<PolyIOP<Fr> as HyperPlonkSNARK<Bls12_381, MultilinearKzgPCS<Bls12_381>>>::preprocess(
&index, &pcs_srs,
&index, pcs_srs,
)?;
// generate a proof and verify
let proof =

View File

@@ -12,7 +12,7 @@ use crate::{
HyperPlonkSNARK,
};
use arithmetic::{evaluate_opt, gen_eval_point, VPAuxInfo};
use ark_ec::PairingEngine;
use ark_ec::pairing::Pairing;
use ark_poly::DenseMultilinearExtension;
use ark_std::{end_timer, log2, start_timer, One, Zero};
use rayon::iter::IntoParallelRefIterator;
@@ -29,22 +29,22 @@ use subroutines::{
};
use transcript::IOPTranscript;
impl<E, PCS> HyperPlonkSNARK<E, PCS> for PolyIOP<E::Fr>
impl<E, PCS> HyperPlonkSNARK<E, PCS> for PolyIOP<E::ScalarField>
where
E: PairingEngine,
E: Pairing,
// Ideally we want to access polynomial as PCS::Polynomial, instead of instantiating it here.
// But since PCS::Polynomial can be both univariate or multivariate in our implementation
// we cannot bound PCS::Polynomial with a property trait bound.
PCS: PolynomialCommitmentScheme<
E,
Polynomial = Arc<DenseMultilinearExtension<E::Fr>>,
Point = Vec<E::Fr>,
Evaluation = E::Fr,
Polynomial = Arc<DenseMultilinearExtension<E::ScalarField>>,
Point = Vec<E::ScalarField>,
Evaluation = E::ScalarField,
Commitment = Commitment<E>,
BatchProof = BatchProof<E, PCS>,
>,
{
type Index = HyperPlonkIndex<E::Fr>;
type Index = HyperPlonkIndex<E::ScalarField>;
type ProvingKey = HyperPlonkProvingKey<E, PCS>;
type VerifyingKey = HyperPlonkVerifyingKey<E, PCS>;
type Proof = HyperPlonkProof<E, Self, PCS>;
@@ -75,7 +75,7 @@ where
}
// build selector oracles and commit to it
let selector_oracles: Vec<Arc<DenseMultilinearExtension<E::Fr>>> = index
let selector_oracles: Vec<Arc<DenseMultilinearExtension<E::ScalarField>>> = index
.selectors
.iter()
.map(|s| Arc::new(DenseMultilinearExtension::from(s)))
@@ -153,11 +153,11 @@ where
/// - 5. deferred batch opening
fn prove(
pk: &Self::ProvingKey,
pub_input: &[E::Fr],
witnesses: &[WitnessColumn<E::Fr>],
pub_input: &[E::ScalarField],
witnesses: &[WitnessColumn<E::ScalarField>],
) -> Result<Self::Proof, HyperPlonkErrors> {
let start = start_timer!(|| "hyperplonk proving");
let mut transcript = IOPTranscript::<E::Fr>::new(b"hyperplonk");
let mut transcript = IOPTranscript::<E::ScalarField>::new(b"hyperplonk");
prover_sanity_check(&pk.params, pub_input, witnesses)?;
@@ -177,7 +177,7 @@ where
// =======================================================================
let step = start_timer!(|| "commit witnesses");
let witness_polys: Vec<Arc<DenseMultilinearExtension<E::Fr>>> = witnesses
let witness_polys: Vec<Arc<DenseMultilinearExtension<E::ScalarField>>> = witnesses
.iter()
.map(|w| Arc::new(DenseMultilinearExtension::from(w)))
.collect();
@@ -212,7 +212,7 @@ where
&witness_polys,
)?;
let zero_check_proof = <Self as ZeroCheck<E::Fr>>::prove(&fx, &mut transcript)?;
let zero_check_proof = <Self as ZeroCheck<E::ScalarField>>::prove(&fx, &mut transcript)?;
end_timer!(step);
// =======================================================================
// 3. Run permutation check on `\{w_i(x)\}` and `permutation_oracle`, and
@@ -259,12 +259,20 @@ where
let step = start_timer!(|| "opening and evaluations");
// (perm_check_point[2..n], 0)
let perm_check_point_0 = [&[E::Fr::zero()], &perm_check_point[0..num_vars - 1]].concat();
let perm_check_point_0 = [
&[E::ScalarField::zero()],
&perm_check_point[0..num_vars - 1],
]
.concat();
// (perm_check_point[2..n], 1)
let perm_check_point_1 = [&[E::Fr::one()], &perm_check_point[0..num_vars - 1]].concat();
let perm_check_point_1 =
[&[E::ScalarField::one()], &perm_check_point[0..num_vars - 1]].concat();
// (1, ..., 1, 0)
let prod_final_query_point =
[vec![E::Fr::zero()], vec![E::Fr::one(); num_vars - 1]].concat();
let prod_final_query_point = [
vec![E::ScalarField::zero()],
vec![E::ScalarField::one(); num_vars - 1],
]
.concat();
// prod(x)'s points
pcs_acc.insert_poly_and_points(&prod_x, &perm_check_proof.prod_x_comm, perm_check_point);
@@ -319,7 +327,7 @@ where
// - pi_poly(r_pi) where r_pi is sampled from transcript
let r_pi = transcript.get_and_append_challenge_vectors(b"r_pi", ell)?;
// padded with zeros
let r_pi_padded = [r_pi, vec![E::Fr::zero(); num_vars - ell]].concat();
let r_pi_padded = [r_pi, vec![E::ScalarField::zero(); num_vars - ell]].concat();
// Evaluate witness_poly[0] at r_pi||0s which is equal to public_input evaluated
// at r_pi. Assumes that public_input is a power of 2
pcs_acc.insert_poly_and_points(&witness_polys[0], &witness_commits[0], &r_pi_padded);
@@ -379,12 +387,12 @@ where
/// - public input consistency checks
fn verify(
vk: &Self::VerifyingKey,
pub_input: &[E::Fr],
pub_input: &[E::ScalarField],
proof: &Self::Proof,
) -> Result<bool, HyperPlonkErrors> {
let start = start_timer!(|| "hyperplonk verification");
let mut transcript = IOPTranscript::<E::Fr>::new(b"hyperplonk");
let mut transcript = IOPTranscript::<E::ScalarField>::new(b"hyperplonk");
let num_selectors = vk.params.num_selector_columns();
let num_witnesses = vk.params.num_witness_columns();
@@ -429,7 +437,7 @@ where
// =======================================================================
let step = start_timer!(|| "verify zero check");
// Zero check and perm check have different AuxInfo
let zero_check_aux_info = VPAuxInfo::<E::Fr> {
let zero_check_aux_info = VPAuxInfo::<E::ScalarField> {
max_degree: vk.params.gate_func.degree(),
num_variables: num_vars,
phantom: PhantomData::default(),
@@ -439,7 +447,7 @@ where
transcript.append_serializable_element(b"w", w_com)?;
}
let zero_check_sub_claim = <Self as ZeroCheck<E::Fr>>::verify(
let zero_check_sub_claim = <Self as ZeroCheck<E::ScalarField>>::verify(
&proof.zero_check_proof,
&zero_check_aux_info,
&mut transcript,
@@ -462,7 +470,7 @@ where
let step = start_timer!(|| "verify permutation check");
// Zero check and perm check have different AuxInfo
let perm_check_aux_info = VPAuxInfo::<E::Fr> {
let perm_check_aux_info = VPAuxInfo::<E::ScalarField> {
// Prod(x) has a max degree of witnesses.len() + 1
max_degree: proof.witness_commits.len() + 1,
num_variables: num_vars,
@@ -522,10 +530,18 @@ where
let mut comms = vec![];
let mut points = vec![];
let perm_check_point_0 = [&[E::Fr::zero()], &perm_check_point[0..num_vars - 1]].concat();
let perm_check_point_1 = [&[E::Fr::one()], &perm_check_point[0..num_vars - 1]].concat();
let prod_final_query_point =
[vec![E::Fr::zero()], vec![E::Fr::one(); num_vars - 1]].concat();
let perm_check_point_0 = [
&[E::ScalarField::zero()],
&perm_check_point[0..num_vars - 1],
]
.concat();
let perm_check_point_1 =
[&[E::ScalarField::one()], &perm_check_point[0..num_vars - 1]].concat();
let prod_final_query_point = [
vec![E::ScalarField::zero()],
vec![E::ScalarField::one(); num_vars - 1],
]
.concat();
// prod(x)'s points
comms.push(proof.perm_check_proof.prod_x_comm);
@@ -581,7 +597,7 @@ where
pi_eval, expect_pi_eval,
)));
}
let r_pi_padded = [r_pi, vec![E::Fr::zero(); num_vars - ell]].concat();
let r_pi_padded = [r_pi, vec![E::ScalarField::zero(); num_vars - ell]].concat();
comms.push(proof.witness_commits[0]);
points.push(r_pi_padded);
@@ -638,7 +654,7 @@ mod tests {
test_hyperplonk_helper::<Bls12_381>(gates)
}
fn test_hyperplonk_helper<E: PairingEngine>(
fn test_hyperplonk_helper<E: Pairing>(
gate_func: CustomizedGates,
) -> Result<(), HyperPlonkErrors> {
let mut rng = test_rng();
@@ -656,7 +672,12 @@ mod tests {
gate_func,
};
let permutation = identity_permutation(nv, num_witnesses);
let q1 = SelectorColumn(vec![E::Fr::one(), E::Fr::one(), E::Fr::one(), E::Fr::one()]);
let q1 = SelectorColumn(vec![
E::ScalarField::one(),
E::ScalarField::one(),
E::ScalarField::one(),
E::ScalarField::one(),
]);
let index = HyperPlonkIndex {
params,
permutation,
@@ -664,58 +685,59 @@ mod tests {
};
// generate pk and vks
let (pk, vk) = <PolyIOP<E::Fr> as HyperPlonkSNARK<E, MultilinearKzgPCS<E>>>::preprocess(
&index, &pcs_srs,
)?;
let (pk, vk) =
<PolyIOP<E::ScalarField> as HyperPlonkSNARK<E, MultilinearKzgPCS<E>>>::preprocess(
&index, &pcs_srs,
)?;
// w1 := [0, 1, 2, 3]
let w1 = WitnessColumn(vec![
E::Fr::zero(),
E::Fr::one(),
E::Fr::from(2u128),
E::Fr::from(3u128),
E::ScalarField::zero(),
E::ScalarField::one(),
E::ScalarField::from(2u128),
E::ScalarField::from(3u128),
]);
// w2 := [0^5, 1^5, 2^5, 3^5]
let w2 = WitnessColumn(vec![
E::Fr::zero(),
E::Fr::one(),
E::Fr::from(32u128),
E::Fr::from(243u128),
E::ScalarField::zero(),
E::ScalarField::one(),
E::ScalarField::from(32u128),
E::ScalarField::from(243u128),
]);
// public input = w1
let pi = w1.clone();
// generate a proof and verify
let proof = <PolyIOP<E::Fr> as HyperPlonkSNARK<E, MultilinearKzgPCS<E>>>::prove(
let proof = <PolyIOP<E::ScalarField> as HyperPlonkSNARK<E, MultilinearKzgPCS<E>>>::prove(
&pk,
&pi.0,
&[w1.clone(), w2.clone()],
)?;
let _verify = <PolyIOP<E::Fr> as HyperPlonkSNARK<E, MultilinearKzgPCS<E>>>::verify(
&vk, &pi.0, &proof,
)?;
let _verify =
<PolyIOP<E::ScalarField> as HyperPlonkSNARK<E, MultilinearKzgPCS<E>>>::verify(
&vk, &pi.0, &proof,
)?;
// bad path 1: wrong permutation
let rand_perm: Vec<E::Fr> = random_permutation(nv, num_witnesses, &mut rng);
let mut bad_index = index.clone();
let rand_perm: Vec<E::ScalarField> = random_permutation(nv, num_witnesses, &mut rng);
let mut bad_index = index;
bad_index.permutation = rand_perm;
// generate pk and vks
let (_, bad_vk) = <PolyIOP<E::Fr> as HyperPlonkSNARK<E, MultilinearKzgPCS<E>>>::preprocess(
&bad_index, &pcs_srs,
)?;
assert_eq!(
<PolyIOP<E::Fr> as HyperPlonkSNARK<E, MultilinearKzgPCS<E>>>::verify(
&bad_vk, &pi.0, &proof,
)?,
false
);
let (_, bad_vk) =
<PolyIOP<E::ScalarField> as HyperPlonkSNARK<E, MultilinearKzgPCS<E>>>::preprocess(
&bad_index, &pcs_srs,
)?;
assert!(!<PolyIOP<E::ScalarField> as HyperPlonkSNARK<
E,
MultilinearKzgPCS<E>,
>>::verify(&bad_vk, &pi.0, &proof,)?);
// bad path 2: wrong witness
let mut w1_bad = w1.clone();
w1_bad.0[0] = E::Fr::one();
let mut w1_bad = w1;
w1_bad.0[0] = E::ScalarField::one();
assert!(
<PolyIOP<E::Fr> as HyperPlonkSNARK<E, MultilinearKzgPCS<E>>>::prove(
<PolyIOP<E::ScalarField> as HyperPlonkSNARK<E, MultilinearKzgPCS<E>>>::prove(
&pk,
&pi.0,
&[w1_bad, w2],

View File

@@ -7,7 +7,7 @@
//! Main module for the HyperPlonk PolyIOP.
use crate::{custom_gate::CustomizedGates, prelude::HyperPlonkErrors, selectors::SelectorColumn};
use ark_ec::PairingEngine;
use ark_ec::pairing::Pairing;
use ark_ff::PrimeField;
use ark_poly::DenseMultilinearExtension;
use ark_std::log2;
@@ -25,7 +25,7 @@ use subroutines::{
#[derive(Clone, Debug, PartialEq)]
pub struct HyperPlonkProof<E, PC, PCS>
where
E: PairingEngine,
E: Pairing,
PC: PermutationCheck<E, PCS>,
PCS: PolynomialCommitmentScheme<E>,
{
@@ -36,7 +36,7 @@ where
// IOP proofs
// =======================================================================
// the custom gate zerocheck proof
pub zero_check_proof: <PC as ZeroCheck<E::Fr>>::ZeroCheckProof,
pub zero_check_proof: <PC as ZeroCheck<E::ScalarField>>::ZeroCheckProof,
// the permutation check proof for copy constraints
pub perm_check_proof: PC::PermutationProof,
}
@@ -128,13 +128,13 @@ impl<F: PrimeField> HyperPlonkIndex<F> {
/// - the commitment to the selectors and permutations
/// - the parameters for polynomial commitment
#[derive(Clone, Debug, Default, PartialEq)]
pub struct HyperPlonkProvingKey<E: PairingEngine, PCS: PolynomialCommitmentScheme<E>> {
pub struct HyperPlonkProvingKey<E: Pairing, PCS: PolynomialCommitmentScheme<E>> {
/// Hyperplonk instance parameters
pub params: HyperPlonkParams,
/// The preprocessed permutation polynomials
pub permutation_oracles: Vec<Arc<DenseMultilinearExtension<E::Fr>>>,
pub permutation_oracles: Vec<Arc<DenseMultilinearExtension<E::ScalarField>>>,
/// The preprocessed selector polynomials
pub selector_oracles: Vec<Arc<DenseMultilinearExtension<E::Fr>>>,
pub selector_oracles: Vec<Arc<DenseMultilinearExtension<E::ScalarField>>>,
/// Commitments to the preprocessed selector polynomials
pub selector_commitments: Vec<PCS::Commitment>,
/// Commitments to the preprocessed permutation polynomials
@@ -148,7 +148,7 @@ pub struct HyperPlonkProvingKey<E: PairingEngine, PCS: PolynomialCommitmentSchem
/// - the commitments to the preprocessed polynomials output by the indexer
/// - the parameters for polynomial commitment
#[derive(Clone, Debug, Default, PartialEq)]
pub struct HyperPlonkVerifyingKey<E: PairingEngine, PCS: PolynomialCommitmentScheme<E>> {
pub struct HyperPlonkVerifyingKey<E: Pairing, PCS: PolynomialCommitmentScheme<E>> {
/// Hyperplonk instance parameters
pub params: HyperPlonkParams,
/// The parameters for PCS commitment

View File

@@ -9,7 +9,7 @@ use crate::{
witness::WitnessColumn,
};
use arithmetic::{evaluate_opt, VirtualPolynomial};
use ark_ec::PairingEngine;
use ark_ec::pairing::Pairing;
use ark_ff::PrimeField;
use ark_poly::DenseMultilinearExtension;
use std::{borrow::Borrow, sync::Arc};
@@ -19,7 +19,7 @@ use transcript::IOPTranscript;
/// An accumulator structure that holds a polynomial and
/// its opening points
#[derive(Debug)]
pub(super) struct PcsAccumulator<E: PairingEngine, PCS: PolynomialCommitmentScheme<E>> {
pub(super) struct PcsAccumulator<E: Pairing, PCS: PolynomialCommitmentScheme<E>> {
// sequence:
// - prod(x) at 5 points
// - w_merged at perm check point
@@ -35,12 +35,12 @@ pub(super) struct PcsAccumulator<E: PairingEngine, PCS: PolynomialCommitmentSche
impl<E, PCS> PcsAccumulator<E, PCS>
where
E: PairingEngine,
E: Pairing,
PCS: PolynomialCommitmentScheme<
E,
Polynomial = Arc<DenseMultilinearExtension<E::Fr>>,
Point = Vec<E::Fr>,
Evaluation = E::Fr,
Polynomial = Arc<DenseMultilinearExtension<E::ScalarField>>,
Point = Vec<E::ScalarField>,
Evaluation = E::ScalarField,
Commitment = Commitment<E>,
>,
{
@@ -78,7 +78,7 @@ where
pub(super) fn multi_open(
&self,
prover_param: impl Borrow<PCS::ProverParam>,
transcript: &mut IOPTranscript<E::Fr>,
transcript: &mut IOPTranscript<E::ScalarField>,
) -> Result<PCS::BatchProof, HyperPlonkErrors> {
Ok(PCS::multi_open(
prover_param.borrow(),