Mirror of https://github.com/arnaucube/testudo.git (synced 2026-01-12 08:41:29 +01:00)

Commit: update workflows for testudo
.github/workflows/rust.yml (vendored): deleted, 30 lines
@@ -1,30 +0,0 @@
-name: Build and Test Spartan
-
-on:
-  push:
-    branches: [ master ]
-  pull_request:
-    branches: [ master ]
-
-jobs:
-  build_nightly:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v2
-      - name: Install
-        run: rustup default nightly
-      - name: Install rustfmt Components
-        run: rustup component add rustfmt
-      - name: Install clippy
-        run: rustup component add clippy
-      - name: Build
-        run: cargo build --verbose
-      - name: Run tests
-        run: cargo test --verbose
-      - name: Build examples
-        run: cargo build --examples --verbose
-      - name: Check Rustfmt Code Style
-        run: cargo fmt --all -- --check
-      - name: Check clippy warnings
-        run: cargo clippy --all-targets --all-features -- -D warnings
-
.github/workflows/testudo.yml (vendored): new file, 29 lines
@@ -0,0 +1,29 @@
+name: Build and Test Testudo
+
+on:
+  push:
+    branches: [master]
+  pull_request:
+    branches: [master]
+
+jobs:
+  build_nightly:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v2
+      - name: Install
+        run: rustup default nightly
+      - name: Install rustfmt Components
+        run: rustup component add rustfmt
+      - name: Install clippy
+        run: rustup component add clippy
+      - name: Build
+        run: cargo build --verbose
+      - name: Run tests
+        run: cargo test --verbose
+      - name: Build examples
+        run: cargo build --examples --verbose
+      - name: Check Rustfmt Code Style
+        run: cargo fmt --all -- --check
+      - name: Check clippy warnings
+        run: cargo clippy --all-targets --all-features
@@ -69,7 +69,7 @@ fn produce_r1cs() -> (
   A.push((3, 3, one.clone()));
   A.push((3, num_vars, Scalar::from(5u32).into_repr().to_bytes_le()));
   B.push((3, num_vars, one.clone()));
-  C.push((3, num_vars + 1, one.clone()));
+  C.push((3, num_vars + 1, one));
 
   let inst = Instance::new(num_cons, num_vars, num_inputs, &A, &B, &C).unwrap();
 
@@ -25,7 +25,7 @@ impl MultiCommitGens {
     let mut gens: Vec<GroupElement> = Vec::new();
     for _ in 0..n + 1 {
       let mut el_aff: Option<GroupElementAffine> = None;
-      while el_aff.is_some() != true {
+      while el_aff.is_none() {
         let uniform_bytes = sponge.squeeze_bytes(64);
         el_aff = GroupElementAffine::from_random_bytes(&uniform_bytes);
       }
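For illustration, here is a minimal std-only sketch of the rejection-sampling loop that the new `while el_aff.is_none()` condition expresses. The sponge and the point parser are replaced by hypothetical stand-ins (`squeeze_bytes`, `from_random_bytes` over `u64`), since the real ones come from ark-sponge and the curve type; this is a sketch of the idiom, not the library code.

// Hypothetical stand-in for sponge.squeeze_bytes(64): a deterministic byte source.
fn squeeze_bytes(counter: &mut u64) -> Vec<u8> {
  *counter += 1;
  counter.to_le_bytes().to_vec()
}

// Hypothetical stand-in for GroupElementAffine::from_random_bytes: only some
// byte strings parse to a valid element (here: those with an even first byte).
fn from_random_bytes(bytes: &[u8]) -> Option<u64> {
  if bytes.first().copied()? % 2 == 0 {
    Some(u64::from_le_bytes(bytes[..8].try_into().ok()?))
  } else {
    None
  }
}

fn main() {
  let mut counter = 0u64;
  let mut el: Option<u64> = None;
  // Same shape as the patched loop: keep squeezing until a sample parses.
  while el.is_none() {
    let bytes = squeeze_bytes(&mut counter);
    el = from_random_bytes(&bytes);
  }
  assert_eq!(el, Some(2));
}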
@@ -23,10 +23,10 @@ use ark_r1cs_std::{
   fields::fp::FpVar,
   prelude::{Boolean, EqGadget, FieldVar},
 };
-<<<<<<< HEAD
 use ark_relations::r1cs::{ConstraintSynthesizer, ConstraintSystemRef, Namespace, SynthesisError};
-=======
-use ark_relations::{
+use ark_sponge::{
+  constraints::CryptographicSpongeVar,
+  poseidon::{constraints::PoseidonSpongeVar, PoseidonParameters},
 };
 use rand::{CryptoRng, Rng};
 
@@ -46,12 +46,12 @@ impl PoseidonTranscripVar {
 
     if let Some(c) = challenge {
       let c_var = FpVar::<Fr>::new_witness(cs.clone(), || Ok(c)).unwrap();
-      sponge.absorb(&c_var);
+      sponge.absorb(&c_var).unwrap();
     }
 
     Self {
-      cs: cs,
-      sponge: sponge,
+      cs,
+      sponge,
       params: params.clone(),
     }
   }
@@ -60,7 +60,7 @@ impl PoseidonTranscripVar {
     self.sponge.absorb(&input)
   }
 
-  fn append_vector(&mut self, input_vec: &Vec<FpVar<Fr>>) -> Result<(), SynthesisError> {
+  fn append_vector(&mut self, input_vec: &[FpVar<Fr>]) -> Result<(), SynthesisError> {
     for input in input_vec.iter() {
       self.append(input)?;
     }
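The `&Vec<T>` parameters changed to `&[T]` here and in several places below follow the usual clippy `ptr_arg` advice. A small sketch of why the slice form is friendlier to callers, with plain `u64` values standing in for the field-element and polynomial types:

// Taking &[u64] instead of &Vec<u64> lets callers pass Vecs, arrays and
// sub-slices directly, without building a temporary Vec first.
fn sum_all(xs: &[u64]) -> u64 {
  xs.iter().sum()
}

fn main() {
  let v = vec![1u64, 2, 3];
  let a = [4u64, 5];

  assert_eq!(sum_all(&v), 6); // &Vec<u64> coerces to &[u64]
  assert_eq!(sum_all(&a), 9); // fixed-size arrays work too
  assert_eq!(sum_all(&v[1..]), 5); // as do sub-slices, with no copy
}

This is also what lets later call sites in the diff drop their `.to_vec()` conversions, e.g. `transcript.append_scalar_vector(a_vec)`.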
@@ -96,7 +96,7 @@ impl AllocVar<UniPoly, Fr> for UniPolyVar {
       let cp: &UniPoly = c.borrow();
       let mut coeffs_var = Vec::new();
       for coeff in cp.coeffs.iter() {
-        let coeff_var = FpVar::<Fr>::new_variable(cs.clone(), || Ok(coeff.clone()), mode)?;
+        let coeff_var = FpVar::<Fr>::new_variable(cs.clone(), || Ok(coeff), mode)?;
         coeffs_var.push(coeff_var);
       }
       Ok(Self { coeffs: coeffs_var })
@@ -138,7 +138,7 @@ pub struct SumcheckVerificationCircuit {
 impl SumcheckVerificationCircuit {
   fn verifiy_sumcheck(
     &self,
-    poly_vars: &Vec<UniPolyVar>,
+    poly_vars: &[UniPolyVar],
     claim_var: &FpVar<Fr>,
     transcript_var: &mut PoseidonTranscripVar,
   ) -> Result<(FpVar<Fr>, Vec<FpVar<Fr>>), SynthesisError> {
@@ -173,7 +173,7 @@ impl AllocVar<SparsePolyEntry, Fr> for SparsePolyEntryVar {
     f().and_then(|s| {
       let cs = cs.into();
       let spe: &SparsePolyEntry = s.borrow();
-      let val_var = FpVar::<Fr>::new_witness(cs.clone(), || Ok(spe.val))?;
+      let val_var = FpVar::<Fr>::new_witness(cs, || Ok(spe.val))?;
       Ok(Self {
         idx: spe.idx,
         val_var,
@@ -211,7 +211,7 @@ impl AllocVar<SparsePolynomial, Fr> for SparsePolynomialVar {
 }
 
 impl SparsePolynomialVar {
-  fn compute_chi(a: &[bool], r_vars: &Vec<FpVar<Fr>>) -> FpVar<Fr> {
+  fn compute_chi(a: &[bool], r_vars: &[FpVar<Fr>]) -> FpVar<Fr> {
     let mut chi_i_var = FpVar::<Fr>::one();
     let one = FpVar::<Fr>::one();
     for (i, r_var) in r_vars.iter().enumerate() {
@@ -224,12 +224,12 @@ impl SparsePolynomialVar {
     chi_i_var
   }
 
-  pub fn evaluate(&self, r_var: &Vec<FpVar<Fr>>) -> FpVar<Fr> {
+  pub fn evaluate(&self, r_var: &[FpVar<Fr>]) -> FpVar<Fr> {
     let mut sum = FpVar::<Fr>::zero();
     for spe_var in self.Z_var.iter() {
       // potential problem
       let bits = &spe_var.idx.get_bits(r_var.len());
-      sum += SparsePolynomialVar::compute_chi(&bits, r_var) * &spe_var.val_var;
+      sum += SparsePolynomialVar::compute_chi(bits, r_var) * &spe_var.val_var;
     }
     sum
   }
@@ -350,7 +350,7 @@ impl ConstraintSynthesizer<Fr> for R1CSVerificationCircuit {
       AllocationMode::Witness,
     )?;
 
-    let poly_input_eval_var = input_as_sparse_poly_var.evaluate(&ry_var[1..].to_vec());
+    let poly_input_eval_var = input_as_sparse_poly_var.evaluate(&ry_var[1..]);
 
     let eval_vars_at_ry_var = FpVar::<Fr>::new_input(cs.clone(), || Ok(&self.eval_vars_at_ry))?;
 
@@ -361,7 +361,7 @@ impl ConstraintSynthesizer<Fr> for R1CSVerificationCircuit {
 
     let eval_A_r_var = FpVar::<Fr>::new_witness(cs.clone(), || Ok(eval_A_r))?;
     let eval_B_r_var = FpVar::<Fr>::new_witness(cs.clone(), || Ok(eval_B_r))?;
-    let eval_C_r_var = FpVar::<Fr>::new_witness(cs.clone(), || Ok(eval_C_r))?;
+    let eval_C_r_var = FpVar::<Fr>::new_witness(cs, || Ok(eval_C_r))?;
 
     let scalar_var = &r_A_var * &eval_A_r_var + &r_B_var * &eval_B_r_var + &r_C_var * &eval_C_r_var;
 
@@ -407,7 +407,7 @@ impl VerifierCircuit {
     let proof = Groth16::<I>::prove(&pk, inner_circuit.clone(), &mut rng)?;
     let pvk = Groth16::<I>::process_vk(&vk).unwrap();
     Ok(Self {
-      inner_circuit: inner_circuit,
+      inner_circuit,
       inner_proof: proof,
       inner_vk: pvk,
       evals_var_at_ry: config.eval_vars_at_ry,
@@ -432,7 +432,7 @@ impl ConstraintSynthesizer<Fq> for VerifierCircuit {
       .collect::<Result<Vec<_>, _>>()?;
     let input_var = BooleanInputVar::<Fr, Fq>::new(bits);
 
-    let vk_var = PreparedVerifyingKeyVar::new_witness(cs.clone(), || Ok(self.inner_vk.clone()))?;
+    let vk_var = PreparedVerifyingKeyVar::new_witness(cs, || Ok(self.inner_vk.clone()))?;
     Groth16VerifierGadget::verify_with_processed_vk(&vk_var, &input_var, &proof_var)?
       .enforce_equal(&Boolean::constant(true))?;
     Ok(())
src/group.rs: 15 changed lines
@@ -6,7 +6,7 @@ use lazy_static::lazy_static;
 
 use super::scalar::Scalar;
 
-use ark_ec::{AffineCurve, ProjectiveCurve};
+use ark_ec::ProjectiveCurve;
 use ark_serialize::*;
 use core::borrow::Borrow;
 
@@ -38,7 +38,6 @@ impl CompressGroupElement for GroupElement {
   fn compress(&self) -> CompressedGroup {
     let mut point_encoding = Vec::new();
     self.serialize(&mut point_encoding).unwrap();
-    // println!("in compress {:?}", point_encoding);;
     CompressedGroup(point_encoding)
   }
 }
@@ -46,11 +45,11 @@ impl CompressGroupElement for GroupElement {
 impl DecompressGroupElement for GroupElement {
   fn decompress(encoded: &CompressedGroup) -> Option<Self> {
     let res = GroupElement::deserialize(&*encoded.0);
-    if res.is_err() {
+    if let Ok(r) = res {
+      Some(r)
+    } else {
       println!("{:?}", res);
       None
-    } else {
-      Some(res.unwrap())
     }
   }
 }
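A std-only sketch of the control-flow shape introduced in `decompress`, with `&str` parsing standing in for point deserialization: the happy path now maps `Ok` to `Some` up front, and the error branch keeps its logging side effect, which is why a bare `res.ok()` would not be an exact replacement.

fn decode(s: &str) -> Option<i64> {
  let res = s.parse::<i64>();
  if let Ok(n) = res {
    Some(n)
  } else {
    // Matches the patched code: report the failure, then return None.
    println!("{:?}", res);
    None
  }
}

fn main() {
  assert_eq!(decode("42"), Some(42));
  assert_eq!(decode("not a number"), None);
}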
@@ -58,7 +57,7 @@ impl DecompressGroupElement for GroupElement {
 impl UnpackGroupElement for CompressedGroup {
   fn unpack(&self) -> Result<GroupElement, ProofVerifyError> {
     let encoded = self.0.clone();
-    GroupElement::decompress(self).ok_or_else(|| ProofVerifyError::DecompressionError(encoded))
+    GroupElement::decompress(self).ok_or(ProofVerifyError::DecompressionError(encoded))
   }
 }
 
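The `ok_or_else` to `ok_or` change works because the error value only wraps `encoded`, which is already computed before the call, so the lazy closure buys nothing. A self-contained sketch with a hypothetical error type in place of `ProofVerifyError`:

#[derive(Debug, PartialEq)]
enum UnpackError {
  Decompression(Vec<u8>),
}

// The eager form: `encoded` is already owned, so constructing the error
// variant directly is as cheap as deferring it behind a closure.
fn unpack(decompressed: Option<u32>, encoded: Vec<u8>) -> Result<u32, UnpackError> {
  decompressed.ok_or(UnpackError::Decompression(encoded))
}

fn main() {
  assert_eq!(unpack(Some(5), vec![1, 2]), Ok(5));
  assert_eq!(
    unpack(None, vec![1, 2]),
    Err(UnpackError::Decompression(vec![1, 2]))
  );
}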
@@ -69,11 +68,11 @@ pub trait VartimeMultiscalarMul {
 impl VartimeMultiscalarMul for GroupElement {
   fn vartime_multiscalar_mul(scalars: &[Scalar], points: &[GroupElement]) -> GroupElement {
     let repr_scalars = scalars
-      .into_iter()
+      .iter()
       .map(|S| S.borrow().into_repr())
       .collect::<Vec<<Scalar as PrimeField>::BigInt>>();
     let aff_points = points
-      .into_iter()
+      .iter()
       .map(|P| P.borrow().into_affine())
       .collect::<Vec<GroupElementAffine>>();
     VariableBaseMSM::multi_scalar_mul(aff_points.as_slice(), repr_scalars.as_slice())
src/lib.rs: 13 changed lines
@@ -56,7 +56,6 @@ use random::RandomTape;
 use scalar::Scalar;
 
 use timer::Timer;
-use transcript::ProofTranscript;
 
 /// `ComputationCommitment` holds a public preprocessed NP statement (e.g., R1CS)
 pub struct ComputationCommitment {
@@ -81,8 +80,8 @@ impl Assignment {
     let mut vec_scalar: Vec<Scalar> = Vec::new();
     for v in vec {
       let val = Scalar::from_random_bytes(v.as_slice());
-      if val.is_some() == true {
-        vec_scalar.push(val.unwrap());
+      if let Some(v) = val {
+        vec_scalar.push(v);
       } else {
         return Err(R1CSError::InvalidScalar);
       }
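The same `is_some() == true` / `unwrap()` pattern is rewritten as `if let Some(..)` both here in `Assignment` and below in `Instance::new`. A simplified sketch of the shape, with string parsing and a hypothetical error type standing in for `Scalar::from_random_bytes` and `R1CSError`:

#[derive(Debug)]
struct InvalidScalar;

fn bytes_to_values(inputs: &[Vec<u8>]) -> Result<Vec<u32>, InvalidScalar> {
  let mut out = Vec::new();
  for bytes in inputs {
    let val = std::str::from_utf8(bytes).ok().and_then(|s| s.parse::<u32>().ok());
    // `if let Some(parsed)` binds the value once, replacing the
    // `val.is_some() == true { ... val.unwrap() }` pattern.
    if let Some(parsed) = val {
      out.push(parsed);
    } else {
      return Err(InvalidScalar);
    }
  }
  Ok(out)
}

fn main() {
  assert_eq!(bytes_to_values(&[b"1".to_vec(), b"2".to_vec()]).unwrap(), vec![1, 2]);
  assert!(bytes_to_values(&[b"oops".to_vec()]).is_err());
}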
@@ -188,14 +187,14 @@ impl Instance {
         return Err(R1CSError::InvalidIndex);
       }
 
-      let val = Scalar::from_random_bytes(&val_bytes.as_slice());
-      if val.is_some() == true {
+      let val = Scalar::from_random_bytes(val_bytes.as_slice());
+      if let Some(v) = val {
         // if col >= num_vars, it means that it is referencing a 1 or input in the satisfying
         // assignment
         if *col >= num_vars {
-          mat.push((*row, *col + num_vars_padded - num_vars, val.unwrap()));
+          mat.push((*row, *col + num_vars_padded - num_vars, v));
         } else {
-          mat.push((*row, *col, val.unwrap()));
+          mat.push((*row, *col, v));
         }
       } else {
         return Err(R1CSError::InvalidScalar);
@@ -91,14 +91,14 @@ impl BulletReductionProof {
         .iter()
         .chain(iter::once(&c_L))
         .chain(iter::once(blind_L))
-        .map(|s| *s)
+        .copied()
         .collect::<Vec<Scalar>>()
         .as_slice(),
       G_R
         .iter()
         .chain(iter::once(Q))
         .chain(iter::once(H))
-        .map(|p| *p)
+        .copied()
         .collect::<Vec<GroupElement>>()
         .as_slice(),
     );
@@ -108,14 +108,14 @@ impl BulletReductionProof {
         .iter()
         .chain(iter::once(&c_R))
         .chain(iter::once(blind_R))
-        .map(|s| *s)
+        .copied()
         .collect::<Vec<Scalar>>()
         .as_slice(),
       G_L
         .iter()
         .chain(iter::once(Q))
         .chain(iter::once(H))
-        .map(|p| *p)
+        .copied()
         .collect::<Vec<GroupElement>>()
         .as_slice(),
     );
@@ -248,13 +248,13 @@ impl BulletReductionProof {
         .iter()
         .chain(u_inv_sq.iter())
         .chain(iter::once(&Scalar::one()))
-        .map(|s| *s)
+        .copied()
         .collect::<Vec<Scalar>>()
         .as_slice(),
       Ls.iter()
         .chain(Rs.iter())
         .chain(iter::once(Gamma))
-        .map(|p| *p)
+        .copied()
         .collect::<Vec<GroupElement>>()
         .as_slice(),
     );
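All the `map(|x| *x)` calls in `BulletReductionProof` become `copied()`; the two are equivalent for `Copy` element types, as this small demonstration over `u64` shows:

// Both forms turn an iterator over &T into an iterator over T when T: Copy.
use std::iter;

fn main() {
  let a = [1u64, 2, 3];
  let extra = 4u64;

  let via_map: Vec<u64> = a.iter().chain(iter::once(&extra)).map(|x| *x).collect();
  let via_copied: Vec<u64> = a.iter().chain(iter::once(&extra)).copied().collect();

  assert_eq!(via_map, via_copied);
  println!("{:?}", via_copied);
}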
@@ -128,8 +128,8 @@ impl EqualityProof {
   ) -> Result<(), ProofVerifyError> {
     // transcript.append_protocol_name(EqualityProof::protocol_name());
 
-    transcript.append_point(&C1);
-    transcript.append_point(&C2);
+    transcript.append_point(C1);
+    transcript.append_point(C2);
     transcript.append_point(&self.alpha);
 
     let c = transcript.challenge_scalar();
@@ -351,7 +351,7 @@ impl DotProductProof {
     let Cy = y.commit(blind_y, gens_1).compress();
     Cy.append_to_poseidon(transcript);
 
-    transcript.append_scalar_vector(&a_vec.to_vec());
+    transcript.append_scalar_vector(a_vec);
 
     let delta = d_vec.commit(&r_delta, gens_n).compress();
     delta.append_to_poseidon(transcript);
@@ -398,7 +398,7 @@ impl DotProductProof {
     // transcript.append_protocol_name(DotProductProof::protocol_name());
     Cx.append_to_poseidon(transcript);
     Cy.append_to_poseidon(transcript);
-    transcript.append_scalar_vector(&a.to_vec());
+    transcript.append_scalar_vector(a);
     self.delta.append_to_poseidon(transcript);
     self.beta.append_to_poseidon(transcript);
 
@@ -484,7 +484,7 @@ impl DotProductProofLog {
 
     let Cy = y.commit(blind_y, &gens.gens_1).compress();
     transcript.append_point(&Cy);
-    transcript.append_scalar_vector(&a_vec.to_vec());
+    transcript.append_scalar_vector(a_vec);
 
     let blind_Gamma = (*blind_x) + blind_y;
     let (bullet_reduction_proof, _Gamma_hat, x_hat, a_hat, g_hat, rhat_Gamma) =
@@ -548,9 +548,9 @@ impl DotProductProofLog {
     // Cy.append_to_poseidon( transcript);
     // a.append_to_poseidon( transcript);
 
-    transcript.append_point(&Cx);
-    transcript.append_point(&Cy);
-    transcript.append_scalar_vector(&a.to_vec());
+    transcript.append_point(Cx);
+    transcript.append_point(Cy);
+    transcript.append_scalar_vector(a);
 
     let Gamma = Cx.unpack()? + Cy.unpack()?;
 
@@ -4,7 +4,6 @@ use super::scalar::Scalar;
 
 // use ark_r1cs_std::prelude::*;
 use ark_sponge::{
-  constraints::CryptographicSpongeVar,
   poseidon::{PoseidonParameters, PoseidonSponge},
   CryptographicSponge,
 };
@@ -21,14 +20,14 @@ impl PoseidonTranscript {
   pub fn new(params: &PoseidonParameters<Fr>) -> Self {
     let sponge = PoseidonSponge::new(params);
     PoseidonTranscript {
-      sponge: sponge,
+      sponge,
       params: params.clone(),
     }
   }
 
   pub fn new_from_state(&mut self, challenge: &Scalar) {
     self.sponge = PoseidonSponge::new(&self.params);
-    self.append_scalar(&challenge);
+    self.append_scalar(challenge);
   }
 
   pub fn append_u64(&mut self, x: u64) {
@@ -47,20 +46,18 @@ impl PoseidonTranscript {
     self.sponge.absorb(&point.0);
   }
 
-  pub fn append_scalar_vector(&mut self, scalars: &Vec<Scalar>) {
+  pub fn append_scalar_vector(&mut self, scalars: &[Scalar]) {
     for scalar in scalars.iter() {
-      self.append_scalar(&scalar);
+      self.append_scalar(scalar);
     }
   }
 
   pub fn challenge_scalar(&mut self) -> Scalar {
-    let scalar = self.sponge.squeeze_field_elements(1).remove(0);
-    scalar
+    self.sponge.squeeze_field_elements(1).remove(0)
   }
 
   pub fn challenge_vector(&mut self, len: usize) -> Vec<Scalar> {
-    let challenges = self.sponge.squeeze_field_elements(len);
-    challenges
+    self.sponge.squeeze_field_elements(len)
   }
 }
 
@@ -6,7 +6,6 @@ use super::dense_mlpoly::EqPolynomial;
 use super::math::Math;
 use super::scalar::Scalar;
 use super::sumcheck::SumcheckInstanceProof;
-use super::transcript::ProofTranscript;
 use ark_serialize::*;
 use ark_std::One;
 
@@ -17,7 +17,6 @@ use ark_std::{One, UniformRand, Zero};
 use digest::{ExtendableOutput, Input};
 
 use merlin::Transcript;
-use serde::Serialize;
 use sha3::Shake256;
 
 #[derive(Debug, CanonicalSerialize, CanonicalDeserialize, Clone)]
@@ -167,7 +166,7 @@ impl R1CSInstance {
     shake.input(bytes);
     let mut reader = shake.xof_result();
     let mut buf = [0u8; 256];
-    reader.read(&mut buf).unwrap();
+    reader.read_exact(&mut buf).unwrap();
     buf.to_vec()
   }
 
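The switch from `read` to `read_exact` when squeezing 256 bytes out of the SHAKE reader removes a silent short-read hazard: `read` may return after filling only part of the buffer, while `read_exact` either fills it completely or errors. A std-only illustration with an in-memory reader standing in for the XOF output:

use std::io::Read;

fn main() {
  let data = vec![7u8; 256];

  let mut buf = [0u8; 256];
  let mut reader: &[u8] = &data;
  // read_exact guarantees all 256 bytes are filled (or an error is returned).
  reader.read_exact(&mut buf).unwrap();
  assert!(buf.iter().all(|&b| b == 7));

  // By contrast, a plain `read` is allowed to stop after any positive number
  // of bytes; its return value must be checked by the caller.
  let mut reader2: &[u8] = &data;
  let n = reader2.read(&mut buf).unwrap();
  assert!(n <= 256);
}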
@@ -11,16 +11,12 @@ use ark_bw6_761::BW6_761 as P;
 use super::commitments::MultiCommitGens;
 use super::dense_mlpoly::{DensePolynomial, EqPolynomial, PolyCommitmentGens};
 use super::errors::ProofVerifyError;
-use super::group::{
-  CompressGroupElement, DecompressGroupElement, GroupElement, VartimeMultiscalarMul,
-};
-use super::nizk::{EqualityProof, KnowledgeProof, ProductProof};
 use super::r1csinstance::R1CSInstance;
 
 use super::scalar::Scalar;
 use super::sparse_mlpoly::{SparsePolyEntry, SparsePolynomial};
 use super::timer::Timer;
-use super::transcript::ProofTranscript;
 use ark_crypto_primitives::{CircuitSpecificSetupSNARK, SNARK};
 
 use ark_groth16::Groth16;
@@ -141,7 +137,7 @@ impl R1CSProof {
     let c = transcript.challenge_scalar();
     transcript.new_from_state(&c);
 
-    transcript.append_scalar_vector(&input.to_vec());
+    transcript.append_scalar_vector(input);
 
     let poly_vars = DensePolynomial::new(vars.clone());
 
@@ -219,7 +215,7 @@ impl R1CSProof {
     timer_sc_proof_phase2.stop();
 
     let timer_polyeval = Timer::new("polyeval");
-    let eval_vars_at_ry = poly_vars.evaluate(&ry[1..].to_vec());
+    let eval_vars_at_ry = poly_vars.evaluate(&ry[1..]);
     timer_polyeval.stop();
 
     timer_prove.stop();
@@ -259,8 +255,8 @@ impl R1CSProof {
       SparsePolynomial::new(n.log_2() as usize, input_as_sparse_poly_entries);
 
     let config = VerifierConfig {
-      num_vars: num_vars,
-      num_cons: num_cons,
+      num_vars,
+      num_cons,
       input: input.to_vec(),
       evals: *evals,
       params: poseidon_params(),
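The collapsed `num_vars: num_vars` style initializers here and throughout this commit use Rust's field-init shorthand; a tiny self-contained example (the `Config` struct is a stand-in, not the real `VerifierConfig`):

struct Config {
  num_vars: usize,
  num_cons: usize,
}

fn main() {
  let num_vars = 16;
  let num_cons = 8;

  // When a local variable has the same name as the field, `num_vars,`
  // is equivalent to `num_vars: num_vars,`.
  let c = Config { num_vars, num_cons };
  assert_eq!(c.num_vars, 16);
  assert_eq!(c.num_cons, 8);
}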
@@ -269,7 +265,7 @@ impl R1CSProof {
       polys_sc1: self.sc_proof_phase1.polys.clone(),
       polys_sc2: self.sc_proof_phase2.polys.clone(),
       eval_vars_at_ry: self.eval_vars_at_ry,
-      input_as_sparse_poly: input_as_sparse_poly,
+      input_as_sparse_poly,
     };
 
     let mut rng = ark_std::test_rng();
@@ -283,7 +279,7 @@ impl R1CSProof {
     let ds = start.elapsed().as_millis();
 
     let start = Instant::now();
-    let proof = Groth16::<P>::prove(&pk, circuit.clone(), &mut rng).unwrap();
+    let proof = Groth16::<P>::prove(&pk, circuit, &mut rng).unwrap();
     let dp2 = start.elapsed().as_millis();
 
     let start = Instant::now();
@@ -317,8 +313,8 @@ impl R1CSProof {
       SparsePolynomial::new(n.log_2() as usize, input_as_sparse_poly_entries);
 
     let config = VerifierConfig {
-      num_vars: num_vars,
-      num_cons: num_cons,
+      num_vars,
+      num_cons,
       input: input.to_vec(),
       evals: *evals,
       params: poseidon_params(),
@@ -327,7 +323,7 @@ impl R1CSProof {
       polys_sc1: self.sc_proof_phase1.polys.clone(),
       polys_sc2: self.sc_proof_phase2.polys.clone(),
       eval_vars_at_ry: self.eval_vars_at_ry,
-      input_as_sparse_poly: input_as_sparse_poly,
+      input_as_sparse_poly,
     };
 
     let mut rng = ark_std::test_rng();
@@ -335,7 +331,7 @@ impl R1CSProof {
 
     let nc_inner = verify_constraints_inner(circuit.clone(), &num_cons);
 
-    let nc_outer = verify_constraints_outer(circuit.clone(), &num_cons);
+    let nc_outer = verify_constraints_outer(circuit, &num_cons);
     Ok(nc_inner + nc_outer)
   }
 }
@@ -13,12 +13,11 @@ use super::product_tree::{DotProductCircuit, ProductCircuit, ProductCircuitEvalP
 use super::random::RandomTape;
 use super::scalar::Scalar;
 use super::timer::Timer;
-use super::transcript::{AppendToTranscript, ProofTranscript};
+use super::transcript::AppendToTranscript;
 use ark_ff::{Field, One, Zero};
 use ark_serialize::*;
 use core::cmp::Ordering;
 use merlin::Transcript;
-use serde::Serialize;
 
 #[derive(Debug, CanonicalSerialize, CanonicalDeserialize, Clone)]
 pub struct SparseMatEntry {
@@ -1650,11 +1649,7 @@ impl SparsePolynomial {
 
 #[cfg(test)]
 mod tests {
-<<<<<<< HEAD
   use crate::parameters::poseidon_params;
-=======
-  use crate::{parameters::poseidon_params};
->>>>>>> implement spartan verifier as a circuit and verify it with groth16
 
   use super::*;
   use ark_std::UniformRand;
@@ -1716,7 +1711,6 @@ mod tests {
     );
 
     let mut verifier_transcript = PoseidonTranscript::new(&params);
-    (b"example");
     assert!(proof
       .verify(
         &poly_comm,
@@ -6,7 +6,6 @@ use super::dense_mlpoly::DensePolynomial;
 use super::errors::ProofVerifyError;
 
 use super::scalar::Scalar;
-use super::transcript::ProofTranscript;
 use super::unipoly::UniPoly;
 
 use ark_ff::Zero;
@@ -18,7 +18,7 @@ impl ProofTranscript for Transcript {
   }
 
   fn append_scalar(&mut self, label: &'static [u8], scalar: &Scalar) {
-    self.append_message(label, &scalar.into_repr().to_bytes_le().as_slice());
+    self.append_message(label, scalar.into_repr().to_bytes_le().as_slice());
   }
 
   fn append_point(&mut self, label: &'static [u8], point: &CompressedGroup) {