
implement alternative transcript with poseidon backend

Branch: master
Author: Mara Mihali, 2 years ago
Commit: e7242a7b54
20 changed files with 1013 additions and 842 deletions
 1. Cargo.toml            +9    -1
 2. README.md             +18   -9
 3. benches/nizk.rs       +9    -5
 4. benches/snark.rs      +12   -4
 5. examples/cubic.rs     +12   -2
 6. profiler/nizk.rs      +6    -3
 7. profiler/snark.rs     +7    -4
 8. src/commitments.rs    +12   -40
 9. src/dense_mlpoly.rs   +34   -16
10. src/group.rs          +3    -7
11. src/lib.rs            +47   -21
12. src/nizk/bullet.rs    +10   -9
13. src/nizk/mod.rs       +98   -85
14. src/parameters.rs     +33   -1
15. src/product_tree.rs   +30   -30
16. src/r1csinstance.rs   +12   -2
17. src/r1csproof.rs      +26   -21
18. src/sparse_mlpoly.rs  +129  -90
19. src/sumcheck.rs       +491  -489
20. src/unipoly.rs        +15   -3

Cargo.toml (+9, -1)

@@ -33,6 +33,11 @@ ark-std = { version = "^0.3.0"}
 ark-bls12-377 = { version = "^0.3.0", features = ["r1cs","curve"] }
 ark-serialize = { version = "^0.3.0", features = ["derive"] }
 ark-sponge = { version = "^0.3.0" , features = ["r1cs"] }
+ark-crypto-primitives = { version = "^0.3.0", default-features = true }
+ark-r1cs-std = { version = "^0.3.0", default-features = false }
+ark-nonnative-field = { version = "0.3.0", default-features = false }
+ark-relations = { version = "^0.3.0", default-features = false }
+ark-snark = { version = "^0.3.0", default-features = false }
 lazy_static = "1.4.0"
 rand = { version = "0.8", features = [ "std", "std_rng" ] }

@@ -64,4 +69,7 @@ harness = false
 [features]
 default = ["curve25519-dalek/simd_backend"]
 multicore = ["rayon"]
-profile = []
+profile = []
+
+[patch.crates-io]
+ark-r1cs-std = { git = "https://github.com/arkworks-rs/r1cs-std/", rev = "a2a5ac491ae005ba2afd03fd21b7d3160d794a83"}

README.md (+18, -9)

@@ -43,7 +43,8 @@ Some of our public APIs' style is inspired by the underlying crates we use.
 # extern crate libspartan;
 # extern crate merlin;
 # use libspartan::{Instance, SNARKGens, SNARK};
-# use merlin::Transcript;
+# use libspartan::poseidon_transcript::PoseidonTranscript;
+# use libspartan::parameters::poseidon_params;
 # fn main() {
 // specify the size of an R1CS instance
 let num_vars = 1024;
@@ -60,12 +61,14 @@ Some of our public APIs' style is inspired by the underlying crates we use.
 // create a commitment to the R1CS instance
 let (comm, decomm) = SNARK::encode(&inst, &gens);

+let params = poseidon_params();
+
 // produce a proof of satisfiability
-let mut prover_transcript = Transcript::new(b"snark_example");
+let mut prover_transcript = PoseidonTranscript::new(&params);
 let proof = SNARK::prove(&inst, &comm, &decomm, vars, &inputs, &gens, &mut prover_transcript);

 // verify the proof of satisfiability
-let mut verifier_transcript = Transcript::new(b"snark_example");
+let mut verifier_transcript = PoseidonTranscript::new(&params);
 assert!(proof
   .verify(&comm, &inputs, &mut verifier_transcript, &gens)
   .is_ok());
@@ -79,7 +82,8 @@ Here is another example to use the NIZK variant of the Spartan proof system:
 # extern crate libspartan;
 # extern crate merlin;
 # use libspartan::{Instance, NIZKGens, NIZK};
-# use merlin::Transcript;
+# use libspartan::poseidon_transcript::PoseidonTranscript;
+# use libspartan::parameters::poseidon_params;
 # fn main() {
 // specify the size of an R1CS instance
 let num_vars = 1024;
@@ -92,12 +96,14 @@ Here is another example to use the NIZK variant of the Spartan proof system:
 // ask the library to produce a synthentic R1CS instance
 let (inst, vars, inputs) = Instance::produce_synthetic_r1cs(num_cons, num_vars, num_inputs);

+let params = poseidon_params();
+
 // produce a proof of satisfiability
-let mut prover_transcript = Transcript::new(b"nizk_example");
+let mut prover_transcript = PoseidonTranscript::new(&params);
 let proof = NIZK::prove(&inst, vars, &inputs, &gens, &mut prover_transcript);

 // verify the proof of satisfiability
-let mut verifier_transcript = Transcript::new(b"nizk_example");
+let mut verifier_transcript = PoseidonTranscript::new(&params);
 assert!(proof
   .verify(&inst, &inputs, &mut verifier_transcript, &gens)
   .is_ok());
@@ -114,8 +120,10 @@ Finally, we provide an example that specifies a custom R1CS instance instead of
 # extern crate merlin;
 # mod scalar;
 # use scalar::Scalar;
+# use libspartan::parameters::poseidon_params;
 # use libspartan::{InputsAssignment, Instance, SNARKGens, VarsAssignment, SNARK};
-# use merlin::Transcript;
+# use libspartan::poseidon_transcript::{AppendToPoseidon, PoseidonTranscript};
+#
 # use ark_ff::{PrimeField, Field, BigInteger};
 # use ark_std::{One, Zero, UniformRand};
 # fn main() {
@@ -135,9 +143,10 @@ Finally, we provide an example that specifies a custom R1CS instance instead of
 // create a commitment to the R1CS instance
 let (comm, decomm) = SNARK::encode(&inst, &gens);

+let params = poseidon_params();
+
 // produce a proof of satisfiability
-let mut prover_transcript = Transcript::new(b"snark_example");
+let mut prover_transcript = PoseidonTranscript::new(&params);
 let proof = SNARK::prove(
   &inst,
   &comm,
@@ -149,7 +158,7 @@ Finally, we provide an example that specifies a custom R1CS instance instead of
 );

 // verify the proof of satisfiability
-let mut verifier_transcript = Transcript::new(b"snark_example");
+let mut verifier_transcript = PoseidonTranscript::new(&params);
 assert!(proof
   .verify(&comm, &assignment_inputs, &mut verifier_transcript, &gens)
   .is_ok());
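
For quick reference, here is the complete post-change SNARK flow from the README in one piece. This is a minimal sketch assembled from the snippets in this diff: the instance sizes and the value of num_non_zero_entries are illustrative assumptions, while poseidon_params(), PoseidonTranscript, and the SNARK calls are exactly the ones exercised above.

extern crate libspartan;

use libspartan::parameters::poseidon_params;
use libspartan::poseidon_transcript::PoseidonTranscript;
use libspartan::{Instance, SNARKGens, SNARK};

fn main() {
  // specify the size of an R1CS instance
  let num_vars = 1024;
  let num_cons = 1024;
  let num_inputs = 10;
  let num_non_zero_entries = 1024;

  // produce public parameters
  let gens = SNARKGens::new(num_cons, num_vars, num_inputs, num_non_zero_entries);

  // ask the library to produce a synthetic R1CS instance
  let (inst, vars, inputs) = Instance::produce_synthetic_r1cs(num_cons, num_vars, num_inputs);

  // create a commitment to the R1CS instance
  let (comm, decomm) = SNARK::encode(&inst, &gens);

  // the Poseidon parameters seed both the prover and the verifier transcript
  let params = poseidon_params();

  // produce a proof of satisfiability
  let mut prover_transcript = PoseidonTranscript::new(&params);
  let proof = SNARK::prove(&inst, &comm, &decomm, vars, &inputs, &gens, &mut prover_transcript);

  // verify the proof of satisfiability
  let mut verifier_transcript = PoseidonTranscript::new(&params);
  assert!(proof
    .verify(&comm, &inputs, &mut verifier_transcript, &gens)
    .is_ok());
}

From the caller's perspective the only change is that both transcripts are constructed from the same Poseidon parameters instead of a merlin domain-separation label.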

benches/nizk.rs (+9, -5)

@@ -7,7 +7,9 @@ extern crate libspartan;
 extern crate merlin;
 extern crate sha3;

-use libspartan::{Instance, NIZKGens, NIZK};
+use libspartan::{
+  parameters::poseidon_params, poseidon_transcript::PoseidonTranscript, Instance, NIZKGens, NIZK,
+};
 use merlin::Transcript;

 use criterion::*;
@@ -22,6 +24,8 @@ fn nizk_prove_benchmark(c: &mut Criterion) {
   let num_cons = num_vars;
   let num_inputs = 10;

+  let params = poseidon_params();
+
   let (inst, vars, inputs) = Instance::produce_synthetic_r1cs(num_cons, num_vars, num_inputs);
   let gens = NIZKGens::new(num_cons, num_vars, num_inputs);
@@ -29,7 +33,7 @@ fn nizk_prove_benchmark(c: &mut Criterion) {
   let name = format!("NIZK_prove_{}", num_vars);
   group.bench_function(&name, move |b| {
     b.iter(|| {
-      let mut prover_transcript = Transcript::new(b"example");
+      let mut prover_transcript = PoseidonTranscript::new(&params);
       NIZK::prove(
         black_box(&inst),
         black_box(vars.clone()),
@@ -55,15 +59,15 @@ fn nizk_verify_benchmark(c: &mut Criterion) {
   let (inst, vars, inputs) = Instance::produce_synthetic_r1cs(num_cons, num_vars, num_inputs);
   let gens = NIZKGens::new(num_cons, num_vars, num_inputs);

+  let params = poseidon_params();
   // produce a proof of satisfiability
-  let mut prover_transcript = Transcript::new(b"example");
+  let mut prover_transcript = PoseidonTranscript::new(&params);
   let proof = NIZK::prove(&inst, vars, &inputs, &gens, &mut prover_transcript);

   let name = format!("NIZK_verify_{}", num_cons);
   group.bench_function(&name, move |b| {
     b.iter(|| {
-      let mut verifier_transcript = Transcript::new(b"example");
+      let mut verifier_transcript = PoseidonTranscript::new(&params);
       assert!(proof
         .verify(
           black_box(&inst),

benches/snark.rs (+12, -4)

@@ -2,7 +2,11 @@
 extern crate libspartan;
 extern crate merlin;

-use libspartan::{Instance, SNARKGens, SNARK};
+use libspartan::{
+  parameters::poseidon_params,
+  poseidon_transcript::{self, PoseidonTranscript},
+  Instance, SNARKGens, SNARK,
+};
 use merlin::Transcript;

 use criterion::*;
@@ -42,6 +46,8 @@ fn snark_prove_benchmark(c: &mut Criterion) {
   let num_cons = num_vars;
   let num_inputs = 10;

+  let params = poseidon_params();
+
   let (inst, vars, inputs) = Instance::produce_synthetic_r1cs(num_cons, num_vars, num_inputs);

   // produce public parameters
@@ -54,7 +60,7 @@
   let name = format!("SNARK_prove_{}", num_cons);
   group.bench_function(&name, move |b| {
     b.iter(|| {
-      let mut prover_transcript = Transcript::new(b"example");
+      let mut prover_transcript = PoseidonTranscript::new(&params);
       SNARK::prove(
         black_box(&inst),
         black_box(&comm),
@@ -76,6 +82,8 @@ fn snark_verify_benchmark(c: &mut Criterion) {
   let mut group = c.benchmark_group("SNARK_verify_benchmark");
   group.plot_config(plot_config);

+  let params = poseidon_params();
+
   let num_vars = (2_usize).pow(s as u32);
   let num_cons = num_vars;
   let num_inputs = 10;
@@ -88,7 +96,7 @@
   let (comm, decomm) = SNARK::encode(&inst, &gens);

   // produce a proof of satisfiability
-  let mut prover_transcript = Transcript::new(b"example");
+  let mut prover_transcript = PoseidonTranscript::new(&params);
   let proof = SNARK::prove(
     &inst,
     &comm,
@@ -103,7 +111,7 @@
   let name = format!("SNARK_verify_{}", num_cons);
   group.bench_function(&name, move |b| {
     b.iter(|| {
-      let mut verifier_transcript = Transcript::new(b"example");
+      let mut verifier_transcript = PoseidonTranscript::new(&params);
       assert!(proof
         .verify(
           black_box(&comm),

examples/cubic.rs (+12, -2)

@@ -11,7 +11,15 @@
 use ark_bls12_377::Fr as Scalar;
 use ark_ff::{BigInteger, PrimeField};
 use ark_std::{One, UniformRand, Zero};
+<<<<<<< HEAD
 use libspartan::{InputsAssignment, Instance, SNARKGens, VarsAssignment, SNARK};
+=======
+use libspartan::{
+  parameters::poseidon_params,
+  poseidon_transcript::{self, PoseidonTranscript},
+  InputsAssignment, Instance, SNARKGens, VarsAssignment, SNARK,
+};
+>>>>>>> implement alternative transcript with poseidon backend
 use merlin::Transcript;

 #[allow(non_snake_case)]
@@ -119,6 +127,8 @@ fn main() {
     assignment_inputs,
   ) = produce_r1cs();

+  let params = poseidon_params();
+
   // produce public parameters
   let gens = SNARKGens::new(num_cons, num_vars, num_inputs, num_non_zero_entries);
@@ -126,7 +136,7 @@
   let (comm, decomm) = SNARK::encode(&inst, &gens);

   // produce a proof of satisfiability
-  let mut prover_transcript = Transcript::new(b"snark_example");
+  let mut prover_transcript = PoseidonTranscript::new(&params);
   let proof = SNARK::prove(
     &inst,
     &comm,
@@ -138,7 +148,7 @@
   );

   // verify the proof of satisfiability
-  let mut verifier_transcript = Transcript::new(b"snark_example");
+  let mut verifier_transcript = PoseidonTranscript::new(&params);
   assert!(proof
     .verify(&comm, &assignment_inputs, &mut verifier_transcript, &gens)
     .is_ok());

profiler/nizk.rs (+6, -3)

@@ -6,9 +6,11 @@ extern crate libspartan;
 extern crate merlin;
 extern crate rand;

+use ark_serialize::*;
+use libspartan::parameters::poseidon_params;
+use libspartan::poseidon_transcript::PoseidonTranscript;
 use libspartan::{Instance, NIZKGens, NIZK};
 use merlin::Transcript;
-use ark_serialize::*;

 fn print(msg: &str) {
   let star = "* ";
@@ -31,8 +33,9 @@ pub fn main() {
   // produce public generators
   let gens = NIZKGens::new(num_cons, num_vars, num_inputs);

+  let params = poseidon_params();
   // produce a proof of satisfiability
-  let mut prover_transcript = Transcript::new(b"nizk_example");
+  let mut prover_transcript = PoseidonTranscript::new(&params);
   let proof = NIZK::prove(&inst, vars, &inputs, &gens, &mut prover_transcript);

   let mut proof_encoded = Vec::new();
@@ -41,7 +44,7 @@ pub fn main() {
   print(&msg_proof_len);

   // verify the proof of satisfiability
-  let mut verifier_transcript = Transcript::new(b"nizk_example");
+  let mut verifier_transcript = PoseidonTranscript::new(&params);
   assert!(proof
     .verify(&inst, &inputs, &mut verifier_transcript, &gens)
     .is_ok());
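
The profiler exercises the NIZK path plus proof encoding; the sketch below shows that flow end to end. It is a rough sketch rather than the profiler itself: the NIZK and PoseidonTranscript calls are the ones visible in this diff, while the serialize call assumes the proof type implements ark-serialize's CanonicalSerialize (the profiler imports ark_serialize::* and builds proof_encoded for the same purpose).

extern crate libspartan;

use ark_serialize::*;
use libspartan::parameters::poseidon_params;
use libspartan::poseidon_transcript::PoseidonTranscript;
use libspartan::{Instance, NIZKGens, NIZK};

fn main() {
  // synthetic R1CS instance, same shape as in the profiler
  let num_vars = 1024;
  let num_cons = num_vars;
  let num_inputs = 10;
  let (inst, vars, inputs) = Instance::produce_synthetic_r1cs(num_cons, num_vars, num_inputs);

  // produce public generators
  let gens = NIZKGens::new(num_cons, num_vars, num_inputs);

  // Poseidon parameters seed both transcripts
  let params = poseidon_params();

  // produce a proof of satisfiability
  let mut prover_transcript = PoseidonTranscript::new(&params);
  let proof = NIZK::prove(&inst, vars, &inputs, &gens, &mut prover_transcript);

  // measure the encoded proof size (assumes NIZK implements CanonicalSerialize)
  let mut proof_encoded = Vec::new();
  proof.serialize(&mut proof_encoded).unwrap();
  println!("NIZK proof size: {} bytes", proof_encoded.len());

  // verify the proof of satisfiability
  let mut verifier_transcript = PoseidonTranscript::new(&params);
  assert!(proof
    .verify(&inst, &inputs, &mut verifier_transcript, &gens)
    .is_ok());
}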

profiler/snark.rs (+7, -4)

@@ -5,9 +5,10 @@ extern crate flate2;
 extern crate libspartan;
 extern crate merlin;

-use libspartan::{Instance, SNARKGens, SNARK};
-use merlin::Transcript;
 use ark_serialize::*;
+use libspartan::parameters::poseidon_params;
+use libspartan::poseidon_transcript::PoseidonTranscript;
+use libspartan::{Instance, SNARKGens, SNARK};

 fn print(msg: &str) {
   let star = "* ";
@@ -33,8 +34,10 @@ pub fn main() {
   // create a commitment to R1CSInstance
   let (comm, decomm) = SNARK::encode(&inst, &gens);

+  let params = poseidon_params();
+
   // produce a proof of satisfiability
-  let mut prover_transcript = Transcript::new(b"snark_example");
+  let mut prover_transcript = PoseidonTranscript::new(&params);
   let proof = SNARK::prove(
     &inst,
     &comm,
@@ -51,7 +54,7 @@ pub fn main() {
   print(&msg_proof_len);

   // verify the proof of satisfiability
-  let mut verifier_transcript = Transcript::new(b"snark_example");
+  let mut verifier_transcript = PoseidonTranscript::new(&params);
   assert!(proof
     .verify(&comm, &inputs, &mut verifier_transcript, &gens)
     .is_ok());

src/commitments.rs (+12, -40)

@@ -1,17 +1,16 @@
+use super::group::{Fq, GroupElement, GroupElementAffine, VartimeMultiscalarMul, GROUP_BASEPOINT};
+use super::scalar::Scalar;
 use crate::group::{CompressGroupElement, DecompressGroupElement};
 use crate::parameters::*;
-use super::group::{GroupElement, VartimeMultiscalarMul, GROUP_BASEPOINT, GroupElementAffine, CurveField};
-use super::scalar::Scalar;
-use ark_bls12_377::Fq;
+use ark_ec::{AffineCurve, ProjectiveCurve};
 use ark_ff::PrimeField;
+use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
+use ark_sponge::poseidon::{PoseidonParameters, PoseidonSponge};
 use ark_sponge::CryptographicSponge;
 use digest::{ExtendableOutput, Input};
 use sha3::Shake256;
 use std::io::Read;
 use std::str::FromStr;
-use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
-use ark_ec::{ProjectiveCurve, AffineCurve};
-use ark_sponge::poseidon::{PoseidonParameters, PoseidonSponge};

 #[derive(Debug)]
 pub struct MultiCommitGens {
@@ -21,47 +20,22 @@
 }

 impl MultiCommitGens {
-  pub fn poseidon_params() -> PoseidonParameters<CurveField> {
-    let arks = P1["ark"]
-      .members()
-      .map(|ark| {
-        ark.members()
-          .map(|v| Fq::from_str(v.as_str().unwrap()).unwrap())
-          .collect::<Vec<_>>()
-      })
-      .collect::<Vec<_>>();
-    let mds = P1["mds"]
-      .members()
-      .map(|m| {
-        m.members()
-          .map(|v| Fq::from_str(v.as_str().unwrap()).unwrap())
-          .collect::<Vec<_>>()
-      })
-      .collect::<Vec<_>>();
-    PoseidonParameters::new(
-      P1["full_rounds"].as_u32().unwrap(),
-      P1["partial_rounds"].as_u32().unwrap(),
-      P1["alpha"].as_u64().unwrap(),
-      mds,
-      arks,
-    )
-  }
-
-  pub fn new(n: usize, label: &[u8]) -> Self {
-    let params = MultiCommitGens::poseidon_params();
+  pub fn new(n: usize, label: &[u8]) -> Self {
+    let params = poseidon_params();
     let mut sponge = PoseidonSponge::new(&params);
     sponge.absorb(&label);
     sponge.absorb(&GROUP_BASEPOINT.into_affine());
     let mut gens: Vec<GroupElement> = Vec::new();
     for _ in 0..n + 1 {
       let mut el_aff: Option<GroupElementAffine> = None;
       while el_aff.is_some() != true {
         let uniform_bytes = sponge.squeeze_bytes(64);
         el_aff = GroupElementAffine::from_random_bytes(&uniform_bytes);
       }
       let el = el_aff.unwrap().mul_by_cofactor_to_projective();
       gens.push(el);
     }

     MultiCommitGens {
       n,
@@ -111,7 +85,6 @@ impl Commitments for Vec {
   fn commit(&self, blind: &Scalar, gens_n: &MultiCommitGens) -> GroupElement {
     assert_eq!(gens_n.n, self.len());
     GroupElement::vartime_multiscalar_mul(self, &gens_n.G) + gens_n.h.mul(blind.into_repr())
-
   }
 }
@@ -119,6 +92,5 @@ impl Commitments for [Scalar] {
   fn commit(&self, blind: &Scalar, gens_n: &MultiCommitGens) -> GroupElement {
     assert_eq!(gens_n.n, self.len());
     GroupElement::vartime_multiscalar_mul(self, &gens_n.G) + gens_n.h.mul(blind.into_repr())
-
   }
 }
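
The Pedersen generators are still derived by rejection-sampling curve points out of a Poseidon sponge; this commit only moves poseidon_params into the now-public parameters module. For illustration, here is that derivation loop as a standalone function. This is a sketch under stated assumptions: sample_generators is a hypothetical free function (the crate does this inside MultiCommitGens::new, which also absorbs the group basepoint), and it relies on the arkworks 0.3 sponge/curve APIs and the libspartan::parameters::poseidon_params seen in this diff.

use ark_bls12_377::{G1Affine, G1Projective};
use ark_ec::AffineCurve;
use ark_sponge::poseidon::PoseidonSponge;
use ark_sponge::CryptographicSponge;
use libspartan::parameters::poseidon_params;

// Derive `n + 1` generators from a label by squeezing bytes out of a Poseidon
// sponge until they decode to a valid point, then clearing the cofactor.
fn sample_generators(n: usize, label: &[u8]) -> Vec<G1Projective> {
  let params = poseidon_params();
  let mut sponge = PoseidonSponge::new(&params);
  sponge.absorb(&label);

  let mut gens = Vec::with_capacity(n + 1);
  for _ in 0..n + 1 {
    let mut el_aff: Option<G1Affine> = None;
    while el_aff.is_none() {
      let uniform_bytes = sponge.squeeze_bytes(64);
      el_aff = G1Affine::from_random_bytes(&uniform_bytes);
    }
    gens.push(el_aff.unwrap().mul_by_cofactor_to_projective());
  }
  gens
}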

src/dense_mlpoly.rs (+34, -16)

@@ -1,18 +1,21 @@
 #![allow(clippy::too_many_arguments)]
+use crate::poseidon_transcript::{AppendToPoseidon, PoseidonTranscript};
 use super::commitments::{Commitments, MultiCommitGens};
 use super::errors::ProofVerifyError;
-use super::group::{GroupElement, CompressedGroup, VartimeMultiscalarMul, CompressGroupElement, DecompressGroupElement};
+use super::group::{
+  CompressGroupElement, CompressedGroup, DecompressGroupElement, GroupElement,
+  VartimeMultiscalarMul,
+};
 use super::math::Math;
 use super::nizk::{DotProductProofGens, DotProductProofLog};
 use super::random::RandomTape;
 use super::scalar::Scalar;
 use super::transcript::{AppendToTranscript, ProofTranscript};
+use ark_ff::{One, Zero};
+use ark_serialize::*;
 use core::ops::Index;
 use merlin::Transcript;
-use ark_serialize::*;
-use ark_ff::{One,Zero};
 #[cfg(feature = "multicore")]
 use rayon::prelude::*;
@@ -299,6 +302,14 @@ impl AppendToTranscript for PolyCommitment {
   }
 }

+impl AppendToPoseidon for PolyCommitment {
+  fn append_to_poseidon(&self, transcript: &mut PoseidonTranscript) {
+    for i in 0..self.C.len() {
+      transcript.append_point(&self.C[i]);
+    }
+  }
+}
+
 #[derive(Debug, CanonicalSerialize, CanonicalDeserialize)]
 pub struct PolyEvalProof {
   proof: DotProductProofLog,
@@ -316,10 +327,10 @@ impl PolyEvalProof {
     Zr: &Scalar,                   // evaluation of \widetilde{Z}(r)
     blind_Zr_opt: Option<&Scalar>, // specifies a blind for Zr
     gens: &PolyCommitmentGens,
-    transcript: &mut Transcript,
+    transcript: &mut PoseidonTranscript,
     random_tape: &mut RandomTape,
   ) -> (PolyEvalProof, CompressedGroup) {
-    transcript.append_protocol_name(PolyEvalProof::protocol_name());
+    // transcript.append_protocol_name(PolyEvalProof::protocol_name());

     // assert vectors are of the right size
     assert_eq!(poly.get_num_vars(), r.len());
@@ -367,19 +378,23 @@
   pub fn verify(
     &self,
     gens: &PolyCommitmentGens,
-    transcript: &mut Transcript,
+    transcript: &mut PoseidonTranscript,
     r: &[Scalar],           // point at which the polynomial is evaluated
     C_Zr: &CompressedGroup, // commitment to \widetilde{Z}(r)
     comm: &PolyCommitment,
   ) -> Result<(), ProofVerifyError> {
-    transcript.append_protocol_name(PolyEvalProof::protocol_name());
+    // transcript.append_protocol_name(PolyEvalProof::protocol_name());

     // compute L and R
     let eq = EqPolynomial::new(r.to_vec());
     let (L, R) = eq.compute_factored_evals();

     // compute a weighted sum of commitments and L
-    let C_decompressed = comm.C.iter().map(|pt| GroupElement::decompress(pt).unwrap()).collect::<Vec<GroupElement>>();
+    let C_decompressed = comm
+      .C
+      .iter()
+      .map(|pt| GroupElement::decompress(pt).unwrap())
+      .collect::<Vec<GroupElement>>();
     let C_LZ = GroupElement::vartime_multiscalar_mul(&L, C_decompressed.as_slice()).compress();
@@ -391,7 +406,7 @@
   pub fn verify_plain(
     &self,
     gens: &PolyCommitmentGens,
-    transcript: &mut Transcript,
+    transcript: &mut PoseidonTranscript,
     r: &[Scalar],  // point at which the polynomial is evaluated
     Zr: &Scalar,   // evaluation \widetilde{Z}(r)
     comm: &PolyCommitment,
@@ -405,8 +420,10 @@
 #[cfg(test)]
 mod tests {
+  use crate::parameters::poseidon_params;
+
   use super::*;
-  use ark_std::{UniformRand};
+  use ark_std::UniformRand;

   fn evaluate_with_LR(Z: &[Scalar], r: &[Scalar]) -> Scalar {
     let eq = EqPolynomial::new(r.to_vec());
@@ -436,7 +453,7 @@
       Scalar::one(),
       Scalar::from(2),
       Scalar::from(1),
-      Scalar::from(4)
+      Scalar::from(4),
     ];

     // r = [4,3]
@@ -569,7 +586,7 @@
       Scalar::from(1),
       Scalar::from(2),
       Scalar::from(1),
-      Scalar::from(4)
+      Scalar::from(4),
     ];

     let poly = DensePolynomial::new(Z);
@@ -582,7 +599,8 @@
     let (poly_commitment, blinds) = poly.commit(&gens, None);

     let mut random_tape = RandomTape::new(b"proof");
-    let mut prover_transcript = Transcript::new(b"example");
+    let params = poseidon_params();
+    let mut prover_transcript = PoseidonTranscript::new(&params);
     let (proof, C_Zr) = PolyEvalProof::prove(
       &poly,
       Some(&blinds),
@@ -594,7 +612,7 @@
       &mut random_tape,
     );

-    let mut verifier_transcript = Transcript::new(b"example");
+    let mut verifier_transcript = PoseidonTranscript::new(&params);
     assert!(proof
       .verify(&gens, &mut verifier_transcript, &r, &C_Zr, &poly_commitment)
       .is_ok());

src/group.rs (+3, -7)

@@ -1,24 +1,20 @@
 use crate::errors::ProofVerifyError;
 use ark_ec::msm::VariableBaseMSM;
 use ark_ff::PrimeField;
-use digest::DynDigest;
-use lazy_static::lazy_static;
-use num_bigint::BigInt;
 use lazy_static::lazy_static;
 use num_bigint::BigInt;

 use super::scalar::Scalar;
 use ark_ec::{AffineCurve, ProjectiveCurve};
-use ark_ec::{AffineCurve, ProjectiveCurve};
-use ark_serialize::*;
 use ark_serialize::*;
 use core::borrow::Borrow;
-use core::ops::{Mul, MulAssign};

 pub type GroupElement = ark_bls12_377::G1Projective;
 pub type GroupElementAffine = ark_bls12_377::G1Affine;
-pub type CurveField = ark_bls12_377::Fq;
+pub type Fq = ark_bls12_377::Fq;
+pub type Fr = ark_bls12_377::Fr;

 #[derive(Clone, Eq, PartialEq, Hash, Debug, CanonicalSerialize, CanonicalDeserialize)]
 pub struct CompressedGroup(pub Vec<u8>);

src/lib.rs (+47, -21)

@@ -26,7 +26,6 @@ mod errors;
 mod group;
 mod math;
 mod nizk;
-mod parameters;
 mod product_tree;
 mod r1csinstance;
 mod r1csproof;
@@ -38,12 +37,18 @@ mod timer;
 mod transcript;
 mod unipoly;

+/// TODO
+pub mod parameters;
+/// TODO
+pub mod poseidon_transcript;
+
 use ark_ff::{BigInteger, Field, PrimeField};
 use ark_serialize::*;
 use ark_std::{One, UniformRand, Zero};
 use core::cmp::max;
 use errors::{ProofVerifyError, R1CSError};
 use merlin::Transcript;
+use poseidon_transcript::{AppendToPoseidon, PoseidonTranscript};
 use r1csinstance::{
   R1CSCommitment, R1CSCommitmentGens, R1CSDecommitment, R1CSEvalProof, R1CSInstance,
 };
@@ -354,7 +359,7 @@ impl SNARK {
     vars: VarsAssignment,
     inputs: &InputsAssignment,
     gens: &SNARKGens,
-    transcript: &mut Transcript,
+    transcript: &mut PoseidonTranscript,
   ) -> Self {
     let timer_prove = Timer::new("SNARK::prove");
@@ -362,8 +367,8 @@
     // to aid the prover produce its randomness
     let mut random_tape = RandomTape::new(b"proof");

-    transcript.append_protocol_name(SNARK::protocol_name());
-    comm.comm.append_to_transcript(b"comm", transcript);
+    // transcript.append_protocol_name(SNARK::protocol_name());
+    comm.comm.append_to_poseidon(transcript);

     let (r1cs_sat_proof, rx, ry) = {
       let (proof, rx, ry) = {
@@ -400,9 +405,9 @@
     let timer_eval = Timer::new("eval_sparse_polys");
     let inst_evals = {
       let (Ar, Br, Cr) = inst.inst.evaluate(&rx, &ry);
-      Ar.append_to_transcript(b"Ar_claim", transcript);
-      Br.append_to_transcript(b"Br_claim", transcript);
-      Cr.append_to_transcript(b"Cr_claim", transcript);
+      transcript.append_scalar(&Ar);
+      transcript.append_scalar(&Br);
+      transcript.append_scalar(&Cr);
       (Ar, Br, Cr)
     };
     timer_eval.stop();
@@ -437,14 +442,14 @@
     &self,
     comm: &ComputationCommitment,
     input: &InputsAssignment,
-    transcript: &mut Transcript,
+    transcript: &mut PoseidonTranscript,
     gens: &SNARKGens,
   ) -> Result<(), ProofVerifyError> {
     let timer_verify = Timer::new("SNARK::verify");
-    transcript.append_protocol_name(SNARK::protocol_name());
+    // transcript.append_protocol_name(SNARK::protocol_name());

     // append a commitment to the computation to the transcript
-    comm.comm.append_to_transcript(b"comm", transcript);
+    comm.comm.append_to_poseidon(transcript);

     let timer_sat_proof = Timer::new("verify_sat_proof");
     assert_eq!(input.assignment.len(), comm.comm.get_num_inputs());
@@ -460,9 +465,12 @@
     let timer_eval_proof = Timer::new("verify_eval_proof");
     let (Ar, Br, Cr) = &self.inst_evals;
-    Ar.append_to_transcript(b"Ar_claim", transcript);
-    Br.append_to_transcript(b"Br_claim", transcript);
-    Cr.append_to_transcript(b"Cr_claim", transcript);
+    // Ar.append_to_transcript(b"Ar_claim", transcript);
+    // Br.append_to_transcript(b"Br_claim", transcript);
+    // Cr.append_to_transcript(b"Cr_claim", transcript);
+    transcript.append_scalar(&Ar);
+    transcript.append_scalar(&Br);
+    transcript.append_scalar(&Cr);
     self.r1cs_eval_proof.verify(
       &comm.comm,
       &rx,
@@ -516,15 +524,20 @@ impl NIZK {
     vars: VarsAssignment,
     input: &InputsAssignment,
     gens: &NIZKGens,
-    transcript: &mut Transcript,
+    transcript: &mut PoseidonTranscript,
   ) -> Self {
     let timer_prove = Timer::new("NIZK::prove");
     // we create a Transcript object seeded with a random Scalar
     // to aid the prover produce its randomness
     let mut random_tape = RandomTape::new(b"proof");

+<<<<<<< HEAD
     transcript.append_protocol_name(NIZK::protocol_name());
     transcript.append_message(b"R1CSInstanceDigest", &inst.digest);
+=======
+    // transcript.append_protocol_name(NIZK::protocol_name());
+    inst.inst.append_to_poseidon(transcript);
+>>>>>>> simplify transcript and change merlin backend to poseidon

     let (r1cs_sat_proof, rx, ry) = {
       // we might need to pad variables
@@ -564,13 +577,18 @@
     &self,
     inst: &Instance,
     input: &InputsAssignment,
-    transcript: &mut Transcript,
+    transcript: &mut PoseidonTranscript,
     gens: &NIZKGens,
   ) -> Result<(), ProofVerifyError> {
     let timer_verify = Timer::new("NIZK::verify");

+<<<<<<< HEAD
     transcript.append_protocol_name(NIZK::protocol_name());
     transcript.append_message(b"R1CSInstanceDigest", &inst.digest);
+=======
+    // transcript.append_protocol_name(NIZK::protocol_name());
+    inst.inst.append_to_poseidon(transcript);
+>>>>>>> simplify transcript and change merlin backend to poseidon

     // We send evaluations of A, B, C at r = (rx, ry) as claims
     // to enable the verifier complete the first sum-check
@@ -602,6 +620,8 @@
 #[cfg(test)]
 mod tests {
+  use crate::parameters::poseidon_params;
+
   use super::*;
   use ark_ff::PrimeField;
@@ -620,8 +640,10 @@
     // create a commitment to R1CSInstance
     let (comm, decomm) = SNARK::encode(&inst, &gens);

+    let params = poseidon_params();
+
     // produce a proof
-    let mut prover_transcript = Transcript::new(b"example");
+    let mut prover_transcript = PoseidonTranscript::new(&params);
     let proof = SNARK::prove(
       &inst,
       &comm,
@@ -633,7 +655,7 @@
     );

     // verify the proof
-    let mut verifier_transcript = Transcript::new(b"example");
+    let mut verifier_transcript = PoseidonTranscript::new(&params);
     assert!(proof
       .verify(&comm, &inputs, &mut verifier_transcript, &gens)
       .is_ok());
@@ -732,8 +754,10 @@
     // create a commitment to the R1CS instance
     let (comm, decomm) = SNARK::encode(&inst, &gens);

+    let params = poseidon_params();
+
     // produce a SNARK
-    let mut prover_transcript = Transcript::new(b"snark_example");
+    let mut prover_transcript = PoseidonTranscript::new(&params);
     let proof = SNARK::prove(
       &inst,
       &comm,
@@ -745,7 +769,7 @@
     );

     // verify the SNARK
-    let mut verifier_transcript = Transcript::new(b"snark_example");
+    let mut verifier_transcript = PoseidonTranscript::new(&params);
     assert!(proof
       .verify(&comm, &assignment_inputs, &mut verifier_transcript, &gens)
       .is_ok());
@@ -753,8 +777,10 @@
     // NIZK public params
     let gens = NIZKGens::new(num_cons, num_vars, num_inputs);

+    let params = poseidon_params();
+
     // produce a NIZK
-    let mut prover_transcript = Transcript::new(b"nizk_example");
+    let mut prover_transcript = PoseidonTranscript::new(&params);
     let proof = NIZK::prove(
       &inst,
       assignment_vars,
@@ -764,7 +790,7 @@
     );

     // verify the NIZK
-    let mut verifier_transcript = Transcript::new(b"nizk_example");
+    let mut verifier_transcript = PoseidonTranscript::new(&params);
     assert!(proof
       .verify(&inst, &assignment_inputs, &mut verifier_transcript, &gens)
       .is_ok());

src/nizk/bullet.rs (+10, -9)

@@ -4,6 +4,7 @@
 #![allow(clippy::type_complexity)]
 #![allow(clippy::too_many_arguments)]
 use crate::math::Math;
+use crate::poseidon_transcript::PoseidonTranscript;

 use super::super::errors::ProofVerifyError;
 use super::super::group::{
@@ -38,7 +39,7 @@ impl BulletReductionProof {
   /// The lengths of the vectors must all be the same, and must all be
   /// either 0 or a power of 2.
   pub fn prove(
-    transcript: &mut Transcript,
+    transcript: &mut PoseidonTranscript,
     Q: &GroupElement,
     G_vec: &[GroupElement],
     H: &GroupElement,
@@ -122,10 +123,10 @@
       .as_slice(),
     );

-    transcript.append_point(b"L", &L.compress());
-    transcript.append_point(b"R", &R.compress());
+    transcript.append_point(&L.compress());
+    transcript.append_point(&R.compress());

-    let u = transcript.challenge_scalar(b"u");
+    let u = transcript.challenge_scalar();
    let u_inv = u.inverse().unwrap();

     for i in 0..n {
@@ -163,7 +164,7 @@
   fn verification_scalars(
     &self,
     n: usize,
-    transcript: &mut Transcript,
+    transcript: &mut PoseidonTranscript,
   ) -> Result<(Vec<Scalar>, Vec<Scalar>, Vec<Scalar>), ProofVerifyError> {
     let lg_n = self.L_vec.len();
     if lg_n >= 32 {
@@ -178,9 +179,9 @@
     // 1. Recompute x_k,...,x_1 based on the proof transcript
     let mut challenges = Vec::with_capacity(lg_n);
     for (L, R) in self.L_vec.iter().zip(self.R_vec.iter()) {
-      transcript.append_point(b"L", L);
-      transcript.append_point(b"R", R);
-      challenges.push(transcript.challenge_scalar(b"u"));
+      transcript.append_point(L);
+      transcript.append_point(R);
+      challenges.push(transcript.challenge_scalar());
     }

     // 2. Compute 1/(u_k...u_1) and 1/u_k, ..., 1/u_1
@@ -224,7 +225,7 @@
     &self,
     n: usize,
     a: &[Scalar],
-    transcript: &mut Transcript,
+    transcript: &mut PoseidonTranscript,
     Gamma: &GroupElement,
     G: &[GroupElement],
   ) -> Result<(GroupElement, GroupElement, Scalar), ProofVerifyError> {
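
As the hunks above show, the Fiat-Shamir challenges in the bullet reduction are now derived by appending the compressed L and R points to the PoseidonTranscript and squeezing a scalar, and the verifier replays exactly the same appends to recover them. The sketch below demonstrates that replay property using only the transcript calls visible in this commit (new, append_scalar, challenge_scalar); it is an illustrative test, not part of the crate.

use ark_bls12_377::Fr as Scalar;
use ark_std::UniformRand;
use libspartan::parameters::poseidon_params;
use libspartan::poseidon_transcript::PoseidonTranscript;

fn main() {
  let params = poseidon_params();
  let mut rng = ark_std::rand::thread_rng();
  let msg = Scalar::rand(&mut rng);

  // prover side: absorb a message, then derive a challenge
  let mut prover_transcript = PoseidonTranscript::new(&params);
  prover_transcript.append_scalar(&msg);
  let c_prover = prover_transcript.challenge_scalar();

  // verifier side: replaying the same appends yields the same challenge,
  // which is what BulletReductionProof::verification_scalars relies on
  let mut verifier_transcript = PoseidonTranscript::new(&params);
  verifier_transcript.append_scalar(&msg);
  let c_verifier = verifier_transcript.challenge_scalar();

  assert_eq!(c_prover, c_verifier);
}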

src/nizk/mod.rs (+98, -85)

@@ -1,5 +1,6 @@
 #![allow(clippy::too_many_arguments)]
 use crate::math::Math;
+use crate::poseidon_transcript::{AppendToPoseidon, PoseidonTranscript};

 use super::commitments::{Commitments, MultiCommitGens};
 use super::errors::ProofVerifyError;
@@ -34,24 +35,24 @@
   pub fn prove(
     gens_n: &MultiCommitGens,
-    transcript: &mut Transcript,
+    transcript: &mut PoseidonTranscript,
     random_tape: &mut RandomTape,
     x: &Scalar,
     r: &Scalar,
   ) -> (KnowledgeProof, CompressedGroup) {
-    transcript.append_protocol_name(KnowledgeProof::protocol_name());
+    // transcript.append_protocol_name(KnowledgeProof::protocol_name());

     // produce two random Scalars
     let t1 = random_tape.random_scalar(b"t1");
     let t2 = random_tape.random_scalar(b"t2");

     let C = x.commit(r, gens_n).compress();
-    C.append_to_transcript(b"C", transcript);
+    C.append_to_poseidon(transcript);

     let alpha = t1.commit(&t2, gens_n).compress();
-    alpha.append_to_transcript(b"alpha", transcript);
+    alpha.append_to_poseidon(transcript);

-    let c = transcript.challenge_scalar(b"c");
+    let c = transcript.challenge_scalar();

     let z1 = c * x + t1;
     let z2 = c * r + t2;
@@ -62,14 +63,14 @@
   pub fn verify(
     &self,
     gens_n: &MultiCommitGens,
-    transcript: &mut Transcript,
+    transcript: &mut PoseidonTranscript,
     C: &CompressedGroup,
   ) -> Result<(), ProofVerifyError> {
-    transcript.append_protocol_name(KnowledgeProof::protocol_name());
-    C.append_to_transcript(b"C", transcript);
-    self.alpha.append_to_transcript(b"alpha", transcript);
+    // transcript.append_protocol_name(KnowledgeProof::protocol_name());
+    C.append_to_poseidon(transcript);
+    self.alpha.append_to_poseidon(transcript);

-    let c = transcript.challenge_scalar(b"c");
+    let c = transcript.challenge_scalar();

     let lhs = self.z1.commit(&self.z2, gens_n).compress();
     let rhs = (C.unpack()?.mul(c.into_repr()) + self.alpha.unpack()?).compress();
@@ -95,28 +96,28 @@
   pub fn prove(
     gens_n: &MultiCommitGens,
-    transcript: &mut Transcript,
+    transcript: &mut PoseidonTranscript,
     random_tape: &mut RandomTape,
     v1: &Scalar,
     s1: &Scalar,
     v2: &Scalar,
     s2: &Scalar,
   ) -> (EqualityProof, CompressedGroup, CompressedGroup) {
-    transcript.append_protocol_name(EqualityProof::protocol_name());
+    // transcript.append_protocol_name(EqualityProof::protocol_name());

     // produce a random Scalar
     let r = random_tape.random_scalar(b"r");

     let C1 = v1.commit(s1, gens_n).compress();
-    C1.append_to_transcript(b"C1", transcript);
+    transcript.append_point(&C1);

     let C2 = v2.commit(s2, gens_n).compress();
-    C2.append_to_transcript(b"C2", transcript);
+    transcript.append_point(&C2);

     let alpha = gens_n.h.mul(r.into_repr()).compress();
-    alpha.append_to_transcript(b"alpha", transcript);
+    transcript.append_point(&alpha);

-    let c = transcript.challenge_scalar(b"c");
+    let c = transcript.challenge_scalar();

     let z = c * ((*s1) - s2) + r;
@@ -126,16 +127,17 @@
   pub fn verify(
     &self,
     gens_n: &MultiCommitGens,
-    transcript: &mut Transcript,
+    transcript: &mut PoseidonTranscript,
     C1: &CompressedGroup,
     C2: &CompressedGroup,
   ) -> Result<(), ProofVerifyError> {
-    transcript.append_protocol_name(EqualityProof::protocol_name());
-    C1.append_to_transcript(b"C1", transcript);
-    C2.append_to_transcript(b"C2", transcript);
-    self.alpha.append_to_transcript(b"alpha", transcript);
+    // transcript.append_protocol_name(EqualityProof::protocol_name());

-    let c = transcript.challenge_scalar(b"c");
+    transcript.append_point(&C1);
+    transcript.append_point(&C2);
+    transcript.append_point(&self.alpha);
+    let c = transcript.challenge_scalar();

     let rhs = {
       let C = C1.unpack()? - C2.unpack()?;
       (C.mul(c.into_repr()) + self.alpha.unpack()?).compress()
@@ -167,7 +169,7 @@ impl ProductProof {
   pub fn prove(
     gens_n: &MultiCommitGens,
-    transcript: &mut Transcript,
+    transcript: &mut PoseidonTranscript,
     random_tape: &mut RandomTape,
     x: &Scalar,
     rX: &Scalar,
@@ -181,7 +183,7 @@
     CompressedGroup,
     CompressedGroup,
   ) {
-    transcript.append_protocol_name(ProductProof::protocol_name());
+    // transcript.append_protocol_name(ProductProof::protocol_name());

     // produce five random Scalar
     let b1 = random_tape.random_scalar(b"b1");
@@ -193,23 +195,22 @@
     let X_unc = x.commit(rX, gens_n);
     let X = X_unc.compress();
-    X.append_to_transcript(b"X", transcript);
+    transcript.append_point(&X);

     let X_new = GroupElement::decompress(&X);
     assert_eq!(X_unc, X_new.unwrap());

     let Y = y.commit(rY, gens_n).compress();
-    Y.append_to_transcript(b"Y", transcript);
+    transcript.append_point(&Y);

     let Z = z.commit(rZ, gens_n).compress();
-    Z.append_to_transcript(b"Z", transcript);
+    transcript.append_point(&Z);

     let alpha = b1.commit(&b2, gens_n).compress();
-    alpha.append_to_transcript(b"alpha", transcript);
+    transcript.append_point(&alpha);

     let beta = b3.commit(&b4, gens_n).compress();
-    beta.append_to_transcript(b"beta", transcript);
+    transcript.append_point(&beta);

     let delta = {
       let gens_X = &MultiCommitGens {
@@ -219,9 +220,9 @@
       };
       b3.commit(&b5, gens_X).compress()
     };
-    delta.append_to_transcript(b"delta", transcript);
+    transcript.append_point(&delta);

-    let c = transcript.challenge_scalar(b"c");
+    let c = transcript.challenge_scalar();

     let z1 = b1 + c * x;
     let z2 = b2 + c * rX;
@@ -263,19 +264,19 @@
   pub fn verify(
     &self,
     gens_n: &MultiCommitGens,
-    transcript: &mut Transcript,
+    transcript: &mut PoseidonTranscript,
     X: &CompressedGroup,
     Y: &CompressedGroup,
     Z: &CompressedGroup,
   ) -> Result<(), ProofVerifyError> {
-    transcript.append_protocol_name(ProductProof::protocol_name());
+    // transcript.append_protocol_name(ProductProof::protocol_name());

-    X.append_to_transcript(b"X", transcript);
-    Y.append_to_transcript(b"Y", transcript);
-    Z.append_to_transcript(b"Z", transcript);
-    self.alpha.append_to_transcript(b"alpha", transcript);
-    self.beta.append_to_transcript(b"beta", transcript);
-    self.delta.append_to_transcript(b"delta", transcript);
+    X.append_to_poseidon(transcript);
+    Y.append_to_poseidon(transcript);
+    Z.append_to_poseidon(transcript);
+    self.alpha.append_to_poseidon(transcript);
+    self.beta.append_to_poseidon(transcript);
+    self.delta.append_to_poseidon(transcript);

     let z1 = self.z[0];
     let z2 = self.z[1];
@@ -283,7 +284,7 @@
     let z4 = self.z[3];
     let z5 = self.z[4];

-    let c = transcript.challenge_scalar(b"c");
+    let c = transcript.challenge_scalar();

     if ProductProof::check_equality(&self.alpha, X, &c, gens_n, &z1, &z2)
       && ProductProof::check_equality(&self.beta, Y, &c, gens_n, &z3, &z4)
@@ -329,7 +330,7 @@ impl DotProductProof {
   pub fn prove(
     gens_1: &MultiCommitGens,
     gens_n: &MultiCommitGens,
-    transcript: &mut Transcript,
+    transcript: &mut PoseidonTranscript,
     random_tape: &mut RandomTape,
     x_vec: &[Scalar],
     blind_x: &Scalar,
@@ -337,7 +338,7 @@
     y: &Scalar,
     blind_y: &Scalar,
   ) -> (DotProductProof, CompressedGroup, CompressedGroup) {
-    transcript.append_protocol_name(DotProductProof::protocol_name());
+    // transcript.append_protocol_name(DotProductProof::protocol_name());

     let n = x_vec.len();
     assert_eq!(x_vec.len(), a_vec.len());
@@ -350,22 +351,22 @@
     let r_beta = random_tape.random_scalar(b"r_beta");

     let Cx = x_vec.commit(blind_x, gens_n).compress();
-    Cx.append_to_transcript(b"Cx", transcript);
+    Cx.append_to_poseidon(transcript);

     let Cy = y.commit(blind_y, gens_1).compress();
-    Cy.append_to_transcript(b"Cy", transcript);
+    Cy.append_to_poseidon(transcript);

-    a_vec.append_to_transcript(b"a", transcript);
+    transcript.append_scalar_vector(&a_vec.to_vec());

     let delta = d_vec.commit(&r_delta, gens_n).compress();
-    delta.append_to_transcript(b"delta", transcript);
+    delta.append_to_poseidon(transcript);

     let dotproduct_a_d = DotProductProof::compute_dotproduct(a_vec, &d_vec);
     let beta = dotproduct_a_d.commit(&r_beta, gens_1).compress();
-    beta.append_to_transcript(b"beta", transcript);
+    beta.append_to_poseidon(transcript);

-    let c = transcript.challenge_scalar(b"c");
+    let c = transcript.challenge_scalar();

     let z = (0..d_vec.len())
       .map(|i| c * x_vec[i] + d_vec[i])
@@ -391,7 +392,7 @@
     &self,
     gens_1: &MultiCommitGens,
     gens_n: &MultiCommitGens,
-    transcript: &mut Transcript,
+    transcript: &mut PoseidonTranscript,
     a: &[Scalar],
     Cx: &CompressedGroup,
     Cy: &CompressedGroup,
@@ -399,14 +400,14 @@
     assert_eq!(gens_n.n, a.len());
     assert_eq!(gens_1.n, 1);

-    transcript.append_protocol_name(DotProductProof::protocol_name());
-    Cx.append_to_transcript(b"Cx", transcript);
-    Cy.append_to_transcript(b"Cy", transcript);
-    a.append_to_transcript(b"a", transcript);
-    self.delta.append_to_transcript(b"delta", transcript);
-    self.beta.append_to_transcript(b"beta", transcript);
+    // transcript.append_protocol_name(DotProductProof::protocol_name());
+    Cx.append_to_poseidon(transcript);
+    Cy.append_to_poseidon(transcript);
+    transcript.append_scalar_vector(&a.to_vec());
+    self.delta.append_to_poseidon(transcript);
+    self.beta.append_to_poseidon(transcript);

-    let c = transcript.challenge_scalar(b"c");
+    let c = transcript.challenge_scalar();

     let mut result = Cx.unpack()?.mul(c.into_repr()) + self.delta.unpack()?
       == self.z.commit(&self.z_delta, gens_n);
@@ -456,7 +457,7 @@ impl DotProductProofLog {
   pub fn prove(
     gens: &DotProductProofGens,
-    transcript: &mut Transcript,
+    transcript: &mut PoseidonTranscript,
     random_tape: &mut RandomTape,
     x_vec: &[Scalar],
     blind_x: &Scalar,
@@ -464,7 +465,7 @@
     y: &Scalar,
     blind_y: &Scalar,
   ) -> (DotProductProofLog, CompressedGroup, CompressedGroup) {
-    transcript.append_protocol_name(DotProductProofLog::protocol_name());
+    // transcript.append_protocol_name(DotProductProofLog::protocol_name());

     let n = x_vec.len();
     assert_eq!(x_vec.len(), a_vec.len());
@@ -483,12 +484,11 @@
     };

     let Cx = x_vec.commit(blind_x, &gens.gens_n).compress();
-    Cx.append_to_transcript(b"Cx", transcript);
+    transcript.append_point(&Cx);

     let Cy = y.commit(blind_y, &gens.gens_1).compress();
-    Cy.append_to_transcript(b"Cy", transcript);
-    a_vec.append_to_transcript(b"a", transcript);
+    transcript.append_point(&Cy);
+    transcript.append_scalar_vector(&a_vec.to_vec());

     let blind_Gamma = (*blind_x) + blind_y;
     let (bullet_reduction_proof, _Gamma_hat, x_hat, a_hat, g_hat, rhat_Gamma) =
@@ -512,12 +512,12 @@
       };
       d.commit(&r_delta, &gens_hat).compress()
     };
-    delta.append_to_transcript(b"delta", transcript);
+    transcript.append_point(&delta);

     let beta = d.commit(&r_beta, &gens.gens_1).compress();
-    beta.append_to_transcript(b"beta", transcript);
+    transcript.append_point(&beta);

-    let c = transcript.challenge_scalar(b"c");
+    let c = transcript.challenge_scalar();

     let z1 = d + c * y_hat;
     let z2 = a_hat * (c * rhat_Gamma + r_beta) + r_delta;
@@ -539,7 +539,7 @@
     &self,
     n: usize,
     gens: &DotProductProofGens,
-    transcript: &mut Transcript,
+    transcript: &mut PoseidonTranscript,
     a: &[Scalar],
     Cx: &CompressedGroup,
     Cy: &CompressedGroup,
@@ -547,10 +547,14 @@
     assert_eq!(gens.n, n);
     assert_eq!(a.len(), n);

-    transcript.append_protocol_name(DotProductProofLog::protocol_name());
-    Cx.append_to_transcript(b"Cx", transcript);
-    Cy.append_to_transcript(b"Cy", transcript);
-    a.append_to_transcript(b"a", transcript);
+    // transcript.append_protocol_name(DotProductProofLog::protocol_name());
+    // Cx.append_to_poseidon( transcript);
+    // Cy.append_to_poseidon( transcript);
+    // a.append_to_poseidon( transcript);
+    transcript.append_point(&Cx);
+    transcript.append_point(&Cy);
+    transcript.append_scalar_vector(&a.to_vec());

     let Gamma = Cx.unpack()? + Cy.unpack()?;
@@ -558,10 +562,13 @@
     self
       .bullet_reduction_proof
       .verify(n, a, transcript, &Gamma, &gens.gens_n.G)?;
-    self.delta.append_to_transcript(b"delta", transcript);
-    self.beta.append_to_transcript(b"beta", transcript);
+    // self.delta.append_to_poseidon( transcript);
+    // self.beta.append_to_poseidon( transcript);
+    transcript.append_point(&self.delta);
+    transcript.append_point(&self.beta);

-    let c = transcript.challenge_scalar(b"c");
+    let c = transcript.challenge_scalar();

     let c_s = &c;
     let beta_s = self.beta.unpack()?;
@@ -590,7 +597,7 @@
 mod tests {
   use std::marker::PhantomData;

-  use crate::group::VartimeMultiscalarMul;
+  use crate::{group::VartimeMultiscalarMul, parameters::poseidon_params};

   use super::*;
   use ark_bls12_377::{Fq, FqParameters, G1Affine};
@@ -605,12 +612,14 @@
     let x = Scalar::rand(&mut rng);
     let r = Scalar::rand(&mut rng);

+    let params = poseidon_params();
+
     let mut random_tape = RandomTape::new(b"proof");
-    let mut prover_transcript = Transcript::new(b"example");
+    let mut prover_transcript = PoseidonTranscript::new(&params);
     let (proof, committed_value) =
       KnowledgeProof::prove(&gens_1, &mut prover_transcript, &mut random_tape, &x, &r);

-    let mut verifier_transcript = Transcript::new(b"example");
+    let mut verifier_transcript = PoseidonTranscript::new(&params);
     assert!(proof
       .verify(&gens_1, &mut verifier_transcript, &committed_value)
       .is_ok());
@@ -619,6 +628,7 @@
   #[test]
   fn check_equalityproof() {
     let mut rng = ark_std::rand::thread_rng();
+    let params = poseidon_params();

     let gens_1 = MultiCommitGens::new(1, b"test-equalityproof");
     let v1 = Scalar::rand(&mut rng);
@@ -627,7 +637,7 @@
     let s2 = Scalar::rand(&mut rng);

     let mut random_tape = RandomTape::new(b"proof");
-    let mut prover_transcript = Transcript::new(b"example");
+    let mut prover_transcript = PoseidonTranscript::new(&params);
     let (proof, C1, C2) = EqualityProof::prove(
       &gens_1,
       &mut prover_transcript,
@@ -638,7 +648,7 @@
       &s2,
     );

-    let mut verifier_transcript = Transcript::new(b"example");
+    let mut verifier_transcript = PoseidonTranscript::new(&params);
     assert!(proof
       .verify(&gens_1, &mut verifier_transcript, &C1, &C2)
       .is_ok());
@@ -651,6 +661,7 @@
     let pt_c = pt.compress();
     let pt2 = GroupElement::decompress(&pt_c).unwrap();
     assert_eq!(pt, pt2);
+    let params = poseidon_params();
let gens_1 = MultiCommitGens::new(1, b"test-productproof"); let gens_1 = MultiCommitGens::new(1, b"test-productproof");
let x = Scalar::rand(&mut rng); let x = Scalar::rand(&mut rng);
@ -661,7 +672,7 @@ mod tests {
let rZ = Scalar::rand(&mut rng); let rZ = Scalar::rand(&mut rng);
let mut random_tape = RandomTape::new(b"proof"); let mut random_tape = RandomTape::new(b"proof");
let mut prover_transcript = Transcript::new(b"example");
let mut prover_transcript = PoseidonTranscript::new(&params);
let (proof, X, Y, Z) = ProductProof::prove( let (proof, X, Y, Z) = ProductProof::prove(
&gens_1, &gens_1,
&mut prover_transcript, &mut prover_transcript,
@ -674,7 +685,7 @@ mod tests {
&rZ, &rZ,
); );
let mut verifier_transcript = Transcript::new(b"example");
let mut verifier_transcript = PoseidonTranscript::new(&params);
assert!(proof assert!(proof
.verify(&gens_1, &mut verifier_transcript, &X, &Y, &Z) .verify(&gens_1, &mut verifier_transcript, &X, &Y, &Z)
.is_ok()); .is_ok());
@ -688,6 +699,7 @@ mod tests {
let gens_1 = MultiCommitGens::new(1, b"test-two"); let gens_1 = MultiCommitGens::new(1, b"test-two");
let gens_1024 = MultiCommitGens::new(n, b"test-1024"); let gens_1024 = MultiCommitGens::new(n, b"test-1024");
let params = poseidon_params();
let mut x: Vec<Scalar> = Vec::new(); let mut x: Vec<Scalar> = Vec::new();
let mut a: Vec<Scalar> = Vec::new(); let mut a: Vec<Scalar> = Vec::new();
@ -700,7 +712,7 @@ mod tests {
let r_y = Scalar::rand(&mut rng); let r_y = Scalar::rand(&mut rng);
let mut random_tape = RandomTape::new(b"proof"); let mut random_tape = RandomTape::new(b"proof");
let mut prover_transcript = Transcript::new(b"example");
let mut prover_transcript = PoseidonTranscript::new(&params);
let (proof, Cx, Cy) = DotProductProof::prove( let (proof, Cx, Cy) = DotProductProof::prove(
&gens_1, &gens_1,
&gens_1024, &gens_1024,
@ -713,7 +725,7 @@ mod tests {
&r_y, &r_y,
); );
let mut verifier_transcript = Transcript::new(b"example");
let mut verifier_transcript = PoseidonTranscript::new(&params);
assert!(proof assert!(proof
.verify(&gens_1, &gens_1024, &mut verifier_transcript, &a, &Cx, &Cy) .verify(&gens_1, &gens_1024, &mut verifier_transcript, &a, &Cx, &Cy)
.is_ok()); .is_ok());
@ -734,8 +746,9 @@ mod tests {
let r_x = Scalar::rand(&mut rng); let r_x = Scalar::rand(&mut rng);
let r_y = Scalar::rand(&mut rng); let r_y = Scalar::rand(&mut rng);
let params = poseidon_params();
let mut random_tape = RandomTape::new(b"proof"); let mut random_tape = RandomTape::new(b"proof");
let mut prover_transcript = Transcript::new(b"example");
let mut prover_transcript = PoseidonTranscript::new(&params);
let (proof, Cx, Cy) = DotProductProofLog::prove( let (proof, Cx, Cy) = DotProductProofLog::prove(
&gens, &gens,
&mut prover_transcript, &mut prover_transcript,
@ -747,7 +760,7 @@ mod tests {
&r_y, &r_y,
); );
let mut verifier_transcript = Transcript::new(b"example");
let mut verifier_transcript = PoseidonTranscript::new(&params);
assert!(proof assert!(proof
.verify(n, &gens, &mut verifier_transcript, &a, &Cx, &Cy) .verify(n, &gens, &mut verifier_transcript, &a, &Cx, &Cy)
.is_ok()); .is_ok());

+ 33
- 1
src/parameters.rs

@ -1,10 +1,14 @@
use std::str::FromStr;
use ark_sponge::poseidon::PoseidonParameters;
// Copyright: https://github.com/nikkolasg/ark-dkg/blob/main/src/parameters.rs // Copyright: https://github.com/nikkolasg/ark-dkg/blob/main/src/parameters.rs
use json::JsonValue; use json::JsonValue;
use lazy_static::lazy_static; use lazy_static::lazy_static;
use crate::group::Fq;
lazy_static! { lazy_static! {
// bls12377_rate2_constraints:
/// bls12377_rate2_constraints:
pub static ref P1: JsonValue = object! { pub static ref P1: JsonValue = object! {
"ark" => array![ "ark" => array![
array![ array![
@ -226,3 +230,31 @@ lazy_static! {
"partial_rounds" => 31 "partial_rounds" => 31
}; };
} }
/// Builds the Poseidon parameters over the BLS12-377 base field from the rate-2 constants in `P1`.
pub fn poseidon_params() -> PoseidonParameters<Fq> {
let arks = P1["ark"]
.members()
.map(|ark| {
ark
.members()
.map(|v| Fq::from_str(v.as_str().unwrap()).unwrap())
.collect::<Vec<_>>()
})
.collect::<Vec<_>>();
let mds = P1["mds"]
.members()
.map(|m| {
m.members()
.map(|v| Fq::from_str(v.as_str().unwrap()).unwrap())
.collect::<Vec<_>>()
})
.collect::<Vec<_>>();
PoseidonParameters::new(
P1["full_rounds"].as_u32().unwrap(),
P1["partial_rounds"].as_u32().unwrap(),
P1["alpha"].as_u64().unwrap(),
mds,
arks,
)
}
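A minimal usage sketch of how these parameters feed the new transcript, assuming the public paths shown in the README (`libspartan::parameters::poseidon_params`, `libspartan::poseidon_transcript::PoseidonTranscript`) and that `append_u64` and `challenge_scalar` are exported; the point is that prover and verifier derive identical Fiat-Shamir challenges only if they absorb identical data in the same order:

```rust
use libspartan::parameters::poseidon_params;
use libspartan::poseidon_transcript::PoseidonTranscript;

fn main() {
    let params = poseidon_params();

    // Both sides instantiate the sponge from the same Poseidon parameters.
    let mut prover_transcript = PoseidonTranscript::new(&params);
    let mut verifier_transcript = PoseidonTranscript::new(&params);

    // Absorb the same value on both sides.
    prover_transcript.append_u64(1024);
    verifier_transcript.append_u64(1024);

    // Identical absorb sequences yield identical challenges.
    assert_eq!(
        prover_transcript.challenge_scalar(),
        verifier_transcript.challenge_scalar()
    );
}
```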

+ 30
- 30
src/product_tree.rs

@ -1,13 +1,15 @@
#![allow(dead_code)] #![allow(dead_code)]
use crate::poseidon_transcript::PoseidonTranscript;
use super::dense_mlpoly::DensePolynomial; use super::dense_mlpoly::DensePolynomial;
use super::dense_mlpoly::EqPolynomial; use super::dense_mlpoly::EqPolynomial;
use super::math::Math; use super::math::Math;
use super::scalar::Scalar; use super::scalar::Scalar;
use super::sumcheck::SumcheckInstanceProof; use super::sumcheck::SumcheckInstanceProof;
use super::transcript::ProofTranscript; use super::transcript::ProofTranscript;
use merlin::Transcript;
use ark_serialize::*; use ark_serialize::*;
use ark_std::{One};
use ark_std::One;
use merlin::Transcript;
#[derive(Debug)] #[derive(Debug)]
pub struct ProductCircuit { pub struct ProductCircuit {
@ -122,7 +124,7 @@ impl LayerProof {
claim: Scalar, claim: Scalar,
num_rounds: usize, num_rounds: usize,
degree_bound: usize, degree_bound: usize,
transcript: &mut Transcript,
transcript: &mut PoseidonTranscript,
) -> (Scalar, Vec<Scalar>) { ) -> (Scalar, Vec<Scalar>) {
self self
.proof .proof
@ -146,7 +148,7 @@ impl LayerProofBatched {
claim: Scalar, claim: Scalar,
num_rounds: usize, num_rounds: usize,
degree_bound: usize, degree_bound: usize,
transcript: &mut Transcript,
transcript: &mut PoseidonTranscript,
) -> (Scalar, Vec<Scalar>) { ) -> (Scalar, Vec<Scalar>) {
self self
.proof .proof
@ -170,7 +172,7 @@ impl ProductCircuitEvalProof {
#![allow(dead_code)] #![allow(dead_code)]
pub fn prove( pub fn prove(
circuit: &mut ProductCircuit, circuit: &mut ProductCircuit,
transcript: &mut Transcript,
transcript: &mut PoseidonTranscript,
) -> (Self, Scalar, Vec<Scalar>) { ) -> (Self, Scalar, Vec<Scalar>) {
let mut proof: Vec<LayerProof> = Vec::new(); let mut proof: Vec<LayerProof> = Vec::new();
let num_layers = circuit.left_vec.len(); let num_layers = circuit.left_vec.len();
@ -198,11 +200,11 @@ impl ProductCircuitEvalProof {
transcript, transcript,
); );
transcript.append_scalar(b"claim_prod_left", &claims_prod[0]);
transcript.append_scalar(b"claim_prod_right", &claims_prod[1]);
transcript.append_scalar(&claims_prod[0]);
transcript.append_scalar(&claims_prod[1]);
// produce a random challenge // produce a random challenge
let r_layer = transcript.challenge_scalar(b"challenge_r_layer");
let r_layer = transcript.challenge_scalar();
claim = claims_prod[0] + r_layer * (claims_prod[1] - claims_prod[0]); claim = claims_prod[0] + r_layer * (claims_prod[1] - claims_prod[0]);
let mut ext = vec![r_layer]; let mut ext = vec![r_layer];
@ -222,7 +224,7 @@ impl ProductCircuitEvalProof {
&self, &self,
eval: Scalar, eval: Scalar,
len: usize, len: usize,
transcript: &mut Transcript,
transcript: &mut PoseidonTranscript,
) -> (Scalar, Vec<Scalar>) { ) -> (Scalar, Vec<Scalar>) {
let num_layers = len.log_2(); let num_layers = len.log_2();
let mut claim = eval; let mut claim = eval;
@ -233,8 +235,8 @@ impl ProductCircuitEvalProof {
let (claim_last, rand_prod) = self.proof[i].verify(claim, num_rounds, 3, transcript); let (claim_last, rand_prod) = self.proof[i].verify(claim, num_rounds, 3, transcript);
let claims_prod = &self.proof[i].claims; let claims_prod = &self.proof[i].claims;
transcript.append_scalar(b"claim_prod_left", &claims_prod[0]);
transcript.append_scalar(b"claim_prod_right", &claims_prod[1]);
transcript.append_scalar(&claims_prod[0]);
transcript.append_scalar(&claims_prod[1]);
assert_eq!(rand.len(), rand_prod.len()); assert_eq!(rand.len(), rand_prod.len());
let eq: Scalar = (0..rand.len()) let eq: Scalar = (0..rand.len())
@ -245,7 +247,7 @@ impl ProductCircuitEvalProof {
assert_eq!(claims_prod[0] * claims_prod[1] * eq, claim_last); assert_eq!(claims_prod[0] * claims_prod[1] * eq, claim_last);
// produce a random challenge // produce a random challenge
let r_layer = transcript.challenge_scalar(b"challenge_r_layer");
let r_layer = transcript.challenge_scalar();
claim = (Scalar::one() - r_layer) * claims_prod[0] + r_layer * claims_prod[1]; claim = (Scalar::one() - r_layer) * claims_prod[0] + r_layer * claims_prod[1];
let mut ext = vec![r_layer]; let mut ext = vec![r_layer];
ext.extend(rand_prod); ext.extend(rand_prod);
@ -260,7 +262,7 @@ impl ProductCircuitEvalProofBatched {
pub fn prove( pub fn prove(
prod_circuit_vec: &mut Vec<&mut ProductCircuit>, prod_circuit_vec: &mut Vec<&mut ProductCircuit>,
dotp_circuit_vec: &mut Vec<&mut DotProductCircuit>, dotp_circuit_vec: &mut Vec<&mut DotProductCircuit>,
transcript: &mut Transcript,
transcript: &mut PoseidonTranscript,
) -> (Self, Vec<Scalar>) { ) -> (Self, Vec<Scalar>) {
assert!(!prod_circuit_vec.is_empty()); assert!(!prod_circuit_vec.is_empty());
@ -324,8 +326,7 @@ impl ProductCircuitEvalProofBatched {
); );
// produce a fresh set of coeffs and a joint claim // produce a fresh set of coeffs and a joint claim
let coeff_vec =
transcript.challenge_vector(b"rand_coeffs_next_layer", claims_to_verify.len());
let coeff_vec = transcript.challenge_vector(claims_to_verify.len());
let claim = (0..claims_to_verify.len()) let claim = (0..claims_to_verify.len())
.map(|i| claims_to_verify[i] * coeff_vec[i]) .map(|i| claims_to_verify[i] * coeff_vec[i])
.sum(); .sum();
@ -342,22 +343,22 @@ impl ProductCircuitEvalProofBatched {
let (claims_prod_left, claims_prod_right, _claims_eq) = claims_prod; let (claims_prod_left, claims_prod_right, _claims_eq) = claims_prod;
for i in 0..prod_circuit_vec.len() { for i in 0..prod_circuit_vec.len() {
transcript.append_scalar(b"claim_prod_left", &claims_prod_left[i]);
transcript.append_scalar(b"claim_prod_right", &claims_prod_right[i]);
transcript.append_scalar(&claims_prod_left[i]);
transcript.append_scalar(&claims_prod_right[i]);
} }
if layer_id == 0 && !dotp_circuit_vec.is_empty() { if layer_id == 0 && !dotp_circuit_vec.is_empty() {
let (claims_dotp_left, claims_dotp_right, claims_dotp_weight) = claims_dotp; let (claims_dotp_left, claims_dotp_right, claims_dotp_weight) = claims_dotp;
for i in 0..dotp_circuit_vec.len() { for i in 0..dotp_circuit_vec.len() {
transcript.append_scalar(b"claim_dotp_left", &claims_dotp_left[i]);
transcript.append_scalar(b"claim_dotp_right", &claims_dotp_right[i]);
transcript.append_scalar(b"claim_dotp_weight", &claims_dotp_weight[i]);
transcript.append_scalar(&claims_dotp_left[i]);
transcript.append_scalar(&claims_dotp_right[i]);
transcript.append_scalar(&claims_dotp_weight[i]);
} }
claims_dotp_final = (claims_dotp_left, claims_dotp_right, claims_dotp_weight); claims_dotp_final = (claims_dotp_left, claims_dotp_right, claims_dotp_weight);
} }
// produce a random challenge to condense two claims into a single claim // produce a random challenge to condense two claims into a single claim
let r_layer = transcript.challenge_scalar(b"challenge_r_layer");
let r_layer = transcript.challenge_scalar();
claims_to_verify = (0..prod_circuit_vec.len()) claims_to_verify = (0..prod_circuit_vec.len())
.map(|i| claims_prod_left[i] + r_layer * (claims_prod_right[i] - claims_prod_left[i])) .map(|i| claims_prod_left[i] + r_layer * (claims_prod_right[i] - claims_prod_left[i]))
@ -388,7 +389,7 @@ impl ProductCircuitEvalProofBatched {
claims_prod_vec: &[Scalar], claims_prod_vec: &[Scalar],
claims_dotp_vec: &[Scalar], claims_dotp_vec: &[Scalar],
len: usize, len: usize,
transcript: &mut Transcript,
transcript: &mut PoseidonTranscript,
) -> (Vec<Scalar>, Vec<Scalar>, Vec<Scalar>) { ) -> (Vec<Scalar>, Vec<Scalar>, Vec<Scalar>) {
let num_layers = len.log_2(); let num_layers = len.log_2();
let mut rand: Vec<Scalar> = Vec::new(); let mut rand: Vec<Scalar> = Vec::new();
@ -403,8 +404,7 @@ impl ProductCircuitEvalProofBatched {
} }
// produce random coefficients, one for each instance // produce random coefficients, one for each instance
let coeff_vec =
transcript.challenge_vector(b"rand_coeffs_next_layer", claims_to_verify.len());
let coeff_vec = transcript.challenge_vector(claims_to_verify.len());
// produce a joint claim // produce a joint claim
let claim = (0..claims_to_verify.len()) let claim = (0..claims_to_verify.len())
@ -419,8 +419,8 @@ impl ProductCircuitEvalProofBatched {
assert_eq!(claims_prod_right.len(), claims_prod_vec.len()); assert_eq!(claims_prod_right.len(), claims_prod_vec.len());
for i in 0..claims_prod_vec.len() { for i in 0..claims_prod_vec.len() {
transcript.append_scalar(b"claim_prod_left", &claims_prod_left[i]);
transcript.append_scalar(b"claim_prod_right", &claims_prod_right[i]);
transcript.append_scalar(&claims_prod_left[i]);
transcript.append_scalar(&claims_prod_right[i]);
} }
assert_eq!(rand.len(), rand_prod.len()); assert_eq!(rand.len(), rand_prod.len());
@ -438,9 +438,9 @@ impl ProductCircuitEvalProofBatched {
let num_prod_instances = claims_prod_vec.len(); let num_prod_instances = claims_prod_vec.len();
let (claims_dotp_left, claims_dotp_right, claims_dotp_weight) = &self.claims_dotp; let (claims_dotp_left, claims_dotp_right, claims_dotp_weight) = &self.claims_dotp;
for i in 0..claims_dotp_left.len() { for i in 0..claims_dotp_left.len() {
transcript.append_scalar(b"claim_dotp_left", &claims_dotp_left[i]);
transcript.append_scalar(b"claim_dotp_right", &claims_dotp_right[i]);
transcript.append_scalar(b"claim_dotp_weight", &claims_dotp_weight[i]);
transcript.append_scalar(&claims_dotp_left[i]);
transcript.append_scalar(&claims_dotp_right[i]);
transcript.append_scalar(&claims_dotp_weight[i]);
claim_expected += coeff_vec[i + num_prod_instances] claim_expected += coeff_vec[i + num_prod_instances]
* claims_dotp_left[i] * claims_dotp_left[i]
@ -452,7 +452,7 @@ impl ProductCircuitEvalProofBatched {
assert_eq!(claim_expected, claim_last); assert_eq!(claim_expected, claim_last);
// produce a random challenge // produce a random challenge
let r_layer = transcript.challenge_scalar(b"challenge_r_layer");
let r_layer = transcript.challenge_scalar();
claims_to_verify = (0..claims_prod_left.len()) claims_to_verify = (0..claims_prod_left.len())
.map(|i| claims_prod_left[i] + r_layer * (claims_prod_right[i] - claims_prod_left[i])) .map(|i| claims_prod_left[i] + r_layer * (claims_prod_right[i] - claims_prod_left[i]))
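Both the prove and verify paths condense a layer's two product claims with the challenge `r_layer`: the single-circuit verifier writes it as `(1 - r_layer) * claims_prod[0] + r_layer * claims_prod[1]`, while the prover and the batched paths write `left + r_layer * (right - left)`. A small self-contained check that these are the same affine combination (BLS12-377's scalar field standing in for the crate's `Scalar`):

```rust
use ark_bls12_377::Fr;
use ark_ff::One;
use ark_std::UniformRand;

// The layer-condensing step: two product claims (left, right) are folded into
// a single claim with a transcript challenge r.
fn fold_layer_claims(left: Fr, right: Fr, r: Fr) -> Fr {
    left + r * (right - left)
}

fn main() {
    let mut rng = ark_std::rand::thread_rng();
    let (left, right, r) = (Fr::rand(&mut rng), Fr::rand(&mut rng), Fr::rand(&mut rng));
    // The prover's form and the verifier's form agree for every r.
    assert_eq!(
        fold_layer_claims(left, right, r),
        (Fr::one() - r) * left + r * right
    );
}
```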

+ 12
- 2
src/r1csinstance.rs

@ -1,3 +1,4 @@
use crate::poseidon_transcript::{AppendToPoseidon, PoseidonTranscript};
use crate::transcript::AppendToTranscript; use crate::transcript::AppendToTranscript;
use super::dense_mlpoly::DensePolynomial; use super::dense_mlpoly::DensePolynomial;
@ -63,6 +64,15 @@ impl AppendToTranscript for R1CSCommitment {
} }
} }
impl AppendToPoseidon for R1CSCommitment {
fn append_to_poseidon(&self, transcript: &mut PoseidonTranscript) {
transcript.append_u64(self.num_cons as u64);
transcript.append_u64(self.num_vars as u64);
transcript.append_u64(self.num_inputs as u64);
self.comm.append_to_poseidon(transcript);
}
}
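The impl above (and the analogous ones added later in src/sparse_mlpoly.rs) assumes a trait along the following lines; this is a sketch only, since `src/poseidon_transcript.rs` is not included in this diff:

```rust
use crate::poseidon_transcript::PoseidonTranscript;

// Assumed shape of the trait implemented above: a type that knows how to
// absorb its own contents into the Poseidon transcript.
pub trait AppendToPoseidonSketch {
    fn append_to_poseidon(&self, transcript: &mut PoseidonTranscript);
}
```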
pub struct R1CSDecommitment { pub struct R1CSDecommitment {
dense: MultiSparseMatPolynomialAsDense, dense: MultiSparseMatPolynomialAsDense,
} }
@ -328,7 +338,7 @@ impl R1CSEvalProof {
ry: &[Scalar], ry: &[Scalar],
evals: &(Scalar, Scalar, Scalar), evals: &(Scalar, Scalar, Scalar),
gens: &R1CSCommitmentGens, gens: &R1CSCommitmentGens,
transcript: &mut Transcript,
transcript: &mut PoseidonTranscript,
random_tape: &mut RandomTape, random_tape: &mut RandomTape,
) -> R1CSEvalProof { ) -> R1CSEvalProof {
let timer = Timer::new("R1CSEvalProof::prove"); let timer = Timer::new("R1CSEvalProof::prove");
@ -353,7 +363,7 @@ impl R1CSEvalProof {
ry: &[Scalar], ry: &[Scalar],
evals: &(Scalar, Scalar, Scalar), evals: &(Scalar, Scalar, Scalar),
gens: &R1CSCommitmentGens, gens: &R1CSCommitmentGens,
transcript: &mut Transcript,
transcript: &mut PoseidonTranscript,
) -> Result<(), ProofVerifyError> { ) -> Result<(), ProofVerifyError> {
self.proof.verify( self.proof.verify(
&comm.comm, &comm.comm,

+ 26
- 21
src/r1csproof.rs

@ -16,7 +16,6 @@ use super::r1csinstance::R1CSInstance;
use super::random::RandomTape; use super::random::RandomTape;
use super::scalar::Scalar; use super::scalar::Scalar;
use super::sparse_mlpoly::{SparsePolyEntry, SparsePolynomial}; use super::sparse_mlpoly::{SparsePolyEntry, SparsePolynomial};
use super::sumcheck::ZKSumcheckInstanceProof;
use super::timer::Timer; use super::timer::Timer;
use super::transcript::{AppendToTranscript, ProofTranscript}; use super::transcript::{AppendToTranscript, ProofTranscript};
use ark_ec::ProjectiveCurve; use ark_ec::ProjectiveCurve;
@ -80,7 +79,7 @@ impl R1CSProof {
evals_Az: &mut DensePolynomial, evals_Az: &mut DensePolynomial,
evals_Bz: &mut DensePolynomial, evals_Bz: &mut DensePolynomial,
evals_Cz: &mut DensePolynomial, evals_Cz: &mut DensePolynomial,
transcript: &mut Transcript,
transcript: &mut PoseidonTranscript,
) -> (SumcheckInstanceProof, Vec<Scalar>, Vec<Scalar>) { ) -> (SumcheckInstanceProof, Vec<Scalar>, Vec<Scalar>) {
let comb_func = let comb_func =
|poly_tau_comp: &Scalar, |poly_tau_comp: &Scalar,
@ -108,7 +107,7 @@ impl R1CSProof {
claim: &Scalar, claim: &Scalar,
evals_z: &mut DensePolynomial, evals_z: &mut DensePolynomial,
evals_ABC: &mut DensePolynomial, evals_ABC: &mut DensePolynomial,
transcript: &mut Transcript,
transcript: &mut PoseidonTranscript,
) -> (SumcheckInstanceProof, Vec<Scalar>, Vec<Scalar>) { ) -> (SumcheckInstanceProof, Vec<Scalar>, Vec<Scalar>) {
let comb_func = let comb_func =
|poly_A_comp: &Scalar, poly_B_comp: &Scalar| -> Scalar { (*poly_A_comp) * poly_B_comp }; |poly_A_comp: &Scalar, poly_B_comp: &Scalar| -> Scalar { (*poly_A_comp) * poly_B_comp };
@ -128,16 +127,14 @@ impl R1CSProof {
vars: Vec<Scalar>, vars: Vec<Scalar>,
input: &[Scalar], input: &[Scalar],
gens: &R1CSGens, gens: &R1CSGens,
transcript: &mut Transcript,
transcript: &mut PoseidonTranscript,
random_tape: &mut RandomTape, random_tape: &mut RandomTape,
) -> (R1CSProof, Vec<Scalar>, Vec<Scalar>) { ) -> (R1CSProof, Vec<Scalar>, Vec<Scalar>) {
let timer_prove = Timer::new("R1CSProof::prove"); let timer_prove = Timer::new("R1CSProof::prove");
transcript.append_protocol_name(R1CSProof::protocol_name());
// we currently require the number of |inputs| + 1 to be at most number of vars // we currently require the number of |inputs| + 1 to be at most number of vars
assert!(input.len() < vars.len()); assert!(input.len() < vars.len());
input.append_to_transcript(b"input", transcript);
transcript.append_scalar_vector(&input.to_vec());
let poly_vars = DensePolynomial::new(vars.clone()); let poly_vars = DensePolynomial::new(vars.clone());
@ -155,8 +152,9 @@ impl R1CSProof {
}; };
// derive the verifier's challenge tau // derive the verifier's challenge tau
let (num_rounds_x, num_rounds_y) = (inst.get_num_cons().log_2(), z.len().log_2());
let tau = transcript.challenge_vector(b"challenge_tau", num_rounds_x);
let (num_rounds_x, num_rounds_y) =
(inst.get_num_cons().log2() as usize, z.len().log2() as usize);
let tau = transcript.challenge_vector(num_rounds_x);
// compute the initial evaluation table for R(\tau, x) // compute the initial evaluation table for R(\tau, x)
let mut poly_tau = DensePolynomial::new(EqPolynomial::new(tau).evals()); let mut poly_tau = DensePolynomial::new(EqPolynomial::new(tau).evals());
let (mut poly_Az, mut poly_Bz, mut poly_Cz) = let (mut poly_Az, mut poly_Bz, mut poly_Cz) =
@ -186,9 +184,9 @@ impl R1CSProof {
let timer_sc_proof_phase2 = Timer::new("prove_sc_phase_two"); let timer_sc_proof_phase2 = Timer::new("prove_sc_phase_two");
// combine the three claims into a single claim // combine the three claims into a single claim
let r_A = transcript.challenge_scalar(b"challenege_Az");
let r_B = transcript.challenge_scalar(b"challenege_Bz");
let r_C = transcript.challenge_scalar(b"challenege_Cz");
let r_A = transcript.challenge_scalar();
let r_B = transcript.challenge_scalar();
let r_C = transcript.challenge_scalar();
let claim_phase2 = r_A * Az_claim + r_B * Bz_claim + r_C * Cz_claim; let claim_phase2 = r_A * Az_claim + r_B * Bz_claim + r_C * Cz_claim;
let evals_ABC = { let evals_ABC = {
@ -238,19 +236,21 @@ impl R1CSProof {
num_cons: usize, num_cons: usize,
input: &[Scalar], input: &[Scalar],
evals: &(Scalar, Scalar, Scalar), evals: &(Scalar, Scalar, Scalar),
transcript: &mut Transcript,
transcript: &mut PoseidonTranscript,
gens: &R1CSGens, gens: &R1CSGens,
) -> Result<(Vec<Scalar>, Vec<Scalar>), ProofVerifyError> { ) -> Result<(Vec<Scalar>, Vec<Scalar>), ProofVerifyError> {
transcript.append_protocol_name(R1CSProof::protocol_name());
// transcript.append_protocol_name(R1CSProof::protocol_name());
input.append_to_transcript(b"input", transcript);
for i in 0..input.len() {
transcript.append_scalar(&input[i]);
}
let n = num_vars; let n = num_vars;
let (num_rounds_x, num_rounds_y) = (num_cons.log_2(), (2 * num_vars).log_2()); let (num_rounds_x, num_rounds_y) = (num_cons.log_2(), (2 * num_vars).log_2());
// derive the verifier's challenge tau // derive the verifier's challenge tau
let tau = transcript.challenge_vector(b"challenge_tau", num_rounds_x);
let tau = transcript.challenge_vector(num_rounds_x);
// verify the first sum-check instance // verify the first sum-check instance
let claim_phase1 = Scalar::zero(); let claim_phase1 = Scalar::zero();
@ -271,9 +271,9 @@ impl R1CSProof {
assert_eq!(claim_post_phase1, expected_claim_post_phase1); assert_eq!(claim_post_phase1, expected_claim_post_phase1);
// derive three public challenges and then derive a joint claim // derive three public challenges and then derive a joint claim
let r_A = transcript.challenge_scalar(b"challenege_Az");
let r_B = transcript.challenge_scalar(b"challenege_Bz");
let r_C = transcript.challenge_scalar(b"challenege_Cz");
let r_A = transcript.challenge_scalar();
let r_B = transcript.challenge_scalar();
let r_C = transcript.challenge_scalar();
let claim_phase2 = r_A * Az_claim + r_B * Bz_claim + r_C * Cz_claim; let claim_phase2 = r_A * Az_claim + r_B * Bz_claim + r_C * Cz_claim;
@ -310,6 +310,8 @@ impl R1CSProof {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use crate::parameters::poseidon_params;
use super::*; use super::*;
use ark_std::UniformRand; use ark_std::UniformRand;
use test::Bencher; use test::Bencher;
@ -394,8 +396,10 @@ mod tests {
let gens = R1CSGens::new(b"test-m", num_cons, num_vars); let gens = R1CSGens::new(b"test-m", num_cons, num_vars);
let params = poseidon_params();
let mut random_tape = RandomTape::new(b"proof"); let mut random_tape = RandomTape::new(b"proof");
let mut prover_transcript = Transcript::new(b"example");
// let mut prover_transcript = PoseidonTranscript::new(&params);
let mut prover_transcript = PoseidonTranscript::new(&params);
let (proof, rx, ry) = R1CSProof::prove( let (proof, rx, ry) = R1CSProof::prove(
&inst, &inst,
vars, vars,
@ -407,7 +411,8 @@ mod tests {
let inst_evals = inst.evaluate(&rx, &ry); let inst_evals = inst.evaluate(&rx, &ry);
let mut verifier_transcript = Transcript::new(b"example");
// let mut verifier_transcript = PoseidonTranscript::new(&params);
let mut verifier_transcript = PoseidonTranscript::new(&params);
assert!(proof assert!(proof
.verify( .verify(
inst.get_num_vars(), inst.get_num_vars(),
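Phase two above batches the three claimed evaluations `Az`, `Bz`, `Cz` at `r_x` into one claim using the transcript challenges `r_A`, `r_B`, `r_C`, on both the prover and verifier side. A minimal sketch of that batching step (Fr of BLS12-377 standing in for `Scalar`; illustrative only):

```rust
use ark_bls12_377::Fr;
use ark_std::UniformRand;

// Three claims are combined into one scalar with random transcript challenges,
// so a single sum-check instance covers all of them. If any individual claim
// were wrong, the combination would differ except with negligible probability
// over the choice of challenges.
fn combine_claims(az: Fr, bz: Fr, cz: Fr, r_a: Fr, r_b: Fr, r_c: Fr) -> Fr {
    r_a * az + r_b * bz + r_c * cz
}

fn main() {
    let mut rng = ark_std::rand::thread_rng();
    let (az, bz, cz) = (Fr::rand(&mut rng), Fr::rand(&mut rng), Fr::rand(&mut rng));
    let (r_a, r_b, r_c) = (Fr::rand(&mut rng), Fr::rand(&mut rng), Fr::rand(&mut rng));

    let honest = combine_claims(az, bz, cz, r_a, r_b, r_c);
    let tampered = combine_claims(az, bz, cz + Fr::from(1u64), r_a, r_b, r_c);
    // The tampered combination differs whenever r_c != 0.
    assert_ne!(honest, tampered);
}
```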

+ 129
- 90
src/sparse_mlpoly.rs

@ -1,6 +1,8 @@
#![allow(clippy::type_complexity)] #![allow(clippy::type_complexity)]
#![allow(clippy::too_many_arguments)] #![allow(clippy::too_many_arguments)]
#![allow(clippy::needless_range_loop)] #![allow(clippy::needless_range_loop)]
use crate::poseidon_transcript::{AppendToPoseidon, PoseidonTranscript};
use super::dense_mlpoly::DensePolynomial; use super::dense_mlpoly::DensePolynomial;
use super::dense_mlpoly::{ use super::dense_mlpoly::{
EqPolynomial, IdentityPolynomial, PolyCommitment, PolyCommitmentGens, PolyEvalProof, EqPolynomial, IdentityPolynomial, PolyCommitment, PolyCommitmentGens, PolyEvalProof,
@ -12,10 +14,10 @@ use super::random::RandomTape;
use super::scalar::Scalar; use super::scalar::Scalar;
use super::timer::Timer; use super::timer::Timer;
use super::transcript::{AppendToTranscript, ProofTranscript}; use super::transcript::{AppendToTranscript, ProofTranscript};
use ark_ff::{Field, One, Zero};
use ark_serialize::*;
use core::cmp::Ordering; use core::cmp::Ordering;
use merlin::Transcript; use merlin::Transcript;
use ark_serialize::*;
use ark_ff::{One, Zero, Field};
#[derive(Debug, CanonicalSerialize, CanonicalDeserialize)] #[derive(Debug, CanonicalSerialize, CanonicalDeserialize)]
pub struct SparseMatEntry { pub struct SparseMatEntry {
@ -87,18 +89,18 @@ impl DerefsEvalProof {
r: &[Scalar], r: &[Scalar],
evals: Vec<Scalar>, evals: Vec<Scalar>,
gens: &PolyCommitmentGens, gens: &PolyCommitmentGens,
transcript: &mut Transcript,
transcript: &mut PoseidonTranscript,
random_tape: &mut RandomTape, random_tape: &mut RandomTape,
) -> PolyEvalProof { ) -> PolyEvalProof {
assert_eq!(joint_poly.get_num_vars(), r.len() + evals.len().log_2()); assert_eq!(joint_poly.get_num_vars(), r.len() + evals.len().log_2());
// append the claimed evaluations to transcript // append the claimed evaluations to transcript
evals.append_to_transcript(b"evals_ops_val", transcript);
// evals.append_to_transcript(b"evals_ops_val", transcript);
transcript.append_scalar_vector(&evals);
// n-to-1 reduction // n-to-1 reduction
let (r_joint, eval_joint) = { let (r_joint, eval_joint) = {
let challenges =
transcript.challenge_vector(b"challenge_combine_n_to_one", evals.len().log_2());
let challenges = transcript.challenge_vector(evals.len().log2());
let mut poly_evals = DensePolynomial::new(evals); let mut poly_evals = DensePolynomial::new(evals);
for i in (0..challenges.len()).rev() { for i in (0..challenges.len()).rev() {
poly_evals.bound_poly_var_bot(&challenges[i]); poly_evals.bound_poly_var_bot(&challenges[i]);
@ -112,7 +114,7 @@ impl DerefsEvalProof {
(r_joint, joint_claim_eval) (r_joint, joint_claim_eval)
}; };
// decommit the joint polynomial at r_joint // decommit the joint polynomial at r_joint
eval_joint.append_to_transcript(b"joint_claim_eval", transcript);
transcript.append_scalar(&eval_joint);
let (proof_derefs, _comm_derefs_eval) = PolyEvalProof::prove( let (proof_derefs, _comm_derefs_eval) = PolyEvalProof::prove(
joint_poly, joint_poly,
None, None,
@ -134,10 +136,10 @@ impl DerefsEvalProof {
eval_col_ops_val_vec: &[Scalar], eval_col_ops_val_vec: &[Scalar],
r: &[Scalar], r: &[Scalar],
gens: &PolyCommitmentGens, gens: &PolyCommitmentGens,
transcript: &mut Transcript,
transcript: &mut PoseidonTranscript,
random_tape: &mut RandomTape, random_tape: &mut RandomTape,
) -> Self { ) -> Self {
transcript.append_protocol_name(DerefsEvalProof::protocol_name());
// transcript.append_protocol_name(DerefsEvalProof::protocol_name());
let evals = { let evals = {
let mut evals = eval_row_ops_val_vec.to_owned(); let mut evals = eval_row_ops_val_vec.to_owned();
@ -157,14 +159,14 @@ impl DerefsEvalProof {
r: &[Scalar], r: &[Scalar],
evals: Vec<Scalar>, evals: Vec<Scalar>,
gens: &PolyCommitmentGens, gens: &PolyCommitmentGens,
transcript: &mut Transcript,
transcript: &mut PoseidonTranscript,
) -> Result<(), ProofVerifyError> { ) -> Result<(), ProofVerifyError> {
// append the claimed evaluations to transcript // append the claimed evaluations to transcript
evals.append_to_transcript(b"evals_ops_val", transcript);
// evals.append_to_transcript(b"evals_ops_val", transcript);
transcript.append_scalar_vector(&evals);
// n-to-1 reduction // n-to-1 reduction
let challenges =
transcript.challenge_vector(b"challenge_combine_n_to_one", evals.len().log_2());
let challenges = transcript.challenge_vector(evals.len().log2());
let mut poly_evals = DensePolynomial::new(evals); let mut poly_evals = DensePolynomial::new(evals);
for i in (0..challenges.len()).rev() { for i in (0..challenges.len()).rev() {
poly_evals.bound_poly_var_bot(&challenges[i]); poly_evals.bound_poly_var_bot(&challenges[i]);
@ -175,7 +177,8 @@ impl DerefsEvalProof {
r_joint.extend(r); r_joint.extend(r);
// decommit the joint polynomial at r_joint // decommit the joint polynomial at r_joint
joint_claim_eval.append_to_transcript(b"joint_claim_eval", transcript);
// joint_claim_eval.append_to_transcript(b"joint_claim_eval", transcript);
transcript.append_scalar(&joint_claim_eval);
proof.verify_plain(gens, transcript, &r_joint, &joint_claim_eval, comm) proof.verify_plain(gens, transcript, &r_joint, &joint_claim_eval, comm)
} }
@ -188,9 +191,9 @@ impl DerefsEvalProof {
eval_col_ops_val_vec: &[Scalar], eval_col_ops_val_vec: &[Scalar],
gens: &PolyCommitmentGens, gens: &PolyCommitmentGens,
comm: &DerefsCommitment, comm: &DerefsCommitment,
transcript: &mut Transcript,
transcript: &mut PoseidonTranscript,
) -> Result<(), ProofVerifyError> { ) -> Result<(), ProofVerifyError> {
transcript.append_protocol_name(DerefsEvalProof::protocol_name());
// transcript.append_protocol_name(DerefsEvalProof::protocol_name());
let mut evals = eval_row_ops_val_vec.to_owned(); let mut evals = eval_row_ops_val_vec.to_owned();
evals.extend(eval_col_ops_val_vec); evals.extend(eval_col_ops_val_vec);
evals.resize(evals.len().next_power_of_two(), Scalar::zero()); evals.resize(evals.len().next_power_of_two(), Scalar::zero());
@ -214,6 +217,11 @@ impl AppendToTranscript for DerefsCommitment {
} }
} }
impl AppendToPoseidon for DerefsCommitment {
fn append_to_poseidon(&self, transcript: &mut PoseidonTranscript) {
self.comm_ops_val.append_to_poseidon(transcript);
}
}
struct AddrTimestamps { struct AddrTimestamps {
ops_addr_usize: Vec<Vec<usize>>, ops_addr_usize: Vec<Vec<usize>>,
ops_addr: Vec<DensePolynomial>, ops_addr: Vec<DensePolynomial>,
@ -342,6 +350,16 @@ impl AppendToTranscript for SparseMatPolyCommitment {
} }
} }
impl AppendToPoseidon for SparseMatPolyCommitment {
fn append_to_poseidon(&self, transcript: &mut PoseidonTranscript) {
transcript.append_u64(self.batch_size as u64);
transcript.append_u64(self.num_ops as u64);
transcript.append_u64(self.num_mem_cells as u64);
self.comm_comb_ops.append_to_poseidon(transcript);
self.comm_comb_mem.append_to_poseidon(transcript);
}
}
impl SparseMatPolynomial { impl SparseMatPolynomial {
pub fn new(num_vars_x: usize, num_vars_y: usize, M: Vec<SparseMatEntry>) -> Self { pub fn new(num_vars_x: usize, num_vars_y: usize, M: Vec<SparseMatEntry>) -> Self {
SparseMatPolynomial { SparseMatPolynomial {
@ -465,7 +483,7 @@ impl SparseMatPolynomial {
let val = &self.M[i].val; let val = &self.M[i].val;
(row, z[col] * val) (row, z[col] * val)
}) })
.fold(vec![Scalar::zero(); num_rows], |mut Mz, (r, v)| {
.fold(vec![Scalar::zero(); num_rows], |mut Mz, (r, v)| {
Mz[r] += v; Mz[r] += v;
Mz Mz
}) })
@ -732,10 +750,10 @@ impl HashLayerProof {
dense: &MultiSparseMatPolynomialAsDense, dense: &MultiSparseMatPolynomialAsDense,
derefs: &Derefs, derefs: &Derefs,
gens: &SparseMatPolyCommitmentGens, gens: &SparseMatPolyCommitmentGens,
transcript: &mut Transcript,
transcript: &mut PoseidonTranscript,
random_tape: &mut RandomTape, random_tape: &mut RandomTape,
) -> Self { ) -> Self {
transcript.append_protocol_name(HashLayerProof::protocol_name());
// transcript.append_protocol_name(HashLayerProof::protocol_name());
let (rand_mem, rand_ops) = rand; let (rand_mem, rand_ops) = rand;
@ -775,9 +793,8 @@ impl HashLayerProof {
evals_ops.extend(&eval_col_read_ts_vec); evals_ops.extend(&eval_col_read_ts_vec);
evals_ops.extend(&eval_val_vec); evals_ops.extend(&eval_val_vec);
evals_ops.resize(evals_ops.len().next_power_of_two(), Scalar::zero()); evals_ops.resize(evals_ops.len().next_power_of_two(), Scalar::zero());
evals_ops.append_to_transcript(b"claim_evals_ops", transcript);
let challenges_ops =
transcript.challenge_vector(b"challenge_combine_n_to_one", evals_ops.len().log_2());
transcript.append_scalar_vector(&evals_ops);
let challenges_ops = transcript.challenge_vector(evals_ops.len().log2());
let mut poly_evals_ops = DensePolynomial::new(evals_ops); let mut poly_evals_ops = DensePolynomial::new(evals_ops);
for i in (0..challenges_ops.len()).rev() { for i in (0..challenges_ops.len()).rev() {
@ -788,7 +805,7 @@ impl HashLayerProof {
let mut r_joint_ops = challenges_ops; let mut r_joint_ops = challenges_ops;
r_joint_ops.extend(rand_ops); r_joint_ops.extend(rand_ops);
debug_assert_eq!(dense.comb_ops.evaluate(&r_joint_ops), joint_claim_eval_ops); debug_assert_eq!(dense.comb_ops.evaluate(&r_joint_ops), joint_claim_eval_ops);
joint_claim_eval_ops.append_to_transcript(b"joint_claim_eval_ops", transcript);
transcript.append_scalar(&joint_claim_eval_ops);
let (proof_ops, _comm_ops_eval) = PolyEvalProof::prove( let (proof_ops, _comm_ops_eval) = PolyEvalProof::prove(
&dense.comb_ops, &dense.comb_ops,
None, None,
@ -802,9 +819,9 @@ impl HashLayerProof {
// form a single decommitment using comb_comb_mem at rand_mem // form a single decommitment using comb_comb_mem at rand_mem
let evals_mem: Vec<Scalar> = vec![eval_row_audit_ts, eval_col_audit_ts]; let evals_mem: Vec<Scalar> = vec![eval_row_audit_ts, eval_col_audit_ts];
evals_mem.append_to_transcript(b"claim_evals_mem", transcript);
let challenges_mem =
transcript.challenge_vector(b"challenge_combine_two_to_one", evals_mem.len().log_2());
// evals_mem.append_to_transcript(b"claim_evals_mem", transcript);
transcript.append_scalar_vector(&evals_mem);
let challenges_mem = transcript.challenge_vector(evals_mem.len().log2());
let mut poly_evals_mem = DensePolynomial::new(evals_mem); let mut poly_evals_mem = DensePolynomial::new(evals_mem);
for i in (0..challenges_mem.len()).rev() { for i in (0..challenges_mem.len()).rev() {
@ -815,7 +832,7 @@ impl HashLayerProof {
let mut r_joint_mem = challenges_mem; let mut r_joint_mem = challenges_mem;
r_joint_mem.extend(rand_mem); r_joint_mem.extend(rand_mem);
debug_assert_eq!(dense.comb_mem.evaluate(&r_joint_mem), joint_claim_eval_mem); debug_assert_eq!(dense.comb_mem.evaluate(&r_joint_mem), joint_claim_eval_mem);
joint_claim_eval_mem.append_to_transcript(b"joint_claim_eval_mem", transcript);
transcript.append_scalar(&joint_claim_eval_mem);
let (proof_mem, _comm_mem_eval) = PolyEvalProof::prove( let (proof_mem, _comm_mem_eval) = PolyEvalProof::prove(
&dense.comb_mem, &dense.comb_mem,
None, None,
@ -902,10 +919,10 @@ impl HashLayerProof {
ry: &[Scalar], ry: &[Scalar],
r_hash: &Scalar, r_hash: &Scalar,
r_multiset_check: &Scalar, r_multiset_check: &Scalar,
transcript: &mut Transcript,
transcript: &mut PoseidonTranscript,
) -> Result<(), ProofVerifyError> { ) -> Result<(), ProofVerifyError> {
let timer = Timer::new("verify_hash_proof"); let timer = Timer::new("verify_hash_proof");
transcript.append_protocol_name(HashLayerProof::protocol_name());
// transcript.append_protocol_name(HashLayerProof::protocol_name());
let (rand_mem, rand_ops) = rand; let (rand_mem, rand_ops) = rand;
@ -945,9 +962,9 @@ impl HashLayerProof {
evals_ops.extend(eval_col_read_ts_vec); evals_ops.extend(eval_col_read_ts_vec);
evals_ops.extend(eval_val_vec); evals_ops.extend(eval_val_vec);
evals_ops.resize(evals_ops.len().next_power_of_two(), Scalar::zero()); evals_ops.resize(evals_ops.len().next_power_of_two(), Scalar::zero());
evals_ops.append_to_transcript(b"claim_evals_ops", transcript);
let challenges_ops =
transcript.challenge_vector(b"challenge_combine_n_to_one", evals_ops.len().log_2());
transcript.append_scalar_vector(&evals_ops);
// evals_ops.append_to_transcript(b"claim_evals_ops", transcript);
let challenges_ops = transcript.challenge_vector(evals_ops.len().log2());
let mut poly_evals_ops = DensePolynomial::new(evals_ops); let mut poly_evals_ops = DensePolynomial::new(evals_ops);
for i in (0..challenges_ops.len()).rev() { for i in (0..challenges_ops.len()).rev() {
@ -957,21 +974,24 @@ impl HashLayerProof {
let joint_claim_eval_ops = poly_evals_ops[0]; let joint_claim_eval_ops = poly_evals_ops[0];
let mut r_joint_ops = challenges_ops; let mut r_joint_ops = challenges_ops;
r_joint_ops.extend(rand_ops); r_joint_ops.extend(rand_ops);
joint_claim_eval_ops.append_to_transcript(b"joint_claim_eval_ops", transcript);
self.proof_ops.verify_plain(
&gens.gens_ops,
transcript,
&r_joint_ops,
&joint_claim_eval_ops,
&comm.comm_comb_ops,
)?;
transcript.append_scalar(&joint_claim_eval_ops);
assert!(self
.proof_ops
.verify_plain(
&gens.gens_ops,
transcript,
&r_joint_ops,
&joint_claim_eval_ops,
&comm.comm_comb_ops
)
.is_ok());
// verify proof-mem using comm_comb_mem at rand_mem // verify proof-mem using comm_comb_mem at rand_mem
// form a single decommitment using comb_comb_mem at rand_mem // form a single decommitment using comb_comb_mem at rand_mem
let evals_mem: Vec<Scalar> = vec![*eval_row_audit_ts, *eval_col_audit_ts]; let evals_mem: Vec<Scalar> = vec![*eval_row_audit_ts, *eval_col_audit_ts];
evals_mem.append_to_transcript(b"claim_evals_mem", transcript);
let challenges_mem =
transcript.challenge_vector(b"challenge_combine_two_to_one", evals_mem.len().log_2());
// evals_mem.append_to_transcript(b"claim_evals_mem", transcript);
transcript.append_scalar_vector(&evals_mem);
let challenges_mem = transcript.challenge_vector(evals_mem.len().log2());
let mut poly_evals_mem = DensePolynomial::new(evals_mem); let mut poly_evals_mem = DensePolynomial::new(evals_mem);
for i in (0..challenges_mem.len()).rev() { for i in (0..challenges_mem.len()).rev() {
@ -981,7 +1001,8 @@ impl HashLayerProof {
let joint_claim_eval_mem = poly_evals_mem[0]; let joint_claim_eval_mem = poly_evals_mem[0];
let mut r_joint_mem = challenges_mem; let mut r_joint_mem = challenges_mem;
r_joint_mem.extend(rand_mem); r_joint_mem.extend(rand_mem);
joint_claim_eval_mem.append_to_transcript(b"joint_claim_eval_mem", transcript);
// joint_claim_eval_mem.append_to_transcript(b"joint_claim_eval_mem", transcript);
transcript.append_scalar(&joint_claim_eval_mem);
self.proof_mem.verify_plain( self.proof_mem.verify_plain(
&gens.gens_mem, &gens.gens_mem,
transcript, transcript,
@ -1042,9 +1063,9 @@ impl ProductLayerProof {
dense: &MultiSparseMatPolynomialAsDense, dense: &MultiSparseMatPolynomialAsDense,
derefs: &Derefs, derefs: &Derefs,
eval: &[Scalar], eval: &[Scalar],
transcript: &mut Transcript,
transcript: &mut PoseidonTranscript,
) -> (Self, Vec<Scalar>, Vec<Scalar>) { ) -> (Self, Vec<Scalar>, Vec<Scalar>) {
transcript.append_protocol_name(ProductLayerProof::protocol_name());
// transcript.append_protocol_name(ProductLayerProof::protocol_name());
let row_eval_init = row_prod_layer.init.evaluate(); let row_eval_init = row_prod_layer.init.evaluate();
let row_eval_audit = row_prod_layer.audit.evaluate(); let row_eval_audit = row_prod_layer.audit.evaluate();
@ -1062,10 +1083,10 @@ impl ProductLayerProof {
let rs: Scalar = (0..row_eval_read.len()).map(|i| row_eval_read[i]).product(); let rs: Scalar = (0..row_eval_read.len()).map(|i| row_eval_read[i]).product();
assert_eq!(row_eval_init * ws, rs * row_eval_audit); assert_eq!(row_eval_init * ws, rs * row_eval_audit);
row_eval_init.append_to_transcript(b"claim_row_eval_init", transcript);
row_eval_read.append_to_transcript(b"claim_row_eval_read", transcript);
row_eval_write.append_to_transcript(b"claim_row_eval_write", transcript);
row_eval_audit.append_to_transcript(b"claim_row_eval_audit", transcript);
transcript.append_scalar(&row_eval_init);
transcript.append_scalar_vector(&row_eval_read);
transcript.append_scalar_vector(&row_eval_write);
transcript.append_scalar(&row_eval_audit);
let col_eval_init = col_prod_layer.init.evaluate(); let col_eval_init = col_prod_layer.init.evaluate();
let col_eval_audit = col_prod_layer.audit.evaluate(); let col_eval_audit = col_prod_layer.audit.evaluate();
@ -1083,10 +1104,10 @@ impl ProductLayerProof {
let rs: Scalar = (0..col_eval_read.len()).map(|i| col_eval_read[i]).product(); let rs: Scalar = (0..col_eval_read.len()).map(|i| col_eval_read[i]).product();
assert_eq!(col_eval_init * ws, rs * col_eval_audit); assert_eq!(col_eval_init * ws, rs * col_eval_audit);
col_eval_init.append_to_transcript(b"claim_col_eval_init", transcript);
col_eval_read.append_to_transcript(b"claim_col_eval_read", transcript);
col_eval_write.append_to_transcript(b"claim_col_eval_write", transcript);
col_eval_audit.append_to_transcript(b"claim_col_eval_audit", transcript);
transcript.append_scalar(&col_eval_init);
transcript.append_scalar_vector(&col_eval_read);
transcript.append_scalar_vector(&col_eval_write);
transcript.append_scalar(&col_eval_audit);
// prepare dotproduct circuit for batching then with ops-related product circuits // prepare dotproduct circuit for batching then with ops-related product circuits
assert_eq!(eval.len(), derefs.row_ops_val.len()); assert_eq!(eval.len(), derefs.row_ops_val.len());
@ -1109,8 +1130,10 @@ impl ProductLayerProof {
let (eval_dotp_left, eval_dotp_right) = let (eval_dotp_left, eval_dotp_right) =
(dotp_circuit_left.evaluate(), dotp_circuit_right.evaluate()); (dotp_circuit_left.evaluate(), dotp_circuit_right.evaluate());
eval_dotp_left.append_to_transcript(b"claim_eval_dotp_left", transcript);
eval_dotp_right.append_to_transcript(b"claim_eval_dotp_right", transcript);
// eval_dotp_left.append_to_transcript(b"claim_eval_dotp_left", transcript);
// eval_dotp_right.append_to_transcript(b"claim_eval_dotp_right", transcript);
transcript.append_scalar(&eval_dotp_left);
transcript.append_scalar(&eval_dotp_right);
assert_eq!(eval_dotp_left + eval_dotp_right, eval[i]); assert_eq!(eval_dotp_left + eval_dotp_right, eval[i]);
eval_dotp_left_vec.push(eval_dotp_left); eval_dotp_left_vec.push(eval_dotp_left);
eval_dotp_right_vec.push(eval_dotp_right); eval_dotp_right_vec.push(eval_dotp_right);
@ -1207,7 +1230,9 @@ impl ProductLayerProof {
}; };
let mut product_layer_proof_encoded: Vec<u8> = Vec::new(); let mut product_layer_proof_encoded: Vec<u8> = Vec::new();
product_layer_proof.serialize(&mut product_layer_proof_encoded).unwrap();
product_layer_proof
.serialize(&mut product_layer_proof_encoded)
.unwrap();
let msg = format!( let msg = format!(
"len_product_layer_proof {:?}", "len_product_layer_proof {:?}",
product_layer_proof_encoded.len() product_layer_proof_encoded.len()
@ -1222,7 +1247,7 @@ impl ProductLayerProof {
num_ops: usize, num_ops: usize,
num_cells: usize, num_cells: usize,
eval: &[Scalar], eval: &[Scalar],
transcript: &mut Transcript,
transcript: &mut PoseidonTranscript,
) -> Result< ) -> Result<
( (
Vec<Scalar>, Vec<Scalar>,
@ -1233,7 +1258,7 @@ impl ProductLayerProof {
), ),
ProofVerifyError, ProofVerifyError,
> { > {
transcript.append_protocol_name(ProductLayerProof::protocol_name());
// transcript.append_protocol_name(ProductLayerProof::protocol_name());
let timer = Timer::new("verify_prod_proof"); let timer = Timer::new("verify_prod_proof");
let num_instances = eval.len(); let num_instances = eval.len();
@ -1246,12 +1271,17 @@ impl ProductLayerProof {
.map(|i| row_eval_write[i]) .map(|i| row_eval_write[i])
.product(); .product();
let rs: Scalar = (0..row_eval_read.len()).map(|i| row_eval_read[i]).product(); let rs: Scalar = (0..row_eval_read.len()).map(|i| row_eval_read[i]).product();
assert_eq!( ws * row_eval_init , rs * row_eval_audit);
assert_eq!(ws * row_eval_init, rs * row_eval_audit);
// row_eval_init.append_to_transcript(b"claim_row_eval_init", transcript);
// row_eval_read.append_to_transcript(b"claim_row_eval_read", transcript);
// row_eval_write.append_to_transcript(b"claim_row_eval_write", transcript);
// row_eval_audit.append_to_transcript(b"claim_row_eval_audit", transcript);
row_eval_init.append_to_transcript(b"claim_row_eval_init", transcript);
row_eval_read.append_to_transcript(b"claim_row_eval_read", transcript);
row_eval_write.append_to_transcript(b"claim_row_eval_write", transcript);
row_eval_audit.append_to_transcript(b"claim_row_eval_audit", transcript);
transcript.append_scalar(row_eval_init);
transcript.append_scalar_vector(row_eval_read);
transcript.append_scalar_vector(row_eval_write);
transcript.append_scalar(row_eval_audit);
// subset check // subset check
let (col_eval_init, col_eval_read, col_eval_write, col_eval_audit) = &self.eval_col; let (col_eval_init, col_eval_read, col_eval_write, col_eval_audit) = &self.eval_col;
@ -1263,10 +1293,15 @@ impl ProductLayerProof {
let rs: Scalar = (0..col_eval_read.len()).map(|i| col_eval_read[i]).product(); let rs: Scalar = (0..col_eval_read.len()).map(|i| col_eval_read[i]).product();
assert_eq!(ws * col_eval_init, rs * col_eval_audit); assert_eq!(ws * col_eval_init, rs * col_eval_audit);
col_eval_init.append_to_transcript(b"claim_col_eval_init", transcript);
col_eval_read.append_to_transcript(b"claim_col_eval_read", transcript);
col_eval_write.append_to_transcript(b"claim_col_eval_write", transcript);
col_eval_audit.append_to_transcript(b"claim_col_eval_audit", transcript);
// col_eval_init.append_to_transcript(b"claim_col_eval_init", transcript);
// col_eval_read.append_to_transcript(b"claim_col_eval_read", transcript);
// col_eval_write.append_to_transcript(b"claim_col_eval_write", transcript);
// col_eval_audit.append_to_transcript(b"claim_col_eval_audit", transcript);
transcript.append_scalar(col_eval_init);
transcript.append_scalar_vector(col_eval_read);
transcript.append_scalar_vector(col_eval_write);
transcript.append_scalar(col_eval_audit);
// verify the evaluation of the sparse polynomial // verify the evaluation of the sparse polynomial
let (eval_dotp_left, eval_dotp_right) = &self.eval_val; let (eval_dotp_left, eval_dotp_right) = &self.eval_val;
@ -1275,8 +1310,10 @@ impl ProductLayerProof {
let mut claims_dotp_circuit: Vec<Scalar> = Vec::new(); let mut claims_dotp_circuit: Vec<Scalar> = Vec::new();
for i in 0..num_instances { for i in 0..num_instances {
assert_eq!(eval_dotp_left[i] + eval_dotp_right[i], eval[i]); assert_eq!(eval_dotp_left[i] + eval_dotp_right[i], eval[i]);
eval_dotp_left[i].append_to_transcript(b"claim_eval_dotp_left", transcript);
eval_dotp_right[i].append_to_transcript(b"claim_eval_dotp_right", transcript);
// eval_dotp_left[i].append_to_transcript(b"claim_eval_dotp_left", transcript);
// eval_dotp_right[i].append_to_transcript(b"claim_eval_dotp_right", transcript)
transcript.append_scalar(&eval_dotp_left[i]);
transcript.append_scalar(&eval_dotp_right[i]);
claims_dotp_circuit.push(eval_dotp_left[i]); claims_dotp_circuit.push(eval_dotp_left[i]);
claims_dotp_circuit.push(eval_dotp_right[i]); claims_dotp_circuit.push(eval_dotp_right[i]);
@ -1330,10 +1367,10 @@ impl PolyEvalNetworkProof {
derefs: &Derefs, derefs: &Derefs,
evals: &[Scalar], evals: &[Scalar],
gens: &SparseMatPolyCommitmentGens, gens: &SparseMatPolyCommitmentGens,
transcript: &mut Transcript,
transcript: &mut PoseidonTranscript,
random_tape: &mut RandomTape, random_tape: &mut RandomTape,
) -> Self { ) -> Self {
transcript.append_protocol_name(PolyEvalNetworkProof::protocol_name());
// transcript.append_protocol_name(PolyEvalNetworkProof::protocol_name());
let (proof_prod_layer, rand_mem, rand_ops) = ProductLayerProof::prove( let (proof_prod_layer, rand_mem, rand_ops) = ProductLayerProof::prove(
&mut network.row_layers.prod_layer, &mut network.row_layers.prod_layer,
@ -1370,10 +1407,10 @@ impl PolyEvalNetworkProof {
ry: &[Scalar], ry: &[Scalar],
r_mem_check: &(Scalar, Scalar), r_mem_check: &(Scalar, Scalar),
nz: usize, nz: usize,
transcript: &mut Transcript,
transcript: &mut PoseidonTranscript,
) -> Result<(), ProofVerifyError> { ) -> Result<(), ProofVerifyError> {
let timer = Timer::new("verify_polyeval_proof"); let timer = Timer::new("verify_polyeval_proof");
transcript.append_protocol_name(PolyEvalNetworkProof::protocol_name());
// transcript.append_protocol_name(PolyEvalNetworkProof::protocol_name());
let num_instances = evals.len(); let num_instances = evals.len();
let (r_hash, r_multiset_check) = r_mem_check; let (r_hash, r_multiset_check) = r_mem_check;
@ -1459,10 +1496,10 @@ impl SparseMatPolyEvalProof {
ry: &[Scalar], ry: &[Scalar],
evals: &[Scalar], // a vector evaluation of \widetilde{M}(r = (rx,ry)) for each M evals: &[Scalar], // a vector evaluation of \widetilde{M}(r = (rx,ry)) for each M
gens: &SparseMatPolyCommitmentGens, gens: &SparseMatPolyCommitmentGens,
transcript: &mut Transcript,
transcript: &mut PoseidonTranscript,
random_tape: &mut RandomTape, random_tape: &mut RandomTape,
) -> SparseMatPolyEvalProof { ) -> SparseMatPolyEvalProof {
transcript.append_protocol_name(SparseMatPolyEvalProof::protocol_name());
// transcript.append_protocol_name(SparseMatPolyEvalProof::protocol_name());
// ensure there is one eval for each polynomial in dense // ensure there is one eval for each polynomial in dense
assert_eq!(evals.len(), dense.batch_size); assert_eq!(evals.len(), dense.batch_size);
@ -1481,14 +1518,14 @@ impl SparseMatPolyEvalProof {
let timer_commit = Timer::new("commit_nondet_witness"); let timer_commit = Timer::new("commit_nondet_witness");
let comm_derefs = { let comm_derefs = {
let comm = derefs.commit(&gens.gens_derefs); let comm = derefs.commit(&gens.gens_derefs);
comm.append_to_transcript(b"comm_poly_row_col_ops_val", transcript);
comm.append_to_poseidon(transcript);
comm comm
}; };
timer_commit.stop(); timer_commit.stop();
let poly_eval_network_proof = { let poly_eval_network_proof = {
// produce a random element from the transcript for hash function // produce a random element from the transcript for hash function
let r_mem_check = transcript.challenge_vector(b"challenge_r_hash", 2);
let r_mem_check = transcript.challenge_vector(2);
// build a network to evaluate the sparse polynomial // build a network to evaluate the sparse polynomial
let timer_build_network = Timer::new("build_layered_network"); let timer_build_network = Timer::new("build_layered_network");
@ -1529,9 +1566,9 @@ impl SparseMatPolyEvalProof {
ry: &[Scalar], ry: &[Scalar],
evals: &[Scalar], // evaluation of \widetilde{M}(r = (rx,ry)) evals: &[Scalar], // evaluation of \widetilde{M}(r = (rx,ry))
gens: &SparseMatPolyCommitmentGens, gens: &SparseMatPolyCommitmentGens,
transcript: &mut Transcript,
transcript: &mut PoseidonTranscript,
) -> Result<(), ProofVerifyError> { ) -> Result<(), ProofVerifyError> {
transcript.append_protocol_name(SparseMatPolyEvalProof::protocol_name());
// transcript.append_protocol_name(SparseMatPolyEvalProof::protocol_name());
// equalize the lengths of rx and ry // equalize the lengths of rx and ry
let (rx_ext, ry_ext) = SparseMatPolyEvalProof::equalize(rx, ry); let (rx_ext, ry_ext) = SparseMatPolyEvalProof::equalize(rx, ry);
@ -1540,12 +1577,10 @@ impl SparseMatPolyEvalProof {
assert_eq!(rx_ext.len().pow2(), num_mem_cells); assert_eq!(rx_ext.len().pow2(), num_mem_cells);
// add claims to transcript and obtain challenges for randomized mem-check circuit // add claims to transcript and obtain challenges for randomized mem-check circuit
self
.comm_derefs
.append_to_transcript(b"comm_poly_row_col_ops_val", transcript);
self.comm_derefs.append_to_poseidon(transcript);
// produce a random element from the transcript for hash function // produce a random element from the transcript for hash function
let r_mem_check = transcript.challenge_vector(b"challenge_r_hash", 2);
let r_mem_check = transcript.challenge_vector(2);
self.poly_eval_network_proof.verify( self.poly_eval_network_proof.verify(
comm, comm,
@ -1610,13 +1645,15 @@ impl SparsePolynomial {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use crate::{commitments::MultiCommitGens, parameters::poseidon_params};
use super::*; use super::*;
use ark_std::{UniformRand};
use rand::RngCore;
use ark_std::UniformRand;
use rand::RngCore;
#[test] #[test]
fn check_sparse_polyeval_proof() { fn check_sparse_polyeval_proof() {
let mut rng = ark_std::rand::thread_rng();
let mut rng = ark_std::rand::thread_rng();
let num_nz_entries: usize = 256; let num_nz_entries: usize = 256;
let num_rows: usize = 256; let num_rows: usize = 256;
@ -1628,7 +1665,7 @@ use rand::RngCore;
for _i in 0..num_nz_entries { for _i in 0..num_nz_entries {
M.push(SparseMatEntry::new( M.push(SparseMatEntry::new(
(rng.next_u64()% (num_rows as u64)) as usize,
(rng.next_u64() % (num_rows as u64)) as usize,
(rng.next_u64() % (num_cols as u64)) as usize, (rng.next_u64() % (num_cols as u64)) as usize,
Scalar::rand(&mut rng), Scalar::rand(&mut rng),
)); ));
@ -1656,8 +1693,9 @@ use rand::RngCore;
let eval = SparseMatPolynomial::multi_evaluate(&[&poly_M], &rx, &ry); let eval = SparseMatPolynomial::multi_evaluate(&[&poly_M], &rx, &ry);
let evals = vec![eval[0], eval[0], eval[0]]; let evals = vec![eval[0], eval[0], eval[0]];
let params = poseidon_params();
let mut random_tape = RandomTape::new(b"proof"); let mut random_tape = RandomTape::new(b"proof");
let mut prover_transcript = Transcript::new(b"example");
let mut prover_transcript = PoseidonTranscript::new(&params);
let proof = SparseMatPolyEvalProof::prove( let proof = SparseMatPolyEvalProof::prove(
&dense, &dense,
&rx, &rx,
@ -1668,7 +1706,8 @@ use rand::RngCore;
&mut random_tape, &mut random_tape,
); );
let mut verifier_transcript = Transcript::new(b"example");
let mut verifier_transcript = PoseidonTranscript::new(&params);
assert!(proof assert!(proof
.verify( .verify(
&poly_comm, &poly_comm,
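Several proofs in this file use the same n-to-1 reduction: a vector of claimed evaluations is appended to the transcript, `log2(n)` challenges are squeezed, and the vector is folded down to a single joint claim via repeated calls to `bound_poly_var_bot`. A self-contained sketch of that fold, under the assumption that binding the bottom variable combines adjacent entries (Fr standing in for `Scalar`):

```rust
use ark_bls12_377::Fr;
use ark_std::UniformRand;

// Bind the lowest-order variable to r: entries are combined in adjacent pairs.
// This mirrors what bound_poly_var_bot is assumed to do.
fn bind_bottom_var(evals: &[Fr], r: Fr) -> Vec<Fr> {
    (0..evals.len() / 2)
        .map(|i| evals[2 * i] + r * (evals[2 * i + 1] - evals[2 * i]))
        .collect()
}

// Apply the challenges in reverse order, matching the loops in this file,
// until one joint claim remains.
fn joint_claim(mut evals: Vec<Fr>, challenges: &[Fr]) -> Fr {
    for r in challenges.iter().rev() {
        evals = bind_bottom_var(&evals, *r);
    }
    evals[0]
}

fn main() {
    let mut rng = ark_std::rand::thread_rng();
    let evals: Vec<Fr> = (0..4).map(|_| Fr::rand(&mut rng)).collect();
    let rs: Vec<Fr> = (0..2).map(|_| Fr::rand(&mut rng)).collect();
    // With two challenges, four claimed evaluations collapse to one joint claim.
    let _joint = joint_claim(evals, &rs);
}
```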

+ 491
- 489
src/sumcheck.rs
File diff suppressed because it is too large


+ 15
- 3
src/unipoly.rs

@ -1,10 +1,12 @@
use crate::poseidon_transcript::{AppendToPoseidon, PoseidonTranscript};
use super::commitments::{Commitments, MultiCommitGens}; use super::commitments::{Commitments, MultiCommitGens};
use super::group::GroupElement; use super::group::GroupElement;
use super::scalar::{Scalar};
use super::scalar::Scalar;
use super::transcript::{AppendToTranscript, ProofTranscript}; use super::transcript::{AppendToTranscript, ProofTranscript};
use merlin::Transcript;
use ark_ff::{Field, One, Zero};
use ark_serialize::*; use ark_serialize::*;
use ark_ff::{One, Zero, Field};
use merlin::Transcript;
// ax^2 + bx + c stored as vec![c,b,a] // ax^2 + bx + c stored as vec![c,b,a]
// ax^3 + bx^2 + cx + d stored as vec![d,c,b,a] // ax^3 + bx^2 + cx + d stored as vec![d,c,b,a]
#[derive(Debug)] #[derive(Debug)]
@ -109,6 +111,16 @@ impl CompressedUniPoly {
} }
} }
impl AppendToPoseidon for UniPoly {
fn append_to_poseidon(&self, transcript: &mut PoseidonTranscript) {
// transcript.append_message(label, b"UniPoly_begin");
for i in 0..self.coeffs.len() {
transcript.append_scalar(&self.coeffs[i]);
}
// transcript.append_message(label, b"UniPoly_end");
}
}
impl AppendToTranscript for UniPoly { impl AppendToTranscript for UniPoly {
fn append_to_transcript(&self, label: &'static [u8], transcript: &mut Transcript) { fn append_to_transcript(&self, label: &'static [u8], transcript: &mut Transcript) {
transcript.append_message(label, b"UniPoly_begin"); transcript.append_message(label, b"UniPoly_begin");
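The comment at the top of this file notes that coefficients are stored lowest-degree-first (`a*x^2 + b*x + c` as `vec![c, b, a]`). With that layout, evaluation is a simple reverse Horner fold; a short illustrative sketch (UniPoly's own evaluate method is not shown in this diff, so this stands beside it rather than reproducing it):

```rust
use ark_bls12_377::Fr;
use ark_ff::Zero;

// Evaluate a polynomial stored lowest-degree-first via Horner's rule.
fn eval_lowest_first(coeffs: &[Fr], x: Fr) -> Fr {
    coeffs.iter().rev().fold(Fr::zero(), |acc, c| acc * x + *c)
}

fn main() {
    // p(x) = 3x^2 + 2x + 1 stored as [1, 2, 3]; p(2) = 17.
    let coeffs: Vec<Fr> = [1u64, 2, 3].iter().map(|&c| Fr::from(c)).collect();
    assert_eq!(eval_lowest_first(&coeffs, Fr::from(2u64)), Fr::from(17u64));
}
```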
