mirror of https://github.com/arnaucube/testudo.git (synced 2026-01-12 16:51:28 +01:00)

Commit: implement alternative transcript with poseidon backend
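The call sites throughout this diff construct `PoseidonTranscript::new(&params)` and invoke `append_scalar`, `append_scalar_vector`, `append_point`, `challenge_scalar` and `challenge_vector`, but the new `src/poseidon_transcript.rs` module itself is not part of this excerpt. The following is a hypothetical sketch of the interface those call sites imply; the internal layout (a Poseidon sponge over the BLS12-377 base field `Fq`, as in `src/commitments.rs`) and the stub bodies are assumptions, not the repository's implementation.

use ark_bls12_377::{Fq, Fr as Scalar};
use ark_sponge::{
    poseidon::{PoseidonParameters, PoseidonSponge},
    CryptographicSponge,
};

// src/group.rs (further down in this diff) defines compressed points as byte vectors.
pub struct CompressedGroup(pub Vec<u8>);

pub struct PoseidonTranscript {
    sponge: PoseidonSponge<Fq>,
}

impl PoseidonTranscript {
    pub fn new(params: &PoseidonParameters<Fq>) -> Self {
        PoseidonTranscript { sponge: PoseidonSponge::new(params) }
    }
    pub fn append_scalar(&mut self, _scalar: &Scalar) { /* absorb the scalar's bytes */ }
    pub fn append_scalar_vector(&mut self, _scalars: &[Scalar]) { /* absorb each scalar in order */ }
    pub fn append_point(&mut self, _point: &CompressedGroup) { /* absorb the compressed bytes */ }
    pub fn challenge_scalar(&mut self) -> Scalar {
        // squeeze sponge output and map it into the scalar field
        unimplemented!()
    }
    pub fn challenge_vector(&mut self, len: usize) -> Vec<Scalar> {
        (0..len).map(|_| self.challenge_scalar()).collect()
    }
}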
Cargo.toml

@@ -33,6 +33,11 @@ ark-std = { version = "^0.3.0"}
ark-bls12-377 = { version = "^0.3.0", features = ["r1cs","curve"] }
ark-serialize = { version = "^0.3.0", features = ["derive"] }
ark-sponge = { version = "^0.3.0" , features = ["r1cs"] }
ark-crypto-primitives = { version = "^0.3.0", default-features = true }
ark-r1cs-std = { version = "^0.3.0", default-features = false }
ark-nonnative-field = { version = "0.3.0", default-features = false }
ark-relations = { version = "^0.3.0", default-features = false }
ark-snark = { version = "^0.3.0", default-features = false }

lazy_static = "1.4.0"
rand = { version = "0.8", features = [ "std", "std_rng" ] }

@@ -65,3 +70,6 @@ harness = false
default = ["curve25519-dalek/simd_backend"]
multicore = ["rayon"]
profile = []

[patch.crates-io]
ark-r1cs-std = { git = "https://github.com/arkworks-rs/r1cs-std/", rev = "a2a5ac491ae005ba2afd03fd21b7d3160d794a83"}
27 README.md

@@ -43,7 +43,8 @@ Some of our public APIs' style is inspired by the underlying crates we use.
# extern crate libspartan;
# extern crate merlin;
# use libspartan::{Instance, SNARKGens, SNARK};
# use merlin::Transcript;
# use libspartan::poseidon_transcript::PoseidonTranscript;
# use libspartan::parameters::poseidon_params;
# fn main() {
// specify the size of an R1CS instance
let num_vars = 1024;

@@ -60,12 +61,14 @@ Some of our public APIs' style is inspired by the underlying crates we use.
// create a commitment to the R1CS instance
let (comm, decomm) = SNARK::encode(&inst, &gens);

let params = poseidon_params();

// produce a proof of satisfiability
let mut prover_transcript = Transcript::new(b"snark_example");
let mut prover_transcript = PoseidonTranscript::new(&params);
let proof = SNARK::prove(&inst, &comm, &decomm, vars, &inputs, &gens, &mut prover_transcript);

// verify the proof of satisfiability
let mut verifier_transcript = Transcript::new(b"snark_example");
let mut verifier_transcript = PoseidonTranscript::new(&params);
assert!(proof
.verify(&comm, &inputs, &mut verifier_transcript, &gens)
.is_ok());

@@ -79,7 +82,8 @@ Here is another example to use the NIZK variant of the Spartan proof system:
# extern crate libspartan;
# extern crate merlin;
# use libspartan::{Instance, NIZKGens, NIZK};
# use merlin::Transcript;
# use libspartan::poseidon_transcript::PoseidonTranscript;
# use libspartan::parameters::poseidon_params;
# fn main() {
// specify the size of an R1CS instance
let num_vars = 1024;

@@ -92,12 +96,14 @@ Here is another example to use the NIZK variant of the Spartan proof system:
// ask the library to produce a synthetic R1CS instance
let (inst, vars, inputs) = Instance::produce_synthetic_r1cs(num_cons, num_vars, num_inputs);

let params = poseidon_params();

// produce a proof of satisfiability
let mut prover_transcript = Transcript::new(b"nizk_example");
let mut prover_transcript = PoseidonTranscript::new(&params);
let proof = NIZK::prove(&inst, vars, &inputs, &gens, &mut prover_transcript);

// verify the proof of satisfiability
let mut verifier_transcript = Transcript::new(b"nizk_example");
let mut verifier_transcript = PoseidonTranscript::new(&params);
assert!(proof
.verify(&inst, &inputs, &mut verifier_transcript, &gens)
.is_ok());

@@ -114,8 +120,10 @@ Finally, we provide an example that specifies a custom R1CS instance instead of
# extern crate merlin;
# mod scalar;
# use scalar::Scalar;
# use libspartan::parameters::poseidon_params;
# use libspartan::{InputsAssignment, Instance, SNARKGens, VarsAssignment, SNARK};
# use merlin::Transcript;
# use libspartan::poseidon_transcript::{AppendToPoseidon, PoseidonTranscript};
#
# use ark_ff::{PrimeField, Field, BigInteger};
# use ark_std::{One, Zero, UniformRand};
# fn main() {

@@ -135,9 +143,10 @@ Finally, we provide an example that specifies a custom R1CS instance instead of

// create a commitment to the R1CS instance
let (comm, decomm) = SNARK::encode(&inst, &gens);
let params = poseidon_params();

// produce a proof of satisfiability
let mut prover_transcript = Transcript::new(b"snark_example");
let mut prover_transcript = PoseidonTranscript::new(&params);
let proof = SNARK::prove(
&inst,
&comm,

@@ -149,7 +158,7 @@ Finally, we provide an example that specifies a custom R1CS instance instead of
);

// verify the proof of satisfiability
let mut verifier_transcript = Transcript::new(b"snark_example");
let mut verifier_transcript = PoseidonTranscript::new(&params);
assert!(proof
.verify(&comm, &assignment_inputs, &mut verifier_transcript, &gens)
.is_ok());
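Across all three README examples the only caller-visible change is the transcript object: the Merlin transcript seeded with a domain-separation label becomes a Poseidon transcript seeded with the sponge parameters from the new `poseidon_params` helper. A condensed before/after sketch of that swap (everything else in the examples stays as shown above):

use libspartan::parameters::poseidon_params;
use libspartan::poseidon_transcript::PoseidonTranscript;
use merlin::Transcript;

fn main() {
    // before: a Merlin transcript keyed by a protocol label
    let mut prover_transcript = Transcript::new(b"snark_example");

    // after: a Poseidon transcript keyed by the sponge parameters instead
    let params = poseidon_params();
    let mut prover_transcript = PoseidonTranscript::new(&params);

    // SNARK::prove / SNARK::verify (and the NIZK variants) are then called
    // exactly as before, receiving `&mut prover_transcript`.
}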
@@ -7,7 +7,9 @@ extern crate libspartan;
extern crate merlin;
extern crate sha3;

use libspartan::{Instance, NIZKGens, NIZK};
use libspartan::{
parameters::poseidon_params, poseidon_transcript::PoseidonTranscript, Instance, NIZKGens, NIZK,
};
use merlin::Transcript;

use criterion::*;

@@ -22,6 +24,8 @@ fn nizk_prove_benchmark(c: &mut Criterion) {
let num_cons = num_vars;
let num_inputs = 10;

let params = poseidon_params();

let (inst, vars, inputs) = Instance::produce_synthetic_r1cs(num_cons, num_vars, num_inputs);

let gens = NIZKGens::new(num_cons, num_vars, num_inputs);

@@ -29,7 +33,7 @@ fn nizk_prove_benchmark(c: &mut Criterion) {
let name = format!("NIZK_prove_{}", num_vars);
group.bench_function(&name, move |b| {
b.iter(|| {
let mut prover_transcript = Transcript::new(b"example");
let mut prover_transcript = PoseidonTranscript::new(&params);
NIZK::prove(
black_box(&inst),
black_box(vars.clone()),

@@ -55,15 +59,15 @@ fn nizk_verify_benchmark(c: &mut Criterion) {
let (inst, vars, inputs) = Instance::produce_synthetic_r1cs(num_cons, num_vars, num_inputs);

let gens = NIZKGens::new(num_cons, num_vars, num_inputs);

let params = poseidon_params();
// produce a proof of satisfiability
let mut prover_transcript = Transcript::new(b"example");
let mut prover_transcript = PoseidonTranscript::new(&params);
let proof = NIZK::prove(&inst, vars, &inputs, &gens, &mut prover_transcript);

let name = format!("NIZK_verify_{}", num_cons);
group.bench_function(&name, move |b| {
b.iter(|| {
let mut verifier_transcript = Transcript::new(b"example");
let mut verifier_transcript = PoseidonTranscript::new(&params);
assert!(proof
.verify(
black_box(&inst),
@@ -2,7 +2,11 @@
extern crate libspartan;
extern crate merlin;

use libspartan::{Instance, SNARKGens, SNARK};
use libspartan::{
parameters::poseidon_params,
poseidon_transcript::{self, PoseidonTranscript},
Instance, SNARKGens, SNARK,
};
use merlin::Transcript;

use criterion::*;

@@ -42,6 +46,8 @@ fn snark_prove_benchmark(c: &mut Criterion) {
let num_cons = num_vars;
let num_inputs = 10;

let params = poseidon_params();

let (inst, vars, inputs) = Instance::produce_synthetic_r1cs(num_cons, num_vars, num_inputs);

// produce public parameters

@@ -54,7 +60,7 @@ fn snark_prove_benchmark(c: &mut Criterion) {
let name = format!("SNARK_prove_{}", num_cons);
group.bench_function(&name, move |b| {
b.iter(|| {
let mut prover_transcript = Transcript::new(b"example");
let mut prover_transcript = PoseidonTranscript::new(&params);
SNARK::prove(
black_box(&inst),
black_box(&comm),

@@ -76,6 +82,8 @@ fn snark_verify_benchmark(c: &mut Criterion) {
let mut group = c.benchmark_group("SNARK_verify_benchmark");
group.plot_config(plot_config);

let params = poseidon_params();

let num_vars = (2_usize).pow(s as u32);
let num_cons = num_vars;
let num_inputs = 10;

@@ -88,7 +96,7 @@ fn snark_verify_benchmark(c: &mut Criterion) {
let (comm, decomm) = SNARK::encode(&inst, &gens);

// produce a proof of satisfiability
let mut prover_transcript = Transcript::new(b"example");
let mut prover_transcript = PoseidonTranscript::new(&params);
let proof = SNARK::prove(
&inst,
&comm,

@@ -103,7 +111,7 @@ fn snark_verify_benchmark(c: &mut Criterion) {
let name = format!("SNARK_verify_{}", num_cons);
group.bench_function(&name, move |b| {
b.iter(|| {
let mut verifier_transcript = Transcript::new(b"example");
let mut verifier_transcript = PoseidonTranscript::new(&params);
assert!(proof
.verify(
black_box(&comm),
@@ -11,7 +11,15 @@
use ark_bls12_377::Fr as Scalar;
use ark_ff::{BigInteger, PrimeField};
use ark_std::{One, UniformRand, Zero};
<<<<<<< HEAD
use libspartan::{InputsAssignment, Instance, SNARKGens, VarsAssignment, SNARK};
=======
use libspartan::{
parameters::poseidon_params,
poseidon_transcript::{self, PoseidonTranscript},
InputsAssignment, Instance, SNARKGens, VarsAssignment, SNARK,
};
>>>>>>> implement alternative transcript with poseidon backend
use merlin::Transcript;

#[allow(non_snake_case)]

@@ -119,6 +127,8 @@ fn main() {
assignment_inputs,
) = produce_r1cs();

let params = poseidon_params();

// produce public parameters
let gens = SNARKGens::new(num_cons, num_vars, num_inputs, num_non_zero_entries);

@@ -126,7 +136,7 @@ fn main() {
let (comm, decomm) = SNARK::encode(&inst, &gens);

// produce a proof of satisfiability
let mut prover_transcript = Transcript::new(b"snark_example");
let mut prover_transcript = PoseidonTranscript::new(&params);
let proof = SNARK::prove(
&inst,
&comm,

@@ -138,7 +148,7 @@ fn main() {
);

// verify the proof of satisfiability
let mut verifier_transcript = Transcript::new(b"snark_example");
let mut verifier_transcript = PoseidonTranscript::new(&params);
assert!(proof
.verify(&comm, &assignment_inputs, &mut verifier_transcript, &gens)
.is_ok());
@@ -6,9 +6,11 @@ extern crate libspartan;
extern crate merlin;
extern crate rand;

use ark_serialize::*;
use libspartan::parameters::poseidon_params;
use libspartan::poseidon_transcript::PoseidonTranscript;
use libspartan::{Instance, NIZKGens, NIZK};
use merlin::Transcript;
use ark_serialize::*;

fn print(msg: &str) {
let star = "* ";

@@ -31,8 +33,9 @@ pub fn main() {
// produce public generators
let gens = NIZKGens::new(num_cons, num_vars, num_inputs);

let params = poseidon_params();
// produce a proof of satisfiability
let mut prover_transcript = Transcript::new(b"nizk_example");
let mut prover_transcript = PoseidonTranscript::new(&params);
let proof = NIZK::prove(&inst, vars, &inputs, &gens, &mut prover_transcript);

let mut proof_encoded = Vec::new();

@@ -41,7 +44,7 @@ pub fn main() {
print(&msg_proof_len);

// verify the proof of satisfiability
let mut verifier_transcript = Transcript::new(b"nizk_example");
let mut verifier_transcript = PoseidonTranscript::new(&params);
assert!(proof
.verify(&inst, &inputs, &mut verifier_transcript, &gens)
.is_ok());
@@ -5,9 +5,10 @@ extern crate flate2;
extern crate libspartan;
extern crate merlin;

use libspartan::{Instance, SNARKGens, SNARK};
use merlin::Transcript;
use ark_serialize::*;
use libspartan::parameters::poseidon_params;
use libspartan::poseidon_transcript::PoseidonTranscript;
use libspartan::{Instance, SNARKGens, SNARK};

fn print(msg: &str) {
let star = "* ";

@@ -33,8 +34,10 @@ pub fn main() {
// create a commitment to R1CSInstance
let (comm, decomm) = SNARK::encode(&inst, &gens);

let params = poseidon_params();

// produce a proof of satisfiability
let mut prover_transcript = Transcript::new(b"snark_example");
let mut prover_transcript = PoseidonTranscript::new(&params);
let proof = SNARK::prove(
&inst,
&comm,

@@ -51,7 +54,7 @@ pub fn main() {
print(&msg_proof_len);

// verify the proof of satisfiability
let mut verifier_transcript = Transcript::new(b"snark_example");
let mut verifier_transcript = PoseidonTranscript::new(&params);
assert!(proof
.verify(&comm, &inputs, &mut verifier_transcript, &gens)
.is_ok());
@@ -1,17 +1,16 @@
use super::group::{Fq, GroupElement, GroupElementAffine, VartimeMultiscalarMul, GROUP_BASEPOINT};
use super::scalar::Scalar;
use crate::group::{CompressGroupElement, DecompressGroupElement};
use crate::parameters::*;
use super::group::{GroupElement, VartimeMultiscalarMul, GROUP_BASEPOINT, GroupElementAffine, CurveField};
use super::scalar::Scalar;
use ark_bls12_377::Fq;
use ark_ec::{AffineCurve, ProjectiveCurve};
use ark_ff::PrimeField;
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
use ark_sponge::poseidon::{PoseidonParameters, PoseidonSponge};
use ark_sponge::CryptographicSponge;
use digest::{ExtendableOutput, Input};
use sha3::Shake256;
use std::io::Read;
use std::str::FromStr;
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
use ark_ec::{ProjectiveCurve, AffineCurve};
use ark_sponge::poseidon::{PoseidonParameters, PoseidonSponge};

#[derive(Debug)]
pub struct MultiCommitGens {

@@ -21,33 +20,8 @@ pub struct MultiCommitGens {
}

impl MultiCommitGens {
pub fn poseidon_params() -> PoseidonParameters<CurveField> {
let arks = P1["ark"]
.members()
.map(|ark| {
ark.members()
.map(|v| Fq::from_str(v.as_str().unwrap()).unwrap())
.collect::<Vec<_>>()
})
.collect::<Vec<_>>();
let mds = P1["mds"]
.members()
.map(|m| {
m.members()
.map(|v| Fq::from_str(v.as_str().unwrap()).unwrap())
.collect::<Vec<_>>()
})
.collect::<Vec<_>>();
PoseidonParameters::new(
P1["full_rounds"].as_u32().unwrap(),
P1["partial_rounds"].as_u32().unwrap(),
P1["alpha"].as_u64().unwrap(),
mds,
arks,
)
}
pub fn new(n: usize, label: &[u8]) -> Self {
let params = MultiCommitGens::poseidon_params();
pub fn new(n: usize, label: &[u8]) -> Self {
let params = poseidon_params();
let mut sponge = PoseidonSponge::new(&params);
sponge.absorb(&label);
sponge.absorb(&GROUP_BASEPOINT.into_affine());

@@ -57,11 +31,11 @@ impl MultiCommitGens {
let mut el_aff: Option<GroupElementAffine> = None;
while el_aff.is_some() != true {
let uniform_bytes = sponge.squeeze_bytes(64);
el_aff = GroupElementAffine::from_random_bytes(&uniform_bytes);
el_aff = GroupElementAffine::from_random_bytes(&uniform_bytes);
}
let el = el_aff.unwrap().mul_by_cofactor_to_projective();
gens.push(el);
}
let el = el_aff.unwrap().mul_by_cofactor_to_projective();
gens.push(el);
}

MultiCommitGens {
n,

@@ -111,7 +85,6 @@ impl Commitments for Vec<Scalar> {
fn commit(&self, blind: &Scalar, gens_n: &MultiCommitGens) -> GroupElement {
assert_eq!(gens_n.n, self.len());
GroupElement::vartime_multiscalar_mul(self, &gens_n.G) + gens_n.h.mul(blind.into_repr())
}
}

@@ -119,6 +92,5 @@ impl Commitments for [Scalar] {
fn commit(&self, blind: &Scalar, gens_n: &MultiCommitGens) -> GroupElement {
assert_eq!(gens_n.n, self.len());
GroupElement::vartime_multiscalar_mul(self, &gens_n.G) + gens_n.h.mul(blind.into_repr())
}
}
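After this commit, `MultiCommitGens::new` derives its generators by absorbing the label and the group basepoint into a Poseidon sponge and then rejection-sampling curve points from squeezed bytes. The sketch below is a readable reconstruction of that derivation; every call mirrors the hunk above, but the loop bound (`n + 1` generators, n for `G` plus one blinding generator `h`) and the `gens` declaration are not visible in the hunk and are assumptions. The surrounding `n`, `label` and group type aliases come from the enclosing function and `src/group.rs`.

// Sketch of the generator derivation inside MultiCommitGens::new (assumptions noted above).
let params = poseidon_params();
let mut sponge = PoseidonSponge::new(&params);
sponge.absorb(&label);
sponge.absorb(&GROUP_BASEPOINT.into_affine());

let mut gens: Vec<GroupElement> = Vec::new();
for _ in 0..n + 1 {
    // rejection-sample: squeeze bytes until they decode to a valid affine point
    let mut el_aff: Option<GroupElementAffine> = None;
    while el_aff.is_none() {
        let uniform_bytes = sponge.squeeze_bytes(64);
        el_aff = GroupElementAffine::from_random_bytes(&uniform_bytes);
    }
    // clear the cofactor so the generator lies in the prime-order subgroup
    let el = el_aff.unwrap().mul_by_cofactor_to_projective();
    gens.push(el);
}
// ... the sampled points then populate the MultiCommitGens { n, .. } struct as before.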
@@ -1,18 +1,21 @@
#![allow(clippy::too_many_arguments)]
use crate::poseidon_transcript::{AppendToPoseidon, PoseidonTranscript};

use super::commitments::{Commitments, MultiCommitGens};
use super::errors::ProofVerifyError;
use super::group::{GroupElement, CompressedGroup, VartimeMultiscalarMul, CompressGroupElement, DecompressGroupElement};
use super::group::{
CompressGroupElement, CompressedGroup, DecompressGroupElement, GroupElement,
VartimeMultiscalarMul,
};
use super::math::Math;
use super::nizk::{DotProductProofGens, DotProductProofLog};
use super::random::RandomTape;
use super::scalar::Scalar;
use super::transcript::{AppendToTranscript, ProofTranscript};
use ark_ff::{One, Zero};
use ark_serialize::*;
use core::ops::Index;
use merlin::Transcript;
use ark_serialize::*;
use ark_ff::{One,Zero};

#[cfg(feature = "multicore")]
use rayon::prelude::*;

@@ -299,6 +302,14 @@ impl AppendToTranscript for PolyCommitment {
}
}

impl AppendToPoseidon for PolyCommitment {
fn append_to_poseidon(&self, transcript: &mut PoseidonTranscript) {
for i in 0..self.C.len() {
transcript.append_point(&self.C[i]);
}
}
}

#[derive(Debug, CanonicalSerialize, CanonicalDeserialize)]
pub struct PolyEvalProof {
proof: DotProductProofLog,

@@ -316,10 +327,10 @@ impl PolyEvalProof {
Zr: &Scalar, // evaluation of \widetilde{Z}(r)
blind_Zr_opt: Option<&Scalar>, // specifies a blind for Zr
gens: &PolyCommitmentGens,
transcript: &mut Transcript,
transcript: &mut PoseidonTranscript,
random_tape: &mut RandomTape,
) -> (PolyEvalProof, CompressedGroup) {
transcript.append_protocol_name(PolyEvalProof::protocol_name());
// transcript.append_protocol_name(PolyEvalProof::protocol_name());

// assert vectors are of the right size
assert_eq!(poly.get_num_vars(), r.len());

@@ -367,19 +378,23 @@ impl PolyEvalProof {
pub fn verify(
&self,
gens: &PolyCommitmentGens,
transcript: &mut Transcript,
transcript: &mut PoseidonTranscript,
r: &[Scalar], // point at which the polynomial is evaluated
C_Zr: &CompressedGroup, // commitment to \widetilde{Z}(r)
comm: &PolyCommitment,
) -> Result<(), ProofVerifyError> {
transcript.append_protocol_name(PolyEvalProof::protocol_name());
// transcript.append_protocol_name(PolyEvalProof::protocol_name());

// compute L and R
let eq = EqPolynomial::new(r.to_vec());
let (L, R) = eq.compute_factored_evals();

// compute a weighted sum of commitments and L
let C_decompressed = comm.C.iter().map(|pt| GroupElement::decompress(pt).unwrap()).collect::<Vec<GroupElement>>();
let C_decompressed = comm
.C
.iter()
.map(|pt| GroupElement::decompress(pt).unwrap())
.collect::<Vec<GroupElement>>();

let C_LZ = GroupElement::vartime_multiscalar_mul(&L, C_decompressed.as_slice()).compress();

@@ -391,7 +406,7 @@ impl PolyEvalProof {
pub fn verify_plain(
&self,
gens: &PolyCommitmentGens,
transcript: &mut Transcript,
transcript: &mut PoseidonTranscript,
r: &[Scalar], // point at which the polynomial is evaluated
Zr: &Scalar, // evaluation \widetilde{Z}(r)
comm: &PolyCommitment,

@@ -405,8 +420,10 @@ impl PolyEvalProof {

#[cfg(test)]
mod tests {
use crate::parameters::poseidon_params;

use super::*;
use ark_std::{UniformRand};
use ark_std::UniformRand;

fn evaluate_with_LR(Z: &[Scalar], r: &[Scalar]) -> Scalar {
let eq = EqPolynomial::new(r.to_vec());

@@ -436,7 +453,7 @@ mod tests {
Scalar::one(),
Scalar::from(2),
Scalar::from(1),
Scalar::from(4)
Scalar::from(4),
];

// r = [4,3]

@@ -569,7 +586,7 @@ mod tests {
Scalar::from(1),
Scalar::from(2),
Scalar::from(1),
Scalar::from(4)
Scalar::from(4),
];
let poly = DensePolynomial::new(Z);

@@ -582,7 +599,8 @@ mod tests {
let (poly_commitment, blinds) = poly.commit(&gens, None);

let mut random_tape = RandomTape::new(b"proof");
let mut prover_transcript = Transcript::new(b"example");
let params = poseidon_params();
let mut prover_transcript = PoseidonTranscript::new(&params);
let (proof, C_Zr) = PolyEvalProof::prove(
&poly,
Some(&blinds),

@@ -594,7 +612,7 @@ mod tests {
&mut random_tape,
);

let mut verifier_transcript = Transcript::new(b"example");
let mut verifier_transcript = PoseidonTranscript::new(&params);
assert!(proof
.verify(&gens, &mut verifier_transcript, &r, &C_Zr, &poly_commitment)
.is_ok());
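The `impl AppendToPoseidon for PolyCommitment` added above mirrors the existing `AppendToTranscript` impl, but absorbs each compressed commitment point in order with no per-element domain-separation label. The trait definition itself lives in `src/poseidon_transcript.rs`, which is outside this excerpt; the shape below is the one the impl implies and should be read as an assumption.

// Assumed trait shape behind the impl shown in the hunk above.
pub trait AppendToPoseidon {
    fn append_to_poseidon(&self, transcript: &mut PoseidonTranscript);
}

// The committed evaluations of a dense multilinear polynomial are bound to the
// Fiat-Shamir state simply by absorbing every compressed point, in a fixed order.
impl AppendToPoseidon for PolyCommitment {
    fn append_to_poseidon(&self, transcript: &mut PoseidonTranscript) {
        for i in 0..self.C.len() {
            transcript.append_point(&self.C[i]);
        }
    }
}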
10 src/group.rs

@@ -1,24 +1,20 @@
use crate::errors::ProofVerifyError;
use ark_ec::msm::VariableBaseMSM;
use ark_ff::PrimeField;
use digest::DynDigest;
use lazy_static::lazy_static;
use num_bigint::BigInt;

use lazy_static::lazy_static;
use num_bigint::BigInt;

use super::scalar::Scalar;
use ark_ec::{AffineCurve, ProjectiveCurve};
use ark_ec::{AffineCurve, ProjectiveCurve};
use ark_serialize::*;
use ark_serialize::*;
use core::borrow::Borrow;
use core::ops::{Mul, MulAssign};

pub type GroupElement = ark_bls12_377::G1Projective;
pub type GroupElementAffine = ark_bls12_377::G1Affine;
pub type CurveField = ark_bls12_377::Fq;
pub type Fq = ark_bls12_377::Fq;
pub type Fr = ark_bls12_377::Fr;

#[derive(Clone, Eq, PartialEq, Hash, Debug, CanonicalSerialize, CanonicalDeserialize)]
pub struct CompressedGroup(pub Vec<u8>);
68 src/lib.rs

@@ -26,7 +26,6 @@ mod errors;
mod group;
mod math;
mod nizk;
mod parameters;
mod product_tree;
mod r1csinstance;
mod r1csproof;

@@ -38,12 +37,18 @@ mod timer;
mod transcript;
mod unipoly;

/// TODO
pub mod parameters;
/// TODO
pub mod poseidon_transcript;

use ark_ff::{BigInteger, Field, PrimeField};
use ark_serialize::*;
use ark_std::{One, UniformRand, Zero};
use core::cmp::max;
use errors::{ProofVerifyError, R1CSError};
use merlin::Transcript;
use poseidon_transcript::{AppendToPoseidon, PoseidonTranscript};
use r1csinstance::{
R1CSCommitment, R1CSCommitmentGens, R1CSDecommitment, R1CSEvalProof, R1CSInstance,
};

@@ -354,7 +359,7 @@ impl SNARK {
vars: VarsAssignment,
inputs: &InputsAssignment,
gens: &SNARKGens,
transcript: &mut Transcript,
transcript: &mut PoseidonTranscript,
) -> Self {
let timer_prove = Timer::new("SNARK::prove");

@@ -362,8 +367,8 @@ impl SNARK {
// to aid the prover produce its randomness
let mut random_tape = RandomTape::new(b"proof");

transcript.append_protocol_name(SNARK::protocol_name());
comm.comm.append_to_transcript(b"comm", transcript);
// transcript.append_protocol_name(SNARK::protocol_name());
comm.comm.append_to_poseidon(transcript);

let (r1cs_sat_proof, rx, ry) = {
let (proof, rx, ry) = {

@@ -400,9 +405,9 @@ impl SNARK {
let timer_eval = Timer::new("eval_sparse_polys");
let inst_evals = {
let (Ar, Br, Cr) = inst.inst.evaluate(&rx, &ry);
Ar.append_to_transcript(b"Ar_claim", transcript);
Br.append_to_transcript(b"Br_claim", transcript);
Cr.append_to_transcript(b"Cr_claim", transcript);
transcript.append_scalar(&Ar);
transcript.append_scalar(&Br);
transcript.append_scalar(&Cr);
(Ar, Br, Cr)
};
timer_eval.stop();

@@ -437,14 +442,14 @@ impl SNARK {
&self,
comm: &ComputationCommitment,
input: &InputsAssignment,
transcript: &mut Transcript,
transcript: &mut PoseidonTranscript,
gens: &SNARKGens,
) -> Result<(), ProofVerifyError> {
let timer_verify = Timer::new("SNARK::verify");
transcript.append_protocol_name(SNARK::protocol_name());
// transcript.append_protocol_name(SNARK::protocol_name());

// append a commitment to the computation to the transcript
comm.comm.append_to_transcript(b"comm", transcript);
comm.comm.append_to_poseidon(transcript);

let timer_sat_proof = Timer::new("verify_sat_proof");
assert_eq!(input.assignment.len(), comm.comm.get_num_inputs());

@@ -460,9 +465,12 @@ impl SNARK {

let timer_eval_proof = Timer::new("verify_eval_proof");
let (Ar, Br, Cr) = &self.inst_evals;
Ar.append_to_transcript(b"Ar_claim", transcript);
Br.append_to_transcript(b"Br_claim", transcript);
Cr.append_to_transcript(b"Cr_claim", transcript);
// Ar.append_to_transcript(b"Ar_claim", transcript);
// Br.append_to_transcript(b"Br_claim", transcript);
// Cr.append_to_transcript(b"Cr_claim", transcript);
transcript.append_scalar(&Ar);
transcript.append_scalar(&Br);
transcript.append_scalar(&Cr);
self.r1cs_eval_proof.verify(
&comm.comm,
&rx,

@@ -516,15 +524,20 @@ impl NIZK {
vars: VarsAssignment,
input: &InputsAssignment,
gens: &NIZKGens,
transcript: &mut Transcript,
transcript: &mut PoseidonTranscript,
) -> Self {
let timer_prove = Timer::new("NIZK::prove");
// we create a Transcript object seeded with a random Scalar
// to aid the prover produce its randomness
let mut random_tape = RandomTape::new(b"proof");

<<<<<<< HEAD
transcript.append_protocol_name(NIZK::protocol_name());
transcript.append_message(b"R1CSInstanceDigest", &inst.digest);
=======
// transcript.append_protocol_name(NIZK::protocol_name());
inst.inst.append_to_poseidon(transcript);
>>>>>>> simplify transcript and change merlin backend to poseidon

let (r1cs_sat_proof, rx, ry) = {
// we might need to pad variables

@@ -564,13 +577,18 @@ impl NIZK {
&self,
inst: &Instance,
input: &InputsAssignment,
transcript: &mut Transcript,
transcript: &mut PoseidonTranscript,
gens: &NIZKGens,
) -> Result<(), ProofVerifyError> {
let timer_verify = Timer::new("NIZK::verify");

<<<<<<< HEAD
transcript.append_protocol_name(NIZK::protocol_name());
transcript.append_message(b"R1CSInstanceDigest", &inst.digest);
=======
// transcript.append_protocol_name(NIZK::protocol_name());
inst.inst.append_to_poseidon(transcript);
>>>>>>> simplify transcript and change merlin backend to poseidon

// We send evaluations of A, B, C at r = (rx, ry) as claims
// to enable the verifier complete the first sum-check

@@ -602,6 +620,8 @@ impl NIZK {

#[cfg(test)]
mod tests {
use crate::parameters::poseidon_params;

use super::*;
use ark_ff::PrimeField;

@@ -620,8 +640,10 @@ mod tests {
// create a commitment to R1CSInstance
let (comm, decomm) = SNARK::encode(&inst, &gens);

let params = poseidon_params();

// produce a proof
let mut prover_transcript = Transcript::new(b"example");
let mut prover_transcript = PoseidonTranscript::new(&params);
let proof = SNARK::prove(
&inst,
&comm,

@@ -633,7 +655,7 @@ mod tests {
);

// verify the proof
let mut verifier_transcript = Transcript::new(b"example");
let mut verifier_transcript = PoseidonTranscript::new(&params);
assert!(proof
.verify(&comm, &inputs, &mut verifier_transcript, &gens)
.is_ok());

@@ -732,8 +754,10 @@ mod tests {
// create a commitment to the R1CS instance
let (comm, decomm) = SNARK::encode(&inst, &gens);

let params = poseidon_params();

// produce a SNARK
let mut prover_transcript = Transcript::new(b"snark_example");
let mut prover_transcript = PoseidonTranscript::new(&params);
let proof = SNARK::prove(
&inst,
&comm,

@@ -745,7 +769,7 @@ mod tests {
);

// verify the SNARK
let mut verifier_transcript = Transcript::new(b"snark_example");
let mut verifier_transcript = PoseidonTranscript::new(&params);
assert!(proof
.verify(&comm, &assignment_inputs, &mut verifier_transcript, &gens)
.is_ok());

@@ -753,8 +777,10 @@ mod tests {
// NIZK public params
let gens = NIZKGens::new(num_cons, num_vars, num_inputs);

let params = poseidon_params();

// produce a NIZK
let mut prover_transcript = Transcript::new(b"nizk_example");
let mut prover_transcript = PoseidonTranscript::new(&params);
let proof = NIZK::prove(
&inst,
assignment_vars,

@@ -764,7 +790,7 @@ mod tests {
);

// verify the NIZK
let mut verifier_transcript = Transcript::new(b"nizk_example");
let mut verifier_transcript = PoseidonTranscript::new(&params);
assert!(proof
.verify(&inst, &assignment_inputs, &mut verifier_transcript, &gens)
.is_ok());
@@ -4,6 +4,7 @@
#![allow(clippy::type_complexity)]
#![allow(clippy::too_many_arguments)]
use crate::math::Math;
use crate::poseidon_transcript::PoseidonTranscript;

use super::super::errors::ProofVerifyError;
use super::super::group::{

@@ -38,7 +39,7 @@ impl BulletReductionProof {
/// The lengths of the vectors must all be the same, and must all be
/// either 0 or a power of 2.
pub fn prove(
transcript: &mut Transcript,
transcript: &mut PoseidonTranscript,
Q: &GroupElement,
G_vec: &[GroupElement],
H: &GroupElement,

@@ -122,10 +123,10 @@ impl BulletReductionProof {
.as_slice(),
);

transcript.append_point(b"L", &L.compress());
transcript.append_point(b"R", &R.compress());
transcript.append_point(&L.compress());
transcript.append_point(&R.compress());

let u = transcript.challenge_scalar(b"u");
let u = transcript.challenge_scalar();
let u_inv = u.inverse().unwrap();

for i in 0..n {

@@ -163,7 +164,7 @@ impl BulletReductionProof {
fn verification_scalars(
&self,
n: usize,
transcript: &mut Transcript,
transcript: &mut PoseidonTranscript,
) -> Result<(Vec<Scalar>, Vec<Scalar>, Vec<Scalar>), ProofVerifyError> {
let lg_n = self.L_vec.len();
if lg_n >= 32 {

@@ -178,9 +179,9 @@ impl BulletReductionProof {
// 1. Recompute x_k,...,x_1 based on the proof transcript
let mut challenges = Vec::with_capacity(lg_n);
for (L, R) in self.L_vec.iter().zip(self.R_vec.iter()) {
transcript.append_point(b"L", L);
transcript.append_point(b"R", R);
challenges.push(transcript.challenge_scalar(b"u"));
transcript.append_point(L);
transcript.append_point(R);
challenges.push(transcript.challenge_scalar());
}

// 2. Compute 1/(u_k...u_1) and 1/u_k, ..., 1/u_1

@@ -224,7 +225,7 @@ impl BulletReductionProof {
&self,
n: usize,
a: &[Scalar],
transcript: &mut Transcript,
transcript: &mut PoseidonTranscript,
Gamma: &GroupElement,
G: &[GroupElement],
) -> Result<(GroupElement, GroupElement, Scalar), ProofVerifyError> {
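Throughout this file each labelled Merlin pair `append_point(b"L", ...)` / `challenge_scalar(b"u")` becomes an unlabelled `append_point(...)` / `challenge_scalar()`. One plausible way a label-free challenge can be produced from a sponge over the BLS12-377 base field is to squeeze bytes and reduce them into the scalar field; the helper below is a hedged sketch of that idea, with an assumed name, and is not the repository's actual `challenge_scalar` implementation.

use ark_bls12_377::{Fq, Fr as Scalar};
use ark_ff::PrimeField;
use ark_sponge::{poseidon::PoseidonSponge, CryptographicSponge};

// Hypothetical helper: derive one Fr challenge from an Fq-based Poseidon sponge.
fn squeeze_challenge(sponge: &mut PoseidonSponge<Fq>) -> Scalar {
    // 64 squeezed bytes leave ample slack for a near-uniform reduction mod r.
    let bytes = sponge.squeeze_bytes(64);
    Scalar::from_le_bytes_mod_order(&bytes)
}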
183 src/nizk/mod.rs

@@ -1,5 +1,6 @@
#![allow(clippy::too_many_arguments)]
use crate::math::Math;
use crate::poseidon_transcript::{AppendToPoseidon, PoseidonTranscript};

use super::commitments::{Commitments, MultiCommitGens};
use super::errors::ProofVerifyError;

@@ -34,24 +35,24 @@ impl KnowledgeProof {

pub fn prove(
gens_n: &MultiCommitGens,
transcript: &mut Transcript,
transcript: &mut PoseidonTranscript,
random_tape: &mut RandomTape,
x: &Scalar,
r: &Scalar,
) -> (KnowledgeProof, CompressedGroup) {
transcript.append_protocol_name(KnowledgeProof::protocol_name());
// transcript.append_protocol_name(KnowledgeProof::protocol_name());

// produce two random Scalars
let t1 = random_tape.random_scalar(b"t1");
let t2 = random_tape.random_scalar(b"t2");

let C = x.commit(r, gens_n).compress();
C.append_to_transcript(b"C", transcript);
C.append_to_poseidon(transcript);

let alpha = t1.commit(&t2, gens_n).compress();
alpha.append_to_transcript(b"alpha", transcript);
alpha.append_to_poseidon(transcript);

let c = transcript.challenge_scalar(b"c");
let c = transcript.challenge_scalar();

let z1 = c * x + t1;
let z2 = c * r + t2;

@@ -62,14 +63,14 @@ impl KnowledgeProof {
pub fn verify(
&self,
gens_n: &MultiCommitGens,
transcript: &mut Transcript,
transcript: &mut PoseidonTranscript,
C: &CompressedGroup,
) -> Result<(), ProofVerifyError> {
transcript.append_protocol_name(KnowledgeProof::protocol_name());
C.append_to_transcript(b"C", transcript);
self.alpha.append_to_transcript(b"alpha", transcript);
// transcript.append_protocol_name(KnowledgeProof::protocol_name());
C.append_to_poseidon(transcript);
self.alpha.append_to_poseidon(transcript);

let c = transcript.challenge_scalar(b"c");
let c = transcript.challenge_scalar();

let lhs = self.z1.commit(&self.z2, gens_n).compress();
let rhs = (C.unpack()?.mul(c.into_repr()) + self.alpha.unpack()?).compress();

@@ -95,28 +96,28 @@ impl EqualityProof {

pub fn prove(
gens_n: &MultiCommitGens,
transcript: &mut Transcript,
transcript: &mut PoseidonTranscript,
random_tape: &mut RandomTape,
v1: &Scalar,
s1: &Scalar,
v2: &Scalar,
s2: &Scalar,
) -> (EqualityProof, CompressedGroup, CompressedGroup) {
transcript.append_protocol_name(EqualityProof::protocol_name());
// transcript.append_protocol_name(EqualityProof::protocol_name());

// produce a random Scalar
let r = random_tape.random_scalar(b"r");

let C1 = v1.commit(s1, gens_n).compress();
C1.append_to_transcript(b"C1", transcript);
transcript.append_point(&C1);

let C2 = v2.commit(s2, gens_n).compress();
C2.append_to_transcript(b"C2", transcript);
transcript.append_point(&C2);

let alpha = gens_n.h.mul(r.into_repr()).compress();
alpha.append_to_transcript(b"alpha", transcript);
transcript.append_point(&alpha);

let c = transcript.challenge_scalar(b"c");
let c = transcript.challenge_scalar();

let z = c * ((*s1) - s2) + r;

@@ -126,16 +127,17 @@ impl EqualityProof {
pub fn verify(
&self,
gens_n: &MultiCommitGens,
transcript: &mut Transcript,
transcript: &mut PoseidonTranscript,
C1: &CompressedGroup,
C2: &CompressedGroup,
) -> Result<(), ProofVerifyError> {
transcript.append_protocol_name(EqualityProof::protocol_name());
C1.append_to_transcript(b"C1", transcript);
C2.append_to_transcript(b"C2", transcript);
self.alpha.append_to_transcript(b"alpha", transcript);
// transcript.append_protocol_name(EqualityProof::protocol_name());

let c = transcript.challenge_scalar(b"c");
transcript.append_point(&C1);
transcript.append_point(&C2);
transcript.append_point(&self.alpha);

let c = transcript.challenge_scalar();
let rhs = {
let C = C1.unpack()? - C2.unpack()?;
(C.mul(c.into_repr()) + self.alpha.unpack()?).compress()

@@ -167,7 +169,7 @@ impl ProductProof {

pub fn prove(
gens_n: &MultiCommitGens,
transcript: &mut Transcript,
transcript: &mut PoseidonTranscript,
random_tape: &mut RandomTape,
x: &Scalar,
rX: &Scalar,

@@ -181,7 +183,7 @@ impl ProductProof {
CompressedGroup,
CompressedGroup,
) {
transcript.append_protocol_name(ProductProof::protocol_name());
// transcript.append_protocol_name(ProductProof::protocol_name());

// produce five random Scalar
let b1 = random_tape.random_scalar(b"b1");

@@ -193,23 +195,22 @@ impl ProductProof {
let X_unc = x.commit(rX, gens_n);

let X = X_unc.compress();
X.append_to_transcript(b"X", transcript);

transcript.append_point(&X);
let X_new = GroupElement::decompress(&X);

assert_eq!(X_unc, X_new.unwrap());

let Y = y.commit(rY, gens_n).compress();
Y.append_to_transcript(b"Y", transcript);
transcript.append_point(&Y);

let Z = z.commit(rZ, gens_n).compress();
Z.append_to_transcript(b"Z", transcript);
transcript.append_point(&Z);

let alpha = b1.commit(&b2, gens_n).compress();
alpha.append_to_transcript(b"alpha", transcript);
transcript.append_point(&alpha);

let beta = b3.commit(&b4, gens_n).compress();
beta.append_to_transcript(b"beta", transcript);
transcript.append_point(&beta);

let delta = {
let gens_X = &MultiCommitGens {

@@ -219,9 +220,9 @@ impl ProductProof {
};
b3.commit(&b5, gens_X).compress()
};
delta.append_to_transcript(b"delta", transcript);
transcript.append_point(&delta);

let c = transcript.challenge_scalar(b"c");
let c = transcript.challenge_scalar();

let z1 = b1 + c * x;
let z2 = b2 + c * rX;

@@ -263,19 +264,19 @@ impl ProductProof {
pub fn verify(
&self,
gens_n: &MultiCommitGens,
transcript: &mut Transcript,
transcript: &mut PoseidonTranscript,
X: &CompressedGroup,
Y: &CompressedGroup,
Z: &CompressedGroup,
) -> Result<(), ProofVerifyError> {
transcript.append_protocol_name(ProductProof::protocol_name());
// transcript.append_protocol_name(ProductProof::protocol_name());

X.append_to_transcript(b"X", transcript);
Y.append_to_transcript(b"Y", transcript);
Z.append_to_transcript(b"Z", transcript);
self.alpha.append_to_transcript(b"alpha", transcript);
self.beta.append_to_transcript(b"beta", transcript);
self.delta.append_to_transcript(b"delta", transcript);
X.append_to_poseidon(transcript);
Y.append_to_poseidon(transcript);
Z.append_to_poseidon(transcript);
self.alpha.append_to_poseidon(transcript);
self.beta.append_to_poseidon(transcript);
self.delta.append_to_poseidon(transcript);

let z1 = self.z[0];
let z2 = self.z[1];

@@ -283,7 +284,7 @@ impl ProductProof {
let z4 = self.z[3];
let z5 = self.z[4];

let c = transcript.challenge_scalar(b"c");
let c = transcript.challenge_scalar();

if ProductProof::check_equality(&self.alpha, X, &c, gens_n, &z1, &z2)
&& ProductProof::check_equality(&self.beta, Y, &c, gens_n, &z3, &z4)

@@ -329,7 +330,7 @@ impl DotProductProof {
pub fn prove(
gens_1: &MultiCommitGens,
gens_n: &MultiCommitGens,
transcript: &mut Transcript,
transcript: &mut PoseidonTranscript,
random_tape: &mut RandomTape,
x_vec: &[Scalar],
blind_x: &Scalar,

@@ -337,7 +338,7 @@ impl DotProductProof {
y: &Scalar,
blind_y: &Scalar,
) -> (DotProductProof, CompressedGroup, CompressedGroup) {
transcript.append_protocol_name(DotProductProof::protocol_name());
// transcript.append_protocol_name(DotProductProof::protocol_name());

let n = x_vec.len();
assert_eq!(x_vec.len(), a_vec.len());

@@ -350,22 +351,22 @@ impl DotProductProof {
let r_beta = random_tape.random_scalar(b"r_beta");

let Cx = x_vec.commit(blind_x, gens_n).compress();
Cx.append_to_transcript(b"Cx", transcript);
Cx.append_to_poseidon(transcript);

let Cy = y.commit(blind_y, gens_1).compress();
Cy.append_to_transcript(b"Cy", transcript);
Cy.append_to_poseidon(transcript);

a_vec.append_to_transcript(b"a", transcript);
transcript.append_scalar_vector(&a_vec.to_vec());

let delta = d_vec.commit(&r_delta, gens_n).compress();
delta.append_to_transcript(b"delta", transcript);
delta.append_to_poseidon(transcript);

let dotproduct_a_d = DotProductProof::compute_dotproduct(a_vec, &d_vec);

let beta = dotproduct_a_d.commit(&r_beta, gens_1).compress();
beta.append_to_transcript(b"beta", transcript);
beta.append_to_poseidon(transcript);

let c = transcript.challenge_scalar(b"c");
let c = transcript.challenge_scalar();

let z = (0..d_vec.len())
.map(|i| c * x_vec[i] + d_vec[i])

@@ -391,7 +392,7 @@ impl DotProductProof {
&self,
gens_1: &MultiCommitGens,
gens_n: &MultiCommitGens,
transcript: &mut Transcript,
transcript: &mut PoseidonTranscript,
a: &[Scalar],
Cx: &CompressedGroup,
Cy: &CompressedGroup,

@@ -399,14 +400,14 @@ impl DotProductProof {
assert_eq!(gens_n.n, a.len());
assert_eq!(gens_1.n, 1);

transcript.append_protocol_name(DotProductProof::protocol_name());
Cx.append_to_transcript(b"Cx", transcript);
Cy.append_to_transcript(b"Cy", transcript);
a.append_to_transcript(b"a", transcript);
self.delta.append_to_transcript(b"delta", transcript);
self.beta.append_to_transcript(b"beta", transcript);
// transcript.append_protocol_name(DotProductProof::protocol_name());
Cx.append_to_poseidon(transcript);
Cy.append_to_poseidon(transcript);
transcript.append_scalar_vector(&a.to_vec());
self.delta.append_to_poseidon(transcript);
self.beta.append_to_poseidon(transcript);

let c = transcript.challenge_scalar(b"c");
let c = transcript.challenge_scalar();

let mut result = Cx.unpack()?.mul(c.into_repr()) + self.delta.unpack()?
== self.z.commit(&self.z_delta, gens_n);

@@ -456,7 +457,7 @@ impl DotProductProofLog {

pub fn prove(
gens: &DotProductProofGens,
transcript: &mut Transcript,
transcript: &mut PoseidonTranscript,
random_tape: &mut RandomTape,
x_vec: &[Scalar],
blind_x: &Scalar,

@@ -464,7 +465,7 @@ impl DotProductProofLog {
y: &Scalar,
blind_y: &Scalar,
) -> (DotProductProofLog, CompressedGroup, CompressedGroup) {
transcript.append_protocol_name(DotProductProofLog::protocol_name());
// transcript.append_protocol_name(DotProductProofLog::protocol_name());

let n = x_vec.len();
assert_eq!(x_vec.len(), a_vec.len());

@@ -483,12 +484,11 @@ impl DotProductProofLog {
};

let Cx = x_vec.commit(blind_x, &gens.gens_n).compress();
Cx.append_to_transcript(b"Cx", transcript);
transcript.append_point(&Cx);

let Cy = y.commit(blind_y, &gens.gens_1).compress();
Cy.append_to_transcript(b"Cy", transcript);

a_vec.append_to_transcript(b"a", transcript);
transcript.append_point(&Cy);
transcript.append_scalar_vector(&a_vec.to_vec());

let blind_Gamma = (*blind_x) + blind_y;
let (bullet_reduction_proof, _Gamma_hat, x_hat, a_hat, g_hat, rhat_Gamma) =

@@ -512,12 +512,12 @@ impl DotProductProofLog {
};
d.commit(&r_delta, &gens_hat).compress()
};
delta.append_to_transcript(b"delta", transcript);
transcript.append_point(&delta);

let beta = d.commit(&r_beta, &gens.gens_1).compress();
beta.append_to_transcript(b"beta", transcript);
transcript.append_point(&beta);

let c = transcript.challenge_scalar(b"c");
let c = transcript.challenge_scalar();

let z1 = d + c * y_hat;
let z2 = a_hat * (c * rhat_Gamma + r_beta) + r_delta;

@@ -539,7 +539,7 @@ impl DotProductProofLog {
&self,
n: usize,
gens: &DotProductProofGens,
transcript: &mut Transcript,
transcript: &mut PoseidonTranscript,
a: &[Scalar],
Cx: &CompressedGroup,
Cy: &CompressedGroup,

@@ -547,10 +547,14 @@ impl DotProductProofLog {
assert_eq!(gens.n, n);
assert_eq!(a.len(), n);

transcript.append_protocol_name(DotProductProofLog::protocol_name());
Cx.append_to_transcript(b"Cx", transcript);
Cy.append_to_transcript(b"Cy", transcript);
a.append_to_transcript(b"a", transcript);
// transcript.append_protocol_name(DotProductProofLog::protocol_name());
// Cx.append_to_poseidon( transcript);
// Cy.append_to_poseidon( transcript);
// a.append_to_poseidon( transcript);

transcript.append_point(&Cx);
transcript.append_point(&Cy);
transcript.append_scalar_vector(&a.to_vec());

let Gamma = Cx.unpack()? + Cy.unpack()?;

@@ -558,10 +562,13 @@ impl DotProductProofLog {
self
.bullet_reduction_proof
.verify(n, a, transcript, &Gamma, &gens.gens_n.G)?;
self.delta.append_to_transcript(b"delta", transcript);
self.beta.append_to_transcript(b"beta", transcript);
// self.delta.append_to_poseidon( transcript);
// self.beta.append_to_poseidon( transcript);

let c = transcript.challenge_scalar(b"c");
transcript.append_point(&self.delta);
transcript.append_point(&self.beta);

let c = transcript.challenge_scalar();

let c_s = &c;
let beta_s = self.beta.unpack()?;

@@ -590,7 +597,7 @@ impl DotProductProofLog {
mod tests {
use std::marker::PhantomData;

use crate::group::VartimeMultiscalarMul;
use crate::{group::VartimeMultiscalarMul, parameters::poseidon_params};

use super::*;
use ark_bls12_377::{Fq, FqParameters, G1Affine};

@@ -605,12 +612,14 @@ mod tests {
let x = Scalar::rand(&mut rng);
let r = Scalar::rand(&mut rng);

let params = poseidon_params();

let mut random_tape = RandomTape::new(b"proof");
let mut prover_transcript = Transcript::new(b"example");
let mut prover_transcript = PoseidonTranscript::new(&params);
let (proof, committed_value) =
KnowledgeProof::prove(&gens_1, &mut prover_transcript, &mut random_tape, &x, &r);

let mut verifier_transcript = Transcript::new(b"example");
let mut verifier_transcript = PoseidonTranscript::new(&params);
assert!(proof
.verify(&gens_1, &mut verifier_transcript, &committed_value)
.is_ok());

@@ -619,6 +628,7 @@ mod tests {
#[test]
fn check_equalityproof() {
let mut rng = ark_std::rand::thread_rng();
let params = poseidon_params();

let gens_1 = MultiCommitGens::new(1, b"test-equalityproof");
let v1 = Scalar::rand(&mut rng);

@@ -627,7 +637,7 @@ mod tests {
let s2 = Scalar::rand(&mut rng);

let mut random_tape = RandomTape::new(b"proof");
let mut prover_transcript = Transcript::new(b"example");
let mut prover_transcript = PoseidonTranscript::new(&params);
let (proof, C1, C2) = EqualityProof::prove(
&gens_1,
&mut prover_transcript,

@@ -638,7 +648,7 @@ mod tests {
&s2,
);

let mut verifier_transcript = Transcript::new(b"example");
let mut verifier_transcript = PoseidonTranscript::new(&params);
assert!(proof
.verify(&gens_1, &mut verifier_transcript, &C1, &C2)
.is_ok());

@@ -651,6 +661,7 @@ mod tests {
let pt_c = pt.compress();
let pt2 = GroupElement::decompress(&pt_c).unwrap();
assert_eq!(pt, pt2);
let params = poseidon_params();

let gens_1 = MultiCommitGens::new(1, b"test-productproof");
let x = Scalar::rand(&mut rng);

@@ -661,7 +672,7 @@ mod tests {
let rZ = Scalar::rand(&mut rng);

let mut random_tape = RandomTape::new(b"proof");
let mut prover_transcript = Transcript::new(b"example");
let mut prover_transcript = PoseidonTranscript::new(&params);
let (proof, X, Y, Z) = ProductProof::prove(
&gens_1,
&mut prover_transcript,

@@ -674,7 +685,7 @@ mod tests {
&rZ,
);

let mut verifier_transcript = Transcript::new(b"example");
let mut verifier_transcript = PoseidonTranscript::new(&params);
assert!(proof
.verify(&gens_1, &mut verifier_transcript, &X, &Y, &Z)
.is_ok());

@@ -688,6 +699,7 @@ mod tests {

let gens_1 = MultiCommitGens::new(1, b"test-two");
let gens_1024 = MultiCommitGens::new(n, b"test-1024");
let params = poseidon_params();

let mut x: Vec<Scalar> = Vec::new();
let mut a: Vec<Scalar> = Vec::new();

@@ -700,7 +712,7 @@ mod tests {
let r_y = Scalar::rand(&mut rng);

let mut random_tape = RandomTape::new(b"proof");
let mut prover_transcript = Transcript::new(b"example");
let mut prover_transcript = PoseidonTranscript::new(&params);
let (proof, Cx, Cy) = DotProductProof::prove(
&gens_1,
&gens_1024,

@@ -713,7 +725,7 @@ mod tests {
&r_y,
);

let mut verifier_transcript = Transcript::new(b"example");
let mut verifier_transcript = PoseidonTranscript::new(&params);
assert!(proof
.verify(&gens_1, &gens_1024, &mut verifier_transcript, &a, &Cx, &Cy)
.is_ok());

@@ -734,8 +746,9 @@ mod tests {
let r_x = Scalar::rand(&mut rng);
let r_y = Scalar::rand(&mut rng);

let params = poseidon_params();
let mut random_tape = RandomTape::new(b"proof");
let mut prover_transcript = Transcript::new(b"example");
let mut prover_transcript = PoseidonTranscript::new(&params);
let (proof, Cx, Cy) = DotProductProofLog::prove(
&gens,
&mut prover_transcript,

@@ -747,7 +760,7 @@ mod tests {
&r_y,
);

let mut verifier_transcript = Transcript::new(b"example");
let mut verifier_transcript = PoseidonTranscript::new(&params);
assert!(proof
.verify(n, &gens, &mut verifier_transcript, &a, &Cx, &Cy)
.is_ok());
@@ -1,10 +1,14 @@
use std::str::FromStr;

use ark_sponge::poseidon::PoseidonParameters;
// Copyright: https://github.com/nikkolasg/ark-dkg/blob/main/src/parameters.rs
use json::JsonValue;
use lazy_static::lazy_static;

use crate::group::Fq;

lazy_static! {
// bls12377_rate2_constraints:
/// bls12377_rate2_constraints:
pub static ref P1: JsonValue = object! {
"ark" => array![
array![

@@ -226,3 +230,31 @@ lazy_static! {
"partial_rounds" => 31
};
}

/// TODO
pub fn poseidon_params() -> PoseidonParameters<Fq> {
let arks = P1["ark"]
.members()
.map(|ark| {
ark
.members()
.map(|v| Fq::from_str(v.as_str().unwrap()).unwrap())
.collect::<Vec<_>>()
})
.collect::<Vec<_>>();
let mds = P1["mds"]
.members()
.map(|m| {
m.members()
.map(|v| Fq::from_str(v.as_str().unwrap()).unwrap())
.collect::<Vec<_>>()
})
.collect::<Vec<_>>();
PoseidonParameters::new(
P1["full_rounds"].as_u32().unwrap(),
P1["partial_rounds"].as_u32().unwrap(),
P1["alpha"].as_u64().unwrap(),
mds,
arks,
)
}
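The new public `poseidon_params()` rebuilds the `PoseidonParameters<Fq>` from the JSON constants in `P1` on every call. A small usage sketch, mirroring how `MultiCommitGens::new` and `PoseidonTranscript::new` consume it in this diff; the domain label and the squeezed length are illustrative values, not taken from the repository.

use ark_sponge::{poseidon::PoseidonSponge, CryptographicSponge};
use libspartan::parameters::poseidon_params;

fn main() {
    // Build the BLS12-377 Poseidon parameters from the embedded constants...
    let params = poseidon_params();
    // ...and feed them to a sponge, as the commitment generators and the
    // transcript constructor do.
    let mut sponge = PoseidonSponge::new(&params);
    let label: &[u8] = b"example-domain";
    sponge.absorb(&label);
    let bytes = sponge.squeeze_bytes(32);
    assert_eq!(bytes.len(), 32);
}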
@@ -1,13 +1,15 @@
|
||||
#![allow(dead_code)]
|
||||
use crate::poseidon_transcript::PoseidonTranscript;
|
||||
|
||||
use super::dense_mlpoly::DensePolynomial;
|
||||
use super::dense_mlpoly::EqPolynomial;
|
||||
use super::math::Math;
|
||||
use super::scalar::Scalar;
|
||||
use super::sumcheck::SumcheckInstanceProof;
|
||||
use super::transcript::ProofTranscript;
|
||||
use merlin::Transcript;
|
||||
use ark_serialize::*;
|
||||
use ark_std::{One};
|
||||
use ark_std::One;
|
||||
use merlin::Transcript;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct ProductCircuit {
|
||||
@@ -122,7 +124,7 @@ impl LayerProof {
|
||||
claim: Scalar,
|
||||
num_rounds: usize,
|
||||
degree_bound: usize,
|
||||
transcript: &mut Transcript,
|
||||
transcript: &mut PoseidonTranscript,
|
||||
) -> (Scalar, Vec<Scalar>) {
|
||||
self
|
||||
.proof
|
||||
@@ -146,7 +148,7 @@ impl LayerProofBatched {
|
||||
claim: Scalar,
|
||||
num_rounds: usize,
|
||||
degree_bound: usize,
|
||||
transcript: &mut Transcript,
|
||||
transcript: &mut PoseidonTranscript,
|
||||
) -> (Scalar, Vec<Scalar>) {
|
||||
self
|
||||
.proof
|
||||
@@ -170,7 +172,7 @@ impl ProductCircuitEvalProof {
|
||||
#![allow(dead_code)]
|
||||
pub fn prove(
|
||||
circuit: &mut ProductCircuit,
|
||||
transcript: &mut Transcript,
|
||||
transcript: &mut PoseidonTranscript,
|
||||
) -> (Self, Scalar, Vec<Scalar>) {
|
||||
let mut proof: Vec<LayerProof> = Vec::new();
|
||||
let num_layers = circuit.left_vec.len();
|
||||
@@ -198,11 +200,11 @@ impl ProductCircuitEvalProof {
|
||||
transcript,
|
||||
);
|
||||
|
||||
transcript.append_scalar(b"claim_prod_left", &claims_prod[0]);
|
||||
transcript.append_scalar(b"claim_prod_right", &claims_prod[1]);
|
||||
transcript.append_scalar(&claims_prod[0]);
|
||||
transcript.append_scalar(&claims_prod[1]);
|
||||
|
||||
// produce a random challenge
|
||||
let r_layer = transcript.challenge_scalar(b"challenge_r_layer");
|
||||
let r_layer = transcript.challenge_scalar();
|
||||
claim = claims_prod[0] + r_layer * (claims_prod[1] - claims_prod[0]);
|
||||
|
||||
let mut ext = vec![r_layer];
|
||||
@@ -222,7 +224,7 @@ impl ProductCircuitEvalProof {
&self,
eval: Scalar,
len: usize,
transcript: &mut Transcript,
transcript: &mut PoseidonTranscript,
) -> (Scalar, Vec<Scalar>) {
let num_layers = len.log_2();
let mut claim = eval;
@@ -233,8 +235,8 @@ impl ProductCircuitEvalProof {
let (claim_last, rand_prod) = self.proof[i].verify(claim, num_rounds, 3, transcript);

let claims_prod = &self.proof[i].claims;
transcript.append_scalar(b"claim_prod_left", &claims_prod[0]);
transcript.append_scalar(b"claim_prod_right", &claims_prod[1]);
transcript.append_scalar(&claims_prod[0]);
transcript.append_scalar(&claims_prod[1]);

assert_eq!(rand.len(), rand_prod.len());
let eq: Scalar = (0..rand.len())
@@ -245,7 +247,7 @@ impl ProductCircuitEvalProof {
assert_eq!(claims_prod[0] * claims_prod[1] * eq, claim_last);

// produce a random challenge
let r_layer = transcript.challenge_scalar(b"challenge_r_layer");
let r_layer = transcript.challenge_scalar();
claim = (Scalar::one() - r_layer) * claims_prod[0] + r_layer * claims_prod[1];
let mut ext = vec![r_layer];
ext.extend(rand_prod);
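The eq factor used in the check above is the multilinear equality polynomial; its closed form, eq(x, y) = prod_i (x_i*y_i + (1 - x_i)*(1 - y_i)), is an assumption here since the hunk truncates the closure. A standalone sketch over ark_bls12_377::Fr:

use ark_bls12_377::Fr as Scalar;
use ark_ff::One;

// eq(x, y) is 1 when the two boolean vectors coincide and interpolates multilinearly elsewhere.
fn eq_eval(x: &[Scalar], y: &[Scalar]) -> Scalar {
  assert_eq!(x.len(), y.len());
  x.iter()
    .zip(y.iter())
    .map(|(xi, yi)| *xi * yi + (Scalar::one() - xi) * (Scalar::one() - yi))
    .product()
}

fn main() {
  let x = vec![Scalar::from(1u64), Scalar::from(0u64), Scalar::from(1u64)];
  assert_eq!(eq_eval(&x, &x), Scalar::one());
}
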
@@ -260,7 +262,7 @@ impl ProductCircuitEvalProofBatched {
|
||||
pub fn prove(
|
||||
prod_circuit_vec: &mut Vec<&mut ProductCircuit>,
|
||||
dotp_circuit_vec: &mut Vec<&mut DotProductCircuit>,
|
||||
transcript: &mut Transcript,
|
||||
transcript: &mut PoseidonTranscript,
|
||||
) -> (Self, Vec<Scalar>) {
|
||||
assert!(!prod_circuit_vec.is_empty());
|
||||
|
||||
@@ -324,8 +326,7 @@ impl ProductCircuitEvalProofBatched {
|
||||
);
|
||||
|
||||
// produce a fresh set of coeffs and a joint claim
|
||||
let coeff_vec =
|
||||
transcript.challenge_vector(b"rand_coeffs_next_layer", claims_to_verify.len());
|
||||
let coeff_vec = transcript.challenge_vector(claims_to_verify.len());
|
||||
let claim = (0..claims_to_verify.len())
|
||||
.map(|i| claims_to_verify[i] * coeff_vec[i])
|
||||
.sum();
|
||||
@@ -342,22 +343,22 @@ impl ProductCircuitEvalProofBatched {
|
||||
|
||||
let (claims_prod_left, claims_prod_right, _claims_eq) = claims_prod;
|
||||
for i in 0..prod_circuit_vec.len() {
|
||||
transcript.append_scalar(b"claim_prod_left", &claims_prod_left[i]);
|
||||
transcript.append_scalar(b"claim_prod_right", &claims_prod_right[i]);
|
||||
transcript.append_scalar(&claims_prod_left[i]);
|
||||
transcript.append_scalar(&claims_prod_right[i]);
|
||||
}
|
||||
|
||||
if layer_id == 0 && !dotp_circuit_vec.is_empty() {
|
||||
let (claims_dotp_left, claims_dotp_right, claims_dotp_weight) = claims_dotp;
|
||||
for i in 0..dotp_circuit_vec.len() {
|
||||
transcript.append_scalar(b"claim_dotp_left", &claims_dotp_left[i]);
|
||||
transcript.append_scalar(b"claim_dotp_right", &claims_dotp_right[i]);
|
||||
transcript.append_scalar(b"claim_dotp_weight", &claims_dotp_weight[i]);
|
||||
transcript.append_scalar(&claims_dotp_left[i]);
|
||||
transcript.append_scalar(&claims_dotp_right[i]);
|
||||
transcript.append_scalar(&claims_dotp_weight[i]);
|
||||
}
|
||||
claims_dotp_final = (claims_dotp_left, claims_dotp_right, claims_dotp_weight);
|
||||
}
|
||||
|
||||
// produce a random challenge to condense two claims into a single claim
|
||||
let r_layer = transcript.challenge_scalar(b"challenge_r_layer");
|
||||
let r_layer = transcript.challenge_scalar();
|
||||
|
||||
claims_to_verify = (0..prod_circuit_vec.len())
|
||||
.map(|i| claims_prod_left[i] + r_layer * (claims_prod_right[i] - claims_prod_left[i]))
|
||||
@@ -388,7 +389,7 @@ impl ProductCircuitEvalProofBatched {
|
||||
claims_prod_vec: &[Scalar],
|
||||
claims_dotp_vec: &[Scalar],
|
||||
len: usize,
|
||||
transcript: &mut Transcript,
|
||||
transcript: &mut PoseidonTranscript,
|
||||
) -> (Vec<Scalar>, Vec<Scalar>, Vec<Scalar>) {
|
||||
let num_layers = len.log_2();
|
||||
let mut rand: Vec<Scalar> = Vec::new();
|
||||
@@ -403,8 +404,7 @@ impl ProductCircuitEvalProofBatched {
|
||||
}
|
||||
|
||||
// produce random coefficients, one for each instance
|
||||
let coeff_vec =
|
||||
transcript.challenge_vector(b"rand_coeffs_next_layer", claims_to_verify.len());
|
||||
let coeff_vec = transcript.challenge_vector(claims_to_verify.len());
|
||||
|
||||
// produce a joint claim
|
||||
let claim = (0..claims_to_verify.len())
|
||||
@@ -419,8 +419,8 @@ impl ProductCircuitEvalProofBatched {
|
||||
assert_eq!(claims_prod_right.len(), claims_prod_vec.len());
|
||||
|
||||
for i in 0..claims_prod_vec.len() {
|
||||
transcript.append_scalar(b"claim_prod_left", &claims_prod_left[i]);
|
||||
transcript.append_scalar(b"claim_prod_right", &claims_prod_right[i]);
|
||||
transcript.append_scalar(&claims_prod_left[i]);
|
||||
transcript.append_scalar(&claims_prod_right[i]);
|
||||
}
|
||||
|
||||
assert_eq!(rand.len(), rand_prod.len());
|
||||
@@ -438,9 +438,9 @@ impl ProductCircuitEvalProofBatched {
|
||||
let num_prod_instances = claims_prod_vec.len();
|
||||
let (claims_dotp_left, claims_dotp_right, claims_dotp_weight) = &self.claims_dotp;
|
||||
for i in 0..claims_dotp_left.len() {
|
||||
transcript.append_scalar(b"claim_dotp_left", &claims_dotp_left[i]);
|
||||
transcript.append_scalar(b"claim_dotp_right", &claims_dotp_right[i]);
|
||||
transcript.append_scalar(b"claim_dotp_weight", &claims_dotp_weight[i]);
|
||||
transcript.append_scalar(&claims_dotp_left[i]);
|
||||
transcript.append_scalar(&claims_dotp_right[i]);
|
||||
transcript.append_scalar(&claims_dotp_weight[i]);
|
||||
|
||||
claim_expected += coeff_vec[i + num_prod_instances]
|
||||
* claims_dotp_left[i]
|
||||
@@ -452,7 +452,7 @@ impl ProductCircuitEvalProofBatched {
|
||||
assert_eq!(claim_expected, claim_last);
|
||||
|
||||
// produce a random challenge
|
||||
let r_layer = transcript.challenge_scalar(b"challenge_r_layer");
|
||||
let r_layer = transcript.challenge_scalar();
|
||||
|
||||
claims_to_verify = (0..claims_prod_left.len())
|
||||
.map(|i| claims_prod_left[i] + r_layer * (claims_prod_right[i] - claims_prod_left[i]))
|
||||
|
||||
@@ -1,3 +1,4 @@
use crate::poseidon_transcript::{AppendToPoseidon, PoseidonTranscript};
use crate::transcript::AppendToTranscript;

use super::dense_mlpoly::DensePolynomial;
@@ -63,6 +64,15 @@ impl AppendToTranscript for R1CSCommitment {
}
}

impl AppendToPoseidon for R1CSCommitment {
fn append_to_poseidon(&self, transcript: &mut PoseidonTranscript) {
transcript.append_u64(self.num_cons as u64);
transcript.append_u64(self.num_vars as u64);
transcript.append_u64(self.num_inputs as u64);
self.comm.append_to_poseidon(transcript);
}
}

pub struct R1CSDecommitment {
dense: MultiSparseMatPolynomialAsDense,
}
@@ -328,7 +338,7 @@ impl R1CSEvalProof {
ry: &[Scalar],
evals: &(Scalar, Scalar, Scalar),
gens: &R1CSCommitmentGens,
transcript: &mut Transcript,
transcript: &mut PoseidonTranscript,
random_tape: &mut RandomTape,
) -> R1CSEvalProof {
let timer = Timer::new("R1CSEvalProof::prove");
@@ -353,7 +363,7 @@ impl R1CSEvalProof {
ry: &[Scalar],
evals: &(Scalar, Scalar, Scalar),
gens: &R1CSCommitmentGens,
transcript: &mut Transcript,
transcript: &mut PoseidonTranscript,
) -> Result<(), ProofVerifyError> {
self.proof.verify(
&comm.comm,

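The same pattern generalizes to any struct that must be absorbed into the sponge: absorb the scalar-sized metadata with append_u64, then delegate to the inner commitments. A hypothetical in-crate sketch (ToyCommitment is made up; only the trait and method names come from the hunk above):

use crate::poseidon_transcript::{AppendToPoseidon, PoseidonTranscript};

// Illustrative only: a commitment-like struct with two size fields.
struct ToyCommitment {
  num_rows: usize,
  num_cols: usize,
}

impl AppendToPoseidon for ToyCommitment {
  fn append_to_poseidon(&self, transcript: &mut PoseidonTranscript) {
    // Absorb structural metadata first, mirroring R1CSCommitment above;
    // a real commitment would then absorb its group elements.
    transcript.append_u64(self.num_rows as u64);
    transcript.append_u64(self.num_cols as u64);
  }
}
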
@@ -16,7 +16,6 @@ use super::r1csinstance::R1CSInstance;
use super::random::RandomTape;
use super::scalar::Scalar;
use super::sparse_mlpoly::{SparsePolyEntry, SparsePolynomial};
use super::sumcheck::ZKSumcheckInstanceProof;
use super::timer::Timer;
use super::transcript::{AppendToTranscript, ProofTranscript};
use ark_ec::ProjectiveCurve;
@@ -80,7 +79,7 @@ impl R1CSProof {
evals_Az: &mut DensePolynomial,
evals_Bz: &mut DensePolynomial,
evals_Cz: &mut DensePolynomial,
transcript: &mut Transcript,
transcript: &mut PoseidonTranscript,
) -> (SumcheckInstanceProof, Vec<Scalar>, Vec<Scalar>) {
let comb_func =
|poly_tau_comp: &Scalar,
@@ -108,7 +107,7 @@ impl R1CSProof {
claim: &Scalar,
evals_z: &mut DensePolynomial,
evals_ABC: &mut DensePolynomial,
transcript: &mut Transcript,
transcript: &mut PoseidonTranscript,
) -> (SumcheckInstanceProof, Vec<Scalar>, Vec<Scalar>) {
let comb_func =
|poly_A_comp: &Scalar, poly_B_comp: &Scalar| -> Scalar { (*poly_A_comp) * poly_B_comp };
@@ -128,16 +127,14 @@ impl R1CSProof {
vars: Vec<Scalar>,
input: &[Scalar],
gens: &R1CSGens,
transcript: &mut Transcript,
transcript: &mut PoseidonTranscript,
random_tape: &mut RandomTape,
) -> (R1CSProof, Vec<Scalar>, Vec<Scalar>) {
let timer_prove = Timer::new("R1CSProof::prove");
transcript.append_protocol_name(R1CSProof::protocol_name());

// we currently require the number of |inputs| + 1 to be at most number of vars
assert!(input.len() < vars.len());

input.append_to_transcript(b"input", transcript);
transcript.append_scalar_vector(&input.to_vec());

let poly_vars = DensePolynomial::new(vars.clone());

@@ -155,8 +152,9 @@ impl R1CSProof {
};

// derive the verifier's challenge tau
let (num_rounds_x, num_rounds_y) = (inst.get_num_cons().log_2(), z.len().log_2());
let tau = transcript.challenge_vector(b"challenge_tau", num_rounds_x);
let (num_rounds_x, num_rounds_y) =
(inst.get_num_cons().log2() as usize, z.len().log2() as usize);
let tau = transcript.challenge_vector(num_rounds_x);
// compute the initial evaluation table for R(\tau, x)
let mut poly_tau = DensePolynomial::new(EqPolynomial::new(tau).evals());
let (mut poly_Az, mut poly_Bz, mut poly_Cz) =
@@ -186,9 +184,9 @@ impl R1CSProof {

let timer_sc_proof_phase2 = Timer::new("prove_sc_phase_two");
// combine the three claims into a single claim
let r_A = transcript.challenge_scalar(b"challenege_Az");
let r_B = transcript.challenge_scalar(b"challenege_Bz");
let r_C = transcript.challenge_scalar(b"challenege_Cz");
let r_A = transcript.challenge_scalar();
let r_B = transcript.challenge_scalar();
let r_C = transcript.challenge_scalar();
let claim_phase2 = r_A * Az_claim + r_B * Bz_claim + r_C * Cz_claim;

let evals_ABC = {
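The table bound to poly_tau above holds the 2^num_rounds_x evaluations of eq(tau, x) over the boolean hypercube. A standalone sketch of that construction (the doubling order is illustrative; the crate's EqPolynomial fixes its own variable order):

use ark_bls12_377::Fr as Scalar;
use ark_ff::One;

// Build the evaluations of eq(tau, x) for all x in {0,1}^n.
fn eq_evals(tau: &[Scalar]) -> Vec<Scalar> {
  let mut evals = vec![Scalar::one()];
  for t in tau {
    let mut next = Vec::with_capacity(evals.len() * 2);
    for e in &evals {
      next.push(*e * (Scalar::one() - t)); // branch x_i = 0
      next.push(*e * t); // branch x_i = 1
    }
    evals = next;
  }
  evals
}

fn main() {
  let tau = vec![Scalar::from(2u64), Scalar::from(3u64)];
  let table = eq_evals(&tau);
  assert_eq!(table.len(), 4);
  // eq(tau, .) sums to 1 over the cube, a quick sanity check on the table.
  let sum: Scalar = table.iter().copied().sum();
  assert_eq!(sum, Scalar::one());
}
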
@@ -238,19 +236,21 @@ impl R1CSProof {
num_cons: usize,
input: &[Scalar],
evals: &(Scalar, Scalar, Scalar),
transcript: &mut Transcript,
transcript: &mut PoseidonTranscript,
gens: &R1CSGens,
) -> Result<(Vec<Scalar>, Vec<Scalar>), ProofVerifyError> {
transcript.append_protocol_name(R1CSProof::protocol_name());
// transcript.append_protocol_name(R1CSProof::protocol_name());

input.append_to_transcript(b"input", transcript);
for i in 0..input.len() {
transcript.append_scalar(&input[i]);
}

let n = num_vars;

let (num_rounds_x, num_rounds_y) = (num_cons.log_2(), (2 * num_vars).log_2());

// derive the verifier's challenge tau
let tau = transcript.challenge_vector(b"challenge_tau", num_rounds_x);
let tau = transcript.challenge_vector(num_rounds_x);

// verify the first sum-check instance
let claim_phase1 = Scalar::zero();
@@ -271,9 +271,9 @@ impl R1CSProof {
assert_eq!(claim_post_phase1, expected_claim_post_phase1);

// derive three public challenges and then derive a joint claim
let r_A = transcript.challenge_scalar(b"challenege_Az");
let r_B = transcript.challenge_scalar(b"challenege_Bz");
let r_C = transcript.challenge_scalar(b"challenege_Cz");
let r_A = transcript.challenge_scalar();
let r_B = transcript.challenge_scalar();
let r_C = transcript.challenge_scalar();

let claim_phase2 = r_A * Az_claim + r_B * Bz_claim + r_C * Cz_claim;

@@ -310,6 +310,8 @@ impl R1CSProof {

#[cfg(test)]
mod tests {
use crate::parameters::poseidon_params;

use super::*;
use ark_std::UniformRand;
use test::Bencher;
@@ -394,8 +396,10 @@ mod tests {

let gens = R1CSGens::new(b"test-m", num_cons, num_vars);

let params = poseidon_params();
let mut random_tape = RandomTape::new(b"proof");
let mut prover_transcript = Transcript::new(b"example");
// let mut prover_transcript = PoseidonTranscript::new(&params);
let mut prover_transcript = PoseidonTranscript::new(&params);
let (proof, rx, ry) = R1CSProof::prove(
&inst,
vars,
@@ -407,7 +411,8 @@ mod tests {

let inst_evals = inst.evaluate(&rx, &ry);

let mut verifier_transcript = Transcript::new(b"example");
// let mut verifier_transcript = PoseidonTranscript::new(&params);
let mut verifier_transcript = PoseidonTranscript::new(&params);
assert!(proof
.verify(
inst.get_num_vars(),

@@ -1,6 +1,8 @@
#![allow(clippy::type_complexity)]
#![allow(clippy::too_many_arguments)]
#![allow(clippy::needless_range_loop)]
use crate::poseidon_transcript::{AppendToPoseidon, PoseidonTranscript};

use super::dense_mlpoly::DensePolynomial;
use super::dense_mlpoly::{
EqPolynomial, IdentityPolynomial, PolyCommitment, PolyCommitmentGens, PolyEvalProof,
@@ -12,10 +14,10 @@ use super::random::RandomTape;
use super::scalar::Scalar;
use super::timer::Timer;
use super::transcript::{AppendToTranscript, ProofTranscript};
use ark_ff::{Field, One, Zero};
use ark_serialize::*;
use core::cmp::Ordering;
use merlin::Transcript;
use ark_serialize::*;
use ark_ff::{One, Zero, Field};

#[derive(Debug, CanonicalSerialize, CanonicalDeserialize)]
pub struct SparseMatEntry {
@@ -87,18 +89,18 @@ impl DerefsEvalProof {
r: &[Scalar],
evals: Vec<Scalar>,
gens: &PolyCommitmentGens,
transcript: &mut Transcript,
transcript: &mut PoseidonTranscript,
random_tape: &mut RandomTape,
) -> PolyEvalProof {
assert_eq!(joint_poly.get_num_vars(), r.len() + evals.len().log_2());

// append the claimed evaluations to transcript
evals.append_to_transcript(b"evals_ops_val", transcript);
// evals.append_to_transcript(b"evals_ops_val", transcript);
transcript.append_scalar_vector(&evals);

// n-to-1 reduction
let (r_joint, eval_joint) = {
let challenges =
transcript.challenge_vector(b"challenge_combine_n_to_one", evals.len().log_2());
let challenges = transcript.challenge_vector(evals.len().log2());
let mut poly_evals = DensePolynomial::new(evals);
for i in (0..challenges.len()).rev() {
poly_evals.bound_poly_var_bot(&challenges[i]);
@@ -112,7 +114,7 @@ impl DerefsEvalProof {
(r_joint, joint_claim_eval)
};
// decommit the joint polynomial at r_joint
eval_joint.append_to_transcript(b"joint_claim_eval", transcript);
transcript.append_scalar(&eval_joint);
let (proof_derefs, _comm_derefs_eval) = PolyEvalProof::prove(
joint_poly,
None,
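The n-to-1 reduction treats the claimed evaluations as a multilinear polynomial in log2(n) variables and binds one variable per squeezed challenge, which is what the repeated bound_poly_var_bot calls do. A standalone sketch (the pairing order is illustrative; DensePolynomial fixes its own variable order):

use ark_bls12_377::Fr as Scalar;
use ark_std::UniformRand;

// Fold a table of claimed evaluations down to a single joint claim.
fn fold_evals(mut evals: Vec<Scalar>, challenges: &[Scalar]) -> Scalar {
  assert_eq!(evals.len(), 1 << challenges.len());
  for r in challenges.iter().rev() {
    // Bind the lowest variable: pair adjacent entries and interpolate at r.
    evals = evals
      .chunks(2)
      .map(|pair| pair[0] + *r * (pair[1] - pair[0]))
      .collect();
  }
  evals[0]
}

fn main() {
  let mut rng = ark_std::rand::thread_rng();
  let evals: Vec<Scalar> = (0..8).map(|_| Scalar::rand(&mut rng)).collect();
  let challenges: Vec<Scalar> = (0..3).map(|_| Scalar::rand(&mut rng)).collect();
  let _joint_claim = fold_evals(evals, &challenges);
}
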
@@ -134,10 +136,10 @@ impl DerefsEvalProof {
|
||||
eval_col_ops_val_vec: &[Scalar],
|
||||
r: &[Scalar],
|
||||
gens: &PolyCommitmentGens,
|
||||
transcript: &mut Transcript,
|
||||
transcript: &mut PoseidonTranscript,
|
||||
random_tape: &mut RandomTape,
|
||||
) -> Self {
|
||||
transcript.append_protocol_name(DerefsEvalProof::protocol_name());
|
||||
// transcript.append_protocol_name(DerefsEvalProof::protocol_name());
|
||||
|
||||
let evals = {
|
||||
let mut evals = eval_row_ops_val_vec.to_owned();
|
||||
@@ -157,14 +159,14 @@ impl DerefsEvalProof {
|
||||
r: &[Scalar],
|
||||
evals: Vec<Scalar>,
|
||||
gens: &PolyCommitmentGens,
|
||||
transcript: &mut Transcript,
|
||||
transcript: &mut PoseidonTranscript,
|
||||
) -> Result<(), ProofVerifyError> {
|
||||
// append the claimed evaluations to transcript
|
||||
evals.append_to_transcript(b"evals_ops_val", transcript);
|
||||
// evals.append_to_transcript(b"evals_ops_val", transcript);
|
||||
transcript.append_scalar_vector(&evals);
|
||||
|
||||
// n-to-1 reduction
|
||||
let challenges =
|
||||
transcript.challenge_vector(b"challenge_combine_n_to_one", evals.len().log_2());
|
||||
let challenges = transcript.challenge_vector(evals.len().log2());
|
||||
let mut poly_evals = DensePolynomial::new(evals);
|
||||
for i in (0..challenges.len()).rev() {
|
||||
poly_evals.bound_poly_var_bot(&challenges[i]);
|
||||
@@ -175,7 +177,8 @@ impl DerefsEvalProof {
|
||||
r_joint.extend(r);
|
||||
|
||||
// decommit the joint polynomial at r_joint
|
||||
joint_claim_eval.append_to_transcript(b"joint_claim_eval", transcript);
|
||||
// joint_claim_eval.append_to_transcript(b"joint_claim_eval", transcript);
|
||||
transcript.append_scalar(&joint_claim_eval);
|
||||
|
||||
proof.verify_plain(gens, transcript, &r_joint, &joint_claim_eval, comm)
|
||||
}
|
||||
@@ -188,9 +191,9 @@ impl DerefsEvalProof {
|
||||
eval_col_ops_val_vec: &[Scalar],
|
||||
gens: &PolyCommitmentGens,
|
||||
comm: &DerefsCommitment,
|
||||
transcript: &mut Transcript,
|
||||
transcript: &mut PoseidonTranscript,
|
||||
) -> Result<(), ProofVerifyError> {
|
||||
transcript.append_protocol_name(DerefsEvalProof::protocol_name());
|
||||
// transcript.append_protocol_name(DerefsEvalProof::protocol_name());
|
||||
let mut evals = eval_row_ops_val_vec.to_owned();
|
||||
evals.extend(eval_col_ops_val_vec);
|
||||
evals.resize(evals.len().next_power_of_two(), Scalar::zero());
|
||||
@@ -214,6 +217,11 @@ impl AppendToTranscript for DerefsCommitment {
|
||||
}
|
||||
}
|
||||
|
||||
impl AppendToPoseidon for DerefsCommitment {
|
||||
fn append_to_poseidon(&self, transcript: &mut PoseidonTranscript) {
|
||||
self.comm_ops_val.append_to_poseidon(transcript);
|
||||
}
|
||||
}
|
||||
struct AddrTimestamps {
|
||||
ops_addr_usize: Vec<Vec<usize>>,
|
||||
ops_addr: Vec<DensePolynomial>,
|
||||
@@ -342,6 +350,16 @@ impl AppendToTranscript for SparseMatPolyCommitment {
|
||||
}
|
||||
}
|
||||
|
||||
impl AppendToPoseidon for SparseMatPolyCommitment {
|
||||
fn append_to_poseidon(&self, transcript: &mut PoseidonTranscript) {
|
||||
transcript.append_u64(self.batch_size as u64);
|
||||
transcript.append_u64(self.num_ops as u64);
|
||||
transcript.append_u64(self.num_mem_cells as u64);
|
||||
self.comm_comb_ops.append_to_poseidon(transcript);
|
||||
self.comm_comb_mem.append_to_poseidon(transcript);
|
||||
}
|
||||
}
|
||||
|
||||
impl SparseMatPolynomial {
|
||||
pub fn new(num_vars_x: usize, num_vars_y: usize, M: Vec<SparseMatEntry>) -> Self {
|
||||
SparseMatPolynomial {
|
||||
@@ -465,7 +483,7 @@ impl SparseMatPolynomial {
|
||||
let val = &self.M[i].val;
|
||||
(row, z[col] * val)
|
||||
})
|
||||
.fold(vec![Scalar::zero(); num_rows], |mut Mz, (r, v)| {
|
||||
.fold(vec![Scalar::zero(); num_rows], |mut Mz, (r, v)| {
|
||||
Mz[r] += v;
|
||||
Mz
|
||||
})
|
||||
@@ -732,10 +750,10 @@ impl HashLayerProof {
|
||||
dense: &MultiSparseMatPolynomialAsDense,
|
||||
derefs: &Derefs,
|
||||
gens: &SparseMatPolyCommitmentGens,
|
||||
transcript: &mut Transcript,
|
||||
transcript: &mut PoseidonTranscript,
|
||||
random_tape: &mut RandomTape,
|
||||
) -> Self {
|
||||
transcript.append_protocol_name(HashLayerProof::protocol_name());
|
||||
// transcript.append_protocol_name(HashLayerProof::protocol_name());
|
||||
|
||||
let (rand_mem, rand_ops) = rand;
|
||||
|
||||
@@ -775,9 +793,8 @@ impl HashLayerProof {
|
||||
evals_ops.extend(&eval_col_read_ts_vec);
|
||||
evals_ops.extend(&eval_val_vec);
|
||||
evals_ops.resize(evals_ops.len().next_power_of_two(), Scalar::zero());
|
||||
evals_ops.append_to_transcript(b"claim_evals_ops", transcript);
|
||||
let challenges_ops =
|
||||
transcript.challenge_vector(b"challenge_combine_n_to_one", evals_ops.len().log_2());
|
||||
transcript.append_scalar_vector(&evals_ops);
|
||||
let challenges_ops = transcript.challenge_vector(evals_ops.len().log2());
|
||||
|
||||
let mut poly_evals_ops = DensePolynomial::new(evals_ops);
|
||||
for i in (0..challenges_ops.len()).rev() {
|
||||
@@ -788,7 +805,7 @@ impl HashLayerProof {
|
||||
let mut r_joint_ops = challenges_ops;
|
||||
r_joint_ops.extend(rand_ops);
|
||||
debug_assert_eq!(dense.comb_ops.evaluate(&r_joint_ops), joint_claim_eval_ops);
|
||||
joint_claim_eval_ops.append_to_transcript(b"joint_claim_eval_ops", transcript);
|
||||
transcript.append_scalar(&joint_claim_eval_ops);
|
||||
let (proof_ops, _comm_ops_eval) = PolyEvalProof::prove(
|
||||
&dense.comb_ops,
|
||||
None,
|
||||
@@ -802,9 +819,9 @@ impl HashLayerProof {
|
||||
|
||||
// form a single decommitment using comb_comb_mem at rand_mem
|
||||
let evals_mem: Vec<Scalar> = vec![eval_row_audit_ts, eval_col_audit_ts];
|
||||
evals_mem.append_to_transcript(b"claim_evals_mem", transcript);
|
||||
let challenges_mem =
|
||||
transcript.challenge_vector(b"challenge_combine_two_to_one", evals_mem.len().log_2());
|
||||
// evals_mem.append_to_transcript(b"claim_evals_mem", transcript);
|
||||
transcript.append_scalar_vector(&evals_mem);
|
||||
let challenges_mem = transcript.challenge_vector(evals_mem.len().log2());
|
||||
|
||||
let mut poly_evals_mem = DensePolynomial::new(evals_mem);
|
||||
for i in (0..challenges_mem.len()).rev() {
|
||||
@@ -815,7 +832,7 @@ impl HashLayerProof {
|
||||
let mut r_joint_mem = challenges_mem;
|
||||
r_joint_mem.extend(rand_mem);
|
||||
debug_assert_eq!(dense.comb_mem.evaluate(&r_joint_mem), joint_claim_eval_mem);
|
||||
joint_claim_eval_mem.append_to_transcript(b"joint_claim_eval_mem", transcript);
|
||||
transcript.append_scalar(&joint_claim_eval_mem);
|
||||
let (proof_mem, _comm_mem_eval) = PolyEvalProof::prove(
|
||||
&dense.comb_mem,
|
||||
None,
|
||||
@@ -902,10 +919,10 @@ impl HashLayerProof {
|
||||
ry: &[Scalar],
|
||||
r_hash: &Scalar,
|
||||
r_multiset_check: &Scalar,
|
||||
transcript: &mut Transcript,
|
||||
transcript: &mut PoseidonTranscript,
|
||||
) -> Result<(), ProofVerifyError> {
|
||||
let timer = Timer::new("verify_hash_proof");
|
||||
transcript.append_protocol_name(HashLayerProof::protocol_name());
|
||||
// transcript.append_protocol_name(HashLayerProof::protocol_name());
|
||||
|
||||
let (rand_mem, rand_ops) = rand;
|
||||
|
||||
@@ -945,9 +962,9 @@ impl HashLayerProof {
|
||||
evals_ops.extend(eval_col_read_ts_vec);
|
||||
evals_ops.extend(eval_val_vec);
|
||||
evals_ops.resize(evals_ops.len().next_power_of_two(), Scalar::zero());
|
||||
evals_ops.append_to_transcript(b"claim_evals_ops", transcript);
|
||||
let challenges_ops =
|
||||
transcript.challenge_vector(b"challenge_combine_n_to_one", evals_ops.len().log_2());
|
||||
transcript.append_scalar_vector(&evals_ops);
|
||||
// evals_ops.append_to_transcript(b"claim_evals_ops", transcript);
|
||||
let challenges_ops = transcript.challenge_vector(evals_ops.len().log2());
|
||||
|
||||
let mut poly_evals_ops = DensePolynomial::new(evals_ops);
|
||||
for i in (0..challenges_ops.len()).rev() {
|
||||
@@ -957,21 +974,24 @@ impl HashLayerProof {
|
||||
let joint_claim_eval_ops = poly_evals_ops[0];
|
||||
let mut r_joint_ops = challenges_ops;
|
||||
r_joint_ops.extend(rand_ops);
|
||||
joint_claim_eval_ops.append_to_transcript(b"joint_claim_eval_ops", transcript);
|
||||
self.proof_ops.verify_plain(
|
||||
&gens.gens_ops,
|
||||
transcript,
|
||||
&r_joint_ops,
|
||||
&joint_claim_eval_ops,
|
||||
&comm.comm_comb_ops,
|
||||
)?;
|
||||
transcript.append_scalar(&joint_claim_eval_ops);
|
||||
assert!(self
|
||||
.proof_ops
|
||||
.verify_plain(
|
||||
&gens.gens_ops,
|
||||
transcript,
|
||||
&r_joint_ops,
|
||||
&joint_claim_eval_ops,
|
||||
&comm.comm_comb_ops
|
||||
)
|
||||
.is_ok());
|
||||
|
||||
// verify proof-mem using comm_comb_mem at rand_mem
|
||||
// form a single decommitment using comb_comb_mem at rand_mem
|
||||
let evals_mem: Vec<Scalar> = vec![*eval_row_audit_ts, *eval_col_audit_ts];
|
||||
evals_mem.append_to_transcript(b"claim_evals_mem", transcript);
|
||||
let challenges_mem =
|
||||
transcript.challenge_vector(b"challenge_combine_two_to_one", evals_mem.len().log_2());
|
||||
// evals_mem.append_to_transcript(b"claim_evals_mem", transcript);
|
||||
transcript.append_scalar_vector(&evals_mem);
|
||||
let challenges_mem = transcript.challenge_vector(evals_mem.len().log2());
|
||||
|
||||
let mut poly_evals_mem = DensePolynomial::new(evals_mem);
|
||||
for i in (0..challenges_mem.len()).rev() {
|
||||
@@ -981,7 +1001,8 @@ impl HashLayerProof {
|
||||
let joint_claim_eval_mem = poly_evals_mem[0];
|
||||
let mut r_joint_mem = challenges_mem;
|
||||
r_joint_mem.extend(rand_mem);
|
||||
joint_claim_eval_mem.append_to_transcript(b"joint_claim_eval_mem", transcript);
|
||||
// joint_claim_eval_mem.append_to_transcript(b"joint_claim_eval_mem", transcript);
|
||||
transcript.append_scalar(&joint_claim_eval_mem);
|
||||
self.proof_mem.verify_plain(
|
||||
&gens.gens_mem,
|
||||
transcript,
|
||||
@@ -1042,9 +1063,9 @@ impl ProductLayerProof {
dense: &MultiSparseMatPolynomialAsDense,
derefs: &Derefs,
eval: &[Scalar],
transcript: &mut Transcript,
transcript: &mut PoseidonTranscript,
) -> (Self, Vec<Scalar>, Vec<Scalar>) {
transcript.append_protocol_name(ProductLayerProof::protocol_name());
// transcript.append_protocol_name(ProductLayerProof::protocol_name());

let row_eval_init = row_prod_layer.init.evaluate();
let row_eval_audit = row_prod_layer.audit.evaluate();
@@ -1062,10 +1083,10 @@ impl ProductLayerProof {
let rs: Scalar = (0..row_eval_read.len()).map(|i| row_eval_read[i]).product();
assert_eq!(row_eval_init * ws, rs * row_eval_audit);

row_eval_init.append_to_transcript(b"claim_row_eval_init", transcript);
row_eval_read.append_to_transcript(b"claim_row_eval_read", transcript);
row_eval_write.append_to_transcript(b"claim_row_eval_write", transcript);
row_eval_audit.append_to_transcript(b"claim_row_eval_audit", transcript);
transcript.append_scalar(&row_eval_init);
transcript.append_scalar_vector(&row_eval_read);
transcript.append_scalar_vector(&row_eval_write);
transcript.append_scalar(&row_eval_audit);
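The asserted identity is the offline memory-checking relation: the initial state extended by all writes must equal, as a multiset, the reads extended by the audited final state, which after hashing each tuple to a field element becomes init * prod(writes) == prod(reads) * audit. A standalone sketch over ark_bls12_377::Fr:

use ark_bls12_377::Fr as Scalar;

// Grand-product form of the multiset equality used by the memory check.
fn product_check(init: Scalar, writes: &[Scalar], reads: &[Scalar], audit: Scalar) -> bool {
  let ws: Scalar = writes.iter().copied().product();
  let rs: Scalar = reads.iter().copied().product();
  init * ws == rs * audit
}

fn main() {
  let vals: Vec<Scalar> = (1u64..=4).map(|i| Scalar::from(i)).collect();
  // When every read is matched by a write and the memory ends where it started,
  // the two grand products agree.
  assert!(product_check(Scalar::from(7u64), &vals, &vals, Scalar::from(7u64)));
}
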
|
||||
let col_eval_init = col_prod_layer.init.evaluate();
|
||||
let col_eval_audit = col_prod_layer.audit.evaluate();
|
||||
@@ -1083,10 +1104,10 @@ impl ProductLayerProof {
|
||||
let rs: Scalar = (0..col_eval_read.len()).map(|i| col_eval_read[i]).product();
|
||||
assert_eq!(col_eval_init * ws, rs * col_eval_audit);
|
||||
|
||||
col_eval_init.append_to_transcript(b"claim_col_eval_init", transcript);
|
||||
col_eval_read.append_to_transcript(b"claim_col_eval_read", transcript);
|
||||
col_eval_write.append_to_transcript(b"claim_col_eval_write", transcript);
|
||||
col_eval_audit.append_to_transcript(b"claim_col_eval_audit", transcript);
|
||||
transcript.append_scalar(&col_eval_init);
|
||||
transcript.append_scalar_vector(&col_eval_read);
|
||||
transcript.append_scalar_vector(&col_eval_write);
|
||||
transcript.append_scalar(&col_eval_audit);
|
||||
|
||||
// prepare dotproduct circuit for batching then with ops-related product circuits
|
||||
assert_eq!(eval.len(), derefs.row_ops_val.len());
|
||||
@@ -1109,8 +1130,10 @@ impl ProductLayerProof {
|
||||
let (eval_dotp_left, eval_dotp_right) =
|
||||
(dotp_circuit_left.evaluate(), dotp_circuit_right.evaluate());
|
||||
|
||||
eval_dotp_left.append_to_transcript(b"claim_eval_dotp_left", transcript);
|
||||
eval_dotp_right.append_to_transcript(b"claim_eval_dotp_right", transcript);
|
||||
// eval_dotp_left.append_to_transcript(b"claim_eval_dotp_left", transcript);
|
||||
// eval_dotp_right.append_to_transcript(b"claim_eval_dotp_right", transcript);
|
||||
transcript.append_scalar(&eval_dotp_left);
|
||||
transcript.append_scalar(&eval_dotp_right);
|
||||
assert_eq!(eval_dotp_left + eval_dotp_right, eval[i]);
|
||||
eval_dotp_left_vec.push(eval_dotp_left);
|
||||
eval_dotp_right_vec.push(eval_dotp_right);
|
||||
@@ -1207,7 +1230,9 @@ impl ProductLayerProof {
|
||||
};
|
||||
|
||||
let mut product_layer_proof_encoded: Vec<u8> = Vec::new();
|
||||
product_layer_proof.serialize(&mut product_layer_proof_encoded).unwrap();
|
||||
product_layer_proof
|
||||
.serialize(&mut product_layer_proof_encoded)
|
||||
.unwrap();
|
||||
let msg = format!(
|
||||
"len_product_layer_proof {:?}",
|
||||
product_layer_proof_encoded.len()
|
||||
@@ -1222,7 +1247,7 @@ impl ProductLayerProof {
|
||||
num_ops: usize,
|
||||
num_cells: usize,
|
||||
eval: &[Scalar],
|
||||
transcript: &mut Transcript,
|
||||
transcript: &mut PoseidonTranscript,
|
||||
) -> Result<
|
||||
(
|
||||
Vec<Scalar>,
|
||||
@@ -1233,7 +1258,7 @@ impl ProductLayerProof {
|
||||
),
|
||||
ProofVerifyError,
|
||||
> {
|
||||
transcript.append_protocol_name(ProductLayerProof::protocol_name());
|
||||
// transcript.append_protocol_name(ProductLayerProof::protocol_name());
|
||||
|
||||
let timer = Timer::new("verify_prod_proof");
|
||||
let num_instances = eval.len();
|
||||
@@ -1246,12 +1271,17 @@ impl ProductLayerProof {
|
||||
.map(|i| row_eval_write[i])
|
||||
.product();
|
||||
let rs: Scalar = (0..row_eval_read.len()).map(|i| row_eval_read[i]).product();
|
||||
assert_eq!( ws * row_eval_init , rs * row_eval_audit);
|
||||
assert_eq!(ws * row_eval_init, rs * row_eval_audit);
|
||||
|
||||
row_eval_init.append_to_transcript(b"claim_row_eval_init", transcript);
|
||||
row_eval_read.append_to_transcript(b"claim_row_eval_read", transcript);
|
||||
row_eval_write.append_to_transcript(b"claim_row_eval_write", transcript);
|
||||
row_eval_audit.append_to_transcript(b"claim_row_eval_audit", transcript);
|
||||
// row_eval_init.append_to_transcript(b"claim_row_eval_init", transcript);
|
||||
// row_eval_read.append_to_transcript(b"claim_row_eval_read", transcript);
|
||||
// row_eval_write.append_to_transcript(b"claim_row_eval_write", transcript);
|
||||
// row_eval_audit.append_to_transcript(b"claim_row_eval_audit", transcript);
|
||||
|
||||
transcript.append_scalar(row_eval_init);
|
||||
transcript.append_scalar_vector(row_eval_read);
|
||||
transcript.append_scalar_vector(row_eval_write);
|
||||
transcript.append_scalar(row_eval_audit);
|
||||
|
||||
// subset check
|
||||
let (col_eval_init, col_eval_read, col_eval_write, col_eval_audit) = &self.eval_col;
|
||||
@@ -1263,10 +1293,15 @@ impl ProductLayerProof {
|
||||
let rs: Scalar = (0..col_eval_read.len()).map(|i| col_eval_read[i]).product();
|
||||
assert_eq!(ws * col_eval_init, rs * col_eval_audit);
|
||||
|
||||
col_eval_init.append_to_transcript(b"claim_col_eval_init", transcript);
|
||||
col_eval_read.append_to_transcript(b"claim_col_eval_read", transcript);
|
||||
col_eval_write.append_to_transcript(b"claim_col_eval_write", transcript);
|
||||
col_eval_audit.append_to_transcript(b"claim_col_eval_audit", transcript);
|
||||
// col_eval_init.append_to_transcript(b"claim_col_eval_init", transcript);
|
||||
// col_eval_read.append_to_transcript(b"claim_col_eval_read", transcript);
|
||||
// col_eval_write.append_to_transcript(b"claim_col_eval_write", transcript);
|
||||
// col_eval_audit.append_to_transcript(b"claim_col_eval_audit", transcript);
|
||||
|
||||
transcript.append_scalar(col_eval_init);
|
||||
transcript.append_scalar_vector(col_eval_read);
|
||||
transcript.append_scalar_vector(col_eval_write);
|
||||
transcript.append_scalar(col_eval_audit);
|
||||
|
||||
// verify the evaluation of the sparse polynomial
|
||||
let (eval_dotp_left, eval_dotp_right) = &self.eval_val;
|
||||
@@ -1275,8 +1310,10 @@ impl ProductLayerProof {
|
||||
let mut claims_dotp_circuit: Vec<Scalar> = Vec::new();
|
||||
for i in 0..num_instances {
|
||||
assert_eq!(eval_dotp_left[i] + eval_dotp_right[i], eval[i]);
|
||||
eval_dotp_left[i].append_to_transcript(b"claim_eval_dotp_left", transcript);
|
||||
eval_dotp_right[i].append_to_transcript(b"claim_eval_dotp_right", transcript);
|
||||
// eval_dotp_left[i].append_to_transcript(b"claim_eval_dotp_left", transcript);
|
||||
// eval_dotp_right[i].append_to_transcript(b"claim_eval_dotp_right", transcript)
|
||||
transcript.append_scalar(&eval_dotp_left[i]);
|
||||
transcript.append_scalar(&eval_dotp_right[i]);
|
||||
|
||||
claims_dotp_circuit.push(eval_dotp_left[i]);
|
||||
claims_dotp_circuit.push(eval_dotp_right[i]);
|
||||
@@ -1330,10 +1367,10 @@ impl PolyEvalNetworkProof {
|
||||
derefs: &Derefs,
|
||||
evals: &[Scalar],
|
||||
gens: &SparseMatPolyCommitmentGens,
|
||||
transcript: &mut Transcript,
|
||||
transcript: &mut PoseidonTranscript,
|
||||
random_tape: &mut RandomTape,
|
||||
) -> Self {
|
||||
transcript.append_protocol_name(PolyEvalNetworkProof::protocol_name());
|
||||
// transcript.append_protocol_name(PolyEvalNetworkProof::protocol_name());
|
||||
|
||||
let (proof_prod_layer, rand_mem, rand_ops) = ProductLayerProof::prove(
|
||||
&mut network.row_layers.prod_layer,
|
||||
@@ -1370,10 +1407,10 @@ impl PolyEvalNetworkProof {
|
||||
ry: &[Scalar],
|
||||
r_mem_check: &(Scalar, Scalar),
|
||||
nz: usize,
|
||||
transcript: &mut Transcript,
|
||||
transcript: &mut PoseidonTranscript,
|
||||
) -> Result<(), ProofVerifyError> {
|
||||
let timer = Timer::new("verify_polyeval_proof");
|
||||
transcript.append_protocol_name(PolyEvalNetworkProof::protocol_name());
|
||||
// transcript.append_protocol_name(PolyEvalNetworkProof::protocol_name());
|
||||
|
||||
let num_instances = evals.len();
|
||||
let (r_hash, r_multiset_check) = r_mem_check;
|
||||
@@ -1459,10 +1496,10 @@ impl SparseMatPolyEvalProof {
|
||||
ry: &[Scalar],
|
||||
evals: &[Scalar], // a vector evaluation of \widetilde{M}(r = (rx,ry)) for each M
|
||||
gens: &SparseMatPolyCommitmentGens,
|
||||
transcript: &mut Transcript,
|
||||
transcript: &mut PoseidonTranscript,
|
||||
random_tape: &mut RandomTape,
|
||||
) -> SparseMatPolyEvalProof {
|
||||
transcript.append_protocol_name(SparseMatPolyEvalProof::protocol_name());
|
||||
// transcript.append_protocol_name(SparseMatPolyEvalProof::protocol_name());
|
||||
|
||||
// ensure there is one eval for each polynomial in dense
|
||||
assert_eq!(evals.len(), dense.batch_size);
|
||||
@@ -1481,14 +1518,14 @@ impl SparseMatPolyEvalProof {
|
||||
let timer_commit = Timer::new("commit_nondet_witness");
|
||||
let comm_derefs = {
|
||||
let comm = derefs.commit(&gens.gens_derefs);
|
||||
comm.append_to_transcript(b"comm_poly_row_col_ops_val", transcript);
|
||||
comm.append_to_poseidon(transcript);
|
||||
comm
|
||||
};
|
||||
timer_commit.stop();
|
||||
|
||||
let poly_eval_network_proof = {
|
||||
// produce a random element from the transcript for hash function
|
||||
let r_mem_check = transcript.challenge_vector(b"challenge_r_hash", 2);
|
||||
let r_mem_check = transcript.challenge_vector(2);
|
||||
|
||||
// build a network to evaluate the sparse polynomial
|
||||
let timer_build_network = Timer::new("build_layered_network");
|
||||
@@ -1529,9 +1566,9 @@ impl SparseMatPolyEvalProof {
|
||||
ry: &[Scalar],
|
||||
evals: &[Scalar], // evaluation of \widetilde{M}(r = (rx,ry))
|
||||
gens: &SparseMatPolyCommitmentGens,
|
||||
transcript: &mut Transcript,
|
||||
transcript: &mut PoseidonTranscript,
|
||||
) -> Result<(), ProofVerifyError> {
|
||||
transcript.append_protocol_name(SparseMatPolyEvalProof::protocol_name());
|
||||
// transcript.append_protocol_name(SparseMatPolyEvalProof::protocol_name());
|
||||
|
||||
// equalize the lengths of rx and ry
|
||||
let (rx_ext, ry_ext) = SparseMatPolyEvalProof::equalize(rx, ry);
|
||||
@@ -1540,12 +1577,10 @@ impl SparseMatPolyEvalProof {
|
||||
assert_eq!(rx_ext.len().pow2(), num_mem_cells);
|
||||
|
||||
// add claims to transcript and obtain challenges for randomized mem-check circuit
|
||||
self
|
||||
.comm_derefs
|
||||
.append_to_transcript(b"comm_poly_row_col_ops_val", transcript);
|
||||
self.comm_derefs.append_to_poseidon(transcript);
|
||||
|
||||
// produce a random element from the transcript for hash function
|
||||
let r_mem_check = transcript.challenge_vector(b"challenge_r_hash", 2);
|
||||
let r_mem_check = transcript.challenge_vector(2);
|
||||
|
||||
self.poly_eval_network_proof.verify(
|
||||
comm,
|
||||
@@ -1610,13 +1645,15 @@ impl SparsePolynomial {
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use crate::{commitments::MultiCommitGens, parameters::poseidon_params};
|
||||
|
||||
use super::*;
|
||||
use ark_std::{UniformRand};
|
||||
use rand::RngCore;
|
||||
use ark_std::UniformRand;
|
||||
use rand::RngCore;
|
||||
|
||||
#[test]
|
||||
fn check_sparse_polyeval_proof() {
|
||||
let mut rng = ark_std::rand::thread_rng();
|
||||
let mut rng = ark_std::rand::thread_rng();
|
||||
|
||||
let num_nz_entries: usize = 256;
|
||||
let num_rows: usize = 256;
|
||||
@@ -1628,7 +1665,7 @@ use rand::RngCore;
|
||||
|
||||
for _i in 0..num_nz_entries {
|
||||
M.push(SparseMatEntry::new(
|
||||
(rng.next_u64()% (num_rows as u64)) as usize,
|
||||
(rng.next_u64() % (num_rows as u64)) as usize,
|
||||
(rng.next_u64() % (num_cols as u64)) as usize,
|
||||
Scalar::rand(&mut rng),
|
||||
));
|
||||
@@ -1656,8 +1693,9 @@ use rand::RngCore;
let eval = SparseMatPolynomial::multi_evaluate(&[&poly_M], &rx, &ry);
let evals = vec![eval[0], eval[0], eval[0]];

let params = poseidon_params();
let mut random_tape = RandomTape::new(b"proof");
let mut prover_transcript = Transcript::new(b"example");
let mut prover_transcript = PoseidonTranscript::new(&params);
let proof = SparseMatPolyEvalProof::prove(
&dense,
&rx,
@@ -1668,7 +1706,8 @@ use rand::RngCore;
&mut random_tape,
);

let mut verifier_transcript = Transcript::new(b"example");
let mut verifier_transcript = PoseidonTranscript::new(&params);
assert!(proof
.verify(
&poly_comm,

838
src/sumcheck.rs
838
src/sumcheck.rs
@@ -1,5 +1,7 @@
#![allow(clippy::too_many_arguments)]
#![allow(clippy::type_complexity)]
use crate::poseidon_transcript::{AppendToPoseidon, PoseidonTranscript};

use super::commitments::{Commitments, MultiCommitGens};
use super::dense_mlpoly::DensePolynomial;
use super::errors::ProofVerifyError;
@@ -33,7 +35,7 @@ impl SumcheckInstanceProof {
claim: Scalar,
num_rounds: usize,
degree_bound: usize,
transcript: &mut Transcript,
transcript: &mut PoseidonTranscript,
) -> Result<(Scalar, Vec<Scalar>), ProofVerifyError> {
let mut e = claim;
let mut r: Vec<Scalar> = Vec::new();
@@ -50,10 +52,10 @@ impl SumcheckInstanceProof {
assert_eq!(poly.eval_at_zero() + poly.eval_at_one(), e);

// append the prover's message to the transcript
poly.append_to_transcript(b"poly", transcript);
poly.append_to_poseidon(transcript);

//derive the verifier's challenge for the next round
let r_i = transcript.challenge_scalar(b"challenge_nextround");
let r_i = transcript.challenge_scalar();

r.push(r_i);

@@ -65,124 +67,124 @@ impl SumcheckInstanceProof {
}
}

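Each round of the loop above performs the same three verifier steps: check that the received univariate polynomial splits the running claim across 0 and 1, absorb it, then evaluate it at the squeezed challenge to obtain the next claim. A standalone sketch with the polynomial in coefficient form (the crate's UniPoly stores evaluations; the names here are illustrative):

use ark_bls12_377::Fr as Scalar;
use ark_ff::Zero;

// Evaluate a polynomial given by coefficients (low degree first) via Horner's rule.
fn eval_at(coeffs: &[Scalar], x: Scalar) -> Scalar {
  coeffs.iter().rev().fold(Scalar::zero(), |acc, c| acc * x + c)
}

// One verifier round: returns the claim for the next round, or None on failure.
fn verify_round(coeffs: &[Scalar], claim: Scalar, r_i: Scalar) -> Option<Scalar> {
  let at_zero = eval_at(coeffs, Scalar::zero());
  let at_one = eval_at(coeffs, Scalar::from(1u64));
  if at_zero + at_one != claim {
    return None;
  }
  Some(eval_at(coeffs, r_i))
}

fn main() {
  // p(x) = 2 + 3x, so p(0) + p(1) = 7 and p(5) = 17.
  let coeffs = vec![Scalar::from(2u64), Scalar::from(3u64)];
  assert_eq!(
    verify_round(&coeffs, Scalar::from(7u64), Scalar::from(5u64)),
    Some(Scalar::from(17u64))
  );
}
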
#[derive(CanonicalSerialize, CanonicalDeserialize, Debug)]
|
||||
pub struct ZKSumcheckInstanceProof {
|
||||
comm_polys: Vec<CompressedGroup>,
|
||||
comm_evals: Vec<CompressedGroup>,
|
||||
proofs: Vec<DotProductProof>,
|
||||
}
|
||||
// #[derive(CanonicalSerialize, CanonicalDeserialize, Debug)]
|
||||
// pub struct ZKSumcheckInstanceProof {
|
||||
// comm_polys: Vec<CompressedGroup>,
|
||||
// comm_evals: Vec<CompressedGroup>,
|
||||
// proofs: Vec<DotProductProof>,
|
||||
// }
|
||||
|
||||
impl ZKSumcheckInstanceProof {
|
||||
pub fn new(
|
||||
comm_polys: Vec<CompressedGroup>,
|
||||
comm_evals: Vec<CompressedGroup>,
|
||||
proofs: Vec<DotProductProof>,
|
||||
) -> Self {
|
||||
ZKSumcheckInstanceProof {
|
||||
comm_polys,
|
||||
comm_evals,
|
||||
proofs,
|
||||
}
|
||||
}
|
||||
// impl ZKSumcheckInstanceProof {
|
||||
// pub fn new(
|
||||
// comm_polys: Vec<CompressedGroup>,
|
||||
// comm_evals: Vec<CompressedGroup>,
|
||||
// proofs: Vec<DotProductProof>,
|
||||
// ) -> Self {
|
||||
// ZKSumcheckInstanceProof {
|
||||
// comm_polys,
|
||||
// comm_evals,
|
||||
// proofs,
|
||||
// }
|
||||
// }
|
||||
|
||||
pub fn verify(
|
||||
&self,
|
||||
comm_claim: &CompressedGroup,
|
||||
num_rounds: usize,
|
||||
degree_bound: usize,
|
||||
gens_1: &MultiCommitGens,
|
||||
gens_n: &MultiCommitGens,
|
||||
transcript: &mut Transcript,
|
||||
) -> Result<(CompressedGroup, Vec<Scalar>), ProofVerifyError> {
|
||||
// verify degree bound
|
||||
assert_eq!(gens_n.n, degree_bound + 1);
|
||||
// pub fn verify(
|
||||
// &self,
|
||||
// comm_claim: &CompressedGroup,
|
||||
// num_rounds: usize,
|
||||
// degree_bound: usize,
|
||||
// gens_1: &MultiCommitGens,
|
||||
// gens_n: &MultiCommitGens,
|
||||
// transcript: &mut Transcript,
|
||||
// ) -> Result<(CompressedGroup, Vec<Scalar>), ProofVerifyError> {
|
||||
// // verify degree bound
|
||||
// assert_eq!(gens_n.n, degree_bound + 1);
|
||||
|
||||
// verify that there is a univariate polynomial for each round
|
||||
assert_eq!(self.comm_polys.len(), num_rounds);
|
||||
assert_eq!(self.comm_evals.len(), num_rounds);
|
||||
// // verify that there is a univariate polynomial for each round
|
||||
// assert_eq!(self.comm_polys.len(), num_rounds);
|
||||
// assert_eq!(self.comm_evals.len(), num_rounds);
|
||||
|
||||
let mut r: Vec<Scalar> = Vec::new();
|
||||
for i in 0..self.comm_polys.len() {
|
||||
let comm_poly = &self.comm_polys[i];
|
||||
// let mut r: Vec<Scalar> = Vec::new();
|
||||
// for i in 0..self.comm_polys.len() {
|
||||
// let comm_poly = &self.comm_polys[i];
|
||||
|
||||
// append the prover's polynomial to the transcript
|
||||
comm_poly.append_to_transcript(b"comm_poly", transcript);
|
||||
// // append the prover's polynomial to the transcript
|
||||
// comm_poly.append_to_transcript(b"comm_poly", transcript);
|
||||
|
||||
//derive the verifier's challenge for the next round
|
||||
let r_i = transcript.challenge_scalar(b"challenge_nextround");
|
||||
// //derive the verifier's challenge for the next round
|
||||
// let r_i = transcript.challenge_scalar(b"challenge_nextround");
|
||||
|
||||
// verify the proof of sum-check and evals
|
||||
let res = {
|
||||
let comm_claim_per_round = if i == 0 {
|
||||
comm_claim
|
||||
} else {
|
||||
&self.comm_evals[i - 1]
|
||||
};
|
||||
let mut comm_eval = &self.comm_evals[i];
|
||||
// // verify the proof of sum-check and evals
|
||||
// let res = {
|
||||
// let comm_claim_per_round = if i == 0 {
|
||||
// comm_claim
|
||||
// } else {
|
||||
// &self.comm_evals[i - 1]
|
||||
// };
|
||||
// let mut comm_eval = &self.comm_evals[i];
|
||||
|
||||
// add two claims to transcript
|
||||
comm_claim_per_round.append_to_transcript(b"comm_claim_per_round", transcript);
|
||||
comm_eval.append_to_transcript(b"comm_eval", transcript);
|
||||
// // add two claims to transcript
|
||||
// comm_claim_per_round.append_to_transcript(transcript);
|
||||
// comm_eval.append_to_transcript(transcript);
|
||||
|
||||
// produce two weights
|
||||
let w = transcript.challenge_vector(b"combine_two_claims_to_one", 2);
|
||||
// // produce two weights
|
||||
// let w = transcript.challenge_vector(2);
|
||||
|
||||
// compute a weighted sum of the RHS
|
||||
let comm_target = GroupElement::vartime_multiscalar_mul(
|
||||
w.as_slice(),
|
||||
iter::once(&comm_claim_per_round)
|
||||
.chain(iter::once(&comm_eval))
|
||||
.map(|pt| GroupElement::decompress(pt).unwrap())
|
||||
.collect::<Vec<GroupElement>>()
|
||||
.as_slice(),
|
||||
)
|
||||
.compress();
|
||||
// // compute a weighted sum of the RHS
|
||||
// let comm_target = GroupElement::vartime_multiscalar_mul(
|
||||
// w.as_slice(),
|
||||
// iter::once(&comm_claim_per_round)
|
||||
// .chain(iter::once(&comm_eval))
|
||||
// .map(|pt| GroupElement::decompress(pt).unwrap())
|
||||
// .collect::<Vec<GroupElement>>()
|
||||
// .as_slice(),
|
||||
// )
|
||||
// .compress();
|
||||
|
||||
let a = {
|
||||
// the vector to use to decommit for sum-check test
|
||||
let a_sc = {
|
||||
let mut a = vec![Scalar::one(); degree_bound + 1];
|
||||
a[0] += Scalar::one();
|
||||
a
|
||||
};
|
||||
// let a = {
|
||||
// // the vector to use to decommit for sum-check test
|
||||
// let a_sc = {
|
||||
// let mut a = vec![Scalar::one(); degree_bound + 1];
|
||||
// a[0] += Scalar::one();
|
||||
// a
|
||||
// };
|
||||
|
||||
// the vector to use to decommit for evaluation
|
||||
let a_eval = {
|
||||
let mut a = vec![Scalar::one(); degree_bound + 1];
|
||||
for j in 1..a.len() {
|
||||
a[j] = a[j - 1] * r_i;
|
||||
}
|
||||
a
|
||||
};
|
||||
// // the vector to use to decommit for evaluation
|
||||
// let a_eval = {
|
||||
// let mut a = vec![Scalar::one(); degree_bound + 1];
|
||||
// for j in 1..a.len() {
|
||||
// a[j] = a[j - 1] * r_i;
|
||||
// }
|
||||
// a
|
||||
// };
|
||||
|
||||
// take weighted sum of the two vectors using w
|
||||
assert_eq!(a_sc.len(), a_eval.len());
|
||||
(0..a_sc.len())
|
||||
.map(|i| w[0] * a_sc[i] + w[1] * a_eval[i])
|
||||
.collect::<Vec<Scalar>>()
|
||||
};
|
||||
// // take weighted sum of the two vectors using w
|
||||
// assert_eq!(a_sc.len(), a_eval.len());
|
||||
// (0..a_sc.len())
|
||||
// .map(|i| w[0] * a_sc[i] + w[1] * a_eval[i])
|
||||
// .collect::<Vec<Scalar>>()
|
||||
// };
|
||||
|
||||
self.proofs[i]
|
||||
.verify(
|
||||
gens_1,
|
||||
gens_n,
|
||||
transcript,
|
||||
&a,
|
||||
&self.comm_polys[i],
|
||||
&comm_target,
|
||||
)
|
||||
.is_ok()
|
||||
};
|
||||
if !res {
|
||||
return Err(ProofVerifyError::InternalError);
|
||||
}
|
||||
// self.proofs[i]
|
||||
// .verify(
|
||||
// gens_1,
|
||||
// gens_n,
|
||||
// transcript,
|
||||
// &a,
|
||||
// &self.comm_polys[i],
|
||||
// &comm_target,
|
||||
// )
|
||||
// .is_ok()
|
||||
// };
|
||||
// if !res {
|
||||
// return Err(ProofVerifyError::InternalError);
|
||||
// }
|
||||
|
||||
r.push(r_i);
|
||||
}
|
||||
// r.push(r_i);
|
||||
// }
|
||||
|
||||
Ok((self.comm_evals[&self.comm_evals.len() - 1].clone(), r))
|
||||
}
|
||||
}
|
||||
// Ok((self.comm_evals[&self.comm_evals.len() - 1].clone(), r))
|
||||
// }
|
||||
// }
|
||||
|
||||
impl SumcheckInstanceProof {
|
||||
pub fn prove_cubic_with_additive_term<F>(
|
||||
@@ -193,7 +195,7 @@ impl SumcheckInstanceProof {
|
||||
poly_B: &mut DensePolynomial,
|
||||
poly_C: &mut DensePolynomial,
|
||||
comb_func: F,
|
||||
transcript: &mut Transcript,
|
||||
transcript: &mut PoseidonTranscript,
|
||||
) -> (Self, Vec<Scalar>, Vec<Scalar>)
|
||||
where
|
||||
F: Fn(&Scalar, &Scalar, &Scalar, &Scalar) -> Scalar,
|
||||
@@ -242,9 +244,9 @@ impl SumcheckInstanceProof {
|
||||
let poly = UniPoly::from_evals(&evals);
|
||||
|
||||
// append the prover's message to the transcript
|
||||
poly.append_to_transcript(b"poly", transcript);
|
||||
poly.append_to_poseidon(transcript);
|
||||
//derive the verifier's challenge for the next round
|
||||
let r_j = transcript.challenge_scalar(b"challenge_nextround");
|
||||
let r_j = transcript.challenge_scalar();
|
||||
r.push(r_j);
|
||||
|
||||
// bound all tables to the verifier's challenege
|
||||
@@ -269,7 +271,7 @@ impl SumcheckInstanceProof {
|
||||
poly_B: &mut DensePolynomial,
|
||||
poly_C: &mut DensePolynomial,
|
||||
comb_func: F,
|
||||
transcript: &mut Transcript,
|
||||
transcript: &mut PoseidonTranscript,
|
||||
) -> (Self, Vec<Scalar>, Vec<Scalar>)
|
||||
where
|
||||
F: Fn(&Scalar, &Scalar, &Scalar) -> Scalar,
|
||||
@@ -313,10 +315,10 @@ impl SumcheckInstanceProof {
|
||||
let poly = UniPoly::from_evals(&evals);
|
||||
|
||||
// append the prover's message to the transcript
|
||||
poly.append_to_transcript(b"poly", transcript);
|
||||
poly.append_to_poseidon(transcript);
|
||||
|
||||
//derive the verifier's challenge for the next round
|
||||
let r_j = transcript.challenge_scalar(b"challenge_nextround");
|
||||
let r_j = transcript.challenge_scalar();
|
||||
r.push(r_j);
|
||||
// bound all tables to the verifier's challenege
|
||||
poly_A.bound_poly_var_top(&r_j);
|
||||
@@ -348,7 +350,7 @@ impl SumcheckInstanceProof {
|
||||
),
|
||||
coeffs: &[Scalar],
|
||||
comb_func: F,
|
||||
transcript: &mut Transcript,
|
||||
transcript: &mut PoseidonTranscript,
|
||||
) -> (
|
||||
Self,
|
||||
Vec<Scalar>,
|
||||
@@ -451,10 +453,10 @@ impl SumcheckInstanceProof {
|
||||
let poly = UniPoly::from_evals(&evals);
|
||||
|
||||
// append the prover's message to the transcript
|
||||
poly.append_to_transcript(b"poly", transcript);
|
||||
poly.append_to_poseidon(transcript);
|
||||
|
||||
//derive the verifier's challenge for the next round
|
||||
let r_j = transcript.challenge_scalar(b"challenge_nextround");
|
||||
let r_j = transcript.challenge_scalar();
|
||||
r.push(r_j);
|
||||
|
||||
// bound all tables to the verifier's challenege
|
||||
@@ -511,7 +513,7 @@ impl SumcheckInstanceProof {
|
||||
poly_A: &mut DensePolynomial,
|
||||
poly_B: &mut DensePolynomial,
|
||||
comb_func: F,
|
||||
transcript: &mut Transcript,
|
||||
transcript: &mut PoseidonTranscript,
|
||||
) -> (Self, Vec<Scalar>, Vec<Scalar>)
|
||||
where
|
||||
F: Fn(&Scalar, &Scalar) -> Scalar,
|
||||
@@ -539,10 +541,10 @@ impl SumcheckInstanceProof {
|
||||
let poly = UniPoly::from_evals(&evals);
|
||||
|
||||
// append the prover's message to the transcript
|
||||
poly.append_to_transcript(b"poly", transcript);
|
||||
poly.append_to_poseidon(transcript);
|
||||
|
||||
//derive the verifier's challenge for the next round
|
||||
let r_j = transcript.challenge_scalar(b"challenge_nextround");
|
||||
let r_j = transcript.challenge_scalar();
|
||||
r.push(r_j);
|
||||
|
||||
// bound all tables to the verifier's challenege
|
||||
@@ -560,359 +562,359 @@ impl SumcheckInstanceProof {
|
||||
}
|
||||
}
|
||||
|
||||
impl ZKSumcheckInstanceProof {
|
||||
pub fn prove_quad<F>(
|
||||
claim: &Scalar,
|
||||
blind_claim: &Scalar,
|
||||
num_rounds: usize,
|
||||
poly_A: &mut DensePolynomial,
|
||||
poly_B: &mut DensePolynomial,
|
||||
comb_func: F,
|
||||
gens_1: &MultiCommitGens,
|
||||
gens_n: &MultiCommitGens,
|
||||
transcript: &mut Transcript,
|
||||
random_tape: &mut RandomTape,
|
||||
) -> (Self, Vec<Scalar>, Vec<Scalar>, Scalar)
|
||||
where
|
||||
F: Fn(&Scalar, &Scalar) -> Scalar,
|
||||
{
|
||||
let (blinds_poly, blinds_evals) = (
|
||||
random_tape.random_vector(b"blinds_poly", num_rounds),
|
||||
random_tape.random_vector(b"blinds_evals", num_rounds),
|
||||
);
|
||||
let mut claim_per_round = *claim;
|
||||
let mut comm_claim_per_round = claim_per_round.commit(blind_claim, gens_1).compress();
|
||||
// impl ZKSumcheckInstanceProof {
|
||||
// pub fn prove_quad<F>(
|
||||
// claim: &Scalar,
|
||||
// blind_claim: &Scalar,
|
||||
// num_rounds: usize,
|
||||
// poly_A: &mut DensePolynomial,
|
||||
// poly_B: &mut DensePolynomial,
|
||||
// comb_func: F,
|
||||
// gens_1: &MultiCommitGens,
|
||||
// gens_n: &MultiCommitGens,
|
||||
// transcript: &mut Transcript,
|
||||
// random_tape: &mut RandomTape,
|
||||
// ) -> (Self, Vec<Scalar>, Vec<Scalar>, Scalar)
|
||||
// where
|
||||
// F: Fn(&Scalar, &Scalar) -> Scalar,
|
||||
// {
|
||||
// let (blinds_poly, blinds_evals) = (
|
||||
// random_tape.random_vector(b"blinds_poly", num_rounds),
|
||||
// random_tape.random_vector(b"blinds_evals", num_rounds),
|
||||
// );
|
||||
// let mut claim_per_round = *claim;
|
||||
// let mut comm_claim_per_round = claim_per_round.commit(blind_claim, gens_1).compress();
|
||||
|
||||
let mut r: Vec<Scalar> = Vec::new();
|
||||
let mut comm_polys: Vec<CompressedGroup> = Vec::new();
|
||||
let mut comm_evals: Vec<CompressedGroup> = Vec::new();
|
||||
let mut proofs: Vec<DotProductProof> = Vec::new();
|
||||
// let mut r: Vec<Scalar> = Vec::new();
|
||||
// let mut comm_polys: Vec<CompressedGroup> = Vec::new();
|
||||
// let mut comm_evals: Vec<CompressedGroup> = Vec::new();
|
||||
// let mut proofs: Vec<DotProductProof> = Vec::new();
|
||||
|
||||
for j in 0..num_rounds {
|
||||
let (poly, comm_poly) = {
|
||||
let mut eval_point_0 = Scalar::zero();
|
||||
let mut eval_point_2 = Scalar::zero();
|
||||
// for j in 0..num_rounds {
|
||||
// let (poly, comm_poly) = {
|
||||
// let mut eval_point_0 = Scalar::zero();
|
||||
// let mut eval_point_2 = Scalar::zero();
|
||||
|
||||
let len = poly_A.len() / 2;
|
||||
for i in 0..len {
|
||||
// eval 0: bound_func is A(low)
|
||||
eval_point_0 += comb_func(&poly_A[i], &poly_B[i]);
|
||||
// let len = poly_A.len() / 2;
|
||||
// for i in 0..len {
|
||||
// // eval 0: bound_func is A(low)
|
||||
// eval_point_0 += comb_func(&poly_A[i], &poly_B[i]);
|
||||
|
||||
// eval 2: bound_func is -A(low) + 2*A(high)
|
||||
let poly_A_bound_point = poly_A[len + i] + poly_A[len + i] - poly_A[i];
|
||||
let poly_B_bound_point = poly_B[len + i] + poly_B[len + i] - poly_B[i];
|
||||
eval_point_2 += comb_func(&poly_A_bound_point, &poly_B_bound_point);
|
||||
}
|
||||
// // eval 2: bound_func is -A(low) + 2*A(high)
|
||||
// let poly_A_bound_point = poly_A[len + i] + poly_A[len + i] - poly_A[i];
|
||||
// let poly_B_bound_point = poly_B[len + i] + poly_B[len + i] - poly_B[i];
|
||||
// eval_point_2 += comb_func(&poly_A_bound_point, &poly_B_bound_point);
|
||||
// }
|
||||
|
||||
        let evals = vec![eval_point_0, claim_per_round - eval_point_0, eval_point_2];
        let poly = UniPoly::from_evals(&evals);
        let comm_poly = poly.commit(gens_n, &blinds_poly[j]).compress();
        (poly, comm_poly)
      };

      // append the prover's message to the transcript
      comm_poly.append_to_transcript(b"comm_poly", transcript);
      comm_polys.push(comm_poly);

      // derive the verifier's challenge for the next round
      let r_j = transcript.challenge_scalar(b"challenge_nextround");

      // bind all tables to the verifier's challenge
      poly_A.bound_poly_var_top(&r_j);
      poly_B.bound_poly_var_top(&r_j);

      // produce a proof of sum-check and of evaluation
      let (proof, claim_next_round, comm_claim_next_round) = {
        let eval = poly.evaluate(&r_j);
        let comm_eval = eval.commit(&blinds_evals[j], gens_1).compress();

        // we need to prove the following under homomorphic commitments:
        // (1) poly(0) + poly(1) = claim_per_round
        // (2) poly(r_j) = eval

        // Our technique is to leverage dot product proofs:
        // (1) we can prove: <poly_in_coeffs_form, (2, 1, 1, 1)> = claim_per_round
        // (2) we can prove: <poly_in_coeffs_form, (1, r_j, r^2_j, ...)> = eval
        // for efficiency we batch them using random weights

        // add the two claims to the transcript
        comm_claim_per_round.append_to_transcript(b"comm_claim_per_round", transcript);
        comm_eval.append_to_transcript(b"comm_eval", transcript);

        // produce two weights
        let w = transcript.challenge_vector(b"combine_two_claims_to_one", 2);

        // compute a weighted sum of the RHS
        let target = w[0] * claim_per_round + w[1] * eval;
        let comm_target = GroupElement::vartime_multiscalar_mul(
          w.as_slice(),
          iter::once(&comm_claim_per_round)
            .chain(iter::once(&comm_eval))
            .map(|pt| GroupElement::decompress(pt).unwrap())
            .collect::<Vec<GroupElement>>()
            .as_slice(),
        )
        .compress();

        let blind = {
          let blind_sc = if j == 0 {
            blind_claim
          } else {
            &blinds_evals[j - 1]
          };

          let blind_eval = &blinds_evals[j];

          w[0] * blind_sc + w[1] * blind_eval
        };
        assert_eq!(target.commit(&blind, gens_1).compress(), comm_target);

        let a = {
          // the vector to use to decommit for the sum-check test
          let a_sc = {
            let mut a = vec![Scalar::one(); poly.degree() + 1];
            a[0] += Scalar::one();
            a
          };

          // the vector to use to decommit for the evaluation
          let a_eval = {
            let mut a = vec![Scalar::one(); poly.degree() + 1];
            for j in 1..a.len() {
              a[j] = a[j - 1] * r_j;
            }
            a
          };

          // take a weighted sum of the two vectors using w
          assert_eq!(a_sc.len(), a_eval.len());
          (0..a_sc.len())
            .map(|i| w[0] * a_sc[i] + w[1] * a_eval[i])
            .collect::<Vec<Scalar>>()
        };

        let (proof, _comm_poly, _comm_sc_eval) = DotProductProof::prove(
          gens_1,
          gens_n,
          transcript,
          random_tape,
          &poly.as_vec(),
          &blinds_poly[j],
          &a,
          &target,
          &blind,
        );

        (proof, eval, comm_eval)
      };

      claim_per_round = claim_next_round;
      comm_claim_per_round = comm_claim_next_round;

      proofs.push(proof);
      r.push(r_j);
      comm_evals.push(comm_claim_per_round.clone());
    }

    (
      ZKSumcheckInstanceProof::new(comm_polys, comm_evals, proofs),
      r,
      vec![poly_A[0], poly_B[0]],
      blinds_evals[num_rounds - 1],
    )
  }
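  // Standalone illustrative sketch (not part of the diff): in each round the
  // prover above only evaluates the round polynomial at 0 and 2, because the
  // value at 1 is forced by the running claim via p(0) + p(1) = claim; the
  // crate's UniPoly::from_evals then interpolates from {p(0), p(1), p(2)}.
  // `interpolate_quadratic` is a local helper and plain i128 arithmetic stands
  // in for the Scalar field; this is not the crate's implementation.
  fn interpolate_quadratic(e0: i128, e1: i128, e2: i128) -> (i128, i128, i128) {
    // coefficients (c0, c1, c2) of c0 + c1*x + c2*x^2 through (0,e0), (1,e1), (2,e2)
    let c0 = e0;
    let c2 = (e2 - 2 * e1 + e0) / 2; // second finite difference divided by 2
    let c1 = e1 - c0 - c2;
    (c0, c1, c2)
  }

  #[test]
  fn round_poly_from_partial_evals() {
    // a toy round polynomial p(x) = 7 + 3x + 5x^2 with claim = p(0) + p(1)
    let p = |x: i128| 7 + 3 * x + 5 * x * x;
    let claim = p(0) + p(1);

    // the prover computes only p(0) and p(2); p(1) is derived from the claim
    let evals = [p(0), claim - p(0), p(2)];
    let (c0, c1, c2) = interpolate_quadratic(evals[0], evals[1], evals[2]);
    assert_eq!((c0, c1, c2), (7, 3, 5));
  }
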
  pub fn prove_cubic_with_additive_term<F>(
    claim: &Scalar,
    blind_claim: &Scalar,
    num_rounds: usize,
    poly_A: &mut DensePolynomial,
    poly_B: &mut DensePolynomial,
    poly_C: &mut DensePolynomial,
    poly_D: &mut DensePolynomial,
    comb_func: F,
    gens_1: &MultiCommitGens,
    gens_n: &MultiCommitGens,
    transcript: &mut Transcript,
    random_tape: &mut RandomTape,
  ) -> (Self, Vec<Scalar>, Vec<Scalar>, Scalar)
  where
    F: Fn(&Scalar, &Scalar, &Scalar, &Scalar) -> Scalar,
  {
    let (blinds_poly, blinds_evals) = (
      random_tape.random_vector(b"blinds_poly", num_rounds),
      random_tape.random_vector(b"blinds_evals", num_rounds),
    );

    let mut claim_per_round = *claim;
    let mut comm_claim_per_round = claim_per_round.commit(blind_claim, gens_1).compress();

    let mut r: Vec<Scalar> = Vec::new();
    let mut comm_polys: Vec<CompressedGroup> = Vec::new();
    let mut comm_evals: Vec<CompressedGroup> = Vec::new();
    let mut proofs: Vec<DotProductProof> = Vec::new();

    for j in 0..num_rounds {
      let (poly, comm_poly) = {
        let mut eval_point_0 = Scalar::zero();
        let mut eval_point_2 = Scalar::zero();
        let mut eval_point_3 = Scalar::zero();

        let len = poly_A.len() / 2;
        for i in 0..len {
          // eval 0: bound_func is A(low)
          eval_point_0 += comb_func(&poly_A[i], &poly_B[i], &poly_C[i], &poly_D[i]);

          // eval 2: bound_func is -A(low) + 2*A(high)
          let poly_A_bound_point = poly_A[len + i] + poly_A[len + i] - poly_A[i];
          let poly_B_bound_point = poly_B[len + i] + poly_B[len + i] - poly_B[i];
          let poly_C_bound_point = poly_C[len + i] + poly_C[len + i] - poly_C[i];
          let poly_D_bound_point = poly_D[len + i] + poly_D[len + i] - poly_D[i];
          eval_point_2 += comb_func(
            &poly_A_bound_point,
            &poly_B_bound_point,
            &poly_C_bound_point,
            &poly_D_bound_point,
          );

          // eval 3: bound_func is -2*A(low) + 3*A(high); computed incrementally from the eval-2 point
          let poly_A_bound_point = poly_A_bound_point + poly_A[len + i] - poly_A[i];
          let poly_B_bound_point = poly_B_bound_point + poly_B[len + i] - poly_B[i];
          let poly_C_bound_point = poly_C_bound_point + poly_C[len + i] - poly_C[i];
          let poly_D_bound_point = poly_D_bound_point + poly_D[len + i] - poly_D[i];
          eval_point_3 += comb_func(
            &poly_A_bound_point,
            &poly_B_bound_point,
            &poly_C_bound_point,
            &poly_D_bound_point,
          );
        }

        let evals = vec![
          eval_point_0,
          claim_per_round - eval_point_0,
          eval_point_2,
          eval_point_3,
        ];
        let poly = UniPoly::from_evals(&evals);
        let comm_poly = poly.commit(gens_n, &blinds_poly[j]).compress();
        (poly, comm_poly)
      };

      // append the prover's message to the transcript
      comm_poly.append_to_transcript(b"comm_poly", transcript);
      comm_polys.push(comm_poly);

      // derive the verifier's challenge for the next round
      let r_j = transcript.challenge_scalar(b"challenge_nextround");

      // bind all tables to the verifier's challenge
      poly_A.bound_poly_var_top(&r_j);
      poly_B.bound_poly_var_top(&r_j);
      poly_C.bound_poly_var_top(&r_j);
      poly_D.bound_poly_var_top(&r_j);

      // produce a proof of sum-check and of evaluation
      let (proof, claim_next_round, comm_claim_next_round) = {
        let eval = poly.evaluate(&r_j);
        let comm_eval = eval.commit(&blinds_evals[j], gens_1).compress();

        // we need to prove the following under homomorphic commitments:
        // (1) poly(0) + poly(1) = claim_per_round
        // (2) poly(r_j) = eval

        // Our technique is to leverage dot product proofs:
        // (1) we can prove: <poly_in_coeffs_form, (2, 1, 1, 1)> = claim_per_round
        // (2) we can prove: <poly_in_coeffs_form, (1, r_j, r^2_j, ...)> = eval
        // for efficiency we batch them using random weights

        // add the two claims to the transcript
        comm_claim_per_round.append_to_transcript(b"comm_claim_per_round", transcript);
        comm_eval.append_to_transcript(b"comm_eval", transcript);

        // produce two weights
        let w = transcript.challenge_vector(b"combine_two_claims_to_one", 2);

        // compute a weighted sum of the RHS
        let target = w[0] * claim_per_round + w[1] * eval;

        let comm_target = GroupElement::vartime_multiscalar_mul(
          w.as_slice(),
          iter::once(&comm_claim_per_round)
            .chain(iter::once(&comm_eval))
            .map(|pt| GroupElement::decompress(&pt).unwrap())
            .collect::<Vec<GroupElement>>()
            .as_slice(),
        )
        .compress();

        let blind = {
          let blind_sc = if j == 0 {
            blind_claim
          } else {
            &blinds_evals[j - 1]
          };

          let blind_eval = &blinds_evals[j];

          w[0] * blind_sc + w[1] * blind_eval
        };

        let res = target.commit(&blind, gens_1);
        assert_eq!(res.compress(), comm_target);

        let a = {
          // the vector to use to decommit for the sum-check test
          let a_sc = {
            let mut a = vec![Scalar::one(); poly.degree() + 1];
            a[0] += Scalar::one();
            a
          };

          // the vector to use to decommit for the evaluation
          let a_eval = {
            let mut a = vec![Scalar::one(); poly.degree() + 1];
            for j in 1..a.len() {
              a[j] = a[j - 1] * r_j;
            }
            a
          };

          // take a weighted sum of the two vectors using w
          assert_eq!(a_sc.len(), a_eval.len());
          (0..a_sc.len())
            .map(|i| w[0] * a_sc[i] + w[1] * a_eval[i])
            .collect::<Vec<Scalar>>()
        };

        let (proof, _comm_poly, _comm_sc_eval) = DotProductProof::prove(
          gens_1,
          gens_n,
          transcript,
          random_tape,
          &poly.as_vec(),
          &blinds_poly[j],
          &a,
          &target,
          &blind,
        );

        (proof, eval, comm_eval)
      };

      proofs.push(proof);
      claim_per_round = claim_next_round;
      comm_claim_per_round = comm_claim_next_round;
      r.push(r_j);
      comm_evals.push(comm_claim_per_round.clone());
    }

    (
      ZKSumcheckInstanceProof::new(comm_polys, comm_evals, proofs),
      r,
      vec![poly_A[0], poly_B[0], poly_C[0], poly_D[0]],
      blinds_evals[num_rounds - 1],
    )
  }
}
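// Standalone illustrative sketch (not part of the diff) of the batching step
// used in both prove functions above: rather than proving <poly, a_sc> = claim
// and <poly, a_eval> = eval with two dot-product proofs, the prover folds them
// with verifier weights and proves a single claim about the combined vector.
// Plain i128 arithmetic stands in for the Scalar field and the weights below
// are hard-coded rather than squeezed from a transcript.
fn dot(a: &[i128], b: &[i128]) -> i128 {
  a.iter().zip(b).map(|(x, y)| x * y).sum()
}

#[test]
fn batched_dot_product_claims() {
  // cubic round polynomial in coefficient form, lowest degree first
  let poly = [2, 9, 4, 1];
  let r: i128 = 3; // the round challenge

  // a_sc = (2, 1, 1, 1) checks p(0) + p(1); a_eval = (1, r, r^2, r^3) checks p(r)
  let a_sc = [2, 1, 1, 1];
  let a_eval = [1, r, r * r, r * r * r];
  let (claim, eval) = (dot(&poly, &a_sc), dot(&poly, &a_eval));

  // fold the two claims with (pretend-random) verifier weights
  let (w0, w1) = (5, 11);
  let a: Vec<i128> = (0..a_sc.len())
    .map(|i| w0 * a_sc[i] + w1 * a_eval[i])
    .collect();
  let target = w0 * claim + w1 * eval;

  // a single dot-product relation now implies both original claims (w.h.p.)
  assert_eq!(dot(&poly, &a), target);
}
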
@@ -1,10 +1,12 @@
use crate::poseidon_transcript::{AppendToPoseidon, PoseidonTranscript};

use super::commitments::{Commitments, MultiCommitGens};
use super::group::GroupElement;
use super::scalar::Scalar;
use super::transcript::{AppendToTranscript, ProofTranscript};
use ark_ff::{Field, One, Zero};
use ark_serialize::*;
use merlin::Transcript;

// ax^2 + bx + c stored as vec![c,b,a]
// ax^3 + bx^2 + cx + d stored as vec![d,c,b,a]
#[derive(Debug)]
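// Standalone illustrative sketch (not part of the diff): the coefficient
// vectors in this file are stored lowest degree first, e.g. ax^2 + bx + c as
// vec![c, b, a], so a Horner-style evaluation walks the vector from the back.
// `eval_lowest_first` is a local helper, not the crate's UniPoly::evaluate,
// and i128 stands in for the Scalar field.
fn eval_lowest_first(coeffs: &[i128], x: i128) -> i128 {
  coeffs.iter().rev().fold(0, |acc, &c| acc * x + c)
}

#[test]
fn lowest_degree_first_storage() {
  // 4x^2 + 3x + 2 stored as vec![2, 3, 4]
  let coeffs = vec![2, 3, 4];
  assert_eq!(eval_lowest_first(&coeffs, 5), 4 * 25 + 3 * 5 + 2);
}
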
@@ -109,6 +111,16 @@ impl CompressedUniPoly {
  }
}

impl AppendToPoseidon for UniPoly {
  fn append_to_poseidon(&self, transcript: &mut PoseidonTranscript) {
    // absorb every coefficient of the polynomial into the Poseidon transcript
    for i in 0..self.coeffs.len() {
      transcript.append_scalar(&self.coeffs[i]);
    }
  }
}

impl AppendToTranscript for UniPoly {
  fn append_to_transcript(&self, label: &'static [u8], transcript: &mut Transcript) {
    transcript.append_message(label, b"UniPoly_begin");
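// Standalone illustrative sketch (not part of the diff): append_to_poseidon
// follows the usual Fiat-Shamir pattern of absorbing every coefficient of the
// prover's round polynomial before the next challenge is squeezed, so the
// challenge is bound to the whole polynomial. The real code absorbs field
// elements into a Poseidon sponge; here `ToyTranscript` built on std's
// DefaultHasher is only a stand-in, and challenges are u64 instead of Scalar.
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

struct ToyTranscript {
  state: DefaultHasher,
}

impl ToyTranscript {
  fn new() -> Self {
    ToyTranscript { state: DefaultHasher::new() }
  }

  // analogue of appending one scalar to the transcript
  fn append_scalar(&mut self, s: u64) {
    s.hash(&mut self.state);
  }

  // analogue of squeezing a round challenge from the sponge
  fn challenge(&mut self) -> u64 {
    let c = self.state.finish();
    c.hash(&mut self.state); // keep mixing so later challenges differ
    c
  }
}

#[test]
fn coefficients_bind_the_challenge() {
  // two different round polynomials should lead to different challenges
  let (mut t1, mut t2) = (ToyTranscript::new(), ToyTranscript::new());
  for c in [2u64, 3, 4] {
    t1.append_scalar(c);
  }
  for c in [2u64, 3, 5] {
    t2.append_scalar(c);
  }
  assert_ne!(t1.challenge(), t2.challenge());
}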