mirror of https://github.com/arnaucube/testudo.git
commit e1383ff248, parent d6a1c21098, committed by maramihali
@@ -1,3 +1,4 @@
#![allow(clippy::assertions_on_result_states)]
extern crate byteorder;
extern crate core;
extern crate criterion;
@@ -1,3 +1,4 @@
#![allow(clippy::assertions_on_result_states)]
extern crate libspartan;
extern crate merlin;
@@ -8,11 +8,9 @@
//! `(Z3 + 5) * 1 - I0 = 0`
//!
//! [here]: https://medium.com/@VitalikButerin/quadratic-arithmetic-programs-from-zero-to-hero-f6d558cea649
use ark_bls12_377::Fr as Scalar;
use ark_ff::{PrimeField, BigInteger};
use ark_std::{One, UniformRand, Zero};
use libspartan::{InputsAssignment, Instance, SNARKGens, VarsAssignment, SNARK};
use merlin::Transcript;
use ark_std::{UniformRand, One, Zero};

#[allow(non_snake_case)]
fn produce_r1cs() -> (
@@ -72,7 +70,7 @@ fn produce_r1cs() -> (
  let inst = Instance::new(num_cons, num_vars, num_inputs, &A, &B, &C).unwrap();

  // compute a satisfying assignment
  let mut rng = ark_std::rand::thread_rng();
  let mut rng = ark_std::rand::thread_rng();
  let z0 = Scalar::rand(&mut rng);
  let z1 = z0 * z0; // constraint 0
  let z2 = z1 * z0; // constraint 1
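For context, the satisfying assignment above follows the cubic example from the doc comment: with Z1 = Z0*Z0 and Z2 = Z1*Z0, the remaining constraints (presumably Z3 = Z2 + Z0 plus the public-input check (Z3 + 5) * 1 - I0 = 0) force I0 = Z0^3 + Z0 + 5. A minimal standalone sketch of that arithmetic over u64 instead of the BLS12-377 scalar field (variable names are illustrative, not this crate's API):

// standalone sketch; mirrors the constraint chain with plain integers
fn main() {
  let z0: u64 = 3;
  let z1 = z0 * z0; // constraint 0
  let z2 = z1 * z0; // constraint 1
  let z3 = z2 + z0; // assumed constraint 2, per the doc comment
  let i0 = (z3 + 5) * 1; // (Z3 + 5) * 1 - I0 = 0
  assert_eq!(i0, z0.pow(3) + z0 + 5);
  println!("i0 = {i0}"); // 35 for z0 = 3, the classic x^3 + x + 5 = 35 example
}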
@@ -1,4 +1,6 @@
#![allow(non_snake_case)]
#![allow(clippy::assertions_on_result_states)]

extern crate flate2;
extern crate libspartan;
extern crate merlin;
@@ -1,4 +1,6 @@
#![allow(non_snake_case)]
#![allow(clippy::assertions_on_result_states)]

extern crate flate2;
extern crate libspartan;
extern crate merlin;
@@ -1,5 +1,4 @@
edition = "2018"
tab_spaces = 2
newline_style = "Unix"
report_fixme = "Always"
use_try_shorthand = true
@@ -3,6 +3,7 @@
#![feature(int_log)]
#![doc = include_str!("../README.md")]
#![deny(missing_docs)]
#![allow(clippy::assertions_on_result_states)]

extern crate byteorder;
extern crate core;
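The attribute added here (and in the files above) is a crate-level opt-out of clippy's assertions_on_result_states lint, which targets assert!(r.is_ok()) / assert!(r.is_err()) on a Result because the assertion discards the error value on panic. A minimal standalone sketch of the pattern the lint reacts to and where the allow goes (hypothetical crate, not code from this repository):

#![allow(clippy::assertions_on_result_states)]

fn parse_port(s: &str) -> Result<u16, std::num::ParseIntError> {
  s.parse::<u16>()
}

fn main() {
  // The lint's suggestion for this line is `parse_port("8080").unwrap();`
  // (or propagating the error), so a failure actually shows the error value.
  assert!(parse_port("8080").is_ok());
}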
@@ -229,7 +229,7 @@ impl R1CSInstance {
      C: poly_C,
    };

    assert!(inst.is_sat(&Z[..num_vars].to_vec(), &Z[num_vars + 1..].to_vec()));
    assert!(inst.is_sat(&Z[..num_vars], &Z[num_vars + 1..]));

    (inst, Z[..num_vars].to_vec(), Z[num_vars + 1..].to_vec())
  }
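The is_sat call above now borrows the sub-slices of Z directly instead of allocating fresh vectors with .to_vec(); a callee that takes &[T] accepts both forms. A small standalone sketch of the same call-site change (the function and data are illustrative, not this crate's API):

// standalone sketch: a checker that only needs borrowed slices
fn is_sat_demo(vars: &[u64], inputs: &[u64]) -> bool {
  vars.iter().sum::<u64>() == inputs.iter().sum::<u64>()
}

fn main() {
  let z = vec![2u64, 4, 1, 5];
  let num_vars = 2;
  // before: extra allocations just to make the call
  assert!(is_sat_demo(&z[..num_vars].to_vec(), &z[num_vars..].to_vec()));
  // after: borrow the sub-slices directly, as the diff above does
  assert!(is_sat_demo(&z[..num_vars], &z[num_vars..]));
}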
@@ -278,10 +278,30 @@ impl R1CSProof {
    let claim_phase2 = r_A * Az_claim + r_B * Bz_claim + r_C * Cz_claim;

    // verify the joint claim with a sum-check protocol
<<<<<<< HEAD
    let (claim_post_phase2, ry) =
      self
        .sc_proof_phase2
        .verify(claim_phase2, num_rounds_y, 2, transcript)?;
=======
    let (comm_claim_post_phase2, ry) = self.sc_proof_phase2.verify(
      &comm_claim_phase2,
      num_rounds_y,
      2,
      &gens.gens_sc.gens_1,
      &gens.gens_sc.gens_3,
      transcript,
    )?;

    // verify Z(ry) proof against the initial commitment
    self.proof_eval_vars_at_ry.verify(
      &gens.gens_pc,
      transcript,
      &ry[1..],
      &self.comm_vars_at_ry,
      &self.comm_vars,
    )?;
>>>>>>> clippy fixes (#50)

    let poly_input_eval = {
      // constant term
@@ -292,8 +312,7 @@ impl R1CSProof {
          .map(|i| SparsePolyEntry::new(i + 1, input[i]))
          .collect::<Vec<SparsePolyEntry>>(),
      );
      SparsePolynomial::new(n.log2() as usize, input_as_sparse_poly_entries)
        .evaluate(&ry[1..].to_vec())
      SparsePolynomial::new(n.log2() as usize, input_as_sparse_poly_entries).evaluate(&ry[1..])
    };

    let eval_Z_at_ry = (Scalar::one() - ry[0]) * self.eval_vars_at_ry + ry[0] * poly_input_eval;
@@ -967,16 +967,13 @@ impl HashLayerProof {
    let mut r_joint_ops = challenges_ops;
    r_joint_ops.extend(rand_ops);
    joint_claim_eval_ops.append_to_transcript(b"joint_claim_eval_ops", transcript);
    assert!(self
      .proof_ops
      .verify_plain(
        &gens.gens_ops,
        transcript,
        &r_joint_ops,
        &joint_claim_eval_ops,
        &comm.comm_comb_ops
      )
      .is_ok());
    self.proof_ops.verify_plain(
      &gens.gens_ops,
      transcript,
      &r_joint_ops,
      &joint_claim_eval_ops,
      &comm.comm_comb_ops,
    )?;

    // verify proof-mem using comm_comb_mem at rand_mem
    // form a single decommitment using comb_comb_mem at rand_mem
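The rewrite above is the recurring change in this commit: instead of asserting that verify_plain returned Ok (which panics without surfacing the error), the Result is propagated with ? from a verifier that itself returns a Result. A hedged standalone sketch of that transformation with made-up types (DemoError and the demo functions are not this crate's API):

#[derive(Debug)]
struct DemoError;

// stand-in for a sub-proof check
fn verify_plain_demo(ok: bool) -> Result<(), DemoError> {
  if ok { Ok(()) } else { Err(DemoError) }
}

// before: the error is swallowed and the caller only sees a panic
fn verify_old(ok: bool) {
  assert!(verify_plain_demo(ok).is_ok());
}

// after: bubble the error up to the caller, as the diff does with `?`
fn verify_new(ok: bool) -> Result<(), DemoError> {
  verify_plain_demo(ok)?;
  Ok(())
}

fn main() {
  verify_old(true);
  verify_new(true).unwrap();
  verify_new(false).unwrap_err();
}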
@@ -1408,33 +1405,30 @@ impl PolyEvalNetworkProof {
    let (claims_ops_col_read, claims_ops_col_write) = claims_ops_col.split_at_mut(num_instances);

    // verify the proof of hash layer
    assert!(self
      .proof_hash_layer
      .verify(
        (&rand_mem, &rand_ops),
        &(
          claims_mem[0],
          claims_ops_row_read.to_vec(),
          claims_ops_row_write.to_vec(),
          claims_mem[1],
        ),
        &(
          claims_mem[2],
          claims_ops_col_read.to_vec(),
          claims_ops_col_write.to_vec(),
          claims_mem[3],
        ),
        &claims_dotp,
        comm,
        gens,
        comm_derefs,
        rx,
        ry,
        r_hash,
        r_multiset_check,
        transcript
      )
      .is_ok());
    self.proof_hash_layer.verify(
      (&rand_mem, &rand_ops),
      &(
        claims_mem[0],
        claims_ops_row_read.to_vec(),
        claims_ops_row_write.to_vec(),
        claims_mem[1],
      ),
      &(
        claims_mem[2],
        claims_ops_col_read.to_vec(),
        claims_ops_col_write.to_vec(),
        claims_mem[3],
      ),
      &claims_dotp,
      comm,
      gens,
      comm_derefs,
      rx,
      ry,
      r_hash,
      r_multiset_check,
      transcript,
    )?;
    timer.stop();

    Ok(())