Add solidity groth16, kzg10 and final decider verifiers in a dedicated workspace (#70)

* change: Refactor structure into workspace

* chore: Add empty readme

* change: Transform repo into workspace

* add: Create folding-verifier-solidity crate

* add: Include askama.toml for `sol` extension escaper

* add: Jordi's old Groth16 verifier .sol template and adapt it

* tmp: create simple template struct to test

* Update FoldingSchemes trait, fit Nova+CycleFold

- update lib.rs's `FoldingScheme` trait interface
- fit Nova+CycleFold into the `FoldingScheme` trait
- refactor `src/nova/*`

* chore: add serialization assets for testing

Now we include an `assets` folder with a serialized proof & vk for tests

* Add `examples` dir, with Nova's `FoldingScheme` example

* polishing

* expose poseidon_test_config outside tests

* change: Refactor structure into workspace

* chore: Add empty readme

* change: Transform repo into workspace

* add: Create folding-verifier-solidity crate

* add: Include askama.toml for `sol` extension escaper

* add: Jordi's old Groth16 verifier .sol template and adapt it

* tmp: create simple template struct to test

* feat: templating kzg working

* chore: add evm and revm

* feat: start evm file

* chore: add ark-poly-commit

* chore: move `commitment` to `folding-schemes`

* chore: update `.gitignore` to ignore generated contracts

* chore: update template with bn254 lib on it (avoids import), update for loop to account for whitespaces

* refactor: update template with no lib

* feat: add evm deploy code, compile and create kzg verifier

* chore: update `Cargo.toml` to have `folding-schemes` available with verifiers

* feat: start kzg prove and verify with sol

* chore: compute crs from kzg prover

* feat: evm kzg verification passing

* tmp

* change: Swap order of G2 coordinates within the template

* Update way to serialize proof with correct order

* chore: update `Cargo.toml`

* chore: add revm

* chore: add `save_solidity`

* refactor: verifiers in dedicated mod

* refactor: have dedicated `utils` module

* chore: expose modules

* chore: update verifier for kzg

* chore: rename templates

* fix: look for binary using also name of contract

* refactor: generate groth16 proof for sha256 pre-image, generate groth16 template with verifying key

* chore: template renaming

* fix: switch circuit for circuit that simply adds

* feat: generates test data on the fly

* feat: update to latest groth16 verifier

* refactor: rename folder, update `.gitignore`

* chore: update `Cargo.toml`

* chore: update templates extension to indicate that they are templates

* chore: rename templates, both files and structs

* fix: template inheritance working

* feat: template spdx and pragma statements

* feat: decider verifier compiles, update test for kzg10 and groth16 templates

* feat: parameterize which size of the crs should be stored on the contract

* chore: add comment on how the groth16 and kzg10 proofs will be linked together

* chore: cargo clippy run

* chore: cargo clippy tests

* chore: cargo fmt

* refactor: remove unused lifetime parameter

* chore: end merge

* chore: move examples to `folding-schemes` workspace

* get latest main changes

* fix: temp fix clippy warnings, will remove lints once not used in tests only

* fix: cargo clippy lint added on `code_size`

* fix: update path to test circuit and add step for installing solc

* chore: remove `save_solidity` steps

* fix: the borrowed expression implements the required traits

* chore: update `Cargo.toml`

* chore: remove extra `[patch.crates-io]`

* fix: update to patch at the workspace level and add comment explaining this

* refactor: correct `staticcall` with valid input/output sizes and change return syntax for pairing

* refactor: expose modules and remove `dead_code` calls

* chore: update `README.md`, add additional comments on `kzg10` template and update `groth16` template comments

* chore: be clearer on attributions on `kzg10`

---------

Co-authored-by: CPerezz <c.perezbaro@gmail.com>
Co-authored-by: arnaucube <root@arnaucube.com>
This commit is contained in:
Pierre
2024-02-09 08:19:25 +01:00
committed by GitHub
parent 97e973a685
commit 63dbbfe1bc
67 changed files with 1208 additions and 53 deletions

View File

@@ -0,0 +1,123 @@
use ark_ff::PrimeField;
use ark_poly::{univariate::DensePolynomial, DenseUVPolynomial};
/// Computes the Lagrange interpolated polynomial from the given points `p_i`,
/// using the barycentric form of Lagrange interpolation.
///
/// The interpolation domain is implicitly `0..p_i.len()` (i.e. `p_i[j]` is the
/// evaluation at `x = j`), to fit `interpolate_uni_poly` from hyperplonk.
///
/// Returns the unique polynomial of degree `< p_i.len()` passing through all
/// points `(j, p_i[j])`. For an empty input this returns the zero polynomial.
pub fn compute_lagrange_interpolated_poly<F: PrimeField>(p_i: &[F]) -> DensePolynomial<F> {
    let n = p_i.len();
    // compute l(x) = prod_m (x - x_m), common to every basis polynomial
    let mut l_x = DensePolynomial::from_coefficients_vec(vec![F::ONE]);
    for x_m in 0..n {
        let prod_m = DensePolynomial::from_coefficients_vec(vec![-F::from(x_m as u64), F::ONE]);
        l_x = &l_x * &prod_m;
    }
    // compute each w_j - barycentric weights: w_j = prod_{m != j} 1/(x_j - x_m)
    let mut w_j_vector: Vec<F> = Vec::with_capacity(n);
    for x_j in 0..n {
        let mut w_j = F::ONE;
        for x_m in 0..n {
            if x_m != x_j {
                // an inverse always exists since x_j != x_m (difference != 0),
                // hence we call unwrap() here without checking the Option's content
                let prod = (F::from(x_j as u64) - F::from(x_m as u64))
                    .inverse()
                    .unwrap();
                w_j *= prod;
            }
        }
        w_j_vector.push(w_j);
    }
    // compute each polynomial within the sum L(x) = sum_j y_j * w_j * l(x) / (x - x_j)
    let mut lagrange_poly = DensePolynomial::from_coefficients_vec(vec![F::ZERO]);
    for (x_j, w_j) in w_j_vector.iter().enumerate() {
        let y_j = p_i[x_j];
        // we multiply by l(x) here, otherwise the below division will not work - deg(0)/deg(d)
        let poly_numerator = &(&l_x * (*w_j)) * (y_j);
        let poly_denominator =
            DensePolynomial::from_coefficients_vec(vec![-F::from(x_j as u64), F::ONE]);
        let poly = &poly_numerator / &poly_denominator;
        lagrange_poly = &lagrange_poly + &poly;
    }
    lagrange_poly
}
#[cfg(test)]
mod tests {
    use crate::utils::espresso::sum_check::verifier::interpolate_uni_poly;
    use crate::utils::lagrange_poly::compute_lagrange_interpolated_poly;
    use ark_pallas::Fr;
    use ark_poly::{univariate::DensePolynomial, DenseUVPolynomial, Polynomial};
    use ark_std::{vec::Vec, UniformRand};
    use espresso_subroutines::poly_iop::prelude::PolyIOPErrors;

    // Interpolating degree+1 evaluations of a random polynomial must recover
    // the polynomial itself: same degree, same value at random query points.
    #[test]
    fn test_compute_lagrange_interpolated_poly() {
        let mut rng = ark_std::test_rng();
        for degree in 1..30 {
            let source_poly = DensePolynomial::<Fr>::rand(degree, &mut rng);
            // degree + 1 evaluations pin the polynomial uniquely
            // (the range end is exclusive, so 0..=degree points are taken)
            let evals: Vec<Fr> = (0..(degree + 1))
                .map(|i| source_poly.evaluate(&Fr::from(i as u64)))
                .collect();
            let interpolated = compute_lagrange_interpolated_poly(&evals);
            for _ in 0..10 {
                let query = Fr::rand(&mut rng);
                assert_eq!(source_poly.evaluate(&query), interpolated.evaluate(&query));
                assert_eq!(interpolated.degree(), source_poly.degree());
            }
        }
    }

    // Cross-check against hyperplonk's `interpolate_uni_poly`: both must agree
    // with each other and with the source polynomial at a random query.
    #[test]
    fn test_interpolation() -> Result<(), PolyIOPErrors> {
        let mut rng = ark_std::test_rng();
        // n known points determine a polynomial of degree n - 1
        for n in [20u64, 33, 64] {
            let source_poly = DensePolynomial::<Fr>::rand(n as usize - 1, &mut rng);
            let evals: Vec<Fr> = (0..n).map(|i| source_poly.evaluate(&Fr::from(i))).collect();
            let query = Fr::rand(&mut rng);
            let expected = interpolate_uni_poly(&evals, query)?;
            assert_eq!(source_poly.evaluate(&query), expected);
            assert_eq!(
                compute_lagrange_interpolated_poly(&evals).evaluate(&query),
                expected
            );
        }
        Ok(())
    }
}