
update the dependencies & code clean (#196)

Branch: main
Author: liquan.eth, 10 months ago (committed by GitHub, GPG key ID 4AEE18F83AFDEB23)
Commit: ac8db99835
9 changed files with 55 additions and 55 deletions

  1. Cargo.toml (+11, -11)
  2. examples/signature.rs (+4, -4)
  3. src/gadgets/ecc.rs (+1, -1)
  4. src/lib.rs (+2, -2)
  5. src/provider/bn256_grumpkin.rs (+5, -5)
  6. src/provider/keccak.rs (+16, -16)
  7. src/provider/pasta.rs (+5, -5)
  8. src/spartan/direct.rs (+1, -1)
  9. src/spartan/ppsnark.rs (+10, -10)

Cargo.toml (+11, -11)

@@ -13,33 +13,33 @@ keywords = ["zkSNARKs", "cryptography", "proofs"]
[dependencies]
bellperson = { version = "0.25", default-features = false }
ff = { version = "0.13.0", features = ["derive"] }
-digest = "0.8.1"
-sha3 = "0.8.2"
-rayon = "1.3.0"
-rand_core = { version = "0.6.0", default-features = false }
+digest = "0.10"
+sha3 = "0.10"
+rayon = "1.7"
+rand_core = { version = "0.6", default-features = false }
rand_chacha = "0.3"
-itertools = "0.9.0"
-subtle = "2.4"
+itertools = "0.11"
+subtle = "2.5"
pasta_curves = { version = "0.5", features = ["repr-c", "serde"] }
neptune = { version = "10.0.0", default-features = false }
-generic-array = "0.14.4"
+generic-array = "0.14"
num-bigint = { version = "0.4", features = ["serde", "rand"] }
num-traits = "0.2"
num-integer = "0.1"
serde = { version = "1.0", features = ["derive"] }
-bincode = "1.2.1"
+bincode = "1.3"
flate2 = "1.0"
bitvec = "1.0"
byteorder = "1.4.3"
-thiserror = "1.0"
-halo2curves = { version="0.1.0", features = [ "derive_serde" ] }
+thiserror = "1.0"
+halo2curves = { version = "0.1.0", features = ["derive_serde"] }
[target.'cfg(any(target_arch = "x86_64", target_arch = "aarch64"))'.dependencies]
pasta-msm = { version = "0.1.4" }
[target.wasm32-unknown-unknown.dependencies]
# see https://github.com/rust-random/rand/pull/948
-getrandom = { version = "0.2.0", default-features = false, features = ["js"]}
+getrandom = { version = "0.2.0", default-features = false, features = ["js"] }
[dev-dependencies]
criterion = { version = "0.4", features = ["html_reports"] }
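
The bumps that matter for the rest of this diff are digest and sha3 from 0.8 to 0.10, which rename the hashing traits: `Input::input` becomes `Update::update` and `result` becomes `finalize`. A minimal sketch of the 0.10-style API (illustrative only, not taken from the diff):

use sha3::{Digest, Sha3_256};

fn short_digest(bytes: &[u8]) -> Vec<u8> {
    let mut hasher = Sha3_256::new();
    hasher.update(bytes);       // digest 0.8 called this `input`
    hasher.finalize().to_vec()  // digest 0.8 called this `result`
}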

examples/signature.rs (+4, -4)

@@ -100,10 +100,10 @@ where
pub fn hash_to_scalar(persona: &[u8], a: &[u8], b: &[u8]) -> G::Scalar {
let mut hasher = Sha3_512::new();
-hasher.input(persona);
-hasher.input(a);
-hasher.input(b);
-let digest = hasher.result();
+hasher.update(persona);
+hasher.update(a);
+hasher.update(b);
+let digest = hasher.finalize();
Self::to_uniform(digest.as_ref())
}
}
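
Under digest 0.10 the same absorb-then-finalize sequence can also be written with the chained form; a sketch under that assumption (function name hypothetical, not part of the commit):

use sha3::{Digest, Sha3_512};

// Equivalent to the update/update/update/finalize sequence above:
// `chain_update` consumes and returns the hasher.
fn hash_three(persona: &[u8], a: &[u8], b: &[u8]) -> Vec<u8> {
    Sha3_512::new()
        .chain_update(persona)
        .chain_update(a)
        .chain_update(b)
        .finalize()
        .to_vec()
}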

src/gadgets/ecc.rs (+1, -1)

@@ -1076,7 +1076,7 @@ mod tests {
{
let a = alloc_random_point(cs.namespace(|| "a")).unwrap();
inputize_allocted_point(&a, cs.namespace(|| "inputize a")).unwrap();
-let mut b = &mut a.clone();
+let b = &mut a.clone();
b.y = AllocatedNum::alloc(cs.namespace(|| "allocate negation of a"), || {
Ok(G::Base::ZERO)
})
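
The ecc.rs change only drops a `mut` that the compiler warns about as unused: mutating through a `&mut` reference needs no `mut` on the binding itself; `mut` on the binding would only matter if `b` were later re-pointed at something else. A standalone illustration (hypothetical example, not from the crate):

fn sketch() {
    let mut a = (1u32, 2u32);
    let b = &mut a; // non-mut binding holding a mutable reference
    b.1 = 0;        // fine: the mutation goes through the reference
    assert_eq!(a.1, 0);
    // `let mut b = &mut a;` would only be needed to reassign `b` itself.
}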

src/lib.rs (+2, -2)

@@ -769,8 +769,8 @@ fn compute_digest(o: &T) -> G::Scalar {
let bytes = bincode::serialize(o).unwrap();
// convert pp_bytes into a short digest
let mut hasher = Sha3_256::new();
-hasher.input(&bytes);
-let digest = hasher.result();
+hasher.update(&bytes);
+let digest = hasher.finalize();
// truncate the digest to NUM_HASH_BITS bits
let bv = (0..NUM_HASH_BITS).map(|i| {
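
Same `input`/`result` rename as above; the code then truncates the digest to `NUM_HASH_BITS` bits. A rough sketch of that truncation under the 0.10 API, with an assumed constant value for illustration:

use sha3::{Digest, Sha3_256};

const NUM_HASH_BITS: usize = 250; // illustrative value, not read from the crate

// Keep only the low NUM_HASH_BITS bits of the 32-byte digest.
fn truncated_digest_bits(bytes: &[u8]) -> Vec<bool> {
    let digest = Sha3_256::new().chain_update(bytes).finalize();
    (0..NUM_HASH_BITS)
        .map(|i| (digest[i / 8] >> (i % 8)) & 1 == 1)
        .collect()
}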

src/provider/bn256_grumpkin.rs (+5, -5)

@@ -8,7 +8,7 @@ use crate::{
},
traits::{CompressedGroup, Group, PrimeFieldExt, TranscriptReprTrait},
};
-use digest::{ExtendableOutput, Input};
+use digest::{ExtendableOutput, Update};
use ff::{FromUniformBytes, PrimeField};
use num_bigint::BigInt;
use num_traits::Num;
@@ -80,8 +80,8 @@ macro_rules! impl_traits {
fn from_label(label: &'static [u8], n: usize) -> Vec<Self::PreprocessedGroupElement> {
let mut shake = Shake256::default();
-shake.input(label);
-let mut reader = shake.xof_result();
+shake.update(label);
+let mut reader = shake.finalize_xof();
let mut uniform_bytes_vec = Vec::new();
for _ in 0..n {
let mut uniform_bytes = [0u8; 32];
@@ -216,8 +216,8 @@ mod tests {
fn from_label_serial(label: &'static [u8], n: usize) -> Vec<Bn256Affine> {
let mut shake = Shake256::default();
-shake.input(label);
-let mut reader = shake.xof_result();
+shake.update(label);
+let mut reader = shake.finalize_xof();
let mut ck = Vec::new();
for _ in 0..n {
let mut uniform_bytes = [0u8; 32];
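
The XOF side of the same migration: sha3 0.10 renames `xof_result` to `finalize_xof`, and the absorb call now comes from the `Update` trait. A minimal sketch of the label-to-bytes pattern used by `from_label` (standalone, not the crate's exact code):

use sha3::{
    digest::{ExtendableOutput, Update, XofReader},
    Shake256,
};

// Derive n 32-byte strings from a label with SHAKE256 under sha3 0.10.
fn bytes_from_label(label: &[u8], n: usize) -> Vec<[u8; 32]> {
    let mut shake = Shake256::default();
    shake.update(label);                   // was `input` in sha3 0.8
    let mut reader = shake.finalize_xof(); // was `xof_result` in sha3 0.8
    (0..n)
        .map(|_| {
            let mut buf = [0u8; 32];
            reader.read(&mut buf); // pull the next 32 bytes of XOF output
            buf
        })
        .collect()
}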

src/provider/keccak.rs (+16, -16)

@@ -24,7 +24,7 @@ pub struct Keccak256Transcript {
fn compute_updated_state(keccak_instance: Keccak256, input: &[u8]) -> [u8; KECCAK256_STATE_SIZE] {
let mut updated_instance = keccak_instance;
-updated_instance.input(input);
+updated_instance.update(input);
let input_lo = &[KECCAK256_PREFIX_CHALLENGE_LO];
let input_hi = &[KECCAK256_PREFIX_CHALLENGE_HI];
@@ -32,11 +32,11 @@ fn compute_updated_state(keccak_instance: Keccak256, input: &[u8]) -> [u8; KECCAK256_STATE_SIZE] {
let mut hasher_lo = updated_instance.clone();
let mut hasher_hi = updated_instance;
-hasher_lo.input(input_lo);
-hasher_hi.input(input_hi);
+hasher_lo.update(input_lo);
+hasher_hi.update(input_hi);
-let output_lo = hasher_lo.result();
-let output_hi = hasher_hi.result();
+let output_lo = hasher_lo.finalize();
+let output_hi = hasher_hi.finalize();
[output_lo, output_hi]
.concat()
@@ -86,13 +86,13 @@ impl TranscriptEngineTrait for Keccak256Transcript {
}
fn absorb<T: TranscriptReprTrait<G>>(&mut self, label: &'static [u8], o: &T) {
-self.transcript.input(label);
-self.transcript.input(&o.to_transcript_bytes());
+self.transcript.update(label);
+self.transcript.update(&o.to_transcript_bytes());
}
fn dom_sep(&mut self, bytes: &'static [u8]) {
-self.transcript.input(DOM_SEP_TAG);
-self.transcript.input(bytes);
+self.transcript.update(DOM_SEP_TAG);
+self.transcript.update(bytes);
}
}
@@ -149,8 +149,8 @@ mod tests {
#[test]
fn test_keccak_example() {
let mut hasher = Keccak256::new();
-hasher.input(0xffffffff_u32.to_le_bytes());
-let output: [u8; 32] = hasher.result().try_into().unwrap();
+hasher.update(0xffffffff_u32.to_le_bytes());
+let output: [u8; 32] = hasher.finalize().try_into().unwrap();
assert_eq!(
hex::encode(output),
"29045a592007d0c246ef02c2223570da9522d0cf0f73282c79a1bc8f0bb2c238"
@@ -169,11 +169,11 @@ mod tests {
let mut hasher_lo = Keccak256::new();
let mut hasher_hi = Keccak256::new();
-hasher_lo.input(&input_lo);
-hasher_hi.input(&input_hi);
+hasher_lo.update(&input_lo);
+hasher_hi.update(&input_hi);
-let output_lo = hasher_lo.result();
-let output_hi = hasher_hi.result();
+let output_lo = hasher_lo.finalize();
+let output_hi = hasher_hi.finalize();
[output_lo, output_hi]
.concat()
@@ -213,7 +213,7 @@ mod tests {
// add the scalars to the transcripts,
let mut manual_transcript: Vec<u8> = vec![];
-let labels = vec![
+let labels = [
b"s1", b"s2", b"s3", b"s4", b"s5", b"s6", b"s7", b"s8", b"s9", b"s0",
];
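
Beyond the renames, this file keeps the trick of cloning an already-absorbed hasher so one transcript state yields two independent outputs, and it swaps a `vec![...]` of labels for a plain array. A hedged sketch of the clone-then-finalize pattern (suffix bytes are illustrative, not the crate's constants):

use sha3::{Digest, Keccak256};

// Derive two 32-byte outputs from the same absorbed state by cloning the
// hasher and appending a distinct suffix to each copy (sha3 0.10 API).
fn split_outputs(state: &[u8]) -> (Vec<u8>, Vec<u8>) {
    let mut base = Keccak256::new();
    base.update(state);

    let mut lo = base.clone();
    let mut hi = base;
    lo.update([0u8]);
    hi.update([1u8]);

    (lo.finalize().to_vec(), hi.finalize().to_vec())
}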

src/provider/pasta.rs (+5, -5)

@@ -8,7 +8,7 @@ use crate::{
},
traits::{CompressedGroup, Group, PrimeFieldExt, TranscriptReprTrait},
};
-use digest::{ExtendableOutput, Input};
+use digest::{ExtendableOutput, Update};
use ff::{FromUniformBytes, PrimeField};
use num_bigint::BigInt;
use num_traits::Num;
@@ -97,8 +97,8 @@ macro_rules! impl_traits {
fn from_label(label: &'static [u8], n: usize) -> Vec<Self::PreprocessedGroupElement> {
let mut shake = Shake256::default();
-shake.input(label);
-let mut reader = shake.xof_result();
+shake.update(label);
+let mut reader = shake.finalize_xof();
let mut uniform_bytes_vec = Vec::new();
for _ in 0..n {
let mut uniform_bytes = [0u8; 32];
@@ -230,8 +230,8 @@ mod tests {
fn from_label_serial(label: &'static [u8], n: usize) -> Vec<EpAffine> {
let mut shake = Shake256::default();
-shake.input(label);
-let mut reader = shake.xof_result();
+shake.update(label);
+let mut reader = shake.finalize_xof();
let mut ck = Vec::new();
for _ in 0..n {
let mut uniform_bytes = [0u8; 32];
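
Same SHAKE256 rename as in bn256_grumpkin.rs. One detail worth keeping in mind: in digest 0.10, `finalize_xof` takes the hasher by value, which is why each `from_label`-style call starts from a fresh `Shake256::default()` instead of reusing one. A tiny sketch of that ownership rule (standalone example):

use sha3::{
    digest::{ExtendableOutput, Update, XofReader},
    Shake256,
};

fn one_block(label: &[u8]) -> [u8; 32] {
    let mut shake = Shake256::default();
    shake.update(label);
    let mut reader = shake.finalize_xof(); // consumes `shake`
    let mut out = [0u8; 32];
    reader.read(&mut out);
    out
    // a second stream would need a new Shake256::default()
}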

src/spartan/direct.rs (+1, -1)

@@ -250,7 +250,7 @@ mod tests {
let io = z_i
.clone()
.into_iter()
-.chain(z_i_plus_one.clone().into_iter())
+.chain(z_i_plus_one.clone())
.collect::<Vec<_>>();
let res = snark.verify(&vk, &io);
assert!(res.is_ok());
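
`Iterator::chain` accepts any `IntoIterator`, so the explicit `.into_iter()` on the chained collection was redundant (recent clippy versions flag it as a useless conversion). A small standalone illustration:

// A Vec can be passed to `chain` directly; `chain` calls IntoIterator itself.
fn concat_io(z_i: &[u64], z_next: &[u64]) -> Vec<u64> {
    z_i.iter()
        .copied()
        .chain(z_next.to_vec()) // no `.into_iter()` needed
        .collect()
}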

src/spartan/ppsnark.rs (+10, -10)

@@ -838,8 +838,8 @@ impl> RelaxedR1CSSNARK
let claims_inner = inner.initial_claims();
claims_mem
.into_iter()
-.chain(claims_outer.into_iter())
-.chain(claims_inner.into_iter())
+.chain(claims_outer)
+.chain(claims_inner)
.collect::<Vec<G::Scalar>>()
};
@@ -1176,9 +1176,9 @@ impl> RelaxedR1CSSNARKTrait
eval_Az, eval_Bz, eval_Cz, eval_E, eval_E_row, eval_E_col, eval_val_A, eval_val_B, eval_val_C,
]
.into_iter()
-.chain(eval_left_vec.clone().into_iter())
-.chain(eval_right_vec.clone().into_iter())
-.chain(eval_output_vec.clone().into_iter())
+.chain(eval_left_vec.clone())
+.chain(eval_right_vec.clone())
+.chain(eval_output_vec.clone())
.collect::<Vec<G::Scalar>>();
// absorb all the claimed evaluations
@@ -1211,7 +1211,7 @@ impl> RelaxedR1CSSNARKTrait
let evals = eval_input_vec
.clone()
.into_iter()
-.chain(eval_output2_vec.clone().into_iter())
+.chain(eval_output2_vec.clone())
.collect::<Vec<G::Scalar>>();
transcript.absorb(b"e", &evals.as_slice());
@@ -1700,9 +1700,9 @@ impl> RelaxedR1CSSNARKTrait
self.eval_val_C,
]
.into_iter()
-.chain(self.eval_left_arr.into_iter())
-.chain(self.eval_right_arr.into_iter())
-.chain(self.eval_output_arr.into_iter())
+.chain(self.eval_left_arr)
+.chain(self.eval_right_arr)
+.chain(self.eval_output_arr)
.collect::<Vec<G::Scalar>>();
transcript.absorb(b"e", &eval_vec.as_slice());
@@ -1722,7 +1722,7 @@ impl> RelaxedR1CSSNARKTrait
let evals = self
.eval_input_arr
.into_iter()
-.chain(self.eval_output2_arr.into_iter())
+.chain(self.eval_output2_arr)
.collect::<Vec<G::Scalar>>();
transcript.absorb(b"e", &evals.as_slice());
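
The same cleanup applies to the `*_arr` values chained here; if those are fixed-size arrays of scalars (as the names suggest), then since Rust 1.53 `[T; N]` implements `IntoIterator` by value and can be handed to `chain` directly. A short sketch with assumed types:

// Arrays go into `chain` without `.into_iter()`; with Copy elements this also
// works from a borrowed struct, since the array is copied rather than moved.
fn flatten(head: [u64; 3], tail: [u64; 3]) -> Vec<u64> {
    std::iter::empty()
        .chain(head) // yields owned u64 values
        .chain(tail)
        .collect()
}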
