update the dependencies & code clean (#196)
Cargo.toml (22 changed lines)
@@ -13,33 +13,33 @@ keywords = ["zkSNARKs", "cryptography", "proofs"]
[dependencies]
bellperson = { version = "0.25", default-features = false }
ff = { version = "0.13.0", features = ["derive"] }
-digest = "0.8.1"
+digest = "0.10"
-sha3 = "0.8.2"
+sha3 = "0.10"
-rayon = "1.3.0"
+rayon = "1.7"
-rand_core = { version = "0.6.0", default-features = false }
+rand_core = { version = "0.6", default-features = false }
rand_chacha = "0.3"
-itertools = "0.9.0"
+itertools = "0.11"
-subtle = "2.4"
+subtle = "2.5"
pasta_curves = { version = "0.5", features = ["repr-c", "serde"] }
neptune = { version = "10.0.0", default-features = false }
-generic-array = "0.14.4"
+generic-array = "0.14"
num-bigint = { version = "0.4", features = ["serde", "rand"] }
num-traits = "0.2"
num-integer = "0.1"
serde = { version = "1.0", features = ["derive"] }
-bincode = "1.2.1"
+bincode = "1.3"
flate2 = "1.0"
bitvec = "1.0"
byteorder = "1.4.3"
thiserror = "1.0"
-halo2curves = { version="0.1.0", features = [ "derive_serde" ] }
+halo2curves = { version = "0.1.0", features = ["derive_serde"] }

[target.'cfg(any(target_arch = "x86_64", target_arch = "aarch64"))'.dependencies]
pasta-msm = { version = "0.1.4" }

[target.wasm32-unknown-unknown.dependencies]
# see https://github.com/rust-random/rand/pull/948
-getrandom = { version = "0.2.0", default-features = false, features = ["js"]}
+getrandom = { version = "0.2.0", default-features = false, features = ["js"] }

[dev-dependencies]
criterion = { version = "0.4", features = ["html_reports"] }

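The jump of digest and sha3 from the 0.8 series to 0.10 is what drives most of the source changes below: the 0.8-era `Input::input` and `result` methods are gone, replaced by `Digest::update` and `finalize`. A minimal sketch of the 0.10-style call pattern, with an illustrative helper name:

```rust
use sha3::{Digest, Sha3_256};

// digest/sha3 0.10: `Digest::update` replaces the 0.8-era `Input::input`,
// and `finalize` replaces `result`. Helper name and return type are illustrative.
fn sha3_256_digest(bytes: &[u8]) -> Vec<u8> {
    let mut hasher = Sha3_256::new();
    hasher.update(bytes);      // 0.8: hasher.input(bytes)
    hasher.finalize().to_vec() // 0.8: hasher.result()
}
```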
@@ -100,10 +100,10 @@ where

 pub fn hash_to_scalar(persona: &[u8], a: &[u8], b: &[u8]) -> G::Scalar {
   let mut hasher = Sha3_512::new();
-  hasher.input(persona);
-  hasher.input(a);
-  hasher.input(b);
-  let digest = hasher.result();
+  hasher.update(persona);
+  hasher.update(a);
+  hasher.update(b);
+  let digest = hasher.finalize();
   Self::to_uniform(digest.as_ref())
 }
}

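`hash_to_scalar` keeps its shape; only the digest calls are renamed. For context, a hedged, concrete sketch of the same pattern with the generic `G::Scalar` and `to_uniform` replaced by pasta_curves' Pallas scalar field and ff 0.13's `FromUniformBytes` (the concrete field choice is an assumption for illustration, not part of the commit):

```rust
use ff::FromUniformBytes;
use pasta_curves::Fq;
use sha3::{Digest, Sha3_512};

// Concrete stand-in for the generic G::Scalar + to_uniform pair: Pallas' scalar
// field Fq implements FromUniformBytes<64>, so a 64-byte SHA3-512 digest maps
// uniformly into the field (assumption mirroring the ff imports in this diff).
fn hash_to_pallas_scalar(persona: &[u8], a: &[u8], b: &[u8]) -> Fq {
    let mut hasher = Sha3_512::new();
    hasher.update(persona);
    hasher.update(a);
    hasher.update(b);
    let digest = hasher.finalize(); // 64 bytes for SHA3-512
    let bytes: &[u8; 64] = digest
        .as_slice()
        .try_into()
        .expect("SHA3-512 output is 64 bytes");
    Fq::from_uniform_bytes(bytes)
}
```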
@@ -1076,7 +1076,7 @@ mod tests {
 {
   let a = alloc_random_point(cs.namespace(|| "a")).unwrap();
   inputize_allocted_point(&a, cs.namespace(|| "inputize a")).unwrap();
-  let mut b = &mut a.clone();
+  let b = &mut a.clone();
   b.y = AllocatedNum::alloc(cs.namespace(|| "allocate negation of a"), || {
     Ok(G::Base::ZERO)
   })

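The only change in this hunk drops a redundant `mut` on the binding: mutating through a `&mut` reference does not require a mutable binding; `mut` would only be needed if `b` itself were re-pointed later. A tiny standalone illustration with made-up names:

```rust
fn demo_mut_binding() {
    let mut point = (1u64, 2u64);
    let b = &mut point; // the binding `b` is never reassigned, so no `mut` needed
    b.1 = 0;            // mutation through the &mut reference still works
    assert_eq!(point, (1, 0));
}
```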
@@ -769,8 +769,8 @@ fn compute_digest<G: Group, T: Serialize>(o: &T) -> G::Scalar {
 let bytes = bincode::serialize(o).unwrap();
 // convert pp_bytes into a short digest
 let mut hasher = Sha3_256::new();
-hasher.input(&bytes);
-let digest = hasher.result();
+hasher.update(&bytes);
+let digest = hasher.finalize();

 // truncate the digest to NUM_HASH_BITS bits
 let bv = (0..NUM_HASH_BITS).map(|i| {

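Same rename again, here for hashing the bincode-serialized object and truncating the digest to `NUM_HASH_BITS` bits. A hedged sketch of that shape; the 250-bit constant and the little-endian bit order are assumptions for illustration, not taken from this diff:

```rust
use sha3::{Digest, Sha3_256};

const NUM_HASH_BITS: usize = 250; // assumed value for illustration

fn truncated_digest_bits<T: serde::Serialize>(o: &T) -> Vec<bool> {
    let bytes = bincode::serialize(o).expect("serialization should not fail");
    let digest = Sha3_256::digest(&bytes); // one-shot update + finalize
    (0..NUM_HASH_BITS)
        .map(|i| (digest[i / 8] >> (i % 8)) & 1 == 1) // bit order assumed for the sketch
        .collect()
}
```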
@@ -8,7 +8,7 @@ use crate::{
   },
   traits::{CompressedGroup, Group, PrimeFieldExt, TranscriptReprTrait},
 };
-use digest::{ExtendableOutput, Input};
+use digest::{ExtendableOutput, Update};
 use ff::{FromUniformBytes, PrimeField};
 use num_bigint::BigInt;
 use num_traits::Num;

@@ -80,8 +80,8 @@ macro_rules! impl_traits {

 fn from_label(label: &'static [u8], n: usize) -> Vec<Self::PreprocessedGroupElement> {
   let mut shake = Shake256::default();
-  shake.input(label);
-  let mut reader = shake.xof_result();
+  shake.update(label);
+  let mut reader = shake.finalize_xof();
   let mut uniform_bytes_vec = Vec::new();
   for _ in 0..n {
     let mut uniform_bytes = [0u8; 32];

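For the SHAKE256 extendable-output path, digest 0.10 renames `Input::input` to `Update::update` and `xof_result` to `ExtendableOutput::finalize_xof`; the same edit repeats in the test helper below and in the pasta provider hunks further down. A self-contained sketch of the flow, with an illustrative helper name and a concrete `[u8; 32]` output type:

```rust
use digest::{ExtendableOutput, Update, XofReader};
use sha3::Shake256;

// Absorb a label, then squeeze n 32-byte chunks from the SHAKE256 XOF.
fn squeeze_labels(label: &[u8], n: usize) -> Vec<[u8; 32]> {
    let mut shake = Shake256::default();
    shake.update(label);               // 0.8: shake.input(label)
    let mut reader = shake.finalize_xof(); // 0.8: shake.xof_result()
    (0..n)
        .map(|_| {
            let mut uniform_bytes = [0u8; 32];
            reader.read(&mut uniform_bytes);
            uniform_bytes
        })
        .collect()
}
```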
@@ -216,8 +216,8 @@ mod tests {

 fn from_label_serial(label: &'static [u8], n: usize) -> Vec<Bn256Affine> {
   let mut shake = Shake256::default();
-  shake.input(label);
-  let mut reader = shake.xof_result();
+  shake.update(label);
+  let mut reader = shake.finalize_xof();
   let mut ck = Vec::new();
   for _ in 0..n {
     let mut uniform_bytes = [0u8; 32];

@@ -24,7 +24,7 @@ pub struct Keccak256Transcript<G: Group> {

 fn compute_updated_state(keccak_instance: Keccak256, input: &[u8]) -> [u8; KECCAK256_STATE_SIZE] {
   let mut updated_instance = keccak_instance;
-  updated_instance.input(input);
+  updated_instance.update(input);

   let input_lo = &[KECCAK256_PREFIX_CHALLENGE_LO];
   let input_hi = &[KECCAK256_PREFIX_CHALLENGE_HI];

@@ -32,11 +32,11 @@ fn compute_updated_state(keccak_instance: Keccak256, input: &[u8]) -> [u8; KECCA
   let mut hasher_lo = updated_instance.clone();
   let mut hasher_hi = updated_instance;

-  hasher_lo.input(input_lo);
-  hasher_hi.input(input_hi);
+  hasher_lo.update(input_lo);
+  hasher_hi.update(input_hi);

-  let output_lo = hasher_lo.result();
-  let output_hi = hasher_hi.result();
+  let output_lo = hasher_lo.finalize();
+  let output_hi = hasher_hi.finalize();

   [output_lo, output_hi]
     .concat()

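Putting the two transcript hunks together: the running Keccak256 state is forked into a LO and a HI branch, each absorbs a one-byte prefix, and the two 32-byte outputs are concatenated into the 64-byte transcript state. A hedged sketch of that pattern on the 0.10 API; the prefix values and the literal 64 (standing in for `KECCAK256_STATE_SIZE`) are assumptions for illustration:

```rust
use sha3::{Digest, Keccak256};

const KECCAK256_PREFIX_CHALLENGE_LO: u8 = 0; // assumed value
const KECCAK256_PREFIX_CHALLENGE_HI: u8 = 1; // assumed value

fn compute_updated_state(keccak_instance: Keccak256, input: &[u8]) -> [u8; 64] {
    // absorb the new input into the running state
    let mut updated_instance = keccak_instance;
    updated_instance.update(input);

    // fork the state and give each branch its own prefix byte
    let mut hasher_lo = updated_instance.clone();
    let mut hasher_hi = updated_instance;
    hasher_lo.update([KECCAK256_PREFIX_CHALLENGE_LO]);
    hasher_hi.update([KECCAK256_PREFIX_CHALLENGE_HI]);

    // two 32-byte Keccak256 outputs concatenated into one 64-byte state
    let output_lo = hasher_lo.finalize();
    let output_hi = hasher_hi.finalize();
    [output_lo, output_hi]
        .concat()
        .try_into()
        .expect("two 32-byte digests make 64 bytes")
}
```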
@@ -86,13 +86,13 @@ impl<G: Group> TranscriptEngineTrait<G> for Keccak256Transcript<G> {
   }

   fn absorb<T: TranscriptReprTrait<G>>(&mut self, label: &'static [u8], o: &T) {
-    self.transcript.input(label);
-    self.transcript.input(&o.to_transcript_bytes());
+    self.transcript.update(label);
+    self.transcript.update(&o.to_transcript_bytes());
   }

   fn dom_sep(&mut self, bytes: &'static [u8]) {
-    self.transcript.input(DOM_SEP_TAG);
-    self.transcript.input(bytes);
+    self.transcript.update(DOM_SEP_TAG);
+    self.transcript.update(bytes);
   }
 }

@@ -149,8 +149,8 @@ mod tests {
   #[test]
   fn test_keccak_example() {
     let mut hasher = Keccak256::new();
-    hasher.input(0xffffffff_u32.to_le_bytes());
-    let output: [u8; 32] = hasher.result().try_into().unwrap();
+    hasher.update(0xffffffff_u32.to_le_bytes());
+    let output: [u8; 32] = hasher.finalize().try_into().unwrap();
     assert_eq!(
       hex::encode(output),
       "29045a592007d0c246ef02c2223570da9522d0cf0f73282c79a1bc8f0bb2c238"

@@ -169,11 +169,11 @@ mod tests {
     let mut hasher_lo = Keccak256::new();
     let mut hasher_hi = Keccak256::new();

-    hasher_lo.input(&input_lo);
-    hasher_hi.input(&input_hi);
+    hasher_lo.update(&input_lo);
+    hasher_hi.update(&input_hi);

-    let output_lo = hasher_lo.result();
-    let output_hi = hasher_hi.result();
+    let output_lo = hasher_lo.finalize();
+    let output_hi = hasher_hi.finalize();

     [output_lo, output_hi]
       .concat()

@@ -213,7 +213,7 @@ mod tests {

     // add the scalars to the transcripts,
     let mut manual_transcript: Vec<u8> = vec![];
-    let labels = vec![
+    let labels = [
       b"s1", b"s2", b"s3", b"s4", b"s5", b"s6", b"s7", b"s8", b"s9", b"s0",
     ];

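The `vec!` here only ever gets iterated, so a plain array avoids the heap allocation; since Rust 2021, arrays iterate by value just like `Vec`. A small illustration with shortened, made-up labels:

```rust
fn demo_labels() {
    // A fixed-size array is enough when the labels are only read, never pushed to.
    let labels = [b"s1", b"s2", b"s3"];
    let mut transcript: Vec<u8> = vec![];
    for label in labels {
        transcript.extend_from_slice(label);
    }
    assert_eq!(transcript, b"s1s2s3".to_vec());
}
```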
@@ -8,7 +8,7 @@ use crate::{
   },
   traits::{CompressedGroup, Group, PrimeFieldExt, TranscriptReprTrait},
 };
-use digest::{ExtendableOutput, Input};
+use digest::{ExtendableOutput, Update};
 use ff::{FromUniformBytes, PrimeField};
 use num_bigint::BigInt;
 use num_traits::Num;

@@ -97,8 +97,8 @@ macro_rules! impl_traits {

 fn from_label(label: &'static [u8], n: usize) -> Vec<Self::PreprocessedGroupElement> {
   let mut shake = Shake256::default();
-  shake.input(label);
-  let mut reader = shake.xof_result();
+  shake.update(label);
+  let mut reader = shake.finalize_xof();
   let mut uniform_bytes_vec = Vec::new();
   for _ in 0..n {
     let mut uniform_bytes = [0u8; 32];

@@ -230,8 +230,8 @@ mod tests {

 fn from_label_serial(label: &'static [u8], n: usize) -> Vec<EpAffine> {
   let mut shake = Shake256::default();
-  shake.input(label);
-  let mut reader = shake.xof_result();
+  shake.update(label);
+  let mut reader = shake.finalize_xof();
   let mut ck = Vec::new();
   for _ in 0..n {
     let mut uniform_bytes = [0u8; 32];

@@ -250,7 +250,7 @@ mod tests {
     let io = z_i
       .clone()
       .into_iter()
-      .chain(z_i_plus_one.clone().into_iter())
+      .chain(z_i_plus_one.clone())
       .collect::<Vec<_>>();
     let res = snark.verify(&vk, &io);
     assert!(res.is_ok());

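The remaining hunks are one cleanup repeated: `Iterator::chain` accepts any `IntoIterator`, so the explicit `.into_iter()` on the argument is redundant, and newer clippy releases flag it. A minimal illustration:

```rust
fn demo_chain() {
    let head = vec![1u32, 2];
    let tail = [3u32, 4];
    // `chain` takes any `IntoIterator`, so the collection can be passed directly;
    // `.chain(tail.into_iter())` compiles too, but the conversion is implicit.
    let all: Vec<u32> = head.into_iter().chain(tail).collect();
    assert_eq!(all, vec![1, 2, 3, 4]);
}
```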
@@ -838,8 +838,8 @@ impl<G: Group, EE: EvaluationEngineTrait<G, CE = G::CE>> RelaxedR1CSSNARK<G, EE>
       let claims_inner = inner.initial_claims();
       claims_mem
         .into_iter()
-        .chain(claims_outer.into_iter())
-        .chain(claims_inner.into_iter())
+        .chain(claims_outer)
+        .chain(claims_inner)
         .collect::<Vec<G::Scalar>>()
     };

@@ -1176,9 +1176,9 @@ impl<G: Group, EE: EvaluationEngineTrait<G, CE = G::CE>> RelaxedR1CSSNARKTrait<G
       eval_Az, eval_Bz, eval_Cz, eval_E, eval_E_row, eval_E_col, eval_val_A, eval_val_B, eval_val_C,
     ]
     .into_iter()
-    .chain(eval_left_vec.clone().into_iter())
-    .chain(eval_right_vec.clone().into_iter())
-    .chain(eval_output_vec.clone().into_iter())
+    .chain(eval_left_vec.clone())
+    .chain(eval_right_vec.clone())
+    .chain(eval_output_vec.clone())
     .collect::<Vec<G::Scalar>>();

     // absorb all the claimed evaluations

@@ -1211,7 +1211,7 @@ impl<G: Group, EE: EvaluationEngineTrait<G, CE = G::CE>> RelaxedR1CSSNARKTrait<G
     let evals = eval_input_vec
       .clone()
       .into_iter()
-      .chain(eval_output2_vec.clone().into_iter())
+      .chain(eval_output2_vec.clone())
       .collect::<Vec<G::Scalar>>();
     transcript.absorb(b"e", &evals.as_slice());

@@ -1700,9 +1700,9 @@ impl<G: Group, EE: EvaluationEngineTrait<G, CE = G::CE>> RelaxedR1CSSNARKTrait<G
       self.eval_val_C,
     ]
     .into_iter()
-    .chain(self.eval_left_arr.into_iter())
-    .chain(self.eval_right_arr.into_iter())
-    .chain(self.eval_output_arr.into_iter())
+    .chain(self.eval_left_arr)
+    .chain(self.eval_right_arr)
+    .chain(self.eval_output_arr)
     .collect::<Vec<G::Scalar>>();

     transcript.absorb(b"e", &eval_vec.as_slice());

@@ -1722,7 +1722,7 @@ impl<G: Group, EE: EvaluationEngineTrait<G, CE = G::CE>> RelaxedR1CSSNARKTrait<G
     let evals = self
       .eval_input_arr
       .into_iter()
-      .chain(self.eval_output2_arr.into_iter())
+      .chain(self.eval_output2_arr)
      .collect::<Vec<G::Scalar>>();
     transcript.absorb(b"e", &evals.as_slice());