use backend::hal::{
    api::{
        MatZnxAlloc, ScalarZnxAlloc, ScratchAvailable, SvpPPolAlloc, SvpPrepare, TakeVecZnx, TakeVecZnxDft,
        VecZnxAddScalarInplace, VecZnxAllocBytes, ZnxView, ZnxViewMut,
    },
    layouts::{
        Backend, Data, DataMut, DataRef, Module, ReaderFrom, ScalarZnx, ScalarZnxToRef, Scratch, SvpPPol, WriterTo,
    },
};
use sampling::source::Source;

use crate::{
    Distribution, GGSWCiphertext, GGSWCiphertextExec, GGSWEncryptSkFamily, GGSWLayoutFamily, GLWESecretExec, Infos,
    LWESecret,
};

/// CGGI-style blind-rotation key: one GGSW ciphertext per coefficient of the LWE secret.
pub struct BlindRotationKeyCGGI<D: Data> {
    pub(crate) keys: Vec<GGSWCiphertext<D>>,
    pub(crate) dist: Distribution,
}

impl<D: DataRef> PartialEq for BlindRotationKeyCGGI<D> {
    fn eq(&self, other: &Self) -> bool {
        if self.keys.len() != other.keys.len() {
            return false;
        }
        for (a, b) in self.keys.iter().zip(other.keys.iter()) {
            if a != b {
                return false;
            }
        }
        self.dist == other.dist
    }
}

impl<D: DataRef> Eq for BlindRotationKeyCGGI<D> {}

use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};

impl<D: DataMut> ReaderFrom for BlindRotationKeyCGGI<D> {
    fn read_from<R: std::io::Read>(&mut self, reader: &mut R) -> std::io::Result<()> {
        self.dist = Distribution::read_from(reader)?;
        let len: usize = reader.read_u64::<LittleEndian>()? as usize;
        if self.keys.len() != len {
            return Err(std::io::Error::new(
                std::io::ErrorKind::InvalidData,
                format!("self.keys.len()={} != read len={}", self.keys.len(), len),
            ));
        }
        for key in &mut self.keys {
            key.read_from(reader)?;
        }
        Ok(())
    }
}

impl<D: DataRef> WriterTo for BlindRotationKeyCGGI<D> {
    fn write_to<W: std::io::Write>(&self, writer: &mut W) -> std::io::Result<()> {
        self.dist.write_to(writer)?;
        writer.write_u64::<LittleEndian>(self.keys.len() as u64)?;
        for key in &self.keys {
            key.write_to(writer)?;
        }
        Ok(())
    }
}

impl BlindRotationKeyCGGI<Vec<u8>> {
    pub fn alloc<B: Backend>(module: &Module<B>, n_lwe: usize, basek: usize, k: usize, rows: usize, rank: usize) -> Self
    where
        Module<B>: MatZnxAlloc,
    {
        let mut data: Vec<GGSWCiphertext<Vec<u8>>> = Vec::with_capacity(n_lwe);
        (0..n_lwe).for_each(|_| data.push(GGSWCiphertext::alloc(module, basek, k, rows, 1, rank)));
        Self {
            keys: data,
            dist: Distribution::NONE,
        }
    }

    pub fn generate_from_sk_scratch_space<B: Backend>(module: &Module<B>, basek: usize, k: usize, rank: usize) -> usize
    where
        Module<B>: GGSWEncryptSkFamily + VecZnxAllocBytes,
    {
        GGSWCiphertext::encrypt_sk_scratch_space(module, basek, k, rank)
    }
}

impl<D: DataRef> BlindRotationKeyCGGI<D> {
    #[allow(dead_code)]
    pub(crate) fn n(&self) -> usize {
        self.keys[0].n()
    }

    #[allow(dead_code)]
    pub(crate) fn rows(&self) -> usize {
        self.keys[0].rows()
    }

    #[allow(dead_code)]
    pub(crate) fn k(&self) -> usize {
        self.keys[0].k()
    }

    #[allow(dead_code)]
    pub(crate) fn size(&self) -> usize {
        self.keys[0].size()
    }

    #[allow(dead_code)]
    pub(crate) fn rank(&self) -> usize {
        self.keys[0].rank()
    }

    pub(crate) fn basek(&self) -> usize {
        self.keys[0].basek()
    }

    /// Block size of the block-binary LWE secret this key was generated for (1 for all other distributions).
    #[allow(dead_code)]
    pub(crate) fn block_size(&self) -> usize {
        match self.dist {
            Distribution::BinaryBlock(value) => value,
            _ => 1,
        }
    }
}

impl<DataSelf: DataMut> BlindRotationKeyCGGI<DataSelf> {
    /// Encrypts each coefficient of `sk_lwe` as a GGSW ciphertext under `sk_glwe`.
    pub fn generate_from_sk<DataSkGLWE, DataSkLWE, B: Backend>(
        &mut self,
        module: &Module<B>,
        sk_glwe: &GLWESecretExec<DataSkGLWE, B>,
        sk_lwe: &LWESecret<DataSkLWE>,
        source_xa: &mut Source,
        source_xe: &mut Source,
        sigma: f64,
        scratch: &mut Scratch<B>,
    ) where
        DataSkGLWE: DataRef,
        DataSkLWE: DataRef,
        Module<B>: GGSWEncryptSkFamily + ScalarZnxAlloc + VecZnxAddScalarInplace,
        Scratch<B>: TakeVecZnxDft + ScratchAvailable + TakeVecZnx,
    {
        #[cfg(debug_assertions)]
        {
            assert_eq!(self.keys.len(), sk_lwe.n());
            assert_eq!(sk_glwe.n(), module.n());
            assert_eq!(sk_glwe.rank(), self.keys[0].rank());
            match sk_lwe.dist {
                Distribution::BinaryBlock(_)
                | Distribution::BinaryFixed(_)
                | Distribution::BinaryProb(_)
                | Distribution::ZERO => {}
                _ => panic!(
                    "invalid LWESecret distribution: must be BinaryBlock, BinaryFixed or BinaryProb (or ZERO for debugging)"
                ),
            }
        }

        self.dist = sk_lwe.dist;

        let mut pt: ScalarZnx<Vec<u8>> = module.scalar_znx_alloc(1);
        let sk_ref: ScalarZnx<&[u8]> = sk_lwe.data.to_ref();

        self.keys.iter_mut().enumerate().for_each(|(i, ggsw)| {
            // Plaintext is the constant polynomial equal to the i-th LWE secret coefficient.
            pt.at_mut(0, 0)[0] = sk_ref.at(0, 0)[i];
            ggsw.encrypt_sk(module, &pt, sk_glwe, source_xa, source_xe, sigma, scratch);
        });
    }
}

#[derive(PartialEq, Eq)]
pub struct BlindRotationKeyCGGIExec<D: Data, B: Backend> {
    pub(crate) data: Vec<GGSWCiphertextExec<D, B>>,
    pub(crate) dist: Distribution,
    pub(crate) x_pow_a: Option<Vec<SvpPPol<Vec<u8>, B>>>,
}

impl<D: DataRef, B: Backend> BlindRotationKeyCGGIExec<D, B> {
    #[allow(dead_code)]
    pub(crate) fn n(&self) -> usize {
        self.data[0].n()
    }

    #[allow(dead_code)]
    pub(crate) fn rows(&self) -> usize {
        self.data[0].rows()
    }

    #[allow(dead_code)]
    pub(crate) fn k(&self) -> usize {
        self.data[0].k()
    }

    #[allow(dead_code)]
    pub(crate) fn size(&self) -> usize {
        self.data[0].size()
    }

    #[allow(dead_code)]
    pub(crate) fn rank(&self) -> usize {
        self.data[0].rank()
    }

    pub(crate) fn basek(&self) -> usize {
        self.data[0].basek()
    }

    pub(crate) fn block_size(&self) -> usize {
        match self.dist {
            Distribution::BinaryBlock(value) => value,
            _ => 1,
        }
    }
}

pub trait BlindRotationKeyCGGIExecLayoutFamily = GGSWLayoutFamily + SvpPPolAlloc + SvpPrepare;

impl<B: Backend> BlindRotationKeyCGGIExec<Vec<u8>, B> {
    pub fn alloc(module: &Module<B>, n_lwe: usize, basek: usize, k: usize, rows: usize, rank: usize) -> Self
    where
        Module<B>: BlindRotationKeyCGGIExecLayoutFamily,
    {
        let mut data: Vec<GGSWCiphertextExec<Vec<u8>, B>> = Vec::with_capacity(n_lwe);
        (0..n_lwe).for_each(|_| data.push(GGSWCiphertextExec::alloc(module, basek, k, rows, 1, rank)));
        Self {
            data,
            dist: Distribution::NONE,
            x_pow_a: None,
        }
    }

    /// Allocates a new key in the backend layout and prepares it from `other`.
    pub fn from<DataOther>(
        module: &Module<B>,
        other: &BlindRotationKeyCGGI<DataOther>,
        scratch: &mut Scratch<B>,
    ) -> Self
    where
        DataOther: DataRef,
        Module<B>: BlindRotationKeyCGGIExecLayoutFamily + ScalarZnxAlloc,
    {
        let mut brk: BlindRotationKeyCGGIExec<Vec<u8>, B> = Self::alloc(
            module,
            other.keys.len(),
            other.basek(),
            other.k(),
            other.rows(),
            other.rank(),
        );
        brk.prepare(module, other, scratch);
        brk
    }
}

impl<D: DataMut, B: Backend> BlindRotationKeyCGGIExec<D, B> {
    /// Prepares each GGSW ciphertext of `other` into the backend layout and, for block-binary
    /// keys, precomputes the table of prepared monomials X^i for i in [0, 2n).
    pub fn prepare<DataOther>(
        &mut self,
        module: &Module<B>,
        other: &BlindRotationKeyCGGI<DataOther>,
        scratch: &mut Scratch<B>,
    ) where
        DataOther: DataRef,
        Module<B>: BlindRotationKeyCGGIExecLayoutFamily + ScalarZnxAlloc,
    {
        #[cfg(debug_assertions)]
        {
            assert_eq!(self.data.len(), other.keys.len());
        }

        self.data
            .iter_mut()
            .zip(other.keys.iter())
            .for_each(|(ggsw_exec, other)| {
                ggsw_exec.prepare(module, other, scratch);
            });

        self.dist = other.dist;

        match other.dist {
            Distribution::BinaryBlock(_) => {
                let mut x_pow_a: Vec<SvpPPol<Vec<u8>, B>> = Vec::with_capacity(module.n() << 1);
                let mut buf: ScalarZnx<Vec<u8>> = module.scalar_znx_alloc(1);
                (0..module.n() << 1).for_each(|i| {
                    let mut res: SvpPPol<Vec<u8>, B> = module.svp_ppol_alloc(1);
                    set_xai_plus_y(module, i, 0, &mut res, &mut buf);
                    x_pow_a.push(res);
                });
                self.x_pow_a = Some(x_pow_a);
            }
            _ => {}
        }
    }
}

/// Sets `res` to the prepared scalar polynomial X^ai + y over Z[X]/(X^n + 1), using `buf`
/// as a temporary that is zeroed again before returning.
pub fn set_xai_plus_y<B: Backend, A, C>(
    module: &Module<B>,
    ai: usize,
    y: i64,
    res: &mut SvpPPol<A, B>,
    buf: &mut ScalarZnx<C>,
) where
    A: DataMut,
    C: DataMut,
    Module<B>: SvpPrepare,
{
    let n: usize = module.n();

    // Write X^ai + y into the buffer; negacyclically, X^(n+j) = -X^j.
    {
        let raw: &mut [i64] = buf.at_mut(0, 0);
        if ai < n {
            raw[ai] = 1;
        } else {
            raw[(ai - n) & (n - 1)] = -1;
        }
        raw[0] += y;
    }

    module.svp_prepare(res, 0, buf, 0);

    // Restore the buffer to zero so the caller can reuse it.
    {
        let raw: &mut [i64] = buf.at_mut(0, 0);
        if ai < n {
            raw[ai] = 0;
        } else {
            raw[(ai - n) & (n - 1)] = 0;
        }
        raw[0] = 0;
    }
}
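
// Illustrative sketch only (not part of the library API): the standalone helper below mirrors
// the coefficient placement that `set_xai_plus_y` performs before calling `svp_prepare`, i.e.
// embedding X^ai + y into Z[X]/(X^n + 1). For ai in [0, n) the coefficient at index ai is +1;
// for ai in [n, 2n) the monomial wraps negacyclically and the coefficient at index ai - n is -1.
// The name `negacyclic_monomial` is hypothetical and local to this test; it assumes n is a
// power of two and 0 <= ai < 2n, matching the assumptions of `set_xai_plus_y`.
#[cfg(test)]
mod xai_embedding_sketch {
    // Hypothetical reference helper: coefficients of X^ai + y mod (X^n + 1).
    fn negacyclic_monomial(n: usize, ai: usize, y: i64) -> Vec<i64> {
        let mut coeffs = vec![0i64; n];
        if ai < n {
            coeffs[ai] += 1;
        } else {
            coeffs[(ai - n) & (n - 1)] -= 1;
        }
        coeffs[0] += y;
        coeffs
    }

    #[test]
    fn wraps_with_sign_flip() {
        let n = 8;
        // X^3: +1 at index 3.
        assert_eq!(negacyclic_monomial(n, 3, 0)[3], 1);
        // X^(n+3) = -X^3 mod (X^n + 1): -1 at index 3.
        assert_eq!(negacyclic_monomial(n, n + 3, 0)[3], -1);
        // X^0 + 1: constant coefficient is 2.
        assert_eq!(negacyclic_monomial(n, 0, 1)[0], 2);
    }
}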