Mirror of https://github.com/arnaucube/poulpy.git
Added basic GLWE ops
@@ -168,7 +168,7 @@ impl AutomorphismKey<Vec<u8>, FFT64> {
 
 impl<DataSelf> AutomorphismKey<DataSelf, FFT64>
 where
-    MatZnxDft<DataSelf, FFT64>: MatZnxDftToMut<FFT64> + MatZnxDftToRef<FFT64>,
+    MatZnxDft<DataSelf, FFT64>: MatZnxDftToMut<FFT64>,
 {
     pub fn generate_from_sk<DataSk>(
         &mut self,
@@ -221,7 +221,7 @@ where
 
 impl<DataSelf> AutomorphismKey<DataSelf, FFT64>
 where
-    MatZnxDft<DataSelf, FFT64>: MatZnxDftToMut<FFT64> + MatZnxDftToRef<FFT64>,
+    MatZnxDft<DataSelf, FFT64>: MatZnxDftToMut<FFT64>,
 {
     pub fn automorphism<DataLhs, DataRhs>(
         &mut self,
@@ -27,6 +27,10 @@ pub trait Infos {
         self.inner().cols()
     }
 
+    fn rank(&self) -> usize {
+        self.cols() - 1
+    }
+
     /// Returns the number of size per polynomial.
     fn size(&self) -> usize {
         let size: usize = self.inner().size();
@@ -46,6 +50,11 @@ pub trait Infos {
     fn k(&self) -> usize;
 }
 
+pub trait SetMetaData {
+    fn set_basek(&mut self, basek: usize);
+    fn set_k(&mut self, k: usize);
+}
+
 pub trait GetRow<B: Backend> {
     fn get_row<R>(&self, module: &Module<B>, row_i: usize, col_j: usize, res: &mut R)
     where
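A GLWE ciphertext of rank r stores r mask polynomials plus one body polynomial, which is why the new default method reads rank() = cols() - 1. Below is a standalone, illustrative sketch of how the rank() helper and the SetMetaData setters are meant to interact; ToyCiphertext and the *Like traits are hypothetical stand-ins written for this note, not the crate's real types.

// Toy mirror of the Infos::rank() default method added above.
trait InfosLike {
    fn cols(&self) -> usize;
    fn rank(&self) -> usize {
        self.cols() - 1
    }
}

// Toy mirror of the new SetMetaData trait.
trait SetMetaDataLike {
    fn set_basek(&mut self, basek: usize);
    fn set_k(&mut self, k: usize);
}

struct ToyCiphertext {
    cols: usize,
    basek: usize,
    k: usize,
}

impl InfosLike for ToyCiphertext {
    fn cols(&self) -> usize {
        self.cols
    }
}

impl SetMetaDataLike for ToyCiphertext {
    fn set_basek(&mut self, basek: usize) {
        self.basek = basek
    }
    fn set_k(&mut self, k: usize) {
        self.k = k
    }
}

fn main() {
    // A rank-2 ciphertext: 2 mask columns + 1 body column.
    let mut ct = ToyCiphertext { cols: 3, basek: 0, k: 0 };
    assert_eq!(ct.rank(), 2);
    // Operations that change the precision update the metadata through the setters.
    ct.set_basek(18);
    ct.set_k(54);
}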
@@ -1,8 +1,8 @@
 use backend::{
     Backend, FFT64, MatZnxDft, MatZnxDftAlloc, MatZnxDftOps, MatZnxDftScratch, MatZnxDftToMut, MatZnxDftToRef, Module, ScalarZnx,
     ScalarZnxDft, ScalarZnxDftToRef, ScalarZnxToRef, Scratch, VecZnx, VecZnxAlloc, VecZnxBigAlloc, VecZnxBigOps,
-    VecZnxBigScratch, VecZnxDft, VecZnxDftAlloc, VecZnxDftOps, VecZnxDftToMut, VecZnxDftToRef, VecZnxOps, VecZnxToMut,
-    VecZnxToRef, ZnxInfos, ZnxZero,
+    VecZnxBigScratch, VecZnxDft, VecZnxDftAlloc, VecZnxDftOps, VecZnxDftToMut, VecZnxDftToRef, VecZnxOps, VecZnxToMut, ZnxInfos,
+    ZnxZero,
 };
 use sampling::source::Source;
 
@@ -196,7 +196,7 @@ impl GGSWCiphertext<Vec<u8>, FFT64> {
 
 impl<DataSelf> GGSWCiphertext<DataSelf, FFT64>
 where
-    MatZnxDft<DataSelf, FFT64>: MatZnxDftToMut<FFT64> + MatZnxDftToRef<FFT64>,
+    MatZnxDft<DataSelf, FFT64>: MatZnxDftToMut<FFT64>,
 {
     pub fn encrypt_sk<DataPt, DataSk>(
         &mut self,
@@ -639,7 +639,7 @@ where
         ksk: &GLWESwitchingKey<DataKsk, FFT64>,
         scratch: &mut Scratch,
     ) where
-        VecZnx<DataRes>: VecZnxToMut + VecZnxToRef,
+        VecZnx<DataRes>: VecZnxToMut,
         MatZnxDft<DataKsk, FFT64>: MatZnxDftToRef<FFT64>,
     {
         #[cfg(debug_assertions)]
@@ -2,16 +2,17 @@ use backend::{
     AddNormal, Backend, FFT64, FillUniform, MatZnxDft, MatZnxDftOps, MatZnxDftScratch, MatZnxDftToRef, Module, ScalarZnxAlloc,
     ScalarZnxDft, ScalarZnxDftAlloc, ScalarZnxDftOps, ScalarZnxDftToRef, Scratch, VecZnx, VecZnxAlloc, VecZnxBig, VecZnxBigAlloc,
     VecZnxBigOps, VecZnxBigScratch, VecZnxDft, VecZnxDftAlloc, VecZnxDftOps, VecZnxDftToMut, VecZnxDftToRef, VecZnxOps,
-    VecZnxToMut, VecZnxToRef, ZnxInfos, ZnxZero, copy_vec_znx_from,
+    VecZnxToMut, VecZnxToRef, ZnxInfos, ZnxZero,
 };
 use sampling::source::Source;
 
 use crate::{
     SIX_SIGMA,
     automorphism::AutomorphismKey,
-    elem::Infos,
+    elem::{Infos, SetMetaData},
     ggsw_ciphertext::GGSWCiphertext,
     glwe_ciphertext_fourier::GLWECiphertextFourier,
+    glwe_ops::GLWEOps,
     glwe_plaintext::GLWEPlaintext,
     keys::{GLWEPublicKey, SecretDistribution, SecretKeyFourier},
     keyswitch_key::GLWESwitchingKey,
@@ -201,9 +202,24 @@ impl GLWECiphertext<Vec<u8>> {
     }
 }
 
+impl<DataSelf> SetMetaData for GLWECiphertext<DataSelf>
+where
+    VecZnx<DataSelf>: VecZnxToMut,
+{
+    fn set_k(&mut self, k: usize) {
+        self.k = k
+    }
+
+    fn set_basek(&mut self, basek: usize) {
+        self.basek = basek
+    }
+}
+
+impl<DataSelf> GLWEOps<FFT64> for GLWECiphertext<DataSelf> where VecZnx<DataSelf>: VecZnxToMut {}
+
 impl<DataSelf> GLWECiphertext<DataSelf>
 where
-    VecZnx<DataSelf>: VecZnxToMut + VecZnxToRef,
+    VecZnx<DataSelf>: VecZnxToMut,
 {
     pub fn encrypt_sk<DataPt, DataSk>(
         &mut self,
@@ -281,21 +297,6 @@ where
         self.encrypt_pk_private(module, None, pk, source_xu, source_xe, sigma, scratch);
     }
 
-    pub fn copy<DataOther>(&mut self, other: &GLWECiphertext<DataOther>)
-    where
-        VecZnx<DataOther>: VecZnxToRef,
-    {
-        copy_vec_znx_from(&mut self.data.to_mut(), &other.to_ref());
-        self.k = other.k;
-        self.basek = other.basek;
-    }
-
-    pub fn rsh(&mut self, k: usize, scratch: &mut Scratch) {
-        let basek: usize = self.basek();
-        let mut self_mut: VecZnx<&mut [u8]> = self.data.to_mut();
-        self_mut.rsh(basek, k, scratch);
-    }
-
     pub fn automorphism<DataLhs, DataRhs>(
         &mut self,
         module: &Module<FFT64>,
@@ -1,7 +1,7 @@
 use backend::{
     Backend, FFT64, MatZnxDft, MatZnxDftOps, MatZnxDftScratch, MatZnxDftToRef, Module, ScalarZnxDft, ScalarZnxDftOps,
     ScalarZnxDftToRef, Scratch, VecZnx, VecZnxAlloc, VecZnxBig, VecZnxBigAlloc, VecZnxBigOps, VecZnxBigScratch, VecZnxDft,
-    VecZnxDftAlloc, VecZnxDftOps, VecZnxDftToMut, VecZnxDftToRef, VecZnxToMut, VecZnxToRef, ZnxZero,
+    VecZnxDftAlloc, VecZnxDftOps, VecZnxDftToMut, VecZnxDftToRef, VecZnxToMut, ZnxZero,
 };
 use sampling::source::Source;
 
@@ -126,7 +126,7 @@ impl GLWECiphertextFourier<Vec<u8>, FFT64> {
 
 impl<DataSelf> GLWECiphertextFourier<DataSelf, FFT64>
 where
-    VecZnxDft<DataSelf, FFT64>: VecZnxDftToMut<FFT64> + VecZnxDftToRef<FFT64>,
+    VecZnxDft<DataSelf, FFT64>: VecZnxDftToMut<FFT64>,
 {
     pub fn encrypt_zero_sk<DataSk>(
         &mut self,
@@ -261,7 +261,7 @@ where
         sk_dft: &SecretKeyFourier<DataSk, FFT64>,
         scratch: &mut Scratch,
     ) where
-        VecZnx<DataPt>: VecZnxToMut + VecZnxToRef,
+        VecZnx<DataPt>: VecZnxToMut,
         ScalarZnxDft<DataSk, FFT64>: ScalarZnxDftToRef<FFT64>,
     {
         #[cfg(debug_assertions)]
core/src/glwe_ops.rs (new file, 213 lines)
@@ -0,0 +1,213 @@
+use backend::{Backend, Module, Scratch, VecZnx, VecZnxOps, VecZnxToMut, VecZnxToRef, ZnxZero};
+
+use crate::elem::{Infos, SetMetaData};
+
+pub trait GLWEOps<BACKEND: Backend>
+where
+    Self: Sized + VecZnxToMut + SetMetaData + Infos,
+{
+    fn add<A, B>(&mut self, module: &Module<BACKEND>, a: &A, b: &B)
+    where
+        A: VecZnxToRef + Infos,
+        B: VecZnxToRef + Infos,
+    {
+        #[cfg(debug_assertions)]
+        {
+            assert_eq!(a.n(), module.n());
+            assert_eq!(b.n(), module.n());
+            assert_eq!(self.n(), module.n());
+            assert_eq!(a.basek(), b.basek());
+            assert!(self.rank() >= a.rank().max(b.rank()));
+        }
+
+        let min_col: usize = a.rank().min(b.rank()) + 1;
+        let max_col: usize = a.rank().max(b.rank() + 1);
+        let self_col: usize = self.rank() + 1;
+
+        (0..min_col).for_each(|i| {
+            module.vec_znx_add(self, i, a, i, b, i);
+        });
+
+        if a.rank() > b.rank() {
+            (min_col..max_col).for_each(|i| {
+                module.vec_znx_copy(self, i, a, i);
+            });
+        } else {
+            (min_col..max_col).for_each(|i| {
+                module.vec_znx_copy(self, i, b, i);
+            });
+        }
+
+        let size: usize = self.size();
+        let mut self_mut: VecZnx<&mut [u8]> = self.to_mut();
+        (max_col..self_col).for_each(|i| {
+            (0..size).for_each(|j| {
+                self_mut.zero_at(i, j);
+            });
+        });
+
+        self.set_basek(a.basek());
+        self.set_k(a.k().max(b.k()));
+    }
+
+    fn add_inplace<A>(&mut self, module: &Module<BACKEND>, a: &A)
+    where
+        A: VecZnxToRef + Infos,
+    {
+        #[cfg(debug_assertions)]
+        {
+            assert_eq!(a.n(), module.n());
+            assert_eq!(self.n(), module.n());
+            assert_eq!(self.basek(), a.basek());
+            assert!(self.rank() >= a.rank())
+        }
+
+        (0..a.rank() + 1).for_each(|i| {
+            module.vec_znx_add_inplace(self, i, a, i);
+        });
+
+        self.set_k(a.k().max(self.k()));
+    }
+
+    fn sub<A, B>(&mut self, module: &Module<BACKEND>, a: &A, b: &B)
+    where
+        A: VecZnxToRef + Infos,
+        B: VecZnxToRef + Infos,
+    {
+        #[cfg(debug_assertions)]
+        {
+            assert_eq!(a.n(), module.n());
+            assert_eq!(b.n(), module.n());
+            assert_eq!(self.n(), module.n());
+            assert_eq!(a.basek(), b.basek());
+            assert!(self.rank() >= a.rank().max(b.rank()));
+        }
+
+        let min_col: usize = a.rank().min(b.rank()) + 1;
+        let max_col: usize = a.rank().max(b.rank() + 1);
+        let self_col: usize = self.rank() + 1;
+
+        (0..min_col).for_each(|i| {
+            module.vec_znx_sub(self, i, a, i, b, i);
+        });
+
+        if a.rank() > b.rank() {
+            (min_col..max_col).for_each(|i| {
+                module.vec_znx_copy(self, i, a, i);
+            });
+        } else {
+            (min_col..max_col).for_each(|i| {
+                module.vec_znx_copy(self, i, b, i);
+                module.vec_znx_negate_inplace(self, i);
+            });
+        }
+
+        let size: usize = self.size();
+        let mut self_mut: VecZnx<&mut [u8]> = self.to_mut();
+        (max_col..self_col).for_each(|i| {
+            (0..size).for_each(|j| {
+                self_mut.zero_at(i, j);
+            });
+        });
+
+        self.set_basek(a.basek());
+        self.set_k(a.k().max(b.k()));
+    }
+
+    fn sub_inplace_ab<A>(&mut self, module: &Module<BACKEND>, a: &A)
+    where
+        A: VecZnxToRef + Infos,
+    {
+        #[cfg(debug_assertions)]
+        {
+            assert_eq!(a.n(), module.n());
+            assert_eq!(self.n(), module.n());
+            assert_eq!(self.basek(), a.basek());
+            assert!(self.rank() >= a.rank())
+        }
+
+        (0..a.rank() + 1).for_each(|i| {
+            module.vec_znx_sub_ab_inplace(self, i, a, i);
+        });
+
+        self.set_k(a.k().max(self.k()));
+    }
+
+    fn sub_inplace_ba<A>(&mut self, module: &Module<BACKEND>, a: &A)
+    where
+        A: VecZnxToRef + Infos,
+    {
+        #[cfg(debug_assertions)]
+        {
+            assert_eq!(a.n(), module.n());
+            assert_eq!(self.n(), module.n());
+            assert_eq!(self.basek(), a.basek());
+            assert!(self.rank() >= a.rank())
+        }
+
+        (0..a.rank() + 1).for_each(|i| {
+            module.vec_znx_sub_ba_inplace(self, i, a, i);
+        });
+
+        self.set_k(a.k().max(self.k()));
+    }
+
+    fn rotate<A>(&mut self, module: &Module<BACKEND>, k: i64, a: &A)
+    where
+        A: VecZnxToRef + Infos,
+    {
+        #[cfg(debug_assertions)]
+        {
+            assert_eq!(a.n(), module.n());
+            assert_eq!(self.n(), module.n());
+            assert_eq!(self.basek(), a.basek());
+            assert_eq!(self.rank(), a.rank())
+        }
+
+        (0..a.rank() + 1).for_each(|i| {
+            module.vec_znx_rotate(k, self, i, a, i);
+        });
+
+        self.set_k(a.k());
+    }
+
+    fn rotate_inplace<A>(&mut self, module: &Module<BACKEND>, k: i64)
+    where
+        A: VecZnxToRef + Infos,
+    {
+        #[cfg(debug_assertions)]
+        {
+            assert_eq!(self.n(), module.n());
+        }
+
+        (0..self.rank() + 1).for_each(|i| {
+            module.vec_znx_rotate_inplace(k, self, i);
+        });
+    }
+
+    fn copy<A>(&mut self, module: &Module<BACKEND>, a: &A)
+    where
+        A: VecZnxToRef + Infos,
+    {
+        #[cfg(debug_assertions)]
+        {
+            assert_eq!(self.n(), module.n());
+            assert_eq!(a.n(), module.n());
+        }
+
+        let cols: usize = self.rank().min(a.rank()) + 1;
+
+        (0..cols).for_each(|i| {
+            module.vec_znx_copy(self, i, a, i);
+        });
+
+        self.set_k(a.k());
+        self.set_basek(a.basek());
+    }
+
+    fn rsh(&mut self, k: usize, scratch: &mut Scratch) {
+        let basek: usize = self.basek();
+        let mut self_mut: VecZnx<&mut [u8]> = self.to_mut();
+        self_mut.rsh(basek, k, scratch);
+    }
+}
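All of the methods above are default implementations, so a single empty impl (as added to glwe_ciphertext.rs earlier in this commit) gives GLWECiphertext the whole operation set. The sketch below is a hedged usage example written as if it lived inside the same crate: `accumulate` is a hypothetical helper introduced for illustration, and only items that appear in this diff (backend::{Backend, Module, VecZnxToRef}, crate::elem::Infos, crate::glwe_ops::GLWEOps) are referenced.

use backend::{Backend, Module, VecZnxToRef};

use crate::elem::Infos;
use crate::glwe_ops::GLWEOps;

// Fold a slice of GLWE-like operands into `acc` using the new trait.
// Each call requires acc.rank() >= a.rank() and a matching basek
// (checked by the debug assertions in `add_inplace`), and bumps
// acc's k to max(acc.k, a.k).
fn accumulate<B: Backend, C, A>(module: &Module<B>, acc: &mut C, operands: &[A])
where
    C: GLWEOps<B>,
    A: VecZnxToRef + Infos,
{
    for a in operands {
        acc.add_inplace(module, a);
    }
}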
@@ -217,7 +217,7 @@ impl<C> GLWEPublicKey<C, FFT64> {
         source_xe: &mut Source,
         sigma: f64,
     ) where
-        VecZnxDft<C, FFT64>: VecZnxDftToMut<FFT64> + VecZnxDftToRef<FFT64>,
+        VecZnxDft<C, FFT64>: VecZnxDftToMut<FFT64>,
         ScalarZnxDft<S, FFT64>: ScalarZnxDftToRef<FFT64> + ZnxInfos,
     {
         #[cfg(debug_assertions)]
@@ -149,7 +149,7 @@ impl GLWESwitchingKey<Vec<u8>, FFT64> {
 }
 impl<DataSelf> GLWESwitchingKey<DataSelf, FFT64>
 where
-    MatZnxDft<DataSelf, FFT64>: MatZnxDftToMut<FFT64> + MatZnxDftToRef<FFT64>,
+    MatZnxDft<DataSelf, FFT64>: MatZnxDftToMut<FFT64>,
 {
     pub fn encrypt_sk<DataSkIn, DataSkOut>(
         &mut self,
@@ -4,6 +4,7 @@ pub mod gglwe_ciphertext;
 pub mod ggsw_ciphertext;
 pub mod glwe_ciphertext;
 pub mod glwe_ciphertext_fourier;
+pub mod glwe_ops;
 pub mod glwe_plaintext;
 pub mod keys;
 pub mod keyswitch_key;
@@ -63,7 +63,7 @@ impl TensorKey<Vec<u8>, FFT64> {
 
 impl<DataSelf> TensorKey<DataSelf, FFT64>
 where
-    MatZnxDft<DataSelf, FFT64>: MatZnxDftToMut<FFT64> + MatZnxDftToRef<FFT64>,
+    MatZnxDft<DataSelf, FFT64>: MatZnxDftToMut<FFT64>,
 {
     pub fn encrypt_sk<DataSk>(
         &mut self,
@@ -110,7 +110,8 @@ fn test_encrypt_sk(log_n: usize, basek: usize, k_ksk: usize, sigma: f64, rank_in
         scratch.borrow(),
     );
 
-    let mut ct_glwe_fourier: GLWECiphertextFourier<Vec<u8>, FFT64> = GLWECiphertextFourier::alloc(&module, basek, k_ksk, rank_out);
+    let mut ct_glwe_fourier: GLWECiphertextFourier<Vec<u8>, FFT64> =
+        GLWECiphertextFourier::alloc(&module, basek, k_ksk, rank_out);
 
     (0..ksk.rank_in()).for_each(|col_i| {
         (0..ksk.rows()).for_each(|row_i| {
@@ -202,7 +203,8 @@ fn test_key_switch(
     // gglwe_{s1}(s0) (x) gglwe_{s2}(s1) = gglwe_{s2}(s0)
     ct_gglwe_s0s2.keyswitch(&module, &ct_gglwe_s0s1, &ct_gglwe_s1s2, scratch.borrow());
 
-    let mut ct_glwe_dft: GLWECiphertextFourier<Vec<u8>, FFT64> = GLWECiphertextFourier::alloc(&module, basek, k_ksk, rank_out_s1s2);
+    let mut ct_glwe_dft: GLWECiphertextFourier<Vec<u8>, FFT64> =
+        GLWECiphertextFourier::alloc(&module, basek, k_ksk, rank_out_s1s2);
     let mut pt: GLWEPlaintext<Vec<u8>> = GLWEPlaintext::alloc(&module, basek, k_ksk);
 
     (0..ct_gglwe_s0s2.rank_in()).for_each(|col_i| {
@@ -304,7 +306,8 @@ fn test_key_switch_inplace(log_n: usize, basek: usize, k_ksk: usize, sigma: f64,
 
     let ct_gglwe_s0s2: GLWESwitchingKey<Vec<u8>, FFT64> = ct_gglwe_s0s1;
 
-    let mut ct_glwe_dft: GLWECiphertextFourier<Vec<u8>, FFT64> = GLWECiphertextFourier::alloc(&module, basek, k_ksk, rank_out_s0s1);
+    let mut ct_glwe_dft: GLWECiphertextFourier<Vec<u8>, FFT64> =
+        GLWECiphertextFourier::alloc(&module, basek, k_ksk, rank_out_s0s1);
     let mut pt: GLWEPlaintext<Vec<u8>> = GLWEPlaintext::alloc(&module, basek, k_ksk);
 
     (0..ct_gglwe_s0s2.rank_in()).for_each(|col_i| {
@@ -61,7 +61,8 @@ fn test_keyswitch(
 
     let mut ksk: GLWESwitchingKey<Vec<u8>, FFT64> = GLWESwitchingKey::alloc(&module, basek, k_ksk, rows, rank_in, rank_out);
     let mut ct_glwe_in: GLWECiphertext<Vec<u8>> = GLWECiphertext::alloc(&module, basek, k_ct_in, rank_in);
-    let mut ct_glwe_dft_in: GLWECiphertextFourier<Vec<u8>, FFT64> = GLWECiphertextFourier::alloc(&module, basek, k_ct_in, rank_in);
+    let mut ct_glwe_dft_in: GLWECiphertextFourier<Vec<u8>, FFT64> =
+        GLWECiphertextFourier::alloc(&module, basek, k_ct_in, rank_in);
     let mut ct_glwe_out: GLWECiphertext<Vec<u8>> = GLWECiphertext::alloc(&module, basek, k_ct_out, rank_out);
     let mut ct_glwe_dft_out: GLWECiphertextFourier<Vec<u8>, FFT64> =
         GLWECiphertextFourier::alloc(&module, basek, k_ct_out, rank_out);
@@ -1,4 +1,6 @@
-use backend::{Module, ScalarZnx, ScalarZnxDft, ScalarZnxDftAlloc, ScalarZnxDftOps, ScratchOwned, Stats, VecZnxDftOps, VecZnxOps, FFT64};
+use backend::{
+    FFT64, Module, ScalarZnx, ScalarZnxDft, ScalarZnxDftAlloc, ScalarZnxDftOps, ScratchOwned, Stats, VecZnxDftOps, VecZnxOps,
+};
 use sampling::source::Source;
 
 use crate::{
@@ -2,7 +2,7 @@ use std::collections::HashMap;
 
 use backend::{FFT64, MatZnxDft, MatZnxDftToRef, Module, Scratch, VecZnx, VecZnxToMut, VecZnxToRef};
 
-use crate::{automorphism::AutomorphismKey, glwe_ciphertext::GLWECiphertext};
+use crate::{automorphism::AutomorphismKey, glwe_ciphertext::GLWECiphertext, glwe_ops::GLWEOps};
 
 impl GLWECiphertext<Vec<u8>> {
     pub fn trace_galois_elements(module: &Module<FFT64>) -> Vec<i64> {
@@ -34,7 +34,7 @@ impl GLWECiphertext<Vec<u8>> {
 
 impl<DataSelf> GLWECiphertext<DataSelf>
 where
-    VecZnx<DataSelf>: VecZnxToMut + VecZnxToRef,
+    VecZnx<DataSelf>: VecZnxToMut,
 {
     pub fn trace<DataLhs, DataAK>(
         &mut self,
@@ -48,7 +48,7 @@ where
         VecZnx<DataLhs>: VecZnxToRef,
         MatZnxDft<DataAK, FFT64>: MatZnxDftToRef<FFT64>,
     {
-        self.copy(lhs);
+        self.copy(module, lhs);
         self.trace_inplace(module, start, end, auto_keys, scratch);
     }
 