Mirror of https://github.com/arnaucube/poulpy.git (synced 2026-02-10 13:16:44 +01:00)
wip
@@ -1,19 +1,15 @@
use poulpy_hal::{
api::{
ScratchAvailable, SvpApplyDftToDftInplace, SvpPPolAllocBytes, SvpPrepare, TakeScalarZnx, TakeVecZnx, TakeVecZnxDft,
VecZnxAddInplace, VecZnxAddNormal, VecZnxAddScalarInplace, VecZnxAutomorphism, VecZnxBigNormalize, VecZnxDftAllocBytes,
VecZnxDftApply, VecZnxFillUniform, VecZnxIdftApplyConsume, VecZnxNormalize, VecZnxNormalizeInplace,
VecZnxNormalizeTmpBytes, VecZnxSub, VecZnxSubInplace, VecZnxSwitchRing,
},
api::{ScratchAvailable, SvpPPolAllocBytes, VecZnxAutomorphism, VecZnxDftAllocBytes, VecZnxNormalizeTmpBytes},
layouts::{Backend, DataMut, DataRef, Module, Scratch},
source::Source,
};

use crate::{
TakeGLWESecret, TakeGLWESecretPrepared,
TakeGLWESecret,
encryption::compressed::gglwe_ksk::GGLWEKeyCompressedEncryptSk,
layouts::{
GGLWEInfos, GLWEInfos, GLWESecret, LWEInfos,
compressed::{GGLWEAutomorphismKeyCompressed, GGLWESwitchingKeyCompressed},
GGLWEInfos, GLWEInfos, GLWESecret, GLWESecretToRef, LWEInfos,
compressed::{GGLWEAutomorphismKeyCompressed, GGLWEAutomorphismKeyCompressedToMut, GGLWEKeyCompressed},
},
};
@@ -24,8 +20,75 @@ impl GGLWEAutomorphismKeyCompressed<Vec<u8>> {
|
||||
Module<B>: VecZnxNormalizeTmpBytes + VecZnxDftAllocBytes + VecZnxNormalizeTmpBytes + SvpPPolAllocBytes,
|
||||
{
|
||||
assert_eq!(module.n() as u32, infos.n());
|
||||
GGLWESwitchingKeyCompressed::encrypt_sk_scratch_space(module, infos)
|
||||
+ GLWESecret::alloc_bytes_with(infos.n(), infos.rank_out())
|
||||
GGLWEKeyCompressed::encrypt_sk_scratch_space(module, infos) + GLWESecret::alloc_bytes_with(infos.n(), infos.rank_out())
|
||||
}
|
||||
}
|
||||
|
||||
pub trait GGLWEAutomorphismKeyCompressedEncryptSk<B: Backend> {
|
||||
fn gglwe_automorphism_key_compressed_encrypt_sk<R, S>(
|
||||
&self,
|
||||
res: &mut R,
|
||||
p: i64,
|
||||
sk: &S,
|
||||
seed_xa: [u8; 32],
|
||||
source_xe: &mut Source,
|
||||
scratch: &mut Scratch<B>,
|
||||
) where
|
||||
R: GGLWEAutomorphismKeyCompressedToMut,
|
||||
S: GLWESecretToRef;
|
||||
}
|
||||
|
||||
impl<B: Backend> GGLWEAutomorphismKeyCompressedEncryptSk<B> for Module<B>
|
||||
where
|
||||
Module<B>:
|
||||
GGLWEKeyCompressedEncryptSk<B> + VecZnxNormalizeTmpBytes + VecZnxDftAllocBytes + SvpPPolAllocBytes + VecZnxAutomorphism,
|
||||
Scratch<B>: TakeGLWESecret + ScratchAvailable,
|
||||
{
|
||||
fn gglwe_automorphism_key_compressed_encrypt_sk<R, S>(
|
||||
&self,
|
||||
res: &mut R,
|
||||
p: i64,
|
||||
sk: &S,
|
||||
seed_xa: [u8; 32],
|
||||
source_xe: &mut Source,
|
||||
scratch: &mut Scratch<B>,
|
||||
) where
|
||||
R: GGLWEAutomorphismKeyCompressedToMut,
|
||||
S: GLWESecretToRef,
|
||||
{
|
||||
let res: &mut GGLWEAutomorphismKeyCompressed<&mut [u8]> = &mut res.to_mut();
|
||||
let sk: &GLWESecret<&[u8]> = &sk.to_ref();
|
||||
|
||||
#[cfg(debug_assertions)]
|
||||
{
|
||||
assert_eq!(res.n(), sk.n());
|
||||
assert_eq!(res.rank_out(), res.rank_in());
|
||||
assert_eq!(sk.rank(), res.rank_out());
|
||||
assert!(
|
||||
scratch.available() >= GGLWEAutomorphismKeyCompressed::encrypt_sk_scratch_space(self, res),
|
||||
"scratch.available(): {} < AutomorphismKey::encrypt_sk_scratch_space: {}",
|
||||
scratch.available(),
|
||||
GGLWEAutomorphismKeyCompressed::encrypt_sk_scratch_space(self, res)
|
||||
)
|
||||
}
|
||||
|
||||
let (mut sk_out, scratch_1) = scratch.take_glwe_secret(sk.n(), sk.rank());
|
||||
|
||||
{
|
||||
(0..res.rank_out().into()).for_each(|i| {
|
||||
self.vec_znx_automorphism(
|
||||
self.galois_element_inv(p),
|
||||
&mut sk_out.data.as_vec_znx_mut(),
|
||||
i,
|
||||
&sk.data.as_vec_znx(),
|
||||
i,
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
self.gglwe_key_compressed_encrypt_sk(&mut res.key, sk, &sk_out, seed_xa, source_xe, scratch_1);
|
||||
|
||||
res.p = p;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -40,56 +103,8 @@ impl<DataSelf: DataMut> GGLWEAutomorphismKeyCompressed<DataSelf> {
|
||||
source_xe: &mut Source,
|
||||
scratch: &mut Scratch<B>,
|
||||
) where
|
||||
Module<B>: VecZnxAutomorphism
|
||||
+ SvpPrepare<B>
|
||||
+ SvpPPolAllocBytes
|
||||
+ VecZnxSwitchRing
|
||||
+ VecZnxDftAllocBytes
|
||||
+ VecZnxBigNormalize<B>
|
||||
+ VecZnxDftApply<B>
|
||||
+ SvpApplyDftToDftInplace<B>
|
||||
+ VecZnxIdftApplyConsume<B>
|
||||
+ VecZnxNormalizeTmpBytes
|
||||
+ VecZnxFillUniform
|
||||
+ VecZnxSubInplace
|
||||
+ VecZnxAddInplace
|
||||
+ VecZnxNormalizeInplace<B>
|
||||
+ VecZnxAddNormal
|
||||
+ VecZnxNormalize<B>
|
||||
+ VecZnxSub
|
||||
+ VecZnxAddScalarInplace,
|
||||
Scratch<B>: TakeVecZnxDft<B> + ScratchAvailable + TakeVecZnx + TakeScalarZnx + TakeGLWESecretPrepared<B>,
|
||||
Module<B>: GGLWEAutomorphismKeyCompressedEncryptSk<B>,
|
||||
{
|
||||
#[cfg(debug_assertions)]
|
||||
{
|
||||
assert_eq!(self.n(), sk.n());
|
||||
assert_eq!(self.rank_out(), self.rank_in());
|
||||
assert_eq!(sk.rank(), self.rank_out());
|
||||
assert!(
|
||||
scratch.available() >= GGLWEAutomorphismKeyCompressed::encrypt_sk_scratch_space(module, self),
|
||||
"scratch.available(): {} < AutomorphismKey::encrypt_sk_scratch_space: {}",
|
||||
scratch.available(),
|
||||
GGLWEAutomorphismKeyCompressed::encrypt_sk_scratch_space(module, self)
|
||||
)
|
||||
}
|
||||
|
||||
let (mut sk_out, scratch_1) = scratch.take_glwe_secret(sk.n(), sk.rank());
|
||||
|
||||
{
|
||||
(0..self.rank_out().into()).for_each(|i| {
|
||||
module.vec_znx_automorphism(
|
||||
module.galois_element_inv(p),
|
||||
&mut sk_out.data.as_vec_znx_mut(),
|
||||
i,
|
||||
&sk.data.as_vec_znx(),
|
||||
i,
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
self.key
|
||||
.encrypt_sk(module, sk, &sk_out, seed_xa, source_xe, scratch_1);
|
||||
|
||||
self.p = p;
|
||||
module.gglwe_automorphism_key_compressed_encrypt_sk(self, p, sk, seed_xa, source_xe, scratch);
|
||||
}
|
||||
}
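Note: the following is a minimal usage sketch of the new trait-based entry point defined above, not part of the commit. The wrapper function, its argument names, and the assumption that GGLWEAutomorphismKeyCompressed<Vec<u8>> and GLWESecret<Vec<u8>> satisfy the GGLWEAutomorphismKeyCompressedToMut / GLWESecretToRef bounds are illustrative only.

fn gen_compressed_automorphism_key<B: Backend>(
    module: &Module<B>,
    res: &mut GGLWEAutomorphismKeyCompressed<Vec<u8>>,
    sk: &GLWESecret<Vec<u8>>,
    p: i64,
    seed_xa: [u8; 32],
    source_xe: &mut Source,
    scratch: &mut Scratch<B>,
) where
    Module<B>: GGLWEAutomorphismKeyCompressedEncryptSk<B>,
{
    // `scratch` must hold at least encrypt_sk_scratch_space(module, res) bytes.
    // The trait method derives the automorphed secret internally, fills `res`
    // with the compressed key material, and records `p` in `res.p`.
    module.gglwe_automorphism_key_compressed_encrypt_sk(res, p, sk, seed_xa, source_xe, scratch);
}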
@@ -1,29 +1,22 @@
|
||||
use poulpy_hal::{
|
||||
api::{
|
||||
ScratchAvailable, SvpApplyDftToDftInplace, TakeVecZnx, TakeVecZnxDft, VecZnxAddInplace, VecZnxAddNormal,
|
||||
VecZnxAddScalarInplace, VecZnxBigNormalize, VecZnxDftAllocBytes, VecZnxDftApply, VecZnxFillUniform,
|
||||
VecZnxIdftApplyConsume, VecZnxNormalize, VecZnxNormalizeInplace, VecZnxNormalizeTmpBytes, VecZnxSub, VecZnxSubInplace,
|
||||
ScratchAvailable, VecZnxAddScalarInplace, VecZnxDftAllocBytes, VecZnxNormalizeInplace, VecZnxNormalizeTmpBytes,
|
||||
ZnNormalizeInplace,
|
||||
},
|
||||
layouts::{Backend, DataMut, DataRef, Module, ScalarZnx, Scratch, ZnxZero},
|
||||
layouts::{Backend, DataMut, DataRef, Module, ScalarZnx, ScalarZnxToRef, Scratch, ZnxZero},
|
||||
source::Source,
|
||||
};
|
||||
|
||||
use crate::{
|
||||
TakeGLWEPt,
|
||||
encryption::{SIGMA, glwe_encrypt_sk_internal},
|
||||
layouts::{GGLWECiphertext, GGLWEInfos, LWEInfos, compressed::GGLWECiphertextCompressed, prepared::GLWESecretPrepared},
|
||||
encryption::{SIGMA, glwe_ct::GLWEEncryptSkInternal},
|
||||
layouts::{
|
||||
GGLWECiphertext, GGLWEInfos, LWEInfos,
|
||||
compressed::{GGLWECiphertextCompressed, GGLWECiphertextCompressedToMut},
|
||||
prepared::{GLWESecretPrepared, GLWESecretPreparedToRef},
|
||||
},
|
||||
};
|
||||
|
||||
impl GGLWECiphertextCompressed<Vec<u8>> {
|
||||
pub fn encrypt_sk_scratch_space<B: Backend, A>(module: &Module<B>, infos: &A) -> usize
|
||||
where
|
||||
A: GGLWEInfos,
|
||||
Module<B>: VecZnxNormalizeTmpBytes + VecZnxDftAllocBytes + VecZnxNormalizeTmpBytes,
|
||||
{
|
||||
GGLWECiphertext::encrypt_sk_scratch_space(module, infos)
|
||||
}
|
||||
}
|
||||
|
||||
impl<D: DataMut> GGLWECiphertextCompressed<D> {
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn encrypt_sk<DataPt: DataRef, DataSk: DataRef, B: Backend>(
|
||||
@@ -35,83 +28,124 @@ impl<D: DataMut> GGLWECiphertextCompressed<D> {
|
||||
source_xe: &mut Source,
|
||||
scratch: &mut Scratch<B>,
|
||||
) where
|
||||
Module<B>: VecZnxAddScalarInplace
|
||||
+ VecZnxDftAllocBytes
|
||||
+ VecZnxBigNormalize<B>
|
||||
+ VecZnxDftApply<B>
|
||||
+ SvpApplyDftToDftInplace<B>
|
||||
+ VecZnxIdftApplyConsume<B>
|
||||
+ VecZnxNormalizeTmpBytes
|
||||
+ VecZnxFillUniform
|
||||
+ VecZnxSubInplace
|
||||
+ VecZnxAddInplace
|
||||
+ VecZnxNormalizeInplace<B>
|
||||
+ VecZnxAddNormal
|
||||
+ VecZnxNormalize<B>
|
||||
+ VecZnxSub,
|
||||
Scratch<B>: TakeVecZnxDft<B> + ScratchAvailable + TakeVecZnx,
|
||||
Module<B>: GGLWECompressedEncryptSk<B>,
|
||||
{
|
||||
module.gglwe_compressed_encrypt_sk(self, pt, sk, seed, source_xe, scratch);
|
||||
}
|
||||
}
|
||||
|
||||
impl GGLWECiphertextCompressed<Vec<u8>> {
|
||||
pub fn encrypt_sk_scratch_space<B: Backend, A>(module: &Module<B>, infos: &A) -> usize
|
||||
where
|
||||
A: GGLWEInfos,
|
||||
Module<B>: VecZnxNormalizeTmpBytes + VecZnxDftAllocBytes + VecZnxNormalizeTmpBytes,
|
||||
{
|
||||
GGLWECiphertext::encrypt_sk_scratch_space(module, infos)
|
||||
}
|
||||
}
|
||||
|
||||
pub trait GGLWECompressedEncryptSk<B: Backend> {
|
||||
fn gglwe_compressed_encrypt_sk<R, P, S>(
|
||||
&self,
|
||||
res: &mut R,
|
||||
pt: &P,
|
||||
sk: &S,
|
||||
seed: [u8; 32],
|
||||
source_xe: &mut Source,
|
||||
scratch: &mut Scratch<B>,
|
||||
) where
|
||||
R: GGLWECiphertextCompressedToMut,
|
||||
P: ScalarZnxToRef,
|
||||
S: GLWESecretPreparedToRef<B>;
|
||||
}
|
||||
|
||||
impl<B: Backend> GGLWECompressedEncryptSk<B> for Module<B>
|
||||
where
|
||||
Module<B>: GLWEEncryptSkInternal<B>
|
||||
+ VecZnxNormalizeInplace<B>
|
||||
+ VecZnxNormalizeTmpBytes
|
||||
+ VecZnxDftAllocBytes
|
||||
+ VecZnxAddScalarInplace
|
||||
+ ZnNormalizeInplace<B>,
|
||||
Scratch<B>: TakeGLWEPt<B> + ScratchAvailable,
|
||||
{
|
||||
fn gglwe_compressed_encrypt_sk<R, P, S>(
|
||||
&self,
|
||||
res: &mut R,
|
||||
pt: &P,
|
||||
sk: &S,
|
||||
seed: [u8; 32],
|
||||
source_xe: &mut Source,
|
||||
scratch: &mut Scratch<B>,
|
||||
) where
|
||||
R: GGLWECiphertextCompressedToMut,
|
||||
P: ScalarZnxToRef,
|
||||
S: GLWESecretPreparedToRef<B>,
|
||||
{
|
||||
let res: &mut GGLWECiphertextCompressed<&mut [u8]> = &mut res.to_mut();
|
||||
let pt: &ScalarZnx<&[u8]> = &pt.to_ref();
|
||||
|
||||
#[cfg(debug_assertions)]
|
||||
{
|
||||
use poulpy_hal::layouts::ZnxInfos;
|
||||
let sk = &sk.to_ref();
|
||||
|
||||
assert_eq!(
|
||||
self.rank_in(),
|
||||
res.rank_in(),
|
||||
pt.cols() as u32,
|
||||
"self.rank_in(): {} != pt.cols(): {}",
|
||||
self.rank_in(),
|
||||
"res.rank_in(): {} != pt.cols(): {}",
|
||||
res.rank_in(),
|
||||
pt.cols()
|
||||
);
|
||||
assert_eq!(
|
||||
self.rank_out(),
|
||||
res.rank_out(),
|
||||
sk.rank(),
|
||||
"self.rank_out(): {} != sk.rank(): {}",
|
||||
self.rank_out(),
|
||||
"res.rank_out(): {} != sk.rank(): {}",
|
||||
res.rank_out(),
|
||||
sk.rank()
|
||||
);
|
||||
assert_eq!(self.n(), sk.n());
|
||||
assert_eq!(res.n(), sk.n());
|
||||
assert_eq!(pt.n() as u32, sk.n());
|
||||
assert!(
|
||||
scratch.available() >= GGLWECiphertextCompressed::encrypt_sk_scratch_space(module, self),
|
||||
scratch.available() >= GGLWECiphertextCompressed::encrypt_sk_scratch_space(self, res),
|
||||
"scratch.available: {} < GGLWECiphertext::encrypt_sk_scratch_space: {}",
|
||||
scratch.available(),
|
||||
GGLWECiphertextCompressed::encrypt_sk_scratch_space(module, self)
|
||||
GGLWECiphertextCompressed::encrypt_sk_scratch_space(self, res)
|
||||
);
|
||||
assert!(
|
||||
self.dnum().0 * self.dsize().0 * self.base2k().0 <= self.k().0,
|
||||
"self.dnum() : {} * self.dsize() : {} * self.base2k() : {} = {} >= self.k() = {}",
|
||||
self.dnum(),
|
||||
self.dsize(),
|
||||
self.base2k(),
|
||||
self.dnum().0 * self.dsize().0 * self.base2k().0,
|
||||
self.k()
|
||||
res.dnum().0 * res.dsize().0 * res.base2k().0 <= res.k().0,
|
||||
"res.dnum() : {} * res.dsize() : {} * res.base2k() : {} = {} >= res.k() = {}",
|
||||
res.dnum(),
|
||||
res.dsize(),
|
||||
res.base2k(),
|
||||
res.dnum().0 * res.dsize().0 * res.base2k().0,
|
||||
res.k()
|
||||
);
|
||||
}
|
||||
|
||||
let dnum: usize = self.dnum().into();
|
||||
let dsize: usize = self.dsize().into();
|
||||
let base2k: usize = self.base2k().into();
|
||||
let rank_in: usize = self.rank_in().into();
|
||||
let cols: usize = (self.rank_out() + 1).into();
|
||||
let dnum: usize = res.dnum().into();
|
||||
let dsize: usize = res.dsize().into();
|
||||
let base2k: usize = res.base2k().into();
|
||||
let rank_in: usize = res.rank_in().into();
|
||||
let cols: usize = (res.rank_out() + 1).into();
|
||||
|
||||
let mut source_xa = Source::new(seed);
|
||||
|
||||
let (mut tmp_pt, scrach_1) = scratch.take_glwe_pt(self);
|
||||
let (mut tmp_pt, scrach_1) = scratch.take_glwe_pt(res);
|
||||
(0..rank_in).for_each(|col_i| {
|
||||
(0..dnum).for_each(|d_i| {
|
||||
// Adds the scalar_znx_pt to the i-th limb of the vec_znx_pt
|
||||
tmp_pt.data.zero(); // zeroes for next iteration
|
||||
module.vec_znx_add_scalar_inplace(&mut tmp_pt.data, 0, (dsize - 1) + d_i * dsize, pt, col_i);
|
||||
module.vec_znx_normalize_inplace(base2k, &mut tmp_pt.data, 0, scrach_1);
|
||||
self.vec_znx_add_scalar_inplace(&mut tmp_pt.data, 0, (dsize - 1) + d_i * dsize, pt, col_i);
|
||||
self.vec_znx_normalize_inplace(base2k, &mut tmp_pt.data, 0, scrach_1);
|
||||
|
||||
let (seed, mut source_xa_tmp) = source_xa.branch();
|
||||
self.seed[col_i * dnum + d_i] = seed;
|
||||
res.seed[col_i * dnum + d_i] = seed;
|
||||
|
||||
glwe_encrypt_sk_internal(
|
||||
module,
|
||||
self.base2k().into(),
|
||||
self.k().into(),
|
||||
&mut self.at_mut(d_i, col_i).data,
|
||||
self.glwe_encrypt_sk_internal(
|
||||
res.base2k().into(),
|
||||
res.k().into(),
|
||||
&mut res.at_mut(d_i, col_i).data,
|
||||
cols,
|
||||
true,
|
||||
Some((&tmp_pt, 0)),
|
||||
|
||||
@@ -1,9 +1,7 @@
use poulpy_hal::{
api::{
ScratchAvailable, SvpApplyDftToDftInplace, SvpPPolAllocBytes, SvpPrepare, TakeScalarZnx, TakeVecZnx, TakeVecZnxDft,
VecZnxAddInplace, VecZnxAddNormal, VecZnxAddScalarInplace, VecZnxBigNormalize, VecZnxDftAllocBytes, VecZnxDftApply,
VecZnxFillUniform, VecZnxIdftApplyConsume, VecZnxNormalize, VecZnxNormalizeInplace, VecZnxNormalizeTmpBytes, VecZnxSub,
VecZnxSubInplace, VecZnxSwitchRing,
ScratchAvailable, SvpPPolAllocBytes, SvpPrepare, TakeScalarZnx, VecZnxDftAllocBytes, VecZnxNormalizeTmpBytes,
VecZnxSwitchRing,
},
layouts::{Backend, DataMut, DataRef, Module, ScalarZnx, Scratch},
source::Source,
@@ -11,13 +9,15 @@ use poulpy_hal::{
|
||||
|
||||
use crate::{
|
||||
TakeGLWESecretPrepared,
|
||||
encryption::compressed::gglwe_ct::GGLWECompressedEncryptSk,
|
||||
layouts::{
|
||||
Degree, GGLWECiphertext, GGLWEInfos, GLWEInfos, GLWESecret, LWEInfos, compressed::GGLWESwitchingKeyCompressed,
|
||||
Degree, GGLWECiphertext, GGLWEInfos, GLWEInfos, GLWESecret, GLWESecretToRef, LWEInfos,
|
||||
compressed::{GGLWEKeyCompressed, GGLWEKeyCompressedToMut},
|
||||
prepared::GLWESecretPrepared,
|
||||
},
|
||||
};
|
||||
|
||||
impl GGLWESwitchingKeyCompressed<Vec<u8>> {
|
||||
impl GGLWEKeyCompressed<Vec<u8>> {
|
||||
pub fn encrypt_sk_scratch_space<B: Backend, A>(module: &Module<B>, infos: &A) -> usize
|
||||
where
|
||||
A: GGLWEInfos,
|
||||
@@ -29,7 +29,7 @@ impl GGLWESwitchingKeyCompressed<Vec<u8>> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<DataSelf: DataMut> GGLWESwitchingKeyCompressed<DataSelf> {
|
||||
impl<DataSelf: DataMut> GGLWEKeyCompressed<DataSelf> {
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn encrypt_sk<DataSkIn: DataRef, DataSkOut: DataRef, B: Backend>(
|
||||
&mut self,
|
||||
@@ -40,36 +40,65 @@ impl<DataSelf: DataMut> GGLWESwitchingKeyCompressed<DataSelf> {
|
||||
source_xe: &mut Source,
|
||||
scratch: &mut Scratch<B>,
|
||||
) where
|
||||
Module<B>: SvpPrepare<B>
|
||||
+ SvpPPolAllocBytes
|
||||
+ VecZnxSwitchRing
|
||||
+ VecZnxDftAllocBytes
|
||||
+ VecZnxBigNormalize<B>
|
||||
+ VecZnxDftApply<B>
|
||||
+ SvpApplyDftToDftInplace<B>
|
||||
+ VecZnxIdftApplyConsume<B>
|
||||
+ VecZnxNormalizeTmpBytes
|
||||
+ VecZnxFillUniform
|
||||
+ VecZnxSubInplace
|
||||
+ VecZnxAddInplace
|
||||
+ VecZnxNormalizeInplace<B>
|
||||
+ VecZnxAddNormal
|
||||
+ VecZnxNormalize<B>
|
||||
+ VecZnxSub
|
||||
+ VecZnxAddScalarInplace,
|
||||
Scratch<B>: TakeVecZnxDft<B> + ScratchAvailable + TakeVecZnx + TakeScalarZnx + TakeGLWESecretPrepared<B>,
|
||||
Module<B>: GGLWEKeyCompressedEncryptSk<B>,
|
||||
{
|
||||
module.gglwe_key_compressed_encrypt_sk(self, sk_in, sk_out, seed_xa, source_xe, scratch);
|
||||
}
|
||||
}
|
||||
|
||||
pub trait GGLWEKeyCompressedEncryptSk<B: Backend> {
|
||||
fn gglwe_key_compressed_encrypt_sk<R, SI, SO>(
|
||||
&self,
|
||||
res: &mut R,
|
||||
sk_in: &SI,
|
||||
sk_out: &SO,
|
||||
seed_xa: [u8; 32],
|
||||
source_xe: &mut Source,
|
||||
scratch: &mut Scratch<B>,
|
||||
) where
|
||||
R: GGLWEKeyCompressedToMut,
|
||||
SI: GLWESecretToRef,
|
||||
SO: GLWESecretToRef;
|
||||
}
|
||||
|
||||
impl<B: Backend> GGLWEKeyCompressedEncryptSk<B> for Module<B>
|
||||
where
|
||||
Module<B>: GGLWECompressedEncryptSk<B>
|
||||
+ SvpPPolAllocBytes
|
||||
+ VecZnxNormalizeTmpBytes
|
||||
+ VecZnxDftAllocBytes
|
||||
+ VecZnxSwitchRing
|
||||
+ SvpPrepare<B>,
|
||||
Scratch<B>: ScratchAvailable + TakeScalarZnx + TakeGLWESecretPrepared<B>,
|
||||
{
|
||||
fn gglwe_key_compressed_encrypt_sk<R, SI, SO>(
|
||||
&self,
|
||||
res: &mut R,
|
||||
sk_in: &SI,
|
||||
sk_out: &SO,
|
||||
seed_xa: [u8; 32],
|
||||
source_xe: &mut Source,
|
||||
scratch: &mut Scratch<B>,
|
||||
) where
|
||||
R: GGLWEKeyCompressedToMut,
|
||||
SI: GLWESecretToRef,
|
||||
SO: GLWESecretToRef,
|
||||
{
|
||||
let res: &mut GGLWEKeyCompressed<&mut [u8]> = &mut res.to_mut();
|
||||
let sk_in: &GLWESecret<&[u8]> = &sk_in.to_ref();
|
||||
let sk_out: &GLWESecret<&[u8]> = &sk_out.to_ref();
|
||||
|
||||
#[cfg(debug_assertions)]
|
||||
{
|
||||
use crate::layouts::GGLWESwitchingKey;
|
||||
|
||||
assert!(sk_in.n().0 <= module.n() as u32);
|
||||
assert!(sk_out.n().0 <= module.n() as u32);
|
||||
assert!(sk_in.n().0 <= self.n() as u32);
|
||||
assert!(sk_out.n().0 <= self.n() as u32);
|
||||
assert!(
|
||||
scratch.available() >= GGLWESwitchingKey::encrypt_sk_scratch_space(module, self),
|
||||
scratch.available() >= GGLWESwitchingKey::encrypt_sk_scratch_space(self, res),
|
||||
"scratch.available()={} < GLWESwitchingKey::encrypt_sk_scratch_space={}",
|
||||
scratch.available(),
|
||||
GGLWESwitchingKey::encrypt_sk_scratch_space(module, self)
|
||||
GGLWESwitchingKey::encrypt_sk_scratch_space(self, res)
|
||||
)
|
||||
}
|
||||
|
||||
@@ -77,7 +106,7 @@ impl<DataSelf: DataMut> GGLWESwitchingKeyCompressed<DataSelf> {
|
||||
|
||||
let (mut sk_in_tmp, scratch_1) = scratch.take_scalar_znx(n, sk_in.rank().into());
|
||||
(0..sk_in.rank().into()).for_each(|i| {
|
||||
module.vec_znx_switch_ring(
|
||||
self.vec_znx_switch_ring(
|
||||
&mut sk_in_tmp.as_vec_znx_mut(),
|
||||
i,
|
||||
&sk_in.data.as_vec_znx(),
|
||||
@@ -89,20 +118,20 @@ impl<DataSelf: DataMut> GGLWESwitchingKeyCompressed<DataSelf> {
|
||||
{
|
||||
let (mut tmp, _) = scratch_2.take_scalar_znx(n, 1);
|
||||
(0..sk_out.rank().into()).for_each(|i| {
|
||||
module.vec_znx_switch_ring(&mut tmp.as_vec_znx_mut(), 0, &sk_out.data.as_vec_znx(), i);
|
||||
module.svp_prepare(&mut sk_out_tmp.data, i, &tmp, 0);
|
||||
self.vec_znx_switch_ring(&mut tmp.as_vec_znx_mut(), 0, &sk_out.data.as_vec_znx(), i);
|
||||
self.svp_prepare(&mut sk_out_tmp.data, i, &tmp, 0);
|
||||
});
|
||||
}
|
||||
|
||||
self.key.encrypt_sk(
|
||||
module,
|
||||
self.gglwe_compressed_encrypt_sk(
|
||||
&mut res.key,
|
||||
&sk_in_tmp,
|
||||
&sk_out_tmp,
|
||||
seed_xa,
|
||||
source_xe,
|
||||
scratch_2,
|
||||
);
|
||||
self.sk_in_n = sk_in.n().into();
|
||||
self.sk_out_n = sk_out.n().into();
|
||||
res.sk_in_n = sk_in.n().into();
|
||||
res.sk_out_n = sk_out.n().into();
|
||||
}
}
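A sketch of the resulting call site for the renamed GGLWEKeyCompressed (local names and concrete data types are assumptions; the commit only defines the trait and the delegating inherent wrapper):

fn gen_switching_key<B: Backend>(
    module: &Module<B>,
    ksk: &mut GGLWEKeyCompressed<Vec<u8>>,
    sk_in: &GLWESecret<Vec<u8>>,
    sk_out: &GLWESecret<Vec<u8>>,
    seed_xa: [u8; 32],
    source_xe: &mut Source,
    scratch: &mut Scratch<B>,
) where
    Module<B>: GGLWEKeyCompressedEncryptSk<B>,
{
    // Encrypts sk_in under sk_out as a compressed GGLWE switching key; the
    // per-row seeds are derived from seed_xa and stored alongside the key.
    module.gglwe_key_compressed_encrypt_sk(ksk, sk_in, sk_out, seed_xa, source_xe, scratch);
    // GGLWEKeyCompressed::encrypt_sk is now a thin wrapper that forwards here.
}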
@@ -1,9 +1,7 @@
use poulpy_hal::{
api::{
ScratchAvailable, SvpApplyDftToDft, SvpApplyDftToDftInplace, SvpPPolAlloc, SvpPPolAllocBytes, SvpPrepare, TakeScalarZnx,
TakeVecZnx, TakeVecZnxBig, TakeVecZnxDft, VecZnxAddInplace, VecZnxAddNormal, VecZnxAddScalarInplace, VecZnxBigAllocBytes,
VecZnxBigNormalize, VecZnxDftAllocBytes, VecZnxDftApply, VecZnxFillUniform, VecZnxIdftApplyConsume, VecZnxIdftApplyTmpA,
VecZnxNormalize, VecZnxNormalizeInplace, VecZnxNormalizeTmpBytes, VecZnxSub, VecZnxSubInplace, VecZnxSwitchRing,
SvpApplyDftToDft, SvpPPolAllocBytes, SvpPrepare, TakeVecZnxBig, TakeVecZnxDft, VecZnxBigAllocBytes, VecZnxBigNormalize,
VecZnxDftAllocBytes, VecZnxDftApply, VecZnxIdftApplyTmpA, VecZnxNormalizeTmpBytes,
},
layouts::{Backend, DataMut, DataRef, Module, Scratch},
source::Source,
@@ -11,8 +9,10 @@ use poulpy_hal::{
|
||||
|
||||
use crate::{
|
||||
TakeGLWESecret, TakeGLWESecretPrepared,
|
||||
encryption::compressed::gglwe_ksk::GGLWEKeyCompressedEncryptSk,
|
||||
layouts::{
|
||||
GGLWEInfos, GGLWETensorKey, GLWEInfos, GLWESecret, LWEInfos, Rank, compressed::GGLWETensorKeyCompressed,
|
||||
GGLWEInfos, GGLWETensorKey, GLWEInfos, GLWESecret, GLWESecretToRef, LWEInfos, Rank,
|
||||
compressed::{GGLWETensorKeyCompressed, GGLWETensorKeyCompressedToMut},
|
||||
prepared::Prepare,
|
||||
},
|
||||
};
|
||||
@@ -28,59 +28,59 @@ impl GGLWETensorKeyCompressed<Vec<u8>> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<DataSelf: DataMut> GGLWETensorKeyCompressed<DataSelf> {
|
||||
pub fn encrypt_sk<DataSk: DataRef, B: Backend>(
|
||||
&mut self,
|
||||
module: &Module<B>,
|
||||
sk: &GLWESecret<DataSk>,
|
||||
pub trait GGLWETensorKeyCompressedEncryptSk<B: Backend> {
|
||||
fn gglwe_tensor_key_encrypt_sk<R, S>(
|
||||
&self,
|
||||
res: &mut R,
|
||||
sk: &S,
|
||||
seed_xa: [u8; 32],
|
||||
source_xe: &mut Source,
|
||||
scratch: &mut Scratch<B>,
|
||||
) where
|
||||
Module<B>: SvpApplyDftToDft<B>
|
||||
+ VecZnxIdftApplyTmpA<B>
|
||||
+ VecZnxDftAllocBytes
|
||||
+ VecZnxBigNormalize<B>
|
||||
+ VecZnxDftApply<B>
|
||||
+ SvpApplyDftToDftInplace<B>
|
||||
+ VecZnxIdftApplyConsume<B>
|
||||
+ VecZnxNormalizeTmpBytes
|
||||
+ VecZnxFillUniform
|
||||
+ VecZnxSubInplace
|
||||
+ VecZnxAddInplace
|
||||
+ VecZnxNormalizeInplace<B>
|
||||
+ VecZnxAddNormal
|
||||
+ VecZnxNormalize<B>
|
||||
+ VecZnxSub
|
||||
+ VecZnxSwitchRing
|
||||
+ VecZnxAddScalarInplace
|
||||
+ SvpPrepare<B>
|
||||
+ SvpPPolAllocBytes
|
||||
+ SvpPPolAlloc<B>,
|
||||
Scratch<B>: ScratchAvailable
|
||||
+ TakeScalarZnx
|
||||
+ TakeVecZnxDft<B>
|
||||
+ TakeGLWESecretPrepared<B>
|
||||
+ ScratchAvailable
|
||||
+ TakeVecZnx
|
||||
+ TakeVecZnxBig<B>,
|
||||
R: GGLWETensorKeyCompressedToMut,
|
||||
S: GLWESecretToRef;
|
||||
}
|
||||
|
||||
impl<B: Backend> GGLWETensorKeyCompressedEncryptSk<B> for Module<B>
|
||||
where
|
||||
Module<B>: GGLWEKeyCompressedEncryptSk<B>
|
||||
+ VecZnxDftApply<B>
|
||||
+ SvpApplyDftToDft<B>
|
||||
+ VecZnxIdftApplyTmpA<B>
|
||||
+ VecZnxBigNormalize<B>
|
||||
+ SvpPrepare<B>,
|
||||
Scratch<B>: TakeGLWESecretPrepared<B> + TakeVecZnxDft<B> + TakeVecZnxBig<B> + TakeGLWESecret,
|
||||
{
|
||||
fn gglwe_tensor_key_encrypt_sk<R, S>(
|
||||
&self,
|
||||
res: &mut R,
|
||||
sk: &S,
|
||||
seed_xa: [u8; 32],
|
||||
source_xe: &mut Source,
|
||||
scratch: &mut Scratch<B>,
|
||||
) where
|
||||
R: GGLWETensorKeyCompressedToMut,
|
||||
S: GLWESecretToRef,
|
||||
{
|
||||
let res: &mut GGLWETensorKeyCompressed<&mut [u8]> = &mut res.to_mut();
|
||||
let sk: &GLWESecret<&[u8]> = &sk.to_ref();
|
||||
|
||||
#[cfg(debug_assertions)]
|
||||
{
|
||||
assert_eq!(self.rank_out(), sk.rank());
|
||||
assert_eq!(self.n(), sk.n());
|
||||
assert_eq!(res.rank_out(), sk.rank());
|
||||
assert_eq!(res.n(), sk.n());
|
||||
}
|
||||
|
||||
let n: usize = sk.n().into();
|
||||
let rank: usize = self.rank_out().into();
|
||||
let rank: usize = res.rank_out().into();
|
||||
|
||||
let (mut sk_dft_prep, scratch_1) = scratch.take_glwe_secret_prepared(sk.n(), self.rank_out());
|
||||
sk_dft_prep.prepare(module, sk, scratch_1);
|
||||
let (mut sk_dft_prep, scratch_1) = scratch.take_glwe_secret_prepared(sk.n(), res.rank_out());
|
||||
sk_dft_prep.prepare(self, sk, scratch_1);
|
||||
|
||||
let (mut sk_dft, scratch_2) = scratch_1.take_vec_znx_dft(n, rank, 1);
|
||||
|
||||
for i in 0..rank {
|
||||
module.vec_znx_dft_apply(1, 0, &mut sk_dft, i, &sk.data.as_vec_znx(), i);
|
||||
self.vec_znx_dft_apply(1, 0, &mut sk_dft, i, &sk.data.as_vec_znx(), i);
|
||||
}
|
||||
|
||||
let (mut sk_ij_big, scratch_3) = scratch_2.take_vec_znx_big(n, 1, 1);
|
||||
@@ -91,14 +91,14 @@ impl<DataSelf: DataMut> GGLWETensorKeyCompressed<DataSelf> {
|
||||
|
||||
for i in 0..rank {
|
||||
for j in i..rank {
|
||||
module.svp_apply_dft_to_dft(&mut sk_ij_dft, 0, &sk_dft_prep.data, j, &sk_dft, i);
|
||||
self.svp_apply_dft_to_dft(&mut sk_ij_dft, 0, &sk_dft_prep.data, j, &sk_dft, i);
|
||||
|
||||
module.vec_znx_idft_apply_tmpa(&mut sk_ij_big, 0, &mut sk_ij_dft, 0);
|
||||
module.vec_znx_big_normalize(
|
||||
self.base2k().into(),
|
||||
self.vec_znx_idft_apply_tmpa(&mut sk_ij_big, 0, &mut sk_ij_dft, 0);
|
||||
self.vec_znx_big_normalize(
|
||||
res.base2k().into(),
|
||||
&mut sk_ij.data.as_vec_znx_mut(),
|
||||
0,
|
||||
self.base2k().into(),
|
||||
res.base2k().into(),
|
||||
&sk_ij_big,
|
||||
0,
|
||||
scratch_5,
|
||||
@@ -106,9 +106,30 @@ impl<DataSelf: DataMut> GGLWETensorKeyCompressed<DataSelf> {
|
||||
|
||||
let (seed_xa_tmp, _) = source_xa.branch();
|
||||
|
||||
self.at_mut(i, j)
|
||||
.encrypt_sk(module, &sk_ij, sk, seed_xa_tmp, source_xe, scratch_5);
|
||||
self.gglwe_key_compressed_encrypt_sk(
|
||||
res.at_mut(i, j),
|
||||
&sk_ij,
|
||||
sk,
|
||||
seed_xa_tmp,
|
||||
source_xe,
|
||||
scratch_5,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<DataSelf: DataMut> GGLWETensorKeyCompressed<DataSelf> {
|
||||
pub fn encrypt_sk<DataSk: DataRef, B: Backend>(
|
||||
&mut self,
|
||||
module: &Module<B>,
|
||||
sk: &GLWESecret<DataSk>,
|
||||
seed_xa: [u8; 32],
|
||||
source_xe: &mut Source,
|
||||
scratch: &mut Scratch<B>,
|
||||
) where
|
||||
Module<B>: GGLWETensorKeyCompressedEncryptSk<B>,
|
||||
{
|
||||
module.gglwe_tensor_key_encrypt_sk(self, sk, seed_xa, source_xe, scratch);
|
||||
}
|
||||
}
@@ -1,18 +1,16 @@
use poulpy_hal::{
api::{
ScratchAvailable, SvpApplyDftToDftInplace, TakeVecZnx, TakeVecZnxDft, VecZnxAddInplace, VecZnxAddNormal,
VecZnxAddScalarInplace, VecZnxBigNormalize, VecZnxDftAllocBytes, VecZnxDftApply, VecZnxFillUniform,
VecZnxIdftApplyConsume, VecZnxNormalize, VecZnxNormalizeInplace, VecZnxNormalizeTmpBytes, VecZnxSub, VecZnxSubInplace,
},
layouts::{Backend, DataMut, DataRef, Module, ScalarZnx, Scratch, ZnxZero},
api::{VecZnxAddScalarInplace, VecZnxDftAllocBytes, VecZnxNormalizeInplace, VecZnxNormalizeTmpBytes},
layouts::{Backend, DataMut, DataRef, Module, ScalarZnx, ScalarZnxToRef, Scratch, ZnxZero},
source::Source,
};

use crate::{
TakeGLWEPt,
encryption::{SIGMA, glwe_encrypt_sk_internal},
encryption::{SIGMA, glwe_ct::GLWEEncryptSkInternal},
layouts::{
GGSWCiphertext, GGSWInfos, GLWEInfos, LWEInfos, compressed::GGSWCiphertextCompressed, prepared::GLWESecretPrepared,
GGSWCiphertext, GGSWInfos, GLWEInfos, LWEInfos,
compressed::{GGSWCiphertextCompressed, GGSWCiphertextCompressedToMut},
prepared::{GLWESecretPrepared, GLWESecretPreparedToRef},
},
};
@@ -26,6 +24,95 @@ impl GGSWCiphertextCompressed<Vec<u8>> {
|
||||
}
|
||||
}
|
||||
|
||||
pub trait GGSWCompressedEncryptSk<B: Backend> {
|
||||
fn ggsw_compressed_encrypt_sk<R, P, S>(
|
||||
&self,
|
||||
res: &mut R,
|
||||
pt: &P,
|
||||
sk: &S,
|
||||
seed_xa: [u8; 32],
|
||||
source_xe: &mut Source,
|
||||
scratch: &mut Scratch<B>,
|
||||
) where
|
||||
R: GGSWCiphertextCompressedToMut,
|
||||
P: ScalarZnxToRef,
|
||||
S: GLWESecretPreparedToRef<B>;
|
||||
}
|
||||
|
||||
impl<B: Backend> GGSWCompressedEncryptSk<B> for Module<B>
|
||||
where
|
||||
Module<B>: GLWEEncryptSkInternal<B> + VecZnxAddScalarInplace + VecZnxNormalizeInplace<B>,
|
||||
Scratch<B>: TakeGLWEPt<B>,
|
||||
{
|
||||
fn ggsw_compressed_encrypt_sk<R, P, S>(
|
||||
&self,
|
||||
res: &mut R,
|
||||
pt: &P,
|
||||
sk: &S,
|
||||
seed_xa: [u8; 32],
|
||||
source_xe: &mut Source,
|
||||
scratch: &mut Scratch<B>,
|
||||
) where
|
||||
R: GGSWCiphertextCompressedToMut,
|
||||
P: ScalarZnxToRef,
|
||||
S: GLWESecretPreparedToRef<B>,
|
||||
{
|
||||
let res: &mut GGSWCiphertextCompressed<&mut [u8]> = &mut res.to_mut();
|
||||
let sk: &GLWESecretPrepared<&[u8], B> = &sk.to_ref();
|
||||
let pt: &ScalarZnx<&[u8]> = &pt.to_ref();
|
||||
|
||||
#[cfg(debug_assertions)]
|
||||
{
|
||||
use poulpy_hal::layouts::ZnxInfos;
|
||||
|
||||
assert_eq!(res.rank(), sk.rank());
|
||||
assert_eq!(res.n(), sk.n());
|
||||
assert_eq!(pt.n() as u32, sk.n());
|
||||
}
|
||||
|
||||
let base2k: usize = res.base2k().into();
|
||||
let rank: usize = res.rank().into();
|
||||
let cols: usize = rank + 1;
|
||||
let dsize: usize = res.dsize().into();
|
||||
|
||||
let (mut tmp_pt, scratch_1) = scratch.take_glwe_pt(&res.glwe_layout());
|
||||
|
||||
let mut source = Source::new(seed_xa);
|
||||
|
||||
res.seed = vec![[0u8; 32]; res.dnum().0 as usize * cols];
|
||||
|
||||
for row_i in 0..res.dnum().into() {
|
||||
tmp_pt.data.zero();
|
||||
|
||||
// Adds the scalar_znx_pt to the i-th limb of the vec_znx_pt
|
||||
self.vec_znx_add_scalar_inplace(&mut tmp_pt.data, 0, (dsize - 1) + row_i * dsize, pt, 0);
|
||||
self.vec_znx_normalize_inplace(base2k, &mut tmp_pt.data, 0, scratch_1);
|
||||
|
||||
for col_j in 0..rank + 1 {
|
||||
// rlwe encrypt of vec_znx_pt into vec_znx_ct
|
||||
|
||||
let (seed, mut source_xa_tmp) = source.branch();
|
||||
|
||||
res.seed[row_i * cols + col_j] = seed;
|
||||
|
||||
self.glwe_encrypt_sk_internal(
|
||||
res.base2k().into(),
|
||||
res.k().into(),
|
||||
&mut res.at_mut(row_i, col_j).data,
|
||||
cols,
|
||||
true,
|
||||
Some((&tmp_pt, col_j)),
|
||||
sk,
|
||||
&mut source_xa_tmp,
|
||||
source_xe,
|
||||
SIGMA,
|
||||
scratch_1,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<DataSelf: DataMut> GGSWCiphertextCompressed<DataSelf> {
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn encrypt_sk<DataPt: DataRef, DataSk: DataRef, B: Backend>(
|
||||
@@ -37,71 +124,8 @@ impl<DataSelf: DataMut> GGSWCiphertextCompressed<DataSelf> {
|
||||
source_xe: &mut Source,
|
||||
scratch: &mut Scratch<B>,
|
||||
) where
|
||||
Module<B>: VecZnxAddScalarInplace
|
||||
+ VecZnxDftAllocBytes
|
||||
+ VecZnxBigNormalize<B>
|
||||
+ VecZnxDftApply<B>
|
||||
+ SvpApplyDftToDftInplace<B>
|
||||
+ VecZnxIdftApplyConsume<B>
|
||||
+ VecZnxNormalizeTmpBytes
|
||||
+ VecZnxFillUniform
|
||||
+ VecZnxSubInplace
|
||||
+ VecZnxAddInplace
|
||||
+ VecZnxNormalizeInplace<B>
|
||||
+ VecZnxAddNormal
|
||||
+ VecZnxNormalize<B>
|
||||
+ VecZnxSub,
|
||||
Scratch<B>: TakeVecZnxDft<B> + ScratchAvailable + TakeVecZnx,
|
||||
Module<B>: GGSWCompressedEncryptSk<B>,
|
||||
{
|
||||
#[cfg(debug_assertions)]
|
||||
{
|
||||
use poulpy_hal::layouts::ZnxInfos;
|
||||
|
||||
assert_eq!(self.rank(), sk.rank());
|
||||
assert_eq!(self.n(), sk.n());
|
||||
assert_eq!(pt.n() as u32, sk.n());
|
||||
}
|
||||
|
||||
let base2k: usize = self.base2k().into();
|
||||
let rank: usize = self.rank().into();
|
||||
let cols: usize = rank + 1;
|
||||
let dsize: usize = self.dsize().into();
|
||||
|
||||
let (mut tmp_pt, scratch_1) = scratch.take_glwe_pt(&self.glwe_layout());
|
||||
|
||||
let mut source = Source::new(seed_xa);
|
||||
|
||||
self.seed = vec![[0u8; 32]; self.dnum().0 as usize * cols];
|
||||
|
||||
(0..self.dnum().into()).for_each(|row_i| {
|
||||
tmp_pt.data.zero();
|
||||
|
||||
// Adds the scalar_znx_pt to the i-th limb of the vec_znx_pt
|
||||
module.vec_znx_add_scalar_inplace(&mut tmp_pt.data, 0, (dsize - 1) + row_i * dsize, pt, 0);
|
||||
module.vec_znx_normalize_inplace(base2k, &mut tmp_pt.data, 0, scratch_1);
|
||||
|
||||
(0..rank + 1).for_each(|col_j| {
|
||||
// rlwe encrypt of vec_znx_pt into vec_znx_ct
|
||||
|
||||
let (seed, mut source_xa_tmp) = source.branch();
|
||||
|
||||
self.seed[row_i * cols + col_j] = seed;
|
||||
|
||||
glwe_encrypt_sk_internal(
|
||||
module,
|
||||
self.base2k().into(),
|
||||
self.k().into(),
|
||||
&mut self.at_mut(row_i, col_j).data,
|
||||
cols,
|
||||
true,
|
||||
Some((&tmp_pt, col_j)),
|
||||
sk,
|
||||
&mut source_xa_tmp,
|
||||
source_xe,
|
||||
SIGMA,
|
||||
scratch_1,
|
||||
);
|
||||
});
|
||||
});
|
||||
module.ggsw_compressed_encrypt_sk(self, pt, sk, seed_xa, source_xe, scratch);
|
||||
}
|
||||
}
@@ -5,20 +5,23 @@ use poulpy_hal::{
VecZnxDftApply, VecZnxFillUniform, VecZnxIdftApplyConsume, VecZnxNormalize, VecZnxNormalizeInplace,
VecZnxNormalizeTmpBytes, VecZnxSub, VecZnxSubInplace, VecZnxSwitchRing,
},
layouts::{Backend, DataMut, DataRef, Module, Scratch},
layouts::{Backend, DataMut, Module, Scratch},
source::Source,
};

use crate::{
TakeGLWESecret, TakeGLWESecretPrepared,
layouts::{GGLWEAutomorphismKey, GGLWEInfos, GGLWESwitchingKey, GLWEInfos, GLWESecret, LWEInfos},
layouts::{
GGLWEAutomorphismKey, GGLWEAutomorphismKeyToMut, GGLWEInfos, GGLWESwitchingKey, GLWEInfos, GLWESecret, GLWESecretToRef,
LWEInfos,
},
};

impl GGLWEAutomorphismKey<Vec<u8>> {
|
||||
pub fn encrypt_sk_scratch_space<B: Backend, A>(module: &Module<B>, infos: &A) -> usize
|
||||
pub fn encrypt_sk_scratch_space<BE: Backend, A>(module: &Module<BE>, infos: &A) -> usize
|
||||
where
|
||||
A: GGLWEInfos,
|
||||
Module<B>: SvpPPolAllocBytes + VecZnxNormalizeTmpBytes + VecZnxDftAllocBytes + VecZnxNormalizeTmpBytes,
|
||||
Module<BE>: SvpPPolAllocBytes + VecZnxNormalizeTmpBytes + VecZnxDftAllocBytes + VecZnxNormalizeTmpBytes,
|
||||
{
|
||||
assert_eq!(
|
||||
infos.rank_in(),
|
||||
@@ -28,7 +31,7 @@ impl GGLWEAutomorphismKey<Vec<u8>> {
|
||||
GGLWESwitchingKey::encrypt_sk_scratch_space(module, infos) + GLWESecret::alloc_bytes(&infos.glwe_layout())
|
||||
}
|
||||
|
||||
pub fn encrypt_pk_scratch_space<B: Backend, A>(module: &Module<B>, _infos: &A) -> usize
|
||||
pub fn encrypt_pk_scratch_space<BE: Backend, A>(module: &Module<BE>, _infos: &A) -> usize
|
||||
where
|
||||
A: GGLWEInfos,
|
||||
{
|
||||
@@ -41,58 +44,98 @@ impl GGLWEAutomorphismKey<Vec<u8>> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<DataSelf: DataMut> GGLWEAutomorphismKey<DataSelf> {
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn encrypt_sk<DataSk: DataRef, B: Backend>(
|
||||
&mut self,
|
||||
module: &Module<B>,
|
||||
pub trait GGLWEAutomorphismKeyEncryptSk<BE: Backend> {
|
||||
fn gglwe_automorphism_key_encrypt_sk<A, B>(
|
||||
&self,
|
||||
res: &mut A,
|
||||
p: i64,
|
||||
sk: &GLWESecret<DataSk>,
|
||||
sk: &B,
|
||||
source_xa: &mut Source,
|
||||
source_xe: &mut Source,
|
||||
scratch: &mut Scratch<B>,
|
||||
scratch: &mut Scratch<BE>,
|
||||
) where
|
||||
Module<B>: VecZnxAddScalarInplace
|
||||
+ VecZnxDftAllocBytes
|
||||
+ VecZnxBigNormalize<B>
|
||||
+ VecZnxDftApply<B>
|
||||
+ SvpApplyDftToDftInplace<B>
|
||||
+ VecZnxIdftApplyConsume<B>
|
||||
+ VecZnxNormalizeTmpBytes
|
||||
+ VecZnxFillUniform
|
||||
+ VecZnxSubInplace
|
||||
+ VecZnxAddInplace
|
||||
+ VecZnxNormalizeInplace<B>
|
||||
+ VecZnxAddNormal
|
||||
+ VecZnxNormalize<B>
|
||||
+ VecZnxSub
|
||||
+ SvpPrepare<B>
|
||||
+ VecZnxSwitchRing
|
||||
+ SvpPPolAllocBytes
|
||||
+ VecZnxAutomorphism,
|
||||
Scratch<B>: TakeVecZnxDft<B> + ScratchAvailable + TakeVecZnx + TakeScalarZnx + TakeGLWESecretPrepared<B>,
|
||||
A: GGLWEAutomorphismKeyToMut,
|
||||
B: GLWESecretToRef;
|
||||
}
|
||||
|
||||
impl<DM: DataMut> GGLWEAutomorphismKey<DM>
|
||||
where
|
||||
Self: GGLWEAutomorphismKeyToMut,
|
||||
{
|
||||
pub fn encrypt_sk<S, BE: Backend>(
|
||||
&mut self,
|
||||
module: &Module<BE>,
|
||||
p: i64,
|
||||
sk: &S,
|
||||
source_xa: &mut Source,
|
||||
source_xe: &mut Source,
|
||||
scratch: &mut Scratch<BE>,
|
||||
) where
|
||||
S: GLWESecretToRef,
|
||||
Module<BE>: GGLWEAutomorphismKeyEncryptSk<BE>,
|
||||
{
|
||||
module.gglwe_automorphism_key_encrypt_sk(self, p, sk, source_xa, source_xe, scratch);
|
||||
}
|
||||
}
|
||||
|
||||
impl<BE: Backend> GGLWEAutomorphismKeyEncryptSk<BE> for Module<BE>
|
||||
where
|
||||
Module<BE>: VecZnxAddScalarInplace
|
||||
+ VecZnxDftAllocBytes
|
||||
+ VecZnxBigNormalize<BE>
|
||||
+ VecZnxDftApply<BE>
|
||||
+ SvpApplyDftToDftInplace<BE>
|
||||
+ VecZnxIdftApplyConsume<BE>
|
||||
+ VecZnxNormalizeTmpBytes
|
||||
+ VecZnxFillUniform
|
||||
+ VecZnxSubInplace
|
||||
+ VecZnxAddInplace
|
||||
+ VecZnxNormalizeInplace<BE>
|
||||
+ VecZnxAddNormal
|
||||
+ VecZnxNormalize<BE>
|
||||
+ VecZnxSub
|
||||
+ SvpPrepare<BE>
|
||||
+ VecZnxSwitchRing
|
||||
+ SvpPPolAllocBytes
|
||||
+ VecZnxAutomorphism,
|
||||
Scratch<BE>: TakeVecZnxDft<BE> + ScratchAvailable + TakeVecZnx + TakeScalarZnx + TakeGLWESecretPrepared<BE>,
|
||||
{
|
||||
fn gglwe_automorphism_key_encrypt_sk<A, B>(
|
||||
&self,
|
||||
res: &mut A,
|
||||
p: i64,
|
||||
sk: &B,
|
||||
source_xa: &mut Source,
|
||||
source_xe: &mut Source,
|
||||
scratch: &mut Scratch<BE>,
|
||||
) where
|
||||
A: GGLWEAutomorphismKeyToMut,
|
||||
B: GLWESecretToRef,
|
||||
{
|
||||
let res: &mut GGLWEAutomorphismKey<&mut [u8]> = &mut res.to_mut();
|
||||
let sk: &GLWESecret<&[u8]> = &sk.to_ref();
|
||||
|
||||
#[cfg(debug_assertions)]
|
||||
{
|
||||
use crate::layouts::{GLWEInfos, LWEInfos};
|
||||
|
||||
assert_eq!(self.n(), sk.n());
|
||||
assert_eq!(self.rank_out(), self.rank_in());
|
||||
assert_eq!(sk.rank(), self.rank_out());
|
||||
assert_eq!(res.n(), sk.n());
|
||||
assert_eq!(res.rank_out(), res.rank_in());
|
||||
assert_eq!(sk.rank(), res.rank_out());
|
||||
assert!(
|
||||
scratch.available() >= GGLWEAutomorphismKey::encrypt_sk_scratch_space(module, self),
|
||||
scratch.available() >= GGLWEAutomorphismKey::encrypt_sk_scratch_space(self, res),
|
||||
"scratch.available(): {} < AutomorphismKey::encrypt_sk_scratch_space: {:?}",
|
||||
scratch.available(),
|
||||
GGLWEAutomorphismKey::encrypt_sk_scratch_space(module, self)
|
||||
GGLWEAutomorphismKey::encrypt_sk_scratch_space(self, res)
|
||||
)
|
||||
}
|
||||
|
||||
let (mut sk_out, scratch_1) = scratch.take_glwe_secret(sk.n(), sk.rank());
|
||||
|
||||
{
|
||||
(0..self.rank_out().into()).for_each(|i| {
|
||||
module.vec_znx_automorphism(
|
||||
module.galois_element_inv(p),
|
||||
(0..res.rank_out().into()).for_each(|i| {
|
||||
self.vec_znx_automorphism(
|
||||
self.galois_element_inv(p),
|
||||
&mut sk_out.data.as_vec_znx_mut(),
|
||||
i,
|
||||
&sk.data.as_vec_znx(),
|
||||
@@ -101,9 +144,9 @@ impl<DataSelf: DataMut> GGLWEAutomorphismKey<DataSelf> {
|
||||
});
|
||||
}
|
||||
|
||||
self.key
|
||||
.encrypt_sk(module, sk, &sk_out, source_xa, source_xe, scratch_1);
|
||||
res.key
|
||||
.encrypt_sk(self, sk, &sk_out, source_xa, source_xe, scratch_1);
|
||||
|
||||
self.p = p;
|
||||
res.p = p;
|
||||
}
|
||||
}
@@ -1,16 +1,19 @@
|
||||
use poulpy_hal::{
|
||||
api::{
|
||||
ScratchAvailable, SvpApplyDftToDftInplace, TakeVecZnx, TakeVecZnxDft, VecZnxAddInplace, VecZnxAddNormal,
|
||||
VecZnxAddScalarInplace, VecZnxBigNormalize, VecZnxDftAllocBytes, VecZnxDftApply, VecZnxFillUniform,
|
||||
VecZnxIdftApplyConsume, VecZnxNormalize, VecZnxNormalizeInplace, VecZnxNormalizeTmpBytes, VecZnxSub, VecZnxSubInplace,
|
||||
ScratchAvailable, TakeVecZnx, TakeVecZnxDft, VecZnxAddScalarInplace, VecZnxDftAllocBytes, VecZnxNormalizeInplace,
|
||||
VecZnxNormalizeTmpBytes,
|
||||
},
|
||||
layouts::{Backend, DataMut, DataRef, Module, ScalarZnx, Scratch, ZnxZero},
|
||||
layouts::{Backend, DataMut, DataRef, Module, ScalarZnx, ScalarZnxToRef, Scratch, ZnxZero},
|
||||
source::Source,
|
||||
};
|
||||
|
||||
use crate::{
|
||||
TakeGLWEPt,
|
||||
layouts::{GGLWECiphertext, GGLWEInfos, GLWECiphertext, GLWEPlaintext, LWEInfos, prepared::GLWESecretPrepared},
|
||||
encryption::glwe_ct::GLWEEncryptSk,
|
||||
layouts::{
|
||||
GGLWECiphertext, GGLWECiphertextToMut, GGLWEInfos, GLWECiphertext, GLWEPlaintext, LWEInfos,
|
||||
prepared::{GLWESecretPrepared, GLWESecretPreparedToRef},
|
||||
},
|
||||
};
|
||||
|
||||
impl GGLWECiphertext<Vec<u8>> {
|
||||
@@ -31,78 +34,89 @@ impl GGLWECiphertext<Vec<u8>> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<DataSelf: DataMut> GGLWECiphertext<DataSelf> {
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn encrypt_sk<DataPt: DataRef, DataSk: DataRef, B: Backend>(
|
||||
&mut self,
|
||||
module: &Module<B>,
|
||||
pt: &ScalarZnx<DataPt>,
|
||||
sk: &GLWESecretPrepared<DataSk, B>,
|
||||
pub trait GGLWEEncryptSk<B: Backend> {
|
||||
fn gglwe_encrypt_sk<R, P, S>(
|
||||
&self,
|
||||
res: &mut R,
|
||||
pt: &P,
|
||||
sk: &S,
|
||||
source_xa: &mut Source,
|
||||
source_xe: &mut Source,
|
||||
scratch: &mut Scratch<B>,
|
||||
) where
|
||||
Module<B>: VecZnxAddScalarInplace
|
||||
+ VecZnxDftAllocBytes
|
||||
+ VecZnxBigNormalize<B>
|
||||
+ VecZnxDftApply<B>
|
||||
+ SvpApplyDftToDftInplace<B>
|
||||
+ VecZnxIdftApplyConsume<B>
|
||||
+ VecZnxNormalizeTmpBytes
|
||||
+ VecZnxFillUniform
|
||||
+ VecZnxSubInplace
|
||||
+ VecZnxAddInplace
|
||||
+ VecZnxNormalizeInplace<B>
|
||||
+ VecZnxAddNormal
|
||||
+ VecZnxNormalize<B>
|
||||
+ VecZnxSub,
|
||||
Scratch<B>: TakeVecZnxDft<B> + ScratchAvailable + TakeVecZnx,
|
||||
R: GGLWECiphertextToMut,
|
||||
P: ScalarZnxToRef,
|
||||
S: GLWESecretPreparedToRef<B>;
|
||||
}
|
||||
|
||||
impl<B: Backend> GGLWEEncryptSk<B> for Module<B>
|
||||
where
|
||||
Module<B>:
|
||||
GLWEEncryptSk<B> + VecZnxNormalizeTmpBytes + VecZnxDftAllocBytes + VecZnxAddScalarInplace + VecZnxNormalizeInplace<B>,
|
||||
Scratch<B>: TakeVecZnxDft<B> + ScratchAvailable + TakeVecZnx,
|
||||
{
|
||||
fn gglwe_encrypt_sk<R, P, S>(
|
||||
&self,
|
||||
res: &mut R,
|
||||
pt: &P,
|
||||
sk: &S,
|
||||
source_xa: &mut Source,
|
||||
source_xe: &mut Source,
|
||||
scratch: &mut Scratch<B>,
|
||||
) where
|
||||
R: GGLWECiphertextToMut,
|
||||
P: ScalarZnxToRef,
|
||||
S: GLWESecretPreparedToRef<B>,
|
||||
{
|
||||
let res: &mut GGLWECiphertext<&mut [u8]> = &mut res.to_mut();
|
||||
let pt: &ScalarZnx<&[u8]> = &pt.to_ref();
|
||||
|
||||
#[cfg(debug_assertions)]
|
||||
{
|
||||
use poulpy_hal::layouts::ZnxInfos;
|
||||
let sk: GLWESecretPrepared<&[u8], B> = sk.to_ref();
|
||||
|
||||
assert_eq!(
|
||||
self.rank_in(),
|
||||
res.rank_in(),
|
||||
pt.cols() as u32,
|
||||
"self.rank_in(): {} != pt.cols(): {}",
|
||||
self.rank_in(),
|
||||
"res.rank_in(): {} != pt.cols(): {}",
|
||||
res.rank_in(),
|
||||
pt.cols()
|
||||
);
|
||||
assert_eq!(
|
||||
self.rank_out(),
|
||||
res.rank_out(),
|
||||
sk.rank(),
|
||||
"self.rank_out(): {} != sk.rank(): {}",
|
||||
self.rank_out(),
|
||||
"res.rank_out(): {} != sk.rank(): {}",
|
||||
res.rank_out(),
|
||||
sk.rank()
|
||||
);
|
||||
assert_eq!(self.n(), sk.n());
|
||||
assert_eq!(res.n(), sk.n());
|
||||
assert_eq!(pt.n() as u32, sk.n());
|
||||
assert!(
|
||||
scratch.available() >= GGLWECiphertext::encrypt_sk_scratch_space(module, self),
|
||||
"scratch.available: {} < GGLWECiphertext::encrypt_sk_scratch_space(module, self.rank()={}, self.size()={}): {}",
|
||||
scratch.available() >= GGLWECiphertext::encrypt_sk_scratch_space(self, res),
|
||||
"scratch.available: {} < GGLWECiphertext::encrypt_sk_scratch_space(self, res.rank()={}, res.size()={}): {}",
|
||||
scratch.available(),
|
||||
self.rank_out(),
|
||||
self.size(),
|
||||
GGLWECiphertext::encrypt_sk_scratch_space(module, self)
|
||||
res.rank_out(),
|
||||
res.size(),
|
||||
GGLWECiphertext::encrypt_sk_scratch_space(self, res)
|
||||
);
|
||||
assert!(
|
||||
self.dnum().0 * self.dsize().0 * self.base2k().0 <= self.k().0,
|
||||
"self.dnum() : {} * self.dsize() : {} * self.base2k() : {} = {} >= self.k() = {}",
|
||||
self.dnum(),
|
||||
self.dsize(),
|
||||
self.base2k(),
|
||||
self.dnum().0 * self.dsize().0 * self.base2k().0,
|
||||
self.k()
|
||||
res.dnum().0 * res.dsize().0 * res.base2k().0 <= res.k().0,
|
||||
"res.dnum() : {} * res.dsize() : {} * res.base2k() : {} = {} >= res.k() = {}",
|
||||
res.dnum(),
|
||||
res.dsize(),
|
||||
res.base2k(),
|
||||
res.dnum().0 * res.dsize().0 * res.base2k().0,
|
||||
res.k()
|
||||
);
|
||||
}
|
||||
|
||||
let dnum: usize = self.dnum().into();
|
||||
let dsize: usize = self.dsize().into();
|
||||
let base2k: usize = self.base2k().into();
|
||||
let rank_in: usize = self.rank_in().into();
|
||||
let dnum: usize = res.dnum().into();
|
||||
let dsize: usize = res.dsize().into();
|
||||
let base2k: usize = res.base2k().into();
|
||||
let rank_in: usize = res.rank_in().into();
|
||||
|
||||
let (mut tmp_pt, scrach_1) = scratch.take_glwe_pt(self);
|
||||
let (mut tmp_pt, scrach_1) = scratch.take_glwe_pt(res);
|
||||
// For each input column (i.e. rank) produces a GGLWE ciphertext of rank_out+1 columns
|
||||
//
|
||||
// Example for ksk rank 2 to rank 3:
|
||||
@@ -114,17 +128,39 @@ impl<DataSelf: DataMut> GGLWECiphertext<DataSelf> {
|
||||
//
|
||||
// (-(a*s) + s0, a)
|
||||
// (-(b*s) + s1, b)
|
||||
(0..rank_in).for_each(|col_i| {
|
||||
(0..dnum).for_each(|row_i| {
|
||||
|
||||
for col_i in 0..rank_in {
|
||||
for row_i in 0..dnum {
|
||||
// Adds the scalar_znx_pt to the i-th limb of the vec_znx_pt
|
||||
tmp_pt.data.zero(); // zeroes for next iteration
|
||||
module.vec_znx_add_scalar_inplace(&mut tmp_pt.data, 0, (dsize - 1) + row_i * dsize, pt, col_i);
|
||||
module.vec_znx_normalize_inplace(base2k, &mut tmp_pt.data, 0, scrach_1);
|
||||
|
||||
// rlwe encrypt of vec_znx_pt into vec_znx_ct
|
||||
self.at_mut(row_i, col_i)
|
||||
.encrypt_sk(module, &tmp_pt, sk, source_xa, source_xe, scrach_1);
|
||||
});
|
||||
});
|
||||
self.vec_znx_add_scalar_inplace(&mut tmp_pt.data, 0, (dsize - 1) + row_i * dsize, pt, col_i);
|
||||
self.vec_znx_normalize_inplace(base2k, &mut tmp_pt.data, 0, scrach_1);
|
||||
self.glwe_encrypt_sk(
|
||||
&mut res.at_mut(row_i, col_i),
|
||||
&tmp_pt,
|
||||
sk,
|
||||
source_xa,
|
||||
source_xe,
|
||||
scrach_1,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<DataSelf: DataMut> GGLWECiphertext<DataSelf> {
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn encrypt_sk<DataPt: DataRef, DataSk: DataRef, B: Backend>(
|
||||
&mut self,
|
||||
module: &Module<B>,
|
||||
pt: &ScalarZnx<DataPt>,
|
||||
sk: &GLWESecretPrepared<DataSk, B>,
|
||||
source_xa: &mut Source,
|
||||
source_xe: &mut Source,
|
||||
scratch: &mut Scratch<B>,
|
||||
) where
|
||||
Module<B>: GGLWEEncryptSk<B>,
|
||||
{
|
||||
module.gglwe_encrypt_sk(self, pt, sk, source_xa, source_xe, scratch);
|
||||
}
|
||||
}
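A comparable sketch for the uncompressed GGLWE path through the new GGLWEEncryptSk trait (helper name and concrete data types are assumptions, not taken from this commit):

fn encrypt_gglwe<B: Backend>(
    module: &Module<B>,
    res: &mut GGLWECiphertext<Vec<u8>>,
    pt: &ScalarZnx<Vec<u8>>,
    sk: &GLWESecretPrepared<Vec<u8>, B>,
    source_xa: &mut Source,
    source_xe: &mut Source,
    scratch: &mut Scratch<B>,
) where
    Module<B>: GGLWEEncryptSk<B>,
{
    // For each of the rank_in input columns and each of the dnum rows, the
    // plaintext is added at limb (dsize - 1) + row * dsize, normalized, and
    // encrypted as one GLWE row of the gadget decomposition.
    module.gglwe_encrypt_sk(res, pt, sk, source_xa, source_xe, scratch);
}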
@@ -1,16 +1,16 @@
|
||||
use poulpy_hal::{
|
||||
api::{
|
||||
ScratchAvailable, SvpApplyDftToDftInplace, TakeVecZnx, TakeVecZnxDft, VecZnxAddInplace, VecZnxAddNormal,
|
||||
VecZnxAddScalarInplace, VecZnxBigNormalize, VecZnxDftAllocBytes, VecZnxDftApply, VecZnxFillUniform,
|
||||
VecZnxIdftApplyConsume, VecZnxNormalize, VecZnxNormalizeInplace, VecZnxNormalizeTmpBytes, VecZnxSub, VecZnxSubInplace,
|
||||
},
|
||||
layouts::{Backend, DataMut, DataRef, Module, ScalarZnx, Scratch, VecZnx, ZnxZero},
|
||||
api::{VecZnxAddScalarInplace, VecZnxDftAllocBytes, VecZnxNormalizeInplace, VecZnxNormalizeTmpBytes},
|
||||
layouts::{Backend, DataMut, DataRef, Module, ScalarZnx, ScalarZnxToRef, Scratch, VecZnx, ZnxZero},
|
||||
source::Source,
|
||||
};
|
||||
|
||||
use crate::{
|
||||
TakeGLWEPt,
|
||||
layouts::{GGSWCiphertext, GGSWInfos, GLWECiphertext, GLWEInfos, LWEInfos, prepared::GLWESecretPrepared},
|
||||
SIGMA, TakeGLWEPt,
|
||||
encryption::glwe_ct::GLWEEncryptSkInternal,
|
||||
layouts::{
|
||||
GGSWCiphertext, GGSWCiphertextToMut, GGSWInfos, GLWECiphertext, GLWEInfos, LWEInfos,
|
||||
prepared::{GLWESecretPrepared, GLWESecretPreparedToRef},
|
||||
},
|
||||
};
|
||||
|
||||
impl GGSWCiphertext<Vec<u8>> {
|
||||
@@ -27,6 +27,85 @@ impl GGSWCiphertext<Vec<u8>> {
|
||||
}
|
||||
}
|
||||
|
||||
pub trait GGSWEncryptSk<B: Backend> {
|
||||
fn ggsw_encrypt_sk<R, P, S>(
|
||||
&self,
|
||||
res: &mut R,
|
||||
pt: &P,
|
||||
sk: &S,
|
||||
source_xa: &mut Source,
|
||||
source_xe: &mut Source,
|
||||
scratch: &mut Scratch<B>,
|
||||
) where
|
||||
R: GGSWCiphertextToMut,
|
||||
P: ScalarZnxToRef,
|
||||
S: GLWESecretPreparedToRef<B>;
|
||||
}
|
||||
|
||||
impl<B: Backend> GGSWEncryptSk<B> for Module<B>
|
||||
where
|
||||
Module<B>: GLWEEncryptSkInternal<B> + VecZnxAddScalarInplace + VecZnxNormalizeInplace<B>,
|
||||
Scratch<B>: TakeGLWEPt<B>,
|
||||
{
|
||||
fn ggsw_encrypt_sk<R, P, S>(
|
||||
&self,
|
||||
res: &mut R,
|
||||
pt: &P,
|
||||
sk: &S,
|
||||
source_xa: &mut Source,
|
||||
source_xe: &mut Source,
|
||||
scratch: &mut Scratch<B>,
|
||||
) where
|
||||
R: GGSWCiphertextToMut,
|
||||
P: ScalarZnxToRef,
|
||||
S: GLWESecretPreparedToRef<B>,
|
||||
{
|
||||
let res: &mut GGSWCiphertext<&mut [u8]> = &mut res.to_mut();
|
||||
let pt: &ScalarZnx<&[u8]> = &pt.to_ref();
|
||||
let sk: &GLWESecretPrepared<&[u8], B> = &sk.to_ref();
|
||||
|
||||
#[cfg(debug_assertions)]
|
||||
{
|
||||
use poulpy_hal::layouts::ZnxInfos;
|
||||
|
||||
assert_eq!(res.rank(), sk.rank());
|
||||
assert_eq!(res.n(), self.n() as u32);
|
||||
assert_eq!(pt.n(), self.n());
|
||||
assert_eq!(sk.n(), self.n() as u32);
|
||||
}
|
||||
|
||||
let k: usize = res.k().into();
|
||||
let base2k: usize = res.base2k().into();
|
||||
let rank: usize = res.rank().into();
|
||||
let dsize: usize = res.dsize().into();
|
||||
let cols: usize = (rank + 1).into();
|
||||
|
||||
let (mut tmp_pt, scratch_1) = scratch.take_glwe_pt(&res.glwe_layout());
|
||||
|
||||
for row_i in 0..res.dnum().into() {
|
||||
tmp_pt.data.zero();
|
||||
// Adds the scalar_znx_pt to the i-th limb of the vec_znx_pt
|
||||
self.vec_znx_add_scalar_inplace(&mut tmp_pt.data, 0, (dsize - 1) + row_i * dsize, pt, 0);
|
||||
self.vec_znx_normalize_inplace(base2k, &mut tmp_pt.data, 0, scratch_1);
|
||||
for col_j in 0..rank + 1 {
|
||||
self.glwe_encrypt_sk_internal(
|
||||
base2k,
|
||||
k,
|
||||
res.at_mut(row_i, col_j).data_mut(),
|
||||
cols,
|
||||
false,
|
||||
Some((&tmp_pt, col_j)),
|
||||
sk,
|
||||
source_xa,
|
||||
source_xe,
|
||||
SIGMA,
|
||||
scratch_1,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<DataSelf: DataMut> GGSWCiphertext<DataSelf> {
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn encrypt_sk<DataPt: DataRef, DataSk: DataRef, B: Backend>(
|
||||
@@ -38,56 +117,8 @@ impl<DataSelf: DataMut> GGSWCiphertext<DataSelf> {
|
||||
source_xe: &mut Source,
|
||||
scratch: &mut Scratch<B>,
|
||||
) where
|
||||
Module<B>: VecZnxAddScalarInplace
|
||||
+ VecZnxDftAllocBytes
|
||||
+ VecZnxBigNormalize<B>
|
||||
+ VecZnxDftApply<B>
|
||||
+ SvpApplyDftToDftInplace<B>
|
||||
+ VecZnxIdftApplyConsume<B>
|
||||
+ VecZnxNormalizeTmpBytes
|
||||
+ VecZnxFillUniform
|
||||
+ VecZnxSubInplace
|
||||
+ VecZnxAddInplace
|
||||
+ VecZnxNormalizeInplace<B>
|
||||
+ VecZnxAddNormal
|
||||
+ VecZnxNormalize<B>
|
||||
+ VecZnxSub,
|
||||
Scratch<B>: TakeVecZnxDft<B> + ScratchAvailable + TakeVecZnx,
|
||||
Module<B>: GGSWEncryptSk<B>,
|
||||
{
|
||||
#[cfg(debug_assertions)]
|
||||
{
|
||||
use poulpy_hal::layouts::ZnxInfos;
|
||||
|
||||
assert_eq!(self.rank(), sk.rank());
|
||||
assert_eq!(self.n(), sk.n());
|
||||
assert_eq!(pt.n() as u32, sk.n());
|
||||
}
|
||||
|
||||
let base2k: usize = self.base2k().into();
|
||||
let rank: usize = self.rank().into();
|
||||
let dsize: usize = self.dsize().into();
|
||||
|
||||
let (mut tmp_pt, scratch_1) = scratch.take_glwe_pt(&self.glwe_layout());
|
||||
|
||||
(0..self.dnum().into()).for_each(|row_i| {
|
||||
tmp_pt.data.zero();
|
||||
|
||||
// Adds the scalar_znx_pt to the i-th limb of the vec_znx_pt
|
||||
module.vec_znx_add_scalar_inplace(&mut tmp_pt.data, 0, (dsize - 1) + row_i * dsize, pt, 0);
|
||||
module.vec_znx_normalize_inplace(base2k, &mut tmp_pt.data, 0, scratch_1);
|
||||
|
||||
(0..rank + 1).for_each(|col_j| {
|
||||
// rlwe encrypt of vec_znx_pt into vec_znx_ct
|
||||
|
||||
self.at_mut(row_i, col_j).encrypt_sk_internal(
|
||||
module,
|
||||
Some((&tmp_pt, col_j)),
|
||||
sk,
|
||||
source_xa,
|
||||
source_xe,
|
||||
scratch_1,
|
||||
);
|
||||
});
|
||||
});
|
||||
module.ggsw_encrypt_sk(self, pt, sk, source_xa, source_xe, scratch);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -5,7 +5,7 @@ use poulpy_hal::{
VecZnxBigAllocBytes, VecZnxBigNormalize, VecZnxDftAllocBytes, VecZnxDftApply, VecZnxFillUniform, VecZnxIdftApplyConsume,
VecZnxNormalize, VecZnxNormalizeInplace, VecZnxNormalizeTmpBytes, VecZnxSub, VecZnxSubInplace,
},
layouts::{Backend, DataMut, DataRef, Module, ScalarZnx, Scratch, VecZnx, VecZnxBig, ZnxInfos, ZnxZero},
layouts::{Backend, DataMut, Module, ScalarZnx, Scratch, VecZnx, VecZnxBig, VecZnxToMut, ZnxInfos, ZnxZero},
source::Source,
};
@@ -13,8 +13,8 @@ use crate::{
|
||||
dist::Distribution,
|
||||
encryption::{SIGMA, SIGMA_BOUND},
|
||||
layouts::{
|
||||
GLWECiphertext, GLWEInfos, GLWEPlaintext, LWEInfos,
|
||||
prepared::{GLWEPublicKeyPrepared, GLWESecretPrepared},
|
||||
GLWECiphertext, GLWECiphertextToMut, GLWEInfos, GLWEPlaintext, GLWEPlaintextToRef, LWEInfos,
|
||||
prepared::{GLWEPublicKeyPrepared, GLWEPublicKeyPreparedToRef, GLWESecretPrepared, GLWESecretPreparedToRef},
|
||||
},
|
||||
};
|
||||
|
||||
@@ -44,126 +44,127 @@ impl GLWECiphertext<Vec<u8>> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<DataSelf: DataMut> GLWECiphertext<DataSelf> {
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn encrypt_sk<DataPt: DataRef, DataSk: DataRef, B: Backend>(
|
||||
impl<D: DataMut> GLWECiphertext<D> {
|
||||
pub fn encrypt_sk<R, P, S, B: Backend>(
|
||||
&mut self,
|
||||
module: &Module<B>,
|
||||
pt: &GLWEPlaintext<DataPt>,
|
||||
sk: &GLWESecretPrepared<DataSk, B>,
|
||||
pt: &P,
|
||||
sk: &S,
|
||||
source_xa: &mut Source,
|
||||
source_xe: &mut Source,
|
||||
scratch: &mut Scratch<B>,
|
||||
) where
|
||||
Module<B>: VecZnxDftAllocBytes
|
||||
+ VecZnxBigNormalize<B>
|
||||
+ VecZnxDftApply<B>
|
||||
+ SvpApplyDftToDftInplace<B>
|
||||
+ VecZnxIdftApplyConsume<B>
|
||||
+ VecZnxNormalizeTmpBytes
|
||||
+ VecZnxFillUniform
|
||||
+ VecZnxSubInplace
|
||||
+ VecZnxAddInplace
|
||||
+ VecZnxNormalizeInplace<B>
|
||||
+ VecZnxAddNormal
|
||||
+ VecZnxNormalize<B>
|
||||
+ VecZnxSub,
|
||||
Scratch<B>: TakeVecZnxDft<B> + ScratchAvailable + TakeVecZnx,
|
||||
P: GLWEPlaintextToRef,
|
||||
S: GLWESecretPreparedToRef<B>,
|
||||
Module<B>: GLWEEncryptSk<B>,
|
||||
{
|
||||
module.glwe_encrypt_sk(self, pt, sk, source_xa, source_xe, scratch);
|
||||
}
|
||||
|
||||
pub fn encrypt_zero_sk<S, B: Backend>(
|
||||
&mut self,
|
||||
module: &Module<B>,
|
||||
sk: &S,
|
||||
source_xa: &mut Source,
|
||||
source_xe: &mut Source,
|
||||
scratch: &mut Scratch<B>,
|
||||
) where
|
||||
S: GLWESecretPreparedToRef<B>,
|
||||
Module<B>: GLWEEncryptZeroSk<B>,
|
||||
{
|
||||
module.glwe_encrypt_zero_sk(self, sk, source_xa, source_xe, scratch);
|
||||
}
|
||||
|
||||
pub fn encrypt_pk<P, K, B: Backend>(
|
||||
&mut self,
|
||||
module: &Module<B>,
|
||||
pt: &P,
|
||||
pk: &K,
|
||||
source_xu: &mut Source,
|
||||
source_xe: &mut Source,
|
||||
scratch: &mut Scratch<B>,
|
||||
) where
|
||||
P: GLWEPlaintextToRef,
|
||||
K: GLWEPublicKeyPreparedToRef<B>,
|
||||
Module<B>: GLWEEncryptPk<B>,
|
||||
{
|
||||
module.glwe_encrypt_pk(self, pt, pk, source_xu, source_xe, scratch);
|
||||
}
|
||||
|
||||
pub fn encrypt_zero_pk<K, B: Backend>(
|
||||
&mut self,
|
||||
module: &Module<B>,
|
||||
pk: &K,
|
||||
source_xu: &mut Source,
|
||||
source_xe: &mut Source,
|
||||
scratch: &mut Scratch<B>,
|
||||
) where
|
||||
K: GLWEPublicKeyPreparedToRef<B>,
|
||||
Module<B>: GLWEEncryptZeroPk<B>,
|
||||
{
|
||||
module.glwe_encrypt_zero_pk(self, pk, source_xu, source_xe, scratch);
|
||||
}
|
||||
}
|
||||
|
||||
pub trait GLWEEncryptSk<B: Backend> {
    fn glwe_encrypt_sk<R, P, S>(
        &self,
        res: &mut R,
        pt: &P,
        sk: &S,
        source_xa: &mut Source,
        source_xe: &mut Source,
        scratch: &mut Scratch<B>,
    ) where
        R: GLWECiphertextToMut,
        P: GLWEPlaintextToRef,
        S: GLWESecretPreparedToRef<B>;
}

impl<B: Backend> GLWEEncryptSk<B> for Module<B>
where
    Module<B>: GLWEEncryptSkInternal<B> + VecZnxNormalizeTmpBytes + VecZnxDftAllocBytes,
    Scratch<B>: ScratchAvailable,
{
    fn glwe_encrypt_sk<R, P, S>(
        &self,
        res: &mut R,
        pt: &P,
        sk: &S,
        source_xa: &mut Source,
        source_xe: &mut Source,
        scratch: &mut Scratch<B>,
    ) where
        R: GLWECiphertextToMut,
        P: GLWEPlaintextToRef,
        S: GLWESecretPreparedToRef<B>,
    {
        let mut res: GLWECiphertext<&mut [u8]> = res.to_mut();
        let pt: GLWEPlaintext<&[u8]> = pt.to_ref();

        #[cfg(debug_assertions)]
        {
            assert_eq!(self.rank(), sk.rank());
            assert_eq!(sk.n(), self.n());
            assert_eq!(pt.n(), self.n());
            let sk: GLWESecretPrepared<&[u8], B> = sk.to_ref();
            assert_eq!(res.rank(), sk.rank());
            assert_eq!(res.n(), self.n() as u32);
            assert_eq!(sk.n(), self.n() as u32);
            assert_eq!(pt.n(), self.n() as u32);
            assert!(
                scratch.available() >= GLWECiphertext::encrypt_sk_scratch_space(module, self),
                scratch.available() >= GLWECiphertext::encrypt_sk_scratch_space(self, &res),
                "scratch.available(): {} < GLWECiphertext::encrypt_sk_scratch_space: {}",
                scratch.available(),
                GLWECiphertext::encrypt_sk_scratch_space(module, self)
                GLWECiphertext::encrypt_sk_scratch_space(self, &res)
            )
        }

        self.encrypt_sk_internal(module, Some((pt, 0)), sk, source_xa, source_xe, scratch);
    }

    pub fn encrypt_zero_sk<DataSk: DataRef, B: Backend>(
        &mut self,
        module: &Module<B>,
        sk: &GLWESecretPrepared<DataSk, B>,
        source_xa: &mut Source,
        source_xe: &mut Source,
        scratch: &mut Scratch<B>,
    ) where
        Module<B>: VecZnxDftAllocBytes
            + VecZnxBigNormalize<B>
            + VecZnxDftApply<B>
            + SvpApplyDftToDftInplace<B>
            + VecZnxIdftApplyConsume<B>
            + VecZnxNormalizeTmpBytes
            + VecZnxFillUniform
            + VecZnxSubInplace
            + VecZnxAddInplace
            + VecZnxNormalizeInplace<B>
            + VecZnxAddNormal
            + VecZnxNormalize<B>
            + VecZnxSub,
        Scratch<B>: TakeVecZnxDft<B> + ScratchAvailable + TakeVecZnx,
    {
        #[cfg(debug_assertions)]
        {
            assert_eq!(self.rank(), sk.rank());
            assert_eq!(sk.n(), self.n());
            assert!(
                scratch.available() >= GLWECiphertext::encrypt_sk_scratch_space(module, self),
                "scratch.available(): {} < GLWECiphertext::encrypt_sk_scratch_space: {}",
                scratch.available(),
                GLWECiphertext::encrypt_sk_scratch_space(module, self)
            )
        }
        self.encrypt_sk_internal(
            module,
            None::<(&GLWEPlaintext<Vec<u8>>, usize)>,
            sk,
            source_xa,
            source_xe,
            scratch,
        );
    }

    #[allow(clippy::too_many_arguments)]
    pub(crate) fn encrypt_sk_internal<DataPt: DataRef, DataSk: DataRef, B: Backend>(
        &mut self,
        module: &Module<B>,
        pt: Option<(&GLWEPlaintext<DataPt>, usize)>,
        sk: &GLWESecretPrepared<DataSk, B>,
        source_xa: &mut Source,
        source_xe: &mut Source,
        scratch: &mut Scratch<B>,
    ) where
        Module<B>: VecZnxDftAllocBytes
            + VecZnxBigNormalize<B>
            + VecZnxDftApply<B>
            + SvpApplyDftToDftInplace<B>
            + VecZnxIdftApplyConsume<B>
            + VecZnxNormalizeTmpBytes
            + VecZnxFillUniform
            + VecZnxSubInplace
            + VecZnxAddInplace
            + VecZnxNormalizeInplace<B>
            + VecZnxAddNormal
            + VecZnxNormalize<B>
            + VecZnxSub,
        Scratch<B>: TakeVecZnxDft<B> + ScratchAvailable + TakeVecZnx,
    {
        let cols: usize = (self.rank() + 1).into();
        glwe_encrypt_sk_internal(
            module,
            self.base2k().into(),
            self.k().into(),
            &mut self.data,
        let cols: usize = (res.rank() + 1).into();
        self.glwe_encrypt_sk_internal(
            res.base2k().into(),
            res.k().into(),
            res.data_mut(),
            cols,
            false,
            pt,
            Some((&pt, 0)),
            sk,
            source_xa,
            source_xe,
@@ -171,46 +172,136 @@ impl<DataSelf: DataMut> GLWECiphertext<DataSelf> {
            scratch,
        );
    }
}

    #[allow(clippy::too_many_arguments)]
    pub fn encrypt_pk<DataPt: DataRef, DataPk: DataRef, B: Backend>(
        &mut self,
        module: &Module<B>,
        pt: &GLWEPlaintext<DataPt>,
        pk: &GLWEPublicKeyPrepared<DataPk, B>,
        source_xu: &mut Source,
pub trait GLWEEncryptZeroSk<B: Backend> {
    fn glwe_encrypt_zero_sk<R, S>(
        &self,
        res: &mut R,
        sk: &S,
        source_xa: &mut Source,
        source_xe: &mut Source,
        scratch: &mut Scratch<B>,
    ) where
        Module<B>: SvpPrepare<B>
            + SvpApplyDftToDft<B>
            + VecZnxIdftApplyConsume<B>
            + VecZnxBigAddNormal<B>
            + VecZnxBigAddSmallInplace<B>
            + VecZnxBigNormalize<B>,
        Scratch<B>: TakeSvpPPol<B> + TakeScalarZnx + TakeVecZnxDft<B>,
        R: GLWECiphertextToMut,
        S: GLWESecretPreparedToRef<B>;
}

impl<B: Backend> GLWEEncryptZeroSk<B> for Module<B>
where
    Module<B>: GLWEEncryptSkInternal<B> + VecZnxNormalizeTmpBytes + VecZnxDftAllocBytes,
    Scratch<B>: ScratchAvailable,
{
    fn glwe_encrypt_zero_sk<R, S>(
        &self,
        res: &mut R,
        sk: &S,
        source_xa: &mut Source,
        source_xe: &mut Source,
        scratch: &mut Scratch<B>,
    ) where
        R: GLWECiphertextToMut,
        S: GLWESecretPreparedToRef<B>,
    {
        self.encrypt_pk_internal::<DataPt, DataPk, B>(module, Some((pt, 0)), pk, source_xu, source_xe, scratch);
        let mut res: GLWECiphertext<&mut [u8]> = res.to_mut();

        #[cfg(debug_assertions)]
        {
            let sk: GLWESecretPrepared<&[u8], B> = sk.to_ref();
            assert_eq!(res.rank(), sk.rank());
            assert_eq!(res.n(), self.n() as u32);
            assert_eq!(sk.n(), self.n() as u32);
            assert!(
                scratch.available() >= GLWECiphertext::encrypt_sk_scratch_space(self, &res),
                "scratch.available(): {} < GLWECiphertext::encrypt_sk_scratch_space: {}",
                scratch.available(),
                GLWECiphertext::encrypt_sk_scratch_space(self, &res)
            )
        }

        let cols: usize = (res.rank() + 1).into();
        self.glwe_encrypt_sk_internal(
            res.base2k().into(),
            res.k().into(),
            res.data_mut(),
            cols,
            false,
            None::<(&GLWEPlaintext<Vec<u8>>, usize)>,
            sk,
            source_xa,
            source_xe,
            SIGMA,
            scratch,
        );
    }
}

    pub fn encrypt_zero_pk<DataPk: DataRef, B: Backend>(
        &mut self,
        module: &Module<B>,
        pk: &GLWEPublicKeyPrepared<DataPk, B>,
pub trait GLWEEncryptPk<B: Backend> {
    fn glwe_encrypt_pk<R, P, K>(
        &self,
        res: &mut R,
        pt: &P,
        pk: &K,
        source_xu: &mut Source,
        source_xe: &mut Source,
        scratch: &mut Scratch<B>,
    ) where
        Module<B>: SvpPrepare<B>
            + SvpApplyDftToDft<B>
            + VecZnxIdftApplyConsume<B>
            + VecZnxBigAddNormal<B>
            + VecZnxBigAddSmallInplace<B>
            + VecZnxBigNormalize<B>,
        Scratch<B>: TakeSvpPPol<B> + TakeScalarZnx + TakeVecZnxDft<B>,
        R: GLWECiphertextToMut,
        P: GLWEPlaintextToRef,
        K: GLWEPublicKeyPreparedToRef<B>;
}

impl<B: Backend> GLWEEncryptPk<B> for Module<B>
where
    Module<B>: GLWEEncryptPkInternal<B>,
{
    fn glwe_encrypt_pk<R, P, K>(
        &self,
        res: &mut R,
        pt: &P,
        pk: &K,
        source_xu: &mut Source,
        source_xe: &mut Source,
        scratch: &mut Scratch<B>,
    ) where
        R: GLWECiphertextToMut,
        P: GLWEPlaintextToRef,
        K: GLWEPublicKeyPreparedToRef<B>,
    {
        self.encrypt_pk_internal::<Vec<u8>, DataPk, B>(
            module,
        self.glwe_encrypt_pk_internal(res, Some((pt, 0)), pk, source_xu, source_xe, scratch);
    }
}

pub trait GLWEEncryptZeroPk<B: Backend> {
    fn glwe_encrypt_zero_pk<R, K>(
        &self,
        res: &mut R,
        pk: &K,
        source_xu: &mut Source,
        source_xe: &mut Source,
        scratch: &mut Scratch<B>,
    ) where
        R: GLWECiphertextToMut,
        K: GLWEPublicKeyPreparedToRef<B>;
}

impl<B: Backend> GLWEEncryptZeroPk<B> for Module<B>
where
    Module<B>: GLWEEncryptPkInternal<B>,
{
    fn glwe_encrypt_zero_pk<R, K>(
        &self,
        res: &mut R,
        pk: &K,
        source_xu: &mut Source,
        source_xe: &mut Source,
        scratch: &mut Scratch<B>,
    ) where
        R: GLWECiphertextToMut,
        K: GLWEPublicKeyPreparedToRef<B>,
    {
        self.glwe_encrypt_pk_internal(
            res,
            None::<(&GLWEPlaintext<Vec<u8>>, usize)>,
            pk,
            source_xu,
@@ -218,45 +309,69 @@ impl<DataSelf: DataMut> GLWECiphertext<DataSelf> {
            scratch,
        );
    }
}

    #[allow(clippy::too_many_arguments)]
    pub(crate) fn encrypt_pk_internal<DataPt: DataRef, DataPk: DataRef, B: Backend>(
        &mut self,
        module: &Module<B>,
        pt: Option<(&GLWEPlaintext<DataPt>, usize)>,
        pk: &GLWEPublicKeyPrepared<DataPk, B>,
pub(crate) trait GLWEEncryptPkInternal<B: Backend> {
    fn glwe_encrypt_pk_internal<R, P, K>(
        &self,
        res: &mut R,
        pt: Option<(&P, usize)>,
        pk: &K,
        source_xu: &mut Source,
        source_xe: &mut Source,
        scratch: &mut Scratch<B>,
    ) where
        Module<B>: SvpPrepare<B>
            + SvpApplyDftToDft<B>
            + VecZnxIdftApplyConsume<B>
            + VecZnxBigAddNormal<B>
            + VecZnxBigAddSmallInplace<B>
            + VecZnxBigNormalize<B>,
        Scratch<B>: TakeSvpPPol<B> + TakeScalarZnx + TakeVecZnxDft<B>,
        R: GLWECiphertextToMut,
        P: GLWEPlaintextToRef,
        K: GLWEPublicKeyPreparedToRef<B>;
}

impl<B: Backend> GLWEEncryptPkInternal<B> for Module<B>
where
    Module<B>: SvpPrepare<B>
        + SvpApplyDftToDft<B>
        + VecZnxIdftApplyConsume<B>
        + VecZnxBigAddNormal<B>
        + VecZnxBigAddSmallInplace<B>
        + VecZnxBigNormalize<B>,
    Scratch<B>: TakeSvpPPol<B> + TakeScalarZnx + TakeVecZnxDft<B>,
{
    fn glwe_encrypt_pk_internal<R, P, K>(
        &self,
        res: &mut R,
        pt: Option<(&P, usize)>,
        pk: &K,
        source_xu: &mut Source,
        source_xe: &mut Source,
        scratch: &mut Scratch<B>,
    ) where
        R: GLWECiphertextToMut,
        P: GLWEPlaintextToRef,
        K: GLWEPublicKeyPreparedToRef<B>,
    {
        let res: &mut GLWECiphertext<&mut [u8]> = &mut res.to_mut();
        let pk: &GLWEPublicKeyPrepared<&[u8], B> = &pk.to_ref();

        #[cfg(debug_assertions)]
        {
            assert_eq!(self.base2k(), pk.base2k());
            assert_eq!(self.n(), pk.n());
            assert_eq!(self.rank(), pk.rank());
            assert_eq!(res.base2k(), pk.base2k());
            assert_eq!(res.n(), pk.n());
            assert_eq!(res.rank(), pk.rank());
            if let Some((pt, _)) = pt {
                assert_eq!(pt.base2k(), pk.base2k());
                assert_eq!(pt.n(), pk.n());
                assert_eq!(pt.to_ref().base2k(), pk.base2k());
                assert_eq!(pt.to_ref().n(), pk.n());
            }
        }

        let base2k: usize = pk.base2k().into();
        let size_pk: usize = pk.size();
        let cols: usize = (self.rank() + 1).into();
        let cols: usize = (res.rank() + 1).into();

        // Generates u according to the underlying secret distribution.
        let (mut u_dft, scratch_1) = scratch.take_svp_ppol(self.n().into(), 1);
        let (mut u_dft, scratch_1) = scratch.take_svp_ppol(res.n().into(), 1);

        {
            let (mut u, _) = scratch_1.take_scalar_znx(self.n().into(), 1);
            let (mut u, _) = scratch_1.take_scalar_znx(res.n().into(), 1);
            match pk.dist {
                Distribution::NONE => panic!(
                    "invalid public key: SecretDistribution::NONE, ensure it has been correctly intialized through \
@@ -270,20 +385,20 @@ impl<DataSelf: DataMut> GLWECiphertext<DataSelf> {
                Distribution::ZERO => {}
            }

            module.svp_prepare(&mut u_dft, 0, &u, 0);
            self.svp_prepare(&mut u_dft, 0, &u, 0);
        }

        // ct[i] = pk[i] * u + ei (+ m if col = i)
        (0..cols).for_each(|i| {
            let (mut ci_dft, scratch_2) = scratch_1.take_vec_znx_dft(self.n().into(), 1, size_pk);
            let (mut ci_dft, scratch_2) = scratch_1.take_vec_znx_dft(res.n().into(), 1, size_pk);
            // ci_dft = DFT(u) * DFT(pk[i])
            module.svp_apply_dft_to_dft(&mut ci_dft, 0, &u_dft, 0, &pk.data, i);
            self.svp_apply_dft_to_dft(&mut ci_dft, 0, &u_dft, 0, &pk.data, i);

            // ci_big = u * p[i]
            let mut ci_big = module.vec_znx_idft_apply_consume(ci_dft);
            let mut ci_big = self.vec_znx_idft_apply_consume(ci_dft);

            // ci_big = u * pk[i] + e
            module.vec_znx_big_add_normal(
            self.vec_znx_big_add_normal(
                base2k,
                &mut ci_big,
                0,
@@ -297,30 +412,37 @@ impl<DataSelf: DataMut> GLWECiphertext<DataSelf> {
            if let Some((pt, col)) = pt
                && col == i
            {
                module.vec_znx_big_add_small_inplace(&mut ci_big, 0, &pt.data, 0);
                self.vec_znx_big_add_small_inplace(&mut ci_big, 0, &pt.to_ref().data, 0);
            }

            // ct[i] = norm(ci_big)
            module.vec_znx_big_normalize(base2k, &mut self.data, i, base2k, &ci_big, 0, scratch_2);
            self.vec_znx_big_normalize(base2k, &mut res.data, i, base2k, &ci_big, 0, scratch_2);
        });
    }
}

#[allow(clippy::too_many_arguments)]
pub(crate) fn glwe_encrypt_sk_internal<DataCt: DataMut, DataPt: DataRef, DataSk: DataRef, B: Backend>(
    module: &Module<B>,
    base2k: usize,
    k: usize,
    ct: &mut VecZnx<DataCt>,
    cols: usize,
    compressed: bool,
    pt: Option<(&GLWEPlaintext<DataPt>, usize)>,
    sk: &GLWESecretPrepared<DataSk, B>,
    source_xa: &mut Source,
    source_xe: &mut Source,
    sigma: f64,
    scratch: &mut Scratch<B>,
) where
pub(crate) trait GLWEEncryptSkInternal<B: Backend> {
    fn glwe_encrypt_sk_internal<R, P, S>(
        &self,
        base2k: usize,
        k: usize,
        res: &mut R,
        cols: usize,
        compressed: bool,
        pt: Option<(&P, usize)>,
        sk: &S,
        source_xa: &mut Source,
        source_xe: &mut Source,
        sigma: f64,
        scratch: &mut Scratch<B>,
    ) where
        R: VecZnxToMut,
        P: GLWEPlaintextToRef,
        S: GLWESecretPreparedToRef<B>;
}

impl<B: Backend> GLWEEncryptSkInternal<B> for Module<B>
where
    Module<B>: VecZnxDftAllocBytes
        + VecZnxBigNormalize<B>
        + VecZnxDftApply<B>
@@ -336,72 +458,94 @@ pub(crate) fn glwe_encrypt_sk_internal<DataCt: DataMut, DataPt: DataRef, DataSk:
        + VecZnxSub,
    Scratch<B>: TakeVecZnxDft<B> + ScratchAvailable + TakeVecZnx,
{
    #[cfg(debug_assertions)]
    fn glwe_encrypt_sk_internal<R, P, S>(
        &self,
        base2k: usize,
        k: usize,
        res: &mut R,
        cols: usize,
        compressed: bool,
        pt: Option<(&P, usize)>,
        sk: &S,
        source_xa: &mut Source,
        source_xe: &mut Source,
        sigma: f64,
        scratch: &mut Scratch<B>,
    ) where
        R: VecZnxToMut,
        P: GLWEPlaintextToRef,
        S: GLWESecretPreparedToRef<B>,
    {
        if compressed {
            assert_eq!(
                ct.cols(),
                1,
                "invalid ciphertext: compressed tag=true but #cols={} != 1",
                ct.cols()
            )
        }
    }
        let ct: &mut VecZnx<&mut [u8]> = &mut res.to_mut();
        let sk: GLWESecretPrepared<&[u8], B> = sk.to_ref();

    let size: usize = ct.size();

    let (mut c0, scratch_1) = scratch.take_vec_znx(ct.n(), 1, size);
    c0.zero();

    {
        let (mut ci, scratch_2) = scratch_1.take_vec_znx(ct.n(), 1, size);

        // ct[i] = uniform
        // ct[0] -= c[i] * s[i],
        (1..cols).for_each(|i| {
            let col_ct: usize = if compressed { 0 } else { i };

            // ct[i] = uniform (+ pt)
            module.vec_znx_fill_uniform(base2k, ct, col_ct, source_xa);

            let (mut ci_dft, scratch_3) = scratch_2.take_vec_znx_dft(ct.n(), 1, size);

            // ci = ct[i] - pt
            // i.e. we act as we sample ct[i] already as uniform + pt
            // and if there is a pt, then we subtract it before applying DFT
            if let Some((pt, col)) = pt {
                if i == col {
                    module.vec_znx_sub(&mut ci, 0, ct, col_ct, &pt.data, 0);
                    module.vec_znx_normalize_inplace(base2k, &mut ci, 0, scratch_3);
                    module.vec_znx_dft_apply(1, 0, &mut ci_dft, 0, &ci, 0);
                } else {
                    module.vec_znx_dft_apply(1, 0, &mut ci_dft, 0, ct, col_ct);
                }
            } else {
                module.vec_znx_dft_apply(1, 0, &mut ci_dft, 0, ct, col_ct);
        #[cfg(debug_assertions)]
        {
            if compressed {
                assert_eq!(
                    ct.cols(),
                    1,
                    "invalid ciphertext: compressed tag=true but #cols={} != 1",
                    ct.cols()
                )
            }
        }

            module.svp_apply_dft_to_dft_inplace(&mut ci_dft, 0, &sk.data, i - 1);
            let ci_big: VecZnxBig<&mut [u8], B> = module.vec_znx_idft_apply_consume(ci_dft);
        let size: usize = ct.size();

            // use c[0] as buffer, which is overwritten later by the normalization step
            module.vec_znx_big_normalize(base2k, &mut ci, 0, base2k, &ci_big, 0, scratch_3);
        let (mut c0, scratch_1) = scratch.take_vec_znx(ct.n(), 1, size);
        c0.zero();

            // c0_tmp = -c[i] * s[i] (use c[0] as buffer)
            module.vec_znx_sub_inplace(&mut c0, 0, &ci, 0);
        });
        {
            let (mut ci, scratch_2) = scratch_1.take_vec_znx(ct.n(), 1, size);

            // ct[i] = uniform
            // ct[0] -= c[i] * s[i],
            (1..cols).for_each(|i| {
                let col_ct: usize = if compressed { 0 } else { i };

                // ct[i] = uniform (+ pt)
                self.vec_znx_fill_uniform(base2k, ct, col_ct, source_xa);

                let (mut ci_dft, scratch_3) = scratch_2.take_vec_znx_dft(ct.n(), 1, size);

                // ci = ct[i] - pt
                // i.e. we act as we sample ct[i] already as uniform + pt
                // and if there is a pt, then we subtract it before applying DFT
                if let Some((pt, col)) = pt {
                    if i == col {
                        self.vec_znx_sub(&mut ci, 0, ct, col_ct, &pt.to_ref().data, 0);
                        self.vec_znx_normalize_inplace(base2k, &mut ci, 0, scratch_3);
                        self.vec_znx_dft_apply(1, 0, &mut ci_dft, 0, &ci, 0);
                    } else {
                        self.vec_znx_dft_apply(1, 0, &mut ci_dft, 0, ct, col_ct);
                    }
                } else {
                    self.vec_znx_dft_apply(1, 0, &mut ci_dft, 0, ct, col_ct);
                }

                self.svp_apply_dft_to_dft_inplace(&mut ci_dft, 0, &sk.data, i - 1);
                let ci_big: VecZnxBig<&mut [u8], B> = self.vec_znx_idft_apply_consume(ci_dft);

                // use c[0] as buffer, which is overwritten later by the normalization step
                self.vec_znx_big_normalize(base2k, &mut ci, 0, base2k, &ci_big, 0, scratch_3);

                // c0_tmp = -c[i] * s[i] (use c[0] as buffer)
                self.vec_znx_sub_inplace(&mut c0, 0, &ci, 0);
            });
        }

        // c[0] += e
        self.vec_znx_add_normal(base2k, &mut c0, 0, k, source_xe, sigma, SIGMA_BOUND);

        // c[0] += m if col = 0
        if let Some((pt, col)) = pt
            && col == 0
        {
            self.vec_znx_add_inplace(&mut c0, 0, &pt.to_ref().data, 0);
        }

        // c[0] = norm(c[0])
        self.vec_znx_normalize(base2k, ct, 0, base2k, &c0, 0, scratch_1);
    }

    // c[0] += e
    module.vec_znx_add_normal(base2k, &mut c0, 0, k, source_xe, sigma, SIGMA_BOUND);

    // c[0] += m if col = 0
    if let Some((pt, col)) = pt
        && col == 0
    {
        module.vec_znx_add_inplace(&mut c0, 0, &pt.data, 0);
    }

    // c[0] = norm(c[0])
    module.vec_znx_normalize(base2k, ct, 0, base2k, &c0, 0, scratch_1);
}

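For context, a minimal usage sketch of the trait-based entry points introduced above. This is not taken from the commit: the helper name `encrypt_demo`, the choice of owned `Vec<u8>` buffers, and the assumption that the owned layout types implement the corresponding `ToMut`/`ToRef` traits are all illustrative; only the `GLWEEncryptSk` and `GLWEEncryptZeroSk` signatures come from the code above.

// Sketch only (assumptions): GLWECiphertext<Vec<u8>>: GLWECiphertextToMut,
// GLWEPlaintext<Vec<u8>>: GLWEPlaintextToRef and
// GLWESecretPrepared<Vec<u8>, B>: GLWESecretPreparedToRef<B>.
fn encrypt_demo<B: Backend>(
    module: &Module<B>,
    ct: &mut GLWECiphertext<Vec<u8>>,
    pt: &GLWEPlaintext<Vec<u8>>,
    sk: &GLWESecretPrepared<Vec<u8>, B>,
    source_xa: &mut Source,
    source_xe: &mut Source,
    scratch: &mut Scratch<B>,
) where
    Module<B>: GLWEEncryptSk<B> + GLWEEncryptZeroSk<B>,
{
    // Encrypt `pt` under `sk`: the trait method forwards to
    // glwe_encrypt_sk_internal with the plaintext attached to column 0.
    module.glwe_encrypt_sk(ct, pt, sk, source_xa, source_xe, scratch);

    // Encryption of zero (fresh mask and noise only), as used for
    // public-key generation below.
    module.glwe_encrypt_zero_sk(ct, sk, source_xa, source_xe, scratch);
}
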
@@ -1,50 +1,43 @@
use poulpy_hal::{
    api::{
        ScratchOwnedAlloc, ScratchOwnedBorrow, SvpApplyDftToDftInplace, VecZnxAddInplace, VecZnxAddNormal, VecZnxBigNormalize,
        VecZnxDftAllocBytes, VecZnxDftApply, VecZnxFillUniform, VecZnxIdftApplyConsume, VecZnxNormalize, VecZnxNormalizeInplace,
        VecZnxNormalizeTmpBytes, VecZnxSub, VecZnxSubInplace,
    },
    api::{ScratchOwnedAlloc, ScratchOwnedBorrow, VecZnxDftAllocBytes, VecZnxNormalizeTmpBytes},
    layouts::{Backend, DataMut, DataRef, Module, ScratchOwned},
    oep::{ScratchAvailableImpl, ScratchOwnedAllocImpl, ScratchOwnedBorrowImpl, TakeVecZnxDftImpl, TakeVecZnxImpl},
    source::Source,
};

use crate::layouts::{GLWECiphertext, GLWEPublicKey, prepared::GLWESecretPrepared};
use crate::{
    encryption::glwe_ct::GLWEEncryptZeroSk,
    layouts::{
        GLWECiphertext, GLWEPublicKey, GLWEPublicKeyToMut,
        prepared::{GLWESecretPrepared, GLWESecretPreparedToRef},
    },
};

impl<D: DataMut> GLWEPublicKey<D> {
    pub fn generate_from_sk<S: DataRef, B>(
        &mut self,
        module: &Module<B>,
        sk: &GLWESecretPrepared<S, B>,
        source_xa: &mut Source,
        source_xe: &mut Source,
    ) where
        Module<B>:,
        Module<B>: VecZnxDftAllocBytes
            + VecZnxBigNormalize<B>
            + VecZnxDftApply<B>
            + SvpApplyDftToDftInplace<B>
            + VecZnxIdftApplyConsume<B>
            + VecZnxNormalizeTmpBytes
            + VecZnxFillUniform
            + VecZnxSubInplace
            + VecZnxAddInplace
            + VecZnxNormalizeInplace<B>
            + VecZnxAddNormal
            + VecZnxNormalize<B>
            + VecZnxSub,
        B: Backend
            + ScratchOwnedAllocImpl<B>
            + ScratchOwnedBorrowImpl<B>
            + TakeVecZnxDftImpl<B>
            + ScratchAvailableImpl<B>
            + TakeVecZnxImpl<B>,
pub trait GLWEPublicKeyGenerate<B: Backend> {
    fn glwe_public_key_generate<R, S>(&self, res: &mut R, sk: &S, source_xa: &mut Source, source_xe: &mut Source)
    where
        R: GLWEPublicKeyToMut,
        S: GLWESecretPreparedToRef<B>;
}

impl<B: Backend> GLWEPublicKeyGenerate<B> for Module<B>
where
    Module<B>: GLWEEncryptZeroSk<B> + VecZnxNormalizeTmpBytes + VecZnxDftAllocBytes,
    ScratchOwned<B>: ScratchOwnedAlloc<B> + ScratchOwnedBorrow<B>,
{
    fn glwe_public_key_generate<R, S>(&self, res: &mut R, sk: &S, source_xa: &mut Source, source_xe: &mut Source)
    where
        R: GLWEPublicKeyToMut,
        S: GLWESecretPreparedToRef<B>,
    {
        let res: &mut GLWEPublicKey<&mut [u8]> = &mut res.to_mut();
        let sk: &GLWESecretPrepared<&[u8], B> = &sk.to_ref();

        #[cfg(debug_assertions)]
        {
            use crate::{Distribution, layouts::LWEInfos};

            assert_eq!(self.n(), sk.n());
            assert_eq!(res.n(), self.n() as u32);
            assert_eq!(sk.n(), self.n() as u32);

            if sk.dist == Distribution::NONE {
                panic!("invalid sk: SecretDistribution::NONE")
@@ -52,10 +45,25 @@ impl<D: DataMut> GLWEPublicKey<D> {
            }

        // Its ok to allocate scratch space here since pk is usually generated only once.
        let mut scratch: ScratchOwned<B> = ScratchOwned::alloc(GLWECiphertext::encrypt_sk_scratch_space(module, self));
        let mut scratch: ScratchOwned<B> = ScratchOwned::alloc(GLWECiphertext::encrypt_sk_scratch_space(self, res));

        let mut tmp: GLWECiphertext<Vec<u8>> = GLWECiphertext::alloc(self);
        tmp.encrypt_zero_sk(module, sk, source_xa, source_xe, scratch.borrow());
        self.dist = sk.dist;
        let mut tmp: GLWECiphertext<Vec<u8>> = GLWECiphertext::alloc(res);

        tmp.encrypt_zero_sk(self, sk, source_xa, source_xe, scratch.borrow());
        res.dist = sk.dist;
    }
}

impl<D: DataMut> GLWEPublicKey<D> {
    pub fn generate<S: DataRef, B: Backend>(
        &mut self,
        module: &Module<B>,
        sk: &GLWESecretPrepared<S, B>,
        source_xa: &mut Source,
        source_xe: &mut Source,
    ) where
        Module<B>: GLWEPublicKeyGenerate<B>,
    {
        module.glwe_public_key_generate(self, sk, source_xa, source_xe);
    }
}

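Similarly, a minimal sketch of driving the new `GLWEPublicKeyGenerate` trait directly instead of the inherent `generate` wrapper. Again this is not part of the commit: the function name, the concrete `Vec<u8>` data types, and the assumption that `GLWEPublicKey<Vec<u8>>: GLWEPublicKeyToMut` are illustrative; only the trait signature comes from the diff above.

fn pk_demo<B: Backend>(
    module: &Module<B>,
    pk: &mut GLWEPublicKey<Vec<u8>>,
    sk: &GLWESecretPrepared<Vec<u8>, B>,
    source_xa: &mut Source,
    source_xe: &mut Source,
) where
    Module<B>: GLWEPublicKeyGenerate<B>,
{
    // Allocates its own scratch internally, encrypts zero under `sk`, and
    // copies the secret's distribution tag onto the public key.
    module.glwe_public_key_generate(pk, sk, source_xa, source_xe);
}
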
@@ -11,7 +11,5 @@ mod lwe_ct;
mod lwe_ksk;
mod lwe_to_glwe_ksk;

pub(crate) use glwe_ct::glwe_encrypt_sk_internal;

pub const SIGMA: f64 = 3.2;
pub(crate) const SIGMA_BOUND: f64 = 6.0 * SIGMA;
