Mirror of https://github.com/arnaucube/poulpy.git (synced 2026-02-10 13:16:44 +01:00)
updated repo for publishing (#74)
Committed by GitHub · parent 0be569eca0 · commit 62eb87cc07
poulpy-core/src/encryption/compressed/gglwe_atk.rs (new file, 104 lines)
@@ -0,0 +1,104 @@
use poulpy_backend::hal::{
    api::{
        ScratchAvailable, SvpApplyInplace, SvpPPolAllocBytes, SvpPrepare, TakeScalarZnx, TakeVecZnx, TakeVecZnxDft,
        VecZnxAddInplace, VecZnxAddNormal, VecZnxAddScalarInplace, VecZnxAutomorphism, VecZnxBigNormalize, VecZnxDftAllocBytes,
        VecZnxDftFromVecZnx, VecZnxDftToVecZnxBigConsume, VecZnxFillUniform, VecZnxNormalize, VecZnxNormalizeInplace,
        VecZnxNormalizeTmpBytes, VecZnxSub, VecZnxSubABInplace, VecZnxSwithcDegree,
    },
    layouts::{Backend, DataMut, DataRef, Module, Scratch},
    source::Source,
};

use crate::{
    TakeGLWESecret, TakeGLWESecretPrepared,
    layouts::{
        GLWESecret,
        compressed::{GGLWEAutomorphismKeyCompressed, GGLWESwitchingKeyCompressed},
    },
};

impl GGLWEAutomorphismKeyCompressed<Vec<u8>> {
    pub fn encrypt_sk_scratch_space<B: Backend>(module: &Module<B>, n: usize, basek: usize, k: usize, rank: usize) -> usize
    where
        Module<B>: VecZnxNormalizeTmpBytes + VecZnxDftAllocBytes + VecZnxNormalizeTmpBytes + SvpPPolAllocBytes,
    {
        GGLWESwitchingKeyCompressed::encrypt_sk_scratch_space(module, n, basek, k, rank, rank) + GLWESecret::bytes_of(n, rank)
    }
}

impl<DataSelf: DataMut> GGLWEAutomorphismKeyCompressed<DataSelf> {
    #[allow(clippy::too_many_arguments)]
    pub fn encrypt_sk<DataSk: DataRef, B: Backend>(
        &mut self,
        module: &Module<B>,
        p: i64,
        sk: &GLWESecret<DataSk>,
        seed_xa: [u8; 32],
        source_xe: &mut Source,
        sigma: f64,
        scratch: &mut Scratch<B>,
    ) where
        Module<B>: VecZnxAutomorphism
            + SvpPrepare<B>
            + SvpPPolAllocBytes
            + VecZnxSwithcDegree
            + VecZnxDftAllocBytes
            + VecZnxBigNormalize<B>
            + VecZnxDftFromVecZnx<B>
            + SvpApplyInplace<B>
            + VecZnxDftToVecZnxBigConsume<B>
            + VecZnxNormalizeTmpBytes
            + VecZnxFillUniform
            + VecZnxSubABInplace
            + VecZnxAddInplace
            + VecZnxNormalizeInplace<B>
            + VecZnxAddNormal
            + VecZnxNormalize<B>
            + VecZnxSub
            + VecZnxAddScalarInplace,
        Scratch<B>: TakeVecZnxDft<B> + ScratchAvailable + TakeVecZnx + TakeScalarZnx + TakeGLWESecretPrepared<B>,
    {
        #[cfg(debug_assertions)]
        {
            use crate::layouts::Infos;

            assert_eq!(self.n(), sk.n());
            assert_eq!(self.rank_out(), self.rank_in());
            assert_eq!(sk.rank(), self.rank());
            assert!(
                scratch.available()
                    >= GGLWEAutomorphismKeyCompressed::encrypt_sk_scratch_space(
                        module,
                        sk.n(),
                        self.basek(),
                        self.k(),
                        self.rank()
                    ),
                "scratch.available(): {} < AutomorphismKey::encrypt_sk_scratch_space(module, self.rank()={}, self.size()={}): {}",
                scratch.available(),
                self.rank(),
                self.size(),
                GGLWEAutomorphismKeyCompressed::encrypt_sk_scratch_space(module, sk.n(), self.basek(), self.k(), self.rank())
            )
        }

        let (mut sk_out, scratch_1) = scratch.take_glwe_secret(sk.n(), sk.rank());

        {
            (0..self.rank()).for_each(|i| {
                module.vec_znx_automorphism(
                    module.galois_element_inv(p),
                    &mut sk_out.data.as_vec_znx_mut(),
                    i,
                    &sk.data.as_vec_znx(),
                    i,
                );
            });
        }

        self.key
            .encrypt_sk(module, sk, &sk_out, seed_xa, source_xe, sigma, scratch_1);

        self.p = p;
    }
}
poulpy-core/src/encryption/compressed/gglwe_ct.rs (new file, 137 lines)
@@ -0,0 +1,137 @@
use poulpy_backend::hal::{
    api::{
        ScratchAvailable, SvpApplyInplace, TakeVecZnx, TakeVecZnxDft, VecZnxAddInplace, VecZnxAddNormal, VecZnxAddScalarInplace,
        VecZnxBigNormalize, VecZnxDftAllocBytes, VecZnxDftFromVecZnx, VecZnxDftToVecZnxBigConsume, VecZnxFillUniform,
        VecZnxNormalize, VecZnxNormalizeInplace, VecZnxNormalizeTmpBytes, VecZnxSub, VecZnxSubABInplace, ZnxZero,
    },
    layouts::{Backend, DataMut, DataRef, Module, ScalarZnx, Scratch},
    source::Source,
};

use crate::{
    TakeGLWEPt,
    encryption::glwe_encrypt_sk_internal,
    layouts::{GGLWECiphertext, Infos, compressed::GGLWECiphertextCompressed, prepared::GLWESecretPrepared},
};

impl GGLWECiphertextCompressed<Vec<u8>> {
    pub fn encrypt_sk_scratch_space<B: Backend>(module: &Module<B>, n: usize, basek: usize, k: usize) -> usize
    where
        Module<B>: VecZnxNormalizeTmpBytes + VecZnxDftAllocBytes + VecZnxNormalizeTmpBytes,
    {
        GGLWECiphertext::encrypt_sk_scratch_space(module, n, basek, k)
    }
}

impl<D: DataMut> GGLWECiphertextCompressed<D> {
    #[allow(clippy::too_many_arguments)]
    pub fn encrypt_sk<DataPt: DataRef, DataSk: DataRef, B: Backend>(
        &mut self,
        module: &Module<B>,
        pt: &ScalarZnx<DataPt>,
        sk: &GLWESecretPrepared<DataSk, B>,
        seed: [u8; 32],
        source_xe: &mut Source,
        sigma: f64,
        scratch: &mut Scratch<B>,
    ) where
        Module<B>: VecZnxAddScalarInplace
            + VecZnxDftAllocBytes
            + VecZnxBigNormalize<B>
            + VecZnxDftFromVecZnx<B>
            + SvpApplyInplace<B>
            + VecZnxDftToVecZnxBigConsume<B>
            + VecZnxNormalizeTmpBytes
            + VecZnxFillUniform
            + VecZnxSubABInplace
            + VecZnxAddInplace
            + VecZnxNormalizeInplace<B>
            + VecZnxAddNormal
            + VecZnxNormalize<B>
            + VecZnxSub,
        Scratch<B>: TakeVecZnxDft<B> + ScratchAvailable + TakeVecZnx,
    {
        #[cfg(debug_assertions)]
        {
            use poulpy_backend::hal::api::ZnxInfos;

            assert_eq!(
                self.rank_in(),
                pt.cols(),
                "self.rank_in(): {} != pt.cols(): {}",
                self.rank_in(),
                pt.cols()
            );
            assert_eq!(
                self.rank_out(),
                sk.rank(),
                "self.rank_out(): {} != sk.rank(): {}",
                self.rank_out(),
                sk.rank()
            );
            assert_eq!(self.n(), sk.n());
            assert_eq!(pt.n(), sk.n());
            assert!(
                scratch.available()
                    >= GGLWECiphertextCompressed::encrypt_sk_scratch_space(module, sk.n(), self.basek(), self.k()),
                "scratch.available: {} < GGLWECiphertext::encrypt_sk_scratch_space(module, self.rank()={}, self.size()={}): {}",
                scratch.available(),
                self.rank(),
                self.size(),
                GGLWECiphertextCompressed::encrypt_sk_scratch_space(module, sk.n(), self.basek(), self.k())
            );
            assert!(
                self.rows() * self.digits() * self.basek() <= self.k(),
                "self.rows() : {} * self.digits() : {} * self.basek() : {} = {} >= self.k() = {}",
                self.rows(),
                self.digits(),
                self.basek(),
                self.rows() * self.digits() * self.basek(),
                self.k()
            );
        }

        let rows: usize = self.rows();
        let digits: usize = self.digits();
        let basek: usize = self.basek();
        let k: usize = self.k();
        let rank_in: usize = self.rank_in();
        let cols: usize = self.rank_out() + 1;

        let mut source_xa = Source::new(seed);

        let (mut tmp_pt, scrach_1) = scratch.take_glwe_pt(sk.n(), basek, k);
        (0..rank_in).for_each(|col_i| {
            (0..rows).for_each(|row_i| {
                // Adds the scalar_znx_pt to the i-th limb of the vec_znx_pt
                tmp_pt.data.zero(); // zeroes for next iteration
                module.vec_znx_add_scalar_inplace(
                    &mut tmp_pt.data,
                    0,
                    (digits - 1) + row_i * digits,
                    pt,
                    col_i,
                );
                module.vec_znx_normalize_inplace(basek, &mut tmp_pt.data, 0, scrach_1);

                let (seed, mut source_xa_tmp) = source_xa.branch();
                self.seed[col_i * rows + row_i] = seed;

                glwe_encrypt_sk_internal(
                    module,
                    self.basek(),
                    self.k(),
                    &mut self.at_mut(row_i, col_i).data,
                    cols,
                    true,
                    Some((&tmp_pt, 0)),
                    sk,
                    &mut source_xa_tmp,
                    source_xe,
                    sigma,
                    scrach_1,
                );
            });
        });
    }
}
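Aside (not part of the diff): the compressed encryptors above do not store the uniform `a` polynomials; each row keeps only a 32-byte seed obtained by branching a master seed, in the layout written by `GGLWECiphertextCompressed::encrypt_sk` (`self.seed[col_i * rows + row_i]`). Below is a minimal sketch of that derivation pattern, assuming only the `Source::new` / `Source::branch` API visible in the imports above; `derive_row_seeds` is a hypothetical helper for illustration, not part of poulpy.

use poulpy_backend::hal::source::Source;

// Hypothetical helper (illustration only): reproduce the per-row seed
// layout used by GGLWECiphertextCompressed::encrypt_sk, where the seed
// for (col_i, row_i) is stored at index col_i * rows + row_i.
fn derive_row_seeds(master: [u8; 32], rank_in: usize, rows: usize) -> Vec<[u8; 32]> {
    let mut source_xa = Source::new(master);
    let mut seeds = vec![[0u8; 32]; rank_in * rows];
    for col_i in 0..rank_in {
        for row_i in 0..rows {
            // branch() yields a fresh 32-byte seed plus a child source;
            // the encryptor uses the child source to sample the `a` part.
            let (seed, _source_xa_tmp) = source_xa.branch();
            seeds[col_i * rows + row_i] = seed;
        }
    }
    seeds
}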
poulpy-core/src/encryption/compressed/gglwe_ksk.rs (new file, 128 lines)
@@ -0,0 +1,128 @@
use poulpy_backend::hal::{
    api::{
        ScratchAvailable, SvpApplyInplace, SvpPPolAllocBytes, SvpPrepare, TakeScalarZnx, TakeVecZnx, TakeVecZnxDft,
        VecZnxAddInplace, VecZnxAddNormal, VecZnxAddScalarInplace, VecZnxBigNormalize, VecZnxDftAllocBytes, VecZnxDftFromVecZnx,
        VecZnxDftToVecZnxBigConsume, VecZnxFillUniform, VecZnxNormalize, VecZnxNormalizeInplace, VecZnxNormalizeTmpBytes,
        VecZnxSub, VecZnxSubABInplace, VecZnxSwithcDegree,
    },
    layouts::{Backend, DataMut, DataRef, Module, ScalarZnx, Scratch},
    source::Source,
};

use crate::{
    TakeGLWESecretPrepared,
    layouts::{GGLWECiphertext, GLWESecret, compressed::GGLWESwitchingKeyCompressed, prepared::GLWESecretPrepared},
};

impl GGLWESwitchingKeyCompressed<Vec<u8>> {
    pub fn encrypt_sk_scratch_space<B: Backend>(
        module: &Module<B>,
        n: usize,
        basek: usize,
        k: usize,
        rank_in: usize,
        rank_out: usize,
    ) -> usize
    where
        Module<B>: VecZnxNormalizeTmpBytes + VecZnxDftAllocBytes + VecZnxNormalizeTmpBytes + SvpPPolAllocBytes,
    {
        (GGLWECiphertext::encrypt_sk_scratch_space(module, n, basek, k) | ScalarZnx::alloc_bytes(n, 1))
            + ScalarZnx::alloc_bytes(n, rank_in)
            + GLWESecretPrepared::bytes_of(module, n, rank_out)
    }
}

impl<DataSelf: DataMut> GGLWESwitchingKeyCompressed<DataSelf> {
    #[allow(clippy::too_many_arguments)]
    pub fn encrypt_sk<DataSkIn: DataRef, DataSkOut: DataRef, B: Backend>(
        &mut self,
        module: &Module<B>,
        sk_in: &GLWESecret<DataSkIn>,
        sk_out: &GLWESecret<DataSkOut>,
        seed_xa: [u8; 32],
        source_xe: &mut Source,
        sigma: f64,
        scratch: &mut Scratch<B>,
    ) where
        Module<B>: SvpPrepare<B>
            + SvpPPolAllocBytes
            + VecZnxSwithcDegree
            + VecZnxDftAllocBytes
            + VecZnxBigNormalize<B>
            + VecZnxDftFromVecZnx<B>
            + SvpApplyInplace<B>
            + VecZnxDftToVecZnxBigConsume<B>
            + VecZnxNormalizeTmpBytes
            + VecZnxFillUniform
            + VecZnxSubABInplace
            + VecZnxAddInplace
            + VecZnxNormalizeInplace<B>
            + VecZnxAddNormal
            + VecZnxNormalize<B>
            + VecZnxSub
            + VecZnxAddScalarInplace,
        Scratch<B>: TakeVecZnxDft<B> + ScratchAvailable + TakeVecZnx + TakeScalarZnx + TakeGLWESecretPrepared<B>,
    {
        #[cfg(debug_assertions)]
        {
            use crate::layouts::{GGLWESwitchingKey, Infos};

            assert!(sk_in.n() <= module.n());
            assert!(sk_out.n() <= module.n());
            assert!(
                scratch.available()
                    >= GGLWESwitchingKey::encrypt_sk_scratch_space(
                        module,
                        sk_out.n(),
                        self.basek(),
                        self.k(),
                        self.rank_in(),
                        self.rank_out()
                    ),
                "scratch.available()={} < GLWESwitchingKey::encrypt_sk_scratch_space={}",
                scratch.available(),
                GGLWESwitchingKey::encrypt_sk_scratch_space(
                    module,
                    sk_out.n(),
                    self.basek(),
                    self.k(),
                    self.rank_in(),
                    self.rank_out()
                )
            )
        }

        let n: usize = sk_in.n().max(sk_out.n());

        let (mut sk_in_tmp, scratch1) = scratch.take_scalar_znx(n, sk_in.rank());
        (0..sk_in.rank()).for_each(|i| {
            module.vec_znx_switch_degree(
                &mut sk_in_tmp.as_vec_znx_mut(),
                i,
                &sk_in.data.as_vec_znx(),
                i,
            );
        });

        let (mut sk_out_tmp, scratch2) = scratch1.take_glwe_secret_prepared(n, sk_out.rank());
        {
            let (mut tmp, _) = scratch2.take_scalar_znx(n, 1);
            (0..sk_out.rank()).for_each(|i| {
                module.vec_znx_switch_degree(&mut tmp.as_vec_znx_mut(), 0, &sk_out.data.as_vec_znx(), i);
                module.svp_prepare(&mut sk_out_tmp.data, i, &tmp, 0);
            });
        }

        self.key.encrypt_sk(
            module,
            &sk_in_tmp,
            &sk_out_tmp,
            seed_xa,
            source_xe,
            sigma,
            scratch2,
        );
        self.sk_in_n = sk_in.n();
        self.sk_out_n = sk_out.n();
    }
}
poulpy-core/src/encryption/compressed/gglwe_tsk.rs (new file, 111 lines)
@@ -0,0 +1,111 @@
use poulpy_backend::hal::{
    api::{
        ScratchAvailable, SvpApply, SvpApplyInplace, SvpPPolAlloc, SvpPPolAllocBytes, SvpPrepare, TakeScalarZnx, TakeVecZnx,
        TakeVecZnxBig, TakeVecZnxDft, VecZnxAddInplace, VecZnxAddNormal, VecZnxAddScalarInplace, VecZnxBigAllocBytes,
        VecZnxBigNormalize, VecZnxDftAllocBytes, VecZnxDftFromVecZnx, VecZnxDftToVecZnxBigConsume, VecZnxDftToVecZnxBigTmpA,
        VecZnxFillUniform, VecZnxNormalize, VecZnxNormalizeInplace, VecZnxNormalizeTmpBytes, VecZnxSub, VecZnxSubABInplace,
        VecZnxSwithcDegree,
    },
    layouts::{Backend, DataMut, DataRef, Module, Scratch},
    source::Source,
};

use crate::{
    TakeGLWESecret, TakeGLWESecretPrepared,
    layouts::{GGLWETensorKey, GLWESecret, Infos, compressed::GGLWETensorKeyCompressed, prepared::Prepare},
};

impl GGLWETensorKeyCompressed<Vec<u8>> {
    pub fn encrypt_sk_scratch_space<B: Backend>(module: &Module<B>, n: usize, basek: usize, k: usize, rank: usize) -> usize
    where
        Module<B>:
            SvpPPolAllocBytes + VecZnxNormalizeTmpBytes + VecZnxDftAllocBytes + VecZnxNormalizeTmpBytes + VecZnxBigAllocBytes,
    {
        GGLWETensorKey::encrypt_sk_scratch_space(module, n, basek, k, rank)
    }
}

impl<DataSelf: DataMut> GGLWETensorKeyCompressed<DataSelf> {
    pub fn encrypt_sk<DataSk: DataRef, B: Backend>(
        &mut self,
        module: &Module<B>,
        sk: &GLWESecret<DataSk>,
        seed_xa: [u8; 32],
        source_xe: &mut Source,
        sigma: f64,
        scratch: &mut Scratch<B>,
    ) where
        Module<B>: SvpApply<B>
            + VecZnxDftToVecZnxBigTmpA<B>
            + VecZnxDftAllocBytes
            + VecZnxBigNormalize<B>
            + VecZnxDftFromVecZnx<B>
            + SvpApplyInplace<B>
            + VecZnxDftToVecZnxBigConsume<B>
            + VecZnxNormalizeTmpBytes
            + VecZnxFillUniform
            + VecZnxSubABInplace
            + VecZnxAddInplace
            + VecZnxNormalizeInplace<B>
            + VecZnxAddNormal
            + VecZnxNormalize<B>
            + VecZnxSub
            + VecZnxSwithcDegree
            + VecZnxAddScalarInplace
            + SvpPrepare<B>
            + SvpPPolAllocBytes
            + SvpPPolAlloc<B>,
        Scratch<B>: ScratchAvailable
            + TakeScalarZnx
            + TakeVecZnxDft<B>
            + TakeGLWESecretPrepared<B>
            + ScratchAvailable
            + TakeVecZnx
            + TakeVecZnxBig<B>,
    {
        #[cfg(debug_assertions)]
        {
            assert_eq!(self.rank(), sk.rank());
            assert_eq!(self.n(), sk.n());
        }

        let n: usize = sk.n();
        let rank: usize = self.rank();

        let (mut sk_dft_prep, scratch1) = scratch.take_glwe_secret_prepared(n, rank);
        sk_dft_prep.prepare(module, sk, scratch1);

        let (mut sk_dft, scratch2) = scratch1.take_vec_znx_dft(n, rank, 1);

        (0..rank).for_each(|i| {
            module.vec_znx_dft_from_vec_znx(1, 0, &mut sk_dft, i, &sk.data.as_vec_znx(), i);
        });

        let (mut sk_ij_big, scratch3) = scratch2.take_vec_znx_big(n, 1, 1);
        let (mut sk_ij, scratch4) = scratch3.take_glwe_secret(n, 1);
        let (mut sk_ij_dft, scratch5) = scratch4.take_vec_znx_dft(n, 1, 1);

        let mut source_xa: Source = Source::new(seed_xa);

        (0..rank).for_each(|i| {
            (i..rank).for_each(|j| {
                module.svp_apply(&mut sk_ij_dft, 0, &sk_dft_prep.data, j, &sk_dft, i);

                module.vec_znx_dft_to_vec_znx_big_tmp_a(&mut sk_ij_big, 0, &mut sk_ij_dft, 0);
                module.vec_znx_big_normalize(
                    self.basek(),
                    &mut sk_ij.data.as_vec_znx_mut(),
                    0,
                    &sk_ij_big,
                    0,
                    scratch5,
                );

                let (seed_xa_tmp, _) = source_xa.branch();

                self.at_mut(i, j)
                    .encrypt_sk(module, &sk_ij, sk, seed_xa_tmp, source_xe, sigma, scratch5);
            });
        })
    }
}
poulpy-core/src/encryption/compressed/ggsw_ct.rs (new file, 106 lines)
@@ -0,0 +1,106 @@
use poulpy_backend::hal::{
    api::{
        ScratchAvailable, SvpApplyInplace, TakeVecZnx, TakeVecZnxDft, VecZnxAddInplace, VecZnxAddNormal, VecZnxAddScalarInplace,
        VecZnxBigNormalize, VecZnxDftAllocBytes, VecZnxDftFromVecZnx, VecZnxDftToVecZnxBigConsume, VecZnxFillUniform,
        VecZnxNormalize, VecZnxNormalizeInplace, VecZnxNormalizeTmpBytes, VecZnxSub, VecZnxSubABInplace, ZnxZero,
    },
    layouts::{Backend, DataMut, DataRef, Module, ScalarZnx, Scratch},
    source::Source,
};

use crate::{
    TakeGLWEPt,
    encryption::glwe_encrypt_sk_internal,
    layouts::{GGSWCiphertext, Infos, compressed::GGSWCiphertextCompressed, prepared::GLWESecretPrepared},
};

impl GGSWCiphertextCompressed<Vec<u8>> {
    pub fn encrypt_sk_scratch_space<B: Backend>(module: &Module<B>, n: usize, basek: usize, k: usize, rank: usize) -> usize
    where
        Module<B>: VecZnxNormalizeTmpBytes + VecZnxDftAllocBytes,
    {
        GGSWCiphertext::encrypt_sk_scratch_space(module, n, basek, k, rank)
    }
}

impl<DataSelf: DataMut> GGSWCiphertextCompressed<DataSelf> {
    #[allow(clippy::too_many_arguments)]
    pub fn encrypt_sk<DataPt: DataRef, DataSk: DataRef, B: Backend>(
        &mut self,
        module: &Module<B>,
        pt: &ScalarZnx<DataPt>,
        sk: &GLWESecretPrepared<DataSk, B>,
        seed_xa: [u8; 32],
        source_xe: &mut Source,
        sigma: f64,
        scratch: &mut Scratch<B>,
    ) where
        Module<B>: VecZnxAddScalarInplace
            + VecZnxDftAllocBytes
            + VecZnxBigNormalize<B>
            + VecZnxDftFromVecZnx<B>
            + SvpApplyInplace<B>
            + VecZnxDftToVecZnxBigConsume<B>
            + VecZnxNormalizeTmpBytes
            + VecZnxFillUniform
            + VecZnxSubABInplace
            + VecZnxAddInplace
            + VecZnxNormalizeInplace<B>
            + VecZnxAddNormal
            + VecZnxNormalize<B>
            + VecZnxSub,
        Scratch<B>: TakeVecZnxDft<B> + ScratchAvailable + TakeVecZnx,
    {
        #[cfg(debug_assertions)]
        {
            use poulpy_backend::hal::api::ZnxInfos;

            assert_eq!(self.rank(), sk.rank());
            assert_eq!(self.n(), sk.n());
            assert_eq!(pt.n(), sk.n());
        }

        let basek: usize = self.basek();
        let k: usize = self.k();
        let rank: usize = self.rank();
        let cols: usize = rank + 1;
        let digits: usize = self.digits();

        let (mut tmp_pt, scratch_1) = scratch.take_glwe_pt(self.n(), basek, k);

        let mut source = Source::new(seed_xa);

        self.seed = vec![[0u8; 32]; self.rows() * cols];

        (0..self.rows()).for_each(|row_i| {
            tmp_pt.data.zero();

            // Adds the scalar_znx_pt to the i-th limb of the vec_znx_pt
            module.vec_znx_add_scalar_inplace(&mut tmp_pt.data, 0, (digits - 1) + row_i * digits, pt, 0);
            module.vec_znx_normalize_inplace(basek, &mut tmp_pt.data, 0, scratch_1);

            (0..rank + 1).for_each(|col_j| {
                // rlwe encrypt of vec_znx_pt into vec_znx_ct

                let (seed, mut source_xa_tmp) = source.branch();

                self.seed[row_i * cols + col_j] = seed;

                glwe_encrypt_sk_internal(
                    module,
                    self.basek(),
                    self.k(),
                    &mut self.at_mut(row_i, col_j).data,
                    cols,
                    true,
                    Some((&tmp_pt, col_j)),
                    sk,
                    &mut source_xa_tmp,
                    source_xe,
                    sigma,
                    scratch_1,
                );
            });
        });
    }
}
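Aside (not part of the diff): both the GGLWE and GGSW encryptors place the scalar plaintext in one limb of the gadget decomposition per row, using the same index arithmetic that appears inline above. A self-contained restatement of that computation; `plaintext_limb` is a hypothetical name used only for illustration.

// Hypothetical helper (illustration only): the limb that receives the
// plaintext for row `row_i` when each row spans `digits` digits,
// matching `(digits - 1) + row_i * digits` in gglwe_ct.rs and ggsw_ct.rs.
fn plaintext_limb(row_i: usize, digits: usize) -> usize {
    (digits - 1) + row_i * digits
}

// Example: with digits = 2, rows 0, 1, 2 map to limbs 1, 3, 5.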
poulpy-core/src/encryption/compressed/glwe_ct.rs (new file, 107 lines)
@@ -0,0 +1,107 @@
use poulpy_backend::hal::{
    api::{
        ScratchAvailable, SvpApplyInplace, TakeVecZnx, TakeVecZnxDft, VecZnxAddInplace, VecZnxAddNormal, VecZnxBigNormalize,
        VecZnxDftAllocBytes, VecZnxDftFromVecZnx, VecZnxDftToVecZnxBigConsume, VecZnxFillUniform, VecZnxNormalize,
        VecZnxNormalizeInplace, VecZnxNormalizeTmpBytes, VecZnxSub, VecZnxSubABInplace,
    },
    layouts::{Backend, DataMut, DataRef, Module, Scratch},
    source::Source,
};

use crate::{
    encryption::glwe_ct::glwe_encrypt_sk_internal,
    layouts::{GLWECiphertext, GLWEPlaintext, Infos, compressed::GLWECiphertextCompressed, prepared::GLWESecretPrepared},
};

impl GLWECiphertextCompressed<Vec<u8>> {
    pub fn encrypt_sk_scratch_space<B: Backend>(module: &Module<B>, n: usize, basek: usize, k: usize) -> usize
    where
        Module<B>: VecZnxNormalizeTmpBytes + VecZnxDftAllocBytes,
    {
        GLWECiphertext::encrypt_sk_scratch_space(module, n, basek, k)
    }
}

impl<D: DataMut> GLWECiphertextCompressed<D> {
    #[allow(clippy::too_many_arguments)]
    pub fn encrypt_sk<DataPt: DataRef, DataSk: DataRef, B: Backend>(
        &mut self,
        module: &Module<B>,
        pt: &GLWEPlaintext<DataPt>,
        sk: &GLWESecretPrepared<DataSk, B>,
        seed_xa: [u8; 32],
        source_xe: &mut Source,
        sigma: f64,
        scratch: &mut Scratch<B>,
    ) where
        Module<B>: VecZnxDftAllocBytes
            + VecZnxBigNormalize<B>
            + VecZnxDftFromVecZnx<B>
            + SvpApplyInplace<B>
            + VecZnxDftToVecZnxBigConsume<B>
            + VecZnxNormalizeTmpBytes
            + VecZnxFillUniform
            + VecZnxSubABInplace
            + VecZnxAddInplace
            + VecZnxNormalizeInplace<B>
            + VecZnxAddNormal
            + VecZnxNormalize<B>
            + VecZnxSub,
        Scratch<B>: TakeVecZnxDft<B> + ScratchAvailable + TakeVecZnx,
    {
        self.encrypt_sk_internal(
            module,
            Some((pt, 0)),
            sk,
            seed_xa,
            source_xe,
            sigma,
            scratch,
        );
    }

    #[allow(clippy::too_many_arguments)]
    pub(crate) fn encrypt_sk_internal<DataPt: DataRef, DataSk: DataRef, B: Backend>(
        &mut self,
        module: &Module<B>,
        pt: Option<(&GLWEPlaintext<DataPt>, usize)>,
        sk: &GLWESecretPrepared<DataSk, B>,
        seed_xa: [u8; 32],
        source_xe: &mut Source,
        sigma: f64,
        scratch: &mut Scratch<B>,
    ) where
        Module<B>: VecZnxDftAllocBytes
            + VecZnxBigNormalize<B>
            + VecZnxDftFromVecZnx<B>
            + SvpApplyInplace<B>
            + VecZnxDftToVecZnxBigConsume<B>
            + VecZnxNormalizeTmpBytes
            + VecZnxFillUniform
            + VecZnxSubABInplace
            + VecZnxAddInplace
            + VecZnxNormalizeInplace<B>
            + VecZnxAddNormal
            + VecZnxNormalize<B>
            + VecZnxSub,
        Scratch<B>: TakeVecZnxDft<B> + ScratchAvailable + TakeVecZnx,
    {
        let mut source_xa = Source::new(seed_xa);
        let cols: usize = self.rank() + 1;
        glwe_encrypt_sk_internal(
            module,
            self.basek(),
            self.k(),
            &mut self.data,
            cols,
            true,
            pt,
            sk,
            &mut source_xa,
            source_xe,
            sigma,
            scratch,
        );
        self.seed = seed_xa;
    }
}
poulpy-core/src/encryption/compressed/mod.rs (new file, 6 lines)
@@ -0,0 +1,6 @@
mod gglwe_atk;
mod gglwe_ct;
mod gglwe_ksk;
mod gglwe_tsk;
mod ggsw_ct;
mod glwe_ct;