Code organisation for glwe

Jean-Philippe Bossuat
2025-06-12 15:46:05 +02:00
parent ec4253bb1c
commit 989ea077a9
30 changed files with 1305 additions and 1229 deletions


@@ -0,0 +1,121 @@
use backend::{FFT64, Module, Scratch, VecZnxOps};
use crate::{AutomorphismKey, GLWECiphertext};
impl GLWECiphertext<Vec<u8>> {
pub fn automorphism_scratch_space(
module: &Module<FFT64>,
basek: usize,
k_out: usize,
k_in: usize,
k_ksk: usize,
digits: usize,
rank: usize,
) -> usize {
Self::keyswitch_scratch_space(module, basek, k_out, k_in, k_ksk, digits, rank, rank)
}
pub fn automorphism_inplace_scratch_space(
module: &Module<FFT64>,
basek: usize,
k_out: usize,
k_ksk: usize,
digits: usize,
rank: usize,
) -> usize {
Self::keyswitch_inplace_scratch_space(module, basek, k_out, k_ksk, digits, rank)
}
}
impl<DataSelf: AsRef<[u8]> + AsMut<[u8]>> GLWECiphertext<DataSelf> {
pub fn automorphism<DataLhs: AsRef<[u8]>, DataRhs: AsRef<[u8]>>(
&mut self,
module: &Module<FFT64>,
lhs: &GLWECiphertext<DataLhs>,
rhs: &AutomorphismKey<DataRhs, FFT64>,
scratch: &mut Scratch,
) {
self.keyswitch(module, lhs, &rhs.key, scratch);
(0..self.rank() + 1).for_each(|i| {
module.vec_znx_automorphism_inplace(rhs.p(), &mut self.data, i);
})
}
pub fn automorphism_inplace<DataRhs: AsRef<[u8]>>(
&mut self,
module: &Module<FFT64>,
rhs: &AutomorphismKey<DataRhs, FFT64>,
scratch: &mut Scratch,
) {
self.keyswitch_inplace(module, &rhs.key, scratch);
(0..self.rank() + 1).for_each(|i| {
module.vec_znx_automorphism_inplace(rhs.p(), &mut self.data, i);
})
}
pub fn automorphism_add<DataLhs: AsRef<[u8]>, DataRhs: AsRef<[u8]>>(
&mut self,
module: &Module<FFT64>,
lhs: &GLWECiphertext<DataLhs>,
rhs: &AutomorphismKey<DataRhs, FFT64>,
scratch: &mut Scratch,
) {
Self::keyswitch_private::<_, _, 1>(self, rhs.p(), module, lhs, &rhs.key, scratch);
}
pub fn automorphism_add_inplace<DataRhs: AsRef<[u8]>>(
&mut self,
module: &Module<FFT64>,
rhs: &AutomorphismKey<DataRhs, FFT64>,
scratch: &mut Scratch,
) {
unsafe {
let self_ptr: *mut GLWECiphertext<DataSelf> = self as *mut GLWECiphertext<DataSelf>;
Self::keyswitch_private::<_, _, 1>(self, rhs.p(), module, &*self_ptr, &rhs.key, scratch);
}
}
pub fn automorphism_sub_ab<DataLhs: AsRef<[u8]>, DataRhs: AsRef<[u8]>>(
&mut self,
module: &Module<FFT64>,
lhs: &GLWECiphertext<DataLhs>,
rhs: &AutomorphismKey<DataRhs, FFT64>,
scratch: &mut Scratch,
) {
Self::keyswitch_private::<_, _, 2>(self, rhs.p(), module, lhs, &rhs.key, scratch);
}
pub fn automorphism_sub_ab_inplace<DataRhs: AsRef<[u8]>>(
&mut self,
module: &Module<FFT64>,
rhs: &AutomorphismKey<DataRhs, FFT64>,
scratch: &mut Scratch,
) {
unsafe {
let self_ptr: *mut GLWECiphertext<DataSelf> = self as *mut GLWECiphertext<DataSelf>;
Self::keyswitch_private::<_, _, 2>(self, rhs.p(), module, &*self_ptr, &rhs.key, scratch);
}
}
pub fn automorphism_sub_ba<DataLhs: AsRef<[u8]>, DataRhs: AsRef<[u8]>>(
&mut self,
module: &Module<FFT64>,
lhs: &GLWECiphertext<DataLhs>,
rhs: &AutomorphismKey<DataRhs, FFT64>,
scratch: &mut Scratch,
) {
Self::keyswitch_private::<_, _, 3>(self, rhs.p(), module, lhs, &rhs.key, scratch);
}
pub fn automorphism_sub_ba_inplace<DataRhs: AsRef<[u8]>>(
&mut self,
module: &Module<FFT64>,
rhs: &AutomorphismKey<DataRhs, FFT64>,
scratch: &mut Scratch,
) {
unsafe {
let self_ptr: *mut GLWECiphertext<DataSelf> = self as *mut GLWECiphertext<DataSelf>;
Self::keyswitch_private::<_, _, 3>(self, rhs.p(), module, &*self_ptr, &rhs.key, scratch);
}
}
}
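
A minimal usage sketch for the out-of-place automorphism above (illustrative, not part of the commit). It assumes the ciphertext, the automorphism key and the module are created elsewhere, that these types are re-exported at the crate root, and that ScratchOwned::borrow() (as used in public_key.rs) yields the &mut Scratch these methods expect.

use backend::{FFT64, Module, ScratchOwned};
use crate::{AutomorphismKey, GLWECiphertext, Infos};

// Hypothetical helper: applies `key` to `ct_in` and returns a freshly allocated result.
// The parameters (basek, k_out, k_ksk, digits, rank) are assumed consistent with the inputs.
fn apply_automorphism(
    module: &Module<FFT64>,
    ct_in: &GLWECiphertext<Vec<u8>>,
    key: &AutomorphismKey<Vec<u8>, FFT64>,
    basek: usize,
    k_out: usize,
    k_ksk: usize,
    digits: usize,
    rank: usize,
) -> GLWECiphertext<Vec<u8>> {
    let mut ct_out = GLWECiphertext::alloc(module, basek, k_out, rank);
    // Scratch sized with the helper above, which delegates to keyswitch_scratch_space.
    let mut scratch = ScratchOwned::new(GLWECiphertext::automorphism_scratch_space(
        module, basek, k_out, ct_in.k(), k_ksk, digits, rank,
    ));
    ct_out.automorphism(module, ct_in, key, scratch.borrow());
    ct_out
}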

core/src/glwe/ciphertext.rs Normal file

@@ -0,0 +1,115 @@
use backend::{
Backend, FFT64, Module, VecZnx, VecZnxAlloc, VecZnxBigAlloc, VecZnxBigScratch, VecZnxDftAlloc, VecZnxDftOps, VecZnxToMut,
VecZnxToRef,
};
use crate::{FourierGLWECiphertext, GLWEOps, Infos, SetMetaData, div_ceil};
pub struct GLWECiphertext<C> {
pub data: VecZnx<C>,
pub basek: usize,
pub k: usize,
}
impl GLWECiphertext<Vec<u8>> {
pub fn alloc<B: Backend>(module: &Module<B>, basek: usize, k: usize, rank: usize) -> Self {
Self {
data: module.new_vec_znx(rank + 1, div_ceil(k, basek)),
basek,
k,
}
}
pub fn bytes_of(module: &Module<FFT64>, basek: usize, k: usize, rank: usize) -> usize {
module.bytes_of_vec_znx(rank + 1, div_ceil(k, basek))
}
}
impl<T> Infos for GLWECiphertext<T> {
type Inner = VecZnx<T>;
fn inner(&self) -> &Self::Inner {
&self.data
}
fn basek(&self) -> usize {
self.basek
}
fn k(&self) -> usize {
self.k
}
}
impl<T> GLWECiphertext<T> {
pub fn rank(&self) -> usize {
self.cols() - 1
}
}
impl<C: AsRef<[u8]>> GLWECiphertext<C> {
#[allow(dead_code)]
pub(crate) fn dft<R: AsMut<[u8]> + AsRef<[u8]>>(&self, module: &Module<FFT64>, res: &mut FourierGLWECiphertext<R, FFT64>) {
#[cfg(debug_assertions)]
{
assert_eq!(self.rank(), res.rank());
assert_eq!(self.basek(), res.basek())
}
(0..self.rank() + 1).for_each(|i| {
module.vec_znx_dft(1, 0, &mut res.data, i, &self.data, i);
})
}
}
impl GLWECiphertext<Vec<u8>> {
pub fn decrypt_scratch_space(module: &Module<FFT64>, basek: usize, k: usize) -> usize {
let size: usize = div_ceil(k, basek);
(module.vec_znx_big_normalize_tmp_bytes() | module.bytes_of_vec_znx_dft(1, size)) + module.bytes_of_vec_znx_big(1, size)
}
}
impl<DataSelf: AsMut<[u8]> + AsRef<[u8]>> SetMetaData for GLWECiphertext<DataSelf> {
fn set_k(&mut self, k: usize) {
self.k = k
}
fn set_basek(&mut self, basek: usize) {
self.basek = basek
}
}
pub trait GLWECiphertextToRef {
fn to_ref(&self) -> GLWECiphertext<&[u8]>;
}
impl<D: AsRef<[u8]>> GLWECiphertextToRef for GLWECiphertext<D> {
fn to_ref(&self) -> GLWECiphertext<&[u8]> {
GLWECiphertext {
data: self.data.to_ref(),
basek: self.basek,
k: self.k,
}
}
}
pub trait GLWECiphertextToMut {
fn to_mut(&mut self) -> GLWECiphertext<&mut [u8]>;
}
impl<D: AsMut<[u8]> + AsRef<[u8]>> GLWECiphertextToMut for GLWECiphertext<D> {
fn to_mut(&mut self) -> GLWECiphertext<&mut [u8]> {
GLWECiphertext {
data: self.data.to_mut(),
basek: self.basek,
k: self.k,
}
}
}
impl<D> GLWEOps for GLWECiphertext<D>
where
D: AsRef<[u8]> + AsMut<[u8]>,
GLWECiphertext<D>: GLWECiphertextToMut + Infos + SetMetaData,
{
}


@@ -0,0 +1,56 @@
use backend::{FFT64, Module, ScalarZnxDftOps, Scratch, VecZnxBigOps, VecZnxDftOps, ZnxZero};
use crate::{GLWECiphertext, GLWEPlaintext, GLWESecret, Infos};
impl<DataSelf: AsRef<[u8]>> GLWECiphertext<DataSelf> {
pub fn clone(&self) -> GLWECiphertext<Vec<u8>> {
GLWECiphertext {
data: self.data.clone(),
basek: self.basek(),
k: self.k(),
}
}
pub fn decrypt<DataPt: AsMut<[u8]> + AsRef<[u8]>, DataSk: AsRef<[u8]>>(
&self,
module: &Module<FFT64>,
pt: &mut GLWEPlaintext<DataPt>,
sk: &GLWESecret<DataSk, FFT64>,
scratch: &mut Scratch,
) {
#[cfg(debug_assertions)]
{
assert_eq!(self.rank(), sk.rank());
assert_eq!(self.n(), module.n());
assert_eq!(pt.n(), module.n());
assert_eq!(sk.n(), module.n());
}
let cols: usize = self.rank() + 1;
let (mut c0_big, scratch_1) = scratch.tmp_vec_znx_big(module, 1, self.size()); // TODO optimize size when pt << ct
c0_big.zero();
{
(1..cols).for_each(|i| {
// ci_dft = DFT(a[i]) * DFT(s[i])
let (mut ci_dft, _) = scratch_1.tmp_vec_znx_dft(module, 1, self.size()); // TODO optimize size when pt << ct
module.vec_znx_dft(1, 0, &mut ci_dft, 0, &self.data, i);
module.svp_apply_inplace(&mut ci_dft, 0, &sk.data_fourier, i - 1);
let ci_big = module.vec_znx_idft_consume(ci_dft);
// c0_big += a[i] * s[i]
module.vec_znx_big_add_inplace(&mut c0_big, 0, &ci_big, 0);
});
}
// c0_big = (a * s) + (-a * s + m + e) = BIG(m + e)
module.vec_znx_big_add_small_inplace(&mut c0_big, 0, &self.data, 0);
// pt = norm(BIG(m + e))
module.vec_znx_big_normalize(self.basek(), &mut pt.data, 0, &mut c0_big, 0, scratch_1);
pt.basek = self.basek();
pt.k = pt.k().min(self.k());
}
}
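
A small illustrative sketch (not from the commit) of driving decrypt, sizing the scratch with decrypt_scratch_space from ciphertext.rs; the secret key and ciphertext are assumed to come from elsewhere, and crate-root re-exports plus ScratchOwned::borrow() returning &mut Scratch are assumptions.

use backend::{FFT64, Module, ScratchOwned};
use crate::{GLWECiphertext, GLWEPlaintext, GLWESecret, Infos};

// Hypothetical helper: decrypts `ct` under `sk` into a freshly allocated plaintext.
// `pt_k` is the requested plaintext precision (decrypt clamps it to ct.k()).
fn decrypt_to_new_pt(
    module: &Module<FFT64>,
    ct: &GLWECiphertext<Vec<u8>>,
    sk: &GLWESecret<Vec<u8>, FFT64>,
    pt_k: usize,
) -> GLWEPlaintext<Vec<u8>> {
    let mut pt = GLWEPlaintext::alloc(module, ct.basek(), pt_k);
    let mut scratch = ScratchOwned::new(GLWECiphertext::decrypt_scratch_space(
        module,
        ct.basek(),
        ct.k(),
    ));
    ct.decrypt(module, &mut pt, sk, scratch.borrow());
    pt
}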

core/src/glwe/encryption.rs Normal file

@@ -0,0 +1,254 @@
use backend::{
AddNormal, FFT64, FillUniform, Module, ScalarZnxAlloc, ScalarZnxDftAlloc, ScalarZnxDftOps, Scratch, VecZnxAlloc, VecZnxBig,
VecZnxBigAlloc, VecZnxBigOps, VecZnxBigScratch, VecZnxDftAlloc, VecZnxDftOps, VecZnxOps, ZnxZero,
};
use sampling::source::Source;
use crate::{GLWECiphertext, GLWEPlaintext, GLWEPublicKey, GLWESecret, Infos, SIX_SIGMA, div_ceil, keys::SecretDistribution};
impl GLWECiphertext<Vec<u8>> {
pub fn encrypt_sk_scratch_space(module: &Module<FFT64>, basek: usize, k: usize) -> usize {
let size: usize = div_ceil(k, basek);
module.vec_znx_big_normalize_tmp_bytes() + module.bytes_of_vec_znx_dft(1, size) + module.bytes_of_vec_znx(1, size)
}
pub fn encrypt_pk_scratch_space(module: &Module<FFT64>, basek: usize, k: usize) -> usize {
let size: usize = div_ceil(k, basek);
((module.bytes_of_vec_znx_dft(1, size) + module.bytes_of_vec_znx_big(1, size)) | module.bytes_of_scalar_znx(1))
+ module.bytes_of_scalar_znx_dft(1)
+ module.vec_znx_big_normalize_tmp_bytes()
}
}
impl<DataSelf: AsRef<[u8]> + AsMut<[u8]>> GLWECiphertext<DataSelf> {
pub fn encrypt_sk<DataPt: AsRef<[u8]>, DataSk: AsRef<[u8]>>(
&mut self,
module: &Module<FFT64>,
pt: &GLWEPlaintext<DataPt>,
sk: &GLWESecret<DataSk, FFT64>,
source_xa: &mut Source,
source_xe: &mut Source,
sigma: f64,
scratch: &mut Scratch,
) {
self.encrypt_sk_private(
module,
Some((pt, 0)),
sk,
source_xa,
source_xe,
sigma,
scratch,
);
}
pub fn encrypt_zero_sk<DataSk: AsRef<[u8]>>(
&mut self,
module: &Module<FFT64>,
sk: &GLWESecret<DataSk, FFT64>,
source_xa: &mut Source,
source_xe: &mut Source,
sigma: f64,
scratch: &mut Scratch,
) {
self.encrypt_sk_private(
module,
None::<(&GLWEPlaintext<Vec<u8>>, usize)>,
sk,
source_xa,
source_xe,
sigma,
scratch,
);
}
pub fn encrypt_pk<DataPt: AsRef<[u8]>, DataPk: AsRef<[u8]>>(
&mut self,
module: &Module<FFT64>,
pt: &GLWEPlaintext<DataPt>,
pk: &GLWEPublicKey<DataPk, FFT64>,
source_xu: &mut Source,
source_xe: &mut Source,
sigma: f64,
scratch: &mut Scratch,
) {
self.encrypt_pk_private(
module,
Some((pt, 0)),
pk,
source_xu,
source_xe,
sigma,
scratch,
);
}
pub fn encrypt_zero_pk<DataPk: AsRef<[u8]>>(
&mut self,
module: &Module<FFT64>,
pk: &GLWEPublicKey<DataPk, FFT64>,
source_xu: &mut Source,
source_xe: &mut Source,
sigma: f64,
scratch: &mut Scratch,
) {
self.encrypt_pk_private(
module,
None::<(&GLWEPlaintext<Vec<u8>>, usize)>,
pk,
source_xu,
source_xe,
sigma,
scratch,
);
}
pub(crate) fn encrypt_sk_private<DataPt: AsRef<[u8]>, DataSk: AsRef<[u8]>>(
&mut self,
module: &Module<FFT64>,
pt: Option<(&GLWEPlaintext<DataPt>, usize)>,
sk: &GLWESecret<DataSk, FFT64>,
source_xa: &mut Source,
source_xe: &mut Source,
sigma: f64,
scratch: &mut Scratch,
) {
#[cfg(debug_assertions)]
{
assert_eq!(self.rank(), sk.rank());
assert_eq!(sk.n(), module.n());
assert_eq!(self.n(), module.n());
if let Some((pt, col)) = pt {
assert_eq!(pt.n(), module.n());
assert!(col < self.rank() + 1);
}
assert!(
scratch.available() >= GLWECiphertext::encrypt_sk_scratch_space(module, self.basek(), self.k()),
"scratch.available(): {} < GLWECiphertext::encrypt_sk_scratch_space: {}",
scratch.available(),
GLWECiphertext::encrypt_sk_scratch_space(module, self.basek(), self.k())
)
}
let basek: usize = self.basek();
let k: usize = self.k();
let size: usize = self.size();
let cols: usize = self.rank() + 1;
let (mut c0_big, scratch_1) = scratch.tmp_vec_znx(module, 1, size);
c0_big.zero();
{
// c[i] = uniform
// c[0] -= c[i] * s[i],
(1..cols).for_each(|i| {
let (mut ci_dft, scratch_2) = scratch_1.tmp_vec_znx_dft(module, 1, size);
// c[i] = uniform
self.data.fill_uniform(basek, i, size, source_xa);
// c[i] = norm(IDFT(DFT(c[i]) * DFT(s[i])))
module.vec_znx_dft(1, 0, &mut ci_dft, 0, &self.data, i);
module.svp_apply_inplace(&mut ci_dft, 0, &sk.data_fourier, i - 1);
let ci_big: VecZnxBig<&mut [u8], FFT64> = module.vec_znx_idft_consume(ci_dft);
// use c[0] as buffer, which is overwritten later by the normalization step
module.vec_znx_big_normalize(basek, &mut self.data, 0, &ci_big, 0, scratch_2);
// c0_big -= c[i] * s[i] (read back from the c[0] buffer)
module.vec_znx_sub_ab_inplace(&mut c0_big, 0, &self.data, 0);
// c[i] += m if col = i
if let Some((pt, col)) = pt {
if i == col {
module.vec_znx_add_inplace(&mut self.data, i, &pt.data, 0);
module.vec_znx_normalize_inplace(basek, &mut self.data, i, scratch_2);
}
}
});
}
// c[0] += e
c0_big.add_normal(basek, 0, k, source_xe, sigma, sigma * SIX_SIGMA);
// c[0] += m if col = 0
if let Some((pt, col)) = pt {
if col == 0 {
module.vec_znx_add_inplace(&mut c0_big, 0, &pt.data, 0);
}
}
// c[0] = norm(c[0])
module.vec_znx_normalize(basek, &mut self.data, 0, &c0_big, 0, scratch_1);
}
pub(crate) fn encrypt_pk_private<DataPt: AsRef<[u8]>, DataPk: AsRef<[u8]>>(
&mut self,
module: &Module<FFT64>,
pt: Option<(&GLWEPlaintext<DataPt>, usize)>,
pk: &GLWEPublicKey<DataPk, FFT64>,
source_xu: &mut Source,
source_xe: &mut Source,
sigma: f64,
scratch: &mut Scratch,
) {
#[cfg(debug_assertions)]
{
assert_eq!(self.basek(), pk.basek());
assert_eq!(self.n(), module.n());
assert_eq!(pk.n(), module.n());
assert_eq!(self.rank(), pk.rank());
if let Some((pt, _)) = pt {
assert_eq!(pt.basek(), pk.basek());
assert_eq!(pt.n(), module.n());
}
}
let basek: usize = pk.basek();
let size_pk: usize = pk.size();
let cols: usize = self.rank() + 1;
// Generates u according to the underlying secret distribution.
let (mut u_dft, scratch_1) = scratch.tmp_scalar_znx_dft(module, 1);
{
let (mut u, _) = scratch_1.tmp_scalar_znx(module, 1);
match pk.dist {
SecretDistribution::NONE => panic!(
"invalid public key: SecretDistribution::NONE, ensure it has been correctly intialized through \
Self::generate"
),
SecretDistribution::TernaryFixed(hw) => u.fill_ternary_hw(0, hw, source_xu),
SecretDistribution::TernaryProb(prob) => u.fill_ternary_prob(0, prob, source_xu),
SecretDistribution::BinaryFixed(hw) => u.fill_binary_hw(0, hw, source_xu),
SecretDistribution::BinaryProb(prob) => u.fill_binary_prob(0, prob, source_xu),
SecretDistribution::BinaryBlock(block_size) => u.fill_binary_block(0, block_size, source_xu),
SecretDistribution::ZERO => {}
}
module.svp_prepare(&mut u_dft, 0, &u, 0);
}
// ct[i] = pk[i] * u + ei (+ m if col = i)
(0..cols).for_each(|i| {
let (mut ci_dft, scratch_2) = scratch_1.tmp_vec_znx_dft(module, 1, size_pk);
// ci_dft = DFT(u) * DFT(pk[i])
module.svp_apply(&mut ci_dft, 0, &u_dft, 0, &pk.data.data, i);
// ci_big = u * pk[i]
let mut ci_big = module.vec_znx_idft_consume(ci_dft);
// ci_big = u * pk[i] + e
ci_big.add_normal(basek, 0, pk.k(), source_xe, sigma, sigma * SIX_SIGMA);
// ci_big = u * pk[i] + e + m (if col = i)
if let Some((pt, col)) = pt {
if col == i {
module.vec_znx_big_add_small_inplace(&mut ci_big, 0, &pt.data, 0);
}
}
// ct[i] = norm(ci_big)
module.vec_znx_big_normalize(basek, &mut self.data, i, &ci_big, 0, scratch_2);
});
}
}
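
An illustrative sketch (not from the commit) of secret-key encryption with the scratch sized by encrypt_sk_scratch_space; the secret key and the two randomness sources are assumed to be initialized by the caller, and `sigma` is the standard deviation of the error distribution.

use backend::{FFT64, Module, ScratchOwned};
use sampling::source::Source;
use crate::{GLWECiphertext, GLWEPlaintext, GLWESecret};

// Hypothetical helper: encrypts `pt` under `sk` into a freshly allocated ciphertext.
fn encrypt_sk_new_ct(
    module: &Module<FFT64>,
    pt: &GLWEPlaintext<Vec<u8>>,
    sk: &GLWESecret<Vec<u8>, FFT64>,
    basek: usize,
    k_ct: usize,
    rank: usize,
    source_xa: &mut Source,
    source_xe: &mut Source,
    sigma: f64,
) -> GLWECiphertext<Vec<u8>> {
    let mut ct = GLWECiphertext::alloc(module, basek, k_ct, rank);
    let mut scratch = ScratchOwned::new(GLWECiphertext::encrypt_sk_scratch_space(
        module, basek, k_ct,
    ));
    ct.encrypt_sk(module, pt, sk, source_xa, source_xe, sigma, scratch.borrow());
    ct
}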


@@ -0,0 +1,129 @@
use backend::{
FFT64, MatZnxDftOps, MatZnxDftScratch, Module, Scratch, VecZnxBig, VecZnxBigOps, VecZnxDftAlloc, VecZnxDftOps, VecZnxScratch,
};
use crate::{FourierGLWECiphertext, GGSWCiphertext, GLWECiphertext, Infos, div_ceil};
impl GLWECiphertext<Vec<u8>> {
pub fn external_product_scratch_space(
module: &Module<FFT64>,
basek: usize,
k_out: usize,
k_in: usize,
ggsw_k: usize,
digits: usize,
rank: usize,
) -> usize {
let res_dft: usize = FourierGLWECiphertext::bytes_of(module, basek, k_out, rank);
let in_size: usize = div_ceil(div_ceil(k_in, basek), digits);
let out_size: usize = div_ceil(k_out, basek);
let ggsw_size: usize = div_ceil(ggsw_k, basek);
let vmp: usize = module.bytes_of_vec_znx_dft(rank + 1, in_size)
+ module.vmp_apply_tmp_bytes(
out_size,
in_size,
in_size, // rows
rank + 1, // cols in
rank + 1, // cols out
ggsw_size,
);
let normalize: usize = module.vec_znx_normalize_tmp_bytes();
res_dft + (vmp | normalize)
}
pub fn external_product_inplace_scratch_space(
module: &Module<FFT64>,
basek: usize,
k_out: usize,
ggsw_k: usize,
digits: usize,
rank: usize,
) -> usize {
Self::external_product_scratch_space(module, basek, k_out, k_out, ggsw_k, digits, rank)
}
}
impl<DataSelf: AsRef<[u8]> + AsMut<[u8]>> GLWECiphertext<DataSelf> {
pub fn external_product<DataLhs: AsRef<[u8]>, DataRhs: AsRef<[u8]>>(
&mut self,
module: &Module<FFT64>,
lhs: &GLWECiphertext<DataLhs>,
rhs: &GGSWCiphertext<DataRhs, FFT64>,
scratch: &mut Scratch,
) {
let basek: usize = self.basek();
#[cfg(debug_assertions)]
{
assert_eq!(rhs.rank(), lhs.rank());
assert_eq!(rhs.rank(), self.rank());
assert_eq!(self.basek(), basek);
assert_eq!(lhs.basek(), basek);
assert_eq!(rhs.n(), module.n());
assert_eq!(self.n(), module.n());
assert_eq!(lhs.n(), module.n());
assert!(
scratch.available()
>= GLWECiphertext::external_product_scratch_space(
module,
self.basek(),
self.k(),
lhs.k(),
rhs.k(),
rhs.digits(),
rhs.rank(),
)
);
}
let cols: usize = rhs.rank() + 1;
let digits: usize = rhs.digits();
let (mut res_dft, scratch1) = scratch.tmp_vec_znx_dft(module, cols, rhs.size()); // Todo optimise
let (mut a_dft, scratch2) = scratch1.tmp_vec_znx_dft(module, cols, (lhs.size() + digits - 1) / digits);
{
(0..digits).for_each(|di| {
// (lhs.size() + di) / digits = ceil((lhs.size() - (digits - 1 - di)) / digits)
a_dft.set_size((lhs.size() + di) / digits);
// Small optimization for digits > 2:
// each VMP product introduces some error e_i, and since we aggregate vmp * 2^{di * B},
// we also aggregate e_i * 2^{di * B}, with the largest error being e_i * 2^{(digits-1) * B}.
// As such, we can safely ignore the last digits-2 limbs of the sum of VMP products.
// It is possible to further ignore the last digits-1 limbs, but this introduces
// ~0.5 to 1 bit of additional noise, so it is not done here, in order to keep the same
// noise as the ideal functionality.
res_dft.set_size(rhs.size() - ((digits - di) as isize - 2).max(0) as usize);
(0..cols).for_each(|col_i| {
module.vec_znx_dft(digits, digits - 1 - di, &mut a_dft, col_i, &lhs.data, col_i);
});
if di == 0 {
module.vmp_apply(&mut res_dft, &a_dft, &rhs.data, scratch2);
} else {
module.vmp_apply_add(&mut res_dft, &a_dft, &rhs.data, di, scratch2);
}
});
}
let res_big: VecZnxBig<&mut [u8], FFT64> = module.vec_znx_idft_consume(res_dft);
(0..cols).for_each(|i| {
module.vec_znx_big_normalize(basek, &mut self.data, i, &res_big, i, scratch1);
});
}
pub fn external_product_inplace<DataRhs: AsRef<[u8]>>(
&mut self,
module: &Module<FFT64>,
rhs: &GGSWCiphertext<DataRhs, FFT64>,
scratch: &mut Scratch,
) {
unsafe {
let self_ptr: *mut GLWECiphertext<DataSelf> = self as *mut GLWECiphertext<DataSelf>;
self.external_product(&module, &*self_ptr, rhs, scratch);
}
}
}
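
The limb count (lhs.size() + di) / digits used in the digit loop is the ceiling division spelled out in the comment above. Below is a standalone check of that identity (plain Rust, no library dependency), assuming, as the comment suggests, that the limbs taken at offset digits-1-di are those at indices offset, offset+digits, ...

// Standalone check of the limb-count identity used in the digit loop:
// (size + di) / digits == #{ j < size : j % digits == digits - 1 - di }
//                      == ceil((size - (digits - 1 - di)) / digits), clamped at 0.
fn main() {
    for digits in 1..=4usize {
        for size in 0..=16usize {
            for di in 0..digits {
                let offset = digits - 1 - di;
                // Count the limb indices starting at `offset` with stride `digits`.
                let count = (offset..size).step_by(digits).count();
                assert_eq!((size + di) / digits, count);
            }
        }
    }
    println!("limb-count identity verified");
}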

core/src/glwe/keyswitch.rs Normal file

@@ -0,0 +1,244 @@
use backend::{
FFT64, MatZnxDftOps, MatZnxDftScratch, Module, Scratch, VecZnxBig, VecZnxBigOps, VecZnxBigScratch, VecZnxDftAlloc,
VecZnxDftOps, ZnxZero,
};
use crate::{FourierGLWECiphertext, GLWECiphertext, GLWESwitchingKey, Infos, div_ceil};
impl GLWECiphertext<Vec<u8>> {
pub fn keyswitch_scratch_space(
module: &Module<FFT64>,
basek: usize,
k_out: usize,
k_in: usize,
k_ksk: usize,
digits: usize,
rank_in: usize,
rank_out: usize,
) -> usize {
let res_dft: usize = FourierGLWECiphertext::bytes_of(module, basek, k_out, rank_out + 1);
let in_size: usize = div_ceil(div_ceil(k_in, basek), digits);
let out_size: usize = div_ceil(k_out, basek);
let ksk_size: usize = div_ceil(k_ksk, basek);
let ai_dft: usize = module.bytes_of_vec_znx_dft(rank_in, in_size);
let vmp: usize = module.vmp_apply_tmp_bytes(out_size, in_size, in_size, rank_in, rank_out + 1, ksk_size)
+ module.bytes_of_vec_znx_dft(rank_in, in_size);
let normalize: usize = module.vec_znx_big_normalize_tmp_bytes();
res_dft + ((ai_dft + vmp) | normalize)
}
pub fn keyswitch_from_fourier_scratch_space(
module: &Module<FFT64>,
basek: usize,
k_out: usize,
k_in: usize,
k_ksk: usize,
digits: usize,
rank_in: usize,
rank_out: usize,
) -> usize {
Self::keyswitch_scratch_space(module, basek, k_out, k_in, k_ksk, digits, rank_in, rank_out)
}
pub fn keyswitch_inplace_scratch_space(
module: &Module<FFT64>,
basek: usize,
k_out: usize,
k_ksk: usize,
digits: usize,
rank: usize,
) -> usize {
Self::keyswitch_scratch_space(module, basek, k_out, k_out, k_ksk, digits, rank, rank)
}
}
impl<DataSelf: AsRef<[u8]> + AsMut<[u8]>> GLWECiphertext<DataSelf> {
pub fn keyswitch<DataLhs: AsRef<[u8]>, DataRhs: AsRef<[u8]>>(
&mut self,
module: &Module<FFT64>,
lhs: &GLWECiphertext<DataLhs>,
rhs: &GLWESwitchingKey<DataRhs, FFT64>,
scratch: &mut Scratch,
) {
Self::keyswitch_private::<_, _, 0>(self, 0, module, lhs, rhs, scratch);
}
pub fn keyswitch_inplace<DataRhs: AsRef<[u8]>>(
&mut self,
module: &Module<FFT64>,
rhs: &GLWESwitchingKey<DataRhs, FFT64>,
scratch: &mut Scratch,
) {
unsafe {
let self_ptr: *mut GLWECiphertext<DataSelf> = self as *mut GLWECiphertext<DataSelf>;
self.keyswitch(&module, &*self_ptr, rhs, scratch);
}
}
pub(crate) fn keyswitch_private<DataLhs: AsRef<[u8]>, DataRhs: AsRef<[u8]>, const OP: u8>(
&mut self,
apply_auto: i64,
module: &Module<FFT64>,
lhs: &GLWECiphertext<DataLhs>,
rhs: &GLWESwitchingKey<DataRhs, FFT64>,
scratch: &mut Scratch,
) {
let basek: usize = self.basek();
#[cfg(debug_assertions)]
{
assert_eq!(lhs.rank(), rhs.rank_in());
assert_eq!(self.rank(), rhs.rank_out());
assert_eq!(self.basek(), basek);
assert_eq!(lhs.basek(), basek);
assert_eq!(rhs.n(), module.n());
assert_eq!(self.n(), module.n());
assert_eq!(lhs.n(), module.n());
assert!(
scratch.available()
>= GLWECiphertext::keyswitch_scratch_space(
module,
self.basek(),
self.k(),
lhs.k(),
rhs.k(),
rhs.digits(),
rhs.rank_in(),
rhs.rank_out(),
)
);
}
let cols_in: usize = rhs.rank_in();
let cols_out: usize = rhs.rank_out() + 1;
let digits: usize = rhs.digits();
let (mut res_dft, scratch1) = scratch.tmp_vec_znx_dft(module, cols_out, rhs.size()); // Todo optimise
let (mut ai_dft, scratch2) = scratch1.tmp_vec_znx_dft(module, cols_in, (lhs.size() + digits - 1) / digits);
ai_dft.zero();
{
(0..digits).for_each(|di| {
ai_dft.set_size((lhs.size() + di) / digits);
// Small optimization for digits > 2:
// each VMP product introduces some error e_i, and since we aggregate vmp * 2^{di * B},
// we also aggregate e_i * 2^{di * B}, with the largest error being e_i * 2^{(digits-1) * B}.
// As such, we can safely ignore the last digits-2 limbs of the sum of VMP products.
// It is possible to further ignore the last digits-1 limbs, but this introduces
// ~0.5 to 1 bit of additional noise, so it is not done here, in order to keep the same
// noise as the ideal functionality.
res_dft.set_size(rhs.size() - ((digits - di) as isize - 2).max(0) as usize);
(0..cols_in).for_each(|col_i| {
module.vec_znx_dft(
digits,
digits - di - 1,
&mut ai_dft,
col_i,
&lhs.data,
col_i + 1,
);
});
if di == 0 {
module.vmp_apply(&mut res_dft, &ai_dft, &rhs.0.data, scratch2);
} else {
module.vmp_apply_add(&mut res_dft, &ai_dft, &rhs.0.data, di, scratch2);
}
});
}
let mut res_big: VecZnxBig<&mut [u8], FFT64> = module.vec_znx_idft_consume(res_dft);
module.vec_znx_big_add_small_inplace(&mut res_big, 0, &lhs.data, 0);
(0..cols_out).for_each(|i| {
if apply_auto != 0 {
module.vec_znx_big_automorphism_inplace(apply_auto, &mut res_big, i);
}
match OP {
1 => module.vec_znx_big_add_small_inplace(&mut res_big, i, &lhs.data, i),
2 => module.vec_znx_big_sub_small_a_inplace(&mut res_big, i, &lhs.data, i),
3 => module.vec_znx_big_sub_small_b_inplace(&mut res_big, i, &lhs.data, i),
_ => {}
}
module.vec_znx_big_normalize(basek, &mut self.data, i, &res_big, i, scratch1);
});
}
pub(crate) fn keyswitch_from_fourier<DataLhs: AsRef<[u8]>, DataRhs: AsRef<[u8]>>(
&mut self,
module: &Module<FFT64>,
lhs: &FourierGLWECiphertext<DataLhs, FFT64>,
rhs: &GLWESwitchingKey<DataRhs, FFT64>,
scratch: &mut Scratch,
) {
let basek: usize = self.basek();
#[cfg(debug_assertions)]
{
assert_eq!(lhs.rank(), rhs.rank_in());
assert_eq!(self.rank(), rhs.rank_out());
assert_eq!(self.basek(), basek);
assert_eq!(lhs.basek(), basek);
assert_eq!(rhs.n(), module.n());
assert_eq!(self.n(), module.n());
assert_eq!(lhs.n(), module.n());
assert!(
scratch.available()
>= GLWECiphertext::keyswitch_from_fourier_scratch_space(
module,
self.basek(),
self.k(),
lhs.k(),
rhs.k(),
rhs.digits(),
rhs.rank_in(),
rhs.rank_out(),
)
);
}
let cols_in: usize = rhs.rank_in();
let cols_out: usize = rhs.rank_out() + 1;
// Buffer of the result of VMP in DFT
let (mut res_dft, scratch1) = scratch.tmp_vec_znx_dft(module, cols_out, rhs.size()); // Todo optimise
{
let digits = rhs.digits();
(0..digits).for_each(|di| {
// (lhs.size() + di) / digits = ceil((lhs.size() - (digits - 1 - di)) / digits)
let (mut ai_dft, scratch2) = scratch1.tmp_vec_znx_dft(module, cols_in, (lhs.size() + di) / digits);
(0..cols_in).for_each(|col_i| {
module.vec_znx_dft_copy(
digits,
digits - 1 - di,
&mut ai_dft,
col_i,
&lhs.data,
col_i + 1,
);
});
if di == 0 {
module.vmp_apply(&mut res_dft, &ai_dft, &rhs.0.data, scratch2);
} else {
module.vmp_apply_add(&mut res_dft, &ai_dft, &rhs.0.data, di, scratch2);
}
});
}
module.vec_znx_dft_add_inplace(&mut res_dft, 0, &lhs.data, 0);
// Switches result of VMP outside of DFT
let res_big: VecZnxBig<&mut [u8], FFT64> = module.vec_znx_idft_consume::<&mut [u8]>(res_dft);
(0..cols_out).for_each(|i| {
module.vec_znx_big_normalize(basek, &mut self.data, i, &res_big, i, scratch1);
});
}
}
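
An illustrative sketch (not from the commit) of the out-of-place keyswitch, with the scratch sized by keyswitch_scratch_space; the switching key is assumed to be generated elsewhere and to expose k(), digits(), rank_in() and rank_out() as used in the assertions above, and ScratchOwned::borrow() is assumed to yield &mut Scratch.

use backend::{FFT64, Module, ScratchOwned};
use crate::{GLWECiphertext, GLWESwitchingKey, Infos};

// Hypothetical helper: key-switches `ct_in` under `ksk` into a freshly allocated
// ciphertext of rank ksk.rank_out() and precision `k_out`.
fn keyswitch_to_new_ct(
    module: &Module<FFT64>,
    ct_in: &GLWECiphertext<Vec<u8>>,
    ksk: &GLWESwitchingKey<Vec<u8>, FFT64>,
    k_out: usize,
) -> GLWECiphertext<Vec<u8>> {
    let basek = ct_in.basek();
    let mut ct_out = GLWECiphertext::alloc(module, basek, k_out, ksk.rank_out());
    let mut scratch = ScratchOwned::new(GLWECiphertext::keyswitch_scratch_space(
        module,
        basek,
        k_out,
        ct_in.k(),
        ksk.k(),
        ksk.digits(),
        ksk.rank_in(),
        ksk.rank_out(),
    ));
    ct_out.keyswitch(module, ct_in, ksk, scratch.borrow());
    ct_out
}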

core/src/glwe/mod.rs Normal file

@@ -0,0 +1,31 @@
pub mod automorphism;
pub mod ciphertext;
pub mod decryption;
pub mod encryption;
pub mod external_product;
pub mod keyswitch;
pub mod ops;
pub mod packing;
pub mod plaintext;
pub mod public_key;
pub mod secret;
pub mod trace;
#[allow(unused_imports)]
pub use automorphism::*;
pub use ciphertext::*;
#[allow(unused_imports)]
pub use decryption::*;
#[allow(unused_imports)]
pub use encryption::*;
#[allow(unused_imports)]
pub use external_product::*;
#[allow(unused_imports)]
pub use keyswitch::*;
pub use ops::*;
pub use packing::*;
pub use plaintext::*;
pub use public_key::*;
pub use secret::*;
#[allow(unused_imports)]
pub use trace::*;

core/src/glwe/ops.rs Normal file

@@ -0,0 +1,267 @@
use backend::{FFT64, Module, Scratch, VecZnx, VecZnxOps, ZnxZero};
use crate::{GLWECiphertext, GLWECiphertextToMut, GLWECiphertextToRef, Infos, SetMetaData};
pub trait GLWEOps: GLWECiphertextToMut + Infos + SetMetaData {
fn add<A, B>(&mut self, module: &Module<FFT64>, a: &A, b: &B)
where
A: GLWECiphertextToRef + Infos,
B: GLWECiphertextToRef + Infos,
{
#[cfg(debug_assertions)]
{
assert_eq!(a.n(), module.n());
assert_eq!(b.n(), module.n());
assert_eq!(self.n(), module.n());
assert_eq!(a.basek(), b.basek());
assert!(self.rank() >= a.rank().max(b.rank()));
}
let min_col: usize = a.rank().min(b.rank()) + 1;
let max_col: usize = a.rank().max(b.rank()) + 1;
let self_col: usize = self.rank() + 1;
let self_mut: &mut GLWECiphertext<&mut [u8]> = &mut self.to_mut();
let a_ref: &GLWECiphertext<&[u8]> = &a.to_ref();
let b_ref: &GLWECiphertext<&[u8]> = &b.to_ref();
(0..min_col).for_each(|i| {
module.vec_znx_add(&mut self_mut.data, i, &a_ref.data, i, &b_ref.data, i);
});
if a.rank() > b.rank() {
(min_col..max_col).for_each(|i| {
module.vec_znx_copy(&mut self_mut.data, i, &a_ref.data, i);
});
} else {
(min_col..max_col).for_each(|i| {
module.vec_znx_copy(&mut self_mut.data, i, &b_ref.data, i);
});
}
let size: usize = self_mut.size();
(max_col..self_col).for_each(|i| {
(0..size).for_each(|j| {
self_mut.data.zero_at(i, j);
});
});
self.set_basek(a.basek());
self.set_k(a.k().max(b.k()));
}
fn add_inplace<A>(&mut self, module: &Module<FFT64>, a: &A)
where
A: GLWECiphertextToRef + Infos,
{
#[cfg(debug_assertions)]
{
assert_eq!(a.n(), module.n());
assert_eq!(self.n(), module.n());
assert_eq!(self.basek(), a.basek());
assert!(self.rank() >= a.rank())
}
let self_mut: &mut GLWECiphertext<&mut [u8]> = &mut self.to_mut();
let a_ref: &GLWECiphertext<&[u8]> = &a.to_ref();
(0..a.rank() + 1).for_each(|i| {
module.vec_znx_add_inplace(&mut self_mut.data, i, &a_ref.data, i);
});
self.set_k(a.k().max(self.k()));
}
fn sub<A, B>(&mut self, module: &Module<FFT64>, a: &A, b: &B)
where
A: GLWECiphertextToRef + Infos,
B: GLWECiphertextToRef + Infos,
{
#[cfg(debug_assertions)]
{
assert_eq!(a.n(), module.n());
assert_eq!(b.n(), module.n());
assert_eq!(self.n(), module.n());
assert_eq!(a.basek(), b.basek());
assert!(self.rank() >= a.rank().max(b.rank()));
}
let min_col: usize = a.rank().min(b.rank()) + 1;
let max_col: usize = a.rank().max(b.rank()) + 1;
let self_col: usize = self.rank() + 1;
let self_mut: &mut GLWECiphertext<&mut [u8]> = &mut self.to_mut();
let a_ref: &GLWECiphertext<&[u8]> = &a.to_ref();
let b_ref: &GLWECiphertext<&[u8]> = &b.to_ref();
(0..min_col).for_each(|i| {
module.vec_znx_sub(&mut self_mut.data, i, &a_ref.data, i, &b_ref.data, i);
});
if a.rank() > b.rank() {
(min_col..max_col).for_each(|i| {
module.vec_znx_copy(&mut self_mut.data, i, &a_ref.data, i);
});
} else {
(min_col..max_col).for_each(|i| {
module.vec_znx_copy(&mut self_mut.data, i, &b_ref.data, i);
module.vec_znx_negate_inplace(&mut self_mut.data, i);
});
}
let size: usize = self_mut.size();
(max_col..self_col).for_each(|i| {
(0..size).for_each(|j| {
self_mut.data.zero_at(i, j);
});
});
self.set_basek(a.basek());
self.set_k(a.k().max(b.k()));
}
fn sub_inplace_ab<A>(&mut self, module: &Module<FFT64>, a: &A)
where
A: GLWECiphertextToRef + Infos,
{
#[cfg(debug_assertions)]
{
assert_eq!(a.n(), module.n());
assert_eq!(self.n(), module.n());
assert_eq!(self.basek(), a.basek());
assert!(self.rank() >= a.rank())
}
let self_mut: &mut GLWECiphertext<&mut [u8]> = &mut self.to_mut();
let a_ref: &GLWECiphertext<&[u8]> = &a.to_ref();
(0..a.rank() + 1).for_each(|i| {
module.vec_znx_sub_ab_inplace(&mut self_mut.data, i, &a_ref.data, i);
});
self.set_k(a.k().max(self.k()));
}
fn sub_inplace_ba<A>(&mut self, module: &Module<FFT64>, a: &A)
where
A: GLWECiphertextToRef + Infos,
{
#[cfg(debug_assertions)]
{
assert_eq!(a.n(), module.n());
assert_eq!(self.n(), module.n());
assert_eq!(self.basek(), a.basek());
assert!(self.rank() >= a.rank())
}
let self_mut: &mut GLWECiphertext<&mut [u8]> = &mut self.to_mut();
let a_ref: &GLWECiphertext<&[u8]> = &a.to_ref();
(0..a.rank() + 1).for_each(|i| {
module.vec_znx_sub_ba_inplace(&mut self_mut.data, i, &a_ref.data, i);
});
self.set_k(a.k().max(self.k()));
}
fn rotate<A>(&mut self, module: &Module<FFT64>, k: i64, a: &A)
where
A: GLWECiphertextToRef + Infos,
{
#[cfg(debug_assertions)]
{
assert_eq!(a.n(), module.n());
assert_eq!(self.n(), module.n());
assert_eq!(self.rank(), a.rank())
}
let self_mut: &mut GLWECiphertext<&mut [u8]> = &mut self.to_mut();
let a_ref: &GLWECiphertext<&[u8]> = &a.to_ref();
(0..a.rank() + 1).for_each(|i| {
module.vec_znx_rotate(k, &mut self_mut.data, i, &a_ref.data, i);
});
self.set_basek(a.basek());
self.set_k(a.k());
}
fn rotate_inplace(&mut self, module: &Module<FFT64>, k: i64) {
#[cfg(debug_assertions)]
{
assert_eq!(self.n(), module.n());
}
let self_mut: &mut GLWECiphertext<&mut [u8]> = &mut self.to_mut();
(0..self_mut.rank() + 1).for_each(|i| {
module.vec_znx_rotate_inplace(k, &mut self_mut.data, i);
});
}
fn copy<A>(&mut self, module: &Module<FFT64>, a: &A)
where
A: GLWECiphertextToRef + Infos,
{
#[cfg(debug_assertions)]
{
assert_eq!(self.n(), module.n());
assert_eq!(a.n(), module.n());
assert_eq!(self.rank(), a.rank());
}
let self_mut: &mut GLWECiphertext<&mut [u8]> = &mut self.to_mut();
let a_ref: &GLWECiphertext<&[u8]> = &a.to_ref();
(0..self_mut.rank() + 1).for_each(|i| {
module.vec_znx_copy(&mut self_mut.data, i, &a_ref.data, i);
});
self.set_k(a.k());
self.set_basek(a.basek());
}
fn rsh(&mut self, k: usize, scratch: &mut Scratch) {
let basek: usize = self.basek();
let mut self_mut: GLWECiphertext<&mut [u8]> = self.to_mut();
self_mut.data.rsh(basek, k, scratch);
}
fn normalize<A>(&mut self, module: &Module<FFT64>, a: &A, scratch: &mut Scratch)
where
A: GLWECiphertextToRef + Infos,
{
#[cfg(debug_assertions)]
{
assert_eq!(self.n(), module.n());
assert_eq!(a.n(), module.n());
assert_eq!(self.rank(), a.rank());
}
let self_mut: &mut GLWECiphertext<&mut [u8]> = &mut self.to_mut();
let a_ref: &GLWECiphertext<&[u8]> = &a.to_ref();
(0..self_mut.rank() + 1).for_each(|i| {
module.vec_znx_normalize(a.basek(), &mut self_mut.data, i, &a_ref.data, i, scratch);
});
self.set_basek(a.basek());
self.set_k(a.k());
}
fn normalize_inplace(&mut self, module: &Module<FFT64>, scratch: &mut Scratch) {
#[cfg(debug_assertions)]
{
assert_eq!(self.n(), module.n());
}
let self_mut: &mut GLWECiphertext<&mut [u8]> = &mut self.to_mut();
(0..self_mut.rank() + 1).for_each(|i| {
module.vec_znx_normalize_inplace(self_mut.basek(), &mut self_mut.data, i, scratch);
});
}
}
impl GLWECiphertext<Vec<u8>> {
pub fn rsh_scratch_space(module: &Module<FFT64>) -> usize {
VecZnx::rsh_scratch_space(module.n())
}
}

core/src/glwe/packing.rs Normal file

@@ -0,0 +1,304 @@
use crate::{AutomorphismKey, GLWECiphertext, GLWEOps, Infos, ScratchCore};
use std::collections::HashMap;
use backend::{FFT64, Module, Scratch};
/// [StreamPacker] enables on-the-fly GLWE packing
/// with a constant memory footprint of log(N) ciphertexts.
/// The main difference with the usual GLWE packing is that
/// the output is in bit-reversed order.
pub struct StreamPacker {
accumulators: Vec<Accumulator>,
log_batch: usize,
counter: usize,
}
/// [Accumulator] stores intermediate packing result.
/// There are Log(N) such accumulators in a [StreamPacker].
struct Accumulator {
data: GLWECiphertext<Vec<u8>>,
value: bool, // Implicit flag for zero ciphertext
control: bool, // Can be combined with incoming value
}
impl Accumulator {
/// Allocates a new [Accumulator].
///
/// # Arguments
///
/// * `module`: static backend FFT tables.
/// * `basek`: base-2 logarithm of the digit size of the in-memory representation of the GLWE ciphertext.
/// * `k`: bit precision of the GLWE ciphertext over the Torus.
/// * `rank`: rank of the GLWE ciphertext.
pub fn alloc(module: &Module<FFT64>, basek: usize, k: usize, rank: usize) -> Self {
Self {
data: GLWECiphertext::alloc(module, basek, k, rank),
value: false,
control: false,
}
}
}
impl StreamPacker {
/// Instantiates a new [StreamPacker].
///
/// # Arguments
///
/// * `module`: static backend FFT tables.
/// * `log_batch`: packs the coefficients whose index is a multiple of N/2^log_batch,
/// i.e. with `log_batch=0` only the constant coefficient of each input is packed
/// and N GLWE ciphertexts can be packed. With `log_batch=2` all coefficients
/// whose index is a multiple of N/4 are packed, meaning that N/4 ciphertexts
/// can be packed.
/// * `basek`: base-2 logarithm of the digit size of the in-memory representation of the GLWE ciphertext.
/// * `k`: bit precision of the GLWE ciphertext over the Torus.
/// * `rank`: rank of the GLWE ciphertext.
pub fn new(module: &Module<FFT64>, log_batch: usize, basek: usize, k: usize, rank: usize) -> Self {
let mut accumulators: Vec<Accumulator> = Vec::<Accumulator>::new();
let log_n: usize = module.log_n();
(0..log_n - log_batch).for_each(|_| accumulators.push(Accumulator::alloc(module, basek, k, rank)));
Self {
accumulators: accumulators,
log_batch,
counter: 0,
}
}
/// Resets the internal state (to be called before starting a new packing procedure).
pub fn reset(&mut self) {
for i in 0..self.accumulators.len() {
self.accumulators[i].value = false;
self.accumulators[i].control = false;
}
self.counter = 0;
}
/// Number of scratch space bytes required to call [Self::add].
pub fn scratch_space(module: &Module<FFT64>, basek: usize, ct_k: usize, k_ksk: usize, digits: usize, rank: usize) -> usize {
pack_core_scratch_space(module, basek, ct_k, k_ksk, digits, rank)
}
pub fn galois_elements(module: &Module<FFT64>) -> Vec<i64> {
GLWECiphertext::trace_galois_elements(module)
}
/// Adds a GLWE ciphertext to the [StreamPacker] and propagates
/// intermediate results through the [Accumulator]s.
///
/// # Arguments
///
/// * `module`: static backend FFT tables.
/// * `res`: vector to which fully packed ciphertexts are appended. A result is only
/// written once the number of packed ciphertexts reaches N/2^log_batch.
/// * `a`: ciphertext to pack. Pass None to pack a zero ciphertext.
/// * `auto_keys`: a [HashMap] containing the [AutomorphismKey]s.
/// * `scratch`: scratch space of size at least [Self::scratch_space].
pub fn add<DataA: AsRef<[u8]>, DataAK: AsRef<[u8]>>(
&mut self,
module: &Module<FFT64>,
res: &mut Vec<GLWECiphertext<Vec<u8>>>,
a: Option<&GLWECiphertext<DataA>>,
auto_keys: &HashMap<i64, AutomorphismKey<DataAK, FFT64>>,
scratch: &mut Scratch,
) {
pack_core(
module,
a,
&mut self.accumulators,
self.log_batch,
auto_keys,
scratch,
);
self.counter += 1 << self.log_batch;
if self.counter == module.n() {
res.push(
self.accumulators[module.log_n() - self.log_batch - 1]
.data
.clone(),
);
self.reset();
}
}
/// Flushes all accumulators and appends the result to `res`.
pub fn flush<DataAK: AsRef<[u8]>>(
&mut self,
module: &Module<FFT64>,
res: &mut Vec<GLWECiphertext<Vec<u8>>>,
auto_keys: &HashMap<i64, AutomorphismKey<DataAK, FFT64>>,
scratch: &mut Scratch,
) {
if self.counter != 0 {
while self.counter != 0 {
self.add(
module,
res,
None::<&GLWECiphertext<Vec<u8>>>,
auto_keys,
scratch,
);
}
}
}
}
fn pack_core_scratch_space(module: &Module<FFT64>, basek: usize, ct_k: usize, k_ksk: usize, digits: usize, rank: usize) -> usize {
combine_scratch_space(module, basek, ct_k, k_ksk, digits, rank)
}
fn pack_core<D: AsRef<[u8]>, DataAK: AsRef<[u8]>>(
module: &Module<FFT64>,
a: Option<&GLWECiphertext<D>>,
accumulators: &mut [Accumulator],
i: usize,
auto_keys: &HashMap<i64, AutomorphismKey<DataAK, FFT64>>,
scratch: &mut Scratch,
) {
let log_n: usize = module.log_n();
if i == log_n {
return;
}
// Isolate the first accumulator
let (acc_prev, acc_next) = accumulators.split_at_mut(1);
// If control is false, the accumulator is free and can simply be overwritten.
if !acc_prev[0].control {
let acc_mut_ref: &mut Accumulator = &mut acc_prev[0]; // from split_at_mut
// No previous value -> copies and sets flags accordingly
if let Some(a_ref) = a {
acc_mut_ref.data.copy(module, a_ref);
acc_mut_ref.value = true
} else {
acc_mut_ref.value = false
}
acc_mut_ref.control = true; // Able to be combined on next call
} else {
// Compresses acc_prev <- combine(acc_prev, a).
combine(module, &mut acc_prev[0], a, i, auto_keys, scratch);
acc_prev[0].control = false;
// Propagates to next accumulator
if acc_prev[0].value {
pack_core(
module,
Some(&acc_prev[0].data),
acc_next,
i + 1,
auto_keys,
scratch,
);
} else {
pack_core(
module,
None::<&GLWECiphertext<Vec<u8>>>,
acc_next,
i + 1,
auto_keys,
scratch,
);
}
}
}
fn combine_scratch_space(module: &Module<FFT64>, basek: usize, ct_k: usize, k_ksk: usize, digits: usize, rank: usize) -> usize {
GLWECiphertext::bytes_of(module, basek, ct_k, rank)
+ (GLWECiphertext::rsh_scratch_space(module)
| GLWECiphertext::automorphism_scratch_space(module, basek, ct_k, ct_k, k_ksk, digits, rank))
}
/// [combine] merges two ciphertexts together.
fn combine<D: AsRef<[u8]>, DataAK: AsRef<[u8]>>(
module: &Module<FFT64>,
acc: &mut Accumulator,
b: Option<&GLWECiphertext<D>>,
i: usize,
auto_keys: &HashMap<i64, AutomorphismKey<DataAK, FFT64>>,
scratch: &mut Scratch,
) {
let log_n: usize = module.log_n();
let a: &mut GLWECiphertext<Vec<u8>> = &mut acc.data;
let basek: usize = a.basek();
let k: usize = a.k();
let rank: usize = a.rank();
let gal_el: i64;
if i == 0 {
gal_el = -1;
} else {
gal_el = module.galois_element(1 << (i - 1))
}
let t: i64 = 1 << (log_n - i - 1);
// Goal is to evaluate: a = a + b*X^t + phi(a - b*X^t)
// We also use the identity: AUTO(a * X^t, g) = -X^t * AUTO(a, g)
// where t = 2^(log_n - i - 1) and g = 5^{2^(i - 1)}.
// The cases below distinguish whether a and/or b are zero.
//
// The implicit RSH without modulus switch introduces an extra I(X) * Q/2 on decryption.
// It is necessary so that the scaling of the plaintext remains constant.
// It is however ok to do so here because each coefficient is eventually
// either mapped to garbage or to twice its value, which cancels I(X)
// since 2*(I(X) * Q/2) = I(X) * Q = 0 mod Q.
if acc.value {
if let Some(b) = b {
let (mut tmp_b, scratch_1) = scratch.tmp_glwe_ct(module, basek, k, rank);
// a = a * X^-t
a.rotate_inplace(module, -t);
// tmp_b = a * X^-t - b
tmp_b.sub(module, a, b);
tmp_b.rsh(1, scratch_1);
// a = a * X^-t + b
a.add_inplace(module, b);
a.rsh(1, scratch_1);
tmp_b.normalize_inplace(module, scratch_1);
// tmp_b = phi(a * X^-t - b)
if let Some(key) = auto_keys.get(&gal_el) {
tmp_b.automorphism_inplace(module, key, scratch_1);
} else {
panic!("auto_key[{}] not found", gal_el);
}
// a = a * X^-t + b - phi(a * X^-t - b)
a.sub_inplace_ab(module, &tmp_b);
a.normalize_inplace(module, scratch_1);
// a = a + b * X^t - phi(a * X^-t - b) * X^t
//   = a + b * X^t + phi((a * X^-t - b) * X^t)   (using X^t * phi(c) = -phi(c * X^t))
//   = a + b * X^t + phi(a - b * X^t)
a.rotate_inplace(module, t);
} else {
a.rsh(1, scratch);
// a = a + phi(a)
if let Some(key) = auto_keys.get(&gal_el) {
a.automorphism_add_inplace(module, key, scratch);
} else {
panic!("auto_key[{}] not found", gal_el);
}
}
} else {
if let Some(b) = b {
let (mut tmp_b, scratch_1) = scratch.tmp_glwe_ct(module, basek, k, rank);
tmp_b.rotate(module, 1 << (log_n - i - 1), b);
tmp_b.rsh(1, scratch_1);
// a = (b* X^t - phi(b* X^t))
if let Some(key) = auto_keys.get(&gal_el) {
a.automorphism_sub_ba::<&mut [u8], _>(module, &tmp_b, key, scratch_1);
} else {
panic!("auto_key[{}] not found", gal_el);
}
acc.value = true;
}
}
}
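
An illustrative driver (not from the commit) for StreamPacker with log_batch = 0; the automorphism keys for every element of StreamPacker::galois_elements(module) are assumed to be generated by the caller with parameters k_ksk and digits, and crate-root re-exports plus ScratchOwned::borrow() returning &mut Scratch are assumptions.

use std::collections::HashMap;
use backend::{FFT64, Module, ScratchOwned};
use crate::{AutomorphismKey, GLWECiphertext, StreamPacker};

// Hypothetical driver: packs a stream of GLWE ciphertexts, one constant coefficient
// per input, and returns the bit-reversed packed outputs.
fn pack_stream(
    module: &Module<FFT64>,
    cts: &[GLWECiphertext<Vec<u8>>],
    auto_keys: &HashMap<i64, AutomorphismKey<Vec<u8>, FFT64>>,
    basek: usize,
    k_ct: usize,
    k_ksk: usize,
    digits: usize,
    rank: usize,
) -> Vec<GLWECiphertext<Vec<u8>>> {
    let mut packer = StreamPacker::new(module, 0, basek, k_ct, rank);
    let mut scratch = ScratchOwned::new(StreamPacker::scratch_space(
        module, basek, k_ct, k_ksk, digits, rank,
    ));
    let mut res: Vec<GLWECiphertext<Vec<u8>>> = Vec::new();
    for ct in cts {
        packer.add(module, &mut res, Some(ct), auto_keys, scratch.borrow());
    }
    // Pads with zero ciphertexts until the last partially filled output is emitted.
    packer.flush(module, &mut res, auto_keys, scratch.borrow());
    res
}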


@@ -0,0 +1,80 @@
use backend::{Backend, FFT64, Module, VecZnx, VecZnxAlloc, VecZnxToMut, VecZnxToRef};
use crate::{
GLWEOps, Infos, SetMetaData,
ciphertext::{GLWECiphertext, GLWECiphertextToMut, GLWECiphertextToRef},
div_ceil,
};
pub struct GLWEPlaintext<C> {
pub data: VecZnx<C>,
pub basek: usize,
pub k: usize,
}
impl<T> Infos for GLWEPlaintext<T> {
type Inner = VecZnx<T>;
fn inner(&self) -> &Self::Inner {
&self.data
}
fn basek(&self) -> usize {
self.basek
}
fn k(&self) -> usize {
self.k
}
}
impl<DataSelf: AsMut<[u8]> + AsRef<[u8]>> SetMetaData for GLWEPlaintext<DataSelf> {
fn set_k(&mut self, k: usize) {
self.k = k
}
fn set_basek(&mut self, basek: usize) {
self.basek = basek
}
}
impl GLWEPlaintext<Vec<u8>> {
pub fn alloc<B: Backend>(module: &Module<B>, basek: usize, k: usize) -> Self {
Self {
data: module.new_vec_znx(1, k.div_ceil(basek)),
basek: basek,
k,
}
}
pub fn byte_of(module: &Module<FFT64>, basek: usize, k: usize) -> usize {
module.bytes_of_vec_znx(1, k.div_ceil(basek))
}
}
impl<D: AsRef<[u8]>> GLWECiphertextToRef for GLWEPlaintext<D> {
fn to_ref(&self) -> GLWECiphertext<&[u8]> {
GLWECiphertext {
data: self.data.to_ref(),
basek: self.basek,
k: self.k,
}
}
}
impl<D: AsMut<[u8]> + AsRef<[u8]>> GLWECiphertextToMut for GLWEPlaintext<D> {
fn to_mut(&mut self) -> GLWECiphertext<&mut [u8]> {
GLWECiphertext {
data: self.data.to_mut(),
basek: self.basek,
k: self.k,
}
}
}
impl<D> GLWEOps for GLWEPlaintext<D>
where
D: AsRef<[u8]> + AsMut<[u8]>,
GLWEPlaintext<D>: GLWECiphertextToMut + Infos + SetMetaData,
{
}


@@ -0,0 +1,75 @@
use backend::{Backend, FFT64, Module, ScratchOwned, VecZnxDft};
use sampling::source::Source;
use crate::{FourierGLWECiphertext, GLWESecret, Infos, keys::SecretDistribution};
pub struct GLWEPublicKey<D, B: Backend> {
pub(crate) data: FourierGLWECiphertext<D, B>,
pub(crate) dist: SecretDistribution,
}
impl<B: Backend> GLWEPublicKey<Vec<u8>, B> {
pub fn alloc(module: &Module<B>, basek: usize, k: usize, rank: usize) -> Self {
Self {
data: FourierGLWECiphertext::alloc(module, basek, k, rank),
dist: SecretDistribution::NONE,
}
}
pub fn bytes_of(module: &Module<B>, basek: usize, k: usize, rank: usize) -> usize {
FourierGLWECiphertext::<Vec<u8>, B>::bytes_of(module, basek, k, rank)
}
}
impl<T, B: Backend> Infos for GLWEPublicKey<T, B> {
type Inner = VecZnxDft<T, B>;
fn inner(&self) -> &Self::Inner {
&self.data.data
}
fn basek(&self) -> usize {
self.data.basek
}
fn k(&self) -> usize {
self.data.k
}
}
impl<T, B: Backend> GLWEPublicKey<T, B> {
pub fn rank(&self) -> usize {
self.cols() - 1
}
}
impl<C: AsRef<[u8]> + AsMut<[u8]>> GLWEPublicKey<C, FFT64> {
pub fn generate_from_sk<S: AsRef<[u8]>>(
&mut self,
module: &Module<FFT64>,
sk: &GLWESecret<S, FFT64>,
source_xa: &mut Source,
source_xe: &mut Source,
sigma: f64,
) {
#[cfg(debug_assertions)]
{
match sk.dist {
SecretDistribution::NONE => panic!("invalid sk: SecretDistribution::NONE"),
_ => {}
}
}
// It's ok to allocate scratch space here since the pk is usually generated only once.
let mut scratch: ScratchOwned = ScratchOwned::new(FourierGLWECiphertext::encrypt_sk_scratch_space(
module,
self.basek(),
self.k(),
self.rank(),
));
self.data
.encrypt_zero_sk(module, sk, source_xa, source_xe, sigma, scratch.borrow());
self.dist = sk.dist;
}
}
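
An illustrative sketch (not from the commit) combining generate_from_sk above with encrypt_pk from encryption.rs; the choice of k_pk for encrypt_pk_scratch_space is an assumption (the temporaries of encrypt_pk_private are sized by the public key), as is ScratchOwned::borrow() returning &mut Scratch.

use backend::{FFT64, Module, ScratchOwned};
use sampling::source::Source;
use crate::{GLWECiphertext, GLWEPlaintext, GLWEPublicKey, GLWESecret};

// Hypothetical helper: derives a public key from `sk`, then encrypts `pt` under it.
// The secret key and the randomness sources are assumed to be initialized by the caller.
fn pk_encrypt(
    module: &Module<FFT64>,
    sk: &GLWESecret<Vec<u8>, FFT64>,
    pt: &GLWEPlaintext<Vec<u8>>,
    basek: usize,
    k_pk: usize,
    k_ct: usize,
    rank: usize,
    source_xa: &mut Source,
    source_xu: &mut Source,
    source_xe: &mut Source,
    sigma: f64,
) -> GLWECiphertext<Vec<u8>> {
    let mut pk = GLWEPublicKey::alloc(module, basek, k_pk, rank);
    pk.generate_from_sk(module, sk, source_xa, source_xe, sigma);
    let mut ct = GLWECiphertext::alloc(module, basek, k_ct, rank);
    let mut scratch = ScratchOwned::new(GLWECiphertext::encrypt_pk_scratch_space(
        module, basek, k_pk,
    ));
    ct.encrypt_pk(module, pt, &pk, source_xu, source_xe, sigma, scratch.borrow());
    ct
}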

core/src/glwe/secret.rs Normal file

@@ -0,0 +1,93 @@
use backend::{
Backend, FFT64, Module, ScalarZnx, ScalarZnxAlloc, ScalarZnxDft, ScalarZnxDftAlloc, ScalarZnxDftOps, ZnxInfos, ZnxZero,
};
use sampling::source::Source;
use crate::keys::SecretDistribution;
pub struct GLWESecret<T, B: Backend> {
pub(crate) data: ScalarZnx<T>,
pub(crate) data_fourier: ScalarZnxDft<T, B>,
pub(crate) dist: SecretDistribution,
}
impl<B: Backend> GLWESecret<Vec<u8>, B> {
pub fn alloc(module: &Module<B>, rank: usize) -> Self {
Self {
data: module.new_scalar_znx(rank),
data_fourier: module.new_scalar_znx_dft(rank),
dist: SecretDistribution::NONE,
}
}
pub fn bytes_of(module: &Module<B>, rank: usize) -> usize {
module.bytes_of_scalar_znx(rank) + module.bytes_of_scalar_znx_dft(rank)
}
}
impl<DataSelf, B: Backend> GLWESecret<DataSelf, B> {
pub fn n(&self) -> usize {
self.data.n()
}
pub fn log_n(&self) -> usize {
self.data.log_n()
}
pub fn rank(&self) -> usize {
self.data.cols()
}
}
impl<S: AsMut<[u8]> + AsRef<[u8]>> GLWESecret<S, FFT64> {
pub fn fill_ternary_prob(&mut self, module: &Module<FFT64>, prob: f64, source: &mut Source) {
(0..self.rank()).for_each(|i| {
self.data.fill_ternary_prob(i, prob, source);
});
self.prep_fourier(module);
self.dist = SecretDistribution::TernaryProb(prob);
}
pub fn fill_ternary_hw(&mut self, module: &Module<FFT64>, hw: usize, source: &mut Source) {
(0..self.rank()).for_each(|i| {
self.data.fill_ternary_hw(i, hw, source);
});
self.prep_fourier(module);
self.dist = SecretDistribution::TernaryFixed(hw);
}
pub fn fill_binary_prob(&mut self, module: &Module<FFT64>, prob: f64, source: &mut Source) {
(0..self.rank()).for_each(|i| {
self.data.fill_binary_prob(i, prob, source);
});
self.prep_fourier(module);
self.dist = SecretDistribution::BinaryProb(prob);
}
pub fn fill_binary_hw(&mut self, module: &Module<FFT64>, hw: usize, source: &mut Source) {
(0..self.rank()).for_each(|i| {
self.data.fill_binary_hw(i, hw, source);
});
self.prep_fourier(module);
self.dist = SecretDistribution::BinaryFixed(hw);
}
pub fn fill_binary_block(&mut self, module: &Module<FFT64>, block_size: usize, source: &mut Source) {
(0..self.rank()).for_each(|i| {
self.data.fill_binary_block(i, block_size, source);
});
self.prep_fourier(module);
self.dist = SecretDistribution::BinaryBlock(block_size);
}
pub fn fill_zero(&mut self) {
self.data.zero();
self.dist = SecretDistribution::ZERO;
}
pub(crate) fn prep_fourier(&mut self, module: &Module<FFT64>) {
(0..self.rank()).for_each(|i| {
module.svp_prepare(&mut self.data_fourier, i, &self.data, i);
});
}
}

core/src/glwe/trace.rs Normal file

@@ -0,0 +1,88 @@
use std::collections::HashMap;
use backend::{FFT64, Module, Scratch};
use crate::{AutomorphismKey, GLWECiphertext, GLWECiphertextToMut, GLWECiphertextToRef, GLWEOps, Infos, SetMetaData};
impl GLWECiphertext<Vec<u8>> {
pub fn trace_galois_elements(module: &Module<FFT64>) -> Vec<i64> {
let mut gal_els: Vec<i64> = Vec::new();
(0..module.log_n()).for_each(|i| {
if i == 0 {
gal_els.push(-1);
} else {
gal_els.push(module.galois_element(1 << (i - 1)));
}
});
gal_els
}
pub fn trace_scratch_space(
module: &Module<FFT64>,
basek: usize,
out_k: usize,
in_k: usize,
ksk_k: usize,
digits: usize,
rank: usize,
) -> usize {
Self::automorphism_inplace_scratch_space(module, basek, out_k.min(in_k), ksk_k, digits, rank)
}
pub fn trace_inplace_scratch_space(
module: &Module<FFT64>,
basek: usize,
out_k: usize,
ksk_k: usize,
digits: usize,
rank: usize,
) -> usize {
Self::automorphism_inplace_scratch_space(module, basek, out_k, ksk_k, digits, rank)
}
}
impl<DataSelf: AsRef<[u8]> + AsMut<[u8]>> GLWECiphertext<DataSelf>
where
GLWECiphertext<DataSelf>: GLWECiphertextToMut + Infos + SetMetaData,
{
pub fn trace<DataLhs: AsRef<[u8]>, DataAK: AsRef<[u8]>>(
&mut self,
module: &Module<FFT64>,
start: usize,
end: usize,
lhs: &GLWECiphertext<DataLhs>,
auto_keys: &HashMap<i64, AutomorphismKey<DataAK, FFT64>>,
scratch: &mut Scratch,
) where
GLWECiphertext<DataLhs>: GLWECiphertextToRef + Infos,
{
self.copy(module, lhs);
self.trace_inplace(module, start, end, auto_keys, scratch);
}
pub fn trace_inplace<DataAK: AsRef<[u8]>>(
&mut self,
module: &Module<FFT64>,
start: usize,
end: usize,
auto_keys: &HashMap<i64, AutomorphismKey<DataAK, FFT64>>,
scratch: &mut Scratch,
) {
(start..end).for_each(|i| {
self.rsh(1, scratch);
let p: i64;
if i == 0 {
p = -1;
} else {
p = module.galois_element(1 << (i - 1));
}
if let Some(key) = auto_keys.get(&p) {
self.automorphism_add_inplace(module, key, scratch);
} else {
panic!("auto_keys[{}] is empty", p)
}
});
}
}
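
An illustrative sketch (not from the commit) of the full trace (start = 0, end = log_n), with scratch sized by trace_inplace_scratch_space; the automorphism keys (with parameters k_ksk and digits) are assumed to be generated by the caller, and ScratchOwned::borrow() returning &mut Scratch is an assumption.

use std::collections::HashMap;
use backend::{FFT64, Module, ScratchOwned};
use crate::{AutomorphismKey, GLWECiphertext, Infos};

// Hypothetical helper: applies the full trace to `ct` in place. `auto_keys` must
// contain a key for every element of trace_galois_elements(module), otherwise
// trace_inplace panics.
fn full_trace_inplace(
    module: &Module<FFT64>,
    ct: &mut GLWECiphertext<Vec<u8>>,
    auto_keys: &HashMap<i64, AutomorphismKey<Vec<u8>, FFT64>>,
    k_ksk: usize,
    digits: usize,
) {
    let mut scratch = ScratchOwned::new(GLWECiphertext::trace_inplace_scratch_space(
        module,
        ct.basek(),
        ct.k(),
        k_ksk,
        digits,
        ct.rank(),
    ));
    ct.trace_inplace(module, 0, module.log_n(), auto_keys, scratch.borrow());
}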