Dev serialization (#64)

* Added compressed serialization for GLWECiphertext, plus ciphertext decompression

* Added compressed serialization for GGLWECiphertext & GLWESwitchingkey

* Generalized the automorphism test

* Removed the dedicated ops on scalar_znx; callers now convert with as_vec_znx/as_vec_znx_mut and call the corresponding op on vec_znx (see the sketch below)

* Added tests for automorphism key encryption

* Added compressed serialization for the tensor key

* Added compressed serialization for GGSW
Jean-Philippe Bossuat authored on 2025-08-12 17:43:28 +02:00, committed by GitHub
parent 4c59733566, commit 9aa4b1f1e2
68 changed files with 3430 additions and 1695 deletions
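
The main API change: the scalar_znx-specific ops (automorphism, multiplication by X^p - 1) are removed from the HAL, and callers are expected to view a ScalarZnx as a VecZnx via the new as_vec_znx/as_vec_znx_mut methods and invoke the vec_znx-level op instead. A rough sketch of the new calling pattern, assuming a Module-level op named VecZnxAutomorphism/vec_znx_automorphism with the same (k, res, res_col, a, a_col) argument order as the removed scalar trait (that name is not visible in this diff and may differ):

```rust
use crate::hal::{
    api::{ScalarZnxAlloc, VecZnxAutomorphism}, // VecZnxAutomorphism is an assumed name
    layouts::{Backend, Module},
};

// Sketch only: substitute the real vec_znx automorphism trait/method from `crate::hal::api`.
fn automorphism_via_vec_znx_view<B: Backend>(module: &Module<B>, k: i64)
where
    Module<B>: ScalarZnxAlloc + VecZnxAutomorphism,
{
    let a = module.scalar_znx_alloc(1); // one-column scalar polynomial
    let mut res = module.scalar_znx_alloc(1);

    // Before this PR: module.scalar_znx_automorphism(k, &mut res, 0, &a, 0);
    // Now: view both operands as VecZnx and call the vec_znx-level op.
    module.vec_znx_automorphism(k, &mut res.as_vec_znx_mut(), 0, &a.as_vec_znx(), 0);
}
```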

View File

@@ -1,4 +1,4 @@
use crate::hal::layouts::{ScalarZnxOwned, ScalarZnxToMut, ScalarZnxToRef};
use crate::hal::layouts::ScalarZnxOwned;
/// Allocates as [crate::hal::layouts::ScalarZnx].
pub trait ScalarZnxAlloc {
@@ -15,33 +15,3 @@ pub trait ScalarZnxAllocBytes {
pub trait ScalarZnxFromBytes {
fn scalar_znx_from_bytes(&self, cols: usize, bytes: Vec<u8>) -> ScalarZnxOwned;
}
/// Applies the mapping X -> X^k to a\[a_col\] and write the result on res\[res_col\].
pub trait ScalarZnxAutomorphism {
fn scalar_znx_automorphism<R, A>(&self, k: i64, res: &mut R, res_col: usize, a: &A, a_col: usize)
where
R: ScalarZnxToMut,
A: ScalarZnxToRef;
}
/// Applies the mapping X -> X^k on res\[res_col\].
pub trait ScalarZnxAutomorphismInplace {
fn scalar_znx_automorphism_inplace<R>(&self, k: i64, res: &mut R, res_col: usize)
where
R: ScalarZnxToMut;
}
/// Multiply a\[a_col\] with (X^p - 1) and write the result on res\[res_col\].
pub trait ScalarZnxMulXpMinusOne {
fn scalar_znx_mul_xp_minus_one<R, A>(&self, p: i64, r: &mut R, r_col: usize, a: &A, a_col: usize)
where
R: ScalarZnxToMut,
A: ScalarZnxToRef;
}
/// Multiply res\[res_col\] with (X^p - 1).
pub trait ScalarZnxMulXpMinusOneInplace {
fn scalar_znx_mul_xp_minus_one_inplace<R>(&self, p: i64, res: &mut R, res_col: usize)
where
R: ScalarZnxToMut;
}

View File

@@ -20,12 +20,12 @@ pub trait VecZnxBigFromBytes<B: Backend> {
}
/// Add a discrete normal distribution on res.
///
/// # Arguments
/// * `basek`: base two logarithm of the bivariate representation
/// * `res`: receiver.
/// * `res_col`: column of the receiver on which the operation is performed/stored.
/// * `k`:
/// * `source`: random coin source.
/// * `sigma`: standard deviation of the discrete normal distribution.
/// * `bound`: rejection sampling bound.

View File

@@ -1,5 +1,6 @@
use crate::hal::layouts::{Data, DataMut, DataRef};
use rand_distr::num_traits::Zero;
use sampling::source::Source;
pub trait ZnxInfos {
/// Returns the ring degree of the polynomials.
@@ -108,3 +109,7 @@ where
fn zero(&mut self);
fn zero_at(&mut self, i: usize, j: usize);
}
pub trait FillUniform {
fn fill_uniform(&mut self, source: &mut Source);
}
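
The new FillUniform trait fills a layout's backing bytes with uniform randomness from a Source; the serialization round-trip test added later in this PR relies on it. A minimal usage sketch, reusing the Source seed and VecZnx::alloc parameters that appear in that test (the #[test] wrapper and import paths are illustrative):

```rust
use crate::hal::{api::FillUniform, layouts::VecZnx};
use sampling::source::Source;

#[test]
fn fill_uniform_sketch() {
    // Deterministic 32-byte seed, as in the serialization tests of this PR.
    let mut source = Source::new([0u8; 32]);
    // n = 1024, 3 columns, 4 limbs of i64, matching VecZnx::alloc::<i64> below.
    let mut a: VecZnx<Vec<u8>> = VecZnx::alloc::<i64>(1024, 3, 4);
    // Overwrites the raw backing buffer with uniform random bytes.
    a.fill_uniform(&mut source);
}
```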

View File

@@ -1,13 +1,7 @@
use crate::hal::{
api::{
ScalarZnxAlloc, ScalarZnxAllocBytes, ScalarZnxAutomorphism, ScalarZnxAutomorphismInplace, ScalarZnxFromBytes,
ScalarZnxMulXpMinusOne, ScalarZnxMulXpMinusOneInplace,
},
layouts::{Backend, Module, ScalarZnxOwned, ScalarZnxToMut, ScalarZnxToRef},
oep::{
ScalarZnxAllocBytesImpl, ScalarZnxAllocImpl, ScalarZnxAutomorphismImpl, ScalarZnxAutomorphismInplaceIml,
ScalarZnxFromBytesImpl, ScalarZnxMulXpMinusOneImpl, ScalarZnxMulXpMinusOneInplaceImpl,
},
api::{ScalarZnxAlloc, ScalarZnxAllocBytes},
layouts::{Backend, Module, ScalarZnxOwned},
oep::{ScalarZnxAllocBytesImpl, ScalarZnxAllocImpl},
};
impl<B> ScalarZnxAllocBytes for Module<B>
@@ -27,62 +21,3 @@ where
B::scalar_znx_alloc_impl(self.n(), cols)
}
}
impl<B> ScalarZnxFromBytes for Module<B>
where
B: Backend + ScalarZnxFromBytesImpl<B>,
{
fn scalar_znx_from_bytes(&self, cols: usize, bytes: Vec<u8>) -> ScalarZnxOwned {
B::scalar_znx_from_bytes_impl(self.n(), cols, bytes)
}
}
impl<B> ScalarZnxAutomorphism for Module<B>
where
B: Backend + ScalarZnxAutomorphismImpl<B>,
{
fn scalar_znx_automorphism<R, A>(&self, k: i64, res: &mut R, res_col: usize, a: &A, a_col: usize)
where
R: ScalarZnxToMut,
A: ScalarZnxToRef,
{
B::scalar_znx_automorphism_impl(self, k, res, res_col, a, a_col);
}
}
impl<B> ScalarZnxAutomorphismInplace for Module<B>
where
B: Backend + ScalarZnxAutomorphismInplaceIml<B>,
{
fn scalar_znx_automorphism_inplace<A>(&self, k: i64, a: &mut A, a_col: usize)
where
A: ScalarZnxToMut,
{
B::scalar_znx_automorphism_inplace_impl(self, k, a, a_col);
}
}
impl<B> ScalarZnxMulXpMinusOne for Module<B>
where
B: Backend + ScalarZnxMulXpMinusOneImpl<B>,
{
fn scalar_znx_mul_xp_minus_one<R, A>(&self, p: i64, r: &mut R, r_col: usize, a: &A, a_col: usize)
where
R: ScalarZnxToMut,
A: ScalarZnxToRef,
{
B::scalar_znx_mul_xp_minus_one_impl(self, p, r, r_col, a, a_col);
}
}
impl<B> ScalarZnxMulXpMinusOneInplace for Module<B>
where
B: Backend + ScalarZnxMulXpMinusOneInplaceImpl<B>,
{
fn scalar_znx_mul_xp_minus_one_inplace<R>(&self, p: i64, r: &mut R, r_col: usize)
where
R: ScalarZnxToMut,
{
B::scalar_znx_mul_xp_minus_one_inplace_impl(self, p, r, r_col);
}
}

View File

@@ -1,12 +1,17 @@
use crate::{
alloc_aligned,
hal::{
api::{DataView, DataViewMut, ZnxInfos, ZnxSliceSize, ZnxView},
api::{DataView, DataViewMut, FillUniform, ZnxInfos, ZnxSliceSize, ZnxView, ZnxViewMut, ZnxZero},
layouts::{Data, DataMut, DataRef, ReaderFrom, VecZnx, WriterTo},
},
};
use std::fmt;
#[derive(PartialEq, Eq)]
use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};
use rand::RngCore;
use sampling::source::Source;
#[derive(PartialEq, Eq, Clone)]
pub struct MatZnx<D: Data> {
data: D,
n: usize,
@@ -16,6 +21,12 @@ pub struct MatZnx<D: Data> {
cols_out: usize,
}
impl<D: DataRef> fmt::Debug for MatZnx<D> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self)
}
}
impl<D: Data> ZnxInfos for MatZnx<D> {
fn cols(&self) -> usize {
self.cols_in
@@ -74,7 +85,7 @@ impl<D: DataRef> MatZnx<D> {
}
impl<D: DataRef + From<Vec<u8>>> MatZnx<D> {
pub(crate) fn new(n: usize, rows: usize, cols_in: usize, cols_out: usize, size: usize) -> Self {
pub(crate) fn alloc(n: usize, rows: usize, cols_in: usize, cols_out: usize, size: usize) -> Self {
let data: Vec<u8> = alloc_aligned(Self::bytes_of(n, rows, cols_in, cols_out, size));
Self {
data: data.into(),
@@ -86,7 +97,7 @@ impl<D: DataRef + From<Vec<u8>>> MatZnx<D> {
}
}
pub(crate) fn new_from_bytes(
pub(crate) fn from_bytes(
n: usize,
rows: usize,
cols_in: usize,
@@ -158,6 +169,12 @@ impl<D: DataMut> MatZnx<D> {
}
}
impl<D: DataMut> FillUniform for MatZnx<D> {
fn fill_uniform(&mut self, source: &mut Source) {
source.fill_bytes(self.data.as_mut());
}
}
pub type MatZnxOwned = MatZnx<Vec<u8>>;
pub type MatZnxMut<'a> = MatZnx<&'a mut [u8]>;
pub type MatZnxRef<'a> = MatZnx<&'a [u8]>;
@@ -209,8 +226,6 @@ impl<D: Data> MatZnx<D> {
}
}
use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};
impl<D: DataMut> ReaderFrom for MatZnx<D> {
fn read_from<R: std::io::Read>(&mut self, reader: &mut R) -> std::io::Result<()> {
self.n = reader.read_u64::<LittleEndian>()? as usize;
@@ -244,3 +259,32 @@ impl<D: DataRef> WriterTo for MatZnx<D> {
Ok(())
}
}
impl<D: DataRef> fmt::Display for MatZnx<D> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
writeln!(
f,
"MatZnx(n={}, rows={}, cols_in={}, cols_out={}, size={})",
self.n, self.rows, self.cols_in, self.cols_out, self.size
)?;
for row_i in 0..self.rows {
writeln!(f, "Row {}:", row_i)?;
for col_i in 0..self.cols_in {
writeln!(f, "cols_in {}:", col_i)?;
writeln!(f, "{}:", self.at(row_i, col_i))?;
}
}
Ok(())
}
}
impl<D: DataMut> ZnxZero for MatZnx<D> {
fn zero(&mut self) {
self.raw_mut().fill(0)
}
fn zero_at(&mut self, i: usize, j: usize) {
self.at_mut(i, j).zero();
}
}

View File

@@ -6,12 +6,12 @@ use sampling::source::Source;
use crate::{
alloc_aligned,
hal::{
api::{DataView, DataViewMut, ZnxInfos, ZnxSliceSize, ZnxView, ZnxViewMut, ZnxZero},
layouts::{Data, DataMut, DataRef, ReaderFrom, VecZnx, VecZnxToMut, VecZnxToRef, WriterTo},
api::{DataView, DataViewMut, FillUniform, ZnxInfos, ZnxSliceSize, ZnxView, ZnxViewMut, ZnxZero},
layouts::{Data, DataMut, DataRef, ReaderFrom, VecZnx, WriterTo},
},
};
#[derive(PartialEq, Eq)]
#[derive(PartialEq, Eq, Debug, Clone)]
pub struct ScalarZnx<D: Data> {
pub(crate) data: D,
pub(crate) n: usize,
@@ -114,7 +114,7 @@ impl<D: DataRef> ScalarZnx<D> {
}
impl<D: DataRef + From<Vec<u8>>> ScalarZnx<D> {
pub fn new(n: usize, cols: usize) -> Self {
pub fn alloc(n: usize, cols: usize) -> Self {
let data: Vec<u8> = alloc_aligned::<u8>(Self::bytes_of(n, cols));
Self {
data: data.into(),
@@ -123,7 +123,7 @@ impl<D: DataRef + From<Vec<u8>>> ScalarZnx<D> {
}
}
pub(crate) fn new_from_bytes(n: usize, cols: usize, bytes: impl Into<Vec<u8>>) -> Self {
pub(crate) fn from_bytes(n: usize, cols: usize, bytes: impl Into<Vec<u8>>) -> Self {
let data: Vec<u8> = bytes.into();
assert!(data.len() == Self::bytes_of(n, cols));
Self {
@@ -143,6 +143,12 @@ impl<D: DataMut> ZnxZero for ScalarZnx<D> {
}
}
impl<D: DataMut> FillUniform for ScalarZnx<D> {
fn fill_uniform(&mut self, source: &mut Source) {
source.fill_bytes(self.data.as_mut());
}
}
pub type ScalarZnxOwned = ScalarZnx<Vec<u8>>;
impl<D: Data> ScalarZnx<D> {
@@ -179,8 +185,8 @@ impl<D: DataMut> ScalarZnxToMut for ScalarZnx<D> {
}
}
impl<D: DataRef> VecZnxToRef for ScalarZnx<D> {
fn to_ref(&self) -> VecZnx<&[u8]> {
impl<D: DataRef> ScalarZnx<D> {
pub fn as_vec_znx(&self) -> VecZnx<&[u8]> {
VecZnx {
data: self.data.as_ref(),
n: self.n,
@@ -191,8 +197,8 @@ impl<D: DataRef> VecZnxToRef for ScalarZnx<D> {
}
}
impl<D: DataMut> VecZnxToMut for ScalarZnx<D> {
fn to_mut(&mut self) -> VecZnx<&mut [u8]> {
impl<D: DataMut> ScalarZnx<D> {
pub fn as_vec_znx_mut(&mut self) -> VecZnx<&mut [u8]> {
VecZnx {
data: self.data.as_mut(),
n: self.n,

View File

@@ -3,12 +3,12 @@ use std::fmt;
use crate::{
alloc_aligned,
hal::{
api::{DataView, DataViewMut, ZnxInfos, ZnxSliceSize, ZnxView, ZnxViewMut, ZnxZero},
api::{DataView, DataViewMut, FillUniform, ZnxInfos, ZnxSliceSize, ZnxView, ZnxViewMut, ZnxZero},
layouts::{Data, DataMut, DataRef, ReaderFrom, WriterTo},
},
};
#[derive(PartialEq, Eq)]
#[derive(PartialEq, Eq, Clone)]
pub struct VecZnx<D: Data> {
pub(crate) data: D,
pub(crate) n: usize,
@@ -86,7 +86,7 @@ impl<D: DataRef> VecZnx<D> {
}
impl<D: DataRef + From<Vec<u8>>> VecZnx<D> {
pub fn new<Scalar: Sized>(n: usize, cols: usize, size: usize) -> Self {
pub fn alloc<Scalar: Sized>(n: usize, cols: usize, size: usize) -> Self {
let data: Vec<u8> = alloc_aligned::<u8>(Self::alloc_bytes::<Scalar>(n, cols, size));
Self {
data: data.into(),
@@ -157,6 +157,12 @@ impl<D: DataRef> fmt::Display for VecZnx<D> {
}
}
impl<D: DataMut> FillUniform for VecZnx<D> {
fn fill_uniform(&mut self, source: &mut Source) {
source.fill_bytes(self.data.as_mut());
}
}
pub type VecZnxOwned = VecZnx<Vec<u8>>;
pub type VecZnxMut<'a> = VecZnx<&'a mut [u8]>;
pub type VecZnxRef<'a> = VecZnx<&'a [u8]>;
@@ -207,6 +213,8 @@ impl<D: DataRef> VecZnx<D> {
}
use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};
use rand::RngCore;
use sampling::source::Source;
impl<D: DataMut> ReaderFrom for VecZnx<D> {
fn read_from<R: std::io::Read>(&mut self, reader: &mut R) -> std::io::Result<()> {

View File

@@ -1,4 +1,4 @@
use crate::hal::layouts::{Backend, Module, ScalarZnxOwned, ScalarZnxToMut, ScalarZnxToRef};
use crate::hal::layouts::{Backend, ScalarZnxOwned};
pub unsafe trait ScalarZnxFromBytesImpl<B: Backend> {
fn scalar_znx_from_bytes_impl(n: usize, cols: usize, bytes: Vec<u8>) -> ScalarZnxOwned;
@@ -11,29 +11,3 @@ pub unsafe trait ScalarZnxAllocBytesImpl<B: Backend> {
pub unsafe trait ScalarZnxAllocImpl<B: Backend> {
fn scalar_znx_alloc_impl(n: usize, cols: usize) -> ScalarZnxOwned;
}
pub unsafe trait ScalarZnxAutomorphismImpl<B: Backend> {
fn scalar_znx_automorphism_impl<R, A>(module: &Module<B>, k: i64, res: &mut R, res_col: usize, a: &A, a_col: usize)
where
R: ScalarZnxToMut,
A: ScalarZnxToRef;
}
pub unsafe trait ScalarZnxAutomorphismInplaceIml<B: Backend> {
fn scalar_znx_automorphism_inplace_impl<A>(module: &Module<B>, k: i64, a: &mut A, a_col: usize)
where
A: ScalarZnxToMut;
}
pub unsafe trait ScalarZnxMulXpMinusOneImpl<B: Backend> {
fn scalar_znx_mul_xp_minus_one_impl<R, A>(module: &Module<B>, p: i64, r: &mut R, r_col: usize, a: &A, a_col: usize)
where
R: ScalarZnxToMut,
A: ScalarZnxToRef;
}
pub unsafe trait ScalarZnxMulXpMinusOneInplaceImpl<B: Backend> {
fn scalar_znx_mul_xp_minus_one_inplace_impl<R>(module: &Module<B>, p: i64, r: &mut R, r_col: usize)
where
R: ScalarZnxToMut;
}

View File

@@ -1 +1,2 @@
pub mod serialization;
pub mod vec_znx;

View File

@@ -0,0 +1,56 @@
use std::fmt::Debug;
use sampling::source::Source;
use crate::hal::{
api::{FillUniform, ZnxZero},
layouts::{ReaderFrom, WriterTo},
};
/// Generic test for serialization and deserialization.
///
/// - `T` must implement I/O traits, zeroing, cloning, and random filling.
pub fn test_reader_writer_interface<T>(mut original: T)
where
T: WriterTo + ReaderFrom + PartialEq + Eq + Debug + Clone + ZnxZero + FillUniform,
{
// Fill original with uniform random data
let mut source = Source::new([0u8; 32]);
original.fill_uniform(&mut source);
// Serialize into a buffer
let mut buffer = Vec::new();
original.write_to(&mut buffer).expect("write_to failed");
// Prepare receiver: same shape, but zeroed
let mut receiver = original.clone();
receiver.zero();
// Deserialize from buffer
let mut reader: &[u8] = &buffer;
receiver.read_from(&mut reader).expect("read_from failed");
// Ensure serialization round-trip correctness
assert_eq!(
&original, &receiver,
"Deserialized object does not match the original"
);
}
#[test]
fn scalar_znx_serialize() {
let original: crate::hal::layouts::ScalarZnx<Vec<u8>> = crate::hal::layouts::ScalarZnx::alloc(1024, 3);
test_reader_writer_interface(original);
}
#[test]
fn vec_znx_serialize() {
let original: crate::hal::layouts::VecZnx<Vec<u8>> = crate::hal::layouts::VecZnx::alloc::<i64>(1024, 3, 4);
test_reader_writer_interface(original);
}
#[test]
fn mat_znx_serialize() {
let original: crate::hal::layouts::MatZnx<Vec<u8>> = crate::hal::layouts::MatZnx::alloc(1024, 3, 2, 2, 4);
test_reader_writer_interface(original);
}

View File

@@ -11,7 +11,7 @@ where
B: CPUAVX,
{
fn mat_znx_alloc_impl(module: &Module<B>, rows: usize, cols_in: usize, cols_out: usize, size: usize) -> MatZnxOwned {
MatZnxOwned::new(module.n(), rows, cols_in, cols_out, size)
MatZnxOwned::alloc(module.n(), rows, cols_in, cols_out, size)
}
}
@@ -36,6 +36,6 @@ where
size: usize,
bytes: Vec<u8>,
) -> MatZnxOwned {
MatZnxOwned::new_from_bytes(module.n(), rows, cols_in, cols_out, size, bytes)
MatZnxOwned::from_bytes(module.n(), rows, cols_in, cols_out, size, bytes)
}
}

View File

@@ -21,6 +21,9 @@ pub use module_fft64::*;
pub use module_ntt120::*;
/// For external documentation
pub use vec_znx::{vec_znx_copy_ref, vec_znx_lsh_inplace_ref, vec_znx_merge_ref, vec_znx_rsh_inplace_ref, vec_znx_split_ref, vec_znx_switch_degree_ref};
pub use vec_znx::{
vec_znx_copy_ref, vec_znx_lsh_inplace_ref, vec_znx_merge_ref, vec_znx_rsh_inplace_ref, vec_znx_split_ref,
vec_znx_switch_degree_ref,
};
pub trait CPUAVX {}

View File

@@ -1,16 +1,9 @@
use crate::{
hal::{
api::{ZnxInfos, ZnxSliceSize, ZnxView, ZnxViewMut},
layouts::{Backend, Module, ScalarZnx, ScalarZnxOwned, ScalarZnxToMut, ScalarZnxToRef},
oep::{
ScalarZnxAllocBytesImpl, ScalarZnxAllocImpl, ScalarZnxAutomorphismImpl, ScalarZnxAutomorphismInplaceIml,
ScalarZnxFromBytesImpl,
},
},
implementation::cpu_spqlios::{
CPUAVX,
ffi::{module::module_info_t, vec_znx},
layouts::{Backend, ScalarZnxOwned},
oep::{ScalarZnxAllocBytesImpl, ScalarZnxAllocImpl, ScalarZnxFromBytesImpl},
},
implementation::cpu_spqlios::CPUAVX,
};
unsafe impl<B: Backend> ScalarZnxAllocBytesImpl<B> for B
@@ -27,7 +20,7 @@ where
B: CPUAVX,
{
fn scalar_znx_alloc_impl(n: usize, cols: usize) -> ScalarZnxOwned {
ScalarZnxOwned::new(n, cols)
ScalarZnxOwned::alloc(n, cols)
}
}
@@ -36,65 +29,6 @@ where
B: CPUAVX,
{
fn scalar_znx_from_bytes_impl(n: usize, cols: usize, bytes: Vec<u8>) -> ScalarZnxOwned {
ScalarZnxOwned::new_from_bytes(n, cols, bytes)
}
}
unsafe impl<B: Backend> ScalarZnxAutomorphismImpl<B> for B
where
B: CPUAVX,
{
fn scalar_znx_automorphism_impl<R, A>(module: &Module<B>, k: i64, res: &mut R, res_col: usize, a: &A, a_col: usize)
where
R: ScalarZnxToMut,
A: ScalarZnxToRef,
{
let a: ScalarZnx<&[u8]> = a.to_ref();
let mut res: ScalarZnx<&mut [u8]> = res.to_mut();
#[cfg(debug_assertions)]
{
assert_eq!(a.n(), module.n());
assert_eq!(res.n(), module.n());
}
unsafe {
vec_znx::vec_znx_automorphism(
module.ptr() as *const module_info_t,
k,
res.at_mut_ptr(res_col, 0),
res.size() as u64,
res.sl() as u64,
a.at_ptr(a_col, 0),
a.size() as u64,
a.sl() as u64,
)
}
}
}
unsafe impl<B: Backend> ScalarZnxAutomorphismInplaceIml<B> for B
where
B: CPUAVX,
{
fn scalar_znx_automorphism_inplace_impl<A>(module: &Module<B>, k: i64, a: &mut A, a_col: usize)
where
A: ScalarZnxToMut,
{
let mut a: ScalarZnx<&mut [u8]> = a.to_mut();
#[cfg(debug_assertions)]
{
assert_eq!(a.n(), module.n());
}
unsafe {
vec_znx::vec_znx_automorphism(
module.ptr() as *const module_info_t,
k,
a.at_mut_ptr(a_col, 0),
a.size() as u64,
a.sl() as u64,
a.at_ptr(a_col, 0),
a.size() as u64,
a.sl() as u64,
)
}
ScalarZnxOwned::from_bytes(n, cols, bytes)
}
}

View File

@@ -37,7 +37,7 @@ where
B: CPUAVX,
{
fn vec_znx_alloc_impl(n: usize, cols: usize, size: usize) -> VecZnxOwned {
VecZnxOwned::new::<i64>(n, cols, size)
VecZnxOwned::alloc::<i64>(n, cols, size)
}
}