mirror of https://github.com/arnaucube/poulpy.git (synced 2026-02-10 21:26:41 +01:00)
added rgsw encrypt + test
@@ -196,7 +196,7 @@ impl Scratch {
         }
     }
 
-    pub fn tmp_scalar<B: Backend>(&mut self, module: &Module<B>, cols: usize) -> (ScalarZnx<&mut [u8]>, &mut Self) {
+    pub fn tmp_scalar_znx<B: Backend>(&mut self, module: &Module<B>, cols: usize) -> (ScalarZnx<&mut [u8]>, &mut Self) {
         let (take_slice, rem_slice) = Self::take_slice_aligned(&mut self.data, bytes_of_scalar_znx(module, cols));
 
         (
@@ -205,7 +205,7 @@ impl Scratch {
         )
     }
 
-    pub fn tmp_scalar_dft<B: Backend>(&mut self, module: &Module<B>, cols: usize) -> (ScalarZnxDft<&mut [u8], B>, &mut Self) {
+    pub fn tmp_scalar_znx_dft<B: Backend>(&mut self, module: &Module<B>, cols: usize) -> (ScalarZnxDft<&mut [u8], B>, &mut Self) {
         let (take_slice, rem_slice) = Self::take_slice_aligned(&mut self.data, bytes_of_scalar_znx_dft(module, cols));
 
         (
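A minimal sketch of how calling code picks up the renamed scratch helpers. Only `tmp_scalar_znx`, `tmp_scalar_znx_dft` and their return types come from this diff; the import paths and the way a `Module`/`Scratch` pair is obtained are assumptions.

```rust
// Sketch only: crate paths and the Scratch/Module setup are assumed, not taken from this diff.
use poulpy::{Backend, Module, ScalarZnx, ScalarZnxDft, Scratch};

fn with_temporaries<B: Backend>(module: &Module<B>, scratch: &mut Scratch, cols: usize) {
    // Borrow an aligned ScalarZnx temporary from the scratch buffer;
    // the remaining scratch space is returned alongside it.
    let (mut tmp_znx, scratch): (ScalarZnx<&mut [u8]>, &mut Scratch) = scratch.tmp_scalar_znx(module, cols);
    // Borrow a ScalarZnxDft temporary from what is left.
    let (mut tmp_znx_dft, _scratch): (ScalarZnxDft<&mut [u8], B>, &mut Scratch) =
        scratch.tmp_scalar_znx_dft(module, cols);
    // ... fill `tmp_znx`, transform it into `tmp_znx_dft`, etc.
    let _ = (&mut tmp_znx, &mut tmp_znx_dft);
}
```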
@@ -1,5 +1,5 @@
 use crate::znx_base::ZnxInfos;
-use crate::{Backend, DataView, DataViewMut, Module, ZnxSliceSize, ZnxView, ZnxViewMut, alloc_aligned};
+use crate::{alloc_aligned, Backend, DataView, DataViewMut, Module, VecZnx, VecZnxToMut, VecZnxToRef, ZnxSliceSize, ZnxView, ZnxViewMut};
 use rand::seq::SliceRandom;
 use rand_core::RngCore;
 use rand_distr::{Distribution, weighted::WeightedIndex};
@@ -144,6 +144,17 @@ impl ScalarZnxToMut for ScalarZnx<Vec<u8>> {
     }
 }
 
+impl VecZnxToMut for ScalarZnx<Vec<u8>> {
+    fn to_mut(&mut self) -> VecZnx<&mut [u8]> {
+        VecZnx {
+            data: self.data.as_mut_slice(),
+            n: self.n,
+            cols: self.cols,
+            size: 1,
+        }
+    }
+}
+
 impl ScalarZnxToRef for ScalarZnx<Vec<u8>> {
     fn to_ref(&self) -> ScalarZnx<&[u8]> {
         ScalarZnx {
@@ -154,6 +165,17 @@ impl ScalarZnxToRef for ScalarZnx<Vec<u8>> {
     }
 }
 
+impl VecZnxToRef for ScalarZnx<Vec<u8>> {
+    fn to_ref(&self) -> VecZnx<&[u8]> {
+        VecZnx {
+            data: self.data.as_slice(),
+            n: self.n,
+            cols: self.cols,
+            size: 1,
+        }
+    }
+}
+
 impl ScalarZnxToMut for ScalarZnx<&mut [u8]> {
     fn to_mut(&mut self) -> ScalarZnx<&mut [u8]> {
         ScalarZnx {
@@ -164,6 +186,17 @@ impl ScalarZnxToMut for ScalarZnx<&mut [u8]> {
     }
 }
 
+impl VecZnxToMut for ScalarZnx<&mut [u8]> {
+    fn to_mut(&mut self) -> VecZnx<&mut [u8]> {
+        VecZnx {
+            data: self.data,
+            n: self.n,
+            cols: self.cols,
+            size: 1,
+        }
+    }
+}
+
 impl ScalarZnxToRef for ScalarZnx<&mut [u8]> {
     fn to_ref(&self) -> ScalarZnx<&[u8]> {
         ScalarZnx {
@@ -174,6 +207,17 @@ impl ScalarZnxToRef for ScalarZnx<&mut [u8]> {
     }
 }
 
+impl VecZnxToRef for ScalarZnx<&mut [u8]> {
+    fn to_ref(&self) -> VecZnx<&[u8]> {
+        VecZnx {
+            data: self.data,
+            n: self.n,
+            cols: self.cols,
+            size: 1,
+        }
+    }
+}
+
 impl ScalarZnxToRef for ScalarZnx<&[u8]> {
     fn to_ref(&self) -> ScalarZnx<&[u8]> {
         ScalarZnx {
@@ -183,3 +227,14 @@ impl ScalarZnxToRef for ScalarZnx<&[u8]> {
         }
     }
 }
+
+impl VecZnxToRef for ScalarZnx<&[u8]> {
+    fn to_ref(&self) -> VecZnx<&[u8]> {
+        VecZnx {
+            data: self.data,
+            n: self.n,
+            cols: self.cols,
+            size: 1,
+        }
+    }
+}
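The point of these new impls is that a `ScalarZnx` can now be handed to anything generic over `VecZnxToRef`/`VecZnxToMut`, where it is viewed as a `VecZnx` with a single limb (`size: 1`). A small sketch, assuming the types are re-exported from the crate root; it also relies on the `VecZnx` fields being made public later in this diff.

```rust
use poulpy::{VecZnx, VecZnxToRef};

// Generic over the conversion trait added above; accepts VecZnx and ScalarZnx alike.
fn limb_count<A: VecZnxToRef>(a: &A) -> usize {
    let v: VecZnx<&[u8]> = a.to_ref();
    v.size // 1 when `a` is a ScalarZnx viewed through the new impl
}
```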
@@ -1,103 +1,103 @@
use crate::ffi::svp;
use crate::ffi::vec_znx_dft::vec_znx_dft_t;
use crate::znx_base::{ZnxInfos, ZnxView, ZnxViewMut};
use crate::{
    Backend, FFT64, Module, ScalarZnxDft, ScalarZnxDftOwned, ScalarZnxDftToMut, ScalarZnxDftToRef, ScalarZnxToRef, VecZnxDft,
    VecZnxDftToMut, VecZnxDftToRef,
};

pub trait ScalarZnxDftAlloc<B: Backend> {
    fn new_scalar_znx_dft(&self, cols: usize) -> ScalarZnxDftOwned<B>;
    fn bytes_of_scalar_znx_dft(&self, cols: usize) -> usize;
    fn new_scalar_znx_dft_from_bytes(&self, cols: usize, bytes: Vec<u8>) -> ScalarZnxDftOwned<B>;
}

pub trait ScalarZnxDftOps<BACKEND: Backend> {
    fn svp_prepare<R, A>(&self, res: &mut R, res_col: usize, a: &A, a_col: usize)
    where
        R: ScalarZnxDftToMut<BACKEND>,
        A: ScalarZnxToRef;
    fn svp_apply<R, A, B>(&self, res: &mut R, res_col: usize, a: &A, a_col: usize, b: &B, b_col: usize)
    where
        R: VecZnxDftToMut<BACKEND>,
        A: ScalarZnxDftToRef<BACKEND>,
        B: VecZnxDftToRef<FFT64>;
    fn svp_apply_inplace<R, A>(&self, res: &mut R, res_col: usize, a: &A, a_col: usize)
    where
        R: VecZnxDftToMut<BACKEND>,
        A: ScalarZnxDftToRef<BACKEND>;
}

impl<B: Backend> ScalarZnxDftAlloc<B> for Module<B> {
    fn new_scalar_znx_dft(&self, cols: usize) -> ScalarZnxDftOwned<B> {
        ScalarZnxDftOwned::new(self, cols)
    }

    fn bytes_of_scalar_znx_dft(&self, cols: usize) -> usize {
        ScalarZnxDftOwned::bytes_of(self, cols)
    }

    fn new_scalar_znx_dft_from_bytes(&self, cols: usize, bytes: Vec<u8>) -> ScalarZnxDftOwned<B> {
        ScalarZnxDftOwned::new_from_bytes(self, cols, bytes)
    }
}

impl ScalarZnxDftOps<FFT64> for Module<FFT64> {
    fn svp_prepare<R, A>(&self, res: &mut R, res_col: usize, a: &A, a_col: usize)
    where
        R: ScalarZnxDftToMut<FFT64>,
        A: ScalarZnxToRef,
    {
        unsafe {
            svp::svp_prepare(
                self.ptr,
                res.to_mut().at_mut_ptr(res_col, 0) as *mut svp::svp_ppol_t,
                a.to_ref().at_ptr(a_col, 0),
            )
        }
    }

    fn svp_apply<R, A, B>(&self, res: &mut R, res_col: usize, a: &A, a_col: usize, b: &B, b_col: usize)
    where
        R: VecZnxDftToMut<FFT64>,
        A: ScalarZnxDftToRef<FFT64>,
        B: VecZnxDftToRef<FFT64>,
    {
        let mut res: VecZnxDft<&mut [u8], FFT64> = res.to_mut();
        let a: ScalarZnxDft<&[u8], FFT64> = a.to_ref();
        let b: VecZnxDft<&[u8], FFT64> = b.to_ref();
        unsafe {
            svp::svp_apply_dft_to_dft(
                self.ptr,
                res.at_mut_ptr(res_col, 0) as *mut vec_znx_dft_t,
                res.size() as u64,
                res.cols() as u64,
                a.at_ptr(a_col, 0) as *const svp::svp_ppol_t,
                b.at_ptr(b_col, 0) as *const vec_znx_dft_t,
                b.size() as u64,
                b.cols() as u64,
            )
        }
    }

    fn svp_apply_inplace<R, A>(&self, res: &mut R, res_col: usize, a: &A, a_col: usize)
    where
        R: VecZnxDftToMut<FFT64>,
        A: ScalarZnxDftToRef<FFT64>,
    {
        let mut res: VecZnxDft<&mut [u8], FFT64> = res.to_mut();
        let a: ScalarZnxDft<&[u8], FFT64> = a.to_ref();
        unsafe {
            svp::svp_apply_dft_to_dft(
                self.ptr,
                res.at_mut_ptr(res_col, 0) as *mut vec_znx_dft_t,
                res.size() as u64,
                res.cols() as u64,
                a.at_ptr(a_col, 0) as *const svp::svp_ppol_t,
                res.at_ptr(res_col, 0) as *const vec_znx_dft_t,
                res.size() as u64,
                res.cols() as u64,
            )
        }
    }
}
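A hedged sketch of the scalar-vector product flow these traits expose: prepare a `ScalarZnx` into the SVP domain, then multiply it against a `VecZnxDft` column. That `ScalarZnxDftOwned` and `VecZnxDft<Vec<u8>, FFT64>` implement the required `...ToMut`/`...ToRef` traits is assumed here, as are the crate paths.

```rust
use poulpy::{FFT64, Module, ScalarZnx, ScalarZnxDftAlloc, ScalarZnxDftOps, VecZnxDft};

fn scalar_vector_product(
    module: &Module<FFT64>,
    s: &ScalarZnx<Vec<u8>>,
    b: &VecZnxDft<Vec<u8>, FFT64>,
    res: &mut VecZnxDft<Vec<u8>, FFT64>,
) {
    // Allocate a one-column prepared-scalar buffer and bring `s` into the SVP domain.
    let mut s_dft = module.new_scalar_znx_dft(1);
    module.svp_prepare(&mut s_dft, 0, s, 0);
    // res[col 0] = s_dft[col 0] * b[col 0], entirely in the DFT domain.
    module.svp_apply(res, 0, &s_dft, 0, b, 0);
}
```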
@@ -20,9 +20,9 @@ use std::{cmp::min, fmt};
 /// are small polynomials of Zn\[X\].
 pub struct VecZnx<D> {
     pub data: D,
-    n: usize,
-    cols: usize,
-    size: usize,
+    pub n: usize,
+    pub cols: usize,
+    pub size: usize,
 }
 
 impl<D> ZnxInfos for VecZnx<D> {
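Making `n`, `cols` and `size` public is what allows the `ScalarZnx` to `VecZnx` conversions earlier in this diff to build the view as a struct literal. A sketch of the same pattern, with `buf`, `n` and `cols` as placeholder values and the crate path assumed.

```rust
use poulpy::VecZnx;

// Assemble a read-only VecZnx view over an existing byte buffer by hand.
fn view_as_single_limb(buf: &[u8], n: usize, cols: usize) -> VecZnx<&[u8]> {
    VecZnx {
        data: buf,
        n,
        cols,
        size: 1, // a single limb, exactly like the ScalarZnx conversions
    }
}
```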
@@ -114,6 +114,9 @@ pub trait VecZnxBigOps<BACKEND: Backend> {
         R: VecZnxBigToMut<BACKEND>,
         A: VecZnxToRef;
 
+    /// Negates `a` inplace.
+    fn vec_znx_big_negate_inplace<A>(&self, a: &mut A, a_col: usize) where A: VecZnxBigToMut<BACKEND>;
+
     /// Normalizes `a` and stores the result on `b`.
     ///
     /// # Arguments
@@ -503,6 +506,25 @@ impl VecZnxBigOps<FFT64> for Module<FFT64> {
         }
     }
 
+    fn vec_znx_big_negate_inplace<A>(&self, a: &mut A, res_col: usize) where A: VecZnxBigToMut<FFT64> {
+        let mut a: VecZnxBig<&mut [u8], FFT64> = a.to_mut();
+        #[cfg(debug_assertions)]
+        {
+            assert_eq!(a.n(), self.n());
+        }
+        unsafe {
+            vec_znx::vec_znx_negate(
+                self.ptr,
+                a.at_mut_ptr(res_col, 0),
+                a.size() as u64,
+                a.sl() as u64,
+                a.at_ptr(res_col, 0),
+                a.size() as u64,
+                a.sl() as u64,
+            )
+        }
+    }
+
     fn vec_znx_big_normalize<R, A>(
         &self,
         log_base2k: usize,
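A usage sketch for the new in-place negation, assuming that `VecZnxBig<Vec<u8>, FFT64>` implements `VecZnxBigToMut<FFT64>` (not shown in this diff) and that the crate paths are as guessed.

```rust
use poulpy::{FFT64, Module, VecZnxBig, VecZnxBigOps};

// Negate column `col` of `big` in place via the trait method added above.
fn negate_column(module: &Module<FFT64>, big: &mut VecZnxBig<Vec<u8>, FFT64>, col: usize) {
    module.vec_znx_big_negate_inplace(big, col);
}
```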