mirror of https://github.com/arnaucube/poulpy.git

commit 2b2b994f7d (parent f72363cc4b)
committed by Jean-Philippe Bossuat

    wip
@@ -4,3 +4,7 @@ use crate::layouts::Backend;
pub trait ModuleNew<B: Backend> {
    fn new(n: u64) -> Self;
}
+
+pub trait ModuleN {
+    fn n(&self) -> usize;
+}
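A minimal sketch of how the two module traits above might compose; the helper below is illustrative only and not part of the diff (only the ModuleNew/ModuleN signatures are taken from it):

// Illustrative helper: construct a module for some backend B, then query its ring degree.
fn new_module_of_degree<B: Backend, M: ModuleNew<B> + ModuleN>(n: u64) -> (M, usize) {
    let module = M::new(n);
    let degree = module.n();
    (module, degree)
}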
@@ -1,4 +1,7 @@
-use crate::layouts::{Backend, MatZnx, ScalarZnx, Scratch, SvpPPol, VecZnx, VecZnxBig, VecZnxDft, VmpPMat};
+use crate::{
+    api::{ModuleN, SvpPPolBytesOf, VecZnxBigBytesOf, VecZnxDftBytesOf, VmpPMatBytesOf},
+    layouts::{Backend, MatZnx, ScalarZnx, Scratch, SvpPPol, VecZnx, VecZnxBig, VecZnxDft, VmpPMat},
+};

/// Allocates a new [crate::layouts::ScratchOwned] of `size` aligned bytes.
pub trait ScratchOwnedAlloc<B: Backend> {
@@ -25,76 +28,130 @@ pub trait TakeSlice {
    fn take_slice<T>(&mut self, len: usize) -> (&mut [T], &mut Self);
}

-/// Take a slice of bytes from a [Scratch], wraps it into a [ScalarZnx] and returns it
-/// as well as a new [Scratch] minus the taken array of bytes.
-pub trait TakeScalarZnx {
-    fn take_scalar_znx(&mut self, n: usize, cols: usize) -> (ScalarZnx<&mut [u8]>, &mut Self);
-}
+pub trait ScratchTakeBasic
+where
+    Self: TakeSlice,
+{
+    fn take_scalar_znx<M>(&mut self, module: &M, cols: usize) -> (ScalarZnx<&mut [u8]>, &mut Self)
+    where
+        M: ModuleN,
+    {
+        let (take_slice, rem_slice) = self.take_slice(ScalarZnx::bytes_of(module.n(), cols));
+        (
+            ScalarZnx::from_data(take_slice, module.n(), cols),
+            rem_slice,
+        )
+    }

-/// Take a slice of bytes from a [Scratch], wraps it into a [SvpPPol] and returns it
-/// as well as a new [Scratch] minus the taken array of bytes.
-pub trait TakeSvpPPol<B: Backend> {
-    fn take_svp_ppol(&mut self, n: usize, cols: usize) -> (SvpPPol<&mut [u8], B>, &mut Self);
-}
+    fn take_svp_ppol<M, B: Backend>(&mut self, module: &M, cols: usize) -> (SvpPPol<&mut [u8], B>, &mut Self)
+    where
+        M: SvpPPolBytesOf + ModuleN,
+    {
+        let (take_slice, rem_slice) = self.take_slice(module.bytes_of_svp_ppol(cols));
+        (SvpPPol::from_data(take_slice, module.n(), cols), rem_slice)
+    }

-/// Take a slice of bytes from a [Scratch], wraps it into a [VecZnx] and returns it
-/// as well as a new [Scratch] minus the taken array of bytes.
-pub trait TakeVecZnx {
-    fn take_vec_znx(&mut self, n: usize, cols: usize, size: usize) -> (VecZnx<&mut [u8]>, &mut Self);
-}
+    fn take_vec_znx<M>(&mut self, module: &M, cols: usize, size: usize) -> (VecZnx<&mut [u8]>, &mut Self)
+    where
+        M: ModuleN,
+    {
+        let (take_slice, rem_slice) = self.take_slice(VecZnx::bytes_of(module.n(), cols, size));
+        (
+            VecZnx::from_data(take_slice, module.n(), cols, size),
+            rem_slice,
+        )
+    }

-/// Take a slice of bytes from a [Scratch], slices it into a vector of [VecZnx] and returns it
-/// as well as a new [Scratch] minus the taken array of bytes.
-pub trait TakeVecZnxSlice {
-    fn take_vec_znx_slice(&mut self, len: usize, n: usize, cols: usize, size: usize) -> (Vec<VecZnx<&mut [u8]>>, &mut Self);
-}
+    fn take_vec_znx_big<M, B: Backend>(&mut self, module: &M, cols: usize, size: usize) -> (VecZnxBig<&mut [u8], B>, &mut Self)
+    where
+        M: VecZnxBigBytesOf + ModuleN,
+    {
+        let (take_slice, rem_slice) = self.take_slice(module.bytes_of_vec_znx_big(cols, size));
+        (
+            VecZnxBig::from_data(take_slice, module.n(), cols, size),
+            rem_slice,
+        )
+    }

-/// Take a slice of bytes from a [Scratch], wraps it into a [VecZnxBig] and returns it
-/// as well as a new [Scratch] minus the taken array of bytes.
-pub trait TakeVecZnxBig<B: Backend> {
-    fn take_vec_znx_big(&mut self, n: usize, cols: usize, size: usize) -> (VecZnxBig<&mut [u8], B>, &mut Self);
-}
+    fn take_vec_znx_dft<M, B: Backend>(&mut self, module: &M, cols: usize, size: usize) -> (VecZnxDft<&mut [u8], B>, &mut Self)
+    where
+        M: VecZnxDftBytesOf + ModuleN,
+    {
+        let (take_slice, rem_slice) = self.take_slice(module.bytes_of_vec_znx_dft(cols, size));

-/// Take a slice of bytes from a [Scratch], wraps it into a [VecZnxDft] and returns it
-/// as well as a new [Scratch] minus the taken array of bytes.
-pub trait TakeVecZnxDft<B: Backend> {
-    fn take_vec_znx_dft(&mut self, n: usize, cols: usize, size: usize) -> (VecZnxDft<&mut [u8], B>, &mut Self);
-}
+        (
+            VecZnxDft::from_data(take_slice, module.n(), cols, size),
+            rem_slice,
+        )
+    }

-/// Take a slice of bytes from a [Scratch], slices it into a vector of [VecZnxDft] and returns it
-/// as well as a new [Scratch] minus the taken array of bytes.
-pub trait TakeVecZnxDftSlice<B: Backend> {
-    fn take_vec_znx_dft_slice(
+    fn take_vec_znx_dft_slice<M, B: Backend>(
        &mut self,
+        module: &M,
        len: usize,
-        n: usize,
        cols: usize,
        size: usize,
-    ) -> (Vec<VecZnxDft<&mut [u8], B>>, &mut Self);
-}
+    ) -> (Vec<VecZnxDft<&mut [u8], B>>, &mut Self)
+    where
+        M: VecZnxDftBytesOf + ModuleN,
+    {
+        let mut scratch: &mut Self = self;
+        let mut slice: Vec<VecZnxDft<&mut [u8], B>> = Vec::with_capacity(len);
+        for _ in 0..len {
+            let (znx, new_scratch) = scratch.take_vec_znx_dft(module, cols, size);
+            scratch = new_scratch;
+            slice.push(znx);
+        }
+        (slice, scratch)
+    }

-/// Take a slice of bytes from a [Scratch], wraps it into a [VmpPMat] and returns it
-/// as well as a new [Scratch] minus the taken array of bytes.
-pub trait TakeVmpPMat<B: Backend> {
-    fn take_vmp_pmat(
+    fn take_vec_znx_slice<M>(&mut self, module: &M, len: usize, cols: usize, size: usize) -> (Vec<VecZnx<&mut [u8]>>, &mut Self)
+    where
+        M: ModuleN,
+    {
+        let mut scratch: &mut Self = self;
+        let mut slice: Vec<VecZnx<&mut [u8]>> = Vec::with_capacity(len);
+        for _ in 0..len {
+            let (znx, new_scratch) = scratch.take_vec_znx(module, cols, size);
+            scratch = new_scratch;
+            slice.push(znx);
+        }
+        (slice, scratch)
+    }

+    fn take_vmp_pmat<M, B: Backend>(
        &mut self,
-        n: usize,
+        module: &M,
        rows: usize,
        cols_in: usize,
        cols_out: usize,
        size: usize,
-    ) -> (VmpPMat<&mut [u8], B>, &mut Self);
-}
+    ) -> (VmpPMat<&mut [u8], B>, &mut Self)
+    where
+        M: VmpPMatBytesOf + ModuleN,
+    {
+        let (take_slice, rem_slice) = self.take_slice(module.bytes_of_vmp_pmat(rows, cols_in, cols_out, size));
+        (
+            VmpPMat::from_data(take_slice, module.n(), rows, cols_in, cols_out, size),
+            rem_slice,
+        )
+    }

-/// Take a slice of bytes from a [Scratch], wraps it into a [MatZnx] and returns it
-/// as well as a new [Scratch] minus the taken array of bytes.
-pub trait TakeMatZnx {
-    fn take_mat_znx(
+    fn take_mat_znx<M>(
        &mut self,
-        n: usize,
+        module: &M,
        rows: usize,
        cols_in: usize,
        cols_out: usize,
        size: usize,
-    ) -> (MatZnx<&mut [u8]>, &mut Self);
+    ) -> (MatZnx<&mut [u8]>, &mut Self)
+    where
+        M: ModuleN,
+    {
+        let (take_slice, rem_slice) = self.take_slice(MatZnx::bytes_of(module.n(), rows, cols_in, cols_out, size));
+        (
+            MatZnx::from_data(take_slice, module.n(), rows, cols_in, cols_out, size),
+            rem_slice,
+        )
+    }
}

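A hedged usage sketch of the module-driven take API defined above. It assumes the listed traits are in scope and that Scratch<B> implements ScratchTakeBasic; the column/size values are arbitrary:

// Illustrative only: each take_* call returns the carved-out object plus the
// remaining scratch, so successive takes operate on whatever is left.
fn carve_example<B: Backend, M>(module: &M, scratch: &mut Scratch<B>)
where
    M: ModuleN + VecZnxDftBytesOf,
    Scratch<B>: ScratchTakeBasic,
{
    let (a_dft, scratch) = scratch.take_vec_znx_dft::<M, B>(module, 2, 3);
    let (b, scratch) = scratch.take_vec_znx(module, 2, 3);
    let (c, _remaining) = scratch.take_scalar_znx(module, 1);
    let _ = (a_dft, b, c);
}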
@@ -8,8 +8,8 @@ pub trait SvpPPolAlloc<B: Backend> {
}

/// Returns the size in bytes to allocate a [crate::layouts::SvpPPol].
-pub trait SvpPPolAllocBytes {
-    fn svp_ppol_alloc_bytes(&self, cols: usize) -> usize;
+pub trait SvpPPolBytesOf {
+    fn bytes_of_svp_ppol(&self, cols: usize) -> usize;
}

/// Consume a vector of bytes into a [crate::layouts::MatZnx].

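A hedged before/after sketch of the renaming pattern applied in this hunk and the ones that follow; the `module: &Module<B>` value is an assumption and the sizes are arbitrary:

// Old spelling (removed):                 New spelling (added):
//   module.svp_ppol_alloc_bytes(c)          module.bytes_of_svp_ppol(c)
//   module.vec_znx_dft_alloc_bytes(c, s)    module.bytes_of_vec_znx_dft(c, s)
let scratch_bytes = module.bytes_of_svp_ppol(2)
    + module.bytes_of_vec_znx_dft(2, 3)
    + module.bytes_of_vmp_pmat(4, 1, 2, 3);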
@@ -16,8 +16,8 @@ pub trait VecZnxBigAlloc<B: Backend> {
}

/// Returns the size in bytes to allocate a [crate::layouts::VecZnxBig].
-pub trait VecZnxBigAllocBytes {
-    fn vec_znx_big_alloc_bytes(&self, cols: usize, size: usize) -> usize;
+pub trait VecZnxBigBytesOf {
+    fn bytes_of_vec_znx_big(&self, cols: usize, size: usize) -> usize;
}

/// Consume a vector of bytes into a [crate::layouts::VecZnxBig].

@@ -10,8 +10,8 @@ pub trait VecZnxDftFromBytes<B: Backend> {
    fn vec_znx_dft_from_bytes(&self, cols: usize, size: usize, bytes: Vec<u8>) -> VecZnxDftOwned<B>;
}

-pub trait VecZnxDftAllocBytes {
-    fn vec_znx_dft_alloc_bytes(&self, cols: usize, size: usize) -> usize;
+pub trait VecZnxDftBytesOf {
+    fn bytes_of_vec_znx_dft(&self, cols: usize, size: usize) -> usize;
}

pub trait VecZnxDftApply<B: Backend> {

@@ -6,8 +6,8 @@ pub trait VmpPMatAlloc<B: Backend> {
    fn vmp_pmat_alloc(&self, rows: usize, cols_in: usize, cols_out: usize, size: usize) -> VmpPMatOwned<B>;
}

-pub trait VmpPMatAllocBytes {
-    fn vmp_pmat_alloc_bytes(&self, rows: usize, cols_in: usize, cols_out: usize, size: usize) -> usize;
+pub trait VmpPMatBytesOf {
+    fn bytes_of_vmp_pmat(&self, rows: usize, cols_in: usize, cols_out: usize, size: usize) -> usize;
}

pub trait VmpPMatFromBytes<B: Backend> {

@@ -1,14 +1,7 @@
use crate::{
-    api::{
-        ScratchAvailable, ScratchFromBytes, ScratchOwnedAlloc, ScratchOwnedBorrow, TakeMatZnx, TakeScalarZnx, TakeSlice,
-        TakeSvpPPol, TakeVecZnx, TakeVecZnxBig, TakeVecZnxDft, TakeVecZnxDftSlice, TakeVecZnxSlice, TakeVmpPMat,
-    },
-    layouts::{Backend, MatZnx, ScalarZnx, Scratch, ScratchOwned, SvpPPol, VecZnx, VecZnxBig, VecZnxDft, VmpPMat},
-    oep::{
-        ScratchAvailableImpl, ScratchFromBytesImpl, ScratchOwnedAllocImpl, ScratchOwnedBorrowImpl, TakeMatZnxImpl,
-        TakeScalarZnxImpl, TakeSliceImpl, TakeSvpPPolImpl, TakeVecZnxBigImpl, TakeVecZnxDftImpl, TakeVecZnxDftSliceImpl,
-        TakeVecZnxImpl, TakeVecZnxSliceImpl, TakeVmpPMatImpl,
-    },
+    api::{ScratchAvailable, ScratchFromBytes, ScratchOwnedAlloc, ScratchOwnedBorrow, TakeSlice},
+    layouts::{Backend, Scratch, ScratchOwned},
+    oep::{ScratchAvailableImpl, ScratchFromBytesImpl, ScratchOwnedAllocImpl, ScratchOwnedBorrowImpl, TakeSliceImpl},
};

impl<B> ScratchOwnedAlloc<B> for ScratchOwned<B>
@@ -55,104 +48,3 @@ where
        B::take_slice_impl(self, len)
    }
}
-
-impl<B> TakeScalarZnx for Scratch<B>
-where
-    B: Backend + TakeScalarZnxImpl<B>,
-{
-    fn take_scalar_znx(&mut self, n: usize, cols: usize) -> (ScalarZnx<&mut [u8]>, &mut Self) {
-        B::take_scalar_znx_impl(self, n, cols)
-    }
-}
-
-impl<B> TakeSvpPPol<B> for Scratch<B>
-where
-    B: Backend + TakeSvpPPolImpl<B>,
-{
-    fn take_svp_ppol(&mut self, n: usize, cols: usize) -> (SvpPPol<&mut [u8], B>, &mut Self) {
-        B::take_svp_ppol_impl(self, n, cols)
-    }
-}
-
-impl<B> TakeVecZnx for Scratch<B>
-where
-    B: Backend + TakeVecZnxImpl<B>,
-{
-    fn take_vec_znx(&mut self, n: usize, cols: usize, size: usize) -> (VecZnx<&mut [u8]>, &mut Self) {
-        B::take_vec_znx_impl(self, n, cols, size)
-    }
-}
-
-impl<B> TakeVecZnxSlice for Scratch<B>
-where
-    B: Backend + TakeVecZnxSliceImpl<B>,
-{
-    fn take_vec_znx_slice(&mut self, len: usize, n: usize, cols: usize, size: usize) -> (Vec<VecZnx<&mut [u8]>>, &mut Self) {
-        B::take_vec_znx_slice_impl(self, len, n, cols, size)
-    }
-}
-
-impl<B> TakeVecZnxBig<B> for Scratch<B>
-where
-    B: Backend + TakeVecZnxBigImpl<B>,
-{
-    fn take_vec_znx_big(&mut self, n: usize, cols: usize, size: usize) -> (VecZnxBig<&mut [u8], B>, &mut Self) {
-        B::take_vec_znx_big_impl(self, n, cols, size)
-    }
-}
-
-impl<B> TakeVecZnxDft<B> for Scratch<B>
-where
-    B: Backend + TakeVecZnxDftImpl<B>,
-{
-    fn take_vec_znx_dft(&mut self, n: usize, cols: usize, size: usize) -> (VecZnxDft<&mut [u8], B>, &mut Self) {
-        B::take_vec_znx_dft_impl(self, n, cols, size)
-    }
-}
-
-impl<B> TakeVecZnxDftSlice<B> for Scratch<B>
-where
-    B: Backend + TakeVecZnxDftSliceImpl<B>,
-{
-    fn take_vec_znx_dft_slice(
-        &mut self,
-        len: usize,
-        n: usize,
-        cols: usize,
-        size: usize,
-    ) -> (Vec<VecZnxDft<&mut [u8], B>>, &mut Self) {
-        B::take_vec_znx_dft_slice_impl(self, len, n, cols, size)
-    }
-}
-
-impl<B> TakeVmpPMat<B> for Scratch<B>
-where
-    B: Backend + TakeVmpPMatImpl<B>,
-{
-    fn take_vmp_pmat(
-        &mut self,
-        n: usize,
-        rows: usize,
-        cols_in: usize,
-        cols_out: usize,
-        size: usize,
-    ) -> (VmpPMat<&mut [u8], B>, &mut Self) {
-        B::take_vmp_pmat_impl(self, n, rows, cols_in, cols_out, size)
-    }
-}
-
-impl<B> TakeMatZnx for Scratch<B>
-where
-    B: Backend + TakeMatZnxImpl<B>,
-{
-    fn take_mat_znx(
-        &mut self,
-        n: usize,
-        rows: usize,
-        cols_in: usize,
-        cols_out: usize,
-        size: usize,
-    ) -> (MatZnx<&mut [u8]>, &mut Self) {
-        B::take_mat_znx_impl(self, n, rows, cols_in, cols_out, size)
-    }
-}

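A hedged note on what replaces the impls removed above: because ScratchTakeBasic (earlier in this commit) gives every take_* method a default body on top of TakeSlice, a single blanket impl along the following lines would suffice. This exact line is an assumption; it does not appear in the diff:

// Hypothetical blanket impl: any Scratch<B> that can hand out raw slices
// inherits all module-driven take_* helpers from the trait's default methods.
impl<B: Backend + TakeSliceImpl<B>> ScratchTakeBasic for Scratch<B> {}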
@@ -1,6 +1,6 @@
use crate::{
    api::{
-        SvpApplyDft, SvpApplyDftToDft, SvpApplyDftToDftAdd, SvpApplyDftToDftInplace, SvpPPolAlloc, SvpPPolAllocBytes,
+        SvpApplyDft, SvpApplyDftToDft, SvpApplyDftToDftAdd, SvpApplyDftToDftInplace, SvpPPolAlloc, SvpPPolBytesOf,
        SvpPPolFromBytes, SvpPrepare,
    },
    layouts::{
@@ -30,12 +30,12 @@ where
    }
}

-impl<B> SvpPPolAllocBytes for Module<B>
+impl<B> SvpPPolBytesOf for Module<B>
where
    B: Backend + SvpPPolAllocBytesImpl<B>,
{
-    fn svp_ppol_alloc_bytes(&self, cols: usize) -> usize {
-        B::svp_ppol_alloc_bytes_impl(self.n(), cols)
+    fn bytes_of_svp_ppol(&self, cols: usize) -> usize {
+        B::svp_ppol_bytes_of_impl(self.n(), cols)
    }
}

@@ -1,7 +1,7 @@
use crate::{
    api::{
        VecZnxBigAdd, VecZnxBigAddInplace, VecZnxBigAddNormal, VecZnxBigAddSmall, VecZnxBigAddSmallInplace, VecZnxBigAlloc,
-        VecZnxBigAllocBytes, VecZnxBigAutomorphism, VecZnxBigAutomorphismInplace, VecZnxBigAutomorphismInplaceTmpBytes,
+        VecZnxBigAutomorphism, VecZnxBigAutomorphismInplace, VecZnxBigAutomorphismInplaceTmpBytes, VecZnxBigBytesOf,
        VecZnxBigFromBytes, VecZnxBigFromSmall, VecZnxBigNegate, VecZnxBigNegateInplace, VecZnxBigNormalize,
        VecZnxBigNormalizeTmpBytes, VecZnxBigSub, VecZnxBigSubInplace, VecZnxBigSubNegateInplace, VecZnxBigSubSmallA,
        VecZnxBigSubSmallB, VecZnxBigSubSmallInplace, VecZnxBigSubSmallNegateInplace,
@@ -49,12 +49,12 @@ where
    }
}

-impl<B> VecZnxBigAllocBytes for Module<B>
+impl<B> VecZnxBigBytesOf for Module<B>
where
    B: Backend + VecZnxBigAllocBytesImpl<B>,
{
-    fn vec_znx_big_alloc_bytes(&self, cols: usize, size: usize) -> usize {
-        B::vec_znx_big_alloc_bytes_impl(self.n(), cols, size)
+    fn bytes_of_vec_znx_big(&self, cols: usize, size: usize) -> usize {
+        B::vec_znx_big_bytes_of_impl(self.n(), cols, size)
    }
}

@@ -1,8 +1,8 @@
use crate::{
    api::{
-        VecZnxDftAdd, VecZnxDftAddInplace, VecZnxDftAlloc, VecZnxDftAllocBytes, VecZnxDftApply, VecZnxDftCopy,
-        VecZnxDftFromBytes, VecZnxDftSub, VecZnxDftSubInplace, VecZnxDftSubNegateInplace, VecZnxDftZero, VecZnxIdftApply,
-        VecZnxIdftApplyConsume, VecZnxIdftApplyTmpA, VecZnxIdftApplyTmpBytes,
+        VecZnxDftAdd, VecZnxDftAddInplace, VecZnxDftAlloc, VecZnxDftApply, VecZnxDftBytesOf, VecZnxDftCopy, VecZnxDftFromBytes,
+        VecZnxDftSub, VecZnxDftSubInplace, VecZnxDftSubNegateInplace, VecZnxDftZero, VecZnxIdftApply, VecZnxIdftApplyConsume,
+        VecZnxIdftApplyTmpA, VecZnxIdftApplyTmpBytes,
    },
    layouts::{
        Backend, Data, Module, Scratch, VecZnxBig, VecZnxBigToMut, VecZnxDft, VecZnxDftOwned, VecZnxDftToMut, VecZnxDftToRef,
@@ -24,12 +24,12 @@ where
    }
}

-impl<B> VecZnxDftAllocBytes for Module<B>
+impl<B> VecZnxDftBytesOf for Module<B>
where
    B: Backend + VecZnxDftAllocBytesImpl<B>,
{
-    fn vec_znx_dft_alloc_bytes(&self, cols: usize, size: usize) -> usize {
-        B::vec_znx_dft_alloc_bytes_impl(self.n(), cols, size)
+    fn bytes_of_vec_znx_dft(&self, cols: usize, size: usize) -> usize {
+        B::vec_znx_dft_bytes_of_impl(self.n(), cols, size)
    }
}

@@ -1,7 +1,7 @@
use crate::{
    api::{
        VmpApplyDft, VmpApplyDftTmpBytes, VmpApplyDftToDft, VmpApplyDftToDftAdd, VmpApplyDftToDftAddTmpBytes,
-        VmpApplyDftToDftTmpBytes, VmpPMatAlloc, VmpPMatAllocBytes, VmpPMatFromBytes, VmpPrepare, VmpPrepareTmpBytes,
+        VmpApplyDftToDftTmpBytes, VmpPMatAlloc, VmpPMatBytesOf, VmpPMatFromBytes, VmpPrepare, VmpPrepareTmpBytes,
    },
    layouts::{
        Backend, MatZnxToRef, Module, Scratch, VecZnxDftToMut, VecZnxDftToRef, VecZnxToRef, VmpPMatOwned, VmpPMatToMut,
@@ -23,12 +23,12 @@ where
    }
}

-impl<B> VmpPMatAllocBytes for Module<B>
+impl<B> VmpPMatBytesOf for Module<B>
where
    B: Backend + VmpPMatAllocBytesImpl<B>,
{
-    fn vmp_pmat_alloc_bytes(&self, rows: usize, cols_in: usize, cols_out: usize, size: usize) -> usize {
-        B::vmp_pmat_alloc_bytes_impl(self.n(), rows, cols_in, cols_out, size)
+    fn bytes_of_vmp_pmat(&self, rows: usize, cols_in: usize, cols_out: usize, size: usize) -> usize {
+        B::vmp_pmat_bytes_of_impl(self.n(), rows, cols_in, cols_out, size)
    }
}

@@ -114,12 +114,12 @@ impl<D: Data> MatZnx<D> {
}

impl MatZnx<Vec<u8>> {
-    pub fn alloc_bytes(n: usize, rows: usize, cols_in: usize, cols_out: usize, size: usize) -> usize {
-        rows * cols_in * VecZnx::<Vec<u8>>::alloc_bytes(n, cols_out, size)
+    pub fn bytes_of(n: usize, rows: usize, cols_in: usize, cols_out: usize, size: usize) -> usize {
+        rows * cols_in * VecZnx::<Vec<u8>>::bytes_of(n, cols_out, size)
    }

    pub fn alloc(n: usize, rows: usize, cols_in: usize, cols_out: usize, size: usize) -> Self {
-        let data: Vec<u8> = alloc_aligned(Self::alloc_bytes(n, rows, cols_in, cols_out, size));
+        let data: Vec<u8> = alloc_aligned(Self::bytes_of(n, rows, cols_in, cols_out, size));
        Self {
            data,
            n,
@@ -132,7 +132,7 @@ impl MatZnx<Vec<u8>> {

    pub fn from_bytes(n: usize, rows: usize, cols_in: usize, cols_out: usize, size: usize, bytes: impl Into<Vec<u8>>) -> Self {
        let data: Vec<u8> = bytes.into();
-        assert!(data.len() == Self::alloc_bytes(n, rows, cols_in, cols_out, size));
+        assert!(data.len() == Self::bytes_of(n, rows, cols_in, cols_out, size));
        Self {
            data,
            n,
@@ -153,7 +153,7 @@ impl<D: DataRef> MatZnx<D> {
        }

        let self_ref: MatZnx<&[u8]> = self.to_ref();
-        let nb_bytes: usize = VecZnx::<Vec<u8>>::alloc_bytes(self.n, self.cols_out, self.size);
+        let nb_bytes: usize = VecZnx::<Vec<u8>>::bytes_of(self.n, self.cols_out, self.size);
        let start: usize = nb_bytes * self.cols() * row + col * nb_bytes;
        let end: usize = start + nb_bytes;

@@ -181,7 +181,7 @@ impl<D: DataMut> MatZnx<D> {
        let size: usize = self.size();

        let self_ref: MatZnx<&mut [u8]> = self.to_mut();
-        let nb_bytes: usize = VecZnx::<Vec<u8>>::alloc_bytes(n, cols_out, size);
+        let nb_bytes: usize = VecZnx::<Vec<u8>>::bytes_of(n, cols_out, size);
        let start: usize = nb_bytes * cols_in * row + col * nb_bytes;
        let end: usize = start + nb_bytes;

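A hedged arithmetic check of the renamed size helpers, using only the formulas visible in this diff (MatZnx::bytes_of = rows * cols_in * VecZnx::bytes_of(n, cols_out, size), and VecZnx::bytes_of = n * cols * size * size_of::<i64>()); the values are arbitrary:

// Illustrative values only.
let (n, rows, cols_in, cols_out, size) = (1024, 2, 1, 2, 3);
// VecZnx::bytes_of(1024, 2, 3) = 1024 * 2 * 3 * 8 = 49_152 bytes
// MatZnx::bytes_of             = 2 * 1 * 49_152   = 98_304 bytes
assert_eq!(MatZnx::<Vec<u8>>::bytes_of(n, rows, cols_in, cols_out, size), 98_304);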
@@ -132,18 +132,18 @@ impl<D: DataMut> ScalarZnx<D> {
}

impl ScalarZnx<Vec<u8>> {
-    pub fn alloc_bytes(n: usize, cols: usize) -> usize {
+    pub fn bytes_of(n: usize, cols: usize) -> usize {
        n * cols * size_of::<i64>()
    }

    pub fn alloc(n: usize, cols: usize) -> Self {
-        let data: Vec<u8> = alloc_aligned::<u8>(Self::alloc_bytes(n, cols));
+        let data: Vec<u8> = alloc_aligned::<u8>(Self::bytes_of(n, cols));
        Self { data, n, cols }
    }

    pub fn from_bytes(n: usize, cols: usize, bytes: impl Into<Vec<u8>>) -> Self {
        let data: Vec<u8> = bytes.into();
-        assert!(data.len() == Self::alloc_bytes(n, cols));
+        assert!(data.len() == Self::bytes_of(n, cols));
        Self { data, n, cols }
    }
}

@@ -77,7 +77,7 @@ where
    B: SvpPPolAllocBytesImpl<B>,
{
    pub fn alloc(n: usize, cols: usize) -> Self {
-        let data: Vec<u8> = alloc_aligned::<u8>(B::svp_ppol_alloc_bytes_impl(n, cols));
+        let data: Vec<u8> = alloc_aligned::<u8>(B::svp_ppol_bytes_of_impl(n, cols));
        Self {
            data: data.into(),
            n,
@@ -88,7 +88,7 @@ where

    pub fn from_bytes(n: usize, cols: usize, bytes: impl Into<Vec<u8>>) -> Self {
        let data: Vec<u8> = bytes.into();
-        assert!(data.len() == B::svp_ppol_alloc_bytes_impl(n, cols));
+        assert!(data.len() == B::svp_ppol_bytes_of_impl(n, cols));
        Self {
            data: data.into(),
            n,

@@ -110,7 +110,7 @@ impl<D: DataRef> ZnxView for VecZnx<D> {
}

impl VecZnx<Vec<u8>> {
-    pub fn rsh_scratch_space(n: usize) -> usize {
+    pub fn rsh_tmp_bytes(n: usize) -> usize {
        n * std::mem::size_of::<i64>()
    }
}
@@ -125,12 +125,12 @@ impl<D: DataMut> ZnxZero for VecZnx<D> {
}

impl VecZnx<Vec<u8>> {
-    pub fn alloc_bytes(n: usize, cols: usize, size: usize) -> usize {
+    pub fn bytes_of(n: usize, cols: usize, size: usize) -> usize {
        n * cols * size * size_of::<i64>()
    }

    pub fn alloc(n: usize, cols: usize, size: usize) -> Self {
-        let data: Vec<u8> = alloc_aligned::<u8>(Self::alloc_bytes(n, cols, size));
+        let data: Vec<u8> = alloc_aligned::<u8>(Self::bytes_of(n, cols, size));
        Self {
            data,
            n,
@@ -142,7 +142,7 @@ impl VecZnx<Vec<u8>> {

    pub fn from_bytes<Scalar: Sized>(n: usize, cols: usize, size: usize, bytes: impl Into<Vec<u8>>) -> Self {
        let data: Vec<u8> = bytes.into();
-        assert!(data.len() == Self::alloc_bytes(n, cols, size));
+        assert!(data.len() == Self::bytes_of(n, cols, size));
        Self {
            data,
            n,

@@ -96,7 +96,7 @@ where
    B: VecZnxBigAllocBytesImpl<B>,
{
    pub fn alloc(n: usize, cols: usize, size: usize) -> Self {
-        let data = alloc_aligned::<u8>(B::vec_znx_big_alloc_bytes_impl(n, cols, size));
+        let data = alloc_aligned::<u8>(B::vec_znx_big_bytes_of_impl(n, cols, size));
        Self {
            data: data.into(),
            n,
@@ -109,7 +109,7 @@ where

    pub fn from_bytes(n: usize, cols: usize, size: usize, bytes: impl Into<Vec<u8>>) -> Self {
        let data: Vec<u8> = bytes.into();
-        assert!(data.len() == B::vec_znx_big_alloc_bytes_impl(n, cols, size));
+        assert!(data.len() == B::vec_znx_big_bytes_of_impl(n, cols, size));
        Self {
            data: data.into(),
            n,

@@ -116,7 +116,7 @@ where
    B: VecZnxDftAllocBytesImpl<B>,
{
    pub fn alloc(n: usize, cols: usize, size: usize) -> Self {
-        let data: Vec<u8> = alloc_aligned::<u8>(B::vec_znx_dft_alloc_bytes_impl(n, cols, size));
+        let data: Vec<u8> = alloc_aligned::<u8>(B::vec_znx_dft_bytes_of_impl(n, cols, size));
        Self {
            data: data.into(),
            n,
@@ -129,7 +129,7 @@ where

    pub fn from_bytes(n: usize, cols: usize, size: usize, bytes: impl Into<Vec<u8>>) -> Self {
        let data: Vec<u8> = bytes.into();
-        assert!(data.len() == B::vec_znx_dft_alloc_bytes_impl(n, cols, size));
+        assert!(data.len() == B::vec_znx_dft_bytes_of_impl(n, cols, size));
        Self {
            data: data.into(),
            n,

@@ -88,9 +88,7 @@ where
    B: VmpPMatAllocBytesImpl<B>,
{
    pub fn alloc(n: usize, rows: usize, cols_in: usize, cols_out: usize, size: usize) -> Self {
-        let data: Vec<u8> = alloc_aligned(B::vmp_pmat_alloc_bytes_impl(
-            n, rows, cols_in, cols_out, size,
-        ));
+        let data: Vec<u8> = alloc_aligned(B::vmp_pmat_bytes_of_impl(n, rows, cols_in, cols_out, size));
        Self {
            data: data.into(),
            n,
@@ -104,7 +102,7 @@ where

    pub fn from_bytes(n: usize, rows: usize, cols_in: usize, cols_out: usize, size: usize, bytes: impl Into<Vec<u8>>) -> Self {
        let data: Vec<u8> = bytes.into();
-        assert!(data.len() == B::vmp_pmat_alloc_bytes_impl(n, rows, cols_in, cols_out, size));
+        assert!(data.len() == B::vmp_pmat_bytes_of_impl(n, rows, cols_in, cols_out, size));
        Self {
            data: data.into(),
            n,

@@ -98,7 +98,7 @@ impl<D: DataRef> ZnxView for Zn<D> {
}

impl Zn<Vec<u8>> {
-    pub fn rsh_scratch_space(n: usize) -> usize {
+    pub fn rsh_tmp_bytes(n: usize) -> usize {
        n * std::mem::size_of::<i64>()
    }
}
@@ -113,12 +113,12 @@ impl<D: DataMut> ZnxZero for Zn<D> {
}

impl Zn<Vec<u8>> {
-    pub fn alloc_bytes(n: usize, cols: usize, size: usize) -> usize {
+    pub fn bytes_of(n: usize, cols: usize, size: usize) -> usize {
        n * cols * size * size_of::<i64>()
    }

    pub fn alloc(n: usize, cols: usize, size: usize) -> Self {
-        let data: Vec<u8> = alloc_aligned::<u8>(Self::alloc_bytes(n, cols, size));
+        let data: Vec<u8> = alloc_aligned::<u8>(Self::bytes_of(n, cols, size));
        Self {
            data,
            n,
@@ -130,7 +130,7 @@ impl Zn<Vec<u8>> {

    pub fn from_bytes<Scalar: Sized>(n: usize, cols: usize, size: usize, bytes: impl Into<Vec<u8>>) -> Self {
        let data: Vec<u8> = bytes.into();
-        assert!(data.len() == Self::alloc_bytes(n, cols, size));
+        assert!(data.len() == Self::bytes_of(n, cols, size));
        Self {
            data,
            n,

@@ -1,4 +1,4 @@
-use crate::layouts::{Backend, MatZnx, ScalarZnx, Scratch, ScratchOwned, SvpPPol, VecZnx, VecZnxBig, VecZnxDft, VmpPMat};
+use crate::layouts::{Backend, Scratch, ScratchOwned};

/// # THIS TRAIT IS AN OPEN EXTENSION POINT (unsafe)
/// * See the [poulpy-backend/src/cpu_fft64_ref/scratch.rs](https://github.com/phantomzone-org/poulpy/blob/main/poulpy-backend/src/cpu_fft64_ref/scratch.rs) reference implementation.
@@ -39,111 +39,3 @@ pub unsafe trait ScratchAvailableImpl<B: Backend> {
pub unsafe trait TakeSliceImpl<B: Backend> {
    fn take_slice_impl<T>(scratch: &mut Scratch<B>, len: usize) -> (&mut [T], &mut Scratch<B>);
}
-
-/// # THIS TRAIT IS AN OPEN EXTENSION POINT (unsafe)
-/// * See the [poulpy-backend/src/cpu_fft64_ref/scratch.rs](https://github.com/phantomzone-org/poulpy/blob/main/poulpy-backend/src/cpu_fft64_ref/scratch.rs) reference implementation.
-/// * See [crate::api::TakeScalarZnx] for corresponding public API.
-/// # Safety [crate::doc::backend_safety] for safety contract.
-pub unsafe trait TakeScalarZnxImpl<B: Backend> {
-    fn take_scalar_znx_impl(scratch: &mut Scratch<B>, n: usize, cols: usize) -> (ScalarZnx<&mut [u8]>, &mut Scratch<B>);
-}
-
-/// # THIS TRAIT IS AN OPEN EXTENSION POINT (unsafe)
-/// * See the [poulpy-backend/src/cpu_fft64_ref/scratch.rs](https://github.com/phantomzone-org/poulpy/blob/main/poulpy-backend/src/cpu_fft64_ref/scratch.rs) reference implementation.
-/// * See [crate::api::TakeSvpPPol] for corresponding public API.
-/// # Safety [crate::doc::backend_safety] for safety contract.
-pub unsafe trait TakeSvpPPolImpl<B: Backend> {
-    fn take_svp_ppol_impl(scratch: &mut Scratch<B>, n: usize, cols: usize) -> (SvpPPol<&mut [u8], B>, &mut Scratch<B>);
-}
-
-/// # THIS TRAIT IS AN OPEN EXTENSION POINT (unsafe)
-/// * See the [poulpy-backend/src/cpu_fft64_ref/scratch.rs](https://github.com/phantomzone-org/poulpy/blob/main/poulpy-backend/src/cpu_fft64_ref/scratch.rs) reference implementation.
-/// * See [crate::api::TakeVecZnx] for corresponding public API.
-/// # Safety [crate::doc::backend_safety] for safety contract.
-pub unsafe trait TakeVecZnxImpl<B: Backend> {
-    fn take_vec_znx_impl(scratch: &mut Scratch<B>, n: usize, cols: usize, size: usize) -> (VecZnx<&mut [u8]>, &mut Scratch<B>);
-}
-
-/// # THIS TRAIT IS AN OPEN EXTENSION POINT (unsafe)
-/// * See the [poulpy-backend/src/cpu_fft64_ref/scratch.rs](https://github.com/phantomzone-org/poulpy/blob/main/poulpy-backend/src/cpu_fft64_ref/scratch.rs) reference implementation.
-/// * See [crate::api::TakeVecZnxSlice] for corresponding public API.
-/// # Safety [crate::doc::backend_safety] for safety contract.
-pub unsafe trait TakeVecZnxSliceImpl<B: Backend> {
-    fn take_vec_znx_slice_impl(
-        scratch: &mut Scratch<B>,
-        len: usize,
-        n: usize,
-        cols: usize,
-        size: usize,
-    ) -> (Vec<VecZnx<&mut [u8]>>, &mut Scratch<B>);
-}
-
-/// # THIS TRAIT IS AN OPEN EXTENSION POINT (unsafe)
-/// * See the [poulpy-backend/src/cpu_fft64_ref/scratch.rs](https://github.com/phantomzone-org/poulpy/blob/main/poulpy-backend/src/cpu_fft64_ref/scratch.rs) reference implementation.
-/// * See [crate::api::TakeVecZnxBig] for corresponding public API.
-/// # Safety [crate::doc::backend_safety] for safety contract.
-pub unsafe trait TakeVecZnxBigImpl<B: Backend> {
-    fn take_vec_znx_big_impl(
-        scratch: &mut Scratch<B>,
-        n: usize,
-        cols: usize,
-        size: usize,
-    ) -> (VecZnxBig<&mut [u8], B>, &mut Scratch<B>);
-}
-
-/// # THIS TRAIT IS AN OPEN EXTENSION POINT (unsafe)
-/// * See the [poulpy-backend/src/cpu_fft64_ref/scratch.rs](https://github.com/phantomzone-org/poulpy/blob/main/poulpy-backend/src/cpu_fft64_ref/scratch.rs) reference implementation.
-/// * See [crate::api::TakeVecZnxDft] for corresponding public API.
-/// # Safety [crate::doc::backend_safety] for safety contract.
-pub unsafe trait TakeVecZnxDftImpl<B: Backend> {
-    fn take_vec_znx_dft_impl(
-        scratch: &mut Scratch<B>,
-        n: usize,
-        cols: usize,
-        size: usize,
-    ) -> (VecZnxDft<&mut [u8], B>, &mut Scratch<B>);
-}
-
-/// # THIS TRAIT IS AN OPEN EXTENSION POINT (unsafe)
-/// * See the [poulpy-backend/src/cpu_fft64_ref/scratch.rs](https://github.com/phantomzone-org/poulpy/blob/main/poulpy-backend/src/cpu_fft64_ref/scratch.rs) reference implementation.
-/// * See [crate::api::TakeVecZnxDftSlice] for corresponding public API.
-/// # Safety [crate::doc::backend_safety] for safety contract.
-pub unsafe trait TakeVecZnxDftSliceImpl<B: Backend> {
-    fn take_vec_znx_dft_slice_impl(
-        scratch: &mut Scratch<B>,
-        len: usize,
-        n: usize,
-        cols: usize,
-        size: usize,
-    ) -> (Vec<VecZnxDft<&mut [u8], B>>, &mut Scratch<B>);
-}
-
-/// # THIS TRAIT IS AN OPEN EXTENSION POINT (unsafe)
-/// * See the [poulpy-backend/src/cpu_fft64_ref/scratch.rs](https://github.com/phantomzone-org/poulpy/blob/main/poulpy-backend/src/cpu_fft64_ref/scratch.rs) reference implementation.
-/// * See [crate::api::TakeVmpPMat] for corresponding public API.
-/// # Safety [crate::doc::backend_safety] for safety contract.
-pub unsafe trait TakeVmpPMatImpl<B: Backend> {
-    fn take_vmp_pmat_impl(
-        scratch: &mut Scratch<B>,
-        n: usize,
-        rows: usize,
-        cols_in: usize,
-        cols_out: usize,
-        size: usize,
-    ) -> (VmpPMat<&mut [u8], B>, &mut Scratch<B>);
-}
-
-/// # THIS TRAIT IS AN OPEN EXTENSION POINT (unsafe)
-/// * See the [poulpy-backend/src/cpu_fft64_ref/scratch.rs](https://github.com/phantomzone-org/poulpy/blob/main/poulpy-backend/src/cpu_fft64_ref/scratch.rs) reference implementation.
-/// * See [crate::api::TakeMatZnx] for corresponding public API.
-/// # Safety [crate::doc::backend_safety] for safety contract.
-pub unsafe trait TakeMatZnxImpl<B: Backend> {
-    fn take_mat_znx_impl(
-        scratch: &mut Scratch<B>,
-        n: usize,
-        rows: usize,
-        cols_in: usize,
-        cols_out: usize,
-        size: usize,
-    ) -> (MatZnx<&mut [u8]>, &mut Scratch<B>);
-}

@@ -23,7 +23,7 @@ pub unsafe trait SvpPPolAllocImpl<B: Backend> {
/// * See [crate::api::SvpPPolAllocBytes] for corresponding public API.
/// # Safety [crate::doc::backend_safety] for safety contract.
pub unsafe trait SvpPPolAllocBytesImpl<B: Backend> {
-    fn svp_ppol_alloc_bytes_impl(n: usize, cols: usize) -> usize;
+    fn svp_ppol_bytes_of_impl(n: usize, cols: usize) -> usize;
}

/// # THIS TRAIT IS AN OPEN EXTENSION POINT (unsafe)

@@ -35,7 +35,7 @@ pub unsafe trait VecZnxBigFromBytesImpl<B: Backend> {
/// * See [crate::api::VecZnxBigAllocBytes] for corresponding public API.
/// # Safety [crate::doc::backend_safety] for safety contract.
pub unsafe trait VecZnxBigAllocBytesImpl<B: Backend> {
-    fn vec_znx_big_alloc_bytes_impl(n: usize, cols: usize, size: usize) -> usize;
+    fn vec_znx_big_bytes_of_impl(n: usize, cols: usize, size: usize) -> usize;
}

#[allow(clippy::too_many_arguments)]

@@ -42,7 +42,7 @@ pub unsafe trait VecZnxDftApplyImpl<B: Backend> {
/// * See [crate::api::VecZnxDftAllocBytes] for corresponding public API.
/// # Safety [crate::doc::backend_safety] for safety contract.
pub unsafe trait VecZnxDftAllocBytesImpl<B: Backend> {
-    fn vec_znx_dft_alloc_bytes_impl(n: usize, cols: usize, size: usize) -> usize;
+    fn vec_znx_dft_bytes_of_impl(n: usize, cols: usize, size: usize) -> usize;
}

/// # THIS TRAIT IS AN OPEN EXTENSION POINT (unsafe)

@@ -15,7 +15,7 @@ pub unsafe trait VmpPMatAllocImpl<B: Backend> {
/// * See [crate::api::VmpPMatAllocBytes] for corresponding public API.
/// # Safety [crate::doc::backend_safety] for safety contract.
pub unsafe trait VmpPMatAllocBytesImpl<B: Backend> {
-    fn vmp_pmat_alloc_bytes_impl(n: usize, rows: usize, cols_in: usize, cols_out: usize, size: usize) -> usize;
+    fn vmp_pmat_bytes_of_impl(n: usize, rows: usize, cols_in: usize, cols_out: usize, size: usize) -> usize;
}

/// # THIS TRAIT IS AN OPEN EXTENSION POINT (unsafe)

@@ -140,7 +140,7 @@ where
        assert!(a.cols() <= cols);
    }

-    let (data, tmp_bytes) = tmp_bytes.split_at_mut(BE::vec_znx_dft_alloc_bytes_impl(n, cols, size));
+    let (data, tmp_bytes) = tmp_bytes.split_at_mut(BE::vec_znx_dft_bytes_of_impl(n, cols, size));

    let mut a_dft: VecZnxDft<&mut [u8], BE> = VecZnxDft::from_data(cast_mut(data), n, cols, size);