Author: Pro7ech
Date: 2025-10-12 21:34:10 +02:00
Committed by: Jean-Philippe Bossuat
Parent: f72363cc4b
Commit: 2b2b994f7d
169 changed files with 8705 additions and 7677 deletions


@@ -3,13 +3,8 @@ use std::marker::PhantomData;
use poulpy_hal::{
    DEFAULTALIGN, alloc_aligned,
    api::ScratchFromBytes,
    layouts::{Backend, MatZnx, ScalarZnx, Scratch, ScratchOwned, SvpPPol, VecZnx, VecZnxBig, VecZnxDft, VmpPMat},
    oep::{
        ScratchAvailableImpl, ScratchFromBytesImpl, ScratchOwnedAllocImpl, ScratchOwnedBorrowImpl, SvpPPolAllocBytesImpl,
        TakeMatZnxImpl, TakeScalarZnxImpl, TakeSliceImpl, TakeSvpPPolImpl, TakeVecZnxBigImpl, TakeVecZnxDftImpl,
        TakeVecZnxDftSliceImpl, TakeVecZnxImpl, TakeVecZnxSliceImpl, TakeVmpPMatImpl, VecZnxBigAllocBytesImpl,
        VecZnxDftAllocBytesImpl, VmpPMatAllocBytesImpl,
    },
    layouts::{Backend, Scratch, ScratchOwned},
    oep::{ScratchAvailableImpl, ScratchFromBytesImpl, ScratchOwnedAllocImpl, ScratchOwnedBorrowImpl, TakeSliceImpl},
};
use crate::cpu_fft64_ref::FFT64Ref;
@@ -64,178 +59,6 @@ where
    }
}

unsafe impl<B: Backend> TakeScalarZnxImpl<B> for FFT64Ref
where
    B: ScratchFromBytesImpl<B>,
{
    fn take_scalar_znx_impl(scratch: &mut Scratch<B>, n: usize, cols: usize) -> (ScalarZnx<&mut [u8]>, &mut Scratch<B>) {
        let (take_slice, rem_slice) = take_slice_aligned(&mut scratch.data, ScalarZnx::alloc_bytes(n, cols));
        (
            ScalarZnx::from_data(take_slice, n, cols),
            Scratch::from_bytes(rem_slice),
        )
    }
}

unsafe impl<B: Backend> TakeSvpPPolImpl<B> for FFT64Ref
where
    B: SvpPPolAllocBytesImpl<B> + ScratchFromBytesImpl<B>,
{
    fn take_svp_ppol_impl(scratch: &mut Scratch<B>, n: usize, cols: usize) -> (SvpPPol<&mut [u8], B>, &mut Scratch<B>) {
        let (take_slice, rem_slice) = take_slice_aligned(&mut scratch.data, B::svp_ppol_alloc_bytes_impl(n, cols));
        (
            SvpPPol::from_data(take_slice, n, cols),
            Scratch::from_bytes(rem_slice),
        )
    }
}

unsafe impl<B: Backend> TakeVecZnxImpl<B> for FFT64Ref
where
    B: ScratchFromBytesImpl<B>,
{
    fn take_vec_znx_impl(scratch: &mut Scratch<B>, n: usize, cols: usize, size: usize) -> (VecZnx<&mut [u8]>, &mut Scratch<B>) {
        let (take_slice, rem_slice) = take_slice_aligned(&mut scratch.data, VecZnx::alloc_bytes(n, cols, size));
        (
            VecZnx::from_data(take_slice, n, cols, size),
            Scratch::from_bytes(rem_slice),
        )
    }
}

unsafe impl<B: Backend> TakeVecZnxBigImpl<B> for FFT64Ref
where
    B: VecZnxBigAllocBytesImpl<B> + ScratchFromBytesImpl<B>,
{
    fn take_vec_znx_big_impl(
        scratch: &mut Scratch<B>,
        n: usize,
        cols: usize,
        size: usize,
    ) -> (VecZnxBig<&mut [u8], B>, &mut Scratch<B>) {
        let (take_slice, rem_slice) = take_slice_aligned(
            &mut scratch.data,
            B::vec_znx_big_alloc_bytes_impl(n, cols, size),
        );
        (
            VecZnxBig::from_data(take_slice, n, cols, size),
            Scratch::from_bytes(rem_slice),
        )
    }
}

unsafe impl<B: Backend> TakeVecZnxDftImpl<B> for FFT64Ref
where
    B: VecZnxDftAllocBytesImpl<B> + ScratchFromBytesImpl<B>,
{
    fn take_vec_znx_dft_impl(
        scratch: &mut Scratch<B>,
        n: usize,
        cols: usize,
        size: usize,
    ) -> (VecZnxDft<&mut [u8], B>, &mut Scratch<B>) {
        let (take_slice, rem_slice) = take_slice_aligned(
            &mut scratch.data,
            B::vec_znx_dft_alloc_bytes_impl(n, cols, size),
        );
        (
            VecZnxDft::from_data(take_slice, n, cols, size),
            Scratch::from_bytes(rem_slice),
        )
    }
}

unsafe impl<B: Backend> TakeVecZnxDftSliceImpl<B> for FFT64Ref
where
    B: VecZnxDftAllocBytesImpl<B> + ScratchFromBytesImpl<B> + TakeVecZnxDftImpl<B>,
{
    fn take_vec_znx_dft_slice_impl(
        scratch: &mut Scratch<B>,
        len: usize,
        n: usize,
        cols: usize,
        size: usize,
    ) -> (Vec<VecZnxDft<&mut [u8], B>>, &mut Scratch<B>) {
        let mut scratch: &mut Scratch<B> = scratch;
        let mut slice: Vec<VecZnxDft<&mut [u8], B>> = Vec::with_capacity(len);
        for _ in 0..len {
            let (znx, new_scratch) = B::take_vec_znx_dft_impl(scratch, n, cols, size);
            scratch = new_scratch;
            slice.push(znx);
        }
        (slice, scratch)
    }
}

unsafe impl<B: Backend> TakeVecZnxSliceImpl<B> for FFT64Ref
where
    B: ScratchFromBytesImpl<B> + TakeVecZnxImpl<B>,
{
    fn take_vec_znx_slice_impl(
        scratch: &mut Scratch<B>,
        len: usize,
        n: usize,
        cols: usize,
        size: usize,
    ) -> (Vec<VecZnx<&mut [u8]>>, &mut Scratch<B>) {
        let mut scratch: &mut Scratch<B> = scratch;
        let mut slice: Vec<VecZnx<&mut [u8]>> = Vec::with_capacity(len);
        for _ in 0..len {
            let (znx, new_scratch) = B::take_vec_znx_impl(scratch, n, cols, size);
            scratch = new_scratch;
            slice.push(znx);
        }
        (slice, scratch)
    }
}

unsafe impl<B: Backend> TakeVmpPMatImpl<B> for FFT64Ref
where
    B: VmpPMatAllocBytesImpl<B> + ScratchFromBytesImpl<B>,
{
    fn take_vmp_pmat_impl(
        scratch: &mut Scratch<B>,
        n: usize,
        rows: usize,
        cols_in: usize,
        cols_out: usize,
        size: usize,
    ) -> (VmpPMat<&mut [u8], B>, &mut Scratch<B>) {
        let (take_slice, rem_slice) = take_slice_aligned(
            &mut scratch.data,
            B::vmp_pmat_alloc_bytes_impl(n, rows, cols_in, cols_out, size),
        );
        (
            VmpPMat::from_data(take_slice, n, rows, cols_in, cols_out, size),
            Scratch::from_bytes(rem_slice),
        )
    }
}

unsafe impl<B: Backend> TakeMatZnxImpl<B> for FFT64Ref
where
    B: ScratchFromBytesImpl<B>,
{
    fn take_mat_znx_impl(
        scratch: &mut Scratch<B>,
        n: usize,
        rows: usize,
        cols_in: usize,
        cols_out: usize,
        size: usize,
    ) -> (MatZnx<&mut [u8]>, &mut Scratch<B>) {
        let (take_slice, rem_slice) = take_slice_aligned(
            &mut scratch.data,
            MatZnx::alloc_bytes(n, rows, cols_in, cols_out, size),
        );
        (
            MatZnx::from_data(take_slice, n, rows, cols_in, cols_out, size),
            Scratch::from_bytes(rem_slice),
        )
    }
}

fn take_slice_aligned(data: &mut [u8], take_len: usize) -> (&mut [u8], &mut [u8]) {
    let ptr: *mut u8 = data.as_mut_ptr();
    let self_len: usize = data.len();
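
Every removed Take*Impl above (and the take_slice_aligned helper whose body is cut off here) follows the same pattern: carve an alignment-rounded chunk off the front of the scratch buffer, wrap it in a layout type, and return the remainder as a new Scratch. The following is a minimal, self-contained sketch of that idea, not the library's implementation: it assumes the buffer start is already aligned and uses a made-up DEFAULT_ALIGN of 64 bytes where the real code uses poulpy_hal::DEFAULTALIGN.

// Hypothetical sketch only: the real take_slice_aligned also works from the raw
// start pointer and handles an unaligned buffer start, which this version omits.
const DEFAULT_ALIGN: usize = 64; // assumption; stands in for poulpy_hal::DEFAULTALIGN

fn take_slice_aligned_sketch(data: &mut [u8], take_len: usize) -> (&mut [u8], &mut [u8]) {
    // Round the requested length up to the alignment boundary so that the
    // remainder also begins at an aligned offset (given an aligned start).
    let aligned_len = take_len.div_ceil(DEFAULT_ALIGN) * DEFAULT_ALIGN;
    assert!(aligned_len <= data.len(), "scratch buffer too small");
    data.split_at_mut(aligned_len)
}

fn main() {
    let mut scratch = vec![0u8; 1024];
    let (taken, rest) = take_slice_aligned_sketch(&mut scratch, 100);
    assert_eq!(taken.len(), 128); // 100 rounded up to the next multiple of 64
    assert_eq!(rest.len(), 896);  // 1024 - 128 bytes left for further take_* calls
}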