//! [`FFT64Ref`] backend implementations of the `VecZnx*` OEP traits, forwarding to the
//! portable reference routines in `poulpy_hal::reference::vec_znx`.

use poulpy_hal::{
    api::{
        TakeSlice, VecZnxAutomorphismInplaceTmpBytes, VecZnxLshTmpBytes, VecZnxMergeRingsTmpBytes,
        VecZnxMulXpMinusOneInplaceTmpBytes, VecZnxNormalizeTmpBytes, VecZnxRotateInplaceTmpBytes, VecZnxRshTmpBytes,
        VecZnxSplitRingTmpBytes,
    },
    layouts::{Module, ScalarZnxToRef, Scratch, VecZnxToMut, VecZnxToRef},
    oep::{
        TakeSliceImpl, VecZnxAddImpl, VecZnxAddInplaceImpl, VecZnxAddNormalImpl, VecZnxAddScalarImpl,
        VecZnxAddScalarInplaceImpl, VecZnxAutomorphismImpl, VecZnxAutomorphismInplaceImpl,
        VecZnxAutomorphismInplaceTmpBytesImpl, VecZnxCopyImpl, VecZnxFillNormalImpl, VecZnxFillUniformImpl, VecZnxLshImpl,
        VecZnxLshInplaceImpl, VecZnxLshTmpBytesImpl, VecZnxMergeRingsImpl, VecZnxMergeRingsTmpBytesImpl,
        VecZnxMulXpMinusOneImpl, VecZnxMulXpMinusOneInplaceImpl, VecZnxMulXpMinusOneInplaceTmpBytesImpl, VecZnxNegateImpl,
        VecZnxNegateInplaceImpl, VecZnxNormalizeImpl, VecZnxNormalizeInplaceImpl, VecZnxNormalizeTmpBytesImpl,
        VecZnxRotateImpl, VecZnxRotateInplaceImpl, VecZnxRotateInplaceTmpBytesImpl, VecZnxRshImpl, VecZnxRshInplaceImpl,
        VecZnxRshTmpBytesImpl, VecZnxSplitRingImpl, VecZnxSplitRingTmpBytesImpl, VecZnxSubImpl, VecZnxSubInplaceImpl,
        VecZnxSubNegateInplaceImpl, VecZnxSubScalarImpl, VecZnxSubScalarInplaceImpl, VecZnxSwitchRingImpl, VecZnxZeroImpl,
    },
    reference::vec_znx::{
        vec_znx_add, vec_znx_add_inplace, vec_znx_add_normal_ref, vec_znx_add_scalar, vec_znx_add_scalar_inplace,
        vec_znx_automorphism, vec_znx_automorphism_inplace, vec_znx_automorphism_inplace_tmp_bytes, vec_znx_copy,
        vec_znx_fill_normal_ref, vec_znx_fill_uniform_ref, vec_znx_lsh, vec_znx_lsh_inplace, vec_znx_lsh_tmp_bytes,
        vec_znx_merge_rings, vec_znx_merge_rings_tmp_bytes, vec_znx_mul_xp_minus_one, vec_znx_mul_xp_minus_one_inplace,
        vec_znx_mul_xp_minus_one_inplace_tmp_bytes, vec_znx_negate, vec_znx_negate_inplace, vec_znx_normalize,
        vec_znx_normalize_inplace, vec_znx_normalize_tmp_bytes, vec_znx_rotate, vec_znx_rotate_inplace,
        vec_znx_rotate_inplace_tmp_bytes, vec_znx_rsh, vec_znx_rsh_inplace, vec_znx_rsh_tmp_bytes, vec_znx_split_ring,
        vec_znx_split_ring_tmp_bytes, vec_znx_sub, vec_znx_sub_inplace, vec_znx_sub_negate_inplace, vec_znx_sub_scalar,
        vec_znx_sub_scalar_inplace, vec_znx_switch_ring, vec_znx_zero,
    },
    source::Source,
};

use crate::FFT64Ref;

unsafe impl VecZnxZeroImpl<Self> for FFT64Ref {
    fn vec_znx_zero_impl<R>(_module: &Module<Self>, res: &mut R, res_col: usize)
    where
        R: VecZnxToMut,
    {
        vec_znx_zero::<_, FFT64Ref>(res, res_col);
    }
}

unsafe impl VecZnxNormalizeTmpBytesImpl<Self> for FFT64Ref {
    fn vec_znx_normalize_tmp_bytes_impl(module: &Module<Self>) -> usize {
        vec_znx_normalize_tmp_bytes(module.n())
    }
}

unsafe impl VecZnxNormalizeImpl<Self> for FFT64Ref
where
    Self: TakeSliceImpl<Self> + VecZnxNormalizeTmpBytesImpl<Self>,
{
    fn vec_znx_normalize_impl<R, A>(
        module: &Module<Self>,
        res_basek: usize,
        res: &mut R,
        res_col: usize,
        a_basek: usize,
        a: &A,
        a_col: usize,
        scratch: &mut Scratch<Self>,
    ) where
        R: VecZnxToMut,
        A: VecZnxToRef,
    {
        let (carry, _) = scratch.take_slice(module.vec_znx_normalize_tmp_bytes() / size_of::<i64>());
        vec_znx_normalize::<_, _, Self>(res_basek, res, res_col, a_basek, a, a_col, carry);
    }
}

unsafe impl VecZnxNormalizeInplaceImpl<Self> for FFT64Ref
where
    Self: TakeSliceImpl<Self> + VecZnxNormalizeTmpBytesImpl<Self>,
{
    fn vec_znx_normalize_inplace_impl<R>(
        module: &Module<Self>,
        base2k: usize,
        res: &mut R,
        res_col: usize,
        scratch: &mut Scratch<Self>,
    ) where
        R: VecZnxToMut,
    {
        let (carry, _) = scratch.take_slice(module.vec_znx_normalize_tmp_bytes() / size_of::<i64>());
        vec_znx_normalize_inplace::<_, Self>(base2k, res, res_col, carry);
    }
}
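// Coefficient-wise arithmetic: addition, subtraction and negation, together with the
// scalar (single-limb) and in-place variants. These operations need no scratch space
// and forward directly to the reference routines.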
unsafe impl VecZnxAddImpl<Self> for FFT64Ref {
    fn vec_znx_add_impl<R, A, B>(_module: &Module<Self>, res: &mut R, res_col: usize, a: &A, a_col: usize, b: &B, b_col: usize)
    where
        R: VecZnxToMut,
        A: VecZnxToRef,
        B: VecZnxToRef,
    {
        vec_znx_add::<_, _, _, Self>(res, res_col, a, a_col, b, b_col);
    }
}

unsafe impl VecZnxAddInplaceImpl<Self> for FFT64Ref {
    fn vec_znx_add_inplace_impl<R, A>(_module: &Module<Self>, res: &mut R, res_col: usize, a: &A, a_col: usize)
    where
        R: VecZnxToMut,
        A: VecZnxToRef,
    {
        vec_znx_add_inplace::<_, _, Self>(res, res_col, a, a_col);
    }
}

unsafe impl VecZnxAddScalarInplaceImpl<Self> for FFT64Ref {
    fn vec_znx_add_scalar_inplace_impl<R, A>(
        _module: &Module<Self>,
        res: &mut R,
        res_col: usize,
        res_limb: usize,
        a: &A,
        a_col: usize,
    ) where
        R: VecZnxToMut,
        A: ScalarZnxToRef,
    {
        vec_znx_add_scalar_inplace::<_, _, Self>(res, res_col, res_limb, a, a_col);
    }
}

unsafe impl VecZnxAddScalarImpl<Self> for FFT64Ref {
    fn vec_znx_add_scalar_impl<R, A, B>(
        _module: &Module<Self>,
        res: &mut R,
        res_col: usize,
        a: &A,
        a_col: usize,
        b: &B,
        b_col: usize,
        b_limb: usize,
    ) where
        R: VecZnxToMut,
        A: ScalarZnxToRef,
        B: VecZnxToRef,
    {
        vec_znx_add_scalar::<_, _, _, Self>(res, res_col, a, a_col, b, b_col, b_limb);
    }
}

unsafe impl VecZnxSubImpl<Self> for FFT64Ref {
    fn vec_znx_sub_impl<R, A, B>(_module: &Module<Self>, res: &mut R, res_col: usize, a: &A, a_col: usize, b: &B, b_col: usize)
    where
        R: VecZnxToMut,
        A: VecZnxToRef,
        B: VecZnxToRef,
    {
        vec_znx_sub::<_, _, _, Self>(res, res_col, a, a_col, b, b_col);
    }
}

unsafe impl VecZnxSubInplaceImpl<Self> for FFT64Ref {
    fn vec_znx_sub_inplace_impl<R, A>(_module: &Module<Self>, res: &mut R, res_col: usize, a: &A, a_col: usize)
    where
        R: VecZnxToMut,
        A: VecZnxToRef,
    {
        vec_znx_sub_inplace::<_, _, Self>(res, res_col, a, a_col);
    }
}

unsafe impl VecZnxSubNegateInplaceImpl<Self> for FFT64Ref {
    fn vec_znx_sub_negate_inplace_impl<R, A>(_module: &Module<Self>, res: &mut R, res_col: usize, a: &A, a_col: usize)
    where
        R: VecZnxToMut,
        A: VecZnxToRef,
    {
        vec_znx_sub_negate_inplace::<_, _, Self>(res, res_col, a, a_col);
    }
}

unsafe impl VecZnxSubScalarImpl<Self> for FFT64Ref {
    fn vec_znx_sub_scalar_impl<R, A, B>(
        _module: &Module<Self>,
        res: &mut R,
        res_col: usize,
        a: &A,
        a_col: usize,
        b: &B,
        b_col: usize,
        b_limb: usize,
    ) where
        R: VecZnxToMut,
        A: ScalarZnxToRef,
        B: VecZnxToRef,
    {
        vec_znx_sub_scalar::<_, _, _, Self>(res, res_col, a, a_col, b, b_col, b_limb);
    }
}

unsafe impl VecZnxSubScalarInplaceImpl<Self> for FFT64Ref {
    fn vec_znx_sub_scalar_inplace_impl<R, A>(
        _module: &Module<Self>,
        res: &mut R,
        res_col: usize,
        res_limb: usize,
        a: &A,
        a_col: usize,
    ) where
        R: VecZnxToMut,
        A: ScalarZnxToRef,
    {
        vec_znx_sub_scalar_inplace::<_, _, Self>(res, res_col, res_limb, a, a_col);
    }
}

unsafe impl VecZnxNegateImpl<Self> for FFT64Ref {
    fn vec_znx_negate_impl<R, A>(_module: &Module<Self>, res: &mut R, res_col: usize, a: &A, a_col: usize)
    where
        R: VecZnxToMut,
        A: VecZnxToRef,
    {
        vec_znx_negate::<_, _, Self>(res, res_col, a, a_col);
    }
}

unsafe impl VecZnxNegateInplaceImpl<Self> for FFT64Ref {
    fn vec_znx_negate_inplace_impl<R>(_module: &Module<Self>, res: &mut R, res_col: usize)
    where
        R: VecZnxToMut,
    {
        vec_znx_negate_inplace::<_, Self>(res, res_col);
    }
}
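// Base-2^k shifts, rotations by X^p, Galois automorphisms and multiplication by
// (X^p - 1). The shift routines and the in-place variants borrow a temporary
// carry/copy buffer from the scratch space, sized by the matching `*_tmp_bytes` helper.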
unsafe impl VecZnxLshTmpBytesImpl<Self> for FFT64Ref {
    fn vec_znx_lsh_tmp_bytes_impl(module: &Module<Self>) -> usize {
        vec_znx_lsh_tmp_bytes(module.n())
    }
}

unsafe impl VecZnxRshTmpBytesImpl<Self> for FFT64Ref {
    fn vec_znx_rsh_tmp_bytes_impl(module: &Module<Self>) -> usize {
        vec_znx_rsh_tmp_bytes(module.n())
    }
}

unsafe impl VecZnxLshImpl<Self> for FFT64Ref
where
    Module<Self>: VecZnxNormalizeTmpBytes,
    Scratch<Self>: TakeSlice,
{
    fn vec_znx_lsh_impl<R, A>(
        module: &Module<Self>,
        base2k: usize,
        k: usize,
        res: &mut R,
        res_col: usize,
        a: &A,
        a_col: usize,
        scratch: &mut Scratch<Self>,
    ) where
        R: VecZnxToMut,
        A: VecZnxToRef,
    {
        let (carry, _) = scratch.take_slice(module.vec_znx_lsh_tmp_bytes() / size_of::<i64>());
        vec_znx_lsh::<_, _, Self>(base2k, k, res, res_col, a, a_col, carry);
    }
}

unsafe impl VecZnxLshInplaceImpl<Self> for FFT64Ref
where
    Module<Self>: VecZnxNormalizeTmpBytes,
    Scratch<Self>: TakeSlice,
{
    fn vec_znx_lsh_inplace_impl<A>(
        module: &Module<Self>,
        base2k: usize,
        k: usize,
        a: &mut A,
        a_col: usize,
        scratch: &mut Scratch<Self>,
    ) where
        A: VecZnxToMut,
    {
        let (carry, _) = scratch.take_slice(module.vec_znx_lsh_tmp_bytes() / size_of::<i64>());
        vec_znx_lsh_inplace::<_, Self>(base2k, k, a, a_col, carry);
    }
}

unsafe impl VecZnxRshImpl<Self> for FFT64Ref
where
    Module<Self>: VecZnxNormalizeTmpBytes,
    Scratch<Self>: TakeSlice,
{
    fn vec_znx_rsh_impl<R, A>(
        module: &Module<Self>,
        base2k: usize,
        k: usize,
        res: &mut R,
        res_col: usize,
        a: &A,
        a_col: usize,
        scratch: &mut Scratch<Self>,
    ) where
        R: VecZnxToMut,
        A: VecZnxToRef,
    {
        let (carry, _) = scratch.take_slice(module.vec_znx_rsh_tmp_bytes() / size_of::<i64>());
        vec_znx_rsh::<_, _, Self>(base2k, k, res, res_col, a, a_col, carry);
    }
}

unsafe impl VecZnxRshInplaceImpl<Self> for FFT64Ref
where
    Module<Self>: VecZnxNormalizeTmpBytes,
    Scratch<Self>: TakeSlice,
{
    fn vec_znx_rsh_inplace_impl<A>(
        module: &Module<Self>,
        base2k: usize,
        k: usize,
        a: &mut A,
        a_col: usize,
        scratch: &mut Scratch<Self>,
    ) where
        A: VecZnxToMut,
    {
        let (carry, _) = scratch.take_slice(module.vec_znx_rsh_tmp_bytes() / size_of::<i64>());
        vec_znx_rsh_inplace::<_, Self>(base2k, k, a, a_col, carry);
    }
}

unsafe impl VecZnxRotateImpl<Self> for FFT64Ref {
    fn vec_znx_rotate_impl<R, A>(_module: &Module<Self>, p: i64, res: &mut R, res_col: usize, a: &A, a_col: usize)
    where
        R: VecZnxToMut,
        A: VecZnxToRef,
    {
        vec_znx_rotate::<_, _, Self>(p, res, res_col, a, a_col);
    }
}

unsafe impl VecZnxRotateInplaceTmpBytesImpl<Self> for FFT64Ref
where
    Scratch<Self>: TakeSlice,
{
    fn vec_znx_rotate_inplace_tmp_bytes_impl(module: &Module<Self>) -> usize {
        vec_znx_rotate_inplace_tmp_bytes(module.n())
    }
}

unsafe impl VecZnxRotateInplaceImpl<Self> for FFT64Ref
where
    Scratch<Self>: TakeSlice,
    Self: VecZnxRotateInplaceTmpBytesImpl<Self>,
{
    fn vec_znx_rotate_inplace_impl<R>(module: &Module<Self>, p: i64, res: &mut R, res_col: usize, scratch: &mut Scratch<Self>)
    where
        R: VecZnxToMut,
    {
        let (tmp, _) = scratch.take_slice(module.vec_znx_rotate_inplace_tmp_bytes() / size_of::<i64>());
        vec_znx_rotate_inplace::<_, Self>(p, res, res_col, tmp);
    }
}

unsafe impl VecZnxAutomorphismImpl<Self> for FFT64Ref {
    fn vec_znx_automorphism_impl<R, A>(_module: &Module<Self>, p: i64, res: &mut R, res_col: usize, a: &A, a_col: usize)
    where
        R: VecZnxToMut,
        A: VecZnxToRef,
    {
        vec_znx_automorphism::<_, _, Self>(p, res, res_col, a, a_col);
    }
}

unsafe impl VecZnxAutomorphismInplaceTmpBytesImpl<Self> for FFT64Ref {
    fn vec_znx_automorphism_inplace_tmp_bytes_impl(module: &Module<Self>) -> usize {
        vec_znx_automorphism_inplace_tmp_bytes(module.n())
    }
}

unsafe impl VecZnxAutomorphismInplaceImpl<Self> for FFT64Ref
where
    Scratch<Self>: TakeSlice,
    Self: VecZnxAutomorphismInplaceTmpBytesImpl<Self>,
{
    fn vec_znx_automorphism_inplace_impl<R>(
        module: &Module<Self>,
        p: i64,
        res: &mut R,
        res_col: usize,
        scratch: &mut Scratch<Self>,
    ) where
        R: VecZnxToMut,
    {
        let (tmp, _) = scratch.take_slice(module.vec_znx_automorphism_inplace_tmp_bytes() / size_of::<i64>());
        vec_znx_automorphism_inplace::<_, Self>(p, res, res_col, tmp);
    }
}

unsafe impl VecZnxMulXpMinusOneImpl<Self> for FFT64Ref {
    fn vec_znx_mul_xp_minus_one_impl<R, A>(_module: &Module<Self>, p: i64, res: &mut R, res_col: usize, a: &A, a_col: usize)
    where
        R: VecZnxToMut,
        A: VecZnxToRef,
    {
        vec_znx_mul_xp_minus_one::<_, _, Self>(p, res, res_col, a, a_col);
    }
}

unsafe impl VecZnxMulXpMinusOneInplaceTmpBytesImpl<Self> for FFT64Ref
where
    Scratch<Self>: TakeSlice,
    Self: VecZnxMulXpMinusOneImpl<Self>,
{
    fn vec_znx_mul_xp_minus_one_inplace_tmp_bytes_impl(module: &Module<Self>) -> usize {
        vec_znx_mul_xp_minus_one_inplace_tmp_bytes(module.n())
    }
}

unsafe impl VecZnxMulXpMinusOneInplaceImpl<Self> for FFT64Ref {
    fn vec_znx_mul_xp_minus_one_inplace_impl<R>(
        module: &Module<Self>,
        p: i64,
        res: &mut R,
        res_col: usize,
        scratch: &mut Scratch<Self>,
    ) where
        R: VecZnxToMut,
    {
        let (tmp, _) = scratch.take_slice(module.vec_znx_mul_xp_minus_one_inplace_tmp_bytes() / size_of::<i64>());
        vec_znx_mul_xp_minus_one_inplace::<_, Self>(p, res, res_col, tmp);
    }
}
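// Ring-degree conversions (split into smaller rings, merge back, switch ring degree),
// plain copy, and sampling of uniform and normally-distributed noise (parameterised by
// `sigma` and `bound`) into the base-2^k limb decomposition.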
unsafe impl VecZnxSplitRingTmpBytesImpl<Self> for FFT64Ref {
    fn vec_znx_split_ring_tmp_bytes_impl(module: &Module<Self>) -> usize {
        vec_znx_split_ring_tmp_bytes(module.n())
    }
}

unsafe impl VecZnxSplitRingImpl<Self> for FFT64Ref
where
    Module<Self>: VecZnxSplitRingTmpBytes,
    Scratch<Self>: TakeSlice,
{
    fn vec_znx_split_ring_impl<R, A>(
        module: &Module<Self>,
        res: &mut [R],
        res_col: usize,
        a: &A,
        a_col: usize,
        scratch: &mut Scratch<Self>,
    ) where
        R: VecZnxToMut,
        A: VecZnxToRef,
    {
        let (tmp, _) = scratch.take_slice(module.vec_znx_split_ring_tmp_bytes() / size_of::<i64>());
        vec_znx_split_ring::<_, _, Self>(res, res_col, a, a_col, tmp);
    }
}

unsafe impl VecZnxMergeRingsTmpBytesImpl<Self> for FFT64Ref {
    fn vec_znx_merge_rings_tmp_bytes_impl(module: &Module<Self>) -> usize {
        vec_znx_merge_rings_tmp_bytes(module.n())
    }
}

unsafe impl VecZnxMergeRingsImpl<Self> for FFT64Ref
where
    Module<Self>: VecZnxMergeRingsTmpBytes,
{
    fn vec_znx_merge_rings_impl<R, A>(
        module: &Module<Self>,
        res: &mut R,
        res_col: usize,
        a: &[A],
        a_col: usize,
        scratch: &mut Scratch<Self>,
    ) where
        R: VecZnxToMut,
        A: VecZnxToRef,
    {
        let (tmp, _) = scratch.take_slice(module.vec_znx_merge_rings_tmp_bytes() / size_of::<i64>());
        vec_znx_merge_rings::<_, _, Self>(res, res_col, a, a_col, tmp);
    }
}

unsafe impl VecZnxSwitchRingImpl<Self> for FFT64Ref
where
    Self: VecZnxCopyImpl<Self>,
{
    fn vec_znx_switch_ring_impl<R, A>(_module: &Module<Self>, res: &mut R, res_col: usize, a: &A, a_col: usize)
    where
        R: VecZnxToMut,
        A: VecZnxToRef,
    {
        vec_znx_switch_ring::<_, _, Self>(res, res_col, a, a_col);
    }
}

unsafe impl VecZnxCopyImpl<Self> for FFT64Ref {
    fn vec_znx_copy_impl<R, A>(_module: &Module<Self>, res: &mut R, res_col: usize, a: &A, a_col: usize)
    where
        R: VecZnxToMut,
        A: VecZnxToRef,
    {
        vec_znx_copy::<_, _, Self>(res, res_col, a, a_col)
    }
}

unsafe impl VecZnxFillUniformImpl<Self> for FFT64Ref {
    fn vec_znx_fill_uniform_impl<R>(_module: &Module<Self>, base2k: usize, res: &mut R, res_col: usize, source: &mut Source)
    where
        R: VecZnxToMut,
    {
        vec_znx_fill_uniform_ref(base2k, res, res_col, source)
    }
}

unsafe impl VecZnxFillNormalImpl<Self> for FFT64Ref {
    fn vec_znx_fill_normal_impl<R>(
        _module: &Module<Self>,
        base2k: usize,
        res: &mut R,
        res_col: usize,
        k: usize,
        source: &mut Source,
        sigma: f64,
        bound: f64,
    ) where
        R: VecZnxToMut,
    {
        vec_znx_fill_normal_ref(base2k, res, res_col, k, sigma, bound, source);
    }
}

unsafe impl VecZnxAddNormalImpl<Self> for FFT64Ref {
    fn vec_znx_add_normal_impl<R>(
        _module: &Module<Self>,
        base2k: usize,
        res: &mut R,
        res_col: usize,
        k: usize,
        source: &mut Source,
        sigma: f64,
        bound: f64,
    ) where
        R: VecZnxToMut,
    {
        vec_znx_add_normal_ref(base2k, res, res_col, k, sigma, bound, source);
    }
}