xref: /openbmc/linux/arch/powerpc/lib/xor_vmx_glue.c (revision fadbafc1)
// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Altivec XOR operations
 *
 * Copyright 2017 IBM Corp.
 */

#include <linux/preempt.h>
#include <linux/export.h>
#include <linux/sched.h>
#include <asm/switch_to.h>
#include <asm/xor_altivec.h>
#include "xor_vmx.h"

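/*
 * Each wrapper below follows the same pattern: disable preemption, hand
 * the Altivec unit to the kernel with enable_kernel_altivec() (which
 * saves any live user VMX state), run the vectorised XOR core declared
 * in xor_vmx.h, then give the unit back and re-enable preemption.
 * Preemption must stay disabled while the VMX registers hold kernel
 * data, because kernel-mode Altivec state is not preserved across a
 * context switch.
 */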
void xor_altivec_2(unsigned long bytes, unsigned long * __restrict p1,
		   const unsigned long * __restrict p2)
{
	preempt_disable();
	enable_kernel_altivec();
	__xor_altivec_2(bytes, p1, p2);
	disable_kernel_altivec();
	preempt_enable();
}
EXPORT_SYMBOL(xor_altivec_2);

void xor_altivec_3(unsigned long bytes, unsigned long * __restrict p1,
		   const unsigned long * __restrict p2,
		   const unsigned long * __restrict p3)
{
	preempt_disable();
	enable_kernel_altivec();
	__xor_altivec_3(bytes, p1, p2, p3);
	disable_kernel_altivec();
	preempt_enable();
}
EXPORT_SYMBOL(xor_altivec_3);

void xor_altivec_4(unsigned long bytes, unsigned long * __restrict p1,
		   const unsigned long * __restrict p2,
		   const unsigned long * __restrict p3,
		   const unsigned long * __restrict p4)
{
	preempt_disable();
	enable_kernel_altivec();
	__xor_altivec_4(bytes, p1, p2, p3, p4);
	disable_kernel_altivec();
	preempt_enable();
}
EXPORT_SYMBOL(xor_altivec_4);

void xor_altivec_5(unsigned long bytes, unsigned long * __restrict p1,
		   const unsigned long * __restrict p2,
		   const unsigned long * __restrict p3,
		   const unsigned long * __restrict p4,
		   const unsigned long * __restrict p5)
{
	preempt_disable();
	enable_kernel_altivec();
	__xor_altivec_5(bytes, p1, p2, p3, p4, p5);
	disable_kernel_altivec();
	preempt_enable();
}
EXPORT_SYMBOL(xor_altivec_5);
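
/*
 * Illustrative sketch, not part of the original file: these exported
 * helpers are consumed through the generic RAID XOR framework, which
 * selects an implementation via struct xor_block_template (declared in
 * include/linux/raid/xor.h).  A template wiring them up would look
 * roughly like the example below; the variable name is hypothetical and
 * the real powerpc wiring lives elsewhere in the tree.
 */
#if 0
#include <linux/raid/xor.h>

static struct xor_block_template xor_block_altivec_example = {
	.name = "altivec",
	.do_2 = xor_altivec_2,
	.do_3 = xor_altivec_3,
	.do_4 = xor_altivec_4,
	.do_5 = xor_altivec_5,
};
#endif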