[linux-block.git] arch/powerpc/lib/xor_vmx.c
// SPDX-License-Identifier: GPL-2.0-or-later
/*
 *
 * Copyright (C) IBM Corporation, 2012
 *
 * Author: Anton Blanchard <anton@au.ibm.com>
 */

/*
 * Sparse (as at v0.5.0) gets very, very confused by this file.
 * Make it a bit simpler for it.
 */
#if !defined(__CHECKER__)
#include <altivec.h>
#else
#define vec_xor(a, b) a ^ b
#define vector __attribute__((vector_size(16)))
#endif

#include "xor_vmx.h"

typedef vector signed char unative_t;

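/*
 * Each unative_t is one 16-byte VMX vector.  The helpers below are
 * unrolled by four vectors, so every loop iteration consumes 64 bytes
 * from each source buffer; callers are presumably expected to pass a
 * byte count that is a multiple of 64.
 */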
#define DEFINE(V)                               \
        unative_t *V = (unative_t *)V##_in;     \
        unative_t V##_0, V##_1, V##_2, V##_3

#define LOAD(V)                 \
        do {                    \
                V##_0 = V[0];   \
                V##_1 = V[1];   \
                V##_2 = V[2];   \
                V##_3 = V[3];   \
        } while (0)

#define STORE(V)                \
        do {                    \
                V[0] = V##_0;   \
                V[1] = V##_1;   \
                V[2] = V##_2;   \
                V[3] = V##_3;   \
        } while (0)

#define XOR(V1, V2)                                     \
        do {                                            \
                V1##_0 = vec_xor(V1##_0, V2##_0);       \
                V1##_1 = vec_xor(V1##_1, V2##_1);       \
                V1##_2 = vec_xor(V1##_2, V2##_2);       \
                V1##_3 = vec_xor(V1##_3, V2##_3);       \
        } while (0)

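/*
 * XOR two to five source buffers into the first one, 64 bytes at a
 * time.  These are the raw VMX routines: they do not manage the
 * vector-unit state themselves, so the caller is assumed to have
 * enabled kernel Altivec use around each call.
 */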
void __xor_altivec_2(unsigned long bytes, unsigned long *v1_in,
                     unsigned long *v2_in)
{
        DEFINE(v1);
        DEFINE(v2);
        unsigned long lines = bytes / (sizeof(unative_t)) / 4;

        do {
                LOAD(v1);
                LOAD(v2);
                XOR(v1, v2);
                STORE(v1);

                v1 += 4;
                v2 += 4;
        } while (--lines > 0);
}

void __xor_altivec_3(unsigned long bytes, unsigned long *v1_in,
                     unsigned long *v2_in, unsigned long *v3_in)
{
        DEFINE(v1);
        DEFINE(v2);
        DEFINE(v3);
        unsigned long lines = bytes / (sizeof(unative_t)) / 4;

        do {
                LOAD(v1);
                LOAD(v2);
                LOAD(v3);
                XOR(v1, v2);
                XOR(v1, v3);
                STORE(v1);

                v1 += 4;
                v2 += 4;
                v3 += 4;
        } while (--lines > 0);
}

void __xor_altivec_4(unsigned long bytes, unsigned long *v1_in,
                     unsigned long *v2_in, unsigned long *v3_in,
                     unsigned long *v4_in)
{
        DEFINE(v1);
        DEFINE(v2);
        DEFINE(v3);
        DEFINE(v4);
        unsigned long lines = bytes / (sizeof(unative_t)) / 4;

        do {
                LOAD(v1);
                LOAD(v2);
                LOAD(v3);
                LOAD(v4);
                XOR(v1, v2);
                XOR(v3, v4);
                XOR(v1, v3);
                STORE(v1);

                v1 += 4;
                v2 += 4;
                v3 += 4;
                v4 += 4;
        } while (--lines > 0);
}

void __xor_altivec_5(unsigned long bytes, unsigned long *v1_in,
                     unsigned long *v2_in, unsigned long *v3_in,
                     unsigned long *v4_in, unsigned long *v5_in)
{
        DEFINE(v1);
        DEFINE(v2);
        DEFINE(v3);
        DEFINE(v4);
        DEFINE(v5);
        unsigned long lines = bytes / (sizeof(unative_t)) / 4;

        do {
                LOAD(v1);
                LOAD(v2);
                LOAD(v3);
                LOAD(v4);
                LOAD(v5);
                XOR(v1, v2);
                XOR(v3, v4);
                XOR(v1, v5);
                XOR(v1, v3);
                STORE(v1);

                v1 += 4;
                v2 += 4;
                v3 += 4;
                v4 += 4;
                v5 += 4;
        } while (--lines > 0);
}
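
/*
 * Usage sketch, not part of this file and only an assumption about the
 * surrounding glue: a caller would typically wrap each raw routine so
 * the vector unit is enabled for the duration of the call, roughly:
 *
 *      void xor_altivec_2(unsigned long bytes,
 *                         unsigned long *v1_in, unsigned long *v2_in)
 *      {
 *              preempt_disable();
 *              enable_kernel_altivec();
 *              __xor_altivec_2(bytes, v1_in, v2_in);
 *              disable_kernel_altivec();
 *              preempt_enable();
 *      }
 */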