/*
 * treewide: Replace GPLv2 boilerplate/reference with SPDX - rule 152
 * [linux-2.6-block.git] / arch / powerpc / lib / mem_64.S
 */
1 /* SPDX-License-Identifier: GPL-2.0-or-later */
2 /*
3  * String handling functions for PowerPC.
4  *
5  * Copyright (C) 1996 Paul Mackerras.
6  */
7 #include <asm/processor.h>
8 #include <asm/errno.h>
9 #include <asm/ppc_asm.h>
10 #include <asm/export.h>
11 #include <asm/kasan.h>
12
#ifndef CONFIG_KASAN
/*
 * void *__memset16(u16 *s, u16 v, __kernel_size_t count)
 * void *__memset32(u32 *s, u32 v, __kernel_size_t count)
 * void *__memset64(u64 *s, u64 v, __kernel_size_t count)
 *
 * In:  r3 = dest, r4 = fill pattern (16/32/64 bits), r5 = byte count.
 *
 * Each narrower entry point replicates the pattern in r4 and falls
 * through to the next wider one; __memset64 then does the same
 * alignment setup as memset and branches into its shared body (.Lms).
 * NOTE(review): callers presumably guarantee dest is aligned to the
 * element size and count is a multiple of it — confirm at call sites.
 */
_GLOBAL(__memset16)
	rlwimi	r4,r4,16,0,15		/* replicate halfword across low word */
	/* fall through */

_GLOBAL(__memset32)
	rldimi	r4,r4,32,0		/* replicate low word into high word */
	/* fall through */

_GLOBAL(__memset64)
	neg	r0,r3
	andi.	r0,r0,7			/* r0 = bytes to reach 8-byte alignment */
	cmplw	cr1,r5,r0		/* cr1 = (count < alignment gap)? */
	b	.Lms			/* join the common memset store path */
EXPORT_SYMBOL(__memset16)
EXPORT_SYMBOL(__memset32)
EXPORT_SYMBOL(__memset64)
#endif
31
/*
 * void *memset(void *s, int c, __kernel_size_t count)
 *
 * In:  r3 = dest, r4 = fill byte (low 8 bits), r5 = byte count.
 * Out: r3 = dest, unchanged (C memset return value).
 *
 * Strategy: splat the fill byte across all 8 bytes of r4; store
 * byte/halfword/word pieces until the pointer is 8-byte aligned;
 * blast 64-byte groups of std in a CTR loop; then peel off the
 * remainder using CR bits set from the residual count.
 */
_GLOBAL_KASAN(memset)
	neg	r0,r3
	rlwimi	r4,r4,8,16,23		/* splat byte -> halfword */
	andi.	r0,r0,7			/* # bytes to be 8-byte aligned */
	rlwimi	r4,r4,16,0,15		/* splat halfword -> word */
	cmplw	cr1,r5,r0		/* do we get that far? */
	rldimi	r4,r4,32,0		/* splat word -> doubleword */
.Lms:	PPC_MTOCRF(1,r0)		/* low bits of alignment gap -> cr7 for bf tests */
	mr	r6,r3			/* r6 = store cursor; keep r3 for the return */
	blt	cr1,8f			/* count < gap: treat everything as the tail */
	beq	3f			/* if already 8-byte aligned */
	subf	r5,r0,r5		/* consume the alignment bytes from the count */
	bf	31,1f			/* gap bit 0 set -> one byte store */
	stb	r4,0(r6)
	addi	r6,r6,1
1:	bf	30,2f			/* gap bit 1 set -> halfword store */
	sth	r4,0(r6)
	addi	r6,r6,2
2:	bf	29,3f			/* gap bit 2 set -> word store */
	stw	r4,0(r6)
	addi	r6,r6,4
3:	srdi.	r0,r5,6			/* r0 = number of full 64-byte chunks */
	clrldi	r5,r5,58		/* r5 = count mod 64 */
	mtctr	r0
	beq	5f			/* no 64-byte chunks at all */
	.balign 16
4:	std	r4,0(r6)		/* unrolled: 64 bytes per iteration */
	std	r4,8(r6)
	std	r4,16(r6)
	std	r4,24(r6)
	std	r4,32(r6)
	std	r4,40(r6)
	std	r4,48(r6)
	std	r4,56(r6)
	addi	r6,r6,64
	bdnz	4b
5:	srwi.	r0,r5,3			/* r0 = remaining doublewords (0..7) */
	clrlwi	r5,r5,29		/* r5 = count mod 8 */
	PPC_MTOCRF(1,r0)		/* doubleword-count bits -> cr7 */
	beq	8f
	bf	29,6f			/* 4 doublewords pending -> 32 bytes */
	std	r4,0(r6)
	std	r4,8(r6)
	std	r4,16(r6)
	std	r4,24(r6)
	addi	r6,r6,32
6:	bf	30,7f			/* 2 doublewords pending -> 16 bytes */
	std	r4,0(r6)
	std	r4,8(r6)
	addi	r6,r6,16
7:	bf	31,8f			/* 1 doubleword pending -> 8 bytes */
	std	r4,0(r6)
	addi	r6,r6,8
8:	cmpwi	r5,0			/* trailing 0..7 bytes */
	PPC_MTOCRF(1,r5)
	beqlr				/* nothing left: return (r3 still = dest) */
	bf	29,9f			/* 4-byte piece */
	stw	r4,0(r6)
	addi	r6,r6,4
9:	bf	30,10f			/* 2-byte piece */
	sth	r4,0(r6)
	addi	r6,r6,2
10:	bflr	31			/* no final byte: return */
	stb	r4,0(r6)
	blr
EXPORT_SYMBOL(memset)
EXPORT_SYMBOL_KASAN(memset)
99
/*
 * void *memmove(void *dest, const void *src, size_t n)
 *
 * In:  r3 = dest, r4 = src, r5 = byte count.
 *
 * Overlap-safe copy: if dest is above src the regions could be
 * clobbered by a forward copy, so copy backwards; otherwise
 * tail-call straight into memcpy.
 * NOTE(review): cmplw compares only the low 32 bits of the two
 * 64-bit pointers — safe when overlapping buffers sit within the
 * same 4GB region, which appears assumed here; confirm.
 */
_GLOBAL_TOC_KASAN(memmove)
	cmplw	0,r3,r4			/* unsigned: dest vs src */
	bgt	backwards_memcpy	/* dest > src: copy high-to-low */
	b	memcpy			/* disjoint/ascending: plain memcpy */
104
/*
 * backwards_memcpy(dest, src, n) — copy n bytes descending from the
 * end of both buffers, for the overlapping dest > src case.
 *
 * In:  r3 = dest, r4 = src, r5 = byte count.
 * Out: r3 = dest, unchanged.
 *
 * Main loop moves 8 bytes per iteration as two word pairs once the
 * (descending) dest pointer is word-aligned; odd leading bytes are
 * copied one at a time at label 5, and the final <8-byte tail is
 * handled at labels 2-4.
 */
_GLOBAL(backwards_memcpy)
	rlwinm.	r7,r5,32-3,3,31		/* r7 = r5 >> 3 (8-byte chunk count) */
	add	r6,r3,r5		/* r6 = one past the end of dest */
	add	r4,r4,r5		/* r4 = one past the end of src */
	beq	2f			/* fewer than 8 bytes: just the tail */
	andi.	r0,r6,3			/* dest end word-aligned? */
	mtctr	r7
	bne	5f			/* no: byte-copy until it is */
	.balign 16
1:	lwz	r7,-4(r4)		/* 8 bytes per iteration, descending */
	lwzu	r8,-8(r4)
	stw	r7,-4(r6)
	stwu	r8,-8(r6)
	bdnz	1b
	andi.	r5,r5,7			/* bytes left after the 8-byte chunks */
2:	cmplwi	0,r5,4
	blt	3f
	lwzu	r0,-4(r4)		/* one leftover word */
	subi	r5,r5,4
	stwu	r0,-4(r6)
3:	cmpwi	0,r5,0
	beqlr				/* done: return with r3 = dest */
	mtctr	r5
4:	lbzu	r0,-1(r4)		/* trailing 1..3 bytes, one at a time */
	stbu	r0,-1(r6)
	bdnz	4b
	blr
5:	mtctr	r0			/* r0 bytes to word-align the dest end */
6:	lbzu	r7,-1(r4)
	stbu	r7,-1(r6)
	bdnz	6b
	subf	r5,r0,r5		/* count remaining after alignment bytes */
	rlwinm.	r7,r5,32-3,3,31		/* recompute 8-byte chunk count */
	beq	2b			/* nothing chunk-sized left: tail only */
	mtctr	r7
	b	1b			/* enter the aligned main loop */