/* linux/arch/sparc/lib/memset.S: Sparc optimized memset, bzero and clear_user code
 * Copyright (C) 1991,1996 Free Software Foundation
 * Copyright (C) 1996,1997 Jakub Jelinek (jj@sunsite.mff.cuni.cz)
 * Copyright (C) 1996 David S. Miller (davem@caip.rutgers.edu)
 *
 * Returns 0 if ok; if an exception occurs and we were called as
 * clear_user, returns the number of bytes not yet set.
 */

#include <asm/ptrace.h>

/* Work around cpp -rob */
#define ALLOC #alloc
#define EXECINSTR #execinstr
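/* EX(x,y,a,b): execute the store "x,y" at local label 98.  If it faults,
 * the .fixup stub at label 99 computes "a, b, %o0" (the bytes not yet
 * set) and branches to the common handler at 30f; the __ex_table entry
 * maps the faulting instruction (98b) to its fixup (99b). */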
#define EX(x,y,a,b) 				\
98: 	x,y;					\
	.section .fixup,ALLOC,EXECINSTR;	\
	.align	4;				\
99:	ba 30f;					\
	 a, b, %o0;				\
	.section __ex_table,ALLOC;		\
	.align	4;				\
	.word	98b, 99b;			\
	.text;					\
	.align	4

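/* EXT(start,end,handler): cover the whole range [start,end) with one
 * __ex_table record (the zero word distinguishes it from a plain
 * address/fixup pair); a fault anywhere inside the range is redirected
 * to handler, which must reconstruct the remaining byte count. */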
#define EXT(start,end,handler) 			\
	.section __ex_table,ALLOC;		\
	.align	4;				\
	.word	start, 0, end, handler;		\
	.text;					\
	.align	4

/* Please don't change these macros, unless you change the logic
 * in the .fixup section below as well.
 * Store 64 bytes at (BASE + OFFSET) using value SOURCE. */
#define ZERO_BIG_BLOCK(base, offset, source)    \
	std	source, [base + offset + 0x00]; \
	std	source, [base + offset + 0x08]; \
	std	source, [base + offset + 0x10]; \
	std	source, [base + offset + 0x18]; \
	std	source, [base + offset + 0x20]; \
	std	source, [base + offset + 0x28]; \
	std	source, [base + offset + 0x30]; \
	std	source, [base + offset + 0x38];

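/* Same 64 bytes, but addressed downward from (BASE - OFFSET).  The tail
 * code below jumps into the middle of an expansion of this macro, so
 * each std (4 bytes of text clearing 8 bytes of data) must stay as is. */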
#define ZERO_LAST_BLOCKS(base, offset, source)	\
	std	source, [base - offset - 0x38]; \
	std	source, [base - offset - 0x30]; \
	std	source, [base - offset - 0x28]; \
	std	source, [base - offset - 0x20]; \
	std	source, [base - offset - 0x18]; \
	std	source, [base - offset - 0x10]; \
	std	source, [base - offset - 0x08]; \
	std	source, [base - offset - 0x00];

	.text
	.align 4

        .globl  __bzero_begin
__bzero_begin:

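/* Entry contracts:
 *   memset:  %o0 = dst, %o1 = fill byte, %o2 = length
 *   __bzero: %o0 = dst, %o1 = length (fill value is zero)
 * Both return 0 in %o0 on success (see the header comment). */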
	.globl	__bzero
	.globl	memset
	.globl	__memset_start, __memset_end
__memset_start:
memset:
	and	%o1, 0xff, %g3		! isolate the fill byte
	sll	%g3, 8, %g2
	or	%g3, %g2, %g3		! replicate it into the low half-word
	sll	%g3, 16, %g2
	or	%g3, %g2, %g3		! ... and into the full 32-bit word
	b	1f
	 mov	%o2, %o1		! length lives in %o1 from here on
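/* 3: unaligned start; %o2 = dst & 3.  Store one, two or three bytes
 * (the delay-slot stores execute whether or not the branch is taken)
 * to bring %o0 up to a 4-byte boundary, then adjust len and dst. */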
3:
	cmp	%o2, 3
	be	2f
	 EX(stb	%g3, [%o0], sub %o1, 0)

	cmp	%o2, 2
	be	2f
	 EX(stb	%g3, [%o0 + 0x01], sub %o1, 1)

	EX(stb	%g3, [%o0 + 0x02], sub %o1, 2)
2:
	sub	%o2, 4, %o2		! %o2 = -(bytes just stored)
	add	%o1, %o2, %o1		! len -= bytes stored
	b	4f
	 sub	%o0, %o2, %o0		! dst += bytes stored

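/* __bzero: clear with %g3 = 0.  At 1: lengths of at most 7 bytes take
 * the byte path at 7f; otherwise the start is aligned to 4 bytes and,
 * if needed, one word store brings it up to an 8-byte boundary. */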
__bzero:
	mov	%g0, %g3
1:
	cmp	%o1, 7
	bleu	7f
	 andcc	%o0, 3, %o2

	bne	3b
4:
	 andcc	%o0, 4, %g0

	be	2f
	 mov	%g3, %g2		! %g2:%g3 is the doubleword pair used by std

	! store one word so dst becomes 8-byte aligned
	EX(st	%g3, [%o0], sub %o1, 0)
	sub	%o1, 4, %o1
	add	%o0, 4, %o0
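/* Main loop: two ZERO_BIG_BLOCKs clear 128 bytes per iteration with
 * 8-byte std stores.  %o3 counts the bytes in whole 128-byte chunks;
 * %o2 holds the 0..120-byte remainder of whole 8-byte words, handled
 * by the computed jump below. */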
2:
	andcc	%o1, 0xffffff80, %o3	! dst is now 8-byte aligned; %o1 = remaining length
	be	9f
	 andcc	%o1, 0x78, %o2
10:
	ZERO_BIG_BLOCK(%o0, 0x00, %g2)
	subcc	%o3, 128, %o3
	ZERO_BIG_BLOCK(%o0, 0x40, %g2)
11:
	EXT(10b, 11b, 20f)
	bne	10b
	 add	%o0, 128, %o0

	orcc	%o2, %g0, %g0		! any whole 8-byte words left?
9:
	be	13f
	 andcc	%o1, 7, %o1		! %o1 = trailing byte count

	srl	%o2, 1, %o3		! each std is 4 bytes of code per 8 bytes cleared,
	set	13f, %o4		! so back up %o2/2 bytes from label 13
	sub	%o4, %o3, %o4
	jmp	%o4			! jump into the ZERO_LAST_BLOCKS expansion
	 add	%o0, %o2, %o0		! point %o0 past the area those stores cover

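/* Sixteen std instructions end at 13:; entering k*4 bytes before the
 * label executes the last k stores, clearing exactly %o2 bytes below
 * the adjusted %o0. */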
12:
	ZERO_LAST_BLOCKS(%o0, 0x48, %g2)
	ZERO_LAST_BLOCKS(%o0, 0x08, %g2)
13:
	be	8f			! no trailing bytes (flags from andcc %o1, 7)
	 andcc	%o1, 4, %g0

	be	1f
	 andcc	%o1, 2, %g0

	! word, then half-word, then byte for the 0..7 trailing bytes
	EX(st	%g3, [%o0], and %o1, 7)
	add	%o0, 4, %o0
1:
	be	1f
	 andcc	%o1, 1, %g0

	EX(sth	%g3, [%o0], and %o1, 3)
	add	%o0, 2, %o0
1:
	bne,a	8f			! store annulled when the branch falls through
	 EX(stb	%g3, [%o0], and %o1, 1)
8:
	retl
	 clr	%o0			! return 0
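/* len <= 7: if dst is word aligned, reuse the tail code at 13b (the
 * flags on len come from the orcc in the delay slot); otherwise fall
 * into the simple byte loop at 8:. */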
7:
	be	13b			! dst word aligned: tail code handles len
	 orcc	%o1, 0, %g0

	be	0f			! len == 0: nothing to do
8:
	 add	%o0, 1, %o0
	subcc	%o1, 1, %o1
	bne	8b
	 EX(stb	%g3, [%o0 - 1], add %o1, 1)
0:
	retl
	 clr	%o0
__memset_end:

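/* Fault handlers.  Their one job is to leave, in %o0, the number of
 * bytes that were not yet set when the store trapped. */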
	.section .fixup,#alloc,#execinstr
	.align	4
20:
	cmp	%g2, 8			! %g2 = insns into the EXT range when we faulted
	bleu	1f
	 and	%o1, 0x7f, %o1		! %o1 = bytes beyond the whole 128-byte chunks
	sub	%g2, 9, %g2		! fault in the second block: skip the subcc...
	add	%o3, 64, %o3		! ...and credit the first block's 64 bytes
1:
	sll	%g2, 3, %g2		! completed stds -> bytes
	add	%o3, %o1, %o0
	b 30f
	 sub	%o0, %g2, %o0		! bytes left = chunks + tail - bytes done
21:
	mov	8, %o0			! fault %g2 stds into an 8-std group:
	and	%o1, 7, %o1		! bytes left = (8 - %g2) * 8 + (len & 7)
	sub	%o0, %g2, %o0		! (this handler is not referenced by any
	sll	%o0, 3, %o0		!  EXT entry in this file)
	b 30f
	 add	%o0, %o1, %o0
30:
/* %o4 is faulting address, %o5 is %pc where fault occurred */
	save	%sp, -104, %sp
	mov	%i5, %o0		! lookup_fault(pc, ret_pc, address)
	mov	%i7, %o1
	call	lookup_fault
	 mov	%i4, %o2
	ret
	 restore

	.globl __bzero_end
__bzero_end: