/* $Id: clear_page.S,v 1.13 2003/08/25 17:03:10 lethal Exp $
 *
 * __clear_user_page, __clear_user and clear_page implementations for SuperH
 *
 * Copyright (C) 2001  Kaz Kojima
 * Copyright (C) 2001, 2002  Niibe Yutaka
 *
 */
#include <linux/linkage.h>

/*
 * clear_page_slow
 * @to: P1 address
 *
 * void clear_page_slow(void *to)
 */

/*
 * r0 --- scratch
 * r4 --- to
 * r5 --- to + 4096 - 28 (loop limit; the final 28 bytes of each pass
 *        are written by the pre-decrement stores)
 */
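
/*
 * For reference, a rough C equivalent of the loop below (a sketch,
 * not the kernel's generic code): the page is cleared one 32-byte
 * cache line per pass.  On SH-4, movca.l allocates the line without
 * fetching it from memory and ocbwb then writes it back.
 *
 *	void clear_page_slow(void *to)
 *	{
 *		unsigned long *p = to;
 *		unsigned long *end = p + 4096 / sizeof(*p);
 *
 *		for (; p != end; p += 8) {	// one 32-byte line
 *			p[0] = 0; p[1] = 0; p[2] = 0; p[3] = 0;
 *			p[4] = 0; p[5] = 0; p[6] = 0; p[7] = 0;
 *		}
 *	}
 */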
ENTRY(clear_page_slow)
	mov	r4,r5
	mov.w	.Llimit,r0
	add	r0,r5
	mov	#0,r0
	!
1:
#if defined(CONFIG_CPU_SH3)
	mov.l	r0,@r4
#elif defined(CONFIG_CPU_SH4)
	movca.l	r0,@r4		! allocate the cache line, no fetch
	mov	r4,r1		! remember line start for ocbwb below
#endif
	add	#32,r4
	mov.l	r0,@-r4
	mov.l	r0,@-r4
	mov.l	r0,@-r4
	mov.l	r0,@-r4
	mov.l	r0,@-r4
	mov.l	r0,@-r4
	mov.l	r0,@-r4
#if defined(CONFIG_CPU_SH4)
	ocbwb	@r1		! write the cleared line back to memory
#endif
	cmp/eq	r5,r4
	bf/s	1b
	 add	#28,r4
	!
	rts
	 nop
.Llimit:	.word	(4096-28)

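/*
 * __clear_user
 * @to: destination (user) address
 * @n:  number of bytes to clear
 *
 * unsigned long __clear_user(void *to, unsigned long n)
 *
 * Returns 0 on success, or the number of bytes left uncleared when a
 * store faults (the __ex_table fixups below route faults to
 * .Lbad_clear_user).  The three-area strategy, sketched in plain C
 * without the fault handling:
 *
 *	unsigned long __clear_user(void *to, unsigned long n)
 *	{
 *		char *p = to, *end = p + n;
 *		char *a_start = (char *)(((unsigned long)p + 31) & ~31UL);
 *		char *a_end = (char *)((unsigned long)end & ~31UL);
 *
 *		if (n >= 32) {
 *			while (p < a_start)	// Area 0: head, bytewise
 *				*p++ = 0;
 *			while (p < a_end) {	// Area 1: 32 bytes a pass
 *				memset(p, 0, 32);
 *				p += 32;
 *			}
 *		}
 *		while (p < end)			// Area 2: tail, bytewise
 *			*p++ = 0;
 *		return 0;
 *	}
 */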
ENTRY(__clear_user)
	!
	mov	#0, r0
	mov	#0xe0, r1	! 0xffffffe0
	!
	! r4..(r4+31)&~31 	   -------- not aligned	[ Area 0 ]
	! (r4+31)&~31..(r4+r5)&~31 -------- aligned	[ Area 1 ]
	! (r4+r5)&~31..r4+r5       -------- not aligned	[ Area 2 ]
	!
	! Clear area 0
	mov	r4, r2
	!
	tst	r1, r5		! length < 32
	bt	.Larea2		! skip to remainder
	!
	add	#31, r2
	and	r1, r2
	cmp/eq	r4, r2
	bt	.Larea1
	mov	r2, r3
	sub	r4, r3
	mov	r3, r7
	mov	r4, r2
	!
.L0:	dt	r3
0:	mov.b	r0, @r2
	bf/s	.L0
	 add	#1, r2
	!
	sub	r7, r5
	mov	r2, r4
.Larea1:
	mov	r4, r3
	add	r5, r3
	and	r1, r3
	cmp/hi	r2, r3
	bf	.Larea2
	!
	! Clear area 1
#if defined(CONFIG_CPU_SH4)
1:	movca.l	r0, @r2
#else
1:	mov.l	r0, @r2
#endif
	add	#4, r2
2:	mov.l	r0, @r2
	add	#4, r2
3:	mov.l	r0, @r2
	add	#4, r2
4:	mov.l	r0, @r2
	add	#4, r2
5:	mov.l	r0, @r2
	add	#4, r2
6:	mov.l	r0, @r2
	add	#4, r2
7:	mov.l	r0, @r2
	add	#4, r2
8:	mov.l	r0, @r2
	add	#4, r2
	cmp/hi	r2, r3
	bt/s	1b
	 nop
	!
	! Clear area 2
.Larea2:
	mov	r4, r3
	add	r5, r3
	cmp/hs	r3, r2
	bt/s	.Ldone
	 sub	r2, r3
.L2:	dt	r3
9:	mov.b	r0, @r2
	bf/s	.L2
	 add	#1, r2
	!
.Ldone:	rts
	 mov	#0, r0	! return 0 on success

	! on a fault, return the number of bytes left uncleared
.Lbad_clear_user:
	mov	r4, r0
	add	r5, r0
	rts
	 sub	r2, r0

.section __ex_table,"a"
	.align 2
	.long	0b, .Lbad_clear_user
	.long	1b, .Lbad_clear_user
	.long	2b, .Lbad_clear_user
	.long	3b, .Lbad_clear_user
	.long	4b, .Lbad_clear_user
	.long	5b, .Lbad_clear_user
	.long	6b, .Lbad_clear_user
	.long	7b, .Lbad_clear_user
	.long	8b, .Lbad_clear_user
	.long	9b, .Lbad_clear_user
.previous

#if defined(CONFIG_CPU_SH4)
/*
 * __clear_user_page
 * @to: P3 address (with same color)
 * @orig_to: P1 address
 *
 * void __clear_user_page(void *to, void *orig_to)
 */

/*
 * r0 --- scratch
 * r4 --- to
 * r5 --- orig_to
 * r6 --- to + 4096
 */
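
/*
 * @to is a kernel (P3) mapping with the same cache colour as the
 * user's view of the page, while @orig_to names the same physical
 * page through P1.  Per 32-byte line the loop below does: ocbi on
 * the P1 alias to drop the stale line, movca.l to allocate the P3
 * line without fetching it, then ocbwb to write the cleared line
 * back.  Sketched with hypothetical helper names for the cache ops:
 *
 *	for (offs = 0; offs < 4096; offs += 32) {
 *		cache_invalidate_line(orig_to + offs);	// ocbi
 *		zero_one_line(to + offs);		// movca.l + mov.l
 *		cache_writeback_line(to + offs);	// ocbwb
 *	}
 */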
ENTRY(__clear_user_page)
	mov.w	.L4096,r0
	mov	r4,r6
	add	r0,r6
	mov	#0,r0
	!
1:	ocbi	@r5		! invalidate the stale P1 alias line
	add	#32,r5
	movca.l	r0,@r4		! allocate the line without fetching it
	mov	r4,r1		! remember line start for ocbwb below
	add	#32,r4
	mov.l	r0,@-r4
	mov.l	r0,@-r4
	mov.l	r0,@-r4
	mov.l	r0,@-r4
	mov.l	r0,@-r4
	mov.l	r0,@-r4
	mov.l	r0,@-r4
	add	#28,r4
	cmp/eq	r6,r4
	bf/s	1b
	 ocbwb	@r1
	!
	rts
	 nop
.L4096:	.word	4096

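/*
 * __flush_cache_4096
 *
 * void __flush_cache_4096(unsigned long addr, unsigned long phys,
 *			    unsigned long exec_offset)
 *
 * (Parameter names are descriptive, inferred from the register use
 * below.)  @addr walks 4KB of the cache address array, @phys is the
 * value written into each entry, and @exec_offset is added to the
 * loop's own address so the loop can be executed through an uncached
 * alias while it pokes the array.  Each of the 32 passes writes the
 * entries at offsets 0, 32, 64 and 96, then advances addr and phys
 * by 128 bytes.
 */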
ENTRY(__flush_cache_4096)
	mov.l	1f,r3
	add	r6,r3		! r3 = loop body address + exec_offset
	mov	r4,r0
	mov	#64,r2
	shll	r2		! r2 = 128, the stride per pass
	mov	#64,r6
	jmp	@r3		! enter the loop through the offset alias
	 mov	#96,r7
	.align	2
1:	.long	2f
2:
	.rept	32
	mov.l	r5,@r0
	mov.l	r5,@(32,r0)
	mov.l	r5,@(r0,r6)
	mov.l	r5,@(r0,r7)
	add	r2,r5
	add	r2,r0
	.endr
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	rts
	 nop

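/*
 * __flush_dcache_all
 *
 * void __flush_dcache_all(void)
 *
 * Walks a 16KB-aligned window around empty_zero_page in 32-byte
 * steps: movca.l allocates each line, evicting (and writing back)
 * whatever dirty line occupied that slot, and ocbi then discards the
 * allocated data, so after 16KB the whole operand cache has been
 * flushed.  The BL bit is held while each group of lines is worked
 * on so interrupts cannot disturb the sequence.
 */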
ENTRY(__flush_dcache_all)
	mov.l	2f,r0
	mov.l	3f,r4
	and	r0,r4		! r4 = (unsigned long)&empty_zero_page[0] & 0xffffc000
	stc	sr,r1		! save SR
	mov.l	4f,r2
	or	r1,r2
	mov	#32,r3
	shll2	r3		! r3 = 128 passes (128 * 128 bytes = 16KB)
1:
	ldc	r2,sr		! set BL bit
	movca.l	r0,@r4
	ocbi	@r4
	add	#32,r4
	movca.l	r0,@r4
	ocbi	@r4
	add	#32,r4
	movca.l	r0,@r4
	ocbi	@r4
	add	#32,r4
	movca.l	r0,@r4
	ocbi	@r4
	ldc	r1,sr		! restore SR
	dt	r3
	bf/s	1b
	 add	#32,r4

	rts
	 nop
	.align	2
2:	.long	0xffffc000
3:	.long	empty_zero_page
4:	.long	0x10000000	! BL bit

/* __flush_cache_4096_all(unsigned long addr) */
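/*
 * Same movca.l + ocbi trick as __flush_dcache_all, but limited to
 * the 4KB of lines that can hold the page at @addr: 32 passes of
 * 128 bytes starting at addr | (empty_zero_page & ~0x3fff).
 */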
ENTRY(__flush_cache_4096_all)
	mov.l	2f,r0
	mov.l	3f,r2
	and	r0,r2
	or	r2,r4		! r4 = addr | ((unsigned long)&empty_zero_page[0] & ~0x3fff)
	stc	sr,r1		! save SR
	mov.l	4f,r2
	or	r1,r2
	mov	#32,r3		! 32 passes (32 * 128 bytes = 4KB)
1:
	ldc	r2,sr		! set BL bit
	movca.l	r0,@r4
	ocbi	@r4
	add	#32,r4
	movca.l	r0,@r4
	ocbi	@r4
	add	#32,r4
	movca.l	r0,@r4
	ocbi	@r4
	add	#32,r4
	movca.l	r0,@r4
	ocbi	@r4
	ldc	r1,sr		! restore SR
	dt	r3
	bf/s	1b
	 add	#32,r4

	rts
	 nop
	.align	2
2:	.long	0xffffc000
3:	.long	empty_zero_page
4:	.long	0x10000000	! BL bit
#endif