Update contrib.
// Copyright (c) 1997-2009 Nokia Corporation and/or its subsidiary(-ies).
// All rights reserved.
// This component and the accompanying materials are made available
// under the terms of the License "Eclipse Public License v1.0"
// which accompanies this distribution, and is available
// at the URL "http://www.eclipse.org/legal/epl-v10.html".
// Initial Contributors:
// Nokia Corporation - initial contribution.
// e32\klib\arm\cumem.cia
#include <kernel/klib.h>
#if defined(__REPLACE_GENERIC_UTILS)
#include "replacement_utils.h"
#define CUMEM_FAULT(cc, reason) asm("mov"#cc" r0, #%a0 " : : "i" (reason)); \
                                asm("b"#cc" " CSM_ZN2KL5PanicENS_13TKernLibPanicE)
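// Illustrative note (added for exposition): with cc=ne, the macro above expands roughly to
//   asm("movne r0, #%a0 " : : "i" (reason));
//   asm("bne " CSM_ZN2KL5PanicENS_13TKernLibPanicE);
// i.e. if the NE condition holds, load the panic reason into r0 and branch to KL::Panic().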
__NAKED__ void kumemget_no_paging_assert(TAny* /*aKernAddr*/, const TAny* /*aAddr*/, TInt /*aLength*/)
asm("mrs r3, spsr "); // r3=spsr_svc
asm("tst r3, #0x0f "); // test for user mode
asm("bne memcpy "); // if not, just do memcpy
#ifndef USE_REPLACEMENT_UMEMGET
asm("b umemget_no_paging_assert");
#ifndef USE_REPLACEMENT_UMEMGET
// Conditional returns are not predicted on ARMv6
__NAKED__ void dummy_umemget32_exit()
asm("_umemget32_exit: ");
asm("ldmfd sp!, {r4, pc} ");
#define UMEMGET32_EXIT(cc) asm("b"#cc" _umemget32_exit")
#define UMEMGET32_EXIT(cc) asm("ldm"#cc"fd sp!, {r4, pc}")
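// Illustrative note (added for exposition): the two UMEMGET32_EXIT definitions are alternatives.
// The first turns a conditional exit into a predicted conditional branch to the single
// unconditional return sequence at _umemget32_exit (the workaround for the comment above);
// the second simply returns with a conditional LDM on cores where that is acceptable.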
EXPORT_C __NAKED__ void kumemget32(TAny* /*aKernAddr*/, const TAny* /*aAddr*/, TInt /*aLength*/)
asm("mrs r3, spsr "); // r3=spsr_svc
asm("tst r3, #0x0f "); // test for user mode
asm("bne wordmove "); // if not, just do wordmove
// otherwise fall through to umemget32
EXPORT_C __NAKED__ void umemget32(TAny* /*aKernAddr*/, const TAny* /*aUserAddr*/, TInt /*aLength*/)
ASM_ASSERT_PAGING_SAFE
#ifdef __USER_MEMORY_GUARDS_ENABLED__
// Wrap the workings of this function in an internal call, so we can save/restore UMG state
asm("stmfd sp!, {r11, lr} ");
asm("subs r12, r2, #1");
asm("ldrhsb r11, [r0]"); // test access to first byte of kernel memory
asm("ldrhsb r11, [r0,r12]"); // test access to last byte of kernel memory
USER_MEMORY_GUARD_OFF(,r11,r12); // leaves UMG mode in r11
asm("bl 0f"); // call to label below
USER_MEMORY_GUARD_RESTORE(r11,r12);
asm("ldmfd sp!, {r11, pc} ");
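// Illustrative note (added for exposition): in guard builds the copy body that follows (the
// code reached via the "bl 0f" above) runs between USER_MEMORY_GUARD_OFF and
// USER_MEMORY_GUARD_RESTORE. Conceptually:
//   probe kernel dest[0] and dest[aLength-1]   // the LDRHSB lines; skipped when aLength == 0
//   old = guard_off()                          // saved guard state lives in r11
//   copy()                                     // the wrapped body, called via "bl 0f"
//   guard_restore(old)
// The helper names in this pseudocode are hypothetical.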
asm("tst r2, #3 "); // check length is a whole number of words
CUMEM_FAULT(ne, KL::EWordMoveLengthNotMultipleOf4);
asm("_umemget_word_aligned: ");
asm("stmfd sp!, {r4, lr} ");
asm("subs ip, r2, #32 ");
asm("blo _umemget32_small_copy ");
asm("beq _umemget32_32_byte_case "); // 32 byte case is common - don't bother to align
asm("rsb lr, r0, #32 "); // align destination: 0 - 28 byte copy
asm("movs lr, lr, lsl #27 ");
asm("beq _umemget32_large_copy ");
asm("sub r2, r2, lr, lsr #27 ");
asm("msr cpsr_f, lr "); // put length bits 4, 3, 2 into N, Z, C
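// Illustrative note (added for exposition): after the MSR above, N, Z and C hold bits 4, 3 and 2
// of the remaining byte count, so the MI instructions below move 16 bytes, the EQ ones 8 bytes
// and the CS ones 4 bytes - a branch-free dispatch on the residual length. The same trick is
// reused in the small-copy paths further down.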
asm("ldrmit r3, [r1], #4 ");
asm("ldrmit r4, [r1], #4 ");
asm("ldrmit ip, [r1], #4 ");
asm("ldrmit lr, [r1], #4 ");
asm("stmmiia r0!, {r3, r4, ip, lr} ");
asm("ldreqt r3, [r1], #4 ");
asm("ldreqt r4, [r1], #4 ");
asm("ldrcst ip, [r1], #4 ");
asm("stmeqia r0!, {r3, r4} ");
asm("strcs ip, [r0], #4 ");
asm("subs ip, r2, #32 ");
asm("blo _umemget32_small_copy ");
asm("_umemget32_large_copy: "); // copy 32 byte blocks
asm("_umemget32_32_byte_case: ");
asm("ldrt r2, [r1], #4 ");
asm("ldrt r3, [r1], #4 ");
asm("ldrt r4, [r1], #4 ");
asm("ldrt lr, [r1], #4 ");
asm("subs ip, ip, #32 ");
asm("stmia r0!, {r2, r3, r4, lr} ");
asm("ldrt r2, [r1], #4 ");
asm("ldrt r3, [r1], #4 ");
asm("ldrt r4, [r1], #4 ");
asm("ldrt lr, [r1], #4 ");
asm("stmia r0!, {r2, r3, r4, lr} ");
asm("bhs _umemget32_large_copy ");
asm("_umemget32_small_copy: "); // 0 - 31 byte copy, length in ip bits 0-4
asm("movs r2, ip, lsl #27 ");
asm("msr cpsr_f, r2 "); // put length bits 4, 3, 2 into N, Z, C
asm("ldrmit r3, [r1], #4 ");
asm("ldrmit r4, [r1], #4 ");
asm("ldrmit ip, [r1], #4 ");
asm("ldrmit lr, [r1], #4 ");
asm("stmmiia r0!, {r3, r4, ip, lr} ");
asm("ldreqt r3, [r1], #4 ");
asm("ldreqt r4, [r1], #4 ");
asm("ldrcst ip, [r1], #4 ");
asm("stmeqia r0!, {r3, r4} ");
asm("strcs ip, [r0], #4 ");
asm("movs r2, r2, lsl #3 ");
asm("msr cpsr_f, r2 "); // put length bits 1, 0 into N, Z
asm("ldrmibt r3, [r1], #1 ");
asm("ldrmibt r4, [r1], #1 ");
asm("ldreqbt ip, [r1], #1 ");
asm("strmib r3, [r0], #1 ");
asm("strmib r4, [r0], #1 ");
asm("streqb ip, [r0], #1 ");
asm("ldmfd sp!, {r4, pc} ");
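// Illustrative sketch (added for exposition, not part of the original routine): a plain C model
// of umemget32, with a hypothetical name and ordinary pointer accesses standing in for the LDRT
// user-mode loads and the guard handling above.
static void umemget32_model(TAny* aKernAddr, const TAny* aUserAddr, TInt aLength)
	{
	// aLength must be a multiple of 4 (enforced above via EWordMoveLengthNotMultipleOf4)
	TUint32* d = (TUint32*)aKernAddr;
	const TUint32* s = (const TUint32*)aUserAddr;
	for (TInt i = 0; i < (aLength >> 2); ++i)
		d[i] = s[i];	// the real code loads with LDRT so each read is checked as a user-mode access
	}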
EXPORT_C __NAKED__ void kumemget(TAny* /*aKernAddr*/, const TAny* /*aAddr*/, TInt /*aLength*/)
asm("mrs r3, spsr "); // r3=spsr_svc
asm("tst r3, #0x0f "); // test for user mode
asm("bne memcpy "); // if not, just do memcpy
// otherwise fall through to umemget
EXPORT_C __NAKED__ void umemget(TAny* /*aKernAddr*/, const TAny* /*aUserAddr*/, TInt /*aLength*/)
// Optimised for aligned transfers, as unaligned are very rare in practice
ASM_ASSERT_PAGING_SAFE
asm("umemget_no_paging_assert:");
#ifdef __USER_MEMORY_GUARDS_ENABLED__
// Wrap the workings of this function in an internal call, so we can save/restore UMG state
asm("stmfd sp!, {r11, lr} ");
asm("subs r12, r2, #1");
asm("ldrhsb r11, [r0]"); // test access to first byte of kernel memory
asm("ldrhsb r11, [r0,r12]"); // test access to last byte of kernel memory
USER_MEMORY_GUARD_OFF(,r11,r12); // leaves UMG mode in r11
asm("bl 0f"); // call to label below
USER_MEMORY_GUARD_RESTORE(r11,r12);
asm("ldmfd sp!, {r11, pc} ");
asm("tsteq r1, #3 ");
asm("beq _umemget_word_aligned ");
asm("subs r2, r2, #1 ");
asm("ldrplbt r3, [r1], #1 ");
asm("strplb r3, [r0], #1 ");
asm("1: "); // Attempt to word align
asm("movs r3, r0, lsl #30 ");
asm("rsbs r3, r3, #0 "); // 01->c0000000 (MI,VC) 10->80000000 (MI,VS) 11->40000000 (PL,VC)
asm("sub r2, r2, r3, lsr #30 ");
asm("ldrmibt r3, [r1], #1 ");
asm("strmib r3, [r0], #1 ");
asm("ldrmibt r3, [r1], #1 ");
asm("strmib r3, [r0], #1 ");
asm("ldrvcbt r3, [r1], #1 ");
asm("strvcb r3, [r0], #1 "); // r0 now word aligned
asm("movs r3, r1, lsl #31 ");
asm("bic r1, r1, #3 ");
asm("bcs 3f "); // branch if src mod 4 = 2 or 3
asm("bpl _umemget_word_aligned "); // branch if src mod 4 = 0
asm("4: "); // src mod 4 = 1
asm("subs r2, r2, #4 ");
asm("ldrget r3, [r1], #4 ");
asm("ldrget ip, [r1] ");
asm("movge r3, r3, lsr #8 ");
asm("orrge r3, r3, ip, lsl #24 ");
asm("strge r3, [r0], #4 ");
asm("add r1, r1, #1 ");
asm("b umemget_do_end ");
asm("2: "); // src mod 4 = 2
asm("subs r2, r2, #4 ");
asm("ldrget r3, [r1], #4 ");
asm("ldrget ip, [r1] ");
asm("movge r3, r3, lsr #16 ");
asm("orrge r3, r3, ip, lsl #16 ");
asm("strge r3, [r0], #4 ");
asm("add r1, r1, #2 ");
asm("b umemget_do_end ");
asm("5: "); // src mod 4 = 3
asm("subs r2, r2, #4 ");
asm("ldrget r3, [r1], #4 ");
asm("ldrget ip, [r1] ");
asm("movge r3, r3, lsr #24 ");
asm("orrge r3, r3, ip, lsl #8 ");
asm("strge r3, [r0], #4 ");
asm("add r1, r1, #3 ");
asm("umemget_do_end: ");
asm("adds r2, r2, #2 "); // -1 if 1 left, 0 if 2 left, +1 if 3 left
asm("ldrplbt r3, [r1], #1 ");
asm("strplb r3, [r0], #1 ");
asm("ldrplbt r3, [r1], #1 ");
asm("strplb r3, [r0], #1 ");
asm("ldrnebt r3, [r1], #1 ");
asm("strneb r3, [r0], #1 ");
#endif // USE_REPLACEMENT_UMEMGET
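// Illustrative sketch (added for exposition): the unaligned-source cases above (src mod 4 = 1, 2
// or 3) read whole aligned words and splice each output word from two of them with the
// lsr/orr-lsl pairs. A C model with hypothetical names:
static TUint32 umemget_splice_model(const TUint32* aAlignedSrc, TInt aShiftBytes)
	{
	// aShiftBytes is the source address modulo 4, i.e. 1, 2 or 3
	TUint32 lo = aAlignedSrc[0] >> (8 * aShiftBytes);
	TUint32 hi = aAlignedSrc[1] << (32 - 8 * aShiftBytes);
	return lo | hi;	// matches the movge/orrge pairs in cases 4, 2 and 5 above
	}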
__NAKED__ void kumemput_no_paging_assert(TAny* /*aAddr*/, const TAny* /*aKernAddr*/, TInt /*aLength*/)
asm("mrs r3, spsr "); // r3=spsr_svc
asm("tst r3, #0x0f "); // test for user mode
asm("bne memcpy "); // if not, just do memcpy
#ifndef USE_REPLACEMENT_UMEMPUT
asm("b umemput_no_paging_assert");
#ifndef USE_REPLACEMENT_UMEMPUT
// Conditional returns are not predicted on ARMv6
__NAKED__ void dummy_umemput32_exit()
asm("_umemput32_exit: ");
asm("ldmfd sp!, {r4, pc} ");
#define UMEMPUT32_EXIT(cc) asm("b"#cc" _umemput32_exit")
#define UMEMPUT32_EXIT(cc) asm("ldm"#cc"fd sp!, {r4, pc}")
EXPORT_C __NAKED__ void kumemput32(TAny* /*aAddr*/, const TAny* /*aKernAddr*/, TInt /*aLength*/)
asm("mrs r3, spsr "); // r3=spsr_svc
asm("tst r3, #0x0f "); // test for user mode
asm("bne wordmove "); // if not, just do wordmove
// otherwise fall through to umemput32
EXPORT_C __NAKED__ void umemput32(TAny* /*aUserAddr*/, const TAny* /*aKernAddr*/, TInt /*aLength*/)
ASM_ASSERT_DATA_PAGING_SAFE
#ifdef __USER_MEMORY_GUARDS_ENABLED__
// Wrap the workings of this function in an internal call, so we can save/restore UMG state
asm("stmfd sp!, {r11, lr} ");
asm("subs r12, r2, #1");
asm("ldrhsb r11, [r1]"); // test access to first byte of kernel memory
asm("ldrhsb r11, [r1,r12]"); // test access to last byte of kernel memory
USER_MEMORY_GUARD_OFF(,r11,r12); // leaves UMG mode in r11
asm("bl 0f"); // call to label below
USER_MEMORY_GUARD_RESTORE(r11,r12);
asm("ldmfd sp!, {r11, pc} ");
asm("tst r2, #3 "); // check length is a whole number of words
CUMEM_FAULT(ne, KL::EWordMoveLengthNotMultipleOf4);
asm("cmp r2, #4 "); // special case for 4 byte copy which is common
asm("ldrhs r3, [r1], #4 ");
asm("subhs r2, r2, #4 ");
asm("strhst r3, [r0], #4 ");
asm("_umemput_word_aligned: ");
asm("stmfd sp!, {r4, lr} ");
asm("subs r2, r2, #32 ");
asm("bhs _umemput32_align_source ");
asm("_umemput32_small_copy: "); // copy 1 - 31 bytes
asm("mov r2, r2, lsl #27 ");
asm("msr cpsr_f, r2 "); // put length bits 4, 3, 2 into N, Z, C
asm("ldmmiia r1!, {r3, r4, ip, lr} ");
asm("strmit r3, [r0], #4 ");
asm("strmit r4, [r0], #4 ");
asm("ldmeqia r1!, {r3, r4} ");
asm("strmit ip, [r0], #4 ");
asm("ldrcs ip, [r1], #4 ");
asm("strmit lr, [r0], #4 ");
asm("streqt r3, [r0], #4 ");
asm("streqt r4, [r0], #4 ");
asm("strcst ip, [r0], #4 ");
asm("movs r2, r2, lsl #3 ");
asm("msr cpsr_f, r2 "); // put length bits 1, 0 into N, Z
asm("ldrmih r3, [r1], #2 ");
asm("ldreqb r4, [r1], #1 ");
asm("strmibt r3, [r0], #1 ");
asm("movmi r3, r3, lsr #8 ");
asm("strmibt r3, [r0], #1 ");
asm("streqbt r4, [r0], #1 ");
asm("ldmfd sp!, {r4, pc} ");
asm("_umemput32_align_source: ");
asm("bls _umemput32_large_copy "); // don't bother if length <= 64
asm("rsb ip, r1, #32 ");
asm("movs ip, ip, lsl #27 ");
asm("beq _umemput32_large_copy ");
asm("msr cpsr_f, ip "); // put length bits 4, 3, 2 into N, Z, C
asm("sub r2, r2, ip, lsr #27 ");
asm("ldmmiia r1!, {r3, r4, ip, lr} ");
asm("strmit r3, [r0], #4 ");
asm("strmit r4, [r0], #4 ");
asm("ldmeqia r1!, {r3, r4} ");
asm("strmit ip, [r0], #4 ");
asm("ldrcs ip, [r1], #4 ");
asm("strmit lr, [r0], #4 ");
asm("streqt r3, [r0], #4 ");
asm("streqt r4, [r0], #4 ");
asm("strcst ip, [r0], #4 ");
asm("_umemput32_large_copy: "); // copy 32 byte blocks
asm("ldmia r1!, {r3, r4, ip, lr} ");
asm("strt r3, [r0], #4 ");
asm("strt r4, [r0], #4 ");
asm("strt ip, [r0], #4 ");
asm("strt lr, [r0], #4 ");
asm("ldmia r1!, {r3, r4, ip, lr} ");
asm("strt r3, [r0], #4 ");
asm("strt r4, [r0], #4 ");
asm("strt ip, [r0], #4 ");
asm("strt lr, [r0], #4 ");
asm("subs r2, r2, #32 ");
asm("bhs _umemput32_large_copy ");
asm("adds r2, r2, #32 ");
asm("bne _umemput32_small_copy ");
asm("ldmfd sp!, {r4, pc} ");
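// Illustrative note (added for exposition): umemput32 is the mirror image of umemget32 - the
// kernel-side source is read with plain LDR/LDM while every store to the user-side destination
// uses the STRT forms, so each write is checked as a user-mode access. The MSR flag trick
// dispatches the sub-32-byte residue exactly as in umemget32.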
__NAKED__ void uumemcpy32(TAny* /*aUserDst*/, const TAny* /*aUserSrc*/, TInt /*aLength*/)
ASM_ASSERT_PAGING_SAFE
#ifdef __USER_MEMORY_GUARDS_ENABLED__
// Wrap the workings of this function in an internal call, so we can save/restore UMG state
asm("stmfd sp!, {r11, lr} ");
USER_MEMORY_GUARD_OFF(,r11,r12); // leaves UMG mode in r11
asm("bl 0f"); // call to label below
USER_MEMORY_GUARD_RESTORE(r11,r12);
asm("ldmfd sp!, {r11, pc} ");
asm("subs r2, r2, #4 ");
asm("ldrplt r3, [r1], #4 ");
asm("strplt r3, [r0], #4 ");
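// Illustrative sketch (added for exposition): uumemcpy32 copies between two user-side buffers,
// so both the load (LDRPLT) and the store (STRPLT) above use the user-mode checked forms.
// A plain C model with a hypothetical name:
static void uumemcpy32_model(TAny* aUserDst, const TAny* aUserSrc, TInt aLength)
	{
	TUint32* d = (TUint32*)aUserDst;
	const TUint32* s = (const TUint32*)aUserSrc;
	for (TInt i = 0; i < (aLength >> 2); ++i)
		d[i] = s[i];	// whole words only, matching the word loop above
	}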
__NAKED__ void uumemcpy(TAny* /*aUserDst*/, const TAny* /*aUserSrc*/, TInt /*aLength*/)
ASM_ASSERT_PAGING_SAFE
#ifdef __USER_MEMORY_GUARDS_ENABLED__
// Wrap the workings of this function in an internal call, so we can save/restore UMG state
asm("stmfd sp!, {r11, lr} ");
USER_MEMORY_GUARD_OFF(,r11,r12); // leaves UMG mode in r11
asm("bl 0f"); // call to label below
USER_MEMORY_GUARD_RESTORE(r11,r12);
asm("ldmfd sp!, {r11, pc} ");
asm("subs r2, r2, #1 ");
asm("ldrplbt r3, [r1], #1 ");
asm("strplbt r3, [r0], #1 ");
asm("movs r3, r0, lsl #30 ");
asm("rsbs r3, r3, #0 "); // 01->c0000000 (MI,VC) 10->80000000 (MI,VS) 11->40000000 (PL,VC)
asm("sub r2, r2, r3, lsr #30 ");
asm("ldrmibt r3, [r1], #1 ");
asm("strmibt r3, [r0], #1 ");
asm("ldrmibt r3, [r1], #1 ");
asm("strmibt r3, [r0], #1 ");
asm("ldrvcbt r3, [r1], #1 ");
asm("strvcbt r3, [r0], #1 "); // r0 now word aligned
asm("movs r3, r1, lsl #31 ");
asm("bic r1, r1, #3 ");
asm("bcs 3f "); // branch if src mod 4 = 2 or 3
asm("bmi 4f "); // branch if src mod 4 = 1
asm("subs r2, r2, #4 ");
asm("ldrget r3, [r1], #4 ");
asm("strget r3, [r0], #4 ");
asm("uumemcpy_do_end: ");
asm("adds r2, r2, #2 "); // -1 if 1 left, 0 if 2 left, +1 if 3 left
asm("ldrplbt r3, [r1], #1 ");
asm("strplbt r3, [r0], #1 ");
asm("ldrplbt r3, [r1], #1 ");
asm("strplbt r3, [r0], #1 ");
asm("ldrnebt r3, [r1], #1 ");
asm("strnebt r3, [r0], #1 ");
asm("3: "); // get here if src mod 4 = 2 or 3
asm("bmi 5f "); // branch if 3
asm("subs r2, r2, #4 ");
asm("ldrget r3, [r1], #4 ");
asm("ldrget ip, [r1] ");
asm("movge r3, r3, lsr #16 ");
asm("orrge r3, r3, ip, lsl #16 ");
asm("strget r3, [r0], #4 ");
asm("add r1, r1, #2 ");
asm("b uumemcpy_do_end ");
asm("subs r2, r2, #4 ");
asm("ldrget r3, [r1], #4 ");
asm("ldrget ip, [r1] ");
asm("movge r3, r3, lsr #24 ");
asm("orrge r3, r3, ip, lsl #8 ");
asm("strget r3, [r0], #4 ");
asm("add r1, r1, #3 ");
asm("b uumemcpy_do_end ");
asm("subs r2, r2, #4 ");
asm("ldrget r3, [r1], #4 ");
asm("ldrget ip, [r1] ");
asm("movge r3, r3, lsr #8 ");
asm("orrge r3, r3, ip, lsl #24 ");
asm("strget r3, [r0], #4 ");
asm("add r1, r1, #1 ");
asm("b uumemcpy_do_end ");
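// Illustrative note (added for exposition): uumemcpy handles arbitrary alignment the same way
// as umemget above - align the destination byte by byte, then splice source words with the
// lsr/orr-lsl pairs - but because both buffers are user-side, every access uses the checked
// LDR..T/STR..T forms, where umemget could use plain stores to its kernel destination and
// umemput (below) plain loads from its kernel source.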
EXPORT_C __NAKED__ void kumemput(TAny* /*aAddr*/, const TAny* /*aKernAddr*/, TInt /*aLength*/)
asm("mrs r3, spsr "); // r3=spsr_svc
asm("tst r3, #0x0f "); // test for user mode
asm("bne memcpy "); // if not, just do memcpy
// otherwise fall through to umemput
EXPORT_C __NAKED__ void umemput(TAny* /*aUserAddr*/, const TAny* /*aKernAddr*/, TInt /*aLength*/)
// Optimised for word-aligned transfers, as unaligned are very rare in practice
ASM_ASSERT_DATA_PAGING_SAFE
asm("umemput_no_paging_assert:");
#ifdef __USER_MEMORY_GUARDS_ENABLED__
// Wrap the workings of this function in an internal call, so we can save/restore UMG state
asm("stmfd sp!, {r11, lr} ");
asm("subs r12, r2, #1");
asm("ldrhsb r11, [r1]"); // test access to first byte of kernel memory
asm("ldrhsb r11, [r1,r12]"); // test access to last byte of kernel memory
USER_MEMORY_GUARD_OFF(,r11,r12); // leaves UMG mode in r11
asm("bl 0f"); // call to label below
USER_MEMORY_GUARD_RESTORE(r11,r12);
asm("ldmfd sp!, {r11, pc} ");
asm("tsteq r1, #3 ");
asm("beq _umemput_word_aligned ");
asm("2: "); // Copy 0 - 7 bytes
asm("subs r2, r2, #1 ");
asm("ldrplb r3, [r1], #1 ");
asm("strplbt r3, [r0], #1 ");
asm("1: "); // Word-align dest
asm("movs r3, r0, lsl #30 ");
asm("rsbs r3, r3, #0 "); // 01->c0000000 (MI,VC) 10->80000000 (MI,VS) 11->40000000 (PL,VC)
asm("sub r2, r2, r3, lsr #30 ");
asm("ldrmib r3, [r1], #1 ");
asm("strmibt r3, [r0], #1 ");
asm("ldrmib r3, [r1], #1 ");
asm("strmibt r3, [r0], #1 ");
asm("ldrvcb r3, [r1], #1 ");
asm("strvcbt r3, [r0], #1 "); // r0 now word aligned
asm("movs r3, r1, lsl #31 ");
asm("bic r1, r1, #3 ");
asm("bcs 3f "); // branch if src mod 4 = 2 or 3
asm("bpl _umemput_word_aligned "); // branch if src mod 4 = 0
asm("4: "); // get here if src mod 4 = 1
asm("subs r2, r2, #4 ");
asm("ldrge r3, [r1], #4 ");
asm("ldrge ip, [r1] ");
asm("movge r3, r3, lsr #8 ");
asm("orrge r3, r3, ip, lsl #24 ");
asm("strget r3, [r0], #4 ");
asm("add r1, r1, #1 ");
asm("b _umemput_do_end ");
asm("3: "); // get here if src mod 4 = 2 or 3
asm("bmi 5f "); // branch if 3
asm("subs r2, r2, #4 ");
asm("ldrge r3, [r1], #4 ");
asm("ldrge ip, [r1] ");
asm("movge r3, r3, lsr #16 ");
asm("orrge r3, r3, ip, lsl #16 ");
asm("strget r3, [r0], #4 ");
asm("add r1, r1, #2 ");
asm("b _umemput_do_end ");
asm("5: "); // get here if src mod 4 = 3
asm("subs r2, r2, #4 ");
asm("ldrge r3, [r1], #4 ");
asm("ldrge ip, [r1] ");
asm("movge r3, r3, lsr #24 ");
asm("orrge r3, r3, ip, lsl #8 ");
asm("strget r3, [r0], #4 ");
asm("add r1, r1, #3 ");
asm("_umemput_do_end: "); // z set if done, else r2 == length remaining - 4
asm("adds r2, r2, #2 "); // r2 = -1 if 1 left, 0 if 2 left, +1 if 3 left
asm("ldrplb r3, [r1], #1 ");
asm("strplbt r3, [r0], #1 ");
asm("ldrplb r3, [r1], #1 ");
asm("strplbt r3, [r0], #1 ");
asm("ldrneb r3, [r1], #1 ");
asm("strnebt r3, [r0], #1 ");
#endif // USE_REPLACEMENT_UMEMPUT
EXPORT_C __NAKED__ void kumemset(TAny* /*aAddr*/, const TUint8 /*aValue*/, TInt /*aLength*/)
asm("mrs r3, spsr "); // r3=spsr_svc
asm("tst r3, #0x0f "); // test for user mode
asm("bne memset "); // if not, just do memset
// otherwise fall through to umemset
EXPORT_C __NAKED__ void umemset(TAny* /*aUserAddr*/, const TUint8 /*aValue*/, TInt /*aLength*/)
ASM_ASSERT_DATA_PAGING_SAFE
#ifdef __USER_MEMORY_GUARDS_ENABLED__
// Wrap the workings of this function in an internal call, so we can save/restore UMG state
asm("stmfd sp!, {r11, lr} ");
USER_MEMORY_GUARD_OFF(,r11,r12); // leaves UMG mode in r11
asm("bl 0f"); // call to label below
USER_MEMORY_GUARD_RESTORE(r11,r12);
asm("ldmfd sp!, {r11, pc} ");
asm("subs r2, r2, #1 ");
asm("strplbt r1, [r0], #1 ");
asm("and r1, r1, #0xff ");
asm("orr r1, r1, r1, lsl #8 ");
asm("orr r1, r1, r1, lsl #16 ");
asm("movs r3, r0, lsl #30 ");
asm("rsbs r3, r3, #0 "); // 01->c0000000 (MI,VC) 10->80000000 (MI,VS) 11->40000000 (PL,VC)
asm("strmibt r1, [r0], #1 "); // if 01 or 10 do 2 byte stores
asm("strmibt r1, [r0], #1 ");
asm("strvcbt r1, [r0], #1 "); // if 01 or 11 do 1 byte store
asm("sub r2, r2, r3, lsr #30 ");
asm("3: "); // r0 now word aligned
asm("subs r2, r2, #4 ");
asm("strplt r1, [r0], #4 ");
__JUMP(eq,lr); // return if finished
asm("adds r2, r2, #2 "); // -1 if 1 left, 0 if 2 left, +1 if 3 left
asm("strplbt r1, [r0], #1 ");
asm("strplbt r1, [r0], #1 ");
asm("strnebt r1, [r0], #1 ");
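// Illustrative sketch (added for exposition, not part of the original routine): a plain C model
// of umemset with a hypothetical name. The real routine above replicates the fill byte across a
// word with the two ORRs so the aligned middle of the buffer can be written with word stores
// (STRPLT), and finishes the 0-3 byte tail with the flag-conditional byte stores.
static void umemset_model(TAny* aUserAddr, const TUint8 aValue, TInt aLength)
	{
	TUint8* d = (TUint8*)aUserAddr;
	for (TInt i = 0; i < aLength; ++i)
		d[i] = aValue;	// every store in the real code is a checked user-mode STRT/STRBT
	}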