Update contrib.
// Copyright (c) 1995-2009 Nokia Corporation and/or its subsidiary(-ies).
// All rights reserved.
// This component and the accompanying materials are made available
// under the terms of the License "Eclipse Public License v1.0"
// which accompanies this distribution, and is available
// at the URL "http://www.eclipse.org/legal/epl-v10.html".
// Initial Contributors:
// Nokia Corporation - initial contribution.
// e32\include\cpudefs.h
// WARNING: This file contains some APIs which are internal and are subject
// to change without notice. Such APIs should therefore not be used
// outside the Kernel and Hardware Services package.
#define __ARM_ASSEMBLER_ISA__ 4 // "Instruction not supported on targeted CPU :("
#define __ARM_ASSEMBLER_ISA__ 4
// Should really have been __CPU_CORTEX_A8__ instead of __CPU_CORTEX_A8N__
#ifdef __CPU_CORTEX_A8N__
#undef __CPU_CORTEX_A8__
#define __CPU_CORTEX_A8__
#undef __CPU_SPECIFIED
#if defined(__CPU_ARM710T__)
#define __CPU_SPECIFIED
#elif defined(__CPU_ARM720T__)
#define __CPU_SPECIFIED
#elif defined(__CPU_SA1__)
#define __CPU_SPECIFIED
#elif defined(__CPU_ARM920T__)
#define __CPU_SPECIFIED
#elif defined(__CPU_ARM925T__)
#define __CPU_SPECIFIED
#elif defined(__CPU_XSCALE__)
#define __CPU_SPECIFIED
#elif defined(__CPU_ARM926J__)
#define __CPU_SPECIFIED
#elif defined(__CPU_ARM1136__)
#define __CPU_SPECIFIED
#elif defined(__CPU_ARM1176__)
#define __CPU_SPECIFIED
#elif defined(__CPU_ARM11MP__)
#define __CPU_SPECIFIED
#elif defined(__CPU_CORTEX_A8__)
#define __CPU_SPECIFIED
#elif defined(__CPU_CORTEX_A9__)
#define __CPU_SPECIFIED
#elif defined(__CPU_GENERIC_ARM4__)
#define __CPU_SPECIFIED
#if defined(__CPU_SPECIFIED)
#if !defined(__CPU_ARM11MP__) && !defined(__CPU_CORTEX_A9__)
#error Specified CPU does not support SMP
// If no CPU specified, assume lowest common denominator SMP
#define __CPU_ARM11MP__
#if defined(__CPU_ARM710T__)
#elif defined(__CPU_ARM720T__)
#elif defined(__CPU_SA1__)
#elif defined(__CPU_ARM920T__)
#elif defined(__CPU_ARM925T__)
#elif defined(__CPU_XSCALE__)
#define __ENHANCED_DSP_INSTRUCTIONS
#elif defined(__CPU_ARM926J__)
#define __ENHANCED_DSP_INSTRUCTIONS
#define __CPU_HAS_JAZELLE
#elif defined(__CPU_ARM1136__)
#elif defined(__CPU_ARM1176__)
#elif defined(__CPU_ARM11MP__)
#define __CPU_ARM_HAS_WFI
#define __CPU_ARM_HAS_WFE_SEV
#elif defined(__CPU_CORTEX_A8__)
#elif defined(__CPU_CORTEX_A9__)
#elif defined(__CPU_GENERIC_ARM4__)
// #error Unsupported CPU
#define __CPU_UNKNOWN
// Macros for emitting single bytes of machine code
# define BYTE(x) _asm byte x
# define BYTE(x) asm(".byte "#x);
# define BYTE(x) _asm _emit x
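// Illustrative sketch, not part of the original header: BYTE can be used to
// hand-emit an X86 opcode for which no mnemonic is convenient. HypBreak is
// hypothetical and would live in a source file that includes this header.
void HypBreak()
	{
	BYTE(0xcc)		// x86 'int 3' breakpoint, emitted inline
	}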
// thiscall is different on GCC: 'this' arrives on the stack rather than in
// ecx, so the prologs below move it into ecx and shuffle the real arguments
// down, letting the same naked function bodies serve both conventions.
#define THISCALL_PROLOG0() asm("mov ecx,[esp+4]");
#define THISCALL_PROLOG1() asm("mov ecx,[esp+4] \n mov eax,[esp+8] \n mov [esp+4],eax");
#define THISCALL_PROLOG2() asm("mov ecx,[esp+4] \n mov eax,[esp+8] \n mov [esp+4],eax \n mov eax,[esp+12] \n mov [esp+8],eax");
#define THISCALL_PROLOG3() asm("mov ecx,[esp+4] \n mov eax,[esp+8] \n mov [esp+4],eax \n mov eax,[esp+12] \n mov [esp+8],eax \n mov eax,[esp+16] \n mov [esp+12],eax");
#define THISCALL_PROLOG0_BIGRETVAL() asm("mov ecx,[esp+8]");
#define THISCALL_PROLOG1_BIGRETVAL() asm("mov ecx,[esp+8] \n mov eax,[esp+12] \n mov [esp+8],eax");
#define THISCALL_EPILOG0() asm("ret");
#define THISCALL_EPILOG1() asm("ret");
#define THISCALL_EPILOG2() asm("ret");
#define THISCALL_EPILOG3() asm("ret");
#define THISCALL_EPILOG0_BIGRETVAL() asm("ret 4");
#define THISCALL_EPILOG1_BIGRETVAL() asm("ret 4");
#define THISCALL_PROLOG0()
#define THISCALL_PROLOG1()
#define THISCALL_PROLOG2()
#define THISCALL_PROLOG3()
#define THISCALL_PROLOG0_BIGRETVAL()
#define THISCALL_PROLOG1_BIGRETVAL()
#define THISCALL_EPILOG0() __asm ret
#define THISCALL_EPILOG1() __asm ret 4
#define THISCALL_EPILOG2() __asm ret 8
#define THISCALL_EPILOG3() __asm ret 12
#define THISCALL_EPILOG0_BIGRETVAL() __asm ret 4
#define THISCALL_EPILOG1_BIGRETVAL() __asm ret 8
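// Illustrative sketch, not part of the original header: a naked member
// function written once, with the prolog/epilog pair normalising where
// 'this' and the argument arrive and how the stack is cleaned up. The class
// HypTally is hypothetical and the MSVC-flavoured _asm body is shown; a GCC
// build would use the equivalent asm() statements.
class HypTally				// hypothetical, for illustration only
	{
public:
	void Add(TInt aDelta);
	TInt iCount;
	};

__declspec(naked) void HypTally::Add(TInt /*aDelta*/)
	{
	THISCALL_PROLOG1()		// afterwards: ecx = this, [esp+4] = aDelta
	_asm mov eax, [esp+4]
	_asm add [ecx], eax		// this->iCount += aDelta (iCount at offset 0)
	THISCALL_EPILOG1()		// ret or ret 4, as the convention requires
	}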
// Workaround for an MSVC++ 5.0 bug: MSVC incorrectly fixes up conditional
// jumps when the destination is a C++ function, so invert the condition and
// branch over an unconditional jmp to the destination instead.
#if defined(__VC32__) && (_MSC_VER==1100) // untested on MSVC++ > 5.0
# define _ASM_j(cond,dest) _asm jn##cond short $+11 _asm jmp dest
# define _ASM_jn(cond,dest) _asm j##cond short $+11 _asm jmp dest
# if defined __GCC32__
# define _ASM_j(cond,dest) asm("j"#cond " %a0": : "i"(dest));
# define _ASM_jn(cond,dest) asm("jn"#cond " %a0": :"i"(dest));
# define _ASM_j(cond,dest) _asm j##cond dest
# define _ASM_jn(cond,dest) _asm jn##cond dest
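// Illustrative sketch, not part of the original header (MSVC-flavoured,
// naked): branch to the hypothetical non-returning HypPanic() when eax is
// zero, falling through to a normal return otherwise. The helper hides the
// MSVC++ 5.0 fixup bug described above.
extern "C" void HypPanic();

__declspec(naked) void HypCheckEax()
	{
	_asm test eax, eax
	_ASM_j(z, HypPanic)		// eax == 0 ? jump to HypPanic : fall through
	_asm ret
	}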
//#define __MINIMUM_MACHINE_CODE__
#if defined(__WINS__)
#define __NAKED__ __declspec( naked )
#ifndef __MINIMUM_MACHINE_CODE__
//#define __MEM_MACHINE_CODED__
# define __NAKED__ // GCC does not support naked functions on X86
# define __NAKED__ __declspec( naked )
# ifndef __MINIMUM_MACHINE_CODE__
# define __MEM_MACHINE_CODED__
#if defined(__MARM__)
#ifndef __NAKED__ // should be defined in prefix file
#define __NAKED__ __declspec( naked )
#define __NAKED__ ____ONLY_USE_NAKED_IN_CIA____
#if defined(__MARM_ARMV5__) && !defined(__CPU_ARMV5T)
#ifndef __MINIMUM_MACHINE_CODE__
#if !defined(__BIG_ENDIAN__)
#define __MEM_MACHINE_CODED__
#define __DES_MACHINE_CODED__
#define __REGIONS_MACHINE_CODED__
#define __DES8_MACHINE_CODED__
#define __DES16_MACHINE_CODED__
#define __HEAP_MACHINE_CODED__
#define __REALS_MACHINE_CODED__
#define __COBJECT_MACHINE_CODED__
#define __CACTIVESCHEDULER_MACHINE_CODED__
#define __CSERVER_MACHINE_CODED__
#define __ARRAY_MACHINE_CODED__
#define __HUFFMAN_MACHINE_CODED__
#if defined(__MARM_ARM4__) || defined(__MARM_ARMI__) || defined(__MARM_THUMB__) || defined(__MARM_ARMV4__) || defined(__MARM_ARMV5__)
#define __DES16_MACHINE_CODED_HWORD__
#define __CPU_64BIT_MULTIPLY
#define __CPU_ARM_SUPPORTS_BX
#define __CPU_64BIT_MULTIPLY
#define __CPU_ARM_SUPPORTS_BX
#define __CPU_ARM_SUPPORTS_BLX
#define __CPU_64BIT_MULTIPLY
#define __CPU_ARM_LDR_PC_SETS_TBIT
#define __CPU_ARM_HAS_CLZ
#define __CPU_ARM_HAS_PLD
#ifdef __ENHANCED_DSP_INSTRUCTIONS
#define __CPU_ARM_HAS_MCRR
#define __CPU_ARM_HAS_LDRD_STRD
#if defined(__CPU_ARMV6) || defined(__CPU_ARMV7)
#define __CPU_ARM_SUPPORTS_BX
#define __CPU_ARM_SUPPORTS_BLX
#define __CPU_64BIT_MULTIPLY
#define __CPU_ARM_LDR_PC_SETS_TBIT
#define __CPU_ARM_HAS_CLZ
#define __CPU_ARM_HAS_MCRR
#define __CPU_ARM_HAS_LDREX_STREX
#define __CPU_ARM_HAS_LDRD_STRD
#define __CPU_ARM_HAS_PLD
#define __CPU_ARM_HAS_CPS
#define __CPU_ARM_HAS_SPLIT_FSR
#if !defined(__CPU_ARM1136__) && !defined(__CPU_ARM11MP__)
#define __CPU_ARM_HAS_CP15_IFAR
#define __CPU_ARM_SUPPORTS_USER_MODE_BARRIERS
#if defined(__CPU_ARMV7) || (defined(__CPU_ARM1136__) && defined(__CPU_ARM1136_IS_R1__)) || defined(__CPU_ARM1176__) || defined(__CPU_ARM11MP__)
#define __CPU_ARM_HAS_LDREX_STREX_V6K
#define __CPU_HAS_CP15_THREAD_ID_REG
#if defined(__MARM_ARM4T__) || defined(__MARM_INTERWORK__)
#define __SUPPORT_THUMB_INTERWORKING
#if defined(__CPU_ARMV7)
#define __CPU_ARM_HAS_WFI
#define __CPU_ARM_HAS_WFE_SEV
#define __CPU_SUPPORT_THUMB2EE
// ARM CPU macros to allow Thumb/non-Thumb builds
#define EXC_TRAP_CTX_SZ 10 // nonvolatile registers (r4-r11) + sp + pc
#ifdef __SUPPORT_THUMB_INTERWORKING
#define __JUMP(cc,r) asm("bx"#cc " "#r )
#ifdef __CPU_ARM_LDR_PC_SETS_TBIT
#define __POPRET(rlist) asm("ldmfd sp!, {"rlist"pc} ")
#define __CPOPRET(cc,rlist) asm("ldm"#cc "fd sp!, {"rlist"pc} ")
#define __POPRET(rlist) asm("ldmfd sp!, {"rlist"lr} ");\
#define __CPOPRET(cc,rlist) asm("ldm"#cc "fd sp!, {"rlist"lr} ");\
#define __JUMP(cc,r) asm("mov"#cc " pc, "#r )
#define __POPRET(rlist) asm("ldmfd sp!, {"rlist"pc} ")
#define __CPOPRET(cc,rlist) asm("ldm"#cc "fd sp!, {"rlist"pc} ")
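// Illustrative sketch, not part of the original header: leaf and non-leaf
// CIA routines built from the macros above. HypMax and HypSum3 are
// hypothetical; __JUMP and __POPRET expand to interworking-safe returns on
// Thumb-capable builds and to plain mov/ldm sequences otherwise.
__NAKED__ TInt HypMax(TInt /*a*/, TInt /*b*/)
	{
	asm("cmp r0, r1 ");
	asm("movlt r0, r1 ");		// r0 = max(a,b)
	__JUMP(,lr);
	}

__NAKED__ TInt HypSum3(TInt /*a*/, TInt /*b*/, TInt /*c*/)
	{
	asm("stmfd sp!, {r4,lr} ");
	asm("add r4, r0, r1 ");
	asm("add r0, r4, r2 ");
	__POPRET("r4,");			// restore r4 and return
	}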
#ifdef __CPU_ARM_SUPPORTS_BLX
#if __ARM_ASSEMBLER_ISA__ >= 5
#define BLX(Rm) asm("blx r" #Rm)
#define BLX(Rm) asm(".word %a0" : : "i" ((TInt)( 0xe12fff30 | (Rm) )))
#define __JUMPL(Rm) BLX(Rm)
#ifdef __SUPPORT_THUMB_INTERWORKING
#define __JUMPL(Rm) asm("mov lr, pc "); \
#define __JUMPL(Rm) asm("mov lr, pc "); \
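// Illustrative sketch, not part of the original header: an indirect call
// through a register with __JUMPL. HypCallWith is hypothetical; r0 holds the
// target and r1 its argument, and the target is parked in r4 so that the
// argument registers stay free for the callee.
__NAKED__ TInt HypCallWith(TInt (* /*aFunc*/)(TInt), TInt /*aArg*/)
	{
	asm("stmfd sp!, {r4,lr} ");
	asm("mov r4, r0 ");
	asm("mov r0, r1 ");			// first argument into r0
	__JUMPL(4);					// call the function whose address is in r4
	__POPRET("r4,");			// the callee's result stays in r0
	}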
#ifdef __MARM_THUMB__
#define __SWITCH_TO_ARM asm("push {r0} ");\
	asm("add r0, pc, #4 ");\
	asm("ldr r0, [sp], #4 ")
#define __END_ARM asm(".code 16 ")
#define __SWITCH_TO_ARM asm(".code 32 ");
#define __SWITCH_TO_ARM
#ifdef __CPU_ARM_HAS_CLZ
#if __ARM_ASSEMBLER_ISA__ >= 5
#define CLZ(Rd,Rm) asm("clz r" #Rd ", r" #Rm)
#define CLZ(Rd,Rm) asm(".word %a0" : : "i" ((TInt)0xe16f0f10|((Rd)<<12)|(Rm)));
#define CLZcc(cc,Rd,Rm) asm(".word %a0" : : "i" ((TInt)0x016f0f10|((cc)<<28)|((Rd)<<12)|(Rm)));
#ifdef __CPU_ARM_HAS_MCRR
#define MCRR(cop,opc,Rd,Rn,CRm) asm(".word %a0" : : "i" ((TInt)0xec400000|((Rn)<<16)|((Rd)<<12)|((cop)<<8)|((opc)<<4)|(CRm)));
#define MCRRcc(cc,cop,opc,Rd,Rn,CRm) asm(".word %a0" : : "i" ((TInt)0x0c400000|((cc)<<28)|((Rn)<<16)|((Rd)<<12)|((cop)<<8)|((opc)<<4)|(CRm)));
#define MRRC(cop,opc,Rd,Rn,CRm) asm(".word %a0" : : "i" ((TInt)0xec500000|((Rn)<<16)|((Rd)<<12)|((cop)<<8)|((opc)<<4)|(CRm)));
#define MRRCcc(cc,cop,opc,Rd,Rn,CRm) asm(".word %a0" : : "i" ((TInt)0x0c500000|((cc)<<28)|((Rn)<<16)|((Rd)<<12)|((cop)<<8)|((opc)<<4)|(CRm)));
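// Illustrative sketch, not part of the original header: CLZ used to find the
// index of the most significant set bit, on CPUs where __CPU_ARM_HAS_CLZ is
// defined. HypHighestBit is hypothetical and returns -1 for an input of zero.
__NAKED__ TInt HypHighestBit(TUint32 /*aValue*/)
	{
	CLZ(1,0);					// r1 = number of leading zeros in r0
	asm("rsb r0, r1, #31 ");	// r0 = 31 - clz(aValue)
	__JUMP(,lr);
	}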
#ifdef __CPU_ARM_HAS_LDREX_STREX
// LDREX Rd, [Rn] - load from [Rn] into Rd exclusive
// STREX Rd, Rm, [Rn] - store Rm into [Rn] with exclusive access; success/fail indicator into Rd
#define LDREXcc(cc,Rd,Rn) asm(".word %a0" : : "i" ((TInt)(0x01900f9f|((cc)<<28)|((Rd)<<12)|((Rn)<<16))));
#define STREXcc(cc,Rd,Rm,Rn) asm(".word %a0" : : "i" ((TInt)(0x01800f90|((cc)<<28)|((Rd)<<12)|(Rm)|((Rn)<<16))));
#if __ARM_ASSEMBLER_ISA__ >= 6
#define LDREX(Rd,Rn) asm("ldrex r" #Rd ", [r" #Rn "] ")
#define STREX(Rd,Rm,Rn) asm("strex r" #Rd ", r" #Rm ", [r" #Rn "] ")
#define LDREX(Rd,Rn) asm(".word %a0" : : "i" ((TInt)(0x01900f9f|((CC_AL)<<28)|((Rd)<<12)|((Rn)<<16))));
#define STREX(Rd,Rm,Rn) asm(".word %a0" : : "i" ((TInt)(0x01800f90|((CC_AL)<<28)|((Rd)<<12)|(Rm)|((Rn)<<16))));
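// Illustrative sketch, not part of the original header: the canonical
// load-exclusive / store-exclusive retry loop, shown as a hypothetical
// atomic increment for CPUs with __CPU_ARM_HAS_LDREX_STREX defined.
__NAKED__ void HypAtomicInc(volatile TInt* /*aCount*/)
	{
	asm("1: ");
	LDREX(1,0);					// r1 = *aCount, marking [r0] for exclusive access
	asm("add r1, r1, #1 ");
	STREX(2,1,0);				// try *aCount = r1; r2 = 0 on success, 1 if the reservation was lost
	asm("cmp r2, #0 ");
	asm("bne 1b ");				// lost the reservation - reload and retry
	__JUMP(,lr);
	}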
#ifdef __CPU_ARM_HAS_LDREX_STREX_V6K
// Byte, halfword, doubleword STREX/LDREX & unconditional CLREX
#if __ARM_ASSEMBLER_ISA__ >= 6
#define LDREXB(Rd,Rn) asm("ldrexb r" #Rd ", [r" #Rn "] ")
#define STREXB(Rd,Rm,Rn) asm("strexb r" #Rd ", r" #Rm ", [r" #Rn "] ")
#define LDREXH(Rd,Rn) asm("ldrexh r" #Rd ", [r" #Rn "] ")
#define STREXH(Rd,Rm,Rn) asm("strexh r" #Rd ", r" #Rm ", [r" #Rn "] ")
#define LDREXD(Rd,Rn) asm("ldrexd r" #Rd ", [r" #Rn "] ")
#define STREXD(Rd,Rm,Rn) asm("strexd r" #Rd ", r" #Rm ", [r" #Rn "] ")
#define LDREXB(Rd,Rn) asm(".word %a0" : : "i" ((TInt)(0x01D00f9f|((CC_AL)<<28)|((Rd)<<12)|((Rn)<<16))));
#define STREXB(Rd,Rm,Rn) asm(".word %a0" : : "i" ((TInt)(0x01C00f90|((CC_AL)<<28)|((Rd)<<12)|(Rm)|((Rn)<<16))));
#define LDREXH(Rd,Rn) asm(".word %a0" : : "i" ((TInt)(0x01f00f9f|((CC_AL)<<28)|((Rd)<<12)|((Rn)<<16))));
#define STREXH(Rd,Rm,Rn) asm(".word %a0" : : "i" ((TInt)(0x01e00f90|((CC_AL)<<28)|((Rd)<<12)|(Rm)|((Rn)<<16))));
#define LDREXD(Rd,Rn) asm(".word %a0" : : "i" ((TInt)(0x01b00f9f|((CC_AL)<<28)|((Rd)<<12)|((Rn)<<16))));
#define STREXD(Rd,Rm,Rn) asm(".word %a0" : : "i" ((TInt)(0x01a00f90|((CC_AL)<<28)|((Rd)<<12)|(Rm)|((Rn)<<16))));
#if !defined(__CPU_ARM1136__) || defined(__CPU_ARM1136_ERRATUM_406973_FIXED)
#define __CPU_ARM_HAS_WORKING_CLREX
#if __ARM_ASSEMBLER_ISA__ >= 6
#define CLREX asm("clrex ")
#define CLREX asm(".word %a0" : : "i" ((TInt)(0xf57ff01f)));
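// Illustrative sketch, not part of the original header: a byte-sized
// test-and-set using the V6K exclusives, with CLREX dropping the reservation
// on the contended path. HypTryLock is hypothetical; it returns 1 if the
// flag was claimed and 0 if it was already set.
__NAKED__ TInt HypTryLock(volatile TUint8* /*aFlag*/)
	{
	asm("1: ");
	LDREXB(1,0);				// r1 = *aFlag (exclusive)
	asm("cmp r1, #0 ");
	asm("bne 2f ");				// already held
	asm("mov r1, #1 ");
	STREXB(2,1,0);				// try to claim it; r2 = 0 on success
	asm("cmp r2, #0 ");
	asm("bne 1b ");
	asm("mov r0, #1 ");
	__JUMP(,lr);
	asm("2: ");
#ifdef __CPU_ARM_HAS_WORKING_CLREX
	CLREX;						// clear the outstanding reservation before giving up
#endif
	asm("mov r0, #0 ");
	__JUMP(,lr);
	}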
#ifdef __CPU_ARM_HAS_LDRD_STRD
#if __ARM_ASSEMBLER_ISA__ >= 5
#define LDRD(Rd,Rn) asm("ldrd r" #Rd ", [r" #Rn "] ")
#define STRD(Rd,Rn) asm("strd r" #Rd ", [r" #Rn "] ")
#define LDRD(Rd,Rn) asm(".word %a0" : : "i" ((TInt)( 0xe1c000d0 | ((Rn)<<16) | ((Rd)<<12) )))
#define STRD(Rd,Rn) asm(".word %a0" : : "i" ((TInt)( 0xe1c000f0 | ((Rn)<<16) | ((Rd)<<12) )))
#define LDRD_ioff(Rd,Rn,off) asm(".word %a0" : : "i" ((TInt)( 0xe1c000d0 | ((Rn)<<16) | ((Rd)<<12) | (((off)&0xf0)<<4) | ((off)&0x0f) )))
#define STRD_ioff(Rd,Rn,off) asm(".word %a0" : : "i" ((TInt)( 0xe1c000f0 | ((Rn)<<16) | ((Rd)<<12) | (((off)&0xf0)<<4) | ((off)&0x0f) )))
#if defined(__CPU_ARM_HAS_PLD) && !defined(__CPU_ARM926J__) && !defined(__CPU_UNKNOWN) // PLD is a no-op on ARM926
#if __ARM_ASSEMBLER_ISA__ >= 5
#define PLD(Rn) asm("pld [r" #Rn "] ")
#define PLD(Rn) asm(".word %a0" : : "i" ((TInt)( 0xf5d0f000 | ((Rn)<<16) )))
#define PLD_ioff(Rn, off) asm(".word %a0" : : "i" ((TInt)( 0xf5d0f000 | ((Rn)<<16) | (off) ))) // preload with immediate offset
#define PLD_noff(Rn, off) asm(".word %a0" : : "i" ((TInt)( 0xf550f000 | ((Rn)<<16) | (off) ))) // preload with negative offset
#define PLD_ioff(Rn, off)
#define PLD_noff(Rn, off)
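// Illustrative sketch, not part of the original header: a copy loop that
// prefetches ahead with PLD_ioff and moves 8 bytes per iteration with
// LDRD/STRD, assuming __CPU_ARM_HAS_LDRD_STRD is defined. HypCopy8 is
// hypothetical; both pointers must be 8-byte aligned and aBytes a non-zero
// multiple of 8. Where PLD is not available the prefetch compiles away.
__NAKED__ void HypCopy8(TUint32* /*aDst*/, const TUint32* /*aSrc*/, TInt /*aBytes*/)
	{
	asm("stmfd sp!, {r4,r5,lr} ");
	asm("1: ");
	PLD_ioff(1, 32);			// hint: prefetch 32 bytes ahead of the source
	LDRD(4,1);					// r4:r5 = 8 bytes at [r1]
	STRD(4,0);					// store them at [r0]
	asm("add r0, r0, #8 ");
	asm("add r1, r1, #8 ");
	asm("subs r2, r2, #8 ");
	asm("bhi 1b ");
	__POPRET("r4,r5,");
	}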
#ifdef __CPU_HAS_CP15_THREAD_ID_REG
#define GET_RWRW_TID(cc,r) asm("mrc"#cc" p15, 0, "#r", c13, c0, 2 ");
#define GET_RWRO_TID(cc,r) asm("mrc"#cc" p15, 0, "#r", c13, c0, 3 ");
#define GET_RWNO_TID(cc,r) asm("mrc"#cc" p15, 0, "#r", c13, c0, 4 ");
#define SET_RWRW_TID(cc,r) asm("mcr"#cc" p15, 0, "#r", c13, c0, 2 ");
#define SET_RWRO_TID(cc,r) asm("mcr"#cc" p15, 0, "#r", c13, c0, 3 ");
#define SET_RWNO_TID(cc,r) asm("mcr"#cc" p15, 0, "#r", c13, c0, 4 ");
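// Illustrative sketch, not part of the original header: reading the
// privileged-only CP15 c13 thread ID slot, which the SMP kernel
// conventionally uses to hold a per-CPU data pointer. HypPerCpuPtr is
// hypothetical.
__NAKED__ TAny* HypPerCpuPtr()
	{
	GET_RWNO_TID(,r0);			// r0 = "read/write, no user access" thread ID register
	__JUMP(,lr);
	}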
#ifdef __CPU_SUPPORT_THUMB2EE
#define GET_THUMB2EE_HNDLR_BASE(cc,r) asm("mrc"#cc" p14, 6, "#r", c1, c0, 0 ")
#define SET_THUMB2EE_HNDLR_BASE(cc,r) asm("mcr"#cc" p14, 6, "#r", c1, c0, 0 ")
#if defined(__CPU_ARMV7)
#define ARM_DMB_gen(opt) asm(".word %a0" : : "i" ((TInt)(0xf57ff050 | (opt) )) )
#define ARM_DSB_gen(opt) asm(".word %a0" : : "i" ((TInt)(0xf57ff040 | (opt) )) )
#define ARM_ISB_gen(opt) asm(".word %a0" : : "i" ((TInt)(0xf57ff060 | (opt) )) )
#define ARM_DMBSY ARM_DMB_gen(0xf) // full system DMB
#define ARM_DSBSY ARM_DSB_gen(0xf) // full system DSB
#define ARM_DMBST ARM_DMB_gen(0xe) // full system DMB, orders writes only
#define ARM_DSBST ARM_DSB_gen(0xe) // full system DSB, orders writes only
#define ARM_DMBSH ARM_DMB_gen(0xb) // DMB encompassing inner-shareable domain
#define ARM_DSBSH ARM_DSB_gen(0xb) // DSB encompassing inner-shareable domain
#define ARM_DMBSHST ARM_DMB_gen(0xa) // DMB encompassing inner-shareable domain, orders writes only
#define ARM_DSBSHST ARM_DSB_gen(0xa) // DSB encompassing inner-shareable domain, orders writes only
#define ARM_ISBSY ARM_ISB_gen(0xf) // full system ISB
#define ARM_NOP asm(".word 0xe320f000 ")
#define ARM_YIELD asm(".word 0xe320f001 ")
#define __DATA_MEMORY_BARRIER__(reg) ARM_DMBSH
#define __DATA_MEMORY_BARRIER_Z__(reg) asm("mov "#reg", #0"); ARM_DMBSH
#define __DATA_SYNC_BARRIER__(reg) ARM_DSBSH
#define __DATA_SYNC_BARRIER_Z__(reg) asm("mov "#reg", #0"); ARM_DSBSH
#define __INST_SYNC_BARRIER__(reg) ARM_ISBSY
#define __INST_SYNC_BARRIER_Z__(reg) asm("mov "#reg", #0"); ARM_ISBSY
#elif defined(__CPU_ARM11MP__)
#define ARM_DMB(reg) asm("mcr p15, 0, "#reg", c7, c10, 5 ")
#define ARM_DSB(reg) asm("mcr p15, 0, "#reg", c7, c10, 4 ")
#define ARM_ISB(reg) asm("mcr p15, 0, "#reg", c7, c5, 4 ")
#define ARM_NOP asm(".word 0xe320f000 ")
#define ARM_YIELD asm(".word 0xe320f001 ")
#define __DATA_MEMORY_BARRIER__(reg) ARM_DMB(reg)
#define __DATA_MEMORY_BARRIER_Z__(reg) asm("mov "#reg", #0"); ARM_DMB(reg)
#define __DATA_SYNC_BARRIER__(reg) ARM_DSB(reg)
#define __DATA_SYNC_BARRIER_Z__(reg) asm("mov "#reg", #0"); ARM_DSB(reg)
#define __INST_SYNC_BARRIER__(reg) ARM_ISB(reg)
#define __INST_SYNC_BARRIER_Z__(reg) asm("mov "#reg", #0"); ARM_ISB(reg)
#elif defined(__CPU_ARMV6__)
#define ARM_DMB(reg) asm("mcr p15, 0, "#reg", c7, c10, 5 ")
#define ARM_DSB(reg) asm("mcr p15, 0, "#reg", c7, c10, 4 ")
#define ARM_ISB(reg) asm("mcr p15, 0, "#reg", c7, c5, 4 ")
#define __DATA_MEMORY_BARRIER__(reg) ARM_DMB(reg)
#define __DATA_MEMORY_BARRIER_Z__(reg) asm("mov "#reg", #0"); ARM_DMB(reg)
#define __DATA_SYNC_BARRIER__(reg) ARM_DSB(reg)
#define __DATA_SYNC_BARRIER_Z__(reg) asm("mov "#reg", #0"); ARM_DSB(reg)
#define __INST_SYNC_BARRIER__(reg) ARM_ISB(reg)
#define __INST_SYNC_BARRIER_Z__(reg) asm("mov "#reg", #0"); ARM_ISB(reg)
#define __DATA_MEMORY_BARRIER__(reg)
#define __DATA_MEMORY_BARRIER_Z__(reg) asm("mov "#reg", #0")
#define __DATA_SYNC_BARRIER__(reg) asm("mcr p15, 0, "#reg", c7, c10, 4 ")
#define __DATA_SYNC_BARRIER_Z__(reg) asm("mov "#reg", #0"); asm("mcr p15, 0, "#reg", c7, c10, 4 ")
#define __INST_SYNC_BARRIER__(reg)
#define __INST_SYNC_BARRIER_Z__(reg) asm("mov "#reg", #0")
#define __SMP_DATA_MEMORY_BARRIER__(reg) __DATA_MEMORY_BARRIER__(reg)
#define __SMP_DATA_MEMORY_BARRIER_Z__(reg) __DATA_MEMORY_BARRIER_Z__(reg)
#define __SMP_DATA_SYNC_BARRIER__(reg) __DATA_SYNC_BARRIER__(reg)
#define __SMP_DATA_SYNC_BARRIER_Z__(reg) __DATA_SYNC_BARRIER_Z__(reg)
#define __SMP_INST_SYNC_BARRIER__(reg) __INST_SYNC_BARRIER__(reg)
#define __SMP_INST_SYNC_BARRIER_Z__(reg) __INST_SYNC_BARRIER_Z__(reg)
#define __SMP_DATA_MEMORY_BARRIER__(reg)
#define __SMP_DATA_MEMORY_BARRIER_Z__(reg) asm("mov "#reg", #0")
#define __SMP_DATA_SYNC_BARRIER__(reg)
#define __SMP_DATA_SYNC_BARRIER_Z__(reg) asm("mov "#reg", #0")
#define __SMP_INST_SYNC_BARRIER__(reg)
#define __SMP_INST_SYNC_BARRIER_Z__(reg) asm("mov "#reg", #0")
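// Illustrative sketch, not part of the original header: a spin-lock release
// written against the barrier macros so the same source works on ARMv6,
// ARMv7 and the generic fallback. HypUnlock is hypothetical; the _Z__
// variants zero the scratch register first because the pre-ARMv7 forms are
// CP15 writes whose source register should be zero.
__NAKED__ void HypUnlock(volatile TInt* /*aLock*/)
	{
	__DATA_MEMORY_BARRIER_Z__(r2);	// complete earlier accesses before releasing
	asm("mov r1, #0 ");
	asm("str r1, [r0] ");			// release the lock
	__DATA_SYNC_BARRIER__(r2);		// r2 is still zero here; push the store out
#ifdef __CPU_ARM_HAS_WFE_SEV
	ARM_SEV;						// wake any CPUs waiting in WFE
#endif
	__JUMP(,lr);
	}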
#ifdef __CPU_ARM_HAS_WFI
#define ARM_WFIcc(cc) __DATA_SYNC_BARRIER__(r0); \
	asm(".word %a0" : : "i" ((TInt)(0x0320f003 | ((cc)<<28) )) )
#define ARM_WFI ARM_WFIcc(CC_AL)
#ifdef __CPU_ARM_HAS_WFE_SEV
#define ARM_WFEcc(cc) __DATA_SYNC_BARRIER__(r0); \
	asm(".word %a0" : : "i" ((TInt)(0x0320f002 | ((cc)<<28) )) )
#if __ARM_ASSEMBLER_ISA__ >= 6
#define ARM_WFE __DATA_SYNC_BARRIER__(r0); \
#define ARM_WFE ARM_WFEcc(CC_AL)
#define ARM_SEVcc(cc) asm(".word %a0" : : "i" ((TInt)(0x0320f004 | ((cc)<<28) )) )
#if __ARM_ASSEMBLER_ISA__ >= 6
#define ARM_SEV asm("sev ")
#define ARM_SEV ARM_SEVcc(CC_AL)
#define ARM_NOP asm("nop ")
#define ARM_YIELD asm("nop ")
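// Illustrative sketch, not part of the original header: an idle routine that
// drains outstanding writes and then waits for an interrupt, falling back to
// the classic CP15 wait-for-interrupt where ARM_WFI is not defined.
// HypCpuIdle is hypothetical and is not the kernel's real idle handler.
__NAKED__ void HypCpuIdle()
	{
	asm("mov r0, #0 ");			// scratch zero for the barrier/CP15 encodings
#ifdef __CPU_ARM_HAS_WFI
	ARM_WFI;					// DSB followed by wait-for-interrupt
#else
	__DATA_SYNC_BARRIER__(r0);
	asm("mcr p15, 0, r0, c7, c0, 4 ");	// legacy wait-for-interrupt (e.g. ARM926)
#endif
	__JUMP(,lr);
	}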
// Support for throwing exceptions through ARM embedded assembler
// Should only be needed user-side
#ifndef __EH_FRAME_ADDRESS
#define __EH_FRAME_ADDRESS(reg,offset)
#define __EH_FRAME_PUSH2(reg1,reg2)
#define __EH_FRAME_SAVE1(reg,offset)
// StrongARM msr bug workaround:
// a conditional msr can cause the next instruction to be executed twice on these processors
#define __MSR_CPSR_C(cc,r) \
	asm("msr"#cc" cpsr_c," #r); \
#else // !__CPU_SA1__
#define __MSR_CPSR_C(cc,r) asm("msr"#cc" cpsr_c,"#r);
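// Illustrative sketch, not part of the original header: masking interrupts
// by writing the CPSR control field through __MSR_CPSR_C, which transparently
// includes the StrongARM padding above where needed. HypDisableInterrupts is
// hypothetical and the value 0xd3 (SVC mode, IRQ and FIQ masked) is shown
// only as an example.
__NAKED__ void HypDisableInterrupts()
	{
	asm("mov r0, #0xd3 ");
	__MSR_CPSR_C(, r0);			// cpsr_c = r0
	__JUMP(,lr);
	}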
// Causes an undefined instruction exception in both ARM and Thumb state
#define __ASM_CRASH() asm(".word 0xe7ffdeff ")
#if defined(__GNUC__)
#define __crash() asm(".word 0xe7ffdeff " : : : "memory")
#elif defined(__ARMCC__)
// RVCT doesn't let us inline an undefined instruction,
// so use a CDP to CP15 instead - doesn't work in Thumb state, but never mind
#if __ARMCC_VERSION < 310000
#define __crash() asm("cdp p15, 0, c0, c0, c0, 0 ")
// Inline assembler is deprecated in RVCT 3.1, so we use an intrinsic.
#define __crash() __cdp(15, 0x00, 0x000)
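// Illustrative sketch, not part of the original header: __crash() used as
// the last resort of a hypothetical hard-assertion macro.
#define HYP_ASSERT_ALWAYS(c)	do { if (!(c)) __crash(); } while (0)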
// Macro used to get the caller of the function containing a CHECK_PRECONDITIONS()
#if defined(__ARMCC_VERSION) && __ARMCC_VERSION >= 200000
#define PRECOND_FUNCTION_CALLER __return_address()
#if !defined(__CPU_ARM_HAS_LDREX_STREX_V6K)
#if defined(__CPU_ARM_HAS_LDREX_STREX)
#define __ATOMIC64_USE_SLOW_EXEC__
#define __ATOMIC64_USE_FAST_EXEC__
#define __ATOMIC_USE_FAST_EXEC__
#define EXC_TRAP_CTX_SZ 10 // ebx, esp, ebp, esi, edi, ds, es, fs, gs, eip
#if defined(__VC32__) || defined(__CW32__)
#define __crash() do { _asm int 0ffh } while(0)
#define __crash() asm("int 0xff " : : : "memory")
// Not available in the version of MSVC normally used
// #define PRECOND_FUNCTION_CALLER ((TLinAddr)_ReturnAddress())
#define PRECOND_FUNCTION_CALLER ((TLinAddr)__builtin_return_address(0))