os/kernelhwsrv/kernel/eka/include/cpudefs.h
changeset 0 bde4ae8d615e
// Copyright (c) 1995-2009 Nokia Corporation and/or its subsidiary(-ies).
// All rights reserved.
// This component and the accompanying materials are made available
// under the terms of the License "Eclipse Public License v1.0"
// which accompanies this distribution, and is available
// at the URL "http://www.eclipse.org/legal/epl-v10.html".
//
// Initial Contributors:
// Nokia Corporation - initial contribution.
//
// Contributors:
//
// Description:
// e32\include\cpudefs.h
//
// WARNING: This file contains some APIs which are internal and are subject
//          to change without notice. Such APIs should therefore not be used
//          outside the Kernel and Hardware Services package.
//

/**
 @file
 @internalTechnology
*/

#ifndef __CPUDEFS_H__
#define __CPUDEFS_H__

#ifdef __ARMCC__
#define	__ARM_ASSEMBLER_ISA__	4	// "Instruction not supported on targeted CPU :("
#else
#define	__ARM_ASSEMBLER_ISA__	4
#endif

// Should really have been __CPU_CORTEX_A8__ instead of __CPU_CORTEX_A8N__
#ifdef __CPU_CORTEX_A8N__
#undef __CPU_CORTEX_A8__
#define __CPU_CORTEX_A8__
#endif

//
// Supported CPUs
//

#ifdef __MARM__

#undef __CPU_SPECIFIED
#if defined(__CPU_ARM710T__)
	#define __CPU_SPECIFIED
#elif defined(__CPU_ARM720T__)
	#define __CPU_SPECIFIED
#elif defined(__CPU_SA1__)
	#define __CPU_SPECIFIED
#elif defined(__CPU_ARM920T__)
	#define __CPU_SPECIFIED
#elif defined(__CPU_ARM925T__)
	#define __CPU_SPECIFIED
#elif defined(__CPU_XSCALE__)
	#define __CPU_SPECIFIED
#elif defined(__CPU_ARM926J__)
	#define __CPU_SPECIFIED
#elif defined(__CPU_ARM1136__)
	#define __CPU_SPECIFIED
#elif defined(__CPU_ARM1176__)
	#define __CPU_SPECIFIED
#elif defined(__CPU_ARM11MP__)
	#define __CPU_SPECIFIED
#elif defined(__CPU_CORTEX_A8__)
	#define __CPU_SPECIFIED
#elif defined(__CPU_CORTEX_A9__)
	#define __CPU_SPECIFIED
#elif defined(__CPU_GENERIC_ARM4__)
	#define __CPU_SPECIFIED
#endif

#if defined(__SMP__)
	#if defined(__CPU_SPECIFIED)
		#if !defined(__CPU_ARM11MP__) && !defined(__CPU_CORTEX_A9__)
			#error Specified CPU does not support SMP
		#endif
	#else
	// If no CPU specified, assume lowest common denominator SMP
	#define	__CPU_ARM11MP__
	#endif
#endif

#if defined(__CPU_ARM710T__)
	#define __CPU_ARMV4T

#elif defined(__CPU_ARM720T__)
	#define __CPU_ARMV4T

#elif defined(__CPU_SA1__)
	#define __CPU_ARMV4

#elif defined(__CPU_ARM920T__)
	#define __CPU_ARMV4T

#elif defined(__CPU_ARM925T__)
	#define __CPU_ARMV4T

#elif defined(__CPU_XSCALE__)
	#define __CPU_ARMV5T
	#define __ENHANCED_DSP_INSTRUCTIONS

#elif defined(__CPU_ARM926J__)
	#define __CPU_ARMV5T
	#define __ENHANCED_DSP_INSTRUCTIONS
	#define __CPU_HAS_JAZELLE

#elif defined(__CPU_ARM1136__)
	#define __CPU_ARMV6

#elif defined(__CPU_ARM1176__)
	#define __CPU_ARMV6

#elif defined(__CPU_ARM11MP__)
	#define __CPU_ARMV6
	#define	__CPU_ARM_HAS_WFI
	#define	__CPU_ARM_HAS_WFE_SEV

#elif defined(__CPU_CORTEX_A8__)
	#define __CPU_ARMV7

#elif defined(__CPU_CORTEX_A9__)
	#define __CPU_ARMV7

#elif defined(__CPU_GENERIC_ARM4__)
	#define __CPU_ARMV4

#else
	// #error Unsupported CPU
	#define __CPU_UNKNOWN
#endif

#endif  // __MARM__



// Macros for emitting single bytes of machine code
#ifdef __CW32__
# define BYTE(x)	_asm byte x
#elif __GCC32__
# define BYTE(x)	asm(".byte "#x);
#else
# define BYTE(x)	_asm _emit x
#endif
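
// Usage sketch (illustrative): BYTE(0x0f) BYTE(0x31) emits the two opcode bytes
// of the x86 RDTSC instruction directly into a __NAKED__ function body.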


// thiscall is different on GCC
#ifdef __GCC32__
#define THISCALL_PROLOG0() asm("mov ecx,[esp+4]");
#define THISCALL_PROLOG1() asm("mov ecx,[esp+4] \n mov eax,[esp+8] \n mov [esp+4],eax");
#define THISCALL_PROLOG2() asm("mov ecx,[esp+4] \n mov eax,[esp+8] \n mov [esp+4],eax \n mov eax,[esp+12] \n mov [esp+8],eax");
#define THISCALL_PROLOG3() asm("mov ecx,[esp+4] \n mov eax,[esp+8] \n mov [esp+4],eax \n mov eax,[esp+12] \n mov [esp+8],eax \n mov eax,[esp+16] \n mov [esp+12],eax");
#define THISCALL_PROLOG0_BIGRETVAL() asm("mov ecx,[esp+8]");
#define THISCALL_PROLOG1_BIGRETVAL() asm("mov ecx,[esp+8] \n mov eax,[esp+12] \n mov [esp+8],eax");
#define THISCALL_EPILOG0() asm("ret");
#define THISCALL_EPILOG1() asm("ret");
#define THISCALL_EPILOG2() asm("ret");
#define THISCALL_EPILOG3() asm("ret");
#define THISCALL_EPILOG0_BIGRETVAL() asm("ret 4");
#define THISCALL_EPILOG1_BIGRETVAL() asm("ret 4");
#else
#define THISCALL_PROLOG0()
#define THISCALL_PROLOG1()
#define THISCALL_PROLOG2()
#define THISCALL_PROLOG3()
#define THISCALL_PROLOG0_BIGRETVAL()
#define THISCALL_PROLOG1_BIGRETVAL()
#define THISCALL_EPILOG0() __asm ret
#define THISCALL_EPILOG1() __asm ret 4
#define THISCALL_EPILOG2() __asm ret 8
#define THISCALL_EPILOG3() __asm ret 12
#define THISCALL_EPILOG0_BIGRETVAL() __asm ret 4
#define THISCALL_EPILOG1_BIGRETVAL() __asm ret 8
#endif
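
// Usage sketch (illustrative; TFoo::DoSomething is a hypothetical method, not
// part of this header): a __NAKED__ member function taking one argument uses
// the matching prolog/epilog pair, after which the body can assume the MSVC
// thiscall layout ('this' in ecx, the argument at [esp+4]) on either compiler:
//
//	__NAKED__ TInt TFoo::DoSomething(TInt /*aValue*/)
//		{
//		THISCALL_PROLOG1()
//		// ... body written against the MSVC thiscall layout ...
//		THISCALL_EPILOG1()
//		}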


// Workaround for MSVC++ 5.0 bug; MSVC incorrectly fixes up conditional jumps
// when the destination is a C++ function.
#if defined(__VC32__) && (_MSC_VER==1100)	// untested on MSVC++ > 5.0
# define _ASM_j(cond,dest) _asm jn##cond short $+11 _asm jmp dest
# define _ASM_jn(cond,dest) _asm j##cond short $+11 _asm jmp dest
#else
# if defined __GCC32__
#  define _ASM_j(cond,dest) asm("j"#cond " %a0": : "i"(dest));
#  define _ASM_jn(cond,dest) asm("jn"#cond " %a0": :"i"(dest));
# else
#  define _ASM_j(cond,dest) _asm j##cond dest
#  define _ASM_jn(cond,dest) _asm jn##cond dest
# endif
#endif
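
// Usage sketch (illustrative; DoSlowPath is a hypothetical C++ function):
//
//	asm("cmp eax, 0");
//	_ASM_j(z, DoSlowPath)		// jump to DoSlowPath if eax is zero
//
// On the affected MSVC 5.0 build this expands to an inverted short branch
// around an unconditional jmp, which avoids the bad fixup.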



//#define __MINIMUM_MACHINE_CODE__

#if defined(__WINS__)
#define __NAKED__ __declspec( naked )
#ifndef __MINIMUM_MACHINE_CODE__
//#define __MEM_MACHINE_CODED__
#endif
#define __CPU_X86
#endif

#if defined(__X86__)
# ifdef __GCC32__
#  define __NAKED__	// GCC does not support naked functions on X86
# else
#  define __NAKED__ __declspec( naked )
# endif
# ifndef __MINIMUM_MACHINE_CODE__
#  define __MEM_MACHINE_CODED__
# endif
# define __CPU_X86
#endif


#if defined(__MARM__)
#ifndef __NAKED__ // should be defined in prefix file
	#ifndef __GCCXML__
		#define __NAKED__ __declspec( naked )
	#else
		#define __NAKED__
	#endif
#endif
#ifndef __CIA__
#undef __NAKED__
#define __NAKED__ ____ONLY_USE_NAKED_IN_CIA____
#endif
	#define __CPU_ARM

#if defined(__MARM_ARMV5__) && !defined(__CPU_ARMV5T)
#define __CPU_ARMV5T
#endif

#ifndef __MINIMUM_MACHINE_CODE__
#if !defined(__BIG_ENDIAN__)
	#define __MEM_MACHINE_CODED__
	#define __DES_MACHINE_CODED__
	#define __REGIONS_MACHINE_CODED__
	#define __DES8_MACHINE_CODED__
	#define __DES16_MACHINE_CODED__
	#define __HEAP_MACHINE_CODED__
	#define __REALS_MACHINE_CODED__
	#define __COBJECT_MACHINE_CODED__
	#define __CACTIVESCHEDULER_MACHINE_CODED__
	#define __CSERVER_MACHINE_CODED__
	#define __ARRAY_MACHINE_CODED__
	#define __HUFFMAN_MACHINE_CODED__
#if defined(__MARM_ARM4__) || defined(__MARM_ARMI__) || defined(__MARM_THUMB__) || defined(__MARM_ARMV4__) || defined(__MARM_ARMV5__)
	#define __DES16_MACHINE_CODED_HWORD__
#endif
#endif
#endif
#endif

#ifdef __CPU_ARMV4
	#define __CPU_64BIT_MULTIPLY
#endif
#ifdef __CPU_ARMV4T
	#define __CPU_THUMB
	#define __CPU_ARM_SUPPORTS_BX
	#define __CPU_64BIT_MULTIPLY
#endif
#ifdef __CPU_ARMV5T
	#define __CPU_THUMB
	#define __CPU_ARM_SUPPORTS_BX
	#define __CPU_ARM_SUPPORTS_BLX
	#define __CPU_64BIT_MULTIPLY
	#define __CPU_ARM_LDR_PC_SETS_TBIT
	#define __CPU_ARM_HAS_CLZ
	#define __CPU_ARM_HAS_PLD
#endif
#ifdef __ENHANCED_DSP_INSTRUCTIONS
	#define __CPU_ARM_HAS_MCRR
	#define __CPU_ARM_HAS_LDRD_STRD
#endif
#if defined(__CPU_ARMV6) || defined(__CPU_ARMV7)
	#define __CPU_THUMB
	#define __CPU_ARM_SUPPORTS_BX
	#define __CPU_ARM_SUPPORTS_BLX
	#define __CPU_64BIT_MULTIPLY
	#define __CPU_ARM_LDR_PC_SETS_TBIT
	#define __CPU_ARM_HAS_CLZ
	#define __CPU_ARM_HAS_MCRR
	#define __CPU_ARM_HAS_LDREX_STREX
	#define __CPU_ARM_HAS_LDRD_STRD
	#define __CPU_ARM_HAS_PLD
	#define __CPU_ARM_HAS_CPS
	#define __CPU_ARM_HAS_SPLIT_FSR
#if !defined(__CPU_ARM1136__) && !defined(__CPU_ARM11MP__)
	#define __CPU_ARM_HAS_CP15_IFAR
#endif
	#define	__CPU_ARM_SUPPORTS_USER_MODE_BARRIERS
#endif
#if defined(__CPU_ARMV7) || (defined(__CPU_ARM1136__) && defined(__CPU_ARM1136_IS_R1__)) || defined(__CPU_ARM1176__) || defined(__CPU_ARM11MP__)
	#define __CPU_ARM_HAS_LDREX_STREX_V6K
	#define __CPU_HAS_CP15_THREAD_ID_REG
#endif
#if defined(__MARM_ARM4T__) || defined(__MARM_INTERWORK__)
	#define __SUPPORT_THUMB_INTERWORKING
#endif
#if defined(__CPU_ARMV7)
#define	__CPU_ARM_HAS_WFI
#define	__CPU_ARM_HAS_WFE_SEV
#define __CPU_THUMB2
#define __CPU_SUPPORT_THUMB2EE
#endif


// ARM CPU macros to allow Thumb/Non-thumb builds
#ifdef __CPU_ARM

#define	EXC_TRAP_CTX_SZ		10		// Nonvolatile registers + sp + pc

#ifdef __SUPPORT_THUMB_INTERWORKING
#define __JUMP(cc,r) asm("bx"#cc " "#r )
#ifdef __CPU_ARM_LDR_PC_SETS_TBIT
#define __POPRET(rlist) asm("ldmfd sp!, {"rlist"pc} ")
#define __CPOPRET(cc,rlist) asm("ldm"#cc "fd sp!, {"rlist"pc} ")
#else
#define __POPRET(rlist) asm("ldmfd sp!, {"rlist"lr} ");\
						asm("bx lr ")
#define __CPOPRET(cc,rlist)	asm("ldm"#cc "fd sp!, {"rlist"lr} ");\
							asm("bx"#cc " lr ")
#endif
#else
#define __JUMP(cc,r) asm("mov"#cc " pc, "#r )
#define __POPRET(rlist) asm("ldmfd sp!, {"rlist"pc} ")
#define __CPOPRET(cc,rlist) asm("ldm"#cc "fd sp!, {"rlist"pc} ")
#endif
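
// Usage sketch (illustrative; the routine name and register list are arbitrary
// examples): a leaf routine in a .cia file pushes its working registers
// together with lr and returns through __POPRET, so the build selects the
// right return sequence (ldm straight into pc, or ldm into lr followed by bx):
//
//	__NAKED__ TInt SomeRoutine()
//		{
//		asm("stmfd sp!, {r4-r6,lr} ");
//		// ... body using r4-r6 ...
//		__POPRET("r4-r6,");
//		}
//
// __JUMP(,lr) generates a plain return for routines which saved nothing, and
// __CPOPRET(eq,"r4-r6,") is the conditional form of the same return.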

#ifdef __CPU_ARM_SUPPORTS_BLX
#if __ARM_ASSEMBLER_ISA__ >= 5
#define BLX(Rm)							asm("blx r" #Rm)
#else
#define BLX(Rm)							asm(".word %a0" : : "i" ((TInt)( 0xe12fff30 | (Rm) )))
#endif
#define __JUMPL(Rm) BLX(Rm)
#else
#ifdef __SUPPORT_THUMB_INTERWORKING
#define __JUMPL(Rm) asm("mov lr, pc "); \
                    asm("bx r"#Rm )
#else
#define __JUMPL(Rm) asm("mov lr, pc "); \
                    asm("mov pc, r"#Rm )
#endif
#endif
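
// Usage sketch (illustrative; the register and structure layout are invented
// for the example): __JUMPL performs a call through a function pointer held in
// a register, setting lr via BLX where available and via mov lr,pc otherwise:
//
//	asm("ldr r12, [r0, #4] ");	// fetch a function pointer
//	__JUMPL(12);			// call it; execution resumes here on return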

#ifdef __MARM_THUMB__
#ifndef __ARMCC__
#define __SWITCH_TO_ARM		asm("push {r0} ");\
							asm("add r0, pc, #4 ");\
							asm("bx r0 ");\
							asm("nop ");\
							asm(".align 2 ");\
							asm(".code 32 ");\
							asm("ldr r0, [sp], #4 ")
#define __END_ARM			asm(".code 16 ")
#else
#define __SWITCH_TO_ARM		asm(".code 32 ");
#define __END_ARM
#endif
#else
#define __SWITCH_TO_ARM
#define __END_ARM
#endif
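
// Usage sketch (illustrative): in a GCC Thumb build a naked routine that needs
// ARM-only instructions brackets them with __SWITCH_TO_ARM/__END_ARM and
// returns with an interworking branch while still in ARM state:
//
//	__NAKED__ void SomeRoutine()
//		{
//		__SWITCH_TO_ARM;
//		// ... ARM-state instructions ...
//		__JUMP(,lr);
//		__END_ARM;
//		}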

#define CC_EQ	0
#define	CC_NE	1
#define CC_CS	2
#define CC_CC	3
#define CC_MI	4
#define CC_PL	5
#define CC_VS	6
#define CC_VC	7
#define CC_HI	8
#define CC_LS	9
#define CC_GE	10
#define CC_LT	11
#define CC_GT	12
#define CC_LE	13
#define	CC_AL	14

#ifdef __CPU_ARM_HAS_CLZ
#if __ARM_ASSEMBLER_ISA__ >= 5
#define CLZ(Rd,Rm)		asm("clz r" #Rd ", r" #Rm)
#else
#define CLZ(Rd,Rm)		asm(".word %a0" : : "i" ((TInt)0xe16f0f10|((Rd)<<12)|(Rm)));
#endif
#define CLZcc(cc,Rd,Rm)	asm(".word %a0" : : "i" ((TInt)0x016f0f10|((cc)<<28)|((Rd)<<12)|(Rm)));
#endif
#ifdef __CPU_ARM_HAS_MCRR
#define MCRR(cop,opc,Rd,Rn,CRm)			asm(".word %a0" : : "i" ((TInt)0xec400000|((Rn)<<16)|((Rd)<<12)|((cop)<<8)|((opc)<<4)|(CRm)));
#define MCRRcc(cc,cop,opc,Rd,Rn,CRm)	asm(".word %a0" : : "i" ((TInt)0x0c400000|((cc)<<28)|((Rn)<<16)|((Rd)<<12)|((cop)<<8)|((opc)<<4)|(CRm)));
#define MRRC(cop,opc,Rd,Rn,CRm)			asm(".word %a0" : : "i" ((TInt)0xec500000|((Rn)<<16)|((Rd)<<12)|((cop)<<8)|((opc)<<4)|(CRm)));
#define MRRCcc(cc,cop,opc,Rd,Rn,CRm)	asm(".word %a0" : : "i" ((TInt)0x0c500000|((cc)<<28)|((Rn)<<16)|((Rd)<<12)|((cop)<<8)|((opc)<<4)|(CRm)));
#endif
#ifdef __CPU_ARM_HAS_LDREX_STREX
// LDREX Rd, [Rn] 		- load from [Rn] into Rd exclusive
// STREX Rd, Rm, [Rn] 	- store Rm into [Rn] with exclusive access; success/fail indicator into Rd
#define LDREXcc(cc,Rd,Rn)				asm(".word %a0" : : "i" ((TInt)(0x01900f9f|((cc)<<28)|((Rd)<<12)|((Rn)<<16))));
#define STREXcc(cc,Rd,Rm,Rn)			asm(".word %a0" : : "i" ((TInt)(0x01800f90|((cc)<<28)|((Rd)<<12)|(Rm)|((Rn)<<16))));
#if __ARM_ASSEMBLER_ISA__ >= 6
#define LDREX(Rd,Rn)					asm("ldrex r" #Rd ", [r" #Rn "] ")
#define STREX(Rd,Rm,Rn)					asm("strex r" #Rd ", r" #Rm ", [r" #Rn "] ")
#else
#define LDREX(Rd,Rn)					asm(".word %a0" : : "i" ((TInt)(0x01900f9f|((CC_AL)<<28)|((Rd)<<12)|((Rn)<<16))));
#define STREX(Rd,Rm,Rn)					asm(".word %a0" : : "i" ((TInt)(0x01800f90|((CC_AL)<<28)|((Rd)<<12)|(Rm)|((Rn)<<16))));
#endif
#endif
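
// Usage sketch (illustrative; registers chosen arbitrarily): the classic
// exclusive read-modify-write loop, atomically incrementing the word addressed
// by r0:
//
//	asm("1: ");
//	LDREX(1,0);			// r1 = [r0], exclusive monitor set
//	asm("add r1, r1, #1 ");
//	STREX(2,1,0);			// [r0] = r1 if still exclusive; r2 = 0 on success
//	asm("cmp r2, #0 ");
//	asm("bne 1b ");			// retry if the exclusive store failed
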
#ifdef __CPU_ARM_HAS_LDREX_STREX_V6K
// Byte, halfword, doubleword STREX/LDREX & unconditional CLREX
#if __ARM_ASSEMBLER_ISA__ >= 6
#define LDREXB(Rd,Rn)					asm("ldrexb r" #Rd ", [r" #Rn "] ")
#define STREXB(Rd,Rm,Rn)				asm("strexb r" #Rd ", r" #Rm ", [r" #Rn "] ")
#define LDREXH(Rd,Rn)					asm("ldrexh r" #Rd ", [r" #Rn "] ")
#define STREXH(Rd,Rm,Rn)				asm("strexh r" #Rd ", r" #Rm ", [r" #Rn "] ")
#define LDREXD(Rd,Rn)					asm("ldrexd r" #Rd ", [r" #Rn "] ")
#define STREXD(Rd,Rm,Rn)				asm("strexd r" #Rd ", r" #Rm ", [r" #Rn "] ")
#else
#define LDREXB(Rd,Rn)					asm(".word %a0" : : "i" ((TInt)(0x01D00f9f|((CC_AL)<<28)|((Rd)<<12)|((Rn)<<16))));
#define STREXB(Rd,Rm,Rn)				asm(".word %a0" : : "i" ((TInt)(0x01C00f90|((CC_AL)<<28)|((Rd)<<12)|(Rm)|((Rn)<<16))));
#define LDREXH(Rd,Rn)					asm(".word %a0" : : "i" ((TInt)(0x01f00f9f|((CC_AL)<<28)|((Rd)<<12)|((Rn)<<16))));
#define STREXH(Rd,Rm,Rn)				asm(".word %a0" : : "i" ((TInt)(0x01e00f90|((CC_AL)<<28)|((Rd)<<12)|(Rm)|((Rn)<<16))));
#define LDREXD(Rd,Rn)					asm(".word %a0" : : "i" ((TInt)(0x01b00f9f|((CC_AL)<<28)|((Rd)<<12)|((Rn)<<16))));
#define STREXD(Rd,Rm,Rn)				asm(".word %a0" : : "i" ((TInt)(0x01a00f90|((CC_AL)<<28)|((Rd)<<12)|(Rm)|((Rn)<<16))));
#endif
#if !defined(__CPU_ARM1136__) || defined(__CPU_ARM1136_ERRATUM_406973_FIXED)
#define __CPU_ARM_HAS_WORKING_CLREX
#if __ARM_ASSEMBLER_ISA__ >= 6
#define CLREX							asm("clrex ")
#else
#define CLREX							asm(".word %a0" : : "i" ((TInt)(0xf57ff01f)));
#endif
#endif
#endif
#ifdef __CPU_ARM_HAS_LDRD_STRD
#if __ARM_ASSEMBLER_ISA__ >= 5
#define LDRD(Rd,Rn)						asm("ldrd r" #Rd ", [r" #Rn "] ")
#define STRD(Rd,Rn)						asm("strd r" #Rd ", [r" #Rn "] ")
#else
#define LDRD(Rd,Rn)						asm(".word %a0" : : "i" ((TInt)( 0xe1c000d0 | ((Rn)<<16) | ((Rd)<<12) )))
#define STRD(Rd,Rn)						asm(".word %a0" : : "i" ((TInt)( 0xe1c000f0 | ((Rn)<<16) | ((Rd)<<12) )))
#endif
#define LDRD_ioff(Rd,Rn,off)			asm(".word %a0" : : "i" ((TInt)( 0xe1c000d0 | ((Rn)<<16) | ((Rd)<<12) | (((off)&0xf0)<<4) | ((off)&0x0f) )))
#define STRD_ioff(Rd,Rn,off)			asm(".word %a0" : : "i" ((TInt)( 0xe1c000f0 | ((Rn)<<16) | ((Rd)<<12) | (((off)&0xf0)<<4) | ((off)&0x0f) )))
#endif
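
// Usage sketch (illustrative): LDRD_ioff(2,0,8) loads the doubleword at [r0+8]
// into r2:r3 (the destination register must be even), and STRD_ioff(2,0,8) is
// the corresponding store of r2:r3 to [r0+8].
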
#if defined(__CPU_ARM_HAS_PLD) && !defined(__CPU_ARM926J__) && !defined(__CPU_UNKNOWN)		// PLD is a no-op on ARM926
#if __ARM_ASSEMBLER_ISA__ >= 5
#define PLD(Rn)							asm("pld [r" #Rn "] ")
#else
#define PLD(Rn)							asm(".word %a0" : : "i" ((TInt)( 0xf5d0f000 | ((Rn)<<16) )))
#endif
#define PLD_ioff(Rn, off)				asm(".word %a0" : : "i" ((TInt)( 0xf5d0f000 | ((Rn)<<16) | (off) )))	// preload with immediate offset
#define PLD_noff(Rn, off)				asm(".word %a0" : : "i" ((TInt)( 0xf550f000 | ((Rn)<<16) | (off) )))	// preload with negative offset
#else
#define PLD(Rn)
#define PLD_ioff(Rn, off)
#define PLD_noff(Rn, off)
#endif
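
// Usage sketch (illustrative): PLD(1) hints that the data addressed by r1 will
// be needed soon, and PLD_ioff(1,32) does the same for [r1+32]; on CPUs where
// these macros expand to nothing the hint is simply dropped.
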
#ifdef __CPU_HAS_CP15_THREAD_ID_REG
#define GET_RWRW_TID(cc,r)				asm("mrc"#cc" p15, 0, "#r", c13, c0, 2 ");
#define GET_RWRO_TID(cc,r)				asm("mrc"#cc" p15, 0, "#r", c13, c0, 3 ");
#define GET_RWNO_TID(cc,r)				asm("mrc"#cc" p15, 0, "#r", c13, c0, 4 ");
#define SET_RWRW_TID(cc,r)				asm("mcr"#cc" p15, 0, "#r", c13, c0, 2 ");
#define SET_RWRO_TID(cc,r)				asm("mcr"#cc" p15, 0, "#r", c13, c0, 3 ");
#define SET_RWNO_TID(cc,r)				asm("mcr"#cc" p15, 0, "#r", c13, c0, 4 ");
#endif
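
// Usage sketch (illustrative): GET_RWNO_TID(,r12) reads the read/write,
// no-user-access thread ID register (CP15 c13,c0,4) into r12, and
// SET_RWNO_TID(ne,r3) is a conditional write back. In the SMP kernel this
// register is conventionally used to hold a per-CPU pointer.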

#ifdef __CPU_SUPPORT_THUMB2EE
#define GET_THUMB2EE_HNDLR_BASE(cc,r)	asm("mrc"#cc" p14, 6, "#r", c1, c0, 0 ")
#define SET_THUMB2EE_HNDLR_BASE(cc,r)	asm("mcr"#cc" p14, 6, "#r", c1, c0, 0 ")
#endif

#if defined(__CPU_ARMV7)
#define	ARM_DMB_gen(opt)				asm(".word %a0" : : "i" ((TInt)(0xf57ff050 | (opt) )) )
#define	ARM_DSB_gen(opt)				asm(".word %a0" : : "i" ((TInt)(0xf57ff040 | (opt) )) )
#define	ARM_ISB_gen(opt)				asm(".word %a0" : : "i" ((TInt)(0xf57ff060 | (opt) )) )

#define	ARM_DMBSY	ARM_DMB_gen(0xf)	// full system DMB
#define	ARM_DSBSY	ARM_DSB_gen(0xf)	// full system DSB
#define	ARM_DMBST	ARM_DMB_gen(0xe)	// full system DMB, orders writes only
#define	ARM_DSBST	ARM_DSB_gen(0xe)	// full system DSB, orders writes only
#define	ARM_DMBSH	ARM_DMB_gen(0xb)	// DMB encompassing inner-shareable domain
#define	ARM_DSBSH	ARM_DSB_gen(0xb)	// DSB encompassing inner-shareable domain
#define	ARM_DMBSHST	ARM_DMB_gen(0xa)	// DMB encompassing inner-shareable domain, orders writes only
#define	ARM_DSBSHST	ARM_DSB_gen(0xa)	// DSB encompassing inner-shareable domain, orders writes only

#define	ARM_ISBSY	ARM_ISB_gen(0xf)	// full system ISB

#define	ARM_NOP							asm(".word 0xe320f000 ")
#define	ARM_YIELD						asm(".word 0xe320f001 ")

#define	__DATA_MEMORY_BARRIER__(reg)	ARM_DMBSH
#define	__DATA_MEMORY_BARRIER_Z__(reg)	asm("mov "#reg", #0"); ARM_DMBSH
#define	__DATA_SYNC_BARRIER__(reg)		ARM_DSBSH
#define	__DATA_SYNC_BARRIER_Z__(reg)	asm("mov "#reg", #0"); ARM_DSBSH
#define	__INST_SYNC_BARRIER__(reg)		ARM_ISBSY
#define	__INST_SYNC_BARRIER_Z__(reg)	asm("mov "#reg", #0"); ARM_ISBSY

#elif defined(__CPU_ARM11MP__)

#define	ARM_DMB(reg)					asm("mcr p15, 0, "#reg", c7, c10, 5 ")
#define	ARM_DSB(reg)					asm("mcr p15, 0, "#reg", c7, c10, 4 ")
#define	ARM_ISB(reg)					asm("mcr p15, 0, "#reg", c7, c5, 4 ")

#define	ARM_NOP							asm(".word 0xe320f000 ")
#define	ARM_YIELD						asm(".word 0xe320f001 ")

#define	__DATA_MEMORY_BARRIER__(reg)	ARM_DMB(reg)
#define	__DATA_MEMORY_BARRIER_Z__(reg)	asm("mov "#reg", #0"); ARM_DMB(reg)
#define	__DATA_SYNC_BARRIER__(reg)		ARM_DSB(reg)
#define	__DATA_SYNC_BARRIER_Z__(reg)	asm("mov "#reg", #0"); ARM_DSB(reg)
#define	__INST_SYNC_BARRIER__(reg)		ARM_ISB(reg)
#define	__INST_SYNC_BARRIER_Z__(reg)	asm("mov "#reg", #0"); ARM_ISB(reg)

#elif defined(__CPU_ARMV6__)

#define	ARM_DMB(reg)					asm("mcr p15, 0, "#reg", c7, c10, 5 ")
#define	ARM_DSB(reg)					asm("mcr p15, 0, "#reg", c7, c10, 4 ")
#define	ARM_ISB(reg)					asm("mcr p15, 0, "#reg", c7, c5, 4 ")

#define	__DATA_MEMORY_BARRIER__(reg)	ARM_DMB(reg)
#define	__DATA_MEMORY_BARRIER_Z__(reg)	asm("mov "#reg", #0"); ARM_DMB(reg)
#define	__DATA_SYNC_BARRIER__(reg)		ARM_DSB(reg)
#define	__DATA_SYNC_BARRIER_Z__(reg)	asm("mov "#reg", #0"); ARM_DSB(reg)
#define	__INST_SYNC_BARRIER__(reg)		ARM_ISB(reg)
#define	__INST_SYNC_BARRIER_Z__(reg)	asm("mov "#reg", #0"); ARM_ISB(reg)

#else

#define	__DATA_MEMORY_BARRIER__(reg)
#define	__DATA_MEMORY_BARRIER_Z__(reg)	asm("mov "#reg", #0")
#define	__DATA_SYNC_BARRIER__(reg)		asm("mcr p15, 0, "#reg", c7, c10, 4 ")
#define	__DATA_SYNC_BARRIER_Z__(reg)	asm("mov "#reg", #0"); asm("mcr p15, 0, "#reg", c7, c10, 4 ")
#define	__INST_SYNC_BARRIER__(reg)
#define	__INST_SYNC_BARRIER_Z__(reg)	asm("mov "#reg", #0")

#endif

#ifdef __SMP__
#define	__SMP_DATA_MEMORY_BARRIER__(reg)	__DATA_MEMORY_BARRIER__(reg)
#define	__SMP_DATA_MEMORY_BARRIER_Z__(reg)	__DATA_MEMORY_BARRIER_Z__(reg)
#define	__SMP_DATA_SYNC_BARRIER__(reg)		__DATA_SYNC_BARRIER__(reg)
#define	__SMP_DATA_SYNC_BARRIER_Z__(reg)	__DATA_SYNC_BARRIER_Z__(reg)
#define	__SMP_INST_SYNC_BARRIER__(reg)		__INST_SYNC_BARRIER__(reg)
#define	__SMP_INST_SYNC_BARRIER_Z__(reg)	__INST_SYNC_BARRIER_Z__(reg)
#else
#define	__SMP_DATA_MEMORY_BARRIER__(reg)
#define	__SMP_DATA_MEMORY_BARRIER_Z__(reg)	asm("mov "#reg", #0")
#define	__SMP_DATA_SYNC_BARRIER__(reg)
#define	__SMP_DATA_SYNC_BARRIER_Z__(reg)	asm("mov "#reg", #0")
#define	__SMP_INST_SYNC_BARRIER__(reg)
#define	__SMP_INST_SYNC_BARRIER_Z__(reg)	asm("mov "#reg", #0")
#endif
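
// Usage sketch (illustrative; registers chosen arbitrarily): publishing data to
// another observer - write the payload, issue a data memory barrier so the
// payload is visible before the flag, then write the flag. The _Z__ forms also
// zero the scratch register, which the pre-ARMv7 CP15 encodings use as the MCR
// source operand:
//
//	asm("str r2, [r0] ");			// payload
//	__DATA_MEMORY_BARRIER_Z__(r12);		// order payload before flag (r12 zeroed)
//	asm("str r3, [r1] ");			// flag
//
// The __SMP_ variants reduce to (almost) nothing on unicore builds, so they are
// the right choice when ordering only matters between CPUs.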

#ifdef	__CPU_ARM_HAS_WFI
#define	ARM_WFIcc(cc)					__DATA_SYNC_BARRIER__(r0); \
										asm(".word %a0" : : "i" ((TInt)(0x0320f003 | ((cc)<<28) )) )
#define	ARM_WFI		ARM_WFIcc(CC_AL)
#endif

#ifdef	__CPU_ARM_HAS_WFE_SEV
#define	ARM_WFEcc(cc)					__DATA_SYNC_BARRIER__(r0); \
										asm(".word %a0" : : "i" ((TInt)(0x0320f002 | ((cc)<<28) )) )
#if __ARM_ASSEMBLER_ISA__ >= 6
#define	ARM_WFE		__DATA_SYNC_BARRIER__(r0); \
					asm("wfe ")
#else
#define	ARM_WFE		ARM_WFEcc(CC_AL)
#endif
#define	ARM_SEVcc(cc)					asm(".word %a0" : : "i" ((TInt)(0x0320f004 | ((cc)<<28) )) )
#if __ARM_ASSEMBLER_ISA__ >= 6
#define	ARM_SEV		asm("sev ")
#else
#define	ARM_SEV		ARM_SEVcc(CC_AL)
#endif
#endif
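
// Usage sketch (illustrative; registers chosen arbitrarily): a waiter polls the
// word addressed by r4 and sleeps with WFE between samples, while the releasing
// side stores a non-zero value and then executes ARM_SEV to wake any waiters:
//
//	asm("1: ");
//	asm("ldr r1, [r4] ");
//	asm("cmp r1, #0 ");
//	asm("bne 2f ");
//	ARM_WFE;			// data sync barrier, then wait for an event
//	asm("b 1b ");
//	asm("2: ");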

#ifndef	ARM_NOP
#define	ARM_NOP							asm("nop ")
#define	ARM_YIELD						asm("nop ")
#endif

// Support for throwing exceptions through ARM embedded assembler
// Should only be needed user side
#ifndef __EH_FRAME_ADDRESS
#define	__EH_FRAME_ADDRESS(reg,offset)
#define __EH_FRAME_PUSH2(reg1,reg2)
#define __EH_FRAME_SAVE1(reg,offset)
#endif

// StrongARM msr bug workaround:
// (a conditional msr may cause the next instruction to be executed twice on these processors)
#ifdef __CPU_SA1__
#define __MSR_CPSR_C(cc,r)   \
				asm("msr"#cc" cpsr_c," #r);  \
				ARM_NOP;
#else // !__CPU_SA1__
#define __MSR_CPSR_C(cc,r) asm("msr"#cc" cpsr_c,"#r);
#endif
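
// Usage sketch (illustrative; registers chosen arbitrarily): masking IRQ and
// FIQ around a short critical section. The extra NOP needed after a
// conditional msr on StrongARM is supplied by the macro itself:
//
//	asm("mrs r0, cpsr ");
//	asm("orr r1, r0, #0xc0 ");	// set the I and F bits
//	__MSR_CPSR_C(, r1);		// interrupts now masked
//	// ... critical section ...
//	__MSR_CPSR_C(, r0);		// restore the original interrupt mask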

// Causes undefined instruction exception on both ARM and THUMB
#define __ASM_CRASH()					asm(".word 0xe7ffdeff ")
#if defined(__GNUC__)
#define	__crash()						asm(".word 0xe7ffdeff " : : : "memory")
#elif defined(__ARMCC__)
// RVCT doesn't let us inline an undefined instruction
// use a CDP to CP15 instead - doesn't work on THUMB but never mind
#if __ARMCC_VERSION < 310000
#define	__crash()						asm("cdp p15, 0, c0, c0, c0, 0 ")
#else
// Inline assembler is deprecated in RVCT 3.1 so we use an intrinsic.
#define __crash()						__cdp(15, 0x00, 0x000)
#endif
#endif

// Macro used to get the caller of the function containing a CHECK_PRECONDITIONS()
#if defined(__ARMCC_VERSION) && __ARMCC_VERSION >= 200000
#define PRECOND_FUNCTION_CALLER		__return_address()
#endif

#if !defined(__CPU_ARM_HAS_LDREX_STREX_V6K)
#if defined(__CPU_ARM_HAS_LDREX_STREX)
#define	__ATOMIC64_USE_SLOW_EXEC__
#else
#define	__ATOMIC64_USE_FAST_EXEC__
#define	__ATOMIC_USE_FAST_EXEC__
#endif
#endif

#endif	// __CPU_ARM

#ifdef	__CPU_X86
#define	EXC_TRAP_CTX_SZ		10		// ebx, esp, ebp, esi, edi, ds, es, fs, gs, eip

// Causes exception
#if defined(__VC32__) || defined(__CW32__)
#define	__crash()						do { _asm int 0ffh } while(0)
#else
#define	__crash()						asm("int 0xff " : : : "memory")
#endif

#ifdef __VC32__
// Not available in the version of MSVC normally used
// #define PRECOND_FUNCTION_CALLER		((TLinAddr)_ReturnAddress())
#endif

#endif	// __CPU_X86

#ifdef __GCC32__
#define PRECOND_FUNCTION_CALLER		((TLinAddr)__builtin_return_address(0))
#endif

#endif