// os/kernelhwsrv/kernel/eka/include/cpudefs.h
// author: sl
// Tue, 10 Jun 2014 14:32:02 +0200
// changeset 1 260cb5ec6c19
// permissions -rw-r--r--
// Update contrib.
     1 // Copyright (c) 1995-2009 Nokia Corporation and/or its subsidiary(-ies).
     2 // All rights reserved.
     3 // This component and the accompanying materials are made available
     4 // under the terms of the License "Eclipse Public License v1.0"
     5 // which accompanies this distribution, and is available
     6 // at the URL "http://www.eclipse.org/legal/epl-v10.html".
     7 //
     8 // Initial Contributors:
     9 // Nokia Corporation - initial contribution.
    10 //
    11 // Contributors:
    12 //
    13 // Description:
    14 // e32\include\cpudefs.h
    15 // 
    16 // WARNING: This file contains some APIs which are internal and are subject
    17 //          to change without notice. Such APIs should therefore not be used
    18 //          outside the Kernel and Hardware Services package.
    19 //
    20 
    21 /**
    22  @file
    23  @internalTechnology
    24 */
    25 
#ifndef __CPUDEFS_H__
#define __CPUDEFS_H__

// Assembler ISA level used throughout this file to choose between real
// mnemonics (clz, ldrex, wfe, ...) and hand-encoded ".word" opcodes.
// Note both branches currently select 4, so the .word fallbacks are used
// even on RVCT — see the inline complaint below.
#ifdef __ARMCC__
#define	__ARM_ASSEMBLER_ISA__	4	// "Instruction not supported on targeted CPU :("
#else
#define	__ARM_ASSEMBLER_ISA__	4
#endif

// Should really have been __CPU_CORTEX_A8__ instead of __CPU_CORTEX_A8N__
// Alias the historical misspelling onto the correct macro name.
#ifdef __CPU_CORTEX_A8N__
#undef __CPU_CORTEX_A8__
#define __CPU_CORTEX_A8__
#endif

//
// Supported CPUs
//
    44 
#ifdef __MARM__

// Record whether the build explicitly named a target CPU.  Used below to
// sanity-check SMP builds and to pick a default core for them.
#undef __CPU_SPECIFIED
#if defined(__CPU_ARM710T__)
	#define __CPU_SPECIFIED
#elif defined(__CPU_ARM720T__)
	#define __CPU_SPECIFIED
#elif defined(__CPU_SA1__)
	#define __CPU_SPECIFIED
#elif defined(__CPU_ARM920T__)
	#define __CPU_SPECIFIED
#elif defined(__CPU_ARM925T__)
	#define __CPU_SPECIFIED
#elif defined(__CPU_XSCALE__)
	#define __CPU_SPECIFIED
#elif defined(__CPU_ARM926J__)
	#define __CPU_SPECIFIED
#elif defined(__CPU_ARM1136__)
	#define __CPU_SPECIFIED
#elif defined(__CPU_ARM1176__)
	#define __CPU_SPECIFIED
#elif defined(__CPU_ARM11MP__)
	#define __CPU_SPECIFIED
#elif defined(__CPU_CORTEX_A8__)
	#define __CPU_SPECIFIED
#elif defined(__CPU_CORTEX_A9__)
	#define __CPU_SPECIFIED
#elif defined(__CPU_GENERIC_ARM4__)
	#define __CPU_SPECIFIED
#endif

// SMP builds require a multiprocessor-capable core: only ARM11MPCore and
// Cortex-A9 qualify here.  An unspecified CPU defaults to ARM11MPCore.
#if defined(__SMP__)
	#if defined(__CPU_SPECIFIED)
		#if !defined(__CPU_ARM11MP__) && !defined(__CPU_CORTEX_A9__)
			#error Specified CPU does not support SMP
		#endif
	#else
	// If no CPU specified, assume lowest common denominator SMP
	#define	__CPU_ARM11MP__
	#endif
#endif

// Map each supported core onto its architecture-version macro plus any
// core-specific feature macros (enhanced DSP, Jazelle, WFI/WFE, ...).
#if defined(__CPU_ARM710T__)
	#define __CPU_ARMV4T

#elif defined(__CPU_ARM720T__)
	#define __CPU_ARMV4T

#elif defined(__CPU_SA1__)
	#define __CPU_ARMV4

#elif defined(__CPU_ARM920T__)
	#define __CPU_ARMV4T

#elif defined(__CPU_ARM925T__)
	#define __CPU_ARMV4T

#elif defined(__CPU_XSCALE__)
	#define __CPU_ARMV5T
	#define __ENHANCED_DSP_INSTRUCTIONS

#elif defined(__CPU_ARM926J__)
	#define __CPU_ARMV5T
	#define __ENHANCED_DSP_INSTRUCTIONS
	#define __CPU_HAS_JAZELLE

#elif defined(__CPU_ARM1136__)
	#define __CPU_ARMV6

#elif defined(__CPU_ARM1176__)
	#define __CPU_ARMV6

#elif defined(__CPU_ARM11MP__)
	#define __CPU_ARMV6
	#define	__CPU_ARM_HAS_WFI
	#define	__CPU_ARM_HAS_WFE_SEV

#elif defined(__CPU_CORTEX_A8__)
	#define __CPU_ARMV7

#elif defined(__CPU_CORTEX_A9__)
	#define __CPU_ARMV7

#elif defined(__CPU_GENERIC_ARM4__)
	#define __CPU_ARMV4

#else
	// No CPU macro given: features are selected conservatively elsewhere
	// (e.g. PLD is suppressed below when __CPU_UNKNOWN is set).
	// #error Unsupported CPU
	#define __CPU_UNKNOWN
#endif

#endif  // __MARM__
   137 
   138 
   139 
   140 // Macros for emitting single bytes of machine code
   141 #ifdef __CW32__
   142 # define BYTE(x)	_asm byte x
   143 #elif __GCC32__
   144 # define BYTE(x)	asm(".byte "#x);
   145 #else
   146 # define BYTE(x)	_asm _emit x
   147 #endif
   148 
   149 
// thiscall is different on GCC
// GCC x86 has no native __thiscall, so the prologs manually load the 'this'
// pointer from the first stack slot into ecx and shift the remaining
// arguments down one slot; the matching epilogs return with a plain 'ret'.
// The BIGRETVAL variants are for functions returning a large struct, where
// the hidden return-slot pointer occupies the first stack slot (hence the
// +8 offsets and the 'ret 4' that pops it).
// On MSVC/CodeWarrior the compiler's own thiscall convention already puts
// 'this' in ecx, so the prologs are empty and the epilogs use callee-pops
// 'ret n' to discard the stacked arguments.
#ifdef __GCC32__
#define THISCALL_PROLOG0() asm("mov ecx,[esp+4]");
#define THISCALL_PROLOG1() asm("mov ecx,[esp+4] \n mov eax,[esp+8] \n mov [esp+4],eax");
#define THISCALL_PROLOG2() asm("mov ecx,[esp+4] \n mov eax,[esp+8] \n mov [esp+4],eax \n mov eax,[esp+12] \n mov [esp+8],eax");
#define THISCALL_PROLOG3() asm("mov ecx,[esp+4] \n mov eax,[esp+8] \n mov [esp+4],eax \n mov eax,[esp+12] \n mov [esp+8],eax \n mov eax,[esp+16] \n mov [esp+12],eax");
#define THISCALL_PROLOG0_BIGRETVAL() asm("mov ecx,[esp+8]");
#define THISCALL_PROLOG1_BIGRETVAL() asm("mov ecx,[esp+8] \n mov eax,[esp+12] \n mov [esp+8],eax");
#define THISCALL_EPILOG0() asm("ret");
#define THISCALL_EPILOG1() asm("ret");
#define THISCALL_EPILOG2() asm("ret");
#define THISCALL_EPILOG3() asm("ret");
#define THISCALL_EPILOG0_BIGRETVAL() asm("ret 4");
#define THISCALL_EPILOG1_BIGRETVAL() asm("ret 4");
#else
#define THISCALL_PROLOG0()
#define THISCALL_PROLOG1()
#define THISCALL_PROLOG2()
#define THISCALL_PROLOG3()
#define THISCALL_PROLOG0_BIGRETVAL() 
#define THISCALL_PROLOG1_BIGRETVAL() 
#define THISCALL_EPILOG0() __asm ret
#define THISCALL_EPILOG1() __asm ret 4
#define THISCALL_EPILOG2() __asm ret 8
#define THISCALL_EPILOG3() __asm ret 12
#define THISCALL_EPILOG0_BIGRETVAL() __asm ret 4
#define THISCALL_EPILOG1_BIGRETVAL() __asm ret 8
#endif
   178 
   179 
// Workaround for MSVC++ 5.0 bug; MSVC incorrectly fixes up conditional jumps
// when the destination is a C++ function.
// The workaround inverts the condition to skip over an unconditional 'jmp'
// (the 'short $+11' hops past the 5-byte jmp), which MSVC fixes up correctly.
#if defined(__VC32__) && (_MSC_VER==1100)	// untested on MSVC++ > 5.0
# define _ASM_j(cond,dest) _asm jn##cond short $+11 _asm jmp dest
# define _ASM_jn(cond,dest) _asm j##cond short $+11 _asm jmp dest
#else
# if defined __GCC32__
// GCC form: paste the condition onto the mnemonic and pass the destination
// as an immediate operand.
#  define _ASM_j(cond,dest) asm("j"#cond " %a0": : "i"(dest));
#  define _ASM_jn(cond,dest) asm("jn"#cond " %a0": :"i"(dest));
# else
#  define _ASM_j(cond,dest) _asm j##cond dest
#  define _ASM_jn(cond,dest) _asm jn##cond dest
# endif
#endif
   194 
   195 
   196 
// Uncomment to disable the *_MACHINE_CODED__ assembler fast paths file-wide.
//#define __MINIMUM_MACHINE_CODE__

// Emulator (WINS) build: naked functions are available but the assembler
// fast paths are kept off by default.
#if defined(__WINS__)
#define __NAKED__ __declspec( naked )
#ifndef __MINIMUM_MACHINE_CODE__
//#define __MEM_MACHINE_CODED__
#endif
#define __CPU_X86
#endif

// Native x86 build.
#if defined(__X86__)
# ifdef __GCC32__
#  define __NAKED__	// GCC does not support naked functions on X86
# else
#  define __NAKED__ __declspec( naked )
# endif
# ifndef __MINIMUM_MACHINE_CODE__
#  define __MEM_MACHINE_CODED__
# endif
# define __CPU_X86
#endif
   218 
   219 
#if defined(__MARM__)
#ifndef __NAKED__ // should be defined in prefix file
	#ifndef __GCCXML__
        #define __NAKED__ __declspec( naked )
    #else
        #define __NAKED__
    #endif
#endif
// Outside CIA (C++ Inline Assembler) translation units, poison __NAKED__ so
// any accidental use fails to compile with a self-explanatory identifier.
#ifndef __CIA__
#undef __NAKED__
#define __NAKED__ ____ONLY_USE_NAKED_IN_CIA____
#endif
	#define __CPU_ARM

#if defined(__MARM_ARMV5__) && !defined(__CPU_ARMV5T)
#define __CPU_ARMV5T
#endif

// Enable the hand-written assembler implementations of the kernel/user
// library primitives.  Only for little-endian builds; big-endian falls back
// to the C++ implementations.
#ifndef __MINIMUM_MACHINE_CODE__
#if !defined(__BIG_ENDIAN__)
	#define __MEM_MACHINE_CODED__
	#define __DES_MACHINE_CODED__
	#define __REGIONS_MACHINE_CODED__
	#define __DES8_MACHINE_CODED__
	#define __DES16_MACHINE_CODED__
	#define __HEAP_MACHINE_CODED__
	#define __REALS_MACHINE_CODED__
	#define __COBJECT_MACHINE_CODED__
	#define __CACTIVESCHEDULER_MACHINE_CODED__
	#define __CSERVER_MACHINE_CODED__
	#define __ARRAY_MACHINE_CODED__
	#define __HUFFMAN_MACHINE_CODED__
#if defined(__MARM_ARM4__) || defined(__MARM_ARMI__) || defined(__MARM_THUMB__) || defined(__MARM_ARMV4__) || defined(__MARM_ARMV5__)
	#define __DES16_MACHINE_CODED_HWORD__
#endif
#endif
#endif
#endif
   258 
// Derive instruction-set feature macros from the architecture version
// selected above.  Later code tests these features, never the raw
// architecture macros.
#ifdef __CPU_ARMV4
	#define __CPU_64BIT_MULTIPLY
#endif
#ifdef __CPU_ARMV4T
	#define __CPU_THUMB
	#define __CPU_ARM_SUPPORTS_BX
	#define __CPU_64BIT_MULTIPLY
#endif
#ifdef __CPU_ARMV5T
	#define __CPU_THUMB
	#define __CPU_ARM_SUPPORTS_BX
	#define __CPU_ARM_SUPPORTS_BLX
	#define __CPU_64BIT_MULTIPLY
	#define __CPU_ARM_LDR_PC_SETS_TBIT
	#define __CPU_ARM_HAS_CLZ
	#define __CPU_ARM_HAS_PLD
#endif
// ARMv5TE (E-variant DSP) additions.
#ifdef __ENHANCED_DSP_INSTRUCTIONS
	#define __CPU_ARM_HAS_MCRR
	#define __CPU_ARM_HAS_LDRD_STRD
#endif
#if defined(__CPU_ARMV6) || defined(__CPU_ARMV7)
	#define __CPU_THUMB
	#define __CPU_ARM_SUPPORTS_BX
	#define __CPU_ARM_SUPPORTS_BLX
	#define __CPU_64BIT_MULTIPLY
	#define __CPU_ARM_LDR_PC_SETS_TBIT
	#define __CPU_ARM_HAS_CLZ
	#define __CPU_ARM_HAS_MCRR
	#define __CPU_ARM_HAS_LDREX_STREX
	#define __CPU_ARM_HAS_LDRD_STRD
	#define __CPU_ARM_HAS_PLD
	#define __CPU_ARM_HAS_CPS
	#define __CPU_ARM_HAS_SPLIT_FSR
// ARM1136/ARM11MP lack the CP15 instruction fault address register.
#if !defined(__CPU_ARM1136__) && !defined(__CPU_ARM11MP__)
	#define __CPU_ARM_HAS_CP15_IFAR
#endif
	#define	__CPU_ARM_SUPPORTS_USER_MODE_BARRIERS
#endif
// V6K exclusives (byte/half/double LDREX, CLREX) and the CP15 thread-ID
// registers: ARMv7, ARM1136 r1 onwards, ARM1176 and ARM11MPCore.
#if defined(__CPU_ARMV7) || (defined(__CPU_ARM1136__) && defined(__CPU_ARM1136_IS_R1__)) || defined(__CPU_ARM1176__) || defined(__CPU_ARM11MP__)
	#define __CPU_ARM_HAS_LDREX_STREX_V6K
	#define __CPU_HAS_CP15_THREAD_ID_REG
#endif
#if defined(__MARM_ARM4T__) || defined(__MARM_INTERWORK__)
	#define __SUPPORT_THUMB_INTERWORKING
#endif
#if defined(__CPU_ARMV7)
#define	__CPU_ARM_HAS_WFI
#define	__CPU_ARM_HAS_WFE_SEV
#define __CPU_THUMB2
#define __CPU_SUPPORT_THUMB2EE
#endif
   311 
   312 
// ARM CPU macros to allow Thumb/Non-thumb builds
#ifdef __CPU_ARM

#define	EXC_TRAP_CTX_SZ		10		// Nonvolatile registers + sp + pc

// __JUMP: register-indirect branch; __POPRET: function return popping 'rlist'
// then pc/lr; __CPOPRET: conditional form.  When interworking, returns must
// go via 'bx' unless this core's 'ldr pc' already copies bit0 into the T bit.
#ifdef __SUPPORT_THUMB_INTERWORKING
#define __JUMP(cc,r) asm("bx"#cc " "#r )
#ifdef __CPU_ARM_LDR_PC_SETS_TBIT
#define __POPRET(rlist) asm("ldmfd sp!, {"rlist"pc} ")
#define __CPOPRET(cc,rlist) asm("ldm"#cc "fd sp!, {"rlist"pc} ")
#else
#define __POPRET(rlist) asm("ldmfd sp!, {"rlist"lr} ");\
						asm("bx lr ")
#define __CPOPRET(cc,rlist)	asm("ldm"#cc "fd sp!, {"rlist"lr} ");\
							asm("bx"#cc " lr ")
#endif
#else
#define __JUMP(cc,r) asm("mov"#cc " pc, "#r )
#define __POPRET(rlist) asm("ldmfd sp!, {"rlist"pc} ")
#define __CPOPRET(cc,rlist) asm("ldm"#cc "fd sp!, {"rlist"pc} ")
#endif

// __JUMPL: call through a register.  Uses BLX when the core has it (emitted
// as a raw opcode when the assembler ISA level is too low), otherwise the
// classic 'mov lr, pc' + branch sequence.
#ifdef __CPU_ARM_SUPPORTS_BLX
#if __ARM_ASSEMBLER_ISA__ >= 5
#define BLX(Rm)							asm("blx r" #Rm)
#else
#define BLX(Rm)							asm(".word %a0" : : "i" ((TInt)( 0xe12fff30 | (Rm) )))
#endif
#define __JUMPL(Rm) BLX(Rm)
#else
#ifdef __SUPPORT_THUMB_INTERWORKING
#define __JUMPL(Rm) asm("mov lr, pc "); \
                    asm("bx r"#Rm )
#else
#define __JUMPL(Rm) asm("mov lr, pc "); \
                    asm("mov pc, r"#Rm )
#endif
#endif
   351 
// In Thumb builds, temporarily switch the assembler (and at runtime the CPU)
// into ARM state; __END_ARM switches the assembler back to Thumb.  The GNU
// version performs a real runtime 'bx' into the ARM code that follows,
// preserving r0 across the switch via the stack.
#ifdef __MARM_THUMB__
#ifndef __ARMCC__
#define __SWITCH_TO_ARM		asm("push {r0} ");\
							asm("add r0, pc, #4 ");\
							asm("bx r0 ");\
							asm("nop ");\
							asm(".align 2 ");\
							asm(".code 32 ");\
							asm("ldr r0, [sp], #4 ")
#define __END_ARM			asm(".code 16 ")
#else
#define __SWITCH_TO_ARM        asm(".code 32 ");
#define __END_ARM
#endif
#else
#define __SWITCH_TO_ARM
#define __END_ARM
#endif

// ARM condition-code field values (bits 31:28 of an instruction); used when
// hand-encoding conditional instructions as .word below.
#define CC_EQ	0
#define	CC_NE	1
#define CC_CS	2
#define CC_CC	3
#define CC_MI	4
#define CC_PL	5
#define CC_VS	6
#define CC_VC	7
#define CC_HI	8
#define CC_LS	9
#define CC_GE	10
#define CC_LT	11
#define CC_GT	12
#define CC_LE	13
#define	CC_AL	14
   386 
// The macros below emit instructions either by mnemonic (when the assembler
// ISA level allows) or as hand-encoded .word opcodes with the register
// numbers OR'd into the appropriate bit fields.  Rd/Rm/Rn arguments are
// plain register NUMBERS (e.g. CLZ(0,1) means "clz r0, r1").
#ifdef __CPU_ARM_HAS_CLZ
#if __ARM_ASSEMBLER_ISA__ >= 5
#define CLZ(Rd,Rm)		asm("clz r" #Rd ", r" #Rm)
#else
#define CLZ(Rd,Rm)		asm(".word %a0" : : "i" ((TInt)0xe16f0f10|((Rd)<<12)|(Rm)));
#endif
#define CLZcc(cc,Rd,Rm)	asm(".word %a0" : : "i" ((TInt)0x016f0f10|((cc)<<28)|((Rd)<<12)|(Rm)));
#endif
// Two-register coprocessor transfers (MCRR/MRRC), always hand-encoded.
#ifdef __CPU_ARM_HAS_MCRR
#define MCRR(cop,opc,Rd,Rn,CRm)			asm(".word %a0" : : "i" ((TInt)0xec400000|((Rn)<<16)|((Rd)<<12)|((cop)<<8)|((opc)<<4)|(CRm)));
#define MCRRcc(cc,cop,opc,Rd,Rn,CRm)	asm(".word %a0" : : "i" ((TInt)0x0c400000|((cc)<<28)|((Rn)<<16)|((Rd)<<12)|((cop)<<8)|((opc)<<4)|(CRm)));
#define MRRC(cop,opc,Rd,Rn,CRm)			asm(".word %a0" : : "i" ((TInt)0xec500000|((Rn)<<16)|((Rd)<<12)|((cop)<<8)|((opc)<<4)|(CRm)));
#define MRRCcc(cc,cop,opc,Rd,Rn,CRm)	asm(".word %a0" : : "i" ((TInt)0x0c500000|((cc)<<28)|((Rn)<<16)|((Rd)<<12)|((cop)<<8)|((opc)<<4)|(CRm)));
#endif
#ifdef __CPU_ARM_HAS_LDREX_STREX
// LDREX Rd, [Rn] 		- load from [Rn] into Rd exclusive
// STREX Rd, Rm, [Rn] 	- store Rm into [Rn] with exclusive access; success/fail indicator into Rd
#define LDREXcc(cc,Rd,Rn)				asm(".word %a0" : : "i" ((TInt)(0x01900f9f|((cc)<<28)|((Rd)<<12)|((Rn)<<16))));
#define STREXcc(cc,Rd,Rm,Rn)			asm(".word %a0" : : "i" ((TInt)(0x01800f90|((cc)<<28)|((Rd)<<12)|(Rm)|((Rn)<<16))));
#if __ARM_ASSEMBLER_ISA__ >= 6
#define LDREX(Rd,Rn)					asm("ldrex r" #Rd ", [r" #Rn "] ")
#define STREX(Rd,Rm,Rn)					asm("strex r" #Rd ", r" #Rm ", [r" #Rn "] ")
#else
#define LDREX(Rd,Rn)					asm(".word %a0" : : "i" ((TInt)(0x01900f9f|((CC_AL)<<28)|((Rd)<<12)|((Rn)<<16))));
#define STREX(Rd,Rm,Rn)					asm(".word %a0" : : "i" ((TInt)(0x01800f90|((CC_AL)<<28)|((Rd)<<12)|(Rm)|((Rn)<<16))));
#endif
#endif
#ifdef __CPU_ARM_HAS_LDREX_STREX_V6K
// Byte, halfword, doubleword STREX/LDREX & unconditional CLREX
#if __ARM_ASSEMBLER_ISA__ >= 6
#define LDREXB(Rd,Rn)					asm("ldrexb r" #Rd ", [r" #Rn "] ")
#define STREXB(Rd,Rm,Rn)				asm("strexb r" #Rd ", r" #Rm ", [r" #Rn "] ")
#define LDREXH(Rd,Rn)					asm("ldrexh r" #Rd ", [r" #Rn "] ")
#define STREXH(Rd,Rm,Rn)				asm("strexh r" #Rd ", r" #Rm ", [r" #Rn "] ")
#define LDREXD(Rd,Rn)					asm("ldrexd r" #Rd ", [r" #Rn "] ")
#define STREXD(Rd,Rm,Rn)				asm("strexd r" #Rd ", r" #Rm ", [r" #Rn "] ")
#else
#define LDREXB(Rd,Rn)					asm(".word %a0" : : "i" ((TInt)(0x01D00f9f|((CC_AL)<<28)|((Rd)<<12)|((Rn)<<16))));
#define STREXB(Rd,Rm,Rn)				asm(".word %a0" : : "i" ((TInt)(0x01C00f90|((CC_AL)<<28)|((Rd)<<12)|(Rm)|((Rn)<<16))));
#define LDREXH(Rd,Rn)					asm(".word %a0" : : "i" ((TInt)(0x01f00f9f|((CC_AL)<<28)|((Rd)<<12)|((Rn)<<16))));
#define STREXH(Rd,Rm,Rn)				asm(".word %a0" : : "i" ((TInt)(0x01e00f90|((CC_AL)<<28)|((Rd)<<12)|(Rm)|((Rn)<<16))));
#define LDREXD(Rd,Rn)					asm(".word %a0" : : "i" ((TInt)(0x01b00f9f|((CC_AL)<<28)|((Rd)<<12)|((Rn)<<16))));
#define STREXD(Rd,Rm,Rn)				asm(".word %a0" : : "i" ((TInt)(0x01a00f90|((CC_AL)<<28)|((Rd)<<12)|(Rm)|((Rn)<<16))));
#endif
// CLREX is only usable where ARM1136 erratum 406973 does not apply
// (presumably the erratum affects CLREX on unfixed 1136 revisions — the
// macro name is the only evidence here; confirm against the errata notice).
#if !defined(__CPU_ARM1136__) || defined(__CPU_ARM1136_ERRATUM_406973_FIXED)
#define __CPU_ARM_HAS_WORKING_CLREX
#if __ARM_ASSEMBLER_ISA__ >= 6
#define CLREX							asm("clrex ")
#else
#define CLREX							asm(".word %a0" : : "i" ((TInt)(0xf57ff01f)));
#endif
#endif
#endif 
// Doubleword load/store; the _ioff variants take an immediate byte offset
// split across the instruction's two 4-bit immediate fields.
#ifdef __CPU_ARM_HAS_LDRD_STRD
#if __ARM_ASSEMBLER_ISA__ >= 5
#define LDRD(Rd,Rn)						asm("ldrd r" #Rd ", [r" #Rn "] ")
#define STRD(Rd,Rn)						asm("strd r" #Rd ", [r" #Rn "] ")
#else
#define LDRD(Rd,Rn)						asm(".word %a0" : : "i" ((TInt)( 0xe1c000d0 | ((Rn)<<16) | ((Rd)<<12) )))
#define STRD(Rd,Rn)						asm(".word %a0" : : "i" ((TInt)( 0xe1c000f0 | ((Rn)<<16) | ((Rd)<<12) )))
#endif
#define LDRD_ioff(Rd,Rn,off)			asm(".word %a0" : : "i" ((TInt)( 0xe1c000d0 | ((Rn)<<16) | ((Rd)<<12) | (((off)&0xf0)<<4) | ((off)&0x0f) )))
#define STRD_ioff(Rd,Rn,off)			asm(".word %a0" : : "i" ((TInt)( 0xe1c000f0 | ((Rn)<<16) | ((Rd)<<12) | (((off)&0xf0)<<4) | ((off)&0x0f) )))
#endif
// Cache preload.  Compiled out (empty) when the core lacks PLD, when the CPU
// is unknown, or on ARM926 where the instruction exists but does nothing.
#if defined(__CPU_ARM_HAS_PLD) && !defined(__CPU_ARM926J__) && !defined(__CPU_UNKNOWN)		// PLD is a no-op on ARM926
#if __ARM_ASSEMBLER_ISA__ >= 5
#define PLD(Rn)							asm("pld [r" #Rn "] ")
#else
#define PLD(Rn)							asm(".word %a0" : : "i" ((TInt)( 0xf5d0f000 | ((Rn)<<16) )))
#endif
#define PLD_ioff(Rn, off)				asm(".word %a0" : : "i" ((TInt)( 0xf5d0f000 | ((Rn)<<16) | (off) )))	// preload with immediate offset
#define PLD_noff(Rn, off)				asm(".word %a0" : : "i" ((TInt)( 0xf550f000 | ((Rn)<<16) | (off) )))	// preload with negative offset
#else
#define PLD(Rn)
#define PLD_ioff(Rn, off)
#define PLD_noff(Rn, off)
#endif
// Accessors for the three CP15 c13 software thread-ID registers
// (opcode2 = 2: user rw, 3: user ro, 4: privileged only).  'r' is a register
// name token, 'cc' an optional condition suffix.
#ifdef __CPU_HAS_CP15_THREAD_ID_REG
#define GET_RWRW_TID(cc,r)				asm("mrc"#cc" p15, 0, "#r", c13, c0, 2 ");
#define GET_RWRO_TID(cc,r)				asm("mrc"#cc" p15, 0, "#r", c13, c0, 3 ");
#define GET_RWNO_TID(cc,r)				asm("mrc"#cc" p15, 0, "#r", c13, c0, 4 ");
#define SET_RWRW_TID(cc,r)				asm("mcr"#cc" p15, 0, "#r", c13, c0, 2 ");
#define SET_RWRO_TID(cc,r)				asm("mcr"#cc" p15, 0, "#r", c13, c0, 3 ");
#define SET_RWNO_TID(cc,r)				asm("mcr"#cc" p15, 0, "#r", c13, c0, 4 ");
#endif

// ThumbEE (Thumb-2EE) handler base register accessors via CP14.
#ifdef __CPU_SUPPORT_THUMB2EE
#define GET_THUMB2EE_HNDLR_BASE(cc,r)	asm("mrc"#cc" p14, 6, "#r", c1, c0, 0 ")
#define SET_THUMB2EE_HNDLR_BASE(cc,r)	asm("mcr"#cc" p14, 6, "#r", c1, c0, 0 ")
#endif
   477 
// Memory/instruction barrier selection, most capable core first.
// ARMv7: dedicated DMB/DSB/ISB instructions (hand-encoded as .word since
// __ARM_ASSEMBLER_ISA__ is 4), parameterised by the option field in bits 3:0.
#if defined(__CPU_ARMV7)
#define	ARM_DMB_gen(opt)				asm(".word %a0" : : "i" ((TInt)(0xf57ff050 | (opt) )) )
#define	ARM_DSB_gen(opt)				asm(".word %a0" : : "i" ((TInt)(0xf57ff040 | (opt) )) )
#define	ARM_ISB_gen(opt)				asm(".word %a0" : : "i" ((TInt)(0xf57ff060 | (opt) )) )

#define	ARM_DMBSY	ARM_DMB_gen(0xf)	// full system DMB
#define	ARM_DSBSY	ARM_DSB_gen(0xf)	// full system DSB
#define	ARM_DMBST	ARM_DMB_gen(0xe)	// full system DMB, orders writes only
#define	ARM_DSBST	ARM_DSB_gen(0xe)	// full system DSB, orders writes only
#define	ARM_DMBSH	ARM_DMB_gen(0xb)	// DMB encompassing inner-shareable domain
#define	ARM_DSBSH	ARM_DSB_gen(0xb)	// DMB encompassing inner-shareable domain
#define	ARM_DMBSHST	ARM_DMB_gen(0xa)	// DMB encompassing inner-shareable domain, orders writes only
#define	ARM_DSBSHST	ARM_DSB_gen(0xa)	// DMB encompassing inner-shareable domain, orders writes only

#define	ARM_ISBSY	ARM_ISB_gen(0xf)	// full system ISB

#define	ARM_NOP							asm(".word 0xe320f000 ")
#define	ARM_YIELD						asm(".word 0xe320f001 ")

// Generic barrier interface used by the kernel; the _Z__ forms also zero
// the named scratch register (which the CP15 variants below need anyway).
#define	__DATA_MEMORY_BARRIER__(reg)	ARM_DMBSH
#define	__DATA_MEMORY_BARRIER_Z__(reg)	asm("mov "#reg", #0"); ARM_DMBSH
#define	__DATA_SYNC_BARRIER__(reg)		ARM_DSBSH
#define	__DATA_SYNC_BARRIER_Z__(reg)	asm("mov "#reg", #0"); ARM_DSBSH
#define	__INST_SYNC_BARRIER__(reg)		ARM_ISBSY
#define	__INST_SYNC_BARRIER_Z__(reg)	asm("mov "#reg", #0"); ARM_ISBSY

// ARM11MPCore: barriers via CP15 c7 operations; 'reg' should contain zero.
#elif defined(__CPU_ARM11MP__)

#define	ARM_DMB(reg)					asm("mcr p15, 0, "#reg", c7, c10, 5 ")
#define	ARM_DSB(reg)					asm("mcr p15, 0, "#reg", c7, c10, 4 ")
#define	ARM_ISB(reg)					asm("mcr p15, 0, "#reg", c7, c5, 4 ")

#define	ARM_NOP							asm(".word 0xe320f000 ")
#define	ARM_YIELD						asm(".word 0xe320f001 ")

#define	__DATA_MEMORY_BARRIER__(reg)	ARM_DMB(reg)
#define	__DATA_MEMORY_BARRIER_Z__(reg)	asm("mov "#reg", #0"); ARM_DMB(reg)
#define	__DATA_SYNC_BARRIER__(reg)		ARM_DSB(reg)
#define	__DATA_SYNC_BARRIER_Z__(reg)	asm("mov "#reg", #0"); ARM_DSB(reg)
#define	__INST_SYNC_BARRIER__(reg)		ARM_ISB(reg)
#define	__INST_SYNC_BARRIER_Z__(reg)	asm("mov "#reg", #0"); ARM_ISB(reg)

// NOTE(review): the rest of this file defines and tests __CPU_ARMV6 (no
// trailing underscores, see the architecture mapping above); as written this
// branch tests __CPU_ARMV6__ and so is only taken if the build system defines
// that variant externally — otherwise ARMv6 cores fall through to the generic
// #else below.  Confirm which macro is intended before changing.
#elif defined(__CPU_ARMV6__)

#define	ARM_DMB(reg)					asm("mcr p15, 0, "#reg", c7, c10, 5 ")
#define	ARM_DSB(reg)					asm("mcr p15, 0, "#reg", c7, c10, 4 ")
#define	ARM_ISB(reg)					asm("mcr p15, 0, "#reg", c7, c5, 4 ")

#define	__DATA_MEMORY_BARRIER__(reg)	ARM_DMB(reg)
#define	__DATA_MEMORY_BARRIER_Z__(reg)	asm("mov "#reg", #0"); ARM_DMB(reg)
#define	__DATA_SYNC_BARRIER__(reg)		ARM_DSB(reg)
#define	__DATA_SYNC_BARRIER_Z__(reg)	asm("mov "#reg", #0"); ARM_DSB(reg)
#define	__INST_SYNC_BARRIER__(reg)		ARM_ISB(reg)
#define	__INST_SYNC_BARRIER_Z__(reg)	asm("mov "#reg", #0"); ARM_ISB(reg)

// Pre-v6 fallback: only the CP15 drain-write-buffer operation is available;
// DMB/ISB degenerate to nothing (uniprocessor, no need for them).
#else

#define	__DATA_MEMORY_BARRIER__(reg)
#define	__DATA_MEMORY_BARRIER_Z__(reg)	asm("mov "#reg", #0")
#define	__DATA_SYNC_BARRIER__(reg)		asm("mcr p15, 0, "#reg", c7, c10, 4 ")
#define	__DATA_SYNC_BARRIER_Z__(reg)	asm("mov "#reg", #0"); asm("mcr p15, 0, "#reg", c7, c10, 4 ")
#define	__INST_SYNC_BARRIER__(reg)
#define	__INST_SYNC_BARRIER_Z__(reg)	asm("mov "#reg", #0")

#endif
   543 
// SMP-only barriers: real barriers on SMP builds, compiled out on
// uniprocessor builds (the _Z__ forms still zero the scratch register so
// surrounding code behaves identically).
#ifdef __SMP__
#define	__SMP_DATA_MEMORY_BARRIER__(reg)	__DATA_MEMORY_BARRIER__(reg)
#define	__SMP_DATA_MEMORY_BARRIER_Z__(reg)	__DATA_MEMORY_BARRIER_Z__(reg)
#define	__SMP_DATA_SYNC_BARRIER__(reg)		__DATA_SYNC_BARRIER__(reg)
#define	__SMP_DATA_SYNC_BARRIER_Z__(reg)	__DATA_SYNC_BARRIER_Z__(reg)
#define	__SMP_INST_SYNC_BARRIER__(reg)		__INST_SYNC_BARRIER__(reg)
#define	__SMP_INST_SYNC_BARRIER_Z__(reg)	__INST_SYNC_BARRIER_Z__(reg)
#else
#define	__SMP_DATA_MEMORY_BARRIER__(reg)
#define	__SMP_DATA_MEMORY_BARRIER_Z__(reg)	asm("mov "#reg", #0")
#define	__SMP_DATA_SYNC_BARRIER__(reg)
#define	__SMP_DATA_SYNC_BARRIER_Z__(reg)	asm("mov "#reg", #0")
#define	__SMP_INST_SYNC_BARRIER__(reg)
#define	__SMP_INST_SYNC_BARRIER_Z__(reg)	asm("mov "#reg", #0")
#endif

// Wait-for-interrupt, preceded by a DSB (using r0 as scratch) so all memory
// traffic completes before the core sleeps.  Encoded as .word (conditional
// WFI, 0x0320f003 | cc<<28).
#ifdef	__CPU_ARM_HAS_WFI
#define	ARM_WFIcc(cc)					__DATA_SYNC_BARRIER__(r0); \
										asm(".word %a0" : : "i" ((TInt)(0x0320f003 | ((cc)<<28) )) )
#define	ARM_WFI		ARM_WFIcc(CC_AL)
#endif

// Wait-for-event / send-event, likewise hand-encoded unless the assembler
// ISA level permits the mnemonics.
#ifdef	__CPU_ARM_HAS_WFE_SEV
#define	ARM_WFEcc(cc)					__DATA_SYNC_BARRIER__(r0); \
										asm(".word %a0" : : "i" ((TInt)(0x0320f002 | ((cc)<<28) )) )
#if __ARM_ASSEMBLER_ISA__ >= 6
#define	ARM_WFE		__DATA_SYNC_BARRIER__(r0); \
					asm("wfe ")
#else
#define	ARM_WFE		ARM_WFEcc(CC_AL)
#endif
#define	ARM_SEVcc(cc)					asm(".word %a0" : : "i" ((TInt)(0x0320f004 | ((cc)<<28) )) )
#if __ARM_ASSEMBLER_ISA__ >= 6
#define	ARM_SEV		asm("sev ")
#else
#define	ARM_SEV		ARM_SEVcc(CC_AL)
#endif
#endif

// Fallback when no architected NOP/YIELD encoding was selected above.
#ifndef	ARM_NOP
#define	ARM_NOP							asm("nop ")
#define	ARM_YIELD						asm("nop ")
#endif
   587 
// Support for throwing exceptions through ARM embedded assembler
// Should only be needed user side
// Default to no-ops unless a prefix header supplied real definitions.
#ifndef __EH_FRAME_ADDRESS
#define	__EH_FRAME_ADDRESS(reg,offset)
#define __EH_FRAME_PUSH2(reg1,reg2) 
#define __EH_FRAME_SAVE1(reg,offset)
#endif

// StrongARM msr bug workaround: 
// (conditional msr might cause,that the next instruction is executed twice by these processors)  
// The trailing NOP absorbs the potentially re-executed instruction slot.
#ifdef __CPU_SA1__
#define __MSR_CPSR_C(cc,r)   \
				asm("msr"#cc" cpsr_c," #r);  \
				ARM_NOP; 		
#else // !__CPU_SA1__
#define __MSR_CPSR_C(cc,r) asm("msr"#cc" cpsr_c,"#r);
#endif

// Causes undefined instruction exception on both ARM and THUMB
// (0xe7ffdeff decodes as undefined in ARM state and as two undefined Thumb
// halfwords).
#define __ASM_CRASH()					asm(".word 0xe7ffdeff ")
#if defined(__GNUC__)
#define	__crash()						asm(".word 0xe7ffdeff " : : : "memory")
#elif defined(__ARMCC__)
// RVCT doesn't let us inline an undefined instruction
// use a CDP to CP15 instead - doesn't work on THUMB but never mind
#if __ARMCC_VERSION < 310000
#define	__crash()						asm("cdp p15, 0, c0, c0, c0, 0 ")
#else
// Inline assembler is deprecated in RVCT 3.1 so we use an intrinsic.
#define __crash()						__cdp(15, 0x00, 0x000)
#endif
#endif

// Macro used to get the caller of the function containing a CHECK_PRECONDITIONS()
#if defined(__ARMCC_VERSION) && __ARMCC_VERSION >= 200000
#define PRECOND_FUNCTION_CALLER		__return_address()
#endif

// Select the 64-bit (and possibly 32-bit) atomic implementation strategy:
// cores without V6K doubleword exclusives need either the slow-exec path
// (word exclusives available) or the fast-exec path (no exclusives at all).
#if !defined(__CPU_ARM_HAS_LDREX_STREX_V6K)
#if defined(__CPU_ARM_HAS_LDREX_STREX)
#define	__ATOMIC64_USE_SLOW_EXEC__
#else
#define	__ATOMIC64_USE_FAST_EXEC__
#define	__ATOMIC_USE_FAST_EXEC__
#endif
#endif

#endif	// __CPU_ARM
   636 
#ifdef	__CPU_X86
#define	EXC_TRAP_CTX_SZ		10		// ebx, esp, ebp, esi, edi, ds, es, fs, gs, eip

// Causes exception
// 'int 0xff' has no handler installed, so executing it faults the thread.
#if defined(__VC32__) || defined(__CW32__)
#define	__crash()						do { _asm int 0ffh } while(0)
#else
#define	__crash()						asm("int 0xff " : : : "memory")
#endif

#ifdef __VC32__
// Not available in the version of MSVC normally used
// #define PRECOND_FUNCTION_CALLER		((TLinAddr)_ReturnAddress())
#endif

#endif	// __CPU_X86

// GCC (any CPU): use the builtin to recover the caller's return address.
#ifdef __GCC32__
#define PRECOND_FUNCTION_CALLER		((TLinAddr)__builtin_return_address(0))
#endif

#endif