// Copyright (c) 1995-2009 Nokia Corporation and/or its subsidiary(-ies).
// All rights reserved.
// This component and the accompanying materials are made available
// under the terms of the License "Eclipse Public License v1.0"
// which accompanies this distribution, and is available
// at the URL "http://www.eclipse.org/legal/epl-v10.html".
//
// Initial Contributors:
// Nokia Corporation - initial contribution.
//
// Contributors:
//
// Description:
// e32\include\cpudefs.h
//
// Maps the build-time CPU selection macros (__CPU_xxx__) onto architecture
// level macros (__CPU_ARMV4 .. __CPU_ARMV7) and per-feature capability
// macros, and provides inline-assembler helper macros for instructions that
// older assemblers cannot encode directly.
//
// WARNING: This file contains some APIs which are internal and are subject
//          to change without notice. Such APIs should therefore not be used
//          outside the Kernel and Hardware Services package.
//

/**
 @file
 @internalTechnology
*/

#ifndef __CPUDEFS_H__
#define __CPUDEFS_H__

// NOTE(review): both branches define the same value (4); the split is kept so
// the RVCT value can be lowered independently if an instruction is rejected.
#ifdef __ARMCC__
#define	__ARM_ASSEMBLER_ISA__	4	// "Instruction not supported on targeted CPU :("
#else
#define	__ARM_ASSEMBLER_ISA__	4
#endif

// Should really have been __CPU_CORTEX_A8__ instead of __CPU_CORTEX_A8N__
#ifdef __CPU_CORTEX_A8N__
#undef __CPU_CORTEX_A8__
#define __CPU_CORTEX_A8__
#endif

//
// Supported CPUs
//

#ifdef __MARM__

#undef __CPU_SPECIFIED
#if defined(__CPU_ARM710T__)
	#define __CPU_SPECIFIED
#elif defined(__CPU_ARM720T__)
	#define __CPU_SPECIFIED
#elif defined(__CPU_SA1__)
	#define __CPU_SPECIFIED
#elif defined(__CPU_ARM920T__)
	#define __CPU_SPECIFIED
#elif defined(__CPU_ARM925T__)
	#define __CPU_SPECIFIED
#elif defined(__CPU_XSCALE__)
	#define __CPU_SPECIFIED
#elif defined(__CPU_ARM926J__)
	#define __CPU_SPECIFIED
#elif defined(__CPU_ARM1136__)
	#define __CPU_SPECIFIED
#elif defined(__CPU_ARM1176__)
	#define __CPU_SPECIFIED
#elif defined(__CPU_ARM11MP__)
	#define __CPU_SPECIFIED
#elif defined(__CPU_CORTEX_A8__)
	#define __CPU_SPECIFIED
#elif defined(__CPU_CORTEX_A9__)
	#define __CPU_SPECIFIED
#elif defined(__CPU_GENERIC_ARM4__)
	#define __CPU_SPECIFIED
#endif

#if defined(__SMP__)
	#if defined(__CPU_SPECIFIED)
		#if !defined(__CPU_ARM11MP__) && !defined(__CPU_CORTEX_A9__)
			#error Specified CPU does not support SMP
		#endif
	#else
	// If no CPU specified, assume lowest common denominator SMP
	#define	__CPU_ARM11MP__
	#endif
#endif

// Map the selected CPU onto its architecture level and per-CPU extras.
#if defined(__CPU_ARM710T__)
	#define __CPU_ARMV4T

#elif defined(__CPU_ARM720T__)
	#define __CPU_ARMV4T

#elif defined(__CPU_SA1__)
	#define __CPU_ARMV4

#elif defined(__CPU_ARM920T__)
	#define __CPU_ARMV4T

#elif defined(__CPU_ARM925T__)
	#define __CPU_ARMV4T

#elif defined(__CPU_XSCALE__)
	#define __CPU_ARMV5T
	#define __ENHANCED_DSP_INSTRUCTIONS

#elif defined(__CPU_ARM926J__)
	#define __CPU_ARMV5T
	#define __ENHANCED_DSP_INSTRUCTIONS
	#define __CPU_HAS_JAZELLE

#elif defined(__CPU_ARM1136__)
	#define __CPU_ARMV6

#elif defined(__CPU_ARM1176__)
	#define __CPU_ARMV6

#elif defined(__CPU_ARM11MP__)
	#define __CPU_ARMV6
	#define	__CPU_ARM_HAS_WFI
	#define	__CPU_ARM_HAS_WFE_SEV

#elif defined(__CPU_CORTEX_A8__)
	#define __CPU_ARMV7

#elif defined(__CPU_CORTEX_A9__)
	#define __CPU_ARMV7

#elif defined(__CPU_GENERIC_ARM4__)
	#define __CPU_ARMV4

#else
	// #error Unsupported CPU
	#define __CPU_UNKNOWN
#endif

#endif  // __MARM__



// Macros for emitting single bytes of machine code
#ifdef __CW32__
# define BYTE(x)	_asm byte x
#elif __GCC32__
# define BYTE(x)	asm(".byte "#x);
#else
# define BYTE(x)	_asm _emit x
#endif


// thiscall is different on GCC: the implicit 'this' argument is pushed on the
// stack rather than passed in ecx, so the prologs shuffle it into ecx and
// close the gap; the _BIGRETVAL variants account for the hidden return-value
// pointer occupying [esp+4].
#ifdef __GCC32__
#define THISCALL_PROLOG0() asm("mov ecx,[esp+4]");
#define THISCALL_PROLOG1() asm("mov ecx,[esp+4] \n mov eax,[esp+8] \n mov [esp+4],eax");
#define THISCALL_PROLOG2() asm("mov ecx,[esp+4] \n mov eax,[esp+8] \n mov [esp+4],eax \n mov eax,[esp+12] \n mov [esp+8],eax");
#define THISCALL_PROLOG3() asm("mov ecx,[esp+4] \n mov eax,[esp+8] \n mov [esp+4],eax \n mov eax,[esp+12] \n mov [esp+8],eax \n mov eax,[esp+16] \n mov [esp+12],eax");
#define THISCALL_PROLOG0_BIGRETVAL() asm("mov ecx,[esp+8]");
#define THISCALL_PROLOG1_BIGRETVAL() asm("mov ecx,[esp+8] \n mov eax,[esp+12] \n mov [esp+8],eax");
#define THISCALL_EPILOG0() asm("ret");
#define THISCALL_EPILOG1() asm("ret");
#define THISCALL_EPILOG2() asm("ret");
#define THISCALL_EPILOG3() asm("ret");
#define THISCALL_EPILOG0_BIGRETVAL() asm("ret 4");
#define THISCALL_EPILOG1_BIGRETVAL() asm("ret 4");
#else
#define THISCALL_PROLOG0()
#define THISCALL_PROLOG1()
#define THISCALL_PROLOG2()
#define THISCALL_PROLOG3()
#define THISCALL_PROLOG0_BIGRETVAL() 
#define THISCALL_PROLOG1_BIGRETVAL() 
#define THISCALL_EPILOG0() __asm ret
#define THISCALL_EPILOG1() __asm ret 4
#define THISCALL_EPILOG2() __asm ret 8
#define THISCALL_EPILOG3() __asm ret 12
#define THISCALL_EPILOG0_BIGRETVAL() __asm ret 4
#define THISCALL_EPILOG1_BIGRETVAL() __asm ret 8
#endif


// Workaround for MSVC++ 5.0 bug; MSVC incorrectly fixes up conditional jumps
// when the destination is a C++ function.
#if defined(__VC32__) && (_MSC_VER==1100)	// untested on MSVC++ > 5.0
# define _ASM_j(cond,dest) _asm jn##cond short $+11 _asm jmp dest
# define _ASM_jn(cond,dest) _asm j##cond short $+11 _asm jmp dest
#else
# if defined __GCC32__
#  define _ASM_j(cond,dest) asm("j"#cond " %a0": : "i"(dest));
#  define _ASM_jn(cond,dest) asm("jn"#cond " %a0": :"i"(dest));
# else
#  define _ASM_j(cond,dest) _asm j##cond dest
#  define _ASM_jn(cond,dest) _asm jn##cond dest
# endif
#endif



//#define __MINIMUM_MACHINE_CODE__

#if defined(__WINS__)
#define __NAKED__ __declspec( naked )
#ifndef __MINIMUM_MACHINE_CODE__
//#define __MEM_MACHINE_CODED__
#endif
#define __CPU_X86
#endif

#if defined(__X86__)
# ifdef __GCC32__
#  define __NAKED__	// GCC does not support naked functions on X86
# else
#  define __NAKED__ __declspec( naked )
# endif
# ifndef __MINIMUM_MACHINE_CODE__
#  define __MEM_MACHINE_CODED__
# endif
# define __CPU_X86
#endif


#if defined(__MARM__)
#ifndef __NAKED__ // should be defined in prefix file
	#ifndef __GCCXML__
        #define __NAKED__ __declspec( naked )
    #else
        #define __NAKED__
    #endif
#endif
// Outside .cia translation units, poison __NAKED__ so accidental use in
// ordinary C++ fails to compile.
#ifndef __CIA__
#undef __NAKED__
#define __NAKED__ ____ONLY_USE_NAKED_IN_CIA____
#endif
	#define __CPU_ARM

#if defined(__MARM_ARMV5__) && !defined(__CPU_ARMV5T)
#define __CPU_ARMV5T
#endif

#ifndef __MINIMUM_MACHINE_CODE__
#if !defined(__BIG_ENDIAN__)
	#define __MEM_MACHINE_CODED__
	#define __DES_MACHINE_CODED__
	#define __REGIONS_MACHINE_CODED__
	#define __DES8_MACHINE_CODED__
	#define __DES16_MACHINE_CODED__
	#define __HEAP_MACHINE_CODED__
	#define __REALS_MACHINE_CODED__
	#define __COBJECT_MACHINE_CODED__
	#define __CACTIVESCHEDULER_MACHINE_CODED__
	#define __CSERVER_MACHINE_CODED__
	#define __ARRAY_MACHINE_CODED__
	#define __HUFFMAN_MACHINE_CODED__
#if defined(__MARM_ARM4__) || defined(__MARM_ARMI__) || defined(__MARM_THUMB__) || defined(__MARM_ARMV4__) || defined(__MARM_ARMV5__)
	#define __DES16_MACHINE_CODED_HWORD__
#endif
#endif
#endif
#endif

// Capability macros implied by each architecture level.
#ifdef __CPU_ARMV4
	#define __CPU_64BIT_MULTIPLY
#endif
#ifdef __CPU_ARMV4T
	#define __CPU_THUMB
	#define __CPU_ARM_SUPPORTS_BX
	#define __CPU_64BIT_MULTIPLY
#endif
#ifdef __CPU_ARMV5T
	#define __CPU_THUMB
	#define __CPU_ARM_SUPPORTS_BX
	#define __CPU_ARM_SUPPORTS_BLX
	#define __CPU_64BIT_MULTIPLY
	#define __CPU_ARM_LDR_PC_SETS_TBIT
	#define __CPU_ARM_HAS_CLZ
	#define __CPU_ARM_HAS_PLD
#endif
#ifdef __ENHANCED_DSP_INSTRUCTIONS
	#define __CPU_ARM_HAS_MCRR
	#define __CPU_ARM_HAS_LDRD_STRD
#endif
#if defined(__CPU_ARMV6) || defined(__CPU_ARMV7)
	#define __CPU_THUMB
	#define __CPU_ARM_SUPPORTS_BX
	#define __CPU_ARM_SUPPORTS_BLX
	#define __CPU_64BIT_MULTIPLY
	#define __CPU_ARM_LDR_PC_SETS_TBIT
	#define __CPU_ARM_HAS_CLZ
	#define __CPU_ARM_HAS_MCRR
	#define __CPU_ARM_HAS_LDREX_STREX
	#define __CPU_ARM_HAS_LDRD_STRD
	#define __CPU_ARM_HAS_PLD
	#define __CPU_ARM_HAS_CPS
	#define __CPU_ARM_HAS_SPLIT_FSR
#if !defined(__CPU_ARM1136__) && !defined(__CPU_ARM11MP__)
	#define __CPU_ARM_HAS_CP15_IFAR
#endif
	#define	__CPU_ARM_SUPPORTS_USER_MODE_BARRIERS
#endif
#if defined(__CPU_ARMV7) || (defined(__CPU_ARM1136__) && defined(__CPU_ARM1136_IS_R1__)) || defined(__CPU_ARM1176__) || defined(__CPU_ARM11MP__)
	#define __CPU_ARM_HAS_LDREX_STREX_V6K
	#define __CPU_HAS_CP15_THREAD_ID_REG
#endif
#if defined(__MARM_ARM4T__) || defined(__MARM_INTERWORK__)
	#define __SUPPORT_THUMB_INTERWORKING
#endif
#if defined(__CPU_ARMV7)
#define	__CPU_ARM_HAS_WFI
#define	__CPU_ARM_HAS_WFE_SEV
#define __CPU_THUMB2
#define __CPU_SUPPORT_THUMB2EE
#endif


// ARM CPU macros to allow Thumb/Non-thumb builds
#ifdef __CPU_ARM

#define	EXC_TRAP_CTX_SZ		10		// Nonvolatile registers + sp + pc

// Return/jump macros: when interworking, indirect jumps must use BX (or an
// LDR into PC on architectures where that sets the T bit) so Thumb callers
// are returned to correctly.
#ifdef __SUPPORT_THUMB_INTERWORKING
#define __JUMP(cc,r) asm("bx"#cc " "#r )
#ifdef __CPU_ARM_LDR_PC_SETS_TBIT
#define __POPRET(rlist) asm("ldmfd sp!, {"rlist"pc} ")
#define __CPOPRET(cc,rlist) asm("ldm"#cc "fd sp!, {"rlist"pc} ")
#else
#define __POPRET(rlist) asm("ldmfd sp!, {"rlist"lr} ");\
						asm("bx lr ")
#define __CPOPRET(cc,rlist)	asm("ldm"#cc "fd sp!, {"rlist"lr} ");\
							asm("bx"#cc " lr ")
#endif
#else
#define __JUMP(cc,r) asm("mov"#cc " pc, "#r )
#define __POPRET(rlist) asm("ldmfd sp!, {"rlist"pc} ")
#define __CPOPRET(cc,rlist) asm("ldm"#cc "fd sp!, {"rlist"pc} ")
#endif

#ifdef __CPU_ARM_SUPPORTS_BLX
#if __ARM_ASSEMBLER_ISA__ >= 5
#define BLX(Rm)							asm("blx r" #Rm)
#else
// Assembler too old for BLX: emit the opcode word directly.
#define BLX(Rm)							asm(".word %a0" : : "i" ((TInt)( 0xe12fff30 | (Rm) )))
#endif
#define __JUMPL(Rm) BLX(Rm)
#else
#ifdef __SUPPORT_THUMB_INTERWORKING
#define __JUMPL(Rm) asm("mov lr, pc "); \
                    asm("bx r"#Rm )
#else
#define __JUMPL(Rm) asm("mov lr, pc "); \
                    asm("mov pc, r"#Rm )
#endif
#endif

#ifdef __MARM_THUMB__
#ifndef __ARMCC__
#define __SWITCH_TO_ARM		asm("push {r0} ");\
							asm("add r0, pc, #4 ");\
							asm("bx r0 ");\
							asm("nop ");\
							asm(".align 2 ");\
							asm(".code 32 ");\
							asm("ldr r0, [sp], #4 ")
#define __END_ARM			asm(".code 16 ")
#else
#define __SWITCH_TO_ARM        asm(".code 32 ");
#define __END_ARM
#endif
#else
#define __SWITCH_TO_ARM
#define __END_ARM
#endif

// ARM condition-code field values (bits 31:28 of an instruction word),
// used when hand-encoding conditional instructions below.
#define CC_EQ	0
#define	CC_NE	1
#define CC_CS	2
#define CC_CC	3
#define CC_MI	4
#define CC_PL	5
#define CC_VS	6
#define CC_VC	7
#define CC_HI	8
#define CC_LS	9
#define CC_GE	10
#define CC_LT	11
#define CC_GT	12
#define CC_LE	13
#define	CC_AL	14

#ifdef __CPU_ARM_HAS_CLZ
#if __ARM_ASSEMBLER_ISA__ >= 5
#define CLZ(Rd,Rm)		asm("clz r" #Rd ", r" #Rm)
#else
#define CLZ(Rd,Rm)		asm(".word %a0" : : "i" ((TInt)0xe16f0f10|((Rd)<<12)|(Rm)));
#endif
#define CLZcc(cc,Rd,Rm)	asm(".word %a0" : : "i" ((TInt)0x016f0f10|((cc)<<28)|((Rd)<<12)|(Rm)));
#endif
#ifdef __CPU_ARM_HAS_MCRR
#define MCRR(cop,opc,Rd,Rn,CRm)			asm(".word %a0" : : "i" ((TInt)0xec400000|((Rn)<<16)|((Rd)<<12)|((cop)<<8)|((opc)<<4)|(CRm)));
#define MCRRcc(cc,cop,opc,Rd,Rn,CRm)	asm(".word %a0" : : "i" ((TInt)0x0c400000|((cc)<<28)|((Rn)<<16)|((Rd)<<12)|((cop)<<8)|((opc)<<4)|(CRm)));
#define MRRC(cop,opc,Rd,Rn,CRm)			asm(".word %a0" : : "i" ((TInt)0xec500000|((Rn)<<16)|((Rd)<<12)|((cop)<<8)|((opc)<<4)|(CRm)));
#define MRRCcc(cc,cop,opc,Rd,Rn,CRm)	asm(".word %a0" : : "i" ((TInt)0x0c500000|((cc)<<28)|((Rn)<<16)|((Rd)<<12)|((cop)<<8)|((opc)<<4)|(CRm)));
#endif
#ifdef __CPU_ARM_HAS_LDREX_STREX
// LDREX Rd, [Rn] 		- load from [Rn] into Rd exclusive
// STREX Rd, Rm, [Rn] 	- store Rm into [Rn] with exclusive access; success/fail indicator into Rd
#define LDREXcc(cc,Rd,Rn)				asm(".word %a0" : : "i" ((TInt)(0x01900f9f|((cc)<<28)|((Rd)<<12)|((Rn)<<16))));
#define STREXcc(cc,Rd,Rm,Rn)			asm(".word %a0" : : "i" ((TInt)(0x01800f90|((cc)<<28)|((Rd)<<12)|(Rm)|((Rn)<<16))));
#if __ARM_ASSEMBLER_ISA__ >= 6
#define LDREX(Rd,Rn)					asm("ldrex r" #Rd ", [r" #Rn "] ")
#define STREX(Rd,Rm,Rn)					asm("strex r" #Rd ", r" #Rm ", [r" #Rn "] ")
#else
#define LDREX(Rd,Rn)					asm(".word %a0" : : "i" ((TInt)(0x01900f9f|((CC_AL)<<28)|((Rd)<<12)|((Rn)<<16))));
#define STREX(Rd,Rm,Rn)					asm(".word %a0" : : "i" ((TInt)(0x01800f90|((CC_AL)<<28)|((Rd)<<12)|(Rm)|((Rn)<<16))));
#endif
#endif
#ifdef __CPU_ARM_HAS_LDREX_STREX_V6K
// Byte, halfword, doubleword STREX/LDREX & unconditional CLREX
#if __ARM_ASSEMBLER_ISA__ >= 6
#define LDREXB(Rd,Rn)					asm("ldrexb r" #Rd ", [r" #Rn "] ")
#define STREXB(Rd,Rm,Rn)				asm("strexb r" #Rd ", r" #Rm ", [r" #Rn "] ")
#define LDREXH(Rd,Rn)					asm("ldrexh r" #Rd ", [r" #Rn "] ")
#define STREXH(Rd,Rm,Rn)				asm("strexh r" #Rd ", r" #Rm ", [r" #Rn "] ")
#define LDREXD(Rd,Rn)					asm("ldrexd r" #Rd ", [r" #Rn "] ")
#define STREXD(Rd,Rm,Rn)				asm("strexd r" #Rd ", r" #Rm ", [r" #Rn "] ")
#else
#define LDREXB(Rd,Rn)					asm(".word %a0" : : "i" ((TInt)(0x01D00f9f|((CC_AL)<<28)|((Rd)<<12)|((Rn)<<16))));
#define STREXB(Rd,Rm,Rn)				asm(".word %a0" : : "i" ((TInt)(0x01C00f90|((CC_AL)<<28)|((Rd)<<12)|(Rm)|((Rn)<<16))));
#define LDREXH(Rd,Rn)					asm(".word %a0" : : "i" ((TInt)(0x01f00f9f|((CC_AL)<<28)|((Rd)<<12)|((Rn)<<16))));
#define STREXH(Rd,Rm,Rn)				asm(".word %a0" : : "i" ((TInt)(0x01e00f90|((CC_AL)<<28)|((Rd)<<12)|(Rm)|((Rn)<<16))));
#define LDREXD(Rd,Rn)					asm(".word %a0" : : "i" ((TInt)(0x01b00f9f|((CC_AL)<<28)|((Rd)<<12)|((Rn)<<16))));
#define STREXD(Rd,Rm,Rn)				asm(".word %a0" : : "i" ((TInt)(0x01a00f90|((CC_AL)<<28)|((Rd)<<12)|(Rm)|((Rn)<<16))));
#endif
#if !defined(__CPU_ARM1136__) || defined(__CPU_ARM1136_ERRATUM_406973_FIXED)
#define __CPU_ARM_HAS_WORKING_CLREX
#if __ARM_ASSEMBLER_ISA__ >= 6
#define CLREX							asm("clrex ")
#else
#define CLREX							asm(".word %a0" : : "i" ((TInt)(0xf57ff01f)));
#endif
#endif
#endif 
#ifdef __CPU_ARM_HAS_LDRD_STRD
#if __ARM_ASSEMBLER_ISA__ >= 5
#define LDRD(Rd,Rn)						asm("ldrd r" #Rd ", [r" #Rn "] ")
#define STRD(Rd,Rn)						asm("strd r" #Rd ", [r" #Rn "] ")
#else
#define LDRD(Rd,Rn)						asm(".word %a0" : : "i" ((TInt)( 0xe1c000d0 | ((Rn)<<16) | ((Rd)<<12) )))
#define STRD(Rd,Rn)						asm(".word %a0" : : "i" ((TInt)( 0xe1c000f0 | ((Rn)<<16) | ((Rd)<<12) )))
#endif
#define LDRD_ioff(Rd,Rn,off)			asm(".word %a0" : : "i" ((TInt)( 0xe1c000d0 | ((Rn)<<16) | ((Rd)<<12) | (((off)&0xf0)<<4) | ((off)&0x0f) )))
#define STRD_ioff(Rd,Rn,off)			asm(".word %a0" : : "i" ((TInt)( 0xe1c000f0 | ((Rn)<<16) | ((Rd)<<12) | (((off)&0xf0)<<4) | ((off)&0x0f) )))
#endif
#if defined(__CPU_ARM_HAS_PLD) && !defined(__CPU_ARM926J__) && !defined(__CPU_UNKNOWN)		// PLD is a no-op on ARM926
#if __ARM_ASSEMBLER_ISA__ >= 5
#define PLD(Rn)							asm("pld [r" #Rn "] ")
#else
#define PLD(Rn)							asm(".word %a0" : : "i" ((TInt)( 0xf5d0f000 | ((Rn)<<16) )))
#endif
#define PLD_ioff(Rn, off)				asm(".word %a0" : : "i" ((TInt)( 0xf5d0f000 | ((Rn)<<16) | (off) )))	// preload with immediate offset
#define PLD_noff(Rn, off)				asm(".word %a0" : : "i" ((TInt)( 0xf550f000 | ((Rn)<<16) | (off) )))	// preload with negative offset
#else
#define PLD(Rn)
#define PLD_ioff(Rn, off)
#define PLD_noff(Rn, off)
#endif
#ifdef __CPU_HAS_CP15_THREAD_ID_REG
#define GET_RWRW_TID(cc,r)				asm("mrc"#cc" p15, 0, "#r", c13, c0, 2 ");
#define GET_RWRO_TID(cc,r)				asm("mrc"#cc" p15, 0, "#r", c13, c0, 3 ");
#define GET_RWNO_TID(cc,r)				asm("mrc"#cc" p15, 0, "#r", c13, c0, 4 ");
#define SET_RWRW_TID(cc,r)				asm("mcr"#cc" p15, 0, "#r", c13, c0, 2 ");
#define SET_RWRO_TID(cc,r)				asm("mcr"#cc" p15, 0, "#r", c13, c0, 3 ");
#define SET_RWNO_TID(cc,r)				asm("mcr"#cc" p15, 0, "#r", c13, c0, 4 ");
#endif

#ifdef __CPU_SUPPORT_THUMB2EE
#define GET_THUMB2EE_HNDLR_BASE(cc,r)	asm("mrc"#cc" p14, 6, "#r", c1, c0, 0 ")
#define SET_THUMB2EE_HNDLR_BASE(cc,r)	asm("mcr"#cc" p14, 6, "#r", c1, c0, 0 ")
#endif

#if defined(__CPU_ARMV7)
#define	ARM_DMB_gen(opt)				asm(".word %a0" : : "i" ((TInt)(0xf57ff050 | (opt) )) )
#define	ARM_DSB_gen(opt)				asm(".word %a0" : : "i" ((TInt)(0xf57ff040 | (opt) )) )
#define	ARM_ISB_gen(opt)				asm(".word %a0" : : "i" ((TInt)(0xf57ff060 | (opt) )) )

#define	ARM_DMBSY	ARM_DMB_gen(0xf)	// full system DMB
#define	ARM_DSBSY	ARM_DSB_gen(0xf)	// full system DSB
#define	ARM_DMBST	ARM_DMB_gen(0xe)	// full system DMB, orders writes only
#define	ARM_DSBST	ARM_DSB_gen(0xe)	// full system DSB, orders writes only
#define	ARM_DMBSH	ARM_DMB_gen(0xb)	// DMB encompassing inner-shareable domain
#define	ARM_DSBSH	ARM_DSB_gen(0xb)	// DMB encompassing inner-shareable domain
#define	ARM_DMBSHST	ARM_DMB_gen(0xa)	// DMB encompassing inner-shareable domain, orders writes only
#define	ARM_DSBSHST	ARM_DSB_gen(0xa)	// DMB encompassing inner-shareable domain, orders writes only

#define	ARM_ISBSY	ARM_ISB_gen(0xf)	// full system ISB

#define	ARM_NOP							asm(".word 0xe320f000 ")
#define	ARM_YIELD						asm(".word 0xe320f001 ")

#define	__DATA_MEMORY_BARRIER__(reg)	ARM_DMBSH
#define	__DATA_MEMORY_BARRIER_Z__(reg)	asm("mov "#reg", #0"); ARM_DMBSH
#define	__DATA_SYNC_BARRIER__(reg)		ARM_DSBSH
#define	__DATA_SYNC_BARRIER_Z__(reg)	asm("mov "#reg", #0"); ARM_DSBSH
#define	__INST_SYNC_BARRIER__(reg)		ARM_ISBSY
#define	__INST_SYNC_BARRIER_Z__(reg)	asm("mov "#reg", #0"); ARM_ISBSY

#elif defined(__CPU_ARM11MP__)

#define	ARM_DMB(reg)					asm("mcr p15, 0, "#reg", c7, c10, 5 ")
#define	ARM_DSB(reg)					asm("mcr p15, 0, "#reg", c7, c10, 4 ")
#define	ARM_ISB(reg)					asm("mcr p15, 0, "#reg", c7, c5, 4 ")

#define	ARM_NOP							asm(".word 0xe320f000 ")
#define	ARM_YIELD						asm(".word 0xe320f001 ")

#define	__DATA_MEMORY_BARRIER__(reg)	ARM_DMB(reg)
#define	__DATA_MEMORY_BARRIER_Z__(reg)	asm("mov "#reg", #0"); ARM_DMB(reg)
#define	__DATA_SYNC_BARRIER__(reg)		ARM_DSB(reg)
#define	__DATA_SYNC_BARRIER_Z__(reg)	asm("mov "#reg", #0"); ARM_DSB(reg)
#define	__INST_SYNC_BARRIER__(reg)		ARM_ISB(reg)
#define	__INST_SYNC_BARRIER_Z__(reg)	asm("mov "#reg", #0"); ARM_ISB(reg)

// NOTE(review): this file defines __CPU_ARMV6 (no trailing underscores), so
// the branch below only fires if __CPU_ARMV6__ is supplied externally --
// confirm against the build system before relying on it.
#elif defined(__CPU_ARMV6__)

#define	ARM_DMB(reg)					asm("mcr p15, 0, "#reg", c7, c10, 5 ")
#define	ARM_DSB(reg)					asm("mcr p15, 0, "#reg", c7, c10, 4 ")
#define	ARM_ISB(reg)					asm("mcr p15, 0, "#reg", c7, c5, 4 ")

#define	__DATA_MEMORY_BARRIER__(reg)	ARM_DMB(reg)
#define	__DATA_MEMORY_BARRIER_Z__(reg)	asm("mov "#reg", #0"); ARM_DMB(reg)
#define	__DATA_SYNC_BARRIER__(reg)		ARM_DSB(reg)
#define	__DATA_SYNC_BARRIER_Z__(reg)	asm("mov "#reg", #0"); ARM_DSB(reg)
#define	__INST_SYNC_BARRIER__(reg)		ARM_ISB(reg)
#define	__INST_SYNC_BARRIER_Z__(reg)	asm("mov "#reg", #0"); ARM_ISB(reg)

#else

#define	__DATA_MEMORY_BARRIER__(reg)
#define	__DATA_MEMORY_BARRIER_Z__(reg)	asm("mov "#reg", #0")
#define	__DATA_SYNC_BARRIER__(reg)		asm("mcr p15, 0, "#reg", c7, c10, 4 ")
#define	__DATA_SYNC_BARRIER_Z__(reg)	asm("mov "#reg", #0"); asm("mcr p15, 0, "#reg", c7, c10, 4 ")
#define	__INST_SYNC_BARRIER__(reg)
#define	__INST_SYNC_BARRIER_Z__(reg)	asm("mov "#reg", #0")

#endif

// SMP variants collapse to no-ops on unicore builds.
#ifdef __SMP__
#define	__SMP_DATA_MEMORY_BARRIER__(reg)	__DATA_MEMORY_BARRIER__(reg)
#define	__SMP_DATA_MEMORY_BARRIER_Z__(reg)	__DATA_MEMORY_BARRIER_Z__(reg)
#define	__SMP_DATA_SYNC_BARRIER__(reg)		__DATA_SYNC_BARRIER__(reg)
#define	__SMP_DATA_SYNC_BARRIER_Z__(reg)	__DATA_SYNC_BARRIER_Z__(reg)
#define	__SMP_INST_SYNC_BARRIER__(reg)		__INST_SYNC_BARRIER__(reg)
#define	__SMP_INST_SYNC_BARRIER_Z__(reg)	__INST_SYNC_BARRIER_Z__(reg)
#else
#define	__SMP_DATA_MEMORY_BARRIER__(reg)
#define	__SMP_DATA_MEMORY_BARRIER_Z__(reg)	asm("mov "#reg", #0")
#define	__SMP_DATA_SYNC_BARRIER__(reg)
#define	__SMP_DATA_SYNC_BARRIER_Z__(reg)	asm("mov "#reg", #0")
#define	__SMP_INST_SYNC_BARRIER__(reg)
#define	__SMP_INST_SYNC_BARRIER_Z__(reg)	asm("mov "#reg", #0")
#endif

#ifdef	__CPU_ARM_HAS_WFI
#define	ARM_WFIcc(cc)					__DATA_SYNC_BARRIER__(r0); \
										asm(".word %a0" : : "i" ((TInt)(0x0320f003 | ((cc)<<28) )) )
#define	ARM_WFI		ARM_WFIcc(CC_AL)
#endif

#ifdef	__CPU_ARM_HAS_WFE_SEV
#define	ARM_WFEcc(cc)					__DATA_SYNC_BARRIER__(r0); \
										asm(".word %a0" : : "i" ((TInt)(0x0320f002 | ((cc)<<28) )) )
#if __ARM_ASSEMBLER_ISA__ >= 6
#define	ARM_WFE		__DATA_SYNC_BARRIER__(r0); \
					asm("wfe ")
#else
#define	ARM_WFE		ARM_WFEcc(CC_AL)
#endif
#define	ARM_SEVcc(cc)					asm(".word %a0" : : "i" ((TInt)(0x0320f004 | ((cc)<<28) )) )
#if __ARM_ASSEMBLER_ISA__ >= 6
#define	ARM_SEV		asm("sev ")
#else
#define	ARM_SEV		ARM_SEVcc(CC_AL)
#endif
#endif

#ifndef	ARM_NOP
#define	ARM_NOP							asm("nop ")
#define	ARM_YIELD						asm("nop ")
#endif

// Support for throwing exceptions through ARM embedded assembler
// Should only be needed user side
#ifndef __EH_FRAME_ADDRESS
#define	__EH_FRAME_ADDRESS(reg,offset)
#define __EH_FRAME_PUSH2(reg1,reg2) 
#define __EH_FRAME_SAVE1(reg,offset)
#endif

// StrongARM msr bug workaround: 
// (conditional msr might cause,that the next instruction is executed twice by these processors)  
#ifdef __CPU_SA1__
#define __MSR_CPSR_C(cc,r)   \
				asm("msr"#cc" cpsr_c," #r);  \
				ARM_NOP; 		
#else // !__CPU_SA1__
#define __MSR_CPSR_C(cc,r) asm("msr"#cc" cpsr_c,"#r);
#endif

// Causes undefined instruction exception on both ARM and THUMB
#define __ASM_CRASH()					asm(".word 0xe7ffdeff ")
#if defined(__GNUC__)
#define	__crash()						asm(".word 0xe7ffdeff " : : : "memory")
#elif defined(__ARMCC__)
// RVCT doesn't let us inline an undefined instruction
// use a CDP to CP15 instead - doesn't work on THUMB but never mind
#if __ARMCC_VERSION < 310000
#define	__crash()						asm("cdp p15, 0, c0, c0, c0, 0 ")
#else
// Inline assembler is deprecated in RVCT 3.1 so we use an intrinsic.
#define __crash()						__cdp(15, 0x00, 0x000)
#endif
#endif

// Macro used to get the caller of the function containing a CHECK_PRECONDITIONS()
#if defined(__ARMCC_VERSION) && __ARMCC_VERSION >= 200000
#define PRECOND_FUNCTION_CALLER		__return_address()
#endif

// Atomic-operation implementation selection: without 64-bit LDREXD/STREXD the
// 64-bit atomics need an alternative execution strategy.
#if !defined(__CPU_ARM_HAS_LDREX_STREX_V6K)
#if defined(__CPU_ARM_HAS_LDREX_STREX)
#define	__ATOMIC64_USE_SLOW_EXEC__
#else
#define	__ATOMIC64_USE_FAST_EXEC__
#define	__ATOMIC_USE_FAST_EXEC__
#endif
#endif

#endif	// __CPU_ARM

#ifdef	__CPU_X86
#define	EXC_TRAP_CTX_SZ		10		// ebx, esp, ebp, esi, edi, ds, es, fs, gs, eip

// Causes exception
#if defined(__VC32__) || defined(__CW32__)
#define	__crash()						do { _asm int 0ffh } while(0)
#else
#define	__crash()						asm("int 0xff " : : : "memory")
#endif

#ifdef __VC32__
// Not available in the version of MSVC normally used
// #define PRECOND_FUNCTION_CALLER		((TLinAddr)_ReturnAddress())
#endif

#endif	// __CPU_X86

#ifdef __GCC32__
#define PRECOND_FUNCTION_CALLER		((TLinAddr)__builtin_return_address(0))
#endif

#endif