os/kernelhwsrv/kernel/eka/nkern/arm/ncutils.cia
// Copyright (c) 1994-2009 Nokia Corporation and/or its subsidiary(-ies).
// All rights reserved.
// This component and the accompanying materials are made available
// under the terms of the License "Eclipse Public License v1.0"
// which accompanies this distribution, and is available
// at the URL "http://www.eclipse.org/legal/epl-v10.html".
//
// Initial Contributors:
// Nokia Corporation - initial contribution.
//
// Contributors:
//
// Description:
// e32\nkern\arm\ncutils.cia
//
//

#include <e32cia.h>
#include <arm.h>

//#define __DBG_MON_FAULT__
//#define __RAM_LOADED_CODE__
//#define __EARLY_DEBUG__

#ifdef _DEBUG
#define ASM_KILL_LINK(rp,rs)	asm("mov "#rs", #0xdf ");\
								asm("orr "#rs", "#rs", "#rs", lsl #8 ");\
								asm("orr "#rs", "#rs", "#rs", lsl #16 ");\
								asm("str "#rs", ["#rp"] ");\
								asm("str "#rs", ["#rp", #4] ");
#else
#define ASM_KILL_LINK(rp,rs)
#endif


#ifdef __PRI_LIST_MACHINE_CODED__
/** Return the priority of the highest priority item present on a priority list.

	@return	The highest priority present or -1 if the list is empty.
 */
EXPORT_C __NAKED__ TInt TPriListBase::HighestPriority()
	{
#ifdef __CPU_ARM_HAS_CLZ
	asm("ldr r2, [r0, #4] ");				// r2=iPresent MSW
	asm("ldr r1, [r0, #0] ");				// r1=iPresent LSW
	CLZ(0,2);								// r0=31-MSB(r2)
	asm("subs r0, r0, #32 ");				// r0=-1-MSB(r2), 0 if r2=0
	CLZcc(CC_EQ,0,1);						// if r2=0, r0=31-MSB(r1)
	asm("rsb r0, r0, #31 ");				// r0=highest priority
#else
	asm("ldmia r0, {r1,r2} ");				// r2:r1=iPresent
	asm("mov r0, #31 ");					// start at 31
	asm("cmp r2, #0 ");						// high word non-zero?
	asm("movne r0, #63 ");					// if so, start at 63
	asm("movne r1, r2 ");					// and set r1=high word
	asm("cmp r1, #0 ");
	asm("beq highest_pri_0 ");
	asm("cmp r1, #0x00010000 ");
	asm("movcc r1, r1, lsl #16 ");
	asm("subcc r0, r0, #16 ");
	asm("cmp r1, #0x01000000 ");
	asm("movcc r1, r1, lsl #8 ");
	asm("subcc r0, r0, #8 ");
	asm("cmp r1, #0x10000000 ");
	asm("movcc r1, r1, lsl #4 ");
	asm("subcc r0, r0, #4 ");
	asm("cmp r1, #0x40000000 ");
	asm("movcc r1, r1, lsl #2 ");
	asm("subcc r0, r0, #2 ");
	asm("cmp r1, #0x80000000 ");
	asm("subcc r0, r0, #1 ");
	__JUMP(,lr);
	asm("highest_pri_0: ");
	asm("mvn r0, #0 ");						// if list empty, return -1
#endif
	__JUMP(,lr);
	}
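
/* Illustrative C++ equivalent of the CLZ path above (not part of the build; it assumes
   the layout described in the comments - a 64-bit present mask stored low word first -
   and uses __builtin_clz as a stand-in for the CLZ instruction):

	TInt HighestPriorityEquivalent(TUint32 aPresentLow, TUint32 aPresentHigh)
		{
		if (aPresentHigh)
			return 63 - __builtin_clz(aPresentHigh);	// bit set in MS word => priority 32-63
		if (aPresentLow)
			return 31 - __builtin_clz(aPresentLow);		// otherwise search LS word => priority 0-31
		return -1;										// both words zero => list empty
		}
*/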

/** Find the highest priority item present on a priority list.
	If multiple items at the same priority are present, return the first to be
	added in chronological order.

	@return	a pointer to the item or NULL if the list is empty.
 */
EXPORT_C __NAKED__ TPriListLink* TPriListBase::First()
	{
#ifdef __CPU_ARM_HAS_CLZ
	asm("ldr r2, [r0, #4] ");				// r2=iPresent MSW
	asm("ldr r1, [r0], #8 ");				// r1=iPresent LSW, r0=&iQueue[0]
	CLZ(3,2);								// r3=31-MSB(r2)
	asm("subs r3, r3, #32 ");				// r3=-1-MSB(r2), 0 if r2=0
	CLZcc(CC_EQ,3,1);						// if r2=0, r3=31-MSB(r1)
	asm("rsbs r3, r3, #31 ");				// r3=highest priority
	asm("ldrpl r0, [r0, r3, lsl #2] ");		// if r3>=0 list is nonempty, r0->first entry
	asm("movmi r0, #0 ");					// if r3<0 list empty, return NULL
#else
	asm("ldmia r0!, {r1,r2} ");				// r2:r1=iPresent, r0=&iQueue[0]
	asm("cmp r2, #0 ");						// high word non-zero?
	asm("addne r0, r0, #128 ");				// if so, r0=&iQueue[32]
	asm("movne r1, r2 ");					// and set r1=high word
	asm("cmp r1, #0x00010000 ");
	asm("movcc r1, r1, lsl #16 ");
	asm("addcs r0, r0, #0x40 ");			// if iPresent>=0x00010000, step r0 on by 16 words
	asm("cmp r1, #0x01000000 ");
	asm("movcc r1, r1, lsl #8 ");
	asm("addcs r0, r0, #0x20 ");			// if iPresent>=0x01000000, step r0 on by 8 words
	asm("cmp r1, #0x10000000 ");
	asm("movcc r1, r1, lsl #4 ");
	asm("addcs r0, r0, #0x10 ");			// if iPresent>=0x10000000, step r0 on by 4 words
	asm("cmp r1, #0x40000000 ");
	asm("movcc r1, r1, lsl #2 ");
	asm("addcs r0, r0, #0x08 ");			// if iPresent>=0x40000000, step r0 on by 2 words
	asm("cmp r1, #0 ");
	asm("addmi r0, r0, #4 ");				// if iPresent>=0x80000000, step r0 on by 1 word
	asm("ldrne r0, [r0] ");					// if iPresent was not zero, r0 points to first entry
	asm("moveq r0, #0 ");					// else r0=NULL
#endif
	__JUMP(,lr);
	}
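
/* Illustrative C++ equivalent (not part of the build; iQueue is assumed to be the array
   of per-priority list heads that immediately follows the present mask):

	TPriListLink* FirstEquivalent(TPriListBase& aList)
		{
		TInt p = aList.HighestPriority();				// -1 if the list is empty
		return p >= 0 ? (TPriListLink*)aList.iQueue[p] : NULL;
		}
*/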

/** Add an item to a priority list.

	@param aLink = a pointer to the item - must not be NULL
 */
EXPORT_C __NAKED__ void TPriListBase::Add(TPriListLink* /*aLink*/)
	{
	asm("ldrb r2, [r1, #8]" );				// r2=priority of aLink
	asm("add ip, r0, #8 ");					// ip=&iQueue[0]
	asm("ldr r3, [ip, r2, lsl #2]! ");		// r3->first entry at this priority
	asm("cmp r3, #0 ");						// is this first entry at this priority?
	asm("bne pri_list_add_1 ");				// branch if not
	asm("str r1, [ip] ");					// if queue originally empty, iQueue[pri]=aLink
	asm("ldrb ip, [r0, r2, lsr #3]! ");		// ip=relevant byte of present mask, r0->same
	asm("and r2, r2, #7 ");
	asm("mov r3, #1 ");
	asm("str r1, [r1, #0] ");				// aLink->iNext=aLink
	asm("orr ip, ip, r3, lsl r2 ");			// ip |= 1<<(pri&7)
	asm("str r1, [r1, #4] ");				// aLink->iPrev=aLink
	asm("strb ip, [r0] ");					// update relevant byte of present mask
	__JUMP(,lr);
	asm("pri_list_add_1: ");
	asm("ldr ip, [r3, #4] ");				// if nonempty, ip=last
	asm("str r1, [r3, #4] ");				// first->iPrev=aLink
	asm("stmia r1, {r3,ip} ");				// aLink->iNext=r3=first, aLink->iPrev=ip=last
	asm("str r1, [ip, #0] ");				// last->iNext=aLink
	__JUMP(,lr);
	}
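
/* Illustrative C++ equivalent (not part of the build; member names follow the layout
   implied by the offsets used above - next at 0, prev at 4, priority at 8 - and the
   present mask is treated as two 32-bit words):

	void AddEquivalent(TPriListBase& aList, TPriListLink* aLink)
		{
		TInt pri = aLink->iPriority;
		TPriListLink* first = (TPriListLink*)aList.iQueue[pri];
		if (!first)
			{
			aList.iQueue[pri] = aLink;						// queue was empty
			aLink->iNext = aLink->iPrev = aLink;			// single-entry circular list
			aList.iPresent[pri >> 5] |= 1u << (pri & 31);	// mark this priority as occupied
			}
		else
			{
			TPriListLink* last = (TPriListLink*)first->iPrev;
			aLink->iNext = first;							// append at the tail so that
			aLink->iPrev = last;							// chronological order is preserved
			last->iNext = aLink;
			first->iPrev = aLink;
			}
		}
*/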

/** Change the priority of an item on a priority list

	@param	aLink = pointer to the item to act on - must not be NULL
	@param	aNewPriority = new priority for the item
 */
EXPORT_C __NAKED__ void TPriListBase::ChangePriority(TPriListLink* /*aLink*/, TInt /*aNewPriority*/)
	{
	asm("ldrb r3, [r1, #8] ");				// r3=old priority
	asm("stmfd sp!, {r4-r6,lr} ");
	asm("cmp r3, r2 ");
	asm("ldmeqfd sp!, {r4-r6,pc} ");		// if old priority=new, finished
	asm("ldmia r1, {r4,r12} ");				// r4=next, r12=prev
	asm("ldmia r0!, {r6,lr} ");				// lr:r6=present mask, r0=&iQueue[0]
	asm("subs r5, r4, r1 ");				// check if aLink is only one at that priority, r5=0 if it is
	asm("beq change_pri_1 ");				// branch if it is
	asm("ldr r5, [r0, r3, lsl #2] ");		// r5=iQueue[old priority]
	asm("str r4, [r12, #0] ");				// prev->next=next
	asm("str r12, [r4, #4] ");				// next->prev=prev
	asm("cmp r5, r1 ");						// was aLink first?
	asm("streq r4, [r0, r3, lsl #2] ");		// if it was, iQueue[old priority]=aLink->next
	asm("b change_pri_2 ");
	asm("change_pri_1: ");
	asm("str r5, [r0, r3, lsl #2] ");		// if empty, set iQueue[old priority]=NULL
	asm("mov r12, #0x80000000 ");
	asm("rsbs r3, r3, #31 ");				// r3=31-priority
	asm("bicmi lr, lr, r12, ror r3 ");		// if pri>31, clear bit in MS word
	asm("bicpl r6, r6, r12, ror r3 ");		// if pri<=31, clear bit in LS word
	asm("change_pri_2: ");
	asm("ldr r4, [r0, r2, lsl #2] ");		// r4=iQueue[new priority]
	asm("strb r2, [r1, #8] ");				// store new priority
	asm("cmp r4, #0 ");						// new priority queue empty?
	asm("bne change_pri_3 ");				// branch if not
	asm("str r1, [r0, r2, lsl #2] ");		// if new priority queue was empty, iQueue[new p]=aLink
	asm("mov r12, #0x80000000 ");
	asm("str r1, [r1, #0] ");				// aLink->next=aLink
	asm("rsbs r2, r2, #31 ");				// r2=31-priority
	asm("str r1, [r1, #4] ");				// aLink->prev=aLink
	asm("orrmi lr, lr, r12, ror r2 ");		// if pri>31, set bit in MS word
	asm("orrpl r6, r6, r12, ror r2 ");		// if pri<=31, set bit in LS word
	asm("stmdb r0!, {r6,lr} ");				// store present mask and restore r0
	asm("ldmfd sp!, {r4-r6,pc} ");
	asm("change_pri_3: ");
	asm("ldr r12, [r4, #4] ");				// r12->last link at this priority
	asm("str r1, [r4, #4] ");				// first->prev=aLink
	asm("str r1, [r12, #0] ");				// old last->next=aLink
	asm("stmia r1, {r4,r12} ");				// aLink->next=r4=first, aLink->prev=r12=old last
	asm("stmdb r0!, {r6,lr} ");				// store present mask and restore r0
	asm("ldmfd sp!, {r4-r6,pc} ");
	}
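
/* Illustrative outline of the code above (not part of the build): the item is unlinked
   from its old priority queue (clearing the present bit if that queue becomes empty),
   given its new priority, then appended to the tail of the new queue exactly as in Add().
   UnlinkEquivalent and AddEquivalent are hypothetical helpers named only for this sketch:

	void ChangePriorityEquivalent(TPriListBase& aList, TPriListLink* aLink, TInt aNewPriority)
		{
		if (aLink->iPriority == (TUint8)aNewPriority)
			return;									// nothing to do
		UnlinkEquivalent(aList, aLink);				// remove + fix up present mask
		aLink->iPriority = (TUint8)aNewPriority;
		AddEquivalent(aList, aLink);				// tail-insert, as sketched after Add() above
		}
*/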
#endif

__NAKED__ void initialiseState()
	{
	// entry in mode_svc with irqs and fiqs off
	asm("mrs r0, cpsr ");
	asm("bic r1, r0, #0x1f ");
	asm("orr r1, r1, #0xd3 ");				// mode_svc
	asm("msr cpsr, r1 ");
	__JUMP(,lr);
	}

// Called by a thread when it first runs
__NAKED__ void __StartThread()
	{
	// On entry r4->current thread, r5->entry point, r6->parameter block
	asm("mov r0, r6 ");
	USER_MEMORY_GUARD_OFF_IF_MODE_USR(r6);
	ERRATUM_353494_MODE_CHANGE(,r6);
	asm("mov lr, pc ");
	asm("movs pc, r5 ");
	asm("b  " CSM_ZN5NKern4ExitEv);
	}

// Called by a thread which has been forced to exit
// Interrupts off here, kernel unlocked
__NAKED__ void __DoForcedExit()
	{
	asm("mov r0, #0x13 ");
	asm("msr cpsr, r0 ");		// interrupts back on
	asm("bic sp, sp, #4 ");		// align stack since it may be misaligned on return from scheduler
	asm("bl  " CSM_ZN5NKern4LockEv);	// lock the kernel (must do this before setting iCsCount=0)
	asm("ldr r0, __TheScheduler ");			// r0 points to scheduler data
	asm("mov r1, #0 ");
	asm("ldr r0, [r0, #%a0]" : : "i" _FOFF(TScheduler,iCurrentThread));	// r0=iCurrentThread
	asm("str r1, [r0, #%a0]" : : "i" _FOFF(NThreadBase,iCsCount));	// set iCsCount=0
	asm("b  " CSM_ZN11NThreadBase4ExitEv);	// exit

	asm("__TheScheduler: ");
	asm(".word TheScheduler ");
	asm("__BTraceData: ");
	asm(".word BTraceData ");
	asm("__DBTraceFilter2_iCleanupHead:");
#ifdef __EABI__
	asm(".word _ZN14DBTraceFilter212iCleanupHeadE");
#else
	asm(".word _14DBTraceFilter2.iCleanupHead");
#endif
	}


/** @internalTechnology

	Called to indicate that the system has crashed and all CPUs should be
	halted and should dump their registers.

*/
__NAKED__ void NKern::NotifyCrash(const TAny* /*a0*/, TInt /*a1*/)
	{
	asm("stmfd	sp!, {r0-r1} ");			// save parameters
	asm("ldr	r0, __CrashState ");
	asm("mov	r1, #1 ");
	asm("str	r1, [r0] ");				// CrashState = ETrue
	asm("ldr	r0, __TheScheduler ");
	asm("ldr	r0, [r0, #%a0]" : : "i" _FOFF(TScheduler,i_Regs));
	asm("ldr	r1, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet, iExcCode));
	asm("cmp	r1, #0 ");					// context already saved?
	asm("bge	state_already_saved ");		// skip if so
	asm("mov	r1, lr ");
	asm("bl "	CSM_ZN3Arm9SaveStateER14SFullArmRegSet );
	asm("str	r1, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet, iN.iR15));
	asm("ldmia	sp!, {r2-r3} ");			// original R0,R1
	asm("stmia	r0, {r2-r3} ");				// save original R0,R1
	asm("add	r1, r0, #%a0" : : "i" _FOFF(SFullArmRegSet, iExcCode));
	asm("stmib	r1, {r2-r3} ");				// save a0, a1 in iCrashArgs
	asm("mov	r1, #13 ");					// r1 = regnum
	asm("mrs	r2, cpsr ");				// r2 = mode
	asm("bl "	CSM_ZN3Arm3RegER14SFullArmRegSetim );	// r0 = pointer to exception mode R13
	asm("str	sp, [r0] ");				// save correct original value for exception mode R13
	asm("b		state_save_complete ");

	asm("state_already_saved: ");
	asm("ldmia	sp!, {r2-r3} ");			// original R0,R1
	asm("add	r1, r0, #%a0" : : "i" _FOFF(SFullArmRegSet, iExcCode));
	asm("ldr	r4, [r1, #4]! ");
	asm("cmp	r4, #0 ");
	asm("stmeqia	r1, {r2-r3} ");			// save a0, a1 in iCrashArgs, provided iCrashArgs not already set
	asm("state_save_complete: ");

	asm("mov	r2, #0xd1 ");
	asm("msr	cpsr, r2 ");				// mode_fiq, interrupts off
	asm("mov	r4, r0 ");
	asm("bic	sp, sp, #4 ");				// align stack to multiple of 8

	asm("mov	r0, #0 ");
	asm("mov	r1, #0 ");
	asm("mov	r2, #0 ");
	asm("bl		NKCrashHandler ");

	asm("mov	r0, #1 ");
	asm("ldr	r1, [r4, #%a0] " : : "i" _FOFF(SFullArmRegSet,iN.iR0));	// original R0 = a0 parameter
	asm("ldr	r2, [r4, #%a0] " : : "i" _FOFF(SFullArmRegSet,iN.iR1));	// original R1 = a1 parameter
	asm("bl		NKCrashHandler ");

	// shouldn't get back here
	__ASM_CRASH();

	asm("__CrashState: ");
	asm(".word %a0" : : "i" ((TInt)&CrashState));
	}


__NAKED__ EXPORT_C TBool BTrace::Out(TUint32 a0, TUint32 a1, TUint32 a2, TUint32 a3)
	{
	asm("ldr	r12, __BTraceData");
	asm("stmdb	sp!, {r2,r3,r4,lr}");
	asm("and	r2, r0, #%a0" : : "i" ((TInt)(0xff<<(BTrace::ECategoryIndex*8))));
	asm("ldrb	r2, [r12, r2, lsr #%a0]" : : "i" ((TInt)(BTrace::ECategoryIndex*8)));
	asm("mov	r3, r1");			// r3 = a1 (ready for call to handler)
	asm("adr	lr, 9f");
	asm("cmp	r2, #0");
	asm("moveq	r0, #0");
	asm("ldrne	pc, [r12, #%a0]" : : "i" _FOFF(SBTraceData,iHandler));
	asm("9:");
	__POPRET("r2,r3,r4,");
	}

__NAKED__ EXPORT_C TBool BTrace::OutN(TUint32 a0, TUint32 a1, TUint32 a2, const TAny* aData, TInt aDataSize)
	{
	asm("ldr	r12, __BTraceData");
	asm("stmdb	sp!, {r2,r3,r4,lr}");
	asm("and	r2, r0, #%a0" : : "i" ((TInt)(0xff<<(BTrace::ECategoryIndex*8))));
	asm("ldrb	r2, [r12, r2, lsr #%a0]" : : "i" ((TInt)(BTrace::ECategoryIndex*8)));
	asm("ldr	r4, [sp, #16]");	// r4 = aDataSize
	asm("cmp	r2, #0");
	asm("moveq	r0, #0");
	__CPOPRET(eq,"r2,r3,r4,");

	asm("cmp	r4, #%a0" : : "i" ((TInt)KMaxBTraceDataArray));
	asm("movhi	r4, #%a0" : : "i" ((TInt)KMaxBTraceDataArray));
	asm("orrhi	r0, r0, #%a0" : : "i" ((TInt)(BTrace::ERecordTruncated<<(BTrace::EFlagsIndex*8))));
	asm("add	r0, r0, r4");
	asm("subs	r4, r4, #1");
	asm("ldrhs	r2, [r3]");			// get first word of aData if aDataSize!=0
	asm("mov	r3, r1");			// r3 = a1 (ready for call to handler)
	asm("cmp	r4, #4");
	asm("strlo	r2, [sp, #4]");		// replace aData with first word if aDataSize is 1-4

	asm("mov	lr, pc");
	asm("ldr	pc, [r12, #%a0]" : : "i" _FOFF(SBTraceData,iHandler));
	__POPRET("r2,r3,r4,");
	}

__NAKED__ EXPORT_C TBool BTrace::OutX(TUint32 a0, TUint32 a1, TUint32 a2, TUint32 a3)
	{
	asm("ldr	r12, __BTraceData");
	asm("stmdb	sp!, {r2,r3,r4,lr}");
	asm("and	r2, r0, #%a0" : : "i" ((TInt)(0xff<<(BTrace::ECategoryIndex*8))));
	asm("ldrb	r2, [r12, r2, lsr #%a0]" : : "i" ((TInt)(BTrace::ECategoryIndex*8)));
	asm("mov	r3, r1");			// r3 = a1 (ready for call to handler)
	asm("ldr	lr, __TheScheduler");
	asm("cmp	r2, #0");
	asm("moveq	r0, #0");
	__CPOPRET(eq,"r2,r3,r4,");

	// set r2 = context id
	asm("ldrb	r4, [lr, #%a0]" : : "i" _FOFF(TScheduler,iInIDFC));
	asm("mrs	r2, cpsr");
	asm("and	r2, r2, #0x0f");
	asm("cmp	r2, #3");
	asm("movhi	r2, #2");		// r2 = context ID => 1 for FIQ, 2 for IRQ/ABT/UND/SYS
	asm("cmpeq	r4, #0");
	asm("ldreq	r2, [lr, #%a0]" : : "i" _FOFF(TScheduler,iCurrentThread));

	asm("mov	lr, pc");
	asm("ldr	pc, [r12, #%a0]" : : "i" _FOFF(SBTraceData,iHandler));
	__POPRET("r2,r3,r4,");
	}
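
/* Illustrative C++ equivalent of the "set r2 = context id" sequence above (not part of
   the build; it mirrors the flag usage of the cmp/movhi/cmpeq/ldreq chain):

	TUint32 TraceContextId(TUint32 aCpsr, TBool aInIDFC, NThreadBase* aCurrentThread)
		{
		TUint32 ctx = aCpsr & 0x0f;					// low nibble of the mode bits
		if (ctx > 3)
			ctx = 2;								// ABT/UND/SYS reported like IRQ
		else if (ctx == 3 && !aInIDFC)
			ctx = (TUint32)aCurrentThread;			// SVC outside an IDFC => thread pointer
		return ctx;									// 1 for FIQ, 2 for IRQ, 3 inside an IDFC
		}
*/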

__NAKED__ EXPORT_C TBool BTrace::OutNX(TUint32 a0, TUint32 a1, TUint32 a2, const TAny* aData, TInt aDataSize)
	{
	asm("ldr	r12, __BTraceData");
	asm("stmdb	sp!, {r2,r3,r4,lr}");
	asm("and	r2, r0, #%a0" : : "i" ((TInt)(0xff<<(BTrace::ECategoryIndex*8))));
	asm("ldrb	r2, [r12, r2, lsr #%a0]" : : "i" ((TInt)(BTrace::ECategoryIndex*8)));
	asm("ldr	r4, [sp, #16]");	// r4 = aDataSize
	asm("ldr	lr, __TheScheduler");
	asm("cmp	r2, #0");
	asm("moveq	r0, #0");
	__CPOPRET(eq,"r2,r3,r4,");

	asm("cmp	r4, #%a0" : : "i" ((TInt)KMaxBTraceDataArray));
	asm("movhi	r4, #%a0" : : "i" ((TInt)KMaxBTraceDataArray));
	asm("orrhi	r0, r0, #%a0" : : "i" ((TInt)(BTrace::ERecordTruncated<<(BTrace::EFlagsIndex*8))));
	asm("add	r0, r0, r4");
	asm("subs	r4, r4, #1");
	asm("ldrhs	r2, [r3]");			// get first word of aData if aDataSize!=0
	asm("mov	r3, r1");			// r3 = a1 (ready for call to handler)
	asm("cmp	r4, #4");
	asm("strlo	r2, [sp, #4]");		// replace aData with first word if aDataSize is 1-4

	// set r2 = context id
	asm("ldrb	r4, [lr, #%a0]" : : "i" _FOFF(TScheduler,iInIDFC));
	asm("mrs	r2, cpsr");
	asm("and	r2, r2, #0x0f");
	asm("cmp	r2, #3");
	asm("movhi	r2, #2");		// r2 = context ID => 1 for FIQ, 2 for IRQ/ABT/UND/SYS
	asm("cmpeq	r4, #0");
	asm("ldreq	r2, [lr, #%a0]" : : "i" _FOFF(TScheduler,iCurrentThread));

	asm("mov	lr, pc");
	asm("ldr	pc, [r12, #%a0]" : : "i" _FOFF(SBTraceData,iHandler));
	__POPRET("r2,r3,r4,");
	}

__NAKED__ EXPORT_C TBool BTrace::OutBig(TUint32 a0, TUint32 a1, const TAny* aData, TInt aDataSize)
	{
	asm("ldr	r12, __BTraceData");
	asm("stmdb	sp!, {r4,lr}");
	asm("and	r4, r0, #%a0" : : "i" ((TInt)(0xff<<(BTrace::ECategoryIndex*8))));
	asm("ldrb	r4, [r12, r4, lsr #%a0]" : : "i" ((TInt)(BTrace::ECategoryIndex*8)));
	asm("cmp	r4, #0");
	asm("moveq	r0, #0");
	__CPOPRET(eq,"r4,");

	asm("ldr	r12, __TheScheduler");
	asm("stmdb	sp!, {lr}");
	asm("ldrb	lr, [r12, #%a0]" : : "i" _FOFF(TScheduler,iInIDFC));
	asm("mrs	r4, cpsr");
	asm("and	r4, r4, #0x0f");
	asm("cmp	r4, #3");
	asm("movhi	r4, #2");		// r4 = context ID => 1 for FIQ, 2 for IRQ/ABT/UND/SYS
	asm("cmpeq	lr, #0");
	asm("ldreq	r4, [r12, #%a0]" : : "i" _FOFF(TScheduler,iCurrentThread));
	asm("stmdb	sp!, {r4}");
	asm("bl " CSM_ZN6BTrace8DoOutBigEmmPKvimm);
	asm("add	sp, sp, #8");
	__POPRET("r4,");
	}


__NAKED__ TBool DBTraceFilter2::Check(TUint32 aUid)
	{
	asm("stmdb	sp!, {lr}");
	asm("ldr	r3, [r0,#%a0]" : : "i" _FOFF(DBTraceFilter2,iNumUids));
	asm("add	r0, r0, #%a0" : : "i" _FOFF(DBTraceFilter2,iUids));
	asm("mov	r2, #0");
	asm("0:");
	asm("cmp	r3, r2");
	asm("bls	9f");
	asm("add	r12, r2, r3");
	asm("mov	r12, r12, asr #1");
	asm("ldr	lr, [r0, r12, lsl #2]");
	asm("cmp	r1, lr");
	asm("addhi	r2, r12, #1");
	asm("movlo	r3, r12");
	asm("bne	0b");
	asm("movs	r0, #1");
	__POPRET("");
	asm("9:");
	asm("movs	r0, #0");
	__POPRET("");
	}
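
/* Illustrative C++ equivalent of the binary search above (not part of the build; iUids
   is assumed to be a sorted array of iNumUids filter UIDs):

	TBool CheckEquivalent(const TUint32* aUids, TInt aNumUids, TUint32 aUid)
		{
		TInt lo = 0;
		TInt hi = aNumUids;							// search the half-open range [lo, hi)
		while (lo < hi)
			{
			TInt mid = (lo + hi) >> 1;
			TUint32 v = aUids[mid];
			if (v == aUid)
				return ETrue;						// UID is in the filter
			if (aUid > v)
				lo = mid + 1;
			else
				hi = mid;
			}
		return EFalse;								// UID not found
		}
*/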


__NAKED__ TBool SBTraceData::CheckFilter2(TUint32 aUid)
	{
	asm("btrace_check_filter2:");
	// returns r0 = 0 or 1 indicating if trace passed the filter check
	// returns r2 = trace context id

	asm("ldr	r12, __TheScheduler");
	asm("stmdb	sp!, {r4-r6,lr}");
	asm("mrs	r2, cpsr");
	// r2 = cpsr
	asm("ldrb	lr, [r12, #%a0]" : : "i" _FOFF(TScheduler,iInIDFC));
	asm("and	r4, r2, #0x0f");
	asm("cmp	r4, #3");
	asm("movhi	r4, #2");		// r4 = context ID => 1 for FIQ, 2 for IRQ/ABT/UND/SYS
	asm("cmpeq	lr, #0");
	asm("ldreq	lr, [r12, #%a0]" : : "i" _FOFF(TScheduler,iKernCSLocked));
	asm("ldreq	r4, [r12, #%a0]" : : "i" _FOFF(TScheduler,iCurrentThread));
	asm("cmpeq	lr, #0");
	// r4 = context value for trace
	// zero flag set if we need to enter a critical section

	// NKern::ThreadEnterCS()
	asm("ldreq	r5, [r4, #%a0]" : : "i" _FOFF(NThreadBase,iCsCount));
	asm("movne	r5, #0");
	asm("addeq	r5, r5, #1");
	asm("streq	r5, [r4, #%a0]" : : "i" _FOFF(NThreadBase,iCsCount));
	// r5 = true if we entered a critical section

	// DBTraceFilter2::Open()
	INTS_OFF(r12, r2, INTS_ALL_OFF);
	asm("ldr	r0, [r0, #%a0]" : : "i" (_FOFF(SBTraceData,iFilter2)));
	asm("cmp	r0, #1");
	asm("ldrhi	r12, [r0, #%a0]" : : "i" _FOFF(DBTraceFilter2,iAccessCount));
	asm("addhi	r12, r12, #1");
	asm("strhi	r12, [r0, #%a0]" : : "i" _FOFF(DBTraceFilter2,iAccessCount));
	asm("msr	cpsr_c, r2");
	asm("bls	8f");


	asm("mov	r6, r0");
	asm("bl		Check__14DBTraceFilter2Ul");
	// r0 = result


	// DBTraceFilter2::Close()
	asm("mrs	r2, cpsr");
	INTS_OFF(r12, r2, INTS_ALL_OFF);
	asm("ldr	r12, [r6, #%a0]" : : "i" _FOFF(DBTraceFilter2,iAccessCount));
	asm("ldr	r1, __DBTraceFilter2_iCleanupHead");
	asm("subs	r12, r12, #1");
	asm("str	r12, [r6, #%a0]" : : "i" _FOFF(DBTraceFilter2,iAccessCount));
	asm("ldreq	r12, [r1]");
	asm("streq	r6, [r1]");
	asm("streq	r12, [r6, #%a0]" : : "i" _FOFF(DBTraceFilter2,iCleanupLink));
	asm("msr	cpsr_c, r2");

	// NKern::ThreadLeaveCS()
	asm("8:");
	asm("cmp	r5, #0");
	asm("beq	9f");
	asm("mov	r5, r0");
	asm("bl " CSM_ZN5NKern13ThreadLeaveCSEv);
	asm("mov	r0, r5");
	asm("9:");
	asm("mov	r2, r4"); // r2 = context id
	__POPRET("r4-r6,");
	}

__NAKED__ EXPORT_C TBool BTrace::OutFiltered(TUint32 a0, TUint32 a1, TUint32 a2, TUint32 a3)
	{
	// fall through to OutFilteredX...
	}

__NAKED__ EXPORT_C TBool BTrace::OutFilteredX(TUint32 a0, TUint32 a1, TUint32 a2, TUint32 a3)
	{
	asm("ldr	r12, __BTraceData");
	asm("stmdb	sp!, {r2,r3,r4,lr}");
	asm("and	r2, r0, #%a0" : : "i" ((TInt)(0xff<<(BTrace::ECategoryIndex*8))));
	asm("ldrb	r2, [r12, r2, lsr #%a0]" : : "i" ((TInt)(BTrace::ECategoryIndex*8)));
	asm("mov	r3, r1");			// r3 = a1 (ready for call to handler)
	asm("cmp	r2, #0");
	asm("moveq	r0, #0");
	__CPOPRET(eq,"r2,r3,r4,");

	asm("stmdb	sp!, {r0,r3,r12}");
	asm("mov	r0, r12");
	asm("bl		btrace_check_filter2");
	asm("cmp	r0, #0");
	asm("ldmia	sp!, {r0,r3,r12}");
	asm("moveq	r0, #0");
	__CPOPRET(eq,"r2,r3,r4,");

	asm("adr	lr, 9f");
	asm("ldr	pc, [r12, #%a0]" : : "i" _FOFF(SBTraceData,iHandler));
	asm("9:");
	__POPRET("r2,r3,r4,");
	}

__NAKED__ EXPORT_C TBool BTrace::OutFilteredN(TUint32 a0, TUint32 a1, TUint32 a2, const TAny* aData, TInt aDataSize)
	{
	// fall through to OutFilteredNX...
	}

__NAKED__ EXPORT_C TBool BTrace::OutFilteredNX(TUint32 a0, TUint32 a1, TUint32 a2, const TAny* aData, TInt aDataSize)
	{
	asm("ldr	r12, __BTraceData");
	asm("stmdb	sp!, {r2,r3,r4,lr}");
	asm("and	r2, r0, #%a0" : : "i" ((TInt)(0xff<<(BTrace::ECategoryIndex*8))));
	asm("ldrb	r2, [r12, r2, lsr #%a0]" : : "i" ((TInt)(BTrace::ECategoryIndex*8)));
	asm("cmp	r2, #0");
	asm("moveq	r0, #0");
	__CPOPRET(eq,"r2,r3,r4,");

	asm("stmdb	sp!, {r0,r1,r3,r12}");
	asm("mov	r0, r12");
	asm("bl		btrace_check_filter2");
	asm("cmp	r0, #0");
	asm("ldmia	sp!, {r0,r1,r3,r12}");
	asm("moveq	r0, #0");
	__CPOPRET(eq,"r2,r3,r4,");

	asm("ldr	r4, [sp, #16]");	// r4 = aDataSize
	asm("cmp	r4, #%a0" : : "i" ((TInt)KMaxBTraceDataArray));
	asm("movhi	r4, #%a0" : : "i" ((TInt)KMaxBTraceDataArray));
	asm("orrhi	r0, r0, #%a0" : : "i" ((TInt)(BTrace::ERecordTruncated<<(BTrace::EFlagsIndex*8))));
	asm("add	r0, r0, r4");
	asm("subs	r4, r4, #1");
	asm("ldrhs	lr, [r3]");			// get first word of aData if aDataSize!=0
	asm("mov	r3, r1");			// r3 = a1 (ready for call to handler)
	asm("cmp	r4, #4");
	asm("strlo	lr, [sp, #4]");		// replace aData with first word if aDataSize is 1-4

	asm("mov	lr, pc");
	asm("ldr	pc, [r12, #%a0]" : : "i" _FOFF(SBTraceData,iHandler));
	__POPRET("r2,r3,r4,");
	}

__NAKED__ EXPORT_C TBool BTrace::OutFilteredBig(TUint32 a0, TUint32 a1, const TAny* aData, TInt aDataSize)
	{
	asm("ldr	r12, __BTraceData");
	asm("stmdb	sp!, {r4,lr}");
	asm("and	r4, r0, #%a0" : : "i" ((TInt)(0xff<<(BTrace::ECategoryIndex*8))));
	asm("ldrb	r4, [r12, r4, lsr #%a0]" : : "i" ((TInt)(BTrace::ECategoryIndex*8)));
	asm("cmp	r4, #0");
	asm("moveq	r0, #0");
	__CPOPRET(eq,"r4,");

	asm("stmdb	sp!, {r0-r3,r4,lr}");
	asm("mov	r0, r12");
	asm("bl		btrace_check_filter2");
	asm("cmp	r0, #0");
	asm("mov	r12, r2");
	asm("ldmia	sp!, {r0-r3,r4,lr}");
	asm("moveq	r0, #0");
	__CPOPRET(eq,"r4,");

	asm("stmdb	sp!, {r12,lr}");
	asm("bl " CSM_ZN6BTrace8DoOutBigEmmPKvimm);
	asm("add	sp, sp, #8");
	__POPRET("r4,");
	}

__NAKED__ EXPORT_C TBool BTrace::OutFilteredPcFormatBig(TUint32 a0, TUint32 aModuleUid, TUint32 aPc, TUint16 aFormatId, const TAny* aData, TInt aDataSize)
	{
	asm("mov	r0, #0");	// kernel side not implemented yet
	__JUMP(,lr);			// return EFalse
	}

/******************************************************************************/

/** Save all the ARM registers

@internalTechnology
*/
__NAKED__ void Arm::SaveState(SFullArmRegSet&)
	{
	asm("stmia	r0, {r0-r14}^ ");	// save R0-R7, R8_usr-R14_usr
	asm("str	lr, [r0, #60]! ");	// save R15
	asm("mrs	r1, cpsr ");
	asm("str	r1, [r0, #4]! ");	// save CPSR
	asm("bic	r2, r1, #0x1f ");
	asm("orr	r2, r2, #0xd3 ");	// mode_svc, all interrupts off
	asm("msr	cpsr, r2 ");
	asm("stmib	r0!, {r13,r14} ");	// save R13_svc, R14_svc
	asm("mrs	r3, spsr ");
	asm("str	r3, [r0, #4]! ");	// save SPSR_svc
	asm("bic	r2, r1, #0x1f ");
	asm("orr	r2, r2, #0xd7 ");	// mode_abt, all interrupts off
	asm("msr	cpsr, r2 ");
	asm("stmib	r0!, {r13,r14} ");	// save R13_abt, R14_abt
	asm("mrs	r3, spsr ");
	asm("str	r3, [r0, #4]! ");	// save SPSR_abt
	asm("bic	r2, r1, #0x1f ");
	asm("orr	r2, r2, #0xdb ");	// mode_und, all interrupts off
	asm("msr	cpsr, r2 ");
	asm("stmib	r0!, {r13,r14} ");	// save R13_und, R14_und
	asm("mrs	r3, spsr ");
	asm("str	r3, [r0, #4]! ");	// save SPSR_und
	asm("bic	r2, r1, #0x1f ");
	asm("orr	r2, r2, #0xd2 ");	// mode_irq, all interrupts off
	asm("msr	cpsr, r2 ");
	asm("stmib	r0!, {r13,r14} ");	// save R13_irq, R14_irq
	asm("mrs	r3, spsr ");
	asm("str	r3, [r0, #4]! ");	// save SPSR_irq
	asm("bic	r2, r1, #0x1f ");
	asm("orr	r2, r2, #0xd1 ");	// mode_fiq, all interrupts off
	asm("msr	cpsr, r2 ");
	asm("stmib	r0!, {r8-r14} ");	// save R8_fiq ... R14_fiq
	asm("mrs	r3, spsr ");
	asm("str	r3, [r0, #4]! ");	// save SPSR_fiq
	asm("bic	r2, r1, #0x1f ");
	asm("orr	r2, r2, #0xd3 ");	// mode_svc, all interrupts off
	asm("msr	cpsr, r2 ");

	asm("mov	r4, #0 ");
	asm("mov	r5, #0 ");
	asm("mov	r6, #0 ");
	asm("mov	r7, #0 ");
	asm("mov	r8, #0 ");
	asm("mov	r9, #0 ");
	asm("mov	r10, #0 ");
	asm("mov	r11, #0 ");

	// monitor mode - skip for now
	asm("mov	r3, #0 ");
	asm("stmib	r0!, {r4-r6} ");	// R13_mon, R14_mon, SPSR_mon

	// zero spare words
	asm("mov	r3, #0 ");
	asm("stmib	r0!, {r4-r11} ");
	asm("add	r0, r0, #4 ");		// r0 = &a.iA

#ifdef __CPU_ARMV7
	asm("mrc	p14, 6, r3, c1, c0, 0 ");
#else
	asm("mov	r3, #0 ");
#endif
	asm("str	r3, [r0], #4 ");	// TEEHBR
#ifdef __CPU_HAS_COPROCESSOR_ACCESS_REG
	GET_CAR(,r3);
#else
	asm("mov	r3, #0 ");
#endif
	asm("str	r3, [r0], #4 ");	// CPACR

	// skip SCR, SDER, NSACR, PMCR, MVBAR for now
	asm("mov	r3, #0 ");
	asm("stmia	r0!, {r4-r8} ");	// SCR, SDER, NSACR, PMCR, MVBAR

	// zero spare words
	asm("mov	r3, #0 ");
	asm("stmia	r0!, {r3-r11} ");	// r0 = &a.iB[0]

	// just fill in iB[0]
#ifdef __CPU_HAS_MMU
	asm("mrc	p15, 0, r3, c1, c0, 0 ");
	asm("str	r3, [r0], #4 ");	// SCTLR
#ifdef __CPU_HAS_ACTLR
	asm("mrc	p15, 0, r3, c1, c0, 1 ");
#else
	asm("mov	r3, #0 ");
#endif
	asm("str	r3, [r0], #4 ");	// ACTLR
	asm("mrc	p15, 0, r3, c2, c0, 0 ");
	asm("str	r3, [r0], #4 ");	// TTBR0
#ifdef __CPU_HAS_TTBR1
	asm("mrc	p15, 0, r2, c2, c0, 1 ");
	asm("mrc	p15, 0, r3, c2, c0, 2 ");
#else
	asm("mov	r2, #0 ");
	asm("mov	r3, #0 ");
#endif
	asm("stmia	r0!, {r2,r3} ");	// TTBR1, TTBCR
	asm("mrc	p15, 0, r3, c3, c0, 0 ");
	asm("str	r3, [r0], #4 ");	// DACR
#ifdef __CPU_MEMORY_TYPE_REMAPPING
	asm("mrc	p15, 0, r2, c10, c2, 0 ");
	asm("mrc	p15, 0, r3, c10, c2, 1 ");
#else
	asm("mov	r2, #0 ");
	asm("mov	r3, #0 ");
#endif
	asm("stmia	r0!, {r2,r3} ");	// PRRR, NMRR
#ifdef __CPU_ARMV7
	asm("mrc	p15, 0, r3, c12, c0, 0 ");
#else
	asm("mov	r3, #0 ");
#endif
	asm("str	r3, [r0], #4 ");	// VBAR
#if defined(__CPU_SA1) || defined(__CPU_ARM920T) || defined(__CPU_ARM925T) || defined(__CPU_ARMV5T) || defined(__CPU_ARMV6) || defined(__CPU_ARMV7)
	asm("mrc	p15, 0, r3, c13, c0, 0 ");
#else
	asm("mov	r3, #0 ");
#endif
	asm("str	r3, [r0], #4 ");	// FCSEIDR
#if defined(__CPU_ARMV6) || defined(__CPU_ARMV7)
	asm("mrc	p15, 0, r3, c13, c0, 1 ");
#else
	asm("mov	r3, #0 ");
#endif
	asm("str	r3, [r0], #4 ");	// CONTEXTIDR
#ifdef __CPU_HAS_CP15_THREAD_ID_REG
	GET_RWRW_TID(,r2);
	GET_RWRO_TID(,r3);
	GET_RWNO_TID(,r12);
#else
	asm("mov	r2, #0 ");
	asm("mov	r3, #0 ");
	asm("mov	r12, #0 ");
#endif
	asm("stmia	r0!, {r2,r3,r12} ");	// RWRWTID, RWROTID, RWNOTID
	asm("mrc	p15, 0, r2, c5, c0, 0 ");	// DFSR
#ifdef __CPU_ARM_HAS_SPLIT_FSR
	asm("mrc	p15, 0, r3, c5, c0, 1 ");	// IFSR
#else
	asm("mov	r3, #0 ");
#endif
	asm("stmia	r0!, {r2,r3} ");	// DFSR, IFSR
#ifdef __CPU_ARMV7
	asm("mrc	p15, 0, r2, c5, c1, 0 ");	// ADFSR
	asm("mrc	p15, 0, r3, c5, c1, 1 ");	// AIFSR
#else
	asm("mov	r2, #0 ");
	asm("mov	r3, #0 ");
#endif
	asm("stmia	r0!, {r2,r3} ");	// ADFSR, AIFSR
	asm("mrc	p15, 0, r2, c6, c0, 0 ");	// DFAR
#ifdef __CPU_ARM_HAS_CP15_IFAR
	asm("mrc	p15, 0, r3, c6, c0, 2 ");	// IFAR
#else
	asm("mov	r3, #0 ");
#endif
	asm("stmia	r0!, {r2,r3} ");	// DFAR, IFAR

	// zero spare words
	asm("stmia	r0!, {r4-r7} ");
	asm("stmia	r0!, {r4-r11} ");
#else	// __CPU_HAS_MMU
	asm("stmia	r0!, {r4-r11} ");	// no MMU so zero fill
	asm("stmia	r0!, {r4-r11} ");	// no MMU so zero fill
	asm("stmia	r0!, {r4-r11} ");	// no MMU so zero fill
	asm("stmia	r0!, {r4-r11} ");	// no MMU so zero fill
#endif	// __CPU_HAS_MMU

	// zero iB[1]
	asm("stmia	r0!, {r4-r11} ");
	asm("stmia	r0!, {r4-r11} ");
	asm("stmia	r0!, {r4-r11} ");
	asm("stmia	r0!, {r4-r11} ");	// r0 = &a.iMore[0]
	asm("add	r1, r0, #62*8 ");	// r1 = &a.iExcCode

	// Save VFP state
	// Save order:
	//				FPEXC	FPSCR
	// VFPv2 ONLY:	FPINST	FPINST2
	//				D0-D3	D4-D7	D8-D11	D12-D15
	// VFPv3 ONLY:	D16-D19	D20-D23	D24-D27	D28-D31
#ifdef __CPU_HAS_VFP
	GET_CAR(,r2);
	asm("bic	r2, r2, #0x00f00000 ");
#ifdef __VFP_V3
	asm("bic	r2, r2, #0xc0000000 ");	// mask off ASEDIS, D32DIS
#endif
	asm("orr	r2, r2, #0x00500000 ");	// enable privileged access to CP10, CP11
	SET_CAR(,r2);
	VFP_FMRX(,2,VFP_XREG_FPEXC);		// r2=FPEXC
	asm("orr	r3, r2, #%a0" : : "i" ((TInt)VFP_FPEXC_EN));
	VFP_FMXR(,VFP_XREG_FPEXC,3);		// enable VFP
	__DATA_SYNC_BARRIER__(r4);
	__INST_SYNC_BARRIER__(r4);
	VFP_FMRX(,3,VFP_XREG_FPSCR);		// r3=FPSCR
	asm("stmia	r0!, {r2,r3} ");		// FPEXC, FPSCR
#ifdef __VFP_V3
	VFP_FSTMIADW(CC_AL,0,0,16);			// save D0 - D15
	VFP_FMRX(,3,VFP_XREG_MVFR0);
	asm("tst r3, #%a0" : : "i" ((TInt)VFP_MVFR0_ASIMD32)); // Check to see if all 32 Advanced SIMD registers are present
	VFP_FSTMIADW(CC_NE,0,16,16);		// if so then save D16 - D31 (don't need to check CPACR.D32DIS as it is cleared above)
#else
	VFP_FMRX(,2,VFP_XREG_FPINST);
	VFP_FMRX(,3,VFP_XREG_FPINST2);
	asm("stmia	r0!, {r2,r3} ");		// FPINST, FPINST2
	VFP_FSTMIADW(CC_AL,0,0,16);			// save D0 - D15
#endif
#endif	// __CPU_HAS_VFP
	asm("1:		");
	asm("cmp	r0, r1 ");
	asm("strlo	r4, [r0], #4 ");		// clear up to end of iMore[61]
	asm("blo	1b ");
	asm("mov	r1, #%a0" : : "i" ((TInt)KMaxTInt));
	asm("stmia	r0!, {r1,r5-r7} ");		// iExcCode=KMaxTInt, iCrashArgs[0...2]=0
	asm("sub	r0, r0, #1024 ");		// r0 = &a
#ifdef __CPU_HAS_VFP
	asm("ldr	r2, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iMore[0]));
	VFP_FMXR(,VFP_XREG_FPEXC,2);		// restore FPEXC
	__DATA_SYNC_BARRIER__(r4);
	__INST_SYNC_BARRIER__(r4);
	asm("ldr	r2, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iA.iCPACR));
	SET_CAR(,r2);						// restore CPACR
#endif
	asm("ldr	r1, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iN.iFlags));
	asm("orr	r1, r1, #0xC0 ");		// interrupts off
	asm("msr	cpsr, r1 ");			// restore CPSR with interrupts off
	asm("ldmia	r0, {r0-r11} ");		// restore R4-R11
	__JUMP(,lr);
	}


/** Update the saved ARM registers with information from an exception

@internalTechnology
*/
__NAKED__ void Arm::UpdateState(SFullArmRegSet&, TArmExcInfo&)
	{
	asm("ldmia	r1!, {r2,r3,r12} ");
	asm("str	r2, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iN.iFlags));
	asm("str	r3, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iExcCode));
	asm("str	r12, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iN.iR13Svc));
	asm("ldmia	r1!, {r2,r3,r12} ");
	asm("str	r2, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iN.iR4));
	asm("str	r3, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iN.iR5));
	asm("str	r12, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iN.iR6));
	asm("ldmia	r1!, {r2,r3,r12} ");
	asm("str	r2, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iN.iR7));
	asm("str	r3, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iN.iR8));
	asm("str	r12, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iN.iR9));
	asm("ldmia	r1!, {r2,r3,r12} ");
	asm("str	r2, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iN.iR10));
	asm("str	r3, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iN.iR11));
	asm("str	r12, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iN.iR14Svc));
	asm("ldr	r12, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iExcCode));
	asm("ldmia	r1!, {r2,r3} ");	// r2=iFaultAddress, r3=iFaultStatus
	asm("cmp	r12, #%a0 " : : "i" ((TInt)EArmExceptionPrefetchAbort));
	asm("streq	r2, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iB[0].iIFAR));
	asm("strne	r2, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iB[0].iDFAR));
	asm("streq	r3, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iB[0].iIFSR));
	asm("strne	r3, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iB[0].iDFSR));
	asm("ldmia	r1!, {r2,r3,r12} ");
	asm("str	r2, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iN.iSpsrSvc));
	asm("str	r3, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iN.iR13));
	asm("str	r12, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iN.iR14));
	asm("ldmia	r1!, {r2,r3,r12} ");
	asm("str	r2, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iN.iR0));
	asm("str	r3, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iN.iR1));
	asm("str	r12, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iN.iR2));
	asm("ldmia	r1!, {r2,r3,r12} ");
	asm("str	r2, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iN.iR3));
	asm("str	r3, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iN.iR12));
	asm("str	r12, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iN.iR15));
	__JUMP(,lr);
	}


/** Get a pointer to a stored integer register, accounting for registers which
	are banked across modes.

@param	a		Pointer to saved register block
@param	aRegNum	Number of register required, 0-15 or -1 (indicates SPSR)
@param	aMode	Bottom 5 bits indicate which processor mode
				Other bits of aMode are ignored
@return			Pointer to the required saved register value

@internalTechnology
*/
__NAKED__ TArmReg* Arm::Reg(SFullArmRegSet& /*a*/, TInt /*aRegNum*/, TArmReg /*aMode*/)
	{
	asm("cmp	r1, #8 ");				// register number < 8 ?
	asm("addlo	r0, r0, r1, lsl #2 ");	// registers R0-R7 are not banked
	asm("blo	0f ");
	asm("cmp	r1, #15 ");				// register number = 15 ?
	asm("addeq	r0, r0, r1, lsl #2 ");	// register R15 not banked
	asm("movgt	r0, #0 ");				// no registers > 15
	asm("bge	0f ");
	asm("cmn	r1, #1 ");
	asm("movlt	r0, #0 ");				// no registers < -1
	asm("blt	0f ");
	asm("and	r12, r2, #0x1F ");
	asm("cmp	r12, #0x11 ");			// mode_fiq?
	asm("beq	1f ");					// skip if it is
	asm("cmp	r1, #13 ");
	asm("addlo	r0, r0, r1, lsl #2 ");	// registers R8-R12 are only banked in mode_fiq
	asm("blo	0f ");
	asm("cmp	r12, #0x10 ");			// mode_usr ?
	asm("cmpne	r12, #0x1F ");			// if not, mode_sys ?
	asm("bne	2f ");					// skip if neither
	asm("cmp	r1, #16 ");
	asm("addlo	r0, r0, r1, lsl #2 ");	// handle R13_usr, R14_usr
	asm("movhs	r0, #0 ");				// no SPSR in mode_usr or mode_sys
	asm("blo	0f ");
	asm("1: ");							// mode_fiq, regnum = 8-12
	asm("2: ");							// exception mode, regnum not 0-12 or 15
	asm("cmn	r1, #1 ");				// regnum = -1 ?
	asm("moveq	r1, #15 ");				// if so, change to 15
	asm("sub	r1, r1, #13 ");
	asm("add	r0, r0, r1, lsl #2 ");	// add 0 for R13, 4 for R14, 8 for SPSR
	asm("cmp	r12, #0x16 ");
	asm("addeq	r0, r0, #12 ");			// if mon, add offset from R13Fiq to R13Mon
	asm("cmpne	r12, #0x11 ");
	asm("addeq	r0, r0, #32 ");			// if valid but not svc/abt/und/irq, add offset from R13Irq to R13Fiq
	asm("cmpne	r12, #0x12 ");
	asm("addeq	r0, r0, #12 ");			// if valid but not svc/abt/und, add offset from R13Und to R13Irq
	asm("cmpne	r12, #0x1b ");
	asm("addeq	r0, r0, #12 ");			// if valid but not svc/abt, add offset from R13Abt to R13Und
	asm("cmpne	r12, #0x17 ");
	asm("addeq	r0, r0, #12 ");			// if valid but not svc, add offset from R13Svc to R13Abt
	asm("cmpne	r12, #0x13 ");
	asm("addeq	r0, r0, #%a0" : : "i" _FOFF(SFullArmRegSet, iN.iR13Svc));	// if valid mode add offset to R13Svc
	asm("movne	r0, #0 ");
	asm("0: ");
	__JUMP(,lr);
	}


/** Restore all the ARM registers

@internalTechnology
*/
__NAKED__ void Arm::RestoreState(SFullArmRegSet&)
	{
	}