os/kernelhwsrv/kernel/eka/nkernsmp/arm/ncutils.cia
author sl
Tue, 10 Jun 2014 14:32:02 +0200
changeset 1 260cb5ec6c19
permissions -rw-r--r--
Update contrib.
sl@0
     1
// Copyright (c) 2008-2009 Nokia Corporation and/or its subsidiary(-ies).
sl@0
     2
// All rights reserved.
sl@0
     3
// This component and the accompanying materials are made available
sl@0
     4
// under the terms of the License "Eclipse Public License v1.0"
sl@0
     5
// which accompanies this distribution, and is available
sl@0
     6
// at the URL "http://www.eclipse.org/legal/epl-v10.html".
sl@0
     7
//
sl@0
     8
// Initial Contributors:
sl@0
     9
// Nokia Corporation - initial contribution.
sl@0
    10
//
sl@0
    11
// Contributors:
sl@0
    12
//
sl@0
    13
// Description:
sl@0
    14
// e32\nkernsmp\arm\ncutils.cia
sl@0
    15
// 
sl@0
    16
//
sl@0
    17
sl@0
    18
#include <e32cia.h>
sl@0
    19
#include <arm.h>
sl@0
    20
#include <arm_gic.h>
sl@0
    21
sl@0
    22
extern TSpinLock BTraceLock;
sl@0
    23
sl@0
    24
extern "C" {
sl@0
    25
extern TUint32 CrashStateOut;
sl@0
    26
extern SFullArmRegSet DefaultRegSet;
sl@0
    27
}
sl@0
    28
sl@0
    29
//#define __DBG_MON_FAULT__
sl@0
    30
//#define __RAM_LOADED_CODE__
sl@0
    31
//#define __EARLY_DEBUG__
sl@0
    32
sl@0
    33
#ifdef _DEBUG
// Debug builds: poison the first two words of a dequeued link (its iNext and
// iPrev pointers, at offsets 0 and 4) with the pattern 0xDFDFDFDF, so that
// any stale use of a removed TPriListLink faults quickly and recognisably.
// rp = register holding the link pointer, rs = scratch register (clobbered).
#define ASM_KILL_LINK(rp,rs)	asm("mov "#rs", #0xdf ");\
								asm("orr "#rs", "#rs", "#rs", lsl #8 ");\
								asm("orr "#rs", "#rs", "#rs", lsl #16 ");\
								asm("str "#rs", ["#rp"] ");\
								asm("str "#rs", ["#rp", #4] ");
#else
// Release builds: removed links are not poisoned.
#define ASM_KILL_LINK(rp,rs)
#endif
sl@0
    42
sl@0
    43
#ifdef __PRI_LIST_MACHINE_CODED__
sl@0
    44
/** Return the priority of the highest priority item present on a priority list.

	Scans the 64-bit iPresent bitmask (two 32-bit words, one bit per priority)
	using CLZ, so this is constant time regardless of list contents.

	@return	The highest priority present or -1 if the list is empty.
 */
EXPORT_C __NAKED__ TInt TPriListBase::HighestPriority()
	{
	asm("ldr r2, [r0, #4] ");				// r2=iPresent MSW
	asm("ldr r1, [r0, #0] ");				// r1=iPresent LSW
	CLZ(0,2);								// r0=31-MSB(r2)
	asm("subs r0, r0, #32 ");				// r0=-1-MSB(r2), 0 if r2=0
	CLZcc(CC_EQ,0,1);						// if r2=0, r0=31-MSB(r1)
	asm("rsb r0, r0, #31 ");				// r0=highest priority (31-32 = -1 if both words zero)
	__JUMP(,lr);
	}
sl@0
    58
sl@0
    59
/** Find the highest priority item present on a priority list.
	If multiple items at the same priority are present, return the first to be
	added in chronological order.

	Computes the highest present priority from the 64-bit iPresent mask (as in
	HighestPriority()) and then indexes iQueue[] with it.

	@return	a pointer to the item or NULL if the list is empty.
 */
EXPORT_C __NAKED__ TPriListLink* TPriListBase::First()
	{
	asm("ldr r2, [r0, #4] ");				// r2=iPresent MSW
	asm("ldr r1, [r0], #8 ");				// r1=iPresent LSW, r0=&iQueue[0]
	CLZ(3,2);								// r3=31-MSB(r2)
	asm("subs r3, r3, #32 ");				// r3=-1-MSB(r2), 0 if r2=0
	CLZcc(CC_EQ,3,1);						// if r2=0, r3=31-MSB(r1)
	asm("rsbs r3, r3, #31 ");				// r3=highest priority
	asm("ldrpl r0, [r0, r3, lsl #2] ");		// if r3>=0 list is nonempty, r0->first entry
	asm("movmi r0, #0 ");					// if r3<0 list empty, return NULL
	__JUMP(,lr);
	}
sl@0
    77
sl@0
    78
/** Add an item to a priority list.

	The item is appended to the tail of the circular doubly-linked queue for
	its priority (byte at offset 8 of the link), and the corresponding bit of
	the present mask is set if the queue was previously empty.
	On entry r0=this, r1=aLink.

	@param aLink = a pointer to the item - must not be NULL
 */
EXPORT_C __NAKED__ void TPriListBase::Add(TPriListLink* /*aLink*/)
	{
	asm("ldrb r2, [r1, #8]" );				// r2=priority of aLink
	asm("add ip, r0, #8 ");					// ip=&iQueue[0]
	asm("ldr r3, [ip, r2, lsl #2]! ");		// r3->first entry at this priority
	asm("cmp r3, #0 ");						// is this first entry at this priority?
	asm("bne pri_list_add_1 ");				// branch if not
	asm("str r1, [ip] ");					// if queue originally empty, iQueue[pri]=aThread
	asm("ldrb ip, [r0, r2, lsr #3]! ");		// ip=relevant byte of present mask, r0->same
	asm("and r2, r2, #7 ");
	asm("mov r3, #1 ");
	asm("str r1, [r1, #0] ");				// aThread->next=aThread
	asm("orr ip, ip, r3, lsl r2 ");			// ip |= 1<<(pri&7)
	asm("str r1, [r1, #4] ");				// aThread->iPrev=aThread
	asm("strb ip, [r0] ");					// update relevant byte of present mask
	__JUMP(,lr);
	asm("pri_list_add_1: ");				// queue nonempty: link aLink in as the new last entry
	asm("ldr ip, [r3, #4] ");				// if nonempty, ip=last
	asm("str r1, [r3, #4] ");				// first->prev=aThread
	asm("stmia r1, {r3,ip} ");				// aThread->next=r3=first, aThread->prev=ip=last
	asm("str r1, [ip, #0] ");				// last->next=aThread
	__JUMP(,lr);
	}
sl@0
   105
sl@0
   106
sl@0
   107
/** Removes an item from a priority list.

	Unlinks the item from the circular queue for its priority. If it was the
	only item at that priority the queue head is nulled and the priority's bit
	in the present mask is cleared; if it was the queue head, the head moves
	to the next item. In debug builds the removed link is poisoned
	(ASM_KILL_LINK). On entry r0=this, r1=aLink.

	@param aLink A pointer to the item - this must not be NULL.
 */
EXPORT_C __NAKED__ void TPriListBase::Remove(TPriListLink* /*aLink*/)
	{
	asm("ldmia r1, {r2,r3} ");				// r2=aLink->iNext, r3=aLink->iPrev
	ASM_KILL_LINK(r1,r12);
	asm("subs r12, r1, r2 ");				// check if more threads at this priority, r12=0 if not
	asm("bne 1f ");							// branch if there are more at same priority
	asm("ldrb r2, [r1, #%a0]" : : "i" _FOFF(NThread, iPriority));	// r2=thread priority
	asm("add r1, r0, #%a0" : : "i" _FOFF(TPriListBase, iQueue));	// r1->iQueue[0]
	asm("str r12, [r1, r2, lsl #2] ");		// iQueue[priority]=NULL (r12 is known to be 0 here)
	asm("ldrb r1, [r0, r2, lsr #3] ");		// r1=relevant byte in present mask
	asm("and r3, r2, #7 ");					// r3=priority & 7
	asm("mov r12, #1 ");
	asm("bic r1, r1, r12, lsl r3 ");		// clear bit in present mask
	asm("strb r1, [r0, r2, lsr #3] ");		// update relevant byte in present mask
	__JUMP(,lr);
	asm("1: ");								// get here if there are other threads at same priority
	asm("ldrb r12, [r1, #%a0]" : : "i" _FOFF(NThread, iPriority));	// r12=thread priority
	asm("add r0, r0, #%a0" : : "i" _FOFF(TPriListBase, iQueue));		// r0=&iQueue[0]
	asm("str r3, [r2, #4] ");				// next->prev=prev
	asm("ldr r12, [r0, r12, lsl #2]! ");	// r12=iQueue[priority], r0=&iQueue[priority]
	asm("str r2, [r3, #0] ");				// and prev->next=next
	asm("cmp r12, r1 ");					// if aThread was first...
	asm("streq r2, [r0, #0] ");				// iQueue[priority]=aThread->next
	__JUMP(,lr);							// finished
	}
sl@0
   136
sl@0
   137
sl@0
   138
/** Change the priority of an item on a priority list

	Removes the item from its old priority queue (clearing the present-mask
	bit if that queue becomes empty) and appends it to the tail of the new
	priority's queue (setting the present-mask bit if that queue was empty).
	No-op if the priority is unchanged. On entry r0=this, r1=aLink, r2=new
	priority.

	@param	aLink = pointer to the item to act on - must not be NULL
	@param	aNewPriority = new priority for the item
 */
EXPORT_C __NAKED__ void TPriListBase::ChangePriority(TPriListLink* /*aLink*/, TInt /*aNewPriority*/)
	{
	asm("ldrb r3, [r1, #8] ");				// r3=old priority
	asm("stmfd sp!, {r4-r6,lr} ");
	asm("cmp r3, r2 ");
	asm("ldmeqfd sp!, {r4-r6,pc} ");		// if old priority=new, finished
	asm("ldmia r1, {r4,r12} ");				// r4=next, r12=prev
	asm("ldmia r0!, {r6,lr} ");				// lr:r6=present mask, r0=&iQueue[0]
	asm("subs r5, r4, r1 ");				// check if aLink is only one at that priority, r5=0 if it is
	asm("beq change_pri_1 ");				// branch if it is
	asm("ldr r5, [r0, r3, lsl #2] ");		// r5=iQueue[old priority]
	asm("str r4, [r12, #0] ");				// prev->next=next
	asm("str r12, [r4, #4] ");				// next->prev=prev
	asm("cmp r5, r1 ");						// was aLink first?
	asm("streq r4, [r0, r3, lsl #2] ");		// if it was, iQueue[old priority]=aLink->next
	asm("b change_pri_2 ");
	asm("change_pri_1: ");					// aLink was alone at its old priority
	asm("str r5, [r0, r3, lsl #2] ");		// if empty, set iQueue[old priority]=NULL (r5=0 here)
	asm("mov r12, #0x80000000 ");
	asm("rsbs r3, r3, #31 ");				// r3=31-priority
	asm("bicmi lr, lr, r12, ror r3 ");		// if pri>31, clear bit in MS word
	asm("bicpl r6, r6, r12, ror r3 ");		// if pri<=31, clear bit in LS word
	asm("change_pri_2: ");					// old queue fixed up; now insert at new priority
	asm("ldr r4, [r0, r2, lsl #2] ");		// r4=iQueue[new priority]
	asm("strb r2, [r1, #8] ");				// store new priority
	asm("cmp r4, #0 ");						// new priority queue empty?
	asm("bne change_pri_3 ");				// branch if not
	asm("str r1, [r0, r2, lsl #2] ");		// if new priority queue was empty, iQueue[new p]=aLink
	asm("mov r12, #0x80000000 ");
	asm("str r1, [r1, #0] ");				// aLink->next=aLink
	asm("rsbs r2, r2, #31 ");				// r2=31-priority
	asm("str r1, [r1, #4] ");				// aLink->prev=aLink
	asm("orrmi lr, lr, r12, ror r2 ");		// if pri>31, set bit in MS word
	asm("orrpl r6, r6, r12, ror r2 ");		// if pri<=31, set bit in LS word
	asm("stmdb r0!, {r6,lr} ");				// store present mask and restore r0
	asm("ldmfd sp!, {r4-r6,pc} ");
	asm("change_pri_3: ");					// new queue nonempty: append aLink at the tail
	asm("ldr r12, [r4, #4] ");				// r12->last link at this priority
	asm("str r1, [r4, #4] ");				// first->prev=aLink
	asm("str r1, [r12, #0] ");				// old last->next=aLink
	asm("stmia r1, {r4,r12} ");				// aLink->next=r3=first, aLink->prev=r12=old last
	asm("stmdb r0!, {r6,lr} ");				// store present mask and restore r0
	asm("ldmfd sp!, {r4-r6,pc} ");
	}
sl@0
   187
#endif
sl@0
   188
sl@0
   189
/** Early per-CPU state initialisation.

	Installs aSS (r1) as this CPU's TSubScheduler pointer in the read-write,
	no-user-access thread ID register, then cycles through the banked ARM
	exception modes (ABT, UND, FIQ, IRQ, interrupts disabled in each) to
	record each mode's current stack pointer in the subscheduler. The ABT-mode
	stack doubles as a register save area: its SFullArmRegSet iExcCode is set
	to -1 (meaning "no exception context saved"), and i_IrqNestCount is also
	initialised to -1. Finally copies the global peripheral addresses (SCU,
	GIC distributor, GIC CPU interface, local timer) from TheScheduler into
	this CPU's TSubScheduler, and zeroes the RO/RW user thread ID registers.

	On entry r0 = aCpu (not referenced here), r1 = aSS. Returns in SVC mode.
*/
__NAKED__ void initialiseState(TInt /*aCpu*/, TSubScheduler* /*aSS*/)
	{
	SET_RWNO_TID(,r1);						// per-CPU register -> this CPU's TSubScheduler
	__ASM_CLI_MODE(MODE_ABT);				// switch to ABT mode, interrupts off
	asm("str	sp, [r1, #%a0]" : : "i" _FOFF(TSubScheduler, i_AbtStackTop));
	asm("mvn	r3, #0 ");					// r3 = -1
	asm("str	r3, [sp, #%a0]" : : "i" _FOFF(SFullArmRegSet, iExcCode));	// mark reg save area empty
	asm("str	r3, [r1, #%a0]" : : "i" _FOFF(TSubScheduler, i_IrqNestCount));
	__ASM_CLI_MODE(MODE_UND);				// switch to UND mode, interrupts off
	asm("str	sp, [r1, #%a0]" : : "i" _FOFF(TSubScheduler, i_UndStackTop));
	__ASM_CLI_MODE(MODE_FIQ);				// switch to FIQ mode, interrupts off
	asm("str	sp, [r1, #%a0]" : : "i" _FOFF(TSubScheduler, i_FiqStackTop));
	__ASM_CLI_MODE(MODE_IRQ);				// switch to IRQ mode, interrupts off
	asm("str	sp, [r1, #%a0]" : : "i" _FOFF(TSubScheduler, i_IrqStackTop));
	__ASM_CLI_MODE(MODE_SVC);				// back to SVC mode
	asm("ldr	r2, __TheScheduler ");
	asm("ldr	r3, [r2, #%a0]" : : "i" _FOFF(TScheduler, i_ScuAddr));
	asm("str	r3, [r1, #%a0]" : : "i" _FOFF(TSubScheduler, i_ScuAddr));
	asm("ldr	r3, [r2, #%a0]" : : "i" _FOFF(TScheduler, i_GicDistAddr));
	asm("str	r3, [r1, #%a0]" : : "i" _FOFF(TSubScheduler, i_GicDistAddr));
	asm("ldr	r3, [r2, #%a0]" : : "i" _FOFF(TScheduler, i_GicCpuIfcAddr));
	asm("str	r3, [r1, #%a0]" : : "i" _FOFF(TSubScheduler, i_GicCpuIfcAddr));
	asm("ldr	r3, [r2, #%a0]" : : "i" _FOFF(TScheduler, i_LocalTimerAddr));
	asm("str	r3, [r1, #%a0]" : : "i" _FOFF(TSubScheduler, i_LocalTimerAddr));
	asm("mov	r3, #0 ");
	SET_RWRO_TID(,r3);						// clear user-readable thread ID registers
	SET_RWRW_TID(,r3);

	__JUMP(,lr);

	asm("__TheScheduler: ");				// literal pool: address of the global scheduler
	asm(".word TheScheduler ");
	}
sl@0
   222
sl@0
   223
/** Return this CPU's MPIDR (Multiprocessor Affinity Register, CP15 c0,c0,5),
	which identifies the executing core within the cluster. */
__NAKED__ TUint32 __mpid()
	{
	asm("mrc	p15, 0, r0, c0, c0, 5 ");
	__JUMP(,lr);
	}
sl@0
   228
sl@0
   229
/** @internalTechnology

	Called to indicate that the system has crashed and all CPUs should be
	halted and should dump their registers.

	Overview of the visible flow:
	 1. Save this CPU's full register state into its i_Regs area (or the
	    static DefaultRegSet if no subscheduler/regs yet), recording the
	    caller's a0/a1 in iCrashArgs unless already set.
	 2. In FIQ mode with interrupts off, atomically (LDREX/STREX) set this
	    CPU's bit in CrashState. The first CPU to set a bit becomes the
	    "crash master"; it records the active-CPU mask in CrashStateOut and
	    sends CRASH_IPI to all other CPUs via the GIC distributor's soft-IRQ
	    register. Non-first CPUs mask all interrupts except the crash IPI
	    (raise GIC priority mask) and WFE-loop forever.
	 3. The master calls NKCrashHandler(0,0,0), waits (bounded by a 2^32
	    iteration counter) for the other CPUs to clear their bits in
	    CrashStateOut, then calls NKCrashHandler(1,a0,a1), which should not
	    return.
*/
__NAKED__ void NKern::NotifyCrash(const TAny* /*a0*/, TInt /*a1*/)
	{
	asm("stmfd	sp!, {r0-r1} ");			// save parameters
	GET_RWNO_TID(,r0);						// r0 = this CPU's TSubScheduler (0 in early boot)
	asm("cmp	r0, #0 ");
	asm("ldreq	r0, __SS0 ");				// early boot: fall back to TheSubSchedulers[0]
	asm("ldr	r0, [r0, #%a0]" : : "i" _FOFF(TSubScheduler,i_Regs));
	asm("cmp	r0, #0 ");
	asm("ldreq	r0, __DefaultRegs ");		// no reg save area yet: use static DefaultRegSet
	asm("ldr	r1, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet, iExcCode));
	asm("cmp	r1, #0 ");					// context already saved?
	asm("bge	state_already_saved ");		// skip if so
	asm("mov	r1, lr ");
	asm("bl "	CSM_ZN3Arm9SaveStateER14SFullArmRegSet );
	asm("str	r1, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet, iN.iR15));	// caller's LR as saved PC
	asm("ldmia	sp!, {r2-r3} ");			// original R0,R1
	asm("stmia	r0, {r2-r3} ");				// save original R0,R1
	asm("add	r1, r0, #%a0" : : "i" _FOFF(SFullArmRegSet, iExcCode));
	asm("mov	r4, r0 ");					// save pointer to i_Regs
	asm("stmib	r1, {r2-r3} ");				// save a0, a1 in iCrashArgs
	asm("mov	r1, #13 ");					// r1 = regnum
	asm("mrs	r2, cpsr ");				// r2 = mode
	asm("bl "	CSM_ZN3Arm3RegER14SFullArmRegSetim );	// r0 = pointer to exception mode R13
	asm("str	sp, [r0] ");				// save correct original value for exception mode R13
	asm("b		state_save_complete ");

	asm("state_already_saved: ");
	asm("ldmia	sp!, {r2-r3} ");			// original R0,R1
	asm("add	r1, r0, #%a0" : : "i" _FOFF(SFullArmRegSet, iExcCode));
	asm("ldr	r4, [r1, #4]! ");			// r4 = first iCrashArgs word, r1 -> iCrashArgs
	asm("cmp	r4, #0 ");
	asm("stmeqia	r1, {r2-r3} ");			// save a0, a1 in iCrashArgs, provided iCrashArgs not already set
	asm("mov	r4, r0 ");					// save pointer to i_Regs
	asm("state_save_complete: ");

	__ASM_CLI_MODE(MODE_FIQ);				// mode_fiq, interrupts off
	GET_RWNO_TID(,r0);
	asm("ldr	r1, __CrashState ");
	asm("cmp	r0, #0 ");
	asm("moveq	r2, #1 ");					// early boot: just mark CrashState=1 ...
	asm("streq	r2, [r1] ");
	asm("beq	skip_other_cores ");		// If subscheduler not yet set, don't bother with other cores
	asm("ldr	r2, [r0, #%a0]" : : "i" _FOFF(TSubScheduler, iCpuMask));
	asm("ldr	r5, [r0, #%a0]" : : "i" _FOFF(TSubScheduler, i_GicCpuIfcAddr));
//	asm("ldr	r4, [r0, #%a0]" : : "i" _FOFF(TSubScheduler,i_Regs));
	asm("bic	sp, sp, #4 ");				// align stack to multiple of 8

	__DATA_MEMORY_BARRIER_Z__(r6);
	asm("1: ");
	LDREX(3,1);								// r3 = CrashState (previous value)
	asm("orr	r5, r3, r2 ");
	STREX(12,5,1);							// set bit in CrashState for this CPU
	asm("cmp	r12, #0 ");
	asm("bne	1b ");						// retry if STREX failed
	__DATA_MEMORY_BARRIER__(r6);
	asm("cmp	r3, #0 ");					// were we first to crash?
	asm("beq	first_to_crash ");			// branch if so

	// we weren't first to crash, so wait here for a crash IPI
	// disable all interrupts except for CRASH_IPI
	GET_RWNO_TID(,r0);
	asm("ldr	r0, [r0, #%a0]" : : "i" _FOFF(TSubScheduler, i_GicCpuIfcAddr));
	asm("mov	r1, #0 ");
	asm("1: ");
	asm("add	r1, r1, #1 ");				// raise the GIC priority mask one step at a time...
	asm("str	r1, [r0, #%a0]" : : "i" _FOFF(GicCpuIfc, iPriMask));
	__DATA_SYNC_BARRIER__(r6);
	asm("ldr	r2, [r0, #%a0]" : : "i" _FOFF(GicCpuIfc, iPriMask));
	asm("cmp	r2, #0 ");
	asm("beq	1b ");						// loop until priority mask is nonzero

	asm("2: ");
	__ASM_STI_MODE(MODE_ABT);				// enable interrupts in ABT mode so the IPI can be taken
	ARM_WFE;
	asm("b		2b ");						// loop until we get a CRASH_IPI

	// This CPU was first to crash
	asm("first_to_crash: ");
	asm("ldr	r2, [r0, #%a0]" : : "i" _FOFF(TSubScheduler, iScheduler));
	asm("ldr	r7, __CrashStateOut ");
	asm("ldr	r3, [r2, #%a0]" : : "i" _FOFF(TScheduler, iActiveCpus1));
	asm("str	r3, [r7] ");			// mask of CPUs pending
	asm("ldr	r5, [r0, #%a0]" : : "i" _FOFF(TSubScheduler, i_GicDistAddr));
	asm("ldr	r1, __CrashIPIWord ");
	__DATA_SYNC_BARRIER_Z__(r6);
	asm("str	r1, [r5, #%a0]" : : "i" _FOFF(GicDistributor, iSoftIrq));	// send CRASH_IPI to all other CPUs
	__DATA_SYNC_BARRIER__(r6);

	asm("skip_other_cores: ");
	asm("mov	r0, #0 ");
	asm("mov	r1, #0 ");
	asm("mov	r2, #0 ");
	asm("bl		NKCrashHandler ");		// call NKCrashHandler(0,0,0)

	__DATA_SYNC_BARRIER__(r6);
	GET_RWNO_TID(,r0);
	asm("cmp	r0, #0 ");
	asm("beq	skip_other_cores2 ");	// If subscheduler not yet set, don't bother with other cores
	asm("ldr	r2, [r0, #%a0]" : : "i" _FOFF(TSubScheduler, iCpuMask));
	asm("7: ");
	LDREX(1,7);
	asm("bic	r1, r1, r2 ");
	STREX(3,1,7);						// atomic { CrashStateOut &= ~iCpuMask; }
	asm("cmp	r3, #0 ");
	asm("bne	7b ");
	asm("1: ");
	asm("ldr	r1, [r7] ");
	asm("cmp	r1, #0 ");				// wait for all CPUs to acknowledge
	asm("beq	2f ");
	asm("adds	r6, r6, #1 ");
	asm("bne	1b ");					// if not ACKed after 2^32 iterations give up waiting
	asm("2: ");
	__DATA_MEMORY_BARRIER_Z__(r0);

	asm("skip_other_cores2: ");
	asm("mov	r0, #1 ");
	asm("ldr	r1, [r4, #%a0] " : : "i" _FOFF(SFullArmRegSet,iN.iR0));	// original R0 = a0 parameter
	asm("ldr	r2, [r4, #%a0] " : : "i" _FOFF(SFullArmRegSet,iN.iR1));	// original R1 = a1 parameter
	asm("bl		NKCrashHandler ");		// call NKCrashHandler(1,a0,a1) - shouldn't return

	// shouldn't get back here
	__ASM_CRASH();

	// literal pool: addresses of the crash-coordination globals
	asm("__CrashState: ");
	asm(".word %a0" : : "i" ((TInt)&CrashState));
	asm("__CrashStateOut: ");
	asm(".word CrashStateOut ");
	asm("__CrashIPIWord: ");
	asm(".word %a0" : : "i" ( (TInt)GIC_IPI_OTHERS(CRASH_IPI_VECTOR) ));
	asm("__SS0: ");
	asm(".word %a0" : : "i" ((TInt)&TheSubSchedulers[0]));
	asm("__DefaultRegs: ");
	asm(".word %a0" : : "i" ((TInt)&DefaultRegSet));
	}
sl@0
   369
sl@0
   370
sl@0
   371
#ifdef __USE_BTRACE_LOCK__
// Acquire the global BTraceLock with IRQs disabled, preserving the caller's
// live registers (listed in 'regs', which must be a braced register list
// string, e.g. "{r0-r3}") across the call. The saved interrupt state returned
// by LockIrqSave() is kept in r4 for the matching release macro; r4 must
// therefore remain untouched between acquire and release.
#define	__ASM_ACQUIRE_BTRACE_LOCK(regs)					\
	asm("stmfd sp!, " regs);							\
	asm("ldr r0, __BTraceLock ");						\
	asm("bl " CSM_ZN9TSpinLock11LockIrqSaveEv );		\
	asm("mov r4, r0 ");									\
	asm("ldmfd sp!, " regs)

// Release the global BTraceLock, restoring the interrupt state previously
// stashed in r4 by __ASM_ACQUIRE_BTRACE_LOCK. Preserves r0-r1.
#define	__ASM_RELEASE_BTRACE_LOCK()						\
	asm("stmfd sp!, {r0-r1} ");							\
	asm("ldr r0, __BTraceLock ");						\
	asm("mov r1, r4 ");									\
	asm("bl " CSM_ZN9TSpinLock16UnlockIrqRestoreEi );	\
	asm("ldmfd sp!, {r0-r1} ")

#else
// BTrace lock disabled at build time: both macros expand to nothing.
#define	__ASM_ACQUIRE_BTRACE_LOCK(regs)
#define	__ASM_RELEASE_BTRACE_LOCK()
#endif
sl@0
   390
sl@0
   391
sl@0
   392
/** Output a 4-word BTrace record.

	Looks up the per-category filter byte in BTraceData using the category
	field of a0; if the category is enabled, tail-calls the registered
	handler (under BTraceLock when __USE_BTRACE_LOCK__), otherwise returns 0.
	The handler's return value is passed back to the caller.
*/
__NAKED__ EXPORT_C TBool BTrace::Out(TUint32 a0, TUint32 a1, TUint32 a2, TUint32 a3)
	{
	asm("stmdb	sp!, {r2,r3,r4,lr}");
	__ASM_ACQUIRE_BTRACE_LOCK("{r0-r1}");
	asm("ldr	r12, __BTraceData");
	asm("and	r2, r0, #%a0" : : "i" ((TInt)(0xff<<(BTrace::ECategoryIndex*8))));	// isolate category byte
	asm("mov	r3, r1");			// r3 = a1 (ready for call to handler)
	asm("ldrb	r2, [r12, r2, lsr #%a0]" : : "i" ((TInt)(BTrace::ECategoryIndex*8)));	// r2 = filter byte for category
	asm("ldr	r12, [r12, #%a0]" : : "i" _FOFF(SBTraceData,iHandler));
	asm("adr	lr, 9f");			// return from handler to label 9 below
	asm("cmp	r2, #0");
	asm("moveq	r0, #0");			// category disabled: return EFalse
	__JUMP(ne,	r12);				// category enabled: jump to handler
	asm("9: ");
	__ASM_RELEASE_BTRACE_LOCK();
	__POPRET("r2,r3,r4,");
	}
sl@0
   409
sl@0
   410
/** Output a BTrace record with an attached data array.

	As BTrace::Out but with aData/aDataSize appended. The size is clamped to
	KMaxBTraceDataArray (setting the ERecordTruncated flag if clamped) and is
	added into the header word; if the (clamped) size is 1-4 bytes the data is
	passed by value in place of the aData pointer on the stack.
*/
__NAKED__ EXPORT_C TBool BTrace::OutN(TUint32 a0, TUint32 a1, TUint32 a2, const TAny* aData, TInt aDataSize)
	{
	asm("stmdb	sp!, {r2,r3,r4,lr}");
	__ASM_ACQUIRE_BTRACE_LOCK("{r0-r3}");
	asm("ldr	r12, __BTraceData");
	asm("and	r2, r0, #%a0" : : "i" ((TInt)(0xff<<(BTrace::ECategoryIndex*8))));	// isolate category byte
	asm("ldr	r14, [sp, #16]");	// r14 = aDataSize
	asm("ldrb	r2, [r12, r2, lsr #%a0]" : : "i" ((TInt)(BTrace::ECategoryIndex*8)));	// r2 = filter byte for category
	asm("ldr	r12, [r12, #%a0]" : : "i" _FOFF(SBTraceData,iHandler));
	asm("cmp	r2, #0");
	asm("moveq	r0, #0");			// category disabled: return EFalse
	asm("beq	0f ");

	asm("cmp	r14, #%a0" : : "i" ((TInt)KMaxBTraceDataArray));
	asm("movhi	r14, #%a0" : : "i" ((TInt)KMaxBTraceDataArray));	// clamp size ...
 	asm("orrhi	r0, r0, #%a0" : : "i" ((TInt)(BTrace::ERecordTruncated<<(BTrace::EFlagsIndex*8))));	// ... and flag truncation
	asm("add	r0, r0, r14");		// add data size into header word
	asm("subs	r14, r14, #1");
	asm("ldrhs	r2, [r3]");			// get first word of aData if aDataSize!=0
	asm("mov	r3, r1");			// r3 = a1 (ready for call to handler)
	asm("cmp	r14, #4");
	asm("strlo	r2, [sp, #4]");		// replace aData with first word if aDataSize is 1-4

	asm("mov	lr, pc");			// call handler
	__JUMP(,	r12);
	asm("0: ");
	__ASM_RELEASE_BTRACE_LOCK();
	__POPRET("r2,r3,r4,");
	}
sl@0
   439
sl@0
   440
/** Output a 4-word BTrace record with a context ID.

	As BTrace::Out but additionally computes a context ID in r2 before
	calling the handler: 1 for FIQ mode, 2 for IRQ/ABT/UND/SYS modes, 0 in
	early boot (no subscheduler), 3 when running in an IDFC, otherwise the
	current NThread pointer. Interrupts are briefly disabled while the
	per-CPU state is sampled, then the original CPSR is restored.
*/
__NAKED__ EXPORT_C TBool BTrace::OutX(TUint32 a0, TUint32 a1, TUint32 a2, TUint32 a3)
	{
	asm("stmdb	sp!, {r2,r3,r4,lr}");
	__ASM_ACQUIRE_BTRACE_LOCK("{r0-r1}");
	asm("ldr	r12, __BTraceData");
	asm("and	r2, r0, #%a0" : : "i" ((TInt)(0xff<<(BTrace::ECategoryIndex*8))));	// isolate category byte
	asm("mov	r3, r1");			// r3 = a1 (ready for call to handler)
	asm("ldrb	r2, [r12, r2, lsr #%a0]" : : "i" ((TInt)(BTrace::ECategoryIndex*8)));	// r2 = filter byte for category
	asm("mrs	r14, cpsr ");		// remember original CPSR
	asm("ldr	r12, [r12, #%a0]" : : "i" _FOFF(SBTraceData,iHandler));
	asm("cmp	r2, #0");			// check category filter
	asm("moveq	r0, #0");
	asm("beq	0f ");				// if category disabled, exit now
	__ASM_CLI();					// interrupts off while sampling per-CPU state
	asm("and	r2, r14, #0x0f ");	// r2 = original mode bits
	asm("cmp	r2, #3 ");
	asm("movhi	r2, #2 ");			// r2 = context ID = 1 for FIQ, 2 for IRQ/ABT/UND/SYS
	asm("bne	1f ");
	GET_RWNO_TID(,r1);
	asm("movs   r2, r1 ");			// r2 = context ID = 0 for early boot, no threads
	asm("beq    1f ");
	asm("ldrb	r2, [r1, #%a0]" : : "i" _FOFF(TSubScheduler,iInIDFC));
	asm("cmp	r2, #0 ");
	asm("ldreq	r2, [r1, #%a0]" : : "i" _FOFF(TSubScheduler,iCurrentThread));
	asm("movne	r2, #3 ");			// r2 = context ID = 3 for IDFC = NThread pointer for thread
	asm("1: ");
	asm("msr	cpsr, r14 ");		// restore original CPSR
	asm("mov	lr, pc");			// call handler
	__JUMP(,	r12);
	asm("0: ");
	__ASM_RELEASE_BTRACE_LOCK();
	__POPRET("r2,r3,r4,");
	}
sl@0
   473
sl@0
   474
/** Output a BTrace record with an attached data array and a context ID.

	Combines BTrace::OutN (clamped/truncated data array handling) with
	BTrace::OutX (context ID computed in r2: 1=FIQ, 2=IRQ/ABT/UND/SYS,
	0=early boot, 3=IDFC, else current NThread pointer).
*/
__NAKED__ EXPORT_C TBool BTrace::OutNX(TUint32 a0, TUint32 a1, TUint32 a2, const TAny* aData, TInt aDataSize)
	{
	asm("stmdb	sp!, {r2,r3,r4,lr}");
	__ASM_ACQUIRE_BTRACE_LOCK("{r0-r3}");
	asm("ldr	r12, __BTraceData");
	asm("and	r2, r0, #%a0" : : "i" ((TInt)(0xff<<(BTrace::ECategoryIndex*8))));	// isolate category byte
	asm("ldr	r14, [sp, #16]");	// r14 = aDataSize
	asm("ldrb	r2, [r12, r2, lsr #%a0]" : : "i" ((TInt)(BTrace::ECategoryIndex*8)));	// r2 = filter byte for category
	asm("ldr	r12, [r12, #%a0]" : : "i" _FOFF(SBTraceData,iHandler));
	asm("cmp	r2, #0");			// check category filter
	asm("moveq	r0, #0");
	asm("beq	0f ");				// if category disabled, exit now

	asm("cmp	r14, #%a0" : : "i" ((TInt)KMaxBTraceDataArray));
	asm("movhi	r14, #%a0" : : "i" ((TInt)KMaxBTraceDataArray));	// clamp size ...
 	asm("orrhi	r0, r0, #%a0" : : "i" ((TInt)(BTrace::ERecordTruncated<<(BTrace::EFlagsIndex*8))));	// ... and flag truncation
	asm("add	r0, r0, r14");		// add data size into header word
	asm("subs	r14, r14, #1");
	asm("ldrhs	r2, [r3]");			// get first word of aData if aDataSize!=0
	asm("mov	r3, r1");			// r3 = a1 (ready for call to handler)
	asm("cmp	r14, #4");
	asm("strlo	r2, [sp, #4]");		// replace aData with first word if aDataSize is 1-4

	asm("mrs	r14, cpsr ");		// remember original CPSR
	__ASM_CLI();					// interrupts off while sampling per-CPU state
	asm("and	r2, r14, #0x0f ");	// r2 = original mode bits
	asm("cmp	r2, #3 ");
	asm("movhi	r2, #2 ");			// r2 = context ID = 1 for FIQ, 2 for IRQ/ABT/UND/SYS
	asm("bne	1f ");
	GET_RWNO_TID(,r1);
	asm("movs   r2, r1 ");			// r2 = context ID = 0 for early boot, no threads
	asm("beq    1f ");
	asm("ldrb	r2, [r1, #%a0]" : : "i" _FOFF(TSubScheduler,iInIDFC));
	asm("cmp	r2, #0 ");
	asm("ldreq	r2, [r1, #%a0]" : : "i" _FOFF(TSubScheduler,iCurrentThread));
	asm("movne	r2, #3 ");			// r2 = context ID = 3 for IDFC = NThread pointer for thread
	asm("1: ");
	asm("msr	cpsr, r14 ");		// restore original CPSR

	asm("mov	lr, pc");			// call handler
	__JUMP(,	r12);
	asm("0: ");
	__ASM_RELEASE_BTRACE_LOCK();
	__POPRET("r2,r3,r4,");
	}
sl@0
   519
sl@0
   520
/** Output a BTrace record with a large attached data block.

	Checks the category filter, computes a context ID (same scheme as OutX:
	1=FIQ, 2=IRQ/ABT/UND/SYS, 0=early boot, 3=IDFC, else NThread pointer),
	then delegates to BTrace::DoOutBig with the context ID and caller PC
	pushed as the extra stack arguments. Also hosts the literal pool
	(__BTraceLock/__BTraceData) used by all the BTrace functions above.
*/
__NAKED__ EXPORT_C TBool BTrace::OutBig(TUint32 a0, TUint32 a1, const TAny* aData, TInt aDataSize)
	{
	asm("stmdb	sp!, {r4,lr}");
	asm("ldr	r12, __BTraceData");
	asm("str	lr, [sp, #-4]! ");	// PC
	asm("and	r14, r0, #%a0" : : "i" ((TInt)(0xff<<(BTrace::ECategoryIndex*8))));	// isolate category byte
	asm("ldrb	r14, [r12, r14, lsr #%a0]" : : "i" ((TInt)(BTrace::ECategoryIndex*8)));	// r14 = filter byte
	asm("cmp	r14, #0");			// check category filter
	asm("addeq	sp, sp, #4 ");		// disabled: drop pushed PC ...
	asm("moveq	r0, #0 ");			// ... and return EFalse
	asm("beq	0f ");				// if category disabled, exit now

	asm("mrs	r14, cpsr ");		// remember original CPSR
	__ASM_CLI();					// interrupts off while sampling per-CPU state
	asm("and	r12, r14, #0x0f ");	// r12 = original mode bits
	asm("cmp	r12, #3 ");
	asm("movhi	r12, #2 ");			// r12 = context ID = 1 for FIQ, 2 for IRQ/ABT/UND/SYS
	asm("bne	1f ");
	GET_RWNO_TID(,r12);
	asm("cmp    r12, #0 ");			// r12 = context ID = 0 for early boot, no threads
	asm("beq    1f ");
	asm("ldrb	r12, [r12, #%a0]" : : "i" _FOFF(TSubScheduler,iInIDFC));
	asm("cmp	r12, #0 ");
	GET_RWNO_TID(eq,r12);
	asm("ldreq	r12, [r12, #%a0]" : : "i" _FOFF(TSubScheduler,iCurrentThread));
	asm("movne	r12, #3 ");			// r12 = context ID = 3 for IDFC = NThread pointer for thread
	asm("1: ");
	asm("msr	cpsr, r14 ");		// restore original CPSR
	asm("str	r12, [sp, #-4]! ");	// context ID
	asm("bl "	CSM_ZN6BTrace8DoOutBigEmmPKvimm);
	asm("add	sp, sp, #8");		// pop context ID and PC
	asm("0: ");
	__POPRET("r4,");

	// literal pool shared by the BTrace entry points in this file
	asm("__BTraceLock: ");
	asm(".word %a0" : : "i" ((TInt)&BTraceLock));
	asm("__BTraceData: ");
	asm(".word BTraceData ");
	}
sl@0
   559
sl@0
   560
sl@0
   561
/** Output a secondary-filtered BTrace record.
	Identical entry sequence to OutFilteredX, so the naked body simply falls
	through into it (the two functions must stay adjacent in this file). */
__NAKED__ EXPORT_C TBool BTrace::OutFiltered(TUint32 a0, TUint32 a1, TUint32 a2, TUint32 a3)
	{
	// fall through to OutFilteredX...
	}
sl@0
   565
sl@0
   566
/** Output a secondary-filtered BTrace record with a context ID.

	After the primary (category) filter check, calls the local subroutine
	btrace_check_filter2 which applies the secondary filter (a1 = filter UID
	via SBTraceData::CheckFilter2) and, if the trace passes, computes the
	context ID in r2 and loads the handler pointer into r12. Only then is
	the BTrace lock taken and the handler invoked.
*/
__NAKED__ EXPORT_C TBool BTrace::OutFilteredX(TUint32 a0, TUint32 a1, TUint32 a2, TUint32 a3)
	{
	asm("stmdb	sp!, {r2,r3,r4,lr}");
	asm("ldr	r12, __BTraceData");
	asm("and	r2, r0, #%a0" : : "i" ((TInt)(0xff<<(BTrace::ECategoryIndex*8))));	// isolate category byte
	asm("mov	r3, r1");			// r3 = a1 (ready for call to handler)
	asm("ldrb	r2, [r12, r2, lsr #%a0]" : : "i" ((TInt)(BTrace::ECategoryIndex*8)));	// r2 = filter byte
	asm("cmp	r2, #0");
	asm("moveq	r0, #0");			// category disabled: return EFalse
	asm("beq	9f ");

	// r0=header, r1=a1=secondary filter UID, r2=unused, r3=a1, r12->SBTraceData
	// if trace enabled return r0,r1,r3 unmodified, r2=context value r12->handler, Z=0
	// if trace disabled return r0=0 Z=1
	asm("bl		btrace_check_filter2 ");
	asm("beq	9f ");
	__ASM_ACQUIRE_BTRACE_LOCK("{r0,r2,r3,r12}");
	asm("adr	lr, 1f ");			// return from handler to label 1 below
	__JUMP(,	r12);
	asm("1: ");
	__ASM_RELEASE_BTRACE_LOCK();
	asm("9: ");
	__POPRET("r2,r3,r4,");

	// Local subroutine shared by the OutFiltered* variants; contract is
	// described in the comment block above its call site.
	asm("btrace_check_filter2: ");
	asm("stmfd	sp!, {r0,r1,r3,r4,r12,lr} ");
	asm("mov	r0, r12 ");
	asm("bl		CheckFilter2__11SBTraceDataUl ");	// SBTraceData::CheckFilter2(TUint32)
	asm("cmp	r0, #0 ");
	asm("beq	0f ");				// secondary filter rejected the trace
	asm("mrs	r14, cpsr ");		// remember original CPSR
	__ASM_CLI();					// interrupts off while sampling per-CPU state
	asm("and	r2, r14, #0x0f ");	// r2 = original mode bits
	asm("cmp	r2, #3 ");
	asm("movhi	r2, #2 ");			// r2 = context ID = 1 for FIQ, 2 for IRQ/ABT/UND/SYS
	asm("bne	1f ");
	GET_RWNO_TID(,r4);
	asm("movs   r2, r4 ");			// r2 = context ID = 0 for early boot, no threads
	asm("beq    1f ");
	asm("ldrb	r2, [r4, #%a0]" : : "i" _FOFF(TSubScheduler,iInIDFC));
	asm("cmp	r2, #0 ");
	asm("ldreq	r2, [r4, #%a0]" : : "i" _FOFF(TSubScheduler,iCurrentThread));
	asm("movne	r2, #3 ");			// r2 = context ID = 3 for IDFC = NThread pointer for thread
	asm("1: ");
	asm("msr	cpsr, r14 ");		// restore original CPSR
	asm("0: ");
	asm("ldmfd	sp!, {r0,r1,r3,r4,r12,lr} ");
	asm("moveq	r0, #0 ");			// rejected: r0=0, Z=1
	asm("ldrne	r12, [r12, #%a0]" : : "i" _FOFF(SBTraceData,iHandler));	// accepted: r12=handler, Z=0
	__JUMP(,lr);
	}
sl@0
   617
sl@0
   618
/** Output a secondary-filtered BTrace record with an attached data array.
	Identical entry sequence to OutFilteredNX, so the naked body simply falls
	through into it (the two functions must stay adjacent in this file). */
__NAKED__ EXPORT_C TBool BTrace::OutFilteredN(TUint32 a0, TUint32 a1, TUint32 a2, const TAny* aData, TInt aDataSize)
	{
	// fall through to OutFilteredNX...
	}
sl@0
   622
sl@0
   623
/** Output a secondary-filtered BTrace record with an attached data array
	and a context ID.

	Combines the secondary-filter/context-ID path of OutFilteredX (via the
	shared btrace_check_filter2 subroutine) with the clamped/truncated data
	array handling of OutNX.
*/
__NAKED__ EXPORT_C TBool BTrace::OutFilteredNX(TUint32 a0, TUint32 a1, TUint32 a2, const TAny* aData, TInt aDataSize)
	{
	asm("stmdb	sp!, {r2,r3,r4,lr}");
	asm("ldr	r12, __BTraceData");
	asm("and	r2, r0, #%a0" : : "i" ((TInt)(0xff<<(BTrace::ECategoryIndex*8))));	// isolate category byte
	asm("ldrb	r2, [r12, r2, lsr #%a0]" : : "i" ((TInt)(BTrace::ECategoryIndex*8)));	// r2 = filter byte
	asm("cmp	r2, #0");
	asm("moveq	r0, #0");			// category disabled: return EFalse
	asm("beq	9f ");

	// r0=header, r1=a1=secondary filter UID, r2=unused, r3=aData, r12->SBTraceData
	// if trace enabled return r0,r1,r3 unmodified, r2=context value r12->handler, Z=0
	// if trace disabled return r0=0 Z=1
	asm("bl		btrace_check_filter2 ");
	asm("beq	9f ");

	__ASM_ACQUIRE_BTRACE_LOCK("{r0-r3,r11,r12}");
	asm("ldr	r14, [sp, #16] ");	// r14 = aDataSize
	asm("cmp	r14, #%a0" : : "i" ((TInt)KMaxBTraceDataArray));
	asm("movhi	r14, #%a0" : : "i" ((TInt)KMaxBTraceDataArray));	// clamp size ...
 	asm("orrhi	r0, r0, #%a0" : : "i" ((TInt)(BTrace::ERecordTruncated<<(BTrace::EFlagsIndex*8))));	// ... and flag truncation
	asm("add	r0, r0, r14 ");		// add data size into header word
	asm("subs	r14, r14, #1 ");
	asm("ldrhs	r3, [r3] ");		// get first word of aData if aDataSize!=0
	asm("cmp	r14, #4 ");
	asm("strlo	r3, [sp, #4] ");	// replace aData with first word if aDataSize is 1-4
	asm("mov	r3, r1 ");			// r3 = a1 (ready for call to handler)
	asm("adr	lr, 1f ");			// return from handler to label 1 below
	__JUMP(,	r12);
	asm("1: ");
	__ASM_RELEASE_BTRACE_LOCK();
	asm("9: ");
	__POPRET("r2,r3,r4,");
	}
sl@0
   657
sl@0
   658
/** Output a secondary-filtered BTrace record with a large attached data block.

	Checks the category filter, then (only if enabled) the secondary filter
	via btrace_check_filter2 — which also yields the context ID in r2 — and
	finally delegates to BTrace::DoOutBig with the context ID and caller PC
	as the extra stack arguments. Returns EFalse if either filter rejects.
*/
__NAKED__ EXPORT_C TBool BTrace::OutFilteredBig(TUint32 a0, TUint32 a1, const TAny* aData, TInt aDataSize)
	{
	asm("stmdb	sp!, {r4,lr} ");
	asm("ldr	r12, __BTraceData ");
	asm("stmfd	sp!, {r2,lr} ");	// save aData, PC
	asm("and	r14, r0, #%a0" : : "i" ((TInt)(0xff<<(BTrace::ECategoryIndex*8))));	// isolate category byte
	asm("ldrb	r14, [r12, r14, lsr #%a0]" : : "i" ((TInt)(BTrace::ECategoryIndex*8)));	// r14 = filter byte
	asm("cmp	r14, #0 ");			// check category filter
	asm("blne	btrace_check_filter2 ");	// if enabled, check secondary filter
	asm("addeq	sp, sp, #8 ");		// rejected: drop saved aData/PC ...
	asm("moveq	r0, #0 ");			// ... and return EFalse
	asm("beq	9f ");				// if category or secondary filter disabled, exit now
	asm("mov	r12, r2 ");			// r12 = context ID from btrace_check_filter2
	asm("ldr	r2, [sp, #0] ");	// restore aData into r2
	asm("str	r12, [sp, #0] ");	// Context ID
	asm("bl "	CSM_ZN6BTrace8DoOutBigEmmPKvimm);
	asm("add	sp, sp, #8 ");		// pop context ID and PC
	asm("9: ");
	__POPRET("r4,");
	}
sl@0
   678
sl@0
   679
sl@0
   680
/******************************************************************************/
sl@0
   681
sl@0
   682
/** Save all the ARM registers

	Captures the complete CPU state into the supplied SFullArmRegSet:
	R0-R15 and CPSR, then the banked R13/R14/SPSR of svc, abt, und, irq
	and fiq modes (plus R8_fiq-R12_fiq) by switching the CPSR mode bits
	to read each register bank; then selected coprocessor/CP15 state
	(TEEHBR, CPACR, MMU control/translation/fault registers where built
	for a CPU that has them) and, where __CPU_HAS_VFP, the VFP state.
	Slots not captured (monitor mode, SCR/SDER/NSACR/PMCR/MVBAR, spare
	words) are zero-filled.  On exit CPSR is restored from the saved
	flags with interrupts disabled, and R0-R11 are reloaded from the
	saved block.

	NOTE(review): switches CPSR mode bits directly, so it can only run in
	a privileged mode — confirm against call sites.

@internalTechnology
*/
__NAKED__ void Arm::SaveState(SFullArmRegSet&)
	{
	asm("stmia	r0, {r0-r14}^ ");	// save R0-R7, R8_usr-R14_usr
	asm("str	lr, [r0, #60]! ");	// save R15 (return address in lr recorded as PC)
	asm("mrs	r1, cpsr ");
	asm("str	r1, [r0, #4]! ");	// save CPSR
	asm("bic	r2, r1, #0x1f ");
	asm("orr	r2, r2, #0xd3 ");	// mode_svc, all interrupts off
	asm("msr	cpsr, r2 ");
	asm("stmib	r0!, {r13,r14} ");	// save R13_svc, R14_svc
	asm("mrs	r3, spsr ");
	asm("str	r3, [r0, #4]! ");	// save SPSR_svc
	asm("bic	r2, r1, #0x1f ");
	asm("orr	r2, r2, #0xd7 ");	// mode_abt, all interrupts off
	asm("msr	cpsr, r2 ");
	asm("stmib	r0!, {r13,r14} ");	// save R13_abt, R14_abt
	asm("mrs	r3, spsr ");
	asm("str	r3, [r0, #4]! ");	// save SPSR_abt
	asm("bic	r2, r1, #0x1f ");
	asm("orr	r2, r2, #0xdb ");	// mode_und, all interrupts off
	asm("msr	cpsr, r2 ");
	asm("stmib	r0!, {r13,r14} ");	// save R13_und, R14_und
	asm("mrs	r3, spsr ");
	asm("str	r3, [r0, #4]! ");	// save SPSR_und
	asm("bic	r2, r1, #0x1f ");
	asm("orr	r2, r2, #0xd2 ");	// mode_irq, all interrupts off
	asm("msr	cpsr, r2 ");
	asm("stmib	r0!, {r13,r14} ");	// save R13_irq, R14_irq
	asm("mrs	r3, spsr ");
	asm("str	r3, [r0, #4]! ");	// save SPSR_irq
	asm("bic	r2, r1, #0x1f ");
	asm("orr	r2, r2, #0xd1 ");	// mode_fiq, all interrupts off
	asm("msr	cpsr, r2 ");
	asm("stmib	r0!, {r8-r14} ");	// save R8_fiq ... R14_fiq
	asm("mrs	r3, spsr ");
	asm("str	r3, [r0, #4]! ");	// save SPSR_fiq
	asm("bic	r2, r1, #0x1f ");
	asm("orr	r2, r2, #0xd3 ");	// mode_svc, all interrupts off
	asm("msr	cpsr, r2 ");

	// r4-r11 = 0, used below for zero-filling uncaptured slots
	asm("mov	r4, #0 ");
	asm("mov	r5, #0 ");
	asm("mov	r6, #0 ");
	asm("mov	r7, #0 ");
	asm("mov	r8, #0 ");
	asm("mov	r9, #0 ");
	asm("mov	r10, #0 ");
	asm("mov	r11, #0 ");

	// monitor mode - skip for now
	asm("mov	r3, #0 ");
	asm("stmib	r0!, {r4-r6} ");	// R13_mon, R14_mon, SPSR_mon

	// zero spare words
	asm("mov	r3, #0 ");
	asm("stmib	r0!, {r4-r11} ");
	asm("add	r0, r0, #4 ");		// r0 = &a.iA

#ifdef __CPU_ARMV7
	asm("mrc	p14, 6, r3, c1, c0, 0 ");
#else
	asm("mov	r3, #0 ");
#endif
	asm("str	r3, [r0], #4 ");	// TEEHBR
#ifdef __CPU_HAS_COPROCESSOR_ACCESS_REG
	GET_CAR(,r3);
#else
	asm("mov	r3, #0 ");
#endif
	asm("str	r3, [r0], #4 ");	// CPACR

	// skip SCR, SDER, NSACR, PMCR, MVBAR for now
	asm("mov	r3, #0 ");
	asm("stmia	r0!, {r4-r8} ");	// SCR, SDER, NSACR, PMCR, MVBAR
	
	// zero spare words
	asm("mov	r3, #0 ");
	asm("stmia	r0!, {r3-r11} ");	// r0 = &a.iB[0]

	// just fill in iB[0]
#ifdef __CPU_HAS_MMU
	asm("mrc	p15, 0, r3, c1, c0, 0 ");
	asm("str	r3, [r0], #4 ");	// SCTLR
#ifdef __CPU_HAS_ACTLR
	asm("mrc	p15, 0, r3, c1, c0, 1 ");
#else
	asm("mov	r3, #0 ");
#endif
	asm("str	r3, [r0], #4 ");	// ACTLR
	asm("mrc	p15, 0, r3, c2, c0, 0 ");
	asm("str	r3, [r0], #4 ");	// TTBR0
#ifdef __CPU_HAS_TTBR1
	asm("mrc	p15, 0, r2, c2, c0, 1 ");
	asm("mrc	p15, 0, r3, c2, c0, 2 ");
#else
	asm("mov	r2, #0 ");
	asm("mov	r3, #0 ");
#endif
	asm("stmia	r0!, {r2,r3} ");	// TTBR1, TTBCR
	asm("mrc	p15, 0, r3, c3, c0, 0 ");
	asm("str	r3, [r0], #4 ");	// DACR
#ifdef __CPU_MEMORY_TYPE_REMAPPING
	asm("mrc	p15, 0, r2, c10, c2, 0 ");
	asm("mrc	p15, 0, r3, c10, c2, 1 ");
#else
	asm("mov	r2, #0 ");
	asm("mov	r3, #0 ");
#endif
	asm("stmia	r0!, {r2,r3} ");	// PRRR, NMRR
#ifdef __CPU_ARMV7
	asm("mrc	p15, 0, r3, c12, c0, 0 ");
#else
	asm("mov	r3, #0 ");
#endif
	asm("str	r3, [r0], #4 ");	// VBAR
#if defined(__CPU_SA1) || defined(__CPU_ARM920T) || defined(__CPU_ARM925T) || defined(__CPU_ARMV5T) || defined(__CPU_ARMV6) || defined(__CPU_ARMV7)
	asm("mrc	p15, 0, r3, c13, c0, 0 ");
#else
	asm("mov	r3, #0 ");
#endif
	asm("str	r3, [r0], #4 ");	// FCSEIDR
#if defined(__CPU_ARMV6) || defined(__CPU_ARMV7)
	asm("mrc	p15, 0, r3, c13, c0, 1 ");
#else
	asm("mov	r3, #0 ");
#endif
	asm("str	r3, [r0], #4 ");	// CONTEXTIDR
#ifdef __CPU_HAS_CP15_THREAD_ID_REG
	GET_RWRW_TID(,r2);
	GET_RWRO_TID(,r3);
	GET_RWNO_TID(,r12);
#else
	asm("mov	r2, #0 ");
	asm("mov	r3, #0 ");
	asm("mov	r12, #0 ");
#endif
	asm("stmia	r0!, {r2,r3,r12} ");	// RWRWTID, RWROTID, RWNOTID
	asm("mrc	p15, 0, r2, c5, c0, 0 ");	// DFSR
#ifdef __CPU_ARM_HAS_SPLIT_FSR
	asm("mrc	p15, 0, r3, c5, c0, 1 ");	// IFSR
#else
	asm("mov	r3, #0 ");
#endif
	asm("stmia	r0!, {r2,r3} ");	// DFSR, IFSR
#ifdef __CPU_ARMV7
	asm("mrc	p15, 0, r2, c5, c1, 0 ");	// ADFSR
	asm("mrc	p15, 0, r3, c5, c1, 1 ");	// AIFSR
#else
	asm("mov	r2, #0 ");
	asm("mov	r3, #0 ");
#endif
	asm("stmia	r0!, {r2,r3} ");	// ADFSR, AIFSR
	asm("mrc	p15, 0, r2, c6, c0, 0 ");	// DFAR
#ifdef __CPU_ARM_HAS_CP15_IFAR
	asm("mrc	p15, 0, r3, c6, c0, 2 ");	// IFAR
#else
	asm("mov	r3, #0 ");
#endif
	asm("stmia	r0!, {r2,r3} ");	// DFAR, IFAR

	// zero spare words
	asm("stmia	r0!, {r4-r7} ");
	asm("stmia	r0!, {r4-r11} ");
#else	// __CPU_HAS_MMU
	asm("stmia	r0!, {r4-r11} ");	// no MMU so zero fill
	asm("stmia	r0!, {r4-r11} ");	// no MMU so zero fill
	asm("stmia	r0!, {r4-r11} ");	// no MMU so zero fill
	asm("stmia	r0!, {r4-r11} ");	// no MMU so zero fill
#endif	// __CPU_HAS_MMU

	// zero iB[1]
	asm("stmia	r0!, {r4-r11} ");
	asm("stmia	r0!, {r4-r11} ");
	asm("stmia	r0!, {r4-r11} ");
	asm("stmia	r0!, {r4-r11} ");	// r0 = &a.iMore[0]
	asm("add	r1, r0, #62*8 ");	// r1 = &a.iExcCode

	// Save VFP state
	// Save order:
	//				FPEXC	FPSCR
	// VFPv2 ONLY:	FPINST	FPINST2
	//				D0-D3	D4-D7	D8-D11	D12-D15 
	// VFPv3 ONLY:	D16-D19	D20-D23	D24-D27	D28-D31
#ifdef __CPU_HAS_VFP
	GET_CAR(,r2);
	asm("bic	r2, r2, #0x00f00000 ");
#ifdef __VFP_V3
	asm("bic	r2, r2, #0xc0000000 ");	// mask off ASEDIS, D32DIS
#endif
	asm("orr	r2, r2, #0x00500000 ");	// enable privileged access to CP10, CP11
	SET_CAR(,r2);
	VFP_FMRX(,2,VFP_XREG_FPEXC);		// r2=FPEXC
	asm("orr	r3, r2, #%a0" : : "i" ((TInt)VFP_FPEXC_EN));
	asm("bic	r3, r3, #%a0" : : "i" ((TInt)VFP_FPEXC_EX));
	VFP_FMXR(,VFP_XREG_FPEXC,3);		// enable VFP
	__DATA_SYNC_BARRIER__(r4);
	__INST_SYNC_BARRIER__(r4);
	VFP_FMRX(,3,VFP_XREG_FPSCR);		// r3=FPSCR
	asm("stmia	r0!, {r2,r3} ");		// save FPEXC, FPSCR
#ifdef __VFP_V3
	VFP_FSTMIADW(CC_AL,0,0,16);			// save D0 - D15
	VFP_FMRX(,3,VFP_XREG_MVFR0);
	asm("tst r3, #%a0" : : "i" ((TInt)VFP_MVFR0_ASIMD32)); // check to see if all 32 Advanced SIMD registers are present
	VFP_FSTMIADW(CC_NE,0,16,16);		// if so then save D16 - D31 (don't need to check CPACR.D32DIS as it is cleared above)
#else
	VFP_FMRX(,2,VFP_XREG_FPINST);
	VFP_FMRX(,3,VFP_XREG_FPINST2);
	asm("stmia	r0!, {r2,r3} ");		// FPINST, FPINST2
	VFP_FSTMIADW(CC_AL,0,0,16);			// save D0 - D15
#endif
#endif	// __CPU_HAS_VFP
	asm("1:		");
	asm("cmp	r0, r1 ");
	asm("strlo	r4, [r0], #4 ");		// clear up to end of iMore[61]
	asm("blo	1b ");
	asm("mov	r1, #%a0" : : "i" ((TInt)KMaxTInt));
	asm("stmia	r0!, {r1,r5-r7} ");		// iExcCode=KMaxTInt, iCrashArgs[0...2]=0
	asm("sub	r0, r0, #1024 ");		// r0 = &a
#ifdef __CPU_HAS_VFP
	asm("ldr	r2, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iMore[0]));
	VFP_FMXR(,VFP_XREG_FPEXC,2);		// restore FPEXC
	__DATA_SYNC_BARRIER__(r4);
	__INST_SYNC_BARRIER__(r4);
	asm("ldr	r2, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iA.iCPACR));
	SET_CAR(,r2);						// restore CPACR
#endif
	asm("ldr	r1, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iN.iFlags));
	asm("orr	r1, r1, #0xC0 ");		// interrupts off
	asm("msr	cpsr, r1 ");			// restore CPSR with interrupts off
	asm("ldmia	r0, {r0-r11} ");		// restore R0-R11 from the saved block
	__JUMP(,lr);
	}
/** Update the saved ARM registers with information from an exception

	Copies the register values recorded in the TArmExcInfo frame (second
	argument) into the corresponding slots of the SFullArmRegSet (first
	argument).  The fault address/status pair from the frame is stored as
	IFAR/IFSR for a prefetch abort and as DFAR/DFSR for any other
	exception code.

@internalTechnology
*/
__NAKED__ void Arm::UpdateState(SFullArmRegSet&, TArmExcInfo&)
	{
	asm("ldr	r2, [r1, #%a0]" : : "i" _FOFF(TArmExcInfo, iExcCode));
	asm("cmp	r2, #%a0 " : : "i" ((TInt)EArmExceptionPrefetchAbort));	// EQ = prefetch abort, used by streq/strne below
	asm("ldmia	r1!, {r2,r3,r12} ");	// fault address, fault status, SPSR_svc from the frame
	asm("streq	r2, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iB[0].iIFAR));
	asm("strne	r2, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iB[0].iDFAR));
	asm("streq	r3, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iB[0].iIFSR));
	asm("strne	r3, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iB[0].iDFSR));
	asm("str	r12, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iN.iSpsrSvc));
	asm("add	r1, r1, #4 ");			// skip one frame word
	asm("ldmia	r1!, {r2,r3,r12} ");	// -> R13_svc, R14_svc, R0
	asm("str	r2, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iN.iR13Svc));
	asm("str	r3, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iN.iR14Svc));
	asm("str	r12, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iN.iR0));
	asm("ldmia	r1!, {r2,r3,r12} ");	// -> R1, R2, R3
	asm("str	r2, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iN.iR1));
	asm("str	r3, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iN.iR2));
	asm("str	r12, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iN.iR3));
	asm("ldmia	r1!, {r2,r3,r12} ");	// -> R4, R5, R6
	asm("str	r2, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iN.iR4));
	asm("str	r3, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iN.iR5));
	asm("str	r12, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iN.iR6));
	asm("ldmia	r1!, {r2,r3,r12} ");	// -> R7, R8, R9
	asm("str	r2, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iN.iR7));
	asm("str	r3, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iN.iR8));
	asm("str	r12, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iN.iR9));
	asm("ldmia	r1!, {r2,r3,r12} ");	// -> R10, R11, R12
	asm("str	r2, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iN.iR10));
	asm("str	r3, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iN.iR11));
	asm("str	r12, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iN.iR12));
	asm("ldmia	r1!, {r2,r3,r12} ");	// -> R13, R14, exception code
	asm("str	r2, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iN.iR13));
	asm("str	r3, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iN.iR14));
	asm("str	r12, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iExcCode));
	asm("ldmia	r1!, {r2,r3} ");		// -> R15, flags
	asm("str	r2, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iN.iR15));
	asm("str	r3, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iN.iFlags));
	__JUMP(,lr);
	}
/** Get a pointer to a stored integer register, accounting for registers which
	are banked across modes.

@param	a		Pointer to saved register block
@param	aRegNum	Number of register required, 0-15 or -1 (indicates SPSR)
@param	aMode	Bottom 5 bits indicate which processor mode
				Other bits of aMode are ignored
@return			Pointer to the required saved register value, or NULL if
				aRegNum/aMode do not select a stored register (aRegNum out
				of range, SPSR requested for usr/sys, or invalid mode)

@internalTechnology
*/
__NAKED__ TArmReg* Arm::Reg(SFullArmRegSet& /*a*/, TInt /*aRegNum*/, TArmReg /*aMode*/)
	{
	asm("cmp	r1, #8 ");				// register number < 8 ?
	asm("addlo	r0, r0, r1, lsl #2 ");	// register R0-R7 are not banked
	asm("blo	0f ");
	asm("cmp	r1, #15 ");				// register number = 15 ?
	asm("addeq	r0, r0, r1, lsl #2 ");	// register R15 not banked
	asm("movgt	r0, #0 ");				// no registers > 15
	asm("bge	0f ");
	asm("cmn	r1, #1 ");
	asm("movlt	r0, #0 ");				// no registers < -1
	asm("blt	0f ");
	asm("and	r12, r2, #0x1F ");		// r12 = mode bits of aMode
	asm("cmp	r12, #0x11 ");			// mode_fiq?
	asm("beq	1f ");					// skip if it is
	asm("cmp	r1, #13 ");
	asm("addlo	r0, r0, r1, lsl #2 ");	// register R8-R12 are only banked in mode_fiq
	asm("blo	0f ");
	asm("cmp	r12, #0x10 ");			// mode_usr ?
	asm("cmpne	r12, #0x1F ");			// if not, mode_sys ?
	asm("bne	2f ");					// skip if neither
	asm("cmp	r1, #16 ");
	asm("addlo	r0, r0, r1, lsl #2 ");	// handle R13_usr, R14_usr
	asm("movhs	r0, #0 ");				// no SPSR in mode_usr or mode_sys
	asm("blo	0f ");
	asm("1: ");							// mode_fiq, regnum = 8-12
	asm("2: ");							// exception mode, regnum not 0-12 or 15
	asm("cmn	r1, #1 ");				// regnum = -1 ?
	asm("moveq	r1, #15 ");				// if so, change to 15
	asm("sub	r1, r1, #13 ");
	asm("add	r0, r0, r1, lsl #2 ");	// add 0 for R13, 4 for R14, 8 for SPSR
	asm("cmp	r12, #0x16 ");
	asm("addeq	r0, r0, #12 ");			// if mon, add offset from R13Fiq to R13Mon
	asm("cmpne	r12, #0x11 ");
	asm("addeq	r0, r0, #32 ");			// if valid but not svc/abt/und/irq, add offset from R13Irq to R13Fiq
	asm("cmpne	r12, #0x12 ");
	asm("addeq	r0, r0, #12 ");			// if valid but not svc/abt/und, add offset from R13Und to R13Irq
	asm("cmpne	r12, #0x1b ");
	asm("addeq	r0, r0, #12 ");			// if valid but not svc/abt, add offset from R13Abt to R13Und
	asm("cmpne	r12, #0x17 ");
	asm("addeq	r0, r0, #12 ");			// if valid but not svc, add offset from R13Svc to R13Abt
	asm("cmpne	r12, #0x13 ");
	asm("addeq	r0, r0, #%a0" : : "i" _FOFF(SFullArmRegSet, iN.iR13Svc));	// if valid mode add offset to R13Svc
	asm("movne	r0, #0 ");				// unrecognised mode: return NULL
	asm("0: ");
	__JUMP(,lr);
	}
/** Restore all the ARM registers

	NOTE(review): not implemented — the body is empty.  Because this is a
	__NAKED__ function the compiler emits no epilogue, so if it is ever
	called, control falls straight through past the end of the function
	into whatever code follows in the image.  Confirm nothing calls this
	before relying on it.

@internalTechnology
*/
__NAKED__ void Arm::RestoreState(SFullArmRegSet&)
	{
	}
/** Output a filtered BTrace record with PC value, format id and a large
	payload.

	Kernel-side implementation is still a stub: it ignores all arguments
	and reports that no trace was output.

@return EFalse (0) always.
*/
__NAKED__ EXPORT_C TBool BTrace::OutFilteredPcFormatBig(TUint32 a0, TUint32 aModuleUid, TUint32 aPc, TUint16 aFormatId, const TAny* aData, TInt aDataSize)
	{
	asm("mov	r0, #0"); //Kernel side not implemented yet
	__JUMP(,lr);		// explicit return: a __NAKED__ function gets no compiler epilogue, so without this, execution would fall off the end of the function
	}