os/kernelhwsrv/kernel/eka/nkern/arm/ncutils.cia
author sl
Tue, 10 Jun 2014 14:32:02 +0200
changeset 1 260cb5ec6c19
permissions -rw-r--r--
Update contrib.
     1 // Copyright (c) 1994-2009 Nokia Corporation and/or its subsidiary(-ies).
     2 // All rights reserved.
     3 // This component and the accompanying materials are made available
     4 // under the terms of the License "Eclipse Public License v1.0"
     5 // which accompanies this distribution, and is available
     6 // at the URL "http://www.eclipse.org/legal/epl-v10.html".
     7 //
     8 // Initial Contributors:
     9 // Nokia Corporation - initial contribution.
    10 //
    11 // Contributors:
    12 //
    13 // Description:
    14 // e32\nkern\arm\ncutils.cia
    15 // 
    16 //
    17 
    18 #include <e32cia.h>
    19 #include <arm.h>
    20 
    21 //#define __DBG_MON_FAULT__
    22 //#define __RAM_LOADED_CODE__
    23 //#define __EARLY_DEBUG__
    24 
    25 #ifdef _DEBUG
    26 #define ASM_KILL_LINK(rp,rs)	asm("mov "#rs", #0xdf ");\
    27 								asm("orr "#rs", "#rs", "#rs", lsl #8 ");\
    28 								asm("orr "#rs", "#rs", "#rs", lsl #16 ");\
    29 								asm("str "#rs", ["#rp"] ");\
    30 								asm("str "#rs", ["#rp", #4] ");
    31 #else
    32 #define ASM_KILL_LINK(rp,rs)
    33 #endif
    34 
    35 
    36 #ifdef __PRI_LIST_MACHINE_CODED__
    37 /** Return the priority of the highest priority item present on a priority list.
    38 
    39 	@return	The highest priority present or -1 if the list is empty.
    40  */
     41 EXPORT_C __NAKED__ TInt TPriListBase::HighestPriority()
	{
        	// r0 = this. The first 8 bytes of TPriListBase are iPresent, a 64-bit
        	// bitmask with bit n set if at least one item of priority n is queued.
        	// Result: index of the most significant set bit, or -1 if no bit set.
     43 #ifdef __CPU_ARM_HAS_CLZ
     44 	asm("ldr r2, [r0, #4] ");				// r2=iPresent MSW
     45 	asm("ldr r1, [r0, #0] ");				// r1=iPresent LSW
     46 	CLZ(0,2);								// r0=31-MSB(r2)
     47 	asm("subs r0, r0, #32 ");				// r0=-1-MSB(r2), 0 if r2=0
     48 	CLZcc(CC_EQ,0,1);						// if r2=0, r0=31-MSB(r1)
     49 	asm("rsb r0, r0, #31 ");				// r0=highest priority
     50 #else
        	// No CLZ instruction: find the top set bit by successively halving the
        	// search range — shift r1 left and reduce the candidate priority in r0.
     51 	asm("ldmia r0, {r1,r2} ");				// r2:r1=iPresent
     52 	asm("mov r0, #31 ");					// start at 31
     53 	asm("cmp r2, #0 ");						// high word non-zero?
     54 	asm("movne r0, #63 ");					// if so, start at 63
     55 	asm("movne r1, r2 ");					// and set r1=high word
     56 	asm("cmp r1, #0 ");						// list completely empty?
     57 	asm("beq highest_pri_0 ");
     58 	asm("cmp r1, #0x00010000 ");
     59 	asm("movcc r1, r1, lsl #16 ");
     60 	asm("subcc r0, r0, #16 ");
     61 	asm("cmp r1, #0x01000000 ");
     62 	asm("movcc r1, r1, lsl #8 ");
     63 	asm("subcc r0, r0, #8 ");
     64 	asm("cmp r1, #0x10000000 ");
     65 	asm("movcc r1, r1, lsl #4 ");
     66 	asm("subcc r0, r0, #4 ");
     67 	asm("cmp r1, #0x40000000 ");
     68 	asm("movcc r1, r1, lsl #2 ");
     69 	asm("subcc r0, r0, #2 ");
     70 	asm("cmp r1, #0x80000000 ");
     71 	asm("subcc r0, r0, #1 ");
     72 	__JUMP(,lr);
     73 	asm("highest_pri_0: ");
     74 	asm("mvn r0, #0 ");						// if list empty, return -1
     75 #endif
     76 	__JUMP(,lr);
     77 	}
    78 
    79 /** Find the highest priority item present on a priority list.
    80 	If multiple items at the same priority are present, return the first to be
    81 	added in chronological order.
    82 
    83 	@return	a pointer to the item or NULL if the list is empty.
    84  */
     85 EXPORT_C __NAKED__ TPriListBase::First()
	{
        	// r0 = this. Layout assumed: 8-byte iPresent bitmask followed by
        	// iQueue[], an array of per-priority list head pointers (4 bytes each).
        	// Returns iQueue[highest set priority], or NULL if iPresent is zero.
     87 #ifdef __CPU_ARM_HAS_CLZ
     88 	asm("ldr r2, [r0, #4] ");				// r2=iPresent MSW
     89 	asm("ldr r1, [r0], #8 ");				// r1=iPresent LSW, r0=&iQueue[0]
     90 	CLZ(3,2);								// r3=31-MSB(r2)
     91 	asm("subs r3, r3, #32 ");				// r3=-1-MSB(r2), 0 if r2=0
     92 	CLZcc(CC_EQ,3,1);						// if r2=0, r3=31-MSB(r1)
     93 	asm("rsbs r3, r3, #31 ");				// r3=highest priority
     94 	asm("ldrpl r0, [r0, r3, lsl #2] ");		// if r3>=0 list is nonempty, r0->first entry
     95 	asm("movmi r0, #0 ");					// if r3<0 list empty, return NULL
     96 #else
        	// No CLZ: instead of computing the priority number, step r0 directly
        	// towards &iQueue[MSB] while binary-searching the mask in r1.
     97 	asm("ldmia r0!, {r1,r2} ");				// r2:r1=iPresent, r0=&iQueue[0]
     98 	asm("cmp r2, #0 ");						// high word non-zero?
     99 	asm("addne r0, r0, #128 ");				// if so, r0=&iQueue[32]
    100 	asm("movne r1, r2 ");					// and set r1=high word
    101 	asm("cmp r1, #0x00010000 ");
    102 	asm("movcc r1, r1, lsl #16 ");
    103 	asm("addcs r0, r0, #0x40 ");			// if iPresent>=0x00010000, step r0 on by 16 words
    104 	asm("cmp r1, #0x01000000 ");
    105 	asm("movcc r1, r1, lsl #8 ");
    106 	asm("addcs r0, r0, #0x20 ");			// if iPresent>=0x01000000, step r0 on by 8 words
    107 	asm("cmp r1, #0x10000000 ");
    108 	asm("movcc r1, r1, lsl #4 ");
    109 	asm("addcs r0, r0, #0x10 ");			// if iPresent>=0x10000000, step r0 on by 4 words
    110 	asm("cmp r1, #0x40000000 ");
    111 	asm("movcc r1, r1, lsl #2 ");
    112 	asm("addcs r0, r0, #0x08 ");			// if iPresent>=0x40000000, step r0 on by 2 words
    113 	asm("cmp r1, #0 ");
    114 	asm("addmi r0, r0, #4 ");				// if iPresent>=0x80000000, step r0 on by 1 word
    115 	asm("ldrne r0, [r0] ");					// if iPresent was not zero, r0 points to first entry
    116 	asm("moveq r0, #0 ");					// else r0=NULL
    117 #endif
	__JUMP(,lr);
	}
   120 
   121 /** Add an item to a priority list.
   122 
   123 	@param aLink = a pointer to the item - must not be NULL
   124  */
    125 EXPORT_C __NAKED__ void TPriListBase::Add(TPriListLink* /*aLink*/)
	{
        	// r0 = this, r1 = aLink. aLink is appended to the circular doubly-linked
        	// ring for its priority (link layout: iNext at +0, iPrev at +4, priority
        	// byte at +8). If the ring was empty the present-mask bit is also set.
    127 	asm("ldrb r2, [r1, #8]" );				// r2=priority of aLink
    128 	asm("add ip, r0, #8 ");					// ip=&iQueue[0]
    129 	asm("ldr r3, [ip, r2, lsl #2]! ");		// r3->first entry at this priority
    130 	asm("cmp r3, #0 ");						// is this first entry at this priority?
    131 	asm("bne pri_list_add_1 ");				// branch if not
    132 	asm("str r1, [ip] ");					// if queue originally empty, iQueue[pri]=aThread
    133 	asm("ldrb ip, [r0, r2, lsr #3]! ");		// ip=relevant byte of present mask, r0->same
    134 	asm("and r2, r2, #7 ");
    135 	asm("mov r3, #1 ");
    136 	asm("str r1, [r1, #0] ");				// aThread->next=aThread
    137 	asm("orr ip, ip, r3, lsl r2 ");			// ip |= 1<<(pri&7)
    138 	asm("str r1, [r1, #4] ");				// aThread->iPrev=aThread
    139 	asm("strb ip, [r0] ");					// update relevant byte of present mask
	__JUMP(,lr);
    141 	asm("pri_list_add_1: ");
        	// Non-empty ring: splice aLink in just before the head (i.e. at the tail),
        	// preserving chronological order within the priority.
    142 	asm("ldr ip, [r3, #4] ");				// if nonempty, ip=last
    143 	asm("str r1, [r3, #4] ");				// first->prev=aThread
    144 	asm("stmia r1, {r3,ip} ");				// aThread->next=r3=first, aThread->prev=ip=last
    145 	asm("str r1, [ip, #0] ");				// last->next=aThread
	__JUMP(,lr);
	}
   148 
   149 /** Change the priority of an item on a priority list
   150 
   151 	@param	aLink = pointer to the item to act on - must not be NULL
   152 	@param	aNewPriority = new priority for the item
   153  */
    154 EXPORT_C __NAKED__ void TPriListBase::ChangePriority(TPriListLink* /*aLink*/, TInt /*aNewPriority*/)
	{
        	// r0 = this, r1 = aLink, r2 = aNewPriority.
        	// Removes aLink from its current priority ring (clearing the present-mask
        	// bit if the ring becomes empty), then appends it to the new priority's
        	// ring (setting that bit if the ring was empty). No-op if unchanged.
    156 	asm("ldrb r3, [r1, #8] ");				// r3=old priority
    157 	asm("stmfd sp!, {r4-r6,lr} ");
    158 	asm("cmp r3, r2 ");
    159 	asm("ldmeqfd sp!, {r4-r6,pc} ");		// if old priority=new, finished
    160 	asm("ldmia r1, {r4,r12} ");				// r4=next, r12=prev
    161 	asm("ldmia r0!, {r6,lr} ");				// lr:r6=present mask, r0=&iQueue[0]
    162 	asm("subs r5, r4, r1 ");				// check if aLink is only one at that priority, r5=0 if it is
    163 	asm("beq change_pri_1 ");				// branch if it is
    164 	asm("ldr r5, [r0, r3, lsl #2] ");		// r5=iQueue[old priority]
    165 	asm("str r4, [r12, #0] ");				// prev->next=next
    166 	asm("str r12, [r4, #4] ");				// next->prev=prev
    167 	asm("cmp r5, r1 ");						// was aLink first?
    168 	asm("streq r4, [r0, r3, lsl #2] ");		// if it was, iQueue[old priority]=aLink->next
    169 	asm("b change_pri_2 ");
    170 	asm("change_pri_1: ");
    171 	asm("str r5, [r0, r3, lsl #2] ");		// if empty, set iQueue[old priority]=NULL (r5=0 here)
    172 	asm("mov r12, #0x80000000 ");
    173 	asm("rsbs r3, r3, #31 ");				// r3=31-priority
    174 	asm("bicmi lr, lr, r12, ror r3 ");		// if pri>31, clear bit in MS word
    175 	asm("bicpl r6, r6, r12, ror r3 ");		// if pri<=31, clear bit in LS word
    176 	asm("change_pri_2: ");
    177 	asm("ldr r4, [r0, r2, lsl #2] ");		// r4=iQueue[new priority]
    178 	asm("strb r2, [r1, #8] ");				// store new priority
    179 	asm("cmp r4, #0 ");						// new priority queue empty?
    180 	asm("bne change_pri_3 ");				// branch if not
    181 	asm("str r1, [r0, r2, lsl #2] ");		// if new priority queue was empty, iQueue[new p]=aLink
    182 	asm("mov r12, #0x80000000 ");
    183 	asm("str r1, [r1, #0] ");				// aLink->next=aLink
    184 	asm("rsbs r2, r2, #31 ");				// r2=31-priority
    185 	asm("str r1, [r1, #4] ");				// aLink->prev=aLink
    186 	asm("orrmi lr, lr, r12, ror r2 ");		// if pri>31, set bit in MS word
    187 	asm("orrpl r6, r6, r12, ror r2 ");		// if pri<=31, set bit in LS word
    188 	asm("stmdb r0!, {r6,lr} ");				// store present mask and restore r0
    189 	asm("ldmfd sp!, {r4-r6,pc} ");
    190 	asm("change_pri_3: ");
    191 	asm("ldr r12, [r4, #4] ");				// r12->last link at this priority
    192 	asm("str r1, [r4, #4] ");				// first->prev=aLink
    193 	asm("str r1, [r12, #0] ");				// old last->next=aLink
    194 	asm("stmia r1, {r4,r12} ");				// aLink->next=r4=first, aLink->prev=r12=old last
    195 	asm("stmdb r0!, {r6,lr} ");				// store present mask and restore r0
    196 	asm("ldmfd sp!, {r4-r6,pc} ");
	}
   198 #endif
   199 
    200 __NAKED__ void initialiseState()
	{
	// entry in mode_svc with irqs and fiqs off	
        	// Force CPSR into a known state: 0xd3 = mode_svc with the I and F bits
        	// set (IRQs and FIQs masked). The pre-modification CPSR is left in r0.
    203 	asm("mrs r0, cpsr ");
    204 	asm("bic r1, r0, #0x1f ");				// clear mode field
    205 	asm("orr r1, r1, #0xd3 ");				// mode_svc
    206 	asm("msr cpsr, r1 ");
	__JUMP(,lr);
	}
   209 
   210 // Called by a thread when it first runs
    211 __NAKED__ void __StartThread()
	{
	// On entry r4->current thread, r5->entry point, r6->parameter block
        	// Call the thread entry point with the parameter block as its argument;
        	// "movs pc, r5" also copies SPSR to CPSR, so the thread starts in the
        	// mode set up in its SPSR. If the entry point ever returns, fall
        	// through to NKern::Exit() to terminate the thread.
    214 	asm("mov r0, r6 ");
	USER_MEMORY_GUARD_OFF_IF_MODE_USR(r6);
	ERRATUM_353494_MODE_CHANGE(,r6);
    217 	asm("mov lr, pc ");						// return address = next instruction
    218 	asm("movs pc, r5 ");					// jump to entry point, SPSR->CPSR
	asm("b  " CSM_ZN5NKern4ExitEv);
	}
   221 
   222 // Called by a thread which has been forced to exit
   223 // Interrupts off here, kernel unlocked
    224 __NAKED__ void __DoForcedExit()
	{
        	// Kill the current thread: re-enable interrupts (0x13 = mode_svc,
        	// I/F clear), lock the kernel, zero the thread's critical-section
        	// count and branch to NThreadBase::Exit() (never returns).
    226 	asm("mov r0, #0x13 ");
    227 	asm("msr cpsr, r0 ");		// interrupts back on
    228 	asm("bic sp, sp, #4 ");		// align stack since it may be misaligned on return from scheduler
    229 	asm("bl  " CSM_ZN5NKern4LockEv);	// lock the kernel (must do this before setting iCsCount=0)
    230 	asm("ldr r0, __TheScheduler ");			// r0 points to scheduler data
    231 	asm("mov r1, #0 ");
    232 	asm("ldr r0, [r0, #%a0]" : : "i" _FOFF(TScheduler,iCurrentThread));	// r0=iCurrentThread
    233 	asm("str r1, [r0, #%a0]" : : "i" _FOFF(NThreadBase,iCsCount));	// set iCsCount=0
	asm("b  " CSM_ZN11NThreadBase4ExitEv);	// exit

        	// Literal pool: address constants referenced by name ("ldr rX, __...")
        	// from this function and from the BTrace/NotifyCrash functions below.
	asm("__TheScheduler: ");
	asm(".word TheScheduler ");
	asm("__BTraceData: ");
	asm(".word BTraceData ");
	asm("__DBTraceFilter2_iCleanupHead:");
    241 #ifdef __EABI__
	asm(".word _ZN14DBTraceFilter212iCleanupHeadE");
    243 #else
	asm(".word _14DBTraceFilter2.iCleanupHead");
    245 #endif
	}
   247 
   248 
   249 /** @internalTechnology
   250 
   251 	Called to indicate that the system has crashed and all CPUs should be
   252 	halted and should dump their registers.
   253 
   254 */
    255 __NAKED__ void NKern::NotifyCrash(const TAny* /*a0*/, TInt /*a1*/)
	{
        	// Record the full register state (unless already captured), stash the
        	// caller's a0/a1 in iCrashArgs, then call the crash handler twice:
        	// first with (0,0,0), then with (1, a0, a1). Does not return.
    257 	asm("stmfd	sp!, {r0-r1} ");			// save parameters
    258 	asm("ldr	r0, __CrashState ");
    259 	asm("mov	r1, #1 ");
    260 	asm("str	r1, [r0] ");				// CrashState = ETrue
    261 	asm("ldr	r0, __TheScheduler ");
    262 	asm("ldr	r0, [r0, #%a0]" : : "i" _FOFF(TScheduler,i_Regs));
    263 	asm("ldr	r1, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet, iExcCode));
    264 	asm("cmp	r1, #0 ");					// context already saved?
    265 	asm("bge	state_already_saved ");		// skip if so
    266 	asm("mov	r1, lr ");
    267 	asm("bl "	CSM_ZN3Arm9SaveStateER14SFullArmRegSet );
    268 	asm("str	r1, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet, iN.iR15));
    269 	asm("ldmia	sp!, {r2-r3} ");			// original R0,R1
    270 	asm("stmia	r0, {r2-r3} ");				// save original R0,R1
    271 	asm("add	r1, r0, #%a0" : : "i" _FOFF(SFullArmRegSet, iExcCode));
    272 	asm("stmib	r1, {r2-r3} ");				// save a0, a1 in iCrashArgs
    273 	asm("mov	r1, #13 ");					// r1 = regnum
    274 	asm("mrs	r2, cpsr ");				// r2 = mode
    275 	asm("bl "	CSM_ZN3Arm3RegER14SFullArmRegSetim );	// r0 = pointer to exception mode R13
    276 	asm("str	sp, [r0] ");				// save correct original value for exception mode R13
	asm("b		state_save_complete ");

	asm("state_already_saved: ");
    280 	asm("ldmia	sp!, {r2-r3} ");			// original R0,R1
    281 	asm("add	r1, r0, #%a0" : : "i" _FOFF(SFullArmRegSet, iExcCode));
    282 	asm("ldr	r4, [r1, #4]! ");
    283 	asm("cmp	r4, #0 ");
    284 	asm("stmeqia	r1, {r2-r3} ");			// save a0, a1 in iCrashArgs, provided iCrashArgs not already set
	asm("state_save_complete: ");

    287 	asm("mov	r2, #0xd1 ");
    288 	asm("msr	cpsr, r2 ");				// mode_fiq, interrupts off
    289 	asm("mov	r4, r0 ");					// r4 = saved SFullArmRegSet pointer
    290 	asm("bic	sp, sp, #4 ");				// align stack to multiple of 8

    292 	asm("mov	r0, #0 ");
    293 	asm("mov	r1, #0 ");
    294 	asm("mov	r2, #0 ");
	asm("bl		NKCrashHandler ");			// first pass: (0,0,0)

    297 	asm("mov	r0, #1 ");
    298 	asm("ldr	r1, [r4, #%a0] " : : "i" _FOFF(SFullArmRegSet,iN.iR0));	// original R0 = a0 parameter
    299 	asm("ldr	r2, [r4, #%a0] " : : "i" _FOFF(SFullArmRegSet,iN.iR1));	// original R1 = a1 parameter
	asm("bl		NKCrashHandler ");			// second pass: (1, a0, a1)

	// shouldn't get back here
	__ASM_CRASH();

	asm("__CrashState: ");
	asm(".word %a0" : : "i" ((TInt)&CrashState));
	}
   308 
   309 
   310 
    311 __NAKED__ EXPORT_C TBool BTrace::Out(TUint32 a0, TUint32 a1, TUint32 a2, TUint32 a3)
	{
        	// Fast trace output: look up the per-category filter byte (category is
        	// encoded in a0 at ECategoryIndex). If zero the trace is filtered out
        	// and EFalse (0) is returned; otherwise tail-call the installed
        	// handler via SBTraceData::iHandler with lr pointing at label 9.
    313 	asm("ldr	r12, __BTraceData");
    314 	asm("stmdb	sp!, {r2,r3,r4,lr}");
    315 	asm("and	r2, r0, #%a0" : : "i" ((TInt)(0xff<<(BTrace::ECategoryIndex*8))));
    316 	asm("ldrb	r2, [r12, r2, lsr #%a0]" : : "i" ((TInt)(BTrace::ECategoryIndex*8)));
    317 	asm("mov	r3, r1");			// r3 = a1 (ready for call to handler)
    318 	asm("adr	lr, 9f");			// return address for the handler call
    319 	asm("cmp	r2, #0");
    320 	asm("moveq	r0, #0");			// category filtered out -> return EFalse
    321 	asm("ldrne	pc, [r12, #%a0]" : : "i" _FOFF(SBTraceData,iHandler));
	asm("9:");
	__POPRET("r2,r3,r4,");
	}
   325 
    326 __NAKED__ EXPORT_C TBool BTrace::OutN(TUint32 a0, TUint32 a1, TUint32 a2, const TAny* aData, TInt aDataSize)
	{
        	// Trace with an attached data payload (aData/aDataSize). Checks the
        	// category filter byte, clamps the payload to KMaxBTraceDataArray
        	// (marking the record truncated), folds the size into the header word
        	// in r0 and calls the installed handler.
    328 	asm("ldr	r12, __BTraceData");
    329 	asm("stmdb	sp!, {r2,r3,r4,lr}");
    330 	asm("and	r2, r0, #%a0" : : "i" ((TInt)(0xff<<(BTrace::ECategoryIndex*8))));
    331 	asm("ldrb	r2, [r12, r2, lsr #%a0]" : : "i" ((TInt)(BTrace::ECategoryIndex*8)));
    332 	asm("ldr	r4, [sp, #16]");	// r4 = aDataSize (5th argument, on the stack)
    333 	asm("cmp	r2, #0");
    334 	asm("moveq	r0, #0");			// category filtered out -> return EFalse
	__CPOPRET(eq,"r2,r3,r4,");

    337 	asm("cmp	r4, #%a0" : : "i" ((TInt)KMaxBTraceDataArray));
    338 	asm("movhi	r4, #%a0" : : "i" ((TInt)KMaxBTraceDataArray));
 	asm("orrhi	r0, r0, #%a0" : : "i" ((TInt)(BTrace::ERecordTruncated<<(BTrace::EFlagsIndex*8))));
    340 	asm("add	r0, r0, r4");		// add (clamped) data size into header word
    341 	asm("subs	r4, r4, #1");
    342 	asm("ldrhs	r2, [r3]");			// get first word of aData if aDataSize!=0
    343 	asm("mov	r3, r1");			// r3 = a1 (ready for call to handler)
    344 	asm("cmp	r4, #4");
    345 	asm("strlo	r2, [sp, #4]");		// replace aData with first word if aDataSize is 1-4

    347 	asm("mov	lr, pc");
    348 	asm("ldr	pc, [r12, #%a0]" : : "i" _FOFF(SBTraceData,iHandler));
	__POPRET("r2,r3,r4,");
	}
   351 
    352 __NAKED__ EXPORT_C TBool BTrace::OutX(TUint32 a0, TUint32 a1, TUint32 a2, TUint32 a3)
	{
        	// As BTrace::Out but also supplies a context id in r2 for the handler:
        	// 1 for FIQ, 2 for IRQ/ABT/UND/SYS, or the current NThread pointer when
        	// in svc mode outside an IDFC (see mode-nibble test below).
    354 	asm("ldr	r12, __BTraceData");
    355 	asm("stmdb	sp!, {r2,r3,r4,lr}");
    356 	asm("and	r2, r0, #%a0" : : "i" ((TInt)(0xff<<(BTrace::ECategoryIndex*8))));
    357 	asm("ldrb	r2, [r12, r2, lsr #%a0]" : : "i" ((TInt)(BTrace::ECategoryIndex*8)));
    358 	asm("mov	r3, r1");			// r3 = a1 (ready for call to handler)
    359 	asm("ldr	lr, __TheScheduler");
    360 	asm("cmp	r2, #0");
    361 	asm("moveq	r0, #0");			// category filtered out -> return EFalse
	__CPOPRET(eq,"r2,r3,r4,");

	// set r2 = context id
    365 	asm("ldrb	r4, [lr, #%a0]" : : "i" _FOFF(TScheduler,iInIDFC));
    366 	asm("mrs	r2, cpsr");
    367 	asm("and	r2, r2, #0x0f");	// r2 = CPSR mode nibble
    368 	asm("cmp	r2, #3");
    369 	asm("movhi	r2, #2");		// r2 = context ID => 1 for FIQ, 2 for IRQ/ABT/UND/SYS
    370 	asm("cmpeq	r4, #0");		// svc mode: in an IDFC?
    371 	asm("ldreq	r2, [lr, #%a0]" : : "i" _FOFF(TScheduler,iCurrentThread));

    373 	asm("mov	lr, pc");
    374 	asm("ldr	pc, [r12, #%a0]" : : "i" _FOFF(SBTraceData,iHandler));
	__POPRET("r2,r3,r4,");
	}
   377 
    378 __NAKED__ EXPORT_C TBool BTrace::OutNX(TUint32 a0, TUint32 a1, TUint32 a2, const TAny* aData, TInt aDataSize)
	{
        	// Combination of OutN (attached payload, clamped and folded into the
        	// header) and OutX (context id supplied in r2 to the handler).
    380 	asm("ldr	r12, __BTraceData");
    381 	asm("stmdb	sp!, {r2,r3,r4,lr}");
    382 	asm("and	r2, r0, #%a0" : : "i" ((TInt)(0xff<<(BTrace::ECategoryIndex*8))));
    383 	asm("ldrb	r2, [r12, r2, lsr #%a0]" : : "i" ((TInt)(BTrace::ECategoryIndex*8)));
    384 	asm("ldr	r4, [sp, #16]");	// r4 = aDataSize (5th argument, on the stack)
    385 	asm("ldr	lr, __TheScheduler");
    386 	asm("cmp	r2, #0");
    387 	asm("moveq	r0, #0");			// category filtered out -> return EFalse
	__CPOPRET(eq,"r2,r3,r4,");

    390 	asm("cmp	r4, #%a0" : : "i" ((TInt)KMaxBTraceDataArray));
    391 	asm("movhi	r4, #%a0" : : "i" ((TInt)KMaxBTraceDataArray));
 	asm("orrhi	r0, r0, #%a0" : : "i" ((TInt)(BTrace::ERecordTruncated<<(BTrace::EFlagsIndex*8))));
    393 	asm("add	r0, r0, r4");		// add (clamped) data size into header word
    394 	asm("subs	r4, r4, #1");
    395 	asm("ldrhs	r2, [r3]");			// get first word of aData if aDataSize!=0
    396 	asm("mov	r3, r1");			// r3 = a1 (ready for call to handler)
    397 	asm("cmp	r4, #4");
    398 	asm("strlo	r2, [sp, #4]");		// replace aData with first word if aDataSize is 1-4

	// set r2 = context id
    401 	asm("ldrb	r4, [lr, #%a0]" : : "i" _FOFF(TScheduler,iInIDFC));
    402 	asm("mrs	r2, cpsr");
    403 	asm("and	r2, r2, #0x0f");	// r2 = CPSR mode nibble
    404 	asm("cmp	r2, #3");
    405 	asm("movhi	r2, #2");		// r2 = context ID => 1 for FIQ, 2 for IRQ/ABT/UND/SYS
    406 	asm("cmpeq	r4, #0");		// svc mode: in an IDFC?
    407 	asm("ldreq	r2, [lr, #%a0]" : : "i" _FOFF(TScheduler,iCurrentThread));

    409 	asm("mov	lr, pc");
    410 	asm("ldr	pc, [r12, #%a0]" : : "i" _FOFF(SBTraceData,iHandler));
	__POPRET("r2,r3,r4,");
	}
   413 
    414 __NAKED__ EXPORT_C TBool BTrace::OutBig(TUint32 a0, TUint32 a1, const TAny* aData, TInt aDataSize)
	{
        	// Large-payload trace. After the category filter check, computes the
        	// context id (same scheme as OutX), pushes it plus lr as extra stack
        	// arguments and calls BTrace::DoOutBig to emit the record.
    416 	asm("ldr	r12, __BTraceData");
    417 	asm("stmdb	sp!, {r4,lr}");
    418 	asm("and	r4, r0, #%a0" : : "i" ((TInt)(0xff<<(BTrace::ECategoryIndex*8))));
    419 	asm("ldrb	r4, [r12, r4, lsr #%a0]" : : "i" ((TInt)(BTrace::ECategoryIndex*8)));
    420 	asm("cmp	r4, #0");
    421 	asm("moveq	r0, #0");			// category filtered out -> return EFalse
	__CPOPRET(eq,"r4,");

    424 	asm("ldr	r12, __TheScheduler");
    425 	asm("stmdb	sp!, {lr}");
    426 	asm("ldrb	lr, [r12, #%a0]" : : "i" _FOFF(TScheduler,iInIDFC));
    427 	asm("mrs	r4, cpsr");
    428 	asm("and	r4, r4, #0x0f");	// r4 = CPSR mode nibble
    429 	asm("cmp	r4, #3");
    430 	asm("movhi	r4, #2");		// r4 = context ID => 1 for FIQ, 2 for IRQ/ABT/UND/SYS
    431 	asm("cmpeq	lr, #0");		// svc mode: in an IDFC?
    432 	asm("ldreq	r4, [r12, #%a0]" : : "i" _FOFF(TScheduler,iCurrentThread));
    433 	asm("stmdb	sp!, {r4}");		// pass context id as stack argument
	asm("bl " CSM_ZN6BTrace8DoOutBigEmmPKvimm);
    435 	asm("add	sp, sp, #8");		// drop the two pushed words
	__POPRET("r4,");
	}
   438 
   439 
    440 __NAKED__ TBool DBTraceFilter2::Check(TUint32 aUid)
	{
        	// Binary search for aUid (r1) in the sorted iUids array of this filter
        	// (r0). r2 = low index, r3 = high index (exclusive), r12 = midpoint.
        	// Returns 1 if found, 0 otherwise.
    442 	asm("stmdb	sp!, {lr}");
    443 	asm("ldr	r3, [r0,#%a0]" : : "i" _FOFF(DBTraceFilter2,iNumUids));
    444 	asm("add	r0, r0, #%a0" : : "i" _FOFF(DBTraceFilter2,iUids));
    445 	asm("mov	r2, #0");
	asm("0:");
    447 	asm("cmp	r3, r2");
    448 	asm("bls	9f");				// range empty -> not found
    449 	asm("add	r12, r2, r3");
    450 	asm("mov	r12, r12, asr #1");	// r12 = (low+high)/2
    451 	asm("ldr	lr, [r0, r12, lsl #2]");
    452 	asm("cmp	r1, lr");
    453 	asm("addhi	r2, r12, #1");		// aUid > mid -> search upper half
    454 	asm("movlo	r3, r12");			// aUid < mid -> search lower half
    455 	asm("bne	0b");
    456 	asm("movs	r0, #1");			// exact match
	__POPRET("");
	asm("9:");
    459 	asm("movs	r0, #0");
	__POPRET("");
	}
   462 
   463 
    464 __NAKED__ TBool SBTraceData::CheckFilter2(TUint32 aUid)
	{
	asm("btrace_check_filter2:");
	// returns r0 = 0 or 1 indicating if trace passed the filter check
	// returns r2 = trace context id
        	// Open-coded equivalent of: enter CS if in thread context,
        	// DBTraceFilter2::Open(), DBTraceFilter2::Check(aUid),
        	// DBTraceFilter2::Close(), leave CS. Interrupts are disabled around
        	// the access-count manipulation to keep Open/Close atomic.

    470 	asm("ldr	r12, __TheScheduler");
    471 	asm("stmdb	sp!, {r4-r6,lr}");
    472 	asm("mrs	r2, cpsr");
	// r2 = cpsr
    474 	asm("ldrb	lr, [r12, #%a0]" : : "i" _FOFF(TScheduler,iInIDFC));
    475 	asm("and	r4, r2, #0x0f");	// r4 = CPSR mode nibble
    476 	asm("cmp	r4, #3");
    477 	asm("movhi	r4, #2");		// r4 = context ID => 1 for FIQ, 2 for IRQ/ABT/UND/SYS
    478 	asm("cmpeq	lr, #0");
    479 	asm("ldreq	lr, [r12, #%a0]" : : "i" _FOFF(TScheduler,iKernCSLocked));
    480 	asm("ldreq	r4, [r12, #%a0]" : : "i" _FOFF(TScheduler,iCurrentThread));
    481 	asm("cmpeq	lr, #0");
	// r4 = context value for trace
	// zero flag set if we need to enter a critical section

	// NKern::ThreadEnterCS()
    486 	asm("ldreq	r5, [r4, #%a0]" : : "i" _FOFF(NThreadBase,iCsCount));
    487 	asm("movne	r5, #0");
    488 	asm("addeq	r5, r5, #1");
    489 	asm("streq	r5, [r4, #%a0]" : : "i" _FOFF(NThreadBase,iCsCount));
	// r5 = true if we entered a critical section

	// DBTraceFilter2::Open()
	INTS_OFF(r12, r2, INTS_ALL_OFF);
    494 	asm("ldr	r0, [r0, #%a0]" : : "i" (_FOFF(SBTraceData,iFilter2)));
    495 	asm("cmp	r0, #1");			// iFilter2 values 0/1 mean "no filter object"
    496 	asm("ldrhi	r12, [r0, #%a0]" : : "i" _FOFF(DBTraceFilter2,iAccessCount));
    497 	asm("addhi	r12, r12, #1");		// bump access count while we use the filter
    498 	asm("strhi	r12, [r0, #%a0]" : : "i" _FOFF(DBTraceFilter2,iAccessCount));
    499 	asm("msr	cpsr_c, r2");		// restore interrupt state
    500 	asm("bls	8f");				// no filter object -> r0 (0 or 1) is the result


    503 	asm("mov	r6, r0");			// r6 = opened DBTraceFilter2
	asm("bl		Check__14DBTraceFilter2Ul");
	// r0 = result


	// DBTraceFilter2::Close()
    509 	asm("mrs	r2, cpsr");
	INTS_OFF(r12, r2, INTS_ALL_OFF);
    511 	asm("ldr	r12, [r6, #%a0]" : : "i" _FOFF(DBTraceFilter2,iAccessCount));
    512 	asm("ldr	r1, __DBTraceFilter2_iCleanupHead");
    513 	asm("subs	r12, r12, #1");
    514 	asm("str	r12, [r6, #%a0]" : : "i" _FOFF(DBTraceFilter2,iAccessCount));
        	// if access count hit zero, push the filter onto the cleanup list
    515 	asm("ldreq	r12, [r1]");
    516 	asm("streq	r6, [r1]");
    517 	asm("streq	r12, [r6, #%a0]" : : "i" _FOFF(DBTraceFilter2,iCleanupLink));
    518 	asm("msr	cpsr_c, r2");		// restore interrupt state

	// NKern::ThreadLeaveCS()	
	asm("8:");
    522 	asm("cmp	r5, #0");
    523 	asm("beq	9f");				// didn't enter CS -> skip
    524 	asm("mov	r5, r0");			// preserve result across the call
	asm("bl " CSM_ZN5NKern13ThreadLeaveCSEv);
    526 	asm("mov	r0, r5");
	asm("9:");
    528 	asm("mov	r2, r4"); // r2 = context id
	__POPRET("r4-r6,");
	}
   531 
    532 __NAKED__ EXPORT_C TBool BTrace::OutFiltered(TUint32 a0, TUint32 a1, TUint32 a2, TUint32 a3)
	{
	// fall through to OutFilteredX...
        	// (naked function, no epilogue is generated: control drops straight
        	// into OutFilteredX, which must be emitted immediately after this)
	}
   536 
    537 __NAKED__ EXPORT_C TBool BTrace::OutFilteredX(TUint32 a0, TUint32 a1, TUint32 a2, TUint32 a3)
	{
        	// As BTrace::Out but additionally runs the secondary (UID) filter via
        	// btrace_check_filter2; the trace is dropped (EFalse) if either the
        	// category byte or the secondary filter rejects it.
    539 	asm("ldr	r12, __BTraceData");
    540 	asm("stmdb	sp!, {r2,r3,r4,lr}");
    541 	asm("and	r2, r0, #%a0" : : "i" ((TInt)(0xff<<(BTrace::ECategoryIndex*8))));
    542 	asm("ldrb	r2, [r12, r2, lsr #%a0]" : : "i" ((TInt)(BTrace::ECategoryIndex*8)));
    543 	asm("mov	r3, r1");			// r3 = a1 (ready for call to handler)
    544 	asm("cmp	r2, #0");
    545 	asm("moveq	r0, #0");			// category filtered out -> return EFalse
	__CPOPRET(eq,"r2,r3,r4,");

    548 	asm("stmdb	sp!, {r0,r3,r12}");	// preserve caller-saved state across filter check
    549 	asm("mov	r0, r12");
	asm("bl		btrace_check_filter2");
    551 	asm("cmp	r0, #0");
    552 	asm("ldmia	sp!, {r0,r3,r12}");
    553 	asm("moveq	r0, #0");			// secondary filter rejected -> return EFalse
	__CPOPRET(eq,"r2,r3,r4,");

    556 	asm("adr	lr, 9f");
    557 	asm("ldr	pc, [r12, #%a0]" : : "i" _FOFF(SBTraceData,iHandler));
	asm("9:");
	__POPRET("r2,r3,r4,");
	}
   561 
    562 __NAKED__ EXPORT_C TBool BTrace::OutFilteredN(TUint32 a0, TUint32 a1, TUint32 a2, const TAny* aData, TInt aDataSize)
	{
	// fall through to OutFilteredNX...
        	// (naked function, no epilogue is generated: control drops straight
        	// into OutFilteredNX, which must be emitted immediately after this)
	}
   566 
    567 __NAKED__ EXPORT_C TBool BTrace::OutFilteredNX(TUint32 a0, TUint32 a1, TUint32 a2, const TAny* aData, TInt aDataSize)
	{
        	// As BTrace::OutN (attached payload) but additionally runs the
        	// secondary (UID) filter via btrace_check_filter2 before emitting.
    569 	asm("ldr	r12, __BTraceData");
    570 	asm("stmdb	sp!, {r2,r3,r4,lr}");
    571 	asm("and	r2, r0, #%a0" : : "i" ((TInt)(0xff<<(BTrace::ECategoryIndex*8))));
    572 	asm("ldrb	r2, [r12, r2, lsr #%a0]" : : "i" ((TInt)(BTrace::ECategoryIndex*8)));
    573 	asm("cmp	r2, #0");
    574 	asm("moveq	r0, #0");			// category filtered out -> return EFalse
	__CPOPRET(eq,"r2,r3,r4,");

    577 	asm("stmdb	sp!, {r0,r1,r3,r12}");	// preserve caller-saved state across filter check
    578 	asm("mov	r0, r12");
	asm("bl		btrace_check_filter2");
    580 	asm("cmp	r0, #0");
    581 	asm("ldmia	sp!, {r0,r1,r3,r12}");
    582 	asm("moveq	r0, #0");			// secondary filter rejected -> return EFalse
	__CPOPRET(eq,"r2,r3,r4,");

    585 	asm("ldr	r4, [sp, #16]");	// r4 = aDataSize
    586 	asm("cmp	r4, #%a0" : : "i" ((TInt)KMaxBTraceDataArray));
    587 	asm("movhi	r4, #%a0" : : "i" ((TInt)KMaxBTraceDataArray));
 	asm("orrhi	r0, r0, #%a0" : : "i" ((TInt)(BTrace::ERecordTruncated<<(BTrace::EFlagsIndex*8))));
    589 	asm("add	r0, r0, r4");		// add (clamped) data size into header word
    590 	asm("subs	r4, r4, #1");
    591 	asm("ldrhs	lr, [r3]");			// get first word of aData if aDataSize!=0
    592 	asm("mov	r3, r1");			// r3 = a1 (ready for call to handler)
    593 	asm("cmp	r4, #4");
    594 	asm("strlo	lr, [sp, #4]");		// replace aData with first word if aDataSize is 1-4

    596 	asm("mov	lr, pc");
    597 	asm("ldr	pc, [r12, #%a0]" : : "i" _FOFF(SBTraceData,iHandler));
	__POPRET("r2,r3,r4,");
	}
   600 
    601 __NAKED__ EXPORT_C TBool BTrace::OutFilteredBig(TUint32 a0, TUint32 a1, const TAny* aData, TInt aDataSize)
	{
        	// As BTrace::OutBig but additionally runs the secondary (UID) filter.
        	// btrace_check_filter2 also returns the context id in r2, which is
        	// captured in r12 and passed to DoOutBig as a stack argument.
    603 	asm("ldr	r12, __BTraceData");
    604 	asm("stmdb	sp!, {r4,lr}");
    605 	asm("and	r4, r0, #%a0" : : "i" ((TInt)(0xff<<(BTrace::ECategoryIndex*8))));
    606 	asm("ldrb	r4, [r12, r4, lsr #%a0]" : : "i" ((TInt)(BTrace::ECategoryIndex*8)));
    607 	asm("cmp	r4, #0");
    608 	asm("moveq	r0, #0");			// category filtered out -> return EFalse
	__CPOPRET(eq,"r4,");

    611 	asm("stmdb	sp!, {r0-r3,r4,lr}");	// preserve arguments across filter check
    612 	asm("mov	r0, r12");
	asm("bl		btrace_check_filter2");
    614 	asm("cmp	r0, #0");
    615 	asm("mov	r12, r2");			// r12 = context id from filter check
    616 	asm("ldmia	sp!, {r0-r3,r4,lr}");
    617 	asm("moveq	r0, #0");			// secondary filter rejected -> return EFalse
	__CPOPRET(eq,"r4,");

    620 	asm("stmdb	sp!, {r12,lr}");	// stack args: context id, pc value
	asm("bl " CSM_ZN6BTrace8DoOutBigEmmPKvimm);
    622 	asm("add	sp, sp, #8");		// drop the two pushed words
	__POPRET("r4,");
	}
   625 
   626 	
   627 __NAKED__ EXPORT_C TBool BTrace::OutFilteredPcFormatBig(TUint32 a0, TUint32 aModuleUid, TUint32 aPc, TUint16 aFormatId, const TAny* aData, TInt aDataSize)
   628 	{
   629 	asm("mov	r0, #0"); //kernel side not implemented yet
   630 	}
   631 
   632 /******************************************************************************/
   633 
   634 /** Save all the ARM registers
   635 
   636 @internalTechnology
   637 */
   638 __NAKED__ void Arm::SaveState(SFullArmRegSet&)
   639 	{
   640 	asm("stmia	r0, {r0-r14}^ ");	// save R0-R7, R8_usr-R14_usr
   641 	asm("str	lr, [r0, #60]! ");	// save R15
   642 	asm("mrs	r1, cpsr ");
   643 	asm("str	r1, [r0, #4]! ");	// save CPSR
   644 	asm("bic	r2, r1, #0x1f ");
   645 	asm("orr	r2, r2, #0xd3 ");	// mode_svc, all interrupts off
   646 	asm("msr	cpsr, r2 ");
   647 	asm("stmib	r0!, {r13,r14} ");	// save R13_svc, R14_svc
   648 	asm("mrs	r3, spsr ");
   649 	asm("str	r3, [r0, #4]! ");	// save SPSR_svc
   650 	asm("bic	r2, r1, #0x1f ");
   651 	asm("orr	r2, r2, #0xd7 ");	// mode_abt, all interrupts off
   652 	asm("msr	cpsr, r2 ");
   653 	asm("stmib	r0!, {r13,r14} ");	// save R13_abt, R14_abt
   654 	asm("mrs	r3, spsr ");
   655 	asm("str	r3, [r0, #4]! ");	// save SPSR_abt
   656 	asm("bic	r2, r1, #0x1f ");
   657 	asm("orr	r2, r2, #0xdb ");	// mode_und, all interrupts off
   658 	asm("msr	cpsr, r2 ");
   659 	asm("stmib	r0!, {r13,r14} ");	// save R13_und, R14_und
   660 	asm("mrs	r3, spsr ");
   661 	asm("str	r3, [r0, #4]! ");	// save SPSR_und
   662 	asm("bic	r2, r1, #0x1f ");
   663 	asm("orr	r2, r2, #0xd2 ");	// mode_irq, all interrupts off
   664 	asm("msr	cpsr, r2 ");
   665 	asm("stmib	r0!, {r13,r14} ");	// save R13_irq, R14_irq
   666 	asm("mrs	r3, spsr ");
   667 	asm("str	r3, [r0, #4]! ");	// save SPSR_irq
   668 	asm("bic	r2, r1, #0x1f ");
   669 	asm("orr	r2, r2, #0xd1 ");	// mode_fiq, all interrupts off
   670 	asm("msr	cpsr, r2 ");
   671 	asm("stmib	r0!, {r8-r14} ");	// save R8_fiq ... R14_fiq
   672 	asm("mrs	r3, spsr ");
   673 	asm("str	r3, [r0, #4]! ");	// save SPSR_fiq
   674 	asm("bic	r2, r1, #0x1f ");
   675 	asm("orr	r2, r2, #0xd3 ");	// mode_svc, all interrupts off
   676 	asm("msr	cpsr, r2 ");
   677 
   678 	asm("mov	r4, #0 ");
   679 	asm("mov	r5, #0 ");
   680 	asm("mov	r6, #0 ");
   681 	asm("mov	r7, #0 ");
   682 	asm("mov	r8, #0 ");
   683 	asm("mov	r9, #0 ");
   684 	asm("mov	r10, #0 ");
   685 	asm("mov	r11, #0 ");
   686 
   687 	// monitor mode - skip for now
   688 	asm("mov	r3, #0 ");
   689 	asm("stmib	r0!, {r4-r6} ");	// R13_mon, R14_mon, SPSR_mon
   690 
   691 	// zero spare words
   692 	asm("mov	r3, #0 ");
   693 	asm("stmib	r0!, {r4-r11} ");
   694 	asm("add	r0, r0, #4 ");		// r0 = &a.iA
   695 
   696 #ifdef __CPU_ARMV7
   697 	asm("mrc	p14, 6, r3, c1, c0, 0 ");
   698 #else
   699 	asm("mov	r3, #0 ");
   700 #endif
   701 	asm("str	r3, [r0], #4 ");	// TEEHBR
   702 #ifdef __CPU_HAS_COPROCESSOR_ACCESS_REG
   703 	GET_CAR(,r3);
   704 #else
   705 	asm("mov	r3, #0 ");
   706 #endif
   707 	asm("str	r3, [r0], #4 ");	// CPACR
   708 
   709 	// skip SCR, SDER, NSACR, PMCR, MVBAR for now
   710 	asm("mov	r3, #0 ");
   711 	asm("stmia	r0!, {r4-r8} ");	// SCR, SDER, NSACR, PMCR, MVBAR
   712 
   713 	// zero spare words
   714 	asm("mov	r3, #0 ");
   715 	asm("stmia	r0!, {r3-r11} ");	// r0 = &a.iB[0]
   716 
   717 	// just fill in iB[0]
   718 #ifdef __CPU_HAS_MMU
   719 	asm("mrc	p15, 0, r3, c1, c0, 0 ");
   720 	asm("str	r3, [r0], #4 ");	// SCTLR
   721 #ifdef __CPU_HAS_ACTLR
   722 	asm("mrc	p15, 0, r3, c1, c0, 1 ");
   723 #else
   724 	asm("mov	r3, #0 ");
   725 #endif
   726 	asm("str	r3, [r0], #4 ");	// ACTLR
   727 	asm("mrc	p15, 0, r3, c2, c0, 0 ");
   728 	asm("str	r3, [r0], #4 ");	// TTBR0
   729 #ifdef __CPU_HAS_TTBR1
   730 	asm("mrc	p15, 0, r2, c2, c0, 1 ");
   731 	asm("mrc	p15, 0, r3, c2, c0, 2 ");
   732 #else
   733 	asm("mov	r2, #0 ");
   734 	asm("mov	r3, #0 ");
   735 #endif
   736 	asm("stmia	r0!, {r2,r3} ");	// TTBR1, TTBCR
   737 	asm("mrc	p15, 0, r3, c3, c0, 0 ");
   738 	asm("str	r3, [r0], #4 ");	// DACR
   739 #ifdef __CPU_MEMORY_TYPE_REMAPPING
   740 	asm("mrc	p15, 0, r2, c10, c2, 0 ");
   741 	asm("mrc	p15, 0, r3, c10, c2, 1 ");
   742 #else
   743 	asm("mov	r2, #0 ");
   744 	asm("mov	r3, #0 ");
   745 #endif
   746 	asm("stmia	r0!, {r2,r3} ");	// PRRR, NMRR
   747 #ifdef __CPU_ARMV7
   748 	asm("mrc	p15, 0, r3, c12, c0, 0 ");
   749 #else
   750 	asm("mov	r3, #0 ");
   751 #endif
   752 	asm("str	r3, [r0], #4 ");	// VBAR
   753 #if defined(__CPU_SA1) || defined(__CPU_ARM920T) || defined(__CPU_ARM925T) || defined(__CPU_ARMV5T) || defined(__CPU_ARMV6) || defined(__CPU_ARMV7)
   754 	asm("mrc	p15, 0, r3, c13, c0, 0 ");
   755 #else
   756 	asm("mov	r3, #0 ");
   757 #endif
   758 	asm("str	r3, [r0], #4 ");	// FCSEIDR
   759 #if defined(__CPU_ARMV6) || defined(__CPU_ARMV7)
   760 	asm("mrc	p15, 0, r3, c13, c0, 1 ");
   761 #else
   762 	asm("mov	r3, #0 ");
   763 #endif
   764 	asm("str	r3, [r0], #4 ");	// CONTEXTIDR
   765 #ifdef __CPU_HAS_CP15_THREAD_ID_REG
   766 	GET_RWRW_TID(,r2);
   767 	GET_RWRO_TID(,r3);
   768 	GET_RWNO_TID(,r12);
   769 #else
   770 	asm("mov	r2, #0 ");
   771 	asm("mov	r3, #0 ");
   772 	asm("mov	r12, #0 ");
   773 #endif
   774 	asm("stmia	r0!, {r2,r3,r12} ");	// RWRWTID, RWROTID, RWNOTID
   775 	asm("mrc	p15, 0, r2, c5, c0, 0 ");	// DFSR
   776 #ifdef __CPU_ARM_HAS_SPLIT_FSR
   777 	asm("mrc	p15, 0, r3, c5, c0, 1 ");	// IFSR
   778 #else
   779 	asm("mov	r3, #0 ");
   780 #endif
   781 	asm("stmia	r0!, {r2,r3} ");	// DFSR, IFSR
   782 #ifdef __CPU_ARMV7
   783 	asm("mrc	p15, 0, r2, c5, c1, 0 ");	// ADFSR
   784 	asm("mrc	p15, 0, r3, c5, c1, 1 ");	// AIFSR
   785 #else
   786 	asm("mov	r2, #0 ");
   787 	asm("mov	r3, #0 ");
   788 #endif
   789 	asm("stmia	r0!, {r2,r3} ");	// ADFSR, AIFSR
   790 	asm("mrc	p15, 0, r2, c6, c0, 0 ");	// DFAR
   791 #ifdef __CPU_ARM_HAS_CP15_IFAR
   792 	asm("mrc	p15, 0, r3, c6, c0, 2 ");	// IFAR
   793 #else
   794 	asm("mov	r3, #0 ");
   795 #endif
   796 	asm("stmia	r0!, {r2,r3} ");	// DFAR, IFAR
   797 
   798 	// zero spare words
   799 	asm("stmia	r0!, {r4-r7} ");
   800 	asm("stmia	r0!, {r4-r11} ");
   801 #else	// __CPU_HAS_MMU
   802 	asm("stmia	r0!, {r4-r11} ");	// no MMU so zero fill
   803 	asm("stmia	r0!, {r4-r11} ");	// no MMU so zero fill
   804 	asm("stmia	r0!, {r4-r11} ");	// no MMU so zero fill
   805 	asm("stmia	r0!, {r4-r11} ");	// no MMU so zero fill
   806 #endif	// __CPU_HAS_MMU
   807 
   808 	// zero iB[1]
   809 	asm("stmia	r0!, {r4-r11} ");
   810 	asm("stmia	r0!, {r4-r11} ");
   811 	asm("stmia	r0!, {r4-r11} ");
   812 	asm("stmia	r0!, {r4-r11} ");	// r0 = &a.iMore[0]
   813 	asm("add	r1, r0, #62*8 ");	// r1 = &a.iExcCode
   814 
   815 	// Save VFP state
   816 	// Save order:
   817 	//				FPEXC	FPSCR
   818 	// VFPv2 ONLY:	FPINST	FPINST2
   819 	//				D0-D3	D4-D7	D8-D11	D12-D15 
   820 	// VFPv3 ONLY:	D16-D19	D20-D23	D24-D27	D28-D31
   821 #ifdef __CPU_HAS_VFP
   822 	GET_CAR(,r2);
   823 	asm("bic	r2, r2, #0x00f00000 ");
   824 #ifdef __VFP_V3
   825 	asm("bic	r2, r2, #0xc0000000 ");	// mask off ASEDIS, D32DIS
   826 #endif
   827 	asm("orr	r2, r2, #0x00500000 ");	// enable privileged access to CP10, CP11
   828 	SET_CAR(,r2);
   829 	VFP_FMRX(,2,VFP_XREG_FPEXC);		// r2=FPEXC
   830 	asm("orr	r3, r2, #%a0" : : "i" ((TInt)VFP_FPEXC_EN));
   831 	VFP_FMXR(,VFP_XREG_FPEXC,3);		// enable VFP
   832 	__DATA_SYNC_BARRIER__(r4);
   833 	__INST_SYNC_BARRIER__(r4);
   834 	VFP_FMRX(,3,VFP_XREG_FPSCR);		// r3=FPSCR
   835 	asm("stmia	r0!, {r2,r3} ");		//
   836 #ifdef __VFP_V3
   837 	VFP_FSTMIADW(CC_AL,0,0,16);			// save D0 - D15
   838 	VFP_FMRX(,3,VFP_XREG_MVFR0);
   839 	asm("tst r3, #%a0" : : "i" ((TInt)VFP_MVFR0_ASIMD32)); // Check to see if all 32 Advanced SIMD registers are present
   840 	VFP_FSTMIADW(CC_NE,0,16,16);		// if so then save D16 - D31 (don't need to check CPACR.D32DIS as it is cleared above)
   841 #else
   842 	VFP_FMRX(,2,VFP_XREG_FPINST);
   843 	VFP_FMRX(,3,VFP_XREG_FPINST2);
   844 	asm("stmia	r0!, {r2,r3} ");		// FPINST, FPINST2
   845 	VFP_FSTMIADW(CC_AL,0,0,16);			// save D0 - D15
   846 #endif
   847 #endif	// __CPU_HAS_VFP
   848 	asm("1:		");
   849 	asm("cmp	r0, r1 ");
   850 	asm("strlo	r4, [r0], #4 ");		// clear up to end of iMore[61]
   851 	asm("blo	1b ");
   852 	asm("mov	r1, #%a0" : : "i" ((TInt)KMaxTInt));
   853 	asm("stmia	r0!, {r1,r5-r7} ");		// iExcCode=KMaxTInt, iCrashArgs[0...2]=0
   854 	asm("sub	r0, r0, #1024 ");		// r0 = &a
   855 #ifdef __CPU_HAS_VFP
   856 	asm("ldr	r2, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iMore[0]));
   857 	VFP_FMXR(,VFP_XREG_FPEXC,2);		// restore FPEXC
   858 	__DATA_SYNC_BARRIER__(r4);
   859 	__INST_SYNC_BARRIER__(r4);
   860 	asm("ldr	r2, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iA.iCPACR));
   861 	SET_CAR(,r2);						// restore CPACR
   862 #endif
   863 	asm("ldr	r1, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iN.iFlags));
   864 	asm("orr	r1, r1, #0xC0 ");		// interrupts off
   865 	asm("msr	cpsr, r1 ");			// restore CPSR with interrupts off
   866 	asm("ldmia	r0, {r0-r11} ");		// restore R4-R11
   867 	__JUMP(,lr);
   868 	}
   869 
   870 
/** Update the saved ARM registers with information from an exception

	On entry:
		r0 = pointer to the SFullArmRegSet snapshot to update
		r1 = pointer to the TArmExcInfo describing the exception

	The TArmExcInfo is consumed sequentially with post-incrementing LDMs
	(r1!), so the destination fields noted below also document the order of
	the words in that structure.  Only r2, r3 and r12 are used as scratch;
	no registers other than r0/r1 inputs are otherwise touched.

@internalTechnology
*/
__NAKED__ void Arm::UpdateState(SFullArmRegSet&, TArmExcInfo&)
	{
	asm("ldmia	r1!, {r2,r3,r12} ");	// words 0-2 of TArmExcInfo
	asm("str	r2, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iN.iFlags));
	asm("str	r3, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iExcCode));
	asm("str	r12, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iN.iR13Svc));
	asm("ldmia	r1!, {r2,r3,r12} ");	// words 3-5: R4-R6
	asm("str	r2, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iN.iR4));
	asm("str	r3, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iN.iR5));
	asm("str	r12, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iN.iR6));
	asm("ldmia	r1!, {r2,r3,r12} ");	// words 6-8: R7-R9
	asm("str	r2, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iN.iR7));
	asm("str	r3, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iN.iR8));
	asm("str	r12, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iN.iR9));
	asm("ldmia	r1!, {r2,r3,r12} ");	// words 9-11: R10, R11, R14svc
	asm("str	r2, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iN.iR10));
	asm("str	r3, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iN.iR11));
	asm("str	r12, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iN.iR14Svc));
	asm("ldr	r12, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iExcCode));	// reload exception type saved above
	asm("ldmia	r1!, {r2,r3} ");	// r2=iFaultAddress, r3=iFaultStatus
	asm("cmp	r12, #%a0 " : : "i" ((TInt)EArmExceptionPrefetchAbort));
	// Prefetch abort -> instruction-side fault registers (IFAR/IFSR),
	// anything else -> data-side fault registers (DFAR/DFSR).
	asm("streq	r2, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iB[0].iIFAR));
	asm("strne	r2, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iB[0].iDFAR));
	asm("streq	r3, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iB[0].iIFSR));
	asm("strne	r3, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iB[0].iDFSR));
	asm("ldmia	r1!, {r2,r3,r12} ");	// next words: SPSRsvc, R13, R14
	asm("str	r2, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iN.iSpsrSvc));
	asm("str	r3, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iN.iR13));
	asm("str	r12, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iN.iR14));
	asm("ldmia	r1!, {r2,r3,r12} ");	// next words: R0-R2
	asm("str	r2, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iN.iR0));
	asm("str	r3, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iN.iR1));
	asm("str	r12, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iN.iR2));
	asm("ldmia	r1!, {r2,r3,r12} ");	// final words: R3, R12, R15
	asm("str	r2, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iN.iR3));
	asm("str	r3, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iN.iR12));
	asm("str	r12, [r0, #%a0]" : : "i" _FOFF(SFullArmRegSet,iN.iR15));
	__JUMP(,lr);
	}
   914 
   915 
/** Get a pointer to a stored integer register, accounting for registers which
	are banked across modes.

@param	a		Pointer to saved register block
@param	aRegNum	Number of register required, 0-15 or -1 (indicates SPSR)
@param	aMode	Bottom 5 bits indicate which processor mode
				Other bits of aMode are ignored
@return			Pointer to the required saved register value, or NULL if
				the register number / mode combination is invalid (aRegNum
				outside -1..15, an unrecognised mode, or SPSR requested for
				usr/sys which have no SPSR)

@internalTechnology
*/
__NAKED__ TArmReg* Arm::Reg(SFullArmRegSet& /*a*/, TInt /*aRegNum*/, TArmReg /*aMode*/)
	{
	// The saved block begins with R0-R15 contiguously, so unbanked registers
	// are addressed as a + 4*aRegNum; banked ones need a mode-dependent offset.
	asm("cmp	r1, #8 ");				// register number < 8 ?
	asm("addlo	r0, r0, r1, lsl #2 ");	// register R0-R7 are not banked
	asm("blo	0f ");
	asm("cmp	r1, #15 ");				// register number = 15 ?
	asm("addeq	r0, r0, r1, lsl #2 ");	// register R15 not banked
	asm("movgt	r0, #0 ");				// no registers > 15
	asm("bge	0f ");
	asm("cmn	r1, #1 ");
	asm("movlt	r0, #0 ");				// no registers < -1
	asm("blt	0f ");
	asm("and	r12, r2, #0x1F ");		// r12 = mode bits M[4:0] of aMode
	asm("cmp	r12, #0x11 ");			// mode_fiq?
	asm("beq	1f ");					// skip if it is
	asm("cmp	r1, #13 ");
	asm("addlo	r0, r0, r1, lsl #2 ");	// register R8-R12 are only banked in mode_fiq
	asm("blo	0f ");
	asm("cmp	r12, #0x10 ");			// mode_usr ?
	asm("cmpne	r12, #0x1F ");			// if not, mode_sys ?
	asm("bne	2f ");					// skip if neither
	asm("cmp	r1, #16 ");
	asm("addlo	r0, r0, r1, lsl #2 ");	// handle R13_usr, R14_usr
	asm("movhs	r0, #0 ");				// no SPSR in mode_usr or mode_sys
	asm("blo	0f ");
	asm("1: ");							// mode_fiq, regnum = 8-12
	asm("2: ");							// exception mode, regnum not 0-12 or 15
	asm("cmn	r1, #1 ");				// regnum = -1 ?
	asm("moveq	r1, #15 ");				// if so, change to 15
	asm("sub	r1, r1, #13 ");
	asm("add	r0, r0, r1, lsl #2 ");	// add 0 for R13, 4 for R14, 8 for SPSR
	// The cmpne cascade below executes exactly one addeq per mode: each
	// failed compare leaves the chain live for the next mode test, so the
	// offsets accumulate from R13Svc up to the requested mode's bank.
	asm("cmp	r12, #0x16 ");			// mode_mon?
	asm("addeq	r0, r0, #12 ");			// if mon, add offset from R13Fiq to R13Mon
	asm("cmpne	r12, #0x11 ");			// mode_fiq?
	asm("addeq	r0, r0, #32 ");			// if valid but not svc/abt/und/irq, add offset from R13Irq to R13Fiq
	asm("cmpne	r12, #0x12 ");			// mode_irq?
	asm("addeq	r0, r0, #12 ");			// if valid but not svc/abt/und, add offset from R13Und to R13Irq
	asm("cmpne	r12, #0x1b ");			// mode_und?
	asm("addeq	r0, r0, #12 ");			// if valid but not svc/abt, add offset from R13Abt to R13Und
	asm("cmpne	r12, #0x17 ");			// mode_abt?
	asm("addeq	r0, r0, #12 ");			// if valid but not svc, add offset from R13Svc to R13Abt
	asm("cmpne	r12, #0x13 ");			// mode_svc?
	asm("addeq	r0, r0, #%a0" : : "i" _FOFF(SFullArmRegSet, iN.iR13Svc));	// if valid mode add offset to R13Svc
	asm("movne	r0, #0 ");				// unrecognised mode -> NULL
	asm("0: ");
	__JUMP(,lr);
	}
   974 
   975 
/** Restore all the ARM registers

@internalTechnology
*/
__NAKED__ void Arm::RestoreState(SFullArmRegSet&)
	{
	// NOTE(review): this __NAKED__ function has an empty body, so no return
	// instruction is emitted and control falls through past the function end
	// if it is ever called.  Presumably it is an intentionally unimplemented
	// stub (or the restore is performed elsewhere) -- confirm before use.
	}
   983 
   984 
   985 
   986 
   987