1.1 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
1.2 +++ b/os/kernelhwsrv/kernel/eka/nkernsmp/x86/ncirq.cia Fri Jun 15 03:10:57 2012 +0200
1.3 @@ -0,0 +1,214 @@
1.4 +// Copyright (c) 2007-2009 Nokia Corporation and/or its subsidiary(-ies).
1.5 +// All rights reserved.
1.6 +// This component and the accompanying materials are made available
1.7 +// under the terms of the License "Eclipse Public License v1.0"
1.8 +// which accompanies this distribution, and is available
1.9 +// at the URL "http://www.eclipse.org/legal/epl-v10.html".
1.10 +//
1.11 +// Initial Contributors:
1.12 +// Nokia Corporation - initial contribution.
1.13 +//
1.14 +// Contributors:
1.15 +//
1.16 +// Description:
1.17 +// e32\nkernsmp\x86\ncirq.cia
1.18 +//
1.19 +//
1.20 +
1.21 +/**
1.22 + @file
1.23 + @internalTechnology
1.24 +*/
1.25 +
1.26 +#include "nk_priv.h"
1.27 +#include "nk_plat.h"
1.28 +#include <nk_irq.h>
1.29 +#include <apic.h>
1.30 +
1.31 +#define OFFSET_NIrqHandler_iHState 8
1.32 +
1.33 +__ASSERT_COMPILE(_FOFF(NIrqHandler, iHState) == OFFSET_NIrqHandler_iHState);
1.34 +
1.35 +//
1.36 +// Atomically increment the run count (iIState bits 16-31) provided ECount or ERaw is set or the count is < 2.
1.37 +// If the count was originally zero, also record the executing CPU in the CPU field (bits 8-15).
1.38 +// Then wait for EWait (iIState bit 0) to be clear before returning.
1.39 +// Return the value of iIState immediately before the increment (in EAX).
1.40 +//
1.41 +__NAKED__ TUint32 NIrq::EnterIsr()
1.42 + {
1.43 + THISCALL_PROLOG0()
1.44 + asm("push ebx ");	// preserve callee-saved ebx
1.45 + asm("xor ebx, ebx ");
1.46 + asm("str bx ");	// bx = this CPU's TSS selector
1.47 + asm("sub bl, 0x28 ");	// per-CPU TSS selectors start at 0x28, 8 bytes apart -- TODO confirm against GDT layout
1.48 + asm("shr bl, 3 "); // BL = CPU number
1.49 + asm("mov eax, [ecx+%0]" : : "i" _FOFF(NIrq,iIState));	// eax = current iIState (ecx = this)
1.50 + asm("enterisr_loop: ");
1.51 + asm("mov edx, eax ");	// edx = proposed new iIState
1.52 + asm("cmp edx, 0x10000 "); // compare run count to 1
1.53 + asm("jae short enterisr_not0 "); // skip if >=1
1.54 + asm("mov dh, bl "); // else update CPU field (bits 8-15)
1.55 + asm("enterisr_not0: ");
1.56 + asm("add edx, 0x10000 "); // increment run count
1.57 + asm("cmp edx, 0x20000 "); // compare to 2
1.58 + asm("jb short enterisr_lt2 ");	// new count would be 1 - always commit
1.59 + asm("test dl, 6 "); // ECount|ERaw
1.60 + asm("jz short enterisr_wait "); // if !ECount && !ERaw limit count to 2 (skip the commit entirely)
1.61 + asm("enterisr_lt2: ");
1.62 + asm("lock cmpxchg [ecx+%0], edx" : : "i" _FOFF(NIrq,iIState));	// try to commit; on failure eax = fresh iIState
1.63 + asm("jne short enterisr_loop ");	// contention - recompute from the fresh value
1.64 +
1.65 + asm("enterisr_wait: ");
1.66 + asm("mov edx, 1 "); // EWait
1.67 + asm("enterisr_loop1: ");
1.68 + asm("test edx, [ecx+%0]" : : "i" _FOFF(NIrq,iIState));	// is EWait currently set?
1.69 + asm("jnz short enterisr_loop2 "); // loop while EWait set
1.70 + asm("pop ebx ");	// restore ebx
1.71 + asm("lock add dword ptr [esp], 0 ");	// locked RMW acts as a full memory barrier
1.72 + THISCALL_EPILOG0()	// return pre-increment iIState in eax
1.73 +
1.74 + asm("enterisr_loop2: ");
1.75 + X86_PAUSE	// spin-wait hint to the CPU
1.76 + asm("jmp short enterisr_loop ");	// NOTE(review): restarts the whole increment loop; if the earlier cmpxchg already succeeded this can commit a second increment - confirm enterisr_loop (vs enterisr_loop1) is the intended target
1.77 + }
1.78 +
1.79 +//
1.80 +// Atomically decrement the run count (iIState bits 16-31).
1.81 +// Return TRUE (nonzero) iff the run count is still nonzero after the decrement.
1.82 +//
1.83 +__NAKED__ TBool NIrq::IsrDone()
1.84 + {
1.85 + THISCALL_PROLOG0()	// thiscall: ecx = this
1.86 + asm("mov eax, 0xffff0000 "); // -1<<run count shift
1.87 + asm("lock xadd [ecx+%0], eax" : : "i" _FOFF(NIrq,iIState));	// atomic add; eax = iIState before the decrement
1.88 + asm("shr eax, 16 ");	// eax = old run count
1.89 + asm("dec eax "); // eax=new run count = TRUE if nonzero
1.90 + THISCALL_EPILOG0()
1.91 + }
1.92 +
1.93 +//
1.94 +// Wait (allowing interrupts and preemption) until run count = 0 and EWait clear
1.95 +// Then atomically set EWait and return with interrupts disabled
1.96 +//
1.97 +__NAKED__ void NIrq::Wait()
1.98 + {
1.99 + THISCALL_PROLOG0()	// thiscall: ecx = this
1.100 + asm("wait_loop: ");
1.101 + asm("cli ");	// interrupts off while attempting to claim EWait
1.102 + asm("mov eax, [ecx+%0]" : : "i" _FOFF(NIrq,iIState));	// eax = current iIState
1.103 + asm("wait_loop1: ");
1.104 + asm("mov edx, eax ");
1.105 + asm("test edx, 0xffff0001 "); // test run count and EWait
1.106 + asm("jnz short wait_loop2 "); // if not both zero, must wait
1.107 + asm("inc edx "); // else try to set EWait
1.108 + asm("lock cmpxchg [ecx+%0], edx" : : "i" _FOFF(NIrq,iIState));	// on failure eax = fresh iIState, so retry without reloading
1.109 + asm("jne short wait_loop1 "); // someone beat us to it
1.110 + THISCALL_EPILOG0() // success - return with interrupts disabled
1.111 +
1.112 + // spin, allowing interrupts, while we wait for run count and EWait both zero
1.113 + asm("wait_loop2: ");
1.114 + asm("sti ");	// reenable interrupts while spinning
1.115 + X86_PAUSE	// spin-wait hint to the CPU
1.116 + asm("jmp short wait_loop ");
1.117 + }
1.118 +
1.119 +//
1.120 +// Atomically clear EWait (iIState bit 0) and reenable interrupts
1.121 +//
1.122 +__NAKED__ void NIrq::Done()
1.123 + {
1.124 + THISCALL_PROLOG0()	// thiscall: ecx = this
1.125 + asm("lock and dword ptr [ecx+%0], 0xfffffffe" : : "i" _FOFF(NIrq,iIState));	// atomically clear EWait
1.126 + asm("sti ");
1.127 + THISCALL_EPILOG0()
1.128 + }
1.129 +
1.130 +
1.131 +
1.132 +//
1.133 +// atomic { if !EUnbind && !ENotReady clear EDisable and EBind }
1.134 +// Return the initial value of iHState
1.135 +//
1.136 +__NAKED__ TUint32 NIrqHandler::DoSetEnabled()
1.137 + {
1.138 + THISCALL_PROLOG0()	// thiscall: ecx = this
1.139 + asm("mov eax, [ecx+%0]" : : "i" _FOFF(NIrqHandler,iHState));	// eax = current iHState
1.140 + asm("dse_loop: ");
1.141 + asm("mov edx, eax ");
1.142 + asm("test dh, 0x0A "); // EUnbind|ENotReady
1.143 + asm("jnz short dse_end "); // if either set, finished
1.144 + asm("and dh, 0xFA "); // else try to clear EDisable and EBind
1.145 + asm("dse_end: ");
1.146 + asm("lock cmpxchg [ecx+%0], edx" : : "i" _FOFF(NIrqHandler,iHState));	// on failure eax = fresh iHState
1.147 + asm("jne short dse_loop "); // someone beat us to it
1.148 + THISCALL_EPILOG0() // success - return original iHState
1.149 + }
1.150 +
1.151 +//
1.152 +// Atomically increment the run count (iHState bits 16-31) by aCount if ECount is set or the run count was initially zero.
1.153 +// If !EDisable and !EUnbind, also set EActive.
1.154 +// Return initial iHState
1.155 +//
1.156 +__NAKED__ TUint32 NIrqHandler::DoActivate(TInt aCount)
1.157 + {
1.158 + THISCALL_PROLOG1()	// thiscall: ecx = this, aCount at [esp+4]
1.159 + asm("mov eax, [ecx+%0]" : : "i" _FOFF(NIrqHandler,iHState));	// eax = current iHState
1.160 + asm("da_loop: ");
1.161 + asm("mov edx, eax ");
1.162 + asm("cmp edx, 0x10000 ");	// compare run count to 1
1.163 + asm("jb short da_zero "); // run count initially zero - skip the ECount check, always increment
1.164 + asm("test dh, 0x10 "); // else check ECount
1.165 + asm("jz short da_end "); // if clear, don't increment
1.166 + asm("da_zero: ");
1.167 + asm("mov edx, [esp+4] "); // edx = aCount
1.168 + asm("shl edx, 16 ");	// move aCount into the run count field
1.169 + asm("add edx, eax "); // increment run count
1.170 + asm("da_end: ");
1.171 + asm("test dh, 0x03 "); // EUnbind|EDisable
1.172 + asm("jnz short da_1 "); // skip if EUnbind or EDisable set
1.173 + asm("or dh, 0x20 "); // set EActive
1.174 + asm("da_1: ");
1.175 + asm("lock cmpxchg [ecx+%0], edx" : : "i" _FOFF(NIrqHandler,iHState));	// on failure eax = fresh iHState
1.176 + asm("jne short da_loop "); // someone beat us to it
1.177 + THISCALL_EPILOG1() // success - return original iHState
1.178 + }
1.179 +
1.180 +//
1.181 +// Atomically decrement the run count (iHState bits 16-31).
1.182 +// Return initial iHState
1.183 +//
1.184 +__NAKED__ TUint32 NIrqHandler::EventBegin()
1.185 + {
1.186 + THISCALL_PROLOG0()	// thiscall: ecx = this
1.187 + asm("mov eax, 0xffff0000 "); // -1<<run count shift
1.188 + asm("lock xadd [ecx+%0], eax" : : "i" _FOFF(NIrqHandler,iHState));	// atomic add; eax = iHState before the decrement
1.189 + THISCALL_EPILOG0()	// return original iHState in eax
1.190 + }
1.191 +
1.192 +//
1.193 +// If the run count is zero, or EDisable or EUnbind
1.194 +// are set, clear EActive.
1.195 +// Return initial iHState, except for new EActive bit
1.196 +//
1.197 +__NAKED__ TUint32 NIrqHandler::EventDone()
1.198 + {
1.199 + THISCALL_PROLOG0()	// thiscall: ecx = this
1.200 + asm("mov eax, [ecx+%0]" : : "i" _FOFF(NIrqHandler,iHState));	// eax = current iHState
1.201 + asm("ed_loop: ");
1.202 + asm("mov edx, eax ");
1.203 + asm("cmp edx, 0x10000 ");	// compare run count to 1
1.204 + asm("jb short ed_rc_0 "); // run count now zero - go clear EActive
1.205 + asm("test dh, 0x03 "); // test EUnbind and EDisable
1.206 + asm("jz short ed_1 "); // skip if neither set
1.207 + asm("ed_rc_0: ");
1.208 + asm("and dh, 0xDF "); // clear EActive
1.209 + asm("ed_1: ");
1.210 + asm("lock cmpxchg [ecx+%0], edx" : : "i" _FOFF(NIrqHandler,iHState));	// on failure eax = fresh iHState
1.211 + asm("jne short ed_loop "); // someone beat us to it
1.212 + asm("or dh, 0xDF "); // dh = new state with every bit except EActive forced to 1
1.213 + asm("and ah, dh "); // clear EActive in return value if we cleared it
1.214 + THISCALL_EPILOG0() // success - return original iHState with new EActive
1.215 + }
1.216 +
1.217 +