Update contrib.
1 // Copyright (c) 1998-2009 Nokia Corporation and/or its subsidiary(-ies).
2 // All rights reserved.
3 // This component and the accompanying materials are made available
4 // under the terms of the License "Eclipse Public License v1.0"
5 // which accompanies this distribution, and is available
6 // at the URL "http://www.eclipse.org/legal/epl-v10.html".
8 // Initial Contributors:
9 // Nokia Corporation - initial contribution.
14 // e32\include\nkern\nkern.h
16 // WARNING: This file contains some APIs which are internal and are subject
17 // to change without notice. Such APIs should therefore not be used
18 // outside the Kernel and Hardware Services package.
24 #ifdef __STANDALONE_NANOKERNEL__
33 #include <e32atomics.h>
36 /** @internalComponent */
37 IMPORT_C void NKFault(const char* file, TInt line);
38 /** @internalComponent */
39 void NKIdle(TInt aStage);
// Fault/assert helpers. FAULT() reports the current source file and line via
// NKFault(). Two definitions of __NK_ASSERT_DEBUG appear below: the first
// evaluates the condition and faults on failure, the second compiles to
// nothing. NOTE(review): the #if guards selecting between them (presumably
// debug vs. release builds) fall outside this chunk - confirm before editing.
46 #define FAULT() NKFault(__FILE__,__LINE__)
// Debug assertion: faults (via FAULT()) when 'c' evaluates false.
54 #define __NK_ASSERT_DEBUG(c) ((void) ((c)||(FAULT(),0)) )
// Alternate (no-op) definition of the same macro for the other build variant.
58 #define __NK_ASSERT_DEBUG(c)
// Assertion checked in all builds: faults when 'c' evaluates false.
66 #define __NK_ASSERT_ALWAYS(c) ((void) ((c)||(FAULT(),0)) )
// Number of nanokernel thread priority levels (priorities 0..63).
72 const TInt KNumPriorities=64;
// Upper bound on the number of CPUs supported by the nanokernel.
74 const TInt KMaxCpus=8;
81 Used for protecting a code fragment against both interrupts and concurrent
82 execution on another processor.
91 // Bit 7 of order clear for locks used with interrupts disabled
92 EOrderGenericIrqLow0 =0x00u, // Device driver spin locks, low range
93 EOrderGenericIrqLow1 =0x01u, // Device driver spin locks, low range
94 EOrderGenericIrqLow2 =0x02u, // Device driver spin locks, low range
95 EOrderGenericIrqLow3 =0x03u, // Device driver spin locks, low range
96 EOrderGenericIrqHigh0 =0x18u, // Device driver spin locks, high range
97 EOrderGenericIrqHigh1 =0x19u, // Device driver spin locks, high range
98 EOrderGenericIrqHigh2 =0x1Au, // Device driver spin locks, high range
99 EOrderGenericIrqHigh3 =0x1Bu, // Device driver spin locks, high range
101 // Bit 7 of order set for locks used with interrupts enabled, preemption disabled
102 EOrderGenericPreLow0 =0x80u, // Device driver spin locks, low range
103 EOrderGenericPreLow1 =0x81u, // Device driver spin locks, low range
104 EOrderGenericPreHigh0 =0x9Eu, // Device driver spin locks, high range
105 EOrderGenericPreHigh1 =0x9Fu, // Device driver spin locks, high range
107 EOrderNone =0xFFu // No order check required (e.g. for dynamic ordering)
110 IMPORT_C TSpinLock(TUint aOrder);
112 volatile TUint64 iLock;
// NOTE(review): every macro below ignores its 'lock' argument and manipulates
// only the interrupt/preemption state; the lock word itself is never touched.
// These therefore look like the single-CPU (non-SMP) definitions - confirm
// against the #if guards that fall outside this chunk.
115 /** Macro to disable interrupts and acquire the lock.
120 #define __SPIN_LOCK_IRQ(lock) ((void)NKern::DisableAllInterrupts())
122 /** Macro to release the lock and enable interrupts.
127 #define __SPIN_UNLOCK_IRQ(lock) (NKern::EnableAllInterrupts())
129 /** Macro to see if someone else is waiting for the lock, enabling IRQs
130 then disabling IRQs again.
135 #define __SPIN_FLASH_IRQ(lock) (NKern::EnableAllInterrupts(),(void)NKern::DisableAllInterrupts(),((TBool)TRUE))
137 /** Macro to remember original interrupt state then disable interrupts
138 and acquire the lock.
143 #define __SPIN_LOCK_IRQSAVE(lock) (NKern::DisableAllInterrupts())
145 /** Macro to release the lock then restore original interrupt state to that
151 #define __SPIN_UNLOCK_IRQRESTORE(lock,irq) (NKern::RestoreInterrupts(irq))
153 /** Macro to see if someone else is waiting for the lock, enabling IRQs to
154 the original state supplied then disabling IRQs again.
159 #define __SPIN_FLASH_IRQRESTORE(lock,irq) (NKern::RestoreInterrupts(irq),((void)NKern::DisableAllInterrupts()),((TBool)TRUE))
161 /** Macro to acquire the lock. This assumes the caller has already disabled
162 interrupts/preemption.
164 If interrupts/preemption is not disabled a run-time assert will occur.
165 This is to protect against unsafe code that might lead to same core
168 In device driver code it is safer to use __SPIN_LOCK_IRQSAVE() instead,
169 although not as efficient should interrupts already be disabled for the
170 duration the lock is held.
// In this variant acquiring/releasing the lock expands to nothing at all.
175 #define __SPIN_LOCK(lock)
177 /** Macro to release the lock, don't change interrupt/preemption state.
182 #define __SPIN_UNLOCK(lock)
// FALSE: nobody can be waiting, since the lock word is never used here.
187 #define __SPIN_FLASH(lock) ((TBool)FALSE)
189 /** Macro to see if someone else is waiting for the lock, enabling preemption
190 then disabling it again.
195 #define __SPIN_FLASH_PREEMPT(lock) ((TBool)NKern::PreemptionPoint())
198 /** Read/Write Spin lock
205 IMPORT_C TRWSpinLock(TUint aOrder); // Uses same order space as TSpinLock
207 volatile TUint64 iLock;
// Reader (_R) / writer (_W) variants of the spin lock macros. In this build
// variant the reader and writer forms are identical and, as above, the 'lock'
// argument is unused - only interrupt/preemption state is changed.
215 #define __SPIN_LOCK_IRQ_R(lock) ((void)NKern::DisableAllInterrupts())
221 #define __SPIN_UNLOCK_IRQ_R(lock) (NKern::EnableAllInterrupts())
227 #define __SPIN_FLASH_IRQ_R(lock) (NKern::EnableAllInterrupts(),(void)NKern::DisableAllInterrupts(),((TBool)TRUE))
233 #define __SPIN_LOCK_IRQ_W(lock) ((void)NKern::DisableAllInterrupts())
239 #define __SPIN_UNLOCK_IRQ_W(lock) (NKern::EnableAllInterrupts())
245 #define __SPIN_FLASH_IRQ_W(lock) (NKern::EnableAllInterrupts(),(void)NKern::DisableAllInterrupts(),((TBool)TRUE))
// Plain lock/unlock: caller has already disabled interrupts/preemption,
// so these expand to nothing here.
252 #define __SPIN_LOCK_R(lock)
258 #define __SPIN_UNLOCK_R(lock)
263 #define __SPIN_FLASH_R(lock) ((TBool)FALSE)
269 #define __SPIN_LOCK_W(lock)
275 #define __SPIN_UNLOCK_W(lock)
280 #define __SPIN_FLASH_W(lock) ((TBool)FALSE)
// IRQSAVE/IRQRESTORE forms: remember, then restore, the interrupt state.
287 #define __SPIN_LOCK_IRQSAVE_R(lock) (NKern::DisableAllInterrupts())
293 #define __SPIN_UNLOCK_IRQRESTORE_R(lock,irq) (NKern::RestoreInterrupts(irq))
299 #define __SPIN_FLASH_IRQRESTORE_R(lock,irq) (NKern::RestoreInterrupts(irq),((void)NKern::DisableAllInterrupts()),((TBool)TRUE))
305 #define __SPIN_LOCK_IRQSAVE_W(lock) (NKern::DisableAllInterrupts())
311 #define __SPIN_UNLOCK_IRQRESTORE_W(lock,irq) (NKern::RestoreInterrupts(irq))
317 #define __SPIN_FLASH_IRQRESTORE_W(lock,irq) (NKern::RestoreInterrupts(irq),((void)NKern::DisableAllInterrupts()),((TBool)TRUE))
// Flash with preemption only: briefly allows other threads to run.
324 #define __SPIN_FLASH_PREEMPT_R(lock) ((TBool)NKern::PreemptionPoint())
330 #define __SPIN_FLASH_PREEMPT_W(lock) ((TBool)NKern::PreemptionPoint())
333 /** Nanokernel fast semaphore
335 A light-weight semaphore class that only supports a single waiting thread,
336 suitable for the Symbian OS thread I/O semaphore.
338 Initialising a NFastSemaphore involves two steps:
340 - Constructing the semaphore
341 - Setting the semaphore owning thread (the one allowed to wait on it)
343 For example, creating one for the current thread to wait on:
347 sem.iOwningThread = NKern::CurrentThread();
356 inline NFastSemaphore();
357 inline NFastSemaphore(NThreadBase* aThread);
358 IMPORT_C void SetOwner(NThreadBase* aThread);
359 IMPORT_C void Wait();
360 IMPORT_C void Signal();
361 IMPORT_C void SignalN(TInt aCount);
362 IMPORT_C void Reset();
365 TInt iCount; /**< @internalComponent */
367 /** The thread allowed to wait on the semaphore
370 NThreadBase* iOwningThread;
373 /** Create a fast semaphore
378 inline NFastSemaphore::NFastSemaphore()
379 : iCount(0), iOwningThread(NULL)
382 /** Nanokernel fast mutex
384 A light-weight priority-inheritance mutex that can be used if the following
387 - Threads that hold the mutex never block.
388 - The mutex is never acquired in a nested fashion
390 If either of these conditions is not met, a DMutex object is more appropriate.
398 IMPORT_C NFastMutex();
399 IMPORT_C void Wait();
400 IMPORT_C void Signal();
401 IMPORT_C TBool HeldByCurrentThread(); /**< @internalComponent */
403 NThreadBase* iHoldingThread; /**< @internalComponent */
405 /** MUST ALWAYS BE 0 or 1
416 The type of the callback function used by the nanokernel timer.
420 typedef void (*NTimerFn)(TAny*);
429 A basic relative timer provided by the nanokernel.
431 It can generate either a one-shot interrupt or periodic interrupts.
433 A timeout handler is called when the timer expires, either:
434 - from the timer ISR - if the timer is queued via OneShot(TInt aTime) or OneShot(TInt aTime, EFalse), or
435 - from the nanokernel timer dfc1 thread - if the timer is queued via OneShot(TInt aTime, TBool ETrue) call, or
436 - from the DFC thread to which the provided DFC belongs - if the timer is queued via OneShot(TInt aTime, TDfc& aDfc) call.
437 The call-back mechanism cannot be changed during the lifetime of a timer.
439 These timer objects may be manipulated from any context.
440 The timers are driven from a periodic system tick interrupt,
441 usually a 1ms period.
445 class NTimer : public SDblQueLink
455 Constructor taking a callback function and a pointer to be passed
456 to the callback function.
458 @param aFunction The callback function.
459 @param aPtr A pointer to be passed to the callback function
462 inline NTimer(NTimerFn aFunction, TAny* aPtr)
463 : iPtr(aPtr), iFunction(aFunction), iState(EIdle)
465 IMPORT_C TInt OneShot(TInt aTime);
466 IMPORT_C TInt OneShot(TInt aTime, TBool aDfc);
467 IMPORT_C TInt OneShot(TInt aTime, TDfc& aDfc);
468 IMPORT_C TInt Again(TInt aTime);
469 IMPORT_C TBool Cancel();
470 IMPORT_C TBool IsPending();
477 EIdle=0, // not queued
478 ETransferring=1, // being transferred from holding to ordered queue
479 EHolding=2, // on holding queue
480 EOrdered=3, // on ordered queue
481 ECritical=4, // on ordered queue and in use by queue walk routine
482 EFinal=5, // on final queue
485 /** Argument for callback function or the pointer to TDfc */
486 TAny* iPtr; /**< @internalComponent */
488 /** Pointer to callback function. NULL value indicates that queuing of provided Dfc queue will be done
489 instead of calling callback function on completion */
490 NTimerFn iFunction; /**< @internalComponent */
492 TUint32 iTriggerTime; /**< @internalComponent */
493 TUint8 iCompleteInDfc; /**< @internalComponent */
494 TUint8 iState; /**< @internalComponent */
495 TUint8 iPad1; /**< @internalComponent */
497 /** Available for timer client to use.
498 @internalTechnology */
505 #define i_NTimer_iUserFlags iUserFlags
510 #define i_NTimer_iState iState
// Entry-point function of a nanothread; receives an opaque argument.
516 typedef void (*NThreadFunction)(TAny*);
// Thread exit hook; returns a TDfc* (the significance of the returned DFC
// is not visible in this chunk - see the NKern/SNThreadHandlers docs).
522 typedef TDfc* (*NThreadExitHandler)(NThread*);
// Thread state-change hook; the two TInt parameters' meanings are not
// visible here (NThread_Default_State_Handler shares this signature).
528 typedef void (*NThreadStateHandler)(NThread*,TInt,TInt);
// Exception hook; first parameter is an opaque context pointer.
534 typedef void (*NThreadExceptionHandler)(TAny*,NThread*);
// Timeout hook; the TInt parameter's meaning is not visible in this chunk.
540 typedef void (*NThreadTimeoutHandler)(NThread*,TInt);
546 struct SNThreadHandlers
548 NThreadExitHandler iExitHandler;
549 NThreadStateHandler iStateHandler;
550 NThreadExceptionHandler iExceptionHandler;
551 NThreadTimeoutHandler iTimeoutHandler;
// Default implementations / default values for the SNThreadHandlers fields.
// The state and exception defaults point at real functions declared here;
// the exit and timeout defaults are null pointers (presumably meaning
// "no handler installed" - confirm at the call sites outside this chunk).
554 /** @internalComponent */
555 extern void NThread_Default_State_Handler(NThread*, TInt, TInt);
557 /** @internalComponent */
558 extern void NThread_Default_Exception_Handler(TAny*, NThread*);
560 /** @internalComponent */
561 #define NTHREAD_DEFAULT_EXIT_HANDLER ((NThreadExitHandler)0)
563 /** @internalComponent */
564 #define NTHREAD_DEFAULT_STATE_HANDLER (&NThread_Default_State_Handler)
566 /** @internalComponent */
567 #define NTHREAD_DEFAULT_EXCEPTION_HANDLER (&NThread_Default_Exception_Handler)
569 /** @internalComponent */
570 #define NTHREAD_DEFAULT_TIMEOUT_HANDLER ((NThreadTimeoutHandler)0)
577 struct SFastExecTable
579 TInt iFastExecCount; // includes implicit function#0
580 TLinAddr iFunction[1]; // first entry is for call number 1
// Flags encoded in the top bits of a slow-exec entry's iFlags word:
//   bit 31      - claim the system lock before dispatch
//   bit 30      - release the system lock
//   bit 29      - run the preprocess handler (used for handle lookups)
//   bits 28..26 - count of additional arguments (values below); note that
//                 KExecFlagExtraArgs8 (0x1C000000) coincides with
//                 KExecFlagExtraArgMask, i.e. the all-bits-set encoding.
587 const TUint32 KExecFlagClaim=0x80000000; // claim system lock
593 const TUint32 KExecFlagRelease=0x40000000; // release system lock
599 const TUint32 KExecFlagPreprocess=0x20000000; // preprocess
605 const TUint32 KExecFlagExtraArgMask=0x1C000000; // 3 bits indicating additional arguments
611 const TUint32 KExecFlagExtraArgs2=0x04000000; // 2 additional arguments
617 const TUint32 KExecFlagExtraArgs3=0x08000000; // 3 additional arguments
623 const TUint32 KExecFlagExtraArgs4=0x0C000000; // 4 additional arguments
629 const TUint32 KExecFlagExtraArgs5=0x10000000; // 5 additional arguments
635 const TUint32 KExecFlagExtraArgs6=0x14000000; // 6 additional arguments
641 const TUint32 KExecFlagExtraArgs7=0x18000000; // 7 additional arguments
647 const TUint32 KExecFlagExtraArgs8=0x1C000000; // 8 additional arguments
654 struct SSlowExecEntry
656 TUint32 iFlags; // information about call
657 TLinAddr iFunction; // address of function to be called
665 struct SSlowExecTable
668 TLinAddr iInvalidExecHandler; // used if call number invalid
669 TLinAddr iPreprocessHandler; // used for handle lookups
670 SSlowExecEntry iEntries[1]; // first entry is for call number 0
673 // Thread iAttributes Constants
// Single-bit flags for a nanothread's iAttributes field.
674 const TUint8 KThreadAttImplicitSystemLock=1; /**< @internalComponent */
675 const TUint8 KThreadAttAddressSpace=2; /**< @internalComponent */
676 const TUint8 KThreadAttLoggable=4; /**< @internalComponent */
677 const TUint8 KThreadAttDelayed=8; /**< @internalComponent */
// All-ones affinity mask; per its name, the thread may run on any CPU.
681 const TUint32 KCpuAffinityAny=0xffffffffu; /**< @internalComponent */
683 /** Information needed for creating a nanothread.
688 struct SNThreadCreateInfo
690 NThreadFunction iFunction;
696 TUint32 iCpuAffinity;
697 const SNThreadHandlers* iHandlers;
698 const SFastExecTable* iFastExecTable;
699 const SSlowExecTable* iSlowExecTable;
700 const TUint32* iParameterBlock;
701 TInt iParameterBlockSize; // if zero, iParameterBlock _is_ the initial data
702 // otherwise it points to n bytes of initial data
705 /** Constant for use with NKern:: functions which release a fast mutex as well
706 as performing some other operations.
711 #define SYSTEM_LOCK (NFastMutex*)0
714 /** Idle handler function
715 Pointer to a function which is called whenever a CPU goes idle
717 @param aPtr The iPtr stored in the SCpuIdleHandler structure
718 @param aStage If positive, the number of processors still active
719 If zero, indicates all processors are now idle
720 -1 indicates that postamble processing is required after waking up
724 typedef void (*TCpuIdleHandlerFn)(TAny* aPtr, TInt aStage);
726 /** Idle handler structure
730 struct SCpuIdleHandler
732 TCpuIdleHandlerFn iHandler;
734 volatile TBool iPostambleRequired;
741 enum TUserModeCallbackReason
743 EUserModeCallbackRun,
744 EUserModeCallbackCancel,
749 A callback function executed when a thread returns to user mode.
753 typedef void (*TUserModeCallbackFunc)(TAny* aThisPtr, TUserModeCallbackReason aReasonCode);
757 An object representing a queued callback to be executed when a thread returns to user mode.
761 class TUserModeCallback
764 TUserModeCallback(TUserModeCallbackFunc);
765 ~TUserModeCallback();
768 TUserModeCallback* volatile iNext;
769 TUserModeCallbackFunc iFunc;
772 TUserModeCallback* const KUserModeCallbackUnqueued = ((TUserModeCallback*)1);
775 /** Nanokernel functions
783 /** Bitmask values used when blocking a nanothread.
788 EEnterCS=1, /**< Enter thread critical section before blocking */
789 ERelease=2, /**< Release specified fast mutex before blocking */
790 EClaim=4, /**< Re-acquire specified fast mutex when unblocked */
791 EObstruct=8, /**< Signifies obstruction of thread rather than lack of work to do */
794 /** Values that specify the context of the processor.
795 @see NKern::CurrentContext()
799 EThread=0, /**< The processor is in a thread context*/
800 EIDFC=1, /**< The processor is in an IDFC context*/
801 EInterrupt=2, /**< The processor is in an interrupt context*/
802 EEscaped=KMaxTInt /**< Not a valid processor context on target hardware*/
807 IMPORT_C static TInt ThreadCreate(NThread* aThread, SNThreadCreateInfo& anInfo);
808 IMPORT_C static TBool ThreadSuspend(NThread* aThread, TInt aCount);
809 IMPORT_C static TBool ThreadResume(NThread* aThread);
810 IMPORT_C static TBool ThreadResume(NThread* aThread, NFastMutex* aMutex);
811 IMPORT_C static TBool ThreadForceResume(NThread* aThread);
812 IMPORT_C static TBool ThreadForceResume(NThread* aThread, NFastMutex* aMutex);
813 IMPORT_C static void ThreadRelease(NThread* aThread, TInt aReturnValue);
814 IMPORT_C static void ThreadRelease(NThread* aThread, TInt aReturnValue, NFastMutex* aMutex);
815 IMPORT_C static void ThreadSetPriority(NThread* aThread, TInt aPriority);
816 IMPORT_C static void ThreadSetPriority(NThread* aThread, TInt aPriority, NFastMutex* aMutex);
817 IMPORT_C static void ThreadRequestSignal(NThread* aThread);
818 IMPORT_C static void ThreadRequestSignal(NThread* aThread, NFastMutex* aMutex);
819 IMPORT_C static void ThreadRequestSignal(NThread* aThread, TInt aCount);
820 IMPORT_C static void ThreadKill(NThread* aThread);
821 IMPORT_C static void ThreadKill(NThread* aThread, NFastMutex* aMutex);
822 IMPORT_C static void ThreadEnterCS();
823 IMPORT_C static void ThreadLeaveCS();
824 static NThread* _ThreadEnterCS(); /**< @internalComponent */
825 static void _ThreadLeaveCS(); /**< @internalComponent */
826 IMPORT_C static TInt Block(TUint32 aTimeout, TUint aMode, NFastMutex* aMutex);
827 IMPORT_C static TInt Block(TUint32 aTimeout, TUint aMode);
828 IMPORT_C static void NanoBlock(TUint32 aTimeout, TUint aState, TAny* aWaitObj);
829 IMPORT_C static void ThreadGetUserContext(NThread* aThread, TAny* aContext, TUint32& aAvailRegistersMask);
830 IMPORT_C static void ThreadSetUserContext(NThread* aThread, TAny* aContext);
831 IMPORT_C static void ThreadGetSystemContext(NThread* aThread, TAny* aContext, TUint32& aAvailRegistersMask);
832 static void ThreadModifyUsp(NThread* aThread, TLinAddr aUsp);
833 IMPORT_C static TInt FreezeCpu(); /**< @internalComponent */
834 IMPORT_C static void EndFreezeCpu(TInt aCookie); /**< @internalComponent */
835 IMPORT_C static TUint32 ThreadSetCpuAffinity(NThread* aThread, TUint32 aAffinity); /**< @internalComponent */
836 IMPORT_C static void ThreadSetTimeslice(NThread* aThread, TInt aTimeslice); /**< @internalComponent */
837 IMPORT_C static TUint64 ThreadCpuTime(NThread* aThread); /**< @internalComponent */
838 IMPORT_C static TUint32 CpuTimeMeasFreq(); /**< @internalComponent */
839 static TInt QueueUserModeCallback(NThreadBase* aThread, TUserModeCallback* aCallback); /**< @internalComponent */
840 static void MoveUserModeCallbacks(NThreadBase* aSrcThread, NThreadBase* aDestThread); /**< @internalComponent */
841 static void CancelUserModeCallbacks(); /**< @internalComponent */
844 IMPORT_C static void FSSetOwner(NFastSemaphore* aSem,NThreadBase* aThread);
845 IMPORT_C static void FSWait(NFastSemaphore* aSem);
846 IMPORT_C static void FSSignal(NFastSemaphore* aSem);
847 IMPORT_C static void FSSignal(NFastSemaphore* aSem, NFastMutex* aMutex);
848 IMPORT_C static void FSSignalN(NFastSemaphore* aSem, TInt aCount);
849 IMPORT_C static void FSSignalN(NFastSemaphore* aSem, TInt aCount, NFastMutex* aMutex);
852 IMPORT_C static void FMWait(NFastMutex* aMutex);
853 IMPORT_C static void FMSignal(NFastMutex* aMutex);
854 IMPORT_C static TBool FMFlash(NFastMutex* aMutex);
857 IMPORT_C static void Lock();
858 IMPORT_C static NThread* LockC();
859 IMPORT_C static void Unlock();
860 IMPORT_C static TInt PreemptionPoint();
863 IMPORT_C static TInt DisableAllInterrupts();
864 IMPORT_C static TInt DisableInterrupts(TInt aLevel);
865 IMPORT_C static void RestoreInterrupts(TInt aRestoreData);
866 IMPORT_C static void EnableAllInterrupts();
// --- Atomic utility wrappers over the e32atomics primitives ---------------
// NOTE(review): each wrapper below appears to return the ORIGINAL value of
// the target (the usual __e32_atomic_* convention) - confirm against
// e32atomics.h before relying on return values.
// Atomic add on 32- and 64-bit counters.
869 inline static TInt LockedInc(TInt& aCount)
870 { return __e32_atomic_add_ord32(&aCount,1); }
// Decrement via two's complement: adding 0xffffffff is adding -1.
871 inline static TInt LockedDec(TInt& aCount)
872 { return __e32_atomic_add_ord32(&aCount,0xffffffff); }
873 inline static TInt LockedAdd(TInt& aDest, TInt aSrc)
874 { return __e32_atomic_add_ord32(&aDest,aSrc); }
875 inline static TInt64 LockedInc(TInt64& aCount)
876 { return __e32_atomic_add_ord64(&aCount,1); }
877 inline static TInt64 LockedDec(TInt64& aCount)
878 { return __e32_atomic_add_ord64(&aCount,TUint64(TInt64(-1))); }
879 inline static TInt64 LockedAdd(TInt64& aDest, TInt64 aSrc) /**< @internalComponent */
880 { return __e32_atomic_add_ord64(&aDest,aSrc); }
// Atomically clear aClearMask bits and set aSetMask bits in one operation
// (axo: AND with ~(clear|set), then XOR in the set bits).
881 inline static TUint32 LockedSetClear(TUint32& aDest, TUint32 aClearMask, TUint32 aSetMask)
882 { return __e32_atomic_axo_ord32(&aDest,~(aClearMask|aSetMask),aSetMask); }
883 inline static TUint16 LockedSetClear16(TUint16& aDest, TUint16 aClearMask, TUint16 aSetMask) /**< @internalComponent */
884 { return __e32_atomic_axo_ord16(&aDest,TUint16(~(aClearMask|aSetMask)),aSetMask); }
885 inline static TUint8 LockedSetClear8(TUint8& aDest, TUint8 aClearMask, TUint8 aSetMask)
886 { return __e32_atomic_axo_ord8(&aDest,TUint8(~(aClearMask|aSetMask)),aSetMask); }
// Threshold add ("tas"): judging by AddIfGe/AddIfLt below, the arguments are
// (target, limit, add-if->=limit, add-if-<limit). So SafeInc increments only
// when the count is already >= 1, and SafeDec decrements only when >= 1;
// a count of zero (or below) is left untouched. Confirm in e32atomics.h.
887 inline static TInt SafeInc(TInt& aCount)
888 { return __e32_atomic_tas_ord32(&aCount,1,1,0); }
889 inline static TInt SafeDec(TInt& aCount)
890 { return __e32_atomic_tas_ord32(&aCount,1,-1,0); }
891 inline static TInt AddIfGe(TInt& aCount, TInt aLimit, TInt aInc) /**< @internalComponent */
892 { return __e32_atomic_tas_ord32(&aCount,aLimit,aInc,0); }
893 inline static TInt AddIfLt(TInt& aCount, TInt aLimit, TInt aInc) /**< @internalComponent */
894 { return __e32_atomic_tas_ord32(&aCount,aLimit,0,aInc); }
// Atomic exchange: store the new value, return the previous one.
895 inline static TAny* SafeSwap(TAny* aNewValue, TAny*& aPtr)
896 { return __e32_atomic_swp_ord_ptr(&aPtr, aNewValue); }
897 inline static TUint8 SafeSwap8(TUint8 aNewValue, TUint8& aPtr)
898 { return __e32_atomic_swp_ord8(&aPtr, aNewValue); }
899 inline static TUint16 SafeSwap16(TUint16 aNewValue, TUint16& aPtr) /**< @internalComponent */
900 { return __e32_atomic_swp_ord16(&aPtr, aNewValue); }
// Compare-and-swap: TRUE when *aPtr equalled aExpected and was replaced by
// aNew. NOTE(review): aExpected is taken by value, so - unlike
// std::atomic-style CAS - the caller never sees the observed value.
901 inline static TBool CompareAndSwap(TAny*& aPtr, TAny* aExpected, TAny* aNew) /**< @internalComponent */
902 { return __e32_atomic_cas_ord_ptr(&aPtr, &aExpected, aNew); }
903 inline static TBool CompareAndSwap8(TUint8& aPtr, TUint8 aExpected, TUint8 aNew) /**< @internalComponent */
904 { return __e32_atomic_cas_ord8(&aPtr, (TUint8*)&aExpected, (TUint8)aNew); }
905 inline static TBool CompareAndSwap16(TUint16& aPtr, TUint16 aExpected, TUint16 aNew) /**< @internalComponent */
906 { return __e32_atomic_cas_ord16(&aPtr, (TUint16*)&aExpected, (TUint16)aNew); }
// Overloads of SafeSwap / CompareAndSwap for plain 32-bit integer types.
907 inline static TUint32 SafeSwap(TUint32 aNewValue, TUint32& aPtr) /**< @internalComponent */
908 { return __e32_atomic_swp_ord32(&aPtr, aNewValue); }
909 inline static TUint SafeSwap(TUint aNewValue, TUint& aPtr) /**< @internalComponent */
910 { return __e32_atomic_swp_ord32(&aPtr, aNewValue); }
911 inline static TInt SafeSwap(TInt aNewValue, TInt& aPtr) /**< @internalComponent */
912 { return __e32_atomic_swp_ord32(&aPtr, aNewValue); }
913 inline static TBool CompareAndSwap(TUint32& aPtr, TUint32 aExpected, TUint32 aNew) /**< @internalComponent */
914 { return __e32_atomic_cas_ord32(&aPtr, &aExpected, aNew); }
915 inline static TBool CompareAndSwap(TUint& aPtr, TUint aExpected, TUint aNew) /**< @internalComponent */
916 { return __e32_atomic_cas_ord32(&aPtr, (TUint32*)&aExpected, (TUint32)aNew); }
917 inline static TBool CompareAndSwap(TInt& aPtr, TInt aExpected, TInt aNew) /**< @internalComponent */
918 { return __e32_atomic_cas_ord32(&aPtr, (TUint32*)&aExpected, (TUint32)aNew); }
922 IMPORT_C static NThread* CurrentThread();
923 IMPORT_C static TInt CurrentCpu(); /**< @internalComponent */
924 IMPORT_C static TInt NumberOfCpus(); /**< @internalComponent */
925 IMPORT_C static void LockSystem();
926 IMPORT_C static void UnlockSystem();
927 IMPORT_C static TBool FlashSystem();
928 IMPORT_C static void WaitForAnyRequest();
929 IMPORT_C static void Sleep(TUint32 aTime);
930 IMPORT_C static void Exit();
931 IMPORT_C static void DeferredExit();
932 IMPORT_C static void YieldTimeslice(); /**< @internalComponent */
933 IMPORT_C static void RotateReadyList(TInt aPriority);
934 IMPORT_C static void RotateReadyList(TInt aPriority, TInt aCpu); /**< @internalTechnology */
935 IMPORT_C static void RecordIntLatency(TInt aLatency, TInt aIntMask); /**< @internalTechnology */
936 IMPORT_C static void RecordThreadLatency(TInt aLatency); /**< @internalTechnology */
937 IMPORT_C static TUint32 TickCount();
938 IMPORT_C static TInt TickPeriod();
939 IMPORT_C static TInt TimerTicks(TInt aMilliseconds);
940 IMPORT_C static TInt TimesliceTicks(TUint32 aMicroseconds); /**< @internalTechnology */
941 IMPORT_C static TInt CurrentContext();
942 IMPORT_C static TUint32 FastCounter();
943 IMPORT_C static TInt FastCounterFrequency();
944 static void Init0(TAny* aVariantData);
945 static void Init(NThread* aThread, SNThreadCreateInfo& anInfo);
946 IMPORT_C static TBool KernelLocked(TInt aCount=0); /**< @internalTechnology */
947 IMPORT_C static NFastMutex* HeldFastMutex(); /**< @internalTechnology */
949 IMPORT_C static SCpuIdleHandler* CpuIdleHandler(); /**< @internalTechnology */
950 static void NotifyCrash(const TAny* a0, TInt a1); /**< @internalTechnology */
951 IMPORT_C static TBool Crashed();
952 static TUint32 IdleGenerationCount();
955 typedef void (*TRescheduleCallback)(NThread*);
956 IMPORT_C static void SchedulerHooks(TLinAddr& aStart, TLinAddr& aEnd);
957 IMPORT_C static void InsertSchedulerHooks();
958 IMPORT_C static void RemoveSchedulerHooks();
959 IMPORT_C static void SetRescheduleCallback(TRescheduleCallback aCallback);
963 /** Create a fast semaphore
968 inline NFastSemaphore::NFastSemaphore(NThreadBase* aThread)
970 iOwningThread(aThread ? aThread : (NThreadBase*)NKern::CurrentThread())