sl@0: // Copyright (c) 1995-2009 Nokia Corporation and/or its subsidiary(-ies). sl@0: // All rights reserved. sl@0: // This component and the accompanying materials are made available sl@0: // under the terms of the License "Eclipse Public License v1.0" sl@0: // which accompanies this distribution, and is available sl@0: // at the URL "http://www.eclipse.org/legal/epl-v10.html". sl@0: // sl@0: // Initial Contributors: sl@0: // Nokia Corporation - initial contribution. sl@0: // sl@0: // Contributors: sl@0: // sl@0: // Description: sl@0: // e32\include\u32exec.h sl@0: // sl@0: // WARNING: This file contains some APIs which are internal and are subject sl@0: // to change without notice. Such APIs should therefore not be used sl@0: // outside the Kernel and Hardware Services package. sl@0: // sl@0: sl@0: /** sl@0: @file sl@0: @internalComponent sl@0: */ sl@0: sl@0: #ifndef __U32EXEC_H__ sl@0: #define __U32EXEC_H__ sl@0: #include sl@0: #include sl@0: sl@0: #ifdef __KERNEL_MODE__ sl@0: #define EXEC_INT64 Int64 sl@0: #define EXEC_TIME TTimeK sl@0: #else sl@0: #define EXEC_INT64 TInt64 sl@0: #define EXEC_TIME TInt64 sl@0: #endif sl@0: sl@0: //#ifdef __SYMC__ sl@0: //#define __EXECDECL__ sl@0: //#else sl@0: #define __EXECDECL__ __NAKED__ sl@0: //#endif sl@0: sl@0: #if defined(__WINS__) sl@0: sl@0: // Executive call macros for WINS sl@0: sl@0: #define EXECUTIVE_FAST 0x00800000 sl@0: #define EXECUTIVE_SLOW 0x00000000 sl@0: sl@0: #define __DISPATCH(func) \ sl@0: __asm lea edx, [esp + 0x4] \ sl@0: __asm mov ecx, (func) \ sl@0: __asm jmp [TheDispatcher] sl@0: sl@0: sl@0: #define FAST_EXEC0(n) __DISPATCH((n)|EXECUTIVE_FAST) sl@0: #define FAST_EXEC1(n) __DISPATCH((n)|EXECUTIVE_FAST) sl@0: #define SLOW_EXEC0(n) __DISPATCH((n)|EXECUTIVE_SLOW) sl@0: #define SLOW_EXEC1(n) __DISPATCH((n)|EXECUTIVE_SLOW) sl@0: #define SLOW_EXEC2(n) __DISPATCH((n)|EXECUTIVE_SLOW) sl@0: #define SLOW_EXEC3(n) __DISPATCH((n)|EXECUTIVE_SLOW) sl@0: #define SLOW_EXEC4(n) __DISPATCH((n)|EXECUTIVE_SLOW) sl@0: sl@0: #define KFAST_EXEC0(n) __DISPATCH((n)|EXECUTIVE_FAST) sl@0: #define KFAST_EXEC1(n) __DISPATCH((n)|EXECUTIVE_FAST) sl@0: #define KSLOW_EXEC0(n) __DISPATCH((n)|EXECUTIVE_SLOW) sl@0: #define KSLOW_EXEC1(n) __DISPATCH((n)|EXECUTIVE_SLOW) sl@0: #define KSLOW_EXEC2(n) __DISPATCH((n)|EXECUTIVE_SLOW) sl@0: #define KSLOW_EXEC3(n) __DISPATCH((n)|EXECUTIVE_SLOW) sl@0: #define KSLOW_EXEC4(n) __DISPATCH((n)|EXECUTIVE_SLOW) sl@0: sl@0: #elif defined(__CPU_X86) sl@0: sl@0: // Executive call macros for X86 sl@0: #ifdef __GCC32__ sl@0: #define FAST_EXEC0(n) asm("mov eax, %0": :"i"(n)); \ sl@0: asm("int 0x20 \n ret") sl@0: #define FAST_EXEC1(n) asm("mov eax, %0": :"i"(n)); \ sl@0: asm("mov ecx, [esp+4] \n int 0x20 \n ret") sl@0: #define SLOW_EXEC0(n) asm("mov eax, %0": :"i"(n)); \ sl@0: asm("int 0x21 \n ret") sl@0: #define SLOW_EXEC1(n) asm("mov eax, %0": :"i"(n)); \ sl@0: asm("mov ecx, [esp+4] \n int 0x21 \n ret") sl@0: #define SLOW_EXEC2(n) asm("mov eax, %0": :"i"(n)); \ sl@0: asm("mov ecx, [esp+4] \n mov edx, [esp+8] \n int 0x21 \n ret") sl@0: #define SLOW_EXEC3(n) asm("mov eax, %0": :"i"(n)); \ sl@0: asm("push ebx \n" \ sl@0: "mov ecx, [esp+8] \n" \ sl@0: "mov edx, [esp+12]\n" \ sl@0: "mov ebx, [esp+16]\n" \ sl@0: "int 0x21 \n" \ sl@0: "pop ebx \n" \ sl@0: "ret") sl@0: sl@0: #define SLOW_EXEC4(n) asm("mov eax, %0": :"i"(n)); \ sl@0: asm("push ebx \n" \ sl@0: "push esi \n" \ sl@0: "mov ecx, [esp+12]\n" \ sl@0: "mov edx, [esp+16]\n" \ sl@0: "mov ebx, [esp+20]\n" \ sl@0: "mov esi, [esp+24]\n" \ sl@0: "int 0x21 \n" \ sl@0: "pop esi \n" \ sl@0: "pop ebx \n" \ sl@0: "ret") sl@0: #else sl@0: #define FAST_EXEC0(n) _asm mov eax, n _asm int 20h _asm ret sl@0: #define FAST_EXEC1(n) _asm mov eax, n _asm mov ecx, [esp+4] _asm int 20h _asm ret sl@0: sl@0: #define SLOW_EXEC0(n) _asm mov eax, n _asm int 21h _asm ret sl@0: #define SLOW_EXEC1(n) _asm mov eax, n _asm mov ecx, [esp+4] _asm int 21h _asm ret sl@0: #define SLOW_EXEC2(n) _asm mov eax, n _asm mov ecx, [esp+4] _asm mov edx, [esp+8] _asm int 21h _asm ret sl@0: sl@0: #define SLOW_EXEC3(n) _asm mov eax, n \ sl@0: _asm push ebx \ sl@0: _asm mov ecx, [esp+8] \ sl@0: _asm mov edx, [esp+12] \ sl@0: _asm mov ebx, [esp+16] \ sl@0: _asm int 21h \ sl@0: _asm pop ebx \ sl@0: _asm ret sl@0: sl@0: #define SLOW_EXEC4(n) _asm mov eax, n \ sl@0: _asm push ebx \ sl@0: _asm push esi \ sl@0: _asm mov ecx, [esp+12] \ sl@0: _asm mov edx, [esp+16] \ sl@0: _asm mov ebx, [esp+20] \ sl@0: _asm mov esi, [esp+24] \ sl@0: _asm int 21h \ sl@0: _asm pop esi \ sl@0: _asm pop ebx \ sl@0: _asm ret sl@0: #endif sl@0: sl@0: #define KFAST_EXEC0(n) FAST_EXEC0(n) sl@0: #define KFAST_EXEC1(n) FAST_EXEC1(n) sl@0: #define KSLOW_EXEC0(n) SLOW_EXEC0(n) sl@0: #define KSLOW_EXEC1(n) SLOW_EXEC1(n) sl@0: #define KSLOW_EXEC2(n) SLOW_EXEC2(n) sl@0: #define KSLOW_EXEC3(n) SLOW_EXEC3(n) sl@0: #define KSLOW_EXEC4(n) SLOW_EXEC4(n) sl@0: sl@0: #elif defined(__CPU_ARM) sl@0: sl@0: // Executive call macros for ARM sl@0: sl@0: #define EXECUTIVE_FAST 0x00800000 sl@0: #define EXECUTIVE_SLOW 0x00000000 sl@0: sl@0: #define __DISPATCH(n) \ sl@0: asm("swi %a0" : : "i" (n)); \ sl@0: __JUMP(,lr); sl@0: sl@0: #define FAST_EXEC0(n) __DISPATCH((n)|EXECUTIVE_FAST) sl@0: #define FAST_EXEC1(n) __DISPATCH((n)|EXECUTIVE_FAST) sl@0: #define SLOW_EXEC0(n) __DISPATCH((n)|EXECUTIVE_SLOW) sl@0: #define SLOW_EXEC1(n) __DISPATCH((n)|EXECUTIVE_SLOW) sl@0: #define SLOW_EXEC2(n) __DISPATCH((n)|EXECUTIVE_SLOW) sl@0: #define SLOW_EXEC3(n) __DISPATCH((n)|EXECUTIVE_SLOW) sl@0: #define SLOW_EXEC4(n) __DISPATCH((n)|EXECUTIVE_SLOW) sl@0: sl@0: #define __KDISPATCH(n) \ sl@0: asm("stmfd sp!, {ip,lr} "); \ sl@0: asm("swi %a0" : : "i" (n)); \ sl@0: __POPRET("ip,"); sl@0: sl@0: #define KFAST_EXEC0(n) __KDISPATCH((n)|EXECUTIVE_FAST) sl@0: #define KFAST_EXEC1(n) __KDISPATCH((n)|EXECUTIVE_FAST) sl@0: #define KSLOW_EXEC0(n) __KDISPATCH((n)|EXECUTIVE_SLOW) sl@0: #define KSLOW_EXEC1(n) __KDISPATCH((n)|EXECUTIVE_SLOW) sl@0: #define KSLOW_EXEC2(n) __KDISPATCH((n)|EXECUTIVE_SLOW) sl@0: #define KSLOW_EXEC3(n) __KDISPATCH((n)|EXECUTIVE_SLOW) sl@0: #define KSLOW_EXEC4(n) __KDISPATCH((n)|EXECUTIVE_SLOW) sl@0: sl@0: #define __DISPATCH_NR(n) \ sl@0: asm("swi %a0" : : "i" (n)); sl@0: sl@0: #define FAST_EXEC0_NR(n) __DISPATCH_NR((n)|EXECUTIVE_FAST) sl@0: #define FAST_EXEC1_NR(n) __DISPATCH_NR((n)|EXECUTIVE_FAST) sl@0: #define SLOW_EXEC0_NR(n) __DISPATCH_NR((n)|EXECUTIVE_SLOW) sl@0: #define SLOW_EXEC1_NR(n) __DISPATCH_NR((n)|EXECUTIVE_SLOW) sl@0: #define SLOW_EXEC2_NR(n) __DISPATCH_NR((n)|EXECUTIVE_SLOW) sl@0: #define SLOW_EXEC3_NR(n) __DISPATCH_NR((n)|EXECUTIVE_SLOW) sl@0: #define SLOW_EXEC4_NR(n) __DISPATCH_NR((n)|EXECUTIVE_SLOW) sl@0: sl@0: #define __KDISPATCH_NR(n) \ sl@0: asm("swi %a0" : : "i" (n)); sl@0: sl@0: #define KFAST_EXEC0_NR(n) __KDISPATCH_NR((n)|EXECUTIVE_FAST) sl@0: #define KFAST_EXEC1_NR(n) __KDISPATCH_NR((n)|EXECUTIVE_FAST) sl@0: #define KSLOW_EXEC0_NR(n) __KDISPATCH_NR((n)|EXECUTIVE_SLOW) sl@0: #define KSLOW_EXEC1_NR(n) __KDISPATCH_NR((n)|EXECUTIVE_SLOW) sl@0: #define KSLOW_EXEC2_NR(n) __KDISPATCH_NR((n)|EXECUTIVE_SLOW) sl@0: #define KSLOW_EXEC3_NR(n) __KDISPATCH_NR((n)|EXECUTIVE_SLOW) sl@0: #define KSLOW_EXEC4_NR(n) __KDISPATCH_NR((n)|EXECUTIVE_SLOW) sl@0: sl@0: #else sl@0: #error Unknown CPU sl@0: #endif sl@0: sl@0: #ifdef __LEAVE_EQUALS_THROW__ sl@0: // Hide TTrap to catch unwary uses of the old cleanup sl@0: // mechanism at compile time sl@0: class TTrap; sl@0: #endif //__LEAVE_EQUALS_THROW__ sl@0: sl@0: #include sl@0: #include sl@0: #include sl@0: sl@0: #endif