Update contrib.
1 // Copyright (c) 1995-2009 Nokia Corporation and/or its subsidiary(-ies).
2 // All rights reserved.
3 // This component and the accompanying materials are made available
4 // under the terms of the License "Eclipse Public License v1.0"
5 // which accompanies this distribution, and is available
6 // at the URL "http://www.eclipse.org/legal/epl-v10.html".
8 // Initial Contributors:
9 // Nokia Corporation - initial contribution.
14 // e32\include\u32exec.h
16 // WARNING: This file contains some APIs which are internal and are subject
17 // to change without notice. Such APIs should therefore not be used
18 // outside the Kernel and Hardware Services package.
31 #ifdef __KERNEL_MODE__
// Kernel-side: 64-bit exec values use the kernel's Int64, timestamps TTimeK.
32 #define EXEC_INT64 Int64
33 #define EXEC_TIME TTimeK
// NOTE(review): the #else separating the kernel-mode and user-mode branches
// is not visible in this chunk -- user-side both types map to TInt64.
35 #define EXEC_INT64 TInt64
36 #define EXEC_TIME TInt64
// __EXECDECL__ marks the executive-call stub functions. It expands to
// __NAKED__, so each stub body consists solely of the inline assembly
// emitted by the EXEC macros below, including the return instruction.
// NOTE(review): the surrounding conditional selecting between the two
// definitions is not visible in this chunk.
40 //#define __EXECDECL__
42 #define __EXECDECL__ __NAKED__
47 // Executive call macros for WINS
// Bit 23 of the executive number, OR'd in by the FAST_/SLOW_ macros below,
// selects the fast-exec dispatch path; slow-exec calls leave it clear.
49 #define EXECUTIVE_FAST 0x00800000
50 #define EXECUTIVE_SLOW 0x00000000
// Common emulator dispatch stub:
//   edx = address of the first caller argument ([esp+4] skips the return
//         address pushed by the call into the stub),
//   ecx = executive number including the fast/slow flag,
// then jump indirectly through TheDispatcher (defined elsewhere --
// presumably installed by the emulator at startup; confirm).
52 #define __DISPATCH(func) \
53 __asm lea edx, [esp + 0x4] \
54 __asm mov ecx, (func) \
55 __asm jmp [TheDispatcher]
// On the emulator every call arity expands to the same stub -- the macros
// differ only in the executive number encoding. Arguments are presumably
// fetched by the dispatcher through the stack pointer passed in edx
// (TODO confirm against the dispatcher implementation).
58 #define FAST_EXEC0(n) __DISPATCH((n)|EXECUTIVE_FAST)
59 #define FAST_EXEC1(n) __DISPATCH((n)|EXECUTIVE_FAST)
60 #define SLOW_EXEC0(n) __DISPATCH((n)|EXECUTIVE_SLOW)
61 #define SLOW_EXEC1(n) __DISPATCH((n)|EXECUTIVE_SLOW)
62 #define SLOW_EXEC2(n) __DISPATCH((n)|EXECUTIVE_SLOW)
63 #define SLOW_EXEC3(n) __DISPATCH((n)|EXECUTIVE_SLOW)
64 #define SLOW_EXEC4(n) __DISPATCH((n)|EXECUTIVE_SLOW)
// Kernel-side (K...) variants are identical to the user-side ones on the
// emulator: both modes enter the executive through the same dispatcher.
66 #define KFAST_EXEC0(n) __DISPATCH((n)|EXECUTIVE_FAST)
67 #define KFAST_EXEC1(n) __DISPATCH((n)|EXECUTIVE_FAST)
68 #define KSLOW_EXEC0(n) __DISPATCH((n)|EXECUTIVE_SLOW)
69 #define KSLOW_EXEC1(n) __DISPATCH((n)|EXECUTIVE_SLOW)
70 #define KSLOW_EXEC2(n) __DISPATCH((n)|EXECUTIVE_SLOW)
71 #define KSLOW_EXEC3(n) __DISPATCH((n)|EXECUTIVE_SLOW)
72 #define KSLOW_EXEC4(n) __DISPATCH((n)|EXECUTIVE_SLOW)
74 #elif defined(__CPU_X86)
76 // Executive call macros for X86
// GCC-syntax inline-assembly variants. eax carries the executive number
// (an immediate, via the "i" constraint); int 0x20 is the fast-exec gate
// and int 0x21 the slow-exec gate. Arguments are copied from the caller's
// stack into ecx (arg 1) and edx (arg 2) before the trap. Each macro ends
// with ret, consistent with the stubs being __NAKED__ (see __EXECDECL__).
78 #define FAST_EXEC0(n) asm("mov eax, %0": :"i"(n)); \
79 asm("int 0x20 \n ret")
80 #define FAST_EXEC1(n) asm("mov eax, %0": :"i"(n)); \
81 asm("mov ecx, [esp+4] \n int 0x20 \n ret")
82 #define SLOW_EXEC0(n) asm("mov eax, %0": :"i"(n)); \
83 asm("int 0x21 \n ret")
84 #define SLOW_EXEC1(n) asm("mov eax, %0": :"i"(n)); \
85 asm("mov ecx, [esp+4] \n int 0x21 \n ret")
86 #define SLOW_EXEC2(n) asm("mov eax, %0": :"i"(n)); \
87 asm("mov ecx, [esp+4] \n mov edx, [esp+8] \n int 0x21 \n ret")
88 #define SLOW_EXEC3(n) asm("mov eax, %0": :"i"(n)); \
90 "mov ecx, [esp+8] \n" \
91 "mov edx, [esp+12]\n" \
92 "mov ebx, [esp+16]\n" \
97 #define SLOW_EXEC4(n) asm("mov eax, %0": :"i"(n)); \
100 "mov ecx, [esp+12]\n" \
101 "mov edx, [esp+16]\n" \
102 "mov ebx, [esp+20]\n" \
103 "mov esi, [esp+24]\n" \
// MSVC-syntax (_asm) variants of the same traps: eax = executive number,
// ecx/edx = first/second stack arguments, int 20h = fast-exec gate,
// int 21h = slow-exec gate, followed by ret (naked-stub style, as above).
109 #define FAST_EXEC0(n) _asm mov eax, n _asm int 20h _asm ret
110 #define FAST_EXEC1(n) _asm mov eax, n _asm mov ecx, [esp+4] _asm int 20h _asm ret
112 #define SLOW_EXEC0(n) _asm mov eax, n _asm int 21h _asm ret
113 #define SLOW_EXEC1(n) _asm mov eax, n _asm mov ecx, [esp+4] _asm int 21h _asm ret
114 #define SLOW_EXEC2(n) _asm mov eax, n _asm mov ecx, [esp+4] _asm mov edx, [esp+8] _asm int 21h _asm ret
116 #define SLOW_EXEC3(n) _asm mov eax, n \
118 _asm mov ecx, [esp+8] \
119 _asm mov edx, [esp+12] \
120 _asm mov ebx, [esp+16] \
125 #define SLOW_EXEC4(n) _asm mov eax, n \
128 _asm mov ecx, [esp+12] \
129 _asm mov edx, [esp+16] \
130 _asm mov ebx, [esp+20] \
131 _asm mov esi, [esp+24] \
// On X86 the kernel-side (K...) macros are plain aliases of the user-side
// ones: the same software interrupts are used from both modes.
138 #define KFAST_EXEC0(n) FAST_EXEC0(n)
139 #define KFAST_EXEC1(n) FAST_EXEC1(n)
140 #define KSLOW_EXEC0(n) SLOW_EXEC0(n)
141 #define KSLOW_EXEC1(n) SLOW_EXEC1(n)
142 #define KSLOW_EXEC2(n) SLOW_EXEC2(n)
143 #define KSLOW_EXEC3(n) SLOW_EXEC3(n)
144 #define KSLOW_EXEC4(n) SLOW_EXEC4(n)
146 #elif defined(__CPU_ARM)
148 // Executive call macros for ARM
// Fast/slow selector flag OR'd into the SWI number (bit 23), matching the
// executive-number encoding used on the other architectures above.
150 #define EXECUTIVE_FAST 0x00800000
151 #define EXECUTIVE_SLOW 0x00000000
153 #define __DISPATCH(n) \
154 asm("swi %a0" : : "i" (n)); \
// All arities map to the same SWI-based __DISPATCH: no argument marshalling
// is emitted, presumably because the ARM calling convention already places
// the arguments in r0-r3 where the executive expects them -- TODO confirm.
157 #define FAST_EXEC0(n) __DISPATCH((n)|EXECUTIVE_FAST)
158 #define FAST_EXEC1(n) __DISPATCH((n)|EXECUTIVE_FAST)
159 #define SLOW_EXEC0(n) __DISPATCH((n)|EXECUTIVE_SLOW)
160 #define SLOW_EXEC1(n) __DISPATCH((n)|EXECUTIVE_SLOW)
161 #define SLOW_EXEC2(n) __DISPATCH((n)|EXECUTIVE_SLOW)
162 #define SLOW_EXEC3(n) __DISPATCH((n)|EXECUTIVE_SLOW)
163 #define SLOW_EXEC4(n) __DISPATCH((n)|EXECUTIVE_SLOW)
165 #define __KDISPATCH(n) \
166 asm("stmfd sp!, {ip,lr} "); \
167 asm("swi %a0" : : "i" (n)); \
// Kernel-side variants route through __KDISPATCH, which (unlike __DISPATCH)
// first saves ip and lr on the stack before issuing the SWI.
170 #define KFAST_EXEC0(n) __KDISPATCH((n)|EXECUTIVE_FAST)
171 #define KFAST_EXEC1(n) __KDISPATCH((n)|EXECUTIVE_FAST)
172 #define KSLOW_EXEC0(n) __KDISPATCH((n)|EXECUTIVE_SLOW)
173 #define KSLOW_EXEC1(n) __KDISPATCH((n)|EXECUTIVE_SLOW)
174 #define KSLOW_EXEC2(n) __KDISPATCH((n)|EXECUTIVE_SLOW)
175 #define KSLOW_EXEC3(n) __KDISPATCH((n)|EXECUTIVE_SLOW)
176 #define KSLOW_EXEC4(n) __KDISPATCH((n)|EXECUTIVE_SLOW)
// "_NR" (no-return-sequence) dispatch: emits only the SWI itself, with no
// return-to-caller instruction afterwards. NOTE(review): presumably for use
// where the compiler generates the function epilogue -- confirm at call
// sites; the tail of the plain __DISPATCH is not visible in this chunk.
178 #define __DISPATCH_NR(n) \
179 asm("swi %a0" : : "i" (n));
181 #define FAST_EXEC0_NR(n) __DISPATCH_NR((n)|EXECUTIVE_FAST)
182 #define FAST_EXEC1_NR(n) __DISPATCH_NR((n)|EXECUTIVE_FAST)
183 #define SLOW_EXEC0_NR(n) __DISPATCH_NR((n)|EXECUTIVE_SLOW)
184 #define SLOW_EXEC1_NR(n) __DISPATCH_NR((n)|EXECUTIVE_SLOW)
185 #define SLOW_EXEC2_NR(n) __DISPATCH_NR((n)|EXECUTIVE_SLOW)
186 #define SLOW_EXEC3_NR(n) __DISPATCH_NR((n)|EXECUTIVE_SLOW)
187 #define SLOW_EXEC4_NR(n) __DISPATCH_NR((n)|EXECUTIVE_SLOW)
// Kernel-side no-return-sequence variant; unlike __KDISPATCH above it does
// not save ip/lr around the SWI.
189 #define __KDISPATCH_NR(n) \
190 asm("swi %a0" : : "i" (n));
192 #define KFAST_EXEC0_NR(n) __KDISPATCH_NR((n)|EXECUTIVE_FAST)
193 #define KFAST_EXEC1_NR(n) __KDISPATCH_NR((n)|EXECUTIVE_FAST)
194 #define KSLOW_EXEC0_NR(n) __KDISPATCH_NR((n)|EXECUTIVE_SLOW)
195 #define KSLOW_EXEC1_NR(n) __KDISPATCH_NR((n)|EXECUTIVE_SLOW)
196 #define KSLOW_EXEC2_NR(n) __KDISPATCH_NR((n)|EXECUTIVE_SLOW)
197 #define KSLOW_EXEC3_NR(n) __KDISPATCH_NR((n)|EXECUTIVE_SLOW)
198 #define KSLOW_EXEC4_NR(n) __KDISPATCH_NR((n)|EXECUTIVE_SLOW)
204 #ifdef __LEAVE_EQUALS_THROW__
205 // Hide TTrap to catch unwary uses of the old cleanup
206 // mechanism at compile time
208 #endif //__LEAVE_EQUALS_THROW__
210 #include <exec_enum.h>
211 #include <e32btrace.h>
212 #include <exec_user.h>