// Copyright (c) 2008-2009 Nokia Corporation and/or its subsidiary(-ies).
// All rights reserved.
// This component and the accompanying materials are made available
// under the terms of the License "Eclipse Public License v1.0"
// which accompanies this distribution, and is available
// at the URL "http://www.eclipse.org/legal/epl-v10.html".
//
// Initial Contributors:
// Nokia Corporation - initial contribution.
//
// Contributors:
//
// Description:
// e32\common\arm\atomic_32_v6.h
// 32 bit atomic operations on V6 and V6K processors
// Also 8 and 16 bit atomic operations on V6K processors
// Also 8, 16 and 32 bit load/store on all processors
//

#include "atomic_ops.h"
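
// This header is stamped out many times: atomic_ops.h defines __OPERATION__,
// __TYPE__, __DATA_SIZE__, __fname__ and the __*_INST__ macros, includes this
// file once per operation/size combination, then includes it a final time to
// clean up.  As an illustrative sketch (assuming __fname__ pastes the pieces
// into the usual __e32_atomic_<op>_<ord><size> names), one pass through the
// __OP_RMW1__ block below with __OPERATION__=add and __DATA_SIZE__=32 would
// emit:
//
//     extern "C" TUint32 __e32_atomic_add_rlx32(volatile TAny* a, TUint32 v);
//     extern "C" TUint32 __e32_atomic_add_acq32(volatile TAny* a, TUint32 v);
//     extern "C" TUint32 __e32_atomic_add_rel32(volatile TAny* a, TUint32 v);
//     extern "C" TUint32 __e32_atomic_add_ord32(volatile TAny* a, TUint32 v);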

#if defined(__OP_LOAD__)
extern "C" EXPORT_C __NAKED__ __TYPE__ __fname__(__OPERATION__,acq,__DATA_SIZE__)(const volatile TAny* /*a*/)
    {
    // R0=a
    // return value in R0
    __LDR_INST__( ," r0, [r0] ");
    __LOCAL_DATA_MEMORY_BARRIER_Z__(r1);
    __JUMP(,lr);
    }

#elif defined(__OP_STORE__)
extern "C" EXPORT_C __NAKED__ __TYPE__ __fname__(__OPERATION__,rel,__DATA_SIZE__)(volatile TAny* /*a*/, __TYPE__ /*v*/)
    {
#ifdef __BARRIERS_NEEDED__    // If no barriers, rel collapses to ord: fall through to the function below
    // R0=a, R1=v
    // return value in R0 equal to v
    __LOCAL_DATA_MEMORY_BARRIER_Z__(r12);
    __STR_INST__( ," r1, [r0] ");
    asm("mov r0, r1 ");
    __JUMP(,lr);
#endif
    }

extern "C" EXPORT_C __NAKED__ __TYPE__ __fname__(__OPERATION__,ord,__DATA_SIZE__)(volatile TAny* /*a*/, __TYPE__ /*v*/)
    {
    // R0=a, R1=v
    // return value in R0 equal to v
    __LOCAL_DATA_MEMORY_BARRIER_Z__(r12);
    __STR_INST__( ," r1, [r0] ");
    __LOCAL_DATA_MEMORY_BARRIER__(r12);
    asm("mov r0, r1 ");
    __JUMP(,lr);
    }
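
// Note the barrier placement: the rel variant needs a barrier only *before*
// the STR (no earlier access may be reordered past the store), while the ord
// variant brackets the store with barriers on both sides.
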
#elif defined(__OP_RMW1__)

#if defined(__OP_SWP__)
#define __SOURCE_REG__      1
#define __DO_PROCESSING__
#else
#define __SOURCE_REG__      2
#if defined(__OP_ADD__)
#define __DO_PROCESSING__   asm("add r2, r0, r1 ");
#elif defined(__OP_AND__)
#define __DO_PROCESSING__   asm("and r2, r0, r1 ");
#elif defined(__OP_IOR__)
#define __DO_PROCESSING__   asm("orr r2, r0, r1 ");
#elif defined(__OP_XOR__)
#define __DO_PROCESSING__   asm("eor r2, r0, r1 ");
#endif
#endif

#define __DO_RMW1_OP__                      \
    asm("mov r12, r0 ");                    \
    asm("1: ");                             \
    __LDREX_INST__(0,12);                   \
    __DO_PROCESSING__                       \
    __STREX_INST__(3,__SOURCE_REG__,12);    \
    asm("cmp r3, #0 ");                     \
    asm("bne 1b ");
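
// A sketch of the retry loop above in C-like pseudocode (illustration only,
// not the emitted code):
//
//     do  {
//         oldv = load_exclusive(a);              // LDREX, result in r0
//         newv = oldv OP v;                      // __DO_PROCESSING__, into r2
//         } while (!store_exclusive(newv, a));   // STREX status in r3
//
// For swap (__OP_SWP__) there is no processing step: the operand in r1 is
// stored directly, which is why __SOURCE_REG__ selects r1 rather than r2.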

extern "C" EXPORT_C __NAKED__ __TYPE__ __fname__(__OPERATION__,rel,__DATA_SIZE__)(volatile TAny* /*a*/, __TYPE__ /*v*/)
    {
    // R0=a, R1=v
    // return value in R0
#ifdef __BARRIERS_NEEDED__    // If no barriers, all ordering variants collapse to same function
    __LOCAL_DATA_MEMORY_BARRIER_Z__(r12);
#endif
    }

extern "C" EXPORT_C __NAKED__ __TYPE__ __fname__(__OPERATION__,rlx,__DATA_SIZE__)(volatile TAny* /*a*/, __TYPE__ /*v*/)
    {
    // R0=a, R1=v
    // return value in R0
#ifdef __BARRIERS_NEEDED__    // If no barriers, all ordering variants collapse to same function
    __DO_RMW1_OP__
    __JUMP(,lr);
#endif
    }

extern "C" EXPORT_C __NAKED__ __TYPE__ __fname__(__OPERATION__,ord,__DATA_SIZE__)(volatile TAny* /*a*/, __TYPE__ /*v*/)
    {
    // R0=a, R1=v
    // return value in R0
#ifdef __BARRIERS_NEEDED__    // If no barriers, all ordering variants collapse to same function
    __LOCAL_DATA_MEMORY_BARRIER_Z__(r12);
#endif
    }

extern "C" EXPORT_C __NAKED__ __TYPE__ __fname__(__OPERATION__,acq,__DATA_SIZE__)(volatile TAny* /*a*/, __TYPE__ /*v*/)
    {
    // R0=a, R1=v
    // return value in R0
    __DO_RMW1_OP__
    __LOCAL_DATA_MEMORY_BARRIER__(r3);
    __JUMP(,lr);
    }
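
// When __BARRIERS_NEEDED__ is defined, rel executes just a barrier and then
// falls off the end of its __NAKED__ body into rlx immediately below it, and
// ord likewise falls into acq; when it is not defined, every body above is
// empty except acq's, so all four variants fall through to the same code.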

#undef __DO_RMW1_OP__
#undef __SOURCE_REG__
#undef __DO_PROCESSING__

#elif defined(__OP_CAS__)

#define __DO_CAS_OP__                       \
    __LDR_INST__( ," r12, [r1] ");          \
    asm("1: ");                             \
    __LDREX_INST__(3,0);                    \
    asm("cmp r3, r12 ");                    \
    asm("bne 2f ");                         \
    __STREX_INST__(3,2,0);                  \
    asm("cmp r3, #0 ");                     \
    asm("bne 1b ");                         \
    asm("2: ");                             \
    __STR_INST__(ne, "r3, [r1] ");          \
    asm("movne r0, #0 ");                   \
    asm("moveq r0, #1 ");

extern "C" EXPORT_C __NAKED__ TBool __fname__(__OPERATION__,rel,__DATA_SIZE__)(volatile TAny* /*a*/, __TYPE__ * /*q*/, __TYPE__ /*v*/)
    {
    // R0=a, R1=q, R2=v
    // return value in R0
#ifdef __BARRIERS_NEEDED__    // If no barriers, all ordering variants collapse to same function
    __LOCAL_DATA_MEMORY_BARRIER_Z__(r12);
#endif
    }

extern "C" EXPORT_C __NAKED__ TBool __fname__(__OPERATION__,rlx,__DATA_SIZE__)(volatile TAny* /*a*/, __TYPE__ * /*q*/, __TYPE__ /*v*/)
    {
    // R0=a, R1=q, R2=v
    // return value in R0
#ifdef __BARRIERS_NEEDED__    // If no barriers, all ordering variants collapse to same function
    __DO_CAS_OP__
    __JUMP(,lr);
#endif
    }

extern "C" EXPORT_C __NAKED__ TBool __fname__(__OPERATION__,ord,__DATA_SIZE__)(volatile TAny* /*a*/, __TYPE__ * /*q*/, __TYPE__ /*v*/)
    {
    // R0=a, R1=q, R2=v
    // return value in R0
#ifdef __BARRIERS_NEEDED__    // If no barriers, all ordering variants collapse to same function
    __LOCAL_DATA_MEMORY_BARRIER_Z__(r12);
#endif
    }

extern "C" EXPORT_C __NAKED__ TBool __fname__(__OPERATION__,acq,__DATA_SIZE__)(volatile TAny* /*a*/, __TYPE__ * /*q*/, __TYPE__ /*v*/)
    {
    // R0=a, R1=q, R2=v
    // return value in R0
    __DO_CAS_OP__
    __LOCAL_DATA_MEMORY_BARRIER__(r3);
    __JUMP(,lr);
    }

#undef __DO_CAS_OP__

#elif defined(__OP_AXO__)

#define __SAVE_REGS__       asm("str r4, [sp, #-4]! ");
#define __RESTORE_REGS__    asm("ldr r4, [sp], #4 ");

#define __DO_AXO_OP__                       \
    asm("mov r12, r0 ");                    \
    asm("1: ");                             \
    __LDREX_INST__(0,12);                   \
    asm("and r4, r0, r1 ");                 \
    asm("eor r4, r4, r2 ");                 \
    __STREX_INST__(3,4,12);                 \
    asm("cmp r3, #0 ");                     \
    asm("bne 1b ");

extern "C" EXPORT_C __NAKED__ __TYPE__ __fname__(__OPERATION__,rel,__DATA_SIZE__)(volatile TAny* /*a*/, __TYPE__ /*u*/, __TYPE__ /*v*/)
    {
    // R0=a, R1=u, R2=v
    // return value in R0
#ifdef __BARRIERS_NEEDED__    // If no barriers, all ordering variants collapse to same function
    __LOCAL_DATA_MEMORY_BARRIER_Z__(r12);
#endif
    }

extern "C" EXPORT_C __NAKED__ __TYPE__ __fname__(__OPERATION__,rlx,__DATA_SIZE__)(volatile TAny* /*a*/, __TYPE__ /*u*/, __TYPE__ /*v*/)
    {
    // R0=a, R1=u, R2=v
    // return value in R0
#ifdef __BARRIERS_NEEDED__    // If no barriers, all ordering variants collapse to same function
    __SAVE_REGS__
    __DO_AXO_OP__
    __RESTORE_REGS__
    __JUMP(,lr);
#endif
    }

extern "C" EXPORT_C __NAKED__ __TYPE__ __fname__(__OPERATION__,ord,__DATA_SIZE__)(volatile TAny* /*a*/, __TYPE__ /*u*/, __TYPE__ /*v*/)
    {
    // R0=a, R1=u, R2=v
    // return value in R0
#ifdef __BARRIERS_NEEDED__    // If no barriers, all ordering variants collapse to same function
    __LOCAL_DATA_MEMORY_BARRIER_Z__(r12);
#endif
    }

extern "C" EXPORT_C __NAKED__ __TYPE__ __fname__(__OPERATION__,acq,__DATA_SIZE__)(volatile TAny* /*a*/, __TYPE__ /*u*/, __TYPE__ /*v*/)
    {
    // R0=a, R1=u, R2=v
    // return value in R0
    __SAVE_REGS__
    __DO_AXO_OP__
    __LOCAL_DATA_MEMORY_BARRIER__(r3);
    __RESTORE_REGS__
    __JUMP(,lr);
    }

#undef __SAVE_REGS__
#undef __RESTORE_REGS__
#undef __DO_AXO_OP__

#elif defined(__OP_RMW3__)

#define __SAVE_REGS__       asm("stmfd sp!, {r4-r5} ");
#define __RESTORE_REGS__    asm("ldmfd sp!, {r4-r5} ");

#if defined(__OP_TAU__)
#define __COND_GE__         "cs"
#define __COND_LT__         "cc"
#define __DO_SIGN_EXTEND__
#elif defined(__OP_TAS__)
#define __COND_GE__         "ge"
#define __COND_LT__         "lt"
#define __DO_SIGN_EXTEND__  __SIGN_EXTEND__(r0)
#endif

#define __DO_RMW3_OP__                      \
    asm("mov r12, r0 ");                    \
    asm("1: ");                             \
    __LDREX_INST__(0,12);                   \
    __DO_SIGN_EXTEND__                      \
    asm("cmp r0, r1 ");                     \
    asm("add" __COND_GE__ " r4, r0, r2 ");  \
    asm("add" __COND_LT__ " r4, r0, r3 ");  \
    __STREX_INST__(5,4,12);                 \
    asm("cmp r5, #0 ");                     \
    asm("bne 1b ");

extern "C" EXPORT_C __NAKED__ __TYPE__ __fname__(__OPERATION__,rel,__DATA_SIZE__)(volatile TAny* /*a*/, __TYPE__ /*t*/, __TYPE__ /*u*/, __TYPE__ /*v*/)
    {
    // R0=a, R1=t, R2=u, R3=v
    // return value in R0
#ifdef __BARRIERS_NEEDED__    // If no barriers, all ordering variants collapse to same function
    __LOCAL_DATA_MEMORY_BARRIER_Z__(r12);
#endif
    }

extern "C" EXPORT_C __NAKED__ __TYPE__ __fname__(__OPERATION__,rlx,__DATA_SIZE__)(volatile TAny* /*a*/, __TYPE__ /*t*/, __TYPE__ /*u*/, __TYPE__ /*v*/)
    {
    // R0=a, R1=t, R2=u, R3=v
    // return value in R0
#ifdef __BARRIERS_NEEDED__    // If no barriers, all ordering variants collapse to same function
    __SAVE_REGS__
    __DO_RMW3_OP__
    __RESTORE_REGS__
    __JUMP(,lr);
#endif
    }

extern "C" EXPORT_C __NAKED__ __TYPE__ __fname__(__OPERATION__,ord,__DATA_SIZE__)(volatile TAny* /*a*/, __TYPE__ /*t*/, __TYPE__ /*u*/, __TYPE__ /*v*/)
    {
    // R0=a, R1=t, R2=u, R3=v
    // return value in R0
#ifdef __BARRIERS_NEEDED__    // If no barriers, all ordering variants collapse to same function
    __LOCAL_DATA_MEMORY_BARRIER_Z__(r12);
#endif
    }

extern "C" EXPORT_C __NAKED__ __TYPE__ __fname__(__OPERATION__,acq,__DATA_SIZE__)(volatile TAny* /*a*/, __TYPE__ /*t*/, __TYPE__ /*u*/, __TYPE__ /*v*/)
    {
    // R0=a, R1=t, R2=u, R3=v
    // return value in R0
    __SAVE_REGS__
    __DO_RMW3_OP__
    __LOCAL_DATA_MEMORY_BARRIER__(r5);
    __RESTORE_REGS__
    __JUMP(,lr);
    }

#undef __SAVE_REGS__
#undef __RESTORE_REGS__
#undef __DO_RMW3_OP__
#undef __COND_GE__
#undef __COND_LT__
#undef __DO_SIGN_EXTEND__

#endif

// Second inclusion undefines temporaries
#include "atomic_ops.h"