// os/kernelhwsrv/kernel/eka/common/arm/atomic_ops.h
// (imported from changeset 1:260cb5ec6c19, Tue, 10 Jun 2014 14:32:02 +0200)
//
// Copyright (c) 2008-2009 Nokia Corporation and/or its subsidiary(-ies).
// All rights reserved.
// This component and the accompanying materials are made available
// under the terms of the License "Eclipse Public License v1.0"
// which accompanies this distribution, and is available
// at the URL "http://www.eclipse.org/legal/epl-v10.html".
//
// Initial Contributors:
// Nokia Corporation - initial contribution.
//
// Contributors:
//
// Description:
// e32\common\arm\atomic_ops.h
// 
//
// This header is included repeatedly, once per atomic operation / data size
// combination.  On entry with __OPERATION__ already defined (i.e. a second
// pass after an operation body has been generated), tear down every
// per-operation macro so the header can be re-included with a different
// __OP_*__ / __DATA_SIZE__ configuration.
#ifdef	__OPERATION__

// Operation name and its read-modify-write classification.
#undef	__OPERATION__
#undef	__OP_RMW1__
#undef	__OP_RMW2__
#undef	__OP_RMW3__
#undef	__OP_SIGNED__

// Data-size dependent helpers (type, instruction mnemonics, sign extension).
#undef	__TYPE__
#undef	__SIZE_CODE__
#undef	__LDR_INST__
#undef	__LDRS_INST__
#undef	__STR_INST__
#undef	__LDREX_INST__
#undef	__STREX_INST__
#undef	__SIGN_EXTEND__
#undef	__LOG2_DATA_SIZE__

// Operation-request flags set by the including translation unit.
#undef	__OP_LOAD__
#undef	__OP_STORE__
#undef	__OP_SWP__
#undef	__OP_CAS__
#undef	__OP_ADD__
#undef	__OP_AND__
#undef	__OP_IOR__
#undef	__OP_XOR__
#undef	__OP_AXO__
#undef	__OP_TAU__
#undef	__OP_TAS__

#undef ENSURE_8BYTE_ALIGNMENT
#else	// __OPERATION__

// First pass: map the single requested __OP_*__ flag to the textual
// operation name used to build function names, and classify it by the number
// of value operands it takes:
//   __OP_RMW1__ - read-modify-write with one operand (swp/add/and/ior/xor)
//   __OP_RMW2__ - two operands (axo: AND then XOR)
//   __OP_RMW3__ - three operands (tau/tas: threshold add variants)
// tas additionally needs signed comparison, hence __OP_SIGNED__.
#if defined(__OP_LOAD__)
#define __OPERATION__	load
#elif defined(__OP_STORE__)
#define __OPERATION__	store
#elif defined(__OP_SWP__)
#define __OPERATION__	swp
#define	__OP_RMW1__
#elif defined(__OP_CAS__)
#define __OPERATION__	cas
#elif defined(__OP_ADD__)
#define __OPERATION__	add
#define	__OP_RMW1__
#elif defined(__OP_AND__)
#define __OPERATION__	and
#define	__OP_RMW1__
#elif defined(__OP_IOR__)
#define __OPERATION__	ior
#define	__OP_RMW1__
#elif defined(__OP_XOR__)
#define __OPERATION__	xor
#define	__OP_RMW1__
#elif defined(__OP_AXO__)
#define __OPERATION__	axo
#define	__OP_RMW2__
#elif defined(__OP_TAU__)
#define __OPERATION__	tau
#define	__OP_RMW3__
#elif defined(__OP_TAS__)
#define __OPERATION__	tas
#define	__OP_RMW3__
#define	__OP_SIGNED__
#else
#error Unknown atomic operation
#endif
// Select the data-size dependent machinery from __DATA_SIZE__ (in bits):
//   __LOG2_DATA_SIZE__ - log2 of the size in bits (8->3 ... 64->6)
//   __SIZE_CODE__      - mnemonic size suffix for plain ldr/str ("b"/"h"/""/"d")
//   __LDREX_INST__ / __STREX_INST__ - exclusive access instruction macros,
//                        only available when the CPU supports them (byte/half
//                        word/doubleword forms need ARMv6K)
//   __SIGN_EXTEND__    - shift-left/shift-right pair to sign-extend a
//                        sub-word value held in a 32-bit register
//   __LDR_INST__ / __LDRS_INST__ / __STR_INST__ - conditional load/store
//                        emitters taking a condition code and operand string
//   __TYPE__           - the Symbian integer type, signed iff __OP_SIGNED__
#if __DATA_SIZE__==8
#define	__LOG2_DATA_SIZE__			3
#define __SIZE_CODE__				"b"
#ifdef __CPU_ARM_HAS_LDREX_STREX_V6K
#define	__LDREX_INST__(Rd,Rn)		LDREXB(Rd,Rn)
#define	__STREX_INST__(Rd,Rm,Rn)	STREXB(Rd,Rm,Rn)
#endif
#define	__SIGN_EXTEND__(reg)		asm("mov "#reg ", "#reg ", lsl #24 "); asm("mov "#reg ", "#reg ", asr #24 ");
#define	__LDR_INST__(cc,args)		asm("ldr"#cc "b " args)
#define	__LDRS_INST__(cc,args)		asm("ldr"#cc "sb " args)
#define	__STR_INST__(cc,args)		asm("str"#cc "b " args)
#ifdef	__OP_SIGNED__
#define	__TYPE__					TInt8
#else
#define	__TYPE__					TUint8
#endif
#elif __DATA_SIZE__==16
#define	__LOG2_DATA_SIZE__			4
#define __SIZE_CODE__				"h"
#ifdef __CPU_ARM_HAS_LDREX_STREX_V6K
#define	__LDREX_INST__(Rd,Rn)		LDREXH(Rd,Rn)
#define	__STREX_INST__(Rd,Rm,Rn)	STREXH(Rd,Rm,Rn)
#endif
#define	__SIGN_EXTEND__(reg)		asm("mov "#reg ", "#reg ", lsl #16 "); asm("mov "#reg ", "#reg ", asr #16 ");
#define	__LDR_INST__(cc,args)		asm("ldr"#cc "h " args)
#define	__LDRS_INST__(cc,args)		asm("ldr"#cc "sh " args)
#define	__STR_INST__(cc,args)		asm("str"#cc "h " args)
#ifdef	__OP_SIGNED__
#define	__TYPE__					TInt16
#else
#define	__TYPE__					TUint16
#endif
#elif __DATA_SIZE__==32
#define	__LOG2_DATA_SIZE__			5
#define __SIZE_CODE__				""
// Word-sized exclusives only need baseline ARMv6 ldrex/strex.
#ifdef __CPU_ARM_HAS_LDREX_STREX
#define	__LDREX_INST__(Rd,Rn)		LDREX(Rd,Rn)
#define	__STREX_INST__(Rd,Rm,Rn)	STREX(Rd,Rm,Rn)
#endif
// Already full register width: sign extension is a no-op.
#define	__SIGN_EXTEND__(reg)
#define	__LDR_INST__(cc,args)		asm("ldr"#cc " " args)
#define	__LDRS_INST__(cc,args)		asm("ldr"#cc " " args)
#define	__STR_INST__(cc,args)		asm("str"#cc " " args)
#ifdef	__OP_SIGNED__
#define	__TYPE__					TInt32
#else
#define	__TYPE__					TUint32
#endif
#elif __DATA_SIZE__==64
#define	__LOG2_DATA_SIZE__			6
#define __SIZE_CODE__				"d"
#ifdef __CPU_ARM_HAS_LDREX_STREX_V6K
#define	__LDREX_INST__(Rd,Rn)		LDREXD(Rd,Rn)
#define	__STREX_INST__(Rd,Rm,Rn)	STREXD(Rd,Rm,Rn)
#endif
// NOTE: no __SIGN_EXTEND__/__LDR_INST__/__LDRS_INST__/__STR_INST__ for the
// 64-bit case - 64-bit operations are handled with register pairs elsewhere.
#ifdef	__OP_SIGNED__
#define	__TYPE__					TInt64
#else
#define	__TYPE__					TUint64
#endif
#else
#error Invalid data size
#endif
#if (defined(__GNUC__) && (__GNUC__ >= 3)) || defined(__EABI__)
// Check 8 byte aligned and cause alignment fault if not.
// Doesn't work if alignment checking is disabled but gives consistent behaviour
// between processors with ldrexd etc and these hand coded versions.
// (Setting bit 0 of the address then ldm-ing through it forces a misaligned
// access, so the fault fires deterministically.)
#define ENSURE_8BYTE_ALIGNMENT(rAddr) 		\
	asm("tst r"#rAddr", #0x7 "); 			\
	asm("orrne r"#rAddr", r"#rAddr", #1 "); \
	asm("ldmne r"#rAddr", {r"#rAddr"} ")
#else
// Don't assert on old gcc (arm4) as it is not eabi compliant and this stops 
// kernel booting.
#define ENSURE_8BYTE_ALIGNMENT(rAddr)
#endif
#endif	// __OPERATION__