/*
    Copyright 2005-2010 Intel Corporation.  All Rights Reserved.

    This file is part of Threading Building Blocks.

    Threading Building Blocks is free software; you can redistribute it
    and/or modify it under the terms of the GNU General Public License
    version 2 as published by the Free Software Foundation.

    Threading Building Blocks is distributed in the hope that it will be
    useful, but WITHOUT ANY WARRANTY; without even the implied warranty
    of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    GNU General Public License for more details.

    You should have received a copy of the GNU General Public License
    along with Threading Building Blocks; if not, write to the Free Software
    Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA

    As a special exception, you may use this file as part of a free software
    library without restriction.  Specifically, if other files instantiate
    templates or use macros or inline functions from this file, or you compile
    this file and link it with other files to produce an executable, this
    file does not by itself cause the resulting executable to be covered by
    the GNU General Public License.  This exception does not however
    invalidate any other reasons why the executable file might be covered by
    the GNU General Public License.
*/
#ifndef __TBB_machine_H
#error Do not include this file directly; include tbb_machine.h instead
#endif

#include <stdint.h>
#include <unistd.h>

#include <sched.h> // sched_yield
#define __TBB_WORDSIZE 8
#define __TBB_BIG_ENDIAN 1
// Compiler-only fence: prevents compiler reordering without emitting an instruction
#define __TBB_release_consistency_helper() __asm__ __volatile__ ("": : :"memory")

// Full memory fence
inline void __TBB_rel_acq_fence() { __asm__ __volatile__("membar #LoadLoad|#LoadStore|#StoreStore|#StoreLoad": : : "memory"); }
//--------------------------------------------------
// Compare and swap
//--------------------------------------------------
/**
 * Atomic CAS for 32 bit values: if *ptr==comparand, then *ptr=value; returns the old *ptr
 * @param ptr pointer to value in memory to be swapped with value if *ptr==comparand
 * @param value value to assign *ptr to if *ptr==comparand
 * @param comparand value to compare with *ptr
 * @return value originally in memory at ptr, regardless of success
 */
static inline int32_t __TBB_machine_cmpswp4(volatile void *ptr, int32_t value, int32_t comparand ){
    int32_t result;
    __asm__ __volatile__(
            "cas\t [%5], %4, %1\n" // if [ptr]==comparand, swap [ptr] with the value register; the register always receives the old [ptr]
            : "=m"(*(int32_t *)ptr), "=r"(result)
            : "m"(*(int32_t *)ptr), "1"(value), "r"(comparand), "r"(ptr)
            : "memory");
    return result;
}
/**
 * Atomic CAS for 64 bit values: if *ptr==comparand, then *ptr=value; returns the old *ptr
 * @param ptr pointer to value in memory to be swapped with value if *ptr==comparand
 * @param value value to assign *ptr to if *ptr==comparand
 * @param comparand value to compare with *ptr
 * @return value originally in memory at ptr, regardless of success
 */
static inline int64_t __TBB_machine_cmpswp8(volatile void *ptr, int64_t value, int64_t comparand ){
    int64_t result;
    __asm__ __volatile__(
            "casx\t [%5], %4, %1\n" // 64-bit variant of cas; same semantics
            : "=m"(*(int64_t *)ptr), "=r"(result)
            : "m"(*(int64_t *)ptr), "1"(value), "r"(comparand), "r"(ptr)
            : "memory");
    return result;
}
//---------------------------------------------------
// Fetch and add
//---------------------------------------------------
/**
 * Atomic fetch and add for 32 bit values, implemented as a CAS retry loop
 * @param ptr pointer to value to add addend to
 * @param addend value to add to *ptr
 * @return value at ptr before addend was added
 */
static inline int32_t __TBB_machine_fetchadd4(volatile void *ptr, int32_t addend){
    int32_t result;
    __asm__ __volatile__ (
            "0:\t add\t %3, %4, %0\n"   // do the addition
            "\t cas\t [%2], %3, %0\n"   // cas to store the result in memory
            "\t cmp\t %3, %0\n"         // check whether the value read back matches the expected one
            "\t bne,a,pn\t %%icc, 0b\n" // if not, retry
            "\t mov %0, %3\n"           // branch delay slot: the value just read becomes the new expected value
            : "=&r"(result), "=m"(*(int32_t *)ptr)
            : "r"(ptr), "r"(*(int32_t *)ptr), "r"(addend), "m"(*(int32_t *)ptr)
            : "ccr", "memory");
    return result;
}
/**
 * Atomic fetch and add for 64 bit values, implemented as a CAS retry loop
 * @param ptr pointer to value to add addend to
 * @param addend value to add to *ptr
 * @return value at ptr before addend was added
 */
static inline int64_t __TBB_machine_fetchadd8(volatile void *ptr, int64_t addend){
    int64_t result;
    __asm__ __volatile__ (
            "0:\t add\t %3, %4, %0\n"    // do the addition
            "\t casx\t [%2], %3, %0\n"   // cas to store the result in memory
            "\t cmp\t %3, %0\n"          // check whether the value read back matches the expected one
            "\t bne,a,pn\t %%xcc, 0b\n"  // if not, retry
            "\t mov %0, %3\n"            // branch delay slot: the value just read becomes the new expected value
            : "=&r"(result), "=m"(*(int64_t *)ptr)
            : "r"(ptr), "r"(*(int64_t *)ptr), "r"(addend), "m"(*(int64_t *)ptr)
            : "ccr", "memory");
    return result;
}
//--------------------------------------------------------
// Logarithm (base two, integer)
//--------------------------------------------------------
static inline int64_t __TBB_machine_lg( uint64_t x ) {
    uint64_t count;
    // smear the highest set bit into every lower position
    x |= (x >> 1);  x |= (x >> 2);  x |= (x >> 4);
    x |= (x >> 8);  x |= (x >> 16); x |= (x >> 32);
    // the population count of the smeared value is log2(x)+1
    __asm__ ("popc %1, %0" : "=r"(count) : "r"(x) );
    return count-1;
}
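
// Worked example (illustrative): for x = 40 = 101000b, the smear produces
// 111111b, popc counts six set bits, and the function returns 6-1 = 5,
// i.e. floor(log2(40)).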
//--------------------------------------------------------
static inline void __TBB_machine_or( volatile void *ptr, uint64_t value ) {
    __asm__ __volatile__ (
            "0:\t or\t %2, %3, %%g1\n"   // do the bitwise OR
            "\t casx\t [%1], %2, %%g1\n" // cas to store the result in memory
            "\t cmp\t %2, %%g1\n"        // check whether the value read back matches the expected one
            "\t bne,a,pn\t %%xcc, 0b\n"  // if not, retry
            "\t mov %%g1, %2\n"          // branch delay slot: the value just read becomes the new expected value
            : "=m"(*(int64_t *)ptr)
            : "r"(ptr), "r"(*(int64_t *)ptr), "r"(value), "m"(*(int64_t *)ptr)
            : "ccr", "g1", "memory");
}
static inline void __TBB_machine_and( volatile void *ptr, uint64_t value ) {
    __asm__ __volatile__ (
            "0:\t and\t %2, %3, %%g1\n"  // do the bitwise AND
            "\t casx\t [%1], %2, %%g1\n" // cas to store the result in memory
            "\t cmp\t %2, %%g1\n"        // check whether the value read back matches the expected one
            "\t bne,a,pn\t %%xcc, 0b\n"  // if not, retry
            "\t mov %%g1, %2\n"          // branch delay slot: the value just read becomes the new expected value
            : "=m"(*(int64_t *)ptr)
            : "r"(ptr), "r"(*(int64_t *)ptr), "r"(value), "m"(*(int64_t *)ptr)
            : "ccr", "g1", "memory");
}
static inline void __TBB_machine_pause( int32_t delay ) {
    // do nothing; the call is inlined away, so an empty body costs nothing
    (void)delay;
}
// ldstub atomically stores 0xff into the lock byte and returns its previous
// value. The generic trylockbyte stores 0x01 instead; either works, because
// all that matters is that 0 means unlocked.
static inline bool __TBB_machine_trylockbyte(unsigned char &flag){
    unsigned char result;
    __asm__ __volatile__ (
            "ldstub\t [%2], %0\n"
            : "=r"(result), "=m"(flag)
            : "r"(&flag), "m"(flag)
            : "memory");
    return result == 0;
}
// Machine specific atomic operations

//#define __TBB_CompareAndSwap1(P,V,C) __TBB_machine_cmpswp1(P,V,C) // use generic version in tbb_machine.h
//#define __TBB_CompareAndSwap2(P,V,C) __TBB_machine_cmpswp2(P,V,C) // use generic version in tbb_machine.h
#define __TBB_CompareAndSwap4(P,V,C) __TBB_machine_cmpswp4(P,V,C)
#define __TBB_CompareAndSwap8(P,V,C) __TBB_machine_cmpswp8(P,V,C)
#define __TBB_CompareAndSwapW(P,V,C) __TBB_machine_cmpswp8(P,V,C)

//#define __TBB_FetchAndAdd1(P,V) __TBB_machine_fetchadd1(P,V) // use generic version in tbb_machine.h
//#define __TBB_FetchAndAdd2(P,V) __TBB_machine_fetchadd2(P,V) // use generic version in tbb_machine.h
#define __TBB_FetchAndAdd4(P,V) __TBB_machine_fetchadd4(P,V)
#define __TBB_FetchAndAdd8(P,V) __TBB_machine_fetchadd8(P,V)
#define __TBB_FetchAndAddW(P,V) __TBB_machine_fetchadd8(P,V)

// use generic version in tbb_machine.h
//#define __TBB_FetchAndStore1(P,V) __TBB_machine_fetchstore1(P,V)
//#define __TBB_FetchAndStore2(P,V) __TBB_machine_fetchstore2(P,V)
//#define __TBB_FetchAndStore4(P,V) __TBB_machine_fetchstore4(P,V)
//#define __TBB_FetchAndStore8(P,V) __TBB_machine_fetchstore8(P,V)
//#define __TBB_FetchAndStoreW(P,V) __TBB_machine_fetchstore8(P,V)

#define __TBB_Store8(P,V) (*P = V)
#define __TBB_Load8(P) (*P)

#define __TBB_AtomicOR(P,V) __TBB_machine_or(P,V)
#define __TBB_AtomicAND(P,V) __TBB_machine_and(P,V)

// Definition of other functions
#define __TBB_Pause(V) __TBB_machine_pause(V)
#define __TBB_Log2(V) __TBB_machine_lg(V)

// Special atomic functions
#define __TBB_FetchAndAddWrelease(P,V) __TBB_FetchAndAddW(P,V)
#define __TBB_FetchAndIncrementWacquire(P) __TBB_FetchAndAddW(P,1)
#define __TBB_FetchAndDecrementWrelease(P) __TBB_FetchAndAddW(P,-1)
// Definition of lock functions
// The generic __TBB_LockByte in tbb_machine.h simply retries __TBB_TryLockByte,
// so no native implementation is needed.
#undef __TBB_LockByte

#define __TBB_TryLockByte(P) __TBB_machine_trylockbyte(P)

#define __TBB_Yield() sched_yield()