tbb/include/tbb/machine/linux_intel64.h
/*
    Copyright 2005-2011 Intel Corporation.  All Rights Reserved.

    This file is part of Threading Building Blocks.

    Threading Building Blocks is free software; you can redistribute it
    and/or modify it under the terms of the GNU General Public License
    version 2 as published by the Free Software Foundation.

    Threading Building Blocks is distributed in the hope that it will be
    useful, but WITHOUT ANY WARRANTY; without even the implied warranty
    of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    GNU General Public License for more details.

    You should have received a copy of the GNU General Public License
    along with Threading Building Blocks; if not, write to the Free Software
    Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA  02110-1301  USA

    As a special exception, you may use this file as part of a free software
    library without restriction.  Specifically, if other files instantiate
    templates or use macros or inline functions from this file, or you compile
    this file and link it with other files to produce an executable, this
    file does not by itself cause the resulting executable to be covered by
    the GNU General Public License.  This exception does not however
    invalidate any other reasons why the executable file might be covered by
    the GNU General Public License.
*/

#if !defined(__TBB_machine_H) || defined(__TBB_machine_linux_intel64_H)
#error Do not include this file directly; include tbb_machine.h instead
#endif

#define __TBB_machine_linux_intel64_H

#include <stdint.h>
#include <unistd.h>

#define __TBB_WORDSIZE 8
#define __TBB_BIG_ENDIAN 0

#define __TBB_compiler_fence() __asm__ __volatile__("": : :"memory")
#define __TBB_control_consistency_helper() __TBB_compiler_fence()
#define __TBB_acquire_consistency_helper() __TBB_compiler_fence()
#define __TBB_release_consistency_helper() __TBB_compiler_fence()

#ifndef __TBB_full_memory_fence
#define __TBB_full_memory_fence() __asm__ __volatile__("mfence": : :"memory")
#endif
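
/* Usage sketch (illustrative, compiled out; `payload` and `ready` are
   hypothetical names, not part of this header): the full fence keeps the
   payload store globally visible before the flag store, regardless of
   compiler or hardware reordering. */
#if 0
static volatile int32_t payload;
static volatile int32_t ready;
static void example_publish( int32_t value ) {
    payload = value;              // write the data first
    __TBB_full_memory_fence();    // serialize: data before flag
    ready = 1;                    // then signal the consumer
}
#endif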

// Generates size-specific compare-and-swap, fetch-and-add, and fetch-and-store primitives.
#define __TBB_MACHINE_DEFINE_ATOMICS(S,T,X)                                          \
static inline T __TBB_machine_cmpswp##S (volatile void *ptr, T value, T comparand )  \
{                                                                                    \
    T result;                                                                        \
                                                                                     \
    __asm__ __volatile__("lock\ncmpxchg" X " %2,%1"                                  \
                          : "=a"(result), "=m"(*(volatile T*)ptr)                    \
                          : "q"(value), "0"(comparand), "m"(*(volatile T*)ptr)       \
                          : "memory");                                               \
    return result;                                                                   \
}                                                                                    \
                                                                                     \
static inline T __TBB_machine_fetchadd##S(volatile void *ptr, T addend)              \
{                                                                                    \
    T result;                                                                        \
    __asm__ __volatile__("lock\nxadd" X " %0,%1"                                     \
                          : "=r"(result),"=m"(*(volatile T*)ptr)                     \
                          : "0"(addend), "m"(*(volatile T*)ptr)                      \
                          : "memory");                                               \
    return result;                                                                   \
}                                                                                    \
                                                                                     \
static inline T __TBB_machine_fetchstore##S(volatile void *ptr, T value)             \
{                                                                                    \
    T result;                                                                        \
    __asm__ __volatile__("lock\nxchg" X " %0,%1"                                     \
                          : "=r"(result),"=m"(*(volatile T*)ptr)                     \
                          : "0"(value), "m"(*(volatile T*)ptr)                       \
                          : "memory");                                               \
    return result;                                                                   \
}

__TBB_MACHINE_DEFINE_ATOMICS(1,int8_t,"")
__TBB_MACHINE_DEFINE_ATOMICS(2,int16_t,"")
__TBB_MACHINE_DEFINE_ATOMICS(4,int32_t,"")
__TBB_MACHINE_DEFINE_ATOMICS(8,int64_t,"q")

#undef __TBB_MACHINE_DEFINE_ATOMICS
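
/* Usage sketch (illustrative, compiled out): the instantiations above
   generate __TBB_machine_cmpswp{1,2,4,8}, __TBB_machine_fetchadd{1,2,4,8}
   and __TBB_machine_fetchstore{1,2,4,8}. A classic compare-and-swap retry
   loop over the 4-byte variant; `counter` is a hypothetical operand. */
#if 0
static int32_t example_increment( volatile int32_t* counter ) {
    int32_t old_val, new_val;
    do {
        old_val = *counter;
        new_val = old_val + 1;
        // cmpswp returns the value it observed at *counter; the swap
        // took effect only if that value equals the comparand.
    } while ( __TBB_machine_cmpswp4( counter, new_val, old_val ) != old_val );
    return new_val;
}
#endif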

// Index of the most significant set bit, i.e. floor(log2(x)).
// The result is undefined for x==0, since BSR leaves its destination undefined there.
static inline int64_t __TBB_machine_lg( uint64_t x ) {
    int64_t j;
    __asm__ ("bsr %1,%0" : "=r"(j) : "r"(x));
    return j;
}
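
/* Illustrative, compiled out: a few sample values for __TBB_machine_lg. */
#if 0
static void example_lg( void ) {
    int64_t a = __TBB_machine_lg( 1 );    // 0
    int64_t b = __TBB_machine_lg( 64 );   // 6
    int64_t c = __TBB_machine_lg( 100 );  // 6, since 2^6 <= 100 < 2^7
    (void)a; (void)b; (void)c;
}
#endif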

static inline void __TBB_machine_or( volatile void *ptr, uint64_t value ) {
    __asm__ __volatile__("lock\norq %1,%0"
                          : "=m"(*(volatile uint64_t*)ptr)
                          : "r"(value), "m"(*(volatile uint64_t*)ptr)
                          : "memory");
}

static inline void __TBB_machine_and( volatile void *ptr, uint64_t value ) {
    __asm__ __volatile__("lock\nandq %1,%0"
                          : "=m"(*(volatile uint64_t*)ptr)
                          : "r"(value), "m"(*(volatile uint64_t*)ptr)
                          : "memory");
}

#define __TBB_AtomicOR(P,V) __TBB_machine_or(P,V)
#define __TBB_AtomicAND(P,V) __TBB_machine_and(P,V)
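
/* Usage sketch (illustrative, compiled out; `flags` is hypothetical):
   atomically setting and clearing a bit in a shared 64-bit word. */
#if 0
static void example_flags( volatile uint64_t* flags ) {
    __TBB_AtomicOR( flags, (uint64_t)1 << 2 );      // set bit 2
    __TBB_AtomicAND( flags, ~((uint64_t)1 << 2) );  // clear bit 2
}
#endif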

// Definition of other functions
#ifndef __TBB_Pause
// Issue the PAUSE instruction `delay` times; intended for spin-wait loops.
static inline void __TBB_machine_pause( int32_t delay ) {
    for (int32_t i = 0; i < delay; i++) {
       __asm__ __volatile__("pause;");
    }
    return;
}
#define __TBB_Pause(V) __TBB_machine_pause(V)
#endif /* !__TBB_Pause */
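
/* Usage sketch (illustrative, compiled out; `lock_word` and the backoff
   cap are hypothetical): PAUSE is meant for spin-wait loops, e.g. a
   test-and-set spinlock with simple exponential backoff. */
#if 0
static void example_spin_acquire( volatile int32_t* lock_word ) {
    int32_t backoff = 1;
    // 0 means free; swap in 1 to take the lock.
    while ( __TBB_machine_cmpswp4( lock_word, 1, 0 ) != 0 ) {
        __TBB_Pause( backoff );
        if ( backoff < 16 ) backoff *= 2;  // back off, up to a small cap
    }
}
#endif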

#define __TBB_Log2(V)  __TBB_machine_lg(V)

#define __TBB_USE_FETCHSTORE_AS_FULL_FENCED_STORE   1
#define __TBB_USE_GENERIC_HALF_FENCED_LOAD_STORE    1
#define __TBB_USE_GENERIC_RELAXED_LOAD_STORE        1

// API to retrieve/update FPU control setting
#ifndef __TBB_CPU_CTL_ENV_PRESENT
#define __TBB_CPU_CTL_ENV_PRESENT 1

struct __TBB_cpu_ctl_env_t {
    int     mxcsr;  // SSE control/status register
    short   x87cw;  // x87 FPU control word
};

inline void __TBB_get_cpu_ctl_env ( __TBB_cpu_ctl_env_t* ctl ) {
#if __TBB_ICC_12_0_INL_ASM_FSTCW_BROKEN
    // Store into a local first, then copy, to work around broken
    // fstcw inline assembly in ICC 12.0.
    __TBB_cpu_ctl_env_t loc_ctl;
    __asm__ __volatile__ (
            "stmxcsr %0\n\t"
            "fstcw %1"
            : "=m"(loc_ctl.mxcsr), "=m"(loc_ctl.x87cw)
    );
    *ctl = loc_ctl;
#else
    __asm__ __volatile__ (
            "stmxcsr %0\n\t"
            "fstcw %1"
            : "=m"(ctl->mxcsr), "=m"(ctl->x87cw)
    );
#endif
}
inline void __TBB_set_cpu_ctl_env ( const __TBB_cpu_ctl_env_t* ctl ) {
    __asm__ __volatile__ (
            "ldmxcsr %0\n\t"
            "fldcw %1"
            : : "m"(ctl->mxcsr), "m"(ctl->x87cw)
    );
}
#endif /* !__TBB_CPU_CTL_ENV_PRESENT */
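
/* Usage sketch (illustrative, compiled out): capture the MXCSR and x87
   control words, run code that may change rounding modes or exception
   masks, then restore the saved state. */
#if 0
static void example_fpu_roundtrip( void ) {
    __TBB_cpu_ctl_env_t saved;
    __TBB_get_cpu_ctl_env( &saved );   // save current FPU control settings
    /* ... code that may alter rounding or exception masks ... */
    __TBB_set_cpu_ctl_env( &saved );   // restore them
}
#endif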