llvm.org GIT mirror llvm / 7141782
Now that we have atomics support properly detected by configure, use it to implement Atomic.h. This expunges the code previously imported from libatomic_ops.
git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@72077 91177308-0d34-0410-b5e6-96231b3b80d8
Owen Anderson, 10 years ago
2 changed files with 23 additions and 157 deletions.
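The change replaces the hand-rolled, per-architecture inline assembly inherited from libatomic_ops with compiler and OS primitives: GCC builds now use __sync_synchronize() and __sync_val_compare_and_swap(), MSVC builds use MemoryBarrier() and InterlockedCompareExchange(), and single-threaded builds (ENABLE_THREADS == 0) fall back to plain loads and stores. As a rough usage illustration only, the sketch below builds a toy spinlock on top of the resulting llvm::sys API; the SpinAcquire/SpinRelease helpers and the lock word are hypothetical and not part of this commit.

#include "llvm/System/Atomic.h"

// Hypothetical example, not part of the commit: a toy spinlock built on the
// CompareAndSwap/MemoryFence primitives declared in Atomic.h.
static llvm::sys::cas_flag lock_word = 0;

static void SpinAcquire() {
  // CompareAndSwap(ptr, new_value, old_value) returns the value that was in
  // *ptr before the attempt; the swap happened iff that value == old_value.
  while (llvm::sys::CompareAndSwap(&lock_word, 1, 0) != 0)
    ;                            // spin until we swapped 0 -> 1
  llvm::sys::MemoryFence();      // keep the critical section after the acquire
}

static void SpinRelease() {
  llvm::sys::MemoryFence();      // flush the critical section before releasing
  lock_word = 0;
}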
 llvm/projects/sample/autoconf
 CellSPU backend    llvm/lib/Target/CellSPU/README.txt
 Google Test        llvm/utils/unittest/googletest
-Atomics Library    llvm/include/llvm/System/Atomic.h
llvm/include/llvm/System/Atomic.h:

 //
 // This file declares the llvm::sys atomic operations.
 //
-// Portions of this file use code from libatomic_ops, for which the following
-// license applies:
-//
-// Copyright (c) 2003 by Hewlett-Packard Company. All rights reserved.
-//
-// Permission is hereby granted, free of charge, to any person obtaining a copy
-// of this software and associated documentation files (the "Software"), to deal
-// in the Software without restriction, including without limitation the rights
-// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-// copies of the Software, and to permit persons to whom the Software is
-// furnished to do so, subject to the following conditions:
-//
-// The above copyright notice and this permission notice shall be included in
-// all copies or substantial portions of the Software.
-//
-// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-// SOFTWARE.
-//
 //===----------------------------------------------------------------------===//

 #ifndef LLVM_SYSTEM_ATOMIC_H
 #define LLVM_SYSTEM_ATOMIC_H
 
-#include <stdint.h>
-
-#if defined(_HPUX_SOURCE) && defined(__ia64)
-#include
-#elif defined(_MSC_VER)
+#if defined(_MSC_VER)
 #include <windows.h>
-#endif // defined(_HPUX_SOURCE) && defined(__ia64)
+#endif
 
 
 namespace llvm {
 namespace sys {
 
-inline void CompilerFence() {
-#if defined(__GNUC__) && !defined(__INTEL_COMPILER)
+inline void MemoryFence() {
+#if !defined(ENABLE_THREADS) || ENABLE_THREADS == 0
+# if defined(__GNUC__)
   __asm__ __volatile__("" : : : "memory");
-#elif defined(_MSC_VER)
+# elif defined(_MSC_VER)
   __asm { };
-#elif defined(__INTEL_COMPILER)
-  __memory_barrier(); /* Too strong? IA64-only? */
+# else
+# error No memory fence implementation for your platform!
+# endif
 #else
-  /* We conjecture that the following usually gives us the right */
-  /* semantics or an error. */
-  asm("");
-#endif // defined(__GNUC__) && !defined(__INTEL_COMPILER)
+# if defined(__GNUC__)
+  __sync_synchronize();
+# elif defined(_MSC_VER)
+  MemoryBarrier();
+# else
+# error No memory fence implementation for your platform!
+# endif
+#endif
 }
 
 #if !defined(ENABLE_THREADS) || ENABLE_THREADS == 0
-inline void MemoryFence() {
-  CompilerFence();
-}
-
-typedef uint32_t cas_flag;
+typedef unsigned long cas_flag;
 inline cas_flag CompareAndSwap(cas_flag* dest, cas_flag exc, cas_flag c) {
   cas_flag result = *dest;
   if (result == c)
     *dest = exc;
   return result;
 }
-
 #elif defined(__GNUC__)
-
-inline void MemoryFence() {
-# if defined(__i386__) || defined(__x86_64__)
-# if defined(__SSE2__)
-  __asm__ __volatile__("mfence" : : : "memory");
-# else
-  unsigned char dummy = 0;
-  volatile unsigned char* addr = &dummy;
-  unsigned char oldval;
-  __asm__ __volatile__("xchgb %0, %1" : "=r"(oldval),
-                       "=m"(*addr), "0"(0xff), "m"(*addr) : "memory");
-# endif // defined(__SSE2__)
-# elif defined(__ia64__)
-  __asm__ __volatile__("mf" : : : "memory");
-# elif defined(__alpha__)
-  __asm__ __volatile__("mb" : : : "memory");
-# elif defined(__sparc__)
-  __asm__ __volatile__("membar #StoreStore | #LoadStore | #LoadLoad | #StoreLoad");
-# elif defined(__powerpc__) || defined(__ppc__)
-  __asm__ __volatile__("sync" : : : "memory");
-# elif defined(__arm__)
-  __asm__ __volatile__ ("mcr p15, 0, r0, c7, c10, 5 @ dmb");
-# endif
-} // defined(__i386__) || defined(__x86_64__)
-
 typedef unsigned long cas_flag;
 inline cas_flag CompareAndSwap(cas_flag* ptr,
                                cas_flag new_value,
                                cas_flag old_value) {
-  cas_flag prev;
-# if defined(__i386__) || defined(__x86_64__)
-  __asm__ __volatile__("lock; cmpxchgl %1,%2"
-                       : "=a" (prev)
-                       : "q" (new_value), "m" (*ptr), "0" (old_value)
-                       : "memory");
-# elif defined(__ia64__)
-  MemoryFence();
-# if defined(_ILP32)
-  __asm__("zxt4 %1=%1": "=r"(prev) : "0"(prev));
-  __asm__ __volatile__("addp4 %1=0,%1;;\n"
-                       "mov ar.ccv=%[old] ;; cmpxchg 4"
-                       ".acq %0=[%1],%[new_val],ar.ccv"
-                       : "=r"(prev) "1"(addr),
-                       : "=r"(addr), [new_value]"r"(new_value), [old_value]"r"(old_value)
-                       : "memory");
-# else
-  __asm__ __volatile__(
-                       "mov ar.ccv=%[old] ;; cmpxchg 8"
-                       ".acq %0=[%1],%[new_val],ar.ccv"
-                       : "=r"(prev)
-                       : "r"(ptr), [new_value]"r"(new_value),
-                         [old_value]"r"(old_value)
-                       : "memory");
-# endif // defined(_ILP32)
-# elif defined(__alpha__)
-  cas_flag was_equal;
-  __asm__ __volatile__(
-      "1: ldq_l %0,%1\n"
-      " cmpeq %0,%4,%2\n"
-      " mov %3,%0\n"
-      " beq %2,2f\n"
-      " stq_c %0,%1\n"
-      " beq %0,1b\n"
-      "2:\n"
-      : "=&r" (prev), "=m" (*ptr), "=&r" (was_equal)
-      : "r" (new_value), "Ir" (old_value)
-      : "memory");
-#elif defined(__sparc__)
-#error No CAS implementation for SPARC yet.
-#elif defined(__powerpc__) || defined(__ppc__)
-  int result = 0;
-  __asm__ __volatile__(
-      "1:lwarx %0,0,%2\n"   /* load and reserve */
-      "cmpw %0, %4\n"       /* if load is not equal to */
-      "bne 2f\n"            /* old, fail */
-      "stwcx. %3,0,%2\n"    /* else store conditional */
-      "bne- 1b\n"           /* retry if lost reservation */
-      "li %1,1\n"           /* result = 1; */
-      "2:\n"
-      : "=&r"(prev), "=&r"(result)
-      : "r"(ptr), "r"(new_value), "r"(old_value), "1"(result)
-      : "memory", "cc");
-#elif defined(__arm__)
-  int result;
-  __asm__ __volatile__ (
-      "\n"
-      "0:\t"
-      "ldr %1,[%2] \n\t"
-      "mov %0,#0 \n\t"
-      "cmp %1,%4 \n\t"
-      "bne 1f \n\t"
-      "swp %0,%3,[%2] \n\t"
-      "cmp %1,%0 \n\t"
-      "swpne %1,%0,[%2] \n\t"
-      "bne 0b \n\t"
-      "mov %0,#1 \n"
-      "1:\n\t"
-      ""
-      : "=&r"(result), "=&r"(prev)
-      : "r" (ptr), "r" (new_value), "r" (old_value)
-      : "cc", "memory");
-#endif // defined(__i386__)
-  return prev;
+  return __sync_val_compare_and_swap(ptr, old_value, new_value);
 }
-
 #elif defined(_MSC_VER) && _M_IX86 > 400
-inline void MemoryFence() {
-  LONG dummy = 0;
-  InterlockedExchange((LONG volatile *)&dummy, (LONG)0);
-}
-
-typedef DWORD cas_flag;
+typedef LONG cas_flag;
 inline cas_flag CompareAndSwap(cas_flag* ptr,
                                cas_flag new_value,
                                cas_flag old_value) {
-  /* FIXME - This is nearly useless on win64. */
-  /* Use InterlockedCompareExchange64 for win64? */
-  return InterlockedCompareExchange((DWORD volatile *)addr,
-                                    (DWORD)new_value, (DWORD)old_value);
+  return InterlockedCompareExchange(ptr, new_value, old_value);
 }
 #else
-#error No atomics implementation found for your platform.
-#endif // !defined(ENABLE_THREADS) || ENABLE_THREADS == 0
+# error No compare-and-swap implementation for your platform!
+#endif
 
 }
 }
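One detail worth noting in the GCC path: the wrapper and the builtin take their arguments in different orders. CompareAndSwap(ptr, new_value, old_value) forwards to __sync_val_compare_and_swap(ptr, old_value, new_value), and both return the value that was stored at *ptr before the operation. A minimal sketch of that equivalence, assuming a GCC-compatible compiler; the variable names are illustrative only:

unsigned long w1 = 0, w2 = 0;
// LLVM wrapper: (ptr, new_value, old_value)
unsigned long before1 = llvm::sys::CompareAndSwap(&w1, 5, 0);
// GCC builtin:  (ptr, old_value, new_value) -- same effect, swapped order
unsigned long before2 = __sync_val_compare_and_swap(&w2, 0UL, 5UL);
// Afterwards w1 == 5 and w2 == 5; before1 == 0 and before2 == 0.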