1 2014-09-27 Gleb Fotengauer-Malinovskiy <glebfm@altlinux.org>
6 * local_atomic (__always_inline): Rename to...
7 (__libitm_always_inline): ... this.
10 On Mon, Sep 29, 2014 at 03:38:25PM +0200, Jakub Jelinek wrote:
11 > Why do you want to add inline keyword to that? Some inline keywords
12 > are implicit (methods defined inline), so there is no point adding it there.
14 I just didn't get that the redefinition of __always_inline was the source of the problem.
16 local_atomic | 299 +++++++++++++++++++++++++++++------------------------------
17 1 file changed, 149 insertions(+), 150 deletions(-)
19 diff --git a/libitm/local_atomic b/libitm/local_atomic
20 index c3e079f..552b919 100644
21 --- a/libitm/local_atomic
22 +++ b/libitm/local_atomic
24 #ifndef _GLIBCXX_ATOMIC
25 #define _GLIBCXX_ATOMIC 1
27 -#undef __always_inline
28 -#define __always_inline __attribute__((always_inline))
29 +#define __libitm_always_inline __attribute__((always_inline))
31 // #pragma GCC system_header
33 @@ -74,7 +74,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
37 - inline __always_inline memory_order
38 + inline __libitm_always_inline memory_order
39 __calculate_memory_order(memory_order __m) noexcept
41 const bool __cond1 = __m == memory_order_release;
42 @@ -84,13 +84,13 @@ namespace std // _GLIBCXX_VISIBILITY(default)
46 - inline __always_inline void
47 + inline __libitm_always_inline void
48 atomic_thread_fence(memory_order __m) noexcept
50 __atomic_thread_fence (__m);
53 - inline __always_inline void
54 + inline __libitm_always_inline void
55 atomic_signal_fence(memory_order __m) noexcept
57 __atomic_thread_fence (__m);
58 @@ -280,19 +280,19 @@ namespace std // _GLIBCXX_VISIBILITY(default)
59 // Conversion to ATOMIC_FLAG_INIT.
60 atomic_flag(bool __i) noexcept : __atomic_flag_base({ __i }) { }
62 - __always_inline bool
63 + __libitm_always_inline bool
64 test_and_set(memory_order __m = memory_order_seq_cst) noexcept
66 return __atomic_test_and_set (&_M_i, __m);
69 - __always_inline bool
70 + __libitm_always_inline bool
71 test_and_set(memory_order __m = memory_order_seq_cst) volatile noexcept
73 return __atomic_test_and_set (&_M_i, __m);
76 - __always_inline void
77 + __libitm_always_inline void
78 clear(memory_order __m = memory_order_seq_cst) noexcept
80 // __glibcxx_assert(__m != memory_order_consume);
81 @@ -302,7 +302,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
82 __atomic_clear (&_M_i, __m);
85 - __always_inline void
86 + __libitm_always_inline void
87 clear(memory_order __m = memory_order_seq_cst) volatile noexcept
89 // __glibcxx_assert(__m != memory_order_consume);
90 @@ -455,7 +455,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
91 is_lock_free() const volatile noexcept
92 { return __atomic_is_lock_free (sizeof (_M_i), &_M_i); }
94 - __always_inline void
95 + __libitm_always_inline void
96 store(__int_type __i, memory_order __m = memory_order_seq_cst) noexcept
98 // __glibcxx_assert(__m != memory_order_acquire);
99 @@ -465,7 +465,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
100 __atomic_store_n(&_M_i, __i, __m);
103 - __always_inline void
104 + __libitm_always_inline void
105 store(__int_type __i,
106 memory_order __m = memory_order_seq_cst) volatile noexcept
108 @@ -476,7 +476,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
109 __atomic_store_n(&_M_i, __i, __m);
112 - __always_inline __int_type
113 + __libitm_always_inline __int_type
114 load(memory_order __m = memory_order_seq_cst) const noexcept
116 // __glibcxx_assert(__m != memory_order_release);
117 @@ -485,7 +485,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
118 return __atomic_load_n(&_M_i, __m);
121 - __always_inline __int_type
122 + __libitm_always_inline __int_type
123 load(memory_order __m = memory_order_seq_cst) const volatile noexcept
125 // __glibcxx_assert(__m != memory_order_release);
126 @@ -494,21 +494,21 @@ namespace std // _GLIBCXX_VISIBILITY(default)
127 return __atomic_load_n(&_M_i, __m);
130 - __always_inline __int_type
131 + __libitm_always_inline __int_type
132 exchange(__int_type __i,
133 memory_order __m = memory_order_seq_cst) noexcept
135 return __atomic_exchange_n(&_M_i, __i, __m);
138 - __always_inline __int_type
139 + __libitm_always_inline __int_type
140 exchange(__int_type __i,
141 memory_order __m = memory_order_seq_cst) volatile noexcept
143 return __atomic_exchange_n(&_M_i, __i, __m);
146 - __always_inline bool
147 + __libitm_always_inline bool
148 compare_exchange_weak(__int_type& __i1, __int_type __i2,
149 memory_order __m1, memory_order __m2) noexcept
151 @@ -519,7 +519,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
152 return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
155 - __always_inline bool
156 + __libitm_always_inline bool
157 compare_exchange_weak(__int_type& __i1, __int_type __i2,
159 memory_order __m2) volatile noexcept
160 @@ -531,7 +531,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
161 return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
164 - __always_inline bool
165 + __libitm_always_inline bool
166 compare_exchange_weak(__int_type& __i1, __int_type __i2,
167 memory_order __m = memory_order_seq_cst) noexcept
169 @@ -539,7 +539,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
170 __calculate_memory_order(__m));
173 - __always_inline bool
174 + __libitm_always_inline bool
175 compare_exchange_weak(__int_type& __i1, __int_type __i2,
176 memory_order __m = memory_order_seq_cst) volatile noexcept
178 @@ -547,7 +547,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
179 __calculate_memory_order(__m));
182 - __always_inline bool
183 + __libitm_always_inline bool
184 compare_exchange_strong(__int_type& __i1, __int_type __i2,
185 memory_order __m1, memory_order __m2) noexcept
187 @@ -558,7 +558,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
188 return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
191 - __always_inline bool
192 + __libitm_always_inline bool
193 compare_exchange_strong(__int_type& __i1, __int_type __i2,
195 memory_order __m2) volatile noexcept
196 @@ -570,7 +570,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
197 return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
200 - __always_inline bool
201 + __libitm_always_inline bool
202 compare_exchange_strong(__int_type& __i1, __int_type __i2,
203 memory_order __m = memory_order_seq_cst) noexcept
205 @@ -578,7 +578,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
206 __calculate_memory_order(__m));
209 - __always_inline bool
210 + __libitm_always_inline bool
211 compare_exchange_strong(__int_type& __i1, __int_type __i2,
212 memory_order __m = memory_order_seq_cst) volatile noexcept
214 @@ -586,52 +586,52 @@ namespace std // _GLIBCXX_VISIBILITY(default)
215 __calculate_memory_order(__m));
218 - __always_inline __int_type
219 + __libitm_always_inline __int_type
220 fetch_add(__int_type __i,
221 memory_order __m = memory_order_seq_cst) noexcept
222 { return __atomic_fetch_add(&_M_i, __i, __m); }
224 - __always_inline __int_type
225 + __libitm_always_inline __int_type
226 fetch_add(__int_type __i,
227 memory_order __m = memory_order_seq_cst) volatile noexcept
228 { return __atomic_fetch_add(&_M_i, __i, __m); }
230 - __always_inline __int_type
231 + __libitm_always_inline __int_type
232 fetch_sub(__int_type __i,
233 memory_order __m = memory_order_seq_cst) noexcept
234 { return __atomic_fetch_sub(&_M_i, __i, __m); }
236 - __always_inline __int_type
237 + __libitm_always_inline __int_type
238 fetch_sub(__int_type __i,
239 memory_order __m = memory_order_seq_cst) volatile noexcept
240 { return __atomic_fetch_sub(&_M_i, __i, __m); }
242 - __always_inline __int_type
243 + __libitm_always_inline __int_type
244 fetch_and(__int_type __i,
245 memory_order __m = memory_order_seq_cst) noexcept
246 { return __atomic_fetch_and(&_M_i, __i, __m); }
248 - __always_inline __int_type
249 + __libitm_always_inline __int_type
250 fetch_and(__int_type __i,
251 memory_order __m = memory_order_seq_cst) volatile noexcept
252 { return __atomic_fetch_and(&_M_i, __i, __m); }
254 - __always_inline __int_type
255 + __libitm_always_inline __int_type
256 fetch_or(__int_type __i,
257 memory_order __m = memory_order_seq_cst) noexcept
258 { return __atomic_fetch_or(&_M_i, __i, __m); }
260 - __always_inline __int_type
261 + __libitm_always_inline __int_type
262 fetch_or(__int_type __i,
263 memory_order __m = memory_order_seq_cst) volatile noexcept
264 { return __atomic_fetch_or(&_M_i, __i, __m); }
266 - __always_inline __int_type
267 + __libitm_always_inline __int_type
268 fetch_xor(__int_type __i,
269 memory_order __m = memory_order_seq_cst) noexcept
270 { return __atomic_fetch_xor(&_M_i, __i, __m); }
272 - __always_inline __int_type
273 + __libitm_always_inline __int_type
274 fetch_xor(__int_type __i,
275 memory_order __m = memory_order_seq_cst) volatile noexcept
276 { return __atomic_fetch_xor(&_M_i, __i, __m); }
277 @@ -733,7 +733,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
278 is_lock_free() const volatile noexcept
279 { return __atomic_is_lock_free (sizeof (_M_p), &_M_p); }
281 - __always_inline void
282 + __libitm_always_inline void
283 store(__pointer_type __p,
284 memory_order __m = memory_order_seq_cst) noexcept
286 @@ -744,7 +744,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
287 __atomic_store_n(&_M_p, __p, __m);
290 - __always_inline void
291 + __libitm_always_inline void
292 store(__pointer_type __p,
293 memory_order __m = memory_order_seq_cst) volatile noexcept
295 @@ -755,7 +755,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
296 __atomic_store_n(&_M_p, __p, __m);
299 - __always_inline __pointer_type
300 + __libitm_always_inline __pointer_type
301 load(memory_order __m = memory_order_seq_cst) const noexcept
303 // __glibcxx_assert(__m != memory_order_release);
304 @@ -764,7 +764,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
305 return __atomic_load_n(&_M_p, __m);
308 - __always_inline __pointer_type
309 + __libitm_always_inline __pointer_type
310 load(memory_order __m = memory_order_seq_cst) const volatile noexcept
312 // __glibcxx_assert(__m != memory_order_release);
313 @@ -773,21 +773,21 @@ namespace std // _GLIBCXX_VISIBILITY(default)
314 return __atomic_load_n(&_M_p, __m);
317 - __always_inline __pointer_type
318 + __libitm_always_inline __pointer_type
319 exchange(__pointer_type __p,
320 memory_order __m = memory_order_seq_cst) noexcept
322 return __atomic_exchange_n(&_M_p, __p, __m);
325 - __always_inline __pointer_type
326 + __libitm_always_inline __pointer_type
327 exchange(__pointer_type __p,
328 memory_order __m = memory_order_seq_cst) volatile noexcept
330 return __atomic_exchange_n(&_M_p, __p, __m);
333 - __always_inline bool
334 + __libitm_always_inline bool
335 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
337 memory_order __m2) noexcept
338 @@ -799,7 +799,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
339 return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
342 - __always_inline bool
343 + __libitm_always_inline bool
344 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
346 memory_order __m2) volatile noexcept
347 @@ -811,22 +811,22 @@ namespace std // _GLIBCXX_VISIBILITY(default)
348 return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
351 - __always_inline __pointer_type
352 + __libitm_always_inline __pointer_type
353 fetch_add(ptrdiff_t __d,
354 memory_order __m = memory_order_seq_cst) noexcept
355 { return __atomic_fetch_add(&_M_p, __d, __m); }
357 - __always_inline __pointer_type
358 + __libitm_always_inline __pointer_type
359 fetch_add(ptrdiff_t __d,
360 memory_order __m = memory_order_seq_cst) volatile noexcept
361 { return __atomic_fetch_add(&_M_p, __d, __m); }
363 - __always_inline __pointer_type
364 + __libitm_always_inline __pointer_type
365 fetch_sub(ptrdiff_t __d,
366 memory_order __m = memory_order_seq_cst) noexcept
367 { return __atomic_fetch_sub(&_M_p, __d, __m); }
369 - __always_inline __pointer_type
370 + __libitm_always_inline __pointer_type
371 fetch_sub(ptrdiff_t __d,
372 memory_order __m = memory_order_seq_cst) volatile noexcept
373 { return __atomic_fetch_sub(&_M_p, __d, __m); }
374 @@ -870,67 +870,67 @@ namespace std // _GLIBCXX_VISIBILITY(default)
376 is_lock_free() const volatile noexcept { return _M_base.is_lock_free(); }
378 - __always_inline void
379 + __libitm_always_inline void
380 store(bool __i, memory_order __m = memory_order_seq_cst) noexcept
381 { _M_base.store(__i, __m); }
383 - __always_inline void
384 + __libitm_always_inline void
385 store(bool __i, memory_order __m = memory_order_seq_cst) volatile noexcept
386 { _M_base.store(__i, __m); }
388 - __always_inline bool
389 + __libitm_always_inline bool
390 load(memory_order __m = memory_order_seq_cst) const noexcept
391 { return _M_base.load(__m); }
393 - __always_inline bool
394 + __libitm_always_inline bool
395 load(memory_order __m = memory_order_seq_cst) const volatile noexcept
396 { return _M_base.load(__m); }
398 - __always_inline bool
399 + __libitm_always_inline bool
400 exchange(bool __i, memory_order __m = memory_order_seq_cst) noexcept
401 { return _M_base.exchange(__i, __m); }
403 - __always_inline bool
404 + __libitm_always_inline bool
406 memory_order __m = memory_order_seq_cst) volatile noexcept
407 { return _M_base.exchange(__i, __m); }
409 - __always_inline bool
410 + __libitm_always_inline bool
411 compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
412 memory_order __m2) noexcept
413 { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }
415 - __always_inline bool
416 + __libitm_always_inline bool
417 compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
418 memory_order __m2) volatile noexcept
419 { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }
421 - __always_inline bool
422 + __libitm_always_inline bool
423 compare_exchange_weak(bool& __i1, bool __i2,
424 memory_order __m = memory_order_seq_cst) noexcept
425 { return _M_base.compare_exchange_weak(__i1, __i2, __m); }
427 - __always_inline bool
428 + __libitm_always_inline bool
429 compare_exchange_weak(bool& __i1, bool __i2,
430 memory_order __m = memory_order_seq_cst) volatile noexcept
431 { return _M_base.compare_exchange_weak(__i1, __i2, __m); }
433 - __always_inline bool
434 + __libitm_always_inline bool
435 compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
436 memory_order __m2) noexcept
437 { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }
439 - __always_inline bool
440 + __libitm_always_inline bool
441 compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
442 memory_order __m2) volatile noexcept
443 { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }
445 - __always_inline bool
446 + __libitm_always_inline bool
447 compare_exchange_strong(bool& __i1, bool __i2,
448 memory_order __m = memory_order_seq_cst) noexcept
449 { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
451 - __always_inline bool
452 + __libitm_always_inline bool
453 compare_exchange_strong(bool& __i1, bool __i2,
454 memory_order __m = memory_order_seq_cst) volatile noexcept
455 { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
456 @@ -980,11 +980,11 @@ namespace std // _GLIBCXX_VISIBILITY(default)
457 store(_Tp __i, memory_order _m = memory_order_seq_cst) noexcept
458 { __atomic_store(&_M_i, &__i, _m); }
460 - __always_inline void
461 + __libitm_always_inline void
462 store(_Tp __i, memory_order _m = memory_order_seq_cst) volatile noexcept
463 { __atomic_store(&_M_i, &__i, _m); }
465 - __always_inline _Tp
466 + __libitm_always_inline _Tp
467 load(memory_order _m = memory_order_seq_cst) const noexcept
470 @@ -992,7 +992,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
474 - __always_inline _Tp
475 + __libitm_always_inline _Tp
476 load(memory_order _m = memory_order_seq_cst) const volatile noexcept
479 @@ -1000,7 +1000,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
483 - __always_inline _Tp
484 + __libitm_always_inline _Tp
485 exchange(_Tp __i, memory_order _m = memory_order_seq_cst) noexcept
488 @@ -1008,7 +1008,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
492 - __always_inline _Tp
493 + __libitm_always_inline _Tp
495 memory_order _m = memory_order_seq_cst) volatile noexcept
497 @@ -1017,50 +1017,50 @@ namespace std // _GLIBCXX_VISIBILITY(default)
501 - __always_inline bool
502 + __libitm_always_inline bool
503 compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
504 memory_order __f) noexcept
506 return __atomic_compare_exchange(&_M_i, &__e, &__i, true, __s, __f);
509 - __always_inline bool
510 + __libitm_always_inline bool
511 compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
512 memory_order __f) volatile noexcept
514 return __atomic_compare_exchange(&_M_i, &__e, &__i, true, __s, __f);
517 - __always_inline bool
518 + __libitm_always_inline bool
519 compare_exchange_weak(_Tp& __e, _Tp __i,
520 memory_order __m = memory_order_seq_cst) noexcept
521 { return compare_exchange_weak(__e, __i, __m, __m); }
523 - __always_inline bool
524 + __libitm_always_inline bool
525 compare_exchange_weak(_Tp& __e, _Tp __i,
526 memory_order __m = memory_order_seq_cst) volatile noexcept
527 { return compare_exchange_weak(__e, __i, __m, __m); }
529 - __always_inline bool
530 + __libitm_always_inline bool
531 compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
532 memory_order __f) noexcept
534 return __atomic_compare_exchange(&_M_i, &__e, &__i, false, __s, __f);
537 - __always_inline bool
538 + __libitm_always_inline bool
539 compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
540 memory_order __f) volatile noexcept
542 return __atomic_compare_exchange(&_M_i, &__e, &__i, false, __s, __f);
545 - __always_inline bool
546 + __libitm_always_inline bool
547 compare_exchange_strong(_Tp& __e, _Tp __i,
548 memory_order __m = memory_order_seq_cst) noexcept
549 { return compare_exchange_strong(__e, __i, __m, __m); }
551 - __always_inline bool
552 + __libitm_always_inline bool
553 compare_exchange_strong(_Tp& __e, _Tp __i,
554 memory_order __m = memory_order_seq_cst) volatile noexcept
555 { return compare_exchange_strong(__e, __i, __m, __m); }
556 @@ -1153,46 +1153,46 @@ namespace std // _GLIBCXX_VISIBILITY(default)
557 is_lock_free() const volatile noexcept
558 { return _M_b.is_lock_free(); }
560 - __always_inline void
561 + __libitm_always_inline void
562 store(__pointer_type __p,
563 memory_order __m = memory_order_seq_cst) noexcept
564 { return _M_b.store(__p, __m); }
566 - __always_inline void
567 + __libitm_always_inline void
568 store(__pointer_type __p,
569 memory_order __m = memory_order_seq_cst) volatile noexcept
570 { return _M_b.store(__p, __m); }
572 - __always_inline __pointer_type
573 + __libitm_always_inline __pointer_type
574 load(memory_order __m = memory_order_seq_cst) const noexcept
575 { return _M_b.load(__m); }
577 - __always_inline __pointer_type
578 + __libitm_always_inline __pointer_type
579 load(memory_order __m = memory_order_seq_cst) const volatile noexcept
580 { return _M_b.load(__m); }
582 - __always_inline __pointer_type
583 + __libitm_always_inline __pointer_type
584 exchange(__pointer_type __p,
585 memory_order __m = memory_order_seq_cst) noexcept
586 { return _M_b.exchange(__p, __m); }
588 - __always_inline __pointer_type
589 + __libitm_always_inline __pointer_type
590 exchange(__pointer_type __p,
591 memory_order __m = memory_order_seq_cst) volatile noexcept
592 { return _M_b.exchange(__p, __m); }
594 - __always_inline bool
595 + __libitm_always_inline bool
596 compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
597 memory_order __m1, memory_order __m2) noexcept
598 { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
600 - __always_inline bool
601 + __libitm_always_inline bool
602 compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
604 memory_order __m2) volatile noexcept
605 { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
607 - __always_inline bool
608 + __libitm_always_inline bool
609 compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
610 memory_order __m = memory_order_seq_cst) noexcept
612 @@ -1200,7 +1200,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
613 __calculate_memory_order(__m));
616 - __always_inline bool
617 + __libitm_always_inline bool
618 compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
619 memory_order __m = memory_order_seq_cst) volatile noexcept
621 @@ -1208,18 +1208,18 @@ namespace std // _GLIBCXX_VISIBILITY(default)
622 __calculate_memory_order(__m));
625 - __always_inline bool
626 + __libitm_always_inline bool
627 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
628 memory_order __m1, memory_order __m2) noexcept
629 { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
631 - __always_inline bool
632 + __libitm_always_inline bool
633 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
635 memory_order __m2) volatile noexcept
636 { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
638 - __always_inline bool
639 + __libitm_always_inline bool
640 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
641 memory_order __m = memory_order_seq_cst) noexcept
643 @@ -1227,7 +1227,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
644 __calculate_memory_order(__m));
647 - __always_inline bool
648 + __libitm_always_inline bool
649 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
650 memory_order __m = memory_order_seq_cst) volatile noexcept
652 @@ -1235,22 +1235,22 @@ namespace std // _GLIBCXX_VISIBILITY(default)
653 __calculate_memory_order(__m));
656 - __always_inline __pointer_type
657 + __libitm_always_inline __pointer_type
658 fetch_add(ptrdiff_t __d,
659 memory_order __m = memory_order_seq_cst) noexcept
660 { return _M_b.fetch_add(__d, __m); }
662 - __always_inline __pointer_type
663 + __libitm_always_inline __pointer_type
664 fetch_add(ptrdiff_t __d,
665 memory_order __m = memory_order_seq_cst) volatile noexcept
666 { return _M_b.fetch_add(__d, __m); }
668 - __always_inline __pointer_type
669 + __libitm_always_inline __pointer_type
670 fetch_sub(ptrdiff_t __d,
671 memory_order __m = memory_order_seq_cst) noexcept
672 { return _M_b.fetch_sub(__d, __m); }
674 - __always_inline __pointer_type
675 + __libitm_always_inline __pointer_type
676 fetch_sub(ptrdiff_t __d,
677 memory_order __m = memory_order_seq_cst) volatile noexcept
678 { return _M_b.fetch_sub(__d, __m); }
679 @@ -1544,98 +1544,98 @@ namespace std // _GLIBCXX_VISIBILITY(default)
682 // Function definitions, atomic_flag operations.
683 - inline __always_inline bool
684 + inline __libitm_always_inline bool
685 atomic_flag_test_and_set_explicit(atomic_flag* __a,
686 memory_order __m) noexcept
687 { return __a->test_and_set(__m); }
689 - inline __always_inline bool
690 + inline __libitm_always_inline bool
691 atomic_flag_test_and_set_explicit(volatile atomic_flag* __a,
692 memory_order __m) noexcept
693 { return __a->test_and_set(__m); }
695 - inline __always_inline void
696 + inline __libitm_always_inline void
697 atomic_flag_clear_explicit(atomic_flag* __a, memory_order __m) noexcept
700 - inline __always_inline void
701 + inline __libitm_always_inline void
702 atomic_flag_clear_explicit(volatile atomic_flag* __a,
703 memory_order __m) noexcept
706 - inline __always_inline bool
707 + inline __libitm_always_inline bool
708 atomic_flag_test_and_set(atomic_flag* __a) noexcept
709 { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }
711 - inline __always_inline bool
712 + inline __libitm_always_inline bool
713 atomic_flag_test_and_set(volatile atomic_flag* __a) noexcept
714 { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }
716 - inline __always_inline void
717 + inline __libitm_always_inline void
718 atomic_flag_clear(atomic_flag* __a) noexcept
719 { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
721 - inline __always_inline void
722 + inline __libitm_always_inline void
723 atomic_flag_clear(volatile atomic_flag* __a) noexcept
724 { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
727 // Function templates generally applicable to atomic types.
728 template<typename _ITp>
729 - __always_inline bool
730 + __libitm_always_inline bool
731 atomic_is_lock_free(const atomic<_ITp>* __a) noexcept
732 { return __a->is_lock_free(); }
734 template<typename _ITp>
735 - __always_inline bool
736 + __libitm_always_inline bool
737 atomic_is_lock_free(const volatile atomic<_ITp>* __a) noexcept
738 { return __a->is_lock_free(); }
740 template<typename _ITp>
741 - __always_inline void
742 + __libitm_always_inline void
743 atomic_init(atomic<_ITp>* __a, _ITp __i) noexcept;
745 template<typename _ITp>
746 - __always_inline void
747 + __libitm_always_inline void
748 atomic_init(volatile atomic<_ITp>* __a, _ITp __i) noexcept;
750 template<typename _ITp>
751 - __always_inline void
752 + __libitm_always_inline void
753 atomic_store_explicit(atomic<_ITp>* __a, _ITp __i,
754 memory_order __m) noexcept
755 { __a->store(__i, __m); }
757 template<typename _ITp>
758 - __always_inline void
759 + __libitm_always_inline void
760 atomic_store_explicit(volatile atomic<_ITp>* __a, _ITp __i,
761 memory_order __m) noexcept
762 { __a->store(__i, __m); }
764 template<typename _ITp>
765 - __always_inline _ITp
766 + __libitm_always_inline _ITp
767 atomic_load_explicit(const atomic<_ITp>* __a, memory_order __m) noexcept
768 { return __a->load(__m); }
770 template<typename _ITp>
771 - __always_inline _ITp
772 + __libitm_always_inline _ITp
773 atomic_load_explicit(const volatile atomic<_ITp>* __a,
774 memory_order __m) noexcept
775 { return __a->load(__m); }
777 template<typename _ITp>
778 - __always_inline _ITp
779 + __libitm_always_inline _ITp
780 atomic_exchange_explicit(atomic<_ITp>* __a, _ITp __i,
781 memory_order __m) noexcept
782 { return __a->exchange(__i, __m); }
784 template<typename _ITp>
785 - __always_inline _ITp
786 + __libitm_always_inline _ITp
787 atomic_exchange_explicit(volatile atomic<_ITp>* __a, _ITp __i,
788 memory_order __m) noexcept
789 { return __a->exchange(__i, __m); }
791 template<typename _ITp>
792 - __always_inline bool
793 + __libitm_always_inline bool
794 atomic_compare_exchange_weak_explicit(atomic<_ITp>* __a,
795 _ITp* __i1, _ITp __i2,
797 @@ -1643,7 +1643,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
798 { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }
800 template<typename _ITp>
801 - __always_inline bool
802 + __libitm_always_inline bool
803 atomic_compare_exchange_weak_explicit(volatile atomic<_ITp>* __a,
804 _ITp* __i1, _ITp __i2,
806 @@ -1651,7 +1651,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
807 { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }
809 template<typename _ITp>
810 - __always_inline bool
811 + __libitm_always_inline bool
812 atomic_compare_exchange_strong_explicit(atomic<_ITp>* __a,
813 _ITp* __i1, _ITp __i2,
815 @@ -1659,7 +1659,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
816 { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
818 template<typename _ITp>
819 - __always_inline bool
820 + __libitm_always_inline bool
821 atomic_compare_exchange_strong_explicit(volatile atomic<_ITp>* __a,
822 _ITp* __i1, _ITp __i2,
824 @@ -1668,37 +1668,37 @@ namespace std // _GLIBCXX_VISIBILITY(default)
827 template<typename _ITp>
828 - __always_inline void
829 + __libitm_always_inline void
830 atomic_store(atomic<_ITp>* __a, _ITp __i) noexcept
831 { atomic_store_explicit(__a, __i, memory_order_seq_cst); }
833 template<typename _ITp>
834 - __always_inline void
835 + __libitm_always_inline void
836 atomic_store(volatile atomic<_ITp>* __a, _ITp __i) noexcept
837 { atomic_store_explicit(__a, __i, memory_order_seq_cst); }
839 template<typename _ITp>
840 - __always_inline _ITp
841 + __libitm_always_inline _ITp
842 atomic_load(const atomic<_ITp>* __a) noexcept
843 { return atomic_load_explicit(__a, memory_order_seq_cst); }
845 template<typename _ITp>
846 - __always_inline _ITp
847 + __libitm_always_inline _ITp
848 atomic_load(const volatile atomic<_ITp>* __a) noexcept
849 { return atomic_load_explicit(__a, memory_order_seq_cst); }
851 template<typename _ITp>
852 - __always_inline _ITp
853 + __libitm_always_inline _ITp
854 atomic_exchange(atomic<_ITp>* __a, _ITp __i) noexcept
855 { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }
857 template<typename _ITp>
858 - __always_inline _ITp
859 + __libitm_always_inline _ITp
860 atomic_exchange(volatile atomic<_ITp>* __a, _ITp __i) noexcept
861 { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }
863 template<typename _ITp>
864 - __always_inline bool
865 + __libitm_always_inline bool
866 atomic_compare_exchange_weak(atomic<_ITp>* __a,
867 _ITp* __i1, _ITp __i2) noexcept
869 @@ -1708,7 +1708,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
872 template<typename _ITp>
873 - __always_inline bool
874 + __libitm_always_inline bool
875 atomic_compare_exchange_weak(volatile atomic<_ITp>* __a,
876 _ITp* __i1, _ITp __i2) noexcept
878 @@ -1718,7 +1718,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
881 template<typename _ITp>
882 - __always_inline bool
883 + __libitm_always_inline bool
884 atomic_compare_exchange_strong(atomic<_ITp>* __a,
885 _ITp* __i1, _ITp __i2) noexcept
887 @@ -1728,7 +1728,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
890 template<typename _ITp>
891 - __always_inline bool
892 + __libitm_always_inline bool
893 atomic_compare_exchange_strong(volatile atomic<_ITp>* __a,
894 _ITp* __i1, _ITp __i2) noexcept
896 @@ -1742,158 +1742,158 @@ namespace std // _GLIBCXX_VISIBILITY(default)
897 // intergral types as specified in the standard, excluding address
899 template<typename _ITp>
900 - __always_inline _ITp
901 + __libitm_always_inline _ITp
902 atomic_fetch_add_explicit(__atomic_base<_ITp>* __a, _ITp __i,
903 memory_order __m) noexcept
904 { return __a->fetch_add(__i, __m); }
906 template<typename _ITp>
907 - __always_inline _ITp
908 + __libitm_always_inline _ITp
909 atomic_fetch_add_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
910 memory_order __m) noexcept
911 { return __a->fetch_add(__i, __m); }
913 template<typename _ITp>
914 - __always_inline _ITp
915 + __libitm_always_inline _ITp
916 atomic_fetch_sub_explicit(__atomic_base<_ITp>* __a, _ITp __i,
917 memory_order __m) noexcept
918 { return __a->fetch_sub(__i, __m); }
920 template<typename _ITp>
921 - __always_inline _ITp
922 + __libitm_always_inline _ITp
923 atomic_fetch_sub_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
924 memory_order __m) noexcept
925 { return __a->fetch_sub(__i, __m); }
927 template<typename _ITp>
928 - __always_inline _ITp
929 + __libitm_always_inline _ITp
930 atomic_fetch_and_explicit(__atomic_base<_ITp>* __a, _ITp __i,
931 memory_order __m) noexcept
932 { return __a->fetch_and(__i, __m); }
934 template<typename _ITp>
935 - __always_inline _ITp
936 + __libitm_always_inline _ITp
937 atomic_fetch_and_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
938 memory_order __m) noexcept
939 { return __a->fetch_and(__i, __m); }
941 template<typename _ITp>
942 - __always_inline _ITp
943 + __libitm_always_inline _ITp
944 atomic_fetch_or_explicit(__atomic_base<_ITp>* __a, _ITp __i,
945 memory_order __m) noexcept
946 { return __a->fetch_or(__i, __m); }
948 template<typename _ITp>
949 - __always_inline _ITp
950 + __libitm_always_inline _ITp
951 atomic_fetch_or_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
952 memory_order __m) noexcept
953 { return __a->fetch_or(__i, __m); }
955 template<typename _ITp>
956 - __always_inline _ITp
957 + __libitm_always_inline _ITp
958 atomic_fetch_xor_explicit(__atomic_base<_ITp>* __a, _ITp __i,
959 memory_order __m) noexcept
960 { return __a->fetch_xor(__i, __m); }
962 template<typename _ITp>
963 - __always_inline _ITp
964 + __libitm_always_inline _ITp
965 atomic_fetch_xor_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
966 memory_order __m) noexcept
967 { return __a->fetch_xor(__i, __m); }
969 template<typename _ITp>
970 - __always_inline _ITp
971 + __libitm_always_inline _ITp
972 atomic_fetch_add(__atomic_base<_ITp>* __a, _ITp __i) noexcept
973 { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }
975 template<typename _ITp>
976 - __always_inline _ITp
977 + __libitm_always_inline _ITp
978 atomic_fetch_add(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
979 { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }
981 template<typename _ITp>
982 - __always_inline _ITp
983 + __libitm_always_inline _ITp
984 atomic_fetch_sub(__atomic_base<_ITp>* __a, _ITp __i) noexcept
985 { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }
987 template<typename _ITp>
988 - __always_inline _ITp
989 + __libitm_always_inline _ITp
990 atomic_fetch_sub(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
991 { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }
993 template<typename _ITp>
994 - __always_inline _ITp
995 + __libitm_always_inline _ITp
996 atomic_fetch_and(__atomic_base<_ITp>* __a, _ITp __i) noexcept
997 { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }
999 template<typename _ITp>
1000 - __always_inline _ITp
1001 + __libitm_always_inline _ITp
1002 atomic_fetch_and(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
1003 { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }
1005 template<typename _ITp>
1006 - __always_inline _ITp
1007 + __libitm_always_inline _ITp
1008 atomic_fetch_or(__atomic_base<_ITp>* __a, _ITp __i) noexcept
1009 { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }
1011 template<typename _ITp>
1012 - __always_inline _ITp
1013 + __libitm_always_inline _ITp
1014 atomic_fetch_or(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
1015 { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }
1017 template<typename _ITp>
1018 - __always_inline _ITp
1019 + __libitm_always_inline _ITp
1020 atomic_fetch_xor(__atomic_base<_ITp>* __a, _ITp __i) noexcept
1021 { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
1023 template<typename _ITp>
1024 - __always_inline _ITp
1025 + __libitm_always_inline _ITp
1026 atomic_fetch_xor(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
1027 { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
1030 // Partial specializations for pointers.
1031 template<typename _ITp>
1032 - __always_inline _ITp*
1033 + __libitm_always_inline _ITp*
1034 atomic_fetch_add_explicit(atomic<_ITp*>* __a, ptrdiff_t __d,
1035 memory_order __m) noexcept
1036 { return __a->fetch_add(__d, __m); }
1038 template<typename _ITp>
1039 - __always_inline _ITp*
1040 + __libitm_always_inline _ITp*
1041 atomic_fetch_add_explicit(volatile atomic<_ITp*>* __a, ptrdiff_t __d,
1042 memory_order __m) noexcept
1043 { return __a->fetch_add(__d, __m); }
1045 template<typename _ITp>
1046 - __always_inline _ITp*
1047 + __libitm_always_inline _ITp*
1048 atomic_fetch_add(volatile atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
1049 { return __a->fetch_add(__d); }
1051 template<typename _ITp>
1052 - __always_inline _ITp*
1053 + __libitm_always_inline _ITp*
1054 atomic_fetch_add(atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
1055 { return __a->fetch_add(__d); }
1057 template<typename _ITp>
1058 - __always_inline _ITp*
1059 + __libitm_always_inline _ITp*
1060 atomic_fetch_sub_explicit(volatile atomic<_ITp*>* __a,
1061 ptrdiff_t __d, memory_order __m) noexcept
1062 { return __a->fetch_sub(__d, __m); }
1064 template<typename _ITp>
1065 - __always_inline _ITp*
1066 + __libitm_always_inline _ITp*
1067 atomic_fetch_sub_explicit(atomic<_ITp*>* __a, ptrdiff_t __d,
1068 memory_order __m) noexcept
1069 { return __a->fetch_sub(__d, __m); }
1071 template<typename _ITp>
1072 - __always_inline _ITp*
1073 + __libitm_always_inline _ITp*
1074 atomic_fetch_sub(volatile atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
1075 { return __a->fetch_sub(__d); }
1077 template<typename _ITp>
1078 - __always_inline _ITp*
1079 + __libitm_always_inline _ITp*
1080 atomic_fetch_sub(atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
1081 { return __a->fetch_sub(__d); }