git.pld-linux.org Git - packages/gcc.git - gcc-pr61164.patch
commit 9236fd78 (AM) - fix install when building without x32 multilib
2014-09-27  Gleb Fotengauer-Malinovskiy  <glebfm@altlinux.org>

libitm/

	PR libitm/61164
	* local_atomic (__always_inline): Rename to...
	(__libitm_always_inline): ... this.
---

On Mon, Sep 29, 2014 at 03:38:25PM +0200, Jakub Jelinek wrote:
> Why do you want to add inline keyword to that?  Some inline keywords
> are implicit (methods defined inline), so there is no point adding it there.

I just didn't get that redefinition of __always_inline was the source of the problem.

 local_atomic | 299 +++++++++++++++++++++++++++++------------------------------
 1 file changed, 149 insertions(+), 150 deletions(-)
18
19diff --git a/libitm/local_atomic b/libitm/local_atomic
20index c3e079f..552b919 100644
21--- a/libitm/local_atomic
22+++ b/libitm/local_atomic
23@@ -41,8 +41,7 @@
24 #ifndef _GLIBCXX_ATOMIC
25 #define _GLIBCXX_ATOMIC 1
26
27-#undef __always_inline
28-#define __always_inline __attribute__((always_inline))
29+#define __libitm_always_inline __attribute__((always_inline))
30
31 // #pragma GCC system_header
32
33@@ -74,7 +74,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
34 memory_order_seq_cst
35 } memory_order;
36
37- inline __always_inline memory_order
38+ inline __libitm_always_inline memory_order
39 __calculate_memory_order(memory_order __m) noexcept
40 {
41 const bool __cond1 = __m == memory_order_release;
42@@ -84,13 +84,13 @@ namespace std // _GLIBCXX_VISIBILITY(default)
43 return __mo2;
44 }
45
46- inline __always_inline void
47+ inline __libitm_always_inline void
48 atomic_thread_fence(memory_order __m) noexcept
49 {
50 __atomic_thread_fence (__m);
51 }
52
53- inline __always_inline void
54+ inline __libitm_always_inline void
55 atomic_signal_fence(memory_order __m) noexcept
56 {
57 __atomic_thread_fence (__m);
58@@ -280,19 +280,19 @@ namespace std // _GLIBCXX_VISIBILITY(default)
59 // Conversion to ATOMIC_FLAG_INIT.
60 atomic_flag(bool __i) noexcept : __atomic_flag_base({ __i }) { }
61
62- __always_inline bool
63+ __libitm_always_inline bool
64 test_and_set(memory_order __m = memory_order_seq_cst) noexcept
65 {
66 return __atomic_test_and_set (&_M_i, __m);
67 }
68
69- __always_inline bool
70+ __libitm_always_inline bool
71 test_and_set(memory_order __m = memory_order_seq_cst) volatile noexcept
72 {
73 return __atomic_test_and_set (&_M_i, __m);
74 }
75
76- __always_inline void
77+ __libitm_always_inline void
78 clear(memory_order __m = memory_order_seq_cst) noexcept
79 {
80 // __glibcxx_assert(__m != memory_order_consume);
81@@ -302,7 +302,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
82 __atomic_clear (&_M_i, __m);
83 }
84
85- __always_inline void
86+ __libitm_always_inline void
87 clear(memory_order __m = memory_order_seq_cst) volatile noexcept
88 {
89 // __glibcxx_assert(__m != memory_order_consume);
90@@ -455,7 +455,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
91 is_lock_free() const volatile noexcept
92 { return __atomic_is_lock_free (sizeof (_M_i), &_M_i); }
93
94- __always_inline void
95+ __libitm_always_inline void
96 store(__int_type __i, memory_order __m = memory_order_seq_cst) noexcept
97 {
98 // __glibcxx_assert(__m != memory_order_acquire);
99@@ -465,7 +465,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
100 __atomic_store_n(&_M_i, __i, __m);
101 }
102
103- __always_inline void
104+ __libitm_always_inline void
105 store(__int_type __i,
106 memory_order __m = memory_order_seq_cst) volatile noexcept
107 {
108@@ -476,7 +476,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
109 __atomic_store_n(&_M_i, __i, __m);
110 }
111
112- __always_inline __int_type
113+ __libitm_always_inline __int_type
114 load(memory_order __m = memory_order_seq_cst) const noexcept
115 {
116 // __glibcxx_assert(__m != memory_order_release);
117@@ -485,7 +485,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
118 return __atomic_load_n(&_M_i, __m);
119 }
120
121- __always_inline __int_type
122+ __libitm_always_inline __int_type
123 load(memory_order __m = memory_order_seq_cst) const volatile noexcept
124 {
125 // __glibcxx_assert(__m != memory_order_release);
126@@ -494,21 +494,21 @@ namespace std // _GLIBCXX_VISIBILITY(default)
127 return __atomic_load_n(&_M_i, __m);
128 }
129
130- __always_inline __int_type
131+ __libitm_always_inline __int_type
132 exchange(__int_type __i,
133 memory_order __m = memory_order_seq_cst) noexcept
134 {
135 return __atomic_exchange_n(&_M_i, __i, __m);
136 }
137
138- __always_inline __int_type
139+ __libitm_always_inline __int_type
140 exchange(__int_type __i,
141 memory_order __m = memory_order_seq_cst) volatile noexcept
142 {
143 return __atomic_exchange_n(&_M_i, __i, __m);
144 }
145
146- __always_inline bool
147+ __libitm_always_inline bool
148 compare_exchange_weak(__int_type& __i1, __int_type __i2,
149 memory_order __m1, memory_order __m2) noexcept
150 {
151@@ -519,7 +519,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
152 return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
153 }
154
155- __always_inline bool
156+ __libitm_always_inline bool
157 compare_exchange_weak(__int_type& __i1, __int_type __i2,
158 memory_order __m1,
159 memory_order __m2) volatile noexcept
160@@ -531,7 +531,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
161 return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
162 }
163
164- __always_inline bool
165+ __libitm_always_inline bool
166 compare_exchange_weak(__int_type& __i1, __int_type __i2,
167 memory_order __m = memory_order_seq_cst) noexcept
168 {
169@@ -539,7 +539,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
170 __calculate_memory_order(__m));
171 }
172
173- __always_inline bool
174+ __libitm_always_inline bool
175 compare_exchange_weak(__int_type& __i1, __int_type __i2,
176 memory_order __m = memory_order_seq_cst) volatile noexcept
177 {
178@@ -547,7 +547,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
179 __calculate_memory_order(__m));
180 }
181
182- __always_inline bool
183+ __libitm_always_inline bool
184 compare_exchange_strong(__int_type& __i1, __int_type __i2,
185 memory_order __m1, memory_order __m2) noexcept
186 {
187@@ -558,7 +558,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
188 return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
189 }
190
191- __always_inline bool
192+ __libitm_always_inline bool
193 compare_exchange_strong(__int_type& __i1, __int_type __i2,
194 memory_order __m1,
195 memory_order __m2) volatile noexcept
196@@ -570,7 +570,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
197 return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
198 }
199
200- __always_inline bool
201+ __libitm_always_inline bool
202 compare_exchange_strong(__int_type& __i1, __int_type __i2,
203 memory_order __m = memory_order_seq_cst) noexcept
204 {
205@@ -578,7 +578,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
206 __calculate_memory_order(__m));
207 }
208
209- __always_inline bool
210+ __libitm_always_inline bool
211 compare_exchange_strong(__int_type& __i1, __int_type __i2,
212 memory_order __m = memory_order_seq_cst) volatile noexcept
213 {
214@@ -586,52 +586,52 @@ namespace std // _GLIBCXX_VISIBILITY(default)
215 __calculate_memory_order(__m));
216 }
217
218- __always_inline __int_type
219+ __libitm_always_inline __int_type
220 fetch_add(__int_type __i,
221 memory_order __m = memory_order_seq_cst) noexcept
222 { return __atomic_fetch_add(&_M_i, __i, __m); }
223
224- __always_inline __int_type
225+ __libitm_always_inline __int_type
226 fetch_add(__int_type __i,
227 memory_order __m = memory_order_seq_cst) volatile noexcept
228 { return __atomic_fetch_add(&_M_i, __i, __m); }
229
230- __always_inline __int_type
231+ __libitm_always_inline __int_type
232 fetch_sub(__int_type __i,
233 memory_order __m = memory_order_seq_cst) noexcept
234 { return __atomic_fetch_sub(&_M_i, __i, __m); }
235
236- __always_inline __int_type
237+ __libitm_always_inline __int_type
238 fetch_sub(__int_type __i,
239 memory_order __m = memory_order_seq_cst) volatile noexcept
240 { return __atomic_fetch_sub(&_M_i, __i, __m); }
241
242- __always_inline __int_type
243+ __libitm_always_inline __int_type
244 fetch_and(__int_type __i,
245 memory_order __m = memory_order_seq_cst) noexcept
246 { return __atomic_fetch_and(&_M_i, __i, __m); }
247
248- __always_inline __int_type
249+ __libitm_always_inline __int_type
250 fetch_and(__int_type __i,
251 memory_order __m = memory_order_seq_cst) volatile noexcept
252 { return __atomic_fetch_and(&_M_i, __i, __m); }
253
254- __always_inline __int_type
255+ __libitm_always_inline __int_type
256 fetch_or(__int_type __i,
257 memory_order __m = memory_order_seq_cst) noexcept
258 { return __atomic_fetch_or(&_M_i, __i, __m); }
259
260- __always_inline __int_type
261+ __libitm_always_inline __int_type
262 fetch_or(__int_type __i,
263 memory_order __m = memory_order_seq_cst) volatile noexcept
264 { return __atomic_fetch_or(&_M_i, __i, __m); }
265
266- __always_inline __int_type
267+ __libitm_always_inline __int_type
268 fetch_xor(__int_type __i,
269 memory_order __m = memory_order_seq_cst) noexcept
270 { return __atomic_fetch_xor(&_M_i, __i, __m); }
271
272- __always_inline __int_type
273+ __libitm_always_inline __int_type
274 fetch_xor(__int_type __i,
275 memory_order __m = memory_order_seq_cst) volatile noexcept
276 { return __atomic_fetch_xor(&_M_i, __i, __m); }
277@@ -733,7 +733,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
278 is_lock_free() const volatile noexcept
279 { return __atomic_is_lock_free (sizeof (_M_p), &_M_p); }
280
281- __always_inline void
282+ __libitm_always_inline void
283 store(__pointer_type __p,
284 memory_order __m = memory_order_seq_cst) noexcept
285 {
286@@ -744,7 +744,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
287 __atomic_store_n(&_M_p, __p, __m);
288 }
289
290- __always_inline void
291+ __libitm_always_inline void
292 store(__pointer_type __p,
293 memory_order __m = memory_order_seq_cst) volatile noexcept
294 {
295@@ -755,7 +755,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
296 __atomic_store_n(&_M_p, __p, __m);
297 }
298
299- __always_inline __pointer_type
300+ __libitm_always_inline __pointer_type
301 load(memory_order __m = memory_order_seq_cst) const noexcept
302 {
303 // __glibcxx_assert(__m != memory_order_release);
304@@ -764,7 +764,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
305 return __atomic_load_n(&_M_p, __m);
306 }
307
308- __always_inline __pointer_type
309+ __libitm_always_inline __pointer_type
310 load(memory_order __m = memory_order_seq_cst) const volatile noexcept
311 {
312 // __glibcxx_assert(__m != memory_order_release);
313@@ -773,21 +773,21 @@ namespace std // _GLIBCXX_VISIBILITY(default)
314 return __atomic_load_n(&_M_p, __m);
315 }
316
317- __always_inline __pointer_type
318+ __libitm_always_inline __pointer_type
319 exchange(__pointer_type __p,
320 memory_order __m = memory_order_seq_cst) noexcept
321 {
322 return __atomic_exchange_n(&_M_p, __p, __m);
323 }
324
325- __always_inline __pointer_type
326+ __libitm_always_inline __pointer_type
327 exchange(__pointer_type __p,
328 memory_order __m = memory_order_seq_cst) volatile noexcept
329 {
330 return __atomic_exchange_n(&_M_p, __p, __m);
331 }
332
333- __always_inline bool
334+ __libitm_always_inline bool
335 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
336 memory_order __m1,
337 memory_order __m2) noexcept
338@@ -799,7 +799,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
339 return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
340 }
341
342- __always_inline bool
343+ __libitm_always_inline bool
344 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
345 memory_order __m1,
346 memory_order __m2) volatile noexcept
347@@ -811,22 +811,22 @@ namespace std // _GLIBCXX_VISIBILITY(default)
348 return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
349 }
350
351- __always_inline __pointer_type
352+ __libitm_always_inline __pointer_type
353 fetch_add(ptrdiff_t __d,
354 memory_order __m = memory_order_seq_cst) noexcept
355 { return __atomic_fetch_add(&_M_p, __d, __m); }
356
357- __always_inline __pointer_type
358+ __libitm_always_inline __pointer_type
359 fetch_add(ptrdiff_t __d,
360 memory_order __m = memory_order_seq_cst) volatile noexcept
361 { return __atomic_fetch_add(&_M_p, __d, __m); }
362
363- __always_inline __pointer_type
364+ __libitm_always_inline __pointer_type
365 fetch_sub(ptrdiff_t __d,
366 memory_order __m = memory_order_seq_cst) noexcept
367 { return __atomic_fetch_sub(&_M_p, __d, __m); }
368
369- __always_inline __pointer_type
370+ __libitm_always_inline __pointer_type
371 fetch_sub(ptrdiff_t __d,
372 memory_order __m = memory_order_seq_cst) volatile noexcept
373 { return __atomic_fetch_sub(&_M_p, __d, __m); }
374@@ -870,67 +870,67 @@ namespace std // _GLIBCXX_VISIBILITY(default)
375 bool
376 is_lock_free() const volatile noexcept { return _M_base.is_lock_free(); }
377
378- __always_inline void
379+ __libitm_always_inline void
380 store(bool __i, memory_order __m = memory_order_seq_cst) noexcept
381 { _M_base.store(__i, __m); }
382
383- __always_inline void
384+ __libitm_always_inline void
385 store(bool __i, memory_order __m = memory_order_seq_cst) volatile noexcept
386 { _M_base.store(__i, __m); }
387
388- __always_inline bool
389+ __libitm_always_inline bool
390 load(memory_order __m = memory_order_seq_cst) const noexcept
391 { return _M_base.load(__m); }
392
393- __always_inline bool
394+ __libitm_always_inline bool
395 load(memory_order __m = memory_order_seq_cst) const volatile noexcept
396 { return _M_base.load(__m); }
397
398- __always_inline bool
399+ __libitm_always_inline bool
400 exchange(bool __i, memory_order __m = memory_order_seq_cst) noexcept
401 { return _M_base.exchange(__i, __m); }
402
403- __always_inline bool
404+ __libitm_always_inline bool
405 exchange(bool __i,
406 memory_order __m = memory_order_seq_cst) volatile noexcept
407 { return _M_base.exchange(__i, __m); }
408
409- __always_inline bool
410+ __libitm_always_inline bool
411 compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
412 memory_order __m2) noexcept
413 { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }
414
415- __always_inline bool
416+ __libitm_always_inline bool
417 compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
418 memory_order __m2) volatile noexcept
419 { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }
420
421- __always_inline bool
422+ __libitm_always_inline bool
423 compare_exchange_weak(bool& __i1, bool __i2,
424 memory_order __m = memory_order_seq_cst) noexcept
425 { return _M_base.compare_exchange_weak(__i1, __i2, __m); }
426
427- __always_inline bool
428+ __libitm_always_inline bool
429 compare_exchange_weak(bool& __i1, bool __i2,
430 memory_order __m = memory_order_seq_cst) volatile noexcept
431 { return _M_base.compare_exchange_weak(__i1, __i2, __m); }
432
433- __always_inline bool
434+ __libitm_always_inline bool
435 compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
436 memory_order __m2) noexcept
437 { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }
438
439- __always_inline bool
440+ __libitm_always_inline bool
441 compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
442 memory_order __m2) volatile noexcept
443 { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }
444
445- __always_inline bool
446+ __libitm_always_inline bool
447 compare_exchange_strong(bool& __i1, bool __i2,
448 memory_order __m = memory_order_seq_cst) noexcept
449 { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
450
451- __always_inline bool
452+ __libitm_always_inline bool
453 compare_exchange_strong(bool& __i1, bool __i2,
454 memory_order __m = memory_order_seq_cst) volatile noexcept
455 { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
456@@ -980,11 +980,11 @@ namespace std // _GLIBCXX_VISIBILITY(default)
457 store(_Tp __i, memory_order _m = memory_order_seq_cst) noexcept
458 { __atomic_store(&_M_i, &__i, _m); }
459
460- __always_inline void
461+ __libitm_always_inline void
462 store(_Tp __i, memory_order _m = memory_order_seq_cst) volatile noexcept
463 { __atomic_store(&_M_i, &__i, _m); }
464
465- __always_inline _Tp
466+ __libitm_always_inline _Tp
467 load(memory_order _m = memory_order_seq_cst) const noexcept
468 {
469 _Tp tmp;
470@@ -992,7 +992,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
471 return tmp;
472 }
473
474- __always_inline _Tp
475+ __libitm_always_inline _Tp
476 load(memory_order _m = memory_order_seq_cst) const volatile noexcept
477 {
478 _Tp tmp;
479@@ -1000,7 +1000,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
480 return tmp;
481 }
482
483- __always_inline _Tp
484+ __libitm_always_inline _Tp
485 exchange(_Tp __i, memory_order _m = memory_order_seq_cst) noexcept
486 {
487 _Tp tmp;
488@@ -1008,7 +1008,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
489 return tmp;
490 }
491
492- __always_inline _Tp
493+ __libitm_always_inline _Tp
494 exchange(_Tp __i,
495 memory_order _m = memory_order_seq_cst) volatile noexcept
496 {
497@@ -1017,50 +1017,50 @@ namespace std // _GLIBCXX_VISIBILITY(default)
498 return tmp;
499 }
500
501- __always_inline bool
502+ __libitm_always_inline bool
503 compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
504 memory_order __f) noexcept
505 {
506 return __atomic_compare_exchange(&_M_i, &__e, &__i, true, __s, __f);
507 }
508
509- __always_inline bool
510+ __libitm_always_inline bool
511 compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
512 memory_order __f) volatile noexcept
513 {
514 return __atomic_compare_exchange(&_M_i, &__e, &__i, true, __s, __f);
515 }
516
517- __always_inline bool
518+ __libitm_always_inline bool
519 compare_exchange_weak(_Tp& __e, _Tp __i,
520 memory_order __m = memory_order_seq_cst) noexcept
521 { return compare_exchange_weak(__e, __i, __m, __m); }
522
523- __always_inline bool
524+ __libitm_always_inline bool
525 compare_exchange_weak(_Tp& __e, _Tp __i,
526 memory_order __m = memory_order_seq_cst) volatile noexcept
527 { return compare_exchange_weak(__e, __i, __m, __m); }
528
529- __always_inline bool
530+ __libitm_always_inline bool
531 compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
532 memory_order __f) noexcept
533 {
534 return __atomic_compare_exchange(&_M_i, &__e, &__i, false, __s, __f);
535 }
536
537- __always_inline bool
538+ __libitm_always_inline bool
539 compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
540 memory_order __f) volatile noexcept
541 {
542 return __atomic_compare_exchange(&_M_i, &__e, &__i, false, __s, __f);
543 }
544
545- __always_inline bool
546+ __libitm_always_inline bool
547 compare_exchange_strong(_Tp& __e, _Tp __i,
548 memory_order __m = memory_order_seq_cst) noexcept
549 { return compare_exchange_strong(__e, __i, __m, __m); }
550
551- __always_inline bool
552+ __libitm_always_inline bool
553 compare_exchange_strong(_Tp& __e, _Tp __i,
554 memory_order __m = memory_order_seq_cst) volatile noexcept
555 { return compare_exchange_strong(__e, __i, __m, __m); }
556@@ -1153,46 +1153,46 @@ namespace std // _GLIBCXX_VISIBILITY(default)
557 is_lock_free() const volatile noexcept
558 { return _M_b.is_lock_free(); }
559
560- __always_inline void
561+ __libitm_always_inline void
562 store(__pointer_type __p,
563 memory_order __m = memory_order_seq_cst) noexcept
564 { return _M_b.store(__p, __m); }
565
566- __always_inline void
567+ __libitm_always_inline void
568 store(__pointer_type __p,
569 memory_order __m = memory_order_seq_cst) volatile noexcept
570 { return _M_b.store(__p, __m); }
571
572- __always_inline __pointer_type
573+ __libitm_always_inline __pointer_type
574 load(memory_order __m = memory_order_seq_cst) const noexcept
575 { return _M_b.load(__m); }
576
577- __always_inline __pointer_type
578+ __libitm_always_inline __pointer_type
579 load(memory_order __m = memory_order_seq_cst) const volatile noexcept
580 { return _M_b.load(__m); }
581
582- __always_inline __pointer_type
583+ __libitm_always_inline __pointer_type
584 exchange(__pointer_type __p,
585 memory_order __m = memory_order_seq_cst) noexcept
586 { return _M_b.exchange(__p, __m); }
587
588- __always_inline __pointer_type
589+ __libitm_always_inline __pointer_type
590 exchange(__pointer_type __p,
591 memory_order __m = memory_order_seq_cst) volatile noexcept
592 { return _M_b.exchange(__p, __m); }
593
594- __always_inline bool
595+ __libitm_always_inline bool
596 compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
597 memory_order __m1, memory_order __m2) noexcept
598 { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
599
600- __always_inline bool
601+ __libitm_always_inline bool
602 compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
603 memory_order __m1,
604 memory_order __m2) volatile noexcept
605 { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
606
607- __always_inline bool
608+ __libitm_always_inline bool
609 compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
610 memory_order __m = memory_order_seq_cst) noexcept
611 {
612@@ -1200,7 +1200,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
613 __calculate_memory_order(__m));
614 }
615
616- __always_inline bool
617+ __libitm_always_inline bool
618 compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
619 memory_order __m = memory_order_seq_cst) volatile noexcept
620 {
621@@ -1208,18 +1208,18 @@ namespace std // _GLIBCXX_VISIBILITY(default)
622 __calculate_memory_order(__m));
623 }
624
625- __always_inline bool
626+ __libitm_always_inline bool
627 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
628 memory_order __m1, memory_order __m2) noexcept
629 { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
630
631- __always_inline bool
632+ __libitm_always_inline bool
633 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
634 memory_order __m1,
635 memory_order __m2) volatile noexcept
636 { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
637
638- __always_inline bool
639+ __libitm_always_inline bool
640 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
641 memory_order __m = memory_order_seq_cst) noexcept
642 {
643@@ -1227,7 +1227,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
644 __calculate_memory_order(__m));
645 }
646
647- __always_inline bool
648+ __libitm_always_inline bool
649 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
650 memory_order __m = memory_order_seq_cst) volatile noexcept
651 {
652@@ -1235,22 +1235,22 @@ namespace std // _GLIBCXX_VISIBILITY(default)
653 __calculate_memory_order(__m));
654 }
655
656- __always_inline __pointer_type
657+ __libitm_always_inline __pointer_type
658 fetch_add(ptrdiff_t __d,
659 memory_order __m = memory_order_seq_cst) noexcept
660 { return _M_b.fetch_add(__d, __m); }
661
662- __always_inline __pointer_type
663+ __libitm_always_inline __pointer_type
664 fetch_add(ptrdiff_t __d,
665 memory_order __m = memory_order_seq_cst) volatile noexcept
666 { return _M_b.fetch_add(__d, __m); }
667
668- __always_inline __pointer_type
669+ __libitm_always_inline __pointer_type
670 fetch_sub(ptrdiff_t __d,
671 memory_order __m = memory_order_seq_cst) noexcept
672 { return _M_b.fetch_sub(__d, __m); }
673
674- __always_inline __pointer_type
675+ __libitm_always_inline __pointer_type
676 fetch_sub(ptrdiff_t __d,
677 memory_order __m = memory_order_seq_cst) volatile noexcept
678 { return _M_b.fetch_sub(__d, __m); }
679@@ -1544,98 +1544,98 @@ namespace std // _GLIBCXX_VISIBILITY(default)
680
681
682 // Function definitions, atomic_flag operations.
683- inline __always_inline bool
684+ inline __libitm_always_inline bool
685 atomic_flag_test_and_set_explicit(atomic_flag* __a,
686 memory_order __m) noexcept
687 { return __a->test_and_set(__m); }
688
689- inline __always_inline bool
690+ inline __libitm_always_inline bool
691 atomic_flag_test_and_set_explicit(volatile atomic_flag* __a,
692 memory_order __m) noexcept
693 { return __a->test_and_set(__m); }
694
695- inline __always_inline void
696+ inline __libitm_always_inline void
697 atomic_flag_clear_explicit(atomic_flag* __a, memory_order __m) noexcept
698 { __a->clear(__m); }
699
700- inline __always_inline void
701+ inline __libitm_always_inline void
702 atomic_flag_clear_explicit(volatile atomic_flag* __a,
703 memory_order __m) noexcept
704 { __a->clear(__m); }
705
706- inline __always_inline bool
707+ inline __libitm_always_inline bool
708 atomic_flag_test_and_set(atomic_flag* __a) noexcept
709 { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }
710
711- inline __always_inline bool
712+ inline __libitm_always_inline bool
713 atomic_flag_test_and_set(volatile atomic_flag* __a) noexcept
714 { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }
715
716- inline __always_inline void
717+ inline __libitm_always_inline void
718 atomic_flag_clear(atomic_flag* __a) noexcept
719 { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
720
721- inline __always_inline void
722+ inline __libitm_always_inline void
723 atomic_flag_clear(volatile atomic_flag* __a) noexcept
724 { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
725
726
727 // Function templates generally applicable to atomic types.
728 template<typename _ITp>
729- __always_inline bool
730+ __libitm_always_inline bool
731 atomic_is_lock_free(const atomic<_ITp>* __a) noexcept
732 { return __a->is_lock_free(); }
733
734 template<typename _ITp>
735- __always_inline bool
736+ __libitm_always_inline bool
737 atomic_is_lock_free(const volatile atomic<_ITp>* __a) noexcept
738 { return __a->is_lock_free(); }
739
740 template<typename _ITp>
741- __always_inline void
742+ __libitm_always_inline void
743 atomic_init(atomic<_ITp>* __a, _ITp __i) noexcept;
744
745 template<typename _ITp>
746- __always_inline void
747+ __libitm_always_inline void
748 atomic_init(volatile atomic<_ITp>* __a, _ITp __i) noexcept;
749
750 template<typename _ITp>
751- __always_inline void
752+ __libitm_always_inline void
753 atomic_store_explicit(atomic<_ITp>* __a, _ITp __i,
754 memory_order __m) noexcept
755 { __a->store(__i, __m); }
756
757 template<typename _ITp>
758- __always_inline void
759+ __libitm_always_inline void
760 atomic_store_explicit(volatile atomic<_ITp>* __a, _ITp __i,
761 memory_order __m) noexcept
762 { __a->store(__i, __m); }
763
764 template<typename _ITp>
765- __always_inline _ITp
766+ __libitm_always_inline _ITp
767 atomic_load_explicit(const atomic<_ITp>* __a, memory_order __m) noexcept
768 { return __a->load(__m); }
769
770 template<typename _ITp>
771- __always_inline _ITp
772+ __libitm_always_inline _ITp
773 atomic_load_explicit(const volatile atomic<_ITp>* __a,
774 memory_order __m) noexcept
775 { return __a->load(__m); }
776
777 template<typename _ITp>
778- __always_inline _ITp
779+ __libitm_always_inline _ITp
780 atomic_exchange_explicit(atomic<_ITp>* __a, _ITp __i,
781 memory_order __m) noexcept
782 { return __a->exchange(__i, __m); }
783
784 template<typename _ITp>
785- __always_inline _ITp
786+ __libitm_always_inline _ITp
787 atomic_exchange_explicit(volatile atomic<_ITp>* __a, _ITp __i,
788 memory_order __m) noexcept
789 { return __a->exchange(__i, __m); }
790
791 template<typename _ITp>
792- __always_inline bool
793+ __libitm_always_inline bool
794 atomic_compare_exchange_weak_explicit(atomic<_ITp>* __a,
795 _ITp* __i1, _ITp __i2,
796 memory_order __m1,
797@@ -1643,7 +1643,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
798 { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }
799
800 template<typename _ITp>
801- __always_inline bool
802+ __libitm_always_inline bool
803 atomic_compare_exchange_weak_explicit(volatile atomic<_ITp>* __a,
804 _ITp* __i1, _ITp __i2,
805 memory_order __m1,
806@@ -1651,7 +1651,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
807 { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }
808
809 template<typename _ITp>
810- __always_inline bool
811+ __libitm_always_inline bool
812 atomic_compare_exchange_strong_explicit(atomic<_ITp>* __a,
813 _ITp* __i1, _ITp __i2,
814 memory_order __m1,
815@@ -1659,7 +1659,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
816 { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
817
818 template<typename _ITp>
819- __always_inline bool
820+ __libitm_always_inline bool
821 atomic_compare_exchange_strong_explicit(volatile atomic<_ITp>* __a,
822 _ITp* __i1, _ITp __i2,
823 memory_order __m1,
824@@ -1668,37 +1668,37 @@ namespace std // _GLIBCXX_VISIBILITY(default)
825
826
827 template<typename _ITp>
828- __always_inline void
829+ __libitm_always_inline void
830 atomic_store(atomic<_ITp>* __a, _ITp __i) noexcept
831 { atomic_store_explicit(__a, __i, memory_order_seq_cst); }
832
833 template<typename _ITp>
834- __always_inline void
835+ __libitm_always_inline void
836 atomic_store(volatile atomic<_ITp>* __a, _ITp __i) noexcept
837 { atomic_store_explicit(__a, __i, memory_order_seq_cst); }
838
839 template<typename _ITp>
840- __always_inline _ITp
841+ __libitm_always_inline _ITp
842 atomic_load(const atomic<_ITp>* __a) noexcept
843 { return atomic_load_explicit(__a, memory_order_seq_cst); }
844
845 template<typename _ITp>
846- __always_inline _ITp
847+ __libitm_always_inline _ITp
848 atomic_load(const volatile atomic<_ITp>* __a) noexcept
849 { return atomic_load_explicit(__a, memory_order_seq_cst); }
850
851 template<typename _ITp>
852- __always_inline _ITp
853+ __libitm_always_inline _ITp
854 atomic_exchange(atomic<_ITp>* __a, _ITp __i) noexcept
855 { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }
856
857 template<typename _ITp>
858- __always_inline _ITp
859+ __libitm_always_inline _ITp
860 atomic_exchange(volatile atomic<_ITp>* __a, _ITp __i) noexcept
861 { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }
862
863 template<typename _ITp>
864- __always_inline bool
865+ __libitm_always_inline bool
866 atomic_compare_exchange_weak(atomic<_ITp>* __a,
867 _ITp* __i1, _ITp __i2) noexcept
868 {
869@@ -1708,7 +1708,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
870 }
871
872 template<typename _ITp>
873- __always_inline bool
874+ __libitm_always_inline bool
875 atomic_compare_exchange_weak(volatile atomic<_ITp>* __a,
876 _ITp* __i1, _ITp __i2) noexcept
877 {
878@@ -1718,7 +1718,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
879 }
880
881 template<typename _ITp>
882- __always_inline bool
883+ __libitm_always_inline bool
884 atomic_compare_exchange_strong(atomic<_ITp>* __a,
885 _ITp* __i1, _ITp __i2) noexcept
886 {
887@@ -1728,7 +1728,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
888 }
889
890 template<typename _ITp>
891- __always_inline bool
892+ __libitm_always_inline bool
893 atomic_compare_exchange_strong(volatile atomic<_ITp>* __a,
894 _ITp* __i1, _ITp __i2) noexcept
895 {
896@@ -1742,158 +1742,158 @@ namespace std // _GLIBCXX_VISIBILITY(default)
897 // intergral types as specified in the standard, excluding address
898 // types.
899 template<typename _ITp>
900- __always_inline _ITp
901+ __libitm_always_inline _ITp
902 atomic_fetch_add_explicit(__atomic_base<_ITp>* __a, _ITp __i,
903 memory_order __m) noexcept
904 { return __a->fetch_add(__i, __m); }
905
906 template<typename _ITp>
907- __always_inline _ITp
908+ __libitm_always_inline _ITp
909 atomic_fetch_add_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
910 memory_order __m) noexcept
911 { return __a->fetch_add(__i, __m); }
912
913 template<typename _ITp>
914- __always_inline _ITp
915+ __libitm_always_inline _ITp
916 atomic_fetch_sub_explicit(__atomic_base<_ITp>* __a, _ITp __i,
917 memory_order __m) noexcept
918 { return __a->fetch_sub(__i, __m); }
919
920 template<typename _ITp>
921- __always_inline _ITp
922+ __libitm_always_inline _ITp
923 atomic_fetch_sub_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
924 memory_order __m) noexcept
925 { return __a->fetch_sub(__i, __m); }
926
927 template<typename _ITp>
928- __always_inline _ITp
929+ __libitm_always_inline _ITp
930 atomic_fetch_and_explicit(__atomic_base<_ITp>* __a, _ITp __i,
931 memory_order __m) noexcept
932 { return __a->fetch_and(__i, __m); }
933
934 template<typename _ITp>
935- __always_inline _ITp
936+ __libitm_always_inline _ITp
937 atomic_fetch_and_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
938 memory_order __m) noexcept
939 { return __a->fetch_and(__i, __m); }
940
941 template<typename _ITp>
942- __always_inline _ITp
943+ __libitm_always_inline _ITp
944 atomic_fetch_or_explicit(__atomic_base<_ITp>* __a, _ITp __i,
945 memory_order __m) noexcept
946 { return __a->fetch_or(__i, __m); }
947
948 template<typename _ITp>
949- __always_inline _ITp
950+ __libitm_always_inline _ITp
951 atomic_fetch_or_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
952 memory_order __m) noexcept
953 { return __a->fetch_or(__i, __m); }
954
955 template<typename _ITp>
956- __always_inline _ITp
957+ __libitm_always_inline _ITp
958 atomic_fetch_xor_explicit(__atomic_base<_ITp>* __a, _ITp __i,
959 memory_order __m) noexcept
960 { return __a->fetch_xor(__i, __m); }
961
962 template<typename _ITp>
963- __always_inline _ITp
964+ __libitm_always_inline _ITp
965 atomic_fetch_xor_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
966 memory_order __m) noexcept
967 { return __a->fetch_xor(__i, __m); }
968
969 template<typename _ITp>
970- __always_inline _ITp
971+ __libitm_always_inline _ITp
972 atomic_fetch_add(__atomic_base<_ITp>* __a, _ITp __i) noexcept
973 { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }
974
975 template<typename _ITp>
976- __always_inline _ITp
977+ __libitm_always_inline _ITp
978 atomic_fetch_add(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
979 { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }
980
981 template<typename _ITp>
982- __always_inline _ITp
983+ __libitm_always_inline _ITp
984 atomic_fetch_sub(__atomic_base<_ITp>* __a, _ITp __i) noexcept
985 { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }
986
987 template<typename _ITp>
988- __always_inline _ITp
989+ __libitm_always_inline _ITp
990 atomic_fetch_sub(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
991 { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }
992
993 template<typename _ITp>
994- __always_inline _ITp
995+ __libitm_always_inline _ITp
996 atomic_fetch_and(__atomic_base<_ITp>* __a, _ITp __i) noexcept
997 { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }
998
999 template<typename _ITp>
1000- __always_inline _ITp
1001+ __libitm_always_inline _ITp
1002 atomic_fetch_and(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
1003 { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }
1004
1005 template<typename _ITp>
1006- __always_inline _ITp
1007+ __libitm_always_inline _ITp
1008 atomic_fetch_or(__atomic_base<_ITp>* __a, _ITp __i) noexcept
1009 { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }
1010
1011 template<typename _ITp>
1012- __always_inline _ITp
1013+ __libitm_always_inline _ITp
1014 atomic_fetch_or(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
1015 { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }
1016
1017 template<typename _ITp>
1018- __always_inline _ITp
1019+ __libitm_always_inline _ITp
1020 atomic_fetch_xor(__atomic_base<_ITp>* __a, _ITp __i) noexcept
1021 { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
1022
1023 template<typename _ITp>
1024- __always_inline _ITp
1025+ __libitm_always_inline _ITp
1026 atomic_fetch_xor(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
1027 { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
1028
1029
1030 // Partial specializations for pointers.
1031 template<typename _ITp>
1032- __always_inline _ITp*
1033+ __libitm_always_inline _ITp*
1034 atomic_fetch_add_explicit(atomic<_ITp*>* __a, ptrdiff_t __d,
1035 memory_order __m) noexcept
1036 { return __a->fetch_add(__d, __m); }
1037
1038 template<typename _ITp>
1039- __always_inline _ITp*
1040+ __libitm_always_inline _ITp*
1041 atomic_fetch_add_explicit(volatile atomic<_ITp*>* __a, ptrdiff_t __d,
1042 memory_order __m) noexcept
1043 { return __a->fetch_add(__d, __m); }
1044
1045 template<typename _ITp>
1046- __always_inline _ITp*
1047+ __libitm_always_inline _ITp*
1048 atomic_fetch_add(volatile atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
1049 { return __a->fetch_add(__d); }
1050
1051 template<typename _ITp>
1052- __always_inline _ITp*
1053+ __libitm_always_inline _ITp*
1054 atomic_fetch_add(atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
1055 { return __a->fetch_add(__d); }
1056
1057 template<typename _ITp>
1058- __always_inline _ITp*
1059+ __libitm_always_inline _ITp*
1060 atomic_fetch_sub_explicit(volatile atomic<_ITp*>* __a,
1061 ptrdiff_t __d, memory_order __m) noexcept
1062 { return __a->fetch_sub(__d, __m); }
1063
1064 template<typename _ITp>
1065- __always_inline _ITp*
1066+ __libitm_always_inline _ITp*
1067 atomic_fetch_sub_explicit(atomic<_ITp*>* __a, ptrdiff_t __d,
1068 memory_order __m) noexcept
1069 { return __a->fetch_sub(__d, __m); }
1070
1071 template<typename _ITp>
1072- __always_inline _ITp*
1073+ __libitm_always_inline _ITp*
1074 atomic_fetch_sub(volatile atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
1075 { return __a->fetch_sub(__d); }
1076
1077 template<typename _ITp>
1078- __always_inline _ITp*
1079+ __libitm_always_inline _ITp*
1080 atomic_fetch_sub(atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
1081 { return __a->fetch_sub(__d); }
1082 // @} group atomics
1083
1084--
1085glebfm
This page took 0.174193 seconds and 4 git commands to generate.