//! A generic `Atomic<T>` wrapper type which provides atomic access to any
//! [`Copy`] type, in the style of the types in [`core::sync::atomic`].
#![warn(missing_docs)]
#![warn(rust_2018_idioms)]
#![no_std]
#![cfg_attr(feature = "nightly", feature(integer_atomics))]

#[cfg(any(test, feature = "std"))]
#[macro_use]
extern crate std;

use core::mem::MaybeUninit;
pub use core::sync::atomic::{fence, Ordering};

use core::cell::UnsafeCell;
use core::fmt;

#[cfg(feature = "std")]
use std::panic::RefUnwindSafe;

#[cfg(feature = "fallback")]
mod fallback;
mod ops;

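/// A generic atomic wrapper type around a value of type `T`.
///
/// Unlike the fixed set of types in `core::sync::atomic`, `Atomic<T>` works
/// with any `T: Copy`. Whether operations on a particular `T` are lock-free
/// can be queried with [`Atomic::is_lock_free`].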
#[repr(transparent)]
pub struct Atomic<T> {
    v: UnsafeCell<MaybeUninit<T>>,
}

unsafe impl<T: Copy + Send> Sync for Atomic<T> {}

#[cfg(feature = "std")]
impl<T: Copy + RefUnwindSafe> RefUnwindSafe for Atomic<T> {}

impl<T: Copy + Default> Default for Atomic<T> {
    #[inline]
    fn default() -> Self {
        Self::new(Default::default())
    }
}

impl<T: Copy + fmt::Debug> fmt::Debug for Atomic<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("Atomic")
            .field(&self.load(Ordering::SeqCst))
            .finish()
    }
}

impl<T> Atomic<T> {
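    /// Creates a new `Atomic` containing the given initial value.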
    #[inline]
    pub const fn new(v: T) -> Atomic<T> {
        Atomic {
            v: UnsafeCell::new(MaybeUninit::new(v)),
        }
    }

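    /// Checks if operations on values of this type are lock-free, i.e.
    /// whether they compile down to native atomic instructions for this `T`.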
    #[inline]
    pub const fn is_lock_free() -> bool {
        ops::atomic_is_lock_free::<T>()
    }
}

impl<T: Copy> Atomic<T> {
    #[inline]
    fn inner_ptr(&self) -> *mut T {
        self.v.get() as *mut T
    }

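    /// Returns a mutable reference to the underlying value.
    ///
    /// This is safe because the mutable reference to `self` guarantees that
    /// no other threads are concurrently accessing the atomic data.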
    #[inline]
    pub fn get_mut(&mut self) -> &mut T {
        unsafe { &mut *self.inner_ptr() }
    }

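    /// Consumes the atomic and returns the contained value.
    ///
    /// This is safe because passing `self` by value guarantees that no other
    /// threads are concurrently accessing the atomic data.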
    #[inline]
    pub fn into_inner(self) -> T {
        unsafe { self.v.into_inner().assume_init() }
    }

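    /// Loads a value from the `Atomic`.
    ///
    /// `order` describes the memory ordering of this operation.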
    #[inline]
    pub fn load(&self, order: Ordering) -> T {
        unsafe { ops::atomic_load(self.inner_ptr(), order) }
    }

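    /// Stores a value into the `Atomic`.
    ///
    /// `order` describes the memory ordering of this operation.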
    #[inline]
    pub fn store(&self, val: T, order: Ordering) {
        unsafe {
            ops::atomic_store(self.inner_ptr(), val, order);
        }
    }

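    /// Stores a value into the `Atomic`, returning the previous value.
    ///
    /// `order` describes the memory ordering of this operation.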
    #[inline]
    pub fn swap(&self, val: T, order: Ordering) -> T {
        unsafe { ops::atomic_swap(self.inner_ptr(), val, order) }
    }

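    /// Stores a value into the `Atomic` if the current value is the same as
    /// the `current` value.
    ///
    /// The return value is a result indicating whether the new value was
    /// written: `Ok` contains the previous value (guaranteed to equal
    /// `current`), `Err` contains the value actually found. `success` and
    /// `failure` are the memory orderings used on those two paths.
    ///
    /// A minimal doctest sketch; it assumes this crate is built under the
    /// package name `atomic`:
    ///
    /// ```
    /// use atomic::{Atomic, Ordering};
    ///
    /// let a = Atomic::new(5u32);
    /// assert_eq!(a.compare_exchange(5, 10, Ordering::SeqCst, Ordering::SeqCst), Ok(5));
    /// assert_eq!(a.compare_exchange(6, 12, Ordering::SeqCst, Ordering::SeqCst), Err(10));
    /// ```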
    #[inline]
    pub fn compare_exchange(
        &self,
        current: T,
        new: T,
        success: Ordering,
        failure: Ordering,
    ) -> Result<T, T> {
        unsafe { ops::atomic_compare_exchange(self.inner_ptr(), current, new, success, failure) }
    }

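    /// Stores a value into the `Atomic` if the current value is the same as
    /// the `current` value.
    ///
    /// Unlike [`Atomic::compare_exchange`], this function is allowed to fail
    /// spuriously even when the comparison succeeds, which can result in more
    /// efficient code on some platforms. The return value is a result
    /// indicating whether the new value was written and containing the
    /// previous value.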
    #[inline]
    pub fn compare_exchange_weak(
        &self,
        current: T,
        new: T,
        success: Ordering,
        failure: Ordering,
    ) -> Result<T, T> {
        unsafe {
            ops::atomic_compare_exchange_weak(self.inner_ptr(), current, new, success, failure)
        }
    }

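    /// Fetches the value, applies a function to it that optionally computes a
    /// new value, and stores the new value if one was returned.
    ///
    /// Returns `Ok(previous_value)` if the closure returned `Some(_)` and the
    /// store succeeded, and `Err(previous_value)` if the closure returned
    /// `None`. Because other threads may race with this operation, the
    /// closure can be called multiple times, so it should be side-effect
    /// free.
    ///
    /// A minimal doctest sketch; it assumes this crate is built under the
    /// package name `atomic`:
    ///
    /// ```
    /// use atomic::{Atomic, Ordering};
    ///
    /// let a = Atomic::new(7u32);
    /// assert_eq!(
    ///     a.fetch_update(Ordering::SeqCst, Ordering::SeqCst, |x| Some(x + 1)),
    ///     Ok(7)
    /// );
    /// assert_eq!(a.load(Ordering::SeqCst), 8);
    /// ```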
    #[inline]
    pub fn fetch_update<F>(
        &self,
        set_order: Ordering,
        fetch_order: Ordering,
        mut f: F,
    ) -> Result<T, T>
    where
        F: FnMut(T) -> Option<T>,
    {
        let mut prev = self.load(fetch_order);
        while let Some(next) = f(prev) {
            match self.compare_exchange_weak(prev, next, set_order, fetch_order) {
                x @ Ok(_) => return x,
                Err(next_prev) => prev = next_prev,
            }
        }
        Err(prev)
    }
}

impl Atomic<bool> {
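    /// Logical "and" with the current value.
    ///
    /// Performs the operation atomically and returns the previous value.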
    #[inline]
    pub fn fetch_and(&self, val: bool, order: Ordering) -> bool {
        unsafe { ops::atomic_and(self.inner_ptr(), val, order) }
    }

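    /// Logical "or" with the current value.
    ///
    /// Performs the operation atomically and returns the previous value.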
    #[inline]
    pub fn fetch_or(&self, val: bool, order: Ordering) -> bool {
        unsafe { ops::atomic_or(self.inner_ptr(), val, order) }
    }

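    /// Logical "xor" with the current value.
    ///
    /// Performs the operation atomically and returns the previous value.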
    #[inline]
    pub fn fetch_xor(&self, val: bool, order: Ordering) -> bool {
        unsafe { ops::atomic_xor(self.inner_ptr(), val, order) }
    }
}

macro_rules! atomic_ops_common {
    ($($t:ty)*) => ($(
        impl Atomic<$t> {
            /// Adds to the current value, returning the previous value.
            #[inline]
            pub fn fetch_add(&self, val: $t, order: Ordering) -> $t {
                unsafe { ops::atomic_add(self.inner_ptr(), val, order) }
            }

            /// Subtracts from the current value, returning the previous value.
            #[inline]
            pub fn fetch_sub(&self, val: $t, order: Ordering) -> $t {
                unsafe { ops::atomic_sub(self.inner_ptr(), val, order) }
            }

            /// Bitwise "and" with the current value, returning the previous value.
            #[inline]
            pub fn fetch_and(&self, val: $t, order: Ordering) -> $t {
                unsafe { ops::atomic_and(self.inner_ptr(), val, order) }
            }

            /// Bitwise "or" with the current value, returning the previous value.
            #[inline]
            pub fn fetch_or(&self, val: $t, order: Ordering) -> $t {
                unsafe { ops::atomic_or(self.inner_ptr(), val, order) }
            }

            /// Bitwise "xor" with the current value, returning the previous value.
            #[inline]
            pub fn fetch_xor(&self, val: $t, order: Ordering) -> $t {
                unsafe { ops::atomic_xor(self.inner_ptr(), val, order) }
            }
        }
    )*);
}
macro_rules! atomic_ops_signed {
    ($($t:ty)*) => (
        atomic_ops_common!{ $($t)* }
        $(
            impl Atomic<$t> {
                /// Minimum with the current value, returning the previous value.
                #[inline]
                pub fn fetch_min(&self, val: $t, order: Ordering) -> $t {
                    unsafe { ops::atomic_min(self.inner_ptr(), val, order) }
                }

                /// Maximum with the current value, returning the previous value.
                #[inline]
                pub fn fetch_max(&self, val: $t, order: Ordering) -> $t {
                    unsafe { ops::atomic_max(self.inner_ptr(), val, order) }
                }
            }
        )*
    );
}
macro_rules! atomic_ops_unsigned {
    ($($t:ty)*) => (
        atomic_ops_common!{ $($t)* }
        $(
            impl Atomic<$t> {
                /// Minimum with the current value, returning the previous value.
                #[inline]
                pub fn fetch_min(&self, val: $t, order: Ordering) -> $t {
                    unsafe { ops::atomic_umin(self.inner_ptr(), val, order) }
                }

                /// Maximum with the current value, returning the previous value.
                #[inline]
                pub fn fetch_max(&self, val: $t, order: Ordering) -> $t {
                    unsafe { ops::atomic_umax(self.inner_ptr(), val, order) }
                }
            }
        )*
    );
}
atomic_ops_signed! { i8 i16 i32 i64 isize i128 }
atomic_ops_unsigned! { u8 u16 u32 u64 usize u128 }

#[cfg(test)]
mod tests {
    use super::{Atomic, Ordering::*};
    use core::mem;

    #[derive(Copy, Clone, Eq, PartialEq, Debug, Default)]
    struct Foo(u8, u8);
    #[derive(Copy, Clone, Eq, PartialEq, Debug, Default)]
    struct Bar(u64, u64);
    #[derive(Copy, Clone, Eq, PartialEq, Debug, Default)]
    struct Quux(u32);

    #[test]
    fn atomic_bool() {
        let a = Atomic::new(false);
        assert_eq!(
            Atomic::<bool>::is_lock_free(),
            cfg!(target_has_atomic = "8"),
        );
        assert_eq!(format!("{:?}", a), "Atomic(false)");
        assert_eq!(a.load(SeqCst), false);
        a.store(true, SeqCst);
        assert_eq!(a.swap(false, SeqCst), true);
        assert_eq!(a.compare_exchange(true, false, SeqCst, SeqCst), Err(false));
        assert_eq!(a.compare_exchange(false, true, SeqCst, SeqCst), Ok(false));
        assert_eq!(a.fetch_and(false, SeqCst), true);
        assert_eq!(a.fetch_or(true, SeqCst), false);
        assert_eq!(a.fetch_xor(false, SeqCst), true);
        assert_eq!(a.load(SeqCst), true);
    }

    #[test]
    fn atomic_i8() {
        let a = Atomic::new(0i8);
        assert_eq!(Atomic::<i8>::is_lock_free(), cfg!(target_has_atomic = "8"));
        assert_eq!(format!("{:?}", a), "Atomic(0)");
        assert_eq!(a.load(SeqCst), 0);
        a.store(1, SeqCst);
        assert_eq!(a.swap(2, SeqCst), 1);
        assert_eq!(a.compare_exchange(5, 45, SeqCst, SeqCst), Err(2));
        assert_eq!(a.compare_exchange(2, 3, SeqCst, SeqCst), Ok(2));
        assert_eq!(a.fetch_add(123, SeqCst), 3);
        assert_eq!(a.fetch_sub(-56, SeqCst), 126);
        // 126 - (-56) overflows i8 and wraps around to -74.
        assert_eq!(a.fetch_and(7, SeqCst), -74);
        assert_eq!(a.fetch_or(64, SeqCst), 6);
        assert_eq!(a.fetch_xor(1, SeqCst), 70);
        assert_eq!(a.fetch_min(30, SeqCst), 71);
        assert_eq!(a.fetch_max(-25, SeqCst), 30);
        assert_eq!(a.load(SeqCst), 30);
    }

    #[test]
    fn atomic_i16() {
        let a = Atomic::new(0i16);
        assert_eq!(
            Atomic::<i16>::is_lock_free(),
            cfg!(target_has_atomic = "16")
        );
        assert_eq!(format!("{:?}", a), "Atomic(0)");
        assert_eq!(a.load(SeqCst), 0);
        a.store(1, SeqCst);
        assert_eq!(a.swap(2, SeqCst), 1);
        assert_eq!(a.compare_exchange(5, 45, SeqCst, SeqCst), Err(2));
        assert_eq!(a.compare_exchange(2, 3, SeqCst, SeqCst), Ok(2));
        assert_eq!(a.fetch_add(123, SeqCst), 3);
        assert_eq!(a.fetch_sub(-56, SeqCst), 126);
        assert_eq!(a.fetch_and(7, SeqCst), 182);
        assert_eq!(a.fetch_or(64, SeqCst), 6);
        assert_eq!(a.fetch_xor(1, SeqCst), 70);
        assert_eq!(a.fetch_min(30, SeqCst), 71);
        assert_eq!(a.fetch_max(-25, SeqCst), 30);
        assert_eq!(a.load(SeqCst), 30);
    }

    #[test]
    fn atomic_i32() {
        let a = Atomic::new(0i32);
        assert_eq!(
            Atomic::<i32>::is_lock_free(),
            cfg!(target_has_atomic = "32")
        );
        assert_eq!(format!("{:?}", a), "Atomic(0)");
        assert_eq!(a.load(SeqCst), 0);
        a.store(1, SeqCst);
        assert_eq!(a.swap(2, SeqCst), 1);
        assert_eq!(a.compare_exchange(5, 45, SeqCst, SeqCst), Err(2));
        assert_eq!(a.compare_exchange(2, 3, SeqCst, SeqCst), Ok(2));
        assert_eq!(a.fetch_add(123, SeqCst), 3);
        assert_eq!(a.fetch_sub(-56, SeqCst), 126);
        assert_eq!(a.fetch_and(7, SeqCst), 182);
        assert_eq!(a.fetch_or(64, SeqCst), 6);
        assert_eq!(a.fetch_xor(1, SeqCst), 70);
        assert_eq!(a.fetch_min(30, SeqCst), 71);
        assert_eq!(a.fetch_max(-25, SeqCst), 30);
        assert_eq!(a.load(SeqCst), 30);
    }

    #[test]
    fn atomic_i64() {
        let a = Atomic::new(0i64);
        assert_eq!(
            Atomic::<i64>::is_lock_free(),
            cfg!(target_has_atomic = "64") && mem::align_of::<i64>() == 8
        );
        assert_eq!(format!("{:?}", a), "Atomic(0)");
        assert_eq!(a.load(SeqCst), 0);
        a.store(1, SeqCst);
        assert_eq!(a.swap(2, SeqCst), 1);
        assert_eq!(a.compare_exchange(5, 45, SeqCst, SeqCst), Err(2));
        assert_eq!(a.compare_exchange(2, 3, SeqCst, SeqCst), Ok(2));
        assert_eq!(a.fetch_add(123, SeqCst), 3);
        assert_eq!(a.fetch_sub(-56, SeqCst), 126);
        assert_eq!(a.fetch_and(7, SeqCst), 182);
        assert_eq!(a.fetch_or(64, SeqCst), 6);
        assert_eq!(a.fetch_xor(1, SeqCst), 70);
        assert_eq!(a.fetch_min(30, SeqCst), 71);
        assert_eq!(a.fetch_max(-25, SeqCst), 30);
        assert_eq!(a.load(SeqCst), 30);
    }

    #[test]
    fn atomic_i128() {
        let a = Atomic::new(0i128);
        assert_eq!(
            Atomic::<i128>::is_lock_free(),
            cfg!(feature = "nightly") && cfg!(target_has_atomic = "128")
        );
        assert_eq!(format!("{:?}", a), "Atomic(0)");
        assert_eq!(a.load(SeqCst), 0);
        a.store(1, SeqCst);
        assert_eq!(a.swap(2, SeqCst), 1);
        assert_eq!(a.compare_exchange(5, 45, SeqCst, SeqCst), Err(2));
        assert_eq!(a.compare_exchange(2, 3, SeqCst, SeqCst), Ok(2));
        assert_eq!(a.fetch_add(123, SeqCst), 3);
        assert_eq!(a.fetch_sub(-56, SeqCst), 126);
        assert_eq!(a.fetch_and(7, SeqCst), 182);
        assert_eq!(a.fetch_or(64, SeqCst), 6);
        assert_eq!(a.fetch_xor(1, SeqCst), 70);
        assert_eq!(a.fetch_min(30, SeqCst), 71);
        assert_eq!(a.fetch_max(-25, SeqCst), 30);
        assert_eq!(a.load(SeqCst), 30);
    }

    #[test]
    fn atomic_isize() {
        let a = Atomic::new(0isize);
        assert_eq!(format!("{:?}", a), "Atomic(0)");
        assert_eq!(a.load(SeqCst), 0);
        a.store(1, SeqCst);
        assert_eq!(a.swap(2, SeqCst), 1);
        assert_eq!(a.compare_exchange(5, 45, SeqCst, SeqCst), Err(2));
        assert_eq!(a.compare_exchange(2, 3, SeqCst, SeqCst), Ok(2));
        assert_eq!(a.fetch_add(123, SeqCst), 3);
        assert_eq!(a.fetch_sub(-56, SeqCst), 126);
        assert_eq!(a.fetch_and(7, SeqCst), 182);
        assert_eq!(a.fetch_or(64, SeqCst), 6);
        assert_eq!(a.fetch_xor(1, SeqCst), 70);
        assert_eq!(a.fetch_min(30, SeqCst), 71);
        assert_eq!(a.fetch_max(-25, SeqCst), 30);
        assert_eq!(a.load(SeqCst), 30);
    }

    #[test]
    fn atomic_u8() {
        let a = Atomic::new(0u8);
        assert_eq!(Atomic::<u8>::is_lock_free(), cfg!(target_has_atomic = "8"));
        assert_eq!(format!("{:?}", a), "Atomic(0)");
        assert_eq!(a.load(SeqCst), 0);
        a.store(1, SeqCst);
        assert_eq!(a.swap(2, SeqCst), 1);
        assert_eq!(a.compare_exchange(5, 45, SeqCst, SeqCst), Err(2));
        assert_eq!(a.compare_exchange(2, 3, SeqCst, SeqCst), Ok(2));
        assert_eq!(a.fetch_add(123, SeqCst), 3);
        assert_eq!(a.fetch_sub(56, SeqCst), 126);
        assert_eq!(a.fetch_and(7, SeqCst), 70);
        assert_eq!(a.fetch_or(64, SeqCst), 6);
        assert_eq!(a.fetch_xor(1, SeqCst), 70);
        assert_eq!(a.fetch_min(30, SeqCst), 71);
        assert_eq!(a.fetch_max(25, SeqCst), 30);
        assert_eq!(a.load(SeqCst), 30);
    }

    #[test]
    fn atomic_u16() {
        let a = Atomic::new(0u16);
        assert_eq!(
            Atomic::<u16>::is_lock_free(),
            cfg!(target_has_atomic = "16")
        );
        assert_eq!(format!("{:?}", a), "Atomic(0)");
        assert_eq!(a.load(SeqCst), 0);
        a.store(1, SeqCst);
        assert_eq!(a.swap(2, SeqCst), 1);
        assert_eq!(a.compare_exchange(5, 45, SeqCst, SeqCst), Err(2));
        assert_eq!(a.compare_exchange(2, 3, SeqCst, SeqCst), Ok(2));
        assert_eq!(a.fetch_add(123, SeqCst), 3);
        assert_eq!(a.fetch_sub(56, SeqCst), 126);
        assert_eq!(a.fetch_and(7, SeqCst), 70);
        assert_eq!(a.fetch_or(64, SeqCst), 6);
        assert_eq!(a.fetch_xor(1, SeqCst), 70);
        assert_eq!(a.fetch_min(30, SeqCst), 71);
        assert_eq!(a.fetch_max(25, SeqCst), 30);
        assert_eq!(a.load(SeqCst), 30);
    }

    #[test]
    fn atomic_u32() {
        let a = Atomic::new(0u32);
        assert_eq!(
            Atomic::<u32>::is_lock_free(),
            cfg!(target_has_atomic = "32")
        );
        assert_eq!(format!("{:?}", a), "Atomic(0)");
        assert_eq!(a.load(SeqCst), 0);
        a.store(1, SeqCst);
        assert_eq!(a.swap(2, SeqCst), 1);
        assert_eq!(a.compare_exchange(5, 45, SeqCst, SeqCst), Err(2));
        assert_eq!(a.compare_exchange(2, 3, SeqCst, SeqCst), Ok(2));
        assert_eq!(a.fetch_add(123, SeqCst), 3);
        assert_eq!(a.fetch_sub(56, SeqCst), 126);
        assert_eq!(a.fetch_and(7, SeqCst), 70);
        assert_eq!(a.fetch_or(64, SeqCst), 6);
        assert_eq!(a.fetch_xor(1, SeqCst), 70);
        assert_eq!(a.fetch_min(30, SeqCst), 71);
        assert_eq!(a.fetch_max(25, SeqCst), 30);
        assert_eq!(a.load(SeqCst), 30);
    }

    #[test]
    fn atomic_u64() {
        let a = Atomic::new(0u64);
        assert_eq!(
            Atomic::<u64>::is_lock_free(),
            cfg!(target_has_atomic = "64") && mem::align_of::<u64>() == 8
        );
        assert_eq!(format!("{:?}", a), "Atomic(0)");
        assert_eq!(a.load(SeqCst), 0);
        a.store(1, SeqCst);
        assert_eq!(a.swap(2, SeqCst), 1);
        assert_eq!(a.compare_exchange(5, 45, SeqCst, SeqCst), Err(2));
        assert_eq!(a.compare_exchange(2, 3, SeqCst, SeqCst), Ok(2));
        assert_eq!(a.fetch_add(123, SeqCst), 3);
        assert_eq!(a.fetch_sub(56, SeqCst), 126);
        assert_eq!(a.fetch_and(7, SeqCst), 70);
        assert_eq!(a.fetch_or(64, SeqCst), 6);
        assert_eq!(a.fetch_xor(1, SeqCst), 70);
        assert_eq!(a.fetch_min(30, SeqCst), 71);
        assert_eq!(a.fetch_max(25, SeqCst), 30);
        assert_eq!(a.load(SeqCst), 30);
    }

    #[test]
    fn atomic_u128() {
        let a = Atomic::new(0u128);
        assert_eq!(
            Atomic::<u128>::is_lock_free(),
            cfg!(feature = "nightly") && cfg!(target_has_atomic = "128")
        );
        assert_eq!(format!("{:?}", a), "Atomic(0)");
        assert_eq!(a.load(SeqCst), 0);
        a.store(1, SeqCst);
        assert_eq!(a.swap(2, SeqCst), 1);
        assert_eq!(a.compare_exchange(5, 45, SeqCst, SeqCst), Err(2));
        assert_eq!(a.compare_exchange(2, 3, SeqCst, SeqCst), Ok(2));
        assert_eq!(a.fetch_add(123, SeqCst), 3);
        assert_eq!(a.fetch_sub(56, SeqCst), 126);
        assert_eq!(a.fetch_and(7, SeqCst), 70);
        assert_eq!(a.fetch_or(64, SeqCst), 6);
        assert_eq!(a.fetch_xor(1, SeqCst), 70);
        assert_eq!(a.fetch_min(30, SeqCst), 71);
        assert_eq!(a.fetch_max(25, SeqCst), 30);
        assert_eq!(a.load(SeqCst), 30);
    }

    #[test]
    fn atomic_usize() {
        let a = Atomic::new(0usize);
        assert_eq!(format!("{:?}", a), "Atomic(0)");
        assert_eq!(a.load(SeqCst), 0);
        a.store(1, SeqCst);
        assert_eq!(a.swap(2, SeqCst), 1);
        assert_eq!(a.compare_exchange(5, 45, SeqCst, SeqCst), Err(2));
        assert_eq!(a.compare_exchange(2, 3, SeqCst, SeqCst), Ok(2));
        assert_eq!(a.fetch_add(123, SeqCst), 3);
        assert_eq!(a.fetch_sub(56, SeqCst), 126);
        assert_eq!(a.fetch_and(7, SeqCst), 70);
        assert_eq!(a.fetch_or(64, SeqCst), 6);
        assert_eq!(a.fetch_xor(1, SeqCst), 70);
        assert_eq!(a.fetch_min(30, SeqCst), 71);
        assert_eq!(a.fetch_max(25, SeqCst), 30);
        assert_eq!(a.load(SeqCst), 30);
    }

    #[test]
    fn atomic_foo() {
        let a = Atomic::default();
        assert_eq!(Atomic::<Foo>::is_lock_free(), false);
        assert_eq!(format!("{:?}", a), "Atomic(Foo(0, 0))");
        assert_eq!(a.load(SeqCst), Foo(0, 0));
        a.store(Foo(1, 1), SeqCst);
        assert_eq!(a.swap(Foo(2, 2), SeqCst), Foo(1, 1));
        assert_eq!(
            a.compare_exchange(Foo(5, 5), Foo(45, 45), SeqCst, SeqCst),
            Err(Foo(2, 2))
        );
        assert_eq!(
            a.compare_exchange(Foo(2, 2), Foo(3, 3), SeqCst, SeqCst),
            Ok(Foo(2, 2))
        );
        assert_eq!(a.load(SeqCst), Foo(3, 3));
    }

    #[test]
    fn atomic_bar() {
        let a = Atomic::default();
        assert_eq!(Atomic::<Bar>::is_lock_free(), false);
        assert_eq!(format!("{:?}", a), "Atomic(Bar(0, 0))");
        assert_eq!(a.load(SeqCst), Bar(0, 0));
        a.store(Bar(1, 1), SeqCst);
        assert_eq!(a.swap(Bar(2, 2), SeqCst), Bar(1, 1));
        assert_eq!(
            a.compare_exchange(Bar(5, 5), Bar(45, 45), SeqCst, SeqCst),
            Err(Bar(2, 2))
        );
        assert_eq!(
            a.compare_exchange(Bar(2, 2), Bar(3, 3), SeqCst, SeqCst),
            Ok(Bar(2, 2))
        );
        assert_eq!(a.load(SeqCst), Bar(3, 3));
    }

    #[test]
    fn atomic_quux() {
        let a = Atomic::default();
        assert_eq!(
            Atomic::<Quux>::is_lock_free(),
            cfg!(target_has_atomic = "32")
        );
        assert_eq!(format!("{:?}", a), "Atomic(Quux(0))");
        assert_eq!(a.load(SeqCst), Quux(0));
        a.store(Quux(1), SeqCst);
        assert_eq!(a.swap(Quux(2), SeqCst), Quux(1));
        assert_eq!(
            a.compare_exchange(Quux(5), Quux(45), SeqCst, SeqCst),
            Err(Quux(2))
        );
        assert_eq!(
            a.compare_exchange(Quux(2), Quux(3), SeqCst, SeqCst),
            Ok(Quux(2))
        );
        assert_eq!(a.load(SeqCst), Quux(3));
    }
}