+#if CPU_ARM
+/*
+ * Atomically exchange the word at *ptr with val, returning the value
+ * previously stored there.
+ *
+ * ARM variant: the SWP instruction performs the load and store as one
+ * atomic read-modify-write on the word addressed by %2.
+ *
+ * NOTE(review): SWP is deprecated from ARMv6 on and is not coherent
+ * across cores on SMP parts -- confirm the CPU_ARM targets are
+ * uniprocessor (or pre-v6) before relying on this.
+ */
+INLINE cpu_atomic_t
+cpu_atomic_xchg(volatile cpu_atomic_t *ptr, cpu_atomic_t val)
+{
+ cpu_atomic_t ret;
+
+ asm volatile(
+ "swp %0, %1, [%2]"
+
+ /* %0 ("=&r"): old *ptr; earlyclobber so it never shares a
+  * register with %1/%2. %1: value to store. %2: address.
+  * "memory" keeps the compiler from caching *ptr across the asm;
+  * "cc" is conservative (SWP does not actually touch the flags). */
+ : "=&r" (ret)
+ : "r" (val), "r" (ptr)
+ : "memory", "cc");
+
+ return ret;
+}
+#else /* CPU_ARM */
+#include <cpu/irq.h>
+
+/*
+ * Atomically exchange the word at *ptr with val, returning the value
+ * previously stored there.
+ *
+ * Generic fallback: the read and the write are wrapped in the ATOMIC()
+ * block from <cpu/irq.h> -- presumably it masks interrupts around the
+ * statements, which makes the pair atomic on a uniprocessor only
+ * (TODO confirm ATOMIC's semantics and SMP story against cpu/irq.h).
+ */
+INLINE cpu_atomic_t
+cpu_atomic_xchg(volatile cpu_atomic_t *ptr, cpu_atomic_t val)
+{
+ cpu_atomic_t ret;
+
+ ATOMIC(
+ ret = *ptr; /* capture old value */
+ *ptr = val; /* then install the new one */
+ );
+ return ret;
+}
+#endif /* CPU_ARM */
+