author     Chris Zankel <chris@zankel.net>    2012-10-17 16:08:38 -0700
committer  Chris Zankel <chris@zankel.net>    2012-11-03 12:57:45 -0700
commit     cc46380d0cf51bffd86e38670d7eb7d939499cb8 (patch)
tree       a738ccdb23652a68b480a3ba4eb2b9e0c0c88f87 /libc/sysdeps
parent     5c0a3b60fbc3442a14169a37657b27ff3173f9db (diff)
xtensa: add a set of atomic intrinsics
Add a new file that provides various atomic intrinsics, which use the
conditional store instruction.

Signed-off-by: Chris Zankel <chris@zankel.net>
Diffstat (limited to 'libc/sysdeps')
-rw-r--r--  libc/sysdeps/linux/xtensa/bits/atomic.h  171
1 file changed, 171 insertions, 0 deletions
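
Xtensa's s32c1i instruction stores its operand to memory only when the word
currently in memory matches the SCOMPARE1 special register, and it always
hands back the word that was previously there, so each macro in the new file
simply loops until the word it loaded is the word it replaced. The portable
sketch below models that retry pattern with the GCC __atomic builtins; it is
an illustration of the semantics only, not code from this patch, and
model_exchange_and_add is an invented name.

#include <stdint.h>

/* Portable model of the s32c1i retry loop: keep attempting a
   compare-and-swap until the value we loaded is the value we replaced.
   Mirrors __arch_atomic_exchange_and_add_32 and returns the old *mem.  */
static inline int32_t
model_exchange_and_add (int32_t *mem, int32_t value)
{
  int32_t old, seen;
  do
    {
      old = __atomic_load_n (mem, __ATOMIC_RELAXED);    /* l32i           */
      seen = old;                                       /* wsr SCOMPARE1  */
      /* Like s32c1i: store old+value only if *mem still equals old;
         on failure, seen is overwritten with what memory actually held.  */
      __atomic_compare_exchange_n (mem, &seen, old + value, 0,
                                   __ATOMIC_ACQUIRE, __ATOMIC_RELAXED);
    }
  while (seen != old);                                  /* bne %0, %1, 1b */
  return old;
}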
diff --git a/libc/sysdeps/linux/xtensa/bits/atomic.h b/libc/sysdeps/linux/xtensa/bits/atomic.h
new file mode 100644
index 000000000..efec365f1
--- /dev/null
+++ b/libc/sysdeps/linux/xtensa/bits/atomic.h
@@ -0,0 +1,171 @@
+/* Copyright (C) 2012 Free Software Foundation, Inc.
+ This file is part of the GNU C Library.
+
+ The GNU C Library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Lesser General Public
+ License as published by the Free Software Foundation; either
+ version 2.1 of the License, or (at your option) any later version.
+
+ The GNU C Library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Lesser General Public License for more details.
+
+ You should have received a copy of the GNU Lesser General Public
+ License along with the GNU C Library; if not, write to the Free
+ Software Foundation, Inc., 51 Franklin Street - Fifth Floor,
+ Boston, MA 02110-1301, USA. */
+
+#ifndef _BITS_ATOMIC_H
+#define _BITS_ATOMIC_H 1
+
+/* Xtensa has only a 32-bit form of a store-conditional instruction,
+ so just stub out the rest. */
+
+/* Atomically store NEWVAL in *MEM if *MEM is equal to OLDVAL.
+ Return the old *MEM value. */
+
+#define __arch_compare_and_exchange_val_32_acq(mem, newval, oldval) \
+ ({__typeof__(*(mem)) __tmp, __value; \
+ __asm__ __volatile__( \
+ "1: l32i %1, %2, 0 \n" \
+ " bne %1, %4, 2f \n" \
+ " wsr %1, SCOMPARE1 \n" \
+ " mov %0, %1 \n" \
+ " mov %1, %3 \n" \
+ " s32c1i %1, %2, 0 \n" \
+ " bne %0, %1, 1b \n" \
+ "2: \n" \
+ : "=&a" (__value), "=&a" (__tmp) \
+ : "a" (mem), "a" (newval), "a" (oldval) \
+ : "memory" ); \
+ __tmp; \
+ })
+
+/* Atomically store NEWVAL in *MEM if *MEM is equal to OLDVAL.
+ Return zero if *MEM was changed or non-zero if no exchange happened. */
+
+#define __arch_compare_and_exchange_bool_32_acq(mem, newval, oldval) \
+ ({__typeof__(*(mem)) __tmp, __value; \
+ __asm__ __volatile__( \
+ "1: l32i %0, %2, 0 \n" \
+ " sub %1, %4, %0 \n" \
+ " bnez %1, 2f \n" \
+ " wsr %0, SCOMPARE1 \n" \
+ " mov %1, %3 \n" \
+ " s32c1i %1, %2, 0 \n" \
+ " bne %0, %1, 1b \n" \
+ " movi %1, 0 \n" \
+ "2: \n" \
+ : "=&a" (__value), "=&a" (__tmp) \
+ : "a" (mem), "a" (newval), "a" (oldval) \
+ : "memory" ); \
+ __tmp != 0; \
+ })
+
+/* Store NEWVALUE in *MEM and return the old value. */
+
+#define __arch_exchange_32_acq(mem, newval) \
+ ({__typeof__(*(mem)) __tmp, __value; \
+ __asm__ __volatile__( \
+ "1: l32i %0, %2, 0 \n" \
+ " wsr %0, SCOMPARE1 \n" \
+ " mov %1, %3 \n" \
+ " s32c1i %1, %2, 0 \n" \
+ " bne %0, %1, 1b \n" \
+ : "=&a" (__value), "=&a" (__tmp) \
+ : "a" (mem), "a" (newval) \
+ : "memory" ); \
+ __tmp; \
+ })
+
+/* Add VALUE to *MEM and return the old value of *MEM. */
+
+#define __arch_atomic_exchange_and_add_32(mem, value) \
+ ({__typeof__(*(mem)) __tmp, __value; \
+ __asm__ __volatile__( \
+ "1: l32i %0, %2, 0 \n" \
+ " wsr %0, SCOMPARE1 \n" \
+ " add %1, %0, %3 \n" \
+ " s32c1i %1, %2, 0 \n" \
+ " bne %0, %1, 1b \n" \
+ : "=&a" (__value), "=&a" (__tmp) \
+ : "a" (mem), "a" (value) \
+ : "memory" ); \
+ __tmp; \
+ })
+
+/* Subtract VALUE from *MEM and return the old value of *MEM. */
+
+#define __arch_atomic_exchange_and_sub_32(mem, value) \
+ ({__typeof__(*(mem)) __tmp, __value; \
+ __asm__ __volatile__( \
+ "1: l32i %0, %2, 0 \n" \
+ " wsr %0, SCOMPARE1 \n" \
+ " sub %1, %0, %3 \n" \
+ " s32c1i %1, %2, 0 \n" \
+ " bne %0, %1, 1b \n" \
+ : "=&a" (__value), "=&a" (__tmp) \
+ : "a" (mem), "a" (value) \
+ : "memory" ); \
+ __tmp; \
+ })
+
+/* Decrement *MEM if it is > 0, and return the old value. */
+
+#define __arch_atomic_decrement_if_positive_32(mem) \
+ ({__typeof__(*(mem)) __tmp, __value; \
+ __asm__ __volatile__( \
+ "1: l32i %0, %2, 0 \n" \
+ " blti %0, 1, 2f \n" \
+ " wsr %0, SCOMPARE1 \n" \
+ " addi %1, %0, -1 \n" \
+ " s32c1i %1, %2, 0 \n" \
+ " bne %0, %1, 1b \n" \
+ "2: \n" \
+ : "=&a" (__value), "=&a" (__tmp) \
+ : "a" (mem) \
+ : "memory" ); \
+ __value; \
+ })
+
+
+/* These are the preferred public interfaces: */
+
+#define atomic_compare_and_exchange_val_acq(mem, newval, oldval) \
+ ({ \
+ if (sizeof(*(mem)) != 4) \
+ abort(); \
+ __arch_compare_and_exchange_val_32_acq(mem, newval, oldval); \
+ })
+
+#define atomic_exchange_acq(mem, newval) \
+ ({ \
+ if (sizeof(*(mem)) != 4) \
+ abort(); \
+ __arch_exchange_32_acq(mem, newval); \
+ })
+
+#define atomic_exchange_and_add(mem, newval) \
+ ({ \
+ if (sizeof(*(mem)) != 4) \
+ abort(); \
+ __arch_atomic_exchange_and_add_32(mem, newval); \
+ })
+
+#define atomic_exchange_and_sub(mem, newval) \
+ ({ \
+ if (sizeof(*(mem)) != 4) \
+ abort(); \
+ __arch_atomic_exchange_and_sub_32(mem, newval); \
+ })
+
+#define atomic_decrement_if_positive(mem) \
+ ({ \
+ if (sizeof(*(mem)) != 4) \
+ abort(); \
+ __arch_atomic_decrement_if_positive_32(mem); \
+ })
+
+#endif /* _BITS_ATOMIC_H */
+
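
As a rough illustration of how the public macros are meant to be used from
uClibc internals, the hypothetical call sites below assume the usual internal
<atomic.h> wrapper that pulls in this bits/atomic.h; they are not part of the
commit.

#include <atomic.h>   /* internal wrapper; ultimately includes bits/atomic.h */
#include <stdint.h>
#include <stdlib.h>   /* abort(), called by the size checks in the macros */

static volatile int32_t lock;
static volatile int32_t refcount = 1;

/* Returns nonzero if lock went from 0 to 1 here, i.e. we won the race.  */
int
try_enter (void)
{
  return atomic_compare_and_exchange_val_acq (&lock, 1, 0) == 0;
}

void
leave (void)
{
  atomic_exchange_acq (&lock, 0);
}

/* Returns the counter's value before the increment.  Any operand size
   other than 4 bytes aborts at run time.  */
int32_t
grab_reference (void)
{
  return atomic_exchange_and_add (&refcount, 1);
}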