path: root/libc/sysdeps/linux/avr32/bits/atomic.h
author    Hans-Christian Egtvedt <hans-christian.egtvedt@atmel.com>  2008-04-24 07:07:41 +0000
committer Hans-Christian Egtvedt <hans-christian.egtvedt@atmel.com>  2008-04-24 07:07:41 +0000
commit    bb575e537326f74da811ac801a657818a93b2402 (patch)
tree      2cc769041b9b369b38496b797c7cd8655c36ad84 /libc/sysdeps/linux/avr32/bits/atomic.h
parent    90bfd0fc8abfcd93fa62b104fb6b3720b713694f (diff)
Fix whitespace damage in AVR32 libc bits header files.
Diffstat (limited to 'libc/sysdeps/linux/avr32/bits/atomic.h')
-rw-r--r--  libc/sysdeps/linux/avr32/bits/atomic.h | 174
1 file changed, 87 insertions(+), 87 deletions(-)
diff --git a/libc/sysdeps/linux/avr32/bits/atomic.h b/libc/sysdeps/linux/avr32/bits/atomic.h
index 4f870c023..e6be41f01 100644
--- a/libc/sysdeps/linux/avr32/bits/atomic.h
+++ b/libc/sysdeps/linux/avr32/bits/atomic.h
@@ -20,101 +20,101 @@ typedef uintptr_t uatomicptr_t;
typedef intmax_t atomic_max_t;
typedef uintmax_t uatomic_max_t;
-#define __arch_compare_and_exchange_val_8_acq(mem, newval, oldval) \
- (abort(), 0)
+#define __arch_compare_and_exchange_val_8_acq(mem, newval, oldval) \
+ (abort(), 0)
-#define __arch_compare_and_exchange_val_16_acq(mem, newval, oldval) \
- (abort(), 0)
+#define __arch_compare_and_exchange_val_16_acq(mem, newval, oldval) \
+ (abort(), 0)
-#define __arch_compare_and_exchange_val_32_acq(mem, newval, oldval) \
- ({ \
- __typeof__(*(mem)) __prev; \
- __asm__ __volatile__( \
- "/* __arch_compare_and_exchange_val_32_acq */\n" \
- "1: ssrf 5\n" \
- " ld.w %[result], %[m]\n" \
- " cp.w %[result], %[old]\n" \
- " brne 2f\n" \
- " stcond %[m], %[new]\n" \
- " brne 1b\n" \
- "2:" \
- : [result] "=&r"(__result), [m] "=m"(*(mem)) \
- : "m"(*(mem)), [old] "ir"(oldval), \
- [new] "r"(newval) \
- : "memory", "cc"); \
- __prev; \
- })
+#define __arch_compare_and_exchange_val_32_acq(mem, newval, oldval) \
+ ({ \
+ __typeof__(*(mem)) __prev; \
+ __asm__ __volatile__( \
+ "/* __arch_compare_and_exchange_val_32_acq */\n" \
+ "1: ssrf 5\n" \
+ " ld.w %[result], %[m]\n" \
+ " cp.w %[result], %[old]\n" \
+ " brne 2f\n" \
+ " stcond %[m], %[new]\n" \
+ " brne 1b\n" \
+ "2:" \
+ : [result] "=&r"(__result), [m] "=m"(*(mem)) \
+ : "m"(*(mem)), [old] "ir"(oldval), \
+ [new] "r"(newval) \
+ : "memory", "cc"); \
+ __prev; \
+ })
-#define __arch_compare_and_exchange_val_64_acq(mem, newval, oldval) \
- (abort(), 0)
+#define __arch_compare_and_exchange_val_64_acq(mem, newval, oldval) \
+ (abort(), 0)
-#define __arch_exchange_32_acq(mem, newval) \
- ({ \
- __typeof__(*(mem)) __oldval; \
- __asm__ __volatile__( \
- "/*__arch_exchange_32_acq */\n" \
- " xchg %[old], %[m], %[new]" \
- : [old] "=&r"(__oldval) \
- : [m] "r"(mem), [new] "r"(newval) \
- : "memory"); \
- __oldval; \
- })
+#define __arch_exchange_32_acq(mem, newval) \
+ ({ \
+ __typeof__(*(mem)) __oldval; \
+ __asm__ __volatile__( \
+ "/*__arch_exchange_32_acq */\n" \
+ " xchg %[old], %[m], %[new]" \
+ : [old] "=&r"(__oldval) \
+ : [m] "r"(mem), [new] "r"(newval) \
+ : "memory"); \
+ __oldval; \
+ })
-#define __arch_atomic_exchange_and_add_32(mem, value) \
- ({ \
- __typeof__(*(mem)) __oldval, __tmp; \
- __asm__ __volatile__( \
- "/* __arch_atomic_exchange_and_add_32 */\n" \
- "1: ssrf 5\n" \
- " ld.w %[old], %[m]\n" \
- " add %[tmp], %[old], %[val]\n" \
- " stcond %[m], %[tmp]\n" \
- " brne 1b" \
- : [old] "=&r"(__oldval), [tmp] "=&r"(__tmp), \
- [m] "=m"(*(mem)) \
- : "m"(*(mem)), [val] "r"(value) \
- : "memory", "cc"); \
- __oldval; \
- })
+#define __arch_atomic_exchange_and_add_32(mem, value) \
+ ({ \
+ __typeof__(*(mem)) __oldval, __tmp; \
+ __asm__ __volatile__( \
+ "/* __arch_atomic_exchange_and_add_32 */\n" \
+ "1: ssrf 5\n" \
+ " ld.w %[old], %[m]\n" \
+ " add %[tmp], %[old], %[val]\n" \
+ " stcond %[m], %[tmp]\n" \
+ " brne 1b" \
+ : [old] "=&r"(__oldval), [tmp] "=&r"(__tmp), \
+ [m] "=m"(*(mem)) \
+ : "m"(*(mem)), [val] "r"(value) \
+ : "memory", "cc"); \
+ __oldval; \
+ })
-#define __arch_atomic_decrement_if_positive_32(mem) \
- ({ \
- __typeof__(*(mem)) __oldval, __tmp; \
- __asm__ __volatile__( \
- "/* __arch_atomic_decrement_if_positive_32 */\n" \
- "1: ssrf 5\n" \
- " ld.w %[old], %[m]\n" \
- " sub %[tmp], %[old], 1\n" \
- " brlt 2f\n" \
- " stcond %[m], %[tmp]\n" \
- " brne 1b" \
- "2:" \
- : [old] "=&r"(__oldval), [tmp] "=&r"(__tmp), \
- [m] "=m"(*(mem)) \
- : "m"(*(mem)) \
- : "memory", "cc"); \
- __oldval; \
- })
+#define __arch_atomic_decrement_if_positive_32(mem) \
+ ({ \
+ __typeof__(*(mem)) __oldval, __tmp; \
+ __asm__ __volatile__( \
+ "/* __arch_atomic_decrement_if_positive_32 */\n" \
+ "1: ssrf 5\n" \
+ " ld.w %[old], %[m]\n" \
+ " sub %[tmp], %[old], 1\n" \
+ " brlt 2f\n" \
+ " stcond %[m], %[tmp]\n" \
+ " brne 1b" \
+ "2:" \
+ : [old] "=&r"(__oldval), [tmp] "=&r"(__tmp), \
+ [m] "=m"(*(mem)) \
+ : "m"(*(mem)) \
+ : "memory", "cc"); \
+ __oldval; \
+ })
-#define atomic_exchange_acq(mem, newval) \
- ({ \
- if (sizeof(*(mem)) != 4) \
- abort(); \
- __arch_exchange_32_acq(mem, newval); \
- })
+#define atomic_exchange_acq(mem, newval) \
+ ({ \
+ if (sizeof(*(mem)) != 4) \
+ abort(); \
+ __arch_exchange_32_acq(mem, newval); \
+ })
-#define atomic_exchange_and_add(mem, newval) \
- ({ \
- if (sizeof(*(mem)) != 4) \
- abort(); \
- __arch_atomic_exchange_and_add_32(mem, newval); \
- })
+#define atomic_exchange_and_add(mem, newval) \
+ ({ \
+ if (sizeof(*(mem)) != 4) \
+ abort(); \
+ __arch_atomic_exchange_and_add_32(mem, newval); \
+ })
-#define atomic_decrement_if_positive(mem) \
- ({ \
- if (sizeof(*(mem)) != 4) \
- abort(); \
- __arch_atomic_decrement_if_positive_32(mem); \
- })
+#define atomic_decrement_if_positive(mem) \
+ ({ \
+ if (sizeof(*(mem)) != 4) \
+ abort(); \
+ __arch_atomic_decrement_if_positive_32(mem); \
+ })
#endif /* _AVR32_BITS_ATOMIC_H */
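
Note on the 32-bit compare-and-exchange above: ssrf 5 opens an atomic region, ld.w/cp.w load and compare the current value, and stcond commits the new value only if the region was not interrupted, with brne 1b retrying on contention. As a reading aid only, the value contract of that macro can be sketched in plain C; this sketch is NOT atomic, and the function name cas32_val_sketch is hypothetical rather than anything defined in the header.

    #include <stdint.h>

    /* Illustrative sketch (not part of this commit or of uClibc): the value
     * contract of __arch_compare_and_exchange_val_32_acq.  The real macro
     * gets its atomicity from the ssrf 5 / stcond retry loop in assembly. */
    static uint32_t cas32_val_sketch(volatile uint32_t *mem,
                                     uint32_t newval, uint32_t oldval)
    {
            uint32_t prev = *mem;       /* ld.w  %[result], %[m]              */
            if (prev == oldval)         /* cp.w  %[result], %[old]            */
                    *mem = newval;      /* stcond %[m], %[new], retried on
                                           contention in the real macro       */
            return prev;                /* caller treats prev == oldval
                                           as success                         */
    }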