| author | Bernhard Reutner-Fischer <rep.dot.nop@gmail.com> | 2011-02-11 17:26:19 +0100 |
|---|---|---|
| committer | Bernhard Reutner-Fischer <rep.dot.nop@gmail.com> | 2011-02-11 16:25:28 +0100 |
| commit | da2d70ed69b57d63243a7b1e05ac7d43e91778ab (patch) | |
| tree | 76e8d167d8f8b2cd1c6bb84ffc7b1d9ad4b1fcba /libc | |
| parent | 70dd77fa63a3df3c6dd38bd73c54598004d1b54e (diff) | |
arm: use CAS gcc builtin if SI-mode pattern is available
Signed-off-by: Bernhard Reutner-Fischer <rep.dot.nop@gmail.com>
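For context, the point of the patch is to let GCC's `__sync` builtins replace the hand-written assembly whenever the compiler predefines `__GCC_HAVE_SYNC_COMPARE_AND_SWAP_4`, which GCC does when the target backend can expand a 32-bit (SImode) compare-and-swap inline. Below is a minimal sketch of that gating idiom, not the uClibc source; the `my_*` macro names are placeholders.

```c
/* Sketch of the gating idiom the patch introduces; the my_* names are
   placeholders and not part of uClibc.  GCC predefines
   __GCC_HAVE_SYNC_COMPARE_AND_SWAP_4 when the backend has an inline
   32-bit (SImode) compare-and-swap pattern.  */
#ifdef __GCC_HAVE_SYNC_COMPARE_AND_SWAP_4
/* Full memory barrier and 32-bit CAS via the GCC builtins.  */
# define my_full_barrier()             __sync_synchronize ()
# define my_cas_val_32(mem, old, new)  __sync_val_compare_and_swap ((mem), (old), (new))
#else
/* Otherwise keep the existing Thumb-2 ldrex/strex or kernel-helper code.  */
#endif
```

Because the existing `#ifdef __thumb2__` branches are turned into `#elif defined __thumb2__`, the builtin path takes precedence when available, while older ARM configurations keep using the kernel cmpxchg helper exactly as before.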
Diffstat (limited to 'libc')
| -rw-r--r-- | libc/sysdeps/linux/arm/bits/atomic.h | 25 |
|---|---|---|

1 file changed, 17 insertions(+), 8 deletions(-)
```diff
diff --git a/libc/sysdeps/linux/arm/bits/atomic.h b/libc/sysdeps/linux/arm/bits/atomic.h
index 8f63e2510..07101fbe8 100644
--- a/libc/sysdeps/linux/arm/bits/atomic.h
+++ b/libc/sysdeps/linux/arm/bits/atomic.h
@@ -37,7 +37,12 @@ typedef uintmax_t uatomic_max_t;
 
 void __arm_link_error (void);
 
-#ifdef __thumb2__
+/* Use the atomic builtins provided by GCC in case the backend provides
+   a pattern to do this efficiently.  */
+
+#ifdef __GCC_HAVE_SYNC_COMPARE_AND_SWAP_4
+#define atomic_full_barrier() __sync_synchronize ()
+#elif defined __thumb2__
 #define atomic_full_barrier() \
      __asm__ __volatile__						      \
 	     ("movw\tip, #0x0fa0\n\t"					      \
@@ -64,17 +69,21 @@ void __arm_link_error (void);
 #define __arch_compare_and_exchange_val_16_acq(mem, newval, oldval) \
   ({ __arm_link_error (); oldval; })
 
+#ifdef __GCC_HAVE_SYNC_COMPARE_AND_SWAP_4
+#define __arch_compare_and_exchange_val_32_acq(mem, newval, oldval) \
+  __sync_val_compare_and_swap ((mem), (oldval), (newval))
+
 /* It doesn't matter what register is used for a_oldval2, but we must
    specify one to work around GCC PR rtl-optimization/21223.  Otherwise
    it may cause a_oldval or a_tmp to be moved to a different register.  */
 
-#ifdef __thumb2__
+#elif defined __thumb2__
 /* Thumb-2 has ldrex/strex.  However it does not have barrier instructions,
    so we still need to use the kernel helper.  */
 #define __arch_compare_and_exchange_val_32_acq(mem, newval, oldval) \
-  ({ register __typeof (oldval) a_oldval __asm__ ("r0");			      \
-     register __typeof (oldval) a_newval __asm__ ("r1") = (newval);		      \
-     register __typeof (mem) a_ptr __asm__ ("r2") = (mem);			      \
+  ({ register __typeof (oldval) a_oldval __asm__ ("r0");		      \
+     register __typeof (oldval) a_newval __asm__ ("r1") = (newval);	      \
+     register __typeof (mem) a_ptr __asm__ ("r2") = (mem);		      \
      register __typeof (oldval) a_tmp __asm__ ("r3");			      \
      register __typeof (oldval) a_oldval2 __asm__ ("r4") = (oldval);	      \
      __asm__ __volatile__						      \
@@ -95,9 +104,9 @@ void __arm_link_error (void);
      a_tmp; })
 #else
 #define __arch_compare_and_exchange_val_32_acq(mem, newval, oldval) \
-  ({ register __typeof (oldval) a_oldval __asm__ ("r0");			      \
-     register __typeof (oldval) a_newval __asm__ ("r1") = (newval);		      \
-     register __typeof (mem) a_ptr __asm__ ("r2") = (mem);			      \
+  ({ register __typeof (oldval) a_oldval __asm__ ("r0");		      \
+     register __typeof (oldval) a_newval __asm__ ("r1") = (newval);	      \
+     register __typeof (mem) a_ptr __asm__ ("r2") = (mem);		      \
      register __typeof (oldval) a_tmp __asm__ ("r3");			      \
      register __typeof (oldval) a_oldval2 __asm__ ("r4") = (oldval);	      \
      __asm__ __volatile__						      \
```
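As a quick sanity check of the new 32-bit path, the hypothetical test program below (not part of the commit) exercises the same builtin the macro now expands to; build it with an ARM GCC whose backend predefines `__GCC_HAVE_SYNC_COMPARE_AND_SWAP_4`, e.g. an ARMv7 toolchain.

```c
/* Hypothetical standalone check, not part of the commit: exercises the
   builtin that __arch_compare_and_exchange_val_32_acq now expands to.  */
#include <stdio.h>

static int shared = 41;

int main (void)
{
#ifdef __GCC_HAVE_SYNC_COMPARE_AND_SWAP_4
	/* __sync_val_compare_and_swap returns the value observed in
	   'shared' before the attempt; the store succeeds here because
	   the observed value matches the expected one.  */
	int seen = __sync_val_compare_and_swap (&shared, 41, 42);
	printf ("seen=%d shared=%d\n", seen, shared);	/* seen=41 shared=42 */
#else
	puts ("no SImode CAS pattern: the ldrex/strex or kernel-helper path applies");
#endif
	return 0;
}
```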
