fix boehm_gc ia64 inline assembly

Richard Henderson rth@twiddle.net
Thu Mar 20 19:03:00 GMT 2003


... by removing all of it in favour of builtins.

Hans, the problem here was that matching constraints on memory
operands aren't workable.  And they are not even necessary.  But
there's no point fixing that when we have proper compiler support
for all of this.



r~



Index: boehm-gc/include/private/gc_locks.h
===================================================================
RCS file: /cvs/gcc/gcc/boehm-gc/include/private/gc_locks.h,v
retrieving revision 1.7
diff -c -p -d -r1.7 gc_locks.h
*** boehm-gc/include/private/gc_locks.h	27 Sep 2002 20:40:06 -0000	1.7
--- boehm-gc/include/private/gc_locks.h	20 Mar 2003 18:42:53 -0000
***************
*** 100,116 ****
  #      define GC_TEST_AND_SET_DEFINED
  #    endif
  #    if defined(IA64)
         inline static int GC_test_and_set(volatile unsigned int *addr) {
! 	  long oldval, n = 1;
! 	  __asm__ __volatile__("xchg4 %0=%1,%2"
! 		: "=r"(oldval), "=m"(*addr)
! 		: "r"(n), "1"(*addr) : "memory");
! 	  return oldval;
         }
  #      define GC_TEST_AND_SET_DEFINED
-        /* Should this handle post-increment addressing?? */
         inline static void GC_clear(volatile unsigned int *addr) {
! 	 __asm__ __volatile__("st4.rel %0=r0" : "=m" (*addr) : : "memory");
         }
  #      define GC_CLEAR_DEFINED
  #    endif
--- 100,112 ----
  #      define GC_TEST_AND_SET_DEFINED
  #    endif
  #    if defined(IA64)
+ #     include <ia64intrin.h>
         inline static int GC_test_and_set(volatile unsigned int *addr) {
! 	  return __sync_lock_test_and_set(addr, 1);
         }
  #      define GC_TEST_AND_SET_DEFINED
         inline static void GC_clear(volatile unsigned int *addr) {
! 	  *addr = 0;
         }
  #      define GC_CLEAR_DEFINED
  #    endif
***************
*** 326,345 ****
  #     if defined(IA64)
  #      if !defined(GENERIC_COMPARE_AND_SWAP)
           inline static GC_bool GC_compare_and_exchange(volatile GC_word *addr,
! 						       GC_word old, GC_word new_val) 
  	 {
! 	  unsigned long oldval;
! 	  __asm__ __volatile__("mov ar.ccv=%4 ;; cmpxchg8.rel %0=%1,%2,ar.ccv"
! 		: "=r"(oldval), "=m"(*addr)
! 		: "r"(new_val), "1"(*addr), "r"(old) : "memory");
! 	  return (oldval == old);
           }
  #      endif /* !GENERIC_COMPARE_AND_SWAP */
  #      if 0
  	/* Shouldn't be needed; we use volatile stores instead. */
          inline static void GC_memory_write_barrier()
          {
!           __asm__ __volatile__("mf" : : : "memory");
          }
  #      endif /* 0 */
  #     endif /* IA64 */
--- 322,338 ----
  #     if defined(IA64)
  #      if !defined(GENERIC_COMPARE_AND_SWAP)
           inline static GC_bool GC_compare_and_exchange(volatile GC_word *addr,
! 						       GC_word old,
! 						       GC_word new_val) 
  	 {
! 	   return __sync_bool_compare_and_swap (addr, old, new_val);
           }
  #      endif /* !GENERIC_COMPARE_AND_SWAP */
  #      if 0
  	/* Shouldn't be needed; we use volatile stores instead. */
          inline static void GC_memory_write_barrier()
          {
!           __sync_synchronize ();
          }
  #      endif /* 0 */
  #     endif /* IA64 */



More information about the Gcc-patches mailing list