This is the mail archive of the gcc-bugs@gcc.gnu.org mailing list for the GCC project.


[Bug rtl-optimization/70825] x86_64: __atomic_compare_exchange_n() accesses stack unnecessarily


https://gcc.gnu.org/bugzilla/show_bug.cgi?id=70825

Ramana Radhakrishnan <ramana at gcc dot gnu.org> changed:

           What    |Removed                     |Added
----------------------------------------------------------------------------
             Target|                            |x86_64, aarch64
                 CC|                            |ramana at gcc dot gnu.org
          Component|target                      |rtl-optimization

--- Comment #1 from Ramana Radhakrishnan <ramana at gcc dot gnu.org> ---
There is an unnecessary store to the stack regardless of the architecture. I
suspect that's just a combination of the specification of the intrinsic and
DSE being unable to remove such stores.
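
As a rough sketch of why that store appears at all (my illustration, not part
of the original report): the second argument of __atomic_compare_exchange_n is
a pointer to the expected value, and on failure the builtin writes the
observed value back through that pointer, so the expected value is given an
addressable stack slot even in callers that never read it again.

#include <stdbool.h>

/* Illustrative only: 'expected' gets a stack slot because its address is
   passed to the builtin; on failure the observed value of *p is written
   back through &expected. */
int cmpxchg_observed(int *p, int old, int new)
{
        int expected = old;
        __atomic_compare_exchange_n(p, &expected, new, false,
                                    __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);
        return expected;        /* 'old' on success, observed value on failure */
}

In the reduced test case below the result is never used, so nothing ever reads
that slot back and the initial store is dead.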

For example, on aarch64 with:

#include <stdbool.h>

#define __always_inline inline __attribute__((always_inline))

typedef struct {
        int counter;
} atomic_t;

static __always_inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
        int cur = old;
        if (__atomic_compare_exchange_n(&v->counter, &cur, new, false,
                                        __ATOMIC_SEQ_CST,
                                        __ATOMIC_RELAXED))
                return cur;
        return cur;
}

void test_atomic_cmpxchg(atomic_t *counter)
{
        atomic_cmpxchg(counter, 23, 42);
}

we get:

        sub     sp, sp, #16
        mov     w1, 23
        mov     w2, 42
        str     w1, [sp, 12] ---> unneeded
.L3:
        ldaxr   w3, [x0]
        cmp     w3, w1
        bne     .L4
        stlxr   w4, w2, [x0]
        cbnz    w4, .L3
.L4:
        add     sp, sp, 16
        ret
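
For contrast (again my own illustration, not from the report): an ordinary
dead store to a local whose address is never taken is removed without trouble;
the problem case is presumably a slot whose address was fed to the builtin, so
the store only becomes provably dead after expansion and the later dead-store
passes fail to clean it up.

/* Illustrative contrast: no builtin involved, the dead store of 23 to
   'cur' is removed at -O2 and no stack slot is allocated. */
int no_builtin(int *p, int new)
{
        int cur = 23;           /* dead: overwritten before any use */
        cur = *p;
        return cur == new;
}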
