[gcc(refs/vendors/ARM/heads/morello)] Add _c forms of atomic functions
Matthew Malcomson
matmal01@gcc.gnu.org
Fri May 6 14:44:05 GMT 2022
https://gcc.gnu.org/g:834e941352d3620251ba7920f289f6b75598ed2d
commit 834e941352d3620251ba7920f289f6b75598ed2d
Author: Richard Sandiford <richard.sandiford@arm.com>
Date: Mon Apr 25 19:48:37 2022 +0100
Add _c forms of atomic functions
This patch adds atomic functions that take __capability pointers
as arguments. Following clang's example (and the convention
for memcpy, etc.), these functions are distinguished from
non-__capability versions by having a "_c" suffix.
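For example, with this patch a caller can write something like the
following (an illustrative sketch modelled on the new tests further
down, assuming a Morello hybrid-capability target; the function names
are not part of the patch):

  #include <stdint.h>

  uint64_t
  fetch_add_cap (uint64_t *__capability ptr, uint64_t val)
  {
    /* New _c form: PTR is a __capability pointer.  */
    return __atomic_fetch_add_8_c (ptr, val, __ATOMIC_RELAXED);
  }

  uint64_t
  fetch_add_noncap (uint64_t *ptr, uint64_t val)
  {
    /* Existing form: PTR is a normal (non-__capability) pointer.  */
    return __atomic_fetch_add_8 (ptr, val, __ATOMIC_RELAXED);
  }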
The two obvious options were:
(1) Put these _c functions in a group after the non-_c functions.
(2) Put each _c function after the corresponding non-_c function.
The patch goes for (2), since it makes it easier to handle
the difference between functions that have intcap forms
and functions that don't.
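To make the effect of option (2) concrete, the builtin enum ends up
interleaved roughly as follows (an illustrative sketch with made-up
EX_* names; the real entries are generated by the sync-builtins.def
macros in the diff):

  /* Hypothetical layout under option (2): each _C entry directly
     follows its non-_C counterpart, so the DSIZE-specific function
     for a base code sits at base + exact_log2 (nbytes) * 2 + 1,
     matching the gcc/builtins.c change below.  */
  enum example_sync_layout {
    EX_SYNC_FETCH_AND_ADD_N,    /* base: overloaded _N form */
    EX_SYNC_FETCH_AND_ADD_1,    /* base + 1  (log2(1)*2 + 1) */
    EX_SYNC_FETCH_AND_ADD_1_C,  /* base + 2 */
    EX_SYNC_FETCH_AND_ADD_2,    /* base + 3  (log2(2)*2 + 1) */
    EX_SYNC_FETCH_AND_ADD_2_C,  /* base + 4 */
    EX_SYNC_FETCH_AND_ADD_4,    /* base + 5  (log2(4)*2 + 1) */
    EX_SYNC_FETCH_AND_ADD_4_C   /* base + 6 */
  };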
The vast majority of backend atomics patterns can only handle
the non-_c form, so the generic aarch64_sync_memory_operand
requires normal-base addresses. However, we can support
alternative-base forms of some atomic loads and stores.
The atomic_load and atomic_store expanders check which forms
of load and store can handle alternative-base addresses and
FAIL for the forms that can't. Target-independent code then
uses fallback expansion paths for those cases.
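For instance, based on the expectations in the new
alt-base-atomic-load-1.c test below (the function names here are
illustrative only):

  #include <stdint.h>

  uint16_t
  load_u16_acquire (uint16_t *__capability ptr)
  {
    /* There is no alternative-base LDARH, so the expander FAILs
       and the target-independent fallback is used; the test
       expects:
           ldrh  w0, [c0]
           dmb   ishld  */
    return __atomic_load_2_c (ptr, __ATOMIC_ACQUIRE);
  }

  uint32_t
  load_u32_acquire (uint32_t *__capability ptr)
  {
    /* Word-sized loads do have an alternative-base form; the test
       expects a single:
           ldar  w0, [c0]  */
    return __atomic_load_4_c (ptr, __ATOMIC_ACQUIRE);
  }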
Doing this exposed a couple of bugs in the fallback paths
of expand_compare_and_swap_loop (which didn't clean up
after a failed sequence) and expand_atomic_fetch_op
(which was missing a null check).
The existing behaviour of __ATOMIC_CONSUME is different from
(more relaxed than) __ATOMIC_ACQUIRE, which is inconsistent
with the documentation:
@item __ATOMIC_CONSUME
This is currently implemented using the stronger @code{__ATOMIC_ACQUIRE}
memory order because of a deficiency in C++11's semantics for
@code{memory_order_consume}.
But that isn't really relevant to this patch; all that matters
here is that the tests stay in sync with the current behaviour.
Relatedly, the atomic_load insn checks for is_mm_release (model)
and the atomic_store insn checks for is_mm_acquire (model), but
those conditions would never be triggered by valid uses of
the associated __atomic_load_* and __atomic_store_* functions.
Diff:
---
gcc/builtin-types.def | 63 ++-
gcc/builtins.c | 29 +-
gcc/config/aarch64/aarch64-protos.h | 1 +
gcc/config/aarch64/aarch64.c | 22 +
gcc/config/aarch64/atomics.md | 36 +-
gcc/config/aarch64/predicates.md | 13 +-
gcc/fortran/types.def | 63 ++-
gcc/optabs.c | 8 +-
gcc/sync-builtins.def | 63 ++-
.../morello/alt-base-atomic-compare-exchange-1.c | 59 +++
.../morello/alt-base-atomic-compare-exchange-2.c | 87 ++++
.../aarch64/morello/alt-base-atomic-exchange-1.c | 53 +++
.../aarch64/morello/alt-base-atomic-exchange-2.c | 79 ++++
.../aarch64/morello/alt-base-atomic-load-1.c | 290 ++++++++++++
.../aarch64/morello/alt-base-atomic-operation-1.c | 499 +++++++++++++++++++++
.../aarch64/morello/alt-base-atomic-operation-2.c | 483 ++++++++++++++++++++
.../aarch64/morello/alt-base-atomic-store-1.c | 252 +++++++++++
.../aarch64/morello/alt-base-sync-compare-swap-1.c | 90 ++++
.../aarch64/morello/alt-base-sync-compare-swap-2.c | 111 +++++
.../aarch64/morello/alt-base-sync-lock-release-1.c | 89 ++++
.../morello/alt-base-sync-lock-test-and-set-1.c | 52 +++
.../morello/alt-base-sync-lock-test-and-set-2.c | 59 +++
.../aarch64/morello/alt-base-sync-operation-1.c | 396 ++++++++++++++++
.../aarch64/morello/alt-base-sync-operation-2.c | 127 ++++++
gcc/tree.c | 11 +
gcc/tree.h | 1 +
26 files changed, 2977 insertions(+), 59 deletions(-)
diff --git a/gcc/builtin-types.def b/gcc/builtin-types.def
index c2c472aa773..e07bc092769 100644
--- a/gcc/builtin-types.def
+++ b/gcc/builtin-types.def
@@ -57,6 +57,13 @@ along with GCC; see the file COPYING3. If not see
This macro describes a pointer type. ENUM is as above; TYPE is
the type pointed to. */
+#ifndef DEF_POINTER_TYPE_C
+#define DEF_POINTER_TYPE_C(NAME, TYPE) \
+ DEF_PRIMITIVE_TYPE (NAME ##_PTR, build_pointer_type (TYPE)) \
+ DEF_PRIMITIVE_TYPE (NAME ##_CAPPTR, \
+ try_building_capability_pointer_type (TYPE))
+#endif
+
DEF_PRIMITIVE_TYPE (BT_VOID, void_type_node)
DEF_PRIMITIVE_TYPE (BT_BOOL, boolean_type_node)
DEF_PRIMITIVE_TYPE (BT_INT, integer_type_node)
@@ -127,14 +134,12 @@ DEF_PRIMITIVE_TYPE (BT_CONST_PTR, const_ptr_type_node)
DEF_PRIMITIVE_TYPE (BT_CONST_CAPPTR,
(targetm.capability_mode ().exists ()
? cap_const_ptr_type_node : const_ptr_type_node))
-DEF_PRIMITIVE_TYPE (BT_VOLATILE_PTR,
- build_pointer_type
- (build_qualified_type (void_type_node,
- TYPE_QUAL_VOLATILE)))
-DEF_PRIMITIVE_TYPE (BT_CONST_VOLATILE_PTR,
- build_pointer_type
- (build_qualified_type (void_type_node,
- TYPE_QUAL_VOLATILE|TYPE_QUAL_CONST)))
+DEF_POINTER_TYPE_C (BT_VOLATILE,
+ build_qualified_type (void_type_node,
+ TYPE_QUAL_VOLATILE))
+DEF_POINTER_TYPE_C (BT_CONST_VOLATILE,
+ build_qualified_type (void_type_node,
+ TYPE_QUAL_VOLATILE|TYPE_QUAL_CONST))
DEF_PRIMITIVE_TYPE (BT_PTRMODE,
(*lang_hooks.types.type_for_mode)(offset_mode (ptr_mode),
0))
@@ -306,6 +311,7 @@ DEF_FUNCTION_TYPE_1 (BT_FN_DFLOAT32_DFLOAT32, BT_DFLOAT32, BT_DFLOAT32)
DEF_FUNCTION_TYPE_1 (BT_FN_DFLOAT64_DFLOAT64, BT_DFLOAT64, BT_DFLOAT64)
DEF_FUNCTION_TYPE_1 (BT_FN_DFLOAT128_DFLOAT128, BT_DFLOAT128, BT_DFLOAT128)
DEF_FUNCTION_TYPE_1 (BT_FN_VOID_VPTR, BT_VOID, BT_VOLATILE_PTR)
+DEF_FUNCTION_TYPE_1 (BT_FN_VOID_VPTR_C, BT_VOID, BT_VOLATILE_CAPPTR)
DEF_FUNCTION_TYPE_1 (BT_FN_VOID_PTRPTR, BT_VOID, BT_PTR_PTR)
DEF_FUNCTION_TYPE_1 (BT_FN_VOID_CONST_PTR, BT_VOID, BT_CONST_PTR)
DEF_FUNCTION_TYPE_1 (BT_FN_UINT_UINT, BT_UINT, BT_UINT)
@@ -346,12 +352,24 @@ FOR_ALL_SYNC_N(DEF_SYNC)
#undef DEF_SYNC
DEF_FUNCTION_TYPE_2 (BT_FN_ICAP_VPTR_ICAPOFF, BT_ICAP, \
BT_VOLATILE_PTR, BT_ICAPOFF)
+#define DEF_SYNC(DTYPE, BT_DTYPE) \
+ DEF_FUNCTION_TYPE_2 (BT_FN_##DTYPE##_VPTR_##DTYPE##_C, BT_DTYPE, \
+ BT_VOLATILE_CAPPTR, BT_DTYPE)
+FOR_ALL_SYNC_N(DEF_SYNC)
+#undef DEF_SYNC
+DEF_FUNCTION_TYPE_2 (BT_FN_ICAP_VPTR_ICAPOFF_C, BT_ICAP, \
+ BT_VOLATILE_CAPPTR, BT_ICAPOFF)
#define DEF_SYNC(DTYPE, BT_DTYPE) \
DEF_FUNCTION_TYPE_2 (BT_FN_##DTYPE##_CONST_VPTR_INT, BT_DTYPE, \
BT_CONST_VOLATILE_PTR, BT_INT)
FOR_ALL_SYNC_N(DEF_SYNC)
#undef DEF_SYNC
+#define DEF_SYNC(DTYPE, BT_DTYPE) \
+ DEF_FUNCTION_TYPE_2 (BT_FN_##DTYPE##_CONST_VPTR_INT##_C, BT_DTYPE, \
+ BT_CONST_VOLATILE_CAPPTR, BT_INT)
+FOR_ALL_SYNC_N(DEF_SYNC)
+#undef DEF_SYNC
DEF_FUNCTION_TYPE_2 (BT_FN_VOID_PTR_INT, BT_VOID, BT_PTR, BT_INT)
DEF_FUNCTION_TYPE_2 (BT_FN_STRING_STRING_CONST_STRING,
@@ -533,12 +551,22 @@ DEF_POINTER_TYPE (BT_PTR_FN_VOID_PTR_PTR, BT_FN_VOID_PTR_PTR)
BT_VOLATILE_PTR, BT_DTYPE, BT_DTYPE)
FOR_ALL_SYNC_N (DEF_SYNC)
#undef DEF_SYNC
+#define DEF_SYNC(DTYPE, BT_DTYPE) \
+ DEF_FUNCTION_TYPE_3 (BT_FN_BOOL_VPTR_##DTYPE##_##DTYPE##_C, BT_BOOL, \
+ BT_VOLATILE_CAPPTR, BT_DTYPE, BT_DTYPE)
+FOR_ALL_SYNC_N (DEF_SYNC)
+#undef DEF_SYNC
#define DEF_SYNC(DTYPE, BT_DTYPE) \
DEF_FUNCTION_TYPE_3 (BT_FN_##DTYPE##_VPTR_##DTYPE##_##DTYPE, BT_DTYPE, \
BT_VOLATILE_PTR, BT_DTYPE, BT_DTYPE)
FOR_ALL_SYNC_N (DEF_SYNC)
#undef DEF_SYNC
+#define DEF_SYNC(DTYPE, BT_DTYPE) \
+ DEF_FUNCTION_TYPE_3 (BT_FN_##DTYPE##_VPTR_##DTYPE##_##DTYPE##_C, BT_DTYPE, \
+ BT_VOLATILE_CAPPTR, BT_DTYPE, BT_DTYPE)
+FOR_ALL_SYNC_N (DEF_SYNC)
+#undef DEF_SYNC
#define DEF_SYNC(DTYPE, BT_DTYPE) \
DEF_FUNCTION_TYPE_3 (BT_FN_##DTYPE##_VPTR_##DTYPE##_INT, BT_DTYPE, \
@@ -547,12 +575,24 @@ FOR_ALL_SYNC_N (DEF_SYNC)
#undef DEF_SYNC
DEF_FUNCTION_TYPE_3 (BT_FN_ICAP_VPTR_ICAPOFF_INT, BT_ICAP, \
BT_VOLATILE_PTR, BT_ICAPOFF, BT_INT)
+#define DEF_SYNC(DTYPE, BT_DTYPE) \
+ DEF_FUNCTION_TYPE_3 (BT_FN_##DTYPE##_VPTR_##DTYPE##_INT_C, BT_DTYPE, \
+ BT_VOLATILE_CAPPTR, BT_DTYPE, BT_INT)
+FOR_ALL_SYNC_N (DEF_SYNC)
+#undef DEF_SYNC
+DEF_FUNCTION_TYPE_3 (BT_FN_ICAP_VPTR_ICAPOFF_INT_C, BT_ICAP, \
+ BT_VOLATILE_CAPPTR, BT_ICAPOFF, BT_INT)
#define DEF_SYNC(DTYPE, BT_DTYPE) \
DEF_FUNCTION_TYPE_3 (BT_FN_VOID_VPTR_##DTYPE##_INT, BT_VOID, \
BT_VOLATILE_PTR, BT_DTYPE, BT_INT)
FOR_ALL_SYNC_N (DEF_SYNC)
#undef DEF_SYNC
+#define DEF_SYNC(DTYPE, BT_DTYPE) \
+ DEF_FUNCTION_TYPE_3 (BT_FN_VOID_VPTR_##DTYPE##_INT_C, BT_VOID, \
+ BT_VOLATILE_CAPPTR, BT_DTYPE, BT_INT)
+FOR_ALL_SYNC_N (DEF_SYNC)
+#undef DEF_SYNC
DEF_FUNCTION_TYPE_3 (BT_FN_STRING_STRING_CONST_STRING_SIZE,
BT_STRING, BT_STRING, BT_CONST_STRING, BT_SIZE)
@@ -738,6 +778,12 @@ DEF_FUNCTION_TYPE_5 (BT_FN_BOOL_UINT_ULLPTR_ULL_ULLPTR_ULLPTR,
BT_BOOL, BT_INT, BT_INT)
FOR_ALL_SYNC_N (DEF_SYNC)
#undef DEF_SYNC
+#define DEF_SYNC(DTYPE, BT_DTYPE) \
+ DEF_FUNCTION_TYPE_6 (BT_FN_BOOL_VPTR_PTR_##DTYPE##_BOOL_INT_INT_C, \
+ BT_BOOL, BT_VOLATILE_CAPPTR, BT_PTR, BT_DTYPE, \
+ BT_BOOL, BT_INT, BT_INT)
+FOR_ALL_SYNC_N (DEF_SYNC)
+#undef DEF_SYNC
DEF_FUNCTION_TYPE_6 (BT_FN_INT_STRING_SIZE_INT_SIZE_CONST_STRING_VALIST_ARG,
BT_INT, BT_STRING, BT_SIZE, BT_INT, BT_SIZE,
@@ -886,3 +932,4 @@ DEF_FUNCTION_TYPE_2 (BT_FN_VOID_VPTR_SIZE, BT_VOID,
BT_VOLATILE_PTR, BT_SIZE)
#undef FOR_ALL_SYNC_N
+#undef DEF_POINTER_TYPE_C
diff --git a/gcc/builtins.c b/gcc/builtins.c
index 3bfc350b2bc..f31032bfeb0 100644
--- a/gcc/builtins.c
+++ b/gcc/builtins.c
@@ -6649,7 +6649,7 @@ builtin_sync_code (built_in_function base_code, int dsize)
/* The capability entry comes after the numerical ones. */
if (dsize == SYNC_ICAP)
dsize = 32;
- return (built_in_function) ((int) base_code + exact_log2 (dsize) + 1);
+ return (built_in_function) ((int) base_code + exact_log2 (dsize) * 2 + 1);
}
/* BASE_CODE is an overloaded BUILT_IN_*_N function defined in
@@ -6659,7 +6659,7 @@ builtin_sync_code (built_in_function base_code, int dsize)
sync_dsize
builtin_sync_dsize (built_in_function base_code, built_in_function code)
{
- int nbytes_log2 = (int) code - (int) base_code - 1;
+ int nbytes_log2 = ((int) code - (int) base_code - 1) / 2;
gcc_assert (nbytes_log2 >= 0 && nbytes_log2 <= 5);
if (nbytes_log2 == 5)
return SYNC_ICAP;
@@ -7259,7 +7259,6 @@ expand_builtin_atomic_fetch_op (scalar_addr_mode mode, tree exp, rtx target,
if (ext_call == BUILT_IN_NONE)
return NULL_RTX;
- gcc_assert (!CAPABILITY_MODE_P (mode));
/* Change the call to the specified function. */
fndecl = get_callee_fndecl (exp);
addr = CALL_EXPR_FN (exp);
@@ -7275,23 +7274,33 @@ expand_builtin_atomic_fetch_op (scalar_addr_mode mode, tree exp, rtx target,
CALL_EXPR_TAILCALL (exp) = 0;
/* Expand the call here so we can emit trailing code. */
- ret = expand_call (exp, target, ignore);
+ rtx fetched = expand_call (exp, target, ignore);
/* Replace the original function just in case it matters. */
TREE_OPERAND (addr, 0) = fndecl;
/* Then issue the arithmetic correction to return the right result. */
- if (!ignore)
+ if (ignore)
+ ret = fetched;
+ else if (CAPABILITY_MODE_P (mode) && code == PLUS)
+ ret = expand_pointer_plus (mode, fetched, val, target,
+ true, OPTAB_LIB_WIDEN);
+ else
{
+ rtx subtarget = mode == op_mode ? target : NULL_RTX;
if (code == NOT)
{
- ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
- OPTAB_LIB_WIDEN);
- ret = expand_simple_unop (mode, NOT, ret, target, true);
+ ret = expand_simple_binop (op_mode, AND,
+ drop_capability (fetched), val,
+ NULL_RTX, true, OPTAB_LIB_WIDEN);
+ ret = expand_simple_unop (op_mode, NOT, ret, subtarget, true);
}
else
- ret = expand_simple_binop (mode, code, ret, val, target, true,
- OPTAB_LIB_WIDEN);
+ ret = expand_simple_binop (op_mode, code,
+ drop_capability (fetched), val,
+ subtarget, true, OPTAB_LIB_WIDEN);
+ if (mode != op_mode)
+ ret = expand_replace_address_value (mode, fetched, ret, target);
}
return ret;
}
diff --git a/gcc/config/aarch64/aarch64-protos.h b/gcc/config/aarch64/aarch64-protos.h
index 954ffa45ad1..4b56cfa5155 100644
--- a/gcc/config/aarch64/aarch64-protos.h
+++ b/gcc/config/aarch64/aarch64-protos.h
@@ -776,6 +776,7 @@ struct atomic_ool_names
const char *str[5][4];
};
+bool aarch64_atomic_load_store_ok_p (rtx, rtx);
rtx aarch64_atomic_ool_func(machine_mode mode, rtx model_rtx,
const atomic_ool_names *names);
extern const atomic_ool_names aarch64_ool_swp_names;
diff --git a/gcc/config/aarch64/aarch64.c b/gcc/config/aarch64/aarch64.c
index 5c31ffc52b4..72f60497ef3 100644
--- a/gcc/config/aarch64/aarch64.c
+++ b/gcc/config/aarch64/aarch64.c
@@ -20703,6 +20703,28 @@ aarch64_start_file (void)
default_file_start ();
}
+/* Return true if we can do an atomic load from MEM or an atomic store
+ to MEM using the memory model in CONST_INT MODEL_RTX. */
+
+bool
+aarch64_atomic_load_store_ok_p (rtx mem, rtx model_rtx)
+{
+ enum memmodel model = memmodel_from_int (INTVAL (model_rtx));
+ machine_mode mode = GET_MODE (mem);
+
+ /* Relaxed and consume operations can use normal loads and stores.
+ There are also alternative-base forms of the byte, word and capability
+ atomic loads and stores. */
+ if (is_mm_relaxed (model)
+ || is_mm_consume (model)
+ || mode == QImode
+ || mode == SImode
+ || mode == CADImode)
+ return true;
+
+ return aarch64_normal_base_mem_operand (mem, mode);
+}
+
/* Emit load exclusive. */
static void
diff --git a/gcc/config/aarch64/atomics.md b/gcc/config/aarch64/atomics.md
index 4e1646e2791..0c5d427915c 100644
--- a/gcc/config/aarch64/atomics.md
+++ b/gcc/config/aarch64/atomics.md
@@ -926,13 +926,26 @@
}
)
-(define_insn "atomic_load<mode>"
- [(set (match_operand:ALLIC 0 "register_operand" "=r")
+(define_expand "atomic_load<mode>"
+ [(set (match_operand:ALLIC 0 "register_operand")
(unspec_volatile:ALLIC
- [(match_operand:ALLIC 1 "aarch64_sync_memory_operand" "Q")
+ [(match_operand:ALLIC 1 "aarch64_atomic_load_memory_operand")
(match_operand:SI 2 "const_int_operand")] ;; model
UNSPECV_LDA))]
""
+ {
+ if (!aarch64_atomic_load_store_ok_p (operands[1], operands[2]))
+ FAIL;
+ }
+)
+
+(define_insn "*atomic_load<mode>"
+ [(set (match_operand:ALLIC 0 "register_operand" "=r")
+ (unspec_volatile:ALLIC
+ [(match_operand:ALLIC 1 "aarch64_atomic_load_memory_operand" "Q")
+ (match_operand:SI 2 "const_int_operand")] ;; model
+ UNSPECV_LDA))]
+ "aarch64_atomic_load_store_ok_p (operands[1], operands[2])"
{
enum memmodel model = memmodel_from_int (INTVAL (operands[2]));
if (is_mm_relaxed (model) || is_mm_consume (model) || is_mm_release (model))
@@ -942,6 +955,19 @@
}
)
+(define_expand "atomic_store<mode>"
+ [(set (match_operand:ALLIC 0 "aarch64_rcpc_memory_operand")
+ (unspec_volatile:ALLIC
+ [(match_operand:ALLIC 1 "general_operand")
+ (match_operand:SI 2 "const_int_operand")] ;; model
+ UNSPECV_STL))]
+ ""
+ {
+ if (!aarch64_atomic_load_store_ok_p (operands[0], operands[2]))
+ FAIL;
+ }
+)
+
(define_expand "atomic_storeti"
[(set (match_operand:TI 0 "aarch64_rcpc_memory_operand")
(unspec_volatile:TI
@@ -957,13 +983,13 @@
}
)
-(define_insn "atomic_store<mode>"
+(define_insn "*atomic_store<mode>"
[(set (match_operand:ALLIC 0 "aarch64_rcpc_memory_operand" "=Q,Ust")
(unspec_volatile:ALLIC
[(match_operand:ALLIC 1 "general_operand" "rZ,rZ")
(match_operand:SI 2 "const_int_operand")] ;; model
UNSPECV_STL))]
- ""
+ "aarch64_atomic_load_store_ok_p (operands[0], operands[2])"
{
gcc_assert (which_alternative == 0 || <MODE>mode != CADImode
|| TARGET_CAPABILITY_FAKE);
diff --git a/gcc/config/aarch64/predicates.md b/gcc/config/aarch64/predicates.md
index 97a405c0297..ce4e6ee16cb 100644
--- a/gcc/config/aarch64/predicates.md
+++ b/gcc/config/aarch64/predicates.md
@@ -415,10 +415,17 @@
return false;
})
+(define_predicate "aarch64_plain_base_mem_operand"
+ (and (match_operand 0 "memory_operand")
+ (match_code "reg,subreg" "0")))
+
;; True if the operand is memory reference suitable for a load/store exclusive.
(define_predicate "aarch64_sync_memory_operand"
- (and (match_operand 0 "memory_operand")
- (match_code "reg" "0")))
+ (and (match_operand 0 "aarch64_normal_base_mem_operand")
+ (match_code "reg,subreg" "0")))
+
+(define_predicate "aarch64_atomic_load_memory_operand"
+ (match_operand 0 "aarch64_plain_base_mem_operand"))
(define_predicate "aarch64_9bit_offset_memory_operand"
(and (match_operand 0 "memory_operand")
@@ -449,7 +456,7 @@
(define_predicate "aarch64_rcpc_memory_operand"
(if_then_else (match_test "AARCH64_ISA_RCPC8_4")
(match_operand 0 "aarch64_9bit_offset_memory_operand")
- (match_operand 0 "aarch64_sync_memory_operand")))
+ (match_operand 0 "aarch64_plain_base_mem_operand")))
;; Predicates for parallel expanders based on mode.
(define_special_predicate "vect_par_cnst_hi_half"
diff --git a/gcc/fortran/types.def b/gcc/fortran/types.def
index 820badbf914..566cbff2538 100644
--- a/gcc/fortran/types.def
+++ b/gcc/fortran/types.def
@@ -44,6 +44,13 @@ along with GCC; see the file COPYING3. If not see
This macro describes a pointer type. ENUM is as above; TYPE is
the type pointed to. */
+#ifndef DEF_POINTER_TYPE_C
+#define DEF_POINTER_TYPE_C(NAME, TYPE) \
+ DEF_PRIMITIVE_TYPE (NAME ## _PTR, build_pointer_type (TYPE)) \
+ DEF_PRIMITIVE_TYPE (NAME ## _CAPPTR, \
+ try_building_capability_pointer_type (TYPE))
+#endif
+
DEF_PRIMITIVE_TYPE (BT_VOID, void_type_node)
DEF_PRIMITIVE_TYPE (BT_BOOL,
(*lang_hooks.types.type_for_size) (BOOL_TYPE_SIZE, 1))
@@ -76,14 +83,12 @@ DEF_PRIMITIVE_TYPE (BT_I16, builtin_type_for_size (BITS_PER_UNIT*16, 1))
DEF_PRIMITIVE_TYPE (BT_PTR, ptr_type_node)
DEF_PRIMITIVE_TYPE (BT_CONST_PTR, const_ptr_type_node)
-DEF_PRIMITIVE_TYPE (BT_VOLATILE_PTR,
- build_pointer_type
- (build_qualified_type (void_type_node,
- TYPE_QUAL_VOLATILE)))
-DEF_PRIMITIVE_TYPE (BT_CONST_VOLATILE_PTR,
- build_pointer_type
- (build_qualified_type (void_type_node,
- TYPE_QUAL_VOLATILE|TYPE_QUAL_CONST)))
+DEF_POINTER_TYPE_C (BT_VOLATILE,
+ build_qualified_type (void_type_node,
+ TYPE_QUAL_VOLATILE))
+DEF_POINTER_TYPE_C (BT_CONST_VOLATILE,
+ build_qualified_type (void_type_node,
+ TYPE_QUAL_VOLATILE|TYPE_QUAL_CONST))
DEF_POINTER_TYPE (BT_PTR_LONG, BT_LONG)
DEF_POINTER_TYPE (BT_PTR_ULONGLONG, BT_ULONGLONG)
DEF_POINTER_TYPE (BT_PTR_PTR, BT_PTR)
@@ -96,6 +101,7 @@ DEF_FUNCTION_TYPE_0 (BT_FN_VOID, BT_VOID)
DEF_FUNCTION_TYPE_1 (BT_FN_VOID_PTR, BT_VOID, BT_PTR)
DEF_FUNCTION_TYPE_1 (BT_FN_VOID_PTRPTR, BT_VOID, BT_PTR_PTR)
DEF_FUNCTION_TYPE_1 (BT_FN_VOID_VPTR, BT_VOID, BT_VOLATILE_PTR)
+DEF_FUNCTION_TYPE_1 (BT_FN_VOID_VPTR_C, BT_VOID, BT_VOLATILE_CAPPTR)
DEF_FUNCTION_TYPE_1 (BT_FN_INT_INT, BT_INT, BT_INT)
DEF_FUNCTION_TYPE_1 (BT_FN_UINT_UINT, BT_UINT, BT_UINT)
DEF_FUNCTION_TYPE_1 (BT_FN_PTR_PTR, BT_PTR, BT_PTR)
@@ -112,12 +118,24 @@ FOR_ALL_SYNC_N(DEF_SYNC)
#undef DEF_SYNC
DEF_FUNCTION_TYPE_2 (BT_FN_ICAP_VPTR_ICAPOFF, BT_ICAP, \
BT_VOLATILE_PTR, BT_ICAPOFF)
+#define DEF_SYNC(DTYPE, BT_DTYPE) \
+ DEF_FUNCTION_TYPE_2 (BT_FN_##DTYPE##_VPTR_##DTYPE##_C, BT_DTYPE, \
+ BT_VOLATILE_CAPPTR, BT_DTYPE)
+FOR_ALL_SYNC_N(DEF_SYNC)
+#undef DEF_SYNC
+DEF_FUNCTION_TYPE_2 (BT_FN_ICAP_VPTR_ICAPOFF_C, BT_ICAP, \
+ BT_VOLATILE_CAPPTR, BT_ICAPOFF)
#define DEF_SYNC(DTYPE, BT_DTYPE) \
DEF_FUNCTION_TYPE_2 (BT_FN_##DTYPE##_CONST_VPTR_INT, BT_DTYPE, \
BT_CONST_VOLATILE_PTR, BT_INT)
FOR_ALL_SYNC_N(DEF_SYNC)
#undef DEF_SYNC
+#define DEF_SYNC(DTYPE, BT_DTYPE) \
+ DEF_FUNCTION_TYPE_2 (BT_FN_##DTYPE##_CONST_VPTR_INT##_C, BT_DTYPE, \
+ BT_CONST_VOLATILE_CAPPTR, BT_INT)
+FOR_ALL_SYNC_N(DEF_SYNC)
+#undef DEF_SYNC
DEF_FUNCTION_TYPE_2 (BT_FN_BOOL_LONGPTR_LONGPTR,
BT_BOOL, BT_PTR_LONG, BT_PTR_LONG)
@@ -138,12 +156,22 @@ DEF_POINTER_TYPE (BT_PTR_FN_VOID_PTR_PTR, BT_FN_VOID_PTR_PTR)
BT_VOLATILE_PTR, BT_DTYPE, BT_DTYPE)
FOR_ALL_SYNC_N (DEF_SYNC)
#undef DEF_SYNC
+#define DEF_SYNC(DTYPE, BT_DTYPE) \
+ DEF_FUNCTION_TYPE_3 (BT_FN_BOOL_VPTR_##DTYPE##_##DTYPE##_C, BT_BOOL, \
+ BT_VOLATILE_CAPPTR, BT_DTYPE, BT_DTYPE)
+FOR_ALL_SYNC_N (DEF_SYNC)
+#undef DEF_SYNC
#define DEF_SYNC(DTYPE, BT_DTYPE) \
DEF_FUNCTION_TYPE_3 (BT_FN_##DTYPE##_VPTR_##DTYPE##_##DTYPE, BT_DTYPE, \
BT_VOLATILE_PTR, BT_DTYPE, BT_DTYPE)
FOR_ALL_SYNC_N (DEF_SYNC)
#undef DEF_SYNC
+#define DEF_SYNC(DTYPE, BT_DTYPE) \
+ DEF_FUNCTION_TYPE_3 (BT_FN_##DTYPE##_VPTR_##DTYPE##_##DTYPE##_C, BT_DTYPE, \
+ BT_VOLATILE_CAPPTR, BT_DTYPE, BT_DTYPE)
+FOR_ALL_SYNC_N (DEF_SYNC)
+#undef DEF_SYNC
#define DEF_SYNC(DTYPE, BT_DTYPE) \
DEF_FUNCTION_TYPE_3 (BT_FN_##DTYPE##_VPTR_##DTYPE##_INT, BT_DTYPE, \
@@ -152,12 +180,24 @@ FOR_ALL_SYNC_N (DEF_SYNC)
#undef DEF_SYNC
DEF_FUNCTION_TYPE_3 (BT_FN_ICAP_VPTR_ICAPOFF_INT, BT_ICAP, \
BT_VOLATILE_PTR, BT_ICAPOFF, BT_INT)
+#define DEF_SYNC(DTYPE, BT_DTYPE) \
+ DEF_FUNCTION_TYPE_3 (BT_FN_##DTYPE##_VPTR_##DTYPE##_INT_C, BT_DTYPE, \
+ BT_VOLATILE_CAPPTR, BT_DTYPE, BT_INT)
+FOR_ALL_SYNC_N (DEF_SYNC)
+#undef DEF_SYNC
+DEF_FUNCTION_TYPE_3 (BT_FN_ICAP_VPTR_ICAPOFF_INT_C, BT_ICAP, \
+ BT_VOLATILE_CAPPTR, BT_ICAPOFF, BT_INT)
#define DEF_SYNC(DTYPE, BT_DTYPE) \
DEF_FUNCTION_TYPE_3 (BT_FN_VOID_VPTR_##DTYPE##_INT, BT_VOID, \
BT_VOLATILE_PTR, BT_DTYPE, BT_INT)
FOR_ALL_SYNC_N (DEF_SYNC)
#undef DEF_SYNC
+#define DEF_SYNC(DTYPE, BT_DTYPE) \
+ DEF_FUNCTION_TYPE_3 (BT_FN_VOID_VPTR_##DTYPE##_INT_C, BT_VOID, \
+ BT_VOLATILE_CAPPTR, BT_DTYPE, BT_INT)
+FOR_ALL_SYNC_N (DEF_SYNC)
+#undef DEF_SYNC
DEF_FUNCTION_TYPE_3 (BT_FN_VOID_SIZE_SIZE_PTR, BT_VOID, BT_SIZE, BT_SIZE,
BT_PTR)
@@ -204,6 +244,12 @@ DEF_FUNCTION_TYPE_5 (BT_FN_BOOL_UINT_ULLPTR_ULL_ULLPTR_ULLPTR,
BT_BOOL, BT_INT, BT_INT)
FOR_ALL_SYNC_N (DEF_SYNC)
#undef DEF_SYNC
+#define DEF_SYNC(DTYPE, BT_DTYPE) \
+ DEF_FUNCTION_TYPE_6 (BT_FN_BOOL_VPTR_PTR_##DTYPE##_BOOL_INT_INT_C, \
+ BT_BOOL, BT_VOLATILE_CAPPTR, BT_PTR, BT_DTYPE, \
+ BT_BOOL, BT_INT, BT_INT)
+FOR_ALL_SYNC_N (DEF_SYNC)
+#undef DEF_SYNC
DEF_FUNCTION_TYPE_6 (BT_FN_BOOL_LONG_LONG_LONG_LONG_LONGPTR_LONGPTR,
BT_BOOL, BT_LONG, BT_LONG, BT_LONG, BT_LONG,
@@ -279,3 +325,4 @@ DEF_FUNCTION_TYPE_VAR_6 (BT_FN_VOID_INT_OMPFN_SIZE_PTR_PTR_PTR_VAR,
BT_PTR, BT_PTR, BT_PTR)
#undef FOR_ALL_SYNC_N
+#undef DEF_POINTER_TYPE_C
diff --git a/gcc/optabs.c b/gcc/optabs.c
index e7fd67a2833..8ba22f2ce36 100644
--- a/gcc/optabs.c
+++ b/gcc/optabs.c
@@ -5999,6 +5999,7 @@ expand_compare_and_swap_loop (rtx mem, rtx old_reg, rtx new_reg, rtx seq)
Note that we only do the plain load from memory once. Subsequent
iterations use the value loaded by the compare-and-swap pattern. */
+ rtx_insn *last_insn = get_last_insn ();
label = gen_label_rtx ();
cmp_reg = gen_reg_rtx (mode);
@@ -6013,7 +6014,10 @@ expand_compare_and_swap_loop (rtx mem, rtx old_reg, rtx new_reg, rtx seq)
if (!expand_atomic_compare_and_swap (&success, &oldval, mem, old_reg,
new_reg, false, MEMMODEL_SYNC_SEQ_CST,
MEMMODEL_RELAXED))
- return false;
+ {
+ delete_insns_since (last_insn);
+ return false;
+ }
if (oldval != cmp_reg)
emit_move_insn (cmp_reg, oldval);
@@ -7158,7 +7162,7 @@ expand_atomic_fetch_op (rtx target, rtx mem, rtx val, enum rtx_code code,
OPTAB_LIB_WIDEN);
/* For after, copy the value now. */
- if (!unused_result && after)
+ if (t1 && !unused_result && after)
emit_move_insn (target, t1);
insn = get_insns ();
end_sequence ();
diff --git a/gcc/sync-builtins.def b/gcc/sync-builtins.def
index 483530f1316..2ebc125475c 100644
--- a/gcc/sync-builtins.def
+++ b/gcc/sync-builtins.def
@@ -37,6 +37,11 @@ along with GCC; see the file COPYING3. If not see
DOFF is the non-capability form of DSIZE, so that DOFF is ICAPOFF
when DSIZE is ICAP. */
+/* Like DEF_SYNC_BUILTIN, but define capability and non-capability forms. */
+#define DEF_SYNC_BUILTIN_C(ENUM, NAME, TYPE, ATTRS) \
+ DEF_SYNC_BUILTIN (ENUM, NAME, TYPE, ATTRS) \
+ DEF_SYNC_BUILTIN (ENUM ## _C, NAME "_c", TYPE ## _C, ATTRS)
+
/* Generate DSIZE-specific functions for ENUM and NAME by invoking:
DEF (ENUM', NAME', DSIZE, DOFF)
@@ -58,15 +63,21 @@ along with GCC; see the file COPYING3. If not see
/* Case statements for everything defined by FOR_NONCAP_SYNC_N. */
#define CASE_SYNC_BUILTIN_NONCAP_N(ENUM) \
case ENUM##_1: \
+ case ENUM##_1_C: \
case ENUM##_2: \
+ case ENUM##_2_C: \
case ENUM##_4: \
+ case ENUM##_4_C: \
case ENUM##_8: \
- case ENUM##_16
+ case ENUM##_8_C: \
+ case ENUM##_16: \
+ case ENUM##_16_C
/* Case statements for everything defined by FOR_ALL_SYNC_N. */
#define CASE_SYNC_BUILTIN_ALL_N(ENUM) \
CASE_SYNC_BUILTIN_NONCAP_N (ENUM): \
- case ENUM##_CAPABILITY
+ case ENUM##_CAPABILITY: \
+ case ENUM##_CAPABILITY_C
/* The macros below operate in pairs. The first macro defines functions
for a particular DSIZE and the second macro defines the following:
@@ -78,11 +89,17 @@ along with GCC; see the file COPYING3. If not see
- BUILT_IN_SYNC_FETCH_AND_ADD_N
- BUILT_IN_SYNC_FETCH_AND_ADD_1
+ - BUILT_IN_SYNC_FETCH_AND_ADD_1_C
- BUILT_IN_SYNC_FETCH_AND_ADD_2
+ - BUILT_IN_SYNC_FETCH_AND_ADD_2_C
- BUILT_IN_SYNC_FETCH_AND_ADD_4
+ - BUILT_IN_SYNC_FETCH_AND_ADD_4_C
- BUILT_IN_SYNC_FETCH_AND_ADD_8
+ - BUILT_IN_SYNC_FETCH_AND_ADD_8_C
- BUILT_IN_SYNC_FETCH_AND_ADD_16
+ - BUILT_IN_SYNC_FETCH_AND_ADD_16_C
- BUILT_IN_SYNC_FETCH_AND_ADD_CAPABILITY
+ - BUILT_IN_SYNC_FETCH_AND_ADD_CAPABILITY_C
although not all functions have a CAPABILITY form.
@@ -92,8 +109,8 @@ along with GCC; see the file COPYING3. If not see
and code outside the front ends uses it for that purpose.
Various bits of code rely on the relationship between the enum values.
- For example, adding log2(nbytes) + 1 to an _N function gives (and must
- give) the associated DSIZE function.
+ For example, adding log2(nbytes) * 2 + 1 to an _N function gives
+ (and must give) the associated DSIZE function.
For historical reasons, the functions are inconsistent in whether
the _N function has an "_n" suffix or has no suffix. For example,
@@ -104,8 +121,8 @@ along with GCC; see the file COPYING3. If not see
/* DSIZE NAME (volatile void *ptr, DSIZE val). */
#define DEF_SYNC_BUILTIN_RMW_N(ENUM, NAME, DSIZE, DOFF) \
- DEF_SYNC_BUILTIN (ENUM, NAME, BT_FN_##DSIZE##_VPTR_##DSIZE, \
- ATTR_NOTHROWCALL_LEAF_LIST)
+ DEF_SYNC_BUILTIN_C (ENUM, NAME, BT_FN_##DSIZE##_VPTR_##DSIZE, \
+ ATTR_NOTHROWCALL_LEAF_LIST)
#define DEF_SYNC_BUILTIN_RMW_ALL_N(ENUM, NAME) \
DEF_SYNC_BUILTIN (ENUM##_N, NAME, BT_FN_VOID_VAR, \
@@ -115,8 +132,8 @@ along with GCC; see the file COPYING3. If not see
/* DSIZE NAME (volatile void *ptr, DSIZE val, int memorder). */
#define DEF_SYNC_BUILTIN_RMW_ORDER_N(ENUM, NAME, DSIZE, DOFF) \
- DEF_SYNC_BUILTIN (ENUM, NAME, BT_FN_##DSIZE##_VPTR_##DSIZE##_INT, \
- ATTR_NOTHROWCALL_LEAF_LIST)
+ DEF_SYNC_BUILTIN_C (ENUM, NAME, BT_FN_##DSIZE##_VPTR_##DSIZE##_INT, \
+ ATTR_NOTHROWCALL_LEAF_LIST)
#define DEF_SYNC_BUILTIN_RMW_ORDER_ALL_N(ENUM, NAME, SUFFIX) \
DEF_SYNC_BUILTIN (ENUM##_N, NAME SUFFIX, BT_FN_VOID_VAR, \
@@ -126,8 +143,8 @@ along with GCC; see the file COPYING3. If not see
/* DSIZE NAME (volatile void *ptr, DOFF val). */
#define DEF_SYNC_BUILTIN_RMW_OFF_N(ENUM, NAME, DSIZE, DOFF) \
- DEF_SYNC_BUILTIN (ENUM, NAME, BT_FN_##DSIZE##_VPTR_##DOFF, \
- ATTR_NOTHROWCALL_LEAF_LIST)
+ DEF_SYNC_BUILTIN_C (ENUM, NAME, BT_FN_##DSIZE##_VPTR_##DOFF, \
+ ATTR_NOTHROWCALL_LEAF_LIST)
#define DEF_SYNC_BUILTIN_RMW_OFF_ALL_N(ENUM, NAME) \
DEF_SYNC_BUILTIN (ENUM##_N, NAME, BT_FN_VOID_VAR, \
@@ -137,8 +154,8 @@ along with GCC; see the file COPYING3. If not see
/* DSIZE NAME (volatile void *ptr, DOFF val, int memorder). */
#define DEF_SYNC_BUILTIN_RMW_OFF_ORDER_N(ENUM, NAME, DSIZE, DOFF) \
- DEF_SYNC_BUILTIN (ENUM, NAME, BT_FN_##DSIZE##_VPTR_##DOFF##_INT, \
- ATTR_NOTHROWCALL_LEAF_LIST)
+ DEF_SYNC_BUILTIN_C (ENUM, NAME, BT_FN_##DSIZE##_VPTR_##DOFF##_INT, \
+ ATTR_NOTHROWCALL_LEAF_LIST)
#define DEF_SYNC_BUILTIN_RMW_OFF_ORDER_ALL_N(ENUM, NAME) \
DEF_SYNC_BUILTIN (ENUM##_N, NAME, BT_FN_VOID_VAR, \
@@ -148,8 +165,8 @@ along with GCC; see the file COPYING3. If not see
/* DSIZE NAME (const volatile void *ptr, int memorder). */
#define DEF_SYNC_BUILTIN_LOAD_ORDER_N(ENUM, NAME, DSIZE, DOFF) \
- DEF_SYNC_BUILTIN (ENUM, NAME, BT_FN_##DSIZE##_CONST_VPTR_INT, \
- ATTR_NOTHROWCALL_LEAF_LIST)
+ DEF_SYNC_BUILTIN_C (ENUM, NAME, BT_FN_##DSIZE##_CONST_VPTR_INT, \
+ ATTR_NOTHROWCALL_LEAF_LIST)
#define DEF_SYNC_BUILTIN_LOAD_ORDER_ALL_N(ENUM, NAME, SUFFIX) \
DEF_SYNC_BUILTIN (ENUM##_N, NAME SUFFIX, BT_FN_VOID_VAR, \
@@ -159,8 +176,8 @@ along with GCC; see the file COPYING3. If not see
/* void NAME (volatile void *ptr, DSIZE val, int memorder). */
#define DEF_SYNC_BUILTIN_STORE_ORDER_N(ENUM, NAME, DSIZE, DOFF) \
- DEF_SYNC_BUILTIN (ENUM, NAME, BT_FN_VOID_VPTR_##DSIZE##_INT, \
- ATTR_NOTHROWCALL_LEAF_LIST)
+ DEF_SYNC_BUILTIN_C (ENUM, NAME, BT_FN_VOID_VPTR_##DSIZE##_INT, \
+ ATTR_NOTHROWCALL_LEAF_LIST)
#define DEF_SYNC_BUILTIN_STORE_ORDER_ALL_N(ENUM, NAME, SUFFIX) \
DEF_SYNC_BUILTIN (ENUM##_N, NAME SUFFIX, BT_FN_VOID_VAR, \
@@ -170,7 +187,7 @@ along with GCC; see the file COPYING3. If not see
/* void NAME (volatile void *ptr). */
#define DEF_SYNC_BUILTIN_RELEASE_N(ENUM, NAME, DSIZE, DOFF) \
- DEF_SYNC_BUILTIN (ENUM, NAME, BT_FN_VOID_VPTR, ATTR_NOTHROWCALL_LEAF_LIST)
+ DEF_SYNC_BUILTIN_C (ENUM, NAME, BT_FN_VOID_VPTR, ATTR_NOTHROWCALL_LEAF_LIST)
#define DEF_SYNC_BUILTIN_RELEASE_NONCAP_N(ENUM, NAME) \
DEF_SYNC_BUILTIN (ENUM##_N, NAME, BT_FN_VOID_VAR, \
@@ -180,8 +197,8 @@ along with GCC; see the file COPYING3. If not see
/* bool NAME (volatile void *ptr, DSIZE oldval, DSIZE newval). */
#define DEF_SYNC_BUILTIN_BOOL_CMP_SWAP_N(ENUM, NAME, DSIZE, DOFF) \
- DEF_SYNC_BUILTIN (ENUM, NAME, BT_FN_BOOL_VPTR_##DSIZE##_##DSIZE, \
- ATTR_NOTHROWCALL_LEAF_LIST)
+ DEF_SYNC_BUILTIN_C (ENUM, NAME, BT_FN_BOOL_VPTR_##DSIZE##_##DSIZE, \
+ ATTR_NOTHROWCALL_LEAF_LIST)
#define DEF_SYNC_BUILTIN_BOOL_CMP_SWAP_ALL_N(ENUM, NAME) \
DEF_SYNC_BUILTIN (ENUM##_N, NAME, BT_FN_VOID_VAR, \
@@ -191,8 +208,8 @@ along with GCC; see the file COPYING3. If not see
/* DSIZE NAME (volatile void *ptr, DSIZE oldval, DSIZE newval). */
#define DEF_SYNC_BUILTIN_VAL_CMP_SWAP_N(ENUM, NAME, DSIZE, DOFF) \
- DEF_SYNC_BUILTIN (ENUM, NAME, BT_FN_##DSIZE##_VPTR_##DSIZE##_##DSIZE, \
- ATTR_NOTHROWCALL_LEAF_LIST)
+ DEF_SYNC_BUILTIN_C (ENUM, NAME, BT_FN_##DSIZE##_VPTR_##DSIZE##_##DSIZE, \
+ ATTR_NOTHROWCALL_LEAF_LIST)
#define DEF_SYNC_BUILTIN_VAL_CMP_SWAP_ALL_N(ENUM, NAME) \
DEF_SYNC_BUILTIN (ENUM##_N, NAME, BT_FN_VOID_VAR, \
@@ -203,8 +220,8 @@ along with GCC; see the file COPYING3. If not see
int success_memorder, int failure_memorder). */
#define DEF_SYNC_BUILTIN_CMP_XCHG_ORDER_N(ENUM, NAME, DSIZE, DOFF) \
- DEF_SYNC_BUILTIN (ENUM, NAME, BT_FN_BOOL_VPTR_PTR_##DSIZE##_BOOL_INT_INT, \
- ATTR_NOTHROWCALL_LEAF_LIST)
+ DEF_SYNC_BUILTIN_C (ENUM, NAME, BT_FN_BOOL_VPTR_PTR_##DSIZE##_BOOL_INT_INT, \
+ ATTR_NOTHROWCALL_LEAF_LIST)
#define DEF_SYNC_BUILTIN_CMP_XCHG_ORDER_ALL_N(ENUM, NAME, SUFFIX) \
DEF_SYNC_BUILTIN (ENUM##_N, NAME SUFFIX, BT_FN_VOID_VAR, \
diff --git a/gcc/testsuite/gcc.target/aarch64/morello/alt-base-atomic-compare-exchange-1.c b/gcc/testsuite/gcc.target/aarch64/morello/alt-base-atomic-compare-exchange-1.c
new file mode 100644
index 00000000000..24088673306
--- /dev/null
+++ b/gcc/testsuite/gcc.target/aarch64/morello/alt-base-atomic-compare-exchange-1.c
@@ -0,0 +1,59 @@
+/* { dg-do assemble } */
+/* { dg-additional-options "-foptimize-sibling-calls -save-temps" } */
+/* { dg-skip-if "" { *-*-* } { "-mabi=purecap" "-mfake-capability" } { "" } } */
+
+#include <stdint.h>
+
+typedef __uint128_t uint128;
+typedef __intcap intcap;
+
+#define TEST_SIZE(TYPE, SIZE) \
+ TYPE \
+ test_##TYPE##_relaxed (TYPE *__capability ptr, TYPE *expected, \
+ TYPE desired) \
+ { \
+ return __atomic_compare_exchange_##SIZE##_c (ptr, expected, desired, \
+ 0, __ATOMIC_RELAXED, \
+ __ATOMIC_RELAXED); \
+ } \
+ \
+ TYPE \
+ test_##TYPE##_acquire (TYPE *__capability ptr, TYPE *expected, \
+ TYPE desired) \
+ { \
+ return __atomic_compare_exchange_##SIZE##_c (ptr, expected, desired, \
+ 0, __ATOMIC_ACQUIRE, \
+ __ATOMIC_RELAXED); \
+ } \
+ \
+ TYPE \
+ test_##TYPE##_release (TYPE *__capability ptr, TYPE *expected, \
+ TYPE desired) \
+ { \
+ return __atomic_compare_exchange_##SIZE##_c (ptr, expected, desired, \
+ 0, __ATOMIC_RELEASE, \
+ __ATOMIC_RELAXED); \
+ } \
+ \
+ TYPE \
+ test_##TYPE##_seq_cst (TYPE *__capability ptr, TYPE *expected, \
+ TYPE desired) \
+ { \
+ return __atomic_compare_exchange_##SIZE##_c (ptr, expected, desired, \
+ 0, __ATOMIC_SEQ_CST, \
+ __ATOMIC_SEQ_CST); \
+ }
+
+TEST_SIZE (uint8_t, 1)
+TEST_SIZE (uint16_t, 2)
+TEST_SIZE (uint32_t, 4)
+TEST_SIZE (uint64_t, 8)
+TEST_SIZE (uint128, 16)
+TEST_SIZE (intcap, capability)
+
+/* { dg-final { scan-assembler-times {\t__atomic_compare_exchange_1_c} 4 } } */
+/* { dg-final { scan-assembler-times {\t__atomic_compare_exchange_2_c} 4 } } */
+/* { dg-final { scan-assembler-times {\t__atomic_compare_exchange_4_c} 4 } } */
+/* { dg-final { scan-assembler-times {\t__atomic_compare_exchange_8_c} 4 } } */
+/* { dg-final { scan-assembler-times {\t__atomic_compare_exchange_16_c} 4 } } */
+/* { dg-final { scan-assembler-times {\t__atomic_compare_exchange_capability_c} 4 } } */
diff --git a/gcc/testsuite/gcc.target/aarch64/morello/alt-base-atomic-compare-exchange-2.c b/gcc/testsuite/gcc.target/aarch64/morello/alt-base-atomic-compare-exchange-2.c
new file mode 100644
index 00000000000..f33125a5393
--- /dev/null
+++ b/gcc/testsuite/gcc.target/aarch64/morello/alt-base-atomic-compare-exchange-2.c
@@ -0,0 +1,87 @@
+/* { dg-do assemble } */
+/* { dg-additional-options "-foptimize-sibling-calls -save-temps" } */
+/* { dg-final { check-function-bodies "**" "" { {-O[123s]} } } } */
+/* { dg-skip-if "" { *-*-* } { "-mabi=purecap" "-mfake-capability" } { "" } } */
+
+#include <stdint.h>
+
+typedef __uint128_t uint128;
+typedef __intcap intcap;
+
+#define TEST_SIZE(TYPE, SIZE) \
+ TYPE \
+ test_##TYPE##_relaxed (TYPE *__capability ptr, \
+ TYPE *__capability expected, TYPE desired) \
+ { \
+ return __atomic_compare_exchange_##SIZE ((TYPE *) ptr, expected, \
+ desired, 0, \
+ __ATOMIC_RELAXED, \
+ __ATOMIC_RELAXED); \
+ } \
+ \
+ TYPE \
+ test_##TYPE##_acquire (TYPE *__capability ptr, \
+ TYPE *__capability expected, TYPE desired) \
+ { \
+ return __atomic_compare_exchange_##SIZE ((TYPE *) ptr, expected, \
+ desired, 0, \
+ __ATOMIC_ACQUIRE, \
+ __ATOMIC_RELAXED); \
+ } \
+ \
+ TYPE \
+ test_##TYPE##_release (TYPE *__capability ptr, \
+ TYPE *__capability expected, TYPE desired) \
+ { \
+ return __atomic_compare_exchange_##SIZE ((TYPE *) ptr, expected, \
+ desired, 0, \
+ __ATOMIC_RELEASE, \
+ __ATOMIC_RELAXED); \
+ } \
+ \
+ TYPE \
+ test_##TYPE##_seq_cst (TYPE *__capability ptr, \
+ TYPE *__capability expected, TYPE desired) \
+ { \
+ return __atomic_compare_exchange_##SIZE ((TYPE *) ptr, expected, \
+ desired, 0, \
+ __ATOMIC_SEQ_CST, \
+ __ATOMIC_SEQ_CST); \
+ }
+
+/*
+** test_uint8_t_relaxed:
+** ...
+** casb .*
+** ...
+*/
+
+/*
+** test_uint8_t_acquire:
+** ...
+** casab .*
+** ...
+*/
+
+/*
+** test_uint8_t_release:
+** ...
+** caslb .*
+** ...
+*/
+
+/*
+** test_uint8_t_seq_cst:
+** ...
+** casalb .*
+** ...
+*/
+TEST_SIZE (uint8_t, 1)
+
+/* Don't match the rest. The matches above are mostly to make sure
+ that there are no typos in the function names. */
+TEST_SIZE (uint16_t, 2)
+TEST_SIZE (uint32_t, 4)
+TEST_SIZE (uint64_t, 8)
+TEST_SIZE (uint128, 16)
+TEST_SIZE (intcap, capability)
diff --git a/gcc/testsuite/gcc.target/aarch64/morello/alt-base-atomic-exchange-1.c b/gcc/testsuite/gcc.target/aarch64/morello/alt-base-atomic-exchange-1.c
new file mode 100644
index 00000000000..23c1f3dcbf3
--- /dev/null
+++ b/gcc/testsuite/gcc.target/aarch64/morello/alt-base-atomic-exchange-1.c
@@ -0,0 +1,53 @@
+/* { dg-do assemble } */
+/* { dg-additional-options "-foptimize-sibling-calls -save-temps" } */
+/* { dg-skip-if "" { *-*-* } { "-mabi=purecap" "-mfake-capability" } { "" } } */
+
+#include <stdint.h>
+
+typedef __uint128_t uint128;
+typedef __intcap intcap;
+
+#define TEST_SIZE(TYPE, SIZE) \
+ TYPE \
+ test_##TYPE##_relaxed (TYPE *__capability ptr, TYPE val) \
+ { \
+ return __atomic_exchange_##SIZE##_c (ptr, val, __ATOMIC_RELAXED); \
+ } \
+ \
+ TYPE \
+ test_##TYPE##_acquire (TYPE *__capability ptr, TYPE val) \
+ { \
+ return __atomic_exchange_##SIZE##_c (ptr, val, __ATOMIC_ACQUIRE); \
+ } \
+ \
+ TYPE \
+ test_##TYPE##_release (TYPE *__capability ptr, TYPE val) \
+ { \
+ return __atomic_exchange_##SIZE##_c (ptr, val, __ATOMIC_RELEASE); \
+ } \
+ \
+ TYPE \
+ test_##TYPE##_acq_rel (TYPE *__capability ptr, TYPE val) \
+ { \
+ return __atomic_exchange_##SIZE##_c (ptr, val, __ATOMIC_ACQ_REL); \
+ } \
+ \
+ TYPE \
+ test_##TYPE##_seq_cst (TYPE *__capability ptr, TYPE val) \
+ { \
+ return __atomic_exchange_##SIZE##_c (ptr, val, __ATOMIC_SEQ_CST); \
+ }
+
+TEST_SIZE (uint8_t, 1)
+TEST_SIZE (uint16_t, 2)
+TEST_SIZE (uint32_t, 4)
+TEST_SIZE (uint64_t, 8)
+TEST_SIZE (uint128, 16)
+TEST_SIZE (intcap, capability)
+
+/* { dg-final { scan-assembler-times {\t__atomic_exchange_1_c} 5 } } */
+/* { dg-final { scan-assembler-times {\t__atomic_exchange_2_c} 5 } } */
+/* { dg-final { scan-assembler-times {\t__atomic_exchange_4_c} 5 } } */
+/* { dg-final { scan-assembler-times {\t__atomic_exchange_8_c} 5 } } */
+/* { dg-final { scan-assembler-times {\t__atomic_exchange_16_c} 5 } } */
+/* { dg-final { scan-assembler-times {\t__atomic_exchange_capability_c} 5 } } */
diff --git a/gcc/testsuite/gcc.target/aarch64/morello/alt-base-atomic-exchange-2.c b/gcc/testsuite/gcc.target/aarch64/morello/alt-base-atomic-exchange-2.c
new file mode 100644
index 00000000000..9d0ac46bca4
--- /dev/null
+++ b/gcc/testsuite/gcc.target/aarch64/morello/alt-base-atomic-exchange-2.c
@@ -0,0 +1,79 @@
+/* { dg-do assemble } */
+/* { dg-additional-options "-foptimize-sibling-calls -save-temps" } */
+/* { dg-final { check-function-bodies "**" "" { {-O[123s]} } } } */
+/* { dg-skip-if "" { *-*-* } { "-mabi=purecap" "-mfake-capability" } { "" } } */
+
+#include <stdint.h>
+
+typedef __uint128_t uint128;
+typedef __intcap intcap;
+
+#define TEST_SIZE(TYPE, SIZE) \
+ TYPE \
+ test_##TYPE##_relaxed (TYPE *__capability ptr, TYPE val) \
+ { \
+ return __atomic_exchange_##SIZE ((TYPE *) ptr, val, __ATOMIC_RELAXED); \
+ } \
+ \
+ TYPE \
+ test_##TYPE##_acquire (TYPE *__capability ptr, TYPE val) \
+ { \
+ return __atomic_exchange_##SIZE ((TYPE *) ptr, val, __ATOMIC_ACQUIRE); \
+ } \
+ \
+ TYPE \
+ test_##TYPE##_release (TYPE *__capability ptr, TYPE val) \
+ { \
+ return __atomic_exchange_##SIZE ((TYPE *) ptr, val, __ATOMIC_RELEASE); \
+ } \
+ \
+ TYPE \
+ test_##TYPE##_acq_rel (TYPE *__capability ptr, TYPE val) \
+ { \
+ return __atomic_exchange_##SIZE ((TYPE *) ptr, val, __ATOMIC_ACQ_REL); \
+ } \
+ \
+ TYPE \
+ test_##TYPE##_seq_cst (TYPE *__capability ptr, TYPE val) \
+ { \
+ return __atomic_exchange_##SIZE ((TYPE *) ptr, val, __ATOMIC_SEQ_CST); \
+ }
+
+/*
+** test_uint8_t_relaxed:
+** swpb w1, w0, \[x0\]
+** ret
+*/
+
+/*
+** test_uint8_t_acquire:
+** swpab w1, w0, \[x0\]
+** ret
+*/
+
+/*
+** test_uint8_t_release:
+** swplb w1, w0, \[x0\]
+** ret
+*/
+
+/*
+** test_uint8_t_acq_rel:
+** swpalb w1, w0, \[x0\]
+** ret
+*/
+
+/*
+** test_uint8_t_seq_cst:
+** swpalb w1, w0, \[x0\]
+** ret
+*/
+TEST_SIZE (uint8_t, 1)
+
+/* Don't match the rest. The matches above are mostly to make sure
+ that there are no typos in the function names. */
+TEST_SIZE (uint16_t, 2)
+TEST_SIZE (uint32_t, 4)
+TEST_SIZE (uint64_t, 8)
+TEST_SIZE (uint128, 16)
+TEST_SIZE (intcap, capability)
diff --git a/gcc/testsuite/gcc.target/aarch64/morello/alt-base-atomic-load-1.c b/gcc/testsuite/gcc.target/aarch64/morello/alt-base-atomic-load-1.c
new file mode 100644
index 00000000000..914ed19b629
--- /dev/null
+++ b/gcc/testsuite/gcc.target/aarch64/morello/alt-base-atomic-load-1.c
@@ -0,0 +1,290 @@
+/* { dg-do assemble } */
+/* { dg-additional-options "-foptimize-sibling-calls -save-temps" } */
+/* { dg-final { check-function-bodies "**" "" { {-O[123s]} } } } */
+/* { dg-skip-if "" { *-*-* } { "-mabi=purecap" "-mfake-capability" } { "" } } */
+
+#include <stdint.h>
+
+typedef __uint128_t uint128;
+typedef __intcap intcap;
+
+#define TEST_SIZE(TYPE, SIZE) \
+ TYPE \
+ test_##TYPE (TYPE *__capability ptr) \
+ { \
+ return __atomic_load_##SIZE##_c (ptr, __ATOMIC_ACQUIRE); \
+ } \
+ \
+ TYPE \
+ test_##TYPE##_offset (TYPE *__capability ptr) \
+ { \
+ return __atomic_load_##SIZE##_c (ptr + 1, __ATOMIC_ACQUIRE); \
+ } \
+ \
+ TYPE \
+ test_##TYPE##_index (TYPE *__capability ptr, int index) \
+ { \
+ return __atomic_load_##SIZE##_c (ptr + index, __ATOMIC_ACQUIRE); \
+ } \
+ \
+ TYPE \
+ test_##TYPE##_convert (TYPE *__capability ptr, int index) \
+ { \
+ return __atomic_load_##SIZE ((TYPE *) ptr, __ATOMIC_ACQUIRE); \
+ } \
+ \
+ TYPE \
+ test_##TYPE##_relaxed (TYPE *__capability ptr, int index) \
+ { \
+ return __atomic_load_##SIZE##_c (ptr, __ATOMIC_RELAXED); \
+ } \
+ \
+ TYPE \
+ test_##TYPE##_consume (TYPE *__capability ptr, int index) \
+ { \
+ return __atomic_load_##SIZE##_c (ptr, __ATOMIC_CONSUME); \
+ } \
+ \
+ TYPE \
+ test_##TYPE##_seq_cst (TYPE *__capability ptr, int index) \
+ { \
+ return __atomic_load_##SIZE##_c (ptr, __ATOMIC_SEQ_CST); \
+ }
+
+/*
+** test_uint8_t:
+** ldarb w0, \[c0\]
+** ret
+*/
+
+/*
+** test_uint8_t_offset:
+** add (c[0-9]+), c0, #?1
+** ldarb w0, \[\1\]
+** ret
+*/
+
+/*
+** test_uint8_t_index:
+** add (c[0-9]+), c0, w1, sxtw
+** ldarb w0, \[\1\]
+** ret
+*/
+
+/*
+** test_uint8_t_convert:
+** ldarb w0, \[x0\]
+** ret
+*/
+
+/*
+** test_uint8_t_relaxed:
+** ldrb w0, \[c0\]
+** ret
+*/
+
+/*
+** test_uint8_t_consume:
+** ldarb w0, \[c0\]
+** ret
+*/
+
+/*
+** test_uint8_t_seq_cst:
+** ldarb w0, \[c0\]
+** ret
+*/
+TEST_SIZE (uint8_t, 1)
+
+/*
+** test_uint16_t:
+** ldrh w0, \[c0\]
+** dmb ishld
+** ret
+*/
+
+/*
+** test_uint16_t_offset:
+** ldrh w0, \[c0, #?2\]
+** dmb ishld
+** ret
+*/
+
+/* test_uint16_t_index not matched. */
+
+/*
+** test_uint16_t_convert:
+** ldarh w0, \[x0\]
+** ret
+*/
+
+/*
+** test_uint16_t_relaxed:
+** ldrh w0, \[c0\]
+** ret
+*/
+
+/*
+** test_uint16_t_consume:
+** ldrh w0, \[c0\]
+** dmb ishld
+** ret
+*/
+
+/*
+** test_uint16_t_seq_cst:
+** dmb ish
+** ldrh w0, \[c0\]
+** dmb ish
+** ret
+*/
+TEST_SIZE (uint16_t, 2)
+
+/*
+** test_uint32_t:
+** ldar w0, \[c0\]
+** ret
+*/
+
+/*
+** test_uint32_t_offset:
+** add (c[0-9]+), c0, #?4
+** ldar w0, \[\1\]
+** ret
+*/
+
+/*
+** test_uint32_t_index:
+** add (c[0-9]+), c0, w1, sxtw #?2
+** ldar w0, \[\1\]
+** ret
+*/
+
+/*
+** test_uint32_t_convert:
+** ldar w0, \[x0\]
+** ret
+*/
+
+/*
+** test_uint32_t_relaxed:
+** ldr w0, \[c0\]
+** ret
+*/
+
+/*
+** test_uint32_t_consume:
+** ldar w0, \[c0\]
+** ret
+*/
+
+/*
+** test_uint32_t_seq_cst:
+** ldar w0, \[c0\]
+** ret
+*/
+TEST_SIZE (uint32_t, 4)
+
+/*
+** test_uint64_t:
+** ldr x0, \[c0\]
+** dmb ishld
+** ret
+*/
+
+/*
+** test_uint64_t_offset:
+** ldr x0, \[c0, #?8\]
+** dmb ishld
+** ret
+*/
+
+/* test_uint64_t_index not matched. */
+
+/*
+** test_uint64_t_convert:
+** ldar x0, \[x0\]
+** ret
+*/
+
+/*
+** test_uint64_t_relaxed:
+** ldr x0, \[c0\]
+** ret
+*/
+
+/*
+** test_uint64_t_consume:
+** ldr x0, \[c0\]
+** dmb ishld
+** ret
+*/
+
+/*
+** test_uint64_t_seq_cst:
+** dmb ish
+** ldr x0, \[c0\]
+** dmb ish
+** ret
+*/
+TEST_SIZE (uint64_t, 8)
+
+/*
+** test_uint128:
+** mov w1, #?2
+** b __atomic_load_16_c
+*/
+
+/*
+** test_uint128_convert:
+** mov w1, #?2
+** b __atomic_load_16
+*/
+
+/* Others test_uint128_t not matched. */
+TEST_SIZE (uint128, 16)
+
+/*
+** test_intcap:
+** ldar c0, \[c0\]
+** ret
+*/
+
+/*
+** test_intcap_offset:
+** add (c[0-9]+), c0, #?16
+** ldar c0, \[\1\]
+** ret
+*/
+
+/*
+** test_intcap_index:
+** add (c[0-9]+), c0, w1, sxtw #?4
+** ldar c0, \[\1\]
+** ret
+*/
+
+/*
+** test_intcap_convert:
+** ldar c0, \[x0\]
+** ret
+*/
+
+/*
+** test_intcap_relaxed:
+** ldr c0, \[c0\]
+** ret
+*/
+
+/*
+** test_intcap_consume:
+** ldar c0, \[c0\]
+** ret
+*/
+
+/*
+** test_intcap_seq_cst:
+** ldar c0, \[c0\]
+** ret
+*/
+TEST_SIZE (intcap, capability)
diff --git a/gcc/testsuite/gcc.target/aarch64/morello/alt-base-atomic-operation-1.c b/gcc/testsuite/gcc.target/aarch64/morello/alt-base-atomic-operation-1.c
new file mode 100644
index 00000000000..f42a8cbff67
--- /dev/null
+++ b/gcc/testsuite/gcc.target/aarch64/morello/alt-base-atomic-operation-1.c
@@ -0,0 +1,499 @@
+/* { dg-do assemble } */
+/* { dg-additional-options "-foptimize-sibling-calls -save-temps" } */
+/* { dg-final { check-function-bodies "**" "" { {-O[123s]} } } } */
+/* { dg-skip-if "" { *-*-* } { "-mabi=purecap" "-mfake-capability" } { "" } } */
+
+#include <stdint.h>
+
+typedef __uint128_t uint128;
+typedef __intcap intcap;
+
+#define TEST_OPERATION(TYPE, SIZE, OPERATION) \
+ TYPE \
+ test_##TYPE##_fetch_##OPERATION (TYPE *__capability ptr, TYPE val) \
+ { \
+ return __atomic_fetch_##OPERATION##_##SIZE##_c (ptr, val, \
+ __ATOMIC_RELAXED); \
+ } \
+ \
+ TYPE \
+ test_##TYPE##_##OPERATION##_fetch (TYPE *__capability ptr, TYPE val) \
+ { \
+ return __atomic_##OPERATION##_fetch_##SIZE##_c (ptr, val, \
+ __ATOMIC_RELAXED); \
+ }
+
+#define TEST_SIZE(TYPE, SIZE) \
+ TEST_OPERATION (TYPE, SIZE, add) \
+ TEST_OPERATION (TYPE, SIZE, sub) \
+ TEST_OPERATION (TYPE, SIZE, and) \
+ TEST_OPERATION (TYPE, SIZE, nand) \
+ TEST_OPERATION (TYPE, SIZE, or) \
+ TEST_OPERATION (TYPE, SIZE, xor)
+
+/*
+** test_uint8_t_fetch_add:
+** mov w2, #?0
+** b __atomic_fetch_add_1_c
+*/
+
+/*
+** test_uint8_t_fetch_sub:
+** mov w2, #?0
+** b __atomic_fetch_sub_1_c
+*/
+
+/*
+** test_uint8_t_fetch_and:
+** mov w2, #?0
+** b __atomic_fetch_and_1_c
+*/
+
+/*
+** test_uint8_t_fetch_nand:
+** mov w2, #?0
+** b __atomic_fetch_nand_1_c
+*/
+
+/*
+** test_uint8_t_fetch_or:
+** mov w2, #?0
+** b __atomic_fetch_or_1_c
+*/
+
+/*
+** test_uint8_t_fetch_xor:
+** mov w2, #?0
+** b __atomic_fetch_xor_1_c
+*/
+
+/*
+** test_uint8_t_add_fetch:
+** ...
+** bl __atomic_fetch_add_1_c
+** ...
+** add w0, (w[0-9]+, w0|w0, w[0-9]+)
+** ...
+*/
+
+/*
+** test_uint8_t_sub_fetch:
+** ...
+** bl __atomic_fetch_sub_1_c
+** ...
+** sub w0, (w[0-9]+, w0|w0, w[0-9]+)
+** ...
+*/
+
+/*
+** test_uint8_t_and_fetch:
+** ...
+** bl __atomic_fetch_and_1_c
+** ...
+** and w0, (w[0-9]+, w0|w0, w[0-9]+)
+** ...
+*/
+
+/*
+** test_uint8_t_nand_fetch:
+** ...
+** bl __atomic_fetch_nand_1_c
+** ...
+** and (w[0-9]+), (w[0-9]+, w0|w0, w[0-9]+)
+** mvn w0, \1
+** ...
+*/
+
+/*
+** test_uint8_t_or_fetch:
+** ...
+** bl __atomic_fetch_or_1_c
+** ...
+** orr w0, (w[0-9]+, w0|w0, w[0-9]+)
+** ...
+*/
+
+/*
+** test_uint8_t_xor_fetch:
+** ...
+** bl __atomic_fetch_xor_1_c
+** ...
+** eor w0, (w[0-9]+, w0|w0, w[0-9]+)
+** ...
+*/
+TEST_SIZE (uint8_t, 1)
+
+/*
+** test_uint16_t_fetch_add:
+** mov w2, #?0
+** b __atomic_fetch_add_2_c
+*/
+
+/*
+** test_uint16_t_fetch_sub:
+** mov w2, #?0
+** b __atomic_fetch_sub_2_c
+*/
+
+/*
+** test_uint16_t_fetch_and:
+** mov w2, #?0
+** b __atomic_fetch_and_2_c
+*/
+
+/*
+** test_uint16_t_fetch_nand:
+** mov w2, #?0
+** b __atomic_fetch_nand_2_c
+*/
+
+/*
+** test_uint16_t_fetch_or:
+** mov w2, #?0
+** b __atomic_fetch_or_2_c
+*/
+
+/*
+** test_uint16_t_fetch_xor:
+** mov w2, #?0
+** b __atomic_fetch_xor_2_c
+*/
+
+/*
+** test_uint16_t_add_fetch:
+** ...
+** bl __atomic_fetch_add_2_c
+** ...
+** add w0, (w[0-9]+, w0|w0, w[0-9]+)
+** ...
+*/
+
+/*
+** test_uint16_t_sub_fetch:
+** ...
+** bl __atomic_fetch_sub_2_c
+** ...
+** sub w0, (w[0-9]+, w0|w0, w[0-9]+)
+** ...
+*/
+
+/*
+** test_uint16_t_and_fetch:
+** ...
+** bl __atomic_fetch_and_2_c
+** ...
+** and w0, (w[0-9]+, w0|w0, w[0-9]+)
+** ...
+*/
+
+/*
+** test_uint16_t_nand_fetch:
+** ...
+** bl __atomic_fetch_nand_2_c
+** ...
+** and (w[0-9]+), (w[0-9]+, w0|w0, w[0-9]+)
+** mvn w0, \1
+** ...
+*/
+
+/*
+** test_uint16_t_or_fetch:
+** ...
+** bl __atomic_fetch_or_2_c
+** ...
+** orr w0, (w[0-9]+, w0|w0, w[0-9]+)
+** ...
+*/
+
+/*
+** test_uint16_t_xor_fetch:
+** ...
+** bl __atomic_fetch_xor_2_c
+** ...
+** eor w0, (w[0-9]+, w0|w0, w[0-9]+)
+** ...
+*/
+TEST_SIZE (uint16_t, 2)
+
+/*
+** test_uint32_t_fetch_add:
+** mov w2, #?0
+** b __atomic_fetch_add_4_c
+*/
+
+/*
+** test_uint32_t_fetch_sub:
+** mov w2, #?0
+** b __atomic_fetch_sub_4_c
+*/
+
+/*
+** test_uint32_t_fetch_and:
+** mov w2, #?0
+** b __atomic_fetch_and_4_c
+*/
+
+/*
+** test_uint32_t_fetch_nand:
+** mov w2, #?0
+** b __atomic_fetch_nand_4_c
+*/
+
+/*
+** test_uint32_t_fetch_or:
+** mov w2, #?0
+** b __atomic_fetch_or_4_c
+*/
+
+/*
+** test_uint32_t_fetch_xor:
+** mov w2, #?0
+** b __atomic_fetch_xor_4_c
+*/
+
+/*
+** test_uint32_t_add_fetch:
+** ...
+** bl __atomic_fetch_add_4_c
+** ...
+** add w0, (w[0-9]+, w0|w0, w[0-9]+)
+** ...
+*/
+
+/*
+** test_uint32_t_sub_fetch:
+** ...
+** bl __atomic_fetch_sub_4_c
+** ...
+** sub w0, (w[0-9]+, w0|w0, w[0-9]+)
+** ...
+*/
+
+/*
+** test_uint32_t_and_fetch:
+** ...
+** bl __atomic_fetch_and_4_c
+** ...
+** and w0, (w[0-9]+, w0|w0, w[0-9]+)
+** ...
+*/
+
+/*
+** test_uint32_t_nand_fetch:
+** ...
+** bl __atomic_fetch_nand_4_c
+** ...
+** and (w[0-9]+), (w[0-9]+, w0|w0, w[0-9]+)
+** mvn w0, \1
+** ...
+*/
+
+/*
+** test_uint32_t_or_fetch:
+** ...
+** bl __atomic_fetch_or_4_c
+** ...
+** orr w0, (w[0-9]+, w0|w0, w[0-9]+)
+** ...
+*/
+
+/*
+** test_uint32_t_xor_fetch:
+** ...
+** bl __atomic_fetch_xor_4_c
+** ...
+** eor w0, (w[0-9]+, w0|w0, w[0-9]+)
+** ...
+*/
+TEST_SIZE (uint32_t, 4)
+
+/*
+** test_uint64_t_fetch_add:
+** mov w2, #?0
+** b __atomic_fetch_add_8_c
+*/
+
+/*
+** test_uint64_t_fetch_sub:
+** mov w2, #?0
+** b __atomic_fetch_sub_8_c
+*/
+
+/*
+** test_uint64_t_fetch_and:
+** mov w2, #?0
+** b __atomic_fetch_and_8_c
+*/
+
+/*
+** test_uint64_t_fetch_nand:
+** mov w2, #?0
+** b __atomic_fetch_nand_8_c
+*/
+
+/*
+** test_uint64_t_fetch_or:
+** mov w2, #?0
+** b __atomic_fetch_or_8_c
+*/
+
+/*
+** test_uint64_t_fetch_xor:
+** mov w2, #?0
+** b __atomic_fetch_xor_8_c
+*/
+
+/*
+** test_uint64_t_add_fetch:
+** ...
+** bl __atomic_fetch_add_8_c
+** ...
+** add x0, (x[0-9]+, x0|x0, x[0-9]+)
+** ...
+*/
+
+/*
+** test_uint64_t_sub_fetch:
+** ...
+** bl __atomic_fetch_sub_8_c
+** ...
+** sub x0, (x[0-9]+, x0|x0, x[0-9]+)
+** ...
+*/
+
+/*
+** test_uint64_t_and_fetch:
+** ...
+** bl __atomic_fetch_and_8_c
+** ...
+** and x0, (x[0-9]+, x0|x0, x[0-9]+)
+** ...
+*/
+
+/*
+** test_uint64_t_nand_fetch:
+** ...
+** bl __atomic_fetch_nand_8_c
+** ...
+** and (x[0-9]+), (x[0-9]+, x0|x0, x[0-9]+)
+** mvn x0, \1
+** ...
+*/
+
+/*
+** test_uint64_t_or_fetch:
+** ...
+** bl __atomic_fetch_or_8_c
+** ...
+** orr x0, (x[0-9]+, x0|x0, x[0-9]+)
+** ...
+*/
+
+/*
+** test_uint64_t_xor_fetch:
+** ...
+** bl __atomic_fetch_xor_8_c
+** ...
+** eor x0, (x[0-9]+, x0|x0, x[0-9]+)
+** ...
+*/
+TEST_SIZE (uint64_t, 8)
+
+TEST_SIZE (uint128, 16)
+
+/*
+** test_intcap_fetch_add:
+** mov w2, #?0
+** b __atomic_fetch_add_capability_c
+*/
+
+/*
+** test_intcap_fetch_sub:
+** mov w2, #?0
+** b __atomic_fetch_sub_capability_c
+*/
+
+/*
+** test_intcap_fetch_and:
+** mov w2, #?0
+** b __atomic_fetch_and_capability_c
+*/
+
+/*
+** test_intcap_fetch_nand:
+** mov w2, #?0
+** b __atomic_fetch_nand_capability_c
+*/
+
+/*
+** test_intcap_fetch_or:
+** mov w2, #?0
+** b __atomic_fetch_or_capability_c
+*/
+
+/*
+** test_intcap_fetch_xor:
+** mov w2, #?0
+** b __atomic_fetch_xor_capability_c
+*/
+
+/*
+** test_intcap_add_fetch:
+** ...
+** bl __atomic_fetch_add_capability_c
+** ...
+** add c0, c0, x[0-9]+
+** ...
+*/
+
+/*
+** test_intcap_sub_fetch:
+** ...
+** bl __atomic_fetch_sub_capability_c
+** ...
+** sub (x[0-9]+), (x[0-9]+, x0|x0, x[0-9]+)
+** scvalue c0, c0, \1
+** ...
+*/
+
+/*
+** test_intcap_and_fetch:
+** ...
+** bl __atomic_fetch_and_capability_c
+** ...
+** and (x[0-9]+), (x[0-9]+, x0|x0, x[0-9]+)
+** scvalue c0, c0, \1
+** ...
+*/
+
+/*
+** test_intcap_nand_fetch:
+** ...
+** bl __atomic_fetch_nand_capability_c
+** ...
+** and (x[0-9]+), (x[0-9]+, x0|x0, x[0-9]+)
+** mvn (x[0-9]+), \1
+** scvalue c0, c0, \3
+** ...
+*/
+
+/*
+** test_intcap_or_fetch:
+** ...
+** bl __atomic_fetch_or_capability_c
+** ...
+** orr (x[0-9]+), (x[0-9]+, x0|x0, x[0-9]+)
+** scvalue c0, c0, \1
+** ...
+*/
+
+/*
+** test_intcap_xor_fetch:
+** ...
+** bl __atomic_fetch_xor_capability_c
+** ...
+** eor (x[0-9]+), (x[0-9]+, x0|x0, x[0-9]+)
+** scvalue c0, c0, \1
+** ...
+*/
+TEST_SIZE (intcap, capability)
diff --git a/gcc/testsuite/gcc.target/aarch64/morello/alt-base-atomic-operation-2.c b/gcc/testsuite/gcc.target/aarch64/morello/alt-base-atomic-operation-2.c
new file mode 100644
index 00000000000..af0a9d08e25
--- /dev/null
+++ b/gcc/testsuite/gcc.target/aarch64/morello/alt-base-atomic-operation-2.c
@@ -0,0 +1,483 @@
+/* { dg-do assemble } */
+/* { dg-additional-options "-foptimize-sibling-calls -save-temps" } */
+/* { dg-final { check-function-bodies "**" "" { {-O[123s]} } } } */
+/* { dg-skip-if "" { *-*-* } { "-mabi=purecap" "-mfake-capability" } { "" } } */
+
+#include <stdint.h>
+
+typedef __uint128_t uint128;
+typedef __intcap intcap;
+
+#define TEST_OPERATION(TYPE, SIZE, OPERATION) \
+ TYPE \
+ test_##TYPE##_fetch_##OPERATION (TYPE *__capability ptr, TYPE val) \
+ { \
+ return __atomic_fetch_##OPERATION##_##SIZE ((TYPE *) ptr, val, \
+ __ATOMIC_RELAXED); \
+ } \
+ \
+ TYPE \
+ test_##TYPE##_##OPERATION##_fetch (TYPE *__capability ptr, TYPE val) \
+ { \
+ return __atomic_##OPERATION##_fetch_##SIZE ((TYPE *) ptr, val, \
+ __ATOMIC_RELAXED); \
+ }
+
+#define TEST_SIZE(TYPE, SIZE) \
+ TEST_OPERATION (TYPE, SIZE, add) \
+ TEST_OPERATION (TYPE, SIZE, sub) \
+ TEST_OPERATION (TYPE, SIZE, and) \
+ TEST_OPERATION (TYPE, SIZE, nand) \
+ TEST_OPERATION (TYPE, SIZE, or) \
+ TEST_OPERATION (TYPE, SIZE, xor)
+
+/*
+** test_uint8_t_fetch_add:
+** ldaddb w1, w0, \[x0\]
+** ret
+*/
+
+/*
+** test_uint8_t_fetch_sub:
+** ...
+** neg .*
+** ldaddb .*
+** ret
+*/
+
+/*
+** test_uint8_t_fetch_and:
+** ...
+** mvn .*
+** ldclrb .*
+** ret
+*/
+
+/*
+** test_uint8_t_fetch_nand:
+** ...
+** ldxrb .*
+** ...
+** stxrb .*
+** ...
+** ret
+*/
+
+/*
+** test_uint8_t_fetch_or:
+** ldsetb w1, w0, \[x0\]
+** ret
+*/
+
+/*
+** test_uint8_t_fetch_xor:
+** ldeorb w1, w0, \[x0\]
+** ret
+*/
+
+/*
+** test_uint8_t_add_fetch:
+** ...
+** ldaddb .*
+** add w0, .*
+** ret
+*/
+
+/*
+** test_uint8_t_sub_fetch:
+** ...
+** ldaddb .*
+** sub w0, .*
+** ret
+*/
+
+/*
+** test_uint8_t_and_fetch:
+** ...
+** ldclrb .*
+** and w0, .*
+** ret
+*/
+
+/*
+** test_uint8_t_nand_fetch:
+** ...
+** ldxrb .*
+** ...
+** stxrb .*
+** ...
+** ret
+*/
+
+/*
+** test_uint8_t_or_fetch:
+** ...
+** ldsetb .*
+** orr w0, .*
+** ret
+*/
+
+/*
+** test_uint8_t_xor_fetch:
+** ...
+** ldeorb .*
+** eor w0, .*
+** ret
+*/
+TEST_SIZE (uint8_t, 1)
+
+/*
+** test_uint16_t_fetch_add:
+** ldaddh w1, w0, \[x0\]
+** ret
+*/
+
+/*
+** test_uint16_t_fetch_sub:
+** ...
+** neg .*
+** ldaddh .*
+** ret
+*/
+
+/*
+** test_uint16_t_fetch_and:
+** ...
+** mvn .*
+** ldclrh .*
+** ret
+*/
+
+/*
+** test_uint16_t_fetch_nand:
+** ...
+** ldxrh .*
+** ...
+** stxrh .*
+** ...
+** ret
+*/
+
+/*
+** test_uint16_t_fetch_or:
+** ldseth w1, w0, \[x0\]
+** ret
+*/
+
+/*
+** test_uint16_t_fetch_xor:
+** ldeorh w1, w0, \[x0\]
+** ret
+*/
+
+/*
+** test_uint16_t_add_fetch:
+** ...
+** ldaddh .*
+** add w0, .*
+** ret
+*/
+
+/*
+** test_uint16_t_sub_fetch:
+** ...
+** ldaddh .*
+** sub w0, .*
+** ret
+*/
+
+/*
+** test_uint16_t_and_fetch:
+** ...
+** ldclrh .*
+** and w0, .*
+** ret
+*/
+
+/*
+** test_uint16_t_nand_fetch:
+** ...
+** ldxrh .*
+** ...
+** stxrh .*
+** ...
+** ret
+*/
+
+/*
+** test_uint16_t_or_fetch:
+** ...
+** ldseth .*
+** orr w0, .*
+** ret
+*/
+
+/*
+** test_uint16_t_xor_fetch:
+** ...
+** ldeorh .*
+** eor w0, .*
+** ret
+*/
+TEST_SIZE (uint16_t, 2)
+
+/*
+** test_uint32_t_fetch_add:
+** ldadd w1, w0, \[x0\]
+** ret
+*/
+
+/*
+** test_uint32_t_fetch_sub:
+** ...
+** neg .*
+** ldadd .*
+** ret
+*/
+
+/*
+** test_uint32_t_fetch_and:
+** ...
+** mvn .*
+** ldclr .*
+** ret
+*/
+
+/*
+** test_uint32_t_fetch_nand:
+** ...
+** ldxr .*
+** ...
+** stxr .*
+** ...
+** ret
+*/
+
+/*
+** test_uint32_t_fetch_or:
+** ldset w1, w0, \[x0\]
+** ret
+*/
+
+/*
+** test_uint32_t_fetch_xor:
+** ldeor w1, w0, \[x0\]
+** ret
+*/
+
+/*
+** test_uint32_t_add_fetch:
+** ...
+** ldadd .*
+** add w0, .*
+** ret
+*/
+
+/*
+** test_uint32_t_sub_fetch:
+** ...
+** ldadd .*
+** sub w0, .*
+** ret
+*/
+
+/*
+** test_uint32_t_and_fetch:
+** ...
+** ldclr .*
+** and w0, .*
+** ret
+*/
+
+/*
+** test_uint32_t_nand_fetch:
+** ...
+** ldxr .*
+** ...
+** stxr .*
+** ...
+** ret
+*/
+
+/*
+** test_uint32_t_or_fetch:
+** ...
+** ldset .*
+** orr w0, .*
+** ret
+*/
+
+/*
+** test_uint32_t_xor_fetch:
+** ...
+** ldeor .*
+** eor w0, .*
+** ret
+*/
+TEST_SIZE (uint32_t, 4)
+
+/*
+** test_uint64_t_fetch_add:
+** ldadd x1, x0, \[x0\]
+** ret
+*/
+
+/*
+** test_uint64_t_fetch_sub:
+** ...
+** neg .*
+** ldadd .*
+** ret
+*/
+
+/*
+** test_uint64_t_fetch_and:
+** ...
+** mvn .*
+** ldclr .*
+** ret
+*/
+
+/*
+** test_uint64_t_fetch_nand:
+** ...
+** ldxr .*
+** ...
+** stxr .*
+** ...
+** ret
+*/
+
+/*
+** test_uint64_t_fetch_or:
+** ldset x1, x0, \[x0\]
+** ret
+*/
+
+/*
+** test_uint64_t_fetch_xor:
+** ldeor x1, x0, \[x0\]
+** ret
+*/
+
+/*
+** test_uint64_t_add_fetch:
+** ...
+** ldadd .*
+** add x0, .*
+** ret
+*/
+
+/*
+** test_uint64_t_sub_fetch:
+** ...
+** ldadd .*
+** sub x0, .*
+** ret
+*/
+
+/*
+** test_uint64_t_and_fetch:
+** ...
+** ldclr .*
+** and x0, .*
+** ret
+*/
+
+/*
+** test_uint64_t_nand_fetch:
+** ...
+** ldxr .*
+** ...
+** stxr .*
+** ...
+** ret
+*/
+
+/*
+** test_uint64_t_or_fetch:
+** ...
+** ldset .*
+** orr x0, .*
+** ret
+*/
+
+/*
+** test_uint64_t_xor_fetch:
+** ...
+** ldeor .*
+** eor x0, .*
+** ret
+*/
+TEST_SIZE (uint64_t, 8)
+
+/* Not matched, calls into libatomic. */
+TEST_SIZE (uint128, 16)
+
+/*
+** test_intcap_fetch_add:
+** ...
+** ldxr c0, \[(x[0-9]+)\]
+** add (c[0-9]+), c0, x[0-9]+
+** stxr (w[0-9]+), \2, \[\1\]
+** cbnz \3, .*
+** ret
+*/
+
+/*
+** test_intcap_fetch_sub:
+** ...
+** ldxr .*
+** sub .*
+** scvalue .*
+** stxr .*
+** cbnz .*
+** ret
+*/
+
+/*
+** test_intcap_fetch_and:
+** ...
+** ldxr .*
+** and .*
+** scvalue .*
+** stxr .*
+** cbnz .*
+** ret
+*/
+
+/*
+** test_intcap_fetch_nand:
+** ...
+** ldxr .*
+** .*
+** stxr .*
+** cbnz .*
+** ret
+*/
+
+/*
+** test_intcap_fetch_or:
+** ...
+** ldxr .*
+** orr .*
+** scvalue .*
+** stxr .*
+** cbnz .*
+** ret
+*/
+
+/*
+** test_intcap_fetch_xor:
+** ...
+** ldxr .*
+** eor .*
+** scvalue .*
+** stxr .*
+** cbnz .*
+** ret
+*/
+
+/* Don't match the rest since they currently include a redundant final
+ operation. */
+TEST_SIZE (intcap, capability)
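The intcap patterns above are all exclusive-load/store retry loops rather than single LSE instructions. As a minimal stand-alone sketch of source that would be expected to produce such a loop (the function name, the plain pointer and the relaxed memory order are illustrative assumptions, and it is assumed here that the type-generic builtin accepts __intcap operands):

__intcap
fetch_add_example (__intcap *ptr, __intcap val)
{
  /* No LSE instruction performs atomic arithmetic on a capability-sized
     value, so the expected expansion is a ldxr/stxr retry loop like the
     one matched for test_intcap_fetch_add above.  */
  return __atomic_fetch_add (ptr, val, __ATOMIC_RELAXED);
}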
diff --git a/gcc/testsuite/gcc.target/aarch64/morello/alt-base-atomic-store-1.c b/gcc/testsuite/gcc.target/aarch64/morello/alt-base-atomic-store-1.c
new file mode 100644
index 00000000000..8c69b5e3842
--- /dev/null
+++ b/gcc/testsuite/gcc.target/aarch64/morello/alt-base-atomic-store-1.c
@@ -0,0 +1,252 @@
+/* { dg-do assemble } */
+/* { dg-additional-options "-foptimize-sibling-calls -save-temps" } */
+/* { dg-final { check-function-bodies "**" "" { {-O[123s]} } } } */
+/* { dg-skip-if "" { *-*-* } { "-mabi=purecap" "-mfake-capability" } { "" } } */
+
+#include <stdint.h>
+
+typedef __uint128_t uint128;
+typedef __intcap intcap;
+
+#define TEST_SIZE(TYPE, SIZE) \
+ void \
+ test_##TYPE (TYPE *__capability ptr, TYPE data) \
+ { \
+ __atomic_store_##SIZE##_c (ptr, data, __ATOMIC_RELEASE); \
+ } \
+ \
+ void \
+ test_##TYPE##_offset (TYPE *__capability ptr, TYPE data) \
+ { \
+ __atomic_store_##SIZE##_c (ptr + 1, data, __ATOMIC_RELEASE); \
+ } \
+ \
+ void \
+ test_##TYPE##_index (TYPE *__capability ptr, TYPE data, int index) \
+ { \
+ __atomic_store_##SIZE##_c (ptr + index, data, __ATOMIC_RELEASE); \
+ } \
+ \
+ void \
+ test_##TYPE##_convert (TYPE *__capability ptr, TYPE data, int index) \
+ { \
+ __atomic_store_##SIZE ((TYPE *) ptr, data, __ATOMIC_RELEASE); \
+ } \
+ \
+ void \
+ test_##TYPE##_relaxed (TYPE *__capability ptr, TYPE data, int index) \
+ { \
+ __atomic_store_##SIZE##_c (ptr, data, __ATOMIC_RELAXED); \
+ } \
+ \
+ void \
+ test_##TYPE##_seq_cst (TYPE *__capability ptr, TYPE data, int index) \
+ { \
+ __atomic_store_##SIZE##_c (ptr, data, __ATOMIC_SEQ_CST); \
+ }
+
+/*
+** test_uint8_t:
+** ...
+** stlrb w1, \[c0\]
+** ret
+*/
+
+/*
+** test_uint8_t_offset:
+** ...
+** add (c[0-9]+), c0, #?1
+** ...
+** stlrb w1, \[\1\]
+** ret
+*/
+
+/*
+** test_uint8_t_index:
+** ...
+** add (c[0-9]+), c0, w2, sxtw
+** ...
+** stlrb w1, \[\1\]
+** ret
+*/
+
+/*
+** test_uint8_t_convert:
+** ...
+** stlrb w1, \[x0\]
+** ret
+*/
+
+/*
+** test_uint8_t_relaxed:
+** ...
+** strb w1, \[c0\]
+** ret
+*/
+
+/*
+** test_uint8_t_seq_cst:
+** ...
+** stlrb w1, \[c0\]
+** ret
+*/
+TEST_SIZE (uint8_t, 1)
+
+/*
+** test_uint16_t:
+** ...
+** dmb ish
+** ...
+** strh w1, \[c0\]
+** ret
+*/
+
+/* test_uint16_t_offset and test_uint16_t_index not matched. */
+
+/*
+** test_uint16_t_convert:
+** ...
+** stlrh w1, \[x0\]
+** ret
+*/
+
+/*
+** test_uint16_t_relaxed:
+** ...
+** strh w1, \[c0\]
+** ret
+*/
+
+/*
+** test_uint16_t_seq_cst:
+** ...
+** dmb ish
+** ...
+** dmb ish
+** ret
+*/
+TEST_SIZE (uint16_t, 2)
+
+/*
+** test_uint32_t:
+** stlr w1, \[c0\]
+** ret
+*/
+
+/*
+** test_uint32_t_offset:
+** add (c[0-9]+), c0, #?4
+** stlr w1, \[\1\]
+** ret
+*/
+
+/*
+** test_uint32_t_index:
+** add (c[0-9]+), c0, w2, sxtw #?2
+** stlr w1, \[\1\]
+** ret
+*/
+
+/*
+** test_uint32_t_convert:
+** stlr w1, \[x0\]
+** ret
+*/
+
+/*
+** test_uint32_t_relaxed:
+** str w1, \[c0\]
+** ret
+*/
+
+/*
+** test_uint32_t_seq_cst:
+** stlr w1, \[c0\]
+** ret
+*/
+TEST_SIZE (uint32_t, 4)
+
+/*
+** test_uint64_t:
+** dmb ish
+** str x1, \[c0\]
+** ret
+*/
+
+/* test_uint64_t_offset and test_uint64_t_index not matched. */
+
+/*
+** test_uint64_t_convert:
+** stlr x1, \[x0\]
+** ret
+*/
+
+/*
+** test_uint64_t_relaxed:
+** str x1, \[c0\]
+** ret
+*/
+
+/*
+** test_uint64_t_seq_cst:
+** dmb ish
+** str x1, \[c0\]
+** dmb ish
+** ret
+*/
+TEST_SIZE (uint64_t, 8)
+
+/*
+** test_uint128:
+** mov w4, #?3
+** b __atomic_store_16_c
+*/
+
+/*
+** test_uint128_convert:
+** ...
+** bl? __atomic_store_16
+** ...
+*/
+
+/* Other test_uint128 functions not matched. */
+TEST_SIZE (uint128, 16)
+
+/*
+** test_intcap:
+** stlr c1, \[c0\]
+** ret
+*/
+
+/*
+** test_intcap_offset:
+** add (c[0-9]+), c0, #?16
+** stlr c1, \[\1\]
+** ret
+*/
+
+/*
+** test_intcap_index:
+** add (c[0-9]+), c0, w2, sxtw #?4
+** stlr c1, \[\1\]
+** ret
+*/
+
+/*
+** test_intcap_convert:
+** stlr c1, \[x0\]
+** ret
+*/
+
+/*
+** test_intcap_relaxed:
+** str c1, \[c0\]
+** ret
+*/
+
+/*
+** test_intcap_seq_cst:
+** stlr c1, \[c0\]
+** ret
+*/
+TEST_SIZE (intcap, capability)
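As a concrete reading of the TEST_SIZE macro at the top of this test, the plain and _convert variants for uint32_t come down to the pair below (written out by hand for illustration only; the other variants differ just in the address computation and memory order):

#include <stdint.h>

void
store_via_capability (uint32_t *__capability ptr, uint32_t data)
{
  /* Alternative-base release store; expected to match the
     stlr w1, [c0] pattern checked for test_uint32_t above.  */
  __atomic_store_4_c (ptr, data, __ATOMIC_RELEASE);
}

void
store_via_plain_pointer (uint32_t *__capability ptr, uint32_t data)
{
  /* Casting back to a normal-base pointer keeps the x-register form,
     as in test_uint32_t_convert above (stlr w1, [x0]).  */
  __atomic_store_4 ((uint32_t *) ptr, data, __ATOMIC_RELEASE);
}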
diff --git a/gcc/testsuite/gcc.target/aarch64/morello/alt-base-sync-compare-swap-1.c b/gcc/testsuite/gcc.target/aarch64/morello/alt-base-sync-compare-swap-1.c
new file mode 100644
index 00000000000..678ad7cb157
--- /dev/null
+++ b/gcc/testsuite/gcc.target/aarch64/morello/alt-base-sync-compare-swap-1.c
@@ -0,0 +1,90 @@
+/* { dg-do assemble } */
+/* { dg-additional-options "-foptimize-sibling-calls -save-temps" } */
+/* { dg-final { check-function-bodies "**" "" { {-O[123s]} } } } */
+/* { dg-skip-if "" { *-*-* } { "-mabi=purecap" "-mfake-capability" } { "" } } */
+
+#include <stdint.h>
+
+typedef __uint128_t uint128;
+typedef __intcap intcap;
+
+#define TEST_SIZE(TYPE, SIZE) \
+ _Bool \
+ bool_##TYPE (TYPE *__capability ptr, TYPE oldval, TYPE newval) \
+ { \
+ return __sync_bool_compare_and_swap_##SIZE##_c (ptr, oldval, \
+ newval); \
+ } \
+ \
+ TYPE \
+ val_##TYPE (TYPE *__capability ptr, TYPE oldval, TYPE newval) \
+ { \
+ return __sync_val_compare_and_swap_##SIZE##_c (ptr, oldval, \
+ newval); \
+ }
+
+/*
+** bool_uint8_t:
+** b __sync_bool_compare_and_swap_1_c
+*/
+
+/*
+** val_uint8_t:
+** b __sync_val_compare_and_swap_1_c
+*/
+TEST_SIZE (uint8_t, 1)
+
+/*
+** bool_uint16_t:
+** b __sync_bool_compare_and_swap_2_c
+*/
+
+/*
+** val_uint16_t:
+** b __sync_val_compare_and_swap_2_c
+*/
+TEST_SIZE (uint16_t, 2)
+
+/*
+** bool_uint32_t:
+** b __sync_bool_compare_and_swap_4_c
+*/
+
+/*
+** val_uint32_t:
+** b __sync_val_compare_and_swap_4_c
+*/
+TEST_SIZE (uint32_t, 4)
+
+/*
+** bool_uint64_t:
+** b __sync_bool_compare_and_swap_8_c
+*/
+
+/*
+** val_uint64_t:
+** b __sync_val_compare_and_swap_8_c
+*/
+TEST_SIZE (uint64_t, 8)
+
+/*
+** bool_uint128:
+** b __sync_bool_compare_and_swap_16_c
+*/
+
+/*
+** val_uint128:
+** b __sync_val_compare_and_swap_16_c
+*/
+TEST_SIZE (uint128, 16)
+
+/*
+** bool_intcap:
+** b __sync_bool_compare_and_swap_capability_c
+*/
+
+/*
+** val_intcap:
+** b __sync_val_compare_and_swap_capability_c
+*/
+TEST_SIZE (intcap, capability)
diff --git a/gcc/testsuite/gcc.target/aarch64/morello/alt-base-sync-compare-swap-2.c b/gcc/testsuite/gcc.target/aarch64/morello/alt-base-sync-compare-swap-2.c
new file mode 100644
index 00000000000..c7ebec62e63
--- /dev/null
+++ b/gcc/testsuite/gcc.target/aarch64/morello/alt-base-sync-compare-swap-2.c
@@ -0,0 +1,111 @@
+/* { dg-do assemble } */
+/* { dg-additional-options "-foptimize-sibling-calls -save-temps" } */
+/* { dg-final { check-function-bodies "**" "" { {-O[123s]} } } } */
+/* { dg-skip-if "" { *-*-* } { "-mabi=purecap" "-mfake-capability" } { "" } } */
+
+#include <stdint.h>
+
+typedef __uint128_t uint128;
+typedef __intcap intcap;
+
+#define TEST_SIZE(TYPE, SIZE) \
+ _Bool \
+ bool_##TYPE (TYPE *__capability ptr, TYPE oldval, TYPE newval) \
+ { \
+ return __sync_bool_compare_and_swap_##SIZE ((TYPE *) ptr, oldval, \
+ newval); \
+ } \
+ \
+ TYPE \
+ val_##TYPE (TYPE oldval, TYPE newval, TYPE *__capability ptr) \
+ { \
+ return __sync_val_compare_and_swap_##SIZE ((TYPE *) ptr, oldval, \
+ newval); \
+ }
+
+/*
+** bool_uint8_t:
+** ...
+** casalb .*
+** ...
+*/
+
+/*
+** val_uint8_t:
+** ...
+** casalb .*
+** ...
+*/
+TEST_SIZE (uint8_t, 1)
+
+/*
+** bool_uint16_t:
+** ...
+** casalh .*
+** ...
+*/
+
+/*
+** val_uint16_t:
+** ...
+** casalh .*
+** ...
+*/
+TEST_SIZE (uint16_t, 2)
+
+/*
+** bool_uint32_t:
+** ...
+** casal .*
+** ...
+*/
+
+/*
+** val_uint32_t:
+** casal w0, w1, \[x2\]
+** ret
+*/
+TEST_SIZE (uint32_t, 4)
+
+/*
+** bool_uint64_t:
+** ...
+** casal .*
+** ...
+*/
+
+/*
+** val_uint64_t:
+** casal x0, x1, \[x2\]
+** ret
+*/
+TEST_SIZE (uint64_t, 8)
+
+/*
+** bool_uint128:
+** ...
+** caspal .*
+** ...
+*/
+
+/*
+** val_uint128:
+** ...
+** caspal .*
+** ...
+*/
+TEST_SIZE (uint128, 16)
+
+/*
+** bool_intcap:
+** ...
+** casal .*
+** ...
+*/
+
+/*
+** val_intcap:
+** casal c0, c1, \[x2\]
+** ret
+*/
+TEST_SIZE (intcap, capability)
diff --git a/gcc/testsuite/gcc.target/aarch64/morello/alt-base-sync-lock-release-1.c b/gcc/testsuite/gcc.target/aarch64/morello/alt-base-sync-lock-release-1.c
new file mode 100644
index 00000000000..a3b9cfdd64a
--- /dev/null
+++ b/gcc/testsuite/gcc.target/aarch64/morello/alt-base-sync-lock-release-1.c
@@ -0,0 +1,89 @@
+/* { dg-do assemble } */
+/* { dg-additional-options "-foptimize-sibling-calls -save-temps" } */
+/* { dg-final { check-function-bodies "**" "" { {-O[123s]} } } } */
+/* { dg-skip-if "" { *-*-* } { "-mabi=purecap" "-mfake-capability" } { "" } } */
+
+#include <stdint.h>
+
+typedef __uint128_t uint128;
+
+#define TEST_SIZE(TYPE, SIZE) \
+ void \
+ test_##TYPE (TYPE *__capability ptr) \
+ { \
+ __sync_lock_release_##SIZE##_c (ptr); \
+ } \
+ \
+ void \
+ test_##TYPE##_convert (TYPE *__capability ptr) \
+ { \
+ __sync_lock_release_##SIZE ((TYPE *) ptr); \
+ }
+
+/*
+** test_uint8_t:
+** stlrb wzr, \[c0\]
+** ret
+*/
+
+/*
+** test_uint8_t_convert:
+** stlrb wzr, \[x0\]
+** ret
+*/
+TEST_SIZE (uint8_t, 1)
+
+/*
+** test_uint16_t:
+** dmb ish
+** strh wzr, \[c0\]
+** ret
+*/
+
+/*
+** test_uint16_t_convert:
+** stlrh wzr, \[x0\]
+** ret
+*/
+
+TEST_SIZE (uint16_t, 2)
+
+/*
+** test_uint32_t:
+** stlr wzr, \[c0\]
+** ret
+*/
+
+/*
+** test_uint32_t_convert:
+** stlr wzr, \[x0\]
+** ret
+*/
+TEST_SIZE (uint32_t, 4)
+
+/*
+** test_uint64_t:
+** dmb ish
+** str xzr, \[c0\]
+** ret
+*/
+
+/*
+** test_uint64_t_convert:
+** stlr xzr, \[x0\]
+** ret
+*/
+TEST_SIZE (uint64_t, 8)
+
+/*
+** test_uint128:
+** stlr czr, \[c0\]
+** ret
+*/
+
+/*
+** test_uint128_convert:
+** stlr czr, \[x0\]
+** ret
+*/
+TEST_SIZE (uint128, 16)
diff --git a/gcc/testsuite/gcc.target/aarch64/morello/alt-base-sync-lock-test-and-set-1.c b/gcc/testsuite/gcc.target/aarch64/morello/alt-base-sync-lock-test-and-set-1.c
new file mode 100644
index 00000000000..8e03f8f444b
--- /dev/null
+++ b/gcc/testsuite/gcc.target/aarch64/morello/alt-base-sync-lock-test-and-set-1.c
@@ -0,0 +1,52 @@
+/* { dg-do assemble } */
+/* { dg-additional-options "-foptimize-sibling-calls -save-temps" } */
+/* { dg-final { check-function-bodies "**" "" { {-O[123s]} } } } */
+/* { dg-skip-if "" { *-*-* } { "-mabi=purecap" "-mfake-capability" } { "" } } */
+
+#include <stdint.h>
+
+typedef __uint128_t uint128;
+typedef __intcap intcap;
+
+#define TEST_SIZE(TYPE, SIZE) \
+ TYPE \
+ test_##TYPE (TYPE *__capability ptr, TYPE val) \
+ { \
+ return __sync_lock_test_and_set_##SIZE##_c (ptr, val); \
+ }
+
+/*
+** test_uint8_t:
+** b __sync_lock_test_and_set_1_c
+*/
+TEST_SIZE (uint8_t, 1)
+
+/*
+** test_uint16_t:
+** b __sync_lock_test_and_set_2_c
+*/
+TEST_SIZE (uint16_t, 2)
+
+/*
+** test_uint32_t:
+** b __sync_lock_test_and_set_4_c
+*/
+TEST_SIZE (uint32_t, 4)
+
+/*
+** test_uint64_t:
+** b __sync_lock_test_and_set_8_c
+*/
+TEST_SIZE (uint64_t, 8)
+
+/*
+** test_uint128:
+** b __sync_lock_test_and_set_16_c
+*/
+TEST_SIZE (uint128, 16)
+
+/*
+** test_intcap:
+** b __sync_lock_test_and_set_capability_c
+*/
+TEST_SIZE (intcap, capability)
diff --git a/gcc/testsuite/gcc.target/aarch64/morello/alt-base-sync-lock-test-and-set-2.c b/gcc/testsuite/gcc.target/aarch64/morello/alt-base-sync-lock-test-and-set-2.c
new file mode 100644
index 00000000000..04831904725
--- /dev/null
+++ b/gcc/testsuite/gcc.target/aarch64/morello/alt-base-sync-lock-test-and-set-2.c
@@ -0,0 +1,59 @@
+/* { dg-do assemble } */
+/* { dg-additional-options "-foptimize-sibling-calls -save-temps" } */
+/* { dg-final { check-function-bodies "**" "" { {-O[123s]} } } } */
+/* { dg-skip-if "" { *-*-* } { "-mabi=purecap" "-mfake-capability" } { "" } } */
+
+#include <stdint.h>
+
+typedef __uint128_t uint128;
+typedef __intcap intcap;
+
+#define TEST_SIZE(TYPE, SIZE) \
+ TYPE \
+ test_##TYPE (TYPE *__capability ptr, TYPE val) \
+ { \
+ return __sync_lock_test_and_set_##SIZE ((TYPE *) ptr, val); \
+ }
+
+/*
+** test_uint8_t:
+** swpab w1, w0, \[x0\]
+** ret
+*/
+TEST_SIZE (uint8_t, 1)
+
+/*
+** test_uint16_t:
+** swpah w1, w0, \[x0\]
+** ret
+*/
+TEST_SIZE (uint16_t, 2)
+
+/*
+** test_uint32_t:
+** swpa w1, w0, \[x0\]
+** ret
+*/
+TEST_SIZE (uint32_t, 4)
+
+/*
+** test_uint64_t:
+** swpa x1, x0, \[x0\]
+** ret
+*/
+TEST_SIZE (uint64_t, 8)
+
+/*
+** test_uint128:
+** ...
+** caspal .*
+** ...
+*/
+TEST_SIZE (uint128, 16)
+
+/*
+** test_intcap:
+** swpa c1, c0, \[x0\]
+** ret
+*/
+TEST_SIZE (intcap, capability)
diff --git a/gcc/testsuite/gcc.target/aarch64/morello/alt-base-sync-operation-1.c b/gcc/testsuite/gcc.target/aarch64/morello/alt-base-sync-operation-1.c
new file mode 100644
index 00000000000..012c0b115d1
--- /dev/null
+++ b/gcc/testsuite/gcc.target/aarch64/morello/alt-base-sync-operation-1.c
@@ -0,0 +1,396 @@
+/* { dg-do assemble } */
+/* { dg-additional-options "-foptimize-sibling-calls -save-temps" } */
+/* { dg-final { check-function-bodies "**" "" { {-O[123s]} } } } */
+/* { dg-skip-if "" { *-*-* } { "-mabi=purecap" "-mfake-capability" } { "" } } */
+
+#include <stdint.h>
+
+typedef __uint128_t uint128;
+typedef __intcap intcap;
+
+#define TEST_OPERATION(TYPE, SIZE, OPERATION) \
+ TYPE \
+ test_##TYPE##_fetch_and_##OPERATION (TYPE *__capability ptr, TYPE val) \
+ { \
+ return __sync_fetch_and_##OPERATION##_##SIZE##_c (ptr, val); \
+ } \
+ \
+ TYPE \
+ test_##TYPE##_##OPERATION##_and_fetch (TYPE *__capability ptr, TYPE val) \
+ { \
+ return __sync_##OPERATION##_and_fetch_##SIZE##_c (ptr, val); \
+ }
+
+#define TEST_SIZE(TYPE, SIZE) \
+ TEST_OPERATION (TYPE, SIZE, add) \
+ TEST_OPERATION (TYPE, SIZE, sub) \
+ TEST_OPERATION (TYPE, SIZE, and) \
+ TEST_OPERATION (TYPE, SIZE, nand) \
+ TEST_OPERATION (TYPE, SIZE, or) \
+ TEST_OPERATION (TYPE, SIZE, xor)
+
+/*
+** test_uint8_t_fetch_and_add:
+** b __sync_fetch_and_add_1_c
+*/
+
+/*
+** test_uint8_t_fetch_and_sub:
+** b __sync_fetch_and_sub_1_c
+*/
+
+/*
+** test_uint8_t_fetch_and_and:
+** b __sync_fetch_and_and_1_c
+*/
+
+/*
+** test_uint8_t_fetch_and_nand:
+** b __sync_fetch_and_nand_1_c
+*/
+
+/*
+** test_uint8_t_fetch_and_or:
+** b __sync_fetch_and_or_1_c
+*/
+
+/*
+** test_uint8_t_fetch_and_xor:
+** b __sync_fetch_and_xor_1_c
+*/
+
+/*
+** test_uint8_t_add_and_fetch:
+** b __sync_add_and_fetch_1_c
+*/
+
+/*
+** test_uint8_t_sub_and_fetch:
+** b __sync_sub_and_fetch_1_c
+*/
+
+/*
+** test_uint8_t_and_and_fetch:
+** b __sync_and_and_fetch_1_c
+*/
+
+/*
+** test_uint8_t_nand_and_fetch:
+** b __sync_nand_and_fetch_1_c
+*/
+
+/*
+** test_uint8_t_or_and_fetch:
+** b __sync_or_and_fetch_1_c
+*/
+
+/*
+** test_uint8_t_xor_and_fetch:
+** b __sync_xor_and_fetch_1_c
+*/
+TEST_SIZE (uint8_t, 1)
+
+/*
+** test_uint16_t_fetch_and_add:
+** b __sync_fetch_and_add_2_c
+*/
+
+/*
+** test_uint16_t_fetch_and_sub:
+** b __sync_fetch_and_sub_2_c
+*/
+
+/*
+** test_uint16_t_fetch_and_and:
+** b __sync_fetch_and_and_2_c
+*/
+
+/*
+** test_uint16_t_fetch_and_nand:
+** b __sync_fetch_and_nand_2_c
+*/
+
+/*
+** test_uint16_t_fetch_and_or:
+** b __sync_fetch_and_or_2_c
+*/
+
+/*
+** test_uint16_t_fetch_and_xor:
+** b __sync_fetch_and_xor_2_c
+*/
+
+/*
+** test_uint16_t_add_and_fetch:
+** b __sync_add_and_fetch_2_c
+*/
+
+/*
+** test_uint16_t_sub_and_fetch:
+** b __sync_sub_and_fetch_2_c
+*/
+
+/*
+** test_uint16_t_and_and_fetch:
+** b __sync_and_and_fetch_2_c
+*/
+
+/*
+** test_uint16_t_nand_and_fetch:
+** b __sync_nand_and_fetch_2_c
+*/
+
+/*
+** test_uint16_t_or_and_fetch:
+** b __sync_or_and_fetch_2_c
+*/
+
+/*
+** test_uint16_t_xor_and_fetch:
+** b __sync_xor_and_fetch_2_c
+*/
+TEST_SIZE (uint16_t, 2)
+
+/*
+** test_uint32_t_fetch_and_add:
+** b __sync_fetch_and_add_4_c
+*/
+
+/*
+** test_uint32_t_fetch_and_sub:
+** b __sync_fetch_and_sub_4_c
+*/
+
+/*
+** test_uint32_t_fetch_and_and:
+** b __sync_fetch_and_and_4_c
+*/
+
+/*
+** test_uint32_t_fetch_and_nand:
+** b __sync_fetch_and_nand_4_c
+*/
+
+/*
+** test_uint32_t_fetch_and_or:
+** b __sync_fetch_and_or_4_c
+*/
+
+/*
+** test_uint32_t_fetch_and_xor:
+** b __sync_fetch_and_xor_4_c
+*/
+
+/*
+** test_uint32_t_add_and_fetch:
+** b __sync_add_and_fetch_4_c
+*/
+
+/*
+** test_uint32_t_sub_and_fetch:
+** b __sync_sub_and_fetch_4_c
+*/
+
+/*
+** test_uint32_t_and_and_fetch:
+** b __sync_and_and_fetch_4_c
+*/
+
+/*
+** test_uint32_t_nand_and_fetch:
+** b __sync_nand_and_fetch_4_c
+*/
+
+/*
+** test_uint32_t_or_and_fetch:
+** b __sync_or_and_fetch_4_c
+*/
+
+/*
+** test_uint32_t_xor_and_fetch:
+** b __sync_xor_and_fetch_4_c
+*/
+TEST_SIZE (uint32_t, 4)
+
+/*
+** test_uint64_t_fetch_and_add:
+** b __sync_fetch_and_add_8_c
+*/
+
+/*
+** test_uint64_t_fetch_and_sub:
+** b __sync_fetch_and_sub_8_c
+*/
+
+/*
+** test_uint64_t_fetch_and_and:
+** b __sync_fetch_and_and_8_c
+*/
+
+/*
+** test_uint64_t_fetch_and_nand:
+** b __sync_fetch_and_nand_8_c
+*/
+
+/*
+** test_uint64_t_fetch_and_or:
+** b __sync_fetch_and_or_8_c
+*/
+
+/*
+** test_uint64_t_fetch_and_xor:
+** b __sync_fetch_and_xor_8_c
+*/
+
+/*
+** test_uint64_t_add_and_fetch:
+** b __sync_add_and_fetch_8_c
+*/
+
+/*
+** test_uint64_t_sub_and_fetch:
+** b __sync_sub_and_fetch_8_c
+*/
+
+/*
+** test_uint64_t_and_and_fetch:
+** b __sync_and_and_fetch_8_c
+*/
+
+/*
+** test_uint64_t_nand_and_fetch:
+** b __sync_nand_and_fetch_8_c
+*/
+
+/*
+** test_uint64_t_or_and_fetch:
+** b __sync_or_and_fetch_8_c
+*/
+
+/*
+** test_uint64_t_xor_and_fetch:
+** b __sync_xor_and_fetch_8_c
+*/
+TEST_SIZE (uint64_t, 8)
+
+/*
+** test_uint128_fetch_and_add:
+** b __sync_fetch_and_add_16_c
+*/
+
+/*
+** test_uint128_fetch_and_sub:
+** b __sync_fetch_and_sub_16_c
+*/
+
+/*
+** test_uint128_fetch_and_and:
+** b __sync_fetch_and_and_16_c
+*/
+
+/*
+** test_uint128_fetch_and_nand:
+** b __sync_fetch_and_nand_16_c
+*/
+
+/*
+** test_uint128_fetch_and_or:
+** b __sync_fetch_and_or_16_c
+*/
+
+/*
+** test_uint128_fetch_and_xor:
+** b __sync_fetch_and_xor_16_c
+*/
+
+/*
+** test_uint128_add_and_fetch:
+** b __sync_add_and_fetch_16_c
+*/
+
+/*
+** test_uint128_sub_and_fetch:
+** b __sync_sub_and_fetch_16_c
+*/
+
+/*
+** test_uint128_and_and_fetch:
+** b __sync_and_and_fetch_16_c
+*/
+
+/*
+** test_uint128_nand_and_fetch:
+** b __sync_nand_and_fetch_16_c
+*/
+
+/*
+** test_uint128_or_and_fetch:
+** b __sync_or_and_fetch_16_c
+*/
+
+/*
+** test_uint128_xor_and_fetch:
+** b __sync_xor_and_fetch_16_c
+*/
+TEST_SIZE (uint128, 16)
+
+/*
+** test_intcap_fetch_and_add:
+** b __sync_fetch_and_add_capability_c
+*/
+
+/*
+** test_intcap_fetch_and_sub:
+** b __sync_fetch_and_sub_capability_c
+*/
+
+/*
+** test_intcap_fetch_and_and:
+** b __sync_fetch_and_and_capability_c
+*/
+
+/*
+** test_intcap_fetch_and_nand:
+** b __sync_fetch_and_nand_capability_c
+*/
+
+/*
+** test_intcap_fetch_and_or:
+** b __sync_fetch_and_or_capability_c
+*/
+
+/*
+** test_intcap_fetch_and_xor:
+** b __sync_fetch_and_xor_capability_c
+*/
+
+/*
+** test_intcap_add_and_fetch:
+** b __sync_add_and_fetch_capability_c
+*/
+
+/*
+** test_intcap_sub_and_fetch:
+** b __sync_sub_and_fetch_capability_c
+*/
+
+/*
+** test_intcap_and_and_fetch:
+** b __sync_and_and_fetch_capability_c
+*/
+
+/*
+** test_intcap_nand_and_fetch:
+** b __sync_nand_and_fetch_capability_c
+*/
+
+/*
+** test_intcap_or_and_fetch:
+** b __sync_or_and_fetch_capability_c
+*/
+
+/*
+** test_intcap_xor_and_fetch:
+** b __sync_xor_and_fetch_capability_c
+*/
+TEST_SIZE (intcap, capability)
diff --git a/gcc/testsuite/gcc.target/aarch64/morello/alt-base-sync-operation-2.c b/gcc/testsuite/gcc.target/aarch64/morello/alt-base-sync-operation-2.c
new file mode 100644
index 00000000000..94ad371c3b5
--- /dev/null
+++ b/gcc/testsuite/gcc.target/aarch64/morello/alt-base-sync-operation-2.c
@@ -0,0 +1,127 @@
+/* { dg-do assemble } */
+/* { dg-additional-options "-foptimize-sibling-calls -save-temps" } */
+/* { dg-final { check-function-bodies "**" "" { {-O[123s]} } } } */
+/* { dg-skip-if "" { *-*-* } { "-mabi=purecap" "-mfake-capability" } { "" } } */
+
+#include <stdint.h>
+
+typedef __uint128_t uint128;
+typedef __intcap intcap;
+
+#define TEST_OPERATION(TYPE, SIZE, OPERATION) \
+ TYPE \
+ test_##TYPE##_fetch_and_##OPERATION (TYPE *__capability ptr, TYPE val) \
+ { \
+ return __sync_fetch_and_##OPERATION##_##SIZE ((TYPE *) ptr, val); \
+ } \
+ \
+ TYPE \
+ test_##TYPE##_##OPERATION##_and_fetch (TYPE *__capability ptr, TYPE val) \
+ { \
+ return __sync_##OPERATION##_and_fetch_##SIZE ((TYPE *) ptr, val); \
+ }
+
+#define TEST_SIZE(TYPE, SIZE) \
+ TEST_OPERATION (TYPE, SIZE, add) \
+ TEST_OPERATION (TYPE, SIZE, sub) \
+ TEST_OPERATION (TYPE, SIZE, and) \
+ TEST_OPERATION (TYPE, SIZE, nand) \
+ TEST_OPERATION (TYPE, SIZE, or) \
+ TEST_OPERATION (TYPE, SIZE, xor)
+
+/*
+** test_uint8_t_fetch_and_add:
+** ldaddalb w1, w0, \[x0\]
+** ret
+*/
+
+/*
+** test_uint8_t_fetch_and_sub:
+** ...
+** ldaddalb .*
+** ...
+*/
+
+/*
+** test_uint8_t_fetch_and_and:
+** ...
+** ldclralb .*
+** ...
+*/
+
+/*
+** test_uint8_t_fetch_and_nand:
+** ...
+** ldxrb .*
+** ...
+** stlxrb .*
+** ...
+*/
+
+/*
+** test_uint8_t_fetch_and_or:
+** ...
+** ldsetalb .*
+** ...
+*/
+
+/*
+** test_uint8_t_fetch_and_xor:
+** ...
+** ldeoralb .*
+** ...
+*/
+
+/*
+** test_uint8_t_add_and_fetch:
+** ...
+** ldaddalb w1, (w[0-9]+), \[x0\]
+** add w0, (w1, \1|\1, w1)
+** ret
+*/
+
+/*
+** test_uint8_t_sub_and_fetch:
+** ...
+** ldaddalb .*
+** ...
+*/
+
+/*
+** test_uint8_t_and_and_fetch:
+** ...
+** ldclralb .*
+** ...
+*/
+
+/*
+** test_uint8_t_nand_and_fetch:
+** ...
+** ldxrb .*
+** ...
+** stlxrb .*
+** ...
+*/
+
+/*
+** test_uint8_t_or_and_fetch:
+** ...
+** ldsetalb .*
+** ...
+*/
+
+/*
+** test_uint8_t_xor_and_fetch:
+** ...
+** ldeoralb .*
+** ...
+*/
+TEST_SIZE (uint8_t, 1)
+
+/* Don't match the rest. The matches above are mostly to make sure
+ that there are no typos in the function names. */
+TEST_SIZE (uint16_t, 2)
+TEST_SIZE (uint32_t, 4)
+TEST_SIZE (uint64_t, 8)
+TEST_SIZE (uint128, 16)
+TEST_SIZE (intcap, capability)
diff --git a/gcc/tree.c b/gcc/tree.c
index da8d55a2905..95f9b82a454 100644
--- a/gcc/tree.c
+++ b/gcc/tree.c
@@ -8092,6 +8092,17 @@ addr_expr_type (tree_code code, tree op_type)
return build_pointer_type_for_mode (op_type, ptr_mode, false);
}
+/* Try building a capability pointer to TO_TYPE. Return error_mark_node
+ if the target doesn't support such pointers. */
+
+tree
+try_building_capability_pointer_type (tree to_type)
+{
+ if (targetm.capability_mode ().exists ())
+ return addr_expr_type (CAP_ADDR_EXPR, to_type);
+ return error_mark_node;
+}
+
/* Build REPLACE_ADDRESS_VALUE internal function. This represents replacing
the value of a pointer with something else. It is different to a simple
assignment since it works with pointers represented by capabilities and not
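A minimal sketch of how a caller elsewhere might use the new helper (the wrapper below is an illustrative assumption, not something added by this patch):

/* Prefer a capability pointer type to TYPE where the target has one,
   otherwise fall back to an ordinary pointer type.  */
static tree
build_pointer_preferring_capability (tree type)
{
  tree captype = try_building_capability_pointer_type (type);
  return captype != error_mark_node ? captype : build_pointer_type (type);
}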
diff --git a/gcc/tree.h b/gcc/tree.h
index 2bc986e0869..72013ec0778 100644
--- a/gcc/tree.h
+++ b/gcc/tree.h
@@ -4583,6 +4583,7 @@ extern tree unsigned_type_for (tree);
extern tree truth_type_for (tree);
extern tree build_pointer_type_for_mode (tree, machine_mode, bool);
extern tree build_pointer_type (tree);
+extern tree try_building_capability_pointer_type (tree);
extern tree addr_expr_type (tree_code, tree);
extern tree build_intcap_type_for_mode (machine_mode, int);
extern tree build_reference_type_for_mode (tree, machine_mode, bool);