return ROUND_UP (bytes, UNITS_PER_WORD);
}
+/* HWASAN support via Intel Linear Address Masking (LAM):
+   8 bits of metadata starting at bit 48 for LAM_U48,
+   6 bits of metadata starting at bit 57 for LAM_U57.
+   Both macros evaluate to 0 when no LAM mode is selected.  */
+#define IX86_HWASAN_SHIFT (ix86_lam_type == lam_u48 \
+ ? 48 \
+ : (ix86_lam_type == lam_u57 ? 57 : 0))
+#define IX86_HWASAN_TAG_SIZE (ix86_lam_type == lam_u48 \
+ ? 8 \
+ : (ix86_lam_type == lam_u57 ? 6 : 0))
+
+/* Implement TARGET_MEMTAG_CAN_TAG_ADDRESSES.  Address tagging is
+   available only when a LAM mode is enabled and pointers are 64 bits
+   wide (TARGET_LP64).  */
+bool
+ix86_memtag_can_tag_addresses ()
+{
+ return ix86_lam_type != lam_none && TARGET_LP64;
+}
+
+/* Implement TARGET_MEMTAG_TAG_SIZE.  Return the number of tag bits
+   for the active LAM mode (8 for LAM_U48, 6 for LAM_U57, 0 if none).  */
+unsigned char
+ix86_memtag_tag_size ()
+{
+ return IX86_HWASAN_TAG_SIZE;
+}
+
+/* Implement TARGET_MEMTAG_SET_TAG.  Deposit tag TAG into the metadata
+   bits of pointer UNTAGGED by shifting it to IX86_HWASAN_SHIFT and
+   OR-ing it in; TARGET, if non-NULL, may hold the result.  */
+rtx
+ix86_memtag_set_tag (rtx untagged, rtx tag, rtx target)
+{
+ /* default_memtag_insert_random_tag may generate a tag wider than
+ the 6 bits LAM_U57 provides, so mask it down first.  */
+ if (ix86_lam_type == lam_u57)
+ {
+ unsigned HOST_WIDE_INT and_imm
+ = (HOST_WIDE_INT_1U << IX86_HWASAN_TAG_SIZE) - 1;
+
+ emit_insn (gen_andqi3 (tag, tag, GEN_INT (and_imm)));
+ }
+ tag = expand_simple_binop (Pmode, ASHIFT, tag,
+ GEN_INT (IX86_HWASAN_SHIFT), NULL_RTX,
+ /* unsignedp = */1, OPTAB_WIDEN);
+ rtx ret = expand_simple_binop (Pmode, IOR, untagged, tag, target,
+ /* unsignedp = */1, OPTAB_DIRECT);
+ return ret;
+}
+
+/* Implement TARGET_MEMTAG_EXTRACT_TAG.  Return the tag of
+   TAGGED_POINTER as a QImode register: shift the metadata bits down to
+   bit 0, then narrow to QImode.  TARGET, if non-NULL, may hold the
+   intermediate shifted value.  */
+rtx
+ix86_memtag_extract_tag (rtx tagged_pointer, rtx target)
+{
+ rtx tag = expand_simple_binop (Pmode, LSHIFTRT, tagged_pointer,
+ GEN_INT (IX86_HWASAN_SHIFT), target,
+ /* unsignedp = */0,
+ OPTAB_DIRECT);
+ rtx ret = gen_reg_rtx (QImode);
+ /* For LAM_U57 the shift leaves bit63 in the low byte as well;
+ mask it off so only the 6 tag bits remain.  */
+ if (ix86_lam_type == lam_u57)
+ {
+ unsigned HOST_WIDE_INT and_imm
+ = (HOST_WIDE_INT_1U << IX86_HWASAN_TAG_SIZE) - 1;
+ emit_insn (gen_andqi3 (ret, gen_lowpart (QImode, tag),
+ gen_int_mode (and_imm, QImode)));
+ }
+ else
+ emit_move_insn (ret, gen_lowpart (QImode, tag));
+ return ret;
+}
+
+/* Implement TARGET_MEMTAG_UNTAGGED_POINTER.  Clear the tag bits of
+   TAGGED_POINTER; TARGET, if non-NULL, may hold the result.  */
+rtx
+ix86_memtag_untagged_pointer (rtx tagged_pointer, rtx target)
+{
+ /* (1 << SHIFT) + (1 << 63) - 1 sets every bit below SHIFT plus
+ bit63, so the AND clears only the tag field and leaves bit63
+ alone.  */
+ rtx tag_mask = gen_int_mode (((HOST_WIDE_INT_1U << IX86_HWASAN_SHIFT)
+ + (HOST_WIDE_INT_1U << 63) - 1),
+ Pmode);
+ rtx untagged_base = expand_simple_binop (Pmode, AND, tagged_pointer,
+ tag_mask, target, true,
+ OPTAB_DIRECT);
+ gcc_assert (untagged_base);
+ return untagged_base;
+}
+
+/* Implement TARGET_MEMTAG_ADD_TAG.  Return BASE + OFFSET with its tag
+   incremented by TAG_OFFSET: extract the current tag, strip the tag
+   from the address, add TAG_OFFSET to the tag, re-deposit it, then
+   apply OFFSET.  */
+rtx
+ix86_memtag_add_tag (rtx base, poly_int64 offset, unsigned char tag_offset)
+{
+ rtx base_tag = gen_reg_rtx (QImode);
+ rtx base_addr = gen_reg_rtx (Pmode);
+ rtx tagged_addr = gen_reg_rtx (Pmode);
+ rtx new_tag = gen_reg_rtx (QImode);
+ /* NOTE(review): this uses IX86_HWASAN_SHIFT (48/57), not
+ IX86_HWASAN_TAG_SIZE, so gen_int_mode truncates and_imm to 0xff in
+ QImode and the AND below is effectively a no-op.  Harmless, since
+ ix86_memtag_set_tag re-masks the tag for lam_u57 — but confirm
+ whether IX86_HWASAN_TAG_SIZE was intended here.  */
+ unsigned HOST_WIDE_INT and_imm
+ = (HOST_WIDE_INT_1U << IX86_HWASAN_SHIFT) - 1;
+
+ /* When there's "overflow" in tag adding,
+ need to mask the most significant bit off.  */
+ emit_move_insn (base_tag, ix86_memtag_extract_tag (base, NULL_RTX));
+ emit_move_insn (base_addr,
+ ix86_memtag_untagged_pointer (base, NULL_RTX));
+ emit_insn (gen_add2_insn (base_tag, gen_int_mode (tag_offset, QImode)));
+ emit_move_insn (new_tag, base_tag);
+ emit_insn (gen_andqi3 (new_tag, new_tag, gen_int_mode (and_imm, QImode)));
+ emit_move_insn (tagged_addr,
+ ix86_memtag_set_tag (base_addr, new_tag, NULL_RTX));
+ return plus_constant (Pmode, tagged_addr, offset);
+}
+
/* Target-specific selftests. */
#if CHECKING_P
# define TARGET_ASM_RELOC_RW_MASK ix86_reloc_rw_mask
#endif
+/* Register the x86 memory-tagging (HWASAN via LAM) target hooks.  */
+#undef TARGET_MEMTAG_CAN_TAG_ADDRESSES
+#define TARGET_MEMTAG_CAN_TAG_ADDRESSES ix86_memtag_can_tag_addresses
+
+#undef TARGET_MEMTAG_ADD_TAG
+#define TARGET_MEMTAG_ADD_TAG ix86_memtag_add_tag
+
+#undef TARGET_MEMTAG_SET_TAG
+#define TARGET_MEMTAG_SET_TAG ix86_memtag_set_tag
+
+#undef TARGET_MEMTAG_EXTRACT_TAG
+#define TARGET_MEMTAG_EXTRACT_TAG ix86_memtag_extract_tag
+
+#undef TARGET_MEMTAG_UNTAGGED_POINTER
+#define TARGET_MEMTAG_UNTAGGED_POINTER ix86_memtag_untagged_pointer
+
+#undef TARGET_MEMTAG_TAG_SIZE
+#define TARGET_MEMTAG_TAG_SIZE ix86_memtag_tag_size
+
static bool ix86_libc_has_fast_function (int fcode ATTRIBUTE_UNUSED)
{
#ifdef OPTION_GLIBC