middle-end/94206 fix memset folding to avoid types with padding
index 2bc6f39b01407444784ba1feca2b078ef4e72899..114d2992bcd2ab9cda170b41569dc0ed160da4f7 100644
+2020-03-18  Richard Biener  <rguenther@suse.de>
+
+       PR middle-end/94206
+       * gimple-fold.c (gimple_fold_builtin_memset): Avoid using
+       partial int modes or integer types without mode precision
+       for the store.
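+
+       Example (editor's sketch of the folding this guards; plain C,
+       illustrative only):
+
+         struct S { int i; };
+
+         void
+         clear (struct S *p)
+         {
+           /* May be folded to a single store "*(T *) p = 0".  The
+              store type T must now be a full-precision integer type
+              whose mode is not a partial int mode, so that no
+              padding bits are written by the folded store.  */
+           __builtin_memset (p, 0, sizeof *p);
+         }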
+
+2020-03-18  Jakub Jelinek  <jakub@redhat.com>
+
+       * asan.c (get_mem_refs_of_builtin_call): Fix up duplicated word issue
+       in a comment.
+       * config/arc/arc.c (frame_stack_add): Likewise.
+       * gimple-loop-versioning.cc (loop_versioning::analyze_arbitrary_term):
+       Likewise.
+       * ipa-predicate.c (predicate::remap_after_inlining): Likewise.
+       * tree-ssa-strlen.h (handle_printf_call): Likewise.
+       * tree-ssa-strlen.c (is_strlen_related_p): Likewise.
+       * optinfo-emit-json.cc (optrecord_json_writer::add_record): Likewise.
+
+2020-03-18  Duan bo  <duanbo3@huawei.com>
+
+       PR target/94201
+       * config/aarch64/aarch64.md (ldr_got_tiny): Delete.
+       (@ldr_got_tiny_<mode>): New pattern.
+       (ldr_got_tiny_sidi): Likewise.
+       * config/aarch64/aarch64.c (aarch64_load_symref_appropriately): Use
+       them to handle SYMBOL_TINY_GOT for ILP32.
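+
+       Example (editor's sketch; the exact assembly is an assumption
+       inferred from the pattern names):
+
+         ldr     x0, :got:sym   /* LP64 tiny-model GOT load */
+         ldr     w0, :got:sym   /* ILP32: ldr_got_tiny_sidi loads the
+                                   32-bit GOT entry, zero-extended */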
+
+2020-03-18  Richard Sandiford  <richard.sandiford@arm.com>
+
+       * config/aarch64/aarch64.c (aarch64_sve_abi): Treat p12-p15 as
+       call-preserved for SVE PCS functions.
+       (aarch64_layout_frame): Cope with up to 12 predicate save slots.
+       Optimize the case in which there are no following vector save slots.
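+
+       Example (editor's sketch; an SVE PCS function is one that
+       takes or returns SVE types, per the AAPCS64):
+
+         #include <arm_sve.h>
+
+         /* Callers of such a function may now keep values live in
+            p12-p15 across the call.  */
+         svint32_t
+         add_masked (svbool_t pg, svint32_t a, svint32_t b)
+         {
+           return svadd_m (pg, a, b);
+         }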
+
+2020-03-18  Richard Biener  <rguenther@suse.de>
+
+       PR middle-end/94188
+       * fold-const.c (build_fold_addr_expr): Convert address to
+       correct type.
+       * asan.c (maybe_create_ssa_name): Strip useless type conversions.
+       * gimple-fold.c (gimple_fold_stmt_to_constant_1): Use build1
+       to build the ADDR_EXPR which we don't really want to simplify.
+       * tree-ssa-dom.c (record_equivalences_from_stmt): Likewise.
+       * tree-ssa-loop-im.c (gather_mem_refs_stmt): Likewise.
+       * tree-ssa-forwprop.c (forward_propagate_addr_expr_1): Likewise.
+       (simplify_builtin_call): Strip useless type conversions.
+       * tree-ssa-strlen.c (new_strinfo): Likewise.
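+
+       Sketch (editor's illustration of the GCC-internal distinction;
+       assumes a "tree ref" in scope):
+
+         /* build_fold_addr_expr now converts the address to the
+            correct pointer type and may simplify; use build1 to get
+            the raw ADDR_EXPR when no simplification is wanted.  */
+         tree raw    = build1 (ADDR_EXPR,
+                               build_pointer_type (TREE_TYPE (ref)),
+                               ref);
+         tree folded = build_fold_addr_expr (ref);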
+
+2020-03-17  Alexey Neyman  <stilor@att.net>
+
+       PR debug/93751
+       * dwarf2out.c (gen_decl_die): Proceed to generate the DIE if
+       the debug level is terse and the declaration is public, but do
+       not generate type info.
+       (dwarf2out_decl): Same.
+       (add_type_attribute): Return immediately if debug level is
+       terse.
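+
+       Example (editor's sketch of the assumed -g1 behaviour):
+
+         int exported;         /* public: DIE emitted at -g1, but
+                                  without DW_AT_type */
+         static int internal;  /* non-public: still no DIE at -g1 */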
+
+2020-03-17  Richard Sandiford  <richard.sandiford@arm.com>
+
+       * config/aarch64/iterators.md (Vmtype): Handle V4BF and V8BF.
+
+2020-03-17  Andre Vieira  <andre.simoesdiasvieira@arm.com>
+            Mihail Ionescu  <mihail.ionescu@arm.com>
+            Srinath Parvathaneni  <srinath.parvathaneni@arm.com>
+
+       * config/arm/arm-builtins.c (TERNOP_UNONE_UNONE_UNONE_IMM_QUALIFIERS):
+       Define qualifier for ternary operands.
+       (TERNOP_UNONE_UNONE_NONE_NONE_QUALIFIERS): Likewise.
+       (TERNOP_UNONE_NONE_UNONE_IMM_QUALIFIERS): Likewise.
+       (TERNOP_NONE_NONE_UNONE_IMM_QUALIFIERS): Likewise.
+       (TERNOP_UNONE_UNONE_NONE_IMM_QUALIFIERS): Likewise.
+       (TERNOP_UNONE_UNONE_NONE_UNONE_QUALIFIERS): Likewise.
+       (TERNOP_UNONE_UNONE_IMM_UNONE_QUALIFIERS): Likewise.
+       (TERNOP_UNONE_NONE_NONE_UNONE_QUALIFIERS): Likewise.
+       (TERNOP_NONE_NONE_NONE_IMM_QUALIFIERS): Likewise.
+       (TERNOP_NONE_NONE_NONE_UNONE_QUALIFIERS): Likewise.
+       (TERNOP_NONE_NONE_IMM_UNONE_QUALIFIERS): Likewise.
+       (TERNOP_NONE_NONE_UNONE_UNONE_QUALIFIERS): Likewise.
+       (TERNOP_UNONE_UNONE_UNONE_UNONE_QUALIFIERS): Likewise.
+       (TERNOP_NONE_NONE_NONE_NONE_QUALIFIERS): Likewise.
+       * config/arm/arm_mve.h (vabavq_s8): Define macro.
+       (vabavq_s16): Likewise.
+       (vabavq_s32): Likewise.
+       (vbicq_m_n_s16): Likewise.
+       (vbicq_m_n_s32): Likewise.
+       (vbicq_m_n_u16): Likewise.
+       (vbicq_m_n_u32): Likewise.
+       (vcmpeqq_m_f16): Likewise.
+       (vcmpeqq_m_f32): Likewise.
+       (vcvtaq_m_s16_f16): Likewise.
+       (vcvtaq_m_u16_f16): Likewise.
+       (vcvtaq_m_s32_f32): Likewise.
+       (vcvtaq_m_u32_f32): Likewise.
+       (vcvtq_m_f16_s16): Likewise.
+       (vcvtq_m_f16_u16): Likewise.
+       (vcvtq_m_f32_s32): Likewise.
+       (vcvtq_m_f32_u32): Likewise.
+       (vqrshrnbq_n_s16): Likewise.
+       (vqrshrnbq_n_u16): Likewise.
+       (vqrshrnbq_n_s32): Likewise.
+       (vqrshrnbq_n_u32): Likewise.
+       (vqrshrunbq_n_s16): Likewise.
+       (vqrshrunbq_n_s32): Likewise.
+       (vrmlaldavhaq_s32): Likewise.
+       (vrmlaldavhaq_u32): Likewise.
+       (vshlcq_s8): Likewise.
+       (vshlcq_u8): Likewise.
+       (vshlcq_s16): Likewise.
+       (vshlcq_u16): Likewise.
+       (vshlcq_s32): Likewise.
+       (vshlcq_u32): Likewise.
+       (vabavq_u8): Likewise.
+       (vabavq_u16): Likewise.
+       (vabavq_u32): Likewise.
+       (__arm_vabavq_s8): Define intrinsic.
+       (__arm_vabavq_s16): Likewise.
+       (__arm_vabavq_s32): Likewise.
+       (__arm_vabavq_u8): Likewise.
+       (__arm_vabavq_u16): Likewise.
+       (__arm_vabavq_u32): Likewise.
+       (__arm_vbicq_m_n_s16): Likewise.
+       (__arm_vbicq_m_n_s32): Likewise.
+       (__arm_vbicq_m_n_u16): Likewise.
+       (__arm_vbicq_m_n_u32): Likewise.
+       (__arm_vqrshrnbq_n_s16): Likewise.
+       (__arm_vqrshrnbq_n_u16): Likewise.
+       (__arm_vqrshrnbq_n_s32): Likewise.
+       (__arm_vqrshrnbq_n_u32): Likewise.
+       (__arm_vqrshrunbq_n_s16): Likewise.
+       (__arm_vqrshrunbq_n_s32): Likewise.
+       (__arm_vrmlaldavhaq_s32): Likewise.
+       (__arm_vrmlaldavhaq_u32): Likewise.
+       (__arm_vshlcq_s8): Likewise.
+       (__arm_vshlcq_u8): Likewise.
+       (__arm_vshlcq_s16): Likewise.
+       (__arm_vshlcq_u16): Likewise.
+       (__arm_vshlcq_s32): Likewise.
+       (__arm_vshlcq_u32): Likewise.
+       (__arm_vcmpeqq_m_f16): Likewise.
+       (__arm_vcmpeqq_m_f32): Likewise.
+       (__arm_vcvtaq_m_s16_f16): Likewise.
+       (__arm_vcvtaq_m_u16_f16): Likewise.
+       (__arm_vcvtaq_m_s32_f32): Likewise.
+       (__arm_vcvtaq_m_u32_f32): Likewise.
+       (__arm_vcvtq_m_f16_s16): Likewise.
+       (__arm_vcvtq_m_f16_u16): Likewise.
+       (__arm_vcvtq_m_f32_s32): Likewise.
+       (__arm_vcvtq_m_f32_u32): Likewise.
+       (vcvtaq_m): Define polymorphic variant.
+       (vcvtq_m): Likewise.
+       (vabavq): Likewise.
+       (vshlcq): Likewise.
+       (vbicq_m_n): Likewise.
+       (vqrshrnbq_n): Likewise.
+       (vqrshrunbq_n): Likewise.
+       * config/arm/arm_mve_builtins.def
+       (TERNOP_UNONE_UNONE_UNONE_IMM_QUALIFIERS): Use the builtin qualifier.
+       (TERNOP_UNONE_UNONE_NONE_NONE_QUALIFIERS): Likewise.
+       (TERNOP_UNONE_NONE_UNONE_IMM_QUALIFIERS): Likewise.
+       (TERNOP_NONE_NONE_UNONE_IMM_QUALIFIERS): Likewise.
+       (TERNOP_UNONE_UNONE_NONE_IMM_QUALIFIERS): Likewise.
+       (TERNOP_UNONE_UNONE_NONE_UNONE_QUALIFIERS): Likewise.
+       (TERNOP_UNONE_UNONE_IMM_UNONE_QUALIFIERS): Likewise.
+       (TERNOP_UNONE_NONE_NONE_UNONE_QUALIFIERS): Likewise.
+       (TERNOP_NONE_NONE_NONE_IMM_QUALIFIERS): Likewise.
+       (TERNOP_NONE_NONE_NONE_UNONE_QUALIFIERS): Likewise.
+       (TERNOP_NONE_NONE_IMM_UNONE_QUALIFIERS): Likewise.
+       (TERNOP_NONE_NONE_UNONE_UNONE_QUALIFIERS): Likewise.
+       (TERNOP_UNONE_UNONE_UNONE_UNONE_QUALIFIERS): Likewise.
+       (TERNOP_NONE_NONE_NONE_NONE_QUALIFIERS): Likewise.
+       * config/arm/mve.md (VBICQ_M_N): Define iterator.
+       (VCVTAQ_M): Likewise.
+       (VCVTQ_M_TO_F): Likewise.
+       (VQRSHRNBQ_N): Likewise.
+       (VABAVQ): Likewise.
+       (VSHLCQ): Likewise.
+       (VRMLALDAVHAQ): Likewise.
+       (mve_vbicq_m_n_<supf><mode>): Define RTL pattern.
+       (mve_vcmpeqq_m_f<mode>): Likewise.
+       (mve_vcvtaq_m_<supf><mode>): Likewise.
+       (mve_vcvtq_m_to_f_<supf><mode>): Likewise.
+       (mve_vqrshrnbq_n_<supf><mode>): Likewise.
+       (mve_vqrshrunbq_n_s<mode>): Likewise.
+       (mve_vrmlaldavhaq_<supf>v4si): Likewise.
+       (mve_vabavq_<supf><mode>): Likewise.
+       (mve_vshlcq_<supf><mode>): Likewise.
+       (mve_vshlcq_<supf><mode>): Likewise.
+       (mve_vshlcq_vec_<supf><mode>): Define RTL expand.
+       (mve_vshlcq_carry_<supf><mode>): Likewise.
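+
+       Example (editor's sketch using one of the new intrinsics;
+       signature as documented in the MVE ACLE):
+
+         #include <arm_mve.h>
+
+         /* VABAV: accumulate absolute differences into a scalar.  */
+         uint32_t
+         acc_abs_diff (uint32_t acc, int8x16_t a, int8x16_t b)
+         {
+           return vabavq_s8 (acc, a, b);
+         }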
+
+2020-03-17  Andre Vieira  <andre.simoesdiasvieira@arm.com>
+            Mihail Ionescu  <mihail.ionescu@arm.com>
+            Srinath Parvathaneni  <srinath.parvathaneni@arm.com>
+
+       * config/arm/arm_mve.h (vqmovntq_u16): Define macro.
+       (vqmovnbq_u16): Likewise.
+       (vmulltq_poly_p8): Likewise.
+       (vmullbq_poly_p8): Likewise.
+       (vmovntq_u16): Likewise.
+       (vmovnbq_u16): Likewise.
+       (vmlaldavxq_u16): Likewise.
+       (vmlaldavq_u16): Likewise.
+       (vqmovuntq_s16): Likewise.
+       (vqmovunbq_s16): Likewise.
+       (vshlltq_n_u8): Likewise.
+       (vshllbq_n_u8): Likewise.
+       (vorrq_n_u16): Likewise.
+       (vbicq_n_u16): Likewise.
+       (vcmpneq_n_f16): Likewise.
+       (vcmpneq_f16): Likewise.
+       (vcmpltq_n_f16): Likewise.
+       (vcmpltq_f16): Likewise.
+       (vcmpleq_n_f16): Likewise.
+       (vcmpleq_f16): Likewise.
+       (vcmpgtq_n_f16): Likewise.
+       (vcmpgtq_f16): Likewise.
+       (vcmpgeq_n_f16): Likewise.
+       (vcmpgeq_f16): Likewise.
+       (vcmpeqq_n_f16): Likewise.
+       (vcmpeqq_f16): Likewise.
+       (vsubq_f16): Likewise.
+       (vqmovntq_s16): Likewise.
+       (vqmovnbq_s16): Likewise.
+       (vqdmulltq_s16): Likewise.
+       (vqdmulltq_n_s16): Likewise.
+       (vqdmullbq_s16): Likewise.
+       (vqdmullbq_n_s16): Likewise.
+       (vorrq_f16): Likewise.
+       (vornq_f16): Likewise.
+       (vmulq_n_f16): Likewise.
+       (vmulq_f16): Likewise.
+       (vmovntq_s16): Likewise.
+       (vmovnbq_s16): Likewise.
+       (vmlsldavxq_s16): Likewise.
+       (vmlsldavq_s16): Likewise.
+       (vmlaldavxq_s16): Likewise.
+       (vmlaldavq_s16): Likewise.
+       (vminnmvq_f16): Likewise.
+       (vminnmq_f16): Likewise.
+       (vminnmavq_f16): Likewise.
+       (vminnmaq_f16): Likewise.
+       (vmaxnmvq_f16): Likewise.
+       (vmaxnmq_f16): Likewise.
+       (vmaxnmavq_f16): Likewise.
+       (vmaxnmaq_f16): Likewise.
+       (veorq_f16): Likewise.
+       (vcmulq_rot90_f16): Likewise.
+       (vcmulq_rot270_f16): Likewise.
+       (vcmulq_rot180_f16): Likewise.
+       (vcmulq_f16): Likewise.
+       (vcaddq_rot90_f16): Likewise.
+       (vcaddq_rot270_f16): Likewise.
+       (vbicq_f16): Likewise.
+       (vandq_f16): Likewise.
+       (vaddq_n_f16): Likewise.
+       (vabdq_f16): Likewise.
+       (vshlltq_n_s8): Likewise.
+       (vshllbq_n_s8): Likewise.
+       (vorrq_n_s16): Likewise.
+       (vbicq_n_s16): Likewise.
+       (vqmovntq_u32): Likewise.
+       (vqmovnbq_u32): Likewise.
+       (vmulltq_poly_p16): Likewise.
+       (vmullbq_poly_p16): Likewise.
+       (vmovntq_u32): Likewise.
+       (vmovnbq_u32): Likewise.
+       (vmlaldavxq_u32): Likewise.
+       (vmlaldavq_u32): Likewise.
+       (vqmovuntq_s32): Likewise.
+       (vqmovunbq_s32): Likewise.
+       (vshlltq_n_u16): Likewise.
+       (vshllbq_n_u16): Likewise.
+       (vorrq_n_u32): Likewise.
+       (vbicq_n_u32): Likewise.
+       (vcmpneq_n_f32): Likewise.
+       (vcmpneq_f32): Likewise.
+       (vcmpltq_n_f32): Likewise.
+       (vcmpltq_f32): Likewise.
+       (vcmpleq_n_f32): Likewise.
+       (vcmpleq_f32): Likewise.
+       (vcmpgtq_n_f32): Likewise.
+       (vcmpgtq_f32): Likewise.
+       (vcmpgeq_n_f32): Likewise.
+       (vcmpgeq_f32): Likewise.
+       (vcmpeqq_n_f32): Likewise.
+       (vcmpeqq_f32): Likewise.
+       (vsubq_f32): Likewise.
+       (vqmovntq_s32): Likewise.
+       (vqmovnbq_s32): Likewise.
+       (vqdmulltq_s32): Likewise.
+       (vqdmulltq_n_s32): Likewise.
+       (vqdmullbq_s32): Likewise.
+       (vqdmullbq_n_s32): Likewise.
+       (vorrq_f32): Likewise.
+       (vornq_f32): Likewise.
+       (vmulq_n_f32): Likewise.
+       (vmulq_f32): Likewise.
+       (vmovntq_s32): Likewise.
+       (vmovnbq_s32): Likewise.
+       (vmlsldavxq_s32): Likewise.
+       (vmlsldavq_s32): Likewise.
+       (vmlaldavxq_s32): Likewise.
+       (vmlaldavq_s32): Likewise.
+       (vminnmvq_f32): Likewise.
+       (vminnmq_f32): Likewise.
+       (vminnmavq_f32): Likewise.
+       (vminnmaq_f32): Likewise.
+       (vmaxnmvq_f32): Likewise.
+       (vmaxnmq_f32): Likewise.
+       (vmaxnmavq_f32): Likewise.
+       (vmaxnmaq_f32): Likewise.
+       (veorq_f32): Likewise.
+       (vcmulq_rot90_f32): Likewise.
+       (vcmulq_rot270_f32): Likewise.
+       (vcmulq_rot180_f32): Likewise.
+       (vcmulq_f32): Likewise.
+       (vcaddq_rot90_f32): Likewise.
+       (vcaddq_rot270_f32): Likewise.
+       (vbicq_f32): Likewise.
+       (vandq_f32): Likewise.
+       (vaddq_n_f32): Likewise.
+       (vabdq_f32): Likewise.
+       (vshlltq_n_s16): Likewise.
+       (vshllbq_n_s16): Likewise.
+       (vorrq_n_s32): Likewise.
+       (vbicq_n_s32): Likewise.
+       (vrmlaldavhq_u32): Likewise.
+       (vctp8q_m): Likewise.
+       (vctp64q_m): Likewise.
+       (vctp32q_m): Likewise.
+       (vctp16q_m): Likewise.
+       (vaddlvaq_u32): Likewise.
+       (vrmlsldavhxq_s32): Likewise.
+       (vrmlsldavhq_s32): Likewise.
+       (vrmlaldavhxq_s32): Likewise.
+       (vrmlaldavhq_s32): Likewise.
+       (vcvttq_f16_f32): Likewise.
+       (vcvtbq_f16_f32): Likewise.
+       (vaddlvaq_s32): Likewise.
+       (__arm_vqmovntq_u16): Define intrinsic.
+       (__arm_vqmovnbq_u16): Likewise.
+       (__arm_vmulltq_poly_p8): Likewise.
+       (__arm_vmullbq_poly_p8): Likewise.
+       (__arm_vmovntq_u16): Likewise.
+       (__arm_vmovnbq_u16): Likewise.
+       (__arm_vmlaldavxq_u16): Likewise.
+       (__arm_vmlaldavq_u16): Likewise.
+       (__arm_vqmovuntq_s16): Likewise.
+       (__arm_vqmovunbq_s16): Likewise.
+       (__arm_vshlltq_n_u8): Likewise.
+       (__arm_vshllbq_n_u8): Likewise.
+       (__arm_vorrq_n_u16): Likewise.
+       (__arm_vbicq_n_u16): Likewise.
+       (__arm_vcmpneq_n_f16): Likewise.
+       (__arm_vcmpneq_f16): Likewise.
+       (__arm_vcmpltq_n_f16): Likewise.
+       (__arm_vcmpltq_f16): Likewise.
+       (__arm_vcmpleq_n_f16): Likewise.
+       (__arm_vcmpleq_f16): Likewise.
+       (__arm_vcmpgtq_n_f16): Likewise.
+       (__arm_vcmpgtq_f16): Likewise.
+       (__arm_vcmpgeq_n_f16): Likewise.
+       (__arm_vcmpgeq_f16): Likewise.
+       (__arm_vcmpeqq_n_f16): Likewise.
+       (__arm_vcmpeqq_f16): Likewise.
+       (__arm_vsubq_f16): Likewise.
+       (__arm_vqmovntq_s16): Likewise.
+       (__arm_vqmovnbq_s16): Likewise.
+       (__arm_vqdmulltq_s16): Likewise.
+       (__arm_vqdmulltq_n_s16): Likewise.
+       (__arm_vqdmullbq_s16): Likewise.
+       (__arm_vqdmullbq_n_s16): Likewise.
+       (__arm_vorrq_f16): Likewise.
+       (__arm_vornq_f16): Likewise.
+       (__arm_vmulq_n_f16): Likewise.
+       (__arm_vmulq_f16): Likewise.
+       (__arm_vmovntq_s16): Likewise.
+       (__arm_vmovnbq_s16): Likewise.
+       (__arm_vmlsldavxq_s16): Likewise.
+       (__arm_vmlsldavq_s16): Likewise.
+       (__arm_vmlaldavxq_s16): Likewise.
+       (__arm_vmlaldavq_s16): Likewise.
+       (__arm_vminnmvq_f16): Likewise.
+       (__arm_vminnmq_f16): Likewise.
+       (__arm_vminnmavq_f16): Likewise.
+       (__arm_vminnmaq_f16): Likewise.
+       (__arm_vmaxnmvq_f16): Likewise.
+       (__arm_vmaxnmq_f16): Likewise.
+       (__arm_vmaxnmavq_f16): Likewise.
+       (__arm_vmaxnmaq_f16): Likewise.
+       (__arm_veorq_f16): Likewise.
+       (__arm_vcmulq_rot90_f16): Likewise.
+       (__arm_vcmulq_rot270_f16): Likewise.
+       (__arm_vcmulq_rot180_f16): Likewise.
+       (__arm_vcmulq_f16): Likewise.
+       (__arm_vcaddq_rot90_f16): Likewise.
+       (__arm_vcaddq_rot270_f16): Likewise.
+       (__arm_vbicq_f16): Likewise.
+       (__arm_vandq_f16): Likewise.
+       (__arm_vaddq_n_f16): Likewise.
+       (__arm_vabdq_f16): Likewise.
+       (__arm_vshlltq_n_s8): Likewise.
+       (__arm_vshllbq_n_s8): Likewise.
+       (__arm_vorrq_n_s16): Likewise.
+       (__arm_vbicq_n_s16): Likewise.
+       (__arm_vqmovntq_u32): Likewise.
+       (__arm_vqmovnbq_u32): Likewise.
+       (__arm_vmulltq_poly_p16): Likewise.
+       (__arm_vmullbq_poly_p16): Likewise.
+       (__arm_vmovntq_u32): Likewise.
+       (__arm_vmovnbq_u32): Likewise.
+       (__arm_vmlaldavxq_u32): Likewise.
+       (__arm_vmlaldavq_u32): Likewise.
+       (__arm_vqmovuntq_s32): Likewise.
+       (__arm_vqmovunbq_s32): Likewise.
+       (__arm_vshlltq_n_u16): Likewise.
+       (__arm_vshllbq_n_u16): Likewise.
+       (__arm_vorrq_n_u32): Likewise.
+       (__arm_vbicq_n_u32): Likewise.
+       (__arm_vcmpneq_n_f32): Likewise.
+       (__arm_vcmpneq_f32): Likewise.
+       (__arm_vcmpltq_n_f32): Likewise.
+       (__arm_vcmpltq_f32): Likewise.
+       (__arm_vcmpleq_n_f32): Likewise.
+       (__arm_vcmpleq_f32): Likewise.
+       (__arm_vcmpgtq_n_f32): Likewise.
+       (__arm_vcmpgtq_f32): Likewise.
+       (__arm_vcmpgeq_n_f32): Likewise.
+       (__arm_vcmpgeq_f32): Likewise.
+       (__arm_vcmpeqq_n_f32): Likewise.
+       (__arm_vcmpeqq_f32): Likewise.
+       (__arm_vsubq_f32): Likewise.
+       (__arm_vqmovntq_s32): Likewise.
+       (__arm_vqmovnbq_s32): Likewise.
+       (__arm_vqdmulltq_s32): Likewise.
+       (__arm_vqdmulltq_n_s32): Likewise.
+       (__arm_vqdmullbq_s32): Likewise.
+       (__arm_vqdmullbq_n_s32): Likewise.
+       (__arm_vorrq_f32): Likewise.
+       (__arm_vornq_f32): Likewise.
+       (__arm_vmulq_n_f32): Likewise.
+       (__arm_vmulq_f32): Likewise.
+       (__arm_vmovntq_s32): Likewise.
+       (__arm_vmovnbq_s32): Likewise.
+       (__arm_vmlsldavxq_s32): Likewise.
+       (__arm_vmlsldavq_s32): Likewise.
+       (__arm_vmlaldavxq_s32): Likewise.
+       (__arm_vmlaldavq_s32): Likewise.
+       (__arm_vminnmvq_f32): Likewise.
+       (__arm_vminnmq_f32): Likewise.
+       (__arm_vminnmavq_f32): Likewise.
+       (__arm_vminnmaq_f32): Likewise.
+       (__arm_vmaxnmvq_f32): Likewise.
+       (__arm_vmaxnmq_f32): Likewise.
+       (__arm_vmaxnmavq_f32): Likewise.
+       (__arm_vmaxnmaq_f32): Likewise.
+       (__arm_veorq_f32): Likewise.
+       (__arm_vcmulq_rot90_f32): Likewise.
+       (__arm_vcmulq_rot270_f32): Likewise.
+       (__arm_vcmulq_rot180_f32): Likewise.
+       (__arm_vcmulq_f32): Likewise.
+       (__arm_vcaddq_rot90_f32): Likewise.
+       (__arm_vcaddq_rot270_f32): Likewise.
+       (__arm_vbicq_f32): Likewise.
+       (__arm_vandq_f32): Likewise.
+       (__arm_vaddq_n_f32): Likewise.
+       (__arm_vabdq_f32): Likewise.
+       (__arm_vshlltq_n_s16): Likewise.
+       (__arm_vshllbq_n_s16): Likewise.
+       (__arm_vorrq_n_s32): Likewise.
+       (__arm_vbicq_n_s32): Likewise.
+       (__arm_vrmlaldavhq_u32): Likewise.
+       (__arm_vctp8q_m): Likewise.
+       (__arm_vctp64q_m): Likewise.
+       (__arm_vctp32q_m): Likewise.
+       (__arm_vctp16q_m): Likewise.
+       (__arm_vaddlvaq_u32): Likewise.
+       (__arm_vrmlsldavhxq_s32): Likewise.
+       (__arm_vrmlsldavhq_s32): Likewise.
+       (__arm_vrmlaldavhxq_s32): Likewise.
+       (__arm_vrmlaldavhq_s32): Likewise.
+       (__arm_vcvttq_f16_f32): Likewise.
+       (__arm_vcvtbq_f16_f32): Likewise.
+       (__arm_vaddlvaq_s32): Likewise.
+       (vst4q): Define polymorphic variant.
+       (vrndxq): Likewise.
+       (vrndq): Likewise.
+       (vrndpq): Likewise.
+       (vrndnq): Likewise.
+       (vrndmq): Likewise.
+       (vrndaq): Likewise.
+       (vrev64q): Likewise.
+       (vnegq): Likewise.
+       (vdupq_n): Likewise.
+       (vabsq): Likewise.
+       (vrev32q): Likewise.
+       (vcvtbq_f32): Likewise.
+       (vcvttq_f32): Likewise.
+       (vcvtq): Likewise.
+       (vsubq_n): Likewise.
+       (vbrsrq_n): Likewise.
+       (vcvtq_n): Likewise.
+       (vsubq): Likewise.
+       (vorrq): Likewise.
+       (vabdq): Likewise.
+       (vaddq_n): Likewise.
+       (vandq): Likewise.
+       (vbicq): Likewise.
+       (vornq): Likewise.
+       (vmulq_n): Likewise.
+       (vmulq): Likewise.
+       (vcaddq_rot270): Likewise.
+       (vcmpeqq_n): Likewise.
+       (vcmpeqq): Likewise.
+       (vcaddq_rot90): Likewise.
+       (vcmpgeq_n): Likewise.
+       (vcmpgeq): Likewise.
+       (vcmpgtq_n): Likewise.
+       (vcmpgtq): Likewise.
+       (vcmpgtq): Likewise.
+       (vcmpleq_n): Likewise.
+       (vcmpleq_n): Likewise.
+       (vcmpleq): Likewise.
+       (vcmpleq): Likewise.
+       (vcmpltq_n): Likewise.
+       (vcmpltq_n): Likewise.
+       (vcmpltq): Likewise.
+       (vcmpltq): Likewise.
+       (vcmpneq_n): Likewise.
+       (vcmpneq_n): Likewise.
+       (vcmpneq): Likewise.
+       (vcmpneq): Likewise.
+       (vcmulq): Likewise.
+       (vcmulq): Likewise.
+       (vcmulq_rot180): Likewise.
+       (vcmulq_rot180): Likewise.
+       (vcmulq_rot270): Likewise.
+       (vcmulq_rot270): Likewise.
+       (vcmulq_rot90): Likewise.
+       (vcmulq_rot90): Likewise.
+       (veorq): Likewise.
+       (veorq): Likewise.
+       (vmaxnmaq): Likewise.
+       (vmaxnmaq): Likewise.
+       (vmaxnmavq): Likewise.
+       (vmaxnmavq): Likewise.
+       (vmaxnmq): Likewise.
+       (vmaxnmq): Likewise.
+       (vmaxnmvq): Likewise.
+       (vmaxnmvq): Likewise.
+       (vminnmaq): Likewise.
+       (vminnmaq): Likewise.
+       (vminnmavq): Likewise.
+       (vminnmavq): Likewise.
+       (vminnmq): Likewise.
+       (vminnmq): Likewise.
+       (vminnmvq): Likewise.
+       (vminnmvq): Likewise.
+       (vbicq_n): Likewise.
+       (vqmovntq): Likewise.
+       (vqmovntq): Likewise.
+       (vqmovnbq): Likewise.
+       (vqmovnbq): Likewise.
+       (vmulltq_poly): Likewise.
+       (vmulltq_poly): Likewise.
+       (vmullbq_poly): Likewise.
+       (vmullbq_poly): Likewise.
+       (vmovntq): Likewise.
+       (vmovntq): Likewise.
+       (vmovnbq): Likewise.
+       (vmovnbq): Likewise.
+       (vmlaldavxq): Likewise.
+       (vmlaldavxq): Likewise.
+       (vqmovuntq): Likewise.
+       (vqmovuntq): Likewise.
+       (vshlltq_n): Likewise.
+       (vshlltq_n): Likewise.
+       (vshllbq_n): Likewise.
+       (vshllbq_n): Likewise.
+       (vorrq_n): Likewise.
+       (vorrq_n): Likewise.
+       (vmlaldavq): Likewise.
+       (vmlaldavq): Likewise.
+       (vqmovunbq): Likewise.
+       (vqmovunbq): Likewise.
+       (vqdmulltq_n): Likewise.
+       (vqdmulltq_n): Likewise.
+       (vqdmulltq): Likewise.
+       (vqdmulltq): Likewise.
+       (vqdmullbq_n): Likewise.
+       (vqdmullbq_n): Likewise.
+       (vqdmullbq): Likewise.
+       (vqdmullbq): Likewise.
+       (vaddlvaq): Likewise.
+       (vaddlvaq): Likewise.
+       (vrmlaldavhq): Likewise.
+       (vrmlaldavhq): Likewise.
+       (vrmlaldavhxq): Likewise.
+       (vrmlaldavhxq): Likewise.
+       (vrmlsldavhq): Likewise.
+       (vrmlsldavhq): Likewise.
+       (vrmlsldavhxq): Likewise.
+       (vrmlsldavhxq): Likewise.
+       (vmlsldavxq): Likewise.
+       (vmlsldavxq): Likewise.
+       (vmlsldavq): Likewise.
+       (vmlsldavq): Likewise.
+       * config/arm/arm_mve_builtins.def (BINOP_NONE_NONE_IMM): Use it.
+       (BINOP_NONE_NONE_NONE): Likewise.
+       (BINOP_UNONE_NONE_NONE): Likewise.
+       (BINOP_UNONE_UNONE_IMM): Likewise.
+       (BINOP_UNONE_UNONE_NONE): Likewise.
+       (BINOP_UNONE_UNONE_UNONE): Likewise.
+       * config/arm/mve.md (mve_vabdq_f<mode>): Define RTL pattern.
+       (mve_vaddlvaq_<supf>v4si): Likewise.
+       (mve_vaddq_n_f<mode>): Likewise.
+       (mve_vandq_f<mode>): Likewise.
+       (mve_vbicq_f<mode>): Likewise.
+       (mve_vbicq_n_<supf><mode>): Likewise.
+       (mve_vcaddq_rot270_f<mode>): Likewise.
+       (mve_vcaddq_rot90_f<mode>): Likewise.
+       (mve_vcmpeqq_f<mode>): Likewise.
+       (mve_vcmpeqq_n_f<mode>): Likewise.
+       (mve_vcmpgeq_f<mode>): Likewise.
+       (mve_vcmpgeq_n_f<mode>): Likewise.
+       (mve_vcmpgtq_f<mode>): Likewise.
+       (mve_vcmpgtq_n_f<mode>): Likewise.
+       (mve_vcmpleq_f<mode>): Likewise.
+       (mve_vcmpleq_n_f<mode>): Likewise.
+       (mve_vcmpltq_f<mode>): Likewise.
+       (mve_vcmpltq_n_f<mode>): Likewise.
+       (mve_vcmpneq_f<mode>): Likewise.
+       (mve_vcmpneq_n_f<mode>): Likewise.
+       (mve_vcmulq_f<mode>): Likewise.
+       (mve_vcmulq_rot180_f<mode>): Likewise.
+       (mve_vcmulq_rot270_f<mode>): Likewise.
+       (mve_vcmulq_rot90_f<mode>): Likewise.
+       (mve_vctp<mode1>q_mhi): Likewise.
+       (mve_vcvtbq_f16_f32v8hf): Likewise.
+       (mve_vcvttq_f16_f32v8hf): Likewise.
+       (mve_veorq_f<mode>): Likewise.
+       (mve_vmaxnmaq_f<mode>): Likewise.
+       (mve_vmaxnmavq_f<mode>): Likewise.
+       (mve_vmaxnmq_f<mode>): Likewise.
+       (mve_vmaxnmvq_f<mode>): Likewise.
+       (mve_vminnmaq_f<mode>): Likewise.
+       (mve_vminnmavq_f<mode>): Likewise.
+       (mve_vminnmq_f<mode>): Likewise.
+       (mve_vminnmvq_f<mode>): Likewise.
+       (mve_vmlaldavq_<supf><mode>): Likewise.
+       (mve_vmlaldavxq_<supf><mode>): Likewise.
+       (mve_vmlsldavq_s<mode>): Likewise.
+       (mve_vmlsldavxq_s<mode>): Likewise.
+       (mve_vmovnbq_<supf><mode>): Likewise.
+       (mve_vmovntq_<supf><mode>): Likewise.
+       (mve_vmulq_f<mode>): Likewise.
+       (mve_vmulq_n_f<mode>): Likewise.
+       (mve_vornq_f<mode>): Likewise.
+       (mve_vorrq_f<mode>): Likewise.
+       (mve_vorrq_n_<supf><mode>): Likewise.
+       (mve_vqdmullbq_n_s<mode>): Likewise.
+       (mve_vqdmullbq_s<mode>): Likewise.
+       (mve_vqdmulltq_n_s<mode>): Likewise.
+       (mve_vqdmulltq_s<mode>): Likewise.
+       (mve_vqmovnbq_<supf><mode>): Likewise.
+       (mve_vqmovntq_<supf><mode>): Likewise.
+       (mve_vqmovunbq_s<mode>): Likewise.
+       (mve_vqmovuntq_s<mode>): Likewise.
+       (mve_vrmlaldavhxq_sv4si): Likewise.
+       (mve_vrmlsldavhq_sv4si): Likewise.
+       (mve_vrmlsldavhxq_sv4si): Likewise.
+       (mve_vshllbq_n_<supf><mode>): Likewise.
+       (mve_vshlltq_n_<supf><mode>): Likewise.
+       (mve_vsubq_f<mode>): Likewise.
+       (mve_vmulltq_poly_p<mode>): Likewise.
+       (mve_vmullbq_poly_p<mode>): Likewise.
+       (mve_vrmlaldavhq_<supf>v4si): Likewise.
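+
+       Example (editor's sketch; signature as documented in the MVE
+       ACLE):
+
+         #include <arm_mve.h>
+
+         /* VQMOVNT: saturate each u16 lane to u8 and write the top
+            (odd-numbered) byte lanes of the destination.  */
+         uint8x16_t
+         narrow_top (uint8x16_t dst, uint16x8_t v)
+         {
+           return vqmovntq_u16 (dst, v);
+         }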
+
+2020-03-17  Andre Vieira  <andre.simoesdiasvieira@arm.com>
+            Mihail Ionescu  <mihail.ionescu@arm.com>
+            Srinath Parvathaneni  <srinath.parvathaneni@arm.com>
+
+       * config/arm/arm_mve.h (vsubq_u8): Define macro.
+       (vsubq_n_u8): Likewise.
+       (vrmulhq_u8): Likewise.
+       (vrhaddq_u8): Likewise.
+       (vqsubq_u8): Likewise.
+       (vqsubq_n_u8): Likewise.
+       (vqaddq_u8): Likewise.
+       (vqaddq_n_u8): Likewise.
+       (vorrq_u8): Likewise.
+       (vornq_u8): Likewise.
+       (vmulq_u8): Likewise.
+       (vmulq_n_u8): Likewise.
+       (vmulltq_int_u8): Likewise.
+       (vmullbq_int_u8): Likewise.
+       (vmulhq_u8): Likewise.
+       (vmladavq_u8): Likewise.
+       (vminvq_u8): Likewise.
+       (vminq_u8): Likewise.
+       (vmaxvq_u8): Likewise.
+       (vmaxq_u8): Likewise.
+       (vhsubq_u8): Likewise.
+       (vhsubq_n_u8): Likewise.
+       (vhaddq_u8): Likewise.
+       (vhaddq_n_u8): Likewise.
+       (veorq_u8): Likewise.
+       (vcmpneq_n_u8): Likewise.
+       (vcmphiq_u8): Likewise.
+       (vcmphiq_n_u8): Likewise.
+       (vcmpeqq_u8): Likewise.
+       (vcmpeqq_n_u8): Likewise.
+       (vcmpcsq_u8): Likewise.
+       (vcmpcsq_n_u8): Likewise.
+       (vcaddq_rot90_u8): Likewise.
+       (vcaddq_rot270_u8): Likewise.
+       (vbicq_u8): Likewise.
+       (vandq_u8): Likewise.
+       (vaddvq_p_u8): Likewise.
+       (vaddvaq_u8): Likewise.
+       (vaddq_n_u8): Likewise.
+       (vabdq_u8): Likewise.
+       (vshlq_r_u8): Likewise.
+       (vrshlq_u8): Likewise.
+       (vrshlq_n_u8): Likewise.
+       (vqshlq_u8): Likewise.
+       (vqshlq_r_u8): Likewise.
+       (vqrshlq_u8): Likewise.
+       (vqrshlq_n_u8): Likewise.
+       (vminavq_s8): Likewise.
+       (vminaq_s8): Likewise.
+       (vmaxavq_s8): Likewise.
+       (vmaxaq_s8): Likewise.
+       (vbrsrq_n_u8): Likewise.
+       (vshlq_n_u8): Likewise.
+       (vrshrq_n_u8): Likewise.
+       (vqshlq_n_u8): Likewise.
+       (vcmpneq_n_s8): Likewise.
+       (vcmpltq_s8): Likewise.
+       (vcmpltq_n_s8): Likewise.
+       (vcmpleq_s8): Likewise.
+       (vcmpleq_n_s8): Likewise.
+       (vcmpgtq_s8): Likewise.
+       (vcmpgtq_n_s8): Likewise.
+       (vcmpgeq_s8): Likewise.
+       (vcmpgeq_n_s8): Likewise.
+       (vcmpeqq_s8): Likewise.
+       (vcmpeqq_n_s8): Likewise.
+       (vqshluq_n_s8): Likewise.
+       (vaddvq_p_s8): Likewise.
+       (vsubq_s8): Likewise.
+       (vsubq_n_s8): Likewise.
+       (vshlq_r_s8): Likewise.
+       (vrshlq_s8): Likewise.
+       (vrshlq_n_s8): Likewise.
+       (vrmulhq_s8): Likewise.
+       (vrhaddq_s8): Likewise.
+       (vqsubq_s8): Likewise.
+       (vqsubq_n_s8): Likewise.
+       (vqshlq_s8): Likewise.
+       (vqshlq_r_s8): Likewise.
+       (vqrshlq_s8): Likewise.
+       (vqrshlq_n_s8): Likewise.
+       (vqrdmulhq_s8): Likewise.
+       (vqrdmulhq_n_s8): Likewise.
+       (vqdmulhq_s8): Likewise.
+       (vqdmulhq_n_s8): Likewise.
+       (vqaddq_s8): Likewise.
+       (vqaddq_n_s8): Likewise.
+       (vorrq_s8): Likewise.
+       (vornq_s8): Likewise.
+       (vmulq_s8): Likewise.
+       (vmulq_n_s8): Likewise.
+       (vmulltq_int_s8): Likewise.
+       (vmullbq_int_s8): Likewise.
+       (vmulhq_s8): Likewise.
+       (vmlsdavxq_s8): Likewise.
+       (vmlsdavq_s8): Likewise.
+       (vmladavxq_s8): Likewise.
+       (vmladavq_s8): Likewise.
+       (vminvq_s8): Likewise.
+       (vminq_s8): Likewise.
+       (vmaxvq_s8): Likewise.
+       (vmaxq_s8): Likewise.
+       (vhsubq_s8): Likewise.
+       (vhsubq_n_s8): Likewise.
+       (vhcaddq_rot90_s8): Likewise.
+       (vhcaddq_rot270_s8): Likewise.
+       (vhaddq_s8): Likewise.
+       (vhaddq_n_s8): Likewise.
+       (veorq_s8): Likewise.
+       (vcaddq_rot90_s8): Likewise.
+       (vcaddq_rot270_s8): Likewise.
+       (vbrsrq_n_s8): Likewise.
+       (vbicq_s8): Likewise.
+       (vandq_s8): Likewise.
+       (vaddvaq_s8): Likewise.
+       (vaddq_n_s8): Likewise.
+       (vabdq_s8): Likewise.
+       (vshlq_n_s8): Likewise.
+       (vrshrq_n_s8): Likewise.
+       (vqshlq_n_s8): Likewise.
+       (vsubq_u16): Likewise.
+       (vsubq_n_u16): Likewise.
+       (vrmulhq_u16): Likewise.
+       (vrhaddq_u16): Likewise.
+       (vqsubq_u16): Likewise.
+       (vqsubq_n_u16): Likewise.
+       (vqaddq_u16): Likewise.
+       (vqaddq_n_u16): Likewise.
+       (vorrq_u16): Likewise.
+       (vornq_u16): Likewise.
+       (vmulq_u16): Likewise.
+       (vmulq_n_u16): Likewise.
+       (vmulltq_int_u16): Likewise.
+       (vmullbq_int_u16): Likewise.
+       (vmulhq_u16): Likewise.
+       (vmladavq_u16): Likewise.
+       (vminvq_u16): Likewise.
+       (vminq_u16): Likewise.
+       (vmaxvq_u16): Likewise.
+       (vmaxq_u16): Likewise.
+       (vhsubq_u16): Likewise.
+       (vhsubq_n_u16): Likewise.
+       (vhaddq_u16): Likewise.
+       (vhaddq_n_u16): Likewise.
+       (veorq_u16): Likewise.
+       (vcmpneq_n_u16): Likewise.
+       (vcmphiq_u16): Likewise.
+       (vcmphiq_n_u16): Likewise.
+       (vcmpeqq_u16): Likewise.
+       (vcmpeqq_n_u16): Likewise.
+       (vcmpcsq_u16): Likewise.
+       (vcmpcsq_n_u16): Likewise.
+       (vcaddq_rot90_u16): Likewise.
+       (vcaddq_rot270_u16): Likewise.
+       (vbicq_u16): Likewise.
+       (vandq_u16): Likewise.
+       (vaddvq_p_u16): Likewise.
+       (vaddvaq_u16): Likewise.
+       (vaddq_n_u16): Likewise.
+       (vabdq_u16): Likewise.
+       (vshlq_r_u16): Likewise.
+       (vrshlq_u16): Likewise.
+       (vrshlq_n_u16): Likewise.
+       (vqshlq_u16): Likewise.
+       (vqshlq_r_u16): Likewise.
+       (vqrshlq_u16): Likewise.
+       (vqrshlq_n_u16): Likewise.
+       (vminavq_s16): Likewise.
+       (vminaq_s16): Likewise.
+       (vmaxavq_s16): Likewise.
+       (vmaxaq_s16): Likewise.
+       (vbrsrq_n_u16): Likewise.
+       (vshlq_n_u16): Likewise.
+       (vrshrq_n_u16): Likewise.
+       (vqshlq_n_u16): Likewise.
+       (vcmpneq_n_s16): Likewise.
+       (vcmpltq_s16): Likewise.
+       (vcmpltq_n_s16): Likewise.
+       (vcmpleq_s16): Likewise.
+       (vcmpleq_n_s16): Likewise.
+       (vcmpgtq_s16): Likewise.
+       (vcmpgtq_n_s16): Likewise.
+       (vcmpgeq_s16): Likewise.
+       (vcmpgeq_n_s16): Likewise.
+       (vcmpeqq_s16): Likewise.
+       (vcmpeqq_n_s16): Likewise.
+       (vqshluq_n_s16): Likewise.
+       (vaddvq_p_s16): Likewise.
+       (vsubq_s16): Likewise.
+       (vsubq_n_s16): Likewise.
+       (vshlq_r_s16): Likewise.
+       (vrshlq_s16): Likewise.
+       (vrshlq_n_s16): Likewise.
+       (vrmulhq_s16): Likewise.
+       (vrhaddq_s16): Likewise.
+       (vqsubq_s16): Likewise.
+       (vqsubq_n_s16): Likewise.
+       (vqshlq_s16): Likewise.
+       (vqshlq_r_s16): Likewise.
+       (vqrshlq_s16): Likewise.
+       (vqrshlq_n_s16): Likewise.
+       (vqrdmulhq_s16): Likewise.
+       (vqrdmulhq_n_s16): Likewise.
+       (vqdmulhq_s16): Likewise.
+       (vqdmulhq_n_s16): Likewise.
+       (vqaddq_s16): Likewise.
+       (vqaddq_n_s16): Likewise.
+       (vorrq_s16): Likewise.
+       (vornq_s16): Likewise.
+       (vmulq_s16): Likewise.
+       (vmulq_n_s16): Likewise.
+       (vmulltq_int_s16): Likewise.
+       (vmullbq_int_s16): Likewise.
+       (vmulhq_s16): Likewise.
+       (vmlsdavxq_s16): Likewise.
+       (vmlsdavq_s16): Likewise.
+       (vmladavxq_s16): Likewise.
+       (vmladavq_s16): Likewise.
+       (vminvq_s16): Likewise.
+       (vminq_s16): Likewise.
+       (vmaxvq_s16): Likewise.
+       (vmaxq_s16): Likewise.
+       (vhsubq_s16): Likewise.
+       (vhsubq_n_s16): Likewise.
+       (vhcaddq_rot90_s16): Likewise.
+       (vhcaddq_rot270_s16): Likewise.
+       (vhaddq_s16): Likewise.
+       (vhaddq_n_s16): Likewise.
+       (veorq_s16): Likewise.
+       (vcaddq_rot90_s16): Likewise.
+       (vcaddq_rot270_s16): Likewise.
+       (vbrsrq_n_s16): Likewise.
+       (vbicq_s16): Likewise.
+       (vandq_s16): Likewise.
+       (vaddvaq_s16): Likewise.
+       (vaddq_n_s16): Likewise.
+       (vabdq_s16): Likewise.
+       (vshlq_n_s16): Likewise.
+       (vrshrq_n_s16): Likewise.
+       (vqshlq_n_s16): Likewise.
+       (vsubq_u32): Likewise.
+       (vsubq_n_u32): Likewise.
+       (vrmulhq_u32): Likewise.
+       (vrhaddq_u32): Likewise.
+       (vqsubq_u32): Likewise.
+       (vqsubq_n_u32): Likewise.
+       (vqaddq_u32): Likewise.
+       (vqaddq_n_u32): Likewise.
+       (vorrq_u32): Likewise.
+       (vornq_u32): Likewise.
+       (vmulq_u32): Likewise.
+       (vmulq_n_u32): Likewise.
+       (vmulltq_int_u32): Likewise.
+       (vmullbq_int_u32): Likewise.
+       (vmulhq_u32): Likewise.
+       (vmladavq_u32): Likewise.
+       (vminvq_u32): Likewise.
+       (vminq_u32): Likewise.
+       (vmaxvq_u32): Likewise.
+       (vmaxq_u32): Likewise.
+       (vhsubq_u32): Likewise.
+       (vhsubq_n_u32): Likewise.
+       (vhaddq_u32): Likewise.
+       (vhaddq_n_u32): Likewise.
+       (veorq_u32): Likewise.
+       (vcmpneq_n_u32): Likewise.
+       (vcmphiq_u32): Likewise.
+       (vcmphiq_n_u32): Likewise.
+       (vcmpeqq_u32): Likewise.
+       (vcmpeqq_n_u32): Likewise.
+       (vcmpcsq_u32): Likewise.
+       (vcmpcsq_n_u32): Likewise.
+       (vcaddq_rot90_u32): Likewise.
+       (vcaddq_rot270_u32): Likewise.
+       (vbicq_u32): Likewise.
+       (vandq_u32): Likewise.
+       (vaddvq_p_u32): Likewise.
+       (vaddvaq_u32): Likewise.
+       (vaddq_n_u32): Likewise.
+       (vabdq_u32): Likewise.
+       (vshlq_r_u32): Likewise.
+       (vrshlq_u32): Likewise.
+       (vrshlq_n_u32): Likewise.
+       (vqshlq_u32): Likewise.
+       (vqshlq_r_u32): Likewise.
+       (vqrshlq_u32): Likewise.
+       (vqrshlq_n_u32): Likewise.
+       (vminavq_s32): Likewise.
+       (vminaq_s32): Likewise.
+       (vmaxavq_s32): Likewise.
+       (vmaxaq_s32): Likewise.
+       (vbrsrq_n_u32): Likewise.
+       (vshlq_n_u32): Likewise.
+       (vrshrq_n_u32): Likewise.
+       (vqshlq_n_u32): Likewise.
+       (vcmpneq_n_s32): Likewise.
+       (vcmpltq_s32): Likewise.
+       (vcmpltq_n_s32): Likewise.
+       (vcmpleq_s32): Likewise.
+       (vcmpleq_n_s32): Likewise.
+       (vcmpgtq_s32): Likewise.
+       (vcmpgtq_n_s32): Likewise.
+       (vcmpgeq_s32): Likewise.
+       (vcmpgeq_n_s32): Likewise.
+       (vcmpeqq_s32): Likewise.
+       (vcmpeqq_n_s32): Likewise.
+       (vqshluq_n_s32): Likewise.
+       (vaddvq_p_s32): Likewise.
+       (vsubq_s32): Likewise.
+       (vsubq_n_s32): Likewise.
+       (vshlq_r_s32): Likewise.
+       (vrshlq_s32): Likewise.
+       (vrshlq_n_s32): Likewise.
+       (vrmulhq_s32): Likewise.
+       (vrhaddq_s32): Likewise.
+       (vqsubq_s32): Likewise.
+       (vqsubq_n_s32): Likewise.
+       (vqshlq_s32): Likewise.
+       (vqshlq_r_s32): Likewise.
+       (vqrshlq_s32): Likewise.
+       (vqrshlq_n_s32): Likewise.
+       (vqrdmulhq_s32): Likewise.
+       (vqrdmulhq_n_s32): Likewise.
+       (vqdmulhq_s32): Likewise.
+       (vqdmulhq_n_s32): Likewise.
+       (vqaddq_s32): Likewise.
+       (vqaddq_n_s32): Likewise.
+       (vorrq_s32): Likewise.
+       (vornq_s32): Likewise.
+       (vmulq_s32): Likewise.
+       (vmulq_n_s32): Likewise.
+       (vmulltq_int_s32): Likewise.
+       (vmullbq_int_s32): Likewise.
+       (vmulhq_s32): Likewise.
+       (vmlsdavxq_s32): Likewise.
+       (vmlsdavq_s32): Likewise.
+       (vmladavxq_s32): Likewise.
+       (vmladavq_s32): Likewise.
+       (vminvq_s32): Likewise.
+       (vminq_s32): Likewise.
+       (vmaxvq_s32): Likewise.
+       (vmaxq_s32): Likewise.
+       (vhsubq_s32): Likewise.
+       (vhsubq_n_s32): Likewise.
+       (vhcaddq_rot90_s32): Likewise.
+       (vhcaddq_rot270_s32): Likewise.
+       (vhaddq_s32): Likewise.
+       (vhaddq_n_s32): Likewise.
+       (veorq_s32): Likewise.
+       (vcaddq_rot90_s32): Likewise.
+       (vcaddq_rot270_s32): Likewise.
+       (vbrsrq_n_s32): Likewise.
+       (vbicq_s32): Likewise.
+       (vandq_s32): Likewise.
+       (vaddvaq_s32): Likewise.
+       (vaddq_n_s32): Likewise.
+       (vabdq_s32): Likewise.
+       (vshlq_n_s32): Likewise.
+       (vrshrq_n_s32): Likewise.
+       (vqshlq_n_s32): Likewise.
+       (__arm_vsubq_u8): Define intrinsic.
+       (__arm_vsubq_n_u8): Likewise.
+       (__arm_vrmulhq_u8): Likewise.
+       (__arm_vrhaddq_u8): Likewise.
+       (__arm_vqsubq_u8): Likewise.
+       (__arm_vqsubq_n_u8): Likewise.
+       (__arm_vqaddq_u8): Likewise.
+       (__arm_vqaddq_n_u8): Likewise.
+       (__arm_vorrq_u8): Likewise.
+       (__arm_vornq_u8): Likewise.
+       (__arm_vmulq_u8): Likewise.
+       (__arm_vmulq_n_u8): Likewise.
+       (__arm_vmulltq_int_u8): Likewise.
+       (__arm_vmullbq_int_u8): Likewise.
+       (__arm_vmulhq_u8): Likewise.
+       (__arm_vmladavq_u8): Likewise.
+       (__arm_vminvq_u8): Likewise.
+       (__arm_vminq_u8): Likewise.
+       (__arm_vmaxvq_u8): Likewise.
+       (__arm_vmaxq_u8): Likewise.
+       (__arm_vhsubq_u8): Likewise.
+       (__arm_vhsubq_n_u8): Likewise.
+       (__arm_vhaddq_u8): Likewise.
+       (__arm_vhaddq_n_u8): Likewise.
+       (__arm_veorq_u8): Likewise.
+       (__arm_vcmpneq_n_u8): Likewise.
+       (__arm_vcmphiq_u8): Likewise.
+       (__arm_vcmphiq_n_u8): Likewise.
+       (__arm_vcmpeqq_u8): Likewise.
+       (__arm_vcmpeqq_n_u8): Likewise.
+       (__arm_vcmpcsq_u8): Likewise.
+       (__arm_vcmpcsq_n_u8): Likewise.
+       (__arm_vcaddq_rot90_u8): Likewise.
+       (__arm_vcaddq_rot270_u8): Likewise.
+       (__arm_vbicq_u8): Likewise.
+       (__arm_vandq_u8): Likewise.
+       (__arm_vaddvq_p_u8): Likewise.
+       (__arm_vaddvaq_u8): Likewise.
+       (__arm_vaddq_n_u8): Likewise.
+       (__arm_vabdq_u8): Likewise.
+       (__arm_vshlq_r_u8): Likewise.
+       (__arm_vrshlq_u8): Likewise.
+       (__arm_vrshlq_n_u8): Likewise.
+       (__arm_vqshlq_u8): Likewise.
+       (__arm_vqshlq_r_u8): Likewise.
+       (__arm_vqrshlq_u8): Likewise.
+       (__arm_vqrshlq_n_u8): Likewise.
+       (__arm_vminavq_s8): Likewise.
+       (__arm_vminaq_s8): Likewise.
+       (__arm_vmaxavq_s8): Likewise.
+       (__arm_vmaxaq_s8): Likewise.
+       (__arm_vbrsrq_n_u8): Likewise.
+       (__arm_vshlq_n_u8): Likewise.
+       (__arm_vrshrq_n_u8): Likewise.
+       (__arm_vqshlq_n_u8): Likewise.
+       (__arm_vcmpneq_n_s8): Likewise.
+       (__arm_vcmpltq_s8): Likewise.
+       (__arm_vcmpltq_n_s8): Likewise.
+       (__arm_vcmpleq_s8): Likewise.
+       (__arm_vcmpleq_n_s8): Likewise.
+       (__arm_vcmpgtq_s8): Likewise.
+       (__arm_vcmpgtq_n_s8): Likewise.
+       (__arm_vcmpgeq_s8): Likewise.
+       (__arm_vcmpgeq_n_s8): Likewise.
+       (__arm_vcmpeqq_s8): Likewise.
+       (__arm_vcmpeqq_n_s8): Likewise.
+       (__arm_vqshluq_n_s8): Likewise.
+       (__arm_vaddvq_p_s8): Likewise.
+       (__arm_vsubq_s8): Likewise.
+       (__arm_vsubq_n_s8): Likewise.
+       (__arm_vshlq_r_s8): Likewise.
+       (__arm_vrshlq_s8): Likewise.
+       (__arm_vrshlq_n_s8): Likewise.
+       (__arm_vrmulhq_s8): Likewise.
+       (__arm_vrhaddq_s8): Likewise.
+       (__arm_vqsubq_s8): Likewise.
+       (__arm_vqsubq_n_s8): Likewise.
+       (__arm_vqshlq_s8): Likewise.
+       (__arm_vqshlq_r_s8): Likewise.
+       (__arm_vqrshlq_s8): Likewise.
+       (__arm_vqrshlq_n_s8): Likewise.
+       (__arm_vqrdmulhq_s8): Likewise.
+       (__arm_vqrdmulhq_n_s8): Likewise.
+       (__arm_vqdmulhq_s8): Likewise.
+       (__arm_vqdmulhq_n_s8): Likewise.
+       (__arm_vqaddq_s8): Likewise.
+       (__arm_vqaddq_n_s8): Likewise.
+       (__arm_vorrq_s8): Likewise.
+       (__arm_vornq_s8): Likewise.
+       (__arm_vmulq_s8): Likewise.
+       (__arm_vmulq_n_s8): Likewise.
+       (__arm_vmulltq_int_s8): Likewise.
+       (__arm_vmullbq_int_s8): Likewise.
+       (__arm_vmulhq_s8): Likewise.
+       (__arm_vmlsdavxq_s8): Likewise.
+       (__arm_vmlsdavq_s8): Likewise.
+       (__arm_vmladavxq_s8): Likewise.
+       (__arm_vmladavq_s8): Likewise.
+       (__arm_vminvq_s8): Likewise.
+       (__arm_vminq_s8): Likewise.
+       (__arm_vmaxvq_s8): Likewise.
+       (__arm_vmaxq_s8): Likewise.
+       (__arm_vhsubq_s8): Likewise.
+       (__arm_vhsubq_n_s8): Likewise.
+       (__arm_vhcaddq_rot90_s8): Likewise.
+       (__arm_vhcaddq_rot270_s8): Likewise.
+       (__arm_vhaddq_s8): Likewise.
+       (__arm_vhaddq_n_s8): Likewise.
+       (__arm_veorq_s8): Likewise.
+       (__arm_vcaddq_rot90_s8): Likewise.
+       (__arm_vcaddq_rot270_s8): Likewise.
+       (__arm_vbrsrq_n_s8): Likewise.
+       (__arm_vbicq_s8): Likewise.
+       (__arm_vandq_s8): Likewise.
+       (__arm_vaddvaq_s8): Likewise.
+       (__arm_vaddq_n_s8): Likewise.
+       (__arm_vabdq_s8): Likewise.
+       (__arm_vshlq_n_s8): Likewise.
+       (__arm_vrshrq_n_s8): Likewise.
+       (__arm_vqshlq_n_s8): Likewise.
+       (__arm_vsubq_u16): Likewise.
+       (__arm_vsubq_n_u16): Likewise.
+       (__arm_vrmulhq_u16): Likewise.
+       (__arm_vrhaddq_u16): Likewise.
+       (__arm_vqsubq_u16): Likewise.
+       (__arm_vqsubq_n_u16): Likewise.
+       (__arm_vqaddq_u16): Likewise.
+       (__arm_vqaddq_n_u16): Likewise.
+       (__arm_vorrq_u16): Likewise.
+       (__arm_vornq_u16): Likewise.
+       (__arm_vmulq_u16): Likewise.
+       (__arm_vmulq_n_u16): Likewise.
+       (__arm_vmulltq_int_u16): Likewise.
+       (__arm_vmullbq_int_u16): Likewise.
+       (__arm_vmulhq_u16): Likewise.
+       (__arm_vmladavq_u16): Likewise.
+       (__arm_vminvq_u16): Likewise.
+       (__arm_vminq_u16): Likewise.
+       (__arm_vmaxvq_u16): Likewise.
+       (__arm_vmaxq_u16): Likewise.
+       (__arm_vhsubq_u16): Likewise.
+       (__arm_vhsubq_n_u16): Likewise.
+       (__arm_vhaddq_u16): Likewise.
+       (__arm_vhaddq_n_u16): Likewise.
+       (__arm_veorq_u16): Likewise.
+       (__arm_vcmpneq_n_u16): Likewise.
+       (__arm_vcmphiq_u16): Likewise.
+       (__arm_vcmphiq_n_u16): Likewise.
+       (__arm_vcmpeqq_u16): Likewise.
+       (__arm_vcmpeqq_n_u16): Likewise.
+       (__arm_vcmpcsq_u16): Likewise.
+       (__arm_vcmpcsq_n_u16): Likewise.
+       (__arm_vcaddq_rot90_u16): Likewise.
+       (__arm_vcaddq_rot270_u16): Likewise.
+       (__arm_vbicq_u16): Likewise.
+       (__arm_vandq_u16): Likewise.
+       (__arm_vaddvq_p_u16): Likewise.
+       (__arm_vaddvaq_u16): Likewise.
+       (__arm_vaddq_n_u16): Likewise.
+       (__arm_vabdq_u16): Likewise.
+       (__arm_vshlq_r_u16): Likewise.
+       (__arm_vrshlq_u16): Likewise.
+       (__arm_vrshlq_n_u16): Likewise.
+       (__arm_vqshlq_u16): Likewise.
+       (__arm_vqshlq_r_u16): Likewise.
+       (__arm_vqrshlq_u16): Likewise.
+       (__arm_vqrshlq_n_u16): Likewise.
+       (__arm_vminavq_s16): Likewise.
+       (__arm_vminaq_s16): Likewise.
+       (__arm_vmaxavq_s16): Likewise.
+       (__arm_vmaxaq_s16): Likewise.
+       (__arm_vbrsrq_n_u16): Likewise.
+       (__arm_vshlq_n_u16): Likewise.
+       (__arm_vrshrq_n_u16): Likewise.
+       (__arm_vqshlq_n_u16): Likewise.
+       (__arm_vcmpneq_n_s16): Likewise.
+       (__arm_vcmpltq_s16): Likewise.
+       (__arm_vcmpltq_n_s16): Likewise.
+       (__arm_vcmpleq_s16): Likewise.
+       (__arm_vcmpleq_n_s16): Likewise.
+       (__arm_vcmpgtq_s16): Likewise.
+       (__arm_vcmpgtq_n_s16): Likewise.
+       (__arm_vcmpgeq_s16): Likewise.
+       (__arm_vcmpgeq_n_s16): Likewise.
+       (__arm_vcmpeqq_s16): Likewise.
+       (__arm_vcmpeqq_n_s16): Likewise.
+       (__arm_vqshluq_n_s16): Likewise.
+       (__arm_vaddvq_p_s16): Likewise.
+       (__arm_vsubq_s16): Likewise.
+       (__arm_vsubq_n_s16): Likewise.
+       (__arm_vshlq_r_s16): Likewise.
+       (__arm_vrshlq_s16): Likewise.
+       (__arm_vrshlq_n_s16): Likewise.
+       (__arm_vrmulhq_s16): Likewise.
+       (__arm_vrhaddq_s16): Likewise.
+       (__arm_vqsubq_s16): Likewise.
+       (__arm_vqsubq_n_s16): Likewise.
+       (__arm_vqshlq_s16): Likewise.
+       (__arm_vqshlq_r_s16): Likewise.
+       (__arm_vqrshlq_s16): Likewise.
+       (__arm_vqrshlq_n_s16): Likewise.
+       (__arm_vqrdmulhq_s16): Likewise.
+       (__arm_vqrdmulhq_n_s16): Likewise.
+       (__arm_vqdmulhq_s16): Likewise.
+       (__arm_vqdmulhq_n_s16): Likewise.
+       (__arm_vqaddq_s16): Likewise.
+       (__arm_vqaddq_n_s16): Likewise.
+       (__arm_vorrq_s16): Likewise.
+       (__arm_vornq_s16): Likewise.
+       (__arm_vmulq_s16): Likewise.
+       (__arm_vmulq_n_s16): Likewise.
+       (__arm_vmulltq_int_s16): Likewise.
+       (__arm_vmullbq_int_s16): Likewise.
+       (__arm_vmulhq_s16): Likewise.
+       (__arm_vmlsdavxq_s16): Likewise.
+       (__arm_vmlsdavq_s16): Likewise.
+       (__arm_vmladavxq_s16): Likewise.
+       (__arm_vmladavq_s16): Likewise.
+       (__arm_vminvq_s16): Likewise.
+       (__arm_vminq_s16): Likewise.
+       (__arm_vmaxvq_s16): Likewise.
+       (__arm_vmaxq_s16): Likewise.
+       (__arm_vhsubq_s16): Likewise.
+       (__arm_vhsubq_n_s16): Likewise.
+       (__arm_vhcaddq_rot90_s16): Likewise.
+       (__arm_vhcaddq_rot270_s16): Likewise.
+       (__arm_vhaddq_s16): Likewise.
+       (__arm_vhaddq_n_s16): Likewise.
+       (__arm_veorq_s16): Likewise.
+       (__arm_vcaddq_rot90_s16): Likewise.
+       (__arm_vcaddq_rot270_s16): Likewise.
+       (__arm_vbrsrq_n_s16): Likewise.
+       (__arm_vbicq_s16): Likewise.
+       (__arm_vandq_s16): Likewise.
+       (__arm_vaddvaq_s16): Likewise.
+       (__arm_vaddq_n_s16): Likewise.
+       (__arm_vabdq_s16): Likewise.
+       (__arm_vshlq_n_s16): Likewise.
+       (__arm_vrshrq_n_s16): Likewise.
+       (__arm_vqshlq_n_s16): Likewise.
+       (__arm_vsubq_u32): Likewise.
+       (__arm_vsubq_n_u32): Likewise.
+       (__arm_vrmulhq_u32): Likewise.
+       (__arm_vrhaddq_u32): Likewise.
+       (__arm_vqsubq_u32): Likewise.
+       (__arm_vqsubq_n_u32): Likewise.
+       (__arm_vqaddq_u32): Likewise.
+       (__arm_vqaddq_n_u32): Likewise.
+       (__arm_vorrq_u32): Likewise.
+       (__arm_vornq_u32): Likewise.
+       (__arm_vmulq_u32): Likewise.
+       (__arm_vmulq_n_u32): Likewise.
+       (__arm_vmulltq_int_u32): Likewise.
+       (__arm_vmullbq_int_u32): Likewise.
+       (__arm_vmulhq_u32): Likewise.
+       (__arm_vmladavq_u32): Likewise.
+       (__arm_vminvq_u32): Likewise.
+       (__arm_vminq_u32): Likewise.
+       (__arm_vmaxvq_u32): Likewise.
+       (__arm_vmaxq_u32): Likewise.
+       (__arm_vhsubq_u32): Likewise.
+       (__arm_vhsubq_n_u32): Likewise.
+       (__arm_vhaddq_u32): Likewise.
+       (__arm_vhaddq_n_u32): Likewise.
+       (__arm_veorq_u32): Likewise.
+       (__arm_vcmpneq_n_u32): Likewise.
+       (__arm_vcmphiq_u32): Likewise.
+       (__arm_vcmphiq_n_u32): Likewise.
+       (__arm_vcmpeqq_u32): Likewise.
+       (__arm_vcmpeqq_n_u32): Likewise.
+       (__arm_vcmpcsq_u32): Likewise.
+       (__arm_vcmpcsq_n_u32): Likewise.
+       (__arm_vcaddq_rot90_u32): Likewise.
+       (__arm_vcaddq_rot270_u32): Likewise.
+       (__arm_vbicq_u32): Likewise.
+       (__arm_vandq_u32): Likewise.
+       (__arm_vaddvq_p_u32): Likewise.
+       (__arm_vaddvaq_u32): Likewise.
+       (__arm_vaddq_n_u32): Likewise.
+       (__arm_vabdq_u32): Likewise.
+       (__arm_vshlq_r_u32): Likewise.
+       (__arm_vrshlq_u32): Likewise.
+       (__arm_vrshlq_n_u32): Likewise.
+       (__arm_vqshlq_u32): Likewise.
+       (__arm_vqshlq_r_u32): Likewise.
+       (__arm_vqrshlq_u32): Likewise.
+       (__arm_vqrshlq_n_u32): Likewise.
+       (__arm_vminavq_s32): Likewise.
+       (__arm_vminaq_s32): Likewise.
+       (__arm_vmaxavq_s32): Likewise.
+       (__arm_vmaxaq_s32): Likewise.
+       (__arm_vbrsrq_n_u32): Likewise.
+       (__arm_vshlq_n_u32): Likewise.
+       (__arm_vrshrq_n_u32): Likewise.
+       (__arm_vqshlq_n_u32): Likewise.
+       (__arm_vcmpneq_n_s32): Likewise.
+       (__arm_vcmpltq_s32): Likewise.
+       (__arm_vcmpltq_n_s32): Likewise.
+       (__arm_vcmpleq_s32): Likewise.
+       (__arm_vcmpleq_n_s32): Likewise.
+       (__arm_vcmpgtq_s32): Likewise.
+       (__arm_vcmpgtq_n_s32): Likewise.
+       (__arm_vcmpgeq_s32): Likewise.
+       (__arm_vcmpgeq_n_s32): Likewise.
+       (__arm_vcmpeqq_s32): Likewise.
+       (__arm_vcmpeqq_n_s32): Likewise.
+       (__arm_vqshluq_n_s32): Likewise.
+       (__arm_vaddvq_p_s32): Likewise.
+       (__arm_vsubq_s32): Likewise.
+       (__arm_vsubq_n_s32): Likewise.
+       (__arm_vshlq_r_s32): Likewise.
+       (__arm_vrshlq_s32): Likewise.
+       (__arm_vrshlq_n_s32): Likewise.
+       (__arm_vrmulhq_s32): Likewise.
+       (__arm_vrhaddq_s32): Likewise.
+       (__arm_vqsubq_s32): Likewise.
+       (__arm_vqsubq_n_s32): Likewise.
+       (__arm_vqshlq_s32): Likewise.
+       (__arm_vqshlq_r_s32): Likewise.
+       (__arm_vqrshlq_s32): Likewise.
+       (__arm_vqrshlq_n_s32): Likewise.
+       (__arm_vqrdmulhq_s32): Likewise.
+       (__arm_vqrdmulhq_n_s32): Likewise.
+       (__arm_vqdmulhq_s32): Likewise.
+       (__arm_vqdmulhq_n_s32): Likewise.
+       (__arm_vqaddq_s32): Likewise.
+       (__arm_vqaddq_n_s32): Likewise.
+       (__arm_vorrq_s32): Likewise.
+       (__arm_vornq_s32): Likewise.
+       (__arm_vmulq_s32): Likewise.
+       (__arm_vmulq_n_s32): Likewise.
+       (__arm_vmulltq_int_s32): Likewise.
+       (__arm_vmullbq_int_s32): Likewise.
+       (__arm_vmulhq_s32): Likewise.
+       (__arm_vmlsdavxq_s32): Likewise.
+       (__arm_vmlsdavq_s32): Likewise.
+       (__arm_vmladavxq_s32): Likewise.
+       (__arm_vmladavq_s32): Likewise.
+       (__arm_vminvq_s32): Likewise.
+       (__arm_vminq_s32): Likewise.
+       (__arm_vmaxvq_s32): Likewise.
+       (__arm_vmaxq_s32): Likewise.
+       (__arm_vhsubq_s32): Likewise.
+       (__arm_vhsubq_n_s32): Likewise.
+       (__arm_vhcaddq_rot90_s32): Likewise.
+       (__arm_vhcaddq_rot270_s32): Likewise.
+       (__arm_vhaddq_s32): Likewise.
+       (__arm_vhaddq_n_s32): Likewise.
+       (__arm_veorq_s32): Likewise.
+       (__arm_vcaddq_rot90_s32): Likewise.
+       (__arm_vcaddq_rot270_s32): Likewise.
+       (__arm_vbrsrq_n_s32): Likewise.
+       (__arm_vbicq_s32): Likewise.
+       (__arm_vandq_s32): Likewise.
+       (__arm_vaddvaq_s32): Likewise.
+       (__arm_vaddq_n_s32): Likewise.
+       (__arm_vabdq_s32): Likewise.
+       (__arm_vshlq_n_s32): Likewise.
+       (__arm_vrshrq_n_s32): Likewise.
+       (__arm_vqshlq_n_s32): Likewise.
+       (vsubq): Define polymorphic variant.
+       (vsubq_n): Likewise.
+       (vshlq_r): Likewise.
+       (vrshlq_n): Likewise.
+       (vrshlq): Likewise.
+       (vrmulhq): Likewise.
+       (vrhaddq): Likewise.
+       (vqsubq_n): Likewise.
+       (vqsubq): Likewise.
+       (vqshlq): Likewise.
+       (vqshlq_r): Likewise.
+       (vqshluq): Likewise.
+       (vrshrq_n): Likewise.
+       (vshlq_n): Likewise.
+       (vqshluq_n): Likewise.
+       (vqshlq_n): Likewise.
+       (vqrshlq_n): Likewise.
+       (vqrshlq): Likewise.
+       (vqrdmulhq_n): Likewise.
+       (vqrdmulhq): Likewise.
+       (vqdmulhq_n): Likewise.
+       (vqdmulhq): Likewise.
+       (vqaddq_n): Likewise.
+       (vqaddq): Likewise.
+       (vorrq_n): Likewise.
+       (vorrq): Likewise.
+       (vornq): Likewise.
+       (vmulq_n): Likewise.
+       (vmulq): Likewise.
+       (vmulltq_int): Likewise.
+       (vmullbq_int): Likewise.
+       (vmulhq): Likewise.
+       (vminq): Likewise.
+       (vminaq): Likewise.
+       (vmaxq): Likewise.
+       (vmaxaq): Likewise.
+       (vhsubq_n): Likewise.
+       (vhsubq): Likewise.
+       (vhcaddq_rot90): Likewise.
+       (vhcaddq_rot270): Likewise.
+       (vhaddq_n): Likewise.
+       (vhaddq): Likewise.
+       (veorq): Likewise.
+       (vcaddq_rot90): Likewise.
+       (vcaddq_rot270): Likewise.
+       (vbrsrq_n): Likewise.
+       (vbicq_n): Likewise.
+       (vbicq): Likewise.
+       (vaddq): Likewise.
+       (vaddq_n): Likewise.
+       (vandq): Likewise.
+       (vabdq): Likewise.
+       * config/arm/arm_mve_builtins.def (BINOP_NONE_NONE_IMM): Use it.
+       (BINOP_NONE_NONE_NONE): Likewise.
+       (BINOP_NONE_NONE_UNONE): Likewise.
+       (BINOP_UNONE_NONE_IMM): Likewise.
+       (BINOP_UNONE_NONE_NONE): Likewise.
+       (BINOP_UNONE_UNONE_IMM): Likewise.
+       (BINOP_UNONE_UNONE_NONE): Likewise.
+       (BINOP_UNONE_UNONE_UNONE): Likewise.
+       * config/arm/constraints.md (Ra): Define constraint to check that the
+       constant is in the range 0 to 7.
+       (Rg): Define constraint to check that the constant is one of 1, 2, 4
+       and 8.
+       * config/arm/mve.md (mve_vabdq_<supf>): Define RTL pattern.
+       (mve_vaddq_n_<supf>): Likewise.
+       (mve_vaddvaq_<supf>): Likewise.
+       (mve_vaddvq_p_<supf>): Likewise.
+       (mve_vandq_<supf>): Likewise.
+       (mve_vbicq_<supf>): Likewise.
+       (mve_vbrsrq_n_<supf>): Likewise.
+       (mve_vcaddq_rot270_<supf>): Likewise.
+       (mve_vcaddq_rot90_<supf>): Likewise.
+       (mve_vcmpcsq_n_u): Likewise.
+       (mve_vcmpcsq_u): Likewise.
+       (mve_vcmpeqq_n_<supf>): Likewise.
+       (mve_vcmpeqq_<supf>): Likewise.
+       (mve_vcmpgeq_n_s): Likewise.
+       (mve_vcmpgeq_s): Likewise.
+       (mve_vcmpgtq_n_s): Likewise.
+       (mve_vcmpgtq_s): Likewise.
+       (mve_vcmphiq_n_u): Likewise.
+       (mve_vcmphiq_u): Likewise.
+       (mve_vcmpleq_n_s): Likewise.
+       (mve_vcmpleq_s): Likewise.
+       (mve_vcmpltq_n_s): Likewise.
+       (mve_vcmpltq_s): Likewise.
+       (mve_vcmpneq_n_<supf>): Likewise.
+       (mve_vddupq_n_u): Likewise.
+       (mve_veorq_<supf>): Likewise.
+       (mve_vhaddq_n_<supf>): Likewise.
+       (mve_vhaddq_<supf>): Likewise.
+       (mve_vhcaddq_rot270_s): Likewise.
+       (mve_vhcaddq_rot90_s): Likewise.
+       (mve_vhsubq_n_<supf>): Likewise.
+       (mve_vhsubq_<supf>): Likewise.
+       (mve_vidupq_n_u): Likewise.
+       (mve_vmaxaq_s): Likewise.
+       (mve_vmaxavq_s): Likewise.
+       (mve_vmaxq_<supf>): Likewise.
+       (mve_vmaxvq_<supf>): Likewise.
+       (mve_vminaq_s): Likewise.
+       (mve_vminavq_s): Likewise.
+       (mve_vminq_<supf>): Likewise.
+       (mve_vminvq_<supf>): Likewise.
+       (mve_vmladavq_<supf>): Likewise.
+       (mve_vmladavxq_s): Likewise.
+       (mve_vmlsdavq_s): Likewise.
+       (mve_vmlsdavxq_s): Likewise.
+       (mve_vmulhq_<supf>): Likewise.
+       (mve_vmullbq_int_<supf>): Likewise.
+       (mve_vmulltq_int_<supf>): Likewise.
+       (mve_vmulq_n_<supf>): Likewise.
+       (mve_vmulq_<supf>): Likewise.
+       (mve_vornq_<supf>): Likewise.
+       (mve_vorrq_<supf>): Likewise.
+       (mve_vqaddq_n_<supf>): Likewise.
+       (mve_vqaddq_<supf>): Likewise.
+       (mve_vqdmulhq_n_s): Likewise.
+       (mve_vqdmulhq_s): Likewise.
+       (mve_vqrdmulhq_n_s): Likewise.
+       (mve_vqrdmulhq_s): Likewise.
+       (mve_vqrshlq_n_<supf>): Likewise.
+       (mve_vqrshlq_<supf>): Likewise.
+       (mve_vqshlq_n_<supf>): Likewise.
+       (mve_vqshlq_r_<supf>): Likewise.
+       (mve_vqshlq_<supf>): Likewise.
+       (mve_vqshluq_n_s): Likewise.
+       (mve_vqsubq_n_<supf>): Likewise.
+       (mve_vqsubq_<supf>): Likewise.
+       (mve_vrhaddq_<supf>): Likewise.
+       (mve_vrmulhq_<supf>): Likewise.
+       (mve_vrshlq_n_<supf>): Likewise.
+       (mve_vrshlq_<supf>): Likewise.
+       (mve_vrshrq_n_<supf>): Likewise.
+       (mve_vshlq_n_<supf>): Likewise.
+       (mve_vshlq_r_<supf>): Likewise.
+       (mve_vsubq_n_<supf>): Likewise.
+       (mve_vsubq_<supf>): Likewise.
+       * config/arm/predicates.md (mve_imm_7): Define predicate to check
+       the matching constraint Ra.
+       (mve_imm_selective_upto_8): Define predicate to check the matching
+       constraint Rg.
+
+2020-03-17  Andre Vieira  <andre.simoesdiasvieira@arm.com>
+            Mihail Ionescu  <mihail.ionescu@arm.com>
+            Srinath Parvathaneni  <srinath.parvathaneni@arm.com>
+
+       * config/arm/arm-builtins.c (BINOP_NONE_NONE_UNONE_QUALIFIERS): Define
+       qualifier for binary operands.
+       (BINOP_UNONE_NONE_NONE_QUALIFIERS): Likewise.
+       (BINOP_UNONE_UNONE_NONE_QUALIFIERS): Likewise.
+       * config/arm/arm_mve.h (vaddlvq_p_s32): Define macro.
+       (vaddlvq_p_u32): Likewise.
+       (vcmpneq_s8): Likewise.
+       (vcmpneq_s16): Likewise.
+       (vcmpneq_s32): Likewise.
+       (vcmpneq_u8): Likewise.
+       (vcmpneq_u16): Likewise.
+       (vcmpneq_u32): Likewise.
+       (vshlq_s8): Likewise.
+       (vshlq_s16): Likewise.
+       (vshlq_s32): Likewise.
+       (vshlq_u8): Likewise.
+       (vshlq_u16): Likewise.
+       (vshlq_u32): Likewise.
+       (__arm_vaddlvq_p_s32): Define intrinsic.
+       (__arm_vaddlvq_p_u32): Likewise.
+       (__arm_vcmpneq_s8): Likewise.
+       (__arm_vcmpneq_s16): Likewise.
+       (__arm_vcmpneq_s32): Likewise.
+       (__arm_vcmpneq_u8): Likewise.
+       (__arm_vcmpneq_u16): Likewise.
+       (__arm_vcmpneq_u32): Likewise.
+       (__arm_vshlq_s8): Likewise.
+       (__arm_vshlq_s16): Likewise.
+       (__arm_vshlq_s32): Likewise.
+       (__arm_vshlq_u8): Likewise.
+       (__arm_vshlq_u16): Likewise.
+       (__arm_vshlq_u32): Likewise.
+       (vaddlvq_p): Define polymorphic variant.
+       (vcmpneq): Likewise.
+       (vshlq): Likewise.
+       * config/arm/arm_mve_builtins.def (BINOP_NONE_NONE_UNONE_QUALIFIERS):
+       Use it.
+       (BINOP_UNONE_NONE_NONE_QUALIFIERS): Likewise.
+       (BINOP_UNONE_UNONE_NONE_QUALIFIERS): Likewise.
+       * config/arm/mve.md (mve_vaddlvq_p_<supf>v4si): Define RTL pattern.
+       (mve_vcmpneq_<supf><mode>): Likewise.
+       (mve_vshlq_<supf><mode>): Likewise.
+
+2020-03-17  Andre Vieira  <andre.simoesdiasvieira@arm.com>
+            Mihail Ionescu  <mihail.ionescu@arm.com>
+            Srinath Parvathaneni  <srinath.parvathaneni@arm.com>
+
+       * config/arm/arm-builtins.c (BINOP_UNONE_UNONE_IMM_QUALIFIERS): Define
+       qualifier for binary operands.
+       (BINOP_UNONE_UNONE_UNONE_QUALIFIERS): Likewise.
+       (BINOP_UNONE_NONE_IMM_QUALIFIERS): Likewise.
+       * config/arm/arm_mve.h (vcvtq_n_s16_f16): Define macro.
+       (vcvtq_n_s32_f32): Likewise.
+       (vcvtq_n_u16_f16): Likewise.
+       (vcvtq_n_u32_f32): Likewise.
+       (vcreateq_u8): Likewise.
+       (vcreateq_u16): Likewise.
+       (vcreateq_u32): Likewise.
+       (vcreateq_u64): Likewise.
+       (vcreateq_s8): Likewise.
+       (vcreateq_s16): Likewise.
+       (vcreateq_s32): Likewise.
+       (vcreateq_s64): Likewise.
+       (vshrq_n_s8): Likewise.
+       (vshrq_n_s16): Likewise.
+       (vshrq_n_s32): Likewise.
+       (vshrq_n_u8): Likewise.
+       (vshrq_n_u16): Likewise.
+       (vshrq_n_u32): Likewise.
+       (__arm_vcreateq_u8): Define intrinsic.
+       (__arm_vcreateq_u16): Likewise.
+       (__arm_vcreateq_u32): Likewise.
+       (__arm_vcreateq_u64): Likewise.
+       (__arm_vcreateq_s8): Likewise.
+       (__arm_vcreateq_s16): Likewise.
+       (__arm_vcreateq_s32): Likewise.
+       (__arm_vcreateq_s64): Likewise.
+       (__arm_vshrq_n_s8): Likewise.
+       (__arm_vshrq_n_s16): Likewise.
+       (__arm_vshrq_n_s32): Likewise.
+       (__arm_vshrq_n_u8): Likewise.
+       (__arm_vshrq_n_u16): Likewise.
+       (__arm_vshrq_n_u32): Likewise.
+       (__arm_vcvtq_n_s16_f16): Likewise.
+       (__arm_vcvtq_n_s32_f32): Likewise.
+       (__arm_vcvtq_n_u16_f16): Likewise.
+       (__arm_vcvtq_n_u32_f32): Likewise.
+       (vshrq_n): Define polymorphic variant.
+       * config/arm/arm_mve_builtins.def (BINOP_UNONE_UNONE_IMM_QUALIFIERS):
+       Use it.
+       (BINOP_UNONE_UNONE_UNONE_QUALIFIERS): Likewise.
+       (BINOP_UNONE_NONE_IMM_QUALIFIERS): Likewise.
+       * config/arm/constraints.md (Rb): Define constraint to check that the
+       constant is in the range 1 to 8.
+       (Rf): Define constraint to check that the constant is in the range
+       1 to 32.
+       * config/arm/mve.md (mve_vcreateq_<supf><mode>): Define RTL pattern.
+       (mve_vshrq_n_<supf><mode>): Likewise.
+       (mve_vcvtq_n_from_f_<supf><mode>): Likewise.
+       * config/arm/predicates.md (mve_imm_8): Define predicate to check
+       the matching constraint Rb.
+       (mve_imm_32): Define predicate to check the matching constraint Rf.
+
+2020-03-17  Andre Vieira  <andre.simoesdiasvieira@arm.com>
+            Mihail Ionescu  <mihail.ionescu@arm.com>
+            Srinath Parvathaneni  <srinath.parvathaneni@arm.com>
+
+       * config/arm/arm-builtins.c (BINOP_NONE_NONE_NONE_QUALIFIERS): Define
+       qualifier for binary operands.
+       (BINOP_NONE_NONE_IMM_QUALIFIERS): Likewise.
+       (BINOP_NONE_UNONE_IMM_QUALIFIERS): Likewise.
+       (BINOP_NONE_UNONE_UNONE_QUALIFIERS): Likewise.
+       * config/arm/arm_mve.h (vsubq_n_f16): Define macro.
+       (vsubq_n_f32): Likewise.
+       (vbrsrq_n_f16): Likewise.
+       (vbrsrq_n_f32): Likewise.
+       (vcvtq_n_f16_s16): Likewise.
+       (vcvtq_n_f32_s32): Likewise.
+       (vcvtq_n_f16_u16): Likewise.
+       (vcvtq_n_f32_u32): Likewise.
+       (vcreateq_f16): Likewise.
+       (vcreateq_f32): Likewise.
+       (__arm_vsubq_n_f16): Define intrinsic.
+       (__arm_vsubq_n_f32): Likewise.
+       (__arm_vbrsrq_n_f16): Likewise.
+       (__arm_vbrsrq_n_f32): Likewise.
+       (__arm_vcvtq_n_f16_s16): Likewise.
+       (__arm_vcvtq_n_f32_s32): Likewise.
+       (__arm_vcvtq_n_f16_u16): Likewise.
+       (__arm_vcvtq_n_f32_u32): Likewise.
+       (__arm_vcreateq_f16): Likewise.
+       (__arm_vcreateq_f32): Likewise.
+       (vsubq): Define polymorphic variant.
+       (vbrsrq): Likewise.
+       (vcvtq_n): Likewise.
+       * config/arm/arm_mve_builtins.def (BINOP_NONE_NONE_NONE_QUALIFIERS): Use
+       it.
+       (BINOP_NONE_NONE_IMM_QUALIFIERS): Likewise.
+       (BINOP_NONE_UNONE_IMM_QUALIFIERS): Likewise.
+       (BINOP_NONE_UNONE_UNONE_QUALIFIERS): Likewise.
+       * config/arm/constraints.md (Rd): Define constraint to check that the
+       constant is in the range 1 to 16.
+       * config/arm/mve.md (mve_vsubq_n_f<mode>): Define RTL pattern.
+       (mve_vbrsrq_n_f<mode>): Likewise.
+       (mve_vcvtq_n_to_f_<supf><mode>): Likewise.
+       (mve_vcreateq_f<mode>): Likewise.
+       * config/arm/predicates.md (mve_imm_16): Define predicate to check
+       the matching constraint Rd.
+
+2020-03-17  Andre Vieira  <andre.simoesdiasvieira@arm.com>
+            Mihail Ionescu  <mihail.ionescu@arm.com>
+            Srinath Parvathaneni  <srinath.parvathaneni@arm.com>
+
+       * config/arm/arm-builtins.c (hi_UP): Define mode.
+       * config/arm/arm.h (IS_VPR_REGNUM): Move.
+       * config/arm/arm.md (VPR_REGNUM): Define before APSRQ_REGNUM.
+       (APSRQ_REGNUM): Modify.
+       (APSRGE_REGNUM): Modify.
+       * config/arm/arm_mve.h (vctp16q): Define macro.
+       (vctp32q): Likewise.
+       (vctp64q): Likewise.
+       (vctp8q): Likewise.
+       (vpnot): Likewise.
+       (__arm_vctp16q): Define intrinsic.
+       (__arm_vctp32q): Likewise.
+       (__arm_vctp64q): Likewise.
+       (__arm_vctp8q): Likewise.
+       (__arm_vpnot): Likewise.
+       * config/arm/arm_mve_builtins.def (UNOP_UNONE_UNONE): Use builtin
+       qualifier.
+       * config/arm/mve.md (mve_vctp<mode1>qhi): Define RTL pattern.
+       (mve_vpnothi): Likewise.
+
+2020-03-17  Andre Vieira  <andre.simoesdiasvieira@arm.com>
+            Mihail Ionescu  <mihail.ionescu@arm.com>
+            Srinath Parvathaneni  <srinath.parvathaneni@arm.com>
+
+       * config/arm/arm.h (enum reg_class): Define new class EVEN_REGS.
+       * config/arm/arm_mve.h (vdupq_n_s8): Define macro.
+       (vdupq_n_s16): Likewise.
+       (vdupq_n_s32): Likewise.
+       (vabsq_s8): Likewise.
+       (vabsq_s16): Likewise.
+       (vabsq_s32): Likewise.
+       (vclsq_s8): Likewise.
+       (vclsq_s16): Likewise.
+       (vclsq_s32): Likewise.
+       (vclzq_s8): Likewise.
+       (vclzq_s16): Likewise.
+       (vclzq_s32): Likewise.
+       (vnegq_s8): Likewise.
+       (vnegq_s16): Likewise.
+       (vnegq_s32): Likewise.
+       (vaddlvq_s32): Likewise.
+       (vaddvq_s8): Likewise.
+       (vaddvq_s16): Likewise.
+       (vaddvq_s32): Likewise.
+       (vmovlbq_s8): Likewise.
+       (vmovlbq_s16): Likewise.
+       (vmovltq_s8): Likewise.
+       (vmovltq_s16): Likewise.
+       (vmvnq_s8): Likewise.
+       (vmvnq_s16): Likewise.
+       (vmvnq_s32): Likewise.
+       (vrev16q_s8): Likewise.
+       (vrev32q_s8): Likewise.
+       (vrev32q_s16): Likewise.
+       (vqabsq_s8): Likewise.
+       (vqabsq_s16): Likewise.
+       (vqabsq_s32): Likewise.
+       (vqnegq_s8): Likewise.
+       (vqnegq_s16): Likewise.
+       (vqnegq_s32): Likewise.
+       (vcvtaq_s16_f16): Likewise.
+       (vcvtaq_s32_f32): Likewise.
+       (vcvtnq_s16_f16): Likewise.
+       (vcvtnq_s32_f32): Likewise.
+       (vcvtpq_s16_f16): Likewise.
+       (vcvtpq_s32_f32): Likewise.
+       (vcvtmq_s16_f16): Likewise.
+       (vcvtmq_s32_f32): Likewise.
+       (vmvnq_u8): Likewise.
+       (vmvnq_u16): Likewise.
+       (vmvnq_u32): Likewise.
+       (vdupq_n_u8): Likewise.
+       (vdupq_n_u16): Likewise.
+       (vdupq_n_u32): Likewise.
+       (vclzq_u8): Likewise.
+       (vclzq_u16): Likewise.
+       (vclzq_u32): Likewise.
+       (vaddvq_u8): Likewise.
+       (vaddvq_u16): Likewise.
+       (vaddvq_u32): Likewise.
+       (vrev32q_u8): Likewise.
+       (vrev32q_u16): Likewise.
+       (vmovltq_u8): Likewise.
+       (vmovltq_u16): Likewise.
+       (vmovlbq_u8): Likewise.
+       (vmovlbq_u16): Likewise.
+       (vrev16q_u8): Likewise.
+       (vaddlvq_u32): Likewise.
+       (vcvtpq_u16_f16): Likewise.
+       (vcvtpq_u32_f32): Likewise.
+       (vcvtnq_u16_f16): Likewise.
+       (vcvtmq_u16_f16): Likewise.
+       (vcvtmq_u32_f32): Likewise.
+       (vcvtaq_u16_f16): Likewise.
+       (vcvtaq_u32_f32): Likewise.
+       (__arm_vdupq_n_s8): Define intrinsic.
+       (__arm_vdupq_n_s16): Likewise.
+       (__arm_vdupq_n_s32): Likewise.
+       (__arm_vabsq_s8): Likewise.
+       (__arm_vabsq_s16): Likewise.
+       (__arm_vabsq_s32): Likewise.
+       (__arm_vclsq_s8): Likewise.
+       (__arm_vclsq_s16): Likewise.
+       (__arm_vclsq_s32): Likewise.
+       (__arm_vclzq_s8): Likewise.
+       (__arm_vclzq_s16): Likewise.
+       (__arm_vclzq_s32): Likewise.
+       (__arm_vnegq_s8): Likewise.
+       (__arm_vnegq_s16): Likewise.
+       (__arm_vnegq_s32): Likewise.
+       (__arm_vaddlvq_s32): Likewise.
+       (__arm_vaddvq_s8): Likewise.
+       (__arm_vaddvq_s16): Likewise.
+       (__arm_vaddvq_s32): Likewise.
+       (__arm_vmovlbq_s8): Likewise.
+       (__arm_vmovlbq_s16): Likewise.
+       (__arm_vmovltq_s8): Likewise.
+       (__arm_vmovltq_s16): Likewise.
+       (__arm_vmvnq_s8): Likewise.
+       (__arm_vmvnq_s16): Likewise.
+       (__arm_vmvnq_s32): Likewise.
+       (__arm_vrev16q_s8): Likewise.
+       (__arm_vrev32q_s8): Likewise.
+       (__arm_vrev32q_s16): Likewise.
+       (__arm_vqabsq_s8): Likewise.
+       (__arm_vqabsq_s16): Likewise.
+       (__arm_vqabsq_s32): Likewise.
+       (__arm_vqnegq_s8): Likewise.
+       (__arm_vqnegq_s16): Likewise.
+       (__arm_vqnegq_s32): Likewise.
+       (__arm_vmvnq_u8): Likewise.
+       (__arm_vmvnq_u16): Likewise.
+       (__arm_vmvnq_u32): Likewise.
+       (__arm_vdupq_n_u8): Likewise.
+       (__arm_vdupq_n_u16): Likewise.
+       (__arm_vdupq_n_u32): Likewise.
+       (__arm_vclzq_u8): Likewise.
+       (__arm_vclzq_u16): Likewise.
+       (__arm_vclzq_u32): Likewise.
+       (__arm_vaddvq_u8): Likewise.
+       (__arm_vaddvq_u16): Likewise.
+       (__arm_vaddvq_u32): Likewise.
+       (__arm_vrev32q_u8): Likewise.
+       (__arm_vrev32q_u16): Likewise.
+       (__arm_vmovltq_u8): Likewise.
+       (__arm_vmovltq_u16): Likewise.
+       (__arm_vmovlbq_u8): Likewise.
+       (__arm_vmovlbq_u16): Likewise.
+       (__arm_vrev16q_u8): Likewise.
+       (__arm_vaddlvq_u32): Likewise.
+       (__arm_vcvtpq_u16_f16): Likewise.
+       (__arm_vcvtpq_u32_f32): Likewise.
+       (__arm_vcvtnq_u16_f16): Likewise.
+       (__arm_vcvtmq_u16_f16): Likewise.
+       (__arm_vcvtmq_u32_f32): Likewise.
+       (__arm_vcvtaq_u16_f16): Likewise.
+       (__arm_vcvtaq_u32_f32): Likewise.
+       (__arm_vcvtaq_s16_f16): Likewise.
+       (__arm_vcvtaq_s32_f32): Likewise.
+       (__arm_vcvtnq_s16_f16): Likewise.
+       (__arm_vcvtnq_s32_f32): Likewise.
+       (__arm_vcvtpq_s16_f16): Likewise.
+       (__arm_vcvtpq_s32_f32): Likewise.
+       (__arm_vcvtmq_s16_f16): Likewise.
+       (__arm_vcvtmq_s32_f32): Likewise.
+       (vdupq_n): Define polymorphic variant.
+       (vabsq): Likewise.
+       (vclsq): Likewise.
+       (vclzq): Likewise.
+       (vnegq): Likewise.
+       (vaddlvq): Likewise.
+       (vaddvq): Likewise.
+       (vmovlbq): Likewise.
+       (vmovltq): Likewise.
+       (vmvnq): Likewise.
+       (vrev16q): Likewise.
+       (vrev32q): Likewise.
+       (vqabsq): Likewise.
+       (vqnegq): Likewise.
+       * config/arm/arm_mve_builtins.def (UNOP_SNONE_SNONE): Use it.
+       (UNOP_SNONE_NONE): Likewise.
+       (UNOP_UNONE_UNONE): Likewise.
+       (UNOP_UNONE_NONE): Likewise.
+       * config/arm/constraints.md (e): Define new constraint to allow only
+       even registers.
+       * config/arm/mve.md (mve_vqabsq_s<mode>): Define RTL pattern.
+       (mve_vnegq_s<mode>): Likewise.
+       (mve_vmvnq_<supf><mode>): Likewise.
+       (mve_vdupq_n_<supf><mode>): Likewise.
+       (mve_vclzq_<supf><mode>): Likewise.
+       (mve_vclsq_s<mode>): Likewise.
+       (mve_vaddvq_<supf><mode>): Likewise.
+       (mve_vabsq_s<mode>): Likewise.
+       (mve_vrev32q_<supf><mode>): Likewise.
+       (mve_vmovltq_<supf><mode>): Likewise.
+       (mve_vmovlbq_<supf><mode>): Likewise.
+       (mve_vcvtpq_<supf><mode>): Likewise.
+       (mve_vcvtnq_<supf><mode>): Likewise.
+       (mve_vcvtmq_<supf><mode>): Likewise.
+       (mve_vcvtaq_<supf><mode>): Likewise.
+       (mve_vrev16q_<supf>v16qi): Likewise.
+       (mve_vaddlvq_<supf>v4si): Likewise.
+
+2020-03-17  Jakub Jelinek  <jakub@redhat.com>
+
+       * lra-spills.c (remove_pseudos): Fix up duplicated word issue in
+       a dump message.
+       * tree-sra.c (create_access_replacement): Fix up duplicated word issue
+       in a comment.
+       * read-rtl-function.c (find_param_by_name,
+       function_reader::parse_enum_value, function_reader::get_insn_by_uid):
+       Likewise.
+       * spellcheck.c (get_edit_distance_cutoff): Likewise.
+       * tree-data-ref.c (create_ifn_alias_checks): Likewise.
+       * tree.def (SWITCH_EXPR): Likewise.
+       * selftest.c (assert_str_contains): Likewise.
+       * ipa-param-manipulation.h (class ipa_param_body_adjustments):
+       Likewise.
+       * tree-ssa-math-opts.c (convert_expand_mult_copysign): Likewise.
+       * tree-ssa-loop-split.c (find_vdef_in_loop): Likewise.
+       * langhooks.h (struct lang_hooks_for_decls): Likewise.
+       * ipa-prop.h (struct ipa_param_descriptor): Likewise.
+       * tree-ssa-strlen.c (handle_builtin_string_cmp, handle_store):
+       Likewise.
+       * tree-ssa-dom.c (simplify_stmt_for_jump_threading): Likewise.
+       * tree-ssa-reassoc.c (reassociate_bb): Likewise.
+       * tree.c (component_ref_size): Likewise.
+       * hsa-common.c (hsa_init_compilation_unit_data): Likewise.
+       * gimple-ssa-sprintf.c (get_string_length, format_string,
+       format_directive): Likewise.
+       * omp-grid.c (grid_process_kernel_body_copy): Likewise.
+       * input.c (string_concat_db::get_string_concatenation,
+       test_lexer_string_locations_ucn4): Likewise.
+       * cfgexpand.c (pass_expand::execute): Likewise.
+       * gimple-ssa-warn-restrict.c (builtin_memref::offset_out_of_bounds,
+       maybe_diag_overlap): Likewise.
+       * rtl.c (RTX_CODE_HWINT_P_1): Likewise.
+       * shrink-wrap.c (spread_components): Likewise.
+       * tree-ssa-dse.c (initialize_ao_ref_for_dse, valid_ao_ref_for_dse):
+       Likewise.
+       * tree-call-cdce.c (shrink_wrap_one_built_in_call_with_conds):
+       Likewise.
+       * dwarf2out.c (dwarf2out_early_finish): Likewise.
+       * gimple-ssa-store-merging.c: Likewise.
+       * ira-costs.c (record_operand_costs): Likewise.
+       * tree-vect-loop.c (vectorizable_reduction): Likewise.
+       * target.def (dispatch): Likewise.
+       (validate_dims, gen_ccmp_first): Fix up duplicated word issue
+       in documentation text.
+       * doc/tm.texi: Regenerated.
+       * config/i386/x86-tune.def (X86_TUNE_PARTIAL_FLAG_REG_STALL): Fix up
+       duplicated word issue in a comment.
+       * config/i386/i386.c (ix86_test_loading_unspec): Likewise.
+       * config/i386/i386-features.c (remove_partial_avx_dependency):
+       Likewise.
+       * config/msp430/msp430.c (msp430_select_section): Likewise.
+       * config/gcn/gcn-run.c (load_image): Likewise.
+       * config/aarch64/aarch64-sve.md (sve_ld1r<mode>): Likewise.
+       * config/aarch64/aarch64.c (aarch64_gen_adjusted_ldpstp): Likewise.
+       * config/aarch64/falkor-tag-collision-avoidance.c
+       (single_dest_per_chain): Likewise.
+       * config/nvptx/nvptx.c (nvptx_record_fndecl): Likewise.
+       * config/fr30/fr30.c (fr30_arg_partial_bytes): Likewise.
+       * config/rs6000/rs6000-string.c (expand_cmp_vec_sequence): Likewise.
+       * config/rs6000/rs6000-p8swap.c (replace_swapped_load_constant):
+       Likewise.
+       * config/rs6000/rs6000-c.c (rs6000_target_modify_macros): Likewise.
+       * config/rs6000/rs6000.c (rs6000_option_override_internal): Likewise.
+       * config/rs6000/rs6000-logue.c
+       (rs6000_emit_probe_stack_range_stack_clash): Likewise.
+       * config/nds32/nds32-md-auxiliary.c (nds32_split_ashiftdi3): Likewise.
+       Fix various other issues in the comment.
+
+2020-03-17  Mihail Ionescu  <mihail.ionescu@arm.com>
+
+       * config/arm/t-rmprofile: Create new multilib for
+       armv8.1-m.main+mve hard float and reuse the v8-m.main ones for
+       v8.1-m.main+mve.
+
+2020-03-17  Jakub Jelinek  <jakub@redhat.com>
+
+       PR tree-optimization/94015
+       * tree-ssa-strlen.c (count_nonzero_bytes): Split portions of the
+       function where EXP is address of the bytes being stored rather than
+       the bytes themselves into count_nonzero_bytes_addr.  Punt on zero
+       sized MEM_REF.  Use VAR_P macro and handle CONST_DECL like VAR_DECLs.
+       Use ctor_for_folding instead of looking at DECL_INITIAL.  Punt before
+       calling native_encode_expr if host or target doesn't have 8-bit
+       chars.  Formatting fixes.
+       (count_nonzero_bytes_addr): New function.
+
+2020-03-17  Andre Vieira  <andre.simoesdiasvieira@arm.com>
+            Mihail Ionescu  <mihail.ionescu@arm.com>
+            Srinath Parvathaneni  <srinath.parvathaneni@arm.com>
+
+       * config/arm/arm-builtins.c (UNOP_SNONE_SNONE_QUALIFIERS): Define.
+       (UNOP_SNONE_NONE_QUALIFIERS): Likewise.
+       (UNOP_SNONE_IMM_QUALIFIERS): Likewise.
+       (UNOP_UNONE_NONE_QUALIFIERS): Likewise.
+       (UNOP_UNONE_UNONE_QUALIFIERS): Likewise.
+       (UNOP_UNONE_IMM_QUALIFIERS): Likewise.
+       * config/arm/arm_mve.h (vmvnq_n_s16): Define macro.
+       (vmvnq_n_s32): Likewise.
+       (vrev64q_s8): Likewise.
+       (vrev64q_s16): Likewise.
+       (vrev64q_s32): Likewise.
+       (vcvtq_s16_f16): Likewise.
+       (vcvtq_s32_f32): Likewise.
+       (vrev64q_u8): Likewise.
+       (vrev64q_u16): Likewise.
+       (vrev64q_u32): Likewise.
+       (vmvnq_n_u16): Likewise.
+       (vmvnq_n_u32): Likewise.
+       (vcvtq_u16_f16): Likewise.
+       (vcvtq_u32_f32): Likewise.
+       (__arm_vmvnq_n_s16): Define intrinsic.
+       (__arm_vmvnq_n_s32): Likewise.
+       (__arm_vrev64q_s8): Likewise.
+       (__arm_vrev64q_s16): Likewise.
+       (__arm_vrev64q_s32): Likewise.
+       (__arm_vrev64q_u8): Likewise.
+       (__arm_vrev64q_u16): Likewise.
+       (__arm_vrev64q_u32): Likewise.
+       (__arm_vmvnq_n_u16): Likewise.
+       (__arm_vmvnq_n_u32): Likewise.
+       (__arm_vcvtq_s16_f16): Likewise.
+       (__arm_vcvtq_s32_f32): Likewise.
+       (__arm_vcvtq_u16_f16): Likewise.
+       (__arm_vcvtq_u32_f32): Likewise.
+       (vrev64q): Define polymorphic variant.
+       * config/arm/arm_mve_builtins.def (UNOP_SNONE_SNONE): Use it.
+       (UNOP_SNONE_NONE): Likewise.
+       (UNOP_SNONE_IMM): Likewise.
+       (UNOP_UNONE_UNONE): Likewise.
+       (UNOP_UNONE_NONE): Likewise.
+       (UNOP_UNONE_IMM): Likewise.
+       * config/arm/mve.md (mve_vrev64q_<supf><mode>): Define RTL pattern.
+       (mve_vcvtq_from_f_<supf><mode>): Likewise.
+       (mve_vmvnq_n_<supf><mode>): Likewise.
+
+2020-03-17  Andre Vieira  <andre.simoesdiasvieira@arm.com>
+            Mihail Ionescu  <mihail.ionescu@arm.com>
+            Srinath Parvathaneni  <srinath.parvathaneni@arm.com>
+
+       * config/arm/arm-builtins.c (UNOP_NONE_NONE_QUALIFIERS): Define macro.
+       (UNOP_NONE_SNONE_QUALIFIERS): Likewise.
+       (UNOP_NONE_UNONE_QUALIFIERS): Likewise.
+       * config/arm/arm_mve.h (vrndxq_f16): Define macro.
+       (vrndxq_f32): Likewise.
+       (vrndq_f16): Likewise.
+       (vrndq_f32): Likewise.
+       (vrndpq_f16): Likewise.
+       (vrndpq_f32): Likewise.
+       (vrndnq_f16): Likewise.
+       (vrndnq_f32): Likewise.
+       (vrndmq_f16): Likewise.
+       (vrndmq_f32): Likewise.
+       (vrndaq_f16): Likewise.
+       (vrndaq_f32): Likewise.
+       (vrev64q_f16): Likewise.
+       (vrev64q_f32): Likewise.
+       (vnegq_f16): Likewise.
+       (vnegq_f32): Likewise.
+       (vdupq_n_f16): Likewise.
+       (vdupq_n_f32): Likewise.
+       (vabsq_f16): Likewise.
+       (vabsq_f32): Likewise.
+       (vrev32q_f16): Likewise.
+       (vcvttq_f32_f16): Likewise.
+       (vcvtbq_f32_f16): Likewise.
+       (vcvtq_f16_s16): Likewise.
+       (vcvtq_f32_s32): Likewise.
+       (vcvtq_f16_u16): Likewise.
+       (vcvtq_f32_u32): Likewise.
+       (__arm_vrndxq_f16): Define intrinsic.
+       (__arm_vrndxq_f32): Likewise.
+       (__arm_vrndq_f16): Likewise.
+       (__arm_vrndq_f32): Likewise.
+       (__arm_vrndpq_f16): Likewise.
+       (__arm_vrndpq_f32): Likewise.
+       (__arm_vrndnq_f16): Likewise.
+       (__arm_vrndnq_f32): Likewise.
+       (__arm_vrndmq_f16): Likewise.
+       (__arm_vrndmq_f32): Likewise.
+       (__arm_vrndaq_f16): Likewise.
+       (__arm_vrndaq_f32): Likewise.
+       (__arm_vrev64q_f16): Likewise.
+       (__arm_vrev64q_f32): Likewise.
+       (__arm_vnegq_f16): Likewise.
+       (__arm_vnegq_f32): Likewise.
+       (__arm_vdupq_n_f16): Likewise.
+       (__arm_vdupq_n_f32): Likewise.
+       (__arm_vabsq_f16): Likewise.
+       (__arm_vabsq_f32): Likewise.
+       (__arm_vrev32q_f16): Likewise.
+       (__arm_vcvttq_f32_f16): Likewise.
+       (__arm_vcvtbq_f32_f16): Likewise.
+       (__arm_vcvtq_f16_s16): Likewise.
+       (__arm_vcvtq_f32_s32): Likewise.
+       (__arm_vcvtq_f16_u16): Likewise.
+       (__arm_vcvtq_f32_u32): Likewise.
+       (vrndxq): Define polymorphic variants.
+       (vrndq): Likewise.
+       (vrndpq): Likewise.
+       (vrndnq): Likewise.
+       (vrndmq): Likewise.
+       (vrndaq): Likewise.
+       (vrev64q): Likewise.
+       (vnegq): Likewise.
+       (vabsq): Likewise.
+       (vrev32q): Likewise.
+       (vcvtbq_f32): Likewise.
+       (vcvttq_f32): Likewise.
+       (vcvtq): Likewise.
+       * config/arm/arm_mve_builtins.def (VAR2): Define.
+       (VAR1): Define.
+       * config/arm/mve.md (mve_vrndxq_f<mode>): Add RTL pattern.
+       (mve_vrndq_f<mode>): Likewise.
+       (mve_vrndpq_f<mode>): Likewise.
+       (mve_vrndnq_f<mode>): Likewise.
+       (mve_vrndmq_f<mode>): Likewise.
+       (mve_vrndaq_f<mode>): Likewise.
+       (mve_vrev64q_f<mode>): Likewise.
+       (mve_vnegq_f<mode>): Likewise.
+       (mve_vdupq_n_f<mode>): Likewise.
+       (mve_vabsq_f<mode>): Likewise.
+       (mve_vrev32q_fv8hf): Likewise.
+       (mve_vcvttq_f32_f16v4sf): Likewise.
+       (mve_vcvtbq_f32_f16v4sf): Likewise.
+       (mve_vcvtq_to_f_<supf><mode>): Likewise.
+
+2020-03-16  Andre Vieira  <andre.simoesdiasvieira@arm.com>
+            Mihail Ionescu  <mihail.ionescu@arm.com>
+            Srinath Parvathaneni  <srinath.parvathaneni@arm.com>
+
+       * config/arm/arm-builtins.c (CF): Define mve_builtin_data.
+       (VAR1): Define.
+       (ARM_BUILTIN_MVE_PATTERN_START): Define.
+       (arm_init_mve_builtins): Define function.
+       (arm_init_builtins): Add TARGET_HAVE_MVE check.
+       (arm_expand_builtin_1): Check the range of fcode.
+       (arm_expand_mve_builtin): Define function to expand MVE builtins.
+       (arm_expand_builtin): Check the range of fcode.
+       * config/arm/arm_mve.h (__ARM_FEATURE_MVE): Define MVE floating point
+       types.
+       (__ARM_MVE_PRESERVE_USER_NAMESPACE): Define to protect user namespace.
+       (vst4q_s8): Define macro.
+       (vst4q_s16): Likewise.
+       (vst4q_s32): Likewise.
+       (vst4q_u8): Likewise.
+       (vst4q_u16): Likewise.
+       (vst4q_u32): Likewise.
+       (vst4q_f16): Likewise.
+       (vst4q_f32): Likewise.
+       (__arm_vst4q_s8): Define inline builtin.
+       (__arm_vst4q_s16): Likewise.
+       (__arm_vst4q_s32): Likewise.
+       (__arm_vst4q_u8): Likewise.
+       (__arm_vst4q_u16): Likewise.
+       (__arm_vst4q_u32): Likewise.
+       (__arm_vst4q_f16): Likewise.
+       (__arm_vst4q_f32): Likewise.
+       (__ARM_mve_typeid): Define macro with MVE types.
+       (__ARM_mve_coerce): Define macro with _Generic feature.
+       (vst4q): Define polymorphic variant for different vst4q builtins.
+       * config/arm/arm_mve_builtins.def: New file.
+       * config/arm/iterators.md (VSTRUCT): Modify to allow XI and OI
+       modes in MVE.
+       * config/arm/mve.md (MVE_VLD_ST): Define iterator.
+       (unspec): Define unspec.
+       (mve_vst4q<mode>): Define RTL pattern.
+       * config/arm/neon.md (mov<mode>): Modify expand to allow XI and OI
+       modes in MVE.
+       (neon_mov<mode>): Modify RTL define_insn to allow XI and OI modes
+       in MVE.
+       (define_split): Allow OI mode split for MVE after reload.
+       (define_split): Allow XI mode split for MVE after reload.
+       * config/arm/t-arm (arm.o): Add entry for arm_mve_builtins.def.
+       (arm-builtins.o): Likewise.
+
+2020-03-17  Christophe Lyon  <christophe.lyon@linaro.org>
+
+       * c-typeck.c (process_init_element): Handle constructor_type with
+       type size represented by POLY_INT_CST.
+
+2020-03-17  Jakub Jelinek  <jakub@redhat.com>
+
+       PR tree-optimization/94187
+       * tree-ssa-strlen.c (count_nonzero_bytes): Punt if
+       nchars - offset < nbytes.
+
+       PR middle-end/94189
+       * builtins.c (expand_builtin_strnlen): Do return NULL_RTX if we would
+       emit a warning if it was enabled and don't depend on TREE_NO_WARNING
+       for code-generation.
+
+2020-03-16  Vladimir Makarov  <vmakarov@redhat.com>
+
+       PR target/94185
+       * lra-spills.c (remove_pseudos): Do not reuse insn alternative
+       after changing memory subreg.
+
+2020-03-16  Andre Vieira  <andre.simoesdiasvieira@arm.com>
+            Srinath Parvathaneni  <srinath.parvathaneni@arm.com>
+
+       * config/arm/arm.c (arm_libcall_uses_aapcs_base): Modify function to
+       add emulator calls for double-precision arithmetic operations for MVE.
+
+2020-03-16  Andre Vieira  <andre.simoesdiasvieira@arm.com>
+            Mihail Ionescu  <mihail.ionescu@arm.com>
+            Srinath Parvathaneni  <srinath.parvathaneni@arm.com>
+
+       * common/config/arm/arm-common.c (arm_asm_auto_mfpu): When the
+       vfp_base feature bit is on and -mfpu=auto is passed as a compiler
+       option, do not generate an error on not finding any matching FPU,
+       because in this case an FPU is not required.
+       * config/arm/arm-cpus.in (vfp_base): Define feature bit, this bit is
+       enabled for MVE and also for all VFP extensions.
+       (VFPv2): Modify fgroup to enable the vfp_base feature bit whenever
+       VFPv2 is enabled.
+       (MVE): Define fgroup to enable feature bits mve, vfp_base and armv7em.
+       (MVE_FP): Define fgroup to enable the feature bits in fgroup MVE and
+       FPv5 along with feature bit mve_float.
+       (mve): Modify add options in armv8.1-m.main arch for MVE.
+       (mve.fp): Modify add options in armv8.1-m.main arch for MVE with
+       floating point.
+       * config/arm/arm.c (use_return_insn): Replace the
+       check with TARGET_VFP_BASE.
+       (thumb2_legitimate_index_p): Replace TARGET_HARD_FLOAT with
+       TARGET_VFP_BASE.
+       (arm_rtx_costs_internal): Replace "TARGET_HARD_FLOAT || TARGET_HAVE_MVE"
+       with TARGET_VFP_BASE, to allow cost calculations for copies in MVE as
+       well.
+       (arm_get_vfp_saved_size): Replace TARGET_HARD_FLOAT with
+       TARGET_VFP_BASE, to allow space calculation for VFP registers in MVE
+       as well.
+       (arm_compute_frame_layout): Likewise.
+       (arm_save_coproc_regs): Likewise.
+       (arm_fixed_condition_code_regs): Modify to enable using VFPCC_REGNUM
+       in MVE as well.
+       (arm_hard_regno_mode_ok): Replace "TARGET_HARD_FLOAT || TARGET_HAVE_MVE"
+       with equivalent macro TARGET_VFP_BASE.
+       (arm_expand_epilogue_apcs_frame): Likewise.
+       (arm_expand_epilogue): Likewise.
+       (arm_conditional_register_usage): Likewise.
+       (arm_declare_function_name): Add check to skip printing .fpu directive
+       in assembly file when TARGET_VFP_BASE is enabled and fpu_to_print is
+       "softvfp".
+       * config/arm/arm.h (TARGET_VFP_BASE): Define.
+       * config/arm/arm.md (arch): Add "mve" to arch.
+       (eq_attr "arch" "mve"): Enable when TARGET_HAVE_MVE is true.
+       (vfp_pop_multiple_with_writeback): Replace "TARGET_HARD_FLOAT
+       || TARGET_HAVE_MVE" with equivalent macro TARGET_VFP_BASE.
+       * config/arm/constraints.md (Uf): Define to allow modification to FPCCR
+       in MVE.
+       * config/arm/thumb2.md (thumb2_movsfcc_soft_insn): Modify target guard
+       to not allow for MVE.
+       * config/arm/unspecs.md (UNSPEC_GET_FPSCR): Move to volatile unspecs
+       enum.
+       (VUNSPEC_GET_FPSCR): Define.
+       * config/arm/vfp.md (thumb2_movhi_vfp): Add support for VMSR and VMRS
+       instructions, which move between general-purpose registers and the
+       floating-point special registers.
+       (thumb2_movhi_fp16): Likewise.
+       (thumb2_movsi_vfp): Add support for VMSR and VMRS instructions along
+       with MCR and MRC instructions which set and get Floating-point Status
+       and Control Register (FPSCR).
+       (movdi_vfp): Modify pattern to enable Single-precision scalar float move
+       in MVE.
+       (thumb2_movdf_vfp): Modify pattern to enable Double-precision scalar
+       float move patterns in MVE.
+       (thumb2_movsfcc_vfp): Modify pattern to enable single float conditional
+       code move patterns of VFP also in MVE by adding TARGET_VFP_BASE check.
+       (thumb2_movdfcc_vfp): Modify pattern to enable double float conditional
+       code move patterns of VFP also in MVE by adding TARGET_VFP_BASE check.
+       (push_multi_vfp): Add support to use VFP VPUSH pattern for MVE by adding
+       TARGET_VFP_BASE check.
+       (set_fpscr): Add support to set the FPSCR register for MVE.  Modify
+       pattern using VFPCC_REGNUM as a few MVE intrinsics use the carry bit
+       of the FPSCR register.
+       (get_fpscr): Add support to get the FPSCR register for MVE.  Modify
+       pattern using VFPCC_REGNUM as a few MVE intrinsics use the carry bit
+       of the FPSCR register.
+
+2020-03-16  Andre Vieira  <andre.simoesdiasvieira@arm.com>
+            Mihail Ionescu  <mihail.ionescu@arm.com>
+            Srinath Parvathaneni  <srinath.parvathaneni@arm.com>
+
+       * config.gcc (arm_mve.h): Include mve intrinsics header file.
+       * config/arm/aout.h (p0): Add new register name for MVE predicated
+       cases.
+       * config/arm/arm-builtins.c (ARM_BUILTIN_SIMD_LANE_CHECK): Define
+       macro common to Neon and MVE.
+       (ARM_BUILTIN_NEON_LANE_CHECK): Renamed to ARM_BUILTIN_SIMD_LANE_CHECK.
+       (arm_init_simd_builtin_types): Disable poly types for MVE.
+       (arm_init_neon_builtins): Move a check to arm_init_builtins function.
+       (arm_init_builtins): Use ARM_BUILTIN_SIMD_LANE_CHECK instead of
+       ARM_BUILTIN_NEON_LANE_CHECK.
+       (mve_dereference_pointer): Add function.
+       (arm_expand_builtin_args): Call to mve_dereference_pointer when MVE is
+       enabled.
+       (arm_expand_neon_builtin): Moved to arm_expand_builtin function.
+       (arm_expand_builtin): Moved from arm_expand_neon_builtin function.
+       * config/arm/arm-c.c (__ARM_FEATURE_MVE): Define macro for MVE and MVE
+       with floating point enabled.
+       * config/arm/arm-protos.h (neon_immediate_valid_for_move): Renamed to
+       simd_immediate_valid_for_move.
+       (simd_immediate_valid_for_move): Renamed from
+       neon_immediate_valid_for_move function.
+       * config/arm/arm.c (arm_options_perform_arch_sanity_checks): Generate
+       error if vfpv2 feature bit is disabled and mve feature bit is also
+       disabled for HARD_FLOAT_ABI.
+       (use_return_insn): Check to not push VFP regs for MVE.
+       (aapcs_vfp_allocate): Add MVE check to have the same Procedure Call
+       Standard as Neon.
+       (aapcs_vfp_allocate_return_reg): Likewise.
+       (thumb2_legitimate_address_p): Check to return 0 on valid Thumb-2
+       address operand for MVE.
+       (arm_rtx_costs_internal): MVE check to determine cost of rtx.
+       (neon_valid_immediate): Rename to simd_valid_immediate.
+       (simd_valid_immediate): Rename from neon_valid_immediate.
+       (simd_valid_immediate): Add MVE check that vector size is 128 bits.
+       (neon_immediate_valid_for_move): Rename to
+       simd_immediate_valid_for_move.
+       (simd_immediate_valid_for_move): Rename from
+       neon_immediate_valid_for_move.
+       (neon_immediate_valid_for_logic): Modify call to neon_valid_immediate
+       function.
+       (neon_make_constant): Modify call to neon_valid_immediate function.
+       (neon_vector_mem_operand): Return VFP register for POST_INC or PRE_DEC
+       for MVE.
+       (output_move_neon): Add MVE check to generate vldm/vstm instructions.
+       (arm_compute_frame_layout): Calculate space for saved VFP registers for
+       MVE.
+       (arm_save_coproc_regs): Save coproc registers for MVE.
+       (arm_print_operand): Add case 'E' to print memory operands for MVE.
+       (arm_print_operand_address): Check to print register number for MVE.
+       (arm_hard_regno_mode_ok): Check for arm hard regno mode ok for MVE.
+       (arm_modes_tieable_p): Check to allow structure mode for MVE.
+       (arm_regno_class): Add VPR_REGNUM check.
+       (arm_expand_epilogue_apcs_frame): MVE check to calculate epilogue code
+       for APCS frame.
+       (arm_expand_epilogue): MVE check for enabling pop instructions in
+       epilogue.
+       (arm_print_asm_arch_directives): Modify function to disable print of
+       .arch_extension "mve" and "fp" for cases where MVE is enabled with
+       "SOFT FLOAT ABI".
+       (arm_vector_mode_supported_p): Check for modes available in MVE
+       integer and MVE floating point.
+       (arm_array_mode_supported_p): Add TARGET_HAVE_MVE check for array mode
+       pointer support.
+       (arm_conditional_register_usage): Enable usage of conditional
+       registers for MVE.
+       (fixed_regs[VPR_REGNUM]): Enable VPR_REG for MVE.
+       (arm_declare_function_name): Modify function to disable print of
+       .arch_extension "mve" and "fp" for cases where MVE is enabled with
+       "SOFT FLOAT ABI".
+       * config/arm/arm.h (TARGET_HAVE_MVE): Disable for soft float abi and
+       when target general registers are required.
+       (TARGET_HAVE_MVE_FLOAT): Likewise.
+       (FIXED_REGISTERS): Add bit for VFP_REG class which is enabled in arm.c
+       for MVE.
+       (CALL_USED_REGISTERS): Set bit for VFP_REG class in CALL_USED_REGISTERS
+       which indicates this is not available across function calls.
+       (FIRST_PSEUDO_REGISTER): Modify.
+       (VALID_MVE_MODE): Define valid MVE mode.
+       (VALID_MVE_SI_MODE): Define valid MVE SI mode.
+       (VALID_MVE_SF_MODE): Define valid MVE SF mode.
+       (VALID_MVE_STRUCT_MODE): Define valid MVE struct mode.
+       (VPR_REGNUM): Add Vector Predication Register in arm_regs_in_sequence
+       for MVE.
+       (IS_VPR_REGNUM): Macro to check for VPR_REG register.
+       (REG_ALLOC_ORDER): Add VPR_REGNUM entry.
+       (enum reg_class): Add VPR_REG entry.
+       (REG_CLASS_NAMES): Add VPR_REG entry.
+       * config/arm/arm.md (VPR_REGNUM): Define.
+       (conds): Check is_mve_type attribute to differentiate "conditional"
+       and "unconditional" instructions.
+       (arm_movsf_soft_insn): Modify RTL to not allow for MVE.
+       (movdf_soft_insn): Modify RTL to not allow for MVE.
+       (vfp_pop_multiple_with_writeback): Enable for MVE.
+       (include "mve.md"): Include mve.md file.
+       * config/arm/arm_mve.h: Add MVE intrinsics head file.
+       * config/arm/constraints.md (Up): Define constraint to enable the
+       "p0" register in MVE for vector predicated operands.
+       * config/arm/iterators.md (VNIM1): Define.
+       (VNINOTM1): Define.
+       (VHFBF_split): Define.
+       * config/arm/mve.md: New file.
+       (mve_mov<mode>): Define RTL for move, store and load in MVE.
+       (mve_mov<mode>): Define move RTL pattern with vec_duplicate operator for
+       second operand.
+       * config/arm/neon.md (neon_immediate_valid_for_move): Rename with
+       simd_immediate_valid_for_move.
+       (neon_mov<mode>): Split pattern and move the expand pattern "movv8hf",
+       which is common to MVE and NEON, to the vec-common.md file.
+       (vec_init<mode><V_elem_l>): Add TARGET_HAVE_MVE check.
+       * config/arm/predicates.md (vpr_register_operand): Define.
+       * config/arm/t-arm: Add mve.md file.
+       * config/arm/types.md (mve_move): Add MVE instructions mve_move to
+       attribute "type".
+       (mve_store): Add MVE instructions mve_store to attribute "type".
+       (mve_load): Add MVE instructions mve_load to attribute "type".
+       (is_mve_type): Define attribute.
+       * config/arm/vec-common.md (mov<mode>): Modify RTL expand to support
+       standard move patterns in MVE along with NEON and IWMMXT with mode
+       iterator VNIM1.
+       (mov<mode>): Modify RTL expand to support standard move patterns in NEON
+       and IWMMXT with mode iterator V8HF.
+       (movv8hf): Define RTL expand to support standard "movv8hf" pattern in
+       NEON and MVE.
+       * config/arm/vfp.md (neon_immediate_valid_for_move): Rename to
+       simd_immediate_valid_for_move.
+
+2020-03-16  H.J. Lu  <hongjiu.lu@intel.com>
+
+       PR target/89229
+       * config/i386/i386.md (*movsi_internal): Call ix86_output_ssemov
+       for TYPE_SSEMOV.  Remove ext_sse_reg_operand and TARGET_AVX512VL
+       check.
+       * config/i386/predicates.md (ext_sse_reg_operand): Removed.
+
+2020-03-16  Jakub Jelinek  <jakub@redhat.com>
+
+       PR debug/94167
+       * tree-inline.c (insert_init_stmt): Don't gimple_regimplify_operands
+       DEBUG_STMTs.
+
+       PR tree-optimization/94166
+       * tree-ssa-reassoc.c (sort_by_mach_mode): Use SSA_NAME_VERSION
+       as secondary comparison key.
+
+2020-03-16  Bin Cheng  <bin.cheng@linux.alibaba.com>
+
+       PR tree-optimization/94125
+       * tree-loop-distribution.c
+       (loop_distribution::break_alias_scc_partitions): Update post order
+       number for merged scc.
+
+2020-03-15  H.J. Lu  <hongjiu.lu@intel.com>
+
+       PR target/89229
+       * config/i386/i386.c (ix86_output_ssemov): Handle MODE_SI and
+       MODE_SF.
+       * config/i386/i386.md (*movsf_internal): Call ix86_output_ssemov
+       for TYPE_SSEMOV.  Remove TARGET_PREFER_AVX256, TARGET_AVX512VL
+       and ext_sse_reg_operand check.
+
+2020-03-15  Lewis Hyatt  <lhyatt@gmail.com>
+
+       * common.opt: Avoid redundancy in the help text.
+       * config/arc/arc.opt: Likewise.
+       * config/cr16/cr16.opt: Likewise.
+
+2020-03-14  Jakub Jelinek  <jakub@redhat.com>
+
+       PR middle-end/93566
+       * tree-nested.c (convert_nonlocal_omp_clauses,
+       convert_local_omp_clauses): Handle {,in_,task_}reduction clauses
+       with C/C++ array sections.
+
+2020-03-14  H.J. Lu  <hongjiu.lu@intel.com>
+
+       PR target/89229
+       * config/i386/i386.md (*movdi_internal): Call ix86_output_ssemov
+       for TYPE_SSEMOV.  Remove ext_sse_reg_operand and TARGET_AVX512VL
+       check.
+
+2020-03-14  Jakub Jelinek  <jakub@redhat.com>
+
+       * gimple-fold.c (gimple_fold_builtin_strncpy): Change
+       "a an" to "an" in a comment.
+       * hsa-common.h (is_a_helper): Likewise.
+       * tree-ssa-strlen.c (maybe_diag_stxncpy_trunc): Likewise.
+       * config/arc/arc.c (arc600_corereg_hazard): Likewise.
+       * config/s390/s390.c (s390_indirect_branch_via_thunk): Likewise.
+
+2020-03-13  Aaron Sawdey  <acsawdey@linux.ibm.com>
+
+       PR target/92379
+       * config/rs6000/rs6000.c (num_insns_constant_multi): Don't shift a
+       64-bit value by 64 bits (UB).
+
+2020-03-13  Vladimir Makarov  <vmakarov@redhat.com>
+
+       PR rtl-optimization/92303
+       * lra-spills.c (remove_pseudos): Try to simplify memory subreg.
+
+2020-03-13  Segher Boessenkool  <segher@kernel.crashing.org>
+
+       PR rtl-optimization/94148
+       PR rtl-optimization/94042
+       * df-core.c (BB_LAST_CHANGE_AGE): Delete.
+       (df_worklist_propagate_forward): New parameter last_change_age, use
+       that instead of bb->aux.
+       (df_worklist_propagate_backward): Ditto.
+       (df_worklist_dataflow_doublequeue): Use a local array last_change_age.
+
+2020-03-13  Richard Biener  <rguenther@suse.de>
+
+       PR tree-optimization/94163
+       * tree-ssa-pre.c (create_expression_by_pieces): Check
+       whether alignment would be zero.
+
+2020-03-13  Martin Liska  <mliska@suse.cz>
+
+       PR lto/94157
+       * lto-wrapper.c (run_gcc): Use concat for appending
+       to collect_gcc_options.
+
+2020-03-13  Jakub Jelinek  <jakub@redhat.com>
+
+       PR target/94121
+       * config/aarch64/aarch64.c (aarch64_add_offset_1): Use gen_int_mode
+       instead of GEN_INT.
+
+2020-03-13  H.J. Lu  <hongjiu.lu@intel.com>
+
+       PR target/89229
+       * config/i386/i386.c (ix86_output_ssemov): Handle MODE_DF.
+       * config/i386/i386.md (*movdf_internal): Call ix86_output_ssemov
+       for TYPE_SSEMOV.  Remove TARGET_AVX512F, TARGET_PREFER_AVX256,
+       TARGET_AVX512VL and ext_sse_reg_operand check.
+
+2020-03-13  Bu Le  <bule1@huawei.com>
+
+       PR target/94154
+       * config/aarch64/aarch64.opt (-param=aarch64-float-recp-precision=)
+       (-param=aarch64-double-recp-precision=): New options.
+       * doc/invoke.texi: Document them.
+       * config/aarch64/aarch64.c (aarch64_emit_approx_div): Use them
+       instead of hard-coding the choice of 1 for float and 2 for double.
+
+2020-03-13  Eric Botcazou  <ebotcazou@adacore.com>
+
+       PR rtl-optimization/94119
+       * resource.h (clear_hashed_info_until_next_barrier): Declare.
+       * resource.c (clear_hashed_info_until_next_barrier): New function.
+       * reorg.c (add_to_delay_list): Fix formatting.
+       (relax_delay_slots): Call clear_hashed_info_until_next_barrier on
+       the next instruction after removing a BARRIER.
+
+2020-03-13  Eric Botcazou  <ebotcazou@adacore.com>
+
+       PR middle-end/92071
+       * expmed.c (store_integral_bit_field): For fields larger than a word,
+       call extract_bit_field on the value if the mode is BLKmode.  Remove
+       specific path for big-endian targets and tidy things up a little bit.
+
+2020-03-12  Richard Sandiford  <richard.sandiford@arm.com>
+
+       PR rtl-optimization/90275
+       * cse.c (cse_insn): Delete no-op register moves too.
+
+2020-03-12  Darius Galis  <darius.galis@cyberthorstudios.com>
+
+       * config/rx/rx.md (CTRLREG_CPEN): Remove.
+       * config/rx/rx.c (rx_print_operand): Remove CTRLREG_CPEN support.
+
+2020-03-12  Richard Biener  <rguenther@suse.de>
+
+       PR tree-optimization/94103
+       * tree-ssa-sccvn.c (visit_reference_op_load): Avoid type
+       punning when the mode precision is not sufficient.
+
+2020-03-12  H.J. Lu  <hongjiu.lu@intel.com>
+
+       PR target/89229
+       * config/i386/i386.c (ix86_output_ssemov): Handle MODE_DI,
+       MODE_V1DF and MODE_V2SF.
+       * config/i386/mmx.md (MMXMODE:*mov<mode>_internal): Call
+       ix86_output_ssemov for TYPE_SSEMOV.  Remove ext_sse_reg_operand
+       check.
+
+2020-03-12  Jakub Jelinek  <jakub@redhat.com>
+
+       * doc/tm.texi.in (ASM_OUTPUT_ALIGNED_DECL_LOCAL): Change
+       ASM_OUTPUT_ALIGNED_DECL in description to ASM_OUTPUT_ALIGNED_LOCAL
+       and ASM_OUTPUT_DECL to ASM_OUTPUT_LOCAL.
+       * doc/tm.texi: Regenerated.
+
+       PR tree-optimization/94130
+       * tree-ssa-dse.c: Include gimplify.h.
+       (increment_start_addr): If stmt has lhs, drop the lhs from call and
+       set it after the call to the original value of the first argument.
+       Formatting fixes.
+       (decrement_count): Formatting fix.
+
+2020-03-11  Delia Burduv  <delia.burduv@arm.com>
+
+       * config/arm/arm-builtins.c
+       (arm_init_simd_builtin_scalar_types): New.
+       * config/arm/arm_neon.h (vld2_bf16): Use new builtin type.
+       (vld2q_bf16): Likewise.
+       (vld3_bf16): Likewise.
+       (vld3q_bf16): Likewise.
+       (vld4_bf16): Likewise.
+       (vld4q_bf16): Likewise.
+       (vld2_dup_bf16): Likewise.
+       (vld2q_dup_bf16): Likewise.
+       (vld3_dup_bf16): Likewise.
+       (vld3q_dup_bf16): Likewise.
+       (vld4_dup_bf16): Likewise.
+       (vld4q_dup_bf16): Likewise.
+
+2020-03-11  Jakub Jelinek  <jakub@redhat.com>
+
+       PR target/94134
+       * config/pdp11/pdp11.c (pdp11_asm_output_var): Call switch_to_section
+       at the start to switch to data section.  Don't print extra newline if
+       .globl directive has not been emitted.
+
+2020-03-11  Richard Biener  <rguenther@suse.de>
+
+       * match.pd ((T *)(ptr - ptr-cst) -> &MEM[ptr + -ptr-cst]):
+       New pattern.
+
+2020-03-11  Eric Botcazou  <ebotcazou@adacore.com>
+
+       PR middle-end/93961
+       * tree.c (variably_modified_type_p) <RECORD_TYPE>: Recurse into fields
+       whose type is a qualified union.
+
+2020-03-11  Jakub Jelinek  <jakub@redhat.com>
+
+       PR target/94121
+       * config/aarch64/aarch64.c (aarch64_add_offset_1): Use absu_hwi
+       instead of abs_hwi, change moffset type to unsigned HOST_WIDE_INT.
+
+       PR bootstrap/93962
+       * value-prof.c (dump_histogram_value): Use abs_hwi instead of
+       std::abs.
+       (get_nth_most_common_value): Use abs_hwi instead of abs.
+
+       PR middle-end/94111
+       * dfp.c (decimal_to_binary): Only use decimal128ToString if from->cl
+       is rvc_normal, otherwise use real_to_decimal to print the number to
+       string.
+
+       PR tree-optimization/94114
+       * tree-loop-distribution.c (generate_memset_builtin): Call
+       rewrite_to_non_trapping_overflow even on mem.
+       (generate_memcpy_builtin): Call rewrite_to_non_trapping_overflow even
+       on dest and src.
+
+2020-03-10  Jeff Law  <law@redhat.com>
+
+       * config/bfin/bfin.md (movsi_insv): Add length attribute.
+
+2020-03-10  Jiufu Guo  <guojiufu@linux.ibm.com>
+
+       PR target/93709
+       * config/rs6000/rs6000.c (rs6000_emit_p9_fp_minmax): Check
+       NAN and SIGNED_ZEROS for smax/smin.
+
+2020-03-10  Will Schmidt  <will_schmidt@vnet.ibm.com>
+
+       PR target/90763
+       * config/rs6000/rs6000-c.c (altivec_resolve_overloaded_builtin): Add
+       clause to handle P9V_BUILTIN_VEC_LXVL with const arguments.
+
+2020-03-10  Roman Zhuykov  <zhroma@ispras.ru>
+
+       * loop-iv.c (find_simple_exit): Make it static.
+       * cfgloop.h: Remove the corresponding prototype.
+
+2020-03-10  Roman Zhuykov  <zhroma@ispras.ru>
+
+       * ddg.c (create_ddg): Fix indentation.
+       (set_recurrence_length): Likewise.
+       (create_ddg_all_sccs): Likewise.
+
+2020-03-10  Jakub Jelinek  <jakub@redhat.com>
+
+       PR target/94088
+       * config/i386/i386.md (*testqi_ext_3): Call ix86_match_ccmode with
+       CCZmode instead of CCNOmode if operands[2] has DImode and pos + len
+       is 32.
+
+2020-03-09  Jason Merrill  <jason@redhat.com>
+
+       * gdbinit.in (pgs): Fix typo in documentation.
+
+2020-03-09  Vladimir Makarov  <vmakarov@redhat.com>
+
+       Revert:
+
+       2020-02-28  Vladimir Makarov  <vmakarov@redhat.com>
+
+       PR rtl-optimization/93564
+       * ira-color.c (assign_hard_reg): Prefer smaller hard regno when we
+       do not honor reg alloc order.
+
+2020-03-09  Andrew Pinski  <apinski@marvell.com>
+
+       PR inline-asm/94095
+       * doc/extend.texi (x86 Operand Modifiers): Fix column
+       for 'A' modifier.
+
+2020-03-09  Martin Liska  <mliska@suse.cz>
+
+       PR target/93800
+       * config/rs6000/rs6000.c (rs6000_option_override_internal):
+       Remove the setting of str_align_loops and str_align_jumps as these
+       should be set by the previous two conditions in the function.
+
+2020-03-09  Jakub Jelinek  <jakub@redhat.com>
+
+       PR rtl-optimization/94045
+       * params.opt (-param=max-find-base-term-values=): New option.
+       * alias.c (find_base_term): Add cut-off for number of visited VALUEs
+       in a single toplevel find_base_term call.
+
 2020-03-06  Wilco Dijkstra  <wdijkstr@arm.com>
 
        PR target/91598