#ifdef __cplusplus
+/* vec_abs */
+
+inline vector signed char
+vec_abs (vector signed char a1)
+{
+ return __builtin_altivec_abs_v16qi (a1);
+}
+
+inline vector signed short
+vec_abs (vector signed short a1)
+{
+ return __builtin_altivec_abs_v8hi (a1);
+}
+
+inline vector signed int
+vec_abs (vector signed int a1)
+{
+ return __builtin_altivec_abs_v4si (a1);
+}
+
+inline vector float
+vec_abs (vector float a1)
+{
+ return __builtin_altivec_abs_v4sf (a1);
+}
+
+/* vec_abss */
+
+inline vector signed char
+vec_abss (vector signed char a1)
+{
+ return __builtin_altivec_abss_v16qi (a1);
+}
+
+inline vector signed short
+vec_abss (vector signed short a1)
+{
+ return __builtin_altivec_abss_v8hi (a1);
+}
+
+inline vector signed int
+vec_abss (vector signed int a1)
+{
+ return __builtin_altivec_abss_v4si (a1);
+}
+
/* vec_add */
inline vector signed char
/* vec_ctf */
inline vector float
-vec_ctf (vector unsigned int a1, int a2)
+vec_ctf (vector unsigned int a1, const char a2)
{
- return (vector float) __builtin_altivec_vcfux ((vector signed int) a1, (const char) a2);
+ return (vector float) __builtin_altivec_vcfux ((vector signed int) a1, a2);
}
inline vector float
-vec_ctf (vector signed int a1, int a2)
+vec_ctf (vector signed int a1, const char a2)
{
- return (vector float) __builtin_altivec_vcfsx ((vector signed int) a1, (const char) a2);
+ return (vector float) __builtin_altivec_vcfsx ((vector signed int) a1, a2);
}
/* vec_cts */
inline vector signed int
-vec_cts (vector float a1, int a2)
+vec_cts (vector float a1, const char a2)
{
- return (vector signed int) __builtin_altivec_vctsxs ((vector float) a1, (const char) a2);
+ return (vector signed int) __builtin_altivec_vctsxs ((vector float) a1, a2);
}
/* vec_ctu */
inline vector unsigned int
-vec_ctu (vector float a1, int a2)
+vec_ctu (vector float a1, const char a2)
{
- return (vector unsigned int) __builtin_altivec_vctuxs ((vector float) a1, (const char) a2);
+ return (vector unsigned int) __builtin_altivec_vctuxs ((vector float) a1, a2);
}
/* vec_dss */
inline void
-vec_dss (int a1)
+vec_dss (const char a1)
{
- __builtin_altivec_dss ((const char) a1);
+ __builtin_altivec_dss (a1);
}
/* vec_dssall */
/* vec_dst */
inline void
-vec_dst (void *a1, int a2, int a3)
+vec_dst (void *a1, int a2, const char a3)
{
- __builtin_altivec_dst ((void *) a1, a2, (const char) a3);
+ __builtin_altivec_dst ((void *) a1, a2, a3);
}
/* vec_dstst */
inline void
-vec_dstst (void *a1, int a2, int a3)
+vec_dstst (void *a1, int a2, const char a3)
{
- __builtin_altivec_dstst ((void *) a1, a2, (const char) a3);
+ __builtin_altivec_dstst ((void *) a1, a2, a3);
}
/* vec_dststt */
inline void
-vec_dststt (void *a1, int a2, int a3)
+vec_dststt (void *a1, int a2, const char a3)
{
- __builtin_altivec_dststt ((void *) a1, a2, (const char) a3);
+ __builtin_altivec_dststt ((void *) a1, a2, a3);
}
/* vec_dstt */
inline void
-vec_dstt (void *a1, int a2, int a3)
+vec_dstt (void *a1, int a2, const char a3)
{
- __builtin_altivec_dstt ((void *) a1, a2, (const char) a3);
+ __builtin_altivec_dstt ((void *) a1, a2, a3);
}
/* vec_expte */
/* vec_sld */
inline vector float
-vec_sld (vector float a1, vector float a2, int a3)
+vec_sld (vector float a1, vector float a2, const char a3)
{
- return (vector float) __builtin_altivec_vsldoi_4si ((vector signed int) a1, (vector signed int) a2, (const char) a3);
+ return (vector float) __builtin_altivec_vsldoi_4si ((vector signed int) a1, (vector signed int) a2, a3);
}
inline vector signed int
-vec_sld (vector signed int a1, vector signed int a2, int a3)
+vec_sld (vector signed int a1, vector signed int a2, const char a3)
{
- return (vector signed int) __builtin_altivec_vsldoi_4si ((vector signed int) a1, (vector signed int) a2, (const char) a3);
+ return (vector signed int) __builtin_altivec_vsldoi_4si ((vector signed int) a1, (vector signed int) a2, a3);
}
inline vector unsigned int
-vec_sld (vector unsigned int a1, vector unsigned int a2, int a3)
+vec_sld (vector unsigned int a1, vector unsigned int a2, const char a3)
{
- return (vector unsigned int) __builtin_altivec_vsldoi_4si ((vector signed int) a1, (vector signed int) a2, (const char) a3);
+ return (vector unsigned int) __builtin_altivec_vsldoi_4si ((vector signed int) a1, (vector signed int) a2, a3);
}
inline vector signed short
-vec_sld (vector signed short a1, vector signed short a2, int a3)
+vec_sld (vector signed short a1, vector signed short a2, const char a3)
{
- return (vector signed short) __builtin_altivec_vsldoi_4si ((vector signed int) a1, (vector signed int) a2, (const char) a3);
+ return (vector signed short) __builtin_altivec_vsldoi_4si ((vector signed int) a1, (vector signed int) a2, a3);
}
inline vector unsigned short
-vec_sld (vector unsigned short a1, vector unsigned short a2, int a3)
+vec_sld (vector unsigned short a1, vector unsigned short a2, const char a3)
{
- return (vector unsigned short) __builtin_altivec_vsldoi_4si ((vector signed int) a1, (vector signed int) a2, (const char) a3);
+ return (vector unsigned short) __builtin_altivec_vsldoi_4si ((vector signed int) a1, (vector signed int) a2, a3);
}
inline vector signed char
-vec_sld (vector signed char a1, vector signed char a2, int a3)
+vec_sld (vector signed char a1, vector signed char a2, const char a3)
{
- return (vector signed char) __builtin_altivec_vsldoi_4si ((vector signed int) a1, (vector signed int) a2, (const char) a3);
+ return (vector signed char) __builtin_altivec_vsldoi_4si ((vector signed int) a1, (vector signed int) a2, a3);
}
inline vector unsigned char
-vec_sld (vector unsigned char a1, vector unsigned char a2, int a3)
+vec_sld (vector unsigned char a1, vector unsigned char a2, const char a3)
{
- return (vector unsigned char) __builtin_altivec_vsldoi_4si ((vector signed int) a1, (vector signed int) a2, (const char) a3);
+ return (vector unsigned char) __builtin_altivec_vsldoi_4si ((vector signed int) a1, (vector signed int) a2, a3);
}
/* vec_sll */
/* vec_splat */
inline vector signed char
-vec_splat (vector signed char a1, int a2)
+vec_splat (vector signed char a1, const char a2)
{
- return (vector signed char) __builtin_altivec_vspltb ((vector signed char) a1, (const char) a2);
+ return (vector signed char) __builtin_altivec_vspltb ((vector signed char) a1, a2);
}
inline vector unsigned char
-vec_splat (vector unsigned char a1, int a2)
+vec_splat (vector unsigned char a1, const char a2)
{
- return (vector unsigned char) __builtin_altivec_vspltb ((vector signed char) a1, (const char) a2);
+ return (vector unsigned char) __builtin_altivec_vspltb ((vector signed char) a1, a2);
}
inline vector signed short
-vec_splat (vector signed short a1, int a2)
+vec_splat (vector signed short a1, const char a2)
{
- return (vector signed short) __builtin_altivec_vsplth ((vector signed short) a1, (const char) a2);
+ return (vector signed short) __builtin_altivec_vsplth ((vector signed short) a1, a2);
}
inline vector unsigned short
-vec_splat (vector unsigned short a1, int a2)
+vec_splat (vector unsigned short a1, const char a2)
{
- return (vector unsigned short) __builtin_altivec_vsplth ((vector signed short) a1, (const char) a2);
+ return (vector unsigned short) __builtin_altivec_vsplth ((vector signed short) a1, a2);
}
inline vector float
-vec_splat (vector float a1, int a2)
+vec_splat (vector float a1, const char a2)
{
- return (vector float) __builtin_altivec_vspltw ((vector signed int) a1, (const char) a2);
+ return (vector float) __builtin_altivec_vspltw ((vector signed int) a1, a2);
}
inline vector signed int
-vec_splat (vector signed int a1, int a2)
+vec_splat (vector signed int a1, const char a2)
{
- return (vector signed int) __builtin_altivec_vspltw ((vector signed int) a1, (const char) a2);
+ return (vector signed int) __builtin_altivec_vspltw ((vector signed int) a1, a2);
}
inline vector unsigned int
-vec_splat (vector unsigned int a1, int a2)
+vec_splat (vector unsigned int a1, const char a2)
{
- return (vector unsigned int) __builtin_altivec_vspltw ((vector signed int) a1, (const char) a2);
+ return (vector unsigned int) __builtin_altivec_vspltw ((vector signed int) a1, a2);
}
/* vec_splat_s8 */
inline vector signed char
-vec_splat_s8 (int a1)
+vec_splat_s8 (const char a1)
{
- return (vector signed char) __builtin_altivec_vspltisb ((const char) a1);
+ return (vector signed char) __builtin_altivec_vspltisb (a1);
}
/* vec_splat_s16 */
inline vector signed short
-vec_splat_s16 (int a1)
+vec_splat_s16 (const char a1)
{
- return (vector signed short) __builtin_altivec_vspltish ((const char) a1);
+ return (vector signed short) __builtin_altivec_vspltish (a1);
}
/* vec_splat_s32 */
inline vector signed int
-vec_splat_s32 (int a1)
+vec_splat_s32 (const char a1)
{
- return (vector signed int) __builtin_altivec_vspltisw ((const char) a1);
+ return (vector signed int) __builtin_altivec_vspltisw (a1);
}
/* vec_splat_u8 */
inline vector unsigned char
-vec_splat_u8 (int a1)
+vec_splat_u8 (const char a1)
{
- return (vector unsigned char) __builtin_altivec_vspltisb ((const char) a1);
+ return (vector unsigned char) __builtin_altivec_vspltisb (a1);
}
/* vec_splat_u16 */
inline vector unsigned short
-vec_splat_u16 (int a1)
+vec_splat_u16 (const char a1)
{
- return (vector unsigned short) __builtin_altivec_vspltish ((const char) a1);
+ return (vector unsigned short) __builtin_altivec_vspltish (a1);
}
/* vec_splat_u32 */
inline vector unsigned int
-vec_splat_u32 (int a1)
+vec_splat_u32 (const char a1)
{
- return (vector unsigned int) __builtin_altivec_vspltisw ((const char) a1);
+ return (vector unsigned int) __builtin_altivec_vspltisw (a1);
}
/* vec_sr */
/* "... and so I think no man in a century will suffer as greatly as
you will." */
+#define vec_abs(a) \
+ __ch (__un_args_eq (vector signed char, a), \
+ (vector signed char) __builtin_altivec_abs_v16qi ((vector signed char) a), \
+ __ch (__un_args_eq (vector signed short, a), \
+ (vector signed short) __builtin_altivec_abs_v8hi ((vector signed short) a), \
+ __ch (__un_args_eq (vector signed int, a), \
+ (vector signed int) __builtin_altivec_abs_v4si ((vector signed int) a), \
+ __ch (__un_args_eq (vector float, a), \
+ (vector float) __builtin_altivec_abs_v4sf ((vector float) a), \
+ __altivec_link_error_invalid_argument ()))))
+
+#define vec_abss(a) \
+ __ch (__un_args_eq (vector signed char, a), \
+ (vector signed char) __builtin_altivec_abss_v16qi ((vector signed char) a), \
+ __ch (__un_args_eq (vector signed short, a), \
+ (vector signed short) __builtin_altivec_abss_v8hi ((vector signed short) a), \
+ __ch (__un_args_eq (vector signed int, a), \
+ (vector signed int) __builtin_altivec_abss_v4si ((vector signed int) a), \
+ __altivec_link_error_invalid_argument ())))
+
#define vec_step(t) \
__ch (__builtin_types_compatible_p (t, vector signed int), 4, \
__ch (__builtin_types_compatible_p (t, vector unsigned int), 4, \
static rtx altivec_expand_builtin PARAMS ((tree, rtx));
static rtx altivec_expand_unop_builtin PARAMS ((enum insn_code, tree, rtx));
static rtx altivec_expand_binop_builtin PARAMS ((enum insn_code, tree, rtx));
+static rtx altivec_expand_abs_builtin PARAMS ((enum insn_code, tree, rtx));
static rtx altivec_expand_predicate_builtin PARAMS ((enum insn_code, const char *, tree, rtx));
static rtx altivec_expand_ternop_builtin PARAMS ((enum insn_code, tree, rtx));
static rtx altivec_expand_stv_builtin PARAMS ((enum insn_code, tree));
{ MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }
};
+/* ABS* operations.  */
+
+static const struct builtin_description bdesc_abs[] =
+{
+ { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
+ { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
+ { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
+ { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
+ { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
+ { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
+ { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
+};
+
/* Simple unary operations: VECb = foo (unsigned literal) or VECb =
foo (VECa). */
return target;
}
+static rtx
+altivec_expand_abs_builtin (icode, arglist, target)
+ enum insn_code icode;
+ tree arglist;
+ rtx target;
+{
+ rtx pat, scratch1, scratch2;
+ tree arg0 = TREE_VALUE (arglist);
+ rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
+ enum machine_mode tmode = insn_data[icode].operand[0].mode;
+ enum machine_mode mode0 = insn_data[icode].operand[1].mode;
+
+ /* If we have invalid arguments, bail out before generating bad rtl. */
+ if (arg0 == error_mark_node)
+ return NULL_RTX;
+
+ if (target == 0
+ || GET_MODE (target) != tmode
+ || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
+ target = gen_reg_rtx (tmode);
+
+ if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
+ op0 = copy_to_mode_reg (mode0, op0);
+
+ scratch1 = gen_reg_rtx (mode0);
+ scratch2 = gen_reg_rtx (mode0);
+
+ pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
+ if (! pat)
+ return 0;
+ emit_insn (pat);
+
+ return target;
+}
+
static rtx
altivec_expand_binop_builtin (icode, arglist, target)
enum insn_code icode;
return NULL_RTX;
}
+ /* Expand abs* operations. */
+ d = (struct builtin_description *) bdesc_abs;
+ for (i = 0; i < sizeof (bdesc_abs) / sizeof *d; i++, d++)
+ if (d->code == fcode)
+ return altivec_expand_abs_builtin (d->icode, arglist, target);
+
/* Handle simple unary operations. */
d = (struct builtin_description *) bdesc_1arg;
for (i = 0; i < sizeof (bdesc_1arg) / sizeof *d; i++, d++)
tree_cons (NULL_TREE, V4SF_type_node,
endlink)));
+ tree v4si_ftype_v4si
+ = build_function_type (V4SI_type_node,
+ tree_cons (NULL_TREE, V4SI_type_node, endlink));
+
+ tree v8hi_ftype_v8hi
+ = build_function_type (V8HI_type_node,
+ tree_cons (NULL_TREE, V8HI_type_node, endlink));
+
+ tree v16qi_ftype_v16qi
+ = build_function_type (V16QI_type_node,
+ tree_cons (NULL_TREE, V16QI_type_node, endlink));
+
tree v8hi_ftype_v16qi_v16qi
= build_function_type (V8HI_type_node,
tree_cons (NULL_TREE, V16QI_type_node,
def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_int_pvoid, ALTIVEC_BUILTIN_STVEHX);
def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVEWX);
def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVXL);
-
+
/* Add the simple ternary operators. */
d = (struct builtin_description *) bdesc_3arg;
for (i = 0; i < sizeof (bdesc_3arg) / sizeof *d; i++, d++)
def_builtin (d->mask, d->name, type, d->code);
}
+ /* Initialize the abs* operators. */
+ d = (struct builtin_description *) bdesc_abs;
+ for (i = 0; i < sizeof (bdesc_abs) / sizeof *d; i++, d++)
+ {
+ enum machine_mode mode0;
+ tree type;
+
+ mode0 = insn_data[d->icode].operand[0].mode;
+
+ switch (mode0)
+ {
+ case V4SImode:
+ type = v4si_ftype_v4si;
+ break;
+ case V8HImode:
+ type = v8hi_ftype_v8hi;
+ break;
+ case V16QImode:
+ type = v16qi_ftype_v16qi;
+ break;
+ case V4SFmode:
+ type = v4sf_ftype_v4sf;
+ break;
+ default:
+ abort ();
+ }
+
+ def_builtin (d->mask, d->name, type, d->code);
+ }
+
/* Add the simple unary operators. */
d = (struct builtin_description *) bdesc_1arg;
for (i = 0; i < sizeof (bdesc_1arg) / sizeof *d; i++, d++)
"TARGET_ALTIVEC"
"vspltb %0,%1,%2"
[(set_attr "type" "vecperm")])
+
(define_insn "altivec_vsplth"
[(set (match_operand:V8HI 0 "register_operand" "=v")
(unspec:V8HI [(match_operand:V8HI 1 "register_operand" "v")
"TARGET_ALTIVEC"
"stvewx %2,%0,%1"
[(set_attr "type" "vecstore")])
+
+(define_insn "absv16qi2"
+ [(set (match_operand:V16QI 0 "register_operand" "=v")
+ (abs:V16QI (match_operand:V16QI 1 "register_operand" "v")))
+ (clobber (match_scratch:V16QI 2 "=v"))
+ (clobber (match_scratch:V16QI 3 "=v"))]
+ "TARGET_ALTIVEC"
+ "vspltisb %2,0\;vsububm %3,%2,%1\;vmaxsb %0,%1,%3"
+ [(set_attr "type" "altivec")
+ (set_attr "length" "12")])
+
+(define_insn "absv8hi2"
+ [(set (match_operand:V8HI 0 "register_operand" "=v")
+ (abs:V8HI (match_operand:V8HI 1 "register_operand" "v")))
+ (clobber (match_scratch:V8HI 2 "=v"))
+ (clobber (match_scratch:V8HI 3 "=v"))]
+ "TARGET_ALTIVEC"
+ "vspltisb %2,0\;vsubuhm %3,%2,%1\;vmaxsh %0,%1,%3"
+ [(set_attr "type" "altivec")
+ (set_attr "length" "12")])
+
+(define_insn "absv4si2"
+ [(set (match_operand:V4SI 0 "register_operand" "=v")
+ (abs:V4SI (match_operand:V4SI 1 "register_operand" "v")))
+ (clobber (match_scratch:V4SI 2 "=v"))
+ (clobber (match_scratch:V4SI 3 "=v"))]
+ "TARGET_ALTIVEC"
+ "vspltisb %2,0\;vsubuwm %3,%2,%1\;vmaxsw %0,%1,%3"
+ [(set_attr "type" "altivec")
+ (set_attr "length" "12")])
+
+(define_insn "absv4sf2"
+ [(set (match_operand:V4SF 0 "register_operand" "=v")
+ (abs:V4SF (match_operand:V4SF 1 "register_operand" "v")))
+ (clobber (match_scratch:V4SF 2 "=v"))
+ (clobber (match_scratch:V4SF 3 "=v"))]
+ "TARGET_ALTIVEC"
+ "vspltisw %2, -1\;vslw %3,%2,%2\;vandc %0,%1,%3"
+ [(set_attr "type" "altivec")
+ (set_attr "length" "12")])
+
+(define_insn "altivec_abss_v16qi"
+ [(set (match_operand:V16QI 0 "register_operand" "=v")
+ (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")] 210))
+ (clobber (match_scratch:V16QI 2 "=v"))
+ (clobber (match_scratch:V16QI 3 "=v"))]
+ "TARGET_ALTIVEC"
+ "vspltisb %2,0\;vsubsbs %3,%2,%1\;vmaxsb %0,%1,%3"
+ [(set_attr "type" "altivec")
+ (set_attr "length" "12")])
+
+(define_insn "altivec_abss_v8hi"
+ [(set (match_operand:V8HI 0 "register_operand" "=v")
+ (unspec:V8HI [(match_operand:V8HI 1 "register_operand" "v")] 211))
+ (clobber (match_scratch:V8HI 2 "=v"))
+ (clobber (match_scratch:V8HI 3 "=v"))]
+ "TARGET_ALTIVEC"
+ "vspltisb %2,0\;vsubshs %3,%2,%1\;vmaxsh %0,%1,%3"
+ [(set_attr "type" "altivec")
+ (set_attr "length" "12")])
+
+(define_insn "altivec_abss_v4si"
+ [(set (match_operand:V4SI 0 "register_operand" "=v")
+ (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")] 212))
+ (clobber (match_scratch:V4SI 2 "=v"))
+ (clobber (match_scratch:V4SI 3 "=v"))]
+ "TARGET_ALTIVEC"
+ "vspltisb %2,0\;vsubsws %3,%2,%1\;vmaxsw %0,%1,%3"
+ [(set_attr "type" "altivec")
+ (set_attr "length" "12")])