This is the mail archive of the
gcc-patches@gcc.gnu.org
mailing list for the GCC project.
[hsa] Treat address values specially
- From: Martin Jambor <mjambor at suse dot cz>
- To: GCC Patches <gcc-patches at gcc dot gnu dot org>
- Date: Thu, 10 Sep 2015 13:20:13 +0200
- Subject: [hsa] Treat address values specially
- Authentication-results: sourceware.org; auth=none
Hi,
I have found out that the hsa branch ICEs when expanding the following
to HSAIL:
int foo ()
{
#pragma omp target
{
int q[8];
__builtin_memset (&q[2], 0, sizeof (int) * 6);
}
}
The problem was that gen_hsa_addr was used for both values and memory
reference trees, which are really separate things, so I introduced a
special function for the former. Consequently, I had to slightly fix
the bit-field access signaling in gen_hsa_addr.
Committed to the branch after the standard testing I do.
Martin
2015-09-10 Martin Jambor <mjambor@suse.cz>
* hsa-gen.c (get_address_from_value): New function.
(gen_hsa_ternary_atomic_for_builtin): Use it instead of
gen_hsa_addr.
(gen_hsa_insns_for_call): Likewise.
(gen_hsa_addr): Set bitsize to zero if the thing is not a bitfield
access.
diff --git a/gcc/hsa-gen.c b/gcc/hsa-gen.c
index 7796895..6e39c78 100644
--- a/gcc/hsa-gen.c
+++ b/gcc/hsa-gen.c
@@ -1546,18 +1546,9 @@ gen_hsa_addr (tree ref, hsa_bb *hbb, vec <hsa_op_reg_p> *ssa_map,
switch (TREE_CODE (ref))
{
case SSA_NAME:
- /* The SSA_NAME and ADDR_EXPR cases cannot occur in a valid gimple memory
- reference but we also use this function to generate addresses of
- instructions representing operands of atomic memory access builtins
- which are just addresses and not references. */
- gcc_assert (!reg);
- reg = hsa_reg_for_gimple_ssa_reqtype (ref, ssa_map, hbb, addrtype);
- break;
-
case ADDR_EXPR:
- ref = TREE_OPERAND (ref, 0);
- gcc_assert (DECL_P (ref));
- /* Fall-through. */
+ gcc_unreachable ();
+
case PARM_DECL:
case VAR_DECL:
case RESULT_DECL:
@@ -1636,6 +1627,12 @@ out:
/* Calculate remaining bitsize offset (if presented). */
bitpos %= BITS_PER_UNIT;
+ /* If bitsize is a power of two that is greater or equal to BITS_PER_UNIT, it
+ is not a reason to think this is a bit-field access. */
+ if (bitpos == 0
+ && (bitsize >= BITS_PER_UNIT)
+ && !(bitsize & (bitsize - 1)))
+ bitsize = 0;
if ((bitpos || bitsize) && (output_bitpos == NULL || output_bitsize == NULL))
sorry ("Support for HSA does not implement unhandled bit field reference "
@@ -3297,6 +3294,37 @@ gen_hsa_unaryop_for_builtin (int opcode, gimple stmt, hsa_bb *hbb,
gen_hsa_unary_operation (opcode, dest, op, hbb);
}
+/* Generate HSA address corresponding to a value VAL (as opposed to a memory
+ reference tree), for example an SSA_NAME or an ADDR_EXPR. HBB is the HSA BB
+ to which the instruction should be added and SSA_MAP is used to map gimple
+ SSA names to HSA pseudoregisters. */
+
+static hsa_op_address *
+get_address_from_value (tree val, hsa_bb *hbb, vec <hsa_op_reg_p> *ssa_map)
+{
+ switch (TREE_CODE (val))
+ {
+ case SSA_NAME:
+ {
+ BrigType16_t addrtype = hsa_get_segment_addr_type (BRIG_SEGMENT_FLAT);
+ hsa_op_reg *reg = hsa_reg_for_gimple_ssa_reqtype (val, ssa_map,
+ hbb, addrtype);
+ return new hsa_op_address (NULL, reg, 0);
+ }
+ case ADDR_EXPR:
+ return gen_hsa_addr (TREE_OPERAND (val, 0), hbb, ssa_map);
+
+ case INTEGER_CST:
+ if (tree_fits_shwi_p (val))
+ return new hsa_op_address (NULL, NULL, tree_to_shwi (val));
+ /* Otherwise fall-through */
+
+ default:
+ sorry ("Support for HSA does not implement memory access to %E", val);
+ return new hsa_op_address (NULL, NULL, 0);
+ }
+}
+
/* Helper function to create an HSA atomic binary operation instruction out of
calls to atomic builtins. RET_ORIG is true if the built-in is the variant
that returns the value before applying the operation, and false if it should
@@ -3337,7 +3365,7 @@ gen_hsa_ternary_atomic_for_builtin (bool ret_orig,
hsa_insn_atomic *atominsn = new hsa_insn_atomic (nops, opcode, acode,
bit_type);
hsa_op_address *addr;
- addr = gen_hsa_addr (gimple_call_arg (stmt, 0), hbb, ssa_map);
+ addr = get_address_from_value (gimple_call_arg (stmt, 0), hbb, ssa_map);
hsa_op_base *op = hsa_reg_or_immed_for_gimple_op (gimple_call_arg (stmt, 1),
hbb, ssa_map);
@@ -3520,8 +3548,8 @@ specialop:
{
BrigType16_t mtype = mem_type_for_type (hsa_type_for_scalar_tree_type
(TREE_TYPE (lhs), false));
- hsa_op_address *addr = gen_hsa_addr (gimple_call_arg (stmt, 0),
- hbb, ssa_map);
+ hsa_op_address *addr;
+ addr = get_address_from_value (gimple_call_arg (stmt, 0), hbb, ssa_map);
dest = hsa_reg_for_gimple_ssa (lhs, ssa_map);
hsa_insn_atomic *atominsn
= new hsa_insn_atomic (2, BRIG_OPCODE_ATOMIC, BRIG_ATOMIC_LD, mtype,
@@ -3657,7 +3685,7 @@ specialop:
hsa_insn_atomic *atominsn = new hsa_insn_atomic
(4, BRIG_OPCODE_ATOMIC, BRIG_ATOMIC_CAS, atype);
hsa_op_address *addr;
- addr = gen_hsa_addr (gimple_call_arg (stmt, 0), hbb, ssa_map);
+ addr = get_address_from_value (gimple_call_arg (stmt, 0), hbb, ssa_map);
if (lhs != NULL)
dest = hsa_reg_for_gimple_ssa (lhs, ssa_map);
@@ -3709,8 +3737,8 @@ specialop:
tree dst = gimple_call_arg (stmt, 0);
tree src = gimple_call_arg (stmt, 1);
- hsa_op_address *dst_addr = gen_hsa_addr (dst, hbb, ssa_map);
- hsa_op_address *src_addr = gen_hsa_addr (src, hbb, ssa_map);
+ hsa_op_address *dst_addr = get_address_from_value (dst, hbb, ssa_map);
+ hsa_op_address *src_addr = get_address_from_value (src, hbb, ssa_map);
unsigned n = tree_to_uhwi (byte_size);
gen_hsa_memory_copy (hbb, dst_addr, src_addr, n);
@@ -3737,8 +3765,9 @@ specialop:
return;
}
- hsa_op_address *dst_addr = gen_hsa_addr (gimple_call_arg (stmt, 0),
- hbb, ssa_map);
+ hsa_op_address *dst_addr;
+ dst_addr = get_address_from_value (gimple_call_arg (stmt, 0), hbb,
+ ssa_map);
unsigned n = tree_to_uhwi (byte_size);
unsigned HOST_WIDE_INT constant = tree_to_uhwi
(fold_convert (unsigned_char_type_node, c));