static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
-static bool emit_block_move_via_movstr (rtx, rtx, rtx, unsigned);
+static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
struct store_by_pieces *);
-static bool clear_storage_via_clrstr (rtx, rtx, unsigned);
+static bool clear_storage_via_clrmem (rtx, rtx, unsigned);
static rtx clear_storage_via_libcall (rtx, rtx);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx expand_increment (tree, int, int);
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
enum expand_modifier);
+static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
#ifdef PUSH_ROUNDING
static void emit_single_push_insn (enum machine_mode, rtx, tree);
#endif
/* This array records the insn_code of insns to perform block moves. */
-enum insn_code movstr_optab[NUM_MACHINE_MODES];
+enum insn_code movmem_optab[NUM_MACHINE_MODES];
/* This array records the insn_code of insns to perform block clears. */
-enum insn_code clrstr_optab[NUM_MACHINE_MODES];
+enum insn_code clrmem_optab[NUM_MACHINE_MODES];
/* These arrays record the insn_code of two different kinds of insns
to perform block compares. */
if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
move_by_pieces (x, y, INTVAL (size), align, 0);
- else if (emit_block_move_via_movstr (x, y, size, align))
+ else if (emit_block_move_via_movmem (x, y, size, align))
;
else if (may_use_call)
retval = emit_block_move_via_libcall (x, y, size);
rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
if (!tmp || !REG_P (tmp))
return false;
-#ifdef FUNCTION_ARG_PARTIAL_NREGS
if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
NULL_TREE, 1))
return false;
-#endif
FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
}
}
return true;
}
-/* A subroutine of emit_block_move. Expand a movstr pattern;
+/* A subroutine of emit_block_move. Expand a movmem pattern;
return true if successful. */
static bool
-emit_block_move_via_movstr (rtx x, rtx y, rtx size, unsigned int align)
+emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align)
{
rtx opalign = GEN_INT (align / BITS_PER_UNIT);
int save_volatile_ok = volatile_ok;
for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
mode = GET_MODE_WIDER_MODE (mode))
{
- enum insn_code code = movstr_optab[(int) mode];
+ enum insn_code code = movmem_optab[(int) mode];
insn_operand_predicate_fn pred;
if (code != CODE_FOR_nothing
&& (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
(bytepos % slen0) * BITS_PER_UNIT,
- 1, NULL_RTX, mode, mode, ssize);
+ 1, NULL_RTX, mode, mode);
}
else if (bytepos == 0)
{
else
tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
bytepos * BITS_PER_UNIT, 1, NULL_RTX,
- mode, mode, ssize);
+ mode, mode);
if (shift)
tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
else
store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
- mode, tmps[i], ssize);
+ mode, tmps[i]);
}
emit_queue ();
store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
extract_bit_field (src, bitsize,
xbitpos % BITS_PER_WORD, 1,
- NULL_RTX, word_mode, word_mode,
- BITS_PER_WORD),
- BITS_PER_WORD);
+ NULL_RTX, word_mode, word_mode));
}
return tgtblk;
else if (GET_CODE (size) == CONST_INT
&& CLEAR_BY_PIECES_P (INTVAL (size), align))
clear_by_pieces (object, INTVAL (size), align);
- else if (clear_storage_via_clrstr (object, size, align))
+ else if (clear_storage_via_clrmem (object, size, align))
;
else
retval = clear_storage_via_libcall (object, size);
return retval;
}
-/* A subroutine of clear_storage. Expand a clrstr pattern;
+/* A subroutine of clear_storage. Expand a clrmem pattern;
return true if successful. */
static bool
-clear_storage_via_clrstr (rtx object, rtx size, unsigned int align)
+clear_storage_via_clrmem (rtx object, rtx size, unsigned int align)
{
/* Try the most limited insn first, because there's no point
including more than one in the machine description unless
for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
mode = GET_MODE_WIDER_MODE (mode))
{
- enum insn_code code = clrstr_optab[(int) mode];
+ enum insn_code code = clrmem_optab[(int) mode];
insn_operand_predicate_fn pred;
if (code != CODE_FOR_nothing
do_pending_stack_adjust ();
NO_DEFER_POP;
jumpifnot (TREE_OPERAND (exp, 0), lab1);
- start_cleanup_deferral ();
store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
- end_cleanup_deferral ();
emit_queue ();
emit_jump_insn (gen_jump (lab2));
emit_barrier ();
emit_label (lab1);
- start_cleanup_deferral ();
store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
- end_cleanup_deferral ();
emit_queue ();
emit_label (lab2);
OK_DEFER_POP;
}
/* Store the value in the bitfield. */
- store_bit_field (target, bitsize, bitpos, mode, temp,
- int_size_in_bytes (type));
+ store_bit_field (target, bitsize, bitpos, mode, temp);
if (value_mode != VOIDmode)
{
}
return extract_bit_field (target, bitsize, bitpos, unsignedp,
- NULL_RTX, value_mode, VOIDmode,
- int_size_in_bytes (type));
+ NULL_RTX, value_mode, VOIDmode);
}
return const0_rtx;
}
return fold_convert (TREE_TYPE (TREE_OPERAND (exp, 1)), integer_zero_node);
}
+/* Return a tree representing the upper bound of the array mentioned in
+   EXP, an ARRAY_REF.  The result may need to be evaluated in context
+   (it can contain a PLACEHOLDER_EXPR substitution).  Returns NULL_TREE
+   when the array's domain has no known upper bound.  */
+
+tree
+array_ref_up_bound (tree exp)
+{
+ tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
+
+ /* If there is a domain type and it has an upper bound, use it, substituting
+ for a PLACEHOLDER_EXPR as needed. */
+ if (domain_type && TYPE_MAX_VALUE (domain_type))
+ return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
+
+ /* Otherwise fail. */
+ return NULL_TREE;
+}
+
/* Return a tree representing the offset, in bytes, of the field referenced
by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
break;
case WITH_CLEANUP_EXPR:
- exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
- break;
-
case CLEANUP_POINT_EXPR:
+ /* Lowered by gimplify.c. */
+ abort ();
+
case SAVE_EXPR:
return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
may_trap_p instruction may throw. */
&& GET_CODE (PATTERN (insn)) != CLOBBER
&& GET_CODE (PATTERN (insn)) != USE
- && (GET_CODE (insn) == CALL_INSN || may_trap_p (PATTERN (insn))))
+ && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
{
REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
REG_NOTES (insn));
rtx subtarget, original_target;
int ignore;
tree context;
+ bool reduce_bit_field = false;
+#define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
+ ? reduce_to_bit_field_precision ((expr), \
+ target, \
+ type) \
+ : (expr))
mode = TYPE_MODE (type);
unsignedp = TYPE_UNSIGNED (type);
+ if (lang_hooks.reduce_bit_field_operations
+ && TREE_CODE (type) == INTEGER_TYPE
+ && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
+ {
+ /* An operation in what may be a bit-field type needs the
+ result to be reduced to the precision of the bit-field type,
+ which is narrower than that of the type's mode. */
+ reduce_bit_field = true;
+ if (modifier == EXPAND_STACK_PARM)
+ target = 0;
+ }
/* Use subtarget as the target for operand 0 of a binary operation. */
subtarget = get_subtarget (target);
original_target = target;
ignore = (target == const0_rtx
|| ((code == NON_LVALUE_EXPR || code == NOP_EXPR
- || code == CONVERT_EXPR || code == COND_EXPR
- || code == VIEW_CONVERT_EXPR)
+ || code == CONVERT_EXPR || code == COND_EXPR
+ || code == VIEW_CONVERT_EXPR)
&& TREE_CODE (type) == VOID_TYPE));
/* If we are going to ignore this result, we need only do something
}
if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
- || code == INDIRECT_REF || code == BUFFER_REF)
+ || code == INDIRECT_REF)
return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
modifier);
op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
(modifier == EXPAND_STACK_PARM
? NULL_RTX : target),
- ext_mode, ext_mode,
- int_size_in_bytes (TREE_TYPE (tem)));
+ ext_mode, ext_mode);
/* If the result is a record type and BITSIZE is narrower than
the mode of OP0, an integral mode, and this is a big endian
case OBJ_TYPE_REF:
return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
- /* Intended for a reference to a buffer of a file-object in Pascal.
- But it's not certain that a special tree code will really be
- necessary for these. INDIRECT_REF might work for them. */
- case BUFFER_REF:
- abort ();
-
- case IN_EXPR:
- {
- /* Pascal set IN expression.
-
- Algorithm:
- rlo = set_low - (set_low%bits_per_word);
- the_word = set [ (index - rlo)/bits_per_word ];
- bit_index = index % bits_per_word;
- bitmask = 1 << bit_index;
- return !!(the_word & bitmask); */
-
- tree set = TREE_OPERAND (exp, 0);
- tree index = TREE_OPERAND (exp, 1);
- int iunsignedp = TYPE_UNSIGNED (TREE_TYPE (index));
- tree set_type = TREE_TYPE (set);
- tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
- tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
- rtx index_val = expand_expr (index, 0, VOIDmode, 0);
- rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
- rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
- rtx setval = expand_expr (set, 0, VOIDmode, 0);
- rtx setaddr = XEXP (setval, 0);
- enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
- rtx rlow;
- rtx diff, quo, rem, addr, bit, result;
-
- /* If domain is empty, answer is no. Likewise if index is constant
- and out of bounds. */
- if (((TREE_CODE (set_high_bound) == INTEGER_CST
- && TREE_CODE (set_low_bound) == INTEGER_CST
- && tree_int_cst_lt (set_high_bound, set_low_bound))
- || (TREE_CODE (index) == INTEGER_CST
- && TREE_CODE (set_low_bound) == INTEGER_CST
- && tree_int_cst_lt (index, set_low_bound))
- || (TREE_CODE (set_high_bound) == INTEGER_CST
- && TREE_CODE (index) == INTEGER_CST
- && tree_int_cst_lt (set_high_bound, index))))
- return const0_rtx;
-
- if (target == 0)
- target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
-
- /* If we get here, we have to generate the code for both cases
- (in range and out of range). */
-
- op0 = gen_label_rtx ();
- op1 = gen_label_rtx ();
-
- if (! (GET_CODE (index_val) == CONST_INT
- && GET_CODE (lo_r) == CONST_INT))
- emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
- GET_MODE (index_val), iunsignedp, op1);
-
- if (! (GET_CODE (index_val) == CONST_INT
- && GET_CODE (hi_r) == CONST_INT))
- emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
- GET_MODE (index_val), iunsignedp, op1);
-
- /* Calculate the element number of bit zero in the first word
- of the set. */
- if (GET_CODE (lo_r) == CONST_INT)
- rlow = GEN_INT (INTVAL (lo_r)
- & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
- else
- rlow = expand_binop (index_mode, and_optab, lo_r,
- GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
- NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
-
- diff = expand_binop (index_mode, sub_optab, index_val, rlow,
- NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
-
- quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
- GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
- rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
- GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
-
- addr = memory_address (byte_mode,
- expand_binop (index_mode, add_optab, diff,
- setaddr, NULL_RTX, iunsignedp,
- OPTAB_LIB_WIDEN));
-
- /* Extract the bit we want to examine. */
- bit = expand_shift (RSHIFT_EXPR, byte_mode,
- gen_rtx_MEM (byte_mode, addr),
- make_tree (TREE_TYPE (index), rem),
- NULL_RTX, 1);
- result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
- GET_MODE (target) == byte_mode ? target : 0,
- 1, OPTAB_LIB_WIDEN);
-
- if (result != target)
- convert_move (target, result, 1);
-
- /* Output the code to handle the out-of-range case. */
- emit_jump (op0);
- emit_label (op1);
- emit_move_insn (target, const0_rtx);
- emit_label (op0);
- return target;
- }
-
- case WITH_CLEANUP_EXPR:
- if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
- {
- WITH_CLEANUP_EXPR_RTL (exp)
- = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
- expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
- CLEANUP_EH_ONLY (exp));
-
- /* That's it for this cleanup. */
- TREE_OPERAND (exp, 1) = 0;
- }
- return WITH_CLEANUP_EXPR_RTL (exp);
-
- case CLEANUP_POINT_EXPR:
- {
- /* Start a new binding layer that will keep track of all cleanup
- actions to be performed. */
- expand_start_bindings (2);
-
- target_temp_slot_level = temp_slot_level;
-
- op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
- /* If we're going to use this value, load it up now. */
- if (! ignore)
- op0 = force_not_mem (op0);
- preserve_temp_slots (op0);
- expand_end_bindings (NULL_TREE, 0, 0);
- }
- return op0;
-
case CALL_EXPR:
/* Check for a built-in function. */
if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
&& GET_CODE (op0) == SUBREG)
SUBREG_PROMOTED_VAR_P (op0) = 0;
- return op0;
+ return REDUCE_BIT_FIELD (op0);
}
op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
+ op0 = REDUCE_BIT_FIELD (op0);
if (GET_MODE (op0) == mode)
return op0;
op1 = plus_constant (op1, INTVAL (constant_part));
if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
op1 = force_operand (op1, target);
- return op1;
+ return REDUCE_BIT_FIELD (op1);
}
else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
op0 = plus_constant (op0, INTVAL (constant_part));
if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
op0 = force_operand (op0, target);
- return op0;
+ return REDUCE_BIT_FIELD (op0);
}
}
expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
subtarget, &op0, &op1, modifier);
- return simplify_gen_binary (PLUS, mode, op0, op1);
+ return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
case MINUS_EXPR:
/* For initializers, we are allowed to return a MINUS of two
/* If the last operand is a CONST_INT, use plus_constant of
the negated constant. Else make the MINUS. */
if (GET_CODE (op1) == CONST_INT)
- return plus_constant (op0, - INTVAL (op1));
+ return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
else
- return gen_rtx_MINUS (mode, op0, op1);
+ return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
}
this_optab = ! unsignedp && flag_trapv
if (GET_CODE (op1) == CONST_INT)
{
op1 = negate_rtx (mode, op1);
- return simplify_gen_binary (PLUS, mode, op0, op1);
+ return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
}
goto binop2;
if (!REG_P (op0))
op0 = copy_to_mode_reg (mode, op0);
- return gen_rtx_MULT (mode, op0,
+ return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
gen_int_mode (tree_low_cst (exp1, 0),
- TYPE_MODE (TREE_TYPE (exp1))));
+ TYPE_MODE (TREE_TYPE (exp1)))));
}
if (modifier == EXPAND_STACK_PARM)
zextend_p);
if (htem != hipart)
emit_move_insn (hipart, htem);
- return temp;
+ return REDUCE_BIT_FIELD (temp);
}
}
}
expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
subtarget, &op0, &op1, 0);
- return expand_mult (mode, op0, op1, target, unsignedp);
+ return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
case TRUNC_DIV_EXPR:
case FLOOR_DIV_EXPR:
? negv_optab : neg_optab, op0, target, 0);
if (temp == 0)
abort ();
- return temp;
+ return REDUCE_BIT_FIELD (temp);
case ABS_EXPR:
op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
tree then_ = TREE_OPERAND (exp, 1);
tree else_ = TREE_OPERAND (exp, 2);
- /* If we do not have any pending cleanups or stack_levels
- to restore, and at least one arm of the COND_EXPR is a
- GOTO_EXPR to a local label, then we can emit more efficient
- code by using jumpif/jumpifnot instead of the 'if' machinery. */
- if (! optimize
- || containing_blocks_have_cleanups_or_stack_level ())
- ;
- else if (TREE_CODE (then_) == GOTO_EXPR
- && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
+ if (TREE_CODE (then_) == GOTO_EXPR
+ && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
{
jumpif (pred, label_rtx (GOTO_DESTINATION (then_)));
return expand_expr (else_, const0_rtx, VOIDmode, 0);
/* Just use the 'if' machinery. */
expand_start_cond (pred, 0);
- start_cleanup_deferral ();
expand_expr (then_, const0_rtx, VOIDmode, 0);
exp = else_;
expand_start_else ();
expand_expr (exp, const0_rtx, VOIDmode, 0);
}
- end_cleanup_deferral ();
expand_end_cond ();
return const0_rtx;
}
else
jumpifnot (TREE_OPERAND (exp, 0), op0);
- start_cleanup_deferral ();
if (binary_op && temp == 0)
/* Just touch the other operand. */
expand_expr (TREE_OPERAND (binary_op, 1),
modifier == EXPAND_STACK_PARM ? 2 : 0);
jumpif (TREE_OPERAND (exp, 0), op0);
- start_cleanup_deferral ();
if (TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
store_expr (TREE_OPERAND (exp, 2), temp,
modifier == EXPAND_STACK_PARM ? 2 : 0);
modifier == EXPAND_STACK_PARM ? 2 : 0);
jumpifnot (TREE_OPERAND (exp, 0), op0);
- start_cleanup_deferral ();
if (TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
store_expr (TREE_OPERAND (exp, 1), temp,
modifier == EXPAND_STACK_PARM ? 2 : 0);
op1 = gen_label_rtx ();
jumpifnot (TREE_OPERAND (exp, 0), op0);
- start_cleanup_deferral ();
-
/* One branch of the cond can be void, if it never returns. For
example A ? throw : E */
if (temp != 0
else
expand_expr (TREE_OPERAND (exp, 1),
ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
- end_cleanup_deferral ();
emit_queue ();
emit_jump_insn (gen_jump (op1));
emit_barrier ();
emit_label (op0);
- start_cleanup_deferral ();
if (temp != 0
&& TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
store_expr (TREE_OPERAND (exp, 2), temp,
ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
}
- end_cleanup_deferral ();
-
emit_queue ();
emit_label (op1);
OK_DEFER_POP;
return temp;
}
- case TARGET_EXPR:
- {
- /* Something needs to be initialized, but we didn't know
- where that thing was when building the tree. For example,
- it could be the return value of a function, or a parameter
- to a function which lays down in the stack, or a temporary
- variable which must be passed by reference.
-
- We guarantee that the expression will either be constructed
- or copied into our original target. */
-
- tree slot = TREE_OPERAND (exp, 0);
- tree cleanups = NULL_TREE;
- tree exp1;
-
- if (TREE_CODE (slot) != VAR_DECL)
- abort ();
-
- if (! ignore)
- target = original_target;
-
- /* Set this here so that if we get a target that refers to a
- register variable that's already been used, put_reg_into_stack
- knows that it should fix up those uses. */
- TREE_USED (slot) = 1;
-
- if (target == 0)
- {
- if (DECL_RTL_SET_P (slot))
- {
- target = DECL_RTL (slot);
- /* If we have already expanded the slot, so don't do
- it again. (mrs) */
- if (TREE_OPERAND (exp, 1) == NULL_TREE)
- return target;
- }
- else
- {
- target = assign_temp (type, 2, 0, 1);
- SET_DECL_RTL (slot, target);
-
- /* Since SLOT is not known to the called function
- to belong to its stack frame, we must build an explicit
- cleanup. This case occurs when we must build up a reference
- to pass the reference as an argument. In this case,
- it is very likely that such a reference need not be
- built here. */
-
- if (TREE_OPERAND (exp, 2) == 0)
- TREE_OPERAND (exp, 2)
- = lang_hooks.maybe_build_cleanup (slot);
- cleanups = TREE_OPERAND (exp, 2);
- }
- }
- else
- {
- /* This case does occur, when expanding a parameter which
- needs to be constructed on the stack. The target
- is the actual stack address that we want to initialize.
- The function we call will perform the cleanup in this case. */
-
- /* If we have already assigned it space, use that space,
- not target that we were passed in, as our target
- parameter is only a hint. */
- if (DECL_RTL_SET_P (slot))
- {
- target = DECL_RTL (slot);
- /* If we have already expanded the slot, so don't do
- it again. (mrs) */
- if (TREE_OPERAND (exp, 1) == NULL_TREE)
- return target;
- }
- else
- SET_DECL_RTL (slot, target);
- }
-
- exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
- /* Mark it as expanded. */
- TREE_OPERAND (exp, 1) = NULL_TREE;
-
- if (VOID_TYPE_P (TREE_TYPE (exp1)))
- /* If the initializer is void, just expand it; it will initialize
- the object directly. */
- expand_expr (exp1, const0_rtx, VOIDmode, 0);
- else
- store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
-
- expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
-
- return target;
- }
-
case INIT_EXPR:
{
tree lhs = TREE_OPERAND (exp, 0);
case PREINCREMENT_EXPR:
case PREDECREMENT_EXPR:
- return expand_increment (exp, 0, ignore);
+ return REDUCE_BIT_FIELD (expand_increment (exp, 0, ignore));
case POSTINCREMENT_EXPR:
case POSTDECREMENT_EXPR:
/* Faster to treat as pre-increment if result is not used. */
- return expand_increment (exp, ! ignore, ignore);
+ return REDUCE_BIT_FIELD (expand_increment (exp, ! ignore, ignore));
case ADDR_EXPR:
if (modifier == EXPAND_STACK_PARM)
return const0_rtx;
case TRY_CATCH_EXPR:
- {
- tree handler = TREE_OPERAND (exp, 1);
-
- expand_eh_region_start ();
- op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
- expand_eh_handler (handler);
-
- return op0;
- }
-
case CATCH_EXPR:
- expand_start_catch (CATCH_TYPES (exp));
- expand_expr (CATCH_BODY (exp), const0_rtx, VOIDmode, 0);
- expand_end_catch ();
- return const0_rtx;
-
case EH_FILTER_EXPR:
- /* Should have been handled in expand_eh_handler. */
- abort ();
-
case TRY_FINALLY_EXPR:
- {
- tree try_block = TREE_OPERAND (exp, 0);
- tree finally_block = TREE_OPERAND (exp, 1);
-
- if ((!optimize && lang_protect_cleanup_actions == NULL)
- || unsafe_for_reeval (finally_block) > 1)
- {
- /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
- is not sufficient, so we cannot expand the block twice.
- So we play games with GOTO_SUBROUTINE_EXPR to let us
- expand the thing only once. */
- /* When not optimizing, we go ahead with this form since
- (1) user breakpoints operate more predictably without
- code duplication, and
- (2) we're not running any of the global optimizers
- that would explode in time/space with the highly
- connected CFG created by the indirect branching. */
-
- rtx finally_label = gen_label_rtx ();
- rtx done_label = gen_label_rtx ();
- rtx return_link = gen_reg_rtx (Pmode);
- tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
- (tree) finally_label, (tree) return_link);
- TREE_SIDE_EFFECTS (cleanup) = 1;
-
- /* Start a new binding layer that will keep track of all cleanup
- actions to be performed. */
- expand_start_bindings (2);
- target_temp_slot_level = temp_slot_level;
-
- expand_decl_cleanup (NULL_TREE, cleanup);
- op0 = expand_expr (try_block, target, tmode, modifier);
-
- preserve_temp_slots (op0);
- expand_end_bindings (NULL_TREE, 0, 0);
- emit_jump (done_label);
- emit_label (finally_label);
- expand_expr (finally_block, const0_rtx, VOIDmode, 0);
- emit_indirect_jump (return_link);
- emit_label (done_label);
- }
- else
- {
- expand_start_bindings (2);
- target_temp_slot_level = temp_slot_level;
-
- expand_decl_cleanup (NULL_TREE, finally_block);
- op0 = expand_expr (try_block, target, tmode, modifier);
-
- preserve_temp_slots (op0);
- expand_end_bindings (NULL_TREE, 0, 0);
- }
-
- return op0;
- }
+ /* Lowered by tree-eh.c. */
+ abort ();
- case GOTO_SUBROUTINE_EXPR:
- {
- rtx subr = (rtx) TREE_OPERAND (exp, 0);
- rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
- rtx return_address = gen_label_rtx ();
- emit_move_insn (return_link,
- gen_rtx_LABEL_REF (Pmode, return_address));
- emit_jump (subr);
- emit_label (return_address);
- return const0_rtx;
- }
+ case WITH_CLEANUP_EXPR:
+ case CLEANUP_POINT_EXPR:
+ case TARGET_EXPR:
+ /* Lowered by gimplify.c. */
+ abort ();
case VA_ARG_EXPR:
return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
unsignedp, OPTAB_LIB_WIDEN);
if (temp == 0)
abort ();
- return temp;
+ return REDUCE_BIT_FIELD (temp);
+}
+#undef REDUCE_BIT_FIELD
+\f
+/* Subroutine of above: reduce EXP to the precision of TYPE (in the
+   signedness of TYPE), possibly returning the result in TARGET.
+   EXP is an rtx whose mode is wider than TYPE's precision.  For an
+   unsigned TYPE the excess high-order bits are masked off; for a
+   signed TYPE the value is sign-extended from the bit-field's sign
+   bit by a left shift followed by an arithmetic right shift.  */
+static rtx
+reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
+{
+ HOST_WIDE_INT prec = TYPE_PRECISION (type);
+ /* TARGET is only usable if its mode matches EXP's; otherwise let the
+ expanders pick a fresh register.  */
+ if (target && GET_MODE (target) != GET_MODE (exp))
+ target = 0;
+ if (TYPE_UNSIGNED (type))
+ {
+ rtx mask;
+ /* Build a constant with the PREC low-order bits set, in EXP's mode.
+ Two HOST_WIDE_INT halves are needed when PREC does not fit in
+ one.  */
+ if (prec < HOST_BITS_PER_WIDE_INT)
+ mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
+ GET_MODE (exp));
+ else
+ mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
+ ((unsigned HOST_WIDE_INT) 1
+ << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
+ GET_MODE (exp));
+ return expand_and (GET_MODE (exp), exp, mask, target);
+ }
+ else
+ {
+ /* Sign-extend from bit PREC-1: shift left so the bit-field's sign
+ bit becomes the mode's sign bit, then shift back arithmetically.  */
+ tree count = build_int_2 (GET_MODE_BITSIZE (GET_MODE (exp)) - prec, 0);
+ exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
+ return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
+ }
}
\f
/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that