/* Convert function calls to rtl insns, for GNU C compiler.
- Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
- 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
- Free Software Foundation, Inc.
+ Copyright (C) 1989-2014 Free Software Foundation, Inc.
This file is part of GCC.
#include "tm.h"
#include "rtl.h"
#include "tree.h"
+#include "stor-layout.h"
+#include "varasm.h"
+#include "stringpool.h"
+#include "attribs.h"
+#include "predict.h"
+#include "vec.h"
+#include "hashtab.h"
+#include "hash-set.h"
+#include "machmode.h"
+#include "hard-reg-set.h"
+#include "input.h"
+#include "function.h"
+#include "basic-block.h"
+#include "tree-ssa-alias.h"
+#include "internal-fn.h"
+#include "gimple-expr.h"
+#include "is-a.h"
#include "gimple.h"
#include "flags.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
-#include "function.h"
#include "regs.h"
#include "diagnostic-core.h"
#include "output.h"
#include "sbitmap.h"
#include "langhooks.h"
#include "target.h"
-#include "debug.h"
+#include "hash-map.h"
+#include "plugin-api.h"
+#include "ipa-ref.h"
#include "cgraph.h"
#include "except.h"
#include "dbgcnt.h"
-#include "tree-flow.h"
+#include "rtl-iter.h"
/* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */
#define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
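/* For example, on a target where PREFERRED_STACK_BOUNDARY is 128 and
   BITS_PER_UNIT is 8, STACK_BYTES evaluates to 16, so argument block
   sizes get rounded up to multiples of 16 bytes (illustrative values;
   both macros are target-defined).  */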
/* Tree node for this argument. */
tree tree_value;
/* Mode for value; TYPE_MODE unless promoted. */
- enum machine_mode mode;
+ machine_mode mode;
/* Current RTL value for argument, or 0 if it isn't precomputed. */
rtx value;
  /* Initially-computed RTL value for argument; only for const functions.  */
rtx stack;
/* Location on the stack of the start of this argument slot. This can
differ from STACK if this arg pads downward. This location is known
- to be aligned to FUNCTION_ARG_BOUNDARY. */
+ to be aligned to TARGET_FUNCTION_ARG_BOUNDARY. */
rtx stack_slot;
/* Place that this stack area has been saved, if needed. */
rtx save_area;
static void emit_call_1 (rtx, tree, tree, tree, HOST_WIDE_INT, HOST_WIDE_INT,
HOST_WIDE_INT, rtx, rtx, int, rtx, int,
- CUMULATIVE_ARGS *);
+ cumulative_args_t);
static void precompute_register_parameters (int, struct arg_data *, int *);
static int store_one_arg (struct arg_data *, rtx, int, int, int);
static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
static void initialize_argument_information (int, struct arg_data *,
struct args_size *, int,
tree, tree,
- tree, tree, CUMULATIVE_ARGS *, int,
+ tree, tree, cumulative_args_t, int,
rtx *, int *, int *, int *,
bool *, bool);
static void compute_argument_addresses (struct arg_data *, rtx, int);
static void load_register_parameters (struct arg_data *, int, rtx *, int,
int, int *);
static rtx emit_library_call_value_1 (int, rtx, rtx, enum libcall_type,
- enum machine_mode, int, va_list);
+ machine_mode, int, va_list);
static int special_function_p (const_tree, int);
static int check_sibcall_argument_overlap_1 (rtx);
-static int check_sibcall_argument_overlap (rtx, struct arg_data *, int);
+static int check_sibcall_argument_overlap (rtx_insn *, struct arg_data *, int);
static int combine_pending_stack_adjustment_and_call (int, struct args_size *,
unsigned int);
HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED,
rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
- CUMULATIVE_ARGS *args_so_far ATTRIBUTE_UNUSED)
+ cumulative_args_t args_so_far ATTRIBUTE_UNUSED)
{
rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
- rtx call_insn;
+ rtx_insn *call_insn;
+ rtx call, funmem;
int already_popped = 0;
HOST_WIDE_INT n_popped
= targetm.calls.return_pops_args (fndecl, funtype, stack_size);
#ifdef CALL_POPS_ARGS
- n_popped += CALL_POPS_ARGS (* args_so_far);
+ n_popped += CALL_POPS_ARGS (*get_cumulative_args (args_so_far));
#endif
  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
if (GET_CODE (funexp) != SYMBOL_REF)
funexp = memory_address (FUNCTION_MODE, funexp);
+ funmem = gen_rtx_MEM (FUNCTION_MODE, funexp);
+ if (fndecl && TREE_CODE (fndecl) == FUNCTION_DECL)
+ {
+ tree t = fndecl;
+
+      /* Although a built-in FUNCTION_DECL and its non-__builtin
+	 counterpart compare equal and get a shared mem_attrs, they
+	 produce different dump output in compare-debug compilations:
+	 if one compilation garbage-collects an entry and then adds a
+	 different (but equivalent) entry, while the other doesn't run
+	 the garbage collector at the same spot, the latter keeps
+	 sharing the mem_attr with the equivalent entry.  */
+ if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
+ {
+ tree t2 = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
+ if (t2)
+ t = t2;
+ }
+
+ set_mem_expr (funmem, t);
+ }
+ else if (fntree)
+ set_mem_expr (funmem, build_simple_mem_ref (CALL_EXPR_FN (fntree)));
+
#if defined (HAVE_sibcall_pop) && defined (HAVE_sibcall_value_pop)
if ((ecf_flags & ECF_SIBCALL)
&& HAVE_sibcall_pop && HAVE_sibcall_value_pop
if possible, for the sake of frame pointer elimination. */
if (valreg)
- pat = GEN_SIBCALL_VALUE_POP (valreg,
- gen_rtx_MEM (FUNCTION_MODE, funexp),
- rounded_stack_size_rtx, next_arg_reg,
- n_pop);
+ pat = GEN_SIBCALL_VALUE_POP (valreg, funmem, rounded_stack_size_rtx,
+ next_arg_reg, n_pop);
else
- pat = GEN_SIBCALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
- rounded_stack_size_rtx, next_arg_reg, n_pop);
+ pat = GEN_SIBCALL_POP (funmem, rounded_stack_size_rtx, next_arg_reg,
+ n_pop);
emit_call_insn (pat);
already_popped = 1;
if possible, for the sake of frame pointer elimination. */
if (valreg)
- pat = GEN_CALL_VALUE_POP (valreg,
- gen_rtx_MEM (FUNCTION_MODE, funexp),
- rounded_stack_size_rtx, next_arg_reg, n_pop);
+ pat = GEN_CALL_VALUE_POP (valreg, funmem, rounded_stack_size_rtx,
+ next_arg_reg, n_pop);
else
- pat = GEN_CALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
- rounded_stack_size_rtx, next_arg_reg, n_pop);
+ pat = GEN_CALL_POP (funmem, rounded_stack_size_rtx, next_arg_reg,
+ n_pop);
emit_call_insn (pat);
already_popped = 1;
&& HAVE_sibcall && HAVE_sibcall_value)
{
if (valreg)
- emit_call_insn (GEN_SIBCALL_VALUE (valreg,
- gen_rtx_MEM (FUNCTION_MODE, funexp),
+ emit_call_insn (GEN_SIBCALL_VALUE (valreg, funmem,
rounded_stack_size_rtx,
next_arg_reg, NULL_RTX));
else
- emit_call_insn (GEN_SIBCALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
- rounded_stack_size_rtx, next_arg_reg,
+ emit_call_insn (GEN_SIBCALL (funmem, rounded_stack_size_rtx,
+ next_arg_reg,
GEN_INT (struct_value_size)));
}
else
if (HAVE_call && HAVE_call_value)
{
if (valreg)
- emit_call_insn (GEN_CALL_VALUE (valreg,
- gen_rtx_MEM (FUNCTION_MODE, funexp),
- rounded_stack_size_rtx, next_arg_reg,
- NULL_RTX));
+ emit_call_insn (GEN_CALL_VALUE (valreg, funmem, rounded_stack_size_rtx,
+ next_arg_reg, NULL_RTX));
else
- emit_call_insn (GEN_CALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
- rounded_stack_size_rtx, next_arg_reg,
+ emit_call_insn (GEN_CALL (funmem, rounded_stack_size_rtx, next_arg_reg,
GEN_INT (struct_value_size)));
}
else
/* Find the call we just emitted. */
call_insn = last_call_insn ();
+  /* Some targets create a fresh MEM instead of reusing the one provided
+     above.  Set its MEM_EXPR.  */
+ call = get_call_rtx_from (call_insn);
+ if (call
+ && MEM_EXPR (XEXP (call, 0)) == NULL_TREE
+ && MEM_EXPR (funmem) != NULL_TREE)
+ set_mem_expr (XEXP (call, 0), MEM_EXPR (funmem));
+
/* Put the register usage information there. */
add_function_usage_to (call_insn, call_fusage);
SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);
- /* Record debug information for virtual calls. */
- if (flag_enable_icf_debug && fndecl == NULL)
- (*debug_hooks->virtual_call_token) (CALL_EXPR_FN (fntree),
- INSN_UID (call_insn));
-
/* Restore this now, so that we do defer pops for this call's args
if the context of the call as a whole permits. */
inhibit_defer_pop = old_inhibit_defer_pop;
rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
stack_pointer_delta -= n_popped;
+ add_reg_note (call_insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
+
      /* If popping is needed, stack realign must use DRAP.  */
if (SUPPORTS_STACK_ALIGNMENT)
crtl->need_drap = true;
}
+  /* For noreturn calls when not accumulating outgoing args, force a
+     REG_ARGS_SIZE note to prevent crossjumping of calls with different
+     arg sizes.  */
+ else if (!ACCUMULATE_OUTGOING_ARGS && (ecf_flags & ECF_NORETURN) != 0)
+ add_reg_note (call_insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
if (!ACCUMULATE_OUTGOING_ARGS)
{
&& ! strcmp (tname, "sigsetjmp"))
|| (tname[1] == 'a'
&& ! strcmp (tname, "savectx")))
- flags |= ECF_RETURNS_TWICE;
+ flags |= ECF_RETURNS_TWICE | ECF_LEAF;
if (tname[1] == 'i'
&& ! strcmp (tname, "siglongjmp"))
&& ! strcmp (tname, "vfork"))
|| (tname[0] == 'g' && tname[1] == 'e'
&& !strcmp (tname, "getcontext")))
- flags |= ECF_RETURNS_TWICE;
+ flags |= ECF_RETURNS_TWICE | ECF_LEAF;
else if (tname[0] == 'l' && tname[1] == 'o'
&& ! strcmp (tname, "longjmp"))
return flags;
}
+/* Similar to special_function_p; return a set of ERF_ flags for the
+ function FNDECL. */
+static int
+decl_return_flags (tree fndecl)
+{
+ tree attr;
+ tree type = TREE_TYPE (fndecl);
+ if (!type)
+ return 0;
+
+ attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
+ if (!attr)
+ return 0;
+
+ attr = TREE_VALUE (TREE_VALUE (attr));
+ if (!attr || TREE_STRING_LENGTH (attr) < 1)
+ return 0;
+
+ switch (TREE_STRING_POINTER (attr)[0])
+ {
+ case '1':
+ case '2':
+ case '3':
+ case '4':
+ return ERF_RETURNS_ARG | (TREE_STRING_POINTER (attr)[0] - '1');
+
+ case 'm':
+ return ERF_NOALIAS;
+
+ case '.':
+ default:
+ return 0;
+ }
+}
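+/* Illustrative decoding (not part of the patch) of the leading character
+   of hypothetical "fn spec" strings under the convention above:
+
+     "1..."  -> ERF_RETURNS_ARG | 0   the function returns its 1st argument
+     "2.."   -> ERF_RETURNS_ARG | 1   the function returns its 2nd argument
+     "m"     -> ERF_NOALIAS           the result aliases nothing (malloc-like)
+     "...."  -> 0                     nothing is known about the result  */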
+
/* Return nonzero when FNDECL represents a call to setjmp. */
int
setjmp_call_p (const_tree fndecl)
{
+ if (DECL_IS_RETURNS_TWICE (fndecl))
+ return ECF_RETURNS_TWICE;
return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
}
bool
alloca_call_p (const_tree exp)
{
+ tree fndecl;
if (TREE_CODE (exp) == CALL_EXPR
- && TREE_CODE (CALL_EXPR_FN (exp)) == ADDR_EXPR
- && (TREE_CODE (TREE_OPERAND (CALL_EXPR_FN (exp), 0)) == FUNCTION_DECL)
- && (special_function_p (TREE_OPERAND (CALL_EXPR_FN (exp), 0), 0)
- & ECF_MAY_BE_ALLOCA))
+ && (fndecl = get_callee_fndecl (exp))
+ && (special_function_p (fndecl, 0) & ECF_MAY_BE_ALLOCA))
+ return true;
+ return false;
+}
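+/* E.g. a call whose callee FUNCTION_DECL is named "alloca" or
+   "__builtin_alloca" gets ECF_MAY_BE_ALLOCA from special_function_p, so
+   alloca_call_p returns true for it; ordinary calls yield false.  */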
+
+/* Return TRUE if FNDECL is either a TM builtin or a TM cloned
+ function. Return FALSE otherwise. */
+
+static bool
+is_tm_builtin (const_tree fndecl)
+{
+ if (fndecl == NULL)
+ return false;
+
+ if (decl_is_tm_clone (fndecl))
return true;
+
+ if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
+ {
+ switch (DECL_FUNCTION_CODE (fndecl))
+ {
+ case BUILT_IN_TM_COMMIT:
+ case BUILT_IN_TM_COMMIT_EH:
+ case BUILT_IN_TM_ABORT:
+ case BUILT_IN_TM_IRREVOCABLE:
+ case BUILT_IN_TM_GETTMCLONE_IRR:
+ case BUILT_IN_TM_MEMCPY:
+ case BUILT_IN_TM_MEMMOVE:
+ case BUILT_IN_TM_MEMSET:
+ CASE_BUILT_IN_TM_STORE (1):
+ CASE_BUILT_IN_TM_STORE (2):
+ CASE_BUILT_IN_TM_STORE (4):
+ CASE_BUILT_IN_TM_STORE (8):
+ CASE_BUILT_IN_TM_STORE (FLOAT):
+ CASE_BUILT_IN_TM_STORE (DOUBLE):
+ CASE_BUILT_IN_TM_STORE (LDOUBLE):
+ CASE_BUILT_IN_TM_STORE (M64):
+ CASE_BUILT_IN_TM_STORE (M128):
+ CASE_BUILT_IN_TM_STORE (M256):
+ CASE_BUILT_IN_TM_LOAD (1):
+ CASE_BUILT_IN_TM_LOAD (2):
+ CASE_BUILT_IN_TM_LOAD (4):
+ CASE_BUILT_IN_TM_LOAD (8):
+ CASE_BUILT_IN_TM_LOAD (FLOAT):
+ CASE_BUILT_IN_TM_LOAD (DOUBLE):
+ CASE_BUILT_IN_TM_LOAD (LDOUBLE):
+ CASE_BUILT_IN_TM_LOAD (M64):
+ CASE_BUILT_IN_TM_LOAD (M128):
+ CASE_BUILT_IN_TM_LOAD (M256):
+ case BUILT_IN_TM_LOG:
+ case BUILT_IN_TM_LOG_1:
+ case BUILT_IN_TM_LOG_2:
+ case BUILT_IN_TM_LOG_4:
+ case BUILT_IN_TM_LOG_8:
+ case BUILT_IN_TM_LOG_FLOAT:
+ case BUILT_IN_TM_LOG_DOUBLE:
+ case BUILT_IN_TM_LOG_LDOUBLE:
+ case BUILT_IN_TM_LOG_M64:
+ case BUILT_IN_TM_LOG_M128:
+ case BUILT_IN_TM_LOG_M256:
+ return true;
+ default:
+ break;
+ }
+ }
return false;
}
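/* E.g. a call to the runtime entry point behind BUILT_IN_TM_MEMCPY, or to
   any function that decl_is_tm_clone recognizes as a transactional clone,
   is treated as a TM builtin and receives ECF_TM_BUILTIN below.  */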
if (DECL_IS_NOVOPS (exp))
flags |= ECF_NOVOPS;
+ if (lookup_attribute ("leaf", DECL_ATTRIBUTES (exp)))
+ flags |= ECF_LEAF;
if (TREE_NOTHROW (exp))
flags |= ECF_NOTHROW;
+ if (flag_tm)
+ {
+ if (is_tm_builtin (exp))
+ flags |= ECF_TM_BUILTIN;
+ else if ((flags & (ECF_CONST|ECF_NOVOPS)) != 0
+ || lookup_attribute ("transaction_pure",
+ TYPE_ATTRIBUTES (TREE_TYPE (exp))))
+ flags |= ECF_TM_PURE;
+ }
+
flags = special_function_p (exp, flags);
}
- else if (TYPE_P (exp) && TYPE_READONLY (exp))
- flags |= ECF_CONST;
+ else if (TYPE_P (exp))
+ {
+ if (TYPE_READONLY (exp))
+ flags |= ECF_CONST;
+
+ if (flag_tm
+ && ((flags & ECF_CONST) != 0
+ || lookup_attribute ("transaction_pure", TYPE_ATTRIBUTES (exp))))
+ flags |= ECF_TM_PURE;
+ }
+ else
+ gcc_unreachable ();
if (TREE_THIS_VOLATILE (exp))
{
pop_temp_slots ();
}
- /* If the value is a non-legitimate constant, force it into a
- pseudo now. TLS symbols sometimes need a call to resolve. */
- if (CONSTANT_P (args[i].value)
- && !LEGITIMATE_CONSTANT_P (args[i].value))
- args[i].value = force_reg (args[i].mode, args[i].value);
-
/* If we are to promote the function arg to a wider mode,
do it now. */
TYPE_MODE (TREE_TYPE (args[i].tree_value)),
args[i].value, args[i].unsignedp);
+ /* If the value is a non-legitimate constant, force it into a
+ pseudo now. TLS symbols sometimes need a call to resolve. */
+ if (CONSTANT_P (args[i].value)
+ && !targetm.legitimate_constant_p (args[i].mode, args[i].value))
+ args[i].value = force_reg (args[i].mode, args[i].value);
+
/* If we're going to have to load the value by parts, pull the
parts into pseudos. The part extraction process can involve
non-trivial computation. */
|| (GET_CODE (args[i].value) == SUBREG
&& REG_P (SUBREG_REG (args[i].value)))))
&& args[i].mode != BLKmode
- && rtx_cost (args[i].value, SET, optimize_insn_for_speed_p ())
+ && set_src_cost (args[i].value, optimize_insn_for_speed_p ())
> COSTS_N_INSNS (1)
&& ((*reg_parm_seen
&& targetm.small_register_classes_for_mode_p (args[i].mode))
if (stack_usage_map[low] != 0)
{
int num_to_save;
- enum machine_mode save_mode;
+ machine_mode save_mode;
int delta;
+ rtx addr;
rtx stack_area;
rtx save_area;
#else
delta = low;
#endif
- stack_area = gen_rtx_MEM (save_mode,
- memory_address (save_mode,
- plus_constant (argblock,
- delta)));
+ addr = plus_constant (Pmode, argblock, delta);
+ stack_area = gen_rtx_MEM (save_mode, memory_address (save_mode, addr));
set_mem_align (stack_area, PARM_BOUNDARY);
if (save_mode == BLKmode)
{
- save_area = assign_stack_temp (BLKmode, num_to_save, 0);
+ save_area = assign_stack_temp (BLKmode, num_to_save);
emit_block_move (validize_mem (save_area), stack_area,
GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
}
static void
restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
{
- enum machine_mode save_mode = GET_MODE (save_area);
+ machine_mode save_mode = GET_MODE (save_area);
int delta;
- rtx stack_area;
+ rtx addr, stack_area;
#ifdef ARGS_GROW_DOWNWARD
delta = -high_to_save;
#else
delta = low_to_save;
#endif
- stack_area = gen_rtx_MEM (save_mode,
- memory_address (save_mode,
- plus_constant (argblock, delta)));
+ addr = plus_constant (Pmode, argblock, delta);
+ stack_area = gen_rtx_MEM (save_mode, memory_address (save_mode, addr));
set_mem_align (stack_area, PARM_BOUNDARY);
if (save_mode != BLKmode)
for (i = 0; i < num_actuals; i++)
if (args[i].reg != 0 && ! args[i].pass_on_stack
+ && GET_CODE (args[i].reg) != PARALLEL
&& args[i].mode == BLKmode
&& MEM_P (args[i].value)
&& (MEM_ALIGN (args[i].value)
emit_move_insn (reg, const0_rtx);
bytes -= bitsize / BITS_PER_UNIT;
- store_bit_field (reg, bitsize, endian_correction, word_mode,
- word);
+ store_bit_field (reg, bitsize, endian_correction, 0, 0,
+ word_mode, word);
}
}
}
int n_named_args ATTRIBUTE_UNUSED,
tree exp, tree struct_value_addr_value,
tree fndecl, tree fntype,
- CUMULATIVE_ARGS *args_so_far,
+ cumulative_args_t args_so_far,
int reg_parm_stack_space,
rtx *old_stack_level, int *old_pending_adj,
int *must_preallocate, int *ecf_flags,
bool *may_tailcall, bool call_from_thunk_p)
{
+ CUMULATIVE_ARGS *args_so_far_pnt = get_cumulative_args (args_so_far);
location_t loc = EXPR_LOCATION (exp);
- /* 1 if scanning parms front to back, -1 if scanning back to front. */
- int inc;
/* Count arg position in order args appear. */
int argpos;
args_size->var = 0;
/* In this loop, we consider args in the order they are written.
- We fill up ARGS from the front or from the back if necessary
- so that in any case the first arg to be pushed ends up at the front. */
-
- if (PUSH_ARGS_REVERSED)
- {
- i = num_actuals - 1, inc = -1;
- /* In this case, must reverse order of args
- so that we compute and push the last arg first. */
- }
- else
- {
- i = 0, inc = 1;
- }
+ We fill up ARGS from the back. */
- /* First fill in the actual arguments in the ARGS array, splitting
- complex arguments if necessary. */
+ i = num_actuals - 1;
{
int j = i;
call_expr_arg_iterator iter;
if (struct_value_addr_value)
{
args[j].tree_value = struct_value_addr_value;
- j += inc;
+ j--;
}
FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
{
{
tree subtype = TREE_TYPE (argtype);
args[j].tree_value = build1 (REALPART_EXPR, subtype, arg);
- j += inc;
+ j--;
args[j].tree_value = build1 (IMAGPART_EXPR, subtype, arg);
}
else
args[j].tree_value = arg;
- j += inc;
+ j--;
}
}
/* I counts args in order (to be) pushed; ARGPOS counts in order written. */
- for (argpos = 0; argpos < num_actuals; i += inc, argpos++)
+ for (argpos = 0; argpos < num_actuals; i--, argpos++)
{
tree type = TREE_TYPE (args[i].tree_value);
int unsignedp;
- enum machine_mode mode;
+ machine_mode mode;
/* Replace erroneous argument with constant zero. */
if (type == error_mark_node || !COMPLETE_TYPE_P (type))
with those made by function.c. */
/* See if this argument should be passed by invisible reference. */
- if (pass_by_reference (args_so_far, TYPE_MODE (type),
+ if (pass_by_reference (args_so_far_pnt, TYPE_MODE (type),
type, argpos < n_named_args))
{
bool callee_copies;
- tree base;
+ tree base = NULL_TREE;
callee_copies
- = reference_callee_copied (args_so_far, TYPE_MODE (type),
+ = reference_callee_copied (args_so_far_pnt, TYPE_MODE (type),
type, argpos < n_named_args);
/* If we're compiling a thunk, pass through invisible references
&& TREE_CODE (base) != SSA_NAME
&& (!DECL_P (base) || MEM_P (DECL_RTL (base)))))
{
+ mark_addressable (args[i].tree_value);
+
/* We can't use sibcalls if a callee-copied argument is
stored in the current function's frame. */
if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base))
if (*old_stack_level == 0)
{
- emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
+ emit_stack_save (SAVE_BLOCK, old_stack_level);
*old_pending_adj = pending_stack_adjust;
pending_stack_adjust = 0;
}
- copy = gen_rtx_MEM (BLKmode,
- allocate_dynamic_stack_space
- (size_rtx, NULL_RTX, TYPE_ALIGN (type)));
+ /* We can pass TRUE as the 4th argument because we just
+ saved the stack pointer and will restore it right after
+ the call. */
+ copy = allocate_dynamic_stack_space (size_rtx,
+ TYPE_ALIGN (type),
+ TYPE_ALIGN (type),
+ true);
+ copy = gen_rtx_MEM (BLKmode, copy);
set_mem_attributes (copy, type, 1);
}
else
- copy = assign_temp (type, 0, 1, 0);
+ copy = assign_temp (type, 1, 0);
store_expr (args[i].tree_value, copy, 0, false);
#else
args[i].reg != 0,
#endif
+ reg_parm_stack_space,
args[i].pass_on_stack ? 0 : args[i].partial,
fndecl, args_size, &args[i].locate);
#ifdef BLOCK_REG_PADDING
for (i = 0; i < num_actuals; i++)
{
tree type;
- enum machine_mode mode;
+ machine_mode mode;
if (TREE_CODE (args[i].tree_value) != CALL_EXPR)
continue;
args[i].initial_value
= gen_lowpart_SUBREG (mode, args[i].value);
SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
- SUBREG_PROMOTED_UNSIGNED_SET (args[i].initial_value,
- args[i].unsignedp);
+ SUBREG_PROMOTED_SET (args[i].initial_value, args[i].unsignedp);
}
}
}
rtx addr;
unsigned int align, boundary;
unsigned int units_on_stack = 0;
- enum machine_mode partial_mode = VOIDmode;
+ machine_mode partial_mode = VOIDmode;
/* Skip this parm if it will not be passed on the stack. */
if (! args[i].pass_on_stack
continue;
if (CONST_INT_P (offset))
- addr = plus_constant (arg_reg, INTVAL (offset));
+ addr = plus_constant (Pmode, arg_reg, INTVAL (offset));
else
addr = gen_rtx_PLUS (Pmode, arg_reg, offset);
- addr = plus_constant (addr, arg_offset);
+ addr = plus_constant (Pmode, addr, arg_offset);
if (args[i].partial != 0)
{
partial_mode = mode_for_size (units_on_stack * BITS_PER_UNIT,
MODE_INT, 1);
args[i].stack = gen_rtx_MEM (partial_mode, addr);
- set_mem_size (args[i].stack, GEN_INT (units_on_stack));
+ set_mem_size (args[i].stack, units_on_stack);
}
else
{
set_mem_align (args[i].stack, align);
if (CONST_INT_P (slot_offset))
- addr = plus_constant (arg_reg, INTVAL (slot_offset));
+ addr = plus_constant (Pmode, arg_reg, INTVAL (slot_offset));
else
addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);
- addr = plus_constant (addr, arg_offset);
+ addr = plus_constant (Pmode, addr, arg_offset);
if (args[i].partial != 0)
{
Generate a simple memory reference of the correct size.
*/
args[i].stack_slot = gen_rtx_MEM (partial_mode, addr);
- set_mem_size (args[i].stack_slot, GEN_INT (units_on_stack));
+ set_mem_size (args[i].stack_slot, units_on_stack);
}
else
{
/* Get the function to call, in the form of RTL. */
if (fndecl)
{
- /* If this is the first use of the function, see if we need to
- make an external definition for it. */
if (!TREE_USED (fndecl) && fndecl != current_function_decl)
- {
- assemble_external (fndecl);
- TREE_USED (fndecl) = 1;
- }
+ TREE_USED (fndecl) = 1;
/* Get a SYMBOL_REF rtx for the function address. */
funexp = XEXP (DECL_RTL (fndecl), 0);
return funexp;
}
+/* Internal state for internal_arg_pointer_based_exp and its helpers. */
+static struct
+{
+ /* Last insn that has been scanned by internal_arg_pointer_based_exp_scan,
+ or NULL_RTX if none has been scanned yet. */
+ rtx_insn *scan_start;
+  /* Vector indexed by REGNO - FIRST_PSEUDO_REGISTER, recording whether a
+     pseudo is based on crtl->args.internal_arg_pointer.  The element is
+     NULL_RTX if the pseudo isn't based on it, a CONST_INT offset if the
+     pseudo is based on it with a fixed offset, or PC if the offset is
+     variable or unknown.  */
+ vec<rtx> cache;
+} internal_arg_pointer_exp_state;
+
+static rtx internal_arg_pointer_based_exp (const_rtx, bool);
+
+/* Helper function for internal_arg_pointer_based_exp.  Scan insns in
+   the tail call sequence, starting with the first insn that hasn't been
+   scanned yet, and note for each pseudo on the LHS whether it is based
+   on crtl->args.internal_arg_pointer or not, and if so, what offset
+   from that pointer it has.  */
+
+static void
+internal_arg_pointer_based_exp_scan (void)
+{
+ rtx_insn *insn, *scan_start = internal_arg_pointer_exp_state.scan_start;
+
+ if (scan_start == NULL_RTX)
+ insn = get_insns ();
+ else
+ insn = NEXT_INSN (scan_start);
+
+ while (insn)
+ {
+ rtx set = single_set (insn);
+ if (set && REG_P (SET_DEST (set)) && !HARD_REGISTER_P (SET_DEST (set)))
+ {
+ rtx val = NULL_RTX;
+ unsigned int idx = REGNO (SET_DEST (set)) - FIRST_PSEUDO_REGISTER;
+ /* Punt on pseudos set multiple times. */
+ if (idx < internal_arg_pointer_exp_state.cache.length ()
+ && (internal_arg_pointer_exp_state.cache[idx]
+ != NULL_RTX))
+ val = pc_rtx;
+ else
+ val = internal_arg_pointer_based_exp (SET_SRC (set), false);
+ if (val != NULL_RTX)
+ {
+ if (idx >= internal_arg_pointer_exp_state.cache.length ())
+ internal_arg_pointer_exp_state.cache
+ .safe_grow_cleared (idx + 1);
+ internal_arg_pointer_exp_state.cache[idx] = val;
+ }
+ }
+ if (NEXT_INSN (insn) == NULL_RTX)
+ scan_start = insn;
+ insn = NEXT_INSN (insn);
+ }
+
+ internal_arg_pointer_exp_state.scan_start = scan_start;
+}
+
+/* Compute whether RTL is based on crtl->args.internal_arg_pointer.  Return
+   NULL_RTX if RTL isn't based on it, a CONST_INT offset if RTL is based on
+   it with a fixed offset, or PC if the offset is variable or unknown.
+   TOPLEVEL is true if the function is invoked at the topmost level.  */
+
+static rtx
+internal_arg_pointer_based_exp (const_rtx rtl, bool toplevel)
+{
+ if (CONSTANT_P (rtl))
+ return NULL_RTX;
+
+ if (rtl == crtl->args.internal_arg_pointer)
+ return const0_rtx;
+
+ if (REG_P (rtl) && HARD_REGISTER_P (rtl))
+ return NULL_RTX;
+
+ if (GET_CODE (rtl) == PLUS && CONST_INT_P (XEXP (rtl, 1)))
+ {
+ rtx val = internal_arg_pointer_based_exp (XEXP (rtl, 0), toplevel);
+ if (val == NULL_RTX || val == pc_rtx)
+ return val;
+ return plus_constant (Pmode, val, INTVAL (XEXP (rtl, 1)));
+ }
+
+  /* When called at the topmost level, scan pseudo assignments between the
+     last scanned instruction in the tail call sequence and the latest insn
+     in that sequence.  */
+ if (toplevel)
+ internal_arg_pointer_based_exp_scan ();
+
+ if (REG_P (rtl))
+ {
+ unsigned int idx = REGNO (rtl) - FIRST_PSEUDO_REGISTER;
+ if (idx < internal_arg_pointer_exp_state.cache.length ())
+ return internal_arg_pointer_exp_state.cache[idx];
+
+ return NULL_RTX;
+ }
+
+ subrtx_iterator::array_type array;
+ FOR_EACH_SUBRTX (iter, array, rtl, NONCONST)
+ {
+ const_rtx x = *iter;
+ if (REG_P (x) && internal_arg_pointer_based_exp (x, false) != NULL_RTX)
+ return pc_rtx;
+ if (MEM_P (x))
+ iter.skip_subrtxes ();
+ }
+
+ return NULL_RTX;
+}
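+/* Illustrative results (not part of the patch), assuming pseudo R was
+   earlier copied from crtl->args.internal_arg_pointer:
+
+     crtl->args.internal_arg_pointer     -> const0_rtx     (offset 0)
+     (plus R (const_int 8))              -> (const_int 8)  (fixed offset)
+     R combined with a runtime index     -> pc_rtx         (unknown offset)
+     a hard register or a constant       -> NULL_RTX       (not based on it)  */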
+
/* Return true if and only if SIZE storage units (usually bytes)
starting from address ADDR overlap with already clobbered argument
area. This function is used to determine if we should give up a
mem_overlaps_already_clobbered_arg_p (rtx addr, unsigned HOST_WIDE_INT size)
{
HOST_WIDE_INT i;
+ rtx val;
- if (addr == crtl->args.internal_arg_pointer)
- i = 0;
- else if (GET_CODE (addr) == PLUS
- && XEXP (addr, 0) == crtl->args.internal_arg_pointer
- && CONST_INT_P (XEXP (addr, 1)))
- i = INTVAL (XEXP (addr, 1));
- /* Return true for arg pointer based indexed addressing. */
- else if (GET_CODE (addr) == PLUS
- && (XEXP (addr, 0) == crtl->args.internal_arg_pointer
- || XEXP (addr, 1) == crtl->args.internal_arg_pointer))
+ if (bitmap_empty_p (stored_args_map))
+ return false;
+ val = internal_arg_pointer_based_exp (addr, true);
+ if (val == NULL_RTX)
+ return false;
+ else if (val == pc_rtx)
return true;
else
- return false;
+ i = INTVAL (val);
+#ifdef STACK_GROWS_DOWNWARD
+ i -= crtl->args.pretend_args_size;
+#else
+ i += crtl->args.pretend_args_size;
+#endif
#ifdef ARGS_GROW_DOWNWARD
i = -i - size;
unsigned HOST_WIDE_INT k;
for (k = 0; k < size; k++)
- if (i + k < stored_args_map->n_bits
- && TEST_BIT (stored_args_map, i + k))
+ if (i + k < SBITMAP_SIZE (stored_args_map)
+ && bitmap_bit_p (stored_args_map, i + k))
return true;
}
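/* E.g. if a previously stored argument occupies bytes 16..23 of the
   outgoing argument block, bits 16..23 of stored_args_map are set, and a
   later argument load touching bytes 20..27 is reported as overlapping.  */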
int partial = args[i].partial;
int nregs;
int size = 0;
- rtx before_arg = get_last_insn ();
+ rtx_insn *before_arg = get_last_insn ();
/* Set non-negative if we must move a word at a time, even if
just one word (e.g, partial == 4 && mode == DFmode). Set
to -1 if we just use a normal move insn. This value can be
call only uses SIZE bytes at the msb end, but it doesn't
seem worth generating rtl to say that. */
reg = gen_rtx_REG (word_mode, REGNO (reg));
- x = expand_shift (LSHIFT_EXPR, word_mode, reg,
- build_int_cst (NULL_TREE, shift),
- reg, 1);
+ x = expand_shift (LSHIFT_EXPR, word_mode, reg, shift, reg, 1);
if (x != reg)
emit_move_insn (reg, x);
}
else if (partial == 0 || args[i].pass_on_stack)
{
- rtx mem = validize_mem (args[i].value);
+ rtx mem = validize_mem (copy_rtx (args[i].value));
/* Check for overlap with already clobbered argument area,
providing that this has non-zero size. */
: LSHIFT_EXPR;
emit_move_insn (x, tem);
- x = expand_shift (dir, word_mode, x,
- build_int_cst (NULL_TREE, shift),
- ri, 1);
+ x = expand_shift (dir, word_mode, x, shift, ri, 1);
if (x != ri)
emit_move_insn (ri, x);
}
if (GET_CODE (reg) == PARALLEL)
use_group_regs (call_fusage, reg);
else if (nregs == -1)
- use_reg (call_fusage, reg);
+ use_reg_mode (call_fusage, reg,
+ TYPE_MODE (TREE_TYPE (args[i].tree_value)));
else if (nregs > 0)
use_regs (call_fusage, REGNO (reg), nregs);
}
code = GET_CODE (x);
+ /* We need not check the operands of the CALL expression itself. */
+ if (code == CALL)
+ return 0;
+
if (code == MEM)
return mem_overlaps_already_clobbered_arg_p (XEXP (x, 0),
GET_MODE_SIZE (GET_MODE (x)));
slots, zero otherwise. */
static int
-check_sibcall_argument_overlap (rtx insn, struct arg_data *arg, int mark_stored_args_map)
+check_sibcall_argument_overlap (rtx_insn *insn, struct arg_data *arg,
+ int mark_stored_args_map)
{
int low, high;
#endif
for (high = low + arg->locate.size.constant; low < high; low++)
- SET_BIT (stored_args_map, low);
+ bitmap_set_bit (stored_args_map, low);
}
return insn != NULL_RTX;
}
as specified by LEFT_P. Return true if some action was needed. */
bool
-shift_return_value (enum machine_mode mode, bool left_p, rtx value)
+shift_return_value (machine_mode mode, bool left_p, rtx value)
{
HOST_WIDE_INT shift;
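  /* E.g. for a 3-byte MODE returned in a 64-bit VALUE register with
     LEFT_P false, the value is shifted right by 64 - 24 = 40 bits so it
     ends up at the least significant end (illustrative sizes).  */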
if (REG_P (x)
&& HARD_REGISTER_P (x)
- && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (REGNO (x))))
+ && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (x))))
{
/* Make sure that we generate a REG rather than a CONCAT.
Moves into CONCATs can need nontrivial instructions,
/* RTX for the function to be called. */
rtx funexp;
/* Sequence of insns to perform a normal "call". */
- rtx normal_call_insns = NULL_RTX;
+ rtx_insn *normal_call_insns = NULL;
/* Sequence of insns to perform a tail "call". */
- rtx tail_call_insns = NULL_RTX;
+ rtx_insn *tail_call_insns = NULL;
/* Data type of the function. */
tree funtype;
tree type_arg_types;
/* Size of arguments before any adjustments (such as rounding). */
int unadjusted_args_size;
/* Data on reg parms scanned so far. */
- CUMULATIVE_ARGS args_so_far;
+ CUMULATIVE_ARGS args_so_far_v;
+ cumulative_args_t args_so_far;
/* Nonzero if a reg parm has been scanned. */
int reg_parm_seen;
/* Nonzero if this is an indirect function call. */
(on machines that lack push insns), or 0 if space not preallocated. */
rtx argblock = 0;
- /* Mask of ECF_ flags. */
+ /* Mask of ECF_ and ERF_ flags. */
int flags = 0;
+ int return_flags = 0;
#ifdef REG_PARM_STACK_SPACE
/* Define the boundary of the register parm stack space that needs to be
saved, if any. */
{
fntype = TREE_TYPE (fndecl);
flags |= flags_from_decl_or_type (fndecl);
+ return_flags |= decl_return_flags (fndecl);
}
else
{
{
struct_value_size = int_size_in_bytes (rettype);
- if (target && MEM_P (target) && CALL_EXPR_RETURN_SLOT_OPT (exp))
+      /* Even if it is semantically safe to use the target as the return
+	 slot, it may not be sufficiently aligned for the return type.  */
+ if (CALL_EXPR_RETURN_SLOT_OPT (exp)
+ && target
+ && MEM_P (target)
+ && !(MEM_ALIGN (target) < TYPE_ALIGN (rettype)
+ && SLOW_UNALIGNED_ACCESS (TYPE_MODE (rettype),
+ MEM_ALIGN (target))))
structure_value_addr = XEXP (target, 0);
else
{
/* For variable-sized objects, we must be called with a target
specified. If we were to allocate space on the stack here,
we would have no way of knowing when to free it. */
- rtx d = assign_temp (rettype, 0, 1, 1);
-
- mark_temp_addr_taken (d);
+ rtx d = assign_temp (rettype, 1, 1);
structure_value_addr = XEXP (d, 0);
target = 0;
}
preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
if (fndecl)
{
- struct cgraph_rtl_info *i = cgraph_rtl_info (fndecl);
+ struct cgraph_rtl_info *i = cgraph_node::rtl_info (fndecl);
/* Without automatic stack alignment, we can't increase preferred
stack boundary. With automatic stack alignment, it is
unnecessary since unless we can guarantee that all callers will
calling convention than normal calls. The fourth argument in
INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
or not. */
- INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, fndecl, n_named_args);
+ INIT_CUMULATIVE_ARGS (args_so_far_v, funtype, NULL_RTX, fndecl, n_named_args);
+ args_so_far = pack_cumulative_args (&args_so_far_v);
/* Now possibly adjust the number of named args.
Normally, don't include the last named arg if anonymous args follow.
registers, so we must force them into memory. */
if (type_arg_types != 0
- && targetm.calls.strict_argument_naming (&args_so_far))
+ && targetm.calls.strict_argument_naming (args_so_far))
;
else if (type_arg_types != 0
- && ! targetm.calls.pretend_outgoing_varargs_named (&args_so_far))
+ && ! targetm.calls.pretend_outgoing_varargs_named (args_so_far))
/* Don't include the last named arg. */
--n_named_args;
else
initialize_argument_information (num_actuals, args, &args_size,
n_named_args, exp,
structure_value_addr_value, fndecl, fntype,
- &args_so_far, reg_parm_stack_space,
+ args_so_far, reg_parm_stack_space,
&old_stack_level, &old_pending_adj,
&must_preallocate, &flags,
&try_tail_call, CALL_FROM_THUNK_P (exp));
/* If outgoing reg parm stack space changes, we can not do sibcall. */
|| (OUTGOING_REG_PARM_STACK_SPACE (funtype)
!= OUTGOING_REG_PARM_STACK_SPACE (TREE_TYPE (current_function_decl)))
- || (reg_parm_stack_space != REG_PARM_STACK_SPACE (fndecl))
+ || (reg_parm_stack_space != REG_PARM_STACK_SPACE (current_function_decl))
#endif
/* Check whether the target is able to optimize the call
into a sibcall. */
return value. */
if (try_tail_call)
{
- enum machine_mode caller_mode, caller_promoted_mode;
- enum machine_mode callee_mode, callee_promoted_mode;
+ machine_mode caller_mode, caller_promoted_mode;
+ machine_mode callee_mode, callee_promoted_mode;
int caller_unsignedp, callee_unsignedp;
tree caller_res = DECL_RESULT (current_function_decl);
recursion "call". That way we know any adjustment after the tail
recursion call can be ignored if we indeed use the tail
call expansion. */
- int save_pending_stack_adjust = 0;
- int save_stack_pointer_delta = 0;
- rtx insns;
- rtx before_call, next_arg_reg, after_args;
+ saved_pending_stack_adjust save;
+ rtx_insn *insns, *before_call, *after_args;
+ rtx next_arg_reg;
if (pass == 0)
{
/* State variables we need to save and restore between
iterations. */
- save_pending_stack_adjust = pending_stack_adjust;
- save_stack_pointer_delta = stack_pointer_delta;
+ save_pending_stack_adjust (&save);
}
if (pass)
flags &= ~ECF_SIBCALL;
argblock = crtl->args.internal_arg_pointer;
argblock
#ifdef STACK_GROWS_DOWNWARD
- = plus_constant (argblock, crtl->args.pretend_args_size);
+ = plus_constant (Pmode, argblock, crtl->args.pretend_args_size);
#else
- = plus_constant (argblock, -crtl->args.pretend_args_size);
+ = plus_constant (Pmode, argblock, -crtl->args.pretend_args_size);
#endif
stored_args_map = sbitmap_alloc (args_size.constant);
- sbitmap_zero (stored_args_map);
+ bitmap_clear (stored_args_map);
}
/* If we have no actual push instructions, or shouldn't use them,
{
if (old_stack_level == 0)
{
- emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
+ emit_stack_save (SAVE_BLOCK, &old_stack_level);
old_stack_pointer_delta = stack_pointer_delta;
old_pending_adj = pending_stack_adjust;
pending_stack_adjust = 0;
stack_arg_under_construction = 0;
}
argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
+ if (flag_stack_usage_info)
+ current_function_has_unbounded_dynamic_stack_size = 1;
}
else
{
highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
needed);
#endif
- if (stack_usage_map_buf)
- free (stack_usage_map_buf);
+ free (stack_usage_map_buf);
stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
stack_usage_map = stack_usage_map_buf;
{
argblock = push_block (GEN_INT (needed), 0, 0);
#ifdef ARGS_GROW_DOWNWARD
- argblock = plus_constant (argblock, needed);
+ argblock = plus_constant (Pmode, argblock, needed);
#endif
}
: reg_parm_stack_space));
if (old_stack_level == 0)
{
- emit_stack_save (SAVE_BLOCK, &old_stack_level,
- NULL_RTX);
+ emit_stack_save (SAVE_BLOCK, &old_stack_level);
old_stack_pointer_delta = stack_pointer_delta;
old_pending_adj = pending_stack_adjust;
pending_stack_adjust = 0;
= stack_arg_under_construction;
stack_arg_under_construction = 0;
/* Make a new map for the new argument list. */
- if (stack_usage_map_buf)
- free (stack_usage_map_buf);
+ free (stack_usage_map_buf);
stack_usage_map_buf = XCNEWVEC (char, highest_outgoing_arg_in_use);
stack_usage_map = stack_usage_map_buf;
highest_outgoing_arg_in_use = 0;
}
- allocate_dynamic_stack_space (push_size, NULL_RTX,
- BITS_PER_UNIT);
+ /* We can pass TRUE as the 4th argument because we just
+ saved the stack pointer and will restore it right after
+ the call. */
+ allocate_dynamic_stack_space (push_size, 0,
+ BIGGEST_ALIGNMENT, true);
}
/* If argument evaluation might modify the stack pointer,
compute_argument_addresses (args, argblock, num_actuals);
- /* If we push args individually in reverse order, perform stack alignment
- before the first push (the last arg). */
- if (PUSH_ARGS_REVERSED && argblock == 0
+ /* Perform stack alignment before the first push (the last arg). */
+ if (argblock == 0
+ && adjusted_args_size.constant > reg_parm_stack_space
&& adjusted_args_size.constant != unadjusted_args_size)
{
/* When the stack adjustment is pending, we get better code
be deferred during the evaluation of the arguments. */
NO_DEFER_POP;
+ /* Record the maximum pushed stack space size. We need to delay
+ doing it this far to take into account the optimization done
+ by combine_pending_stack_adjustment_and_call. */
+ if (flag_stack_usage_info
+ && !ACCUMULATE_OUTGOING_ARGS
+ && pass
+ && adjusted_args_size.var == 0)
+ {
+ int pushed = adjusted_args_size.constant + pending_stack_adjust;
+ if (pushed > current_function_pushed_stack_size)
+ current_function_pushed_stack_size = pushed;
+ }
+
funexp = rtx_for_function_call (fndecl, addr);
/* Figure out the register where the value, if any, will come back. */
{
if (args[i].reg == 0 || args[i].pass_on_stack)
{
- rtx before_arg = get_last_insn ();
+ rtx_insn *before_arg = get_last_insn ();
+
+ /* We don't allow passing huge (> 2^30 B) arguments
+ by value. It would cause an overflow later on. */
+ if (adjusted_args_size.constant
+ >= (1 << (HOST_BITS_PER_INT - 2)))
+ {
+ sorry ("passing too large argument on stack");
+ continue;
+ }
if (store_one_arg (&args[i], argblock, flags,
adjusted_args_size.var != 0,
sibcall_failure = 1;
}
- if (((flags & ECF_CONST)
- || ((flags & ECF_PURE) && ACCUMULATE_OUTGOING_ARGS))
- && args[i].stack)
- call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
- gen_rtx_USE (VOIDmode,
- args[i].stack),
- call_fusage);
+ if (args[i].stack)
+ call_fusage
+ = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[i].tree_value)),
+ gen_rtx_USE (VOIDmode, args[i].stack),
+ call_fusage);
}
/* If we have a parm that is passed in registers but not in memory
for (i = 0; i < num_actuals; i++)
if (args[i].partial != 0 && ! args[i].pass_on_stack)
{
- rtx before_arg = get_last_insn ();
+ rtx_insn *before_arg = get_last_insn ();
if (store_one_arg (&args[i], argblock, flags,
adjusted_args_size.var != 0,
sibcall_failure = 1;
}
- /* If we pushed args in forward order, perform stack alignment
- after pushing the last arg. */
- if (!PUSH_ARGS_REVERSED && argblock == 0)
- anti_adjust_stack (GEN_INT (adjusted_args_size.constant
- - unadjusted_args_size));
-
/* If register arguments require space on the stack and stack space
was not preallocated, allocate stack space here for arguments
passed in registers. */
/* Set up next argument register. For sibling calls on machines
with register windows this should be the incoming register. */
if (pass == 0)
- next_arg_reg = targetm.calls.function_incoming_arg (&args_so_far,
+ next_arg_reg = targetm.calls.function_incoming_arg (args_so_far,
VOIDmode,
void_type_node,
true);
else
- next_arg_reg = targetm.calls.function_arg (&args_so_far,
+ next_arg_reg = targetm.calls.function_arg (args_so_far,
VOIDmode, void_type_node,
true);
+ if (pass == 1 && (return_flags & ERF_RETURNS_ARG))
+ {
+ int arg_nr = return_flags & ERF_RETURN_ARG_MASK;
+ arg_nr = num_actuals - arg_nr - 1;
+ if (arg_nr >= 0
+ && arg_nr < num_actuals
+ && args[arg_nr].reg
+ && valreg
+ && REG_P (valreg)
+ && GET_MODE (args[arg_nr].reg) == GET_MODE (valreg))
+ call_fusage
+ = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[arg_nr].tree_value)),
+ gen_rtx_SET (VOIDmode, valreg, args[arg_nr].reg),
+ call_fusage);
+ }
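+      /* E.g. for a callee whose "fn spec" string starts with '1' (a
+	 memcpy-like function returning its first argument), the SET above
+	 records in CALL_FUSAGE that VALREG leaves the call holding the
+	 same value as the first argument's register.  */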
/* All arguments and registers used for the call must be set up by
now! */
emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
adjusted_args_size.constant, struct_value_size,
next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
- flags, & args_so_far);
+ flags, args_so_far);
+
+ if (flag_use_caller_save)
+ {
+ rtx_call_insn *last;
+ rtx datum = NULL_RTX;
+ if (fndecl != NULL_TREE)
+ {
+ datum = XEXP (DECL_RTL (fndecl), 0);
+ gcc_assert (datum != NULL_RTX
+ && GET_CODE (datum) == SYMBOL_REF);
+ }
+ last = last_call_insn ();
+ add_reg_note (last, REG_CALL_DECL, datum);
+ }
/* If the call setup or the call itself overlaps with anything
of the argument setup we probably clobbered our call address.
group load/store machinery below. */
if (!structure_value_addr
&& !pcc_struct_value
+ && TYPE_MODE (rettype) != VOIDmode
&& TYPE_MODE (rettype) != BLKmode
+ && REG_P (valreg)
&& targetm.calls.return_in_msb (rettype))
{
if (shift_return_value (TYPE_MODE (rettype), false, valreg))
if (pass && (flags & ECF_MALLOC))
{
rtx temp = gen_reg_rtx (GET_MODE (valreg));
- rtx last, insns;
+ rtx_insn *last, *insns;
/* The return value from a malloc-like function is a pointer. */
if (TREE_CODE (rettype) == POINTER_TYPE)
- mark_reg_pointer (temp, BIGGEST_ALIGNMENT);
+ mark_reg_pointer (temp, MALLOC_ABI_ALIGNMENT);
emit_move_insn (temp, valreg);
immediately after the CALL_INSN. Some ports emit more
than just a CALL_INSN above, so we must search for it here. */
- rtx last = get_last_insn ();
+ rtx_insn *last = get_last_insn ();
while (!CALL_P (last))
{
last = PREV_INSN (last);
else if (GET_CODE (valreg) == PARALLEL)
{
if (target == 0)
- {
- /* This will only be assigned once, so it can be readonly. */
- tree nt = build_qualified_type (rettype,
- (TYPE_QUALS (rettype)
- | TYPE_QUAL_CONST));
-
- target = assign_temp (nt, 0, 1, 1);
- }
-
- if (! rtx_equal_p (target, valreg))
+ target = emit_group_move_into_temps (valreg);
+ else if (rtx_equal_p (target, valreg))
+ ;
+ else if (GET_CODE (target) == PARALLEL)
+	/* Handle the result of an emit_group_move_into_temps
+	   call in the previous pass.  */
+ emit_group_move (target, valreg);
+ else
emit_group_store (target, valreg, rettype,
int_size_in_bytes (rettype));
-
- /* We can not support sibling calls for this case. */
- sibcall_failure = 1;
}
else if (target
&& GET_MODE (target) == TYPE_MODE (rettype)
sibcall_failure = 1;
}
}
- else if (TYPE_MODE (rettype) == BLKmode)
- {
- rtx val = valreg;
- if (GET_MODE (val) != BLKmode)
- val = avoid_likely_spilled_reg (val);
- target = copy_blkmode_from_reg (target, val, rettype);
-
- /* We can not support sibling calls for this case. */
- sibcall_failure = 1;
- }
else
target = copy_to_reg (avoid_likely_spilled_reg (valreg));
tree type = rettype;
int unsignedp = TYPE_UNSIGNED (type);
int offset = 0;
- enum machine_mode pmode;
+ machine_mode pmode;
/* Ensure we promote as expected, and get the new unsignedness. */
pmode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
SUBREG_PROMOTED_VAR_P (target) = 1;
- SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp);
+ SUBREG_PROMOTED_SET (target, unsignedp);
}
/* If size of args is variable or this was a constructor call for a stack
if (old_stack_level)
{
- emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
+ rtx_insn *prev = get_last_insn ();
+
+ emit_stack_restore (SAVE_BLOCK, old_stack_level);
stack_pointer_delta = old_stack_pointer_delta;
+
+ fixup_args_size_notes (prev, get_last_insn (), stack_pointer_delta);
+
pending_stack_adjust = old_pending_adj;
old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
stack_arg_under_construction = old_stack_arg_under_construction;
for (i = 0; i < num_actuals; i++)
if (args[i].save_area)
{
- enum machine_mode save_mode = GET_MODE (args[i].save_area);
+ machine_mode save_mode = GET_MODE (args[i].save_area);
rtx stack_area
= gen_rtx_MEM (save_mode,
memory_address (save_mode,
/* Free up storage we no longer need. */
for (i = 0; i < num_actuals; ++i)
- if (args[i].aligned_regs)
- free (args[i].aligned_regs);
+ free (args[i].aligned_regs);
insns = get_insns ();
end_sequence ();
/* Restore the pending stack adjustment now that we have
finished generating the sibling call sequence. */
- pending_stack_adjust = save_pending_stack_adjust;
- stack_pointer_delta = save_stack_pointer_delta;
+ restore_pending_stack_adjust (&save);
/* Prepare arg structure for next iteration. */
for (i = 0; i < num_actuals; i++)
}
sbitmap_free (stored_args_map);
+ internal_arg_pointer_exp_state.scan_start = NULL;
+ internal_arg_pointer_exp_state.cache.release ();
}
else
{
/* If something prevents making this a sibling call,
zero out the sequence. */
if (sibcall_failure)
- tail_call_insns = NULL_RTX;
+ tail_call_insns = NULL;
else
break;
}
currently_expanding_call--;
- if (stack_usage_map_buf)
- free (stack_usage_map_buf);
+ free (stack_usage_map_buf);
return target;
}
void
fixup_tail_calls (void)
{
- rtx insn;
+ rtx_insn *insn;
for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
{
static rtx
emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
enum libcall_type fn_type,
- enum machine_mode outmode, int nargs, va_list p)
+ machine_mode outmode, int nargs, va_list p)
{
/* Total size in bytes of all the stack-parms scanned so far. */
struct args_size args_size;
isn't present here, so we default to native calling abi here. */
tree fndecl ATTRIBUTE_UNUSED = NULL_TREE; /* library calls default to host calling abi ? */
tree fntype ATTRIBUTE_UNUSED = NULL_TREE; /* library calls default to host calling abi ? */
- int inc;
int count;
rtx argblock = 0;
- CUMULATIVE_ARGS args_so_far;
+ CUMULATIVE_ARGS args_so_far_v;
+ cumulative_args_t args_so_far;
struct arg
{
rtx value;
- enum machine_mode mode;
+ machine_mode mode;
rtx reg;
int partial;
struct locate_and_pad_arg_data locate;
int flags;
int reg_parm_stack_space = 0;
int needed;
- rtx before_call;
+ rtx_insn *before_call;
tree tfom; /* type_for_mode (outmode, 0) */
#ifdef REG_PARM_STACK_SPACE
if (value != 0 && MEM_P (value))
mem_value = value;
else
- mem_value = assign_temp (tfom, 0, 1, 1);
+ mem_value = assign_temp (tfom, 1, 1);
#endif
/* This call returns a big structure. */
flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
memset (argvec, 0, (nargs + 1) * sizeof (struct arg));
#ifdef INIT_CUMULATIVE_LIBCALL_ARGS
- INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far, outmode, fun);
+ INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far_v, outmode, fun);
#else
- INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0, nargs);
+ INIT_CUMULATIVE_ARGS (args_so_far_v, NULL_TREE, fun, 0, nargs);
#endif
+ args_so_far = pack_cumulative_args (&args_so_far_v);
args_size.constant = 0;
args_size.var = 0;
/* Make sure it is a reasonable operand for a move or push insn. */
if (!REG_P (addr) && !MEM_P (addr)
- && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
+ && !(CONSTANT_P (addr)
+ && targetm.legitimate_constant_p (Pmode, addr)))
addr = force_operand (addr, NULL_RTX);
argvec[count].value = addr;
argvec[count].mode = Pmode;
argvec[count].partial = 0;
- argvec[count].reg = targetm.calls.function_arg (&args_so_far,
+ argvec[count].reg = targetm.calls.function_arg (args_so_far,
Pmode, NULL_TREE, true);
- gcc_assert (targetm.calls.arg_partial_bytes (&args_so_far, Pmode,
+ gcc_assert (targetm.calls.arg_partial_bytes (args_so_far, Pmode,
NULL_TREE, 1) == 0);
locate_and_pad_parm (Pmode, NULL_TREE,
#else
argvec[count].reg != 0,
#endif
- 0, NULL_TREE, &args_size, &argvec[count].locate);
+ reg_parm_stack_space, 0,
+ NULL_TREE, &args_size, &argvec[count].locate);
if (argvec[count].reg == 0 || argvec[count].partial != 0
|| reg_parm_stack_space > 0)
args_size.constant += argvec[count].locate.size.constant;
- targetm.calls.function_arg_advance (&args_so_far, Pmode, (tree) 0, true);
+ targetm.calls.function_arg_advance (args_so_far, Pmode, (tree) 0, true);
count++;
}
for (; count < nargs; count++)
{
rtx val = va_arg (p, rtx);
- enum machine_mode mode = (enum machine_mode) va_arg (p, int);
+ machine_mode mode = (machine_mode) va_arg (p, int);
+ int unsigned_p = 0;
/* We cannot convert the arg value to the mode the library wants here;
must do it earlier where we know the signedness of the arg. */
/* Make sure it is a reasonable operand for a move or push insn. */
if (!REG_P (val) && !MEM_P (val)
- && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
+ && !(CONSTANT_P (val) && targetm.legitimate_constant_p (mode, val)))
val = force_operand (val, NULL_RTX);
- if (pass_by_reference (&args_so_far, mode, NULL_TREE, 1))
+ if (pass_by_reference (&args_so_far_v, mode, NULL_TREE, 1))
{
rtx slot;
int must_copy
- = !reference_callee_copied (&args_so_far, mode, NULL_TREE, 1);
+ = !reference_callee_copied (&args_so_far_v, mode, NULL_TREE, 1);
/* If this was a CONST function, it is now PURE since it now
reads memory. */
}
if (MEM_P (val) && !must_copy)
- slot = val;
+ {
+ tree val_expr = MEM_EXPR (val);
+ if (val_expr)
+ mark_addressable (val_expr);
+ slot = val;
+ }
else
{
slot = assign_temp (lang_hooks.types.type_for_mode (mode, 0),
- 0, 1, 1);
+ 1, 1);
emit_move_insn (slot, val);
}
val = force_operand (XEXP (slot, 0), NULL_RTX);
}
- argvec[count].value = val;
+ mode = promote_function_mode (NULL_TREE, mode, &unsigned_p, NULL_TREE, 0);
argvec[count].mode = mode;
-
- argvec[count].reg = targetm.calls.function_arg (&args_so_far, mode,
+ argvec[count].value = convert_modes (mode, GET_MODE (val), val, unsigned_p);
+ argvec[count].reg = targetm.calls.function_arg (args_so_far, mode,
NULL_TREE, true);
argvec[count].partial
- = targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL_TREE, 1);
+ = targetm.calls.arg_partial_bytes (args_so_far, mode, NULL_TREE, 1);
- locate_and_pad_parm (mode, NULL_TREE,
+ if (argvec[count].reg == 0
+ || argvec[count].partial != 0
+ || reg_parm_stack_space > 0)
+ {
+ locate_and_pad_parm (mode, NULL_TREE,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
- 1,
+ 1,
#else
- argvec[count].reg != 0,
+ argvec[count].reg != 0,
+#endif
+ reg_parm_stack_space, argvec[count].partial,
+ NULL_TREE, &args_size, &argvec[count].locate);
+ args_size.constant += argvec[count].locate.size.constant;
+ gcc_assert (!argvec[count].locate.size.var);
+ }
+#ifdef BLOCK_REG_PADDING
+ else
+ /* The argument is passed entirely in registers. See at which
+ end it should be padded. */
+ argvec[count].locate.where_pad =
+ BLOCK_REG_PADDING (mode, NULL_TREE,
+ GET_MODE_SIZE (mode) <= UNITS_PER_WORD);
#endif
- argvec[count].partial,
- NULL_TREE, &args_size, &argvec[count].locate);
-
- gcc_assert (!argvec[count].locate.size.var);
-
- if (argvec[count].reg == 0 || argvec[count].partial != 0
- || reg_parm_stack_space > 0)
- args_size.constant += argvec[count].locate.size.constant;
- targetm.calls.function_arg_advance (&args_so_far, mode, (tree) 0, true);
+ targetm.calls.function_arg_advance (args_so_far, mode, (tree) 0, true);
}
/* If this machine requires an external definition for library
if (args_size.constant > crtl->outgoing_args_size)
crtl->outgoing_args_size = args_size.constant;
+ if (flag_stack_usage_info && !ACCUMULATE_OUTGOING_ARGS)
+ {
+ int pushed = args_size.constant + pending_stack_adjust;
+ if (pushed > current_function_pushed_stack_size)
+ current_function_pushed_stack_size = pushed;
+ }
+
if (ACCUMULATE_OUTGOING_ARGS)
{
/* Since the stack pointer will never be pushed, it is possible for
use virtuals anyway, they won't match the rtl patterns. */
if (virtuals_instantiated)
- argblock = plus_constant (stack_pointer_rtx, STACK_POINTER_OFFSET);
+ argblock = plus_constant (Pmode, stack_pointer_rtx,
+ STACK_POINTER_OFFSET);
else
argblock = virtual_outgoing_args_rtx;
}
argblock = push_block (GEN_INT (args_size.constant), 0, 0);
}
- /* If we push args individually in reverse order, perform stack alignment
+  /* We push args individually in reverse order; perform stack alignment
before the first push (the last arg). */
- if (argblock == 0 && PUSH_ARGS_REVERSED)
+ if (argblock == 0)
anti_adjust_stack (GEN_INT (args_size.constant
- original_args_size.constant));
- if (PUSH_ARGS_REVERSED)
- {
- inc = -1;
- argnum = nargs - 1;
- }
- else
- {
- inc = 1;
- argnum = 0;
- }
+ argnum = nargs - 1;
#ifdef REG_PARM_STACK_SPACE
if (ACCUMULATE_OUTGOING_ARGS)
/* ARGNUM indexes the ARGVEC array in the order in which the arguments
are to be pushed. */
- for (count = 0; count < nargs; count++, argnum += inc)
+ for (count = 0; count < nargs; count++, argnum--)
{
- enum machine_mode mode = argvec[argnum].mode;
+ machine_mode mode = argvec[argnum].mode;
rtx val = argvec[argnum].value;
rtx reg = argvec[argnum].reg;
int partial = argvec[argnum].partial;
if (! (reg != 0 && partial == 0))
{
+ rtx use;
+
if (ACCUMULATE_OUTGOING_ARGS)
{
/* If this is being stored into a pre-allocated, fixed-size,
/* We need to make a save area. */
unsigned int size
= argvec[argnum].locate.size.constant * BITS_PER_UNIT;
- enum machine_mode save_mode
+ machine_mode save_mode
= mode_for_size (size, MODE_INT, 1);
rtx adr
- = plus_constant (argblock,
+ = plus_constant (Pmode, argblock,
argvec[argnum].locate.offset.constant);
rtx stack_area
= gen_rtx_MEM (save_mode, memory_address (save_mode, adr));
{
argvec[argnum].save_area
= assign_stack_temp (BLKmode,
- argvec[argnum].locate.size.constant,
- 0);
+				   argvec[argnum].locate.size.constant);
- emit_block_move (validize_mem (argvec[argnum].save_area),
+ emit_block_move (validize_mem
+ (copy_rtx (argvec[argnum].save_area)),
stack_area,
GEN_INT (argvec[argnum].locate.size.constant),
BLOCK_OP_CALL_PARM);
NO_DEFER_POP;
- if ((flags & ECF_CONST)
- || ((flags & ECF_PURE) && ACCUMULATE_OUTGOING_ARGS))
- {
- rtx use;
-
- /* Indicate argument access so that alias.c knows that these
- values are live. */
- if (argblock)
- use = plus_constant (argblock,
- argvec[argnum].locate.offset.constant);
- else
- /* When arguments are pushed, trying to tell alias.c where
- exactly this argument is won't work, because the
- auto-increment causes confusion. So we merely indicate
- that we access something with a known mode somewhere on
- the stack. */
- use = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
- gen_rtx_SCRATCH (Pmode));
- use = gen_rtx_MEM (argvec[argnum].mode, use);
- use = gen_rtx_USE (VOIDmode, use);
- call_fusage = gen_rtx_EXPR_LIST (VOIDmode, use, call_fusage);
- }
+ /* Indicate argument access so that alias.c knows that these
+ values are live. */
+ if (argblock)
+ use = plus_constant (Pmode, argblock,
+ argvec[argnum].locate.offset.constant);
+ else
+ /* When arguments are pushed, trying to tell alias.c where
+ exactly this argument is won't work, because the
+ auto-increment causes confusion. So we merely indicate
+ that we access something with a known mode somewhere on
+ the stack. */
+ use = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
+ gen_rtx_SCRATCH (Pmode));
+ use = gen_rtx_MEM (argvec[argnum].mode, use);
+ use = gen_rtx_USE (VOIDmode, use);
+ call_fusage = gen_rtx_EXPR_LIST (VOIDmode, use, call_fusage);
}
}
- /* If we pushed args in forward order, perform stack alignment
- after pushing the last arg. */
- if (argblock == 0 && !PUSH_ARGS_REVERSED)
- anti_adjust_stack (GEN_INT (args_size.constant
- - original_args_size.constant));
-
- if (PUSH_ARGS_REVERSED)
- argnum = nargs - 1;
- else
- argnum = 0;
+ argnum = nargs - 1;
fun = prepare_call_address (NULL, fun, NULL, &call_fusage, 0, 0);
/* ARGNUM indexes the ARGVEC array in the order in which the arguments
are to be pushed. */
- for (count = 0; count < nargs; count++, argnum += inc)
+ for (count = 0; count < nargs; count++, argnum--)
{
- enum machine_mode mode = argvec[argnum].mode;
+ machine_mode mode = argvec[argnum].mode;
rtx val = argvec[argnum].value;
rtx reg = argvec[argnum].reg;
int partial = argvec[argnum].partial;
-
+#ifdef BLOCK_REG_PADDING
+ int size = 0;
+#endif
+
/* Handle calls that pass values in multiple non-contiguous
locations. The PA64 has examples of this for library calls. */
if (reg != 0 && GET_CODE (reg) == PARALLEL)
emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode));
else if (reg != 0 && partial == 0)
- emit_move_insn (reg, val);
+ {
+ emit_move_insn (reg, val);
+#ifdef BLOCK_REG_PADDING
+ size = GET_MODE_SIZE (argvec[argnum].mode);
+
+ /* Copied from load_register_parameters. */
+
+ /* Handle case where we have a value that needs shifting
+ up to the msb. eg. a QImode value and we're padding
+ upward on a BYTES_BIG_ENDIAN machine. */
+ if (size < UNITS_PER_WORD
+ && (argvec[argnum].locate.where_pad
+ == (BYTES_BIG_ENDIAN ? upward : downward)))
+ {
+ rtx x;
+ int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
+
+ /* Assigning REG here rather than a temp makes CALL_FUSAGE
+ report the whole reg as used. Strictly speaking, the
+ call only uses SIZE bytes at the msb end, but it doesn't
+ seem worth generating rtl to say that. */
+ reg = gen_rtx_REG (word_mode, REGNO (reg));
+ x = expand_shift (LSHIFT_EXPR, word_mode, reg, shift, reg, 1);
+ if (x != reg)
+ emit_move_insn (reg, x);
+ }
+#endif
+ }
NO_DEFER_POP;
}
build_function_type (tfom, NULL_TREE),
original_args_size.constant, args_size.constant,
struct_value_size,
- targetm.calls.function_arg (&args_so_far,
+ targetm.calls.function_arg (args_so_far,
VOIDmode, void_type_node, true),
valreg,
- old_inhibit_defer_pop + 1, call_fusage, flags, & args_so_far);
+ old_inhibit_defer_pop + 1, call_fusage, flags, args_so_far);
+
+ if (flag_use_caller_save)
+ {
+ rtx last, datum = orgfun;
+ gcc_assert (GET_CODE (datum) == SYMBOL_REF);
+ last = last_call_insn ();
+ add_reg_note (last, REG_CALL_DECL, datum);
+ }
+
+ /* Right-shift returned value if necessary. */
+ if (!pcc_struct_value
+ && TYPE_MODE (tfom) != BLKmode
+ && targetm.calls.return_in_msb (tfom))
+ {
+ shift_return_value (TYPE_MODE (tfom), false, valreg);
+ valreg = gen_rtx_REG (TYPE_MODE (tfom), REGNO (valreg));
+ }
/* For calls to `setjmp', etc., inform function.c:setjmp_warnings
that it should complain if nonvolatile values are live. For
functions that cannot return, inform flow that control does not
fall through. */
-
if (flags & ECF_NORETURN)
{
/* The barrier note must be emitted
immediately after the CALL_INSN. Some ports emit more than
just a CALL_INSN above, so we must search for it here. */
-
- rtx last = get_last_insn ();
+ rtx_insn *last = get_last_insn ();
while (!CALL_P (last))
{
last = PREV_INSN (last);
emit_barrier_after (last);
}
+ /* Consider that "regular" libcalls, i.e. all of them except for LCT_THROW
+ and LCT_RETURNS_TWICE, cannot perform non-local gotos. */
+ if (flags & ECF_NOTHROW)
+ {
+ rtx_insn *last = get_last_insn ();
+ while (!CALL_P (last))
+ {
+ last = PREV_INSN (last);
+ /* There was no CALL_INSN? */
+ gcc_assert (last != before_call);
+ }
+
+ make_reg_eh_region_note_nothrow_nononlocal (last);
+ }
+
/* Now restore inhibit_defer_pop to its actual original value. */
OK_DEFER_POP;
for (count = 0; count < nargs; count++)
if (argvec[count].save_area)
{
- enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
- rtx adr = plus_constant (argblock,
+ machine_mode save_mode = GET_MODE (argvec[count].save_area);
+ rtx adr = plus_constant (Pmode, argblock,
argvec[count].locate.offset.constant);
rtx stack_area = gen_rtx_MEM (save_mode,
memory_address (save_mode, adr));
if (save_mode == BLKmode)
emit_block_move (stack_area,
- validize_mem (argvec[count].save_area),
+ validize_mem
+ (copy_rtx (argvec[count].save_area)),
GEN_INT (argvec[count].locate.size.constant),
BLOCK_OP_CALL_PARM);
else
stack_usage_map = initial_stack_usage_map;
}
- if (stack_usage_map_buf)
- free (stack_usage_map_buf);
+ free (stack_usage_map_buf);
return value;
void
emit_library_call (rtx orgfun, enum libcall_type fn_type,
- enum machine_mode outmode, int nargs, ...)
+ machine_mode outmode, int nargs, ...)
{
va_list p;
rtx
emit_library_call_value (rtx orgfun, rtx value,
enum libcall_type fn_type,
- enum machine_mode outmode, int nargs, ...)
+ machine_mode outmode, int nargs, ...)
{
rtx result;
va_list p;
{
/* We need to make a save area. */
unsigned int size = arg->locate.size.constant * BITS_PER_UNIT;
- enum machine_mode save_mode = mode_for_size (size, MODE_INT, 1);
+ machine_mode save_mode = mode_for_size (size, MODE_INT, 1);
rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
rtx stack_area = gen_rtx_MEM (save_mode, adr);
if (save_mode == BLKmode)
{
- tree ot = TREE_TYPE (arg->tree_value);
- tree nt = build_qualified_type (ot, (TYPE_QUALS (ot)
- | TYPE_QUAL_CONST));
-
- arg->save_area = assign_temp (nt, 0, 1, 1);
+ arg->save_area
+ = assign_temp (TREE_TYPE (arg->tree_value), 1, 1);
preserve_temp_slots (arg->save_area);
- emit_block_move (validize_mem (arg->save_area), stack_area,
+ emit_block_move (validize_mem (copy_rtx (arg->save_area)),
+ stack_area,
GEN_INT (arg->locate.size.constant),
BLOCK_OP_CALL_PARM);
}
be deferred during the rest of the arguments. */
NO_DEFER_POP;
- /* Free any temporary slots made in processing this argument. Show
- that we might have taken the address of something and pushed that
- as an operand. */
- preserve_temp_slots (NULL_RTX);
- free_temp_slots ();
+ /* Free any temporary slots made in processing this argument. */
pop_temp_slots ();
return sibcall_failure;
/* Nonzero if we do not know how to pass TYPE solely in registers. */
bool
-must_pass_in_stack_var_size (enum machine_mode mode ATTRIBUTE_UNUSED,
+must_pass_in_stack_var_size (machine_mode mode ATTRIBUTE_UNUSED,
const_tree type)
{
if (!type)
/* ??? Should be able to merge these two by examining BLOCK_REG_PADDING. */
bool
-must_pass_in_stack_var_size_or_pad (enum machine_mode mode, const_tree type)
+must_pass_in_stack_var_size_or_pad (machine_mode mode, const_tree type)
{
if (!type)
return false;