]> gcc.gnu.org Git - gcc.git/blame - gcc/expr.c
re PR c++/27359 (ICE with missing initialization of iteration variable in parallel...
[gcc.git] / gcc / expr.c
CommitLineData
bbf6f052 1/* Convert tree expression to rtl instructions, for GNU compiler.
8752c357 2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
ef7befe0
BE
3 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation,
4 Inc.
bbf6f052 5
1322177d 6This file is part of GCC.
bbf6f052 7
1322177d
LB
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
10Software Foundation; either version 2, or (at your option) any later
11version.
bbf6f052 12
1322177d
LB
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
bbf6f052
RK
17
18You should have received a copy of the GNU General Public License
1322177d 19along with GCC; see the file COPYING. If not, write to the Free
366ccddb
KC
20Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
2102110-1301, USA. */
bbf6f052 22
bbf6f052 23#include "config.h"
670ee920 24#include "system.h"
4977bab6
ZW
25#include "coretypes.h"
26#include "tm.h"
ca695ac9 27#include "machmode.h"
11ad4784 28#include "real.h"
bbf6f052
RK
29#include "rtl.h"
30#include "tree.h"
31#include "flags.h"
bf76bb5a 32#include "regs.h"
4ed67205 33#include "hard-reg-set.h"
3d195391 34#include "except.h"
bbf6f052 35#include "function.h"
bbf6f052 36#include "insn-config.h"
34e81b5a 37#include "insn-attr.h"
3a94c984 38/* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
d6f4ec51 39#include "expr.h"
e78d8e51
ZW
40#include "optabs.h"
41#include "libfuncs.h"
bbf6f052 42#include "recog.h"
3ef1eef4 43#include "reload.h"
bbf6f052 44#include "output.h"
bbf6f052 45#include "typeclass.h"
10f0ad3d 46#include "toplev.h"
d7db6646 47#include "ggc.h"
ac79cd5a 48#include "langhooks.h"
e2c49ac2 49#include "intl.h"
b1474bb7 50#include "tm_p.h"
6de9cd9a 51#include "tree-iterator.h"
2f8e398b
PB
52#include "tree-pass.h"
53#include "tree-flow.h"
c988af2b 54#include "target.h"
2f8e398b 55#include "timevar.h"
bbf6f052 56
bbf6f052 57/* Decide whether a function's arguments should be processed
bbc8a071
RK
58 from first to last or from last to first.
59
60 They should if the stack and args grow in opposite directions, but
61 only if we have push insns. */
bbf6f052 62
bbf6f052 63#ifdef PUSH_ROUNDING
bbc8a071 64
2da4124d 65#ifndef PUSH_ARGS_REVERSED
3319a347 66#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
3a94c984 67#define PUSH_ARGS_REVERSED /* If it's last to first. */
bbf6f052 68#endif
2da4124d 69#endif
bbc8a071 70
bbf6f052
RK
71#endif
72
73#ifndef STACK_PUSH_CODE
74#ifdef STACK_GROWS_DOWNWARD
75#define STACK_PUSH_CODE PRE_DEC
76#else
77#define STACK_PUSH_CODE PRE_INC
78#endif
79#endif
80
4ca79136 81
bbf6f052
RK
82/* If this is nonzero, we do not bother generating VOLATILE
83 around volatile memory references, and we are willing to
84 output indirect addresses. If cse is to follow, we reject
85 indirect addresses so a useful potential cse is generated;
86 if it is used only once, instruction combination will produce
87 the same indirect address eventually. */
88int cse_not_expected;
89
4969d05d
RK
90/* This structure is used by move_by_pieces to describe the move to
91 be performed. */
4969d05d
RK
92struct move_by_pieces
93{
94 rtx to;
95 rtx to_addr;
96 int autinc_to;
97 int explicit_inc_to;
98 rtx from;
99 rtx from_addr;
100 int autinc_from;
101 int explicit_inc_from;
3bdf5ad1
RK
102 unsigned HOST_WIDE_INT len;
103 HOST_WIDE_INT offset;
4969d05d
RK
104 int reverse;
105};
106
57814e5e 107/* This structure is used by store_by_pieces to describe the clear to
9de08200
RK
108 be performed. */
109
57814e5e 110struct store_by_pieces
9de08200
RK
111{
112 rtx to;
113 rtx to_addr;
114 int autinc_to;
115 int explicit_inc_to;
3bdf5ad1
RK
116 unsigned HOST_WIDE_INT len;
117 HOST_WIDE_INT offset;
502b8322 118 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
fad205ff 119 void *constfundata;
9de08200
RK
120 int reverse;
121};
122
502b8322 123static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
45d78e7f 124 unsigned int,
502b8322
AJ
125 unsigned int);
126static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
127 struct move_by_pieces *);
128static bool block_move_libcall_safe_for_call_parm (void);
70128ad9 129static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
8148fe65 130static rtx emit_block_move_via_libcall (rtx, rtx, rtx, bool);
502b8322
AJ
131static tree emit_block_move_libcall_fn (int);
132static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
133static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
134static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
135static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
136static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
137 struct store_by_pieces *);
8148fe65 138static rtx clear_storage_via_libcall (rtx, rtx, bool);
502b8322
AJ
139static tree clear_storage_libcall_fn (int);
140static rtx compress_float_constant (rtx, rtx);
141static rtx get_subtarget (rtx);
502b8322
AJ
142static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
143 HOST_WIDE_INT, enum machine_mode,
144 tree, tree, int, int);
145static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
146static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
f45bdcd0 147 tree, tree, int);
502b8322 148
d50a16c4 149static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
502b8322
AJ
150
151static int is_aligning_offset (tree, tree);
eb698c58
RS
152static void expand_operands (tree, tree, rtx, rtx*, rtx*,
153 enum expand_modifier);
bc15d0ef 154static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
502b8322 155static rtx do_store_flag (tree, rtx, enum machine_mode, int);
21d93687 156#ifdef PUSH_ROUNDING
502b8322 157static void emit_single_push_insn (enum machine_mode, rtx, tree);
21d93687 158#endif
502b8322
AJ
159static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
160static rtx const_vector_from_tree (tree);
57aaef66 161static void write_complex_part (rtx, rtx, bool);
bbf6f052 162
4fa52007
RK
163/* Record for each mode whether we can move a register directly to or
164 from an object of that mode in memory. If we can't, we won't try
165 to use that mode directly when accessing a field of that mode. */
166
167static char direct_load[NUM_MACHINE_MODES];
168static char direct_store[NUM_MACHINE_MODES];
169
51286de6
RH
170/* Record for each mode whether we can float-extend from memory. */
171
172static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
173
fbe1758d 174/* This macro is used to determine whether move_by_pieces should be called
3a94c984 175 to perform a structure copy. */
fbe1758d 176#ifndef MOVE_BY_PIECES_P
19caa751 177#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
45d78e7f
JJ
178 (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
179 < (unsigned int) MOVE_RATIO)
fbe1758d
AM
180#endif
181
78762e3b
RS
182/* This macro is used to determine whether clear_by_pieces should be
183 called to clear storage. */
184#ifndef CLEAR_BY_PIECES_P
185#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
45d78e7f
JJ
186 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
187 < (unsigned int) CLEAR_RATIO)
78762e3b
RS
188#endif
189
4977bab6
ZW
190/* This macro is used to determine whether store_by_pieces should be
191 called to "memset" storage with byte values other than zero, or
192 to "memcpy" storage when the source is a constant string. */
193#ifndef STORE_BY_PIECES_P
45d78e7f
JJ
194#define STORE_BY_PIECES_P(SIZE, ALIGN) \
195 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
196 < (unsigned int) MOVE_RATIO)
4977bab6
ZW
197#endif
198
266007a7 199/* This array records the insn_code of insns to perform block moves. */
70128ad9 200enum insn_code movmem_optab[NUM_MACHINE_MODES];
266007a7 201
57e84f18
AS
202/* This array records the insn_code of insns to perform block sets. */
203enum insn_code setmem_optab[NUM_MACHINE_MODES];
9de08200 204
40c1d5f8 205/* These arrays record the insn_code of three different kinds of insns
118355a0
ZW
206 to perform block compares. */
207enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
40c1d5f8 208enum insn_code cmpstrn_optab[NUM_MACHINE_MODES];
118355a0
ZW
209enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
210
48ae6c13
RH
211/* Synchronization primitives. */
212enum insn_code sync_add_optab[NUM_MACHINE_MODES];
213enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
214enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
215enum insn_code sync_and_optab[NUM_MACHINE_MODES];
216enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
217enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
218enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
219enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
220enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
221enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
222enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
223enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
224enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
225enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
226enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
227enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
228enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
229enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
230enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
231enum insn_code sync_compare_and_swap_cc[NUM_MACHINE_MODES];
232enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
233enum insn_code sync_lock_release[NUM_MACHINE_MODES];
234
cc2902df 235/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
e87b4f3f
RS
236
237#ifndef SLOW_UNALIGNED_ACCESS
e1565e65 238#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
e87b4f3f 239#endif
bbf6f052 240\f
4fa52007 241/* This is run once per compilation to set up which modes can be used
266007a7 242 directly in memory and to initialize the block move optab. */
4fa52007
RK
243
244void
502b8322 245init_expr_once (void)
4fa52007
RK
246{
247 rtx insn, pat;
248 enum machine_mode mode;
cff48d8f 249 int num_clobbers;
9ec36da5 250 rtx mem, mem1;
bf1660a6 251 rtx reg;
9ec36da5 252
e2549997
RS
253 /* Try indexing by frame ptr and try by stack ptr.
254 It is known that on the Convex the stack ptr isn't a valid index.
255 With luck, one or the other is valid on any machine. */
9ec36da5
JL
256 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
257 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
4fa52007 258
bf1660a6
JL
259 /* A scratch register we can modify in-place below to avoid
260 useless RTL allocations. */
261 reg = gen_rtx_REG (VOIDmode, -1);
262
1f8c3c5b
RH
263 insn = rtx_alloc (INSN);
264 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
265 PATTERN (insn) = pat;
4fa52007
RK
266
267 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
268 mode = (enum machine_mode) ((int) mode + 1))
269 {
270 int regno;
4fa52007
RK
271
272 direct_load[(int) mode] = direct_store[(int) mode] = 0;
273 PUT_MODE (mem, mode);
e2549997 274 PUT_MODE (mem1, mode);
bf1660a6 275 PUT_MODE (reg, mode);
4fa52007 276
e6fe56a4
RK
277 /* See if there is some register that can be used in this mode and
278 directly loaded or stored from memory. */
279
7308a047
RS
280 if (mode != VOIDmode && mode != BLKmode)
281 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
282 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
283 regno++)
284 {
285 if (! HARD_REGNO_MODE_OK (regno, mode))
286 continue;
e6fe56a4 287
bf1660a6 288 REGNO (reg) = regno;
e6fe56a4 289
7308a047
RS
290 SET_SRC (pat) = mem;
291 SET_DEST (pat) = reg;
292 if (recog (pat, insn, &num_clobbers) >= 0)
293 direct_load[(int) mode] = 1;
e6fe56a4 294
e2549997
RS
295 SET_SRC (pat) = mem1;
296 SET_DEST (pat) = reg;
297 if (recog (pat, insn, &num_clobbers) >= 0)
298 direct_load[(int) mode] = 1;
299
7308a047
RS
300 SET_SRC (pat) = reg;
301 SET_DEST (pat) = mem;
302 if (recog (pat, insn, &num_clobbers) >= 0)
303 direct_store[(int) mode] = 1;
e2549997
RS
304
305 SET_SRC (pat) = reg;
306 SET_DEST (pat) = mem1;
307 if (recog (pat, insn, &num_clobbers) >= 0)
308 direct_store[(int) mode] = 1;
7308a047 309 }
4fa52007
RK
310 }
311
51286de6
RH
312 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
313
314 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
315 mode = GET_MODE_WIDER_MODE (mode))
316 {
317 enum machine_mode srcmode;
318 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
0fb7aeda 319 srcmode = GET_MODE_WIDER_MODE (srcmode))
51286de6
RH
320 {
321 enum insn_code ic;
322
323 ic = can_extend_p (mode, srcmode, 0);
324 if (ic == CODE_FOR_nothing)
325 continue;
326
327 PUT_MODE (mem, srcmode);
0fb7aeda 328
51286de6
RH
329 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
330 float_extend_from_mem[mode][srcmode] = true;
331 }
332 }
4fa52007 333}
cff48d8f 334
bbf6f052
RK
335/* This is run at the start of compiling a function. */
336
337void
502b8322 338init_expr (void)
bbf6f052 339{
3a70d621 340 cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
bbf6f052 341}
bbf6f052
RK
342\f
343/* Copy data from FROM to TO, where the machine modes are not the same.
344 Both modes may be integer, or both may be floating.
345 UNSIGNEDP should be nonzero if FROM is an unsigned type.
346 This causes zero-extension instead of sign-extension. */
347
348void
502b8322 349convert_move (rtx to, rtx from, int unsignedp)
bbf6f052
RK
350{
351 enum machine_mode to_mode = GET_MODE (to);
352 enum machine_mode from_mode = GET_MODE (from);
3d8bf70f
BE
353 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
354 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
bbf6f052
RK
355 enum insn_code code;
356 rtx libcall;
357
358 /* rtx code for making an equivalent value. */
37d0b254
SE
359 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
360 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
bbf6f052 361
bbf6f052 362
5b0264cb 363 gcc_assert (to_real == from_real);
bbf6f052 364
6de9cd9a
DN
365 /* If the source and destination are already the same, then there's
366 nothing to do. */
367 if (to == from)
368 return;
369
1499e0a8
RK
370 /* If FROM is a SUBREG that indicates that we have already done at least
371 the required extension, strip it. We don't handle such SUBREGs as
372 TO here. */
373
374 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
375 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
376 >= GET_MODE_SIZE (to_mode))
377 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
378 from = gen_lowpart (to_mode, from), from_mode = to_mode;
379
5b0264cb 380 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
1499e0a8 381
bbf6f052
RK
382 if (to_mode == from_mode
383 || (from_mode == VOIDmode && CONSTANT_P (from)))
384 {
385 emit_move_insn (to, from);
386 return;
387 }
388
0b4565c9
BS
389 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
390 {
5b0264cb 391 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
3a94c984 392
0b4565c9 393 if (VECTOR_MODE_P (to_mode))
bafe341a 394 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
0b4565c9 395 else
bafe341a 396 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
0b4565c9
BS
397
398 emit_move_insn (to, from);
399 return;
400 }
401
06765df1
R
402 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
403 {
404 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
405 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
406 return;
407 }
408
bbf6f052
RK
409 if (to_real)
410 {
642dfa8b 411 rtx value, insns;
85363ca0 412 convert_optab tab;
81d79e2c 413
15ed7b52
JG
414 gcc_assert ((GET_MODE_PRECISION (from_mode)
415 != GET_MODE_PRECISION (to_mode))
416 || (DECIMAL_FLOAT_MODE_P (from_mode)
417 != DECIMAL_FLOAT_MODE_P (to_mode)));
5b0264cb 418
15ed7b52
JG
419 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
420 /* Conversion between decimal float and binary float, same size. */
421 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
422 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
85363ca0 423 tab = sext_optab;
85363ca0 424 else
5b0264cb 425 tab = trunc_optab;
2b01c326 426
85363ca0 427 /* Try converting directly if the insn is supported. */
2b01c326 428
85363ca0
ZW
429 code = tab->handlers[to_mode][from_mode].insn_code;
430 if (code != CODE_FOR_nothing)
b092b471 431 {
85363ca0
ZW
432 emit_unop_insn (code, to, from,
433 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
b092b471
JW
434 return;
435 }
b092b471 436
85363ca0
ZW
437 /* Otherwise use a libcall. */
438 libcall = tab->handlers[to_mode][from_mode].libfunc;
3a94c984 439
5b0264cb
NS
440 /* Is this conversion implemented yet? */
441 gcc_assert (libcall);
bbf6f052 442
642dfa8b 443 start_sequence ();
ebb1b59a 444 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
81d79e2c 445 1, from, from_mode);
642dfa8b
BS
446 insns = get_insns ();
447 end_sequence ();
450b1728
EC
448 emit_libcall_block (insns, to, value,
449 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
450 from)
451 : gen_rtx_FLOAT_EXTEND (to_mode, from));
bbf6f052
RK
452 return;
453 }
454
85363ca0
ZW
455 /* Handle pointer conversion. */ /* SPEE 900220. */
456 /* Targets are expected to provide conversion insns between PxImode and
457 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
458 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
459 {
460 enum machine_mode full_mode
461 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
462
5b0264cb
NS
463 gcc_assert (trunc_optab->handlers[to_mode][full_mode].insn_code
464 != CODE_FOR_nothing);
85363ca0
ZW
465
466 if (full_mode != from_mode)
467 from = convert_to_mode (full_mode, from, unsignedp);
468 emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
469 to, from, UNKNOWN);
470 return;
471 }
472 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
473 {
d2348bd5 474 rtx new_from;
85363ca0
ZW
475 enum machine_mode full_mode
476 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
477
5b0264cb
NS
478 gcc_assert (sext_optab->handlers[full_mode][from_mode].insn_code
479 != CODE_FOR_nothing);
85363ca0 480
85363ca0 481 if (to_mode == full_mode)
d2348bd5
DD
482 {
483 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
484 to, from, UNKNOWN);
485 return;
486 }
487
488 new_from = gen_reg_rtx (full_mode);
489 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
490 new_from, from, UNKNOWN);
85363ca0 491
a1105617 492 /* else proceed to integer conversions below. */
85363ca0 493 from_mode = full_mode;
d2348bd5 494 from = new_from;
85363ca0
ZW
495 }
496
bbf6f052
RK
497 /* Now both modes are integers. */
498
499 /* Handle expanding beyond a word. */
500 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
501 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
502 {
503 rtx insns;
504 rtx lowpart;
505 rtx fill_value;
506 rtx lowfrom;
507 int i;
508 enum machine_mode lowpart_mode;
509 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
510
511 /* Try converting directly if the insn is supported. */
512 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
513 != CODE_FOR_nothing)
514 {
cd1b4b44
RK
515 /* If FROM is a SUBREG, put it into a register. Do this
516 so that we always generate the same set of insns for
517 better cse'ing; if an intermediate assignment occurred,
518 we won't be doing the operation directly on the SUBREG. */
519 if (optimize > 0 && GET_CODE (from) == SUBREG)
520 from = force_reg (from_mode, from);
bbf6f052
RK
521 emit_unop_insn (code, to, from, equiv_code);
522 return;
523 }
524 /* Next, try converting via full word. */
525 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
526 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
527 != CODE_FOR_nothing))
528 {
f8cfc6aa 529 if (REG_P (to))
6a2d136b
EB
530 {
531 if (reg_overlap_mentioned_p (to, from))
532 from = force_reg (from_mode, from);
533 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
534 }
bbf6f052
RK
535 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
536 emit_unop_insn (code, to,
537 gen_lowpart (word_mode, to), equiv_code);
538 return;
539 }
540
541 /* No special multiword conversion insn; do it by hand. */
542 start_sequence ();
543
5c5033c3
RK
544 /* Since we will turn this into a no conflict block, we must ensure
545 that the source does not overlap the target. */
546
547 if (reg_overlap_mentioned_p (to, from))
548 from = force_reg (from_mode, from);
549
bbf6f052
RK
550 /* Get a copy of FROM widened to a word, if necessary. */
551 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
552 lowpart_mode = word_mode;
553 else
554 lowpart_mode = from_mode;
555
556 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
557
558 lowpart = gen_lowpart (lowpart_mode, to);
559 emit_move_insn (lowpart, lowfrom);
560
561 /* Compute the value to put in each remaining word. */
562 if (unsignedp)
563 fill_value = const0_rtx;
564 else
565 {
566#ifdef HAVE_slt
567 if (HAVE_slt
a995e389 568 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
bbf6f052
RK
569 && STORE_FLAG_VALUE == -1)
570 {
906c4e36 571 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
a06ef755 572 lowpart_mode, 0);
bbf6f052
RK
573 fill_value = gen_reg_rtx (word_mode);
574 emit_insn (gen_slt (fill_value));
575 }
576 else
577#endif
578 {
579 fill_value
580 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
581 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
906c4e36 582 NULL_RTX, 0);
bbf6f052
RK
583 fill_value = convert_to_mode (word_mode, fill_value, 1);
584 }
585 }
586
587 /* Fill the remaining words. */
588 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
589 {
590 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
591 rtx subword = operand_subword (to, index, 1, to_mode);
592
5b0264cb 593 gcc_assert (subword);
bbf6f052
RK
594
595 if (fill_value != subword)
596 emit_move_insn (subword, fill_value);
597 }
598
599 insns = get_insns ();
600 end_sequence ();
601
906c4e36 602 emit_no_conflict_block (insns, to, from, NULL_RTX,
38a448ca 603 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
bbf6f052
RK
604 return;
605 }
606
d3c64ee3
RS
607 /* Truncating multi-word to a word or less. */
608 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
609 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
bbf6f052 610 {
3c0cb5de 611 if (!((MEM_P (from)
431a6eca
JW
612 && ! MEM_VOLATILE_P (from)
613 && direct_load[(int) to_mode]
614 && ! mode_dependent_address_p (XEXP (from, 0)))
f8cfc6aa 615 || REG_P (from)
431a6eca
JW
616 || GET_CODE (from) == SUBREG))
617 from = force_reg (from_mode, from);
bbf6f052
RK
618 convert_move (to, gen_lowpart (word_mode, from), 0);
619 return;
620 }
621
bbf6f052
RK
622 /* Now follow all the conversions between integers
623 no more than a word long. */
624
625 /* For truncation, usually we can just refer to FROM in a narrower mode. */
626 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
627 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
d3c64ee3 628 GET_MODE_BITSIZE (from_mode)))
bbf6f052 629 {
3c0cb5de 630 if (!((MEM_P (from)
d3c64ee3
RS
631 && ! MEM_VOLATILE_P (from)
632 && direct_load[(int) to_mode]
633 && ! mode_dependent_address_p (XEXP (from, 0)))
f8cfc6aa 634 || REG_P (from)
d3c64ee3
RS
635 || GET_CODE (from) == SUBREG))
636 from = force_reg (from_mode, from);
f8cfc6aa 637 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
34aa3599
RK
638 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
639 from = copy_to_reg (from);
bbf6f052
RK
640 emit_move_insn (to, gen_lowpart (to_mode, from));
641 return;
642 }
643
d3c64ee3 644 /* Handle extension. */
bbf6f052
RK
645 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
646 {
647 /* Convert directly if that works. */
648 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
649 != CODE_FOR_nothing)
650 {
651 emit_unop_insn (code, to, from, equiv_code);
652 return;
653 }
654 else
655 {
656 enum machine_mode intermediate;
2b28d92e
NC
657 rtx tmp;
658 tree shift_amount;
bbf6f052
RK
659
660 /* Search for a mode to convert via. */
661 for (intermediate = from_mode; intermediate != VOIDmode;
662 intermediate = GET_MODE_WIDER_MODE (intermediate))
930b4e39
RK
663 if (((can_extend_p (to_mode, intermediate, unsignedp)
664 != CODE_FOR_nothing)
665 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
d60eaeff
JL
666 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
667 GET_MODE_BITSIZE (intermediate))))
bbf6f052
RK
668 && (can_extend_p (intermediate, from_mode, unsignedp)
669 != CODE_FOR_nothing))
670 {
671 convert_move (to, convert_to_mode (intermediate, from,
672 unsignedp), unsignedp);
673 return;
674 }
675
2b28d92e 676 /* No suitable intermediate mode.
3a94c984 677 Generate what we need with shifts. */
4a90aeeb
NS
678 shift_amount = build_int_cst (NULL_TREE,
679 GET_MODE_BITSIZE (to_mode)
7d60be94 680 - GET_MODE_BITSIZE (from_mode));
2b28d92e
NC
681 from = gen_lowpart (to_mode, force_reg (from_mode, from));
682 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
683 to, unsignedp);
3a94c984 684 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
2b28d92e
NC
685 to, unsignedp);
686 if (tmp != to)
687 emit_move_insn (to, tmp);
688 return;
bbf6f052
RK
689 }
690 }
691
3a94c984 692 /* Support special truncate insns for certain modes. */
85363ca0 693 if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
bbf6f052 694 {
85363ca0
ZW
695 emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
696 to, from, UNKNOWN);
b9bcad65
RK
697 return;
698 }
699
bbf6f052
RK
700 /* Handle truncation of volatile memrefs, and so on;
701 the things that couldn't be truncated directly,
85363ca0
ZW
702 and for which there was no special instruction.
703
704 ??? Code above formerly short-circuited this, for most integer
705 mode pairs, with a force_reg in from_mode followed by a recursive
706 call to this routine. Appears always to have been wrong. */
bbf6f052
RK
707 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
708 {
709 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
710 emit_move_insn (to, temp);
711 return;
712 }
713
714 /* Mode combination is not recognized. */
5b0264cb 715 gcc_unreachable ();
bbf6f052
RK
716}
717
718/* Return an rtx for a value that would result
719 from converting X to mode MODE.
720 Both X and MODE may be floating, or both integer.
721 UNSIGNEDP is nonzero if X is an unsigned value.
722 This can be done by referring to a part of X in place
ad76cef8 723 or by copying to a new temporary with conversion. */
bbf6f052
RK
724
725rtx
502b8322 726convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
5ffe63ed
RS
727{
728 return convert_modes (mode, VOIDmode, x, unsignedp);
729}
730
731/* Return an rtx for a value that would result
732 from converting X from mode OLDMODE to mode MODE.
733 Both modes may be floating, or both integer.
734 UNSIGNEDP is nonzero if X is an unsigned value.
735
736 This can be done by referring to a part of X in place
737 or by copying to a new temporary with conversion.
738
ad76cef8 739 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
5ffe63ed
RS
740
741rtx
502b8322 742convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
bbf6f052 743{
b3694847 744 rtx temp;
5ffe63ed 745
1499e0a8
RK
746 /* If FROM is a SUBREG that indicates that we have already done at least
747 the required extension, strip it. */
748
749 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
750 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
751 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
752 x = gen_lowpart (mode, x);
bbf6f052 753
64791b18
RK
754 if (GET_MODE (x) != VOIDmode)
755 oldmode = GET_MODE (x);
3a94c984 756
5ffe63ed 757 if (mode == oldmode)
bbf6f052
RK
758 return x;
759
760 /* There is one case that we must handle specially: If we are converting
906c4e36 761 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
bbf6f052
RK
762 we are to interpret the constant as unsigned, gen_lowpart will do
763 the wrong if the constant appears negative. What we want to do is
764 make the high-order word of the constant zero, not all ones. */
765
766 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
906c4e36 767 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
bbf6f052 768 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
96ff8a16
ILT
769 {
770 HOST_WIDE_INT val = INTVAL (x);
771
772 if (oldmode != VOIDmode
773 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
774 {
775 int width = GET_MODE_BITSIZE (oldmode);
776
777 /* We need to zero extend VAL. */
778 val &= ((HOST_WIDE_INT) 1 << width) - 1;
779 }
780
781 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
782 }
bbf6f052
RK
783
784 /* We can do this with a gen_lowpart if both desired and current modes
785 are integer, and this is either a constant integer, a register, or a
ba2e110c
RK
786 non-volatile MEM. Except for the constant case where MODE is no
787 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
bbf6f052 788
ba2e110c
RK
789 if ((GET_CODE (x) == CONST_INT
790 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
bbf6f052 791 || (GET_MODE_CLASS (mode) == MODE_INT
5ffe63ed 792 && GET_MODE_CLASS (oldmode) == MODE_INT
bbf6f052 793 && (GET_CODE (x) == CONST_DOUBLE
5ffe63ed 794 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
3c0cb5de 795 && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
d57c66da 796 && direct_load[(int) mode])
f8cfc6aa 797 || (REG_P (x)
006c9f4a
SE
798 && (! HARD_REGISTER_P (x)
799 || HARD_REGNO_MODE_OK (REGNO (x), mode))
2bf29316
JW
800 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
801 GET_MODE_BITSIZE (GET_MODE (x)))))))))
ba2e110c
RK
802 {
803 /* ?? If we don't know OLDMODE, we have to assume here that
804 X does not need sign- or zero-extension. This may not be
805 the case, but it's the best we can do. */
806 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
807 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
808 {
809 HOST_WIDE_INT val = INTVAL (x);
810 int width = GET_MODE_BITSIZE (oldmode);
811
812 /* We must sign or zero-extend in this case. Start by
813 zero-extending, then sign extend if we need to. */
814 val &= ((HOST_WIDE_INT) 1 << width) - 1;
815 if (! unsignedp
816 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
817 val |= (HOST_WIDE_INT) (-1) << width;
818
2496c7bd 819 return gen_int_mode (val, mode);
ba2e110c
RK
820 }
821
822 return gen_lowpart (mode, x);
823 }
bbf6f052 824
ebe75517
JH
825 /* Converting from integer constant into mode is always equivalent to an
826 subreg operation. */
827 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
828 {
5b0264cb 829 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
ebe75517
JH
830 return simplify_gen_subreg (mode, x, oldmode, 0);
831 }
832
bbf6f052
RK
833 temp = gen_reg_rtx (mode);
834 convert_move (temp, x, unsignedp);
835 return temp;
836}
837\f
cf5124f6
RS
838/* STORE_MAX_PIECES is the number of bytes at a time that we can
839 store efficiently. Due to internal GCC limitations, this is
840 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
841 for an immediate constant. */
842
843#define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
844
8fd3cf4e
JJ
845/* Determine whether the LEN bytes can be moved by using several move
846 instructions. Return nonzero if a call to move_by_pieces should
847 succeed. */
848
849int
502b8322
AJ
850can_move_by_pieces (unsigned HOST_WIDE_INT len,
851 unsigned int align ATTRIBUTE_UNUSED)
8fd3cf4e
JJ
852{
853 return MOVE_BY_PIECES_P (len, align);
854}
855
21d93687 856/* Generate several move instructions to copy LEN bytes from block FROM to
ad76cef8 857 block TO. (These are MEM rtx's with BLKmode).
566aa174 858
21d93687
RK
859 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
860 used to push FROM to the stack.
566aa174 861
8fd3cf4e 862 ALIGN is maximum stack alignment we can assume.
bbf6f052 863
8fd3cf4e
JJ
864 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
865 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
866 stpcpy. */
867
868rtx
502b8322
AJ
869move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
870 unsigned int align, int endp)
bbf6f052
RK
871{
872 struct move_by_pieces data;
566aa174 873 rtx to_addr, from_addr = XEXP (from, 0);
770ae6cc 874 unsigned int max_size = MOVE_MAX_PIECES + 1;
fbe1758d
AM
875 enum machine_mode mode = VOIDmode, tmode;
876 enum insn_code icode;
bbf6f052 877
f26aca6d
DD
878 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
879
bbf6f052 880 data.offset = 0;
bbf6f052 881 data.from_addr = from_addr;
566aa174
JH
882 if (to)
883 {
884 to_addr = XEXP (to, 0);
885 data.to = to;
886 data.autinc_to
887 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
888 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
889 data.reverse
890 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
891 }
892 else
893 {
894 to_addr = NULL_RTX;
895 data.to = NULL_RTX;
896 data.autinc_to = 1;
897#ifdef STACK_GROWS_DOWNWARD
898 data.reverse = 1;
899#else
900 data.reverse = 0;
901#endif
902 }
903 data.to_addr = to_addr;
bbf6f052 904 data.from = from;
bbf6f052
RK
905 data.autinc_from
906 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
907 || GET_CODE (from_addr) == POST_INC
908 || GET_CODE (from_addr) == POST_DEC);
909
910 data.explicit_inc_from = 0;
911 data.explicit_inc_to = 0;
bbf6f052
RK
912 if (data.reverse) data.offset = len;
913 data.len = len;
914
915 /* If copying requires more than two move insns,
916 copy addresses to registers (to make displacements shorter)
917 and use post-increment if available. */
918 if (!(data.autinc_from && data.autinc_to)
45d78e7f 919 && move_by_pieces_ninsns (len, align, max_size) > 2)
bbf6f052 920 {
3a94c984 921 /* Find the mode of the largest move... */
fbe1758d
AM
922 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
923 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
924 if (GET_MODE_SIZE (tmode) < max_size)
925 mode = tmode;
926
927 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
bbf6f052
RK
928 {
929 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
930 data.autinc_from = 1;
931 data.explicit_inc_from = -1;
932 }
fbe1758d 933 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
bbf6f052
RK
934 {
935 data.from_addr = copy_addr_to_reg (from_addr);
936 data.autinc_from = 1;
937 data.explicit_inc_from = 1;
938 }
bbf6f052
RK
939 if (!data.autinc_from && CONSTANT_P (from_addr))
940 data.from_addr = copy_addr_to_reg (from_addr);
fbe1758d 941 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
bbf6f052
RK
942 {
943 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
944 data.autinc_to = 1;
945 data.explicit_inc_to = -1;
946 }
fbe1758d 947 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
bbf6f052
RK
948 {
949 data.to_addr = copy_addr_to_reg (to_addr);
950 data.autinc_to = 1;
951 data.explicit_inc_to = 1;
952 }
bbf6f052
RK
953 if (!data.autinc_to && CONSTANT_P (to_addr))
954 data.to_addr = copy_addr_to_reg (to_addr);
955 }
956
f64d6991
DE
957 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
958 if (align >= GET_MODE_ALIGNMENT (tmode))
959 align = GET_MODE_ALIGNMENT (tmode);
960 else
961 {
962 enum machine_mode xmode;
963
964 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
965 tmode != VOIDmode;
966 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
967 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
968 || SLOW_UNALIGNED_ACCESS (tmode, align))
969 break;
970
971 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
972 }
bbf6f052
RK
973
974 /* First move what we can in the largest integer mode, then go to
975 successively smaller modes. */
976
977 while (max_size > 1)
978 {
e7c33f54
RK
979 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
980 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
981 if (GET_MODE_SIZE (tmode) < max_size)
bbf6f052
RK
982 mode = tmode;
983
984 if (mode == VOIDmode)
985 break;
986
987 icode = mov_optab->handlers[(int) mode].insn_code;
19caa751 988 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
bbf6f052
RK
989 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
990
991 max_size = GET_MODE_SIZE (mode);
992 }
993
994 /* The code above should have handled everything. */
5b0264cb 995 gcc_assert (!data.len);
8fd3cf4e
JJ
996
997 if (endp)
998 {
999 rtx to1;
1000
5b0264cb 1001 gcc_assert (!data.reverse);
8fd3cf4e
JJ
1002 if (data.autinc_to)
1003 {
1004 if (endp == 2)
1005 {
1006 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1007 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1008 else
1009 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
1010 -1));
1011 }
1012 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1013 data.offset);
1014 }
1015 else
1016 {
1017 if (endp == 2)
1018 --data.offset;
1019 to1 = adjust_address (data.to, QImode, data.offset);
1020 }
1021 return to1;
1022 }
1023 else
1024 return data.to;
bbf6f052
RK
1025}
1026
1027/* Return number of insns required to move L bytes by pieces.
f1eaaf73 1028 ALIGN (in bits) is maximum alignment we can assume. */
bbf6f052 1029
3bdf5ad1 1030static unsigned HOST_WIDE_INT
45d78e7f
JJ
1031move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
1032 unsigned int max_size)
bbf6f052 1033{
3bdf5ad1 1034 unsigned HOST_WIDE_INT n_insns = 0;
f64d6991 1035 enum machine_mode tmode;
bbf6f052 1036
f64d6991
DE
1037 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
1038 if (align >= GET_MODE_ALIGNMENT (tmode))
1039 align = GET_MODE_ALIGNMENT (tmode);
1040 else
1041 {
1042 enum machine_mode tmode, xmode;
1043
1044 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
1045 tmode != VOIDmode;
1046 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
1047 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
1048 || SLOW_UNALIGNED_ACCESS (tmode, align))
1049 break;
1050
1051 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
1052 }
bbf6f052
RK
1053
1054 while (max_size > 1)
1055 {
f64d6991 1056 enum machine_mode mode = VOIDmode;
bbf6f052
RK
1057 enum insn_code icode;
1058
e7c33f54
RK
1059 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1060 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1061 if (GET_MODE_SIZE (tmode) < max_size)
bbf6f052
RK
1062 mode = tmode;
1063
1064 if (mode == VOIDmode)
1065 break;
1066
1067 icode = mov_optab->handlers[(int) mode].insn_code;
19caa751 1068 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
bbf6f052
RK
1069 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1070
1071 max_size = GET_MODE_SIZE (mode);
1072 }
1073
5b0264cb 1074 gcc_assert (!l);
bbf6f052
RK
1075 return n_insns;
1076}
1077
1078/* Subroutine of move_by_pieces. Move as many bytes as appropriate
1079 with move instructions for mode MODE. GENFUN is the gen_... function
1080 to make a move insn for that mode. DATA has all the other info. */
1081
1082static void
502b8322
AJ
1083move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1084 struct move_by_pieces *data)
bbf6f052 1085{
3bdf5ad1 1086 unsigned int size = GET_MODE_SIZE (mode);
ae0ed63a 1087 rtx to1 = NULL_RTX, from1;
bbf6f052
RK
1088
1089 while (data->len >= size)
1090 {
3bdf5ad1
RK
1091 if (data->reverse)
1092 data->offset -= size;
1093
566aa174 1094 if (data->to)
3bdf5ad1 1095 {
566aa174 1096 if (data->autinc_to)
630036c6
JJ
1097 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1098 data->offset);
566aa174 1099 else
f4ef873c 1100 to1 = adjust_address (data->to, mode, data->offset);
3bdf5ad1 1101 }
3bdf5ad1
RK
1102
1103 if (data->autinc_from)
630036c6
JJ
1104 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1105 data->offset);
3bdf5ad1 1106 else
f4ef873c 1107 from1 = adjust_address (data->from, mode, data->offset);
bbf6f052 1108
940da324 1109 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
3d709fd3
RH
1110 emit_insn (gen_add2_insn (data->to_addr,
1111 GEN_INT (-(HOST_WIDE_INT)size)));
940da324 1112 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
3d709fd3
RH
1113 emit_insn (gen_add2_insn (data->from_addr,
1114 GEN_INT (-(HOST_WIDE_INT)size)));
bbf6f052 1115
566aa174
JH
1116 if (data->to)
1117 emit_insn ((*genfun) (to1, from1));
1118 else
21d93687
RK
1119 {
1120#ifdef PUSH_ROUNDING
1121 emit_single_push_insn (mode, from1, NULL);
1122#else
5b0264cb 1123 gcc_unreachable ();
21d93687
RK
1124#endif
1125 }
3bdf5ad1 1126
940da324 1127 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
906c4e36 1128 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
940da324 1129 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
906c4e36 1130 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
bbf6f052 1131
3bdf5ad1
RK
1132 if (! data->reverse)
1133 data->offset += size;
bbf6f052
RK
1134
1135 data->len -= size;
1136 }
1137}
1138\f
4ca79136
RH
1139/* Emit code to move a block Y to a block X. This may be done with
1140 string-move instructions, with multiple scalar move instructions,
1141 or with a library call.
bbf6f052 1142
4ca79136 1143 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
bbf6f052 1144 SIZE is an rtx that says how long they are.
19caa751 1145 ALIGN is the maximum alignment we can assume they have.
44bb111a 1146 METHOD describes what kind of copy this is, and what mechanisms may be used.
bbf6f052 1147
e9a25f70
JL
1148 Return the address of the new block, if memcpy is called and returns it,
1149 0 otherwise. */
1150
1151rtx
502b8322 1152emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
bbf6f052 1153{
44bb111a 1154 bool may_use_call;
e9a25f70 1155 rtx retval = 0;
44bb111a
RH
1156 unsigned int align;
1157
1158 switch (method)
1159 {
1160 case BLOCK_OP_NORMAL:
8148fe65 1161 case BLOCK_OP_TAILCALL:
44bb111a
RH
1162 may_use_call = true;
1163 break;
1164
1165 case BLOCK_OP_CALL_PARM:
1166 may_use_call = block_move_libcall_safe_for_call_parm ();
1167
1168 /* Make inhibit_defer_pop nonzero around the library call
1169 to force it to pop the arguments right away. */
1170 NO_DEFER_POP;
1171 break;
1172
1173 case BLOCK_OP_NO_LIBCALL:
1174 may_use_call = false;
1175 break;
1176
1177 default:
5b0264cb 1178 gcc_unreachable ();
44bb111a
RH
1179 }
1180
1181 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
e9a25f70 1182
5b0264cb
NS
1183 gcc_assert (MEM_P (x));
1184 gcc_assert (MEM_P (y));
1185 gcc_assert (size);
bbf6f052 1186
82c82743
RH
1187 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1188 block copy is more efficient for other large modes, e.g. DCmode. */
1189 x = adjust_address (x, BLKmode, 0);
1190 y = adjust_address (y, BLKmode, 0);
1191
cb38fd88
RH
1192 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1193 can be incorrect is coming from __builtin_memcpy. */
1194 if (GET_CODE (size) == CONST_INT)
1195 {
6972c506
JJ
1196 if (INTVAL (size) == 0)
1197 return 0;
1198
cb38fd88
RH
1199 x = shallow_copy_rtx (x);
1200 y = shallow_copy_rtx (y);
1201 set_mem_size (x, size);
1202 set_mem_size (y, size);
1203 }
1204
fbe1758d 1205 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
8fd3cf4e 1206 move_by_pieces (x, y, INTVAL (size), align, 0);
70128ad9 1207 else if (emit_block_move_via_movmem (x, y, size, align))
4ca79136 1208 ;
44bb111a 1209 else if (may_use_call)
8148fe65
JJ
1210 retval = emit_block_move_via_libcall (x, y, size,
1211 method == BLOCK_OP_TAILCALL);
44bb111a
RH
1212 else
1213 emit_block_move_via_loop (x, y, size, align);
1214
1215 if (method == BLOCK_OP_CALL_PARM)
1216 OK_DEFER_POP;
266007a7 1217
4ca79136
RH
1218 return retval;
1219}
266007a7 1220
502b8322 1221/* A subroutine of emit_block_move. Returns true if calling the
44bb111a
RH
1222 block move libcall will not clobber any parameters which may have
1223 already been placed on the stack. */
1224
1225static bool
502b8322 1226block_move_libcall_safe_for_call_parm (void)
44bb111a 1227{
a357a6d4 1228 /* If arguments are pushed on the stack, then they're safe. */
44bb111a
RH
1229 if (PUSH_ARGS)
1230 return true;
44bb111a 1231
450b1728 1232 /* If registers go on the stack anyway, any argument is sure to clobber
a357a6d4
GK
1233 an outgoing argument. */
1234#if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
1235 {
1236 tree fn = emit_block_move_libcall_fn (false);
1237 (void) fn;
1238 if (REG_PARM_STACK_SPACE (fn) != 0)
1239 return false;
1240 }
44bb111a 1241#endif
44bb111a 1242
a357a6d4
GK
1243 /* If any argument goes in memory, then it might clobber an outgoing
1244 argument. */
1245 {
1246 CUMULATIVE_ARGS args_so_far;
1247 tree fn, arg;
450b1728 1248
a357a6d4 1249 fn = emit_block_move_libcall_fn (false);
0f6937fe 1250 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
450b1728 1251
a357a6d4
GK
1252 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1253 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1254 {
1255 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1256 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1257 if (!tmp || !REG_P (tmp))
44bb111a 1258 return false;
78a52f11 1259 if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
a357a6d4 1260 return false;
a357a6d4
GK
1261 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1262 }
1263 }
1264 return true;
44bb111a
RH
1265}
1266
70128ad9 1267/* A subroutine of emit_block_move. Expand a movmem pattern;
4ca79136 1268 return true if successful. */
3ef1eef4 1269
4ca79136 1270static bool
70128ad9 1271emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align)
4ca79136 1272{
4ca79136 1273 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
a5e9c810 1274 int save_volatile_ok = volatile_ok;
4ca79136 1275 enum machine_mode mode;
266007a7 1276
4ca79136
RH
1277 /* Since this is a move insn, we don't care about volatility. */
1278 volatile_ok = 1;
1279
ee960939
OH
1280 /* Try the most limited insn first, because there's no point
1281 including more than one in the machine description unless
1282 the more limited one has some advantage. */
1283
4ca79136
RH
1284 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1285 mode = GET_MODE_WIDER_MODE (mode))
1286 {
70128ad9 1287 enum insn_code code = movmem_optab[(int) mode];
4ca79136
RH
1288 insn_operand_predicate_fn pred;
1289
1290 if (code != CODE_FOR_nothing
1291 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1292 here because if SIZE is less than the mode mask, as it is
1293 returned by the macro, it will definitely be less than the
1294 actual mode mask. */
1295 && ((GET_CODE (size) == CONST_INT
1296 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1297 <= (GET_MODE_MASK (mode) >> 1)))
1298 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1299 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1300 || (*pred) (x, BLKmode))
1301 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1302 || (*pred) (y, BLKmode))
1303 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1304 || (*pred) (opalign, VOIDmode)))
1305 {
1306 rtx op2;
1307 rtx last = get_last_insn ();
1308 rtx pat;
1309
1310 op2 = convert_to_mode (mode, size, 1);
1311 pred = insn_data[(int) code].operand[2].predicate;
1312 if (pred != 0 && ! (*pred) (op2, mode))
1313 op2 = copy_to_mode_reg (mode, op2);
1314
1315 /* ??? When called via emit_block_move_for_call, it'd be
1316 nice if there were some way to inform the backend, so
1317 that it doesn't fail the expansion because it thinks
1318 emitting the libcall would be more efficient. */
1319
1320 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1321 if (pat)
1322 {
1323 emit_insn (pat);
a5e9c810 1324 volatile_ok = save_volatile_ok;
4ca79136 1325 return true;
bbf6f052 1326 }
4ca79136
RH
1327 else
1328 delete_insns_since (last);
bbf6f052 1329 }
4ca79136 1330 }
bbf6f052 1331
a5e9c810 1332 volatile_ok = save_volatile_ok;
4ca79136
RH
1333 return false;
1334}
3ef1eef4 1335
8f99553f 1336/* A subroutine of emit_block_move. Expand a call to memcpy.
4ca79136 1337 Return the return value from memcpy, 0 otherwise. */
4bc973ae 1338
4ca79136 1339static rtx
8148fe65 1340emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
4ca79136 1341{
ee960939 1342 rtx dst_addr, src_addr;
4ca79136
RH
1343 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1344 enum machine_mode size_mode;
1345 rtx retval;
4bc973ae 1346
ad76cef8
PB
1347 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1348 pseudos. We can then place those new pseudos into a VAR_DECL and
1349 use them later. */
ee960939
OH
1350
1351 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1352 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
4ca79136 1353
ee960939
OH
1354 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1355 src_addr = convert_memory_address (ptr_mode, src_addr);
ee960939
OH
1356
1357 dst_tree = make_tree (ptr_type_node, dst_addr);
1358 src_tree = make_tree (ptr_type_node, src_addr);
4ca79136 1359
8f99553f 1360 size_mode = TYPE_MODE (sizetype);
ee960939 1361
4ca79136
RH
1362 size = convert_to_mode (size_mode, size, 1);
1363 size = copy_to_mode_reg (size_mode, size);
1364
1365 /* It is incorrect to use the libcall calling conventions to call
1366 memcpy in this context. This could be a user call to memcpy and
1367 the user may wish to examine the return value from memcpy. For
1368 targets where libcalls and normal calls have different conventions
8f99553f 1369 for returning pointers, we could end up generating incorrect code. */
4ca79136 1370
8f99553f 1371 size_tree = make_tree (sizetype, size);
4ca79136
RH
1372
1373 fn = emit_block_move_libcall_fn (true);
1374 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
8f99553f
JM
1375 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1376 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
4ca79136
RH
1377
1378 /* Now we have to build up the CALL_EXPR itself. */
1379 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
3244e67d
RS
1380 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1381 call_expr, arg_list, NULL_TREE);
8148fe65 1382 CALL_EXPR_TAILCALL (call_expr) = tailcall;
4ca79136 1383
84217346 1384 retval = expand_normal (call_expr);
4ca79136 1385
8f99553f 1386 return retval;
4ca79136 1387}
52cf7115 1388
4ca79136
RH
1389/* A subroutine of emit_block_move_via_libcall. Create the tree node
1390 for the function we use for block copies. The first time FOR_CALL
1391 is true, we call assemble_external. */
52cf7115 1392
4ca79136
RH
1393static GTY(()) tree block_move_fn;
1394
9661b15f 1395void
502b8322 1396init_block_move_fn (const char *asmspec)
4ca79136 1397{
9661b15f 1398 if (!block_move_fn)
4ca79136 1399 {
8fd3cf4e 1400 tree args, fn;
9661b15f 1401
8f99553f
JM
1402 fn = get_identifier ("memcpy");
1403 args = build_function_type_list (ptr_type_node, ptr_type_node,
1404 const_ptr_type_node, sizetype,
1405 NULL_TREE);
52cf7115 1406
4ca79136
RH
1407 fn = build_decl (FUNCTION_DECL, fn, args);
1408 DECL_EXTERNAL (fn) = 1;
1409 TREE_PUBLIC (fn) = 1;
1410 DECL_ARTIFICIAL (fn) = 1;
1411 TREE_NOTHROW (fn) = 1;
5b5cba1f
JM
1412 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1413 DECL_VISIBILITY_SPECIFIED (fn) = 1;
66c60e67 1414
4ca79136 1415 block_move_fn = fn;
bbf6f052 1416 }
e9a25f70 1417
9661b15f 1418 if (asmspec)
0e6df31e 1419 set_user_assembler_name (block_move_fn, asmspec);
9661b15f
JJ
1420}
1421
1422static tree
502b8322 1423emit_block_move_libcall_fn (int for_call)
9661b15f
JJ
1424{
1425 static bool emitted_extern;
1426
1427 if (!block_move_fn)
1428 init_block_move_fn (NULL);
1429
4ca79136
RH
1430 if (for_call && !emitted_extern)
1431 {
1432 emitted_extern = true;
0e6df31e 1433 make_decl_rtl (block_move_fn);
9661b15f 1434 assemble_external (block_move_fn);
4ca79136
RH
1435 }
1436
9661b15f 1437 return block_move_fn;
bbf6f052 1438}
44bb111a
RH
1439
1440/* A subroutine of emit_block_move. Copy the data via an explicit
1441 loop. This is used only when libcalls are forbidden. */
1442/* ??? It'd be nice to copy in hunks larger than QImode. */
1443
1444static void
502b8322
AJ
1445emit_block_move_via_loop (rtx x, rtx y, rtx size,
1446 unsigned int align ATTRIBUTE_UNUSED)
44bb111a
RH
1447{
1448 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1449 enum machine_mode iter_mode;
1450
1451 iter_mode = GET_MODE (size);
1452 if (iter_mode == VOIDmode)
1453 iter_mode = word_mode;
1454
1455 top_label = gen_label_rtx ();
1456 cmp_label = gen_label_rtx ();
1457 iter = gen_reg_rtx (iter_mode);
1458
1459 emit_move_insn (iter, const0_rtx);
1460
1461 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1462 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1463 do_pending_stack_adjust ();
1464
44bb111a
RH
1465 emit_jump (cmp_label);
1466 emit_label (top_label);
1467
1468 tmp = convert_modes (Pmode, iter_mode, iter, true);
1469 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1470 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1471 x = change_address (x, QImode, x_addr);
1472 y = change_address (y, QImode, y_addr);
1473
1474 emit_move_insn (x, y);
1475
1476 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1477 true, OPTAB_LIB_WIDEN);
1478 if (tmp != iter)
1479 emit_move_insn (iter, tmp);
1480
44bb111a
RH
1481 emit_label (cmp_label);
1482
1483 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1484 true, top_label);
44bb111a 1485}
bbf6f052
RK
1486\f
1487/* Copy all or part of a value X into registers starting at REGNO.
1488 The number of registers to be filled is NREGS. */
1489
1490void
502b8322 1491move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
bbf6f052
RK
1492{
1493 int i;
381127e8 1494#ifdef HAVE_load_multiple
3a94c984 1495 rtx pat;
381127e8
RL
1496 rtx last;
1497#endif
bbf6f052 1498
72bb9717
RK
1499 if (nregs == 0)
1500 return;
1501
bbf6f052
RK
1502 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1503 x = validize_mem (force_const_mem (mode, x));
1504
1505 /* See if the machine can do this with a load multiple insn. */
1506#ifdef HAVE_load_multiple
c3a02afe 1507 if (HAVE_load_multiple)
bbf6f052 1508 {
c3a02afe 1509 last = get_last_insn ();
38a448ca 1510 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
c3a02afe
RK
1511 GEN_INT (nregs));
1512 if (pat)
1513 {
1514 emit_insn (pat);
1515 return;
1516 }
1517 else
1518 delete_insns_since (last);
bbf6f052 1519 }
bbf6f052
RK
1520#endif
1521
1522 for (i = 0; i < nregs; i++)
38a448ca 1523 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
bbf6f052
RK
1524 operand_subword_force (x, i, mode));
1525}
1526
1527/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
c6b97fac 1528 The number of registers to be filled is NREGS. */
0040593d 1529
bbf6f052 1530void
502b8322 1531move_block_from_reg (int regno, rtx x, int nregs)
bbf6f052
RK
1532{
1533 int i;
bbf6f052 1534
2954d7db
RK
1535 if (nregs == 0)
1536 return;
1537
bbf6f052
RK
1538 /* See if the machine can do this with a store multiple insn. */
1539#ifdef HAVE_store_multiple
c3a02afe 1540 if (HAVE_store_multiple)
bbf6f052 1541 {
c6b97fac
AM
1542 rtx last = get_last_insn ();
1543 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1544 GEN_INT (nregs));
c3a02afe
RK
1545 if (pat)
1546 {
1547 emit_insn (pat);
1548 return;
1549 }
1550 else
1551 delete_insns_since (last);
bbf6f052 1552 }
bbf6f052
RK
1553#endif
1554
1555 for (i = 0; i < nregs; i++)
1556 {
1557 rtx tem = operand_subword (x, i, 1, BLKmode);
1558
5b0264cb 1559 gcc_assert (tem);
bbf6f052 1560
38a448ca 1561 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
bbf6f052
RK
1562 }
1563}
1564
084a1106
JDA
1565/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1566 ORIG, where ORIG is a non-consecutive group of registers represented by
1567 a PARALLEL. The clone is identical to the original except in that the
1568 original set of registers is replaced by a new set of pseudo registers.
1569 The new set has the same modes as the original set. */
1570
1571rtx
502b8322 1572gen_group_rtx (rtx orig)
084a1106
JDA
1573{
1574 int i, length;
1575 rtx *tmps;
1576
5b0264cb 1577 gcc_assert (GET_CODE (orig) == PARALLEL);
084a1106
JDA
1578
1579 length = XVECLEN (orig, 0);
703ad42b 1580 tmps = alloca (sizeof (rtx) * length);
084a1106
JDA
1581
1582 /* Skip a NULL entry in first slot. */
1583 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1584
1585 if (i)
1586 tmps[0] = 0;
1587
1588 for (; i < length; i++)
1589 {
1590 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1591 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1592
1593 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1594 }
1595
1596 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1597}
1598
27e29549
RH
1599/* A subroutine of emit_group_load. Arguments as for emit_group_load,
1600 except that values are placed in TMPS[i], and must later be moved
daa956d0 1601 into the corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
fffa9c1d 1602
27e29549
RH
1603static void
1604emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
fffa9c1d 1605{
27e29549 1606 rtx src;
aac5cc16 1607 int start, i;
7ef7000b 1608 enum machine_mode m = GET_MODE (orig_src);
fffa9c1d 1609
5b0264cb 1610 gcc_assert (GET_CODE (dst) == PARALLEL);
fffa9c1d 1611
f2978871
AM
1612 if (m != VOIDmode
1613 && !SCALAR_INT_MODE_P (m)
1614 && !MEM_P (orig_src)
1615 && GET_CODE (orig_src) != CONCAT)
782fa603
AH
1616 {
1617 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1618 if (imode == BLKmode)
1619 src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
1620 else
1621 src = gen_reg_rtx (imode);
1622 if (imode != BLKmode)
1623 src = gen_lowpart (GET_MODE (orig_src), src);
1624 emit_move_insn (src, orig_src);
1625 /* ...and back again. */
1626 if (imode != BLKmode)
1627 src = gen_lowpart (imode, src);
27e29549 1628 emit_group_load_1 (tmps, dst, src, type, ssize);
782fa603
AH
1629 return;
1630 }
1631
fffa9c1d
JW
1632 /* Check for a NULL entry, used to indicate that the parameter goes
1633 both on the stack and in registers. */
aac5cc16
RH
1634 if (XEXP (XVECEXP (dst, 0, 0), 0))
1635 start = 0;
fffa9c1d 1636 else
aac5cc16
RH
1637 start = 1;
1638
aac5cc16
RH
1639 /* Process the pieces. */
1640 for (i = start; i < XVECLEN (dst, 0); i++)
1641 {
1642 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
770ae6cc
RK
1643 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1644 unsigned int bytelen = GET_MODE_SIZE (mode);
aac5cc16
RH
1645 int shift = 0;
1646
1647 /* Handle trailing fragments that run over the size of the struct. */
8752c357 1648 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
aac5cc16 1649 {
6e985040
AM
1650 /* Arrange to shift the fragment to where it belongs.
1651 extract_bit_field loads to the lsb of the reg. */
1652 if (
1653#ifdef BLOCK_REG_PADDING
1654 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1655 == (BYTES_BIG_ENDIAN ? upward : downward)
1656#else
1657 BYTES_BIG_ENDIAN
1658#endif
1659 )
1660 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
aac5cc16 1661 bytelen = ssize - bytepos;
5b0264cb 1662 gcc_assert (bytelen > 0);
aac5cc16
RH
1663 }
1664
f3ce87a9
DE
1665 /* If we won't be loading directly from memory, protect the real source
1666 from strange tricks we might play; but make sure that the source can
1667 be loaded directly into the destination. */
1668 src = orig_src;
3c0cb5de 1669 if (!MEM_P (orig_src)
f3ce87a9
DE
1670 && (!CONSTANT_P (orig_src)
1671 || (GET_MODE (orig_src) != mode
1672 && GET_MODE (orig_src) != VOIDmode)))
1673 {
1674 if (GET_MODE (orig_src) == VOIDmode)
1675 src = gen_reg_rtx (mode);
1676 else
1677 src = gen_reg_rtx (GET_MODE (orig_src));
04050c69 1678
f3ce87a9
DE
1679 emit_move_insn (src, orig_src);
1680 }
1681
aac5cc16 1682 /* Optimize the access just a bit. */
3c0cb5de 1683 if (MEM_P (src)
6e985040
AM
1684 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1685 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
729a2125 1686 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
aac5cc16
RH
1687 && bytelen == GET_MODE_SIZE (mode))
1688 {
1689 tmps[i] = gen_reg_rtx (mode);
f4ef873c 1690 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
fffa9c1d 1691 }
d20b1190
EB
1692 else if (COMPLEX_MODE_P (mode)
1693 && GET_MODE (src) == mode
1694 && bytelen == GET_MODE_SIZE (mode))
1695 /* Let emit_move_complex do the bulk of the work. */
1696 tmps[i] = src;
7c4a6db0
JW
1697 else if (GET_CODE (src) == CONCAT)
1698 {
015b1ad1
JDA
1699 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1700 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1701
1702 if ((bytepos == 0 && bytelen == slen0)
1703 || (bytepos != 0 && bytepos + bytelen <= slen))
cbb92744 1704 {
015b1ad1
JDA
1705 /* The following assumes that the concatenated objects all
1706 have the same size. In this case, a simple calculation
1707 can be used to determine the object and the bit field
1708 to be extracted. */
1709 tmps[i] = XEXP (src, bytepos / slen0);
cbb92744 1710 if (! CONSTANT_P (tmps[i])
f8cfc6aa 1711 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
cbb92744 1712 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
015b1ad1 1713 (bytepos % slen0) * BITS_PER_UNIT,
b3520980 1714 1, NULL_RTX, mode, mode);
cbb92744 1715 }
5b0264cb 1716 else
58f69841 1717 {
5b0264cb 1718 rtx mem;
f58c00e3 1719
5b0264cb
NS
1720 gcc_assert (!bytepos);
1721 mem = assign_stack_temp (GET_MODE (src), slen, 0);
58f69841 1722 emit_move_insn (mem, src);
f58c00e3
EB
1723 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1724 0, 1, NULL_RTX, mode, mode);
58f69841 1725 }
7c4a6db0 1726 }
9c0631a7
AH
1727 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
 1728 SIMD register, which is currently broken. Until we get GCC
1729 to emit proper RTL for these cases, let's dump to memory. */
1730 else if (VECTOR_MODE_P (GET_MODE (dst))
f8cfc6aa 1731 && REG_P (src))
9c0631a7
AH
1732 {
1733 int slen = GET_MODE_SIZE (GET_MODE (src));
1734 rtx mem;
1735
1736 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1737 emit_move_insn (mem, src);
1738 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1739 }
d3a16cbd
FJ
1740 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1741 && XVECLEN (dst, 0) > 1)
 1742 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
f3ce87a9 1743 else if (CONSTANT_P (src)
f8cfc6aa 1744 || (REG_P (src) && GET_MODE (src) == mode))
2ee5437b 1745 tmps[i] = src;
fffa9c1d 1746 else
19caa751
RK
1747 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1748 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
b3520980 1749 mode, mode);
fffa9c1d 1750
6e985040 1751 if (shift)
09b52670 1752 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
7d60be94 1753 build_int_cst (NULL_TREE, shift), tmps[i], 0);
fffa9c1d 1754 }
27e29549
RH
1755}
1756
1757/* Emit code to move a block SRC of type TYPE to a block DST,
1758 where DST is non-consecutive registers represented by a PARALLEL.
 1759 SSIZE represents the total size of block SRC in bytes, or -1
1760 if not known. */
1761
1762void
1763emit_group_load (rtx dst, rtx src, tree type, int ssize)
1764{
1765 rtx *tmps;
1766 int i;
1767
1768 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1769 emit_group_load_1 (tmps, dst, src, type, ssize);
19caa751 1770
aac5cc16 1771 /* Copy the extracted pieces into the proper (probable) hard regs. */
27e29549
RH
1772 for (i = 0; i < XVECLEN (dst, 0); i++)
1773 {
1774 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1775 if (d == NULL)
1776 continue;
1777 emit_move_insn (d, tmps[i]);
1778 }
1779}
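
/* A hedged sketch of a typical caller, guarded out and never compiled:
   PARALLEL_DST describes where the ABI wants a struct value (for instance
   as built by a target's function-value hook), BLK_SRC is the BLKmode MEM
   holding that value, and the 16-byte size is hypothetical.  */
#if 0
static void
example_emit_group_load (rtx parallel_dst, rtx blk_src, tree type)
{
  /* Move 16 bytes of BLK_SRC into the registers named by PARALLEL_DST.  */
  emit_group_load (parallel_dst, blk_src, type, 16);
}
#endif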
1780
1781/* Similar, but load SRC into new pseudos in a format that looks like
1782 PARALLEL. This can later be fed to emit_group_move to get things
1783 in the right place. */
1784
1785rtx
1786emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1787{
1788 rtvec vec;
1789 int i;
1790
1791 vec = rtvec_alloc (XVECLEN (parallel, 0));
1792 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1793
1794 /* Convert the vector to look just like the original PARALLEL, except
1795 with the computed values. */
1796 for (i = 0; i < XVECLEN (parallel, 0); i++)
1797 {
1798 rtx e = XVECEXP (parallel, 0, i);
1799 rtx d = XEXP (e, 0);
1800
1801 if (d)
1802 {
1803 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1804 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1805 }
1806 RTVEC_ELT (vec, i) = e;
1807 }
1808
1809 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
fffa9c1d
JW
1810}
1811
084a1106
JDA
1812/* Emit code to move a block SRC to block DST, where SRC and DST are
1813 non-consecutive groups of registers, each represented by a PARALLEL. */
1814
1815void
502b8322 1816emit_group_move (rtx dst, rtx src)
084a1106
JDA
1817{
1818 int i;
1819
5b0264cb
NS
1820 gcc_assert (GET_CODE (src) == PARALLEL
1821 && GET_CODE (dst) == PARALLEL
1822 && XVECLEN (src, 0) == XVECLEN (dst, 0));
084a1106
JDA
1823
1824 /* Skip first entry if NULL. */
1825 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1826 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1827 XEXP (XVECEXP (src, 0, i), 0));
1828}
1829
27e29549
RH
1830/* Move a group of registers represented by a PARALLEL into pseudos. */
1831
1832rtx
1833emit_group_move_into_temps (rtx src)
1834{
1835 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1836 int i;
1837
1838 for (i = 0; i < XVECLEN (src, 0); i++)
1839 {
1840 rtx e = XVECEXP (src, 0, i);
1841 rtx d = XEXP (e, 0);
1842
1843 if (d)
1844 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1845 RTVEC_ELT (vec, i) = e;
1846 }
1847
1848 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1849}
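
/* A sketch of the two-step protocol the *_into_temps routines enable,
   guarded out and never compiled: load into fresh pseudos first, then copy
   the pseudos into the hard-register PARALLEL once it is safe to clobber
   those registers.  The names and the 16-byte size are hypothetical.  */
#if 0
static void
example_group_load_then_move (rtx hard_parallel, rtx blk_src, tree type)
{
  rtx temps = emit_group_load_into_temps (hard_parallel, blk_src, type, 16);
  /* ... emit code here that must not clobber the hard registers ...  */
  emit_group_move (hard_parallel, temps);
}
#endif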
1850
6e985040
AM
1851/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1852 where SRC is non-consecutive registers represented by a PARALLEL.
1853 SSIZE represents the total size of block ORIG_DST, or -1 if not
1854 known. */
fffa9c1d
JW
1855
1856void
6e985040 1857emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
fffa9c1d 1858{
aac5cc16 1859 rtx *tmps, dst;
79edfde8 1860 int start, finish, i;
7ef7000b 1861 enum machine_mode m = GET_MODE (orig_dst);
fffa9c1d 1862
5b0264cb 1863 gcc_assert (GET_CODE (src) == PARALLEL);
fffa9c1d 1864
0da34ce4
RH
1865 if (!SCALAR_INT_MODE_P (m)
1866 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
782fa603
AH
1867 {
1868 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1869 if (imode == BLKmode)
1870 dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
1871 else
1872 dst = gen_reg_rtx (imode);
1873 emit_group_store (dst, src, type, ssize);
1874 if (imode != BLKmode)
1875 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1876 emit_move_insn (orig_dst, dst);
1877 return;
1878 }
1879
fffa9c1d
JW
1880 /* Check for a NULL entry, used to indicate that the parameter goes
1881 both on the stack and in registers. */
aac5cc16
RH
1882 if (XEXP (XVECEXP (src, 0, 0), 0))
1883 start = 0;
fffa9c1d 1884 else
aac5cc16 1885 start = 1;
79edfde8 1886 finish = XVECLEN (src, 0);
aac5cc16 1887
79edfde8 1888 tmps = alloca (sizeof (rtx) * finish);
fffa9c1d 1889
aac5cc16 1890 /* Copy the (probable) hard regs into pseudos. */
79edfde8 1891 for (i = start; i < finish; i++)
fffa9c1d 1892 {
aac5cc16 1893 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
5ac60669
RS
1894 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1895 {
1896 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1897 emit_move_insn (tmps[i], reg);
1898 }
1899 else
1900 tmps[i] = reg;
aac5cc16 1901 }
fffa9c1d 1902
aac5cc16
RH
1903 /* If we won't be storing directly into memory, protect the real destination
1904 from strange tricks we might play. */
1905 dst = orig_dst;
10a9f2be
JW
1906 if (GET_CODE (dst) == PARALLEL)
1907 {
1908 rtx temp;
1909
1910 /* We can get a PARALLEL dst if there is a conditional expression in
1911 a return statement. In that case, the dst and src are the same,
1912 so no action is necessary. */
1913 if (rtx_equal_p (dst, src))
1914 return;
1915
1916 /* It is unclear if we can ever reach here, but we may as well handle
1917 it. Allocate a temporary, and split this into a store/load to/from
1918 the temporary. */
1919
1920 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
6e985040
AM
1921 emit_group_store (temp, src, type, ssize);
1922 emit_group_load (dst, temp, type, ssize);
10a9f2be
JW
1923 return;
1924 }
3c0cb5de 1925 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
aac5cc16 1926 {
79edfde8
RS
1927 enum machine_mode outer = GET_MODE (dst);
1928 enum machine_mode inner;
5650dfbd 1929 HOST_WIDE_INT bytepos;
79edfde8
RS
1930 bool done = false;
1931 rtx temp;
1932
5ac60669 1933 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
79edfde8
RS
1934 dst = gen_reg_rtx (outer);
1935
aac5cc16 1936 /* Make life a bit easier for combine. */
79edfde8
RS
1937 /* If the first element of the vector is the low part
1938 of the destination mode, use a paradoxical subreg to
1939 initialize the destination. */
1940 if (start < finish)
1941 {
1942 inner = GET_MODE (tmps[start]);
7488662d 1943 bytepos = subreg_lowpart_offset (inner, outer);
79edfde8
RS
1944 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1945 {
1946 temp = simplify_gen_subreg (outer, tmps[start],
7488662d 1947 inner, 0);
9fd20553
RS
1948 if (temp)
1949 {
1950 emit_move_insn (dst, temp);
1951 done = true;
1952 start++;
1953 }
79edfde8
RS
1954 }
1955 }
1956
1957 /* If the first element wasn't the low part, try the last. */
1958 if (!done
1959 && start < finish - 1)
1960 {
1961 inner = GET_MODE (tmps[finish - 1]);
7488662d 1962 bytepos = subreg_lowpart_offset (inner, outer);
79edfde8
RS
1963 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
1964 {
1965 temp = simplify_gen_subreg (outer, tmps[finish - 1],
7488662d 1966 inner, 0);
9fd20553
RS
1967 if (temp)
1968 {
1969 emit_move_insn (dst, temp);
1970 done = true;
1971 finish--;
1972 }
79edfde8
RS
1973 }
1974 }
1975
1976 /* Otherwise, simply initialize the result to zero. */
1977 if (!done)
1978 emit_move_insn (dst, CONST0_RTX (outer));
aac5cc16 1979 }
aac5cc16
RH
1980
1981 /* Process the pieces. */
79edfde8 1982 for (i = start; i < finish; i++)
aac5cc16 1983 {
770ae6cc 1984 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
aac5cc16 1985 enum machine_mode mode = GET_MODE (tmps[i]);
770ae6cc 1986 unsigned int bytelen = GET_MODE_SIZE (mode);
6ddae612 1987 rtx dest = dst;
aac5cc16
RH
1988
1989 /* Handle trailing fragments that run over the size of the struct. */
8752c357 1990 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
71bc0330 1991 {
6e985040
AM
1992 /* store_bit_field always takes its value from the lsb.
1993 Move the fragment to the lsb if it's not already there. */
1994 if (
1995#ifdef BLOCK_REG_PADDING
1996 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
1997 == (BYTES_BIG_ENDIAN ? upward : downward)
1998#else
1999 BYTES_BIG_ENDIAN
2000#endif
2001 )
aac5cc16
RH
2002 {
2003 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
09b52670 2004 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
7d60be94
NS
2005 build_int_cst (NULL_TREE, shift),
2006 tmps[i], 0);
aac5cc16
RH
2007 }
2008 bytelen = ssize - bytepos;
71bc0330 2009 }
fffa9c1d 2010
6ddae612
JJ
2011 if (GET_CODE (dst) == CONCAT)
2012 {
2013 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2014 dest = XEXP (dst, 0);
2015 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2016 {
2017 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2018 dest = XEXP (dst, 1);
2019 }
5b0264cb 2020 else
0d446150 2021 {
5b0264cb 2022 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
0d446150
JH
2023 dest = assign_stack_temp (GET_MODE (dest),
2024 GET_MODE_SIZE (GET_MODE (dest)), 0);
2025 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2026 tmps[i]);
2027 dst = dest;
2028 break;
2029 }
6ddae612
JJ
2030 }
2031
aac5cc16 2032 /* Optimize the access just a bit. */
3c0cb5de 2033 if (MEM_P (dest)
6e985040
AM
2034 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2035 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
729a2125 2036 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
aac5cc16 2037 && bytelen == GET_MODE_SIZE (mode))
6ddae612 2038 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
aac5cc16 2039 else
6ddae612 2040 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
b3520980 2041 mode, tmps[i]);
fffa9c1d 2042 }
729a2125 2043
aac5cc16 2044 /* Copy from the pseudo into the (probable) hard reg. */
0d446150 2045 if (orig_dst != dst)
aac5cc16 2046 emit_move_insn (orig_dst, dst);
fffa9c1d
JW
2047}
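
/* The reverse direction, sketched under the same assumptions and guarded
   out: PARALLEL_SRC is a group of registers holding a returned struct and
   BLK_DST is the BLKmode MEM it should end up in.  The size is hypothetical.  */
#if 0
static void
example_emit_group_store (rtx blk_dst, rtx parallel_src, tree type)
{
  /* Scatter the registers of PARALLEL_SRC into 16 bytes of BLK_DST.  */
  emit_group_store (blk_dst, parallel_src, type, 16);
}
#endif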
2048
c36fce9a
GRK
2049/* Generate code to copy a BLKmode object of TYPE out of a
2050 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2051 is null, a stack temporary is created. TGTBLK is returned.
2052
c988af2b
RS
2053 The purpose of this routine is to handle functions that return
2054 BLKmode structures in registers. Some machines (the PA for example)
2055 want to return all small structures in registers regardless of the
2056 structure's alignment. */
c36fce9a
GRK
2057
2058rtx
502b8322 2059copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
c36fce9a 2060{
19caa751
RK
2061 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2062 rtx src = NULL, dst = NULL;
2063 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
c988af2b 2064 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
19caa751
RK
2065
2066 if (tgtblk == 0)
2067 {
1da68f56
RK
2068 tgtblk = assign_temp (build_qualified_type (type,
2069 (TYPE_QUALS (type)
2070 | TYPE_QUAL_CONST)),
2071 0, 1, 1);
19caa751
RK
2072 preserve_temp_slots (tgtblk);
2073 }
3a94c984 2074
1ed1b4fb 2075 /* This code assumes srcreg is at least a full word. If it isn't, copy it
9ac3e73b 2076 into a new pseudo which is a full word. */
0d7839da 2077
19caa751
RK
2078 if (GET_MODE (srcreg) != BLKmode
2079 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
8df83eae 2080 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
19caa751 2081
c988af2b
RS
2082 /* If the structure doesn't take up a whole number of words, see whether
2083 SRCREG is padded on the left or on the right. If it's on the left,
2084 set PADDING_CORRECTION to the number of bits to skip.
2085
 2086 In most ABIs, the structure will be returned at the least significant end of
2087 the register, which translates to right padding on little-endian
2088 targets and left padding on big-endian targets. The opposite
2089 holds if the structure is returned at the most significant
2090 end of the register. */
2091 if (bytes % UNITS_PER_WORD != 0
2092 && (targetm.calls.return_in_msb (type)
2093 ? !BYTES_BIG_ENDIAN
2094 : BYTES_BIG_ENDIAN))
2095 padding_correction
19caa751
RK
2096 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2097
 2098 /* Copy the structure BITSIZE bits at a time.
3a94c984 2099
19caa751
RK
2100 We could probably emit more efficient code for machines which do not use
2101 strict alignment, but it doesn't seem worth the effort at the current
2102 time. */
c988af2b 2103 for (bitpos = 0, xbitpos = padding_correction;
19caa751
RK
2104 bitpos < bytes * BITS_PER_UNIT;
2105 bitpos += bitsize, xbitpos += bitsize)
2106 {
3a94c984 2107 /* We need a new source operand each time xbitpos is on a
c988af2b 2108 word boundary and when xbitpos == padding_correction
19caa751
RK
2109 (the first time through). */
2110 if (xbitpos % BITS_PER_WORD == 0
c988af2b 2111 || xbitpos == padding_correction)
b47f8cfc
JH
2112 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2113 GET_MODE (srcreg));
19caa751
RK
2114
2115 /* We need a new destination operand each time bitpos is on
2116 a word boundary. */
2117 if (bitpos % BITS_PER_WORD == 0)
2118 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
3a94c984 2119
19caa751
RK
2120 /* Use xbitpos for the source extraction (right justified) and
 2121 bitpos for the destination store (left justified). */
2122 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2123 extract_bit_field (src, bitsize,
2124 xbitpos % BITS_PER_WORD, 1,
b3520980 2125 NULL_RTX, word_mode, word_mode));
19caa751
RK
2126 }
2127
2128 return tgtblk;
c36fce9a
GRK
2129}
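
/* A hedged sketch, guarded out and never compiled: unpack a small struct
   that the ABI returned in registers into a stack temporary.  Passing a
   null TGTBLK lets the routine allocate the temporary itself; RESULT_REG
   and STRUCT_TYPE are hypothetical.  */
#if 0
static rtx
example_copy_blkmode_from_reg (rtx result_reg, tree struct_type)
{
  /* Returns a MEM holding the bytes of RESULT_REG laid out as STRUCT_TYPE.  */
  return copy_blkmode_from_reg (NULL_RTX, result_reg, struct_type);
}
#endif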
2130
94b25f81
RK
2131/* Add a USE expression for REG to the (possibly empty) list pointed
2132 to by CALL_FUSAGE. REG must denote a hard register. */
bbf6f052
RK
2133
2134void
502b8322 2135use_reg (rtx *call_fusage, rtx reg)
b3f8cf4a 2136{
5b0264cb
NS
2137 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2138
b3f8cf4a 2139 *call_fusage
38a448ca
RH
2140 = gen_rtx_EXPR_LIST (VOIDmode,
2141 gen_rtx_USE (VOIDmode, reg), *call_fusage);
b3f8cf4a
RK
2142}
2143
94b25f81
RK
2144/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2145 starting at REGNO. All of these registers must be hard registers. */
b3f8cf4a
RK
2146
2147void
502b8322 2148use_regs (rtx *call_fusage, int regno, int nregs)
bbf6f052 2149{
0304dfbb 2150 int i;
bbf6f052 2151
5b0264cb 2152 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
0304dfbb
DE
2153
2154 for (i = 0; i < nregs; i++)
e50126e8 2155 use_reg (call_fusage, regno_reg_rtx[regno + i]);
bbf6f052 2156}
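
/* A small sketch of how a call expander records register uses, guarded out
   and never compiled.  CALL_FUSAGE accumulates USE expressions that later
   get attached to the CALL_INSN; the register numbers and SImode are
   hypothetical.  */
#if 0
static void
example_record_argument_regs (rtx *call_fusage)
{
  /* A single hard argument register...  */
  use_reg (call_fusage, gen_rtx_REG (SImode, 4));
  /* ...or three consecutive hard registers starting at register 5.  */
  use_regs (call_fusage, 5, 3);
}
#endif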
fffa9c1d
JW
2157
2158/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2159 PARALLEL REGS. This is for calls that pass values in multiple
2160 non-contiguous locations. The Irix 6 ABI has examples of this. */
2161
2162void
502b8322 2163use_group_regs (rtx *call_fusage, rtx regs)
fffa9c1d
JW
2164{
2165 int i;
2166
6bd35f86
DE
2167 for (i = 0; i < XVECLEN (regs, 0); i++)
2168 {
2169 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
fffa9c1d 2170
6bd35f86
DE
2171 /* A NULL entry means the parameter goes both on the stack and in
2172 registers. This can also be a MEM for targets that pass values
2173 partially on the stack and partially in registers. */
f8cfc6aa 2174 if (reg != 0 && REG_P (reg))
6bd35f86
DE
2175 use_reg (call_fusage, reg);
2176 }
fffa9c1d 2177}
bbf6f052 2178\f
57814e5e 2179
cf5124f6
RS
2180/* Determine whether the LEN bytes generated by CONSTFUN can be
2181 stored to memory using several move instructions. CONSTFUNDATA is
2182 a pointer which will be passed as argument in every CONSTFUN call.
2183 ALIGN is maximum alignment we can assume. Return nonzero if a
2184 call to store_by_pieces should succeed. */
2185
57814e5e 2186int
502b8322
AJ
2187can_store_by_pieces (unsigned HOST_WIDE_INT len,
2188 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2189 void *constfundata, unsigned int align)
57814e5e 2190{
45d78e7f
JJ
2191 unsigned HOST_WIDE_INT l;
2192 unsigned int max_size;
57814e5e
JJ
2193 HOST_WIDE_INT offset = 0;
2194 enum machine_mode mode, tmode;
2195 enum insn_code icode;
2196 int reverse;
2197 rtx cst;
2198
2c430630
RS
2199 if (len == 0)
2200 return 1;
2201
4977bab6 2202 if (! STORE_BY_PIECES_P (len, align))
57814e5e
JJ
2203 return 0;
2204
f64d6991
DE
2205 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2206 if (align >= GET_MODE_ALIGNMENT (tmode))
2207 align = GET_MODE_ALIGNMENT (tmode);
2208 else
2209 {
2210 enum machine_mode xmode;
2211
2212 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2213 tmode != VOIDmode;
2214 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2215 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2216 || SLOW_UNALIGNED_ACCESS (tmode, align))
2217 break;
2218
2219 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2220 }
57814e5e
JJ
2221
2222 /* We would first store what we can in the largest integer mode, then go to
2223 successively smaller modes. */
2224
2225 for (reverse = 0;
2226 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2227 reverse++)
2228 {
2229 l = len;
2230 mode = VOIDmode;
cf5124f6 2231 max_size = STORE_MAX_PIECES + 1;
57814e5e
JJ
2232 while (max_size > 1)
2233 {
2234 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2235 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2236 if (GET_MODE_SIZE (tmode) < max_size)
2237 mode = tmode;
2238
2239 if (mode == VOIDmode)
2240 break;
2241
2242 icode = mov_optab->handlers[(int) mode].insn_code;
2243 if (icode != CODE_FOR_nothing
2244 && align >= GET_MODE_ALIGNMENT (mode))
2245 {
2246 unsigned int size = GET_MODE_SIZE (mode);
2247
2248 while (l >= size)
2249 {
2250 if (reverse)
2251 offset -= size;
2252
2253 cst = (*constfun) (constfundata, offset, mode);
2254 if (!LEGITIMATE_CONSTANT_P (cst))
2255 return 0;
2256
2257 if (!reverse)
2258 offset += size;
2259
2260 l -= size;
2261 }
2262 }
2263
2264 max_size = GET_MODE_SIZE (mode);
2265 }
2266
2267 /* The code above should have handled everything. */
5b0264cb 2268 gcc_assert (!l);
57814e5e
JJ
2269 }
2270
2271 return 1;
2272}
2273
2274/* Generate several move instructions to store LEN bytes generated by
2275 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2276 pointer which will be passed as argument in every CONSTFUN call.
8fd3cf4e
JJ
2277 ALIGN is maximum alignment we can assume.
2278 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
 2279 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2280 stpcpy. */
57814e5e 2281
8fd3cf4e 2282rtx
502b8322
AJ
2283store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2284 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2285 void *constfundata, unsigned int align, int endp)
57814e5e
JJ
2286{
2287 struct store_by_pieces data;
2288
2c430630
RS
2289 if (len == 0)
2290 {
5b0264cb 2291 gcc_assert (endp != 2);
2c430630
RS
2292 return to;
2293 }
2294
5b0264cb 2295 gcc_assert (STORE_BY_PIECES_P (len, align));
57814e5e
JJ
2296 data.constfun = constfun;
2297 data.constfundata = constfundata;
2298 data.len = len;
2299 data.to = to;
2300 store_by_pieces_1 (&data, align);
8fd3cf4e
JJ
2301 if (endp)
2302 {
2303 rtx to1;
2304
5b0264cb 2305 gcc_assert (!data.reverse);
8fd3cf4e
JJ
2306 if (data.autinc_to)
2307 {
2308 if (endp == 2)
2309 {
2310 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2311 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2312 else
2313 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2314 -1));
2315 }
2316 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2317 data.offset);
2318 }
2319 else
2320 {
2321 if (endp == 2)
2322 --data.offset;
2323 to1 = adjust_address (data.to, QImode, data.offset);
2324 }
2325 return to1;
2326 }
2327 else
2328 return data.to;
57814e5e
JJ
2329}
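
/* A hedged sketch of the check-then-store protocol, guarded out and never
   compiled.  The constant generator and the 8-byte length are hypothetical;
   callers such as builtin expanders typically pair the two calls this way.  */
#if 0
static rtx
example_constfun (void *data ATTRIBUTE_UNUSED,
		  HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
		  enum machine_mode mode ATTRIBUTE_UNUSED)
{
  /* A real caller would build a MODE-sized constant from its data;
     here we simply store zeros.  */
  return const0_rtx;
}

static void
example_store_by_pieces (rtx dest_mem)
{
  unsigned int align = MEM_ALIGN (dest_mem);

  if (can_store_by_pieces (8, example_constfun, NULL, align))
    store_by_pieces (dest_mem, 8, example_constfun, NULL, align, 0);
}
#endif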
2330
19caa751 2331/* Generate several move instructions to clear LEN bytes of block TO. (A MEM
ad76cef8 2332 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
9de08200
RK
2333
2334static void
342e2b74 2335clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
9de08200 2336{
57814e5e
JJ
2337 struct store_by_pieces data;
2338
2c430630
RS
2339 if (len == 0)
2340 return;
2341
57814e5e 2342 data.constfun = clear_by_pieces_1;
df4ae160 2343 data.constfundata = NULL;
57814e5e
JJ
2344 data.len = len;
2345 data.to = to;
2346 store_by_pieces_1 (&data, align);
2347}
2348
2349/* Callback routine for clear_by_pieces.
2350 Return const0_rtx unconditionally. */
2351
2352static rtx
502b8322
AJ
2353clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2354 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2355 enum machine_mode mode ATTRIBUTE_UNUSED)
57814e5e
JJ
2356{
2357 return const0_rtx;
2358}
2359
2360/* Subroutine of clear_by_pieces and store_by_pieces.
2361 Generate several move instructions to store LEN bytes of block TO. (A MEM
ad76cef8 2362 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
57814e5e
JJ
2363
2364static void
502b8322
AJ
2365store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2366 unsigned int align ATTRIBUTE_UNUSED)
57814e5e
JJ
2367{
2368 rtx to_addr = XEXP (data->to, 0);
45d78e7f 2369 unsigned int max_size = STORE_MAX_PIECES + 1;
fbe1758d
AM
2370 enum machine_mode mode = VOIDmode, tmode;
2371 enum insn_code icode;
9de08200 2372
57814e5e
JJ
2373 data->offset = 0;
2374 data->to_addr = to_addr;
2375 data->autinc_to
9de08200
RK
2376 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2377 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2378
57814e5e
JJ
2379 data->explicit_inc_to = 0;
2380 data->reverse
9de08200 2381 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
57814e5e
JJ
2382 if (data->reverse)
2383 data->offset = data->len;
9de08200 2384
57814e5e 2385 /* If storing requires more than two move insns,
9de08200
RK
2386 copy addresses to registers (to make displacements shorter)
2387 and use post-increment if available. */
57814e5e 2388 if (!data->autinc_to
45d78e7f 2389 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
9de08200 2390 {
3a94c984 2391 /* Determine the main mode we'll be using. */
fbe1758d
AM
2392 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2393 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2394 if (GET_MODE_SIZE (tmode) < max_size)
2395 mode = tmode;
2396
57814e5e 2397 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
9de08200 2398 {
57814e5e
JJ
2399 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2400 data->autinc_to = 1;
2401 data->explicit_inc_to = -1;
9de08200 2402 }
3bdf5ad1 2403
57814e5e
JJ
2404 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2405 && ! data->autinc_to)
9de08200 2406 {
57814e5e
JJ
2407 data->to_addr = copy_addr_to_reg (to_addr);
2408 data->autinc_to = 1;
2409 data->explicit_inc_to = 1;
9de08200 2410 }
3bdf5ad1 2411
57814e5e
JJ
2412 if ( !data->autinc_to && CONSTANT_P (to_addr))
2413 data->to_addr = copy_addr_to_reg (to_addr);
9de08200
RK
2414 }
2415
f64d6991
DE
2416 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2417 if (align >= GET_MODE_ALIGNMENT (tmode))
2418 align = GET_MODE_ALIGNMENT (tmode);
2419 else
2420 {
2421 enum machine_mode xmode;
2422
2423 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2424 tmode != VOIDmode;
2425 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2426 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2427 || SLOW_UNALIGNED_ACCESS (tmode, align))
2428 break;
2429
2430 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2431 }
9de08200 2432
57814e5e 2433 /* First store what we can in the largest integer mode, then go to
9de08200
RK
2434 successively smaller modes. */
2435
2436 while (max_size > 1)
2437 {
9de08200
RK
2438 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2439 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2440 if (GET_MODE_SIZE (tmode) < max_size)
2441 mode = tmode;
2442
2443 if (mode == VOIDmode)
2444 break;
2445
2446 icode = mov_optab->handlers[(int) mode].insn_code;
19caa751 2447 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
57814e5e 2448 store_by_pieces_2 (GEN_FCN (icode), mode, data);
9de08200
RK
2449
2450 max_size = GET_MODE_SIZE (mode);
2451 }
2452
2453 /* The code above should have handled everything. */
5b0264cb 2454 gcc_assert (!data->len);
9de08200
RK
2455}
2456
57814e5e 2457/* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
9de08200
RK
2458 with move instructions for mode MODE. GENFUN is the gen_... function
2459 to make a move insn for that mode. DATA has all the other info. */
2460
2461static void
502b8322
AJ
2462store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2463 struct store_by_pieces *data)
9de08200 2464{
3bdf5ad1 2465 unsigned int size = GET_MODE_SIZE (mode);
57814e5e 2466 rtx to1, cst;
9de08200
RK
2467
2468 while (data->len >= size)
2469 {
3bdf5ad1
RK
2470 if (data->reverse)
2471 data->offset -= size;
9de08200 2472
3bdf5ad1 2473 if (data->autinc_to)
630036c6
JJ
2474 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2475 data->offset);
3a94c984 2476 else
f4ef873c 2477 to1 = adjust_address (data->to, mode, data->offset);
9de08200 2478
940da324 2479 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
57814e5e
JJ
2480 emit_insn (gen_add2_insn (data->to_addr,
2481 GEN_INT (-(HOST_WIDE_INT) size)));
9de08200 2482
57814e5e
JJ
2483 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2484 emit_insn ((*genfun) (to1, cst));
3bdf5ad1 2485
940da324 2486 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
9de08200 2487 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
9de08200 2488
3bdf5ad1
RK
2489 if (! data->reverse)
2490 data->offset += size;
9de08200
RK
2491
2492 data->len -= size;
2493 }
2494}
2495\f
19caa751 2496/* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
8ac61af7 2497 its length in bytes. */
e9a25f70
JL
2498
2499rtx
8148fe65 2500clear_storage (rtx object, rtx size, enum block_op_methods method)
bbf6f052 2501{
57aaef66
RH
2502 enum machine_mode mode = GET_MODE (object);
2503 unsigned int align;
e9a25f70 2504
8148fe65
JJ
2505 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2506
fcf1b822
RK
2507 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2508 just move a zero. Otherwise, do this a piece at a time. */
57aaef66 2509 if (mode != BLKmode
fcf1b822 2510 && GET_CODE (size) == CONST_INT
57aaef66 2511 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
bbf6f052 2512 {
57aaef66
RH
2513 rtx zero = CONST0_RTX (mode);
2514 if (zero != NULL)
2515 {
2516 emit_move_insn (object, zero);
2517 return NULL;
2518 }
2519
2520 if (COMPLEX_MODE_P (mode))
2521 {
2522 zero = CONST0_RTX (GET_MODE_INNER (mode));
2523 if (zero != NULL)
2524 {
2525 write_complex_part (object, zero, 0);
2526 write_complex_part (object, zero, 1);
2527 return NULL;
2528 }
2529 }
4ca79136
RH
2530 }
2531
57aaef66
RH
2532 if (size == const0_rtx)
2533 return NULL;
2534
2535 align = MEM_ALIGN (object);
2536
2537 if (GET_CODE (size) == CONST_INT
2538 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2539 clear_by_pieces (object, INTVAL (size), align);
57e84f18 2540 else if (set_storage_via_setmem (object, size, const0_rtx, align))
57aaef66
RH
2541 ;
2542 else
8148fe65
JJ
2543 return clear_storage_via_libcall (object, size,
2544 method == BLOCK_OP_TAILCALL);
57aaef66
RH
2545
2546 return NULL;
4ca79136
RH
2547}
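
/* A minimal sketch, guarded out and never compiled: zero a 32-byte BLKmode
   object.  The MEM and the size are hypothetical; as the code above shows,
   the routine picks between a plain move, clear_by_pieces, a setmem pattern,
   or a memset library call.  */
#if 0
static void
example_clear_storage (rtx blk_mem)
{
  clear_storage (blk_mem, GEN_INT (32), BLOCK_OP_NORMAL);
}
#endif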
2548
8f99553f 2549/* A subroutine of clear_storage. Expand a call to memset.
4ca79136 2550 Return the return value of memset, 0 otherwise. */
9de08200 2551
4ca79136 2552static rtx
8148fe65 2553clear_storage_via_libcall (rtx object, rtx size, bool tailcall)
4ca79136
RH
2554{
2555 tree call_expr, arg_list, fn, object_tree, size_tree;
2556 enum machine_mode size_mode;
2557 rtx retval;
9de08200 2558
ad76cef8
PB
2559 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
 2560 place those pseudos into a VAR_DECL and use them later. */
52cf7115 2561
4ca79136 2562 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
52cf7115 2563
8f99553f 2564 size_mode = TYPE_MODE (sizetype);
4ca79136
RH
2565 size = convert_to_mode (size_mode, size, 1);
2566 size = copy_to_mode_reg (size_mode, size);
52cf7115 2567
4ca79136
RH
2568 /* It is incorrect to use the libcall calling conventions to call
2569 memset in this context. This could be a user call to memset and
2570 the user may wish to examine the return value from memset. For
2571 targets where libcalls and normal calls have different conventions
8f99553f 2572 for returning pointers, we could end up generating incorrect code. */
4bc973ae 2573
4ca79136 2574 object_tree = make_tree (ptr_type_node, object);
8f99553f 2575 size_tree = make_tree (sizetype, size);
4ca79136
RH
2576
2577 fn = clear_storage_libcall_fn (true);
2578 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
8f99553f 2579 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
4ca79136
RH
2580 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2581
2582 /* Now we have to build up the CALL_EXPR itself. */
2583 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
3244e67d
RS
2584 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2585 call_expr, arg_list, NULL_TREE);
8148fe65 2586 CALL_EXPR_TAILCALL (call_expr) = tailcall;
4ca79136 2587
84217346 2588 retval = expand_normal (call_expr);
4ca79136 2589
8f99553f 2590 return retval;
4ca79136
RH
2591}
2592
2593/* A subroutine of clear_storage_via_libcall. Create the tree node
2594 for the function we use for block clears. The first time FOR_CALL
2595 is true, we call assemble_external. */
2596
2597static GTY(()) tree block_clear_fn;
66c60e67 2598
9661b15f 2599void
502b8322 2600init_block_clear_fn (const char *asmspec)
4ca79136 2601{
9661b15f 2602 if (!block_clear_fn)
4ca79136 2603 {
9661b15f
JJ
2604 tree fn, args;
2605
8f99553f
JM
2606 fn = get_identifier ("memset");
2607 args = build_function_type_list (ptr_type_node, ptr_type_node,
2608 integer_type_node, sizetype,
2609 NULL_TREE);
4ca79136
RH
2610
2611 fn = build_decl (FUNCTION_DECL, fn, args);
2612 DECL_EXTERNAL (fn) = 1;
2613 TREE_PUBLIC (fn) = 1;
2614 DECL_ARTIFICIAL (fn) = 1;
2615 TREE_NOTHROW (fn) = 1;
5b5cba1f
JM
2616 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2617 DECL_VISIBILITY_SPECIFIED (fn) = 1;
4ca79136
RH
2618
2619 block_clear_fn = fn;
bbf6f052 2620 }
e9a25f70 2621
9661b15f 2622 if (asmspec)
0e6df31e 2623 set_user_assembler_name (block_clear_fn, asmspec);
9661b15f
JJ
2624}
2625
2626static tree
502b8322 2627clear_storage_libcall_fn (int for_call)
9661b15f
JJ
2628{
2629 static bool emitted_extern;
2630
2631 if (!block_clear_fn)
2632 init_block_clear_fn (NULL);
2633
4ca79136
RH
2634 if (for_call && !emitted_extern)
2635 {
2636 emitted_extern = true;
0e6df31e 2637 make_decl_rtl (block_clear_fn);
9661b15f 2638 assemble_external (block_clear_fn);
4ca79136 2639 }
bbf6f052 2640
9661b15f 2641 return block_clear_fn;
4ca79136 2642}
57e84f18
AS
2643\f
2644/* Expand a setmem pattern; return true if successful. */
2645
2646bool
2647set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align)
2648{
2649 /* Try the most limited insn first, because there's no point
2650 including more than one in the machine description unless
2651 the more limited one has some advantage. */
2652
2653 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2654 enum machine_mode mode;
2655
2656 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2657 mode = GET_MODE_WIDER_MODE (mode))
2658 {
2659 enum insn_code code = setmem_optab[(int) mode];
2660 insn_operand_predicate_fn pred;
2661
2662 if (code != CODE_FOR_nothing
2663 /* We don't need MODE to be narrower than
2664 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2665 the mode mask, as it is returned by the macro, it will
2666 definitely be less than the actual mode mask. */
2667 && ((GET_CODE (size) == CONST_INT
2668 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2669 <= (GET_MODE_MASK (mode) >> 1)))
2670 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2671 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2672 || (*pred) (object, BLKmode))
2673 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
2674 || (*pred) (opalign, VOIDmode)))
2675 {
9ed92901
AS
2676 rtx opsize, opchar;
2677 enum machine_mode char_mode;
57e84f18
AS
2678 rtx last = get_last_insn ();
2679 rtx pat;
2680
2681 opsize = convert_to_mode (mode, size, 1);
2682 pred = insn_data[(int) code].operand[1].predicate;
2683 if (pred != 0 && ! (*pred) (opsize, mode))
2684 opsize = copy_to_mode_reg (mode, opsize);
2685
9ed92901
AS
2686 opchar = val;
2687 char_mode = insn_data[(int) code].operand[2].mode;
2688 if (char_mode != VOIDmode)
2689 {
2690 opchar = convert_to_mode (char_mode, opchar, 1);
2691 pred = insn_data[(int) code].operand[2].predicate;
2692 if (pred != 0 && ! (*pred) (opchar, char_mode))
2693 opchar = copy_to_mode_reg (char_mode, opchar);
2694 }
57e84f18
AS
2695
2696 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
2697 if (pat)
2698 {
2699 emit_insn (pat);
2700 return true;
2701 }
2702 else
2703 delete_insns_since (last);
2704 }
2705 }
2706
2707 return false;
2708}
2709
4ca79136 2710\f
1466e387
RH
2711/* Write to one of the components of the complex value CPLX. Write VAL to
 2712 the real part if IMAG_P is false, and the imaginary part if it's true. */
bbf6f052 2713
1466e387
RH
2714static void
2715write_complex_part (rtx cplx, rtx val, bool imag_p)
2716{
ddf4e03f
RH
2717 enum machine_mode cmode;
2718 enum machine_mode imode;
2719 unsigned ibitsize;
2720
1466e387 2721 if (GET_CODE (cplx) == CONCAT)
1466e387 2722 {
ddf4e03f
RH
2723 emit_move_insn (XEXP (cplx, imag_p), val);
2724 return;
2725 }
2726
2727 cmode = GET_MODE (cplx);
2728 imode = GET_MODE_INNER (cmode);
2729 ibitsize = GET_MODE_BITSIZE (imode);
bbf6f052 2730
7a31c801
DE
2731 /* For MEMs simplify_gen_subreg may generate an invalid new address
2732 because, e.g., the original address is considered mode-dependent
2733 by the target, which restricts simplify_subreg from invoking
2734 adjust_address_nv. Instead of preparing fallback support for an
2735 invalid address, we call adjust_address_nv directly. */
2736 if (MEM_P (cplx))
22469409
BW
2737 {
2738 emit_move_insn (adjust_address_nv (cplx, imode,
2739 imag_p ? GET_MODE_SIZE (imode) : 0),
2740 val);
2741 return;
2742 }
7a31c801 2743
ddf4e03f
RH
2744 /* If the sub-object is at least word sized, then we know that subregging
2745 will work. This special case is important, since store_bit_field
2746 wants to operate on integer modes, and there's rarely an OImode to
2747 correspond to TCmode. */
36d7571c
EB
2748 if (ibitsize >= BITS_PER_WORD
2749 /* For hard regs we have exact predicates. Assume we can split
2750 the original object if it spans an even number of hard regs.
2751 This special case is important for SCmode on 64-bit platforms
2752 where the natural size of floating-point regs is 32-bit. */
2ca202e7 2753 || (REG_P (cplx)
36d7571c 2754 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
7a31c801 2755 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
ddf4e03f
RH
2756 {
2757 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2758 imag_p ? GET_MODE_SIZE (imode) : 0);
36d7571c
EB
2759 if (part)
2760 {
2761 emit_move_insn (part, val);
2762 return;
2763 }
2764 else
2765 /* simplify_gen_subreg may fail for sub-word MEMs. */
2766 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
1466e387 2767 }
36d7571c
EB
2768
2769 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
1466e387
RH
2770}
2771
2772/* Extract one of the components of the complex value CPLX. Extract the
2773 real part if IMAG_P is false, and the imaginary part if it's true. */
2774
2775static rtx
2776read_complex_part (rtx cplx, bool imag_p)
bbf6f052 2777{
1466e387
RH
2778 enum machine_mode cmode, imode;
2779 unsigned ibitsize;
bbf6f052 2780
1466e387
RH
2781 if (GET_CODE (cplx) == CONCAT)
2782 return XEXP (cplx, imag_p);
bbf6f052 2783
1466e387
RH
2784 cmode = GET_MODE (cplx);
2785 imode = GET_MODE_INNER (cmode);
2786 ibitsize = GET_MODE_BITSIZE (imode);
2787
2788 /* Special case reads from complex constants that got spilled to memory. */
2789 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
de1b33dd 2790 {
1466e387
RH
2791 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2792 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2793 {
2794 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2795 if (CONSTANT_CLASS_P (part))
2796 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2797 }
2798 }
51286de6 2799
7a31c801
DE
2800 /* For MEMs simplify_gen_subreg may generate an invalid new address
2801 because, e.g., the original address is considered mode-dependent
2802 by the target, which restricts simplify_subreg from invoking
2803 adjust_address_nv. Instead of preparing fallback support for an
2804 invalid address, we call adjust_address_nv directly. */
2805 if (MEM_P (cplx))
2806 return adjust_address_nv (cplx, imode,
2807 imag_p ? GET_MODE_SIZE (imode) : 0);
2808
ddf4e03f
RH
2809 /* If the sub-object is at least word sized, then we know that subregging
2810 will work. This special case is important, since extract_bit_field
2811 wants to operate on integer modes, and there's rarely an OImode to
2812 correspond to TCmode. */
36d7571c
EB
2813 if (ibitsize >= BITS_PER_WORD
2814 /* For hard regs we have exact predicates. Assume we can split
2815 the original object if it spans an even number of hard regs.
2816 This special case is important for SCmode on 64-bit platforms
2817 where the natural size of floating-point regs is 32-bit. */
2ca202e7 2818 || (REG_P (cplx)
36d7571c 2819 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
7a31c801 2820 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
ddf4e03f
RH
2821 {
2822 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2823 imag_p ? GET_MODE_SIZE (imode) : 0);
36d7571c
EB
2824 if (ret)
2825 return ret;
2826 else
2827 /* simplify_gen_subreg may fail for sub-word MEMs. */
2828 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
ddf4e03f
RH
2829 }
2830
1466e387
RH
2831 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2832 true, NULL_RTX, imode, imode);
2833}
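
/* A sketch of how the two static helpers pair up within this file, guarded
   out and never compiled: swap the real and imaginary parts of Y into X,
   assuming X and Y share the same complex mode.  */
#if 0
static void
example_swap_complex_parts (rtx x, rtx y)
{
  write_complex_part (x, read_complex_part (y, true), false);
  write_complex_part (x, read_complex_part (y, false), true);
}
#endif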
2834\f
539eaa3a 2835/* A subroutine of emit_move_insn_1. Yet another lowpart generator.
074e6d01 2836 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
539eaa3a
RH
2837 represented in NEW_MODE. If FORCE is true, this will never happen, as
2838 we'll force-create a SUBREG if needed. */
0c19a26f 2839
1466e387 2840static rtx
074e6d01 2841emit_move_change_mode (enum machine_mode new_mode,
539eaa3a 2842 enum machine_mode old_mode, rtx x, bool force)
1466e387 2843{
074e6d01 2844 rtx ret;
1466e387 2845
ef7befe0 2846 if (MEM_P (x))
1466e387 2847 {
ef7befe0
BE
2848 /* We don't have to worry about changing the address since the
2849 size in bytes is supposed to be the same. */
2850 if (reload_in_progress)
2851 {
2852 /* Copy the MEM to change the mode and move any
2853 substitutions from the old MEM to the new one. */
2854 ret = adjust_address_nv (x, new_mode, 0);
2855 copy_replacements (x, ret);
2856 }
2857 else
2858 ret = adjust_address (x, new_mode, 0);
de1b33dd 2859 }
1466e387
RH
2860 else
2861 {
35fd3193 2862 /* Note that we do want simplify_subreg's behavior of validating
074e6d01
RH
2863 that the new mode is ok for a hard register. If we were to use
2864 simplify_gen_subreg, we would create the subreg, but would
2865 probably run into the target not being able to implement it. */
539eaa3a
RH
2866 /* Except, of course, when FORCE is true, when this is exactly what
2867 we want. Which is needed for CCmodes on some targets. */
2868 if (force)
2869 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
2870 else
2871 ret = simplify_subreg (new_mode, x, old_mode, 0);
1466e387 2872 }
bbf6f052 2873
074e6d01
RH
2874 return ret;
2875}
2876
1466e387
RH
2877/* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2878 an integer mode of the same size as MODE. Returns the instruction
2879 emitted, or NULL if such a move could not be generated. */
bbf6f052 2880
1466e387 2881static rtx
652b0932 2882emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
1466e387
RH
2883{
2884 enum machine_mode imode;
2885 enum insn_code code;
bbf6f052 2886
1466e387
RH
2887 /* There must exist a mode of the exact size we require. */
2888 imode = int_mode_for_mode (mode);
2889 if (imode == BLKmode)
2890 return NULL_RTX;
de1b33dd 2891
1466e387
RH
2892 /* The target must support moves in this mode. */
2893 code = mov_optab->handlers[imode].insn_code;
2894 if (code == CODE_FOR_nothing)
2895 return NULL_RTX;
de1b33dd 2896
652b0932 2897 x = emit_move_change_mode (imode, mode, x, force);
539eaa3a
RH
2898 if (x == NULL_RTX)
2899 return NULL_RTX;
652b0932 2900 y = emit_move_change_mode (imode, mode, y, force);
539eaa3a
RH
2901 if (y == NULL_RTX)
2902 return NULL_RTX;
2903 return emit_insn (GEN_FCN (code) (x, y));
261c4230
RS
2904}
2905
1466e387
RH
2906/* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
2907 Return an equivalent MEM that does not use an auto-increment. */
261c4230 2908
1466e387
RH
2909static rtx
2910emit_move_resolve_push (enum machine_mode mode, rtx x)
261c4230 2911{
1466e387
RH
2912 enum rtx_code code = GET_CODE (XEXP (x, 0));
2913 HOST_WIDE_INT adjust;
2914 rtx temp;
261c4230 2915
1466e387
RH
2916 adjust = GET_MODE_SIZE (mode);
2917#ifdef PUSH_ROUNDING
2918 adjust = PUSH_ROUNDING (adjust);
2919#endif
2920 if (code == PRE_DEC || code == POST_DEC)
2921 adjust = -adjust;
6541fe75
JJ
2922 else if (code == PRE_MODIFY || code == POST_MODIFY)
2923 {
2924 rtx expr = XEXP (XEXP (x, 0), 1);
2925 HOST_WIDE_INT val;
2926
2927 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
2928 gcc_assert (GET_CODE (XEXP (expr, 1)) == CONST_INT);
2929 val = INTVAL (XEXP (expr, 1));
2930 if (GET_CODE (expr) == MINUS)
2931 val = -val;
2932 gcc_assert (adjust == val || adjust == -val);
2933 adjust = val;
2934 }
76bbe028 2935
1466e387
RH
2936 /* Do not use anti_adjust_stack, since we don't want to update
2937 stack_pointer_delta. */
2938 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
2939 GEN_INT (adjust), stack_pointer_rtx,
2940 0, OPTAB_LIB_WIDEN);
2941 if (temp != stack_pointer_rtx)
2942 emit_move_insn (stack_pointer_rtx, temp);
bbf6f052 2943
1466e387 2944 switch (code)
7308a047 2945 {
1466e387
RH
2946 case PRE_INC:
2947 case PRE_DEC:
6541fe75 2948 case PRE_MODIFY:
1466e387
RH
2949 temp = stack_pointer_rtx;
2950 break;
2951 case POST_INC:
1466e387 2952 case POST_DEC:
6541fe75
JJ
2953 case POST_MODIFY:
2954 temp = plus_constant (stack_pointer_rtx, -adjust);
1466e387
RH
2955 break;
2956 default:
2957 gcc_unreachable ();
2958 }
7308a047 2959
1466e387
RH
2960 return replace_equiv_address (x, temp);
2961}
1a06f5fe 2962
1466e387
RH
2963/* A subroutine of emit_move_complex. Generate a move from Y into X.
2964 X is known to satisfy push_operand, and MODE is known to be complex.
2965 Returns the last instruction emitted. */
bb93b973 2966
1466e387
RH
2967static rtx
2968emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
2969{
2970 enum machine_mode submode = GET_MODE_INNER (mode);
2971 bool imag_first;
bb93b973 2972
1466e387
RH
2973#ifdef PUSH_ROUNDING
2974 unsigned int submodesize = GET_MODE_SIZE (submode);
bb93b973 2975
1466e387
RH
2976 /* In case we output to the stack, but the size is smaller than the
2977 machine can push exactly, we need to use move instructions. */
2978 if (PUSH_ROUNDING (submodesize) != submodesize)
2979 {
2980 x = emit_move_resolve_push (mode, x);
2981 return emit_move_insn (x, y);
2982 }
79ce92d7 2983#endif
7308a047 2984
1466e387
RH
2985 /* Note that the real part always precedes the imag part in memory
2986 regardless of machine's endianness. */
2987 switch (GET_CODE (XEXP (x, 0)))
2988 {
2989 case PRE_DEC:
2990 case POST_DEC:
2991 imag_first = true;
2992 break;
2993 case PRE_INC:
2994 case POST_INC:
2995 imag_first = false;
2996 break;
2997 default:
2998 gcc_unreachable ();
2999 }
beb72684 3000
1466e387
RH
3001 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3002 read_complex_part (y, imag_first));
3003 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3004 read_complex_part (y, !imag_first));
3005}
405f63da 3006
1466e387
RH
3007/* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3008 MODE is known to be complex. Returns the last instruction emitted. */
beb72684 3009
1466e387
RH
3010static rtx
3011emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3012{
3013 bool try_int;
405f63da 3014
1466e387
RH
3015 /* Need to take special care for pushes, to maintain proper ordering
3016 of the data, and possibly extra padding. */
3017 if (push_operand (x, mode))
3018 return emit_move_complex_push (mode, x, y);
7308a047 3019
1466e387
RH
3020 /* See if we can coerce the target into moving both values at once. */
3021
c6506442
DE
3022 /* Move floating point as parts. */
3023 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3024 && mov_optab->handlers[GET_MODE_INNER (mode)].insn_code != CODE_FOR_nothing)
3025 try_int = false;
1466e387 3026 /* Not possible if the values are inherently not adjacent. */
c6506442 3027 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
1466e387
RH
3028 try_int = false;
3029 /* Is possible if both are registers (or subregs of registers). */
3030 else if (register_operand (x, mode) && register_operand (y, mode))
3031 try_int = true;
3032 /* If one of the operands is a memory, and alignment constraints
3033 are friendly enough, we may be able to do combined memory operations.
3034 We do not attempt this if Y is a constant because that combination is
3035 usually better with the by-parts thing below. */
3036 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3037 && (!STRICT_ALIGNMENT
3038 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3039 try_int = true;
3040 else
3041 try_int = false;
3042
3043 if (try_int)
a3600c71 3044 {
c6506442
DE
3045 rtx ret;
3046
3047 /* For memory to memory moves, optimal behavior can be had with the
3048 existing block move logic. */
3049 if (MEM_P (x) && MEM_P (y))
3050 {
3051 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3052 BLOCK_OP_NO_LIBCALL);
3053 return get_last_insn ();
3054 }
3055
652b0932 3056 ret = emit_move_via_integer (mode, x, y, true);
1466e387
RH
3057 if (ret)
3058 return ret;
3059 }
a3600c71 3060
1466e387
RH
3061 /* Show the output dies here. This is necessary for SUBREGs
3062 of pseudos since we cannot track their lifetimes correctly;
3063 hard regs shouldn't appear here except as return values. */
3064 if (!reload_completed && !reload_in_progress
3065 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3066 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
a3600c71 3067
1466e387
RH
3068 write_complex_part (x, read_complex_part (y, false), false);
3069 write_complex_part (x, read_complex_part (y, true), true);
3070 return get_last_insn ();
3071}
a3600c71 3072
1466e387
RH
3073/* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3074 MODE is known to be MODE_CC. Returns the last instruction emitted. */
a3600c71 3075
1466e387
RH
3076static rtx
3077emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3078{
3079 rtx ret;
a3600c71 3080
1466e387
RH
3081 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3082 if (mode != CCmode)
3083 {
3084 enum insn_code code = mov_optab->handlers[CCmode].insn_code;
3085 if (code != CODE_FOR_nothing)
539eaa3a
RH
3086 {
3087 x = emit_move_change_mode (CCmode, mode, x, true);
3088 y = emit_move_change_mode (CCmode, mode, y, true);
3089 return emit_insn (GEN_FCN (code) (x, y));
3090 }
1466e387
RH
3091 }
3092
3093 /* Otherwise, find the MODE_INT mode of the same width. */
652b0932 3094 ret = emit_move_via_integer (mode, x, y, false);
1466e387
RH
3095 gcc_assert (ret != NULL);
3096 return ret;
3097}
3098
3099/* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3100 MODE is any multi-word or full-word mode that lacks a move_insn
3101 pattern. Note that you will get better code if you define such
3102 patterns, even if they must turn into multiple assembler instructions. */
3103
3104static rtx
3105emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3106{
3107 rtx last_insn = 0;
3108 rtx seq, inner;
3109 bool need_clobber;
3110 int i;
3111
3112 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3113
3114 /* If X is a push on the stack, do the push now and replace
3115 X with a reference to the stack pointer. */
3116 if (push_operand (x, mode))
3117 x = emit_move_resolve_push (mode, x);
3118
3119 /* If we are in reload, see if either operand is a MEM whose address
3120 is scheduled for replacement. */
3121 if (reload_in_progress && MEM_P (x)
3122 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3123 x = replace_equiv_address_nv (x, inner);
3124 if (reload_in_progress && MEM_P (y)
3125 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3126 y = replace_equiv_address_nv (y, inner);
3127
3128 start_sequence ();
3129
3130 need_clobber = false;
3131 for (i = 0;
3132 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3133 i++)
3134 {
3135 rtx xpart = operand_subword (x, i, 1, mode);
3136 rtx ypart = operand_subword (y, i, 1, mode);
3137
3138 /* If we can't get a part of Y, put Y into memory if it is a
535a42b1
NS
3139 constant. Otherwise, force it into a register. Then we must
3140 be able to get a part of Y. */
1466e387 3141 if (ypart == 0 && CONSTANT_P (y))
a3600c71 3142 {
aacd3885 3143 y = use_anchored_address (force_const_mem (mode, y));
1466e387 3144 ypart = operand_subword (y, i, 1, mode);
a3600c71 3145 }
1466e387
RH
3146 else if (ypart == 0)
3147 ypart = operand_subword_force (y, i, mode);
3148
3149 gcc_assert (xpart && ypart);
3150
3151 need_clobber |= (GET_CODE (xpart) == SUBREG);
502b8322 3152
1466e387 3153 last_insn = emit_move_insn (xpart, ypart);
a3600c71
HPN
3154 }
3155
1466e387
RH
3156 seq = get_insns ();
3157 end_sequence ();
3158
3159 /* Show the output dies here. This is necessary for SUBREGs
3160 of pseudos since we cannot track their lifetimes correctly;
3161 hard regs shouldn't appear here except as return values.
3162 We never want to emit such a clobber after reload. */
3163 if (x != y
3164 && ! (reload_in_progress || reload_completed)
3165 && need_clobber != 0)
3166 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3167
3168 emit_insn (seq);
3169
3170 return last_insn;
3171}
3172
3173/* Low level part of emit_move_insn.
3174 Called just like emit_move_insn, but assumes X and Y
3175 are basically valid. */
3176
3177rtx
3178emit_move_insn_1 (rtx x, rtx y)
3179{
3180 enum machine_mode mode = GET_MODE (x);
3181 enum insn_code code;
3182
3183 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3184
3185 code = mov_optab->handlers[mode].insn_code;
3186 if (code != CODE_FOR_nothing)
3187 return emit_insn (GEN_FCN (code) (x, y));
3188
3189 /* Expand complex moves by moving real part and imag part. */
3190 if (COMPLEX_MODE_P (mode))
3191 return emit_move_complex (mode, x, y);
3192
ef7befe0
BE
3193 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT)
3194 {
3195 rtx result = emit_move_via_integer (mode, x, y, true);
3196
3197 /* If we can't find an integer mode, use multi words. */
3198 if (result)
3199 return result;
3200 else
3201 return emit_move_multi_word (mode, x, y);
3202 }
3203
1466e387
RH
3204 if (GET_MODE_CLASS (mode) == MODE_CC)
3205 return emit_move_ccmode (mode, x, y);
3206
5581fc91
RS
3207 /* Try using a move pattern for the corresponding integer mode. This is
3208 only safe when simplify_subreg can convert MODE constants into integer
3209 constants. At present, it can only do this reliably if the value
3210 fits within a HOST_WIDE_INT. */
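/* For example, on a target without a movsf pattern an SFmode move can be
   carried out as an SImode move of the same 32 bits, which is what
   emit_move_via_integer arranges below.  */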
1466e387 3211 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
bbf6f052 3212 {
652b0932 3213 rtx ret = emit_move_via_integer (mode, x, y, false);
1466e387
RH
3214 if (ret)
3215 return ret;
3216 }
0fb7aeda 3217
1466e387
RH
3218 return emit_move_multi_word (mode, x, y);
3219}
918a6124 3220
1466e387
RH
3221/* Generate code to copy Y into X.
3222 Both Y and X must have the same mode, except that
3223 Y can be a constant with VOIDmode.
3224 This mode cannot be BLKmode; use emit_block_move for that.
3a94c984 3225
1466e387 3226 Return the last instruction emitted. */
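/* A typical use is emit_move_insn (target, const0_rtx) to store zero into a
   register or a memory location; a constant that is not legitimate on the
   target is forced into the constant pool (or left to the move expander)
   below before the actual move is emitted.  */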
3ef1eef4 3227
1466e387
RH
3228rtx
3229emit_move_insn (rtx x, rtx y)
3230{
3231 enum machine_mode mode = GET_MODE (x);
3232 rtx y_cst = NULL_RTX;
3233 rtx last_insn, set;
15a7a8ec 3234
1466e387
RH
3235 gcc_assert (mode != BLKmode
3236 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
bbf6f052 3237
1466e387
RH
3238 if (CONSTANT_P (y))
3239 {
3240 if (optimize
3241 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3242 && (last_insn = compress_float_constant (x, y)))
3243 return last_insn;
bbf6f052 3244
1466e387 3245 y_cst = y;
bbf6f052 3246
1466e387
RH
3247 if (!LEGITIMATE_CONSTANT_P (y))
3248 {
3249 y = force_const_mem (mode, y);
235ae7be 3250
1466e387
RH
3251 /* If the target's cannot_force_const_mem prevented the spill,
3252 assume that the target's move expanders will also take care
3253 of the non-legitimate constant. */
3254 if (!y)
3255 y = y_cst;
aacd3885
RS
3256 else
3257 y = use_anchored_address (y);
bbf6f052 3258 }
1466e387 3259 }
6551fa4d 3260
1466e387
RH
3261 /* If X or Y are memory references, verify that their addresses are valid
3262 for the machine. */
3263 if (MEM_P (x)
3264 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3265 && ! push_operand (x, GET_MODE (x)))
3266 || (flag_force_addr
3267 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3268 x = validize_mem (x);
235ae7be 3269
1466e387
RH
3270 if (MEM_P (y)
3271 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3272 || (flag_force_addr
3273 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3274 y = validize_mem (y);
235ae7be 3275
1466e387 3276 gcc_assert (mode != BLKmode);
235ae7be 3277
1466e387
RH
3278 last_insn = emit_move_insn_1 (x, y);
3279
3280 if (y_cst && REG_P (x)
3281 && (set = single_set (last_insn)) != NULL_RTX
3282 && SET_DEST (set) == x
3283 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3284 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3285
3286 return last_insn;
bbf6f052 3287}
51286de6
RH
3288
3289/* If Y is representable exactly in a narrower mode, and the target can
3290 perform the extension directly from constant or memory, then emit the
3291 move as an extension. */
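/* For example, a DFmode constant such as 1.5 is exactly representable in
   SFmode, so on a target that can extend SFmode to DFmode directly from
   memory or from an immediate, the move can be emitted as a float_extend
   of the narrower constant, which typically saves constant pool space.  */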
3292
3293static rtx
502b8322 3294compress_float_constant (rtx x, rtx y)
51286de6
RH
3295{
3296 enum machine_mode dstmode = GET_MODE (x);
3297 enum machine_mode orig_srcmode = GET_MODE (y);
3298 enum machine_mode srcmode;
3299 REAL_VALUE_TYPE r;
e4541b7a 3300 int oldcost, newcost;
51286de6
RH
3301
3302 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3303
e4541b7a
DJ
3304 if (LEGITIMATE_CONSTANT_P (y))
3305 oldcost = rtx_cost (y, SET);
3306 else
3307 oldcost = rtx_cost (force_const_mem (dstmode, y), SET);
3308
51286de6
RH
3309 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3310 srcmode != orig_srcmode;
3311 srcmode = GET_MODE_WIDER_MODE (srcmode))
3312 {
3313 enum insn_code ic;
3314 rtx trunc_y, last_insn;
3315
3316 /* Skip if the target can't extend this way. */
3317 ic = can_extend_p (dstmode, srcmode, 0);
3318 if (ic == CODE_FOR_nothing)
3319 continue;
3320
3321 /* Skip if the narrowed value isn't exact. */
3322 if (! exact_real_truncate (srcmode, &r))
3323 continue;
3324
3325 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3326
3327 if (LEGITIMATE_CONSTANT_P (trunc_y))
3328 {
3329 /* Skip if the target needs extra instructions to perform
3330 the extension. */
3331 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3332 continue;
e4541b7a
DJ
3333 /* This is valid, but may not be cheaper than the original. */
3334 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3335 if (oldcost < newcost)
3336 continue;
51286de6
RH
3337 }
3338 else if (float_extend_from_mem[dstmode][srcmode])
e4541b7a
DJ
3339 {
3340 trunc_y = force_const_mem (srcmode, trunc_y);
3341 /* This is valid, but may not be cheaper than the original. */
3342 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3343 if (oldcost < newcost)
3344 continue;
3345 trunc_y = validize_mem (trunc_y);
3346 }
51286de6
RH
3347 else
3348 continue;
d763e130
RS
3349
3350 /* For CSE's benefit, force the compressed constant pool entry
3351 into a new pseudo. This constant may be used in different modes,
3352 and if not, combine will put things back together for us. */
3353 trunc_y = force_reg (srcmode, trunc_y);
51286de6
RH
3354 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3355 last_insn = get_last_insn ();
3356
f8cfc6aa 3357 if (REG_P (x))
0c19a26f 3358 set_unique_reg_note (last_insn, REG_EQUAL, y);
51286de6
RH
3359
3360 return last_insn;
3361 }
3362
3363 return NULL_RTX;
3364}
bbf6f052
RK
3365\f
3366/* Pushing data onto the stack. */
3367
3368/* Push a block of length SIZE (perhaps variable)
3369 and return an rtx to address the beginning of the block.
bbf6f052
RK
3370 The value may be virtual_outgoing_args_rtx.
3371
3372 EXTRA is the number of bytes of padding to push in addition to SIZE.
3373 BELOW nonzero means this padding comes at low addresses;
3374 otherwise, the padding comes at high addresses. */
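/* For instance, push_block (GEN_INT (16), 0, 0) simply anti-adjusts the
   stack by 16 bytes and returns an address within the outgoing argument
   area; a variable SIZE is first copied into a register, as done below.  */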
3375
3376rtx
502b8322 3377push_block (rtx size, int extra, int below)
bbf6f052 3378{
b3694847 3379 rtx temp;
88f63c77
RK
3380
3381 size = convert_modes (Pmode, ptr_mode, size, 1);
bbf6f052
RK
3382 if (CONSTANT_P (size))
3383 anti_adjust_stack (plus_constant (size, extra));
f8cfc6aa 3384 else if (REG_P (size) && extra == 0)
bbf6f052
RK
3385 anti_adjust_stack (size);
3386 else
3387 {
ce48579b 3388 temp = copy_to_mode_reg (Pmode, size);
bbf6f052 3389 if (extra != 0)
906c4e36 3390 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
bbf6f052
RK
3391 temp, 0, OPTAB_LIB_WIDEN);
3392 anti_adjust_stack (temp);
3393 }
3394
f73ad30e 3395#ifndef STACK_GROWS_DOWNWARD
f73ad30e 3396 if (0)
f73ad30e
JH
3397#else
3398 if (1)
bbf6f052 3399#endif
f73ad30e 3400 {
f73ad30e
JH
3401 temp = virtual_outgoing_args_rtx;
3402 if (extra != 0 && below)
3403 temp = plus_constant (temp, extra);
3404 }
3405 else
3406 {
3407 if (GET_CODE (size) == CONST_INT)
3408 temp = plus_constant (virtual_outgoing_args_rtx,
3a94c984 3409 -INTVAL (size) - (below ? 0 : extra));
f73ad30e
JH
3410 else if (extra != 0 && !below)
3411 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3bdf5ad1 3412 negate_rtx (Pmode, plus_constant (size, extra)));
f73ad30e
JH
3413 else
3414 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3415 negate_rtx (Pmode, size));
3416 }
bbf6f052
RK
3417
3418 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3419}
3420
21d93687
RK
3421#ifdef PUSH_ROUNDING
3422
566aa174 3423/* Emit single push insn. */
21d93687 3424
566aa174 3425static void
502b8322 3426emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
566aa174 3427{
566aa174 3428 rtx dest_addr;
918a6124 3429 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
566aa174 3430 rtx dest;
371b8fc0
JH
3431 enum insn_code icode;
3432 insn_operand_predicate_fn pred;
566aa174 3433
371b8fc0
JH
3434 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3435 /* If there is a push pattern, use it. Otherwise try the old way of
3436 throwing a MEM representing the push operation at the move expander. */
3437 icode = push_optab->handlers[(int) mode].insn_code;
3438 if (icode != CODE_FOR_nothing)
3439 {
3440 if (((pred = insn_data[(int) icode].operand[0].predicate)
505ddab6 3441 && !((*pred) (x, mode))))
371b8fc0
JH
3442 x = force_reg (mode, x);
3443 emit_insn (GEN_FCN (icode) (x));
3444 return;
3445 }
566aa174
JH
3446 if (GET_MODE_SIZE (mode) == rounded_size)
3447 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
329d586f
KH
3448 /* If we are to pad downward, adjust the stack pointer first and
3449 then store X into the stack location using an offset. This is
3450 because emit_move_insn does not know how to pad; it does not have
3451 access to type. */
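/* For example (assuming a downward-growing stack, PRE_DEC pushes and
   PUSH_ROUNDING rounding to 4 bytes), a single QImode byte padded
   downward is pushed by dropping the stack pointer by 4 and then
   storing the byte at sp + 3, leaving the three padding bytes below it.  */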
3452 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3453 {
3454 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3455 HOST_WIDE_INT offset;
3456
3457 emit_move_insn (stack_pointer_rtx,
3458 expand_binop (Pmode,
3459#ifdef STACK_GROWS_DOWNWARD
3460 sub_optab,
3461#else
3462 add_optab,
3463#endif
3464 stack_pointer_rtx,
3465 GEN_INT (rounded_size),
3466 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3467
3468 offset = (HOST_WIDE_INT) padding_size;
3469#ifdef STACK_GROWS_DOWNWARD
3470 if (STACK_PUSH_CODE == POST_DEC)
3471 /* We have already decremented the stack pointer, so get the
3472 previous value. */
3473 offset += (HOST_WIDE_INT) rounded_size;
3474#else
3475 if (STACK_PUSH_CODE == POST_INC)
3476 /* We have already incremented the stack pointer, so get the
3477 previous value. */
3478 offset -= (HOST_WIDE_INT) rounded_size;
3479#endif
3480 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3481 }
566aa174
JH
3482 else
3483 {
3484#ifdef STACK_GROWS_DOWNWARD
329d586f 3485 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
566aa174 3486 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
505ddab6 3487 GEN_INT (-(HOST_WIDE_INT) rounded_size));
566aa174 3488#else
329d586f 3489 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
566aa174
JH
3490 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3491 GEN_INT (rounded_size));
3492#endif
3493 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3494 }
3495
3496 dest = gen_rtx_MEM (mode, dest_addr);
3497
566aa174
JH
3498 if (type != 0)
3499 {
3500 set_mem_attributes (dest, type, 1);
c3d32120
RK
3501
3502 if (flag_optimize_sibling_calls)
3503 /* Function incoming arguments may overlap with sibling call
3504 outgoing arguments and we cannot allow reordering of reads
3505 from function arguments with stores to outgoing arguments
3506 of sibling calls. */
3507 set_mem_alias_set (dest, 0);
566aa174
JH
3508 }
3509 emit_move_insn (dest, x);
566aa174 3510}
21d93687 3511#endif
566aa174 3512
bbf6f052
RK
3513/* Generate code to push X onto the stack, assuming it has mode MODE and
3514 type TYPE.
3515 MODE is redundant except when X is a CONST_INT (since they don't
3516 carry mode info).
3517 SIZE is an rtx for the size of data to be copied (in bytes),
3518 needed only if X is BLKmode.
3519
f1eaaf73 3520 ALIGN (in bits) is maximum alignment we can assume.
bbf6f052 3521
cd048831 3522 If PARTIAL and REG are both nonzero, then copy that many of the first
78a52f11
RH
3523 bytes of X into registers starting with REG, and push the rest of X.
3524 The amount of space pushed is decreased by PARTIAL bytes.
bbf6f052 3525 REG must be a hard register in this case.
cd048831
RK
3526 If REG is zero but PARTIAL is not, take all other actions for an
3527 argument partially in registers, but do not actually load any
3528 registers.
bbf6f052
RK
3529
3530 EXTRA is the amount in bytes of extra space to leave next to this arg.
6dc42e49 3531 This is ignored if an argument block has already been allocated.
bbf6f052
RK
3532
3533 On a machine that lacks real push insns, ARGS_ADDR is the address of
3534 the bottom of the argument block for this call. We use indexing off there
3535 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3536 argument block has not been preallocated.
3537
e5e809f4
JL
3538 ARGS_SO_FAR is the size of args previously pushed for this call.
3539
3540 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3541 for arguments passed in registers. If nonzero, it will be the number
3542 of bytes required. */
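/* For example (assuming a 32-bit target), an argument passed partly in
   registers with PARTIAL == 8 has its first two words loaded into REG and
   the following register by the code at the end of this function, while
   only the remaining bytes of X are actually pushed onto the stack.  */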
bbf6f052
RK
3543
3544void
502b8322
AJ
3545emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3546 unsigned int align, int partial, rtx reg, int extra,
3547 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3548 rtx alignment_pad)
bbf6f052
RK
3549{
3550 rtx xinner;
3551 enum direction stack_direction
3552#ifdef STACK_GROWS_DOWNWARD
3553 = downward;
3554#else
3555 = upward;
3556#endif
3557
3558 /* Decide where to pad the argument: `downward' for below,
3559 `upward' for above, or `none' for don't pad it.
3560 Default is below for small data on big-endian machines; else above. */
3561 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3562
0fb7aeda 3563 /* Invert direction if stack is post-decrement.
9e0e11bf
GK
3564 FIXME: why? */
3565 if (STACK_PUSH_CODE == POST_DEC)
bbf6f052
RK
3566 if (where_pad != none)
3567 where_pad = (where_pad == downward ? upward : downward);
3568
ad76cef8 3569 xinner = x;
bbf6f052
RK
3570
3571 if (mode == BLKmode)
3572 {
3573 /* Copy a block into the stack, entirely or partially. */
3574
b3694847 3575 rtx temp;
78a52f11 3576 int used;
531547e9 3577 int offset;
bbf6f052 3578 int skip;
3a94c984 3579
78a52f11
RH
3580 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3581 used = partial - offset;
531547e9 3582
5b0264cb 3583 gcc_assert (size);
bbf6f052 3584
bbf6f052
RK
3585 /* USED is now the # of bytes we need not copy to the stack
3586 because registers will take care of them. */
3587
3588 if (partial != 0)
f4ef873c 3589 xinner = adjust_address (xinner, BLKmode, used);
bbf6f052
RK
3590
3591 /* If the partial register-part of the arg counts in its stack size,
3592 skip the part of stack space corresponding to the registers.
3593 Otherwise, start copying to the beginning of the stack space,
3594 by setting SKIP to 0. */
e5e809f4 3595 skip = (reg_parm_stack_space == 0) ? 0 : used;
bbf6f052
RK
3596
3597#ifdef PUSH_ROUNDING
3598 /* Do it with several push insns if that doesn't take lots of insns
3599 and if there is no difficulty with push insns that skip bytes
3600 on the stack for alignment purposes. */
3601 if (args_addr == 0
f73ad30e 3602 && PUSH_ARGS
bbf6f052
RK
3603 && GET_CODE (size) == CONST_INT
3604 && skip == 0
f26aca6d 3605 && MEM_ALIGN (xinner) >= align
15914757 3606 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
bbf6f052
RK
3607 /* Here we avoid the case of a structure whose weak alignment
3608 forces many pushes of a small amount of data,
3609 and such small pushes do rounding that causes trouble. */
e1565e65 3610 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
19caa751 3611 || align >= BIGGEST_ALIGNMENT
f1eaaf73
DE
3612 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3613 == (align / BITS_PER_UNIT)))
bbf6f052
RK
3614 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3615 {
3616 /* Push padding now if padding above and stack grows down,
3617 or if padding below and stack grows up.
3618 But if space already allocated, this has already been done. */
3619 if (extra && args_addr == 0
3620 && where_pad != none && where_pad != stack_direction)
906c4e36 3621 anti_adjust_stack (GEN_INT (extra));
bbf6f052 3622
8fd3cf4e 3623 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
bbf6f052
RK
3624 }
3625 else
3a94c984 3626#endif /* PUSH_ROUNDING */
bbf6f052 3627 {
7ab923cc
JJ
3628 rtx target;
3629
bbf6f052
RK
3630 /* Otherwise make space on the stack and copy the data
3631 to the address of that space. */
3632
3633 /* Deduct words put into registers from the size we must copy. */
3634 if (partial != 0)
3635 {
3636 if (GET_CODE (size) == CONST_INT)
906c4e36 3637 size = GEN_INT (INTVAL (size) - used);
bbf6f052
RK
3638 else
3639 size = expand_binop (GET_MODE (size), sub_optab, size,
906c4e36
RK
3640 GEN_INT (used), NULL_RTX, 0,
3641 OPTAB_LIB_WIDEN);
bbf6f052
RK
3642 }
3643
3644 /* Get the address of the stack space.
3645 In this case, we do not deal with EXTRA separately.
3646 A single stack adjust will do. */
3647 if (! args_addr)
3648 {
3649 temp = push_block (size, extra, where_pad == downward);
3650 extra = 0;
3651 }
3652 else if (GET_CODE (args_so_far) == CONST_INT)
3653 temp = memory_address (BLKmode,
3654 plus_constant (args_addr,
3655 skip + INTVAL (args_so_far)));
3656 else
3657 temp = memory_address (BLKmode,
38a448ca
RH
3658 plus_constant (gen_rtx_PLUS (Pmode,
3659 args_addr,
3660 args_so_far),
bbf6f052 3661 skip));
4ca79136
RH
3662
3663 if (!ACCUMULATE_OUTGOING_ARGS)
3664 {
3665 /* If the source is referenced relative to the stack pointer,
3666 copy it to another register to stabilize it. We do not need
3667 to do this if we know that we won't be changing sp. */
3668
3669 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3670 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3671 temp = copy_to_reg (temp);
3672 }
3673
3a94c984 3674 target = gen_rtx_MEM (BLKmode, temp);
7ab923cc 3675
2bb16349
RH
3676 /* We do *not* set_mem_attributes here, because incoming arguments
3677 may overlap with sibling call outgoing arguments and we cannot
3678 allow reordering of reads from function arguments with stores
3679 to outgoing arguments of sibling calls. We do, however, want
3680 to record the alignment of the stack slot. */
44bb111a
RH
3681 /* ALIGN may well be better aligned than TYPE, e.g. due to
3682 PARM_BOUNDARY. Assume the caller isn't lying. */
3683 set_mem_align (target, align);
4ca79136 3684
44bb111a 3685 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
bbf6f052
RK
3686 }
3687 }
3688 else if (partial > 0)
3689 {
3690 /* Scalar partly in registers. */
3691
3692 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3693 int i;
3694 int not_stack;
78a52f11 3695 /* # bytes of start of argument
bbf6f052 3696 that we must make space for but need not store. */
ac7e839c 3697 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
bbf6f052
RK
3698 int args_offset = INTVAL (args_so_far);
3699 int skip;
3700
3701 /* Push padding now if padding above and stack grows down,
3702 or if padding below and stack grows up.
3703 But if space already allocated, this has already been done. */
3704 if (extra && args_addr == 0
3705 && where_pad != none && where_pad != stack_direction)
906c4e36 3706 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
3707
3708 /* If we make space by pushing it, we might as well push
3709 the real data. Otherwise, we can leave OFFSET nonzero
3710 and leave the space uninitialized. */
3711 if (args_addr == 0)
3712 offset = 0;
3713
3714 /* Now NOT_STACK gets the number of words that we don't need to
40b0345d 3715 allocate on the stack. Convert OFFSET to words too. */
78a52f11 3716 not_stack = (partial - offset) / UNITS_PER_WORD;
ac7e839c 3717 offset /= UNITS_PER_WORD;
bbf6f052
RK
3718
3719 /* If the partial register-part of the arg counts in its stack size,
3720 skip the part of stack space corresponding to the registers.
3721 Otherwise, start copying to the beginning of the stack space,
3722 by setting SKIP to 0. */
e5e809f4 3723 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
bbf6f052
RK
3724
3725 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3726 x = validize_mem (force_const_mem (mode, x));
3727
3728 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3729 SUBREGs of such registers are not allowed. */
f8cfc6aa 3730 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
bbf6f052
RK
3731 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3732 x = copy_to_reg (x);
3733
3734 /* Loop over all the words allocated on the stack for this arg. */
3735 /* We can do it by words, because any scalar bigger than a word
3736 has a size that is a multiple of a word. */
3737#ifndef PUSH_ARGS_REVERSED
3738 for (i = not_stack; i < size; i++)
3739#else
3740 for (i = size - 1; i >= not_stack; i--)
3741#endif
3742 if (i >= not_stack + offset)
3743 emit_push_insn (operand_subword_force (x, i, mode),
906c4e36
RK
3744 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3745 0, args_addr,
3746 GEN_INT (args_offset + ((i - not_stack + skip)
e5e809f4 3747 * UNITS_PER_WORD)),
4fc026cd 3748 reg_parm_stack_space, alignment_pad);
bbf6f052
RK
3749 }
3750 else
3751 {
3752 rtx addr;
3bdf5ad1 3753 rtx dest;
bbf6f052
RK
3754
3755 /* Push padding now if padding above and stack grows down,
3756 or if padding below and stack grows up.
3757 But if space already allocated, this has already been done. */
3758 if (extra && args_addr == 0
3759 && where_pad != none && where_pad != stack_direction)
906c4e36 3760 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
3761
3762#ifdef PUSH_ROUNDING
f73ad30e 3763 if (args_addr == 0 && PUSH_ARGS)
566aa174 3764 emit_single_push_insn (mode, x, type);
bbf6f052
RK
3765 else
3766#endif
921b3427
RK
3767 {
3768 if (GET_CODE (args_so_far) == CONST_INT)
3769 addr
3770 = memory_address (mode,
3a94c984 3771 plus_constant (args_addr,
921b3427 3772 INTVAL (args_so_far)));
3a94c984 3773 else
38a448ca
RH
3774 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3775 args_so_far));
566aa174 3776 dest = gen_rtx_MEM (mode, addr);
2bb16349
RH
3777
3778 /* We do *not* set_mem_attributes here, because incoming arguments
3779 may overlap with sibling call outgoing arguments and we cannot
3780 allow reordering of reads from function arguments with stores
3781 to outgoing arguments of sibling calls. We do, however, want
3782 to record the alignment of the stack slot. */
3783 /* ALIGN may well be better aligned than TYPE, e.g. due to
3784 PARM_BOUNDARY. Assume the caller isn't lying. */
3785 set_mem_align (dest, align);
bbf6f052 3786
566aa174 3787 emit_move_insn (dest, x);
566aa174 3788 }
bbf6f052
RK
3789 }
3790
bbf6f052
RK
3791 /* If part should go in registers, copy that part
3792 into the appropriate registers. Do this now, at the end,
3793 since mem-to-mem copies above may do function calls. */
cd048831 3794 if (partial > 0 && reg != 0)
fffa9c1d
JW
3795 {
3796 /* Handle calls that pass values in multiple non-contiguous locations.
3797 The Irix 6 ABI has examples of this. */
3798 if (GET_CODE (reg) == PARALLEL)
6e985040 3799 emit_group_load (reg, x, type, -1);
fffa9c1d 3800 else
78a52f11
RH
3801 {
3802 gcc_assert (partial % UNITS_PER_WORD == 0);
3803 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
3804 }
fffa9c1d 3805 }
bbf6f052
RK
3806
3807 if (extra && args_addr == 0 && where_pad == stack_direction)
906c4e36 3808 anti_adjust_stack (GEN_INT (extra));
3a94c984 3809
3ea2292a 3810 if (alignment_pad && args_addr == 0)
4fc026cd 3811 anti_adjust_stack (alignment_pad);
bbf6f052
RK
3812}
3813\f
296b4ed9
RK
3814/* Return X if X can be used as a subtarget in a sequence of arithmetic
3815 operations. */
3816
3817static rtx
502b8322 3818get_subtarget (rtx x)
296b4ed9 3819{
7c27e184
PB
3820 return (optimize
3821 || x == 0
296b4ed9 3822 /* Only registers can be subtargets. */
f8cfc6aa 3823 || !REG_P (x)
296b4ed9
RK
3824 /* Don't use hard regs to avoid extending their life. */
3825 || REGNO (x) < FIRST_PSEUDO_REGISTER
296b4ed9
RK
3826 ? 0 : x);
3827}
3828
8c1cfd5a
RH
3829/* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
3830 FIELD is a bitfield. Returns true if the optimization was successful,
3831 and there's nothing else to do. */
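/* For example, x.f ^= 1 on a one-bit bitfield f can be done with a single
   xor on the word containing the field, and x.f += n on a bitfield that
   occupies the topmost bits of that word needs no masking because the
   overflow simply falls off the top; both avoid an extract/modify/insert
   sequence.  */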
3832
3833static bool
3834optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3835 unsigned HOST_WIDE_INT bitpos,
3836 enum machine_mode mode1, rtx str_rtx,
3837 tree to, tree src)
3838{
3839 enum machine_mode str_mode = GET_MODE (str_rtx);
3840 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
3841 tree op0, op1;
3842 rtx value, result;
3843 optab binop;
3844
3845 if (mode1 != VOIDmode
3846 || bitsize >= BITS_PER_WORD
3847 || str_bitsize > BITS_PER_WORD
3848 || TREE_SIDE_EFFECTS (to)
3849 || TREE_THIS_VOLATILE (to))
3850 return false;
3851
3852 STRIP_NOPS (src);
3853 if (!BINARY_CLASS_P (src)
3854 || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
3855 return false;
3856
3857 op0 = TREE_OPERAND (src, 0);
3858 op1 = TREE_OPERAND (src, 1);
3859 STRIP_NOPS (op0);
3860
3861 if (!operand_equal_p (to, op0, 0))
3862 return false;
3863
3864 if (MEM_P (str_rtx))
3865 {
3866 unsigned HOST_WIDE_INT offset1;
3867
3868 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
3869 str_mode = word_mode;
3870 str_mode = get_best_mode (bitsize, bitpos,
3871 MEM_ALIGN (str_rtx), str_mode, 0);
3872 if (str_mode == VOIDmode)
3873 return false;
3874 str_bitsize = GET_MODE_BITSIZE (str_mode);
3875
3876 offset1 = bitpos;
3877 bitpos %= str_bitsize;
3878 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
3879 str_rtx = adjust_address (str_rtx, str_mode, offset1);
3880 }
3881 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
3882 return false;
3883
3884 /* If the bit field covers the whole REG/MEM, store_field
3885 will likely generate better code. */
3886 if (bitsize >= str_bitsize)
3887 return false;
3888
3889 /* We can't handle fields split across multiple entities. */
3890 if (bitpos + bitsize > str_bitsize)
3891 return false;
3892
3893 if (BYTES_BIG_ENDIAN)
3894 bitpos = str_bitsize - bitpos - bitsize;
3895
3896 switch (TREE_CODE (src))
3897 {
3898 case PLUS_EXPR:
3899 case MINUS_EXPR:
3900 /* For now, just optimize the case of the topmost bitfield
3901 where we don't need to do any masking, and also
3902 1-bit bitfields where xor can be used.
3903 We might win by one instruction for the other bitfields
3904 too if insv/extv instructions aren't used, so that
3905 can be added later. */
3906 if (bitpos + bitsize != str_bitsize
3907 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
3908 break;
3909
3910 value = expand_expr (op1, NULL_RTX, str_mode, 0);
3911 value = convert_modes (str_mode,
3912 TYPE_MODE (TREE_TYPE (op1)), value,
3913 TYPE_UNSIGNED (TREE_TYPE (op1)));
3914
3915 /* We may be accessing data outside the field, which means
3916 we can alias adjacent data. */
3917 if (MEM_P (str_rtx))
3918 {
3919 str_rtx = shallow_copy_rtx (str_rtx);
3920 set_mem_alias_set (str_rtx, 0);
3921 set_mem_expr (str_rtx, 0);
3922 }
3923
3924 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
3925 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
3926 {
3927 value = expand_and (str_mode, value, const1_rtx, NULL);
3928 binop = xor_optab;
3929 }
3930 value = expand_shift (LSHIFT_EXPR, str_mode, value,
3931 build_int_cst (NULL_TREE, bitpos),
3932 NULL_RTX, 1);
3933 result = expand_binop (str_mode, binop, str_rtx,
3934 value, str_rtx, 1, OPTAB_WIDEN);
3935 if (result != str_rtx)
3936 emit_move_insn (str_rtx, result);
3937 return true;
3938
92fb2d32
KH
3939 case BIT_IOR_EXPR:
3940 case BIT_XOR_EXPR:
3941 if (TREE_CODE (op1) != INTEGER_CST)
3942 break;
3943 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), 0);
3944 value = convert_modes (GET_MODE (str_rtx),
3945 TYPE_MODE (TREE_TYPE (op1)), value,
3946 TYPE_UNSIGNED (TREE_TYPE (op1)));
3947
3948 /* We may be accessing data outside the field, which means
3949 we can alias adjacent data. */
3950 if (MEM_P (str_rtx))
3951 {
3952 str_rtx = shallow_copy_rtx (str_rtx);
3953 set_mem_alias_set (str_rtx, 0);
3954 set_mem_expr (str_rtx, 0);
3955 }
3956
3957 binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
3958 if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3959 {
3960 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
3961 - 1);
3962 value = expand_and (GET_MODE (str_rtx), value, mask,
3963 NULL_RTX);
3964 }
3965 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
3966 build_int_cst (NULL_TREE, bitpos),
3967 NULL_RTX, 1);
3968 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
3969 value, str_rtx, 1, OPTAB_WIDEN);
3970 if (result != str_rtx)
3971 emit_move_insn (str_rtx, result);
3972 return true;
3973
8c1cfd5a
RH
3974 default:
3975 break;
3976 }
3977
3978 return false;
3979}
3980
3981
e836a5a2 3982/* Expand an assignment that stores the value of FROM into TO. */
bbf6f052 3983
e836a5a2
KH
3984void
3985expand_assignment (tree to, tree from)
bbf6f052 3986{
b3694847 3987 rtx to_rtx = 0;
bbf6f052
RK
3988 rtx result;
3989
3990 /* Don't crash if the lhs of the assignment was erroneous. */
bbf6f052 3991 if (TREE_CODE (to) == ERROR_MARK)
709f5be1 3992 {
84217346 3993 result = expand_normal (from);
e836a5a2 3994 return;
709f5be1 3995 }
bbf6f052 3996
6cc1d694
RS
3997 /* Optimize away no-op moves without side-effects. */
3998 if (operand_equal_p (to, from, 0))
3999 return;
4000
bbf6f052
RK
4001 /* Assignment of a structure component needs special treatment
4002 if the structure component's rtx is not simply a MEM.
6be58303
JW
4003 Assignment of an array element at a constant index, and assignment of
4004 an array element in an unaligned packed structure field, has the same
4005 problem. */
8c1cfd5a 4006 if (handled_component_p (to)
7c02ae17 4007 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
bbf6f052
RK
4008 {
4009 enum machine_mode mode1;
770ae6cc 4010 HOST_WIDE_INT bitsize, bitpos;
7bb0943f 4011 tree offset;
bbf6f052
RK
4012 int unsignedp;
4013 int volatilep = 0;
0088fcb1
RK
4014 tree tem;
4015
4016 push_temp_slots ();
839c4796 4017 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
2614034e 4018 &unsignedp, &volatilep, true);
bbf6f052
RK
4019
4020 /* If we are going to use store_bit_field and extract_bit_field,
4021 make sure to_rtx will be safe for multiple use. */
4022
84217346 4023 to_rtx = expand_normal (tem);
1ed1b4fb 4024
7bb0943f
RS
4025 if (offset != 0)
4026 {
1e188d1e 4027 rtx offset_rtx;
7bb0943f 4028
1e188d1e
RH
4029 if (!MEM_P (to_rtx))
4030 {
4031 /* We can get constant negative offsets into arrays with broken
4032 user code. Translate this to a trap instead of ICEing. */
4033 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4034 expand_builtin_trap ();
4035 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4036 }
bd070e1a 4037
1e188d1e 4038 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
bd070e1a 4039#ifdef POINTERS_EXTEND_UNSIGNED
4b6c1672 4040 if (GET_MODE (offset_rtx) != Pmode)
267b28bd 4041 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
fa06ab5c
RK
4042#else
4043 if (GET_MODE (offset_rtx) != ptr_mode)
4044 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
bd070e1a 4045#endif
bd070e1a 4046
9a7b9f4f
JL
4047 /* A constant address in TO_RTX can have VOIDmode, we must not try
4048 to call force_reg for that case. Avoid that case. */
3c0cb5de 4049 if (MEM_P (to_rtx)
89752202 4050 && GET_MODE (to_rtx) == BLKmode
9a7b9f4f 4051 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
a06ef755 4052 && bitsize > 0
3a94c984 4053 && (bitpos % bitsize) == 0
89752202 4054 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
a06ef755 4055 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
89752202 4056 {
e3c8ea67 4057 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
89752202
HB
4058 bitpos = 0;
4059 }
4060
0d4903b8 4061 to_rtx = offset_address (to_rtx, offset_rtx,
d50a16c4
EB
4062 highest_pow2_factor_for_target (to,
4063 offset));
7bb0943f 4064 }
c5c76735 4065
8c1cfd5a
RH
4066 /* Handle expand_expr of a complex value returning a CONCAT. */
4067 if (GET_CODE (to_rtx) == CONCAT)
a06ef755 4068 {
0becc986
RH
4069 if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE)
4070 {
4071 gcc_assert (bitpos == 0);
4072 result = store_expr (from, to_rtx, false);
4073 }
4074 else
4075 {
4076 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
4077 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false);
4078 }
bbf6f052 4079 }
8c1cfd5a 4080 else
df62f18a 4081 {
8c1cfd5a 4082 if (MEM_P (to_rtx))
b8b139c7 4083 {
8c1cfd5a
RH
4084 /* If the field is at offset zero, we could have been given the
4085 DECL_RTX of the parent struct. Don't munge it. */
4086 to_rtx = shallow_copy_rtx (to_rtx);
b8b139c7 4087
8c1cfd5a 4088 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
b8b139c7 4089
8c1cfd5a
RH
4090 /* Deal with volatile and readonly fields. The former is only
4091 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4092 if (volatilep)
4093 MEM_VOLATILE_P (to_rtx) = 1;
2039d7aa 4094 if (component_uses_parent_alias_set (to))
8c1cfd5a 4095 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
df62f18a 4096 }
60ba25bf 4097
8c1cfd5a
RH
4098 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
4099 to_rtx, to, from))
4100 result = NULL;
4101 else
4102 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4103 TREE_TYPE (tem), get_alias_set (to));
df62f18a
JJ
4104 }
4105
8c1cfd5a
RH
4106 if (result)
4107 preserve_temp_slots (result);
a06ef755
RK
4108 free_temp_slots ();
4109 pop_temp_slots ();
e836a5a2 4110 return;
bbf6f052
RK
4111 }
4112
cd1db108
RS
4113 /* If the rhs is a function call and its value is not an aggregate,
4114 call the function before we start to compute the lhs.
4115 This is needed for correct code for cases such as
4116 val = setjmp (buf) on machines where reference to val
1ad87b63
RK
4117 requires loading up part of an address in a separate insn.
4118
1858863b
JW
4119 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4120 since it might be a promoted variable where the zero- or sign- extension
4121 needs to be done. Handling this in the normal way is safe because no
4122 computation is done before the call. */
61f71b34 4123 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
b35cd3c1 4124 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
1858863b 4125 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
f8cfc6aa 4126 && REG_P (DECL_RTL (to))))
cd1db108 4127 {
0088fcb1
RK
4128 rtx value;
4129
4130 push_temp_slots ();
84217346 4131 value = expand_normal (from);
cd1db108 4132 if (to_rtx == 0)
37a08a29 4133 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
aaf87c45 4134
fffa9c1d
JW
4135 /* Handle calls that return values in multiple non-contiguous locations.
4136 The Irix 6 ABI has examples of this. */
4137 if (GET_CODE (to_rtx) == PARALLEL)
6e985040
AM
4138 emit_group_load (to_rtx, value, TREE_TYPE (from),
4139 int_size_in_bytes (TREE_TYPE (from)));
fffa9c1d 4140 else if (GET_MODE (to_rtx) == BLKmode)
44bb111a 4141 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
aaf87c45 4142 else
6419e5b0 4143 {
5ae6cd0d 4144 if (POINTER_TYPE_P (TREE_TYPE (to)))
6419e5b0 4145 value = convert_memory_address (GET_MODE (to_rtx), value);
6419e5b0
DT
4146 emit_move_insn (to_rtx, value);
4147 }
cd1db108
RS
4148 preserve_temp_slots (to_rtx);
4149 free_temp_slots ();
0088fcb1 4150 pop_temp_slots ();
e836a5a2 4151 return;
cd1db108
RS
4152 }
4153
bbf6f052
RK
4154 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4155 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4156
4157 if (to_rtx == 0)
37a08a29 4158 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
bbf6f052 4159
86d38d25 4160 /* Don't move directly into a return register. */
14a774a9 4161 if (TREE_CODE (to) == RESULT_DECL
f8cfc6aa 4162 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
86d38d25 4163 {
0088fcb1
RK
4164 rtx temp;
4165
4166 push_temp_slots ();
4167 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
14a774a9
RK
4168
4169 if (GET_CODE (to_rtx) == PARALLEL)
6e985040
AM
4170 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4171 int_size_in_bytes (TREE_TYPE (from)));
14a774a9
RK
4172 else
4173 emit_move_insn (to_rtx, temp);
4174
86d38d25
RS
4175 preserve_temp_slots (to_rtx);
4176 free_temp_slots ();
0088fcb1 4177 pop_temp_slots ();
e836a5a2 4178 return;
86d38d25
RS
4179 }
4180
bbf6f052
RK
4181 /* In case we are returning the contents of an object which overlaps
4182 the place the value is being stored, use a safe function when copying
4183 a value through a pointer into a structure value return block. */
4184 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4185 && current_function_returns_struct
4186 && !current_function_returns_pcc_struct)
4187 {
0088fcb1
RK
4188 rtx from_rtx, size;
4189
4190 push_temp_slots ();
33a20d10 4191 size = expr_size (from);
84217346 4192 from_rtx = expand_normal (from);
bbf6f052 4193
8f99553f
JM
4194 emit_library_call (memmove_libfunc, LCT_NORMAL,
4195 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4196 XEXP (from_rtx, 0), Pmode,
4197 convert_to_mode (TYPE_MODE (sizetype),
4198 size, TYPE_UNSIGNED (sizetype)),
4199 TYPE_MODE (sizetype));
bbf6f052
RK
4200
4201 preserve_temp_slots (to_rtx);
4202 free_temp_slots ();
0088fcb1 4203 pop_temp_slots ();
e836a5a2 4204 return;
bbf6f052
RK
4205 }
4206
4207 /* Compute FROM and store the value in the rtx we got. */
4208
0088fcb1 4209 push_temp_slots ();
e836a5a2 4210 result = store_expr (from, to_rtx, 0);
bbf6f052
RK
4211 preserve_temp_slots (result);
4212 free_temp_slots ();
0088fcb1 4213 pop_temp_slots ();
e836a5a2 4214 return;
bbf6f052
RK
4215}
4216
4217/* Generate code for computing expression EXP,
4218 and storing the value into TARGET.
bbf6f052 4219
709f5be1
RS
4220 If the mode is BLKmode then we may return TARGET itself.
4221 It turns out that in BLKmode it doesn't cause a problem,
4222 because C has no operators that could combine two different
4223 assignments into the same BLKmode object with different values
4224 with no sequence point. Will other languages need this to
4225 be more thorough?
4226
6f4fd16d 4227 If CALL_PARAM_P is nonzero, this is a store into a call param on the
8403445a 4228 stack, and block moves may need to be treated specially. */
bbf6f052
RK
4229
4230rtx
6f4fd16d 4231store_expr (tree exp, rtx target, int call_param_p)
bbf6f052 4232{
b3694847 4233 rtx temp;
0fab64a3 4234 rtx alt_rtl = NULL_RTX;
bbf6f052
RK
4235 int dont_return_target = 0;
4236
847311f4
AL
4237 if (VOID_TYPE_P (TREE_TYPE (exp)))
4238 {
4239 /* C++ can generate ?: expressions with a throw expression in one
4240 branch and an rvalue in the other. Here, we resolve attempts to
4d6922ee 4241 store the throw expression's nonexistent result. */
6f4fd16d 4242 gcc_assert (!call_param_p);
847311f4
AL
4243 expand_expr (exp, const0_rtx, VOIDmode, 0);
4244 return NULL_RTX;
4245 }
bbf6f052
RK
4246 if (TREE_CODE (exp) == COMPOUND_EXPR)
4247 {
4248 /* Perform first part of compound expression, then assign from second
4249 part. */
8403445a 4250 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6f4fd16d
KH
4251 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4252 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
bbf6f052
RK
4253 }
4254 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4255 {
4256 /* For conditional expression, get safe form of the target. Then
4257 test the condition, doing the appropriate assignment on either
4258 side. This avoids the creation of unnecessary temporaries.
4259 For non-BLKmode, it is more efficient not to do this. */
4260
4261 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4262
dabf8373 4263 do_pending_stack_adjust ();
bbf6f052
RK
4264 NO_DEFER_POP;
4265 jumpifnot (TREE_OPERAND (exp, 0), lab1);
6f4fd16d 4266 store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
bbf6f052
RK
4267 emit_jump_insn (gen_jump (lab2));
4268 emit_barrier ();
4269 emit_label (lab1);
6f4fd16d 4270 store_expr (TREE_OPERAND (exp, 2), target, call_param_p);
bbf6f052
RK
4271 emit_label (lab2);
4272 OK_DEFER_POP;
a3a58acc 4273
436d948e 4274 return NULL_RTX;
12f06d17 4275 }
1499e0a8 4276 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
09da1532 4277 /* If this is a scalar in a register that is stored in a wider mode
1499e0a8
RK
4278 than the declared mode, compute the result into its declared mode
4279 and then convert to the wider mode. Our value is the computed
4280 expression. */
4281 {
b76b08ef
RK
4282 rtx inner_target = 0;
4283
436d948e
KH
4284 /* We can do the conversion inside EXP, which will often result
4285 in some optimizations. Do the conversion in two steps: first
4286 change the signedness, if needed, then the extend. But don't
4287 do this if the type of EXP is a subtype of something else
4288 since then the conversion might involve more than just
4289 converting modes. */
4290 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
7e7d1b4b
RH
4291 && TREE_TYPE (TREE_TYPE (exp)) == 0
4292 && (!lang_hooks.reduce_bit_field_operations
4293 || (GET_MODE_PRECISION (GET_MODE (target))
4294 == TYPE_PRECISION (TREE_TYPE (exp)))))
f635a84d 4295 {
8df83eae 4296 if (TYPE_UNSIGNED (TREE_TYPE (exp))
f635a84d 4297 != SUBREG_PROMOTED_UNSIGNED_P (target))
3967bc2d 4298 exp = fold_convert
ae2bcd98 4299 (lang_hooks.types.signed_or_unsigned_type
ceef8ce4 4300 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
f635a84d 4301
3967bc2d
RS
4302 exp = fold_convert (lang_hooks.types.type_for_mode
4303 (GET_MODE (SUBREG_REG (target)),
4304 SUBREG_PROMOTED_UNSIGNED_P (target)),
4305 exp);
b76b08ef
RK
4306
4307 inner_target = SUBREG_REG (target);
f635a84d 4308 }
3a94c984 4309
8403445a 4310 temp = expand_expr (exp, inner_target, VOIDmode,
6f4fd16d 4311 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
b258707c
RS
4312
4313 /* If TEMP is a VOIDmode constant, use convert_modes to make
4314 sure that we properly convert it. */
4315 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
1f1b0541
RH
4316 {
4317 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4318 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4319 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4320 GET_MODE (target), temp,
4321 SUBREG_PROMOTED_UNSIGNED_P (target));
4322 }
b258707c 4323
1499e0a8
RK
4324 convert_move (SUBREG_REG (target), temp,
4325 SUBREG_PROMOTED_UNSIGNED_P (target));
3dbecef9 4326
436d948e 4327 return NULL_RTX;
1499e0a8 4328 }
bbf6f052
RK
4329 else
4330 {
0fab64a3 4331 temp = expand_expr_real (exp, target, GET_MODE (target),
6f4fd16d 4332 (call_param_p
0fab64a3
MM
4333 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4334 &alt_rtl);
766f36c7 4335 /* Return TARGET if it's a specified hardware register.
709f5be1
RS
4336 If TARGET is a volatile mem ref, either return TARGET
4337 or return a reg copied *from* TARGET; ANSI requires this.
4338
4339 Otherwise, if TEMP is not TARGET, return TEMP
4340 if it is constant (for efficiency),
4341 or if we really want the correct value. */
f8cfc6aa 4342 if (!(target && REG_P (target)
bbf6f052 4343 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3c0cb5de 4344 && !(MEM_P (target) && MEM_VOLATILE_P (target))
effbcc6a 4345 && ! rtx_equal_p (temp, target)
436d948e 4346 && CONSTANT_P (temp))
bbf6f052
RK
4347 dont_return_target = 1;
4348 }
4349
b258707c
RS
4350 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4351 the same as that of TARGET, adjust the constant. This is needed, for
4352 example, in case it is a CONST_DOUBLE and we want only a word-sized
4353 value. */
4354 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
c1da1f33 4355 && TREE_CODE (exp) != ERROR_MARK
b258707c
RS
4356 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4357 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
8df83eae 4358 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
b258707c 4359
bbf6f052 4360 /* If value was not generated in the target, store it there.
1bbd65cd
EB
4361 Convert the value to TARGET's type first if necessary and emit the
4362 pending incrementations that have been queued when expanding EXP.
4363 Note that we cannot emit the whole queue blindly because this will
4364 effectively disable the POST_INC optimization later.
4365
37a08a29 4366 If TEMP and TARGET compare equal according to rtx_equal_p, but
f3f2255a
R
4367 one or both of them are volatile memory refs, we have to distinguish
4368 two cases:
4369 - expand_expr has used TARGET. In this case, we must not generate
4370 another copy. This can be detected by TARGET being equal according
4371 to == .
4372 - expand_expr has not used TARGET - that means that the source just
4373 happens to have the same RTX form. Since temp will have been created
4374 by expand_expr, it will compare unequal according to == .
4375 We must generate a copy in this case, to reach the correct number
4376 of volatile memory references. */
bbf6f052 4377
6036acbb 4378 if ((! rtx_equal_p (temp, target)
f3f2255a
R
4379 || (temp != target && (side_effects_p (temp)
4380 || side_effects_p (target))))
e5408e52 4381 && TREE_CODE (exp) != ERROR_MARK
9c5c5f2c
MM
4382 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4383 but TARGET is not valid memory reference, TEMP will differ
4384 from TARGET although it is really the same location. */
0fab64a3 4385 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
535a42b1
NS
4386 /* If there's nothing to copy, don't bother. Don't call
4387 expr_size unless necessary, because some front ends' (C++)
4388 expr_size hook must not be given objects that are not
4389 supposed to be bit-copied or bit-initialized. */
e56fc090 4390 && expr_size (exp) != const0_rtx)
bbf6f052 4391 {
bbf6f052 4392 if (GET_MODE (temp) != GET_MODE (target)
f0348c25 4393 && GET_MODE (temp) != VOIDmode)
bbf6f052 4394 {
8df83eae 4395 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
bbf6f052
RK
4396 if (dont_return_target)
4397 {
4398 /* In this case, we will return TEMP,
4399 so make sure it has the proper mode.
4400 But don't forget to store the value into TARGET. */
4401 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4402 emit_move_insn (target, temp);
4403 }
4404 else
4405 convert_move (target, temp, unsignedp);
4406 }
4407
4408 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4409 {
c24ae149
RK
4410 /* Handle copying a string constant into an array. The string
4411 constant may be shorter than the array. So copy just the string's
4412 actual length, and clear the rest. First get the size of the data
4413 type of the string, which is actually the size of the target. */
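/* E.g. for char buf[16] = "hi" the string constant occupies only 3 bytes
   (including the terminating NUL), so 3 bytes are block-copied and the
   remaining 13 bytes of the array are cleared below.  */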
4414 rtx size = expr_size (exp);
bbf6f052 4415
e87b4f3f
RS
4416 if (GET_CODE (size) == CONST_INT
4417 && INTVAL (size) < TREE_STRING_LENGTH (exp))
8403445a 4418 emit_block_move (target, temp, size,
6f4fd16d 4419 (call_param_p
8403445a 4420 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
e87b4f3f 4421 else
bbf6f052 4422 {
e87b4f3f
RS
4423 /* Compute the size of the data to copy from the string. */
4424 tree copy_size
c03b7665 4425 = size_binop (MIN_EXPR,
b50d17a1 4426 make_tree (sizetype, size),
fed3cef0 4427 size_int (TREE_STRING_LENGTH (exp)));
8403445a
AM
4428 rtx copy_size_rtx
4429 = expand_expr (copy_size, NULL_RTX, VOIDmode,
6f4fd16d 4430 (call_param_p
8403445a 4431 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
e87b4f3f
RS
4432 rtx label = 0;
4433
4434 /* Copy that much. */
267b28bd 4435 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
8df83eae 4436 TYPE_UNSIGNED (sizetype));
8403445a 4437 emit_block_move (target, temp, copy_size_rtx,
6f4fd16d 4438 (call_param_p
8403445a 4439 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
e87b4f3f 4440
88f63c77
RK
4441 /* Figure out how much is left in TARGET that we have to clear.
4442 Do all calculations in ptr_mode. */
e87b4f3f
RS
4443 if (GET_CODE (copy_size_rtx) == CONST_INT)
4444 {
c24ae149
RK
4445 size = plus_constant (size, -INTVAL (copy_size_rtx));
4446 target = adjust_address (target, BLKmode,
4447 INTVAL (copy_size_rtx));
e87b4f3f
RS
4448 }
4449 else
4450 {
fa06ab5c 4451 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
906c4e36
RK
4452 copy_size_rtx, NULL_RTX, 0,
4453 OPTAB_LIB_WIDEN);
e87b4f3f 4454
c24ae149
RK
4455#ifdef POINTERS_EXTEND_UNSIGNED
4456 if (GET_MODE (copy_size_rtx) != Pmode)
267b28bd 4457 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
8df83eae 4458 TYPE_UNSIGNED (sizetype));
c24ae149
RK
4459#endif
4460
4461 target = offset_address (target, copy_size_rtx,
4462 highest_pow2_factor (copy_size));
e87b4f3f 4463 label = gen_label_rtx ();
c5d5d461 4464 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
a06ef755 4465 GET_MODE (size), 0, label);
e87b4f3f
RS
4466 }
4467
4468 if (size != const0_rtx)
8148fe65 4469 clear_storage (target, size, BLOCK_OP_NORMAL);
22619c3f 4470
e87b4f3f
RS
4471 if (label)
4472 emit_label (label);
bbf6f052
RK
4473 }
4474 }
fffa9c1d
JW
4475 /* Handle calls that return values in multiple non-contiguous locations.
4476 The Irix 6 ABI has examples of this. */
4477 else if (GET_CODE (target) == PARALLEL)
6e985040
AM
4478 emit_group_load (target, temp, TREE_TYPE (exp),
4479 int_size_in_bytes (TREE_TYPE (exp)));
bbf6f052 4480 else if (GET_MODE (temp) == BLKmode)
8403445a 4481 emit_block_move (target, temp, expr_size (exp),
6f4fd16d 4482 (call_param_p
8403445a 4483 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
bbf6f052 4484 else
b0dccb00
RH
4485 {
4486 temp = force_operand (temp, target);
4487 if (temp != target)
4488 emit_move_insn (target, temp);
4489 }
bbf6f052 4490 }
709f5be1 4491
436d948e 4492 return NULL_RTX;
bbf6f052
RK
4493}
4494\f
6fa91b48
SB
4495/* Examine CTOR to discover:
4496 * how many scalar fields are set to nonzero values,
4497 and place it in *P_NZ_ELTS;
4498 * how many scalar fields are set to non-constant values,
4499 and place it in *P_NC_ELTS; and
4500 * how many scalar fields in total are in CTOR,
6f642f98
RH
4501 and place it in *P_ELT_COUNT.
4502 * if a type is a union, and the initializer from the constructor
4503 is not the largest element in the union, then set *p_must_clear. */
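/* For example, for int a[4] = { 1, 0, 2 } the walk below visits the
   explicit initializers, giving *P_NZ_ELTS == 2, *P_NC_ELTS == 0 and
   *P_ELT_COUNT == 3 (assuming all three explicit initializers appear in
   the CONSTRUCTOR); the unwritten trailing element is not counted.  */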
9de08200 4504
6de9cd9a
DN
4505static void
4506categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
6fa91b48 4507 HOST_WIDE_INT *p_nc_elts,
6f642f98
RH
4508 HOST_WIDE_INT *p_elt_count,
4509 bool *p_must_clear)
9de08200 4510{
4038c495 4511 unsigned HOST_WIDE_INT idx;
6fa91b48 4512 HOST_WIDE_INT nz_elts, nc_elts, elt_count;
4038c495 4513 tree value, purpose;
9de08200 4514
6de9cd9a
DN
4515 nz_elts = 0;
4516 nc_elts = 0;
6fa91b48 4517 elt_count = 0;
caf93cb0 4518
4038c495 4519 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
9de08200 4520 {
6de9cd9a 4521 HOST_WIDE_INT mult;
9de08200 4522
6de9cd9a
DN
4523 mult = 1;
4524 if (TREE_CODE (purpose) == RANGE_EXPR)
4525 {
4526 tree lo_index = TREE_OPERAND (purpose, 0);
4527 tree hi_index = TREE_OPERAND (purpose, 1);
9de08200 4528
6de9cd9a
DN
4529 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4530 mult = (tree_low_cst (hi_index, 1)
4531 - tree_low_cst (lo_index, 1) + 1);
4532 }
9de08200 4533
6de9cd9a
DN
4534 switch (TREE_CODE (value))
4535 {
4536 case CONSTRUCTOR:
4537 {
6f642f98
RH
4538 HOST_WIDE_INT nz = 0, nc = 0, ic = 0;
4539 categorize_ctor_elements_1 (value, &nz, &nc, &ic, p_must_clear);
6de9cd9a
DN
4540 nz_elts += mult * nz;
4541 nc_elts += mult * nc;
6f642f98 4542 elt_count += mult * ic;
6de9cd9a
DN
4543 }
4544 break;
9de08200 4545
6de9cd9a
DN
4546 case INTEGER_CST:
4547 case REAL_CST:
4548 if (!initializer_zerop (value))
4549 nz_elts += mult;
6fa91b48 4550 elt_count += mult;
6de9cd9a 4551 break;
97f8d136
RK
4552
4553 case STRING_CST:
4554 nz_elts += mult * TREE_STRING_LENGTH (value);
6fa91b48 4555 elt_count += mult * TREE_STRING_LENGTH (value);
97f8d136
RK
4556 break;
4557
6de9cd9a
DN
4558 case COMPLEX_CST:
4559 if (!initializer_zerop (TREE_REALPART (value)))
4560 nz_elts += mult;
4561 if (!initializer_zerop (TREE_IMAGPART (value)))
4562 nz_elts += mult;
6fa91b48 4563 elt_count += mult;
6de9cd9a 4564 break;
97f8d136 4565
6de9cd9a
DN
4566 case VECTOR_CST:
4567 {
4568 tree v;
4569 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
6fa91b48
SB
4570 {
4571 if (!initializer_zerop (TREE_VALUE (v)))
4572 nz_elts += mult;
4573 elt_count += mult;
4574 }
6de9cd9a
DN
4575 }
4576 break;
69ef87e2 4577
6de9cd9a
DN
4578 default:
4579 nz_elts += mult;
6fa91b48 4580 elt_count += mult;
6de9cd9a
DN
4581 if (!initializer_constant_valid_p (value, TREE_TYPE (value)))
4582 nc_elts += mult;
4583 break;
4584 }
4585 }
69ef87e2 4586
6f642f98
RH
4587 if (!*p_must_clear
4588 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4589 || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4590 {
4591 tree init_sub_type;
486e4326 4592 bool clear_this = true;
6f642f98 4593
4038c495 4594 if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
6f642f98 4595 {
486e4326
RH
4596 /* We don't expect more than one element of the union to be
4597 initialized. Not sure what we should do otherwise... */
4038c495
GB
4598 gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
4599 == 1);
486e4326 4600
4038c495
GB
4601 init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
4602 CONSTRUCTOR_ELTS (ctor),
4603 0)->value);
486e4326
RH
4604
4605 /* ??? We could look at each element of the union, and find the
4606 largest element. Which would avoid comparing the size of the
4607 initialized element against any tail padding in the union.
4608 Doesn't seem worth the effort... */
4609 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4610 TYPE_SIZE (init_sub_type)) == 1)
4611 {
4612 /* And now we have to find out if the element itself is fully
4613 constructed. E.g. for union { struct { int a, b; } s; } u
4614 = { .s = { .a = 1 } }. */
73ed17ff 4615 if (elt_count == count_type_elements (init_sub_type, false))
486e4326
RH
4616 clear_this = false;
4617 }
6f642f98 4618 }
486e4326
RH
4619
4620 *p_must_clear = clear_this;
6f642f98
RH
4621 }
4622
6de9cd9a
DN
4623 *p_nz_elts += nz_elts;
4624 *p_nc_elts += nc_elts;
6fa91b48 4625 *p_elt_count += elt_count;
6de9cd9a
DN
4626}
4627
4628void
4629categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
6fa91b48 4630 HOST_WIDE_INT *p_nc_elts,
6f642f98
RH
4631 HOST_WIDE_INT *p_elt_count,
4632 bool *p_must_clear)
6de9cd9a
DN
4633{
4634 *p_nz_elts = 0;
4635 *p_nc_elts = 0;
6fa91b48 4636 *p_elt_count = 0;
6f642f98
RH
4637 *p_must_clear = false;
4638 categorize_ctor_elements_1 (ctor, p_nz_elts, p_nc_elts, p_elt_count,
4639 p_must_clear);
6de9cd9a
DN
4640}
4641
4642/* Count the number of scalars in TYPE. Return -1 on overflow or
73ed17ff
JJ
4643 if TYPE is variable-sized. If ALLOW_FLEXARR is true, don't count a flexible
4644 array member at the end of the structure. */
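/* For example, struct { int a; int b[3]; } counts as 4 scalars, while a
   union is only estimated, from its size in words, since choosing which
   member to count would be a guess anyway.  */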
6de9cd9a
DN
4645
4646HOST_WIDE_INT
73ed17ff 4647count_type_elements (tree type, bool allow_flexarr)
6de9cd9a
DN
4648{
4649 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4650 switch (TREE_CODE (type))
4651 {
4652 case ARRAY_TYPE:
4653 {
4654 tree telts = array_type_nelts (type);
4655 if (telts && host_integerp (telts, 1))
4656 {
5377d5ba 4657 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
73ed17ff 4658 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
6de9cd9a
DN
4659 if (n == 0)
4660 return 0;
5377d5ba 4661 else if (max / n > m)
6de9cd9a
DN
4662 return n * m;
4663 }
4664 return -1;
4665 }
4666
4667 case RECORD_TYPE:
4668 {
4669 HOST_WIDE_INT n = 0, t;
4670 tree f;
4671
4672 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4673 if (TREE_CODE (f) == FIELD_DECL)
4674 {
73ed17ff 4675 t = count_type_elements (TREE_TYPE (f), false);
6de9cd9a 4676 if (t < 0)
73ed17ff
JJ
4677 {
4678 /* Check for structures with flexible array member. */
4679 tree tf = TREE_TYPE (f);
4680 if (allow_flexarr
4681 && TREE_CHAIN (f) == NULL
4682 && TREE_CODE (tf) == ARRAY_TYPE
4683 && TYPE_DOMAIN (tf)
4684 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
4685 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
4686 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
4687 && int_size_in_bytes (type) >= 0)
4688 break;
4689
4690 return -1;
4691 }
6de9cd9a
DN
4692 n += t;
4693 }
4694
4695 return n;
4696 }
9de08200 4697
6de9cd9a
DN
4698 case UNION_TYPE:
4699 case QUAL_UNION_TYPE:
4700 {
4701 /* Ho hum. How in the world do we guess here? Clearly it isn't
4702 right to count the fields. Guess based on the number of words. */
4703 HOST_WIDE_INT n = int_size_in_bytes (type);
4704 if (n < 0)
4705 return -1;
4706 return n / UNITS_PER_WORD;
4707 }
4708
4709 case COMPLEX_TYPE:
4710 return 2;
4711
4712 case VECTOR_TYPE:
3a021db2 4713 return TYPE_VECTOR_SUBPARTS (type);
6de9cd9a
DN
4714
4715 case INTEGER_TYPE:
4716 case REAL_TYPE:
4717 case ENUMERAL_TYPE:
4718 case BOOLEAN_TYPE:
6de9cd9a
DN
4719 case POINTER_TYPE:
4720 case OFFSET_TYPE:
4721 case REFERENCE_TYPE:
9de08200 4722 return 1;
3a94c984 4723
6de9cd9a
DN
4724 case VOID_TYPE:
4725 case METHOD_TYPE:
6de9cd9a
DN
4726 case FUNCTION_TYPE:
4727 case LANG_TYPE:
e9a25f70 4728 default:
5b0264cb 4729 gcc_unreachable ();
9de08200 4730 }
9de08200
RK
4731}
4732
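/* Illustrative sketch (not part of expr.c): example types, with hypothetical
   names, and the scalar counts count_type_elements would be expected to
   report for them under the rules above.  */

struct example_point { int x, y; };                 /* 2 scalars */
struct example_rec { double d[3];                   /* 3 + 2 = 5 scalars */
                     struct example_point p; };
struct example_flex { int n; int data[]; };         /* 1 scalar when
                                                       ALLOW_FLEXARR is true;
                                                       otherwise -1, since the
                                                       flexible array member
                                                       has no fixed count.  */
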
 4733/* Return 1 if EXP contains mostly (more than 3/4) zeros.  */
4734
e0ce7708 4735static int
502b8322 4736mostly_zeros_p (tree exp)
9de08200 4737{
9de08200 4738   if (TREE_CODE (exp) == CONSTRUCTOR)
9de08200 4740     {
6fa91b48 4741 HOST_WIDE_INT nz_elts, nc_elts, count, elts;
6f642f98
RH
4742 bool must_clear;
4743
4744 categorize_ctor_elements (exp, &nz_elts, &nc_elts, &count, &must_clear);
4745 if (must_clear)
4746 return 1;
6de9cd9a 4747
73ed17ff 4748 elts = count_type_elements (TREE_TYPE (exp), false);
9de08200 4749
6de9cd9a 4750 return nz_elts < elts / 4;
9de08200
RK
4751 }
4752
6de9cd9a 4753 return initializer_zerop (exp);
9de08200 4754}
c5250139
RG
4755
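/* Illustrative sketch (not part of expr.c): in the hypothetical local
   initializer below only 2 of 16 elements are nonzero, i.e. more than 3/4
   are zero, so mostly_zeros_p returns 1 and the expander prefers to clear
   the whole object once and then store just the nonzero entries.  */

static void
example_sparse_init (void)
{
  int v[16] = { [3] = 7, [12] = 9 };    /* 14 of 16 elements are zero */
  (void) v;
}
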
4756/* Return 1 if EXP contains all zeros. */
4757
4758static int
4759all_zeros_p (tree exp)
4760{
 4761   if (TREE_CODE (exp) == CONSTRUCTOR)
 4763     {
4764 HOST_WIDE_INT nz_elts, nc_elts, count;
4765 bool must_clear;
4766
4767 categorize_ctor_elements (exp, &nz_elts, &nc_elts, &count, &must_clear);
4768 return nz_elts == 0;
4769 }
4770
4771 return initializer_zerop (exp);
4772}
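
/* Illustrative sketch (not part of expr.c): an initializer the above is
   meant to recognize -- every element is zero, so the whole store can be
   done with a single clear.  The function name is hypothetical.  */

static void
example_zero_init (void)
{
  int v[8] = { 0 };     /* elements not mentioned default to zero as well */
  (void) v;
}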
9de08200 4773\f
e1a43f73
PB
4774/* Helper function for store_constructor.
4775 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4776 TYPE is the type of the CONSTRUCTOR, not the element type.
04050c69 4777 CLEARED is as for store_constructor.
23cb1766 4778 ALIAS_SET is the alias set to use for any stores.
23ccec44
JW
4779
4780 This provides a recursive shortcut back to store_constructor when it isn't
4781 necessary to go through store_field. This is so that we can pass through
4782 the cleared field to let store_constructor know that we may not have to
4783 clear a substructure if the outer structure has already been cleared. */
e1a43f73
PB
4784
4785static void
502b8322
AJ
4786store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4787 HOST_WIDE_INT bitpos, enum machine_mode mode,
4788 tree exp, tree type, int cleared, int alias_set)
e1a43f73
PB
4789{
4790 if (TREE_CODE (exp) == CONSTRUCTOR
6c89c39a
RK
4791 /* We can only call store_constructor recursively if the size and
4792 bit position are on a byte boundary. */
23ccec44 4793 && bitpos % BITS_PER_UNIT == 0
6c89c39a 4794 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
cc2902df 4795 /* If we have a nonzero bitpos for a register target, then we just
23ccec44
JW
4796 let store_field do the bitfield handling. This is unlikely to
 4797	 generate unnecessary clear instructions anyway.  */
3c0cb5de 4798 && (bitpos == 0 || MEM_P (target)))
e1a43f73 4799 {
3c0cb5de 4800 if (MEM_P (target))
61cb205c
RK
4801 target
4802 = adjust_address (target,
4803 GET_MODE (target) == BLKmode
4804 || 0 != (bitpos
4805 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4806 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
23cb1766 4807
e0339ef7 4808
04050c69 4809 /* Update the alias set, if required. */
3c0cb5de 4810 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
10b76d73 4811 && MEM_ALIAS_SET (target) != 0)
70072ed9
RK
4812 {
4813 target = copy_rtx (target);
4814 set_mem_alias_set (target, alias_set);
4815 }
e0339ef7 4816
dbb5c281 4817 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
e1a43f73
PB
4818 }
4819 else
f45bdcd0 4820 store_field (target, bitsize, bitpos, mode, exp, type, alias_set);
e1a43f73
PB
4821}
4822
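/* Illustrative sketch (not part of expr.c): for a nested aggregate
   initializer like the hypothetical one below, the inner CONSTRUCTOR sits on
   a byte boundary, so store_constructor_field recurses straight back into
   store_constructor and passes CLEARED along; once the outer object has been
   cleared, the zero members of the inner struct need no further stores.  */

static void
example_nested_init (void)
{
  struct example_sub { int a, b, c, d; };
  struct example_whole { struct example_sub s; int tail[8]; };
  struct example_whole w = { .s = { .a = 1 } };   /* mostly zero */
  (void) w;
}
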
bbf6f052 4823/* Store the value of constructor EXP into the rtx TARGET.
04050c69
RK
4824 TARGET is either a REG or a MEM; we know it cannot conflict, since
4825 safe_from_p has been called.
dbb5c281
RK
4826 CLEARED is true if TARGET is known to have been zero'd.
4827 SIZE is the number of bytes of TARGET we are allowed to modify: this
b7010412
RK
4828 may not be the same as the size of EXP if we are assigning to a field
4829 which has been packed to exclude padding bits. */
bbf6f052
RK
4830
4831static void
502b8322 4832store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
bbf6f052 4833{
4af3895e 4834 tree type = TREE_TYPE (exp);
a5efcd63 4835#ifdef WORD_REGISTER_OPERATIONS
13eb1f7f 4836 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
a5efcd63 4837#endif
4af3895e 4838
5b0264cb 4839 switch (TREE_CODE (type))
bbf6f052 4840 {
5b0264cb
NS
4841 case RECORD_TYPE:
4842 case UNION_TYPE:
4843 case QUAL_UNION_TYPE:
4844 {
4038c495
GB
4845 unsigned HOST_WIDE_INT idx;
4846 tree field, value;
9de08200 4847
5b0264cb
NS
4848 /* If size is zero or the target is already cleared, do nothing. */
4849 if (size == 0 || cleared)
9de08200 4850 cleared = 1;
5b0264cb
NS
4851 /* We either clear the aggregate or indicate the value is dead. */
4852 else if ((TREE_CODE (type) == UNION_TYPE
4853 || TREE_CODE (type) == QUAL_UNION_TYPE)
4854 && ! CONSTRUCTOR_ELTS (exp))
4855 /* If the constructor is empty, clear the union. */
4856 {
8148fe65 4857 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5b0264cb
NS
4858 cleared = 1;
4859 }
bbf6f052 4860
5b0264cb
NS
4861 /* If we are building a static constructor into a register,
4862 set the initial value as zero so we can fold the value into
4863 a constant. But if more than one register is involved,
4864 this probably loses. */
4865 else if (REG_P (target) && TREE_STATIC (exp)
4866 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4867 {
4868 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4869 cleared = 1;
4870 }
3a94c984 4871
5b0264cb
NS
4872 /* If the constructor has fewer fields than the structure or
4873 if we are initializing the structure to mostly zeros, clear
4874 the whole structure first. Don't do this if TARGET is a
4875 register whose mode size isn't equal to SIZE since
4876 clear_storage can't handle this case. */
4877 else if (size > 0
4038c495 4878 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
5b0264cb
NS
4879 != fields_length (type))
4880 || mostly_zeros_p (exp))
4881 && (!REG_P (target)
4882 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4883 == size)))
4884 {
8148fe65 4885 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5b0264cb
NS
4886 cleared = 1;
4887 }
b50d17a1 4888
5b0264cb
NS
4889 if (! cleared)
4890 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
bbf6f052 4891
5b0264cb
NS
4892 /* Store each element of the constructor into the
4893 corresponding field of TARGET. */
4038c495 4894 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5b0264cb 4895 {
5b0264cb
NS
4896 enum machine_mode mode;
4897 HOST_WIDE_INT bitsize;
4898 HOST_WIDE_INT bitpos = 0;
4899 tree offset;
4900 rtx to_rtx = target;
4901
4902 /* Just ignore missing fields. We cleared the whole
4903 structure, above, if any fields are missing. */
4904 if (field == 0)
4905 continue;
4906
4907 if (cleared && initializer_zerop (value))
4908 continue;
4909
4910 if (host_integerp (DECL_SIZE (field), 1))
4911 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4912 else
4913 bitsize = -1;
4914
4915 mode = DECL_MODE (field);
4916 if (DECL_BIT_FIELD (field))
4917 mode = VOIDmode;
4918
4919 offset = DECL_FIELD_OFFSET (field);
4920 if (host_integerp (offset, 0)
4921 && host_integerp (bit_position (field), 0))
4922 {
4923 bitpos = int_bit_position (field);
4924 offset = 0;
4925 }
4926 else
4927 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4928
4929 if (offset)
4930 {
4931 rtx offset_rtx;
4932
4933 offset
4934 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4935 make_tree (TREE_TYPE (exp),
4936 target));
4937
84217346 4938 offset_rtx = expand_normal (offset);
5b0264cb
NS
4939 gcc_assert (MEM_P (to_rtx));
4940
bd070e1a 4941#ifdef POINTERS_EXTEND_UNSIGNED
5b0264cb
NS
4942 if (GET_MODE (offset_rtx) != Pmode)
4943 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
fa06ab5c 4944#else
5b0264cb
NS
4945 if (GET_MODE (offset_rtx) != ptr_mode)
4946 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
bd070e1a 4947#endif
bd070e1a 4948
5b0264cb
NS
4949 to_rtx = offset_address (to_rtx, offset_rtx,
4950 highest_pow2_factor (offset));
4951 }
c5c76735 4952
34c73909 4953#ifdef WORD_REGISTER_OPERATIONS
5b0264cb
NS
4954 /* If this initializes a field that is smaller than a
4955 word, at the start of a word, try to widen it to a full
4956 word. This special case allows us to output C++ member
4957 function initializations in a form that the optimizers
4958 can understand. */
4959 if (REG_P (target)
4960 && bitsize < BITS_PER_WORD
4961 && bitpos % BITS_PER_WORD == 0
4962 && GET_MODE_CLASS (mode) == MODE_INT
4963 && TREE_CODE (value) == INTEGER_CST
4964 && exp_size >= 0
4965 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4966 {
4967 tree type = TREE_TYPE (value);
4968
4969 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4970 {
4971 type = lang_hooks.types.type_for_size
4972 (BITS_PER_WORD, TYPE_UNSIGNED (type));
3967bc2d 4973 value = fold_convert (type, value);
5b0264cb
NS
4974 }
4975
4976 if (BYTES_BIG_ENDIAN)
4977 value
4845b383 4978 = fold_build2 (LSHIFT_EXPR, type, value,
3967bc2d 4979 build_int_cst (type,
4845b383 4980 BITS_PER_WORD - bitsize));
5b0264cb
NS
4981 bitsize = BITS_PER_WORD;
4982 mode = word_mode;
4983 }
34c73909 4984#endif
10b76d73 4985
5b0264cb
NS
4986 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4987 && DECL_NONADDRESSABLE_P (field))
4988 {
4989 to_rtx = copy_rtx (to_rtx);
4990 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4991 }
4992
4993 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4994 value, type, cleared,
4995 get_alias_set (TREE_TYPE (field)));
4996 }
4997 break;
4998 }
4999 case ARRAY_TYPE:
5000 {
4038c495
GB
5001 tree value, index;
5002 unsigned HOST_WIDE_INT i;
5b0264cb
NS
5003 int need_to_clear;
5004 tree domain;
5005 tree elttype = TREE_TYPE (type);
5006 int const_bounds_p;
5007 HOST_WIDE_INT minelt = 0;
5008 HOST_WIDE_INT maxelt = 0;
5009
5010 domain = TYPE_DOMAIN (type);
5011 const_bounds_p = (TYPE_MIN_VALUE (domain)
5012 && TYPE_MAX_VALUE (domain)
5013 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5014 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5015
5016 /* If we have constant bounds for the range of the type, get them. */
5017 if (const_bounds_p)
5018 {
5019 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5020 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5021 }
3a021db2 5022
5b0264cb
NS
5023 /* If the constructor has fewer elements than the array, clear
5024 the whole array first. Similarly if this is static
5025 constructor of a non-BLKmode object. */
5026 if (cleared)
5027 need_to_clear = 0;
5028 else if (REG_P (target) && TREE_STATIC (exp))
5029 need_to_clear = 1;
5030 else
5031 {
4038c495
GB
5032 unsigned HOST_WIDE_INT idx;
5033 tree index, value;
5b0264cb
NS
5034 HOST_WIDE_INT count = 0, zero_count = 0;
5035 need_to_clear = ! const_bounds_p;
5036
5037 /* This loop is a more accurate version of the loop in
5038 mostly_zeros_p (it handles RANGE_EXPR in an index). It
5039 is also needed to check for missing elements. */
4038c495 5040 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5b0264cb 5041 {
5b0264cb 5042 HOST_WIDE_INT this_node_count;
4038c495
GB
5043
5044 if (need_to_clear)
5045 break;
5b0264cb
NS
5046
5047 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5048 {
5049 tree lo_index = TREE_OPERAND (index, 0);
5050 tree hi_index = TREE_OPERAND (index, 1);
5051
5052 if (! host_integerp (lo_index, 1)
5053 || ! host_integerp (hi_index, 1))
5054 {
5055 need_to_clear = 1;
5056 break;
5057 }
5058
5059 this_node_count = (tree_low_cst (hi_index, 1)
5060 - tree_low_cst (lo_index, 1) + 1);
5061 }
5062 else
5063 this_node_count = 1;
5064
5065 count += this_node_count;
4038c495 5066 if (mostly_zeros_p (value))
5b0264cb
NS
5067 zero_count += this_node_count;
5068 }
5069
5070 /* Clear the entire array first if there are any missing
5071 elements, or if the incidence of zero elements is >=
5072 75%. */
5073 if (! need_to_clear
5074 && (count < maxelt - minelt + 1
5075 || 4 * zero_count >= 3 * count))
5076 need_to_clear = 1;
5077 }
5078
5079 if (need_to_clear && size > 0)
5080 {
5081 if (REG_P (target))
5082 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5083 else
8148fe65 5084 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5b0264cb
NS
5085 cleared = 1;
5086 }
3a021db2 5087
5b0264cb
NS
5088 if (!cleared && REG_P (target))
5089 /* Inform later passes that the old value is dead. */
5090 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
3a021db2 5091
5b0264cb
NS
5092 /* Store each element of the constructor into the
5093 corresponding element of TARGET, determined by counting the
5094 elements. */
4038c495 5095 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5b0264cb
NS
5096 {
5097 enum machine_mode mode;
5098 HOST_WIDE_INT bitsize;
5099 HOST_WIDE_INT bitpos;
5100 int unsignedp;
5b0264cb
NS
5101 rtx xtarget = target;
5102
5103 if (cleared && initializer_zerop (value))
5104 continue;
5105
5106 unsignedp = TYPE_UNSIGNED (elttype);
5107 mode = TYPE_MODE (elttype);
5108 if (mode == BLKmode)
5109 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5110 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5111 : -1);
5112 else
5113 bitsize = GET_MODE_BITSIZE (mode);
5114
5115 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5116 {
5117 tree lo_index = TREE_OPERAND (index, 0);
5118 tree hi_index = TREE_OPERAND (index, 1);
5119 rtx index_r, pos_rtx;
5120 HOST_WIDE_INT lo, hi, count;
5121 tree position;
5122
5123 /* If the range is constant and "small", unroll the loop. */
5124 if (const_bounds_p
5125 && host_integerp (lo_index, 0)
5126 && host_integerp (hi_index, 0)
5127 && (lo = tree_low_cst (lo_index, 0),
5128 hi = tree_low_cst (hi_index, 0),
5129 count = hi - lo + 1,
5130 (!MEM_P (target)
5131 || count <= 2
5132 || (host_integerp (TYPE_SIZE (elttype), 1)
5133 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5134 <= 40 * 8)))))
5135 {
5136 lo -= minelt; hi -= minelt;
5137 for (; lo <= hi; lo++)
5138 {
5139 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5140
5141 if (MEM_P (target)
5142 && !MEM_KEEP_ALIAS_SET_P (target)
5143 && TREE_CODE (type) == ARRAY_TYPE
5144 && TYPE_NONALIASED_COMPONENT (type))
5145 {
5146 target = copy_rtx (target);
5147 MEM_KEEP_ALIAS_SET_P (target) = 1;
5148 }
5149
5150 store_constructor_field
5151 (target, bitsize, bitpos, mode, value, type, cleared,
5152 get_alias_set (elttype));
5153 }
5154 }
5155 else
5156 {
5157 rtx loop_start = gen_label_rtx ();
5158 rtx loop_end = gen_label_rtx ();
5159 tree exit_cond;
5160
84217346 5161 expand_normal (hi_index);
5b0264cb
NS
5162 unsignedp = TYPE_UNSIGNED (domain);
5163
5164 index = build_decl (VAR_DECL, NULL_TREE, domain);
5165
5166 index_r
5167 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5168 &unsignedp, 0));
5169 SET_DECL_RTL (index, index_r);
5170 store_expr (lo_index, index_r, 0);
5171
5172 /* Build the head of the loop. */
5173 do_pending_stack_adjust ();
5174 emit_label (loop_start);
5175
5176 /* Assign value to element index. */
3967bc2d
RS
5177 position =
5178 fold_convert (ssizetype,
5179 fold_build2 (MINUS_EXPR,
5180 TREE_TYPE (index),
5181 index,
5182 TYPE_MIN_VALUE (domain)));
5183
5184 position =
5185 size_binop (MULT_EXPR, position,
5186 fold_convert (ssizetype,
5187 TYPE_SIZE_UNIT (elttype)));
5b0264cb 5188
84217346 5189 pos_rtx = expand_normal (position);
5b0264cb
NS
5190 xtarget = offset_address (target, pos_rtx,
5191 highest_pow2_factor (position));
5192 xtarget = adjust_address (xtarget, mode, 0);
5193 if (TREE_CODE (value) == CONSTRUCTOR)
5194 store_constructor (value, xtarget, cleared,
5195 bitsize / BITS_PER_UNIT);
5196 else
5197 store_expr (value, xtarget, 0);
5198
5199 /* Generate a conditional jump to exit the loop. */
5200 exit_cond = build2 (LT_EXPR, integer_type_node,
5201 index, hi_index);
5202 jumpif (exit_cond, loop_end);
5203
5204 /* Update the loop counter, and jump to the head of
5205 the loop. */
5206 expand_assignment (index,
5207 build2 (PLUS_EXPR, TREE_TYPE (index),
e836a5a2 5208 index, integer_one_node));
5b0264cb
NS
5209
5210 emit_jump (loop_start);
5211
5212 /* Build the end of the loop. */
5213 emit_label (loop_end);
5214 }
5215 }
5216 else if ((index != 0 && ! host_integerp (index, 0))
5217 || ! host_integerp (TYPE_SIZE (elttype), 1))
5218 {
5219 tree position;
5220
5221 if (index == 0)
5222 index = ssize_int (1);
5223
5224 if (minelt)
5225 index = fold_convert (ssizetype,
4845b383
KH
5226 fold_build2 (MINUS_EXPR,
5227 TREE_TYPE (index),
5228 index,
5229 TYPE_MIN_VALUE (domain)));
5b0264cb 5230
3967bc2d
RS
5231 position =
5232 size_binop (MULT_EXPR, index,
5233 fold_convert (ssizetype,
5234 TYPE_SIZE_UNIT (elttype)));
5b0264cb 5235 xtarget = offset_address (target,
84217346 5236 expand_normal (position),
5b0264cb
NS
5237 highest_pow2_factor (position));
5238 xtarget = adjust_address (xtarget, mode, 0);
5239 store_expr (value, xtarget, 0);
5240 }
5241 else
5242 {
5243 if (index != 0)
5244 bitpos = ((tree_low_cst (index, 0) - minelt)
5245 * tree_low_cst (TYPE_SIZE (elttype), 1));
5246 else
5247 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5248
5249 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5250 && TREE_CODE (type) == ARRAY_TYPE
5251 && TYPE_NONALIASED_COMPONENT (type))
5252 {
5253 target = copy_rtx (target);
5254 MEM_KEEP_ALIAS_SET_P (target) = 1;
5255 }
5256 store_constructor_field (target, bitsize, bitpos, mode, value,
5257 type, cleared, get_alias_set (elttype));
5258 }
5259 }
5260 break;
5261 }
3a021db2 5262
5b0264cb
NS
5263 case VECTOR_TYPE:
5264 {
4038c495
GB
5265 unsigned HOST_WIDE_INT idx;
5266 constructor_elt *ce;
5b0264cb
NS
5267 int i;
5268 int need_to_clear;
5269 int icode = 0;
5270 tree elttype = TREE_TYPE (type);
5271 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5272 enum machine_mode eltmode = TYPE_MODE (elttype);
5273 HOST_WIDE_INT bitsize;
5274 HOST_WIDE_INT bitpos;
201dd46b 5275 rtvec vector = NULL;
5b0264cb
NS
5276 unsigned n_elts;
5277
5278 gcc_assert (eltmode != BLKmode);
5279
5280 n_elts = TYPE_VECTOR_SUBPARTS (type);
5281 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5282 {
5283 enum machine_mode mode = GET_MODE (target);
5284
5285 icode = (int) vec_init_optab->handlers[mode].insn_code;
5286 if (icode != CODE_FOR_nothing)
5287 {
5288 unsigned int i;
5289
201dd46b 5290 vector = rtvec_alloc (n_elts);
5b0264cb 5291 for (i = 0; i < n_elts; i++)
201dd46b 5292 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5b0264cb
NS
5293 }
5294 }
5295
5296 /* If the constructor has fewer elements than the vector,
5297 clear the whole array first. Similarly if this is static
5298 constructor of a non-BLKmode object. */
5299 if (cleared)
5300 need_to_clear = 0;
5301 else if (REG_P (target) && TREE_STATIC (exp))
5302 need_to_clear = 1;
5303 else
5304 {
5305 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
4038c495 5306 tree value;
5b0264cb 5307
4038c495 5308 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
5b0264cb
NS
5309 {
5310 int n_elts_here = tree_low_cst
5311 (int_const_binop (TRUNC_DIV_EXPR,
4038c495 5312 TYPE_SIZE (TREE_TYPE (value)),
5b0264cb
NS
5313 TYPE_SIZE (elttype), 0), 1);
5314
5315 count += n_elts_here;
4038c495 5316 if (mostly_zeros_p (value))
5b0264cb
NS
5317 zero_count += n_elts_here;
5318 }
3a021db2 5319
5b0264cb
NS
5320 /* Clear the entire vector first if there are any missing elements,
5321 or if the incidence of zero elements is >= 75%. */
5322 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5323 }
5324
5325 if (need_to_clear && size > 0 && !vector)
5326 {
5327 if (REG_P (target))
5328 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5329 else
8148fe65 5330 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5b0264cb
NS
5331 cleared = 1;
5332 }
5333
2ab1754e 5334 /* Inform later passes that the old value is dead. */
cf26aa89 5335 if (!cleared && !vector && REG_P (target))
2ab1754e 5336 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5b0264cb
NS
5337
5338 /* Store each element of the constructor into the corresponding
5339 element of TARGET, determined by counting the elements. */
4038c495
GB
5340 for (idx = 0, i = 0;
5341 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5342 idx++, i += bitsize / elt_size)
5b0264cb 5343 {
5b0264cb 5344 HOST_WIDE_INT eltpos;
4038c495 5345 tree value = ce->value;
5b0264cb
NS
5346
5347 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5348 if (cleared && initializer_zerop (value))
5349 continue;
5350
4038c495
GB
5351 if (ce->index)
5352 eltpos = tree_low_cst (ce->index, 1);
5b0264cb
NS
5353 else
5354 eltpos = i;
5355
5356 if (vector)
5357 {
5358 /* Vector CONSTRUCTORs should only be built from smaller
5359 vectors in the case of BLKmode vectors. */
5360 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
201dd46b 5361 RTVEC_ELT (vector, eltpos)
84217346 5362 = expand_normal (value);
5b0264cb
NS
5363 }
5364 else
5365 {
5366 enum machine_mode value_mode =
5367 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
3a021db2
PB
5368 ? TYPE_MODE (TREE_TYPE (value))
5369 : eltmode;
5b0264cb
NS
5370 bitpos = eltpos * elt_size;
5371 store_constructor_field (target, bitsize, bitpos,
5372 value_mode, value, type,
5373 cleared, get_alias_set (elttype));
5374 }
5375 }
5376
5377 if (vector)
5378 emit_insn (GEN_FCN (icode)
5379 (target,
201dd46b 5380 gen_rtx_PARALLEL (GET_MODE (target), vector)));
5b0264cb
NS
5381 break;
5382 }
08f2586c 5383
5b0264cb
NS
5384 default:
5385 gcc_unreachable ();
071a6595 5386 }
bbf6f052
RK
5387}
5388
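/* Illustrative sketch (not part of expr.c): the ARRAY_TYPE case above either
   unrolls a constant-range initializer or emits a small runtime loop.  A GNU
   designated-range initializer for a hypothetical local variable, as below,
   is the kind of CONSTRUCTOR that carries a RANGE_EXPR index; a range this
   small would be unrolled into individual element stores after the array has
   been cleared.  */

static void
example_range_init (void)
{
  int v[64] = { [4 ... 7] = 1 };    /* RANGE_EXPR index, 4 nonzero elements */
  (void) v;
}
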
5389/* Store the value of EXP (an expression tree)
5390 into a subfield of TARGET which has mode MODE and occupies
5391 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5392 If MODE is VOIDmode, it means that we are storing into a bit-field.
5393
f45bdcd0
KH
5394 Always return const0_rtx unless we have something particular to
5395 return.
bbf6f052 5396
a06ef755 5397 TYPE is the type of the underlying object,
ece32014
MM
5398
5399 ALIAS_SET is the alias set for the destination. This value will
5400 (in general) be different from that for TARGET, since TARGET is a
5401 reference to the containing structure. */
bbf6f052
RK
5402
5403static rtx
502b8322 5404store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
f45bdcd0 5405 enum machine_mode mode, tree exp, tree type, int alias_set)
bbf6f052 5406{
906c4e36 5407 HOST_WIDE_INT width_mask = 0;
bbf6f052 5408
e9a25f70
JL
5409 if (TREE_CODE (exp) == ERROR_MARK)
5410 return const0_rtx;
5411
2be6a7e9
RK
5412 /* If we have nothing to store, do nothing unless the expression has
5413 side-effects. */
5414 if (bitsize == 0)
5415 return expand_expr (exp, const0_rtx, VOIDmode, 0);
6a87d634 5416 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
906c4e36 5417 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
bbf6f052
RK
5418
5419 /* If we are storing into an unaligned field of an aligned union that is
5420 in a register, we may have the mode of TARGET being an integer mode but
5421 MODE == BLKmode. In that case, get an aligned object whose size and
5422 alignment are the same as TARGET and store TARGET into it (we can avoid
5423 the store if the field being stored is the entire width of TARGET). Then
5424 call ourselves recursively to store the field into a BLKmode version of
5425 that object. Finally, load from the object into TARGET. This is not
5426 very efficient in general, but should only be slightly more expensive
5427 than the otherwise-required unaligned accesses. Perhaps this can be
85a43a2f
RK
5428 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5429 twice, once with emit_move_insn and once via store_field. */
bbf6f052
RK
5430
5431 if (mode == BLKmode
f8cfc6aa 5432 && (REG_P (target) || GET_CODE (target) == SUBREG))
bbf6f052 5433 {
85a43a2f 5434 rtx object = assign_temp (type, 0, 1, 1);
c4e59f51 5435 rtx blk_object = adjust_address (object, BLKmode, 0);
bbf6f052 5436
8752c357 5437 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
bbf6f052
RK
5438 emit_move_insn (object, target);
5439
f45bdcd0 5440 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set);
bbf6f052
RK
5441
5442 emit_move_insn (target, object);
5443
a06ef755 5444 /* We want to return the BLKmode version of the data. */
46093b97 5445 return blk_object;
bbf6f052 5446 }
c3b247b4
JM
5447
5448 if (GET_CODE (target) == CONCAT)
5449 {
5450 /* We're storing into a struct containing a single __complex. */
5451
5b0264cb 5452 gcc_assert (!bitpos);
f45bdcd0 5453 return store_expr (exp, target, 0);
c3b247b4 5454 }
bbf6f052
RK
5455
5456 /* If the structure is in a register or if the component
5457 is a bit field, we cannot use addressing to access it.
5458 Use bit-field techniques or SUBREG to store in it. */
5459
4fa52007 5460 if (mode == VOIDmode
6ab06cbb
JW
5461 || (mode != BLKmode && ! direct_store[(int) mode]
5462 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5463 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
f8cfc6aa 5464 || REG_P (target)
c980ac49 5465 || GET_CODE (target) == SUBREG
ccc98036
RS
5466 /* If the field isn't aligned enough to store as an ordinary memref,
5467 store it as a bit field. */
15b19a7d 5468 || (mode != BLKmode
9e5f281f
OH
5469 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5470 || bitpos % GET_MODE_ALIGNMENT (mode))
5471 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
502b8322 5472 || (bitpos % BITS_PER_UNIT != 0)))
14a774a9
RK
5473 /* If the RHS and field are a constant size and the size of the
5474 RHS isn't the same size as the bitfield, we must use bitfield
5475 operations. */
05bccae2
RK
5476 || (bitsize >= 0
5477 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5478 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
bbf6f052 5479 {
48cc8d3b
RH
5480 rtx temp;
5481
5482 /* If EXP is a NOP_EXPR of precision less than its mode, then that
5483 implies a mask operation. If the precision is the same size as
5484 the field we're storing into, that mask is redundant. This is
5485 particularly common with bit field assignments generated by the
5486 C front end. */
8d740330
RH
5487 if (TREE_CODE (exp) == NOP_EXPR)
5488 {
5489 tree type = TREE_TYPE (exp);
5490 if (INTEGRAL_TYPE_P (type)
5491 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5492 && bitsize == TYPE_PRECISION (type))
5493 {
5494 type = TREE_TYPE (TREE_OPERAND (exp, 0));
5495 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5496 exp = TREE_OPERAND (exp, 0);
5497 }
5498 }
48cc8d3b 5499
84217346 5500 temp = expand_normal (exp);
bbd6cf73 5501
ef19912d
RK
5502 /* If BITSIZE is narrower than the size of the type of EXP
5503 we will be narrowing TEMP. Normally, what's wanted are the
5504 low-order bits. However, if EXP's type is a record and this is
5505 big-endian machine, we want the upper BITSIZE bits. */
5506 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
65a07688 5507 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
ef19912d
RK
5508 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5509 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5510 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5511 - bitsize),
c1853da7 5512 NULL_RTX, 1);
ef19912d 5513
bbd6cf73
RK
5514 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5515 MODE. */
5516 if (mode != VOIDmode && mode != BLKmode
5517 && mode != TYPE_MODE (TREE_TYPE (exp)))
5518 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5519
a281e72d
RK
5520 /* If the modes of TARGET and TEMP are both BLKmode, both
5521 must be in memory and BITPOS must be aligned on a byte
5522 boundary. If so, we simply do a block copy. */
5523 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5524 {
5b0264cb
NS
5525 gcc_assert (MEM_P (target) && MEM_P (temp)
5526 && !(bitpos % BITS_PER_UNIT));
a281e72d 5527
f4ef873c 5528 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
a281e72d 5529 emit_block_move (target, temp,
a06ef755 5530 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
44bb111a
RH
5531 / BITS_PER_UNIT),
5532 BLOCK_OP_NORMAL);
a281e72d 5533
f45bdcd0 5534 return const0_rtx;
a281e72d
RK
5535 }
5536
bbf6f052 5537 /* Store the value in the bitfield. */
b3520980 5538 store_bit_field (target, bitsize, bitpos, mode, temp);
a06ef755 5539
bbf6f052
RK
5540 return const0_rtx;
5541 }
5542 else
5543 {
bbf6f052 5544 /* Now build a reference to just the desired component. */
f45bdcd0 5545 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
a06ef755
RK
5546
5547 if (to_rtx == target)
5548 to_rtx = copy_rtx (to_rtx);
792760b9 5549
c6df88cb 5550 MEM_SET_IN_STRUCT_P (to_rtx, 1);
10b76d73 5551 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
a06ef755 5552 set_mem_alias_set (to_rtx, alias_set);
bbf6f052 5553
f45bdcd0 5554 return store_expr (exp, to_rtx, 0);
bbf6f052
RK
5555 }
5556}
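
/* Illustrative sketch (not part of expr.c): a bit-field store such as the
   hypothetical one below reaches store_field with MODE == VOIDmode, so it is
   handled by store_bit_field rather than by an ordinary memory reference.
   The front end narrows V to the 6-bit field type (a NOP_EXPR); because that
   precision matches the width of the field, the implied mask is redundant
   and is skipped, as described in the comment above.  */

struct example_flags { unsigned ready : 1; unsigned count : 6; };

static void
example_set_count (struct example_flags *f, unsigned v)
{
  f->count = v;
}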
5557\f
5558/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
b4e3fabb
RK
5559 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5560 codes and find the ultimate containing object, which we return.
bbf6f052
RK
5561
5562 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5563 bit position, and *PUNSIGNEDP to the signedness of the field.
7bb0943f
RS
5564 If the position of the field is variable, we store a tree
5565 giving the variable offset (in units) in *POFFSET.
5566 This offset is in addition to the bit position.
5567 If the position is not variable, we store 0 in *POFFSET.
bbf6f052
RK
5568
5569 If any of the extraction expressions is volatile,
5570 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5571
5572 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5573 is a mode that can be used to access the field. In that case, *PBITSIZE
e7c33f54
RK
5574 is redundant.
5575
5576 If the field describes a variable-sized object, *PMODE is set to
5577 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
2614034e
EB
5578 this case, but the address of the object can be found.
5579
5580 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5581 look through nodes that serve as markers of a greater alignment than
5582 the one that can be deduced from the expression. These nodes make it
5583 possible for front-ends to prevent temporaries from being created by
5584 the middle-end on alignment considerations. For that purpose, the
5585 normal operating mode at high-level is to always pass FALSE so that
5586 the ultimate containing object is really returned; moreover, the
5587 associated predicate handled_component_p will always return TRUE
5588 on these nodes, thus indicating that they are essentially handled
5589 by get_inner_reference. TRUE should only be passed when the caller
5590 is scanning the expression in order to build another representation
5591 and specifically knows how to handle these nodes; as such, this is
5592 the normal operating mode in the RTL expanders. */
bbf6f052
RK
5593
5594tree
502b8322
AJ
5595get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5596 HOST_WIDE_INT *pbitpos, tree *poffset,
5597 enum machine_mode *pmode, int *punsignedp,
2614034e 5598 int *pvolatilep, bool keep_aligning)
bbf6f052
RK
5599{
5600 tree size_tree = 0;
5601 enum machine_mode mode = VOIDmode;
fed3cef0 5602 tree offset = size_zero_node;
770ae6cc 5603 tree bit_offset = bitsize_zero_node;
770ae6cc 5604 tree tem;
bbf6f052 5605
770ae6cc
RK
5606 /* First get the mode, signedness, and size. We do this from just the
5607 outermost expression. */
bbf6f052
RK
5608 if (TREE_CODE (exp) == COMPONENT_REF)
5609 {
5610 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5611 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5612 mode = DECL_MODE (TREE_OPERAND (exp, 1));
770ae6cc 5613
a150de29 5614 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
bbf6f052
RK
5615 }
5616 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5617 {
5618 size_tree = TREE_OPERAND (exp, 1);
a150de29 5619 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
bbf6f052
RK
5620 }
5621 else
5622 {
5623 mode = TYPE_MODE (TREE_TYPE (exp));
8df83eae 5624 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
770ae6cc 5625
ab87f8c8
JL
5626 if (mode == BLKmode)
5627 size_tree = TYPE_SIZE (TREE_TYPE (exp));
770ae6cc
RK
5628 else
5629 *pbitsize = GET_MODE_BITSIZE (mode);
bbf6f052 5630 }
3a94c984 5631
770ae6cc 5632 if (size_tree != 0)
bbf6f052 5633 {
770ae6cc 5634 if (! host_integerp (size_tree, 1))
e7c33f54
RK
5635 mode = BLKmode, *pbitsize = -1;
5636 else
770ae6cc 5637 *pbitsize = tree_low_cst (size_tree, 1);
bbf6f052
RK
5638 }
5639
5640 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5641 and find the ultimate containing object. */
bbf6f052
RK
5642 while (1)
5643 {
afe84921 5644 switch (TREE_CODE (exp))
bbf6f052 5645 {
afe84921
RH
5646 case BIT_FIELD_REF:
5647 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5648 TREE_OPERAND (exp, 2));
5649 break;
bbf6f052 5650
afe84921
RH
5651 case COMPONENT_REF:
5652 {
5653 tree field = TREE_OPERAND (exp, 1);
5654 tree this_offset = component_ref_field_offset (exp);
e7f3c83f 5655
afe84921
RH
5656 /* If this field hasn't been filled in yet, don't go past it.
5657 This should only happen when folding expressions made during
5658 type construction. */
5659 if (this_offset == 0)
5660 break;
e6d8c385 5661
afe84921
RH
5662 offset = size_binop (PLUS_EXPR, offset, this_offset);
5663 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5664 DECL_FIELD_BIT_OFFSET (field));
7156dead 5665
afe84921
RH
5666 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5667 }
5668 break;
7156dead 5669
afe84921
RH
5670 case ARRAY_REF:
5671 case ARRAY_RANGE_REF:
5672 {
5673 tree index = TREE_OPERAND (exp, 1);
5674 tree low_bound = array_ref_low_bound (exp);
5675 tree unit_size = array_ref_element_size (exp);
5676
5677 /* We assume all arrays have sizes that are a multiple of a byte.
5678 First subtract the lower bound, if any, in the type of the
5679 index, then convert to sizetype and multiply by the size of
5680 the array element. */
5681 if (! integer_zerop (low_bound))
4845b383
KH
5682 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
5683 index, low_bound);
afe84921
RH
5684
5685 offset = size_binop (PLUS_EXPR, offset,
5686 size_binop (MULT_EXPR,
3967bc2d 5687 fold_convert (sizetype, index),
afe84921
RH
5688 unit_size));
5689 }
5690 break;
5691
5692 case REALPART_EXPR:
afe84921
RH
5693 break;
5694
5695 case IMAGPART_EXPR:
9f25f0ad
RH
5696 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5697 bitsize_int (*pbitsize));
afe84921
RH
5698 break;
5699
afe84921 5700 case VIEW_CONVERT_EXPR:
2614034e
EB
5701 if (keep_aligning && STRICT_ALIGNMENT
5702 && (TYPE_ALIGN (TREE_TYPE (exp))
afe84921 5703 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
afe84921
RH
5704 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5705 < BIGGEST_ALIGNMENT)
5706 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5707 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5708 goto done;
5709 break;
5710
5711 default:
5712 goto done;
5713 }
7bb0943f
RS
5714
5715 /* If any reference in the chain is volatile, the effect is volatile. */
5716 if (TREE_THIS_VOLATILE (exp))
5717 *pvolatilep = 1;
839c4796 5718
bbf6f052
RK
5719 exp = TREE_OPERAND (exp, 0);
5720 }
afe84921 5721 done:
bbf6f052 5722
770ae6cc
RK
5723 /* If OFFSET is constant, see if we can return the whole thing as a
5724 constant bit position. Otherwise, split it up. */
5725 if (host_integerp (offset, 0)
3967bc2d
RS
5726 && 0 != (tem = size_binop (MULT_EXPR,
5727 fold_convert (bitsizetype, offset),
770ae6cc
RK
5728 bitsize_unit_node))
5729 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5730 && host_integerp (tem, 0))
5731 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5732 else
5733 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
b50d17a1 5734
bbf6f052 5735 *pmode = mode;
bbf6f052
RK
5736 return exp;
5737}
921b3427 5738
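/* Illustrative sketch (not part of expr.c): for a reference like the
   hypothetical one below, get_inner_reference peels the COMPONENT_REF and
   ARRAY_REF and returns the containing object *P.  Every step has a constant
   position, so *POFFSET comes back as 0, *PBITSIZE is the width of an int,
   and *PBITPOS is the combined byte/bit offset of the field and the indexed
   element.  */

struct example_inner { int v[4]; };
struct example_outer { char pad; struct example_inner in; };

static int
example_read (struct example_outer *p)
{
  return p->in.v[2];    /* ARRAY_REF inside a COMPONENT_REF inside *p */
}
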
44de5aeb
RK
5739/* Return a tree of sizetype representing the size, in bytes, of the element
5740 of EXP, an ARRAY_REF. */
5741
5742tree
5743array_ref_element_size (tree exp)
5744{
5745 tree aligned_size = TREE_OPERAND (exp, 3);
5746 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5747
5748 /* If a size was specified in the ARRAY_REF, it's the size measured
5749 in alignment units of the element type. So multiply by that value. */
5750 if (aligned_size)
bc482be4
RH
5751 {
5752 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5753 sizetype from another type of the same width and signedness. */
5754 if (TREE_TYPE (aligned_size) != sizetype)
5755 aligned_size = fold_convert (sizetype, aligned_size);
5756 return size_binop (MULT_EXPR, aligned_size,
a4e9ffe5 5757 size_int (TYPE_ALIGN_UNIT (elmt_type)));
bc482be4 5758 }
44de5aeb 5759
caf93cb0 5760 /* Otherwise, take the size from that of the element type. Substitute
44de5aeb
RK
5761 any PLACEHOLDER_EXPR that we have. */
5762 else
5763 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
5764}
5765
5766/* Return a tree representing the lower bound of the array mentioned in
5767 EXP, an ARRAY_REF. */
5768
5769tree
5770array_ref_low_bound (tree exp)
5771{
5772 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5773
5774 /* If a lower bound is specified in EXP, use it. */
5775 if (TREE_OPERAND (exp, 2))
5776 return TREE_OPERAND (exp, 2);
5777
5778 /* Otherwise, if there is a domain type and it has a lower bound, use it,
5779 substituting for a PLACEHOLDER_EXPR as needed. */
5780 if (domain_type && TYPE_MIN_VALUE (domain_type))
5781 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5782
5783 /* Otherwise, return a zero of the appropriate type. */
5212068f 5784 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
44de5aeb
RK
5785}
5786
a7e5372d
ZD
5787/* Return a tree representing the upper bound of the array mentioned in
5788 EXP, an ARRAY_REF. */
5789
5790tree
5791array_ref_up_bound (tree exp)
5792{
5793 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5794
5795 /* If there is a domain type and it has an upper bound, use it, substituting
5796 for a PLACEHOLDER_EXPR as needed. */
5797 if (domain_type && TYPE_MAX_VALUE (domain_type))
5798 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
5799
5800 /* Otherwise fail. */
5801 return NULL_TREE;
5802}
5803
44de5aeb
RK
5804/* Return a tree representing the offset, in bytes, of the field referenced
5805 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
5806
5807tree
5808component_ref_field_offset (tree exp)
5809{
5810 tree aligned_offset = TREE_OPERAND (exp, 2);
5811 tree field = TREE_OPERAND (exp, 1);
5812
5813 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5814 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
5815 value. */
5816 if (aligned_offset)
bc482be4
RH
5817 {
5818 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5819 sizetype from another type of the same width and signedness. */
5820 if (TREE_TYPE (aligned_offset) != sizetype)
5821 aligned_offset = fold_convert (sizetype, aligned_offset);
5822 return size_binop (MULT_EXPR, aligned_offset,
5823 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5824 }
44de5aeb 5825
caf93cb0 5826 /* Otherwise, take the offset from that of the field. Substitute
44de5aeb
RK
5827 any PLACEHOLDER_EXPR that we have. */
5828 else
5829 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
5830}
5831
ed239f5a
RK
5832/* Return 1 if T is an expression that get_inner_reference handles. */
5833
5834int
502b8322 5835handled_component_p (tree t)
ed239f5a
RK
5836{
5837 switch (TREE_CODE (t))
5838 {
5839 case BIT_FIELD_REF:
5840 case COMPONENT_REF:
5841 case ARRAY_REF:
5842 case ARRAY_RANGE_REF:
ed239f5a 5843 case VIEW_CONVERT_EXPR:
afe84921
RH
5844 case REALPART_EXPR:
5845 case IMAGPART_EXPR:
ed239f5a
RK
5846 return 1;
5847
ed239f5a
RK
5848 default:
5849 return 0;
5850 }
5851}
bbf6f052 5852\f
3fe44edd
RK
5853/* Given an rtx VALUE that may contain additions and multiplications, return
5854 an equivalent value that just refers to a register, memory, or constant.
5855 This is done by generating instructions to perform the arithmetic and
5856 returning a pseudo-register containing the value.
c45a13a6
RK
5857
5858 The returned value may be a REG, SUBREG, MEM or constant. */
bbf6f052
RK
5859
5860rtx
502b8322 5861force_operand (rtx value, rtx target)
bbf6f052 5862{
8a28dbcc 5863 rtx op1, op2;
bbf6f052 5864 /* Use subtarget as the target for operand 0 of a binary operation. */
b3694847 5865 rtx subtarget = get_subtarget (target);
8a28dbcc 5866 enum rtx_code code = GET_CODE (value);
bbf6f052 5867
50654f6c
ZD
 5868   /* Check for a subreg applied to an expression produced by the loop optimizer.  */
5869 if (code == SUBREG
f8cfc6aa 5870 && !REG_P (SUBREG_REG (value))
3c0cb5de 5871 && !MEM_P (SUBREG_REG (value)))
50654f6c
ZD
5872 {
5873 value = simplify_gen_subreg (GET_MODE (value),
5874 force_reg (GET_MODE (SUBREG_REG (value)),
5875 force_operand (SUBREG_REG (value),
5876 NULL_RTX)),
5877 GET_MODE (SUBREG_REG (value)),
5878 SUBREG_BYTE (value));
5879 code = GET_CODE (value);
5880 }
5881
8b015896 5882 /* Check for a PIC address load. */
8a28dbcc 5883 if ((code == PLUS || code == MINUS)
8b015896
RH
5884 && XEXP (value, 0) == pic_offset_table_rtx
5885 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5886 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5887 || GET_CODE (XEXP (value, 1)) == CONST))
5888 {
5889 if (!subtarget)
5890 subtarget = gen_reg_rtx (GET_MODE (value));
5891 emit_move_insn (subtarget, value);
5892 return subtarget;
5893 }
5894
ec8e098d 5895 if (ARITHMETIC_P (value))
bbf6f052
RK
5896 {
5897 op2 = XEXP (value, 1);
f8cfc6aa 5898 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
bbf6f052 5899 subtarget = 0;
8a28dbcc 5900 if (code == MINUS && GET_CODE (op2) == CONST_INT)
bbf6f052 5901 {
8a28dbcc 5902 code = PLUS;
bbf6f052
RK
5903 op2 = negate_rtx (GET_MODE (value), op2);
5904 }
5905
5906 /* Check for an addition with OP2 a constant integer and our first
8a28dbcc
JH
5907 operand a PLUS of a virtual register and something else. In that
5908 case, we want to emit the sum of the virtual register and the
5909 constant first and then add the other value. This allows virtual
5910 register instantiation to simply modify the constant rather than
5911 creating another one around this addition. */
5912 if (code == PLUS && GET_CODE (op2) == CONST_INT
bbf6f052 5913 && GET_CODE (XEXP (value, 0)) == PLUS
f8cfc6aa 5914 && REG_P (XEXP (XEXP (value, 0), 0))
bbf6f052
RK
5915 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5916 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5917 {
8a28dbcc
JH
5918 rtx temp = expand_simple_binop (GET_MODE (value), code,
5919 XEXP (XEXP (value, 0), 0), op2,
5920 subtarget, 0, OPTAB_LIB_WIDEN);
5921 return expand_simple_binop (GET_MODE (value), code, temp,
5922 force_operand (XEXP (XEXP (value,
5923 0), 1), 0),
5924 target, 0, OPTAB_LIB_WIDEN);
bbf6f052 5925 }
3a94c984 5926
8a28dbcc
JH
5927 op1 = force_operand (XEXP (value, 0), subtarget);
5928 op2 = force_operand (op2, NULL_RTX);
5929 switch (code)
5930 {
5931 case MULT:
5932 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5933 case DIV:
5934 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5935 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5936 target, 1, OPTAB_LIB_WIDEN);
5937 else
5938 return expand_divmod (0,
5939 FLOAT_MODE_P (GET_MODE (value))
5940 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5941 GET_MODE (value), op1, op2, target, 0);
5942 break;
5943 case MOD:
5944 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5945 target, 0);
5946 break;
5947 case UDIV:
5948 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5949 target, 1);
5950 break;
5951 case UMOD:
5952 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5953 target, 1);
5954 break;
5955 case ASHIFTRT:
5956 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5957 target, 0, OPTAB_LIB_WIDEN);
5958 break;
5959 default:
5960 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5961 target, 1, OPTAB_LIB_WIDEN);
5962 }
5963 }
ec8e098d 5964 if (UNARY_P (value))
8a28dbcc 5965 {
72a10eff
RS
5966 if (!target)
5967 target = gen_reg_rtx (GET_MODE (value));
8a28dbcc 5968 op1 = force_operand (XEXP (value, 0), NULL_RTX);
1fd5360d
R
5969 switch (code)
5970 {
72a10eff
RS
5971 case ZERO_EXTEND:
5972 case SIGN_EXTEND:
1fd5360d 5973 case TRUNCATE:
72a10eff
RS
5974 convert_move (target, op1, code == ZERO_EXTEND);
5975 return target;
5976
5977 case FIX:
5978 case UNSIGNED_FIX:
5979 expand_fix (target, op1, code == UNSIGNED_FIX);
5980 return target;
5981
5982 case FLOAT:
5983 case UNSIGNED_FLOAT:
5984 expand_float (target, op1, code == UNSIGNED_FLOAT);
5985 return target;
5986
1fd5360d
R
5987 default:
5988 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5989 }
bbf6f052 5990 }
34e81b5a
RK
5991
5992#ifdef INSN_SCHEDULING
 5993   /* On machines that have insn scheduling, we want all memory references to be
5994 explicit, so we need to deal with such paradoxical SUBREGs. */
3c0cb5de 5995 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
34e81b5a
RK
5996 && (GET_MODE_SIZE (GET_MODE (value))
5997 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5998 value
5999 = simplify_gen_subreg (GET_MODE (value),
6000 force_reg (GET_MODE (SUBREG_REG (value)),
6001 force_operand (SUBREG_REG (value),
6002 NULL_RTX)),
6003 GET_MODE (SUBREG_REG (value)),
6004 SUBREG_BYTE (value));
6005#endif
6006
bbf6f052
RK
6007 return value;
6008}
6009\f
bbf6f052 6010/* Subroutine of expand_expr: return nonzero iff there is no way that
e5e809f4
JL
6011 EXP can reference X, which is being modified. TOP_P is nonzero if this
6012 call is going to be used to determine whether we need a temporary
ff439b5f
CB
6013 for EXP, as opposed to a recursive call to this function.
6014
6015 It is always safe for this routine to return zero since it merely
6016 searches for optimization opportunities. */
bbf6f052 6017
8f17b5c5 6018int
502b8322 6019safe_from_p (rtx x, tree exp, int top_p)
bbf6f052
RK
6020{
6021 rtx exp_rtl = 0;
6022 int i, nops;
6023
6676e72f
RK
6024 if (x == 0
6025 /* If EXP has varying size, we MUST use a target since we currently
8f6562d0
PB
6026 have no way of allocating temporaries of variable size
6027 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6028 So we assume here that something at a higher level has prevented a
f4510f37 6029 clash. This is somewhat bogus, but the best we can do. Only
e5e809f4 6030 do this when X is BLKmode and when we are at the top level. */
d0f062fb 6031 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
f4510f37 6032 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
8f6562d0
PB
6033 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6034 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6035 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6036 != INTEGER_CST)
1da68f56
RK
6037 && GET_MODE (x) == BLKmode)
6038 /* If X is in the outgoing argument area, it is always safe. */
3c0cb5de 6039 || (MEM_P (x)
1da68f56
RK
6040 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6041 || (GET_CODE (XEXP (x, 0)) == PLUS
6042 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
bbf6f052
RK
6043 return 1;
6044
6045 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6046 find the underlying pseudo. */
6047 if (GET_CODE (x) == SUBREG)
6048 {
6049 x = SUBREG_REG (x);
f8cfc6aa 6050 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
bbf6f052
RK
6051 return 0;
6052 }
6053
1da68f56 6054 /* Now look at our tree code and possibly recurse. */
bbf6f052
RK
6055 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6056 {
6615c446 6057 case tcc_declaration:
a9772b60 6058 exp_rtl = DECL_RTL_IF_SET (exp);
bbf6f052
RK
6059 break;
6060
6615c446 6061 case tcc_constant:
bbf6f052
RK
6062 return 1;
6063
6615c446 6064 case tcc_exceptional:
bbf6f052 6065 if (TREE_CODE (exp) == TREE_LIST)
f8d4be57
CE
6066 {
6067 while (1)
6068 {
6069 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6070 return 0;
6071 exp = TREE_CHAIN (exp);
6072 if (!exp)
6073 return 1;
6074 if (TREE_CODE (exp) != TREE_LIST)
6075 return safe_from_p (x, exp, 0);
6076 }
6077 }
ff439b5f
CB
6078 else if (TREE_CODE (exp) == ERROR_MARK)
6079 return 1; /* An already-visited SAVE_EXPR? */
bbf6f052
RK
6080 else
6081 return 0;
6082
6615c446 6083 case tcc_statement:
350fae66
RK
6084 /* The only case we look at here is the DECL_INITIAL inside a
6085 DECL_EXPR. */
6086 return (TREE_CODE (exp) != DECL_EXPR
6087 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
6088 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
6089 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
6090
6615c446
JO
6091 case tcc_binary:
6092 case tcc_comparison:
f8d4be57
CE
6093 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6094 return 0;
5d3cc252 6095 /* Fall through. */
f8d4be57 6096
6615c446 6097 case tcc_unary:
f8d4be57 6098 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
bbf6f052 6099
6615c446
JO
6100 case tcc_expression:
6101 case tcc_reference:
bbf6f052
RK
6102 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6103 the expression. If it is set, we conflict iff we are that rtx or
6104 both are in memory. Otherwise, we check all operands of the
6105 expression recursively. */
6106
6107 switch (TREE_CODE (exp))
6108 {
6109 case ADDR_EXPR:
70072ed9
RK
6110 /* If the operand is static or we are static, we can't conflict.
6111 Likewise if we don't conflict with the operand at all. */
6112 if (staticp (TREE_OPERAND (exp, 0))
6113 || TREE_STATIC (exp)
6114 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6115 return 1;
6116
6117 /* Otherwise, the only way this can conflict is if we are taking
 6118	     the address of a DECL and that address is part of X, which is
6119 very rare. */
6120 exp = TREE_OPERAND (exp, 0);
6121 if (DECL_P (exp))
6122 {
6123 if (!DECL_RTL_SET_P (exp)
3c0cb5de 6124 || !MEM_P (DECL_RTL (exp)))
70072ed9
RK
6125 return 0;
6126 else
6127 exp_rtl = XEXP (DECL_RTL (exp), 0);
6128 }
6129 break;
bbf6f052 6130
7ccf35ed
DN
6131 case MISALIGNED_INDIRECT_REF:
6132 case ALIGN_INDIRECT_REF:
bbf6f052 6133 case INDIRECT_REF:
3c0cb5de 6134 if (MEM_P (x)
1da68f56
RK
6135 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6136 get_alias_set (exp)))
bbf6f052
RK
6137 return 0;
6138 break;
6139
6140 case CALL_EXPR:
f9808f81
MM
6141 /* Assume that the call will clobber all hard registers and
6142 all of memory. */
f8cfc6aa 6143 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
3c0cb5de 6144 || MEM_P (x))
f9808f81 6145 return 0;
bbf6f052
RK
6146 break;
6147
bbf6f052 6148 case WITH_CLEANUP_EXPR:
5dab5552 6149 case CLEANUP_POINT_EXPR:
ac45df5d 6150 /* Lowered by gimplify.c. */
5b0264cb 6151 gcc_unreachable ();
ac45df5d 6152
bbf6f052 6153 case SAVE_EXPR:
82c82743 6154 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
bbf6f052 6155
e9a25f70
JL
6156 default:
6157 break;
bbf6f052
RK
6158 }
6159
6160 /* If we have an rtx, we do not need to scan our operands. */
6161 if (exp_rtl)
6162 break;
6163
54e4aedb 6164 nops = TREE_CODE_LENGTH (TREE_CODE (exp));
bbf6f052
RK
6165 for (i = 0; i < nops; i++)
6166 if (TREE_OPERAND (exp, i) != 0
e5e809f4 6167 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
bbf6f052 6168 return 0;
8f17b5c5
MM
6169
6170 /* If this is a language-specific tree code, it may require
6171 special handling. */
dbbbbf3b
JDA
6172 if ((unsigned int) TREE_CODE (exp)
6173 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
ae2bcd98 6174 && !lang_hooks.safe_from_p (x, exp))
8f17b5c5 6175 return 0;
6615c446
JO
6176 break;
6177
6178 case tcc_type:
6179 /* Should never get a type here. */
6180 gcc_unreachable ();
bbf6f052
RK
6181 }
6182
6183 /* If we have an rtl, find any enclosed object. Then see if we conflict
6184 with it. */
6185 if (exp_rtl)
6186 {
6187 if (GET_CODE (exp_rtl) == SUBREG)
6188 {
6189 exp_rtl = SUBREG_REG (exp_rtl);
f8cfc6aa 6190 if (REG_P (exp_rtl)
bbf6f052
RK
6191 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6192 return 0;
6193 }
6194
6195 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
1da68f56 6196 are memory and they conflict. */
bbf6f052 6197 return ! (rtx_equal_p (x, exp_rtl)
3c0cb5de 6198 || (MEM_P (x) && MEM_P (exp_rtl)
21117a17 6199 && true_dependence (exp_rtl, VOIDmode, x,
1da68f56 6200 rtx_addr_varies_p)));
bbf6f052
RK
6201 }
6202
6203 /* If we reach here, it is safe. */
6204 return 1;
6205}
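
/* Illustrative sketch (not part of expr.c): in the hypothetical assignment
   below the right-hand side reads the very object being stored into, so
   safe_from_p reports a conflict for the destination and the expander
   evaluates the constructor into a temporary before copying it over.  */

struct example_pair { int a, b; };

static void
example_swap_fields (struct example_pair *p)
{
  *p = (struct example_pair) { p->b, p->a };    /* RHS references *p */
}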
6206
14a774a9 6207\f
0d4903b8
RK
6208/* Return the highest power of two that EXP is known to be a multiple of.
6209 This is used in updating alignment of MEMs in array references. */
6210
86a07404 6211unsigned HOST_WIDE_INT
502b8322 6212highest_pow2_factor (tree exp)
0d4903b8 6213{
9ceca302 6214 unsigned HOST_WIDE_INT c0, c1;
0d4903b8
RK
6215
6216 switch (TREE_CODE (exp))
6217 {
6218 case INTEGER_CST:
e0f1be5c
JJ
6219 /* We can find the lowest bit that's a one. If the low
6220 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6221 We need to handle this case since we can find it in a COND_EXPR,
a98ebe2e 6222 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
e0f1be5c 6223 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
3a531a8b 6224 later ICE. */
e0f1be5c 6225 if (TREE_CONSTANT_OVERFLOW (exp))
1ed1b4fb 6226 return BIGGEST_ALIGNMENT;
e0f1be5c 6227 else
0d4903b8 6228 {
e0f1be5c
JJ
6229 /* Note: tree_low_cst is intentionally not used here,
6230 we don't care about the upper bits. */
6231 c0 = TREE_INT_CST_LOW (exp);
6232 c0 &= -c0;
6233 return c0 ? c0 : BIGGEST_ALIGNMENT;
0d4903b8
RK
6234 }
6235 break;
6236
65a07688 6237 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
0d4903b8
RK
6238 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6239 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6240 return MIN (c0, c1);
6241
6242 case MULT_EXPR:
6243 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6244 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6245 return c0 * c1;
6246
6247 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6248 case CEIL_DIV_EXPR:
65a07688
RK
6249 if (integer_pow2p (TREE_OPERAND (exp, 1))
6250 && host_integerp (TREE_OPERAND (exp, 1), 1))
6251 {
6252 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6253 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6254 return MAX (1, c0 / c1);
6255 }
6256 break;
0d4903b8
RK
6257
6258 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6fce44af 6259 case SAVE_EXPR:
0d4903b8
RK
6260 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6261
65a07688
RK
6262 case COMPOUND_EXPR:
6263 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6264
0d4903b8
RK
6265 case COND_EXPR:
6266 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6267 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6268 return MIN (c0, c1);
6269
6270 default:
6271 break;
6272 }
6273
6274 return 1;
6275}
818c0c94 6276
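/* Illustrative sketch (not part of expr.c): the INTEGER_CST case above relies
   on the classic lowest-set-bit trick.  A standalone version of that one
   step, with a hypothetical name, looks like this.  */

static unsigned HOST_WIDE_INT
example_lowest_set_bit (unsigned HOST_WIDE_INT c)
{
  return c & -c;        /* e.g. 24 -> 8, 40 -> 8, 7 -> 1; 0 stays 0 */
}
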
d50a16c4
EB
6277/* Similar, except that the alignment requirements of TARGET are
6278 taken into account. Assume it is at least as aligned as its
6279 type, unless it is a COMPONENT_REF in which case the layout of
6280 the structure gives the alignment. */
818c0c94 6281
9ceca302 6282static unsigned HOST_WIDE_INT
d50a16c4 6283highest_pow2_factor_for_target (tree target, tree exp)
818c0c94 6284{
d50a16c4 6285 unsigned HOST_WIDE_INT target_align, factor;
818c0c94
RH
6286
6287 factor = highest_pow2_factor (exp);
d50a16c4 6288 if (TREE_CODE (target) == COMPONENT_REF)
a4e9ffe5 6289 target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
d50a16c4 6290 else
a4e9ffe5 6291 target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
d50a16c4 6292 return MAX (factor, target_align);
818c0c94 6293}
0d4903b8 6294\f
6de9cd9a
DN
6295/* Expands variable VAR. */
6296
6297void
6298expand_var (tree var)
6299{
6300 if (DECL_EXTERNAL (var))
6301 return;
6302
6303 if (TREE_STATIC (var))
6304 /* If this is an inlined copy of a static local variable,
6305 look up the original decl. */
6306 var = DECL_ORIGIN (var);
6307
6308 if (TREE_STATIC (var)
6309 ? !TREE_ASM_WRITTEN (var)
6310 : !DECL_RTL_SET_P (var))
6311 {
833b3afe 6312 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
1a186ec5 6313 /* Should be ignored. */;
673fda6b 6314 else if (lang_hooks.expand_decl (var))
6de9cd9a
DN
6315 /* OK. */;
6316 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6317 expand_decl (var);
6318 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
0e6df31e 6319 rest_of_decl_compilation (var, 0, 0);
6de9cd9a 6320 else
5b0264cb
NS
6321 /* No expansion needed. */
6322 gcc_assert (TREE_CODE (var) == TYPE_DECL
6323 || TREE_CODE (var) == CONST_DECL
6324 || TREE_CODE (var) == FUNCTION_DECL
6325 || TREE_CODE (var) == LABEL_DECL);
6de9cd9a
DN
6326 }
6327}
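/* Illustrative example (not part of the original source), assuming the
   lang_hooks.expand_decl hook declines to handle the variable itself:
   a function-local "static int counter;" takes the TREE_STATIC path and is
   handed to rest_of_decl_compilation, while a plain automatic
   "int counter;" goes through expand_decl to receive a pseudo register or
   a stack slot.  */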
6328
eb698c58
RS
6329/* Subroutine of expand_expr. Expand the two operands of a binary
6330 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6331 The value may be stored in TARGET if TARGET is nonzero. The
6332 MODIFIER argument is as documented by expand_expr. */
6333
6334static void
6335expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6336 enum expand_modifier modifier)
6337{
6338 if (! safe_from_p (target, exp1, 1))
6339 target = 0;
6340 if (operand_equal_p (exp0, exp1, 0))
6341 {
6342 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6343 *op1 = copy_rtx (*op0);
6344 }
6345 else
6346 {
c67e6e14
RS
6347 /* If we need to preserve evaluation order, copy exp0 into its own
6348 temporary variable so that it can't be clobbered by exp1. */
6349 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6350 exp0 = save_expr (exp0);
eb698c58
RS
6351 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6352 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6353 }
6354}
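/* Illustrative example (not part of the original source): for X + X the
   operand_equal_p test above expands X only once and copies the resulting
   rtx for the second operand.  When flag_evaluation_order is set (as some
   front ends require) and the second operand has side effects, e.g.
   X + F (), X is first wrapped in a SAVE_EXPR so the call cannot clobber
   its value.  A typical call from a binary-operator expander looks like:

	expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
			 subtarget, &op0, &op1, EXPAND_NORMAL);  */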
6355
f47e9b4e 6356\f
c0220ea4 6357/* Return a MEM that contains constant EXP. DEFER is as for
aacd3885
RS
6358 output_constant_def and MODIFIER is as for expand_expr. */
6359
6360static rtx
6361expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
6362{
6363 rtx mem;
6364
6365 mem = output_constant_def (exp, defer);
6366 if (modifier != EXPAND_INITIALIZER)
6367 mem = use_anchored_address (mem);
6368 return mem;
6369}
6370
70bb498a 6371/* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6377bb9a
RH
6372 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6373
6374static rtx
70bb498a
RH
6375expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6376 enum expand_modifier modifier)
6377bb9a
RH
6377{
6378 rtx result, subtarget;
6379 tree inner, offset;
6380 HOST_WIDE_INT bitsize, bitpos;
6381 int volatilep, unsignedp;
6382 enum machine_mode mode1;
6383
6384 /* If we are taking the address of a constant and are at the top level,
6385 we have to use output_constant_def since we can't call force_const_mem
6386 at top level. */
6387 /* ??? This should be considered a front-end bug. We should not be
6388 generating ADDR_EXPR of something that isn't an LVALUE. The only
6389 exception here is STRING_CST. */
6390 if (TREE_CODE (exp) == CONSTRUCTOR
6615c446 6391 || CONSTANT_CLASS_P (exp))
aacd3885 6392 return XEXP (expand_expr_constant (exp, 0, modifier), 0);
6377bb9a
RH
6393
6394 /* Everything must be something allowed by is_gimple_addressable. */
6395 switch (TREE_CODE (exp))
6396 {
6397 case INDIRECT_REF:
6398 /* This case will happen via recursion for &a->b. */
aacd3885 6399 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6377bb9a
RH
6400
6401 case CONST_DECL:
6402 /* Recurse and make the output_constant_def clause above handle this. */
70bb498a 6403 return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
b0b324b0 6404 tmode, modifier);
6377bb9a
RH
6405
6406 case REALPART_EXPR:
6407 /* The real part of the complex number is always first, therefore
6408 the address is the same as the address of the parent object. */
6409 offset = 0;
6410 bitpos = 0;
6411 inner = TREE_OPERAND (exp, 0);
6412 break;
6413
6414 case IMAGPART_EXPR:
6415 /* The imaginary part of the complex number is always second.
2a7e31df 6416 The expression is therefore always offset by the size of the
6377bb9a
RH
6417 scalar type. */
6418 offset = 0;
6419 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6420 inner = TREE_OPERAND (exp, 0);
6421 break;
6422
6423 default:
6424 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6425 expand_expr, as that can have various side effects; LABEL_DECLs for
6426 example, may not have their DECL_RTL set yet. Assume language
6427 specific tree nodes can be expanded in some interesting way. */
6428 if (DECL_P (exp)
6429 || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
6430 {
6431 result = expand_expr (exp, target, tmode,
6432 modifier == EXPAND_INITIALIZER
6433 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6434
6435 /* If the DECL isn't in memory, then the DECL wasn't properly
6436 marked TREE_ADDRESSABLE, which will be either a front-end
6437 or a tree optimizer bug. */
2ca202e7 6438 gcc_assert (MEM_P (result));
6377bb9a
RH
6439 result = XEXP (result, 0);
6440
6441 /* ??? Is this needed anymore? */
b0b324b0 6442	  if (DECL_P (exp) && ! TREE_USED (exp))
6377bb9a
RH
6443 {
6444 assemble_external (exp);
6445 TREE_USED (exp) = 1;
6446 }
6447
6448 if (modifier != EXPAND_INITIALIZER
6449 && modifier != EXPAND_CONST_ADDRESS)
6450 result = force_operand (result, target);
6451 return result;
6452 }
6453
2614034e
EB
6454 /* Pass FALSE as the last argument to get_inner_reference although
6455 we are expanding to RTL. The rationale is that we know how to
6456 handle "aligning nodes" here: we can just bypass them because
6457 they won't change the final object whose address will be returned
6458 (they actually exist only for that purpose). */
6377bb9a 6459 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
2614034e 6460 &mode1, &unsignedp, &volatilep, false);
6377bb9a
RH
6461 break;
6462 }
6463
6464 /* We must have made progress. */
5b0264cb 6465 gcc_assert (inner != exp);
6377bb9a
RH
6466
6467 subtarget = offset || bitpos ? NULL_RTX : target;
70bb498a 6468 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6377bb9a 6469
6377bb9a
RH
6470 if (offset)
6471 {
6472 rtx tmp;
6473
6474 if (modifier != EXPAND_NORMAL)
6475 result = force_operand (result, NULL);
6476 tmp = expand_expr (offset, NULL, tmode, EXPAND_NORMAL);
6477
b0b324b0
RH
6478 result = convert_memory_address (tmode, result);
6479 tmp = convert_memory_address (tmode, tmp);
6480
d047a201 6481 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6377bb9a
RH
6482 result = gen_rtx_PLUS (tmode, result, tmp);
6483 else
6484 {
6485 subtarget = bitpos ? NULL_RTX : target;
6486 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6487 1, OPTAB_LIB_WIDEN);
6488 }
6489 }
6490
6491 if (bitpos)
6492 {
6493 /* Someone beforehand should have rejected taking the address
6494 of such an object. */
b0b324b0 6495 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6377bb9a
RH
6496
6497 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6498 if (modifier < EXPAND_SUM)
6499 result = force_operand (result, target);
6500 }
6501
6502 return result;
6503}
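/* Illustrative example (not part of the original source): taking the
   address of the imaginary part of a _Complex double object C (GNU C:
   &__imag__ C) reaches the IMAGPART_EXPR case above with
   bitpos = GET_MODE_BITSIZE (DFmode).  On a target with BITS_PER_UNIT == 8
   and a 64-bit DFmode, the recursive call returns the address of C and the
   bitpos handling then adds 8 bytes via plus_constant.  */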
6504
70bb498a
RH
6505/* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6506 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6507
6508static rtx
6509expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6510 enum expand_modifier modifier)
6511{
6512 enum machine_mode rmode;
6513 rtx result;
6514
b0b324b0
RH
6515 /* Target mode of VOIDmode says "whatever's natural". */
6516 if (tmode == VOIDmode)
6517 tmode = TYPE_MODE (TREE_TYPE (exp));
6518
6519 /* We can get called with some Weird Things if the user does silliness
6520 like "(short) &a". In that case, convert_memory_address won't do
6521 the right thing, so ignore the given target mode. */
103b83ea 6522 if (tmode != Pmode && tmode != ptr_mode)
b0b324b0
RH
6523 tmode = Pmode;
6524
70bb498a
RH
6525 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
6526 tmode, modifier);
6527
 6528	  /* Despite expand_expr's claims about ignoring TMODE when not
b0b324b0
RH
6529 strictly convenient, stuff breaks if we don't honor it. Note
6530 that combined with the above, we only do this for pointer modes. */
70bb498a
RH
6531 rmode = GET_MODE (result);
6532 if (rmode == VOIDmode)
6533 rmode = tmode;
6534 if (rmode != tmode)
6535 result = convert_memory_address (tmode, result);
b0b324b0 6536
70bb498a
RH
6537 return result;
6538}
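/* Illustrative example (not part of the original source): for a cast such
   as (short) &a, the caller may pass TMODE == HImode; since that is neither
   Pmode nor ptr_mode, the code above computes the address in Pmode and
   leaves the narrowing to the enclosing conversion.  */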
6539
6540
bbf6f052
RK
6541/* expand_expr: generate code for computing expression EXP.
6542 An rtx for the computed value is returned. The value is never null.
6543 In the case of a void EXP, const0_rtx is returned.
6544
6545 The value may be stored in TARGET if TARGET is nonzero.
6546 TARGET is just a suggestion; callers must assume that
6547 the rtx returned may not be the same as TARGET.
6548
6549 If TARGET is CONST0_RTX, it means that the value will be ignored.
6550
6551 If TMODE is not VOIDmode, it suggests generating the
6552 result in mode TMODE. But this is done only when convenient.
 6553   Otherwise, TMODE is ignored and the value is generated in its natural mode.
6554 TMODE is just a suggestion; callers must assume that
6555 the rtx returned may not have mode TMODE.
6556
d6a5ac33
RK
6557 Note that TARGET may have neither TMODE nor MODE. In that case, it
6558 probably will not be used.
bbf6f052
RK
6559
6560 If MODIFIER is EXPAND_SUM then when EXP is an addition
6561 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6562 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6563 products as above, or REG or MEM, or constant.
6564 Ordinarily in such cases we would output mul or add instructions
6565 and then return a pseudo reg containing the sum.
6566
6567 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6568 it also marks a label as absolutely required (it can't be dead).
26fcb35a 6569 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
d6a5ac33
RK
6570 This is used for outputting expressions used in initializers.
6571
6572 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6573 with a constant address even if that address is not normally legitimate.
8403445a
AM
6574 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6575
6576 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6577 a call parameter. Such targets require special care as we haven't yet
6578 marked TARGET so that it's safe from being trashed by libcalls. We
6579 don't want to use TARGET for anything but the final result;
 6580   intermediate values must go elsewhere.  Additionally, calls to
caf93cb0 6581 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
0fab64a3
MM
6582
6583 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6584 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6585 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6586 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6587 recursively. */
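/* Illustrative example (not part of the original source): with
   MODIFIER == EXPAND_SUM, an address computation such as &a[i] for a
   4-byte element type may come back as
   (plus (mult (reg i) (const_int 4)) (symbol_ref a))
   instead of being forced into a single pseudo, so the caller can fold the
   whole expression into one addressing mode.  */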
bbf6f052 6588
6de9cd9a
DN
6589static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6590 enum expand_modifier, rtx *);
6591
bbf6f052 6592rtx
0fab64a3
MM
6593expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6594 enum expand_modifier modifier, rtx *alt_rtl)
6de9cd9a
DN
6595{
6596 int rn = -1;
6597 rtx ret, last = NULL;
6598
6599 /* Handle ERROR_MARK before anybody tries to access its type. */
6600 if (TREE_CODE (exp) == ERROR_MARK
6601 || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
6602 {
6603 ret = CONST0_RTX (tmode);
6604 return ret ? ret : const0_rtx;
6605 }
6606
6607 if (flag_non_call_exceptions)
6608 {
6609 rn = lookup_stmt_eh_region (exp);
6610 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
6611 if (rn >= 0)
6612 last = get_last_insn ();
6613 }
6614
6615 /* If this is an expression of some kind and it has an associated line
caf93cb0 6616 number, then emit the line number before expanding the expression.
6de9cd9a
DN
6617
6618 We need to save and restore the file and line information so that
6619 errors discovered during expansion are emitted with the right
caf93cb0 6620      information.  It would be better if the diagnostic routines
6de9cd9a
DN
6621 used the file/line information embedded in the tree nodes rather
6622 than globals. */
c48dc958 6623 if (cfun && cfun->ib_boundaries_block && EXPR_HAS_LOCATION (exp))
6de9cd9a
DN
6624 {
6625 location_t saved_location = input_location;
6626 input_location = EXPR_LOCATION (exp);
6627 emit_line_note (input_location);
caf93cb0 6628
6de9cd9a 6629 /* Record where the insns produced belong. */
1ea463a2 6630 record_block_change (TREE_BLOCK (exp));
6de9cd9a
DN
6631
6632 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6633
6634 input_location = saved_location;
6635 }
6636 else
6637 {
6638 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6639 }
6640
6641 /* If using non-call exceptions, mark all insns that may trap.
6642 expand_call() will mark CALL_INSNs before we get to this code,
6643 but it doesn't handle libcalls, and these may trap. */
6644 if (rn >= 0)
caf93cb0 6645 {
6de9cd9a 6646 rtx insn;
caf93cb0 6647 for (insn = next_real_insn (last); insn;
6de9cd9a
DN
6648 insn = next_real_insn (insn))
6649 {
6650 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6651 /* If we want exceptions for non-call insns, any
6652 may_trap_p instruction may throw. */
6653 && GET_CODE (PATTERN (insn)) != CLOBBER
6654 && GET_CODE (PATTERN (insn)) != USE
4b4bf941 6655 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
6de9cd9a
DN
6656 {
6657 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6658 REG_NOTES (insn));
6659 }
6660 }
6661 }
6662
6663 return ret;
6664}
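/* Illustrative example (not part of the original source): under
   -fnon-call-exceptions, a division that expands to a libcall yields a
   CALL_INSN without a REG_EH_REGION note; the loop above attaches one
   carrying the statement's region number RN so that the insn is still
   treated as potentially throwing.  */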
6665
6666static rtx
6667expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6668 enum expand_modifier modifier, rtx *alt_rtl)
bbf6f052 6669{
aacd3885 6670 rtx op0, op1, temp, decl_rtl;
bbf6f052 6671 tree type = TREE_TYPE (exp);
8df83eae 6672 int unsignedp;
b3694847
SS
6673 enum machine_mode mode;
6674 enum tree_code code = TREE_CODE (exp);
bbf6f052 6675 optab this_optab;
68557e14
ML
6676 rtx subtarget, original_target;
6677 int ignore;
8b44057d 6678 tree context, subexp0, subexp1;
bc15d0ef
JM
6679 bool reduce_bit_field = false;
6680#define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
6681 ? reduce_to_bit_field_precision ((expr), \
6682 target, \
6683 type) \
6684 : (expr))
bbf6f052 6685
68557e14 6686 mode = TYPE_MODE (type);
8df83eae 6687 unsignedp = TYPE_UNSIGNED (type);
bc15d0ef
JM
6688 if (lang_hooks.reduce_bit_field_operations
6689 && TREE_CODE (type) == INTEGER_TYPE
6690 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
6691 {
6692 /* An operation in what may be a bit-field type needs the
6693 result to be reduced to the precision of the bit-field type,
6694 which is narrower than that of the type's mode. */
6695 reduce_bit_field = true;
6696 if (modifier == EXPAND_STACK_PARM)
6697 target = 0;
6698 }
8df83eae 6699
68557e14 6700 /* Use subtarget as the target for operand 0 of a binary operation. */
296b4ed9 6701 subtarget = get_subtarget (target);
68557e14
ML
6702 original_target = target;
6703 ignore = (target == const0_rtx
6704 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
3a18db48
AP
6705 || code == CONVERT_EXPR || code == COND_EXPR
6706 || code == VIEW_CONVERT_EXPR)
68557e14
ML
6707 && TREE_CODE (type) == VOID_TYPE));
6708
dd27116b
RK
6709 /* If we are going to ignore this result, we need only do something
6710 if there is a side-effect somewhere in the expression. If there
b50d17a1
RK
6711 is, short-circuit the most common cases here. Note that we must
6712 not call expand_expr with anything but const0_rtx in case this
6713 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
bbf6f052 6714
dd27116b
RK
6715 if (ignore)
6716 {
6717 if (! TREE_SIDE_EFFECTS (exp))
6718 return const0_rtx;
6719
14a774a9
RK
6720 /* Ensure we reference a volatile object even if value is ignored, but
6721 don't do this if all we are doing is taking its address. */
dd27116b
RK
6722 if (TREE_THIS_VOLATILE (exp)
6723 && TREE_CODE (exp) != FUNCTION_DECL
14a774a9
RK
6724 && mode != VOIDmode && mode != BLKmode
6725 && modifier != EXPAND_CONST_ADDRESS)
dd27116b 6726 {
37a08a29 6727 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
3c0cb5de 6728 if (MEM_P (temp))
dd27116b
RK
6729 temp = copy_to_reg (temp);
6730 return const0_rtx;
6731 }
6732
6615c446
JO
6733 if (TREE_CODE_CLASS (code) == tcc_unary
6734 || code == COMPONENT_REF || code == INDIRECT_REF)
37a08a29
RK
6735 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6736 modifier);
6737
6615c446
JO
6738 else if (TREE_CODE_CLASS (code) == tcc_binary
6739 || TREE_CODE_CLASS (code) == tcc_comparison
b4e3fabb 6740 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
dd27116b 6741 {
37a08a29
RK
6742 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6743 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
dd27116b
RK
6744 return const0_rtx;
6745 }
14a774a9
RK
6746 else if (code == BIT_FIELD_REF)
6747 {
37a08a29
RK
6748 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6749 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6750 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
14a774a9
RK
6751 return const0_rtx;
6752 }
37a08a29 6753
90764a87 6754 target = 0;
dd27116b 6755 }
bbf6f052 6756
bbf6f052 6757
bbf6f052
RK
6758 switch (code)
6759 {
6760 case LABEL_DECL:
b552441b
RS
6761 {
6762 tree function = decl_function_context (exp);
c5c76735 6763
6de9cd9a
DN
6764 temp = label_rtx (exp);
6765 temp = gen_rtx_LABEL_REF (Pmode, temp);
6766
d0977240 6767 if (function != current_function_decl
6de9cd9a
DN
6768 && function != 0)
6769 LABEL_REF_NONLOCAL_P (temp) = 1;
6770
6771 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
26fcb35a 6772 return temp;
b552441b 6773 }
bbf6f052 6774
8b11a64c
ZD
6775 case SSA_NAME:
6776 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
6777 NULL);
6778
bbf6f052 6779 case PARM_DECL:
bbf6f052 6780 case VAR_DECL:
2dca20cd
RS
6781 /* If a static var's type was incomplete when the decl was written,
6782 but the type is complete now, lay out the decl now. */
ca06cfe6
RH
6783 if (DECL_SIZE (exp) == 0
6784 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
2dca20cd 6785 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
a46666a9 6786 layout_decl (exp, 0);
921b3427 6787
0f41302f 6788 /* ... fall through ... */
d6a5ac33 6789
2dca20cd 6790 case FUNCTION_DECL:
bbf6f052 6791 case RESULT_DECL:
aacd3885
RS
6792 decl_rtl = DECL_RTL (exp);
6793 gcc_assert (decl_rtl);
d6a5ac33 6794
e44842fe
RK
6795 /* Ensure variable marked as used even if it doesn't go through
6796 a parser. If it hasn't be used yet, write out an external
6797 definition. */
6798 if (! TREE_USED (exp))
6799 {
6800 assemble_external (exp);
6801 TREE_USED (exp) = 1;
6802 }
6803
dc6d66b3
RK
6804 /* Show we haven't gotten RTL for this yet. */
6805 temp = 0;
6806
ab8907ef
RH
6807 /* Variables inherited from containing functions should have
6808 been lowered by this point. */
bbf6f052 6809 context = decl_function_context (exp);
5b0264cb
NS
6810 gcc_assert (!context
6811 || context == current_function_decl
6812 || TREE_STATIC (exp)
6813 /* ??? C++ creates functions that are not TREE_STATIC. */
6814 || TREE_CODE (exp) == FUNCTION_DECL);
4af3895e 6815
bbf6f052
RK
6816 /* This is the case of an array whose size is to be determined
6817 from its initializer, while the initializer is still being parsed.
6818 See expand_decl. */
d6a5ac33 6819
aacd3885
RS
6820 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
6821 temp = validize_mem (decl_rtl);
d6a5ac33
RK
6822
6823 /* If DECL_RTL is memory, we are in the normal case and either
6824 the address is not valid or it is not a register and -fforce-addr
6825 is specified, get the address into a register. */
6826
aacd3885 6827 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
0fab64a3
MM
6828 {
6829 if (alt_rtl)
aacd3885
RS
6830 *alt_rtl = decl_rtl;
6831 decl_rtl = use_anchored_address (decl_rtl);
6832 if (modifier != EXPAND_CONST_ADDRESS
6833 && modifier != EXPAND_SUM
6834 && (!memory_address_p (DECL_MODE (exp), XEXP (decl_rtl, 0))
6835 || (flag_force_addr && !REG_P (XEXP (decl_rtl, 0)))))
6836 temp = replace_equiv_address (decl_rtl,
6837 copy_rtx (XEXP (decl_rtl, 0)));
0fab64a3 6838 }
1499e0a8 6839
dc6d66b3 6840 /* If we got something, return it. But first, set the alignment
04956a1a 6841 if the address is a register. */
dc6d66b3
RK
6842 if (temp != 0)
6843 {
3c0cb5de 6844 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
bdb429a5 6845 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
dc6d66b3
RK
6846
6847 return temp;
6848 }
6849
1499e0a8
RK
6850 /* If the mode of DECL_RTL does not match that of the decl, it
6851 must be a promoted value. We return a SUBREG of the wanted mode,
6852 but mark it so that we know that it was already extended. */
6853
aacd3885
RS
6854 if (REG_P (decl_rtl)
6855 && GET_MODE (decl_rtl) != DECL_MODE (exp))
1499e0a8 6856 {
5b0264cb
NS
6857 enum machine_mode pmode;
6858
1499e0a8
RK
6859 /* Get the signedness used for this variable. Ensure we get the
6860 same mode we got when the variable was declared. */
5b0264cb 6861 pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
3f9e6aed
PB
6862 (TREE_CODE (exp) == RESULT_DECL
6863 || TREE_CODE (exp) == PARM_DECL) ? 1 : 0);
aacd3885 6864 gcc_assert (GET_MODE (decl_rtl) == pmode);
1499e0a8 6865
aacd3885 6866 temp = gen_lowpart_SUBREG (mode, decl_rtl);
1499e0a8 6867 SUBREG_PROMOTED_VAR_P (temp) = 1;
7879b81e 6868 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
1499e0a8
RK
6869 return temp;
6870 }
6871
aacd3885 6872 return decl_rtl;
bbf6f052
RK
6873
6874 case INTEGER_CST:
d8a50944 6875 temp = immed_double_const (TREE_INT_CST_LOW (exp),
05bccae2 6876 TREE_INT_CST_HIGH (exp), mode);
bbf6f052 6877
d8a50944
RH
6878 /* ??? If overflow is set, fold will have done an incomplete job,
6879 which can result in (plus xx (const_int 0)), which can get
6880 simplified by validate_replace_rtx during virtual register
6881 instantiation, which can result in unrecognizable insns.
6882 Avoid this by forcing all overflows into registers. */
c2e9dc85
RH
6883 if (TREE_CONSTANT_OVERFLOW (exp)
6884 && modifier != EXPAND_INITIALIZER)
d8a50944
RH
6885 temp = force_reg (mode, temp);
6886
6887 return temp;
6888
d744e06e 6889 case VECTOR_CST:
3a021db2
PB
6890 if (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_INT
6891 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_FLOAT)
6892 return const_vector_from_tree (exp);
caf93cb0 6893 else
4038c495
GB
6894 return expand_expr (build_constructor_from_list
6895 (TREE_TYPE (exp),
6896 TREE_VECTOR_CST_ELTS (exp)),
3a021db2 6897 ignore ? const0_rtx : target, tmode, modifier);
d744e06e 6898
bbf6f052 6899 case CONST_DECL:
8403445a 6900 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
bbf6f052
RK
6901
6902 case REAL_CST:
6903 /* If optimized, generate immediate CONST_DOUBLE
3a94c984
KH
6904 which will be turned into memory by reload if necessary.
6905
bbf6f052
RK
6906 We used to force a register so that loop.c could see it. But
6907 this does not allow gen_* patterns to perform optimizations with
6908 the constants. It also produces two insns in cases like "x = 1.0;".
6909 On most machines, floating-point constants are not permitted in
6910 many insns, so we'd end up copying it to a register in any case.
6911
6912 Now, we do the copying in expand_binop, if appropriate. */
5692c7bc
ZW
6913 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6914 TYPE_MODE (TREE_TYPE (exp)));
bbf6f052
RK
6915
6916 case COMPLEX_CST:
9ad58e09
RS
6917 /* Handle evaluating a complex constant in a CONCAT target. */
6918 if (original_target && GET_CODE (original_target) == CONCAT)
6919 {
6920 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6921 rtx rtarg, itarg;
6922
6923 rtarg = XEXP (original_target, 0);
6924 itarg = XEXP (original_target, 1);
6925
6926 /* Move the real and imaginary parts separately. */
6927 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6928 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6929
6930 if (op0 != rtarg)
6931 emit_move_insn (rtarg, op0);
6932 if (op1 != itarg)
6933 emit_move_insn (itarg, op1);
6934
6935 return original_target;
6936 }
6937
71c0e7fc 6938 /* ... fall through ... */
9ad58e09 6939
bbf6f052 6940 case STRING_CST:
aacd3885 6941 temp = expand_expr_constant (exp, 1, modifier);
bbf6f052 6942
afc6aaab 6943 /* temp contains a constant address.
bbf6f052
RK
6944 On RISC machines where a constant address isn't valid,
6945 make some insns to get that address into a register. */
afc6aaab 6946 if (modifier != EXPAND_CONST_ADDRESS
bbf6f052
RK
6947 && modifier != EXPAND_INITIALIZER
6948 && modifier != EXPAND_SUM
afc6aaab
ZW
6949 && (! memory_address_p (mode, XEXP (temp, 0))
6950 || flag_force_addr))
6951 return replace_equiv_address (temp,
6952 copy_rtx (XEXP (temp, 0)));
6953 return temp;
bbf6f052
RK
6954
6955 case SAVE_EXPR:
82c82743
RH
6956 {
6957 tree val = TREE_OPERAND (exp, 0);
6958 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
e5e809f4 6959
7f5e6307 6960 if (!SAVE_EXPR_RESOLVED_P (exp))
82c82743
RH
6961 {
6962 /* We can indeed still hit this case, typically via builtin
6963 expanders calling save_expr immediately before expanding
6964 something. Assume this means that we only have to deal
6965 with non-BLKmode values. */
5b0264cb 6966 gcc_assert (GET_MODE (ret) != BLKmode);
1499e0a8 6967
82c82743
RH
6968 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
6969 DECL_ARTIFICIAL (val) = 1;
7f5e6307 6970 DECL_IGNORED_P (val) = 1;
82c82743 6971 TREE_OPERAND (exp, 0) = val;
7f5e6307 6972 SAVE_EXPR_RESOLVED_P (exp) = 1;
1499e0a8 6973
82c82743
RH
6974 if (!CONSTANT_P (ret))
6975 ret = copy_to_reg (ret);
6976 SET_DECL_RTL (val, ret);
6977 }
1499e0a8 6978
82c82743
RH
6979 return ret;
6980 }
bbf6f052 6981
70e6ca43
APB
6982 case GOTO_EXPR:
6983 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6984 expand_goto (TREE_OPERAND (exp, 0));
6985 else
6986 expand_computed_goto (TREE_OPERAND (exp, 0));
6987 return const0_rtx;
6988
bbf6f052 6989 case CONSTRUCTOR:
dd27116b
RK
6990 /* If we don't need the result, just ensure we evaluate any
6991 subexpressions. */
6992 if (ignore)
6993 {
4038c495
GB
6994 unsigned HOST_WIDE_INT idx;
6995 tree value;
37a08a29 6996
4038c495
GB
6997 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6998 expand_expr (value, const0_rtx, VOIDmode, 0);
37a08a29 6999
dd27116b
RK
7000 return const0_rtx;
7001 }
3207b172 7002
c5250139
RG
7003 /* Try to avoid creating a temporary at all. This is possible
7004 if all of the initializer is zero.
7005 FIXME: try to handle all [0..255] initializers we can handle
7006 with memset. */
7007 else if (TREE_STATIC (exp)
7008 && !TREE_ADDRESSABLE (exp)
7009 && target != 0 && mode == BLKmode
7010 && all_zeros_p (exp))
7011 {
7012 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7013 return target;
7014 }
7015
4af3895e
JVA
7016 /* All elts simple constants => refer to a constant in memory. But
7017 if this is a non-BLKmode mode, let it store a field at a time
7018 since that should make a CONST_INT or CONST_DOUBLE when we
3207b172 7019 fold. Likewise, if we have a target we can use, it is best to
d720b9d1
RK
7020 store directly into the target unless the type is large enough
7021 that memcpy will be used. If we are making an initializer and
00182e1e
AH
7022 all operands are constant, put it in memory as well.
7023
 7024	 FIXME: Avoid trying to fill vector constructors piecemeal.
7025 Output them with output_constant_def below unless we're sure
7026 they're zeros. This should go away when vector initializers
7027 are treated like VECTOR_CST instead of arrays.
7028 */
dd27116b 7029 else if ((TREE_STATIC (exp)
3207b172 7030 && ((mode == BLKmode
e5e809f4 7031 && ! (target != 0 && safe_from_p (target, exp, 1)))
d720b9d1 7032 || TREE_ADDRESSABLE (exp)
19caa751 7033 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
3a94c984 7034 && (! MOVE_BY_PIECES_P
19caa751
RK
7035 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7036 TYPE_ALIGN (type)))
6de9cd9a 7037 && ! mostly_zeros_p (exp))))
f59700f9
RK
7038 || ((modifier == EXPAND_INITIALIZER
7039 || modifier == EXPAND_CONST_ADDRESS)
7040 && TREE_CONSTANT (exp)))
bbf6f052 7041 {
aacd3885 7042 rtx constructor = expand_expr_constant (exp, 1, modifier);
19caa751 7043
b552441b
RS
7044 if (modifier != EXPAND_CONST_ADDRESS
7045 && modifier != EXPAND_INITIALIZER
792760b9
RK
7046 && modifier != EXPAND_SUM)
7047 constructor = validize_mem (constructor);
7048
bbf6f052
RK
7049 return constructor;
7050 }
bbf6f052
RK
7051 else
7052 {
e9ac02a6
JW
7053 /* Handle calls that pass values in multiple non-contiguous
7054 locations. The Irix 6 ABI has examples of this. */
e5e809f4 7055 if (target == 0 || ! safe_from_p (target, exp, 1)
8403445a
AM
7056 || GET_CODE (target) == PARALLEL
7057 || modifier == EXPAND_STACK_PARM)
1da68f56
RK
7058 target
7059 = assign_temp (build_qualified_type (type,
7060 (TYPE_QUALS (type)
7061 | (TREE_READONLY (exp)
7062 * TYPE_QUAL_CONST))),
c24ae149 7063 0, TREE_ADDRESSABLE (exp), 1);
07604beb 7064
dbb5c281 7065 store_constructor (exp, target, 0, int_expr_size (exp));
bbf6f052
RK
7066 return target;
7067 }
7068
7ccf35ed
DN
7069 case MISALIGNED_INDIRECT_REF:
7070 case ALIGN_INDIRECT_REF:
bbf6f052
RK
7071 case INDIRECT_REF:
7072 {
7073 tree exp1 = TREE_OPERAND (exp, 0);
3a94c984 7074
6de9cd9a
DN
7075 if (modifier != EXPAND_WRITE)
7076 {
7077 tree t;
7078
7079 t = fold_read_from_constant_string (exp);
7080 if (t)
7081 return expand_expr (t, target, tmode, modifier);
7082 }
bbf6f052 7083
405f0da6
JW
7084 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7085 op0 = memory_address (mode, op0);
7ccf35ed
DN
7086
7087 if (code == ALIGN_INDIRECT_REF)
7088 {
7089 int align = TYPE_ALIGN_UNIT (type);
7090 op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
7091 op0 = memory_address (mode, op0);
7092 }
7093
38a448ca 7094 temp = gen_rtx_MEM (mode, op0);
8b11a64c 7095
ac182688 7096 set_mem_attributes (temp, exp, 0);
1125706f 7097
1e0598e2
RH
7098 /* Resolve the misalignment now, so that we don't have to remember
7099 to resolve it later. Of course, this only works for reads. */
7100 /* ??? When we get around to supporting writes, we'll have to handle
7101 this in store_expr directly. The vectorizer isn't generating
7102 those yet, however. */
7103 if (code == MISALIGNED_INDIRECT_REF)
7104 {
7105 int icode;
7106 rtx reg, insn;
7107
29b2d867
RH
7108 gcc_assert (modifier == EXPAND_NORMAL
7109 || modifier == EXPAND_STACK_PARM);
1e0598e2
RH
7110
7111 /* The vectorizer should have already checked the mode. */
7112 icode = movmisalign_optab->handlers[mode].insn_code;
7113 gcc_assert (icode != CODE_FOR_nothing);
7114
7115 /* We've already validated the memory, and we're creating a
7116 new pseudo destination. The predicates really can't fail. */
7117 reg = gen_reg_rtx (mode);
7118
7119 /* Nor can the insn generator. */
7120 insn = GEN_FCN (icode) (reg, temp);
7121 emit_insn (insn);
7122
7123 return reg;
7124 }
7125
8c8a8e34
JW
7126 return temp;
7127 }
bbf6f052 7128
ac182688
ZD
7129 case TARGET_MEM_REF:
7130 {
7131 struct mem_address addr;
7132
7133 get_address_description (exp, &addr);
7134 op0 = addr_for_mem_ref (&addr, true);
7135 op0 = memory_address (mode, op0);
7136 temp = gen_rtx_MEM (mode, op0);
7137 set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
7138 }
7139 return temp;
7140
bbf6f052 7141 case ARRAY_REF:
6de9cd9a 7142
bbf6f052 7143 {
742920c7 7144 tree array = TREE_OPERAND (exp, 0);
45d8710e 7145 tree index = TREE_OPERAND (exp, 1);
742920c7 7146
742920c7 7147 /* Fold an expression like: "foo"[2].
ad2e7dd0
RK
7148 This is not done in fold so it won't happen inside &.
7149 Don't fold if this is for wide characters since it's too
7150 difficult to do correctly and this is a very rare case. */
742920c7 7151
017e1b43
RH
7152 if (modifier != EXPAND_CONST_ADDRESS
7153 && modifier != EXPAND_INITIALIZER
6de9cd9a
DN
7154 && modifier != EXPAND_MEMORY)
7155 {
7156 tree t = fold_read_from_constant_string (exp);
7157
7158 if (t)
7159 return expand_expr (t, target, tmode, modifier);
7160 }
bbf6f052 7161
742920c7
RK
7162 /* If this is a constant index into a constant array,
7163 just get the value from the array. Handle both the cases when
7164 we have an explicit constructor and when our operand is a variable
7165 that was declared const. */
4af3895e 7166
017e1b43
RH
7167 if (modifier != EXPAND_CONST_ADDRESS
7168 && modifier != EXPAND_INITIALIZER
7169 && modifier != EXPAND_MEMORY
7170 && TREE_CODE (array) == CONSTRUCTOR
7171 && ! TREE_SIDE_EFFECTS (array)
45d8710e 7172 && TREE_CODE (index) == INTEGER_CST)
742920c7 7173 {
4038c495
GB
7174 unsigned HOST_WIDE_INT ix;
7175 tree field, value;
05bccae2 7176
4038c495
GB
7177 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
7178 field, value)
7179 if (tree_int_cst_equal (field, index))
7180 {
7181 if (!TREE_SIDE_EFFECTS (value))
7182 return expand_expr (fold (value), target, tmode, modifier);
7183 break;
7184 }
742920c7 7185 }
3a94c984 7186
742920c7 7187 else if (optimize >= 1
cb5fa0f8
RK
7188 && modifier != EXPAND_CONST_ADDRESS
7189 && modifier != EXPAND_INITIALIZER
017e1b43 7190 && modifier != EXPAND_MEMORY
742920c7
RK
7191 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7192 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
beb0c2e0
RH
7193 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
7194 && targetm.binds_local_p (array))
742920c7 7195 {
08293add 7196 if (TREE_CODE (index) == INTEGER_CST)
742920c7
RK
7197 {
7198 tree init = DECL_INITIAL (array);
7199
742920c7
RK
7200 if (TREE_CODE (init) == CONSTRUCTOR)
7201 {
4038c495
GB
7202 unsigned HOST_WIDE_INT ix;
7203 tree field, value;
7204
7205 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
7206 field, value)
7207 if (tree_int_cst_equal (field, index))
7208 {
7209 if (!TREE_SIDE_EFFECTS (value))
7210 return expand_expr (fold (value), target, tmode,
7211 modifier);
7212 break;
7213 }
742920c7 7214 }
f51a281b 7215	    else if (TREE_CODE (init) == STRING_CST)
5c80f6e6 7216 {
f51a281b
AP
7217 tree index1 = index;
7218 tree low_bound = array_ref_low_bound (exp);
7219 index1 = fold_convert (sizetype, TREE_OPERAND (exp, 1));
7220
 7221		/* Optimize the special case of a zero lower bound.
7222
7223 We convert the low_bound to sizetype to avoid some problems
7224 with constant folding. (E.g. suppose the lower bound is 1,
 7225		   and its mode is QI.  Without the conversion, (ARRAY
7226 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
 7227		   +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
7228
7229 if (! integer_zerop (low_bound))
7230 index1 = size_diffop (index1, fold_convert (sizetype,
7231 low_bound));
7232
7233 if (0 > compare_tree_int (index1,
7234 TREE_STRING_LENGTH (init)))
7235 {
7236 tree type = TREE_TYPE (TREE_TYPE (init));
7237 enum machine_mode mode = TYPE_MODE (type);
7238
7239 if (GET_MODE_CLASS (mode) == MODE_INT
7240 && GET_MODE_SIZE (mode) == 1)
7241 return gen_int_mode (TREE_STRING_POINTER (init)
7242 [TREE_INT_CST_LOW (index1)],
7243 mode);
7244 }
5c80f6e6 7245 }
742920c7
RK
7246 }
7247 }
7248 }
afc6aaab 7249 goto normal_inner_ref;
bbf6f052
RK
7250
7251 case COMPONENT_REF:
4af3895e 7252 /* If the operand is a CONSTRUCTOR, we can just extract the
afc6aaab
ZW
7253 appropriate field if it is present. */
7254 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
4af3895e 7255 {
4038c495
GB
7256 unsigned HOST_WIDE_INT idx;
7257 tree field, value;
4af3895e 7258
4038c495
GB
7259 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7260 idx, field, value)
7261 if (field == TREE_OPERAND (exp, 1)
86b5812c
RK
7262 /* We can normally use the value of the field in the
7263 CONSTRUCTOR. However, if this is a bitfield in
7264 an integral mode that we can fit in a HOST_WIDE_INT,
7265 we must mask only the number of bits in the bitfield,
7266 since this is done implicitly by the constructor. If
7267 the bitfield does not meet either of those conditions,
7268 we can't do this optimization. */
4038c495
GB
7269 && (! DECL_BIT_FIELD (field)
7270 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
7271 && (GET_MODE_BITSIZE (DECL_MODE (field))
86b5812c
RK
7272 <= HOST_BITS_PER_WIDE_INT))))
7273 {
4038c495 7274 if (DECL_BIT_FIELD (field)
8403445a
AM
7275 && modifier == EXPAND_STACK_PARM)
7276 target = 0;
4038c495
GB
7277 op0 = expand_expr (value, target, tmode, modifier);
7278 if (DECL_BIT_FIELD (field))
86b5812c 7279 {
4038c495
GB
7280 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
7281 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
86b5812c 7282
4038c495 7283 if (TYPE_UNSIGNED (TREE_TYPE (field)))
86b5812c
RK
7284 {
7285 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
22273300 7286 op0 = expand_and (imode, op0, op1, target);
86b5812c
RK
7287 }
7288 else
7289 {
7290 tree count
4a90aeeb 7291 = build_int_cst (NULL_TREE,
7d60be94 7292 GET_MODE_BITSIZE (imode) - bitsize);
86b5812c
RK
7293
7294 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7295 target, 0);
7296 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7297 target, 0);
7298 }
7299 }
7300
7301 return op0;
7302 }
4af3895e 7303 }
afc6aaab 7304 goto normal_inner_ref;
4af3895e 7305
afc6aaab
ZW
7306 case BIT_FIELD_REF:
7307 case ARRAY_RANGE_REF:
7308 normal_inner_ref:
bbf6f052
RK
7309 {
7310 enum machine_mode mode1;
770ae6cc 7311 HOST_WIDE_INT bitsize, bitpos;
7bb0943f 7312 tree offset;
bbf6f052 7313 int volatilep = 0;
839c4796 7314 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
2614034e 7315 &mode1, &unsignedp, &volatilep, true);
f47e9b4e 7316 rtx orig_op0;
bbf6f052 7317
e7f3c83f
RK
7318 /* If we got back the original object, something is wrong. Perhaps
7319 we are evaluating an expression too early. In any event, don't
7320 infinitely recurse. */
5b0264cb 7321 gcc_assert (tem != exp);
e7f3c83f 7322
3d27140a 7323 /* If TEM's type is a union of variable size, pass TARGET to the inner
b74f5ff2
RK
7324 computation, since it will need a temporary and TARGET is known
7325 to have to do. This occurs in unchecked conversion in Ada. */
3a94c984 7326
f47e9b4e
RK
7327 orig_op0 = op0
7328 = expand_expr (tem,
7329 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7330 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7331 != INTEGER_CST)
8403445a 7332 && modifier != EXPAND_STACK_PARM
f47e9b4e
RK
7333 ? target : NULL_RTX),
7334 VOIDmode,
7335 (modifier == EXPAND_INITIALIZER
8403445a
AM
7336 || modifier == EXPAND_CONST_ADDRESS
7337 || modifier == EXPAND_STACK_PARM)
f47e9b4e 7338 ? modifier : EXPAND_NORMAL);
bbf6f052 7339
60a23e2e
OH
7340 /* If this is a constant, put it into a register if it is a legitimate
7341 constant, OFFSET is 0, and we won't try to extract outside the
7342 register (in case we were passed a partially uninitialized object
7343 or a view_conversion to a larger size). Force the constant to
7344 memory otherwise. */
8c8a8e34
JW
7345 if (CONSTANT_P (op0))
7346 {
7347 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
14a774a9 7348 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
60a23e2e
OH
7349 && offset == 0
7350 && bitpos + bitsize <= GET_MODE_BITSIZE (mode))
8c8a8e34
JW
7351 op0 = force_reg (mode, op0);
7352 else
7353 op0 = validize_mem (force_const_mem (mode, op0));
7354 }
7355
60a23e2e
OH
 7356	/* Otherwise, if this object is not in memory and we either have an
7357 offset, a BLKmode result, or a reference outside the object, put it
7358 there. Such cases can occur in Ada if we have unchecked conversion
7359 of an expression from a scalar type to an array or record type or
7360 for an ARRAY_RANGE_REF whose type is BLKmode. */
3c0cb5de 7361 else if (!MEM_P (op0)
8d2e5f72 7362 && (offset != 0
60a23e2e 7363 || (bitpos + bitsize > GET_MODE_BITSIZE (GET_MODE (op0)))
8d2e5f72
RK
7364 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7365 {
82c82743
RH
7366 tree nt = build_qualified_type (TREE_TYPE (tem),
7367 (TYPE_QUALS (TREE_TYPE (tem))
7368 | TYPE_QUAL_CONST));
7369 rtx memloc = assign_temp (nt, 1, 1, 1);
450b1728 7370
82c82743
RH
7371 emit_move_insn (memloc, op0);
7372 op0 = memloc;
8d2e5f72
RK
7373 }
7374
7bb0943f
RS
7375 if (offset != 0)
7376 {
8403445a
AM
7377 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7378 EXPAND_SUM);
7bb0943f 7379
5b0264cb 7380 gcc_assert (MEM_P (op0));
2d48c13d 7381
2d48c13d 7382#ifdef POINTERS_EXTEND_UNSIGNED
4b6c1672 7383 if (GET_MODE (offset_rtx) != Pmode)
267b28bd 7384 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
fa06ab5c
RK
7385#else
7386 if (GET_MODE (offset_rtx) != ptr_mode)
7387 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
2d48c13d
JL
7388#endif
7389
e82407b5
EB
7390 if (GET_MODE (op0) == BLKmode
 7391		/* A constant address in OP0 can have VOIDmode; we must
7392 not try to call force_reg in that case. */
efd07ca7 7393 && GET_MODE (XEXP (op0, 0)) != VOIDmode
14a774a9 7394 && bitsize != 0
3a94c984 7395 && (bitpos % bitsize) == 0
89752202 7396 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
a06ef755 7397 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
89752202 7398 {
e3c8ea67 7399 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
89752202
HB
7400 bitpos = 0;
7401 }
7402
0d4903b8
RK
7403 op0 = offset_address (op0, offset_rtx,
7404 highest_pow2_factor (offset));
7bb0943f
RS
7405 }
7406
1ce7f3c2
RK
7407 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7408 record its alignment as BIGGEST_ALIGNMENT. */
3c0cb5de 7409 if (MEM_P (op0) && bitpos == 0 && offset != 0
1ce7f3c2
RK
7410 && is_aligning_offset (offset, tem))
7411 set_mem_align (op0, BIGGEST_ALIGNMENT);
7412
bbf6f052 7413 /* Don't forget about volatility even if this is a bitfield. */
3c0cb5de 7414 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
bbf6f052 7415 {
f47e9b4e
RK
7416 if (op0 == orig_op0)
7417 op0 = copy_rtx (op0);
7418
bbf6f052
RK
7419 MEM_VOLATILE_P (op0) = 1;
7420 }
7421
010f87c4
JJ
7422 /* The following code doesn't handle CONCAT.
7423 Assume only bitpos == 0 can be used for CONCAT, due to
 7424	 one-element arrays having the same mode as their element.  */
7425 if (GET_CODE (op0) == CONCAT)
7426 {
5b0264cb
NS
7427 gcc_assert (bitpos == 0
7428 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
010f87c4
JJ
7429 return op0;
7430 }
7431
ccc98036
RS
7432 /* In cases where an aligned union has an unaligned object
7433 as a field, we might be extracting a BLKmode value from
7434 an integer-mode (e.g., SImode) object. Handle this case
7435 by doing the extract into an object as wide as the field
7436 (which we know to be the width of a basic mode), then
cb5fa0f8 7437 storing into memory, and changing the mode to BLKmode. */
bbf6f052 7438 if (mode1 == VOIDmode
f8cfc6aa 7439 || REG_P (op0) || GET_CODE (op0) == SUBREG
cb5fa0f8
RK
7440 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7441 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
10c2a453
RK
7442 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7443 && modifier != EXPAND_CONST_ADDRESS
7444 && modifier != EXPAND_INITIALIZER)
cb5fa0f8
RK
7445 /* If the field isn't aligned enough to fetch as a memref,
7446 fetch it as a bit field. */
7447 || (mode1 != BLKmode
9e5f281f 7448 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
e82407b5 7449 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
3c0cb5de 7450 || (MEM_P (op0)
e82407b5
EB
7451 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7452 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
a8f3bf52
RK
7453 && ((modifier == EXPAND_CONST_ADDRESS
7454 || modifier == EXPAND_INITIALIZER)
7455 ? STRICT_ALIGNMENT
7456 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
9e5f281f 7457 || (bitpos % BITS_PER_UNIT != 0)))
cb5fa0f8
RK
7458 /* If the type and the field are a constant size and the
7459 size of the type isn't the same size as the bitfield,
7460 we must use bitfield operations. */
7461 || (bitsize >= 0
dbe4d070
RH
7462 && TYPE_SIZE (TREE_TYPE (exp))
7463 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
cb5fa0f8 7464 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
a06ef755 7465 bitsize)))
bbf6f052 7466 {
bbf6f052
RK
7467 enum machine_mode ext_mode = mode;
7468
14a774a9 7469 if (ext_mode == BLKmode
3c0cb5de
JQ
7470 && ! (target != 0 && MEM_P (op0)
7471 && MEM_P (target)
14a774a9 7472 && bitpos % BITS_PER_UNIT == 0))
bbf6f052
RK
7473 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7474
7475 if (ext_mode == BLKmode)
a281e72d 7476 {
7a06d606
RK
7477 if (target == 0)
7478 target = assign_temp (type, 0, 1, 1);
7479
7480 if (bitsize == 0)
7481 return target;
7482
a281e72d
RK
7483 /* In this case, BITPOS must start at a byte boundary and
7484 TARGET, if specified, must be a MEM. */
5b0264cb
NS
7485 gcc_assert (MEM_P (op0)
7486 && (!target || MEM_P (target))
7487 && !(bitpos % BITS_PER_UNIT));
a281e72d 7488
7a06d606
RK
7489 emit_block_move (target,
7490 adjust_address (op0, VOIDmode,
7491 bitpos / BITS_PER_UNIT),
a06ef755 7492 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
44bb111a 7493 / BITS_PER_UNIT),
8403445a
AM
7494 (modifier == EXPAND_STACK_PARM
7495 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
3a94c984 7496
a281e72d
RK
7497 return target;
7498 }
bbf6f052 7499
dc6d66b3
RK
7500 op0 = validize_mem (op0);
7501
3c0cb5de 7502 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
04050c69 7503 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
dc6d66b3 7504
8403445a
AM
7505 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7506 (modifier == EXPAND_STACK_PARM
7507 ? NULL_RTX : target),
b3520980 7508 ext_mode, ext_mode);
ef19912d
RK
7509
7510 /* If the result is a record type and BITSIZE is narrower than
7511 the mode of OP0, an integral mode, and this is a big endian
7512 machine, we must put the field into the high-order bits. */
7513 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7514 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
65a07688 7515 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
ef19912d
RK
7516 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7517 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7518 - bitsize),
7519 op0, 1);
7520
daae8185
RK
7521 /* If the result type is BLKmode, store the data into a temporary
7522 of the appropriate type, but with the mode corresponding to the
7523 mode for the data we have (op0's mode). It's tempting to make
7524 this a constant type, since we know it's only being stored once,
7525 but that can cause problems if we are taking the address of this
7526 COMPONENT_REF because the MEM of any reference via that address
7527 will have flags corresponding to the type, which will not
7528 necessarily be constant. */
bbf6f052
RK
7529 if (mode == BLKmode)
7530 {
daae8185
RK
7531 rtx new
7532 = assign_stack_temp_for_type
7533 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
bbf6f052
RK
7534
7535 emit_move_insn (new, op0);
7536 op0 = copy_rtx (new);
7537 PUT_MODE (op0, BLKmode);
c3d32120 7538 set_mem_attributes (op0, exp, 1);
bbf6f052
RK
7539 }
7540
7541 return op0;
7542 }
7543
05019f83
RK
7544 /* If the result is BLKmode, use that to access the object
7545 now as well. */
7546 if (mode == BLKmode)
7547 mode1 = BLKmode;
7548
bbf6f052
RK
7549 /* Get a reference to just this component. */
7550 if (modifier == EXPAND_CONST_ADDRESS
7551 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
f1ec5147 7552 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
bbf6f052 7553 else
f4ef873c 7554 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
41472af8 7555
f47e9b4e
RK
7556 if (op0 == orig_op0)
7557 op0 = copy_rtx (op0);
7558
3bdf5ad1 7559 set_mem_attributes (op0, exp, 0);
f8cfc6aa 7560 if (REG_P (XEXP (op0, 0)))
a06ef755 7561 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
dc6d66b3 7562
bbf6f052 7563 MEM_VOLATILE_P (op0) |= volatilep;
0d15e60c 7564 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
08bbd316 7565 || modifier == EXPAND_CONST_ADDRESS
0d15e60c 7566 || modifier == EXPAND_INITIALIZER)
bbf6f052 7567 return op0;
0d15e60c 7568 else if (target == 0)
bbf6f052 7569 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
0d15e60c 7570
bbf6f052
RK
7571 convert_move (target, op0, unsignedp);
7572 return target;
7573 }
7574
0f59171d
RH
7575 case OBJ_TYPE_REF:
7576 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
4a8d0c9c 7577
bbf6f052
RK
7578 case CALL_EXPR:
7579 /* Check for a built-in function. */
7580 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
d6a5ac33
RK
7581 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7582 == FUNCTION_DECL)
bbf6f052 7583 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
0fb7aeda 7584 {
c70eaeaf
KG
7585 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7586 == BUILT_IN_FRONTEND)
673fda6b
SB
7587 return lang_hooks.expand_expr (exp, original_target,
7588 tmode, modifier,
7589 alt_rtl);
c70eaeaf
KG
7590 else
7591 return expand_builtin (exp, target, subtarget, tmode, ignore);
7592 }
d6a5ac33 7593
8129842c 7594 return expand_call (exp, target, ignore);
bbf6f052
RK
7595
7596 case NON_LVALUE_EXPR:
7597 case NOP_EXPR:
7598 case CONVERT_EXPR:
4a53008b 7599 if (TREE_OPERAND (exp, 0) == error_mark_node)
a592f288 7600 return const0_rtx;
4a53008b 7601
bbf6f052
RK
7602 if (TREE_CODE (type) == UNION_TYPE)
7603 {
7604 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
14a774a9 7605
c3d32120
RK
7606 /* If both input and output are BLKmode, this conversion isn't doing
7607 anything except possibly changing memory attribute. */
7608 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7609 {
7610 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7611 modifier);
7612
7613 result = copy_rtx (result);
7614 set_mem_attributes (result, exp, 0);
7615 return result;
7616 }
14a774a9 7617
bbf6f052 7618 if (target == 0)
cf7cb67e
JH
7619 {
7620 if (TYPE_MODE (type) != BLKmode)
7621 target = gen_reg_rtx (TYPE_MODE (type));
7622 else
7623 target = assign_temp (type, 0, 1, 1);
7624 }
d6a5ac33 7625
3c0cb5de 7626 if (MEM_P (target))
bbf6f052
RK
7627 /* Store data into beginning of memory target. */
7628 store_expr (TREE_OPERAND (exp, 0),
8403445a 7629 adjust_address (target, TYPE_MODE (valtype), 0),
6f4fd16d 7630 modifier == EXPAND_STACK_PARM);
1499e0a8 7631
bbf6f052 7632 else
5b0264cb
NS
7633 {
7634 gcc_assert (REG_P (target));
7635
7636 /* Store this field into a union of the proper type. */
7637 store_field (target,
7638 MIN ((int_size_in_bytes (TREE_TYPE
7639 (TREE_OPERAND (exp, 0)))
7640 * BITS_PER_UNIT),
7641 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7642 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
f45bdcd0 7643 type, 0);
5b0264cb 7644 }
bbf6f052
RK
7645
7646 /* Return the entire union. */
7647 return target;
7648 }
d6a5ac33 7649
7f62854a
RK
7650 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7651 {
7652 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
37a08a29 7653 modifier);
7f62854a
RK
7654
7655 /* If the signedness of the conversion differs and OP0 is
7656 a promoted SUBREG, clear that indication since we now
7657 have to do the proper extension. */
8df83eae 7658 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7f62854a
RK
7659 && GET_CODE (op0) == SUBREG)
7660 SUBREG_PROMOTED_VAR_P (op0) = 0;
7661
bc15d0ef 7662 return REDUCE_BIT_FIELD (op0);
7f62854a
RK
7663 }
7664
fdf473ae 7665 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
12342f90 7666 if (GET_MODE (op0) == mode)
7acda552 7667 ;
12342f90 7668
d6a5ac33 7669 /* If OP0 is a constant, just convert it into the proper mode. */
7acda552 7670 else if (CONSTANT_P (op0))
fdf473ae
RH
7671 {
7672 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7673 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7674
0fb7aeda 7675 if (modifier == EXPAND_INITIALIZER)
7acda552
RK
7676 op0 = simplify_gen_subreg (mode, op0, inner_mode,
7677 subreg_lowpart_offset (mode,
7678 inner_mode));
fdf473ae 7679 else
7acda552
RK
 7680	    op0 = convert_modes (mode, inner_mode, op0,
7681 TYPE_UNSIGNED (inner_type));
fdf473ae 7682 }
12342f90 7683
7acda552
RK
7684 else if (modifier == EXPAND_INITIALIZER)
7685 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
d6a5ac33 7686
7acda552
RK
7687 else if (target == 0)
7688 op0 = convert_to_mode (mode, op0,
7689 TYPE_UNSIGNED (TREE_TYPE
7690 (TREE_OPERAND (exp, 0))));
bbf6f052 7691 else
7acda552
RK
7692 {
7693 convert_move (target, op0,
7694 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7695 op0 = target;
7696 }
7697
7698 return REDUCE_BIT_FIELD (op0);
bbf6f052 7699
ed239f5a 7700 case VIEW_CONVERT_EXPR:
37a08a29 7701 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
ed239f5a 7702
fabaaf36 7703 /* If the input and output modes are both the same, we are done. */
ed239f5a
RK
7704 if (TYPE_MODE (type) == GET_MODE (op0))
7705 ;
fabaaf36
RH
7706 /* If neither mode is BLKmode, and both modes are the same size
7707 then we can use gen_lowpart. */
ed239f5a 7708 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
fabaaf36
RH
7709 && GET_MODE_SIZE (TYPE_MODE (type))
7710 == GET_MODE_SIZE (GET_MODE (op0)))
0fd662ee
RH
7711 {
7712 if (GET_CODE (op0) == SUBREG)
7713 op0 = force_reg (GET_MODE (op0), op0);
7714 op0 = gen_lowpart (TYPE_MODE (type), op0);
7715 }
fabaaf36
RH
7716 /* If both modes are integral, then we can convert from one to the
7717 other. */
7718 else if (SCALAR_INT_MODE_P (GET_MODE (op0))
7719 && SCALAR_INT_MODE_P (TYPE_MODE (type)))
7720 op0 = convert_modes (TYPE_MODE (type), GET_MODE (op0), op0,
7721 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7722 /* As a last resort, spill op0 to memory, and reload it in a
7723 different mode. */
3c0cb5de 7724 else if (!MEM_P (op0))
ed239f5a 7725 {
c11c10d8 7726 /* If the operand is not a MEM, force it into memory. Since we
75c40d56 7727 are going to be changing the mode of the MEM, don't call
c11c10d8
RK
7728 force_const_mem for constants because we don't allow pool
7729 constants to change mode. */
ed239f5a 7730 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
ed239f5a 7731
5b0264cb 7732 gcc_assert (!TREE_ADDRESSABLE (exp));
ed239f5a 7733
c11c10d8
RK
7734 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7735 target
7736 = assign_stack_temp_for_type
7737 (TYPE_MODE (inner_type),
7738 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
ed239f5a 7739
c11c10d8
RK
7740 emit_move_insn (target, op0);
7741 op0 = target;
ed239f5a
RK
7742 }
7743
c11c10d8
RK
7744 /* At this point, OP0 is in the correct mode. If the output type is such
7745 that the operand is known to be aligned, indicate that it is.
7746 Otherwise, we need only be concerned about alignment for non-BLKmode
7747 results. */
3c0cb5de 7748 if (MEM_P (op0))
ed239f5a
RK
7749 {
7750 op0 = copy_rtx (op0);
7751
ed239f5a
RK
7752 if (TYPE_ALIGN_OK (type))
7753 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7754 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7755 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7756 {
7757 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
65a07688
RK
7758 HOST_WIDE_INT temp_size
7759 = MAX (int_size_in_bytes (inner_type),
7760 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
ed239f5a
RK
7761 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7762 temp_size, 0, type);
c4e59f51 7763 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
ed239f5a 7764
5b0264cb 7765 gcc_assert (!TREE_ADDRESSABLE (exp));
c11c10d8 7766
ed239f5a
RK
7767 if (GET_MODE (op0) == BLKmode)
7768 emit_block_move (new_with_op0_mode, op0,
44bb111a 7769 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
8403445a
AM
7770 (modifier == EXPAND_STACK_PARM
7771 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
ed239f5a
RK
7772 else
7773 emit_move_insn (new_with_op0_mode, op0);
7774
7775 op0 = new;
7776 }
0fb7aeda 7777
c4e59f51 7778 op0 = adjust_address (op0, TYPE_MODE (type), 0);
ed239f5a
RK
7779 }
7780
7781 return op0;
7782
bbf6f052 7783 case PLUS_EXPR:
4dfa0342 7784 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
bbf6f052
RK
7785 something else, make sure we add the register to the constant and
7786 then to the other thing. This case can occur during strength
7787 reduction and doing it this way will produce better code if the
7788 frame pointer or argument pointer is eliminated.
7789
7790 fold-const.c will ensure that the constant is always in the inner
7791 PLUS_EXPR, so the only case we need to do anything about is if
7792 sp, ap, or fp is our second argument, in which case we must swap
7793 the innermost first argument and our second argument. */
7794
7795 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7796 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
4dfa0342
RH
7797 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
7798 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7799 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7800 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
bbf6f052
RK
7801 {
7802 tree t = TREE_OPERAND (exp, 1);
7803
7804 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7805 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7806 }
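	  /* Illustrative note, not part of the original source: the swap
	     above rewrites a tree of the form

		 (PLUS_EXPR (PLUS_EXPR x 4) fp)

	     as

		 (PLUS_EXPR (PLUS_EXPR fp 4) x)

	     so the pointer register and the constant combine first; once
	     the frame pointer is eliminated this usually folds into a
	     single base-plus-offset address.  */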
7807
88f63c77 7808 /* If the result is to be ptr_mode and we are adding an integer to
bbf6f052
RK
7809 something, we might be forming a constant. So try to use
7810 plus_constant. If it produces a sum and we can't accept it,
7811 use force_operand. This allows P = &ARR[const] to generate
7812 efficient code on machines where a SYMBOL_REF is not a valid
7813 address.
7814
7815 If this is an EXPAND_SUM call, always return the sum. */
c980ac49 7816 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
0fb7aeda 7817 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
bbf6f052 7818 {
8403445a
AM
7819 if (modifier == EXPAND_STACK_PARM)
7820 target = 0;
c980ac49
RS
7821 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7822 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7823 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7824 {
cbbc503e
JL
7825 rtx constant_part;
7826
c980ac49
RS
7827 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7828 EXPAND_SUM);
cbbc503e
JL
7829 /* Use immed_double_const to ensure that the constant is
7830 truncated according to the mode of OP1, then sign extended
7831 to a HOST_WIDE_INT. Using the constant directly can result
7832 in non-canonical RTL in a 64x32 cross compile. */
7833 constant_part
7834 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7835 (HOST_WIDE_INT) 0,
a5efcd63 7836 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7f401c74 7837 op1 = plus_constant (op1, INTVAL (constant_part));
c980ac49
RS
7838 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7839 op1 = force_operand (op1, target);
bc15d0ef 7840 return REDUCE_BIT_FIELD (op1);
c980ac49 7841 }
bbf6f052 7842
c980ac49 7843 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
dc38a610 7844 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
c980ac49
RS
7845 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7846 {
cbbc503e
JL
7847 rtx constant_part;
7848
c980ac49 7849 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
70d95bac
RH
7850 (modifier == EXPAND_INITIALIZER
7851 ? EXPAND_INITIALIZER : EXPAND_SUM));
c980ac49
RS
7852 if (! CONSTANT_P (op0))
7853 {
7854 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7855 VOIDmode, modifier);
f0e9957a
RS
7856 /* Return a PLUS if modifier says it's OK. */
7857 if (modifier == EXPAND_SUM
7858 || modifier == EXPAND_INITIALIZER)
7859 return simplify_gen_binary (PLUS, mode, op0, op1);
7860 goto binop2;
c980ac49 7861 }
cbbc503e
JL
7862 /* Use immed_double_const to ensure that the constant is
7863 truncated according to the mode of OP1, then sign extended
7864 to a HOST_WIDE_INT. Using the constant directly can result
7865 in non-canonical RTL in a 64x32 cross compile. */
7866 constant_part
7867 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7868 (HOST_WIDE_INT) 0,
2a94e396 7869 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7f401c74 7870 op0 = plus_constant (op0, INTVAL (constant_part));
c980ac49
RS
7871 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7872 op0 = force_operand (op0, target);
bc15d0ef 7873 return REDUCE_BIT_FIELD (op0);
c980ac49 7874 }
bbf6f052
RK
7875 }
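      /* Illustrative note, not part of the original source: the
	 plus_constant path above is what lets address arithmetic such as

	     static int arr[16];
	     int *p = &arr[5];

	 expand to the compile-time constant "arr + 20" (a SYMBOL_REF plus
	 an offset) rather than a run-time addition, assuming the target
	 accepts such an address; otherwise force_operand emits the add
	 explicitly.  */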
7876
7877 /* No sense saving up arithmetic to be done
7878 if it's all in the wrong mode to form part of an address.
7879 And force_operand won't know whether to sign-extend or
7880 zero-extend. */
7881 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
88f63c77 7882 || mode != ptr_mode)
4ef7870a 7883 {
eb698c58
RS
7884 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7885 subtarget, &op0, &op1, 0);
6e7727eb
EB
7886 if (op0 == const0_rtx)
7887 return op1;
7888 if (op1 == const0_rtx)
7889 return op0;
4ef7870a
EB
7890 goto binop2;
7891 }
bbf6f052 7892
eb698c58
RS
7893 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7894 subtarget, &op0, &op1, modifier);
bc15d0ef 7895 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
bbf6f052
RK
7896
7897 case MINUS_EXPR:
ea87523e
RK
7898 /* For initializers, we are allowed to return a MINUS of two
7899 symbolic constants. Here we handle all cases when both operands
7900 are constant. */
bbf6f052
RK
7901 /* Handle difference of two symbolic constants,
7902 for the sake of an initializer. */
7903 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7904 && really_constant_p (TREE_OPERAND (exp, 0))
7905 && really_constant_p (TREE_OPERAND (exp, 1)))
7906 {
eb698c58
RS
7907 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7908 NULL_RTX, &op0, &op1, modifier);
ea87523e 7909
ea87523e
RK
7910 /* If the last operand is a CONST_INT, use plus_constant of
7911 the negated constant. Else make the MINUS. */
7912 if (GET_CODE (op1) == CONST_INT)
bc15d0ef 7913 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
ea87523e 7914 else
bc15d0ef 7915 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
bbf6f052 7916 }
ae431183 7917
1717e19e
UW
7918 /* No sense saving up arithmetic to be done
7919 if it's all in the wrong mode to form part of an address.
7920 And force_operand won't know whether to sign-extend or
7921 zero-extend. */
7922 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7923 || mode != ptr_mode)
7924 goto binop;
7925
eb698c58
RS
7926 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7927 subtarget, &op0, &op1, modifier);
1717e19e
UW
7928
7929 /* Convert A - const to A + (-const). */
7930 if (GET_CODE (op1) == CONST_INT)
7931 {
7932 op1 = negate_rtx (mode, op1);
bc15d0ef 7933 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
1717e19e
UW
7934 }
7935
7936 goto binop2;
bbf6f052
RK
7937
7938 case MULT_EXPR:
bbf6f052
RK
7939 /* If first operand is constant, swap them.
7940 Thus the following special case checks need only
7941 check the second operand. */
7942 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7943 {
b3694847 7944 tree t1 = TREE_OPERAND (exp, 0);
bbf6f052
RK
7945 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7946 TREE_OPERAND (exp, 1) = t1;
7947 }
7948
7949 /* Attempt to return something suitable for generating an
7950 indexed address, for machines that support that. */
7951
88f63c77 7952 if (modifier == EXPAND_SUM && mode == ptr_mode
3b40e71b 7953 && host_integerp (TREE_OPERAND (exp, 1), 0))
bbf6f052 7954 {
48a5f2fa
DJ
7955 tree exp1 = TREE_OPERAND (exp, 1);
7956
921b3427
RK
7957 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7958 EXPAND_SUM);
bbf6f052 7959
f8cfc6aa 7960 if (!REG_P (op0))
906c4e36 7961 op0 = force_operand (op0, NULL_RTX);
f8cfc6aa 7962 if (!REG_P (op0))
bbf6f052
RK
7963 op0 = copy_to_mode_reg (mode, op0);
7964
bc15d0ef 7965 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
48a5f2fa 7966 gen_int_mode (tree_low_cst (exp1, 0),
bc15d0ef 7967 TYPE_MODE (TREE_TYPE (exp1)))));
bbf6f052
RK
7968 }
7969
8403445a
AM
7970 if (modifier == EXPAND_STACK_PARM)
7971 target = 0;
7972
bbf6f052
RK
7973 /* Check for multiplying things that have been extended
7974 from a narrower type. If this machine supports multiplying
7975 in that narrower type with a result in the desired type,
7976 do it that way, and avoid the explicit type-conversion. */
8b44057d
BS
7977
7978 subexp0 = TREE_OPERAND (exp, 0);
7979 subexp1 = TREE_OPERAND (exp, 1);
7980 /* First, check if we have a multiplication of one signed and one
7981 unsigned operand. */
7982 if (TREE_CODE (subexp0) == NOP_EXPR
7983 && TREE_CODE (subexp1) == NOP_EXPR
7984 && TREE_CODE (type) == INTEGER_TYPE
7985 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
7986 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7987 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
7988 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp1, 0))))
7989 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
7990 != TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp1, 0)))))
7991 {
7992 enum machine_mode innermode
7993 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (subexp0, 0)));
7994 this_optab = usmul_widen_optab;
7995 if (mode == GET_MODE_WIDER_MODE (innermode))
7996 {
7997 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7998 {
7999 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0))))
8000 expand_operands (TREE_OPERAND (subexp0, 0),
8001 TREE_OPERAND (subexp1, 0),
8002 NULL_RTX, &op0, &op1, 0);
8003 else
8004 expand_operands (TREE_OPERAND (subexp0, 0),
8005 TREE_OPERAND (subexp1, 0),
8006 NULL_RTX, &op1, &op0, 0);
8007
832942a8 8008 goto binop3;
8b44057d
BS
8009 }
8010 }
8011 }
8012 /* Check for a multiplication with matching signedness. */
8013 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
bbf6f052
RK
8014 && TREE_CODE (type) == INTEGER_TYPE
8015 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8016 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8017 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8018 && int_fits_type_p (TREE_OPERAND (exp, 1),
8019 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8020 /* Don't use a widening multiply if a shift will do. */
8021 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
906c4e36 8022 > HOST_BITS_PER_WIDE_INT)
bbf6f052
RK
8023 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8024 ||
8025 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8df83eae
RK
8026 && (TYPE_PRECISION (TREE_TYPE
8027 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8028 == TYPE_PRECISION (TREE_TYPE
8029 (TREE_OPERAND
8030 (TREE_OPERAND (exp, 0), 0))))
bbf6f052
RK
8031 /* If both operands are extended, they must either both
8032 be zero-extended or both be sign-extended. */
8df83eae
RK
8033 && (TYPE_UNSIGNED (TREE_TYPE
8034 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8035 == TYPE_UNSIGNED (TREE_TYPE
8036 (TREE_OPERAND
8037 (TREE_OPERAND (exp, 0), 0)))))))
bbf6f052 8038 {
888d65b5
RS
8039 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8040 enum machine_mode innermode = TYPE_MODE (op0type);
8df83eae 8041 bool zextend_p = TYPE_UNSIGNED (op0type);
888d65b5
RS
8042 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8043 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8044
d2348bd5 8045 if (mode == GET_MODE_2XWIDER_MODE (innermode))
bbf6f052 8046 {
b10af0c8
TG
8047 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8048 {
b10af0c8 8049 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
eb698c58
RS
8050 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8051 TREE_OPERAND (exp, 1),
84217346 8052 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
b10af0c8 8053 else
eb698c58
RS
8054 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8055 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
84217346 8056 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
c4d70ce3 8057 goto binop3;
b10af0c8
TG
8058 }
8059 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
8060 && innermode == word_mode)
8061 {
888d65b5 8062 rtx htem, hipart;
84217346 8063 op0 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
b10af0c8 8064 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8c118062 8065 op1 = convert_modes (innermode, mode,
84217346 8066 expand_normal (TREE_OPERAND (exp, 1)),
8c118062 8067 unsignedp);
b10af0c8 8068 else
84217346 8069 op1 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 1), 0));
b10af0c8
TG
8070 temp = expand_binop (mode, other_optab, op0, op1, target,
8071 unsignedp, OPTAB_LIB_WIDEN);
888d65b5
RS
8072 hipart = gen_highpart (innermode, temp);
8073 htem = expand_mult_highpart_adjust (innermode, hipart,
8074 op0, op1, hipart,
8075 zextend_p);
8076 if (htem != hipart)
8077 emit_move_insn (hipart, htem);
bc15d0ef 8078 return REDUCE_BIT_FIELD (temp);
b10af0c8 8079 }
bbf6f052
RK
8080 }
8081 }
eb698c58
RS
8082 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8083 subtarget, &op0, &op1, 0);
bc15d0ef 8084 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
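      /* Illustrative note, not part of the original source: the widening
	 multiply checks above cover source such as

	     int a, b;
	     long long p = (long long) a * b;

	 where, on a 32-bit target and assuming the corresponding optab
	 handler exists, a 32x32->64 widening multiply (or a word multiply
	 plus expand_mult_highpart_adjust) replaces a full 64-bit
	 multiplication.  */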
bbf6f052
RK
8085
8086 case TRUNC_DIV_EXPR:
8087 case FLOOR_DIV_EXPR:
8088 case CEIL_DIV_EXPR:
8089 case ROUND_DIV_EXPR:
8090 case EXACT_DIV_EXPR:
8403445a
AM
8091 if (modifier == EXPAND_STACK_PARM)
8092 target = 0;
bbf6f052
RK
8093 /* Possible optimization: compute the dividend with EXPAND_SUM
8094 then if the divisor is constant can optimize the case
8095 where some terms of the dividend have coeffs divisible by it. */
eb698c58
RS
8096 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8097 subtarget, &op0, &op1, 0);
bbf6f052
RK
8098 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8099
8100 case RDIV_EXPR:
bbf6f052
RK
8101 goto binop;
8102
8103 case TRUNC_MOD_EXPR:
8104 case FLOOR_MOD_EXPR:
8105 case CEIL_MOD_EXPR:
8106 case ROUND_MOD_EXPR:
8403445a
AM
8107 if (modifier == EXPAND_STACK_PARM)
8108 target = 0;
eb698c58
RS
8109 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8110 subtarget, &op0, &op1, 0);
bbf6f052
RK
8111 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8112
8113 case FIX_ROUND_EXPR:
8114 case FIX_FLOOR_EXPR:
8115 case FIX_CEIL_EXPR:
5b0264cb 8116 gcc_unreachable (); /* Not used for C. */
bbf6f052
RK
8117
8118 case FIX_TRUNC_EXPR:
84217346 8119 op0 = expand_normal (TREE_OPERAND (exp, 0));
8403445a 8120 if (target == 0 || modifier == EXPAND_STACK_PARM)
bbf6f052
RK
8121 target = gen_reg_rtx (mode);
8122 expand_fix (target, op0, unsignedp);
8123 return target;
8124
8125 case FLOAT_EXPR:
84217346 8126 op0 = expand_normal (TREE_OPERAND (exp, 0));
8403445a 8127 if (target == 0 || modifier == EXPAND_STACK_PARM)
bbf6f052
RK
8128 target = gen_reg_rtx (mode);
8129 /* expand_float can't figure out what to do if FROM has VOIDmode.
8130 So give it the correct mode. With -O, cse will optimize this. */
8131 if (GET_MODE (op0) == VOIDmode)
8132 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8133 op0);
8134 expand_float (target, op0,
8df83eae 8135 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
bbf6f052
RK
8136 return target;
8137
8138 case NEGATE_EXPR:
5b22bee8 8139 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8403445a
AM
8140 if (modifier == EXPAND_STACK_PARM)
8141 target = 0;
91ce572a 8142 temp = expand_unop (mode,
c4d70ce3
PB
8143 optab_for_tree_code (NEGATE_EXPR, type),
8144 op0, target, 0);
5b0264cb 8145 gcc_assert (temp);
bc15d0ef 8146 return REDUCE_BIT_FIELD (temp);
bbf6f052
RK
8147
8148 case ABS_EXPR:
8149 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8403445a
AM
8150 if (modifier == EXPAND_STACK_PARM)
8151 target = 0;
bbf6f052 8152
11017cc7 8153 /* ABS_EXPR is not valid for complex arguments. */
5b0264cb
NS
8154 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8155 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
2d7050fd 8156
bbf6f052
RK
8157 /* Unsigned abs is simply the operand. Testing here means we don't
8158 risk generating incorrect code below. */
8df83eae 8159 if (TYPE_UNSIGNED (type))
bbf6f052
RK
8160 return op0;
8161
91ce572a 8162 return expand_abs (mode, op0, target, unsignedp,
e5e809f4 8163 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
bbf6f052
RK
8164
8165 case MAX_EXPR:
8166 case MIN_EXPR:
8167 target = original_target;
8403445a
AM
8168 if (target == 0
8169 || modifier == EXPAND_STACK_PARM
3c0cb5de 8170 || (MEM_P (target) && MEM_VOLATILE_P (target))
d6a5ac33 8171 || GET_MODE (target) != mode
f8cfc6aa 8172 || (REG_P (target)
bbf6f052
RK
8173 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8174 target = gen_reg_rtx (mode);
eb698c58
RS
8175 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8176 target, &op0, &op1, 0);
bbf6f052
RK
8177
8178 /* First try to do it with a special MIN or MAX instruction.
8179 If that does not win, use a conditional jump to select the proper
8180 value. */
c4d70ce3 8181 this_optab = optab_for_tree_code (code, type);
bbf6f052
RK
8182 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8183 OPTAB_WIDEN);
8184 if (temp != 0)
8185 return temp;
8186
fa2981d8
JW
8187 /* At this point, a MEM target is no longer useful; we will get better
8188 code without it. */
3a94c984 8189
dbedefae 8190 if (! REG_P (target))
fa2981d8
JW
8191 target = gen_reg_rtx (mode);
8192
e3be1116
RS
8193 /* If op1 was placed in target, swap op0 and op1. */
8194 if (target != op0 && target == op1)
8195 {
927630a5 8196 temp = op0;
e3be1116 8197 op0 = op1;
927630a5 8198 op1 = temp;
e3be1116
RS
8199 }
8200
dbedefae
RS
8201 /* We generate better code and avoid problems with op1 mentioning
8202 target by forcing op1 into a pseudo if it isn't a constant. */
8203 if (! CONSTANT_P (op1))
8204 op1 = force_reg (mode, op1);
8205
230dedb3
JH
8206 {
8207 enum rtx_code comparison_code;
8208 rtx cmpop1 = op1;
927630a5 8209
230dedb3
JH
8210 if (code == MAX_EXPR)
8211 comparison_code = unsignedp ? GEU : GE;
8212 else
8213 comparison_code = unsignedp ? LEU : LE;
927630a5 8214
6416ae7f 8215 /* Canonicalize to comparisons against 0. */
230dedb3
JH
8216 if (op1 == const1_rtx)
8217 {
8218 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8219 or (a != 0 ? a : 1) for unsigned.
8220 For MIN we are safe converting (a <= 1 ? a : 1)
8221 into (a <= 0 ? a : 1) */
8222 cmpop1 = const0_rtx;
8223 if (code == MAX_EXPR)
8224 comparison_code = unsignedp ? NE : GT;
8225 }
8226 if (op1 == constm1_rtx && !unsignedp)
8227 {
8228 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8229 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8230 cmpop1 = const0_rtx;
8231 if (code == MIN_EXPR)
8232 comparison_code = LT;
8233 }
8234#ifdef HAVE_conditional_move
8235 /* Use a conditional move if possible. */
8236 if (can_conditionally_move_p (mode))
8237 {
8238 rtx insn;
927630a5 8239
230dedb3
JH
8240 /* ??? Same problem as in expmed.c: emit_conditional_move
8241 forces a stack adjustment via compare_from_rtx, and we
8242 lose the stack adjustment if the sequence we are about
8243 to create is discarded. */
8244 do_pending_stack_adjust ();
927630a5 8245
230dedb3 8246 start_sequence ();
927630a5 8247
230dedb3
JH
8248 /* Try to emit the conditional move. */
8249 insn = emit_conditional_move (target, comparison_code,
8250 op0, cmpop1, mode,
8251 op0, op1, mode,
8252 unsignedp);
927630a5 8253
230dedb3
JH
8254 /* If we could do the conditional move, emit the sequence,
8255 and return. */
8256 if (insn)
8257 {
8258 rtx seq = get_insns ();
8259 end_sequence ();
8260 emit_insn (seq);
8261 return target;
8262 }
8263
8264 /* Otherwise discard the sequence and fall back to code with
8265 branches. */
8266 end_sequence ();
8267 }
927630a5 8268#endif
230dedb3
JH
8269 if (target != op0)
8270 emit_move_insn (target, op0);
d6a5ac33 8271
230dedb3 8272 temp = gen_label_rtx ();
3bf78d3b
RS
8273 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8274 unsignedp, mode, NULL_RTX, NULL_RTX, temp);
230dedb3 8275 }
b30f05db 8276 emit_move_insn (target, op1);
927630a5 8277 emit_label (temp);
bbf6f052
RK
8278 return target;
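      /* Illustrative note, not part of the original source: the
	 canonicalization above turns, for a signed MAX_EXPR,

	     a >= 1 ? a : 1    into    a > 0 ? a : 1

	 (and "a != 0 ? a : 1" for the unsigned case), so the comparison is
	 against zero, which targets typically handle with a cheaper
	 compare or conditional move.  */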
8279
bbf6f052
RK
8280 case BIT_NOT_EXPR:
8281 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8403445a
AM
8282 if (modifier == EXPAND_STACK_PARM)
8283 target = 0;
bbf6f052 8284 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
5b0264cb 8285 gcc_assert (temp);
bbf6f052
RK
8286 return temp;
8287
d6a5ac33
RK
8288 /* ??? Can optimize bitwise operations with one arg constant.
8289 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8290 and (a bitwise1 b) bitwise2 b (etc)
8291 but that is probably not worthwhile. */
8292
8293 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8294 boolean values when we want in all cases to compute both of them. In
8295 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8296 as actual zero-or-1 values and then bitwise anding. In cases where
8297 there cannot be any side effects, better code would be made by
8298 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8299 how to recognize those cases. */
8300
bbf6f052 8301 case TRUTH_AND_EXPR:
c4d70ce3 8302 code = BIT_AND_EXPR;
bbf6f052 8303 case BIT_AND_EXPR:
bbf6f052
RK
8304 goto binop;
8305
bbf6f052 8306 case TRUTH_OR_EXPR:
7efcb746 8307 code = BIT_IOR_EXPR;
bbf6f052 8308 case BIT_IOR_EXPR:
bbf6f052
RK
8309 goto binop;
8310
874726a8 8311 case TRUTH_XOR_EXPR:
c4d70ce3 8312 code = BIT_XOR_EXPR;
bbf6f052 8313 case BIT_XOR_EXPR:
bbf6f052
RK
8314 goto binop;
8315
8316 case LSHIFT_EXPR:
8317 case RSHIFT_EXPR:
8318 case LROTATE_EXPR:
8319 case RROTATE_EXPR:
e5e809f4 8320 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052 8321 subtarget = 0;
8403445a
AM
8322 if (modifier == EXPAND_STACK_PARM)
8323 target = 0;
bbf6f052
RK
8324 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8325 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8326 unsignedp);
8327
d6a5ac33
RK
8328 /* Could determine the answer when only additive constants differ. Also,
8329 the addition of one can be handled by changing the condition. */
bbf6f052
RK
8330 case LT_EXPR:
8331 case LE_EXPR:
8332 case GT_EXPR:
8333 case GE_EXPR:
8334 case EQ_EXPR:
8335 case NE_EXPR:
1eb8759b
RH
8336 case UNORDERED_EXPR:
8337 case ORDERED_EXPR:
8338 case UNLT_EXPR:
8339 case UNLE_EXPR:
8340 case UNGT_EXPR:
8341 case UNGE_EXPR:
8342 case UNEQ_EXPR:
d1a7edaf 8343 case LTGT_EXPR:
8403445a
AM
8344 temp = do_store_flag (exp,
8345 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8346 tmode != VOIDmode ? tmode : mode, 0);
bbf6f052
RK
8347 if (temp != 0)
8348 return temp;
d6a5ac33 8349
0f41302f 8350 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
bbf6f052
RK
8351 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8352 && original_target
f8cfc6aa 8353 && REG_P (original_target)
bbf6f052
RK
8354 && (GET_MODE (original_target)
8355 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8356 {
d6a5ac33
RK
8357 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8358 VOIDmode, 0);
8359
c0a3eeac
UW
8360 /* If temp is constant, we can just compute the result. */
8361 if (GET_CODE (temp) == CONST_INT)
8362 {
8363 if (INTVAL (temp) != 0)
8364 emit_move_insn (target, const1_rtx);
8365 else
8366 emit_move_insn (target, const0_rtx);
8367
8368 return target;
8369 }
8370
bbf6f052 8371 if (temp != original_target)
c0a3eeac
UW
8372 {
8373 enum machine_mode mode1 = GET_MODE (temp);
8374 if (mode1 == VOIDmode)
8375 mode1 = tmode != VOIDmode ? tmode : mode;
0fb7aeda 8376
c0a3eeac
UW
8377 temp = copy_to_mode_reg (mode1, temp);
8378 }
d6a5ac33 8379
bbf6f052 8380 op1 = gen_label_rtx ();
c5d5d461 8381 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
a06ef755 8382 GET_MODE (temp), unsignedp, op1);
bbf6f052
RK
8383 emit_move_insn (temp, const1_rtx);
8384 emit_label (op1);
8385 return temp;
8386 }
d6a5ac33 8387
25f3e06c
PB
8388 /* If no set-flag instruction, must generate a conditional store
8389 into a temporary variable. Drop through and handle this
8390 like && and ||. */
8391
8392 if (! ignore
8393 && (target == 0
8394 || modifier == EXPAND_STACK_PARM
8395 || ! safe_from_p (target, exp, 1)
8396 /* Make sure we don't have a hard reg (such as function's return
8397 value) live across basic blocks, if not optimizing. */
8398 || (!optimize && REG_P (target)
8399 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8400 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8401
8402 if (target)
8403 emit_move_insn (target, const0_rtx);
8404
8405 op1 = gen_label_rtx ();
8406 jumpifnot (exp, op1);
8407
8408 if (target)
8409 emit_move_insn (target, const1_rtx);
8410
8411 emit_label (op1);
8412 return ignore ? const0_rtx : target;
8413
bbf6f052 8414 case TRUTH_NOT_EXPR:
8403445a
AM
8415 if (modifier == EXPAND_STACK_PARM)
8416 target = 0;
bbf6f052
RK
8417 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8418 /* The parser is careful to generate TRUTH_NOT_EXPR
8419 only with operands that are always zero or one. */
906c4e36 8420 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
bbf6f052 8421 target, 1, OPTAB_LIB_WIDEN);
5b0264cb 8422 gcc_assert (temp);
bbf6f052
RK
8423 return temp;
8424
6de9cd9a
DN
8425 case STATEMENT_LIST:
8426 {
8427 tree_stmt_iterator iter;
8428
5b0264cb 8429 gcc_assert (ignore);
6de9cd9a
DN
8430
8431 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
8432 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
8433 }
8434 return const0_rtx;
8435
bbf6f052 8436 case COND_EXPR:
ba8081eb
KH
8437 /* A COND_EXPR with its type being VOID_TYPE represents a
8438 conditional jump and is handled in
8439 expand_gimple_cond_expr. */
8440 gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp)));
f676971a 8441
e5bacf32
PB
8442 /* Note that COND_EXPRs whose type is a structure or union
8443 are required to be constructed to contain assignments of
8444 a temporary variable, so that we can evaluate them here
8445 for side effect only. If type is void, we must do likewise. */
8446
5b0264cb
NS
8447 gcc_assert (!TREE_ADDRESSABLE (type)
8448 && !ignore
8449 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
8450 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
f676971a 8451
e5bacf32
PB
8452 /* If we are not to produce a result, we have no target. Otherwise,
8453 if a target was specified use it; it will not be used as an
8454 intermediate target unless it is safe. If no target, use a
8455 temporary. */
f676971a 8456
e5bacf32
PB
8457 if (modifier != EXPAND_STACK_PARM
8458 && original_target
8459 && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8460 && GET_MODE (original_target) == mode
7c00d1fe 8461#ifdef HAVE_conditional_move
e5bacf32
PB
8462 && (! can_conditionally_move_p (mode)
8463 || REG_P (original_target))
7c00d1fe 8464#endif
e5bacf32
PB
8465 && !MEM_P (original_target))
8466 temp = original_target;
8467 else
8468 temp = assign_temp (type, 0, 0, 1);
f676971a 8469
e5bacf32
PB
8470 do_pending_stack_adjust ();
8471 NO_DEFER_POP;
8472 op0 = gen_label_rtx ();
8473 op1 = gen_label_rtx ();
8474 jumpifnot (TREE_OPERAND (exp, 0), op0);
8475 store_expr (TREE_OPERAND (exp, 1), temp,
6f4fd16d 8476 modifier == EXPAND_STACK_PARM);
f676971a 8477
e5bacf32
PB
8478 emit_jump_insn (gen_jump (op1));
8479 emit_barrier ();
8480 emit_label (op0);
8481 store_expr (TREE_OPERAND (exp, 2), temp,
6f4fd16d 8482 modifier == EXPAND_STACK_PARM);
f676971a 8483
e5bacf32
PB
8484 emit_label (op1);
8485 OK_DEFER_POP;
8486 return temp;
f676971a 8487
7ce67fbe
DP
8488 case VEC_COND_EXPR:
8489 target = expand_vec_cond_expr (exp, target);
8490 return target;
8491
bbf6f052
RK
8492 case MODIFY_EXPR:
8493 {
bbf6f052
RK
8494 tree lhs = TREE_OPERAND (exp, 0);
8495 tree rhs = TREE_OPERAND (exp, 1);
bbf6f052 8496
df9af2bb
KH
8497 gcc_assert (ignore);
8498
bbf6f052
RK
8499 /* Check for |= or &= of a bitfield of size one into another bitfield
8500 of size 1. In this case, (unless we need the result of the
8501 assignment) we can do this more efficiently with a
8502 test followed by an assignment, if necessary.
8503
8504 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8505 things change so we do, this code should be enhanced to
8506 support it. */
df9af2bb 8507 if (TREE_CODE (lhs) == COMPONENT_REF
bbf6f052
RK
8508 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8509 || TREE_CODE (rhs) == BIT_AND_EXPR)
8510 && TREE_OPERAND (rhs, 0) == lhs
8511 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
05bccae2
RK
8512 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8513 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
bbf6f052
RK
8514 {
8515 rtx label = gen_label_rtx ();
3967bc2d 8516 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
bbf6f052 8517 do_jump (TREE_OPERAND (rhs, 1),
3967bc2d
RS
8518 value ? label : 0,
8519 value ? 0 : label);
8520 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value));
e7c33f54 8521 do_pending_stack_adjust ();
bbf6f052
RK
8522 emit_label (label);
8523 return const0_rtx;
8524 }
8525
e836a5a2 8526 expand_assignment (lhs, rhs);
0fb7aeda 8527
7f8adc4e 8528 return const0_rtx;
bbf6f052
RK
8529 }
8530
6e7f84a7
APB
8531 case RETURN_EXPR:
8532 if (!TREE_OPERAND (exp, 0))
8533 expand_null_return ();
8534 else
8535 expand_return (TREE_OPERAND (exp, 0));
8536 return const0_rtx;
8537
bbf6f052 8538 case ADDR_EXPR:
70bb498a 8539 return expand_expr_addr_expr (exp, target, tmode, modifier);
bbf6f052 8540
7308a047 8541 case COMPLEX_EXPR:
1466e387 8542 /* Get the rtx code of the operands. */
84217346
MD
8543 op0 = expand_normal (TREE_OPERAND (exp, 0));
8544 op1 = expand_normal (TREE_OPERAND (exp, 1));
7308a047 8545
1466e387
RH
8546 if (!target)
8547 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6551fa4d 8548
1466e387
RH
8549 /* Move the real (op0) and imaginary (op1) parts to their location. */
8550 write_complex_part (target, op0, false);
8551 write_complex_part (target, op1, true);
7308a047 8552
1466e387 8553 return target;
7308a047
RS
8554
8555 case REALPART_EXPR:
84217346 8556 op0 = expand_normal (TREE_OPERAND (exp, 0));
1466e387 8557 return read_complex_part (op0, false);
3a94c984 8558
7308a047 8559 case IMAGPART_EXPR:
84217346 8560 op0 = expand_normal (TREE_OPERAND (exp, 0));
1466e387 8561 return read_complex_part (op0, true);
7308a047 8562
6de9cd9a
DN
8563 case RESX_EXPR:
8564 expand_resx_expr (exp);
8565 return const0_rtx;
8566
e976b8b2 8567 case TRY_CATCH_EXPR:
6de9cd9a 8568 case CATCH_EXPR:
6de9cd9a 8569 case EH_FILTER_EXPR:
b335b813 8570 case TRY_FINALLY_EXPR:
ac45df5d 8571 /* Lowered by tree-eh.c. */
5b0264cb 8572 gcc_unreachable ();
b335b813 8573
ac45df5d
RH
8574 case WITH_CLEANUP_EXPR:
8575 case CLEANUP_POINT_EXPR:
8576 case TARGET_EXPR:
165b54c3 8577 case CASE_LABEL_EXPR:
77c9db77 8578 case VA_ARG_EXPR:
caf93cb0 8579 case BIND_EXPR:
e5bacf32
PB
8580 case INIT_EXPR:
8581 case CONJ_EXPR:
8582 case COMPOUND_EXPR:
8583 case PREINCREMENT_EXPR:
8584 case PREDECREMENT_EXPR:
8585 case POSTINCREMENT_EXPR:
8586 case POSTDECREMENT_EXPR:
8587 case LOOP_EXPR:
8588 case EXIT_EXPR:
e5bacf32
PB
8589 case TRUTH_ANDIF_EXPR:
8590 case TRUTH_ORIF_EXPR:
ac45df5d 8591 /* Lowered by gimplify.c. */
5b0264cb 8592 gcc_unreachable ();
b335b813 8593
52a11cbf 8594 case EXC_PTR_EXPR:
86c99549 8595 return get_exception_pointer (cfun);
52a11cbf 8596
6de9cd9a
DN
8597 case FILTER_EXPR:
8598 return get_exception_filter (cfun);
8599
67231816
RH
8600 case FDESC_EXPR:
8601 /* Function descriptors are not valid except for as
8602 initialization constants, and should not be expanded. */
5b0264cb 8603 gcc_unreachable ();
67231816 8604
6de9cd9a 8605 case SWITCH_EXPR:
7efcb746 8606 expand_case (exp);
6de9cd9a
DN
8607 return const0_rtx;
8608
8609 case LABEL_EXPR:
8610 expand_label (TREE_OPERAND (exp, 0));
8611 return const0_rtx;
8612
6de9cd9a
DN
8613 case ASM_EXPR:
8614 expand_asm_expr (exp);
8615 return const0_rtx;
8616
d25cee4d
RH
8617 case WITH_SIZE_EXPR:
8618 /* WITH_SIZE_EXPR expands to its first argument. The caller should
8619 have pulled out the size to use in whatever context it needed. */
8620 return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
8621 modifier, alt_rtl);
8622
7ccf35ed
DN
8623 case REALIGN_LOAD_EXPR:
8624 {
8625 tree oprnd0 = TREE_OPERAND (exp, 0);
8626 tree oprnd1 = TREE_OPERAND (exp, 1);
8627 tree oprnd2 = TREE_OPERAND (exp, 2);
8628 rtx op2;
8629
8630 this_optab = optab_for_tree_code (code, type);
84217346
MD
8631 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8632 op2 = expand_normal (oprnd2);
7ccf35ed
DN
8633 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8634 target, unsignedp);
535a42b1 8635 gcc_assert (temp);
7ccf35ed
DN
8636 return temp;
8637 }
8638
20f06221
DN
8639 case DOT_PROD_EXPR:
8640 {
8641 tree oprnd0 = TREE_OPERAND (exp, 0);
8642 tree oprnd1 = TREE_OPERAND (exp, 1);
8643 tree oprnd2 = TREE_OPERAND (exp, 2);
8644 rtx op2;
8645
84217346
MD
8646 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8647 op2 = expand_normal (oprnd2);
20f06221
DN
8648 target = expand_widen_pattern_expr (exp, op0, op1, op2,
8649 target, unsignedp);
8650 return target;
8651 }
8652
8653 case WIDEN_SUM_EXPR:
8654 {
8655 tree oprnd0 = TREE_OPERAND (exp, 0);
8656 tree oprnd1 = TREE_OPERAND (exp, 1);
8657
8658 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
8659 target = expand_widen_pattern_expr (exp, op0, NULL_RTX, op1,
8660 target, unsignedp);
8661 return target;
8662 }
8663
61d3cdbb
DN
8664 case REDUC_MAX_EXPR:
8665 case REDUC_MIN_EXPR:
8666 case REDUC_PLUS_EXPR:
8667 {
84217346 8668 op0 = expand_normal (TREE_OPERAND (exp, 0));
61d3cdbb
DN
8669 this_optab = optab_for_tree_code (code, type);
8670 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
8671 gcc_assert (temp);
8672 return temp;
8673 }
7ccf35ed 8674
a6b46ba2
DN
8675 case VEC_LSHIFT_EXPR:
8676 case VEC_RSHIFT_EXPR:
8677 {
8678 target = expand_vec_shift_expr (exp, target);
8679 return target;
8680 }
8681
bbf6f052 8682 default:
673fda6b
SB
8683 return lang_hooks.expand_expr (exp, original_target, tmode,
8684 modifier, alt_rtl);
bbf6f052
RK
8685 }
8686
c4d70ce3 8687 /* Here to do an ordinary binary operator. */
bbf6f052 8688 binop:
eb698c58
RS
8689 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8690 subtarget, &op0, &op1, 0);
bbf6f052 8691 binop2:
c4d70ce3
PB
8692 this_optab = optab_for_tree_code (code, type);
8693 binop3:
8403445a
AM
8694 if (modifier == EXPAND_STACK_PARM)
8695 target = 0;
bbf6f052
RK
8696 temp = expand_binop (mode, this_optab, op0, op1, target,
8697 unsignedp, OPTAB_LIB_WIDEN);
5b0264cb 8698 gcc_assert (temp);
bc15d0ef
JM
8699 return REDUCE_BIT_FIELD (temp);
8700}
8701#undef REDUCE_BIT_FIELD
8702\f
8703/* Subroutine of above: reduce EXP to the precision of TYPE (in the
8704 signedness of TYPE), possibly returning the result in TARGET. */
8705static rtx
8706reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
8707{
8708 HOST_WIDE_INT prec = TYPE_PRECISION (type);
8709 if (target && GET_MODE (target) != GET_MODE (exp))
8710 target = 0;
8711 if (TYPE_UNSIGNED (type))
8712 {
8713 rtx mask;
8714 if (prec < HOST_BITS_PER_WIDE_INT)
8715 mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
8716 GET_MODE (exp));
8717 else
8718 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
8719 ((unsigned HOST_WIDE_INT) 1
8720 << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
8721 GET_MODE (exp));
8722 return expand_and (GET_MODE (exp), exp, mask, target);
8723 }
8724 else
8725 {
4a90aeeb 8726 tree count = build_int_cst (NULL_TREE,
7d60be94 8727 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
bc15d0ef
JM
8728 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8729 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8730 }
bbf6f052 8731}
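/* Illustrative worked example, not part of the original source: for a
   bit-field type with TYPE_PRECISION 3 held in SImode,

     unsigned: the value is ANDed with (1 << 3) - 1 == 0x7;
     signed:   the value 5 (binary 101) is shifted left by 29 and then
	       arithmetically right by 29, giving -3, the two's-complement
	       reading of the same three bits.  */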
b93a436e 8732\f
1ce7f3c2
RK
8733/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
8734 when applied to the address of EXP produces an address known to be
8735 aligned more than BIGGEST_ALIGNMENT. */
8736
8737static int
502b8322 8738is_aligning_offset (tree offset, tree exp)
1ce7f3c2 8739{
6fce44af 8740 /* Strip off any conversions. */
1ce7f3c2
RK
8741 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8742 || TREE_CODE (offset) == NOP_EXPR
6fce44af 8743 || TREE_CODE (offset) == CONVERT_EXPR)
1ce7f3c2
RK
8744 offset = TREE_OPERAND (offset, 0);
8745
8746 /* We must now have a BIT_AND_EXPR with a constant that is one less than
8747 power of 2 and which is larger than BIGGEST_ALIGNMENT. */
8748 if (TREE_CODE (offset) != BIT_AND_EXPR
8749 || !host_integerp (TREE_OPERAND (offset, 1), 1)
caf93cb0 8750 || compare_tree_int (TREE_OPERAND (offset, 1),
c0cfc691 8751 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
1ce7f3c2
RK
8752 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
8753 return 0;
8754
8755 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
8756 It must be NEGATE_EXPR. Then strip any more conversions. */
8757 offset = TREE_OPERAND (offset, 0);
8758 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8759 || TREE_CODE (offset) == NOP_EXPR
8760 || TREE_CODE (offset) == CONVERT_EXPR)
8761 offset = TREE_OPERAND (offset, 0);
8762
8763 if (TREE_CODE (offset) != NEGATE_EXPR)
8764 return 0;
8765
8766 offset = TREE_OPERAND (offset, 0);
8767 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8768 || TREE_CODE (offset) == NOP_EXPR
8769 || TREE_CODE (offset) == CONVERT_EXPR)
8770 offset = TREE_OPERAND (offset, 0);
8771
6fce44af
RK
8772 /* This must now be the address of EXP. */
8773 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
1ce7f3c2
RK
8774}
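/* Illustrative note, not part of the original source: the pattern accepted
   above corresponds to an offset written as

     (- (intptr) &exp) & (ALIGN - 1)

   where ALIGN is a power of two larger, in bytes, than
   BIGGEST_ALIGNMENT / BITS_PER_UNIT; adding such an offset to the address
   of EXP rounds it up to the next ALIGN boundary, so the result is known
   to be at least ALIGN-aligned.  The cast spelling is hypothetical
   shorthand for the NEGATE_EXPR of the ADDR_EXPR that the code checks
   for.  */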
8775\f
e0a2f705 8776/* Return the tree node if an ARG corresponds to a string constant or zero
cc2902df 8777 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
fed3cef0
RK
8778 in bytes within the string that ARG is accessing. The type of the
8779 offset will be `sizetype'. */
b93a436e 8780
28f4ec01 8781tree
502b8322 8782string_constant (tree arg, tree *ptr_offset)
b93a436e 8783{
a45f71f5 8784 tree array, offset;
b93a436e
JL
8785 STRIP_NOPS (arg);
8786
a45f71f5 8787 if (TREE_CODE (arg) == ADDR_EXPR)
b93a436e 8788 {
a45f71f5
JJ
8789 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8790 {
8791 *ptr_offset = size_zero_node;
8792 return TREE_OPERAND (arg, 0);
8793 }
8794 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
8795 {
8796 array = TREE_OPERAND (arg, 0);
8797 offset = size_zero_node;
8798 }
8799 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
8800 {
8801 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
8802 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
8803 if (TREE_CODE (array) != STRING_CST
8804 && TREE_CODE (array) != VAR_DECL)
8805 return 0;
8806 }
8807 else
8808 return 0;
6de9cd9a 8809 }
b93a436e
JL
8810 else if (TREE_CODE (arg) == PLUS_EXPR)
8811 {
8812 tree arg0 = TREE_OPERAND (arg, 0);
8813 tree arg1 = TREE_OPERAND (arg, 1);
8814
8815 STRIP_NOPS (arg0);
8816 STRIP_NOPS (arg1);
8817
8818 if (TREE_CODE (arg0) == ADDR_EXPR
a45f71f5
JJ
8819 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
8820 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
bbf6f052 8821 {
a45f71f5
JJ
8822 array = TREE_OPERAND (arg0, 0);
8823 offset = arg1;
bbf6f052 8824 }
b93a436e 8825 else if (TREE_CODE (arg1) == ADDR_EXPR
a45f71f5
JJ
8826 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
8827 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
bbf6f052 8828 {
a45f71f5
JJ
8829 array = TREE_OPERAND (arg1, 0);
8830 offset = arg0;
bbf6f052 8831 }
a45f71f5
JJ
8832 else
8833 return 0;
8834 }
8835 else
8836 return 0;
8837
8838 if (TREE_CODE (array) == STRING_CST)
8839 {
3967bc2d 8840 *ptr_offset = fold_convert (sizetype, offset);
a45f71f5
JJ
8841 return array;
8842 }
8843 else if (TREE_CODE (array) == VAR_DECL)
8844 {
8845 int length;
8846
8847 /* Variables initialized to string literals can be handled too. */
8848 if (DECL_INITIAL (array) == NULL_TREE
8849 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
8850 return 0;
8851
8852 /* Handle them only if they are read-only, non-volatile and bind locally. */
8853 if (! TREE_READONLY (array)
8854 || TREE_SIDE_EFFECTS (array)
8855 || ! targetm.binds_local_p (array))
8856 return 0;
8857
8858 /* Avoid const char foo[4] = "abcde"; */
8859 if (DECL_SIZE_UNIT (array) == NULL_TREE
8860 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
8861 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
8862 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
8863 return 0;
8864
8865 /* If the variable is bigger than the string literal, OFFSET must be
8866 constant and within the bounds of the string literal. */
3967bc2d 8867 offset = fold_convert (sizetype, offset);
a45f71f5
JJ
8868 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
8869 && (! host_integerp (offset, 1)
8870 || compare_tree_int (offset, length) >= 0))
8871 return 0;
8872
8873 *ptr_offset = offset;
8874 return DECL_INITIAL (array);
b93a436e 8875 }
ca695ac9 8876
b93a436e
JL
8877 return 0;
8878}
ca695ac9 8879\f
b93a436e
JL
8880/* Generate code to calculate EXP using a store-flag instruction
8881 and return an rtx for the result. EXP is either a comparison
8882 or a TRUTH_NOT_EXPR whose operand is a comparison.
ca695ac9 8883
b93a436e 8884 If TARGET is nonzero, store the result there if convenient.
ca695ac9 8885
cc2902df 8886 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
b93a436e 8887 cheap.
ca695ac9 8888
b93a436e
JL
8889 Return zero if there is no suitable set-flag instruction
8890 available on this machine.
ca695ac9 8891
b93a436e
JL
8892 Once expand_expr has been called on the arguments of the comparison,
8893 we are committed to doing the store flag, since it is not safe to
8894 re-evaluate the expression. We emit the store-flag insn by calling
8895 emit_store_flag, but only expand the arguments if we have a reason
8896 to believe that emit_store_flag will be successful. If we think that
8897 it will, but it isn't, we have to simulate the store-flag with a
8898 set/jump/set sequence. */
ca695ac9 8899
b93a436e 8900static rtx
502b8322 8901do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
b93a436e
JL
8902{
8903 enum rtx_code code;
8904 tree arg0, arg1, type;
8905 tree tem;
8906 enum machine_mode operand_mode;
8907 int invert = 0;
8908 int unsignedp;
8909 rtx op0, op1;
8910 enum insn_code icode;
8911 rtx subtarget = target;
381127e8 8912 rtx result, label;
ca695ac9 8913
b93a436e
JL
8914 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
8915 result at the end. We can't simply invert the test since it would
8916 have already been inverted if it were valid. This case occurs for
8917 some floating-point comparisons. */
ca695ac9 8918
b93a436e
JL
8919 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
8920 invert = 1, exp = TREE_OPERAND (exp, 0);
ca695ac9 8921
b93a436e
JL
8922 arg0 = TREE_OPERAND (exp, 0);
8923 arg1 = TREE_OPERAND (exp, 1);
5129d2ce
AH
8924
8925 /* Don't crash if the comparison was erroneous. */
8926 if (arg0 == error_mark_node || arg1 == error_mark_node)
8927 return const0_rtx;
8928
b93a436e
JL
8929 type = TREE_TYPE (arg0);
8930 operand_mode = TYPE_MODE (type);
8df83eae 8931 unsignedp = TYPE_UNSIGNED (type);
ca695ac9 8932
b93a436e
JL
8933 /* We won't bother with BLKmode store-flag operations because it would mean
8934 passing a lot of information to emit_store_flag. */
8935 if (operand_mode == BLKmode)
8936 return 0;
ca695ac9 8937
b93a436e
JL
8938 /* We won't bother with store-flag operations involving function pointers
8939 when function pointers must be canonicalized before comparisons. */
8940#ifdef HAVE_canonicalize_funcptr_for_compare
8941 if (HAVE_canonicalize_funcptr_for_compare
8942 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
8943 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8944 == FUNCTION_TYPE))
8945 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
8946 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8947 == FUNCTION_TYPE))))
8948 return 0;
ca695ac9
JB
8949#endif
8950
b93a436e
JL
8951 STRIP_NOPS (arg0);
8952 STRIP_NOPS (arg1);
ca695ac9 8953
b93a436e
JL
8954 /* Get the rtx comparison code to use. We know that EXP is a comparison
8955 operation of some type. Some comparisons against 1 and -1 can be
8956 converted to comparisons with zero. Do so here so that the tests
8957 below will be aware that we have a comparison with zero. These
8958 tests will not catch constants in the first operand, but constants
8959 are rarely passed as the first operand. */
ca695ac9 8960
b93a436e
JL
8961 switch (TREE_CODE (exp))
8962 {
8963 case EQ_EXPR:
8964 code = EQ;
bbf6f052 8965 break;
b93a436e
JL
8966 case NE_EXPR:
8967 code = NE;
bbf6f052 8968 break;
b93a436e
JL
8969 case LT_EXPR:
8970 if (integer_onep (arg1))
8971 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
8972 else
8973 code = unsignedp ? LTU : LT;
ca695ac9 8974 break;
b93a436e
JL
8975 case LE_EXPR:
8976 if (! unsignedp && integer_all_onesp (arg1))
8977 arg1 = integer_zero_node, code = LT;
8978 else
8979 code = unsignedp ? LEU : LE;
ca695ac9 8980 break;
b93a436e
JL
8981 case GT_EXPR:
8982 if (! unsignedp && integer_all_onesp (arg1))
8983 arg1 = integer_zero_node, code = GE;
8984 else
8985 code = unsignedp ? GTU : GT;
8986 break;
8987 case GE_EXPR:
8988 if (integer_onep (arg1))
8989 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
8990 else
8991 code = unsignedp ? GEU : GE;
ca695ac9 8992 break;
1eb8759b
RH
8993
8994 case UNORDERED_EXPR:
8995 code = UNORDERED;
8996 break;
8997 case ORDERED_EXPR:
8998 code = ORDERED;
8999 break;
9000 case UNLT_EXPR:
9001 code = UNLT;
9002 break;
9003 case UNLE_EXPR:
9004 code = UNLE;
9005 break;
9006 case UNGT_EXPR:
9007 code = UNGT;
9008 break;
9009 case UNGE_EXPR:
9010 code = UNGE;
9011 break;
9012 case UNEQ_EXPR:
9013 code = UNEQ;
9014 break;
d1a7edaf
PB
9015 case LTGT_EXPR:
9016 code = LTGT;
9017 break;
1eb8759b 9018
ca695ac9 9019 default:
5b0264cb 9020 gcc_unreachable ();
bbf6f052 9021 }
bbf6f052 9022
b93a436e
JL
9023 /* Put a constant second. */
9024 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9025 {
9026 tem = arg0; arg0 = arg1; arg1 = tem;
9027 code = swap_condition (code);
ca695ac9 9028 }
bbf6f052 9029
b93a436e
JL
9030 /* If this is an equality or inequality test of a single bit, we can
9031 do this by shifting the bit being tested to the low-order bit and
9032 masking the result with the constant 1. If the condition was EQ,
9033 we xor it with 1. This does not require an scc insn and is faster
7960bf22
JL
9034 than an scc insn even if we have it.
9035
9036 The code to make this transformation was moved into fold_single_bit_test,
9037 so we just call into the folder and expand its result. */
d39985fa 9038
b93a436e
JL
9039 if ((code == NE || code == EQ)
9040 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9041 && integer_pow2p (TREE_OPERAND (arg0, 1)))
60cd4dae 9042 {
ae2bcd98 9043 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
60cd4dae 9044 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
450b1728 9045 arg0, arg1, type),
60cd4dae
JL
9046 target, VOIDmode, EXPAND_NORMAL);
9047 }
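  /* Illustrative note, not part of the original source: for a single-bit
     test such as

	 (x & 8) != 0

     fold_single_bit_test yields the equivalent of (x >> 3) & 1, and the
     EQ form additionally XORs the result with 1, so no scc instruction
     is needed.  */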
bbf6f052 9048
b93a436e 9049 /* Now see if we are likely to be able to do this. Return if not. */
1eb8759b 9050 if (! can_compare_p (code, operand_mode, ccp_store_flag))
b93a436e 9051 return 0;
1eb8759b 9052
b93a436e
JL
9053 icode = setcc_gen_code[(int) code];
9054 if (icode == CODE_FOR_nothing
a995e389 9055 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
ca695ac9 9056 {
b93a436e
JL
9057 /* We can only do this if it is one of the special cases that
9058 can be handled without an scc insn. */
9059 if ((code == LT && integer_zerop (arg1))
9060 || (! only_cheap && code == GE && integer_zerop (arg1)))
9061 ;
08fd6d04 9062 else if (! only_cheap && (code == NE || code == EQ)
b93a436e
JL
9063 && TREE_CODE (type) != REAL_TYPE
9064 && ((abs_optab->handlers[(int) operand_mode].insn_code
9065 != CODE_FOR_nothing)
9066 || (ffs_optab->handlers[(int) operand_mode].insn_code
9067 != CODE_FOR_nothing)))
9068 ;
9069 else
9070 return 0;
ca695ac9 9071 }
3a94c984 9072
296b4ed9 9073 if (! get_subtarget (target)
e3be1116 9074 || GET_MODE (subtarget) != operand_mode)
b93a436e
JL
9075 subtarget = 0;
9076
eb698c58 9077 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
b93a436e
JL
9078
9079 if (target == 0)
9080 target = gen_reg_rtx (mode);
9081
ad76cef8 9082 result = emit_store_flag (target, code, op0, op1,
b93a436e 9083 operand_mode, unsignedp, 1);
ca695ac9 9084
b93a436e
JL
9085 if (result)
9086 {
9087 if (invert)
9088 result = expand_binop (mode, xor_optab, result, const1_rtx,
9089 result, 0, OPTAB_LIB_WIDEN);
9090 return result;
ca695ac9 9091 }
bbf6f052 9092
b93a436e 9093 /* If this failed, we have to do this with set/compare/jump/set code. */
f8cfc6aa 9094 if (!REG_P (target)
b93a436e
JL
9095 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9096 target = gen_reg_rtx (GET_MODE (target));
9097
9098 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9099 result = compare_from_rtx (op0, op1, code, unsignedp,
a06ef755 9100 operand_mode, NULL_RTX);
b93a436e
JL
9101 if (GET_CODE (result) == CONST_INT)
9102 return (((result == const0_rtx && ! invert)
9103 || (result != const0_rtx && invert))
9104 ? const0_rtx : const1_rtx);
ca695ac9 9105
8f08e8c0
JL
9106 /* The code of RESULT may not match CODE if compare_from_rtx
9107 decided to swap its operands and reverse the original code.
9108
9109 We know that compare_from_rtx returns either a CONST_INT or
9110 a new comparison code, so it is safe to just extract the
9111 code from RESULT. */
9112 code = GET_CODE (result);
9113
b93a436e 9114 label = gen_label_rtx ();
5b0264cb 9115 gcc_assert (bcc_gen_fctn[(int) code]);
0f41302f 9116
b93a436e
JL
9117 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9118 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9119 emit_label (label);
bbf6f052 9120
b93a436e 9121 return target;
ca695ac9 9122}
b93a436e 9123\f
b93a436e 9124
ad82abb8
ZW
9125/* Stubs in case we haven't got a casesi insn. */
9126#ifndef HAVE_casesi
9127# define HAVE_casesi 0
9128# define gen_casesi(a, b, c, d, e) (0)
9129# define CODE_FOR_casesi CODE_FOR_nothing
9130#endif
9131
9132/* If the machine does not have a case insn that compares the bounds,
9133 this means extra overhead for dispatch tables, which raises the
9134 threshold for using them. */
9135#ifndef CASE_VALUES_THRESHOLD
9136#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9137#endif /* CASE_VALUES_THRESHOLD */
9138
9139unsigned int
502b8322 9140case_values_threshold (void)
ad82abb8
ZW
9141{
9142 return CASE_VALUES_THRESHOLD;
9143}
9144
9145/* Attempt to generate a casesi instruction. Returns 1 if successful,
9146 0 otherwise (i.e. if there is no casesi instruction). */
9147int
502b8322
AJ
9148try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9149 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
ad82abb8
ZW
9150{
9151 enum machine_mode index_mode = SImode;
9152 int index_bits = GET_MODE_BITSIZE (index_mode);
9153 rtx op1, op2, index;
9154 enum machine_mode op_mode;
9155
9156 if (! HAVE_casesi)
9157 return 0;
9158
9159 /* Convert the index to SImode. */
9160 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9161 {
9162 enum machine_mode omode = TYPE_MODE (index_type);
84217346 9163 rtx rangertx = expand_normal (range);
ad82abb8
ZW
9164
9165 /* We must handle the endpoints in the original mode. */
3244e67d
RS
9166 index_expr = build2 (MINUS_EXPR, index_type,
9167 index_expr, minval);
ad82abb8 9168 minval = integer_zero_node;
84217346 9169 index = expand_normal (index_expr);
ad82abb8 9170 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
a06ef755 9171 omode, 1, default_label);
ad82abb8
ZW
9172 /* Now we can safely truncate. */
9173 index = convert_to_mode (index_mode, index, 0);
9174 }
9175 else
9176 {
9177 if (TYPE_MODE (index_type) != index_mode)
9178 {
3967bc2d
RS
9179 index_type = lang_hooks.types.type_for_size (index_bits, 0);
9180 index_expr = fold_convert (index_type, index_expr);
ad82abb8
ZW
9181 }
9182
84217346 9183 index = expand_normal (index_expr);
ad82abb8 9184 }
ad76cef8 9185
ad82abb8
ZW
9186 do_pending_stack_adjust ();
9187
9188 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9189 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9190 (index, op_mode))
9191 index = copy_to_mode_reg (op_mode, index);
e87b4f3f 9192
84217346 9193 op1 = expand_normal (minval);
ad82abb8
ZW
9194
9195 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9196 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
8df83eae 9197 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
ad82abb8
ZW
9198 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9199 (op1, op_mode))
9200 op1 = copy_to_mode_reg (op_mode, op1);
9201
84217346 9202 op2 = expand_normal (range);
ad82abb8
ZW
9203
9204 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9205 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
8df83eae 9206 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
ad82abb8
ZW
9207 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9208 (op2, op_mode))
9209 op2 = copy_to_mode_reg (op_mode, op2);
9210
9211 emit_jump_insn (gen_casesi (index, op1, op2,
9212 table_label, default_label));
9213 return 1;
9214}
9215
9216/* Attempt to generate a tablejump instruction; same concept. */
9217#ifndef HAVE_tablejump
9218#define HAVE_tablejump 0
9219#define gen_tablejump(x, y) (0)
9220#endif
9221
9222/* Subroutine of the next function.
9223
9224 INDEX is the value being switched on, with the lowest value
b93a436e
JL
9225 in the table already subtracted.
9226 MODE is its expected mode (needed if INDEX is constant).
9227 RANGE is the length of the jump table.
9228 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
88d3b7f0 9229
b93a436e
JL
9230 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9231 index value is out of range. */
0f41302f 9232
ad82abb8 9233static void
502b8322
AJ
9234do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9235 rtx default_label)
ca695ac9 9236{
b3694847 9237 rtx temp, vector;
88d3b7f0 9238
74f6d071
JH
9239 if (INTVAL (range) > cfun->max_jumptable_ents)
9240 cfun->max_jumptable_ents = INTVAL (range);
1877be45 9241
b93a436e
JL
9242 /* Do an unsigned comparison (in the proper mode) between the index
9243 expression and the value which represents the length of the range.
9244 Since we just finished subtracting the lower bound of the range
9245 from the index expression, this comparison allows us to simultaneously
9246 check that the original index expression value is both greater than
9247 or equal to the minimum value of the range and less than or equal to
9248 the maximum value of the range. */
709f5be1 9249
c5d5d461 9250 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
a06ef755 9251 default_label);
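  /* Illustrative note, not part of the original source: for a switch whose
     case labels span [10, 20], INDEX arrives here as x - 10 and RANGE is
     10; the single unsigned comparison "x - 10 > 10" branches to the
     default label both when x < 10 (the subtraction wrapped around) and
     when x > 20, replacing two signed comparisons.  */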
bbf6f052 9252
b93a436e
JL
9253 /* If index is in range, it must fit in Pmode.
9254 Convert to Pmode so we can index with it. */
9255 if (mode != Pmode)
9256 index = convert_to_mode (Pmode, index, 1);
bbf6f052 9257
ba228239 9258 /* Don't let a MEM slip through, because then INDEX that comes
b93a436e
JL
9259 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9260 and break_out_memory_refs will go to work on it and mess it up. */
9261#ifdef PIC_CASE_VECTOR_ADDRESS
f8cfc6aa 9262 if (flag_pic && !REG_P (index))
b93a436e
JL
9263 index = copy_to_mode_reg (Pmode, index);
9264#endif
ca695ac9 9265
b93a436e
JL
9266 /* If flag_force_addr were to affect this address
9267 it could interfere with the tricky assumptions made
9268 about addresses that contain label-refs,
9269 which may be valid only very near the tablejump itself. */
9270 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9271 GET_MODE_SIZE, because this indicates how large insns are. The other
9272 uses should all be Pmode, because they are addresses. This code
9273 could fail if addresses and insns are not the same size. */
9274 index = gen_rtx_PLUS (Pmode,
9275 gen_rtx_MULT (Pmode, index,
9276 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9277 gen_rtx_LABEL_REF (Pmode, table_label));
9278#ifdef PIC_CASE_VECTOR_ADDRESS
9279 if (flag_pic)
9280 index = PIC_CASE_VECTOR_ADDRESS (index);
9281 else
bbf6f052 9282#endif
b93a436e
JL
9283 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9284 temp = gen_reg_rtx (CASE_VECTOR_MODE);
542a8afa 9285 vector = gen_const_mem (CASE_VECTOR_MODE, index);
b93a436e
JL
9286 convert_move (temp, vector, 0);
9287
9288 emit_jump_insn (gen_tablejump (temp, table_label));
9289
9290 /* If we are generating PIC code or if the table is PC-relative, the
9291 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9292 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9293 emit_barrier ();
bbf6f052 9294}
b93a436e 9295
ad82abb8 9296int
502b8322
AJ
9297try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9298 rtx table_label, rtx default_label)
ad82abb8
ZW
9299{
9300 rtx index;
9301
9302 if (! HAVE_tablejump)
9303 return 0;
9304
4845b383 9305 index_expr = fold_build2 (MINUS_EXPR, index_type,
3967bc2d
RS
9306 fold_convert (index_type, index_expr),
9307 fold_convert (index_type, minval));
84217346 9308 index = expand_normal (index_expr);
ad82abb8
ZW
9309 do_pending_stack_adjust ();
9310
9311 do_tablejump (index, TYPE_MODE (index_type),
9312 convert_modes (TYPE_MODE (index_type),
9313 TYPE_MODE (TREE_TYPE (range)),
84217346 9314 expand_normal (range),
8df83eae 9315 TYPE_UNSIGNED (TREE_TYPE (range))),
ad82abb8
ZW
9316 table_label, default_label);
9317 return 1;
9318}
e2500fed 9319
cb2a532e
AH
9320/* Nonzero if the mode is a valid vector mode for this architecture.
9321 This returns nonzero even if there is no hardware support for the
9322 vector mode, but we can emulate with narrower modes. */
9323
9324int
502b8322 9325vector_mode_valid_p (enum machine_mode mode)
cb2a532e
AH
9326{
9327 enum mode_class class = GET_MODE_CLASS (mode);
9328 enum machine_mode innermode;
9329
9330 /* Doh! What's going on? */
9331 if (class != MODE_VECTOR_INT
9332 && class != MODE_VECTOR_FLOAT)
9333 return 0;
9334
9335 /* Hardware support. Woo hoo! */
f676971a 9336 if (targetm.vector_mode_supported_p (mode))
cb2a532e
AH
9337 return 1;
9338
9339 innermode = GET_MODE_INNER (mode);
9340
9341 /* We should probably return 1 if requesting V4DI and we have no DI,
9342 but we have V2DI, but this is probably very unlikely. */
9343
9344 /* If we have support for the inner mode, we can safely emulate it.
9345 We may not have V2DI, but we can emulate with a pair of DIs. */
6dd53648 9346 return targetm.scalar_mode_supported_p (innermode);
cb2a532e
AH
9347}
9348
d744e06e
AH
9349/* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
9350static rtx
502b8322 9351const_vector_from_tree (tree exp)
d744e06e
AH
9352{
9353 rtvec v;
9354 int units, i;
9355 tree link, elt;
9356 enum machine_mode inner, mode;
9357
9358 mode = TYPE_MODE (TREE_TYPE (exp));
9359
6de9cd9a 9360 if (initializer_zerop (exp))
d744e06e
AH
9361 return CONST0_RTX (mode);
9362
9363 units = GET_MODE_NUNITS (mode);
9364 inner = GET_MODE_INNER (mode);
9365
9366 v = rtvec_alloc (units);
9367
9368 link = TREE_VECTOR_CST_ELTS (exp);
9369 for (i = 0; link; link = TREE_CHAIN (link), ++i)
9370 {
9371 elt = TREE_VALUE (link);
9372
9373 if (TREE_CODE (elt) == REAL_CST)
9374 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
9375 inner);
9376 else
9377 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
9378 TREE_INT_CST_HIGH (elt),
9379 inner);
9380 }
9381
5f6c070d
AH
9382 /* Initialize remaining elements to 0. */
9383 for (; i < units; ++i)
9384 RTVEC_ELT (v, i) = CONST0_RTX (inner);
9385
a73b091d 9386 return gen_rtx_CONST_VECTOR (mode, v);
d744e06e 9387}
e2500fed 9388#include "gt-expr.h"