/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "real.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "reload.h"
#include "output.h"
#include "typeclass.h"
#include "toplev.h"
#include "ggc.h"
#include "langhooks.h"
#include "intl.h"
#include "tm_p.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-flow.h"
#include "target.h"
#include "timevar.h"

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif


/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};

static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
						     unsigned int,
						     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
			      struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
static rtx emit_block_move_via_libcall (rtx, rtx, rtx, bool);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
			       struct store_by_pieces *);
static rtx clear_storage_via_libcall (rtx, rtx, bool);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
				     HOST_WIDE_INT, enum machine_mode,
				     tree, tree, int, int);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
			tree, tree, int);

static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);

static int is_aligning_offset (tree, tree);
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
			     enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
#ifdef PUSH_ROUNDING
static void emit_single_push_insn (enum machine_mode, rtx, tree);
#endif
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
static void write_complex_part (rtx, rtx, bool);

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif

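/* A worked example of the predicates above (illustrative numbers only;
   every target defines its own MOVE_MAX_PIECES and MOVE_RATIO): with
   MOVE_MAX_PIECES == 8 and MOVE_RATIO == 3, a well-aligned 16-byte copy
   costs two 8-byte moves, so MOVE_BY_PIECES_P is true and the copy is
   expanded inline, whereas a 64-byte copy would need eight moves and is
   left to a movmem pattern or a library call instead.  */
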
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movmem_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block sets.  */
enum insn_code setmem_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of three different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpstrn_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* Synchronization primitives.  */
enum insn_code sync_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
enum insn_code sync_compare_and_swap_cc[NUM_MACHINE_MODES];
enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
enum insn_code sync_lock_release[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
\f
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    REGNO (reg) = regno;

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}

/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
}
\f
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

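/* For instance, widening an SImode value to DImode uses a single
   sign- or zero-extension insn when the target provides one and
   otherwise copies the low word and fills the remaining words by
   hand, while narrowing DImode to SImode on a 32-bit target goes
   through the truncation paths below.  */
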
void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));


  gcc_assert (to_real == from_real);

  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      gcc_assert (GET_MODE_PRECISION (from_mode)
		  != GET_MODE_PRECISION (to_mode));

      if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
	tab = sext_optab;
      else
	tab = trunc_optab;

      /* Try converting directly if the insn is supported.  */

      code = tab->handlers[to_mode][from_mode].insn_code;
      if (code != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from,
			  tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
	  return;
	}

      /* Otherwise use a libcall.  */
      libcall = tab->handlers[to_mode][from_mode].libfunc;

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
			  tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
								       from)
			  : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }

  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      gcc_assert (trunc_optab->handlers[to_mode][full_mode].insn_code
		  != CODE_FOR_nothing);

      if (full_mode != from_mode)
	from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
		      to, from, UNKNOWN);
      return;
    }
  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      rtx new_from;
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      gcc_assert (sext_optab->handlers[full_mode][from_mode].insn_code
		  != CODE_FOR_nothing);

      if (to_mode == full_mode)
	{
	  emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
			  to, from, UNKNOWN);
	  return;
	}

      new_from = gen_reg_rtx (full_mode);
      emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
		      new_from, from, UNKNOWN);

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
      from = new_from;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (REG_P (to))
	    {
	      if (reg_overlap_mentioned_p (to, from))
		from = force_reg (from_mode, from);
	      emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	    }
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  gcc_assert (subword);

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_cst (NULL_TREE,
					GET_MODE_BITSIZE (to_mode)
					- GET_MODE_BITSIZE (from_mode));
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }

  /* Support special truncate insns for certain modes.  */
  if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
    {
      emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
		      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  gcc_unreachable ();
}

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */

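/* For instance, narrowing an SImode pseudo to QImode normally just
   returns a lowpart SUBREG of X, while a genuine widening allocates
   a fresh pseudo and defers to convert_move to emit the extension.  */
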
rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (REG_P (x)
			  && (! HARD_REGISTER_P (x)
			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return gen_int_mode (val, mode);
	}

      return gen_lowpart (mode, x);
    }

  /* Converting from an integer constant into MODE is always equivalent to
     a subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
\f
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))

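/* E.g. with a 64-bit HOST_WIDE_INT the immediate-constant limit above
   is 16 bytes, so STORE_MAX_PIECES equals MOVE_MAX_PIECES on any
   target whose MOVE_MAX_PIECES is 16 or smaller.  */
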
/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
		    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}

/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

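/* For instance, a mempcpy-style expansion passes ENDP == 1 and gets
   back the address one past the last byte written, while a plain
   memcpy-style copy passes ENDP == 0 and simply receives TO.  */
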
rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
		unsigned int align, int endp)
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align, max_size) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data.len);

  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
	{
	  if (endp == 2)
	    {
	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
	      else
		data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
								-1));
	    }
	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
					   data.offset);
	}
      else
	{
	  if (endp == 2)
	    --data.offset;
	  to1 = adjust_address (data.to, QImode, data.offset);
	}
      return to1;
    }
  else
    return data.to;
}

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

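/* For example, if the widest usable piece is a 4-byte SImode move and
   the operands are fully aligned, l == 11 costs 4 insns: two SImode
   moves, one HImode move and one QImode move.  */
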
static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
		       unsigned int max_size)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  enum machine_mode tmode;

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode tmode, xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  gcc_assert (!l);
  return n_insns;
}

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
		  struct move_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  gcc_unreachable ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
\f
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

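/* For example, an ordinary aggregate assignment is expanded with
   BLOCK_OP_NORMAL, while copying an argument into its stack slot
   during a call uses BLOCK_OP_CALL_PARM, which only allows the memcpy
   libcall when it cannot clobber outgoing arguments (see
   block_move_libcall_safe_for_call_parm below).  */
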
rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
    case BLOCK_OP_TAILCALL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      gcc_unreachable ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  gcc_assert (MEM_P (x));
  gcc_assert (MEM_P (y));
  gcc_assert (size);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      if (INTVAL (size) == 0)
	return 0;

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size,
					  method == BLOCK_OP_TAILCALL);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}

/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
  {
    tree fn = emit_block_move_libcall_fn (false);
    (void) fn;
    if (REG_PARM_STACK_SPACE (fn) != 0)
      return false;
  }
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
      {
	enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
	rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
	if (!tmp || !REG_P (tmp))
	  return false;
	if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
	  return false;
	FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
      }
  }
  return true;
}

/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align)
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movmem_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  */
	  && ((GET_CODE (size) == CONST_INT
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
	      || (*pred) (x, BLKmode))
	  && ((pred = insn_data[(int) code].operand[1].predicate) == 0
	      || (*pred) (y, BLKmode))
	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
	      || (*pred) (opalign, VOIDmode)))
	{
	  rtx op2;
	  rtx last = get_last_insn ();
	  rtx pat;

	  op2 = convert_to_mode (mode, size, 1);
	  pred = insn_data[(int) code].operand[2].predicate;
	  if (pred != 0 && ! (*pred) (op2, mode))
	    op2 = copy_to_mode_reg (mode, op2);

	  /* ??? When called via emit_block_move_for_call, it'd be
	     nice if there were some way to inform the backend, so
	     that it doesn't fail the expansion because it thinks
	     emitting the libcall would be more efficient.  */

	  pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	  if (pat)
	    {
	      emit_insn (pat);
	      volatile_ok = save_volatile_ok;
	      return true;
	    }
	  else
	    delete_insns_since (last);
	}
    }

  volatile_ok = save_volatile_ok;
  return false;
}

/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

static rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
{
  rtx dst_addr, src_addr;
  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy the addresses of DST and SRC and SIZE into new
     pseudos.  We can then place those new pseudos into a VAR_DECL and
     use them later.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
  arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
		      call_expr, arg_list, NULL_TREE);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);

  return retval;
}

/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
				       const_ptr_type_node, sizetype,
				       NULL_TREE);

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_move_fn, asmspec);
}

static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}

/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

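/* In C terms, the loop emitted below is roughly:

     iter = 0;
     goto cmp;
   top:
     ((char *) x)[iter] = ((char *) y)[iter];
     iter++;
   cmp:
     if (iter < size)   -- an unsigned comparison --
       goto top;  */
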
static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
			  unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
			   true, top_label);
}
\f
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				    GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      gcc_assert (tem);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}

/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

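/* Such a group might look like

     (parallel [(expr_list (reg:DI 101) (const_int 0))
		(expr_list (reg:DI 102) (const_int 8))])

   where each element pairs a register with its byte offset within the
   whole value; the clone has the same shape but fresh pseudos.  */
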
1563rtx
502b8322 1564gen_group_rtx (rtx orig)
084a1106
JDA
1565{
1566 int i, length;
1567 rtx *tmps;
1568
5b0264cb 1569 gcc_assert (GET_CODE (orig) == PARALLEL);
084a1106
JDA
1570
1571 length = XVECLEN (orig, 0);
703ad42b 1572 tmps = alloca (sizeof (rtx) * length);
084a1106
JDA
1573
1574 /* Skip a NULL entry in first slot. */
1575 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1576
1577 if (i)
1578 tmps[0] = 0;
1579
1580 for (; i < length; i++)
1581 {
1582 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1583 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1584
1585 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1586 }
1587
1588 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1589}
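/* Illustrative sketch, not part of the original source: the register
   groups handled by this function (and by emit_group_load/store below)
   are PARALLELs pairing each register with its byte offset within the
   value, e.g. for an 8-byte value split across two hypothetical SImode
   hard registers 3 and 4:

     rtx elems[2];
     elems[0] = gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_REG (SImode, 3),
                                   GEN_INT (0));
     elems[1] = gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_REG (SImode, 4),
                                   GEN_INT (4));
     group = gen_rtx_PARALLEL (BLKmode, gen_rtvec_v (2, elems));

   gen_group_rtx clones such a group, replacing the hard registers with
   fresh pseudos of the same modes.  */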
1590
27e29549
RH
1591/* A subroutine of emit_group_load. Arguments as for emit_group_load,
1592 except that values are placed in TMPS[i], and must later be moved
daa956d0 1593 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
fffa9c1d 1594
27e29549
RH
1595static void
1596emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
fffa9c1d 1597{
27e29549 1598 rtx src;
aac5cc16 1599 int start, i;
7ef7000b 1600 enum machine_mode m = GET_MODE (orig_src);
fffa9c1d 1601
5b0264cb 1602 gcc_assert (GET_CODE (dst) == PARALLEL);
fffa9c1d 1603
f2978871
AM
1604 if (m != VOIDmode
1605 && !SCALAR_INT_MODE_P (m)
1606 && !MEM_P (orig_src)
1607 && GET_CODE (orig_src) != CONCAT)
782fa603
AH
1608 {
1609 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1610 if (imode == BLKmode)
1611 src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
1612 else
1613 src = gen_reg_rtx (imode);
1614 if (imode != BLKmode)
1615 src = gen_lowpart (GET_MODE (orig_src), src);
1616 emit_move_insn (src, orig_src);
1617 /* ...and back again. */
1618 if (imode != BLKmode)
1619 src = gen_lowpart (imode, src);
27e29549 1620 emit_group_load_1 (tmps, dst, src, type, ssize);
782fa603
AH
1621 return;
1622 }
1623
fffa9c1d
JW
1624 /* Check for a NULL entry, used to indicate that the parameter goes
1625 both on the stack and in registers. */
aac5cc16
RH
1626 if (XEXP (XVECEXP (dst, 0, 0), 0))
1627 start = 0;
fffa9c1d 1628 else
aac5cc16
RH
1629 start = 1;
1630
aac5cc16
RH
1631 /* Process the pieces. */
1632 for (i = start; i < XVECLEN (dst, 0); i++)
1633 {
1634 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
770ae6cc
RK
1635 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1636 unsigned int bytelen = GET_MODE_SIZE (mode);
aac5cc16
RH
1637 int shift = 0;
1638
1639 /* Handle trailing fragments that run over the size of the struct. */
8752c357 1640 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
aac5cc16 1641 {
6e985040
AM
1642 /* Arrange to shift the fragment to where it belongs.
1643 extract_bit_field loads to the lsb of the reg. */
1644 if (
1645#ifdef BLOCK_REG_PADDING
1646 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1647 == (BYTES_BIG_ENDIAN ? upward : downward)
1648#else
1649 BYTES_BIG_ENDIAN
1650#endif
1651 )
1652 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
aac5cc16 1653 bytelen = ssize - bytepos;
5b0264cb 1654 gcc_assert (bytelen > 0);
aac5cc16
RH
1655 }
1656
f3ce87a9
DE
1657 /* If we won't be loading directly from memory, protect the real source
1658 from strange tricks we might play; but make sure that the source can
1659 be loaded directly into the destination. */
1660 src = orig_src;
3c0cb5de 1661 if (!MEM_P (orig_src)
f3ce87a9
DE
1662 && (!CONSTANT_P (orig_src)
1663 || (GET_MODE (orig_src) != mode
1664 && GET_MODE (orig_src) != VOIDmode)))
1665 {
1666 if (GET_MODE (orig_src) == VOIDmode)
1667 src = gen_reg_rtx (mode);
1668 else
1669 src = gen_reg_rtx (GET_MODE (orig_src));
04050c69 1670
f3ce87a9
DE
1671 emit_move_insn (src, orig_src);
1672 }
1673
aac5cc16 1674 /* Optimize the access just a bit. */
3c0cb5de 1675 if (MEM_P (src)
6e985040
AM
1676 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1677 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
729a2125 1678 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
aac5cc16
RH
1679 && bytelen == GET_MODE_SIZE (mode))
1680 {
1681 tmps[i] = gen_reg_rtx (mode);
f4ef873c 1682 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
fffa9c1d 1683 }
d20b1190
EB
1684 else if (COMPLEX_MODE_P (mode)
1685 && GET_MODE (src) == mode
1686 && bytelen == GET_MODE_SIZE (mode))
1687 /* Let emit_move_complex do the bulk of the work. */
1688 tmps[i] = src;
7c4a6db0
JW
1689 else if (GET_CODE (src) == CONCAT)
1690 {
015b1ad1
JDA
1691 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1692 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1693
1694 if ((bytepos == 0 && bytelen == slen0)
1695 || (bytepos != 0 && bytepos + bytelen <= slen))
cbb92744 1696 {
015b1ad1
JDA
1697 /* The following assumes that the concatenated objects all
1698 have the same size. In this case, a simple calculation
1699 can be used to determine the object and the bit field
1700 to be extracted. */
1701 tmps[i] = XEXP (src, bytepos / slen0);
cbb92744 1702 if (! CONSTANT_P (tmps[i])
f8cfc6aa 1703 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
cbb92744 1704 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
015b1ad1 1705 (bytepos % slen0) * BITS_PER_UNIT,
b3520980 1706 1, NULL_RTX, mode, mode);
cbb92744 1707 }
5b0264cb 1708 else
58f69841 1709 {
5b0264cb 1710 rtx mem;
f58c00e3 1711
5b0264cb
NS
1712 gcc_assert (!bytepos);
1713 mem = assign_stack_temp (GET_MODE (src), slen, 0);
58f69841 1714 emit_move_insn (mem, src);
f58c00e3
EB
1715 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1716 0, 1, NULL_RTX, mode, mode);
58f69841 1717 }
7c4a6db0 1718 }
9c0631a7
AH
1719 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1720 SIMD register, which is currently broken. Until we get GCC
1721 to emit proper RTL for these cases, let's dump to memory. */
1722 else if (VECTOR_MODE_P (GET_MODE (dst))
f8cfc6aa 1723 && REG_P (src))
9c0631a7
AH
1724 {
1725 int slen = GET_MODE_SIZE (GET_MODE (src));
1726 rtx mem;
1727
1728 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1729 emit_move_insn (mem, src);
1730 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1731 }
d3a16cbd
FJ
1732 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1733 && XVECLEN (dst, 0) > 1)
1734 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
f3ce87a9 1735 else if (CONSTANT_P (src)
f8cfc6aa 1736 || (REG_P (src) && GET_MODE (src) == mode))
2ee5437b 1737 tmps[i] = src;
fffa9c1d 1738 else
19caa751
RK
1739 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1740 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
b3520980 1741 mode, mode);
fffa9c1d 1742
6e985040 1743 if (shift)
09b52670 1744 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
7d60be94 1745 build_int_cst (NULL_TREE, shift), tmps[i], 0);
fffa9c1d 1746 }
27e29549
RH
1747}
1748
1749/* Emit code to move a block SRC of type TYPE to a block DST,
1750 where DST is non-consecutive registers represented by a PARALLEL.
1751 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1752 if not known. */
1753
1754void
1755emit_group_load (rtx dst, rtx src, tree type, int ssize)
1756{
1757 rtx *tmps;
1758 int i;
1759
1760 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1761 emit_group_load_1 (tmps, dst, src, type, ssize);
19caa751 1762
aac5cc16 1763 /* Copy the extracted pieces into the proper (probable) hard regs. */
27e29549
RH
1764 for (i = 0; i < XVECLEN (dst, 0); i++)
1765 {
1766 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1767 if (d == NULL)
1768 continue;
1769 emit_move_insn (d, tmps[i]);
1770 }
1771}
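/* Illustrative sketch, not part of the original source: given a group
   DST shaped like the two-register PARALLEL shown after gen_group_rtx
   above, and a BLKmode MEM SRC holding an 8-byte structure, the
   structure is loaded into the group with

     emit_group_load (dst, src, TREE_TYPE (exp), 8);

   where EXP is an assumed tree whose type describes the value and 8 is
   the known size in bytes (SSIZE).  */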
1772
1773/* Similar, but load SRC into new pseudos in a format that looks like
1774 PARALLEL. This can later be fed to emit_group_move to get things
1775 in the right place. */
1776
1777rtx
1778emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1779{
1780 rtvec vec;
1781 int i;
1782
1783 vec = rtvec_alloc (XVECLEN (parallel, 0));
1784 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1785
1786 /* Convert the vector to look just like the original PARALLEL, except
1787 with the computed values. */
1788 for (i = 0; i < XVECLEN (parallel, 0); i++)
1789 {
1790 rtx e = XVECEXP (parallel, 0, i);
1791 rtx d = XEXP (e, 0);
1792
1793 if (d)
1794 {
1795 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1796 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1797 }
1798 RTVEC_ELT (vec, i) = e;
1799 }
1800
1801 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
fffa9c1d
JW
1802}
1803
084a1106
JDA
1804/* Emit code to move a block SRC to block DST, where SRC and DST are
1805 non-consecutive groups of registers, each represented by a PARALLEL. */
1806
1807void
502b8322 1808emit_group_move (rtx dst, rtx src)
084a1106
JDA
1809{
1810 int i;
1811
5b0264cb
NS
1812 gcc_assert (GET_CODE (src) == PARALLEL
1813 && GET_CODE (dst) == PARALLEL
1814 && XVECLEN (src, 0) == XVECLEN (dst, 0));
084a1106
JDA
1815
1816 /* Skip first entry if NULL. */
1817 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1818 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1819 XEXP (XVECEXP (src, 0, i), 0));
1820}
1821
27e29549
RH
1822/* Move a group of registers represented by a PARALLEL into pseudos. */
1823
1824rtx
1825emit_group_move_into_temps (rtx src)
1826{
1827 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1828 int i;
1829
1830 for (i = 0; i < XVECLEN (src, 0); i++)
1831 {
1832 rtx e = XVECEXP (src, 0, i);
1833 rtx d = XEXP (e, 0);
1834
1835 if (d)
1836 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1837 RTVEC_ELT (vec, i) = e;
1838 }
1839
1840 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1841}
1842
6e985040
AM
1843/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1844 where SRC is non-consecutive registers represented by a PARALLEL.
1845 SSIZE represents the total size of block ORIG_DST, or -1 if not
1846 known. */
fffa9c1d
JW
1847
1848void
6e985040 1849emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
fffa9c1d 1850{
aac5cc16
RH
1851 rtx *tmps, dst;
1852 int start, i;
7ef7000b 1853 enum machine_mode m = GET_MODE (orig_dst);
fffa9c1d 1854
5b0264cb 1855 gcc_assert (GET_CODE (src) == PARALLEL);
fffa9c1d 1856
0da34ce4
RH
1857 if (!SCALAR_INT_MODE_P (m)
1858 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
782fa603
AH
1859 {
1860 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1861 if (imode == BLKmode)
1862 dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
1863 else
1864 dst = gen_reg_rtx (imode);
1865 emit_group_store (dst, src, type, ssize);
1866 if (imode != BLKmode)
1867 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1868 emit_move_insn (orig_dst, dst);
1869 return;
1870 }
1871
fffa9c1d
JW
1872 /* Check for a NULL entry, used to indicate that the parameter goes
1873 both on the stack and in registers. */
aac5cc16
RH
1874 if (XEXP (XVECEXP (src, 0, 0), 0))
1875 start = 0;
fffa9c1d 1876 else
aac5cc16
RH
1877 start = 1;
1878
703ad42b 1879 tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));
fffa9c1d 1880
aac5cc16
RH
1881 /* Copy the (probable) hard regs into pseudos. */
1882 for (i = start; i < XVECLEN (src, 0); i++)
fffa9c1d 1883 {
aac5cc16
RH
1884 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1885 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1886 emit_move_insn (tmps[i], reg);
1887 }
fffa9c1d 1888
aac5cc16
RH
1889 /* If we won't be storing directly into memory, protect the real destination
1890 from strange tricks we might play. */
1891 dst = orig_dst;
10a9f2be
JW
1892 if (GET_CODE (dst) == PARALLEL)
1893 {
1894 rtx temp;
1895
1896 /* We can get a PARALLEL dst if there is a conditional expression in
1897 a return statement. In that case, the dst and src are the same,
1898 so no action is necessary. */
1899 if (rtx_equal_p (dst, src))
1900 return;
1901
1902 /* It is unclear if we can ever reach here, but we may as well handle
1903 it. Allocate a temporary, and split this into a store/load to/from
1904 the temporary. */
1905
1906 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
6e985040
AM
1907 emit_group_store (temp, src, type, ssize);
1908 emit_group_load (dst, temp, type, ssize);
10a9f2be
JW
1909 return;
1910 }
3c0cb5de 1911 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
aac5cc16
RH
1912 {
1913 dst = gen_reg_rtx (GET_MODE (orig_dst));
1914 /* Make life a bit easier for combine. */
8ae91fc0 1915 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
aac5cc16 1916 }
aac5cc16
RH
1917
1918 /* Process the pieces. */
1919 for (i = start; i < XVECLEN (src, 0); i++)
1920 {
770ae6cc 1921 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
aac5cc16 1922 enum machine_mode mode = GET_MODE (tmps[i]);
770ae6cc 1923 unsigned int bytelen = GET_MODE_SIZE (mode);
6ddae612 1924 rtx dest = dst;
aac5cc16
RH
1925
1926 /* Handle trailing fragments that run over the size of the struct. */
8752c357 1927 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
71bc0330 1928 {
6e985040
AM
1929 /* store_bit_field always takes its value from the lsb.
1930 Move the fragment to the lsb if it's not already there. */
1931 if (
1932#ifdef BLOCK_REG_PADDING
1933 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
1934 == (BYTES_BIG_ENDIAN ? upward : downward)
1935#else
1936 BYTES_BIG_ENDIAN
1937#endif
1938 )
aac5cc16
RH
1939 {
1940 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
09b52670 1941 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
7d60be94
NS
1942 build_int_cst (NULL_TREE, shift),
1943 tmps[i], 0);
aac5cc16
RH
1944 }
1945 bytelen = ssize - bytepos;
71bc0330 1946 }
fffa9c1d 1947
6ddae612
JJ
1948 if (GET_CODE (dst) == CONCAT)
1949 {
1950 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
1951 dest = XEXP (dst, 0);
1952 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
1953 {
1954 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
1955 dest = XEXP (dst, 1);
1956 }
5b0264cb 1957 else
0d446150 1958 {
5b0264cb 1959 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
0d446150
JH
1960 dest = assign_stack_temp (GET_MODE (dest),
1961 GET_MODE_SIZE (GET_MODE (dest)), 0);
1962 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
1963 tmps[i]);
1964 dst = dest;
1965 break;
1966 }
6ddae612
JJ
1967 }
1968
aac5cc16 1969 /* Optimize the access just a bit. */
3c0cb5de 1970 if (MEM_P (dest)
6e985040
AM
1971 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
1972 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
729a2125 1973 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
aac5cc16 1974 && bytelen == GET_MODE_SIZE (mode))
6ddae612 1975 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
aac5cc16 1976 else
6ddae612 1977 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
b3520980 1978 mode, tmps[i]);
fffa9c1d 1979 }
729a2125 1980
aac5cc16 1981 /* Copy from the pseudo into the (probable) hard reg. */
0d446150 1982 if (orig_dst != dst)
aac5cc16 1983 emit_move_insn (orig_dst, dst);
fffa9c1d
JW
1984}
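/* Illustrative sketch, not part of the original source: the converse of
   the emit_group_load example above.  To copy the same two-register
   group SRC back into an 8-byte BLKmode MEM DST:

     emit_group_store (dst, src, TREE_TYPE (exp), 8);

   EXP is again an assumed tree describing the value's type.  */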
1985
c36fce9a
GRK
1986/* Generate code to copy a BLKmode object of TYPE out of a
1987 set of registers starting with SRCREG into TGTBLK. If TGTBLK
1988 is null, a stack temporary is created. TGTBLK is returned.
1989
c988af2b
RS
1990 The purpose of this routine is to handle functions that return
1991 BLKmode structures in registers. Some machines (the PA for example)
1992 want to return all small structures in registers regardless of the
1993 structure's alignment. */
c36fce9a
GRK
1994
1995rtx
502b8322 1996copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
c36fce9a 1997{
19caa751
RK
1998 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
1999 rtx src = NULL, dst = NULL;
2000 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
c988af2b 2001 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
19caa751
RK
2002
2003 if (tgtblk == 0)
2004 {
1da68f56
RK
2005 tgtblk = assign_temp (build_qualified_type (type,
2006 (TYPE_QUALS (type)
2007 | TYPE_QUAL_CONST)),
2008 0, 1, 1);
19caa751
RK
2009 preserve_temp_slots (tgtblk);
2010 }
3a94c984 2011
1ed1b4fb 2012 /* This code assumes srcreg is at least a full word. If it isn't, copy it
9ac3e73b 2013 into a new pseudo which is a full word. */
0d7839da 2014
19caa751
RK
2015 if (GET_MODE (srcreg) != BLKmode
2016 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
8df83eae 2017 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
19caa751 2018
c988af2b
RS
2019 /* If the structure doesn't take up a whole number of words, see whether
2020 SRCREG is padded on the left or on the right. If it's on the left,
2021 set PADDING_CORRECTION to the number of bits to skip.
2022
2023 In most ABIs, the structure will be returned at the least significant end of
2024 the register, which translates to right padding on little-endian
2025 targets and left padding on big-endian targets. The opposite
2026 holds if the structure is returned at the most significant
2027 end of the register. */
2028 if (bytes % UNITS_PER_WORD != 0
2029 && (targetm.calls.return_in_msb (type)
2030 ? !BYTES_BIG_ENDIAN
2031 : BYTES_BIG_ENDIAN))
2032 padding_correction
19caa751
RK
2033 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2034
2035 /* Copy the structure BITSIZE bits at a time.
3a94c984 2036
19caa751
RK
2037 We could probably emit more efficient code for machines which do not use
2038 strict alignment, but it doesn't seem worth the effort at the current
2039 time. */
c988af2b 2040 for (bitpos = 0, xbitpos = padding_correction;
19caa751
RK
2041 bitpos < bytes * BITS_PER_UNIT;
2042 bitpos += bitsize, xbitpos += bitsize)
2043 {
3a94c984 2044 /* We need a new source operand each time xbitpos is on a
c988af2b 2045 word boundary and when xbitpos == padding_correction
19caa751
RK
2046 (the first time through). */
2047 if (xbitpos % BITS_PER_WORD == 0
c988af2b 2048 || xbitpos == padding_correction)
b47f8cfc
JH
2049 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2050 GET_MODE (srcreg));
19caa751
RK
2051
2052 /* We need a new destination operand each time bitpos is on
2053 a word boundary. */
2054 if (bitpos % BITS_PER_WORD == 0)
2055 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
3a94c984 2056
19caa751
RK
2057 /* Use xbitpos for the source extraction (right justified) and
2058 bitpos for the destination store (left justified). */
2059 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2060 extract_bit_field (src, bitsize,
2061 xbitpos % BITS_PER_WORD, 1,
b3520980 2062 NULL_RTX, word_mode, word_mode));
19caa751
RK
2063 }
2064
2065 return tgtblk;
c36fce9a
GRK
2066}
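/* Illustrative sketch, not part of the original source: a typical use
   is expanding a call to a function that returns a small structure in a
   register.  Assuming RESULT_REG holds the hard return register and
   TYPE is the structure's type (both names hypothetical here):

     rtx blk = copy_blkmode_from_reg (NULL_RTX, result_reg, type);

   Passing NULL_RTX for TGTBLK makes the routine allocate a stack
   temporary and return that.  */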
2067
94b25f81
RK
2068/* Add a USE expression for REG to the (possibly empty) list pointed
2069 to by CALL_FUSAGE. REG must denote a hard register. */
bbf6f052
RK
2070
2071void
502b8322 2072use_reg (rtx *call_fusage, rtx reg)
b3f8cf4a 2073{
5b0264cb
NS
2074 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2075
b3f8cf4a 2076 *call_fusage
38a448ca
RH
2077 = gen_rtx_EXPR_LIST (VOIDmode,
2078 gen_rtx_USE (VOIDmode, reg), *call_fusage);
b3f8cf4a
RK
2079}
2080
94b25f81
RK
2081/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2082 starting at REGNO. All of these registers must be hard registers. */
b3f8cf4a
RK
2083
2084void
502b8322 2085use_regs (rtx *call_fusage, int regno, int nregs)
bbf6f052 2086{
0304dfbb 2087 int i;
bbf6f052 2088
5b0264cb 2089 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
0304dfbb
DE
2090
2091 for (i = 0; i < nregs; i++)
e50126e8 2092 use_reg (call_fusage, regno_reg_rtx[regno + i]);
bbf6f052 2093}
fffa9c1d
JW
2094
2095/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2096 PARALLEL REGS. This is for calls that pass values in multiple
2097 non-contiguous locations. The Irix 6 ABI has examples of this. */
2098
2099void
502b8322 2100use_group_regs (rtx *call_fusage, rtx regs)
fffa9c1d
JW
2101{
2102 int i;
2103
6bd35f86
DE
2104 for (i = 0; i < XVECLEN (regs, 0); i++)
2105 {
2106 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
fffa9c1d 2107
6bd35f86
DE
2108 /* A NULL entry means the parameter goes both on the stack and in
2109 registers. This can also be a MEM for targets that pass values
2110 partially on the stack and partially in registers. */
f8cfc6aa 2111 if (reg != 0 && REG_P (reg))
6bd35f86
DE
2112 use_reg (call_fusage, reg);
2113 }
fffa9c1d 2114}
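/* Illustrative sketch, not part of the original source: the call
   expander builds up a CALL_INSN_FUNCTION_USAGE list by starting from
   an empty list and adding one entry per argument location, for example
   (the register number and PARALLEL_ARG are hypothetical):

     rtx call_fusage = NULL_RTX;
     use_reg (&call_fusage, gen_rtx_REG (SImode, 4));
     use_group_regs (&call_fusage, parallel_arg);

   The resulting list is later attached to the CALL_INSN.  */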
bbf6f052 2115\f
57814e5e 2116
cf5124f6
RS
2117/* Determine whether the LEN bytes generated by CONSTFUN can be
2118 stored to memory using several move instructions. CONSTFUNDATA is
2119 a pointer which will be passed as argument in every CONSTFUN call.
2120 ALIGN is maximum alignment we can assume. Return nonzero if a
2121 call to store_by_pieces should succeed. */
2122
57814e5e 2123int
502b8322
AJ
2124can_store_by_pieces (unsigned HOST_WIDE_INT len,
2125 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2126 void *constfundata, unsigned int align)
57814e5e 2127{
45d78e7f
JJ
2128 unsigned HOST_WIDE_INT l;
2129 unsigned int max_size;
57814e5e
JJ
2130 HOST_WIDE_INT offset = 0;
2131 enum machine_mode mode, tmode;
2132 enum insn_code icode;
2133 int reverse;
2134 rtx cst;
2135
2c430630
RS
2136 if (len == 0)
2137 return 1;
2138
4977bab6 2139 if (! STORE_BY_PIECES_P (len, align))
57814e5e
JJ
2140 return 0;
2141
f64d6991
DE
2142 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2143 if (align >= GET_MODE_ALIGNMENT (tmode))
2144 align = GET_MODE_ALIGNMENT (tmode);
2145 else
2146 {
2147 enum machine_mode xmode;
2148
2149 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2150 tmode != VOIDmode;
2151 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2152 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2153 || SLOW_UNALIGNED_ACCESS (tmode, align))
2154 break;
2155
2156 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2157 }
57814e5e
JJ
2158
2159 /* We would first store what we can in the largest integer mode, then go to
2160 successively smaller modes. */
2161
2162 for (reverse = 0;
2163 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2164 reverse++)
2165 {
2166 l = len;
2167 mode = VOIDmode;
cf5124f6 2168 max_size = STORE_MAX_PIECES + 1;
57814e5e
JJ
2169 while (max_size > 1)
2170 {
2171 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2172 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2173 if (GET_MODE_SIZE (tmode) < max_size)
2174 mode = tmode;
2175
2176 if (mode == VOIDmode)
2177 break;
2178
2179 icode = mov_optab->handlers[(int) mode].insn_code;
2180 if (icode != CODE_FOR_nothing
2181 && align >= GET_MODE_ALIGNMENT (mode))
2182 {
2183 unsigned int size = GET_MODE_SIZE (mode);
2184
2185 while (l >= size)
2186 {
2187 if (reverse)
2188 offset -= size;
2189
2190 cst = (*constfun) (constfundata, offset, mode);
2191 if (!LEGITIMATE_CONSTANT_P (cst))
2192 return 0;
2193
2194 if (!reverse)
2195 offset += size;
2196
2197 l -= size;
2198 }
2199 }
2200
2201 max_size = GET_MODE_SIZE (mode);
2202 }
2203
2204 /* The code above should have handled everything. */
5b0264cb 2205 gcc_assert (!l);
57814e5e
JJ
2206 }
2207
2208 return 1;
2209}
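/* Illustrative sketch, not part of the original source: CONSTFUN must
   return, for a given byte OFFSET and MODE, the constant rtx to be
   stored there.  A hypothetical callback that broadcasts a single byte
   value, similar in spirit to what the memset expansion does, might
   look like

     static rtx
     repeated_byte_cst (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                        enum machine_mode mode)
     {
       unsigned char c = *(unsigned char *) data;
       unsigned HOST_WIDE_INT val = 0;
       unsigned int i;

       for (i = 0; i < GET_MODE_SIZE (mode); i++)
         val = (val << 8) | c;
       return gen_int_mode (val, mode);
     }

   A caller would then check can_store_by_pieces (len, repeated_byte_cst,
   &c, align) before committing to this strategy.  */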
2210
2211/* Generate several move instructions to store LEN bytes generated by
2212 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2213 pointer which will be passed as argument in every CONSTFUN call.
8fd3cf4e
JJ
2214 ALIGN is maximum alignment we can assume.
2215 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2216 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2217 stpcpy. */
57814e5e 2218
8fd3cf4e 2219rtx
502b8322
AJ
2220store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2221 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2222 void *constfundata, unsigned int align, int endp)
57814e5e
JJ
2223{
2224 struct store_by_pieces data;
2225
2c430630
RS
2226 if (len == 0)
2227 {
5b0264cb 2228 gcc_assert (endp != 2);
2c430630
RS
2229 return to;
2230 }
2231
5b0264cb 2232 gcc_assert (STORE_BY_PIECES_P (len, align));
57814e5e
JJ
2233 data.constfun = constfun;
2234 data.constfundata = constfundata;
2235 data.len = len;
2236 data.to = to;
2237 store_by_pieces_1 (&data, align);
8fd3cf4e
JJ
2238 if (endp)
2239 {
2240 rtx to1;
2241
5b0264cb 2242 gcc_assert (!data.reverse);
8fd3cf4e
JJ
2243 if (data.autinc_to)
2244 {
2245 if (endp == 2)
2246 {
2247 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2248 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2249 else
2250 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2251 -1));
2252 }
2253 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2254 data.offset);
2255 }
2256 else
2257 {
2258 if (endp == 2)
2259 --data.offset;
2260 to1 = adjust_address (data.to, QImode, data.offset);
2261 }
2262 return to1;
2263 }
2264 else
2265 return data.to;
57814e5e
JJ
2266}
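/* Illustrative sketch, not part of the original source: building on the
   hypothetical REPEATED_BYTE_CST callback shown above, a memset-like
   expansion with a known length could be

     if (can_store_by_pieces (len, repeated_byte_cst, &c, align))
       store_by_pieces (dest_mem, len, repeated_byte_cst, &c, align, 0);

   DEST_MEM is an assumed BLKmode MEM; ENDP of 0 means the original
   DEST_MEM is returned rather than a pointer past the stored bytes.  */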
2267
19caa751 2268/* Generate several move instructions to clear LEN bytes of block TO. (A MEM
ad76cef8 2269 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
9de08200
RK
2270
2271static void
342e2b74 2272clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
9de08200 2273{
57814e5e
JJ
2274 struct store_by_pieces data;
2275
2c430630
RS
2276 if (len == 0)
2277 return;
2278
57814e5e 2279 data.constfun = clear_by_pieces_1;
df4ae160 2280 data.constfundata = NULL;
57814e5e
JJ
2281 data.len = len;
2282 data.to = to;
2283 store_by_pieces_1 (&data, align);
2284}
2285
2286/* Callback routine for clear_by_pieces.
2287 Return const0_rtx unconditionally. */
2288
2289static rtx
502b8322
AJ
2290clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2291 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2292 enum machine_mode mode ATTRIBUTE_UNUSED)
57814e5e
JJ
2293{
2294 return const0_rtx;
2295}
2296
2297/* Subroutine of clear_by_pieces and store_by_pieces.
2298 Generate several move instructions to store LEN bytes of block TO. (A MEM
ad76cef8 2299 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
57814e5e
JJ
2300
2301static void
502b8322
AJ
2302store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2303 unsigned int align ATTRIBUTE_UNUSED)
57814e5e
JJ
2304{
2305 rtx to_addr = XEXP (data->to, 0);
45d78e7f 2306 unsigned int max_size = STORE_MAX_PIECES + 1;
fbe1758d
AM
2307 enum machine_mode mode = VOIDmode, tmode;
2308 enum insn_code icode;
9de08200 2309
57814e5e
JJ
2310 data->offset = 0;
2311 data->to_addr = to_addr;
2312 data->autinc_to
9de08200
RK
2313 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2314 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2315
57814e5e
JJ
2316 data->explicit_inc_to = 0;
2317 data->reverse
9de08200 2318 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
57814e5e
JJ
2319 if (data->reverse)
2320 data->offset = data->len;
9de08200 2321
57814e5e 2322 /* If storing requires more than two move insns,
9de08200
RK
2323 copy addresses to registers (to make displacements shorter)
2324 and use post-increment if available. */
57814e5e 2325 if (!data->autinc_to
45d78e7f 2326 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
9de08200 2327 {
3a94c984 2328 /* Determine the main mode we'll be using. */
fbe1758d
AM
2329 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2330 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2331 if (GET_MODE_SIZE (tmode) < max_size)
2332 mode = tmode;
2333
57814e5e 2334 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
9de08200 2335 {
57814e5e
JJ
2336 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2337 data->autinc_to = 1;
2338 data->explicit_inc_to = -1;
9de08200 2339 }
3bdf5ad1 2340
57814e5e
JJ
2341 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2342 && ! data->autinc_to)
9de08200 2343 {
57814e5e
JJ
2344 data->to_addr = copy_addr_to_reg (to_addr);
2345 data->autinc_to = 1;
2346 data->explicit_inc_to = 1;
9de08200 2347 }
3bdf5ad1 2348
57814e5e
JJ
2349 if ( !data->autinc_to && CONSTANT_P (to_addr))
2350 data->to_addr = copy_addr_to_reg (to_addr);
9de08200
RK
2351 }
2352
f64d6991
DE
2353 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2354 if (align >= GET_MODE_ALIGNMENT (tmode))
2355 align = GET_MODE_ALIGNMENT (tmode);
2356 else
2357 {
2358 enum machine_mode xmode;
2359
2360 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2361 tmode != VOIDmode;
2362 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2363 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2364 || SLOW_UNALIGNED_ACCESS (tmode, align))
2365 break;
2366
2367 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2368 }
9de08200 2369
57814e5e 2370 /* First store what we can in the largest integer mode, then go to
9de08200
RK
2371 successively smaller modes. */
2372
2373 while (max_size > 1)
2374 {
9de08200
RK
2375 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2376 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2377 if (GET_MODE_SIZE (tmode) < max_size)
2378 mode = tmode;
2379
2380 if (mode == VOIDmode)
2381 break;
2382
2383 icode = mov_optab->handlers[(int) mode].insn_code;
19caa751 2384 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
57814e5e 2385 store_by_pieces_2 (GEN_FCN (icode), mode, data);
9de08200
RK
2386
2387 max_size = GET_MODE_SIZE (mode);
2388 }
2389
2390 /* The code above should have handled everything. */
5b0264cb 2391 gcc_assert (!data->len);
9de08200
RK
2392}
2393
57814e5e 2394/* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
9de08200
RK
2395 with move instructions for mode MODE. GENFUN is the gen_... function
2396 to make a move insn for that mode. DATA has all the other info. */
2397
2398static void
502b8322
AJ
2399store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2400 struct store_by_pieces *data)
9de08200 2401{
3bdf5ad1 2402 unsigned int size = GET_MODE_SIZE (mode);
57814e5e 2403 rtx to1, cst;
9de08200
RK
2404
2405 while (data->len >= size)
2406 {
3bdf5ad1
RK
2407 if (data->reverse)
2408 data->offset -= size;
9de08200 2409
3bdf5ad1 2410 if (data->autinc_to)
630036c6
JJ
2411 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2412 data->offset);
3a94c984 2413 else
f4ef873c 2414 to1 = adjust_address (data->to, mode, data->offset);
9de08200 2415
940da324 2416 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
57814e5e
JJ
2417 emit_insn (gen_add2_insn (data->to_addr,
2418 GEN_INT (-(HOST_WIDE_INT) size)));
9de08200 2419
57814e5e
JJ
2420 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2421 emit_insn ((*genfun) (to1, cst));
3bdf5ad1 2422
940da324 2423 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
9de08200 2424 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
9de08200 2425
3bdf5ad1
RK
2426 if (! data->reverse)
2427 data->offset += size;
9de08200
RK
2428
2429 data->len -= size;
2430 }
2431}
2432\f
19caa751 2433/* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
8ac61af7 2434 its length in bytes. */
e9a25f70
JL
2435
2436rtx
8148fe65 2437clear_storage (rtx object, rtx size, enum block_op_methods method)
bbf6f052 2438{
57aaef66
RH
2439 enum machine_mode mode = GET_MODE (object);
2440 unsigned int align;
e9a25f70 2441
8148fe65
JJ
2442 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2443
fcf1b822
RK
2444 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2445 just move a zero. Otherwise, do this a piece at a time. */
57aaef66 2446 if (mode != BLKmode
fcf1b822 2447 && GET_CODE (size) == CONST_INT
57aaef66 2448 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
bbf6f052 2449 {
57aaef66
RH
2450 rtx zero = CONST0_RTX (mode);
2451 if (zero != NULL)
2452 {
2453 emit_move_insn (object, zero);
2454 return NULL;
2455 }
2456
2457 if (COMPLEX_MODE_P (mode))
2458 {
2459 zero = CONST0_RTX (GET_MODE_INNER (mode));
2460 if (zero != NULL)
2461 {
2462 write_complex_part (object, zero, 0);
2463 write_complex_part (object, zero, 1);
2464 return NULL;
2465 }
2466 }
4ca79136
RH
2467 }
2468
57aaef66
RH
2469 if (size == const0_rtx)
2470 return NULL;
2471
2472 align = MEM_ALIGN (object);
2473
2474 if (GET_CODE (size) == CONST_INT
2475 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2476 clear_by_pieces (object, INTVAL (size), align);
57e84f18 2477 else if (set_storage_via_setmem (object, size, const0_rtx, align))
57aaef66
RH
2478 ;
2479 else
8148fe65
JJ
2480 return clear_storage_via_libcall (object, size,
2481 method == BLOCK_OP_TAILCALL);
57aaef66
RH
2482
2483 return NULL;
4ca79136
RH
2484}
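/* Illustrative sketch, not part of the original source: zeroing a
   16-byte BLKmode stack temporary (the size is arbitrary):

     rtx obj = assign_stack_temp (BLKmode, 16, 0);
     clear_storage (obj, GEN_INT (16), BLOCK_OP_NORMAL);

   Depending on size, alignment and target support this becomes a
   clear_by_pieces loop, a setmem insn, or a call to memset.  */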
2485
8f99553f 2486/* A subroutine of clear_storage. Expand a call to memset.
4ca79136 2487 Return the return value of memset, 0 otherwise. */
9de08200 2488
4ca79136 2489static rtx
8148fe65 2490clear_storage_via_libcall (rtx object, rtx size, bool tailcall)
4ca79136
RH
2491{
2492 tree call_expr, arg_list, fn, object_tree, size_tree;
2493 enum machine_mode size_mode;
2494 rtx retval;
9de08200 2495
ad76cef8
PB
2496 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2497 place those new pseudos into a VAR_DECL and use them later. */
52cf7115 2498
4ca79136 2499 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
52cf7115 2500
8f99553f 2501 size_mode = TYPE_MODE (sizetype);
4ca79136
RH
2502 size = convert_to_mode (size_mode, size, 1);
2503 size = copy_to_mode_reg (size_mode, size);
52cf7115 2504
4ca79136
RH
2505 /* It is incorrect to use the libcall calling conventions to call
2506 memset in this context. This could be a user call to memset and
2507 the user may wish to examine the return value from memset. For
2508 targets where libcalls and normal calls have different conventions
8f99553f 2509 for returning pointers, we could end up generating incorrect code. */
4bc973ae 2510
4ca79136 2511 object_tree = make_tree (ptr_type_node, object);
8f99553f 2512 size_tree = make_tree (sizetype, size);
4ca79136
RH
2513
2514 fn = clear_storage_libcall_fn (true);
2515 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
8f99553f 2516 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
4ca79136
RH
2517 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2518
2519 /* Now we have to build up the CALL_EXPR itself. */
2520 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
3244e67d
RS
2521 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2522 call_expr, arg_list, NULL_TREE);
8148fe65 2523 CALL_EXPR_TAILCALL (call_expr) = tailcall;
4ca79136
RH
2524
2525 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2526
8f99553f 2527 return retval;
4ca79136
RH
2528}
2529
2530/* A subroutine of clear_storage_via_libcall. Create the tree node
2531 for the function we use for block clears. The first time FOR_CALL
2532 is true, we call assemble_external. */
2533
2534static GTY(()) tree block_clear_fn;
66c60e67 2535
9661b15f 2536void
502b8322 2537init_block_clear_fn (const char *asmspec)
4ca79136 2538{
9661b15f 2539 if (!block_clear_fn)
4ca79136 2540 {
9661b15f
JJ
2541 tree fn, args;
2542
8f99553f
JM
2543 fn = get_identifier ("memset");
2544 args = build_function_type_list (ptr_type_node, ptr_type_node,
2545 integer_type_node, sizetype,
2546 NULL_TREE);
4ca79136
RH
2547
2548 fn = build_decl (FUNCTION_DECL, fn, args);
2549 DECL_EXTERNAL (fn) = 1;
2550 TREE_PUBLIC (fn) = 1;
2551 DECL_ARTIFICIAL (fn) = 1;
2552 TREE_NOTHROW (fn) = 1;
2553
2554 block_clear_fn = fn;
bbf6f052 2555 }
e9a25f70 2556
9661b15f 2557 if (asmspec)
0e6df31e 2558 set_user_assembler_name (block_clear_fn, asmspec);
9661b15f
JJ
2559}
2560
2561static tree
502b8322 2562clear_storage_libcall_fn (int for_call)
9661b15f
JJ
2563{
2564 static bool emitted_extern;
2565
2566 if (!block_clear_fn)
2567 init_block_clear_fn (NULL);
2568
4ca79136
RH
2569 if (for_call && !emitted_extern)
2570 {
2571 emitted_extern = true;
0e6df31e 2572 make_decl_rtl (block_clear_fn);
9661b15f 2573 assemble_external (block_clear_fn);
4ca79136 2574 }
bbf6f052 2575
9661b15f 2576 return block_clear_fn;
4ca79136 2577}
57e84f18
AS
2578\f
2579/* Expand a setmem pattern; return true if successful. */
2580
2581bool
2582set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align)
2583{
2584 /* Try the most limited insn first, because there's no point
2585 including more than one in the machine description unless
2586 the more limited one has some advantage. */
2587
2588 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2589 enum machine_mode mode;
2590
2591 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2592 mode = GET_MODE_WIDER_MODE (mode))
2593 {
2594 enum insn_code code = setmem_optab[(int) mode];
2595 insn_operand_predicate_fn pred;
2596
2597 if (code != CODE_FOR_nothing
2598 /* We don't need MODE to be narrower than
2599 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2600 the mode mask, as it is returned by the macro, it will
2601 definitely be less than the actual mode mask. */
2602 && ((GET_CODE (size) == CONST_INT
2603 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2604 <= (GET_MODE_MASK (mode) >> 1)))
2605 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2606 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2607 || (*pred) (object, BLKmode))
2608 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
2609 || (*pred) (opalign, VOIDmode)))
2610 {
9ed92901
AS
2611 rtx opsize, opchar;
2612 enum machine_mode char_mode;
57e84f18
AS
2613 rtx last = get_last_insn ();
2614 rtx pat;
2615
2616 opsize = convert_to_mode (mode, size, 1);
2617 pred = insn_data[(int) code].operand[1].predicate;
2618 if (pred != 0 && ! (*pred) (opsize, mode))
2619 opsize = copy_to_mode_reg (mode, opsize);
2620
9ed92901
AS
2621 opchar = val;
2622 char_mode = insn_data[(int) code].operand[2].mode;
2623 if (char_mode != VOIDmode)
2624 {
2625 opchar = convert_to_mode (char_mode, opchar, 1);
2626 pred = insn_data[(int) code].operand[2].predicate;
2627 if (pred != 0 && ! (*pred) (opchar, char_mode))
2628 opchar = copy_to_mode_reg (char_mode, opchar);
2629 }
57e84f18
AS
2630
2631 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
2632 if (pat)
2633 {
2634 emit_insn (pat);
2635 return true;
2636 }
2637 else
2638 delete_insns_since (last);
2639 }
2640 }
2641
2642 return false;
2643}
2644
4ca79136 2645\f
1466e387
RH
2646/* Write to one of the components of the complex value CPLX. Write VAL to
2647 the real part if IMAG_P is false, and the imaginary part if it's true. */
bbf6f052 2648
1466e387
RH
2649static void
2650write_complex_part (rtx cplx, rtx val, bool imag_p)
2651{
ddf4e03f
RH
2652 enum machine_mode cmode;
2653 enum machine_mode imode;
2654 unsigned ibitsize;
2655
1466e387 2656 if (GET_CODE (cplx) == CONCAT)
1466e387 2657 {
ddf4e03f
RH
2658 emit_move_insn (XEXP (cplx, imag_p), val);
2659 return;
2660 }
2661
2662 cmode = GET_MODE (cplx);
2663 imode = GET_MODE_INNER (cmode);
2664 ibitsize = GET_MODE_BITSIZE (imode);
bbf6f052 2665
7a31c801
DE
2666 /* For MEMs simplify_gen_subreg may generate an invalid new address
2667 because, e.g., the original address is considered mode-dependent
2668 by the target, which restricts simplify_subreg from invoking
2669 adjust_address_nv. Instead of preparing fallback support for an
2670 invalid address, we call adjust_address_nv directly. */
2671 if (MEM_P (cplx))
22469409
BW
2672 {
2673 emit_move_insn (adjust_address_nv (cplx, imode,
2674 imag_p ? GET_MODE_SIZE (imode) : 0),
2675 val);
2676 return;
2677 }
7a31c801 2678
ddf4e03f
RH
2679 /* If the sub-object is at least word sized, then we know that subregging
2680 will work. This special case is important, since store_bit_field
2681 wants to operate on integer modes, and there's rarely an OImode to
2682 correspond to TCmode. */
36d7571c
EB
2683 if (ibitsize >= BITS_PER_WORD
2684 /* For hard regs we have exact predicates. Assume we can split
2685 the original object if it spans an even number of hard regs.
2686 This special case is important for SCmode on 64-bit platforms
2687 where the natural size of floating-point regs is 32-bit. */
2ca202e7 2688 || (REG_P (cplx)
36d7571c 2689 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
7a31c801 2690 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
ddf4e03f
RH
2691 {
2692 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2693 imag_p ? GET_MODE_SIZE (imode) : 0);
36d7571c
EB
2694 if (part)
2695 {
2696 emit_move_insn (part, val);
2697 return;
2698 }
2699 else
2700 /* simplify_gen_subreg may fail for sub-word MEMs. */
2701 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
1466e387 2702 }
36d7571c
EB
2703
2704 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
1466e387
RH
2705}
2706
2707/* Extract one of the components of the complex value CPLX. Extract the
2708 real part if IMAG_P is false, and the imaginary part if it's true. */
2709
2710static rtx
2711read_complex_part (rtx cplx, bool imag_p)
bbf6f052 2712{
1466e387
RH
2713 enum machine_mode cmode, imode;
2714 unsigned ibitsize;
bbf6f052 2715
1466e387
RH
2716 if (GET_CODE (cplx) == CONCAT)
2717 return XEXP (cplx, imag_p);
bbf6f052 2718
1466e387
RH
2719 cmode = GET_MODE (cplx);
2720 imode = GET_MODE_INNER (cmode);
2721 ibitsize = GET_MODE_BITSIZE (imode);
2722
2723 /* Special case reads from complex constants that got spilled to memory. */
2724 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
de1b33dd 2725 {
1466e387
RH
2726 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2727 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2728 {
2729 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2730 if (CONSTANT_CLASS_P (part))
2731 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2732 }
2733 }
51286de6 2734
7a31c801
DE
2735 /* For MEMs simplify_gen_subreg may generate an invalid new address
2736 because, e.g., the original address is considered mode-dependent
2737 by the target, which restricts simplify_subreg from invoking
2738 adjust_address_nv. Instead of preparing fallback support for an
2739 invalid address, we call adjust_address_nv directly. */
2740 if (MEM_P (cplx))
2741 return adjust_address_nv (cplx, imode,
2742 imag_p ? GET_MODE_SIZE (imode) : 0);
2743
ddf4e03f
RH
2744 /* If the sub-object is at least word sized, then we know that subregging
2745 will work. This special case is important, since extract_bit_field
2746 wants to operate on integer modes, and there's rarely an OImode to
2747 correspond to TCmode. */
36d7571c
EB
2748 if (ibitsize >= BITS_PER_WORD
2749 /* For hard regs we have exact predicates. Assume we can split
2750 the original object if it spans an even number of hard regs.
2751 This special case is important for SCmode on 64-bit platforms
2752 where the natural size of floating-point regs is 32-bit. */
2ca202e7 2753 || (REG_P (cplx)
36d7571c 2754 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
7a31c801 2755 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
ddf4e03f
RH
2756 {
2757 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2758 imag_p ? GET_MODE_SIZE (imode) : 0);
36d7571c
EB
2759 if (ret)
2760 return ret;
2761 else
2762 /* simplify_gen_subreg may fail for sub-word MEMs. */
2763 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
ddf4e03f
RH
2764 }
2765
1466e387
RH
2766 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2767 true, NULL_RTX, imode, imode);
2768}
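/* Illustrative sketch, not part of the original source: together these
   two helpers let code in this file manipulate the halves of a complex
   value without caring whether it lives in a CONCAT, a register or
   memory.  For instance, storing X's parts swapped into Y, with both
   assumed to be valid SCmode rtxes:

     write_complex_part (y, read_complex_part (x, true), false);
     write_complex_part (y, read_complex_part (x, false), true);

   Both functions are static, so this pattern is only available within
   expr.c.  */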
2769\f
539eaa3a 2770/* A subroutine of emit_move_insn_1. Yet another lowpart generator.
074e6d01 2771 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
539eaa3a
RH
2772 represented in NEW_MODE. If FORCE is true, this will never happen, as
2773 we'll force-create a SUBREG if needed. */
0c19a26f 2774
1466e387 2775static rtx
074e6d01 2776emit_move_change_mode (enum machine_mode new_mode,
539eaa3a 2777 enum machine_mode old_mode, rtx x, bool force)
1466e387 2778{
074e6d01 2779 rtx ret;
1466e387 2780
074e6d01 2781 if (reload_in_progress && MEM_P (x))
1466e387 2782 {
074e6d01
RH
2783 /* We can't use gen_lowpart here because it may call change_address
2784 which is not appropriate if we were called when a reload was in
2785 progress. We don't have to worry about changing the address since
2786 the size in bytes is supposed to be the same. Copy the MEM to
2787 change the mode and move any substitutions from the old MEM to
2788 the new one. */
1466e387 2789
074e6d01
RH
2790 ret = adjust_address_nv (x, new_mode, 0);
2791 copy_replacements (x, ret);
de1b33dd 2792 }
1466e387
RH
2793 else
2794 {
35fd3193 2795 /* Note that we do want simplify_subreg's behavior of validating
074e6d01
RH
2796 that the new mode is ok for a hard register. If we were to use
2797 simplify_gen_subreg, we would create the subreg, but would
2798 probably run into the target not being able to implement it. */
539eaa3a
RH
2799 /* Except, of course, when FORCE is true, when this is exactly what
2800 we want. Which is needed for CCmodes on some targets. */
2801 if (force)
2802 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
2803 else
2804 ret = simplify_subreg (new_mode, x, old_mode, 0);
1466e387 2805 }
bbf6f052 2806
074e6d01
RH
2807 return ret;
2808}
2809
1466e387
RH
2810/* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2811 an integer mode of the same size as MODE. Returns the instruction
2812 emitted, or NULL if such a move could not be generated. */
bbf6f052 2813
1466e387 2814static rtx
652b0932 2815emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
1466e387
RH
2816{
2817 enum machine_mode imode;
2818 enum insn_code code;
bbf6f052 2819
1466e387
RH
2820 /* There must exist a mode of the exact size we require. */
2821 imode = int_mode_for_mode (mode);
2822 if (imode == BLKmode)
2823 return NULL_RTX;
de1b33dd 2824
1466e387
RH
2825 /* The target must support moves in this mode. */
2826 code = mov_optab->handlers[imode].insn_code;
2827 if (code == CODE_FOR_nothing)
2828 return NULL_RTX;
de1b33dd 2829
652b0932 2830 x = emit_move_change_mode (imode, mode, x, force);
539eaa3a
RH
2831 if (x == NULL_RTX)
2832 return NULL_RTX;
652b0932 2833 y = emit_move_change_mode (imode, mode, y, force);
539eaa3a
RH
2834 if (y == NULL_RTX)
2835 return NULL_RTX;
2836 return emit_insn (GEN_FCN (code) (x, y));
261c4230
RS
2837}
2838
1466e387
RH
2839/* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
2840 Return an equivalent MEM that does not use an auto-increment. */
261c4230 2841
1466e387
RH
2842static rtx
2843emit_move_resolve_push (enum machine_mode mode, rtx x)
261c4230 2844{
1466e387
RH
2845 enum rtx_code code = GET_CODE (XEXP (x, 0));
2846 HOST_WIDE_INT adjust;
2847 rtx temp;
261c4230 2848
1466e387
RH
2849 adjust = GET_MODE_SIZE (mode);
2850#ifdef PUSH_ROUNDING
2851 adjust = PUSH_ROUNDING (adjust);
2852#endif
2853 if (code == PRE_DEC || code == POST_DEC)
2854 adjust = -adjust;
76bbe028 2855
1466e387
RH
2856 /* Do not use anti_adjust_stack, since we don't want to update
2857 stack_pointer_delta. */
2858 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
2859 GEN_INT (adjust), stack_pointer_rtx,
2860 0, OPTAB_LIB_WIDEN);
2861 if (temp != stack_pointer_rtx)
2862 emit_move_insn (stack_pointer_rtx, temp);
bbf6f052 2863
1466e387 2864 switch (code)
7308a047 2865 {
1466e387
RH
2866 case PRE_INC:
2867 case PRE_DEC:
2868 temp = stack_pointer_rtx;
2869 break;
2870 case POST_INC:
2871 temp = plus_constant (stack_pointer_rtx, -GET_MODE_SIZE (mode));
2872 break;
2873 case POST_DEC:
2874 temp = plus_constant (stack_pointer_rtx, GET_MODE_SIZE (mode));
2875 break;
2876 default:
2877 gcc_unreachable ();
2878 }
7308a047 2879
1466e387
RH
2880 return replace_equiv_address (x, temp);
2881}
1a06f5fe 2882
1466e387
RH
2883/* A subroutine of emit_move_complex. Generate a move from Y into X.
2884 X is known to satisfy push_operand, and MODE is known to be complex.
2885 Returns the last instruction emitted. */
bb93b973 2886
1466e387
RH
2887static rtx
2888emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
2889{
2890 enum machine_mode submode = GET_MODE_INNER (mode);
2891 bool imag_first;
bb93b973 2892
1466e387
RH
2893#ifdef PUSH_ROUNDING
2894 unsigned int submodesize = GET_MODE_SIZE (submode);
bb93b973 2895
1466e387
RH
2896 /* In case we output to the stack, but the size is smaller than the
2897 machine can push exactly, we need to use move instructions. */
2898 if (PUSH_ROUNDING (submodesize) != submodesize)
2899 {
2900 x = emit_move_resolve_push (mode, x);
2901 return emit_move_insn (x, y);
2902 }
79ce92d7 2903#endif
7308a047 2904
1466e387
RH
2905 /* Note that the real part always precedes the imag part in memory
2906 regardless of machine's endianness. */
2907 switch (GET_CODE (XEXP (x, 0)))
2908 {
2909 case PRE_DEC:
2910 case POST_DEC:
2911 imag_first = true;
2912 break;
2913 case PRE_INC:
2914 case POST_INC:
2915 imag_first = false;
2916 break;
2917 default:
2918 gcc_unreachable ();
2919 }
beb72684 2920
1466e387
RH
2921 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2922 read_complex_part (y, imag_first));
2923 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2924 read_complex_part (y, !imag_first));
2925}
405f63da 2926
1466e387
RH
2927/* A subroutine of emit_move_insn_1. Generate a move from Y into X.
2928 MODE is known to be complex. Returns the last instruction emitted. */
beb72684 2929
1466e387
RH
2930static rtx
2931emit_move_complex (enum machine_mode mode, rtx x, rtx y)
2932{
2933 bool try_int;
405f63da 2934
1466e387
RH
2935 /* Need to take special care for pushes, to maintain proper ordering
2936 of the data, and possibly extra padding. */
2937 if (push_operand (x, mode))
2938 return emit_move_complex_push (mode, x, y);
7308a047 2939
1466e387
RH
2940 /* See if we can coerce the target into moving both values at once. */
2941
c6506442
DE
2942 /* Move floating point as parts. */
2943 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
2944 && mov_optab->handlers[GET_MODE_INNER (mode)].insn_code != CODE_FOR_nothing)
2945 try_int = false;
1466e387 2946 /* Not possible if the values are inherently not adjacent. */
c6506442 2947 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
1466e387
RH
2948 try_int = false;
2949 /* Is possible if both are registers (or subregs of registers). */
2950 else if (register_operand (x, mode) && register_operand (y, mode))
2951 try_int = true;
2952 /* If one of the operands is a memory, and alignment constraints
2953 are friendly enough, we may be able to do combined memory operations.
2954 We do not attempt this if Y is a constant because that combination is
2955 usually better with the by-parts thing below. */
2956 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
2957 && (!STRICT_ALIGNMENT
2958 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
2959 try_int = true;
2960 else
2961 try_int = false;
2962
2963 if (try_int)
a3600c71 2964 {
c6506442
DE
2965 rtx ret;
2966
2967 /* For memory to memory moves, optimal behavior can be had with the
2968 existing block move logic. */
2969 if (MEM_P (x) && MEM_P (y))
2970 {
2971 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
2972 BLOCK_OP_NO_LIBCALL);
2973 return get_last_insn ();
2974 }
2975
652b0932 2976 ret = emit_move_via_integer (mode, x, y, true);
1466e387
RH
2977 if (ret)
2978 return ret;
2979 }
a3600c71 2980
1466e387
RH
2981 /* Show the output dies here. This is necessary for SUBREGs
2982 of pseudos since we cannot track their lifetimes correctly;
2983 hard regs shouldn't appear here except as return values. */
2984 if (!reload_completed && !reload_in_progress
2985 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
2986 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
a3600c71 2987
1466e387
RH
2988 write_complex_part (x, read_complex_part (y, false), false);
2989 write_complex_part (x, read_complex_part (y, true), true);
2990 return get_last_insn ();
2991}
a3600c71 2992
1466e387
RH
2993/* A subroutine of emit_move_insn_1. Generate a move from Y into X.
2994 MODE is known to be MODE_CC. Returns the last instruction emitted. */
a3600c71 2995
1466e387
RH
2996static rtx
2997emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
2998{
2999 rtx ret;
a3600c71 3000
1466e387
RH
3001 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3002 if (mode != CCmode)
3003 {
3004 enum insn_code code = mov_optab->handlers[CCmode].insn_code;
3005 if (code != CODE_FOR_nothing)
539eaa3a
RH
3006 {
3007 x = emit_move_change_mode (CCmode, mode, x, true);
3008 y = emit_move_change_mode (CCmode, mode, y, true);
3009 return emit_insn (GEN_FCN (code) (x, y));
3010 }
1466e387
RH
3011 }
3012
3013 /* Otherwise, find the MODE_INT mode of the same width. */
652b0932 3014 ret = emit_move_via_integer (mode, x, y, false);
1466e387
RH
3015 gcc_assert (ret != NULL);
3016 return ret;
3017}
3018
3019/* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3020 MODE is any multi-word or full-word mode that lacks a move_insn
3021 pattern. Note that you will get better code if you define such
3022 patterns, even if they must turn into multiple assembler instructions. */
3023
3024static rtx
3025emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3026{
3027 rtx last_insn = 0;
3028 rtx seq, inner;
3029 bool need_clobber;
3030 int i;
3031
3032 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3033
3034 /* If X is a push on the stack, do the push now and replace
3035 X with a reference to the stack pointer. */
3036 if (push_operand (x, mode))
3037 x = emit_move_resolve_push (mode, x);
3038
3039 /* If we are in reload, see if either operand is a MEM whose address
3040 is scheduled for replacement. */
3041 if (reload_in_progress && MEM_P (x)
3042 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3043 x = replace_equiv_address_nv (x, inner);
3044 if (reload_in_progress && MEM_P (y)
3045 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3046 y = replace_equiv_address_nv (y, inner);
3047
3048 start_sequence ();
3049
3050 need_clobber = false;
3051 for (i = 0;
3052 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3053 i++)
3054 {
3055 rtx xpart = operand_subword (x, i, 1, mode);
3056 rtx ypart = operand_subword (y, i, 1, mode);
3057
3058 /* If we can't get a part of Y, put Y into memory if it is a
535a42b1
NS
3059 constant. Otherwise, force it into a register. Then we must
3060 be able to get a part of Y. */
1466e387 3061 if (ypart == 0 && CONSTANT_P (y))
a3600c71 3062 {
1466e387
RH
3063 y = force_const_mem (mode, y);
3064 ypart = operand_subword (y, i, 1, mode);
a3600c71 3065 }
1466e387
RH
3066 else if (ypart == 0)
3067 ypart = operand_subword_force (y, i, mode);
3068
3069 gcc_assert (xpart && ypart);
3070
3071 need_clobber |= (GET_CODE (xpart) == SUBREG);
502b8322 3072
1466e387 3073 last_insn = emit_move_insn (xpart, ypart);
a3600c71
HPN
3074 }
3075
1466e387
RH
3076 seq = get_insns ();
3077 end_sequence ();
3078
3079 /* Show the output dies here. This is necessary for SUBREGs
3080 of pseudos since we cannot track their lifetimes correctly;
3081 hard regs shouldn't appear here except as return values.
3082 We never want to emit such a clobber after reload. */
3083 if (x != y
3084 && ! (reload_in_progress || reload_completed)
3085 && need_clobber != 0)
3086 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3087
3088 emit_insn (seq);
3089
3090 return last_insn;
3091}
3092
3093/* Low level part of emit_move_insn.
3094 Called just like emit_move_insn, but assumes X and Y
3095 are basically valid. */
3096
3097rtx
3098emit_move_insn_1 (rtx x, rtx y)
3099{
3100 enum machine_mode mode = GET_MODE (x);
3101 enum insn_code code;
3102
3103 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3104
3105 code = mov_optab->handlers[mode].insn_code;
3106 if (code != CODE_FOR_nothing)
3107 return emit_insn (GEN_FCN (code) (x, y));
3108
3109 /* Expand complex moves by moving real part and imag part. */
3110 if (COMPLEX_MODE_P (mode))
3111 return emit_move_complex (mode, x, y);
3112
3113 if (GET_MODE_CLASS (mode) == MODE_CC)
3114 return emit_move_ccmode (mode, x, y);
3115
5581fc91
RS
3116 /* Try using a move pattern for the corresponding integer mode. This is
3117 only safe when simplify_subreg can convert MODE constants into integer
3118 constants. At present, it can only do this reliably if the value
3119 fits within a HOST_WIDE_INT. */
1466e387 3120 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
bbf6f052 3121 {
652b0932 3122 rtx ret = emit_move_via_integer (mode, x, y, false);
1466e387
RH
3123 if (ret)
3124 return ret;
3125 }
0fb7aeda 3126
1466e387
RH
3127 return emit_move_multi_word (mode, x, y);
3128}
918a6124 3129
1466e387
RH
3130/* Generate code to copy Y into X.
3131 Both Y and X must have the same mode, except that
3132 Y can be a constant with VOIDmode.
3133 This mode cannot be BLKmode; use emit_block_move for that.
3a94c984 3134
1466e387 3135 Return the last instruction emitted. */
3ef1eef4 3136
1466e387
RH
3137rtx
3138emit_move_insn (rtx x, rtx y)
3139{
3140 enum machine_mode mode = GET_MODE (x);
3141 rtx y_cst = NULL_RTX;
3142 rtx last_insn, set;
15a7a8ec 3143
1466e387
RH
3144 gcc_assert (mode != BLKmode
3145 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
bbf6f052 3146
1466e387
RH
3147 if (CONSTANT_P (y))
3148 {
3149 if (optimize
3150 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3151 && (last_insn = compress_float_constant (x, y)))
3152 return last_insn;
bbf6f052 3153
1466e387 3154 y_cst = y;
bbf6f052 3155
1466e387
RH
3156 if (!LEGITIMATE_CONSTANT_P (y))
3157 {
3158 y = force_const_mem (mode, y);
235ae7be 3159
1466e387
RH
3160 /* If the target's cannot_force_const_mem prevented the spill,
3161 assume that the target's move expanders will also take care
3162 of the non-legitimate constant. */
3163 if (!y)
3164 y = y_cst;
bbf6f052 3165 }
1466e387 3166 }
6551fa4d 3167
1466e387
RH
3168 /* If X or Y are memory references, verify that their addresses are valid
3169 for the machine. */
3170 if (MEM_P (x)
3171 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3172 && ! push_operand (x, GET_MODE (x)))
3173 || (flag_force_addr
3174 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3175 x = validize_mem (x);
235ae7be 3176
1466e387
RH
3177 if (MEM_P (y)
3178 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3179 || (flag_force_addr
3180 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3181 y = validize_mem (y);
235ae7be 3182
1466e387 3183 gcc_assert (mode != BLKmode);
235ae7be 3184
1466e387
RH
3185 last_insn = emit_move_insn_1 (x, y);
3186
3187 if (y_cst && REG_P (x)
3188 && (set = single_set (last_insn)) != NULL_RTX
3189 && SET_DEST (set) == x
3190 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3191 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3192
3193 return last_insn;
bbf6f052 3194}
51286de6
RH
3195
3196/* If Y is representable exactly in a narrower mode, and the target can
3197 perform the extension directly from constant or memory, then emit the
3198 move as an extension. */
3199
3200static rtx
502b8322 3201compress_float_constant (rtx x, rtx y)
51286de6
RH
3202{
3203 enum machine_mode dstmode = GET_MODE (x);
3204 enum machine_mode orig_srcmode = GET_MODE (y);
3205 enum machine_mode srcmode;
3206 REAL_VALUE_TYPE r;
e4541b7a 3207 int oldcost, newcost;
51286de6
RH
3208
3209 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3210
e4541b7a
DJ
3211 if (LEGITIMATE_CONSTANT_P (y))
3212 oldcost = rtx_cost (y, SET);
3213 else
3214 oldcost = rtx_cost (force_const_mem (dstmode, y), SET);
3215
51286de6
RH
3216 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3217 srcmode != orig_srcmode;
3218 srcmode = GET_MODE_WIDER_MODE (srcmode))
3219 {
3220 enum insn_code ic;
3221 rtx trunc_y, last_insn;
3222
3223 /* Skip if the target can't extend this way. */
3224 ic = can_extend_p (dstmode, srcmode, 0);
3225 if (ic == CODE_FOR_nothing)
3226 continue;
3227
3228 /* Skip if the narrowed value isn't exact. */
3229 if (! exact_real_truncate (srcmode, &r))
3230 continue;
3231
3232 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3233
3234 if (LEGITIMATE_CONSTANT_P (trunc_y))
3235 {
3236 /* Skip if the target needs extra instructions to perform
3237 the extension. */
3238 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3239 continue;
e4541b7a
DJ
3240 /* This is valid, but may not be cheaper than the original. */
3241 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3242 if (oldcost < newcost)
3243 continue;
51286de6
RH
3244 }
3245 else if (float_extend_from_mem[dstmode][srcmode])
e4541b7a
DJ
3246 {
3247 trunc_y = force_const_mem (srcmode, trunc_y);
3248 /* This is valid, but may not be cheaper than the original. */
3249 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3250 if (oldcost < newcost)
3251 continue;
3252 trunc_y = validize_mem (trunc_y);
3253 }
51286de6
RH
3254 else
3255 continue;
e4541b7a 3256
51286de6
RH
3257 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3258 last_insn = get_last_insn ();
3259
f8cfc6aa 3260 if (REG_P (x))
0c19a26f 3261 set_unique_reg_note (last_insn, REG_EQUAL, y);
51286de6
RH
3262
3263 return last_insn;
3264 }
3265
3266 return NULL_RTX;
3267}
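/* An illustrative sketch (hypothetical values, not compiler code): the
   exactness test above corresponds to checking that a wide FP constant
   survives a round trip through the narrower format, in which case the
   narrow constant plus a float extension is equivalent.  */
#if 0
/* Decide whether a DFmode constant D could be materialized as an SFmode
   constant followed by a float extension.  */
static int
representable_as_float (double d)
{
  float narrow = (float) d;        /* truncate to the narrow format */
  return (double) narrow == d;     /* exact iff the round trip is lossless */
}

/* representable_as_float (0.5) -> 1
   representable_as_float (0.1) -> 0 (0.1 rounds differently in the two
   formats, so the truncation is inexact).  */
#endif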
bbf6f052
RK
3268\f
3269/* Pushing data onto the stack. */
3270
3271/* Push a block of length SIZE (perhaps variable)
3272 and return an rtx to address the beginning of the block.
3273 The value may be virtual_outgoing_args_rtx.
3274
3275 EXTRA is the number of bytes of padding to push in addition to SIZE.
3276 BELOW nonzero means this padding comes at low addresses;
3277 otherwise, the padding comes at high addresses. */
3278
3279rtx
502b8322 3280push_block (rtx size, int extra, int below)
bbf6f052 3281{
b3694847 3282 rtx temp;
88f63c77
RK
3283
3284 size = convert_modes (Pmode, ptr_mode, size, 1);
bbf6f052
RK
3285 if (CONSTANT_P (size))
3286 anti_adjust_stack (plus_constant (size, extra));
f8cfc6aa 3287 else if (REG_P (size) && extra == 0)
bbf6f052
RK
3288 anti_adjust_stack (size);
3289 else
3290 {
ce48579b 3291 temp = copy_to_mode_reg (Pmode, size);
bbf6f052 3292 if (extra != 0)
906c4e36 3293 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
bbf6f052
RK
3294 temp, 0, OPTAB_LIB_WIDEN);
3295 anti_adjust_stack (temp);
3296 }
3297
f73ad30e 3298#ifndef STACK_GROWS_DOWNWARD
f73ad30e 3299 if (0)
f73ad30e
JH
3300#else
3301 if (1)
bbf6f052 3302#endif
f73ad30e 3303 {
f73ad30e
JH
3304 temp = virtual_outgoing_args_rtx;
3305 if (extra != 0 && below)
3306 temp = plus_constant (temp, extra);
3307 }
3308 else
3309 {
3310 if (GET_CODE (size) == CONST_INT)
3311 temp = plus_constant (virtual_outgoing_args_rtx,
3a94c984 3312 -INTVAL (size) - (below ? 0 : extra));
f73ad30e
JH
3313 else if (extra != 0 && !below)
3314 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3bdf5ad1 3315 negate_rtx (Pmode, plus_constant (size, extra)));
f73ad30e
JH
3316 else
3317 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3318 negate_rtx (Pmode, size));
3319 }
bbf6f052
RK
3320
3321 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3322}
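/* An illustrative sketch (hypothetical helper, downward-growing stack):
   push_block's job reduces to "move the stack pointer past SIZE + EXTRA
   bytes and return the address of the new block", as below.  */
#if 0
#include <stddef.h>

/* Simulate reserving a block on a downward-growing stack.  *SP is the
   current stack pointer; the return value addresses the new block.  */
static unsigned char *
reserve_block (unsigned char **sp, size_t size, size_t extra)
{
  *sp -= size + extra;          /* anti_adjust_stack (size + extra) */
  return *sp;                   /* low end of the freshly pushed block */
}
#endif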
3323
21d93687
RK
3324#ifdef PUSH_ROUNDING
3325
566aa174 3326/* Emit single push insn. */
21d93687 3327
566aa174 3328static void
502b8322 3329emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
566aa174 3330{
566aa174 3331 rtx dest_addr;
918a6124 3332 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
566aa174 3333 rtx dest;
371b8fc0
JH
3334 enum insn_code icode;
3335 insn_operand_predicate_fn pred;
566aa174 3336
371b8fc0
JH
3337 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
 3338 /* If there is a push pattern, use it. Otherwise fall back to the old way
 3339 of handing a MEM representing the push operation to the move expander. */
3340 icode = push_optab->handlers[(int) mode].insn_code;
3341 if (icode != CODE_FOR_nothing)
3342 {
3343 if (((pred = insn_data[(int) icode].operand[0].predicate)
505ddab6 3344 && !((*pred) (x, mode))))
371b8fc0
JH
3345 x = force_reg (mode, x);
3346 emit_insn (GEN_FCN (icode) (x));
3347 return;
3348 }
566aa174
JH
3349 if (GET_MODE_SIZE (mode) == rounded_size)
3350 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
329d586f
KH
3351 /* If we are to pad downward, adjust the stack pointer first and
3352 then store X into the stack location using an offset. This is
3353 because emit_move_insn does not know how to pad; it does not have
3354 access to type. */
3355 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3356 {
3357 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3358 HOST_WIDE_INT offset;
3359
3360 emit_move_insn (stack_pointer_rtx,
3361 expand_binop (Pmode,
3362#ifdef STACK_GROWS_DOWNWARD
3363 sub_optab,
3364#else
3365 add_optab,
3366#endif
3367 stack_pointer_rtx,
3368 GEN_INT (rounded_size),
3369 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3370
3371 offset = (HOST_WIDE_INT) padding_size;
3372#ifdef STACK_GROWS_DOWNWARD
3373 if (STACK_PUSH_CODE == POST_DEC)
3374 /* We have already decremented the stack pointer, so get the
3375 previous value. */
3376 offset += (HOST_WIDE_INT) rounded_size;
3377#else
3378 if (STACK_PUSH_CODE == POST_INC)
3379 /* We have already incremented the stack pointer, so get the
3380 previous value. */
3381 offset -= (HOST_WIDE_INT) rounded_size;
3382#endif
3383 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3384 }
566aa174
JH
3385 else
3386 {
3387#ifdef STACK_GROWS_DOWNWARD
329d586f 3388 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
566aa174 3389 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
505ddab6 3390 GEN_INT (-(HOST_WIDE_INT) rounded_size));
566aa174 3391#else
329d586f 3392 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
566aa174
JH
3393 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3394 GEN_INT (rounded_size));
3395#endif
3396 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3397 }
3398
3399 dest = gen_rtx_MEM (mode, dest_addr);
3400
566aa174
JH
3401 if (type != 0)
3402 {
3403 set_mem_attributes (dest, type, 1);
c3d32120
RK
3404
3405 if (flag_optimize_sibling_calls)
3406 /* Function incoming arguments may overlap with sibling call
3407 outgoing arguments and we cannot allow reordering of reads
3408 from function arguments with stores to outgoing arguments
3409 of sibling calls. */
3410 set_mem_alias_set (dest, 0);
566aa174
JH
3411 }
3412 emit_move_insn (dest, x);
566aa174 3413}
21d93687 3414#endif
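/* An illustrative sketch (hypothetical rounding rule, downward-growing stack
   with a pre-decrement push): when the argument must be padded downward, the
   stack pointer is first dropped by the rounded size and the value is then
   stored at an offset equal to the padding, as the arithmetic below shows.  */
#if 0
#include <stddef.h>

/* A stand-in for PUSH_ROUNDING: round up to a multiple of 4 bytes.  */
#define HYPOTHETICAL_PUSH_ROUNDING(n) (((n) + 3) & ~(size_t) 3)

/* Return the offset from the new stack pointer at which a downward-padded
   argument of SIZE bytes is stored.  */
static size_t
padded_store_offset (size_t size)
{
  size_t rounded = HYPOTHETICAL_PUSH_ROUNDING (size);
  return rounded - size;        /* the padding sits below the data */
}

/* padded_store_offset (1) == 3, padded_store_offset (4) == 0.  */
#endif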
566aa174 3415
bbf6f052
RK
3416/* Generate code to push X onto the stack, assuming it has mode MODE and
3417 type TYPE.
3418 MODE is redundant except when X is a CONST_INT (since they don't
3419 carry mode info).
3420 SIZE is an rtx for the size of data to be copied (in bytes),
3421 needed only if X is BLKmode.
3422
f1eaaf73 3423 ALIGN (in bits) is maximum alignment we can assume.
bbf6f052 3424
cd048831 3425 If PARTIAL and REG are both nonzero, then copy that many of the first
3426 bytes of X into registers starting with REG, and push the rest of X.
3427 The amount of space pushed is decreased by PARTIAL bytes.
bbf6f052 3428 REG must be a hard register in this case.
 3429 If REG is zero but PARTIAL is not, take all other actions for an
3430 argument partially in registers, but do not actually load any
3431 registers.
3432
3433 EXTRA is the amount in bytes of extra space to leave next to this arg.
6dc42e49 3434 This is ignored if an argument block has already been allocated.
3435
3436 On a machine that lacks real push insns, ARGS_ADDR is the address of
3437 the bottom of the argument block for this call. We use indexing off there
 3438 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3439 argument block has not been preallocated.
3440
3441 ARGS_SO_FAR is the size of args previously pushed for this call.
3442
3443 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3444 for arguments passed in registers. If nonzero, it will be the number
3445 of bytes required. */
bbf6f052
RK
3446
3447void
502b8322
AJ
3448emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3449 unsigned int align, int partial, rtx reg, int extra,
3450 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3451 rtx alignment_pad)
bbf6f052
RK
3452{
3453 rtx xinner;
3454 enum direction stack_direction
3455#ifdef STACK_GROWS_DOWNWARD
3456 = downward;
3457#else
3458 = upward;
3459#endif
3460
3461 /* Decide where to pad the argument: `downward' for below,
3462 `upward' for above, or `none' for don't pad it.
3463 Default is below for small data on big-endian machines; else above. */
3464 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3465
0fb7aeda 3466 /* Invert direction if stack is post-decrement.
3467 FIXME: why? */
3468 if (STACK_PUSH_CODE == POST_DEC)
bbf6f052
RK
3469 if (where_pad != none)
3470 where_pad = (where_pad == downward ? upward : downward);
3471
ad76cef8 3472 xinner = x;
bbf6f052
RK
3473
3474 if (mode == BLKmode)
3475 {
3476 /* Copy a block into the stack, entirely or partially. */
3477
b3694847 3478 rtx temp;
78a52f11 3479 int used;
531547e9 3480 int offset;
bbf6f052 3481 int skip;
3a94c984 3482
78a52f11
RH
3483 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3484 used = partial - offset;
531547e9 3485
5b0264cb 3486 gcc_assert (size);
bbf6f052 3487
bbf6f052
RK
3488 /* USED is now the # of bytes we need not copy to the stack
3489 because registers will take care of them. */
3490
3491 if (partial != 0)
f4ef873c 3492 xinner = adjust_address (xinner, BLKmode, used);
bbf6f052
RK
3493
3494 /* If the partial register-part of the arg counts in its stack size,
3495 skip the part of stack space corresponding to the registers.
3496 Otherwise, start copying to the beginning of the stack space,
3497 by setting SKIP to 0. */
e5e809f4 3498 skip = (reg_parm_stack_space == 0) ? 0 : used;
bbf6f052
RK
3499
3500#ifdef PUSH_ROUNDING
3501 /* Do it with several push insns if that doesn't take lots of insns
3502 and if there is no difficulty with push insns that skip bytes
3503 on the stack for alignment purposes. */
3504 if (args_addr == 0
f73ad30e 3505 && PUSH_ARGS
bbf6f052
RK
3506 && GET_CODE (size) == CONST_INT
3507 && skip == 0
f26aca6d 3508 && MEM_ALIGN (xinner) >= align
15914757 3509 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
bbf6f052
RK
3510 /* Here we avoid the case of a structure whose weak alignment
3511 forces many pushes of a small amount of data,
3512 and such small pushes do rounding that causes trouble. */
e1565e65 3513 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
19caa751 3514 || align >= BIGGEST_ALIGNMENT
f1eaaf73
DE
3515 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3516 == (align / BITS_PER_UNIT)))
bbf6f052
RK
3517 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3518 {
3519 /* Push padding now if padding above and stack grows down,
3520 or if padding below and stack grows up.
3521 But if space already allocated, this has already been done. */
3522 if (extra && args_addr == 0
3523 && where_pad != none && where_pad != stack_direction)
906c4e36 3524 anti_adjust_stack (GEN_INT (extra));
bbf6f052 3525
8fd3cf4e 3526 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
bbf6f052
RK
3527 }
3528 else
3a94c984 3529#endif /* PUSH_ROUNDING */
bbf6f052 3530 {
7ab923cc
JJ
3531 rtx target;
3532
bbf6f052
RK
3533 /* Otherwise make space on the stack and copy the data
3534 to the address of that space. */
3535
3536 /* Deduct words put into registers from the size we must copy. */
3537 if (partial != 0)
3538 {
3539 if (GET_CODE (size) == CONST_INT)
906c4e36 3540 size = GEN_INT (INTVAL (size) - used);
bbf6f052
RK
3541 else
3542 size = expand_binop (GET_MODE (size), sub_optab, size,
906c4e36
RK
3543 GEN_INT (used), NULL_RTX, 0,
3544 OPTAB_LIB_WIDEN);
bbf6f052
RK
3545 }
3546
3547 /* Get the address of the stack space.
3548 In this case, we do not deal with EXTRA separately.
3549 A single stack adjust will do. */
3550 if (! args_addr)
3551 {
3552 temp = push_block (size, extra, where_pad == downward);
3553 extra = 0;
3554 }
3555 else if (GET_CODE (args_so_far) == CONST_INT)
3556 temp = memory_address (BLKmode,
3557 plus_constant (args_addr,
3558 skip + INTVAL (args_so_far)));
3559 else
3560 temp = memory_address (BLKmode,
38a448ca
RH
3561 plus_constant (gen_rtx_PLUS (Pmode,
3562 args_addr,
3563 args_so_far),
bbf6f052 3564 skip));
4ca79136
RH
3565
3566 if (!ACCUMULATE_OUTGOING_ARGS)
3567 {
3568 /* If the source is referenced relative to the stack pointer,
3569 copy it to another register to stabilize it. We do not need
3570 to do this if we know that we won't be changing sp. */
3571
3572 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3573 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3574 temp = copy_to_reg (temp);
3575 }
3576
3a94c984 3577 target = gen_rtx_MEM (BLKmode, temp);
7ab923cc 3578
2bb16349
RH
3579 /* We do *not* set_mem_attributes here, because incoming arguments
3580 may overlap with sibling call outgoing arguments and we cannot
3581 allow reordering of reads from function arguments with stores
3582 to outgoing arguments of sibling calls. We do, however, want
3583 to record the alignment of the stack slot. */
44bb111a
RH
3584 /* ALIGN may well be better aligned than TYPE, e.g. due to
3585 PARM_BOUNDARY. Assume the caller isn't lying. */
3586 set_mem_align (target, align);
4ca79136 3587
44bb111a 3588 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
bbf6f052
RK
3589 }
3590 }
3591 else if (partial > 0)
3592 {
3593 /* Scalar partly in registers. */
3594
3595 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3596 int i;
3597 int not_stack;
78a52f11 3598 /* # bytes of start of argument
bbf6f052 3599 that we must make space for but need not store. */
ac7e839c 3600 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
bbf6f052
RK
3601 int args_offset = INTVAL (args_so_far);
3602 int skip;
3603
3604 /* Push padding now if padding above and stack grows down,
3605 or if padding below and stack grows up.
3606 But if space already allocated, this has already been done. */
3607 if (extra && args_addr == 0
3608 && where_pad != none && where_pad != stack_direction)
906c4e36 3609 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
3610
3611 /* If we make space by pushing it, we might as well push
3612 the real data. Otherwise, we can leave OFFSET nonzero
3613 and leave the space uninitialized. */
3614 if (args_addr == 0)
3615 offset = 0;
3616
3617 /* Now NOT_STACK gets the number of words that we don't need to
40b0345d 3618 allocate on the stack. Convert OFFSET to words too. */
78a52f11 3619 not_stack = (partial - offset) / UNITS_PER_WORD;
ac7e839c 3620 offset /= UNITS_PER_WORD;
bbf6f052
RK
3621
3622 /* If the partial register-part of the arg counts in its stack size,
3623 skip the part of stack space corresponding to the registers.
3624 Otherwise, start copying to the beginning of the stack space,
3625 by setting SKIP to 0. */
e5e809f4 3626 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
bbf6f052
RK
3627
3628 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3629 x = validize_mem (force_const_mem (mode, x));
3630
3631 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3632 SUBREGs of such registers are not allowed. */
f8cfc6aa 3633 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
bbf6f052
RK
3634 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3635 x = copy_to_reg (x);
3636
3637 /* Loop over all the words allocated on the stack for this arg. */
3638 /* We can do it by words, because any scalar bigger than a word
3639 has a size a multiple of a word. */
3640#ifndef PUSH_ARGS_REVERSED
3641 for (i = not_stack; i < size; i++)
3642#else
3643 for (i = size - 1; i >= not_stack; i--)
3644#endif
3645 if (i >= not_stack + offset)
3646 emit_push_insn (operand_subword_force (x, i, mode),
906c4e36
RK
3647 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3648 0, args_addr,
3649 GEN_INT (args_offset + ((i - not_stack + skip)
e5e809f4 3650 * UNITS_PER_WORD)),
4fc026cd 3651 reg_parm_stack_space, alignment_pad);
bbf6f052
RK
3652 }
3653 else
3654 {
3655 rtx addr;
3bdf5ad1 3656 rtx dest;
bbf6f052
RK
3657
3658 /* Push padding now if padding above and stack grows down,
3659 or if padding below and stack grows up.
3660 But if space already allocated, this has already been done. */
3661 if (extra && args_addr == 0
3662 && where_pad != none && where_pad != stack_direction)
906c4e36 3663 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
3664
3665#ifdef PUSH_ROUNDING
f73ad30e 3666 if (args_addr == 0 && PUSH_ARGS)
566aa174 3667 emit_single_push_insn (mode, x, type);
bbf6f052
RK
3668 else
3669#endif
921b3427
RK
3670 {
3671 if (GET_CODE (args_so_far) == CONST_INT)
3672 addr
3673 = memory_address (mode,
3a94c984 3674 plus_constant (args_addr,
921b3427 3675 INTVAL (args_so_far)));
3a94c984 3676 else
38a448ca
RH
3677 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3678 args_so_far));
566aa174 3679 dest = gen_rtx_MEM (mode, addr);
2bb16349
RH
3680
3681 /* We do *not* set_mem_attributes here, because incoming arguments
3682 may overlap with sibling call outgoing arguments and we cannot
3683 allow reordering of reads from function arguments with stores
3684 to outgoing arguments of sibling calls. We do, however, want
3685 to record the alignment of the stack slot. */
3686 /* ALIGN may well be better aligned than TYPE, e.g. due to
3687 PARM_BOUNDARY. Assume the caller isn't lying. */
3688 set_mem_align (dest, align);
bbf6f052 3689
566aa174 3690 emit_move_insn (dest, x);
566aa174 3691 }
bbf6f052
RK
3692 }
3693
bbf6f052
RK
3694 /* If part should go in registers, copy that part
3695 into the appropriate registers. Do this now, at the end,
3696 since mem-to-mem copies above may do function calls. */
cd048831 3697 if (partial > 0 && reg != 0)
fffa9c1d
JW
3698 {
3699 /* Handle calls that pass values in multiple non-contiguous locations.
3700 The Irix 6 ABI has examples of this. */
3701 if (GET_CODE (reg) == PARALLEL)
6e985040 3702 emit_group_load (reg, x, type, -1);
fffa9c1d 3703 else
78a52f11
RH
3704 {
3705 gcc_assert (partial % UNITS_PER_WORD == 0);
3706 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
3707 }
fffa9c1d 3708 }
bbf6f052
RK
3709
3710 if (extra && args_addr == 0 && where_pad == stack_direction)
906c4e36 3711 anti_adjust_stack (GEN_INT (extra));
3a94c984 3712
3ea2292a 3713 if (alignment_pad && args_addr == 0)
4fc026cd 3714 anti_adjust_stack (alignment_pad);
bbf6f052
RK
3715}
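/* An illustrative sketch (hypothetical parameters, not compiler code): the
   PARTIAL handling above splits the argument at a PARM_BOUNDARY multiple;
   OFFSET is the slop below that boundary and USED is what really lands in
   registers.  With a 32-bit PARM_BOUNDARY and PARTIAL == 6, offset == 2
   and used == 4.  */
#if 0
/* Mirror the offset/used computation done for a BLKmode argument that is
   passed partly in registers, assuming PARM_BOUNDARY is 32 bits.  */
static void
split_partial_bytes (int partial, int *offset, int *used)
{
  const int boundary_bytes = 32 / 8;    /* PARM_BOUNDARY / BITS_PER_UNIT */

  *offset = partial % boundary_bytes;   /* bytes re-pushed on the stack */
  *used = partial - *offset;            /* bytes genuinely in registers */
}
#endif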
3716\f
296b4ed9
RK
3717/* Return X if X can be used as a subtarget in a sequence of arithmetic
3718 operations. */
3719
3720static rtx
502b8322 3721get_subtarget (rtx x)
296b4ed9 3722{
7c27e184
PB
3723 return (optimize
3724 || x == 0
296b4ed9 3725 /* Only registers can be subtargets. */
f8cfc6aa 3726 || !REG_P (x)
296b4ed9
RK
3727 /* Don't use hard regs to avoid extending their life. */
3728 || REGNO (x) < FIRST_PSEUDO_REGISTER
296b4ed9
RK
3729 ? 0 : x);
3730}
3731
8c1cfd5a
RH
3732/* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
3733 FIELD is a bitfield. Returns true if the optimization was successful,
3734 and there's nothing else to do. */
3735
3736static bool
3737optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3738 unsigned HOST_WIDE_INT bitpos,
3739 enum machine_mode mode1, rtx str_rtx,
3740 tree to, tree src)
3741{
3742 enum machine_mode str_mode = GET_MODE (str_rtx);
3743 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
3744 tree op0, op1;
3745 rtx value, result;
3746 optab binop;
3747
3748 if (mode1 != VOIDmode
3749 || bitsize >= BITS_PER_WORD
3750 || str_bitsize > BITS_PER_WORD
3751 || TREE_SIDE_EFFECTS (to)
3752 || TREE_THIS_VOLATILE (to))
3753 return false;
3754
3755 STRIP_NOPS (src);
3756 if (!BINARY_CLASS_P (src)
3757 || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
3758 return false;
3759
3760 op0 = TREE_OPERAND (src, 0);
3761 op1 = TREE_OPERAND (src, 1);
3762 STRIP_NOPS (op0);
3763
3764 if (!operand_equal_p (to, op0, 0))
3765 return false;
3766
3767 if (MEM_P (str_rtx))
3768 {
3769 unsigned HOST_WIDE_INT offset1;
3770
3771 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
3772 str_mode = word_mode;
3773 str_mode = get_best_mode (bitsize, bitpos,
3774 MEM_ALIGN (str_rtx), str_mode, 0);
3775 if (str_mode == VOIDmode)
3776 return false;
3777 str_bitsize = GET_MODE_BITSIZE (str_mode);
3778
3779 offset1 = bitpos;
3780 bitpos %= str_bitsize;
3781 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
3782 str_rtx = adjust_address (str_rtx, str_mode, offset1);
3783 }
3784 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
3785 return false;
3786
3787 /* If the bit field covers the whole REG/MEM, store_field
3788 will likely generate better code. */
3789 if (bitsize >= str_bitsize)
3790 return false;
3791
3792 /* We can't handle fields split across multiple entities. */
3793 if (bitpos + bitsize > str_bitsize)
3794 return false;
3795
3796 if (BYTES_BIG_ENDIAN)
3797 bitpos = str_bitsize - bitpos - bitsize;
3798
3799 switch (TREE_CODE (src))
3800 {
3801 case PLUS_EXPR:
3802 case MINUS_EXPR:
3803 /* For now, just optimize the case of the topmost bitfield
3804 where we don't need to do any masking and also
3805 1 bit bitfields where xor can be used.
3806 We might win by one instruction for the other bitfields
3807 too if insv/extv instructions aren't used, so that
3808 can be added later. */
3809 if (bitpos + bitsize != str_bitsize
3810 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
3811 break;
3812
3813 value = expand_expr (op1, NULL_RTX, str_mode, 0);
3814 value = convert_modes (str_mode,
3815 TYPE_MODE (TREE_TYPE (op1)), value,
3816 TYPE_UNSIGNED (TREE_TYPE (op1)));
3817
3818 /* We may be accessing data outside the field, which means
3819 we can alias adjacent data. */
3820 if (MEM_P (str_rtx))
3821 {
3822 str_rtx = shallow_copy_rtx (str_rtx);
3823 set_mem_alias_set (str_rtx, 0);
3824 set_mem_expr (str_rtx, 0);
3825 }
3826
3827 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
3828 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
3829 {
3830 value = expand_and (str_mode, value, const1_rtx, NULL);
3831 binop = xor_optab;
3832 }
3833 value = expand_shift (LSHIFT_EXPR, str_mode, value,
3834 build_int_cst (NULL_TREE, bitpos),
3835 NULL_RTX, 1);
3836 result = expand_binop (str_mode, binop, str_rtx,
3837 value, str_rtx, 1, OPTAB_WIDEN);
3838 if (result != str_rtx)
3839 emit_move_insn (str_rtx, result);
3840 return true;
3841
92fb2d32
KH
3842 case BIT_IOR_EXPR:
3843 case BIT_XOR_EXPR:
3844 if (TREE_CODE (op1) != INTEGER_CST)
3845 break;
3846 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), 0);
3847 value = convert_modes (GET_MODE (str_rtx),
3848 TYPE_MODE (TREE_TYPE (op1)), value,
3849 TYPE_UNSIGNED (TREE_TYPE (op1)));
3850
3851 /* We may be accessing data outside the field, which means
3852 we can alias adjacent data. */
3853 if (MEM_P (str_rtx))
3854 {
3855 str_rtx = shallow_copy_rtx (str_rtx);
3856 set_mem_alias_set (str_rtx, 0);
3857 set_mem_expr (str_rtx, 0);
3858 }
3859
3860 binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
3861 if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3862 {
3863 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
3864 - 1);
3865 value = expand_and (GET_MODE (str_rtx), value, mask,
3866 NULL_RTX);
3867 }
3868 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
3869 build_int_cst (NULL_TREE, bitpos),
3870 NULL_RTX, 1);
3871 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
3872 value, str_rtx, 1, OPTAB_WIDEN);
3873 if (result != str_rtx)
3874 emit_move_insn (str_rtx, result);
3875 return true;
3876
8c1cfd5a
RH
3877 default:
3878 break;
3879 }
3880
3881 return false;
3882}
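/* An illustrative sketch (hypothetical little-endian layout, 32-bit word;
   not compiler code): the transformation above turns a read-modify-write of
   a bitfield such as "s.f ^= 1" into a single logical operation on the
   containing word instead of an extract/modify/insert sequence.  */
#if 0
#include <stdint.h>

/* Flip a 1-bit field that lives at bit BITPOS of the 32-bit word *WORD,
   the way the xor_optab path above would.  */
static void
xor_bitfield_in_word (uint32_t *word, unsigned int bitpos)
{
  *word ^= (uint32_t) 1 << bitpos;   /* one XOR, no extract/insert pair */
}
#endif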
3883
3884
e836a5a2 3885/* Expand an assignment that stores the value of FROM into TO. */
bbf6f052 3886
e836a5a2
KH
3887void
3888expand_assignment (tree to, tree from)
bbf6f052 3889{
b3694847 3890 rtx to_rtx = 0;
bbf6f052
RK
3891 rtx result;
3892
3893 /* Don't crash if the lhs of the assignment was erroneous. */
3894
3895 if (TREE_CODE (to) == ERROR_MARK)
709f5be1
RS
3896 {
3897 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
e836a5a2 3898 return;
709f5be1 3899 }
bbf6f052
RK
3900
3901 /* Assignment of a structure component needs special treatment
3902 if the structure component's rtx is not simply a MEM.
3903 Assignment of an array element at a constant index, and assignment of
 3904 an array element in an unaligned packed structure field, have the same
3905 problem. */
8c1cfd5a 3906 if (handled_component_p (to)
7c02ae17 3907 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
bbf6f052
RK
3908 {
3909 enum machine_mode mode1;
770ae6cc 3910 HOST_WIDE_INT bitsize, bitpos;
7bb0943f 3911 tree offset;
bbf6f052
RK
3912 int unsignedp;
3913 int volatilep = 0;
0088fcb1
RK
3914 tree tem;
3915
3916 push_temp_slots ();
839c4796 3917 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
2614034e 3918 &unsignedp, &volatilep, true);
bbf6f052
RK
3919
3920 /* If we are going to use store_bit_field and extract_bit_field,
3921 make sure to_rtx will be safe for multiple use. */
3922
b258008a 3923 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
1ed1b4fb 3924
7bb0943f
RS
3925 if (offset != 0)
3926 {
1e188d1e 3927 rtx offset_rtx;
7bb0943f 3928
1e188d1e
RH
3929 if (!MEM_P (to_rtx))
3930 {
3931 /* We can get constant negative offsets into arrays with broken
3932 user code. Translate this to a trap instead of ICEing. */
3933 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
3934 expand_builtin_trap ();
3935 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
3936 }
bd070e1a 3937
1e188d1e 3938 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
bd070e1a 3939#ifdef POINTERS_EXTEND_UNSIGNED
4b6c1672 3940 if (GET_MODE (offset_rtx) != Pmode)
267b28bd 3941 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
fa06ab5c
RK
3942#else
3943 if (GET_MODE (offset_rtx) != ptr_mode)
3944 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
bd070e1a 3945#endif
bd070e1a 3946
9a7b9f4f
JL
3947 /* A constant address in TO_RTX can have VOIDmode, we must not try
3948 to call force_reg for that case. Avoid that case. */
3c0cb5de 3949 if (MEM_P (to_rtx)
89752202 3950 && GET_MODE (to_rtx) == BLKmode
9a7b9f4f 3951 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
a06ef755 3952 && bitsize > 0
3a94c984 3953 && (bitpos % bitsize) == 0
89752202 3954 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
a06ef755 3955 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
89752202 3956 {
e3c8ea67 3957 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
89752202
HB
3958 bitpos = 0;
3959 }
3960
0d4903b8 3961 to_rtx = offset_address (to_rtx, offset_rtx,
d50a16c4
EB
3962 highest_pow2_factor_for_target (to,
3963 offset));
7bb0943f 3964 }
c5c76735 3965
8c1cfd5a
RH
3966 /* Handle expand_expr of a complex value returning a CONCAT. */
3967 if (GET_CODE (to_rtx) == CONCAT)
a06ef755 3968 {
0becc986
RH
3969 if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE)
3970 {
3971 gcc_assert (bitpos == 0);
3972 result = store_expr (from, to_rtx, false);
3973 }
3974 else
3975 {
3976 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
3977 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false);
3978 }
bbf6f052 3979 }
8c1cfd5a 3980 else
df62f18a 3981 {
8c1cfd5a 3982 if (MEM_P (to_rtx))
b8b139c7 3983 {
8c1cfd5a
RH
3984 /* If the field is at offset zero, we could have been given the
3985 DECL_RTX of the parent struct. Don't munge it. */
3986 to_rtx = shallow_copy_rtx (to_rtx);
b8b139c7 3987
8c1cfd5a 3988 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
b8b139c7 3989
8c1cfd5a
RH
3990 /* Deal with volatile and readonly fields. The former is only
3991 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3992 if (volatilep)
3993 MEM_VOLATILE_P (to_rtx) = 1;
2039d7aa 3994 if (component_uses_parent_alias_set (to))
8c1cfd5a 3995 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
df62f18a 3996 }
60ba25bf 3997
8c1cfd5a
RH
3998 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
3999 to_rtx, to, from))
4000 result = NULL;
4001 else
4002 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4003 TREE_TYPE (tem), get_alias_set (to));
df62f18a
JJ
4004 }
4005
8c1cfd5a
RH
4006 if (result)
4007 preserve_temp_slots (result);
a06ef755
RK
4008 free_temp_slots ();
4009 pop_temp_slots ();
e836a5a2 4010 return;
bbf6f052
RK
4011 }
4012
cd1db108
RS
4013 /* If the rhs is a function call and its value is not an aggregate,
4014 call the function before we start to compute the lhs.
4015 This is needed for correct code for cases such as
4016 val = setjmp (buf) on machines where reference to val
4017 requires loading up part of an address in a separate insn.
4018
4019 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4020 since it might be a promoted variable where the zero- or sign- extension
4021 needs to be done. Handling this in the normal way is safe because no
4022 computation is done before the call. */
61f71b34 4023 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
b35cd3c1 4024 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
1858863b 4025 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
f8cfc6aa 4026 && REG_P (DECL_RTL (to))))
cd1db108 4027 {
0088fcb1
RK
4028 rtx value;
4029
4030 push_temp_slots ();
4031 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
cd1db108 4032 if (to_rtx == 0)
37a08a29 4033 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
aaf87c45 4034
fffa9c1d
JW
4035 /* Handle calls that return values in multiple non-contiguous locations.
4036 The Irix 6 ABI has examples of this. */
4037 if (GET_CODE (to_rtx) == PARALLEL)
6e985040
AM
4038 emit_group_load (to_rtx, value, TREE_TYPE (from),
4039 int_size_in_bytes (TREE_TYPE (from)));
fffa9c1d 4040 else if (GET_MODE (to_rtx) == BLKmode)
44bb111a 4041 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
aaf87c45 4042 else
6419e5b0 4043 {
5ae6cd0d 4044 if (POINTER_TYPE_P (TREE_TYPE (to)))
6419e5b0 4045 value = convert_memory_address (GET_MODE (to_rtx), value);
6419e5b0
DT
4046 emit_move_insn (to_rtx, value);
4047 }
cd1db108
RS
4048 preserve_temp_slots (to_rtx);
4049 free_temp_slots ();
0088fcb1 4050 pop_temp_slots ();
e836a5a2 4051 return;
cd1db108
RS
4052 }
4053
bbf6f052
RK
4054 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4055 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4056
4057 if (to_rtx == 0)
37a08a29 4058 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
bbf6f052 4059
86d38d25 4060 /* Don't move directly into a return register. */
14a774a9 4061 if (TREE_CODE (to) == RESULT_DECL
f8cfc6aa 4062 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
86d38d25 4063 {
0088fcb1
RK
4064 rtx temp;
4065
4066 push_temp_slots ();
4067 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
14a774a9
RK
4068
4069 if (GET_CODE (to_rtx) == PARALLEL)
6e985040
AM
4070 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4071 int_size_in_bytes (TREE_TYPE (from)));
14a774a9
RK
4072 else
4073 emit_move_insn (to_rtx, temp);
4074
86d38d25
RS
4075 preserve_temp_slots (to_rtx);
4076 free_temp_slots ();
0088fcb1 4077 pop_temp_slots ();
e836a5a2 4078 return;
86d38d25
RS
4079 }
4080
bbf6f052
RK
4081 /* In case we are returning the contents of an object which overlaps
4082 the place the value is being stored, use a safe function when copying
4083 a value through a pointer into a structure value return block. */
4084 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4085 && current_function_returns_struct
4086 && !current_function_returns_pcc_struct)
4087 {
0088fcb1
RK
4088 rtx from_rtx, size;
4089
4090 push_temp_slots ();
33a20d10 4091 size = expr_size (from);
37a08a29 4092 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
bbf6f052 4093
8f99553f
JM
4094 emit_library_call (memmove_libfunc, LCT_NORMAL,
4095 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4096 XEXP (from_rtx, 0), Pmode,
4097 convert_to_mode (TYPE_MODE (sizetype),
4098 size, TYPE_UNSIGNED (sizetype)),
4099 TYPE_MODE (sizetype));
bbf6f052
RK
4100
4101 preserve_temp_slots (to_rtx);
4102 free_temp_slots ();
0088fcb1 4103 pop_temp_slots ();
e836a5a2 4104 return;
bbf6f052
RK
4105 }
4106
4107 /* Compute FROM and store the value in the rtx we got. */
4108
0088fcb1 4109 push_temp_slots ();
e836a5a2 4110 result = store_expr (from, to_rtx, 0);
bbf6f052
RK
4111 preserve_temp_slots (result);
4112 free_temp_slots ();
0088fcb1 4113 pop_temp_slots ();
e836a5a2 4114 return;
bbf6f052
RK
4115}
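/* An illustrative sketch (hypothetical struct layout, not compiler code):
   for a component store such as "o.in.b = v", the code above first
   decomposes the reference into a base object plus a bit position and bit
   width; the sketch does the same decomposition by hand for one layout.  */
#if 0
#include <stddef.h>
#include <stdio.h>

struct inner { int a; short b; };
struct outer { char pad; struct inner in; };

/* Print the (bitpos, bitsize) pair that a get_inner_reference-style
   decomposition would yield for the reference o.in.b.  */
static void
show_decomposition (void)
{
  size_t bitpos = (offsetof (struct outer, in)
		   + offsetof (struct inner, b)) * 8;
  size_t bitsize = sizeof (short) * 8;

  printf ("bitpos=%lu bitsize=%lu\n",
	  (unsigned long) bitpos, (unsigned long) bitsize);
}
#endif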
4116
4117/* Generate code for computing expression EXP,
4118 and storing the value into TARGET.
bbf6f052 4119
4120 If the mode is BLKmode then we may return TARGET itself.
 4121 It turns out that in BLKmode it doesn't cause a problem,
4122 because C has no operators that could combine two different
4123 assignments into the same BLKmode object with different values
4124 with no sequence point. Will other languages need this to
4125 be more thorough?
4126
6f4fd16d 4127 If CALL_PARAM_P is nonzero, this is a store into a call param on the
8403445a 4128 stack, and block moves may need to be treated specially. */
bbf6f052
RK
4129
4130rtx
6f4fd16d 4131store_expr (tree exp, rtx target, int call_param_p)
bbf6f052 4132{
b3694847 4133 rtx temp;
0fab64a3 4134 rtx alt_rtl = NULL_RTX;
bbf6f052
RK
4135 int dont_return_target = 0;
4136
847311f4
AL
4137 if (VOID_TYPE_P (TREE_TYPE (exp)))
4138 {
4139 /* C++ can generate ?: expressions with a throw expression in one
4140 branch and an rvalue in the other. Here, we resolve attempts to
4d6922ee 4141 store the throw expression's nonexistent result. */
6f4fd16d 4142 gcc_assert (!call_param_p);
847311f4
AL
4143 expand_expr (exp, const0_rtx, VOIDmode, 0);
4144 return NULL_RTX;
4145 }
bbf6f052
RK
4146 if (TREE_CODE (exp) == COMPOUND_EXPR)
4147 {
4148 /* Perform first part of compound expression, then assign from second
4149 part. */
8403445a 4150 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6f4fd16d
KH
4151 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4152 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
bbf6f052
RK
4153 }
4154 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4155 {
4156 /* For conditional expression, get safe form of the target. Then
4157 test the condition, doing the appropriate assignment on either
4158 side. This avoids the creation of unnecessary temporaries.
4159 For non-BLKmode, it is more efficient not to do this. */
4160
4161 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4162
dabf8373 4163 do_pending_stack_adjust ();
bbf6f052
RK
4164 NO_DEFER_POP;
4165 jumpifnot (TREE_OPERAND (exp, 0), lab1);
6f4fd16d 4166 store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
bbf6f052
RK
4167 emit_jump_insn (gen_jump (lab2));
4168 emit_barrier ();
4169 emit_label (lab1);
6f4fd16d 4170 store_expr (TREE_OPERAND (exp, 2), target, call_param_p);
bbf6f052
RK
4171 emit_label (lab2);
4172 OK_DEFER_POP;
a3a58acc 4173
436d948e 4174 return NULL_RTX;
12f06d17 4175 }
1499e0a8 4176 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
09da1532 4177 /* If this is a scalar in a register that is stored in a wider mode
1499e0a8
RK
4178 than the declared mode, compute the result into its declared mode
4179 and then convert to the wider mode. Our value is the computed
4180 expression. */
4181 {
b76b08ef
RK
4182 rtx inner_target = 0;
4183
436d948e
KH
4184 /* We can do the conversion inside EXP, which will often result
4185 in some optimizations. Do the conversion in two steps: first
4186 change the signedness, if needed, then the extend. But don't
4187 do this if the type of EXP is a subtype of something else
4188 since then the conversion might involve more than just
4189 converting modes. */
4190 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
7e7d1b4b
RH
4191 && TREE_TYPE (TREE_TYPE (exp)) == 0
4192 && (!lang_hooks.reduce_bit_field_operations
4193 || (GET_MODE_PRECISION (GET_MODE (target))
4194 == TYPE_PRECISION (TREE_TYPE (exp)))))
f635a84d 4195 {
8df83eae 4196 if (TYPE_UNSIGNED (TREE_TYPE (exp))
f635a84d 4197 != SUBREG_PROMOTED_UNSIGNED_P (target))
ceef8ce4 4198 exp = convert
ae2bcd98 4199 (lang_hooks.types.signed_or_unsigned_type
ceef8ce4 4200 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
f635a84d 4201
ae2bcd98 4202 exp = convert (lang_hooks.types.type_for_mode
b0c48229
NB
4203 (GET_MODE (SUBREG_REG (target)),
4204 SUBREG_PROMOTED_UNSIGNED_P (target)),
f635a84d 4205 exp);
b76b08ef
RK
4206
4207 inner_target = SUBREG_REG (target);
f635a84d 4208 }
3a94c984 4209
8403445a 4210 temp = expand_expr (exp, inner_target, VOIDmode,
6f4fd16d 4211 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
b258707c
RS
4212
4213 /* If TEMP is a VOIDmode constant, use convert_modes to make
4214 sure that we properly convert it. */
4215 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
1f1b0541
RH
4216 {
4217 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4218 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4219 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4220 GET_MODE (target), temp,
4221 SUBREG_PROMOTED_UNSIGNED_P (target));
4222 }
b258707c 4223
1499e0a8
RK
4224 convert_move (SUBREG_REG (target), temp,
4225 SUBREG_PROMOTED_UNSIGNED_P (target));
3dbecef9 4226
436d948e 4227 return NULL_RTX;
1499e0a8 4228 }
bbf6f052
RK
4229 else
4230 {
0fab64a3 4231 temp = expand_expr_real (exp, target, GET_MODE (target),
6f4fd16d 4232 (call_param_p
0fab64a3
MM
4233 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4234 &alt_rtl);
766f36c7 4235 /* Return TARGET if it's a specified hardware register.
709f5be1
RS
4236 If TARGET is a volatile mem ref, either return TARGET
4237 or return a reg copied *from* TARGET; ANSI requires this.
4238
4239 Otherwise, if TEMP is not TARGET, return TEMP
4240 if it is constant (for efficiency),
4241 or if we really want the correct value. */
f8cfc6aa 4242 if (!(target && REG_P (target)
bbf6f052 4243 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3c0cb5de 4244 && !(MEM_P (target) && MEM_VOLATILE_P (target))
effbcc6a 4245 && ! rtx_equal_p (temp, target)
436d948e 4246 && CONSTANT_P (temp))
bbf6f052
RK
4247 dont_return_target = 1;
4248 }
4249
b258707c
RS
4250 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4251 the same as that of TARGET, adjust the constant. This is needed, for
4252 example, in case it is a CONST_DOUBLE and we want only a word-sized
4253 value. */
4254 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
c1da1f33 4255 && TREE_CODE (exp) != ERROR_MARK
b258707c
RS
4256 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4257 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
8df83eae 4258 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
b258707c 4259
bbf6f052 4260 /* If value was not generated in the target, store it there.
1bbd65cd
EB
4261 Convert the value to TARGET's type first if necessary and emit the
4262 pending incrementations that have been queued when expanding EXP.
4263 Note that we cannot emit the whole queue blindly because this will
4264 effectively disable the POST_INC optimization later.
4265
37a08a29 4266 If TEMP and TARGET compare equal according to rtx_equal_p, but
f3f2255a
R
4267 one or both of them are volatile memory refs, we have to distinguish
4268 two cases:
4269 - expand_expr has used TARGET. In this case, we must not generate
4270 another copy. This can be detected by TARGET being equal according
4271 to == .
4272 - expand_expr has not used TARGET - that means that the source just
4273 happens to have the same RTX form. Since temp will have been created
4274 by expand_expr, it will compare unequal according to == .
4275 We must generate a copy in this case, to reach the correct number
4276 of volatile memory references. */
bbf6f052 4277
6036acbb 4278 if ((! rtx_equal_p (temp, target)
f3f2255a
R
4279 || (temp != target && (side_effects_p (temp)
4280 || side_effects_p (target))))
e5408e52 4281 && TREE_CODE (exp) != ERROR_MARK
9c5c5f2c
MM
4282 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4283 but TARGET is not valid memory reference, TEMP will differ
4284 from TARGET although it is really the same location. */
0fab64a3 4285 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
535a42b1
NS
4286 /* If there's nothing to copy, don't bother. Don't call
4287 expr_size unless necessary, because some front-ends (C++)
4288 expr_size-hook must not be given objects that are not
4289 supposed to be bit-copied or bit-initialized. */
e56fc090 4290 && expr_size (exp) != const0_rtx)
bbf6f052 4291 {
bbf6f052 4292 if (GET_MODE (temp) != GET_MODE (target)
f0348c25 4293 && GET_MODE (temp) != VOIDmode)
bbf6f052 4294 {
8df83eae 4295 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
bbf6f052
RK
4296 if (dont_return_target)
4297 {
4298 /* In this case, we will return TEMP,
4299 so make sure it has the proper mode.
4300 But don't forget to store the value into TARGET. */
4301 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4302 emit_move_insn (target, temp);
4303 }
4304 else
4305 convert_move (target, temp, unsignedp);
4306 }
4307
4308 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4309 {
c24ae149
RK
4310 /* Handle copying a string constant into an array. The string
4311 constant may be shorter than the array. So copy just the string's
4312 actual length, and clear the rest. First get the size of the data
4313 type of the string, which is actually the size of the target. */
4314 rtx size = expr_size (exp);
bbf6f052 4315
e87b4f3f
RS
4316 if (GET_CODE (size) == CONST_INT
4317 && INTVAL (size) < TREE_STRING_LENGTH (exp))
8403445a 4318 emit_block_move (target, temp, size,
6f4fd16d 4319 (call_param_p
8403445a 4320 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
e87b4f3f 4321 else
bbf6f052 4322 {
e87b4f3f
RS
4323 /* Compute the size of the data to copy from the string. */
4324 tree copy_size
c03b7665 4325 = size_binop (MIN_EXPR,
b50d17a1 4326 make_tree (sizetype, size),
fed3cef0 4327 size_int (TREE_STRING_LENGTH (exp)));
8403445a
AM
4328 rtx copy_size_rtx
4329 = expand_expr (copy_size, NULL_RTX, VOIDmode,
6f4fd16d 4330 (call_param_p
8403445a 4331 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
e87b4f3f
RS
4332 rtx label = 0;
4333
4334 /* Copy that much. */
267b28bd 4335 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
8df83eae 4336 TYPE_UNSIGNED (sizetype));
8403445a 4337 emit_block_move (target, temp, copy_size_rtx,
6f4fd16d 4338 (call_param_p
8403445a 4339 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
e87b4f3f 4340
88f63c77
RK
4341 /* Figure out how much is left in TARGET that we have to clear.
4342 Do all calculations in ptr_mode. */
e87b4f3f
RS
4343 if (GET_CODE (copy_size_rtx) == CONST_INT)
4344 {
c24ae149
RK
4345 size = plus_constant (size, -INTVAL (copy_size_rtx));
4346 target = adjust_address (target, BLKmode,
4347 INTVAL (copy_size_rtx));
e87b4f3f
RS
4348 }
4349 else
4350 {
fa06ab5c 4351 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
906c4e36
RK
4352 copy_size_rtx, NULL_RTX, 0,
4353 OPTAB_LIB_WIDEN);
e87b4f3f 4354
c24ae149
RK
4355#ifdef POINTERS_EXTEND_UNSIGNED
4356 if (GET_MODE (copy_size_rtx) != Pmode)
267b28bd 4357 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
8df83eae 4358 TYPE_UNSIGNED (sizetype));
c24ae149
RK
4359#endif
4360
4361 target = offset_address (target, copy_size_rtx,
4362 highest_pow2_factor (copy_size));
e87b4f3f 4363 label = gen_label_rtx ();
c5d5d461 4364 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
a06ef755 4365 GET_MODE (size), 0, label);
e87b4f3f
RS
4366 }
4367
4368 if (size != const0_rtx)
8148fe65 4369 clear_storage (target, size, BLOCK_OP_NORMAL);
22619c3f 4370
e87b4f3f
RS
4371 if (label)
4372 emit_label (label);
bbf6f052
RK
4373 }
4374 }
fffa9c1d
JW
4375 /* Handle calls that return values in multiple non-contiguous locations.
4376 The Irix 6 ABI has examples of this. */
4377 else if (GET_CODE (target) == PARALLEL)
6e985040
AM
4378 emit_group_load (target, temp, TREE_TYPE (exp),
4379 int_size_in_bytes (TREE_TYPE (exp)));
bbf6f052 4380 else if (GET_MODE (temp) == BLKmode)
8403445a 4381 emit_block_move (target, temp, expr_size (exp),
6f4fd16d 4382 (call_param_p
8403445a 4383 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
bbf6f052 4384 else
b0dccb00
RH
4385 {
4386 temp = force_operand (temp, target);
4387 if (temp != target)
4388 emit_move_insn (target, temp);
4389 }
bbf6f052 4390 }
709f5be1 4391
436d948e 4392 return NULL_RTX;
bbf6f052
RK
4393}
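/* An illustrative sketch (hypothetical sizes, not compiler code): the
   STRING_CST branch above copies only the string's own bytes and clears
   whatever is left of the target, which at the source level is the familiar
   copy-then-clear pattern below.  */
#if 0
#include <string.h>

/* Initialize a fixed-size array from a shorter string constant the way
   the BLKmode STRING_CST path above does: copy, then zero the tail.  */
static void
init_char_array (char *target, size_t target_size,
		 const char *str, size_t str_len)
{
  size_t copy = str_len < target_size ? str_len : target_size;

  memcpy (target, str, copy);                      /* the string's bytes */
  if (copy < target_size)
    memset (target + copy, 0, target_size - copy); /* clear the rest */
}
#endif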
4394\f
6fa91b48
SB
4395/* Examine CTOR to discover:
4396 * how many scalar fields are set to nonzero values,
4397 and place it in *P_NZ_ELTS;
4398 * how many scalar fields are set to non-constant values,
4399 and place it in *P_NC_ELTS; and
4400 * how many scalar fields in total are in CTOR,
4401 and place it in *P_ELT_COUNT.
4402 * if a type is a union, and the initializer from the constructor
4403 is not the largest element in the union, then set *p_must_clear. */
9de08200 4404
6de9cd9a
DN
4405static void
4406categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
6fa91b48 4407 HOST_WIDE_INT *p_nc_elts,
6f642f98
RH
4408 HOST_WIDE_INT *p_elt_count,
4409 bool *p_must_clear)
9de08200 4410{
4038c495 4411 unsigned HOST_WIDE_INT idx;
6fa91b48 4412 HOST_WIDE_INT nz_elts, nc_elts, elt_count;
4038c495 4413 tree value, purpose;
9de08200 4414
6de9cd9a
DN
4415 nz_elts = 0;
4416 nc_elts = 0;
6fa91b48 4417 elt_count = 0;
caf93cb0 4418
4038c495 4419 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
9de08200 4420 {
6de9cd9a 4421 HOST_WIDE_INT mult;
9de08200 4422
6de9cd9a
DN
4423 mult = 1;
4424 if (TREE_CODE (purpose) == RANGE_EXPR)
4425 {
4426 tree lo_index = TREE_OPERAND (purpose, 0);
4427 tree hi_index = TREE_OPERAND (purpose, 1);
9de08200 4428
6de9cd9a
DN
4429 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4430 mult = (tree_low_cst (hi_index, 1)
4431 - tree_low_cst (lo_index, 1) + 1);
4432 }
9de08200 4433
6de9cd9a
DN
4434 switch (TREE_CODE (value))
4435 {
4436 case CONSTRUCTOR:
4437 {
6f642f98
RH
4438 HOST_WIDE_INT nz = 0, nc = 0, ic = 0;
4439 categorize_ctor_elements_1 (value, &nz, &nc, &ic, p_must_clear);
6de9cd9a
DN
4440 nz_elts += mult * nz;
4441 nc_elts += mult * nc;
6f642f98 4442 elt_count += mult * ic;
6de9cd9a
DN
4443 }
4444 break;
9de08200 4445
6de9cd9a
DN
4446 case INTEGER_CST:
4447 case REAL_CST:
4448 if (!initializer_zerop (value))
4449 nz_elts += mult;
6fa91b48 4450 elt_count += mult;
6de9cd9a 4451 break;
97f8d136
RK
4452
4453 case STRING_CST:
4454 nz_elts += mult * TREE_STRING_LENGTH (value);
6fa91b48 4455 elt_count += mult * TREE_STRING_LENGTH (value);
97f8d136
RK
4456 break;
4457
6de9cd9a
DN
4458 case COMPLEX_CST:
4459 if (!initializer_zerop (TREE_REALPART (value)))
4460 nz_elts += mult;
4461 if (!initializer_zerop (TREE_IMAGPART (value)))
4462 nz_elts += mult;
6fa91b48 4463 elt_count += mult;
6de9cd9a 4464 break;
97f8d136 4465
6de9cd9a
DN
4466 case VECTOR_CST:
4467 {
4468 tree v;
4469 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
6fa91b48
SB
4470 {
4471 if (!initializer_zerop (TREE_VALUE (v)))
4472 nz_elts += mult;
4473 elt_count += mult;
4474 }
6de9cd9a
DN
4475 }
4476 break;
69ef87e2 4477
6de9cd9a
DN
4478 default:
4479 nz_elts += mult;
6fa91b48 4480 elt_count += mult;
6de9cd9a
DN
4481 if (!initializer_constant_valid_p (value, TREE_TYPE (value)))
4482 nc_elts += mult;
4483 break;
4484 }
4485 }
69ef87e2 4486
6f642f98
RH
4487 if (!*p_must_clear
4488 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4489 || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4490 {
4491 tree init_sub_type;
486e4326 4492 bool clear_this = true;
6f642f98 4493
4038c495 4494 if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
6f642f98 4495 {
486e4326
RH
4496 /* We don't expect more than one element of the union to be
4497 initialized. Not sure what we should do otherwise... */
4038c495
GB
4498 gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
4499 == 1);
486e4326 4500
4038c495
GB
4501 init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
4502 CONSTRUCTOR_ELTS (ctor),
4503 0)->value);
486e4326
RH
4504
4505 /* ??? We could look at each element of the union, and find the
4506 largest element. Which would avoid comparing the size of the
4507 initialized element against any tail padding in the union.
4508 Doesn't seem worth the effort... */
4509 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4510 TYPE_SIZE (init_sub_type)) == 1)
4511 {
4512 /* And now we have to find out if the element itself is fully
4513 constructed. E.g. for union { struct { int a, b; } s; } u
4514 = { .s = { .a = 1 } }. */
73ed17ff 4515 if (elt_count == count_type_elements (init_sub_type, false))
486e4326
RH
4516 clear_this = false;
4517 }
6f642f98 4518 }
486e4326
RH
4519
4520 *p_must_clear = clear_this;
6f642f98
RH
4521 }
4522
6de9cd9a
DN
4523 *p_nz_elts += nz_elts;
4524 *p_nc_elts += nc_elts;
6fa91b48 4525 *p_elt_count += elt_count;
6de9cd9a
DN
4526}
4527
4528void
4529categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
6fa91b48 4530 HOST_WIDE_INT *p_nc_elts,
6f642f98
RH
4531 HOST_WIDE_INT *p_elt_count,
4532 bool *p_must_clear)
6de9cd9a
DN
4533{
4534 *p_nz_elts = 0;
4535 *p_nc_elts = 0;
6fa91b48 4536 *p_elt_count = 0;
6f642f98
RH
4537 *p_must_clear = false;
4538 categorize_ctor_elements_1 (ctor, p_nz_elts, p_nc_elts, p_elt_count,
4539 p_must_clear);
6de9cd9a
DN
4540}
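/* An illustrative sketch (hypothetical data structure, not compiler code):
   the walk above only tallies three numbers per constructor, nonzero
   scalars, non-constant scalars and total scalars; the sketch reproduces
   that for a flat list of elements.  */
#if 0
/* One scalar initializer element: its value if constant, and whether it
   is a compile-time constant at all.  */
struct elt { long value; int is_constant; };

/* Tally the same counts categorize_ctor_elements computes, for a flat
   (non-nested) constructor of N elements.  */
static void
tally_elements (const struct elt *elts, int n,
		int *nz_elts, int *nc_elts, int *elt_count)
{
  int i;

  *nz_elts = *nc_elts = 0;
  *elt_count = n;
  for (i = 0; i < n; i++)
    {
      if (!elts[i].is_constant)
	{
	  (*nc_elts)++;
	  (*nz_elts)++;		/* non-constants also count as nonzero */
	}
      else if (elts[i].value != 0)
	(*nz_elts)++;
    }
}
#endif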
4541
4542/* Count the number of scalars in TYPE. Return -1 on overflow or
73ed17ff
JJ
4543 variable-sized. If ALLOW_FLEXARR is true, don't count flexible
4544 array member at the end of the structure. */
6de9cd9a
DN
4545
4546HOST_WIDE_INT
73ed17ff 4547count_type_elements (tree type, bool allow_flexarr)
6de9cd9a
DN
4548{
4549 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4550 switch (TREE_CODE (type))
4551 {
4552 case ARRAY_TYPE:
4553 {
4554 tree telts = array_type_nelts (type);
4555 if (telts && host_integerp (telts, 1))
4556 {
5377d5ba 4557 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
73ed17ff 4558 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
6de9cd9a
DN
4559 if (n == 0)
4560 return 0;
5377d5ba 4561 else if (max / n > m)
6de9cd9a
DN
4562 return n * m;
4563 }
4564 return -1;
4565 }
4566
4567 case RECORD_TYPE:
4568 {
4569 HOST_WIDE_INT n = 0, t;
4570 tree f;
4571
4572 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4573 if (TREE_CODE (f) == FIELD_DECL)
4574 {
73ed17ff 4575 t = count_type_elements (TREE_TYPE (f), false);
6de9cd9a 4576 if (t < 0)
73ed17ff
JJ
4577 {
4578 /* Check for structures with flexible array member. */
4579 tree tf = TREE_TYPE (f);
4580 if (allow_flexarr
4581 && TREE_CHAIN (f) == NULL
4582 && TREE_CODE (tf) == ARRAY_TYPE
4583 && TYPE_DOMAIN (tf)
4584 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
4585 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
4586 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
4587 && int_size_in_bytes (type) >= 0)
4588 break;
4589
4590 return -1;
4591 }
6de9cd9a
DN
4592 n += t;
4593 }
4594
4595 return n;
4596 }
9de08200 4597
6de9cd9a
DN
4598 case UNION_TYPE:
4599 case QUAL_UNION_TYPE:
4600 {
4601 /* Ho hum. How in the world do we guess here? Clearly it isn't
4602 right to count the fields. Guess based on the number of words. */
4603 HOST_WIDE_INT n = int_size_in_bytes (type);
4604 if (n < 0)
4605 return -1;
4606 return n / UNITS_PER_WORD;
4607 }
4608
4609 case COMPLEX_TYPE:
4610 return 2;
4611
4612 case VECTOR_TYPE:
3a021db2 4613 return TYPE_VECTOR_SUBPARTS (type);
6de9cd9a
DN
4614
4615 case INTEGER_TYPE:
4616 case REAL_TYPE:
4617 case ENUMERAL_TYPE:
4618 case BOOLEAN_TYPE:
4619 case CHAR_TYPE:
4620 case POINTER_TYPE:
4621 case OFFSET_TYPE:
4622 case REFERENCE_TYPE:
9de08200 4623 return 1;
3a94c984 4624
6de9cd9a
DN
4625 case VOID_TYPE:
4626 case METHOD_TYPE:
6de9cd9a
DN
4627 case FUNCTION_TYPE:
4628 case LANG_TYPE:
e9a25f70 4629 default:
5b0264cb 4630 gcc_unreachable ();
9de08200 4631 }
9de08200
RK
4632}
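/* An illustrative example (hypothetical type, not compiler code): for
   "struct { int a; double b[3]; _Complex double c; }" the count above is
   1 + 3 + 2 = 6 scalars; the sketch just spells out that arithmetic.  */
#if 0
/* Count the scalars in the hypothetical type above the way
   count_type_elements would: fields of a RECORD_TYPE add up, an ARRAY_TYPE
   multiplies by the element count, a COMPLEX_TYPE contributes 2.  */
static int
count_example_scalars (void)
{
  int a_elts = 1;            /* INTEGER_TYPE */
  int b_elts = 3 * 1;        /* ARRAY_TYPE: nelts * scalars per element */
  int c_elts = 2;            /* COMPLEX_TYPE */

  return a_elts + b_elts + c_elts;   /* == 6 */
}
#endif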
4633
4634/* Return 1 if EXP contains mostly (3/4) zeros. */
4635
e0ce7708 4636static int
502b8322 4637mostly_zeros_p (tree exp)
9de08200 4638{
9de08200 4639 if (TREE_CODE (exp) == CONSTRUCTOR)
caf93cb0 4640
9de08200 4641 {
6fa91b48 4642 HOST_WIDE_INT nz_elts, nc_elts, count, elts;
6f642f98
RH
4643 bool must_clear;
4644
4645 categorize_ctor_elements (exp, &nz_elts, &nc_elts, &count, &must_clear);
4646 if (must_clear)
4647 return 1;
6de9cd9a 4648
73ed17ff 4649 elts = count_type_elements (TREE_TYPE (exp), false);
9de08200 4650
6de9cd9a 4651 return nz_elts < elts / 4;
9de08200
RK
4652 }
4653
6de9cd9a 4654 return initializer_zerop (exp);
9de08200 4655}
c5250139
RG
4656
4657/* Return 1 if EXP contains all zeros. */
4658
4659static int
4660all_zeros_p (tree exp)
4661{
4662 if (TREE_CODE (exp) == CONSTRUCTOR)
4663
4664 {
4665 HOST_WIDE_INT nz_elts, nc_elts, count;
4666 bool must_clear;
4667
4668 categorize_ctor_elements (exp, &nz_elts, &nc_elts, &count, &must_clear);
4669 return nz_elts == 0;
4670 }
4671
4672 return initializer_zerop (exp);
4673}
9de08200 4674\f
e1a43f73
PB
4675/* Helper function for store_constructor.
4676 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4677 TYPE is the type of the CONSTRUCTOR, not the element type.
04050c69 4678 CLEARED is as for store_constructor.
23cb1766 4679 ALIAS_SET is the alias set to use for any stores.
4680
4681 This provides a recursive shortcut back to store_constructor when it isn't
4682 necessary to go through store_field. This is so that we can pass through
4683 the cleared field to let store_constructor know that we may not have to
4684 clear a substructure if the outer structure has already been cleared. */
e1a43f73
PB
4685
4686static void
502b8322
AJ
4687store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4688 HOST_WIDE_INT bitpos, enum machine_mode mode,
4689 tree exp, tree type, int cleared, int alias_set)
e1a43f73
PB
4690{
4691 if (TREE_CODE (exp) == CONSTRUCTOR
6c89c39a
RK
4692 /* We can only call store_constructor recursively if the size and
4693 bit position are on a byte boundary. */
23ccec44 4694 && bitpos % BITS_PER_UNIT == 0
6c89c39a 4695 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
cc2902df 4696 /* If we have a nonzero bitpos for a register target, then we just
23ccec44
JW
4697 let store_field do the bitfield handling. This is unlikely to
4698 generate unnecessary clear instructions anyways. */
3c0cb5de 4699 && (bitpos == 0 || MEM_P (target)))
e1a43f73 4700 {
3c0cb5de 4701 if (MEM_P (target))
61cb205c
RK
4702 target
4703 = adjust_address (target,
4704 GET_MODE (target) == BLKmode
4705 || 0 != (bitpos
4706 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4707 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
23cb1766 4708
e0339ef7 4709
04050c69 4710 /* Update the alias set, if required. */
3c0cb5de 4711 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
10b76d73 4712 && MEM_ALIAS_SET (target) != 0)
70072ed9
RK
4713 {
4714 target = copy_rtx (target);
4715 set_mem_alias_set (target, alias_set);
4716 }
e0339ef7 4717
dbb5c281 4718 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
e1a43f73
PB
4719 }
4720 else
f45bdcd0 4721 store_field (target, bitsize, bitpos, mode, exp, type, alias_set);
e1a43f73
PB
4722}
4723
bbf6f052 4724/* Store the value of constructor EXP into the rtx TARGET.
04050c69
RK
4725 TARGET is either a REG or a MEM; we know it cannot conflict, since
4726 safe_from_p has been called.
dbb5c281
RK
4727 CLEARED is true if TARGET is known to have been zeroed.
4728 SIZE is the number of bytes of TARGET we are allowed to modify: this
b7010412
RK
4729 may not be the same as the size of EXP if we are assigning to a field
4730 which has been packed to exclude padding bits. */
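/* As a rough illustration: for a local "struct { int a, b; } x = { 1, 2 };"
   this is typically reached with TARGET being the MEM for x, and each field
   of the CONSTRUCTOR is handed to store_constructor_field below with its own
   bit position and alias set.  */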
bbf6f052
RK
4731
4732static void
502b8322 4733store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
bbf6f052 4734{
4af3895e 4735 tree type = TREE_TYPE (exp);
a5efcd63 4736#ifdef WORD_REGISTER_OPERATIONS
13eb1f7f 4737 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
a5efcd63 4738#endif
4af3895e 4739
5b0264cb 4740 switch (TREE_CODE (type))
bbf6f052 4741 {
5b0264cb
NS
4742 case RECORD_TYPE:
4743 case UNION_TYPE:
4744 case QUAL_UNION_TYPE:
4745 {
4038c495
GB
4746 unsigned HOST_WIDE_INT idx;
4747 tree field, value;
9de08200 4748
5b0264cb
NS
4749 /* If size is zero or the target is already cleared, do nothing. */
4750 if (size == 0 || cleared)
9de08200 4751 cleared = 1;
5b0264cb
NS
4752 /* We either clear the aggregate or indicate the value is dead. */
4753 else if ((TREE_CODE (type) == UNION_TYPE
4754 || TREE_CODE (type) == QUAL_UNION_TYPE)
4755 && ! CONSTRUCTOR_ELTS (exp))
4756 /* If the constructor is empty, clear the union. */
4757 {
8148fe65 4758 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5b0264cb
NS
4759 cleared = 1;
4760 }
bbf6f052 4761
5b0264cb
NS
4762 /* If we are building a static constructor into a register,
4763 set the initial value as zero so we can fold the value into
4764 a constant. But if more than one register is involved,
4765 this probably loses. */
4766 else if (REG_P (target) && TREE_STATIC (exp)
4767 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4768 {
4769 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4770 cleared = 1;
4771 }
3a94c984 4772
5b0264cb
NS
4773 /* If the constructor has fewer fields than the structure or
4774 if we are initializing the structure to mostly zeros, clear
4775 the whole structure first. Don't do this if TARGET is a
4776 register whose mode size isn't equal to SIZE since
4777 clear_storage can't handle this case. */
4778 else if (size > 0
4038c495 4779 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
5b0264cb
NS
4780 != fields_length (type))
4781 || mostly_zeros_p (exp))
4782 && (!REG_P (target)
4783 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4784 == size)))
4785 {
8148fe65 4786 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5b0264cb
NS
4787 cleared = 1;
4788 }
b50d17a1 4789
5b0264cb
NS
4790 if (! cleared)
4791 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
bbf6f052 4792
5b0264cb
NS
4793 /* Store each element of the constructor into the
4794 corresponding field of TARGET. */
4038c495 4795 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5b0264cb 4796 {
5b0264cb
NS
4797 enum machine_mode mode;
4798 HOST_WIDE_INT bitsize;
4799 HOST_WIDE_INT bitpos = 0;
4800 tree offset;
4801 rtx to_rtx = target;
4802
4803 /* Just ignore missing fields. We cleared the whole
4804 structure, above, if any fields are missing. */
4805 if (field == 0)
4806 continue;
4807
4808 if (cleared && initializer_zerop (value))
4809 continue;
4810
4811 if (host_integerp (DECL_SIZE (field), 1))
4812 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4813 else
4814 bitsize = -1;
4815
4816 mode = DECL_MODE (field);
4817 if (DECL_BIT_FIELD (field))
4818 mode = VOIDmode;
4819
4820 offset = DECL_FIELD_OFFSET (field);
4821 if (host_integerp (offset, 0)
4822 && host_integerp (bit_position (field), 0))
4823 {
4824 bitpos = int_bit_position (field);
4825 offset = 0;
4826 }
4827 else
4828 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4829
4830 if (offset)
4831 {
4832 rtx offset_rtx;
4833
4834 offset
4835 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4836 make_tree (TREE_TYPE (exp),
4837 target));
4838
4839 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4840 gcc_assert (MEM_P (to_rtx));
4841
bd070e1a 4842#ifdef POINTERS_EXTEND_UNSIGNED
5b0264cb
NS
4843 if (GET_MODE (offset_rtx) != Pmode)
4844 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
fa06ab5c 4845#else
5b0264cb
NS
4846 if (GET_MODE (offset_rtx) != ptr_mode)
4847 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
bd070e1a 4848#endif
bd070e1a 4849
5b0264cb
NS
4850 to_rtx = offset_address (to_rtx, offset_rtx,
4851 highest_pow2_factor (offset));
4852 }
c5c76735 4853
34c73909 4854#ifdef WORD_REGISTER_OPERATIONS
5b0264cb
NS
4855 /* If this initializes a field that is smaller than a
4856 word, at the start of a word, try to widen it to a full
4857 word. This special case allows us to output C++ member
4858 function initializations in a form that the optimizers
4859 can understand. */
4860 if (REG_P (target)
4861 && bitsize < BITS_PER_WORD
4862 && bitpos % BITS_PER_WORD == 0
4863 && GET_MODE_CLASS (mode) == MODE_INT
4864 && TREE_CODE (value) == INTEGER_CST
4865 && exp_size >= 0
4866 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4867 {
4868 tree type = TREE_TYPE (value);
4869
4870 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4871 {
4872 type = lang_hooks.types.type_for_size
4873 (BITS_PER_WORD, TYPE_UNSIGNED (type));
4874 value = convert (type, value);
4875 }
4876
4877 if (BYTES_BIG_ENDIAN)
4878 value
4845b383
KH
4879 = fold_build2 (LSHIFT_EXPR, type, value,
4880 build_int_cst (NULL_TREE,
4881 BITS_PER_WORD - bitsize));
5b0264cb
NS
4882 bitsize = BITS_PER_WORD;
4883 mode = word_mode;
4884 }
34c73909 4885#endif
10b76d73 4886
5b0264cb
NS
4887 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4888 && DECL_NONADDRESSABLE_P (field))
4889 {
4890 to_rtx = copy_rtx (to_rtx);
4891 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4892 }
4893
4894 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4895 value, type, cleared,
4896 get_alias_set (TREE_TYPE (field)));
4897 }
4898 break;
4899 }
4900 case ARRAY_TYPE:
4901 {
4038c495
GB
4902 tree value, index;
4903 unsigned HOST_WIDE_INT i;
5b0264cb
NS
4904 int need_to_clear;
4905 tree domain;
4906 tree elttype = TREE_TYPE (type);
4907 int const_bounds_p;
4908 HOST_WIDE_INT minelt = 0;
4909 HOST_WIDE_INT maxelt = 0;
4910
4911 domain = TYPE_DOMAIN (type);
4912 const_bounds_p = (TYPE_MIN_VALUE (domain)
4913 && TYPE_MAX_VALUE (domain)
4914 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4915 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4916
4917 /* If we have constant bounds for the range of the type, get them. */
4918 if (const_bounds_p)
4919 {
4920 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4921 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4922 }
3a021db2 4923
5b0264cb
NS
4924 /* If the constructor has fewer elements than the array, clear
4925 the whole array first. Similarly if this is a static
4926 constructor of a non-BLKmode object. */
4927 if (cleared)
4928 need_to_clear = 0;
4929 else if (REG_P (target) && TREE_STATIC (exp))
4930 need_to_clear = 1;
4931 else
4932 {
4038c495
GB
4933 unsigned HOST_WIDE_INT idx;
4934 tree index, value;
5b0264cb
NS
4935 HOST_WIDE_INT count = 0, zero_count = 0;
4936 need_to_clear = ! const_bounds_p;
4937
4938 /* This loop is a more accurate version of the loop in
4939 mostly_zeros_p (it handles RANGE_EXPR in an index). It
4940 is also needed to check for missing elements. */
4038c495 4941 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5b0264cb 4942 {
5b0264cb 4943 HOST_WIDE_INT this_node_count;
4038c495
GB
4944
4945 if (need_to_clear)
4946 break;
5b0264cb
NS
4947
4948 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4949 {
4950 tree lo_index = TREE_OPERAND (index, 0);
4951 tree hi_index = TREE_OPERAND (index, 1);
4952
4953 if (! host_integerp (lo_index, 1)
4954 || ! host_integerp (hi_index, 1))
4955 {
4956 need_to_clear = 1;
4957 break;
4958 }
4959
4960 this_node_count = (tree_low_cst (hi_index, 1)
4961 - tree_low_cst (lo_index, 1) + 1);
4962 }
4963 else
4964 this_node_count = 1;
4965
4966 count += this_node_count;
4038c495 4967 if (mostly_zeros_p (value))
5b0264cb
NS
4968 zero_count += this_node_count;
4969 }
4970
4971 /* Clear the entire array first if there are any missing
4972 elements, or if the incidence of zero elements is >=
4973 75%. */
4974 if (! need_to_clear
4975 && (count < maxelt - minelt + 1
4976 || 4 * zero_count >= 3 * count))
4977 need_to_clear = 1;
4978 }
4979
4980 if (need_to_clear && size > 0)
4981 {
4982 if (REG_P (target))
4983 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4984 else
8148fe65 4985 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5b0264cb
NS
4986 cleared = 1;
4987 }
3a021db2 4988
5b0264cb
NS
4989 if (!cleared && REG_P (target))
4990 /* Inform later passes that the old value is dead. */
4991 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
3a021db2 4992
5b0264cb
NS
4993 /* Store each element of the constructor into the
4994 corresponding element of TARGET, determined by counting the
4995 elements. */
4038c495 4996 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5b0264cb
NS
4997 {
4998 enum machine_mode mode;
4999 HOST_WIDE_INT bitsize;
5000 HOST_WIDE_INT bitpos;
5001 int unsignedp;
5b0264cb
NS
5002 rtx xtarget = target;
5003
5004 if (cleared && initializer_zerop (value))
5005 continue;
5006
5007 unsignedp = TYPE_UNSIGNED (elttype);
5008 mode = TYPE_MODE (elttype);
5009 if (mode == BLKmode)
5010 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5011 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5012 : -1);
5013 else
5014 bitsize = GET_MODE_BITSIZE (mode);
5015
5016 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5017 {
5018 tree lo_index = TREE_OPERAND (index, 0);
5019 tree hi_index = TREE_OPERAND (index, 1);
5020 rtx index_r, pos_rtx;
5021 HOST_WIDE_INT lo, hi, count;
5022 tree position;
5023
5024 /* If the range is constant and "small", unroll the loop. */
5025 if (const_bounds_p
5026 && host_integerp (lo_index, 0)
5027 && host_integerp (hi_index, 0)
5028 && (lo = tree_low_cst (lo_index, 0),
5029 hi = tree_low_cst (hi_index, 0),
5030 count = hi - lo + 1,
5031 (!MEM_P (target)
5032 || count <= 2
5033 || (host_integerp (TYPE_SIZE (elttype), 1)
5034 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5035 <= 40 * 8)))))
5036 {
5037 lo -= minelt; hi -= minelt;
5038 for (; lo <= hi; lo++)
5039 {
5040 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5041
5042 if (MEM_P (target)
5043 && !MEM_KEEP_ALIAS_SET_P (target)
5044 && TREE_CODE (type) == ARRAY_TYPE
5045 && TYPE_NONALIASED_COMPONENT (type))
5046 {
5047 target = copy_rtx (target);
5048 MEM_KEEP_ALIAS_SET_P (target) = 1;
5049 }
5050
5051 store_constructor_field
5052 (target, bitsize, bitpos, mode, value, type, cleared,
5053 get_alias_set (elttype));
5054 }
5055 }
5056 else
5057 {
5058 rtx loop_start = gen_label_rtx ();
5059 rtx loop_end = gen_label_rtx ();
5060 tree exit_cond;
5061
5062 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
5063 unsignedp = TYPE_UNSIGNED (domain);
5064
5065 index = build_decl (VAR_DECL, NULL_TREE, domain);
5066
5067 index_r
5068 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5069 &unsignedp, 0));
5070 SET_DECL_RTL (index, index_r);
5071 store_expr (lo_index, index_r, 0);
5072
5073 /* Build the head of the loop. */
5074 do_pending_stack_adjust ();
5075 emit_label (loop_start);
5076
5077 /* Assign value to element index. */
5078 position
5079 = convert (ssizetype,
4845b383
KH
5080 fold_build2 (MINUS_EXPR, TREE_TYPE (index),
5081 index, TYPE_MIN_VALUE (domain)));
5b0264cb
NS
5082 position = size_binop (MULT_EXPR, position,
5083 convert (ssizetype,
5084 TYPE_SIZE_UNIT (elttype)));
5085
5086 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
5087 xtarget = offset_address (target, pos_rtx,
5088 highest_pow2_factor (position));
5089 xtarget = adjust_address (xtarget, mode, 0);
5090 if (TREE_CODE (value) == CONSTRUCTOR)
5091 store_constructor (value, xtarget, cleared,
5092 bitsize / BITS_PER_UNIT);
5093 else
5094 store_expr (value, xtarget, 0);
5095
5096 /* Generate a conditional jump to exit the loop. */
5097 exit_cond = build2 (LT_EXPR, integer_type_node,
5098 index, hi_index);
5099 jumpif (exit_cond, loop_end);
5100
5101 /* Update the loop counter, and jump to the head of
5102 the loop. */
5103 expand_assignment (index,
5104 build2 (PLUS_EXPR, TREE_TYPE (index),
e836a5a2 5105 index, integer_one_node));
5b0264cb
NS
5106
5107 emit_jump (loop_start);
5108
5109 /* Build the end of the loop. */
5110 emit_label (loop_end);
5111 }
5112 }
5113 else if ((index != 0 && ! host_integerp (index, 0))
5114 || ! host_integerp (TYPE_SIZE (elttype), 1))
5115 {
5116 tree position;
5117
5118 if (index == 0)
5119 index = ssize_int (1);
5120
5121 if (minelt)
5122 index = fold_convert (ssizetype,
4845b383
KH
5123 fold_build2 (MINUS_EXPR,
5124 TREE_TYPE (index),
5125 index,
5126 TYPE_MIN_VALUE (domain)));
5b0264cb
NS
5127
5128 position = size_binop (MULT_EXPR, index,
5129 convert (ssizetype,
5130 TYPE_SIZE_UNIT (elttype)));
5131 xtarget = offset_address (target,
5132 expand_expr (position, 0, VOIDmode, 0),
5133 highest_pow2_factor (position));
5134 xtarget = adjust_address (xtarget, mode, 0);
5135 store_expr (value, xtarget, 0);
5136 }
5137 else
5138 {
5139 if (index != 0)
5140 bitpos = ((tree_low_cst (index, 0) - minelt)
5141 * tree_low_cst (TYPE_SIZE (elttype), 1));
5142 else
5143 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5144
5145 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5146 && TREE_CODE (type) == ARRAY_TYPE
5147 && TYPE_NONALIASED_COMPONENT (type))
5148 {
5149 target = copy_rtx (target);
5150 MEM_KEEP_ALIAS_SET_P (target) = 1;
5151 }
5152 store_constructor_field (target, bitsize, bitpos, mode, value,
5153 type, cleared, get_alias_set (elttype));
5154 }
5155 }
5156 break;
5157 }
3a021db2 5158
5b0264cb
NS
5159 case VECTOR_TYPE:
5160 {
4038c495
GB
5161 unsigned HOST_WIDE_INT idx;
5162 constructor_elt *ce;
5b0264cb
NS
5163 int i;
5164 int need_to_clear;
5165 int icode = 0;
5166 tree elttype = TREE_TYPE (type);
5167 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5168 enum machine_mode eltmode = TYPE_MODE (elttype);
5169 HOST_WIDE_INT bitsize;
5170 HOST_WIDE_INT bitpos;
201dd46b 5171 rtvec vector = NULL;
5b0264cb
NS
5172 unsigned n_elts;
5173
5174 gcc_assert (eltmode != BLKmode);
5175
5176 n_elts = TYPE_VECTOR_SUBPARTS (type);
5177 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5178 {
5179 enum machine_mode mode = GET_MODE (target);
5180
5181 icode = (int) vec_init_optab->handlers[mode].insn_code;
5182 if (icode != CODE_FOR_nothing)
5183 {
5184 unsigned int i;
5185
201dd46b 5186 vector = rtvec_alloc (n_elts);
5b0264cb 5187 for (i = 0; i < n_elts; i++)
201dd46b 5188 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5b0264cb
NS
5189 }
5190 }
5191
5192 /* If the constructor has fewer elements than the vector,
5193 clear the whole vector first. Similarly if this is a static
5194 constructor of a non-BLKmode object. */
5195 if (cleared)
5196 need_to_clear = 0;
5197 else if (REG_P (target) && TREE_STATIC (exp))
5198 need_to_clear = 1;
5199 else
5200 {
5201 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
4038c495 5202 tree value;
5b0264cb 5203
4038c495 5204 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
5b0264cb
NS
5205 {
5206 int n_elts_here = tree_low_cst
5207 (int_const_binop (TRUNC_DIV_EXPR,
4038c495 5208 TYPE_SIZE (TREE_TYPE (value)),
5b0264cb
NS
5209 TYPE_SIZE (elttype), 0), 1);
5210
5211 count += n_elts_here;
4038c495 5212 if (mostly_zeros_p (value))
5b0264cb
NS
5213 zero_count += n_elts_here;
5214 }
3a021db2 5215
5b0264cb
NS
5216 /* Clear the entire vector first if there are any missing elements,
5217 or if the incidence of zero elements is >= 75%. */
5218 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5219 }
5220
5221 if (need_to_clear && size > 0 && !vector)
5222 {
5223 if (REG_P (target))
5224 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5225 else
8148fe65 5226 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5b0264cb
NS
5227 cleared = 1;
5228 }
5229
2ab1754e 5230 /* Inform later passes that the old value is dead. */
5b0264cb 5231 if (!cleared && REG_P (target))
2ab1754e 5232 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5b0264cb
NS
5233
5234 /* Store each element of the constructor into the corresponding
5235 element of TARGET, determined by counting the elements. */
4038c495
GB
5236 for (idx = 0, i = 0;
5237 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5238 idx++, i += bitsize / elt_size)
5b0264cb 5239 {
5b0264cb 5240 HOST_WIDE_INT eltpos;
4038c495 5241 tree value = ce->value;
5b0264cb
NS
5242
5243 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5244 if (cleared && initializer_zerop (value))
5245 continue;
5246
4038c495
GB
5247 if (ce->index)
5248 eltpos = tree_low_cst (ce->index, 1);
5b0264cb
NS
5249 else
5250 eltpos = i;
5251
5252 if (vector)
5253 {
5254 /* Vector CONSTRUCTORs should only be built from smaller
5255 vectors in the case of BLKmode vectors. */
5256 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
201dd46b
RH
5257 RTVEC_ELT (vector, eltpos)
5258 = expand_expr (value, NULL_RTX, VOIDmode, 0);
5b0264cb
NS
5259 }
5260 else
5261 {
5262 enum machine_mode value_mode =
5263 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
3a021db2
PB
5264 ? TYPE_MODE (TREE_TYPE (value))
5265 : eltmode;
5b0264cb
NS
5266 bitpos = eltpos * elt_size;
5267 store_constructor_field (target, bitsize, bitpos,
5268 value_mode, value, type,
5269 cleared, get_alias_set (elttype));
5270 }
5271 }
5272
5273 if (vector)
5274 emit_insn (GEN_FCN (icode)
5275 (target,
201dd46b 5276 gen_rtx_PARALLEL (GET_MODE (target), vector)));
5b0264cb
NS
5277 break;
5278 }
08f2586c 5279
5b0264cb
NS
5280 default:
5281 gcc_unreachable ();
071a6595 5282 }
bbf6f052
RK
5283}
5284
5285/* Store the value of EXP (an expression tree)
5286 into a subfield of TARGET which has mode MODE and occupies
5287 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5288 If MODE is VOIDmode, it means that we are storing into a bit-field.
5289
f45bdcd0
KH
5290 Always return const0_rtx unless we have something particular to
5291 return.
bbf6f052 5292
a06ef755 5293 TYPE is the type of the underlying object,
ece32014
MM
5294
5295 ALIAS_SET is the alias set for the destination. This value will
5296 (in general) be different from that for TARGET, since TARGET is a
5297 reference to the containing structure. */
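/* As an illustration: assigning to a 3-bit bit-field that starts, say, at
   bit 2 of its containing word reaches this function with BITSIZE == 3,
   BITPOS == 2 and MODE == VOIDmode, which forces the store_bit_field path
   below rather than an ordinary memory store.  */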
bbf6f052
RK
5298
5299static rtx
502b8322 5300store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
f45bdcd0 5301 enum machine_mode mode, tree exp, tree type, int alias_set)
bbf6f052 5302{
906c4e36 5303 HOST_WIDE_INT width_mask = 0;
bbf6f052 5304
e9a25f70
JL
5305 if (TREE_CODE (exp) == ERROR_MARK)
5306 return const0_rtx;
5307
2be6a7e9
RK
5308 /* If we have nothing to store, do nothing unless the expression has
5309 side-effects. */
5310 if (bitsize == 0)
5311 return expand_expr (exp, const0_rtx, VOIDmode, 0);
6a87d634 5312 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
906c4e36 5313 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
bbf6f052
RK
5314
5315 /* If we are storing into an unaligned field of an aligned union that is
5316 in a register, we may have the mode of TARGET being an integer mode but
5317 MODE == BLKmode. In that case, get an aligned object whose size and
5318 alignment are the same as TARGET and store TARGET into it (we can avoid
5319 the store if the field being stored is the entire width of TARGET). Then
5320 call ourselves recursively to store the field into a BLKmode version of
5321 that object. Finally, load from the object into TARGET. This is not
5322 very efficient in general, but should only be slightly more expensive
5323 than the otherwise-required unaligned accesses. Perhaps this can be
85a43a2f
RK
5324 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5325 twice, once with emit_move_insn and once via store_field. */
bbf6f052
RK
5326
5327 if (mode == BLKmode
f8cfc6aa 5328 && (REG_P (target) || GET_CODE (target) == SUBREG))
bbf6f052 5329 {
85a43a2f 5330 rtx object = assign_temp (type, 0, 1, 1);
c4e59f51 5331 rtx blk_object = adjust_address (object, BLKmode, 0);
bbf6f052 5332
8752c357 5333 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
bbf6f052
RK
5334 emit_move_insn (object, target);
5335
f45bdcd0 5336 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set);
bbf6f052
RK
5337
5338 emit_move_insn (target, object);
5339
a06ef755 5340 /* We want to return the BLKmode version of the data. */
46093b97 5341 return blk_object;
bbf6f052 5342 }
c3b247b4
JM
5343
5344 if (GET_CODE (target) == CONCAT)
5345 {
5346 /* We're storing into a struct containing a single __complex. */
5347
5b0264cb 5348 gcc_assert (!bitpos);
f45bdcd0 5349 return store_expr (exp, target, 0);
c3b247b4 5350 }
bbf6f052
RK
5351
5352 /* If the structure is in a register or if the component
5353 is a bit field, we cannot use addressing to access it.
5354 Use bit-field techniques or SUBREG to store in it. */
5355
4fa52007 5356 if (mode == VOIDmode
6ab06cbb
JW
5357 || (mode != BLKmode && ! direct_store[(int) mode]
5358 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5359 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
f8cfc6aa 5360 || REG_P (target)
c980ac49 5361 || GET_CODE (target) == SUBREG
ccc98036
RS
5362 /* If the field isn't aligned enough to store as an ordinary memref,
5363 store it as a bit field. */
15b19a7d 5364 || (mode != BLKmode
9e5f281f
OH
5365 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5366 || bitpos % GET_MODE_ALIGNMENT (mode))
5367 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
502b8322 5368 || (bitpos % BITS_PER_UNIT != 0)))
14a774a9
RK
5369 /* If the RHS and field are a constant size and the size of the
5370 RHS isn't the same size as the bitfield, we must use bitfield
5371 operations. */
05bccae2
RK
5372 || (bitsize >= 0
5373 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5374 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
bbf6f052 5375 {
48cc8d3b
RH
5376 rtx temp;
5377
5378 /* If EXP is a NOP_EXPR of precision less than its mode, then that
5379 implies a mask operation. If the precision is the same size as
5380 the field we're storing into, that mask is redundant. This is
5381 particularly common with bit field assignments generated by the
5382 C front end. */
8d740330
RH
5383 if (TREE_CODE (exp) == NOP_EXPR)
5384 {
5385 tree type = TREE_TYPE (exp);
5386 if (INTEGRAL_TYPE_P (type)
5387 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5388 && bitsize == TYPE_PRECISION (type))
5389 {
5390 type = TREE_TYPE (TREE_OPERAND (exp, 0));
5391 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5392 exp = TREE_OPERAND (exp, 0);
5393 }
5394 }
48cc8d3b
RH
5395
5396 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
bbd6cf73 5397
ef19912d
RK
5398 /* If BITSIZE is narrower than the size of the type of EXP
5399 we will be narrowing TEMP. Normally, what's wanted are the
5400 low-order bits. However, if EXP's type is a record and this is
5401 a big-endian machine, we want the upper BITSIZE bits. */
5402 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
65a07688 5403 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
ef19912d
RK
5404 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5405 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5406 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5407 - bitsize),
c1853da7 5408 NULL_RTX, 1);
ef19912d 5409
bbd6cf73
RK
5410 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5411 MODE. */
5412 if (mode != VOIDmode && mode != BLKmode
5413 && mode != TYPE_MODE (TREE_TYPE (exp)))
5414 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5415
a281e72d
RK
5416 /* If the modes of TARGET and TEMP are both BLKmode, both
5417 must be in memory and BITPOS must be aligned on a byte
5418 boundary. If so, we simply do a block copy. */
5419 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5420 {
5b0264cb
NS
5421 gcc_assert (MEM_P (target) && MEM_P (temp)
5422 && !(bitpos % BITS_PER_UNIT));
a281e72d 5423
f4ef873c 5424 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
a281e72d 5425 emit_block_move (target, temp,
a06ef755 5426 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
44bb111a
RH
5427 / BITS_PER_UNIT),
5428 BLOCK_OP_NORMAL);
a281e72d 5429
f45bdcd0 5430 return const0_rtx;
a281e72d
RK
5431 }
5432
bbf6f052 5433 /* Store the value in the bitfield. */
b3520980 5434 store_bit_field (target, bitsize, bitpos, mode, temp);
a06ef755 5435
bbf6f052
RK
5436 return const0_rtx;
5437 }
5438 else
5439 {
bbf6f052 5440 /* Now build a reference to just the desired component. */
f45bdcd0 5441 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
a06ef755
RK
5442
5443 if (to_rtx == target)
5444 to_rtx = copy_rtx (to_rtx);
792760b9 5445
c6df88cb 5446 MEM_SET_IN_STRUCT_P (to_rtx, 1);
10b76d73 5447 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
a06ef755 5448 set_mem_alias_set (to_rtx, alias_set);
bbf6f052 5449
f45bdcd0 5450 return store_expr (exp, to_rtx, 0);
bbf6f052
RK
5451 }
5452}
5453\f
5454/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
b4e3fabb
RK
5455 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5456 codes and find the ultimate containing object, which we return.
bbf6f052
RK
5457
5458 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5459 bit position, and *PUNSIGNEDP to the signedness of the field.
7bb0943f
RS
5460 If the position of the field is variable, we store a tree
5461 giving the variable offset (in units) in *POFFSET.
5462 This offset is in addition to the bit position.
5463 If the position is not variable, we store 0 in *POFFSET.
bbf6f052
RK
5464
5465 If any of the extraction expressions is volatile,
5466 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5467
5468 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5469 is a mode that can be used to access the field. In that case, *PBITSIZE
e7c33f54
RK
5470 is redundant.
5471
5472 If the field describes a variable-sized object, *PMODE is set to
5473 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
2614034e
EB
5474 this case, but the address of the object can be found.
5475
5476 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5477 look through nodes that serve as markers of a greater alignment than
5478 the one that can be deduced from the expression. These nodes make it
5479 possible for front-ends to prevent temporaries from being created by
5480 the middle-end on alignment considerations. For that purpose, the
5481 normal operating mode at high-level is to always pass FALSE so that
5482 the ultimate containing object is really returned; moreover, the
5483 associated predicate handled_component_p will always return TRUE
5484 on these nodes, thus indicating that they are essentially handled
5485 by get_inner_reference. TRUE should only be passed when the caller
5486 is scanning the expression in order to build another representation
5487 and specifically knows how to handle these nodes; as such, this is
5488 the normal operating mode in the RTL expanders. */
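/* A small example: for a COMPONENT_REF accessing a 3-bit bit-field placed,
   say, 5 bits into its enclosing record, this returns the record object and
   sets *PBITSIZE to 3, *PBITPOS to 5, *POFFSET to 0 and *PMODE to VOIDmode.  */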
bbf6f052
RK
5489
5490tree
502b8322
AJ
5491get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5492 HOST_WIDE_INT *pbitpos, tree *poffset,
5493 enum machine_mode *pmode, int *punsignedp,
2614034e 5494 int *pvolatilep, bool keep_aligning)
bbf6f052
RK
5495{
5496 tree size_tree = 0;
5497 enum machine_mode mode = VOIDmode;
fed3cef0 5498 tree offset = size_zero_node;
770ae6cc 5499 tree bit_offset = bitsize_zero_node;
770ae6cc 5500 tree tem;
bbf6f052 5501
770ae6cc
RK
5502 /* First get the mode, signedness, and size. We do this from just the
5503 outermost expression. */
bbf6f052
RK
5504 if (TREE_CODE (exp) == COMPONENT_REF)
5505 {
5506 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5507 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5508 mode = DECL_MODE (TREE_OPERAND (exp, 1));
770ae6cc 5509
a150de29 5510 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
bbf6f052
RK
5511 }
5512 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5513 {
5514 size_tree = TREE_OPERAND (exp, 1);
a150de29 5515 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
bbf6f052
RK
5516 }
5517 else
5518 {
5519 mode = TYPE_MODE (TREE_TYPE (exp));
8df83eae 5520 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
770ae6cc 5521
ab87f8c8
JL
5522 if (mode == BLKmode)
5523 size_tree = TYPE_SIZE (TREE_TYPE (exp));
770ae6cc
RK
5524 else
5525 *pbitsize = GET_MODE_BITSIZE (mode);
bbf6f052 5526 }
3a94c984 5527
770ae6cc 5528 if (size_tree != 0)
bbf6f052 5529 {
770ae6cc 5530 if (! host_integerp (size_tree, 1))
e7c33f54
RK
5531 mode = BLKmode, *pbitsize = -1;
5532 else
770ae6cc 5533 *pbitsize = tree_low_cst (size_tree, 1);
bbf6f052
RK
5534 }
5535
5536 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5537 and find the ultimate containing object. */
bbf6f052
RK
5538 while (1)
5539 {
afe84921 5540 switch (TREE_CODE (exp))
bbf6f052 5541 {
afe84921
RH
5542 case BIT_FIELD_REF:
5543 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5544 TREE_OPERAND (exp, 2));
5545 break;
bbf6f052 5546
afe84921
RH
5547 case COMPONENT_REF:
5548 {
5549 tree field = TREE_OPERAND (exp, 1);
5550 tree this_offset = component_ref_field_offset (exp);
e7f3c83f 5551
afe84921
RH
5552 /* If this field hasn't been filled in yet, don't go past it.
5553 This should only happen when folding expressions made during
5554 type construction. */
5555 if (this_offset == 0)
5556 break;
e6d8c385 5557
afe84921
RH
5558 offset = size_binop (PLUS_EXPR, offset, this_offset);
5559 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5560 DECL_FIELD_BIT_OFFSET (field));
7156dead 5561
afe84921
RH
5562 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5563 }
5564 break;
7156dead 5565
afe84921
RH
5566 case ARRAY_REF:
5567 case ARRAY_RANGE_REF:
5568 {
5569 tree index = TREE_OPERAND (exp, 1);
5570 tree low_bound = array_ref_low_bound (exp);
5571 tree unit_size = array_ref_element_size (exp);
5572
5573 /* We assume all arrays have sizes that are a multiple of a byte.
5574 First subtract the lower bound, if any, in the type of the
5575 index, then convert to sizetype and multiply by the size of
5576 the array element. */
5577 if (! integer_zerop (low_bound))
4845b383
KH
5578 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
5579 index, low_bound);
afe84921
RH
5580
5581 offset = size_binop (PLUS_EXPR, offset,
5582 size_binop (MULT_EXPR,
5583 convert (sizetype, index),
5584 unit_size));
5585 }
5586 break;
5587
5588 case REALPART_EXPR:
afe84921
RH
5589 break;
5590
5591 case IMAGPART_EXPR:
9f25f0ad
RH
5592 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5593 bitsize_int (*pbitsize));
afe84921
RH
5594 break;
5595
afe84921 5596 case VIEW_CONVERT_EXPR:
2614034e
EB
5597 if (keep_aligning && STRICT_ALIGNMENT
5598 && (TYPE_ALIGN (TREE_TYPE (exp))
afe84921 5599 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
afe84921
RH
5600 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5601 < BIGGEST_ALIGNMENT)
5602 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5603 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5604 goto done;
5605 break;
5606
5607 default:
5608 goto done;
5609 }
7bb0943f
RS
5610
5611 /* If any reference in the chain is volatile, the effect is volatile. */
5612 if (TREE_THIS_VOLATILE (exp))
5613 *pvolatilep = 1;
839c4796 5614
bbf6f052
RK
5615 exp = TREE_OPERAND (exp, 0);
5616 }
afe84921 5617 done:
bbf6f052 5618
770ae6cc
RK
5619 /* If OFFSET is constant, see if we can return the whole thing as a
5620 constant bit position. Otherwise, split it up. */
5621 if (host_integerp (offset, 0)
5622 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5623 bitsize_unit_node))
5624 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5625 && host_integerp (tem, 0))
5626 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5627 else
5628 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
b50d17a1 5629
bbf6f052 5630 *pmode = mode;
bbf6f052
RK
5631 return exp;
5632}
921b3427 5633
44de5aeb
RK
5634/* Return a tree of sizetype representing the size, in bytes, of the element
5635 of EXP, an ARRAY_REF. */
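/* For example, an ARRAY_REF that records an aligned size of 3 for an element
   type whose TYPE_ALIGN_UNIT is 4 yields a size of 3 * 4 = 12 bytes; without
   a recorded size the element type's TYPE_SIZE_UNIT is used directly.  */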
5636
5637tree
5638array_ref_element_size (tree exp)
5639{
5640 tree aligned_size = TREE_OPERAND (exp, 3);
5641 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5642
5643 /* If a size was specified in the ARRAY_REF, it's the size measured
5644 in alignment units of the element type. So multiply by that value. */
5645 if (aligned_size)
bc482be4
RH
5646 {
5647 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5648 sizetype from another type of the same width and signedness. */
5649 if (TREE_TYPE (aligned_size) != sizetype)
5650 aligned_size = fold_convert (sizetype, aligned_size);
5651 return size_binop (MULT_EXPR, aligned_size,
a4e9ffe5 5652 size_int (TYPE_ALIGN_UNIT (elmt_type)));
bc482be4 5653 }
44de5aeb 5654
caf93cb0 5655 /* Otherwise, take the size from that of the element type. Substitute
44de5aeb
RK
5656 any PLACEHOLDER_EXPR that we have. */
5657 else
5658 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
5659}
5660
5661/* Return a tree representing the lower bound of the array mentioned in
5662 EXP, an ARRAY_REF. */
5663
5664tree
5665array_ref_low_bound (tree exp)
5666{
5667 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5668
5669 /* If a lower bound is specified in EXP, use it. */
5670 if (TREE_OPERAND (exp, 2))
5671 return TREE_OPERAND (exp, 2);
5672
5673 /* Otherwise, if there is a domain type and it has a lower bound, use it,
5674 substituting for a PLACEHOLDER_EXPR as needed. */
5675 if (domain_type && TYPE_MIN_VALUE (domain_type))
5676 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5677
5678 /* Otherwise, return a zero of the appropriate type. */
5212068f 5679 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
44de5aeb
RK
5680}
5681
a7e5372d
ZD
5682/* Return a tree representing the upper bound of the array mentioned in
5683 EXP, an ARRAY_REF. */
5684
5685tree
5686array_ref_up_bound (tree exp)
5687{
5688 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5689
5690 /* If there is a domain type and it has an upper bound, use it, substituting
5691 for a PLACEHOLDER_EXPR as needed. */
5692 if (domain_type && TYPE_MAX_VALUE (domain_type))
5693 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
5694
5695 /* Otherwise fail. */
5696 return NULL_TREE;
5697}
5698
44de5aeb
RK
5699/* Return a tree representing the offset, in bytes, of the field referenced
5700 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
5701
5702tree
5703component_ref_field_offset (tree exp)
5704{
5705 tree aligned_offset = TREE_OPERAND (exp, 2);
5706 tree field = TREE_OPERAND (exp, 1);
5707
5708 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5709 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
5710 value. */
5711 if (aligned_offset)
bc482be4
RH
5712 {
5713 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5714 sizetype from another type of the same width and signedness. */
5715 if (TREE_TYPE (aligned_offset) != sizetype)
5716 aligned_offset = fold_convert (sizetype, aligned_offset);
5717 return size_binop (MULT_EXPR, aligned_offset,
5718 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5719 }
44de5aeb 5720
caf93cb0 5721 /* Otherwise, take the offset from that of the field. Substitute
44de5aeb
RK
5722 any PLACEHOLDER_EXPR that we have. */
5723 else
5724 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
5725}
5726
ed239f5a
RK
5727/* Return 1 if T is an expression that get_inner_reference handles. */
5728
5729int
502b8322 5730handled_component_p (tree t)
ed239f5a
RK
5731{
5732 switch (TREE_CODE (t))
5733 {
5734 case BIT_FIELD_REF:
5735 case COMPONENT_REF:
5736 case ARRAY_REF:
5737 case ARRAY_RANGE_REF:
ed239f5a 5738 case VIEW_CONVERT_EXPR:
afe84921
RH
5739 case REALPART_EXPR:
5740 case IMAGPART_EXPR:
ed239f5a
RK
5741 return 1;
5742
ed239f5a
RK
5743 default:
5744 return 0;
5745 }
5746}
bbf6f052 5747\f
3fe44edd
RK
5748/* Given an rtx VALUE that may contain additions and multiplications, return
5749 an equivalent value that just refers to a register, memory, or constant.
5750 This is done by generating instructions to perform the arithmetic and
5751 returning a pseudo-register containing the value.
c45a13a6
RK
5752
5753 The returned value may be a REG, SUBREG, MEM or constant. */
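/* For instance, passed something like (plus (mult (reg) (const_int 4))
   (const_int 8)), this typically expands the multiplication and addition
   into real insns and returns the pseudo register holding the final sum.  */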
bbf6f052
RK
5754
5755rtx
502b8322 5756force_operand (rtx value, rtx target)
bbf6f052 5757{
8a28dbcc 5758 rtx op1, op2;
bbf6f052 5759 /* Use subtarget as the target for operand 0 of a binary operation. */
b3694847 5760 rtx subtarget = get_subtarget (target);
8a28dbcc 5761 enum rtx_code code = GET_CODE (value);
bbf6f052 5762
50654f6c
ZD
5763 /* Check for subreg applied to an expression produced by loop optimizer. */
5764 if (code == SUBREG
f8cfc6aa 5765 && !REG_P (SUBREG_REG (value))
3c0cb5de 5766 && !MEM_P (SUBREG_REG (value)))
50654f6c
ZD
5767 {
5768 value = simplify_gen_subreg (GET_MODE (value),
5769 force_reg (GET_MODE (SUBREG_REG (value)),
5770 force_operand (SUBREG_REG (value),
5771 NULL_RTX)),
5772 GET_MODE (SUBREG_REG (value)),
5773 SUBREG_BYTE (value));
5774 code = GET_CODE (value);
5775 }
5776
8b015896 5777 /* Check for a PIC address load. */
8a28dbcc 5778 if ((code == PLUS || code == MINUS)
8b015896
RH
5779 && XEXP (value, 0) == pic_offset_table_rtx
5780 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5781 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5782 || GET_CODE (XEXP (value, 1)) == CONST))
5783 {
5784 if (!subtarget)
5785 subtarget = gen_reg_rtx (GET_MODE (value));
5786 emit_move_insn (subtarget, value);
5787 return subtarget;
5788 }
5789
8a28dbcc 5790 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
bbf6f052 5791 {
8a28dbcc
JH
5792 if (!target)
5793 target = gen_reg_rtx (GET_MODE (value));
ce0f3925 5794 convert_move (target, force_operand (XEXP (value, 0), NULL),
8a28dbcc
JH
5795 code == ZERO_EXTEND);
5796 return target;
bbf6f052
RK
5797 }
5798
ec8e098d 5799 if (ARITHMETIC_P (value))
bbf6f052
RK
5800 {
5801 op2 = XEXP (value, 1);
f8cfc6aa 5802 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
bbf6f052 5803 subtarget = 0;
8a28dbcc 5804 if (code == MINUS && GET_CODE (op2) == CONST_INT)
bbf6f052 5805 {
8a28dbcc 5806 code = PLUS;
bbf6f052
RK
5807 op2 = negate_rtx (GET_MODE (value), op2);
5808 }
5809
5810 /* Check for an addition with OP2 a constant integer and our first
8a28dbcc
JH
5811 operand a PLUS of a virtual register and something else. In that
5812 case, we want to emit the sum of the virtual register and the
5813 constant first and then add the other value. This allows virtual
5814 register instantiation to simply modify the constant rather than
5815 creating another one around this addition. */
5816 if (code == PLUS && GET_CODE (op2) == CONST_INT
bbf6f052 5817 && GET_CODE (XEXP (value, 0)) == PLUS
f8cfc6aa 5818 && REG_P (XEXP (XEXP (value, 0), 0))
bbf6f052
RK
5819 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5820 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5821 {
8a28dbcc
JH
5822 rtx temp = expand_simple_binop (GET_MODE (value), code,
5823 XEXP (XEXP (value, 0), 0), op2,
5824 subtarget, 0, OPTAB_LIB_WIDEN);
5825 return expand_simple_binop (GET_MODE (value), code, temp,
5826 force_operand (XEXP (XEXP (value,
5827 0), 1), 0),
5828 target, 0, OPTAB_LIB_WIDEN);
bbf6f052 5829 }
3a94c984 5830
8a28dbcc
JH
5831 op1 = force_operand (XEXP (value, 0), subtarget);
5832 op2 = force_operand (op2, NULL_RTX);
5833 switch (code)
5834 {
5835 case MULT:
5836 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5837 case DIV:
5838 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5839 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5840 target, 1, OPTAB_LIB_WIDEN);
5841 else
5842 return expand_divmod (0,
5843 FLOAT_MODE_P (GET_MODE (value))
5844 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5845 GET_MODE (value), op1, op2, target, 0);
5846 break;
5847 case MOD:
5848 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5849 target, 0);
5850 break;
5851 case UDIV:
5852 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5853 target, 1);
5854 break;
5855 case UMOD:
5856 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5857 target, 1);
5858 break;
5859 case ASHIFTRT:
5860 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5861 target, 0, OPTAB_LIB_WIDEN);
5862 break;
5863 default:
5864 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5865 target, 1, OPTAB_LIB_WIDEN);
5866 }
5867 }
ec8e098d 5868 if (UNARY_P (value))
8a28dbcc
JH
5869 {
5870 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5871 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
bbf6f052 5872 }
34e81b5a
RK
5873
5874#ifdef INSN_SCHEDULING
5875 /* On machines that have insn scheduling, we want all memory references to be
5876 explicit, so we need to deal with such paradoxical SUBREGs. */
3c0cb5de 5877 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
34e81b5a
RK
5878 && (GET_MODE_SIZE (GET_MODE (value))
5879 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5880 value
5881 = simplify_gen_subreg (GET_MODE (value),
5882 force_reg (GET_MODE (SUBREG_REG (value)),
5883 force_operand (SUBREG_REG (value),
5884 NULL_RTX)),
5885 GET_MODE (SUBREG_REG (value)),
5886 SUBREG_BYTE (value));
5887#endif
5888
bbf6f052
RK
5889 return value;
5890}
5891\f
bbf6f052 5892/* Subroutine of expand_expr: return nonzero iff there is no way that
e5e809f4
JL
5893 EXP can reference X, which is being modified. TOP_P is nonzero if this
5894 call is going to be used to determine whether we need a temporary
ff439b5f
CB
5895 for EXP, as opposed to a recursive call to this function.
5896
5897 It is always safe for this routine to return zero since it merely
5898 searches for optimization opportunities. */
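/* One typical caller is expand_operands below, which only lets the first
   operand be expanded directly into TARGET when safe_from_p says the second
   operand cannot reference it.  */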
bbf6f052 5899
8f17b5c5 5900int
502b8322 5901safe_from_p (rtx x, tree exp, int top_p)
bbf6f052
RK
5902{
5903 rtx exp_rtl = 0;
5904 int i, nops;
5905
6676e72f
RK
5906 if (x == 0
5907 /* If EXP has varying size, we MUST use a target since we currently
8f6562d0
PB
5908 have no way of allocating temporaries of variable size
5909 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5910 So we assume here that something at a higher level has prevented a
f4510f37 5911 clash. This is somewhat bogus, but the best we can do. Only
e5e809f4 5912 do this when X is BLKmode and when we are at the top level. */
d0f062fb 5913 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
f4510f37 5914 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
8f6562d0
PB
5915 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5916 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5917 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5918 != INTEGER_CST)
1da68f56
RK
5919 && GET_MODE (x) == BLKmode)
5920 /* If X is in the outgoing argument area, it is always safe. */
3c0cb5de 5921 || (MEM_P (x)
1da68f56
RK
5922 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5923 || (GET_CODE (XEXP (x, 0)) == PLUS
5924 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
bbf6f052
RK
5925 return 1;
5926
5927 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5928 find the underlying pseudo. */
5929 if (GET_CODE (x) == SUBREG)
5930 {
5931 x = SUBREG_REG (x);
f8cfc6aa 5932 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
bbf6f052
RK
5933 return 0;
5934 }
5935
1da68f56 5936 /* Now look at our tree code and possibly recurse. */
bbf6f052
RK
5937 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5938 {
6615c446 5939 case tcc_declaration:
a9772b60 5940 exp_rtl = DECL_RTL_IF_SET (exp);
bbf6f052
RK
5941 break;
5942
6615c446 5943 case tcc_constant:
bbf6f052
RK
5944 return 1;
5945
6615c446 5946 case tcc_exceptional:
bbf6f052 5947 if (TREE_CODE (exp) == TREE_LIST)
f8d4be57
CE
5948 {
5949 while (1)
5950 {
5951 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5952 return 0;
5953 exp = TREE_CHAIN (exp);
5954 if (!exp)
5955 return 1;
5956 if (TREE_CODE (exp) != TREE_LIST)
5957 return safe_from_p (x, exp, 0);
5958 }
5959 }
ff439b5f
CB
5960 else if (TREE_CODE (exp) == ERROR_MARK)
5961 return 1; /* An already-visited SAVE_EXPR? */
bbf6f052
RK
5962 else
5963 return 0;
5964
6615c446 5965 case tcc_statement:
350fae66
RK
5966 /* The only case we look at here is the DECL_INITIAL inside a
5967 DECL_EXPR. */
5968 return (TREE_CODE (exp) != DECL_EXPR
5969 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
5970 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
5971 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
5972
6615c446
JO
5973 case tcc_binary:
5974 case tcc_comparison:
f8d4be57
CE
5975 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
5976 return 0;
5d3cc252 5977 /* Fall through. */
f8d4be57 5978
6615c446 5979 case tcc_unary:
f8d4be57 5980 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
bbf6f052 5981
6615c446
JO
5982 case tcc_expression:
5983 case tcc_reference:
bbf6f052
RK
5984 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5985 the expression. If it is set, we conflict iff we are that rtx or
5986 both are in memory. Otherwise, we check all operands of the
5987 expression recursively. */
5988
5989 switch (TREE_CODE (exp))
5990 {
5991 case ADDR_EXPR:
70072ed9
RK
5992 /* If the operand is static or we are static, we can't conflict.
5993 Likewise if we don't conflict with the operand at all. */
5994 if (staticp (TREE_OPERAND (exp, 0))
5995 || TREE_STATIC (exp)
5996 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5997 return 1;
5998
5999 /* Otherwise, the only way this can conflict is if we are taking
6000 the address of a DECL whose address is part of X, which is
6001 very rare. */
6002 exp = TREE_OPERAND (exp, 0);
6003 if (DECL_P (exp))
6004 {
6005 if (!DECL_RTL_SET_P (exp)
3c0cb5de 6006 || !MEM_P (DECL_RTL (exp)))
70072ed9
RK
6007 return 0;
6008 else
6009 exp_rtl = XEXP (DECL_RTL (exp), 0);
6010 }
6011 break;
bbf6f052 6012
7ccf35ed
DN
6013 case MISALIGNED_INDIRECT_REF:
6014 case ALIGN_INDIRECT_REF:
bbf6f052 6015 case INDIRECT_REF:
3c0cb5de 6016 if (MEM_P (x)
1da68f56
RK
6017 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6018 get_alias_set (exp)))
bbf6f052
RK
6019 return 0;
6020 break;
6021
6022 case CALL_EXPR:
f9808f81
MM
6023 /* Assume that the call will clobber all hard registers and
6024 all of memory. */
f8cfc6aa 6025 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
3c0cb5de 6026 || MEM_P (x))
f9808f81 6027 return 0;
bbf6f052
RK
6028 break;
6029
bbf6f052 6030 case WITH_CLEANUP_EXPR:
5dab5552 6031 case CLEANUP_POINT_EXPR:
ac45df5d 6032 /* Lowered by gimplify.c. */
5b0264cb 6033 gcc_unreachable ();
ac45df5d 6034
bbf6f052 6035 case SAVE_EXPR:
82c82743 6036 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
bbf6f052 6037
e9a25f70
JL
6038 default:
6039 break;
bbf6f052
RK
6040 }
6041
6042 /* If we have an rtx, we do not need to scan our operands. */
6043 if (exp_rtl)
6044 break;
6045
54e4aedb 6046 nops = TREE_CODE_LENGTH (TREE_CODE (exp));
bbf6f052
RK
6047 for (i = 0; i < nops; i++)
6048 if (TREE_OPERAND (exp, i) != 0
e5e809f4 6049 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
bbf6f052 6050 return 0;
8f17b5c5
MM
6051
6052 /* If this is a language-specific tree code, it may require
6053 special handling. */
dbbbbf3b
JDA
6054 if ((unsigned int) TREE_CODE (exp)
6055 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
ae2bcd98 6056 && !lang_hooks.safe_from_p (x, exp))
8f17b5c5 6057 return 0;
6615c446
JO
6058 break;
6059
6060 case tcc_type:
6061 /* Should never get a type here. */
6062 gcc_unreachable ();
bbf6f052
RK
6063 }
6064
6065 /* If we have an rtl, find any enclosed object. Then see if we conflict
6066 with it. */
6067 if (exp_rtl)
6068 {
6069 if (GET_CODE (exp_rtl) == SUBREG)
6070 {
6071 exp_rtl = SUBREG_REG (exp_rtl);
f8cfc6aa 6072 if (REG_P (exp_rtl)
bbf6f052
RK
6073 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6074 return 0;
6075 }
6076
6077 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
1da68f56 6078 are memory and they conflict. */
bbf6f052 6079 return ! (rtx_equal_p (x, exp_rtl)
3c0cb5de 6080 || (MEM_P (x) && MEM_P (exp_rtl)
21117a17 6081 && true_dependence (exp_rtl, VOIDmode, x,
1da68f56 6082 rtx_addr_varies_p)));
bbf6f052
RK
6083 }
6084
6085 /* If we reach here, it is safe. */
6086 return 1;
6087}
6088
14a774a9 6089\f
0d4903b8
RK
6090/* Return the highest power of two that EXP is known to be a multiple of.
6091 This is used in updating alignment of MEMs in array references. */
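/* For example, for an index expression such as i * 12 + 4 this returns 4:
   the MULT_EXPR contributes 1 * 4 (the largest power of two dividing 12) and
   the PLUS_EXPR takes the minimum of that and the 4 from the constant.  */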
6092
86a07404 6093unsigned HOST_WIDE_INT
502b8322 6094highest_pow2_factor (tree exp)
0d4903b8 6095{
9ceca302 6096 unsigned HOST_WIDE_INT c0, c1;
0d4903b8
RK
6097
6098 switch (TREE_CODE (exp))
6099 {
6100 case INTEGER_CST:
e0f1be5c
JJ
6101 /* We can find the lowest bit that's a one. If the low
6102 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6103 We need to handle this case since we can find it in a COND_EXPR,
a98ebe2e 6104 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
e0f1be5c 6105 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
3a531a8b 6106 later ICE. */
e0f1be5c 6107 if (TREE_CONSTANT_OVERFLOW (exp))
1ed1b4fb 6108 return BIGGEST_ALIGNMENT;
e0f1be5c 6109 else
0d4903b8 6110 {
e0f1be5c
JJ
6111 /* Note: tree_low_cst is intentionally not used here,
6112 we don't care about the upper bits. */
6113 c0 = TREE_INT_CST_LOW (exp);
6114 c0 &= -c0;
6115 return c0 ? c0 : BIGGEST_ALIGNMENT;
0d4903b8
RK
6116 }
6117 break;
6118
65a07688 6119 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
0d4903b8
RK
6120 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6121 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6122 return MIN (c0, c1);
6123
6124 case MULT_EXPR:
6125 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6126 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6127 return c0 * c1;
6128
6129 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6130 case CEIL_DIV_EXPR:
65a07688
RK
6131 if (integer_pow2p (TREE_OPERAND (exp, 1))
6132 && host_integerp (TREE_OPERAND (exp, 1), 1))
6133 {
6134 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6135 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6136 return MAX (1, c0 / c1);
6137 }
6138 break;
0d4903b8
RK
6139
6140 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6fce44af 6141 case SAVE_EXPR:
0d4903b8
RK
6142 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6143
65a07688
RK
6144 case COMPOUND_EXPR:
6145 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6146
0d4903b8
RK
6147 case COND_EXPR:
6148 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6149 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6150 return MIN (c0, c1);
6151
6152 default:
6153 break;
6154 }
6155
6156 return 1;
6157}
818c0c94 6158
d50a16c4
EB
6159/* Similar, except that the alignment requirements of TARGET are
6160 taken into account. Assume it is at least as aligned as its
6161 type, unless it is a COMPONENT_REF in which case the layout of
6162 the structure gives the alignment. */
818c0c94 6163
9ceca302 6164static unsigned HOST_WIDE_INT
d50a16c4 6165highest_pow2_factor_for_target (tree target, tree exp)
818c0c94 6166{
d50a16c4 6167 unsigned HOST_WIDE_INT target_align, factor;
818c0c94
RH
6168
6169 factor = highest_pow2_factor (exp);
d50a16c4 6170 if (TREE_CODE (target) == COMPONENT_REF)
a4e9ffe5 6171 target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
d50a16c4 6172 else
a4e9ffe5 6173 target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
d50a16c4 6174 return MAX (factor, target_align);
818c0c94 6175}
0d4903b8 6176\f
6de9cd9a
DN
6177/* Expands variable VAR. */
6178
6179void
6180expand_var (tree var)
6181{
6182 if (DECL_EXTERNAL (var))
6183 return;
6184
6185 if (TREE_STATIC (var))
6186 /* If this is an inlined copy of a static local variable,
6187 look up the original decl. */
6188 var = DECL_ORIGIN (var);
6189
6190 if (TREE_STATIC (var)
6191 ? !TREE_ASM_WRITTEN (var)
6192 : !DECL_RTL_SET_P (var))
6193 {
833b3afe 6194 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
1a186ec5 6195 /* Should be ignored. */;
673fda6b 6196 else if (lang_hooks.expand_decl (var))
6de9cd9a
DN
6197 /* OK. */;
6198 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6199 expand_decl (var);
6200 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
0e6df31e 6201 rest_of_decl_compilation (var, 0, 0);
6de9cd9a 6202 else
5b0264cb
NS
6203 /* No expansion needed. */
6204 gcc_assert (TREE_CODE (var) == TYPE_DECL
6205 || TREE_CODE (var) == CONST_DECL
6206 || TREE_CODE (var) == FUNCTION_DECL
6207 || TREE_CODE (var) == LABEL_DECL);
6de9cd9a
DN
6208 }
6209}
6210
eb698c58
RS
6211/* Subroutine of expand_expr. Expand the two operands of a binary
6212 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6213 The value may be stored in TARGET if TARGET is nonzero. The
6214 MODIFIER argument is as documented by expand_expr. */
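/* For a binary expression whose operands compare equal, such as x + x, the
   subexpression is expanded only once and *OP1 is simply a copy_rtx of *OP0.  */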
6215
6216static void
6217expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6218 enum expand_modifier modifier)
6219{
6220 if (! safe_from_p (target, exp1, 1))
6221 target = 0;
6222 if (operand_equal_p (exp0, exp1, 0))
6223 {
6224 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6225 *op1 = copy_rtx (*op0);
6226 }
6227 else
6228 {
c67e6e14
RS
6229 /* If we need to preserve evaluation order, copy exp0 into its own
6230 temporary variable so that it can't be clobbered by exp1. */
6231 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6232 exp0 = save_expr (exp0);
eb698c58
RS
6233 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6234 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6235 }
6236}
6237
f47e9b4e 6238\f
70bb498a 6239/* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6377bb9a
RH
6240 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6241
6242static rtx
70bb498a
RH
6243expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6244 enum expand_modifier modifier)
6377bb9a
RH
6245{
6246 rtx result, subtarget;
6247 tree inner, offset;
6248 HOST_WIDE_INT bitsize, bitpos;
6249 int volatilep, unsignedp;
6250 enum machine_mode mode1;
6251
6252 /* If we are taking the address of a constant and are at the top level,
6253 we have to use output_constant_def since we can't call force_const_mem
6254 at top level. */
6255 /* ??? This should be considered a front-end bug. We should not be
6256 generating ADDR_EXPR of something that isn't an LVALUE. The only
6257 exception here is STRING_CST. */
6258 if (TREE_CODE (exp) == CONSTRUCTOR
6615c446 6259 || CONSTANT_CLASS_P (exp))
6377bb9a
RH
6260 return XEXP (output_constant_def (exp, 0), 0);
6261
6262 /* Everything must be something allowed by is_gimple_addressable. */
6263 switch (TREE_CODE (exp))
6264 {
6265 case INDIRECT_REF:
6266 /* This case will happen via recursion for &a->b. */
6267 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, EXPAND_NORMAL);
6268
6269 case CONST_DECL:
6270 /* Recurse and make the output_constant_def clause above handle this. */
70bb498a 6271 return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
b0b324b0 6272 tmode, modifier);
6377bb9a
RH
6273
6274 case REALPART_EXPR:
6275 /* The real part of the complex number is always first, therefore
6276 the address is the same as the address of the parent object. */
6277 offset = 0;
6278 bitpos = 0;
6279 inner = TREE_OPERAND (exp, 0);
6280 break;
6281
6282 case IMAGPART_EXPR:
6283 /* The imaginary part of the complex number is always second.
2a7e31df 6284 The expression is therefore always offset by the size of the
6377bb9a
RH
6285 scalar type. */
6286 offset = 0;
6287 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6288 inner = TREE_OPERAND (exp, 0);
6289 break;
6290
6291 default:
6292 /* If the object is a DECL, then expand it for its rtl. Don't bypass
 6293 expand_expr, as that can have various side effects; LABEL_DECLs, for
 6294 example, may not have their DECL_RTL set yet. Assume language
6295 specific tree nodes can be expanded in some interesting way. */
6296 if (DECL_P (exp)
6297 || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
6298 {
6299 result = expand_expr (exp, target, tmode,
6300 modifier == EXPAND_INITIALIZER
6301 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6302
6303 /* If the DECL isn't in memory, then the DECL wasn't properly
6304 marked TREE_ADDRESSABLE, which will be either a front-end
6305 or a tree optimizer bug. */
2ca202e7 6306 gcc_assert (MEM_P (result));
6377bb9a
RH
6307 result = XEXP (result, 0);
6308
6309 /* ??? Is this needed anymore? */
b0b324b0 6310 if (DECL_P (exp) && ! TREE_USED (exp))
6377bb9a
RH
6311 {
6312 assemble_external (exp);
6313 TREE_USED (exp) = 1;
6314 }
6315
6316 if (modifier != EXPAND_INITIALIZER
6317 && modifier != EXPAND_CONST_ADDRESS)
6318 result = force_operand (result, target);
6319 return result;
6320 }
6321
2614034e
EB
6322 /* Pass FALSE as the last argument to get_inner_reference although
6323 we are expanding to RTL. The rationale is that we know how to
6324 handle "aligning nodes" here: we can just bypass them because
6325 they won't change the final object whose address will be returned
6326 (they actually exist only for that purpose). */
6377bb9a 6327 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
2614034e 6328 &mode1, &unsignedp, &volatilep, false);
6377bb9a
RH
6329 break;
6330 }
6331
6332 /* We must have made progress. */
5b0264cb 6333 gcc_assert (inner != exp);
6377bb9a
RH
6334
6335 subtarget = offset || bitpos ? NULL_RTX : target;
70bb498a 6336 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6377bb9a 6337
6377bb9a
RH
6338 if (offset)
6339 {
6340 rtx tmp;
6341
6342 if (modifier != EXPAND_NORMAL)
6343 result = force_operand (result, NULL);
6344 tmp = expand_expr (offset, NULL, tmode, EXPAND_NORMAL);
6345
b0b324b0
RH
6346 result = convert_memory_address (tmode, result);
6347 tmp = convert_memory_address (tmode, tmp);
6348
d047a201 6349 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6377bb9a
RH
6350 result = gen_rtx_PLUS (tmode, result, tmp);
6351 else
6352 {
6353 subtarget = bitpos ? NULL_RTX : target;
6354 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6355 1, OPTAB_LIB_WIDEN);
6356 }
6357 }
6358
6359 if (bitpos)
6360 {
6361 /* Someone beforehand should have rejected taking the address
6362 of such an object. */
b0b324b0 6363 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6377bb9a
RH
6364
6365 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6366 if (modifier < EXPAND_SUM)
6367 result = force_operand (result, target);
6368 }
6369
6370 return result;
6371}
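/* Editor's illustration (assumed example, not part of the source): for
   "&s.f" where field f sits at bit position 32 within s,
   get_inner_reference returns bitpos == 32 and no variable offset, so
   the code above finishes with

     result = plus_constant (result, 32 / BITS_PER_UNIT);

   that is, the address of s plus 4 bytes.  A bitpos that is not a
   multiple of BITS_PER_UNIT would mean the front end allowed taking
   the address of a bit-field, which the gcc_assert rejects.  */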
6372
70bb498a
RH
6373/* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6374 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6375
6376static rtx
6377expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6378 enum expand_modifier modifier)
6379{
6380 enum machine_mode rmode;
6381 rtx result;
6382
b0b324b0
RH
6383 /* Target mode of VOIDmode says "whatever's natural". */
6384 if (tmode == VOIDmode)
6385 tmode = TYPE_MODE (TREE_TYPE (exp));
6386
6387 /* We can get called with some Weird Things if the user does silliness
6388 like "(short) &a". In that case, convert_memory_address won't do
6389 the right thing, so ignore the given target mode. */
103b83ea 6390 if (tmode != Pmode && tmode != ptr_mode)
b0b324b0
RH
6391 tmode = Pmode;
6392
70bb498a
RH
6393 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
6394 tmode, modifier);
6395
 6396 /* Despite expand_expr's claims concerning ignoring TMODE when not
b0b324b0
RH
6397 strictly convenient, stuff breaks if we don't honor it. Note
6398 that combined with the above, we only do this for pointer modes. */
70bb498a
RH
6399 rmode = GET_MODE (result);
6400 if (rmode == VOIDmode)
6401 rmode = tmode;
6402 if (rmode != tmode)
6403 result = convert_memory_address (tmode, result);
b0b324b0 6404
70bb498a
RH
6405 return result;
6406}
6407
6408
bbf6f052
RK
6409/* expand_expr: generate code for computing expression EXP.
6410 An rtx for the computed value is returned. The value is never null.
6411 In the case of a void EXP, const0_rtx is returned.
6412
6413 The value may be stored in TARGET if TARGET is nonzero.
6414 TARGET is just a suggestion; callers must assume that
6415 the rtx returned may not be the same as TARGET.
6416
6417 If TARGET is CONST0_RTX, it means that the value will be ignored.
6418
6419 If TMODE is not VOIDmode, it suggests generating the
6420 result in mode TMODE. But this is done only when convenient.
 6421 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6422 TMODE is just a suggestion; callers must assume that
6423 the rtx returned may not have mode TMODE.
6424
d6a5ac33
RK
6425 Note that TARGET may have neither TMODE nor MODE. In that case, it
6426 probably will not be used.
bbf6f052
RK
6427
6428 If MODIFIER is EXPAND_SUM then when EXP is an addition
6429 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6430 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6431 products as above, or REG or MEM, or constant.
6432 Ordinarily in such cases we would output mul or add instructions
6433 and then return a pseudo reg containing the sum.
6434
6435 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6436 it also marks a label as absolutely required (it can't be dead).
26fcb35a 6437 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
d6a5ac33
RK
6438 This is used for outputting expressions used in initializers.
6439
6440 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6441 with a constant address even if that address is not normally legitimate.
8403445a
AM
6442 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6443
6444 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6445 a call parameter. Such targets require special care as we haven't yet
6446 marked TARGET so that it's safe from being trashed by libcalls. We
6447 don't want to use TARGET for anything but the final result;
 6448 intermediate values must go elsewhere. Additionally, calls to
caf93cb0 6449 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
0fab64a3
MM
6450
6451 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6452 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6453 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6454 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6455 recursively. */
bbf6f052 6456
6de9cd9a
DN
6457static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6458 enum expand_modifier, rtx *);
6459
bbf6f052 6460rtx
0fab64a3
MM
6461expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6462 enum expand_modifier modifier, rtx *alt_rtl)
6de9cd9a
DN
6463{
6464 int rn = -1;
6465 rtx ret, last = NULL;
6466
6467 /* Handle ERROR_MARK before anybody tries to access its type. */
6468 if (TREE_CODE (exp) == ERROR_MARK
6469 || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
6470 {
6471 ret = CONST0_RTX (tmode);
6472 return ret ? ret : const0_rtx;
6473 }
6474
6475 if (flag_non_call_exceptions)
6476 {
6477 rn = lookup_stmt_eh_region (exp);
6478 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
6479 if (rn >= 0)
6480 last = get_last_insn ();
6481 }
6482
6483 /* If this is an expression of some kind and it has an associated line
caf93cb0 6484 number, then emit the line number before expanding the expression.
6de9cd9a
DN
6485
6486 We need to save and restore the file and line information so that
6487 errors discovered during expansion are emitted with the right
caf93cb0 6488 information. It would be better if the diagnostic routines
6de9cd9a
DN
6489 used the file/line information embedded in the tree nodes rather
6490 than globals. */
c48dc958 6491 if (cfun && cfun->ib_boundaries_block && EXPR_HAS_LOCATION (exp))
6de9cd9a
DN
6492 {
6493 location_t saved_location = input_location;
6494 input_location = EXPR_LOCATION (exp);
6495 emit_line_note (input_location);
caf93cb0 6496
6de9cd9a 6497 /* Record where the insns produced belong. */
1ea463a2 6498 record_block_change (TREE_BLOCK (exp));
6de9cd9a
DN
6499
6500 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6501
6502 input_location = saved_location;
6503 }
6504 else
6505 {
6506 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6507 }
6508
6509 /* If using non-call exceptions, mark all insns that may trap.
6510 expand_call() will mark CALL_INSNs before we get to this code,
6511 but it doesn't handle libcalls, and these may trap. */
6512 if (rn >= 0)
caf93cb0 6513 {
6de9cd9a 6514 rtx insn;
caf93cb0 6515 for (insn = next_real_insn (last); insn;
6de9cd9a
DN
6516 insn = next_real_insn (insn))
6517 {
6518 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6519 /* If we want exceptions for non-call insns, any
6520 may_trap_p instruction may throw. */
6521 && GET_CODE (PATTERN (insn)) != CLOBBER
6522 && GET_CODE (PATTERN (insn)) != USE
4b4bf941 6523 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
6de9cd9a
DN
6524 {
6525 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6526 REG_NOTES (insn));
6527 }
6528 }
6529 }
6530
6531 return ret;
6532}
6533
6534static rtx
6535expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6536 enum expand_modifier modifier, rtx *alt_rtl)
bbf6f052 6537{
b3694847 6538 rtx op0, op1, temp;
bbf6f052 6539 tree type = TREE_TYPE (exp);
8df83eae 6540 int unsignedp;
b3694847
SS
6541 enum machine_mode mode;
6542 enum tree_code code = TREE_CODE (exp);
bbf6f052 6543 optab this_optab;
68557e14
ML
6544 rtx subtarget, original_target;
6545 int ignore;
bbf6f052 6546 tree context;
bc15d0ef
JM
6547 bool reduce_bit_field = false;
6548#define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
6549 ? reduce_to_bit_field_precision ((expr), \
6550 target, \
6551 type) \
6552 : (expr))
bbf6f052 6553
68557e14 6554 mode = TYPE_MODE (type);
8df83eae 6555 unsignedp = TYPE_UNSIGNED (type);
bc15d0ef
JM
6556 if (lang_hooks.reduce_bit_field_operations
6557 && TREE_CODE (type) == INTEGER_TYPE
6558 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
6559 {
6560 /* An operation in what may be a bit-field type needs the
6561 result to be reduced to the precision of the bit-field type,
6562 which is narrower than that of the type's mode. */
6563 reduce_bit_field = true;
6564 if (modifier == EXPAND_STACK_PARM)
6565 target = 0;
6566 }
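/* Editor's note (illustrative): with a front end that sets
   lang_hooks.reduce_bit_field_operations, a narrow INTEGER_TYPE such
   as a 3-bit modular type may be carried in SImode; arithmetic results
   below are then wrapped as, e.g.,

     return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));

   so reduce_to_bit_field_precision brings the value back down to
   TYPE_PRECISION (type) bits before it escapes.  */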
8df83eae 6567
68557e14 6568 /* Use subtarget as the target for operand 0 of a binary operation. */
296b4ed9 6569 subtarget = get_subtarget (target);
68557e14
ML
6570 original_target = target;
6571 ignore = (target == const0_rtx
6572 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
3a18db48
AP
6573 || code == CONVERT_EXPR || code == COND_EXPR
6574 || code == VIEW_CONVERT_EXPR)
68557e14
ML
6575 && TREE_CODE (type) == VOID_TYPE));
6576
dd27116b
RK
6577 /* If we are going to ignore this result, we need only do something
6578 if there is a side-effect somewhere in the expression. If there
b50d17a1
RK
6579 is, short-circuit the most common cases here. Note that we must
6580 not call expand_expr with anything but const0_rtx in case this
6581 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
bbf6f052 6582
dd27116b
RK
6583 if (ignore)
6584 {
6585 if (! TREE_SIDE_EFFECTS (exp))
6586 return const0_rtx;
6587
14a774a9
RK
6588 /* Ensure we reference a volatile object even if value is ignored, but
6589 don't do this if all we are doing is taking its address. */
dd27116b
RK
6590 if (TREE_THIS_VOLATILE (exp)
6591 && TREE_CODE (exp) != FUNCTION_DECL
14a774a9
RK
6592 && mode != VOIDmode && mode != BLKmode
6593 && modifier != EXPAND_CONST_ADDRESS)
dd27116b 6594 {
37a08a29 6595 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
3c0cb5de 6596 if (MEM_P (temp))
dd27116b
RK
6597 temp = copy_to_reg (temp);
6598 return const0_rtx;
6599 }
6600
6615c446
JO
6601 if (TREE_CODE_CLASS (code) == tcc_unary
6602 || code == COMPONENT_REF || code == INDIRECT_REF)
37a08a29
RK
6603 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6604 modifier);
6605
6615c446
JO
6606 else if (TREE_CODE_CLASS (code) == tcc_binary
6607 || TREE_CODE_CLASS (code) == tcc_comparison
b4e3fabb 6608 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
dd27116b 6609 {
37a08a29
RK
6610 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6611 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
dd27116b
RK
6612 return const0_rtx;
6613 }
14a774a9
RK
6614 else if (code == BIT_FIELD_REF)
6615 {
37a08a29
RK
6616 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6617 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6618 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
14a774a9
RK
6619 return const0_rtx;
6620 }
37a08a29 6621
90764a87 6622 target = 0;
dd27116b 6623 }
bbf6f052 6624
bbf6f052 6625
bbf6f052
RK
6626 switch (code)
6627 {
6628 case LABEL_DECL:
b552441b
RS
6629 {
6630 tree function = decl_function_context (exp);
c5c76735 6631
6de9cd9a
DN
6632 temp = label_rtx (exp);
6633 temp = gen_rtx_LABEL_REF (Pmode, temp);
6634
d0977240 6635 if (function != current_function_decl
6de9cd9a
DN
6636 && function != 0)
6637 LABEL_REF_NONLOCAL_P (temp) = 1;
6638
6639 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
26fcb35a 6640 return temp;
b552441b 6641 }
bbf6f052 6642
8b11a64c
ZD
6643 case SSA_NAME:
6644 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
6645 NULL);
6646
bbf6f052 6647 case PARM_DECL:
bbf6f052 6648 case VAR_DECL:
2dca20cd
RS
6649 /* If a static var's type was incomplete when the decl was written,
6650 but the type is complete now, lay out the decl now. */
ca06cfe6
RH
6651 if (DECL_SIZE (exp) == 0
6652 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
2dca20cd 6653 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
a46666a9 6654 layout_decl (exp, 0);
921b3427 6655
0f41302f 6656 /* ... fall through ... */
d6a5ac33 6657
2dca20cd 6658 case FUNCTION_DECL:
bbf6f052 6659 case RESULT_DECL:
5b0264cb 6660 gcc_assert (DECL_RTL (exp));
d6a5ac33 6661
e44842fe
RK
 6662 /* Ensure the variable is marked as used even if it doesn't go through
 6663 a parser. If it hasn't been used yet, write out an external
6664 definition. */
6665 if (! TREE_USED (exp))
6666 {
6667 assemble_external (exp);
6668 TREE_USED (exp) = 1;
6669 }
6670
dc6d66b3
RK
6671 /* Show we haven't gotten RTL for this yet. */
6672 temp = 0;
6673
ab8907ef
RH
6674 /* Variables inherited from containing functions should have
6675 been lowered by this point. */
bbf6f052 6676 context = decl_function_context (exp);
5b0264cb
NS
6677 gcc_assert (!context
6678 || context == current_function_decl
6679 || TREE_STATIC (exp)
6680 /* ??? C++ creates functions that are not TREE_STATIC. */
6681 || TREE_CODE (exp) == FUNCTION_DECL);
4af3895e 6682
bbf6f052
RK
6683 /* This is the case of an array whose size is to be determined
6684 from its initializer, while the initializer is still being parsed.
6685 See expand_decl. */
d6a5ac33 6686
5b0264cb 6687 if (MEM_P (DECL_RTL (exp))
f8cfc6aa 6688 && REG_P (XEXP (DECL_RTL (exp), 0)))
792760b9 6689 temp = validize_mem (DECL_RTL (exp));
d6a5ac33
RK
6690
 6691 /* If DECL_RTL is memory, we are in the normal case. If either
 6692 the address is not valid, or it is not a register and -fforce-addr
 6693 is specified, get the address into a register. */
6694
3c0cb5de 6695 else if (MEM_P (DECL_RTL (exp))
dc6d66b3
RK
6696 && modifier != EXPAND_CONST_ADDRESS
6697 && modifier != EXPAND_SUM
6698 && modifier != EXPAND_INITIALIZER
6699 && (! memory_address_p (DECL_MODE (exp),
6700 XEXP (DECL_RTL (exp), 0))
6701 || (flag_force_addr
f8cfc6aa 6702 && !REG_P (XEXP (DECL_RTL (exp), 0)))))
0fab64a3
MM
6703 {
6704 if (alt_rtl)
6705 *alt_rtl = DECL_RTL (exp);
6706 temp = replace_equiv_address (DECL_RTL (exp),
6707 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6708 }
1499e0a8 6709
dc6d66b3 6710 /* If we got something, return it. But first, set the alignment
04956a1a 6711 if the address is a register. */
dc6d66b3
RK
6712 if (temp != 0)
6713 {
3c0cb5de 6714 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
bdb429a5 6715 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
dc6d66b3
RK
6716
6717 return temp;
6718 }
6719
1499e0a8
RK
6720 /* If the mode of DECL_RTL does not match that of the decl, it
6721 must be a promoted value. We return a SUBREG of the wanted mode,
6722 but mark it so that we know that it was already extended. */
6723
f8cfc6aa 6724 if (REG_P (DECL_RTL (exp))
7254c5fa 6725 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
1499e0a8 6726 {
5b0264cb
NS
6727 enum machine_mode pmode;
6728
1499e0a8
RK
6729 /* Get the signedness used for this variable. Ensure we get the
6730 same mode we got when the variable was declared. */
5b0264cb
NS
6731 pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
6732 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0));
6733 gcc_assert (GET_MODE (DECL_RTL (exp)) == pmode);
1499e0a8 6734
ddef6bc7 6735 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
1499e0a8 6736 SUBREG_PROMOTED_VAR_P (temp) = 1;
7879b81e 6737 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
1499e0a8
RK
6738 return temp;
6739 }
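/* Editor's note (illustrative, register number invented): on a target
   whose PROMOTE_MODE widens HImode locals to SImode registers,
   DECL_RTL might be (reg:SI 58) while DECL_MODE is HImode; the code
   above then returns a lowpart SUBREG such as

     (subreg:HI (reg:SI 58) 0)

   (byte offset per endianness) with SUBREG_PROMOTED_VAR_P set, so
   later consumers know the upper bits already hold a valid extension
   and need not re-extend.  */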
6740
bbf6f052
RK
6741 return DECL_RTL (exp);
6742
6743 case INTEGER_CST:
d8a50944 6744 temp = immed_double_const (TREE_INT_CST_LOW (exp),
05bccae2 6745 TREE_INT_CST_HIGH (exp), mode);
bbf6f052 6746
d8a50944
RH
6747 /* ??? If overflow is set, fold will have done an incomplete job,
6748 which can result in (plus xx (const_int 0)), which can get
6749 simplified by validate_replace_rtx during virtual register
6750 instantiation, which can result in unrecognizable insns.
6751 Avoid this by forcing all overflows into registers. */
c2e9dc85
RH
6752 if (TREE_CONSTANT_OVERFLOW (exp)
6753 && modifier != EXPAND_INITIALIZER)
d8a50944
RH
6754 temp = force_reg (mode, temp);
6755
6756 return temp;
6757
d744e06e 6758 case VECTOR_CST:
3a021db2
PB
6759 if (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_INT
6760 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_FLOAT)
6761 return const_vector_from_tree (exp);
caf93cb0 6762 else
4038c495
GB
6763 return expand_expr (build_constructor_from_list
6764 (TREE_TYPE (exp),
6765 TREE_VECTOR_CST_ELTS (exp)),
3a021db2 6766 ignore ? const0_rtx : target, tmode, modifier);
d744e06e 6767
bbf6f052 6768 case CONST_DECL:
8403445a 6769 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
bbf6f052
RK
6770
6771 case REAL_CST:
6772 /* If optimized, generate immediate CONST_DOUBLE
3a94c984
KH
6773 which will be turned into memory by reload if necessary.
6774
bbf6f052
RK
6775 We used to force a register so that loop.c could see it. But
6776 this does not allow gen_* patterns to perform optimizations with
6777 the constants. It also produces two insns in cases like "x = 1.0;".
6778 On most machines, floating-point constants are not permitted in
6779 many insns, so we'd end up copying it to a register in any case.
6780
6781 Now, we do the copying in expand_binop, if appropriate. */
5692c7bc
ZW
6782 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6783 TYPE_MODE (TREE_TYPE (exp)));
bbf6f052
RK
6784
6785 case COMPLEX_CST:
9ad58e09
RS
6786 /* Handle evaluating a complex constant in a CONCAT target. */
6787 if (original_target && GET_CODE (original_target) == CONCAT)
6788 {
6789 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6790 rtx rtarg, itarg;
6791
6792 rtarg = XEXP (original_target, 0);
6793 itarg = XEXP (original_target, 1);
6794
6795 /* Move the real and imaginary parts separately. */
6796 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6797 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6798
6799 if (op0 != rtarg)
6800 emit_move_insn (rtarg, op0);
6801 if (op1 != itarg)
6802 emit_move_insn (itarg, op1);
6803
6804 return original_target;
6805 }
6806
71c0e7fc 6807 /* ... fall through ... */
9ad58e09 6808
bbf6f052 6809 case STRING_CST:
afc6aaab 6810 temp = output_constant_def (exp, 1);
bbf6f052 6811
afc6aaab 6812 /* temp contains a constant address.
bbf6f052
RK
6813 On RISC machines where a constant address isn't valid,
6814 make some insns to get that address into a register. */
afc6aaab 6815 if (modifier != EXPAND_CONST_ADDRESS
bbf6f052
RK
6816 && modifier != EXPAND_INITIALIZER
6817 && modifier != EXPAND_SUM
afc6aaab
ZW
6818 && (! memory_address_p (mode, XEXP (temp, 0))
6819 || flag_force_addr))
6820 return replace_equiv_address (temp,
6821 copy_rtx (XEXP (temp, 0)));
6822 return temp;
bbf6f052
RK
6823
6824 case SAVE_EXPR:
82c82743
RH
6825 {
6826 tree val = TREE_OPERAND (exp, 0);
6827 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
e5e809f4 6828
7f5e6307 6829 if (!SAVE_EXPR_RESOLVED_P (exp))
82c82743
RH
6830 {
6831 /* We can indeed still hit this case, typically via builtin
6832 expanders calling save_expr immediately before expanding
6833 something. Assume this means that we only have to deal
6834 with non-BLKmode values. */
5b0264cb 6835 gcc_assert (GET_MODE (ret) != BLKmode);
1499e0a8 6836
82c82743
RH
6837 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
6838 DECL_ARTIFICIAL (val) = 1;
7f5e6307 6839 DECL_IGNORED_P (val) = 1;
82c82743 6840 TREE_OPERAND (exp, 0) = val;
7f5e6307 6841 SAVE_EXPR_RESOLVED_P (exp) = 1;
1499e0a8 6842
82c82743
RH
6843 if (!CONSTANT_P (ret))
6844 ret = copy_to_reg (ret);
6845 SET_DECL_RTL (val, ret);
6846 }
1499e0a8 6847
82c82743
RH
6848 return ret;
6849 }
bbf6f052 6850
70e6ca43
APB
6851 case GOTO_EXPR:
6852 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6853 expand_goto (TREE_OPERAND (exp, 0));
6854 else
6855 expand_computed_goto (TREE_OPERAND (exp, 0));
6856 return const0_rtx;
6857
bbf6f052 6858 case CONSTRUCTOR:
dd27116b
RK
6859 /* If we don't need the result, just ensure we evaluate any
6860 subexpressions. */
6861 if (ignore)
6862 {
4038c495
GB
6863 unsigned HOST_WIDE_INT idx;
6864 tree value;
37a08a29 6865
4038c495
GB
6866 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6867 expand_expr (value, const0_rtx, VOIDmode, 0);
37a08a29 6868
dd27116b
RK
6869 return const0_rtx;
6870 }
3207b172 6871
c5250139
RG
6872 /* Try to avoid creating a temporary at all. This is possible
6873 if all of the initializer is zero.
6874 FIXME: try to handle all [0..255] initializers we can handle
6875 with memset. */
6876 else if (TREE_STATIC (exp)
6877 && !TREE_ADDRESSABLE (exp)
6878 && target != 0 && mode == BLKmode
6879 && all_zeros_p (exp))
6880 {
6881 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
6882 return target;
6883 }
6884
4af3895e
JVA
6885 /* All elts simple constants => refer to a constant in memory. But
6886 if this is a non-BLKmode mode, let it store a field at a time
6887 since that should make a CONST_INT or CONST_DOUBLE when we
3207b172 6888 fold. Likewise, if we have a target we can use, it is best to
d720b9d1
RK
6889 store directly into the target unless the type is large enough
6890 that memcpy will be used. If we are making an initializer and
00182e1e
AH
6891 all operands are constant, put it in memory as well.
6892
6893 FIXME: Avoid trying to fill vector constructors piece-meal.
6894 Output them with output_constant_def below unless we're sure
6895 they're zeros. This should go away when vector initializers
6896 are treated like VECTOR_CST instead of arrays.
6897 */
dd27116b 6898 else if ((TREE_STATIC (exp)
3207b172 6899 && ((mode == BLKmode
e5e809f4 6900 && ! (target != 0 && safe_from_p (target, exp, 1)))
d720b9d1 6901 || TREE_ADDRESSABLE (exp)
19caa751 6902 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
3a94c984 6903 && (! MOVE_BY_PIECES_P
19caa751
RK
6904 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6905 TYPE_ALIGN (type)))
6de9cd9a 6906 && ! mostly_zeros_p (exp))))
f59700f9
RK
6907 || ((modifier == EXPAND_INITIALIZER
6908 || modifier == EXPAND_CONST_ADDRESS)
6909 && TREE_CONSTANT (exp)))
bbf6f052 6910 {
bd7cf17e 6911 rtx constructor = output_constant_def (exp, 1);
19caa751 6912
b552441b
RS
6913 if (modifier != EXPAND_CONST_ADDRESS
6914 && modifier != EXPAND_INITIALIZER
792760b9
RK
6915 && modifier != EXPAND_SUM)
6916 constructor = validize_mem (constructor);
6917
bbf6f052
RK
6918 return constructor;
6919 }
bbf6f052
RK
6920 else
6921 {
e9ac02a6
JW
6922 /* Handle calls that pass values in multiple non-contiguous
6923 locations. The Irix 6 ABI has examples of this. */
e5e809f4 6924 if (target == 0 || ! safe_from_p (target, exp, 1)
8403445a
AM
6925 || GET_CODE (target) == PARALLEL
6926 || modifier == EXPAND_STACK_PARM)
1da68f56
RK
6927 target
6928 = assign_temp (build_qualified_type (type,
6929 (TYPE_QUALS (type)
6930 | (TREE_READONLY (exp)
6931 * TYPE_QUAL_CONST))),
c24ae149 6932 0, TREE_ADDRESSABLE (exp), 1);
07604beb 6933
dbb5c281 6934 store_constructor (exp, target, 0, int_expr_size (exp));
bbf6f052
RK
6935 return target;
6936 }
6937
7ccf35ed
DN
6938 case MISALIGNED_INDIRECT_REF:
6939 case ALIGN_INDIRECT_REF:
bbf6f052
RK
6940 case INDIRECT_REF:
6941 {
6942 tree exp1 = TREE_OPERAND (exp, 0);
3a94c984 6943
6de9cd9a
DN
6944 if (modifier != EXPAND_WRITE)
6945 {
6946 tree t;
6947
6948 t = fold_read_from_constant_string (exp);
6949 if (t)
6950 return expand_expr (t, target, tmode, modifier);
6951 }
bbf6f052 6952
405f0da6
JW
6953 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6954 op0 = memory_address (mode, op0);
7ccf35ed
DN
6955
6956 if (code == ALIGN_INDIRECT_REF)
6957 {
6958 int align = TYPE_ALIGN_UNIT (type);
6959 op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
6960 op0 = memory_address (mode, op0);
6961 }
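/* Editor's worked example (illustrative): for a 16-byte aligned
   access, align == 16 and GEN_INT (-align) is the mask ...fffffff0,
   so an address such as 0x1007 is rounded down to 0x1000 before the
   MEM is formed; ALIGN_INDIRECT_REF discards the low bits by
   construction.  */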
6962
38a448ca 6963 temp = gen_rtx_MEM (mode, op0);
8b11a64c 6964
ac182688 6965 set_mem_attributes (temp, exp, 0);
1125706f 6966
1e0598e2
RH
6967 /* Resolve the misalignment now, so that we don't have to remember
6968 to resolve it later. Of course, this only works for reads. */
6969 /* ??? When we get around to supporting writes, we'll have to handle
6970 this in store_expr directly. The vectorizer isn't generating
6971 those yet, however. */
6972 if (code == MISALIGNED_INDIRECT_REF)
6973 {
6974 int icode;
6975 rtx reg, insn;
6976
29b2d867
RH
6977 gcc_assert (modifier == EXPAND_NORMAL
6978 || modifier == EXPAND_STACK_PARM);
1e0598e2
RH
6979
6980 /* The vectorizer should have already checked the mode. */
6981 icode = movmisalign_optab->handlers[mode].insn_code;
6982 gcc_assert (icode != CODE_FOR_nothing);
6983
6984 /* We've already validated the memory, and we're creating a
6985 new pseudo destination. The predicates really can't fail. */
6986 reg = gen_reg_rtx (mode);
6987
6988 /* Nor can the insn generator. */
6989 insn = GEN_FCN (icode) (reg, temp);
6990 emit_insn (insn);
6991
6992 return reg;
6993 }
6994
8c8a8e34
JW
6995 return temp;
6996 }
bbf6f052 6997
ac182688
ZD
6998 case TARGET_MEM_REF:
6999 {
7000 struct mem_address addr;
7001
7002 get_address_description (exp, &addr);
7003 op0 = addr_for_mem_ref (&addr, true);
7004 op0 = memory_address (mode, op0);
7005 temp = gen_rtx_MEM (mode, op0);
7006 set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
7007 }
7008 return temp;
7009
bbf6f052 7010 case ARRAY_REF:
6de9cd9a 7011
bbf6f052 7012 {
742920c7 7013 tree array = TREE_OPERAND (exp, 0);
45d8710e 7014 tree index = TREE_OPERAND (exp, 1);
742920c7 7015
742920c7 7016 /* Fold an expression like: "foo"[2].
ad2e7dd0
RK
7017 This is not done in fold so it won't happen inside &.
7018 Don't fold if this is for wide characters since it's too
7019 difficult to do correctly and this is a very rare case. */
742920c7 7020
017e1b43
RH
7021 if (modifier != EXPAND_CONST_ADDRESS
7022 && modifier != EXPAND_INITIALIZER
6de9cd9a
DN
7023 && modifier != EXPAND_MEMORY)
7024 {
7025 tree t = fold_read_from_constant_string (exp);
7026
7027 if (t)
7028 return expand_expr (t, target, tmode, modifier);
7029 }
bbf6f052 7030
742920c7
RK
7031 /* If this is a constant index into a constant array,
7032 just get the value from the array. Handle both the cases when
7033 we have an explicit constructor and when our operand is a variable
7034 that was declared const. */
4af3895e 7035
017e1b43
RH
7036 if (modifier != EXPAND_CONST_ADDRESS
7037 && modifier != EXPAND_INITIALIZER
7038 && modifier != EXPAND_MEMORY
7039 && TREE_CODE (array) == CONSTRUCTOR
7040 && ! TREE_SIDE_EFFECTS (array)
45d8710e 7041 && TREE_CODE (index) == INTEGER_CST)
742920c7 7042 {
4038c495
GB
7043 unsigned HOST_WIDE_INT ix;
7044 tree field, value;
05bccae2 7045
4038c495
GB
7046 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
7047 field, value)
7048 if (tree_int_cst_equal (field, index))
7049 {
7050 if (!TREE_SIDE_EFFECTS (value))
7051 return expand_expr (fold (value), target, tmode, modifier);
7052 break;
7053 }
742920c7 7054 }
3a94c984 7055
742920c7 7056 else if (optimize >= 1
cb5fa0f8
RK
7057 && modifier != EXPAND_CONST_ADDRESS
7058 && modifier != EXPAND_INITIALIZER
017e1b43 7059 && modifier != EXPAND_MEMORY
742920c7
RK
7060 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7061 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
beb0c2e0
RH
7062 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
7063 && targetm.binds_local_p (array))
742920c7 7064 {
08293add 7065 if (TREE_CODE (index) == INTEGER_CST)
742920c7
RK
7066 {
7067 tree init = DECL_INITIAL (array);
7068
742920c7
RK
7069 if (TREE_CODE (init) == CONSTRUCTOR)
7070 {
4038c495
GB
7071 unsigned HOST_WIDE_INT ix;
7072 tree field, value;
7073
7074 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
7075 field, value)
7076 if (tree_int_cst_equal (field, index))
7077 {
7078 if (!TREE_SIDE_EFFECTS (value))
7079 return expand_expr (fold (value), target, tmode,
7080 modifier);
7081 break;
7082 }
742920c7
RK
7083 }
7084 else if (TREE_CODE (init) == STRING_CST
05bccae2
RK
7085 && 0 > compare_tree_int (index,
7086 TREE_STRING_LENGTH (init)))
5c80f6e6
JJ
7087 {
7088 tree type = TREE_TYPE (TREE_TYPE (init));
7089 enum machine_mode mode = TYPE_MODE (type);
7090
7091 if (GET_MODE_CLASS (mode) == MODE_INT
7092 && GET_MODE_SIZE (mode) == 1)
21ef78aa
DE
7093 return gen_int_mode (TREE_STRING_POINTER (init)
7094 [TREE_INT_CST_LOW (index)], mode);
5c80f6e6 7095 }
742920c7
RK
7096 }
7097 }
7098 }
afc6aaab 7099 goto normal_inner_ref;
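/* Editor's worked example (illustrative, names invented): given

     static const char greeting[] = "foo";

   a read of greeting[2] at -O1 or above, whose STRING_CST initializer
   is still visible here, folds straight to gen_int_mode ('o', QImode),
   i.e. (const_int 111); no load from memory is emitted at all.  */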
bbf6f052
RK
7100
7101 case COMPONENT_REF:
4af3895e 7102 /* If the operand is a CONSTRUCTOR, we can just extract the
afc6aaab
ZW
7103 appropriate field if it is present. */
7104 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
4af3895e 7105 {
4038c495
GB
7106 unsigned HOST_WIDE_INT idx;
7107 tree field, value;
4af3895e 7108
4038c495
GB
7109 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7110 idx, field, value)
7111 if (field == TREE_OPERAND (exp, 1)
86b5812c
RK
7112 /* We can normally use the value of the field in the
7113 CONSTRUCTOR. However, if this is a bitfield in
7114 an integral mode that we can fit in a HOST_WIDE_INT,
7115 we must mask only the number of bits in the bitfield,
7116 since this is done implicitly by the constructor. If
7117 the bitfield does not meet either of those conditions,
7118 we can't do this optimization. */
4038c495
GB
7119 && (! DECL_BIT_FIELD (field)
7120 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
7121 && (GET_MODE_BITSIZE (DECL_MODE (field))
86b5812c
RK
7122 <= HOST_BITS_PER_WIDE_INT))))
7123 {
4038c495 7124 if (DECL_BIT_FIELD (field)
8403445a
AM
7125 && modifier == EXPAND_STACK_PARM)
7126 target = 0;
4038c495
GB
7127 op0 = expand_expr (value, target, tmode, modifier);
7128 if (DECL_BIT_FIELD (field))
86b5812c 7129 {
4038c495
GB
7130 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
7131 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
86b5812c 7132
4038c495 7133 if (TYPE_UNSIGNED (TREE_TYPE (field)))
86b5812c
RK
7134 {
7135 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
22273300 7136 op0 = expand_and (imode, op0, op1, target);
86b5812c
RK
7137 }
7138 else
7139 {
7140 tree count
4a90aeeb 7141 = build_int_cst (NULL_TREE,
7d60be94 7142 GET_MODE_BITSIZE (imode) - bitsize);
86b5812c
RK
7143
7144 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7145 target, 0);
7146 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7147 target, 0);
7148 }
7149 }
7150
7151 return op0;
7152 }
4af3895e 7153 }
afc6aaab 7154 goto normal_inner_ref;
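/* Editor's worked example (illustrative): pulling a 3-bit signed
   bit-field holding -2 out of a constant CONSTRUCTOR in SImode only
   keeps the low three bits, so the code above emits the equivalent of

     op0 = op0 << (32 - 3);
     op0 = op0 >> (32 - 3);    (arithmetic shift, restoring the sign)

   whereas an unsigned field is simply masked with (1 << 3) - 1.  */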
4af3895e 7155
afc6aaab
ZW
7156 case BIT_FIELD_REF:
7157 case ARRAY_RANGE_REF:
7158 normal_inner_ref:
bbf6f052
RK
7159 {
7160 enum machine_mode mode1;
770ae6cc 7161 HOST_WIDE_INT bitsize, bitpos;
7bb0943f 7162 tree offset;
bbf6f052 7163 int volatilep = 0;
839c4796 7164 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
2614034e 7165 &mode1, &unsignedp, &volatilep, true);
f47e9b4e 7166 rtx orig_op0;
bbf6f052 7167
e7f3c83f
RK
7168 /* If we got back the original object, something is wrong. Perhaps
7169 we are evaluating an expression too early. In any event, don't
7170 infinitely recurse. */
5b0264cb 7171 gcc_assert (tem != exp);
e7f3c83f 7172
3d27140a 7173 /* If TEM's type is a union of variable size, pass TARGET to the inner
b74f5ff2
RK
7174 computation, since it will need a temporary and TARGET is known
7175 to have to do. This occurs in unchecked conversion in Ada. */
3a94c984 7176
f47e9b4e
RK
7177 orig_op0 = op0
7178 = expand_expr (tem,
7179 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7180 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7181 != INTEGER_CST)
8403445a 7182 && modifier != EXPAND_STACK_PARM
f47e9b4e
RK
7183 ? target : NULL_RTX),
7184 VOIDmode,
7185 (modifier == EXPAND_INITIALIZER
8403445a
AM
7186 || modifier == EXPAND_CONST_ADDRESS
7187 || modifier == EXPAND_STACK_PARM)
f47e9b4e 7188 ? modifier : EXPAND_NORMAL);
bbf6f052 7189
60a23e2e
OH
7190 /* If this is a constant, put it into a register if it is a legitimate
7191 constant, OFFSET is 0, and we won't try to extract outside the
7192 register (in case we were passed a partially uninitialized object
7193 or a view_conversion to a larger size). Force the constant to
7194 memory otherwise. */
8c8a8e34
JW
7195 if (CONSTANT_P (op0))
7196 {
7197 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
14a774a9 7198 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
60a23e2e
OH
7199 && offset == 0
7200 && bitpos + bitsize <= GET_MODE_BITSIZE (mode))
8c8a8e34
JW
7201 op0 = force_reg (mode, op0);
7202 else
7203 op0 = validize_mem (force_const_mem (mode, op0));
7204 }
7205
60a23e2e
OH
 7206 /* Otherwise, if this object is not in memory and we either have an
7207 offset, a BLKmode result, or a reference outside the object, put it
7208 there. Such cases can occur in Ada if we have unchecked conversion
7209 of an expression from a scalar type to an array or record type or
7210 for an ARRAY_RANGE_REF whose type is BLKmode. */
3c0cb5de 7211 else if (!MEM_P (op0)
8d2e5f72 7212 && (offset != 0
60a23e2e 7213 || (bitpos + bitsize > GET_MODE_BITSIZE (GET_MODE (op0)))
8d2e5f72
RK
7214 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7215 {
82c82743
RH
7216 tree nt = build_qualified_type (TREE_TYPE (tem),
7217 (TYPE_QUALS (TREE_TYPE (tem))
7218 | TYPE_QUAL_CONST));
7219 rtx memloc = assign_temp (nt, 1, 1, 1);
450b1728 7220
82c82743
RH
7221 emit_move_insn (memloc, op0);
7222 op0 = memloc;
8d2e5f72
RK
7223 }
7224
7bb0943f
RS
7225 if (offset != 0)
7226 {
8403445a
AM
7227 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7228 EXPAND_SUM);
7bb0943f 7229
5b0264cb 7230 gcc_assert (MEM_P (op0));
2d48c13d 7231
2d48c13d 7232#ifdef POINTERS_EXTEND_UNSIGNED
4b6c1672 7233 if (GET_MODE (offset_rtx) != Pmode)
267b28bd 7234 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
fa06ab5c
RK
7235#else
7236 if (GET_MODE (offset_rtx) != ptr_mode)
7237 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
2d48c13d
JL
7238#endif
7239
e82407b5
EB
7240 if (GET_MODE (op0) == BLKmode
7241 /* A constant address in OP0 can have VOIDmode, we must
7242 not try to call force_reg in that case. */
efd07ca7 7243 && GET_MODE (XEXP (op0, 0)) != VOIDmode
14a774a9 7244 && bitsize != 0
3a94c984 7245 && (bitpos % bitsize) == 0
89752202 7246 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
a06ef755 7247 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
89752202 7248 {
e3c8ea67 7249 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
89752202
HB
7250 bitpos = 0;
7251 }
7252
0d4903b8
RK
7253 op0 = offset_address (op0, offset_rtx,
7254 highest_pow2_factor (offset));
7bb0943f
RS
7255 }
7256
1ce7f3c2
RK
7257 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7258 record its alignment as BIGGEST_ALIGNMENT. */
3c0cb5de 7259 if (MEM_P (op0) && bitpos == 0 && offset != 0
1ce7f3c2
RK
7260 && is_aligning_offset (offset, tem))
7261 set_mem_align (op0, BIGGEST_ALIGNMENT);
7262
bbf6f052 7263 /* Don't forget about volatility even if this is a bitfield. */
3c0cb5de 7264 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
bbf6f052 7265 {
f47e9b4e
RK
7266 if (op0 == orig_op0)
7267 op0 = copy_rtx (op0);
7268
bbf6f052
RK
7269 MEM_VOLATILE_P (op0) = 1;
7270 }
7271
010f87c4
JJ
7272 /* The following code doesn't handle CONCAT.
7273 Assume only bitpos == 0 can be used for CONCAT, due to
 7274 one-element arrays having the same mode as their element. */
7275 if (GET_CODE (op0) == CONCAT)
7276 {
5b0264cb
NS
7277 gcc_assert (bitpos == 0
7278 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
010f87c4
JJ
7279 return op0;
7280 }
7281
ccc98036
RS
7282 /* In cases where an aligned union has an unaligned object
7283 as a field, we might be extracting a BLKmode value from
7284 an integer-mode (e.g., SImode) object. Handle this case
7285 by doing the extract into an object as wide as the field
7286 (which we know to be the width of a basic mode), then
cb5fa0f8 7287 storing into memory, and changing the mode to BLKmode. */
bbf6f052 7288 if (mode1 == VOIDmode
f8cfc6aa 7289 || REG_P (op0) || GET_CODE (op0) == SUBREG
cb5fa0f8
RK
7290 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7291 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
10c2a453
RK
7292 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7293 && modifier != EXPAND_CONST_ADDRESS
7294 && modifier != EXPAND_INITIALIZER)
cb5fa0f8
RK
7295 /* If the field isn't aligned enough to fetch as a memref,
7296 fetch it as a bit field. */
7297 || (mode1 != BLKmode
9e5f281f 7298 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
e82407b5 7299 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
3c0cb5de 7300 || (MEM_P (op0)
e82407b5
EB
7301 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7302 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
a8f3bf52
RK
7303 && ((modifier == EXPAND_CONST_ADDRESS
7304 || modifier == EXPAND_INITIALIZER)
7305 ? STRICT_ALIGNMENT
7306 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
9e5f281f 7307 || (bitpos % BITS_PER_UNIT != 0)))
cb5fa0f8
RK
7308 /* If the type and the field are a constant size and the
7309 size of the type isn't the same size as the bitfield,
7310 we must use bitfield operations. */
7311 || (bitsize >= 0
dbe4d070
RH
7312 && TYPE_SIZE (TREE_TYPE (exp))
7313 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
cb5fa0f8 7314 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
a06ef755 7315 bitsize)))
bbf6f052 7316 {
bbf6f052
RK
7317 enum machine_mode ext_mode = mode;
7318
14a774a9 7319 if (ext_mode == BLKmode
3c0cb5de
JQ
7320 && ! (target != 0 && MEM_P (op0)
7321 && MEM_P (target)
14a774a9 7322 && bitpos % BITS_PER_UNIT == 0))
bbf6f052
RK
7323 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7324
7325 if (ext_mode == BLKmode)
a281e72d 7326 {
7a06d606
RK
7327 if (target == 0)
7328 target = assign_temp (type, 0, 1, 1);
7329
7330 if (bitsize == 0)
7331 return target;
7332
a281e72d
RK
7333 /* In this case, BITPOS must start at a byte boundary and
7334 TARGET, if specified, must be a MEM. */
5b0264cb
NS
7335 gcc_assert (MEM_P (op0)
7336 && (!target || MEM_P (target))
7337 && !(bitpos % BITS_PER_UNIT));
a281e72d 7338
7a06d606
RK
7339 emit_block_move (target,
7340 adjust_address (op0, VOIDmode,
7341 bitpos / BITS_PER_UNIT),
a06ef755 7342 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
44bb111a 7343 / BITS_PER_UNIT),
8403445a
AM
7344 (modifier == EXPAND_STACK_PARM
7345 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
3a94c984 7346
a281e72d
RK
7347 return target;
7348 }
bbf6f052 7349
dc6d66b3
RK
7350 op0 = validize_mem (op0);
7351
3c0cb5de 7352 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
04050c69 7353 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
dc6d66b3 7354
8403445a
AM
7355 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7356 (modifier == EXPAND_STACK_PARM
7357 ? NULL_RTX : target),
b3520980 7358 ext_mode, ext_mode);
ef19912d
RK
7359
7360 /* If the result is a record type and BITSIZE is narrower than
7361 the mode of OP0, an integral mode, and this is a big endian
7362 machine, we must put the field into the high-order bits. */
7363 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7364 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
65a07688 7365 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
ef19912d
RK
7366 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7367 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7368 - bitsize),
7369 op0, 1);
7370
daae8185
RK
7371 /* If the result type is BLKmode, store the data into a temporary
7372 of the appropriate type, but with the mode corresponding to the
7373 mode for the data we have (op0's mode). It's tempting to make
7374 this a constant type, since we know it's only being stored once,
7375 but that can cause problems if we are taking the address of this
7376 COMPONENT_REF because the MEM of any reference via that address
7377 will have flags corresponding to the type, which will not
7378 necessarily be constant. */
bbf6f052
RK
7379 if (mode == BLKmode)
7380 {
daae8185
RK
7381 rtx new
7382 = assign_stack_temp_for_type
7383 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
bbf6f052
RK
7384
7385 emit_move_insn (new, op0);
7386 op0 = copy_rtx (new);
7387 PUT_MODE (op0, BLKmode);
c3d32120 7388 set_mem_attributes (op0, exp, 1);
bbf6f052
RK
7389 }
7390
7391 return op0;
7392 }
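/* Editor's note (illustrative): on a big-endian target, extract_bit_field
   leaves, say, a 5-bit RECORD_TYPE field in the low bits of an SImode
   value; the shift just above moves it up by (32 - 5) so it occupies
   the high-order end, matching where the record layout expects it.  */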
7393
05019f83
RK
7394 /* If the result is BLKmode, use that to access the object
7395 now as well. */
7396 if (mode == BLKmode)
7397 mode1 = BLKmode;
7398
bbf6f052
RK
7399 /* Get a reference to just this component. */
7400 if (modifier == EXPAND_CONST_ADDRESS
7401 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
f1ec5147 7402 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
bbf6f052 7403 else
f4ef873c 7404 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
41472af8 7405
f47e9b4e
RK
7406 if (op0 == orig_op0)
7407 op0 = copy_rtx (op0);
7408
3bdf5ad1 7409 set_mem_attributes (op0, exp, 0);
f8cfc6aa 7410 if (REG_P (XEXP (op0, 0)))
a06ef755 7411 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
dc6d66b3 7412
bbf6f052 7413 MEM_VOLATILE_P (op0) |= volatilep;
0d15e60c 7414 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
08bbd316 7415 || modifier == EXPAND_CONST_ADDRESS
0d15e60c 7416 || modifier == EXPAND_INITIALIZER)
bbf6f052 7417 return op0;
0d15e60c 7418 else if (target == 0)
bbf6f052 7419 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
0d15e60c 7420
bbf6f052
RK
7421 convert_move (target, op0, unsignedp);
7422 return target;
7423 }
7424
0f59171d
RH
7425 case OBJ_TYPE_REF:
7426 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
4a8d0c9c 7427
bbf6f052
RK
7428 case CALL_EXPR:
7429 /* Check for a built-in function. */
7430 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
d6a5ac33
RK
7431 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7432 == FUNCTION_DECL)
bbf6f052 7433 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
0fb7aeda 7434 {
c70eaeaf
KG
7435 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7436 == BUILT_IN_FRONTEND)
673fda6b
SB
7437 return lang_hooks.expand_expr (exp, original_target,
7438 tmode, modifier,
7439 alt_rtl);
c70eaeaf
KG
7440 else
7441 return expand_builtin (exp, target, subtarget, tmode, ignore);
7442 }
d6a5ac33 7443
8129842c 7444 return expand_call (exp, target, ignore);
bbf6f052
RK
7445
7446 case NON_LVALUE_EXPR:
7447 case NOP_EXPR:
7448 case CONVERT_EXPR:
4a53008b 7449 if (TREE_OPERAND (exp, 0) == error_mark_node)
a592f288 7450 return const0_rtx;
4a53008b 7451
bbf6f052
RK
7452 if (TREE_CODE (type) == UNION_TYPE)
7453 {
7454 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
14a774a9 7455
c3d32120
RK
7456 /* If both input and output are BLKmode, this conversion isn't doing
 7457 anything except possibly changing memory attributes. */
7458 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7459 {
7460 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7461 modifier);
7462
7463 result = copy_rtx (result);
7464 set_mem_attributes (result, exp, 0);
7465 return result;
7466 }
14a774a9 7467
bbf6f052 7468 if (target == 0)
cf7cb67e
JH
7469 {
7470 if (TYPE_MODE (type) != BLKmode)
7471 target = gen_reg_rtx (TYPE_MODE (type));
7472 else
7473 target = assign_temp (type, 0, 1, 1);
7474 }
d6a5ac33 7475
3c0cb5de 7476 if (MEM_P (target))
bbf6f052
RK
7477 /* Store data into beginning of memory target. */
7478 store_expr (TREE_OPERAND (exp, 0),
8403445a 7479 adjust_address (target, TYPE_MODE (valtype), 0),
6f4fd16d 7480 modifier == EXPAND_STACK_PARM);
1499e0a8 7481
bbf6f052 7482 else
5b0264cb
NS
7483 {
7484 gcc_assert (REG_P (target));
7485
7486 /* Store this field into a union of the proper type. */
7487 store_field (target,
7488 MIN ((int_size_in_bytes (TREE_TYPE
7489 (TREE_OPERAND (exp, 0)))
7490 * BITS_PER_UNIT),
7491 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7492 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
f45bdcd0 7493 type, 0);
5b0264cb 7494 }
bbf6f052
RK
7495
7496 /* Return the entire union. */
7497 return target;
7498 }
d6a5ac33 7499
7f62854a
RK
7500 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7501 {
7502 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
37a08a29 7503 modifier);
7f62854a
RK
7504
7505 /* If the signedness of the conversion differs and OP0 is
7506 a promoted SUBREG, clear that indication since we now
7507 have to do the proper extension. */
8df83eae 7508 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7f62854a
RK
7509 && GET_CODE (op0) == SUBREG)
7510 SUBREG_PROMOTED_VAR_P (op0) = 0;
7511
bc15d0ef 7512 return REDUCE_BIT_FIELD (op0);
7f62854a
RK
7513 }
7514
fdf473ae 7515 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
12342f90 7516 if (GET_MODE (op0) == mode)
7acda552 7517 ;
12342f90 7518
d6a5ac33 7519 /* If OP0 is a constant, just convert it into the proper mode. */
7acda552 7520 else if (CONSTANT_P (op0))
fdf473ae
RH
7521 {
7522 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7523 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7524
0fb7aeda 7525 if (modifier == EXPAND_INITIALIZER)
7acda552
RK
7526 op0 = simplify_gen_subreg (mode, op0, inner_mode,
7527 subreg_lowpart_offset (mode,
7528 inner_mode));
fdf473ae 7529 else
7acda552
RK
 7530 op0 = convert_modes (mode, inner_mode, op0,
7531 TYPE_UNSIGNED (inner_type));
fdf473ae 7532 }
12342f90 7533
7acda552
RK
7534 else if (modifier == EXPAND_INITIALIZER)
7535 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
d6a5ac33 7536
7acda552
RK
7537 else if (target == 0)
7538 op0 = convert_to_mode (mode, op0,
7539 TYPE_UNSIGNED (TREE_TYPE
7540 (TREE_OPERAND (exp, 0))));
bbf6f052 7541 else
7acda552
RK
7542 {
7543 convert_move (target, op0,
7544 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7545 op0 = target;
7546 }
7547
7548 return REDUCE_BIT_FIELD (op0);
bbf6f052 7549
ed239f5a 7550 case VIEW_CONVERT_EXPR:
37a08a29 7551 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
ed239f5a 7552
fabaaf36 7553 /* If the input and output modes are both the same, we are done. */
ed239f5a
RK
7554 if (TYPE_MODE (type) == GET_MODE (op0))
7555 ;
fabaaf36
RH
 7556 /* If neither mode is BLKmode and both modes are the same size,
 7557 then we can use gen_lowpart. */
ed239f5a 7558 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
fabaaf36
RH
7559 && GET_MODE_SIZE (TYPE_MODE (type))
7560 == GET_MODE_SIZE (GET_MODE (op0)))
0fd662ee
RH
7561 {
7562 if (GET_CODE (op0) == SUBREG)
7563 op0 = force_reg (GET_MODE (op0), op0);
7564 op0 = gen_lowpart (TYPE_MODE (type), op0);
7565 }
fabaaf36
RH
7566 /* If both modes are integral, then we can convert from one to the
7567 other. */
7568 else if (SCALAR_INT_MODE_P (GET_MODE (op0))
7569 && SCALAR_INT_MODE_P (TYPE_MODE (type)))
7570 op0 = convert_modes (TYPE_MODE (type), GET_MODE (op0), op0,
7571 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7572 /* As a last resort, spill op0 to memory, and reload it in a
7573 different mode. */
3c0cb5de 7574 else if (!MEM_P (op0))
ed239f5a 7575 {
c11c10d8
RK
7576 /* If the operand is not a MEM, force it into memory. Since we
 7577 are going to be changing the mode of the MEM, don't call
7578 force_const_mem for constants because we don't allow pool
7579 constants to change mode. */
ed239f5a 7580 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
ed239f5a 7581
5b0264cb 7582 gcc_assert (!TREE_ADDRESSABLE (exp));
ed239f5a 7583
c11c10d8
RK
7584 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7585 target
7586 = assign_stack_temp_for_type
7587 (TYPE_MODE (inner_type),
7588 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
ed239f5a 7589
c11c10d8
RK
7590 emit_move_insn (target, op0);
7591 op0 = target;
ed239f5a
RK
7592 }
7593
c11c10d8
RK
7594 /* At this point, OP0 is in the correct mode. If the output type is such
7595 that the operand is known to be aligned, indicate that it is.
7596 Otherwise, we need only be concerned about alignment for non-BLKmode
7597 results. */
3c0cb5de 7598 if (MEM_P (op0))
ed239f5a
RK
7599 {
7600 op0 = copy_rtx (op0);
7601
ed239f5a
RK
7602 if (TYPE_ALIGN_OK (type))
7603 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7604 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7605 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7606 {
7607 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
65a07688
RK
7608 HOST_WIDE_INT temp_size
7609 = MAX (int_size_in_bytes (inner_type),
7610 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
ed239f5a
RK
7611 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7612 temp_size, 0, type);
c4e59f51 7613 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
ed239f5a 7614
5b0264cb 7615 gcc_assert (!TREE_ADDRESSABLE (exp));
c11c10d8 7616
ed239f5a
RK
7617 if (GET_MODE (op0) == BLKmode)
7618 emit_block_move (new_with_op0_mode, op0,
44bb111a 7619 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
8403445a
AM
7620 (modifier == EXPAND_STACK_PARM
7621 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
ed239f5a
RK
7622 else
7623 emit_move_insn (new_with_op0_mode, op0);
7624
7625 op0 = new;
7626 }
0fb7aeda 7627
c4e59f51 7628 op0 = adjust_address (op0, TYPE_MODE (type), 0);
ed239f5a
RK
7629 }
7630
7631 return op0;
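/* Editor's note (illustrative): the common VIEW_CONVERT_EXPR case is a
   same-size reinterpretation such as float <-> int (SFmode and SImode,
   both 4 bytes on typical targets); the gen_lowpart path above reduces
   to

     op0 = gen_lowpart (SImode, op0);

   which relabels the bits without emitting any conversion insn.  */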
7632
bbf6f052 7633 case PLUS_EXPR:
4dfa0342 7634 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
bbf6f052
RK
7635 something else, make sure we add the register to the constant and
7636 then to the other thing. This case can occur during strength
7637 reduction and doing it this way will produce better code if the
7638 frame pointer or argument pointer is eliminated.
7639
7640 fold-const.c will ensure that the constant is always in the inner
7641 PLUS_EXPR, so the only case we need to do anything about is if
7642 sp, ap, or fp is our second argument, in which case we must swap
7643 the innermost first argument and our second argument. */
7644
7645 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7646 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
4dfa0342
RH
7647 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
7648 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7649 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7650 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
bbf6f052
RK
7651 {
7652 tree t = TREE_OPERAND (exp, 1);
7653
7654 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7655 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7656 }
7657
88f63c77 7658 /* If the result is to be ptr_mode and we are adding an integer to
bbf6f052
RK
7659 something, we might be forming a constant. So try to use
7660 plus_constant. If it produces a sum and we can't accept it,
7661 use force_operand. This allows P = &ARR[const] to generate
7662 efficient code on machines where a SYMBOL_REF is not a valid
7663 address.
7664
7665 If this is an EXPAND_SUM call, always return the sum. */
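/* Editor's illustration (assumed example): for "p = &arr[10]" with
   4-byte elements, the symbolic operand expands to (symbol_ref "arr")
   and plus_constant folds in the 40-byte offset, so an EXPAND_SUM
   caller receives something like

     (const:SI (plus:SI (symbol_ref:SI ("arr")) (const_int 40)))

   rather than an add instruction into a fresh pseudo.  */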
c980ac49 7666 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
0fb7aeda 7667 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
bbf6f052 7668 {
8403445a
AM
7669 if (modifier == EXPAND_STACK_PARM)
7670 target = 0;
c980ac49
RS
7671 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7672 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7673 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7674 {
cbbc503e
JL
7675 rtx constant_part;
7676
c980ac49
RS
7677 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7678 EXPAND_SUM);
cbbc503e
JL
7679 /* Use immed_double_const to ensure that the constant is
7680 truncated according to the mode of OP1, then sign extended
7681 to a HOST_WIDE_INT. Using the constant directly can result
7682 in non-canonical RTL in a 64x32 cross compile. */
7683 constant_part
7684 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7685 (HOST_WIDE_INT) 0,
a5efcd63 7686 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7f401c74 7687 op1 = plus_constant (op1, INTVAL (constant_part));
c980ac49
RS
7688 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7689 op1 = force_operand (op1, target);
bc15d0ef 7690 return REDUCE_BIT_FIELD (op1);
c980ac49 7691 }
bbf6f052 7692
c980ac49 7693 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
dc38a610 7694 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
c980ac49
RS
7695 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7696 {
cbbc503e
JL
7697 rtx constant_part;
7698
c980ac49 7699 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
70d95bac
RH
7700 (modifier == EXPAND_INITIALIZER
7701 ? EXPAND_INITIALIZER : EXPAND_SUM));
c980ac49
RS
7702 if (! CONSTANT_P (op0))
7703 {
7704 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7705 VOIDmode, modifier);
f0e9957a
RS
7706 /* Return a PLUS if modifier says it's OK. */
7707 if (modifier == EXPAND_SUM
7708 || modifier == EXPAND_INITIALIZER)
7709 return simplify_gen_binary (PLUS, mode, op0, op1);
7710 goto binop2;
c980ac49 7711 }
cbbc503e
JL
7712 /* Use immed_double_const to ensure that the constant is
7713 truncated according to the mode of OP1, then sign extended
7714 to a HOST_WIDE_INT. Using the constant directly can result
7715 in non-canonical RTL in a 64x32 cross compile. */
7716 constant_part
7717 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7718 (HOST_WIDE_INT) 0,
2a94e396 7719 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7f401c74 7720 op0 = plus_constant (op0, INTVAL (constant_part));
c980ac49
RS
7721 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7722 op0 = force_operand (op0, target);
bc15d0ef 7723 return REDUCE_BIT_FIELD (op0);
c980ac49 7724 }
bbf6f052
RK
7725 }
7726
7727 /* No sense saving up arithmetic to be done
7728 if it's all in the wrong mode to form part of an address.
7729 And force_operand won't know whether to sign-extend or
7730 zero-extend. */
7731 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
88f63c77 7732 || mode != ptr_mode)
4ef7870a 7733 {
eb698c58
RS
7734 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7735 subtarget, &op0, &op1, 0);
6e7727eb
EB
7736 if (op0 == const0_rtx)
7737 return op1;
7738 if (op1 == const0_rtx)
7739 return op0;
4ef7870a
EB
7740 goto binop2;
7741 }
bbf6f052 7742
eb698c58
RS
7743 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7744 subtarget, &op0, &op1, modifier);
bc15d0ef 7745 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
bbf6f052
RK
7746
7747 case MINUS_EXPR:
ea87523e
RK
7748 /* For initializers, we are allowed to return a MINUS of two
7749 symbolic constants. Here we handle all cases when both operands
7750 are constant. */
bbf6f052
RK
7751 /* Handle difference of two symbolic constants,
7752 for the sake of an initializer. */
7753 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7754 && really_constant_p (TREE_OPERAND (exp, 0))
7755 && really_constant_p (TREE_OPERAND (exp, 1)))
7756 {
eb698c58
RS
7757 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7758 NULL_RTX, &op0, &op1, modifier);
ea87523e 7759
ea87523e
RK
7760 /* If the last operand is a CONST_INT, use plus_constant of
7761 the negated constant. Else make the MINUS. */
7762 if (GET_CODE (op1) == CONST_INT)
bc15d0ef 7763 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
ea87523e 7764 else
bc15d0ef 7765 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
bbf6f052 7766 }
ae431183 7767
1717e19e
UW
7768 /* No sense saving up arithmetic to be done
7769 if it's all in the wrong mode to form part of an address.
7770 And force_operand won't know whether to sign-extend or
7771 zero-extend. */
7772 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7773 || mode != ptr_mode)
7774 goto binop;
7775
eb698c58
RS
7776 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7777 subtarget, &op0, &op1, modifier);
1717e19e
UW
7778
7779 /* Convert A - const to A + (-const). */
7780 if (GET_CODE (op1) == CONST_INT)
7781 {
7782 op1 = negate_rtx (mode, op1);
bc15d0ef 7783 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
1717e19e
UW
7784 }
7785
7786 goto binop2;
bbf6f052
RK
7787
7788 case MULT_EXPR:
bbf6f052
RK
7789 /* If first operand is constant, swap them.
7790 Thus the following special case checks need only
7791 check the second operand. */
7792 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7793 {
b3694847 7794 tree t1 = TREE_OPERAND (exp, 0);
bbf6f052
RK
7795 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7796 TREE_OPERAND (exp, 1) = t1;
7797 }
7798
7799 /* Attempt to return something suitable for generating an
7800 indexed address, for machines that support that. */
7801
88f63c77 7802 if (modifier == EXPAND_SUM && mode == ptr_mode
3b40e71b 7803 && host_integerp (TREE_OPERAND (exp, 1), 0))
bbf6f052 7804 {
48a5f2fa
DJ
7805 tree exp1 = TREE_OPERAND (exp, 1);
7806
921b3427
RK
7807 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7808 EXPAND_SUM);
bbf6f052 7809
f8cfc6aa 7810 if (!REG_P (op0))
906c4e36 7811 op0 = force_operand (op0, NULL_RTX);
f8cfc6aa 7812 if (!REG_P (op0))
bbf6f052
RK
7813 op0 = copy_to_mode_reg (mode, op0);
7814
bc15d0ef 7815 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
48a5f2fa 7816 gen_int_mode (tree_low_cst (exp1, 0),
bc15d0ef 7817 TYPE_MODE (TREE_TYPE (exp1)))));
bbf6f052
RK
7818 }
7819
8403445a
AM
7820 if (modifier == EXPAND_STACK_PARM)
7821 target = 0;
7822
bbf6f052
RK
7823 /* Check for multiplying things that have been extended
7824 from a narrower type. If this machine supports multiplying
7825 in that narrower type with a result in the desired type,
7826 do it that way, and avoid the explicit type-conversion. */
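	   /* For instance, for "short a, b; int prod = a * b;" both operands
	      arrive as NOP_EXPRs widening HImode values to SImode.  On a
	      target with a widening multiply pattern (e.g. mulhisi3) we can
	      expand the narrow operands directly and let that pattern form
	      the SImode product, instead of extending both operands first.  */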
7827 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7828 && TREE_CODE (type) == INTEGER_TYPE
7829 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7830 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7831 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7832 && int_fits_type_p (TREE_OPERAND (exp, 1),
7833 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7834 /* Don't use a widening multiply if a shift will do. */
7835 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
906c4e36 7836 > HOST_BITS_PER_WIDE_INT)
bbf6f052
RK
7837 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7838 ||
7839 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8df83eae
RK
7840 && (TYPE_PRECISION (TREE_TYPE
7841 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7842 == TYPE_PRECISION (TREE_TYPE
7843 (TREE_OPERAND
7844 (TREE_OPERAND (exp, 0), 0))))
bbf6f052
RK
7845 /* If both operands are extended, they must either both
7846 be zero-extended or both be sign-extended. */
8df83eae
RK
7847 && (TYPE_UNSIGNED (TREE_TYPE
7848 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7849 == TYPE_UNSIGNED (TREE_TYPE
7850 (TREE_OPERAND
7851 (TREE_OPERAND (exp, 0), 0)))))))
bbf6f052 7852 {
888d65b5
RS
7853 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
7854 enum machine_mode innermode = TYPE_MODE (op0type);
8df83eae 7855 bool zextend_p = TYPE_UNSIGNED (op0type);
888d65b5
RS
7856 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7857 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7858
d2348bd5 7859 if (mode == GET_MODE_2XWIDER_MODE (innermode))
bbf6f052 7860 {
b10af0c8
TG
7861 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7862 {
b10af0c8 7863 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
eb698c58
RS
7864 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7865 TREE_OPERAND (exp, 1),
7866 NULL_RTX, &op0, &op1, 0);
b10af0c8 7867 else
eb698c58
RS
7868 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7869 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7870 NULL_RTX, &op0, &op1, 0);
c4d70ce3 7871 goto binop3;
b10af0c8
TG
7872 }
7873 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7874 && innermode == word_mode)
7875 {
888d65b5 7876 rtx htem, hipart;
b10af0c8
TG
7877 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7878 NULL_RTX, VOIDmode, 0);
7879 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8c118062
GK
7880 op1 = convert_modes (innermode, mode,
7881 expand_expr (TREE_OPERAND (exp, 1),
7882 NULL_RTX, VOIDmode, 0),
7883 unsignedp);
b10af0c8
TG
7884 else
7885 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7886 NULL_RTX, VOIDmode, 0);
7887 temp = expand_binop (mode, other_optab, op0, op1, target,
7888 unsignedp, OPTAB_LIB_WIDEN);
888d65b5
RS
7889 hipart = gen_highpart (innermode, temp);
7890 htem = expand_mult_highpart_adjust (innermode, hipart,
7891 op0, op1, hipart,
7892 zextend_p);
7893 if (htem != hipart)
7894 emit_move_insn (hipart, htem);
bc15d0ef 7895 return REDUCE_BIT_FIELD (temp);
b10af0c8 7896 }
bbf6f052
RK
7897 }
7898 }
eb698c58
RS
7899 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7900 subtarget, &op0, &op1, 0);
bc15d0ef 7901 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
bbf6f052
RK
7902
7903 case TRUNC_DIV_EXPR:
7904 case FLOOR_DIV_EXPR:
7905 case CEIL_DIV_EXPR:
7906 case ROUND_DIV_EXPR:
7907 case EXACT_DIV_EXPR:
8403445a
AM
7908 if (modifier == EXPAND_STACK_PARM)
7909 target = 0;
bbf6f052
RK
7910 /* Possible optimization: compute the dividend with EXPAND_SUM
7911 then if the divisor is constant can optimize the case
7912 where some terms of the dividend have coeffs divisible by it. */
eb698c58
RS
7913 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7914 subtarget, &op0, &op1, 0);
bbf6f052
RK
7915 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7916
7917 case RDIV_EXPR:
bbf6f052
RK
7918 goto binop;
7919
7920 case TRUNC_MOD_EXPR:
7921 case FLOOR_MOD_EXPR:
7922 case CEIL_MOD_EXPR:
7923 case ROUND_MOD_EXPR:
8403445a
AM
7924 if (modifier == EXPAND_STACK_PARM)
7925 target = 0;
eb698c58
RS
7926 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7927 subtarget, &op0, &op1, 0);
bbf6f052
RK
7928 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7929
7930 case FIX_ROUND_EXPR:
7931 case FIX_FLOOR_EXPR:
7932 case FIX_CEIL_EXPR:
5b0264cb 7933 gcc_unreachable (); /* Not used for C. */
bbf6f052
RK
7934
7935 case FIX_TRUNC_EXPR:
906c4e36 7936 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8403445a 7937 if (target == 0 || modifier == EXPAND_STACK_PARM)
bbf6f052
RK
7938 target = gen_reg_rtx (mode);
7939 expand_fix (target, op0, unsignedp);
7940 return target;
7941
7942 case FLOAT_EXPR:
906c4e36 7943 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8403445a 7944 if (target == 0 || modifier == EXPAND_STACK_PARM)
bbf6f052
RK
7945 target = gen_reg_rtx (mode);
7946 /* expand_float can't figure out what to do if FROM has VOIDmode.
7947 So give it the correct mode. With -O, cse will optimize this. */
7948 if (GET_MODE (op0) == VOIDmode)
7949 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7950 op0);
7951 expand_float (target, op0,
8df83eae 7952 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
bbf6f052
RK
7953 return target;
7954
7955 case NEGATE_EXPR:
5b22bee8 7956 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8403445a
AM
7957 if (modifier == EXPAND_STACK_PARM)
7958 target = 0;
91ce572a 7959 temp = expand_unop (mode,
c4d70ce3
PB
7960 optab_for_tree_code (NEGATE_EXPR, type),
7961 op0, target, 0);
5b0264cb 7962 gcc_assert (temp);
bc15d0ef 7963 return REDUCE_BIT_FIELD (temp);
bbf6f052
RK
7964
7965 case ABS_EXPR:
7966 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8403445a
AM
7967 if (modifier == EXPAND_STACK_PARM)
7968 target = 0;
bbf6f052 7969
11017cc7 7970 /* ABS_EXPR is not valid for complex arguments. */
5b0264cb
NS
7971 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7972 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
2d7050fd 7973
bbf6f052
RK
7974 /* Unsigned abs is simply the operand. Testing here means we don't
7975 risk generating incorrect code below. */
8df83eae 7976 if (TYPE_UNSIGNED (type))
bbf6f052
RK
7977 return op0;
7978
91ce572a 7979 return expand_abs (mode, op0, target, unsignedp,
e5e809f4 7980 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
bbf6f052
RK
7981
7982 case MAX_EXPR:
7983 case MIN_EXPR:
7984 target = original_target;
8403445a
AM
7985 if (target == 0
7986 || modifier == EXPAND_STACK_PARM
3c0cb5de 7987 || (MEM_P (target) && MEM_VOLATILE_P (target))
d6a5ac33 7988 || GET_MODE (target) != mode
f8cfc6aa 7989 || (REG_P (target)
bbf6f052
RK
7990 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7991 target = gen_reg_rtx (mode);
eb698c58
RS
7992 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7993 target, &op0, &op1, 0);
bbf6f052
RK
7994
7995 /* First try to do it with a special MIN or MAX instruction.
7996 If that does not win, use a conditional jump to select the proper
7997 value. */
c4d70ce3 7998 this_optab = optab_for_tree_code (code, type);
bbf6f052
RK
7999 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8000 OPTAB_WIDEN);
8001 if (temp != 0)
8002 return temp;
8003
fa2981d8
JW
8004 /* At this point, a MEM target is no longer useful; we will get better
8005 code without it. */
3a94c984 8006
dbedefae 8007 if (! REG_P (target))
fa2981d8
JW
8008 target = gen_reg_rtx (mode);
8009
e3be1116
RS
8010 /* If op1 was placed in target, swap op0 and op1. */
8011 if (target != op0 && target == op1)
8012 {
927630a5 8013 temp = op0;
e3be1116 8014 op0 = op1;
927630a5 8015 op1 = temp;
e3be1116
RS
8016 }
8017
dbedefae
RS
8018 /* We generate better code and avoid problems with op1 mentioning
8019 target by forcing op1 into a pseudo if it isn't a constant. */
8020 if (! CONSTANT_P (op1))
8021 op1 = force_reg (mode, op1);
8022
230dedb3
JH
8023 {
8024 enum rtx_code comparison_code;
8025 rtx cmpop1 = op1;
927630a5 8026
230dedb3
JH
8027 if (code == MAX_EXPR)
8028 comparison_code = unsignedp ? GEU : GE;
8029 else
8030 comparison_code = unsignedp ? LEU : LE;
927630a5 8031
230dedb3
JH
 8032	 /* Canonicalize to comparisons against 0. */
8033 if (op1 == const1_rtx)
8034 {
8035 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8036 or (a != 0 ? a : 1) for unsigned.
8037 For MIN we are safe converting (a <= 1 ? a : 1)
8038 into (a <= 0 ? a : 1) */
8039 cmpop1 = const0_rtx;
8040 if (code == MAX_EXPR)
8041 comparison_code = unsignedp ? NE : GT;
8042 }
8043 if (op1 == constm1_rtx && !unsignedp)
8044 {
8045 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8046 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8047 cmpop1 = const0_rtx;
8048 if (code == MIN_EXPR)
8049 comparison_code = LT;
8050 }
8051#ifdef HAVE_conditional_move
8052 /* Use a conditional move if possible. */
8053 if (can_conditionally_move_p (mode))
8054 {
8055 rtx insn;
927630a5 8056
230dedb3
JH
8057 /* ??? Same problem as in expmed.c: emit_conditional_move
8058 forces a stack adjustment via compare_from_rtx, and we
8059 lose the stack adjustment if the sequence we are about
8060 to create is discarded. */
8061 do_pending_stack_adjust ();
927630a5 8062
230dedb3 8063 start_sequence ();
927630a5 8064
230dedb3
JH
8065 /* Try to emit the conditional move. */
8066 insn = emit_conditional_move (target, comparison_code,
8067 op0, cmpop1, mode,
8068 op0, op1, mode,
8069 unsignedp);
927630a5 8070
230dedb3
JH
8071 /* If we could do the conditional move, emit the sequence,
8072 and return. */
8073 if (insn)
8074 {
8075 rtx seq = get_insns ();
8076 end_sequence ();
8077 emit_insn (seq);
8078 return target;
8079 }
8080
8081 /* Otherwise discard the sequence and fall back to code with
8082 branches. */
8083 end_sequence ();
8084 }
927630a5 8085#endif
230dedb3
JH
8086 if (target != op0)
8087 emit_move_insn (target, op0);
d6a5ac33 8088
230dedb3 8089 temp = gen_label_rtx ();
d6a5ac33 8090
230dedb3
JH
8091 /* If this mode is an integer too wide to compare properly,
8092 compare word by word. Rely on cse to optimize constant cases. */
8093 if (GET_MODE_CLASS (mode) == MODE_INT
8094 && ! can_compare_p (GE, mode, ccp_jump))
8095 {
8096 if (code == MAX_EXPR)
8097 do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
8098 NULL_RTX, temp);
8099 else
8100 do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
8101 NULL_RTX, temp);
8102 }
8103 else
8104 {
8105 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8106 unsignedp, mode, NULL_RTX, NULL_RTX, temp);
8107 }
8108 }
b30f05db 8109 emit_move_insn (target, op1);
927630a5 8110 emit_label (temp);
bbf6f052
RK
8111 return target;
8112
bbf6f052
RK
8113 case BIT_NOT_EXPR:
8114 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8403445a
AM
8115 if (modifier == EXPAND_STACK_PARM)
8116 target = 0;
bbf6f052 8117 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
5b0264cb 8118 gcc_assert (temp);
bbf6f052
RK
8119 return temp;
8120
d6a5ac33
RK
8121 /* ??? Can optimize bitwise operations with one arg constant.
8122 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8123 and (a bitwise1 b) bitwise2 b (etc)
8124 but that is probably not worth while. */
8125
8126 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8127 boolean values when we want in all cases to compute both of them. In
8128 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8129 as actual zero-or-1 values and then bitwise anding. In cases where
8130 there cannot be any side effects, better code would be made by
8131 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8132 how to recognize those cases. */
8133
bbf6f052 8134 case TRUTH_AND_EXPR:
c4d70ce3 8135 code = BIT_AND_EXPR;
bbf6f052 8136 case BIT_AND_EXPR:
bbf6f052
RK
8137 goto binop;
8138
bbf6f052 8139 case TRUTH_OR_EXPR:
7efcb746 8140 code = BIT_IOR_EXPR;
bbf6f052 8141 case BIT_IOR_EXPR:
bbf6f052
RK
8142 goto binop;
8143
874726a8 8144 case TRUTH_XOR_EXPR:
c4d70ce3 8145 code = BIT_XOR_EXPR;
bbf6f052 8146 case BIT_XOR_EXPR:
bbf6f052
RK
8147 goto binop;
8148
8149 case LSHIFT_EXPR:
8150 case RSHIFT_EXPR:
8151 case LROTATE_EXPR:
8152 case RROTATE_EXPR:
e5e809f4 8153 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052 8154 subtarget = 0;
8403445a
AM
8155 if (modifier == EXPAND_STACK_PARM)
8156 target = 0;
bbf6f052
RK
8157 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8158 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8159 unsignedp);
8160
d6a5ac33
RK
8161 /* Could determine the answer when only additive constants differ. Also,
8162 the addition of one can be handled by changing the condition. */
bbf6f052
RK
8163 case LT_EXPR:
8164 case LE_EXPR:
8165 case GT_EXPR:
8166 case GE_EXPR:
8167 case EQ_EXPR:
8168 case NE_EXPR:
1eb8759b
RH
8169 case UNORDERED_EXPR:
8170 case ORDERED_EXPR:
8171 case UNLT_EXPR:
8172 case UNLE_EXPR:
8173 case UNGT_EXPR:
8174 case UNGE_EXPR:
8175 case UNEQ_EXPR:
d1a7edaf 8176 case LTGT_EXPR:
8403445a
AM
8177 temp = do_store_flag (exp,
8178 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8179 tmode != VOIDmode ? tmode : mode, 0);
bbf6f052
RK
8180 if (temp != 0)
8181 return temp;
d6a5ac33 8182
0f41302f 8183 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
bbf6f052
RK
8184 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8185 && original_target
f8cfc6aa 8186 && REG_P (original_target)
bbf6f052
RK
8187 && (GET_MODE (original_target)
8188 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8189 {
d6a5ac33
RK
8190 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8191 VOIDmode, 0);
8192
c0a3eeac
UW
8193 /* If temp is constant, we can just compute the result. */
8194 if (GET_CODE (temp) == CONST_INT)
8195 {
8196 if (INTVAL (temp) != 0)
8197 emit_move_insn (target, const1_rtx);
8198 else
8199 emit_move_insn (target, const0_rtx);
8200
8201 return target;
8202 }
8203
bbf6f052 8204 if (temp != original_target)
c0a3eeac
UW
8205 {
8206 enum machine_mode mode1 = GET_MODE (temp);
8207 if (mode1 == VOIDmode)
8208 mode1 = tmode != VOIDmode ? tmode : mode;
0fb7aeda 8209
c0a3eeac
UW
8210 temp = copy_to_mode_reg (mode1, temp);
8211 }
d6a5ac33 8212
bbf6f052 8213 op1 = gen_label_rtx ();
c5d5d461 8214 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
a06ef755 8215 GET_MODE (temp), unsignedp, op1);
bbf6f052
RK
8216 emit_move_insn (temp, const1_rtx);
8217 emit_label (op1);
8218 return temp;
8219 }
d6a5ac33 8220
25f3e06c
PB
8221 /* If no set-flag instruction, must generate a conditional store
8222 into a temporary variable. Drop through and handle this
8223 like && and ||. */
8224
8225 if (! ignore
8226 && (target == 0
8227 || modifier == EXPAND_STACK_PARM
8228 || ! safe_from_p (target, exp, 1)
8229 /* Make sure we don't have a hard reg (such as function's return
8230 value) live across basic blocks, if not optimizing. */
8231 || (!optimize && REG_P (target)
8232 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8233 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8234
8235 if (target)
8236 emit_move_insn (target, const0_rtx);
8237
8238 op1 = gen_label_rtx ();
8239 jumpifnot (exp, op1);
8240
8241 if (target)
8242 emit_move_insn (target, const1_rtx);
8243
8244 emit_label (op1);
8245 return ignore ? const0_rtx : target;
8246
bbf6f052 8247 case TRUTH_NOT_EXPR:
8403445a
AM
8248 if (modifier == EXPAND_STACK_PARM)
8249 target = 0;
bbf6f052
RK
8250 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8251 /* The parser is careful to generate TRUTH_NOT_EXPR
8252 only with operands that are always zero or one. */
906c4e36 8253 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
bbf6f052 8254 target, 1, OPTAB_LIB_WIDEN);
5b0264cb 8255 gcc_assert (temp);
bbf6f052
RK
8256 return temp;
8257
6de9cd9a
DN
8258 case STATEMENT_LIST:
8259 {
8260 tree_stmt_iterator iter;
8261
5b0264cb 8262 gcc_assert (ignore);
6de9cd9a
DN
8263
8264 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
8265 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
8266 }
8267 return const0_rtx;
8268
bbf6f052 8269 case COND_EXPR:
ba8081eb
KH
8270 /* A COND_EXPR with its type being VOID_TYPE represents a
8271 conditional jump and is handled in
8272 expand_gimple_cond_expr. */
8273 gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp)));
f676971a 8274
e5bacf32
PB
8275 /* Note that COND_EXPRs whose type is a structure or union
8276 are required to be constructed to contain assignments of
8277 a temporary variable, so that we can evaluate them here
8278 for side effect only. If type is void, we must do likewise. */
8279
5b0264cb
NS
8280 gcc_assert (!TREE_ADDRESSABLE (type)
8281 && !ignore
8282 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
8283 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
f676971a 8284
e5bacf32
PB
8285 /* If we are not to produce a result, we have no target. Otherwise,
8286 if a target was specified use it; it will not be used as an
8287 intermediate target unless it is safe. If no target, use a
8288 temporary. */
f676971a 8289
e5bacf32
PB
8290 if (modifier != EXPAND_STACK_PARM
8291 && original_target
8292 && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8293 && GET_MODE (original_target) == mode
7c00d1fe 8294#ifdef HAVE_conditional_move
e5bacf32
PB
8295 && (! can_conditionally_move_p (mode)
8296 || REG_P (original_target))
7c00d1fe 8297#endif
e5bacf32
PB
8298 && !MEM_P (original_target))
8299 temp = original_target;
8300 else
8301 temp = assign_temp (type, 0, 0, 1);
f676971a 8302
e5bacf32
PB
8303 do_pending_stack_adjust ();
8304 NO_DEFER_POP;
8305 op0 = gen_label_rtx ();
8306 op1 = gen_label_rtx ();
8307 jumpifnot (TREE_OPERAND (exp, 0), op0);
8308 store_expr (TREE_OPERAND (exp, 1), temp,
6f4fd16d 8309 modifier == EXPAND_STACK_PARM);
f676971a 8310
e5bacf32
PB
8311 emit_jump_insn (gen_jump (op1));
8312 emit_barrier ();
8313 emit_label (op0);
8314 store_expr (TREE_OPERAND (exp, 2), temp,
6f4fd16d 8315 modifier == EXPAND_STACK_PARM);
f676971a 8316
e5bacf32
PB
8317 emit_label (op1);
8318 OK_DEFER_POP;
8319 return temp;
f676971a 8320
7ce67fbe
DP
8321 case VEC_COND_EXPR:
8322 target = expand_vec_cond_expr (exp, target);
8323 return target;
8324
bbf6f052
RK
8325 case MODIFY_EXPR:
8326 {
bbf6f052
RK
8327 tree lhs = TREE_OPERAND (exp, 0);
8328 tree rhs = TREE_OPERAND (exp, 1);
bbf6f052 8329
df9af2bb
KH
8330 gcc_assert (ignore);
8331
bbf6f052
RK
8332 /* Check for |= or &= of a bitfield of size one into another bitfield
8333 of size 1. In this case, (unless we need the result of the
8334 assignment) we can do this more efficiently with a
8335 test followed by an assignment, if necessary.
8336
8337 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8338 things change so we do, this code should be enhanced to
8339 support it. */
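	   /* For example, with one-bit bit-fields, "s.a |= s.b" can be
	      expanded as "if (s.b) s.a = 1;" and "s.a &= s.b" as
	      "if (!s.b) s.a = 0;", avoiding a read-modify-write of the
	      destination bit-field.  */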
df9af2bb 8340 if (TREE_CODE (lhs) == COMPONENT_REF
bbf6f052
RK
8341 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8342 || TREE_CODE (rhs) == BIT_AND_EXPR)
8343 && TREE_OPERAND (rhs, 0) == lhs
8344 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
05bccae2
RK
8345 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8346 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
bbf6f052
RK
8347 {
8348 rtx label = gen_label_rtx ();
8349
8350 do_jump (TREE_OPERAND (rhs, 1),
8351 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8352 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8353 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8354 (TREE_CODE (rhs) == BIT_IOR_EXPR
8355 ? integer_one_node
e836a5a2 8356 : integer_zero_node)));
e7c33f54 8357 do_pending_stack_adjust ();
bbf6f052
RK
8358 emit_label (label);
8359 return const0_rtx;
8360 }
8361
e836a5a2 8362 expand_assignment (lhs, rhs);
0fb7aeda 8363
7f8adc4e 8364 return const0_rtx;
bbf6f052
RK
8365 }
8366
6e7f84a7
APB
8367 case RETURN_EXPR:
8368 if (!TREE_OPERAND (exp, 0))
8369 expand_null_return ();
8370 else
8371 expand_return (TREE_OPERAND (exp, 0));
8372 return const0_rtx;
8373
bbf6f052 8374 case ADDR_EXPR:
70bb498a 8375 return expand_expr_addr_expr (exp, target, tmode, modifier);
bbf6f052 8376
7308a047 8377 case COMPLEX_EXPR:
1466e387
RH
8378 /* Get the rtx code of the operands. */
8379 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8380 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
7308a047 8381
1466e387
RH
8382 if (!target)
8383 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6551fa4d 8384
1466e387
RH
8385 /* Move the real (op0) and imaginary (op1) parts to their location. */
8386 write_complex_part (target, op0, false);
8387 write_complex_part (target, op1, true);
7308a047 8388
1466e387 8389 return target;
7308a047
RS
8390
8391 case REALPART_EXPR:
2d7050fd 8392 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
1466e387 8393 return read_complex_part (op0, false);
3a94c984 8394
7308a047 8395 case IMAGPART_EXPR:
2d7050fd 8396 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
1466e387 8397 return read_complex_part (op0, true);
7308a047 8398
6de9cd9a
DN
8399 case RESX_EXPR:
8400 expand_resx_expr (exp);
8401 return const0_rtx;
8402
e976b8b2 8403 case TRY_CATCH_EXPR:
6de9cd9a 8404 case CATCH_EXPR:
6de9cd9a 8405 case EH_FILTER_EXPR:
b335b813 8406 case TRY_FINALLY_EXPR:
ac45df5d 8407 /* Lowered by tree-eh.c. */
5b0264cb 8408 gcc_unreachable ();
b335b813 8409
ac45df5d
RH
8410 case WITH_CLEANUP_EXPR:
8411 case CLEANUP_POINT_EXPR:
8412 case TARGET_EXPR:
165b54c3 8413 case CASE_LABEL_EXPR:
77c9db77 8414 case VA_ARG_EXPR:
caf93cb0 8415 case BIND_EXPR:
e5bacf32
PB
8416 case INIT_EXPR:
8417 case CONJ_EXPR:
8418 case COMPOUND_EXPR:
8419 case PREINCREMENT_EXPR:
8420 case PREDECREMENT_EXPR:
8421 case POSTINCREMENT_EXPR:
8422 case POSTDECREMENT_EXPR:
8423 case LOOP_EXPR:
8424 case EXIT_EXPR:
e5bacf32
PB
8425 case TRUTH_ANDIF_EXPR:
8426 case TRUTH_ORIF_EXPR:
ac45df5d 8427 /* Lowered by gimplify.c. */
5b0264cb 8428 gcc_unreachable ();
b335b813 8429
52a11cbf 8430 case EXC_PTR_EXPR:
86c99549 8431 return get_exception_pointer (cfun);
52a11cbf 8432
6de9cd9a
DN
8433 case FILTER_EXPR:
8434 return get_exception_filter (cfun);
8435
67231816
RH
8436 case FDESC_EXPR:
8437 /* Function descriptors are not valid except for as
8438 initialization constants, and should not be expanded. */
5b0264cb 8439 gcc_unreachable ();
67231816 8440
6de9cd9a 8441 case SWITCH_EXPR:
7efcb746 8442 expand_case (exp);
6de9cd9a
DN
8443 return const0_rtx;
8444
8445 case LABEL_EXPR:
8446 expand_label (TREE_OPERAND (exp, 0));
8447 return const0_rtx;
8448
6de9cd9a
DN
8449 case ASM_EXPR:
8450 expand_asm_expr (exp);
8451 return const0_rtx;
8452
d25cee4d
RH
8453 case WITH_SIZE_EXPR:
8454 /* WITH_SIZE_EXPR expands to its first argument. The caller should
8455 have pulled out the size to use in whatever context it needed. */
8456 return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
8457 modifier, alt_rtl);
8458
7ccf35ed
DN
8459 case REALIGN_LOAD_EXPR:
8460 {
8461 tree oprnd0 = TREE_OPERAND (exp, 0);
8462 tree oprnd1 = TREE_OPERAND (exp, 1);
8463 tree oprnd2 = TREE_OPERAND (exp, 2);
8464 rtx op2;
8465
8466 this_optab = optab_for_tree_code (code, type);
8467 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
8468 op2 = expand_expr (oprnd2, NULL_RTX, VOIDmode, 0);
8469 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8470 target, unsignedp);
535a42b1 8471 gcc_assert (temp);
7ccf35ed
DN
8472 return temp;
8473 }
8474
61d3cdbb
DN
8475 case REDUC_MAX_EXPR:
8476 case REDUC_MIN_EXPR:
8477 case REDUC_PLUS_EXPR:
8478 {
8479 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8480 this_optab = optab_for_tree_code (code, type);
8481 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
8482 gcc_assert (temp);
8483 return temp;
8484 }
7ccf35ed 8485
a6b46ba2
DN
8486 case VEC_LSHIFT_EXPR:
8487 case VEC_RSHIFT_EXPR:
8488 {
8489 target = expand_vec_shift_expr (exp, target);
8490 return target;
8491 }
8492
bbf6f052 8493 default:
673fda6b
SB
8494 return lang_hooks.expand_expr (exp, original_target, tmode,
8495 modifier, alt_rtl);
bbf6f052
RK
8496 }
8497
c4d70ce3 8498 /* Here to do an ordinary binary operator. */
bbf6f052 8499 binop:
eb698c58
RS
8500 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8501 subtarget, &op0, &op1, 0);
bbf6f052 8502 binop2:
c4d70ce3
PB
8503 this_optab = optab_for_tree_code (code, type);
8504 binop3:
8403445a
AM
8505 if (modifier == EXPAND_STACK_PARM)
8506 target = 0;
bbf6f052
RK
8507 temp = expand_binop (mode, this_optab, op0, op1, target,
8508 unsignedp, OPTAB_LIB_WIDEN);
5b0264cb 8509 gcc_assert (temp);
bc15d0ef
JM
8510 return REDUCE_BIT_FIELD (temp);
8511}
8512#undef REDUCE_BIT_FIELD
8513\f
8514/* Subroutine of above: reduce EXP to the precision of TYPE (in the
8515 signedness of TYPE), possibly returning the result in TARGET. */
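/* For an unsigned 3-bit type held in SImode this amounts to masking
   with (1 << 3) - 1; for a signed 3-bit type it is a left shift by
   GET_MODE_BITSIZE (SImode) - 3 followed by an arithmetic right shift
   by the same amount, which restores the sign extension.  */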
8516static rtx
8517reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
8518{
8519 HOST_WIDE_INT prec = TYPE_PRECISION (type);
8520 if (target && GET_MODE (target) != GET_MODE (exp))
8521 target = 0;
8522 if (TYPE_UNSIGNED (type))
8523 {
8524 rtx mask;
8525 if (prec < HOST_BITS_PER_WIDE_INT)
8526 mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
8527 GET_MODE (exp));
8528 else
8529 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
8530 ((unsigned HOST_WIDE_INT) 1
8531 << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
8532 GET_MODE (exp));
8533 return expand_and (GET_MODE (exp), exp, mask, target);
8534 }
8535 else
8536 {
4a90aeeb 8537 tree count = build_int_cst (NULL_TREE,
7d60be94 8538 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
bc15d0ef
JM
8539 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8540 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8541 }
bbf6f052 8542}
b93a436e 8543\f
1ce7f3c2
RK
8544/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
8545 when applied to the address of EXP produces an address known to be
8546 aligned more than BIGGEST_ALIGNMENT. */
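/* In effect it recognizes offsets of the form
   (- (sizetype) &EXP) & (ALIGN - 1), the usual idiom for rounding
   &EXP up to an ALIGN-byte boundary, with ALIGN a power of 2 giving
   more than BIGGEST_ALIGNMENT alignment.  */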
8547
8548static int
502b8322 8549is_aligning_offset (tree offset, tree exp)
1ce7f3c2 8550{
6fce44af 8551 /* Strip off any conversions. */
1ce7f3c2
RK
8552 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8553 || TREE_CODE (offset) == NOP_EXPR
6fce44af 8554 || TREE_CODE (offset) == CONVERT_EXPR)
1ce7f3c2
RK
8555 offset = TREE_OPERAND (offset, 0);
8556
8557 /* We must now have a BIT_AND_EXPR with a constant that is one less than
8558 power of 2 and which is larger than BIGGEST_ALIGNMENT. */
8559 if (TREE_CODE (offset) != BIT_AND_EXPR
8560 || !host_integerp (TREE_OPERAND (offset, 1), 1)
caf93cb0 8561 || compare_tree_int (TREE_OPERAND (offset, 1),
c0cfc691 8562 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
1ce7f3c2
RK
 8563      || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
8564 return 0;
8565
8566 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
8567 It must be NEGATE_EXPR. Then strip any more conversions. */
8568 offset = TREE_OPERAND (offset, 0);
8569 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8570 || TREE_CODE (offset) == NOP_EXPR
8571 || TREE_CODE (offset) == CONVERT_EXPR)
8572 offset = TREE_OPERAND (offset, 0);
8573
8574 if (TREE_CODE (offset) != NEGATE_EXPR)
8575 return 0;
8576
8577 offset = TREE_OPERAND (offset, 0);
8578 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8579 || TREE_CODE (offset) == NOP_EXPR
8580 || TREE_CODE (offset) == CONVERT_EXPR)
8581 offset = TREE_OPERAND (offset, 0);
8582
6fce44af
RK
8583 /* This must now be the address of EXP. */
8584 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
1ce7f3c2
RK
8585}
8586\f
e0a2f705 8587/* Return the tree node if an ARG corresponds to a string constant or zero
cc2902df 8588 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
fed3cef0
RK
8589 in bytes within the string that ARG is accessing. The type of the
8590 offset will be `sizetype'. */
b93a436e 8591
28f4ec01 8592tree
502b8322 8593string_constant (tree arg, tree *ptr_offset)
b93a436e 8594{
a45f71f5 8595 tree array, offset;
b93a436e
JL
8596 STRIP_NOPS (arg);
8597
a45f71f5 8598 if (TREE_CODE (arg) == ADDR_EXPR)
b93a436e 8599 {
a45f71f5
JJ
8600 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8601 {
8602 *ptr_offset = size_zero_node;
8603 return TREE_OPERAND (arg, 0);
8604 }
8605 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
8606 {
8607 array = TREE_OPERAND (arg, 0);
8608 offset = size_zero_node;
8609 }
8610 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
8611 {
8612 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
8613 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
8614 if (TREE_CODE (array) != STRING_CST
8615 && TREE_CODE (array) != VAR_DECL)
8616 return 0;
8617 }
8618 else
8619 return 0;
6de9cd9a 8620 }
b93a436e
JL
8621 else if (TREE_CODE (arg) == PLUS_EXPR)
8622 {
8623 tree arg0 = TREE_OPERAND (arg, 0);
8624 tree arg1 = TREE_OPERAND (arg, 1);
8625
8626 STRIP_NOPS (arg0);
8627 STRIP_NOPS (arg1);
8628
8629 if (TREE_CODE (arg0) == ADDR_EXPR
a45f71f5
JJ
8630 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
8631 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
bbf6f052 8632 {
a45f71f5
JJ
8633 array = TREE_OPERAND (arg0, 0);
8634 offset = arg1;
bbf6f052 8635 }
b93a436e 8636 else if (TREE_CODE (arg1) == ADDR_EXPR
a45f71f5
JJ
8637 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
8638 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
bbf6f052 8639 {
a45f71f5
JJ
8640 array = TREE_OPERAND (arg1, 0);
8641 offset = arg0;
bbf6f052 8642 }
a45f71f5
JJ
8643 else
8644 return 0;
8645 }
8646 else
8647 return 0;
8648
8649 if (TREE_CODE (array) == STRING_CST)
8650 {
8651 *ptr_offset = convert (sizetype, offset);
8652 return array;
8653 }
8654 else if (TREE_CODE (array) == VAR_DECL)
8655 {
8656 int length;
8657
8658 /* Variables initialized to string literals can be handled too. */
8659 if (DECL_INITIAL (array) == NULL_TREE
8660 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
8661 return 0;
8662
8663 /* If they are read-only, non-volatile and bind locally. */
8664 if (! TREE_READONLY (array)
8665 || TREE_SIDE_EFFECTS (array)
8666 || ! targetm.binds_local_p (array))
8667 return 0;
8668
8669 /* Avoid const char foo[4] = "abcde"; */
8670 if (DECL_SIZE_UNIT (array) == NULL_TREE
8671 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
8672 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
8673 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
8674 return 0;
8675
8676 /* If variable is bigger than the string literal, OFFSET must be constant
8677 and inside of the bounds of the string literal. */
8678 offset = convert (sizetype, offset);
8679 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
8680 && (! host_integerp (offset, 1)
8681 || compare_tree_int (offset, length) >= 0))
8682 return 0;
8683
8684 *ptr_offset = offset;
8685 return DECL_INITIAL (array);
b93a436e 8686 }
ca695ac9 8687
b93a436e
JL
8688 return 0;
8689}
ca695ac9 8690\f
b93a436e
JL
8691/* Generate code to calculate EXP using a store-flag instruction
8692 and return an rtx for the result. EXP is either a comparison
8693 or a TRUTH_NOT_EXPR whose operand is a comparison.
ca695ac9 8694
b93a436e 8695 If TARGET is nonzero, store the result there if convenient.
ca695ac9 8696
cc2902df 8697 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
b93a436e 8698 cheap.
ca695ac9 8699
b93a436e
JL
8700 Return zero if there is no suitable set-flag instruction
8701 available on this machine.
ca695ac9 8702
b93a436e
JL
8703 Once expand_expr has been called on the arguments of the comparison,
8704 we are committed to doing the store flag, since it is not safe to
8705 re-evaluate the expression. We emit the store-flag insn by calling
8706 emit_store_flag, but only expand the arguments if we have a reason
8707 to believe that emit_store_flag will be successful. If we think that
8708 it will, but it isn't, we have to simulate the store-flag with a
8709 set/jump/set sequence. */
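/* That fallback sequence for, say, "x = (a < b)" is in effect
     x = 1; if (a < b) goto done; x = 0; done:
   with the two constants swapped when the result must be inverted.  */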
ca695ac9 8710
b93a436e 8711static rtx
502b8322 8712do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
b93a436e
JL
8713{
8714 enum rtx_code code;
8715 tree arg0, arg1, type;
8716 tree tem;
8717 enum machine_mode operand_mode;
8718 int invert = 0;
8719 int unsignedp;
8720 rtx op0, op1;
8721 enum insn_code icode;
8722 rtx subtarget = target;
381127e8 8723 rtx result, label;
ca695ac9 8724
b93a436e
JL
8725 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
8726 result at the end. We can't simply invert the test since it would
8727 have already been inverted if it were valid. This case occurs for
8728 some floating-point comparisons. */
ca695ac9 8729
b93a436e
JL
8730 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
8731 invert = 1, exp = TREE_OPERAND (exp, 0);
ca695ac9 8732
b93a436e
JL
8733 arg0 = TREE_OPERAND (exp, 0);
8734 arg1 = TREE_OPERAND (exp, 1);
5129d2ce
AH
8735
8736 /* Don't crash if the comparison was erroneous. */
8737 if (arg0 == error_mark_node || arg1 == error_mark_node)
8738 return const0_rtx;
8739
b93a436e
JL
8740 type = TREE_TYPE (arg0);
8741 operand_mode = TYPE_MODE (type);
8df83eae 8742 unsignedp = TYPE_UNSIGNED (type);
ca695ac9 8743
b93a436e
JL
8744 /* We won't bother with BLKmode store-flag operations because it would mean
8745 passing a lot of information to emit_store_flag. */
8746 if (operand_mode == BLKmode)
8747 return 0;
ca695ac9 8748
b93a436e
JL
8749 /* We won't bother with store-flag operations involving function pointers
8750 when function pointers must be canonicalized before comparisons. */
8751#ifdef HAVE_canonicalize_funcptr_for_compare
8752 if (HAVE_canonicalize_funcptr_for_compare
8753 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
8754 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8755 == FUNCTION_TYPE))
8756 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
8757 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8758 == FUNCTION_TYPE))))
8759 return 0;
ca695ac9
JB
8760#endif
8761
b93a436e
JL
8762 STRIP_NOPS (arg0);
8763 STRIP_NOPS (arg1);
ca695ac9 8764
b93a436e
JL
8765 /* Get the rtx comparison code to use. We know that EXP is a comparison
8766 operation of some type. Some comparisons against 1 and -1 can be
8767 converted to comparisons with zero. Do so here so that the tests
8768 below will be aware that we have a comparison with zero. These
8769 tests will not catch constants in the first operand, but constants
8770 are rarely passed as the first operand. */
ca695ac9 8771
b93a436e
JL
8772 switch (TREE_CODE (exp))
8773 {
8774 case EQ_EXPR:
8775 code = EQ;
bbf6f052 8776 break;
b93a436e
JL
8777 case NE_EXPR:
8778 code = NE;
bbf6f052 8779 break;
b93a436e
JL
8780 case LT_EXPR:
8781 if (integer_onep (arg1))
8782 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
8783 else
8784 code = unsignedp ? LTU : LT;
ca695ac9 8785 break;
b93a436e
JL
8786 case LE_EXPR:
8787 if (! unsignedp && integer_all_onesp (arg1))
8788 arg1 = integer_zero_node, code = LT;
8789 else
8790 code = unsignedp ? LEU : LE;
ca695ac9 8791 break;
b93a436e
JL
8792 case GT_EXPR:
8793 if (! unsignedp && integer_all_onesp (arg1))
8794 arg1 = integer_zero_node, code = GE;
8795 else
8796 code = unsignedp ? GTU : GT;
8797 break;
8798 case GE_EXPR:
8799 if (integer_onep (arg1))
8800 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
8801 else
8802 code = unsignedp ? GEU : GE;
ca695ac9 8803 break;
1eb8759b
RH
8804
8805 case UNORDERED_EXPR:
8806 code = UNORDERED;
8807 break;
8808 case ORDERED_EXPR:
8809 code = ORDERED;
8810 break;
8811 case UNLT_EXPR:
8812 code = UNLT;
8813 break;
8814 case UNLE_EXPR:
8815 code = UNLE;
8816 break;
8817 case UNGT_EXPR:
8818 code = UNGT;
8819 break;
8820 case UNGE_EXPR:
8821 code = UNGE;
8822 break;
8823 case UNEQ_EXPR:
8824 code = UNEQ;
8825 break;
d1a7edaf
PB
8826 case LTGT_EXPR:
8827 code = LTGT;
8828 break;
1eb8759b 8829
ca695ac9 8830 default:
5b0264cb 8831 gcc_unreachable ();
bbf6f052 8832 }
bbf6f052 8833
b93a436e
JL
8834 /* Put a constant second. */
8835 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
8836 {
8837 tem = arg0; arg0 = arg1; arg1 = tem;
8838 code = swap_condition (code);
ca695ac9 8839 }
bbf6f052 8840
b93a436e
JL
8841 /* If this is an equality or inequality test of a single bit, we can
8842 do this by shifting the bit being tested to the low-order bit and
8843 masking the result with the constant 1. If the condition was EQ,
8844 we xor it with 1. This does not require an scc insn and is faster
7960bf22
JL
8845 than an scc insn even if we have it.
8846
8847 The code to make this transformation was moved into fold_single_bit_test,
8848 so we just call into the folder and expand its result. */
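  /* E.g. "(x & 8) != 0" becomes "(x >> 3) & 1", and "(x & 8) == 0"
     becomes "((x >> 3) & 1) ^ 1".  */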
d39985fa 8849
b93a436e
JL
8850 if ((code == NE || code == EQ)
8851 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
8852 && integer_pow2p (TREE_OPERAND (arg0, 1)))
60cd4dae 8853 {
ae2bcd98 8854 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
60cd4dae 8855 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
450b1728 8856 arg0, arg1, type),
60cd4dae
JL
8857 target, VOIDmode, EXPAND_NORMAL);
8858 }
bbf6f052 8859
b93a436e 8860 /* Now see if we are likely to be able to do this. Return if not. */
1eb8759b 8861 if (! can_compare_p (code, operand_mode, ccp_store_flag))
b93a436e 8862 return 0;
1eb8759b 8863
b93a436e
JL
8864 icode = setcc_gen_code[(int) code];
8865 if (icode == CODE_FOR_nothing
a995e389 8866 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
ca695ac9 8867 {
b93a436e
JL
8868 /* We can only do this if it is one of the special cases that
8869 can be handled without an scc insn. */
8870 if ((code == LT && integer_zerop (arg1))
8871 || (! only_cheap && code == GE && integer_zerop (arg1)))
8872 ;
08fd6d04 8873 else if (! only_cheap && (code == NE || code == EQ)
b93a436e
JL
8874 && TREE_CODE (type) != REAL_TYPE
8875 && ((abs_optab->handlers[(int) operand_mode].insn_code
8876 != CODE_FOR_nothing)
8877 || (ffs_optab->handlers[(int) operand_mode].insn_code
8878 != CODE_FOR_nothing)))
8879 ;
8880 else
8881 return 0;
ca695ac9 8882 }
3a94c984 8883
296b4ed9 8884 if (! get_subtarget (target)
e3be1116 8885 || GET_MODE (subtarget) != operand_mode)
b93a436e
JL
8886 subtarget = 0;
8887
eb698c58 8888 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
b93a436e
JL
8889
8890 if (target == 0)
8891 target = gen_reg_rtx (mode);
8892
ad76cef8 8893 result = emit_store_flag (target, code, op0, op1,
b93a436e 8894 operand_mode, unsignedp, 1);
ca695ac9 8895
b93a436e
JL
8896 if (result)
8897 {
8898 if (invert)
8899 result = expand_binop (mode, xor_optab, result, const1_rtx,
8900 result, 0, OPTAB_LIB_WIDEN);
8901 return result;
ca695ac9 8902 }
bbf6f052 8903
b93a436e 8904 /* If this failed, we have to do this with set/compare/jump/set code. */
f8cfc6aa 8905 if (!REG_P (target)
b93a436e
JL
8906 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
8907 target = gen_reg_rtx (GET_MODE (target));
8908
8909 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
8910 result = compare_from_rtx (op0, op1, code, unsignedp,
a06ef755 8911 operand_mode, NULL_RTX);
b93a436e
JL
8912 if (GET_CODE (result) == CONST_INT)
8913 return (((result == const0_rtx && ! invert)
8914 || (result != const0_rtx && invert))
8915 ? const0_rtx : const1_rtx);
ca695ac9 8916
8f08e8c0
JL
8917 /* The code of RESULT may not match CODE if compare_from_rtx
8918 decided to swap its operands and reverse the original code.
8919
8920 We know that compare_from_rtx returns either a CONST_INT or
8921 a new comparison code, so it is safe to just extract the
8922 code from RESULT. */
8923 code = GET_CODE (result);
8924
b93a436e 8925 label = gen_label_rtx ();
5b0264cb 8926 gcc_assert (bcc_gen_fctn[(int) code]);
0f41302f 8927
b93a436e
JL
8928 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
8929 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
8930 emit_label (label);
bbf6f052 8931
b93a436e 8932 return target;
ca695ac9 8933}
b93a436e 8934\f
b93a436e 8935
ad82abb8
ZW
8936/* Stubs in case we haven't got a casesi insn. */
8937#ifndef HAVE_casesi
8938# define HAVE_casesi 0
8939# define gen_casesi(a, b, c, d, e) (0)
8940# define CODE_FOR_casesi CODE_FOR_nothing
8941#endif
8942
8943/* If the machine does not have a case insn that compares the bounds,
8944 this means extra overhead for dispatch tables, which raises the
8945 threshold for using them. */
8946#ifndef CASE_VALUES_THRESHOLD
8947#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
8948#endif /* CASE_VALUES_THRESHOLD */
8949
8950unsigned int
502b8322 8951case_values_threshold (void)
ad82abb8
ZW
8952{
8953 return CASE_VALUES_THRESHOLD;
8954}
8955
8956/* Attempt to generate a casesi instruction. Returns 1 if successful,
8957 0 otherwise (i.e. if there is no casesi instruction). */
8958int
502b8322
AJ
8959try_casesi (tree index_type, tree index_expr, tree minval, tree range,
8960 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
ad82abb8
ZW
8961{
8962 enum machine_mode index_mode = SImode;
8963 int index_bits = GET_MODE_BITSIZE (index_mode);
8964 rtx op1, op2, index;
8965 enum machine_mode op_mode;
8966
8967 if (! HAVE_casesi)
8968 return 0;
8969
8970 /* Convert the index to SImode. */
8971 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
8972 {
8973 enum machine_mode omode = TYPE_MODE (index_type);
8974 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
8975
8976 /* We must handle the endpoints in the original mode. */
3244e67d
RS
8977 index_expr = build2 (MINUS_EXPR, index_type,
8978 index_expr, minval);
ad82abb8
ZW
8979 minval = integer_zero_node;
8980 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
8981 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
a06ef755 8982 omode, 1, default_label);
ad82abb8
ZW
8983 /* Now we can safely truncate. */
8984 index = convert_to_mode (index_mode, index, 0);
8985 }
8986 else
8987 {
8988 if (TYPE_MODE (index_type) != index_mode)
8989 {
ae2bcd98 8990 index_expr = convert (lang_hooks.types.type_for_size
b0c48229 8991 (index_bits, 0), index_expr);
ad82abb8
ZW
8992 index_type = TREE_TYPE (index_expr);
8993 }
8994
8995 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
8996 }
ad76cef8 8997
ad82abb8
ZW
8998 do_pending_stack_adjust ();
8999
9000 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9001 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9002 (index, op_mode))
9003 index = copy_to_mode_reg (op_mode, index);
e87b4f3f 9004
ad82abb8
ZW
9005 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
9006
9007 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9008 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
8df83eae 9009 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
ad82abb8
ZW
9010 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9011 (op1, op_mode))
9012 op1 = copy_to_mode_reg (op_mode, op1);
9013
9014 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
9015
9016 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9017 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
8df83eae 9018 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
ad82abb8
ZW
9019 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9020 (op2, op_mode))
9021 op2 = copy_to_mode_reg (op_mode, op2);
9022
9023 emit_jump_insn (gen_casesi (index, op1, op2,
9024 table_label, default_label));
9025 return 1;
9026}
9027
9028/* Attempt to generate a tablejump instruction; same concept. */
9029#ifndef HAVE_tablejump
9030#define HAVE_tablejump 0
9031#define gen_tablejump(x, y) (0)
9032#endif
9033
9034/* Subroutine of the next function.
9035
9036 INDEX is the value being switched on, with the lowest value
b93a436e
JL
9037 in the table already subtracted.
9038 MODE is its expected mode (needed if INDEX is constant).
9039 RANGE is the length of the jump table.
9040 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
88d3b7f0 9041
b93a436e
JL
9042 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9043 index value is out of range. */
0f41302f 9044
ad82abb8 9045static void
502b8322
AJ
9046do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9047 rtx default_label)
ca695ac9 9048{
b3694847 9049 rtx temp, vector;
88d3b7f0 9050
74f6d071
JH
9051 if (INTVAL (range) > cfun->max_jumptable_ents)
9052 cfun->max_jumptable_ents = INTVAL (range);
1877be45 9053
b93a436e
JL
9054 /* Do an unsigned comparison (in the proper mode) between the index
9055 expression and the value which represents the length of the range.
9056 Since we just finished subtracting the lower bound of the range
9057 from the index expression, this comparison allows us to simultaneously
9058 check that the original index expression value is both greater than
9059 or equal to the minimum value of the range and less than or equal to
9060 the maximum value of the range. */
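  /* For instance, with a jump table for case values 10 through 15,
     MINVAL is 10 and RANGE is 5: an original index of 8 becomes
     (unsigned) -2 and 17 becomes 7, both of which compare above RANGE
     and branch to DEFAULT_LABEL, while 10 through 15 become 0 through 5
     and fall through to the table jump.  */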
709f5be1 9061
c5d5d461 9062 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
a06ef755 9063 default_label);
bbf6f052 9064
b93a436e
JL
9065 /* If index is in range, it must fit in Pmode.
9066 Convert to Pmode so we can index with it. */
9067 if (mode != Pmode)
9068 index = convert_to_mode (Pmode, index, 1);
bbf6f052 9069
ba228239 9070 /* Don't let a MEM slip through, because then INDEX that comes
b93a436e
JL
9071 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9072 and break_out_memory_refs will go to work on it and mess it up. */
9073#ifdef PIC_CASE_VECTOR_ADDRESS
f8cfc6aa 9074 if (flag_pic && !REG_P (index))
b93a436e
JL
9075 index = copy_to_mode_reg (Pmode, index);
9076#endif
ca695ac9 9077
b93a436e
JL
9078 /* If flag_force_addr were to affect this address
9079 it could interfere with the tricky assumptions made
9080 about addresses that contain label-refs,
9081 which may be valid only very near the tablejump itself. */
9082 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9083 GET_MODE_SIZE, because this indicates how large insns are. The other
9084 uses should all be Pmode, because they are addresses. This code
9085 could fail if addresses and insns are not the same size. */
9086 index = gen_rtx_PLUS (Pmode,
9087 gen_rtx_MULT (Pmode, index,
9088 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9089 gen_rtx_LABEL_REF (Pmode, table_label));
9090#ifdef PIC_CASE_VECTOR_ADDRESS
9091 if (flag_pic)
9092 index = PIC_CASE_VECTOR_ADDRESS (index);
9093 else
bbf6f052 9094#endif
b93a436e
JL
9095 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9096 temp = gen_reg_rtx (CASE_VECTOR_MODE);
542a8afa 9097 vector = gen_const_mem (CASE_VECTOR_MODE, index);
b93a436e
JL
9098 convert_move (temp, vector, 0);
9099
9100 emit_jump_insn (gen_tablejump (temp, table_label));
9101
9102 /* If we are generating PIC code or if the table is PC-relative, the
9103 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9104 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9105 emit_barrier ();
bbf6f052 9106}
b93a436e 9107
ad82abb8 9108int
502b8322
AJ
9109try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9110 rtx table_label, rtx default_label)
ad82abb8
ZW
9111{
9112 rtx index;
9113
9114 if (! HAVE_tablejump)
9115 return 0;
9116
4845b383
KH
9117 index_expr = fold_build2 (MINUS_EXPR, index_type,
9118 convert (index_type, index_expr),
9119 convert (index_type, minval));
ad82abb8 9120 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
ad82abb8
ZW
9121 do_pending_stack_adjust ();
9122
9123 do_tablejump (index, TYPE_MODE (index_type),
9124 convert_modes (TYPE_MODE (index_type),
9125 TYPE_MODE (TREE_TYPE (range)),
9126 expand_expr (range, NULL_RTX,
9127 VOIDmode, 0),
8df83eae 9128 TYPE_UNSIGNED (TREE_TYPE (range))),
ad82abb8
ZW
9129 table_label, default_label);
9130 return 1;
9131}
e2500fed 9132
cb2a532e
AH
9133/* Nonzero if the mode is a valid vector mode for this architecture.
9134 This returns nonzero even if there is no hardware support for the
9135 vector mode, but we can emulate with narrower modes. */
9136
9137int
502b8322 9138vector_mode_valid_p (enum machine_mode mode)
cb2a532e
AH
9139{
9140 enum mode_class class = GET_MODE_CLASS (mode);
9141 enum machine_mode innermode;
9142
9143 /* Doh! What's going on? */
9144 if (class != MODE_VECTOR_INT
9145 && class != MODE_VECTOR_FLOAT)
9146 return 0;
9147
9148 /* Hardware support. Woo hoo! */
f676971a 9149 if (targetm.vector_mode_supported_p (mode))
cb2a532e
AH
9150 return 1;
9151
9152 innermode = GET_MODE_INNER (mode);
9153
9154 /* We should probably return 1 if requesting V4DI and we have no DI,
9155 but we have V2DI, but this is probably very unlikely. */
9156
9157 /* If we have support for the inner mode, we can safely emulate it.
 9158      We may not have V2DI, but we can emulate with a pair of DIs.  */
6dd53648 9159 return targetm.scalar_mode_supported_p (innermode);
cb2a532e
AH
9160}
9161
d744e06e
AH
9162/* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
9163static rtx
502b8322 9164const_vector_from_tree (tree exp)
d744e06e
AH
9165{
9166 rtvec v;
9167 int units, i;
9168 tree link, elt;
9169 enum machine_mode inner, mode;
9170
9171 mode = TYPE_MODE (TREE_TYPE (exp));
9172
6de9cd9a 9173 if (initializer_zerop (exp))
d744e06e
AH
9174 return CONST0_RTX (mode);
9175
9176 units = GET_MODE_NUNITS (mode);
9177 inner = GET_MODE_INNER (mode);
9178
9179 v = rtvec_alloc (units);
9180
9181 link = TREE_VECTOR_CST_ELTS (exp);
9182 for (i = 0; link; link = TREE_CHAIN (link), ++i)
9183 {
9184 elt = TREE_VALUE (link);
9185
9186 if (TREE_CODE (elt) == REAL_CST)
9187 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
9188 inner);
9189 else
9190 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
9191 TREE_INT_CST_HIGH (elt),
9192 inner);
9193 }
9194
5f6c070d
AH
9195 /* Initialize remaining elements to 0. */
9196 for (; i < units; ++i)
9197 RTVEC_ELT (v, i) = CONST0_RTX (inner);
9198
a73b091d 9199 return gen_rtx_CONST_VECTOR (mode, v);
d744e06e 9200}
e2500fed 9201#include "gt-expr.h"