[gcc.git] / gcc / expr.c
Commit  Line  Data
bbf6f052 1/* Convert tree expression to rtl instructions, for GNU compiler.
8752c357 2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
ef7befe0
BE
3 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation,
4 Inc.
bbf6f052 5
1322177d 6This file is part of GCC.
bbf6f052 7
1322177d
LB
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
10Software Foundation; either version 2, or (at your option) any later
11version.
bbf6f052 12
1322177d
LB
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
bbf6f052
RK
17
18You should have received a copy of the GNU General Public License
1322177d 19along with GCC; see the file COPYING. If not, write to the Free
366ccddb
KC
20Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
2102110-1301, USA. */
bbf6f052 22
bbf6f052 23#include "config.h"
670ee920 24#include "system.h"
4977bab6
ZW
25#include "coretypes.h"
26#include "tm.h"
ca695ac9 27#include "machmode.h"
11ad4784 28#include "real.h"
bbf6f052
RK
29#include "rtl.h"
30#include "tree.h"
31#include "flags.h"
bf76bb5a 32#include "regs.h"
4ed67205 33#include "hard-reg-set.h"
3d195391 34#include "except.h"
bbf6f052 35#include "function.h"
bbf6f052 36#include "insn-config.h"
34e81b5a 37#include "insn-attr.h"
3a94c984 38/* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
d6f4ec51 39#include "expr.h"
e78d8e51
ZW
40#include "optabs.h"
41#include "libfuncs.h"
bbf6f052 42#include "recog.h"
3ef1eef4 43#include "reload.h"
bbf6f052 44#include "output.h"
bbf6f052 45#include "typeclass.h"
10f0ad3d 46#include "toplev.h"
d7db6646 47#include "ggc.h"
ac79cd5a 48#include "langhooks.h"
e2c49ac2 49#include "intl.h"
b1474bb7 50#include "tm_p.h"
6de9cd9a 51#include "tree-iterator.h"
2f8e398b
PB
52#include "tree-pass.h"
53#include "tree-flow.h"
c988af2b 54#include "target.h"
2f8e398b 55#include "timevar.h"
bbf6f052 56
bbf6f052 57/* Decide whether a function's arguments should be processed
bbc8a071
RK
58 from first to last or from last to first.
59
 60 They should be processed from last to first if the stack and args grow in opposite directions, but
61 only if we have push insns. */
bbf6f052 62
bbf6f052 63#ifdef PUSH_ROUNDING
bbc8a071 64
2da4124d 65#ifndef PUSH_ARGS_REVERSED
3319a347 66#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
3a94c984 67#define PUSH_ARGS_REVERSED /* If it's last to first. */
bbf6f052 68#endif
2da4124d 69#endif
bbc8a071 70
bbf6f052
RK
71#endif
72
73#ifndef STACK_PUSH_CODE
74#ifdef STACK_GROWS_DOWNWARD
75#define STACK_PUSH_CODE PRE_DEC
76#else
77#define STACK_PUSH_CODE PRE_INC
78#endif
79#endif
80
4ca79136 81
bbf6f052
RK
82/* If this is nonzero, we do not bother generating VOLATILE
83 around volatile memory references, and we are willing to
84 output indirect addresses. If cse is to follow, we reject
85 indirect addresses so a useful potential cse is generated;
86 if it is used only once, instruction combination will produce
87 the same indirect address eventually. */
88int cse_not_expected;
89
4969d05d
RK
90/* This structure is used by move_by_pieces to describe the move to
91 be performed. */
4969d05d
RK
92struct move_by_pieces
93{
94 rtx to;
95 rtx to_addr;
96 int autinc_to;
97 int explicit_inc_to;
98 rtx from;
99 rtx from_addr;
100 int autinc_from;
101 int explicit_inc_from;
3bdf5ad1
RK
102 unsigned HOST_WIDE_INT len;
103 HOST_WIDE_INT offset;
4969d05d
RK
104 int reverse;
105};
106
57814e5e 107/* This structure is used by store_by_pieces to describe the store to
9de08200
RK
108 be performed. */
109
57814e5e 110struct store_by_pieces
9de08200
RK
111{
112 rtx to;
113 rtx to_addr;
114 int autinc_to;
115 int explicit_inc_to;
3bdf5ad1
RK
116 unsigned HOST_WIDE_INT len;
117 HOST_WIDE_INT offset;
502b8322 118 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
fad205ff 119 void *constfundata;
9de08200
RK
120 int reverse;
121};
122
502b8322 123static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
45d78e7f 124 unsigned int,
502b8322
AJ
125 unsigned int);
126static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
127 struct move_by_pieces *);
128static bool block_move_libcall_safe_for_call_parm (void);
70128ad9 129static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
8148fe65 130static rtx emit_block_move_via_libcall (rtx, rtx, rtx, bool);
502b8322
AJ
131static tree emit_block_move_libcall_fn (int);
132static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
133static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
134static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
135static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
136static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
137 struct store_by_pieces *);
8148fe65 138static rtx clear_storage_via_libcall (rtx, rtx, bool);
502b8322
AJ
139static tree clear_storage_libcall_fn (int);
140static rtx compress_float_constant (rtx, rtx);
141static rtx get_subtarget (rtx);
502b8322
AJ
142static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
143 HOST_WIDE_INT, enum machine_mode,
144 tree, tree, int, int);
145static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
146static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
f45bdcd0 147 tree, tree, int);
502b8322 148
d50a16c4 149static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
502b8322
AJ
150
151static int is_aligning_offset (tree, tree);
eb698c58
RS
152static void expand_operands (tree, tree, rtx, rtx*, rtx*,
153 enum expand_modifier);
bc15d0ef 154static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
502b8322 155static rtx do_store_flag (tree, rtx, enum machine_mode, int);
21d93687 156#ifdef PUSH_ROUNDING
502b8322 157static void emit_single_push_insn (enum machine_mode, rtx, tree);
21d93687 158#endif
502b8322
AJ
159static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
160static rtx const_vector_from_tree (tree);
57aaef66 161static void write_complex_part (rtx, rtx, bool);
bbf6f052 162
4fa52007
RK
163/* Record for each mode whether we can move a register directly to or
164 from an object of that mode in memory. If we can't, we won't try
165 to use that mode directly when accessing a field of that mode. */
166
167static char direct_load[NUM_MACHINE_MODES];
168static char direct_store[NUM_MACHINE_MODES];
169
51286de6
RH
170/* Record for each mode whether we can float-extend from memory. */
171
172static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
173
fbe1758d 174/* This macro is used to determine whether move_by_pieces should be called
3a94c984 175 to perform a structure copy. */
fbe1758d 176#ifndef MOVE_BY_PIECES_P
19caa751 177#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
45d78e7f
JJ
178 (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
179 < (unsigned int) MOVE_RATIO)
fbe1758d
AM
180#endif
181
78762e3b
RS
182/* This macro is used to determine whether clear_by_pieces should be
183 called to clear storage. */
184#ifndef CLEAR_BY_PIECES_P
185#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
45d78e7f
JJ
186 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
187 < (unsigned int) CLEAR_RATIO)
78762e3b
RS
188#endif
189
4977bab6
ZW
190/* This macro is used to determine whether store_by_pieces should be
191 called to "memset" storage with byte values other than zero, or
192 to "memcpy" storage when the source is a constant string. */
193#ifndef STORE_BY_PIECES_P
45d78e7f
JJ
194#define STORE_BY_PIECES_P(SIZE, ALIGN) \
195 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
196 < (unsigned int) MOVE_RATIO)
4977bab6
ZW
197#endif
198
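
/* Editor's note: an illustrative sketch, not part of this file, of how the
   *_BY_PIECES_P heuristics above are consulted.  The SIZE and ALIGN values
   are hypothetical (ALIGN is in bits).

       if (MOVE_BY_PIECES_P (16, 32))
	 ... expand the copy inline, piece by piece ...
       else
	 ... fall back to a movmem pattern or a memcpy libcall ...

   That is, a 16-byte copy at 32-bit alignment is expanded inline only when
   move_by_pieces_ninsns (16, 32, MOVE_MAX_PIECES + 1) is below MOVE_RATIO.  */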
266007a7 199/* This array records the insn_code of insns to perform block moves. */
70128ad9 200enum insn_code movmem_optab[NUM_MACHINE_MODES];
266007a7 201
57e84f18
AS
202/* This array records the insn_code of insns to perform block sets. */
203enum insn_code setmem_optab[NUM_MACHINE_MODES];
9de08200 204
40c1d5f8 205/* These arrays record the insn_code of three different kinds of insns
118355a0
ZW
206 to perform block compares. */
207enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
40c1d5f8 208enum insn_code cmpstrn_optab[NUM_MACHINE_MODES];
118355a0
ZW
209enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
210
48ae6c13
RH
211/* Synchronization primitives. */
212enum insn_code sync_add_optab[NUM_MACHINE_MODES];
213enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
214enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
215enum insn_code sync_and_optab[NUM_MACHINE_MODES];
216enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
217enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
218enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
219enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
220enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
221enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
222enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
223enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
224enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
225enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
226enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
227enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
228enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
229enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
230enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
231enum insn_code sync_compare_and_swap_cc[NUM_MACHINE_MODES];
232enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
233enum insn_code sync_lock_release[NUM_MACHINE_MODES];
234
cc2902df 235/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
e87b4f3f
RS
236
237#ifndef SLOW_UNALIGNED_ACCESS
e1565e65 238#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
e87b4f3f 239#endif
bbf6f052 240\f
4fa52007 241/* This is run once per compilation to set up which modes can be used
266007a7 242 directly in memory and to initialize the block move optab. */
4fa52007
RK
243
244void
502b8322 245init_expr_once (void)
4fa52007
RK
246{
247 rtx insn, pat;
248 enum machine_mode mode;
cff48d8f 249 int num_clobbers;
9ec36da5 250 rtx mem, mem1;
bf1660a6 251 rtx reg;
9ec36da5 252
e2549997
RS
253 /* Try indexing by frame ptr and try by stack ptr.
254 It is known that on the Convex the stack ptr isn't a valid index.
255 With luck, one or the other is valid on any machine. */
9ec36da5
JL
256 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
257 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
4fa52007 258
bf1660a6
JL
259 /* A scratch register we can modify in-place below to avoid
260 useless RTL allocations. */
261 reg = gen_rtx_REG (VOIDmode, -1);
262
1f8c3c5b
RH
263 insn = rtx_alloc (INSN);
264 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
265 PATTERN (insn) = pat;
4fa52007
RK
266
267 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
268 mode = (enum machine_mode) ((int) mode + 1))
269 {
270 int regno;
4fa52007
RK
271
272 direct_load[(int) mode] = direct_store[(int) mode] = 0;
273 PUT_MODE (mem, mode);
e2549997 274 PUT_MODE (mem1, mode);
bf1660a6 275 PUT_MODE (reg, mode);
4fa52007 276
e6fe56a4
RK
277 /* See if there is some register that can be used in this mode and
278 directly loaded or stored from memory. */
279
7308a047
RS
280 if (mode != VOIDmode && mode != BLKmode)
281 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
282 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
283 regno++)
284 {
285 if (! HARD_REGNO_MODE_OK (regno, mode))
286 continue;
e6fe56a4 287
bf1660a6 288 REGNO (reg) = regno;
e6fe56a4 289
7308a047
RS
290 SET_SRC (pat) = mem;
291 SET_DEST (pat) = reg;
292 if (recog (pat, insn, &num_clobbers) >= 0)
293 direct_load[(int) mode] = 1;
e6fe56a4 294
e2549997
RS
295 SET_SRC (pat) = mem1;
296 SET_DEST (pat) = reg;
297 if (recog (pat, insn, &num_clobbers) >= 0)
298 direct_load[(int) mode] = 1;
299
7308a047
RS
300 SET_SRC (pat) = reg;
301 SET_DEST (pat) = mem;
302 if (recog (pat, insn, &num_clobbers) >= 0)
303 direct_store[(int) mode] = 1;
e2549997
RS
304
305 SET_SRC (pat) = reg;
306 SET_DEST (pat) = mem1;
307 if (recog (pat, insn, &num_clobbers) >= 0)
308 direct_store[(int) mode] = 1;
7308a047 309 }
4fa52007
RK
310 }
311
51286de6
RH
312 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
313
314 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
315 mode = GET_MODE_WIDER_MODE (mode))
316 {
317 enum machine_mode srcmode;
318 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
0fb7aeda 319 srcmode = GET_MODE_WIDER_MODE (srcmode))
51286de6
RH
320 {
321 enum insn_code ic;
322
323 ic = can_extend_p (mode, srcmode, 0);
324 if (ic == CODE_FOR_nothing)
325 continue;
326
327 PUT_MODE (mem, srcmode);
0fb7aeda 328
51286de6
RH
329 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
330 float_extend_from_mem[mode][srcmode] = true;
331 }
332 }
4fa52007 333}
cff48d8f 334
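
/* Editor's note: an illustrative sketch, not part of this file.  Once
   init_expr_once has filled in the tables above, later passes can ask
   questions such as the following; SFmode and DFmode are just example modes.

       if (direct_load[(int) SFmode])
	 ... an SFmode value can be loaded straight from memory into some
	     hard register without going through an integer mode ...

       if (float_extend_from_mem[DFmode][SFmode])
	 ... an SFmode memory operand can be float-extended to DFmode
	     directly by the extend insn ...  */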
bbf6f052
RK
335/* This is run at the start of compiling a function. */
336
337void
502b8322 338init_expr (void)
bbf6f052 339{
3a70d621 340 cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
bbf6f052 341}
bbf6f052
RK
342\f
343/* Copy data from FROM to TO, where the machine modes are not the same.
344 Both modes may be integer, or both may be floating.
345 UNSIGNEDP should be nonzero if FROM is an unsigned type.
346 This causes zero-extension instead of sign-extension. */
347
348void
502b8322 349convert_move (rtx to, rtx from, int unsignedp)
bbf6f052
RK
350{
351 enum machine_mode to_mode = GET_MODE (to);
352 enum machine_mode from_mode = GET_MODE (from);
3d8bf70f
BE
353 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
354 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
bbf6f052
RK
355 enum insn_code code;
356 rtx libcall;
357
358 /* rtx code for making an equivalent value. */
37d0b254
SE
359 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
360 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
bbf6f052 361
bbf6f052 362
5b0264cb 363 gcc_assert (to_real == from_real);
bbf6f052 364
6de9cd9a
DN
365 /* If the source and destination are already the same, then there's
366 nothing to do. */
367 if (to == from)
368 return;
369
1499e0a8
RK
370 /* If FROM is a SUBREG that indicates that we have already done at least
371 the required extension, strip it. We don't handle such SUBREGs as
372 TO here. */
373
374 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
375 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
376 >= GET_MODE_SIZE (to_mode))
377 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
378 from = gen_lowpart (to_mode, from), from_mode = to_mode;
379
5b0264cb 380 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
1499e0a8 381
bbf6f052
RK
382 if (to_mode == from_mode
383 || (from_mode == VOIDmode && CONSTANT_P (from)))
384 {
385 emit_move_insn (to, from);
386 return;
387 }
388
0b4565c9
BS
389 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
390 {
5b0264cb 391 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
3a94c984 392
0b4565c9 393 if (VECTOR_MODE_P (to_mode))
bafe341a 394 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
0b4565c9 395 else
bafe341a 396 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
0b4565c9
BS
397
398 emit_move_insn (to, from);
399 return;
400 }
401
06765df1
R
402 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
403 {
404 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
405 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
406 return;
407 }
408
bbf6f052
RK
409 if (to_real)
410 {
642dfa8b 411 rtx value, insns;
85363ca0 412 convert_optab tab;
81d79e2c 413
15ed7b52
JG
414 gcc_assert ((GET_MODE_PRECISION (from_mode)
415 != GET_MODE_PRECISION (to_mode))
416 || (DECIMAL_FLOAT_MODE_P (from_mode)
417 != DECIMAL_FLOAT_MODE_P (to_mode)));
5b0264cb 418
15ed7b52
JG
419 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
420 /* Conversion between decimal float and binary float, same size. */
421 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
422 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
85363ca0 423 tab = sext_optab;
85363ca0 424 else
5b0264cb 425 tab = trunc_optab;
2b01c326 426
85363ca0 427 /* Try converting directly if the insn is supported. */
2b01c326 428
85363ca0
ZW
429 code = tab->handlers[to_mode][from_mode].insn_code;
430 if (code != CODE_FOR_nothing)
b092b471 431 {
85363ca0
ZW
432 emit_unop_insn (code, to, from,
433 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
b092b471
JW
434 return;
435 }
b092b471 436
85363ca0
ZW
437 /* Otherwise use a libcall. */
438 libcall = tab->handlers[to_mode][from_mode].libfunc;
3a94c984 439
5b0264cb
NS
440 /* Is this conversion implemented yet? */
441 gcc_assert (libcall);
bbf6f052 442
642dfa8b 443 start_sequence ();
ebb1b59a 444 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
81d79e2c 445 1, from, from_mode);
642dfa8b
BS
446 insns = get_insns ();
447 end_sequence ();
450b1728
EC
448 emit_libcall_block (insns, to, value,
449 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
450 from)
451 : gen_rtx_FLOAT_EXTEND (to_mode, from));
bbf6f052
RK
452 return;
453 }
454
85363ca0
ZW
455 /* Handle pointer conversion. */ /* SPEE 900220. */
456 /* Targets are expected to provide conversion insns between PxImode and
457 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
458 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
459 {
460 enum machine_mode full_mode
461 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
462
5b0264cb
NS
463 gcc_assert (trunc_optab->handlers[to_mode][full_mode].insn_code
464 != CODE_FOR_nothing);
85363ca0
ZW
465
466 if (full_mode != from_mode)
467 from = convert_to_mode (full_mode, from, unsignedp);
468 emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
469 to, from, UNKNOWN);
470 return;
471 }
472 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
473 {
d2348bd5 474 rtx new_from;
85363ca0
ZW
475 enum machine_mode full_mode
476 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
477
5b0264cb
NS
478 gcc_assert (sext_optab->handlers[full_mode][from_mode].insn_code
479 != CODE_FOR_nothing);
85363ca0 480
85363ca0 481 if (to_mode == full_mode)
d2348bd5
DD
482 {
483 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
484 to, from, UNKNOWN);
485 return;
486 }
487
488 new_from = gen_reg_rtx (full_mode);
489 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
490 new_from, from, UNKNOWN);
85363ca0 491
a1105617 492 /* else proceed to integer conversions below. */
85363ca0 493 from_mode = full_mode;
d2348bd5 494 from = new_from;
85363ca0
ZW
495 }
496
bbf6f052
RK
497 /* Now both modes are integers. */
498
499 /* Handle expanding beyond a word. */
500 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
501 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
502 {
503 rtx insns;
504 rtx lowpart;
505 rtx fill_value;
506 rtx lowfrom;
507 int i;
508 enum machine_mode lowpart_mode;
509 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
510
511 /* Try converting directly if the insn is supported. */
512 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
513 != CODE_FOR_nothing)
514 {
cd1b4b44
RK
515 /* If FROM is a SUBREG, put it into a register. Do this
516 so that we always generate the same set of insns for
517 better cse'ing; if an intermediate assignment occurred,
518 we won't be doing the operation directly on the SUBREG. */
519 if (optimize > 0 && GET_CODE (from) == SUBREG)
520 from = force_reg (from_mode, from);
bbf6f052
RK
521 emit_unop_insn (code, to, from, equiv_code);
522 return;
523 }
524 /* Next, try converting via full word. */
525 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
526 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
527 != CODE_FOR_nothing))
528 {
f8cfc6aa 529 if (REG_P (to))
6a2d136b
EB
530 {
531 if (reg_overlap_mentioned_p (to, from))
532 from = force_reg (from_mode, from);
533 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
534 }
bbf6f052
RK
535 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
536 emit_unop_insn (code, to,
537 gen_lowpart (word_mode, to), equiv_code);
538 return;
539 }
540
541 /* No special multiword conversion insn; do it by hand. */
542 start_sequence ();
543
5c5033c3
RK
544 /* Since we will turn this into a no conflict block, we must ensure
545 that the source does not overlap the target. */
546
547 if (reg_overlap_mentioned_p (to, from))
548 from = force_reg (from_mode, from);
549
bbf6f052
RK
550 /* Get a copy of FROM widened to a word, if necessary. */
551 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
552 lowpart_mode = word_mode;
553 else
554 lowpart_mode = from_mode;
555
556 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
557
558 lowpart = gen_lowpart (lowpart_mode, to);
559 emit_move_insn (lowpart, lowfrom);
560
561 /* Compute the value to put in each remaining word. */
562 if (unsignedp)
563 fill_value = const0_rtx;
564 else
565 {
566#ifdef HAVE_slt
567 if (HAVE_slt
a995e389 568 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
bbf6f052
RK
569 && STORE_FLAG_VALUE == -1)
570 {
906c4e36 571 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
a06ef755 572 lowpart_mode, 0);
bbf6f052
RK
573 fill_value = gen_reg_rtx (word_mode);
574 emit_insn (gen_slt (fill_value));
575 }
576 else
577#endif
578 {
579 fill_value
580 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
581 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
906c4e36 582 NULL_RTX, 0);
bbf6f052
RK
583 fill_value = convert_to_mode (word_mode, fill_value, 1);
584 }
585 }
586
587 /* Fill the remaining words. */
588 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
589 {
590 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
591 rtx subword = operand_subword (to, index, 1, to_mode);
592
5b0264cb 593 gcc_assert (subword);
bbf6f052
RK
594
595 if (fill_value != subword)
596 emit_move_insn (subword, fill_value);
597 }
598
599 insns = get_insns ();
600 end_sequence ();
601
906c4e36 602 emit_no_conflict_block (insns, to, from, NULL_RTX,
38a448ca 603 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
bbf6f052
RK
604 return;
605 }
606
d3c64ee3
RS
607 /* Truncating multi-word to a word or less. */
608 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
609 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
bbf6f052 610 {
3c0cb5de 611 if (!((MEM_P (from)
431a6eca
JW
612 && ! MEM_VOLATILE_P (from)
613 && direct_load[(int) to_mode]
614 && ! mode_dependent_address_p (XEXP (from, 0)))
f8cfc6aa 615 || REG_P (from)
431a6eca
JW
616 || GET_CODE (from) == SUBREG))
617 from = force_reg (from_mode, from);
bbf6f052
RK
618 convert_move (to, gen_lowpart (word_mode, from), 0);
619 return;
620 }
621
bbf6f052
RK
622 /* Now follow all the conversions between integers
623 no more than a word long. */
624
625 /* For truncation, usually we can just refer to FROM in a narrower mode. */
626 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
627 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
d3c64ee3 628 GET_MODE_BITSIZE (from_mode)))
bbf6f052 629 {
3c0cb5de 630 if (!((MEM_P (from)
d3c64ee3
RS
631 && ! MEM_VOLATILE_P (from)
632 && direct_load[(int) to_mode]
633 && ! mode_dependent_address_p (XEXP (from, 0)))
f8cfc6aa 634 || REG_P (from)
d3c64ee3
RS
635 || GET_CODE (from) == SUBREG))
636 from = force_reg (from_mode, from);
f8cfc6aa 637 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
34aa3599
RK
638 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
639 from = copy_to_reg (from);
bbf6f052
RK
640 emit_move_insn (to, gen_lowpart (to_mode, from));
641 return;
642 }
643
d3c64ee3 644 /* Handle extension. */
bbf6f052
RK
645 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
646 {
647 /* Convert directly if that works. */
648 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
649 != CODE_FOR_nothing)
650 {
651 emit_unop_insn (code, to, from, equiv_code);
652 return;
653 }
654 else
655 {
656 enum machine_mode intermediate;
2b28d92e
NC
657 rtx tmp;
658 tree shift_amount;
bbf6f052
RK
659
660 /* Search for a mode to convert via. */
661 for (intermediate = from_mode; intermediate != VOIDmode;
662 intermediate = GET_MODE_WIDER_MODE (intermediate))
930b4e39
RK
663 if (((can_extend_p (to_mode, intermediate, unsignedp)
664 != CODE_FOR_nothing)
665 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
d60eaeff
JL
666 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
667 GET_MODE_BITSIZE (intermediate))))
bbf6f052
RK
668 && (can_extend_p (intermediate, from_mode, unsignedp)
669 != CODE_FOR_nothing))
670 {
671 convert_move (to, convert_to_mode (intermediate, from,
672 unsignedp), unsignedp);
673 return;
674 }
675
2b28d92e 676 /* No suitable intermediate mode.
3a94c984 677 Generate what we need with shifts. */
4a90aeeb
NS
678 shift_amount = build_int_cst (NULL_TREE,
679 GET_MODE_BITSIZE (to_mode)
7d60be94 680 - GET_MODE_BITSIZE (from_mode));
2b28d92e
NC
681 from = gen_lowpart (to_mode, force_reg (from_mode, from));
682 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
683 to, unsignedp);
3a94c984 684 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
2b28d92e
NC
685 to, unsignedp);
686 if (tmp != to)
687 emit_move_insn (to, tmp);
688 return;
bbf6f052
RK
689 }
690 }
691
3a94c984 692 /* Support special truncate insns for certain modes. */
85363ca0 693 if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
bbf6f052 694 {
85363ca0
ZW
695 emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
696 to, from, UNKNOWN);
b9bcad65
RK
697 return;
698 }
699
bbf6f052
RK
700 /* Handle truncation of volatile memrefs, and so on;
701 the things that couldn't be truncated directly,
85363ca0
ZW
702 and for which there was no special instruction.
703
704 ??? Code above formerly short-circuited this, for most integer
705 mode pairs, with a force_reg in from_mode followed by a recursive
706 call to this routine. Appears always to have been wrong. */
bbf6f052
RK
707 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
708 {
709 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
710 emit_move_insn (to, temp);
711 return;
712 }
713
714 /* Mode combination is not recognized. */
5b0264cb 715 gcc_unreachable ();
bbf6f052
RK
716}
717
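
/* Editor's note: a minimal usage sketch, not part of this file.  The pseudos
   and modes below are hypothetical; UNSIGNEDP selects zero- versus
   sign-extension exactly as described in the comment above convert_move.

       rtx src = gen_reg_rtx (SImode);
       rtx dst = gen_reg_rtx (DImode);
       convert_move (dst, src, 1);	-- widen with zero-extension
       convert_move (dst, src, 0);	-- widen with sign-extension  */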
718/* Return an rtx for a value that would result
719 from converting X to mode MODE.
720 Both X and MODE may be floating, or both integer.
721 UNSIGNEDP is nonzero if X is an unsigned value.
722 This can be done by referring to a part of X in place
ad76cef8 723 or by copying to a new temporary with conversion. */
bbf6f052
RK
724
725rtx
502b8322 726convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
5ffe63ed
RS
727{
728 return convert_modes (mode, VOIDmode, x, unsignedp);
729}
730
731/* Return an rtx for a value that would result
732 from converting X from mode OLDMODE to mode MODE.
733 Both modes may be floating, or both integer.
734 UNSIGNEDP is nonzero if X is an unsigned value.
735
736 This can be done by referring to a part of X in place
737 or by copying to a new temporary with conversion.
738
ad76cef8 739 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
5ffe63ed
RS
740
741rtx
502b8322 742convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
bbf6f052 743{
b3694847 744 rtx temp;
5ffe63ed 745
1499e0a8
RK
746 /* If FROM is a SUBREG that indicates that we have already done at least
747 the required extension, strip it. */
748
749 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
750 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
751 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
752 x = gen_lowpart (mode, x);
bbf6f052 753
64791b18
RK
754 if (GET_MODE (x) != VOIDmode)
755 oldmode = GET_MODE (x);
3a94c984 756
5ffe63ed 757 if (mode == oldmode)
bbf6f052
RK
758 return x;
759
760 /* There is one case that we must handle specially: If we are converting
906c4e36 761 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
bbf6f052
RK
762 we are to interpret the constant as unsigned, gen_lowpart will do
 763 the wrong thing if the constant appears negative. What we want to do is
764 make the high-order word of the constant zero, not all ones. */
765
766 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
906c4e36 767 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
bbf6f052 768 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
96ff8a16
ILT
769 {
770 HOST_WIDE_INT val = INTVAL (x);
771
772 if (oldmode != VOIDmode
773 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
774 {
775 int width = GET_MODE_BITSIZE (oldmode);
776
777 /* We need to zero extend VAL. */
778 val &= ((HOST_WIDE_INT) 1 << width) - 1;
779 }
780
781 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
782 }
bbf6f052
RK
783
784 /* We can do this with a gen_lowpart if both desired and current modes
785 are integer, and this is either a constant integer, a register, or a
ba2e110c
RK
786 non-volatile MEM. Except for the constant case where MODE is no
787 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
bbf6f052 788
ba2e110c
RK
789 if ((GET_CODE (x) == CONST_INT
790 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
bbf6f052 791 || (GET_MODE_CLASS (mode) == MODE_INT
5ffe63ed 792 && GET_MODE_CLASS (oldmode) == MODE_INT
bbf6f052 793 && (GET_CODE (x) == CONST_DOUBLE
5ffe63ed 794 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
3c0cb5de 795 && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
d57c66da 796 && direct_load[(int) mode])
f8cfc6aa 797 || (REG_P (x)
006c9f4a
SE
798 && (! HARD_REGISTER_P (x)
799 || HARD_REGNO_MODE_OK (REGNO (x), mode))
2bf29316
JW
800 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
801 GET_MODE_BITSIZE (GET_MODE (x)))))))))
ba2e110c
RK
802 {
803 /* ?? If we don't know OLDMODE, we have to assume here that
804 X does not need sign- or zero-extension. This may not be
805 the case, but it's the best we can do. */
806 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
807 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
808 {
809 HOST_WIDE_INT val = INTVAL (x);
810 int width = GET_MODE_BITSIZE (oldmode);
811
812 /* We must sign or zero-extend in this case. Start by
813 zero-extending, then sign extend if we need to. */
814 val &= ((HOST_WIDE_INT) 1 << width) - 1;
815 if (! unsignedp
816 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
817 val |= (HOST_WIDE_INT) (-1) << width;
818
2496c7bd 819 return gen_int_mode (val, mode);
ba2e110c
RK
820 }
821
822 return gen_lowpart (mode, x);
823 }
bbf6f052 824
ebe75517
JH
 825 /* Converting an integer constant into MODE is always equivalent to a
 826 subreg operation. */
827 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
828 {
5b0264cb 829 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
ebe75517
JH
830 return simplify_gen_subreg (mode, x, oldmode, 0);
831 }
832
bbf6f052
RK
833 temp = gen_reg_rtx (mode);
834 convert_move (temp, x, unsignedp);
835 return temp;
836}
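
/* Editor's note: an illustrative sketch, not part of this file.  For a
   CONST_INT the OLDMODE argument matters: it tells convert_modes how wide
   the constant really is, so the extension can be done on the constant
   itself without creating a pseudo.  Assuming HOST_WIDE_INT is at least
   64 bits wide:

       rtx x = convert_modes (DImode, SImode, GEN_INT (-1), 1);

   yields the DImode constant 0xffffffff (zero-extended because UNSIGNEDP is
   nonzero), rather than the sign-extended all-ones value.  */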
837\f
cf5124f6
RS
838/* STORE_MAX_PIECES is the number of bytes at a time that we can
839 store efficiently. Due to internal GCC limitations, this is
840 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
841 for an immediate constant. */
842
843#define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
844
8fd3cf4e
JJ
845/* Determine whether the LEN bytes can be moved by using several move
846 instructions. Return nonzero if a call to move_by_pieces should
847 succeed. */
848
849int
502b8322
AJ
850can_move_by_pieces (unsigned HOST_WIDE_INT len,
851 unsigned int align ATTRIBUTE_UNUSED)
8fd3cf4e
JJ
852{
853 return MOVE_BY_PIECES_P (len, align);
854}
855
21d93687 856/* Generate several move instructions to copy LEN bytes from block FROM to
ad76cef8 857 block TO. (These are MEM rtx's with BLKmode).
566aa174 858
21d93687
RK
859 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
860 used to push FROM to the stack.
566aa174 861
8fd3cf4e 862 ALIGN is the maximum stack alignment we can assume.
bbf6f052 863
8fd3cf4e
JJ
864 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
 865 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
866 stpcpy. */
867
868rtx
502b8322
AJ
869move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
870 unsigned int align, int endp)
bbf6f052
RK
871{
872 struct move_by_pieces data;
566aa174 873 rtx to_addr, from_addr = XEXP (from, 0);
770ae6cc 874 unsigned int max_size = MOVE_MAX_PIECES + 1;
fbe1758d
AM
875 enum machine_mode mode = VOIDmode, tmode;
876 enum insn_code icode;
bbf6f052 877
f26aca6d
DD
878 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
879
bbf6f052 880 data.offset = 0;
bbf6f052 881 data.from_addr = from_addr;
566aa174
JH
882 if (to)
883 {
884 to_addr = XEXP (to, 0);
885 data.to = to;
886 data.autinc_to
887 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
888 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
889 data.reverse
890 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
891 }
892 else
893 {
894 to_addr = NULL_RTX;
895 data.to = NULL_RTX;
896 data.autinc_to = 1;
897#ifdef STACK_GROWS_DOWNWARD
898 data.reverse = 1;
899#else
900 data.reverse = 0;
901#endif
902 }
903 data.to_addr = to_addr;
bbf6f052 904 data.from = from;
bbf6f052
RK
905 data.autinc_from
906 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
907 || GET_CODE (from_addr) == POST_INC
908 || GET_CODE (from_addr) == POST_DEC);
909
910 data.explicit_inc_from = 0;
911 data.explicit_inc_to = 0;
bbf6f052
RK
912 if (data.reverse) data.offset = len;
913 data.len = len;
914
915 /* If copying requires more than two move insns,
916 copy addresses to registers (to make displacements shorter)
917 and use post-increment if available. */
918 if (!(data.autinc_from && data.autinc_to)
45d78e7f 919 && move_by_pieces_ninsns (len, align, max_size) > 2)
bbf6f052 920 {
3a94c984 921 /* Find the mode of the largest move... */
fbe1758d
AM
922 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
923 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
924 if (GET_MODE_SIZE (tmode) < max_size)
925 mode = tmode;
926
927 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
bbf6f052
RK
928 {
929 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
930 data.autinc_from = 1;
931 data.explicit_inc_from = -1;
932 }
fbe1758d 933 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
bbf6f052
RK
934 {
935 data.from_addr = copy_addr_to_reg (from_addr);
936 data.autinc_from = 1;
937 data.explicit_inc_from = 1;
938 }
bbf6f052
RK
939 if (!data.autinc_from && CONSTANT_P (from_addr))
940 data.from_addr = copy_addr_to_reg (from_addr);
fbe1758d 941 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
bbf6f052
RK
942 {
943 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
944 data.autinc_to = 1;
945 data.explicit_inc_to = -1;
946 }
fbe1758d 947 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
bbf6f052
RK
948 {
949 data.to_addr = copy_addr_to_reg (to_addr);
950 data.autinc_to = 1;
951 data.explicit_inc_to = 1;
952 }
bbf6f052
RK
953 if (!data.autinc_to && CONSTANT_P (to_addr))
954 data.to_addr = copy_addr_to_reg (to_addr);
955 }
956
f64d6991
DE
957 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
958 if (align >= GET_MODE_ALIGNMENT (tmode))
959 align = GET_MODE_ALIGNMENT (tmode);
960 else
961 {
962 enum machine_mode xmode;
963
964 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
965 tmode != VOIDmode;
966 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
967 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
968 || SLOW_UNALIGNED_ACCESS (tmode, align))
969 break;
970
971 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
972 }
bbf6f052
RK
973
974 /* First move what we can in the largest integer mode, then go to
975 successively smaller modes. */
976
977 while (max_size > 1)
978 {
e7c33f54
RK
979 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
980 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
981 if (GET_MODE_SIZE (tmode) < max_size)
bbf6f052
RK
982 mode = tmode;
983
984 if (mode == VOIDmode)
985 break;
986
987 icode = mov_optab->handlers[(int) mode].insn_code;
19caa751 988 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
bbf6f052
RK
989 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
990
991 max_size = GET_MODE_SIZE (mode);
992 }
993
994 /* The code above should have handled everything. */
5b0264cb 995 gcc_assert (!data.len);
8fd3cf4e
JJ
996
997 if (endp)
998 {
999 rtx to1;
1000
5b0264cb 1001 gcc_assert (!data.reverse);
8fd3cf4e
JJ
1002 if (data.autinc_to)
1003 {
1004 if (endp == 2)
1005 {
1006 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1007 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1008 else
1009 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
1010 -1));
1011 }
1012 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1013 data.offset);
1014 }
1015 else
1016 {
1017 if (endp == 2)
1018 --data.offset;
1019 to1 = adjust_address (data.to, QImode, data.offset);
1020 }
1021 return to1;
1022 }
1023 else
1024 return data.to;
bbf6f052
RK
1025}
1026
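
/* Editor's note: a usage sketch, not part of this file.  Callers normally
   gate move_by_pieces on can_move_by_pieces (or the MOVE_BY_PIECES_P test it
   wraps), as emit_block_move does further down.  X and Y stand for BLKmode
   MEMs and ALIGN is in bits, all hypothetical here.

       if (GET_CODE (size) == CONST_INT
	   && can_move_by_pieces (INTVAL (size), align))
	 move_by_pieces (x, y, INTVAL (size), align, 0);

   An ENDP of 0 returns the destination itself; 1 and 2 instead return the
   address just past, or one byte before, the end of the copied block, for
   mempcpy- and stpcpy-style expansions.  */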
1027/* Return number of insns required to move L bytes by pieces.
f1eaaf73 1028 ALIGN (in bits) is maximum alignment we can assume. */
bbf6f052 1029
3bdf5ad1 1030static unsigned HOST_WIDE_INT
45d78e7f
JJ
1031move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
1032 unsigned int max_size)
bbf6f052 1033{
3bdf5ad1 1034 unsigned HOST_WIDE_INT n_insns = 0;
f64d6991 1035 enum machine_mode tmode;
bbf6f052 1036
f64d6991
DE
1037 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
1038 if (align >= GET_MODE_ALIGNMENT (tmode))
1039 align = GET_MODE_ALIGNMENT (tmode);
1040 else
1041 {
1042 enum machine_mode tmode, xmode;
1043
1044 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
1045 tmode != VOIDmode;
1046 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
1047 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
1048 || SLOW_UNALIGNED_ACCESS (tmode, align))
1049 break;
1050
1051 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
1052 }
bbf6f052
RK
1053
1054 while (max_size > 1)
1055 {
f64d6991 1056 enum machine_mode mode = VOIDmode;
bbf6f052
RK
1057 enum insn_code icode;
1058
e7c33f54
RK
1059 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1060 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1061 if (GET_MODE_SIZE (tmode) < max_size)
bbf6f052
RK
1062 mode = tmode;
1063
1064 if (mode == VOIDmode)
1065 break;
1066
1067 icode = mov_optab->handlers[(int) mode].insn_code;
19caa751 1068 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
bbf6f052
RK
1069 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1070
1071 max_size = GET_MODE_SIZE (mode);
1072 }
1073
5b0264cb 1074 gcc_assert (!l);
bbf6f052
RK
1075 return n_insns;
1076}
1077
1078/* Subroutine of move_by_pieces. Move as many bytes as appropriate
1079 with move instructions for mode MODE. GENFUN is the gen_... function
1080 to make a move insn for that mode. DATA has all the other info. */
1081
1082static void
502b8322
AJ
1083move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1084 struct move_by_pieces *data)
bbf6f052 1085{
3bdf5ad1 1086 unsigned int size = GET_MODE_SIZE (mode);
ae0ed63a 1087 rtx to1 = NULL_RTX, from1;
bbf6f052
RK
1088
1089 while (data->len >= size)
1090 {
3bdf5ad1
RK
1091 if (data->reverse)
1092 data->offset -= size;
1093
566aa174 1094 if (data->to)
3bdf5ad1 1095 {
566aa174 1096 if (data->autinc_to)
630036c6
JJ
1097 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1098 data->offset);
566aa174 1099 else
f4ef873c 1100 to1 = adjust_address (data->to, mode, data->offset);
3bdf5ad1 1101 }
3bdf5ad1
RK
1102
1103 if (data->autinc_from)
630036c6
JJ
1104 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1105 data->offset);
3bdf5ad1 1106 else
f4ef873c 1107 from1 = adjust_address (data->from, mode, data->offset);
bbf6f052 1108
940da324 1109 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
3d709fd3
RH
1110 emit_insn (gen_add2_insn (data->to_addr,
1111 GEN_INT (-(HOST_WIDE_INT)size)));
940da324 1112 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
3d709fd3
RH
1113 emit_insn (gen_add2_insn (data->from_addr,
1114 GEN_INT (-(HOST_WIDE_INT)size)));
bbf6f052 1115
566aa174
JH
1116 if (data->to)
1117 emit_insn ((*genfun) (to1, from1));
1118 else
21d93687
RK
1119 {
1120#ifdef PUSH_ROUNDING
1121 emit_single_push_insn (mode, from1, NULL);
1122#else
5b0264cb 1123 gcc_unreachable ();
21d93687
RK
1124#endif
1125 }
3bdf5ad1 1126
940da324 1127 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
906c4e36 1128 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
940da324 1129 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
906c4e36 1130 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
bbf6f052 1131
3bdf5ad1
RK
1132 if (! data->reverse)
1133 data->offset += size;
bbf6f052
RK
1134
1135 data->len -= size;
1136 }
1137}
1138\f
4ca79136
RH
1139/* Emit code to move a block Y to a block X. This may be done with
1140 string-move instructions, with multiple scalar move instructions,
1141 or with a library call.
bbf6f052 1142
4ca79136 1143 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
bbf6f052 1144 SIZE is an rtx that says how long they are.
19caa751 1145 ALIGN is the maximum alignment we can assume they have.
44bb111a 1146 METHOD describes what kind of copy this is, and what mechanisms may be used.
bbf6f052 1147
e9a25f70
JL
1148 Return the address of the new block, if memcpy is called and returns it,
1149 0 otherwise. */
1150
1151rtx
502b8322 1152emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
bbf6f052 1153{
44bb111a 1154 bool may_use_call;
e9a25f70 1155 rtx retval = 0;
44bb111a
RH
1156 unsigned int align;
1157
1158 switch (method)
1159 {
1160 case BLOCK_OP_NORMAL:
8148fe65 1161 case BLOCK_OP_TAILCALL:
44bb111a
RH
1162 may_use_call = true;
1163 break;
1164
1165 case BLOCK_OP_CALL_PARM:
1166 may_use_call = block_move_libcall_safe_for_call_parm ();
1167
1168 /* Make inhibit_defer_pop nonzero around the library call
1169 to force it to pop the arguments right away. */
1170 NO_DEFER_POP;
1171 break;
1172
1173 case BLOCK_OP_NO_LIBCALL:
1174 may_use_call = false;
1175 break;
1176
1177 default:
5b0264cb 1178 gcc_unreachable ();
44bb111a
RH
1179 }
1180
1181 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
e9a25f70 1182
5b0264cb
NS
1183 gcc_assert (MEM_P (x));
1184 gcc_assert (MEM_P (y));
1185 gcc_assert (size);
bbf6f052 1186
82c82743
RH
1187 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1188 block copy is more efficient for other large modes, e.g. DCmode. */
1189 x = adjust_address (x, BLKmode, 0);
1190 y = adjust_address (y, BLKmode, 0);
1191
cb38fd88
RH
1192 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1193 can be incorrect is coming from __builtin_memcpy. */
1194 if (GET_CODE (size) == CONST_INT)
1195 {
6972c506
JJ
1196 if (INTVAL (size) == 0)
1197 return 0;
1198
cb38fd88
RH
1199 x = shallow_copy_rtx (x);
1200 y = shallow_copy_rtx (y);
1201 set_mem_size (x, size);
1202 set_mem_size (y, size);
1203 }
1204
fbe1758d 1205 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
8fd3cf4e 1206 move_by_pieces (x, y, INTVAL (size), align, 0);
70128ad9 1207 else if (emit_block_move_via_movmem (x, y, size, align))
4ca79136 1208 ;
44bb111a 1209 else if (may_use_call)
8148fe65
JJ
1210 retval = emit_block_move_via_libcall (x, y, size,
1211 method == BLOCK_OP_TAILCALL);
44bb111a
RH
1212 else
1213 emit_block_move_via_loop (x, y, size, align);
1214
1215 if (method == BLOCK_OP_CALL_PARM)
1216 OK_DEFER_POP;
266007a7 1217
4ca79136
RH
1218 return retval;
1219}
266007a7 1220
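
/* Editor's note: a usage sketch, not part of this file.  DST and SRC are
   hypothetical BLKmode MEMs.

       emit_block_move (dst, src, GEN_INT (32), BLOCK_OP_NORMAL);

   BLOCK_OP_CALL_PARM is used instead when the copy feeds an outgoing
   argument, so that a memcpy libcall is emitted only when it cannot clobber
   parameters already on the stack; BLOCK_OP_NO_LIBCALL forbids the libcall
   altogether and falls back to the explicit loop.  */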
502b8322 1221/* A subroutine of emit_block_move. Returns true if calling the
44bb111a
RH
1222 block move libcall will not clobber any parameters which may have
1223 already been placed on the stack. */
1224
1225static bool
502b8322 1226block_move_libcall_safe_for_call_parm (void)
44bb111a 1227{
a357a6d4 1228 /* If arguments are pushed on the stack, then they're safe. */
44bb111a
RH
1229 if (PUSH_ARGS)
1230 return true;
44bb111a 1231
450b1728 1232 /* If registers go on the stack anyway, any argument is sure to clobber
a357a6d4
GK
1233 an outgoing argument. */
1234#if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
1235 {
1236 tree fn = emit_block_move_libcall_fn (false);
1237 (void) fn;
1238 if (REG_PARM_STACK_SPACE (fn) != 0)
1239 return false;
1240 }
44bb111a 1241#endif
44bb111a 1242
a357a6d4
GK
1243 /* If any argument goes in memory, then it might clobber an outgoing
1244 argument. */
1245 {
1246 CUMULATIVE_ARGS args_so_far;
1247 tree fn, arg;
450b1728 1248
a357a6d4 1249 fn = emit_block_move_libcall_fn (false);
0f6937fe 1250 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
450b1728 1251
a357a6d4
GK
1252 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1253 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1254 {
1255 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1256 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1257 if (!tmp || !REG_P (tmp))
44bb111a 1258 return false;
78a52f11 1259 if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
a357a6d4 1260 return false;
a357a6d4
GK
1261 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1262 }
1263 }
1264 return true;
44bb111a
RH
1265}
1266
70128ad9 1267/* A subroutine of emit_block_move. Expand a movmem pattern;
4ca79136 1268 return true if successful. */
3ef1eef4 1269
4ca79136 1270static bool
70128ad9 1271emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align)
4ca79136 1272{
4ca79136 1273 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
a5e9c810 1274 int save_volatile_ok = volatile_ok;
4ca79136 1275 enum machine_mode mode;
266007a7 1276
4ca79136
RH
1277 /* Since this is a move insn, we don't care about volatility. */
1278 volatile_ok = 1;
1279
ee960939
OH
1280 /* Try the most limited insn first, because there's no point
1281 including more than one in the machine description unless
1282 the more limited one has some advantage. */
1283
4ca79136
RH
1284 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1285 mode = GET_MODE_WIDER_MODE (mode))
1286 {
70128ad9 1287 enum insn_code code = movmem_optab[(int) mode];
4ca79136
RH
1288 insn_operand_predicate_fn pred;
1289
1290 if (code != CODE_FOR_nothing
1291 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1292 here because if SIZE is less than the mode mask, as it is
1293 returned by the macro, it will definitely be less than the
1294 actual mode mask. */
1295 && ((GET_CODE (size) == CONST_INT
1296 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1297 <= (GET_MODE_MASK (mode) >> 1)))
1298 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1299 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1300 || (*pred) (x, BLKmode))
1301 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1302 || (*pred) (y, BLKmode))
1303 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1304 || (*pred) (opalign, VOIDmode)))
1305 {
1306 rtx op2;
1307 rtx last = get_last_insn ();
1308 rtx pat;
1309
1310 op2 = convert_to_mode (mode, size, 1);
1311 pred = insn_data[(int) code].operand[2].predicate;
1312 if (pred != 0 && ! (*pred) (op2, mode))
1313 op2 = copy_to_mode_reg (mode, op2);
1314
1315 /* ??? When called via emit_block_move_for_call, it'd be
1316 nice if there were some way to inform the backend, so
1317 that it doesn't fail the expansion because it thinks
1318 emitting the libcall would be more efficient. */
1319
1320 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1321 if (pat)
1322 {
1323 emit_insn (pat);
a5e9c810 1324 volatile_ok = save_volatile_ok;
4ca79136 1325 return true;
bbf6f052 1326 }
4ca79136
RH
1327 else
1328 delete_insns_since (last);
bbf6f052 1329 }
4ca79136 1330 }
bbf6f052 1331
a5e9c810 1332 volatile_ok = save_volatile_ok;
4ca79136
RH
1333 return false;
1334}
3ef1eef4 1335
8f99553f 1336/* A subroutine of emit_block_move. Expand a call to memcpy.
4ca79136 1337 Return the return value from memcpy, 0 otherwise. */
4bc973ae 1338
4ca79136 1339static rtx
8148fe65 1340emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
4ca79136 1341{
ee960939 1342 rtx dst_addr, src_addr;
4ca79136
RH
1343 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1344 enum machine_mode size_mode;
1345 rtx retval;
4bc973ae 1346
ad76cef8
PB
1347 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1348 pseudos. We can then place those new pseudos into a VAR_DECL and
1349 use them later. */
ee960939
OH
1350
1351 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1352 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
4ca79136 1353
ee960939
OH
1354 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1355 src_addr = convert_memory_address (ptr_mode, src_addr);
ee960939
OH
1356
1357 dst_tree = make_tree (ptr_type_node, dst_addr);
1358 src_tree = make_tree (ptr_type_node, src_addr);
4ca79136 1359
8f99553f 1360 size_mode = TYPE_MODE (sizetype);
ee960939 1361
4ca79136
RH
1362 size = convert_to_mode (size_mode, size, 1);
1363 size = copy_to_mode_reg (size_mode, size);
1364
1365 /* It is incorrect to use the libcall calling conventions to call
1366 memcpy in this context. This could be a user call to memcpy and
1367 the user may wish to examine the return value from memcpy. For
1368 targets where libcalls and normal calls have different conventions
8f99553f 1369 for returning pointers, we could end up generating incorrect code. */
4ca79136 1370
8f99553f 1371 size_tree = make_tree (sizetype, size);
4ca79136
RH
1372
1373 fn = emit_block_move_libcall_fn (true);
1374 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
8f99553f
JM
1375 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1376 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
4ca79136
RH
1377
1378 /* Now we have to build up the CALL_EXPR itself. */
1379 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
3244e67d
RS
1380 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1381 call_expr, arg_list, NULL_TREE);
8148fe65 1382 CALL_EXPR_TAILCALL (call_expr) = tailcall;
4ca79136 1383
84217346 1384 retval = expand_normal (call_expr);
4ca79136 1385
8f99553f 1386 return retval;
4ca79136 1387}
52cf7115 1388
4ca79136
RH
1389/* A subroutine of emit_block_move_via_libcall. Create the tree node
1390 for the function we use for block copies. The first time FOR_CALL
1391 is true, we call assemble_external. */
52cf7115 1392
4ca79136
RH
1393static GTY(()) tree block_move_fn;
1394
9661b15f 1395void
502b8322 1396init_block_move_fn (const char *asmspec)
4ca79136 1397{
9661b15f 1398 if (!block_move_fn)
4ca79136 1399 {
8fd3cf4e 1400 tree args, fn;
9661b15f 1401
8f99553f
JM
1402 fn = get_identifier ("memcpy");
1403 args = build_function_type_list (ptr_type_node, ptr_type_node,
1404 const_ptr_type_node, sizetype,
1405 NULL_TREE);
52cf7115 1406
4ca79136
RH
1407 fn = build_decl (FUNCTION_DECL, fn, args);
1408 DECL_EXTERNAL (fn) = 1;
1409 TREE_PUBLIC (fn) = 1;
1410 DECL_ARTIFICIAL (fn) = 1;
1411 TREE_NOTHROW (fn) = 1;
66c60e67 1412
4ca79136 1413 block_move_fn = fn;
bbf6f052 1414 }
e9a25f70 1415
9661b15f 1416 if (asmspec)
0e6df31e 1417 set_user_assembler_name (block_move_fn, asmspec);
9661b15f
JJ
1418}
1419
1420static tree
502b8322 1421emit_block_move_libcall_fn (int for_call)
9661b15f
JJ
1422{
1423 static bool emitted_extern;
1424
1425 if (!block_move_fn)
1426 init_block_move_fn (NULL);
1427
4ca79136
RH
1428 if (for_call && !emitted_extern)
1429 {
1430 emitted_extern = true;
0e6df31e 1431 make_decl_rtl (block_move_fn);
9661b15f 1432 assemble_external (block_move_fn);
4ca79136
RH
1433 }
1434
9661b15f 1435 return block_move_fn;
bbf6f052 1436}
44bb111a
RH
1437
1438/* A subroutine of emit_block_move. Copy the data via an explicit
1439 loop. This is used only when libcalls are forbidden. */
1440/* ??? It'd be nice to copy in hunks larger than QImode. */
1441
1442static void
502b8322
AJ
1443emit_block_move_via_loop (rtx x, rtx y, rtx size,
1444 unsigned int align ATTRIBUTE_UNUSED)
44bb111a
RH
1445{
1446 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1447 enum machine_mode iter_mode;
1448
1449 iter_mode = GET_MODE (size);
1450 if (iter_mode == VOIDmode)
1451 iter_mode = word_mode;
1452
1453 top_label = gen_label_rtx ();
1454 cmp_label = gen_label_rtx ();
1455 iter = gen_reg_rtx (iter_mode);
1456
1457 emit_move_insn (iter, const0_rtx);
1458
1459 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1460 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1461 do_pending_stack_adjust ();
1462
44bb111a
RH
1463 emit_jump (cmp_label);
1464 emit_label (top_label);
1465
1466 tmp = convert_modes (Pmode, iter_mode, iter, true);
1467 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1468 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1469 x = change_address (x, QImode, x_addr);
1470 y = change_address (y, QImode, y_addr);
1471
1472 emit_move_insn (x, y);
1473
1474 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1475 true, OPTAB_LIB_WIDEN);
1476 if (tmp != iter)
1477 emit_move_insn (iter, tmp);
1478
44bb111a
RH
1479 emit_label (cmp_label);
1480
1481 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1482 true, top_label);
44bb111a 1483}
bbf6f052
RK
1484\f
1485/* Copy all or part of a value X into registers starting at REGNO.
1486 The number of registers to be filled is NREGS. */
1487
1488void
502b8322 1489move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
bbf6f052
RK
1490{
1491 int i;
381127e8 1492#ifdef HAVE_load_multiple
3a94c984 1493 rtx pat;
381127e8
RL
1494 rtx last;
1495#endif
bbf6f052 1496
72bb9717
RK
1497 if (nregs == 0)
1498 return;
1499
bbf6f052
RK
1500 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1501 x = validize_mem (force_const_mem (mode, x));
1502
1503 /* See if the machine can do this with a load multiple insn. */
1504#ifdef HAVE_load_multiple
c3a02afe 1505 if (HAVE_load_multiple)
bbf6f052 1506 {
c3a02afe 1507 last = get_last_insn ();
38a448ca 1508 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
c3a02afe
RK
1509 GEN_INT (nregs));
1510 if (pat)
1511 {
1512 emit_insn (pat);
1513 return;
1514 }
1515 else
1516 delete_insns_since (last);
bbf6f052 1517 }
bbf6f052
RK
1518#endif
1519
1520 for (i = 0; i < nregs; i++)
38a448ca 1521 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
bbf6f052
RK
1522 operand_subword_force (x, i, mode));
1523}
1524
1525/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
c6b97fac 1526 The number of registers to be filled is NREGS. */
0040593d 1527
bbf6f052 1528void
502b8322 1529move_block_from_reg (int regno, rtx x, int nregs)
bbf6f052
RK
1530{
1531 int i;
bbf6f052 1532
2954d7db
RK
1533 if (nregs == 0)
1534 return;
1535
bbf6f052
RK
1536 /* See if the machine can do this with a store multiple insn. */
1537#ifdef HAVE_store_multiple
c3a02afe 1538 if (HAVE_store_multiple)
bbf6f052 1539 {
c6b97fac
AM
1540 rtx last = get_last_insn ();
1541 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1542 GEN_INT (nregs));
c3a02afe
RK
1543 if (pat)
1544 {
1545 emit_insn (pat);
1546 return;
1547 }
1548 else
1549 delete_insns_since (last);
bbf6f052 1550 }
bbf6f052
RK
1551#endif
1552
1553 for (i = 0; i < nregs; i++)
1554 {
1555 rtx tem = operand_subword (x, i, 1, BLKmode);
1556
5b0264cb 1557 gcc_assert (tem);
bbf6f052 1558
38a448ca 1559 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
bbf6f052
RK
1560 }
1561}
1562
084a1106
JDA
1563/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1564 ORIG, where ORIG is a non-consecutive group of registers represented by
1565 a PARALLEL. The clone is identical to the original except in that the
1566 original set of registers is replaced by a new set of pseudo registers.
1567 The new set has the same modes as the original set. */
1568
1569rtx
502b8322 1570gen_group_rtx (rtx orig)
084a1106
JDA
1571{
1572 int i, length;
1573 rtx *tmps;
1574
5b0264cb 1575 gcc_assert (GET_CODE (orig) == PARALLEL);
084a1106
JDA
1576
1577 length = XVECLEN (orig, 0);
703ad42b 1578 tmps = alloca (sizeof (rtx) * length);
084a1106
JDA
1579
1580 /* Skip a NULL entry in first slot. */
1581 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1582
1583 if (i)
1584 tmps[0] = 0;
1585
1586 for (; i < length; i++)
1587 {
1588 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1589 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1590
1591 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1592 }
1593
1594 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1595}
1596
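For illustration, a minimal sketch (editorial, not part of expr.c) of the register-group representation that gen_group_rtx clones and that the emit_group_* routines below consume: a PARALLEL whose elements are EXPR_LISTs pairing a register with its byte offset within the value.  The modes, hard register numbers and offsets are arbitrary example values.

static rtx
example_two_reg_group (void)
{
  /* A 16-byte value split across two DImode registers at byte offsets
     0 and 8; hard register numbers 4 and 5 are arbitrary.  */
  rtx elt0 = gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_REG (DImode, 4), GEN_INT (0));
  rtx elt1 = gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_REG (DImode, 5), GEN_INT (8));

  return gen_rtx_PARALLEL (BLKmode, gen_rtvec (2, elt0, elt1));
}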
27e29549
RH
1597/* A subroutine of emit_group_load. Arguments as for emit_group_load,
1598 except that values are placed in TMPS[i], and must later be moved
daa956d0 1599 into the corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
fffa9c1d 1600
27e29549
RH
1601static void
1602emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
fffa9c1d 1603{
27e29549 1604 rtx src;
aac5cc16 1605 int start, i;
7ef7000b 1606 enum machine_mode m = GET_MODE (orig_src);
fffa9c1d 1607
5b0264cb 1608 gcc_assert (GET_CODE (dst) == PARALLEL);
fffa9c1d 1609
f2978871
AM
1610 if (m != VOIDmode
1611 && !SCALAR_INT_MODE_P (m)
1612 && !MEM_P (orig_src)
1613 && GET_CODE (orig_src) != CONCAT)
782fa603
AH
1614 {
1615 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1616 if (imode == BLKmode)
1617 src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
1618 else
1619 src = gen_reg_rtx (imode);
1620 if (imode != BLKmode)
1621 src = gen_lowpart (GET_MODE (orig_src), src);
1622 emit_move_insn (src, orig_src);
1623 /* ...and back again. */
1624 if (imode != BLKmode)
1625 src = gen_lowpart (imode, src);
27e29549 1626 emit_group_load_1 (tmps, dst, src, type, ssize);
782fa603
AH
1627 return;
1628 }
1629
fffa9c1d
JW
1630 /* Check for a NULL entry, used to indicate that the parameter goes
1631 both on the stack and in registers. */
aac5cc16
RH
1632 if (XEXP (XVECEXP (dst, 0, 0), 0))
1633 start = 0;
fffa9c1d 1634 else
aac5cc16
RH
1635 start = 1;
1636
aac5cc16
RH
1637 /* Process the pieces. */
1638 for (i = start; i < XVECLEN (dst, 0); i++)
1639 {
1640 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
770ae6cc
RK
1641 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1642 unsigned int bytelen = GET_MODE_SIZE (mode);
aac5cc16
RH
1643 int shift = 0;
1644
1645 /* Handle trailing fragments that run over the size of the struct. */
8752c357 1646 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
aac5cc16 1647 {
6e985040
AM
1648 /* Arrange to shift the fragment to where it belongs.
1649 extract_bit_field loads to the lsb of the reg. */
1650 if (
1651#ifdef BLOCK_REG_PADDING
1652 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1653 == (BYTES_BIG_ENDIAN ? upward : downward)
1654#else
1655 BYTES_BIG_ENDIAN
1656#endif
1657 )
1658 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
aac5cc16 1659 bytelen = ssize - bytepos;
5b0264cb 1660 gcc_assert (bytelen > 0);
aac5cc16
RH
1661 }
1662
f3ce87a9
DE
1663 /* If we won't be loading directly from memory, protect the real source
1664 from strange tricks we might play; but make sure that the source can
1665 be loaded directly into the destination. */
1666 src = orig_src;
3c0cb5de 1667 if (!MEM_P (orig_src)
f3ce87a9
DE
1668 && (!CONSTANT_P (orig_src)
1669 || (GET_MODE (orig_src) != mode
1670 && GET_MODE (orig_src) != VOIDmode)))
1671 {
1672 if (GET_MODE (orig_src) == VOIDmode)
1673 src = gen_reg_rtx (mode);
1674 else
1675 src = gen_reg_rtx (GET_MODE (orig_src));
04050c69 1676
f3ce87a9
DE
1677 emit_move_insn (src, orig_src);
1678 }
1679
aac5cc16 1680 /* Optimize the access just a bit. */
3c0cb5de 1681 if (MEM_P (src)
6e985040
AM
1682 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1683 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
729a2125 1684 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
aac5cc16
RH
1685 && bytelen == GET_MODE_SIZE (mode))
1686 {
1687 tmps[i] = gen_reg_rtx (mode);
f4ef873c 1688 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
fffa9c1d 1689 }
d20b1190
EB
1690 else if (COMPLEX_MODE_P (mode)
1691 && GET_MODE (src) == mode
1692 && bytelen == GET_MODE_SIZE (mode))
1693 /* Let emit_move_complex do the bulk of the work. */
1694 tmps[i] = src;
7c4a6db0
JW
1695 else if (GET_CODE (src) == CONCAT)
1696 {
015b1ad1
JDA
1697 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1698 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1699
1700 if ((bytepos == 0 && bytelen == slen0)
1701 || (bytepos != 0 && bytepos + bytelen <= slen))
cbb92744 1702 {
015b1ad1
JDA
1703 /* The following assumes that the concatenated objects all
1704 have the same size. In this case, a simple calculation
1705 can be used to determine the object and the bit field
1706 to be extracted. */
1707 tmps[i] = XEXP (src, bytepos / slen0);
cbb92744 1708 if (! CONSTANT_P (tmps[i])
f8cfc6aa 1709 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
cbb92744 1710 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
015b1ad1 1711 (bytepos % slen0) * BITS_PER_UNIT,
b3520980 1712 1, NULL_RTX, mode, mode);
cbb92744 1713 }
5b0264cb 1714 else
58f69841 1715 {
5b0264cb 1716 rtx mem;
f58c00e3 1717
5b0264cb
NS
1718 gcc_assert (!bytepos);
1719 mem = assign_stack_temp (GET_MODE (src), slen, 0);
58f69841 1720 emit_move_insn (mem, src);
f58c00e3
EB
1721 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1722 0, 1, NULL_RTX, mode, mode);
58f69841 1723 }
7c4a6db0 1724 }
9c0631a7
AH
1725 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1726 SIMD register, which is currently broken. While we get GCC
1727 to emit proper RTL for these cases, let's dump to memory. */
1728 else if (VECTOR_MODE_P (GET_MODE (dst))
f8cfc6aa 1729 && REG_P (src))
9c0631a7
AH
1730 {
1731 int slen = GET_MODE_SIZE (GET_MODE (src));
1732 rtx mem;
1733
1734 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1735 emit_move_insn (mem, src);
1736 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1737 }
d3a16cbd
FJ
1738 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1739 && XVECLEN (dst, 0) > 1)
1740 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
f3ce87a9 1741 else if (CONSTANT_P (src)
f8cfc6aa 1742 || (REG_P (src) && GET_MODE (src) == mode))
2ee5437b 1743 tmps[i] = src;
fffa9c1d 1744 else
19caa751
RK
1745 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1746 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
b3520980 1747 mode, mode);
fffa9c1d 1748
6e985040 1749 if (shift)
09b52670 1750 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
7d60be94 1751 build_int_cst (NULL_TREE, shift), tmps[i], 0);
fffa9c1d 1752 }
27e29549
RH
1753}
1754
1755/* Emit code to move a block SRC of type TYPE to a block DST,
1756 where DST is non-consecutive registers represented by a PARALLEL.
1757 SSIZE represents the total size of block SRC in bytes, or -1
1758 if not known. */
1759
1760void
1761emit_group_load (rtx dst, rtx src, tree type, int ssize)
1762{
1763 rtx *tmps;
1764 int i;
1765
1766 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1767 emit_group_load_1 (tmps, dst, src, type, ssize);
19caa751 1768
aac5cc16 1769 /* Copy the extracted pieces into the proper (probable) hard regs. */
27e29549
RH
1770 for (i = 0; i < XVECLEN (dst, 0); i++)
1771 {
1772 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1773 if (d == NULL)
1774 continue;
1775 emit_move_insn (d, tmps[i]);
1776 }
1777}
1778
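As a usage sketch (editorial, not from the source): given a group like the one illustrated after gen_group_rtx above and a BLKmode memory operand, a caller loads the pieces with a call along these lines; group, mem and type are assumed to have been set up elsewhere.

  /* Load the pieces of MEM into the registers named by GROUP.  */
  emit_group_load (group, mem, type, int_size_in_bytes (type));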
1779/* Similar, but load SRC into new pseudos in a format that looks like
1780 PARALLEL. This can later be fed to emit_group_move to get things
1781 in the right place. */
1782
1783rtx
1784emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1785{
1786 rtvec vec;
1787 int i;
1788
1789 vec = rtvec_alloc (XVECLEN (parallel, 0));
1790 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1791
1792 /* Convert the vector to look just like the original PARALLEL, except
1793 with the computed values. */
1794 for (i = 0; i < XVECLEN (parallel, 0); i++)
1795 {
1796 rtx e = XVECEXP (parallel, 0, i);
1797 rtx d = XEXP (e, 0);
1798
1799 if (d)
1800 {
1801 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1802 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1803 }
1804 RTVEC_ELT (vec, i) = e;
1805 }
1806
1807 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
fffa9c1d
JW
1808}
1809
084a1106
JDA
1810/* Emit code to move a block SRC to block DST, where SRC and DST are
1811 non-consecutive groups of registers, each represented by a PARALLEL. */
1812
1813void
502b8322 1814emit_group_move (rtx dst, rtx src)
084a1106
JDA
1815{
1816 int i;
1817
5b0264cb
NS
1818 gcc_assert (GET_CODE (src) == PARALLEL
1819 && GET_CODE (dst) == PARALLEL
1820 && XVECLEN (src, 0) == XVECLEN (dst, 0));
084a1106
JDA
1821
1822 /* Skip first entry if NULL. */
1823 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1824 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1825 XEXP (XVECEXP (src, 0, i), 0));
1826}
1827
27e29549
RH
1828/* Move a group of registers represented by a PARALLEL into pseudos. */
1829
1830rtx
1831emit_group_move_into_temps (rtx src)
1832{
1833 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1834 int i;
1835
1836 for (i = 0; i < XVECLEN (src, 0); i++)
1837 {
1838 rtx e = XVECEXP (src, 0, i);
1839 rtx d = XEXP (e, 0);
1840
1841 if (d)
1842 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1843 RTVEC_ELT (vec, i) = e;
1844 }
1845
1846 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1847}
1848
6e985040
AM
1849/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1850 where SRC is non-consecutive registers represented by a PARALLEL.
1851 SSIZE represents the total size of block ORIG_DST, or -1 if not
1852 known. */
fffa9c1d
JW
1853
1854void
6e985040 1855emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
fffa9c1d 1856{
aac5cc16
RH
1857 rtx *tmps, dst;
1858 int start, i;
7ef7000b 1859 enum machine_mode m = GET_MODE (orig_dst);
fffa9c1d 1860
5b0264cb 1861 gcc_assert (GET_CODE (src) == PARALLEL);
fffa9c1d 1862
0da34ce4
RH
1863 if (!SCALAR_INT_MODE_P (m)
1864 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
782fa603
AH
1865 {
1866 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1867 if (imode == BLKmode)
1868 dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
1869 else
1870 dst = gen_reg_rtx (imode);
1871 emit_group_store (dst, src, type, ssize);
1872 if (imode != BLKmode)
1873 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1874 emit_move_insn (orig_dst, dst);
1875 return;
1876 }
1877
fffa9c1d
JW
1878 /* Check for a NULL entry, used to indicate that the parameter goes
1879 both on the stack and in registers. */
aac5cc16
RH
1880 if (XEXP (XVECEXP (src, 0, 0), 0))
1881 start = 0;
fffa9c1d 1882 else
aac5cc16
RH
1883 start = 1;
1884
703ad42b 1885 tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));
fffa9c1d 1886
aac5cc16
RH
1887 /* Copy the (probable) hard regs into pseudos. */
1888 for (i = start; i < XVECLEN (src, 0); i++)
fffa9c1d 1889 {
aac5cc16
RH
1890 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1891 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1892 emit_move_insn (tmps[i], reg);
1893 }
fffa9c1d 1894
aac5cc16
RH
1895 /* If we won't be storing directly into memory, protect the real destination
1896 from strange tricks we might play. */
1897 dst = orig_dst;
10a9f2be
JW
1898 if (GET_CODE (dst) == PARALLEL)
1899 {
1900 rtx temp;
1901
1902 /* We can get a PARALLEL dst if there is a conditional expression in
1903 a return statement. In that case, the dst and src are the same,
1904 so no action is necessary. */
1905 if (rtx_equal_p (dst, src))
1906 return;
1907
1908 /* It is unclear if we can ever reach here, but we may as well handle
1909 it. Allocate a temporary, and split this into a store/load to/from
1910 the temporary. */
1911
1912 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
6e985040
AM
1913 emit_group_store (temp, src, type, ssize);
1914 emit_group_load (dst, temp, type, ssize);
10a9f2be
JW
1915 return;
1916 }
3c0cb5de 1917 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
aac5cc16
RH
1918 {
1919 dst = gen_reg_rtx (GET_MODE (orig_dst));
1920 /* Make life a bit easier for combine. */
8ae91fc0 1921 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
aac5cc16 1922 }
aac5cc16
RH
1923
1924 /* Process the pieces. */
1925 for (i = start; i < XVECLEN (src, 0); i++)
1926 {
770ae6cc 1927 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
aac5cc16 1928 enum machine_mode mode = GET_MODE (tmps[i]);
770ae6cc 1929 unsigned int bytelen = GET_MODE_SIZE (mode);
6ddae612 1930 rtx dest = dst;
aac5cc16
RH
1931
1932 /* Handle trailing fragments that run over the size of the struct. */
8752c357 1933 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
71bc0330 1934 {
6e985040
AM
1935 /* store_bit_field always takes its value from the lsb.
1936 Move the fragment to the lsb if it's not already there. */
1937 if (
1938#ifdef BLOCK_REG_PADDING
1939 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
1940 == (BYTES_BIG_ENDIAN ? upward : downward)
1941#else
1942 BYTES_BIG_ENDIAN
1943#endif
1944 )
aac5cc16
RH
1945 {
1946 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
09b52670 1947 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
7d60be94
NS
1948 build_int_cst (NULL_TREE, shift),
1949 tmps[i], 0);
aac5cc16
RH
1950 }
1951 bytelen = ssize - bytepos;
71bc0330 1952 }
fffa9c1d 1953
6ddae612
JJ
1954 if (GET_CODE (dst) == CONCAT)
1955 {
1956 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
1957 dest = XEXP (dst, 0);
1958 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
1959 {
1960 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
1961 dest = XEXP (dst, 1);
1962 }
5b0264cb 1963 else
0d446150 1964 {
5b0264cb 1965 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
0d446150
JH
1966 dest = assign_stack_temp (GET_MODE (dest),
1967 GET_MODE_SIZE (GET_MODE (dest)), 0);
1968 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
1969 tmps[i]);
1970 dst = dest;
1971 break;
1972 }
6ddae612
JJ
1973 }
1974
aac5cc16 1975 /* Optimize the access just a bit. */
3c0cb5de 1976 if (MEM_P (dest)
6e985040
AM
1977 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
1978 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
729a2125 1979 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
aac5cc16 1980 && bytelen == GET_MODE_SIZE (mode))
6ddae612 1981 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
aac5cc16 1982 else
6ddae612 1983 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
b3520980 1984 mode, tmps[i]);
fffa9c1d 1985 }
729a2125 1986
aac5cc16 1987 /* Copy from the pseudo into the (probable) hard reg. */
0d446150 1988 if (orig_dst != dst)
aac5cc16 1989 emit_move_insn (orig_dst, dst);
fffa9c1d
JW
1990}
1991
c36fce9a
GRK
1992/* Generate code to copy a BLKmode object of TYPE out of a
1993 set of registers starting with SRCREG into TGTBLK. If TGTBLK
1994 is null, a stack temporary is created. TGTBLK is returned.
1995
c988af2b
RS
1996 The purpose of this routine is to handle functions that return
1997 BLKmode structures in registers. Some machines (the PA for example)
1998 want to return all small structures in registers regardless of the
1999 structure's alignment. */
c36fce9a
GRK
2000
2001rtx
502b8322 2002copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
c36fce9a 2003{
19caa751
RK
2004 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2005 rtx src = NULL, dst = NULL;
2006 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
c988af2b 2007 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
19caa751
RK
2008
2009 if (tgtblk == 0)
2010 {
1da68f56
RK
2011 tgtblk = assign_temp (build_qualified_type (type,
2012 (TYPE_QUALS (type)
2013 | TYPE_QUAL_CONST)),
2014 0, 1, 1);
19caa751
RK
2015 preserve_temp_slots (tgtblk);
2016 }
3a94c984 2017
1ed1b4fb 2018 /* This code assumes srcreg is at least a full word. If it isn't, copy it
9ac3e73b 2019 into a new pseudo which is a full word. */
0d7839da 2020
19caa751
RK
2021 if (GET_MODE (srcreg) != BLKmode
2022 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
8df83eae 2023 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
19caa751 2024
c988af2b
RS
2025 /* If the structure doesn't take up a whole number of words, see whether
2026 SRCREG is padded on the left or on the right. If it's on the left,
2027 set PADDING_CORRECTION to the number of bits to skip.
2028
2029 In most ABIs, the structure will be returned at the least significant end of
2030 the register, which translates to right padding on little-endian
2031 targets and left padding on big-endian targets. The opposite
2032 holds if the structure is returned at the most significant
2033 end of the register. */
2034 if (bytes % UNITS_PER_WORD != 0
2035 && (targetm.calls.return_in_msb (type)
2036 ? !BYTES_BIG_ENDIAN
2037 : BYTES_BIG_ENDIAN))
2038 padding_correction
19caa751
RK
2039 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2040
2041 /* Copy the structure BITSIZE bits at a time.
3a94c984 2042
19caa751
RK
2043 We could probably emit more efficient code for machines which do not use
2044 strict alignment, but it doesn't seem worth the effort at the current
2045 time. */
c988af2b 2046 for (bitpos = 0, xbitpos = padding_correction;
19caa751
RK
2047 bitpos < bytes * BITS_PER_UNIT;
2048 bitpos += bitsize, xbitpos += bitsize)
2049 {
3a94c984 2050 /* We need a new source operand each time xbitpos is on a
c988af2b 2051 word boundary and when xbitpos == padding_correction
19caa751
RK
2052 (the first time through). */
2053 if (xbitpos % BITS_PER_WORD == 0
c988af2b 2054 || xbitpos == padding_correction)
b47f8cfc
JH
2055 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2056 GET_MODE (srcreg));
19caa751
RK
2057
2058 /* We need a new destination operand each time bitpos is on
2059 a word boundary. */
2060 if (bitpos % BITS_PER_WORD == 0)
2061 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
3a94c984 2062
19caa751
RK
2063 /* Use xbitpos for the source extraction (right justified) and
2064 bitpos for the destination store (left justified). */
2065 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2066 extract_bit_field (src, bitsize,
2067 xbitpos % BITS_PER_WORD, 1,
b3520980 2068 NULL_RTX, word_mode, word_mode));
19caa751
RK
2069 }
2070
2071 return tgtblk;
c36fce9a
GRK
2072}
2073
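A worked example of the PADDING_CORRECTION computation above (editorial, not from the source), under assumed target parameters:

/* Assume UNITS_PER_WORD == 4, BITS_PER_UNIT == 8, BITS_PER_WORD == 32, and a
   6-byte structure whose padding ends up on the left.  Then
       padding_correction = 32 - (6 % 4) * 8 = 32 - 16 = 16,
   so the first extract_bit_field call skips the 16 pad bits, and the loop
   copies 6 * 8 = 48 bits of payload.  */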
94b25f81
RK
2074/* Add a USE expression for REG to the (possibly empty) list pointed
2075 to by CALL_FUSAGE. REG must denote a hard register. */
bbf6f052
RK
2076
2077void
502b8322 2078use_reg (rtx *call_fusage, rtx reg)
b3f8cf4a 2079{
5b0264cb
NS
2080 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2081
b3f8cf4a 2082 *call_fusage
38a448ca
RH
2083 = gen_rtx_EXPR_LIST (VOIDmode,
2084 gen_rtx_USE (VOIDmode, reg), *call_fusage);
b3f8cf4a
RK
2085}
2086
94b25f81
RK
2087/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2088 starting at REGNO. All of these registers must be hard registers. */
b3f8cf4a
RK
2089
2090void
502b8322 2091use_regs (rtx *call_fusage, int regno, int nregs)
bbf6f052 2092{
0304dfbb 2093 int i;
bbf6f052 2094
5b0264cb 2095 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
0304dfbb
DE
2096
2097 for (i = 0; i < nregs; i++)
e50126e8 2098 use_reg (call_fusage, regno_reg_rtx[regno + i]);
bbf6f052 2099}
fffa9c1d
JW
2100
2101/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2102 PARALLEL REGS. This is for calls that pass values in multiple
2103 non-contiguous locations. The Irix 6 ABI has examples of this. */
2104
2105void
502b8322 2106use_group_regs (rtx *call_fusage, rtx regs)
fffa9c1d
JW
2107{
2108 int i;
2109
6bd35f86
DE
2110 for (i = 0; i < XVECLEN (regs, 0); i++)
2111 {
2112 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
fffa9c1d 2113
6bd35f86
DE
2114 /* A NULL entry means the parameter goes both on the stack and in
2115 registers. This can also be a MEM for targets that pass values
2116 partially on the stack and partially in registers. */
f8cfc6aa 2117 if (reg != 0 && REG_P (reg))
6bd35f86
DE
2118 use_reg (call_fusage, reg);
2119 }
fffa9c1d 2120}
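A minimal sketch (editorial, not part of expr.c) of how a caller accumulates the CALL_FUSAGE list these helpers build; the register number and mode are arbitrary illustrative values.

  rtx call_fusage = NULL_RTX;

  /* Record that the call uses hard register 0, e.g. an argument register.  */
  use_reg (&call_fusage, gen_rtx_REG (SImode, 0));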
bbf6f052 2121\f
57814e5e 2122
cf5124f6
RS
2123/* Determine whether the LEN bytes generated by CONSTFUN can be
2124 stored to memory using several move instructions. CONSTFUNDATA is
2125 a pointer which will be passed as argument in every CONSTFUN call.
2126 ALIGN is maximum alignment we can assume. Return nonzero if a
2127 call to store_by_pieces should succeed. */
2128
57814e5e 2129int
502b8322
AJ
2130can_store_by_pieces (unsigned HOST_WIDE_INT len,
2131 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2132 void *constfundata, unsigned int align)
57814e5e 2133{
45d78e7f
JJ
2134 unsigned HOST_WIDE_INT l;
2135 unsigned int max_size;
57814e5e
JJ
2136 HOST_WIDE_INT offset = 0;
2137 enum machine_mode mode, tmode;
2138 enum insn_code icode;
2139 int reverse;
2140 rtx cst;
2141
2c430630
RS
2142 if (len == 0)
2143 return 1;
2144
4977bab6 2145 if (! STORE_BY_PIECES_P (len, align))
57814e5e
JJ
2146 return 0;
2147
f64d6991
DE
2148 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2149 if (align >= GET_MODE_ALIGNMENT (tmode))
2150 align = GET_MODE_ALIGNMENT (tmode);
2151 else
2152 {
2153 enum machine_mode xmode;
2154
2155 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2156 tmode != VOIDmode;
2157 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2158 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2159 || SLOW_UNALIGNED_ACCESS (tmode, align))
2160 break;
2161
2162 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2163 }
57814e5e
JJ
2164
2165 /* We would first store what we can in the largest integer mode, then go to
2166 successively smaller modes. */
2167
2168 for (reverse = 0;
2169 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2170 reverse++)
2171 {
2172 l = len;
2173 mode = VOIDmode;
cf5124f6 2174 max_size = STORE_MAX_PIECES + 1;
57814e5e
JJ
2175 while (max_size > 1)
2176 {
2177 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2178 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2179 if (GET_MODE_SIZE (tmode) < max_size)
2180 mode = tmode;
2181
2182 if (mode == VOIDmode)
2183 break;
2184
2185 icode = mov_optab->handlers[(int) mode].insn_code;
2186 if (icode != CODE_FOR_nothing
2187 && align >= GET_MODE_ALIGNMENT (mode))
2188 {
2189 unsigned int size = GET_MODE_SIZE (mode);
2190
2191 while (l >= size)
2192 {
2193 if (reverse)
2194 offset -= size;
2195
2196 cst = (*constfun) (constfundata, offset, mode);
2197 if (!LEGITIMATE_CONSTANT_P (cst))
2198 return 0;
2199
2200 if (!reverse)
2201 offset += size;
2202
2203 l -= size;
2204 }
2205 }
2206
2207 max_size = GET_MODE_SIZE (mode);
2208 }
2209
2210 /* The code above should have handled everything. */
5b0264cb 2211 gcc_assert (!l);
57814e5e
JJ
2212 }
2213
2214 return 1;
2215}
2216
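To make the CONSTFUN protocol concrete, a minimal sketch follows (editorial, not part of expr.c; the function name and the length/alignment values are invented): a callback that returns an all-zero constant for every piece, with the store guarded by can_store_by_pieces.

static rtx
example_zero_constfun (void *data ATTRIBUTE_UNUSED,
		       HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
		       enum machine_mode mode)
{
  /* Every piece of the block is a zero of the requested mode.  */
  return CONST0_RTX (mode);
}

/* Then, in the caller (TO is a BLKmode MEM; 16 bytes, 32-bit alignment):  */
  if (can_store_by_pieces (16, example_zero_constfun, NULL, 32))
    store_by_pieces (to, 16, example_zero_constfun, NULL, 32, 0);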
2217/* Generate several move instructions to store LEN bytes generated by
2218 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2219 pointer which will be passed as argument in every CONSTFUN call.
8fd3cf4e
JJ
2220 ALIGN is maximum alignment we can assume.
2221 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2222 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2223 stpcpy. */
57814e5e 2224
8fd3cf4e 2225rtx
502b8322
AJ
2226store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2227 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2228 void *constfundata, unsigned int align, int endp)
57814e5e
JJ
2229{
2230 struct store_by_pieces data;
2231
2c430630
RS
2232 if (len == 0)
2233 {
5b0264cb 2234 gcc_assert (endp != 2);
2c430630
RS
2235 return to;
2236 }
2237
5b0264cb 2238 gcc_assert (STORE_BY_PIECES_P (len, align));
57814e5e
JJ
2239 data.constfun = constfun;
2240 data.constfundata = constfundata;
2241 data.len = len;
2242 data.to = to;
2243 store_by_pieces_1 (&data, align);
8fd3cf4e
JJ
2244 if (endp)
2245 {
2246 rtx to1;
2247
5b0264cb 2248 gcc_assert (!data.reverse);
8fd3cf4e
JJ
2249 if (data.autinc_to)
2250 {
2251 if (endp == 2)
2252 {
2253 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2254 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2255 else
2256 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2257 -1));
2258 }
2259 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2260 data.offset);
2261 }
2262 else
2263 {
2264 if (endp == 2)
2265 --data.offset;
2266 to1 = adjust_address (data.to, QImode, data.offset);
2267 }
2268 return to1;
2269 }
2270 else
2271 return data.to;
57814e5e
JJ
2272}
2273
19caa751 2274/* Generate several move instructions to clear LEN bytes of block TO. (A MEM
ad76cef8 2275 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
9de08200
RK
2276
2277static void
342e2b74 2278clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
9de08200 2279{
57814e5e
JJ
2280 struct store_by_pieces data;
2281
2c430630
RS
2282 if (len == 0)
2283 return;
2284
57814e5e 2285 data.constfun = clear_by_pieces_1;
df4ae160 2286 data.constfundata = NULL;
57814e5e
JJ
2287 data.len = len;
2288 data.to = to;
2289 store_by_pieces_1 (&data, align);
2290}
2291
2292/* Callback routine for clear_by_pieces.
2293 Return const0_rtx unconditionally. */
2294
2295static rtx
502b8322
AJ
2296clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2297 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2298 enum machine_mode mode ATTRIBUTE_UNUSED)
57814e5e
JJ
2299{
2300 return const0_rtx;
2301}
2302
2303/* Subroutine of clear_by_pieces and store_by_pieces.
2304 Generate several move instructions to store LEN bytes of block TO. (A MEM
ad76cef8 2305 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
57814e5e
JJ
2306
2307static void
502b8322
AJ
2308store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2309 unsigned int align ATTRIBUTE_UNUSED)
57814e5e
JJ
2310{
2311 rtx to_addr = XEXP (data->to, 0);
45d78e7f 2312 unsigned int max_size = STORE_MAX_PIECES + 1;
fbe1758d
AM
2313 enum machine_mode mode = VOIDmode, tmode;
2314 enum insn_code icode;
9de08200 2315
57814e5e
JJ
2316 data->offset = 0;
2317 data->to_addr = to_addr;
2318 data->autinc_to
9de08200
RK
2319 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2320 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2321
57814e5e
JJ
2322 data->explicit_inc_to = 0;
2323 data->reverse
9de08200 2324 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
57814e5e
JJ
2325 if (data->reverse)
2326 data->offset = data->len;
9de08200 2327
57814e5e 2328 /* If storing requires more than two move insns,
9de08200
RK
2329 copy addresses to registers (to make displacements shorter)
2330 and use post-increment if available. */
57814e5e 2331 if (!data->autinc_to
45d78e7f 2332 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
9de08200 2333 {
3a94c984 2334 /* Determine the main mode we'll be using. */
fbe1758d
AM
2335 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2336 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2337 if (GET_MODE_SIZE (tmode) < max_size)
2338 mode = tmode;
2339
57814e5e 2340 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
9de08200 2341 {
57814e5e
JJ
2342 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2343 data->autinc_to = 1;
2344 data->explicit_inc_to = -1;
9de08200 2345 }
3bdf5ad1 2346
57814e5e
JJ
2347 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2348 && ! data->autinc_to)
9de08200 2349 {
57814e5e
JJ
2350 data->to_addr = copy_addr_to_reg (to_addr);
2351 data->autinc_to = 1;
2352 data->explicit_inc_to = 1;
9de08200 2353 }
3bdf5ad1 2354
57814e5e
JJ
2355 if ( !data->autinc_to && CONSTANT_P (to_addr))
2356 data->to_addr = copy_addr_to_reg (to_addr);
9de08200
RK
2357 }
2358
f64d6991
DE
2359 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2360 if (align >= GET_MODE_ALIGNMENT (tmode))
2361 align = GET_MODE_ALIGNMENT (tmode);
2362 else
2363 {
2364 enum machine_mode xmode;
2365
2366 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2367 tmode != VOIDmode;
2368 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2369 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2370 || SLOW_UNALIGNED_ACCESS (tmode, align))
2371 break;
2372
2373 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2374 }
9de08200 2375
57814e5e 2376 /* First store what we can in the largest integer mode, then go to
9de08200
RK
2377 successively smaller modes. */
2378
2379 while (max_size > 1)
2380 {
9de08200
RK
2381 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2382 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2383 if (GET_MODE_SIZE (tmode) < max_size)
2384 mode = tmode;
2385
2386 if (mode == VOIDmode)
2387 break;
2388
2389 icode = mov_optab->handlers[(int) mode].insn_code;
19caa751 2390 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
57814e5e 2391 store_by_pieces_2 (GEN_FCN (icode), mode, data);
9de08200
RK
2392
2393 max_size = GET_MODE_SIZE (mode);
2394 }
2395
2396 /* The code above should have handled everything. */
5b0264cb 2397 gcc_assert (!data->len);
9de08200
RK
2398}
2399
57814e5e 2400/* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
9de08200
RK
2401 with move instructions for mode MODE. GENFUN is the gen_... function
2402 to make a move insn for that mode. DATA has all the other info. */
2403
2404static void
502b8322
AJ
2405store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2406 struct store_by_pieces *data)
9de08200 2407{
3bdf5ad1 2408 unsigned int size = GET_MODE_SIZE (mode);
57814e5e 2409 rtx to1, cst;
9de08200
RK
2410
2411 while (data->len >= size)
2412 {
3bdf5ad1
RK
2413 if (data->reverse)
2414 data->offset -= size;
9de08200 2415
3bdf5ad1 2416 if (data->autinc_to)
630036c6
JJ
2417 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2418 data->offset);
3a94c984 2419 else
f4ef873c 2420 to1 = adjust_address (data->to, mode, data->offset);
9de08200 2421
940da324 2422 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
57814e5e
JJ
2423 emit_insn (gen_add2_insn (data->to_addr,
2424 GEN_INT (-(HOST_WIDE_INT) size)));
9de08200 2425
57814e5e
JJ
2426 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2427 emit_insn ((*genfun) (to1, cst));
3bdf5ad1 2428
940da324 2429 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
9de08200 2430 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
9de08200 2431
3bdf5ad1
RK
2432 if (! data->reverse)
2433 data->offset += size;
9de08200
RK
2434
2435 data->len -= size;
2436 }
2437}
2438\f
19caa751 2439/* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
8ac61af7 2440 its length in bytes. */
e9a25f70
JL
2441
2442rtx
8148fe65 2443clear_storage (rtx object, rtx size, enum block_op_methods method)
bbf6f052 2444{
57aaef66
RH
2445 enum machine_mode mode = GET_MODE (object);
2446 unsigned int align;
e9a25f70 2447
8148fe65
JJ
2448 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2449
fcf1b822
RK
2450 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2451 just move a zero. Otherwise, do this a piece at a time. */
57aaef66 2452 if (mode != BLKmode
fcf1b822 2453 && GET_CODE (size) == CONST_INT
57aaef66 2454 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
bbf6f052 2455 {
57aaef66
RH
2456 rtx zero = CONST0_RTX (mode);
2457 if (zero != NULL)
2458 {
2459 emit_move_insn (object, zero);
2460 return NULL;
2461 }
2462
2463 if (COMPLEX_MODE_P (mode))
2464 {
2465 zero = CONST0_RTX (GET_MODE_INNER (mode));
2466 if (zero != NULL)
2467 {
2468 write_complex_part (object, zero, 0);
2469 write_complex_part (object, zero, 1);
2470 return NULL;
2471 }
2472 }
4ca79136
RH
2473 }
2474
57aaef66
RH
2475 if (size == const0_rtx)
2476 return NULL;
2477
2478 align = MEM_ALIGN (object);
2479
2480 if (GET_CODE (size) == CONST_INT
2481 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2482 clear_by_pieces (object, INTVAL (size), align);
57e84f18 2483 else if (set_storage_via_setmem (object, size, const0_rtx, align))
57aaef66
RH
2484 ;
2485 else
8148fe65
JJ
2486 return clear_storage_via_libcall (object, size,
2487 method == BLOCK_OP_TAILCALL);
57aaef66
RH
2488
2489 return NULL;
4ca79136
RH
2490}
2491
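A brief usage sketch (editorial, not from the source): zeroing a fixed-size BLKmode object, where OBJECT is assumed to be a MEM rtx and SIZE its length in bytes.

  /* Let clear_storage pick the strategy: a direct move of zero,
     clear_by_pieces, a setmem pattern, or a call to memset.  */
  clear_storage (object, GEN_INT (size), BLOCK_OP_NORMAL);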
8f99553f 2492/* A subroutine of clear_storage. Expand a call to memset.
4ca79136 2493 Return the return value of memset, 0 otherwise. */
9de08200 2494
4ca79136 2495static rtx
8148fe65 2496clear_storage_via_libcall (rtx object, rtx size, bool tailcall)
4ca79136
RH
2497{
2498 tree call_expr, arg_list, fn, object_tree, size_tree;
2499 enum machine_mode size_mode;
2500 rtx retval;
9de08200 2501
ad76cef8
PB
2502 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2503 place those into new pseudos into a VAR_DECL and use them later. */
52cf7115 2504
4ca79136 2505 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
52cf7115 2506
8f99553f 2507 size_mode = TYPE_MODE (sizetype);
4ca79136
RH
2508 size = convert_to_mode (size_mode, size, 1);
2509 size = copy_to_mode_reg (size_mode, size);
52cf7115 2510
4ca79136
RH
2511 /* It is incorrect to use the libcall calling conventions to call
2512 memset in this context. This could be a user call to memset and
2513 the user may wish to examine the return value from memset. For
2514 targets where libcalls and normal calls have different conventions
8f99553f 2515 for returning pointers, we could end up generating incorrect code. */
4bc973ae 2516
4ca79136 2517 object_tree = make_tree (ptr_type_node, object);
8f99553f 2518 size_tree = make_tree (sizetype, size);
4ca79136
RH
2519
2520 fn = clear_storage_libcall_fn (true);
2521 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
8f99553f 2522 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
4ca79136
RH
2523 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2524
2525 /* Now we have to build up the CALL_EXPR itself. */
2526 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
3244e67d
RS
2527 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2528 call_expr, arg_list, NULL_TREE);
8148fe65 2529 CALL_EXPR_TAILCALL (call_expr) = tailcall;
4ca79136 2530
84217346 2531 retval = expand_normal (call_expr);
4ca79136 2532
8f99553f 2533 return retval;
4ca79136
RH
2534}
2535
2536/* A subroutine of clear_storage_via_libcall. Create the tree node
2537 for the function we use for block clears. The first time FOR_CALL
2538 is true, we call assemble_external. */
2539
2540static GTY(()) tree block_clear_fn;
66c60e67 2541
9661b15f 2542void
502b8322 2543init_block_clear_fn (const char *asmspec)
4ca79136 2544{
9661b15f 2545 if (!block_clear_fn)
4ca79136 2546 {
9661b15f
JJ
2547 tree fn, args;
2548
8f99553f
JM
2549 fn = get_identifier ("memset");
2550 args = build_function_type_list (ptr_type_node, ptr_type_node,
2551 integer_type_node, sizetype,
2552 NULL_TREE);
4ca79136
RH
2553
2554 fn = build_decl (FUNCTION_DECL, fn, args);
2555 DECL_EXTERNAL (fn) = 1;
2556 TREE_PUBLIC (fn) = 1;
2557 DECL_ARTIFICIAL (fn) = 1;
2558 TREE_NOTHROW (fn) = 1;
2559
2560 block_clear_fn = fn;
bbf6f052 2561 }
e9a25f70 2562
9661b15f 2563 if (asmspec)
0e6df31e 2564 set_user_assembler_name (block_clear_fn, asmspec);
9661b15f
JJ
2565}
2566
2567static tree
502b8322 2568clear_storage_libcall_fn (int for_call)
9661b15f
JJ
2569{
2570 static bool emitted_extern;
2571
2572 if (!block_clear_fn)
2573 init_block_clear_fn (NULL);
2574
4ca79136
RH
2575 if (for_call && !emitted_extern)
2576 {
2577 emitted_extern = true;
0e6df31e 2578 make_decl_rtl (block_clear_fn);
9661b15f 2579 assemble_external (block_clear_fn);
4ca79136 2580 }
bbf6f052 2581
9661b15f 2582 return block_clear_fn;
4ca79136 2583}
57e84f18
AS
2584\f
2585/* Expand a setmem pattern; return true if successful. */
2586
2587bool
2588set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align)
2589{
2590 /* Try the most limited insn first, because there's no point
2591 including more than one in the machine description unless
2592 the more limited one has some advantage. */
2593
2594 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2595 enum machine_mode mode;
2596
2597 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2598 mode = GET_MODE_WIDER_MODE (mode))
2599 {
2600 enum insn_code code = setmem_optab[(int) mode];
2601 insn_operand_predicate_fn pred;
2602
2603 if (code != CODE_FOR_nothing
2604 /* We don't need MODE to be narrower than
2605 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2606 the mode mask, as it is returned by the macro, it will
2607 definitely be less than the actual mode mask. */
2608 && ((GET_CODE (size) == CONST_INT
2609 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2610 <= (GET_MODE_MASK (mode) >> 1)))
2611 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2612 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2613 || (*pred) (object, BLKmode))
2614 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
2615 || (*pred) (opalign, VOIDmode)))
2616 {
9ed92901
AS
2617 rtx opsize, opchar;
2618 enum machine_mode char_mode;
57e84f18
AS
2619 rtx last = get_last_insn ();
2620 rtx pat;
2621
2622 opsize = convert_to_mode (mode, size, 1);
2623 pred = insn_data[(int) code].operand[1].predicate;
2624 if (pred != 0 && ! (*pred) (opsize, mode))
2625 opsize = copy_to_mode_reg (mode, opsize);
2626
9ed92901
AS
2627 opchar = val;
2628 char_mode = insn_data[(int) code].operand[2].mode;
2629 if (char_mode != VOIDmode)
2630 {
2631 opchar = convert_to_mode (char_mode, opchar, 1);
2632 pred = insn_data[(int) code].operand[2].predicate;
2633 if (pred != 0 && ! (*pred) (opchar, char_mode))
2634 opchar = copy_to_mode_reg (char_mode, opchar);
2635 }
57e84f18
AS
2636
2637 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
2638 if (pat)
2639 {
2640 emit_insn (pat);
2641 return true;
2642 }
2643 else
2644 delete_insns_since (last);
2645 }
2646 }
2647
2648 return false;
2649}
2650
4ca79136 2651\f
1466e387
RH
2652/* Write to one of the components of the complex value CPLX. Write VAL to
2653 the real part if IMAG_P is false, and the imaginary part if it's true. */
bbf6f052 2654
1466e387
RH
2655static void
2656write_complex_part (rtx cplx, rtx val, bool imag_p)
2657{
ddf4e03f
RH
2658 enum machine_mode cmode;
2659 enum machine_mode imode;
2660 unsigned ibitsize;
2661
1466e387 2662 if (GET_CODE (cplx) == CONCAT)
1466e387 2663 {
ddf4e03f
RH
2664 emit_move_insn (XEXP (cplx, imag_p), val);
2665 return;
2666 }
2667
2668 cmode = GET_MODE (cplx);
2669 imode = GET_MODE_INNER (cmode);
2670 ibitsize = GET_MODE_BITSIZE (imode);
bbf6f052 2671
7a31c801
DE
2672 /* For MEMs simplify_gen_subreg may generate an invalid new address
2673 because, e.g., the original address is considered mode-dependent
2674 by the target, which restricts simplify_subreg from invoking
2675 adjust_address_nv. Instead of preparing fallback support for an
2676 invalid address, we call adjust_address_nv directly. */
2677 if (MEM_P (cplx))
22469409
BW
2678 {
2679 emit_move_insn (adjust_address_nv (cplx, imode,
2680 imag_p ? GET_MODE_SIZE (imode) : 0),
2681 val);
2682 return;
2683 }
7a31c801 2684
ddf4e03f
RH
2685 /* If the sub-object is at least word sized, then we know that subregging
2686 will work. This special case is important, since store_bit_field
2687 wants to operate on integer modes, and there's rarely an OImode to
2688 correspond to TCmode. */
36d7571c
EB
2689 if (ibitsize >= BITS_PER_WORD
2690 /* For hard regs we have exact predicates. Assume we can split
2691 the original object if it spans an even number of hard regs.
2692 This special case is important for SCmode on 64-bit platforms
2693 where the natural size of floating-point regs is 32-bit. */
2ca202e7 2694 || (REG_P (cplx)
36d7571c 2695 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
7a31c801 2696 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
ddf4e03f
RH
2697 {
2698 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2699 imag_p ? GET_MODE_SIZE (imode) : 0);
36d7571c
EB
2700 if (part)
2701 {
2702 emit_move_insn (part, val);
2703 return;
2704 }
2705 else
2706 /* simplify_gen_subreg may fail for sub-word MEMs. */
2707 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
1466e387 2708 }
36d7571c
EB
2709
2710 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
1466e387
RH
2711}
2712
2713/* Extract one of the components of the complex value CPLX. Extract the
2714 real part if IMAG_P is false, and the imaginary part if it's true. */
2715
2716static rtx
2717read_complex_part (rtx cplx, bool imag_p)
bbf6f052 2718{
1466e387
RH
2719 enum machine_mode cmode, imode;
2720 unsigned ibitsize;
bbf6f052 2721
1466e387
RH
2722 if (GET_CODE (cplx) == CONCAT)
2723 return XEXP (cplx, imag_p);
bbf6f052 2724
1466e387
RH
2725 cmode = GET_MODE (cplx);
2726 imode = GET_MODE_INNER (cmode);
2727 ibitsize = GET_MODE_BITSIZE (imode);
2728
2729 /* Special case reads from complex constants that got spilled to memory. */
2730 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
de1b33dd 2731 {
1466e387
RH
2732 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2733 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2734 {
2735 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2736 if (CONSTANT_CLASS_P (part))
2737 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2738 }
2739 }
51286de6 2740
7a31c801
DE
2741 /* For MEMs simplify_gen_subreg may generate an invalid new address
2742 because, e.g., the original address is considered mode-dependent
2743 by the target, which restricts simplify_subreg from invoking
2744 adjust_address_nv. Instead of preparing fallback support for an
2745 invalid address, we call adjust_address_nv directly. */
2746 if (MEM_P (cplx))
2747 return adjust_address_nv (cplx, imode,
2748 imag_p ? GET_MODE_SIZE (imode) : 0);
2749
ddf4e03f
RH
2750 /* If the sub-object is at least word sized, then we know that subregging
2751 will work. This special case is important, since extract_bit_field
2752 wants to operate on integer modes, and there's rarely an OImode to
2753 correspond to TCmode. */
36d7571c
EB
2754 if (ibitsize >= BITS_PER_WORD
2755 /* For hard regs we have exact predicates. Assume we can split
2756 the original object if it spans an even number of hard regs.
2757 This special case is important for SCmode on 64-bit platforms
2758 where the natural size of floating-point regs is 32-bit. */
2ca202e7 2759 || (REG_P (cplx)
36d7571c 2760 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
7a31c801 2761 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
ddf4e03f
RH
2762 {
2763 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2764 imag_p ? GET_MODE_SIZE (imode) : 0);
36d7571c
EB
2765 if (ret)
2766 return ret;
2767 else
2768 /* simplify_gen_subreg may fail for sub-word MEMs. */
2769 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
ddf4e03f
RH
2770 }
2771
1466e387
RH
2772 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2773 true, NULL_RTX, imode, imode);
2774}
2775\f
539eaa3a 2776/* A subroutine of emit_move_insn_1. Yet another lowpart generator.
074e6d01 2777 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
539eaa3a
RH
2778 represented in NEW_MODE. If FORCE is true, this will never happen, as
2779 we'll force-create a SUBREG if needed. */
0c19a26f 2780
1466e387 2781static rtx
074e6d01 2782emit_move_change_mode (enum machine_mode new_mode,
539eaa3a 2783 enum machine_mode old_mode, rtx x, bool force)
1466e387 2784{
074e6d01 2785 rtx ret;
1466e387 2786
ef7befe0 2787 if (MEM_P (x))
1466e387 2788 {
ef7befe0
BE
2789 /* We don't have to worry about changing the address since the
2790 size in bytes is supposed to be the same. */
2791 if (reload_in_progress)
2792 {
2793 /* Copy the MEM to change the mode and move any
2794 substitutions from the old MEM to the new one. */
2795 ret = adjust_address_nv (x, new_mode, 0);
2796 copy_replacements (x, ret);
2797 }
2798 else
2799 ret = adjust_address (x, new_mode, 0);
de1b33dd 2800 }
1466e387
RH
2801 else
2802 {
35fd3193 2803 /* Note that we do want simplify_subreg's behavior of validating
074e6d01
RH
2804 that the new mode is ok for a hard register. If we were to use
2805 simplify_gen_subreg, we would create the subreg, but would
2806 probably run into the target not being able to implement it. */
539eaa3a
RH
2807 /* Except, of course, when FORCE is true, when this is exactly what
2808 we want. Which is needed for CCmodes on some targets. */
2809 if (force)
2810 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
2811 else
2812 ret = simplify_subreg (new_mode, x, old_mode, 0);
1466e387 2813 }
bbf6f052 2814
074e6d01
RH
2815 return ret;
2816}
2817
1466e387
RH
2818/* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2819 an integer mode of the same size as MODE. Returns the instruction
2820 emitted, or NULL if such a move could not be generated. */
bbf6f052 2821
1466e387 2822static rtx
652b0932 2823emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
1466e387
RH
2824{
2825 enum machine_mode imode;
2826 enum insn_code code;
bbf6f052 2827
1466e387
RH
2828 /* There must exist a mode of the exact size we require. */
2829 imode = int_mode_for_mode (mode);
2830 if (imode == BLKmode)
2831 return NULL_RTX;
de1b33dd 2832
1466e387
RH
2833 /* The target must support moves in this mode. */
2834 code = mov_optab->handlers[imode].insn_code;
2835 if (code == CODE_FOR_nothing)
2836 return NULL_RTX;
de1b33dd 2837
652b0932 2838 x = emit_move_change_mode (imode, mode, x, force);
539eaa3a
RH
2839 if (x == NULL_RTX)
2840 return NULL_RTX;
652b0932 2841 y = emit_move_change_mode (imode, mode, y, force);
539eaa3a
RH
2842 if (y == NULL_RTX)
2843 return NULL_RTX;
2844 return emit_insn (GEN_FCN (code) (x, y));
261c4230
RS
2845}
2846
1466e387
RH
2847/* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
2848 Return an equivalent MEM that does not use an auto-increment. */
261c4230 2849
1466e387
RH
2850static rtx
2851emit_move_resolve_push (enum machine_mode mode, rtx x)
261c4230 2852{
1466e387
RH
2853 enum rtx_code code = GET_CODE (XEXP (x, 0));
2854 HOST_WIDE_INT adjust;
2855 rtx temp;
261c4230 2856
1466e387
RH
2857 adjust = GET_MODE_SIZE (mode);
2858#ifdef PUSH_ROUNDING
2859 adjust = PUSH_ROUNDING (adjust);
2860#endif
2861 if (code == PRE_DEC || code == POST_DEC)
2862 adjust = -adjust;
6541fe75
JJ
2863 else if (code == PRE_MODIFY || code == POST_MODIFY)
2864 {
2865 rtx expr = XEXP (XEXP (x, 0), 1);
2866 HOST_WIDE_INT val;
2867
2868 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
2869 gcc_assert (GET_CODE (XEXP (expr, 1)) == CONST_INT);
2870 val = INTVAL (XEXP (expr, 1));
2871 if (GET_CODE (expr) == MINUS)
2872 val = -val;
2873 gcc_assert (adjust == val || adjust == -val);
2874 adjust = val;
2875 }
76bbe028 2876
1466e387
RH
2877 /* Do not use anti_adjust_stack, since we don't want to update
2878 stack_pointer_delta. */
2879 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
2880 GEN_INT (adjust), stack_pointer_rtx,
2881 0, OPTAB_LIB_WIDEN);
2882 if (temp != stack_pointer_rtx)
2883 emit_move_insn (stack_pointer_rtx, temp);
bbf6f052 2884
1466e387 2885 switch (code)
7308a047 2886 {
1466e387
RH
2887 case PRE_INC:
2888 case PRE_DEC:
6541fe75 2889 case PRE_MODIFY:
1466e387
RH
2890 temp = stack_pointer_rtx;
2891 break;
2892 case POST_INC:
1466e387 2893 case POST_DEC:
6541fe75
JJ
2894 case POST_MODIFY:
2895 temp = plus_constant (stack_pointer_rtx, -adjust);
1466e387
RH
2896 break;
2897 default:
2898 gcc_unreachable ();
2899 }
7308a047 2900
1466e387
RH
2901 return replace_equiv_address (x, temp);
2902}
1a06f5fe 2903
1466e387
RH
2904/* A subroutine of emit_move_complex. Generate a move from Y into X.
2905 X is known to satisfy push_operand, and MODE is known to be complex.
2906 Returns the last instruction emitted. */
bb93b973 2907
1466e387
RH
2908static rtx
2909emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
2910{
2911 enum machine_mode submode = GET_MODE_INNER (mode);
2912 bool imag_first;
bb93b973 2913
1466e387
RH
2914#ifdef PUSH_ROUNDING
2915 unsigned int submodesize = GET_MODE_SIZE (submode);
bb93b973 2916
1466e387
RH
2917 /* In case we output to the stack, but the size is smaller than the
2918 machine can push exactly, we need to use move instructions. */
2919 if (PUSH_ROUNDING (submodesize) != submodesize)
2920 {
2921 x = emit_move_resolve_push (mode, x);
2922 return emit_move_insn (x, y);
2923 }
79ce92d7 2924#endif
7308a047 2925
1466e387
RH
2926 /* Note that the real part always precedes the imag part in memory
2927 regardless of machine's endianness. */
2928 switch (GET_CODE (XEXP (x, 0)))
2929 {
2930 case PRE_DEC:
2931 case POST_DEC:
2932 imag_first = true;
2933 break;
2934 case PRE_INC:
2935 case POST_INC:
2936 imag_first = false;
2937 break;
2938 default:
2939 gcc_unreachable ();
2940 }
beb72684 2941
1466e387
RH
2942 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2943 read_complex_part (y, imag_first));
2944 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2945 read_complex_part (y, !imag_first));
2946}
405f63da 2947
1466e387
RH
2948/* A subroutine of emit_move_insn_1. Generate a move from Y into X.
2949 MODE is known to be complex. Returns the last instruction emitted. */
beb72684 2950
1466e387
RH
2951static rtx
2952emit_move_complex (enum machine_mode mode, rtx x, rtx y)
2953{
2954 bool try_int;
405f63da 2955
1466e387
RH
2956 /* Need to take special care for pushes, to maintain proper ordering
2957 of the data, and possibly extra padding. */
2958 if (push_operand (x, mode))
2959 return emit_move_complex_push (mode, x, y);
7308a047 2960
1466e387
RH
2961 /* See if we can coerce the target into moving both values at once. */
2962
c6506442
DE
2963 /* Move floating point as parts. */
2964 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
2965 && mov_optab->handlers[GET_MODE_INNER (mode)].insn_code != CODE_FOR_nothing)
2966 try_int = false;
1466e387 2967 /* Not possible if the values are inherently not adjacent. */
c6506442 2968 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
1466e387
RH
2969 try_int = false;
2970 /* Is possible if both are registers (or subregs of registers). */
2971 else if (register_operand (x, mode) && register_operand (y, mode))
2972 try_int = true;
2973 /* If one of the operands is a memory, and alignment constraints
2974 are friendly enough, we may be able to do combined memory operations.
2975 We do not attempt this if Y is a constant because that combination is
2976 usually better with the by-parts thing below. */
2977 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
2978 && (!STRICT_ALIGNMENT
2979 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
2980 try_int = true;
2981 else
2982 try_int = false;
2983
2984 if (try_int)
a3600c71 2985 {
c6506442
DE
2986 rtx ret;
2987
2988 /* For memory to memory moves, optimal behavior can be had with the
2989 existing block move logic. */
2990 if (MEM_P (x) && MEM_P (y))
2991 {
2992 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
2993 BLOCK_OP_NO_LIBCALL);
2994 return get_last_insn ();
2995 }
2996
652b0932 2997 ret = emit_move_via_integer (mode, x, y, true);
1466e387
RH
2998 if (ret)
2999 return ret;
3000 }
a3600c71 3001
1466e387
RH
3002 /* Show the output dies here. This is necessary for SUBREGs
3003 of pseudos since we cannot track their lifetimes correctly;
3004 hard regs shouldn't appear here except as return values. */
3005 if (!reload_completed && !reload_in_progress
3006 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3007 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
a3600c71 3008
1466e387
RH
3009 write_complex_part (x, read_complex_part (y, false), false);
3010 write_complex_part (x, read_complex_part (y, true), true);
3011 return get_last_insn ();
3012}
a3600c71 3013
1466e387
RH
3014/* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3015 MODE is known to be MODE_CC. Returns the last instruction emitted. */
a3600c71 3016
1466e387
RH
3017static rtx
3018emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3019{
3020 rtx ret;
a3600c71 3021
1466e387
RH
3022 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3023 if (mode != CCmode)
3024 {
3025 enum insn_code code = mov_optab->handlers[CCmode].insn_code;
3026 if (code != CODE_FOR_nothing)
539eaa3a
RH
3027 {
3028 x = emit_move_change_mode (CCmode, mode, x, true);
3029 y = emit_move_change_mode (CCmode, mode, y, true);
3030 return emit_insn (GEN_FCN (code) (x, y));
3031 }
1466e387
RH
3032 }
3033
3034 /* Otherwise, find the MODE_INT mode of the same width. */
652b0932 3035 ret = emit_move_via_integer (mode, x, y, false);
1466e387
RH
3036 gcc_assert (ret != NULL);
3037 return ret;
3038}
3039
3040/* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3041 MODE is any multi-word or full-word mode that lacks a move_insn
3042 pattern. Note that you will get better code if you define such
3043 patterns, even if they must turn into multiple assembler instructions. */
3044
3045static rtx
3046emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3047{
3048 rtx last_insn = 0;
3049 rtx seq, inner;
3050 bool need_clobber;
3051 int i;
3052
3053 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3054
3055 /* If X is a push on the stack, do the push now and replace
3056 X with a reference to the stack pointer. */
3057 if (push_operand (x, mode))
3058 x = emit_move_resolve_push (mode, x);
3059
3060 /* If we are in reload, see if either operand is a MEM whose address
3061 is scheduled for replacement. */
3062 if (reload_in_progress && MEM_P (x)
3063 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3064 x = replace_equiv_address_nv (x, inner);
3065 if (reload_in_progress && MEM_P (y)
3066 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3067 y = replace_equiv_address_nv (y, inner);
3068
3069 start_sequence ();
3070
3071 need_clobber = false;
3072 for (i = 0;
3073 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3074 i++)
3075 {
3076 rtx xpart = operand_subword (x, i, 1, mode);
3077 rtx ypart = operand_subword (y, i, 1, mode);
3078
3079 /* If we can't get a part of Y, put Y into memory if it is a
535a42b1
NS
3080 constant. Otherwise, force it into a register. Then we must
3081 be able to get a part of Y. */
1466e387 3082 if (ypart == 0 && CONSTANT_P (y))
a3600c71 3083 {
aacd3885 3084 y = use_anchored_address (force_const_mem (mode, y));
1466e387 3085 ypart = operand_subword (y, i, 1, mode);
a3600c71 3086 }
1466e387
RH
3087 else if (ypart == 0)
3088 ypart = operand_subword_force (y, i, mode);
3089
3090 gcc_assert (xpart && ypart);
3091
3092 need_clobber |= (GET_CODE (xpart) == SUBREG);
502b8322 3093
1466e387 3094 last_insn = emit_move_insn (xpart, ypart);
a3600c71
HPN
3095 }
3096
1466e387
RH
3097 seq = get_insns ();
3098 end_sequence ();
3099
3100 /* Show the output dies here. This is necessary for SUBREGs
3101 of pseudos since we cannot track their lifetimes correctly;
3102 hard regs shouldn't appear here except as return values.
3103 We never want to emit such a clobber after reload. */
3104 if (x != y
3105 && ! (reload_in_progress || reload_completed)
3106 && need_clobber != 0)
3107 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3108
3109 emit_insn (seq);
3110
3111 return last_insn;
3112}
3113
3114/* Low level part of emit_move_insn.
3115 Called just like emit_move_insn, but assumes X and Y
3116 are basically valid. */
3117
3118rtx
3119emit_move_insn_1 (rtx x, rtx y)
3120{
3121 enum machine_mode mode = GET_MODE (x);
3122 enum insn_code code;
3123
3124 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3125
3126 code = mov_optab->handlers[mode].insn_code;
3127 if (code != CODE_FOR_nothing)
3128 return emit_insn (GEN_FCN (code) (x, y));
3129
3130 /* Expand complex moves by moving real part and imag part. */
3131 if (COMPLEX_MODE_P (mode))
3132 return emit_move_complex (mode, x, y);
3133
ef7befe0
BE
3134 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT)
3135 {
3136 rtx result = emit_move_via_integer (mode, x, y, true);
3137
3138 /* If we can't find an integer mode, use multi words. */
3139 if (result)
3140 return result;
3141 else
3142 return emit_move_multi_word (mode, x, y);
3143 }
3144
1466e387
RH
3145 if (GET_MODE_CLASS (mode) == MODE_CC)
3146 return emit_move_ccmode (mode, x, y);
3147
5581fc91
RS
3148 /* Try using a move pattern for the corresponding integer mode. This is
3149 only safe when simplify_subreg can convert MODE constants into integer
3150 constants. At present, it can only do this reliably if the value
3151 fits within a HOST_WIDE_INT. */
1466e387 3152 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
bbf6f052 3153 {
652b0932 3154 rtx ret = emit_move_via_integer (mode, x, y, false);
1466e387
RH
3155 if (ret)
3156 return ret;
3157 }
0fb7aeda 3158
1466e387
RH
3159 return emit_move_multi_word (mode, x, y);
3160}
918a6124 3161
1466e387
RH
3162/* Generate code to copy Y into X.
3163 Both Y and X must have the same mode, except that
3164 Y can be a constant with VOIDmode.
3165 This mode cannot be BLKmode; use emit_block_move for that.
3a94c984 3166
1466e387 3167 Return the last instruction emitted. */
3ef1eef4 3168
1466e387
RH
3169rtx
3170emit_move_insn (rtx x, rtx y)
3171{
3172 enum machine_mode mode = GET_MODE (x);
3173 rtx y_cst = NULL_RTX;
3174 rtx last_insn, set;
15a7a8ec 3175
1466e387
RH
3176 gcc_assert (mode != BLKmode
3177 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
bbf6f052 3178
1466e387
RH
3179 if (CONSTANT_P (y))
3180 {
3181 if (optimize
3182 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3183 && (last_insn = compress_float_constant (x, y)))
3184 return last_insn;
bbf6f052 3185
1466e387 3186 y_cst = y;
bbf6f052 3187
1466e387
RH
3188 if (!LEGITIMATE_CONSTANT_P (y))
3189 {
3190 y = force_const_mem (mode, y);
235ae7be 3191
1466e387
RH
3192 /* If the target's cannot_force_const_mem prevented the spill,
3193 assume that the target's move expanders will also take care
3194 of the non-legitimate constant. */
3195 if (!y)
3196 y = y_cst;
aacd3885
RS
3197 else
3198 y = use_anchored_address (y);
bbf6f052 3199 }
1466e387 3200 }
6551fa4d 3201
1466e387
RH
3202 /* If X or Y are memory references, verify that their addresses are valid
3203 for the machine. */
3204 if (MEM_P (x)
3205 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3206 && ! push_operand (x, GET_MODE (x)))
3207 || (flag_force_addr
3208 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3209 x = validize_mem (x);
235ae7be 3210
1466e387
RH
3211 if (MEM_P (y)
3212 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3213 || (flag_force_addr
3214 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3215 y = validize_mem (y);
235ae7be 3216
1466e387 3217 gcc_assert (mode != BLKmode);
235ae7be 3218
1466e387
RH
3219 last_insn = emit_move_insn_1 (x, y);
3220
3221 if (y_cst && REG_P (x)
3222 && (set = single_set (last_insn)) != NULL_RTX
3223 && SET_DEST (set) == x
3224 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3225 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3226
3227 return last_insn;
bbf6f052 3228}
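/* Editorial note: illustrative sketch, not part of the original source.
   It shows the entry point above in its simplest form: moving a constant
   into a fresh pseudo.  If the constant were not LEGITIMATE_CONSTANT_P it
   would be forced into the constant pool, and for a REG destination a
   REG_EQUAL note recording the original value may be attached to the
   emitted insn.  */
#if 0
static void
example_emit_move_of_constant (void)
{
  rtx reg = gen_reg_rtx (SImode);
  emit_move_insn (reg, GEN_INT (42));
}
#endif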
51286de6
RH
3229
3230/* If Y is representable exactly in a narrower mode, and the target can
3231 perform the extension directly from constant or memory, then emit the
3232 move as an extension. */
3233
3234static rtx
502b8322 3235compress_float_constant (rtx x, rtx y)
51286de6
RH
3236{
3237 enum machine_mode dstmode = GET_MODE (x);
3238 enum machine_mode orig_srcmode = GET_MODE (y);
3239 enum machine_mode srcmode;
3240 REAL_VALUE_TYPE r;
e4541b7a 3241 int oldcost, newcost;
51286de6
RH
3242
3243 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3244
e4541b7a
DJ
3245 if (LEGITIMATE_CONSTANT_P (y))
3246 oldcost = rtx_cost (y, SET);
3247 else
3248 oldcost = rtx_cost (force_const_mem (dstmode, y), SET);
3249
51286de6
RH
3250 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3251 srcmode != orig_srcmode;
3252 srcmode = GET_MODE_WIDER_MODE (srcmode))
3253 {
3254 enum insn_code ic;
3255 rtx trunc_y, last_insn;
3256
3257 /* Skip if the target can't extend this way. */
3258 ic = can_extend_p (dstmode, srcmode, 0);
3259 if (ic == CODE_FOR_nothing)
3260 continue;
3261
3262 /* Skip if the narrowed value isn't exact. */
3263 if (! exact_real_truncate (srcmode, &r))
3264 continue;
3265
3266 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3267
3268 if (LEGITIMATE_CONSTANT_P (trunc_y))
3269 {
3270 /* Skip if the target needs extra instructions to perform
3271 the extension. */
3272 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3273 continue;
e4541b7a
DJ
3274 /* This is valid, but may not be cheaper than the original. */
3275 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3276 if (oldcost < newcost)
3277 continue;
51286de6
RH
3278 }
3279 else if (float_extend_from_mem[dstmode][srcmode])
e4541b7a
DJ
3280 {
3281 trunc_y = force_const_mem (srcmode, trunc_y);
3282 /* This is valid, but may not be cheaper than the original. */
3283 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3284 if (oldcost < newcost)
3285 continue;
3286 trunc_y = validize_mem (trunc_y);
3287 }
51286de6
RH
3288 else
3289 continue;
e4541b7a 3290
51286de6
RH
3291 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3292 last_insn = get_last_insn ();
3293
f8cfc6aa 3294 if (REG_P (x))
0c19a26f 3295 set_unique_reg_note (last_insn, REG_EQUAL, y);
51286de6
RH
3296
3297 return last_insn;
3298 }
3299
3300 return NULL_RTX;
3301}
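/* Editorial note: illustrative sketch, not part of the original source.
   The DFmode constant 1.0 is exactly representable in SFmode, so on a
   target whose extendsfdf2 pattern accepts a memory operand the move
   below can be emitted as an SFmode constant-pool load followed by a
   FLOAT_EXTEND, which is the case compress_float_constant looks for
   (subject to the rtx_cost comparison above).  */
#if 0
static void
example_compressed_double_constant (void)
{
  rtx reg = gen_reg_rtx (DFmode);
  emit_move_insn (reg, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, DFmode));
}
#endif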
bbf6f052
RK
3302\f
3303/* Pushing data onto the stack. */
3304
3305/* Push a block of length SIZE (perhaps variable)
3306 and return an rtx to address the beginning of the block.
bbf6f052
RK
3307 The value may be virtual_outgoing_args_rtx.
3308
3309 EXTRA is the number of bytes of padding to push in addition to SIZE.
3310 BELOW nonzero means this padding comes at low addresses;
3311 otherwise, the padding comes at high addresses. */
3312
3313rtx
502b8322 3314push_block (rtx size, int extra, int below)
bbf6f052 3315{
b3694847 3316 rtx temp;
88f63c77
RK
3317
3318 size = convert_modes (Pmode, ptr_mode, size, 1);
bbf6f052
RK
3319 if (CONSTANT_P (size))
3320 anti_adjust_stack (plus_constant (size, extra));
f8cfc6aa 3321 else if (REG_P (size) && extra == 0)
bbf6f052
RK
3322 anti_adjust_stack (size);
3323 else
3324 {
ce48579b 3325 temp = copy_to_mode_reg (Pmode, size);
bbf6f052 3326 if (extra != 0)
906c4e36 3327 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
bbf6f052
RK
3328 temp, 0, OPTAB_LIB_WIDEN);
3329 anti_adjust_stack (temp);
3330 }
3331
f73ad30e 3332#ifndef STACK_GROWS_DOWNWARD
f73ad30e 3333 if (0)
f73ad30e
JH
3334#else
3335 if (1)
bbf6f052 3336#endif
f73ad30e 3337 {
f73ad30e
JH
3338 temp = virtual_outgoing_args_rtx;
3339 if (extra != 0 && below)
3340 temp = plus_constant (temp, extra);
3341 }
3342 else
3343 {
3344 if (GET_CODE (size) == CONST_INT)
3345 temp = plus_constant (virtual_outgoing_args_rtx,
3a94c984 3346 -INTVAL (size) - (below ? 0 : extra));
f73ad30e
JH
3347 else if (extra != 0 && !below)
3348 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3bdf5ad1 3349 negate_rtx (Pmode, plus_constant (size, extra)));
f73ad30e
JH
3350 else
3351 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3352 negate_rtx (Pmode, size));
3353 }
bbf6f052
RK
3354
3355 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3356}
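/* Editorial note: illustrative sketch, not part of the original source.
   It reserves 16 bytes of outgoing-argument space with push_block and
   wraps the returned address in a BLKmode MEM, which is how the BLKmode
   branch of emit_push_insn below uses it.  */
#if 0
static void
example_push_block_usage (void)
{
  rtx addr = push_block (GEN_INT (16), 0, 0);
  rtx blk = gen_rtx_MEM (BLKmode, addr);

  (void) blk;	/* A caller would now emit_block_move into BLK.  */
}
#endif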
3357
21d93687
RK
3358#ifdef PUSH_ROUNDING
3359
566aa174 3360/* Emit single push insn. */
21d93687 3361
566aa174 3362static void
502b8322 3363emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
566aa174 3364{
566aa174 3365 rtx dest_addr;
918a6124 3366 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
566aa174 3367 rtx dest;
371b8fc0
JH
3368 enum insn_code icode;
3369 insn_operand_predicate_fn pred;
566aa174 3370
371b8fc0
JH
3371 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3372 /* If there is push pattern, use it. Otherwise try old way of throwing
3373 MEM representing push operation to move expander. */
3374 icode = push_optab->handlers[(int) mode].insn_code;
3375 if (icode != CODE_FOR_nothing)
3376 {
3377 if (((pred = insn_data[(int) icode].operand[0].predicate)
505ddab6 3378 && !((*pred) (x, mode))))
371b8fc0
JH
3379 x = force_reg (mode, x);
3380 emit_insn (GEN_FCN (icode) (x));
3381 return;
3382 }
566aa174
JH
3383 if (GET_MODE_SIZE (mode) == rounded_size)
3384 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
329d586f
KH
3385 /* If we are to pad downward, adjust the stack pointer first and
3386 then store X into the stack location using an offset. This is
3387 because emit_move_insn does not know how to pad; it does not have
3388 access to type. */
3389 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3390 {
3391 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3392 HOST_WIDE_INT offset;
3393
3394 emit_move_insn (stack_pointer_rtx,
3395 expand_binop (Pmode,
3396#ifdef STACK_GROWS_DOWNWARD
3397 sub_optab,
3398#else
3399 add_optab,
3400#endif
3401 stack_pointer_rtx,
3402 GEN_INT (rounded_size),
3403 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3404
3405 offset = (HOST_WIDE_INT) padding_size;
3406#ifdef STACK_GROWS_DOWNWARD
3407 if (STACK_PUSH_CODE == POST_DEC)
3408 /* We have already decremented the stack pointer, so get the
3409 previous value. */
3410 offset += (HOST_WIDE_INT) rounded_size;
3411#else
3412 if (STACK_PUSH_CODE == POST_INC)
3413 /* We have already incremented the stack pointer, so get the
3414 previous value. */
3415 offset -= (HOST_WIDE_INT) rounded_size;
3416#endif
3417 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3418 }
566aa174
JH
3419 else
3420 {
3421#ifdef STACK_GROWS_DOWNWARD
329d586f 3422 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
566aa174 3423 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
505ddab6 3424 GEN_INT (-(HOST_WIDE_INT) rounded_size));
566aa174 3425#else
329d586f 3426 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
566aa174
JH
3427 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3428 GEN_INT (rounded_size));
3429#endif
3430 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3431 }
3432
3433 dest = gen_rtx_MEM (mode, dest_addr);
3434
566aa174
JH
3435 if (type != 0)
3436 {
3437 set_mem_attributes (dest, type, 1);
c3d32120
RK
3438
3439 if (flag_optimize_sibling_calls)
3440 /* Function incoming arguments may overlap with sibling call
3441 outgoing arguments and we cannot allow reordering of reads
3442 from function arguments with stores to outgoing arguments
3443 of sibling calls. */
3444 set_mem_alias_set (dest, 0);
566aa174
JH
3445 }
3446 emit_move_insn (dest, x);
566aa174 3447}
21d93687 3448#endif
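/* Editorial note: illustrative sketch, not part of the original source,
   and meaningful only on PUSH_ROUNDING targets where the function above
   is compiled in.  It pushes one SImode word with no type information;
   stack_pointer_delta and size rounding are handled internally.  */
#if 0
static void
example_single_push (void)
{
  emit_single_push_insn (SImode, gen_reg_rtx (SImode), NULL_TREE);
}
#endif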
566aa174 3449
bbf6f052
RK
3450/* Generate code to push X onto the stack, assuming it has mode MODE and
3451 type TYPE.
3452 MODE is redundant except when X is a CONST_INT (since they don't
3453 carry mode info).
3454 SIZE is an rtx for the size of data to be copied (in bytes),
3455 needed only if X is BLKmode.
3456
f1eaaf73 3457 ALIGN (in bits) is maximum alignment we can assume.
bbf6f052 3458
cd048831 3459 If PARTIAL and REG are both nonzero, then copy that many of the first
78a52f11
RH
3460 bytes of X into registers starting with REG, and push the rest of X.
3461 The amount of space pushed is decreased by PARTIAL bytes.
bbf6f052 3462 REG must be a hard register in this case.
cd048831
RK
 3463 If REG is zero but PARTIAL is not, take all the other actions for an
3464 argument partially in registers, but do not actually load any
3465 registers.
bbf6f052
RK
3466
3467 EXTRA is the amount in bytes of extra space to leave next to this arg.
6dc42e49 3468 This is ignored if an argument block has already been allocated.
bbf6f052
RK
3469
3470 On a machine that lacks real push insns, ARGS_ADDR is the address of
3471 the bottom of the argument block for this call. We use indexing off there
 3472 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3473 argument block has not been preallocated.
3474
e5e809f4
JL
3475 ARGS_SO_FAR is the size of args previously pushed for this call.
3476
3477 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3478 for arguments passed in registers. If nonzero, it will be the number
3479 of bytes required. */
bbf6f052
RK
3480
3481void
502b8322
AJ
3482emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3483 unsigned int align, int partial, rtx reg, int extra,
3484 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3485 rtx alignment_pad)
bbf6f052
RK
3486{
3487 rtx xinner;
3488 enum direction stack_direction
3489#ifdef STACK_GROWS_DOWNWARD
3490 = downward;
3491#else
3492 = upward;
3493#endif
3494
3495 /* Decide where to pad the argument: `downward' for below,
3496 `upward' for above, or `none' for don't pad it.
3497 Default is below for small data on big-endian machines; else above. */
3498 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3499
0fb7aeda 3500 /* Invert direction if stack is post-decrement.
9e0e11bf
GK
3501 FIXME: why? */
3502 if (STACK_PUSH_CODE == POST_DEC)
bbf6f052
RK
3503 if (where_pad != none)
3504 where_pad = (where_pad == downward ? upward : downward);
3505
ad76cef8 3506 xinner = x;
bbf6f052
RK
3507
3508 if (mode == BLKmode)
3509 {
3510 /* Copy a block into the stack, entirely or partially. */
3511
b3694847 3512 rtx temp;
78a52f11 3513 int used;
531547e9 3514 int offset;
bbf6f052 3515 int skip;
3a94c984 3516
78a52f11
RH
3517 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3518 used = partial - offset;
531547e9 3519
5b0264cb 3520 gcc_assert (size);
bbf6f052 3521
bbf6f052
RK
3522 /* USED is now the # of bytes we need not copy to the stack
3523 because registers will take care of them. */
3524
3525 if (partial != 0)
f4ef873c 3526 xinner = adjust_address (xinner, BLKmode, used);
bbf6f052
RK
3527
3528 /* If the partial register-part of the arg counts in its stack size,
3529 skip the part of stack space corresponding to the registers.
3530 Otherwise, start copying to the beginning of the stack space,
3531 by setting SKIP to 0. */
e5e809f4 3532 skip = (reg_parm_stack_space == 0) ? 0 : used;
bbf6f052
RK
3533
3534#ifdef PUSH_ROUNDING
3535 /* Do it with several push insns if that doesn't take lots of insns
3536 and if there is no difficulty with push insns that skip bytes
3537 on the stack for alignment purposes. */
3538 if (args_addr == 0
f73ad30e 3539 && PUSH_ARGS
bbf6f052
RK
3540 && GET_CODE (size) == CONST_INT
3541 && skip == 0
f26aca6d 3542 && MEM_ALIGN (xinner) >= align
15914757 3543 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
bbf6f052
RK
3544 /* Here we avoid the case of a structure whose weak alignment
3545 forces many pushes of a small amount of data,
3546 and such small pushes do rounding that causes trouble. */
e1565e65 3547 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
19caa751 3548 || align >= BIGGEST_ALIGNMENT
f1eaaf73
DE
3549 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3550 == (align / BITS_PER_UNIT)))
bbf6f052
RK
3551 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3552 {
3553 /* Push padding now if padding above and stack grows down,
3554 or if padding below and stack grows up.
3555 But if space already allocated, this has already been done. */
3556 if (extra && args_addr == 0
3557 && where_pad != none && where_pad != stack_direction)
906c4e36 3558 anti_adjust_stack (GEN_INT (extra));
bbf6f052 3559
8fd3cf4e 3560 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
bbf6f052
RK
3561 }
3562 else
3a94c984 3563#endif /* PUSH_ROUNDING */
bbf6f052 3564 {
7ab923cc
JJ
3565 rtx target;
3566
bbf6f052
RK
3567 /* Otherwise make space on the stack and copy the data
3568 to the address of that space. */
3569
3570 /* Deduct words put into registers from the size we must copy. */
3571 if (partial != 0)
3572 {
3573 if (GET_CODE (size) == CONST_INT)
906c4e36 3574 size = GEN_INT (INTVAL (size) - used);
bbf6f052
RK
3575 else
3576 size = expand_binop (GET_MODE (size), sub_optab, size,
906c4e36
RK
3577 GEN_INT (used), NULL_RTX, 0,
3578 OPTAB_LIB_WIDEN);
bbf6f052
RK
3579 }
3580
3581 /* Get the address of the stack space.
3582 In this case, we do not deal with EXTRA separately.
3583 A single stack adjust will do. */
3584 if (! args_addr)
3585 {
3586 temp = push_block (size, extra, where_pad == downward);
3587 extra = 0;
3588 }
3589 else if (GET_CODE (args_so_far) == CONST_INT)
3590 temp = memory_address (BLKmode,
3591 plus_constant (args_addr,
3592 skip + INTVAL (args_so_far)));
3593 else
3594 temp = memory_address (BLKmode,
38a448ca
RH
3595 plus_constant (gen_rtx_PLUS (Pmode,
3596 args_addr,
3597 args_so_far),
bbf6f052 3598 skip));
4ca79136
RH
3599
3600 if (!ACCUMULATE_OUTGOING_ARGS)
3601 {
3602 /* If the source is referenced relative to the stack pointer,
3603 copy it to another register to stabilize it. We do not need
3604 to do this if we know that we won't be changing sp. */
3605
3606 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3607 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3608 temp = copy_to_reg (temp);
3609 }
3610
3a94c984 3611 target = gen_rtx_MEM (BLKmode, temp);
7ab923cc 3612
2bb16349
RH
3613 /* We do *not* set_mem_attributes here, because incoming arguments
3614 may overlap with sibling call outgoing arguments and we cannot
3615 allow reordering of reads from function arguments with stores
3616 to outgoing arguments of sibling calls. We do, however, want
3617 to record the alignment of the stack slot. */
44bb111a
RH
3618 /* ALIGN may well be better aligned than TYPE, e.g. due to
3619 PARM_BOUNDARY. Assume the caller isn't lying. */
3620 set_mem_align (target, align);
4ca79136 3621
44bb111a 3622 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
bbf6f052
RK
3623 }
3624 }
3625 else if (partial > 0)
3626 {
3627 /* Scalar partly in registers. */
3628
3629 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3630 int i;
3631 int not_stack;
78a52f11 3632 /* # bytes of start of argument
bbf6f052 3633 that we must make space for but need not store. */
ac7e839c 3634 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
bbf6f052
RK
3635 int args_offset = INTVAL (args_so_far);
3636 int skip;
3637
3638 /* Push padding now if padding above and stack grows down,
3639 or if padding below and stack grows up.
3640 But if space already allocated, this has already been done. */
3641 if (extra && args_addr == 0
3642 && where_pad != none && where_pad != stack_direction)
906c4e36 3643 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
3644
3645 /* If we make space by pushing it, we might as well push
3646 the real data. Otherwise, we can leave OFFSET nonzero
3647 and leave the space uninitialized. */
3648 if (args_addr == 0)
3649 offset = 0;
3650
3651 /* Now NOT_STACK gets the number of words that we don't need to
40b0345d 3652 allocate on the stack. Convert OFFSET to words too. */
78a52f11 3653 not_stack = (partial - offset) / UNITS_PER_WORD;
ac7e839c 3654 offset /= UNITS_PER_WORD;
bbf6f052
RK
3655
3656 /* If the partial register-part of the arg counts in its stack size,
3657 skip the part of stack space corresponding to the registers.
3658 Otherwise, start copying to the beginning of the stack space,
3659 by setting SKIP to 0. */
e5e809f4 3660 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
bbf6f052
RK
3661
3662 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3663 x = validize_mem (force_const_mem (mode, x));
3664
3665 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3666 SUBREGs of such registers are not allowed. */
f8cfc6aa 3667 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
bbf6f052
RK
3668 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3669 x = copy_to_reg (x);
3670
3671 /* Loop over all the words allocated on the stack for this arg. */
3672 /* We can do it by words, because any scalar bigger than a word
3673 has a size a multiple of a word. */
3674#ifndef PUSH_ARGS_REVERSED
3675 for (i = not_stack; i < size; i++)
3676#else
3677 for (i = size - 1; i >= not_stack; i--)
3678#endif
3679 if (i >= not_stack + offset)
3680 emit_push_insn (operand_subword_force (x, i, mode),
906c4e36
RK
3681 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3682 0, args_addr,
3683 GEN_INT (args_offset + ((i - not_stack + skip)
e5e809f4 3684 * UNITS_PER_WORD)),
4fc026cd 3685 reg_parm_stack_space, alignment_pad);
bbf6f052
RK
3686 }
3687 else
3688 {
3689 rtx addr;
3bdf5ad1 3690 rtx dest;
bbf6f052
RK
3691
3692 /* Push padding now if padding above and stack grows down,
3693 or if padding below and stack grows up.
3694 But if space already allocated, this has already been done. */
3695 if (extra && args_addr == 0
3696 && where_pad != none && where_pad != stack_direction)
906c4e36 3697 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
3698
3699#ifdef PUSH_ROUNDING
f73ad30e 3700 if (args_addr == 0 && PUSH_ARGS)
566aa174 3701 emit_single_push_insn (mode, x, type);
bbf6f052
RK
3702 else
3703#endif
921b3427
RK
3704 {
3705 if (GET_CODE (args_so_far) == CONST_INT)
3706 addr
3707 = memory_address (mode,
3a94c984 3708 plus_constant (args_addr,
921b3427 3709 INTVAL (args_so_far)));
3a94c984 3710 else
38a448ca
RH
3711 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3712 args_so_far));
566aa174 3713 dest = gen_rtx_MEM (mode, addr);
2bb16349
RH
3714
3715 /* We do *not* set_mem_attributes here, because incoming arguments
3716 may overlap with sibling call outgoing arguments and we cannot
3717 allow reordering of reads from function arguments with stores
3718 to outgoing arguments of sibling calls. We do, however, want
3719 to record the alignment of the stack slot. */
3720 /* ALIGN may well be better aligned than TYPE, e.g. due to
3721 PARM_BOUNDARY. Assume the caller isn't lying. */
3722 set_mem_align (dest, align);
bbf6f052 3723
566aa174 3724 emit_move_insn (dest, x);
566aa174 3725 }
bbf6f052
RK
3726 }
3727
bbf6f052
RK
3728 /* If part should go in registers, copy that part
3729 into the appropriate registers. Do this now, at the end,
3730 since mem-to-mem copies above may do function calls. */
cd048831 3731 if (partial > 0 && reg != 0)
fffa9c1d
JW
3732 {
3733 /* Handle calls that pass values in multiple non-contiguous locations.
3734 The Irix 6 ABI has examples of this. */
3735 if (GET_CODE (reg) == PARALLEL)
6e985040 3736 emit_group_load (reg, x, type, -1);
fffa9c1d 3737 else
78a52f11
RH
3738 {
3739 gcc_assert (partial % UNITS_PER_WORD == 0);
3740 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
3741 }
fffa9c1d 3742 }
bbf6f052
RK
3743
3744 if (extra && args_addr == 0 && where_pad == stack_direction)
906c4e36 3745 anti_adjust_stack (GEN_INT (extra));
3a94c984 3746
3ea2292a 3747 if (alignment_pad && args_addr == 0)
4fc026cd 3748 anti_adjust_stack (alignment_pad);
bbf6f052
RK
3749}
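/* Editorial note: illustrative sketch, not part of the original source.
   It names the parameters documented above for the simplest case: one
   SImode argument, no partial-register part, no preallocated argument
   block and no extra padding, on a target that uses push instructions
   (PUSH_ARGS).  The alignment is given in bits, as the comment says.  */
#if 0
static void
example_push_scalar_arg (void)
{
  rtx arg = gen_reg_rtx (SImode);

  emit_push_insn (arg, SImode, integer_type_node, NULL_RTX,
		  PARM_BOUNDARY, 0, NULL_RTX, 0, NULL_RTX,
		  const0_rtx, 0, NULL_RTX);
}
#endif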
3750\f
296b4ed9
RK
3751/* Return X if X can be used as a subtarget in a sequence of arithmetic
3752 operations. */
3753
3754static rtx
502b8322 3755get_subtarget (rtx x)
296b4ed9 3756{
7c27e184
PB
3757 return (optimize
3758 || x == 0
296b4ed9 3759 /* Only registers can be subtargets. */
f8cfc6aa 3760 || !REG_P (x)
296b4ed9
RK
3761 /* Don't use hard regs to avoid extending their life. */
3762 || REGNO (x) < FIRST_PSEUDO_REGISTER
296b4ed9
RK
3763 ? 0 : x);
3764}
3765
8c1cfd5a
RH
3766/* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
3767 FIELD is a bitfield. Returns true if the optimization was successful,
3768 and there's nothing else to do. */
3769
3770static bool
3771optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3772 unsigned HOST_WIDE_INT bitpos,
3773 enum machine_mode mode1, rtx str_rtx,
3774 tree to, tree src)
3775{
3776 enum machine_mode str_mode = GET_MODE (str_rtx);
3777 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
3778 tree op0, op1;
3779 rtx value, result;
3780 optab binop;
3781
3782 if (mode1 != VOIDmode
3783 || bitsize >= BITS_PER_WORD
3784 || str_bitsize > BITS_PER_WORD
3785 || TREE_SIDE_EFFECTS (to)
3786 || TREE_THIS_VOLATILE (to))
3787 return false;
3788
3789 STRIP_NOPS (src);
3790 if (!BINARY_CLASS_P (src)
3791 || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
3792 return false;
3793
3794 op0 = TREE_OPERAND (src, 0);
3795 op1 = TREE_OPERAND (src, 1);
3796 STRIP_NOPS (op0);
3797
3798 if (!operand_equal_p (to, op0, 0))
3799 return false;
3800
3801 if (MEM_P (str_rtx))
3802 {
3803 unsigned HOST_WIDE_INT offset1;
3804
3805 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
3806 str_mode = word_mode;
3807 str_mode = get_best_mode (bitsize, bitpos,
3808 MEM_ALIGN (str_rtx), str_mode, 0);
3809 if (str_mode == VOIDmode)
3810 return false;
3811 str_bitsize = GET_MODE_BITSIZE (str_mode);
3812
3813 offset1 = bitpos;
3814 bitpos %= str_bitsize;
3815 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
3816 str_rtx = adjust_address (str_rtx, str_mode, offset1);
3817 }
3818 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
3819 return false;
3820
3821 /* If the bit field covers the whole REG/MEM, store_field
3822 will likely generate better code. */
3823 if (bitsize >= str_bitsize)
3824 return false;
3825
3826 /* We can't handle fields split across multiple entities. */
3827 if (bitpos + bitsize > str_bitsize)
3828 return false;
3829
3830 if (BYTES_BIG_ENDIAN)
3831 bitpos = str_bitsize - bitpos - bitsize;
3832
3833 switch (TREE_CODE (src))
3834 {
3835 case PLUS_EXPR:
3836 case MINUS_EXPR:
3837 /* For now, just optimize the case of the topmost bitfield
3838 where we don't need to do any masking and also
3839 1 bit bitfields where xor can be used.
3840 We might win by one instruction for the other bitfields
3841 too if insv/extv instructions aren't used, so that
3842 can be added later. */
3843 if (bitpos + bitsize != str_bitsize
3844 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
3845 break;
3846
3847 value = expand_expr (op1, NULL_RTX, str_mode, 0);
3848 value = convert_modes (str_mode,
3849 TYPE_MODE (TREE_TYPE (op1)), value,
3850 TYPE_UNSIGNED (TREE_TYPE (op1)));
3851
3852 /* We may be accessing data outside the field, which means
3853 we can alias adjacent data. */
3854 if (MEM_P (str_rtx))
3855 {
3856 str_rtx = shallow_copy_rtx (str_rtx);
3857 set_mem_alias_set (str_rtx, 0);
3858 set_mem_expr (str_rtx, 0);
3859 }
3860
3861 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
3862 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
3863 {
3864 value = expand_and (str_mode, value, const1_rtx, NULL);
3865 binop = xor_optab;
3866 }
3867 value = expand_shift (LSHIFT_EXPR, str_mode, value,
3868 build_int_cst (NULL_TREE, bitpos),
3869 NULL_RTX, 1);
3870 result = expand_binop (str_mode, binop, str_rtx,
3871 value, str_rtx, 1, OPTAB_WIDEN);
3872 if (result != str_rtx)
3873 emit_move_insn (str_rtx, result);
3874 return true;
3875
92fb2d32
KH
3876 case BIT_IOR_EXPR:
3877 case BIT_XOR_EXPR:
3878 if (TREE_CODE (op1) != INTEGER_CST)
3879 break;
3880 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), 0);
3881 value = convert_modes (GET_MODE (str_rtx),
3882 TYPE_MODE (TREE_TYPE (op1)), value,
3883 TYPE_UNSIGNED (TREE_TYPE (op1)));
3884
3885 /* We may be accessing data outside the field, which means
3886 we can alias adjacent data. */
3887 if (MEM_P (str_rtx))
3888 {
3889 str_rtx = shallow_copy_rtx (str_rtx);
3890 set_mem_alias_set (str_rtx, 0);
3891 set_mem_expr (str_rtx, 0);
3892 }
3893
3894 binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
3895 if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3896 {
3897 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
3898 - 1);
3899 value = expand_and (GET_MODE (str_rtx), value, mask,
3900 NULL_RTX);
3901 }
3902 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
3903 build_int_cst (NULL_TREE, bitpos),
3904 NULL_RTX, 1);
3905 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
3906 value, str_rtx, 1, OPTAB_WIDEN);
3907 if (result != str_rtx)
3908 emit_move_insn (str_rtx, result);
3909 return true;
3910
8c1cfd5a
RH
3911 default:
3912 break;
3913 }
3914
3915 return false;
3916}
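/* Editorial note: illustrative source-level example, not part of the
   original file.  These are the user-level stores the optimization above
   targets: an update of the topmost bitfield of a word becomes a plain
   PLUS/MINUS on the containing word, and a 1-bit field (or a constant
   IOR/XOR) is done with xor/ior of a shifted constant, with no
   extract/insert of the field.  */
#if 0
struct example_bits { unsigned flag : 1; unsigned rest : 31; };

static void
example_bitfield_update (struct example_bits *p)
{
  p->rest += 1;		/* Topmost field: plain add on the word.  */
  p->flag ^= 1;		/* Single bit: xor on the word.  */
}
#endif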
3917
3918
e836a5a2 3919/* Expand an assignment that stores the value of FROM into TO. */
bbf6f052 3920
e836a5a2
KH
3921void
3922expand_assignment (tree to, tree from)
bbf6f052 3923{
b3694847 3924 rtx to_rtx = 0;
bbf6f052
RK
3925 rtx result;
3926
3927 /* Don't crash if the lhs of the assignment was erroneous. */
3928
3929 if (TREE_CODE (to) == ERROR_MARK)
709f5be1 3930 {
84217346 3931 result = expand_normal (from);
e836a5a2 3932 return;
709f5be1 3933 }
bbf6f052
RK
3934
3935 /* Assignment of a structure component needs special treatment
3936 if the structure component's rtx is not simply a MEM.
6be58303
JW
3937 Assignment of an array element at a constant index, and assignment of
3938 an array element in an unaligned packed structure field, has the same
3939 problem. */
8c1cfd5a 3940 if (handled_component_p (to)
7c02ae17 3941 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
bbf6f052
RK
3942 {
3943 enum machine_mode mode1;
770ae6cc 3944 HOST_WIDE_INT bitsize, bitpos;
7bb0943f 3945 tree offset;
bbf6f052
RK
3946 int unsignedp;
3947 int volatilep = 0;
0088fcb1
RK
3948 tree tem;
3949
3950 push_temp_slots ();
839c4796 3951 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
2614034e 3952 &unsignedp, &volatilep, true);
bbf6f052
RK
3953
3954 /* If we are going to use store_bit_field and extract_bit_field,
3955 make sure to_rtx will be safe for multiple use. */
3956
84217346 3957 to_rtx = expand_normal (tem);
1ed1b4fb 3958
7bb0943f
RS
3959 if (offset != 0)
3960 {
1e188d1e 3961 rtx offset_rtx;
7bb0943f 3962
1e188d1e
RH
3963 if (!MEM_P (to_rtx))
3964 {
3965 /* We can get constant negative offsets into arrays with broken
3966 user code. Translate this to a trap instead of ICEing. */
3967 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
3968 expand_builtin_trap ();
3969 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
3970 }
bd070e1a 3971
1e188d1e 3972 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
bd070e1a 3973#ifdef POINTERS_EXTEND_UNSIGNED
4b6c1672 3974 if (GET_MODE (offset_rtx) != Pmode)
267b28bd 3975 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
fa06ab5c
RK
3976#else
3977 if (GET_MODE (offset_rtx) != ptr_mode)
3978 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
bd070e1a 3979#endif
bd070e1a 3980
9a7b9f4f
JL
3981 /* A constant address in TO_RTX can have VOIDmode, we must not try
3982 to call force_reg for that case. Avoid that case. */
3c0cb5de 3983 if (MEM_P (to_rtx)
89752202 3984 && GET_MODE (to_rtx) == BLKmode
9a7b9f4f 3985 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
a06ef755 3986 && bitsize > 0
3a94c984 3987 && (bitpos % bitsize) == 0
89752202 3988 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
a06ef755 3989 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
89752202 3990 {
e3c8ea67 3991 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
89752202
HB
3992 bitpos = 0;
3993 }
3994
0d4903b8 3995 to_rtx = offset_address (to_rtx, offset_rtx,
d50a16c4
EB
3996 highest_pow2_factor_for_target (to,
3997 offset));
7bb0943f 3998 }
c5c76735 3999
8c1cfd5a
RH
4000 /* Handle expand_expr of a complex value returning a CONCAT. */
4001 if (GET_CODE (to_rtx) == CONCAT)
a06ef755 4002 {
0becc986
RH
4003 if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE)
4004 {
4005 gcc_assert (bitpos == 0);
4006 result = store_expr (from, to_rtx, false);
4007 }
4008 else
4009 {
4010 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
4011 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false);
4012 }
bbf6f052 4013 }
8c1cfd5a 4014 else
df62f18a 4015 {
8c1cfd5a 4016 if (MEM_P (to_rtx))
b8b139c7 4017 {
8c1cfd5a
RH
4018 /* If the field is at offset zero, we could have been given the
4019 DECL_RTX of the parent struct. Don't munge it. */
4020 to_rtx = shallow_copy_rtx (to_rtx);
b8b139c7 4021
8c1cfd5a 4022 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
b8b139c7 4023
8c1cfd5a
RH
4024 /* Deal with volatile and readonly fields. The former is only
4025 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4026 if (volatilep)
4027 MEM_VOLATILE_P (to_rtx) = 1;
2039d7aa 4028 if (component_uses_parent_alias_set (to))
8c1cfd5a 4029 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
df62f18a 4030 }
60ba25bf 4031
8c1cfd5a
RH
4032 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
4033 to_rtx, to, from))
4034 result = NULL;
4035 else
4036 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4037 TREE_TYPE (tem), get_alias_set (to));
df62f18a
JJ
4038 }
4039
8c1cfd5a
RH
4040 if (result)
4041 preserve_temp_slots (result);
a06ef755
RK
4042 free_temp_slots ();
4043 pop_temp_slots ();
e836a5a2 4044 return;
bbf6f052
RK
4045 }
4046
cd1db108
RS
4047 /* If the rhs is a function call and its value is not an aggregate,
4048 call the function before we start to compute the lhs.
4049 This is needed for correct code for cases such as
4050 val = setjmp (buf) on machines where reference to val
1ad87b63
RK
4051 requires loading up part of an address in a separate insn.
4052
1858863b
JW
4053 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
 4054 since it might be a promoted variable where the zero- or sign-extension
4055 needs to be done. Handling this in the normal way is safe because no
4056 computation is done before the call. */
61f71b34 4057 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
b35cd3c1 4058 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
1858863b 4059 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
f8cfc6aa 4060 && REG_P (DECL_RTL (to))))
cd1db108 4061 {
0088fcb1
RK
4062 rtx value;
4063
4064 push_temp_slots ();
84217346 4065 value = expand_normal (from);
cd1db108 4066 if (to_rtx == 0)
37a08a29 4067 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
aaf87c45 4068
fffa9c1d
JW
4069 /* Handle calls that return values in multiple non-contiguous locations.
4070 The Irix 6 ABI has examples of this. */
4071 if (GET_CODE (to_rtx) == PARALLEL)
6e985040
AM
4072 emit_group_load (to_rtx, value, TREE_TYPE (from),
4073 int_size_in_bytes (TREE_TYPE (from)));
fffa9c1d 4074 else if (GET_MODE (to_rtx) == BLKmode)
44bb111a 4075 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
aaf87c45 4076 else
6419e5b0 4077 {
5ae6cd0d 4078 if (POINTER_TYPE_P (TREE_TYPE (to)))
6419e5b0 4079 value = convert_memory_address (GET_MODE (to_rtx), value);
6419e5b0
DT
4080 emit_move_insn (to_rtx, value);
4081 }
cd1db108
RS
4082 preserve_temp_slots (to_rtx);
4083 free_temp_slots ();
0088fcb1 4084 pop_temp_slots ();
e836a5a2 4085 return;
cd1db108
RS
4086 }
4087
bbf6f052
RK
4088 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4089 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4090
4091 if (to_rtx == 0)
37a08a29 4092 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
bbf6f052 4093
86d38d25 4094 /* Don't move directly into a return register. */
14a774a9 4095 if (TREE_CODE (to) == RESULT_DECL
f8cfc6aa 4096 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
86d38d25 4097 {
0088fcb1
RK
4098 rtx temp;
4099
4100 push_temp_slots ();
4101 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
14a774a9
RK
4102
4103 if (GET_CODE (to_rtx) == PARALLEL)
6e985040
AM
4104 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4105 int_size_in_bytes (TREE_TYPE (from)));
14a774a9
RK
4106 else
4107 emit_move_insn (to_rtx, temp);
4108
86d38d25
RS
4109 preserve_temp_slots (to_rtx);
4110 free_temp_slots ();
0088fcb1 4111 pop_temp_slots ();
e836a5a2 4112 return;
86d38d25
RS
4113 }
4114
bbf6f052
RK
4115 /* In case we are returning the contents of an object which overlaps
4116 the place the value is being stored, use a safe function when copying
4117 a value through a pointer into a structure value return block. */
4118 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4119 && current_function_returns_struct
4120 && !current_function_returns_pcc_struct)
4121 {
0088fcb1
RK
4122 rtx from_rtx, size;
4123
4124 push_temp_slots ();
33a20d10 4125 size = expr_size (from);
84217346 4126 from_rtx = expand_normal (from);
bbf6f052 4127
8f99553f
JM
4128 emit_library_call (memmove_libfunc, LCT_NORMAL,
4129 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4130 XEXP (from_rtx, 0), Pmode,
4131 convert_to_mode (TYPE_MODE (sizetype),
4132 size, TYPE_UNSIGNED (sizetype)),
4133 TYPE_MODE (sizetype));
bbf6f052
RK
4134
4135 preserve_temp_slots (to_rtx);
4136 free_temp_slots ();
0088fcb1 4137 pop_temp_slots ();
e836a5a2 4138 return;
bbf6f052
RK
4139 }
4140
4141 /* Compute FROM and store the value in the rtx we got. */
4142
0088fcb1 4143 push_temp_slots ();
e836a5a2 4144 result = store_expr (from, to_rtx, 0);
bbf6f052
RK
4145 preserve_temp_slots (result);
4146 free_temp_slots ();
0088fcb1 4147 pop_temp_slots ();
e836a5a2 4148 return;
bbf6f052
RK
4149}
4150
4151/* Generate code for computing expression EXP,
4152 and storing the value into TARGET.
bbf6f052 4153
709f5be1
RS
4154 If the mode is BLKmode then we may return TARGET itself.
 4155 It turns out that in BLKmode it doesn't cause a problem,
4156 because C has no operators that could combine two different
4157 assignments into the same BLKmode object with different values
4158 with no sequence point. Will other languages need this to
4159 be more thorough?
4160
6f4fd16d 4161 If CALL_PARAM_P is nonzero, this is a store into a call param on the
8403445a 4162 stack, and block moves may need to be treated specially. */
bbf6f052
RK
4163
4164rtx
6f4fd16d 4165store_expr (tree exp, rtx target, int call_param_p)
bbf6f052 4166{
b3694847 4167 rtx temp;
0fab64a3 4168 rtx alt_rtl = NULL_RTX;
bbf6f052
RK
4169 int dont_return_target = 0;
4170
847311f4
AL
4171 if (VOID_TYPE_P (TREE_TYPE (exp)))
4172 {
4173 /* C++ can generate ?: expressions with a throw expression in one
4174 branch and an rvalue in the other. Here, we resolve attempts to
4d6922ee 4175 store the throw expression's nonexistent result. */
6f4fd16d 4176 gcc_assert (!call_param_p);
847311f4
AL
4177 expand_expr (exp, const0_rtx, VOIDmode, 0);
4178 return NULL_RTX;
4179 }
bbf6f052
RK
4180 if (TREE_CODE (exp) == COMPOUND_EXPR)
4181 {
4182 /* Perform first part of compound expression, then assign from second
4183 part. */
8403445a 4184 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6f4fd16d
KH
4185 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4186 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
bbf6f052
RK
4187 }
4188 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4189 {
4190 /* For conditional expression, get safe form of the target. Then
4191 test the condition, doing the appropriate assignment on either
4192 side. This avoids the creation of unnecessary temporaries.
4193 For non-BLKmode, it is more efficient not to do this. */
4194
4195 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4196
dabf8373 4197 do_pending_stack_adjust ();
bbf6f052
RK
4198 NO_DEFER_POP;
4199 jumpifnot (TREE_OPERAND (exp, 0), lab1);
6f4fd16d 4200 store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
bbf6f052
RK
4201 emit_jump_insn (gen_jump (lab2));
4202 emit_barrier ();
4203 emit_label (lab1);
6f4fd16d 4204 store_expr (TREE_OPERAND (exp, 2), target, call_param_p);
bbf6f052
RK
4205 emit_label (lab2);
4206 OK_DEFER_POP;
a3a58acc 4207
436d948e 4208 return NULL_RTX;
12f06d17 4209 }
1499e0a8 4210 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
09da1532 4211 /* If this is a scalar in a register that is stored in a wider mode
1499e0a8
RK
4212 than the declared mode, compute the result into its declared mode
4213 and then convert to the wider mode. Our value is the computed
4214 expression. */
4215 {
b76b08ef
RK
4216 rtx inner_target = 0;
4217
436d948e
KH
4218 /* We can do the conversion inside EXP, which will often result
4219 in some optimizations. Do the conversion in two steps: first
4220 change the signedness, if needed, then the extend. But don't
4221 do this if the type of EXP is a subtype of something else
4222 since then the conversion might involve more than just
4223 converting modes. */
4224 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
7e7d1b4b
RH
4225 && TREE_TYPE (TREE_TYPE (exp)) == 0
4226 && (!lang_hooks.reduce_bit_field_operations
4227 || (GET_MODE_PRECISION (GET_MODE (target))
4228 == TYPE_PRECISION (TREE_TYPE (exp)))))
f635a84d 4229 {
8df83eae 4230 if (TYPE_UNSIGNED (TREE_TYPE (exp))
f635a84d 4231 != SUBREG_PROMOTED_UNSIGNED_P (target))
ceef8ce4 4232 exp = convert
ae2bcd98 4233 (lang_hooks.types.signed_or_unsigned_type
ceef8ce4 4234 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
f635a84d 4235
ae2bcd98 4236 exp = convert (lang_hooks.types.type_for_mode
b0c48229
NB
4237 (GET_MODE (SUBREG_REG (target)),
4238 SUBREG_PROMOTED_UNSIGNED_P (target)),
f635a84d 4239 exp);
b76b08ef
RK
4240
4241 inner_target = SUBREG_REG (target);
f635a84d 4242 }
3a94c984 4243
8403445a 4244 temp = expand_expr (exp, inner_target, VOIDmode,
6f4fd16d 4245 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
b258707c
RS
4246
4247 /* If TEMP is a VOIDmode constant, use convert_modes to make
4248 sure that we properly convert it. */
4249 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
1f1b0541
RH
4250 {
4251 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4252 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4253 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4254 GET_MODE (target), temp,
4255 SUBREG_PROMOTED_UNSIGNED_P (target));
4256 }
b258707c 4257
1499e0a8
RK
4258 convert_move (SUBREG_REG (target), temp,
4259 SUBREG_PROMOTED_UNSIGNED_P (target));
3dbecef9 4260
436d948e 4261 return NULL_RTX;
1499e0a8 4262 }
bbf6f052
RK
4263 else
4264 {
0fab64a3 4265 temp = expand_expr_real (exp, target, GET_MODE (target),
6f4fd16d 4266 (call_param_p
0fab64a3
MM
4267 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4268 &alt_rtl);
766f36c7 4269 /* Return TARGET if it's a specified hardware register.
709f5be1
RS
4270 If TARGET is a volatile mem ref, either return TARGET
4271 or return a reg copied *from* TARGET; ANSI requires this.
4272
4273 Otherwise, if TEMP is not TARGET, return TEMP
4274 if it is constant (for efficiency),
4275 or if we really want the correct value. */
f8cfc6aa 4276 if (!(target && REG_P (target)
bbf6f052 4277 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3c0cb5de 4278 && !(MEM_P (target) && MEM_VOLATILE_P (target))
effbcc6a 4279 && ! rtx_equal_p (temp, target)
436d948e 4280 && CONSTANT_P (temp))
bbf6f052
RK
4281 dont_return_target = 1;
4282 }
4283
b258707c
RS
4284 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4285 the same as that of TARGET, adjust the constant. This is needed, for
4286 example, in case it is a CONST_DOUBLE and we want only a word-sized
4287 value. */
4288 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
c1da1f33 4289 && TREE_CODE (exp) != ERROR_MARK
b258707c
RS
4290 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4291 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
8df83eae 4292 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
b258707c 4293
bbf6f052 4294 /* If value was not generated in the target, store it there.
1bbd65cd
EB
4295 Convert the value to TARGET's type first if necessary and emit the
4296 pending incrementations that have been queued when expanding EXP.
4297 Note that we cannot emit the whole queue blindly because this will
4298 effectively disable the POST_INC optimization later.
4299
37a08a29 4300 If TEMP and TARGET compare equal according to rtx_equal_p, but
f3f2255a
R
4301 one or both of them are volatile memory refs, we have to distinguish
4302 two cases:
4303 - expand_expr has used TARGET. In this case, we must not generate
4304 another copy. This can be detected by TARGET being equal according
4305 to == .
4306 - expand_expr has not used TARGET - that means that the source just
4307 happens to have the same RTX form. Since temp will have been created
4308 by expand_expr, it will compare unequal according to == .
4309 We must generate a copy in this case, to reach the correct number
4310 of volatile memory references. */
bbf6f052 4311
6036acbb 4312 if ((! rtx_equal_p (temp, target)
f3f2255a
R
4313 || (temp != target && (side_effects_p (temp)
4314 || side_effects_p (target))))
e5408e52 4315 && TREE_CODE (exp) != ERROR_MARK
9c5c5f2c
MM
4316 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4317 but TARGET is not valid memory reference, TEMP will differ
4318 from TARGET although it is really the same location. */
0fab64a3 4319 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
535a42b1
NS
4320 /* If there's nothing to copy, don't bother. Don't call
4321 expr_size unless necessary, because some front-ends (C++)
4322 expr_size-hook must not be given objects that are not
4323 supposed to be bit-copied or bit-initialized. */
e56fc090 4324 && expr_size (exp) != const0_rtx)
bbf6f052 4325 {
bbf6f052 4326 if (GET_MODE (temp) != GET_MODE (target)
f0348c25 4327 && GET_MODE (temp) != VOIDmode)
bbf6f052 4328 {
8df83eae 4329 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
bbf6f052
RK
4330 if (dont_return_target)
4331 {
4332 /* In this case, we will return TEMP,
4333 so make sure it has the proper mode.
4334 But don't forget to store the value into TARGET. */
4335 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4336 emit_move_insn (target, temp);
4337 }
4338 else
4339 convert_move (target, temp, unsignedp);
4340 }
4341
4342 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4343 {
c24ae149
RK
4344 /* Handle copying a string constant into an array. The string
4345 constant may be shorter than the array. So copy just the string's
4346 actual length, and clear the rest. First get the size of the data
4347 type of the string, which is actually the size of the target. */
4348 rtx size = expr_size (exp);
bbf6f052 4349
e87b4f3f
RS
4350 if (GET_CODE (size) == CONST_INT
4351 && INTVAL (size) < TREE_STRING_LENGTH (exp))
8403445a 4352 emit_block_move (target, temp, size,
6f4fd16d 4353 (call_param_p
8403445a 4354 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
e87b4f3f 4355 else
bbf6f052 4356 {
e87b4f3f
RS
4357 /* Compute the size of the data to copy from the string. */
4358 tree copy_size
c03b7665 4359 = size_binop (MIN_EXPR,
b50d17a1 4360 make_tree (sizetype, size),
fed3cef0 4361 size_int (TREE_STRING_LENGTH (exp)));
8403445a
AM
4362 rtx copy_size_rtx
4363 = expand_expr (copy_size, NULL_RTX, VOIDmode,
6f4fd16d 4364 (call_param_p
8403445a 4365 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
e87b4f3f
RS
4366 rtx label = 0;
4367
4368 /* Copy that much. */
267b28bd 4369 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
8df83eae 4370 TYPE_UNSIGNED (sizetype));
8403445a 4371 emit_block_move (target, temp, copy_size_rtx,
6f4fd16d 4372 (call_param_p
8403445a 4373 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
e87b4f3f 4374
88f63c77
RK
4375 /* Figure out how much is left in TARGET that we have to clear.
4376 Do all calculations in ptr_mode. */
e87b4f3f
RS
4377 if (GET_CODE (copy_size_rtx) == CONST_INT)
4378 {
c24ae149
RK
4379 size = plus_constant (size, -INTVAL (copy_size_rtx));
4380 target = adjust_address (target, BLKmode,
4381 INTVAL (copy_size_rtx));
e87b4f3f
RS
4382 }
4383 else
4384 {
fa06ab5c 4385 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
906c4e36
RK
4386 copy_size_rtx, NULL_RTX, 0,
4387 OPTAB_LIB_WIDEN);
e87b4f3f 4388
c24ae149
RK
4389#ifdef POINTERS_EXTEND_UNSIGNED
4390 if (GET_MODE (copy_size_rtx) != Pmode)
267b28bd 4391 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
8df83eae 4392 TYPE_UNSIGNED (sizetype));
c24ae149
RK
4393#endif
4394
4395 target = offset_address (target, copy_size_rtx,
4396 highest_pow2_factor (copy_size));
e87b4f3f 4397 label = gen_label_rtx ();
c5d5d461 4398 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
a06ef755 4399 GET_MODE (size), 0, label);
e87b4f3f
RS
4400 }
4401
4402 if (size != const0_rtx)
8148fe65 4403 clear_storage (target, size, BLOCK_OP_NORMAL);
22619c3f 4404
e87b4f3f
RS
4405 if (label)
4406 emit_label (label);
bbf6f052
RK
4407 }
4408 }
fffa9c1d
JW
4409 /* Handle calls that return values in multiple non-contiguous locations.
4410 The Irix 6 ABI has examples of this. */
4411 else if (GET_CODE (target) == PARALLEL)
6e985040
AM
4412 emit_group_load (target, temp, TREE_TYPE (exp),
4413 int_size_in_bytes (TREE_TYPE (exp)));
bbf6f052 4414 else if (GET_MODE (temp) == BLKmode)
8403445a 4415 emit_block_move (target, temp, expr_size (exp),
6f4fd16d 4416 (call_param_p
8403445a 4417 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
bbf6f052 4418 else
b0dccb00
RH
4419 {
4420 temp = force_operand (temp, target);
4421 if (temp != target)
4422 emit_move_insn (target, temp);
4423 }
bbf6f052 4424 }
709f5be1 4425
436d948e 4426 return NULL_RTX;
bbf6f052
RK
4427}
4428\f
6fa91b48
SB
4429/* Examine CTOR to discover:
4430 * how many scalar fields are set to nonzero values,
4431 and place it in *P_NZ_ELTS;
4432 * how many scalar fields are set to non-constant values,
4433 and place it in *P_NC_ELTS; and
4434 * how many scalar fields in total are in CTOR,
6f642f98
RH
4435 and place it in *P_ELT_COUNT.
4436 * if a type is a union, and the initializer from the constructor
 4437 is not the largest element in the union, then set *P_MUST_CLEAR. */
9de08200 4438
6de9cd9a
DN
4439static void
4440categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
6fa91b48 4441 HOST_WIDE_INT *p_nc_elts,
6f642f98
RH
4442 HOST_WIDE_INT *p_elt_count,
4443 bool *p_must_clear)
9de08200 4444{
4038c495 4445 unsigned HOST_WIDE_INT idx;
6fa91b48 4446 HOST_WIDE_INT nz_elts, nc_elts, elt_count;
4038c495 4447 tree value, purpose;
9de08200 4448
6de9cd9a
DN
4449 nz_elts = 0;
4450 nc_elts = 0;
6fa91b48 4451 elt_count = 0;
caf93cb0 4452
4038c495 4453 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
9de08200 4454 {
6de9cd9a 4455 HOST_WIDE_INT mult;
9de08200 4456
6de9cd9a
DN
4457 mult = 1;
4458 if (TREE_CODE (purpose) == RANGE_EXPR)
4459 {
4460 tree lo_index = TREE_OPERAND (purpose, 0);
4461 tree hi_index = TREE_OPERAND (purpose, 1);
9de08200 4462
6de9cd9a
DN
4463 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4464 mult = (tree_low_cst (hi_index, 1)
4465 - tree_low_cst (lo_index, 1) + 1);
4466 }
9de08200 4467
6de9cd9a
DN
4468 switch (TREE_CODE (value))
4469 {
4470 case CONSTRUCTOR:
4471 {
6f642f98
RH
4472 HOST_WIDE_INT nz = 0, nc = 0, ic = 0;
4473 categorize_ctor_elements_1 (value, &nz, &nc, &ic, p_must_clear);
6de9cd9a
DN
4474 nz_elts += mult * nz;
4475 nc_elts += mult * nc;
6f642f98 4476 elt_count += mult * ic;
6de9cd9a
DN
4477 }
4478 break;
9de08200 4479
6de9cd9a
DN
4480 case INTEGER_CST:
4481 case REAL_CST:
4482 if (!initializer_zerop (value))
4483 nz_elts += mult;
6fa91b48 4484 elt_count += mult;
6de9cd9a 4485 break;
97f8d136
RK
4486
4487 case STRING_CST:
4488 nz_elts += mult * TREE_STRING_LENGTH (value);
6fa91b48 4489 elt_count += mult * TREE_STRING_LENGTH (value);
97f8d136
RK
4490 break;
4491
6de9cd9a
DN
4492 case COMPLEX_CST:
4493 if (!initializer_zerop (TREE_REALPART (value)))
4494 nz_elts += mult;
4495 if (!initializer_zerop (TREE_IMAGPART (value)))
4496 nz_elts += mult;
6fa91b48 4497 elt_count += mult;
6de9cd9a 4498 break;
97f8d136 4499
6de9cd9a
DN
4500 case VECTOR_CST:
4501 {
4502 tree v;
4503 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
6fa91b48
SB
4504 {
4505 if (!initializer_zerop (TREE_VALUE (v)))
4506 nz_elts += mult;
4507 elt_count += mult;
4508 }
6de9cd9a
DN
4509 }
4510 break;
69ef87e2 4511
6de9cd9a
DN
4512 default:
4513 nz_elts += mult;
6fa91b48 4514 elt_count += mult;
6de9cd9a
DN
4515 if (!initializer_constant_valid_p (value, TREE_TYPE (value)))
4516 nc_elts += mult;
4517 break;
4518 }
4519 }
69ef87e2 4520
6f642f98
RH
4521 if (!*p_must_clear
4522 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4523 || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4524 {
4525 tree init_sub_type;
486e4326 4526 bool clear_this = true;
6f642f98 4527
4038c495 4528 if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
6f642f98 4529 {
486e4326
RH
4530 /* We don't expect more than one element of the union to be
4531 initialized. Not sure what we should do otherwise... */
4038c495
GB
4532 gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
4533 == 1);
486e4326 4534
4038c495
GB
4535 init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
4536 CONSTRUCTOR_ELTS (ctor),
4537 0)->value);
486e4326
RH
4538
4539 /* ??? We could look at each element of the union, and find the
 4540 largest element, which would avoid comparing the size of the
4541 initialized element against any tail padding in the union.
4542 Doesn't seem worth the effort... */
4543 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4544 TYPE_SIZE (init_sub_type)) == 1)
4545 {
4546 /* And now we have to find out if the element itself is fully
4547 constructed. E.g. for union { struct { int a, b; } s; } u
4548 = { .s = { .a = 1 } }. */
73ed17ff 4549 if (elt_count == count_type_elements (init_sub_type, false))
486e4326
RH
4550 clear_this = false;
4551 }
6f642f98 4552 }
486e4326
RH
4553
4554 *p_must_clear = clear_this;
6f642f98
RH
4555 }
4556
6de9cd9a
DN
4557 *p_nz_elts += nz_elts;
4558 *p_nc_elts += nc_elts;
6fa91b48 4559 *p_elt_count += elt_count;
6de9cd9a
DN
4560}
4561
4562void
4563categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
6fa91b48 4564 HOST_WIDE_INT *p_nc_elts,
6f642f98
RH
4565 HOST_WIDE_INT *p_elt_count,
4566 bool *p_must_clear)
6de9cd9a
DN
4567{
4568 *p_nz_elts = 0;
4569 *p_nc_elts = 0;
6fa91b48 4570 *p_elt_count = 0;
6f642f98
RH
4571 *p_must_clear = false;
4572 categorize_ctor_elements_1 (ctor, p_nz_elts, p_nc_elts, p_elt_count,
4573 p_must_clear);
6de9cd9a
DN
4574}
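/* Editorial worked example, not part of the original source: for an
   int[4] object initialized with { 0, 5, 0, 7 }, the walk above yields
   *P_NZ_ELTS == 2 (two nonzero scalars), *P_NC_ELTS == 0 (everything is
   constant), *P_ELT_COUNT == 4 and *P_MUST_CLEAR == false, so
   mostly_zeros_p below returns 0 because 2 is not less than 4/4.  */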
4575
4576/* Count the number of scalars in TYPE. Return -1 on overflow or
73ed17ff
JJ
4577 variable-sized. If ALLOW_FLEXARR is true, don't count flexible
4578 array member at the end of the structure. */
6de9cd9a
DN
4579
4580HOST_WIDE_INT
73ed17ff 4581count_type_elements (tree type, bool allow_flexarr)
6de9cd9a
DN
4582{
4583 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4584 switch (TREE_CODE (type))
4585 {
4586 case ARRAY_TYPE:
4587 {
4588 tree telts = array_type_nelts (type);
4589 if (telts && host_integerp (telts, 1))
4590 {
5377d5ba 4591 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
73ed17ff 4592 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
6de9cd9a
DN
4593 if (n == 0)
4594 return 0;
5377d5ba 4595 else if (max / n > m)
6de9cd9a
DN
4596 return n * m;
4597 }
4598 return -1;
4599 }
4600
4601 case RECORD_TYPE:
4602 {
4603 HOST_WIDE_INT n = 0, t;
4604 tree f;
4605
4606 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4607 if (TREE_CODE (f) == FIELD_DECL)
4608 {
73ed17ff 4609 t = count_type_elements (TREE_TYPE (f), false);
6de9cd9a 4610 if (t < 0)
73ed17ff
JJ
4611 {
4612 /* Check for structures with flexible array member. */
4613 tree tf = TREE_TYPE (f);
4614 if (allow_flexarr
4615 && TREE_CHAIN (f) == NULL
4616 && TREE_CODE (tf) == ARRAY_TYPE
4617 && TYPE_DOMAIN (tf)
4618 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
4619 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
4620 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
4621 && int_size_in_bytes (type) >= 0)
4622 break;
4623
4624 return -1;
4625 }
6de9cd9a
DN
4626 n += t;
4627 }
4628
4629 return n;
4630 }
9de08200 4631
6de9cd9a
DN
4632 case UNION_TYPE:
4633 case QUAL_UNION_TYPE:
4634 {
4635 /* Ho hum. How in the world do we guess here? Clearly it isn't
4636 right to count the fields. Guess based on the number of words. */
4637 HOST_WIDE_INT n = int_size_in_bytes (type);
4638 if (n < 0)
4639 return -1;
4640 return n / UNITS_PER_WORD;
4641 }
4642
4643 case COMPLEX_TYPE:
4644 return 2;
4645
4646 case VECTOR_TYPE:
3a021db2 4647 return TYPE_VECTOR_SUBPARTS (type);
6de9cd9a
DN
4648
4649 case INTEGER_TYPE:
4650 case REAL_TYPE:
4651 case ENUMERAL_TYPE:
4652 case BOOLEAN_TYPE:
6de9cd9a
DN
4653 case POINTER_TYPE:
4654 case OFFSET_TYPE:
4655 case REFERENCE_TYPE:
9de08200 4656 return 1;
3a94c984 4657
6de9cd9a
DN
4658 case VOID_TYPE:
4659 case METHOD_TYPE:
6de9cd9a
DN
4660 case FUNCTION_TYPE:
4661 case LANG_TYPE:
e9a25f70 4662 default:
5b0264cb 4663 gcc_unreachable ();
9de08200 4664 }
9de08200
RK
4665}
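/* Editorial worked example, not part of the original source: for
   struct { int a; int b[3]; } the function above returns 1 + 3 = 4; for
   a union it only guesses, dividing the size in bytes by UNITS_PER_WORD;
   and a variable-sized array makes it return -1.  */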
4666
4667/* Return 1 if EXP contains mostly (3/4) zeros. */
4668
e0ce7708 4669static int
502b8322 4670mostly_zeros_p (tree exp)
9de08200 4671{
9de08200 4672 if (TREE_CODE (exp) == CONSTRUCTOR)
caf93cb0 4673
9de08200 4674 {
6fa91b48 4675 HOST_WIDE_INT nz_elts, nc_elts, count, elts;
6f642f98
RH
4676 bool must_clear;
4677
4678 categorize_ctor_elements (exp, &nz_elts, &nc_elts, &count, &must_clear);
4679 if (must_clear)
4680 return 1;
6de9cd9a 4681
73ed17ff 4682 elts = count_type_elements (TREE_TYPE (exp), false);
9de08200 4683
6de9cd9a 4684 return nz_elts < elts / 4;
9de08200
RK
4685 }
4686
6de9cd9a 4687 return initializer_zerop (exp);
9de08200 4688}
c5250139
RG
4689
4690/* Return 1 if EXP contains all zeros. */
4691
4692static int
4693all_zeros_p (tree exp)
4694{
4695 if (TREE_CODE (exp) == CONSTRUCTOR)
4696
4697 {
4698 HOST_WIDE_INT nz_elts, nc_elts, count;
4699 bool must_clear;
4700
4701 categorize_ctor_elements (exp, &nz_elts, &nc_elts, &count, &must_clear);
4702 return nz_elts == 0;
4703 }
4704
4705 return initializer_zerop (exp);
4706}
9de08200 4707\f
e1a43f73
PB
4708/* Helper function for store_constructor.
4709 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4710 TYPE is the type of the CONSTRUCTOR, not the element type.
04050c69 4711 CLEARED is as for store_constructor.
23cb1766 4712 ALIAS_SET is the alias set to use for any stores.
23ccec44
JW
4713
4714 This provides a recursive shortcut back to store_constructor when it isn't
4715 necessary to go through store_field. This is so that we can pass through
4716 the cleared field to let store_constructor know that we may not have to
4717 clear a substructure if the outer structure has already been cleared. */
e1a43f73
PB
4718
4719static void
502b8322
AJ
4720store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4721 HOST_WIDE_INT bitpos, enum machine_mode mode,
4722 tree exp, tree type, int cleared, int alias_set)
e1a43f73
PB
4723{
4724 if (TREE_CODE (exp) == CONSTRUCTOR
6c89c39a
RK
4725 /* We can only call store_constructor recursively if the size and
4726 bit position are on a byte boundary. */
23ccec44 4727 && bitpos % BITS_PER_UNIT == 0
6c89c39a 4728 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
cc2902df 4729 /* If we have a nonzero bitpos for a register target, then we just
23ccec44
JW
4730 let store_field do the bitfield handling. This is unlikely to
4731 generate unnecessary clear instructions anyways. */
3c0cb5de 4732 && (bitpos == 0 || MEM_P (target)))
e1a43f73 4733 {
3c0cb5de 4734 if (MEM_P (target))
61cb205c
RK
4735 target
4736 = adjust_address (target,
4737 GET_MODE (target) == BLKmode
4738 || 0 != (bitpos
4739 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4740 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
23cb1766 4741
e0339ef7 4742
04050c69 4743 /* Update the alias set, if required. */
3c0cb5de 4744 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
10b76d73 4745 && MEM_ALIAS_SET (target) != 0)
70072ed9
RK
4746 {
4747 target = copy_rtx (target);
4748 set_mem_alias_set (target, alias_set);
4749 }
e0339ef7 4750
dbb5c281 4751 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
e1a43f73
PB
4752 }
4753 else
f45bdcd0 4754 store_field (target, bitsize, bitpos, mode, exp, type, alias_set);
e1a43f73
PB
4755}
4756
bbf6f052 4757/* Store the value of constructor EXP into the rtx TARGET.
04050c69
RK
4758 TARGET is either a REG or a MEM; we know it cannot conflict, since
4759 safe_from_p has been called.
dbb5c281
RK
4760 CLEARED is true if TARGET is known to have been zero'd.
4761 SIZE is the number of bytes of TARGET we are allowed to modify: this
b7010412
RK
4762 may not be the same as the size of EXP if we are assigning to a field
4763 which has been packed to exclude padding bits. */
bbf6f052
RK
4764
4765static void
502b8322 4766store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
bbf6f052 4767{
4af3895e 4768 tree type = TREE_TYPE (exp);
a5efcd63 4769#ifdef WORD_REGISTER_OPERATIONS
13eb1f7f 4770 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
a5efcd63 4771#endif
4af3895e 4772
5b0264cb 4773 switch (TREE_CODE (type))
bbf6f052 4774 {
5b0264cb
NS
4775 case RECORD_TYPE:
4776 case UNION_TYPE:
4777 case QUAL_UNION_TYPE:
4778 {
4038c495
GB
4779 unsigned HOST_WIDE_INT idx;
4780 tree field, value;
9de08200 4781
5b0264cb
NS
4782 /* If size is zero or the target is already cleared, do nothing. */
4783 if (size == 0 || cleared)
9de08200 4784 cleared = 1;
5b0264cb
NS
4785 /* We either clear the aggregate or indicate the value is dead. */
4786 else if ((TREE_CODE (type) == UNION_TYPE
4787 || TREE_CODE (type) == QUAL_UNION_TYPE)
4788 && ! CONSTRUCTOR_ELTS (exp))
4789 /* If the constructor is empty, clear the union. */
4790 {
8148fe65 4791 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5b0264cb
NS
4792 cleared = 1;
4793 }
bbf6f052 4794
5b0264cb
NS
4795 /* If we are building a static constructor into a register,
4796 set the initial value as zero so we can fold the value into
4797 a constant. But if more than one register is involved,
4798 this probably loses. */
4799 else if (REG_P (target) && TREE_STATIC (exp)
4800 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4801 {
4802 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4803 cleared = 1;
4804 }
3a94c984 4805
5b0264cb
NS
4806 /* If the constructor has fewer fields than the structure or
4807 if we are initializing the structure to mostly zeros, clear
4808 the whole structure first. Don't do this if TARGET is a
4809 register whose mode size isn't equal to SIZE since
4810 clear_storage can't handle this case. */
4811 else if (size > 0
4038c495 4812 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
5b0264cb
NS
4813 != fields_length (type))
4814 || mostly_zeros_p (exp))
4815 && (!REG_P (target)
4816 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4817 == size)))
4818 {
8148fe65 4819 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5b0264cb
NS
4820 cleared = 1;
4821 }
b50d17a1 4822
5b0264cb
NS
4823 if (! cleared)
4824 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
bbf6f052 4825
5b0264cb
NS
4826 /* Store each element of the constructor into the
4827 corresponding field of TARGET. */
4038c495 4828 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5b0264cb 4829 {
5b0264cb
NS
4830 enum machine_mode mode;
4831 HOST_WIDE_INT bitsize;
4832 HOST_WIDE_INT bitpos = 0;
4833 tree offset;
4834 rtx to_rtx = target;
4835
4836 /* Just ignore missing fields. We cleared the whole
4837 structure, above, if any fields are missing. */
4838 if (field == 0)
4839 continue;
4840
4841 if (cleared && initializer_zerop (value))
4842 continue;
4843
4844 if (host_integerp (DECL_SIZE (field), 1))
4845 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4846 else
4847 bitsize = -1;
4848
4849 mode = DECL_MODE (field);
4850 if (DECL_BIT_FIELD (field))
4851 mode = VOIDmode;
4852
4853 offset = DECL_FIELD_OFFSET (field);
4854 if (host_integerp (offset, 0)
4855 && host_integerp (bit_position (field), 0))
4856 {
4857 bitpos = int_bit_position (field);
4858 offset = 0;
4859 }
4860 else
4861 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4862
4863 if (offset)
4864 {
4865 rtx offset_rtx;
4866
4867 offset
4868 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4869 make_tree (TREE_TYPE (exp),
4870 target));
4871
84217346 4872 offset_rtx = expand_normal (offset);
5b0264cb
NS
4873 gcc_assert (MEM_P (to_rtx));
4874
bd070e1a 4875#ifdef POINTERS_EXTEND_UNSIGNED
5b0264cb
NS
4876 if (GET_MODE (offset_rtx) != Pmode)
4877 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
fa06ab5c 4878#else
5b0264cb
NS
4879 if (GET_MODE (offset_rtx) != ptr_mode)
4880 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
bd070e1a 4881#endif
bd070e1a 4882
5b0264cb
NS
4883 to_rtx = offset_address (to_rtx, offset_rtx,
4884 highest_pow2_factor (offset));
4885 }
c5c76735 4886
34c73909 4887#ifdef WORD_REGISTER_OPERATIONS
5b0264cb
NS
4888 /* If this initializes a field that is smaller than a
4889 word, at the start of a word, try to widen it to a full
4890 word. This special case allows us to output C++ member
4891 function initializations in a form that the optimizers
4892 can understand. */
4893 if (REG_P (target)
4894 && bitsize < BITS_PER_WORD
4895 && bitpos % BITS_PER_WORD == 0
4896 && GET_MODE_CLASS (mode) == MODE_INT
4897 && TREE_CODE (value) == INTEGER_CST
4898 && exp_size >= 0
4899 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4900 {
4901 tree type = TREE_TYPE (value);
4902
4903 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4904 {
4905 type = lang_hooks.types.type_for_size
4906 (BITS_PER_WORD, TYPE_UNSIGNED (type));
4907 value = convert (type, value);
4908 }
4909
4910 if (BYTES_BIG_ENDIAN)
4911 value
4845b383
KH
4912 = fold_build2 (LSHIFT_EXPR, type, value,
4913 build_int_cst (NULL_TREE,
4914 BITS_PER_WORD - bitsize));
5b0264cb
NS
4915 bitsize = BITS_PER_WORD;
4916 mode = word_mode;
4917 }
34c73909 4918#endif
10b76d73 4919
5b0264cb
NS
4920 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4921 && DECL_NONADDRESSABLE_P (field))
4922 {
4923 to_rtx = copy_rtx (to_rtx);
4924 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4925 }
4926
4927 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4928 value, type, cleared,
4929 get_alias_set (TREE_TYPE (field)));
4930 }
4931 break;
4932 }
4933 case ARRAY_TYPE:
4934 {
4038c495
GB
4935 tree value, index;
4936 unsigned HOST_WIDE_INT i;
5b0264cb
NS
4937 int need_to_clear;
4938 tree domain;
4939 tree elttype = TREE_TYPE (type);
4940 int const_bounds_p;
4941 HOST_WIDE_INT minelt = 0;
4942 HOST_WIDE_INT maxelt = 0;
4943
4944 domain = TYPE_DOMAIN (type);
4945 const_bounds_p = (TYPE_MIN_VALUE (domain)
4946 && TYPE_MAX_VALUE (domain)
4947 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4948 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4949
4950 /* If we have constant bounds for the range of the type, get them. */
4951 if (const_bounds_p)
4952 {
4953 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4954 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4955 }
3a021db2 4956
5b0264cb
NS
4957 /* If the constructor has fewer elements than the array, clear
4958	 the whole array first.  Similarly if this is a static
4959 constructor of a non-BLKmode object. */
4960 if (cleared)
4961 need_to_clear = 0;
4962 else if (REG_P (target) && TREE_STATIC (exp))
4963 need_to_clear = 1;
4964 else
4965 {
4038c495
GB
4966 unsigned HOST_WIDE_INT idx;
4967 tree index, value;
5b0264cb
NS
4968 HOST_WIDE_INT count = 0, zero_count = 0;
4969 need_to_clear = ! const_bounds_p;
4970
4971 /* This loop is a more accurate version of the loop in
4972 mostly_zeros_p (it handles RANGE_EXPR in an index). It
4973 is also needed to check for missing elements. */
4038c495 4974 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5b0264cb 4975 {
5b0264cb 4976 HOST_WIDE_INT this_node_count;
4038c495
GB
4977
4978 if (need_to_clear)
4979 break;
5b0264cb
NS
4980
4981 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4982 {
4983 tree lo_index = TREE_OPERAND (index, 0);
4984 tree hi_index = TREE_OPERAND (index, 1);
4985
4986 if (! host_integerp (lo_index, 1)
4987 || ! host_integerp (hi_index, 1))
4988 {
4989 need_to_clear = 1;
4990 break;
4991 }
4992
4993 this_node_count = (tree_low_cst (hi_index, 1)
4994 - tree_low_cst (lo_index, 1) + 1);
4995 }
4996 else
4997 this_node_count = 1;
4998
4999 count += this_node_count;
4038c495 5000 if (mostly_zeros_p (value))
5b0264cb
NS
5001 zero_count += this_node_count;
5002 }
5003
5004 /* Clear the entire array first if there are any missing
5005 elements, or if the incidence of zero elements is >=
5006 75%. */
5007 if (! need_to_clear
5008 && (count < maxelt - minelt + 1
5009 || 4 * zero_count >= 3 * count))
5010 need_to_clear = 1;
5011 }
5012
5013 if (need_to_clear && size > 0)
5014 {
5015 if (REG_P (target))
5016 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5017 else
8148fe65 5018 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5b0264cb
NS
5019 cleared = 1;
5020 }
3a021db2 5021
5b0264cb
NS
5022 if (!cleared && REG_P (target))
5023 /* Inform later passes that the old value is dead. */
5024 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
3a021db2 5025
5b0264cb
NS
5026 /* Store each element of the constructor into the
5027 corresponding element of TARGET, determined by counting the
5028 elements. */
4038c495 5029 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5b0264cb
NS
5030 {
5031 enum machine_mode mode;
5032 HOST_WIDE_INT bitsize;
5033 HOST_WIDE_INT bitpos;
5034 int unsignedp;
5b0264cb
NS
5035 rtx xtarget = target;
5036
5037 if (cleared && initializer_zerop (value))
5038 continue;
5039
5040 unsignedp = TYPE_UNSIGNED (elttype);
5041 mode = TYPE_MODE (elttype);
5042 if (mode == BLKmode)
5043 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5044 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5045 : -1);
5046 else
5047 bitsize = GET_MODE_BITSIZE (mode);
5048
5049 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5050 {
5051 tree lo_index = TREE_OPERAND (index, 0);
5052 tree hi_index = TREE_OPERAND (index, 1);
5053 rtx index_r, pos_rtx;
5054 HOST_WIDE_INT lo, hi, count;
5055 tree position;
5056
5057 /* If the range is constant and "small", unroll the loop. */
5058 if (const_bounds_p
5059 && host_integerp (lo_index, 0)
5060 && host_integerp (hi_index, 0)
5061 && (lo = tree_low_cst (lo_index, 0),
5062 hi = tree_low_cst (hi_index, 0),
5063 count = hi - lo + 1,
5064 (!MEM_P (target)
5065 || count <= 2
5066 || (host_integerp (TYPE_SIZE (elttype), 1)
5067 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5068 <= 40 * 8)))))
5069 {
5070 lo -= minelt; hi -= minelt;
5071 for (; lo <= hi; lo++)
5072 {
5073 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5074
5075 if (MEM_P (target)
5076 && !MEM_KEEP_ALIAS_SET_P (target)
5077 && TREE_CODE (type) == ARRAY_TYPE
5078 && TYPE_NONALIASED_COMPONENT (type))
5079 {
5080 target = copy_rtx (target);
5081 MEM_KEEP_ALIAS_SET_P (target) = 1;
5082 }
5083
5084 store_constructor_field
5085 (target, bitsize, bitpos, mode, value, type, cleared,
5086 get_alias_set (elttype));
5087 }
5088 }
5089 else
5090 {
5091 rtx loop_start = gen_label_rtx ();
5092 rtx loop_end = gen_label_rtx ();
5093 tree exit_cond;
5094
84217346 5095 expand_normal (hi_index);
5b0264cb
NS
5096 unsignedp = TYPE_UNSIGNED (domain);
5097
5098 index = build_decl (VAR_DECL, NULL_TREE, domain);
5099
5100 index_r
5101 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5102 &unsignedp, 0));
5103 SET_DECL_RTL (index, index_r);
5104 store_expr (lo_index, index_r, 0);
5105
5106 /* Build the head of the loop. */
5107 do_pending_stack_adjust ();
5108 emit_label (loop_start);
5109
5110 /* Assign value to element index. */
5111 position
5112 = convert (ssizetype,
4845b383
KH
5113 fold_build2 (MINUS_EXPR, TREE_TYPE (index),
5114 index, TYPE_MIN_VALUE (domain)));
5b0264cb
NS
5115 position = size_binop (MULT_EXPR, position,
5116 convert (ssizetype,
5117 TYPE_SIZE_UNIT (elttype)));
5118
84217346 5119 pos_rtx = expand_normal (position);
5b0264cb
NS
5120 xtarget = offset_address (target, pos_rtx,
5121 highest_pow2_factor (position));
5122 xtarget = adjust_address (xtarget, mode, 0);
5123 if (TREE_CODE (value) == CONSTRUCTOR)
5124 store_constructor (value, xtarget, cleared,
5125 bitsize / BITS_PER_UNIT);
5126 else
5127 store_expr (value, xtarget, 0);
5128
5129 /* Generate a conditional jump to exit the loop. */
5130 exit_cond = build2 (LT_EXPR, integer_type_node,
5131 index, hi_index);
5132 jumpif (exit_cond, loop_end);
5133
5134 /* Update the loop counter, and jump to the head of
5135 the loop. */
5136 expand_assignment (index,
5137 build2 (PLUS_EXPR, TREE_TYPE (index),
e836a5a2 5138 index, integer_one_node));
5b0264cb
NS
5139
5140 emit_jump (loop_start);
5141
5142 /* Build the end of the loop. */
5143 emit_label (loop_end);
5144 }
5145 }
5146 else if ((index != 0 && ! host_integerp (index, 0))
5147 || ! host_integerp (TYPE_SIZE (elttype), 1))
5148 {
5149 tree position;
5150
5151 if (index == 0)
5152 index = ssize_int (1);
5153
5154 if (minelt)
5155 index = fold_convert (ssizetype,
4845b383
KH
5156 fold_build2 (MINUS_EXPR,
5157 TREE_TYPE (index),
5158 index,
5159 TYPE_MIN_VALUE (domain)));
5b0264cb
NS
5160
5161 position = size_binop (MULT_EXPR, index,
5162 convert (ssizetype,
5163 TYPE_SIZE_UNIT (elttype)));
5164 xtarget = offset_address (target,
84217346 5165 expand_normal (position),
5b0264cb
NS
5166 highest_pow2_factor (position));
5167 xtarget = adjust_address (xtarget, mode, 0);
5168 store_expr (value, xtarget, 0);
5169 }
5170 else
5171 {
5172 if (index != 0)
5173 bitpos = ((tree_low_cst (index, 0) - minelt)
5174 * tree_low_cst (TYPE_SIZE (elttype), 1));
5175 else
5176 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5177
5178 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5179 && TREE_CODE (type) == ARRAY_TYPE
5180 && TYPE_NONALIASED_COMPONENT (type))
5181 {
5182 target = copy_rtx (target);
5183 MEM_KEEP_ALIAS_SET_P (target) = 1;
5184 }
5185 store_constructor_field (target, bitsize, bitpos, mode, value,
5186 type, cleared, get_alias_set (elttype));
5187 }
5188 }
5189 break;
5190 }
3a021db2 5191
5b0264cb
NS
5192 case VECTOR_TYPE:
5193 {
4038c495
GB
5194 unsigned HOST_WIDE_INT idx;
5195 constructor_elt *ce;
5b0264cb
NS
5196 int i;
5197 int need_to_clear;
5198 int icode = 0;
5199 tree elttype = TREE_TYPE (type);
5200 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5201 enum machine_mode eltmode = TYPE_MODE (elttype);
5202 HOST_WIDE_INT bitsize;
5203 HOST_WIDE_INT bitpos;
201dd46b 5204 rtvec vector = NULL;
5b0264cb
NS
5205 unsigned n_elts;
5206
5207 gcc_assert (eltmode != BLKmode);
5208
5209 n_elts = TYPE_VECTOR_SUBPARTS (type);
5210 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5211 {
5212 enum machine_mode mode = GET_MODE (target);
5213
5214 icode = (int) vec_init_optab->handlers[mode].insn_code;
5215 if (icode != CODE_FOR_nothing)
5216 {
5217 unsigned int i;
5218
201dd46b 5219 vector = rtvec_alloc (n_elts);
5b0264cb 5220 for (i = 0; i < n_elts; i++)
201dd46b 5221 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5b0264cb
NS
5222 }
5223 }
5224
5225 /* If the constructor has fewer elements than the vector,
5226	 clear the whole vector first.  Similarly if this is a static
5227 constructor of a non-BLKmode object. */
5228 if (cleared)
5229 need_to_clear = 0;
5230 else if (REG_P (target) && TREE_STATIC (exp))
5231 need_to_clear = 1;
5232 else
5233 {
5234 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
4038c495 5235 tree value;
5b0264cb 5236
4038c495 5237 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
5b0264cb
NS
5238 {
5239 int n_elts_here = tree_low_cst
5240 (int_const_binop (TRUNC_DIV_EXPR,
4038c495 5241 TYPE_SIZE (TREE_TYPE (value)),
5b0264cb
NS
5242 TYPE_SIZE (elttype), 0), 1);
5243
5244 count += n_elts_here;
4038c495 5245 if (mostly_zeros_p (value))
5b0264cb
NS
5246 zero_count += n_elts_here;
5247 }
3a021db2 5248
5b0264cb
NS
5249 /* Clear the entire vector first if there are any missing elements,
5250 or if the incidence of zero elements is >= 75%. */
5251 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5252 }
5253
5254 if (need_to_clear && size > 0 && !vector)
5255 {
5256 if (REG_P (target))
5257 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5258 else
8148fe65 5259 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5b0264cb
NS
5260 cleared = 1;
5261 }
5262
2ab1754e 5263 /* Inform later passes that the old value is dead. */
5b0264cb 5264 if (!cleared && REG_P (target))
2ab1754e 5265 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5b0264cb
NS
5266
5267 /* Store each element of the constructor into the corresponding
5268 element of TARGET, determined by counting the elements. */
4038c495
GB
5269 for (idx = 0, i = 0;
5270 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5271 idx++, i += bitsize / elt_size)
5b0264cb 5272 {
5b0264cb 5273 HOST_WIDE_INT eltpos;
4038c495 5274 tree value = ce->value;
5b0264cb
NS
5275
5276 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5277 if (cleared && initializer_zerop (value))
5278 continue;
5279
4038c495
GB
5280 if (ce->index)
5281 eltpos = tree_low_cst (ce->index, 1);
5b0264cb
NS
5282 else
5283 eltpos = i;
5284
5285 if (vector)
5286 {
5287 /* Vector CONSTRUCTORs should only be built from smaller
5288 vectors in the case of BLKmode vectors. */
5289 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
201dd46b 5290 RTVEC_ELT (vector, eltpos)
84217346 5291 = expand_normal (value);
5b0264cb
NS
5292 }
5293 else
5294 {
5295 enum machine_mode value_mode =
5296 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
3a021db2
PB
5297 ? TYPE_MODE (TREE_TYPE (value))
5298 : eltmode;
5b0264cb
NS
5299 bitpos = eltpos * elt_size;
5300 store_constructor_field (target, bitsize, bitpos,
5301 value_mode, value, type,
5302 cleared, get_alias_set (elttype));
5303 }
5304 }
5305
5306 if (vector)
5307 emit_insn (GEN_FCN (icode)
5308 (target,
201dd46b 5309 gen_rtx_PARALLEL (GET_MODE (target), vector)));
5b0264cb
NS
5310 break;
5311 }
08f2586c 5312
5b0264cb
NS
5313 default:
5314 gcc_unreachable ();
071a6595 5315 }
bbf6f052
RK
5316}
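The RANGE_EXPR index handled in the ARRAY_TYPE case above describes a run of
consecutive elements that share one value, which is what GNU C's range
designators express at the source level.  A small illustrative example
(hypothetical, not part of expr.c; whether it is expanded here or lowered
earlier during gimplification depends on the rest of the pipeline):

int
make_table (void)
{
  /* GNU C range designator: elements 2..97 share one initializer.  A small
     constant range would be unrolled into individual stores; a large one
     like this is a candidate for the loop built around loop_start/loop_end.  */
  int table[100] = { [2 ... 97] = 7 };
  return table[50];
}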
5317
5318/* Store the value of EXP (an expression tree)
5319 into a subfield of TARGET which has mode MODE and occupies
5320 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5321 If MODE is VOIDmode, it means that we are storing into a bit-field.
5322
f45bdcd0
KH
5323 Always return const0_rtx unless we have something particular to
5324 return.
bbf6f052 5325
a06ef755 5326 TYPE is the type of the underlying object,
ece32014
MM
5327
5328 ALIAS_SET is the alias set for the destination. This value will
5329 (in general) be different from that for TARGET, since TARGET is a
5330 reference to the containing structure. */
bbf6f052
RK
5331
5332static rtx
502b8322 5333store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
f45bdcd0 5334 enum machine_mode mode, tree exp, tree type, int alias_set)
bbf6f052 5335{
906c4e36 5336 HOST_WIDE_INT width_mask = 0;
bbf6f052 5337
e9a25f70
JL
5338 if (TREE_CODE (exp) == ERROR_MARK)
5339 return const0_rtx;
5340
2be6a7e9
RK
5341 /* If we have nothing to store, do nothing unless the expression has
5342 side-effects. */
5343 if (bitsize == 0)
5344 return expand_expr (exp, const0_rtx, VOIDmode, 0);
6a87d634 5345 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
906c4e36 5346 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
bbf6f052
RK
5347
5348 /* If we are storing into an unaligned field of an aligned union that is
5349 in a register, we may have the mode of TARGET being an integer mode but
5350 MODE == BLKmode. In that case, get an aligned object whose size and
5351 alignment are the same as TARGET and store TARGET into it (we can avoid
5352 the store if the field being stored is the entire width of TARGET). Then
5353 call ourselves recursively to store the field into a BLKmode version of
5354 that object. Finally, load from the object into TARGET. This is not
5355 very efficient in general, but should only be slightly more expensive
5356 than the otherwise-required unaligned accesses. Perhaps this can be
85a43a2f
RK
5357 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5358 twice, once with emit_move_insn and once via store_field. */
bbf6f052
RK
5359
5360 if (mode == BLKmode
f8cfc6aa 5361 && (REG_P (target) || GET_CODE (target) == SUBREG))
bbf6f052 5362 {
85a43a2f 5363 rtx object = assign_temp (type, 0, 1, 1);
c4e59f51 5364 rtx blk_object = adjust_address (object, BLKmode, 0);
bbf6f052 5365
8752c357 5366 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
bbf6f052
RK
5367 emit_move_insn (object, target);
5368
f45bdcd0 5369 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set);
bbf6f052
RK
5370
5371 emit_move_insn (target, object);
5372
a06ef755 5373 /* We want to return the BLKmode version of the data. */
46093b97 5374 return blk_object;
bbf6f052 5375 }
c3b247b4
JM
5376
5377 if (GET_CODE (target) == CONCAT)
5378 {
5379 /* We're storing into a struct containing a single __complex. */
5380
5b0264cb 5381 gcc_assert (!bitpos);
f45bdcd0 5382 return store_expr (exp, target, 0);
c3b247b4 5383 }
bbf6f052
RK
5384
5385 /* If the structure is in a register or if the component
5386 is a bit field, we cannot use addressing to access it.
5387 Use bit-field techniques or SUBREG to store in it. */
5388
4fa52007 5389 if (mode == VOIDmode
6ab06cbb
JW
5390 || (mode != BLKmode && ! direct_store[(int) mode]
5391 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5392 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
f8cfc6aa 5393 || REG_P (target)
c980ac49 5394 || GET_CODE (target) == SUBREG
ccc98036
RS
5395 /* If the field isn't aligned enough to store as an ordinary memref,
5396 store it as a bit field. */
15b19a7d 5397 || (mode != BLKmode
9e5f281f
OH
5398 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5399 || bitpos % GET_MODE_ALIGNMENT (mode))
5400 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
502b8322 5401 || (bitpos % BITS_PER_UNIT != 0)))
14a774a9
RK
5402 /* If the RHS and field are a constant size and the size of the
5403 RHS isn't the same size as the bitfield, we must use bitfield
5404 operations. */
05bccae2
RK
5405 || (bitsize >= 0
5406 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5407 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
bbf6f052 5408 {
48cc8d3b
RH
5409 rtx temp;
5410
5411 /* If EXP is a NOP_EXPR of precision less than its mode, then that
5412 implies a mask operation. If the precision is the same size as
5413 the field we're storing into, that mask is redundant. This is
5414 particularly common with bit field assignments generated by the
5415 C front end. */
8d740330
RH
5416 if (TREE_CODE (exp) == NOP_EXPR)
5417 {
5418 tree type = TREE_TYPE (exp);
5419 if (INTEGRAL_TYPE_P (type)
5420 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5421 && bitsize == TYPE_PRECISION (type))
5422 {
5423 type = TREE_TYPE (TREE_OPERAND (exp, 0));
5424 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5425 exp = TREE_OPERAND (exp, 0);
5426 }
5427 }
48cc8d3b 5428
84217346 5429 temp = expand_normal (exp);
bbd6cf73 5430
ef19912d
RK
5431 /* If BITSIZE is narrower than the size of the type of EXP
5432 we will be narrowing TEMP. Normally, what's wanted are the
5433 low-order bits. However, if EXP's type is a record and this is
5434 big-endian machine, we want the upper BITSIZE bits. */
5435 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
65a07688 5436 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
ef19912d
RK
5437 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5438 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5439 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5440 - bitsize),
c1853da7 5441 NULL_RTX, 1);
ef19912d 5442
bbd6cf73
RK
5443 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5444 MODE. */
5445 if (mode != VOIDmode && mode != BLKmode
5446 && mode != TYPE_MODE (TREE_TYPE (exp)))
5447 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5448
a281e72d
RK
5449 /* If the modes of TARGET and TEMP are both BLKmode, both
5450 must be in memory and BITPOS must be aligned on a byte
5451 boundary. If so, we simply do a block copy. */
5452 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5453 {
5b0264cb
NS
5454 gcc_assert (MEM_P (target) && MEM_P (temp)
5455 && !(bitpos % BITS_PER_UNIT));
a281e72d 5456
f4ef873c 5457 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
a281e72d 5458 emit_block_move (target, temp,
a06ef755 5459 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
44bb111a
RH
5460 / BITS_PER_UNIT),
5461 BLOCK_OP_NORMAL);
a281e72d 5462
f45bdcd0 5463 return const0_rtx;
a281e72d
RK
5464 }
5465
bbf6f052 5466 /* Store the value in the bitfield. */
b3520980 5467 store_bit_field (target, bitsize, bitpos, mode, temp);
a06ef755 5468
bbf6f052
RK
5469 return const0_rtx;
5470 }
5471 else
5472 {
bbf6f052 5473 /* Now build a reference to just the desired component. */
f45bdcd0 5474 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
a06ef755
RK
5475
5476 if (to_rtx == target)
5477 to_rtx = copy_rtx (to_rtx);
792760b9 5478
c6df88cb 5479 MEM_SET_IN_STRUCT_P (to_rtx, 1);
10b76d73 5480 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
a06ef755 5481 set_mem_alias_set (to_rtx, alias_set);
bbf6f052 5482
f45bdcd0 5483 return store_expr (exp, to_rtx, 0);
bbf6f052
RK
5484 }
5485}
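When store_field takes the bit-field branch above, the value reaches the
destination through store_bit_field.  A self-contained sketch of the
equivalent word-level operation (hypothetical helper, not part of expr.c; it
ignores endianness and the BLKmode block-copy path):

#include <limits.h>

/* Insert the low BITSIZE bits of VALUE into WORD starting at bit BITPOS,
   leaving the remaining bits of WORD unchanged.  */
unsigned long
insert_bit_field (unsigned long word, unsigned long value,
                  unsigned int bitpos, unsigned int bitsize)
{
  unsigned long mask = bitsize >= sizeof (unsigned long) * CHAR_BIT
                       ? ~0UL : (1UL << bitsize) - 1UL;
  return (word & ~(mask << bitpos)) | ((value & mask) << bitpos);
}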
5486\f
5487/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
b4e3fabb
RK
5488 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5489 codes and find the ultimate containing object, which we return.
bbf6f052
RK
5490
5491 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5492 bit position, and *PUNSIGNEDP to the signedness of the field.
7bb0943f
RS
5493 If the position of the field is variable, we store a tree
5494 giving the variable offset (in units) in *POFFSET.
5495 This offset is in addition to the bit position.
5496 If the position is not variable, we store 0 in *POFFSET.
bbf6f052
RK
5497
5498 If any of the extraction expressions is volatile,
5499 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5500
5501 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5502 is a mode that can be used to access the field. In that case, *PBITSIZE
e7c33f54
RK
5503 is redundant.
5504
5505 If the field describes a variable-sized object, *PMODE is set to
5506 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
2614034e
EB
5507 this case, but the address of the object can be found.
5508
5509 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5510 look through nodes that serve as markers of a greater alignment than
5511 the one that can be deduced from the expression. These nodes make it
5512 possible for front-ends to prevent temporaries from being created by
5513 the middle-end on alignment considerations. For that purpose, the
5514 normal operating mode at high-level is to always pass FALSE so that
5515 the ultimate containing object is really returned; moreover, the
5516 associated predicate handled_component_p will always return TRUE
5517 on these nodes, thus indicating that they are essentially handled
5518 by get_inner_reference. TRUE should only be passed when the caller
5519 is scanning the expression in order to build another representation
5520 and specifically knows how to handle these nodes; as such, this is
5521 the normal operating mode in the RTL expanders. */
bbf6f052
RK
5522
5523tree
502b8322
AJ
5524get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5525 HOST_WIDE_INT *pbitpos, tree *poffset,
5526 enum machine_mode *pmode, int *punsignedp,
2614034e 5527 int *pvolatilep, bool keep_aligning)
bbf6f052
RK
5528{
5529 tree size_tree = 0;
5530 enum machine_mode mode = VOIDmode;
fed3cef0 5531 tree offset = size_zero_node;
770ae6cc 5532 tree bit_offset = bitsize_zero_node;
770ae6cc 5533 tree tem;
bbf6f052 5534
770ae6cc
RK
5535 /* First get the mode, signedness, and size. We do this from just the
5536 outermost expression. */
bbf6f052
RK
5537 if (TREE_CODE (exp) == COMPONENT_REF)
5538 {
5539 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5540 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5541 mode = DECL_MODE (TREE_OPERAND (exp, 1));
770ae6cc 5542
a150de29 5543 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
bbf6f052
RK
5544 }
5545 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5546 {
5547 size_tree = TREE_OPERAND (exp, 1);
a150de29 5548 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
bbf6f052
RK
5549 }
5550 else
5551 {
5552 mode = TYPE_MODE (TREE_TYPE (exp));
8df83eae 5553 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
770ae6cc 5554
ab87f8c8
JL
5555 if (mode == BLKmode)
5556 size_tree = TYPE_SIZE (TREE_TYPE (exp));
770ae6cc
RK
5557 else
5558 *pbitsize = GET_MODE_BITSIZE (mode);
bbf6f052 5559 }
3a94c984 5560
770ae6cc 5561 if (size_tree != 0)
bbf6f052 5562 {
770ae6cc 5563 if (! host_integerp (size_tree, 1))
e7c33f54
RK
5564 mode = BLKmode, *pbitsize = -1;
5565 else
770ae6cc 5566 *pbitsize = tree_low_cst (size_tree, 1);
bbf6f052
RK
5567 }
5568
5569 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5570 and find the ultimate containing object. */
bbf6f052
RK
5571 while (1)
5572 {
afe84921 5573 switch (TREE_CODE (exp))
bbf6f052 5574 {
afe84921
RH
5575 case BIT_FIELD_REF:
5576 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5577 TREE_OPERAND (exp, 2));
5578 break;
bbf6f052 5579
afe84921
RH
5580 case COMPONENT_REF:
5581 {
5582 tree field = TREE_OPERAND (exp, 1);
5583 tree this_offset = component_ref_field_offset (exp);
e7f3c83f 5584
afe84921
RH
5585 /* If this field hasn't been filled in yet, don't go past it.
5586 This should only happen when folding expressions made during
5587 type construction. */
5588 if (this_offset == 0)
5589 break;
e6d8c385 5590
afe84921
RH
5591 offset = size_binop (PLUS_EXPR, offset, this_offset);
5592 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5593 DECL_FIELD_BIT_OFFSET (field));
7156dead 5594
afe84921
RH
5595 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5596 }
5597 break;
7156dead 5598
afe84921
RH
5599 case ARRAY_REF:
5600 case ARRAY_RANGE_REF:
5601 {
5602 tree index = TREE_OPERAND (exp, 1);
5603 tree low_bound = array_ref_low_bound (exp);
5604 tree unit_size = array_ref_element_size (exp);
5605
5606 /* We assume all arrays have sizes that are a multiple of a byte.
5607 First subtract the lower bound, if any, in the type of the
5608 index, then convert to sizetype and multiply by the size of
5609 the array element. */
5610 if (! integer_zerop (low_bound))
4845b383
KH
5611 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
5612 index, low_bound);
afe84921
RH
5613
5614 offset = size_binop (PLUS_EXPR, offset,
5615 size_binop (MULT_EXPR,
5616 convert (sizetype, index),
5617 unit_size));
5618 }
5619 break;
5620
5621 case REALPART_EXPR:
afe84921
RH
5622 break;
5623
5624 case IMAGPART_EXPR:
9f25f0ad
RH
5625 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5626 bitsize_int (*pbitsize));
afe84921
RH
5627 break;
5628
afe84921 5629 case VIEW_CONVERT_EXPR:
2614034e
EB
5630 if (keep_aligning && STRICT_ALIGNMENT
5631 && (TYPE_ALIGN (TREE_TYPE (exp))
afe84921 5632 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
afe84921
RH
5633 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5634 < BIGGEST_ALIGNMENT)
5635 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5636 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5637 goto done;
5638 break;
5639
5640 default:
5641 goto done;
5642 }
7bb0943f
RS
5643
5644 /* If any reference in the chain is volatile, the effect is volatile. */
5645 if (TREE_THIS_VOLATILE (exp))
5646 *pvolatilep = 1;
839c4796 5647
bbf6f052
RK
5648 exp = TREE_OPERAND (exp, 0);
5649 }
afe84921 5650 done:
bbf6f052 5651
770ae6cc
RK
5652 /* If OFFSET is constant, see if we can return the whole thing as a
5653 constant bit position. Otherwise, split it up. */
5654 if (host_integerp (offset, 0)
5655 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5656 bitsize_unit_node))
5657 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5658 && host_integerp (tem, 0))
5659 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5660 else
5661 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
b50d17a1 5662
bbf6f052 5663 *pmode = mode;
bbf6f052
RK
5664 return exp;
5665}
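As a concrete illustration of what get_inner_reference computes (hypothetical
example, not part of expr.c): for the COMPONENT_REF x.b below, the returned
containing object is x, *PBITSIZE is 16, *POFFSET is 0 and *PBITPOS is the
byte offset of the field scaled by BITS_PER_UNIT, i.e. 32 on a typical target
where int is 4 bytes and short is 2.

#include <stddef.h>
#include <stdio.h>

struct s { int a; short b; };

int
main (void)
{
  struct s x = { 1, 2 };
  /* The constant bit position is the byte offset times BITS_PER_UNIT (8).  */
  printf ("bitpos of x.b = %lu\n",
          (unsigned long) (offsetof (struct s, b) * 8));
  return x.b;
}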
921b3427 5666
44de5aeb
RK
5667/* Return a tree of sizetype representing the size, in bytes, of the element
5668 of EXP, an ARRAY_REF. */
5669
5670tree
5671array_ref_element_size (tree exp)
5672{
5673 tree aligned_size = TREE_OPERAND (exp, 3);
5674 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5675
5676 /* If a size was specified in the ARRAY_REF, it's the size measured
5677 in alignment units of the element type. So multiply by that value. */
5678 if (aligned_size)
bc482be4
RH
5679 {
5680 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5681 sizetype from another type of the same width and signedness. */
5682 if (TREE_TYPE (aligned_size) != sizetype)
5683 aligned_size = fold_convert (sizetype, aligned_size);
5684 return size_binop (MULT_EXPR, aligned_size,
a4e9ffe5 5685 size_int (TYPE_ALIGN_UNIT (elmt_type)));
bc482be4 5686 }
44de5aeb 5687
caf93cb0 5688 /* Otherwise, take the size from that of the element type. Substitute
44de5aeb
RK
5689 any PLACEHOLDER_EXPR that we have. */
5690 else
5691 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
5692}
5693
5694/* Return a tree representing the lower bound of the array mentioned in
5695 EXP, an ARRAY_REF. */
5696
5697tree
5698array_ref_low_bound (tree exp)
5699{
5700 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5701
5702 /* If a lower bound is specified in EXP, use it. */
5703 if (TREE_OPERAND (exp, 2))
5704 return TREE_OPERAND (exp, 2);
5705
5706 /* Otherwise, if there is a domain type and it has a lower bound, use it,
5707 substituting for a PLACEHOLDER_EXPR as needed. */
5708 if (domain_type && TYPE_MIN_VALUE (domain_type))
5709 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5710
5711 /* Otherwise, return a zero of the appropriate type. */
5212068f 5712 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
44de5aeb
RK
5713}
5714
a7e5372d
ZD
5715/* Return a tree representing the upper bound of the array mentioned in
5716 EXP, an ARRAY_REF. */
5717
5718tree
5719array_ref_up_bound (tree exp)
5720{
5721 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5722
5723 /* If there is a domain type and it has an upper bound, use it, substituting
5724 for a PLACEHOLDER_EXPR as needed. */
5725 if (domain_type && TYPE_MAX_VALUE (domain_type))
5726 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
5727
5728 /* Otherwise fail. */
5729 return NULL_TREE;
5730}
5731
44de5aeb
RK
5732/* Return a tree representing the offset, in bytes, of the field referenced
5733 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
5734
5735tree
5736component_ref_field_offset (tree exp)
5737{
5738 tree aligned_offset = TREE_OPERAND (exp, 2);
5739 tree field = TREE_OPERAND (exp, 1);
5740
5741 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5742 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
5743 value. */
5744 if (aligned_offset)
bc482be4
RH
5745 {
5746 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5747 sizetype from another type of the same width and signedness. */
5748 if (TREE_TYPE (aligned_offset) != sizetype)
5749 aligned_offset = fold_convert (sizetype, aligned_offset);
5750 return size_binop (MULT_EXPR, aligned_offset,
5751 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5752 }
44de5aeb 5753
caf93cb0 5754 /* Otherwise, take the offset from that of the field. Substitute
44de5aeb
RK
5755 any PLACEHOLDER_EXPR that we have. */
5756 else
5757 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
5758}
5759
ed239f5a
RK
5760/* Return 1 if T is an expression that get_inner_reference handles. */
5761
5762int
502b8322 5763handled_component_p (tree t)
ed239f5a
RK
5764{
5765 switch (TREE_CODE (t))
5766 {
5767 case BIT_FIELD_REF:
5768 case COMPONENT_REF:
5769 case ARRAY_REF:
5770 case ARRAY_RANGE_REF:
ed239f5a 5771 case VIEW_CONVERT_EXPR:
afe84921
RH
5772 case REALPART_EXPR:
5773 case IMAGPART_EXPR:
ed239f5a
RK
5774 return 1;
5775
ed239f5a
RK
5776 default:
5777 return 0;
5778 }
5779}
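An illustrative (hypothetical, not part of expr.c) source-level reference
whose every step is one of the codes accepted above: in s.a[i].b the walk from
the outside in sees COMPONENT_REF, ARRAY_REF and COMPONENT_REF, so
get_inner_reference can peel the whole chain down to the declaration s.

struct inner { int b; };
struct outer { struct inner a[4]; };

int
read_field (struct outer s, int i)
{
  /* From the outside in: COMPONENT_REF (.b), ARRAY_REF ([i]),
     COMPONENT_REF (.a), ending at the declaration s.  */
  return s.a[i].b;
}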
bbf6f052 5780\f
3fe44edd
RK
5781/* Given an rtx VALUE that may contain additions and multiplications, return
5782 an equivalent value that just refers to a register, memory, or constant.
5783 This is done by generating instructions to perform the arithmetic and
5784 returning a pseudo-register containing the value.
c45a13a6
RK
5785
5786 The returned value may be a REG, SUBREG, MEM or constant. */
bbf6f052
RK
5787
5788rtx
502b8322 5789force_operand (rtx value, rtx target)
bbf6f052 5790{
8a28dbcc 5791 rtx op1, op2;
bbf6f052 5792 /* Use subtarget as the target for operand 0 of a binary operation. */
b3694847 5793 rtx subtarget = get_subtarget (target);
8a28dbcc 5794 enum rtx_code code = GET_CODE (value);
bbf6f052 5795
50654f6c
ZD
5796 /* Check for subreg applied to an expression produced by loop optimizer. */
5797 if (code == SUBREG
f8cfc6aa 5798 && !REG_P (SUBREG_REG (value))
3c0cb5de 5799 && !MEM_P (SUBREG_REG (value)))
50654f6c
ZD
5800 {
5801 value = simplify_gen_subreg (GET_MODE (value),
5802 force_reg (GET_MODE (SUBREG_REG (value)),
5803 force_operand (SUBREG_REG (value),
5804 NULL_RTX)),
5805 GET_MODE (SUBREG_REG (value)),
5806 SUBREG_BYTE (value));
5807 code = GET_CODE (value);
5808 }
5809
8b015896 5810 /* Check for a PIC address load. */
8a28dbcc 5811 if ((code == PLUS || code == MINUS)
8b015896
RH
5812 && XEXP (value, 0) == pic_offset_table_rtx
5813 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5814 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5815 || GET_CODE (XEXP (value, 1)) == CONST))
5816 {
5817 if (!subtarget)
5818 subtarget = gen_reg_rtx (GET_MODE (value));
5819 emit_move_insn (subtarget, value);
5820 return subtarget;
5821 }
5822
ec8e098d 5823 if (ARITHMETIC_P (value))
bbf6f052
RK
5824 {
5825 op2 = XEXP (value, 1);
f8cfc6aa 5826 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
bbf6f052 5827 subtarget = 0;
8a28dbcc 5828 if (code == MINUS && GET_CODE (op2) == CONST_INT)
bbf6f052 5829 {
8a28dbcc 5830 code = PLUS;
bbf6f052
RK
5831 op2 = negate_rtx (GET_MODE (value), op2);
5832 }
5833
5834 /* Check for an addition with OP2 a constant integer and our first
8a28dbcc
JH
5835 operand a PLUS of a virtual register and something else. In that
5836 case, we want to emit the sum of the virtual register and the
5837 constant first and then add the other value. This allows virtual
5838 register instantiation to simply modify the constant rather than
5839 creating another one around this addition. */
5840 if (code == PLUS && GET_CODE (op2) == CONST_INT
bbf6f052 5841 && GET_CODE (XEXP (value, 0)) == PLUS
f8cfc6aa 5842 && REG_P (XEXP (XEXP (value, 0), 0))
bbf6f052
RK
5843 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5844 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5845 {
8a28dbcc
JH
5846 rtx temp = expand_simple_binop (GET_MODE (value), code,
5847 XEXP (XEXP (value, 0), 0), op2,
5848 subtarget, 0, OPTAB_LIB_WIDEN);
5849 return expand_simple_binop (GET_MODE (value), code, temp,
5850 force_operand (XEXP (XEXP (value,
5851 0), 1), 0),
5852 target, 0, OPTAB_LIB_WIDEN);
bbf6f052 5853 }
3a94c984 5854
8a28dbcc
JH
5855 op1 = force_operand (XEXP (value, 0), subtarget);
5856 op2 = force_operand (op2, NULL_RTX);
5857 switch (code)
5858 {
5859 case MULT:
5860 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5861 case DIV:
5862 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5863 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5864 target, 1, OPTAB_LIB_WIDEN);
5865 else
5866 return expand_divmod (0,
5867 FLOAT_MODE_P (GET_MODE (value))
5868 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5869 GET_MODE (value), op1, op2, target, 0);
5870 break;
5871 case MOD:
5872 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5873 target, 0);
5874 break;
5875 case UDIV:
5876 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5877 target, 1);
5878 break;
5879 case UMOD:
5880 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5881 target, 1);
5882 break;
5883 case ASHIFTRT:
5884 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5885 target, 0, OPTAB_LIB_WIDEN);
5886 break;
5887 default:
5888 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5889 target, 1, OPTAB_LIB_WIDEN);
5890 }
5891 }
ec8e098d 5892 if (UNARY_P (value))
8a28dbcc 5893 {
72a10eff
RS
5894 if (!target)
5895 target = gen_reg_rtx (GET_MODE (value));
8a28dbcc 5896 op1 = force_operand (XEXP (value, 0), NULL_RTX);
1fd5360d
R
5897 switch (code)
5898 {
72a10eff
RS
5899 case ZERO_EXTEND:
5900 case SIGN_EXTEND:
1fd5360d 5901 case TRUNCATE:
72a10eff
RS
5902 convert_move (target, op1, code == ZERO_EXTEND);
5903 return target;
5904
5905 case FIX:
5906 case UNSIGNED_FIX:
5907 expand_fix (target, op1, code == UNSIGNED_FIX);
5908 return target;
5909
5910 case FLOAT:
5911 case UNSIGNED_FLOAT:
5912 expand_float (target, op1, code == UNSIGNED_FLOAT);
5913 return target;
5914
1fd5360d
R
5915 default:
5916 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5917 }
bbf6f052 5918 }
34e81b5a
RK
5919
5920#ifdef INSN_SCHEDULING
5921 /* On machines that have insn scheduling, we want all memory references to be
5922 explicit, so we need to deal with such paradoxical SUBREGs. */
3c0cb5de 5923 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
34e81b5a
RK
5924 && (GET_MODE_SIZE (GET_MODE (value))
5925 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5926 value
5927 = simplify_gen_subreg (GET_MODE (value),
5928 force_reg (GET_MODE (SUBREG_REG (value)),
5929 force_operand (SUBREG_REG (value),
5930 NULL_RTX)),
5931 GET_MODE (SUBREG_REG (value)),
5932 SUBREG_BYTE (value));
5933#endif
5934
bbf6f052
RK
5935 return value;
5936}
5937\f
bbf6f052 5938/* Subroutine of expand_expr: return nonzero iff there is no way that
e5e809f4
JL
5939 EXP can reference X, which is being modified. TOP_P is nonzero if this
5940 call is going to be used to determine whether we need a temporary
ff439b5f
CB
5941 for EXP, as opposed to a recursive call to this function.
5942
5943 It is always safe for this routine to return zero since it merely
5944 searches for optimization opportunities. */
bbf6f052 5945
8f17b5c5 5946int
502b8322 5947safe_from_p (rtx x, tree exp, int top_p)
bbf6f052
RK
5948{
5949 rtx exp_rtl = 0;
5950 int i, nops;
5951
6676e72f
RK
5952 if (x == 0
5953 /* If EXP has varying size, we MUST use a target since we currently
8f6562d0
PB
5954 have no way of allocating temporaries of variable size
5955 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5956 So we assume here that something at a higher level has prevented a
f4510f37 5957 clash. This is somewhat bogus, but the best we can do. Only
e5e809f4 5958 do this when X is BLKmode and when we are at the top level. */
d0f062fb 5959 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
f4510f37 5960 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
8f6562d0
PB
5961 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5962 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5963 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5964 != INTEGER_CST)
1da68f56
RK
5965 && GET_MODE (x) == BLKmode)
5966 /* If X is in the outgoing argument area, it is always safe. */
3c0cb5de 5967 || (MEM_P (x)
1da68f56
RK
5968 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5969 || (GET_CODE (XEXP (x, 0)) == PLUS
5970 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
bbf6f052
RK
5971 return 1;
5972
5973 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5974 find the underlying pseudo. */
5975 if (GET_CODE (x) == SUBREG)
5976 {
5977 x = SUBREG_REG (x);
f8cfc6aa 5978 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
bbf6f052
RK
5979 return 0;
5980 }
5981
1da68f56 5982 /* Now look at our tree code and possibly recurse. */
bbf6f052
RK
5983 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5984 {
6615c446 5985 case tcc_declaration:
a9772b60 5986 exp_rtl = DECL_RTL_IF_SET (exp);
bbf6f052
RK
5987 break;
5988
6615c446 5989 case tcc_constant:
bbf6f052
RK
5990 return 1;
5991
6615c446 5992 case tcc_exceptional:
bbf6f052 5993 if (TREE_CODE (exp) == TREE_LIST)
f8d4be57
CE
5994 {
5995 while (1)
5996 {
5997 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5998 return 0;
5999 exp = TREE_CHAIN (exp);
6000 if (!exp)
6001 return 1;
6002 if (TREE_CODE (exp) != TREE_LIST)
6003 return safe_from_p (x, exp, 0);
6004 }
6005 }
ff439b5f
CB
6006 else if (TREE_CODE (exp) == ERROR_MARK)
6007 return 1; /* An already-visited SAVE_EXPR? */
bbf6f052
RK
6008 else
6009 return 0;
6010
6615c446 6011 case tcc_statement:
350fae66
RK
6012 /* The only case we look at here is the DECL_INITIAL inside a
6013 DECL_EXPR. */
6014 return (TREE_CODE (exp) != DECL_EXPR
6015 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
6016 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
6017 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
6018
6615c446
JO
6019 case tcc_binary:
6020 case tcc_comparison:
f8d4be57
CE
6021 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6022 return 0;
5d3cc252 6023 /* Fall through. */
f8d4be57 6024
6615c446 6025 case tcc_unary:
f8d4be57 6026 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
bbf6f052 6027
6615c446
JO
6028 case tcc_expression:
6029 case tcc_reference:
bbf6f052
RK
6030 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6031 the expression. If it is set, we conflict iff we are that rtx or
6032 both are in memory. Otherwise, we check all operands of the
6033 expression recursively. */
6034
6035 switch (TREE_CODE (exp))
6036 {
6037 case ADDR_EXPR:
70072ed9
RK
6038 /* If the operand is static or we are static, we can't conflict.
6039 Likewise if we don't conflict with the operand at all. */
6040 if (staticp (TREE_OPERAND (exp, 0))
6041 || TREE_STATIC (exp)
6042 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6043 return 1;
6044
6045 /* Otherwise, the only way this can conflict is if we are taking
6046	 the address of a DECL whose address is part of X, which is
6047 very rare. */
6048 exp = TREE_OPERAND (exp, 0);
6049 if (DECL_P (exp))
6050 {
6051 if (!DECL_RTL_SET_P (exp)
3c0cb5de 6052 || !MEM_P (DECL_RTL (exp)))
70072ed9
RK
6053 return 0;
6054 else
6055 exp_rtl = XEXP (DECL_RTL (exp), 0);
6056 }
6057 break;
bbf6f052 6058
7ccf35ed
DN
6059 case MISALIGNED_INDIRECT_REF:
6060 case ALIGN_INDIRECT_REF:
bbf6f052 6061 case INDIRECT_REF:
3c0cb5de 6062 if (MEM_P (x)
1da68f56
RK
6063 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6064 get_alias_set (exp)))
bbf6f052
RK
6065 return 0;
6066 break;
6067
6068 case CALL_EXPR:
f9808f81
MM
6069 /* Assume that the call will clobber all hard registers and
6070 all of memory. */
f8cfc6aa 6071 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
3c0cb5de 6072 || MEM_P (x))
f9808f81 6073 return 0;
bbf6f052
RK
6074 break;
6075
bbf6f052 6076 case WITH_CLEANUP_EXPR:
5dab5552 6077 case CLEANUP_POINT_EXPR:
ac45df5d 6078 /* Lowered by gimplify.c. */
5b0264cb 6079 gcc_unreachable ();
ac45df5d 6080
bbf6f052 6081 case SAVE_EXPR:
82c82743 6082 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
bbf6f052 6083
e9a25f70
JL
6084 default:
6085 break;
bbf6f052
RK
6086 }
6087
6088 /* If we have an rtx, we do not need to scan our operands. */
6089 if (exp_rtl)
6090 break;
6091
54e4aedb 6092 nops = TREE_CODE_LENGTH (TREE_CODE (exp));
bbf6f052
RK
6093 for (i = 0; i < nops; i++)
6094 if (TREE_OPERAND (exp, i) != 0
e5e809f4 6095 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
bbf6f052 6096 return 0;
8f17b5c5
MM
6097
6098 /* If this is a language-specific tree code, it may require
6099 special handling. */
dbbbbf3b
JDA
6100 if ((unsigned int) TREE_CODE (exp)
6101 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
ae2bcd98 6102 && !lang_hooks.safe_from_p (x, exp))
8f17b5c5 6103 return 0;
6615c446
JO
6104 break;
6105
6106 case tcc_type:
6107 /* Should never get a type here. */
6108 gcc_unreachable ();
bbf6f052
RK
6109 }
6110
6111 /* If we have an rtl, find any enclosed object. Then see if we conflict
6112 with it. */
6113 if (exp_rtl)
6114 {
6115 if (GET_CODE (exp_rtl) == SUBREG)
6116 {
6117 exp_rtl = SUBREG_REG (exp_rtl);
f8cfc6aa 6118 if (REG_P (exp_rtl)
bbf6f052
RK
6119 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6120 return 0;
6121 }
6122
6123 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
1da68f56 6124 are memory and they conflict. */
bbf6f052 6125 return ! (rtx_equal_p (x, exp_rtl)
3c0cb5de 6126 || (MEM_P (x) && MEM_P (exp_rtl)
21117a17 6127 && true_dependence (exp_rtl, VOIDmode, x,
1da68f56 6128 rtx_addr_varies_p)));
bbf6f052
RK
6129 }
6130
6131 /* If we reach here, it is safe. */
6132 return 1;
6133}
6134
14a774a9 6135\f
0d4903b8
RK
6136/* Return the highest power of two that EXP is known to be a multiple of.
6137 This is used in updating alignment of MEMs in array references. */
6138
86a07404 6139unsigned HOST_WIDE_INT
502b8322 6140highest_pow2_factor (tree exp)
0d4903b8 6141{
9ceca302 6142 unsigned HOST_WIDE_INT c0, c1;
0d4903b8
RK
6143
6144 switch (TREE_CODE (exp))
6145 {
6146 case INTEGER_CST:
e0f1be5c
JJ
6147 /* We can find the lowest bit that's a one. If the low
6148 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6149 We need to handle this case since we can find it in a COND_EXPR,
a98ebe2e 6150 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
e0f1be5c 6151 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
3a531a8b 6152 later ICE. */
e0f1be5c 6153 if (TREE_CONSTANT_OVERFLOW (exp))
1ed1b4fb 6154 return BIGGEST_ALIGNMENT;
e0f1be5c 6155 else
0d4903b8 6156 {
e0f1be5c
JJ
6157 /* Note: tree_low_cst is intentionally not used here,
6158 we don't care about the upper bits. */
6159 c0 = TREE_INT_CST_LOW (exp);
6160 c0 &= -c0;
6161 return c0 ? c0 : BIGGEST_ALIGNMENT;
0d4903b8
RK
6162 }
6163 break;
6164
65a07688 6165 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
0d4903b8
RK
6166 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6167 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6168 return MIN (c0, c1);
6169
6170 case MULT_EXPR:
6171 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6172 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6173 return c0 * c1;
6174
6175 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6176 case CEIL_DIV_EXPR:
65a07688
RK
6177 if (integer_pow2p (TREE_OPERAND (exp, 1))
6178 && host_integerp (TREE_OPERAND (exp, 1), 1))
6179 {
6180 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6181 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6182 return MAX (1, c0 / c1);
6183 }
6184 break;
0d4903b8
RK
6185
6186 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6fce44af 6187 case SAVE_EXPR:
0d4903b8
RK
6188 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6189
65a07688
RK
6190 case COMPOUND_EXPR:
6191 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6192
0d4903b8
RK
6193 case COND_EXPR:
6194 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6195 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6196 return MIN (c0, c1);
6197
6198 default:
6199 break;
6200 }
6201
6202 return 1;
6203}
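The INTEGER_CST case above isolates the lowest set bit with the c & -c idiom;
for a nonzero value that is exactly the largest power of two dividing it,
while a zero low-order word falls back to BIGGEST_ALIGNMENT.  A minimal
stand-alone sketch (hypothetical helper, not part of expr.c):

/* 24 -> 8, 40 -> 8, 7 -> 1, 0 -> 0 (the caller above special-cases 0).  */
unsigned long
lowest_set_bit (unsigned long c)
{
  return c & -c;
}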
818c0c94 6204
d50a16c4
EB
6205/* Similar, except that the alignment requirements of TARGET are
6206 taken into account. Assume it is at least as aligned as its
6207 type, unless it is a COMPONENT_REF in which case the layout of
6208 the structure gives the alignment. */
818c0c94 6209
9ceca302 6210static unsigned HOST_WIDE_INT
d50a16c4 6211highest_pow2_factor_for_target (tree target, tree exp)
818c0c94 6212{
d50a16c4 6213 unsigned HOST_WIDE_INT target_align, factor;
818c0c94
RH
6214
6215 factor = highest_pow2_factor (exp);
d50a16c4 6216 if (TREE_CODE (target) == COMPONENT_REF)
a4e9ffe5 6217 target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
d50a16c4 6218 else
a4e9ffe5 6219 target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
d50a16c4 6220 return MAX (factor, target_align);
818c0c94 6221}
0d4903b8 6222\f
6de9cd9a
DN
6223/* Expands variable VAR. */
6224
6225void
6226expand_var (tree var)
6227{
6228 if (DECL_EXTERNAL (var))
6229 return;
6230
6231 if (TREE_STATIC (var))
6232 /* If this is an inlined copy of a static local variable,
6233 look up the original decl. */
6234 var = DECL_ORIGIN (var);
6235
6236 if (TREE_STATIC (var)
6237 ? !TREE_ASM_WRITTEN (var)
6238 : !DECL_RTL_SET_P (var))
6239 {
833b3afe 6240 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
1a186ec5 6241 /* Should be ignored. */;
673fda6b 6242 else if (lang_hooks.expand_decl (var))
6de9cd9a
DN
6243 /* OK. */;
6244 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6245 expand_decl (var);
6246 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
0e6df31e 6247 rest_of_decl_compilation (var, 0, 0);
6de9cd9a 6248 else
5b0264cb
NS
6249 /* No expansion needed. */
6250 gcc_assert (TREE_CODE (var) == TYPE_DECL
6251 || TREE_CODE (var) == CONST_DECL
6252 || TREE_CODE (var) == FUNCTION_DECL
6253 || TREE_CODE (var) == LABEL_DECL);
6de9cd9a
DN
6254 }
6255}
6256
eb698c58
RS
6257/* Subroutine of expand_expr. Expand the two operands of a binary
6258 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6259 The value may be stored in TARGET if TARGET is nonzero. The
6260 MODIFIER argument is as documented by expand_expr. */
6261
6262static void
6263expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6264 enum expand_modifier modifier)
6265{
6266 if (! safe_from_p (target, exp1, 1))
6267 target = 0;
6268 if (operand_equal_p (exp0, exp1, 0))
6269 {
6270 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6271 *op1 = copy_rtx (*op0);
6272 }
6273 else
6274 {
c67e6e14
RS
6275 /* If we need to preserve evaluation order, copy exp0 into its own
6276 temporary variable so that it can't be clobbered by exp1. */
6277 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6278 exp0 = save_expr (exp0);
eb698c58
RS
6279 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6280 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6281 }
6282}
6283
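The save_expr copy above matters when the language requires left-to-right
evaluation and the second operand can clobber something the first operand
reads.  A hypothetical source-level illustration (not part of expr.c) of the
behavior the temporary preserves:

int
sum_left_to_right (int *p)
{
  int first = *p;          /* copy of operand 0, taken before operand 1 runs */
  int second = (*p = 7);   /* operand 1 has a side effect on *p */
  return first + second;   /* uses the saved value of operand 0 */
}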
f47e9b4e 6284\f
aacd3885
RS
6285/* Return a MEM that contains constant EXP.  DEFER is as for
6286 output_constant_def and MODIFIER is as for expand_expr. */
6287
6288static rtx
6289expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
6290{
6291 rtx mem;
6292
6293 mem = output_constant_def (exp, defer);
6294 if (modifier != EXPAND_INITIALIZER)
6295 mem = use_anchored_address (mem);
6296 return mem;
6297}
6298
70bb498a 6299/* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6377bb9a
RH
6300 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6301
6302static rtx
70bb498a
RH
6303expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6304 enum expand_modifier modifier)
6377bb9a
RH
6305{
6306 rtx result, subtarget;
6307 tree inner, offset;
6308 HOST_WIDE_INT bitsize, bitpos;
6309 int volatilep, unsignedp;
6310 enum machine_mode mode1;
6311
6312 /* If we are taking the address of a constant and are at the top level,
6313 we have to use output_constant_def since we can't call force_const_mem
6314 at top level. */
6315 /* ??? This should be considered a front-end bug. We should not be
6316 generating ADDR_EXPR of something that isn't an LVALUE. The only
6317 exception here is STRING_CST. */
6318 if (TREE_CODE (exp) == CONSTRUCTOR
6615c446 6319 || CONSTANT_CLASS_P (exp))
aacd3885 6320 return XEXP (expand_expr_constant (exp, 0, modifier), 0);
6377bb9a
RH
6321
6322 /* Everything must be something allowed by is_gimple_addressable. */
6323 switch (TREE_CODE (exp))
6324 {
6325 case INDIRECT_REF:
6326 /* This case will happen via recursion for &a->b. */
aacd3885 6327 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6377bb9a
RH
6328
6329 case CONST_DECL:
6330 /* Recurse and make the output_constant_def clause above handle this. */
70bb498a 6331 return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
b0b324b0 6332 tmode, modifier);
6377bb9a
RH
6333
6334 case REALPART_EXPR:
6335 /* The real part of the complex number is always first, therefore
6336 the address is the same as the address of the parent object. */
6337 offset = 0;
6338 bitpos = 0;
6339 inner = TREE_OPERAND (exp, 0);
6340 break;
6341
6342 case IMAGPART_EXPR:
6343 /* The imaginary part of the complex number is always second.
2a7e31df 6344 The expression is therefore always offset by the size of the
6377bb9a
RH
6345 scalar type. */
6346 offset = 0;
6347 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6348 inner = TREE_OPERAND (exp, 0);
6349 break;
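      /* Worked example (illustrative): for "_Complex double z", taking
	 the address of __imag z goes through here; on a target where
	 DFmode is 64 bits, BITPOS becomes 64 and the plus_constant at
	 the end of this function adds 8 bytes to the address of Z.  */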
6350
6351 default:
6352 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6353 expand_expr, as that can have various side effects; LABEL_DECLs for
6354 example, may not have their DECL_RTL set yet. Assume language
6355 specific tree nodes can be expanded in some interesting way. */
6356 if (DECL_P (exp)
6357 || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
6358 {
6359 result = expand_expr (exp, target, tmode,
6360 modifier == EXPAND_INITIALIZER
6361 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6362
6363 /* If the DECL isn't in memory, then the DECL wasn't properly
6364 marked TREE_ADDRESSABLE, which will be either a front-end
6365 or a tree optimizer bug. */
2ca202e7 6366 gcc_assert (MEM_P (result));
6377bb9a
RH
6367 result = XEXP (result, 0);
6368
6369 /* ??? Is this needed anymore? */
b0b324b0 6370 if (DECL_P (exp) && !TREE_USED (exp))
6377bb9a
RH
6371 {
6372 assemble_external (exp);
6373 TREE_USED (exp) = 1;
6374 }
6375
6376 if (modifier != EXPAND_INITIALIZER
6377 && modifier != EXPAND_CONST_ADDRESS)
6378 result = force_operand (result, target);
6379 return result;
6380 }
6381
2614034e
EB
6382 /* Pass FALSE as the last argument to get_inner_reference although
6383 we are expanding to RTL. The rationale is that we know how to
6384 handle "aligning nodes" here: we can just bypass them because
6385 they won't change the final object whose address will be returned
6386 (they actually exist only for that purpose). */
6377bb9a 6387 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
2614034e 6388 &mode1, &unsignedp, &volatilep, false);
6377bb9a
RH
6389 break;
6390 }
6391
6392 /* We must have made progress. */
5b0264cb 6393 gcc_assert (inner != exp);
6377bb9a
RH
6394
6395 subtarget = offset || bitpos ? NULL_RTX : target;
70bb498a 6396 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6377bb9a 6397
6377bb9a
RH
6398 if (offset)
6399 {
6400 rtx tmp;
6401
6402 if (modifier != EXPAND_NORMAL)
6403 result = force_operand (result, NULL);
6404 tmp = expand_expr (offset, NULL, tmode, EXPAND_NORMAL);
6405
b0b324b0
RH
6406 result = convert_memory_address (tmode, result);
6407 tmp = convert_memory_address (tmode, tmp);
6408
d047a201 6409 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6377bb9a
RH
6410 result = gen_rtx_PLUS (tmode, result, tmp);
6411 else
6412 {
6413 subtarget = bitpos ? NULL_RTX : target;
6414 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6415 1, OPTAB_LIB_WIDEN);
6416 }
6417 }
6418
6419 if (bitpos)
6420 {
6421 /* Someone beforehand should have rejected taking the address
6422 of such an object. */
b0b324b0 6423 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6377bb9a
RH
6424
6425 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6426 if (modifier < EXPAND_SUM)
6427 result = force_operand (result, target);
6428 }
6429
6430 return result;
6431}
6432
70bb498a
RH
6433/* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6434 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6435
6436static rtx
6437expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6438 enum expand_modifier modifier)
6439{
6440 enum machine_mode rmode;
6441 rtx result;
6442
b0b324b0
RH
6443 /* Target mode of VOIDmode says "whatever's natural". */
6444 if (tmode == VOIDmode)
6445 tmode = TYPE_MODE (TREE_TYPE (exp));
6446
6447 /* We can get called with some Weird Things if the user does silliness
6448 like "(short) &a". In that case, convert_memory_address won't do
6449 the right thing, so ignore the given target mode. */
103b83ea 6450 if (tmode != Pmode && tmode != ptr_mode)
b0b324b0
RH
6451 tmode = Pmode;
6452
70bb498a
RH
6453 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
6454 tmode, modifier);
6455
 6456 /* Despite expand_expr's claims about ignoring TMODE when not
b0b324b0
RH
6457 strictly convenient, stuff breaks if we don't honor it. Note
6458 that combined with the above, we only do this for pointer modes. */
70bb498a
RH
6459 rmode = GET_MODE (result);
6460 if (rmode == VOIDmode)
6461 rmode = tmode;
6462 if (rmode != tmode)
6463 result = convert_memory_address (tmode, result);
b0b324b0 6464
70bb498a
RH
6465 return result;
6466}
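/* Hedged walk-through (added for illustration): for "&p->f" the
   ADDR_EXPR wraps a COMPONENT_REF; expand_expr_addr_expr_1 takes the
   default case, get_inner_reference peels the reference down to the
   INDIRECT_REF *p, the recursive call yields the rtx for P, and the
   bit position of F comes back in BITPOS, whose byte equivalent is
   added with plus_constant (or with an explicit addition if the
   offset is variable).  */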
6467
6468
bbf6f052
RK
6469/* expand_expr: generate code for computing expression EXP.
6470 An rtx for the computed value is returned. The value is never null.
6471 In the case of a void EXP, const0_rtx is returned.
6472
6473 The value may be stored in TARGET if TARGET is nonzero.
6474 TARGET is just a suggestion; callers must assume that
6475 the rtx returned may not be the same as TARGET.
6476
6477 If TARGET is CONST0_RTX, it means that the value will be ignored.
6478
6479 If TMODE is not VOIDmode, it suggests generating the
6480 result in mode TMODE. But this is done only when convenient.
 6481 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6482 TMODE is just a suggestion; callers must assume that
6483 the rtx returned may not have mode TMODE.
6484
d6a5ac33
RK
6485 Note that TARGET may have neither TMODE nor MODE. In that case, it
6486 probably will not be used.
bbf6f052
RK
6487
6488 If MODIFIER is EXPAND_SUM then when EXP is an addition
6489 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6490 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6491 products as above, or REG or MEM, or constant.
6492 Ordinarily in such cases we would output mul or add instructions
6493 and then return a pseudo reg containing the sum.
6494
6495 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6496 it also marks a label as absolutely required (it can't be dead).
26fcb35a 6497 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
d6a5ac33
RK
6498 This is used for outputting expressions used in initializers.
6499
6500 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6501 with a constant address even if that address is not normally legitimate.
8403445a
AM
6502 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6503
6504 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6505 a call parameter. Such targets require special care as we haven't yet
6506 marked TARGET so that it's safe from being trashed by libcalls. We
6507 don't want to use TARGET for anything but the final result;
 6508 intermediate values must go elsewhere. Additionally, calls to
caf93cb0 6509 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
0fab64a3
MM
6510
6511 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6512 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6513 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6514 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6515 recursively. */
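/* Illustration of the EXPAND_SUM contract above (added; assumes a
   4-byte element type): expanding the address arithmetic for "a[i]"
   with EXPAND_SUM may legitimately return
      (plus (reg) (mult (reg) (const_int 4)))
   instead of forcing the sum into a pseudo, leaving the caller free
   to fold it into an addressing mode.  */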
bbf6f052 6516
6de9cd9a
DN
6517static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6518 enum expand_modifier, rtx *);
6519
bbf6f052 6520rtx
0fab64a3
MM
6521expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6522 enum expand_modifier modifier, rtx *alt_rtl)
6de9cd9a
DN
6523{
6524 int rn = -1;
6525 rtx ret, last = NULL;
6526
6527 /* Handle ERROR_MARK before anybody tries to access its type. */
6528 if (TREE_CODE (exp) == ERROR_MARK
6529 || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
6530 {
6531 ret = CONST0_RTX (tmode);
6532 return ret ? ret : const0_rtx;
6533 }
6534
6535 if (flag_non_call_exceptions)
6536 {
6537 rn = lookup_stmt_eh_region (exp);
6538 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
6539 if (rn >= 0)
6540 last = get_last_insn ();
6541 }
6542
6543 /* If this is an expression of some kind and it has an associated line
caf93cb0 6544 number, then emit the line number before expanding the expression.
6de9cd9a
DN
6545
6546 We need to save and restore the file and line information so that
6547 errors discovered during expansion are emitted with the right
caf93cb0 6548 information. It would be better of the diagnostic routines
6de9cd9a
DN
6549 used the file/line information embedded in the tree nodes rather
6550 than globals. */
c48dc958 6551 if (cfun && cfun->ib_boundaries_block && EXPR_HAS_LOCATION (exp))
6de9cd9a
DN
6552 {
6553 location_t saved_location = input_location;
6554 input_location = EXPR_LOCATION (exp);
6555 emit_line_note (input_location);
caf93cb0 6556
6de9cd9a 6557 /* Record where the insns produced belong. */
1ea463a2 6558 record_block_change (TREE_BLOCK (exp));
6de9cd9a
DN
6559
6560 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6561
6562 input_location = saved_location;
6563 }
6564 else
6565 {
6566 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6567 }
6568
6569 /* If using non-call exceptions, mark all insns that may trap.
6570 expand_call() will mark CALL_INSNs before we get to this code,
6571 but it doesn't handle libcalls, and these may trap. */
6572 if (rn >= 0)
caf93cb0 6573 {
6de9cd9a 6574 rtx insn;
caf93cb0 6575 for (insn = next_real_insn (last); insn;
6de9cd9a
DN
6576 insn = next_real_insn (insn))
6577 {
6578 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6579 /* If we want exceptions for non-call insns, any
6580 may_trap_p instruction may throw. */
6581 && GET_CODE (PATTERN (insn)) != CLOBBER
6582 && GET_CODE (PATTERN (insn)) != USE
4b4bf941 6583 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
6de9cd9a
DN
6584 {
6585 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6586 REG_NOTES (insn));
6587 }
6588 }
6589 }
6590
6591 return ret;
6592}
6593
6594static rtx
6595expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6596 enum expand_modifier modifier, rtx *alt_rtl)
bbf6f052 6597{
aacd3885 6598 rtx op0, op1, temp, decl_rtl;
bbf6f052 6599 tree type = TREE_TYPE (exp);
8df83eae 6600 int unsignedp;
b3694847
SS
6601 enum machine_mode mode;
6602 enum tree_code code = TREE_CODE (exp);
bbf6f052 6603 optab this_optab;
68557e14
ML
6604 rtx subtarget, original_target;
6605 int ignore;
8b44057d 6606 tree context, subexp0, subexp1;
bc15d0ef
JM
6607 bool reduce_bit_field = false;
6608#define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
6609 ? reduce_to_bit_field_precision ((expr), \
6610 target, \
6611 type) \
6612 : (expr))
bbf6f052 6613
68557e14 6614 mode = TYPE_MODE (type);
8df83eae 6615 unsignedp = TYPE_UNSIGNED (type);
bc15d0ef
JM
6616 if (lang_hooks.reduce_bit_field_operations
6617 && TREE_CODE (type) == INTEGER_TYPE
6618 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
6619 {
6620 /* An operation in what may be a bit-field type needs the
6621 result to be reduced to the precision of the bit-field type,
6622 which is narrower than that of the type's mode. */
6623 reduce_bit_field = true;
6624 if (modifier == EXPAND_STACK_PARM)
6625 target = 0;
6626 }
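  /* Hypothetical example (added): a front end that sets
     reduce_bit_field_operations may create an INTEGER_TYPE of
     precision 24 whose TYPE_MODE is SImode; arithmetic is then done in
     full SImode and REDUCE_BIT_FIELD reduces each result back to 24
     significant bits, masking or sign-extending as the type requires.  */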
8df83eae 6627
68557e14 6628 /* Use subtarget as the target for operand 0 of a binary operation. */
296b4ed9 6629 subtarget = get_subtarget (target);
68557e14
ML
6630 original_target = target;
6631 ignore = (target == const0_rtx
6632 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
3a18db48
AP
6633 || code == CONVERT_EXPR || code == COND_EXPR
6634 || code == VIEW_CONVERT_EXPR)
68557e14
ML
6635 && TREE_CODE (type) == VOID_TYPE));
6636
dd27116b
RK
6637 /* If we are going to ignore this result, we need only do something
6638 if there is a side-effect somewhere in the expression. If there
b50d17a1
RK
6639 is, short-circuit the most common cases here. Note that we must
6640 not call expand_expr with anything but const0_rtx in case this
6641 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
bbf6f052 6642
dd27116b
RK
6643 if (ignore)
6644 {
6645 if (! TREE_SIDE_EFFECTS (exp))
6646 return const0_rtx;
6647
14a774a9
RK
6648 /* Ensure we reference a volatile object even if value is ignored, but
6649 don't do this if all we are doing is taking its address. */
dd27116b
RK
6650 if (TREE_THIS_VOLATILE (exp)
6651 && TREE_CODE (exp) != FUNCTION_DECL
14a774a9
RK
6652 && mode != VOIDmode && mode != BLKmode
6653 && modifier != EXPAND_CONST_ADDRESS)
dd27116b 6654 {
37a08a29 6655 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
3c0cb5de 6656 if (MEM_P (temp))
dd27116b
RK
6657 temp = copy_to_reg (temp);
6658 return const0_rtx;
6659 }
6660
6615c446
JO
6661 if (TREE_CODE_CLASS (code) == tcc_unary
6662 || code == COMPONENT_REF || code == INDIRECT_REF)
37a08a29
RK
6663 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6664 modifier);
6665
6615c446
JO
6666 else if (TREE_CODE_CLASS (code) == tcc_binary
6667 || TREE_CODE_CLASS (code) == tcc_comparison
b4e3fabb 6668 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
dd27116b 6669 {
37a08a29
RK
6670 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6671 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
dd27116b
RK
6672 return const0_rtx;
6673 }
14a774a9
RK
6674 else if (code == BIT_FIELD_REF)
6675 {
37a08a29
RK
6676 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6677 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6678 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
14a774a9
RK
6679 return const0_rtx;
6680 }
37a08a29 6681
90764a87 6682 target = 0;
dd27116b 6683 }
bbf6f052 6684
bbf6f052 6685
bbf6f052
RK
6686 switch (code)
6687 {
6688 case LABEL_DECL:
b552441b
RS
6689 {
6690 tree function = decl_function_context (exp);
c5c76735 6691
6de9cd9a
DN
6692 temp = label_rtx (exp);
6693 temp = gen_rtx_LABEL_REF (Pmode, temp);
6694
d0977240 6695 if (function != current_function_decl
6de9cd9a
DN
6696 && function != 0)
6697 LABEL_REF_NONLOCAL_P (temp) = 1;
6698
6699 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
26fcb35a 6700 return temp;
b552441b 6701 }
bbf6f052 6702
8b11a64c
ZD
6703 case SSA_NAME:
6704 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
6705 NULL);
6706
bbf6f052 6707 case PARM_DECL:
bbf6f052 6708 case VAR_DECL:
2dca20cd
RS
6709 /* If a static var's type was incomplete when the decl was written,
6710 but the type is complete now, lay out the decl now. */
ca06cfe6
RH
6711 if (DECL_SIZE (exp) == 0
6712 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
2dca20cd 6713 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
a46666a9 6714 layout_decl (exp, 0);
921b3427 6715
0f41302f 6716 /* ... fall through ... */
d6a5ac33 6717
2dca20cd 6718 case FUNCTION_DECL:
bbf6f052 6719 case RESULT_DECL:
aacd3885
RS
6720 decl_rtl = DECL_RTL (exp);
6721 gcc_assert (decl_rtl);
d6a5ac33 6722
e44842fe
RK
 6723 /* Ensure the variable is marked as used even if it doesn't go through
 6724 a parser. If it hasn't been used yet, write out an external
6725 definition. */
6726 if (! TREE_USED (exp))
6727 {
6728 assemble_external (exp);
6729 TREE_USED (exp) = 1;
6730 }
6731
dc6d66b3
RK
6732 /* Show we haven't gotten RTL for this yet. */
6733 temp = 0;
6734
ab8907ef
RH
6735 /* Variables inherited from containing functions should have
6736 been lowered by this point. */
bbf6f052 6737 context = decl_function_context (exp);
5b0264cb
NS
6738 gcc_assert (!context
6739 || context == current_function_decl
6740 || TREE_STATIC (exp)
6741 /* ??? C++ creates functions that are not TREE_STATIC. */
6742 || TREE_CODE (exp) == FUNCTION_DECL);
4af3895e 6743
bbf6f052
RK
6744 /* This is the case of an array whose size is to be determined
6745 from its initializer, while the initializer is still being parsed.
6746 See expand_decl. */
d6a5ac33 6747
aacd3885
RS
6748 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
6749 temp = validize_mem (decl_rtl);
d6a5ac33
RK
6750
6751 /* If DECL_RTL is memory, we are in the normal case and either
6752 the address is not valid or it is not a register and -fforce-addr
6753 is specified, get the address into a register. */
6754
aacd3885 6755 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
0fab64a3
MM
6756 {
6757 if (alt_rtl)
aacd3885
RS
6758 *alt_rtl = decl_rtl;
6759 decl_rtl = use_anchored_address (decl_rtl);
6760 if (modifier != EXPAND_CONST_ADDRESS
6761 && modifier != EXPAND_SUM
6762 && (!memory_address_p (DECL_MODE (exp), XEXP (decl_rtl, 0))
6763 || (flag_force_addr && !REG_P (XEXP (decl_rtl, 0)))))
6764 temp = replace_equiv_address (decl_rtl,
6765 copy_rtx (XEXP (decl_rtl, 0)));
0fab64a3 6766 }
1499e0a8 6767
dc6d66b3 6768 /* If we got something, return it. But first, set the alignment
04956a1a 6769 if the address is a register. */
dc6d66b3
RK
6770 if (temp != 0)
6771 {
3c0cb5de 6772 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
bdb429a5 6773 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
dc6d66b3
RK
6774
6775 return temp;
6776 }
6777
1499e0a8
RK
6778 /* If the mode of DECL_RTL does not match that of the decl, it
6779 must be a promoted value. We return a SUBREG of the wanted mode,
6780 but mark it so that we know that it was already extended. */
6781
aacd3885
RS
6782 if (REG_P (decl_rtl)
6783 && GET_MODE (decl_rtl) != DECL_MODE (exp))
1499e0a8 6784 {
5b0264cb
NS
6785 enum machine_mode pmode;
6786
1499e0a8
RK
6787 /* Get the signedness used for this variable. Ensure we get the
6788 same mode we got when the variable was declared. */
5b0264cb 6789 pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
3f9e6aed
PB
6790 (TREE_CODE (exp) == RESULT_DECL
6791 || TREE_CODE (exp) == PARM_DECL) ? 1 : 0);
aacd3885 6792 gcc_assert (GET_MODE (decl_rtl) == pmode);
1499e0a8 6793
aacd3885 6794 temp = gen_lowpart_SUBREG (mode, decl_rtl);
1499e0a8 6795 SUBREG_PROMOTED_VAR_P (temp) = 1;
7879b81e 6796 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
1499e0a8
RK
6797 return temp;
6798 }
6799
aacd3885 6800 return decl_rtl;
bbf6f052
RK
6801
6802 case INTEGER_CST:
d8a50944 6803 temp = immed_double_const (TREE_INT_CST_LOW (exp),
05bccae2 6804 TREE_INT_CST_HIGH (exp), mode);
bbf6f052 6805
d8a50944
RH
6806 /* ??? If overflow is set, fold will have done an incomplete job,
6807 which can result in (plus xx (const_int 0)), which can get
6808 simplified by validate_replace_rtx during virtual register
6809 instantiation, which can result in unrecognizable insns.
6810 Avoid this by forcing all overflows into registers. */
c2e9dc85
RH
6811 if (TREE_CONSTANT_OVERFLOW (exp)
6812 && modifier != EXPAND_INITIALIZER)
d8a50944
RH
6813 temp = force_reg (mode, temp);
6814
6815 return temp;
6816
d744e06e 6817 case VECTOR_CST:
3a021db2
PB
6818 if (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_INT
6819 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_FLOAT)
6820 return const_vector_from_tree (exp);
caf93cb0 6821 else
4038c495
GB
6822 return expand_expr (build_constructor_from_list
6823 (TREE_TYPE (exp),
6824 TREE_VECTOR_CST_ELTS (exp)),
3a021db2 6825 ignore ? const0_rtx : target, tmode, modifier);
d744e06e 6826
bbf6f052 6827 case CONST_DECL:
8403445a 6828 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
bbf6f052
RK
6829
6830 case REAL_CST:
6831 /* If optimized, generate immediate CONST_DOUBLE
3a94c984
KH
6832 which will be turned into memory by reload if necessary.
6833
bbf6f052
RK
6834 We used to force a register so that loop.c could see it. But
6835 this does not allow gen_* patterns to perform optimizations with
6836 the constants. It also produces two insns in cases like "x = 1.0;".
6837 On most machines, floating-point constants are not permitted in
6838 many insns, so we'd end up copying it to a register in any case.
6839
6840 Now, we do the copying in expand_binop, if appropriate. */
5692c7bc
ZW
6841 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6842 TYPE_MODE (TREE_TYPE (exp)));
bbf6f052
RK
6843
6844 case COMPLEX_CST:
9ad58e09
RS
6845 /* Handle evaluating a complex constant in a CONCAT target. */
6846 if (original_target && GET_CODE (original_target) == CONCAT)
6847 {
6848 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6849 rtx rtarg, itarg;
6850
6851 rtarg = XEXP (original_target, 0);
6852 itarg = XEXP (original_target, 1);
6853
6854 /* Move the real and imaginary parts separately. */
6855 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6856 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6857
6858 if (op0 != rtarg)
6859 emit_move_insn (rtarg, op0);
6860 if (op1 != itarg)
6861 emit_move_insn (itarg, op1);
6862
6863 return original_target;
6864 }
6865
71c0e7fc 6866 /* ... fall through ... */
9ad58e09 6867
bbf6f052 6868 case STRING_CST:
aacd3885 6869 temp = expand_expr_constant (exp, 1, modifier);
bbf6f052 6870
afc6aaab 6871 /* temp contains a constant address.
bbf6f052
RK
6872 On RISC machines where a constant address isn't valid,
6873 make some insns to get that address into a register. */
afc6aaab 6874 if (modifier != EXPAND_CONST_ADDRESS
bbf6f052
RK
6875 && modifier != EXPAND_INITIALIZER
6876 && modifier != EXPAND_SUM
afc6aaab
ZW
6877 && (! memory_address_p (mode, XEXP (temp, 0))
6878 || flag_force_addr))
6879 return replace_equiv_address (temp,
6880 copy_rtx (XEXP (temp, 0)));
6881 return temp;
bbf6f052
RK
6882
6883 case SAVE_EXPR:
82c82743
RH
6884 {
6885 tree val = TREE_OPERAND (exp, 0);
6886 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
e5e809f4 6887
7f5e6307 6888 if (!SAVE_EXPR_RESOLVED_P (exp))
82c82743
RH
6889 {
6890 /* We can indeed still hit this case, typically via builtin
6891 expanders calling save_expr immediately before expanding
6892 something. Assume this means that we only have to deal
6893 with non-BLKmode values. */
5b0264cb 6894 gcc_assert (GET_MODE (ret) != BLKmode);
1499e0a8 6895
82c82743
RH
6896 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
6897 DECL_ARTIFICIAL (val) = 1;
7f5e6307 6898 DECL_IGNORED_P (val) = 1;
82c82743 6899 TREE_OPERAND (exp, 0) = val;
7f5e6307 6900 SAVE_EXPR_RESOLVED_P (exp) = 1;
1499e0a8 6901
82c82743
RH
6902 if (!CONSTANT_P (ret))
6903 ret = copy_to_reg (ret);
6904 SET_DECL_RTL (val, ret);
6905 }
1499e0a8 6906
82c82743
RH
6907 return ret;
6908 }
bbf6f052 6909
70e6ca43
APB
6910 case GOTO_EXPR:
6911 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6912 expand_goto (TREE_OPERAND (exp, 0));
6913 else
6914 expand_computed_goto (TREE_OPERAND (exp, 0));
6915 return const0_rtx;
6916
bbf6f052 6917 case CONSTRUCTOR:
dd27116b
RK
6918 /* If we don't need the result, just ensure we evaluate any
6919 subexpressions. */
6920 if (ignore)
6921 {
4038c495
GB
6922 unsigned HOST_WIDE_INT idx;
6923 tree value;
37a08a29 6924
4038c495
GB
6925 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6926 expand_expr (value, const0_rtx, VOIDmode, 0);
37a08a29 6927
dd27116b
RK
6928 return const0_rtx;
6929 }
3207b172 6930
c5250139
RG
6931 /* Try to avoid creating a temporary at all. This is possible
6932 if all of the initializer is zero.
6933 FIXME: try to handle all [0..255] initializers we can handle
6934 with memset. */
6935 else if (TREE_STATIC (exp)
6936 && !TREE_ADDRESSABLE (exp)
6937 && target != 0 && mode == BLKmode
6938 && all_zeros_p (exp))
6939 {
6940 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
6941 return target;
6942 }
6943
4af3895e
JVA
6944 /* All elts simple constants => refer to a constant in memory. But
6945 if this is a non-BLKmode mode, let it store a field at a time
6946 since that should make a CONST_INT or CONST_DOUBLE when we
3207b172 6947 fold. Likewise, if we have a target we can use, it is best to
d720b9d1
RK
6948 store directly into the target unless the type is large enough
6949 that memcpy will be used. If we are making an initializer and
00182e1e
AH
6950 all operands are constant, put it in memory as well.
6951
6952 FIXME: Avoid trying to fill vector constructors piece-meal.
6953 Output them with output_constant_def below unless we're sure
6954 they're zeros. This should go away when vector initializers
6955 are treated like VECTOR_CST instead of arrays.
6956 */
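      /* Illustrative contrast for the choice above (added comment): a
	 large "static const int tbl[256] = {...};" typically takes the
	 branch below and becomes a reference to constant memory, while
	 a small aggregate with a usable TARGET falls through to the
	 else clause and is built in place by store_constructor.  */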
dd27116b 6957 else if ((TREE_STATIC (exp)
3207b172 6958 && ((mode == BLKmode
e5e809f4 6959 && ! (target != 0 && safe_from_p (target, exp, 1)))
d720b9d1 6960 || TREE_ADDRESSABLE (exp)
19caa751 6961 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
3a94c984 6962 && (! MOVE_BY_PIECES_P
19caa751
RK
6963 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6964 TYPE_ALIGN (type)))
6de9cd9a 6965 && ! mostly_zeros_p (exp))))
f59700f9
RK
6966 || ((modifier == EXPAND_INITIALIZER
6967 || modifier == EXPAND_CONST_ADDRESS)
6968 && TREE_CONSTANT (exp)))
bbf6f052 6969 {
aacd3885 6970 rtx constructor = expand_expr_constant (exp, 1, modifier);
19caa751 6971
b552441b
RS
6972 if (modifier != EXPAND_CONST_ADDRESS
6973 && modifier != EXPAND_INITIALIZER
792760b9
RK
6974 && modifier != EXPAND_SUM)
6975 constructor = validize_mem (constructor);
6976
bbf6f052
RK
6977 return constructor;
6978 }
bbf6f052
RK
6979 else
6980 {
e9ac02a6
JW
6981 /* Handle calls that pass values in multiple non-contiguous
6982 locations. The Irix 6 ABI has examples of this. */
e5e809f4 6983 if (target == 0 || ! safe_from_p (target, exp, 1)
8403445a
AM
6984 || GET_CODE (target) == PARALLEL
6985 || modifier == EXPAND_STACK_PARM)
1da68f56
RK
6986 target
6987 = assign_temp (build_qualified_type (type,
6988 (TYPE_QUALS (type)
6989 | (TREE_READONLY (exp)
6990 * TYPE_QUAL_CONST))),
c24ae149 6991 0, TREE_ADDRESSABLE (exp), 1);
07604beb 6992
dbb5c281 6993 store_constructor (exp, target, 0, int_expr_size (exp));
bbf6f052
RK
6994 return target;
6995 }
6996
7ccf35ed
DN
6997 case MISALIGNED_INDIRECT_REF:
6998 case ALIGN_INDIRECT_REF:
bbf6f052
RK
6999 case INDIRECT_REF:
7000 {
7001 tree exp1 = TREE_OPERAND (exp, 0);
3a94c984 7002
6de9cd9a
DN
7003 if (modifier != EXPAND_WRITE)
7004 {
7005 tree t;
7006
7007 t = fold_read_from_constant_string (exp);
7008 if (t)
7009 return expand_expr (t, target, tmode, modifier);
7010 }
bbf6f052 7011
405f0da6
JW
7012 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7013 op0 = memory_address (mode, op0);
7ccf35ed
DN
7014
7015 if (code == ALIGN_INDIRECT_REF)
7016 {
7017 int align = TYPE_ALIGN_UNIT (type);
7018 op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
7019 op0 = memory_address (mode, op0);
7020 }
7021
38a448ca 7022 temp = gen_rtx_MEM (mode, op0);
8b11a64c 7023
ac182688 7024 set_mem_attributes (temp, exp, 0);
1125706f 7025
1e0598e2
RH
7026 /* Resolve the misalignment now, so that we don't have to remember
7027 to resolve it later. Of course, this only works for reads. */
7028 /* ??? When we get around to supporting writes, we'll have to handle
7029 this in store_expr directly. The vectorizer isn't generating
7030 those yet, however. */
7031 if (code == MISALIGNED_INDIRECT_REF)
7032 {
7033 int icode;
7034 rtx reg, insn;
7035
29b2d867
RH
7036 gcc_assert (modifier == EXPAND_NORMAL
7037 || modifier == EXPAND_STACK_PARM);
1e0598e2
RH
7038
7039 /* The vectorizer should have already checked the mode. */
7040 icode = movmisalign_optab->handlers[mode].insn_code;
7041 gcc_assert (icode != CODE_FOR_nothing);
7042
7043 /* We've already validated the memory, and we're creating a
7044 new pseudo destination. The predicates really can't fail. */
7045 reg = gen_reg_rtx (mode);
7046
7047 /* Nor can the insn generator. */
7048 insn = GEN_FCN (icode) (reg, temp);
7049 emit_insn (insn);
7050
7051 return reg;
7052 }
7053
8c8a8e34
JW
7054 return temp;
7055 }
bbf6f052 7056
ac182688
ZD
7057 case TARGET_MEM_REF:
7058 {
7059 struct mem_address addr;
7060
7061 get_address_description (exp, &addr);
7062 op0 = addr_for_mem_ref (&addr, true);
7063 op0 = memory_address (mode, op0);
7064 temp = gen_rtx_MEM (mode, op0);
7065 set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
7066 }
7067 return temp;
7068
bbf6f052 7069 case ARRAY_REF:
6de9cd9a 7070
bbf6f052 7071 {
742920c7 7072 tree array = TREE_OPERAND (exp, 0);
45d8710e 7073 tree index = TREE_OPERAND (exp, 1);
742920c7 7074
742920c7 7075 /* Fold an expression like: "foo"[2].
ad2e7dd0
RK
7076 This is not done in fold so it won't happen inside &.
7077 Don't fold if this is for wide characters since it's too
7078 difficult to do correctly and this is a very rare case. */
742920c7 7079
017e1b43
RH
7080 if (modifier != EXPAND_CONST_ADDRESS
7081 && modifier != EXPAND_INITIALIZER
6de9cd9a
DN
7082 && modifier != EXPAND_MEMORY)
7083 {
7084 tree t = fold_read_from_constant_string (exp);
7085
7086 if (t)
7087 return expand_expr (t, target, tmode, modifier);
7088 }
bbf6f052 7089
742920c7
RK
7090 /* If this is a constant index into a constant array,
7091 just get the value from the array. Handle both the cases when
7092 we have an explicit constructor and when our operand is a variable
7093 that was declared const. */
4af3895e 7094
017e1b43
RH
7095 if (modifier != EXPAND_CONST_ADDRESS
7096 && modifier != EXPAND_INITIALIZER
7097 && modifier != EXPAND_MEMORY
7098 && TREE_CODE (array) == CONSTRUCTOR
7099 && ! TREE_SIDE_EFFECTS (array)
45d8710e 7100 && TREE_CODE (index) == INTEGER_CST)
742920c7 7101 {
4038c495
GB
7102 unsigned HOST_WIDE_INT ix;
7103 tree field, value;
05bccae2 7104
4038c495
GB
7105 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
7106 field, value)
7107 if (tree_int_cst_equal (field, index))
7108 {
7109 if (!TREE_SIDE_EFFECTS (value))
7110 return expand_expr (fold (value), target, tmode, modifier);
7111 break;
7112 }
742920c7 7113 }
3a94c984 7114
742920c7 7115 else if (optimize >= 1
cb5fa0f8
RK
7116 && modifier != EXPAND_CONST_ADDRESS
7117 && modifier != EXPAND_INITIALIZER
017e1b43 7118 && modifier != EXPAND_MEMORY
742920c7
RK
7119 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7120 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
beb0c2e0
RH
7121 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
7122 && targetm.binds_local_p (array))
742920c7 7123 {
08293add 7124 if (TREE_CODE (index) == INTEGER_CST)
742920c7
RK
7125 {
7126 tree init = DECL_INITIAL (array);
7127
742920c7
RK
7128 if (TREE_CODE (init) == CONSTRUCTOR)
7129 {
4038c495
GB
7130 unsigned HOST_WIDE_INT ix;
7131 tree field, value;
7132
7133 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
7134 field, value)
7135 if (tree_int_cst_equal (field, index))
7136 {
7137 if (!TREE_SIDE_EFFECTS (value))
7138 return expand_expr (fold (value), target, tmode,
7139 modifier);
7140 break;
7141 }
742920c7 7142 }
f51a281b 7143 else if (TREE_CODE (init) == STRING_CST)
5c80f6e6 7144 {
f51a281b
AP
7145 tree index1 = index;
7146 tree low_bound = array_ref_low_bound (exp);
7147 index1 = fold_convert (sizetype, TREE_OPERAND (exp, 1));
7148
7149 /* Optimize the special-case of a zero lower bound.
7150
7151 We convert the low_bound to sizetype to avoid some problems
7152 with constant folding. (E.g. suppose the lower bound is 1,
 7153 and its mode is QI. Without the conversion, (ARRAY
7154 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
 7155 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7156
7157 if (! integer_zerop (low_bound))
7158 index1 = size_diffop (index1, fold_convert (sizetype,
7159 low_bound));
7160
7161 if (0 > compare_tree_int (index1,
7162 TREE_STRING_LENGTH (init)))
7163 {
7164 tree type = TREE_TYPE (TREE_TYPE (init));
7165 enum machine_mode mode = TYPE_MODE (type);
7166
7167 if (GET_MODE_CLASS (mode) == MODE_INT
7168 && GET_MODE_SIZE (mode) == 1)
7169 return gen_int_mode (TREE_STRING_POINTER (init)
7170 [TREE_INT_CST_LOW (index1)],
7171 mode);
7172 }
5c80f6e6 7173 }
742920c7
RK
7174 }
7175 }
7176 }
afc6aaab 7177 goto normal_inner_ref;
bbf6f052
RK
7178
7179 case COMPONENT_REF:
4af3895e 7180 /* If the operand is a CONSTRUCTOR, we can just extract the
afc6aaab
ZW
7181 appropriate field if it is present. */
7182 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
4af3895e 7183 {
4038c495
GB
7184 unsigned HOST_WIDE_INT idx;
7185 tree field, value;
4af3895e 7186
4038c495
GB
7187 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7188 idx, field, value)
7189 if (field == TREE_OPERAND (exp, 1)
86b5812c
RK
7190 /* We can normally use the value of the field in the
7191 CONSTRUCTOR. However, if this is a bitfield in
7192 an integral mode that we can fit in a HOST_WIDE_INT,
7193 we must mask only the number of bits in the bitfield,
7194 since this is done implicitly by the constructor. If
7195 the bitfield does not meet either of those conditions,
7196 we can't do this optimization. */
4038c495
GB
7197 && (! DECL_BIT_FIELD (field)
7198 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
7199 && (GET_MODE_BITSIZE (DECL_MODE (field))
86b5812c
RK
7200 <= HOST_BITS_PER_WIDE_INT))))
7201 {
4038c495 7202 if (DECL_BIT_FIELD (field)
8403445a
AM
7203 && modifier == EXPAND_STACK_PARM)
7204 target = 0;
4038c495
GB
7205 op0 = expand_expr (value, target, tmode, modifier);
7206 if (DECL_BIT_FIELD (field))
86b5812c 7207 {
4038c495
GB
7208 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
7209 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
86b5812c 7210
4038c495 7211 if (TYPE_UNSIGNED (TREE_TYPE (field)))
86b5812c
RK
7212 {
7213 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
22273300 7214 op0 = expand_and (imode, op0, op1, target);
86b5812c
RK
7215 }
7216 else
7217 {
7218 tree count
4a90aeeb 7219 = build_int_cst (NULL_TREE,
7d60be94 7220 GET_MODE_BITSIZE (imode) - bitsize);
86b5812c
RK
7221
7222 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7223 target, 0);
7224 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7225 target, 0);
7226 }
7227 }
7228
7229 return op0;
7230 }
4af3895e 7231 }
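      /* Worked example for the bit-field extraction above (illustrative;
	 assumes the constructor records the raw value 5): reading field
	 F from a CONSTRUCTOR for "struct S { int f : 3; }" shifts the
	 value up and arithmetically back down by the excess mode bits,
	 yielding -3, just as a store and reload of the signed 3-bit
	 field would; an unsigned 3-bit field is instead masked with
	 (1 << 3) - 1 = 7 and keeps the value 5.  */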
afc6aaab 7232 goto normal_inner_ref;
4af3895e 7233
afc6aaab
ZW
7234 case BIT_FIELD_REF:
7235 case ARRAY_RANGE_REF:
7236 normal_inner_ref:
bbf6f052
RK
7237 {
7238 enum machine_mode mode1;
770ae6cc 7239 HOST_WIDE_INT bitsize, bitpos;
7bb0943f 7240 tree offset;
bbf6f052 7241 int volatilep = 0;
839c4796 7242 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
2614034e 7243 &mode1, &unsignedp, &volatilep, true);
f47e9b4e 7244 rtx orig_op0;
bbf6f052 7245
e7f3c83f
RK
7246 /* If we got back the original object, something is wrong. Perhaps
7247 we are evaluating an expression too early. In any event, don't
7248 infinitely recurse. */
5b0264cb 7249 gcc_assert (tem != exp);
e7f3c83f 7250
3d27140a 7251 /* If TEM's type is a union of variable size, pass TARGET to the inner
b74f5ff2
RK
7252 computation, since it will need a temporary and TARGET is known
7253 to have to do. This occurs in unchecked conversion in Ada. */
3a94c984 7254
f47e9b4e
RK
7255 orig_op0 = op0
7256 = expand_expr (tem,
7257 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7258 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7259 != INTEGER_CST)
8403445a 7260 && modifier != EXPAND_STACK_PARM
f47e9b4e
RK
7261 ? target : NULL_RTX),
7262 VOIDmode,
7263 (modifier == EXPAND_INITIALIZER
8403445a
AM
7264 || modifier == EXPAND_CONST_ADDRESS
7265 || modifier == EXPAND_STACK_PARM)
f47e9b4e 7266 ? modifier : EXPAND_NORMAL);
bbf6f052 7267
60a23e2e
OH
7268 /* If this is a constant, put it into a register if it is a legitimate
7269 constant, OFFSET is 0, and we won't try to extract outside the
7270 register (in case we were passed a partially uninitialized object
7271 or a view_conversion to a larger size). Force the constant to
7272 memory otherwise. */
8c8a8e34
JW
7273 if (CONSTANT_P (op0))
7274 {
7275 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
14a774a9 7276 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
60a23e2e
OH
7277 && offset == 0
7278 && bitpos + bitsize <= GET_MODE_BITSIZE (mode))
8c8a8e34
JW
7279 op0 = force_reg (mode, op0);
7280 else
7281 op0 = validize_mem (force_const_mem (mode, op0));
7282 }
7283
60a23e2e
OH
7284 /* Otherwise, if this object not in memory and we either have an
7285 offset, a BLKmode result, or a reference outside the object, put it
7286 there. Such cases can occur in Ada if we have unchecked conversion
7287 of an expression from a scalar type to an array or record type or
7288 for an ARRAY_RANGE_REF whose type is BLKmode. */
3c0cb5de 7289 else if (!MEM_P (op0)
8d2e5f72 7290 && (offset != 0
60a23e2e 7291 || (bitpos + bitsize > GET_MODE_BITSIZE (GET_MODE (op0)))
8d2e5f72
RK
7292 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7293 {
82c82743
RH
7294 tree nt = build_qualified_type (TREE_TYPE (tem),
7295 (TYPE_QUALS (TREE_TYPE (tem))
7296 | TYPE_QUAL_CONST));
7297 rtx memloc = assign_temp (nt, 1, 1, 1);
450b1728 7298
82c82743
RH
7299 emit_move_insn (memloc, op0);
7300 op0 = memloc;
8d2e5f72
RK
7301 }
7302
7bb0943f
RS
7303 if (offset != 0)
7304 {
8403445a
AM
7305 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7306 EXPAND_SUM);
7bb0943f 7307
5b0264cb 7308 gcc_assert (MEM_P (op0));
2d48c13d 7309
2d48c13d 7310#ifdef POINTERS_EXTEND_UNSIGNED
4b6c1672 7311 if (GET_MODE (offset_rtx) != Pmode)
267b28bd 7312 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
fa06ab5c
RK
7313#else
7314 if (GET_MODE (offset_rtx) != ptr_mode)
7315 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
2d48c13d
JL
7316#endif
7317
e82407b5
EB
7318 if (GET_MODE (op0) == BLKmode
7319 /* A constant address in OP0 can have VOIDmode, we must
7320 not try to call force_reg in that case. */
efd07ca7 7321 && GET_MODE (XEXP (op0, 0)) != VOIDmode
14a774a9 7322 && bitsize != 0
3a94c984 7323 && (bitpos % bitsize) == 0
89752202 7324 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
a06ef755 7325 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
89752202 7326 {
e3c8ea67 7327 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
89752202
HB
7328 bitpos = 0;
7329 }
7330
0d4903b8
RK
7331 op0 = offset_address (op0, offset_rtx,
7332 highest_pow2_factor (offset));
7bb0943f
RS
7333 }
7334
1ce7f3c2
RK
7335 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7336 record its alignment as BIGGEST_ALIGNMENT. */
3c0cb5de 7337 if (MEM_P (op0) && bitpos == 0 && offset != 0
1ce7f3c2
RK
7338 && is_aligning_offset (offset, tem))
7339 set_mem_align (op0, BIGGEST_ALIGNMENT);
7340
bbf6f052 7341 /* Don't forget about volatility even if this is a bitfield. */
3c0cb5de 7342 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
bbf6f052 7343 {
f47e9b4e
RK
7344 if (op0 == orig_op0)
7345 op0 = copy_rtx (op0);
7346
bbf6f052
RK
7347 MEM_VOLATILE_P (op0) = 1;
7348 }
7349
010f87c4
JJ
7350 /* The following code doesn't handle CONCAT.
7351 Assume only bitpos == 0 can be used for CONCAT, due to
 7352 one-element arrays having the same mode as their element. */
7353 if (GET_CODE (op0) == CONCAT)
7354 {
5b0264cb
NS
7355 gcc_assert (bitpos == 0
7356 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
010f87c4
JJ
7357 return op0;
7358 }
7359
ccc98036
RS
7360 /* In cases where an aligned union has an unaligned object
7361 as a field, we might be extracting a BLKmode value from
7362 an integer-mode (e.g., SImode) object. Handle this case
7363 by doing the extract into an object as wide as the field
7364 (which we know to be the width of a basic mode), then
cb5fa0f8 7365 storing into memory, and changing the mode to BLKmode. */
bbf6f052 7366 if (mode1 == VOIDmode
f8cfc6aa 7367 || REG_P (op0) || GET_CODE (op0) == SUBREG
cb5fa0f8
RK
7368 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7369 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
10c2a453
RK
7370 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7371 && modifier != EXPAND_CONST_ADDRESS
7372 && modifier != EXPAND_INITIALIZER)
cb5fa0f8
RK
7373 /* If the field isn't aligned enough to fetch as a memref,
7374 fetch it as a bit field. */
7375 || (mode1 != BLKmode
9e5f281f 7376 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
e82407b5 7377 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
3c0cb5de 7378 || (MEM_P (op0)
e82407b5
EB
7379 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7380 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
a8f3bf52
RK
7381 && ((modifier == EXPAND_CONST_ADDRESS
7382 || modifier == EXPAND_INITIALIZER)
7383 ? STRICT_ALIGNMENT
7384 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
9e5f281f 7385 || (bitpos % BITS_PER_UNIT != 0)))
cb5fa0f8
RK
7386 /* If the type and the field are a constant size and the
7387 size of the type isn't the same size as the bitfield,
7388 we must use bitfield operations. */
7389 || (bitsize >= 0
dbe4d070
RH
7390 && TYPE_SIZE (TREE_TYPE (exp))
7391 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
cb5fa0f8 7392 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
a06ef755 7393 bitsize)))
bbf6f052 7394 {
bbf6f052
RK
7395 enum machine_mode ext_mode = mode;
7396
14a774a9 7397 if (ext_mode == BLKmode
3c0cb5de
JQ
7398 && ! (target != 0 && MEM_P (op0)
7399 && MEM_P (target)
14a774a9 7400 && bitpos % BITS_PER_UNIT == 0))
bbf6f052
RK
7401 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7402
7403 if (ext_mode == BLKmode)
a281e72d 7404 {
7a06d606
RK
7405 if (target == 0)
7406 target = assign_temp (type, 0, 1, 1);
7407
7408 if (bitsize == 0)
7409 return target;
7410
a281e72d
RK
7411 /* In this case, BITPOS must start at a byte boundary and
7412 TARGET, if specified, must be a MEM. */
5b0264cb
NS
7413 gcc_assert (MEM_P (op0)
7414 && (!target || MEM_P (target))
7415 && !(bitpos % BITS_PER_UNIT));
a281e72d 7416
7a06d606
RK
7417 emit_block_move (target,
7418 adjust_address (op0, VOIDmode,
7419 bitpos / BITS_PER_UNIT),
a06ef755 7420 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
44bb111a 7421 / BITS_PER_UNIT),
8403445a
AM
7422 (modifier == EXPAND_STACK_PARM
7423 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
3a94c984 7424
a281e72d
RK
7425 return target;
7426 }
bbf6f052 7427
dc6d66b3
RK
7428 op0 = validize_mem (op0);
7429
3c0cb5de 7430 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
04050c69 7431 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
dc6d66b3 7432
8403445a
AM
7433 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7434 (modifier == EXPAND_STACK_PARM
7435 ? NULL_RTX : target),
b3520980 7436 ext_mode, ext_mode);
ef19912d
RK
7437
7438 /* If the result is a record type and BITSIZE is narrower than
7439 the mode of OP0, an integral mode, and this is a big endian
7440 machine, we must put the field into the high-order bits. */
7441 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7442 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
65a07688 7443 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
ef19912d
RK
7444 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7445 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7446 - bitsize),
7447 op0, 1);
7448
daae8185
RK
7449 /* If the result type is BLKmode, store the data into a temporary
7450 of the appropriate type, but with the mode corresponding to the
7451 mode for the data we have (op0's mode). It's tempting to make
7452 this a constant type, since we know it's only being stored once,
7453 but that can cause problems if we are taking the address of this
7454 COMPONENT_REF because the MEM of any reference via that address
7455 will have flags corresponding to the type, which will not
7456 necessarily be constant. */
bbf6f052
RK
7457 if (mode == BLKmode)
7458 {
daae8185
RK
7459 rtx new
7460 = assign_stack_temp_for_type
7461 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
bbf6f052
RK
7462
7463 emit_move_insn (new, op0);
7464 op0 = copy_rtx (new);
7465 PUT_MODE (op0, BLKmode);
c3d32120 7466 set_mem_attributes (op0, exp, 1);
bbf6f052
RK
7467 }
7468
7469 return op0;
7470 }
7471
05019f83
RK
7472 /* If the result is BLKmode, use that to access the object
7473 now as well. */
7474 if (mode == BLKmode)
7475 mode1 = BLKmode;
7476
bbf6f052
RK
7477 /* Get a reference to just this component. */
7478 if (modifier == EXPAND_CONST_ADDRESS
7479 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
f1ec5147 7480 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
bbf6f052 7481 else
f4ef873c 7482 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
41472af8 7483
f47e9b4e
RK
7484 if (op0 == orig_op0)
7485 op0 = copy_rtx (op0);
7486
3bdf5ad1 7487 set_mem_attributes (op0, exp, 0);
f8cfc6aa 7488 if (REG_P (XEXP (op0, 0)))
a06ef755 7489 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
dc6d66b3 7490
bbf6f052 7491 MEM_VOLATILE_P (op0) |= volatilep;
0d15e60c 7492 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
08bbd316 7493 || modifier == EXPAND_CONST_ADDRESS
0d15e60c 7494 || modifier == EXPAND_INITIALIZER)
bbf6f052 7495 return op0;
0d15e60c 7496 else if (target == 0)
bbf6f052 7497 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
0d15e60c 7498
bbf6f052
RK
7499 convert_move (target, op0, unsignedp);
7500 return target;
7501 }
7502
0f59171d
RH
7503 case OBJ_TYPE_REF:
7504 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
4a8d0c9c 7505
bbf6f052
RK
7506 case CALL_EXPR:
7507 /* Check for a built-in function. */
7508 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
d6a5ac33
RK
7509 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7510 == FUNCTION_DECL)
bbf6f052 7511 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
0fb7aeda 7512 {
c70eaeaf
KG
7513 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7514 == BUILT_IN_FRONTEND)
673fda6b
SB
7515 return lang_hooks.expand_expr (exp, original_target,
7516 tmode, modifier,
7517 alt_rtl);
c70eaeaf
KG
7518 else
7519 return expand_builtin (exp, target, subtarget, tmode, ignore);
7520 }
d6a5ac33 7521
8129842c 7522 return expand_call (exp, target, ignore);
bbf6f052
RK
7523
7524 case NON_LVALUE_EXPR:
7525 case NOP_EXPR:
7526 case CONVERT_EXPR:
4a53008b 7527 if (TREE_OPERAND (exp, 0) == error_mark_node)
a592f288 7528 return const0_rtx;
4a53008b 7529
bbf6f052
RK
7530 if (TREE_CODE (type) == UNION_TYPE)
7531 {
7532 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
14a774a9 7533
c3d32120
RK
7534 /* If both input and output are BLKmode, this conversion isn't doing
7535 anything except possibly changing memory attribute. */
7536 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7537 {
7538 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7539 modifier);
7540
7541 result = copy_rtx (result);
7542 set_mem_attributes (result, exp, 0);
7543 return result;
7544 }
14a774a9 7545
bbf6f052 7546 if (target == 0)
cf7cb67e
JH
7547 {
7548 if (TYPE_MODE (type) != BLKmode)
7549 target = gen_reg_rtx (TYPE_MODE (type));
7550 else
7551 target = assign_temp (type, 0, 1, 1);
7552 }
d6a5ac33 7553
3c0cb5de 7554 if (MEM_P (target))
bbf6f052
RK
7555 /* Store data into beginning of memory target. */
7556 store_expr (TREE_OPERAND (exp, 0),
8403445a 7557 adjust_address (target, TYPE_MODE (valtype), 0),
6f4fd16d 7558 modifier == EXPAND_STACK_PARM);
1499e0a8 7559
bbf6f052 7560 else
5b0264cb
NS
7561 {
7562 gcc_assert (REG_P (target));
7563
7564 /* Store this field into a union of the proper type. */
7565 store_field (target,
7566 MIN ((int_size_in_bytes (TREE_TYPE
7567 (TREE_OPERAND (exp, 0)))
7568 * BITS_PER_UNIT),
7569 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7570 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
f45bdcd0 7571 type, 0);
5b0264cb 7572 }
bbf6f052
RK
7573
7574 /* Return the entire union. */
7575 return target;
7576 }
d6a5ac33 7577
7f62854a
RK
7578 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7579 {
7580 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
37a08a29 7581 modifier);
7f62854a
RK
7582
7583 /* If the signedness of the conversion differs and OP0 is
7584 a promoted SUBREG, clear that indication since we now
7585 have to do the proper extension. */
8df83eae 7586 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7f62854a
RK
7587 && GET_CODE (op0) == SUBREG)
7588 SUBREG_PROMOTED_VAR_P (op0) = 0;
7589
bc15d0ef 7590 return REDUCE_BIT_FIELD (op0);
7f62854a
RK
7591 }
7592
fdf473ae 7593 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
12342f90 7594 if (GET_MODE (op0) == mode)
7acda552 7595 ;
12342f90 7596
d6a5ac33 7597 /* If OP0 is a constant, just convert it into the proper mode. */
7acda552 7598 else if (CONSTANT_P (op0))
fdf473ae
RH
7599 {
7600 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7601 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7602
0fb7aeda 7603 if (modifier == EXPAND_INITIALIZER)
7acda552
RK
7604 op0 = simplify_gen_subreg (mode, op0, inner_mode,
7605 subreg_lowpart_offset (mode,
7606 inner_mode));
fdf473ae 7607 else
7acda552
RK
7608 op0= convert_modes (mode, inner_mode, op0,
7609 TYPE_UNSIGNED (inner_type));
fdf473ae 7610 }
12342f90 7611
7acda552
RK
7612 else if (modifier == EXPAND_INITIALIZER)
7613 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
d6a5ac33 7614
7acda552
RK
7615 else if (target == 0)
7616 op0 = convert_to_mode (mode, op0,
7617 TYPE_UNSIGNED (TREE_TYPE
7618 (TREE_OPERAND (exp, 0))));
bbf6f052 7619 else
7acda552
RK
7620 {
7621 convert_move (target, op0,
7622 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7623 op0 = target;
7624 }
7625
7626 return REDUCE_BIT_FIELD (op0);
bbf6f052 7627
ed239f5a 7628 case VIEW_CONVERT_EXPR:
37a08a29 7629 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
ed239f5a 7630
fabaaf36 7631 /* If the input and output modes are both the same, we are done. */
ed239f5a
RK
7632 if (TYPE_MODE (type) == GET_MODE (op0))
7633 ;
fabaaf36
RH
7634 /* If neither mode is BLKmode, and both modes are the same size
7635 then we can use gen_lowpart. */
ed239f5a 7636 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
fabaaf36
RH
7637 && GET_MODE_SIZE (TYPE_MODE (type))
7638 == GET_MODE_SIZE (GET_MODE (op0)))
0fd662ee
RH
7639 {
7640 if (GET_CODE (op0) == SUBREG)
7641 op0 = force_reg (GET_MODE (op0), op0);
7642 op0 = gen_lowpart (TYPE_MODE (type), op0);
7643 }
fabaaf36
RH
7644 /* If both modes are integral, then we can convert from one to the
7645 other. */
7646 else if (SCALAR_INT_MODE_P (GET_MODE (op0))
7647 && SCALAR_INT_MODE_P (TYPE_MODE (type)))
7648 op0 = convert_modes (TYPE_MODE (type), GET_MODE (op0), op0,
7649 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7650 /* As a last resort, spill op0 to memory, and reload it in a
7651 different mode. */
3c0cb5de 7652 else if (!MEM_P (op0))
ed239f5a 7653 {
c11c10d8
RK
7654 /* If the operand is not a MEM, force it into memory. Since we
 7655 are going to be changing the mode of the MEM, don't call
7656 force_const_mem for constants because we don't allow pool
7657 constants to change mode. */
ed239f5a 7658 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
ed239f5a 7659
5b0264cb 7660 gcc_assert (!TREE_ADDRESSABLE (exp));
ed239f5a 7661
c11c10d8
RK
7662 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7663 target
7664 = assign_stack_temp_for_type
7665 (TYPE_MODE (inner_type),
7666 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
ed239f5a 7667
c11c10d8
RK
7668 emit_move_insn (target, op0);
7669 op0 = target;
ed239f5a
RK
7670 }
7671
c11c10d8
RK
7672 /* At this point, OP0 is in the correct mode. If the output type is such
7673 that the operand is known to be aligned, indicate that it is.
7674 Otherwise, we need only be concerned about alignment for non-BLKmode
7675 results. */
3c0cb5de 7676 if (MEM_P (op0))
ed239f5a
RK
7677 {
7678 op0 = copy_rtx (op0);
7679
ed239f5a
RK
7680 if (TYPE_ALIGN_OK (type))
7681 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7682 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7683 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7684 {
7685 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
65a07688
RK
7686 HOST_WIDE_INT temp_size
7687 = MAX (int_size_in_bytes (inner_type),
7688 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
ed239f5a
RK
7689 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7690 temp_size, 0, type);
c4e59f51 7691 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
ed239f5a 7692
5b0264cb 7693 gcc_assert (!TREE_ADDRESSABLE (exp));
c11c10d8 7694
ed239f5a
RK
7695 if (GET_MODE (op0) == BLKmode)
7696 emit_block_move (new_with_op0_mode, op0,
44bb111a 7697 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
8403445a
AM
7698 (modifier == EXPAND_STACK_PARM
7699 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
ed239f5a
RK
7700 else
7701 emit_move_insn (new_with_op0_mode, op0);
7702
7703 op0 = new;
7704 }
0fb7aeda 7705
c4e59f51 7706 op0 = adjust_address (op0, TYPE_MODE (type), 0);
ed239f5a
RK
7707 }
7708
7709 return op0;
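      /* Illustrative summary of the strategy above (added comment): a
	 VIEW_CONVERT_EXPR between a 32-bit float and a 32-bit int
	 reuses the bits via gen_lowpart; two integral modes of
	 different sizes go through convert_modes; everything else
	 (typically BLKmode aggregates) is spilled to a stack temporary
	 and re-read in the new mode.  */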
7710
bbf6f052 7711 case PLUS_EXPR:
4dfa0342 7712 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
bbf6f052
RK
7713 something else, make sure we add the register to the constant and
7714 then to the other thing. This case can occur during strength
7715 reduction and doing it this way will produce better code if the
7716 frame pointer or argument pointer is eliminated.
7717
7718 fold-const.c will ensure that the constant is always in the inner
7719 PLUS_EXPR, so the only case we need to do anything about is if
7720 sp, ap, or fp is our second argument, in which case we must swap
7721 the innermost first argument and our second argument. */
7722
7723 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7724 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
4dfa0342
RH
7725 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
7726 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7727 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7728 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
bbf6f052
RK
7729 {
7730 tree t = TREE_OPERAND (exp, 1);
7731
7732 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7733 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7734 }
7735
88f63c77 7736 /* If the result is to be ptr_mode and we are adding an integer to
bbf6f052
RK
7737 something, we might be forming a constant. So try to use
7738 plus_constant. If it produces a sum and we can't accept it,
7739 use force_operand. This allows P = &ARR[const] to generate
7740 efficient code on machines where a SYMBOL_REF is not a valid
7741 address.
7742
7743 If this is an EXPAND_SUM call, always return the sum. */
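      /* Hypothetical example of the shortcut described above: for
	 "p = &arr[3]" with 4-byte elements and a static ARR, the inner
	 operand expands to a SYMBOL_REF and plus_constant folds the
	 offset, giving roughly (const (plus (symbol_ref arr)
	 (const_int 12))) with no add instruction emitted.  */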
c980ac49 7744 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
0fb7aeda 7745 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
bbf6f052 7746 {
8403445a
AM
7747 if (modifier == EXPAND_STACK_PARM)
7748 target = 0;
c980ac49
RS
7749 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7750 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7751 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7752 {
cbbc503e
JL
7753 rtx constant_part;
7754
c980ac49
RS
7755 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7756 EXPAND_SUM);
cbbc503e
JL
7757 /* Use immed_double_const to ensure that the constant is
7758 truncated according to the mode of OP1, then sign extended
7759 to a HOST_WIDE_INT. Using the constant directly can result
7760 in non-canonical RTL in a 64x32 cross compile. */
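	      /* Hedged illustration (added): with a 64-bit HOST_WIDE_INT
		 and a 32-bit target, a constant whose low word is
		 0xffffffff must become the canonical SImode CONST_INT -1;
		 immed_double_const performs that truncation and sign
		 extension, whereas using TREE_INT_CST_LOW directly would
		 leave a non-canonical 0xffffffff.  */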
7761 constant_part
7762 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7763 (HOST_WIDE_INT) 0,
a5efcd63 7764 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7f401c74 7765 op1 = plus_constant (op1, INTVAL (constant_part));
c980ac49
RS
7766 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7767 op1 = force_operand (op1, target);
bc15d0ef 7768 return REDUCE_BIT_FIELD (op1);
c980ac49 7769 }
bbf6f052 7770
c980ac49 7771 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
dc38a610 7772 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
c980ac49
RS
7773 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7774 {
cbbc503e
JL
7775 rtx constant_part;
7776
c980ac49 7777 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
70d95bac
RH
7778 (modifier == EXPAND_INITIALIZER
7779 ? EXPAND_INITIALIZER : EXPAND_SUM));
c980ac49
RS
7780 if (! CONSTANT_P (op0))
7781 {
7782 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7783 VOIDmode, modifier);
f0e9957a
RS
7784 /* Return a PLUS if modifier says it's OK. */
7785 if (modifier == EXPAND_SUM
7786 || modifier == EXPAND_INITIALIZER)
7787 return simplify_gen_binary (PLUS, mode, op0, op1);
7788 goto binop2;
c980ac49 7789 }
cbbc503e
JL
7790 /* Use immed_double_const to ensure that the constant is
 7791 truncated according to the mode of OP0, then sign extended
7792 to a HOST_WIDE_INT. Using the constant directly can result
7793 in non-canonical RTL in a 64x32 cross compile. */
7794 constant_part
7795 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7796 (HOST_WIDE_INT) 0,
2a94e396 7797 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7f401c74 7798 op0 = plus_constant (op0, INTVAL (constant_part));
c980ac49
RS
7799 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7800 op0 = force_operand (op0, target);
bc15d0ef 7801 return REDUCE_BIT_FIELD (op0);
c980ac49 7802 }
bbf6f052
RK
7803 }
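	  /* Editorial illustration, not part of the original source: the
	     plus_constant path above is what lets an address such as
	     &arr[3] (with 4-byte elements) expand to the single sum
	     (plus (symbol_ref "arr") (const_int 12)) rather than a separate
	     add instruction, even on machines where a bare SYMBOL_REF is
	     not a valid memory address.  The array name and element size
	     are only an example.  */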
7804
7805 /* No sense saving up arithmetic to be done
7806 if it's all in the wrong mode to form part of an address.
7807 And force_operand won't know whether to sign-extend or
7808 zero-extend. */
7809 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
88f63c77 7810 || mode != ptr_mode)
4ef7870a 7811 {
eb698c58
RS
7812 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7813 subtarget, &op0, &op1, 0);
6e7727eb
EB
7814 if (op0 == const0_rtx)
7815 return op1;
7816 if (op1 == const0_rtx)
7817 return op0;
4ef7870a
EB
7818 goto binop2;
7819 }
bbf6f052 7820
eb698c58
RS
7821 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7822 subtarget, &op0, &op1, modifier);
bc15d0ef 7823 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
bbf6f052
RK
7824
7825 case MINUS_EXPR:
ea87523e
RK
7826 /* For initializers, we are allowed to return a MINUS of two
7827 symbolic constants. Here we handle all cases when both operands
7828 are constant. */
bbf6f052
RK
7829 /* Handle difference of two symbolic constants,
7830 for the sake of an initializer. */
7831 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7832 && really_constant_p (TREE_OPERAND (exp, 0))
7833 && really_constant_p (TREE_OPERAND (exp, 1)))
7834 {
eb698c58
RS
7835 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7836 NULL_RTX, &op0, &op1, modifier);
ea87523e 7837
ea87523e
RK
7838 /* If the last operand is a CONST_INT, use plus_constant of
7839 the negated constant. Else make the MINUS. */
7840 if (GET_CODE (op1) == CONST_INT)
bc15d0ef 7841 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
ea87523e 7842 else
bc15d0ef 7843 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
bbf6f052 7844 }
ae431183 7845
1717e19e
UW
7846 /* No sense saving up arithmetic to be done
7847 if it's all in the wrong mode to form part of an address.
7848 And force_operand won't know whether to sign-extend or
7849 zero-extend. */
7850 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7851 || mode != ptr_mode)
7852 goto binop;
7853
eb698c58
RS
7854 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7855 subtarget, &op0, &op1, modifier);
1717e19e
UW
7856
7857 /* Convert A - const to A + (-const). */
7858 if (GET_CODE (op1) == CONST_INT)
7859 {
7860 op1 = negate_rtx (mode, op1);
bc15d0ef 7861 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
1717e19e
UW
7862 }
7863
7864 goto binop2;
bbf6f052
RK
7865
7866 case MULT_EXPR:
bbf6f052
RK
7867 /* If first operand is constant, swap them.
7868 Thus the following special case checks need only
7869 check the second operand. */
7870 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7871 {
b3694847 7872 tree t1 = TREE_OPERAND (exp, 0);
bbf6f052
RK
7873 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7874 TREE_OPERAND (exp, 1) = t1;
7875 }
7876
7877 /* Attempt to return something suitable for generating an
7878 indexed address, for machines that support that. */
7879
88f63c77 7880 if (modifier == EXPAND_SUM && mode == ptr_mode
3b40e71b 7881 && host_integerp (TREE_OPERAND (exp, 1), 0))
bbf6f052 7882 {
48a5f2fa
DJ
7883 tree exp1 = TREE_OPERAND (exp, 1);
7884
921b3427
RK
7885 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7886 EXPAND_SUM);
bbf6f052 7887
f8cfc6aa 7888 if (!REG_P (op0))
906c4e36 7889 op0 = force_operand (op0, NULL_RTX);
f8cfc6aa 7890 if (!REG_P (op0))
bbf6f052
RK
7891 op0 = copy_to_mode_reg (mode, op0);
7892
bc15d0ef 7893 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
48a5f2fa 7894 gen_int_mode (tree_low_cst (exp1, 0),
bc15d0ef 7895 TYPE_MODE (TREE_TYPE (exp1)))));
bbf6f052
RK
7896 }
7897
8403445a
AM
7898 if (modifier == EXPAND_STACK_PARM)
7899 target = 0;
7900
bbf6f052
RK
7901 /* Check for multiplying things that have been extended
7902 from a narrower type. If this machine supports multiplying
7903 in that narrower type with a result in the desired type,
7904 do it that way, and avoid the explicit type-conversion. */
8b44057d
BS
7905
7906 subexp0 = TREE_OPERAND (exp, 0);
7907 subexp1 = TREE_OPERAND (exp, 1);
7908 /* First, check if we have a multiplication of one signed and one
7909 unsigned operand. */
7910 if (TREE_CODE (subexp0) == NOP_EXPR
7911 && TREE_CODE (subexp1) == NOP_EXPR
7912 && TREE_CODE (type) == INTEGER_TYPE
7913 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
7914 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7915 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
7916 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp1, 0))))
7917 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
7918 != TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp1, 0)))))
7919 {
7920 enum machine_mode innermode
7921 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (subexp0, 0)));
7922 this_optab = usmul_widen_optab;
7923 if (mode == GET_MODE_WIDER_MODE (innermode))
7924 {
7925 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7926 {
7927 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0))))
7928 expand_operands (TREE_OPERAND (subexp0, 0),
7929 TREE_OPERAND (subexp1, 0),
7930 NULL_RTX, &op0, &op1, 0);
7931 else
7932 expand_operands (TREE_OPERAND (subexp0, 0),
7933 TREE_OPERAND (subexp1, 0),
7934 NULL_RTX, &op1, &op0, 0);
7935
832942a8 7936 goto binop3;
8b44057d
BS
7937 }
7938 }
7939 }
7940 /* Check for a multiplication with matching signedness. */
7941 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
bbf6f052
RK
7942 && TREE_CODE (type) == INTEGER_TYPE
7943 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7944 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7945 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7946 && int_fits_type_p (TREE_OPERAND (exp, 1),
7947 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7948 /* Don't use a widening multiply if a shift will do. */
7949 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
906c4e36 7950 > HOST_BITS_PER_WIDE_INT)
bbf6f052
RK
7951 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7952 ||
7953 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8df83eae
RK
7954 && (TYPE_PRECISION (TREE_TYPE
7955 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7956 == TYPE_PRECISION (TREE_TYPE
7957 (TREE_OPERAND
7958 (TREE_OPERAND (exp, 0), 0))))
bbf6f052
RK
7959 /* If both operands are extended, they must either both
7960 be zero-extended or both be sign-extended. */
8df83eae
RK
7961 && (TYPE_UNSIGNED (TREE_TYPE
7962 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7963 == TYPE_UNSIGNED (TREE_TYPE
7964 (TREE_OPERAND
7965 (TREE_OPERAND (exp, 0), 0)))))))
bbf6f052 7966 {
888d65b5
RS
7967 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
7968 enum machine_mode innermode = TYPE_MODE (op0type);
8df83eae 7969 bool zextend_p = TYPE_UNSIGNED (op0type);
888d65b5
RS
7970 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7971 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7972
d2348bd5 7973 if (mode == GET_MODE_2XWIDER_MODE (innermode))
bbf6f052 7974 {
b10af0c8
TG
7975 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7976 {
b10af0c8 7977 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
eb698c58
RS
7978 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7979 TREE_OPERAND (exp, 1),
84217346 7980 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
b10af0c8 7981 else
eb698c58
RS
7982 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7983 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
84217346 7984 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
c4d70ce3 7985 goto binop3;
b10af0c8
TG
7986 }
7987 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7988 && innermode == word_mode)
7989 {
888d65b5 7990 rtx htem, hipart;
84217346 7991 op0 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
b10af0c8 7992 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8c118062 7993 op1 = convert_modes (innermode, mode,
84217346 7994 expand_normal (TREE_OPERAND (exp, 1)),
8c118062 7995 unsignedp);
b10af0c8 7996 else
84217346 7997 op1 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 1), 0));
b10af0c8
TG
7998 temp = expand_binop (mode, other_optab, op0, op1, target,
7999 unsignedp, OPTAB_LIB_WIDEN);
888d65b5
RS
8000 hipart = gen_highpart (innermode, temp);
8001 htem = expand_mult_highpart_adjust (innermode, hipart,
8002 op0, op1, hipart,
8003 zextend_p);
8004 if (htem != hipart)
8005 emit_move_insn (hipart, htem);
bc15d0ef 8006 return REDUCE_BIT_FIELD (temp);
b10af0c8 8007 }
bbf6f052
RK
8008 }
8009 }
eb698c58
RS
8010 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8011 subtarget, &op0, &op1, 0);
bc15d0ef 8012 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
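	/* Editorial illustration, not part of the original source: the
	   widening-multiply checks above match trees such as
	   (int) (short) a * (int) (short) b.  When the target has a
	   suitable widening multiply for the narrower mode, the SImode
	   product is computed directly from the HImode operands instead of
	   extending both operands and doing a full SImode multiply.  The
	   modes named here are only an example.  */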
bbf6f052
RK
8013
8014 case TRUNC_DIV_EXPR:
8015 case FLOOR_DIV_EXPR:
8016 case CEIL_DIV_EXPR:
8017 case ROUND_DIV_EXPR:
8018 case EXACT_DIV_EXPR:
8403445a
AM
8019 if (modifier == EXPAND_STACK_PARM)
8020 target = 0;
bbf6f052
RK
8021 /* Possible optimization: compute the dividend with EXPAND_SUM
 8022	 then, if the divisor is constant, we can optimize the case
 8023	 where some terms of the dividend have coefficients divisible by it. */
eb698c58
RS
8024 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8025 subtarget, &op0, &op1, 0);
bbf6f052
RK
8026 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8027
8028 case RDIV_EXPR:
bbf6f052
RK
8029 goto binop;
8030
8031 case TRUNC_MOD_EXPR:
8032 case FLOOR_MOD_EXPR:
8033 case CEIL_MOD_EXPR:
8034 case ROUND_MOD_EXPR:
8403445a
AM
8035 if (modifier == EXPAND_STACK_PARM)
8036 target = 0;
eb698c58
RS
8037 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8038 subtarget, &op0, &op1, 0);
bbf6f052
RK
8039 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8040
8041 case FIX_ROUND_EXPR:
8042 case FIX_FLOOR_EXPR:
8043 case FIX_CEIL_EXPR:
5b0264cb 8044 gcc_unreachable (); /* Not used for C. */
bbf6f052
RK
8045
8046 case FIX_TRUNC_EXPR:
84217346 8047 op0 = expand_normal (TREE_OPERAND (exp, 0));
8403445a 8048 if (target == 0 || modifier == EXPAND_STACK_PARM)
bbf6f052
RK
8049 target = gen_reg_rtx (mode);
8050 expand_fix (target, op0, unsignedp);
8051 return target;
8052
8053 case FLOAT_EXPR:
84217346 8054 op0 = expand_normal (TREE_OPERAND (exp, 0));
8403445a 8055 if (target == 0 || modifier == EXPAND_STACK_PARM)
bbf6f052
RK
8056 target = gen_reg_rtx (mode);
8057 /* expand_float can't figure out what to do if FROM has VOIDmode.
8058 So give it the correct mode. With -O, cse will optimize this. */
8059 if (GET_MODE (op0) == VOIDmode)
8060 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8061 op0);
8062 expand_float (target, op0,
8df83eae 8063 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
bbf6f052
RK
8064 return target;
8065
8066 case NEGATE_EXPR:
5b22bee8 8067 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8403445a
AM
8068 if (modifier == EXPAND_STACK_PARM)
8069 target = 0;
91ce572a 8070 temp = expand_unop (mode,
c4d70ce3
PB
8071 optab_for_tree_code (NEGATE_EXPR, type),
8072 op0, target, 0);
5b0264cb 8073 gcc_assert (temp);
bc15d0ef 8074 return REDUCE_BIT_FIELD (temp);
bbf6f052
RK
8075
8076 case ABS_EXPR:
8077 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8403445a
AM
8078 if (modifier == EXPAND_STACK_PARM)
8079 target = 0;
bbf6f052 8080
11017cc7 8081 /* ABS_EXPR is not valid for complex arguments. */
5b0264cb
NS
8082 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8083 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
2d7050fd 8084
bbf6f052
RK
8085 /* Unsigned abs is simply the operand. Testing here means we don't
8086 risk generating incorrect code below. */
8df83eae 8087 if (TYPE_UNSIGNED (type))
bbf6f052
RK
8088 return op0;
8089
91ce572a 8090 return expand_abs (mode, op0, target, unsignedp,
e5e809f4 8091 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
bbf6f052
RK
8092
8093 case MAX_EXPR:
8094 case MIN_EXPR:
8095 target = original_target;
8403445a
AM
8096 if (target == 0
8097 || modifier == EXPAND_STACK_PARM
3c0cb5de 8098 || (MEM_P (target) && MEM_VOLATILE_P (target))
d6a5ac33 8099 || GET_MODE (target) != mode
f8cfc6aa 8100 || (REG_P (target)
bbf6f052
RK
8101 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8102 target = gen_reg_rtx (mode);
eb698c58
RS
8103 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8104 target, &op0, &op1, 0);
bbf6f052
RK
8105
8106 /* First try to do it with a special MIN or MAX instruction.
8107 If that does not win, use a conditional jump to select the proper
8108 value. */
c4d70ce3 8109 this_optab = optab_for_tree_code (code, type);
bbf6f052
RK
8110 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8111 OPTAB_WIDEN);
8112 if (temp != 0)
8113 return temp;
8114
fa2981d8
JW
8115 /* At this point, a MEM target is no longer useful; we will get better
8116 code without it. */
3a94c984 8117
dbedefae 8118 if (! REG_P (target))
fa2981d8
JW
8119 target = gen_reg_rtx (mode);
8120
e3be1116
RS
8121 /* If op1 was placed in target, swap op0 and op1. */
8122 if (target != op0 && target == op1)
8123 {
927630a5 8124 temp = op0;
e3be1116 8125 op0 = op1;
927630a5 8126 op1 = temp;
e3be1116
RS
8127 }
8128
dbedefae
RS
8129 /* We generate better code and avoid problems with op1 mentioning
8130 target by forcing op1 into a pseudo if it isn't a constant. */
8131 if (! CONSTANT_P (op1))
8132 op1 = force_reg (mode, op1);
8133
230dedb3
JH
8134 {
8135 enum rtx_code comparison_code;
8136 rtx cmpop1 = op1;
927630a5 8137
230dedb3
JH
8138 if (code == MAX_EXPR)
8139 comparison_code = unsignedp ? GEU : GE;
8140 else
8141 comparison_code = unsignedp ? LEU : LE;
927630a5 8142
6416ae7f 8143 /* Canonicalize to comparisons against 0. */
230dedb3
JH
8144 if (op1 == const1_rtx)
8145 {
8146 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8147 or (a != 0 ? a : 1) for unsigned.
8148 For MIN we are safe converting (a <= 1 ? a : 1)
8149 into (a <= 0 ? a : 1) */
8150 cmpop1 = const0_rtx;
8151 if (code == MAX_EXPR)
8152 comparison_code = unsignedp ? NE : GT;
8153 }
8154 if (op1 == constm1_rtx && !unsignedp)
8155 {
8156 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8157 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8158 cmpop1 = const0_rtx;
8159 if (code == MIN_EXPR)
8160 comparison_code = LT;
8161 }
8162#ifdef HAVE_conditional_move
8163 /* Use a conditional move if possible. */
8164 if (can_conditionally_move_p (mode))
8165 {
8166 rtx insn;
927630a5 8167
230dedb3
JH
8168 /* ??? Same problem as in expmed.c: emit_conditional_move
8169 forces a stack adjustment via compare_from_rtx, and we
8170 lose the stack adjustment if the sequence we are about
8171 to create is discarded. */
8172 do_pending_stack_adjust ();
927630a5 8173
230dedb3 8174 start_sequence ();
927630a5 8175
230dedb3
JH
8176 /* Try to emit the conditional move. */
8177 insn = emit_conditional_move (target, comparison_code,
8178 op0, cmpop1, mode,
8179 op0, op1, mode,
8180 unsignedp);
927630a5 8181
230dedb3
JH
8182 /* If we could do the conditional move, emit the sequence,
8183 and return. */
8184 if (insn)
8185 {
8186 rtx seq = get_insns ();
8187 end_sequence ();
8188 emit_insn (seq);
8189 return target;
8190 }
8191
8192 /* Otherwise discard the sequence and fall back to code with
8193 branches. */
8194 end_sequence ();
8195 }
927630a5 8196#endif
230dedb3
JH
8197 if (target != op0)
8198 emit_move_insn (target, op0);
d6a5ac33 8199
230dedb3 8200 temp = gen_label_rtx ();
3bf78d3b
RS
8201 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8202 unsignedp, mode, NULL_RTX, NULL_RTX, temp);
230dedb3 8203 }
b30f05db 8204 emit_move_insn (target, op1);
927630a5 8205 emit_label (temp);
bbf6f052
RK
8206 return target;
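	/* Editorial illustration, not part of the original source: when
	   neither a min/max instruction nor a conditional move is
	   available, the fallback above amounts to, for MAX_EXPR,
	       target = op0;
	       if (target >= op1) goto done;
	       target = op1;
	     done:
	   with the comparison code adjusted for signedness and possibly
	   canonicalized against zero.  */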
8207
bbf6f052
RK
8208 case BIT_NOT_EXPR:
8209 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8403445a
AM
8210 if (modifier == EXPAND_STACK_PARM)
8211 target = 0;
bbf6f052 8212 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
5b0264cb 8213 gcc_assert (temp);
bbf6f052
RK
8214 return temp;
8215
d6a5ac33
RK
8216 /* ??? Can optimize bitwise operations with one arg constant.
8217 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8218 and (a bitwise1 b) bitwise2 b (etc)
 8219	 but that is probably not worthwhile. */
8220
8221 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8222 boolean values when we want in all cases to compute both of them. In
8223 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8224 as actual zero-or-1 values and then bitwise anding. In cases where
8225 there cannot be any side effects, better code would be made by
8226 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8227 how to recognize those cases. */
8228
bbf6f052 8229 case TRUTH_AND_EXPR:
c4d70ce3 8230 code = BIT_AND_EXPR;
bbf6f052 8231 case BIT_AND_EXPR:
bbf6f052
RK
8232 goto binop;
8233
bbf6f052 8234 case TRUTH_OR_EXPR:
7efcb746 8235 code = BIT_IOR_EXPR;
bbf6f052 8236 case BIT_IOR_EXPR:
bbf6f052
RK
8237 goto binop;
8238
874726a8 8239 case TRUTH_XOR_EXPR:
c4d70ce3 8240 code = BIT_XOR_EXPR;
bbf6f052 8241 case BIT_XOR_EXPR:
bbf6f052
RK
8242 goto binop;
8243
8244 case LSHIFT_EXPR:
8245 case RSHIFT_EXPR:
8246 case LROTATE_EXPR:
8247 case RROTATE_EXPR:
e5e809f4 8248 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052 8249 subtarget = 0;
8403445a
AM
8250 if (modifier == EXPAND_STACK_PARM)
8251 target = 0;
bbf6f052
RK
8252 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8253 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8254 unsignedp);
8255
d6a5ac33
RK
8256 /* Could determine the answer when only additive constants differ. Also,
8257 the addition of one can be handled by changing the condition. */
bbf6f052
RK
8258 case LT_EXPR:
8259 case LE_EXPR:
8260 case GT_EXPR:
8261 case GE_EXPR:
8262 case EQ_EXPR:
8263 case NE_EXPR:
1eb8759b
RH
8264 case UNORDERED_EXPR:
8265 case ORDERED_EXPR:
8266 case UNLT_EXPR:
8267 case UNLE_EXPR:
8268 case UNGT_EXPR:
8269 case UNGE_EXPR:
8270 case UNEQ_EXPR:
d1a7edaf 8271 case LTGT_EXPR:
8403445a
AM
8272 temp = do_store_flag (exp,
8273 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8274 tmode != VOIDmode ? tmode : mode, 0);
bbf6f052
RK
8275 if (temp != 0)
8276 return temp;
d6a5ac33 8277
0f41302f 8278 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
bbf6f052
RK
8279 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8280 && original_target
f8cfc6aa 8281 && REG_P (original_target)
bbf6f052
RK
8282 && (GET_MODE (original_target)
8283 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8284 {
d6a5ac33
RK
8285 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8286 VOIDmode, 0);
8287
c0a3eeac
UW
8288 /* If temp is constant, we can just compute the result. */
8289 if (GET_CODE (temp) == CONST_INT)
8290 {
8291 if (INTVAL (temp) != 0)
8292 emit_move_insn (target, const1_rtx);
8293 else
8294 emit_move_insn (target, const0_rtx);
8295
8296 return target;
8297 }
8298
bbf6f052 8299 if (temp != original_target)
c0a3eeac
UW
8300 {
8301 enum machine_mode mode1 = GET_MODE (temp);
8302 if (mode1 == VOIDmode)
8303 mode1 = tmode != VOIDmode ? tmode : mode;
0fb7aeda 8304
c0a3eeac
UW
8305 temp = copy_to_mode_reg (mode1, temp);
8306 }
d6a5ac33 8307
bbf6f052 8308 op1 = gen_label_rtx ();
c5d5d461 8309 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
a06ef755 8310 GET_MODE (temp), unsignedp, op1);
bbf6f052
RK
8311 emit_move_insn (temp, const1_rtx);
8312 emit_label (op1);
8313 return temp;
8314 }
d6a5ac33 8315
25f3e06c
PB
8316 /* If no set-flag instruction, must generate a conditional store
8317 into a temporary variable. Drop through and handle this
8318 like && and ||. */
8319
8320 if (! ignore
8321 && (target == 0
8322 || modifier == EXPAND_STACK_PARM
8323 || ! safe_from_p (target, exp, 1)
8324 /* Make sure we don't have a hard reg (such as function's return
8325 value) live across basic blocks, if not optimizing. */
8326 || (!optimize && REG_P (target)
8327 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8328 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8329
8330 if (target)
8331 emit_move_insn (target, const0_rtx);
8332
8333 op1 = gen_label_rtx ();
8334 jumpifnot (exp, op1);
8335
8336 if (target)
8337 emit_move_insn (target, const1_rtx);
8338
8339 emit_label (op1);
8340 return ignore ? const0_rtx : target;
8341
bbf6f052 8342 case TRUTH_NOT_EXPR:
8403445a
AM
8343 if (modifier == EXPAND_STACK_PARM)
8344 target = 0;
bbf6f052
RK
8345 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8346 /* The parser is careful to generate TRUTH_NOT_EXPR
8347 only with operands that are always zero or one. */
906c4e36 8348 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
bbf6f052 8349 target, 1, OPTAB_LIB_WIDEN);
5b0264cb 8350 gcc_assert (temp);
bbf6f052
RK
8351 return temp;
8352
6de9cd9a
DN
8353 case STATEMENT_LIST:
8354 {
8355 tree_stmt_iterator iter;
8356
5b0264cb 8357 gcc_assert (ignore);
6de9cd9a
DN
8358
8359 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
8360 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
8361 }
8362 return const0_rtx;
8363
bbf6f052 8364 case COND_EXPR:
ba8081eb
KH
8365 /* A COND_EXPR with its type being VOID_TYPE represents a
8366 conditional jump and is handled in
8367 expand_gimple_cond_expr. */
8368 gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp)));
f676971a 8369
e5bacf32
PB
8370 /* Note that COND_EXPRs whose type is a structure or union
8371 are required to be constructed to contain assignments of
8372 a temporary variable, so that we can evaluate them here
8373 for side effect only. If type is void, we must do likewise. */
8374
5b0264cb
NS
8375 gcc_assert (!TREE_ADDRESSABLE (type)
8376 && !ignore
8377 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
8378 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
f676971a 8379
e5bacf32
PB
8380 /* If we are not to produce a result, we have no target. Otherwise,
8381 if a target was specified use it; it will not be used as an
8382 intermediate target unless it is safe. If no target, use a
8383 temporary. */
f676971a 8384
e5bacf32
PB
8385 if (modifier != EXPAND_STACK_PARM
8386 && original_target
8387 && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8388 && GET_MODE (original_target) == mode
7c00d1fe 8389#ifdef HAVE_conditional_move
e5bacf32
PB
8390 && (! can_conditionally_move_p (mode)
8391 || REG_P (original_target))
7c00d1fe 8392#endif
e5bacf32
PB
8393 && !MEM_P (original_target))
8394 temp = original_target;
8395 else
8396 temp = assign_temp (type, 0, 0, 1);
f676971a 8397
e5bacf32
PB
8398 do_pending_stack_adjust ();
8399 NO_DEFER_POP;
8400 op0 = gen_label_rtx ();
8401 op1 = gen_label_rtx ();
8402 jumpifnot (TREE_OPERAND (exp, 0), op0);
8403 store_expr (TREE_OPERAND (exp, 1), temp,
6f4fd16d 8404 modifier == EXPAND_STACK_PARM);
f676971a 8405
e5bacf32
PB
8406 emit_jump_insn (gen_jump (op1));
8407 emit_barrier ();
8408 emit_label (op0);
8409 store_expr (TREE_OPERAND (exp, 2), temp,
6f4fd16d 8410 modifier == EXPAND_STACK_PARM);
f676971a 8411
e5bacf32
PB
8412 emit_label (op1);
8413 OK_DEFER_POP;
8414 return temp;
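	/* Editorial illustration, not part of the original source: the
	   COND_EXPR expansion above emits the usual diamond
	       if (!cond) goto else_label;
	       temp = <then-value>; goto join_label;
	     else_label:
	       temp = <else-value>;
	     join_label:
	   where TEMP is either the caller's target (when safe) or a fresh
	   temporary; the label names are only illustrative.  */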
f676971a 8415
7ce67fbe
DP
8416 case VEC_COND_EXPR:
8417 target = expand_vec_cond_expr (exp, target);
8418 return target;
8419
bbf6f052
RK
8420 case MODIFY_EXPR:
8421 {
bbf6f052
RK
8422 tree lhs = TREE_OPERAND (exp, 0);
8423 tree rhs = TREE_OPERAND (exp, 1);
bbf6f052 8424
df9af2bb
KH
8425 gcc_assert (ignore);
8426
bbf6f052
RK
8427 /* Check for |= or &= of a bitfield of size one into another bitfield
8428 of size 1. In this case, (unless we need the result of the
8429 assignment) we can do this more efficiently with a
8430 test followed by an assignment, if necessary.
8431
8432 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8433 things change so we do, this code should be enhanced to
8434 support it. */
df9af2bb 8435 if (TREE_CODE (lhs) == COMPONENT_REF
bbf6f052
RK
8436 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8437 || TREE_CODE (rhs) == BIT_AND_EXPR)
8438 && TREE_OPERAND (rhs, 0) == lhs
8439 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
05bccae2
RK
8440 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8441 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
bbf6f052
RK
8442 {
8443 rtx label = gen_label_rtx ();
8444
8445 do_jump (TREE_OPERAND (rhs, 1),
8446 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8447 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8448 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8449 (TREE_CODE (rhs) == BIT_IOR_EXPR
8450 ? integer_one_node
e836a5a2 8451 : integer_zero_node)));
e7c33f54 8452 do_pending_stack_adjust ();
bbf6f052
RK
8453 emit_label (label);
8454 return const0_rtx;
8455 }
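	    /* Editorial illustration, not part of the original source: for
	       one-bit bitfields the transformation above turns, e.g.,
	           s.a |= s.b;   into   if (s.b) s.a = 1;
	       and
	           s.a &= s.b;   into   if (!s.b) s.a = 0;
	       so the destination bitfield is only written when its value
	       would actually change.  The field names are only an
	       example.  */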
8456
e836a5a2 8457 expand_assignment (lhs, rhs);
0fb7aeda 8458
7f8adc4e 8459 return const0_rtx;
bbf6f052
RK
8460 }
8461
6e7f84a7
APB
8462 case RETURN_EXPR:
8463 if (!TREE_OPERAND (exp, 0))
8464 expand_null_return ();
8465 else
8466 expand_return (TREE_OPERAND (exp, 0));
8467 return const0_rtx;
8468
bbf6f052 8469 case ADDR_EXPR:
70bb498a 8470 return expand_expr_addr_expr (exp, target, tmode, modifier);
bbf6f052 8471
7308a047 8472 case COMPLEX_EXPR:
1466e387 8473 /* Get the rtx code of the operands. */
84217346
MD
8474 op0 = expand_normal (TREE_OPERAND (exp, 0));
8475 op1 = expand_normal (TREE_OPERAND (exp, 1));
7308a047 8476
1466e387
RH
8477 if (!target)
8478 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6551fa4d 8479
1466e387
RH
8480 /* Move the real (op0) and imaginary (op1) parts to their location. */
8481 write_complex_part (target, op0, false);
8482 write_complex_part (target, op1, true);
7308a047 8483
1466e387 8484 return target;
7308a047
RS
8485
8486 case REALPART_EXPR:
84217346 8487 op0 = expand_normal (TREE_OPERAND (exp, 0));
1466e387 8488 return read_complex_part (op0, false);
3a94c984 8489
7308a047 8490 case IMAGPART_EXPR:
84217346 8491 op0 = expand_normal (TREE_OPERAND (exp, 0));
1466e387 8492 return read_complex_part (op0, true);
7308a047 8493
6de9cd9a
DN
8494 case RESX_EXPR:
8495 expand_resx_expr (exp);
8496 return const0_rtx;
8497
e976b8b2 8498 case TRY_CATCH_EXPR:
6de9cd9a 8499 case CATCH_EXPR:
6de9cd9a 8500 case EH_FILTER_EXPR:
b335b813 8501 case TRY_FINALLY_EXPR:
ac45df5d 8502 /* Lowered by tree-eh.c. */
5b0264cb 8503 gcc_unreachable ();
b335b813 8504
ac45df5d
RH
8505 case WITH_CLEANUP_EXPR:
8506 case CLEANUP_POINT_EXPR:
8507 case TARGET_EXPR:
165b54c3 8508 case CASE_LABEL_EXPR:
77c9db77 8509 case VA_ARG_EXPR:
caf93cb0 8510 case BIND_EXPR:
e5bacf32
PB
8511 case INIT_EXPR:
8512 case CONJ_EXPR:
8513 case COMPOUND_EXPR:
8514 case PREINCREMENT_EXPR:
8515 case PREDECREMENT_EXPR:
8516 case POSTINCREMENT_EXPR:
8517 case POSTDECREMENT_EXPR:
8518 case LOOP_EXPR:
8519 case EXIT_EXPR:
e5bacf32
PB
8520 case TRUTH_ANDIF_EXPR:
8521 case TRUTH_ORIF_EXPR:
ac45df5d 8522 /* Lowered by gimplify.c. */
5b0264cb 8523 gcc_unreachable ();
b335b813 8524
52a11cbf 8525 case EXC_PTR_EXPR:
86c99549 8526 return get_exception_pointer (cfun);
52a11cbf 8527
6de9cd9a
DN
8528 case FILTER_EXPR:
8529 return get_exception_filter (cfun);
8530
67231816
RH
8531 case FDESC_EXPR:
8532 /* Function descriptors are not valid except for as
8533 initialization constants, and should not be expanded. */
5b0264cb 8534 gcc_unreachable ();
67231816 8535
6de9cd9a 8536 case SWITCH_EXPR:
7efcb746 8537 expand_case (exp);
6de9cd9a
DN
8538 return const0_rtx;
8539
8540 case LABEL_EXPR:
8541 expand_label (TREE_OPERAND (exp, 0));
8542 return const0_rtx;
8543
6de9cd9a
DN
8544 case ASM_EXPR:
8545 expand_asm_expr (exp);
8546 return const0_rtx;
8547
d25cee4d
RH
8548 case WITH_SIZE_EXPR:
8549 /* WITH_SIZE_EXPR expands to its first argument. The caller should
8550 have pulled out the size to use in whatever context it needed. */
8551 return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
8552 modifier, alt_rtl);
8553
7ccf35ed
DN
8554 case REALIGN_LOAD_EXPR:
8555 {
8556 tree oprnd0 = TREE_OPERAND (exp, 0);
8557 tree oprnd1 = TREE_OPERAND (exp, 1);
8558 tree oprnd2 = TREE_OPERAND (exp, 2);
8559 rtx op2;
8560
8561 this_optab = optab_for_tree_code (code, type);
84217346
MD
8562 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8563 op2 = expand_normal (oprnd2);
7ccf35ed
DN
8564 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8565 target, unsignedp);
535a42b1 8566 gcc_assert (temp);
7ccf35ed
DN
8567 return temp;
8568 }
8569
20f06221
DN
8570 case DOT_PROD_EXPR:
8571 {
8572 tree oprnd0 = TREE_OPERAND (exp, 0);
8573 tree oprnd1 = TREE_OPERAND (exp, 1);
8574 tree oprnd2 = TREE_OPERAND (exp, 2);
8575 rtx op2;
8576
84217346
MD
8577 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8578 op2 = expand_normal (oprnd2);
20f06221
DN
8579 target = expand_widen_pattern_expr (exp, op0, op1, op2,
8580 target, unsignedp);
8581 return target;
8582 }
8583
8584 case WIDEN_SUM_EXPR:
8585 {
8586 tree oprnd0 = TREE_OPERAND (exp, 0);
8587 tree oprnd1 = TREE_OPERAND (exp, 1);
8588
8589 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
8590 target = expand_widen_pattern_expr (exp, op0, NULL_RTX, op1,
8591 target, unsignedp);
8592 return target;
8593 }
8594
61d3cdbb
DN
8595 case REDUC_MAX_EXPR:
8596 case REDUC_MIN_EXPR:
8597 case REDUC_PLUS_EXPR:
8598 {
84217346 8599 op0 = expand_normal (TREE_OPERAND (exp, 0));
61d3cdbb
DN
8600 this_optab = optab_for_tree_code (code, type);
8601 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
8602 gcc_assert (temp);
8603 return temp;
8604 }
7ccf35ed 8605
a6b46ba2
DN
8606 case VEC_LSHIFT_EXPR:
8607 case VEC_RSHIFT_EXPR:
8608 {
8609 target = expand_vec_shift_expr (exp, target);
8610 return target;
8611 }
8612
bbf6f052 8613 default:
673fda6b
SB
8614 return lang_hooks.expand_expr (exp, original_target, tmode,
8615 modifier, alt_rtl);
bbf6f052
RK
8616 }
8617
c4d70ce3 8618 /* Here to do an ordinary binary operator. */
bbf6f052 8619 binop:
eb698c58
RS
8620 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8621 subtarget, &op0, &op1, 0);
bbf6f052 8622 binop2:
c4d70ce3
PB
8623 this_optab = optab_for_tree_code (code, type);
8624 binop3:
8403445a
AM
8625 if (modifier == EXPAND_STACK_PARM)
8626 target = 0;
bbf6f052
RK
8627 temp = expand_binop (mode, this_optab, op0, op1, target,
8628 unsignedp, OPTAB_LIB_WIDEN);
5b0264cb 8629 gcc_assert (temp);
bc15d0ef
JM
8630 return REDUCE_BIT_FIELD (temp);
8631}
8632#undef REDUCE_BIT_FIELD
8633\f
8634/* Subroutine of above: reduce EXP to the precision of TYPE (in the
8635 signedness of TYPE), possibly returning the result in TARGET. */
8636static rtx
8637reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
8638{
8639 HOST_WIDE_INT prec = TYPE_PRECISION (type);
8640 if (target && GET_MODE (target) != GET_MODE (exp))
8641 target = 0;
8642 if (TYPE_UNSIGNED (type))
8643 {
8644 rtx mask;
8645 if (prec < HOST_BITS_PER_WIDE_INT)
8646 mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
8647 GET_MODE (exp));
8648 else
8649 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
8650 ((unsigned HOST_WIDE_INT) 1
8651 << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
8652 GET_MODE (exp));
8653 return expand_and (GET_MODE (exp), exp, mask, target);
8654 }
8655 else
8656 {
4a90aeeb 8657 tree count = build_int_cst (NULL_TREE,
7d60be94 8658 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
bc15d0ef
JM
8659 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8660 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8661 }
bbf6f052 8662}
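/* Editorial illustration, not part of the original source: on a target
   where SImode is 32 bits, reducing a value to a 5-bit unsigned type
   masks it with 0x1f, while the signed case shifts left by 27 and then
   arithmetically right by 27, so the upper bits become a correct zero-
   or sign-extension of the 5-bit result.  The widths here are only an
   example.  */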
b93a436e 8663\f
1ce7f3c2
RK
8664/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
8665 when applied to the address of EXP produces an address known to be
8666 aligned more than BIGGEST_ALIGNMENT. */
8667
8668static int
502b8322 8669is_aligning_offset (tree offset, tree exp)
1ce7f3c2 8670{
6fce44af 8671 /* Strip off any conversions. */
1ce7f3c2
RK
8672 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8673 || TREE_CODE (offset) == NOP_EXPR
6fce44af 8674 || TREE_CODE (offset) == CONVERT_EXPR)
1ce7f3c2
RK
8675 offset = TREE_OPERAND (offset, 0);
8676
8677 /* We must now have a BIT_AND_EXPR with a constant that is one less than
8678 power of 2 and which is larger than BIGGEST_ALIGNMENT. */
8679 if (TREE_CODE (offset) != BIT_AND_EXPR
8680 || !host_integerp (TREE_OPERAND (offset, 1), 1)
caf93cb0 8681 || compare_tree_int (TREE_OPERAND (offset, 1),
c0cfc691 8682 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
1ce7f3c2
RK
 8683      || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
8684 return 0;
8685
8686 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
8687 It must be NEGATE_EXPR. Then strip any more conversions. */
8688 offset = TREE_OPERAND (offset, 0);
8689 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8690 || TREE_CODE (offset) == NOP_EXPR
8691 || TREE_CODE (offset) == CONVERT_EXPR)
8692 offset = TREE_OPERAND (offset, 0);
8693
8694 if (TREE_CODE (offset) != NEGATE_EXPR)
8695 return 0;
8696
8697 offset = TREE_OPERAND (offset, 0);
8698 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8699 || TREE_CODE (offset) == NOP_EXPR
8700 || TREE_CODE (offset) == CONVERT_EXPR)
8701 offset = TREE_OPERAND (offset, 0);
8702
6fce44af
RK
8703 /* This must now be the address of EXP. */
8704 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
1ce7f3c2
RK
8705}
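/* Editorial illustration, not part of the original source: the pattern
   recognized above corresponds to an offset written roughly as
       (- (intptr_t) &exp) & (ALIGN - 1)
   where ALIGN is a power of two larger than BIGGEST_ALIGNMENT; adding
   such an offset to the address of EXP rounds the address up to a
   multiple of ALIGN, which is why the resulting address can be assumed
   to be more aligned than BIGGEST_ALIGNMENT.  */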
8706\f
e0a2f705 8707/* Return the tree node if ARG corresponds to a string constant, or zero
cc2902df 8708 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
fed3cef0
RK
8709 in bytes within the string that ARG is accessing. The type of the
8710 offset will be `sizetype'. */
b93a436e 8711
28f4ec01 8712tree
502b8322 8713string_constant (tree arg, tree *ptr_offset)
b93a436e 8714{
a45f71f5 8715 tree array, offset;
b93a436e
JL
8716 STRIP_NOPS (arg);
8717
a45f71f5 8718 if (TREE_CODE (arg) == ADDR_EXPR)
b93a436e 8719 {
a45f71f5
JJ
8720 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8721 {
8722 *ptr_offset = size_zero_node;
8723 return TREE_OPERAND (arg, 0);
8724 }
8725 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
8726 {
8727 array = TREE_OPERAND (arg, 0);
8728 offset = size_zero_node;
8729 }
8730 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
8731 {
8732 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
8733 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
8734 if (TREE_CODE (array) != STRING_CST
8735 && TREE_CODE (array) != VAR_DECL)
8736 return 0;
8737 }
8738 else
8739 return 0;
6de9cd9a 8740 }
b93a436e
JL
8741 else if (TREE_CODE (arg) == PLUS_EXPR)
8742 {
8743 tree arg0 = TREE_OPERAND (arg, 0);
8744 tree arg1 = TREE_OPERAND (arg, 1);
8745
8746 STRIP_NOPS (arg0);
8747 STRIP_NOPS (arg1);
8748
8749 if (TREE_CODE (arg0) == ADDR_EXPR
a45f71f5
JJ
8750 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
8751 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
bbf6f052 8752 {
a45f71f5
JJ
8753 array = TREE_OPERAND (arg0, 0);
8754 offset = arg1;
bbf6f052 8755 }
b93a436e 8756 else if (TREE_CODE (arg1) == ADDR_EXPR
a45f71f5
JJ
8757 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
8758 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
bbf6f052 8759 {
a45f71f5
JJ
8760 array = TREE_OPERAND (arg1, 0);
8761 offset = arg0;
bbf6f052 8762 }
a45f71f5
JJ
8763 else
8764 return 0;
8765 }
8766 else
8767 return 0;
8768
8769 if (TREE_CODE (array) == STRING_CST)
8770 {
8771 *ptr_offset = convert (sizetype, offset);
8772 return array;
8773 }
8774 else if (TREE_CODE (array) == VAR_DECL)
8775 {
8776 int length;
8777
8778 /* Variables initialized to string literals can be handled too. */
8779 if (DECL_INITIAL (array) == NULL_TREE
8780 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
8781 return 0;
8782
 8784      /* Bail out unless the array is read-only, non-volatile, and binds locally. */
8784 if (! TREE_READONLY (array)
8785 || TREE_SIDE_EFFECTS (array)
8786 || ! targetm.binds_local_p (array))
8787 return 0;
8788
8789 /* Avoid const char foo[4] = "abcde"; */
8790 if (DECL_SIZE_UNIT (array) == NULL_TREE
8791 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
8792 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
8793 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
8794 return 0;
8795
8796 /* If variable is bigger than the string literal, OFFSET must be constant
8797 and inside of the bounds of the string literal. */
8798 offset = convert (sizetype, offset);
8799 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
8800 && (! host_integerp (offset, 1)
8801 || compare_tree_int (offset, length) >= 0))
8802 return 0;
8803
8804 *ptr_offset = offset;
8805 return DECL_INITIAL (array);
b93a436e 8806 }
ca695ac9 8807
b93a436e
JL
8808 return 0;
8809}
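/* Editorial illustration, not part of the original source: for an
   argument tree such as &"hello"[2], or "hello" + 2, string_constant
   returns the STRING_CST "hello" and sets *PTR_OFFSET to 2; callers
   such as the strlen expander use this to fold the access at compile
   time.  The literal used here is only an example.  */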
ca695ac9 8810\f
b93a436e
JL
8811/* Generate code to calculate EXP using a store-flag instruction
8812 and return an rtx for the result. EXP is either a comparison
8813 or a TRUTH_NOT_EXPR whose operand is a comparison.
ca695ac9 8814
b93a436e 8815 If TARGET is nonzero, store the result there if convenient.
ca695ac9 8816
cc2902df 8817 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
b93a436e 8818 cheap.
ca695ac9 8819
b93a436e
JL
8820 Return zero if there is no suitable set-flag instruction
8821 available on this machine.
ca695ac9 8822
b93a436e
JL
8823 Once expand_expr has been called on the arguments of the comparison,
8824 we are committed to doing the store flag, since it is not safe to
8825 re-evaluate the expression. We emit the store-flag insn by calling
8826 emit_store_flag, but only expand the arguments if we have a reason
8827 to believe that emit_store_flag will be successful. If we think that
8828 it will, but it isn't, we have to simulate the store-flag with a
8829 set/jump/set sequence. */
ca695ac9 8830
b93a436e 8831static rtx
502b8322 8832do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
b93a436e
JL
8833{
8834 enum rtx_code code;
8835 tree arg0, arg1, type;
8836 tree tem;
8837 enum machine_mode operand_mode;
8838 int invert = 0;
8839 int unsignedp;
8840 rtx op0, op1;
8841 enum insn_code icode;
8842 rtx subtarget = target;
381127e8 8843 rtx result, label;
ca695ac9 8844
b93a436e
JL
8845 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
8846 result at the end. We can't simply invert the test since it would
8847 have already been inverted if it were valid. This case occurs for
8848 some floating-point comparisons. */
ca695ac9 8849
b93a436e
JL
8850 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
8851 invert = 1, exp = TREE_OPERAND (exp, 0);
ca695ac9 8852
b93a436e
JL
8853 arg0 = TREE_OPERAND (exp, 0);
8854 arg1 = TREE_OPERAND (exp, 1);
5129d2ce
AH
8855
8856 /* Don't crash if the comparison was erroneous. */
8857 if (arg0 == error_mark_node || arg1 == error_mark_node)
8858 return const0_rtx;
8859
b93a436e
JL
8860 type = TREE_TYPE (arg0);
8861 operand_mode = TYPE_MODE (type);
8df83eae 8862 unsignedp = TYPE_UNSIGNED (type);
ca695ac9 8863
b93a436e
JL
8864 /* We won't bother with BLKmode store-flag operations because it would mean
8865 passing a lot of information to emit_store_flag. */
8866 if (operand_mode == BLKmode)
8867 return 0;
ca695ac9 8868
b93a436e
JL
8869 /* We won't bother with store-flag operations involving function pointers
8870 when function pointers must be canonicalized before comparisons. */
8871#ifdef HAVE_canonicalize_funcptr_for_compare
8872 if (HAVE_canonicalize_funcptr_for_compare
8873 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
8874 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8875 == FUNCTION_TYPE))
8876 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
8877 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8878 == FUNCTION_TYPE))))
8879 return 0;
ca695ac9
JB
8880#endif
8881
b93a436e
JL
8882 STRIP_NOPS (arg0);
8883 STRIP_NOPS (arg1);
ca695ac9 8884
b93a436e
JL
8885 /* Get the rtx comparison code to use. We know that EXP is a comparison
8886 operation of some type. Some comparisons against 1 and -1 can be
8887 converted to comparisons with zero. Do so here so that the tests
8888 below will be aware that we have a comparison with zero. These
8889 tests will not catch constants in the first operand, but constants
8890 are rarely passed as the first operand. */
ca695ac9 8891
b93a436e
JL
8892 switch (TREE_CODE (exp))
8893 {
8894 case EQ_EXPR:
8895 code = EQ;
bbf6f052 8896 break;
b93a436e
JL
8897 case NE_EXPR:
8898 code = NE;
bbf6f052 8899 break;
b93a436e
JL
8900 case LT_EXPR:
8901 if (integer_onep (arg1))
8902 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
8903 else
8904 code = unsignedp ? LTU : LT;
ca695ac9 8905 break;
b93a436e
JL
8906 case LE_EXPR:
8907 if (! unsignedp && integer_all_onesp (arg1))
8908 arg1 = integer_zero_node, code = LT;
8909 else
8910 code = unsignedp ? LEU : LE;
ca695ac9 8911 break;
b93a436e
JL
8912 case GT_EXPR:
8913 if (! unsignedp && integer_all_onesp (arg1))
8914 arg1 = integer_zero_node, code = GE;
8915 else
8916 code = unsignedp ? GTU : GT;
8917 break;
8918 case GE_EXPR:
8919 if (integer_onep (arg1))
8920 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
8921 else
8922 code = unsignedp ? GEU : GE;
ca695ac9 8923 break;
1eb8759b
RH
8924
8925 case UNORDERED_EXPR:
8926 code = UNORDERED;
8927 break;
8928 case ORDERED_EXPR:
8929 code = ORDERED;
8930 break;
8931 case UNLT_EXPR:
8932 code = UNLT;
8933 break;
8934 case UNLE_EXPR:
8935 code = UNLE;
8936 break;
8937 case UNGT_EXPR:
8938 code = UNGT;
8939 break;
8940 case UNGE_EXPR:
8941 code = UNGE;
8942 break;
8943 case UNEQ_EXPR:
8944 code = UNEQ;
8945 break;
d1a7edaf
PB
8946 case LTGT_EXPR:
8947 code = LTGT;
8948 break;
1eb8759b 8949
ca695ac9 8950 default:
5b0264cb 8951 gcc_unreachable ();
bbf6f052 8952 }
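  /* Editorial illustration, not part of the original source: the
     conversions above turn, for signed operands, x < 1 into x <= 0 and
     x > -1 into x >= 0, so the special cases below only have to
     recognize comparisons against zero.  */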
bbf6f052 8953
b93a436e
JL
8954 /* Put a constant second. */
8955 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
8956 {
8957 tem = arg0; arg0 = arg1; arg1 = tem;
8958 code = swap_condition (code);
ca695ac9 8959 }
bbf6f052 8960
b93a436e
JL
8961 /* If this is an equality or inequality test of a single bit, we can
8962 do this by shifting the bit being tested to the low-order bit and
8963 masking the result with the constant 1. If the condition was EQ,
8964 we xor it with 1. This does not require an scc insn and is faster
7960bf22
JL
8965 than an scc insn even if we have it.
8966
8967 The code to make this transformation was moved into fold_single_bit_test,
8968 so we just call into the folder and expand its result. */
d39985fa 8969
b93a436e
JL
8970 if ((code == NE || code == EQ)
8971 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
8972 && integer_pow2p (TREE_OPERAND (arg0, 1)))
60cd4dae 8973 {
ae2bcd98 8974 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
60cd4dae 8975 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
450b1728 8976 arg0, arg1, type),
60cd4dae
JL
8977 target, VOIDmode, EXPAND_NORMAL);
8978 }
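  /* Editorial illustration, not part of the original source: through
     fold_single_bit_test, a test such as (x & 8) != 0 becomes roughly
     (x >> 3) & 1, and the EQ form additionally XORs the result with 1,
     so no store-flag (scc) instruction is required.  */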
bbf6f052 8979
b93a436e 8980 /* Now see if we are likely to be able to do this. Return if not. */
1eb8759b 8981 if (! can_compare_p (code, operand_mode, ccp_store_flag))
b93a436e 8982 return 0;
1eb8759b 8983
b93a436e
JL
8984 icode = setcc_gen_code[(int) code];
8985 if (icode == CODE_FOR_nothing
a995e389 8986 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
ca695ac9 8987 {
b93a436e
JL
8988 /* We can only do this if it is one of the special cases that
8989 can be handled without an scc insn. */
8990 if ((code == LT && integer_zerop (arg1))
8991 || (! only_cheap && code == GE && integer_zerop (arg1)))
8992 ;
08fd6d04 8993 else if (! only_cheap && (code == NE || code == EQ)
b93a436e
JL
8994 && TREE_CODE (type) != REAL_TYPE
8995 && ((abs_optab->handlers[(int) operand_mode].insn_code
8996 != CODE_FOR_nothing)
8997 || (ffs_optab->handlers[(int) operand_mode].insn_code
8998 != CODE_FOR_nothing)))
8999 ;
9000 else
9001 return 0;
ca695ac9 9002 }
3a94c984 9003
296b4ed9 9004 if (! get_subtarget (target)
e3be1116 9005 || GET_MODE (subtarget) != operand_mode)
b93a436e
JL
9006 subtarget = 0;
9007
eb698c58 9008 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
b93a436e
JL
9009
9010 if (target == 0)
9011 target = gen_reg_rtx (mode);
9012
ad76cef8 9013 result = emit_store_flag (target, code, op0, op1,
b93a436e 9014 operand_mode, unsignedp, 1);
ca695ac9 9015
b93a436e
JL
9016 if (result)
9017 {
9018 if (invert)
9019 result = expand_binop (mode, xor_optab, result, const1_rtx,
9020 result, 0, OPTAB_LIB_WIDEN);
9021 return result;
ca695ac9 9022 }
bbf6f052 9023
b93a436e 9024 /* If this failed, we have to do this with set/compare/jump/set code. */
f8cfc6aa 9025 if (!REG_P (target)
b93a436e
JL
9026 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9027 target = gen_reg_rtx (GET_MODE (target));
9028
9029 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9030 result = compare_from_rtx (op0, op1, code, unsignedp,
a06ef755 9031 operand_mode, NULL_RTX);
b93a436e
JL
9032 if (GET_CODE (result) == CONST_INT)
9033 return (((result == const0_rtx && ! invert)
9034 || (result != const0_rtx && invert))
9035 ? const0_rtx : const1_rtx);
ca695ac9 9036
8f08e8c0
JL
9037 /* The code of RESULT may not match CODE if compare_from_rtx
9038 decided to swap its operands and reverse the original code.
9039
9040 We know that compare_from_rtx returns either a CONST_INT or
9041 a new comparison code, so it is safe to just extract the
9042 code from RESULT. */
9043 code = GET_CODE (result);
9044
b93a436e 9045 label = gen_label_rtx ();
5b0264cb 9046 gcc_assert (bcc_gen_fctn[(int) code]);
0f41302f 9047
b93a436e
JL
9048 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9049 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9050 emit_label (label);
bbf6f052 9051
b93a436e 9052 return target;
ca695ac9 9053}
b93a436e 9054\f
b93a436e 9055
ad82abb8
ZW
9056/* Stubs in case we haven't got a casesi insn. */
9057#ifndef HAVE_casesi
9058# define HAVE_casesi 0
9059# define gen_casesi(a, b, c, d, e) (0)
9060# define CODE_FOR_casesi CODE_FOR_nothing
9061#endif
9062
9063/* If the machine does not have a case insn that compares the bounds,
9064 this means extra overhead for dispatch tables, which raises the
9065 threshold for using them. */
9066#ifndef CASE_VALUES_THRESHOLD
9067#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9068#endif /* CASE_VALUES_THRESHOLD */
9069
9070unsigned int
502b8322 9071case_values_threshold (void)
ad82abb8
ZW
9072{
9073 return CASE_VALUES_THRESHOLD;
9074}
9075
9076/* Attempt to generate a casesi instruction. Returns 1 if successful,
9077 0 otherwise (i.e. if there is no casesi instruction). */
9078int
502b8322
AJ
9079try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9080 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
ad82abb8
ZW
9081{
9082 enum machine_mode index_mode = SImode;
9083 int index_bits = GET_MODE_BITSIZE (index_mode);
9084 rtx op1, op2, index;
9085 enum machine_mode op_mode;
9086
9087 if (! HAVE_casesi)
9088 return 0;
9089
9090 /* Convert the index to SImode. */
9091 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9092 {
9093 enum machine_mode omode = TYPE_MODE (index_type);
84217346 9094 rtx rangertx = expand_normal (range);
ad82abb8
ZW
9095
9096 /* We must handle the endpoints in the original mode. */
3244e67d
RS
9097 index_expr = build2 (MINUS_EXPR, index_type,
9098 index_expr, minval);
ad82abb8 9099 minval = integer_zero_node;
84217346 9100 index = expand_normal (index_expr);
ad82abb8 9101 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
a06ef755 9102 omode, 1, default_label);
ad82abb8
ZW
9103 /* Now we can safely truncate. */
9104 index = convert_to_mode (index_mode, index, 0);
9105 }
9106 else
9107 {
9108 if (TYPE_MODE (index_type) != index_mode)
9109 {
ae2bcd98 9110 index_expr = convert (lang_hooks.types.type_for_size
b0c48229 9111 (index_bits, 0), index_expr);
ad82abb8
ZW
9112 index_type = TREE_TYPE (index_expr);
9113 }
9114
84217346 9115 index = expand_normal (index_expr);
ad82abb8 9116 }
ad76cef8 9117
ad82abb8
ZW
9118 do_pending_stack_adjust ();
9119
9120 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9121 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9122 (index, op_mode))
9123 index = copy_to_mode_reg (op_mode, index);
e87b4f3f 9124
84217346 9125 op1 = expand_normal (minval);
ad82abb8
ZW
9126
9127 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9128 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
8df83eae 9129 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
ad82abb8
ZW
9130 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9131 (op1, op_mode))
9132 op1 = copy_to_mode_reg (op_mode, op1);
9133
84217346 9134 op2 = expand_normal (range);
ad82abb8
ZW
9135
9136 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9137 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
8df83eae 9138 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
ad82abb8
ZW
9139 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9140 (op2, op_mode))
9141 op2 = copy_to_mode_reg (op_mode, op2);
9142
9143 emit_jump_insn (gen_casesi (index, op1, op2,
9144 table_label, default_label));
9145 return 1;
9146}
9147
9148/* Attempt to generate a tablejump instruction; same concept. */
9149#ifndef HAVE_tablejump
9150#define HAVE_tablejump 0
9151#define gen_tablejump(x, y) (0)
9152#endif
9153
9154/* Subroutine of the next function.
9155
9156 INDEX is the value being switched on, with the lowest value
b93a436e
JL
9157 in the table already subtracted.
9158 MODE is its expected mode (needed if INDEX is constant).
9159 RANGE is the length of the jump table.
9160 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
88d3b7f0 9161
b93a436e
JL
9162 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9163 index value is out of range. */
0f41302f 9164
ad82abb8 9165static void
502b8322
AJ
9166do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9167 rtx default_label)
ca695ac9 9168{
b3694847 9169 rtx temp, vector;
88d3b7f0 9170
74f6d071
JH
9171 if (INTVAL (range) > cfun->max_jumptable_ents)
9172 cfun->max_jumptable_ents = INTVAL (range);
1877be45 9173
b93a436e
JL
9174 /* Do an unsigned comparison (in the proper mode) between the index
9175 expression and the value which represents the length of the range.
9176 Since we just finished subtracting the lower bound of the range
9177 from the index expression, this comparison allows us to simultaneously
9178 check that the original index expression value is both greater than
9179 or equal to the minimum value of the range and less than or equal to
9180 the maximum value of the range. */
709f5be1 9181
c5d5d461 9182 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
a06ef755 9183 default_label);
bbf6f052 9184
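  /* Editorial illustration, not part of the original source: because the
     lower bound was already subtracted from INDEX, the single unsigned
     comparison above is the classic switch range check
         if ((unsigned) (x - low) > (unsigned) (high - low)) goto default;
     catching both x < low and x > high with one branch.  */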
b93a436e
JL
9185 /* If index is in range, it must fit in Pmode.
9186 Convert to Pmode so we can index with it. */
9187 if (mode != Pmode)
9188 index = convert_to_mode (Pmode, index, 1);
bbf6f052 9189
ba228239 9190 /* Don't let a MEM slip through, because then INDEX that comes
b93a436e
JL
9191 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9192 and break_out_memory_refs will go to work on it and mess it up. */
9193#ifdef PIC_CASE_VECTOR_ADDRESS
f8cfc6aa 9194 if (flag_pic && !REG_P (index))
b93a436e
JL
9195 index = copy_to_mode_reg (Pmode, index);
9196#endif
ca695ac9 9197
b93a436e
JL
9198 /* If flag_force_addr were to affect this address
9199 it could interfere with the tricky assumptions made
9200 about addresses that contain label-refs,
9201 which may be valid only very near the tablejump itself. */
9202 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9203 GET_MODE_SIZE, because this indicates how large insns are. The other
9204 uses should all be Pmode, because they are addresses. This code
9205 could fail if addresses and insns are not the same size. */
9206 index = gen_rtx_PLUS (Pmode,
9207 gen_rtx_MULT (Pmode, index,
9208 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9209 gen_rtx_LABEL_REF (Pmode, table_label));
9210#ifdef PIC_CASE_VECTOR_ADDRESS
9211 if (flag_pic)
9212 index = PIC_CASE_VECTOR_ADDRESS (index);
9213 else
bbf6f052 9214#endif
b93a436e
JL
9215 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9216 temp = gen_reg_rtx (CASE_VECTOR_MODE);
542a8afa 9217 vector = gen_const_mem (CASE_VECTOR_MODE, index);
b93a436e
JL
9218 convert_move (temp, vector, 0);
9219
9220 emit_jump_insn (gen_tablejump (temp, table_label));
9221
9222 /* If we are generating PIC code or if the table is PC-relative, the
9223 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9224 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9225 emit_barrier ();
bbf6f052 9226}
b93a436e 9227
ad82abb8 9228int
502b8322
AJ
9229try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9230 rtx table_label, rtx default_label)
ad82abb8
ZW
9231{
9232 rtx index;
9233
9234 if (! HAVE_tablejump)
9235 return 0;
9236
4845b383
KH
9237 index_expr = fold_build2 (MINUS_EXPR, index_type,
9238 convert (index_type, index_expr),
9239 convert (index_type, minval));
84217346 9240 index = expand_normal (index_expr);
ad82abb8
ZW
9241 do_pending_stack_adjust ();
9242
9243 do_tablejump (index, TYPE_MODE (index_type),
9244 convert_modes (TYPE_MODE (index_type),
9245 TYPE_MODE (TREE_TYPE (range)),
84217346 9246 expand_normal (range),
8df83eae 9247 TYPE_UNSIGNED (TREE_TYPE (range))),
ad82abb8
ZW
9248 table_label, default_label);
9249 return 1;
9250}
e2500fed 9251
cb2a532e
AH
9252/* Nonzero if the mode is a valid vector mode for this architecture.
9253 This returns nonzero even if there is no hardware support for the
9254 vector mode, but we can emulate with narrower modes. */
9255
9256int
502b8322 9257vector_mode_valid_p (enum machine_mode mode)
cb2a532e
AH
9258{
9259 enum mode_class class = GET_MODE_CLASS (mode);
9260 enum machine_mode innermode;
9261
9262 /* Doh! What's going on? */
9263 if (class != MODE_VECTOR_INT
9264 && class != MODE_VECTOR_FLOAT)
9265 return 0;
9266
9267 /* Hardware support. Woo hoo! */
f676971a 9268 if (targetm.vector_mode_supported_p (mode))
cb2a532e
AH
9269 return 1;
9270
9271 innermode = GET_MODE_INNER (mode);
9272
9273 /* We should probably return 1 if requesting V4DI and we have no DI,
 9274      but do have V2DI; however, this case is probably very unlikely. */
9275
9276 /* If we have support for the inner mode, we can safely emulate it.
 9277      We may not have V2DI, but we can emulate with a pair of DIs. */
6dd53648 9278 return targetm.scalar_mode_supported_p (innermode);
cb2a532e
AH
9279}
9280
d744e06e
AH
9281/* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
9282static rtx
502b8322 9283const_vector_from_tree (tree exp)
d744e06e
AH
9284{
9285 rtvec v;
9286 int units, i;
9287 tree link, elt;
9288 enum machine_mode inner, mode;
9289
9290 mode = TYPE_MODE (TREE_TYPE (exp));
9291
6de9cd9a 9292 if (initializer_zerop (exp))
d744e06e
AH
9293 return CONST0_RTX (mode);
9294
9295 units = GET_MODE_NUNITS (mode);
9296 inner = GET_MODE_INNER (mode);
9297
9298 v = rtvec_alloc (units);
9299
9300 link = TREE_VECTOR_CST_ELTS (exp);
9301 for (i = 0; link; link = TREE_CHAIN (link), ++i)
9302 {
9303 elt = TREE_VALUE (link);
9304
9305 if (TREE_CODE (elt) == REAL_CST)
9306 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
9307 inner);
9308 else
9309 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
9310 TREE_INT_CST_HIGH (elt),
9311 inner);
9312 }
9313
5f6c070d
AH
9314 /* Initialize remaining elements to 0. */
9315 for (; i < units; ++i)
9316 RTVEC_ELT (v, i) = CONST0_RTX (inner);
9317
a73b091d 9318 return gen_rtx_CONST_VECTOR (mode, v);
d744e06e 9319}
e2500fed 9320#include "gt-expr.h"