/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "real.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "reload.h"
#include "output.h"
#include "typeclass.h"
#include "toplev.h"
#include "ggc.h"
#include "langhooks.h"
#include "intl.h"
#include "tm_p.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-flow.h"
#include "target.h"
#include "timevar.h"

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif


/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};
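
/* In the structure above (and its store counterpart below), the
   autinc_* fields are nonzero when the corresponding address is an
   automatic pre/post increment or decrement, or has been copied to a
   register that is stepped explicitly; in the latter case the
   explicit_inc_* fields record the step direction (+1 or -1) of the
   gen_add2_insn adjustments emitted while moving (see move_by_pieces
   and move_by_pieces_1 below).  */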

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};

static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
                                                     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
                              struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
                               struct store_by_pieces *);
static bool clear_storage_via_clrmem (rtx, rtx, unsigned);
static rtx clear_storage_via_libcall (rtx, rtx);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
                                     HOST_WIDE_INT, enum machine_mode,
                                     tree, tree, int, int);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
                        tree, enum machine_mode, int, tree, int);

static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);

static int is_aligning_offset (tree, tree);
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
                             enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
#ifdef PUSH_ROUNDING
static void emit_single_push_insn (enum machine_mode, rtx, tree);
#endif
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
#endif

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movmem_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrmem_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of two different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif

/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;
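
  /* Below we probe, for every machine mode, whether a plain
     (set (reg) (mem)) load and a (set (mem) (reg)) store are
     recognized by the target: the skeletal insn above is mutated in
     place and handed to recog; nothing is ever emitted into the insn
     stream.  */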

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
             regno++)
          {
            if (! HARD_REGNO_MODE_OK (regno, mode))
              continue;

            REGNO (reg) = regno;

            SET_SRC (pat) = mem;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
          }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
           srcmode = GET_MODE_WIDER_MODE (srcmode))
        {
          enum insn_code ic;

          ic = can_extend_p (mode, srcmode, 0);
          if (ic == CODE_FOR_nothing)
            continue;

          PUT_MODE (mem, srcmode);

          if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
            float_extend_from_mem[mode][srcmode] = true;
        }
    }
}

/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
}

/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
                              : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));


  if (to_real != from_real)
    abort ();

  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
        abort ();

      if (VECTOR_MODE_P (to_mode))
        from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
        to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
        tab = sext_optab;
      else if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
        tab = trunc_optab;
      else
        abort ();

      /* Try converting directly if the insn is supported.  */

      code = tab->handlers[to_mode][from_mode].insn_code;
      if (code != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from,
                          tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
          return;
        }

      /* Otherwise use a libcall.  */
      libcall = tab->handlers[to_mode][from_mode].libfunc;

      if (!libcall)
        /* This conversion is not implemented yet.  */
        abort ();

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
                                       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
                          tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
                                                                       from)
                          : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }

  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      if (trunc_optab->handlers[to_mode][full_mode].insn_code
          == CODE_FOR_nothing)
        abort ();

      if (full_mode != from_mode)
        from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
                      to, from, UNKNOWN);
      return;
    }
  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      if (sext_optab->handlers[full_mode][from_mode].insn_code
          == CODE_FOR_nothing)
        abort ();

      emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
                      to, from, UNKNOWN);
      if (to_mode == full_mode)
        return;

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          if (REG_P (to))
            {
              if (reg_overlap_mentioned_p (to, from))
                from = force_reg (from_mode, from);
              emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
            }
          convert_move (gen_lowpart (word_mode, to), from, unsignedp);
          emit_unop_insn (code, to,
                          gen_lowpart (word_mode, to), equiv_code);
          return;
        }

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
         that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
        from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
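      /* For an unsigned source the high words are simply zero.  For a
         signed source each remaining word must be filled with copies of
         the sign bit: either via an slt-style store-flag insn, or by
         arithmetic-right-shifting the low word by its width minus one
         and widening the result.  */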
      if (unsignedp)
        fill_value = const0_rtx;
      else
        {
#ifdef HAVE_slt
          if (HAVE_slt
              && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
              && STORE_FLAG_VALUE == -1)
            {
              emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
                             lowpart_mode, 0);
              fill_value = gen_reg_rtx (word_mode);
              emit_insn (gen_slt (fill_value));
            }
          else
#endif
            {
              fill_value
                = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
                                size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
                                NULL_RTX, 0);
              fill_value = convert_to_mode (word_mode, fill_value, 1);
            }
        }

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          if (subword == 0)
            abort ();

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
                              gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || REG_P (from)
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode)))
    {
      if (!((MEM_P (from)
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || REG_P (from)
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
          && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
        from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          if (flag_force_mem)
            from = force_not_mem (from);

          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          enum machine_mode intermediate;
          rtx tmp;
          tree shift_amount;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if (((can_extend_p (to_mode, intermediate, unsignedp)
                  != CODE_FOR_nothing)
                 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
                     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                               GET_MODE_BITSIZE (intermediate))))
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.
             Generate what we need with shifts.  */
          shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
                                      - GET_MODE_BITSIZE (from_mode), 0);
          from = gen_lowpart (to_mode, force_reg (from_mode, from));
          tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
                              to, unsignedp);
          tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
                              to, unsignedp);
          if (tmp != to)
            emit_move_insn (to, tmp);
          return;
        }
    }

  /* Support special truncate insns for certain modes.  */
  if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
    {
      emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
                      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
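
/* For example (an illustrative sketch, not code from this file):

       rtx r = convert_to_mode (SImode, qreg, 1);

   emits whatever insns are needed to zero-extend the QImode value in
   QREG (a hypothetical pseudo) to SImode and returns an rtx for the
   SImode result.  */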

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
          && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
        {
          int width = GET_MODE_BITSIZE (oldmode);

          /* We need to zero extend VAL.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
        }

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }
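
  /* Illustrative example: with 64-bit HOST_WIDE_INT, converting the
     DImode constant -1 to an unsigned mode of twice that width must
     yield a constant whose low word is all ones and whose high word
     is zero; immed_double_const (val, 0, mode) builds exactly that,
     whereas gen_lowpart would have produced all ones in both words.  */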

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (oldmode) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
                  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
                       && direct_load[(int) mode])
                      || (REG_P (x)
                          && (! HARD_REGISTER_P (x)
                              || HARD_REGNO_MODE_OK (REGNO (x), mode))
                          && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                                                    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
         X does not need sign- or zero-extension.   This may not be
         the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
        {
          HOST_WIDE_INT val = INTVAL (x);
          int width = GET_MODE_BITSIZE (oldmode);

          /* We must sign or zero-extend in this case.  Start by
             zero-extending, then sign extend if we need to.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
          if (! unsignedp
              && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
            val |= (HOST_WIDE_INT) (-1) << width;

          return gen_int_mode (val, mode);
        }

      return gen_lowpart (mode, x);
    }

  /* Converting an integer constant into a vector mode is always
     equivalent to a subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      if (GET_MODE_BITSIZE (mode) != GET_MODE_BITSIZE (oldmode))
        abort ();
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}

/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))

/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
                    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}

/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return TO, if ENDP is 1 return memory at the end a la
   mempcpy, and if ENDP is 2 return memory at the end minus one byte a la
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
                unsigned int align, int endp)
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
        = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
           || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
        = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();

  if (endp)
    {
      rtx to1;

      if (data.reverse)
        abort ();
      if (data.autinc_to)
        {
          if (endp == 2)
            {
              if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
                emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
              else
                data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
                                                                -1));
            }
          to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
                                           data.offset);
        }
      else
        {
          if (endp == 2)
            --data.offset;
          to1 = adjust_address (data.to, QImode, data.offset);
        }
      return to1;
    }
  else
    return data.to;
}
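
/* Typical use (illustrative): callers first check
   can_move_by_pieces (len, align); when it returns nonzero,
   move_by_pieces (to, from, len, align, 0) emits the moves and
   returns TO.  */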

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  if (l)
    abort ();
  return n_insns;
}
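
/* Worked example (illustrative, assuming aligned SImode, HImode and
   QImode moves are all available): L = 7 is counted as one SImode
   move, one HImode move and one QImode move, so the result is 3.  */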

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
                  struct move_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->to)
        {
          if (data->autinc_to)
            to1 = adjust_automodify_address (data->to, mode, data->to_addr,
                                             data->offset);
          else
            to1 = adjust_address (data->to, mode, data->offset);
        }

      if (data->autinc_from)
        from1 = adjust_automodify_address (data->from, mode, data->from_addr,
                                           data->offset);
      else
        from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr,
                                  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
        emit_insn ((*genfun) (to1, from1));
      else
        {
#ifdef PUSH_ROUNDING
          emit_single_push_insn (mode, from1, NULL);
#else
          abort ();
#endif
        }

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}

/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
         to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      abort ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  if (!MEM_P (x))
    abort ();
  if (!MEM_P (y))
    abort ();
  if (size == 0)
    abort ();

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      if (INTVAL (size) == 0)
        return 0;

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
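
/* A typical use (an illustrative sketch, not code from this file):

       emit_block_move (dst_mem, src_mem, GEN_INT (nbytes),
                        BLOCK_OP_NORMAL);

   tries move_by_pieces for small constant sizes, then a target movmem
   pattern, and finally falls back to a call to memcpy.  */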

/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
  {
    tree fn = emit_block_move_libcall_fn (false);
    (void) fn;
    if (REG_PARM_STACK_SPACE (fn) != 0)
      return false;
  }
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
      {
        enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
        rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
        if (!tmp || !REG_P (tmp))
          return false;
        if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
                                        NULL_TREE, 1))
          return false;
        FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
      }
  }
  return true;
}

/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align)
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movmem_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
          /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
             here because if SIZE is less than the mode mask, as it is
             returned by the macro, it will definitely be less than the
             actual mode mask.  */
          && ((GET_CODE (size) == CONST_INT
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
                   <= (GET_MODE_MASK (mode) >> 1)))
              || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
          && ((pred = insn_data[(int) code].operand[0].predicate) == 0
              || (*pred) (x, BLKmode))
          && ((pred = insn_data[(int) code].operand[1].predicate) == 0
              || (*pred) (y, BLKmode))
          && ((pred = insn_data[(int) code].operand[3].predicate) == 0
              || (*pred) (opalign, VOIDmode)))
        {
          rtx op2;
          rtx last = get_last_insn ();
          rtx pat;

          op2 = convert_to_mode (mode, size, 1);
          pred = insn_data[(int) code].operand[2].predicate;
          if (pred != 0 && ! (*pred) (op2, mode))
            op2 = copy_to_mode_reg (mode, op2);

          /* ??? When called via emit_block_move_for_call, it'd be
             nice if there were some way to inform the backend, so
             that it doesn't fail the expansion because it thinks
             emitting the libcall would be more efficient.  */

          pat = GEN_FCN ((int) code) (x, y, op2, opalign);
          if (pat)
            {
              emit_insn (pat);
              volatile_ok = save_volatile_ok;
              return true;
            }
          else
            delete_insns_since (last);
        }
    }

  volatile_ok = save_volatile_ok;
  return false;
}

/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

static rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
{
  rtx dst_addr, src_addr;
  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy the addresses of DST and SRC and SIZE into new
     pseudos.  We can then place those new pseudos into a VAR_DECL and
     use them later.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
  arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
                      call_expr, arg_list, NULL_TREE);

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);

  /* If we are initializing a readonly value, show the above call clobbered
     it.  Otherwise, a load from it may erroneously be hoisted from a loop, or
     the delay slot scheduler might overlook conflicts and take nasty
     decisions.  */
  if (RTX_UNCHANGING_P (dst))
    add_function_usage_to
      (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
                                             gen_rtx_CLOBBER (VOIDmode, dst),
                                             NULL_RTX));

  return retval;
}

/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
                                       const_ptr_type_node, sizetype,
                                       NULL_TREE);

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_move_fn, asmspec);
}

static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}

/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */
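/* The emitted RTL is equivalent to:

       iter = 0;
       goto cmp;
     top:
       *(x + iter) = *(y + iter);   (one QImode byte per iteration)
       iter = iter + 1;
     cmp:
       if (iter < size) goto top;  */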

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
                          unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
                             true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
                           true, top_label);
}

/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
                               GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
                    operand_subword_force (x, i, mode));
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
                                    GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
        abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}

/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  if (GET_CODE (orig) != PARALLEL)
    abort ();

  length = XVECLEN (orig, 0);
  tmps = alloca (sizeof (rtx) * length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}

/* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */
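
/* For instance (an illustrative sketch), a 16-byte block loaded into
   two DImode registers might be described by

       (parallel [(expr_list (reg:DI 4) (const_int 0))
                  (expr_list (reg:DI 5) (const_int 8))])

   where each EXPR_LIST pairs a destination register with its byte
   offset within ORIG_SRC.  */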
fffa9c1d
JW
1556
1557void
6e985040 1558emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
fffa9c1d 1559{
aac5cc16
RH
1560 rtx *tmps, src;
1561 int start, i;
fffa9c1d 1562
aac5cc16 1563 if (GET_CODE (dst) != PARALLEL)
fffa9c1d
JW
1564 abort ();
1565
1566 /* Check for a NULL entry, used to indicate that the parameter goes
1567 both on the stack and in registers. */
aac5cc16
RH
1568 if (XEXP (XVECEXP (dst, 0, 0), 0))
1569 start = 0;
fffa9c1d 1570 else
aac5cc16
RH
1571 start = 1;
1572
703ad42b 1573 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
aac5cc16 1574
aac5cc16
RH
1575 /* Process the pieces. */
1576 for (i = start; i < XVECLEN (dst, 0); i++)
1577 {
1578 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
770ae6cc
RK
1579 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1580 unsigned int bytelen = GET_MODE_SIZE (mode);
aac5cc16
RH
1581 int shift = 0;
1582
1583 /* Handle trailing fragments that run over the size of the struct. */
8752c357 1584 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
aac5cc16 1585 {
6e985040
AM
1586 /* Arrange to shift the fragment to where it belongs.
1587 extract_bit_field loads to the lsb of the reg. */
1588 if (
1589#ifdef BLOCK_REG_PADDING
1590 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1591 == (BYTES_BIG_ENDIAN ? upward : downward)
1592#else
1593 BYTES_BIG_ENDIAN
1594#endif
1595 )
1596 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
aac5cc16
RH
1597 bytelen = ssize - bytepos;
1598 if (bytelen <= 0)
729a2125 1599 abort ();
aac5cc16
RH
1600 }
1601
f3ce87a9
DE
1602 /* If we won't be loading directly from memory, protect the real source
1603 from strange tricks we might play; but make sure that the source can
1604 be loaded directly into the destination. */
1605 src = orig_src;
3c0cb5de 1606 if (!MEM_P (orig_src)
f3ce87a9
DE
1607 && (!CONSTANT_P (orig_src)
1608 || (GET_MODE (orig_src) != mode
1609 && GET_MODE (orig_src) != VOIDmode)))
1610 {
1611 if (GET_MODE (orig_src) == VOIDmode)
1612 src = gen_reg_rtx (mode);
1613 else
1614 src = gen_reg_rtx (GET_MODE (orig_src));
 1615
1616 emit_move_insn (src, orig_src);
1617 }
1618
aac5cc16 1619 /* Optimize the access just a bit. */
 1620       if (MEM_P (src)
1621 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1622 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
 1623	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1624 && bytelen == GET_MODE_SIZE (mode))
1625 {
1626 tmps[i] = gen_reg_rtx (mode);
f4ef873c 1627 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
fffa9c1d 1628 }
1629 else if (GET_CODE (src) == CONCAT)
1630 {
1631 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1632 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1633
1634 if ((bytepos == 0 && bytelen == slen0)
1635 || (bytepos != 0 && bytepos + bytelen <= slen))
 1636	    {
1637 /* The following assumes that the concatenated objects all
1638 have the same size. In this case, a simple calculation
1639 can be used to determine the object and the bit field
1640 to be extracted. */
1641 tmps[i] = XEXP (src, bytepos / slen0);
cbb92744 1642 if (! CONSTANT_P (tmps[i])
f8cfc6aa 1643 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
cbb92744 1644 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
015b1ad1 1645 (bytepos % slen0) * BITS_PER_UNIT,
b3520980 1646 1, NULL_RTX, mode, mode);
cbb92744 1647 }
1648 else if (bytepos == 0)
1649 {
015b1ad1 1650 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
58f69841 1651 emit_move_insn (mem, src);
04050c69 1652 tmps[i] = adjust_address (mem, mode, 0);
58f69841 1653 }
1654 else
1655 abort ();
1656 }
1657 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1658 SIMD register, which is currently broken. While we get GCC
1659 to emit proper RTL for these cases, let's dump to memory. */
1660 else if (VECTOR_MODE_P (GET_MODE (dst))
f8cfc6aa 1661 && REG_P (src))
1662 {
1663 int slen = GET_MODE_SIZE (GET_MODE (src));
1664 rtx mem;
1665
1666 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1667 emit_move_insn (mem, src);
1668 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1669 }
1670 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1671 && XVECLEN (dst, 0) > 1)
 1672	tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
f3ce87a9 1673 else if (CONSTANT_P (src)
f8cfc6aa 1674 || (REG_P (src) && GET_MODE (src) == mode))
2ee5437b 1675 tmps[i] = src;
 1676	else
1677 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1678 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
b3520980 1679 mode, mode);
fffa9c1d 1680
 1681       if (shift)
1682 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1683 build_int_2 (shift, 0), tmps[i], 0);
fffa9c1d 1684 }
19caa751 1685
1686 /* Copy the extracted pieces into the proper (probable) hard regs. */
1687 for (i = start; i < XVECLEN (dst, 0); i++)
1688 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
1689}
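
/* Illustrative sketch (hypothetical caller, not part of expr.c): a typical
   use of emit_group_load is filling the scattered registers described by a
   PARALLEL from a BLKmode object in memory.  */
#if 0
static void
example_load_into_group (rtx dst /* a PARALLEL */, rtx src_mem, tree type)
{
  emit_group_load (dst, src_mem, type, (int) int_size_in_bytes (type));
}
#endif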
1690
1691/* Emit code to move a block SRC to block DST, where SRC and DST are
1692 non-consecutive groups of registers, each represented by a PARALLEL. */
1693
 1694 void
 1695 emit_group_move (rtx dst, rtx src)
1696{
1697 int i;
1698
1699 if (GET_CODE (src) != PARALLEL
1700 || GET_CODE (dst) != PARALLEL
1701 || XVECLEN (src, 0) != XVECLEN (dst, 0))
1702 abort ();
1703
1704 /* Skip first entry if NULL. */
1705 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1706 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1707 XEXP (XVECEXP (src, 0, i), 0));
1708}
1709
1710/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1711 where SRC is non-consecutive registers represented by a PARALLEL.
1712 SSIZE represents the total size of block ORIG_DST, or -1 if not
1713 known. */
1714
 1715 void
 1716 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
 1717 {
1718 rtx *tmps, dst;
1719 int start, i;
fffa9c1d 1720
 1721   if (GET_CODE (src) != PARALLEL)
 1722     abort ();
1723
1724 /* Check for a NULL entry, used to indicate that the parameter goes
1725 both on the stack and in registers. */
 1726   if (XEXP (XVECEXP (src, 0, 0), 0))
 1727     start = 0;
 1728   else
 1729     start = 1;
1730
703ad42b 1731 tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));
fffa9c1d 1732
1733 /* Copy the (probable) hard regs into pseudos. */
1734 for (i = start; i < XVECLEN (src, 0); i++)
fffa9c1d 1735 {
1736 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1737 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1738 emit_move_insn (tmps[i], reg);
1739 }
fffa9c1d 1740
1741 /* If we won't be storing directly into memory, protect the real destination
1742 from strange tricks we might play. */
1743 dst = orig_dst;
1744 if (GET_CODE (dst) == PARALLEL)
1745 {
1746 rtx temp;
1747
1748 /* We can get a PARALLEL dst if there is a conditional expression in
1749 a return statement. In that case, the dst and src are the same,
1750 so no action is necessary. */
1751 if (rtx_equal_p (dst, src))
1752 return;
1753
1754 /* It is unclear if we can ever reach here, but we may as well handle
1755 it. Allocate a temporary, and split this into a store/load to/from
1756 the temporary. */
1757
1758 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
 1759       emit_group_store (temp, src, type, ssize);
 1760       emit_group_load (dst, temp, type, ssize);
1761 return;
1762 }
3c0cb5de 1763 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1764 {
1765 dst = gen_reg_rtx (GET_MODE (orig_dst));
1766 /* Make life a bit easier for combine. */
8ae91fc0 1767 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
aac5cc16 1768 }
1769
1770 /* Process the pieces. */
1771 for (i = start; i < XVECLEN (src, 0); i++)
1772 {
770ae6cc 1773 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
aac5cc16 1774 enum machine_mode mode = GET_MODE (tmps[i]);
770ae6cc 1775 unsigned int bytelen = GET_MODE_SIZE (mode);
6ddae612 1776 rtx dest = dst;
1777
1778 /* Handle trailing fragments that run over the size of the struct. */
8752c357 1779 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
71bc0330 1780 {
1781 /* store_bit_field always takes its value from the lsb.
1782 Move the fragment to the lsb if it's not already there. */
1783 if (
1784#ifdef BLOCK_REG_PADDING
1785 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
1786 == (BYTES_BIG_ENDIAN ? upward : downward)
1787#else
1788 BYTES_BIG_ENDIAN
1789#endif
1790 )
1791 {
1792 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1793 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
1794 build_int_2 (shift, 0), tmps[i], 0);
1795 }
1796 bytelen = ssize - bytepos;
71bc0330 1797 }
fffa9c1d 1798
1799 if (GET_CODE (dst) == CONCAT)
1800 {
1801 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
1802 dest = XEXP (dst, 0);
1803 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
1804 {
1805 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
1806 dest = XEXP (dst, 1);
1807 }
1808 else if (bytepos == 0 && XVECLEN (src, 0))
1809 {
1810 dest = assign_stack_temp (GET_MODE (dest),
1811 GET_MODE_SIZE (GET_MODE (dest)), 0);
1812 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
1813 tmps[i]);
1814 dst = dest;
1815 break;
1816 }
1817 else
1818 abort ();
1819 }
1820
aac5cc16 1821 /* Optimize the access just a bit. */
3c0cb5de 1822 if (MEM_P (dest)
1823 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
1824 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
729a2125 1825 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
aac5cc16 1826 && bytelen == GET_MODE_SIZE (mode))
6ddae612 1827 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
aac5cc16 1828 else
6ddae612 1829 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
b3520980 1830 mode, tmps[i]);
fffa9c1d 1831 }
729a2125 1832
aac5cc16 1833 /* Copy from the pseudo into the (probable) hard reg. */
0d446150 1834 if (orig_dst != dst)
aac5cc16 1835 emit_move_insn (orig_dst, dst);
1836}
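
/* Illustrative sketch (hypothetical caller, not part of expr.c): the mirror
   image of the load case above, spilling a PARALLEL of return registers
   into a BLKmode stack slot.  */
#if 0
static void
example_store_group (rtx dst_mem, rtx src /* a PARALLEL */, tree type)
{
  emit_group_store (dst_mem, src, type, (int) int_size_in_bytes (type));
}
#endif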
1837
1838/* Generate code to copy a BLKmode object of TYPE out of a
1839 set of registers starting with SRCREG into TGTBLK. If TGTBLK
1840 is null, a stack temporary is created. TGTBLK is returned.
1841
1842 The purpose of this routine is to handle functions that return
1843 BLKmode structures in registers. Some machines (the PA for example)
1844 want to return all small structures in registers regardless of the
1845 structure's alignment. */
 1846
 1847 rtx
 1848 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
 1849 {
1850 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
1851 rtx src = NULL, dst = NULL;
1852 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
c988af2b 1853 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
1854
1855 if (tgtblk == 0)
1856 {
1857 tgtblk = assign_temp (build_qualified_type (type,
1858 (TYPE_QUALS (type)
1859 | TYPE_QUAL_CONST)),
1860 0, 1, 1);
1861 preserve_temp_slots (tgtblk);
1862 }
3a94c984 1863
1ed1b4fb 1864 /* This code assumes srcreg is at least a full word. If it isn't, copy it
9ac3e73b 1865 into a new pseudo which is a full word. */
0d7839da 1866
1867 if (GET_MODE (srcreg) != BLKmode
1868 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
8df83eae 1869 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
19caa751 1870
1871 /* If the structure doesn't take up a whole number of words, see whether
1872 SRCREG is padded on the left or on the right. If it's on the left,
1873 set PADDING_CORRECTION to the number of bits to skip.
1874
 1875      In most ABIs, the structure will be returned at the least significant
 1876      end of the register, which translates to right padding on little-endian
1877 targets and left padding on big-endian targets. The opposite
1878 holds if the structure is returned at the most significant
1879 end of the register. */
1880 if (bytes % UNITS_PER_WORD != 0
1881 && (targetm.calls.return_in_msb (type)
1882 ? !BYTES_BIG_ENDIAN
1883 : BYTES_BIG_ENDIAN))
1884 padding_correction
1885 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
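      /* Example (illustrative): on a 32-bit big-endian target, a 6-byte
	 struct leaves bytes % UNITS_PER_WORD == 2, so PADDING_CORRECTION
	 is 32 - 2 * 8 == 16 bits of left padding to skip.  */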
1886
 1887   /* Copy the structure BITSIZE bits at a time.
 1888
 1889      We could probably emit more efficient code for machines which do not use
1890 strict alignment, but it doesn't seem worth the effort at the current
1891 time. */
c988af2b 1892 for (bitpos = 0, xbitpos = padding_correction;
1893 bitpos < bytes * BITS_PER_UNIT;
1894 bitpos += bitsize, xbitpos += bitsize)
1895 {
3a94c984 1896 /* We need a new source operand each time xbitpos is on a
c988af2b 1897 word boundary and when xbitpos == padding_correction
1898 (the first time through). */
1899 if (xbitpos % BITS_PER_WORD == 0
c988af2b 1900 || xbitpos == padding_correction)
1901 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
1902 GET_MODE (srcreg));
1903
1904 /* We need a new destination operand each time bitpos is on
1905 a word boundary. */
1906 if (bitpos % BITS_PER_WORD == 0)
1907 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
3a94c984 1908
 1909       /* Use xbitpos for the source extraction (right justified) and
 1910	  bitpos for the destination store (left justified).  */
1911 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
1912 extract_bit_field (src, bitsize,
1913 xbitpos % BITS_PER_WORD, 1,
b3520980 1914 NULL_RTX, word_mode, word_mode));
1915 }
1916
1917 return tgtblk;
1918}
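
/* Illustrative sketch (hypothetical caller, not part of expr.c): unpacking
   a small BLKmode structure returned in registers, letting
   copy_blkmode_from_reg allocate the stack temporary itself.  */
#if 0
static rtx
example_copy_struct_return (rtx return_reg, tree type)
{
  return copy_blkmode_from_reg (NULL_RTX, return_reg, type);
}
#endif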
1919
1920/* Add a USE expression for REG to the (possibly empty) list pointed
1921 to by CALL_FUSAGE. REG must denote a hard register. */
1922
 1923 void
 1924 use_reg (rtx *call_fusage, rtx reg)
 1925 {
f8cfc6aa 1926 if (!REG_P (reg)
0304dfbb 1927 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
3a94c984 1928 abort ();
1929
1930 *call_fusage
 1931     = gen_rtx_EXPR_LIST (VOIDmode,
 1932			  gen_rtx_USE (VOIDmode, reg), *call_fusage);
1933}
1934
1935/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
1936 starting at REGNO. All of these registers must be hard registers. */
1937
 1938 void
 1939 use_regs (rtx *call_fusage, int regno, int nregs)
bbf6f052 1940{
0304dfbb 1941 int i;
bbf6f052 1942
1943 if (regno + nregs > FIRST_PSEUDO_REGISTER)
1944 abort ();
1945
1946 for (i = 0; i < nregs; i++)
e50126e8 1947 use_reg (call_fusage, regno_reg_rtx[regno + i]);
bbf6f052 1948}
1949
1950/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
1951 PARALLEL REGS. This is for calls that pass values in multiple
1952 non-contiguous locations. The Irix 6 ABI has examples of this. */
1953
 1954 void
 1955 use_group_regs (rtx *call_fusage, rtx regs)
1956{
1957 int i;
1958
1959 for (i = 0; i < XVECLEN (regs, 0); i++)
1960 {
1961 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
fffa9c1d 1962
1963 /* A NULL entry means the parameter goes both on the stack and in
1964 registers. This can also be a MEM for targets that pass values
1965 partially on the stack and partially in registers. */
f8cfc6aa 1966 if (reg != 0 && REG_P (reg))
1967 use_reg (call_fusage, reg);
1968 }
fffa9c1d 1969}
bbf6f052 1970\f
57814e5e 1971
1972/* Determine whether the LEN bytes generated by CONSTFUN can be
1973 stored to memory using several move instructions. CONSTFUNDATA is
1974 a pointer which will be passed as argument in every CONSTFUN call.
1975 ALIGN is maximum alignment we can assume. Return nonzero if a
1976 call to store_by_pieces should succeed. */
1977
 1978 int
 1979 can_store_by_pieces (unsigned HOST_WIDE_INT len,
1980 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
1981 void *constfundata, unsigned int align)
57814e5e 1982{
98166639 1983 unsigned HOST_WIDE_INT max_size, l;
1984 HOST_WIDE_INT offset = 0;
1985 enum machine_mode mode, tmode;
1986 enum insn_code icode;
1987 int reverse;
1988 rtx cst;
1989
1990 if (len == 0)
1991 return 1;
1992
4977bab6 1993 if (! STORE_BY_PIECES_P (len, align))
1994 return 0;
1995
1996 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1997 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1998 align = MOVE_MAX * BITS_PER_UNIT;
1999
2000 /* We would first store what we can in the largest integer mode, then go to
2001 successively smaller modes. */
2002
2003 for (reverse = 0;
2004 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2005 reverse++)
2006 {
2007 l = len;
2008 mode = VOIDmode;
cf5124f6 2009 max_size = STORE_MAX_PIECES + 1;
2010 while (max_size > 1)
2011 {
2012 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2013 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2014 if (GET_MODE_SIZE (tmode) < max_size)
2015 mode = tmode;
2016
2017 if (mode == VOIDmode)
2018 break;
2019
2020 icode = mov_optab->handlers[(int) mode].insn_code;
2021 if (icode != CODE_FOR_nothing
2022 && align >= GET_MODE_ALIGNMENT (mode))
2023 {
2024 unsigned int size = GET_MODE_SIZE (mode);
2025
2026 while (l >= size)
2027 {
2028 if (reverse)
2029 offset -= size;
2030
2031 cst = (*constfun) (constfundata, offset, mode);
2032 if (!LEGITIMATE_CONSTANT_P (cst))
2033 return 0;
2034
2035 if (!reverse)
2036 offset += size;
2037
2038 l -= size;
2039 }
2040 }
2041
2042 max_size = GET_MODE_SIZE (mode);
2043 }
2044
2045 /* The code above should have handled everything. */
2046 if (l != 0)
2047 abort ();
2048 }
2049
2050 return 1;
2051}
2052
2053/* Generate several move instructions to store LEN bytes generated by
2054 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2055 pointer which will be passed as argument in every CONSTFUN call.
2056 ALIGN is maximum alignment we can assume.
 2057    If ENDP is 0 return TO; if ENDP is 1 return memory at the end,
 2058    a la mempcpy; and if ENDP is 2 return memory at the end minus one
 2059    byte, a la stpcpy.  */
57814e5e 2060
 2061 rtx
 2062 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2063 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2064 void *constfundata, unsigned int align, int endp)
2065{
2066 struct store_by_pieces data;
2067
2068 if (len == 0)
2069 {
2070 if (endp == 2)
2071 abort ();
2072 return to;
2073 }
2074
4977bab6 2075 if (! STORE_BY_PIECES_P (len, align))
57814e5e 2076 abort ();
2077 data.constfun = constfun;
2078 data.constfundata = constfundata;
2079 data.len = len;
2080 data.to = to;
2081 store_by_pieces_1 (&data, align);
2082 if (endp)
2083 {
2084 rtx to1;
2085
2086 if (data.reverse)
2087 abort ();
2088 if (data.autinc_to)
2089 {
2090 if (endp == 2)
2091 {
2092 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2093 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2094 else
2095 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2096 -1));
2097 }
2098 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2099 data.offset);
2100 }
2101 else
2102 {
2103 if (endp == 2)
2104 --data.offset;
2105 to1 = adjust_address (data.to, QImode, data.offset);
2106 }
2107 return to1;
2108 }
2109 else
2110 return data.to;
2111}
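
/* Illustrative sketch (hypothetical, not part of expr.c): a minimal
   CONSTFUN and the usual can_store_by_pieces/store_by_pieces pairing.
   This callback just yields zeros, exactly as clear_by_pieces_1 below
   does.  */
#if 0
static rtx
example_zero_constfun (void *data ATTRIBUTE_UNUSED,
		       HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
		       enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return const0_rtx;
}

static void
example_clear_block (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
{
  if (can_store_by_pieces (len, example_zero_constfun, NULL, align))
    store_by_pieces (to, len, example_zero_constfun, NULL, align, 0);
}
#endif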
2112
19caa751 2113/* Generate several move instructions to clear LEN bytes of block TO. (A MEM
ad76cef8 2114 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
 2115
 2116 static void
 2117 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
 2118 {
2119 struct store_by_pieces data;
2120
2121 if (len == 0)
2122 return;
2123
57814e5e 2124 data.constfun = clear_by_pieces_1;
df4ae160 2125 data.constfundata = NULL;
2126 data.len = len;
2127 data.to = to;
2128 store_by_pieces_1 (&data, align);
2129}
2130
2131/* Callback routine for clear_by_pieces.
2132 Return const0_rtx unconditionally. */
2133
 2134 static rtx
 2135 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2136 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2137 enum machine_mode mode ATTRIBUTE_UNUSED)
2138{
2139 return const0_rtx;
2140}
2141
2142/* Subroutine of clear_by_pieces and store_by_pieces.
2143 Generate several move instructions to store LEN bytes of block TO. (A MEM
ad76cef8 2144 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2145
 2146 static void
 2147 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2148 unsigned int align ATTRIBUTE_UNUSED)
2149{
2150 rtx to_addr = XEXP (data->to, 0);
cf5124f6 2151 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2152 enum machine_mode mode = VOIDmode, tmode;
2153 enum insn_code icode;
9de08200 2154
2155 data->offset = 0;
2156 data->to_addr = to_addr;
2157 data->autinc_to
2158 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2159 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2160
2161 data->explicit_inc_to = 0;
2162 data->reverse
9de08200 2163 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2164 if (data->reverse)
2165 data->offset = data->len;
9de08200 2166
57814e5e 2167 /* If storing requires more than two move insns,
2168 copy addresses to registers (to make displacements shorter)
2169 and use post-increment if available. */
2170 if (!data->autinc_to
2171 && move_by_pieces_ninsns (data->len, align) > 2)
9de08200 2172 {
3a94c984 2173 /* Determine the main mode we'll be using. */
2174 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2175 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2176 if (GET_MODE_SIZE (tmode) < max_size)
2177 mode = tmode;
2178
57814e5e 2179 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
9de08200 2180 {
2181 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2182 data->autinc_to = 1;
2183 data->explicit_inc_to = -1;
9de08200 2184 }
3bdf5ad1 2185
2186 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2187 && ! data->autinc_to)
9de08200 2188 {
2189 data->to_addr = copy_addr_to_reg (to_addr);
2190 data->autinc_to = 1;
2191 data->explicit_inc_to = 1;
9de08200 2192 }
3bdf5ad1 2193
 2194   if (!data->autinc_to && CONSTANT_P (to_addr))
2195 data->to_addr = copy_addr_to_reg (to_addr);
2196 }
2197
e1565e65 2198 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
19caa751 2199 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
bdb429a5 2200 align = MOVE_MAX * BITS_PER_UNIT;
9de08200 2201
57814e5e 2202 /* First store what we can in the largest integer mode, then go to
9de08200
RK
2203 successively smaller modes. */
2204
2205 while (max_size > 1)
2206 {
9de08200
RK
2207 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2208 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2209 if (GET_MODE_SIZE (tmode) < max_size)
2210 mode = tmode;
2211
2212 if (mode == VOIDmode)
2213 break;
2214
2215 icode = mov_optab->handlers[(int) mode].insn_code;
19caa751 2216 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
57814e5e 2217 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2218
2219 max_size = GET_MODE_SIZE (mode);
2220 }
2221
2222 /* The code above should have handled everything. */
57814e5e 2223 if (data->len != 0)
2224 abort ();
2225}
2226
57814e5e 2227/* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2228 with move instructions for mode MODE. GENFUN is the gen_... function
2229 to make a move insn for that mode. DATA has all the other info. */
2230
 2231 static void
 2232 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2233 struct store_by_pieces *data)
9de08200 2234{
3bdf5ad1 2235 unsigned int size = GET_MODE_SIZE (mode);
57814e5e 2236 rtx to1, cst;
2237
2238 while (data->len >= size)
2239 {
2240 if (data->reverse)
2241 data->offset -= size;
9de08200 2242
3bdf5ad1 2243 if (data->autinc_to)
2244 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2245 data->offset);
3a94c984 2246 else
f4ef873c 2247 to1 = adjust_address (data->to, mode, data->offset);
9de08200 2248
940da324 2249 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
57814e5e
JJ
2250 emit_insn (gen_add2_insn (data->to_addr,
2251 GEN_INT (-(HOST_WIDE_INT) size)));
9de08200 2252
2253 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2254 emit_insn ((*genfun) (to1, cst));
3bdf5ad1 2255
940da324 2256 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
9de08200 2257 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
9de08200 2258
2259 if (! data->reverse)
2260 data->offset += size;
2261
2262 data->len -= size;
2263 }
2264}
2265\f
19caa751 2266/* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
8ac61af7 2267 its length in bytes. */
2268
 2269 rtx
 2270 clear_storage (rtx object, rtx size)
 2271 {
e9a25f70 2272 rtx retval = 0;
3c0cb5de 2273 unsigned int align = (MEM_P (object) ? MEM_ALIGN (object)
8ac61af7 2274 : GET_MODE_ALIGNMENT (GET_MODE (object)));
e9a25f70 2275
2276 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2277 just move a zero. Otherwise, do this a piece at a time. */
69ef87e2 2278 if (GET_MODE (object) != BLKmode
fcf1b822 2279 && GET_CODE (size) == CONST_INT
4ca79136 2280 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2281 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2282 else
bbf6f052 2283 {
6972c506 2284 if (size == const0_rtx)
2285 ;
2286 else if (GET_CODE (size) == CONST_INT
78762e3b 2287 && CLEAR_BY_PIECES_P (INTVAL (size), align))
9de08200 2288 clear_by_pieces (object, INTVAL (size), align);
70128ad9 2289 else if (clear_storage_via_clrmem (object, size, align))
4ca79136 2290 ;
9de08200 2291 else
2292 retval = clear_storage_via_libcall (object, size);
2293 }
2294
2295 return retval;
2296}
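
/* Illustrative sketch (hypothetical caller, not part of expr.c): zeroing a
   BLKmode MEM of known size.  clear_storage falls back from a plain move
   of CONST0_RTX to clear_by_pieces, then to a clrmem pattern, and finally
   to the memset libcall expanded below.  */
#if 0
static void
example_zero_object (rtx blk_mem, HOST_WIDE_INT size)
{
  clear_storage (blk_mem, GEN_INT (size));
}
#endif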
2297
70128ad9 2298/* A subroutine of clear_storage. Expand a clrmem pattern;
4ca79136
RH
2299 return true if successful. */
2300
2301static bool
70128ad9 2302clear_storage_via_clrmem (rtx object, rtx size, unsigned int align)
4ca79136
RH
2303{
2304 /* Try the most limited insn first, because there's no point
2305 including more than one in the machine description unless
2306 the more limited one has some advantage. */
2307
2308 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2309 enum machine_mode mode;
2310
2311 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2312 mode = GET_MODE_WIDER_MODE (mode))
2313 {
70128ad9 2314 enum insn_code code = clrmem_optab[(int) mode];
4ca79136
RH
2315 insn_operand_predicate_fn pred;
2316
2317 if (code != CODE_FOR_nothing
2318 /* We don't need MODE to be narrower than
2319 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2320 the mode mask, as it is returned by the macro, it will
2321 definitely be less than the actual mode mask. */
2322 && ((GET_CODE (size) == CONST_INT
2323 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2324 <= (GET_MODE_MASK (mode) >> 1)))
2325 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2326 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2327 || (*pred) (object, BLKmode))
2328 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2329 || (*pred) (opalign, VOIDmode)))
9de08200 2330 {
4ca79136
RH
2331 rtx op1;
2332 rtx last = get_last_insn ();
2333 rtx pat;
9de08200 2334
4ca79136
RH
2335 op1 = convert_to_mode (mode, size, 1);
2336 pred = insn_data[(int) code].operand[1].predicate;
2337 if (pred != 0 && ! (*pred) (op1, mode))
2338 op1 = copy_to_mode_reg (mode, op1);
9de08200 2339
4ca79136
RH
2340 pat = GEN_FCN ((int) code) (object, op1, opalign);
2341 if (pat)
9de08200 2342 {
4ca79136
RH
2343 emit_insn (pat);
2344 return true;
2345 }
2346 else
2347 delete_insns_since (last);
2348 }
2349 }
9de08200 2350
4ca79136
RH
2351 return false;
2352}
9de08200 2353
8f99553f 2354/* A subroutine of clear_storage. Expand a call to memset.
4ca79136 2355 Return the return value of memset, 0 otherwise. */
9de08200 2356
4ca79136 2357static rtx
502b8322 2358clear_storage_via_libcall (rtx object, rtx size)
4ca79136
RH
2359{
2360 tree call_expr, arg_list, fn, object_tree, size_tree;
2361 enum machine_mode size_mode;
2362 rtx retval;
9de08200 2363
 2364   /* Emit code to copy OBJECT and SIZE into new pseudos.  We can then
 2365      place those pseudos into a VAR_DECL and use them later.  */
52cf7115 2366
4ca79136 2367 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
52cf7115 2368
8f99553f 2369 size_mode = TYPE_MODE (sizetype);
4ca79136
RH
2370 size = convert_to_mode (size_mode, size, 1);
2371 size = copy_to_mode_reg (size_mode, size);
52cf7115 2372
4ca79136
RH
2373 /* It is incorrect to use the libcall calling conventions to call
2374 memset in this context. This could be a user call to memset and
2375 the user may wish to examine the return value from memset. For
2376 targets where libcalls and normal calls have different conventions
8f99553f 2377 for returning pointers, we could end up generating incorrect code. */
4bc973ae 2378
4ca79136 2379 object_tree = make_tree (ptr_type_node, object);
8f99553f 2380 size_tree = make_tree (sizetype, size);
4ca79136
RH
2381
2382 fn = clear_storage_libcall_fn (true);
2383 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
8f99553f 2384 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
4ca79136
RH
2385 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2386
2387 /* Now we have to build up the CALL_EXPR itself. */
2388 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
3244e67d
RS
2389 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2390 call_expr, arg_list, NULL_TREE);
4ca79136
RH
2391
2392 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2393
2394 /* If we are initializing a readonly value, show the above call
2395 clobbered it. Otherwise, a load from it may erroneously be
2396 hoisted from a loop. */
2397 if (RTX_UNCHANGING_P (object))
2398 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2399
8f99553f 2400 return retval;
4ca79136
RH
2401}
2402
2403/* A subroutine of clear_storage_via_libcall. Create the tree node
2404 for the function we use for block clears. The first time FOR_CALL
2405 is true, we call assemble_external. */
2406
2407static GTY(()) tree block_clear_fn;
66c60e67 2408
9661b15f 2409void
502b8322 2410init_block_clear_fn (const char *asmspec)
4ca79136 2411{
9661b15f 2412 if (!block_clear_fn)
4ca79136 2413 {
9661b15f
JJ
2414 tree fn, args;
2415
8f99553f
JM
2416 fn = get_identifier ("memset");
2417 args = build_function_type_list (ptr_type_node, ptr_type_node,
2418 integer_type_node, sizetype,
2419 NULL_TREE);
4ca79136
RH
2420
2421 fn = build_decl (FUNCTION_DECL, fn, args);
2422 DECL_EXTERNAL (fn) = 1;
2423 TREE_PUBLIC (fn) = 1;
2424 DECL_ARTIFICIAL (fn) = 1;
2425 TREE_NOTHROW (fn) = 1;
2426
2427 block_clear_fn = fn;
bbf6f052 2428 }
e9a25f70 2429
9661b15f 2430 if (asmspec)
0e6df31e 2431 set_user_assembler_name (block_clear_fn, asmspec);
9661b15f
JJ
2432}
2433
2434static tree
502b8322 2435clear_storage_libcall_fn (int for_call)
9661b15f
JJ
2436{
2437 static bool emitted_extern;
2438
2439 if (!block_clear_fn)
2440 init_block_clear_fn (NULL);
2441
4ca79136
RH
2442 if (for_call && !emitted_extern)
2443 {
2444 emitted_extern = true;
0e6df31e 2445 make_decl_rtl (block_clear_fn);
9661b15f 2446 assemble_external (block_clear_fn);
4ca79136 2447 }
bbf6f052 2448
9661b15f 2449 return block_clear_fn;
4ca79136
RH
2450}
2451\f
bbf6f052
RK
2452/* Generate code to copy Y into X.
2453 Both Y and X must have the same mode, except that
2454 Y can be a constant with VOIDmode.
2455 This mode cannot be BLKmode; use emit_block_move for that.
2456
2457 Return the last instruction emitted. */
2458
 2459 rtx
 2460 emit_move_insn (rtx x, rtx y)
 2461 {
2462 enum machine_mode mode = GET_MODE (x);
de1b33dd 2463 rtx y_cst = NULL_RTX;
0c19a26f 2464 rtx last_insn, set;
 2465
2466 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2467 abort ();
2468
6de9cd9a 2469 if (CONSTANT_P (y))
de1b33dd 2470 {
51286de6 2471 if (optimize
075fc17a 2472 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
51286de6
RH
2473 && (last_insn = compress_float_constant (x, y)))
2474 return last_insn;
2475
0c19a26f
RS
2476 y_cst = y;
2477
51286de6
RH
2478 if (!LEGITIMATE_CONSTANT_P (y))
2479 {
51286de6 2480 y = force_const_mem (mode, y);
3a04ff64
RH
2481
2482 /* If the target's cannot_force_const_mem prevented the spill,
2483 assume that the target's move expanders will also take care
2484 of the non-legitimate constant. */
2485 if (!y)
2486 y = y_cst;
51286de6 2487 }
de1b33dd 2488 }
bbf6f052
RK
2489
2490 /* If X or Y are memory references, verify that their addresses are valid
2491 for the machine. */
3c0cb5de 2492 if (MEM_P (x)
bbf6f052
RK
2493 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2494 && ! push_operand (x, GET_MODE (x)))
2495 || (flag_force_addr
2496 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
792760b9 2497 x = validize_mem (x);
bbf6f052 2498
3c0cb5de 2499 if (MEM_P (y)
bbf6f052
RK
2500 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2501 || (flag_force_addr
2502 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
792760b9 2503 y = validize_mem (y);
bbf6f052
RK
2504
2505 if (mode == BLKmode)
2506 abort ();
2507
de1b33dd
AO
2508 last_insn = emit_move_insn_1 (x, y);
2509
f8cfc6aa 2510 if (y_cst && REG_P (x)
0c19a26f
RS
2511 && (set = single_set (last_insn)) != NULL_RTX
2512 && SET_DEST (set) == x
2513 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3d238248 2514 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
de1b33dd
AO
2515
2516 return last_insn;
2517}
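
/* Illustrative sketch (hypothetical, not part of expr.c): emit_move_insn
   forces non-legitimate constants to memory and validizes MEM addresses
   itself, so a typical caller can simply write:  */
#if 0
static void
example_load_constant (rtx target_reg, HOST_WIDE_INT value)
{
  emit_move_insn (target_reg, gen_int_mode (value, GET_MODE (target_reg)));
}
#endif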
2518
2519/* Low level part of emit_move_insn.
2520 Called just like emit_move_insn, but assumes X and Y
2521 are basically valid. */
2522
 2523 rtx
 2524 emit_move_insn_1 (rtx x, rtx y)
2525{
2526 enum machine_mode mode = GET_MODE (x);
2527 enum machine_mode submode;
2528 enum mode_class class = GET_MODE_CLASS (mode);
261c4230 2529
dbbbbf3b 2530 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
3a94c984 2531 abort ();
76bbe028 2532
bbf6f052
RK
2533 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2534 return
2535 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2536
89742723 2537 /* Expand complex moves by moving real part and imag part, if possible. */
7308a047 2538 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
27e58a70 2539 && BLKmode != (submode = GET_MODE_INNER (mode))
7308a047
RS
2540 && (mov_optab->handlers[(int) submode].insn_code
2541 != CODE_FOR_nothing))
2542 {
2543 /* Don't split destination if it is a stack push. */
2544 int stack = push_operand (x, GET_MODE (x));
7308a047 2545
79ce92d7 2546#ifdef PUSH_ROUNDING
0e9cbd11
KH
2547 /* In case we output to the stack, but the size is smaller than the
2548 machine can push exactly, we need to use move instructions. */
1a06f5fe 2549 if (stack
bb93b973
RK
2550 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2551 != GET_MODE_SIZE (submode)))
1a06f5fe
JH
2552 {
2553 rtx temp;
bb93b973 2554 HOST_WIDE_INT offset1, offset2;
1a06f5fe
JH
2555
2556 /* Do not use anti_adjust_stack, since we don't want to update
2557 stack_pointer_delta. */
2558 temp = expand_binop (Pmode,
2559#ifdef STACK_GROWS_DOWNWARD
2560 sub_optab,
2561#else
2562 add_optab,
2563#endif
2564 stack_pointer_rtx,
2565 GEN_INT
bb93b973
RK
2566 (PUSH_ROUNDING
2567 (GET_MODE_SIZE (GET_MODE (x)))),
2568 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2569
1a06f5fe
JH
2570 if (temp != stack_pointer_rtx)
2571 emit_move_insn (stack_pointer_rtx, temp);
bb93b973 2572
1a06f5fe
JH
2573#ifdef STACK_GROWS_DOWNWARD
2574 offset1 = 0;
2575 offset2 = GET_MODE_SIZE (submode);
2576#else
2577 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2578 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2579 + GET_MODE_SIZE (submode));
2580#endif
bb93b973 2581
1a06f5fe
JH
2582 emit_move_insn (change_address (x, submode,
2583 gen_rtx_PLUS (Pmode,
2584 stack_pointer_rtx,
2585 GEN_INT (offset1))),
2586 gen_realpart (submode, y));
2587 emit_move_insn (change_address (x, submode,
2588 gen_rtx_PLUS (Pmode,
2589 stack_pointer_rtx,
2590 GEN_INT (offset2))),
2591 gen_imagpart (submode, y));
2592 }
e9c0bd54 2593 else
79ce92d7 2594#endif
7308a047
RS
2595 /* If this is a stack, push the highpart first, so it
2596 will be in the argument order.
2597
2598 In that case, change_address is used only to convert
2599 the mode, not to change the address. */
e9c0bd54 2600 if (stack)
c937357e 2601 {
e33c0d66
RS
2602 /* Note that the real part always precedes the imag part in memory
2603 regardless of machine's endianness. */
c937357e 2604#ifdef STACK_GROWS_DOWNWARD
a79b3dc7
RS
2605 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2606 gen_imagpart (submode, y));
2607 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2608 gen_realpart (submode, y));
c937357e 2609#else
a79b3dc7
RS
2610 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2611 gen_realpart (submode, y));
2612 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2613 gen_imagpart (submode, y));
c937357e
RS
2614#endif
2615 }
2616 else
2617 {
235ae7be
DM
2618 rtx realpart_x, realpart_y;
2619 rtx imagpart_x, imagpart_y;
2620
405f63da
MM
2621 /* If this is a complex value with each part being smaller than a
2622 word, the usual calling sequence will likely pack the pieces into
2623 a single register. Unfortunately, SUBREG of hard registers only
2624 deals in terms of words, so we have a problem converting input
2625 arguments to the CONCAT of two registers that is used elsewhere
2626 for complex values. If this is before reload, we can copy it into
2627 memory and reload. FIXME, we should see about using extract and
2628 insert on integer registers, but complex short and complex char
2629 variables should be rarely used. */
3a94c984 2630 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
405f63da
MM
2631 && (reload_in_progress | reload_completed) == 0)
2632 {
bb93b973
RK
2633 int packed_dest_p
2634 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2635 int packed_src_p
2636 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
405f63da
MM
2637
2638 if (packed_dest_p || packed_src_p)
2639 {
2640 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2641 ? MODE_FLOAT : MODE_INT);
2642
1da68f56
RK
2643 enum machine_mode reg_mode
2644 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
405f63da
MM
2645
2646 if (reg_mode != BLKmode)
2647 {
2648 rtx mem = assign_stack_temp (reg_mode,
2649 GET_MODE_SIZE (mode), 0);
f4ef873c 2650 rtx cmem = adjust_address (mem, mode, 0);
405f63da 2651
405f63da
MM
2652 if (packed_dest_p)
2653 {
2654 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
bb93b973 2655
405f63da
MM
2656 emit_move_insn_1 (cmem, y);
2657 return emit_move_insn_1 (sreg, mem);
2658 }
2659 else
2660 {
2661 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
bb93b973 2662
405f63da
MM
2663 emit_move_insn_1 (mem, sreg);
2664 return emit_move_insn_1 (x, cmem);
2665 }
2666 }
2667 }
2668 }
2669
235ae7be
DM
2670 realpart_x = gen_realpart (submode, x);
2671 realpart_y = gen_realpart (submode, y);
2672 imagpart_x = gen_imagpart (submode, x);
2673 imagpart_y = gen_imagpart (submode, y);
2674
2675 /* Show the output dies here. This is necessary for SUBREGs
2676 of pseudos since we cannot track their lifetimes correctly;
c14c6529
RH
2677 hard regs shouldn't appear here except as return values.
2678 We never want to emit such a clobber after reload. */
2679 if (x != y
235ae7be
DM
2680 && ! (reload_in_progress || reload_completed)
2681 && (GET_CODE (realpart_x) == SUBREG
2682 || GET_CODE (imagpart_x) == SUBREG))
bb93b973 2683 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2638126a 2684
a79b3dc7
RS
2685 emit_move_insn (realpart_x, realpart_y);
2686 emit_move_insn (imagpart_x, imagpart_y);
c937357e 2687 }
7308a047 2688
7a1ab50a 2689 return get_last_insn ();
7308a047
RS
2690 }
2691
a3600c71
HPN
2692 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
2693 find a mode to do it in. If we have a movcc, use it. Otherwise,
2694 find the MODE_INT mode of the same width. */
2695 else if (GET_MODE_CLASS (mode) == MODE_CC
2696 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2697 {
2698 enum insn_code insn_code;
2699 enum machine_mode tmode = VOIDmode;
2700 rtx x1 = x, y1 = y;
2701
2702 if (mode != CCmode
2703 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
2704 tmode = CCmode;
2705 else
2706 for (tmode = QImode; tmode != VOIDmode;
2707 tmode = GET_MODE_WIDER_MODE (tmode))
2708 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
2709 break;
2710
2711 if (tmode == VOIDmode)
2712 abort ();
2713
2714 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
2715 may call change_address which is not appropriate if we were
2716 called when a reload was in progress. We don't have to worry
2717 about changing the address since the size in bytes is supposed to
2718 be the same. Copy the MEM to change the mode and move any
2719 substitutions from the old MEM to the new one. */
2720
2721 if (reload_in_progress)
2722 {
2723 x = gen_lowpart_common (tmode, x1);
3c0cb5de 2724 if (x == 0 && MEM_P (x1))
a3600c71
HPN
2725 {
2726 x = adjust_address_nv (x1, tmode, 0);
2727 copy_replacements (x1, x);
2728 }
2729
2730 y = gen_lowpart_common (tmode, y1);
3c0cb5de 2731 if (y == 0 && MEM_P (y1))
a3600c71
HPN
2732 {
2733 y = adjust_address_nv (y1, tmode, 0);
2734 copy_replacements (y1, y);
2735 }
2736 }
2737 else
2738 {
2739 x = gen_lowpart (tmode, x);
2740 y = gen_lowpart (tmode, y);
2741 }
502b8322 2742
a3600c71
HPN
2743 insn_code = mov_optab->handlers[(int) tmode].insn_code;
2744 return emit_insn (GEN_FCN (insn_code) (x, y));
2745 }
2746
5581fc91
RS
2747 /* Try using a move pattern for the corresponding integer mode. This is
2748 only safe when simplify_subreg can convert MODE constants into integer
2749 constants. At present, it can only do this reliably if the value
2750 fits within a HOST_WIDE_INT. */
2751 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
2752 && (submode = int_mode_for_mode (mode)) != BLKmode
2753 && mov_optab->handlers[submode].insn_code != CODE_FOR_nothing)
2754 return emit_insn (GEN_FCN (mov_optab->handlers[submode].insn_code)
2755 (simplify_gen_subreg (submode, x, mode, 0),
2756 simplify_gen_subreg (submode, y, mode, 0)));
2757
cffa2189
R
2758 /* This will handle any multi-word or full-word mode that lacks a move_insn
2759 pattern. However, you will get better code if you define such patterns,
bbf6f052 2760 even if they must turn into multiple assembler instructions. */
cffa2189 2761 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
bbf6f052
RK
2762 {
2763 rtx last_insn = 0;
3ef1eef4 2764 rtx seq, inner;
235ae7be 2765 int need_clobber;
bb93b973 2766 int i;
3a94c984 2767
a98c9f1a
RK
2768#ifdef PUSH_ROUNDING
2769
2770 /* If X is a push on the stack, do the push now and replace
2771 X with a reference to the stack pointer. */
2772 if (push_operand (x, GET_MODE (x)))
2773 {
918a6124
GK
2774 rtx temp;
2775 enum rtx_code code;
0fb7aeda 2776
918a6124
GK
2777 /* Do not use anti_adjust_stack, since we don't want to update
2778 stack_pointer_delta. */
2779 temp = expand_binop (Pmode,
2780#ifdef STACK_GROWS_DOWNWARD
2781 sub_optab,
2782#else
2783 add_optab,
2784#endif
2785 stack_pointer_rtx,
2786 GEN_INT
bb93b973
RK
2787 (PUSH_ROUNDING
2788 (GET_MODE_SIZE (GET_MODE (x)))),
a426c92e 2789 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
bb93b973 2790
0fb7aeda
KH
2791 if (temp != stack_pointer_rtx)
2792 emit_move_insn (stack_pointer_rtx, temp);
918a6124
GK
2793
2794 code = GET_CODE (XEXP (x, 0));
bb93b973 2795
918a6124
GK
2796 /* Just hope that small offsets off SP are OK. */
2797 if (code == POST_INC)
0fb7aeda 2798 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
bb93b973
RK
2799 GEN_INT (-((HOST_WIDE_INT)
2800 GET_MODE_SIZE (GET_MODE (x)))));
918a6124 2801 else if (code == POST_DEC)
0fb7aeda 2802 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
918a6124
GK
2803 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2804 else
2805 temp = stack_pointer_rtx;
2806
2807 x = change_address (x, VOIDmode, temp);
a98c9f1a
RK
2808 }
2809#endif
3a94c984 2810
3ef1eef4
RK
2811 /* If we are in reload, see if either operand is a MEM whose address
2812 is scheduled for replacement. */
3c0cb5de 2813 if (reload_in_progress && MEM_P (x)
3ef1eef4 2814 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
f1ec5147 2815 x = replace_equiv_address_nv (x, inner);
3c0cb5de 2816 if (reload_in_progress && MEM_P (y)
3ef1eef4 2817 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
f1ec5147 2818 y = replace_equiv_address_nv (y, inner);
3ef1eef4 2819
235ae7be 2820 start_sequence ();
15a7a8ec 2821
235ae7be 2822 need_clobber = 0;
bbf6f052 2823 for (i = 0;
3a94c984 2824 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
bbf6f052
RK
2825 i++)
2826 {
2827 rtx xpart = operand_subword (x, i, 1, mode);
2828 rtx ypart = operand_subword (y, i, 1, mode);
2829
2830 /* If we can't get a part of Y, put Y into memory if it is a
2831 constant. Otherwise, force it into a register. If we still
2832 can't get a part of Y, abort. */
2833 if (ypart == 0 && CONSTANT_P (y))
2834 {
2835 y = force_const_mem (mode, y);
2836 ypart = operand_subword (y, i, 1, mode);
2837 }
2838 else if (ypart == 0)
2839 ypart = operand_subword_force (y, i, mode);
2840
2841 if (xpart == 0 || ypart == 0)
2842 abort ();
2843
235ae7be
DM
2844 need_clobber |= (GET_CODE (xpart) == SUBREG);
2845
bbf6f052
RK
2846 last_insn = emit_move_insn (xpart, ypart);
2847 }
6551fa4d 2848
2f937369 2849 seq = get_insns ();
235ae7be
DM
2850 end_sequence ();
2851
2852 /* Show the output dies here. This is necessary for SUBREGs
2853 of pseudos since we cannot track their lifetimes correctly;
2854 hard regs shouldn't appear here except as return values.
2855 We never want to emit such a clobber after reload. */
2856 if (x != y
2857 && ! (reload_in_progress || reload_completed)
2858 && need_clobber != 0)
bb93b973 2859 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
235ae7be
DM
2860
2861 emit_insn (seq);
2862
bbf6f052
RK
2863 return last_insn;
2864 }
2865 else
2866 abort ();
2867}
51286de6
RH
2868
2869/* If Y is representable exactly in a narrower mode, and the target can
2870 perform the extension directly from constant or memory, then emit the
2871 move as an extension. */
2872
 2873 static rtx
 2874 compress_float_constant (rtx x, rtx y)
2875{
2876 enum machine_mode dstmode = GET_MODE (x);
2877 enum machine_mode orig_srcmode = GET_MODE (y);
2878 enum machine_mode srcmode;
2879 REAL_VALUE_TYPE r;
2880
2881 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
2882
2883 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
2884 srcmode != orig_srcmode;
2885 srcmode = GET_MODE_WIDER_MODE (srcmode))
2886 {
2887 enum insn_code ic;
2888 rtx trunc_y, last_insn;
2889
2890 /* Skip if the target can't extend this way. */
2891 ic = can_extend_p (dstmode, srcmode, 0);
2892 if (ic == CODE_FOR_nothing)
2893 continue;
2894
2895 /* Skip if the narrowed value isn't exact. */
2896 if (! exact_real_truncate (srcmode, &r))
2897 continue;
2898
2899 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
2900
2901 if (LEGITIMATE_CONSTANT_P (trunc_y))
2902 {
2903 /* Skip if the target needs extra instructions to perform
2904 the extension. */
2905 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
2906 continue;
2907 }
2908 else if (float_extend_from_mem[dstmode][srcmode])
2909 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
2910 else
2911 continue;
2912
2913 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
2914 last_insn = get_last_insn ();
2915
f8cfc6aa 2916 if (REG_P (x))
0c19a26f 2917 set_unique_reg_note (last_insn, REG_EQUAL, y);
51286de6
RH
2918
2919 return last_insn;
2920 }
2921
2922 return NULL_RTX;
2923}
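
/* Example of the above (illustrative): on a target with a direct SF->DF
   extension, moving the DFmode constant 1.0 can be emitted as an SFmode
   constant load plus an extension, since 1.0 truncates to SFmode
   exactly.  */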
bbf6f052
RK
2924\f
2925/* Pushing data onto the stack. */
2926
2927/* Push a block of length SIZE (perhaps variable)
2928 and return an rtx to address the beginning of the block.
bbf6f052
RK
2929 The value may be virtual_outgoing_args_rtx.
2930
2931 EXTRA is the number of bytes of padding to push in addition to SIZE.
2932 BELOW nonzero means this padding comes at low addresses;
2933 otherwise, the padding comes at high addresses. */
2934
 2935 rtx
 2936 push_block (rtx size, int extra, int below)
 2937 {
b3694847 2938 rtx temp;
88f63c77
RK
2939
2940 size = convert_modes (Pmode, ptr_mode, size, 1);
bbf6f052
RK
2941 if (CONSTANT_P (size))
2942 anti_adjust_stack (plus_constant (size, extra));
f8cfc6aa 2943 else if (REG_P (size) && extra == 0)
bbf6f052
RK
2944 anti_adjust_stack (size);
2945 else
2946 {
ce48579b 2947 temp = copy_to_mode_reg (Pmode, size);
bbf6f052 2948 if (extra != 0)
906c4e36 2949 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
bbf6f052
RK
2950 temp, 0, OPTAB_LIB_WIDEN);
2951 anti_adjust_stack (temp);
2952 }
2953
f73ad30e 2954#ifndef STACK_GROWS_DOWNWARD
f73ad30e 2955 if (0)
f73ad30e
JH
2956#else
2957 if (1)
bbf6f052 2958#endif
f73ad30e 2959 {
f73ad30e
JH
2960 temp = virtual_outgoing_args_rtx;
2961 if (extra != 0 && below)
2962 temp = plus_constant (temp, extra);
2963 }
2964 else
2965 {
2966 if (GET_CODE (size) == CONST_INT)
2967 temp = plus_constant (virtual_outgoing_args_rtx,
3a94c984 2968 -INTVAL (size) - (below ? 0 : extra));
f73ad30e
JH
2969 else if (extra != 0 && !below)
2970 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3bdf5ad1 2971 negate_rtx (Pmode, plus_constant (size, extra)));
f73ad30e
JH
2972 else
2973 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2974 negate_rtx (Pmode, size));
2975 }
bbf6f052
RK
2976
2977 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2978}
2979
21d93687
RK
2980#ifdef PUSH_ROUNDING
2981
566aa174 2982/* Emit single push insn. */
21d93687 2983
566aa174 2984static void
502b8322 2985emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
566aa174 2986{
566aa174 2987 rtx dest_addr;
918a6124 2988 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
566aa174 2989 rtx dest;
371b8fc0
JH
2990 enum insn_code icode;
2991 insn_operand_predicate_fn pred;
566aa174 2992
371b8fc0
JH
2993 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
2994 /* If there is push pattern, use it. Otherwise try old way of throwing
2995 MEM representing push operation to move expander. */
2996 icode = push_optab->handlers[(int) mode].insn_code;
2997 if (icode != CODE_FOR_nothing)
2998 {
2999 if (((pred = insn_data[(int) icode].operand[0].predicate)
505ddab6 3000 && !((*pred) (x, mode))))
371b8fc0
JH
3001 x = force_reg (mode, x);
3002 emit_insn (GEN_FCN (icode) (x));
3003 return;
3004 }
566aa174
JH
3005 if (GET_MODE_SIZE (mode) == rounded_size)
3006 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
329d586f
KH
3007 /* If we are to pad downward, adjust the stack pointer first and
3008 then store X into the stack location using an offset. This is
3009 because emit_move_insn does not know how to pad; it does not have
3010 access to type. */
3011 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3012 {
3013 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3014 HOST_WIDE_INT offset;
3015
3016 emit_move_insn (stack_pointer_rtx,
3017 expand_binop (Pmode,
3018#ifdef STACK_GROWS_DOWNWARD
3019 sub_optab,
3020#else
3021 add_optab,
3022#endif
3023 stack_pointer_rtx,
3024 GEN_INT (rounded_size),
3025 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3026
3027 offset = (HOST_WIDE_INT) padding_size;
3028#ifdef STACK_GROWS_DOWNWARD
3029 if (STACK_PUSH_CODE == POST_DEC)
3030 /* We have already decremented the stack pointer, so get the
3031 previous value. */
3032 offset += (HOST_WIDE_INT) rounded_size;
3033#else
3034 if (STACK_PUSH_CODE == POST_INC)
3035 /* We have already incremented the stack pointer, so get the
3036 previous value. */
3037 offset -= (HOST_WIDE_INT) rounded_size;
3038#endif
3039 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3040 }
566aa174
JH
3041 else
3042 {
3043#ifdef STACK_GROWS_DOWNWARD
329d586f 3044 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
566aa174 3045 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
505ddab6 3046 GEN_INT (-(HOST_WIDE_INT) rounded_size));
566aa174 3047#else
329d586f 3048 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
566aa174
JH
3049 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3050 GEN_INT (rounded_size));
3051#endif
3052 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3053 }
3054
3055 dest = gen_rtx_MEM (mode, dest_addr);
3056
566aa174
JH
3057 if (type != 0)
3058 {
3059 set_mem_attributes (dest, type, 1);
c3d32120
RK
3060
3061 if (flag_optimize_sibling_calls)
3062 /* Function incoming arguments may overlap with sibling call
3063 outgoing arguments and we cannot allow reordering of reads
3064 from function arguments with stores to outgoing arguments
3065 of sibling calls. */
3066 set_mem_alias_set (dest, 0);
566aa174
JH
3067 }
3068 emit_move_insn (dest, x);
566aa174 3069}
21d93687 3070#endif
566aa174 3071
bbf6f052
RK
3072/* Generate code to push X onto the stack, assuming it has mode MODE and
3073 type TYPE.
3074 MODE is redundant except when X is a CONST_INT (since they don't
3075 carry mode info).
3076 SIZE is an rtx for the size of data to be copied (in bytes),
3077 needed only if X is BLKmode.
3078
f1eaaf73 3079 ALIGN (in bits) is maximum alignment we can assume.
bbf6f052 3080
cd048831
RK
3081 If PARTIAL and REG are both nonzero, then copy that many of the first
3082 words of X into registers starting with REG, and push the rest of X.
bbf6f052
RK
3083 The amount of space pushed is decreased by PARTIAL words,
3084 rounded *down* to a multiple of PARM_BOUNDARY.
3085 REG must be a hard register in this case.
cd048831
RK
3086 If REG is zero but PARTIAL is not, take any all others actions for an
3087 argument partially in registers, but do not actually load any
3088 registers.
bbf6f052
RK
3089
3090 EXTRA is the amount in bytes of extra space to leave next to this arg.
6dc42e49 3091 This is ignored if an argument block has already been allocated.
bbf6f052
RK
3092
3093 On a machine that lacks real push insns, ARGS_ADDR is the address of
3094 the bottom of the argument block for this call. We use indexing off there
3095 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
3096 argument block has not been preallocated.
3097
e5e809f4
JL
3098 ARGS_SO_FAR is the size of args previously pushed for this call.
3099
3100 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3101 for arguments passed in registers. If nonzero, it will be the number
3102 of bytes required. */
bbf6f052
RK
3103
3104void
502b8322
AJ
3105emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3106 unsigned int align, int partial, rtx reg, int extra,
3107 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3108 rtx alignment_pad)
bbf6f052
RK
3109{
3110 rtx xinner;
3111 enum direction stack_direction
3112#ifdef STACK_GROWS_DOWNWARD
3113 = downward;
3114#else
3115 = upward;
3116#endif
3117
3118 /* Decide where to pad the argument: `downward' for below,
3119 `upward' for above, or `none' for don't pad it.
3120 Default is below for small data on big-endian machines; else above. */
3121 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3122
0fb7aeda 3123 /* Invert direction if stack is post-decrement.
9e0e11bf
GK
3124 FIXME: why? */
3125 if (STACK_PUSH_CODE == POST_DEC)
bbf6f052
RK
3126 if (where_pad != none)
3127 where_pad = (where_pad == downward ? upward : downward);
3128
ad76cef8 3129 xinner = x;
bbf6f052
RK
3130
3131 if (mode == BLKmode)
3132 {
3133 /* Copy a block into the stack, entirely or partially. */
3134
b3694847 3135 rtx temp;
bbf6f052 3136 int used = partial * UNITS_PER_WORD;
531547e9 3137 int offset;
bbf6f052 3138 int skip;
3a94c984 3139
531547e9
FJ
3140 if (reg && GET_CODE (reg) == PARALLEL)
3141 {
3142 /* Use the size of the elt to compute offset. */
3143 rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
3144 used = partial * GET_MODE_SIZE (GET_MODE (elt));
3145 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3146 }
3147 else
3148 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3149
bbf6f052
RK
3150 if (size == 0)
3151 abort ();
3152
3153 used -= offset;
3154
3155 /* USED is now the # of bytes we need not copy to the stack
3156 because registers will take care of them. */
3157
3158 if (partial != 0)
f4ef873c 3159 xinner = adjust_address (xinner, BLKmode, used);
bbf6f052
RK
3160
3161 /* If the partial register-part of the arg counts in its stack size,
3162 skip the part of stack space corresponding to the registers.
3163 Otherwise, start copying to the beginning of the stack space,
3164 by setting SKIP to 0. */
e5e809f4 3165 skip = (reg_parm_stack_space == 0) ? 0 : used;
bbf6f052
RK
3166
3167#ifdef PUSH_ROUNDING
3168 /* Do it with several push insns if that doesn't take lots of insns
3169 and if there is no difficulty with push insns that skip bytes
3170 on the stack for alignment purposes. */
3171 if (args_addr == 0
f73ad30e 3172 && PUSH_ARGS
bbf6f052
RK
3173 && GET_CODE (size) == CONST_INT
3174 && skip == 0
f26aca6d 3175 && MEM_ALIGN (xinner) >= align
15914757 3176 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
bbf6f052
RK
3177 /* Here we avoid the case of a structure whose weak alignment
3178 forces many pushes of a small amount of data,
3179 and such small pushes do rounding that causes trouble. */
e1565e65 3180 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
19caa751 3181 || align >= BIGGEST_ALIGNMENT
f1eaaf73
DE
3182 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3183 == (align / BITS_PER_UNIT)))
bbf6f052
RK
3184 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3185 {
3186 /* Push padding now if padding above and stack grows down,
3187 or if padding below and stack grows up.
3188 But if space is already allocated, this has already been done. */
3189 if (extra && args_addr == 0
3190 && where_pad != none && where_pad != stack_direction)
906c4e36 3191 anti_adjust_stack (GEN_INT (extra));
bbf6f052 3192
8fd3cf4e 3193 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
bbf6f052
RK
3194 }
3195 else
3a94c984 3196#endif /* PUSH_ROUNDING */
bbf6f052 3197 {
7ab923cc
JJ
3198 rtx target;
3199
bbf6f052
RK
3200 /* Otherwise make space on the stack and copy the data
3201 to the address of that space. */
3202
3203 /* Deduct words put into registers from the size we must copy. */
3204 if (partial != 0)
3205 {
3206 if (GET_CODE (size) == CONST_INT)
906c4e36 3207 size = GEN_INT (INTVAL (size) - used);
bbf6f052
RK
3208 else
3209 size = expand_binop (GET_MODE (size), sub_optab, size,
906c4e36
RK
3210 GEN_INT (used), NULL_RTX, 0,
3211 OPTAB_LIB_WIDEN);
bbf6f052
RK
3212 }
3213
3214 /* Get the address of the stack space.
3215 In this case, we do not deal with EXTRA separately.
3216 A single stack adjust will do. */
3217 if (! args_addr)
3218 {
3219 temp = push_block (size, extra, where_pad == downward);
3220 extra = 0;
3221 }
3222 else if (GET_CODE (args_so_far) == CONST_INT)
3223 temp = memory_address (BLKmode,
3224 plus_constant (args_addr,
3225 skip + INTVAL (args_so_far)));
3226 else
3227 temp = memory_address (BLKmode,
38a448ca
RH
3228 plus_constant (gen_rtx_PLUS (Pmode,
3229 args_addr,
3230 args_so_far),
bbf6f052 3231 skip));
4ca79136
RH
3232
3233 if (!ACCUMULATE_OUTGOING_ARGS)
3234 {
3235 /* If the source is referenced relative to the stack pointer,
3236 copy it to another register to stabilize it. We do not need
3237 to do this if we know that we won't be changing sp. */
3238
3239 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3240 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3241 temp = copy_to_reg (temp);
3242 }
3243
3a94c984 3244 target = gen_rtx_MEM (BLKmode, temp);
7ab923cc 3245
2bb16349
RH
3246 /* We do *not* set_mem_attributes here, because incoming arguments
3247 may overlap with sibling call outgoing arguments and we cannot
3248 allow reordering of reads from function arguments with stores
3249 to outgoing arguments of sibling calls. We do, however, want
3250 to record the alignment of the stack slot. */
44bb111a
RH
3251 /* ALIGN may well be better aligned than TYPE, e.g. due to
3252 PARM_BOUNDARY. Assume the caller isn't lying. */
3253 set_mem_align (target, align);
4ca79136 3254
44bb111a 3255 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
bbf6f052
RK
3256 }
3257 }
3258 else if (partial > 0)
3259 {
3260 /* Scalar partly in registers. */
3261
3262 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3263 int i;
3264 int not_stack;
3265 /* # words at the start of the argument
3266 that we must make space for but need not store. */
3267 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3268 int args_offset = INTVAL (args_so_far);
3269 int skip;
3270
3271 /* Push padding now if padding above and stack grows down,
3272 or if padding below and stack grows up.
3273 But if space is already allocated, this has already been done. */
3274 if (extra && args_addr == 0
3275 && where_pad != none && where_pad != stack_direction)
906c4e36 3276 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
3277
3278 /* If we make space by pushing it, we might as well push
3279 the real data. Otherwise, we can leave OFFSET nonzero
3280 and leave the space uninitialized. */
3281 if (args_addr == 0)
3282 offset = 0;
3283
3284 /* Now NOT_STACK gets the number of words that we don't need to
3285 allocate on the stack. */
3286 not_stack = partial - offset;
3287
3288 /* If the partial register-part of the arg counts in its stack size,
3289 skip the part of stack space corresponding to the registers.
3290 Otherwise, start copying to the beginning of the stack space,
3291 by setting SKIP to 0. */
e5e809f4 3292 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
bbf6f052
RK
3293
3294 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3295 x = validize_mem (force_const_mem (mode, x));
3296
3297 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3298 SUBREGs of such registers are not allowed. */
f8cfc6aa 3299 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
bbf6f052
RK
3300 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3301 x = copy_to_reg (x);
3302
3303 /* Loop over all the words allocated on the stack for this arg. */
3304 /* We can do it by words, because any scalar bigger than a word
3305 has a size a multiple of a word. */
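      /* E.g. (illustrative): for a DImode scalar on a 32-bit target with
	 PARTIAL == 1, SIZE is 2 and NOT_STACK is 1, so only word 1 is
	 pushed here; word 0 is copied into its register at the end of
	 emit_push_insn.  */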
3306#ifndef PUSH_ARGS_REVERSED
3307 for (i = not_stack; i < size; i++)
3308#else
3309 for (i = size - 1; i >= not_stack; i--)
3310#endif
3311 if (i >= not_stack + offset)
3312 emit_push_insn (operand_subword_force (x, i, mode),
906c4e36
RK
3313 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3314 0, args_addr,
3315 GEN_INT (args_offset + ((i - not_stack + skip)
e5e809f4 3316 * UNITS_PER_WORD)),
4fc026cd 3317 reg_parm_stack_space, alignment_pad);
bbf6f052
RK
3318 }
3319 else
3320 {
3321 rtx addr;
3bdf5ad1 3322 rtx dest;
bbf6f052
RK
3323
3324 /* Push padding now if padding above and stack grows down,
3325 or if padding below and stack grows up.
3326 But if space is already allocated, this has already been done. */
3327 if (extra && args_addr == 0
3328 && where_pad != none && where_pad != stack_direction)
906c4e36 3329 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
3330
3331#ifdef PUSH_ROUNDING
f73ad30e 3332 if (args_addr == 0 && PUSH_ARGS)
566aa174 3333 emit_single_push_insn (mode, x, type);
bbf6f052
RK
3334 else
3335#endif
921b3427
RK
3336 {
3337 if (GET_CODE (args_so_far) == CONST_INT)
3338 addr
3339 = memory_address (mode,
3a94c984 3340 plus_constant (args_addr,
921b3427 3341 INTVAL (args_so_far)));
3a94c984 3342 else
38a448ca
RH
3343 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3344 args_so_far));
566aa174 3345 dest = gen_rtx_MEM (mode, addr);
2bb16349
RH
3346
3347 /* We do *not* set_mem_attributes here, because incoming arguments
3348 may overlap with sibling call outgoing arguments and we cannot
3349 allow reordering of reads from function arguments with stores
3350 to outgoing arguments of sibling calls. We do, however, want
3351 to record the alignment of the stack slot. */
3352 /* ALIGN may well be better aligned than TYPE, e.g. due to
3353 PARM_BOUNDARY. Assume the caller isn't lying. */
3354 set_mem_align (dest, align);
bbf6f052 3355
566aa174 3356 emit_move_insn (dest, x);
566aa174 3357 }
bbf6f052
RK
3358 }
3359
bbf6f052
RK
3360 /* If part should go in registers, copy that part
3361 into the appropriate registers. Do this now, at the end,
3362 since mem-to-mem copies above may do function calls. */
cd048831 3363 if (partial > 0 && reg != 0)
fffa9c1d
JW
3364 {
3365 /* Handle calls that pass values in multiple non-contiguous locations.
3366 The Irix 6 ABI has examples of this. */
3367 if (GET_CODE (reg) == PARALLEL)
6e985040 3368 emit_group_load (reg, x, type, -1);
fffa9c1d
JW
3369 else
3370 move_block_to_reg (REGNO (reg), x, partial, mode);
3371 }
bbf6f052
RK
3372
3373 if (extra && args_addr == 0 && where_pad == stack_direction)
906c4e36 3374 anti_adjust_stack (GEN_INT (extra));
3a94c984 3375
3ea2292a 3376 if (alignment_pad && args_addr == 0)
4fc026cd 3377 anti_adjust_stack (alignment_pad);
bbf6f052
RK
3378}
3379\f
296b4ed9
RK
3380/* Return X if X can be used as a subtarget in a sequence of arithmetic
3381 operations. */
3382
3383static rtx
502b8322 3384get_subtarget (rtx x)
296b4ed9
RK
3385{
3386 return ((x == 0
3387 /* Only registers can be subtargets. */
f8cfc6aa 3388 || !REG_P (x)
296b4ed9
RK
3389 /* If the register is readonly, it can't be set more than once. */
3390 || RTX_UNCHANGING_P (x)
3391 /* Don't use hard regs to avoid extending their life. */
3392 || REGNO (x) < FIRST_PSEUDO_REGISTER
3393 /* Avoid subtargets inside loops,
3394 since they hide some invariant expressions. */
3395 || preserve_subexpressions_p ())
3396 ? 0 : x);
3397}
3398
bbf6f052
RK
3399/* Expand an assignment that stores the value of FROM into TO.
3400 If WANT_VALUE is nonzero, return an rtx for the value of TO.
96985307 3401 (If the value is constant, this rtx is a constant.)
b90f141a 3402 Otherwise, the returned value is NULL_RTX. */
bbf6f052
RK
3403
3404rtx
b90f141a 3405expand_assignment (tree to, tree from, int want_value)
bbf6f052 3406{
b3694847 3407 rtx to_rtx = 0;
bbf6f052
RK
3408 rtx result;
3409
3410 /* Don't crash if the lhs of the assignment was erroneous. */
3411
3412 if (TREE_CODE (to) == ERROR_MARK)
709f5be1
RS
3413 {
3414 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3415 return want_value ? result : NULL_RTX;
3416 }
bbf6f052
RK
3417
3418 /* Assignment of a structure component needs special treatment
3419 if the structure component's rtx is not simply a MEM.
6be58303
JW
3420 Assignment of an array element at a constant index, and assignment of
3421 an array element in an unaligned packed structure field, have the same
3422 problem. */
bbf6f052 3423
08293add 3424 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
7c02ae17
DE
3425 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
3426 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
bbf6f052
RK
3427 {
3428 enum machine_mode mode1;
770ae6cc 3429 HOST_WIDE_INT bitsize, bitpos;
a06ef755 3430 rtx orig_to_rtx;
7bb0943f 3431 tree offset;
bbf6f052
RK
3432 int unsignedp;
3433 int volatilep = 0;
0088fcb1
RK
3434 tree tem;
3435
3436 push_temp_slots ();
839c4796 3437 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
a06ef755 3438 &unsignedp, &volatilep);
bbf6f052
RK
3439
3440 /* If we are going to use store_bit_field and extract_bit_field,
3441 make sure to_rtx will be safe for multiple use. */
3442
3443 if (mode1 == VOIDmode && want_value)
3444 tem = stabilize_reference (tem);
3445
1ed1b4fb
RK
3446 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3447
7bb0943f
RS
3448 if (offset != 0)
3449 {
e3c8ea67 3450 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
7bb0943f 3451
3c0cb5de 3452 if (!MEM_P (to_rtx))
7bb0943f 3453 abort ();
bd070e1a 3454
bd070e1a 3455#ifdef POINTERS_EXTEND_UNSIGNED
4b6c1672 3456 if (GET_MODE (offset_rtx) != Pmode)
267b28bd 3457 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
fa06ab5c
RK
3458#else
3459 if (GET_MODE (offset_rtx) != ptr_mode)
3460 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
bd070e1a 3461#endif
bd070e1a 3462
9a7b9f4f
JL
3463 /* A constant address in TO_RTX can have VOIDmode; we must not try
3464 to call force_reg for that case. Avoid that case. */
3c0cb5de 3465 if (MEM_P (to_rtx)
89752202 3466 && GET_MODE (to_rtx) == BLKmode
9a7b9f4f 3467 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
a06ef755 3468 && bitsize > 0
3a94c984 3469 && (bitpos % bitsize) == 0
89752202 3470 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
a06ef755 3471 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
89752202 3472 {
e3c8ea67 3473 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
89752202
HB
3474 bitpos = 0;
3475 }
3476
0d4903b8 3477 to_rtx = offset_address (to_rtx, offset_rtx,
d50a16c4
EB
3478 highest_pow2_factor_for_target (to,
3479 offset));
7bb0943f 3480 }
c5c76735 3481
3c0cb5de 3482 if (MEM_P (to_rtx))
998d7deb 3483 {
998d7deb
RH
3484 /* If the field is at offset zero, we could have been given the
3485 DECL_RTX of the parent struct. Don't munge it. */
3486 to_rtx = shallow_copy_rtx (to_rtx);
3487
6f1087be 3488 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
998d7deb 3489 }
effbcc6a 3490
a06ef755
RK
3491 /* Deal with volatile and readonly fields. The former is only done
3492 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3c0cb5de 3493 if (volatilep && MEM_P (to_rtx))
a06ef755
RK
3494 {
3495 if (to_rtx == orig_to_rtx)
3496 to_rtx = copy_rtx (to_rtx);
3497 MEM_VOLATILE_P (to_rtx) = 1;
bbf6f052
RK
3498 }
3499
956d6950 3500 if (TREE_CODE (to) == COMPONENT_REF
d76bc29c
EB
3501 && TREE_READONLY (TREE_OPERAND (to, 1))
3502 /* We can't assert that a MEM won't be set more than once
3503 if the component is not addressable because another
3504 non-addressable component may be referenced by the same MEM. */
3c0cb5de 3505 && ! (MEM_P (to_rtx) && ! can_address_p (to)))
956d6950 3506 {
a06ef755 3507 if (to_rtx == orig_to_rtx)
956d6950 3508 to_rtx = copy_rtx (to_rtx);
956d6950
JL
3509 RTX_UNCHANGING_P (to_rtx) = 1;
3510 }
3511
3c0cb5de 3512 if (MEM_P (to_rtx) && ! can_address_p (to))
a06ef755
RK
3513 {
3514 if (to_rtx == orig_to_rtx)
3515 to_rtx = copy_rtx (to_rtx);
3516 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3517 }
3518
b8b139c7
JJ
3519 /* Optimize bitfld op= val in certain cases. */
3520 while (mode1 == VOIDmode && !want_value
3521 && bitsize > 0 && bitsize < BITS_PER_WORD
60ba25bf
JJ
3522 && GET_MODE_BITSIZE (GET_MODE (to_rtx)) <= BITS_PER_WORD
3523 && !TREE_SIDE_EFFECTS (to)
3524 && !TREE_THIS_VOLATILE (to))
df62f18a 3525 {
60ba25bf 3526 tree src, op0, op1;
b8b139c7
JJ
3527 rtx value, str_rtx = to_rtx;
3528 HOST_WIDE_INT bitpos1 = bitpos;
60ba25bf
JJ
3529 optab binop;
3530
3531 src = from;
3532 STRIP_NOPS (src);
3533 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE
3534 || TREE_CODE_CLASS (TREE_CODE (src)) != '2')
3535 break;
3536
3537 op0 = TREE_OPERAND (src, 0);
3538 op1 = TREE_OPERAND (src, 1);
3539 STRIP_NOPS (op0);
3540
3541 if (! operand_equal_p (to, op0, 0))
3542 break;
df62f18a 3543
b8b139c7
JJ
3544 if (MEM_P (str_rtx))
3545 {
3546 enum machine_mode mode = GET_MODE (str_rtx);
3547 HOST_WIDE_INT offset1;
3548
3549 if (GET_MODE_BITSIZE (mode) == 0
3550 || GET_MODE_BITSIZE (mode) > BITS_PER_WORD)
3551 mode = word_mode;
3552 mode = get_best_mode (bitsize, bitpos1, MEM_ALIGN (str_rtx),
3553 mode, 0);
3554 if (mode == VOIDmode)
3555 break;
3556
3557 offset1 = bitpos1;
3558 bitpos1 %= GET_MODE_BITSIZE (mode);
3559 offset1 = (offset1 - bitpos1) / BITS_PER_UNIT;
3560 str_rtx = adjust_address (str_rtx, mode, offset1);
3561 }
3562 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
3563 break;
3564
3565 /* If the bit field covers the whole REG/MEM, store_field
3566 will likely generate better code. */
3567 if (bitsize >= GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3568 break;
3569
3570 /* We can't handle fields split across multiple entities. */
3571 if (bitpos1 + bitsize > GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3572 break;
3573
df62f18a 3574 if (BYTES_BIG_ENDIAN)
b8b139c7
JJ
3575 bitpos1 = GET_MODE_BITSIZE (GET_MODE (str_rtx)) - bitpos1
3576 - bitsize;
df62f18a
JJ
3577
3578 /* Special case some bitfield op= exp. */
60ba25bf 3579 switch (TREE_CODE (src))
df62f18a
JJ
3580 {
3581 case PLUS_EXPR:
3582 case MINUS_EXPR:
df62f18a 3583 /* For now, just optimize the case of the topmost bitfield
60ba25bf
JJ
3584 where we don't need to do any masking, and also
3585 1-bit bitfields, where xor can be used.
df62f18a
JJ
3586 We might win by one instruction for the other bitfields
3587 too if insv/extv instructions aren't used, so that
3588 can be added later. */
b8b139c7 3589 if (bitpos1 + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx))
60ba25bf 3590 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
df62f18a 3591 break;
b8b139c7
JJ
3592 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), 0);
3593 value = convert_modes (GET_MODE (str_rtx),
3594 TYPE_MODE (TREE_TYPE (op1)), value,
3595 TYPE_UNSIGNED (TREE_TYPE (op1)));
3596
3597 /* We may be accessing data outside the field, which means
3598 we can alias adjacent data. */
3599 if (MEM_P (str_rtx))
3600 {
3601 str_rtx = shallow_copy_rtx (str_rtx);
3602 set_mem_alias_set (str_rtx, 0);
3603 set_mem_expr (str_rtx, 0);
3604 }
3605
60ba25bf
JJ
3606 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
3607 if (bitsize == 1
b8b139c7 3608 && bitpos1 + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
60ba25bf 3609 {
b8b139c7 3610 value = expand_and (GET_MODE (str_rtx), value, const1_rtx,
60ba25bf
JJ
3611 NULL_RTX);
3612 binop = xor_optab;
3613 }
b8b139c7
JJ
3614 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx),
3615 value, build_int_2 (bitpos1, 0),
df62f18a 3616 NULL_RTX, 1);
b8b139c7
JJ
3617 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
3618 value, str_rtx, 1, OPTAB_WIDEN);
3619 if (result != str_rtx)
3620 emit_move_insn (str_rtx, result);
df62f18a
JJ
3621 free_temp_slots ();
3622 pop_temp_slots ();
3623 return NULL_RTX;
b8b139c7 3624
df62f18a
JJ
3625 default:
3626 break;
3627 }
60ba25bf
JJ
3628
3629 break;
df62f18a
JJ
3630 }
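      /* A sketch of the source this path targets (hypothetical example,
	 not from this file):

	     struct s { unsigned int pad : 12; unsigned int top : 20; } x;
	     ... x.top += 1; ...

	 TOP occupies the topmost bits of its word, so no masking is
	 needed and the addition becomes a single shifted add on the
	 containing word; a 1-bit field plus or minus a constant is
	 instead reduced to an xor of that bit.  */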
3631
a06ef755
RK
3632 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3633 (want_value
3634 /* Spurious cast for HPUX compiler. */
3635 ? ((enum machine_mode)
3636 TYPE_MODE (TREE_TYPE (to)))
3637 : VOIDmode),
3638 unsignedp, TREE_TYPE (tem), get_alias_set (to));
a69beca1 3639
a06ef755
RK
3640 preserve_temp_slots (result);
3641 free_temp_slots ();
3642 pop_temp_slots ();
a69beca1 3643
a06ef755
RK
3644 /* If the value is meaningful, convert RESULT to the proper mode.
3645 Otherwise, return nothing. */
3646 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3647 TYPE_MODE (TREE_TYPE (from)),
3648 result,
8df83eae 3649 TYPE_UNSIGNED (TREE_TYPE (to)))
a06ef755 3650 : NULL_RTX);
bbf6f052
RK
3651 }
3652
cd1db108
RS
3653 /* If the rhs is a function call and its value is not an aggregate,
3654 call the function before we start to compute the lhs.
3655 This is needed for correct code for cases such as
3656 val = setjmp (buf) on machines where reference to val
1ad87b63
RK
3657 requires loading up part of an address in a separate insn.
3658
1858863b
JW
3659 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3660 since it might be a promoted variable where the zero- or sign-extension
3661 needs to be done. Handling this in the normal way is safe because no
3662 computation is done before the call. */
61f71b34 3663 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
b35cd3c1 3664 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
1858863b 3665 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
f8cfc6aa 3666 && REG_P (DECL_RTL (to))))
cd1db108 3667 {
0088fcb1
RK
3668 rtx value;
3669
3670 push_temp_slots ();
3671 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
cd1db108 3672 if (to_rtx == 0)
37a08a29 3673 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
aaf87c45 3674
fffa9c1d
JW
3675 /* Handle calls that return values in multiple non-contiguous locations.
3676 The Irix 6 ABI has examples of this. */
3677 if (GET_CODE (to_rtx) == PARALLEL)
6e985040
AM
3678 emit_group_load (to_rtx, value, TREE_TYPE (from),
3679 int_size_in_bytes (TREE_TYPE (from)));
fffa9c1d 3680 else if (GET_MODE (to_rtx) == BLKmode)
44bb111a 3681 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
aaf87c45 3682 else
6419e5b0 3683 {
5ae6cd0d 3684 if (POINTER_TYPE_P (TREE_TYPE (to)))
6419e5b0 3685 value = convert_memory_address (GET_MODE (to_rtx), value);
6419e5b0
DT
3686 emit_move_insn (to_rtx, value);
3687 }
cd1db108
RS
3688 preserve_temp_slots (to_rtx);
3689 free_temp_slots ();
0088fcb1 3690 pop_temp_slots ();
709f5be1 3691 return want_value ? to_rtx : NULL_RTX;
cd1db108
RS
3692 }
3693
bbf6f052
RK
3694 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3695 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3696
3697 if (to_rtx == 0)
37a08a29 3698 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
bbf6f052 3699
86d38d25 3700 /* Don't move directly into a return register. */
14a774a9 3701 if (TREE_CODE (to) == RESULT_DECL
f8cfc6aa 3702 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
86d38d25 3703 {
0088fcb1
RK
3704 rtx temp;
3705
3706 push_temp_slots ();
3707 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
14a774a9
RK
3708
3709 if (GET_CODE (to_rtx) == PARALLEL)
6e985040
AM
3710 emit_group_load (to_rtx, temp, TREE_TYPE (from),
3711 int_size_in_bytes (TREE_TYPE (from)));
14a774a9
RK
3712 else
3713 emit_move_insn (to_rtx, temp);
3714
86d38d25
RS
3715 preserve_temp_slots (to_rtx);
3716 free_temp_slots ();
0088fcb1 3717 pop_temp_slots ();
709f5be1 3718 return want_value ? to_rtx : NULL_RTX;
86d38d25
RS
3719 }
3720
bbf6f052
RK
3721 /* In case we are returning the contents of an object which overlaps
3722 the place the value is being stored, use a safe function when copying
3723 a value through a pointer into a structure value return block. */
3724 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3725 && current_function_returns_struct
3726 && !current_function_returns_pcc_struct)
3727 {
0088fcb1
RK
3728 rtx from_rtx, size;
3729
3730 push_temp_slots ();
33a20d10 3731 size = expr_size (from);
37a08a29 3732 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
bbf6f052 3733
8f99553f
JM
3734 emit_library_call (memmove_libfunc, LCT_NORMAL,
3735 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3736 XEXP (from_rtx, 0), Pmode,
3737 convert_to_mode (TYPE_MODE (sizetype),
3738 size, TYPE_UNSIGNED (sizetype)),
3739 TYPE_MODE (sizetype));
bbf6f052
RK
3740
3741 preserve_temp_slots (to_rtx);
3742 free_temp_slots ();
0088fcb1 3743 pop_temp_slots ();
709f5be1 3744 return want_value ? to_rtx : NULL_RTX;
bbf6f052
RK
3745 }
3746
3747 /* Compute FROM and store the value in the rtx we got. */
3748
0088fcb1 3749 push_temp_slots ();
bbf6f052
RK
3750 result = store_expr (from, to_rtx, want_value);
3751 preserve_temp_slots (result);
3752 free_temp_slots ();
0088fcb1 3753 pop_temp_slots ();
709f5be1 3754 return want_value ? result : NULL_RTX;
bbf6f052
RK
3755}
3756
3757/* Generate code for computing expression EXP,
3758 and storing the value into TARGET.
bbf6f052 3759
8403445a 3760 If WANT_VALUE & 1 is nonzero, return a copy of the value
709f5be1
RS
3761 not in TARGET, so that we can be sure to use the proper
3762 value in a containing expression even if TARGET has something
3763 else stored in it. If possible, we copy the value through a pseudo
3764 and return that pseudo. Or, if the value is constant, we try to
3765 return the constant. In some cases, we return a pseudo
3766 copied *from* TARGET.
3767
3768 If the mode is BLKmode then we may return TARGET itself.
3769 It turns out that in BLKmode it doesn't cause a problem,
3770 because C has no operators that could combine two different
3771 assignments into the same BLKmode object with different values
3772 with no sequence point. Will other languages need this to
3773 be more thorough?
3774
8403445a 3775 If WANT_VALUE & 1 is 0, we return NULL, to make sure
709f5be1 3776 to catch quickly any cases where the caller uses the value
8403445a
AM
3777 and fails to set WANT_VALUE.
3778
3779 If WANT_VALUE & 2 is set, this is a store into a call param on the
3780 stack, and block moves may need to be treated specially. */
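/* For illustration (hypothetical caller, not from this file): a caller
   that is storing a call argument to its stack slot and also needs the
   value afterwards would pass WANT_VALUE == 1 | 2 -- bit 0 to get a
   usable rtx back, bit 1 so that block moves below are emitted with
   BLOCK_OP_CALL_PARM.  */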
bbf6f052
RK
3781
3782rtx
502b8322 3783store_expr (tree exp, rtx target, int want_value)
bbf6f052 3784{
b3694847 3785 rtx temp;
0fab64a3 3786 rtx alt_rtl = NULL_RTX;
bbf6f052 3787 int dont_return_target = 0;
e5408e52 3788 int dont_store_target = 0;
bbf6f052 3789
847311f4
AL
3790 if (VOID_TYPE_P (TREE_TYPE (exp)))
3791 {
3792 /* C++ can generate ?: expressions with a throw expression in one
3793 branch and an rvalue in the other. Here, we resolve attempts to
4d6922ee 3794 store the throw expression's nonexistent result. */
847311f4
AL
3795 if (want_value)
3796 abort ();
3797 expand_expr (exp, const0_rtx, VOIDmode, 0);
3798 return NULL_RTX;
3799 }
bbf6f052
RK
3800 if (TREE_CODE (exp) == COMPOUND_EXPR)
3801 {
3802 /* Perform first part of compound expression, then assign from second
3803 part. */
8403445a
AM
3804 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
3805 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
709f5be1 3806 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
bbf6f052
RK
3807 }
3808 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3809 {
3810 /* For conditional expression, get safe form of the target. Then
3811 test the condition, doing the appropriate assignment on either
3812 side. This avoids the creation of unnecessary temporaries.
3813 For non-BLKmode, it is more efficient not to do this. */
3814
3815 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3816
dabf8373 3817 do_pending_stack_adjust ();
bbf6f052
RK
3818 NO_DEFER_POP;
3819 jumpifnot (TREE_OPERAND (exp, 0), lab1);
8403445a 3820 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
bbf6f052
RK
3821 emit_jump_insn (gen_jump (lab2));
3822 emit_barrier ();
3823 emit_label (lab1);
8403445a 3824 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
bbf6f052
RK
3825 emit_label (lab2);
3826 OK_DEFER_POP;
a3a58acc 3827
8403445a 3828 return want_value & 1 ? target : NULL_RTX;
bbf6f052 3829 }
8403445a 3830 else if ((want_value & 1) != 0
3c0cb5de 3831 && MEM_P (target)
8403445a 3832 && ! MEM_VOLATILE_P (target)
12f06d17
CH
3833 && GET_MODE (target) != BLKmode)
3834 /* If target is in memory and caller wants value in a register instead,
3835 arrange that. Pass TARGET as target for expand_expr so that,
3836 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3837 We know expand_expr will not use the target in that case.
3838 Don't do this if TARGET is volatile because we are supposed
3839 to write it and then read it. */
3840 {
8403445a
AM
3841 temp = expand_expr (exp, target, GET_MODE (target),
3842 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
12f06d17 3843 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
e5408e52
JJ
3844 {
3845 /* If TEMP is already in the desired TARGET, only copy it from
3846 memory and don't store it there again. */
3847 if (temp == target
3848 || (rtx_equal_p (temp, target)
3849 && ! side_effects_p (temp) && ! side_effects_p (target)))
3850 dont_store_target = 1;
3851 temp = copy_to_reg (temp);
3852 }
12f06d17
CH
3853 dont_return_target = 1;
3854 }
1499e0a8 3855 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
09da1532 3856 /* If this is a scalar in a register that is stored in a wider mode
1499e0a8
RK
3857 than the declared mode, compute the result into its declared mode
3858 and then convert to the wider mode. Our value is the computed
3859 expression. */
3860 {
b76b08ef
RK
3861 rtx inner_target = 0;
3862
5a32d038 3863 /* If we don't want a value, we can do the conversion inside EXP,
f635a84d
RK
3864 which will often result in some optimizations. Do the conversion
3865 in two steps: first change the signedness, if needed, then
ab6c58f1
RK
3866 the extension. But don't do this if the type of EXP is a subtype
3867 of something else since then the conversion might involve
3868 more than just converting modes. */
8403445a
AM
3869 if ((want_value & 1) == 0
3870 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
7e7d1b4b
RH
3871 && TREE_TYPE (TREE_TYPE (exp)) == 0
3872 && (!lang_hooks.reduce_bit_field_operations
3873 || (GET_MODE_PRECISION (GET_MODE (target))
3874 == TYPE_PRECISION (TREE_TYPE (exp)))))
f635a84d 3875 {
8df83eae 3876 if (TYPE_UNSIGNED (TREE_TYPE (exp))
f635a84d 3877 != SUBREG_PROMOTED_UNSIGNED_P (target))
ceef8ce4 3878 exp = convert
ae2bcd98 3879 (lang_hooks.types.signed_or_unsigned_type
ceef8ce4 3880 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
f635a84d 3881
ae2bcd98 3882 exp = convert (lang_hooks.types.type_for_mode
b0c48229
NB
3883 (GET_MODE (SUBREG_REG (target)),
3884 SUBREG_PROMOTED_UNSIGNED_P (target)),
f635a84d 3885 exp);
b76b08ef
RK
3886
3887 inner_target = SUBREG_REG (target);
f635a84d 3888 }
3a94c984 3889
8403445a
AM
3890 temp = expand_expr (exp, inner_target, VOIDmode,
3891 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
b258707c 3892
7abec5be 3893 /* If TEMP is a MEM and we want a result value, make the access
502b8322
AJ
3894 now so it gets done only once. Strictly speaking, this is
3895 only necessary if the MEM is volatile, or if the address
7abec5be
RH
3896 overlaps TARGET. But not performing the load twice also
3897 reduces the amount of rtl we generate and then have to CSE. */
3c0cb5de 3898 if (MEM_P (temp) && (want_value & 1) != 0)
766f36c7
RK
3899 temp = copy_to_reg (temp);
3900
b258707c
RS
3901 /* If TEMP is a VOIDmode constant, use convert_modes to make
3902 sure that we properly convert it. */
3903 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
1f1b0541
RH
3904 {
3905 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3906 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
3907 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3908 GET_MODE (target), temp,
3909 SUBREG_PROMOTED_UNSIGNED_P (target));
3910 }
b258707c 3911
1499e0a8
RK
3912 convert_move (SUBREG_REG (target), temp,
3913 SUBREG_PROMOTED_UNSIGNED_P (target));
3dbecef9
JW
3914
3915 /* If we promoted a constant, change the mode back down to match
3916 target. Otherwise, the caller might get confused by a result whose
3917 mode is larger than expected. */
3918
8403445a 3919 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
3dbecef9 3920 {
b3ca30df
JJ
3921 if (GET_MODE (temp) != VOIDmode)
3922 {
3923 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
3924 SUBREG_PROMOTED_VAR_P (temp) = 1;
0fb7aeda 3925 SUBREG_PROMOTED_UNSIGNED_SET (temp,
7879b81e 3926 SUBREG_PROMOTED_UNSIGNED_P (target));
b3ca30df
JJ
3927 }
3928 else
3929 temp = convert_modes (GET_MODE (target),
3930 GET_MODE (SUBREG_REG (target)),
3931 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
3dbecef9
JW
3932 }
3933
8403445a 3934 return want_value & 1 ? temp : NULL_RTX;
1499e0a8 3935 }
bbf6f052
RK
3936 else
3937 {
0fab64a3 3938 temp = expand_expr_real (exp, target, GET_MODE (target),
caf93cb0 3939 (want_value & 2
0fab64a3
MM
3940 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
3941 &alt_rtl);
766f36c7 3942 /* Return TARGET if it's a specified hardware register.
709f5be1
RS
3943 If TARGET is a volatile mem ref, either return TARGET
3944 or return a reg copied *from* TARGET; ANSI requires this.
3945
3946 Otherwise, if TEMP is not TARGET, return TEMP
3947 if it is constant (for efficiency),
3948 or if we really want the correct value. */
f8cfc6aa 3949 if (!(target && REG_P (target)
bbf6f052 3950 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3c0cb5de 3951 && !(MEM_P (target) && MEM_VOLATILE_P (target))
effbcc6a 3952 && ! rtx_equal_p (temp, target)
8403445a 3953 && (CONSTANT_P (temp) || (want_value & 1) != 0))
bbf6f052
RK
3954 dont_return_target = 1;
3955 }
3956
b258707c
RS
3957 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3958 the same as that of TARGET, adjust the constant. This is needed, for
3959 example, in case it is a CONST_DOUBLE and we want only a word-sized
3960 value. */
3961 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
c1da1f33 3962 && TREE_CODE (exp) != ERROR_MARK
b258707c
RS
3963 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3964 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
8df83eae 3965 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
b258707c 3966
bbf6f052 3967 /* If value was not generated in the target, store it there.
1bbd65cd
EB
3968 Convert the value to TARGET's type first if necessary and emit the
3969 pending increments that have been queued when expanding EXP.
3970 Note that we cannot emit the whole queue blindly because this will
3971 effectively disable the POST_INC optimization later.
3972
37a08a29 3973 If TEMP and TARGET compare equal according to rtx_equal_p, but
f3f2255a
R
3974 one or both of them are volatile memory refs, we have to distinguish
3975 two cases:
3976 - expand_expr has used TARGET. In this case, we must not generate
3977 another copy. This can be detected by TARGET being equal according
3978 to == .
3979 - expand_expr has not used TARGET - that means that the source just
3980 happens to have the same RTX form. Since temp will have been created
3981 by expand_expr, it will compare unequal according to == .
3982 We must generate a copy in this case, to reach the correct number
3983 of volatile memory references. */
bbf6f052 3984
6036acbb 3985 if ((! rtx_equal_p (temp, target)
f3f2255a
R
3986 || (temp != target && (side_effects_p (temp)
3987 || side_effects_p (target))))
e5408e52 3988 && TREE_CODE (exp) != ERROR_MARK
a9772b60 3989 && ! dont_store_target
9c5c5f2c
MM
3990 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
3991 but TARGET is not valid memory reference, TEMP will differ
3992 from TARGET although it is really the same location. */
0fab64a3 3993 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
e56fc090
HPN
3994 /* If there's nothing to copy, don't bother. Don't call expr_size
3995 unless necessary, because for some front ends (C++) the expr_size
3996 hook aborts on objects that are not supposed to be bit-copied or
3997 bit-initialized. */
3998 && expr_size (exp) != const0_rtx)
bbf6f052 3999 {
bbf6f052 4000 if (GET_MODE (temp) != GET_MODE (target)
f0348c25 4001 && GET_MODE (temp) != VOIDmode)
bbf6f052 4002 {
8df83eae 4003 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
bbf6f052
RK
4004 if (dont_return_target)
4005 {
4006 /* In this case, we will return TEMP,
4007 so make sure it has the proper mode.
4008 But don't forget to store the value into TARGET. */
4009 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4010 emit_move_insn (target, temp);
4011 }
4012 else
4013 convert_move (target, temp, unsignedp);
4014 }
4015
4016 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4017 {
c24ae149
RK
4018 /* Handle copying a string constant into an array. The string
4019 constant may be shorter than the array. So copy just the string's
4020 actual length, and clear the rest. First get the size of the data
4021 type of the string, which is actually the size of the target. */
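	  /* E.g. (illustrative): for char buf[8] = "hi"; SIZE is 8 and
	     TREE_STRING_LENGTH is 3 (it includes the terminating NUL),
	     so 3 bytes are block-copied and the remaining 5 cleared.  */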
4022 rtx size = expr_size (exp);
bbf6f052 4023
e87b4f3f
RS
4024 if (GET_CODE (size) == CONST_INT
4025 && INTVAL (size) < TREE_STRING_LENGTH (exp))
8403445a
AM
4026 emit_block_move (target, temp, size,
4027 (want_value & 2
4028 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
e87b4f3f 4029 else
bbf6f052 4030 {
e87b4f3f
RS
4031 /* Compute the size of the data to copy from the string. */
4032 tree copy_size
c03b7665 4033 = size_binop (MIN_EXPR,
b50d17a1 4034 make_tree (sizetype, size),
fed3cef0 4035 size_int (TREE_STRING_LENGTH (exp)));
8403445a
AM
4036 rtx copy_size_rtx
4037 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4038 (want_value & 2
4039 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
e87b4f3f
RS
4040 rtx label = 0;
4041
4042 /* Copy that much. */
267b28bd 4043 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
8df83eae 4044 TYPE_UNSIGNED (sizetype));
8403445a
AM
4045 emit_block_move (target, temp, copy_size_rtx,
4046 (want_value & 2
4047 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
e87b4f3f 4048
88f63c77
RK
4049 /* Figure out how much is left in TARGET that we have to clear.
4050 Do all calculations in ptr_mode. */
e87b4f3f
RS
4051 if (GET_CODE (copy_size_rtx) == CONST_INT)
4052 {
c24ae149
RK
4053 size = plus_constant (size, -INTVAL (copy_size_rtx));
4054 target = adjust_address (target, BLKmode,
4055 INTVAL (copy_size_rtx));
e87b4f3f
RS
4056 }
4057 else
4058 {
fa06ab5c 4059 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
906c4e36
RK
4060 copy_size_rtx, NULL_RTX, 0,
4061 OPTAB_LIB_WIDEN);
e87b4f3f 4062
c24ae149
RK
4063#ifdef POINTERS_EXTEND_UNSIGNED
4064 if (GET_MODE (copy_size_rtx) != Pmode)
267b28bd 4065 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
8df83eae 4066 TYPE_UNSIGNED (sizetype));
c24ae149
RK
4067#endif
4068
4069 target = offset_address (target, copy_size_rtx,
4070 highest_pow2_factor (copy_size));
e87b4f3f 4071 label = gen_label_rtx ();
c5d5d461 4072 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
a06ef755 4073 GET_MODE (size), 0, label);
e87b4f3f
RS
4074 }
4075
4076 if (size != const0_rtx)
37a08a29 4077 clear_storage (target, size);
22619c3f 4078
e87b4f3f
RS
4079 if (label)
4080 emit_label (label);
bbf6f052
RK
4081 }
4082 }
fffa9c1d
JW
4083 /* Handle calls that return values in multiple non-contiguous locations.
4084 The Irix 6 ABI has examples of this. */
4085 else if (GET_CODE (target) == PARALLEL)
6e985040
AM
4086 emit_group_load (target, temp, TREE_TYPE (exp),
4087 int_size_in_bytes (TREE_TYPE (exp)));
bbf6f052 4088 else if (GET_MODE (temp) == BLKmode)
8403445a
AM
4089 emit_block_move (target, temp, expr_size (exp),
4090 (want_value & 2
4091 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
bbf6f052 4092 else
b0dccb00
RH
4093 {
4094 temp = force_operand (temp, target);
4095 if (temp != target)
4096 emit_move_insn (target, temp);
4097 }
bbf6f052 4098 }
709f5be1 4099
766f36c7 4100 /* If we don't want a value, return NULL_RTX. */
8403445a 4101 if ((want_value & 1) == 0)
766f36c7
RK
4102 return NULL_RTX;
4103
4104 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4105 ??? The latter test doesn't seem to make sense. */
3c0cb5de 4106 else if (dont_return_target && !MEM_P (temp))
bbf6f052 4107 return temp;
766f36c7
RK
4108
4109 /* Return TARGET itself if it is a hard register. */
8403445a
AM
4110 else if ((want_value & 1) != 0
4111 && GET_MODE (target) != BLKmode
f8cfc6aa 4112 && ! (REG_P (target)
766f36c7 4113 && REGNO (target) < FIRST_PSEUDO_REGISTER))
709f5be1 4114 return copy_to_reg (target);
3a94c984 4115
766f36c7 4116 else
709f5be1 4117 return target;
bbf6f052
RK
4118}
4119\f
1ea7e6ad 4120/* Examine CTOR. Discover how many scalar fields are set to nonzero
6de9cd9a
DN
4121 values and place that count in *P_NZ_ELTS. Discover how many scalar
4122 fields are set to non-constant values and place that count in *P_NC_ELTS. */
9de08200 4123
6de9cd9a
DN
4124static void
4125categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4126 HOST_WIDE_INT *p_nc_elts)
9de08200 4127{
6de9cd9a
DN
4128 HOST_WIDE_INT nz_elts, nc_elts;
4129 tree list;
9de08200 4130
6de9cd9a
DN
4131 nz_elts = 0;
4132 nc_elts = 0;
caf93cb0 4133
6de9cd9a 4134 for (list = CONSTRUCTOR_ELTS (ctor); list; list = TREE_CHAIN (list))
9de08200 4135 {
6de9cd9a
DN
4136 tree value = TREE_VALUE (list);
4137 tree purpose = TREE_PURPOSE (list);
4138 HOST_WIDE_INT mult;
9de08200 4139
6de9cd9a
DN
4140 mult = 1;
4141 if (TREE_CODE (purpose) == RANGE_EXPR)
4142 {
4143 tree lo_index = TREE_OPERAND (purpose, 0);
4144 tree hi_index = TREE_OPERAND (purpose, 1);
9de08200 4145
6de9cd9a
DN
4146 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4147 mult = (tree_low_cst (hi_index, 1)
4148 - tree_low_cst (lo_index, 1) + 1);
4149 }
9de08200 4150
6de9cd9a
DN
4151 switch (TREE_CODE (value))
4152 {
4153 case CONSTRUCTOR:
4154 {
4155 HOST_WIDE_INT nz = 0, nc = 0;
4156 categorize_ctor_elements_1 (value, &nz, &nc);
4157 nz_elts += mult * nz;
4158 nc_elts += mult * nc;
4159 }
4160 break;
9de08200 4161
6de9cd9a
DN
4162 case INTEGER_CST:
4163 case REAL_CST:
4164 if (!initializer_zerop (value))
4165 nz_elts += mult;
4166 break;
4167 case COMPLEX_CST:
4168 if (!initializer_zerop (TREE_REALPART (value)))
4169 nz_elts += mult;
4170 if (!initializer_zerop (TREE_IMAGPART (value)))
4171 nz_elts += mult;
4172 break;
4173 case VECTOR_CST:
4174 {
4175 tree v;
4176 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4177 if (!initializer_zerop (TREE_VALUE (v)))
4178 nz_elts += mult;
4179 }
4180 break;
69ef87e2 4181
6de9cd9a
DN
4182 default:
4183 nz_elts += mult;
4184 if (!initializer_constant_valid_p (value, TREE_TYPE (value)))
4185 nc_elts += mult;
4186 break;
4187 }
4188 }
69ef87e2 4189
6de9cd9a
DN
4190 *p_nz_elts += nz_elts;
4191 *p_nc_elts += nc_elts;
4192}
4193
4194void
4195categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4196 HOST_WIDE_INT *p_nc_elts)
4197{
4198 *p_nz_elts = 0;
4199 *p_nc_elts = 0;
4200 categorize_ctor_elements_1 (ctor, p_nz_elts, p_nc_elts);
4201}
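/* Illustrative example (hypothetical initializer, not from this file):
   for the C constructor

       int v[6] = { 0, 3, 0, 0, 5, g () };

   the two nonzero constants plus the call give *P_NZ_ELTS == 3, and the
   call alone, which is not initializer_constant_valid_p, gives
   *P_NC_ELTS == 1.  */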
4202
4203/* Count the number of scalars in TYPE. Return -1 on overflow or
4204 if TYPE is variable-sized. */
4205
4206HOST_WIDE_INT
4207count_type_elements (tree type)
4208{
4209 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4210 switch (TREE_CODE (type))
4211 {
4212 case ARRAY_TYPE:
4213 {
4214 tree telts = array_type_nelts (type);
4215 if (telts && host_integerp (telts, 1))
4216 {
5377d5ba 4217 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
6de9cd9a
DN
4218 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type));
4219 if (n == 0)
4220 return 0;
5377d5ba 4221 else if (max / n > m)
6de9cd9a
DN
4222 return n * m;
4223 }
4224 return -1;
4225 }
4226
4227 case RECORD_TYPE:
4228 {
4229 HOST_WIDE_INT n = 0, t;
4230 tree f;
4231
4232 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4233 if (TREE_CODE (f) == FIELD_DECL)
4234 {
4235 t = count_type_elements (TREE_TYPE (f));
4236 if (t < 0)
4237 return -1;
4238 n += t;
4239 }
4240
4241 return n;
4242 }
9de08200 4243
6de9cd9a
DN
4244 case UNION_TYPE:
4245 case QUAL_UNION_TYPE:
4246 {
4247 /* Ho hum. How in the world do we guess here? Clearly it isn't
4248 right to count the fields. Guess based on the number of words. */
4249 HOST_WIDE_INT n = int_size_in_bytes (type);
4250 if (n < 0)
4251 return -1;
4252 return n / UNITS_PER_WORD;
4253 }
4254
4255 case COMPLEX_TYPE:
4256 return 2;
4257
4258 case VECTOR_TYPE:
3a021db2 4259 return TYPE_VECTOR_SUBPARTS (type);
6de9cd9a
DN
4260
4261 case INTEGER_TYPE:
4262 case REAL_TYPE:
4263 case ENUMERAL_TYPE:
4264 case BOOLEAN_TYPE:
4265 case CHAR_TYPE:
4266 case POINTER_TYPE:
4267 case OFFSET_TYPE:
4268 case REFERENCE_TYPE:
9de08200 4269 return 1;
3a94c984 4270
6de9cd9a
DN
4271 case VOID_TYPE:
4272 case METHOD_TYPE:
4273 case FILE_TYPE:
4274 case SET_TYPE:
4275 case FUNCTION_TYPE:
4276 case LANG_TYPE:
e9a25f70 4277 default:
6de9cd9a 4278 abort ();
9de08200 4279 }
9de08200
RK
4280}
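/* Illustrative example (hypothetical type, not from this file):

       struct p { double xy[2]; int tag; };

   counts 2 + 1 == 3 scalars; a COMPLEX_TYPE field would count as 2, and
   a union member is guessed from its size in words instead.  */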
4281
4282/* Return 1 if EXP contains mostly (3/4) zeros. */
4283
40209195 4284int
502b8322 4285mostly_zeros_p (tree exp)
9de08200 4286{
9de08200 4287 if (TREE_CODE (exp) == CONSTRUCTOR)
caf93cb0 4288
9de08200 4289 {
6de9cd9a
DN
4290 HOST_WIDE_INT nz_elts, nc_elts, elts;
4291
4292 /* If there are no ranges of true bits, it is all zero. */
e1a43f73 4293 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
6de9cd9a
DN
4294 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4295
4296 categorize_ctor_elements (exp, &nz_elts, &nc_elts);
4297 elts = count_type_elements (TREE_TYPE (exp));
9de08200 4298
6de9cd9a 4299 return nz_elts < elts / 4;
9de08200
RK
4300 }
4301
6de9cd9a 4302 return initializer_zerop (exp);
9de08200
RK
4303}
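/* For instance (illustrative): for

       int a[8] = { 0, 0, 0, 0, 0, 0, 7, 0 };

   nz_elts is 1 against 8 scalars in the type, 1 < 8 / 4 holds, and the
   function returns 1; callers then clear the whole object and store
   only the single nonzero element.  */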
4304\f
e1a43f73
PB
4305/* Helper function for store_constructor.
4306 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4307 TYPE is the type of the CONSTRUCTOR, not the element type.
04050c69 4308 CLEARED is as for store_constructor.
23cb1766 4309 ALIAS_SET is the alias set to use for any stores.
23ccec44
JW
4310
4311 This provides a recursive shortcut back to store_constructor when it isn't
4312 necessary to go through store_field. This is so that we can pass through
4313 the cleared field to let store_constructor know that we may not have to
4314 clear a substructure if the outer structure has already been cleared. */
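/* E.g. (illustrative, not from this file): for

       struct o { struct i { int a, b; } in; } x = { { 0, 2 } };

   the nested CONSTRUCTOR for X.IN starts on a byte boundary, so it
   recurses into store_constructor directly, and a nonzero CLEARED lets
   the recursive call skip both re-clearing and the zero store to A.  */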
e1a43f73
PB
4315
4316static void
502b8322
AJ
4317store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4318 HOST_WIDE_INT bitpos, enum machine_mode mode,
4319 tree exp, tree type, int cleared, int alias_set)
e1a43f73
PB
4320{
4321 if (TREE_CODE (exp) == CONSTRUCTOR
6c89c39a
RK
4322 /* We can only call store_constructor recursively if the size and
4323 bit position are on a byte boundary. */
23ccec44 4324 && bitpos % BITS_PER_UNIT == 0
6c89c39a 4325 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
cc2902df 4326 /* If we have a nonzero bitpos for a register target, then we just
23ccec44
JW
4327 let store_field do the bitfield handling. This is unlikely to
4328 generate unnecessary clear instructions anyway. */
3c0cb5de 4329 && (bitpos == 0 || MEM_P (target)))
e1a43f73 4330 {
3c0cb5de 4331 if (MEM_P (target))
61cb205c
RK
4332 target
4333 = adjust_address (target,
4334 GET_MODE (target) == BLKmode
4335 || 0 != (bitpos
4336 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4337 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
23cb1766 4338
e0339ef7 4339
04050c69 4340 /* Update the alias set, if required. */
3c0cb5de 4341 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
10b76d73 4342 && MEM_ALIAS_SET (target) != 0)
70072ed9
RK
4343 {
4344 target = copy_rtx (target);
4345 set_mem_alias_set (target, alias_set);
4346 }
e0339ef7 4347
dbb5c281 4348 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
e1a43f73
PB
4349 }
4350 else
a06ef755
RK
4351 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4352 alias_set);
e1a43f73
PB
4353}
4354
bbf6f052 4355/* Store the value of constructor EXP into the rtx TARGET.
04050c69
RK
4356 TARGET is either a REG or a MEM; we know it cannot conflict, since
4357 safe_from_p has been called.
dbb5c281
RK
4358 CLEARED is true if TARGET is known to have been zeroed.
4359 SIZE is the number of bytes of TARGET we are allowed to modify: this
b7010412
RK
4360 may not be the same as the size of EXP if we are assigning to a field
4361 which has been packed to exclude padding bits. */
bbf6f052
RK
4362
4363static void
502b8322 4364store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
bbf6f052 4365{
4af3895e 4366 tree type = TREE_TYPE (exp);
a5efcd63 4367#ifdef WORD_REGISTER_OPERATIONS
13eb1f7f 4368 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
a5efcd63 4369#endif
4af3895e 4370
e44842fe
RK
4371 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4372 || TREE_CODE (type) == QUAL_UNION_TYPE)
bbf6f052 4373 {
b3694847 4374 tree elt;
bbf6f052 4375
dbb5c281
RK
4376 /* If size is zero or the target is already cleared, do nothing. */
4377 if (size == 0 || cleared)
2c430630 4378 cleared = 1;
04050c69 4379 /* We either clear the aggregate or indicate the value is dead. */
2c430630
RS
4380 else if ((TREE_CODE (type) == UNION_TYPE
4381 || TREE_CODE (type) == QUAL_UNION_TYPE)
4382 && ! CONSTRUCTOR_ELTS (exp))
04050c69 4383 /* If the constructor is empty, clear the union. */
a59f8640 4384 {
dbb5c281 4385 clear_storage (target, expr_size (exp));
04050c69 4386 cleared = 1;
a59f8640 4387 }
4af3895e
JVA
4388
4389 /* If we are building a static constructor into a register,
4390 set the initial value as zero so we can fold the value into
67225c15
RK
4391 a constant. But if more than one register is involved,
4392 this probably loses. */
f8cfc6aa 4393 else if (REG_P (target) && TREE_STATIC (exp)
67225c15 4394 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
9de08200 4395 {
04050c69 4396 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
9de08200
RK
4397 cleared = 1;
4398 }
4399
4400 /* If the constructor has fewer fields than the structure
4401 or if we are initializing the structure to mostly zeros,
0d97bf4c 4402 clear the whole structure first. Don't do this if TARGET is a
fcf1b822
RK
4403 register whose mode size isn't equal to SIZE since clear_storage
4404 can't handle this case. */
7c50e202
OH
4405 else if (size > 0
4406 && ((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
4407 || mostly_zeros_p (exp))
f8cfc6aa 4408 && (!REG_P (target)
dbb5c281 4409 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
04050c69 4410 == size)))
9de08200 4411 {
337f4314
RK
4412 rtx xtarget = target;
4413
4414 if (readonly_fields_p (type))
4415 {
4416 xtarget = copy_rtx (xtarget);
4417 RTX_UNCHANGING_P (xtarget) = 1;
4418 }
4419
dbb5c281 4420 clear_storage (xtarget, GEN_INT (size));
9de08200
RK
4421 cleared = 1;
4422 }
dbb5c281
RK
4423
4424 if (! cleared)
4425 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
bbf6f052
RK
4426
4427 /* Store each element of the constructor into
4428 the corresponding field of TARGET. */
4429
4430 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4431 {
b3694847 4432 tree field = TREE_PURPOSE (elt);
34c73909 4433 tree value = TREE_VALUE (elt);
b3694847 4434 enum machine_mode mode;
770ae6cc
RK
4435 HOST_WIDE_INT bitsize;
4436 HOST_WIDE_INT bitpos = 0;
770ae6cc 4437 tree offset;
b50d17a1 4438 rtx to_rtx = target;
bbf6f052 4439
f32fd778
RS
4440 /* Just ignore missing fields.
4441 We cleared the whole structure, above,
4442 if any fields are missing. */
4443 if (field == 0)
4444 continue;
4445
6de9cd9a 4446 if (cleared && initializer_zerop (value))
e1a43f73 4447 continue;
9de08200 4448
770ae6cc
RK
4449 if (host_integerp (DECL_SIZE (field), 1))
4450 bitsize = tree_low_cst (DECL_SIZE (field), 1);
14a774a9
RK
4451 else
4452 bitsize = -1;
4453
bbf6f052
RK
4454 mode = DECL_MODE (field);
4455 if (DECL_BIT_FIELD (field))
4456 mode = VOIDmode;
4457
770ae6cc
RK
4458 offset = DECL_FIELD_OFFSET (field);
4459 if (host_integerp (offset, 0)
4460 && host_integerp (bit_position (field), 0))
4461 {
4462 bitpos = int_bit_position (field);
4463 offset = 0;
4464 }
b50d17a1 4465 else
770ae6cc 4466 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
3a94c984 4467
b50d17a1
RK
4468 if (offset)
4469 {
4470 rtx offset_rtx;
4471
6fce44af
RK
4472 offset
4473 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4474 make_tree (TREE_TYPE (exp),
4475 target));
bbf6f052 4476
b50d17a1 4477 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3c0cb5de 4478 if (!MEM_P (to_rtx))
b50d17a1
RK
4479 abort ();
4480
bd070e1a 4481#ifdef POINTERS_EXTEND_UNSIGNED
4b6c1672 4482 if (GET_MODE (offset_rtx) != Pmode)
267b28bd 4483 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
fa06ab5c
RK
4484#else
4485 if (GET_MODE (offset_rtx) != ptr_mode)
4486 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
bd070e1a 4487#endif
bd070e1a 4488
0d4903b8
RK
4489 to_rtx = offset_address (to_rtx, offset_rtx,
4490 highest_pow2_factor (offset));
b50d17a1 4491 }
c5c76735 4492
4e44c1ef 4493 if (TREE_READONLY (field))
cf04eb80 4494 {
3c0cb5de 4495 if (MEM_P (to_rtx))
effbcc6a
RK
4496 to_rtx = copy_rtx (to_rtx);
4497
cf04eb80
RK
4498 RTX_UNCHANGING_P (to_rtx) = 1;
4499 }
4500
34c73909
R
4501#ifdef WORD_REGISTER_OPERATIONS
4502 /* If this initializes a field that is smaller than a word, at the
4503 start of a word, try to widen it to a full word.
4504 This special case allows us to output C++ member function
4505 initializations in a form that the optimizers can understand. */
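	  /* E.g. (illustrative): initializing a 16-bit field at bit 0 of
	     a register-held structure with the constant 3 is rewritten as
	     a word_mode store of 3 (shifted up on big-endian targets), so
	     later passes see a plain full-word assignment.  */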
f8cfc6aa 4506 if (REG_P (target)
34c73909
R
4507 && bitsize < BITS_PER_WORD
4508 && bitpos % BITS_PER_WORD == 0
4509 && GET_MODE_CLASS (mode) == MODE_INT
4510 && TREE_CODE (value) == INTEGER_CST
13eb1f7f
RK
4511 && exp_size >= 0
4512 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
34c73909
R
4513 {
4514 tree type = TREE_TYPE (value);
04050c69 4515
34c73909
R
4516 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4517 {
ae2bcd98 4518 type = lang_hooks.types.type_for_size
8df83eae 4519 (BITS_PER_WORD, TYPE_UNSIGNED (type));
34c73909
R
4520 value = convert (type, value);
4521 }
04050c69 4522
34c73909
R
4523 if (BYTES_BIG_ENDIAN)
4524 value
3244e67d
RS
4525 = fold (build2 (LSHIFT_EXPR, type, value,
4526 build_int_2 (BITS_PER_WORD - bitsize, 0)));
34c73909
R
4527 bitsize = BITS_PER_WORD;
4528 mode = word_mode;
4529 }
4530#endif
10b76d73 4531
3c0cb5de 4532 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
10b76d73
RK
4533 && DECL_NONADDRESSABLE_P (field))
4534 {
4535 to_rtx = copy_rtx (to_rtx);
4536 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4537 }
4538
c5c76735 4539 store_constructor_field (to_rtx, bitsize, bitpos, mode,
8b6000fc 4540 value, type, cleared,
10b76d73 4541 get_alias_set (TREE_TYPE (field)));
bbf6f052
RK
4542 }
4543 }
3a021db2
PB
4544
4545 else if (TREE_CODE (type) == ARRAY_TYPE)
bbf6f052 4546 {
b3694847
SS
4547 tree elt;
4548 int i;
e1a43f73 4549 int need_to_clear;
5c5214a9 4550 tree domain;
4af3895e 4551 tree elttype = TREE_TYPE (type);
e6834654 4552 int const_bounds_p;
ae0ed63a
JM
4553 HOST_WIDE_INT minelt = 0;
4554 HOST_WIDE_INT maxelt = 0;
e6834654 4555
3a021db2 4556 domain = TYPE_DOMAIN (type);
e6834654
SS
4557 const_bounds_p = (TYPE_MIN_VALUE (domain)
4558 && TYPE_MAX_VALUE (domain)
4559 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4560 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4561
85f3d674
RK
4562 /* If we have constant bounds for the range of the type, get them. */
4563 if (const_bounds_p)
4564 {
4565 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4566 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4567 }
bbf6f052 4568
e1a43f73 4569 /* If the constructor has fewer elements than the array,
38e01259 4570 clear the whole array first. Similarly if this is
e1a43f73 4571 a static constructor of a non-BLKmode object. */
3a021db2
PB
4572 if (cleared)
4573 need_to_clear = 0;
4574 else if (REG_P (target) && TREE_STATIC (exp))
e1a43f73
PB
4575 need_to_clear = 1;
4576 else
4577 {
4578 HOST_WIDE_INT count = 0, zero_count = 0;
85f3d674
RK
4579 need_to_clear = ! const_bounds_p;
4580
e1a43f73
PB
4581 /* This loop is a more accurate version of the loop in
4582 mostly_zeros_p (it handles RANGE_EXPR in an index).
4583 It is also needed to check for missing elements. */
4584 for (elt = CONSTRUCTOR_ELTS (exp);
85f3d674 4585 elt != NULL_TREE && ! need_to_clear;
df0faff1 4586 elt = TREE_CHAIN (elt))
e1a43f73
PB
4587 {
4588 tree index = TREE_PURPOSE (elt);
4589 HOST_WIDE_INT this_node_count;
19caa751 4590
e1a43f73
PB
4591 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4592 {
4593 tree lo_index = TREE_OPERAND (index, 0);
4594 tree hi_index = TREE_OPERAND (index, 1);
05bccae2 4595
19caa751
RK
4596 if (! host_integerp (lo_index, 1)
4597 || ! host_integerp (hi_index, 1))
e1a43f73
PB
4598 {
4599 need_to_clear = 1;
4600 break;
4601 }
19caa751
RK
4602
4603 this_node_count = (tree_low_cst (hi_index, 1)
4604 - tree_low_cst (lo_index, 1) + 1);
e1a43f73
PB
4605 }
4606 else
4607 this_node_count = 1;
85f3d674 4608
e1a43f73
PB
4609 count += this_node_count;
4610 if (mostly_zeros_p (TREE_VALUE (elt)))
4611 zero_count += this_node_count;
4612 }
85f3d674 4613
8e958f70 4614 /* Clear the entire array first if there are any missing elements,
0f41302f 4615 or if the incidence of zero elements is >= 75%. */
85f3d674
RK
4616 if (! need_to_clear
4617 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
e1a43f73
PB
4618 need_to_clear = 1;
4619 }
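	  /* E.g. (illustrative, GNU C designated initializers):

	         int a[100] = { [0] = 1, [99] = 2 };

	     has COUNT == 2 < 100 elements present, so NEED_TO_CLEAR is
	     set and only the two nonzero elements are stored below.  */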
85f3d674 4620
3a021db2 4621 if (need_to_clear && size > 0)
9de08200 4622 {
3a021db2
PB
4623 if (REG_P (target))
4624 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4625 else
4626 clear_storage (target, GEN_INT (size));
dbb5c281 4627 cleared = 1;
9de08200 4628 }
3a021db2
PB
4629
4630 if (!cleared && REG_P (target))
dbb5c281
RK
4631 /* Inform later passes that the old value is dead. */
4632 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
bbf6f052
RK
4633
4634 /* Store each element of the constructor into
4635 the corresponding element of TARGET, determined
4636 by counting the elements. */
4637 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4638 elt;
4639 elt = TREE_CHAIN (elt), i++)
4640 {
b3694847 4641 enum machine_mode mode;
19caa751
RK
4642 HOST_WIDE_INT bitsize;
4643 HOST_WIDE_INT bitpos;
bbf6f052 4644 int unsignedp;
e1a43f73 4645 tree value = TREE_VALUE (elt);
03dc44a6
RS
4646 tree index = TREE_PURPOSE (elt);
4647 rtx xtarget = target;
bbf6f052 4648
6de9cd9a 4649 if (cleared && initializer_zerop (value))
e1a43f73 4650 continue;
9de08200 4651
8df83eae 4652 unsignedp = TYPE_UNSIGNED (elttype);
14a774a9
RK
4653 mode = TYPE_MODE (elttype);
4654 if (mode == BLKmode)
19caa751
RK
4655 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4656 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4657 : -1);
14a774a9
RK
4658 else
4659 bitsize = GET_MODE_BITSIZE (mode);
bbf6f052 4660
e1a43f73
PB
4661 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4662 {
4663 tree lo_index = TREE_OPERAND (index, 0);
4664 tree hi_index = TREE_OPERAND (index, 1);
6af8eb57 4665 rtx index_r, pos_rtx;
05c0b405
PB
4666 HOST_WIDE_INT lo, hi, count;
4667 tree position;
e1a43f73 4668
0f41302f 4669 /* If the range is constant and "small", unroll the loop. */
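	      /* E.g. (illustrative, GNU C designated-range syntax):

	             int a[16] = { [2 ... 5] = 7 };

	         yields a RANGE_EXPR with LO_INDEX 2 and HI_INDEX 5; the
	         four stores are emitted inline rather than as a runtime
	         loop.  */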
85f3d674
RK
4670 if (const_bounds_p
4671 && host_integerp (lo_index, 0)
19caa751
RK
4672 && host_integerp (hi_index, 0)
4673 && (lo = tree_low_cst (lo_index, 0),
4674 hi = tree_low_cst (hi_index, 0),
05c0b405 4675 count = hi - lo + 1,
3c0cb5de 4676 (!MEM_P (target)
05c0b405 4677 || count <= 2
19caa751
RK
4678 || (host_integerp (TYPE_SIZE (elttype), 1)
4679 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4680 <= 40 * 8)))))
e1a43f73 4681 {
05c0b405
PB
4682 lo -= minelt; hi -= minelt;
4683 for (; lo <= hi; lo++)
e1a43f73 4684 {
19caa751 4685 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
10b76d73 4686
3c0cb5de 4687 if (MEM_P (target)
10b76d73 4688 && !MEM_KEEP_ALIAS_SET_P (target)
e6834654 4689 && TREE_CODE (type) == ARRAY_TYPE
10b76d73
RK
4690 && TYPE_NONALIASED_COMPONENT (type))
4691 {
4692 target = copy_rtx (target);
4693 MEM_KEEP_ALIAS_SET_P (target) = 1;
4694 }
4695
23cb1766 4696 store_constructor_field
04050c69
RK
4697 (target, bitsize, bitpos, mode, value, type, cleared,
4698 get_alias_set (elttype));
e1a43f73
PB
4699 }
4700 }
4701 else
4702 {
6af8eb57
SB
4703 rtx loop_start = gen_label_rtx ();
4704 rtx loop_end = gen_label_rtx ();
4705 tree exit_cond;
e1a43f73 4706
6af8eb57 4707 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
8df83eae 4708 unsignedp = TYPE_UNSIGNED (domain);
e1a43f73
PB
4709
4710 index = build_decl (VAR_DECL, NULL_TREE, domain);
4711
19e7881c 4712 index_r
e1a43f73
PB
4713 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4714 &unsignedp, 0));
19e7881c 4715 SET_DECL_RTL (index, index_r);
e1a43f73 4716 store_expr (lo_index, index_r, 0);
6af8eb57
SB
4717
4718 /* Build the head of the loop. */
4719 do_pending_stack_adjust ();
6af8eb57 4720 emit_label (loop_start);
e1a43f73 4721
0f41302f 4722 /* Assign value to element index. */
fed3cef0
RK
4723 position
4724 = convert (ssizetype,
3244e67d
RS
4725 fold (build2 (MINUS_EXPR, TREE_TYPE (index),
4726 index, TYPE_MIN_VALUE (domain))));
fed3cef0
RK
4727 position = size_binop (MULT_EXPR, position,
4728 convert (ssizetype,
4729 TYPE_SIZE_UNIT (elttype)));
4730
e1a43f73 4731 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
0d4903b8
RK
4732 xtarget = offset_address (target, pos_rtx,
4733 highest_pow2_factor (position));
4734 xtarget = adjust_address (xtarget, mode, 0);
e1a43f73 4735 if (TREE_CODE (value) == CONSTRUCTOR)
dbb5c281
RK
4736 store_constructor (value, xtarget, cleared,
4737 bitsize / BITS_PER_UNIT);
e1a43f73
PB
4738 else
4739 store_expr (value, xtarget, 0);
4740
6af8eb57 4741 /* Generate a conditional jump to exit the loop. */
3244e67d
RS
4742 exit_cond = build2 (LT_EXPR, integer_type_node,
4743 index, hi_index);
6af8eb57 4744 jumpif (exit_cond, loop_end);
e1a43f73 4745
6af8eb57
SB
4746 /* Update the loop counter, and jump to the head of
4747 the loop. */
ad76cef8
PB
4748 expand_assignment (index,
4749 build2 (PLUS_EXPR, TREE_TYPE (index),
4750 index, integer_one_node), 0);
4751
6af8eb57
SB
4752 emit_jump (loop_start);
4753
4754 /* Build the end of the loop. */
e1a43f73 4755 emit_label (loop_end);
e1a43f73
PB
4756 }
4757 }
19caa751
RK
4758 else if ((index != 0 && ! host_integerp (index, 0))
4759 || ! host_integerp (TYPE_SIZE (elttype), 1))
03dc44a6 4760 {
03dc44a6
RS
4761 tree position;
4762
5b6c44ff 4763 if (index == 0)
fed3cef0 4764 index = ssize_int (1);
5b6c44ff 4765
e1a43f73 4766 if (minelt)
3a021db2 4767 index = fold_convert (ssizetype,
3244e67d
RS
4768 fold (build2 (MINUS_EXPR,
4769 TREE_TYPE (index),
4770 index,
4771 TYPE_MIN_VALUE (domain))));
19caa751 4772
fed3cef0
RK
4773 position = size_binop (MULT_EXPR, index,
4774 convert (ssizetype,
4775 TYPE_SIZE_UNIT (elttype)));
0d4903b8
RK
4776 xtarget = offset_address (target,
4777 expand_expr (position, 0, VOIDmode, 0),
4778 highest_pow2_factor (position));
4779 xtarget = adjust_address (xtarget, mode, 0);
e1a43f73 4780 store_expr (value, xtarget, 0);
03dc44a6
RS
4781 }
4782 else
4783 {
4784 if (index != 0)
19caa751
RK
4785 bitpos = ((tree_low_cst (index, 0) - minelt)
4786 * tree_low_cst (TYPE_SIZE (elttype), 1));
03dc44a6 4787 else
19caa751
RK
4788 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4789
3c0cb5de 4790 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
e6834654 4791 && TREE_CODE (type) == ARRAY_TYPE
10b76d73
RK
4792 && TYPE_NONALIASED_COMPONENT (type))
4793 {
4794 target = copy_rtx (target);
4795 MEM_KEEP_ALIAS_SET_P (target) = 1;
4796 }
9b9bd3b2
JH
4797 store_constructor_field (target, bitsize, bitpos, mode, value,
4798 type, cleared, get_alias_set (elttype));
03dc44a6 4799 }
bbf6f052 4800 }
3a021db2
PB
4801 }
4802
4803 else if (TREE_CODE (type) == VECTOR_TYPE)
4804 {
4805 tree elt;
4806 int i;
4807 int need_to_clear;
4808 int icode = 0;
4809 tree elttype = TREE_TYPE (type);
4810 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
4811 enum machine_mode eltmode = TYPE_MODE (elttype);
4812 HOST_WIDE_INT bitsize;
4813 HOST_WIDE_INT bitpos;
4814 rtx *vector = NULL;
4815 unsigned n_elts;
4816
4817 if (eltmode == BLKmode)
4818 abort ();
4819
4820 n_elts = TYPE_VECTOR_SUBPARTS (type);
4821 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
4822 {
4823 enum machine_mode mode = GET_MODE (target);
caf93cb0 4824
3a021db2
PB
4825 icode = (int) vec_init_optab->handlers[mode].insn_code;
4826 if (icode != CODE_FOR_nothing)
4827 {
4828 unsigned int i;
caf93cb0 4829
3a021db2
PB
4830 vector = alloca (n_elts * sizeof (rtx));
4831 for (i = 0; i < n_elts; i++)
4832 vector [i] = CONST0_RTX (GET_MODE_INNER (mode));
4833 }
4834 }
4835
4836 /* If the constructor has fewer elements than the vector,
4837 clear the whole vector first. Similarly if this is a
4838 static constructor of a non-BLKmode object. */
4839 if (cleared)
4840 need_to_clear = 0;
4841 else if (REG_P (target) && TREE_STATIC (exp))
4842 need_to_clear = 1;
4843 else
4844 {
4845 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
4846
4847 for (elt = CONSTRUCTOR_ELTS (exp);
4848 elt != NULL_TREE;
4849 elt = TREE_CHAIN (elt))
4850 {
4851 int n_elts_here =
4852 tree_low_cst (
4853 int_const_binop (TRUNC_DIV_EXPR,
4854 TYPE_SIZE (TREE_TYPE (TREE_VALUE (elt))),
4855 TYPE_SIZE (elttype), 0), 1);
4856
4857 count += n_elts_here;
4858 if (mostly_zeros_p (TREE_VALUE (elt)))
4859 zero_count += n_elts_here;
4860 }
4861
4862 /* Clear the entire vector first if there are any missing elements,
4863 or if the incidence of zero elements is >= 75%. */
4864 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
4865 }
4866
4867 if (need_to_clear && size > 0 && !vector)
997404de 4868 {
3a021db2
PB
4869 if (REG_P (target))
4870 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4871 else
4872 clear_storage (target, GEN_INT (size));
4873 cleared = 1;
997404de 4874 }
3a021db2
PB
4875
4876 if (!cleared && REG_P (target))
4877 /* Inform later passes that the old value is dead. */
4878 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4879
4880 /* Store each element of the constructor into the corresponding
4881 element of TARGET, determined by counting the elements. */
4882 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4883 elt;
4884 elt = TREE_CHAIN (elt), i += bitsize / elt_size)
4885 {
4886 tree value = TREE_VALUE (elt);
4887 tree index = TREE_PURPOSE (elt);
4888 HOST_WIDE_INT eltpos;
4889
4890 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
4891 if (cleared && initializer_zerop (value))
4892 continue;
4893
4894 if (index != 0)
4895 eltpos = tree_low_cst (index, 1);
4896 else
4897 eltpos = i;
4898
4899 if (vector)
4900 {
4901 /* Vector CONSTRUCTORs should only be built from smaller
4902 vectors in the case of BLKmode vectors. */
4903 if (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE)
4904 abort ();
4905 vector[eltpos] = expand_expr (value, NULL_RTX, VOIDmode, 0);
4906 }
4907 else
4908 {
4909 enum machine_mode value_mode =
4910 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
4911 ? TYPE_MODE (TREE_TYPE (value))
4912 : eltmode;
4913 bitpos = eltpos * elt_size;
4914 store_constructor_field (target, bitsize, bitpos, value_mode, value,
4915 type, cleared, get_alias_set (elttype));
4916 }
4917 }
4918
4919 if (vector)
4920 emit_insn (GEN_FCN (icode) (target,
4921 gen_rtx_PARALLEL (GET_MODE (target),
4922 gen_rtvec_v (n_elts, vector))));
bbf6f052 4923 }
19caa751 4924
3a94c984 4925 /* Set constructor assignments. */
071a6595
PB
4926 else if (TREE_CODE (type) == SET_TYPE)
4927 {
e1a43f73 4928 tree elt = CONSTRUCTOR_ELTS (exp);
19caa751 4929 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
071a6595
PB
4930 tree domain = TYPE_DOMAIN (type);
4931 tree domain_min, domain_max, bitlength;
4932
9faa82d8 4933 /* The default implementation strategy is to extract the constant
071a6595
PB
4934 parts of the constructor, use that to initialize the target,
4935 and then "or" in whatever non-constant ranges we need in addition.
4936
4937 If a large set is all zero or all ones, it is
8f99553f 4938 probably better to set it using memset.
071a6595
PB
4939 Also, if a large set has just a single range, it may also be
4940 better to first clear the set (using
8f99553f 4941 memset), and then set the bits we want. */
3a94c984 4942
0f41302f 4943 /* Check for all zeros. */
9376fcd6 4944 if (elt == NULL_TREE && size > 0)
071a6595 4945 {
dbb5c281
RK
4946 if (!cleared)
4947 clear_storage (target, GEN_INT (size));
071a6595
PB
4948 return;
4949 }
4950
071a6595
PB
4951 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4952 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4953 bitlength = size_binop (PLUS_EXPR,
fed3cef0
RK
4954 size_diffop (domain_max, domain_min),
4955 ssize_int (1));
071a6595 4956
19caa751 4957 nbits = tree_low_cst (bitlength, 1);
e1a43f73
PB
4958
4959 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4960 are "complicated" (more than one range), initialize (the
3a94c984 4961 constant parts) by copying from a constant. */
e1a43f73
PB
4962 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4963 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
071a6595 4964 {
19caa751 4965 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
b4ee5a72 4966 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
703ad42b 4967 char *bit_buffer = alloca (nbits);
b4ee5a72 4968 HOST_WIDE_INT word = 0;
19caa751
RK
4969 unsigned int bit_pos = 0;
4970 unsigned int ibit = 0;
4971 unsigned int offset = 0; /* In bytes from beginning of set. */
4972
e1a43f73 4973 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
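 /* Packing example: with 32-bit set words, set bit 0 lands in the
 most significant bit of the first word (1 << 31) on a big-endian
 target, but in the least significant bit (1 << 0) on a
 little-endian one.  */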
b4ee5a72 4974 for (;;)
071a6595 4975 {
b4ee5a72
PB
4976 if (bit_buffer[ibit])
4977 {
b09f3348 4978 if (BYTES_BIG_ENDIAN)
b4ee5a72
PB
4979 word |= (1 << (set_word_size - 1 - bit_pos));
4980 else
4981 word |= 1 << bit_pos;
4982 }
19caa751 4983
b4ee5a72
PB
4984 bit_pos++; ibit++;
4985 if (bit_pos >= set_word_size || ibit == nbits)
071a6595 4986 {
dbb5c281 4987 if (word != 0 || ! cleared)
e1a43f73 4988 {
053ee101 4989 rtx datum = gen_int_mode (word, mode);
e1a43f73 4990 rtx to_rtx;
19caa751 4991
0f41302f
MS
4992 /* The assumption here is that it is safe to use
4993 XEXP if the set is multi-word, but not if
4994 it's single-word. */
3c0cb5de 4995 if (MEM_P (target))
f4ef873c 4996 to_rtx = adjust_address (target, mode, offset);
3a94c984 4997 else if (offset == 0)
e1a43f73
PB
4998 to_rtx = target;
4999 else
5000 abort ();
5001 emit_move_insn (to_rtx, datum);
5002 }
19caa751 5003
b4ee5a72
PB
5004 if (ibit == nbits)
5005 break;
5006 word = 0;
5007 bit_pos = 0;
5008 offset += set_word_size / BITS_PER_UNIT;
071a6595
PB
5009 }
5010 }
071a6595 5011 }
dbb5c281 5012 else if (!cleared)
19caa751
RK
5013 /* Don't bother clearing storage if the set is all ones. */
5014 if (TREE_CHAIN (elt) != NULL_TREE
5015 || (TREE_PURPOSE (elt) == NULL_TREE
5016 ? nbits != 1
5017 : ( ! host_integerp (TREE_VALUE (elt), 0)
5018 || ! host_integerp (TREE_PURPOSE (elt), 0)
5019 || (tree_low_cst (TREE_VALUE (elt), 0)
5020 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5021 != (HOST_WIDE_INT) nbits))))
dbb5c281 5022 clear_storage (target, expr_size (exp));
3a94c984 5023
e1a43f73 5024 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
071a6595 5025 {
3a94c984 5026 /* Start of range of element or NULL. */
071a6595 5027 tree startbit = TREE_PURPOSE (elt);
3a94c984 5028 /* End of range of element, or element value. */
071a6595
PB
5029 tree endbit = TREE_VALUE (elt);
5030 HOST_WIDE_INT startb, endb;
19caa751 5031 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
071a6595
PB
5032
5033 bitlength_rtx = expand_expr (bitlength,
19caa751 5034 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
071a6595 5035
3a94c984 5036 /* Handle non-range tuple element like [ expr ]. */
071a6595
PB
5037 if (startbit == NULL_TREE)
5038 {
5039 startbit = save_expr (endbit);
5040 endbit = startbit;
5041 }
19caa751 5042
071a6595
PB
5043 startbit = convert (sizetype, startbit);
5044 endbit = convert (sizetype, endbit);
5045 if (! integer_zerop (domain_min))
5046 {
5047 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5048 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5049 }
3a94c984 5050 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
071a6595 5051 EXPAND_CONST_ADDRESS);
3a94c984 5052 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
071a6595
PB
5053 EXPAND_CONST_ADDRESS);
5054
5055 if (REG_P (target))
5056 {
1da68f56
RK
5057 targetx
5058 = assign_temp
ae2bcd98 5059 ((build_qualified_type (lang_hooks.types.type_for_mode
b0c48229 5060 (GET_MODE (target), 0),
1da68f56
RK
5061 TYPE_QUAL_CONST)),
5062 0, 1, 1);
071a6595
PB
5063 emit_move_insn (targetx, target);
5064 }
19caa751 5065
3c0cb5de 5066 else if (MEM_P (target))
071a6595
PB
5067 targetx = target;
5068 else
5069 abort ();
5070
4ca79136
RH
5071 /* Optimization: If startbit and endbit are constants divisible
5072 by BITS_PER_UNIT, call memset instead. */
8f99553f 5073 if (TREE_CODE (startbit) == INTEGER_CST
071a6595
PB
5074 && TREE_CODE (endbit) == INTEGER_CST
5075 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
e1a43f73 5076 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
071a6595 5077 {
ebb1b59a 5078 emit_library_call (memset_libfunc, LCT_NORMAL,
071a6595 5079 VOIDmode, 3,
e1a43f73
PB
5080 plus_constant (XEXP (targetx, 0),
5081 startb / BITS_PER_UNIT),
071a6595 5082 Pmode,
3b6f75e2 5083 constm1_rtx, TYPE_MODE (integer_type_node),
071a6595 5084 GEN_INT ((endb - startb) / BITS_PER_UNIT),
3b6f75e2 5085 TYPE_MODE (sizetype));
071a6595
PB
5086 }
5087 else
68d28100
RH
5088 emit_library_call (setbits_libfunc, LCT_NORMAL,
5089 VOIDmode, 4, XEXP (targetx, 0),
ebb1b59a 5090 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
19caa751
RK
5091 startbit_rtx, TYPE_MODE (sizetype),
5092 endbit_rtx, TYPE_MODE (sizetype));
5093
071a6595
PB
5094 if (REG_P (target))
5095 emit_move_insn (target, targetx);
5096 }
5097 }
bbf6f052
RK
5098
5099 else
5100 abort ();
5101}
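
/* Illustrative sketch, not part of GCC: the "mostly zeros" decision
   store_constructor makes above, in isolation.  Clearing the whole
   object first pays off when elements are missing or when at least
   75% of the supplied elements are zero; 4 * ZEROS >= 3 * TOTAL is
   that threshold written without a division.  The helper name is
   hypothetical and TOTAL is assumed positive.  */

static int
sketch_should_clear_first (HOST_WIDE_INT total, HOST_WIDE_INT zeros,
                           HOST_WIDE_INT num_elts)
{
  return total < num_elts || 4 * zeros >= 3 * total;
}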
5102
5103/* Store the value of EXP (an expression tree)
5104 into a subfield of TARGET which has mode MODE and occupies
5105 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5106 If MODE is VOIDmode, it means that we are storing into a bit-field.
5107
5108 If VALUE_MODE is VOIDmode, return nothing in particular.
5109 UNSIGNEDP is not used in this case.
5110
5111 Otherwise, return an rtx for the value stored. This rtx
5112 has mode VALUE_MODE if that is convenient to do.
5113 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5114
a06ef755 5115 TYPE is the type of the underlying object,
ece32014
MM
5116
5117 ALIAS_SET is the alias set for the destination. This value will
5118 (in general) be different from that for TARGET, since TARGET is a
5119 reference to the containing structure. */
bbf6f052
RK
5120
5121static rtx
502b8322
AJ
5122store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5123 enum machine_mode mode, tree exp, enum machine_mode value_mode,
5124 int unsignedp, tree type, int alias_set)
bbf6f052 5125{
906c4e36 5126 HOST_WIDE_INT width_mask = 0;
bbf6f052 5127
e9a25f70
JL
5128 if (TREE_CODE (exp) == ERROR_MARK)
5129 return const0_rtx;
5130
2be6a7e9
RK
5131 /* If we have nothing to store, do nothing unless the expression has
5132 side-effects. */
5133 if (bitsize == 0)
5134 return expand_expr (exp, const0_rtx, VOIDmode, 0);
6a87d634 5135 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
906c4e36 5136 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
bbf6f052
RK
5137
5138 /* If we are storing into an unaligned field of an aligned union that is
5139 in a register, we may have the mode of TARGET being an integer mode but
5140 MODE == BLKmode. In that case, get an aligned object whose size and
5141 alignment are the same as TARGET and store TARGET into it (we can avoid
5142 the store if the field being stored is the entire width of TARGET). Then
5143 call ourselves recursively to store the field into a BLKmode version of
5144 that object. Finally, load from the object into TARGET. This is not
5145 very efficient in general, but should only be slightly more expensive
5146 than the otherwise-required unaligned accesses. Perhaps this can be
85a43a2f
RK
5147 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5148 twice, once with emit_move_insn and once via store_field. */
bbf6f052
RK
5149
5150 if (mode == BLKmode
f8cfc6aa 5151 && (REG_P (target) || GET_CODE (target) == SUBREG))
bbf6f052 5152 {
85a43a2f 5153 rtx object = assign_temp (type, 0, 1, 1);
c4e59f51 5154 rtx blk_object = adjust_address (object, BLKmode, 0);
bbf6f052 5155
8752c357 5156 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
bbf6f052
RK
5157 emit_move_insn (object, target);
5158
a06ef755
RK
5159 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5160 alias_set);
bbf6f052
RK
5161
5162 emit_move_insn (target, object);
5163
a06ef755 5164 /* We want to return the BLKmode version of the data. */
46093b97 5165 return blk_object;
bbf6f052 5166 }
c3b247b4
JM
5167
5168 if (GET_CODE (target) == CONCAT)
5169 {
5170 /* We're storing into a struct containing a single __complex. */
5171
5172 if (bitpos != 0)
5173 abort ();
6de9cd9a 5174 return store_expr (exp, target, value_mode != VOIDmode);
c3b247b4 5175 }
bbf6f052
RK
5176
5177 /* If the structure is in a register or if the component
5178 is a bit field, we cannot use addressing to access it.
5179 Use bit-field techniques or SUBREG to store in it. */
5180
4fa52007 5181 if (mode == VOIDmode
6ab06cbb
JW
5182 || (mode != BLKmode && ! direct_store[(int) mode]
5183 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5184 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
f8cfc6aa 5185 || REG_P (target)
c980ac49 5186 || GET_CODE (target) == SUBREG
ccc98036
RS
5187 /* If the field isn't aligned enough to store as an ordinary memref,
5188 store it as a bit field. */
15b19a7d 5189 || (mode != BLKmode
9e5f281f
OH
5190 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5191 || bitpos % GET_MODE_ALIGNMENT (mode))
5192 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
502b8322 5193 || (bitpos % BITS_PER_UNIT != 0)))
14a774a9
RK
5194 /* If the RHS and field are a constant size and the size of the
5195 RHS isn't the same size as the bitfield, we must use bitfield
5196 operations. */
05bccae2
RK
5197 || (bitsize >= 0
5198 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5199 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
bbf6f052 5200 {
906c4e36 5201 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
bbd6cf73 5202
ef19912d
RK
5203 /* If BITSIZE is narrower than the size of the type of EXP
5204 we will be narrowing TEMP. Normally, what's wanted are the
5205 low-order bits. However, if EXP's type is a record and this is
5206 big-endian machine, we want the upper BITSIZE bits. */
5207 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
65a07688 5208 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
ef19912d
RK
5209 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5210 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5211 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5212 - bitsize),
c1853da7 5213 NULL_RTX, 1);
ef19912d 5214
bbd6cf73
RK
5215 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5216 MODE. */
5217 if (mode != VOIDmode && mode != BLKmode
5218 && mode != TYPE_MODE (TREE_TYPE (exp)))
5219 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5220
a281e72d
RK
5221 /* If the modes of TARGET and TEMP are both BLKmode, both
5222 must be in memory and BITPOS must be aligned on a byte
5223 boundary. If so, we simply do a block copy. */
5224 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5225 {
3c0cb5de 5226 if (!MEM_P (target) || !MEM_P (temp)
a281e72d
RK
5227 || bitpos % BITS_PER_UNIT != 0)
5228 abort ();
5229
f4ef873c 5230 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
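 /* The byte count below is a ceiling division: a 20-bit field copies
 (20 + 8 - 1) / 8 = 3 bytes, so the partial final byte is included.  */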
a281e72d 5231 emit_block_move (target, temp,
a06ef755 5232 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
44bb111a
RH
5233 / BITS_PER_UNIT),
5234 BLOCK_OP_NORMAL);
a281e72d
RK
5235
5236 return value_mode == VOIDmode ? const0_rtx : target;
5237 }
5238
bbf6f052 5239 /* Store the value in the bitfield. */
b3520980 5240 store_bit_field (target, bitsize, bitpos, mode, temp);
a06ef755 5241
bbf6f052
RK
5242 if (value_mode != VOIDmode)
5243 {
04050c69
RK
5244 /* The caller wants an rtx for the value.
5245 If possible, avoid refetching from the bitfield itself. */
bbf6f052 5246 if (width_mask != 0
3c0cb5de 5247 && ! (MEM_P (target) && MEM_VOLATILE_P (target)))
5c4d7cfb 5248 {
9074de27 5249 tree count;
5c4d7cfb 5250 enum machine_mode tmode;
86a2c12a 5251
5c4d7cfb 5252 tmode = GET_MODE (temp);
86a2c12a
RS
5253 if (tmode == VOIDmode)
5254 tmode = value_mode;
22273300
JJ
5255
5256 if (unsignedp)
5257 return expand_and (tmode, temp,
2496c7bd 5258 gen_int_mode (width_mask, tmode),
22273300
JJ
5259 NULL_RTX);
5260
5c4d7cfb
RS
5261 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5262 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5263 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5264 }
04050c69 5265
bbf6f052 5266 return extract_bit_field (target, bitsize, bitpos, unsignedp,
b3520980 5267 NULL_RTX, value_mode, VOIDmode);
bbf6f052
RK
5268 }
5269 return const0_rtx;
5270 }
5271 else
5272 {
5273 rtx addr = XEXP (target, 0);
a06ef755 5274 rtx to_rtx = target;
bbf6f052
RK
5275
5276 /* If a value is wanted, it must be the lhs;
5277 so make the address stable for multiple use. */
5278
f8cfc6aa 5279 if (value_mode != VOIDmode && !REG_P (addr)
bbf6f052
RK
5280 && ! CONSTANT_ADDRESS_P (addr)
5281 /* A frame-pointer reference is already stable. */
5282 && ! (GET_CODE (addr) == PLUS
5283 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5284 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5285 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
a06ef755 5286 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
bbf6f052
RK
5287
5288 /* Now build a reference to just the desired component. */
5289
a06ef755
RK
5290 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5291
5292 if (to_rtx == target)
5293 to_rtx = copy_rtx (to_rtx);
792760b9 5294
c6df88cb 5295 MEM_SET_IN_STRUCT_P (to_rtx, 1);
10b76d73 5296 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
a06ef755 5297 set_mem_alias_set (to_rtx, alias_set);
bbf6f052
RK
5298
5299 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5300 }
5301}
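
/* Illustrative sketch, not GCC code: the host-side analogue of the
   refetch avoidance above, for 0 < BITSIZE < HOST_BITS_PER_WIDE_INT.
   An unsigned field is recovered by masking with WIDTH_MASK; a signed
   one by shifting left and arithmetically back right so the sign bit
   is replicated, which is what the two expand_shift calls do at the
   rtl level.  */

static HOST_WIDE_INT
sketch_reduce_to_bitsize (HOST_WIDE_INT val, int bitsize, int unsignedp)
{
  int shift = HOST_BITS_PER_WIDE_INT - bitsize;

  if (unsignedp)
    return val & (((HOST_WIDE_INT) 1 << bitsize) - 1);

  /* Shift in the unsigned type to avoid overflowing VAL, then rely on
     an arithmetic right shift, as GCC's host compilers provide.  */
  return ((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) val << shift)) >> shift;
}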
5302\f
5303/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
b4e3fabb
RK
5304 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5305 codes and find the ultimate containing object, which we return.
bbf6f052
RK
5306
5307 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5308 bit position, and *PUNSIGNEDP to the signedness of the field.
7bb0943f
RS
5309 If the position of the field is variable, we store a tree
5310 giving the variable offset (in units) in *POFFSET.
5311 This offset is in addition to the bit position.
5312 If the position is not variable, we store 0 in *POFFSET.
bbf6f052
RK
5313
5314 If any of the extraction expressions is volatile,
5315 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5316
5317 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5318 is a mode that can be used to access the field. In that case, *PBITSIZE
e7c33f54
RK
5319 is redundant.
5320
5321 If the field describes a variable-sized object, *PMODE is set to
5322 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
6d2f8887 5323 this case, but the address of the object can be found. */
bbf6f052
RK
5324
5325tree
502b8322
AJ
5326get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5327 HOST_WIDE_INT *pbitpos, tree *poffset,
5328 enum machine_mode *pmode, int *punsignedp,
5329 int *pvolatilep)
bbf6f052
RK
5330{
5331 tree size_tree = 0;
5332 enum machine_mode mode = VOIDmode;
fed3cef0 5333 tree offset = size_zero_node;
770ae6cc 5334 tree bit_offset = bitsize_zero_node;
770ae6cc 5335 tree tem;
bbf6f052 5336
770ae6cc
RK
5337 /* First get the mode, signedness, and size. We do this from just the
5338 outermost expression. */
bbf6f052
RK
5339 if (TREE_CODE (exp) == COMPONENT_REF)
5340 {
5341 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5342 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5343 mode = DECL_MODE (TREE_OPERAND (exp, 1));
770ae6cc 5344
a150de29 5345 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
bbf6f052
RK
5346 }
5347 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5348 {
5349 size_tree = TREE_OPERAND (exp, 1);
a150de29 5350 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
bbf6f052
RK
5351 }
5352 else
5353 {
5354 mode = TYPE_MODE (TREE_TYPE (exp));
8df83eae 5355 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
770ae6cc 5356
ab87f8c8
JL
5357 if (mode == BLKmode)
5358 size_tree = TYPE_SIZE (TREE_TYPE (exp));
770ae6cc
RK
5359 else
5360 *pbitsize = GET_MODE_BITSIZE (mode);
bbf6f052 5361 }
3a94c984 5362
770ae6cc 5363 if (size_tree != 0)
bbf6f052 5364 {
770ae6cc 5365 if (! host_integerp (size_tree, 1))
e7c33f54
RK
5366 mode = BLKmode, *pbitsize = -1;
5367 else
770ae6cc 5368 *pbitsize = tree_low_cst (size_tree, 1);
bbf6f052
RK
5369 }
5370
5371 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5372 and find the ultimate containing object. */
bbf6f052
RK
5373 while (1)
5374 {
770ae6cc
RK
5375 if (TREE_CODE (exp) == BIT_FIELD_REF)
5376 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5377 else if (TREE_CODE (exp) == COMPONENT_REF)
bbf6f052 5378 {
770ae6cc 5379 tree field = TREE_OPERAND (exp, 1);
44de5aeb 5380 tree this_offset = component_ref_field_offset (exp);
bbf6f052 5381
e7f3c83f
RK
5382 /* If this field hasn't been filled in yet, don't go
5383 past it. This should only happen when folding expressions
5384 made during type construction. */
770ae6cc 5385 if (this_offset == 0)
e7f3c83f
RK
5386 break;
5387
7156dead 5388 offset = size_binop (PLUS_EXPR, offset, this_offset);
770ae6cc
RK
5389 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5390 DECL_FIELD_BIT_OFFSET (field));
e6d8c385 5391
a06ef755 5392 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
bbf6f052 5393 }
7156dead 5394
b4e3fabb
RK
5395 else if (TREE_CODE (exp) == ARRAY_REF
5396 || TREE_CODE (exp) == ARRAY_RANGE_REF)
bbf6f052 5397 {
742920c7 5398 tree index = TREE_OPERAND (exp, 1);
44de5aeb
RK
5399 tree low_bound = array_ref_low_bound (exp);
5400 tree unit_size = array_ref_element_size (exp);
742920c7 5401
770ae6cc
RK
5402 /* We assume all arrays have sizes that are a multiple of a byte.
5403 First subtract the lower bound, if any, in the type of the
5404 index, then convert to sizetype and multiply by the size of the
5405 array element. */
44de5aeb 5406 if (! integer_zerop (low_bound))
3244e67d
RS
5407 index = fold (build2 (MINUS_EXPR, TREE_TYPE (index),
5408 index, low_bound));
f8dac6eb 5409
770ae6cc
RK
5410 offset = size_binop (PLUS_EXPR, offset,
5411 size_binop (MULT_EXPR,
5412 convert (sizetype, index),
7156dead 5413 unit_size));
bbf6f052 5414 }
7156dead 5415
c1853da7
RK
5416 /* We can go inside most conversions: all NON_LVALUE_EXPRs, all normal
5417 conversions that don't change the mode, and all view conversions
5418 except those that need to "step up" the alignment. */
bbf6f052 5419 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
c1853da7
RK
5420 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5421 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5422 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5423 && STRICT_ALIGNMENT
5424 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5425 < BIGGEST_ALIGNMENT)
5426 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5427 || TYPE_ALIGN_OK (TREE_TYPE
5428 (TREE_OPERAND (exp, 0))))))
bbf6f052
RK
5429 && ! ((TREE_CODE (exp) == NOP_EXPR
5430 || TREE_CODE (exp) == CONVERT_EXPR)
5431 && (TYPE_MODE (TREE_TYPE (exp))
5432 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5433 break;
7bb0943f
RS
5434
5435 /* If any reference in the chain is volatile, the effect is volatile. */
5436 if (TREE_THIS_VOLATILE (exp))
5437 *pvolatilep = 1;
839c4796 5438
bbf6f052
RK
5439 exp = TREE_OPERAND (exp, 0);
5440 }
5441
770ae6cc
RK
5442 /* If OFFSET is constant, see if we can return the whole thing as a
5443 constant bit position. Otherwise, split it up. */
5444 if (host_integerp (offset, 0)
5445 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5446 bitsize_unit_node))
5447 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5448 && host_integerp (tem, 0))
5449 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5450 else
5451 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
b50d17a1 5452
bbf6f052 5453 *pmode = mode;
bbf6f052
RK
5454 return exp;
5455}
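
/* Usage sketch, hypothetical: how a caller typically decomposes a
   reference with get_inner_reference.  Only the documented signature
   above is taken from the file; the wrapper and its names are made up
   for illustration.  */

static tree
sketch_decompose_ref (tree ref, HOST_WIDE_INT *pbitpos, tree *poffset)
{
  HOST_WIDE_INT bitsize;
  enum machine_mode mode;
  int unsignedp, volatilep = 0;

  /* On return, *PBITPOS holds the constant bit position and *POFFSET
     the variable part, if any, as described above.  */
  return get_inner_reference (ref, &bitsize, pbitpos, poffset,
                              &mode, &unsignedp, &volatilep);
}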
921b3427 5456
44de5aeb
RK
5457/* Return a tree of sizetype representing the size, in bytes, of the element
5458 of EXP, an ARRAY_REF. */
5459
5460tree
5461array_ref_element_size (tree exp)
5462{
5463 tree aligned_size = TREE_OPERAND (exp, 3);
5464 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5465
5466 /* If a size was specified in the ARRAY_REF, it's the size measured
5467 in alignment units of the element type. So multiply by that value. */
5468 if (aligned_size)
5469 return size_binop (MULT_EXPR, aligned_size,
5470 size_int (TYPE_ALIGN (elmt_type) / BITS_PER_UNIT));
5471
caf93cb0 5472 /* Otherwise, take the size from that of the element type. Substitute
44de5aeb
RK
5473 any PLACEHOLDER_EXPR that we have. */
5474 else
5475 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
5476}
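
/* Example of the scaling above: an ARRAY_REF recording a size of 3
   alignment units over an element type aligned to 32 bits yields
   3 * (32 / 8) = 12 bytes per element.  */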
5477
5478/* Return a tree representing the lower bound of the array mentioned in
5479 EXP, an ARRAY_REF. */
5480
5481tree
5482array_ref_low_bound (tree exp)
5483{
5484 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5485
5486 /* If a lower bound is specified in EXP, use it. */
5487 if (TREE_OPERAND (exp, 2))
5488 return TREE_OPERAND (exp, 2);
5489
5490 /* Otherwise, if there is a domain type and it has a lower bound, use it,
5491 substituting for a PLACEHOLDER_EXPR as needed. */
5492 if (domain_type && TYPE_MIN_VALUE (domain_type))
5493 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5494
5495 /* Otherwise, return a zero of the appropriate type. */
5496 return fold_convert (TREE_TYPE (TREE_OPERAND (exp, 1)), integer_zero_node);
5497}
5498
a7e5372d
ZD
5499/* Return a tree representing the upper bound of the array mentioned in
5500 EXP, an ARRAY_REF. */
5501
5502tree
5503array_ref_up_bound (tree exp)
5504{
5505 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5506
5507 /* If there is a domain type and it has an upper bound, use it, substituting
5508 for a PLACEHOLDER_EXPR as needed. */
5509 if (domain_type && TYPE_MAX_VALUE (domain_type))
5510 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
5511
5512 /* Otherwise fail. */
5513 return NULL_TREE;
5514}
5515
44de5aeb
RK
5516/* Return a tree representing the offset, in bytes, of the field referenced
5517 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
5518
5519tree
5520component_ref_field_offset (tree exp)
5521{
5522 tree aligned_offset = TREE_OPERAND (exp, 2);
5523 tree field = TREE_OPERAND (exp, 1);
5524
5525 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5526 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
5527 value. */
5528 if (aligned_offset)
5529 return size_binop (MULT_EXPR, aligned_offset,
5530 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5531
caf93cb0 5532 /* Otherwise, take the offset from that of the field. Substitute
44de5aeb
RK
5533 any PLACEHOLDER_EXPR that we have. */
5534 else
5535 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
5536}
5537
ed239f5a
RK
5538/* Return 1 if T is an expression that get_inner_reference handles. */
5539
5540int
502b8322 5541handled_component_p (tree t)
ed239f5a
RK
5542{
5543 switch (TREE_CODE (t))
5544 {
5545 case BIT_FIELD_REF:
5546 case COMPONENT_REF:
5547 case ARRAY_REF:
5548 case ARRAY_RANGE_REF:
5549 case NON_LVALUE_EXPR:
5550 case VIEW_CONVERT_EXPR:
5551 return 1;
5552
1a8c4ca6
EB
5553 /* ??? Sure they are handled, but get_inner_reference may return
5554 a different PBITSIZE, depending upon whether the expression is
5555 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
ed239f5a
RK
5556 case NOP_EXPR:
5557 case CONVERT_EXPR:
5558 return (TYPE_MODE (TREE_TYPE (t))
5559 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5560
5561 default:
5562 return 0;
5563 }
5564}
bbf6f052 5565\f
3fe44edd
RK
5566/* Given an rtx VALUE that may contain additions and multiplications, return
5567 an equivalent value that just refers to a register, memory, or constant.
5568 This is done by generating instructions to perform the arithmetic and
5569 returning a pseudo-register containing the value.
c45a13a6
RK
5570
5571 The returned value may be a REG, SUBREG, MEM or constant. */
bbf6f052
RK
5572
5573rtx
502b8322 5574force_operand (rtx value, rtx target)
bbf6f052 5575{
8a28dbcc 5576 rtx op1, op2;
bbf6f052 5577 /* Use subtarget as the target for operand 0 of a binary operation. */
b3694847 5578 rtx subtarget = get_subtarget (target);
8a28dbcc 5579 enum rtx_code code = GET_CODE (value);
bbf6f052 5580
50654f6c
ZD
5581 /* Check for a SUBREG applied to an expression produced by the loop optimizer. */
5582 if (code == SUBREG
f8cfc6aa 5583 && !REG_P (SUBREG_REG (value))
3c0cb5de 5584 && !MEM_P (SUBREG_REG (value)))
50654f6c
ZD
5585 {
5586 value = simplify_gen_subreg (GET_MODE (value),
5587 force_reg (GET_MODE (SUBREG_REG (value)),
5588 force_operand (SUBREG_REG (value),
5589 NULL_RTX)),
5590 GET_MODE (SUBREG_REG (value)),
5591 SUBREG_BYTE (value));
5592 code = GET_CODE (value);
5593 }
5594
8b015896 5595 /* Check for a PIC address load. */
8a28dbcc 5596 if ((code == PLUS || code == MINUS)
8b015896
RH
5597 && XEXP (value, 0) == pic_offset_table_rtx
5598 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5599 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5600 || GET_CODE (XEXP (value, 1)) == CONST))
5601 {
5602 if (!subtarget)
5603 subtarget = gen_reg_rtx (GET_MODE (value));
5604 emit_move_insn (subtarget, value);
5605 return subtarget;
5606 }
5607
8a28dbcc 5608 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
bbf6f052 5609 {
8a28dbcc
JH
5610 if (!target)
5611 target = gen_reg_rtx (GET_MODE (value));
ce0f3925 5612 convert_move (target, force_operand (XEXP (value, 0), NULL),
8a28dbcc
JH
5613 code == ZERO_EXTEND);
5614 return target;
bbf6f052
RK
5615 }
5616
ec8e098d 5617 if (ARITHMETIC_P (value))
bbf6f052
RK
5618 {
5619 op2 = XEXP (value, 1);
f8cfc6aa 5620 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
bbf6f052 5621 subtarget = 0;
8a28dbcc 5622 if (code == MINUS && GET_CODE (op2) == CONST_INT)
bbf6f052 5623 {
8a28dbcc 5624 code = PLUS;
bbf6f052
RK
5625 op2 = negate_rtx (GET_MODE (value), op2);
5626 }
5627
5628 /* Check for an addition with OP2 a constant integer and our first
8a28dbcc
JH
5629 operand a PLUS of a virtual register and something else. In that
5630 case, we want to emit the sum of the virtual register and the
5631 constant first and then add the other value. This allows virtual
5632 register instantiation to simply modify the constant rather than
5633 creating another one around this addition. */
5634 if (code == PLUS && GET_CODE (op2) == CONST_INT
bbf6f052 5635 && GET_CODE (XEXP (value, 0)) == PLUS
f8cfc6aa 5636 && REG_P (XEXP (XEXP (value, 0), 0))
bbf6f052
RK
5637 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5638 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5639 {
8a28dbcc
JH
5640 rtx temp = expand_simple_binop (GET_MODE (value), code,
5641 XEXP (XEXP (value, 0), 0), op2,
5642 subtarget, 0, OPTAB_LIB_WIDEN);
5643 return expand_simple_binop (GET_MODE (value), code, temp,
5644 force_operand (XEXP (XEXP (value,
5645 0), 1), 0),
5646 target, 0, OPTAB_LIB_WIDEN);
bbf6f052 5647 }
3a94c984 5648
8a28dbcc
JH
5649 op1 = force_operand (XEXP (value, 0), subtarget);
5650 op2 = force_operand (op2, NULL_RTX);
5651 switch (code)
5652 {
5653 case MULT:
5654 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5655 case DIV:
5656 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5657 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5658 target, 1, OPTAB_LIB_WIDEN);
5659 else
5660 return expand_divmod (0,
5661 FLOAT_MODE_P (GET_MODE (value))
5662 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5663 GET_MODE (value), op1, op2, target, 0);
5664 break;
5665 case MOD:
5666 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5667 target, 0);
5668 break;
5669 case UDIV:
5670 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5671 target, 1);
5672 break;
5673 case UMOD:
5674 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5675 target, 1);
5676 break;
5677 case ASHIFTRT:
5678 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5679 target, 0, OPTAB_LIB_WIDEN);
5680 break;
5681 default:
5682 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5683 target, 1, OPTAB_LIB_WIDEN);
5684 }
5685 }
ec8e098d 5686 if (UNARY_P (value))
8a28dbcc
JH
5687 {
5688 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5689 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
bbf6f052 5690 }
34e81b5a
RK
5691
5692#ifdef INSN_SCHEDULING
5693 /* On machines that have insn scheduling, we want all memory references to be
5694 explicit, so we need to deal with such paradoxical SUBREGs. */
3c0cb5de 5695 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
34e81b5a
RK
5696 && (GET_MODE_SIZE (GET_MODE (value))
5697 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5698 value
5699 = simplify_gen_subreg (GET_MODE (value),
5700 force_reg (GET_MODE (SUBREG_REG (value)),
5701 force_operand (SUBREG_REG (value),
5702 NULL_RTX)),
5703 GET_MODE (SUBREG_REG (value)),
5704 SUBREG_BYTE (value));
5705#endif
5706
bbf6f052
RK
5707 return value;
5708}
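
/* Example of the virtual-register case above: for ((vreg + X) + 4),
   the sum vreg + 4 is emitted first so that virtual register
   instantiation can fold the 4 into vreg's own frame offset; the
   remaining X is then added to that single result instead of
   creating a second constant addition.  */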
5709\f
bbf6f052 5710/* Subroutine of expand_expr: return nonzero iff there is no way that
e5e809f4
JL
5711 EXP can reference X, which is being modified. TOP_P is nonzero if this
5712 call is going to be used to determine whether we need a temporary
ff439b5f
CB
5713 for EXP, as opposed to a recursive call to this function.
5714
5715 It is always safe for this routine to return zero since it merely
5716 searches for optimization opportunities. */
bbf6f052 5717
8f17b5c5 5718int
502b8322 5719safe_from_p (rtx x, tree exp, int top_p)
bbf6f052
RK
5720{
5721 rtx exp_rtl = 0;
5722 int i, nops;
5723
6676e72f
RK
5724 if (x == 0
5725 /* If EXP has varying size, we MUST use a target since we currently
8f6562d0
PB
5726 have no way of allocating temporaries of variable size
5727 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5728 So we assume here that something at a higher level has prevented a
f4510f37 5729 clash. This is somewhat bogus, but the best we can do. Only
e5e809f4 5730 do this when X is BLKmode and when we are at the top level. */
d0f062fb 5731 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
f4510f37 5732 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
8f6562d0
PB
5733 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5734 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5735 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5736 != INTEGER_CST)
1da68f56
RK
5737 && GET_MODE (x) == BLKmode)
5738 /* If X is in the outgoing argument area, it is always safe. */
3c0cb5de 5739 || (MEM_P (x)
1da68f56
RK
5740 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5741 || (GET_CODE (XEXP (x, 0)) == PLUS
5742 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
bbf6f052
RK
5743 return 1;
5744
5745 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5746 find the underlying pseudo. */
5747 if (GET_CODE (x) == SUBREG)
5748 {
5749 x = SUBREG_REG (x);
f8cfc6aa 5750 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
bbf6f052
RK
5751 return 0;
5752 }
5753
1da68f56 5754 /* Now look at our tree code and possibly recurse. */
bbf6f052
RK
5755 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5756 {
5757 case 'd':
a9772b60 5758 exp_rtl = DECL_RTL_IF_SET (exp);
bbf6f052
RK
5759 break;
5760
5761 case 'c':
5762 return 1;
5763
5764 case 'x':
5765 if (TREE_CODE (exp) == TREE_LIST)
f8d4be57
CE
5766 {
5767 while (1)
5768 {
5769 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5770 return 0;
5771 exp = TREE_CHAIN (exp);
5772 if (!exp)
5773 return 1;
5774 if (TREE_CODE (exp) != TREE_LIST)
5775 return safe_from_p (x, exp, 0);
5776 }
5777 }
ff439b5f
CB
5778 else if (TREE_CODE (exp) == ERROR_MARK)
5779 return 1; /* An already-visited SAVE_EXPR? */
bbf6f052
RK
5780 else
5781 return 0;
5782
350fae66
RK
5783 case 's':
5784 /* The only case we look at here is the DECL_INITIAL inside a
5785 DECL_EXPR. */
5786 return (TREE_CODE (exp) != DECL_EXPR
5787 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
5788 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
5789 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
5790
bbf6f052
RK
5791 case '2':
5792 case '<':
f8d4be57
CE
5793 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
5794 return 0;
5d3cc252 5795 /* Fall through. */
f8d4be57
CE
5796
5797 case '1':
5798 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
bbf6f052
RK
5799
5800 case 'e':
5801 case 'r':
5802 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5803 the expression. If it is set, we conflict iff we are that rtx or
5804 both are in memory. Otherwise, we check all operands of the
5805 expression recursively. */
5806
5807 switch (TREE_CODE (exp))
5808 {
5809 case ADDR_EXPR:
70072ed9
RK
5810 /* If the operand is static or we are static, we can't conflict.
5811 Likewise if we don't conflict with the operand at all. */
5812 if (staticp (TREE_OPERAND (exp, 0))
5813 || TREE_STATIC (exp)
5814 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5815 return 1;
5816
5817 /* Otherwise, the only way this can conflict is if we are taking
5818 the address of a DECL whose address is part of X, which is
5819 very rare. */
5820 exp = TREE_OPERAND (exp, 0);
5821 if (DECL_P (exp))
5822 {
5823 if (!DECL_RTL_SET_P (exp)
3c0cb5de 5824 || !MEM_P (DECL_RTL (exp)))
70072ed9
RK
5825 return 0;
5826 else
5827 exp_rtl = XEXP (DECL_RTL (exp), 0);
5828 }
5829 break;
bbf6f052
RK
5830
5831 case INDIRECT_REF:
3c0cb5de 5832 if (MEM_P (x)
1da68f56
RK
5833 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5834 get_alias_set (exp)))
bbf6f052
RK
5835 return 0;
5836 break;
5837
5838 case CALL_EXPR:
f9808f81
MM
5839 /* Assume that the call will clobber all hard registers and
5840 all of memory. */
f8cfc6aa 5841 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
3c0cb5de 5842 || MEM_P (x))
f9808f81 5843 return 0;
bbf6f052
RK
5844 break;
5845
bbf6f052 5846 case WITH_CLEANUP_EXPR:
5dab5552 5847 case CLEANUP_POINT_EXPR:
ac45df5d
RH
5848 /* Lowered by gimplify.c. */
5849 abort ();
5850
bbf6f052 5851 case SAVE_EXPR:
82c82743 5852 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
bbf6f052 5853
e9a25f70
JL
5854 default:
5855 break;
bbf6f052
RK
5856 }
5857
5858 /* If we have an rtx, we do not need to scan our operands. */
5859 if (exp_rtl)
5860 break;
5861
8f17b5c5 5862 nops = first_rtl_op (TREE_CODE (exp));
bbf6f052
RK
5863 for (i = 0; i < nops; i++)
5864 if (TREE_OPERAND (exp, i) != 0
e5e809f4 5865 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
bbf6f052 5866 return 0;
8f17b5c5
MM
5867
5868 /* If this is a language-specific tree code, it may require
5869 special handling. */
dbbbbf3b
JDA
5870 if ((unsigned int) TREE_CODE (exp)
5871 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
ae2bcd98 5872 && !lang_hooks.safe_from_p (x, exp))
8f17b5c5 5873 return 0;
bbf6f052
RK
5874 }
5875
5876 /* If we have an rtl, find any enclosed object. Then see if we conflict
5877 with it. */
5878 if (exp_rtl)
5879 {
5880 if (GET_CODE (exp_rtl) == SUBREG)
5881 {
5882 exp_rtl = SUBREG_REG (exp_rtl);
f8cfc6aa 5883 if (REG_P (exp_rtl)
bbf6f052
RK
5884 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5885 return 0;
5886 }
5887
5888 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
1da68f56 5889 are memory and they conflict. */
bbf6f052 5890 return ! (rtx_equal_p (x, exp_rtl)
3c0cb5de 5891 || (MEM_P (x) && MEM_P (exp_rtl)
21117a17 5892 && true_dependence (exp_rtl, VOIDmode, x,
1da68f56 5893 rtx_addr_varies_p)));
bbf6f052
RK
5894 }
5895
5896 /* If we reach here, it is safe. */
5897 return 1;
5898}
5899
14a774a9 5900\f
0d4903b8
RK
5901/* Return the highest power of two that EXP is known to be a multiple of.
5902 This is used in updating alignment of MEMs in array references. */
5903
9ceca302 5904static unsigned HOST_WIDE_INT
502b8322 5905highest_pow2_factor (tree exp)
0d4903b8 5906{
9ceca302 5907 unsigned HOST_WIDE_INT c0, c1;
0d4903b8
RK
5908
5909 switch (TREE_CODE (exp))
5910 {
5911 case INTEGER_CST:
e0f1be5c
JJ
5912 /* We can find the lowest bit that's a one. If the low
5913 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
5914 We need to handle this case since we can find it in a COND_EXPR,
a98ebe2e 5915 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
e0f1be5c 5916 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
3a531a8b 5917 later ICE. */
e0f1be5c 5918 if (TREE_CONSTANT_OVERFLOW (exp))
1ed1b4fb 5919 return BIGGEST_ALIGNMENT;
e0f1be5c 5920 else
0d4903b8 5921 {
e0f1be5c
JJ
5922 /* Note: tree_low_cst is intentionally not used here,
5923 we don't care about the upper bits. */
5924 c0 = TREE_INT_CST_LOW (exp);
5925 c0 &= -c0;
5926 return c0 ? c0 : BIGGEST_ALIGNMENT;
0d4903b8
RK
5927 }
5928 break;
5929
65a07688 5930 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
0d4903b8
RK
5931 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5932 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5933 return MIN (c0, c1);
5934
5935 case MULT_EXPR:
5936 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5937 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5938 return c0 * c1;
5939
5940 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
5941 case CEIL_DIV_EXPR:
65a07688
RK
5942 if (integer_pow2p (TREE_OPERAND (exp, 1))
5943 && host_integerp (TREE_OPERAND (exp, 1), 1))
5944 {
5945 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5946 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
5947 return MAX (1, c0 / c1);
5948 }
5949 break;
0d4903b8
RK
5950
5951 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6fce44af 5952 case SAVE_EXPR:
0d4903b8
RK
5953 return highest_pow2_factor (TREE_OPERAND (exp, 0));
5954
65a07688
RK
5955 case COMPOUND_EXPR:
5956 return highest_pow2_factor (TREE_OPERAND (exp, 1));
5957
0d4903b8
RK
5958 case COND_EXPR:
5959 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5960 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
5961 return MIN (c0, c1);
5962
5963 default:
5964 break;
5965 }
5966
5967 return 1;
5968}
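
/* Illustrative sketch, not GCC code: the INTEGER_CST case above uses
   the two's-complement identity that C & -C isolates the lowest set
   bit of C, which is the largest power of two dividing C.  For C = 24
   (binary 11000), 24 & -24 == 8.  A zero input yields 0 here, which
   the code above maps to BIGGEST_ALIGNMENT.  The recursive rules
   follow the arithmetic: a sum of multiples of 8 and of 4 is only
   known to be a multiple of 4 (MIN), while their product is a
   multiple of 32 (C0 * C1).  */

static unsigned HOST_WIDE_INT
sketch_lowest_set_bit (unsigned HOST_WIDE_INT c)
{
  return c & -c;
}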
818c0c94 5969
d50a16c4
EB
5970/* Similar, except that the alignment requirements of TARGET are
5971 taken into account. Assume it is at least as aligned as its
5972 type, unless it is a COMPONENT_REF in which case the layout of
5973 the structure gives the alignment. */
818c0c94 5974
9ceca302 5975static unsigned HOST_WIDE_INT
d50a16c4 5976highest_pow2_factor_for_target (tree target, tree exp)
818c0c94 5977{
d50a16c4 5978 unsigned HOST_WIDE_INT target_align, factor;
818c0c94
RH
5979
5980 factor = highest_pow2_factor (exp);
d50a16c4
EB
5981 if (TREE_CODE (target) == COMPONENT_REF)
5982 target_align = DECL_ALIGN (TREE_OPERAND (target, 1)) / BITS_PER_UNIT;
5983 else
5984 target_align = TYPE_ALIGN (TREE_TYPE (target)) / BITS_PER_UNIT;
5985 return MAX (factor, target_align);
818c0c94 5986}
0d4903b8 5987\f
6de9cd9a
DN
5988/* Expands variable VAR. */
5989
5990void
5991expand_var (tree var)
5992{
5993 if (DECL_EXTERNAL (var))
5994 return;
5995
5996 if (TREE_STATIC (var))
5997 /* If this is an inlined copy of a static local variable,
5998 look up the original decl. */
5999 var = DECL_ORIGIN (var);
6000
6001 if (TREE_STATIC (var)
6002 ? !TREE_ASM_WRITTEN (var)
6003 : !DECL_RTL_SET_P (var))
6004 {
6005 if (TREE_CODE (var) == VAR_DECL && DECL_DEFER_OUTPUT (var))
6006 {
6007 /* Prepare a mem & address for the decl. */
6008 rtx x;
caf93cb0 6009
6de9cd9a
DN
6010 if (TREE_STATIC (var))
6011 abort ();
6012
6013 x = gen_rtx_MEM (DECL_MODE (var),
6014 gen_reg_rtx (Pmode));
6015
6016 set_mem_attributes (x, var, 1);
6017 SET_DECL_RTL (var, x);
6018 }
673fda6b 6019 else if (lang_hooks.expand_decl (var))
6de9cd9a
DN
6020 /* OK. */;
6021 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6022 expand_decl (var);
6023 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
0e6df31e 6024 rest_of_decl_compilation (var, 0, 0);
6de9cd9a
DN
6025 else if (TREE_CODE (var) == TYPE_DECL
6026 || TREE_CODE (var) == CONST_DECL
6027 || TREE_CODE (var) == FUNCTION_DECL
6028 || TREE_CODE (var) == LABEL_DECL)
6029 /* No expansion needed. */;
6030 else
6031 abort ();
6032 }
6033}
6034
eb698c58
RS
6035/* Subroutine of expand_expr. Expand the two operands of a binary
6036 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6037 The value may be stored in TARGET if TARGET is nonzero. The
6038 MODIFIER argument is as documented by expand_expr. */
6039
6040static void
6041expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6042 enum expand_modifier modifier)
6043{
6044 if (! safe_from_p (target, exp1, 1))
6045 target = 0;
6046 if (operand_equal_p (exp0, exp1, 0))
6047 {
6048 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6049 *op1 = copy_rtx (*op0);
6050 }
6051 else
6052 {
c67e6e14
RS
6053 /* If we need to preserve evaluation order, copy exp0 into its own
6054 temporary variable so that it can't be clobbered by exp1. */
6055 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6056 exp0 = save_expr (exp0);
eb698c58
RS
6057 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6058 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6059 }
6060}
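
/* Example of the hazard handled above: with a front end that requires
   left-to-right evaluation (flag_evaluation_order), expanding
   f (x, x++) must not let the second operand's side effect clobber
   the first.  save_expr pins EXP0 to a temporary, the moral
   equivalent of

       t = x;  f (t, x++);

   so EXP1 may then modify shared state freely.  */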
6061
f47e9b4e 6062\f
bbf6f052
RK
6063/* expand_expr: generate code for computing expression EXP.
6064 An rtx for the computed value is returned. The value is never null.
6065 In the case of a void EXP, const0_rtx is returned.
6066
6067 The value may be stored in TARGET if TARGET is nonzero.
6068 TARGET is just a suggestion; callers must assume that
6069 the rtx returned may not be the same as TARGET.
6070
6071 If TARGET is CONST0_RTX, it means that the value will be ignored.
6072
6073 If TMODE is not VOIDmode, it suggests generating the
6074 result in mode TMODE. But this is done only when convenient.
6075 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6076 TMODE is just a suggestion; callers must assume that
6077 the rtx returned may not have mode TMODE.
6078
d6a5ac33
RK
6079 Note that TARGET may have neither TMODE nor MODE. In that case, it
6080 probably will not be used.
bbf6f052
RK
6081
6082 If MODIFIER is EXPAND_SUM then when EXP is an addition
6083 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6084 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6085 products as above, or REG or MEM, or constant.
6086 Ordinarily in such cases we would output mul or add instructions
6087 and then return a pseudo reg containing the sum.
6088
6089 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6090 it also marks a label as absolutely required (it can't be dead).
26fcb35a 6091 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
d6a5ac33
RK
6092 This is used for outputting expressions used in initializers.
6093
6094 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6095 with a constant address even if that address is not normally legitimate.
8403445a
AM
6096 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6097
6098 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6099 a call parameter. Such targets require special care as we haven't yet
6100 marked TARGET so that it's safe from being trashed by libcalls. We
6101 don't want to use TARGET for anything but the final result;
6102 intermediate values must go elsewhere. Additionally, calls to
caf93cb0 6103 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
0fab64a3
MM
6104
6105 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6106 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6107 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6108 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6109 recursively. */
bbf6f052 6110
6de9cd9a
DN
6111static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6112 enum expand_modifier, rtx *);
6113
bbf6f052 6114rtx
0fab64a3
MM
6115expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6116 enum expand_modifier modifier, rtx *alt_rtl)
6de9cd9a
DN
6117{
6118 int rn = -1;
6119 rtx ret, last = NULL;
6120
6121 /* Handle ERROR_MARK before anybody tries to access its type. */
6122 if (TREE_CODE (exp) == ERROR_MARK
6123 || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
6124 {
6125 ret = CONST0_RTX (tmode);
6126 return ret ? ret : const0_rtx;
6127 }
6128
6129 if (flag_non_call_exceptions)
6130 {
6131 rn = lookup_stmt_eh_region (exp);
6132 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
6133 if (rn >= 0)
6134 last = get_last_insn ();
6135 }
6136
6137 /* If this is an expression of some kind and it has an associated line
caf93cb0 6138 number, then emit the line number before expanding the expression.
6de9cd9a
DN
6139
6140 We need to save and restore the file and line information so that
6141 errors discovered during expansion are emitted with the right
caf93cb0 6142 information. It would be better if the diagnostic routines
6de9cd9a
DN
6143 used the file/line information embedded in the tree nodes rather
6144 than globals. */
6145 if (cfun && EXPR_HAS_LOCATION (exp))
6146 {
6147 location_t saved_location = input_location;
6148 input_location = EXPR_LOCATION (exp);
6149 emit_line_note (input_location);
caf93cb0 6150
6de9cd9a 6151 /* Record where the insns produced belong. */
1ea463a2 6152 record_block_change (TREE_BLOCK (exp));
6de9cd9a
DN
6153
6154 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6155
6156 input_location = saved_location;
6157 }
6158 else
6159 {
6160 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6161 }
6162
6163 /* If using non-call exceptions, mark all insns that may trap.
6164 expand_call() will mark CALL_INSNs before we get to this code,
6165 but it doesn't handle libcalls, and these may trap. */
6166 if (rn >= 0)
caf93cb0 6167 {
6de9cd9a 6168 rtx insn;
caf93cb0 6169 for (insn = next_real_insn (last); insn;
6de9cd9a
DN
6170 insn = next_real_insn (insn))
6171 {
6172 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6173 /* If we want exceptions for non-call insns, any
6174 may_trap_p instruction may throw. */
6175 && GET_CODE (PATTERN (insn)) != CLOBBER
6176 && GET_CODE (PATTERN (insn)) != USE
4b4bf941 6177 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
6de9cd9a
DN
6178 {
6179 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6180 REG_NOTES (insn));
6181 }
6182 }
6183 }
6184
6185 return ret;
6186}
6187
6188static rtx
6189expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6190 enum expand_modifier modifier, rtx *alt_rtl)
bbf6f052 6191{
b3694847 6192 rtx op0, op1, temp;
bbf6f052 6193 tree type = TREE_TYPE (exp);
8df83eae 6194 int unsignedp;
b3694847
SS
6195 enum machine_mode mode;
6196 enum tree_code code = TREE_CODE (exp);
bbf6f052 6197 optab this_optab;
68557e14
ML
6198 rtx subtarget, original_target;
6199 int ignore;
bbf6f052 6200 tree context;
bc15d0ef
JM
6201 bool reduce_bit_field = false;
6202#define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
6203 ? reduce_to_bit_field_precision ((expr), \
6204 target, \
6205 type) \
6206 : (expr))
bbf6f052 6207
68557e14 6208 mode = TYPE_MODE (type);
8df83eae 6209 unsignedp = TYPE_UNSIGNED (type);
bc15d0ef
JM
6210 if (lang_hooks.reduce_bit_field_operations
6211 && TREE_CODE (type) == INTEGER_TYPE
6212 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
6213 {
6214 /* An operation in what may be a bit-field type needs the
6215 result to be reduced to the precision of the bit-field type,
6216 which is narrower than that of the type's mode. */
6217 reduce_bit_field = true;
6218 if (modifier == EXPAND_STACK_PARM)
6219 target = 0;
6220 }
8df83eae 6221
68557e14 6222 /* Use subtarget as the target for operand 0 of a binary operation. */
296b4ed9 6223 subtarget = get_subtarget (target);
68557e14
ML
6224 original_target = target;
6225 ignore = (target == const0_rtx
6226 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
3a18db48
AP
6227 || code == CONVERT_EXPR || code == COND_EXPR
6228 || code == VIEW_CONVERT_EXPR)
68557e14
ML
6229 && TREE_CODE (type) == VOID_TYPE));
6230
dd27116b
RK
6231 /* If we are going to ignore this result, we need only do something
6232 if there is a side-effect somewhere in the expression. If there
b50d17a1
RK
6233 is, short-circuit the most common cases here. Note that we must
6234 not call expand_expr with anything but const0_rtx in case this
6235 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
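 /* For illustration (hypothetical statements): in '(void) (x + y);'
 the addition itself is never computed, both operands being expanded
 with const0_rtx for side effects only; in '(void) *p;' with *p
 volatile, the load is expanded and copied to a register below so
 the access is preserved. */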
bbf6f052 6236
dd27116b
RK
6237 if (ignore)
6238 {
6239 if (! TREE_SIDE_EFFECTS (exp))
6240 return const0_rtx;
6241
14a774a9
RK
6242 /* Ensure we reference a volatile object even if value is ignored, but
6243 don't do this if all we are doing is taking its address. */
dd27116b
RK
6244 if (TREE_THIS_VOLATILE (exp)
6245 && TREE_CODE (exp) != FUNCTION_DECL
14a774a9
RK
6246 && mode != VOIDmode && mode != BLKmode
6247 && modifier != EXPAND_CONST_ADDRESS)
dd27116b 6248 {
37a08a29 6249 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
3c0cb5de 6250 if (MEM_P (temp))
dd27116b
RK
6251 temp = copy_to_reg (temp);
6252 return const0_rtx;
6253 }
6254
14a774a9 6255 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
3a18db48 6256 || code == INDIRECT_REF)
37a08a29
RK
6257 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6258 modifier);
6259
14a774a9 6260 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
b4e3fabb 6261 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
dd27116b 6262 {
37a08a29
RK
6263 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6264 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
dd27116b
RK
6265 return const0_rtx;
6266 }
6267 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6268 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6269 /* If the second operand has no side effects, just evaluate
0f41302f 6270 the first. */
37a08a29
RK
6271 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6272 modifier);
14a774a9
RK
6273 else if (code == BIT_FIELD_REF)
6274 {
37a08a29
RK
6275 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6276 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6277 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
14a774a9
RK
6278 return const0_rtx;
6279 }
37a08a29 6280
90764a87 6281 target = 0;
dd27116b 6282 }
bbf6f052 6283
e44842fe
RK
 6284 /* If we will do cse, generate all results into pseudo registers
6285 since 1) that allows cse to find more things
6286 and 2) otherwise cse could produce an insn the machine
4977bab6
ZW
6287 cannot support. An exception is a CONSTRUCTOR into a multi-word
 6288 MEM: storing directly into the MEM is much more likely to be efficient.
6289 Another is a CALL_EXPR which must return in memory. */
e44842fe 6290
bbf6f052 6291 if (! cse_not_expected && mode != BLKmode && target
f8cfc6aa 6292 && (!REG_P (target) || REGNO (target) < FIRST_PSEUDO_REGISTER)
4977bab6 6293 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
61f71b34 6294 && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
8403445a 6295 target = 0;
bbf6f052 6296
bbf6f052
RK
6297 switch (code)
6298 {
6299 case LABEL_DECL:
b552441b
RS
6300 {
6301 tree function = decl_function_context (exp);
c5c76735 6302
6de9cd9a
DN
6303 temp = label_rtx (exp);
6304 temp = gen_rtx_LABEL_REF (Pmode, temp);
6305
d0977240 6306 if (function != current_function_decl
6de9cd9a
DN
6307 && function != 0)
6308 LABEL_REF_NONLOCAL_P (temp) = 1;
6309
6310 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
26fcb35a 6311 return temp;
b552441b 6312 }
bbf6f052
RK
6313
6314 case PARM_DECL:
bbf6f052 6315 case VAR_DECL:
2dca20cd
RS
6316 /* If a static var's type was incomplete when the decl was written,
6317 but the type is complete now, lay out the decl now. */
ca06cfe6
RH
6318 if (DECL_SIZE (exp) == 0
6319 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
2dca20cd 6320 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
a46666a9 6321 layout_decl (exp, 0);
921b3427 6322
0f41302f 6323 /* ... fall through ... */
d6a5ac33 6324
2dca20cd 6325 case FUNCTION_DECL:
bbf6f052
RK
6326 case RESULT_DECL:
6327 if (DECL_RTL (exp) == 0)
6328 abort ();
d6a5ac33 6329
e44842fe
RK
 6330 /* Ensure the variable is marked as used even if it doesn't go through
 6331 a parser. If it hasn't been used yet, write out an external
6332 definition. */
6333 if (! TREE_USED (exp))
6334 {
6335 assemble_external (exp);
6336 TREE_USED (exp) = 1;
6337 }
6338
dc6d66b3
RK
6339 /* Show we haven't gotten RTL for this yet. */
6340 temp = 0;
6341
ab8907ef
RH
6342 /* Variables inherited from containing functions should have
6343 been lowered by this point. */
bbf6f052 6344 context = decl_function_context (exp);
ab8907ef
RH
6345 if (context != 0
6346 && context != current_function_decl
6347 && !TREE_STATIC (exp)
6348 /* ??? C++ creates functions that are not TREE_STATIC. */
6349 && TREE_CODE (exp) != FUNCTION_DECL)
6350 abort ();
4af3895e 6351
bbf6f052
RK
6352 /* This is the case of an array whose size is to be determined
6353 from its initializer, while the initializer is still being parsed.
6354 See expand_decl. */
d6a5ac33 6355
3c0cb5de 6356 else if (MEM_P (DECL_RTL (exp))
f8cfc6aa 6357 && REG_P (XEXP (DECL_RTL (exp), 0)))
792760b9 6358 temp = validize_mem (DECL_RTL (exp));
d6a5ac33
RK
6359
6360 /* If DECL_RTL is memory, we are in the normal case and either
6361 the address is not valid or it is not a register and -fforce-addr
6362 is specified, get the address into a register. */
6363
3c0cb5de 6364 else if (MEM_P (DECL_RTL (exp))
dc6d66b3
RK
6365 && modifier != EXPAND_CONST_ADDRESS
6366 && modifier != EXPAND_SUM
6367 && modifier != EXPAND_INITIALIZER
6368 && (! memory_address_p (DECL_MODE (exp),
6369 XEXP (DECL_RTL (exp), 0))
6370 || (flag_force_addr
f8cfc6aa 6371 && !REG_P (XEXP (DECL_RTL (exp), 0)))))
0fab64a3
MM
6372 {
6373 if (alt_rtl)
6374 *alt_rtl = DECL_RTL (exp);
6375 temp = replace_equiv_address (DECL_RTL (exp),
6376 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6377 }
1499e0a8 6378
dc6d66b3 6379 /* If we got something, return it. But first, set the alignment
04956a1a 6380 if the address is a register. */
dc6d66b3
RK
6381 if (temp != 0)
6382 {
3c0cb5de 6383 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
bdb429a5 6384 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
dc6d66b3
RK
6385
6386 return temp;
6387 }
6388
1499e0a8
RK
6389 /* If the mode of DECL_RTL does not match that of the decl, it
6390 must be a promoted value. We return a SUBREG of the wanted mode,
6391 but mark it so that we know that it was already extended. */
6392
f8cfc6aa 6393 if (REG_P (DECL_RTL (exp))
7254c5fa 6394 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
1499e0a8 6395 {
1499e0a8
RK
6396 /* Get the signedness used for this variable. Ensure we get the
6397 same mode we got when the variable was declared. */
78911e8b 6398 if (GET_MODE (DECL_RTL (exp))
0fb7aeda 6399 != promote_mode (type, DECL_MODE (exp), &unsignedp,
e8dcd824 6400 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
1499e0a8
RK
6401 abort ();
6402
ddef6bc7 6403 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
1499e0a8 6404 SUBREG_PROMOTED_VAR_P (temp) = 1;
7879b81e 6405 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
1499e0a8
RK
6406 return temp;
6407 }
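 /* A sketch, assuming a target whose PROMOTE_MODE widens narrow
 locals (register number hypothetical): a 'char c' may live in
 (reg:SI 60), and asking for it in QImode yields

 (subreg:QI (reg:SI 60) 0)

 with SUBREG_PROMOTED_VAR_P set, telling later passes that the
 upper bits already hold a valid extension. */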
6408
bbf6f052
RK
6409 return DECL_RTL (exp);
6410
6411 case INTEGER_CST:
d8a50944 6412 temp = immed_double_const (TREE_INT_CST_LOW (exp),
05bccae2 6413 TREE_INT_CST_HIGH (exp), mode);
bbf6f052 6414
d8a50944
RH
6415 /* ??? If overflow is set, fold will have done an incomplete job,
6416 which can result in (plus xx (const_int 0)), which can get
6417 simplified by validate_replace_rtx during virtual register
6418 instantiation, which can result in unrecognizable insns.
6419 Avoid this by forcing all overflows into registers. */
c2e9dc85
RH
6420 if (TREE_CONSTANT_OVERFLOW (exp)
6421 && modifier != EXPAND_INITIALIZER)
d8a50944
RH
6422 temp = force_reg (mode, temp);
6423
6424 return temp;
6425
d744e06e 6426 case VECTOR_CST:
3a021db2
PB
6427 if (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_INT
6428 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_FLOAT)
6429 return const_vector_from_tree (exp);
caf93cb0 6430 else
3a021db2
PB
6431 return expand_expr (build1 (CONSTRUCTOR, TREE_TYPE (exp),
6432 TREE_VECTOR_CST_ELTS (exp)),
6433 ignore ? const0_rtx : target, tmode, modifier);
d744e06e 6434
bbf6f052 6435 case CONST_DECL:
8403445a 6436 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
bbf6f052
RK
6437
6438 case REAL_CST:
6439 /* If optimized, generate immediate CONST_DOUBLE
3a94c984
KH
6440 which will be turned into memory by reload if necessary.
6441
bbf6f052
RK
6442 We used to force a register so that loop.c could see it. But
6443 this does not allow gen_* patterns to perform optimizations with
6444 the constants. It also produces two insns in cases like "x = 1.0;".
6445 On most machines, floating-point constants are not permitted in
6446 many insns, so we'd end up copying it to a register in any case.
6447
6448 Now, we do the copying in expand_binop, if appropriate. */
5692c7bc
ZW
6449 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6450 TYPE_MODE (TREE_TYPE (exp)));
bbf6f052
RK
6451
6452 case COMPLEX_CST:
9ad58e09
RS
6453 /* Handle evaluating a complex constant in a CONCAT target. */
6454 if (original_target && GET_CODE (original_target) == CONCAT)
6455 {
6456 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6457 rtx rtarg, itarg;
6458
6459 rtarg = XEXP (original_target, 0);
6460 itarg = XEXP (original_target, 1);
6461
6462 /* Move the real and imaginary parts separately. */
6463 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6464 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6465
6466 if (op0 != rtarg)
6467 emit_move_insn (rtarg, op0);
6468 if (op1 != itarg)
6469 emit_move_insn (itarg, op1);
6470
6471 return original_target;
6472 }
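 /* For illustration (hypothetical registers): a CONCAT target such
 as (concat:SC (reg:SF 61) (reg:SF 62)) holds the real and
 imaginary parts separately, so a constant like 1.0 + 2.0i becomes
 two plain scalar moves instead of one complex-mode move. */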
6473
71c0e7fc 6474 /* ... fall through ... */
9ad58e09 6475
bbf6f052 6476 case STRING_CST:
afc6aaab 6477 temp = output_constant_def (exp, 1);
bbf6f052 6478
afc6aaab 6479 /* temp contains a constant address.
bbf6f052
RK
6480 On RISC machines where a constant address isn't valid,
6481 make some insns to get that address into a register. */
afc6aaab 6482 if (modifier != EXPAND_CONST_ADDRESS
bbf6f052
RK
6483 && modifier != EXPAND_INITIALIZER
6484 && modifier != EXPAND_SUM
afc6aaab
ZW
6485 && (! memory_address_p (mode, XEXP (temp, 0))
6486 || flag_force_addr))
6487 return replace_equiv_address (temp,
6488 copy_rtx (XEXP (temp, 0)));
6489 return temp;
bbf6f052
RK
6490
6491 case SAVE_EXPR:
82c82743
RH
6492 {
6493 tree val = TREE_OPERAND (exp, 0);
6494 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
e5e809f4 6495
82c82743
RH
6496 if (TREE_CODE (val) != VAR_DECL || !DECL_ARTIFICIAL (val))
6497 {
6498 /* We can indeed still hit this case, typically via builtin
6499 expanders calling save_expr immediately before expanding
6500 something. Assume this means that we only have to deal
6501 with non-BLKmode values. */
6502 if (GET_MODE (ret) == BLKmode)
6503 abort ();
1499e0a8 6504
82c82743
RH
6505 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
6506 DECL_ARTIFICIAL (val) = 1;
6507 TREE_OPERAND (exp, 0) = val;
1499e0a8 6508
82c82743
RH
6509 if (!CONSTANT_P (ret))
6510 ret = copy_to_reg (ret);
6511 SET_DECL_RTL (val, ret);
6512 }
1499e0a8 6513
82c82743
RH
6514 return ret;
6515 }
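 /* A sketch of the effect above (ARG is a hypothetical argument):
 if a builtin expander wraps ARG in save_expr and the tree is
 expanded twice, the first expansion copies the value into a pseudo
 and rewrites the operand to an artificial VAR_DECL whose DECL_RTL
 is that pseudo; the second expansion then reuses the pseudo
 instead of re-evaluating ARG. */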
bbf6f052 6516
70e6ca43
APB
6517 case GOTO_EXPR:
6518 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6519 expand_goto (TREE_OPERAND (exp, 0));
6520 else
6521 expand_computed_goto (TREE_OPERAND (exp, 0));
6522 return const0_rtx;
6523
bbf6f052 6524 case CONSTRUCTOR:
dd27116b
RK
6525 /* If we don't need the result, just ensure we evaluate any
6526 subexpressions. */
6527 if (ignore)
6528 {
6529 tree elt;
37a08a29 6530
dd27116b 6531 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
37a08a29
RK
6532 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6533
dd27116b
RK
6534 return const0_rtx;
6535 }
3207b172 6536
4af3895e
JVA
6537 /* All elts simple constants => refer to a constant in memory. But
6538 if this is a non-BLKmode mode, let it store a field at a time
6539 since that should make a CONST_INT or CONST_DOUBLE when we
3207b172 6540 fold. Likewise, if we have a target we can use, it is best to
d720b9d1
RK
6541 store directly into the target unless the type is large enough
6542 that memcpy will be used. If we are making an initializer and
00182e1e
AH
6543 all operands are constant, put it in memory as well.
6544
6545 FIXME: Avoid trying to fill vector constructors piece-meal.
6546 Output them with output_constant_def below unless we're sure
6547 they're zeros. This should go away when vector initializers
6548 are treated like VECTOR_CST instead of arrays.
6549 */
dd27116b 6550 else if ((TREE_STATIC (exp)
3207b172 6551 && ((mode == BLKmode
e5e809f4 6552 && ! (target != 0 && safe_from_p (target, exp, 1)))
d720b9d1 6553 || TREE_ADDRESSABLE (exp)
19caa751 6554 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
3a94c984 6555 && (! MOVE_BY_PIECES_P
19caa751
RK
6556 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6557 TYPE_ALIGN (type)))
6de9cd9a 6558 && ! mostly_zeros_p (exp))))
f59700f9
RK
6559 || ((modifier == EXPAND_INITIALIZER
6560 || modifier == EXPAND_CONST_ADDRESS)
6561 && TREE_CONSTANT (exp)))
bbf6f052 6562 {
bd7cf17e 6563 rtx constructor = output_constant_def (exp, 1);
19caa751 6564
b552441b
RS
6565 if (modifier != EXPAND_CONST_ADDRESS
6566 && modifier != EXPAND_INITIALIZER
792760b9
RK
6567 && modifier != EXPAND_SUM)
6568 constructor = validize_mem (constructor);
6569
bbf6f052
RK
6570 return constructor;
6571 }
bbf6f052
RK
6572 else
6573 {
e9ac02a6
JW
6574 /* Handle calls that pass values in multiple non-contiguous
6575 locations. The Irix 6 ABI has examples of this. */
e5e809f4 6576 if (target == 0 || ! safe_from_p (target, exp, 1)
8403445a
AM
6577 || GET_CODE (target) == PARALLEL
6578 || modifier == EXPAND_STACK_PARM)
1da68f56
RK
6579 target
6580 = assign_temp (build_qualified_type (type,
6581 (TYPE_QUALS (type)
6582 | (TREE_READONLY (exp)
6583 * TYPE_QUAL_CONST))),
c24ae149 6584 0, TREE_ADDRESSABLE (exp), 1);
07604beb 6585
dbb5c281 6586 store_constructor (exp, target, 0, int_expr_size (exp));
bbf6f052
RK
6587 return target;
6588 }
6589
6590 case INDIRECT_REF:
6591 {
6592 tree exp1 = TREE_OPERAND (exp, 0);
3a94c984 6593
6de9cd9a
DN
6594 if (modifier != EXPAND_WRITE)
6595 {
6596 tree t;
6597
6598 t = fold_read_from_constant_string (exp);
6599 if (t)
6600 return expand_expr (t, target, tmode, modifier);
6601 }
bbf6f052 6602
405f0da6
JW
6603 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6604 op0 = memory_address (mode, op0);
38a448ca 6605 temp = gen_rtx_MEM (mode, op0);
3bdf5ad1 6606 set_mem_attributes (temp, exp, 0);
1125706f 6607
14a774a9
RK
6608 /* If we are writing to this object and its type is a record with
6609 readonly fields, we must mark it as readonly so it will
6610 conflict with readonly references to those fields. */
37a08a29 6611 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
14a774a9
RK
6612 RTX_UNCHANGING_P (temp) = 1;
6613
8c8a8e34
JW
6614 return temp;
6615 }
bbf6f052
RK
6616
6617 case ARRAY_REF:
6de9cd9a
DN
6618
6619#ifdef ENABLE_CHECKING
742920c7
RK
6620 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6621 abort ();
6de9cd9a 6622#endif
bbf6f052 6623
bbf6f052 6624 {
742920c7 6625 tree array = TREE_OPERAND (exp, 0);
44de5aeb 6626 tree low_bound = array_ref_low_bound (exp);
fed3cef0 6627 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
08293add 6628 HOST_WIDE_INT i;
b50d17a1 6629
d4c89139
PB
6630 /* Optimize the special-case of a zero lower bound.
6631
6632 We convert the low_bound to sizetype to avoid some problems
6633 with constant folding. (E.g. suppose the lower bound is 1,
6634 and its mode is QI. Without the conversion, (ARRAY
6635 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
fed3cef0 6636 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
d4c89139 6637
742920c7 6638 if (! integer_zerop (low_bound))
fed3cef0 6639 index = size_diffop (index, convert (sizetype, low_bound));
742920c7 6640
742920c7 6641 /* Fold an expression like: "foo"[2].
ad2e7dd0
RK
6642 This is not done in fold so it won't happen inside &.
6643 Don't fold if this is for wide characters since it's too
6644 difficult to do correctly and this is a very rare case. */
742920c7 6645
017e1b43
RH
6646 if (modifier != EXPAND_CONST_ADDRESS
6647 && modifier != EXPAND_INITIALIZER
6de9cd9a
DN
6648 && modifier != EXPAND_MEMORY)
6649 {
6650 tree t = fold_read_from_constant_string (exp);
6651
6652 if (t)
6653 return expand_expr (t, target, tmode, modifier);
6654 }
bbf6f052 6655
742920c7
RK
6656 /* If this is a constant index into a constant array,
6657 just get the value from the array. Handle both the cases when
6658 we have an explicit constructor and when our operand is a variable
6659 that was declared const. */
4af3895e 6660
017e1b43
RH
6661 if (modifier != EXPAND_CONST_ADDRESS
6662 && modifier != EXPAND_INITIALIZER
6663 && modifier != EXPAND_MEMORY
6664 && TREE_CODE (array) == CONSTRUCTOR
6665 && ! TREE_SIDE_EFFECTS (array)
05bccae2 6666 && TREE_CODE (index) == INTEGER_CST
3a94c984 6667 && 0 > compare_tree_int (index,
05bccae2
RK
6668 list_length (CONSTRUCTOR_ELTS
6669 (TREE_OPERAND (exp, 0)))))
742920c7 6670 {
05bccae2
RK
6671 tree elem;
6672
6673 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6674 i = TREE_INT_CST_LOW (index);
6675 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6676 ;
6677
6678 if (elem)
37a08a29
RK
6679 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6680 modifier);
742920c7 6681 }
3a94c984 6682
742920c7 6683 else if (optimize >= 1
cb5fa0f8
RK
6684 && modifier != EXPAND_CONST_ADDRESS
6685 && modifier != EXPAND_INITIALIZER
017e1b43 6686 && modifier != EXPAND_MEMORY
742920c7
RK
6687 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6688 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
beb0c2e0
RH
6689 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
6690 && targetm.binds_local_p (array))
742920c7 6691 {
08293add 6692 if (TREE_CODE (index) == INTEGER_CST)
742920c7
RK
6693 {
6694 tree init = DECL_INITIAL (array);
6695
742920c7
RK
6696 if (TREE_CODE (init) == CONSTRUCTOR)
6697 {
665f2503 6698 tree elem;
742920c7 6699
05bccae2 6700 for (elem = CONSTRUCTOR_ELTS (init);
5cb1bea4
JM
6701 (elem
6702 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
05bccae2
RK
6703 elem = TREE_CHAIN (elem))
6704 ;
6705
c54b0a5e 6706 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
742920c7 6707 return expand_expr (fold (TREE_VALUE (elem)), target,
37a08a29 6708 tmode, modifier);
742920c7
RK
6709 }
6710 else if (TREE_CODE (init) == STRING_CST
05bccae2
RK
6711 && 0 > compare_tree_int (index,
6712 TREE_STRING_LENGTH (init)))
5c80f6e6
JJ
6713 {
6714 tree type = TREE_TYPE (TREE_TYPE (init));
6715 enum machine_mode mode = TYPE_MODE (type);
6716
6717 if (GET_MODE_CLASS (mode) == MODE_INT
6718 && GET_MODE_SIZE (mode) == 1)
21ef78aa
DE
6719 return gen_int_mode (TREE_STRING_POINTER (init)
6720 [TREE_INT_CST_LOW (index)], mode);
5c80f6e6 6721 }
742920c7
RK
6722 }
6723 }
6724 }
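 /* An illustrative case of the folding above (hypothetical
 variable):

 static const char msg[] = "hi";
 ... msg[1] ...

 becomes the QImode constant 'i' via gen_int_mode, with no memory
 reference, since the index is a constant below TREE_STRING_LENGTH
 and the array binds locally. */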
afc6aaab 6725 goto normal_inner_ref;
bbf6f052
RK
6726
6727 case COMPONENT_REF:
4af3895e 6728 /* If the operand is a CONSTRUCTOR, we can just extract the
afc6aaab
ZW
6729 appropriate field if it is present. */
6730 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
4af3895e
JVA
6731 {
6732 tree elt;
6733
6734 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6735 elt = TREE_CHAIN (elt))
86b5812c
RK
6736 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6737 /* We can normally use the value of the field in the
6738 CONSTRUCTOR. However, if this is a bitfield in
6739 an integral mode that we can fit in a HOST_WIDE_INT,
6740 we must mask only the number of bits in the bitfield,
6741 since this is done implicitly by the constructor. If
6742 the bitfield does not meet either of those conditions,
6743 we can't do this optimization. */
6744 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6745 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6746 == MODE_INT)
6747 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6748 <= HOST_BITS_PER_WIDE_INT))))
6749 {
8403445a
AM
6750 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
6751 && modifier == EXPAND_STACK_PARM)
6752 target = 0;
3a94c984 6753 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
86b5812c
RK
6754 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6755 {
9df2c88c
RK
6756 HOST_WIDE_INT bitsize
6757 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
22273300
JJ
6758 enum machine_mode imode
6759 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
86b5812c 6760
8df83eae 6761 if (TYPE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
86b5812c
RK
6762 {
6763 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
22273300 6764 op0 = expand_and (imode, op0, op1, target);
86b5812c
RK
6765 }
6766 else
6767 {
6768 tree count
e5e809f4
JL
6769 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6770 0);
86b5812c
RK
6771
6772 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6773 target, 0);
6774 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6775 target, 0);
6776 }
6777 }
6778
6779 return op0;
6780 }
4af3895e 6781 }
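 /* A sketch of the masking above, assuming a hypothetical 5-bit
 field whose CONSTRUCTOR value was expanded in 32-bit SImode:

 unsigned field: op0 &= (1 << 5) - 1; -- expand_and
 signed field: op0 = (op0 << 27) >> 27; -- two expand_shifts

 where 27 == GET_MODE_BITSIZE (imode) - bitsize. */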
afc6aaab 6782 goto normal_inner_ref;
4af3895e 6783
afc6aaab
ZW
6784 case BIT_FIELD_REF:
6785 case ARRAY_RANGE_REF:
6786 normal_inner_ref:
bbf6f052
RK
6787 {
6788 enum machine_mode mode1;
770ae6cc 6789 HOST_WIDE_INT bitsize, bitpos;
7bb0943f 6790 tree offset;
bbf6f052 6791 int volatilep = 0;
839c4796 6792 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
a06ef755 6793 &mode1, &unsignedp, &volatilep);
f47e9b4e 6794 rtx orig_op0;
bbf6f052 6795
e7f3c83f
RK
6796 /* If we got back the original object, something is wrong. Perhaps
6797 we are evaluating an expression too early. In any event, don't
6798 infinitely recurse. */
6799 if (tem == exp)
6800 abort ();
6801
3d27140a 6802 /* If TEM's type is a union of variable size, pass TARGET to the inner
b74f5ff2
RK
6803 computation, since it will need a temporary and TARGET is known
 6804 to be usable as one. This occurs in unchecked conversion in Ada. */
3a94c984 6805
f47e9b4e
RK
6806 orig_op0 = op0
6807 = expand_expr (tem,
6808 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6809 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6810 != INTEGER_CST)
8403445a 6811 && modifier != EXPAND_STACK_PARM
f47e9b4e
RK
6812 ? target : NULL_RTX),
6813 VOIDmode,
6814 (modifier == EXPAND_INITIALIZER
8403445a
AM
6815 || modifier == EXPAND_CONST_ADDRESS
6816 || modifier == EXPAND_STACK_PARM)
f47e9b4e 6817 ? modifier : EXPAND_NORMAL);
bbf6f052 6818
8c8a8e34 6819 /* If this is a constant, put it into a register if it is a
14a774a9 6820 legitimate constant and OFFSET is 0, and into memory if it isn't. */
8c8a8e34
JW
6821 if (CONSTANT_P (op0))
6822 {
6823 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
14a774a9
RK
6824 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6825 && offset == 0)
8c8a8e34
JW
6826 op0 = force_reg (mode, op0);
6827 else
6828 op0 = validize_mem (force_const_mem (mode, op0));
6829 }
6830
8d2e5f72
RK
 6831 /* Otherwise, if this object is not in memory and we either have an
6832 offset or a BLKmode result, put it there. This case can't occur in
6833 C, but can in Ada if we have unchecked conversion of an expression
6834 from a scalar type to an array or record type or for an
6835 ARRAY_RANGE_REF whose type is BLKmode. */
3c0cb5de 6836 else if (!MEM_P (op0)
8d2e5f72
RK
6837 && (offset != 0
6838 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
6839 {
82c82743
RH
6840 tree nt = build_qualified_type (TREE_TYPE (tem),
6841 (TYPE_QUALS (TREE_TYPE (tem))
6842 | TYPE_QUAL_CONST));
6843 rtx memloc = assign_temp (nt, 1, 1, 1);
450b1728 6844
82c82743
RH
6845 emit_move_insn (memloc, op0);
6846 op0 = memloc;
8d2e5f72
RK
6847 }
6848
7bb0943f
RS
6849 if (offset != 0)
6850 {
8403445a
AM
6851 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
6852 EXPAND_SUM);
7bb0943f 6853
3c0cb5de 6854 if (!MEM_P (op0))
7bb0943f 6855 abort ();
2d48c13d 6856
2d48c13d 6857#ifdef POINTERS_EXTEND_UNSIGNED
4b6c1672 6858 if (GET_MODE (offset_rtx) != Pmode)
267b28bd 6859 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
fa06ab5c
RK
6860#else
6861 if (GET_MODE (offset_rtx) != ptr_mode)
6862 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
2d48c13d
JL
6863#endif
6864
e82407b5
EB
6865 if (GET_MODE (op0) == BLKmode
6866 /* A constant address in OP0 can have VOIDmode, we must
6867 not try to call force_reg in that case. */
efd07ca7 6868 && GET_MODE (XEXP (op0, 0)) != VOIDmode
14a774a9 6869 && bitsize != 0
3a94c984 6870 && (bitpos % bitsize) == 0
89752202 6871 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
a06ef755 6872 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
89752202 6873 {
e3c8ea67 6874 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
89752202
HB
6875 bitpos = 0;
6876 }
6877
0d4903b8
RK
6878 op0 = offset_address (op0, offset_rtx,
6879 highest_pow2_factor (offset));
7bb0943f
RS
6880 }
6881
1ce7f3c2
RK
6882 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
6883 record its alignment as BIGGEST_ALIGNMENT. */
3c0cb5de 6884 if (MEM_P (op0) && bitpos == 0 && offset != 0
1ce7f3c2
RK
6885 && is_aligning_offset (offset, tem))
6886 set_mem_align (op0, BIGGEST_ALIGNMENT);
6887
bbf6f052 6888 /* Don't forget about volatility even if this is a bitfield. */
3c0cb5de 6889 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
bbf6f052 6890 {
f47e9b4e
RK
6891 if (op0 == orig_op0)
6892 op0 = copy_rtx (op0);
6893
bbf6f052
RK
6894 MEM_VOLATILE_P (op0) = 1;
6895 }
6896
010f87c4
JJ
6897 /* The following code doesn't handle CONCAT.
6898 Assume only bitpos == 0 can be used for CONCAT, due to
 6899 one-element arrays having the same mode as their element. */
6900 if (GET_CODE (op0) == CONCAT)
6901 {
6902 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
6903 abort ();
6904 return op0;
6905 }
6906
ccc98036
RS
6907 /* In cases where an aligned union has an unaligned object
6908 as a field, we might be extracting a BLKmode value from
6909 an integer-mode (e.g., SImode) object. Handle this case
6910 by doing the extract into an object as wide as the field
6911 (which we know to be the width of a basic mode), then
cb5fa0f8 6912 storing into memory, and changing the mode to BLKmode. */
bbf6f052 6913 if (mode1 == VOIDmode
f8cfc6aa 6914 || REG_P (op0) || GET_CODE (op0) == SUBREG
cb5fa0f8
RK
6915 || (mode1 != BLKmode && ! direct_load[(int) mode1]
6916 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
10c2a453
RK
6917 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
6918 && modifier != EXPAND_CONST_ADDRESS
6919 && modifier != EXPAND_INITIALIZER)
cb5fa0f8
RK
6920 /* If the field isn't aligned enough to fetch as a memref,
6921 fetch it as a bit field. */
6922 || (mode1 != BLKmode
9e5f281f 6923 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
e82407b5 6924 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
3c0cb5de 6925 || (MEM_P (op0)
e82407b5
EB
6926 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
6927 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
a8f3bf52
RK
6928 && ((modifier == EXPAND_CONST_ADDRESS
6929 || modifier == EXPAND_INITIALIZER)
6930 ? STRICT_ALIGNMENT
6931 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
9e5f281f 6932 || (bitpos % BITS_PER_UNIT != 0)))
cb5fa0f8
RK
6933 /* If the type and the field are a constant size and the
6934 size of the type isn't the same size as the bitfield,
6935 we must use bitfield operations. */
6936 || (bitsize >= 0
dbe4d070
RH
6937 && TYPE_SIZE (TREE_TYPE (exp))
6938 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
cb5fa0f8 6939 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
a06ef755 6940 bitsize)))
bbf6f052 6941 {
bbf6f052
RK
6942 enum machine_mode ext_mode = mode;
6943
14a774a9 6944 if (ext_mode == BLKmode
3c0cb5de
JQ
6945 && ! (target != 0 && MEM_P (op0)
6946 && MEM_P (target)
14a774a9 6947 && bitpos % BITS_PER_UNIT == 0))
bbf6f052
RK
6948 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6949
6950 if (ext_mode == BLKmode)
a281e72d 6951 {
7a06d606
RK
6952 if (target == 0)
6953 target = assign_temp (type, 0, 1, 1);
6954
6955 if (bitsize == 0)
6956 return target;
6957
a281e72d
RK
6958 /* In this case, BITPOS must start at a byte boundary and
6959 TARGET, if specified, must be a MEM. */
3c0cb5de
JQ
6960 if (!MEM_P (op0)
6961 || (target != 0 && !MEM_P (target))
a281e72d
RK
6962 || bitpos % BITS_PER_UNIT != 0)
6963 abort ();
6964
7a06d606
RK
6965 emit_block_move (target,
6966 adjust_address (op0, VOIDmode,
6967 bitpos / BITS_PER_UNIT),
a06ef755 6968 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
44bb111a 6969 / BITS_PER_UNIT),
8403445a
AM
6970 (modifier == EXPAND_STACK_PARM
6971 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
3a94c984 6972
a281e72d
RK
6973 return target;
6974 }
bbf6f052 6975
dc6d66b3
RK
6976 op0 = validize_mem (op0);
6977
3c0cb5de 6978 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
04050c69 6979 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
dc6d66b3 6980
8403445a
AM
6981 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
6982 (modifier == EXPAND_STACK_PARM
6983 ? NULL_RTX : target),
b3520980 6984 ext_mode, ext_mode);
ef19912d
RK
6985
6986 /* If the result is a record type and BITSIZE is narrower than
6987 the mode of OP0, an integral mode, and this is a big endian
6988 machine, we must put the field into the high-order bits. */
6989 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6990 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
65a07688 6991 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
ef19912d
RK
6992 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6993 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6994 - bitsize),
6995 op0, 1);
6996
daae8185
RK
6997 /* If the result type is BLKmode, store the data into a temporary
6998 of the appropriate type, but with the mode corresponding to the
6999 mode for the data we have (op0's mode). It's tempting to make
7000 this a constant type, since we know it's only being stored once,
7001 but that can cause problems if we are taking the address of this
7002 COMPONENT_REF because the MEM of any reference via that address
7003 will have flags corresponding to the type, which will not
7004 necessarily be constant. */
bbf6f052
RK
7005 if (mode == BLKmode)
7006 {
daae8185
RK
7007 rtx new
7008 = assign_stack_temp_for_type
7009 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
bbf6f052
RK
7010
7011 emit_move_insn (new, op0);
7012 op0 = copy_rtx (new);
7013 PUT_MODE (op0, BLKmode);
c3d32120 7014 set_mem_attributes (op0, exp, 1);
bbf6f052
RK
7015 }
7016
7017 return op0;
7018 }
7019
05019f83
RK
7020 /* If the result is BLKmode, use that to access the object
7021 now as well. */
7022 if (mode == BLKmode)
7023 mode1 = BLKmode;
7024
bbf6f052
RK
7025 /* Get a reference to just this component. */
7026 if (modifier == EXPAND_CONST_ADDRESS
7027 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
f1ec5147 7028 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
bbf6f052 7029 else
f4ef873c 7030 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
41472af8 7031
f47e9b4e
RK
7032 if (op0 == orig_op0)
7033 op0 = copy_rtx (op0);
7034
3bdf5ad1 7035 set_mem_attributes (op0, exp, 0);
f8cfc6aa 7036 if (REG_P (XEXP (op0, 0)))
a06ef755 7037 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
dc6d66b3 7038
bbf6f052 7039 MEM_VOLATILE_P (op0) |= volatilep;
0d15e60c 7040 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
08bbd316 7041 || modifier == EXPAND_CONST_ADDRESS
0d15e60c 7042 || modifier == EXPAND_INITIALIZER)
bbf6f052 7043 return op0;
0d15e60c 7044 else if (target == 0)
bbf6f052 7045 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
0d15e60c 7046
bbf6f052
RK
7047 convert_move (target, op0, unsignedp);
7048 return target;
7049 }
7050
0f59171d
RH
7051 case OBJ_TYPE_REF:
7052 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
4a8d0c9c 7053
bbf6f052
RK
7054 case CALL_EXPR:
7055 /* Check for a built-in function. */
7056 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
d6a5ac33
RK
7057 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7058 == FUNCTION_DECL)
bbf6f052 7059 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
0fb7aeda 7060 {
c70eaeaf
KG
7061 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7062 == BUILT_IN_FRONTEND)
673fda6b
SB
7063 return lang_hooks.expand_expr (exp, original_target,
7064 tmode, modifier,
7065 alt_rtl);
c70eaeaf
KG
7066 else
7067 return expand_builtin (exp, target, subtarget, tmode, ignore);
7068 }
d6a5ac33 7069
8129842c 7070 return expand_call (exp, target, ignore);
bbf6f052
RK
7071
7072 case NON_LVALUE_EXPR:
7073 case NOP_EXPR:
7074 case CONVERT_EXPR:
4a53008b 7075 if (TREE_OPERAND (exp, 0) == error_mark_node)
a592f288 7076 return const0_rtx;
4a53008b 7077
bbf6f052
RK
7078 if (TREE_CODE (type) == UNION_TYPE)
7079 {
7080 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
14a774a9 7081
c3d32120
RK
7082 /* If both input and output are BLKmode, this conversion isn't doing
7083 anything except possibly changing memory attribute. */
7084 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7085 {
7086 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7087 modifier);
7088
7089 result = copy_rtx (result);
7090 set_mem_attributes (result, exp, 0);
7091 return result;
7092 }
14a774a9 7093
bbf6f052 7094 if (target == 0)
cf7cb67e
JH
7095 {
7096 if (TYPE_MODE (type) != BLKmode)
7097 target = gen_reg_rtx (TYPE_MODE (type));
7098 else
7099 target = assign_temp (type, 0, 1, 1);
7100 }
d6a5ac33 7101
3c0cb5de 7102 if (MEM_P (target))
bbf6f052
RK
7103 /* Store data into beginning of memory target. */
7104 store_expr (TREE_OPERAND (exp, 0),
8403445a
AM
7105 adjust_address (target, TYPE_MODE (valtype), 0),
7106 modifier == EXPAND_STACK_PARM ? 2 : 0);
1499e0a8 7107
f8cfc6aa 7108 else if (REG_P (target))
bbf6f052 7109 /* Store this field into a union of the proper type. */
14a774a9
RK
7110 store_field (target,
7111 MIN ((int_size_in_bytes (TREE_TYPE
7112 (TREE_OPERAND (exp, 0)))
7113 * BITS_PER_UNIT),
8752c357 7114 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
14a774a9 7115 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
a06ef755 7116 VOIDmode, 0, type, 0);
bbf6f052
RK
7117 else
7118 abort ();
7119
7120 /* Return the entire union. */
7121 return target;
7122 }
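 /* For illustration: this UNION_TYPE path covers conversions such
 as the GNU cast-to-union extension,

 union u { int i; float f; };
 ... (union u) some_int ...

 where the operand is stored into the start of the union's storage
 (or into the proper field of a register target) and the whole
 union is returned. */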
d6a5ac33 7123
7f62854a
RK
7124 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7125 {
7126 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
37a08a29 7127 modifier);
7f62854a
RK
7128
7129 /* If the signedness of the conversion differs and OP0 is
7130 a promoted SUBREG, clear that indication since we now
7131 have to do the proper extension. */
8df83eae 7132 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7f62854a
RK
7133 && GET_CODE (op0) == SUBREG)
7134 SUBREG_PROMOTED_VAR_P (op0) = 0;
7135
bc15d0ef 7136 return REDUCE_BIT_FIELD (op0);
7f62854a
RK
7137 }
7138
fdf473ae 7139 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
bc15d0ef 7140 op0 = REDUCE_BIT_FIELD (op0);
12342f90
RS
7141 if (GET_MODE (op0) == mode)
7142 return op0;
12342f90 7143
d6a5ac33
RK
7144 /* If OP0 is a constant, just convert it into the proper mode. */
7145 if (CONSTANT_P (op0))
fdf473ae
RH
7146 {
7147 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7148 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7149
0fb7aeda 7150 if (modifier == EXPAND_INITIALIZER)
fdf473ae
RH
7151 return simplify_gen_subreg (mode, op0, inner_mode,
7152 subreg_lowpart_offset (mode,
7153 inner_mode));
7154 else
7155 return convert_modes (mode, inner_mode, op0,
8df83eae 7156 TYPE_UNSIGNED (inner_type));
fdf473ae 7157 }
12342f90 7158
26fcb35a 7159 if (modifier == EXPAND_INITIALIZER)
38a448ca 7160 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
d6a5ac33 7161
bbf6f052 7162 if (target == 0)
d6a5ac33
RK
7163 return
7164 convert_to_mode (mode, op0,
8df83eae 7165 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
bbf6f052 7166 else
d6a5ac33 7167 convert_move (target, op0,
8df83eae 7168 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
bbf6f052
RK
7169 return target;
7170
ed239f5a 7171 case VIEW_CONVERT_EXPR:
37a08a29 7172 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
ed239f5a
RK
7173
7174 /* If the input and output modes are both the same, we are done.
13cf99ec
RK
7175 Otherwise, if neither mode is BLKmode and both are integral and within
7176 a word, we can use gen_lowpart. If neither is true, make sure the
7177 operand is in memory and convert the MEM to the new mode. */
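 /* For illustration: a float viewed as an int (e.g. via Ada
 Unchecked_Conversion) cannot use gen_lowpart, SFmode not being
 MODE_INT, so the operand is forced into memory and re-read in
 SImode, which gives the required bit-for-bit reinterpretation. */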
ed239f5a
RK
7178 if (TYPE_MODE (type) == GET_MODE (op0))
7179 ;
7180 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
13cf99ec
RK
7181 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7182 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
ed239f5a
RK
7183 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7184 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7185 op0 = gen_lowpart (TYPE_MODE (type), op0);
3c0cb5de 7186 else if (!MEM_P (op0))
ed239f5a 7187 {
c11c10d8
RK
7188 /* If the operand is not a MEM, force it into memory. Since we
 7189 are going to be changing the mode of the MEM, don't call
7190 force_const_mem for constants because we don't allow pool
7191 constants to change mode. */
ed239f5a 7192 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
ed239f5a 7193
c11c10d8
RK
7194 if (TREE_ADDRESSABLE (exp))
7195 abort ();
ed239f5a 7196
c11c10d8
RK
7197 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7198 target
7199 = assign_stack_temp_for_type
7200 (TYPE_MODE (inner_type),
7201 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
ed239f5a 7202
c11c10d8
RK
7203 emit_move_insn (target, op0);
7204 op0 = target;
ed239f5a
RK
7205 }
7206
c11c10d8
RK
7207 /* At this point, OP0 is in the correct mode. If the output type is such
7208 that the operand is known to be aligned, indicate that it is.
7209 Otherwise, we need only be concerned about alignment for non-BLKmode
7210 results. */
3c0cb5de 7211 if (MEM_P (op0))
ed239f5a
RK
7212 {
7213 op0 = copy_rtx (op0);
7214
ed239f5a
RK
7215 if (TYPE_ALIGN_OK (type))
7216 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7217 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7218 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7219 {
7220 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
65a07688
RK
7221 HOST_WIDE_INT temp_size
7222 = MAX (int_size_in_bytes (inner_type),
7223 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
ed239f5a
RK
7224 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7225 temp_size, 0, type);
c4e59f51 7226 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
ed239f5a 7227
c11c10d8
RK
7228 if (TREE_ADDRESSABLE (exp))
7229 abort ();
7230
ed239f5a
RK
7231 if (GET_MODE (op0) == BLKmode)
7232 emit_block_move (new_with_op0_mode, op0,
44bb111a 7233 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
8403445a
AM
7234 (modifier == EXPAND_STACK_PARM
7235 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
ed239f5a
RK
7236 else
7237 emit_move_insn (new_with_op0_mode, op0);
7238
7239 op0 = new;
7240 }
0fb7aeda 7241
c4e59f51 7242 op0 = adjust_address (op0, TYPE_MODE (type), 0);
ed239f5a
RK
7243 }
7244
7245 return op0;
7246
bbf6f052 7247 case PLUS_EXPR:
4dfa0342 7248 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
bbf6f052
RK
7249 something else, make sure we add the register to the constant and
7250 then to the other thing. This case can occur during strength
7251 reduction and doing it this way will produce better code if the
7252 frame pointer or argument pointer is eliminated.
7253
7254 fold-const.c will ensure that the constant is always in the inner
7255 PLUS_EXPR, so the only case we need to do anything about is if
7256 sp, ap, or fp is our second argument, in which case we must swap
7257 the innermost first argument and our second argument. */
7258
7259 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7260 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
4dfa0342
RH
7261 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
7262 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7263 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7264 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
bbf6f052
RK
7265 {
7266 tree t = TREE_OPERAND (exp, 1);
7267
7268 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7269 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7270 }
7271
88f63c77 7272 /* If the result is to be ptr_mode and we are adding an integer to
bbf6f052
RK
7273 something, we might be forming a constant. So try to use
7274 plus_constant. If it produces a sum and we can't accept it,
7275 use force_operand. This allows P = &ARR[const] to generate
7276 efficient code on machines where a SYMBOL_REF is not a valid
7277 address.
7278
7279 If this is an EXPAND_SUM call, always return the sum. */
c980ac49 7280 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
0fb7aeda 7281 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
bbf6f052 7282 {
8403445a
AM
7283 if (modifier == EXPAND_STACK_PARM)
7284 target = 0;
c980ac49
RS
7285 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7286 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7287 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7288 {
cbbc503e
JL
7289 rtx constant_part;
7290
c980ac49
RS
7291 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7292 EXPAND_SUM);
cbbc503e
JL
7293 /* Use immed_double_const to ensure that the constant is
7294 truncated according to the mode of OP1, then sign extended
7295 to a HOST_WIDE_INT. Using the constant directly can result
7296 in non-canonical RTL in a 64x32 cross compile. */
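 /* A worked example of the concern above: on a 64-bit host
 targeting a 32-bit machine, the SImode constant 0xffffffff must
 become the canonical sign-extended (const_int -1); wrapping the
 raw low word directly would leave a non-canonical CONST_INT. */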
7297 constant_part
7298 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7299 (HOST_WIDE_INT) 0,
a5efcd63 7300 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7f401c74 7301 op1 = plus_constant (op1, INTVAL (constant_part));
c980ac49
RS
7302 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7303 op1 = force_operand (op1, target);
bc15d0ef 7304 return REDUCE_BIT_FIELD (op1);
c980ac49 7305 }
bbf6f052 7306
c980ac49
RS
7307 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7308 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7309 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7310 {
cbbc503e
JL
7311 rtx constant_part;
7312
c980ac49 7313 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
70d95bac
RH
7314 (modifier == EXPAND_INITIALIZER
7315 ? EXPAND_INITIALIZER : EXPAND_SUM));
c980ac49
RS
7316 if (! CONSTANT_P (op0))
7317 {
7318 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7319 VOIDmode, modifier);
f0e9957a
RS
7320 /* Return a PLUS if modifier says it's OK. */
7321 if (modifier == EXPAND_SUM
7322 || modifier == EXPAND_INITIALIZER)
7323 return simplify_gen_binary (PLUS, mode, op0, op1);
7324 goto binop2;
c980ac49 7325 }
cbbc503e
JL
7326 /* Use immed_double_const to ensure that the constant is
7327 truncated according to the mode of OP1, then sign extended
7328 to a HOST_WIDE_INT. Using the constant directly can result
7329 in non-canonical RTL in a 64x32 cross compile. */
7330 constant_part
7331 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7332 (HOST_WIDE_INT) 0,
2a94e396 7333 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7f401c74 7334 op0 = plus_constant (op0, INTVAL (constant_part));
c980ac49
RS
7335 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7336 op0 = force_operand (op0, target);
bc15d0ef 7337 return REDUCE_BIT_FIELD (op0);
c980ac49 7338 }
bbf6f052
RK
7339 }
7340
7341 /* No sense saving up arithmetic to be done
7342 if it's all in the wrong mode to form part of an address.
7343 And force_operand won't know whether to sign-extend or
7344 zero-extend. */
7345 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
88f63c77 7346 || mode != ptr_mode)
4ef7870a 7347 {
eb698c58
RS
7348 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7349 subtarget, &op0, &op1, 0);
6e7727eb
EB
7350 if (op0 == const0_rtx)
7351 return op1;
7352 if (op1 == const0_rtx)
7353 return op0;
4ef7870a
EB
7354 goto binop2;
7355 }
bbf6f052 7356
eb698c58
RS
7357 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7358 subtarget, &op0, &op1, modifier);
bc15d0ef 7359 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
bbf6f052
RK
7360
7361 case MINUS_EXPR:
ea87523e
RK
7362 /* For initializers, we are allowed to return a MINUS of two
7363 symbolic constants. Here we handle all cases when both operands
7364 are constant. */
bbf6f052
RK
7365 /* Handle difference of two symbolic constants,
7366 for the sake of an initializer. */
7367 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7368 && really_constant_p (TREE_OPERAND (exp, 0))
7369 && really_constant_p (TREE_OPERAND (exp, 1)))
7370 {
eb698c58
RS
7371 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7372 NULL_RTX, &op0, &op1, modifier);
ea87523e 7373
ea87523e
RK
7374 /* If the last operand is a CONST_INT, use plus_constant of
7375 the negated constant. Else make the MINUS. */
7376 if (GET_CODE (op1) == CONST_INT)
bc15d0ef 7377 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
ea87523e 7378 else
bc15d0ef 7379 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
bbf6f052 7380 }
ae431183 7381
1717e19e
UW
7382 /* No sense saving up arithmetic to be done
7383 if it's all in the wrong mode to form part of an address.
7384 And force_operand won't know whether to sign-extend or
7385 zero-extend. */
7386 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7387 || mode != ptr_mode)
7388 goto binop;
7389
eb698c58
RS
7390 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7391 subtarget, &op0, &op1, modifier);
1717e19e
UW
7392
7393 /* Convert A - const to A + (-const). */
7394 if (GET_CODE (op1) == CONST_INT)
7395 {
7396 op1 = negate_rtx (mode, op1);
bc15d0ef 7397 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
1717e19e
UW
7398 }
7399
7400 goto binop2;
bbf6f052
RK
7401
7402 case MULT_EXPR:
bbf6f052
RK
7403 /* If first operand is constant, swap them.
7404 Thus the following special case checks need only
7405 check the second operand. */
7406 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7407 {
b3694847 7408 tree t1 = TREE_OPERAND (exp, 0);
bbf6f052
RK
7409 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7410 TREE_OPERAND (exp, 1) = t1;
7411 }
7412
7413 /* Attempt to return something suitable for generating an
7414 indexed address, for machines that support that. */
7415
88f63c77 7416 if (modifier == EXPAND_SUM && mode == ptr_mode
3b40e71b 7417 && host_integerp (TREE_OPERAND (exp, 1), 0))
bbf6f052 7418 {
48a5f2fa
DJ
7419 tree exp1 = TREE_OPERAND (exp, 1);
7420
921b3427
RK
7421 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7422 EXPAND_SUM);
bbf6f052 7423
f8cfc6aa 7424 if (!REG_P (op0))
906c4e36 7425 op0 = force_operand (op0, NULL_RTX);
f8cfc6aa 7426 if (!REG_P (op0))
bbf6f052
RK
7427 op0 = copy_to_mode_reg (mode, op0);
7428
bc15d0ef 7429 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
48a5f2fa 7430 gen_int_mode (tree_low_cst (exp1, 0),
bc15d0ef 7431 TYPE_MODE (TREE_TYPE (exp1)))));
bbf6f052
RK
7432 }
7433
8403445a
AM
7434 if (modifier == EXPAND_STACK_PARM)
7435 target = 0;
7436
bbf6f052
RK
7437 /* Check for multiplying things that have been extended
7438 from a narrower type. If this machine supports multiplying
7439 in that narrower type with a result in the desired type,
7440 do it that way, and avoid the explicit type-conversion. */
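 /* The classic instance (hypothetical variables):

 short a, b;
 int p = (int) a * (int) b;

 where both NOP_EXPRs widen HImode to SImode; on a target with a
 widening multiply pattern (e.g. mulhisi3), this emits one widening
 multiply instead of two extensions plus a full SImode multiply. */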
7441 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7442 && TREE_CODE (type) == INTEGER_TYPE
7443 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7444 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7445 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7446 && int_fits_type_p (TREE_OPERAND (exp, 1),
7447 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7448 /* Don't use a widening multiply if a shift will do. */
7449 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
906c4e36 7450 > HOST_BITS_PER_WIDE_INT)
bbf6f052
RK
7451 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7452 ||
7453 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8df83eae
RK
7454 && (TYPE_PRECISION (TREE_TYPE
7455 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7456 == TYPE_PRECISION (TREE_TYPE
7457 (TREE_OPERAND
7458 (TREE_OPERAND (exp, 0), 0))))
bbf6f052
RK
7459 /* If both operands are extended, they must either both
7460 be zero-extended or both be sign-extended. */
8df83eae
RK
7461 && (TYPE_UNSIGNED (TREE_TYPE
7462 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7463 == TYPE_UNSIGNED (TREE_TYPE
7464 (TREE_OPERAND
7465 (TREE_OPERAND (exp, 0), 0)))))))
bbf6f052 7466 {
888d65b5
RS
7467 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
7468 enum machine_mode innermode = TYPE_MODE (op0type);
8df83eae 7469 bool zextend_p = TYPE_UNSIGNED (op0type);
888d65b5
RS
7470 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7471 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7472
b10af0c8 7473 if (mode == GET_MODE_WIDER_MODE (innermode))
bbf6f052 7474 {
b10af0c8
TG
7475 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7476 {
b10af0c8 7477 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
eb698c58
RS
7478 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7479 TREE_OPERAND (exp, 1),
7480 NULL_RTX, &op0, &op1, 0);
b10af0c8 7481 else
eb698c58
RS
7482 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7483 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7484 NULL_RTX, &op0, &op1, 0);
c4d70ce3 7485 goto binop3;
b10af0c8
TG
7486 }
7487 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7488 && innermode == word_mode)
7489 {
888d65b5 7490 rtx htem, hipart;
b10af0c8
TG
7491 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7492 NULL_RTX, VOIDmode, 0);
7493 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8c118062
GK
7494 op1 = convert_modes (innermode, mode,
7495 expand_expr (TREE_OPERAND (exp, 1),
7496 NULL_RTX, VOIDmode, 0),
7497 unsignedp);
b10af0c8
TG
7498 else
7499 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7500 NULL_RTX, VOIDmode, 0);
7501 temp = expand_binop (mode, other_optab, op0, op1, target,
7502 unsignedp, OPTAB_LIB_WIDEN);
888d65b5
RS
7503 hipart = gen_highpart (innermode, temp);
7504 htem = expand_mult_highpart_adjust (innermode, hipart,
7505 op0, op1, hipart,
7506 zextend_p);
7507 if (htem != hipart)
7508 emit_move_insn (hipart, htem);
bc15d0ef 7509 return REDUCE_BIT_FIELD (temp);
b10af0c8 7510 }
bbf6f052
RK
7511 }
7512 }
eb698c58
RS
7513 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7514 subtarget, &op0, &op1, 0);
bc15d0ef 7515 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
bbf6f052
RK
7516
7517 case TRUNC_DIV_EXPR:
7518 case FLOOR_DIV_EXPR:
7519 case CEIL_DIV_EXPR:
7520 case ROUND_DIV_EXPR:
7521 case EXACT_DIV_EXPR:
8403445a
AM
7522 if (modifier == EXPAND_STACK_PARM)
7523 target = 0;
bbf6f052
RK
7524 /* Possible optimization: compute the dividend with EXPAND_SUM
7525 then if the divisor is constant can optimize the case
7526 where some terms of the dividend have coeffs divisible by it. */
eb698c58
RS
7527 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7528 subtarget, &op0, &op1, 0);
bbf6f052
RK
7529 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7530
7531 case RDIV_EXPR:
b7e9703c
JH
 7532 /* Emit a/b as a*(1/b). Later we may manage to CSE the reciprocal,
 7533 saving an expensive divide. If not, combine will rebuild the original
7534 computation. */
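 /* For illustration: in 'x/d + y/d' the rewrite exposes a common
 subexpression 1/d, so CSE can compute the reciprocal once, turning
 two divides into one divide plus two multiplies; this is valid
 only under -funsafe-math-optimizations since it changes rounding. */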
7535 if (flag_unsafe_math_optimizations && optimize && !optimize_size
ed7d44bc 7536 && TREE_CODE (type) == REAL_TYPE
b7e9703c 7537 && !real_onep (TREE_OPERAND (exp, 0)))
3244e67d
RS
7538 return expand_expr (build2 (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7539 build2 (RDIV_EXPR, type,
7540 build_real (type, dconst1),
7541 TREE_OPERAND (exp, 1))),
8e37cba8 7542 target, tmode, modifier);
c4d70ce3 7543
bbf6f052
RK
7544 goto binop;
7545
7546 case TRUNC_MOD_EXPR:
7547 case FLOOR_MOD_EXPR:
7548 case CEIL_MOD_EXPR:
7549 case ROUND_MOD_EXPR:
8403445a
AM
7550 if (modifier == EXPAND_STACK_PARM)
7551 target = 0;
eb698c58
RS
7552 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7553 subtarget, &op0, &op1, 0);
bbf6f052
RK
7554 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7555
7556 case FIX_ROUND_EXPR:
7557 case FIX_FLOOR_EXPR:
7558 case FIX_CEIL_EXPR:
7559 abort (); /* Not used for C. */
7560
7561 case FIX_TRUNC_EXPR:
906c4e36 7562 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8403445a 7563 if (target == 0 || modifier == EXPAND_STACK_PARM)
bbf6f052
RK
7564 target = gen_reg_rtx (mode);
7565 expand_fix (target, op0, unsignedp);
7566 return target;
7567
7568 case FLOAT_EXPR:
906c4e36 7569 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8403445a 7570 if (target == 0 || modifier == EXPAND_STACK_PARM)
bbf6f052
RK
7571 target = gen_reg_rtx (mode);
7572 /* expand_float can't figure out what to do if FROM has VOIDmode.
7573 So give it the correct mode. With -O, cse will optimize this. */
7574 if (GET_MODE (op0) == VOIDmode)
7575 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7576 op0);
7577 expand_float (target, op0,
8df83eae 7578 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
bbf6f052
RK
7579 return target;
7580
7581 case NEGATE_EXPR:
5b22bee8 7582 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8403445a
AM
7583 if (modifier == EXPAND_STACK_PARM)
7584 target = 0;
91ce572a 7585 temp = expand_unop (mode,
c4d70ce3
PB
7586 optab_for_tree_code (NEGATE_EXPR, type),
7587 op0, target, 0);
bbf6f052
RK
7588 if (temp == 0)
7589 abort ();
bc15d0ef 7590 return REDUCE_BIT_FIELD (temp);
bbf6f052
RK
7591
7592 case ABS_EXPR:
7593 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8403445a
AM
7594 if (modifier == EXPAND_STACK_PARM)
7595 target = 0;
bbf6f052 7596
11017cc7 7597 /* ABS_EXPR is not valid for complex arguments. */
d6a5ac33
RK
7598 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7599 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
11017cc7 7600 abort ();
2d7050fd 7601
bbf6f052
RK
7602 /* Unsigned abs is simply the operand. Testing here means we don't
7603 risk generating incorrect code below. */
8df83eae 7604 if (TYPE_UNSIGNED (type))
bbf6f052
RK
7605 return op0;
7606
91ce572a 7607 return expand_abs (mode, op0, target, unsignedp,
e5e809f4 7608 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
bbf6f052
RK
7609
7610 case MAX_EXPR:
7611 case MIN_EXPR:
7612 target = original_target;
8403445a
AM
7613 if (target == 0
7614 || modifier == EXPAND_STACK_PARM
3c0cb5de 7615 || (MEM_P (target) && MEM_VOLATILE_P (target))
d6a5ac33 7616 || GET_MODE (target) != mode
f8cfc6aa 7617 || (REG_P (target)
bbf6f052
RK
7618 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7619 target = gen_reg_rtx (mode);
eb698c58
RS
7620 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7621 target, &op0, &op1, 0);
bbf6f052
RK
7622
7623 /* First try to do it with a special MIN or MAX instruction.
7624 If that does not win, use a conditional jump to select the proper
7625 value. */
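 /* A sketch of the conditional-jump fallback, in pseudo-code for
 MAX_EXPR:

 target = op0;
 if (target >= op1) goto done;
 target = op1;
 done:

 i.e. a conditional move synthesized from a compare and branch when
 no direct smax/smin pattern matches. */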
c4d70ce3 7626 this_optab = optab_for_tree_code (code, type);
bbf6f052
RK
7627 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7628 OPTAB_WIDEN);
7629 if (temp != 0)
7630 return temp;
7631
fa2981d8
JW
7632 /* At this point, a MEM target is no longer useful; we will get better
7633 code without it. */
3a94c984 7634
3c0cb5de 7635 if (MEM_P (target))
fa2981d8
JW
7636 target = gen_reg_rtx (mode);
7637
e3be1116
RS
7638 /* If op1 was placed in target, swap op0 and op1. */
7639 if (target != op0 && target == op1)
7640 {
7641 rtx tem = op0;
7642 op0 = op1;
7643 op1 = tem;
7644 }
7645
ee456b1c
RK
7646 if (target != op0)
7647 emit_move_insn (target, op0);
d6a5ac33 7648
bbf6f052 7649 op0 = gen_label_rtx ();
d6a5ac33 7650
f81497d9
RS
7651 /* If this mode is an integer too wide to compare properly,
7652 compare word by word. Rely on cse to optimize constant cases. */
1eb8759b
RH
7653 if (GET_MODE_CLASS (mode) == MODE_INT
7654 && ! can_compare_p (GE, mode, ccp_jump))
bbf6f052 7655 {
f81497d9 7656 if (code == MAX_EXPR)
288dc1ea
EB
7657 do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
7658 NULL_RTX, op0);
bbf6f052 7659 else
288dc1ea
EB
7660 do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
7661 NULL_RTX, op0);
bbf6f052 7662 }
f81497d9
RS
7663 else
7664 {
b30f05db 7665 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
288dc1ea 7666 unsignedp, mode, NULL_RTX, NULL_RTX, op0);
f81497d9 7667 }
b30f05db 7668 emit_move_insn (target, op1);
bbf6f052
RK
7669 emit_label (op0);
7670 return target;

    case BIT_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
        target = 0;
      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
      if (temp == 0)
        abort ();
      return temp;

      /* ??? Can optimize bitwise operations with one arg constant.
         Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
         and (a bitwise1 b) bitwise2 b (etc)
         but that is probably not worthwhile.  */

      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
         boolean values when we want in all cases to compute both of them.  In
         general it is fastest to do TRUTH_AND_EXPR by computing both operands
         as actual zero-or-1 values and then bitwise anding.  In cases where
         there cannot be any side effects, better code would be made by
         treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
         how to recognize those cases.  */

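      /* For illustration: given "f () && g ()", TRUTH_ANDIF_EXPR must skip
         g () when f () is false, whereas TRUTH_AND_EXPR may evaluate both
         and combine the zero-or-1 results, roughly:

             t1 = f () != 0;
             t2 = g () != 0;
             result = t1 & t2;

         which is why falling through to the BIT_AND_EXPR code below is
         valid here.  */
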
    case TRUTH_AND_EXPR:
      code = BIT_AND_EXPR;
    case BIT_AND_EXPR:
      goto binop;

    case TRUTH_OR_EXPR:
      code = BIT_IOR_EXPR;
    case BIT_IOR_EXPR:
      goto binop;

    case TRUTH_XOR_EXPR:
      code = BIT_XOR_EXPR;
    case BIT_XOR_EXPR:
      goto binop;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
        subtarget = 0;
      if (modifier == EXPAND_STACK_PARM)
        target = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
                           unsignedp);

      /* Could determine the answer when only additive constants differ.  Also,
         the addition of one can be handled by changing the condition.  */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      temp = do_store_flag (exp,
                            modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
                            tmode != VOIDmode ? tmode : mode, 0);
      if (temp != 0)
        return temp;

      /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
          && original_target
          && REG_P (original_target)
          && (GET_MODE (original_target)
              == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        {
          temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
                              VOIDmode, 0);

          /* If temp is constant, we can just compute the result.  */
          if (GET_CODE (temp) == CONST_INT)
            {
              if (INTVAL (temp) != 0)
                emit_move_insn (target, const1_rtx);
              else
                emit_move_insn (target, const0_rtx);

              return target;
            }

          if (temp != original_target)
            {
              enum machine_mode mode1 = GET_MODE (temp);
              if (mode1 == VOIDmode)
                mode1 = tmode != VOIDmode ? tmode : mode;

              temp = copy_to_mode_reg (mode1, temp);
            }

          op1 = gen_label_rtx ();
          emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
                                   GET_MODE (temp), unsignedp, op1);
          emit_move_insn (temp, const1_rtx);
          emit_label (op1);
          return temp;
        }
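
      /* For illustration: the block above computes "foo != 0" without a
         store-flag insn, roughly:

             temp = foo;
             if (temp == 0)
               goto done;
             temp = 1;
           done:

         so TEMP ends up 0 exactly when FOO was 0, and 1 otherwise.  */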

    case TRUTH_NOT_EXPR:
      if (modifier == EXPAND_STACK_PARM)
        target = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
      /* The parser is careful to generate TRUTH_NOT_EXPR
         only with operands that are always zero or one.  */
      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
                           target, 1, OPTAB_LIB_WIDEN);
      if (temp == 0)
        abort ();
      return temp;

    case STATEMENT_LIST:
      {
        tree_stmt_iterator iter;

        if (!ignore)
          abort ();

        for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
          expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
      }
      return const0_rtx;

    case COND_EXPR:
      /* If it's void, we don't need to worry about computing a value.  */
      if (VOID_TYPE_P (TREE_TYPE (exp)))
        {
          tree pred = TREE_OPERAND (exp, 0);
          tree then_ = TREE_OPERAND (exp, 1);
          tree else_ = TREE_OPERAND (exp, 2);

          if (TREE_CODE (then_) != GOTO_EXPR
              || TREE_CODE (GOTO_DESTINATION (then_)) != LABEL_DECL
              || TREE_CODE (else_) != GOTO_EXPR
              || TREE_CODE (GOTO_DESTINATION (else_)) != LABEL_DECL)
            abort ();

          jumpif (pred, label_rtx (GOTO_DESTINATION (then_)));
          return expand_expr (else_, const0_rtx, VOIDmode, 0);
        }

      /* Note that COND_EXPRs whose type is a structure or union
         are required to be constructed to contain assignments of
         a temporary variable, so that we can evaluate them here
         for side effect only.  If type is void, we must do likewise.  */

      if (TREE_ADDRESSABLE (type)
          || ignore
          || TREE_TYPE (TREE_OPERAND (exp, 1)) == void_type_node
          || TREE_TYPE (TREE_OPERAND (exp, 2)) == void_type_node)
        abort ();

      /* If we are not to produce a result, we have no target.  Otherwise,
         if a target was specified use it; it will not be used as an
         intermediate target unless it is safe.  If no target, use a
         temporary.  */

      if (modifier != EXPAND_STACK_PARM
          && original_target
          && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
          && GET_MODE (original_target) == mode
#ifdef HAVE_conditional_move
          && (! can_conditionally_move_p (mode)
              || REG_P (original_target))
#endif
          && !MEM_P (original_target))
        temp = original_target;
      else
        temp = assign_temp (type, 0, 0, 1);

      do_pending_stack_adjust ();
      NO_DEFER_POP;
      op0 = gen_label_rtx ();
      op1 = gen_label_rtx ();
      jumpifnot (TREE_OPERAND (exp, 0), op0);
      store_expr (TREE_OPERAND (exp, 1), temp,
                  modifier == EXPAND_STACK_PARM ? 2 : 0);

      emit_jump_insn (gen_jump (op1));
      emit_barrier ();
      emit_label (op0);
      store_expr (TREE_OPERAND (exp, 2), temp,
                  modifier == EXPAND_STACK_PARM ? 2 : 0);

      emit_label (op1);
      OK_DEFER_POP;
      return temp;
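
      /* For illustration: for a value-producing "c ? a : b" the code above
         emits the classic two-label diamond, roughly:

             if (!c) goto op0;
             temp = a;
             goto op1;
           op0:
             temp = b;
           op1:

         with TEMP serving as the common target of both arms.  */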

    case MODIFY_EXPR:
      {
        /* If lhs is complex, expand calls in rhs before computing it.
           That's so we don't compute a pointer and save it over a
           call.  If lhs is simple, compute it first so we can give it
           as a target if the rhs is just a call.  This avoids an
           extra temp and copy and that prevents a partial-subsumption
           which makes bad code.  Actually we could treat
           component_ref's of vars like vars.  */

        tree lhs = TREE_OPERAND (exp, 0);
        tree rhs = TREE_OPERAND (exp, 1);

        temp = 0;

        /* Check for |= or &= of a bitfield of size one into another bitfield
           of size 1.  In this case, (unless we need the result of the
           assignment) we can do this more efficiently with a
           test followed by an assignment, if necessary.

           ??? At this point, we can't get a BIT_FIELD_REF here.  But if
           things change so we do, this code should be enhanced to
           support it.  */
        if (ignore
            && TREE_CODE (lhs) == COMPONENT_REF
            && (TREE_CODE (rhs) == BIT_IOR_EXPR
                || TREE_CODE (rhs) == BIT_AND_EXPR)
            && TREE_OPERAND (rhs, 0) == lhs
            && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
            && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
            && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
          {
            rtx label = gen_label_rtx ();

            do_jump (TREE_OPERAND (rhs, 1),
                     TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
                     TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
            expand_assignment (lhs, convert (TREE_TYPE (rhs),
                                             (TREE_CODE (rhs) == BIT_IOR_EXPR
                                              ? integer_one_node
                                              : integer_zero_node)),
                               0);
            do_pending_stack_adjust ();
            emit_label (label);
            return const0_rtx;
          }
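
        /* For illustration: with the result ignored, "x.a |= y.b" on 1-bit
           fields becomes a branch around a constant store, roughly:

               if (!y.b) goto done;
               x.a = 1;
             done:

           and "x.a &= y.b" is the dual, skipping the store of 0 when Y.B
           is set.  */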

        temp = expand_assignment (lhs, rhs, ! ignore);

        return temp;
      }

    case RETURN_EXPR:
      if (!TREE_OPERAND (exp, 0))
        expand_null_return ();
      else
        expand_return (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case ADDR_EXPR:
      if (modifier == EXPAND_STACK_PARM)
        target = 0;
      /* If we are taking the address of something erroneous, just
         return a zero.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
        return const0_rtx;
      /* If we are taking the address of a constant and are at the
         top level, we have to use output_constant_def since we can't
         call force_const_mem at top level.  */
      else if (cfun == 0
               && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
                   || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
                       == 'c')))
        op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
      else
        {
          /* We make sure to pass const0_rtx down if we came in with
             ignore set, to avoid doing the cleanups twice for something.  */
          op0 = expand_expr (TREE_OPERAND (exp, 0),
                             ignore ? const0_rtx : NULL_RTX, VOIDmode,
                             (modifier == EXPAND_INITIALIZER
                              ? modifier : EXPAND_CONST_ADDRESS));

          /* If we are going to ignore the result, OP0 will have been set
             to const0_rtx, so just return it.  Don't get confused and
             think we are taking the address of the constant.  */
          if (ignore)
            return op0;

          /* We would like the object in memory.  If it is a constant, we can
             have it be statically allocated into memory.  For a non-constant,
             we need to allocate some memory and store the value into it.  */

          if (CONSTANT_P (op0))
            op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
                                   op0);
          else if (REG_P (op0) || GET_CODE (op0) == SUBREG
                   || GET_CODE (op0) == CONCAT || GET_CODE (op0) == PARALLEL
                   || GET_CODE (op0) == LO_SUM)
            {
              /* If this object is in a register, it can't be BLKmode.  */
              tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
              rtx memloc = assign_temp (inner_type, 1, 1, 1);

              if (GET_CODE (op0) == PARALLEL)
                /* Handle calls that pass values in multiple
                   non-contiguous locations.  The Irix 6 ABI has examples
                   of this.  */
                emit_group_store (memloc, op0, inner_type,
                                  int_size_in_bytes (inner_type));
              else
                emit_move_insn (memloc, op0);

              op0 = memloc;
            }

          if (!MEM_P (op0))
            abort ();

          mark_temp_addr_taken (op0);
          if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
            {
              op0 = XEXP (op0, 0);
              if (GET_MODE (op0) == Pmode && mode == ptr_mode)
                op0 = convert_memory_address (ptr_mode, op0);
              return op0;
            }

          /* If OP0 is not aligned at least as much as the type requires, we
             need to make a temporary, copy OP0 to it, and take the address of
             the temporary.  We want to use the alignment of the type, not of
             the operand.  Note that this is incorrect for FUNCTION_TYPE, but
             the test for BLKmode means that can't happen.  The test for
             BLKmode is because we never make mis-aligned MEMs with
             non-BLKmode.

             We don't need to do this at all if the machine doesn't have
             strict alignment.  */
          if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
              && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
                  > MEM_ALIGN (op0))
              && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
            {
              tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
              rtx new;

              if (TYPE_ALIGN_OK (inner_type))
                abort ();

              if (TREE_ADDRESSABLE (inner_type))
                {
                  /* We can't make a bitwise copy of this object, so fail.  */
                  error ("cannot take the address of an unaligned member");
                  return const0_rtx;
                }

              new = assign_stack_temp_for_type
                (TYPE_MODE (inner_type),
                 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
                 : int_size_in_bytes (inner_type),
                 1, build_qualified_type (inner_type,
                                          (TYPE_QUALS (inner_type)
                                           | TYPE_QUAL_CONST)));

              emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
                               (modifier == EXPAND_STACK_PARM
                                ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

              op0 = new;
            }

          op0 = force_operand (XEXP (op0, 0), target);
        }

      if (flag_force_addr
          && !REG_P (op0)
          && modifier != EXPAND_CONST_ADDRESS
          && modifier != EXPAND_INITIALIZER
          && modifier != EXPAND_SUM)
        op0 = force_reg (Pmode, op0);

      if (REG_P (op0)
          && ! REG_USERVAR_P (op0))
        mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));

      if (GET_MODE (op0) == Pmode && mode == ptr_mode)
        op0 = convert_memory_address (ptr_mode, op0);

      return op0;

      /* COMPLEX type for Extended Pascal & Fortran.  */
    case COMPLEX_EXPR:
      {
        enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
        rtx insns;

        /* Get the rtx for the operands.  */
        op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
        op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);

        if (! target)
          target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

        start_sequence ();

        /* Move the real (op0) and imaginary (op1) parts to their location.  */
        emit_move_insn (gen_realpart (mode, target), op0);
        emit_move_insn (gen_imagpart (mode, target), op1);

        insns = get_insns ();
        end_sequence ();

        /* Complex construction should appear as a single unit.  */
        /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
           each with a separate pseudo as destination.
           It's not correct for flow to treat them as a unit.  */
        if (GET_CODE (target) != CONCAT)
          emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
        else
          emit_insn (insns);

        return target;
      }

    case REALPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return gen_realpart (mode, op0);

    case IMAGPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return gen_imagpart (mode, op0);

    case RESX_EXPR:
      expand_resx_expr (exp);
      return const0_rtx;

    case TRY_CATCH_EXPR:
    case CATCH_EXPR:
    case EH_FILTER_EXPR:
    case TRY_FINALLY_EXPR:
      /* Lowered by tree-eh.c.  */
      abort ();

    case WITH_CLEANUP_EXPR:
    case CLEANUP_POINT_EXPR:
    case TARGET_EXPR:
    case CASE_LABEL_EXPR:
    case VA_ARG_EXPR:
    case BIND_EXPR:
    case INIT_EXPR:
    case CONJ_EXPR:
    case COMPOUND_EXPR:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case LOOP_EXPR:
    case EXIT_EXPR:
    case LABELED_BLOCK_EXPR:
    case EXIT_BLOCK_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* Lowered by gimplify.c.  */
      abort ();

    case EXC_PTR_EXPR:
      return get_exception_pointer (cfun);

    case FILTER_EXPR:
      return get_exception_filter (cfun);

    case FDESC_EXPR:
      /* Function descriptors are not valid except as
         initialization constants, and should not be expanded.  */
      abort ();

    case SWITCH_EXPR:
      expand_start_case (SWITCH_COND (exp));
      /* The switch body is lowered in gimplify.c; we should never have
         switches with a non-NULL SWITCH_BODY here.  */
      if (SWITCH_BODY (exp))
        abort ();
      if (SWITCH_LABELS (exp))
        {
          tree vec = SWITCH_LABELS (exp);
          size_t i = TREE_VEC_LENGTH (vec);

          do
            {
              tree elt = TREE_VEC_ELT (vec, --i);
              add_case_node (CASE_LOW (elt), CASE_HIGH (elt),
                             CASE_LABEL (elt));
            }
          while (i);
        }
      else
        abort ();
      expand_end_case_type (SWITCH_COND (exp), TREE_TYPE (exp));
      return const0_rtx;

    case LABEL_EXPR:
      expand_label (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case ASM_EXPR:
      expand_asm_expr (exp);
      return const0_rtx;

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR expands to its first argument.  The caller should
         have pulled out the size to use in whatever context it needed.  */
      return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
                               modifier, alt_rtl);

8183
bbf6f052 8184 default:
673fda6b
SB
8185 return lang_hooks.expand_expr (exp, original_target, tmode,
8186 modifier, alt_rtl);
bbf6f052
RK
8187 }
8188
c4d70ce3 8189 /* Here to do an ordinary binary operator. */
bbf6f052 8190 binop:
eb698c58
RS
8191 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8192 subtarget, &op0, &op1, 0);
bbf6f052 8193 binop2:
c4d70ce3
PB
8194 this_optab = optab_for_tree_code (code, type);
8195 binop3:
8403445a
AM
8196 if (modifier == EXPAND_STACK_PARM)
8197 target = 0;
bbf6f052
RK
8198 temp = expand_binop (mode, this_optab, op0, op1, target,
8199 unsignedp, OPTAB_LIB_WIDEN);
8200 if (temp == 0)
8201 abort ();
bc15d0ef
JM
8202 return REDUCE_BIT_FIELD (temp);
8203}
8204#undef REDUCE_BIT_FIELD
8205\f
/* Subroutine of above: reduce EXP to the precision of TYPE (in the
   signedness of TYPE), possibly returning the result in TARGET.  */
static rtx
reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
{
  HOST_WIDE_INT prec = TYPE_PRECISION (type);
  if (target && GET_MODE (target) != GET_MODE (exp))
    target = 0;
  if (TYPE_UNSIGNED (type))
    {
      rtx mask;
      if (prec < HOST_BITS_PER_WIDE_INT)
        mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
                                   GET_MODE (exp));
      else
        mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
                                   ((unsigned HOST_WIDE_INT) 1
                                    << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
                                   GET_MODE (exp));
      return expand_and (GET_MODE (exp), exp, mask, target);
    }
  else
    {
      tree count = build_int_2 (GET_MODE_BITSIZE (GET_MODE (exp)) - prec, 0);
      exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
      return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
    }
}
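
/* For illustration: reducing to a 5-bit field in a 32-bit mode, the
   unsigned branch computes EXP & 0x1f, while the signed branch
   sign-extends bit 4 through the mode:

       t = exp << (32 - 5);
       t = t >> (32 - 5);        (arithmetic shift)

   so e.g. the value 0x13 (binary 10011) becomes -13 when the field
   is signed.  */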
\f
/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
   when applied to the address of EXP produces an address known to be
   aligned more than BIGGEST_ALIGNMENT.  */

static int
is_aligning_offset (tree offset, tree exp)
{
  /* Strip off any conversions.  */
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
         || TREE_CODE (offset) == NOP_EXPR
         || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* We must now have a BIT_AND_EXPR with a constant that is one less than
     a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
  if (TREE_CODE (offset) != BIT_AND_EXPR
      || !host_integerp (TREE_OPERAND (offset, 1), 1)
      || compare_tree_int (TREE_OPERAND (offset, 1),
                           BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
      || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
    return 0;

  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
     It must be NEGATE_EXPR.  Then strip any more conversions.  */
  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
         || TREE_CODE (offset) == NOP_EXPR
         || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  if (TREE_CODE (offset) != NEGATE_EXPR)
    return 0;

  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
         || TREE_CODE (offset) == NOP_EXPR
         || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* This must now be the address of EXP.  */
  return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
}
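
/* For illustration: the shape recognized above is roughly the tree for

       offset = (-(intptr_t) &exp) & (ALIGN - 1)

   where ALIGN is a power of 2 larger than BIGGEST_ALIGNMENT; adding
   such an offset to &exp rounds the address up to the next ALIGN
   boundary.  */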
\f
/* Return the tree node if ARG corresponds to a string constant or zero
   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */

tree
string_constant (tree arg, tree *ptr_offset)
{
  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
    {
      *ptr_offset = size_zero_node;
      return TREE_OPERAND (arg, 0);
    }
  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
      && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg, 0), 0)) == STRING_CST)
    {
      *ptr_offset = convert (sizetype, TREE_OPERAND (TREE_OPERAND (arg, 0), 1));
      return TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
    }
  else if (TREE_CODE (arg) == PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
        {
          *ptr_offset = convert (sizetype, arg1);
          return TREE_OPERAND (arg0, 0);
        }
      else if (TREE_CODE (arg1) == ADDR_EXPR
               && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
        {
          *ptr_offset = convert (sizetype, arg0);
          return TREE_OPERAND (arg1, 0);
        }
    }

  return 0;
}
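
/* For illustration: for ARG = &"hello"[2], or the equivalent PLUS_EXPR
   form &"hello" + 2, string_constant returns the STRING_CST "hello"
   and sets *PTR_OFFSET to (sizetype) 2; callers such as the builtin
   strlen folding then read the constant bytes at that offset.  */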
\f
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
           && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
               == FUNCTION_TYPE))
          || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
              && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = LT;
      else
        code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = GE;
      else
        code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;
    case LTGT_EXPR:
      code = LTGT;
      break;

    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.

     The code to make this transformation was moved into fold_single_bit_test,
     so we just call into the folder and expand its result.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
      return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
                                                arg0, arg1, type),
                          target, VOIDmode, EXPAND_NORMAL);
    }
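
  /* For illustration: with CODE == NE, "(x & 8) != 0" is rewritten by
     fold_single_bit_test into roughly

         (x >> 3) & 1

     and for EQ the result is additionally xor'd with 1, so neither
     form needs an scc instruction.  */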

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (BRANCH_COST >= 0
               && ! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode)
    subtarget = 0;

  expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  result = emit_store_flag (target, code, op0, op1,
                            operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (!REG_P (target)
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  /* The code of RESULT may not match CODE if compare_from_rtx
     decided to swap its operands and reverse the original code.

     We know that compare_from_rtx returns either a CONST_INT or
     a new comparison code, so it is safe to just extract the
     code from RESULT.  */
  code = GET_CODE (result);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
\f

/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */

unsigned int
case_values_threshold (void)
{
  return CASE_VALUES_THRESHOLD;
}

/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */
int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
            rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build2 (MINUS_EXPR, index_type,
                           index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
                               omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
        {
          index_expr = convert (lang_hooks.types.type_for_size
                                (index_bits, 0), index_expr);
          index_type = TREE_TYPE (index_expr);
        }

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }

  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
                       op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
                       op2, TYPE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
                              table_label, default_label));
  return 1;
}
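
/* For illustration: for "switch (i)" with case values 10..25, the
   casesi pattern emitted above behaves roughly like

       if ((unsigned) (i - 10) > 15)
         goto default_label;
       goto *table_label[i - 10];

   with the bounds check and the dispatch folded into the single
   casesi insn when the target provides one.  */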

/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif

/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
              rtx default_label)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->max_jumptable_ents)
    cfun->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                           default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  MEM_NOTRAP_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
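
/* For illustration: the dispatch address built above has the RTL shape

       (mem:CASE_VECTOR_MODE
         (plus:P (mult:P index (const_int N))
                 (label_ref table_label)))

   where N is GET_MODE_SIZE (CASE_VECTOR_MODE), i.e. a scaled index
   into the vector of labels; the entry loaded from it is what the
   tablejump pattern branches through.  */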

int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
               rtx table_label, rtx default_label)
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold (build2 (MINUS_EXPR, index_type,
                             convert (index_type, index_expr),
                             convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
                convert_modes (TYPE_MODE (index_type),
                               TYPE_MODE (TREE_TYPE (range)),
                               expand_expr (range, NULL_RTX,
                                            VOIDmode, 0),
                               TYPE_UNSIGNED (TREE_TYPE (range))),
                table_label, default_label);
  return 1;
}

/* Nonzero if the mode is a valid vector mode for this architecture.
   This returns nonzero even if there is no hardware support for the
   vector mode, but we can emulate with narrower modes.  */

int
vector_mode_valid_p (enum machine_mode mode)
{
  enum mode_class class = GET_MODE_CLASS (mode);
  enum machine_mode innermode;

  /* Doh!  What's going on?  */
  if (class != MODE_VECTOR_INT
      && class != MODE_VECTOR_FLOAT)
    return 0;

  /* Hardware support.  Woo hoo!  */
  if (VECTOR_MODE_SUPPORTED_P (mode))
    return 1;

  innermode = GET_MODE_INNER (mode);

  /* We should probably return 1 if requesting V4DI and we have no DI,
     but we have V2DI, but this is probably very unlikely.  */

  /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate with a pair of DIs.  */
  return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
}

/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */
static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  int units, i;
  tree link, elt;
  enum machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (initializer_zerop (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  link = TREE_VECTOR_CST_ELTS (exp);
  for (i = 0; link; link = TREE_CHAIN (link), ++i)
    {
      elt = TREE_VALUE (link);

      if (TREE_CODE (elt) == REAL_CST)
        RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
                                                         inner);
      else
        RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
                                               TREE_INT_CST_HIGH (elt),
                                               inner);
    }

  /* Initialize remaining elements to 0.  */
  for (; i < units; ++i)
    RTVEC_ELT (v, i) = CONST0_RTX (inner);

  return gen_rtx_raw_CONST_VECTOR (mode, v);
}
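
/* For illustration: for a V4SI VECTOR_CST listing only the elements
   {1, 2}, this builds

       (const_vector:V4SI [(const_int 1) (const_int 2)
                           (const_int 0) (const_int 0)])

   with the missing trailing elements filled in by the zero loop
   above.  */
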
#include "gt-expr.h"