/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "real.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "reload.h"
#include "output.h"
#include "typeclass.h"
#include "toplev.h"
#include "ggc.h"
#include "langhooks.h"
#include "intl.h"
#include "tm_p.h"
#include "target.h"

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Convert defined/undefined to boolean.  */
#ifdef TARGET_MEM_FUNCTIONS
#undef TARGET_MEM_FUNCTIONS
#define TARGET_MEM_FUNCTIONS 1
#else
#define TARGET_MEM_FUNCTIONS 0
#endif


/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
tree placeholder_list = 0;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the store to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};

static rtx enqueue_insn (rtx, rtx);
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
						     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
			      struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movstr (rtx, rtx, rtx, unsigned);
static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
			       struct store_by_pieces *);
static bool clear_storage_via_clrstr (rtx, rtx, unsigned);
static rtx clear_storage_via_libcall (rtx, rtx);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static int is_zeros_p (tree);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
				     HOST_WIDE_INT, enum machine_mode,
				     tree, tree, int, int);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
			tree, enum machine_mode, int, tree, int);
static rtx var_rtx (tree);

static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
static unsigned HOST_WIDE_INT highest_pow2_factor_for_type (tree, tree);

static int is_aligning_offset (tree, tree);
static rtx expand_increment (tree, int, int);
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
			     enum expand_modifier);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
#ifdef PUSH_ROUNDING
static void emit_single_push_insn (enum machine_mode, rtx, tree);
#endif
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
#endif

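/* Illustrative note (an editor's sketch, not part of the original
   source): on a hypothetical 32-bit target with MOVE_MAX == 4 and
   MOVE_RATIO == 3, a word-aligned 16-byte copy needs four SImode
   moves, so

     MOVE_BY_PIECES_P (16, 32)   evaluates as (4 < 3)  =>  false

   and emit_block_move falls back to a movstr pattern or a library
   call, while a word-aligned 8-byte copy (two SImode moves) would be
   done by pieces.  */
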
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of two different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* Stack of EXPR_WITH_FILE_LOCATION nested expressions.  */
struct file_stack *expr_wfl_stack;

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
\f
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    REGNO (reg) = regno;

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}

/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
}

/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function (void)
{
  if (pending_chain)
    abort ();
}
\f
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (rtx var, rtx body)
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
				  body, pending_chain);
  return pending_chain;
}

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (rtx x, int modify)
{
  RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  rtx y = XEXP (x, 0);
	  rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

	  if (QUEUED_INSN (y))
	    {
	      rtx temp = gen_reg_rtx (GET_MODE (x));

	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }

	  /* Copy the address into a pseudo, so that the returned value
	     remains correct across calls to emit_queue.  */
	  return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
	}

      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (rtx x)
{
  enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}

/* Perform all the pending incrementations.  */

void
emit_queue (void)
{
  rtx p;
  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      switch (GET_CODE (body))
	{
	case INSN:
	case JUMP_INSN:
	case CALL_INSN:
	case CODE_LABEL:
	case BARRIER:
	case NOTE:
	  QUEUED_INSN (p) = body;
	  emit_insn (body);
	  break;

#ifdef ENABLE_CHECKING
	case SEQUENCE:
	  abort ();
	  break;
#endif

	default:
	  QUEUED_INSN (p) = emit_insn (body);
	  break;
	}

      pending_chain = QUEUED_NEXT (p);
    }
}
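
/* Illustrative sketch (an editor's addition, not original source):
   the expected calling sequence for this queue machinery, e.g. when
   an expander in this file handles a post-increment, is roughly:

     rtx queued = enqueue_insn (var, gen_add2_insn (var, const1_rtx));
     ...
     rtx val = protect_from_queue (queued, 0);
	 ... use VAL, which stands for VAR's pre-increment value ...
     emit_queue ();
	 ... the queued add is emitted here, in order ...

   As the comments above warn, protect_from_queue must be called
   immediately before each use; holding its result across a flush of
   the queue yields incorrect code.  */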
\f
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
	abort ();

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
	tab = sext_optab;
      else if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
	tab = trunc_optab;
      else
	abort ();

      /* Try converting directly if the insn is supported.  */

      code = tab->handlers[to_mode][from_mode].insn_code;
      if (code != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from,
			  tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
	  return;
	}

      /* Otherwise use a libcall.  */
      libcall = tab->handlers[to_mode][from_mode].libfunc;

      if (!libcall)
	/* This conversion is not implemented yet.  */
	abort ();

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
			  tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
								       from)
			  : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }

  /* Handle pointer conversion.  */		/* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      if (trunc_optab->handlers[to_mode][full_mode].insn_code
	  == CODE_FOR_nothing)
	abort ();

      if (full_mode != from_mode)
	from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
		      to, from, UNKNOWN);
      return;
    }
  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      if (sext_optab->handlers[full_mode][from_mode].insn_code
	  == CODE_FOR_nothing)
	abort ();

      emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
		      to, from, UNKNOWN);
      if (to_mode == full_mode)
	return;

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  if (flag_force_mem)
	    from = force_not_mem (from);

	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
				      - GET_MODE_BITSIZE (from_mode), 0);
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }

  /* Support special truncate insns for certain modes.  */
  if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
    {
      emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
		      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
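
/* Illustrative sketch (an editor's addition, not original source):
   a typical use widens a SImode value into a DImode register:

     rtx di = gen_reg_rtx (DImode);
     convert_move (di, si_reg, 1);

   assuming SI_REG is some SImode rtx; UNSIGNEDP == 1 requests
   zero-extension, while the same call with UNSIGNEDP == 0 would
   sign-extend instead.  */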

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && (! HARD_REGISTER_P (x)
			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return gen_int_mode (val, mode);
	}

      return gen_lowpart (mode, x);
    }

  /* Converting from an integer constant into MODE is always equivalent to a
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      if (GET_MODE_BITSIZE (mode) != GET_MODE_BITSIZE (oldmode))
	abort ();
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
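
/* Illustrative sketch (an editor's addition, not original source):
   unlike convert_move, this interface can fold conversions of
   constants without emitting any insns.  For example

     rtx x = convert_modes (HImode, SImode, GEN_INT (-1), 1);

   returns a CONST_INT directly via the gen_lowpart path above,
   whereas a pseudo register whose mode genuinely changes falls
   through to the gen_reg_rtx/convert_move tail.  */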
\f
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))

/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
		    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}
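
/* Illustrative sketch (an editor's addition, not original source):
   a caller that knows the length and alignment at compile time can
   pick a strategy up front, e.g.

     if (can_move_by_pieces (len, align))
       to = move_by_pieces (to, from, len, align, 0);
     else
       emit_block_move (to, from, GEN_INT (len), BLOCK_OP_NORMAL);

   where TO and FROM are BLKmode MEMs, LEN is a byte count and ALIGN
   is in bits.  */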

/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
		unsigned int align, int endp)
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();

  if (endp)
    {
      rtx to1;

      if (data.reverse)
	abort ();
      if (data.autinc_to)
	{
	  if (endp == 2)
	    {
	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
	      else
		data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
								-1));
	    }
	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
					   data.offset);
	}
      else
	{
	  if (endp == 2)
	    --data.offset;
	  to1 = adjust_address (data.to, QImode, data.offset);
	}
      return to1;
    }
  else
    return data.to;
}
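
/* Illustrative sketch (an editor's addition, not original source):
   the ENDP argument serves the mempcpy/stpcpy built-ins.  Expanding
   mempcpy (d, s, 16) might do

     rtx end = move_by_pieces (dst_mem, src_mem, 16, align, 1);

   so that END is a QImode MEM one past the last byte stored, while
   ENDP == 2 would yield the last byte itself, ala stpcpy.  DST_MEM
   and SRC_MEM here are hypothetical BLKmode MEM operands.  */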

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  if (l)
    abort ();
  return n_insns;
}

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
		  struct move_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  abort ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
\f
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      abort ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  if (GET_MODE (x) != BLKmode)
    abort ();
  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      if (INTVAL (size) == 0)
	return 0;

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movstr (x, y, size, align))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
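
/* Illustrative sketch (an editor's addition, not original source):
   a front end copying a 16-byte aggregate D = S, with both operands
   already expanded to BLKmode MEMs, would emit

     emit_block_move (dst_mem, src_mem, GEN_INT (16), BLOCK_OP_NORMAL);

   and could ignore the return value.  BLOCK_OP_CALL_PARM is used
   instead when the copy feeds an outgoing argument, so that a libcall
   cannot clobber argument registers or stack slots already set up.  */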

/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
  {
    tree fn = emit_block_move_libcall_fn (false);
    (void) fn;
    if (REG_PARM_STACK_SPACE (fn) != 0)
      return false;
  }
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
      {
	enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
	rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
	if (!tmp || !REG_P (tmp))
	  return false;
#ifdef FUNCTION_ARG_PARTIAL_NREGS
	if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
					NULL_TREE, 1))
	  return false;
#endif
	FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
      }
  }
  return true;
}

/* A subroutine of emit_block_move.  Expand a movstr pattern;
   return true if successful.  */

static bool
emit_block_move_via_movstr (rtx x, rtx y, rtx size, unsigned int align)
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  enum machine_mode mode;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movstr_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  */
	  && ((GET_CODE (size) == CONST_INT
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
	      || (*pred) (x, BLKmode))
	  && ((pred = insn_data[(int) code].operand[1].predicate) == 0
	      || (*pred) (y, BLKmode))
	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
	      || (*pred) (opalign, VOIDmode)))
	{
	  rtx op2;
	  rtx last = get_last_insn ();
	  rtx pat;

	  op2 = convert_to_mode (mode, size, 1);
	  pred = insn_data[(int) code].operand[2].predicate;
	  if (pred != 0 && ! (*pred) (op2, mode))
	    op2 = copy_to_mode_reg (mode, op2);

	  /* ??? When called via emit_block_move_for_call, it'd be
	     nice if there were some way to inform the backend, so
	     that it doesn't fail the expansion because it thinks
	     emitting the libcall would be more efficient.  */

	  pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	  if (pat)
	    {
	      emit_insn (pat);
	      volatile_ok = 0;
	      return true;
	    }
	  else
	    delete_insns_since (last);
	}
    }

  volatile_ok = 0;
  return false;
}

/* A subroutine of emit_block_move.  Expand a call to memcpy or bcopy.
   Return the return value from memcpy, 0 otherwise.  */

static rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
{
  rtx dst_addr, src_addr;
  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* DST, SRC, or SIZE may have been passed through protect_from_queue.

     It is unsafe to save the value generated by protect_from_queue and reuse
     it later.  Consider what happens if emit_queue is called before the
     return value from protect_from_queue is used.

     Expansion of the CALL_EXPR below will call emit_queue before we are
     finished emitting RTL for argument setup.  So if we are not careful we
     could get the wrong value for an argument.

     To avoid this problem we go ahead and emit code to copy the addresses of
     DST and SRC and SIZE into new pseudos.  We can then place those new
     pseudos into an RTL_EXPR and use them later, even after a call to
     emit_queue.

     Note this is not strictly needed for library calls since they do not call
     emit_queue before loading their arguments.  However, we may need to have
     library calls call emit_queue in the future since failing to do so could
     cause problems for targets which define SMALL_REGISTER_CLASSES and pass
     arguments in registers.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  if (TARGET_MEM_FUNCTIONS)
    size_mode = TYPE_MODE (sizetype);
  else
    size_mode = TYPE_MODE (unsigned_type_node);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.

     For convenience, we generate the call to bcopy this way as well.  */

  if (TARGET_MEM_FUNCTIONS)
    size_tree = make_tree (sizetype, size);
  else
    size_tree = make_tree (unsigned_type_node, size);

  fn = emit_block_move_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  if (TARGET_MEM_FUNCTIONS)
    {
      arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
      arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
    }
  else
    {
      arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
      arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
    }

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
		     call_expr, arg_list, NULL_TREE);

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);

  /* If we are initializing a readonly value, show the above call clobbered
     it.  Otherwise, a load from it may erroneously be hoisted from a loop, or
     the delay slot scheduler might overlook conflicts and take nasty
     decisions.  */
  if (RTX_UNCHANGING_P (dst))
    add_function_usage_to
      (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
					     gen_rtx_CLOBBER (VOIDmode, dst),
					     NULL_RTX));

  return TARGET_MEM_FUNCTIONS ? retval : NULL_RTX;
}
52cf7115 1575
4ca79136
RH
1576/* A subroutine of emit_block_move_via_libcall. Create the tree node
1577 for the function we use for block copies. The first time FOR_CALL
1578 is true, we call assemble_external. */
52cf7115 1579
4ca79136
RH
1580static GTY(()) tree block_move_fn;
1581
9661b15f 1582void
502b8322 1583init_block_move_fn (const char *asmspec)
4ca79136 1584{
9661b15f 1585 if (!block_move_fn)
4ca79136 1586 {
8fd3cf4e 1587 tree args, fn;
9661b15f 1588
4ca79136 1589 if (TARGET_MEM_FUNCTIONS)
52cf7115 1590 {
4ca79136
RH
1591 fn = get_identifier ("memcpy");
1592 args = build_function_type_list (ptr_type_node, ptr_type_node,
1593 const_ptr_type_node, sizetype,
1594 NULL_TREE);
1595 }
1596 else
1597 {
1598 fn = get_identifier ("bcopy");
1599 args = build_function_type_list (void_type_node, const_ptr_type_node,
1600 ptr_type_node, unsigned_type_node,
1601 NULL_TREE);
52cf7115
JL
1602 }
1603
4ca79136
RH
1604 fn = build_decl (FUNCTION_DECL, fn, args);
1605 DECL_EXTERNAL (fn) = 1;
1606 TREE_PUBLIC (fn) = 1;
1607 DECL_ARTIFICIAL (fn) = 1;
1608 TREE_NOTHROW (fn) = 1;
66c60e67 1609
4ca79136 1610 block_move_fn = fn;
bbf6f052 1611 }
e9a25f70 1612
9661b15f
JJ
1613 if (asmspec)
1614 {
1615 SET_DECL_RTL (block_move_fn, NULL_RTX);
1616 SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));
1617 }
1618}
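
/* Illustrative use (hypothetical target code; the assembler name is made
   up): a backend with its own block-copy routine can redirect the libcall
   by passing init_block_move_fn an asmspec.  Resetting DECL_RTL above is
   what makes the new assembler name take effect.  */

  init_block_move_fn ("__cpu_fast_memcpy");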
1619
1620static tree
502b8322 1621emit_block_move_libcall_fn (int for_call)
9661b15f
JJ
1622{
1623 static bool emitted_extern;
1624
1625 if (!block_move_fn)
1626 init_block_move_fn (NULL);
1627
4ca79136
RH
1628 if (for_call && !emitted_extern)
1629 {
1630 emitted_extern = true;
9661b15f
JJ
1631 make_decl_rtl (block_move_fn, NULL);
1632 assemble_external (block_move_fn);
4ca79136
RH
1633 }
1634
9661b15f 1635 return block_move_fn;
bbf6f052 1636}
44bb111a
RH
1637
1638/* A subroutine of emit_block_move. Copy the data via an explicit
1639 loop. This is used only when libcalls are forbidden. */
1640/* ??? It'd be nice to copy in hunks larger than QImode. */
1641
1642static void
502b8322
AJ
1643emit_block_move_via_loop (rtx x, rtx y, rtx size,
1644 unsigned int align ATTRIBUTE_UNUSED)
44bb111a
RH
1645{
1646 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1647 enum machine_mode iter_mode;
1648
1649 iter_mode = GET_MODE (size);
1650 if (iter_mode == VOIDmode)
1651 iter_mode = word_mode;
1652
1653 top_label = gen_label_rtx ();
1654 cmp_label = gen_label_rtx ();
1655 iter = gen_reg_rtx (iter_mode);
1656
1657 emit_move_insn (iter, const0_rtx);
1658
1659 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1660 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1661 do_pending_stack_adjust ();
1662
2e040219 1663 emit_note (NOTE_INSN_LOOP_BEG);
44bb111a
RH
1664
1665 emit_jump (cmp_label);
1666 emit_label (top_label);
1667
1668 tmp = convert_modes (Pmode, iter_mode, iter, true);
1669 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1670 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1671 x = change_address (x, QImode, x_addr);
1672 y = change_address (y, QImode, y_addr);
1673
1674 emit_move_insn (x, y);
1675
1676 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1677 true, OPTAB_LIB_WIDEN);
1678 if (tmp != iter)
1679 emit_move_insn (iter, tmp);
1680
2e040219 1681 emit_note (NOTE_INSN_LOOP_CONT);
44bb111a
RH
1682 emit_label (cmp_label);
1683
1684 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1685 true, top_label);
1686
2e040219 1687 emit_note (NOTE_INSN_LOOP_END);
44bb111a 1688}
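
/* A minimal standalone sketch (plain C, not compiler code) of the copy
   the RTL above performs: a byte-at-a-time loop with the comparison at
   the bottom and an initial jump to it, so a zero SIZE copies nothing.  */

#include <stddef.h>

static void
block_move_loop_sketch (unsigned char *x, const unsigned char *y, size_t size)
{
  size_t iter = 0;

  goto cmp;			/* emit_jump (cmp_label) */
 top:
  x[iter] = y[iter];		/* the QImode move */
  iter++;			/* iter = iter + 1 */
 cmp:
  if (iter < size)		/* emit_cmp_and_jump_insns (..., LT, ...) */
    goto top;
}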
bbf6f052
RK
1689\f
1690/* Copy all or part of a value X into registers starting at REGNO.
1691 The number of registers to be filled is NREGS. */
1692
1693void
502b8322 1694move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
bbf6f052
RK
1695{
1696 int i;
381127e8 1697#ifdef HAVE_load_multiple
3a94c984 1698 rtx pat;
381127e8
RL
1699 rtx last;
1700#endif
bbf6f052 1701
72bb9717
RK
1702 if (nregs == 0)
1703 return;
1704
bbf6f052
RK
1705 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1706 x = validize_mem (force_const_mem (mode, x));
1707
1708 /* See if the machine can do this with a load multiple insn. */
1709#ifdef HAVE_load_multiple
c3a02afe 1710 if (HAVE_load_multiple)
bbf6f052 1711 {
c3a02afe 1712 last = get_last_insn ();
38a448ca 1713 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
c3a02afe
RK
1714 GEN_INT (nregs));
1715 if (pat)
1716 {
1717 emit_insn (pat);
1718 return;
1719 }
1720 else
1721 delete_insns_since (last);
bbf6f052 1722 }
bbf6f052
RK
1723#endif
1724
1725 for (i = 0; i < nregs; i++)
38a448ca 1726 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
bbf6f052
RK
1727 operand_subword_force (x, i, mode));
1728}
1729
1730/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
c6b97fac 1731 The number of registers to be filled is NREGS. */
0040593d 1732
bbf6f052 1733void
502b8322 1734move_block_from_reg (int regno, rtx x, int nregs)
bbf6f052
RK
1735{
1736 int i;
bbf6f052 1737
2954d7db
RK
1738 if (nregs == 0)
1739 return;
1740
bbf6f052
RK
1741 /* See if the machine can do this with a store multiple insn. */
1742#ifdef HAVE_store_multiple
c3a02afe 1743 if (HAVE_store_multiple)
bbf6f052 1744 {
c6b97fac
AM
1745 rtx last = get_last_insn ();
1746 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1747 GEN_INT (nregs));
c3a02afe
RK
1748 if (pat)
1749 {
1750 emit_insn (pat);
1751 return;
1752 }
1753 else
1754 delete_insns_since (last);
bbf6f052 1755 }
bbf6f052
RK
1756#endif
1757
1758 for (i = 0; i < nregs; i++)
1759 {
1760 rtx tem = operand_subword (x, i, 1, BLKmode);
1761
1762 if (tem == 0)
1763 abort ();
1764
38a448ca 1765 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
bbf6f052
RK
1766 }
1767}
1768
084a1106
JDA
1769/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1770 ORIG, where ORIG is a non-consecutive group of registers represented by
1771 a PARALLEL. The clone is identical to the original except in that the
1772 original set of registers is replaced by a new set of pseudo registers.
1773 The new set has the same modes as the original set. */
1774
1775rtx
502b8322 1776gen_group_rtx (rtx orig)
084a1106
JDA
1777{
1778 int i, length;
1779 rtx *tmps;
1780
1781 if (GET_CODE (orig) != PARALLEL)
1782 abort ();
1783
1784 length = XVECLEN (orig, 0);
703ad42b 1785 tmps = alloca (sizeof (rtx) * length);
084a1106
JDA
1786
1787 /* Skip a NULL entry in first slot. */
1788 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1789
1790 if (i)
1791 tmps[0] = 0;
1792
1793 for (; i < length; i++)
1794 {
1795 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1796 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1797
1798 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1799 }
1800
1801 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1802}
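
/* Illustration (hypothetical register numbers): a group describing a
   16-byte value split across two DImode hard regs looks like

     (parallel [(expr_list (reg:DI 3) (const_int 0))
                (expr_list (reg:DI 4) (const_int 8))])

   gen_group_rtx keeps the modes and byte offsets but substitutes fresh
   pseudos for the hard registers.  */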
1803
6e985040
AM
1804/* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
1805 where DST is non-consecutive registers represented by a PARALLEL.
1806 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
450b1728 1807 if not known. */
fffa9c1d
JW
1808
1809void
6e985040 1810emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
fffa9c1d 1811{
aac5cc16
RH
1812 rtx *tmps, src;
1813 int start, i;
fffa9c1d 1814
aac5cc16 1815 if (GET_CODE (dst) != PARALLEL)
fffa9c1d
JW
1816 abort ();
1817
1818 /* Check for a NULL entry, used to indicate that the parameter goes
1819 both on the stack and in registers. */
aac5cc16
RH
1820 if (XEXP (XVECEXP (dst, 0, 0), 0))
1821 start = 0;
fffa9c1d 1822 else
aac5cc16
RH
1823 start = 1;
1824
703ad42b 1825 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
aac5cc16 1826
aac5cc16
RH
1827 /* Process the pieces. */
1828 for (i = start; i < XVECLEN (dst, 0); i++)
1829 {
1830 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
770ae6cc
RK
1831 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1832 unsigned int bytelen = GET_MODE_SIZE (mode);
aac5cc16
RH
1833 int shift = 0;
1834
1835 /* Handle trailing fragments that run over the size of the struct. */
8752c357 1836 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
aac5cc16 1837 {
6e985040
AM
1838 /* Arrange to shift the fragment to where it belongs.
1839 extract_bit_field loads to the lsb of the reg. */
1840 if (
1841#ifdef BLOCK_REG_PADDING
1842 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1843 == (BYTES_BIG_ENDIAN ? upward : downward)
1844#else
1845 BYTES_BIG_ENDIAN
1846#endif
1847 )
1848 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
aac5cc16
RH
1849 bytelen = ssize - bytepos;
1850 if (bytelen <= 0)
729a2125 1851 abort ();
aac5cc16
RH
1852 }
1853
f3ce87a9
DE
1854 /* If we won't be loading directly from memory, protect the real source
1855 from strange tricks we might play; but make sure that the source can
1856 be loaded directly into the destination. */
1857 src = orig_src;
1858 if (GET_CODE (orig_src) != MEM
1859 && (!CONSTANT_P (orig_src)
1860 || (GET_MODE (orig_src) != mode
1861 && GET_MODE (orig_src) != VOIDmode)))
1862 {
1863 if (GET_MODE (orig_src) == VOIDmode)
1864 src = gen_reg_rtx (mode);
1865 else
1866 src = gen_reg_rtx (GET_MODE (orig_src));
04050c69 1867
f3ce87a9
DE
1868 emit_move_insn (src, orig_src);
1869 }
1870
aac5cc16
RH
1871 /* Optimize the access just a bit. */
1872 if (GET_CODE (src) == MEM
6e985040
AM
1873 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1874 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
729a2125 1875 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
aac5cc16
RH
1876 && bytelen == GET_MODE_SIZE (mode))
1877 {
1878 tmps[i] = gen_reg_rtx (mode);
f4ef873c 1879 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
fffa9c1d 1880 }
7c4a6db0
JW
1881 else if (GET_CODE (src) == CONCAT)
1882 {
015b1ad1
JDA
1883 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1884 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1885
1886 if ((bytepos == 0 && bytelen == slen0)
1887 || (bytepos != 0 && bytepos + bytelen <= slen))
cbb92744 1888 {
015b1ad1
JDA
1889 /* The following assumes that the concatenated objects all
1890 have the same size. In this case, a simple calculation
1891 can be used to determine the object and the bit field
1892 to be extracted. */
1893 tmps[i] = XEXP (src, bytepos / slen0);
cbb92744
JJ
1894 if (! CONSTANT_P (tmps[i])
1895 && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
1896 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
015b1ad1
JDA
1897 (bytepos % slen0) * BITS_PER_UNIT,
1898 1, NULL_RTX, mode, mode, ssize);
cbb92744 1899 }
58f69841
JH
1900 else if (bytepos == 0)
1901 {
015b1ad1 1902 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
58f69841 1903 emit_move_insn (mem, src);
04050c69 1904 tmps[i] = adjust_address (mem, mode, 0);
58f69841 1905 }
7c4a6db0
JW
1906 else
1907 abort ();
1908 }
9c0631a7
AH
1909 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
 1910 SIMD register, which is currently broken. Until we get GCC
1911 to emit proper RTL for these cases, let's dump to memory. */
1912 else if (VECTOR_MODE_P (GET_MODE (dst))
1913 && GET_CODE (src) == REG)
1914 {
1915 int slen = GET_MODE_SIZE (GET_MODE (src));
1916 rtx mem;
1917
1918 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1919 emit_move_insn (mem, src);
1920 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1921 }
d3a16cbd
FJ
1922 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1923 && XVECLEN (dst, 0) > 1)
 1924 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
f3ce87a9 1925 else if (CONSTANT_P (src)
2ee5437b
RH
1926 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
1927 tmps[i] = src;
fffa9c1d 1928 else
19caa751
RK
1929 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1930 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
04050c69 1931 mode, mode, ssize);
fffa9c1d 1932
6e985040 1933 if (shift)
19caa751
RK
1934 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1935 tmps[i], 0, OPTAB_WIDEN);
fffa9c1d 1936 }
19caa751 1937
3a94c984 1938 emit_queue ();
aac5cc16
RH
1939
1940 /* Copy the extracted pieces into the proper (probable) hard regs. */
1941 for (i = start; i < XVECLEN (dst, 0); i++)
1942 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
fffa9c1d
JW
1943}
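
/* Standalone sketch (assumptions: 64-bit pieces, a little-endian host so
   memcpy into a uint64_t fills the lsb end, big-endian target semantics
   for the shift) of what one iteration above computes for a piece that
   overruns SSIZE: extract_bit_field loads the bytes at the lsb, and the
   ashl moves the fragment up to where a big-endian register expects it.  */

#include <stdint.h>
#include <string.h>

static uint64_t
group_load_piece_sketch (const unsigned char *src, int ssize,
			 int bytepos, int bytelen)
{
  uint64_t word = 0;
  int shift = 0;

  /* Trailing fragment that runs over the size of the struct.  */
  if (bytepos + bytelen > ssize)
    {
      shift = (bytelen - (ssize - bytepos)) * 8;
      bytelen = ssize - bytepos;
    }

  memcpy (&word, src + bytepos, (size_t) bytelen);  /* load to the lsb */
  return word << shift;				    /* move fragment up */
}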
1944
084a1106
JDA
1945/* Emit code to move a block SRC to block DST, where SRC and DST are
1946 non-consecutive groups of registers, each represented by a PARALLEL. */
1947
1948void
502b8322 1949emit_group_move (rtx dst, rtx src)
084a1106
JDA
1950{
1951 int i;
1952
1953 if (GET_CODE (src) != PARALLEL
1954 || GET_CODE (dst) != PARALLEL
1955 || XVECLEN (src, 0) != XVECLEN (dst, 0))
1956 abort ();
1957
1958 /* Skip first entry if NULL. */
1959 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1960 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1961 XEXP (XVECEXP (src, 0, i), 0));
1962}
1963
6e985040
AM
1964/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1965 where SRC is non-consecutive registers represented by a PARALLEL.
1966 SSIZE represents the total size of block ORIG_DST, or -1 if not
1967 known. */
fffa9c1d
JW
1968
1969void
6e985040 1970emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
fffa9c1d 1971{
aac5cc16
RH
1972 rtx *tmps, dst;
1973 int start, i;
fffa9c1d 1974
aac5cc16 1975 if (GET_CODE (src) != PARALLEL)
fffa9c1d
JW
1976 abort ();
1977
1978 /* Check for a NULL entry, used to indicate that the parameter goes
1979 both on the stack and in registers. */
aac5cc16
RH
1980 if (XEXP (XVECEXP (src, 0, 0), 0))
1981 start = 0;
fffa9c1d 1982 else
aac5cc16
RH
1983 start = 1;
1984
703ad42b 1985 tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));
fffa9c1d 1986
aac5cc16
RH
1987 /* Copy the (probable) hard regs into pseudos. */
1988 for (i = start; i < XVECLEN (src, 0); i++)
fffa9c1d 1989 {
aac5cc16
RH
1990 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1991 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1992 emit_move_insn (tmps[i], reg);
1993 }
3a94c984 1994 emit_queue ();
fffa9c1d 1995
aac5cc16
RH
1996 /* If we won't be storing directly into memory, protect the real destination
1997 from strange tricks we might play. */
1998 dst = orig_dst;
10a9f2be
JW
1999 if (GET_CODE (dst) == PARALLEL)
2000 {
2001 rtx temp;
2002
2003 /* We can get a PARALLEL dst if there is a conditional expression in
2004 a return statement. In that case, the dst and src are the same,
2005 so no action is necessary. */
2006 if (rtx_equal_p (dst, src))
2007 return;
2008
2009 /* It is unclear if we can ever reach here, but we may as well handle
2010 it. Allocate a temporary, and split this into a store/load to/from
2011 the temporary. */
2012
2013 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
6e985040
AM
2014 emit_group_store (temp, src, type, ssize);
2015 emit_group_load (dst, temp, type, ssize);
10a9f2be
JW
2016 return;
2017 }
75897075 2018 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
aac5cc16
RH
2019 {
2020 dst = gen_reg_rtx (GET_MODE (orig_dst));
2021 /* Make life a bit easier for combine. */
8ae91fc0 2022 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
aac5cc16 2023 }
aac5cc16
RH
2024
2025 /* Process the pieces. */
2026 for (i = start; i < XVECLEN (src, 0); i++)
2027 {
770ae6cc 2028 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
aac5cc16 2029 enum machine_mode mode = GET_MODE (tmps[i]);
770ae6cc 2030 unsigned int bytelen = GET_MODE_SIZE (mode);
6ddae612 2031 rtx dest = dst;
aac5cc16
RH
2032
2033 /* Handle trailing fragments that run over the size of the struct. */
8752c357 2034 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
71bc0330 2035 {
6e985040
AM
2036 /* store_bit_field always takes its value from the lsb.
2037 Move the fragment to the lsb if it's not already there. */
2038 if (
2039#ifdef BLOCK_REG_PADDING
2040 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2041 == (BYTES_BIG_ENDIAN ? upward : downward)
2042#else
2043 BYTES_BIG_ENDIAN
2044#endif
2045 )
aac5cc16
RH
2046 {
2047 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2048 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2049 tmps[i], 0, OPTAB_WIDEN);
2050 }
2051 bytelen = ssize - bytepos;
71bc0330 2052 }
fffa9c1d 2053
6ddae612
JJ
2054 if (GET_CODE (dst) == CONCAT)
2055 {
2056 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2057 dest = XEXP (dst, 0);
2058 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2059 {
2060 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2061 dest = XEXP (dst, 1);
2062 }
0d446150
JH
2063 else if (bytepos == 0 && XVECLEN (src, 0))
2064 {
2065 dest = assign_stack_temp (GET_MODE (dest),
2066 GET_MODE_SIZE (GET_MODE (dest)), 0);
2067 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2068 tmps[i]);
2069 dst = dest;
2070 break;
2071 }
6ddae612
JJ
2072 else
2073 abort ();
2074 }
2075
aac5cc16 2076 /* Optimize the access just a bit. */
6ddae612 2077 if (GET_CODE (dest) == MEM
6e985040
AM
2078 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2079 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
729a2125 2080 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
aac5cc16 2081 && bytelen == GET_MODE_SIZE (mode))
6ddae612 2082 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
aac5cc16 2083 else
6ddae612 2084 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
04050c69 2085 mode, tmps[i], ssize);
fffa9c1d 2086 }
729a2125 2087
3a94c984 2088 emit_queue ();
aac5cc16
RH
2089
2090 /* Copy from the pseudo into the (probable) hard reg. */
0d446150 2091 if (orig_dst != dst)
aac5cc16 2092 emit_move_insn (orig_dst, dst);
fffa9c1d
JW
2093}
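
/* The mirror image of the load sketch above (same assumptions, with a
   logical shift standing in for the ashr): store_bit_field takes its
   value from the lsb, so a big-endian-justified trailing fragment is
   shifted down before its surviving bytes are written.  */

#include <stdint.h>
#include <string.h>

static void
group_store_piece_sketch (unsigned char *dst, int ssize,
			  int bytepos, int bytelen, uint64_t word)
{
  if (bytepos + bytelen > ssize)
    {
      int shift = (bytelen - (ssize - bytepos)) * 8;
      word >>= shift;			/* bring the fragment to the lsb */
      bytelen = ssize - bytepos;
    }

  memcpy (dst + bytepos, &word, (size_t) bytelen);
}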
2094
c36fce9a
GRK
2095/* Generate code to copy a BLKmode object of TYPE out of a
2096 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2097 is null, a stack temporary is created. TGTBLK is returned.
2098
c988af2b
RS
2099 The purpose of this routine is to handle functions that return
2100 BLKmode structures in registers. Some machines (the PA for example)
2101 want to return all small structures in registers regardless of the
2102 structure's alignment. */
c36fce9a
GRK
2103
2104rtx
502b8322 2105copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
c36fce9a 2106{
19caa751
RK
2107 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2108 rtx src = NULL, dst = NULL;
2109 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
c988af2b 2110 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
19caa751
RK
2111
2112 if (tgtblk == 0)
2113 {
1da68f56
RK
2114 tgtblk = assign_temp (build_qualified_type (type,
2115 (TYPE_QUALS (type)
2116 | TYPE_QUAL_CONST)),
2117 0, 1, 1);
19caa751
RK
2118 preserve_temp_slots (tgtblk);
2119 }
3a94c984 2120
1ed1b4fb 2121 /* This code assumes srcreg is at least a full word. If it isn't, copy it
9ac3e73b 2122 into a new pseudo which is a full word. */
0d7839da 2123
19caa751
RK
2124 if (GET_MODE (srcreg) != BLKmode
2125 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
9ac3e73b 2126 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
19caa751 2127
c988af2b
RS
2128 /* If the structure doesn't take up a whole number of words, see whether
2129 SRCREG is padded on the left or on the right. If it's on the left,
2130 set PADDING_CORRECTION to the number of bits to skip.
2131
 2132 In most ABIs, the structure will be returned at the least significant end of
2133 the register, which translates to right padding on little-endian
2134 targets and left padding on big-endian targets. The opposite
2135 holds if the structure is returned at the most significant
2136 end of the register. */
2137 if (bytes % UNITS_PER_WORD != 0
2138 && (targetm.calls.return_in_msb (type)
2139 ? !BYTES_BIG_ENDIAN
2140 : BYTES_BIG_ENDIAN))
2141 padding_correction
19caa751
RK
2142 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2143
 2144 /* Copy the structure BITSIZE bits at a time.
3a94c984 2145
19caa751
RK
2146 We could probably emit more efficient code for machines which do not use
2147 strict alignment, but it doesn't seem worth the effort at the current
2148 time. */
c988af2b 2149 for (bitpos = 0, xbitpos = padding_correction;
19caa751
RK
2150 bitpos < bytes * BITS_PER_UNIT;
2151 bitpos += bitsize, xbitpos += bitsize)
2152 {
3a94c984 2153 /* We need a new source operand each time xbitpos is on a
c988af2b 2154 word boundary and when xbitpos == padding_correction
19caa751
RK
2155 (the first time through). */
2156 if (xbitpos % BITS_PER_WORD == 0
c988af2b 2157 || xbitpos == padding_correction)
b47f8cfc
JH
2158 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2159 GET_MODE (srcreg));
19caa751
RK
2160
2161 /* We need a new destination operand each time bitpos is on
2162 a word boundary. */
2163 if (bitpos % BITS_PER_WORD == 0)
2164 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
3a94c984 2165
19caa751
RK
2166 /* Use xbitpos for the source extraction (right justified) and
 2167 bitpos for the destination store (left justified). */
2168 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2169 extract_bit_field (src, bitsize,
2170 xbitpos % BITS_PER_WORD, 1,
2171 NULL_RTX, word_mode, word_mode,
04050c69
RK
2172 BITS_PER_WORD),
2173 BITS_PER_WORD);
19caa751
RK
2174 }
2175
2176 return tgtblk;
c36fce9a
GRK
2177}
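
/* Worked example of the padding correction (assumptions: 32-bit words,
   a 6-byte struct, and an ABI that returns the value at the least
   significant end of the register): bytes % UNITS_PER_WORD == 2, so on
   a big-endian target the first register carries 16 pad bits at its
   most significant end, and the copy loop starts XBITPOS there.  */

static unsigned int
padding_correction_sketch (unsigned int bytes, unsigned int units_per_word)
{
  unsigned int bits_per_word = units_per_word * 8;

  if (bytes % units_per_word == 0)
    return 0;
  return bits_per_word - (bytes % units_per_word) * 8;	/* 32 - 16 == 16 */
}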
2178
94b25f81
RK
2179/* Add a USE expression for REG to the (possibly empty) list pointed
2180 to by CALL_FUSAGE. REG must denote a hard register. */
bbf6f052
RK
2181
2182void
502b8322 2183use_reg (rtx *call_fusage, rtx reg)
b3f8cf4a 2184{
0304dfbb
DE
2185 if (GET_CODE (reg) != REG
2186 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
3a94c984 2187 abort ();
b3f8cf4a
RK
2188
2189 *call_fusage
38a448ca
RH
2190 = gen_rtx_EXPR_LIST (VOIDmode,
2191 gen_rtx_USE (VOIDmode, reg), *call_fusage);
b3f8cf4a
RK
2192}
2193
94b25f81
RK
2194/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2195 starting at REGNO. All of these registers must be hard registers. */
b3f8cf4a
RK
2196
2197void
502b8322 2198use_regs (rtx *call_fusage, int regno, int nregs)
bbf6f052 2199{
0304dfbb 2200 int i;
bbf6f052 2201
0304dfbb
DE
2202 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2203 abort ();
2204
2205 for (i = 0; i < nregs; i++)
e50126e8 2206 use_reg (call_fusage, regno_reg_rtx[regno + i]);
bbf6f052 2207}
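
/* Illustrative use (hypothetical register numbers): record that an
   upcoming call reads hard regs 3 and 4, so data-flow passes keep the
   argument values live up to the call.  The list is later attached to
   the CALL_INSN's CALL_INSN_FUNCTION_USAGE.  */

  rtx call_fusage = NULL_RTX;
  use_regs (&call_fusage, 3, 2);   /* adds USE (reg 3) and USE (reg 4) */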
fffa9c1d
JW
2208
2209/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2210 PARALLEL REGS. This is for calls that pass values in multiple
2211 non-contiguous locations. The Irix 6 ABI has examples of this. */
2212
2213void
502b8322 2214use_group_regs (rtx *call_fusage, rtx regs)
fffa9c1d
JW
2215{
2216 int i;
2217
6bd35f86
DE
2218 for (i = 0; i < XVECLEN (regs, 0); i++)
2219 {
2220 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
fffa9c1d 2221
6bd35f86
DE
2222 /* A NULL entry means the parameter goes both on the stack and in
2223 registers. This can also be a MEM for targets that pass values
2224 partially on the stack and partially in registers. */
e9a25f70 2225 if (reg != 0 && GET_CODE (reg) == REG)
6bd35f86
DE
2226 use_reg (call_fusage, reg);
2227 }
fffa9c1d 2228}
bbf6f052 2229\f
57814e5e 2230
cf5124f6
RS
2231/* Determine whether the LEN bytes generated by CONSTFUN can be
2232 stored to memory using several move instructions. CONSTFUNDATA is
2233 a pointer which will be passed as argument in every CONSTFUN call.
2234 ALIGN is maximum alignment we can assume. Return nonzero if a
2235 call to store_by_pieces should succeed. */
2236
57814e5e 2237int
502b8322
AJ
2238can_store_by_pieces (unsigned HOST_WIDE_INT len,
2239 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2240 void *constfundata, unsigned int align)
57814e5e 2241{
98166639 2242 unsigned HOST_WIDE_INT max_size, l;
57814e5e
JJ
2243 HOST_WIDE_INT offset = 0;
2244 enum machine_mode mode, tmode;
2245 enum insn_code icode;
2246 int reverse;
2247 rtx cst;
2248
2c430630
RS
2249 if (len == 0)
2250 return 1;
2251
4977bab6 2252 if (! STORE_BY_PIECES_P (len, align))
57814e5e
JJ
2253 return 0;
2254
2255 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2256 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2257 align = MOVE_MAX * BITS_PER_UNIT;
2258
2259 /* We would first store what we can in the largest integer mode, then go to
2260 successively smaller modes. */
2261
2262 for (reverse = 0;
2263 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2264 reverse++)
2265 {
2266 l = len;
2267 mode = VOIDmode;
cf5124f6 2268 max_size = STORE_MAX_PIECES + 1;
57814e5e
JJ
2269 while (max_size > 1)
2270 {
2271 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2272 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2273 if (GET_MODE_SIZE (tmode) < max_size)
2274 mode = tmode;
2275
2276 if (mode == VOIDmode)
2277 break;
2278
2279 icode = mov_optab->handlers[(int) mode].insn_code;
2280 if (icode != CODE_FOR_nothing
2281 && align >= GET_MODE_ALIGNMENT (mode))
2282 {
2283 unsigned int size = GET_MODE_SIZE (mode);
2284
2285 while (l >= size)
2286 {
2287 if (reverse)
2288 offset -= size;
2289
2290 cst = (*constfun) (constfundata, offset, mode);
2291 if (!LEGITIMATE_CONSTANT_P (cst))
2292 return 0;
2293
2294 if (!reverse)
2295 offset += size;
2296
2297 l -= size;
2298 }
2299 }
2300
2301 max_size = GET_MODE_SIZE (mode);
2302 }
2303
2304 /* The code above should have handled everything. */
2305 if (l != 0)
2306 abort ();
2307 }
2308
2309 return 1;
2310}
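
/* Standalone sketch of the mode walk above (assumption: chunk sizes are
   powers of two, as integer mode sizes are): cover L bytes with the
   widest chunks first, then successively smaller ones.  For l == 11 and
   an 8-byte maximum this yields 8 + 2 + 1, i.e. three pieces.  */

static int
piece_count_sketch (unsigned long l, unsigned int max_chunk)
{
  int n = 0;
  unsigned int size;

  for (size = max_chunk; size >= 1; size /= 2)
    while (l >= size)
      {
	l -= size;
	n++;
      }
  return n;		/* l is 0 here: byte-sized pieces catch the rest */
}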
2311
2312/* Generate several move instructions to store LEN bytes generated by
2313 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2314 pointer which will be passed as argument in every CONSTFUN call.
8fd3cf4e
JJ
2315 ALIGN is maximum alignment we can assume.
 2316 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
 2317 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2318 stpcpy. */
57814e5e 2319
8fd3cf4e 2320rtx
502b8322
AJ
2321store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2322 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2323 void *constfundata, unsigned int align, int endp)
57814e5e
JJ
2324{
2325 struct store_by_pieces data;
2326
2c430630
RS
2327 if (len == 0)
2328 {
2329 if (endp == 2)
2330 abort ();
2331 return to;
2332 }
2333
4977bab6 2334 if (! STORE_BY_PIECES_P (len, align))
57814e5e
JJ
2335 abort ();
2336 to = protect_from_queue (to, 1);
2337 data.constfun = constfun;
2338 data.constfundata = constfundata;
2339 data.len = len;
2340 data.to = to;
2341 store_by_pieces_1 (&data, align);
8fd3cf4e
JJ
2342 if (endp)
2343 {
2344 rtx to1;
2345
2346 if (data.reverse)
2347 abort ();
2348 if (data.autinc_to)
2349 {
2350 if (endp == 2)
2351 {
2352 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2353 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2354 else
2355 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2356 -1));
2357 }
2358 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2359 data.offset);
2360 }
2361 else
2362 {
2363 if (endp == 2)
2364 --data.offset;
2365 to1 = adjust_address (data.to, QImode, data.offset);
2366 }
2367 return to1;
2368 }
2369 else
2370 return data.to;
57814e5e
JJ
2371}
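
/* A hypothetical CONSTFUN (illustration only; these names are made up):
   feed store_by_pieces MODE-sized constants taken from a host-side byte
   buffer, assembled low byte first.  A real callback must honor the
   target's byte order; the string-constant writers in builtins.c work
   along these lines.  */

struct piece_bytes { const unsigned char *buf; };

static rtx
piece_bytes_constfun (void *data, HOST_WIDE_INT offset,
		      enum machine_mode mode)
{
  const unsigned char *p = ((struct piece_bytes *) data)->buf + offset;
  HOST_WIDE_INT val = 0;
  unsigned int i;

  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    val |= (HOST_WIDE_INT) p[i] << (i * BITS_PER_UNIT);

  return GEN_INT (trunc_int_for_mode (val, mode));
}

/* e.g. store_by_pieces (to, len, piece_bytes_constfun, &d, align, 0);  */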
2372
19caa751
RK
2373/* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2374 rtx with BLKmode). The caller must pass TO through protect_from_queue
2375 before calling. ALIGN is maximum alignment we can assume. */
9de08200
RK
2376
2377static void
342e2b74 2378clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
9de08200 2379{
57814e5e
JJ
2380 struct store_by_pieces data;
2381
2c430630
RS
2382 if (len == 0)
2383 return;
2384
57814e5e 2385 data.constfun = clear_by_pieces_1;
df4ae160 2386 data.constfundata = NULL;
57814e5e
JJ
2387 data.len = len;
2388 data.to = to;
2389 store_by_pieces_1 (&data, align);
2390}
2391
2392/* Callback routine for clear_by_pieces.
2393 Return const0_rtx unconditionally. */
2394
2395static rtx
502b8322
AJ
2396clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2397 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2398 enum machine_mode mode ATTRIBUTE_UNUSED)
57814e5e
JJ
2399{
2400 return const0_rtx;
2401}
2402
2403/* Subroutine of clear_by_pieces and store_by_pieces.
2404 Generate several move instructions to store LEN bytes of block TO. (A MEM
2405 rtx with BLKmode). The caller must pass TO through protect_from_queue
2406 before calling. ALIGN is maximum alignment we can assume. */
2407
2408static void
502b8322
AJ
2409store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2410 unsigned int align ATTRIBUTE_UNUSED)
57814e5e
JJ
2411{
2412 rtx to_addr = XEXP (data->to, 0);
cf5124f6 2413 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
fbe1758d
AM
2414 enum machine_mode mode = VOIDmode, tmode;
2415 enum insn_code icode;
9de08200 2416
57814e5e
JJ
2417 data->offset = 0;
2418 data->to_addr = to_addr;
2419 data->autinc_to
9de08200
RK
2420 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2421 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2422
57814e5e
JJ
2423 data->explicit_inc_to = 0;
2424 data->reverse
9de08200 2425 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
57814e5e
JJ
2426 if (data->reverse)
2427 data->offset = data->len;
9de08200 2428
57814e5e 2429 /* If storing requires more than two move insns,
9de08200
RK
2430 copy addresses to registers (to make displacements shorter)
2431 and use post-increment if available. */
57814e5e
JJ
2432 if (!data->autinc_to
2433 && move_by_pieces_ninsns (data->len, align) > 2)
9de08200 2434 {
3a94c984 2435 /* Determine the main mode we'll be using. */
fbe1758d
AM
2436 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2437 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2438 if (GET_MODE_SIZE (tmode) < max_size)
2439 mode = tmode;
2440
57814e5e 2441 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
9de08200 2442 {
57814e5e
JJ
2443 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2444 data->autinc_to = 1;
2445 data->explicit_inc_to = -1;
9de08200 2446 }
3bdf5ad1 2447
57814e5e
JJ
2448 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2449 && ! data->autinc_to)
9de08200 2450 {
57814e5e
JJ
2451 data->to_addr = copy_addr_to_reg (to_addr);
2452 data->autinc_to = 1;
2453 data->explicit_inc_to = 1;
9de08200 2454 }
3bdf5ad1 2455
57814e5e
JJ
 2456 if (!data->autinc_to && CONSTANT_P (to_addr))
2457 data->to_addr = copy_addr_to_reg (to_addr);
9de08200
RK
2458 }
2459
e1565e65 2460 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
19caa751 2461 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
bdb429a5 2462 align = MOVE_MAX * BITS_PER_UNIT;
9de08200 2463
57814e5e 2464 /* First store what we can in the largest integer mode, then go to
9de08200
RK
2465 successively smaller modes. */
2466
2467 while (max_size > 1)
2468 {
9de08200
RK
2469 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2470 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2471 if (GET_MODE_SIZE (tmode) < max_size)
2472 mode = tmode;
2473
2474 if (mode == VOIDmode)
2475 break;
2476
2477 icode = mov_optab->handlers[(int) mode].insn_code;
19caa751 2478 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
57814e5e 2479 store_by_pieces_2 (GEN_FCN (icode), mode, data);
9de08200
RK
2480
2481 max_size = GET_MODE_SIZE (mode);
2482 }
2483
2484 /* The code above should have handled everything. */
57814e5e 2485 if (data->len != 0)
9de08200
RK
2486 abort ();
2487}
2488
57814e5e 2489/* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
9de08200
RK
2490 with move instructions for mode MODE. GENFUN is the gen_... function
2491 to make a move insn for that mode. DATA has all the other info. */
2492
2493static void
502b8322
AJ
2494store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2495 struct store_by_pieces *data)
9de08200 2496{
3bdf5ad1 2497 unsigned int size = GET_MODE_SIZE (mode);
57814e5e 2498 rtx to1, cst;
9de08200
RK
2499
2500 while (data->len >= size)
2501 {
3bdf5ad1
RK
2502 if (data->reverse)
2503 data->offset -= size;
9de08200 2504
3bdf5ad1 2505 if (data->autinc_to)
630036c6
JJ
2506 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2507 data->offset);
3a94c984 2508 else
f4ef873c 2509 to1 = adjust_address (data->to, mode, data->offset);
9de08200 2510
940da324 2511 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
57814e5e
JJ
2512 emit_insn (gen_add2_insn (data->to_addr,
2513 GEN_INT (-(HOST_WIDE_INT) size)));
9de08200 2514
57814e5e
JJ
2515 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2516 emit_insn ((*genfun) (to1, cst));
3bdf5ad1 2517
940da324 2518 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
9de08200 2519 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
9de08200 2520
3bdf5ad1
RK
2521 if (! data->reverse)
2522 data->offset += size;
9de08200
RK
2523
2524 data->len -= size;
2525 }
2526}
2527\f
19caa751 2528/* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
8ac61af7 2529 its length in bytes. */
e9a25f70
JL
2530
2531rtx
502b8322 2532clear_storage (rtx object, rtx size)
bbf6f052 2533{
e9a25f70 2534 rtx retval = 0;
8ac61af7
RK
2535 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2536 : GET_MODE_ALIGNMENT (GET_MODE (object)));
e9a25f70 2537
fcf1b822
RK
2538 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2539 just move a zero. Otherwise, do this a piece at a time. */
69ef87e2 2540 if (GET_MODE (object) != BLKmode
fcf1b822 2541 && GET_CODE (size) == CONST_INT
4ca79136 2542 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
fcf1b822
RK
2543 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2544 else
bbf6f052 2545 {
9de08200
RK
2546 object = protect_from_queue (object, 1);
2547 size = protect_from_queue (size, 0);
2548
6972c506 2549 if (size == const0_rtx)
2c430630
RS
2550 ;
2551 else if (GET_CODE (size) == CONST_INT
78762e3b 2552 && CLEAR_BY_PIECES_P (INTVAL (size), align))
9de08200 2553 clear_by_pieces (object, INTVAL (size), align);
4ca79136
RH
2554 else if (clear_storage_via_clrstr (object, size, align))
2555 ;
9de08200 2556 else
4ca79136
RH
2557 retval = clear_storage_via_libcall (object, size);
2558 }
2559
2560 return retval;
2561}
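
/* Illustrative use (OBJ is a hypothetical BLKmode MEM built elsewhere):
   zero a 32-byte aggregate.  clear_storage tries, in order, a plain
   CONST0_RTX move (non-BLKmode only), clear_by_pieces, a clrstr insn,
   and finally the memset/bzero libcall.  */

  rtx ret = clear_storage (obj, GEN_INT (32));	/* memset's value, or 0 */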
2562
2563/* A subroutine of clear_storage. Expand a clrstr pattern;
2564 return true if successful. */
2565
2566static bool
502b8322 2567clear_storage_via_clrstr (rtx object, rtx size, unsigned int align)
4ca79136
RH
2568{
2569 /* Try the most limited insn first, because there's no point
2570 including more than one in the machine description unless
2571 the more limited one has some advantage. */
2572
2573 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2574 enum machine_mode mode;
2575
2576 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2577 mode = GET_MODE_WIDER_MODE (mode))
2578 {
2579 enum insn_code code = clrstr_optab[(int) mode];
2580 insn_operand_predicate_fn pred;
2581
2582 if (code != CODE_FOR_nothing
2583 /* We don't need MODE to be narrower than
2584 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2585 the mode mask, as it is returned by the macro, it will
2586 definitely be less than the actual mode mask. */
2587 && ((GET_CODE (size) == CONST_INT
2588 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2589 <= (GET_MODE_MASK (mode) >> 1)))
2590 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2591 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2592 || (*pred) (object, BLKmode))
2593 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2594 || (*pred) (opalign, VOIDmode)))
9de08200 2595 {
4ca79136
RH
2596 rtx op1;
2597 rtx last = get_last_insn ();
2598 rtx pat;
9de08200 2599
4ca79136
RH
2600 op1 = convert_to_mode (mode, size, 1);
2601 pred = insn_data[(int) code].operand[1].predicate;
2602 if (pred != 0 && ! (*pred) (op1, mode))
2603 op1 = copy_to_mode_reg (mode, op1);
9de08200 2604
4ca79136
RH
2605 pat = GEN_FCN ((int) code) (object, op1, opalign);
2606 if (pat)
9de08200 2607 {
4ca79136
RH
2608 emit_insn (pat);
2609 return true;
2610 }
2611 else
2612 delete_insns_since (last);
2613 }
2614 }
9de08200 2615
4ca79136
RH
2616 return false;
2617}
9de08200 2618
4ca79136
RH
2619/* A subroutine of clear_storage. Expand a call to memset or bzero.
2620 Return the return value of memset, 0 otherwise. */
9de08200 2621
4ca79136 2622static rtx
502b8322 2623clear_storage_via_libcall (rtx object, rtx size)
4ca79136
RH
2624{
2625 tree call_expr, arg_list, fn, object_tree, size_tree;
2626 enum machine_mode size_mode;
2627 rtx retval;
9de08200 2628
4ca79136 2629 /* OBJECT or SIZE may have been passed through protect_from_queue.
52cf7115 2630
4ca79136
RH
2631 It is unsafe to save the value generated by protect_from_queue
2632 and reuse it later. Consider what happens if emit_queue is
2633 called before the return value from protect_from_queue is used.
52cf7115 2634
4ca79136
RH
2635 Expansion of the CALL_EXPR below will call emit_queue before
2636 we are finished emitting RTL for argument setup. So if we are
2637 not careful we could get the wrong value for an argument.
52cf7115 2638
4ca79136
RH
2639 To avoid this problem we go ahead and emit code to copy OBJECT
2640 and SIZE into new pseudos. We can then place those new pseudos
2641 into an RTL_EXPR and use them later, even after a call to
2642 emit_queue.
52cf7115 2643
4ca79136
RH
2644 Note this is not strictly needed for library calls since they
2645 do not call emit_queue before loading their arguments. However,
2646 we may need to have library calls call emit_queue in the future
2647 since failing to do so could cause problems for targets which
2648 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
52cf7115 2649
4ca79136 2650 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
52cf7115 2651
4ca79136
RH
2652 if (TARGET_MEM_FUNCTIONS)
2653 size_mode = TYPE_MODE (sizetype);
2654 else
2655 size_mode = TYPE_MODE (unsigned_type_node);
2656 size = convert_to_mode (size_mode, size, 1);
2657 size = copy_to_mode_reg (size_mode, size);
52cf7115 2658
4ca79136
RH
2659 /* It is incorrect to use the libcall calling conventions to call
2660 memset in this context. This could be a user call to memset and
2661 the user may wish to examine the return value from memset. For
2662 targets where libcalls and normal calls have different conventions
2663 for returning pointers, we could end up generating incorrect code.
4bc973ae 2664
4ca79136 2665 For convenience, we generate the call to bzero this way as well. */
4bc973ae 2666
4ca79136
RH
2667 object_tree = make_tree (ptr_type_node, object);
2668 if (TARGET_MEM_FUNCTIONS)
2669 size_tree = make_tree (sizetype, size);
2670 else
2671 size_tree = make_tree (unsigned_type_node, size);
2672
2673 fn = clear_storage_libcall_fn (true);
2674 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2675 if (TARGET_MEM_FUNCTIONS)
2676 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2677 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2678
2679 /* Now we have to build up the CALL_EXPR itself. */
2680 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2681 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2682 call_expr, arg_list, NULL_TREE);
4ca79136
RH
2683
2684 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2685
2686 /* If we are initializing a readonly value, show the above call
2687 clobbered it. Otherwise, a load from it may erroneously be
2688 hoisted from a loop. */
2689 if (RTX_UNCHANGING_P (object))
2690 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2691
2692 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
2693}
2694
2695/* A subroutine of clear_storage_via_libcall. Create the tree node
2696 for the function we use for block clears. The first time FOR_CALL
2697 is true, we call assemble_external. */
2698
2699static GTY(()) tree block_clear_fn;
66c60e67 2700
9661b15f 2701void
502b8322 2702init_block_clear_fn (const char *asmspec)
4ca79136 2703{
9661b15f 2704 if (!block_clear_fn)
4ca79136 2705 {
9661b15f
JJ
2706 tree fn, args;
2707
4ca79136
RH
2708 if (TARGET_MEM_FUNCTIONS)
2709 {
2710 fn = get_identifier ("memset");
2711 args = build_function_type_list (ptr_type_node, ptr_type_node,
2712 integer_type_node, sizetype,
2713 NULL_TREE);
2714 }
2715 else
2716 {
2717 fn = get_identifier ("bzero");
2718 args = build_function_type_list (void_type_node, ptr_type_node,
2719 unsigned_type_node, NULL_TREE);
9de08200 2720 }
4ca79136
RH
2721
2722 fn = build_decl (FUNCTION_DECL, fn, args);
2723 DECL_EXTERNAL (fn) = 1;
2724 TREE_PUBLIC (fn) = 1;
2725 DECL_ARTIFICIAL (fn) = 1;
2726 TREE_NOTHROW (fn) = 1;
2727
2728 block_clear_fn = fn;
bbf6f052 2729 }
e9a25f70 2730
9661b15f
JJ
2731 if (asmspec)
2732 {
2733 SET_DECL_RTL (block_clear_fn, NULL_RTX);
2734 SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
2735 }
2736}
2737
2738static tree
502b8322 2739clear_storage_libcall_fn (int for_call)
9661b15f
JJ
2740{
2741 static bool emitted_extern;
2742
2743 if (!block_clear_fn)
2744 init_block_clear_fn (NULL);
2745
4ca79136
RH
2746 if (for_call && !emitted_extern)
2747 {
2748 emitted_extern = true;
9661b15f
JJ
2749 make_decl_rtl (block_clear_fn, NULL);
2750 assemble_external (block_clear_fn);
4ca79136 2751 }
bbf6f052 2752
9661b15f 2753 return block_clear_fn;
4ca79136
RH
2754}
2755\f
bbf6f052
RK
2756/* Generate code to copy Y into X.
2757 Both Y and X must have the same mode, except that
2758 Y can be a constant with VOIDmode.
2759 This mode cannot be BLKmode; use emit_block_move for that.
2760
2761 Return the last instruction emitted. */
2762
2763rtx
502b8322 2764emit_move_insn (rtx x, rtx y)
bbf6f052
RK
2765{
2766 enum machine_mode mode = GET_MODE (x);
de1b33dd 2767 rtx y_cst = NULL_RTX;
0c19a26f 2768 rtx last_insn, set;
bbf6f052
RK
2769
2770 x = protect_from_queue (x, 1);
2771 y = protect_from_queue (y, 0);
2772
2773 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2774 abort ();
2775
ee5332b8
RH
2776 /* Never force constant_p_rtx to memory. */
2777 if (GET_CODE (y) == CONSTANT_P_RTX)
2778 ;
51286de6 2779 else if (CONSTANT_P (y))
de1b33dd 2780 {
51286de6 2781 if (optimize
075fc17a 2782 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
51286de6
RH
2783 && (last_insn = compress_float_constant (x, y)))
2784 return last_insn;
2785
0c19a26f
RS
2786 y_cst = y;
2787
51286de6
RH
2788 if (!LEGITIMATE_CONSTANT_P (y))
2789 {
51286de6 2790 y = force_const_mem (mode, y);
3a04ff64
RH
2791
2792 /* If the target's cannot_force_const_mem prevented the spill,
2793 assume that the target's move expanders will also take care
2794 of the non-legitimate constant. */
2795 if (!y)
2796 y = y_cst;
51286de6 2797 }
de1b33dd 2798 }
bbf6f052
RK
2799
2800 /* If X or Y are memory references, verify that their addresses are valid
2801 for the machine. */
2802 if (GET_CODE (x) == MEM
2803 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2804 && ! push_operand (x, GET_MODE (x)))
2805 || (flag_force_addr
2806 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
792760b9 2807 x = validize_mem (x);
bbf6f052
RK
2808
2809 if (GET_CODE (y) == MEM
2810 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2811 || (flag_force_addr
2812 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
792760b9 2813 y = validize_mem (y);
bbf6f052
RK
2814
2815 if (mode == BLKmode)
2816 abort ();
2817
de1b33dd
AO
2818 last_insn = emit_move_insn_1 (x, y);
2819
0c19a26f
RS
2820 if (y_cst && GET_CODE (x) == REG
2821 && (set = single_set (last_insn)) != NULL_RTX
2822 && SET_DEST (set) == x
2823 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3d238248 2824 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
de1b33dd
AO
2825
2826 return last_insn;
261c4230
RS
2827}
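
/* Illustrative use: load an immediate into a fresh pseudo.  If the
   constant were not LEGITIMATE_CONSTANT_P it would be spilled to the
   constant pool, and the code above would still attach a REG_EQUAL note
   recording the original value for later passes.  */

  rtx reg = gen_reg_rtx (SImode);
  emit_move_insn (reg, GEN_INT (42));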
2828
2829/* Low level part of emit_move_insn.
2830 Called just like emit_move_insn, but assumes X and Y
2831 are basically valid. */
2832
2833rtx
502b8322 2834emit_move_insn_1 (rtx x, rtx y)
261c4230
RS
2835{
2836 enum machine_mode mode = GET_MODE (x);
2837 enum machine_mode submode;
2838 enum mode_class class = GET_MODE_CLASS (mode);
261c4230 2839
dbbbbf3b 2840 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
3a94c984 2841 abort ();
76bbe028 2842
bbf6f052
RK
2843 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2844 return
2845 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2846
89742723 2847 /* Expand complex moves by moving real part and imag part, if possible. */
7308a047 2848 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
27e58a70 2849 && BLKmode != (submode = GET_MODE_INNER (mode))
7308a047
RS
2850 && (mov_optab->handlers[(int) submode].insn_code
2851 != CODE_FOR_nothing))
2852 {
2853 /* Don't split destination if it is a stack push. */
2854 int stack = push_operand (x, GET_MODE (x));
7308a047 2855
79ce92d7 2856#ifdef PUSH_ROUNDING
0e9cbd11
KH
 2857 /* In case we output to the stack, but the size is smaller than what the
2858 machine can push exactly, we need to use move instructions. */
1a06f5fe 2859 if (stack
bb93b973
RK
2860 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2861 != GET_MODE_SIZE (submode)))
1a06f5fe
JH
2862 {
2863 rtx temp;
bb93b973 2864 HOST_WIDE_INT offset1, offset2;
1a06f5fe
JH
2865
2866 /* Do not use anti_adjust_stack, since we don't want to update
2867 stack_pointer_delta. */
2868 temp = expand_binop (Pmode,
2869#ifdef STACK_GROWS_DOWNWARD
2870 sub_optab,
2871#else
2872 add_optab,
2873#endif
2874 stack_pointer_rtx,
2875 GEN_INT
bb93b973
RK
2876 (PUSH_ROUNDING
2877 (GET_MODE_SIZE (GET_MODE (x)))),
2878 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2879
1a06f5fe
JH
2880 if (temp != stack_pointer_rtx)
2881 emit_move_insn (stack_pointer_rtx, temp);
bb93b973 2882
1a06f5fe
JH
2883#ifdef STACK_GROWS_DOWNWARD
2884 offset1 = 0;
2885 offset2 = GET_MODE_SIZE (submode);
2886#else
2887 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2888 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2889 + GET_MODE_SIZE (submode));
2890#endif
bb93b973 2891
1a06f5fe
JH
2892 emit_move_insn (change_address (x, submode,
2893 gen_rtx_PLUS (Pmode,
2894 stack_pointer_rtx,
2895 GEN_INT (offset1))),
2896 gen_realpart (submode, y));
2897 emit_move_insn (change_address (x, submode,
2898 gen_rtx_PLUS (Pmode,
2899 stack_pointer_rtx,
2900 GEN_INT (offset2))),
2901 gen_imagpart (submode, y));
2902 }
e9c0bd54 2903 else
79ce92d7 2904#endif
7308a047
RS
2905 /* If this is a stack, push the highpart first, so it
2906 will be in the argument order.
2907
2908 In that case, change_address is used only to convert
2909 the mode, not to change the address. */
e9c0bd54 2910 if (stack)
c937357e 2911 {
e33c0d66
RS
2912 /* Note that the real part always precedes the imag part in memory
2913 regardless of machine's endianness. */
c937357e 2914#ifdef STACK_GROWS_DOWNWARD
a79b3dc7
RS
2915 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2916 gen_imagpart (submode, y));
2917 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2918 gen_realpart (submode, y));
c937357e 2919#else
a79b3dc7
RS
2920 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2921 gen_realpart (submode, y));
2922 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2923 gen_imagpart (submode, y));
c937357e
RS
2924#endif
2925 }
2926 else
2927 {
235ae7be
DM
2928 rtx realpart_x, realpart_y;
2929 rtx imagpart_x, imagpart_y;
2930
405f63da
MM
2931 /* If this is a complex value with each part being smaller than a
2932 word, the usual calling sequence will likely pack the pieces into
2933 a single register. Unfortunately, SUBREG of hard registers only
2934 deals in terms of words, so we have a problem converting input
2935 arguments to the CONCAT of two registers that is used elsewhere
2936 for complex values. If this is before reload, we can copy it into
2937 memory and reload. FIXME, we should see about using extract and
2938 insert on integer registers, but complex short and complex char
2939 variables should be rarely used. */
3a94c984 2940 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
405f63da
MM
2941 && (reload_in_progress | reload_completed) == 0)
2942 {
bb93b973
RK
2943 int packed_dest_p
2944 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2945 int packed_src_p
2946 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
405f63da
MM
2947
2948 if (packed_dest_p || packed_src_p)
2949 {
2950 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2951 ? MODE_FLOAT : MODE_INT);
2952
1da68f56
RK
2953 enum machine_mode reg_mode
2954 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
405f63da
MM
2955
2956 if (reg_mode != BLKmode)
2957 {
2958 rtx mem = assign_stack_temp (reg_mode,
2959 GET_MODE_SIZE (mode), 0);
f4ef873c 2960 rtx cmem = adjust_address (mem, mode, 0);
405f63da 2961
1da68f56
RK
2962 cfun->cannot_inline
2963 = N_("function using short complex types cannot be inline");
405f63da
MM
2964
2965 if (packed_dest_p)
2966 {
2967 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
bb93b973 2968
405f63da
MM
2969 emit_move_insn_1 (cmem, y);
2970 return emit_move_insn_1 (sreg, mem);
2971 }
2972 else
2973 {
2974 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
bb93b973 2975
405f63da
MM
2976 emit_move_insn_1 (mem, sreg);
2977 return emit_move_insn_1 (x, cmem);
2978 }
2979 }
2980 }
2981 }
2982
235ae7be
DM
2983 realpart_x = gen_realpart (submode, x);
2984 realpart_y = gen_realpart (submode, y);
2985 imagpart_x = gen_imagpart (submode, x);
2986 imagpart_y = gen_imagpart (submode, y);
2987
2988 /* Show the output dies here. This is necessary for SUBREGs
2989 of pseudos since we cannot track their lifetimes correctly;
c14c6529
RH
2990 hard regs shouldn't appear here except as return values.
2991 We never want to emit such a clobber after reload. */
2992 if (x != y
235ae7be
DM
2993 && ! (reload_in_progress || reload_completed)
2994 && (GET_CODE (realpart_x) == SUBREG
2995 || GET_CODE (imagpart_x) == SUBREG))
bb93b973 2996 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2638126a 2997
a79b3dc7
RS
2998 emit_move_insn (realpart_x, realpart_y);
2999 emit_move_insn (imagpart_x, imagpart_y);
c937357e 3000 }
7308a047 3001
7a1ab50a 3002 return get_last_insn ();
7308a047
RS
3003 }
3004
a3600c71
HPN
3005 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
3006 find a mode to do it in. If we have a movcc, use it. Otherwise,
3007 find the MODE_INT mode of the same width. */
3008 else if (GET_MODE_CLASS (mode) == MODE_CC
3009 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
3010 {
3011 enum insn_code insn_code;
3012 enum machine_mode tmode = VOIDmode;
3013 rtx x1 = x, y1 = y;
3014
3015 if (mode != CCmode
3016 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
3017 tmode = CCmode;
3018 else
3019 for (tmode = QImode; tmode != VOIDmode;
3020 tmode = GET_MODE_WIDER_MODE (tmode))
3021 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
3022 break;
3023
3024 if (tmode == VOIDmode)
3025 abort ();
3026
3027 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
3028 may call change_address which is not appropriate if we were
3029 called when a reload was in progress. We don't have to worry
3030 about changing the address since the size in bytes is supposed to
3031 be the same. Copy the MEM to change the mode and move any
3032 substitutions from the old MEM to the new one. */
3033
3034 if (reload_in_progress)
3035 {
3036 x = gen_lowpart_common (tmode, x1);
3037 if (x == 0 && GET_CODE (x1) == MEM)
3038 {
3039 x = adjust_address_nv (x1, tmode, 0);
3040 copy_replacements (x1, x);
3041 }
3042
3043 y = gen_lowpart_common (tmode, y1);
3044 if (y == 0 && GET_CODE (y1) == MEM)
3045 {
3046 y = adjust_address_nv (y1, tmode, 0);
3047 copy_replacements (y1, y);
3048 }
3049 }
3050 else
3051 {
3052 x = gen_lowpart (tmode, x);
3053 y = gen_lowpart (tmode, y);
3054 }
502b8322 3055
a3600c71
HPN
3056 insn_code = mov_optab->handlers[(int) tmode].insn_code;
3057 return emit_insn (GEN_FCN (insn_code) (x, y));
3058 }
3059
5581fc91
RS
3060 /* Try using a move pattern for the corresponding integer mode. This is
3061 only safe when simplify_subreg can convert MODE constants into integer
3062 constants. At present, it can only do this reliably if the value
3063 fits within a HOST_WIDE_INT. */
3064 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3065 && (submode = int_mode_for_mode (mode)) != BLKmode
3066 && mov_optab->handlers[submode].insn_code != CODE_FOR_nothing)
3067 return emit_insn (GEN_FCN (mov_optab->handlers[submode].insn_code)
3068 (simplify_gen_subreg (submode, x, mode, 0),
3069 simplify_gen_subreg (submode, y, mode, 0)));
3070
cffa2189
R
3071 /* This will handle any multi-word or full-word mode that lacks a move_insn
3072 pattern. However, you will get better code if you define such patterns,
bbf6f052 3073 even if they must turn into multiple assembler instructions. */
cffa2189 3074 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
bbf6f052
RK
3075 {
3076 rtx last_insn = 0;
3ef1eef4 3077 rtx seq, inner;
235ae7be 3078 int need_clobber;
bb93b973 3079 int i;
3a94c984 3080
a98c9f1a
RK
3081#ifdef PUSH_ROUNDING
3082
3083 /* If X is a push on the stack, do the push now and replace
3084 X with a reference to the stack pointer. */
3085 if (push_operand (x, GET_MODE (x)))
3086 {
918a6124
GK
3087 rtx temp;
3088 enum rtx_code code;
0fb7aeda 3089
918a6124
GK
3090 /* Do not use anti_adjust_stack, since we don't want to update
3091 stack_pointer_delta. */
3092 temp = expand_binop (Pmode,
3093#ifdef STACK_GROWS_DOWNWARD
3094 sub_optab,
3095#else
3096 add_optab,
3097#endif
3098 stack_pointer_rtx,
3099 GEN_INT
bb93b973
RK
3100 (PUSH_ROUNDING
3101 (GET_MODE_SIZE (GET_MODE (x)))),
a426c92e 3102 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
bb93b973 3103
0fb7aeda
KH
3104 if (temp != stack_pointer_rtx)
3105 emit_move_insn (stack_pointer_rtx, temp);
918a6124
GK
3106
3107 code = GET_CODE (XEXP (x, 0));
bb93b973 3108
918a6124
GK
3109 /* Just hope that small offsets off SP are OK. */
3110 if (code == POST_INC)
0fb7aeda 3111 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
bb93b973
RK
3112 GEN_INT (-((HOST_WIDE_INT)
3113 GET_MODE_SIZE (GET_MODE (x)))));
918a6124 3114 else if (code == POST_DEC)
0fb7aeda 3115 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
918a6124
GK
3116 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3117 else
3118 temp = stack_pointer_rtx;
3119
3120 x = change_address (x, VOIDmode, temp);
a98c9f1a
RK
3121 }
3122#endif
3a94c984 3123
3ef1eef4
RK
3124 /* If we are in reload, see if either operand is a MEM whose address
3125 is scheduled for replacement. */
3126 if (reload_in_progress && GET_CODE (x) == MEM
3127 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
f1ec5147 3128 x = replace_equiv_address_nv (x, inner);
3ef1eef4
RK
3129 if (reload_in_progress && GET_CODE (y) == MEM
3130 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
f1ec5147 3131 y = replace_equiv_address_nv (y, inner);
3ef1eef4 3132
235ae7be 3133 start_sequence ();
15a7a8ec 3134
235ae7be 3135 need_clobber = 0;
bbf6f052 3136 for (i = 0;
3a94c984 3137 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
bbf6f052
RK
3138 i++)
3139 {
3140 rtx xpart = operand_subword (x, i, 1, mode);
3141 rtx ypart = operand_subword (y, i, 1, mode);
3142
3143 /* If we can't get a part of Y, put Y into memory if it is a
3144 constant. Otherwise, force it into a register. If we still
3145 can't get a part of Y, abort. */
3146 if (ypart == 0 && CONSTANT_P (y))
3147 {
3148 y = force_const_mem (mode, y);
3149 ypart = operand_subword (y, i, 1, mode);
3150 }
3151 else if (ypart == 0)
3152 ypart = operand_subword_force (y, i, mode);
3153
3154 if (xpart == 0 || ypart == 0)
3155 abort ();
3156
235ae7be
DM
3157 need_clobber |= (GET_CODE (xpart) == SUBREG);
3158
bbf6f052
RK
3159 last_insn = emit_move_insn (xpart, ypart);
3160 }
6551fa4d 3161
2f937369 3162 seq = get_insns ();
235ae7be
DM
3163 end_sequence ();
3164
3165 /* Show the output dies here. This is necessary for SUBREGs
3166 of pseudos since we cannot track their lifetimes correctly;
3167 hard regs shouldn't appear here except as return values.
3168 We never want to emit such a clobber after reload. */
3169 if (x != y
3170 && ! (reload_in_progress || reload_completed)
3171 && need_clobber != 0)
bb93b973 3172 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
235ae7be
DM
3173
3174 emit_insn (seq);
3175
bbf6f052
RK
3176 return last_insn;
3177 }
3178 else
3179 abort ();
3180}
51286de6
RH
3181
3182/* If Y is representable exactly in a narrower mode, and the target can
3183 perform the extension directly from constant or memory, then emit the
3184 move as an extension. */
3185
3186static rtx
502b8322 3187compress_float_constant (rtx x, rtx y)
51286de6
RH
3188{
3189 enum machine_mode dstmode = GET_MODE (x);
3190 enum machine_mode orig_srcmode = GET_MODE (y);
3191 enum machine_mode srcmode;
3192 REAL_VALUE_TYPE r;
3193
3194 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3195
3196 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3197 srcmode != orig_srcmode;
3198 srcmode = GET_MODE_WIDER_MODE (srcmode))
3199 {
3200 enum insn_code ic;
3201 rtx trunc_y, last_insn;
3202
3203 /* Skip if the target can't extend this way. */
3204 ic = can_extend_p (dstmode, srcmode, 0);
3205 if (ic == CODE_FOR_nothing)
3206 continue;
3207
3208 /* Skip if the narrowed value isn't exact. */
3209 if (! exact_real_truncate (srcmode, &r))
3210 continue;
3211
3212 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3213
3214 if (LEGITIMATE_CONSTANT_P (trunc_y))
3215 {
3216 /* Skip if the target needs extra instructions to perform
3217 the extension. */
3218 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3219 continue;
3220 }
3221 else if (float_extend_from_mem[dstmode][srcmode])
3222 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3223 else
3224 continue;
3225
3226 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3227 last_insn = get_last_insn ();
3228
3229 if (GET_CODE (x) == REG)
0c19a26f 3230 set_unique_reg_note (last_insn, REG_EQUAL, y);
51286de6
RH
3231
3232 return last_insn;
3233 }
3234
3235 return NULL_RTX;
3236}
bbf6f052
RK
3237\f
3238/* Pushing data onto the stack. */
3239
3240/* Push a block of length SIZE (perhaps variable)
3241 and return an rtx to address the beginning of the block.
3242 Note that it is not possible for the value returned to be a QUEUED.
3243 The value may be virtual_outgoing_args_rtx.
3244
3245 EXTRA is the number of bytes of padding to push in addition to SIZE.
3246 BELOW nonzero means this padding comes at low addresses;
3247 otherwise, the padding comes at high addresses. */
3248
3249rtx
502b8322 3250push_block (rtx size, int extra, int below)
bbf6f052 3251{
b3694847 3252 rtx temp;
88f63c77
RK
3253
3254 size = convert_modes (Pmode, ptr_mode, size, 1);
bbf6f052
RK
3255 if (CONSTANT_P (size))
3256 anti_adjust_stack (plus_constant (size, extra));
3257 else if (GET_CODE (size) == REG && extra == 0)
3258 anti_adjust_stack (size);
3259 else
3260 {
ce48579b 3261 temp = copy_to_mode_reg (Pmode, size);
bbf6f052 3262 if (extra != 0)
906c4e36 3263 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
bbf6f052
RK
3264 temp, 0, OPTAB_LIB_WIDEN);
3265 anti_adjust_stack (temp);
3266 }
3267
f73ad30e 3268#ifndef STACK_GROWS_DOWNWARD
f73ad30e 3269 if (0)
f73ad30e
JH
3270#else
3271 if (1)
bbf6f052 3272#endif
f73ad30e 3273 {
f73ad30e
JH
3274 temp = virtual_outgoing_args_rtx;
3275 if (extra != 0 && below)
3276 temp = plus_constant (temp, extra);
3277 }
3278 else
3279 {
3280 if (GET_CODE (size) == CONST_INT)
3281 temp = plus_constant (virtual_outgoing_args_rtx,
3a94c984 3282 -INTVAL (size) - (below ? 0 : extra));
f73ad30e
JH
3283 else if (extra != 0 && !below)
3284 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3bdf5ad1 3285 negate_rtx (Pmode, plus_constant (size, extra)));
f73ad30e
JH
3286 else
3287 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3288 negate_rtx (Pmode, size));
3289 }
bbf6f052
RK
3290
3291 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3292}
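/* A worked example of the address arithmetic above (illustrative,
   assuming STACK_GROWS_DOWNWARD and a CONST_INT size): for SIZE == 16,
   EXTRA == 4 and BELOW == 0 the stack is anti-adjusted by 20 bytes and
   the block address is virtual_outgoing_args_rtx - 20, placing the
   padding at the high-address end; with BELOW != 0 the address is
   virtual_outgoing_args_rtx - 16, leaving the 4 padding bytes at the
   low-address end of the freshly allocated space.  */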
3293
21d93687
RK
3294#ifdef PUSH_ROUNDING
3295
566aa174 3296/* Emit single push insn. */
21d93687 3297
566aa174 3298static void
502b8322 3299emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
566aa174 3300{
566aa174 3301 rtx dest_addr;
918a6124 3302 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
566aa174 3303 rtx dest;
371b8fc0
JH
3304 enum insn_code icode;
3305 insn_operand_predicate_fn pred;
566aa174 3306
371b8fc0
JH
3307 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3308 /* If there is a push pattern, use it.  Otherwise try the old way of
3309 throwing a MEM representing the push operation to the move expander. */
3310 icode = push_optab->handlers[(int) mode].insn_code;
3311 if (icode != CODE_FOR_nothing)
3312 {
3313 if (((pred = insn_data[(int) icode].operand[0].predicate)
505ddab6 3314 && !((*pred) (x, mode))))
371b8fc0
JH
3315 x = force_reg (mode, x);
3316 emit_insn (GEN_FCN (icode) (x));
3317 return;
3318 }
566aa174
JH
3319 if (GET_MODE_SIZE (mode) == rounded_size)
3320 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
329d586f
KH
3321 /* If we are to pad downward, adjust the stack pointer first and
3322 then store X into the stack location using an offset. This is
3323 because emit_move_insn does not know how to pad; it does not have
3324 access to the type. */
3325 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3326 {
3327 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3328 HOST_WIDE_INT offset;
3329
3330 emit_move_insn (stack_pointer_rtx,
3331 expand_binop (Pmode,
3332#ifdef STACK_GROWS_DOWNWARD
3333 sub_optab,
3334#else
3335 add_optab,
3336#endif
3337 stack_pointer_rtx,
3338 GEN_INT (rounded_size),
3339 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3340
3341 offset = (HOST_WIDE_INT) padding_size;
3342#ifdef STACK_GROWS_DOWNWARD
3343 if (STACK_PUSH_CODE == POST_DEC)
3344 /* We have already decremented the stack pointer, so get the
3345 previous value. */
3346 offset += (HOST_WIDE_INT) rounded_size;
3347#else
3348 if (STACK_PUSH_CODE == POST_INC)
3349 /* We have already incremented the stack pointer, so get the
3350 previous value. */
3351 offset -= (HOST_WIDE_INT) rounded_size;
3352#endif
3353 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3354 }
566aa174
JH
3355 else
3356 {
3357#ifdef STACK_GROWS_DOWNWARD
329d586f 3358 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
566aa174 3359 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
505ddab6 3360 GEN_INT (-(HOST_WIDE_INT) rounded_size));
566aa174 3361#else
329d586f 3362 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
566aa174
JH
3363 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3364 GEN_INT (rounded_size));
3365#endif
3366 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3367 }
3368
3369 dest = gen_rtx_MEM (mode, dest_addr);
3370
566aa174
JH
3371 if (type != 0)
3372 {
3373 set_mem_attributes (dest, type, 1);
c3d32120
RK
3374
3375 if (flag_optimize_sibling_calls)
3376 /* Function incoming arguments may overlap with sibling call
3377 outgoing arguments and we cannot allow reordering of reads
3378 from function arguments with stores to outgoing arguments
3379 of sibling calls. */
3380 set_mem_alias_set (dest, 0);
566aa174
JH
3381 }
3382 emit_move_insn (dest, x);
566aa174 3383}
21d93687 3384#endif
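/* A worked example of the downward-padding path above (illustrative,
   assuming STACK_GROWS_DOWNWARD, STACK_PUSH_CODE == PRE_DEC and
   PUSH_ROUNDING rounding to 4-byte multiples): pushing an HImode value
   has GET_MODE_SIZE == 2 and rounded_size == 4, so padding_size == 2.
   The stack pointer is decremented by 4 first, then the value is
   stored at sp + 2; the two pad bytes end up at the lower addresses,
   which is what `downward' padding requires.  */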
566aa174 3385
bbf6f052
RK
3386/* Generate code to push X onto the stack, assuming it has mode MODE and
3387 type TYPE.
3388 MODE is redundant except when X is a CONST_INT (since they don't
3389 carry mode info).
3390 SIZE is an rtx for the size of data to be copied (in bytes),
3391 needed only if X is BLKmode.
3392
f1eaaf73 3393 ALIGN (in bits) is maximum alignment we can assume.
bbf6f052 3394
cd048831
RK
3395 If PARTIAL and REG are both nonzero, then copy that many of the first
3396 words of X into registers starting with REG, and push the rest of X.
bbf6f052
RK
3397 The amount of space pushed is decreased by PARTIAL words,
3398 rounded *down* to a multiple of PARM_BOUNDARY.
3399 REG must be a hard register in this case.
cd048831
RK
3400 If REG is zero but PARTIAL is not, take all other actions for an
3401 argument partially in registers, but do not actually load any
3402 registers.
bbf6f052
RK
3403
3404 EXTRA is the amount in bytes of extra space to leave next to this arg.
6dc42e49 3405 This is ignored if an argument block has already been allocated.
bbf6f052
RK
3406
3407 On a machine that lacks real push insns, ARGS_ADDR is the address of
3408 the bottom of the argument block for this call. We use indexing off there
3409 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3410 argument block has not been preallocated.
3411
e5e809f4
JL
3412 ARGS_SO_FAR is the size of args previously pushed for this call.
3413
3414 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3415 for arguments passed in registers. If nonzero, it will be the number
3416 of bytes required. */
bbf6f052
RK
3417
3418void
502b8322
AJ
3419emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3420 unsigned int align, int partial, rtx reg, int extra,
3421 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3422 rtx alignment_pad)
bbf6f052
RK
3423{
3424 rtx xinner;
3425 enum direction stack_direction
3426#ifdef STACK_GROWS_DOWNWARD
3427 = downward;
3428#else
3429 = upward;
3430#endif
3431
3432 /* Decide where to pad the argument: `downward' for below,
3433 `upward' for above, or `none' for no padding.
3434 Default is below for small data on big-endian machines; else above. */
3435 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3436
0fb7aeda 3437 /* Invert direction if stack is post-decrement.
9e0e11bf
GK
3438 FIXME: why? */
3439 if (STACK_PUSH_CODE == POST_DEC)
bbf6f052
RK
3440 if (where_pad != none)
3441 where_pad = (where_pad == downward ? upward : downward);
3442
3443 xinner = x = protect_from_queue (x, 0);
3444
3445 if (mode == BLKmode)
3446 {
3447 /* Copy a block into the stack, entirely or partially. */
3448
b3694847 3449 rtx temp;
bbf6f052 3450 int used = partial * UNITS_PER_WORD;
531547e9 3451 int offset;
bbf6f052 3452 int skip;
3a94c984 3453
531547e9
FJ
3454 if (reg && GET_CODE (reg) == PARALLEL)
3455 {
3456 /* Use the size of the elt to compute offset. */
3457 rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
3458 used = partial * GET_MODE_SIZE (GET_MODE (elt));
3459 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3460 }
3461 else
3462 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3463
bbf6f052
RK
3464 if (size == 0)
3465 abort ();
3466
3467 used -= offset;
3468
3469 /* USED is now the # of bytes we need not copy to the stack
3470 because registers will take care of them. */
3471
3472 if (partial != 0)
f4ef873c 3473 xinner = adjust_address (xinner, BLKmode, used);
bbf6f052
RK
3474
3475 /* If the partial register-part of the arg counts in its stack size,
3476 skip the part of stack space corresponding to the registers.
3477 Otherwise, start copying to the beginning of the stack space,
3478 by setting SKIP to 0. */
e5e809f4 3479 skip = (reg_parm_stack_space == 0) ? 0 : used;
bbf6f052
RK
3480
3481#ifdef PUSH_ROUNDING
3482 /* Do it with several push insns if that doesn't take lots of insns
3483 and if there is no difficulty with push insns that skip bytes
3484 on the stack for alignment purposes. */
3485 if (args_addr == 0
f73ad30e 3486 && PUSH_ARGS
bbf6f052
RK
3487 && GET_CODE (size) == CONST_INT
3488 && skip == 0
f26aca6d 3489 && MEM_ALIGN (xinner) >= align
15914757 3490 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
bbf6f052
RK
3491 /* Here we avoid the case of a structure whose weak alignment
3492 forces many pushes of a small amount of data,
3493 and such small pushes do rounding that causes trouble. */
e1565e65 3494 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
19caa751 3495 || align >= BIGGEST_ALIGNMENT
f1eaaf73
DE
3496 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3497 == (align / BITS_PER_UNIT)))
bbf6f052
RK
3498 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3499 {
3500 /* Push padding now if padding above and stack grows down,
3501 or if padding below and stack grows up.
3502 But if space already allocated, this has already been done. */
3503 if (extra && args_addr == 0
3504 && where_pad != none && where_pad != stack_direction)
906c4e36 3505 anti_adjust_stack (GEN_INT (extra));
bbf6f052 3506
8fd3cf4e 3507 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
bbf6f052
RK
3508 }
3509 else
3a94c984 3510#endif /* PUSH_ROUNDING */
bbf6f052 3511 {
7ab923cc
JJ
3512 rtx target;
3513
bbf6f052
RK
3514 /* Otherwise make space on the stack and copy the data
3515 to the address of that space. */
3516
3517 /* Deduct words put into registers from the size we must copy. */
3518 if (partial != 0)
3519 {
3520 if (GET_CODE (size) == CONST_INT)
906c4e36 3521 size = GEN_INT (INTVAL (size) - used);
bbf6f052
RK
3522 else
3523 size = expand_binop (GET_MODE (size), sub_optab, size,
906c4e36
RK
3524 GEN_INT (used), NULL_RTX, 0,
3525 OPTAB_LIB_WIDEN);
bbf6f052
RK
3526 }
3527
3528 /* Get the address of the stack space.
3529 In this case, we do not deal with EXTRA separately.
3530 A single stack adjust will do. */
3531 if (! args_addr)
3532 {
3533 temp = push_block (size, extra, where_pad == downward);
3534 extra = 0;
3535 }
3536 else if (GET_CODE (args_so_far) == CONST_INT)
3537 temp = memory_address (BLKmode,
3538 plus_constant (args_addr,
3539 skip + INTVAL (args_so_far)));
3540 else
3541 temp = memory_address (BLKmode,
38a448ca
RH
3542 plus_constant (gen_rtx_PLUS (Pmode,
3543 args_addr,
3544 args_so_far),
bbf6f052 3545 skip));
4ca79136
RH
3546
3547 if (!ACCUMULATE_OUTGOING_ARGS)
3548 {
3549 /* If the source is referenced relative to the stack pointer,
3550 copy it to another register to stabilize it. We do not need
3551 to do this if we know that we won't be changing sp. */
3552
3553 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3554 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3555 temp = copy_to_reg (temp);
3556 }
3557
3a94c984 3558 target = gen_rtx_MEM (BLKmode, temp);
7ab923cc 3559
3a94c984
KH
3560 if (type != 0)
3561 {
3562 set_mem_attributes (target, type, 1);
3563 /* Function incoming arguments may overlap with sibling call
3564 outgoing arguments and we cannot allow reordering of reads
3565 from function arguments with stores to outgoing arguments
3566 of sibling calls. */
ba4828e0 3567 set_mem_alias_set (target, 0);
3a94c984 3568 }
4ca79136 3569
44bb111a
RH
3570 /* ALIGN may well be better aligned than TYPE, e.g. due to
3571 PARM_BOUNDARY. Assume the caller isn't lying. */
3572 set_mem_align (target, align);
4ca79136 3573
44bb111a 3574 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
bbf6f052
RK
3575 }
3576 }
3577 else if (partial > 0)
3578 {
3579 /* Scalar partly in registers. */
3580
3581 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3582 int i;
3583 int not_stack;
3584 /* # words of start of argument
3585 that we must make space for but need not store. */
3586 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3587 int args_offset = INTVAL (args_so_far);
3588 int skip;
3589
3590 /* Push padding now if padding above and stack grows down,
3591 or if padding below and stack grows up.
3592 But if space already allocated, this has already been done. */
3593 if (extra && args_addr == 0
3594 && where_pad != none && where_pad != stack_direction)
906c4e36 3595 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
3596
3597 /* If we make space by pushing it, we might as well push
3598 the real data. Otherwise, we can leave OFFSET nonzero
3599 and leave the space uninitialized. */
3600 if (args_addr == 0)
3601 offset = 0;
3602
3603 /* Now NOT_STACK gets the number of words that we don't need to
3604 allocate on the stack. */
3605 not_stack = partial - offset;
3606
3607 /* If the partial register-part of the arg counts in its stack size,
3608 skip the part of stack space corresponding to the registers.
3609 Otherwise, start copying to the beginning of the stack space,
3610 by setting SKIP to 0. */
e5e809f4 3611 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
bbf6f052
RK
3612
3613 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3614 x = validize_mem (force_const_mem (mode, x));
3615
3616 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3617 SUBREGs of such registers are not allowed. */
3618 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3619 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3620 x = copy_to_reg (x);
3621
3622 /* Loop over all the words allocated on the stack for this arg. */
3623 /* We can do it by words, because any scalar bigger than a word
3624 has a size a multiple of a word. */
3625#ifndef PUSH_ARGS_REVERSED
3626 for (i = not_stack; i < size; i++)
3627#else
3628 for (i = size - 1; i >= not_stack; i--)
3629#endif
3630 if (i >= not_stack + offset)
3631 emit_push_insn (operand_subword_force (x, i, mode),
906c4e36
RK
3632 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3633 0, args_addr,
3634 GEN_INT (args_offset + ((i - not_stack + skip)
e5e809f4 3635 * UNITS_PER_WORD)),
4fc026cd 3636 reg_parm_stack_space, alignment_pad);
bbf6f052
RK
3637 }
3638 else
3639 {
3640 rtx addr;
3bdf5ad1 3641 rtx dest;
bbf6f052
RK
3642
3643 /* Push padding now if padding above and stack grows down,
3644 or if padding below and stack grows up.
3645 But if space already allocated, this has already been done. */
3646 if (extra && args_addr == 0
3647 && where_pad != none && where_pad != stack_direction)
906c4e36 3648 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
3649
3650#ifdef PUSH_ROUNDING
f73ad30e 3651 if (args_addr == 0 && PUSH_ARGS)
566aa174 3652 emit_single_push_insn (mode, x, type);
bbf6f052
RK
3653 else
3654#endif
921b3427
RK
3655 {
3656 if (GET_CODE (args_so_far) == CONST_INT)
3657 addr
3658 = memory_address (mode,
3a94c984 3659 plus_constant (args_addr,
921b3427 3660 INTVAL (args_so_far)));
3a94c984 3661 else
38a448ca
RH
3662 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3663 args_so_far));
566aa174
JH
3664 dest = gen_rtx_MEM (mode, addr);
3665 if (type != 0)
3666 {
3667 set_mem_attributes (dest, type, 1);
3668 /* Function incoming arguments may overlap with sibling call
3669 outgoing arguments and we cannot allow reordering of reads
3670 from function arguments with stores to outgoing arguments
3671 of sibling calls. */
ba4828e0 3672 set_mem_alias_set (dest, 0);
566aa174 3673 }
bbf6f052 3674
566aa174 3675 emit_move_insn (dest, x);
566aa174 3676 }
bbf6f052
RK
3677 }
3678
bbf6f052
RK
3679 /* If part should go in registers, copy that part
3680 into the appropriate registers. Do this now, at the end,
3681 since mem-to-mem copies above may do function calls. */
cd048831 3682 if (partial > 0 && reg != 0)
fffa9c1d
JW
3683 {
3684 /* Handle calls that pass values in multiple non-contiguous locations.
3685 The Irix 6 ABI has examples of this. */
3686 if (GET_CODE (reg) == PARALLEL)
6e985040 3687 emit_group_load (reg, x, type, -1);
fffa9c1d
JW
3688 else
3689 move_block_to_reg (REGNO (reg), x, partial, mode);
3690 }
bbf6f052
RK
3691
3692 if (extra && args_addr == 0 && where_pad == stack_direction)
906c4e36 3693 anti_adjust_stack (GEN_INT (extra));
3a94c984 3694
3ea2292a 3695 if (alignment_pad && args_addr == 0)
4fc026cd 3696 anti_adjust_stack (alignment_pad);
bbf6f052
RK
3697}
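/* A worked example of the PARTIAL bookkeeping above (illustrative,
   assuming UNITS_PER_WORD == 4 and PARM_BOUNDARY == 64): a 12-byte
   BLKmode argument with PARTIAL == 2 gives used == 8; offset is
   8 % 8 == 0, so xinner is advanced past all 8 register-borne bytes,
   only the remaining 4 bytes are copied to the stack, and the two
   register words are loaded last, by move_block_to_reg, after any
   mem-to-mem copies that might have called a function.  */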
3698\f
296b4ed9
RK
3699/* Return X if X can be used as a subtarget in a sequence of arithmetic
3700 operations. */
3701
3702static rtx
502b8322 3703get_subtarget (rtx x)
296b4ed9
RK
3704{
3705 return ((x == 0
3706 /* Only registers can be subtargets. */
3707 || GET_CODE (x) != REG
3708 /* If the register is readonly, it can't be set more than once. */
3709 || RTX_UNCHANGING_P (x)
3710 /* Don't use hard regs to avoid extending their life. */
3711 || REGNO (x) < FIRST_PSEUDO_REGISTER
3712 /* Avoid subtargets inside loops,
3713 since they hide some invariant expressions. */
3714 || preserve_subexpressions_p ())
3715 ? 0 : x);
3716}
3717
bbf6f052
RK
3718/* Expand an assignment that stores the value of FROM into TO.
3719 If WANT_VALUE is nonzero, return an rtx for the value of TO.
709f5be1
RS
3720 (This may contain a QUEUED rtx;
3721 if the value is constant, this rtx is a constant.)
b90f141a 3722 Otherwise, the returned value is NULL_RTX. */
bbf6f052
RK
3723
3724rtx
b90f141a 3725expand_assignment (tree to, tree from, int want_value)
bbf6f052 3726{
b3694847 3727 rtx to_rtx = 0;
bbf6f052
RK
3728 rtx result;
3729
3730 /* Don't crash if the lhs of the assignment was erroneous. */
3731
3732 if (TREE_CODE (to) == ERROR_MARK)
709f5be1
RS
3733 {
3734 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3735 return want_value ? result : NULL_RTX;
3736 }
bbf6f052
RK
3737
3738 /* Assignment of a structure component needs special treatment
3739 if the structure component's rtx is not simply a MEM.
6be58303
JW
3740 Assignment of an array element at a constant index, and assignment of
3741 an array element in an unaligned packed structure field, have the same
3742 problem. */
bbf6f052 3743
08293add 3744 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
7c02ae17
DE
3745 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
3746 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
bbf6f052
RK
3747 {
3748 enum machine_mode mode1;
770ae6cc 3749 HOST_WIDE_INT bitsize, bitpos;
a06ef755 3750 rtx orig_to_rtx;
7bb0943f 3751 tree offset;
bbf6f052
RK
3752 int unsignedp;
3753 int volatilep = 0;
0088fcb1
RK
3754 tree tem;
3755
3756 push_temp_slots ();
839c4796 3757 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
a06ef755 3758 &unsignedp, &volatilep);
bbf6f052
RK
3759
3760 /* If we are going to use store_bit_field and extract_bit_field,
3761 make sure to_rtx will be safe for multiple use. */
3762
3763 if (mode1 == VOIDmode && want_value)
3764 tem = stabilize_reference (tem);
3765
1ed1b4fb
RK
3766 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3767
7bb0943f
RS
3768 if (offset != 0)
3769 {
e3c8ea67 3770 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
7bb0943f
RS
3771
3772 if (GET_CODE (to_rtx) != MEM)
3773 abort ();
bd070e1a 3774
bd070e1a 3775#ifdef POINTERS_EXTEND_UNSIGNED
4b6c1672 3776 if (GET_MODE (offset_rtx) != Pmode)
267b28bd 3777 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
fa06ab5c
RK
3778#else
3779 if (GET_MODE (offset_rtx) != ptr_mode)
3780 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
bd070e1a 3781#endif
bd070e1a 3782
9a7b9f4f
JL
3783 /* A constant address in TO_RTX can have VOIDmode, we must not try
3784 to call force_reg for that case. Avoid that case. */
89752202
HB
3785 if (GET_CODE (to_rtx) == MEM
3786 && GET_MODE (to_rtx) == BLKmode
9a7b9f4f 3787 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
a06ef755 3788 && bitsize > 0
3a94c984 3789 && (bitpos % bitsize) == 0
89752202 3790 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
a06ef755 3791 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
89752202 3792 {
e3c8ea67 3793 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
89752202
HB
3794 bitpos = 0;
3795 }
3796
0d4903b8 3797 to_rtx = offset_address (to_rtx, offset_rtx,
818c0c94
RH
3798 highest_pow2_factor_for_type (TREE_TYPE (to),
3799 offset));
7bb0943f 3800 }
c5c76735 3801
998d7deb
RH
3802 if (GET_CODE (to_rtx) == MEM)
3803 {
998d7deb
RH
3804 /* If the field is at offset zero, we could have been given the
3805 DECL_RTX of the parent struct. Don't munge it. */
3806 to_rtx = shallow_copy_rtx (to_rtx);
3807
6f1087be 3808 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
998d7deb 3809 }
effbcc6a 3810
a06ef755
RK
3811 /* Deal with volatile and readonly fields. The former is only done
3812 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3813 if (volatilep && GET_CODE (to_rtx) == MEM)
3814 {
3815 if (to_rtx == orig_to_rtx)
3816 to_rtx = copy_rtx (to_rtx);
3817 MEM_VOLATILE_P (to_rtx) = 1;
bbf6f052
RK
3818 }
3819
956d6950 3820 if (TREE_CODE (to) == COMPONENT_REF
d76bc29c
EB
3821 && TREE_READONLY (TREE_OPERAND (to, 1))
3822 /* We can't assert that a MEM won't be set more than once
3823 if the component is not addressable because another
3824 non-addressable component may be referenced by the same MEM. */
3825 && ! (GET_CODE (to_rtx) == MEM && ! can_address_p (to)))
956d6950 3826 {
a06ef755 3827 if (to_rtx == orig_to_rtx)
956d6950 3828 to_rtx = copy_rtx (to_rtx);
956d6950
JL
3829 RTX_UNCHANGING_P (to_rtx) = 1;
3830 }
3831
a84b4898 3832 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
a06ef755
RK
3833 {
3834 if (to_rtx == orig_to_rtx)
3835 to_rtx = copy_rtx (to_rtx);
3836 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3837 }
3838
a06ef755
RK
3839 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3840 (want_value
3841 /* Spurious cast for HPUX compiler. */
3842 ? ((enum machine_mode)
3843 TYPE_MODE (TREE_TYPE (to)))
3844 : VOIDmode),
3845 unsignedp, TREE_TYPE (tem), get_alias_set (to));
a69beca1 3846
a06ef755
RK
3847 preserve_temp_slots (result);
3848 free_temp_slots ();
3849 pop_temp_slots ();
a69beca1 3850
a06ef755
RK
3851 /* If the value is meaningful, convert RESULT to the proper mode.
3852 Otherwise, return nothing. */
3853 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3854 TYPE_MODE (TREE_TYPE (from)),
3855 result,
3856 TREE_UNSIGNED (TREE_TYPE (to)))
3857 : NULL_RTX);
bbf6f052
RK
3858 }
3859
cd1db108
RS
3860 /* If the rhs is a function call and its value is not an aggregate,
3861 call the function before we start to compute the lhs.
3862 This is needed for correct code for cases such as
3863 val = setjmp (buf) on machines where reference to val
1ad87b63
RK
3864 requires loading up part of an address in a separate insn.
3865
1858863b
JW
3866 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3867 since it might be a promoted variable where the zero- or sign- extension
3868 needs to be done. Handling this in the normal way is safe because no
3869 computation is done before the call. */
61f71b34 3870 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
b35cd3c1 3871 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
1858863b
JW
3872 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3873 && GET_CODE (DECL_RTL (to)) == REG))
cd1db108 3874 {
0088fcb1
RK
3875 rtx value;
3876
3877 push_temp_slots ();
3878 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
cd1db108 3879 if (to_rtx == 0)
37a08a29 3880 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
aaf87c45 3881
fffa9c1d
JW
3882 /* Handle calls that return values in multiple non-contiguous locations.
3883 The Irix 6 ABI has examples of this. */
3884 if (GET_CODE (to_rtx) == PARALLEL)
6e985040
AM
3885 emit_group_load (to_rtx, value, TREE_TYPE (from),
3886 int_size_in_bytes (TREE_TYPE (from)));
fffa9c1d 3887 else if (GET_MODE (to_rtx) == BLKmode)
44bb111a 3888 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
aaf87c45 3889 else
6419e5b0 3890 {
5ae6cd0d 3891 if (POINTER_TYPE_P (TREE_TYPE (to)))
6419e5b0 3892 value = convert_memory_address (GET_MODE (to_rtx), value);
6419e5b0
DT
3893 emit_move_insn (to_rtx, value);
3894 }
cd1db108
RS
3895 preserve_temp_slots (to_rtx);
3896 free_temp_slots ();
0088fcb1 3897 pop_temp_slots ();
709f5be1 3898 return want_value ? to_rtx : NULL_RTX;
cd1db108
RS
3899 }
3900
bbf6f052
RK
3901 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3902 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3903
3904 if (to_rtx == 0)
37a08a29 3905 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
bbf6f052 3906
86d38d25 3907 /* Don't move directly into a return register. */
14a774a9
RK
3908 if (TREE_CODE (to) == RESULT_DECL
3909 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
86d38d25 3910 {
0088fcb1
RK
3911 rtx temp;
3912
3913 push_temp_slots ();
3914 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
14a774a9
RK
3915
3916 if (GET_CODE (to_rtx) == PARALLEL)
6e985040
AM
3917 emit_group_load (to_rtx, temp, TREE_TYPE (from),
3918 int_size_in_bytes (TREE_TYPE (from)));
14a774a9
RK
3919 else
3920 emit_move_insn (to_rtx, temp);
3921
86d38d25
RS
3922 preserve_temp_slots (to_rtx);
3923 free_temp_slots ();
0088fcb1 3924 pop_temp_slots ();
709f5be1 3925 return want_value ? to_rtx : NULL_RTX;
86d38d25
RS
3926 }
3927
bbf6f052
RK
3928 /* In case we are returning the contents of an object which overlaps
3929 the place the value is being stored, use a safe function when copying
3930 a value through a pointer into a structure value return block. */
3931 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3932 && current_function_returns_struct
3933 && !current_function_returns_pcc_struct)
3934 {
0088fcb1
RK
3935 rtx from_rtx, size;
3936
3937 push_temp_slots ();
33a20d10 3938 size = expr_size (from);
37a08a29 3939 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
bbf6f052 3940
4ca79136
RH
3941 if (TARGET_MEM_FUNCTIONS)
3942 emit_library_call (memmove_libfunc, LCT_NORMAL,
3943 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3944 XEXP (from_rtx, 0), Pmode,
3945 convert_to_mode (TYPE_MODE (sizetype),
3946 size, TREE_UNSIGNED (sizetype)),
3947 TYPE_MODE (sizetype));
3948 else
3949 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3950 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3951 XEXP (to_rtx, 0), Pmode,
3952 convert_to_mode (TYPE_MODE (integer_type_node),
3953 size,
3954 TREE_UNSIGNED (integer_type_node)),
3955 TYPE_MODE (integer_type_node));
bbf6f052
RK
3956
3957 preserve_temp_slots (to_rtx);
3958 free_temp_slots ();
0088fcb1 3959 pop_temp_slots ();
709f5be1 3960 return want_value ? to_rtx : NULL_RTX;
bbf6f052
RK
3961 }
3962
3963 /* Compute FROM and store the value in the rtx we got. */
3964
0088fcb1 3965 push_temp_slots ();
bbf6f052
RK
3966 result = store_expr (from, to_rtx, want_value);
3967 preserve_temp_slots (result);
3968 free_temp_slots ();
0088fcb1 3969 pop_temp_slots ();
709f5be1 3970 return want_value ? result : NULL_RTX;
bbf6f052
RK
3971}
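/* An illustrative note on the CALL_EXPR special case above (not from
   the original source): for

       val = setjmp (buf);

   the call is expanded before any part of VAL's address, so no
   address computation is live across the setjmp; a register holding
   part of the address would not be recomputed when the call returns
   abnormally via longjmp.  */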
3972
3973/* Generate code for computing expression EXP,
3974 and storing the value into TARGET.
bbf6f052
RK
3975 TARGET may contain a QUEUED rtx.
3976
8403445a 3977 If WANT_VALUE & 1 is nonzero, return a copy of the value
709f5be1
RS
3978 not in TARGET, so that we can be sure to use the proper
3979 value in a containing expression even if TARGET has something
3980 else stored in it. If possible, we copy the value through a pseudo
3981 and return that pseudo. Or, if the value is constant, we try to
3982 return the constant. In some cases, we return a pseudo
3983 copied *from* TARGET.
3984
3985 If the mode is BLKmode then we may return TARGET itself.
3986 It turns out that in BLKmode it doesn't cause a problem,
3987 because C has no operators that could combine two different
3988 assignments into the same BLKmode object with different values
3989 with no sequence point. Will other languages need this to
3990 be more thorough?
3991
8403445a 3992 If WANT_VALUE & 1 is 0, we return NULL, to make sure
709f5be1 3993 to catch quickly any cases where the caller uses the value
8403445a
AM
3994 and fails to set WANT_VALUE.
3995
3996 If WANT_VALUE & 2 is set, this is a store into a call param on the
3997 stack, and block moves may need to be treated specially. */
bbf6f052
RK
3998
3999rtx
502b8322 4000store_expr (tree exp, rtx target, int want_value)
bbf6f052 4001{
b3694847 4002 rtx temp;
0fab64a3 4003 rtx alt_rtl = NULL_RTX;
bbf6f052 4004 int dont_return_target = 0;
e5408e52 4005 int dont_store_target = 0;
bbf6f052 4006
847311f4
AL
4007 if (VOID_TYPE_P (TREE_TYPE (exp)))
4008 {
4009 /* C++ can generate ?: expressions with a throw expression in one
4010 branch and an rvalue in the other. Here, we resolve attempts to
4d6922ee 4011 store the throw expression's nonexistent result. */
847311f4
AL
4012 if (want_value)
4013 abort ();
4014 expand_expr (exp, const0_rtx, VOIDmode, 0);
4015 return NULL_RTX;
4016 }
bbf6f052
RK
4017 if (TREE_CODE (exp) == COMPOUND_EXPR)
4018 {
4019 /* Perform first part of compound expression, then assign from second
4020 part. */
8403445a
AM
4021 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4022 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
bbf6f052 4023 emit_queue ();
709f5be1 4024 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
bbf6f052
RK
4025 }
4026 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4027 {
4028 /* For conditional expression, get safe form of the target. Then
4029 test the condition, doing the appropriate assignment on either
4030 side. This avoids the creation of unnecessary temporaries.
4031 For non-BLKmode, it is more efficient not to do this. */
4032
4033 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4034
4035 emit_queue ();
4036 target = protect_from_queue (target, 1);
4037
dabf8373 4038 do_pending_stack_adjust ();
bbf6f052
RK
4039 NO_DEFER_POP;
4040 jumpifnot (TREE_OPERAND (exp, 0), lab1);
956d6950 4041 start_cleanup_deferral ();
8403445a 4042 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
956d6950 4043 end_cleanup_deferral ();
bbf6f052
RK
4044 emit_queue ();
4045 emit_jump_insn (gen_jump (lab2));
4046 emit_barrier ();
4047 emit_label (lab1);
956d6950 4048 start_cleanup_deferral ();
8403445a 4049 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
956d6950 4050 end_cleanup_deferral ();
bbf6f052
RK
4051 emit_queue ();
4052 emit_label (lab2);
4053 OK_DEFER_POP;
a3a58acc 4054
8403445a 4055 return want_value & 1 ? target : NULL_RTX;
bbf6f052 4056 }
bbf6f052 4057 else if (queued_subexp_p (target))
709f5be1
RS
4058 /* If target contains a postincrement, let's not risk
4059 using it as the place to generate the rhs. */
bbf6f052
RK
4060 {
4061 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4062 {
4063 /* Expand EXP into a new pseudo. */
4064 temp = gen_reg_rtx (GET_MODE (target));
8403445a
AM
4065 temp = expand_expr (exp, temp, GET_MODE (target),
4066 (want_value & 2
4067 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
bbf6f052
RK
4068 }
4069 else
8403445a
AM
4070 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4071 (want_value & 2
4072 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
709f5be1
RS
4073
4074 /* If target is volatile, ANSI requires accessing the value
4075 *from* the target, if it is accessed. So make that happen.
4076 In no case return the target itself. */
8403445a 4077 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
709f5be1 4078 dont_return_target = 1;
bbf6f052 4079 }
8403445a
AM
4080 else if ((want_value & 1) != 0
4081 && GET_CODE (target) == MEM
4082 && ! MEM_VOLATILE_P (target)
12f06d17
CH
4083 && GET_MODE (target) != BLKmode)
4084 /* If target is in memory and caller wants value in a register instead,
4085 arrange that. Pass TARGET as target for expand_expr so that,
4086 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4087 We know expand_expr will not use the target in that case.
4088 Don't do this if TARGET is volatile because we are supposed
4089 to write it and then read it. */
4090 {
8403445a
AM
4091 temp = expand_expr (exp, target, GET_MODE (target),
4092 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
12f06d17 4093 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
e5408e52
JJ
4094 {
4095 /* If TEMP is already in the desired TARGET, only copy it from
4096 memory and don't store it there again. */
4097 if (temp == target
4098 || (rtx_equal_p (temp, target)
4099 && ! side_effects_p (temp) && ! side_effects_p (target)))
4100 dont_store_target = 1;
4101 temp = copy_to_reg (temp);
4102 }
12f06d17
CH
4103 dont_return_target = 1;
4104 }
1499e0a8 4105 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
09da1532 4106 /* If this is a scalar in a register that is stored in a wider mode
1499e0a8
RK
4107 than the declared mode, compute the result into its declared mode
4108 and then convert to the wider mode. Our value is the computed
4109 expression. */
4110 {
b76b08ef
RK
4111 rtx inner_target = 0;
4112
5a32d038 4113 /* If we don't want a value, we can do the conversion inside EXP,
f635a84d
RK
4114 which will often result in some optimizations. Do the conversion
4115 in two steps: first change the signedness, if needed, then
ab6c58f1
RK
4116 the extend. But don't do this if the type of EXP is a subtype
4117 of something else since then the conversion might involve
4118 more than just converting modes. */
8403445a
AM
4119 if ((want_value & 1) == 0
4120 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
ab6c58f1 4121 && TREE_TYPE (TREE_TYPE (exp)) == 0)
f635a84d
RK
4122 {
4123 if (TREE_UNSIGNED (TREE_TYPE (exp))
4124 != SUBREG_PROMOTED_UNSIGNED_P (target))
ceef8ce4
NB
4125 exp = convert
4126 ((*lang_hooks.types.signed_or_unsigned_type)
4127 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
f635a84d 4128
b0c48229
NB
4129 exp = convert ((*lang_hooks.types.type_for_mode)
4130 (GET_MODE (SUBREG_REG (target)),
4131 SUBREG_PROMOTED_UNSIGNED_P (target)),
f635a84d 4132 exp);
b76b08ef
RK
4133
4134 inner_target = SUBREG_REG (target);
f635a84d 4135 }
3a94c984 4136
8403445a
AM
4137 temp = expand_expr (exp, inner_target, VOIDmode,
4138 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
b258707c 4139
7abec5be 4140 /* If TEMP is a MEM and we want a result value, make the access
502b8322
AJ
4141 now so it gets done only once. Strictly speaking, this is
4142 only necessary if the MEM is volatile, or if the address
7abec5be
RH
4143 overlaps TARGET. But not performing the load twice also
4144 reduces the amount of rtl we generate and then have to CSE. */
8403445a 4145 if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
766f36c7
RK
4146 temp = copy_to_reg (temp);
4147
b258707c
RS
4148 /* If TEMP is a VOIDmode constant, use convert_modes to make
4149 sure that we properly convert it. */
4150 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
1f1b0541
RH
4151 {
4152 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4153 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4154 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4155 GET_MODE (target), temp,
4156 SUBREG_PROMOTED_UNSIGNED_P (target));
4157 }
b258707c 4158
1499e0a8
RK
4159 convert_move (SUBREG_REG (target), temp,
4160 SUBREG_PROMOTED_UNSIGNED_P (target));
3dbecef9
JW
4161
4162 /* If we promoted a constant, change the mode back down to match
4163 target. Otherwise, the caller might get confused by a result whose
4164 mode is larger than expected. */
4165
8403445a 4166 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
3dbecef9 4167 {
b3ca30df
JJ
4168 if (GET_MODE (temp) != VOIDmode)
4169 {
4170 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4171 SUBREG_PROMOTED_VAR_P (temp) = 1;
0fb7aeda 4172 SUBREG_PROMOTED_UNSIGNED_SET (temp,
7879b81e 4173 SUBREG_PROMOTED_UNSIGNED_P (target));
b3ca30df
JJ
4174 }
4175 else
4176 temp = convert_modes (GET_MODE (target),
4177 GET_MODE (SUBREG_REG (target)),
4178 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
3dbecef9
JW
4179 }
4180
8403445a 4181 return want_value & 1 ? temp : NULL_RTX;
1499e0a8 4182 }
bbf6f052
RK
4183 else
4184 {
0fab64a3
MM
4185 temp = expand_expr_real (exp, target, GET_MODE (target),
4186 (want_value & 2
4187 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4188 &alt_rtl);
766f36c7 4189 /* Return TARGET if it's a specified hardware register.
709f5be1
RS
4190 If TARGET is a volatile mem ref, either return TARGET
4191 or return a reg copied *from* TARGET; ANSI requires this.
4192
4193 Otherwise, if TEMP is not TARGET, return TEMP
4194 if it is constant (for efficiency),
4195 or if we really want the correct value. */
bbf6f052
RK
4196 if (!(target && GET_CODE (target) == REG
4197 && REGNO (target) < FIRST_PSEUDO_REGISTER)
709f5be1 4198 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
effbcc6a 4199 && ! rtx_equal_p (temp, target)
8403445a 4200 && (CONSTANT_P (temp) || (want_value & 1) != 0))
bbf6f052
RK
4201 dont_return_target = 1;
4202 }
4203
b258707c
RS
4204 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4205 the same as that of TARGET, adjust the constant. This is needed, for
4206 example, in case it is a CONST_DOUBLE and we want only a word-sized
4207 value. */
4208 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
c1da1f33 4209 && TREE_CODE (exp) != ERROR_MARK
b258707c
RS
4210 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4211 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4212 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4213
bbf6f052 4214 /* If value was not generated in the target, store it there.
37a08a29
RK
4215 Convert the value to TARGET's type first if necessary.
4216 If TEMP and TARGET compare equal according to rtx_equal_p, but
f3f2255a
R
4217 one or both of them are volatile memory refs, we have to distinguish
4218 two cases:
4219 - expand_expr has used TARGET. In this case, we must not generate
4220 another copy. This can be detected by TARGET being equal according
4221 to == .
4222 - expand_expr has not used TARGET - that means that the source just
4223 happens to have the same RTX form. Since temp will have been created
4224 by expand_expr, it will compare unequal according to == .
4225 We must generate a copy in this case, to reach the correct number
4226 of volatile memory references. */
bbf6f052 4227
6036acbb 4228 if ((! rtx_equal_p (temp, target)
f3f2255a
R
4229 || (temp != target && (side_effects_p (temp)
4230 || side_effects_p (target))))
e5408e52 4231 && TREE_CODE (exp) != ERROR_MARK
a9772b60 4232 && ! dont_store_target
9c5c5f2c
MM
4233 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4234 but TARGET is not valid memory reference, TEMP will differ
4235 from TARGET although it is really the same location. */
0fab64a3 4236 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
e56fc090
HPN
4237 /* If there's nothing to copy, don't bother. Don't call expr_size
4238 unless necessary, because some front-ends (C++) expr_size-hook
4239 aborts on objects that are not supposed to be bit-copied or
4240 bit-initialized. */
4241 && expr_size (exp) != const0_rtx)
bbf6f052
RK
4242 {
4243 target = protect_from_queue (target, 1);
4244 if (GET_MODE (temp) != GET_MODE (target)
f0348c25 4245 && GET_MODE (temp) != VOIDmode)
bbf6f052
RK
4246 {
4247 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4248 if (dont_return_target)
4249 {
4250 /* In this case, we will return TEMP,
4251 so make sure it has the proper mode.
4252 But don't forget to store the value into TARGET. */
4253 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4254 emit_move_insn (target, temp);
4255 }
4256 else
4257 convert_move (target, temp, unsignedp);
4258 }
4259
4260 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4261 {
c24ae149
RK
4262 /* Handle copying a string constant into an array. The string
4263 constant may be shorter than the array. So copy just the string's
4264 actual length, and clear the rest. First get the size of the data
4265 type of the string, which is actually the size of the target. */
4266 rtx size = expr_size (exp);
bbf6f052 4267
e87b4f3f
RS
4268 if (GET_CODE (size) == CONST_INT
4269 && INTVAL (size) < TREE_STRING_LENGTH (exp))
8403445a
AM
4270 emit_block_move (target, temp, size,
4271 (want_value & 2
4272 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
e87b4f3f 4273 else
bbf6f052 4274 {
e87b4f3f
RS
4275 /* Compute the size of the data to copy from the string. */
4276 tree copy_size
c03b7665 4277 = size_binop (MIN_EXPR,
b50d17a1 4278 make_tree (sizetype, size),
fed3cef0 4279 size_int (TREE_STRING_LENGTH (exp)));
8403445a
AM
4280 rtx copy_size_rtx
4281 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4282 (want_value & 2
4283 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
e87b4f3f
RS
4284 rtx label = 0;
4285
4286 /* Copy that much. */
267b28bd
SE
4287 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4288 TREE_UNSIGNED (sizetype));
8403445a
AM
4289 emit_block_move (target, temp, copy_size_rtx,
4290 (want_value & 2
4291 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
e87b4f3f 4292
88f63c77
RK
4293 /* Figure out how much is left in TARGET that we have to clear.
4294 Do all calculations in ptr_mode. */
e87b4f3f
RS
4295 if (GET_CODE (copy_size_rtx) == CONST_INT)
4296 {
c24ae149
RK
4297 size = plus_constant (size, -INTVAL (copy_size_rtx));
4298 target = adjust_address (target, BLKmode,
4299 INTVAL (copy_size_rtx));
e87b4f3f
RS
4300 }
4301 else
4302 {
fa06ab5c 4303 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
906c4e36
RK
4304 copy_size_rtx, NULL_RTX, 0,
4305 OPTAB_LIB_WIDEN);
e87b4f3f 4306
c24ae149
RK
4307#ifdef POINTERS_EXTEND_UNSIGNED
4308 if (GET_MODE (copy_size_rtx) != Pmode)
267b28bd
SE
4309 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4310 TREE_UNSIGNED (sizetype));
c24ae149
RK
4311#endif
4312
4313 target = offset_address (target, copy_size_rtx,
4314 highest_pow2_factor (copy_size));
e87b4f3f 4315 label = gen_label_rtx ();
c5d5d461 4316 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
a06ef755 4317 GET_MODE (size), 0, label);
e87b4f3f
RS
4318 }
4319
4320 if (size != const0_rtx)
37a08a29 4321 clear_storage (target, size);
22619c3f 4322
e87b4f3f
RS
4323 if (label)
4324 emit_label (label);
bbf6f052
RK
4325 }
4326 }
fffa9c1d
JW
4327 /* Handle calls that return values in multiple non-contiguous locations.
4328 The Irix 6 ABI has examples of this. */
4329 else if (GET_CODE (target) == PARALLEL)
6e985040
AM
4330 emit_group_load (target, temp, TREE_TYPE (exp),
4331 int_size_in_bytes (TREE_TYPE (exp)));
bbf6f052 4332 else if (GET_MODE (temp) == BLKmode)
8403445a
AM
4333 emit_block_move (target, temp, expr_size (exp),
4334 (want_value & 2
4335 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
bbf6f052
RK
4336 else
4337 emit_move_insn (target, temp);
4338 }
709f5be1 4339
766f36c7 4340 /* If we don't want a value, return NULL_RTX. */
8403445a 4341 if ((want_value & 1) == 0)
766f36c7
RK
4342 return NULL_RTX;
4343
4344 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4345 ??? The latter test doesn't seem to make sense. */
4346 else if (dont_return_target && GET_CODE (temp) != MEM)
bbf6f052 4347 return temp;
766f36c7
RK
4348
4349 /* Return TARGET itself if it is a hard register. */
8403445a
AM
4350 else if ((want_value & 1) != 0
4351 && GET_MODE (target) != BLKmode
766f36c7
RK
4352 && ! (GET_CODE (target) == REG
4353 && REGNO (target) < FIRST_PSEUDO_REGISTER))
709f5be1 4354 return copy_to_reg (target);
3a94c984 4355
766f36c7 4356 else
709f5be1 4357 return target;
bbf6f052
RK
4358}
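/* A worked example of the STRING_CST path above (illustrative only):
   for

       char buf[8] = "ab";

   expr_size (exp) is 8 while TREE_STRING_LENGTH (exp) is 3, so the
   copy_size branch moves min (8, 3) == 3 bytes ("ab" plus the NUL)
   with emit_block_move, and clear_storage then zeros the remaining
   5 bytes, instead of reading 8 bytes from a 3-byte constant.  */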
4359\f
40209195 4360/* Return 1 if EXP just contains zeros. FIXME merge with initializer_zerop. */
9de08200
RK
4361
4362static int
502b8322 4363is_zeros_p (tree exp)
9de08200
RK
4364{
4365 tree elt;
4366
4367 switch (TREE_CODE (exp))
4368 {
4369 case CONVERT_EXPR:
4370 case NOP_EXPR:
4371 case NON_LVALUE_EXPR:
ed239f5a 4372 case VIEW_CONVERT_EXPR:
9de08200
RK
4373 return is_zeros_p (TREE_OPERAND (exp, 0));
4374
4375 case INTEGER_CST:
05bccae2 4376 return integer_zerop (exp);
9de08200
RK
4377
4378 case COMPLEX_CST:
4379 return
4380 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4381
4382 case REAL_CST:
41c9120b 4383 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
9de08200 4384
69ef87e2
AH
4385 case VECTOR_CST:
4386 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4387 elt = TREE_CHAIN (elt))
4388 if (!is_zeros_p (TREE_VALUE (elt)))
4389 return 0;
4390
4391 return 1;
4392
9de08200 4393 case CONSTRUCTOR:
e1a43f73
PB
4394 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4395 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
9de08200
RK
4396 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4397 if (! is_zeros_p (TREE_VALUE (elt)))
4398 return 0;
4399
4400 return 1;
3a94c984 4401
e9a25f70
JL
4402 default:
4403 return 0;
9de08200 4404 }
9de08200
RK
4405}
4406
4407/* Return 1 if EXP contains mostly (3/4) zeros. */
4408
40209195 4409int
502b8322 4410mostly_zeros_p (tree exp)
9de08200 4411{
9de08200
RK
4412 if (TREE_CODE (exp) == CONSTRUCTOR)
4413 {
e1a43f73
PB
4414 int elts = 0, zeros = 0;
4415 tree elt = CONSTRUCTOR_ELTS (exp);
4416 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4417 {
4418 /* If there are no ranges of true bits, it is all zero. */
4419 return elt == NULL_TREE;
4420 }
4421 for (; elt; elt = TREE_CHAIN (elt))
4422 {
4423 /* We do not handle the case where the index is a RANGE_EXPR,
4424 so the statistic will be somewhat inaccurate.
4425 We do make a more accurate count in store_constructor itself,
4426 and since this function is only used for nested array elements,
0f41302f 4427 this should be close enough. */
e1a43f73
PB
4428 if (mostly_zeros_p (TREE_VALUE (elt)))
4429 zeros++;
4430 elts++;
4431 }
9de08200
RK
4432
4433 return 4 * zeros >= 3 * elts;
4434 }
4435
4436 return is_zeros_p (exp);
4437}
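/* A worked example (illustrative): for the initializer
   { 0, 0, 0, 5 } the loop above counts elts == 4 and zeros == 3, and
   4 * 3 >= 3 * 4 holds, so the constructor is considered mostly zero;
   store_constructor can then clear the whole object once and store
   only the single nonzero element.  */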
4438\f
e1a43f73
PB
4439/* Helper function for store_constructor.
4440 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4441 TYPE is the type of the CONSTRUCTOR, not the element type.
04050c69 4442 CLEARED is as for store_constructor.
23cb1766 4443 ALIAS_SET is the alias set to use for any stores.
23ccec44
JW
4444
4445 This provides a recursive shortcut back to store_constructor when it isn't
4446 necessary to go through store_field. This is so that we can pass through
4447 the cleared field to let store_constructor know that we may not have to
4448 clear a substructure if the outer structure has already been cleared. */
e1a43f73
PB
4449
4450static void
502b8322
AJ
4451store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4452 HOST_WIDE_INT bitpos, enum machine_mode mode,
4453 tree exp, tree type, int cleared, int alias_set)
e1a43f73
PB
4454{
4455 if (TREE_CODE (exp) == CONSTRUCTOR
23ccec44 4456 && bitpos % BITS_PER_UNIT == 0
cc2902df 4457 /* If we have a nonzero bitpos for a register target, then we just
23ccec44
JW
4458 let store_field do the bitfield handling. This is unlikely to
4459 generate unnecessary clear instructions anyways. */
4460 && (bitpos == 0 || GET_CODE (target) == MEM))
e1a43f73 4461 {
61cb205c
RK
4462 if (GET_CODE (target) == MEM)
4463 target
4464 = adjust_address (target,
4465 GET_MODE (target) == BLKmode
4466 || 0 != (bitpos
4467 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4468 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
23cb1766 4469
e0339ef7 4470
04050c69 4471 /* Update the alias set, if required. */
10b76d73
RK
4472 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4473 && MEM_ALIAS_SET (target) != 0)
70072ed9
RK
4474 {
4475 target = copy_rtx (target);
4476 set_mem_alias_set (target, alias_set);
4477 }
e0339ef7 4478
04050c69 4479 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
e1a43f73
PB
4480 }
4481 else
a06ef755
RK
4482 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4483 alias_set);
e1a43f73
PB
4484}
4485
bbf6f052 4486/* Store the value of constructor EXP into the rtx TARGET.
04050c69
RK
4487 TARGET is either a REG or a MEM; we know it cannot conflict, since
4488 safe_from_p has been called.
b7010412
RK
4489 CLEARED is true if TARGET is known to have been zeroed.
4490 SIZE is the number of bytes of TARGET we are allowed to modify: this
4491 may not be the same as the size of EXP if we are assigning to a field
4492 which has been packed to exclude padding bits. */
bbf6f052
RK
4493
4494static void
502b8322 4495store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
bbf6f052 4496{
4af3895e 4497 tree type = TREE_TYPE (exp);
a5efcd63 4498#ifdef WORD_REGISTER_OPERATIONS
13eb1f7f 4499 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
a5efcd63 4500#endif
4af3895e 4501
e44842fe
RK
4502 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4503 || TREE_CODE (type) == QUAL_UNION_TYPE)
bbf6f052 4504 {
b3694847 4505 tree elt;
bbf6f052 4506
2c430630
RS
4507 /* If size is zero or the target is already cleared, do nothing. */
4508 if (size == 0 || cleared)
4509 cleared = 1;
04050c69 4510 /* We either clear the aggregate or indicate the value is dead. */
2c430630
RS
4511 else if ((TREE_CODE (type) == UNION_TYPE
4512 || TREE_CODE (type) == QUAL_UNION_TYPE)
4513 && ! CONSTRUCTOR_ELTS (exp))
04050c69 4514 /* If the constructor is empty, clear the union. */
a59f8640 4515 {
04050c69
RK
4516 clear_storage (target, expr_size (exp));
4517 cleared = 1;
a59f8640 4518 }
4af3895e
JVA
4519
4520 /* If we are building a static constructor into a register,
4521 set the initial value as zero so we can fold the value into
67225c15
RK
4522 a constant. But if more than one register is involved,
4523 this probably loses. */
2c430630 4524 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
67225c15 4525 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
9de08200 4526 {
04050c69 4527 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
9de08200
RK
4528 cleared = 1;
4529 }
4530
4531 /* If the constructor has fewer fields than the structure
4532 or if we are initializing the structure to mostly zeros,
0d97bf4c 4533 clear the whole structure first. Don't do this if TARGET is a
fcf1b822
RK
4534 register whose mode size isn't equal to SIZE since clear_storage
4535 can't handle this case. */
2c430630
RS
4536 else if (((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
4537 || mostly_zeros_p (exp))
fcf1b822 4538 && (GET_CODE (target) != REG
04050c69
RK
4539 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4540 == size)))
9de08200 4541 {
337f4314
RK
4542 rtx xtarget = target;
4543
4544 if (readonly_fields_p (type))
4545 {
4546 xtarget = copy_rtx (xtarget);
4547 RTX_UNCHANGING_P (xtarget) = 1;
4548 }
4549
4550 clear_storage (xtarget, GEN_INT (size));
9de08200
RK
4551 cleared = 1;
4552 }
04050c69
RK
4553
4554 if (! cleared)
38a448ca 4555 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
bbf6f052
RK
4556
4557 /* Store each element of the constructor into
4558 the corresponding field of TARGET. */
4559
4560 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4561 {
b3694847 4562 tree field = TREE_PURPOSE (elt);
34c73909 4563 tree value = TREE_VALUE (elt);
b3694847 4564 enum machine_mode mode;
770ae6cc
RK
4565 HOST_WIDE_INT bitsize;
4566 HOST_WIDE_INT bitpos = 0;
770ae6cc 4567 tree offset;
b50d17a1 4568 rtx to_rtx = target;
bbf6f052 4569
f32fd778
RS
4570 /* Just ignore missing fields.
4571 We cleared the whole structure, above,
4572 if any fields are missing. */
4573 if (field == 0)
4574 continue;
4575
8b6000fc 4576 if (cleared && is_zeros_p (value))
e1a43f73 4577 continue;
9de08200 4578
770ae6cc
RK
4579 if (host_integerp (DECL_SIZE (field), 1))
4580 bitsize = tree_low_cst (DECL_SIZE (field), 1);
14a774a9
RK
4581 else
4582 bitsize = -1;
4583
bbf6f052
RK
4584 mode = DECL_MODE (field);
4585 if (DECL_BIT_FIELD (field))
4586 mode = VOIDmode;
4587
770ae6cc
RK
4588 offset = DECL_FIELD_OFFSET (field);
4589 if (host_integerp (offset, 0)
4590 && host_integerp (bit_position (field), 0))
4591 {
4592 bitpos = int_bit_position (field);
4593 offset = 0;
4594 }
b50d17a1 4595 else
770ae6cc 4596 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
3a94c984 4597
b50d17a1
RK
4598 if (offset)
4599 {
4600 rtx offset_rtx;
4601
7a6cdb44 4602 if (CONTAINS_PLACEHOLDER_P (offset))
7fa96708 4603 offset = build (WITH_RECORD_EXPR, sizetype,
956d6950 4604 offset, make_tree (TREE_TYPE (exp), target));
bbf6f052 4605
b50d17a1
RK
4606 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4607 if (GET_CODE (to_rtx) != MEM)
4608 abort ();
4609
bd070e1a 4610#ifdef POINTERS_EXTEND_UNSIGNED
4b6c1672 4611 if (GET_MODE (offset_rtx) != Pmode)
267b28bd 4612 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
fa06ab5c
RK
4613#else
4614 if (GET_MODE (offset_rtx) != ptr_mode)
4615 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
bd070e1a 4616#endif
bd070e1a 4617
0d4903b8
RK
4618 to_rtx = offset_address (to_rtx, offset_rtx,
4619 highest_pow2_factor (offset));
b50d17a1 4620 }
c5c76735 4621
8ecc63eb
JJ
4622 /* If the constructor has been cleared, setting RTX_UNCHANGING_P
4623 on the MEM might lead to scheduling the clearing after the
4624 store. */
4625 if (TREE_READONLY (field) && !cleared)
cf04eb80 4626 {
9151b3bf 4627 if (GET_CODE (to_rtx) == MEM)
effbcc6a
RK
4628 to_rtx = copy_rtx (to_rtx);
4629
cf04eb80
RK
4630 RTX_UNCHANGING_P (to_rtx) = 1;
4631 }
4632
34c73909
R
4633#ifdef WORD_REGISTER_OPERATIONS
4634 /* If this initializes a field that is smaller than a word, at the
4635 start of a word, try to widen it to a full word.
4636 This special case allows us to output C++ member function
4637 initializations in a form that the optimizers can understand. */
770ae6cc 4638 if (GET_CODE (target) == REG
34c73909
R
4639 && bitsize < BITS_PER_WORD
4640 && bitpos % BITS_PER_WORD == 0
4641 && GET_MODE_CLASS (mode) == MODE_INT
4642 && TREE_CODE (value) == INTEGER_CST
13eb1f7f
RK
4643 && exp_size >= 0
4644 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
34c73909
R
4645 {
4646 tree type = TREE_TYPE (value);
04050c69 4647
34c73909
R
4648 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4649 {
b0c48229
NB
4650 type = (*lang_hooks.types.type_for_size)
4651 (BITS_PER_WORD, TREE_UNSIGNED (type));
34c73909
R
4652 value = convert (type, value);
4653 }
04050c69 4654
34c73909
R
4655 if (BYTES_BIG_ENDIAN)
4656 value
4657 = fold (build (LSHIFT_EXPR, type, value,
4658 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4659 bitsize = BITS_PER_WORD;
4660 mode = word_mode;
4661 }
4662#endif
10b76d73
RK
4663
4664 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4665 && DECL_NONADDRESSABLE_P (field))
4666 {
4667 to_rtx = copy_rtx (to_rtx);
4668 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4669 }
4670
c5c76735 4671 store_constructor_field (to_rtx, bitsize, bitpos, mode,
8b6000fc 4672 value, type, cleared,
10b76d73 4673 get_alias_set (TREE_TYPE (field)));
bbf6f052
RK
4674 }
4675 }
e6834654
SS
4676 else if (TREE_CODE (type) == ARRAY_TYPE
4677 || TREE_CODE (type) == VECTOR_TYPE)
bbf6f052 4678 {
b3694847
SS
4679 tree elt;
4680 int i;
e1a43f73 4681 int need_to_clear;
4af3895e 4682 tree domain = TYPE_DOMAIN (type);
4af3895e 4683 tree elttype = TREE_TYPE (type);
e6834654 4684 int const_bounds_p;
ae0ed63a
JM
4685 HOST_WIDE_INT minelt = 0;
4686 HOST_WIDE_INT maxelt = 0;
997404de
JH
4687 int icode = 0;
4688 rtx *vector = NULL;
4689 int elt_size = 0;
4690 unsigned n_elts = 0;
85f3d674 4691
e6834654
SS
4692 /* Vectors are like arrays, but the domain is stored via an array
4693 type indirectly. */
4694 if (TREE_CODE (type) == VECTOR_TYPE)
4695 {
4696 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
4697 the same field as TYPE_DOMAIN, we are not guaranteed that
4698 it always will. */
4699 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4700 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
997404de
JH
4701 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
4702 {
4703 enum machine_mode mode = GET_MODE (target);
4704
4705 icode = (int) vec_init_optab->handlers[mode].insn_code;
4706 if (icode != CODE_FOR_nothing)
4707 {
4708 unsigned int i;
4709
4710 elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
4711 n_elts = (GET_MODE_SIZE (mode) / elt_size);
 4712 vector = alloca (n_elts * sizeof (rtx));

4713 for (i = 0; i < n_elts; i++)
4714 vector [i] = CONST0_RTX (GET_MODE_INNER (mode));
4715 }
4716 }
e6834654
SS
4717 }
4718
4719 const_bounds_p = (TYPE_MIN_VALUE (domain)
4720 && TYPE_MAX_VALUE (domain)
4721 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4722 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4723
85f3d674
RK
4724 /* If we have constant bounds for the range of the type, get them. */
4725 if (const_bounds_p)
4726 {
4727 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4728 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4729 }
bbf6f052 4730
e1a43f73 4731 /* If the constructor has fewer elements than the array,
38e01259 4732 clear the whole array first. Similarly if this is
e1a43f73
PB
 4733 a static constructor of a non-BLKmode object. */
4734 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4735 need_to_clear = 1;
4736 else
4737 {
4738 HOST_WIDE_INT count = 0, zero_count = 0;
85f3d674
RK
4739 need_to_clear = ! const_bounds_p;
4740
e1a43f73
PB
4741 /* This loop is a more accurate version of the loop in
4742 mostly_zeros_p (it handles RANGE_EXPR in an index).
4743 It is also needed to check for missing elements. */
4744 for (elt = CONSTRUCTOR_ELTS (exp);
85f3d674 4745 elt != NULL_TREE && ! need_to_clear;
df0faff1 4746 elt = TREE_CHAIN (elt))
e1a43f73
PB
4747 {
4748 tree index = TREE_PURPOSE (elt);
4749 HOST_WIDE_INT this_node_count;
19caa751 4750
e1a43f73
PB
4751 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4752 {
4753 tree lo_index = TREE_OPERAND (index, 0);
4754 tree hi_index = TREE_OPERAND (index, 1);
05bccae2 4755
19caa751
RK
4756 if (! host_integerp (lo_index, 1)
4757 || ! host_integerp (hi_index, 1))
e1a43f73
PB
4758 {
4759 need_to_clear = 1;
4760 break;
4761 }
19caa751
RK
4762
4763 this_node_count = (tree_low_cst (hi_index, 1)
4764 - tree_low_cst (lo_index, 1) + 1);
e1a43f73
PB
4765 }
4766 else
4767 this_node_count = 1;
85f3d674 4768
e1a43f73
PB
4769 count += this_node_count;
4770 if (mostly_zeros_p (TREE_VALUE (elt)))
4771 zero_count += this_node_count;
4772 }
85f3d674 4773
8e958f70 4774 /* Clear the entire array first if there are any missing elements,
0f41302f 4775 or if the incidence of zero elements is >= 75%. */
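	 /* For example (illustrative): 16 elements of which 12 are zero
	    give 4 * 12 >= 3 * 16, so the whole array is cleared first and
	    only the four nonzero elements are stored explicitly.  */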
85f3d674
RK
4776 if (! need_to_clear
4777 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
e1a43f73
PB
4778 need_to_clear = 1;
4779 }
85f3d674 4780
997404de 4781 if (need_to_clear && size > 0 && !vector)
9de08200
RK
4782 {
4783 if (! cleared)
725e58b1
RK
4784 {
4785 if (REG_P (target))
4786 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4787 else
4788 clear_storage (target, GEN_INT (size));
4789 }
9de08200
RK
4790 cleared = 1;
4791 }
df4556a3 4792 else if (REG_P (target))
bbf6f052 4793 /* Inform later passes that the old value is dead. */
38a448ca 4794 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
bbf6f052
RK
4795
4796 /* Store each element of the constructor into
4797 the corresponding element of TARGET, determined
4798 by counting the elements. */
4799 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4800 elt;
4801 elt = TREE_CHAIN (elt), i++)
4802 {
b3694847 4803 enum machine_mode mode;
19caa751
RK
4804 HOST_WIDE_INT bitsize;
4805 HOST_WIDE_INT bitpos;
bbf6f052 4806 int unsignedp;
e1a43f73 4807 tree value = TREE_VALUE (elt);
03dc44a6
RS
4808 tree index = TREE_PURPOSE (elt);
4809 rtx xtarget = target;
bbf6f052 4810
e1a43f73
PB
4811 if (cleared && is_zeros_p (value))
4812 continue;
9de08200 4813
bbf6f052 4814 unsignedp = TREE_UNSIGNED (elttype);
14a774a9
RK
4815 mode = TYPE_MODE (elttype);
4816 if (mode == BLKmode)
19caa751
RK
4817 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4818 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4819 : -1);
14a774a9
RK
4820 else
4821 bitsize = GET_MODE_BITSIZE (mode);
bbf6f052 4822
e1a43f73
PB
4823 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4824 {
4825 tree lo_index = TREE_OPERAND (index, 0);
4826 tree hi_index = TREE_OPERAND (index, 1);
4977bab6 4827 rtx index_r, pos_rtx, loop_end;
e1a43f73 4828 struct nesting *loop;
05c0b405
PB
4829 HOST_WIDE_INT lo, hi, count;
4830 tree position;
e1a43f73 4831
997404de
JH
4832 if (vector)
4833 abort ();
4834
0f41302f 4835 /* If the range is constant and "small", unroll the loop. */
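	      /* E.g. (illustrative): `int a[8] = { [1 ... 3] = 7 };' has a
		 constant RANGE_EXPR of three elements, so the three stores
		 are emitted directly instead of generating an index loop.  */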
85f3d674
RK
4836 if (const_bounds_p
4837 && host_integerp (lo_index, 0)
19caa751
RK
4838 && host_integerp (hi_index, 0)
4839 && (lo = tree_low_cst (lo_index, 0),
4840 hi = tree_low_cst (hi_index, 0),
05c0b405
PB
4841 count = hi - lo + 1,
4842 (GET_CODE (target) != MEM
4843 || count <= 2
19caa751
RK
4844 || (host_integerp (TYPE_SIZE (elttype), 1)
4845 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4846 <= 40 * 8)))))
e1a43f73 4847 {
05c0b405
PB
4848 lo -= minelt; hi -= minelt;
4849 for (; lo <= hi; lo++)
e1a43f73 4850 {
19caa751 4851 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
10b76d73
RK
4852
4853 if (GET_CODE (target) == MEM
4854 && !MEM_KEEP_ALIAS_SET_P (target)
e6834654 4855 && TREE_CODE (type) == ARRAY_TYPE
10b76d73
RK
4856 && TYPE_NONALIASED_COMPONENT (type))
4857 {
4858 target = copy_rtx (target);
4859 MEM_KEEP_ALIAS_SET_P (target) = 1;
4860 }
4861
23cb1766 4862 store_constructor_field
04050c69
RK
4863 (target, bitsize, bitpos, mode, value, type, cleared,
4864 get_alias_set (elttype));
e1a43f73
PB
4865 }
4866 }
4867 else
4868 {
4977bab6 4869 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
e1a43f73
PB
4870 loop_end = gen_label_rtx ();
4871
4872 unsignedp = TREE_UNSIGNED (domain);
4873
4874 index = build_decl (VAR_DECL, NULL_TREE, domain);
4875
19e7881c 4876 index_r
e1a43f73
PB
4877 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4878 &unsignedp, 0));
19e7881c 4879 SET_DECL_RTL (index, index_r);
e1a43f73
PB
4880 if (TREE_CODE (value) == SAVE_EXPR
4881 && SAVE_EXPR_RTL (value) == 0)
4882 {
0f41302f
MS
4883 /* Make sure value gets expanded once before the
4884 loop. */
e1a43f73
PB
4885 expand_expr (value, const0_rtx, VOIDmode, 0);
4886 emit_queue ();
4887 }
4888 store_expr (lo_index, index_r, 0);
4889 loop = expand_start_loop (0);
4890
0f41302f 4891 /* Assign value to element index. */
fed3cef0
RK
4892 position
4893 = convert (ssizetype,
4894 fold (build (MINUS_EXPR, TREE_TYPE (index),
4895 index, TYPE_MIN_VALUE (domain))));
4896 position = size_binop (MULT_EXPR, position,
4897 convert (ssizetype,
4898 TYPE_SIZE_UNIT (elttype)));
4899
e1a43f73 4900 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
0d4903b8
RK
4901 xtarget = offset_address (target, pos_rtx,
4902 highest_pow2_factor (position));
4903 xtarget = adjust_address (xtarget, mode, 0);
e1a43f73 4904 if (TREE_CODE (value) == CONSTRUCTOR)
04050c69 4905 store_constructor (value, xtarget, cleared,
b7010412 4906 bitsize / BITS_PER_UNIT);
e1a43f73
PB
4907 else
4908 store_expr (value, xtarget, 0);
4909
4910 expand_exit_loop_if_false (loop,
4911 build (LT_EXPR, integer_type_node,
4912 index, hi_index));
4913
4914 expand_increment (build (PREINCREMENT_EXPR,
4915 TREE_TYPE (index),
7b8b9722 4916 index, integer_one_node), 0, 0);
e1a43f73
PB
4917 expand_end_loop ();
4918 emit_label (loop_end);
e1a43f73
PB
4919 }
4920 }
19caa751
RK
4921 else if ((index != 0 && ! host_integerp (index, 0))
4922 || ! host_integerp (TYPE_SIZE (elttype), 1))
03dc44a6 4923 {
03dc44a6
RS
4924 tree position;
4925
997404de
JH
4926 if (vector)
4927 abort ();
4928
5b6c44ff 4929 if (index == 0)
fed3cef0 4930 index = ssize_int (1);
5b6c44ff 4931
e1a43f73 4932 if (minelt)
fed3cef0
RK
4933 index = convert (ssizetype,
4934 fold (build (MINUS_EXPR, index,
4935 TYPE_MIN_VALUE (domain))));
19caa751 4936
fed3cef0
RK
4937 position = size_binop (MULT_EXPR, index,
4938 convert (ssizetype,
4939 TYPE_SIZE_UNIT (elttype)));
0d4903b8
RK
4940 xtarget = offset_address (target,
4941 expand_expr (position, 0, VOIDmode, 0),
4942 highest_pow2_factor (position));
4943 xtarget = adjust_address (xtarget, mode, 0);
e1a43f73 4944 store_expr (value, xtarget, 0);
03dc44a6 4945 }
997404de
JH
4946 else if (vector)
4947 {
4948 int pos;
4949
4950 if (index != 0)
4951 pos = tree_low_cst (index, 0) - minelt;
4952 else
4953 pos = i;
4954 vector[pos] = expand_expr (value, NULL_RTX, VOIDmode, 0);
4955 }
03dc44a6
RS
4956 else
4957 {
4958 if (index != 0)
19caa751
RK
4959 bitpos = ((tree_low_cst (index, 0) - minelt)
4960 * tree_low_cst (TYPE_SIZE (elttype), 1));
03dc44a6 4961 else
19caa751
RK
4962 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4963
10b76d73 4964 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
e6834654 4965 && TREE_CODE (type) == ARRAY_TYPE
10b76d73
RK
4966 && TYPE_NONALIASED_COMPONENT (type))
4967 {
4968 target = copy_rtx (target);
4969 MEM_KEEP_ALIAS_SET_P (target) = 1;
4970 }
9b9bd3b2
JH
4971 store_constructor_field (target, bitsize, bitpos, mode, value,
4972 type, cleared, get_alias_set (elttype));
03dc44a6 4973 }
bbf6f052 4974 }
997404de
JH
4975 if (vector)
4976 {
4977 emit_insn (GEN_FCN (icode) (target,
4978 gen_rtx_PARALLEL (GET_MODE (target),
4979 gen_rtvec_v (n_elts, vector))));
4980 }
bbf6f052 4981 }
19caa751 4982
3a94c984 4983 /* Set constructor assignments. */
071a6595
PB
4984 else if (TREE_CODE (type) == SET_TYPE)
4985 {
e1a43f73 4986 tree elt = CONSTRUCTOR_ELTS (exp);
19caa751 4987 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
071a6595
PB
4988 tree domain = TYPE_DOMAIN (type);
4989 tree domain_min, domain_max, bitlength;
4990
9faa82d8 4991 /* The default implementation strategy is to extract the constant
071a6595
PB
4992 parts of the constructor, use that to initialize the target,
4993 and then "or" in whatever non-constant ranges we need in addition.
4994
4995 If a large set is all zero or all ones, it is
4996 probably better to set it using memset (if available) or bzero.
4997 Also, if a large set has just a single range, it may also be
 4998 better to first clear the whole set (using
0f41302f 4999 bzero/memset), and then set the bits we want. */
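	 /* Illustrative example (Pascal-style SET_TYPE, not C): for a set
	    with domain 0..63 and constructor elements { 2, 8..11 }, the
	    constant bits are assembled into words below, and any
	    non-constant ranges would be "or"ed in afterward.  */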
3a94c984 5000
0f41302f 5001 /* Check for all zeros. */
9376fcd6 5002 if (elt == NULL_TREE && size > 0)
071a6595 5003 {
e1a43f73 5004 if (!cleared)
8ac61af7 5005 clear_storage (target, GEN_INT (size));
071a6595
PB
5006 return;
5007 }
5008
071a6595
PB
5009 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5010 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5011 bitlength = size_binop (PLUS_EXPR,
fed3cef0
RK
5012 size_diffop (domain_max, domain_min),
5013 ssize_int (1));
071a6595 5014
19caa751 5015 nbits = tree_low_cst (bitlength, 1);
e1a43f73
PB
5016
5017 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5018 are "complicated" (more than one range), initialize (the
3a94c984 5019 constant parts) by copying from a constant. */
e1a43f73
PB
5020 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5021 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
071a6595 5022 {
19caa751 5023 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
b4ee5a72 5024 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
703ad42b 5025 char *bit_buffer = alloca (nbits);
b4ee5a72 5026 HOST_WIDE_INT word = 0;
19caa751
RK
5027 unsigned int bit_pos = 0;
5028 unsigned int ibit = 0;
5029 unsigned int offset = 0; /* In bytes from beginning of set. */
5030
e1a43f73 5031 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
b4ee5a72 5032 for (;;)
071a6595 5033 {
b4ee5a72
PB
5034 if (bit_buffer[ibit])
5035 {
b09f3348 5036 if (BYTES_BIG_ENDIAN)
b4ee5a72
PB
5037 word |= (1 << (set_word_size - 1 - bit_pos));
5038 else
5039 word |= 1 << bit_pos;
5040 }
19caa751 5041
b4ee5a72
PB
5042 bit_pos++; ibit++;
5043 if (bit_pos >= set_word_size || ibit == nbits)
071a6595 5044 {
e1a43f73
PB
5045 if (word != 0 || ! cleared)
5046 {
5047 rtx datum = GEN_INT (word);
5048 rtx to_rtx;
19caa751 5049
0f41302f
MS
5050 /* The assumption here is that it is safe to use
5051 XEXP if the set is multi-word, but not if
5052 it's single-word. */
e1a43f73 5053 if (GET_CODE (target) == MEM)
f4ef873c 5054 to_rtx = adjust_address (target, mode, offset);
3a94c984 5055 else if (offset == 0)
e1a43f73
PB
5056 to_rtx = target;
5057 else
5058 abort ();
5059 emit_move_insn (to_rtx, datum);
5060 }
19caa751 5061
b4ee5a72
PB
5062 if (ibit == nbits)
5063 break;
5064 word = 0;
5065 bit_pos = 0;
5066 offset += set_word_size / BITS_PER_UNIT;
071a6595
PB
5067 }
5068 }
071a6595 5069 }
e1a43f73 5070 else if (!cleared)
19caa751
RK
5071 /* Don't bother clearing storage if the set is all ones. */
5072 if (TREE_CHAIN (elt) != NULL_TREE
5073 || (TREE_PURPOSE (elt) == NULL_TREE
5074 ? nbits != 1
5075 : ( ! host_integerp (TREE_VALUE (elt), 0)
5076 || ! host_integerp (TREE_PURPOSE (elt), 0)
5077 || (tree_low_cst (TREE_VALUE (elt), 0)
5078 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5079 != (HOST_WIDE_INT) nbits))))
8ac61af7 5080 clear_storage (target, expr_size (exp));
3a94c984 5081
e1a43f73 5082 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
071a6595 5083 {
3a94c984 5084 /* Start of range of element or NULL. */
071a6595 5085 tree startbit = TREE_PURPOSE (elt);
3a94c984 5086 /* End of range of element, or element value. */
071a6595
PB
5087 tree endbit = TREE_VALUE (elt);
5088 HOST_WIDE_INT startb, endb;
19caa751 5089 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
071a6595
PB
5090
5091 bitlength_rtx = expand_expr (bitlength,
19caa751 5092 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
071a6595 5093
3a94c984 5094 /* Handle non-range tuple element like [ expr ]. */
071a6595
PB
5095 if (startbit == NULL_TREE)
5096 {
5097 startbit = save_expr (endbit);
5098 endbit = startbit;
5099 }
19caa751 5100
071a6595
PB
5101 startbit = convert (sizetype, startbit);
5102 endbit = convert (sizetype, endbit);
5103 if (! integer_zerop (domain_min))
5104 {
5105 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5106 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5107 }
3a94c984 5108 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
071a6595 5109 EXPAND_CONST_ADDRESS);
3a94c984 5110 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
071a6595
PB
5111 EXPAND_CONST_ADDRESS);
5112
5113 if (REG_P (target))
5114 {
1da68f56
RK
5115 targetx
5116 = assign_temp
b0c48229
NB
5117 ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5118 (GET_MODE (target), 0),
1da68f56
RK
5119 TYPE_QUAL_CONST)),
5120 0, 1, 1);
071a6595
PB
5121 emit_move_insn (targetx, target);
5122 }
19caa751 5123
071a6595
PB
5124 else if (GET_CODE (target) == MEM)
5125 targetx = target;
5126 else
5127 abort ();
5128
4ca79136
RH
5129 /* Optimization: If startbit and endbit are constants divisible
5130 by BITS_PER_UNIT, call memset instead. */
5131 if (TARGET_MEM_FUNCTIONS
5132 && TREE_CODE (startbit) == INTEGER_CST
071a6595
PB
5133 && TREE_CODE (endbit) == INTEGER_CST
5134 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
e1a43f73 5135 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
071a6595 5136 {
ebb1b59a 5137 emit_library_call (memset_libfunc, LCT_NORMAL,
071a6595 5138 VOIDmode, 3,
e1a43f73
PB
5139 plus_constant (XEXP (targetx, 0),
5140 startb / BITS_PER_UNIT),
071a6595 5141 Pmode,
3b6f75e2 5142 constm1_rtx, TYPE_MODE (integer_type_node),
071a6595 5143 GEN_INT ((endb - startb) / BITS_PER_UNIT),
3b6f75e2 5144 TYPE_MODE (sizetype));
071a6595
PB
5145 }
5146 else
68d28100
RH
5147 emit_library_call (setbits_libfunc, LCT_NORMAL,
5148 VOIDmode, 4, XEXP (targetx, 0),
ebb1b59a 5149 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
19caa751
RK
5150 startbit_rtx, TYPE_MODE (sizetype),
5151 endbit_rtx, TYPE_MODE (sizetype));
5152
071a6595
PB
5153 if (REG_P (target))
5154 emit_move_insn (target, targetx);
5155 }
5156 }
bbf6f052
RK
5157
5158 else
5159 abort ();
5160}
5161
5162/* Store the value of EXP (an expression tree)
5163 into a subfield of TARGET which has mode MODE and occupies
5164 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5165 If MODE is VOIDmode, it means that we are storing into a bit-field.
5166
5167 If VALUE_MODE is VOIDmode, return nothing in particular.
5168 UNSIGNEDP is not used in this case.
5169
5170 Otherwise, return an rtx for the value stored. This rtx
5171 has mode VALUE_MODE if that is convenient to do.
5172 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5173
a06ef755 5174 TYPE is the type of the underlying object.
ece32014
MM
5175
5176 ALIAS_SET is the alias set for the destination. This value will
5177 (in general) be different from that for TARGET, since TARGET is a
5178 reference to the containing structure. */
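/* Illustrative example (not from the sources): assigning to `s.f' with
     struct S { unsigned int f : 5; } s;
   reaches store_field with BITSIZE == 5 and MODE == VOIDmode, so the
   assignment is performed via store_bit_field below.  */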
bbf6f052
RK
5179
5180static rtx
502b8322
AJ
5181store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5182 enum machine_mode mode, tree exp, enum machine_mode value_mode,
5183 int unsignedp, tree type, int alias_set)
bbf6f052 5184{
906c4e36 5185 HOST_WIDE_INT width_mask = 0;
bbf6f052 5186
e9a25f70
JL
5187 if (TREE_CODE (exp) == ERROR_MARK)
5188 return const0_rtx;
5189
2be6a7e9
RK
5190 /* If we have nothing to store, do nothing unless the expression has
5191 side-effects. */
5192 if (bitsize == 0)
5193 return expand_expr (exp, const0_rtx, VOIDmode, 0);
6a87d634 5194 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
906c4e36 5195 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
bbf6f052
RK
5196
5197 /* If we are storing into an unaligned field of an aligned union that is
5198 in a register, we may have the mode of TARGET being an integer mode but
5199 MODE == BLKmode. In that case, get an aligned object whose size and
5200 alignment are the same as TARGET and store TARGET into it (we can avoid
5201 the store if the field being stored is the entire width of TARGET). Then
5202 call ourselves recursively to store the field into a BLKmode version of
5203 that object. Finally, load from the object into TARGET. This is not
5204 very efficient in general, but should only be slightly more expensive
5205 than the otherwise-required unaligned accesses. Perhaps this can be
85a43a2f
RK
5206 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5207 twice, once with emit_move_insn and once via store_field. */
bbf6f052
RK
5208
5209 if (mode == BLKmode
5210 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5211 {
85a43a2f 5212 rtx object = assign_temp (type, 0, 1, 1);
c4e59f51 5213 rtx blk_object = adjust_address (object, BLKmode, 0);
bbf6f052 5214
8752c357 5215 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
bbf6f052
RK
5216 emit_move_insn (object, target);
5217
a06ef755
RK
5218 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5219 alias_set);
bbf6f052
RK
5220
5221 emit_move_insn (target, object);
5222
a06ef755 5223 /* We want to return the BLKmode version of the data. */
46093b97 5224 return blk_object;
bbf6f052 5225 }
c3b247b4
JM
5226
5227 if (GET_CODE (target) == CONCAT)
5228 {
5229 /* We're storing into a struct containing a single __complex. */
5230
5231 if (bitpos != 0)
5232 abort ();
5233 return store_expr (exp, target, 0);
5234 }
bbf6f052
RK
5235
5236 /* If the structure is in a register or if the component
5237 is a bit field, we cannot use addressing to access it.
5238 Use bit-field techniques or SUBREG to store in it. */
5239
4fa52007 5240 if (mode == VOIDmode
6ab06cbb
JW
5241 || (mode != BLKmode && ! direct_store[(int) mode]
5242 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5243 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4fa52007 5244 || GET_CODE (target) == REG
c980ac49 5245 || GET_CODE (target) == SUBREG
ccc98036
RS
5246 /* If the field isn't aligned enough to store as an ordinary memref,
5247 store it as a bit field. */
15b19a7d 5248 || (mode != BLKmode
9e5f281f
OH
5249 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5250 || bitpos % GET_MODE_ALIGNMENT (mode))
5251 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
502b8322 5252 || (bitpos % BITS_PER_UNIT != 0)))
14a774a9
RK
5253 /* If the RHS and field are a constant size and the size of the
5254 RHS isn't the same size as the bitfield, we must use bitfield
5255 operations. */
05bccae2
RK
5256 || (bitsize >= 0
5257 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5258 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
bbf6f052 5259 {
906c4e36 5260 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
bbd6cf73 5261
ef19912d
RK
5262 /* If BITSIZE is narrower than the size of the type of EXP
5263 we will be narrowing TEMP. Normally, what's wanted are the
5264 low-order bits. However, if EXP's type is a record and this is
5265 big-endian machine, we want the upper BITSIZE bits. */
5266 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
65a07688 5267 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
ef19912d
RK
5268 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5269 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5270 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5271 - bitsize),
c1853da7 5272 NULL_RTX, 1);
ef19912d 5273
bbd6cf73
RK
5274 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5275 MODE. */
5276 if (mode != VOIDmode && mode != BLKmode
5277 && mode != TYPE_MODE (TREE_TYPE (exp)))
5278 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5279
a281e72d
RK
5280 /* If the modes of TARGET and TEMP are both BLKmode, both
5281 must be in memory and BITPOS must be aligned on a byte
5282 boundary. If so, we simply do a block copy. */
5283 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5284 {
5285 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5286 || bitpos % BITS_PER_UNIT != 0)
5287 abort ();
5288
f4ef873c 5289 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
a281e72d 5290 emit_block_move (target, temp,
a06ef755 5291 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
44bb111a
RH
5292 / BITS_PER_UNIT),
5293 BLOCK_OP_NORMAL);
a281e72d
RK
5294
5295 return value_mode == VOIDmode ? const0_rtx : target;
5296 }
5297
bbf6f052 5298 /* Store the value in the bitfield. */
a06ef755
RK
5299 store_bit_field (target, bitsize, bitpos, mode, temp,
5300 int_size_in_bytes (type));
5301
bbf6f052
RK
5302 if (value_mode != VOIDmode)
5303 {
04050c69
RK
5304 /* The caller wants an rtx for the value.
5305 If possible, avoid refetching from the bitfield itself. */
bbf6f052
RK
5306 if (width_mask != 0
5307 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5c4d7cfb 5308 {
9074de27 5309 tree count;
5c4d7cfb 5310 enum machine_mode tmode;
86a2c12a 5311
5c4d7cfb 5312 tmode = GET_MODE (temp);
86a2c12a
RS
5313 if (tmode == VOIDmode)
5314 tmode = value_mode;
22273300
JJ
5315
5316 if (unsignedp)
5317 return expand_and (tmode, temp,
2496c7bd 5318 gen_int_mode (width_mask, tmode),
22273300
JJ
5319 NULL_RTX);
5320
5c4d7cfb
RS
5321 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5322 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5323 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5324 }
04050c69 5325
bbf6f052 5326 return extract_bit_field (target, bitsize, bitpos, unsignedp,
04050c69 5327 NULL_RTX, value_mode, VOIDmode,
a06ef755 5328 int_size_in_bytes (type));
bbf6f052
RK
5329 }
5330 return const0_rtx;
5331 }
5332 else
5333 {
5334 rtx addr = XEXP (target, 0);
a06ef755 5335 rtx to_rtx = target;
bbf6f052
RK
5336
5337 /* If a value is wanted, it must be the lhs;
5338 so make the address stable for multiple use. */
5339
5340 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5341 && ! CONSTANT_ADDRESS_P (addr)
5342 /* A frame-pointer reference is already stable. */
5343 && ! (GET_CODE (addr) == PLUS
5344 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5345 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5346 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
a06ef755 5347 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
bbf6f052
RK
5348
5349 /* Now build a reference to just the desired component. */
5350
a06ef755
RK
5351 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5352
5353 if (to_rtx == target)
5354 to_rtx = copy_rtx (to_rtx);
792760b9 5355
c6df88cb 5356 MEM_SET_IN_STRUCT_P (to_rtx, 1);
10b76d73 5357 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
a06ef755 5358 set_mem_alias_set (to_rtx, alias_set);
bbf6f052
RK
5359
5360 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5361 }
5362}
5363\f
5364/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
b4e3fabb
RK
5365 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5366 codes and find the ultimate containing object, which we return.
bbf6f052
RK
5367
5368 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5369 bit position, and *PUNSIGNEDP to the signedness of the field.
7bb0943f
RS
5370 If the position of the field is variable, we store a tree
5371 giving the variable offset (in units) in *POFFSET.
5372 This offset is in addition to the bit position.
5373 If the position is not variable, we store 0 in *POFFSET.
bbf6f052
RK
5374
5375 If any of the extraction expressions is volatile,
5376 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5377
5378 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5379 is a mode that can be used to access the field. In that case, *PBITSIZE
e7c33f54
RK
5380 is redundant.
5381
5382 If the field describes a variable-sized object, *PMODE is set to
5383 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
6d2f8887 5384 this case, but the address of the object can be found. */
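/* Illustrative example (hypothetical declarations): for EXP == `s.a[2].b'
   where every offset is constant, the returned tree is `s', *PBITPOS is
   the accumulated constant bit offset, and *POFFSET is 0; with a variable
   index, as in `s.a[i].b', the variable part comes back in *POFFSET.  */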
bbf6f052
RK
5385
5386tree
502b8322
AJ
5387get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5388 HOST_WIDE_INT *pbitpos, tree *poffset,
5389 enum machine_mode *pmode, int *punsignedp,
5390 int *pvolatilep)
bbf6f052
RK
5391{
5392 tree size_tree = 0;
5393 enum machine_mode mode = VOIDmode;
fed3cef0 5394 tree offset = size_zero_node;
770ae6cc 5395 tree bit_offset = bitsize_zero_node;
738cc472 5396 tree placeholder_ptr = 0;
770ae6cc 5397 tree tem;
bbf6f052 5398
770ae6cc
RK
5399 /* First get the mode, signedness, and size. We do this from just the
5400 outermost expression. */
bbf6f052
RK
5401 if (TREE_CODE (exp) == COMPONENT_REF)
5402 {
5403 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5404 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5405 mode = DECL_MODE (TREE_OPERAND (exp, 1));
770ae6cc 5406
bbf6f052
RK
5407 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5408 }
5409 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5410 {
5411 size_tree = TREE_OPERAND (exp, 1);
5412 *punsignedp = TREE_UNSIGNED (exp);
5413 }
5414 else
5415 {
5416 mode = TYPE_MODE (TREE_TYPE (exp));
770ae6cc
RK
5417 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5418
ab87f8c8
JL
5419 if (mode == BLKmode)
5420 size_tree = TYPE_SIZE (TREE_TYPE (exp));
770ae6cc
RK
5421 else
5422 *pbitsize = GET_MODE_BITSIZE (mode);
bbf6f052 5423 }
3a94c984 5424
770ae6cc 5425 if (size_tree != 0)
bbf6f052 5426 {
770ae6cc 5427 if (! host_integerp (size_tree, 1))
e7c33f54
RK
5428 mode = BLKmode, *pbitsize = -1;
5429 else
770ae6cc 5430 *pbitsize = tree_low_cst (size_tree, 1);
bbf6f052
RK
5431 }
5432
5433 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5434 and find the ultimate containing object. */
bbf6f052
RK
5435 while (1)
5436 {
770ae6cc
RK
5437 if (TREE_CODE (exp) == BIT_FIELD_REF)
5438 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5439 else if (TREE_CODE (exp) == COMPONENT_REF)
bbf6f052 5440 {
770ae6cc
RK
5441 tree field = TREE_OPERAND (exp, 1);
5442 tree this_offset = DECL_FIELD_OFFSET (field);
bbf6f052 5443
e7f3c83f
RK
5444 /* If this field hasn't been filled in yet, don't go
5445 past it. This should only happen when folding expressions
5446 made during type construction. */
770ae6cc 5447 if (this_offset == 0)
e7f3c83f 5448 break;
7a6cdb44 5449 else if (CONTAINS_PLACEHOLDER_P (this_offset))
770ae6cc 5450 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
e7f3c83f 5451
7156dead 5452 offset = size_binop (PLUS_EXPR, offset, this_offset);
770ae6cc
RK
5453 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5454 DECL_FIELD_BIT_OFFSET (field));
e6d8c385 5455
a06ef755 5456 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
bbf6f052 5457 }
7156dead 5458
b4e3fabb
RK
5459 else if (TREE_CODE (exp) == ARRAY_REF
5460 || TREE_CODE (exp) == ARRAY_RANGE_REF)
bbf6f052 5461 {
742920c7 5462 tree index = TREE_OPERAND (exp, 1);
b4e3fabb
RK
5463 tree array = TREE_OPERAND (exp, 0);
5464 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
770ae6cc 5465 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
b4e3fabb 5466 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
742920c7 5467
770ae6cc
RK
5468 /* We assume all arrays have sizes that are a multiple of a byte.
5469 First subtract the lower bound, if any, in the type of the
5470 index, then convert to sizetype and multiply by the size of the
5471 array element. */
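	  /* E.g. (illustrative): for an array with bounds 5..10 of 4-byte
	     elements, index I contributes (I - 5) * 4 bytes to OFFSET.  */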
5472 if (low_bound != 0 && ! integer_zerop (low_bound))
5473 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5474 index, low_bound));
f8dac6eb 5475
7156dead
RK
5476 /* If the index has a self-referential type, pass it to a
 5477 WITH_RECORD_EXPR; if the component size is self-referential, pass our
5478 component to one. */
7a6cdb44 5479 if (CONTAINS_PLACEHOLDER_P (index))
770ae6cc 5480 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
7a6cdb44 5481 if (CONTAINS_PLACEHOLDER_P (unit_size))
b4e3fabb 5482 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
742920c7 5483
770ae6cc
RK
5484 offset = size_binop (PLUS_EXPR, offset,
5485 size_binop (MULT_EXPR,
5486 convert (sizetype, index),
7156dead 5487 unit_size));
bbf6f052 5488 }
7156dead 5489
738cc472
RK
5490 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5491 {
70072ed9
RK
5492 tree new = find_placeholder (exp, &placeholder_ptr);
5493
5494 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5495 We might have been called from tree optimization where we
5496 haven't set up an object yet. */
5497 if (new == 0)
5498 break;
5499 else
5500 exp = new;
5501
738cc472
RK
5502 continue;
5503 }
c1853da7
RK
5504
5505 /* We can go inside most conversions: all NON_VALUE_EXPRs, all normal
5506 conversions that don't change the mode, and all view conversions
5507 except those that need to "step up" the alignment. */
bbf6f052 5508 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
c1853da7
RK
5509 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5510 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5511 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5512 && STRICT_ALIGNMENT
5513 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5514 < BIGGEST_ALIGNMENT)
5515 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5516 || TYPE_ALIGN_OK (TREE_TYPE
5517 (TREE_OPERAND (exp, 0))))))
bbf6f052
RK
5518 && ! ((TREE_CODE (exp) == NOP_EXPR
5519 || TREE_CODE (exp) == CONVERT_EXPR)
5520 && (TYPE_MODE (TREE_TYPE (exp))
5521 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5522 break;
7bb0943f
RS
5523
5524 /* If any reference in the chain is volatile, the effect is volatile. */
5525 if (TREE_THIS_VOLATILE (exp))
5526 *pvolatilep = 1;
839c4796 5527
bbf6f052
RK
5528 exp = TREE_OPERAND (exp, 0);
5529 }
5530
770ae6cc
RK
5531 /* If OFFSET is constant, see if we can return the whole thing as a
5532 constant bit position. Otherwise, split it up. */
5533 if (host_integerp (offset, 0)
5534 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5535 bitsize_unit_node))
5536 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5537 && host_integerp (tem, 0))
5538 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5539 else
5540 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
b50d17a1 5541
bbf6f052 5542 *pmode = mode;
bbf6f052
RK
5543 return exp;
5544}
921b3427 5545
ed239f5a
RK
5546/* Return 1 if T is an expression that get_inner_reference handles. */
5547
5548int
502b8322 5549handled_component_p (tree t)
ed239f5a
RK
5550{
5551 switch (TREE_CODE (t))
5552 {
5553 case BIT_FIELD_REF:
5554 case COMPONENT_REF:
5555 case ARRAY_REF:
5556 case ARRAY_RANGE_REF:
5557 case NON_LVALUE_EXPR:
5558 case VIEW_CONVERT_EXPR:
5559 return 1;
5560
1a8c4ca6
EB
5561 /* ??? Sure they are handled, but get_inner_reference may return
5562 a different PBITSIZE, depending upon whether the expression is
5563 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
ed239f5a
RK
5564 case NOP_EXPR:
5565 case CONVERT_EXPR:
5566 return (TYPE_MODE (TREE_TYPE (t))
5567 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5568
5569 default:
5570 return 0;
5571 }
5572}
bbf6f052 5573\f
3fe44edd
RK
5574/* Given an rtx VALUE that may contain additions and multiplications, return
5575 an equivalent value that just refers to a register, memory, or constant.
5576 This is done by generating instructions to perform the arithmetic and
5577 returning a pseudo-register containing the value.
c45a13a6
RK
5578
5579 The returned value may be a REG, SUBREG, MEM or constant. */
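/* Illustrative example (not from the sources): given
   (plus (mult (reg 60) (const_int 4)) (reg 61)), the multiplication and
   addition are emitted as insns and the pseudo-register holding the sum
   is returned.  */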
bbf6f052
RK
5580
5581rtx
502b8322 5582force_operand (rtx value, rtx target)
bbf6f052 5583{
8a28dbcc 5584 rtx op1, op2;
bbf6f052 5585 /* Use subtarget as the target for operand 0 of a binary operation. */
b3694847 5586 rtx subtarget = get_subtarget (target);
8a28dbcc 5587 enum rtx_code code = GET_CODE (value);
bbf6f052 5588
8b015896 5589 /* Check for a PIC address load. */
8a28dbcc 5590 if ((code == PLUS || code == MINUS)
8b015896
RH
5591 && XEXP (value, 0) == pic_offset_table_rtx
5592 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5593 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5594 || GET_CODE (XEXP (value, 1)) == CONST))
5595 {
5596 if (!subtarget)
5597 subtarget = gen_reg_rtx (GET_MODE (value));
5598 emit_move_insn (subtarget, value);
5599 return subtarget;
5600 }
5601
8a28dbcc 5602 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
bbf6f052 5603 {
8a28dbcc
JH
5604 if (!target)
5605 target = gen_reg_rtx (GET_MODE (value));
ce0f3925 5606 convert_move (target, force_operand (XEXP (value, 0), NULL),
8a28dbcc
JH
5607 code == ZERO_EXTEND);
5608 return target;
bbf6f052
RK
5609 }
5610
8a28dbcc 5611 if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
bbf6f052
RK
5612 {
5613 op2 = XEXP (value, 1);
8a28dbcc 5614 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
bbf6f052 5615 subtarget = 0;
8a28dbcc 5616 if (code == MINUS && GET_CODE (op2) == CONST_INT)
bbf6f052 5617 {
8a28dbcc 5618 code = PLUS;
bbf6f052
RK
5619 op2 = negate_rtx (GET_MODE (value), op2);
5620 }
5621
5622 /* Check for an addition with OP2 a constant integer and our first
8a28dbcc
JH
5623 operand a PLUS of a virtual register and something else. In that
5624 case, we want to emit the sum of the virtual register and the
5625 constant first and then add the other value. This allows virtual
5626 register instantiation to simply modify the constant rather than
5627 creating another one around this addition. */
5628 if (code == PLUS && GET_CODE (op2) == CONST_INT
bbf6f052
RK
5629 && GET_CODE (XEXP (value, 0)) == PLUS
5630 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5631 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5632 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5633 {
8a28dbcc
JH
5634 rtx temp = expand_simple_binop (GET_MODE (value), code,
5635 XEXP (XEXP (value, 0), 0), op2,
5636 subtarget, 0, OPTAB_LIB_WIDEN);
5637 return expand_simple_binop (GET_MODE (value), code, temp,
5638 force_operand (XEXP (XEXP (value,
5639 0), 1), 0),
5640 target, 0, OPTAB_LIB_WIDEN);
bbf6f052 5641 }
3a94c984 5642
8a28dbcc
JH
5643 op1 = force_operand (XEXP (value, 0), subtarget);
5644 op2 = force_operand (op2, NULL_RTX);
5645 switch (code)
5646 {
5647 case MULT:
5648 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5649 case DIV:
5650 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5651 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5652 target, 1, OPTAB_LIB_WIDEN);
5653 else
5654 return expand_divmod (0,
5655 FLOAT_MODE_P (GET_MODE (value))
5656 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5657 GET_MODE (value), op1, op2, target, 0);
5658 break;
5659 case MOD:
5660 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5661 target, 0);
5662 break;
5663 case UDIV:
5664 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5665 target, 1);
5666 break;
5667 case UMOD:
5668 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5669 target, 1);
5670 break;
5671 case ASHIFTRT:
5672 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5673 target, 0, OPTAB_LIB_WIDEN);
5674 break;
5675 default:
5676 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5677 target, 1, OPTAB_LIB_WIDEN);
5678 }
5679 }
5680 if (GET_RTX_CLASS (code) == '1')
5681 {
5682 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5683 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
bbf6f052 5684 }
34e81b5a
RK
5685
5686#ifdef INSN_SCHEDULING
 5687 /* On machines that have insn scheduling, we want all memory references to be
5688 explicit, so we need to deal with such paradoxical SUBREGs. */
5689 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5690 && (GET_MODE_SIZE (GET_MODE (value))
5691 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5692 value
5693 = simplify_gen_subreg (GET_MODE (value),
5694 force_reg (GET_MODE (SUBREG_REG (value)),
5695 force_operand (SUBREG_REG (value),
5696 NULL_RTX)),
5697 GET_MODE (SUBREG_REG (value)),
5698 SUBREG_BYTE (value));
5699#endif
5700
bbf6f052
RK
5701 return value;
5702}
5703\f
bbf6f052 5704/* Subroutine of expand_expr: return nonzero iff there is no way that
e5e809f4
JL
5705 EXP can reference X, which is being modified. TOP_P is nonzero if this
5706 call is going to be used to determine whether we need a temporary
ff439b5f
CB
5707 for EXP, as opposed to a recursive call to this function.
5708
5709 It is always safe for this routine to return zero since it merely
5710 searches for optimization opportunities. */
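/* For instance (illustrative): if X is a MEM and EXP contains a
   CALL_EXPR, zero is returned below, since the call is assumed to
   clobber all of memory.  */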
bbf6f052 5711
8f17b5c5 5712int
502b8322 5713safe_from_p (rtx x, tree exp, int top_p)
bbf6f052
RK
5714{
5715 rtx exp_rtl = 0;
5716 int i, nops;
1da68f56 5717 static tree save_expr_list;
bbf6f052 5718
6676e72f
RK
5719 if (x == 0
5720 /* If EXP has varying size, we MUST use a target since we currently
8f6562d0
PB
5721 have no way of allocating temporaries of variable size
5722 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5723 So we assume here that something at a higher level has prevented a
f4510f37 5724 clash. This is somewhat bogus, but the best we can do. Only
e5e809f4 5725 do this when X is BLKmode and when we are at the top level. */
d0f062fb 5726 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
f4510f37 5727 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
8f6562d0
PB
5728 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5729 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5730 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5731 != INTEGER_CST)
1da68f56
RK
5732 && GET_MODE (x) == BLKmode)
5733 /* If X is in the outgoing argument area, it is always safe. */
5734 || (GET_CODE (x) == MEM
5735 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5736 || (GET_CODE (XEXP (x, 0)) == PLUS
5737 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
bbf6f052
RK
5738 return 1;
5739
5740 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5741 find the underlying pseudo. */
5742 if (GET_CODE (x) == SUBREG)
5743 {
5744 x = SUBREG_REG (x);
5745 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5746 return 0;
5747 }
5748
1da68f56
RK
5749 /* A SAVE_EXPR might appear many times in the expression passed to the
5750 top-level safe_from_p call, and if it has a complex subexpression,
5751 examining it multiple times could result in a combinatorial explosion.
7ef0daad 5752 E.g. on an Alpha running at least 200MHz, a Fortran testcase compiled
1da68f56
RK
5753 with optimization took about 28 minutes to compile -- even though it was
5754 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5755 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5756 we have processed. Note that the only test of top_p was above. */
5757
5758 if (top_p)
5759 {
5760 int rtn;
5761 tree t;
5762
5763 save_expr_list = 0;
5764
5765 rtn = safe_from_p (x, exp, 0);
5766
5767 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5768 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5769
5770 return rtn;
5771 }
bbf6f052 5772
1da68f56 5773 /* Now look at our tree code and possibly recurse. */
bbf6f052
RK
5774 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5775 {
5776 case 'd':
a9772b60 5777 exp_rtl = DECL_RTL_IF_SET (exp);
bbf6f052
RK
5778 break;
5779
5780 case 'c':
5781 return 1;
5782
5783 case 'x':
5784 if (TREE_CODE (exp) == TREE_LIST)
f8d4be57
CE
5785 {
5786 while (1)
5787 {
5788 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5789 return 0;
5790 exp = TREE_CHAIN (exp);
5791 if (!exp)
5792 return 1;
5793 if (TREE_CODE (exp) != TREE_LIST)
5794 return safe_from_p (x, exp, 0);
5795 }
5796 }
ff439b5f
CB
5797 else if (TREE_CODE (exp) == ERROR_MARK)
5798 return 1; /* An already-visited SAVE_EXPR? */
bbf6f052
RK
5799 else
5800 return 0;
5801
bbf6f052
RK
5802 case '2':
5803 case '<':
f8d4be57
CE
5804 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
5805 return 0;
5d3cc252 5806 /* Fall through. */
f8d4be57
CE
5807
5808 case '1':
5809 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
bbf6f052
RK
5810
5811 case 'e':
5812 case 'r':
5813 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5814 the expression. If it is set, we conflict iff we are that rtx or
5815 both are in memory. Otherwise, we check all operands of the
5816 expression recursively. */
5817
5818 switch (TREE_CODE (exp))
5819 {
5820 case ADDR_EXPR:
70072ed9
RK
5821 /* If the operand is static or we are static, we can't conflict.
5822 Likewise if we don't conflict with the operand at all. */
5823 if (staticp (TREE_OPERAND (exp, 0))
5824 || TREE_STATIC (exp)
5825 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5826 return 1;
5827
5828 /* Otherwise, the only way this can conflict is if we are taking
 5829 the address of a DECL whose address is part of X, which is
5830 very rare. */
5831 exp = TREE_OPERAND (exp, 0);
5832 if (DECL_P (exp))
5833 {
5834 if (!DECL_RTL_SET_P (exp)
5835 || GET_CODE (DECL_RTL (exp)) != MEM)
5836 return 0;
5837 else
5838 exp_rtl = XEXP (DECL_RTL (exp), 0);
5839 }
5840 break;
bbf6f052
RK
5841
5842 case INDIRECT_REF:
1da68f56
RK
5843 if (GET_CODE (x) == MEM
5844 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5845 get_alias_set (exp)))
bbf6f052
RK
5846 return 0;
5847 break;
5848
5849 case CALL_EXPR:
f9808f81
MM
5850 /* Assume that the call will clobber all hard registers and
5851 all of memory. */
5852 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5853 || GET_CODE (x) == MEM)
5854 return 0;
bbf6f052
RK
5855 break;
5856
5857 case RTL_EXPR:
3bb5826a
RK
5858 /* If a sequence exists, we would have to scan every instruction
5859 in the sequence to see if it was safe. This is probably not
5860 worthwhile. */
5861 if (RTL_EXPR_SEQUENCE (exp))
bbf6f052
RK
5862 return 0;
5863
3bb5826a 5864 exp_rtl = RTL_EXPR_RTL (exp);
bbf6f052
RK
5865 break;
5866
5867 case WITH_CLEANUP_EXPR:
6ad7895a 5868 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
bbf6f052
RK
5869 break;
5870
5dab5552 5871 case CLEANUP_POINT_EXPR:
e5e809f4 5872 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5dab5552 5873
bbf6f052
RK
5874 case SAVE_EXPR:
5875 exp_rtl = SAVE_EXPR_RTL (exp);
ff439b5f
CB
5876 if (exp_rtl)
5877 break;
5878
1da68f56
RK
5879 /* If we've already scanned this, don't do it again. Otherwise,
5880 show we've scanned it and record for clearing the flag if we're
5881 going on. */
5882 if (TREE_PRIVATE (exp))
5883 return 1;
ff439b5f 5884
1da68f56
RK
5885 TREE_PRIVATE (exp) = 1;
5886 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
ff59bfe6 5887 {
1da68f56
RK
5888 TREE_PRIVATE (exp) = 0;
5889 return 0;
ff59bfe6 5890 }
1da68f56
RK
5891
5892 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
ff439b5f 5893 return 1;
bbf6f052 5894
8129842c
RS
5895 case BIND_EXPR:
5896 /* The only operand we look at is operand 1. The rest aren't
5897 part of the expression. */
e5e809f4 5898 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
8129842c 5899
e9a25f70
JL
5900 default:
5901 break;
bbf6f052
RK
5902 }
5903
5904 /* If we have an rtx, we do not need to scan our operands. */
5905 if (exp_rtl)
5906 break;
5907
8f17b5c5 5908 nops = first_rtl_op (TREE_CODE (exp));
bbf6f052
RK
5909 for (i = 0; i < nops; i++)
5910 if (TREE_OPERAND (exp, i) != 0
e5e809f4 5911 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
bbf6f052 5912 return 0;
8f17b5c5
MM
5913
5914 /* If this is a language-specific tree code, it may require
5915 special handling. */
dbbbbf3b
JDA
5916 if ((unsigned int) TREE_CODE (exp)
5917 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
ac79cd5a 5918 && !(*lang_hooks.safe_from_p) (x, exp))
8f17b5c5 5919 return 0;
bbf6f052
RK
5920 }
5921
5922 /* If we have an rtl, find any enclosed object. Then see if we conflict
5923 with it. */
5924 if (exp_rtl)
5925 {
5926 if (GET_CODE (exp_rtl) == SUBREG)
5927 {
5928 exp_rtl = SUBREG_REG (exp_rtl);
5929 if (GET_CODE (exp_rtl) == REG
5930 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5931 return 0;
5932 }
5933
5934 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
1da68f56 5935 are memory and they conflict. */
bbf6f052
RK
5936 return ! (rtx_equal_p (x, exp_rtl)
5937 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
21117a17 5938 && true_dependence (exp_rtl, VOIDmode, x,
1da68f56 5939 rtx_addr_varies_p)));
bbf6f052
RK
5940 }
5941
5942 /* If we reach here, it is safe. */
5943 return 1;
5944}
5945
01c8a7c8
RK
5946/* Subroutine of expand_expr: return rtx if EXP is a
5947 variable or parameter; else return 0. */
5948
5949static rtx
502b8322 5950var_rtx (tree exp)
01c8a7c8
RK
5951{
5952 STRIP_NOPS (exp);
5953 switch (TREE_CODE (exp))
5954 {
5955 case PARM_DECL:
5956 case VAR_DECL:
5957 return DECL_RTL (exp);
5958 default:
5959 return 0;
5960 }
5961}
14a774a9 5962\f
0d4903b8
RK
5963/* Return the highest power of two that EXP is known to be a multiple of.
5964 This is used in updating alignment of MEMs in array references. */
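/* Illustrative examples: for EXP == `i * 4' the result is 4; for
   `i * 8 + 12' it is MIN (8, 4) == 4, the largest alignment the sum is
   known to keep.  */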
5965
9ceca302 5966static unsigned HOST_WIDE_INT
502b8322 5967highest_pow2_factor (tree exp)
0d4903b8 5968{
9ceca302 5969 unsigned HOST_WIDE_INT c0, c1;
0d4903b8
RK
5970
5971 switch (TREE_CODE (exp))
5972 {
5973 case INTEGER_CST:
e0f1be5c
JJ
5974 /* We can find the lowest bit that's a one. If the low
5975 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
5976 We need to handle this case since we can find it in a COND_EXPR,
a98ebe2e 5977 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
e0f1be5c 5978 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
3a531a8b 5979 later ICE. */
e0f1be5c 5980 if (TREE_CONSTANT_OVERFLOW (exp))
1ed1b4fb 5981 return BIGGEST_ALIGNMENT;
e0f1be5c 5982 else
0d4903b8 5983 {
e0f1be5c
JJ
5984 /* Note: tree_low_cst is intentionally not used here,
5985 we don't care about the upper bits. */
5986 c0 = TREE_INT_CST_LOW (exp);
5987 c0 &= -c0;
5988 return c0 ? c0 : BIGGEST_ALIGNMENT;
0d4903b8
RK
5989 }
5990 break;
5991
65a07688 5992 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
0d4903b8
RK
5993 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5994 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5995 return MIN (c0, c1);
5996
5997 case MULT_EXPR:
5998 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5999 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6000 return c0 * c1;
6001
6002 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6003 case CEIL_DIV_EXPR:
65a07688
RK
6004 if (integer_pow2p (TREE_OPERAND (exp, 1))
6005 && host_integerp (TREE_OPERAND (exp, 1), 1))
6006 {
6007 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6008 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6009 return MAX (1, c0 / c1);
6010 }
6011 break;
0d4903b8
RK
6012
6013 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
65a07688 6014 case SAVE_EXPR: case WITH_RECORD_EXPR:
0d4903b8
RK
6015 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6016
65a07688
RK
6017 case COMPOUND_EXPR:
6018 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6019
0d4903b8
RK
6020 case COND_EXPR:
6021 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6022 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6023 return MIN (c0, c1);
6024
6025 default:
6026 break;
6027 }
6028
6029 return 1;
6030}
818c0c94
RH
6031
6032/* Similar, except that it is known that the expression must be a multiple
6033 of the alignment of TYPE. */
6034
9ceca302 6035static unsigned HOST_WIDE_INT
502b8322 6036highest_pow2_factor_for_type (tree type, tree exp)
818c0c94 6037{
9ceca302 6038 unsigned HOST_WIDE_INT type_align, factor;
818c0c94
RH
6039
6040 factor = highest_pow2_factor (exp);
6041 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6042 return MAX (factor, type_align);
6043}
0d4903b8 6044\f
f47e9b4e
RK
6045/* Return an object on the placeholder list that matches EXP, a
6046 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
738cc472 6047 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
70072ed9
RK
6048 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6049 is a location which initially points to a starting location in the
738cc472
RK
6050 placeholder list (zero means start of the list) and where a pointer into
6051 the placeholder list at which the object is found is placed. */
f47e9b4e
RK
6052
6053tree
502b8322 6054find_placeholder (tree exp, tree *plist)
f47e9b4e
RK
6055{
6056 tree type = TREE_TYPE (exp);
6057 tree placeholder_expr;
6058
738cc472
RK
6059 for (placeholder_expr
6060 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6061 placeholder_expr != 0;
f47e9b4e
RK
6062 placeholder_expr = TREE_CHAIN (placeholder_expr))
6063 {
6064 tree need_type = TYPE_MAIN_VARIANT (type);
6065 tree elt;
6066
6067 /* Find the outermost reference that is of the type we want. If none,
6068 see if any object has a type that is a pointer to the type we
6069 want. */
6070 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6071 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6072 || TREE_CODE (elt) == COND_EXPR)
6073 ? TREE_OPERAND (elt, 1)
6074 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6075 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6076 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6077 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6078 ? TREE_OPERAND (elt, 0) : 0))
6079 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6080 {
6081 if (plist)
6082 *plist = placeholder_expr;
6083 return elt;
6084 }
6085
6086 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6087 elt
6088 = ((TREE_CODE (elt) == COMPOUND_EXPR
6089 || TREE_CODE (elt) == COND_EXPR)
6090 ? TREE_OPERAND (elt, 1)
6091 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6092 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6093 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6094 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6095 ? TREE_OPERAND (elt, 0) : 0))
6096 if (POINTER_TYPE_P (TREE_TYPE (elt))
6097 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6098 == need_type))
6099 {
6100 if (plist)
6101 *plist = placeholder_expr;
6102 return build1 (INDIRECT_REF, need_type, elt);
6103 }
6104 }
6105
70072ed9 6106 return 0;
f47e9b4e 6107}
eb698c58
RS
6108
6109/* Subroutine of expand_expr. Expand the two operands of a binary
6110 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6111 The value may be stored in TARGET if TARGET is nonzero. The
6112 MODIFIER argument is as documented by expand_expr. */
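/* For instance (illustrative): expanding `a + a' takes the
   operand_equal_p path below, expanding `a' once and copying the rtx
   for the second operand.  */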
6113
6114static void
6115expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6116 enum expand_modifier modifier)
6117{
6118 if (! safe_from_p (target, exp1, 1))
6119 target = 0;
6120 if (operand_equal_p (exp0, exp1, 0))
6121 {
6122 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6123 *op1 = copy_rtx (*op0);
6124 }
6125 else
6126 {
c67e6e14
RS
6127 /* If we need to preserve evaluation order, copy exp0 into its own
6128 temporary variable so that it can't be clobbered by exp1. */
6129 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6130 exp0 = save_expr (exp0);
eb698c58
RS
6131 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6132 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6133 }
6134}
6135
f47e9b4e 6136\f
bbf6f052
RK
6137/* expand_expr: generate code for computing expression EXP.
6138 An rtx for the computed value is returned. The value is never null.
6139 In the case of a void EXP, const0_rtx is returned.
6140
6141 The value may be stored in TARGET if TARGET is nonzero.
6142 TARGET is just a suggestion; callers must assume that
6143 the rtx returned may not be the same as TARGET.
6144
6145 If TARGET is CONST0_RTX, it means that the value will be ignored.
6146
6147 If TMODE is not VOIDmode, it suggests generating the
6148 result in mode TMODE. But this is done only when convenient.
6149 Otherwise, TMODE is ignored and the value generated in its natural mode.
6150 TMODE is just a suggestion; callers must assume that
6151 the rtx returned may not have mode TMODE.
6152
d6a5ac33
RK
6153 Note that TARGET may have neither TMODE nor MODE. In that case, it
6154 probably will not be used.
bbf6f052
RK
6155
6156 If MODIFIER is EXPAND_SUM then when EXP is an addition
6157 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6158 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6159 products as above, or REG or MEM, or constant.
6160 Ordinarily in such cases we would output mul or add instructions
6161 and then return a pseudo reg containing the sum.
6162
6163 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6164 it also marks a label as absolutely required (it can't be dead).
26fcb35a 6165 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
d6a5ac33
RK
6166 This is used for outputting expressions used in initializers.
6167
6168 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6169 with a constant address even if that address is not normally legitimate.
8403445a
AM
6170 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6171
6172 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6173 a call parameter. Such targets require special care as we haven't yet
6174 marked TARGET so that it's safe from being trashed by libcalls. We
6175 don't want to use TARGET for anything but the final result;
 6176 intermediate values must go elsewhere. Additionally, calls to
0fab64a3
MM
6177 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6178
6179 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6180 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6181 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6182 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6183 recursively. */
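/* Illustrative example (not from the sources): under EXPAND_SUM,
   expanding `&a[i]' may come back as a nest such as
   (plus (mult (reg) (const_int 4)) (symbol_ref a)) instead of forcing
   the address into a single pseudo-register.  */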
bbf6f052
RK
6184
6185rtx
0fab64a3
MM
6186expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6187 enum expand_modifier modifier, rtx *alt_rtl)
bbf6f052 6188{
b3694847 6189 rtx op0, op1, temp;
bbf6f052
RK
6190 tree type = TREE_TYPE (exp);
6191 int unsignedp = TREE_UNSIGNED (type);
b3694847
SS
6192 enum machine_mode mode;
6193 enum tree_code code = TREE_CODE (exp);
bbf6f052 6194 optab this_optab;
68557e14
ML
6195 rtx subtarget, original_target;
6196 int ignore;
bbf6f052
RK
6197 tree context;
6198
6199  /* Handle ERROR_MARK before anybody tries to access its type.  */
6200  if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6201    {
6202      op0 = CONST0_RTX (tmode);
6203      if (op0 != 0)
6204        return op0;
6205      return const0_rtx;
6206    }
6207
6208  mode = TYPE_MODE (type);
6209  /* Use subtarget as the target for operand 0 of a binary operation.  */
6210  subtarget = get_subtarget (target);
6211  original_target = target;
6212  ignore = (target == const0_rtx
6213            || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6214                 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6215                 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6216                && TREE_CODE (type) == VOID_TYPE));
6217
6218  /* If we are going to ignore this result, we need only do something
6219     if there is a side-effect somewhere in the expression.  If there
6220     is, short-circuit the most common cases here.  Note that we must
6221     not call expand_expr with anything but const0_rtx in case this
6222     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */
6223
6224  if (ignore)
6225    {
6226      if (! TREE_SIDE_EFFECTS (exp))
6227        return const0_rtx;
6228
6229      /* Ensure we reference a volatile object even if value is ignored, but
6230         don't do this if all we are doing is taking its address.  */
6231      if (TREE_THIS_VOLATILE (exp)
6232          && TREE_CODE (exp) != FUNCTION_DECL
6233          && mode != VOIDmode && mode != BLKmode
6234          && modifier != EXPAND_CONST_ADDRESS)
6235        {
6236          temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6237          if (GET_CODE (temp) == MEM)
6238            temp = copy_to_reg (temp);
6239          return const0_rtx;
6240        }
6241
6242      if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6243          || code == INDIRECT_REF || code == BUFFER_REF)
6244        return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6245                            modifier);
6246
6247      else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6248               || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6249        {
6250          expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6251          expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6252          return const0_rtx;
6253        }
6254      else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6255               && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6256        /* If the second operand has no side effects, just evaluate
6257           the first.  */
6258        return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6259                            modifier);
6260      else if (code == BIT_FIELD_REF)
6261        {
6262          expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6263          expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6264          expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6265          return const0_rtx;
6266        }
6267
6268      target = 0;
6269    }
6270
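/* Editorial example: for a discarded expression with no side effects,
   e.g. `(void) (a + b);', TREE_SIDE_EFFECTS is false and we return
   const0_rtx without emitting any insns; for `(void) (a + f ());' each
   operand is expanded with const0_rtx as target, so only the call's
   side effect survives.  */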
6271  /* If we will do cse, generate all results into pseudo registers
6272     since 1) that allows cse to find more things
6273     and 2) otherwise cse could produce an insn the machine
6274     cannot support.  An exception is a CONSTRUCTOR into a multi-word
6275     MEM: that's much more likely to be most efficient into the MEM.
6276     Another is a CALL_EXPR which must return in memory.  */
6277
6278  if (! cse_not_expected && mode != BLKmode && target
6279      && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6280      && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6281      && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
6282    target = 0;
6283
6284  switch (code)
6285    {
6286    case LABEL_DECL:
6287      {
6288        tree function = decl_function_context (exp);
6289        /* Labels in containing functions, or labels used from initializers,
6290           must be forced.  */
6291        if (modifier == EXPAND_INITIALIZER
6292            || (function != current_function_decl
6293                && function != inline_function_decl
6294                && function != 0))
6295          temp = force_label_rtx (exp);
6296        else
6297          temp = label_rtx (exp);
6298
6299        temp = gen_rtx_MEM (FUNCTION_MODE, gen_rtx_LABEL_REF (Pmode, temp));
6300        if (function != current_function_decl
6301            && function != inline_function_decl && function != 0)
6302          LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6303        return temp;
6304      }
6305
6306    case PARM_DECL:
6307      if (!DECL_RTL_SET_P (exp))
6308        {
6309          error ("%Jprior parameter's size depends on '%D'", exp, exp);
6310          return CONST0_RTX (mode);
6311        }
6312
6313      /* ... fall through ...  */
6314
6315    case VAR_DECL:
6316      /* If a static var's type was incomplete when the decl was written,
6317         but the type is complete now, lay out the decl now.  */
6318      if (DECL_SIZE (exp) == 0
6319          && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6320          && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6321        layout_decl (exp, 0);
6322
6323      /* ... fall through ...  */
6324
6325    case FUNCTION_DECL:
6326    case RESULT_DECL:
6327      if (DECL_RTL (exp) == 0)
6328        abort ();
6329
6330      /* Ensure variable marked as used even if it doesn't go through
6331         a parser.  If it hasn't been used yet, write out an external
6332         definition.  */
6333      if (! TREE_USED (exp))
6334        {
6335          assemble_external (exp);
6336          TREE_USED (exp) = 1;
6337        }
6338
6339      /* Show we haven't gotten RTL for this yet.  */
6340      temp = 0;
6341
6342      /* Handle variables inherited from containing functions.  */
6343      context = decl_function_context (exp);
6344
6345      /* We treat inline_function_decl as an alias for the current function
6346         because that is the inline function whose vars, types, etc.
6347         are being merged into the current function.
6348         See expand_inline_function.  */
6349
6350      if (context != 0 && context != current_function_decl
6351          && context != inline_function_decl
6352          /* If var is static, we don't need a static chain to access it.  */
6353          && ! (GET_CODE (DECL_RTL (exp)) == MEM
6354                && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6355        {
6356          rtx addr;
6357
6358          /* Mark as non-local and addressable.  */
6359          DECL_NONLOCAL (exp) = 1;
6360          if (DECL_NO_STATIC_CHAIN (current_function_decl))
6361            abort ();
6362          (*lang_hooks.mark_addressable) (exp);
6363          if (GET_CODE (DECL_RTL (exp)) != MEM)
6364            abort ();
6365          addr = XEXP (DECL_RTL (exp), 0);
6366          if (GET_CODE (addr) == MEM)
6367            addr
6368              = replace_equiv_address (addr,
6369                                       fix_lexical_addr (XEXP (addr, 0), exp));
6370          else
6371            addr = fix_lexical_addr (addr, exp);
6372
6373          temp = replace_equiv_address (DECL_RTL (exp), addr);
6374        }
6375
6376      /* This is the case of an array whose size is to be determined
6377         from its initializer, while the initializer is still being parsed.
6378         See expand_decl.  */
6379
6380      else if (GET_CODE (DECL_RTL (exp)) == MEM
6381               && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6382        temp = validize_mem (DECL_RTL (exp));
6383
6384      /* If DECL_RTL is memory, we are in the normal case and either
6385         the address is not valid or it is not a register and -fforce-addr
6386         is specified, get the address into a register.  */
6387
6388      else if (GET_CODE (DECL_RTL (exp)) == MEM
6389               && modifier != EXPAND_CONST_ADDRESS
6390               && modifier != EXPAND_SUM
6391               && modifier != EXPAND_INITIALIZER
6392               && (! memory_address_p (DECL_MODE (exp),
6393                                       XEXP (DECL_RTL (exp), 0))
6394                   || (flag_force_addr
6395                       && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6396        {
6397          if (alt_rtl)
6398            *alt_rtl = DECL_RTL (exp);
6399          temp = replace_equiv_address (DECL_RTL (exp),
6400                                        copy_rtx (XEXP (DECL_RTL (exp), 0)));
6401        }
6402
6403      /* If we got something, return it.  But first, set the alignment
6404         if the address is a register.  */
6405      if (temp != 0)
6406        {
6407          if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6408            mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6409
6410          return temp;
6411        }
6412
6413      /* If the mode of DECL_RTL does not match that of the decl, it
6414         must be a promoted value.  We return a SUBREG of the wanted mode,
6415         but mark it so that we know that it was already extended.  */
6416
6417      if (GET_CODE (DECL_RTL (exp)) == REG
6418          && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6419        {
6420          /* Get the signedness used for this variable.  Ensure we get the
6421             same mode we got when the variable was declared.  */
6422          if (GET_MODE (DECL_RTL (exp))
6423              != promote_mode (type, DECL_MODE (exp), &unsignedp,
6424                               (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6425            abort ();
6426
6427          temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6428          SUBREG_PROMOTED_VAR_P (temp) = 1;
6429          SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6430          return temp;
6431        }
6432
6433      return DECL_RTL (exp);
6434
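/* Editorial note: on a target whose PROMOTE_MODE widens sub-word
   variables, a `short' local may live in an SImode pseudo; the code
   above then returns (subreg:HI (reg:SI N) ...) with
   SUBREG_PROMOTED_VAR_P set so redundant extensions can be elided.  */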
6435    case INTEGER_CST:
6436      temp = immed_double_const (TREE_INT_CST_LOW (exp),
6437                                 TREE_INT_CST_HIGH (exp), mode);
6438
6439      /* ??? If overflow is set, fold will have done an incomplete job,
6440         which can result in (plus xx (const_int 0)), which can get
6441         simplified by validate_replace_rtx during virtual register
6442         instantiation, which can result in unrecognizable insns.
6443         Avoid this by forcing all overflows into registers.  */
6444      if (TREE_CONSTANT_OVERFLOW (exp)
6445          && modifier != EXPAND_INITIALIZER)
6446        temp = force_reg (mode, temp);
6447
6448      return temp;
6449
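/* Editorial note: immed_double_const yields a CONST_INT when the value
   fits a HOST_WIDE_INT and a CONST_DOUBLE otherwise; e.g. 5 in SImode
   becomes (const_int 5), while a 64-bit constant on a 32-bit host
   becomes a CONST_DOUBLE carrying the low and high words.  */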
6450    case VECTOR_CST:
6451      return const_vector_from_tree (exp);
6452
6453    case CONST_DECL:
6454      return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6455
6456    case REAL_CST:
6457      /* If optimized, generate immediate CONST_DOUBLE
6458         which will be turned into memory by reload if necessary.
6459
6460         We used to force a register so that loop.c could see it.  But
6461         this does not allow gen_* patterns to perform optimizations with
6462         the constants.  It also produces two insns in cases like "x = 1.0;".
6463         On most machines, floating-point constants are not permitted in
6464         many insns, so we'd end up copying it to a register in any case.
6465
6466         Now, we do the copying in expand_binop, if appropriate.  */
6467      return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6468                                           TYPE_MODE (TREE_TYPE (exp)));
6469
6470    case COMPLEX_CST:
6471      /* Handle evaluating a complex constant in a CONCAT target.  */
6472      if (original_target && GET_CODE (original_target) == CONCAT)
6473        {
6474          enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6475          rtx rtarg, itarg;
6476
6477          rtarg = XEXP (original_target, 0);
6478          itarg = XEXP (original_target, 1);
6479
6480          /* Move the real and imaginary parts separately.  */
6481          op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6482          op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6483
6484          if (op0 != rtarg)
6485            emit_move_insn (rtarg, op0);
6486          if (op1 != itarg)
6487            emit_move_insn (itarg, op1);
6488
6489          return original_target;
6490        }
6491
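/* Editorial example: for `_Complex double z = 1.0 + 2.0i;' expanded
   into a CONCAT target, the two halves are filled independently --
   roughly emit_move_insn on XEXP (target, 0) for 1.0 and on
   XEXP (target, 1) for 2.0.  */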
6492      /* ... fall through ...  */
6493
6494    case STRING_CST:
6495      temp = output_constant_def (exp, 1);
6496
6497      /* temp contains a constant address.
6498         On RISC machines where a constant address isn't valid,
6499         make some insns to get that address into a register.  */
6500      if (modifier != EXPAND_CONST_ADDRESS
6501          && modifier != EXPAND_INITIALIZER
6502          && modifier != EXPAND_SUM
6503          && (! memory_address_p (mode, XEXP (temp, 0))
6504              || flag_force_addr))
6505        return replace_equiv_address (temp,
6506                                      copy_rtx (XEXP (temp, 0)));
6507      return temp;
6508
6509    case EXPR_WITH_FILE_LOCATION:
6510      {
6511        rtx to_return;
6512        struct file_stack fs;
6513
6514        fs.location = input_location;
6515        fs.next = expr_wfl_stack;
6516        input_filename = EXPR_WFL_FILENAME (exp);
6517        input_line = EXPR_WFL_LINENO (exp);
6518        expr_wfl_stack = &fs;
6519        if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6520          emit_line_note (input_location);
6521        /* Possibly avoid switching back and forth here.  */
6522        to_return = expand_expr (EXPR_WFL_NODE (exp),
6523                                 (ignore ? const0_rtx : target),
6524                                 tmode, modifier);
6525        if (expr_wfl_stack != &fs)
6526          abort ();
6527        input_location = fs.location;
6528        expr_wfl_stack = fs.next;
6529        return to_return;
6530      }
6531
6532    case SAVE_EXPR:
6533      context = decl_function_context (exp);
6534
6535      /* If this SAVE_EXPR was at global context, assume we are an
6536         initialization function and move it into our context.  */
6537      if (context == 0)
6538        SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6539
6540      /* We treat inline_function_decl as an alias for the current function
6541         because that is the inline function whose vars, types, etc.
6542         are being merged into the current function.
6543         See expand_inline_function.  */
6544      if (context == current_function_decl || context == inline_function_decl)
6545        context = 0;
6546
6547      /* If this is non-local, handle it.  */
6548      if (context)
6549        {
6550          /* The following call just exists to abort if the context is
6551             not of a containing function.  */
6552          find_function_data (context);
6553
6554          temp = SAVE_EXPR_RTL (exp);
6555          if (temp && GET_CODE (temp) == REG)
6556            {
6557              put_var_into_stack (exp, /*rescan=*/true);
6558              temp = SAVE_EXPR_RTL (exp);
6559            }
6560          if (temp == 0 || GET_CODE (temp) != MEM)
6561            abort ();
6562          return
6563            replace_equiv_address (temp,
6564                                   fix_lexical_addr (XEXP (temp, 0), exp));
6565        }
6566      if (SAVE_EXPR_RTL (exp) == 0)
6567        {
6568          if (mode == VOIDmode)
6569            temp = const0_rtx;
6570          else
6571            temp = assign_temp (build_qualified_type (type,
6572                                                      (TYPE_QUALS (type)
6573                                                       | TYPE_QUAL_CONST)),
6574                                3, 0, 0);
6575
6576          SAVE_EXPR_RTL (exp) = temp;
6577          if (!optimize && GET_CODE (temp) == REG)
6578            save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6579                                                save_expr_regs);
6580
6581          /* If the mode of TEMP does not match that of the expression, it
6582             must be a promoted value.  We pass store_expr a SUBREG of the
6583             wanted mode but mark it so that we know that it was already
6584             extended.  */
6585
6586          if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6587            {
6588              temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6589              promote_mode (type, mode, &unsignedp, 0);
6590              SUBREG_PROMOTED_VAR_P (temp) = 1;
6591              SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6592            }
6593
6594          if (temp == const0_rtx)
6595            expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6596          else
6597            store_expr (TREE_OPERAND (exp, 0), temp,
6598                        modifier == EXPAND_STACK_PARM ? 2 : 0);
6599
6600          TREE_USED (exp) = 1;
6601        }
6602
6603      /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6604         must be a promoted value.  We return a SUBREG of the wanted mode,
6605         but mark it so that we know that it was already extended.  */
6606
6607      if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6608          && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6609        {
6610          /* Compute the signedness and make the proper SUBREG.  */
6611          promote_mode (type, mode, &unsignedp, 0);
6612          temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6613          SUBREG_PROMOTED_VAR_P (temp) = 1;
6614          SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6615          return temp;
6616        }
6617
6618      return SAVE_EXPR_RTL (exp);
6619
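/* Editorial note: SAVE_EXPR guarantees single evaluation.  For a tree
   like SAVE_EXPR <f ()> used in several places, the first expansion
   stores the call's value in SAVE_EXPR_RTL; later expansions return
   that rtx instead of calling f again.  */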
6620    case UNSAVE_EXPR:
6621      {
6622        rtx temp;
6623        temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6624        TREE_OPERAND (exp, 0)
6625          = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
6626        return temp;
6627      }
6628
6629    case PLACEHOLDER_EXPR:
6630      {
6631        tree old_list = placeholder_list;
6632        tree placeholder_expr = 0;
6633
6634        exp = find_placeholder (exp, &placeholder_expr);
6635        if (exp == 0)
6636          abort ();
6637
6638        placeholder_list = TREE_CHAIN (placeholder_expr);
6639        temp = expand_expr (exp, original_target, tmode, modifier);
6640        placeholder_list = old_list;
6641        return temp;
6642      }
6643
6644    case WITH_RECORD_EXPR:
6645      /* Put the object on the placeholder list, expand our first operand,
6646         and pop the list.  */
6647      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6648                                    placeholder_list);
6649      target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
6650                            modifier);
6651      placeholder_list = TREE_CHAIN (placeholder_list);
6652      return target;
6653
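/* Editorial note: a PLACEHOLDER_EXPR stands for the "current object"
   in a size expression, e.g. an Ada record whose array field's bound
   is a discriminant.  Expanding WITH_RECORD_EXPR <expr, rec> pushes
   `rec' on placeholder_list so each PLACEHOLDER_EXPR inside EXPR
   resolves to it.  */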
6654    case GOTO_EXPR:
6655      if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6656        expand_goto (TREE_OPERAND (exp, 0));
6657      else
6658        expand_computed_goto (TREE_OPERAND (exp, 0));
6659      return const0_rtx;
6660
6661    case EXIT_EXPR:
6662      expand_exit_loop_if_false (NULL,
6663                                 invert_truthvalue (TREE_OPERAND (exp, 0)));
6664      return const0_rtx;
6665
6666    case LABELED_BLOCK_EXPR:
6667      if (LABELED_BLOCK_BODY (exp))
6668        expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
6669      /* Should perhaps use expand_label, but this is simpler and safer.  */
6670      do_pending_stack_adjust ();
6671      emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6672      return const0_rtx;
6673
6674    case EXIT_BLOCK_EXPR:
6675      if (EXIT_BLOCK_RETURN (exp))
6676        sorry ("returned value in block_exit_expr");
6677      expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6678      return const0_rtx;
6679
6680    case LOOP_EXPR:
6681      push_temp_slots ();
6682      expand_start_loop (1);
6683      expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
6684      expand_end_loop ();
6685      pop_temp_slots ();
6686
6687      return const0_rtx;
6688
6689    case BIND_EXPR:
6690      {
6691        tree vars = TREE_OPERAND (exp, 0);
6692
6693        /* Need to open a binding contour here because
6694           if there are any cleanups they must be contained here.  */
6695        expand_start_bindings (2);
6696
6697        /* Mark the corresponding BLOCK for output in its proper place.  */
6698        if (TREE_OPERAND (exp, 2) != 0
6699            && ! TREE_USED (TREE_OPERAND (exp, 2)))
6700          (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
6701
6702        /* If VARS have not yet been expanded, expand them now.  */
6703        while (vars)
6704          {
6705            if (!DECL_RTL_SET_P (vars))
6706              expand_decl (vars);
6707            expand_decl_init (vars);
6708            vars = TREE_CHAIN (vars);
6709          }
6710
6711        temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
6712
6713        expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6714
6715        return temp;
6716      }
6717
6718    case RTL_EXPR:
6719      if (RTL_EXPR_SEQUENCE (exp))
6720        {
6721          if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6722            abort ();
6723          emit_insn (RTL_EXPR_SEQUENCE (exp));
6724          RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6725        }
6726      preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6727      free_temps_for_rtl_expr (exp);
6728      if (alt_rtl)
6729        *alt_rtl = RTL_EXPR_ALT_RTL (exp);
6730      return RTL_EXPR_RTL (exp);
6731
6732    case CONSTRUCTOR:
6733      /* If we don't need the result, just ensure we evaluate any
6734         subexpressions.  */
6735      if (ignore)
6736        {
6737          tree elt;
6738
6739          for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6740            expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6741
6742          return const0_rtx;
6743        }
6744
6745      /* All elts simple constants => refer to a constant in memory.  But
6746         if this is a non-BLKmode mode, let it store a field at a time
6747         since that should make a CONST_INT or CONST_DOUBLE when we
6748         fold.  Likewise, if we have a target we can use, it is best to
6749         store directly into the target unless the type is large enough
6750         that memcpy will be used.  If we are making an initializer and
6751         all operands are constant, put it in memory as well.
6752
6753         FIXME: Avoid trying to fill vector constructors piecemeal.
6754         Output them with output_constant_def below unless we're sure
6755         they're zeros.  This should go away when vector initializers
6756         are treated like VECTOR_CST instead of arrays.
6757         */
6758      else if ((TREE_STATIC (exp)
6759                && ((mode == BLKmode
6760                     && ! (target != 0 && safe_from_p (target, exp, 1)))
6761                    || TREE_ADDRESSABLE (exp)
6762                    || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6763                        && (! MOVE_BY_PIECES_P
6764                            (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6765                             TYPE_ALIGN (type)))
6766                        && ((TREE_CODE (type) == VECTOR_TYPE
6767                             && !is_zeros_p (exp))
6768                            || ! mostly_zeros_p (exp)))))
6769               || ((modifier == EXPAND_INITIALIZER
6770                    || modifier == EXPAND_CONST_ADDRESS)
6771                   && TREE_CONSTANT (exp)))
6772        {
6773          rtx constructor = output_constant_def (exp, 1);
6774
6775          if (modifier != EXPAND_CONST_ADDRESS
6776              && modifier != EXPAND_INITIALIZER
6777              && modifier != EXPAND_SUM)
6778            constructor = validize_mem (constructor);
6779
6780          return constructor;
6781        }
6782      else
6783        {
6784          /* Handle calls that pass values in multiple non-contiguous
6785             locations.  The Irix 6 ABI has examples of this.  */
6786          if (target == 0 || ! safe_from_p (target, exp, 1)
6787              || GET_CODE (target) == PARALLEL
6788              || modifier == EXPAND_STACK_PARM)
6789            target
6790              = assign_temp (build_qualified_type (type,
6791                                                   (TYPE_QUALS (type)
6792                                                    | (TREE_READONLY (exp)
6793                                                       * TYPE_QUAL_CONST))),
6794                             0, TREE_ADDRESSABLE (exp), 1);
6795
6796          store_constructor (exp, target, 0, int_expr_size (exp));
6797          return target;
6798        }
6799
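/* Editorial example: for `static const int v[4] = {1, 2, 3, 4};' the
   whole aggregate is emitted to the constant pool via
   output_constant_def and a MEM is returned; for an automatic
   `int v[4] = {1, 2, 3, 4};' the else branch above calls
   store_constructor to fill the target element by element.  */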
6800    case INDIRECT_REF:
6801      {
6802        tree exp1 = TREE_OPERAND (exp, 0);
6803        tree index;
6804        tree string = string_constant (exp1, &index);
6805
6806        /* Try to optimize reads from const strings.  */
6807        if (string
6808            && TREE_CODE (string) == STRING_CST
6809            && TREE_CODE (index) == INTEGER_CST
6810            && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6811            && GET_MODE_CLASS (mode) == MODE_INT
6812            && GET_MODE_SIZE (mode) == 1
6813            && modifier != EXPAND_WRITE)
6814          return gen_int_mode (TREE_STRING_POINTER (string)
6815                               [TREE_INT_CST_LOW (index)], mode);
6816
6817        op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6818        op0 = memory_address (mode, op0);
6819        temp = gen_rtx_MEM (mode, op0);
6820        set_mem_attributes (temp, exp, 0);
6821
6822        /* If we are writing to this object and its type is a record with
6823           readonly fields, we must mark it as readonly so it will
6824           conflict with readonly references to those fields.  */
6825        if (modifier == EXPAND_WRITE && readonly_fields_p (type))
6826          RTX_UNCHANGING_P (temp) = 1;
6827
6828        return temp;
6829      }
6830
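/* Editorial example: the string-read optimization above folds
   `*("abc" + 1)' straight to (const_int 98), the code for 'b',
   instead of emitting a load from the string constant.  */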
6831    case ARRAY_REF:
6832      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6833        abort ();
6834
6835      {
6836        tree array = TREE_OPERAND (exp, 0);
6837        tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6838        tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6839        tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6840        HOST_WIDE_INT i;
6841
6842        /* Optimize the special-case of a zero lower bound.
6843
6844           We convert the low_bound to sizetype to avoid some problems
6845           with constant folding.  (E.g. suppose the lower bound is 1,
6846           and its mode is QI.  Without the conversion, (ARRAY
6847           +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6848           +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
6849
6850        if (! integer_zerop (low_bound))
6851          index = size_diffop (index, convert (sizetype, low_bound));
6852
6853        /* Fold an expression like: "foo"[2].
6854           This is not done in fold so it won't happen inside &.
6855           Don't fold if this is for wide characters since it's too
6856           difficult to do correctly and this is a very rare case.  */
6857
6858        if (modifier != EXPAND_CONST_ADDRESS
6859            && modifier != EXPAND_INITIALIZER
6860            && modifier != EXPAND_MEMORY
6861            && TREE_CODE (array) == STRING_CST
6862            && TREE_CODE (index) == INTEGER_CST
6863            && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6864            && GET_MODE_CLASS (mode) == MODE_INT
6865            && GET_MODE_SIZE (mode) == 1)
6866          return gen_int_mode (TREE_STRING_POINTER (array)
6867                               [TREE_INT_CST_LOW (index)], mode);
6868
6869        /* If this is a constant index into a constant array,
6870           just get the value from the array.  Handle both the cases when
6871           we have an explicit constructor and when our operand is a variable
6872           that was declared const.  */
6873
6874        if (modifier != EXPAND_CONST_ADDRESS
6875            && modifier != EXPAND_INITIALIZER
6876            && modifier != EXPAND_MEMORY
6877            && TREE_CODE (array) == CONSTRUCTOR
6878            && ! TREE_SIDE_EFFECTS (array)
6879            && TREE_CODE (index) == INTEGER_CST
6880            && 0 > compare_tree_int (index,
6881                                     list_length (CONSTRUCTOR_ELTS
6882                                                  (TREE_OPERAND (exp, 0)))))
6883          {
6884            tree elem;
6885
6886            for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6887                 i = TREE_INT_CST_LOW (index);
6888                 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6889              ;
6890
6891            if (elem)
6892              return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6893                                  modifier);
6894          }
6895
6896        else if (optimize >= 1
6897                 && modifier != EXPAND_CONST_ADDRESS
6898                 && modifier != EXPAND_INITIALIZER
6899                 && modifier != EXPAND_MEMORY
6900                 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6901                 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6902                 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
6903                 && targetm.binds_local_p (array))
6904          {
6905            if (TREE_CODE (index) == INTEGER_CST)
6906              {
6907                tree init = DECL_INITIAL (array);
6908
6909                if (TREE_CODE (init) == CONSTRUCTOR)
6910                  {
6911                    tree elem;
6912
6913                    for (elem = CONSTRUCTOR_ELTS (init);
6914                         (elem
6915                          && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6916                         elem = TREE_CHAIN (elem))
6917                      ;
6918
6919                    if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6920                      return expand_expr (fold (TREE_VALUE (elem)), target,
6921                                          tmode, modifier);
6922                  }
6923                else if (TREE_CODE (init) == STRING_CST
6924                         && 0 > compare_tree_int (index,
6925                                                  TREE_STRING_LENGTH (init)))
6926                  {
6927                    tree type = TREE_TYPE (TREE_TYPE (init));
6928                    enum machine_mode mode = TYPE_MODE (type);
6929
6930                    if (GET_MODE_CLASS (mode) == MODE_INT
6931                        && GET_MODE_SIZE (mode) == 1)
6932                      return gen_int_mode (TREE_STRING_POINTER (init)
6933                                           [TREE_INT_CST_LOW (index)], mode);
6934                  }
6935              }
6936          }
6937      }
6938      goto normal_inner_ref;
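/* Editorial example: with -O1 or higher, a read such as

     static const int tbl[3] = {10, 20, 30};
     ... tbl[1] ...

   is folded right here to (const_int 20), provided `tbl' binds
   locally and its initializer has no side effects.  */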

6940    case COMPONENT_REF:
6941      /* If the operand is a CONSTRUCTOR, we can just extract the
6942         appropriate field if it is present.  */
6943      if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
6944        {
6945          tree elt;
6946
6947          for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6948               elt = TREE_CHAIN (elt))
6949            if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6950                /* We can normally use the value of the field in the
6951                   CONSTRUCTOR.  However, if this is a bitfield in
6952                   an integral mode that we can fit in a HOST_WIDE_INT,
6953                   we must mask only the number of bits in the bitfield,
6954                   since this is done implicitly by the constructor.  If
6955                   the bitfield does not meet either of those conditions,
6956                   we can't do this optimization.  */
6957                && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6958                    || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6959                         == MODE_INT)
6960                        && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6961                            <= HOST_BITS_PER_WIDE_INT))))
6962              {
6963                if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
6964                    && modifier == EXPAND_STACK_PARM)
6965                  target = 0;
6966                op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6967                if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6968                  {
6969                    HOST_WIDE_INT bitsize
6970                      = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6971                    enum machine_mode imode
6972                      = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6973
6974                    if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6975                      {
6976                        op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6977                        op0 = expand_and (imode, op0, op1, target);
6978                      }
6979                    else
6980                      {
6981                        tree count
6982                          = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6983                                         0);
6984
6985                        op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6986                                            target, 0);
6987                        op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6988                                            target, 0);
6989                      }
6990                  }
6991
6992                return op0;
6993              }
6994        }
6995      goto normal_inner_ref;
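/* Editorial sketch of the masking above: given

     struct S { unsigned u : 3; int s : 3; };

   extracting `u' from a CONSTRUCTOR value V expands to `V & 7', while
   the signed `s' is sign-extended by the two shifts:
   (V << (BITS - 3)) >> (BITS - 3), arithmetically.  */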
6996
6997    case BIT_FIELD_REF:
6998    case ARRAY_RANGE_REF:
6999    normal_inner_ref:
7000      {
7001        enum machine_mode mode1;
7002        HOST_WIDE_INT bitsize, bitpos;
7003        tree offset;
7004        int volatilep = 0;
7005        tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7006                                        &mode1, &unsignedp, &volatilep);
7007        rtx orig_op0;
7008
7009        /* If we got back the original object, something is wrong.  Perhaps
7010           we are evaluating an expression too early.  In any event, don't
7011           infinitely recurse.  */
7012        if (tem == exp)
7013          abort ();
7014
7015        /* If TEM's type is a union of variable size, pass TARGET to the inner
7016           computation, since it will need a temporary and TARGET is known
7017           to suffice.  This occurs in unchecked conversion in Ada.  */
7018
7019        orig_op0 = op0
7020          = expand_expr (tem,
7021                         (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7022                          && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7023                              != INTEGER_CST)
7024                          && modifier != EXPAND_STACK_PARM
7025                          ? target : NULL_RTX),
7026                         VOIDmode,
7027                         (modifier == EXPAND_INITIALIZER
7028                          || modifier == EXPAND_CONST_ADDRESS
7029                          || modifier == EXPAND_STACK_PARM)
7030                         ? modifier : EXPAND_NORMAL);
7031
7032        /* If this is a constant, put it into a register if it is a
7033           legitimate constant and OFFSET is 0 and memory if it isn't.  */
7034        if (CONSTANT_P (op0))
7035          {
7036            enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7037            if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7038                && offset == 0)
7039              op0 = force_reg (mode, op0);
7040            else
7041              op0 = validize_mem (force_const_mem (mode, op0));
7042          }
7043
7044        /* Otherwise, if this object is not in memory and we either have an
7045           offset or a BLKmode result, put it there.  This case can't occur in
7046           C, but can in Ada if we have unchecked conversion of an expression
7047           from a scalar type to an array or record type or for an
7048           ARRAY_RANGE_REF whose type is BLKmode.  */
7049        else if (GET_CODE (op0) != MEM
7050                 && (offset != 0
7051                     || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7052          {
7053            /* If the operand is a SAVE_EXPR, we can deal with this by
7054               forcing the SAVE_EXPR into memory.  */
7055            if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7056              {
7057                put_var_into_stack (TREE_OPERAND (exp, 0),
7058                                    /*rescan=*/true);
7059                op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7060              }
7061            else
7062              {
7063                tree nt
7064                  = build_qualified_type (TREE_TYPE (tem),
7065                                          (TYPE_QUALS (TREE_TYPE (tem))
7066                                           | TYPE_QUAL_CONST));
7067                rtx memloc = assign_temp (nt, 1, 1, 1);
7068
7069                emit_move_insn (memloc, op0);
7070                op0 = memloc;
7071              }
7072          }
7073
7074        if (offset != 0)
7075          {
7076            rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7077                                          EXPAND_SUM);
7078
7079            if (GET_CODE (op0) != MEM)
7080              abort ();
7081
7082#ifdef POINTERS_EXTEND_UNSIGNED
7083            if (GET_MODE (offset_rtx) != Pmode)
7084              offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7085#else
7086            if (GET_MODE (offset_rtx) != ptr_mode)
7087              offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7088#endif
7089
7090            if (GET_MODE (op0) == BLKmode
7091                /* A constant address in OP0 can have VOIDmode, we must
7092                   not try to call force_reg in that case.  */
7093                && GET_MODE (XEXP (op0, 0)) != VOIDmode
7094                && bitsize != 0
7095                && (bitpos % bitsize) == 0
7096                && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7097                && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7098              {
7099                op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7100                bitpos = 0;
7101              }
7102
7103            op0 = offset_address (op0, offset_rtx,
7104                                  highest_pow2_factor (offset));
7105          }
7106
7107        /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7108           record its alignment as BIGGEST_ALIGNMENT.  */
7109        if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7110            && is_aligning_offset (offset, tem))
7111          set_mem_align (op0, BIGGEST_ALIGNMENT);
7112
7113        /* Don't forget about volatility even if this is a bitfield.  */
7114        if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7115          {
7116            if (op0 == orig_op0)
7117              op0 = copy_rtx (op0);
7118
7119            MEM_VOLATILE_P (op0) = 1;
7120          }
7121
7122        /* The following code doesn't handle CONCAT.
7123           Assume only bitpos == 0 can be used for CONCAT, due to
7124           one-element arrays having the same mode as their element.  */
7125        if (GET_CODE (op0) == CONCAT)
7126          {
7127            if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7128              abort ();
7129            return op0;
7130          }
7131
7132        /* In cases where an aligned union has an unaligned object
7133           as a field, we might be extracting a BLKmode value from
7134           an integer-mode (e.g., SImode) object.  Handle this case
7135           by doing the extract into an object as wide as the field
7136           (which we know to be the width of a basic mode), then
7137           storing into memory, and changing the mode to BLKmode.  */
7138        if (mode1 == VOIDmode
7139            || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7140            || (mode1 != BLKmode && ! direct_load[(int) mode1]
7141                && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7142                && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7143                && modifier != EXPAND_CONST_ADDRESS
7144                && modifier != EXPAND_INITIALIZER)
7145            /* If the field isn't aligned enough to fetch as a memref,
7146               fetch it as a bit field.  */
7147            || (mode1 != BLKmode
7148                && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7149                      || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7150                      || (GET_CODE (op0) == MEM
7151                          && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7152                              || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7153                     && ((modifier == EXPAND_CONST_ADDRESS
7154                          || modifier == EXPAND_INITIALIZER)
7155                         ? STRICT_ALIGNMENT
7156                         : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7157                    || (bitpos % BITS_PER_UNIT != 0)))
7158            /* If the type and the field are a constant size and the
7159               size of the type isn't the same size as the bitfield,
7160               we must use bitfield operations.  */
7161            || (bitsize >= 0
7162                && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7163                    == INTEGER_CST)
7164                && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7165                                          bitsize)))
7166          {
7167            enum machine_mode ext_mode = mode;
7168
7169            if (ext_mode == BLKmode
7170                && ! (target != 0 && GET_CODE (op0) == MEM
7171                      && GET_CODE (target) == MEM
7172                      && bitpos % BITS_PER_UNIT == 0))
7173              ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7174
7175            if (ext_mode == BLKmode)
7176              {
7177                if (target == 0)
7178                  target = assign_temp (type, 0, 1, 1);
7179
7180                if (bitsize == 0)
7181                  return target;
7182
7183                /* In this case, BITPOS must start at a byte boundary and
7184                   TARGET, if specified, must be a MEM.  */
7185                if (GET_CODE (op0) != MEM
7186                    || (target != 0 && GET_CODE (target) != MEM)
7187                    || bitpos % BITS_PER_UNIT != 0)
7188                  abort ();
7189
7190                emit_block_move (target,
7191                                 adjust_address (op0, VOIDmode,
7192                                                 bitpos / BITS_PER_UNIT),
7193                                 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7194                                          / BITS_PER_UNIT),
7195                                 (modifier == EXPAND_STACK_PARM
7196                                  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7197
7198                return target;
7199              }
7200
7201            op0 = validize_mem (op0);
7202
7203            if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7204              mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7205
7206            op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7207                                     (modifier == EXPAND_STACK_PARM
7208                                      ? NULL_RTX : target),
7209                                     ext_mode, ext_mode,
7210                                     int_size_in_bytes (TREE_TYPE (tem)));
7211
7212            /* If the result is a record type and BITSIZE is narrower than
7213               the mode of OP0, an integral mode, and this is a big endian
7214               machine, we must put the field into the high-order bits.  */
7215            if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7216                && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7217                && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7218              op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7219                                  size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7220                                            - bitsize),
7221                                  op0, 1);
7222
7223            if (mode == BLKmode)
7224              {
7225                rtx new = assign_temp (build_qualified_type
7226                                       ((*lang_hooks.types.type_for_mode)
7227                                        (ext_mode, 0),
7228                                        TYPE_QUAL_CONST), 0, 1, 1);
7229
7230                emit_move_insn (new, op0);
7231                op0 = copy_rtx (new);
7232                PUT_MODE (op0, BLKmode);
7233                set_mem_attributes (op0, exp, 1);
7234              }
7235
7236            return op0;
7237          }
7238
7239        /* If the result is BLKmode, use that to access the object
7240           now as well.  */
7241        if (mode == BLKmode)
7242          mode1 = BLKmode;
7243
7244        /* Get a reference to just this component.  */
7245        if (modifier == EXPAND_CONST_ADDRESS
7246            || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7247          op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7248        else
7249          op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7250
7251        if (op0 == orig_op0)
7252          op0 = copy_rtx (op0);
7253
7254        set_mem_attributes (op0, exp, 0);
7255        if (GET_CODE (XEXP (op0, 0)) == REG)
7256          mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7257
7258        MEM_VOLATILE_P (op0) |= volatilep;
7259        if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7260            || modifier == EXPAND_CONST_ADDRESS
7261            || modifier == EXPAND_INITIALIZER)
7262          return op0;
7263        else if (target == 0)
7264          target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7265
7266        convert_move (target, op0, unsignedp);
7267        return target;
7268      }
7269
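/* Editorial example: for `struct S { int a : 5; } s;' a read of `s.a'
   with bitpos 0 and bitsize 5 ends in the call above, roughly
   extract_bit_field (op0, 5, 0, 0, target, SImode, SImode, size),
   which itself emits the extv or shift/mask sequence.  */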
7270    case VTABLE_REF:
7271      {
7272        rtx insn, before = get_last_insn (), vtbl_ref;
7273
7274        /* Evaluate the interior expression.  */
7275        subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7276                                 tmode, modifier);
7277
7278        /* Get or create an instruction off which to hang a note.  */
7279        if (REG_P (subtarget))
7280          {
7281            target = subtarget;
7282            insn = get_last_insn ();
7283            if (insn == before)
7284              abort ();
7285            if (! INSN_P (insn))
7286              insn = prev_nonnote_insn (insn);
7287          }
7288        else
7289          {
7290            target = gen_reg_rtx (GET_MODE (subtarget));
7291            insn = emit_move_insn (target, subtarget);
7292          }
7293
7294        /* Collect the data for the note.  */
7295        vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7296        vtbl_ref = plus_constant (vtbl_ref,
7297                                  tree_low_cst (TREE_OPERAND (exp, 2), 0));
7298        /* Discard the initial CONST that was added.  */
7299        vtbl_ref = XEXP (vtbl_ref, 0);
7300
7301        REG_NOTES (insn)
7302          = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7303
7304        return target;
7305      }
7306
7307      /* Intended for a reference to a buffer of a file-object in Pascal.
7308         But it's not certain that a special tree code will really be
7309         necessary for these.  INDIRECT_REF might work for them.  */
7310    case BUFFER_REF:
7311      abort ();
7312
7313    case IN_EXPR:
7314      {
7315        /* Pascal set IN expression.
7316
7317           Algorithm:
7318               rlo       = set_low - (set_low%bits_per_word);
7319               the_word  = set [ (index - rlo)/bits_per_word ];
7320               bit_index = index % bits_per_word;
7321               bitmask   = 1 << bit_index;
7322               return !!(the_word & bitmask);  */
7323
7324        tree set = TREE_OPERAND (exp, 0);
7325        tree index = TREE_OPERAND (exp, 1);
7326        int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7327        tree set_type = TREE_TYPE (set);
7328        tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7329        tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7330        rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7331        rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7332        rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7333        rtx setval = expand_expr (set, 0, VOIDmode, 0);
7334        rtx setaddr = XEXP (setval, 0);
7335        enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7336        rtx rlow;
7337        rtx diff, quo, rem, addr, bit, result;
7338
7339        /* If domain is empty, answer is no.  Likewise if index is constant
7340           and out of bounds.  */
7341        if (((TREE_CODE (set_high_bound) == INTEGER_CST
7342              && TREE_CODE (set_low_bound) == INTEGER_CST
7343              && tree_int_cst_lt (set_high_bound, set_low_bound))
7344             || (TREE_CODE (index) == INTEGER_CST
7345                 && TREE_CODE (set_low_bound) == INTEGER_CST
7346                 && tree_int_cst_lt (index, set_low_bound))
7347             || (TREE_CODE (set_high_bound) == INTEGER_CST
7348                 && TREE_CODE (index) == INTEGER_CST
7349                 && tree_int_cst_lt (set_high_bound, index))))
7350          return const0_rtx;
7351
7352        if (target == 0)
7353          target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7354
7355        /* If we get here, we have to generate the code for both cases
7356           (in range and out of range).  */
7357
7358        op0 = gen_label_rtx ();
7359        op1 = gen_label_rtx ();
7360
7361        if (! (GET_CODE (index_val) == CONST_INT
7362               && GET_CODE (lo_r) == CONST_INT))
7363          emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7364                                   GET_MODE (index_val), iunsignedp, op1);
7365
7366        if (! (GET_CODE (index_val) == CONST_INT
7367               && GET_CODE (hi_r) == CONST_INT))
7368          emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7369                                   GET_MODE (index_val), iunsignedp, op1);
7370
7371        /* Calculate the element number of bit zero in the first word
7372           of the set.  */
7373        if (GET_CODE (lo_r) == CONST_INT)
7374          rlow = GEN_INT (INTVAL (lo_r)
7375                          & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7376        else
7377          rlow = expand_binop (index_mode, and_optab, lo_r,
7378                               GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7379                               NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7380
7381        diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7382                             NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7383
7384        quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7385                             GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7386        rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7387                             GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7388
7389        addr = memory_address (byte_mode,
7390                               expand_binop (index_mode, add_optab, diff,
7391                                             setaddr, NULL_RTX, iunsignedp,
7392                                             OPTAB_LIB_WIDEN));
7393
7394        /* Extract the bit we want to examine.  */
7395        bit = expand_shift (RSHIFT_EXPR, byte_mode,
7396                            gen_rtx_MEM (byte_mode, addr),
7397                            make_tree (TREE_TYPE (index), rem),
7398                            NULL_RTX, 1);
7399        result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7400                               GET_MODE (target) == byte_mode ? target : 0,
7401                               1, OPTAB_LIB_WIDEN);
7402
7403        if (result != target)
7404          convert_move (target, result, 1);
7405
7406        /* Output the code to handle the out-of-range case.  */
7407        emit_jump (op0);
7408        emit_label (op1);
7409        emit_move_insn (target, const0_rtx);
7410        emit_label (op0);
7411        return target;
7412      }
7413
7414    case WITH_CLEANUP_EXPR:
7415      if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7416        {
7417          WITH_CLEANUP_EXPR_RTL (exp)
7418            = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7419          expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7420                                  CLEANUP_EH_ONLY (exp));
7421
7422          /* That's it for this cleanup.  */
7423          TREE_OPERAND (exp, 1) = 0;
7424        }
7425      return WITH_CLEANUP_EXPR_RTL (exp);
7426
7427    case CLEANUP_POINT_EXPR:
7428      {
7429        /* Start a new binding layer that will keep track of all cleanup
7430           actions to be performed.  */
7431        expand_start_bindings (2);
7432
7433        target_temp_slot_level = temp_slot_level;
7434
7435        op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7436        /* If we're going to use this value, load it up now.  */
7437        if (! ignore)
7438          op0 = force_not_mem (op0);
7439        preserve_temp_slots (op0);
7440        expand_end_bindings (NULL_TREE, 0, 0);
7441      }
7442      return op0;
7443
7444    case CALL_EXPR:
7445      /* Check for a built-in function.  */
7446      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7447          && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7448              == FUNCTION_DECL)
7449          && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7450        {
7451          if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7452              == BUILT_IN_FRONTEND)
7453            return (*lang_hooks.expand_expr) (exp, original_target,
7454                                              tmode, modifier,
7455                                              alt_rtl);
7456          else
7457            return expand_builtin (exp, target, subtarget, tmode, ignore);
7458        }
7459
7460      return expand_call (exp, target, ignore);
7461
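/* Editorial note: a call such as `__builtin_memcpy (d, s, 16)' reaches
   expand_builtin above and may turn into inline move insns; an
   ordinary call simply falls through to expand_call.  */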
7462    case NON_LVALUE_EXPR:
7463    case NOP_EXPR:
7464    case CONVERT_EXPR:
7465    case REFERENCE_EXPR:
7466      if (TREE_OPERAND (exp, 0) == error_mark_node)
7467        return const0_rtx;
7468
7469      if (TREE_CODE (type) == UNION_TYPE)
7470        {
7471          tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7472
7473          /* If both input and output are BLKmode, this conversion isn't doing
7474             anything except possibly changing memory attributes.  */
7475          if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7476            {
7477              rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7478                                        modifier);
7479
7480              result = copy_rtx (result);
7481              set_mem_attributes (result, exp, 0);
7482              return result;
7483            }
7484
7485          if (target == 0)
7486            {
7487              if (TYPE_MODE (type) != BLKmode)
7488                target = gen_reg_rtx (TYPE_MODE (type));
7489              else
7490                target = assign_temp (type, 0, 1, 1);
7491            }
7492
7493          if (GET_CODE (target) == MEM)
7494            /* Store data into beginning of memory target.  */
7495            store_expr (TREE_OPERAND (exp, 0),
7496                        adjust_address (target, TYPE_MODE (valtype), 0),
7497                        modifier == EXPAND_STACK_PARM ? 2 : 0);
7498
7499          else if (GET_CODE (target) == REG)
7500            /* Store this field into a union of the proper type.  */
7501            store_field (target,
7502                         MIN ((int_size_in_bytes (TREE_TYPE
7503                                                  (TREE_OPERAND (exp, 0)))
7504                               * BITS_PER_UNIT),
7505                              (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7506                         0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7507                         VOIDmode, 0, type, 0);
7508          else
7509            abort ();
7510
7511          /* Return the entire union.  */
7512          return target;
7513        }
7514
7515      if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7516        {
7517          op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7518                             modifier);
7519
7520          /* If the signedness of the conversion differs and OP0 is
7521             a promoted SUBREG, clear that indication since we now
7522             have to do the proper extension.  */
7523          if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7524              && GET_CODE (op0) == SUBREG)
7525            SUBREG_PROMOTED_VAR_P (op0) = 0;
7526
7527          return op0;
7528        }
7529
7530      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7531      if (GET_MODE (op0) == mode)
7532        return op0;
7533
7534      /* If OP0 is a constant, just convert it into the proper mode.  */
7535      if (CONSTANT_P (op0))
7536        {
7537          tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7538          enum machine_mode inner_mode = TYPE_MODE (inner_type);
7539
7540          if (modifier == EXPAND_INITIALIZER)
7541            return simplify_gen_subreg (mode, op0, inner_mode,
7542                                        subreg_lowpart_offset (mode,
7543                                                               inner_mode));
7544          else
7545            return convert_modes (mode, inner_mode, op0,
7546                                  TREE_UNSIGNED (inner_type));
7547        }
7548
7549      if (modifier == EXPAND_INITIALIZER)
7550        return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7551
7552      if (target == 0)
7553        return
7554          convert_to_mode (mode, op0,
7555                           TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7556      else
7557        convert_move (target, op0,
7558                      TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7559      return target;
7560
7561    case VIEW_CONVERT_EXPR:
7562      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7563
7564      /* If the input and output modes are both the same, we are done.
7565         Otherwise, if neither mode is BLKmode and both are integral and within
7566         a word, we can use gen_lowpart.  If neither is true, make sure the
7567         operand is in memory and convert the MEM to the new mode.  */
7568      if (TYPE_MODE (type) == GET_MODE (op0))
7569        ;
7570      else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7571               && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7572               && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7573               && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7574               && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7575        op0 = gen_lowpart (TYPE_MODE (type), op0);
7576      else if (GET_CODE (op0) != MEM)
7577        {
7578          /* If the operand is not a MEM, force it into memory.  Since we
7579             are going to be changing the mode of the MEM, don't call
7580             force_const_mem for constants because we don't allow pool
7581             constants to change mode.  */
7582          tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7583
7584          if (TREE_ADDRESSABLE (exp))
7585            abort ();
7586
7587          if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7588            target
7589              = assign_stack_temp_for_type
7590                (TYPE_MODE (inner_type),
7591                 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7592
7593          emit_move_insn (target, op0);
7594          op0 = target;
7595        }
7596
7597      /* At this point, OP0 is in the correct mode.  If the output type is such
7598         that the operand is known to be aligned, indicate that it is.
7599         Otherwise, we need only be concerned about alignment for non-BLKmode
7600         results.  */
7601      if (GET_CODE (op0) == MEM)
7602        {
7603          op0 = copy_rtx (op0);
7604
7605          if (TYPE_ALIGN_OK (type))
7606            set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7607          else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7608                   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7609            {
7610              tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7611              HOST_WIDE_INT temp_size
7612                = MAX (int_size_in_bytes (inner_type),
7613                       (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7614              rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7615                                                    temp_size, 0, type);
7616              rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7617
7618              if (TREE_ADDRESSABLE (exp))
7619                abort ();
7620
7621              if (GET_MODE (op0) == BLKmode)
7622                emit_block_move (new_with_op0_mode, op0,
7623                                 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7624                                 (modifier == EXPAND_STACK_PARM
7625                                  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7626              else
7627                emit_move_insn (new_with_op0_mode, op0);
7628
7629              op0 = new;
7630            }
7631
7632          op0 = adjust_address (op0, TYPE_MODE (type), 0);
7633        }
7634
7635      return op0;
7636
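/* Editorial note: VIEW_CONVERT_EXPR reinterprets the operand's bits as
   another type.  Viewing a float F as an int is float/integral, so the
   gen_lowpart shortcut above does not apply; instead F is spilled to a
   stack temporary and re-read with the MEM's mode changed to the
   integer mode.  */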
bbf6f052 7637 case PLUS_EXPR:
91ce572a 7638 this_optab = ! unsignedp && flag_trapv
a9785c70 7639 && (GET_MODE_CLASS (mode) == MODE_INT)
91ce572a 7640 ? addv_optab : add_optab;
bbf6f052
RK
7641
7642 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7643 something else, make sure we add the register to the constant and
7644 then to the other thing. This case can occur during strength
7645 reduction and doing it this way will produce better code if the
7646 frame pointer or argument pointer is eliminated.
7647
7648 fold-const.c will ensure that the constant is always in the inner
7649 PLUS_EXPR, so the only case we need to do anything about is if
7650 sp, ap, or fp is our second argument, in which case we must swap
7651 the innermost first argument and our second argument. */
7652
7653 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7654 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7655 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7656 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7657 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7658 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7659 {
7660 tree t = TREE_OPERAND (exp, 1);
7661
7662 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7663 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7664 }
7665
88f63c77 7666 /* If the result is to be ptr_mode and we are adding an integer to
bbf6f052
RK
7667 something, we might be forming a constant. So try to use
7668 plus_constant. If it produces a sum and we can't accept it,
7669 use force_operand. This allows P = &ARR[const] to generate
7670 efficient code on machines where a SYMBOL_REF is not a valid
7671 address.
7672
7673 If this is an EXPAND_SUM call, always return the sum. */
c980ac49 7674 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
0fb7aeda 7675 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
bbf6f052 7676 {
8403445a
AM
7677 if (modifier == EXPAND_STACK_PARM)
7678 target = 0;
c980ac49
RS
7679 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7680 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7681 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7682 {
cbbc503e
JL
7683 rtx constant_part;
7684
c980ac49
RS
7685 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7686 EXPAND_SUM);
cbbc503e
JL
7687 /* Use immed_double_const to ensure that the constant is
7688 truncated according to the mode of OP1, then sign extended
7689 to a HOST_WIDE_INT. Using the constant directly can result
7690 in non-canonical RTL in a 64x32 cross compile. */
7691 constant_part
7692 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7693 (HOST_WIDE_INT) 0,
a5efcd63 7694 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7f401c74 7695 op1 = plus_constant (op1, INTVAL (constant_part));
c980ac49
RS
7696 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7697 op1 = force_operand (op1, target);
7698 return op1;
7699 }
bbf6f052 7700
c980ac49
RS
7701 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7702 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7703 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7704 {
cbbc503e
JL
7705 rtx constant_part;
7706
c980ac49 7707 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
70d95bac
RH
7708 (modifier == EXPAND_INITIALIZER
7709 ? EXPAND_INITIALIZER : EXPAND_SUM));
c980ac49
RS
7710 if (! CONSTANT_P (op0))
7711 {
7712 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7713 VOIDmode, modifier);
f0e9957a
RS
7714 /* Return a PLUS if modifier says it's OK. */
7715 if (modifier == EXPAND_SUM
7716 || modifier == EXPAND_INITIALIZER)
7717 return simplify_gen_binary (PLUS, mode, op0, op1);
7718 goto binop2;
c980ac49 7719 }
cbbc503e
JL
7720 /* Use immed_double_const to ensure that the constant is
7721 truncated according to the mode of OP1, then sign extended
7722 to a HOST_WIDE_INT. Using the constant directly can result
7723 in non-canonical RTL in a 64x32 cross compile. */
7724 constant_part
7725 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7726 (HOST_WIDE_INT) 0,
2a94e396 7727 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7f401c74 7728 op0 = plus_constant (op0, INTVAL (constant_part));
c980ac49
RS
7729 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7730 op0 = force_operand (op0, target);
7731 return op0;
7732 }
bbf6f052
RK
7733 }
7734
7735 /* No sense saving up arithmetic to be done
7736 if it's all in the wrong mode to form part of an address.
7737 And force_operand won't know whether to sign-extend or
7738 zero-extend. */
7739 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
88f63c77 7740 || mode != ptr_mode)
4ef7870a 7741 {
eb698c58
RS
7742 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7743 subtarget, &op0, &op1, 0);
6e7727eb
EB
7744 if (op0 == const0_rtx)
7745 return op1;
7746 if (op1 == const0_rtx)
7747 return op0;
4ef7870a
EB
7748 goto binop2;
7749 }
bbf6f052 7750
eb698c58
RS
7751 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7752 subtarget, &op0, &op1, modifier);
f0e9957a 7753 return simplify_gen_binary (PLUS, mode, op0, op1);
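A minimal sketch, not part of expr.c, of what the plus_constant path above buys; the global array `arr` and its 4-byte element size are assumed for illustration:

    rtx sym = gen_rtx_SYMBOL_REF (Pmode, "arr");  /* hypothetical &arr */
    rtx sum = plus_constant (sym, 4 * 4);         /* &arr[4], folded into the address */
    /* If SUM is not a valid address and a bare PLUS cannot be
       returned, force_operand (sum, target) materializes it.  */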
bbf6f052
RK
7754
7755 case MINUS_EXPR:
ea87523e
RK
7756 /* For initializers, we are allowed to return a MINUS of two
7757 symbolic constants. Here we handle all cases when both operands
7758 are constant. */
bbf6f052
RK
7759 /* Handle difference of two symbolic constants,
7760 for the sake of an initializer. */
7761 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7762 && really_constant_p (TREE_OPERAND (exp, 0))
7763 && really_constant_p (TREE_OPERAND (exp, 1)))
7764 {
eb698c58
RS
7765 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7766 NULL_RTX, &op0, &op1, modifier);
ea87523e 7767
ea87523e
RK
7768 /* If the last operand is a CONST_INT, use plus_constant of
7769 the negated constant. Else make the MINUS. */
7770 if (GET_CODE (op1) == CONST_INT)
7771 return plus_constant (op0, - INTVAL (op1));
7772 else
38a448ca 7773 return gen_rtx_MINUS (mode, op0, op1);
bbf6f052 7774 }
ae431183 7775
91ce572a
CC
7776 this_optab = ! unsignedp && flag_trapv
7777 && (GET_MODE_CLASS(mode) == MODE_INT)
7778 ? subv_optab : sub_optab;
1717e19e
UW
7779
7780 /* No sense saving up arithmetic to be done
7781 if it's all in the wrong mode to form part of an address.
7782 And force_operand won't know whether to sign-extend or
7783 zero-extend. */
7784 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7785 || mode != ptr_mode)
7786 goto binop;
7787
eb698c58
RS
7788 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7789 subtarget, &op0, &op1, modifier);
1717e19e
UW
7790
7791 /* Convert A - const to A + (-const). */
7792 if (GET_CODE (op1) == CONST_INT)
7793 {
7794 op1 = negate_rtx (mode, op1);
f0e9957a 7795 return simplify_gen_binary (PLUS, mode, op0, op1);
1717e19e
UW
7796 }
7797
7798 goto binop2;
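A sketch of the rewrite just above, assuming MODE is the operands' integer mode and OP0 has already been expanded:

    rtx neg = negate_rtx (mode, GEN_INT (5));              /* (const_int -5) */
    rtx sum = simplify_gen_binary (PLUS, mode, op0, neg);  /* A + (-5) */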
bbf6f052
RK
7799
7800 case MULT_EXPR:
bbf6f052
RK
7801 /* If first operand is constant, swap them.
7802 Thus the following special case checks need only
7803 check the second operand. */
7804 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7805 {
b3694847 7806 tree t1 = TREE_OPERAND (exp, 0);
bbf6f052
RK
7807 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7808 TREE_OPERAND (exp, 1) = t1;
7809 }
7810
7811 /* Attempt to return something suitable for generating an
7812 indexed address, for machines that support that. */
7813
88f63c77 7814 if (modifier == EXPAND_SUM && mode == ptr_mode
3b40e71b 7815 && host_integerp (TREE_OPERAND (exp, 1), 0))
bbf6f052 7816 {
48a5f2fa
DJ
7817 tree exp1 = TREE_OPERAND (exp, 1);
7818
921b3427
RK
7819 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7820 EXPAND_SUM);
bbf6f052 7821
bbf6f052 7822 if (GET_CODE (op0) != REG)
906c4e36 7823 op0 = force_operand (op0, NULL_RTX);
bbf6f052
RK
7824 if (GET_CODE (op0) != REG)
7825 op0 = copy_to_mode_reg (mode, op0);
7826
48a5f2fa
DJ
7827 return gen_rtx_MULT (mode, op0,
7828 gen_int_mode (tree_low_cst (exp1, 0),
7829 TYPE_MODE (TREE_TYPE (exp1))));
bbf6f052
RK
7830 }
7831
8403445a
AM
7832 if (modifier == EXPAND_STACK_PARM)
7833 target = 0;
7834
bbf6f052
RK
7835 /* Check for multiplying things that have been extended
7836 from a narrower type. If this machine supports multiplying
7837 in that narrower type with a result in the desired type,
7838 do it that way, and avoid the explicit type-conversion. */
7839 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7840 && TREE_CODE (type) == INTEGER_TYPE
7841 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7842 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7843 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7844 && int_fits_type_p (TREE_OPERAND (exp, 1),
7845 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7846 /* Don't use a widening multiply if a shift will do. */
7847 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
906c4e36 7848 > HOST_BITS_PER_WIDE_INT)
bbf6f052
RK
7849 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7850 ||
7851 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7852 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7853 ==
7854 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7855 /* If both operands are extended, they must either both
7856 be zero-extended or both be sign-extended. */
7857 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7858 ==
7859 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7860 {
7861 enum machine_mode innermode
7862 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
b10af0c8
TG
7863 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7864 ? smul_widen_optab : umul_widen_optab);
bbf6f052
RK
7865 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7866 ? umul_widen_optab : smul_widen_optab);
b10af0c8 7867 if (mode == GET_MODE_WIDER_MODE (innermode))
bbf6f052 7868 {
b10af0c8
TG
7869 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7870 {
b10af0c8 7871 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
eb698c58
RS
7872 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7873 TREE_OPERAND (exp, 1),
7874 NULL_RTX, &op0, &op1, 0);
b10af0c8 7875 else
eb698c58
RS
7876 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7877 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7878 NULL_RTX, &op0, &op1, 0);
b10af0c8
TG
7879 goto binop2;
7880 }
7881 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7882 && innermode == word_mode)
7883 {
7884 rtx htem;
7885 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7886 NULL_RTX, VOIDmode, 0);
7887 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8c118062
GK
7888 op1 = convert_modes (innermode, mode,
7889 expand_expr (TREE_OPERAND (exp, 1),
7890 NULL_RTX, VOIDmode, 0),
7891 unsignedp);
b10af0c8
TG
7892 else
7893 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7894 NULL_RTX, VOIDmode, 0);
7895 temp = expand_binop (mode, other_optab, op0, op1, target,
7896 unsignedp, OPTAB_LIB_WIDEN);
7897 htem = expand_mult_highpart_adjust (innermode,
7898 gen_highpart (innermode, temp),
7899 op0, op1,
7900 gen_highpart (innermode, temp),
7901 unsignedp);
7902 emit_move_insn (gen_highpart (innermode, temp), htem);
7903 return temp;
7904 }
bbf6f052
RK
7905 }
7906 }
eb698c58
RS
7907 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7908 subtarget, &op0, &op1, 0);
bbf6f052
RK
7909 return expand_mult (mode, op0, op1, target, unsignedp);
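The source-level pattern the widening-multiply check above looks for, sketched in C; it assumes the target provides a 16x16->32 multiply (e.g. a mulhisi3 pattern):

    short a, b;
    int c = (int) a * (int) b;   /* one widening multiply, no explicit extensions */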
7910
7911 case TRUNC_DIV_EXPR:
7912 case FLOOR_DIV_EXPR:
7913 case CEIL_DIV_EXPR:
7914 case ROUND_DIV_EXPR:
7915 case EXACT_DIV_EXPR:
8403445a
AM
7916 if (modifier == EXPAND_STACK_PARM)
7917 target = 0;
bbf6f052
RK
 7918	  /* Possible optimization: compute the dividend with EXPAND_SUM,
 7919	     then if the divisor is constant we can optimize the case
 7920	     where some terms of the dividend have coefficients divisible by it. */
eb698c58
RS
7921 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7922 subtarget, &op0, &op1, 0);
bbf6f052
RK
7923 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7924
7925 case RDIV_EXPR:
b7e9703c
JH
 7926	      /* Emit a/b as a*(1/b).  Later we may be able to CSE the reciprocal,
 7927	 saving an expensive divide.  If not, combine will rebuild the
 7928	 original computation. */
7929 if (flag_unsafe_math_optimizations && optimize && !optimize_size
ed7d44bc 7930 && TREE_CODE (type) == REAL_TYPE
b7e9703c
JH
7931 && !real_onep (TREE_OPERAND (exp, 0)))
7932 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7933 build (RDIV_EXPR, type,
7934 build_real (type, dconst1),
7935 TREE_OPERAND (exp, 1))),
8e37cba8 7936 target, tmode, modifier);
ef89d648 7937 this_optab = sdiv_optab;
bbf6f052
RK
7938 goto binop;
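A sketch of the source-level effect, assuming x and y are doubles; the rewrite is gated on flag_unsafe_math_optimizations because it can change rounding:

    double q1 = x / y;           /* before */
    double q2 = x * (1.0 / y);   /* after: 1.0/y may be CSEd across uses */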
7939
7940 case TRUNC_MOD_EXPR:
7941 case FLOOR_MOD_EXPR:
7942 case CEIL_MOD_EXPR:
7943 case ROUND_MOD_EXPR:
8403445a
AM
7944 if (modifier == EXPAND_STACK_PARM)
7945 target = 0;
eb698c58
RS
7946 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7947 subtarget, &op0, &op1, 0);
bbf6f052
RK
7948 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7949
7950 case FIX_ROUND_EXPR:
7951 case FIX_FLOOR_EXPR:
7952 case FIX_CEIL_EXPR:
7953 abort (); /* Not used for C. */
7954
7955 case FIX_TRUNC_EXPR:
906c4e36 7956 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8403445a 7957 if (target == 0 || modifier == EXPAND_STACK_PARM)
bbf6f052
RK
7958 target = gen_reg_rtx (mode);
7959 expand_fix (target, op0, unsignedp);
7960 return target;
7961
7962 case FLOAT_EXPR:
906c4e36 7963 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8403445a 7964 if (target == 0 || modifier == EXPAND_STACK_PARM)
bbf6f052
RK
7965 target = gen_reg_rtx (mode);
7966 /* expand_float can't figure out what to do if FROM has VOIDmode.
7967 So give it the correct mode. With -O, cse will optimize this. */
7968 if (GET_MODE (op0) == VOIDmode)
7969 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7970 op0);
7971 expand_float (target, op0,
7972 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7973 return target;
7974
7975 case NEGATE_EXPR:
5b22bee8 7976 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8403445a
AM
7977 if (modifier == EXPAND_STACK_PARM)
7978 target = 0;
91ce572a 7979 temp = expand_unop (mode,
0fb7aeda
KH
7980 ! unsignedp && flag_trapv
7981 && (GET_MODE_CLASS(mode) == MODE_INT)
7982 ? negv_optab : neg_optab, op0, target, 0);
bbf6f052
RK
7983 if (temp == 0)
7984 abort ();
7985 return temp;
7986
7987 case ABS_EXPR:
7988 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8403445a
AM
7989 if (modifier == EXPAND_STACK_PARM)
7990 target = 0;
bbf6f052 7991
11017cc7 7992 /* ABS_EXPR is not valid for complex arguments. */
d6a5ac33
RK
7993 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7994 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
11017cc7 7995 abort ();
2d7050fd 7996
bbf6f052
RK
7997 /* Unsigned abs is simply the operand. Testing here means we don't
7998 risk generating incorrect code below. */
7999 if (TREE_UNSIGNED (type))
8000 return op0;
8001
91ce572a 8002 return expand_abs (mode, op0, target, unsignedp,
e5e809f4 8003 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
bbf6f052
RK
8004
8005 case MAX_EXPR:
8006 case MIN_EXPR:
8007 target = original_target;
8403445a
AM
8008 if (target == 0
8009 || modifier == EXPAND_STACK_PARM
fc155707 8010 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
d6a5ac33 8011 || GET_MODE (target) != mode
bbf6f052
RK
8012 || (GET_CODE (target) == REG
8013 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8014 target = gen_reg_rtx (mode);
eb698c58
RS
8015 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8016 target, &op0, &op1, 0);
bbf6f052
RK
8017
8018 /* First try to do it with a special MIN or MAX instruction.
8019 If that does not win, use a conditional jump to select the proper
8020 value. */
8021 this_optab = (TREE_UNSIGNED (type)
8022 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8023 : (code == MIN_EXPR ? smin_optab : smax_optab));
8024
8025 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8026 OPTAB_WIDEN);
8027 if (temp != 0)
8028 return temp;
8029
fa2981d8
JW
8030 /* At this point, a MEM target is no longer useful; we will get better
8031 code without it. */
3a94c984 8032
fa2981d8
JW
8033 if (GET_CODE (target) == MEM)
8034 target = gen_reg_rtx (mode);
8035
e3be1116
RS
8036 /* If op1 was placed in target, swap op0 and op1. */
8037 if (target != op0 && target == op1)
8038 {
8039 rtx tem = op0;
8040 op0 = op1;
8041 op1 = tem;
8042 }
8043
ee456b1c
RK
8044 if (target != op0)
8045 emit_move_insn (target, op0);
d6a5ac33 8046
bbf6f052 8047 op0 = gen_label_rtx ();
d6a5ac33 8048
f81497d9
RS
8049 /* If this mode is an integer too wide to compare properly,
8050 compare word by word. Rely on cse to optimize constant cases. */
1eb8759b
RH
8051 if (GET_MODE_CLASS (mode) == MODE_INT
8052 && ! can_compare_p (GE, mode, ccp_jump))
bbf6f052 8053 {
f81497d9 8054 if (code == MAX_EXPR)
d6a5ac33
RK
8055 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8056 target, op1, NULL_RTX, op0);
bbf6f052 8057 else
d6a5ac33
RK
8058 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8059 op1, target, NULL_RTX, op0);
bbf6f052 8060 }
f81497d9
RS
8061 else
8062 {
b30f05db
BS
8063 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8064 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
a06ef755 8065 unsignedp, mode, NULL_RTX, NULL_RTX,
b30f05db 8066 op0);
f81497d9 8067 }
b30f05db 8068 emit_move_insn (target, op1);
bbf6f052
RK
8069 emit_label (op0);
8070 return target;
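When no min/max instruction wins, the compare-and-jump code above amounts to this C shape (a sketch; the comparison is GE for MAX_EXPR and LE for MIN_EXPR):

    t = a;
    if (!(t >= b))   /* cse is expected to merge the two uses of t */
      t = b;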
8071
bbf6f052
RK
8072 case BIT_NOT_EXPR:
8073 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8403445a
AM
8074 if (modifier == EXPAND_STACK_PARM)
8075 target = 0;
bbf6f052
RK
8076 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8077 if (temp == 0)
8078 abort ();
8079 return temp;
8080
d6a5ac33
RK
8081 /* ??? Can optimize bitwise operations with one arg constant.
8082 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8083 and (a bitwise1 b) bitwise2 b (etc)
 8084	     but that is probably not worthwhile. */
8085
8086 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8087 boolean values when we want in all cases to compute both of them. In
8088 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8089 as actual zero-or-1 values and then bitwise anding. In cases where
8090 there cannot be any side effects, better code would be made by
8091 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8092 how to recognize those cases. */
8093
bbf6f052
RK
8094 case TRUTH_AND_EXPR:
8095 case BIT_AND_EXPR:
8096 this_optab = and_optab;
8097 goto binop;
8098
bbf6f052
RK
8099 case TRUTH_OR_EXPR:
8100 case BIT_IOR_EXPR:
8101 this_optab = ior_optab;
8102 goto binop;
8103
874726a8 8104 case TRUTH_XOR_EXPR:
bbf6f052
RK
8105 case BIT_XOR_EXPR:
8106 this_optab = xor_optab;
8107 goto binop;
8108
8109 case LSHIFT_EXPR:
8110 case RSHIFT_EXPR:
8111 case LROTATE_EXPR:
8112 case RROTATE_EXPR:
e5e809f4 8113 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052 8114 subtarget = 0;
8403445a
AM
8115 if (modifier == EXPAND_STACK_PARM)
8116 target = 0;
bbf6f052
RK
8117 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8118 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8119 unsignedp);
8120
d6a5ac33
RK
8121 /* Could determine the answer when only additive constants differ. Also,
8122 the addition of one can be handled by changing the condition. */
bbf6f052
RK
8123 case LT_EXPR:
8124 case LE_EXPR:
8125 case GT_EXPR:
8126 case GE_EXPR:
8127 case EQ_EXPR:
8128 case NE_EXPR:
1eb8759b
RH
8129 case UNORDERED_EXPR:
8130 case ORDERED_EXPR:
8131 case UNLT_EXPR:
8132 case UNLE_EXPR:
8133 case UNGT_EXPR:
8134 case UNGE_EXPR:
8135 case UNEQ_EXPR:
8403445a
AM
8136 temp = do_store_flag (exp,
8137 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8138 tmode != VOIDmode ? tmode : mode, 0);
bbf6f052
RK
8139 if (temp != 0)
8140 return temp;
d6a5ac33 8141
0f41302f 8142 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
bbf6f052
RK
8143 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8144 && original_target
8145 && GET_CODE (original_target) == REG
8146 && (GET_MODE (original_target)
8147 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8148 {
d6a5ac33
RK
8149 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8150 VOIDmode, 0);
8151
c0a3eeac
UW
8152 /* If temp is constant, we can just compute the result. */
8153 if (GET_CODE (temp) == CONST_INT)
8154 {
8155 if (INTVAL (temp) != 0)
8156 emit_move_insn (target, const1_rtx);
8157 else
8158 emit_move_insn (target, const0_rtx);
8159
8160 return target;
8161 }
8162
bbf6f052 8163 if (temp != original_target)
c0a3eeac
UW
8164 {
8165 enum machine_mode mode1 = GET_MODE (temp);
8166 if (mode1 == VOIDmode)
8167 mode1 = tmode != VOIDmode ? tmode : mode;
0fb7aeda 8168
c0a3eeac
UW
8169 temp = copy_to_mode_reg (mode1, temp);
8170 }
d6a5ac33 8171
bbf6f052 8172 op1 = gen_label_rtx ();
c5d5d461 8173 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
a06ef755 8174 GET_MODE (temp), unsignedp, op1);
bbf6f052
RK
8175 emit_move_insn (temp, const1_rtx);
8176 emit_label (op1);
8177 return temp;
8178 }
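A sketch of the code shape emitted just above for FOO != 0 when TEMP is not a compile-time constant:

    t = foo;              /* load FOO into the result register */
    if (t == 0) goto L;   /* zero is already the right answer */
    t = 1;                /* any nonzero value collapses to 1 */
  L:;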
d6a5ac33 8179
bbf6f052
RK
8180 /* If no set-flag instruction, must generate a conditional
8181 store into a temporary variable. Drop through
8182 and handle this like && and ||. */
8183
8184 case TRUTH_ANDIF_EXPR:
8185 case TRUTH_ORIF_EXPR:
e44842fe 8186 if (! ignore
8403445a
AM
8187 && (target == 0
8188 || modifier == EXPAND_STACK_PARM
8189 || ! safe_from_p (target, exp, 1)
e44842fe
RK
8190 /* Make sure we don't have a hard reg (such as function's return
8191 value) live across basic blocks, if not optimizing. */
8192 || (!optimize && GET_CODE (target) == REG
8193 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
bbf6f052 8194 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
e44842fe
RK
8195
8196 if (target)
8197 emit_clr_insn (target);
8198
bbf6f052
RK
8199 op1 = gen_label_rtx ();
8200 jumpifnot (exp, op1);
e44842fe
RK
8201
8202 if (target)
8203 emit_0_to_1_insn (target);
8204
bbf6f052 8205 emit_label (op1);
e44842fe 8206 return ignore ? const0_rtx : target;
bbf6f052
RK
8207
8208 case TRUTH_NOT_EXPR:
8403445a
AM
8209 if (modifier == EXPAND_STACK_PARM)
8210 target = 0;
bbf6f052
RK
8211 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8212 /* The parser is careful to generate TRUTH_NOT_EXPR
8213 only with operands that are always zero or one. */
906c4e36 8214 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
bbf6f052
RK
8215 target, 1, OPTAB_LIB_WIDEN);
8216 if (temp == 0)
8217 abort ();
8218 return temp;
8219
8220 case COMPOUND_EXPR:
8221 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8222 emit_queue ();
0fab64a3
MM
8223 return expand_expr_real (TREE_OPERAND (exp, 1),
8224 (ignore ? const0_rtx : target),
8225 VOIDmode, modifier, alt_rtl);
bbf6f052
RK
8226
8227 case COND_EXPR:
ac01eace
RK
8228 /* If we would have a "singleton" (see below) were it not for a
8229 conversion in each arm, bring that conversion back out. */
8230 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8231 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8232 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8233 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8234 {
d6edb99e
ZW
8235 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8236 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8237
8238 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8239 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8240 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8241 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8242 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8243 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8244 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8245 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
ac01eace 8246 return expand_expr (build1 (NOP_EXPR, type,
d6edb99e 8247 build (COND_EXPR, TREE_TYPE (iftrue),
ac01eace 8248 TREE_OPERAND (exp, 0),
d6edb99e 8249 iftrue, iffalse)),
ac01eace
RK
8250 target, tmode, modifier);
8251 }
8252
bbf6f052
RK
8253 {
8254 /* Note that COND_EXPRs whose type is a structure or union
8255 are required to be constructed to contain assignments of
8256 a temporary variable, so that we can evaluate them here
8257 for side effect only. If type is void, we must do likewise. */
8258
8259 /* If an arm of the branch requires a cleanup,
8260 only that cleanup is performed. */
8261
8262 tree singleton = 0;
8263 tree binary_op = 0, unary_op = 0;
bbf6f052
RK
8264
8265 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8266 convert it to our mode, if necessary. */
8267 if (integer_onep (TREE_OPERAND (exp, 1))
8268 && integer_zerop (TREE_OPERAND (exp, 2))
8269 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8270 {
dd27116b
RK
8271 if (ignore)
8272 {
8273 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
37a08a29 8274 modifier);
dd27116b
RK
8275 return const0_rtx;
8276 }
8277
8403445a
AM
8278 if (modifier == EXPAND_STACK_PARM)
8279 target = 0;
37a08a29 8280 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
bbf6f052
RK
8281 if (GET_MODE (op0) == mode)
8282 return op0;
d6a5ac33 8283
bbf6f052
RK
8284 if (target == 0)
8285 target = gen_reg_rtx (mode);
8286 convert_move (target, op0, unsignedp);
8287 return target;
8288 }
8289
ac01eace
RK
8290 /* Check for X ? A + B : A. If we have this, we can copy A to the
8291 output and conditionally add B. Similarly for unary operations.
8292 Don't do this if X has side-effects because those side effects
8293 might affect A or B and the "?" operation is a sequence point in
8294 ANSI. (operand_equal_p tests for side effects.) */
bbf6f052
RK
8295
8296 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8297 && operand_equal_p (TREE_OPERAND (exp, 2),
8298 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8299 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8300 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8301 && operand_equal_p (TREE_OPERAND (exp, 1),
8302 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8303 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8304 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8305 && operand_equal_p (TREE_OPERAND (exp, 2),
8306 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8307 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8308 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8309 && operand_equal_p (TREE_OPERAND (exp, 1),
8310 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8311 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8312
01c8a7c8
RK
8313 /* If we are not to produce a result, we have no target. Otherwise,
8314 if a target was specified use it; it will not be used as an
3a94c984 8315 intermediate target unless it is safe. If no target, use a
01c8a7c8
RK
8316 temporary. */
8317
8318 if (ignore)
8319 temp = 0;
8403445a
AM
8320 else if (modifier == EXPAND_STACK_PARM)
8321 temp = assign_temp (type, 0, 0, 1);
01c8a7c8 8322 else if (original_target
e5e809f4 8323 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
01c8a7c8
RK
8324 || (singleton && GET_CODE (original_target) == REG
8325 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8326 && original_target == var_rtx (singleton)))
8327 && GET_MODE (original_target) == mode
7c00d1fe
RK
8328#ifdef HAVE_conditional_move
8329 && (! can_conditionally_move_p (mode)
8330 || GET_CODE (original_target) == REG
8331 || TREE_ADDRESSABLE (type))
8332#endif
8125d7e9
BS
8333 && (GET_CODE (original_target) != MEM
8334 || TREE_ADDRESSABLE (type)))
01c8a7c8
RK
8335 temp = original_target;
8336 else if (TREE_ADDRESSABLE (type))
8337 abort ();
8338 else
8339 temp = assign_temp (type, 0, 0, 1);
8340
ac01eace
RK
8341 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8342 do the test of X as a store-flag operation, do this as
8343 A + ((X != 0) << log C). Similarly for other simple binary
8344 operators. Only do for C == 1 if BRANCH_COST is low. */
dd27116b 8345 if (temp && singleton && binary_op
bbf6f052
RK
8346 && (TREE_CODE (binary_op) == PLUS_EXPR
8347 || TREE_CODE (binary_op) == MINUS_EXPR
8348 || TREE_CODE (binary_op) == BIT_IOR_EXPR
9fbd9f58 8349 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
ac01eace
RK
8350 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8351 : integer_onep (TREE_OPERAND (binary_op, 1)))
bbf6f052
RK
8352 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8353 {
8354 rtx result;
61f6c84f 8355 tree cond;
91ce572a 8356 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
0fb7aeda
KH
8357 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8358 ? addv_optab : add_optab)
8359 : TREE_CODE (binary_op) == MINUS_EXPR
8360 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8361 ? subv_optab : sub_optab)
8362 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8363 : xor_optab);
bbf6f052 8364
61f6c84f 8365 /* If we had X ? A : A + 1, do this as A + (X == 0). */
bbf6f052 8366 if (singleton == TREE_OPERAND (exp, 1))
61f6c84f
JJ
8367 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8368 else
8369 cond = TREE_OPERAND (exp, 0);
bbf6f052 8370
61f6c84f
JJ
8371 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8372 ? temp : NULL_RTX),
bbf6f052
RK
8373 mode, BRANCH_COST <= 1);
8374
ac01eace
RK
8375 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8376 result = expand_shift (LSHIFT_EXPR, mode, result,
8377 build_int_2 (tree_log2
8378 (TREE_OPERAND
8379 (binary_op, 1)),
8380 0),
e5e809f4 8381 (safe_from_p (temp, singleton, 1)
ac01eace
RK
8382 ? temp : NULL_RTX), 0);
8383
bbf6f052
RK
8384 if (result)
8385 {
906c4e36 8386 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8387 return expand_binop (mode, boptab, op1, result, temp,
8388 unsignedp, OPTAB_LIB_WIDEN);
8389 }
bbf6f052 8390 }
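A sketch of the transform above for X ? A + 4 : A, where 4 is a power of 2 (log 2); for X ? A : A + 4 the condition is inverted first, and for an addend of 1 the shift disappears:

    t = A + ((X != 0) << 2);   /* branch-free; (X != 0) comes from do_store_flag */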
3a94c984 8391
dabf8373 8392 do_pending_stack_adjust ();
bbf6f052
RK
8393 NO_DEFER_POP;
8394 op0 = gen_label_rtx ();
8395
8396 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8397 {
8398 if (temp != 0)
8399 {
8400 /* If the target conflicts with the other operand of the
8401 binary op, we can't use it. Also, we can't use the target
8402 if it is a hard register, because evaluating the condition
8403 might clobber it. */
8404 if ((binary_op
e5e809f4 8405 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
bbf6f052
RK
8406 || (GET_CODE (temp) == REG
8407 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8408 temp = gen_reg_rtx (mode);
8403445a
AM
8409 store_expr (singleton, temp,
8410 modifier == EXPAND_STACK_PARM ? 2 : 0);
bbf6f052
RK
8411 }
8412 else
906c4e36 8413 expand_expr (singleton,
2937cf87 8414 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8415 if (singleton == TREE_OPERAND (exp, 1))
8416 jumpif (TREE_OPERAND (exp, 0), op0);
8417 else
8418 jumpifnot (TREE_OPERAND (exp, 0), op0);
8419
956d6950 8420 start_cleanup_deferral ();
bbf6f052
RK
8421 if (binary_op && temp == 0)
8422 /* Just touch the other operand. */
8423 expand_expr (TREE_OPERAND (binary_op, 1),
906c4e36 8424 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8425 else if (binary_op)
8426 store_expr (build (TREE_CODE (binary_op), type,
8427 make_tree (type, temp),
8428 TREE_OPERAND (binary_op, 1)),
8403445a 8429 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
bbf6f052
RK
8430 else
8431 store_expr (build1 (TREE_CODE (unary_op), type,
8432 make_tree (type, temp)),
8403445a 8433 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
bbf6f052 8434 op1 = op0;
bbf6f052 8435 }
bbf6f052
RK
8436 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8437 comparison operator. If we have one of these cases, set the
8438 output to A, branch on A (cse will merge these two references),
8439 then set the output to FOO. */
8440 else if (temp
8441 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8442 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8443 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8444 TREE_OPERAND (exp, 1), 0)
e9a25f70
JL
8445 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8446 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
e5e809f4 8447 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
bbf6f052 8448 {
3a94c984
KH
8449 if (GET_CODE (temp) == REG
8450 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
bbf6f052 8451 temp = gen_reg_rtx (mode);
8403445a
AM
8452 store_expr (TREE_OPERAND (exp, 1), temp,
8453 modifier == EXPAND_STACK_PARM ? 2 : 0);
bbf6f052 8454 jumpif (TREE_OPERAND (exp, 0), op0);
5dab5552 8455
956d6950 8456 start_cleanup_deferral ();
c37b68d4
RS
8457 if (TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8458 store_expr (TREE_OPERAND (exp, 2), temp,
8459 modifier == EXPAND_STACK_PARM ? 2 : 0);
8460 else
8461 expand_expr (TREE_OPERAND (exp, 2),
8462 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8463 op1 = op0;
8464 }
8465 else if (temp
8466 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8467 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8468 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8469 TREE_OPERAND (exp, 2), 0)
e9a25f70
JL
8470 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8471 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
e5e809f4 8472 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
bbf6f052 8473 {
3a94c984
KH
8474 if (GET_CODE (temp) == REG
8475 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
bbf6f052 8476 temp = gen_reg_rtx (mode);
8403445a
AM
8477 store_expr (TREE_OPERAND (exp, 2), temp,
8478 modifier == EXPAND_STACK_PARM ? 2 : 0);
bbf6f052 8479 jumpifnot (TREE_OPERAND (exp, 0), op0);
5dab5552 8480
956d6950 8481 start_cleanup_deferral ();
c37b68d4
RS
8482 if (TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8483 store_expr (TREE_OPERAND (exp, 1), temp,
8484 modifier == EXPAND_STACK_PARM ? 2 : 0);
8485 else
8486 expand_expr (TREE_OPERAND (exp, 1),
8487 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8488 op1 = op0;
8489 }
8490 else
8491 {
8492 op1 = gen_label_rtx ();
8493 jumpifnot (TREE_OPERAND (exp, 0), op0);
5dab5552 8494
956d6950 8495 start_cleanup_deferral ();
3a94c984 8496
2ac84cfe 8497 /* One branch of the cond can be void, if it never returns. For
3a94c984 8498	 example, A ? throw : E. */
2ac84cfe 8499 if (temp != 0
3a94c984 8500 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8403445a
AM
8501 store_expr (TREE_OPERAND (exp, 1), temp,
8502 modifier == EXPAND_STACK_PARM ? 2 : 0);
bbf6f052 8503 else
906c4e36
RK
8504 expand_expr (TREE_OPERAND (exp, 1),
8505 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
956d6950 8506 end_cleanup_deferral ();
bbf6f052
RK
8507 emit_queue ();
8508 emit_jump_insn (gen_jump (op1));
8509 emit_barrier ();
8510 emit_label (op0);
956d6950 8511 start_cleanup_deferral ();
2ac84cfe 8512 if (temp != 0
3a94c984 8513 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8403445a
AM
8514 store_expr (TREE_OPERAND (exp, 2), temp,
8515 modifier == EXPAND_STACK_PARM ? 2 : 0);
bbf6f052 8516 else
906c4e36
RK
8517 expand_expr (TREE_OPERAND (exp, 2),
8518 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8519 }
8520
956d6950 8521 end_cleanup_deferral ();
bbf6f052
RK
8522
8523 emit_queue ();
8524 emit_label (op1);
8525 OK_DEFER_POP;
5dab5552 8526
bbf6f052
RK
8527 return temp;
8528 }
8529
8530 case TARGET_EXPR:
8531 {
8532 /* Something needs to be initialized, but we didn't know
8533 where that thing was when building the tree. For example,
8534 it could be the return value of a function, or a parameter
 8535	 to a function which is laid down on the stack, or a temporary
8536 variable which must be passed by reference.
8537
8538 We guarantee that the expression will either be constructed
8539 or copied into our original target. */
8540
8541 tree slot = TREE_OPERAND (exp, 0);
2a888d4c 8542 tree cleanups = NULL_TREE;
5c062816 8543 tree exp1;
bbf6f052
RK
8544
8545 if (TREE_CODE (slot) != VAR_DECL)
8546 abort ();
8547
9c51f375
RK
8548 if (! ignore)
8549 target = original_target;
8550
6fbfac92
JM
8551 /* Set this here so that if we get a target that refers to a
8552 register variable that's already been used, put_reg_into_stack
3a94c984 8553 knows that it should fix up those uses. */
6fbfac92
JM
8554 TREE_USED (slot) = 1;
8555
bbf6f052
RK
8556 if (target == 0)
8557 {
19e7881c 8558 if (DECL_RTL_SET_P (slot))
ac993f4f
MS
8559 {
8560 target = DECL_RTL (slot);
5c062816 8561	 /* If we have already expanded the slot, don't do
ac993f4f 8562 it again. (mrs) */
5c062816
MS
8563 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8564 return target;
ac993f4f 8565 }
bbf6f052
RK
8566 else
8567 {
e9a25f70 8568 target = assign_temp (type, 2, 0, 1);
bbf6f052
RK
8569 /* All temp slots at this level must not conflict. */
8570 preserve_temp_slots (target);
19e7881c 8571 SET_DECL_RTL (slot, target);
e9a25f70 8572 if (TREE_ADDRESSABLE (slot))
f29a2bd1 8573 put_var_into_stack (slot, /*rescan=*/false);
bbf6f052 8574
e287fd6e
RK
8575 /* Since SLOT is not known to the called function
8576 to belong to its stack frame, we must build an explicit
8577 cleanup. This case occurs when we must build up a reference
8578 to pass the reference as an argument. In this case,
8579 it is very likely that such a reference need not be
8580 built here. */
8581
8582 if (TREE_OPERAND (exp, 2) == 0)
c88770e9
NB
8583 TREE_OPERAND (exp, 2)
8584 = (*lang_hooks.maybe_build_cleanup) (slot);
2a888d4c 8585 cleanups = TREE_OPERAND (exp, 2);
e287fd6e 8586 }
bbf6f052
RK
8587 }
8588 else
8589 {
8590 /* This case does occur, when expanding a parameter which
8591 needs to be constructed on the stack. The target
8592 is the actual stack address that we want to initialize.
8593 The function we call will perform the cleanup in this case. */
8594
8c042b47
RS
8595 /* If we have already assigned it space, use that space,
 8596	 not the target that we were passed in, as our target
8597 parameter is only a hint. */
19e7881c 8598 if (DECL_RTL_SET_P (slot))
3a94c984
KH
8599 {
8600 target = DECL_RTL (slot);
 8601	 /* If we have already expanded the slot, don't do
8c042b47 8602 it again. (mrs) */
3a94c984
KH
8603 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8604 return target;
8c042b47 8605 }
21002281
JW
8606 else
8607 {
19e7881c 8608 SET_DECL_RTL (slot, target);
21002281
JW
8609 /* If we must have an addressable slot, then make sure that
8610 the RTL that we just stored in slot is OK. */
8611 if (TREE_ADDRESSABLE (slot))
f29a2bd1 8612 put_var_into_stack (slot, /*rescan=*/true);
21002281 8613 }
bbf6f052
RK
8614 }
8615
4847c938 8616 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
5c062816
MS
8617 /* Mark it as expanded. */
8618 TREE_OPERAND (exp, 1) = NULL_TREE;
8619
8403445a 8620 store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
61d6b1cc 8621
659e5a7a 8622 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
3a94c984 8623
41531e5b 8624 return target;
bbf6f052
RK
8625 }
8626
8627 case INIT_EXPR:
8628 {
8629 tree lhs = TREE_OPERAND (exp, 0);
8630 tree rhs = TREE_OPERAND (exp, 1);
bbf6f052 8631
b90f141a 8632 temp = expand_assignment (lhs, rhs, ! ignore);
bbf6f052
RK
8633 return temp;
8634 }
8635
8636 case MODIFY_EXPR:
8637 {
8638 /* If lhs is complex, expand calls in rhs before computing it.
6d0a3f67
NS
8639 That's so we don't compute a pointer and save it over a
8640 call. If lhs is simple, compute it first so we can give it
8641 as a target if the rhs is just a call. This avoids an
 8642	 extra temp and copy, and prevents a partial subsumption
 8643	 that makes bad code. Actually we could treat
8644 component_ref's of vars like vars. */
bbf6f052
RK
8645
8646 tree lhs = TREE_OPERAND (exp, 0);
8647 tree rhs = TREE_OPERAND (exp, 1);
bbf6f052
RK
8648
8649 temp = 0;
8650
bbf6f052
RK
8651 /* Check for |= or &= of a bitfield of size one into another bitfield
8652 of size 1. In this case, (unless we need the result of the
8653 assignment) we can do this more efficiently with a
8654 test followed by an assignment, if necessary.
8655
8656 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8657 things change so we do, this code should be enhanced to
8658 support it. */
8659 if (ignore
8660 && TREE_CODE (lhs) == COMPONENT_REF
8661 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8662 || TREE_CODE (rhs) == BIT_AND_EXPR)
8663 && TREE_OPERAND (rhs, 0) == lhs
8664 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
05bccae2
RK
8665 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8666 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
bbf6f052
RK
8667 {
8668 rtx label = gen_label_rtx ();
8669
8670 do_jump (TREE_OPERAND (rhs, 1),
8671 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8672 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8673 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8674 (TREE_CODE (rhs) == BIT_IOR_EXPR
8675 ? integer_one_node
8676 : integer_zero_node)),
b90f141a 8677 0);
e7c33f54 8678 do_pending_stack_adjust ();
bbf6f052
RK
8679 emit_label (label);
8680 return const0_rtx;
8681 }
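A source-level sketch of the one-bit-field case above, assuming a hypothetical struct with single-bit fields a and b and an ignored assignment result:

    struct s { unsigned a : 1, b : 1; } x;
    x.a |= x.b;   /* becomes: if (x.b) x.a = 1;  */
    x.a &= x.b;   /* becomes: if (!x.b) x.a = 0; */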
8682
b90f141a 8683 temp = expand_assignment (lhs, rhs, ! ignore);
0fb7aeda 8684
bbf6f052
RK
8685 return temp;
8686 }
8687
6e7f84a7
APB
8688 case RETURN_EXPR:
8689 if (!TREE_OPERAND (exp, 0))
8690 expand_null_return ();
8691 else
8692 expand_return (TREE_OPERAND (exp, 0));
8693 return const0_rtx;
8694
bbf6f052
RK
8695 case PREINCREMENT_EXPR:
8696 case PREDECREMENT_EXPR:
7b8b9722 8697 return expand_increment (exp, 0, ignore);
bbf6f052
RK
8698
8699 case POSTINCREMENT_EXPR:
8700 case POSTDECREMENT_EXPR:
8701 /* Faster to treat as pre-increment if result is not used. */
7b8b9722 8702 return expand_increment (exp, ! ignore, ignore);
bbf6f052
RK
8703
8704 case ADDR_EXPR:
8403445a
AM
8705 if (modifier == EXPAND_STACK_PARM)
8706 target = 0;
bbf6f052
RK
8707 /* Are we taking the address of a nested function? */
8708 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
38ee6ed9 8709 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
e5e809f4
JL
8710 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8711 && ! TREE_STATIC (exp))
bbf6f052
RK
8712 {
8713 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8714 op0 = force_operand (op0, target);
8715 }
682ba3a6
RK
8716 /* If we are taking the address of something erroneous, just
8717 return a zero. */
8718 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8719 return const0_rtx;
d6b6783b
RK
8720 /* If we are taking the address of a constant and are at the
8721 top level, we have to use output_constant_def since we can't
8722 call force_const_mem at top level. */
8723 else if (cfun == 0
8724 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8725 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8726 == 'c')))
8727 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
bbf6f052
RK
8728 else
8729 {
e287fd6e
RK
8730 /* We make sure to pass const0_rtx down if we came in with
 8731	 ignore set, to avoid doing the cleanups twice for the same expression. */
8732 op0 = expand_expr (TREE_OPERAND (exp, 0),
8733 ignore ? const0_rtx : NULL_RTX, VOIDmode,
bbf6f052
RK
8734 (modifier == EXPAND_INITIALIZER
8735 ? modifier : EXPAND_CONST_ADDRESS));
896102d0 8736
119af78a
RK
8737 /* If we are going to ignore the result, OP0 will have been set
8738 to const0_rtx, so just return it. Don't get confused and
8739 think we are taking the address of the constant. */
8740 if (ignore)
8741 return op0;
8742
73b7f58c
BS
8743 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
 8744	 clever and return a REG when given a MEM. */
8745 op0 = protect_from_queue (op0, 1);
3539e816 8746
c5c76735
JL
8747 /* We would like the object in memory. If it is a constant, we can
8748 have it be statically allocated into memory. For a non-constant,
8749 we need to allocate some memory and store the value into it. */
896102d0
RK
8750
8751 if (CONSTANT_P (op0))
8752 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8753 op0);
682ba3a6 8754 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
df6018fd 8755 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
c1853da7 8756 || GET_CODE (op0) == PARALLEL || GET_CODE (op0) == LO_SUM)
896102d0 8757 {
6c7d86ec
RK
8758 /* If the operand is a SAVE_EXPR, we can deal with this by
8759 forcing the SAVE_EXPR into memory. */
8760 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
8761 {
f29a2bd1
MM
8762 put_var_into_stack (TREE_OPERAND (exp, 0),
8763 /*rescan=*/true);
6c7d86ec
RK
8764 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
8765 }
df6018fd 8766 else
6c7d86ec
RK
8767 {
8768 /* If this object is in a register, it can't be BLKmode. */
8769 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
19f90fad 8770 rtx memloc = assign_temp (inner_type, 1, 1, 1);
6c7d86ec
RK
8771
8772 if (GET_CODE (op0) == PARALLEL)
8773 /* Handle calls that pass values in multiple
8774 non-contiguous locations. The Irix 6 ABI has examples
8775 of this. */
6e985040 8776 emit_group_store (memloc, op0, inner_type,
6c7d86ec
RK
8777 int_size_in_bytes (inner_type));
8778 else
8779 emit_move_insn (memloc, op0);
0fb7aeda 8780
6c7d86ec
RK
8781 op0 = memloc;
8782 }
896102d0
RK
8783 }
8784
bbf6f052
RK
8785 if (GET_CODE (op0) != MEM)
8786 abort ();
3a94c984 8787
34e81b5a 8788 mark_temp_addr_taken (op0);
bbf6f052 8789 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
88f63c77 8790 {
34e81b5a 8791 op0 = XEXP (op0, 0);
5ae6cd0d 8792 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
34e81b5a 8793 op0 = convert_memory_address (ptr_mode, op0);
34e81b5a 8794 return op0;
88f63c77 8795 }
987c71d9 8796
c952ff4b
RK
 8797	 /* If OP0 is not aligned at least as much as the type requires, we
8798 need to make a temporary, copy OP0 to it, and take the address of
8799 the temporary. We want to use the alignment of the type, not of
8800 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8801 the test for BLKmode means that can't happen. The test for
8802 BLKmode is because we never make mis-aligned MEMs with
8803 non-BLKmode.
8804
8805 We don't need to do this at all if the machine doesn't have
8806 strict alignment. */
8807 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8808 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
ed239f5a
RK
8809 > MEM_ALIGN (op0))
8810 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
a06ef755
RK
8811 {
8812 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
bdaa131b 8813 rtx new;
a06ef755 8814
c3d32120
RK
8815 if (TYPE_ALIGN_OK (inner_type))
8816 abort ();
8817
bdaa131b
JM
8818 if (TREE_ADDRESSABLE (inner_type))
8819 {
8820 /* We can't make a bitwise copy of this object, so fail. */
8821 error ("cannot take the address of an unaligned member");
8822 return const0_rtx;
8823 }
8824
8825 new = assign_stack_temp_for_type
8826 (TYPE_MODE (inner_type),
8827 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8828 : int_size_in_bytes (inner_type),
8829 1, build_qualified_type (inner_type,
8830 (TYPE_QUALS (inner_type)
8831 | TYPE_QUAL_CONST)));
8832
44bb111a 8833 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
8403445a
AM
8834 (modifier == EXPAND_STACK_PARM
8835 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
bdaa131b 8836
a06ef755
RK
8837 op0 = new;
8838 }
8839
bbf6f052
RK
8840 op0 = force_operand (XEXP (op0, 0), target);
8841 }
987c71d9 8842
05c8e58b
HPN
8843 if (flag_force_addr
8844 && GET_CODE (op0) != REG
8845 && modifier != EXPAND_CONST_ADDRESS
8846 && modifier != EXPAND_INITIALIZER
8847 && modifier != EXPAND_SUM)
987c71d9
RK
8848 op0 = force_reg (Pmode, op0);
8849
dc6d66b3
RK
8850 if (GET_CODE (op0) == REG
8851 && ! REG_USERVAR_P (op0))
bdb429a5 8852 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
987c71d9 8853
5ae6cd0d 8854 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
9fcfcce7 8855 op0 = convert_memory_address (ptr_mode, op0);
88f63c77 8856
bbf6f052
RK
8857 return op0;
8858
8859 case ENTRY_VALUE_EXPR:
8860 abort ();
8861
7308a047
RS
8862 /* COMPLEX type for Extended Pascal & Fortran */
8863 case COMPLEX_EXPR:
8864 {
8865 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6551fa4d 8866 rtx insns;
7308a047
RS
8867
8868 /* Get the rtx code of the operands. */
8869 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8870 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8871
8872 if (! target)
8873 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8874
6551fa4d 8875 start_sequence ();
7308a047
RS
8876
8877 /* Move the real (op0) and imaginary (op1) parts to their location. */
2d7050fd
RS
8878 emit_move_insn (gen_realpart (mode, target), op0);
8879 emit_move_insn (gen_imagpart (mode, target), op1);
7308a047 8880
6551fa4d
JW
8881 insns = get_insns ();
8882 end_sequence ();
8883
7308a047 8884 /* Complex construction should appear as a single unit. */
6551fa4d
JW
8885 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8886 each with a separate pseudo as destination.
8887 It's not correct for flow to treat them as a unit. */
6d6e61ce 8888 if (GET_CODE (target) != CONCAT)
6551fa4d
JW
8889 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8890 else
2f937369 8891 emit_insn (insns);
7308a047
RS
8892
8893 return target;
8894 }
8895
8896 case REALPART_EXPR:
2d7050fd
RS
8897 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8898 return gen_realpart (mode, op0);
3a94c984 8899
7308a047 8900 case IMAGPART_EXPR:
2d7050fd
RS
8901 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8902 return gen_imagpart (mode, op0);
7308a047
RS
8903
8904 case CONJ_EXPR:
8905 {
62acb978 8906 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7308a047 8907 rtx imag_t;
6551fa4d 8908 rtx insns;
3a94c984
KH
8909
8910 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7308a047
RS
8911
8912 if (! target)
d6a5ac33 8913 target = gen_reg_rtx (mode);
3a94c984 8914
6551fa4d 8915 start_sequence ();
7308a047
RS
8916
8917 /* Store the realpart and the negated imagpart to target. */
62acb978
RK
8918 emit_move_insn (gen_realpart (partmode, target),
8919 gen_realpart (partmode, op0));
7308a047 8920
62acb978 8921 imag_t = gen_imagpart (partmode, target);
91ce572a 8922 temp = expand_unop (partmode,
0fb7aeda
KH
8923 ! unsignedp && flag_trapv
8924 && (GET_MODE_CLASS(partmode) == MODE_INT)
8925 ? negv_optab : neg_optab,
3a94c984 8926 gen_imagpart (partmode, op0), imag_t, 0);
7308a047
RS
8927 if (temp != imag_t)
8928 emit_move_insn (imag_t, temp);
8929
6551fa4d
JW
8930 insns = get_insns ();
8931 end_sequence ();
8932
3a94c984 8933	 /* Conjugate should appear as a single unit.
d6a5ac33 8934 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
6551fa4d
JW
8935 each with a separate pseudo as destination.
8936 It's not correct for flow to treat them as a unit. */
6d6e61ce 8937 if (GET_CODE (target) != CONCAT)
6551fa4d
JW
8938 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8939 else
2f937369 8940 emit_insn (insns);
7308a047
RS
8941
8942 return target;
8943 }
8944
e976b8b2
MS
8945 case TRY_CATCH_EXPR:
8946 {
8947 tree handler = TREE_OPERAND (exp, 1);
8948
8949 expand_eh_region_start ();
8950
8951 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8952
52a11cbf 8953 expand_eh_region_end_cleanup (handler);
e976b8b2
MS
8954
8955 return op0;
8956 }
8957
b335b813
PB
8958 case TRY_FINALLY_EXPR:
8959 {
8960 tree try_block = TREE_OPERAND (exp, 0);
8961 tree finally_block = TREE_OPERAND (exp, 1);
b335b813 8962
8ad8135a 8963 if (!optimize || unsafe_for_reeval (finally_block) > 1)
8943a0b4
RH
8964 {
8965 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
8966 is not sufficient, so we cannot expand the block twice.
8967 So we play games with GOTO_SUBROUTINE_EXPR to let us
8968 expand the thing only once. */
8ad8135a
RH
8969 /* When not optimizing, we go ahead with this form since
8970 (1) user breakpoints operate more predictably without
8971 code duplication, and
8972 (2) we're not running any of the global optimizers
8973 that would explode in time/space with the highly
8974 connected CFG created by the indirect branching. */
8943a0b4
RH
8975
8976 rtx finally_label = gen_label_rtx ();
8977 rtx done_label = gen_label_rtx ();
8978 rtx return_link = gen_reg_rtx (Pmode);
8979 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8980 (tree) finally_label, (tree) return_link);
8981 TREE_SIDE_EFFECTS (cleanup) = 1;
8982
8983 /* Start a new binding layer that will keep track of all cleanup
8984 actions to be performed. */
8985 expand_start_bindings (2);
8986 target_temp_slot_level = temp_slot_level;
8987
8988 expand_decl_cleanup (NULL_TREE, cleanup);
8989 op0 = expand_expr (try_block, target, tmode, modifier);
8990
8991 preserve_temp_slots (op0);
8992 expand_end_bindings (NULL_TREE, 0, 0);
8993 emit_jump (done_label);
8994 emit_label (finally_label);
8995 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8996 emit_indirect_jump (return_link);
8997 emit_label (done_label);
8998 }
8999 else
9000 {
9001 expand_start_bindings (2);
9002 target_temp_slot_level = temp_slot_level;
b335b813 9003
8943a0b4
RH
9004 expand_decl_cleanup (NULL_TREE, finally_block);
9005 op0 = expand_expr (try_block, target, tmode, modifier);
b335b813 9006
8943a0b4
RH
9007 preserve_temp_slots (op0);
9008 expand_end_bindings (NULL_TREE, 0, 0);
9009 }
b335b813 9010
b335b813
PB
9011 return op0;
9012 }
9013
3a94c984 9014 case GOTO_SUBROUTINE_EXPR:
b335b813
PB
9015 {
9016 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9017 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9018 rtx return_address = gen_label_rtx ();
3a94c984
KH
9019 emit_move_insn (return_link,
9020 gen_rtx_LABEL_REF (Pmode, return_address));
b335b813
PB
9021 emit_jump (subr);
9022 emit_label (return_address);
9023 return const0_rtx;
9024 }
9025
d3707adb
RH
9026 case VA_ARG_EXPR:
9027 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9028
52a11cbf 9029 case EXC_PTR_EXPR:
86c99549 9030 return get_exception_pointer (cfun);
52a11cbf 9031
67231816
RH
9032 case FDESC_EXPR:
9033 /* Function descriptors are not valid except for as
9034 initialization constants, and should not be expanded. */
9035 abort ();
9036
bbf6f052 9037 default:
0fab64a3
MM
9038 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier,
9039 alt_rtl);
bbf6f052
RK
9040 }
9041
9042 /* Here to do an ordinary binary operator, generating an instruction
9043 from the optab already placed in `this_optab'. */
9044 binop:
eb698c58
RS
9045 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9046 subtarget, &op0, &op1, 0);
bbf6f052 9047 binop2:
8403445a
AM
9048 if (modifier == EXPAND_STACK_PARM)
9049 target = 0;
bbf6f052
RK
9050 temp = expand_binop (mode, this_optab, op0, op1, target,
9051 unsignedp, OPTAB_LIB_WIDEN);
9052 if (temp == 0)
9053 abort ();
9054 return temp;
9055}
b93a436e 9056\f
1ce7f3c2
RK
9057/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9058 when applied to the address of EXP produces an address known to be
9059 aligned more than BIGGEST_ALIGNMENT. */
9060
9061static int
502b8322 9062is_aligning_offset (tree offset, tree exp)
1ce7f3c2
RK
9063{
9064 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9065 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9066 || TREE_CODE (offset) == NOP_EXPR
9067 || TREE_CODE (offset) == CONVERT_EXPR
9068 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9069 offset = TREE_OPERAND (offset, 0);
9070
9071 /* We must now have a BIT_AND_EXPR with a constant that is one less than
 9072	 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9073 if (TREE_CODE (offset) != BIT_AND_EXPR
9074 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9075 || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
 9076	 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0
9077 return 0;
9078
9079 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9080 It must be NEGATE_EXPR. Then strip any more conversions. */
9081 offset = TREE_OPERAND (offset, 0);
9082 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9083 || TREE_CODE (offset) == NOP_EXPR
9084 || TREE_CODE (offset) == CONVERT_EXPR)
9085 offset = TREE_OPERAND (offset, 0);
9086
9087 if (TREE_CODE (offset) != NEGATE_EXPR)
9088 return 0;
9089
9090 offset = TREE_OPERAND (offset, 0);
9091 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9092 || TREE_CODE (offset) == NOP_EXPR
9093 || TREE_CODE (offset) == CONVERT_EXPR)
9094 offset = TREE_OPERAND (offset, 0);
9095
9096 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9097 whose type is the same as EXP. */
9098 return (TREE_CODE (offset) == ADDR_EXPR
9099 && (TREE_OPERAND (offset, 0) == exp
9100 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9101 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9102 == TREE_TYPE (exp)))));
9103}
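The offset shape this function accepts, sketched at the source level; ALIGN stands for a power of 2 larger than BIGGEST_ALIGNMENT:

    /* offset = (-(size_t) &exp) & (ALIGN - 1);
       adding OFFSET to &exp rounds the address up to the next ALIGN
       boundary, so the sum is known to be ALIGN-aligned.  */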
9104\f
e0a2f705 9105/* Return the tree node if ARG corresponds to a string constant, or zero
cc2902df 9106 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
fed3cef0
RK
9107 in bytes within the string that ARG is accessing. The type of the
9108 offset will be `sizetype'. */
b93a436e 9109
28f4ec01 9110tree
502b8322 9111string_constant (tree arg, tree *ptr_offset)
b93a436e
JL
9112{
9113 STRIP_NOPS (arg);
9114
9115 if (TREE_CODE (arg) == ADDR_EXPR
9116 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9117 {
fed3cef0 9118 *ptr_offset = size_zero_node;
b93a436e
JL
9119 return TREE_OPERAND (arg, 0);
9120 }
9121 else if (TREE_CODE (arg) == PLUS_EXPR)
9122 {
9123 tree arg0 = TREE_OPERAND (arg, 0);
9124 tree arg1 = TREE_OPERAND (arg, 1);
9125
9126 STRIP_NOPS (arg0);
9127 STRIP_NOPS (arg1);
9128
9129 if (TREE_CODE (arg0) == ADDR_EXPR
9130 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
bbf6f052 9131 {
fed3cef0 9132 *ptr_offset = convert (sizetype, arg1);
b93a436e 9133 return TREE_OPERAND (arg0, 0);
bbf6f052 9134 }
b93a436e
JL
9135 else if (TREE_CODE (arg1) == ADDR_EXPR
9136 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
bbf6f052 9137 {
fed3cef0 9138 *ptr_offset = convert (sizetype, arg0);
b93a436e 9139 return TREE_OPERAND (arg1, 0);
bbf6f052 9140 }
b93a436e 9141 }
ca695ac9 9142
b93a436e
JL
9143 return 0;
9144}
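A usage sketch, assuming a caller that holds the tree for "hello" + 2: the STRING_CST comes back and *PTR_OFFSET is set to the sizetype constant 2:

    tree offset;
    tree str = string_constant (arg, &offset);
    if (str != 0)
      {
        const char *p = TREE_STRING_POINTER (str);       /* "hello" */
        /* ... fold using p + tree_low_cst (offset, 1) ... */
      }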
ca695ac9 9145\f
b93a436e
JL
9146/* Expand code for a post- or pre- increment or decrement
9147 and return the RTX for the result.
9148 POST is 1 for postinc/decrements and 0 for preinc/decrements. */

static rtx
expand_increment (tree exp, int post, int ignore)
{
  rtx op0, op1;
  rtx temp, value;
  tree incremented = TREE_OPERAND (exp, 0);
  optab this_optab = add_optab;
  int icode;
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  int op0_is_copy = 0;
  int single_insn = 0;
  /* 1 means we can't store into OP0 directly,
     because it is a subreg narrower than a word,
     and we don't dare clobber the rest of the word.  */
  int bad_subreg = 0;

  /* Stabilize any component ref that might need to be
     evaluated more than once below.  */
  if (!post
      || TREE_CODE (incremented) == BIT_FIELD_REF
      || (TREE_CODE (incremented) == COMPONENT_REF
          && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
              || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
    incremented = stabilize_reference (incremented);
  /* Nested *INCREMENT_EXPRs can happen in C++.  We must force innermost
     ones into save exprs so that they don't accidentally get evaluated
     more than once by the code below.  */
  if (TREE_CODE (incremented) == PREINCREMENT_EXPR
      || TREE_CODE (incremented) == PREDECREMENT_EXPR)
    incremented = save_expr (incremented);

  /* Compute the operands as RTX.
     Note whether OP0 is the actual lvalue or a copy of it:
     I believe it is a copy iff it is a register or subreg
     and insns were generated in computing it.  */

  temp = get_last_insn ();
  op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);

  /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
     in place but instead must do sign- or zero-extension during assignment,
     so we copy it into a new register and let the code below use it as
     a copy.

     Note that we can safely modify this SUBREG since it is known not to be
     shared (it was made by the expand_expr call above).  */
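
  /* Illustrative example (editor's note, not part of the original
     source): on a target that promotes sub-word variables to full
     registers, OP0 can arrive as (subreg:QI (reg:SI n) 0) with
     SUBREG_PROMOTED_VAR_P set; the paths below then either copy the
     inner register (post-increment) or fall back to the
     expand_assignment path so that the result is properly re-extended.  */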

  if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
    {
      if (post)
        SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
      else
        bad_subreg = 1;
    }
  else if (GET_CODE (op0) == SUBREG
           && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
    {
      /* We cannot increment this SUBREG in place.  If we are
         post-incrementing, get a copy of the old value.  Otherwise,
         just mark that we cannot increment in place.  */
      if (post)
        op0 = copy_to_reg (op0);
      else
        bad_subreg = 1;
    }

  op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
                 && temp != get_last_insn ());
  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);

  /* Decide whether incrementing or decrementing.  */
  if (TREE_CODE (exp) == POSTDECREMENT_EXPR
      || TREE_CODE (exp) == PREDECREMENT_EXPR)
    this_optab = sub_optab;

  /* Convert decrement by a constant into a negative increment.  */
  if (this_optab == sub_optab
      && GET_CODE (op1) == CONST_INT)
    {
      op1 = GEN_INT (-INTVAL (op1));
      this_optab = add_optab;
    }

  if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
    this_optab = this_optab == add_optab ? addv_optab : subv_optab;

  /* For a preincrement, see if we can do this with a single instruction.  */
  if (!post)
    {
      icode = (int) this_optab->handlers[(int) mode].insn_code;
      if (icode != (int) CODE_FOR_nothing
          /* Make sure that OP0 is valid for operands 0 and 1
             of the insn we want to queue.  */
          && (*insn_data[icode].operand[0].predicate) (op0, mode)
          && (*insn_data[icode].operand[1].predicate) (op0, mode)
          && (*insn_data[icode].operand[2].predicate) (op1, mode))
        single_insn = 1;
    }

  /* If OP0 is not the actual lvalue, but rather a copy in a register,
     then we cannot just increment OP0.  We must therefore contrive to
     increment the original value.  Then, for postincrement, we can return
     OP0 since it is a copy of the old value.  For preincrement, expand here
     unless we can do it with a single insn.

     Likewise if storing directly into OP0 would clobber high bits
     we need to preserve (bad_subreg).  */
  if (op0_is_copy || (!post && !single_insn) || bad_subreg)
    {
      /* This is the easiest way to increment the value wherever it is.
         Problems with multiple evaluation of INCREMENTED are prevented
         because either (1) it is a component_ref or preincrement,
         in which case it was stabilized above, or (2) it is an array_ref
         with constant index in an array in a register, which is
         safe to reevaluate.  */
      tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
                             || TREE_CODE (exp) == PREDECREMENT_EXPR)
                            ? MINUS_EXPR : PLUS_EXPR),
                           TREE_TYPE (exp),
                           incremented,
                           TREE_OPERAND (exp, 1));

      while (TREE_CODE (incremented) == NOP_EXPR
             || TREE_CODE (incremented) == CONVERT_EXPR)
        {
          newexp = convert (TREE_TYPE (incremented), newexp);
          incremented = TREE_OPERAND (incremented, 0);
        }

      temp = expand_assignment (incremented, newexp, ! post && ! ignore);
      return post ? op0 : temp;
    }

  if (post)
    {
      /* We have a true reference to the value in OP0.
         If there is an insn to add or subtract in this mode, queue it.
         Queuing the increment insn avoids the register shuffling
         that often results if we must increment now and first save
         the old value for subsequent use.  */

#if 0 /* Turned off to avoid making extra insn for indexed memref.  */
      op0 = stabilize (op0);
#endif

      icode = (int) this_optab->handlers[(int) mode].insn_code;
      if (icode != (int) CODE_FOR_nothing
          /* Make sure that OP0 is valid for operands 0 and 1
             of the insn we want to queue.  */
          && (*insn_data[icode].operand[0].predicate) (op0, mode)
          && (*insn_data[icode].operand[1].predicate) (op0, mode))
        {
          if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
            op1 = force_reg (mode, op1);

          return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
        }
      if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
        {
          rtx addr = (general_operand (XEXP (op0, 0), mode)
                      ? force_reg (Pmode, XEXP (op0, 0))
                      : copy_to_reg (XEXP (op0, 0)));
          rtx temp, result;

          op0 = replace_equiv_address (op0, addr);
          temp = force_reg (GET_MODE (op0), op0);
          if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
            op1 = force_reg (mode, op1);

          /* The increment queue is LIFO, thus we have to `queue'
             the instructions in reverse order.  */
          enqueue_insn (op0, gen_move_insn (op0, temp));
          result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
          return result;
        }
    }

  /* Preincrement, or we can't increment with one simple insn.  */
  if (post)
    /* Save a copy of the value before inc or dec, to return it later.  */
    temp = value = copy_to_reg (op0);
  else
    /* Arrange to return the incremented value.  */
    /* Copy the rtx because expand_binop will protect from the queue,
       and the results of that would be invalid for us to return
       if our caller does emit_queue before using our result.  */
    temp = copy_rtx (value = op0);

  /* Increment however we can.  */
  op1 = expand_binop (mode, this_optab, value, op1, op0,
                      TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);

  /* Make sure the value is stored into OP0.  */
  if (op1 != op0)
    emit_move_insn (op0, op1);

  return temp;
}
\f
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */
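
/* Illustrative example (editor's note, not part of the original source):
   for "flag = (a == b);" on a machine with an scc-style instruction the
   whole statement becomes one store-flag insn that deposits 0 or 1 in
   FLAG; the set/jump/set fallback at the end of this function instead
   emits the equivalent of "flag = 1; if (a == b) goto L; flag = 0; L:",
   with the two constants swapped when INVERT is set.  */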

static rtx
do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
           && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
               == FUNCTION_TYPE))
          || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
              && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */
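
  /* For instance (editor's illustration, not part of the original source):
     a signed "x <= -1" is rewritten below as "x < 0", and "x >= 1" as
     "x > 0", so the cheap special cases later in this function only have
     to recognize comparisons against zero.  */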

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = LT;
      else
        code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = GE;
      else
        code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;

    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.

     The code to make this transformation was moved into fold_single_bit_test,
     so we just call into the folder and expand its result.  */
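
  /* For example (editor's illustration, not part of the original source):
     "(x & 4) != 0" becomes "(x >> 2) & 1", and for the EQ form the result
     is additionally xored with 1; fold_single_bit_test builds that tree
     and the expand_expr call below turns it into RTL.  */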

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree type = (*lang_hooks.types.type_for_mode) (mode, unsignedp);
      return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
                                                arg0, arg1, type),
                          target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (BRANCH_COST >= 0
               && ! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode)
    subtarget = 0;

  expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
                            queued_subexp_p (op0) ? copy_rtx (op0) : op0,
                            queued_subexp_p (op1) ? copy_rtx (op1) : op1,
                            operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  /* The code of RESULT may not match CODE if compare_from_rtx
     decided to swap its operands and reverse the original code.

     We know that compare_from_rtx returns either a CONST_INT or
     a new comparison code, so it is safe to just extract the
     code from RESULT.  */
  code = GET_CODE (result);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
\f

/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */
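
/* Editor's reading of the default (illustrative, not part of the original
   source): with a casesi pattern the bounds check comes for free, so a
   switch is considered table-worthy at 4 case values; without one the
   bounds check must be emitted separately, raising the threshold to 5.  */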

unsigned int
case_values_threshold (void)
{
  return CASE_VALUES_THRESHOLD;
}

/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */
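
/* Illustrative example (editor's note, not part of the original source):
   for "switch (i)" with cases 10 through 25, INDEX_EXPR is the tree for
   i, MINVAL is 10 and RANGE is 15; the casesi pattern itself checks that
   i - 10 lies within [0, 15] and otherwise jumps to DEFAULT_LABEL.  */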
int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
            rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build (MINUS_EXPR, index_type,
                          index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
                               omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
        {
          index_expr = convert ((*lang_hooks.types.type_for_size)
                                (index_bits, 0), index_expr);
          index_type = TREE_TYPE (index_expr);
        }

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
                       op1, TREE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
                       op2, TREE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
                              table_label, default_label));
  return 1;
}

/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif

/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
              rtx default_label)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->max_jumptable_ents)
    cfun->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
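
  /* Worked example (editor's note, not part of the original source):
     for cases 3 through 7, the lowered index is i - 3 and RANGE is 4.
     An original i of 2 yields (unsigned) -1, which compares greater
     than 4, so the single GTU test below also catches values below the
     minimum.  */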

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                           default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  MEM_NOTRAP_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}

int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
               rtx table_label, rtx default_label)
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold (build (MINUS_EXPR, index_type,
                            convert (index_type, index_expr),
                            convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
                convert_modes (TYPE_MODE (index_type),
                               TYPE_MODE (TREE_TYPE (range)),
                               expand_expr (range, NULL_RTX,
                                            VOIDmode, 0),
                               TREE_UNSIGNED (TREE_TYPE (range))),
                table_label, default_label);
  return 1;
}

/* Nonzero if the mode is a valid vector mode for this architecture.
   This returns nonzero even if there is no hardware support for the
   vector mode, but we can emulate with narrower modes.  */
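
/* Illustrative example (editor's note, not part of the original source):
   on a target without V2DImode vector support but with DImode moves,
   vector_mode_valid_p (V2DImode) returns nonzero, since a V2DI value
   can be emulated as a pair of DImode moves.  */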

int
vector_mode_valid_p (enum machine_mode mode)
{
  enum mode_class class = GET_MODE_CLASS (mode);
  enum machine_mode innermode;

  /* Doh!  What's going on?  */
  if (class != MODE_VECTOR_INT
      && class != MODE_VECTOR_FLOAT)
    return 0;

  /* Hardware support.  Woo hoo!  */
  if (VECTOR_MODE_SUPPORTED_P (mode))
    return 1;

  innermode = GET_MODE_INNER (mode);

  /* We should probably return 1 when requesting V4DI on a target that
     has no DI but does have V2DI, but that case is probably very
     unlikely.  */

  /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate with a pair of DIs.  */
  return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
}

/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */
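
/* Illustrative example (editor's note, not part of the original source):
   for a V4SImode VECTOR_CST whose element list is {1, 2}, this builds
   (const_vector:V4SI [(const_int 1) (const_int 2) (const_int 0)
   (const_int 0)]), with the trailing elements zero-filled by the second
   loop below.  */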
static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  int units, i;
  tree link, elt;
  enum machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (is_zeros_p (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  link = TREE_VECTOR_CST_ELTS (exp);
  for (i = 0; link; link = TREE_CHAIN (link), ++i)
    {
      elt = TREE_VALUE (link);

      if (TREE_CODE (elt) == REAL_CST)
        RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
                                                         inner);
      else
        RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
                                               TREE_INT_CST_HIGH (elt),
                                               inner);
    }

  /* Initialize remaining elements to 0.  */
  for (; i < units; ++i)
    RTVEC_ELT (v, i) = CONST0_RTX (inner);

  return gen_rtx_raw_CONST_VECTOR (mode, v);
}

#include "gt-expr.h"