/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "real.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "reload.h"
#include "output.h"
#include "typeclass.h"
#include "toplev.h"
#include "ggc.h"
#include "langhooks.h"
#include "intl.h"
#include "tm_p.h"
#include "target.h"

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Convert defined/undefined to boolean.  */
#ifdef TARGET_MEM_FUNCTIONS
#undef TARGET_MEM_FUNCTIONS
#define TARGET_MEM_FUNCTIONS 1
#else
#define TARGET_MEM_FUNCTIONS 0
#endif

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};

static rtx enqueue_insn (rtx, rtx);
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
						     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
			      struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movstr (rtx, rtx, rtx, unsigned);
static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
			       struct store_by_pieces *);
static bool clear_storage_via_clrstr (rtx, rtx, unsigned);
static rtx clear_storage_via_libcall (rtx, rtx);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static int is_zeros_p (tree);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
				     HOST_WIDE_INT, enum machine_mode,
				     tree, tree, int, int);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
			tree, enum machine_mode, int, tree, int);
static rtx var_rtx (tree);

static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);

static int is_aligning_offset (tree, tree);
static rtx expand_increment (tree, int, int);
static void expand_operands (tree, tree, rtx, rtx *, rtx *,
			     enum expand_modifier);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
#ifdef PUSH_ROUNDING
static void emit_single_push_insn (enum machine_mode, rtx, tree);
#endif
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
#endif

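/* Illustrative sketch, not part of the original sources: this mirrors how
   emit_block_move, further down in this file, uses MOVE_BY_PIECES_P to pick
   between an inline piecewise copy and the movstr/libcall strategies.
   The variable names follow that function.  */
#if 0
  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movstr (x, y, size, align))
    ;
  else
    retval = emit_block_move_via_libcall (x, y, size);
#endif
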
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of two different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* Stack of EXPR_WITH_FILE_LOCATION nested expressions.  */
struct file_stack *expr_wfl_stack;

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif

4fa52007 221/* This is run once per compilation to set up which modes can be used
266007a7 222 directly in memory and to initialize the block move optab. */
4fa52007
RK
223
224void
502b8322 225init_expr_once (void)
4fa52007
RK
226{
227 rtx insn, pat;
228 enum machine_mode mode;
cff48d8f 229 int num_clobbers;
9ec36da5 230 rtx mem, mem1;
bf1660a6 231 rtx reg;
9ec36da5 232
e2549997
RS
233 /* Try indexing by frame ptr and try by stack ptr.
234 It is known that on the Convex the stack ptr isn't a valid index.
235 With luck, one or the other is valid on any machine. */
9ec36da5
JL
236 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
237 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
4fa52007 238
bf1660a6
JL
239 /* A scratch register we can modify in-place below to avoid
240 useless RTL allocations. */
241 reg = gen_rtx_REG (VOIDmode, -1);
242
1f8c3c5b
RH
243 insn = rtx_alloc (INSN);
244 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
245 PATTERN (insn) = pat;
4fa52007
RK
246
247 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
248 mode = (enum machine_mode) ((int) mode + 1))
249 {
250 int regno;
4fa52007
RK
251
252 direct_load[(int) mode] = direct_store[(int) mode] = 0;
253 PUT_MODE (mem, mode);
e2549997 254 PUT_MODE (mem1, mode);
bf1660a6 255 PUT_MODE (reg, mode);
4fa52007 256
e6fe56a4
RK
257 /* See if there is some register that can be used in this mode and
258 directly loaded or stored from memory. */
259
7308a047
RS
260 if (mode != VOIDmode && mode != BLKmode)
261 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
262 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
263 regno++)
264 {
265 if (! HARD_REGNO_MODE_OK (regno, mode))
266 continue;
e6fe56a4 267
bf1660a6 268 REGNO (reg) = regno;
e6fe56a4 269
7308a047
RS
270 SET_SRC (pat) = mem;
271 SET_DEST (pat) = reg;
272 if (recog (pat, insn, &num_clobbers) >= 0)
273 direct_load[(int) mode] = 1;
e6fe56a4 274
e2549997
RS
275 SET_SRC (pat) = mem1;
276 SET_DEST (pat) = reg;
277 if (recog (pat, insn, &num_clobbers) >= 0)
278 direct_load[(int) mode] = 1;
279
7308a047
RS
280 SET_SRC (pat) = reg;
281 SET_DEST (pat) = mem;
282 if (recog (pat, insn, &num_clobbers) >= 0)
283 direct_store[(int) mode] = 1;
e2549997
RS
284
285 SET_SRC (pat) = reg;
286 SET_DEST (pat) = mem1;
287 if (recog (pat, insn, &num_clobbers) >= 0)
288 direct_store[(int) mode] = 1;
7308a047 289 }
4fa52007
RK
290 }
291
51286de6
RH
292 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
293
294 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
295 mode = GET_MODE_WIDER_MODE (mode))
296 {
297 enum machine_mode srcmode;
298 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
0fb7aeda 299 srcmode = GET_MODE_WIDER_MODE (srcmode))
51286de6
RH
300 {
301 enum insn_code ic;
302
303 ic = can_extend_p (mode, srcmode, 0);
304 if (ic == CODE_FOR_nothing)
305 continue;
306
307 PUT_MODE (mem, srcmode);
0fb7aeda 308
51286de6
RH
309 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
310 float_extend_from_mem[mode][srcmode] = true;
311 }
312 }
4fa52007 313}
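/* Illustrative sketch, not part of the original sources: the direct_load
   table probed above is consulted later, e.g. by convert_move, to decide
   whether a MEM operand may be used directly in a given mode.  FROM and
   TO_MODE stand for that function's operand and target mode.  */
#if 0
  if (GET_CODE (from) == MEM
      && ! MEM_VOLATILE_P (from)
      && direct_load[(int) to_mode]
      && ! mode_dependent_address_p (XEXP (from, 0)))
    ;  /* FROM can be accessed in TO_MODE without copying to a register.  */
#endif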
cff48d8f 314
bbf6f052
RK
315/* This is run at the start of compiling a function. */
316
317void
502b8322 318init_expr (void)
bbf6f052 319{
3a70d621 320 cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
bbf6f052
RK
321}
322
49ad7cfa 323/* Small sanity check that the queue is empty at the end of a function. */
296b4ed9 324
bbf6f052 325void
502b8322 326finish_expr_for_function (void)
bbf6f052 327{
49ad7cfa
BS
328 if (pending_chain)
329 abort ();
bbf6f052
RK
330}
331\f
332/* Manage the queue of increment instructions to be output
333 for POSTINCREMENT_EXPR expressions, etc. */
334
bbf6f052
RK
335/* Queue up to increment (or change) VAR later. BODY says how:
336 BODY should be the same thing you would pass to emit_insn
337 to increment right away. It will go to emit_insn later on.
338
339 The value is a QUEUED expression to be used in place of VAR
340 where you want to guarantee the pre-incrementation value of VAR. */
341
342static rtx
502b8322 343enqueue_insn (rtx var, rtx body)
bbf6f052 344{
c5c76735
JL
345 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
346 body, pending_chain);
bbf6f052
RK
347 return pending_chain;
348}
349
350/* Use protect_from_queue to convert a QUEUED expression
351 into something that you can put immediately into an instruction.
352 If the queued incrementation has not happened yet,
353 protect_from_queue returns the variable itself.
354 If the incrementation has happened, protect_from_queue returns a temp
355 that contains a copy of the old value of the variable.
356
357 Any time an rtx which might possibly be a QUEUED is to be put
358 into an instruction, it must be passed through protect_from_queue first.
359 QUEUED expressions are not meaningful in instructions.
360
361 Do not pass a value through protect_from_queue and then hold
362 on to it for a while before putting it in an instruction!
363 If the queue is flushed in between, incorrect code will result. */
364
365rtx
502b8322 366protect_from_queue (rtx x, int modify)
bbf6f052 367{
b3694847 368 RTX_CODE code = GET_CODE (x);
bbf6f052
RK
369
370#if 0 /* A QUEUED can hang around after the queue is forced out. */
371 /* Shortcut for most common case. */
372 if (pending_chain == 0)
373 return x;
374#endif
375
376 if (code != QUEUED)
377 {
e9baa644
RK
378 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
379 use of autoincrement. Make a copy of the contents of the memory
380 location rather than a copy of the address, but not if the value is
381 of mode BLKmode. Don't modify X in place since it might be
382 shared. */
bbf6f052
RK
383 if (code == MEM && GET_MODE (x) != BLKmode
384 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
385 {
f1ec5147
RK
386 rtx y = XEXP (x, 0);
387 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
e9baa644 388
bbf6f052
RK
389 if (QUEUED_INSN (y))
390 {
f1ec5147
RK
391 rtx temp = gen_reg_rtx (GET_MODE (x));
392
e9baa644 393 emit_insn_before (gen_move_insn (temp, new),
bbf6f052
RK
394 QUEUED_INSN (y));
395 return temp;
396 }
f1ec5147 397
73b7f58c
BS
398 /* Copy the address into a pseudo, so that the returned value
399 remains correct across calls to emit_queue. */
f1ec5147 400 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
bbf6f052 401 }
f1ec5147 402
bbf6f052
RK
403 /* Otherwise, recursively protect the subexpressions of all
404 the kinds of rtx's that can contain a QUEUED. */
405 if (code == MEM)
3f15938e
RS
406 {
407 rtx tem = protect_from_queue (XEXP (x, 0), 0);
408 if (tem != XEXP (x, 0))
409 {
410 x = copy_rtx (x);
411 XEXP (x, 0) = tem;
412 }
413 }
bbf6f052
RK
414 else if (code == PLUS || code == MULT)
415 {
3f15938e
RS
416 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
417 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
418 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
419 {
420 x = copy_rtx (x);
421 XEXP (x, 0) = new0;
422 XEXP (x, 1) = new1;
423 }
bbf6f052
RK
424 }
425 return x;
426 }
73b7f58c
BS
427 /* If the increment has not happened, use the variable itself. Copy it
428 into a new pseudo so that the value remains correct across calls to
429 emit_queue. */
bbf6f052 430 if (QUEUED_INSN (x) == 0)
73b7f58c 431 return copy_to_reg (QUEUED_VAR (x));
bbf6f052
RK
432 /* If the increment has happened and a pre-increment copy exists,
433 use that copy. */
434 if (QUEUED_COPY (x) != 0)
435 return QUEUED_COPY (x);
436 /* The increment has happened but we haven't set up a pre-increment copy.
437 Set one up now, and use it. */
438 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
439 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
440 QUEUED_INSN (x));
441 return QUEUED_COPY (x);
442}
443
444/* Return nonzero if X contains a QUEUED expression:
445 if it contains anything that will be altered by a queued increment.
446 We handle only combinations of MEM, PLUS, MINUS and MULT operators
447 since memory addresses generally contain only those. */
448
1f06ee8d 449int
502b8322 450queued_subexp_p (rtx x)
bbf6f052 451{
b3694847 452 enum rtx_code code = GET_CODE (x);
bbf6f052
RK
453 switch (code)
454 {
455 case QUEUED:
456 return 1;
457 case MEM:
458 return queued_subexp_p (XEXP (x, 0));
459 case MULT:
460 case PLUS:
461 case MINUS:
e9a25f70
JL
462 return (queued_subexp_p (XEXP (x, 0))
463 || queued_subexp_p (XEXP (x, 1)));
464 default:
465 return 0;
bbf6f052 466 }
bbf6f052
RK
467}
468
1bbd65cd
EB
469/* Retrieve a mark on the queue. */
470
471static rtx
472mark_queue (void)
473{
474 return pending_chain;
475}
bbf6f052 476
1bbd65cd
EB
477/* Perform all the pending incrementations that have been enqueued
478 after MARK was retrieved. If MARK is null, perform all the
479 pending incrementations. */
480
481static void
482emit_insns_enqueued_after_mark (rtx mark)
bbf6f052 483{
b3694847 484 rtx p;
1bbd65cd
EB
485
486 /* The marked incrementation may have been emitted in the meantime
487 through a call to emit_queue. In this case, the mark is not valid
488 anymore so do nothing. */
489 if (mark && ! QUEUED_BODY (mark))
490 return;
491
492 while ((p = pending_chain) != mark)
bbf6f052 493 {
41b083c4
R
494 rtx body = QUEUED_BODY (p);
495
2f937369
DM
496 switch (GET_CODE (body))
497 {
498 case INSN:
499 case JUMP_INSN:
500 case CALL_INSN:
501 case CODE_LABEL:
502 case BARRIER:
503 case NOTE:
504 QUEUED_INSN (p) = body;
505 emit_insn (body);
506 break;
507
508#ifdef ENABLE_CHECKING
509 case SEQUENCE:
510 abort ();
511 break;
512#endif
513
514 default:
515 QUEUED_INSN (p) = emit_insn (body);
516 break;
41b083c4 517 }
2f937369 518
1bbd65cd 519 QUEUED_BODY (p) = 0;
bbf6f052
RK
520 pending_chain = QUEUED_NEXT (p);
521 }
522}
1bbd65cd
EB
523
524/* Perform all the pending incrementations. */
525
526void
527emit_queue (void)
528{
529 emit_insns_enqueued_after_mark (NULL_RTX);
530}
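/* Illustrative sketch, not part of the original sources: the calling
   protocol for the queue machinery above.  Any rtx that might be a QUEUED
   is passed through protect_from_queue before it goes into an insn, and
   emit_queue then flushes the pending side effects.  TO and FROM are
   hypothetical operands.  */
#if 0
  to = protect_from_queue (to, 1);	/* TO will be written.  */
  from = protect_from_queue (from, 0);	/* FROM is only read.  */
  emit_move_insn (to, from);
  emit_queue ();			/* Emit the queued increments.  */
#endif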
bbf6f052
RK
531\f
532/* Copy data from FROM to TO, where the machine modes are not the same.
533 Both modes may be integer, or both may be floating.
534 UNSIGNEDP should be nonzero if FROM is an unsigned type.
535 This causes zero-extension instead of sign-extension. */
536
537void
502b8322 538convert_move (rtx to, rtx from, int unsignedp)
bbf6f052
RK
539{
540 enum machine_mode to_mode = GET_MODE (to);
541 enum machine_mode from_mode = GET_MODE (from);
542 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
543 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
544 enum insn_code code;
545 rtx libcall;
546
547 /* rtx code for making an equivalent value. */
37d0b254
SE
548 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
549 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
bbf6f052
RK
550
551 to = protect_from_queue (to, 1);
552 from = protect_from_queue (from, 0);
553
554 if (to_real != from_real)
555 abort ();
556
1499e0a8
RK
557 /* If FROM is a SUBREG that indicates that we have already done at least
558 the required extension, strip it. We don't handle such SUBREGs as
559 TO here. */
560
561 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
562 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
563 >= GET_MODE_SIZE (to_mode))
564 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
565 from = gen_lowpart (to_mode, from), from_mode = to_mode;
566
567 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
568 abort ();
569
bbf6f052
RK
570 if (to_mode == from_mode
571 || (from_mode == VOIDmode && CONSTANT_P (from)))
572 {
573 emit_move_insn (to, from);
574 return;
575 }
576
0b4565c9
BS
577 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
578 {
579 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
580 abort ();
3a94c984 581
0b4565c9 582 if (VECTOR_MODE_P (to_mode))
bafe341a 583 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
0b4565c9 584 else
bafe341a 585 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
0b4565c9
BS
586
587 emit_move_insn (to, from);
588 return;
589 }
590
06765df1
R
591 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
592 {
593 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
594 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
595 return;
596 }
597
bbf6f052
RK
598 if (to_real)
599 {
642dfa8b 600 rtx value, insns;
85363ca0 601 convert_optab tab;
81d79e2c 602
e44846d6 603 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
85363ca0 604 tab = sext_optab;
e44846d6 605 else if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
85363ca0
ZW
606 tab = trunc_optab;
607 else
608 abort ();
2b01c326 609
85363ca0 610 /* Try converting directly if the insn is supported. */
2b01c326 611
85363ca0
ZW
612 code = tab->handlers[to_mode][from_mode].insn_code;
613 if (code != CODE_FOR_nothing)
b092b471 614 {
85363ca0
ZW
615 emit_unop_insn (code, to, from,
616 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
b092b471
JW
617 return;
618 }
b092b471 619
85363ca0
ZW
620 /* Otherwise use a libcall. */
621 libcall = tab->handlers[to_mode][from_mode].libfunc;
3a94c984 622
85363ca0 623 if (!libcall)
b092b471 624 /* This conversion is not implemented yet. */
bbf6f052
RK
625 abort ();
626
642dfa8b 627 start_sequence ();
ebb1b59a 628 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
81d79e2c 629 1, from, from_mode);
642dfa8b
BS
630 insns = get_insns ();
631 end_sequence ();
450b1728
EC
632 emit_libcall_block (insns, to, value,
633 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
634 from)
635 : gen_rtx_FLOAT_EXTEND (to_mode, from));
bbf6f052
RK
636 return;
637 }
638
85363ca0
ZW
639 /* Handle pointer conversion. */ /* SPEE 900220. */
640 /* Targets are expected to provide conversion insns between PxImode and
641 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
642 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
643 {
644 enum machine_mode full_mode
645 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
646
647 if (trunc_optab->handlers[to_mode][full_mode].insn_code
648 == CODE_FOR_nothing)
649 abort ();
650
651 if (full_mode != from_mode)
652 from = convert_to_mode (full_mode, from, unsignedp);
653 emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
654 to, from, UNKNOWN);
655 return;
656 }
657 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
658 {
659 enum machine_mode full_mode
660 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
661
662 if (sext_optab->handlers[full_mode][from_mode].insn_code
663 == CODE_FOR_nothing)
664 abort ();
665
666 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
667 to, from, UNKNOWN);
668 if (to_mode == full_mode)
669 return;
670
a1105617 671 /* else proceed to integer conversions below. */
85363ca0
ZW
672 from_mode = full_mode;
673 }
674
bbf6f052
RK
675 /* Now both modes are integers. */
676
677 /* Handle expanding beyond a word. */
678 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
679 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
680 {
681 rtx insns;
682 rtx lowpart;
683 rtx fill_value;
684 rtx lowfrom;
685 int i;
686 enum machine_mode lowpart_mode;
687 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
688
689 /* Try converting directly if the insn is supported. */
690 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
691 != CODE_FOR_nothing)
692 {
cd1b4b44
RK
693 /* If FROM is a SUBREG, put it into a register. Do this
694 so that we always generate the same set of insns for
695 better cse'ing; if an intermediate assignment occurred,
696 we won't be doing the operation directly on the SUBREG. */
697 if (optimize > 0 && GET_CODE (from) == SUBREG)
698 from = force_reg (from_mode, from);
bbf6f052
RK
699 emit_unop_insn (code, to, from, equiv_code);
700 return;
701 }
702 /* Next, try converting via full word. */
703 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
704 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
705 != CODE_FOR_nothing))
706 {
a81fee56 707 if (GET_CODE (to) == REG)
6a2d136b
EB
708 {
709 if (reg_overlap_mentioned_p (to, from))
710 from = force_reg (from_mode, from);
711 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
712 }
bbf6f052
RK
713 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
714 emit_unop_insn (code, to,
715 gen_lowpart (word_mode, to), equiv_code);
716 return;
717 }
718
719 /* No special multiword conversion insn; do it by hand. */
720 start_sequence ();
721
5c5033c3
RK
722 /* Since we will turn this into a no conflict block, we must ensure
723 that the source does not overlap the target. */
724
725 if (reg_overlap_mentioned_p (to, from))
726 from = force_reg (from_mode, from);
727
bbf6f052
RK
728 /* Get a copy of FROM widened to a word, if necessary. */
729 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
730 lowpart_mode = word_mode;
731 else
732 lowpart_mode = from_mode;
733
734 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
735
736 lowpart = gen_lowpart (lowpart_mode, to);
737 emit_move_insn (lowpart, lowfrom);
738
739 /* Compute the value to put in each remaining word. */
740 if (unsignedp)
741 fill_value = const0_rtx;
742 else
743 {
744#ifdef HAVE_slt
745 if (HAVE_slt
a995e389 746 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
bbf6f052
RK
747 && STORE_FLAG_VALUE == -1)
748 {
906c4e36 749 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
a06ef755 750 lowpart_mode, 0);
bbf6f052
RK
751 fill_value = gen_reg_rtx (word_mode);
752 emit_insn (gen_slt (fill_value));
753 }
754 else
755#endif
756 {
757 fill_value
758 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
759 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
906c4e36 760 NULL_RTX, 0);
bbf6f052
RK
761 fill_value = convert_to_mode (word_mode, fill_value, 1);
762 }
763 }
764
765 /* Fill the remaining words. */
766 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
767 {
768 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
769 rtx subword = operand_subword (to, index, 1, to_mode);
770
771 if (subword == 0)
772 abort ();
773
774 if (fill_value != subword)
775 emit_move_insn (subword, fill_value);
776 }
777
778 insns = get_insns ();
779 end_sequence ();
780
906c4e36 781 emit_no_conflict_block (insns, to, from, NULL_RTX,
38a448ca 782 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
bbf6f052
RK
783 return;
784 }
785
d3c64ee3
RS
786 /* Truncating multi-word to a word or less. */
787 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
788 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
bbf6f052 789 {
431a6eca
JW
790 if (!((GET_CODE (from) == MEM
791 && ! MEM_VOLATILE_P (from)
792 && direct_load[(int) to_mode]
793 && ! mode_dependent_address_p (XEXP (from, 0)))
794 || GET_CODE (from) == REG
795 || GET_CODE (from) == SUBREG))
796 from = force_reg (from_mode, from);
bbf6f052
RK
797 convert_move (to, gen_lowpart (word_mode, from), 0);
798 return;
799 }
800
bbf6f052
RK
801 /* Now follow all the conversions between integers
802 no more than a word long. */
803
804 /* For truncation, usually we can just refer to FROM in a narrower mode. */
805 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
806 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
d3c64ee3 807 GET_MODE_BITSIZE (from_mode)))
bbf6f052 808 {
d3c64ee3
RS
809 if (!((GET_CODE (from) == MEM
810 && ! MEM_VOLATILE_P (from)
811 && direct_load[(int) to_mode]
812 && ! mode_dependent_address_p (XEXP (from, 0)))
813 || GET_CODE (from) == REG
814 || GET_CODE (from) == SUBREG))
815 from = force_reg (from_mode, from);
34aa3599
RK
816 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
817 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
818 from = copy_to_reg (from);
bbf6f052
RK
819 emit_move_insn (to, gen_lowpart (to_mode, from));
820 return;
821 }
822
d3c64ee3 823 /* Handle extension. */
bbf6f052
RK
824 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
825 {
826 /* Convert directly if that works. */
827 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
828 != CODE_FOR_nothing)
829 {
9413de45
RK
830 if (flag_force_mem)
831 from = force_not_mem (from);
832
bbf6f052
RK
833 emit_unop_insn (code, to, from, equiv_code);
834 return;
835 }
836 else
837 {
838 enum machine_mode intermediate;
2b28d92e
NC
839 rtx tmp;
840 tree shift_amount;
bbf6f052
RK
841
842 /* Search for a mode to convert via. */
843 for (intermediate = from_mode; intermediate != VOIDmode;
844 intermediate = GET_MODE_WIDER_MODE (intermediate))
930b4e39
RK
845 if (((can_extend_p (to_mode, intermediate, unsignedp)
846 != CODE_FOR_nothing)
847 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
d60eaeff
JL
848 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
849 GET_MODE_BITSIZE (intermediate))))
bbf6f052
RK
850 && (can_extend_p (intermediate, from_mode, unsignedp)
851 != CODE_FOR_nothing))
852 {
853 convert_move (to, convert_to_mode (intermediate, from,
854 unsignedp), unsignedp);
855 return;
856 }
857
2b28d92e 858 /* No suitable intermediate mode.
3a94c984 859 Generate what we need with shifts. */
2b28d92e
NC
860 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
861 - GET_MODE_BITSIZE (from_mode), 0);
862 from = gen_lowpart (to_mode, force_reg (from_mode, from));
863 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
864 to, unsignedp);
3a94c984 865 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
2b28d92e
NC
866 to, unsignedp);
867 if (tmp != to)
868 emit_move_insn (to, tmp);
869 return;
bbf6f052
RK
870 }
871 }
872
3a94c984 873 /* Support special truncate insns for certain modes. */
85363ca0 874 if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
bbf6f052 875 {
85363ca0
ZW
876 emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
877 to, from, UNKNOWN);
b9bcad65
RK
878 return;
879 }
880
bbf6f052
RK
881 /* Handle truncation of volatile memrefs, and so on;
882 the things that couldn't be truncated directly,
85363ca0
ZW
883 and for which there was no special instruction.
884
885 ??? Code above formerly short-circuited this, for most integer
886 mode pairs, with a force_reg in from_mode followed by a recursive
887 call to this routine. Appears always to have been wrong. */
bbf6f052
RK
888 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
889 {
890 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
891 emit_move_insn (to, temp);
892 return;
893 }
894
895 /* Mode combination is not recognized. */
896 abort ();
897}
898
899/* Return an rtx for a value that would result
900 from converting X to mode MODE.
901 Both X and MODE may be floating, or both integer.
902 UNSIGNEDP is nonzero if X is an unsigned value.
903 This can be done by referring to a part of X in place
5d901c31
RS
904 or by copying to a new temporary with conversion.
905
906 This function *must not* call protect_from_queue
907 except when putting X into an insn (in which case convert_move does it). */
bbf6f052
RK
908
909rtx
502b8322 910convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
5ffe63ed
RS
911{
912 return convert_modes (mode, VOIDmode, x, unsignedp);
913}
914
915/* Return an rtx for a value that would result
916 from converting X from mode OLDMODE to mode MODE.
917 Both modes may be floating, or both integer.
918 UNSIGNEDP is nonzero if X is an unsigned value.
919
920 This can be done by referring to a part of X in place
921 or by copying to a new temporary with conversion.
922
923 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
924
925 This function *must not* call protect_from_queue
926 except when putting X into an insn (in which case convert_move does it). */
927
928rtx
502b8322 929convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
bbf6f052 930{
b3694847 931 rtx temp;
5ffe63ed 932
1499e0a8
RK
933 /* If FROM is a SUBREG that indicates that we have already done at least
934 the required extension, strip it. */
935
936 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
937 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
938 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
939 x = gen_lowpart (mode, x);
bbf6f052 940
64791b18
RK
941 if (GET_MODE (x) != VOIDmode)
942 oldmode = GET_MODE (x);
3a94c984 943
5ffe63ed 944 if (mode == oldmode)
bbf6f052
RK
945 return x;
946
947 /* There is one case that we must handle specially: If we are converting
906c4e36 948 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
bbf6f052
RK
949 we are to interpret the constant as unsigned, gen_lowpart will do
 950 the wrong thing if the constant appears negative. What we want to do is
951 make the high-order word of the constant zero, not all ones. */
952
953 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
906c4e36 954 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
bbf6f052 955 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
96ff8a16
ILT
956 {
957 HOST_WIDE_INT val = INTVAL (x);
958
959 if (oldmode != VOIDmode
960 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
961 {
962 int width = GET_MODE_BITSIZE (oldmode);
963
964 /* We need to zero extend VAL. */
965 val &= ((HOST_WIDE_INT) 1 << width) - 1;
966 }
967
968 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
969 }
bbf6f052
RK
970
971 /* We can do this with a gen_lowpart if both desired and current modes
972 are integer, and this is either a constant integer, a register, or a
ba2e110c
RK
973 non-volatile MEM. Except for the constant case where MODE is no
974 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
bbf6f052 975
ba2e110c
RK
976 if ((GET_CODE (x) == CONST_INT
977 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
bbf6f052 978 || (GET_MODE_CLASS (mode) == MODE_INT
5ffe63ed 979 && GET_MODE_CLASS (oldmode) == MODE_INT
bbf6f052 980 && (GET_CODE (x) == CONST_DOUBLE
5ffe63ed 981 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
d57c66da
JW
982 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
983 && direct_load[(int) mode])
2bf29316 984 || (GET_CODE (x) == REG
006c9f4a
SE
985 && (! HARD_REGISTER_P (x)
986 || HARD_REGNO_MODE_OK (REGNO (x), mode))
2bf29316
JW
987 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
988 GET_MODE_BITSIZE (GET_MODE (x)))))))))
ba2e110c
RK
989 {
990 /* ?? If we don't know OLDMODE, we have to assume here that
991 X does not need sign- or zero-extension. This may not be
992 the case, but it's the best we can do. */
993 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
994 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
995 {
996 HOST_WIDE_INT val = INTVAL (x);
997 int width = GET_MODE_BITSIZE (oldmode);
998
999 /* We must sign or zero-extend in this case. Start by
1000 zero-extending, then sign extend if we need to. */
1001 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1002 if (! unsignedp
1003 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1004 val |= (HOST_WIDE_INT) (-1) << width;
1005
2496c7bd 1006 return gen_int_mode (val, mode);
ba2e110c
RK
1007 }
1008
1009 return gen_lowpart (mode, x);
1010 }
bbf6f052 1011
ebe75517
JH
 1012 /* Converting from an integer constant into mode is always equivalent to a
1013 subreg operation. */
1014 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
1015 {
1016 if (GET_MODE_BITSIZE (mode) != GET_MODE_BITSIZE (oldmode))
1017 abort ();
1018 return simplify_gen_subreg (mode, x, oldmode, 0);
1019 }
1020
bbf6f052
RK
1021 temp = gen_reg_rtx (mode);
1022 convert_move (temp, x, unsignedp);
1023 return temp;
1024}
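/* Illustrative sketch, not part of the original sources: a typical use of
   convert_to_mode, widening a SImode pseudo to DImode with zero extension.
   The pseudo REG is hypothetical.  */
#if 0
  rtx reg = gen_reg_rtx (SImode);
  rtx wide = convert_to_mode (DImode, reg, 1);	/* 1 = treat REG as unsigned.  */
#endif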
1025\f
cf5124f6
RS
1026/* STORE_MAX_PIECES is the number of bytes at a time that we can
1027 store efficiently. Due to internal GCC limitations, this is
1028 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
1029 for an immediate constant. */
1030
1031#define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
1032
8fd3cf4e
JJ
1033/* Determine whether the LEN bytes can be moved by using several move
1034 instructions. Return nonzero if a call to move_by_pieces should
1035 succeed. */
1036
1037int
502b8322
AJ
1038can_move_by_pieces (unsigned HOST_WIDE_INT len,
1039 unsigned int align ATTRIBUTE_UNUSED)
8fd3cf4e
JJ
1040{
1041 return MOVE_BY_PIECES_P (len, align);
1042}
1043
21d93687
RK
1044/* Generate several move instructions to copy LEN bytes from block FROM to
1045 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1046 and TO through protect_from_queue before calling.
566aa174 1047
21d93687
RK
1048 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1049 used to push FROM to the stack.
566aa174 1050
8fd3cf4e 1051 ALIGN is maximum stack alignment we can assume.
bbf6f052 1052
8fd3cf4e
JJ
1053 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
1054 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
1055 stpcpy. */
1056
1057rtx
502b8322
AJ
1058move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
1059 unsigned int align, int endp)
bbf6f052
RK
1060{
1061 struct move_by_pieces data;
566aa174 1062 rtx to_addr, from_addr = XEXP (from, 0);
770ae6cc 1063 unsigned int max_size = MOVE_MAX_PIECES + 1;
fbe1758d
AM
1064 enum machine_mode mode = VOIDmode, tmode;
1065 enum insn_code icode;
bbf6f052 1066
f26aca6d
DD
1067 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
1068
bbf6f052 1069 data.offset = 0;
bbf6f052 1070 data.from_addr = from_addr;
566aa174
JH
1071 if (to)
1072 {
1073 to_addr = XEXP (to, 0);
1074 data.to = to;
1075 data.autinc_to
1076 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1077 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1078 data.reverse
1079 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1080 }
1081 else
1082 {
1083 to_addr = NULL_RTX;
1084 data.to = NULL_RTX;
1085 data.autinc_to = 1;
1086#ifdef STACK_GROWS_DOWNWARD
1087 data.reverse = 1;
1088#else
1089 data.reverse = 0;
1090#endif
1091 }
1092 data.to_addr = to_addr;
bbf6f052 1093 data.from = from;
bbf6f052
RK
1094 data.autinc_from
1095 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1096 || GET_CODE (from_addr) == POST_INC
1097 || GET_CODE (from_addr) == POST_DEC);
1098
1099 data.explicit_inc_from = 0;
1100 data.explicit_inc_to = 0;
bbf6f052
RK
1101 if (data.reverse) data.offset = len;
1102 data.len = len;
1103
1104 /* If copying requires more than two move insns,
1105 copy addresses to registers (to make displacements shorter)
1106 and use post-increment if available. */
1107 if (!(data.autinc_from && data.autinc_to)
1108 && move_by_pieces_ninsns (len, align) > 2)
1109 {
3a94c984 1110 /* Find the mode of the largest move... */
fbe1758d
AM
1111 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1112 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1113 if (GET_MODE_SIZE (tmode) < max_size)
1114 mode = tmode;
1115
1116 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
bbf6f052
RK
1117 {
1118 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1119 data.autinc_from = 1;
1120 data.explicit_inc_from = -1;
1121 }
fbe1758d 1122 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
bbf6f052
RK
1123 {
1124 data.from_addr = copy_addr_to_reg (from_addr);
1125 data.autinc_from = 1;
1126 data.explicit_inc_from = 1;
1127 }
bbf6f052
RK
1128 if (!data.autinc_from && CONSTANT_P (from_addr))
1129 data.from_addr = copy_addr_to_reg (from_addr);
fbe1758d 1130 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
bbf6f052
RK
1131 {
1132 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1133 data.autinc_to = 1;
1134 data.explicit_inc_to = -1;
1135 }
fbe1758d 1136 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
bbf6f052
RK
1137 {
1138 data.to_addr = copy_addr_to_reg (to_addr);
1139 data.autinc_to = 1;
1140 data.explicit_inc_to = 1;
1141 }
bbf6f052
RK
1142 if (!data.autinc_to && CONSTANT_P (to_addr))
1143 data.to_addr = copy_addr_to_reg (to_addr);
1144 }
1145
e1565e65 1146 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
19caa751
RK
1147 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1148 align = MOVE_MAX * BITS_PER_UNIT;
bbf6f052
RK
1149
1150 /* First move what we can in the largest integer mode, then go to
1151 successively smaller modes. */
1152
1153 while (max_size > 1)
1154 {
e7c33f54
RK
1155 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1156 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1157 if (GET_MODE_SIZE (tmode) < max_size)
bbf6f052
RK
1158 mode = tmode;
1159
1160 if (mode == VOIDmode)
1161 break;
1162
1163 icode = mov_optab->handlers[(int) mode].insn_code;
19caa751 1164 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
bbf6f052
RK
1165 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1166
1167 max_size = GET_MODE_SIZE (mode);
1168 }
1169
1170 /* The code above should have handled everything. */
2a8e278c 1171 if (data.len > 0)
bbf6f052 1172 abort ();
8fd3cf4e
JJ
1173
1174 if (endp)
1175 {
1176 rtx to1;
1177
1178 if (data.reverse)
1179 abort ();
1180 if (data.autinc_to)
1181 {
1182 if (endp == 2)
1183 {
1184 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1185 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1186 else
1187 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
1188 -1));
1189 }
1190 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1191 data.offset);
1192 }
1193 else
1194 {
1195 if (endp == 2)
1196 --data.offset;
1197 to1 = adjust_address (data.to, QImode, data.offset);
1198 }
1199 return to1;
1200 }
1201 else
1202 return data.to;
bbf6f052
RK
1203}
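/* Illustrative sketch, not part of the original sources: how a caller such
   as a memcpy/mempcpy expander might use the ENDP argument described above.
   DEST, SRC, LEN and ALIGN are hypothetical.  */
#if 0
  /* memcpy-style copy; the return value is the destination block.  */
  move_by_pieces (dest, src, len, align, 0);
  /* mempcpy-style copy; returns memory at one past the last byte written.  */
  rtx end = move_by_pieces (dest, src, len, align, 1);
#endif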
1204
1205/* Return number of insns required to move L bytes by pieces.
f1eaaf73 1206 ALIGN (in bits) is maximum alignment we can assume. */
bbf6f052 1207
3bdf5ad1 1208static unsigned HOST_WIDE_INT
502b8322 1209move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align)
bbf6f052 1210{
3bdf5ad1
RK
1211 unsigned HOST_WIDE_INT n_insns = 0;
1212 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
bbf6f052 1213
e1565e65 1214 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
19caa751 1215 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
14c78e9b 1216 align = MOVE_MAX * BITS_PER_UNIT;
bbf6f052
RK
1217
1218 while (max_size > 1)
1219 {
1220 enum machine_mode mode = VOIDmode, tmode;
1221 enum insn_code icode;
1222
e7c33f54
RK
1223 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1224 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1225 if (GET_MODE_SIZE (tmode) < max_size)
bbf6f052
RK
1226 mode = tmode;
1227
1228 if (mode == VOIDmode)
1229 break;
1230
1231 icode = mov_optab->handlers[(int) mode].insn_code;
19caa751 1232 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
bbf6f052
RK
1233 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1234
1235 max_size = GET_MODE_SIZE (mode);
1236 }
1237
13c6f0d5
NS
1238 if (l)
1239 abort ();
bbf6f052
RK
1240 return n_insns;
1241}
1242
1243/* Subroutine of move_by_pieces. Move as many bytes as appropriate
1244 with move instructions for mode MODE. GENFUN is the gen_... function
1245 to make a move insn for that mode. DATA has all the other info. */
1246
1247static void
502b8322
AJ
1248move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1249 struct move_by_pieces *data)
bbf6f052 1250{
3bdf5ad1 1251 unsigned int size = GET_MODE_SIZE (mode);
ae0ed63a 1252 rtx to1 = NULL_RTX, from1;
bbf6f052
RK
1253
1254 while (data->len >= size)
1255 {
3bdf5ad1
RK
1256 if (data->reverse)
1257 data->offset -= size;
1258
566aa174 1259 if (data->to)
3bdf5ad1 1260 {
566aa174 1261 if (data->autinc_to)
630036c6
JJ
1262 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1263 data->offset);
566aa174 1264 else
f4ef873c 1265 to1 = adjust_address (data->to, mode, data->offset);
3bdf5ad1 1266 }
3bdf5ad1
RK
1267
1268 if (data->autinc_from)
630036c6
JJ
1269 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1270 data->offset);
3bdf5ad1 1271 else
f4ef873c 1272 from1 = adjust_address (data->from, mode, data->offset);
bbf6f052 1273
940da324 1274 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
3d709fd3
RH
1275 emit_insn (gen_add2_insn (data->to_addr,
1276 GEN_INT (-(HOST_WIDE_INT)size)));
940da324 1277 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
3d709fd3
RH
1278 emit_insn (gen_add2_insn (data->from_addr,
1279 GEN_INT (-(HOST_WIDE_INT)size)));
bbf6f052 1280
566aa174
JH
1281 if (data->to)
1282 emit_insn ((*genfun) (to1, from1));
1283 else
21d93687
RK
1284 {
1285#ifdef PUSH_ROUNDING
1286 emit_single_push_insn (mode, from1, NULL);
1287#else
1288 abort ();
1289#endif
1290 }
3bdf5ad1 1291
940da324 1292 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
906c4e36 1293 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
940da324 1294 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
906c4e36 1295 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
bbf6f052 1296
3bdf5ad1
RK
1297 if (! data->reverse)
1298 data->offset += size;
bbf6f052
RK
1299
1300 data->len -= size;
1301 }
1302}
1303\f
4ca79136
RH
1304/* Emit code to move a block Y to a block X. This may be done with
1305 string-move instructions, with multiple scalar move instructions,
1306 or with a library call.
bbf6f052 1307
4ca79136 1308 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
bbf6f052 1309 SIZE is an rtx that says how long they are.
19caa751 1310 ALIGN is the maximum alignment we can assume they have.
44bb111a 1311 METHOD describes what kind of copy this is, and what mechanisms may be used.
bbf6f052 1312
e9a25f70
JL
1313 Return the address of the new block, if memcpy is called and returns it,
1314 0 otherwise. */
1315
1316rtx
502b8322 1317emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
bbf6f052 1318{
44bb111a 1319 bool may_use_call;
e9a25f70 1320 rtx retval = 0;
44bb111a
RH
1321 unsigned int align;
1322
1323 switch (method)
1324 {
1325 case BLOCK_OP_NORMAL:
1326 may_use_call = true;
1327 break;
1328
1329 case BLOCK_OP_CALL_PARM:
1330 may_use_call = block_move_libcall_safe_for_call_parm ();
1331
1332 /* Make inhibit_defer_pop nonzero around the library call
1333 to force it to pop the arguments right away. */
1334 NO_DEFER_POP;
1335 break;
1336
1337 case BLOCK_OP_NO_LIBCALL:
1338 may_use_call = false;
1339 break;
1340
1341 default:
1342 abort ();
1343 }
1344
1345 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
e9a25f70 1346
bbf6f052
RK
1347 if (GET_MODE (x) != BLKmode)
1348 abort ();
bbf6f052
RK
1349 if (GET_MODE (y) != BLKmode)
1350 abort ();
1351
1352 x = protect_from_queue (x, 1);
1353 y = protect_from_queue (y, 0);
5d901c31 1354 size = protect_from_queue (size, 0);
bbf6f052
RK
1355
1356 if (GET_CODE (x) != MEM)
1357 abort ();
1358 if (GET_CODE (y) != MEM)
1359 abort ();
1360 if (size == 0)
1361 abort ();
1362
cb38fd88
RH
1363 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1364 can be incorrect is coming from __builtin_memcpy. */
1365 if (GET_CODE (size) == CONST_INT)
1366 {
6972c506
JJ
1367 if (INTVAL (size) == 0)
1368 return 0;
1369
cb38fd88
RH
1370 x = shallow_copy_rtx (x);
1371 y = shallow_copy_rtx (y);
1372 set_mem_size (x, size);
1373 set_mem_size (y, size);
1374 }
1375
fbe1758d 1376 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
8fd3cf4e 1377 move_by_pieces (x, y, INTVAL (size), align, 0);
4ca79136
RH
1378 else if (emit_block_move_via_movstr (x, y, size, align))
1379 ;
44bb111a 1380 else if (may_use_call)
4ca79136 1381 retval = emit_block_move_via_libcall (x, y, size);
44bb111a
RH
1382 else
1383 emit_block_move_via_loop (x, y, size, align);
1384
1385 if (method == BLOCK_OP_CALL_PARM)
1386 OK_DEFER_POP;
266007a7 1387
4ca79136
RH
1388 return retval;
1389}
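/* Illustrative sketch, not part of the original sources: expanding a block
   copy with emit_block_move.  BLOCK_OP_NORMAL permits a libcall, while
   BLOCK_OP_CALL_PARM is for copies that set up outgoing arguments and must
   not clobber parameters already pushed on the stack.  DST and SRC are
   hypothetical BLKmode MEMs.  */
#if 0
  emit_block_move (dst, src, GEN_INT (64), BLOCK_OP_NORMAL);
#endif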
266007a7 1390
502b8322 1391/* A subroutine of emit_block_move. Returns true if calling the
44bb111a
RH
1392 block move libcall will not clobber any parameters which may have
1393 already been placed on the stack. */
1394
1395static bool
502b8322 1396block_move_libcall_safe_for_call_parm (void)
44bb111a 1397{
a357a6d4 1398 /* If arguments are pushed on the stack, then they're safe. */
44bb111a
RH
1399 if (PUSH_ARGS)
1400 return true;
44bb111a 1401
450b1728 1402 /* If registers go on the stack anyway, any argument is sure to clobber
a357a6d4
GK
1403 an outgoing argument. */
1404#if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
1405 {
1406 tree fn = emit_block_move_libcall_fn (false);
1407 (void) fn;
1408 if (REG_PARM_STACK_SPACE (fn) != 0)
1409 return false;
1410 }
44bb111a 1411#endif
44bb111a 1412
a357a6d4
GK
1413 /* If any argument goes in memory, then it might clobber an outgoing
1414 argument. */
1415 {
1416 CUMULATIVE_ARGS args_so_far;
1417 tree fn, arg;
450b1728 1418
a357a6d4 1419 fn = emit_block_move_libcall_fn (false);
0f6937fe 1420 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
450b1728 1421
a357a6d4
GK
1422 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1423 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1424 {
1425 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1426 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1427 if (!tmp || !REG_P (tmp))
44bb111a 1428 return false;
a357a6d4
GK
1429#ifdef FUNCTION_ARG_PARTIAL_NREGS
1430 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
1431 NULL_TREE, 1))
1432 return false;
1433#endif
1434 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1435 }
1436 }
1437 return true;
44bb111a
RH
1438}
1439
502b8322 1440/* A subroutine of emit_block_move. Expand a movstr pattern;
4ca79136 1441 return true if successful. */
3ef1eef4 1442
4ca79136 1443static bool
502b8322 1444emit_block_move_via_movstr (rtx x, rtx y, rtx size, unsigned int align)
4ca79136 1445{
4ca79136 1446 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
a5e9c810 1447 int save_volatile_ok = volatile_ok;
4ca79136 1448 enum machine_mode mode;
266007a7 1449
4ca79136
RH
1450 /* Since this is a move insn, we don't care about volatility. */
1451 volatile_ok = 1;
1452
ee960939
OH
1453 /* Try the most limited insn first, because there's no point
1454 including more than one in the machine description unless
1455 the more limited one has some advantage. */
1456
4ca79136
RH
1457 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1458 mode = GET_MODE_WIDER_MODE (mode))
1459 {
1460 enum insn_code code = movstr_optab[(int) mode];
1461 insn_operand_predicate_fn pred;
1462
1463 if (code != CODE_FOR_nothing
1464 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1465 here because if SIZE is less than the mode mask, as it is
1466 returned by the macro, it will definitely be less than the
1467 actual mode mask. */
1468 && ((GET_CODE (size) == CONST_INT
1469 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1470 <= (GET_MODE_MASK (mode) >> 1)))
1471 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1472 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1473 || (*pred) (x, BLKmode))
1474 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1475 || (*pred) (y, BLKmode))
1476 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1477 || (*pred) (opalign, VOIDmode)))
1478 {
1479 rtx op2;
1480 rtx last = get_last_insn ();
1481 rtx pat;
1482
1483 op2 = convert_to_mode (mode, size, 1);
1484 pred = insn_data[(int) code].operand[2].predicate;
1485 if (pred != 0 && ! (*pred) (op2, mode))
1486 op2 = copy_to_mode_reg (mode, op2);
1487
1488 /* ??? When called via emit_block_move_for_call, it'd be
1489 nice if there were some way to inform the backend, so
1490 that it doesn't fail the expansion because it thinks
1491 emitting the libcall would be more efficient. */
1492
1493 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1494 if (pat)
1495 {
1496 emit_insn (pat);
a5e9c810 1497 volatile_ok = save_volatile_ok;
4ca79136 1498 return true;
bbf6f052 1499 }
4ca79136
RH
1500 else
1501 delete_insns_since (last);
bbf6f052 1502 }
4ca79136 1503 }
bbf6f052 1504
a5e9c810 1505 volatile_ok = save_volatile_ok;
4ca79136
RH
1506 return false;
1507}
3ef1eef4 1508
4ca79136
RH
1509/* A subroutine of emit_block_move. Expand a call to memcpy or bcopy.
1510 Return the return value from memcpy, 0 otherwise. */
4bc973ae 1511
4ca79136 1512static rtx
502b8322 1513emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
4ca79136 1514{
ee960939 1515 rtx dst_addr, src_addr;
4ca79136
RH
1516 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1517 enum machine_mode size_mode;
1518 rtx retval;
4bc973ae 1519
4ca79136 1520 /* DST, SRC, or SIZE may have been passed through protect_from_queue.
4bc973ae 1521
ee960939
OH
1522 It is unsafe to save the value generated by protect_from_queue and reuse
1523 it later. Consider what happens if emit_queue is called before the
1524 return value from protect_from_queue is used.
4bc973ae 1525
ee960939
OH
1526 Expansion of the CALL_EXPR below will call emit_queue before we are
1527 finished emitting RTL for argument setup. So if we are not careful we
1528 could get the wrong value for an argument.
4bc973ae 1529
ee960939
OH
1530 To avoid this problem we go ahead and emit code to copy the addresses of
1531 DST and SRC and SIZE into new pseudos. We can then place those new
1532 pseudos into an RTL_EXPR and use them later, even after a call to
4ca79136 1533 emit_queue.
4bc973ae 1534
ee960939
OH
1535 Note this is not strictly needed for library calls since they do not call
1536 emit_queue before loading their arguments. However, we may need to have
1537 library calls call emit_queue in the future since failing to do so could
1538 cause problems for targets which define SMALL_REGISTER_CLASSES and pass
1539 arguments in registers. */
1540
1541 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1542 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
4ca79136 1543
ee960939
OH
1544 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1545 src_addr = convert_memory_address (ptr_mode, src_addr);
ee960939
OH
1546
1547 dst_tree = make_tree (ptr_type_node, dst_addr);
1548 src_tree = make_tree (ptr_type_node, src_addr);
4ca79136
RH
1549
1550 if (TARGET_MEM_FUNCTIONS)
1551 size_mode = TYPE_MODE (sizetype);
1552 else
1553 size_mode = TYPE_MODE (unsigned_type_node);
ee960939 1554
4ca79136
RH
1555 size = convert_to_mode (size_mode, size, 1);
1556 size = copy_to_mode_reg (size_mode, size);
1557
1558 /* It is incorrect to use the libcall calling conventions to call
1559 memcpy in this context. This could be a user call to memcpy and
1560 the user may wish to examine the return value from memcpy. For
1561 targets where libcalls and normal calls have different conventions
1562 for returning pointers, we could end up generating incorrect code.
1563
1564 For convenience, we generate the call to bcopy this way as well. */
1565
4ca79136
RH
1566 if (TARGET_MEM_FUNCTIONS)
1567 size_tree = make_tree (sizetype, size);
1568 else
1569 size_tree = make_tree (unsigned_type_node, size);
1570
1571 fn = emit_block_move_libcall_fn (true);
1572 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1573 if (TARGET_MEM_FUNCTIONS)
1574 {
1575 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1576 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1577 }
1578 else
1579 {
1580 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1581 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1582 }
1583
1584 /* Now we have to build up the CALL_EXPR itself. */
1585 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1586 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1587 call_expr, arg_list, NULL_TREE);
4ca79136
RH
1588
1589 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1590
ee960939
OH
1591 /* If we are initializing a readonly value, show the above call clobbered
1592 it. Otherwise, a load from it may erroneously be hoisted from a loop, or
1593 the delay slot scheduler might overlook conflicts and take nasty
1594 decisions. */
4ca79136 1595 if (RTX_UNCHANGING_P (dst))
ee960939
OH
1596 add_function_usage_to
1597 (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
1598 gen_rtx_CLOBBER (VOIDmode, dst),
1599 NULL_RTX));
4ca79136 1600
ee960939 1601 return TARGET_MEM_FUNCTIONS ? retval : NULL_RTX;
4ca79136 1602}
52cf7115 1603
4ca79136
RH
1604/* A subroutine of emit_block_move_via_libcall. Create the tree node
1605 for the function we use for block copies. The first time FOR_CALL
1606 is true, we call assemble_external. */
52cf7115 1607
4ca79136
RH
1608static GTY(()) tree block_move_fn;
1609
9661b15f 1610void
502b8322 1611init_block_move_fn (const char *asmspec)
4ca79136 1612{
9661b15f 1613 if (!block_move_fn)
4ca79136 1614 {
8fd3cf4e 1615 tree args, fn;
9661b15f 1616
4ca79136 1617 if (TARGET_MEM_FUNCTIONS)
52cf7115 1618 {
4ca79136
RH
1619 fn = get_identifier ("memcpy");
1620 args = build_function_type_list (ptr_type_node, ptr_type_node,
1621 const_ptr_type_node, sizetype,
1622 NULL_TREE);
1623 }
1624 else
1625 {
1626 fn = get_identifier ("bcopy");
1627 args = build_function_type_list (void_type_node, const_ptr_type_node,
1628 ptr_type_node, unsigned_type_node,
1629 NULL_TREE);
52cf7115
JL
1630 }
1631
4ca79136
RH
1632 fn = build_decl (FUNCTION_DECL, fn, args);
1633 DECL_EXTERNAL (fn) = 1;
1634 TREE_PUBLIC (fn) = 1;
1635 DECL_ARTIFICIAL (fn) = 1;
1636 TREE_NOTHROW (fn) = 1;
66c60e67 1637
4ca79136 1638 block_move_fn = fn;
bbf6f052 1639 }
e9a25f70 1640
9661b15f
JJ
1641 if (asmspec)
1642 {
1643 SET_DECL_RTL (block_move_fn, NULL_RTX);
1644 SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));
1645 }
1646}
1647
1648static tree
502b8322 1649emit_block_move_libcall_fn (int for_call)
9661b15f
JJ
1650{
1651 static bool emitted_extern;
1652
1653 if (!block_move_fn)
1654 init_block_move_fn (NULL);
1655
4ca79136
RH
1656 if (for_call && !emitted_extern)
1657 {
1658 emitted_extern = true;
9661b15f
JJ
1659 make_decl_rtl (block_move_fn, NULL);
1660 assemble_external (block_move_fn);
4ca79136
RH
1661 }
1662
9661b15f 1663 return block_move_fn;
bbf6f052 1664}
44bb111a
RH
1665
1666/* A subroutine of emit_block_move. Copy the data via an explicit
1667 loop. This is used only when libcalls are forbidden. */
1668/* ??? It'd be nice to copy in hunks larger than QImode. */
1669
1670static void
502b8322
AJ
1671emit_block_move_via_loop (rtx x, rtx y, rtx size,
1672 unsigned int align ATTRIBUTE_UNUSED)
44bb111a
RH
1673{
1674 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1675 enum machine_mode iter_mode;
1676
1677 iter_mode = GET_MODE (size);
1678 if (iter_mode == VOIDmode)
1679 iter_mode = word_mode;
1680
1681 top_label = gen_label_rtx ();
1682 cmp_label = gen_label_rtx ();
1683 iter = gen_reg_rtx (iter_mode);
1684
1685 emit_move_insn (iter, const0_rtx);
1686
1687 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1688 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1689 do_pending_stack_adjust ();
1690
2e040219 1691 emit_note (NOTE_INSN_LOOP_BEG);
44bb111a
RH
1692
1693 emit_jump (cmp_label);
1694 emit_label (top_label);
1695
1696 tmp = convert_modes (Pmode, iter_mode, iter, true);
1697 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1698 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1699 x = change_address (x, QImode, x_addr);
1700 y = change_address (y, QImode, y_addr);
1701
1702 emit_move_insn (x, y);
1703
1704 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1705 true, OPTAB_LIB_WIDEN);
1706 if (tmp != iter)
1707 emit_move_insn (iter, tmp);
1708
2e040219 1709 emit_note (NOTE_INSN_LOOP_CONT);
44bb111a
RH
1710 emit_label (cmp_label);
1711
1712 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1713 true, top_label);
1714
2e040219 1715 emit_note (NOTE_INSN_LOOP_END);
44bb111a 1716}
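/* Illustrative sketch (not part of expr.c): the RTL emitted by
   emit_block_move_via_loop corresponds to the following byte-copy
   loop, with the counter ITER tested at the bottom (cmp_label) so
   that a zero SIZE copies nothing.  */
static void
block_move_loop_sketch (unsigned char *x, const unsigned char *y,
			unsigned long size)
{
  unsigned long iter = 0;

  goto cmp;
 top:
  x[iter] = y[iter];
  iter++;
 cmp:
  if (iter < size)
    goto top;
}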
bbf6f052
RK
1717\f
1718/* Copy all or part of a value X into registers starting at REGNO.
1719 The number of registers to be filled is NREGS. */
1720
1721void
502b8322 1722move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
bbf6f052
RK
1723{
1724 int i;
381127e8 1725#ifdef HAVE_load_multiple
3a94c984 1726 rtx pat;
381127e8
RL
1727 rtx last;
1728#endif
bbf6f052 1729
72bb9717
RK
1730 if (nregs == 0)
1731 return;
1732
bbf6f052
RK
1733 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1734 x = validize_mem (force_const_mem (mode, x));
1735
1736 /* See if the machine can do this with a load multiple insn. */
1737#ifdef HAVE_load_multiple
c3a02afe 1738 if (HAVE_load_multiple)
bbf6f052 1739 {
c3a02afe 1740 last = get_last_insn ();
38a448ca 1741 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
c3a02afe
RK
1742 GEN_INT (nregs));
1743 if (pat)
1744 {
1745 emit_insn (pat);
1746 return;
1747 }
1748 else
1749 delete_insns_since (last);
bbf6f052 1750 }
bbf6f052
RK
1751#endif
1752
1753 for (i = 0; i < nregs; i++)
38a448ca 1754 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
bbf6f052
RK
1755 operand_subword_force (x, i, mode));
1756}
1757
1758/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
c6b97fac 1759 The number of registers to be read is NREGS. */
0040593d 1760
bbf6f052 1761void
502b8322 1762move_block_from_reg (int regno, rtx x, int nregs)
bbf6f052
RK
1763{
1764 int i;
bbf6f052 1765
2954d7db
RK
1766 if (nregs == 0)
1767 return;
1768
bbf6f052
RK
1769 /* See if the machine can do this with a store multiple insn. */
1770#ifdef HAVE_store_multiple
c3a02afe 1771 if (HAVE_store_multiple)
bbf6f052 1772 {
c6b97fac
AM
1773 rtx last = get_last_insn ();
1774 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1775 GEN_INT (nregs));
c3a02afe
RK
1776 if (pat)
1777 {
1778 emit_insn (pat);
1779 return;
1780 }
1781 else
1782 delete_insns_since (last);
bbf6f052 1783 }
bbf6f052
RK
1784#endif
1785
1786 for (i = 0; i < nregs; i++)
1787 {
1788 rtx tem = operand_subword (x, i, 1, BLKmode);
1789
1790 if (tem == 0)
1791 abort ();
1792
38a448ca 1793 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
bbf6f052
RK
1794 }
1795}
1796
084a1106
JDA
1797/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1798 ORIG, where ORIG is a non-consecutive group of registers represented by
1799 a PARALLEL. The clone is identical to the original except in that the
1800 original set of registers is replaced by a new set of pseudo registers.
1801 The new set has the same modes as the original set. */
1802
1803rtx
502b8322 1804gen_group_rtx (rtx orig)
084a1106
JDA
1805{
1806 int i, length;
1807 rtx *tmps;
1808
1809 if (GET_CODE (orig) != PARALLEL)
1810 abort ();
1811
1812 length = XVECLEN (orig, 0);
703ad42b 1813 tmps = alloca (sizeof (rtx) * length);
084a1106
JDA
1814
1815 /* Skip a NULL entry in first slot. */
1816 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1817
1818 if (i)
1819 tmps[0] = 0;
1820
1821 for (; i < length; i++)
1822 {
1823 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1824 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1825
1826 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1827 }
1828
1829 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1830}
1831
6e985040
AM
1832/* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
1833 where DST is non-consecutive registers represented by a PARALLEL.
1834 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
450b1728 1835 if not known. */
fffa9c1d
JW
1836
1837void
6e985040 1838emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
fffa9c1d 1839{
aac5cc16
RH
1840 rtx *tmps, src;
1841 int start, i;
fffa9c1d 1842
aac5cc16 1843 if (GET_CODE (dst) != PARALLEL)
fffa9c1d
JW
1844 abort ();
1845
1846 /* Check for a NULL entry, used to indicate that the parameter goes
1847 both on the stack and in registers. */
aac5cc16
RH
1848 if (XEXP (XVECEXP (dst, 0, 0), 0))
1849 start = 0;
fffa9c1d 1850 else
aac5cc16
RH
1851 start = 1;
1852
703ad42b 1853 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
aac5cc16 1854
aac5cc16
RH
1855 /* Process the pieces. */
1856 for (i = start; i < XVECLEN (dst, 0); i++)
1857 {
1858 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
770ae6cc
RK
1859 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1860 unsigned int bytelen = GET_MODE_SIZE (mode);
aac5cc16
RH
1861 int shift = 0;
1862
1863 /* Handle trailing fragments that run over the size of the struct. */
8752c357 1864 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
aac5cc16 1865 {
6e985040
AM
1866 /* Arrange to shift the fragment to where it belongs.
1867 extract_bit_field loads to the lsb of the reg. */
1868 if (
1869#ifdef BLOCK_REG_PADDING
1870 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1871 == (BYTES_BIG_ENDIAN ? upward : downward)
1872#else
1873 BYTES_BIG_ENDIAN
1874#endif
1875 )
1876 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
aac5cc16
RH
1877 bytelen = ssize - bytepos;
1878 if (bytelen <= 0)
729a2125 1879 abort ();
aac5cc16
RH
1880 }
1881
f3ce87a9
DE
1882 /* If we won't be loading directly from memory, protect the real source
1883 from strange tricks we might play; but make sure that the source can
1884 be loaded directly into the destination. */
1885 src = orig_src;
1886 if (GET_CODE (orig_src) != MEM
1887 && (!CONSTANT_P (orig_src)
1888 || (GET_MODE (orig_src) != mode
1889 && GET_MODE (orig_src) != VOIDmode)))
1890 {
1891 if (GET_MODE (orig_src) == VOIDmode)
1892 src = gen_reg_rtx (mode);
1893 else
1894 src = gen_reg_rtx (GET_MODE (orig_src));
04050c69 1895
f3ce87a9
DE
1896 emit_move_insn (src, orig_src);
1897 }
1898
aac5cc16
RH
1899 /* Optimize the access just a bit. */
1900 if (GET_CODE (src) == MEM
6e985040
AM
1901 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1902 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
729a2125 1903 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
aac5cc16
RH
1904 && bytelen == GET_MODE_SIZE (mode))
1905 {
1906 tmps[i] = gen_reg_rtx (mode);
f4ef873c 1907 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
fffa9c1d 1908 }
7c4a6db0
JW
1909 else if (GET_CODE (src) == CONCAT)
1910 {
015b1ad1
JDA
1911 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1912 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1913
1914 if ((bytepos == 0 && bytelen == slen0)
1915 || (bytepos != 0 && bytepos + bytelen <= slen))
cbb92744 1916 {
015b1ad1
JDA
1917 /* The following assumes that the concatenated objects all
1918 have the same size. In this case, a simple calculation
1919 can be used to determine the object and the bit field
1920 to be extracted. */
1921 tmps[i] = XEXP (src, bytepos / slen0);
cbb92744
JJ
1922 if (! CONSTANT_P (tmps[i])
1923 && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
1924 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
015b1ad1
JDA
1925 (bytepos % slen0) * BITS_PER_UNIT,
1926 1, NULL_RTX, mode, mode, ssize);
cbb92744 1927 }
58f69841
JH
1928 else if (bytepos == 0)
1929 {
015b1ad1 1930 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
58f69841 1931 emit_move_insn (mem, src);
04050c69 1932 tmps[i] = adjust_address (mem, mode, 0);
58f69841 1933 }
7c4a6db0
JW
1934 else
1935 abort ();
1936 }
9c0631a7
AH
1937 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1938 SIMD register, which is currently broken. While we get GCC
1939 to emit proper RTL for these cases, let's dump to memory. */
1940 else if (VECTOR_MODE_P (GET_MODE (dst))
1941 && GET_CODE (src) == REG)
1942 {
1943 int slen = GET_MODE_SIZE (GET_MODE (src));
1944 rtx mem;
1945
1946 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1947 emit_move_insn (mem, src);
1948 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1949 }
d3a16cbd
FJ
1950 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1951 && XVECLEN (dst, 0) > 1)
1952 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
f3ce87a9 1953 else if (CONSTANT_P (src)
2ee5437b
RH
1954 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
1955 tmps[i] = src;
fffa9c1d 1956 else
19caa751
RK
1957 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1958 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
04050c69 1959 mode, mode, ssize);
fffa9c1d 1960
6e985040 1961 if (shift)
19caa751
RK
1962 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1963 tmps[i], 0, OPTAB_WIDEN);
fffa9c1d 1964 }
19caa751 1965
3a94c984 1966 emit_queue ();
aac5cc16
RH
1967
1968 /* Copy the extracted pieces into the proper (probable) hard regs. */
1969 for (i = start; i < XVECLEN (dst, 0); i++)
1970 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
fffa9c1d
JW
1971}
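/* Illustrative sketch (not part of expr.c): the trailing-fragment shift
   computed above.  For example, with SSIZE == 6, BYTEPOS == 4 and an
   SImode piece (BYTELEN == 4), only 2 bytes of the struct remain, so on
   a target that pads the fragment on the left the value must be shifted
   left by (4 - 2) * 8 == 16 bits after extract_bit_field loads it into
   the lsb of the register.  */
static int
trailing_fragment_shift_sketch (int ssize, int bytepos, int bytelen,
				int bits_per_unit)
{
  if (ssize >= 0 && bytepos + bytelen > ssize)
    return (bytelen - (ssize - bytepos)) * bits_per_unit;
  return 0;
}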
1972
084a1106
JDA
1973/* Emit code to move a block SRC to block DST, where SRC and DST are
1974 non-consecutive groups of registers, each represented by a PARALLEL. */
1975
1976void
502b8322 1977emit_group_move (rtx dst, rtx src)
084a1106
JDA
1978{
1979 int i;
1980
1981 if (GET_CODE (src) != PARALLEL
1982 || GET_CODE (dst) != PARALLEL
1983 || XVECLEN (src, 0) != XVECLEN (dst, 0))
1984 abort ();
1985
1986 /* Skip first entry if NULL. */
1987 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1988 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1989 XEXP (XVECEXP (src, 0, i), 0));
1990}
1991
6e985040
AM
1992/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1993 where SRC is non-consecutive registers represented by a PARALLEL.
1994 SSIZE represents the total size of block ORIG_DST, or -1 if not
1995 known. */
fffa9c1d
JW
1996
1997void
6e985040 1998emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
fffa9c1d 1999{
aac5cc16
RH
2000 rtx *tmps, dst;
2001 int start, i;
fffa9c1d 2002
aac5cc16 2003 if (GET_CODE (src) != PARALLEL)
fffa9c1d
JW
2004 abort ();
2005
2006 /* Check for a NULL entry, used to indicate that the parameter goes
2007 both on the stack and in registers. */
aac5cc16
RH
2008 if (XEXP (XVECEXP (src, 0, 0), 0))
2009 start = 0;
fffa9c1d 2010 else
aac5cc16
RH
2011 start = 1;
2012
703ad42b 2013 tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));
fffa9c1d 2014
aac5cc16
RH
2015 /* Copy the (probable) hard regs into pseudos. */
2016 for (i = start; i < XVECLEN (src, 0); i++)
fffa9c1d 2017 {
aac5cc16
RH
2018 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2019 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2020 emit_move_insn (tmps[i], reg);
2021 }
3a94c984 2022 emit_queue ();
fffa9c1d 2023
aac5cc16
RH
2024 /* If we won't be storing directly into memory, protect the real destination
2025 from strange tricks we might play. */
2026 dst = orig_dst;
10a9f2be
JW
2027 if (GET_CODE (dst) == PARALLEL)
2028 {
2029 rtx temp;
2030
2031 /* We can get a PARALLEL dst if there is a conditional expression in
2032 a return statement. In that case, the dst and src are the same,
2033 so no action is necessary. */
2034 if (rtx_equal_p (dst, src))
2035 return;
2036
2037 /* It is unclear if we can ever reach here, but we may as well handle
2038 it. Allocate a temporary, and split this into a store/load to/from
2039 the temporary. */
2040
2041 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
6e985040
AM
2042 emit_group_store (temp, src, type, ssize);
2043 emit_group_load (dst, temp, type, ssize);
10a9f2be
JW
2044 return;
2045 }
75897075 2046 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
aac5cc16
RH
2047 {
2048 dst = gen_reg_rtx (GET_MODE (orig_dst));
2049 /* Make life a bit easier for combine. */
8ae91fc0 2050 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
aac5cc16 2051 }
aac5cc16
RH
2052
2053 /* Process the pieces. */
2054 for (i = start; i < XVECLEN (src, 0); i++)
2055 {
770ae6cc 2056 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
aac5cc16 2057 enum machine_mode mode = GET_MODE (tmps[i]);
770ae6cc 2058 unsigned int bytelen = GET_MODE_SIZE (mode);
6ddae612 2059 rtx dest = dst;
aac5cc16
RH
2060
2061 /* Handle trailing fragments that run over the size of the struct. */
8752c357 2062 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
71bc0330 2063 {
6e985040
AM
2064 /* store_bit_field always takes its value from the lsb.
2065 Move the fragment to the lsb if it's not already there. */
2066 if (
2067#ifdef BLOCK_REG_PADDING
2068 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2069 == (BYTES_BIG_ENDIAN ? upward : downward)
2070#else
2071 BYTES_BIG_ENDIAN
2072#endif
2073 )
aac5cc16
RH
2074 {
2075 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2076 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2077 tmps[i], 0, OPTAB_WIDEN);
2078 }
2079 bytelen = ssize - bytepos;
71bc0330 2080 }
fffa9c1d 2081
6ddae612
JJ
2082 if (GET_CODE (dst) == CONCAT)
2083 {
2084 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2085 dest = XEXP (dst, 0);
2086 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2087 {
2088 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2089 dest = XEXP (dst, 1);
2090 }
0d446150
JH
2091 else if (bytepos == 0 && XVECLEN (src, 0))
2092 {
2093 dest = assign_stack_temp (GET_MODE (dest),
2094 GET_MODE_SIZE (GET_MODE (dest)), 0);
2095 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2096 tmps[i]);
2097 dst = dest;
2098 break;
2099 }
6ddae612
JJ
2100 else
2101 abort ();
2102 }
2103
aac5cc16 2104 /* Optimize the access just a bit. */
6ddae612 2105 if (GET_CODE (dest) == MEM
6e985040
AM
2106 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2107 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
729a2125 2108 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
aac5cc16 2109 && bytelen == GET_MODE_SIZE (mode))
6ddae612 2110 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
aac5cc16 2111 else
6ddae612 2112 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
04050c69 2113 mode, tmps[i], ssize);
fffa9c1d 2114 }
729a2125 2115
3a94c984 2116 emit_queue ();
aac5cc16
RH
2117
2118 /* Copy from the pseudo into the (probable) hard reg. */
0d446150 2119 if (orig_dst != dst)
aac5cc16 2120 emit_move_insn (orig_dst, dst);
fffa9c1d
JW
2121}
2122
c36fce9a
GRK
2123/* Generate code to copy a BLKmode object of TYPE out of a
2124 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2125 is null, a stack temporary is created. TGTBLK is returned.
2126
c988af2b
RS
2127 The purpose of this routine is to handle functions that return
2128 BLKmode structures in registers. Some machines (the PA for example)
2129 want to return all small structures in registers regardless of the
2130 structure's alignment. */
c36fce9a
GRK
2131
2132rtx
502b8322 2133copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
c36fce9a 2134{
19caa751
RK
2135 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2136 rtx src = NULL, dst = NULL;
2137 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
c988af2b 2138 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
19caa751
RK
2139
2140 if (tgtblk == 0)
2141 {
1da68f56
RK
2142 tgtblk = assign_temp (build_qualified_type (type,
2143 (TYPE_QUALS (type)
2144 | TYPE_QUAL_CONST)),
2145 0, 1, 1);
19caa751
RK
2146 preserve_temp_slots (tgtblk);
2147 }
3a94c984 2148
1ed1b4fb 2149 /* This code assumes srcreg is at least a full word. If it isn't, copy it
9ac3e73b 2150 into a new pseudo which is a full word. */
0d7839da 2151
19caa751
RK
2152 if (GET_MODE (srcreg) != BLKmode
2153 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
9ac3e73b 2154 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
19caa751 2155
c988af2b
RS
2156 /* If the structure doesn't take up a whole number of words, see whether
2157 SRCREG is padded on the left or on the right. If it's on the left,
2158 set PADDING_CORRECTION to the number of bits to skip.
2159
2160 In most ABIs, the structure will be returned at the least significant end of
2161 the register, which translates to right padding on little-endian
2162 targets and left padding on big-endian targets. The opposite
2163 holds if the structure is returned at the most significant
2164 end of the register. */
2165 if (bytes % UNITS_PER_WORD != 0
2166 && (targetm.calls.return_in_msb (type)
2167 ? !BYTES_BIG_ENDIAN
2168 : BYTES_BIG_ENDIAN))
2169 padding_correction
19caa751
RK
2170 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2171
2172 /* Copy the structure BITSIZE bits at a time.
3a94c984 2173
19caa751
RK
2174 We could probably emit more efficient code for machines which do not use
2175 strict alignment, but it doesn't seem worth the effort at the current
2176 time. */
c988af2b 2177 for (bitpos = 0, xbitpos = padding_correction;
19caa751
RK
2178 bitpos < bytes * BITS_PER_UNIT;
2179 bitpos += bitsize, xbitpos += bitsize)
2180 {
3a94c984 2181 /* We need a new source operand each time xbitpos is on a
c988af2b 2182 word boundary and when xbitpos == padding_correction
19caa751
RK
2183 (the first time through). */
2184 if (xbitpos % BITS_PER_WORD == 0
c988af2b 2185 || xbitpos == padding_correction)
b47f8cfc
JH
2186 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2187 GET_MODE (srcreg));
19caa751
RK
2188
2189 /* We need a new destination operand each time bitpos is on
2190 a word boundary. */
2191 if (bitpos % BITS_PER_WORD == 0)
2192 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
3a94c984 2193
19caa751
RK
2194 /* Use xbitpos for the source extraction (right justified) and
2195 bitpos for the destination store (left justified). */
2196 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2197 extract_bit_field (src, bitsize,
2198 xbitpos % BITS_PER_WORD, 1,
2199 NULL_RTX, word_mode, word_mode,
04050c69
RK
2200 BITS_PER_WORD),
2201 BITS_PER_WORD);
19caa751
RK
2202 }
2203
2204 return tgtblk;
c36fce9a
GRK
2205}
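/* Illustrative sketch (not part of expr.c): the PADDING_CORRECTION used
   above.  For a 6-byte struct on a target with 4-byte words where the
   value is padded on the left, 6 % 4 == 2 bytes occupy the partial word,
   so the copy loop starts 32 - 2 * 8 == 16 bits into the first source
   word.  */
static unsigned int
padding_correction_sketch (unsigned int bytes, unsigned int units_per_word,
			   unsigned int bits_per_word,
			   unsigned int bits_per_unit)
{
  if (bytes % units_per_word != 0)
    return bits_per_word - (bytes % units_per_word) * bits_per_unit;
  return 0;
}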
2206
94b25f81
RK
2207/* Add a USE expression for REG to the (possibly empty) list pointed
2208 to by CALL_FUSAGE. REG must denote a hard register. */
bbf6f052
RK
2209
2210void
502b8322 2211use_reg (rtx *call_fusage, rtx reg)
b3f8cf4a 2212{
0304dfbb
DE
2213 if (GET_CODE (reg) != REG
2214 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
3a94c984 2215 abort ();
b3f8cf4a
RK
2216
2217 *call_fusage
38a448ca
RH
2218 = gen_rtx_EXPR_LIST (VOIDmode,
2219 gen_rtx_USE (VOIDmode, reg), *call_fusage);
b3f8cf4a
RK
2220}
2221
94b25f81
RK
2222/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2223 starting at REGNO. All of these registers must be hard registers. */
b3f8cf4a
RK
2224
2225void
502b8322 2226use_regs (rtx *call_fusage, int regno, int nregs)
bbf6f052 2227{
0304dfbb 2228 int i;
bbf6f052 2229
0304dfbb
DE
2230 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2231 abort ();
2232
2233 for (i = 0; i < nregs; i++)
e50126e8 2234 use_reg (call_fusage, regno_reg_rtx[regno + i]);
bbf6f052 2235}
fffa9c1d
JW
2236
2237/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2238 PARALLEL REGS. This is for calls that pass values in multiple
2239 non-contiguous locations. The Irix 6 ABI has examples of this. */
2240
2241void
502b8322 2242use_group_regs (rtx *call_fusage, rtx regs)
fffa9c1d
JW
2243{
2244 int i;
2245
6bd35f86
DE
2246 for (i = 0; i < XVECLEN (regs, 0); i++)
2247 {
2248 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
fffa9c1d 2249
6bd35f86
DE
2250 /* A NULL entry means the parameter goes both on the stack and in
2251 registers. This can also be a MEM for targets that pass values
2252 partially on the stack and partially in registers. */
e9a25f70 2253 if (reg != 0 && GET_CODE (reg) == REG)
6bd35f86
DE
2254 use_reg (call_fusage, reg);
2255 }
fffa9c1d 2256}
bbf6f052 2257\f
57814e5e 2258
cf5124f6
RS
2259/* Determine whether the LEN bytes generated by CONSTFUN can be
2260 stored to memory using several move instructions. CONSTFUNDATA is
2261 a pointer which will be passed as argument in every CONSTFUN call.
2262 ALIGN is maximum alignment we can assume. Return nonzero if a
2263 call to store_by_pieces should succeed. */
2264
57814e5e 2265int
502b8322
AJ
2266can_store_by_pieces (unsigned HOST_WIDE_INT len,
2267 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2268 void *constfundata, unsigned int align)
57814e5e 2269{
98166639 2270 unsigned HOST_WIDE_INT max_size, l;
57814e5e
JJ
2271 HOST_WIDE_INT offset = 0;
2272 enum machine_mode mode, tmode;
2273 enum insn_code icode;
2274 int reverse;
2275 rtx cst;
2276
2c430630
RS
2277 if (len == 0)
2278 return 1;
2279
4977bab6 2280 if (! STORE_BY_PIECES_P (len, align))
57814e5e
JJ
2281 return 0;
2282
2283 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2284 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2285 align = MOVE_MAX * BITS_PER_UNIT;
2286
2287 /* We would first store what we can in the largest integer mode, then go to
2288 successively smaller modes. */
2289
2290 for (reverse = 0;
2291 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2292 reverse++)
2293 {
2294 l = len;
2295 mode = VOIDmode;
cf5124f6 2296 max_size = STORE_MAX_PIECES + 1;
57814e5e
JJ
2297 while (max_size > 1)
2298 {
2299 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2300 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2301 if (GET_MODE_SIZE (tmode) < max_size)
2302 mode = tmode;
2303
2304 if (mode == VOIDmode)
2305 break;
2306
2307 icode = mov_optab->handlers[(int) mode].insn_code;
2308 if (icode != CODE_FOR_nothing
2309 && align >= GET_MODE_ALIGNMENT (mode))
2310 {
2311 unsigned int size = GET_MODE_SIZE (mode);
2312
2313 while (l >= size)
2314 {
2315 if (reverse)
2316 offset -= size;
2317
2318 cst = (*constfun) (constfundata, offset, mode);
2319 if (!LEGITIMATE_CONSTANT_P (cst))
2320 return 0;
2321
2322 if (!reverse)
2323 offset += size;
2324
2325 l -= size;
2326 }
2327 }
2328
2329 max_size = GET_MODE_SIZE (mode);
2330 }
2331
2332 /* The code above should have handled everything. */
2333 if (l != 0)
2334 abort ();
2335 }
2336
2337 return 1;
2338}
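/* Illustrative sketch (not part of expr.c): the greedy largest-mode-first
   decomposition used by the by-pieces routines above.  Assuming integer
   modes of 8, 4, 2 and 1 bytes are all available and sufficiently
   aligned, a length of 11 bytes is covered as 8 + 2 + 1, i.e. three
   pieces.  */
static int
by_pieces_npieces_sketch (unsigned long len)
{
  static const unsigned int sizes[] = { 8, 4, 2, 1 };
  unsigned int i;
  int npieces = 0;

  for (i = 0; i < sizeof sizes / sizeof sizes[0]; i++)
    while (len >= sizes[i])
      {
	len -= sizes[i];
	npieces++;
      }
  return npieces;
}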
2339
2340/* Generate several move instructions to store LEN bytes generated by
2341 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2342 pointer which will be passed as argument in every CONSTFUN call.
8fd3cf4e
JJ
2343 ALIGN is maximum alignment we can assume.
2344 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
2345 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
2346 stpcpy. */
57814e5e 2347
8fd3cf4e 2348rtx
502b8322
AJ
2349store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2350 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2351 void *constfundata, unsigned int align, int endp)
57814e5e
JJ
2352{
2353 struct store_by_pieces data;
2354
2c430630
RS
2355 if (len == 0)
2356 {
2357 if (endp == 2)
2358 abort ();
2359 return to;
2360 }
2361
4977bab6 2362 if (! STORE_BY_PIECES_P (len, align))
57814e5e
JJ
2363 abort ();
2364 to = protect_from_queue (to, 1);
2365 data.constfun = constfun;
2366 data.constfundata = constfundata;
2367 data.len = len;
2368 data.to = to;
2369 store_by_pieces_1 (&data, align);
8fd3cf4e
JJ
2370 if (endp)
2371 {
2372 rtx to1;
2373
2374 if (data.reverse)
2375 abort ();
2376 if (data.autinc_to)
2377 {
2378 if (endp == 2)
2379 {
2380 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2381 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2382 else
2383 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2384 -1));
2385 }
2386 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2387 data.offset);
2388 }
2389 else
2390 {
2391 if (endp == 2)
2392 --data.offset;
2393 to1 = adjust_address (data.to, QImode, data.offset);
2394 }
2395 return to1;
2396 }
2397 else
2398 return data.to;
57814e5e
JJ
2399}
2400
19caa751
RK
2401/* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2402 rtx with BLKmode). The caller must pass TO through protect_from_queue
2403 before calling. ALIGN is maximum alignment we can assume. */
9de08200
RK
2404
2405static void
342e2b74 2406clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
9de08200 2407{
57814e5e
JJ
2408 struct store_by_pieces data;
2409
2c430630
RS
2410 if (len == 0)
2411 return;
2412
57814e5e 2413 data.constfun = clear_by_pieces_1;
df4ae160 2414 data.constfundata = NULL;
57814e5e
JJ
2415 data.len = len;
2416 data.to = to;
2417 store_by_pieces_1 (&data, align);
2418}
2419
2420/* Callback routine for clear_by_pieces.
2421 Return const0_rtx unconditionally. */
2422
2423static rtx
502b8322
AJ
2424clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2425 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2426 enum machine_mode mode ATTRIBUTE_UNUSED)
57814e5e
JJ
2427{
2428 return const0_rtx;
2429}
2430
2431/* Subroutine of clear_by_pieces and store_by_pieces.
2432 Generate several move instructions to store LEN bytes of block TO. (A MEM
2433 rtx with BLKmode). The caller must pass TO through protect_from_queue
2434 before calling. ALIGN is maximum alignment we can assume. */
2435
2436static void
502b8322
AJ
2437store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2438 unsigned int align ATTRIBUTE_UNUSED)
57814e5e
JJ
2439{
2440 rtx to_addr = XEXP (data->to, 0);
cf5124f6 2441 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
fbe1758d
AM
2442 enum machine_mode mode = VOIDmode, tmode;
2443 enum insn_code icode;
9de08200 2444
57814e5e
JJ
2445 data->offset = 0;
2446 data->to_addr = to_addr;
2447 data->autinc_to
9de08200
RK
2448 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2449 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2450
57814e5e
JJ
2451 data->explicit_inc_to = 0;
2452 data->reverse
9de08200 2453 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
57814e5e
JJ
2454 if (data->reverse)
2455 data->offset = data->len;
9de08200 2456
57814e5e 2457 /* If storing requires more than two move insns,
9de08200
RK
2458 copy addresses to registers (to make displacements shorter)
2459 and use post-increment if available. */
57814e5e
JJ
2460 if (!data->autinc_to
2461 && move_by_pieces_ninsns (data->len, align) > 2)
9de08200 2462 {
3a94c984 2463 /* Determine the main mode we'll be using. */
fbe1758d
AM
2464 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2465 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2466 if (GET_MODE_SIZE (tmode) < max_size)
2467 mode = tmode;
2468
57814e5e 2469 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
9de08200 2470 {
57814e5e
JJ
2471 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2472 data->autinc_to = 1;
2473 data->explicit_inc_to = -1;
9de08200 2474 }
3bdf5ad1 2475
57814e5e
JJ
2476 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2477 && ! data->autinc_to)
9de08200 2478 {
57814e5e
JJ
2479 data->to_addr = copy_addr_to_reg (to_addr);
2480 data->autinc_to = 1;
2481 data->explicit_inc_to = 1;
9de08200 2482 }
3bdf5ad1 2483
57814e5e
JJ
2484 if (!data->autinc_to && CONSTANT_P (to_addr))
2485 data->to_addr = copy_addr_to_reg (to_addr);
9de08200
RK
2486 }
2487
e1565e65 2488 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
19caa751 2489 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
bdb429a5 2490 align = MOVE_MAX * BITS_PER_UNIT;
9de08200 2491
57814e5e 2492 /* First store what we can in the largest integer mode, then go to
9de08200
RK
2493 successively smaller modes. */
2494
2495 while (max_size > 1)
2496 {
9de08200
RK
2497 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2498 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2499 if (GET_MODE_SIZE (tmode) < max_size)
2500 mode = tmode;
2501
2502 if (mode == VOIDmode)
2503 break;
2504
2505 icode = mov_optab->handlers[(int) mode].insn_code;
19caa751 2506 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
57814e5e 2507 store_by_pieces_2 (GEN_FCN (icode), mode, data);
9de08200
RK
2508
2509 max_size = GET_MODE_SIZE (mode);
2510 }
2511
2512 /* The code above should have handled everything. */
57814e5e 2513 if (data->len != 0)
9de08200
RK
2514 abort ();
2515}
2516
57814e5e 2517/* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
9de08200
RK
2518 with move instructions for mode MODE. GENFUN is the gen_... function
2519 to make a move insn for that mode. DATA has all the other info. */
2520
2521static void
502b8322
AJ
2522store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2523 struct store_by_pieces *data)
9de08200 2524{
3bdf5ad1 2525 unsigned int size = GET_MODE_SIZE (mode);
57814e5e 2526 rtx to1, cst;
9de08200
RK
2527
2528 while (data->len >= size)
2529 {
3bdf5ad1
RK
2530 if (data->reverse)
2531 data->offset -= size;
9de08200 2532
3bdf5ad1 2533 if (data->autinc_to)
630036c6
JJ
2534 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2535 data->offset);
3a94c984 2536 else
f4ef873c 2537 to1 = adjust_address (data->to, mode, data->offset);
9de08200 2538
940da324 2539 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
57814e5e
JJ
2540 emit_insn (gen_add2_insn (data->to_addr,
2541 GEN_INT (-(HOST_WIDE_INT) size)));
9de08200 2542
57814e5e
JJ
2543 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2544 emit_insn ((*genfun) (to1, cst));
3bdf5ad1 2545
940da324 2546 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
9de08200 2547 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
9de08200 2548
3bdf5ad1
RK
2549 if (! data->reverse)
2550 data->offset += size;
9de08200
RK
2551
2552 data->len -= size;
2553 }
2554}
2555\f
19caa751 2556/* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
8ac61af7 2557 its length in bytes. */
e9a25f70
JL
2558
2559rtx
502b8322 2560clear_storage (rtx object, rtx size)
bbf6f052 2561{
e9a25f70 2562 rtx retval = 0;
8ac61af7
RK
2563 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2564 : GET_MODE_ALIGNMENT (GET_MODE (object)));
e9a25f70 2565
fcf1b822
RK
2566 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2567 just move a zero. Otherwise, do this a piece at a time. */
69ef87e2 2568 if (GET_MODE (object) != BLKmode
fcf1b822 2569 && GET_CODE (size) == CONST_INT
4ca79136 2570 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
fcf1b822
RK
2571 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2572 else
bbf6f052 2573 {
9de08200
RK
2574 object = protect_from_queue (object, 1);
2575 size = protect_from_queue (size, 0);
2576
6972c506 2577 if (size == const0_rtx)
2c430630
RS
2578 ;
2579 else if (GET_CODE (size) == CONST_INT
78762e3b 2580 && CLEAR_BY_PIECES_P (INTVAL (size), align))
9de08200 2581 clear_by_pieces (object, INTVAL (size), align);
4ca79136
RH
2582 else if (clear_storage_via_clrstr (object, size, align))
2583 ;
9de08200 2584 else
4ca79136
RH
2585 retval = clear_storage_via_libcall (object, size);
2586 }
2587
2588 return retval;
2589}
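/* Summary of the dispatch above (illustrative, not part of expr.c):
   a non-BLKmode object whose size equals its mode size is cleared with
   a single move of CONST0_RTX; otherwise clearing is attempted by
   pieces (CLEAR_BY_PIECES_P), then via a clrstr expander, and finally
   through a library call to memset or bzero.  */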
2590
2591/* A subroutine of clear_storage. Expand a clrstr pattern;
2592 return true if successful. */
2593
2594static bool
502b8322 2595clear_storage_via_clrstr (rtx object, rtx size, unsigned int align)
4ca79136
RH
2596{
2597 /* Try the most limited insn first, because there's no point
2598 including more than one in the machine description unless
2599 the more limited one has some advantage. */
2600
2601 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2602 enum machine_mode mode;
2603
2604 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2605 mode = GET_MODE_WIDER_MODE (mode))
2606 {
2607 enum insn_code code = clrstr_optab[(int) mode];
2608 insn_operand_predicate_fn pred;
2609
2610 if (code != CODE_FOR_nothing
2611 /* We don't need MODE to be narrower than
2612 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2613 the mode mask, as it is returned by the macro, it will
2614 definitely be less than the actual mode mask. */
2615 && ((GET_CODE (size) == CONST_INT
2616 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2617 <= (GET_MODE_MASK (mode) >> 1)))
2618 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2619 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2620 || (*pred) (object, BLKmode))
2621 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2622 || (*pred) (opalign, VOIDmode)))
9de08200 2623 {
4ca79136
RH
2624 rtx op1;
2625 rtx last = get_last_insn ();
2626 rtx pat;
9de08200 2627
4ca79136
RH
2628 op1 = convert_to_mode (mode, size, 1);
2629 pred = insn_data[(int) code].operand[1].predicate;
2630 if (pred != 0 && ! (*pred) (op1, mode))
2631 op1 = copy_to_mode_reg (mode, op1);
9de08200 2632
4ca79136
RH
2633 pat = GEN_FCN ((int) code) (object, op1, opalign);
2634 if (pat)
9de08200 2635 {
4ca79136
RH
2636 emit_insn (pat);
2637 return true;
2638 }
2639 else
2640 delete_insns_since (last);
2641 }
2642 }
9de08200 2643
4ca79136
RH
2644 return false;
2645}
9de08200 2646
4ca79136
RH
2647/* A subroutine of clear_storage. Expand a call to memset or bzero.
2648 Return the return value of memset, 0 otherwise. */
9de08200 2649
4ca79136 2650static rtx
502b8322 2651clear_storage_via_libcall (rtx object, rtx size)
4ca79136
RH
2652{
2653 tree call_expr, arg_list, fn, object_tree, size_tree;
2654 enum machine_mode size_mode;
2655 rtx retval;
9de08200 2656
4ca79136 2657 /* OBJECT or SIZE may have been passed through protect_from_queue.
52cf7115 2658
4ca79136
RH
2659 It is unsafe to save the value generated by protect_from_queue
2660 and reuse it later. Consider what happens if emit_queue is
2661 called before the return value from protect_from_queue is used.
52cf7115 2662
4ca79136
RH
2663 Expansion of the CALL_EXPR below will call emit_queue before
2664 we are finished emitting RTL for argument setup. So if we are
2665 not careful we could get the wrong value for an argument.
52cf7115 2666
4ca79136
RH
2667 To avoid this problem we go ahead and emit code to copy OBJECT
2668 and SIZE into new pseudos. We can then place those new pseudos
2669 into an RTL_EXPR and use them later, even after a call to
2670 emit_queue.
52cf7115 2671
4ca79136
RH
2672 Note this is not strictly needed for library calls since they
2673 do not call emit_queue before loading their arguments. However,
2674 we may need to have library calls call emit_queue in the future
2675 since failing to do so could cause problems for targets which
2676 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
52cf7115 2677
4ca79136 2678 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
52cf7115 2679
4ca79136
RH
2680 if (TARGET_MEM_FUNCTIONS)
2681 size_mode = TYPE_MODE (sizetype);
2682 else
2683 size_mode = TYPE_MODE (unsigned_type_node);
2684 size = convert_to_mode (size_mode, size, 1);
2685 size = copy_to_mode_reg (size_mode, size);
52cf7115 2686
4ca79136
RH
2687 /* It is incorrect to use the libcall calling conventions to call
2688 memset in this context. This could be a user call to memset and
2689 the user may wish to examine the return value from memset. For
2690 targets where libcalls and normal calls have different conventions
2691 for returning pointers, we could end up generating incorrect code.
4bc973ae 2692
4ca79136 2693 For convenience, we generate the call to bzero this way as well. */
4bc973ae 2694
4ca79136
RH
2695 object_tree = make_tree (ptr_type_node, object);
2696 if (TARGET_MEM_FUNCTIONS)
2697 size_tree = make_tree (sizetype, size);
2698 else
2699 size_tree = make_tree (unsigned_type_node, size);
2700
2701 fn = clear_storage_libcall_fn (true);
2702 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2703 if (TARGET_MEM_FUNCTIONS)
2704 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2705 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2706
2707 /* Now we have to build up the CALL_EXPR itself. */
2708 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2709 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2710 call_expr, arg_list, NULL_TREE);
4ca79136
RH
2711
2712 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2713
2714 /* If we are initializing a readonly value, show the above call
2715 clobbered it. Otherwise, a load from it may erroneously be
2716 hoisted from a loop. */
2717 if (RTX_UNCHANGING_P (object))
2718 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2719
2720 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
2721}
2722
2723/* A subroutine of clear_storage_via_libcall. Create the tree node
2724 for the function we use for block clears. The first time FOR_CALL
2725 is true, we call assemble_external. */
2726
2727static GTY(()) tree block_clear_fn;
66c60e67 2728
9661b15f 2729void
502b8322 2730init_block_clear_fn (const char *asmspec)
4ca79136 2731{
9661b15f 2732 if (!block_clear_fn)
4ca79136 2733 {
9661b15f
JJ
2734 tree fn, args;
2735
4ca79136
RH
2736 if (TARGET_MEM_FUNCTIONS)
2737 {
2738 fn = get_identifier ("memset");
2739 args = build_function_type_list (ptr_type_node, ptr_type_node,
2740 integer_type_node, sizetype,
2741 NULL_TREE);
2742 }
2743 else
2744 {
2745 fn = get_identifier ("bzero");
2746 args = build_function_type_list (void_type_node, ptr_type_node,
2747 unsigned_type_node, NULL_TREE);
9de08200 2748 }
4ca79136
RH
2749
2750 fn = build_decl (FUNCTION_DECL, fn, args);
2751 DECL_EXTERNAL (fn) = 1;
2752 TREE_PUBLIC (fn) = 1;
2753 DECL_ARTIFICIAL (fn) = 1;
2754 TREE_NOTHROW (fn) = 1;
2755
2756 block_clear_fn = fn;
bbf6f052 2757 }
e9a25f70 2758
9661b15f
JJ
2759 if (asmspec)
2760 {
2761 SET_DECL_RTL (block_clear_fn, NULL_RTX);
2762 SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
2763 }
2764}
2765
2766static tree
502b8322 2767clear_storage_libcall_fn (int for_call)
9661b15f
JJ
2768{
2769 static bool emitted_extern;
2770
2771 if (!block_clear_fn)
2772 init_block_clear_fn (NULL);
2773
4ca79136
RH
2774 if (for_call && !emitted_extern)
2775 {
2776 emitted_extern = true;
9661b15f
JJ
2777 make_decl_rtl (block_clear_fn, NULL);
2778 assemble_external (block_clear_fn);
4ca79136 2779 }
bbf6f052 2780
9661b15f 2781 return block_clear_fn;
4ca79136
RH
2782}
2783\f
bbf6f052
RK
2784/* Generate code to copy Y into X.
2785 Both Y and X must have the same mode, except that
2786 Y can be a constant with VOIDmode.
2787 This mode cannot be BLKmode; use emit_block_move for that.
2788
2789 Return the last instruction emitted. */
2790
2791rtx
502b8322 2792emit_move_insn (rtx x, rtx y)
bbf6f052
RK
2793{
2794 enum machine_mode mode = GET_MODE (x);
de1b33dd 2795 rtx y_cst = NULL_RTX;
0c19a26f 2796 rtx last_insn, set;
bbf6f052
RK
2797
2798 x = protect_from_queue (x, 1);
2799 y = protect_from_queue (y, 0);
2800
2801 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2802 abort ();
2803
ee5332b8
RH
2804 /* Never force constant_p_rtx to memory. */
2805 if (GET_CODE (y) == CONSTANT_P_RTX)
2806 ;
51286de6 2807 else if (CONSTANT_P (y))
de1b33dd 2808 {
51286de6 2809 if (optimize
075fc17a 2810 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
51286de6
RH
2811 && (last_insn = compress_float_constant (x, y)))
2812 return last_insn;
2813
0c19a26f
RS
2814 y_cst = y;
2815
51286de6
RH
2816 if (!LEGITIMATE_CONSTANT_P (y))
2817 {
51286de6 2818 y = force_const_mem (mode, y);
3a04ff64
RH
2819
2820 /* If the target's cannot_force_const_mem prevented the spill,
2821 assume that the target's move expanders will also take care
2822 of the non-legitimate constant. */
2823 if (!y)
2824 y = y_cst;
51286de6 2825 }
de1b33dd 2826 }
bbf6f052
RK
2827
2828 /* If X or Y are memory references, verify that their addresses are valid
2829 for the machine. */
2830 if (GET_CODE (x) == MEM
2831 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2832 && ! push_operand (x, GET_MODE (x)))
2833 || (flag_force_addr
2834 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
792760b9 2835 x = validize_mem (x);
bbf6f052
RK
2836
2837 if (GET_CODE (y) == MEM
2838 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2839 || (flag_force_addr
2840 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
792760b9 2841 y = validize_mem (y);
bbf6f052
RK
2842
2843 if (mode == BLKmode)
2844 abort ();
2845
de1b33dd
AO
2846 last_insn = emit_move_insn_1 (x, y);
2847
0c19a26f
RS
2848 if (y_cst && GET_CODE (x) == REG
2849 && (set = single_set (last_insn)) != NULL_RTX
2850 && SET_DEST (set) == x
2851 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3d238248 2852 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
de1b33dd
AO
2853
2854 return last_insn;
261c4230
RS
2855}
2856
2857/* Low level part of emit_move_insn.
2858 Called just like emit_move_insn, but assumes X and Y
2859 are basically valid. */
2860
2861rtx
502b8322 2862emit_move_insn_1 (rtx x, rtx y)
261c4230
RS
2863{
2864 enum machine_mode mode = GET_MODE (x);
2865 enum machine_mode submode;
2866 enum mode_class class = GET_MODE_CLASS (mode);
261c4230 2867
dbbbbf3b 2868 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
3a94c984 2869 abort ();
76bbe028 2870
bbf6f052
RK
2871 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2872 return
2873 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2874
89742723 2875 /* Expand complex moves by moving real part and imag part, if possible. */
7308a047 2876 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
27e58a70 2877 && BLKmode != (submode = GET_MODE_INNER (mode))
7308a047
RS
2878 && (mov_optab->handlers[(int) submode].insn_code
2879 != CODE_FOR_nothing))
2880 {
2881 /* Don't split destination if it is a stack push. */
2882 int stack = push_operand (x, GET_MODE (x));
7308a047 2883
79ce92d7 2884#ifdef PUSH_ROUNDING
0e9cbd11
KH
2885 /* In case we output to the stack, but the size is smaller than the
2886 machine can push exactly, we need to use move instructions. */
1a06f5fe 2887 if (stack
bb93b973
RK
2888 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2889 != GET_MODE_SIZE (submode)))
1a06f5fe
JH
2890 {
2891 rtx temp;
bb93b973 2892 HOST_WIDE_INT offset1, offset2;
1a06f5fe
JH
2893
2894 /* Do not use anti_adjust_stack, since we don't want to update
2895 stack_pointer_delta. */
2896 temp = expand_binop (Pmode,
2897#ifdef STACK_GROWS_DOWNWARD
2898 sub_optab,
2899#else
2900 add_optab,
2901#endif
2902 stack_pointer_rtx,
2903 GEN_INT
bb93b973
RK
2904 (PUSH_ROUNDING
2905 (GET_MODE_SIZE (GET_MODE (x)))),
2906 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2907
1a06f5fe
JH
2908 if (temp != stack_pointer_rtx)
2909 emit_move_insn (stack_pointer_rtx, temp);
bb93b973 2910
1a06f5fe
JH
2911#ifdef STACK_GROWS_DOWNWARD
2912 offset1 = 0;
2913 offset2 = GET_MODE_SIZE (submode);
2914#else
2915 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2916 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2917 + GET_MODE_SIZE (submode));
2918#endif
bb93b973 2919
1a06f5fe
JH
2920 emit_move_insn (change_address (x, submode,
2921 gen_rtx_PLUS (Pmode,
2922 stack_pointer_rtx,
2923 GEN_INT (offset1))),
2924 gen_realpart (submode, y));
2925 emit_move_insn (change_address (x, submode,
2926 gen_rtx_PLUS (Pmode,
2927 stack_pointer_rtx,
2928 GEN_INT (offset2))),
2929 gen_imagpart (submode, y));
2930 }
e9c0bd54 2931 else
79ce92d7 2932#endif
7308a047
RS
2933 /* If this is a stack, push the highpart first, so it
2934 will be in the argument order.
2935
2936 In that case, change_address is used only to convert
2937 the mode, not to change the address. */
e9c0bd54 2938 if (stack)
c937357e 2939 {
e33c0d66
RS
2940 /* Note that the real part always precedes the imag part in memory
2941 regardless of machine's endianness. */
c937357e 2942#ifdef STACK_GROWS_DOWNWARD
a79b3dc7
RS
2943 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2944 gen_imagpart (submode, y));
2945 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2946 gen_realpart (submode, y));
c937357e 2947#else
a79b3dc7
RS
2948 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2949 gen_realpart (submode, y));
2950 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2951 gen_imagpart (submode, y));
c937357e
RS
2952#endif
2953 }
2954 else
2955 {
235ae7be
DM
2956 rtx realpart_x, realpart_y;
2957 rtx imagpart_x, imagpart_y;
2958
405f63da
MM
2959 /* If this is a complex value with each part being smaller than a
2960 word, the usual calling sequence will likely pack the pieces into
2961 a single register. Unfortunately, SUBREG of hard registers only
2962 deals in terms of words, so we have a problem converting input
2963 arguments to the CONCAT of two registers that is used elsewhere
2964 for complex values. If this is before reload, we can copy it into
2965 memory and reload. FIXME, we should see about using extract and
2966 insert on integer registers, but complex short and complex char
2967 variables should be rarely used. */
3a94c984 2968 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
405f63da
MM
2969 && (reload_in_progress | reload_completed) == 0)
2970 {
bb93b973
RK
2971 int packed_dest_p
2972 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2973 int packed_src_p
2974 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
405f63da
MM
2975
2976 if (packed_dest_p || packed_src_p)
2977 {
2978 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2979 ? MODE_FLOAT : MODE_INT);
2980
1da68f56
RK
2981 enum machine_mode reg_mode
2982 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
405f63da
MM
2983
2984 if (reg_mode != BLKmode)
2985 {
2986 rtx mem = assign_stack_temp (reg_mode,
2987 GET_MODE_SIZE (mode), 0);
f4ef873c 2988 rtx cmem = adjust_address (mem, mode, 0);
405f63da 2989
1da68f56
RK
2990 cfun->cannot_inline
2991 = N_("function using short complex types cannot be inline");
405f63da
MM
2992
2993 if (packed_dest_p)
2994 {
2995 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
bb93b973 2996
405f63da
MM
2997 emit_move_insn_1 (cmem, y);
2998 return emit_move_insn_1 (sreg, mem);
2999 }
3000 else
3001 {
3002 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
bb93b973 3003
405f63da
MM
3004 emit_move_insn_1 (mem, sreg);
3005 return emit_move_insn_1 (x, cmem);
3006 }
3007 }
3008 }
3009 }
3010
235ae7be
DM
3011 realpart_x = gen_realpart (submode, x);
3012 realpart_y = gen_realpart (submode, y);
3013 imagpart_x = gen_imagpart (submode, x);
3014 imagpart_y = gen_imagpart (submode, y);
3015
3016 /* Show the output dies here. This is necessary for SUBREGs
3017 of pseudos since we cannot track their lifetimes correctly;
c14c6529
RH
3018 hard regs shouldn't appear here except as return values.
3019 We never want to emit such a clobber after reload. */
3020 if (x != y
235ae7be
DM
3021 && ! (reload_in_progress || reload_completed)
3022 && (GET_CODE (realpart_x) == SUBREG
3023 || GET_CODE (imagpart_x) == SUBREG))
bb93b973 3024 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2638126a 3025
a79b3dc7
RS
3026 emit_move_insn (realpart_x, realpart_y);
3027 emit_move_insn (imagpart_x, imagpart_y);
c937357e 3028 }
7308a047 3029
7a1ab50a 3030 return get_last_insn ();
7308a047
RS
3031 }
3032
a3600c71
HPN
3033 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
3034 find a mode to do it in. If we have a movcc, use it. Otherwise,
3035 find the MODE_INT mode of the same width. */
3036 else if (GET_MODE_CLASS (mode) == MODE_CC
3037 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
3038 {
3039 enum insn_code insn_code;
3040 enum machine_mode tmode = VOIDmode;
3041 rtx x1 = x, y1 = y;
3042
3043 if (mode != CCmode
3044 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
3045 tmode = CCmode;
3046 else
3047 for (tmode = QImode; tmode != VOIDmode;
3048 tmode = GET_MODE_WIDER_MODE (tmode))
3049 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
3050 break;
3051
3052 if (tmode == VOIDmode)
3053 abort ();
3054
3055 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
3056 may call change_address which is not appropriate if we were
3057 called when a reload was in progress. We don't have to worry
3058 about changing the address since the size in bytes is supposed to
3059 be the same. Copy the MEM to change the mode and move any
3060 substitutions from the old MEM to the new one. */
3061
3062 if (reload_in_progress)
3063 {
3064 x = gen_lowpart_common (tmode, x1);
3065 if (x == 0 && GET_CODE (x1) == MEM)
3066 {
3067 x = adjust_address_nv (x1, tmode, 0);
3068 copy_replacements (x1, x);
3069 }
3070
3071 y = gen_lowpart_common (tmode, y1);
3072 if (y == 0 && GET_CODE (y1) == MEM)
3073 {
3074 y = adjust_address_nv (y1, tmode, 0);
3075 copy_replacements (y1, y);
3076 }
3077 }
3078 else
3079 {
3080 x = gen_lowpart (tmode, x);
3081 y = gen_lowpart (tmode, y);
3082 }
502b8322 3083
a3600c71
HPN
3084 insn_code = mov_optab->handlers[(int) tmode].insn_code;
3085 return emit_insn (GEN_FCN (insn_code) (x, y));
3086 }
3087
5581fc91
RS
3088 /* Try using a move pattern for the corresponding integer mode. This is
3089 only safe when simplify_subreg can convert MODE constants into integer
3090 constants. At present, it can only do this reliably if the value
3091 fits within a HOST_WIDE_INT. */
3092 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3093 && (submode = int_mode_for_mode (mode)) != BLKmode
3094 && mov_optab->handlers[submode].insn_code != CODE_FOR_nothing)
3095 return emit_insn (GEN_FCN (mov_optab->handlers[submode].insn_code)
3096 (simplify_gen_subreg (submode, x, mode, 0),
3097 simplify_gen_subreg (submode, y, mode, 0)));
3098
cffa2189
R
3099 /* This will handle any multi-word or full-word mode that lacks a move_insn
3100 pattern. However, you will get better code if you define such patterns,
bbf6f052 3101 even if they must turn into multiple assembler instructions. */
cffa2189 3102 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
bbf6f052
RK
3103 {
3104 rtx last_insn = 0;
3ef1eef4 3105 rtx seq, inner;
235ae7be 3106 int need_clobber;
bb93b973 3107 int i;
3a94c984 3108
a98c9f1a
RK
3109#ifdef PUSH_ROUNDING
3110
3111 /* If X is a push on the stack, do the push now and replace
3112 X with a reference to the stack pointer. */
3113 if (push_operand (x, GET_MODE (x)))
3114 {
918a6124
GK
3115 rtx temp;
3116 enum rtx_code code;
0fb7aeda 3117
918a6124
GK
3118 /* Do not use anti_adjust_stack, since we don't want to update
3119 stack_pointer_delta. */
3120 temp = expand_binop (Pmode,
3121#ifdef STACK_GROWS_DOWNWARD
3122 sub_optab,
3123#else
3124 add_optab,
3125#endif
3126 stack_pointer_rtx,
3127 GEN_INT
bb93b973
RK
3128 (PUSH_ROUNDING
3129 (GET_MODE_SIZE (GET_MODE (x)))),
a426c92e 3130 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
bb93b973 3131
0fb7aeda
KH
3132 if (temp != stack_pointer_rtx)
3133 emit_move_insn (stack_pointer_rtx, temp);
918a6124
GK
3134
3135 code = GET_CODE (XEXP (x, 0));
bb93b973 3136
918a6124
GK
3137 /* Just hope that small offsets off SP are OK. */
3138 if (code == POST_INC)
0fb7aeda 3139 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
bb93b973
RK
3140 GEN_INT (-((HOST_WIDE_INT)
3141 GET_MODE_SIZE (GET_MODE (x)))));
918a6124 3142 else if (code == POST_DEC)
0fb7aeda 3143 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
918a6124
GK
3144 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3145 else
3146 temp = stack_pointer_rtx;
3147
3148 x = change_address (x, VOIDmode, temp);
a98c9f1a
RK
3149 }
3150#endif
3a94c984 3151
3ef1eef4
RK
3152 /* If we are in reload, see if either operand is a MEM whose address
3153 is scheduled for replacement. */
3154 if (reload_in_progress && GET_CODE (x) == MEM
3155 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
f1ec5147 3156 x = replace_equiv_address_nv (x, inner);
3ef1eef4
RK
3157 if (reload_in_progress && GET_CODE (y) == MEM
3158 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
f1ec5147 3159 y = replace_equiv_address_nv (y, inner);
3ef1eef4 3160
235ae7be 3161 start_sequence ();
15a7a8ec 3162
235ae7be 3163 need_clobber = 0;
bbf6f052 3164 for (i = 0;
3a94c984 3165 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
bbf6f052
RK
3166 i++)
3167 {
3168 rtx xpart = operand_subword (x, i, 1, mode);
3169 rtx ypart = operand_subword (y, i, 1, mode);
3170
3171 /* If we can't get a part of Y, put Y into memory if it is a
3172 constant. Otherwise, force it into a register. If we still
3173 can't get a part of Y, abort. */
3174 if (ypart == 0 && CONSTANT_P (y))
3175 {
3176 y = force_const_mem (mode, y);
3177 ypart = operand_subword (y, i, 1, mode);
3178 }
3179 else if (ypart == 0)
3180 ypart = operand_subword_force (y, i, mode);
3181
3182 if (xpart == 0 || ypart == 0)
3183 abort ();
3184
235ae7be
DM
3185 need_clobber |= (GET_CODE (xpart) == SUBREG);
3186
bbf6f052
RK
3187 last_insn = emit_move_insn (xpart, ypart);
3188 }
6551fa4d 3189
2f937369 3190 seq = get_insns ();
235ae7be
DM
3191 end_sequence ();
3192
3193 /* Show the output dies here. This is necessary for SUBREGs
3194 of pseudos since we cannot track their lifetimes correctly;
3195 hard regs shouldn't appear here except as return values.
3196 We never want to emit such a clobber after reload. */
3197 if (x != y
3198 && ! (reload_in_progress || reload_completed)
3199 && need_clobber != 0)
bb93b973 3200 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
235ae7be
DM
3201
3202 emit_insn (seq);
3203
bbf6f052
RK
3204 return last_insn;
3205 }
3206 else
3207 abort ();
3208}
51286de6
RH
3209
3210/* If Y is representable exactly in a narrower mode, and the target can
3211 perform the extension directly from constant or memory, then emit the
3212 move as an extension. */
3213
3214static rtx
502b8322 3215compress_float_constant (rtx x, rtx y)
51286de6
RH
3216{
3217 enum machine_mode dstmode = GET_MODE (x);
3218 enum machine_mode orig_srcmode = GET_MODE (y);
3219 enum machine_mode srcmode;
3220 REAL_VALUE_TYPE r;
3221
3222 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3223
3224 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3225 srcmode != orig_srcmode;
3226 srcmode = GET_MODE_WIDER_MODE (srcmode))
3227 {
3228 enum insn_code ic;
3229 rtx trunc_y, last_insn;
3230
3231 /* Skip if the target can't extend this way. */
3232 ic = can_extend_p (dstmode, srcmode, 0);
3233 if (ic == CODE_FOR_nothing)
3234 continue;
3235
3236 /* Skip if the narrowed value isn't exact. */
3237 if (! exact_real_truncate (srcmode, &r))
3238 continue;
3239
3240 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3241
3242 if (LEGITIMATE_CONSTANT_P (trunc_y))
3243 {
3244 /* Skip if the target needs extra instructions to perform
3245 the extension. */
3246 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3247 continue;
3248 }
3249 else if (float_extend_from_mem[dstmode][srcmode])
3250 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3251 else
3252 continue;
3253
3254 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3255 last_insn = get_last_insn ();
3256
3257 if (GET_CODE (x) == REG)
0c19a26f 3258 set_unique_reg_note (last_insn, REG_EQUAL, y);
51286de6
RH
3259
3260 return last_insn;
3261 }
3262
3263 return NULL_RTX;
3264}
bbf6f052
RK
3265\f
3266/* Pushing data onto the stack. */
3267
3268/* Push a block of length SIZE (perhaps variable)
3269 and return an rtx to address the beginning of the block.
3270 Note that it is not possible for the value returned to be a QUEUED.
3271 The value may be virtual_outgoing_args_rtx.
3272
3273 EXTRA is the number of bytes of padding to push in addition to SIZE.
3274 BELOW nonzero means this padding comes at low addresses;
3275 otherwise, the padding comes at high addresses. */
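/* Added usage sketch (hypothetical values, mirroring the call made in
   emit_push_insn below): to reserve a 16-byte block for a BLKmode argument
   when no argument block has been preallocated, a caller can do

	rtx addr = push_block (GEN_INT (16), 0, 0);
	rtx target = gen_rtx_MEM (BLKmode, addr);

   and then copy the argument into TARGET.  */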
3276
3277rtx
502b8322 3278push_block (rtx size, int extra, int below)
bbf6f052 3279{
b3694847 3280 rtx temp;
88f63c77
RK
3281
3282 size = convert_modes (Pmode, ptr_mode, size, 1);
bbf6f052
RK
3283 if (CONSTANT_P (size))
3284 anti_adjust_stack (plus_constant (size, extra));
3285 else if (GET_CODE (size) == REG && extra == 0)
3286 anti_adjust_stack (size);
3287 else
3288 {
ce48579b 3289 temp = copy_to_mode_reg (Pmode, size);
bbf6f052 3290 if (extra != 0)
906c4e36 3291 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
bbf6f052
RK
3292 temp, 0, OPTAB_LIB_WIDEN);
3293 anti_adjust_stack (temp);
3294 }
3295
f73ad30e 3296#ifndef STACK_GROWS_DOWNWARD
f73ad30e 3297 if (0)
f73ad30e
JH
3298#else
3299 if (1)
bbf6f052 3300#endif
f73ad30e 3301 {
f73ad30e
JH
3302 temp = virtual_outgoing_args_rtx;
3303 if (extra != 0 && below)
3304 temp = plus_constant (temp, extra);
3305 }
3306 else
3307 {
3308 if (GET_CODE (size) == CONST_INT)
3309 temp = plus_constant (virtual_outgoing_args_rtx,
3a94c984 3310 -INTVAL (size) - (below ? 0 : extra));
f73ad30e
JH
3311 else if (extra != 0 && !below)
3312 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3bdf5ad1 3313 negate_rtx (Pmode, plus_constant (size, extra)));
f73ad30e
JH
3314 else
3315 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3316 negate_rtx (Pmode, size));
3317 }
bbf6f052
RK
3318
3319 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3320}
3321
21d93687
RK
3322#ifdef PUSH_ROUNDING
3323
566aa174 3324/* Emit single push insn. */
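/* Added note: PUSH_ROUNDING (BYTES) is the number of bytes a push of BYTES
   bytes actually consumes.  For instance, on a target that keeps the stack
   word-aligned with PUSH_ROUNDING (N) defined as ((N + 3) & ~3), pushing a
   2-byte HImode value moves the stack pointer by 4; the padding handling
   below decides on which side of those 4 bytes the 2 data bytes land.  */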
21d93687 3325
566aa174 3326static void
502b8322 3327emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
566aa174 3328{
566aa174 3329 rtx dest_addr;
918a6124 3330 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
566aa174 3331 rtx dest;
371b8fc0
JH
3332 enum insn_code icode;
3333 insn_operand_predicate_fn pred;
566aa174 3334
371b8fc0
JH
3335 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3336 /* If there is a push pattern, use it. Otherwise try the old way of
3337 throwing a MEM representing the push operation to the move expander. */
3338 icode = push_optab->handlers[(int) mode].insn_code;
3339 if (icode != CODE_FOR_nothing)
3340 {
3341 if (((pred = insn_data[(int) icode].operand[0].predicate)
505ddab6 3342 && !((*pred) (x, mode))))
371b8fc0
JH
3343 x = force_reg (mode, x);
3344 emit_insn (GEN_FCN (icode) (x));
3345 return;
3346 }
566aa174
JH
3347 if (GET_MODE_SIZE (mode) == rounded_size)
3348 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
329d586f
KH
3349 /* If we are to pad downward, adjust the stack pointer first and
3350 then store X into the stack location using an offset. This is
3351 because emit_move_insn does not know how to pad; it does not have
3352 access to type. */
3353 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3354 {
3355 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3356 HOST_WIDE_INT offset;
3357
3358 emit_move_insn (stack_pointer_rtx,
3359 expand_binop (Pmode,
3360#ifdef STACK_GROWS_DOWNWARD
3361 sub_optab,
3362#else
3363 add_optab,
3364#endif
3365 stack_pointer_rtx,
3366 GEN_INT (rounded_size),
3367 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3368
3369 offset = (HOST_WIDE_INT) padding_size;
3370#ifdef STACK_GROWS_DOWNWARD
3371 if (STACK_PUSH_CODE == POST_DEC)
3372 /* We have already decremented the stack pointer, so get the
3373 previous value. */
3374 offset += (HOST_WIDE_INT) rounded_size;
3375#else
3376 if (STACK_PUSH_CODE == POST_INC)
3377 /* We have already incremented the stack pointer, so get the
3378 previous value. */
3379 offset -= (HOST_WIDE_INT) rounded_size;
3380#endif
3381 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3382 }
566aa174
JH
3383 else
3384 {
3385#ifdef STACK_GROWS_DOWNWARD
329d586f 3386 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
566aa174 3387 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
505ddab6 3388 GEN_INT (-(HOST_WIDE_INT) rounded_size));
566aa174 3389#else
329d586f 3390 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
566aa174
JH
3391 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3392 GEN_INT (rounded_size));
3393#endif
3394 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3395 }
3396
3397 dest = gen_rtx_MEM (mode, dest_addr);
3398
566aa174
JH
3399 if (type != 0)
3400 {
3401 set_mem_attributes (dest, type, 1);
c3d32120
RK
3402
3403 if (flag_optimize_sibling_calls)
3404 /* Function incoming arguments may overlap with sibling call
3405 outgoing arguments and we cannot allow reordering of reads
3406 from function arguments with stores to outgoing arguments
3407 of sibling calls. */
3408 set_mem_alias_set (dest, 0);
566aa174
JH
3409 }
3410 emit_move_insn (dest, x);
566aa174 3411}
21d93687 3412#endif
566aa174 3413
bbf6f052
RK
3414/* Generate code to push X onto the stack, assuming it has mode MODE and
3415 type TYPE.
3416 MODE is redundant except when X is a CONST_INT (since they don't
3417 carry mode info).
3418 SIZE is an rtx for the size of data to be copied (in bytes),
3419 needed only if X is BLKmode.
3420
f1eaaf73 3421 ALIGN (in bits) is maximum alignment we can assume.
bbf6f052 3422
cd048831
RK
3423 If PARTIAL and REG are both nonzero, then copy that many of the first
3424 words of X into registers starting with REG, and push the rest of X.
bbf6f052
RK
3425 The amount of space pushed is decreased by PARTIAL words,
3426 rounded *down* to a multiple of PARM_BOUNDARY.
3427 REG must be a hard register in this case.
cd048831
RK
3428 If REG is zero but PARTIAL is not, take all other actions for an
3429 argument partially in registers, but do not actually load any
3430 registers.
bbf6f052
RK
3431
3432 EXTRA is the amount in bytes of extra space to leave next to this arg.
6dc42e49 3433 This is ignored if an argument block has already been allocated.
bbf6f052
RK
3434
3435 On a machine that lacks real push insns, ARGS_ADDR is the address of
3436 the bottom of the argument block for this call. We use indexing off there
3437 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3438 argument block has not been preallocated.
3439
e5e809f4
JL
3440 ARGS_SO_FAR is the size of args previously pushed for this call.
3441
3442 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3443 for arguments passed in registers. If nonzero, it will be the number
3444 of bytes required. */
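/* Added worked example (assuming a 32-bit word target): for a DImode
   argument with PARTIAL == 1 and REG set, the first of its two words is
   loaded into REG and only the remaining word is pushed, so the space
   pushed is reduced by one word (rounded down to a multiple of
   PARM_BOUNDARY).  With REG == 0 and PARTIAL != 0 the same stack
   accounting is done but no register is actually loaded.  */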
bbf6f052
RK
3445
3446void
502b8322
AJ
3447emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3448 unsigned int align, int partial, rtx reg, int extra,
3449 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3450 rtx alignment_pad)
bbf6f052
RK
3451{
3452 rtx xinner;
3453 enum direction stack_direction
3454#ifdef STACK_GROWS_DOWNWARD
3455 = downward;
3456#else
3457 = upward;
3458#endif
3459
3460 /* Decide where to pad the argument: `downward' for below,
3461 `upward' for above, or `none' for don't pad it.
3462 Default is below for small data on big-endian machines; else above. */
3463 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3464
0fb7aeda 3465 /* Invert direction if stack is post-decrement.
9e0e11bf
GK
3466 FIXME: why? */
3467 if (STACK_PUSH_CODE == POST_DEC)
bbf6f052
RK
3468 if (where_pad != none)
3469 where_pad = (where_pad == downward ? upward : downward);
3470
3471 xinner = x = protect_from_queue (x, 0);
3472
3473 if (mode == BLKmode)
3474 {
3475 /* Copy a block into the stack, entirely or partially. */
3476
b3694847 3477 rtx temp;
bbf6f052 3478 int used = partial * UNITS_PER_WORD;
531547e9 3479 int offset;
bbf6f052 3480 int skip;
3a94c984 3481
531547e9
FJ
3482 if (reg && GET_CODE (reg) == PARALLEL)
3483 {
3484 /* Use the size of the elt to compute offset. */
3485 rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
3486 used = partial * GET_MODE_SIZE (GET_MODE (elt));
3487 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3488 }
3489 else
3490 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3491
bbf6f052
RK
3492 if (size == 0)
3493 abort ();
3494
3495 used -= offset;
3496
3497 /* USED is now the # of bytes we need not copy to the stack
3498 because registers will take care of them. */
3499
3500 if (partial != 0)
f4ef873c 3501 xinner = adjust_address (xinner, BLKmode, used);
bbf6f052
RK
3502
3503 /* If the partial register-part of the arg counts in its stack size,
3504 skip the part of stack space corresponding to the registers.
3505 Otherwise, start copying to the beginning of the stack space,
3506 by setting SKIP to 0. */
e5e809f4 3507 skip = (reg_parm_stack_space == 0) ? 0 : used;
bbf6f052
RK
3508
3509#ifdef PUSH_ROUNDING
3510 /* Do it with several push insns if that doesn't take lots of insns
3511 and if there is no difficulty with push insns that skip bytes
3512 on the stack for alignment purposes. */
3513 if (args_addr == 0
f73ad30e 3514 && PUSH_ARGS
bbf6f052
RK
3515 && GET_CODE (size) == CONST_INT
3516 && skip == 0
f26aca6d 3517 && MEM_ALIGN (xinner) >= align
15914757 3518 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
bbf6f052
RK
3519 /* Here we avoid the case of a structure whose weak alignment
3520 forces many pushes of a small amount of data,
3521 and such small pushes do rounding that causes trouble. */
e1565e65 3522 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
19caa751 3523 || align >= BIGGEST_ALIGNMENT
f1eaaf73
DE
3524 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3525 == (align / BITS_PER_UNIT)))
bbf6f052
RK
3526 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3527 {
3528 /* Push padding now if padding above and stack grows down,
3529 or if padding below and stack grows up.
3530 But if space already allocated, this has already been done. */
3531 if (extra && args_addr == 0
3532 && where_pad != none && where_pad != stack_direction)
906c4e36 3533 anti_adjust_stack (GEN_INT (extra));
bbf6f052 3534
8fd3cf4e 3535 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
bbf6f052
RK
3536 }
3537 else
3a94c984 3538#endif /* PUSH_ROUNDING */
bbf6f052 3539 {
7ab923cc
JJ
3540 rtx target;
3541
bbf6f052
RK
3542 /* Otherwise make space on the stack and copy the data
3543 to the address of that space. */
3544
3545 /* Deduct words put into registers from the size we must copy. */
3546 if (partial != 0)
3547 {
3548 if (GET_CODE (size) == CONST_INT)
906c4e36 3549 size = GEN_INT (INTVAL (size) - used);
bbf6f052
RK
3550 else
3551 size = expand_binop (GET_MODE (size), sub_optab, size,
906c4e36
RK
3552 GEN_INT (used), NULL_RTX, 0,
3553 OPTAB_LIB_WIDEN);
bbf6f052
RK
3554 }
3555
3556 /* Get the address of the stack space.
3557 In this case, we do not deal with EXTRA separately.
3558 A single stack adjust will do. */
3559 if (! args_addr)
3560 {
3561 temp = push_block (size, extra, where_pad == downward);
3562 extra = 0;
3563 }
3564 else if (GET_CODE (args_so_far) == CONST_INT)
3565 temp = memory_address (BLKmode,
3566 plus_constant (args_addr,
3567 skip + INTVAL (args_so_far)));
3568 else
3569 temp = memory_address (BLKmode,
38a448ca
RH
3570 plus_constant (gen_rtx_PLUS (Pmode,
3571 args_addr,
3572 args_so_far),
bbf6f052 3573 skip));
4ca79136
RH
3574
3575 if (!ACCUMULATE_OUTGOING_ARGS)
3576 {
3577 /* If the source is referenced relative to the stack pointer,
3578 copy it to another register to stabilize it. We do not need
3579 to do this if we know that we won't be changing sp. */
3580
3581 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3582 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3583 temp = copy_to_reg (temp);
3584 }
3585
3a94c984 3586 target = gen_rtx_MEM (BLKmode, temp);
7ab923cc 3587
3a94c984
KH
3588 if (type != 0)
3589 {
3590 set_mem_attributes (target, type, 1);
3591 /* Function incoming arguments may overlap with sibling call
3592 outgoing arguments and we cannot allow reordering of reads
3593 from function arguments with stores to outgoing arguments
3594 of sibling calls. */
ba4828e0 3595 set_mem_alias_set (target, 0);
3a94c984 3596 }
4ca79136 3597
44bb111a
RH
3598 /* ALIGN may well be better aligned than TYPE, e.g. due to
3599 PARM_BOUNDARY. Assume the caller isn't lying. */
3600 set_mem_align (target, align);
4ca79136 3601
44bb111a 3602 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
bbf6f052
RK
3603 }
3604 }
3605 else if (partial > 0)
3606 {
3607 /* Scalar partly in registers. */
3608
3609 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3610 int i;
3611 int not_stack;
3612 /* # words of start of argument
3613 that we must make space for but need not store. */
3614 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3615 int args_offset = INTVAL (args_so_far);
3616 int skip;
3617
3618 /* Push padding now if padding above and stack grows down,
3619 or if padding below and stack grows up.
3620 But if space already allocated, this has already been done. */
3621 if (extra && args_addr == 0
3622 && where_pad != none && where_pad != stack_direction)
906c4e36 3623 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
3624
3625 /* If we make space by pushing it, we might as well push
3626 the real data. Otherwise, we can leave OFFSET nonzero
3627 and leave the space uninitialized. */
3628 if (args_addr == 0)
3629 offset = 0;
3630
3631 /* Now NOT_STACK gets the number of words that we don't need to
3632 allocate on the stack. */
3633 not_stack = partial - offset;
3634
3635 /* If the partial register-part of the arg counts in its stack size,
3636 skip the part of stack space corresponding to the registers.
3637 Otherwise, start copying to the beginning of the stack space,
3638 by setting SKIP to 0. */
e5e809f4 3639 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
bbf6f052
RK
3640
3641 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3642 x = validize_mem (force_const_mem (mode, x));
3643
3644 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3645 SUBREGs of such registers are not allowed. */
3646 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3647 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3648 x = copy_to_reg (x);
3649
3650 /* Loop over all the words allocated on the stack for this arg. */
3651 /* We can do it by words, because any scalar bigger than a word
3652 has a size a multiple of a word. */
3653#ifndef PUSH_ARGS_REVERSED
3654 for (i = not_stack; i < size; i++)
3655#else
3656 for (i = size - 1; i >= not_stack; i--)
3657#endif
3658 if (i >= not_stack + offset)
3659 emit_push_insn (operand_subword_force (x, i, mode),
906c4e36
RK
3660 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3661 0, args_addr,
3662 GEN_INT (args_offset + ((i - not_stack + skip)
e5e809f4 3663 * UNITS_PER_WORD)),
4fc026cd 3664 reg_parm_stack_space, alignment_pad);
bbf6f052
RK
3665 }
3666 else
3667 {
3668 rtx addr;
3bdf5ad1 3669 rtx dest;
bbf6f052
RK
3670
3671 /* Push padding now if padding above and stack grows down,
3672 or if padding below and stack grows up.
3673 But if space already allocated, this has already been done. */
3674 if (extra && args_addr == 0
3675 && where_pad != none && where_pad != stack_direction)
906c4e36 3676 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
3677
3678#ifdef PUSH_ROUNDING
f73ad30e 3679 if (args_addr == 0 && PUSH_ARGS)
566aa174 3680 emit_single_push_insn (mode, x, type);
bbf6f052
RK
3681 else
3682#endif
921b3427
RK
3683 {
3684 if (GET_CODE (args_so_far) == CONST_INT)
3685 addr
3686 = memory_address (mode,
3a94c984 3687 plus_constant (args_addr,
921b3427 3688 INTVAL (args_so_far)));
3a94c984 3689 else
38a448ca
RH
3690 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3691 args_so_far));
566aa174
JH
3692 dest = gen_rtx_MEM (mode, addr);
3693 if (type != 0)
3694 {
3695 set_mem_attributes (dest, type, 1);
3696 /* Function incoming arguments may overlap with sibling call
3697 outgoing arguments and we cannot allow reordering of reads
3698 from function arguments with stores to outgoing arguments
3699 of sibling calls. */
ba4828e0 3700 set_mem_alias_set (dest, 0);
566aa174 3701 }
bbf6f052 3702
566aa174 3703 emit_move_insn (dest, x);
566aa174 3704 }
bbf6f052
RK
3705 }
3706
bbf6f052
RK
3707 /* If part should go in registers, copy that part
3708 into the appropriate registers. Do this now, at the end,
3709 since mem-to-mem copies above may do function calls. */
cd048831 3710 if (partial > 0 && reg != 0)
fffa9c1d
JW
3711 {
3712 /* Handle calls that pass values in multiple non-contiguous locations.
3713 The Irix 6 ABI has examples of this. */
3714 if (GET_CODE (reg) == PARALLEL)
6e985040 3715 emit_group_load (reg, x, type, -1);
fffa9c1d
JW
3716 else
3717 move_block_to_reg (REGNO (reg), x, partial, mode);
3718 }
bbf6f052
RK
3719
3720 if (extra && args_addr == 0 && where_pad == stack_direction)
906c4e36 3721 anti_adjust_stack (GEN_INT (extra));
3a94c984 3722
3ea2292a 3723 if (alignment_pad && args_addr == 0)
4fc026cd 3724 anti_adjust_stack (alignment_pad);
bbf6f052
RK
3725}
3726\f
296b4ed9
RK
3727/* Return X if X can be used as a subtarget in a sequence of arithmetic
3728 operations. */
3729
3730static rtx
502b8322 3731get_subtarget (rtx x)
296b4ed9
RK
3732{
3733 return ((x == 0
3734 /* Only registers can be subtargets. */
3735 || GET_CODE (x) != REG
3736 /* If the register is readonly, it can't be set more than once. */
3737 || RTX_UNCHANGING_P (x)
3738 /* Don't use hard regs to avoid extending their life. */
3739 || REGNO (x) < FIRST_PSEUDO_REGISTER
3740 /* Avoid subtargets inside loops,
3741 since they hide some invariant expressions. */
3742 || preserve_subexpressions_p ())
3743 ? 0 : x);
3744}
3745
bbf6f052
RK
3746/* Expand an assignment that stores the value of FROM into TO.
3747 If WANT_VALUE is nonzero, return an rtx for the value of TO.
709f5be1
RS
3748 (This may contain a QUEUED rtx;
3749 if the value is constant, this rtx is a constant.)
b90f141a 3750 Otherwise, the returned value is NULL_RTX. */
bbf6f052
RK
3751
3752rtx
b90f141a 3753expand_assignment (tree to, tree from, int want_value)
bbf6f052 3754{
b3694847 3755 rtx to_rtx = 0;
bbf6f052
RK
3756 rtx result;
3757
3758 /* Don't crash if the lhs of the assignment was erroneous. */
3759
3760 if (TREE_CODE (to) == ERROR_MARK)
709f5be1
RS
3761 {
3762 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3763 return want_value ? result : NULL_RTX;
3764 }
bbf6f052
RK
3765
3766 /* Assignment of a structure component needs special treatment
3767 if the structure component's rtx is not simply a MEM.
6be58303
JW
3768 Assignment of an array element at a constant index, and assignment of
3769 an array element in an unaligned packed structure field, have the same
3770 problem. */
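/* Added example (illustrative only): an assignment such as "s.f = x" where
   f is a bit-field reaches the branch below as a COMPONENT_REF;
   get_inner_reference recovers the bit position and size of f within s,
   and store_field then emits the masked store, rather than treating the
   lhs as a simple MEM.  */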
bbf6f052 3771
08293add 3772 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
7c02ae17
DE
3773 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
3774 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
bbf6f052
RK
3775 {
3776 enum machine_mode mode1;
770ae6cc 3777 HOST_WIDE_INT bitsize, bitpos;
a06ef755 3778 rtx orig_to_rtx;
7bb0943f 3779 tree offset;
bbf6f052
RK
3780 int unsignedp;
3781 int volatilep = 0;
0088fcb1
RK
3782 tree tem;
3783
3784 push_temp_slots ();
839c4796 3785 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
a06ef755 3786 &unsignedp, &volatilep);
bbf6f052
RK
3787
3788 /* If we are going to use store_bit_field and extract_bit_field,
3789 make sure to_rtx will be safe for multiple use. */
3790
3791 if (mode1 == VOIDmode && want_value)
3792 tem = stabilize_reference (tem);
3793
1ed1b4fb
RK
3794 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3795
7bb0943f
RS
3796 if (offset != 0)
3797 {
e3c8ea67 3798 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
7bb0943f
RS
3799
3800 if (GET_CODE (to_rtx) != MEM)
3801 abort ();
bd070e1a 3802
bd070e1a 3803#ifdef POINTERS_EXTEND_UNSIGNED
4b6c1672 3804 if (GET_MODE (offset_rtx) != Pmode)
267b28bd 3805 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
fa06ab5c
RK
3806#else
3807 if (GET_MODE (offset_rtx) != ptr_mode)
3808 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
bd070e1a 3809#endif
bd070e1a 3810
9a7b9f4f
JL
3811 /* A constant address in TO_RTX can have VOIDmode, we must not try
3812 to call force_reg for that case. Avoid that case. */
89752202
HB
3813 if (GET_CODE (to_rtx) == MEM
3814 && GET_MODE (to_rtx) == BLKmode
9a7b9f4f 3815 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
a06ef755 3816 && bitsize > 0
3a94c984 3817 && (bitpos % bitsize) == 0
89752202 3818 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
a06ef755 3819 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
89752202 3820 {
e3c8ea67 3821 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
89752202
HB
3822 bitpos = 0;
3823 }
3824
0d4903b8 3825 to_rtx = offset_address (to_rtx, offset_rtx,
d50a16c4
EB
3826 highest_pow2_factor_for_target (to,
3827 offset));
7bb0943f 3828 }
c5c76735 3829
998d7deb
RH
3830 if (GET_CODE (to_rtx) == MEM)
3831 {
998d7deb
RH
3832 /* If the field is at offset zero, we could have been given the
3833 DECL_RTX of the parent struct. Don't munge it. */
3834 to_rtx = shallow_copy_rtx (to_rtx);
3835
6f1087be 3836 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
998d7deb 3837 }
effbcc6a 3838
a06ef755
RK
3839 /* Deal with volatile and readonly fields. The former is only done
3840 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3841 if (volatilep && GET_CODE (to_rtx) == MEM)
3842 {
3843 if (to_rtx == orig_to_rtx)
3844 to_rtx = copy_rtx (to_rtx);
3845 MEM_VOLATILE_P (to_rtx) = 1;
bbf6f052
RK
3846 }
3847
956d6950 3848 if (TREE_CODE (to) == COMPONENT_REF
d76bc29c
EB
3849 && TREE_READONLY (TREE_OPERAND (to, 1))
3850 /* We can't assert that a MEM won't be set more than once
3851 if the component is not addressable because another
3852 non-addressable component may be referenced by the same MEM. */
3853 && ! (GET_CODE (to_rtx) == MEM && ! can_address_p (to)))
956d6950 3854 {
a06ef755 3855 if (to_rtx == orig_to_rtx)
956d6950 3856 to_rtx = copy_rtx (to_rtx);
956d6950
JL
3857 RTX_UNCHANGING_P (to_rtx) = 1;
3858 }
3859
a84b4898 3860 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
a06ef755
RK
3861 {
3862 if (to_rtx == orig_to_rtx)
3863 to_rtx = copy_rtx (to_rtx);
3864 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3865 }
3866
a06ef755
RK
3867 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3868 (want_value
3869 /* Spurious cast for HPUX compiler. */
3870 ? ((enum machine_mode)
3871 TYPE_MODE (TREE_TYPE (to)))
3872 : VOIDmode),
3873 unsignedp, TREE_TYPE (tem), get_alias_set (to));
a69beca1 3874
a06ef755
RK
3875 preserve_temp_slots (result);
3876 free_temp_slots ();
3877 pop_temp_slots ();
a69beca1 3878
a06ef755
RK
3879 /* If the value is meaningful, convert RESULT to the proper mode.
3880 Otherwise, return nothing. */
3881 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3882 TYPE_MODE (TREE_TYPE (from)),
3883 result,
3884 TREE_UNSIGNED (TREE_TYPE (to)))
3885 : NULL_RTX);
bbf6f052
RK
3886 }
3887
cd1db108
RS
3888 /* If the rhs is a function call and its value is not an aggregate,
3889 call the function before we start to compute the lhs.
3890 This is needed for correct code for cases such as
3891 val = setjmp (buf) on machines where reference to val
1ad87b63
RK
3892 requires loading up part of an address in a separate insn.
3893
1858863b
JW
3894 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3895 since it might be a promoted variable where the zero- or sign- extension
3896 needs to be done. Handling this in the normal way is safe because no
3897 computation is done before the call. */
61f71b34 3898 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
b35cd3c1 3899 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
1858863b
JW
3900 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3901 && GET_CODE (DECL_RTL (to)) == REG))
cd1db108 3902 {
0088fcb1
RK
3903 rtx value;
3904
3905 push_temp_slots ();
3906 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
cd1db108 3907 if (to_rtx == 0)
37a08a29 3908 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
aaf87c45 3909
fffa9c1d
JW
3910 /* Handle calls that return values in multiple non-contiguous locations.
3911 The Irix 6 ABI has examples of this. */
3912 if (GET_CODE (to_rtx) == PARALLEL)
6e985040
AM
3913 emit_group_load (to_rtx, value, TREE_TYPE (from),
3914 int_size_in_bytes (TREE_TYPE (from)));
fffa9c1d 3915 else if (GET_MODE (to_rtx) == BLKmode)
44bb111a 3916 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
aaf87c45 3917 else
6419e5b0 3918 {
5ae6cd0d 3919 if (POINTER_TYPE_P (TREE_TYPE (to)))
6419e5b0 3920 value = convert_memory_address (GET_MODE (to_rtx), value);
6419e5b0
DT
3921 emit_move_insn (to_rtx, value);
3922 }
cd1db108
RS
3923 preserve_temp_slots (to_rtx);
3924 free_temp_slots ();
0088fcb1 3925 pop_temp_slots ();
709f5be1 3926 return want_value ? to_rtx : NULL_RTX;
cd1db108
RS
3927 }
3928
bbf6f052
RK
3929 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3930 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3931
3932 if (to_rtx == 0)
37a08a29 3933 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
bbf6f052 3934
86d38d25 3935 /* Don't move directly into a return register. */
14a774a9
RK
3936 if (TREE_CODE (to) == RESULT_DECL
3937 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
86d38d25 3938 {
0088fcb1
RK
3939 rtx temp;
3940
3941 push_temp_slots ();
3942 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
14a774a9
RK
3943
3944 if (GET_CODE (to_rtx) == PARALLEL)
6e985040
AM
3945 emit_group_load (to_rtx, temp, TREE_TYPE (from),
3946 int_size_in_bytes (TREE_TYPE (from)));
14a774a9
RK
3947 else
3948 emit_move_insn (to_rtx, temp);
3949
86d38d25
RS
3950 preserve_temp_slots (to_rtx);
3951 free_temp_slots ();
0088fcb1 3952 pop_temp_slots ();
709f5be1 3953 return want_value ? to_rtx : NULL_RTX;
86d38d25
RS
3954 }
3955
bbf6f052
RK
3956 /* In case we are returning the contents of an object which overlaps
3957 the place the value is being stored, use a safe function when copying
3958 a value through a pointer into a structure value return block. */
3959 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3960 && current_function_returns_struct
3961 && !current_function_returns_pcc_struct)
3962 {
0088fcb1
RK
3963 rtx from_rtx, size;
3964
3965 push_temp_slots ();
33a20d10 3966 size = expr_size (from);
37a08a29 3967 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
bbf6f052 3968
4ca79136
RH
3969 if (TARGET_MEM_FUNCTIONS)
3970 emit_library_call (memmove_libfunc, LCT_NORMAL,
3971 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3972 XEXP (from_rtx, 0), Pmode,
3973 convert_to_mode (TYPE_MODE (sizetype),
3974 size, TREE_UNSIGNED (sizetype)),
3975 TYPE_MODE (sizetype));
3976 else
3977 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3978 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3979 XEXP (to_rtx, 0), Pmode,
3980 convert_to_mode (TYPE_MODE (integer_type_node),
3981 size,
3982 TREE_UNSIGNED (integer_type_node)),
3983 TYPE_MODE (integer_type_node));
bbf6f052
RK
3984
3985 preserve_temp_slots (to_rtx);
3986 free_temp_slots ();
0088fcb1 3987 pop_temp_slots ();
709f5be1 3988 return want_value ? to_rtx : NULL_RTX;
bbf6f052
RK
3989 }
3990
3991 /* Compute FROM and store the value in the rtx we got. */
3992
0088fcb1 3993 push_temp_slots ();
bbf6f052
RK
3994 result = store_expr (from, to_rtx, want_value);
3995 preserve_temp_slots (result);
3996 free_temp_slots ();
0088fcb1 3997 pop_temp_slots ();
709f5be1 3998 return want_value ? result : NULL_RTX;
bbf6f052
RK
3999}
4000
4001/* Generate code for computing expression EXP,
4002 and storing the value into TARGET.
bbf6f052
RK
4003 TARGET may contain a QUEUED rtx.
4004
8403445a 4005 If WANT_VALUE & 1 is nonzero, return a copy of the value
709f5be1
RS
4006 not in TARGET, so that we can be sure to use the proper
4007 value in a containing expression even if TARGET has something
4008 else stored in it. If possible, we copy the value through a pseudo
4009 and return that pseudo. Or, if the value is constant, we try to
4010 return the constant. In some cases, we return a pseudo
4011 copied *from* TARGET.
4012
4013 If the mode is BLKmode then we may return TARGET itself.
4014 It turns out that in BLKmode it doesn't cause a problem,
4015 because C has no operators that could combine two different
4016 assignments into the same BLKmode object with different values
4017 with no sequence point. Will other languages need this to
4018 be more thorough?
4019
8403445a 4020 If WANT_VALUE & 1 is 0, we return NULL, to make sure
709f5be1 4021 to catch quickly any cases where the caller uses the value
8403445a
AM
4022 and fails to set WANT_VALUE.
4023
4024 If WANT_VALUE & 2 is set, this is a store into a call param on the
4025 stack, and block moves may need to be treated specially. */
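/* Added note (a reading of the comment above, not new behavior): a plain
   assignment whose value is not reused calls store_expr (exp, target, 0);
   passing 1 asks for the stored value back; passing 2 (possibly ORed with
   1) marks a store into a call parameter already on the stack, which makes
   the block-move paths below use BLOCK_OP_CALL_PARM rather than
   BLOCK_OP_NORMAL.  */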
bbf6f052
RK
4026
4027rtx
502b8322 4028store_expr (tree exp, rtx target, int want_value)
bbf6f052 4029{
b3694847 4030 rtx temp;
0fab64a3 4031 rtx alt_rtl = NULL_RTX;
1bbd65cd 4032 rtx mark = mark_queue ();
bbf6f052 4033 int dont_return_target = 0;
e5408e52 4034 int dont_store_target = 0;
bbf6f052 4035
847311f4
AL
4036 if (VOID_TYPE_P (TREE_TYPE (exp)))
4037 {
4038 /* C++ can generate ?: expressions with a throw expression in one
4039 branch and an rvalue in the other. Here, we resolve attempts to
4d6922ee 4040 store the throw expression's nonexistent result. */
847311f4
AL
4041 if (want_value)
4042 abort ();
4043 expand_expr (exp, const0_rtx, VOIDmode, 0);
4044 return NULL_RTX;
4045 }
bbf6f052
RK
4046 if (TREE_CODE (exp) == COMPOUND_EXPR)
4047 {
4048 /* Perform first part of compound expression, then assign from second
4049 part. */
8403445a
AM
4050 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4051 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
bbf6f052 4052 emit_queue ();
709f5be1 4053 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
bbf6f052
RK
4054 }
4055 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4056 {
4057 /* For conditional expression, get safe form of the target. Then
4058 test the condition, doing the appropriate assignment on either
4059 side. This avoids the creation of unnecessary temporaries.
4060 For non-BLKmode, it is more efficient not to do this. */
4061
4062 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4063
4064 emit_queue ();
4065 target = protect_from_queue (target, 1);
4066
dabf8373 4067 do_pending_stack_adjust ();
bbf6f052
RK
4068 NO_DEFER_POP;
4069 jumpifnot (TREE_OPERAND (exp, 0), lab1);
956d6950 4070 start_cleanup_deferral ();
8403445a 4071 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
956d6950 4072 end_cleanup_deferral ();
bbf6f052
RK
4073 emit_queue ();
4074 emit_jump_insn (gen_jump (lab2));
4075 emit_barrier ();
4076 emit_label (lab1);
956d6950 4077 start_cleanup_deferral ();
8403445a 4078 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
956d6950 4079 end_cleanup_deferral ();
bbf6f052
RK
4080 emit_queue ();
4081 emit_label (lab2);
4082 OK_DEFER_POP;
a3a58acc 4083
8403445a 4084 return want_value & 1 ? target : NULL_RTX;
bbf6f052 4085 }
bbf6f052 4086 else if (queued_subexp_p (target))
709f5be1
RS
4087 /* If target contains a postincrement, let's not risk
4088 using it as the place to generate the rhs. */
bbf6f052
RK
4089 {
4090 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4091 {
4092 /* Expand EXP into a new pseudo. */
4093 temp = gen_reg_rtx (GET_MODE (target));
8403445a
AM
4094 temp = expand_expr (exp, temp, GET_MODE (target),
4095 (want_value & 2
4096 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
bbf6f052
RK
4097 }
4098 else
8403445a
AM
4099 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4100 (want_value & 2
4101 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
709f5be1
RS
4102
4103 /* If target is volatile, ANSI requires accessing the value
4104 *from* the target, if it is accessed. So make that happen.
4105 In no case return the target itself. */
8403445a 4106 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
709f5be1 4107 dont_return_target = 1;
bbf6f052 4108 }
8403445a
AM
4109 else if ((want_value & 1) != 0
4110 && GET_CODE (target) == MEM
4111 && ! MEM_VOLATILE_P (target)
12f06d17
CH
4112 && GET_MODE (target) != BLKmode)
4113 /* If target is in memory and caller wants value in a register instead,
4114 arrange that. Pass TARGET as target for expand_expr so that,
4115 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4116 We know expand_expr will not use the target in that case.
4117 Don't do this if TARGET is volatile because we are supposed
4118 to write it and then read it. */
4119 {
8403445a
AM
4120 temp = expand_expr (exp, target, GET_MODE (target),
4121 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
12f06d17 4122 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
e5408e52
JJ
4123 {
4124 /* If TEMP is already in the desired TARGET, only copy it from
4125 memory and don't store it there again. */
4126 if (temp == target
4127 || (rtx_equal_p (temp, target)
4128 && ! side_effects_p (temp) && ! side_effects_p (target)))
4129 dont_store_target = 1;
4130 temp = copy_to_reg (temp);
4131 }
12f06d17
CH
4132 dont_return_target = 1;
4133 }
1499e0a8 4134 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
09da1532 4135 /* If this is a scalar in a register that is stored in a wider mode
1499e0a8
RK
4136 than the declared mode, compute the result into its declared mode
4137 and then convert to the wider mode. Our value is the computed
4138 expression. */
4139 {
b76b08ef
RK
4140 rtx inner_target = 0;
4141
5a32d038 4142 /* If we don't want a value, we can do the conversion inside EXP,
f635a84d
RK
4143 which will often result in some optimizations. Do the conversion
4144 in two steps: first change the signedness, if needed, then
ab6c58f1
RK
4145 the extend. But don't do this if the type of EXP is a subtype
4146 of something else since then the conversion might involve
4147 more than just converting modes. */
8403445a
AM
4148 if ((want_value & 1) == 0
4149 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
ab6c58f1 4150 && TREE_TYPE (TREE_TYPE (exp)) == 0)
f635a84d
RK
4151 {
4152 if (TREE_UNSIGNED (TREE_TYPE (exp))
4153 != SUBREG_PROMOTED_UNSIGNED_P (target))
ceef8ce4 4154 exp = convert
ae2bcd98 4155 (lang_hooks.types.signed_or_unsigned_type
ceef8ce4 4156 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
f635a84d 4157
ae2bcd98 4158 exp = convert (lang_hooks.types.type_for_mode
b0c48229
NB
4159 (GET_MODE (SUBREG_REG (target)),
4160 SUBREG_PROMOTED_UNSIGNED_P (target)),
f635a84d 4161 exp);
b76b08ef
RK
4162
4163 inner_target = SUBREG_REG (target);
f635a84d 4164 }
3a94c984 4165
8403445a
AM
4166 temp = expand_expr (exp, inner_target, VOIDmode,
4167 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
b258707c 4168
7abec5be 4169 /* If TEMP is a MEM and we want a result value, make the access
502b8322
AJ
4170 now so it gets done only once. Strictly speaking, this is
4171 only necessary if the MEM is volatile, or if the address
7abec5be
RH
4172 overlaps TARGET. But not performing the load twice also
4173 reduces the amount of rtl we generate and then have to CSE. */
8403445a 4174 if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
766f36c7
RK
4175 temp = copy_to_reg (temp);
4176
b258707c
RS
4177 /* If TEMP is a VOIDmode constant, use convert_modes to make
4178 sure that we properly convert it. */
4179 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
1f1b0541
RH
4180 {
4181 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4182 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4183 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4184 GET_MODE (target), temp,
4185 SUBREG_PROMOTED_UNSIGNED_P (target));
4186 }
b258707c 4187
1499e0a8
RK
4188 convert_move (SUBREG_REG (target), temp,
4189 SUBREG_PROMOTED_UNSIGNED_P (target));
3dbecef9
JW
4190
4191 /* If we promoted a constant, change the mode back down to match
4192 target. Otherwise, the caller might get confused by a result whose
4193 mode is larger than expected. */
4194
8403445a 4195 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
3dbecef9 4196 {
b3ca30df
JJ
4197 if (GET_MODE (temp) != VOIDmode)
4198 {
4199 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4200 SUBREG_PROMOTED_VAR_P (temp) = 1;
0fb7aeda 4201 SUBREG_PROMOTED_UNSIGNED_SET (temp,
7879b81e 4202 SUBREG_PROMOTED_UNSIGNED_P (target));
b3ca30df
JJ
4203 }
4204 else
4205 temp = convert_modes (GET_MODE (target),
4206 GET_MODE (SUBREG_REG (target)),
4207 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
3dbecef9
JW
4208 }
4209
8403445a 4210 return want_value & 1 ? temp : NULL_RTX;
1499e0a8 4211 }
bbf6f052
RK
4212 else
4213 {
0fab64a3
MM
4214 temp = expand_expr_real (exp, target, GET_MODE (target),
4215 (want_value & 2
4216 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4217 &alt_rtl);
766f36c7 4218 /* Return TARGET if it's a specified hardware register.
709f5be1
RS
4219 If TARGET is a volatile mem ref, either return TARGET
4220 or return a reg copied *from* TARGET; ANSI requires this.
4221
4222 Otherwise, if TEMP is not TARGET, return TEMP
4223 if it is constant (for efficiency),
4224 or if we really want the correct value. */
bbf6f052
RK
4225 if (!(target && GET_CODE (target) == REG
4226 && REGNO (target) < FIRST_PSEUDO_REGISTER)
709f5be1 4227 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
effbcc6a 4228 && ! rtx_equal_p (temp, target)
8403445a 4229 && (CONSTANT_P (temp) || (want_value & 1) != 0))
bbf6f052
RK
4230 dont_return_target = 1;
4231 }
4232
b258707c
RS
4233 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4234 the same as that of TARGET, adjust the constant. This is needed, for
4235 example, in case it is a CONST_DOUBLE and we want only a word-sized
4236 value. */
4237 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
c1da1f33 4238 && TREE_CODE (exp) != ERROR_MARK
b258707c
RS
4239 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4240 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4241 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4242
bbf6f052 4243 /* If value was not generated in the target, store it there.
1bbd65cd
EB
4244 Convert the value to TARGET's type first if necessary and emit the
4245 pending incrementations that have been queued when expanding EXP.
4246 Note that we cannot emit the whole queue blindly because this will
4247 effectively disable the POST_INC optimization later.
4248
37a08a29 4249 If TEMP and TARGET compare equal according to rtx_equal_p, but
f3f2255a
R
4250 one or both of them are volatile memory refs, we have to distinguish
4251 two cases:
4252 - expand_expr has used TARGET. In this case, we must not generate
4253 another copy. This can be detected by TARGET being equal according
4254 to == .
4255 - expand_expr has not used TARGET - that means that the source just
4256 happens to have the same RTX form. Since temp will have been created
4257 by expand_expr, it will compare unequal according to == .
4258 We must generate a copy in this case, to reach the correct number
4259 of volatile memory references. */
bbf6f052 4260
6036acbb 4261 if ((! rtx_equal_p (temp, target)
f3f2255a
R
4262 || (temp != target && (side_effects_p (temp)
4263 || side_effects_p (target))))
e5408e52 4264 && TREE_CODE (exp) != ERROR_MARK
a9772b60 4265 && ! dont_store_target
9c5c5f2c
MM
4266 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4267 but TARGET is not valid memory reference, TEMP will differ
4268 from TARGET although it is really the same location. */
0fab64a3 4269 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
e56fc090
HPN
4270 /* If there's nothing to copy, don't bother. Don't call expr_size
4271 unless necessary, because some front ends' (e.g. C++'s) expr_size hook
4272 aborts on objects that are not supposed to be bit-copied or
4273 bit-initialized. */
4274 && expr_size (exp) != const0_rtx)
bbf6f052 4275 {
1bbd65cd 4276 emit_insns_enqueued_after_mark (mark);
bbf6f052 4277 target = protect_from_queue (target, 1);
e6d55fd7 4278 temp = protect_from_queue (temp, 0);
bbf6f052 4279 if (GET_MODE (temp) != GET_MODE (target)
f0348c25 4280 && GET_MODE (temp) != VOIDmode)
bbf6f052
RK
4281 {
4282 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4283 if (dont_return_target)
4284 {
4285 /* In this case, we will return TEMP,
4286 so make sure it has the proper mode.
4287 But don't forget to store the value into TARGET. */
4288 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4289 emit_move_insn (target, temp);
4290 }
4291 else
4292 convert_move (target, temp, unsignedp);
4293 }
4294
4295 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4296 {
c24ae149
RK
4297 /* Handle copying a string constant into an array. The string
4298 constant may be shorter than the array. So copy just the string's
4299 actual length, and clear the rest. First get the size of the data
4300 type of the string, which is actually the size of the target. */
4301 rtx size = expr_size (exp);
bbf6f052 4302
e87b4f3f
RS
4303 if (GET_CODE (size) == CONST_INT
4304 && INTVAL (size) < TREE_STRING_LENGTH (exp))
8403445a
AM
4305 emit_block_move (target, temp, size,
4306 (want_value & 2
4307 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
e87b4f3f 4308 else
bbf6f052 4309 {
e87b4f3f
RS
4310 /* Compute the size of the data to copy from the string. */
4311 tree copy_size
c03b7665 4312 = size_binop (MIN_EXPR,
b50d17a1 4313 make_tree (sizetype, size),
fed3cef0 4314 size_int (TREE_STRING_LENGTH (exp)));
8403445a
AM
4315 rtx copy_size_rtx
4316 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4317 (want_value & 2
4318 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
e87b4f3f
RS
4319 rtx label = 0;
4320
4321 /* Copy that much. */
267b28bd
SE
4322 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4323 TREE_UNSIGNED (sizetype));
8403445a
AM
4324 emit_block_move (target, temp, copy_size_rtx,
4325 (want_value & 2
4326 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
e87b4f3f 4327
88f63c77
RK
4328 /* Figure out how much is left in TARGET that we have to clear.
4329 Do all calculations in ptr_mode. */
e87b4f3f
RS
4330 if (GET_CODE (copy_size_rtx) == CONST_INT)
4331 {
c24ae149
RK
4332 size = plus_constant (size, -INTVAL (copy_size_rtx));
4333 target = adjust_address (target, BLKmode,
4334 INTVAL (copy_size_rtx));
e87b4f3f
RS
4335 }
4336 else
4337 {
fa06ab5c 4338 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
906c4e36
RK
4339 copy_size_rtx, NULL_RTX, 0,
4340 OPTAB_LIB_WIDEN);
e87b4f3f 4341
c24ae149
RK
4342#ifdef POINTERS_EXTEND_UNSIGNED
4343 if (GET_MODE (copy_size_rtx) != Pmode)
267b28bd
SE
4344 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4345 TREE_UNSIGNED (sizetype));
c24ae149
RK
4346#endif
4347
4348 target = offset_address (target, copy_size_rtx,
4349 highest_pow2_factor (copy_size));
e87b4f3f 4350 label = gen_label_rtx ();
c5d5d461 4351 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
a06ef755 4352 GET_MODE (size), 0, label);
e87b4f3f
RS
4353 }
4354
4355 if (size != const0_rtx)
37a08a29 4356 clear_storage (target, size);
22619c3f 4357
e87b4f3f
RS
4358 if (label)
4359 emit_label (label);
bbf6f052
RK
4360 }
4361 }
fffa9c1d
JW
4362 /* Handle calls that return values in multiple non-contiguous locations.
4363 The Irix 6 ABI has examples of this. */
4364 else if (GET_CODE (target) == PARALLEL)
6e985040
AM
4365 emit_group_load (target, temp, TREE_TYPE (exp),
4366 int_size_in_bytes (TREE_TYPE (exp)));
bbf6f052 4367 else if (GET_MODE (temp) == BLKmode)
8403445a
AM
4368 emit_block_move (target, temp, expr_size (exp),
4369 (want_value & 2
4370 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
bbf6f052 4371 else
b0dccb00
RH
4372 {
4373 temp = force_operand (temp, target);
4374 if (temp != target)
4375 emit_move_insn (target, temp);
4376 }
bbf6f052 4377 }
709f5be1 4378
766f36c7 4379 /* If we don't want a value, return NULL_RTX. */
8403445a 4380 if ((want_value & 1) == 0)
766f36c7
RK
4381 return NULL_RTX;
4382
4383 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4384 ??? The latter test doesn't seem to make sense. */
4385 else if (dont_return_target && GET_CODE (temp) != MEM)
bbf6f052 4386 return temp;
766f36c7
RK
4387
4388 /* Return TARGET itself if it is a hard register. */
8403445a
AM
4389 else if ((want_value & 1) != 0
4390 && GET_MODE (target) != BLKmode
766f36c7
RK
4391 && ! (GET_CODE (target) == REG
4392 && REGNO (target) < FIRST_PSEUDO_REGISTER))
709f5be1 4393 return copy_to_reg (target);
3a94c984 4394
766f36c7 4395 else
709f5be1 4396 return target;
bbf6f052
RK
4397}
4398\f
40209195 4399/* Return 1 if EXP just contains zeros. FIXME merge with initializer_zerop. */
9de08200
RK
4400
4401static int
502b8322 4402is_zeros_p (tree exp)
9de08200
RK
4403{
4404 tree elt;
4405
4406 switch (TREE_CODE (exp))
4407 {
4408 case CONVERT_EXPR:
4409 case NOP_EXPR:
4410 case NON_LVALUE_EXPR:
ed239f5a 4411 case VIEW_CONVERT_EXPR:
9de08200
RK
4412 return is_zeros_p (TREE_OPERAND (exp, 0));
4413
4414 case INTEGER_CST:
05bccae2 4415 return integer_zerop (exp);
9de08200
RK
4416
4417 case COMPLEX_CST:
4418 return
4419 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4420
4421 case REAL_CST:
41c9120b 4422 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
9de08200 4423
69ef87e2
AH
4424 case VECTOR_CST:
4425 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4426 elt = TREE_CHAIN (elt))
4427 if (!is_zeros_p (TREE_VALUE (elt)))
4428 return 0;
4429
4430 return 1;
4431
9de08200 4432 case CONSTRUCTOR:
e1a43f73
PB
4433 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4434 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
9de08200
RK
4435 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4436 if (! is_zeros_p (TREE_VALUE (elt)))
4437 return 0;
4438
4439 return 1;
3a94c984 4440
e9a25f70
JL
4441 default:
4442 return 0;
9de08200 4443 }
9de08200
RK
4444}
4445
4446/* Return 1 if EXP contains mostly (3/4) zeros. */
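/* Added worked example: the test below is "4 * zeros >= 3 * elts", i.e. at
   least three quarters of the elements are themselves (mostly) zero.  A
   constructor with 5 such elements out of 6 qualifies (20 >= 18); one with
   4 out of 6 does not (16 < 18).  */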
4447
40209195 4448int
502b8322 4449mostly_zeros_p (tree exp)
9de08200 4450{
9de08200
RK
4451 if (TREE_CODE (exp) == CONSTRUCTOR)
4452 {
e1a43f73
PB
4453 int elts = 0, zeros = 0;
4454 tree elt = CONSTRUCTOR_ELTS (exp);
4455 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4456 {
4457 /* If there are no ranges of true bits, it is all zero. */
4458 return elt == NULL_TREE;
4459 }
4460 for (; elt; elt = TREE_CHAIN (elt))
4461 {
4462 /* We do not handle the case where the index is a RANGE_EXPR,
4463 so the statistic will be somewhat inaccurate.
4464 We do make a more accurate count in store_constructor itself,
4465 so, since this function is only used for nested array elements,
0f41302f 4466 this should be close enough. */
e1a43f73
PB
4467 if (mostly_zeros_p (TREE_VALUE (elt)))
4468 zeros++;
4469 elts++;
4470 }
9de08200
RK
4471
4472 return 4 * zeros >= 3 * elts;
4473 }
4474
4475 return is_zeros_p (exp);
4476}
4477\f
e1a43f73
PB
4478/* Helper function for store_constructor.
4479 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4480 TYPE is the type of the CONSTRUCTOR, not the element type.
04050c69 4481 CLEARED is as for store_constructor.
23cb1766 4482 ALIAS_SET is the alias set to use for any stores.
23ccec44
JW
4483
4484 This provides a recursive shortcut back to store_constructor when it isn't
4485 necessary to go through store_field. This is so that we can pass through
4486 the cleared field to let store_constructor know that we may not have to
4487 clear a substructure if the outer structure has already been cleared. */
e1a43f73
PB
4488
4489static void
502b8322
AJ
4490store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4491 HOST_WIDE_INT bitpos, enum machine_mode mode,
4492 tree exp, tree type, int cleared, int alias_set)
e1a43f73
PB
4493{
4494 if (TREE_CODE (exp) == CONSTRUCTOR
23ccec44 4495 && bitpos % BITS_PER_UNIT == 0
cc2902df 4496 /* If we have a nonzero bitpos for a register target, then we just
23ccec44
JW
4497 let store_field do the bitfield handling. This is unlikely to
4498 generate unnecessary clear instructions anyways. */
4499 && (bitpos == 0 || GET_CODE (target) == MEM))
e1a43f73 4500 {
61cb205c
RK
4501 if (GET_CODE (target) == MEM)
4502 target
4503 = adjust_address (target,
4504 GET_MODE (target) == BLKmode
4505 || 0 != (bitpos
4506 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4507 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
23cb1766 4508
e0339ef7 4509
04050c69 4510 /* Update the alias set, if required. */
10b76d73
RK
4511 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4512 && MEM_ALIAS_SET (target) != 0)
70072ed9
RK
4513 {
4514 target = copy_rtx (target);
4515 set_mem_alias_set (target, alias_set);
4516 }
e0339ef7 4517
04050c69 4518 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
e1a43f73
PB
4519 }
4520 else
a06ef755
RK
4521 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4522 alias_set);
e1a43f73
PB
4523}
4524
bbf6f052 4525/* Store the value of constructor EXP into the rtx TARGET.
04050c69
RK
4526 TARGET is either a REG or a MEM; we know it cannot conflict, since
4527 safe_from_p has been called.
b7010412
RK
4528 CLEARED is true if TARGET is known to have been zero'd.
4529 SIZE is the number of bytes of TARGET we are allowed to modify: this
4530 may not be the same as the size of EXP if we are assigning to a field
4531 which has been packed to exclude padding bits. */
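/* Added summary (a reading of the code below, not new behavior): the
   strategy is to clear the whole of TARGET up front whenever the
   constructor leaves fields unspecified or is mostly zeros, and then store
   only the explicitly given elements; the CLEARED flag is propagated into
   nested constructors through store_constructor_field so inner aggregates
   are not cleared a second time.  */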
bbf6f052
RK
4532
4533static void
502b8322 4534store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
bbf6f052 4535{
4af3895e 4536 tree type = TREE_TYPE (exp);
a5efcd63 4537#ifdef WORD_REGISTER_OPERATIONS
13eb1f7f 4538 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
a5efcd63 4539#endif
4af3895e 4540
e44842fe
RK
4541 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4542 || TREE_CODE (type) == QUAL_UNION_TYPE)
bbf6f052 4543 {
b3694847 4544 tree elt;
bbf6f052 4545
2c430630
RS
4546 /* If size is zero or the target is already cleared, do nothing. */
4547 if (size == 0 || cleared)
4548 cleared = 1;
04050c69 4549 /* We either clear the aggregate or indicate the value is dead. */
2c430630
RS
4550 else if ((TREE_CODE (type) == UNION_TYPE
4551 || TREE_CODE (type) == QUAL_UNION_TYPE)
4552 && ! CONSTRUCTOR_ELTS (exp))
04050c69 4553 /* If the constructor is empty, clear the union. */
a59f8640 4554 {
04050c69
RK
4555 clear_storage (target, expr_size (exp));
4556 cleared = 1;
a59f8640 4557 }
4af3895e
JVA
4558
4559 /* If we are building a static constructor into a register,
4560 set the initial value as zero so we can fold the value into
67225c15
RK
4561 a constant. But if more than one register is involved,
4562 this probably loses. */
2c430630 4563 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
67225c15 4564 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
9de08200 4565 {
04050c69 4566 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
9de08200
RK
4567 cleared = 1;
4568 }
4569
4570 /* If the constructor has fewer fields than the structure
4571 or if we are initializing the structure to mostly zeros,
0d97bf4c 4572 clear the whole structure first. Don't do this if TARGET is a
fcf1b822
RK
4573 register whose mode size isn't equal to SIZE since clear_storage
4574 can't handle this case. */
2c430630
RS
4575 else if (((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
4576 || mostly_zeros_p (exp))
fcf1b822 4577 && (GET_CODE (target) != REG
04050c69
RK
4578 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4579 == size)))
9de08200 4580 {
337f4314
RK
4581 rtx xtarget = target;
4582
4583 if (readonly_fields_p (type))
4584 {
4585 xtarget = copy_rtx (xtarget);
4586 RTX_UNCHANGING_P (xtarget) = 1;
4587 }
4588
4589 clear_storage (xtarget, GEN_INT (size));
9de08200
RK
4590 cleared = 1;
4591 }
04050c69
RK
4592
4593 if (! cleared)
38a448ca 4594 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
bbf6f052
RK
4595
4596 /* Store each element of the constructor into
4597 the corresponding field of TARGET. */
4598
4599 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4600 {
b3694847 4601 tree field = TREE_PURPOSE (elt);
34c73909 4602 tree value = TREE_VALUE (elt);
b3694847 4603 enum machine_mode mode;
770ae6cc
RK
4604 HOST_WIDE_INT bitsize;
4605 HOST_WIDE_INT bitpos = 0;
770ae6cc 4606 tree offset;
b50d17a1 4607 rtx to_rtx = target;
bbf6f052 4608
f32fd778
RS
4609 /* Just ignore missing fields.
4610 We cleared the whole structure, above,
4611 if any fields are missing. */
4612 if (field == 0)
4613 continue;
4614
8b6000fc 4615 if (cleared && is_zeros_p (value))
e1a43f73 4616 continue;
9de08200 4617
770ae6cc
RK
4618 if (host_integerp (DECL_SIZE (field), 1))
4619 bitsize = tree_low_cst (DECL_SIZE (field), 1);
14a774a9
RK
4620 else
4621 bitsize = -1;
4622
bbf6f052
RK
4623 mode = DECL_MODE (field);
4624 if (DECL_BIT_FIELD (field))
4625 mode = VOIDmode;
4626
770ae6cc
RK
4627 offset = DECL_FIELD_OFFSET (field);
4628 if (host_integerp (offset, 0)
4629 && host_integerp (bit_position (field), 0))
4630 {
4631 bitpos = int_bit_position (field);
4632 offset = 0;
4633 }
b50d17a1 4634 else
770ae6cc 4635 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
3a94c984 4636
b50d17a1
RK
4637 if (offset)
4638 {
4639 rtx offset_rtx;
4640
6fce44af
RK
4641 offset
4642 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4643 make_tree (TREE_TYPE (exp),
4644 target));
bbf6f052 4645
b50d17a1
RK
4646 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4647 if (GET_CODE (to_rtx) != MEM)
4648 abort ();
4649
bd070e1a 4650#ifdef POINTERS_EXTEND_UNSIGNED
4b6c1672 4651 if (GET_MODE (offset_rtx) != Pmode)
267b28bd 4652 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
fa06ab5c
RK
4653#else
4654 if (GET_MODE (offset_rtx) != ptr_mode)
4655 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
bd070e1a 4656#endif
bd070e1a 4657
0d4903b8
RK
4658 to_rtx = offset_address (to_rtx, offset_rtx,
4659 highest_pow2_factor (offset));
b50d17a1 4660 }
c5c76735 4661
4e44c1ef 4662 if (TREE_READONLY (field))
cf04eb80 4663 {
9151b3bf 4664 if (GET_CODE (to_rtx) == MEM)
effbcc6a
RK
4665 to_rtx = copy_rtx (to_rtx);
4666
cf04eb80
RK
4667 RTX_UNCHANGING_P (to_rtx) = 1;
4668 }
4669
34c73909
R
4670#ifdef WORD_REGISTER_OPERATIONS
4671 /* If this initializes a field that is smaller than a word, at the
4672 start of a word, try to widen it to a full word.
4673 This special case allows us to output C++ member function
4674 initializations in a form that the optimizers can understand. */
770ae6cc 4675 if (GET_CODE (target) == REG
34c73909
R
4676 && bitsize < BITS_PER_WORD
4677 && bitpos % BITS_PER_WORD == 0
4678 && GET_MODE_CLASS (mode) == MODE_INT
4679 && TREE_CODE (value) == INTEGER_CST
13eb1f7f
RK
4680 && exp_size >= 0
4681 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
34c73909
R
4682 {
4683 tree type = TREE_TYPE (value);
04050c69 4684
34c73909
R
4685 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4686 {
ae2bcd98 4687 type = lang_hooks.types.type_for_size
b0c48229 4688 (BITS_PER_WORD, TREE_UNSIGNED (type));
34c73909
R
4689 value = convert (type, value);
4690 }
04050c69 4691
34c73909
R
4692 if (BYTES_BIG_ENDIAN)
4693 value
4694 = fold (build (LSHIFT_EXPR, type, value,
4695 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4696 bitsize = BITS_PER_WORD;
4697 mode = word_mode;
4698 }
4699#endif
10b76d73
RK
4700
4701 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4702 && DECL_NONADDRESSABLE_P (field))
4703 {
4704 to_rtx = copy_rtx (to_rtx);
4705 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4706 }
4707
c5c76735 4708 store_constructor_field (to_rtx, bitsize, bitpos, mode,
8b6000fc 4709 value, type, cleared,
10b76d73 4710 get_alias_set (TREE_TYPE (field)));
bbf6f052
RK
4711 }
4712 }
e6834654
SS
4713 else if (TREE_CODE (type) == ARRAY_TYPE
4714 || TREE_CODE (type) == VECTOR_TYPE)
bbf6f052 4715 {
b3694847
SS
4716 tree elt;
4717 int i;
e1a43f73 4718 int need_to_clear;
4af3895e 4719 tree domain = TYPE_DOMAIN (type);
4af3895e 4720 tree elttype = TREE_TYPE (type);
e6834654 4721 int const_bounds_p;
ae0ed63a
JM
4722 HOST_WIDE_INT minelt = 0;
4723 HOST_WIDE_INT maxelt = 0;
997404de
JH
4724 int icode = 0;
4725 rtx *vector = NULL;
4726 int elt_size = 0;
4727 unsigned n_elts = 0;
85f3d674 4728
e6834654
SS
4729 /* Vectors are like arrays, but the domain is stored via an array
4730 type indirectly. */
4731 if (TREE_CODE (type) == VECTOR_TYPE)
4732 {
4733 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
4734 the same field as TYPE_DOMAIN, we are not guaranteed that
4735 it always will. */
4736 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4737 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
997404de
JH
4738 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
4739 {
4740 enum machine_mode mode = GET_MODE (target);
4741
4742 icode = (int) vec_init_optab->handlers[mode].insn_code;
4743 if (icode != CODE_FOR_nothing)
4744 {
4745 unsigned int i;
4746
4747 elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
4748 n_elts = (GET_MODE_SIZE (mode) / elt_size);
4749 vector = alloca (n_elts);
4750 for (i = 0; i < n_elts; i++)
4751 vector [i] = CONST0_RTX (GET_MODE_INNER (mode));
4752 }
4753 }
e6834654
SS
4754 }
4755
4756 const_bounds_p = (TYPE_MIN_VALUE (domain)
4757 && TYPE_MAX_VALUE (domain)
4758 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4759 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4760
85f3d674
RK
4761 /* If we have constant bounds for the range of the type, get them. */
4762 if (const_bounds_p)
4763 {
4764 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4765 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4766 }
bbf6f052 4767
e1a43f73 4768 /* If the constructor has fewer elements than the array,
38e01259 4769 clear the whole array first. Similarly if this is
e1a43f73
PB
 4770 a static constructor of a non-BLKmode object. */
4771 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4772 need_to_clear = 1;
4773 else
4774 {
4775 HOST_WIDE_INT count = 0, zero_count = 0;
85f3d674
RK
4776 need_to_clear = ! const_bounds_p;
4777
e1a43f73
PB
4778 /* This loop is a more accurate version of the loop in
4779 mostly_zeros_p (it handles RANGE_EXPR in an index).
4780 It is also needed to check for missing elements. */
4781 for (elt = CONSTRUCTOR_ELTS (exp);
85f3d674 4782 elt != NULL_TREE && ! need_to_clear;
df0faff1 4783 elt = TREE_CHAIN (elt))
e1a43f73
PB
4784 {
4785 tree index = TREE_PURPOSE (elt);
4786 HOST_WIDE_INT this_node_count;
19caa751 4787
e1a43f73
PB
4788 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4789 {
4790 tree lo_index = TREE_OPERAND (index, 0);
4791 tree hi_index = TREE_OPERAND (index, 1);
05bccae2 4792
19caa751
RK
4793 if (! host_integerp (lo_index, 1)
4794 || ! host_integerp (hi_index, 1))
e1a43f73
PB
4795 {
4796 need_to_clear = 1;
4797 break;
4798 }
19caa751
RK
4799
4800 this_node_count = (tree_low_cst (hi_index, 1)
4801 - tree_low_cst (lo_index, 1) + 1);
e1a43f73
PB
4802 }
4803 else
4804 this_node_count = 1;
85f3d674 4805
e1a43f73
PB
4806 count += this_node_count;
4807 if (mostly_zeros_p (TREE_VALUE (elt)))
4808 zero_count += this_node_count;
4809 }
85f3d674 4810
8e958f70 4811 /* Clear the entire array first if there are any missing elements,
0f41302f 4812 or if the incidence of zero elements is >= 75%. */
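	  /* Illustratively: with 8 elements of which 6 are zero,
	     4 * 6 >= 3 * 8 holds, so the whole array is cleared first and
	     only the nonzero elements need to be stored below.  */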
85f3d674
RK
4813 if (! need_to_clear
4814 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
e1a43f73
PB
4815 need_to_clear = 1;
4816 }
85f3d674 4817
997404de 4818 if (need_to_clear && size > 0 && !vector)
9de08200
RK
4819 {
4820 if (! cleared)
725e58b1
RK
4821 {
4822 if (REG_P (target))
4823 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4824 else
4825 clear_storage (target, GEN_INT (size));
4826 }
9de08200
RK
4827 cleared = 1;
4828 }
df4556a3 4829 else if (REG_P (target))
bbf6f052 4830 /* Inform later passes that the old value is dead. */
38a448ca 4831 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
bbf6f052
RK
4832
4833 /* Store each element of the constructor into
4834 the corresponding element of TARGET, determined
4835 by counting the elements. */
4836 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4837 elt;
4838 elt = TREE_CHAIN (elt), i++)
4839 {
b3694847 4840 enum machine_mode mode;
19caa751
RK
4841 HOST_WIDE_INT bitsize;
4842 HOST_WIDE_INT bitpos;
bbf6f052 4843 int unsignedp;
e1a43f73 4844 tree value = TREE_VALUE (elt);
03dc44a6
RS
4845 tree index = TREE_PURPOSE (elt);
4846 rtx xtarget = target;
bbf6f052 4847
e1a43f73
PB
4848 if (cleared && is_zeros_p (value))
4849 continue;
9de08200 4850
bbf6f052 4851 unsignedp = TREE_UNSIGNED (elttype);
14a774a9
RK
4852 mode = TYPE_MODE (elttype);
4853 if (mode == BLKmode)
19caa751
RK
4854 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4855 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4856 : -1);
14a774a9
RK
4857 else
4858 bitsize = GET_MODE_BITSIZE (mode);
bbf6f052 4859
e1a43f73
PB
4860 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4861 {
4862 tree lo_index = TREE_OPERAND (index, 0);
4863 tree hi_index = TREE_OPERAND (index, 1);
4977bab6 4864 rtx index_r, pos_rtx, loop_end;
e1a43f73 4865 struct nesting *loop;
05c0b405
PB
4866 HOST_WIDE_INT lo, hi, count;
4867 tree position;
e1a43f73 4868
997404de
JH
4869 if (vector)
4870 abort ();
4871
0f41302f 4872 /* If the range is constant and "small", unroll the loop. */
85f3d674
RK
4873 if (const_bounds_p
4874 && host_integerp (lo_index, 0)
19caa751
RK
4875 && host_integerp (hi_index, 0)
4876 && (lo = tree_low_cst (lo_index, 0),
4877 hi = tree_low_cst (hi_index, 0),
05c0b405
PB
4878 count = hi - lo + 1,
4879 (GET_CODE (target) != MEM
4880 || count <= 2
19caa751
RK
4881 || (host_integerp (TYPE_SIZE (elttype), 1)
4882 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4883 <= 40 * 8)))))
e1a43f73 4884 {
05c0b405
PB
4885 lo -= minelt; hi -= minelt;
4886 for (; lo <= hi; lo++)
e1a43f73 4887 {
19caa751 4888 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
10b76d73
RK
4889
4890 if (GET_CODE (target) == MEM
4891 && !MEM_KEEP_ALIAS_SET_P (target)
e6834654 4892 && TREE_CODE (type) == ARRAY_TYPE
10b76d73
RK
4893 && TYPE_NONALIASED_COMPONENT (type))
4894 {
4895 target = copy_rtx (target);
4896 MEM_KEEP_ALIAS_SET_P (target) = 1;
4897 }
4898
23cb1766 4899 store_constructor_field
04050c69
RK
4900 (target, bitsize, bitpos, mode, value, type, cleared,
4901 get_alias_set (elttype));
e1a43f73
PB
4902 }
4903 }
4904 else
4905 {
4977bab6 4906 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
e1a43f73
PB
4907 loop_end = gen_label_rtx ();
4908
4909 unsignedp = TREE_UNSIGNED (domain);
4910
4911 index = build_decl (VAR_DECL, NULL_TREE, domain);
4912
19e7881c 4913 index_r
e1a43f73
PB
4914 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4915 &unsignedp, 0));
19e7881c 4916 SET_DECL_RTL (index, index_r);
e1a43f73
PB
4917 if (TREE_CODE (value) == SAVE_EXPR
4918 && SAVE_EXPR_RTL (value) == 0)
4919 {
0f41302f
MS
4920 /* Make sure value gets expanded once before the
4921 loop. */
e1a43f73
PB
4922 expand_expr (value, const0_rtx, VOIDmode, 0);
4923 emit_queue ();
4924 }
4925 store_expr (lo_index, index_r, 0);
4926 loop = expand_start_loop (0);
4927
0f41302f 4928 /* Assign value to element index. */
fed3cef0
RK
4929 position
4930 = convert (ssizetype,
4931 fold (build (MINUS_EXPR, TREE_TYPE (index),
4932 index, TYPE_MIN_VALUE (domain))));
4933 position = size_binop (MULT_EXPR, position,
4934 convert (ssizetype,
4935 TYPE_SIZE_UNIT (elttype)));
4936
e1a43f73 4937 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
0d4903b8
RK
4938 xtarget = offset_address (target, pos_rtx,
4939 highest_pow2_factor (position));
4940 xtarget = adjust_address (xtarget, mode, 0);
e1a43f73 4941 if (TREE_CODE (value) == CONSTRUCTOR)
04050c69 4942 store_constructor (value, xtarget, cleared,
b7010412 4943 bitsize / BITS_PER_UNIT);
e1a43f73
PB
4944 else
4945 store_expr (value, xtarget, 0);
4946
4947 expand_exit_loop_if_false (loop,
4948 build (LT_EXPR, integer_type_node,
4949 index, hi_index));
4950
4951 expand_increment (build (PREINCREMENT_EXPR,
4952 TREE_TYPE (index),
7b8b9722 4953 index, integer_one_node), 0, 0);
e1a43f73
PB
4954 expand_end_loop ();
4955 emit_label (loop_end);
e1a43f73
PB
4956 }
4957 }
19caa751
RK
4958 else if ((index != 0 && ! host_integerp (index, 0))
4959 || ! host_integerp (TYPE_SIZE (elttype), 1))
03dc44a6 4960 {
03dc44a6
RS
4961 tree position;
4962
997404de
JH
4963 if (vector)
4964 abort ();
4965
5b6c44ff 4966 if (index == 0)
fed3cef0 4967 index = ssize_int (1);
5b6c44ff 4968
e1a43f73 4969 if (minelt)
fed3cef0
RK
4970 index = convert (ssizetype,
4971 fold (build (MINUS_EXPR, index,
4972 TYPE_MIN_VALUE (domain))));
19caa751 4973
fed3cef0
RK
4974 position = size_binop (MULT_EXPR, index,
4975 convert (ssizetype,
4976 TYPE_SIZE_UNIT (elttype)));
0d4903b8
RK
4977 xtarget = offset_address (target,
4978 expand_expr (position, 0, VOIDmode, 0),
4979 highest_pow2_factor (position));
4980 xtarget = adjust_address (xtarget, mode, 0);
e1a43f73 4981 store_expr (value, xtarget, 0);
03dc44a6 4982 }
997404de
JH
4983 else if (vector)
4984 {
4985 int pos;
4986
4987 if (index != 0)
4988 pos = tree_low_cst (index, 0) - minelt;
4989 else
4990 pos = i;
4991 vector[pos] = expand_expr (value, NULL_RTX, VOIDmode, 0);
4992 }
03dc44a6
RS
4993 else
4994 {
4995 if (index != 0)
19caa751
RK
4996 bitpos = ((tree_low_cst (index, 0) - minelt)
4997 * tree_low_cst (TYPE_SIZE (elttype), 1));
03dc44a6 4998 else
19caa751
RK
4999 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5000
10b76d73 5001 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
e6834654 5002 && TREE_CODE (type) == ARRAY_TYPE
10b76d73
RK
5003 && TYPE_NONALIASED_COMPONENT (type))
5004 {
5005 target = copy_rtx (target);
5006 MEM_KEEP_ALIAS_SET_P (target) = 1;
5007 }
9b9bd3b2
JH
5008 store_constructor_field (target, bitsize, bitpos, mode, value,
5009 type, cleared, get_alias_set (elttype));
03dc44a6 5010 }
bbf6f052 5011 }
997404de
JH
5012 if (vector)
5013 {
5014 emit_insn (GEN_FCN (icode) (target,
5015 gen_rtx_PARALLEL (GET_MODE (target),
5016 gen_rtvec_v (n_elts, vector))));
5017 }
bbf6f052 5018 }
19caa751 5019
3a94c984 5020 /* Set constructor assignments. */
071a6595
PB
5021 else if (TREE_CODE (type) == SET_TYPE)
5022 {
e1a43f73 5023 tree elt = CONSTRUCTOR_ELTS (exp);
19caa751 5024 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
071a6595
PB
5025 tree domain = TYPE_DOMAIN (type);
5026 tree domain_min, domain_max, bitlength;
5027
9faa82d8 5028 /* The default implementation strategy is to extract the constant
071a6595
PB
5029 parts of the constructor, use that to initialize the target,
5030 and then "or" in whatever non-constant ranges we need in addition.
5031
5032 If a large set is all zero or all ones, it is
5033 probably better to set it using memset (if available) or bzero.
5034 Also, if a large set has just a single range, it may also be
 5035 better to first clear the whole set (using
0f41302f 5036 bzero/memset), and set the bits we want. */
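      /* For example (illustrative only): when most of the set is given by
	 constant ranges and a single range is computed at run time, the
	 constant words can be stored with the plain moves in the loop below
	 and the remaining range filled in via the setbits library call.  */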
3a94c984 5037
0f41302f 5038 /* Check for all zeros. */
9376fcd6 5039 if (elt == NULL_TREE && size > 0)
071a6595 5040 {
e1a43f73 5041 if (!cleared)
8ac61af7 5042 clear_storage (target, GEN_INT (size));
071a6595
PB
5043 return;
5044 }
5045
071a6595
PB
5046 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5047 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5048 bitlength = size_binop (PLUS_EXPR,
fed3cef0
RK
5049 size_diffop (domain_max, domain_min),
5050 ssize_int (1));
071a6595 5051
19caa751 5052 nbits = tree_low_cst (bitlength, 1);
e1a43f73
PB
5053
5054 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5055 are "complicated" (more than one range), initialize (the
3a94c984 5056 constant parts) by copying from a constant. */
e1a43f73
PB
5057 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5058 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
071a6595 5059 {
19caa751 5060 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
b4ee5a72 5061 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
703ad42b 5062 char *bit_buffer = alloca (nbits);
b4ee5a72 5063 HOST_WIDE_INT word = 0;
19caa751
RK
5064 unsigned int bit_pos = 0;
5065 unsigned int ibit = 0;
5066 unsigned int offset = 0; /* In bytes from beginning of set. */
5067
e1a43f73 5068 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
b4ee5a72 5069 for (;;)
071a6595 5070 {
b4ee5a72
PB
5071 if (bit_buffer[ibit])
5072 {
b09f3348 5073 if (BYTES_BIG_ENDIAN)
b4ee5a72
PB
5074 word |= (1 << (set_word_size - 1 - bit_pos));
5075 else
5076 word |= 1 << bit_pos;
5077 }
19caa751 5078
b4ee5a72
PB
5079 bit_pos++; ibit++;
5080 if (bit_pos >= set_word_size || ibit == nbits)
071a6595 5081 {
e1a43f73
PB
5082 if (word != 0 || ! cleared)
5083 {
5084 rtx datum = GEN_INT (word);
5085 rtx to_rtx;
19caa751 5086
0f41302f
MS
5087 /* The assumption here is that it is safe to use
5088 XEXP if the set is multi-word, but not if
5089 it's single-word. */
e1a43f73 5090 if (GET_CODE (target) == MEM)
f4ef873c 5091 to_rtx = adjust_address (target, mode, offset);
3a94c984 5092 else if (offset == 0)
e1a43f73
PB
5093 to_rtx = target;
5094 else
5095 abort ();
5096 emit_move_insn (to_rtx, datum);
5097 }
19caa751 5098
b4ee5a72
PB
5099 if (ibit == nbits)
5100 break;
5101 word = 0;
5102 bit_pos = 0;
5103 offset += set_word_size / BITS_PER_UNIT;
071a6595
PB
5104 }
5105 }
071a6595 5106 }
e1a43f73 5107 else if (!cleared)
19caa751
RK
5108 /* Don't bother clearing storage if the set is all ones. */
5109 if (TREE_CHAIN (elt) != NULL_TREE
5110 || (TREE_PURPOSE (elt) == NULL_TREE
5111 ? nbits != 1
5112 : ( ! host_integerp (TREE_VALUE (elt), 0)
5113 || ! host_integerp (TREE_PURPOSE (elt), 0)
5114 || (tree_low_cst (TREE_VALUE (elt), 0)
5115 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5116 != (HOST_WIDE_INT) nbits))))
8ac61af7 5117 clear_storage (target, expr_size (exp));
3a94c984 5118
e1a43f73 5119 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
071a6595 5120 {
3a94c984 5121 /* Start of range of element or NULL. */
071a6595 5122 tree startbit = TREE_PURPOSE (elt);
3a94c984 5123 /* End of range of element, or element value. */
071a6595
PB
5124 tree endbit = TREE_VALUE (elt);
5125 HOST_WIDE_INT startb, endb;
19caa751 5126 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
071a6595
PB
5127
5128 bitlength_rtx = expand_expr (bitlength,
19caa751 5129 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
071a6595 5130
3a94c984 5131 /* Handle non-range tuple element like [ expr ]. */
071a6595
PB
5132 if (startbit == NULL_TREE)
5133 {
5134 startbit = save_expr (endbit);
5135 endbit = startbit;
5136 }
19caa751 5137
071a6595
PB
5138 startbit = convert (sizetype, startbit);
5139 endbit = convert (sizetype, endbit);
5140 if (! integer_zerop (domain_min))
5141 {
5142 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5143 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5144 }
3a94c984 5145 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
071a6595 5146 EXPAND_CONST_ADDRESS);
3a94c984 5147 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
071a6595
PB
5148 EXPAND_CONST_ADDRESS);
5149
5150 if (REG_P (target))
5151 {
1da68f56
RK
5152 targetx
5153 = assign_temp
ae2bcd98 5154 ((build_qualified_type (lang_hooks.types.type_for_mode
b0c48229 5155 (GET_MODE (target), 0),
1da68f56
RK
5156 TYPE_QUAL_CONST)),
5157 0, 1, 1);
071a6595
PB
5158 emit_move_insn (targetx, target);
5159 }
19caa751 5160
071a6595
PB
5161 else if (GET_CODE (target) == MEM)
5162 targetx = target;
5163 else
5164 abort ();
5165
4ca79136
RH
5166 /* Optimization: If startbit and endbit are constants divisible
5167 by BITS_PER_UNIT, call memset instead. */
5168 if (TARGET_MEM_FUNCTIONS
5169 && TREE_CODE (startbit) == INTEGER_CST
071a6595
PB
5170 && TREE_CODE (endbit) == INTEGER_CST
5171 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
e1a43f73 5172 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
071a6595 5173 {
ebb1b59a 5174 emit_library_call (memset_libfunc, LCT_NORMAL,
071a6595 5175 VOIDmode, 3,
e1a43f73
PB
5176 plus_constant (XEXP (targetx, 0),
5177 startb / BITS_PER_UNIT),
071a6595 5178 Pmode,
3b6f75e2 5179 constm1_rtx, TYPE_MODE (integer_type_node),
071a6595 5180 GEN_INT ((endb - startb) / BITS_PER_UNIT),
3b6f75e2 5181 TYPE_MODE (sizetype));
071a6595
PB
5182 }
5183 else
68d28100
RH
5184 emit_library_call (setbits_libfunc, LCT_NORMAL,
5185 VOIDmode, 4, XEXP (targetx, 0),
ebb1b59a 5186 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
19caa751
RK
5187 startbit_rtx, TYPE_MODE (sizetype),
5188 endbit_rtx, TYPE_MODE (sizetype));
5189
071a6595
PB
5190 if (REG_P (target))
5191 emit_move_insn (target, targetx);
5192 }
5193 }
bbf6f052
RK
5194
5195 else
5196 abort ();
5197}
5198
5199/* Store the value of EXP (an expression tree)
5200 into a subfield of TARGET which has mode MODE and occupies
5201 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5202 If MODE is VOIDmode, it means that we are storing into a bit-field.
5203
5204 If VALUE_MODE is VOIDmode, return nothing in particular.
5205 UNSIGNEDP is not used in this case.
5206
5207 Otherwise, return an rtx for the value stored. This rtx
5208 has mode VALUE_MODE if that is convenient to do.
5209 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5210
a06ef755 5211 TYPE is the type of the underlying object.
ece32014
MM
5212
5213 ALIAS_SET is the alias set for the destination. This value will
5214 (in general) be different from that for TARGET, since TARGET is a
5215 reference to the containing structure. */
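/* A sketch of a typical use (illustrative, not part of the original
   comment): for an assignment such as

	struct S { unsigned f : 3; } s;
	s.f = x;

   the bit-field member is normally stored through this function with
   BITSIZE == 3 and MODE == VOIDmode, so the value goes through
   store_bit_field rather than an ordinary memory reference.  */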
bbf6f052
RK
5216
5217static rtx
502b8322
AJ
5218store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5219 enum machine_mode mode, tree exp, enum machine_mode value_mode,
5220 int unsignedp, tree type, int alias_set)
bbf6f052 5221{
906c4e36 5222 HOST_WIDE_INT width_mask = 0;
bbf6f052 5223
e9a25f70
JL
5224 if (TREE_CODE (exp) == ERROR_MARK)
5225 return const0_rtx;
5226
2be6a7e9
RK
5227 /* If we have nothing to store, do nothing unless the expression has
5228 side-effects. */
5229 if (bitsize == 0)
5230 return expand_expr (exp, const0_rtx, VOIDmode, 0);
6a87d634 5231 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
906c4e36 5232 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
bbf6f052
RK
5233
5234 /* If we are storing into an unaligned field of an aligned union that is
5235 in a register, we may have the mode of TARGET being an integer mode but
5236 MODE == BLKmode. In that case, get an aligned object whose size and
5237 alignment are the same as TARGET and store TARGET into it (we can avoid
5238 the store if the field being stored is the entire width of TARGET). Then
5239 call ourselves recursively to store the field into a BLKmode version of
5240 that object. Finally, load from the object into TARGET. This is not
5241 very efficient in general, but should only be slightly more expensive
5242 than the otherwise-required unaligned accesses. Perhaps this can be
85a43a2f
RK
5243 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5244 twice, once with emit_move_insn and once via store_field. */
bbf6f052
RK
5245
5246 if (mode == BLKmode
5247 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5248 {
85a43a2f 5249 rtx object = assign_temp (type, 0, 1, 1);
c4e59f51 5250 rtx blk_object = adjust_address (object, BLKmode, 0);
bbf6f052 5251
8752c357 5252 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
bbf6f052
RK
5253 emit_move_insn (object, target);
5254
a06ef755
RK
5255 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5256 alias_set);
bbf6f052
RK
5257
5258 emit_move_insn (target, object);
5259
a06ef755 5260 /* We want to return the BLKmode version of the data. */
46093b97 5261 return blk_object;
bbf6f052 5262 }
c3b247b4
JM
5263
5264 if (GET_CODE (target) == CONCAT)
5265 {
5266 /* We're storing into a struct containing a single __complex. */
5267
5268 if (bitpos != 0)
5269 abort ();
5270 return store_expr (exp, target, 0);
5271 }
bbf6f052
RK
5272
5273 /* If the structure is in a register or if the component
5274 is a bit field, we cannot use addressing to access it.
5275 Use bit-field techniques or SUBREG to store in it. */
5276
4fa52007 5277 if (mode == VOIDmode
6ab06cbb
JW
5278 || (mode != BLKmode && ! direct_store[(int) mode]
5279 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5280 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4fa52007 5281 || GET_CODE (target) == REG
c980ac49 5282 || GET_CODE (target) == SUBREG
ccc98036
RS
5283 /* If the field isn't aligned enough to store as an ordinary memref,
5284 store it as a bit field. */
15b19a7d 5285 || (mode != BLKmode
9e5f281f
OH
5286 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5287 || bitpos % GET_MODE_ALIGNMENT (mode))
5288 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
502b8322 5289 || (bitpos % BITS_PER_UNIT != 0)))
14a774a9
RK
5290 /* If the RHS and field are a constant size and the size of the
5291 RHS isn't the same size as the bitfield, we must use bitfield
5292 operations. */
05bccae2
RK
5293 || (bitsize >= 0
5294 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5295 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
bbf6f052 5296 {
906c4e36 5297 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
bbd6cf73 5298
ef19912d
RK
5299 /* If BITSIZE is narrower than the size of the type of EXP
5300 we will be narrowing TEMP. Normally, what's wanted are the
5301 low-order bits. However, if EXP's type is a record and this is
 5302 a big-endian machine, we want the upper BITSIZE bits. */
5303 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
65a07688 5304 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
ef19912d
RK
5305 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5306 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5307 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5308 - bitsize),
c1853da7 5309 NULL_RTX, 1);
ef19912d 5310
bbd6cf73
RK
5311 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5312 MODE. */
5313 if (mode != VOIDmode && mode != BLKmode
5314 && mode != TYPE_MODE (TREE_TYPE (exp)))
5315 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5316
a281e72d
RK
5317 /* If the modes of TARGET and TEMP are both BLKmode, both
5318 must be in memory and BITPOS must be aligned on a byte
5319 boundary. If so, we simply do a block copy. */
5320 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5321 {
5322 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5323 || bitpos % BITS_PER_UNIT != 0)
5324 abort ();
5325
f4ef873c 5326 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
a281e72d 5327 emit_block_move (target, temp,
a06ef755 5328 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
44bb111a
RH
5329 / BITS_PER_UNIT),
5330 BLOCK_OP_NORMAL);
a281e72d
RK
5331
5332 return value_mode == VOIDmode ? const0_rtx : target;
5333 }
5334
bbf6f052 5335 /* Store the value in the bitfield. */
a06ef755
RK
5336 store_bit_field (target, bitsize, bitpos, mode, temp,
5337 int_size_in_bytes (type));
5338
bbf6f052
RK
5339 if (value_mode != VOIDmode)
5340 {
04050c69
RK
5341 /* The caller wants an rtx for the value.
5342 If possible, avoid refetching from the bitfield itself. */
bbf6f052
RK
5343 if (width_mask != 0
5344 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5c4d7cfb 5345 {
9074de27 5346 tree count;
5c4d7cfb 5347 enum machine_mode tmode;
86a2c12a 5348
5c4d7cfb 5349 tmode = GET_MODE (temp);
86a2c12a
RS
5350 if (tmode == VOIDmode)
5351 tmode = value_mode;
22273300
JJ
5352
5353 if (unsignedp)
5354 return expand_and (tmode, temp,
2496c7bd 5355 gen_int_mode (width_mask, tmode),
22273300
JJ
5356 NULL_RTX);
5357
5c4d7cfb
RS
5358 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5359 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5360 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5361 }
04050c69 5362
bbf6f052 5363 return extract_bit_field (target, bitsize, bitpos, unsignedp,
04050c69 5364 NULL_RTX, value_mode, VOIDmode,
a06ef755 5365 int_size_in_bytes (type));
bbf6f052
RK
5366 }
5367 return const0_rtx;
5368 }
5369 else
5370 {
5371 rtx addr = XEXP (target, 0);
a06ef755 5372 rtx to_rtx = target;
bbf6f052
RK
5373
5374 /* If a value is wanted, it must be the lhs;
5375 so make the address stable for multiple use. */
5376
5377 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5378 && ! CONSTANT_ADDRESS_P (addr)
5379 /* A frame-pointer reference is already stable. */
5380 && ! (GET_CODE (addr) == PLUS
5381 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5382 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5383 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
a06ef755 5384 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
bbf6f052
RK
5385
5386 /* Now build a reference to just the desired component. */
5387
a06ef755
RK
5388 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5389
5390 if (to_rtx == target)
5391 to_rtx = copy_rtx (to_rtx);
792760b9 5392
c6df88cb 5393 MEM_SET_IN_STRUCT_P (to_rtx, 1);
10b76d73 5394 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
a06ef755 5395 set_mem_alias_set (to_rtx, alias_set);
bbf6f052
RK
5396
5397 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5398 }
5399}
5400\f
5401/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
b4e3fabb
RK
5402 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5403 codes and find the ultimate containing object, which we return.
bbf6f052
RK
5404
5405 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5406 bit position, and *PUNSIGNEDP to the signedness of the field.
7bb0943f
RS
5407 If the position of the field is variable, we store a tree
5408 giving the variable offset (in units) in *POFFSET.
5409 This offset is in addition to the bit position.
5410 If the position is not variable, we store 0 in *POFFSET.
bbf6f052
RK
5411
5412 If any of the extraction expressions is volatile,
5413 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5414
5415 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5416 is a mode that can be used to access the field. In that case, *PBITSIZE
e7c33f54
RK
5417 is redundant.
5418
5419 If the field describes a variable-sized object, *PMODE is set to
5420 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
6d2f8887 5421 this case, but the address of the object can be found. */
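/* An illustrative example (not from the original comment): for a reference
   such as  a.b[i].c  with a variable index I, this function walks down to
   and returns the tree for `a'; the parts of the position that depend on I
   end up in the tree stored through *POFFSET, the constant bit offsets are
   accumulated into *PBITPOS, and *PMODE and *PBITSIZE describe the field
   `c' itself.  */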
bbf6f052
RK
5422
5423tree
502b8322
AJ
5424get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5425 HOST_WIDE_INT *pbitpos, tree *poffset,
5426 enum machine_mode *pmode, int *punsignedp,
5427 int *pvolatilep)
bbf6f052
RK
5428{
5429 tree size_tree = 0;
5430 enum machine_mode mode = VOIDmode;
fed3cef0 5431 tree offset = size_zero_node;
770ae6cc 5432 tree bit_offset = bitsize_zero_node;
770ae6cc 5433 tree tem;
bbf6f052 5434
770ae6cc
RK
5435 /* First get the mode, signedness, and size. We do this from just the
5436 outermost expression. */
bbf6f052
RK
5437 if (TREE_CODE (exp) == COMPONENT_REF)
5438 {
5439 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5440 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5441 mode = DECL_MODE (TREE_OPERAND (exp, 1));
770ae6cc 5442
bbf6f052
RK
5443 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5444 }
5445 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5446 {
5447 size_tree = TREE_OPERAND (exp, 1);
5448 *punsignedp = TREE_UNSIGNED (exp);
5449 }
5450 else
5451 {
5452 mode = TYPE_MODE (TREE_TYPE (exp));
770ae6cc
RK
5453 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5454
ab87f8c8
JL
5455 if (mode == BLKmode)
5456 size_tree = TYPE_SIZE (TREE_TYPE (exp));
770ae6cc
RK
5457 else
5458 *pbitsize = GET_MODE_BITSIZE (mode);
bbf6f052 5459 }
3a94c984 5460
770ae6cc 5461 if (size_tree != 0)
bbf6f052 5462 {
770ae6cc 5463 if (! host_integerp (size_tree, 1))
e7c33f54
RK
5464 mode = BLKmode, *pbitsize = -1;
5465 else
770ae6cc 5466 *pbitsize = tree_low_cst (size_tree, 1);
bbf6f052
RK
5467 }
5468
5469 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5470 and find the ultimate containing object. */
bbf6f052
RK
5471 while (1)
5472 {
770ae6cc
RK
5473 if (TREE_CODE (exp) == BIT_FIELD_REF)
5474 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5475 else if (TREE_CODE (exp) == COMPONENT_REF)
bbf6f052 5476 {
770ae6cc
RK
5477 tree field = TREE_OPERAND (exp, 1);
5478 tree this_offset = DECL_FIELD_OFFSET (field);
bbf6f052 5479
e7f3c83f
RK
5480 /* If this field hasn't been filled in yet, don't go
5481 past it. This should only happen when folding expressions
5482 made during type construction. */
770ae6cc 5483 if (this_offset == 0)
e7f3c83f 5484 break;
6fce44af
RK
5485 else
5486 this_offset = SUBSTITUTE_PLACEHOLDER_IN_EXPR (this_offset, exp);
e7f3c83f 5487
7156dead 5488 offset = size_binop (PLUS_EXPR, offset, this_offset);
770ae6cc
RK
5489 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5490 DECL_FIELD_BIT_OFFSET (field));
e6d8c385 5491
a06ef755 5492 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
bbf6f052 5493 }
7156dead 5494
b4e3fabb
RK
5495 else if (TREE_CODE (exp) == ARRAY_REF
5496 || TREE_CODE (exp) == ARRAY_RANGE_REF)
bbf6f052 5497 {
742920c7 5498 tree index = TREE_OPERAND (exp, 1);
b4e3fabb
RK
5499 tree array = TREE_OPERAND (exp, 0);
5500 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
770ae6cc 5501 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
b4e3fabb 5502 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
742920c7 5503
770ae6cc
RK
5504 /* We assume all arrays have sizes that are a multiple of a byte.
5505 First subtract the lower bound, if any, in the type of the
5506 index, then convert to sizetype and multiply by the size of the
5507 array element. */
5508 if (low_bound != 0 && ! integer_zerop (low_bound))
5509 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5510 index, low_bound));
f8dac6eb 5511
6fce44af
RK
5512 /* If the index has a self-referential type, instantiate it with
 5513 the object; likewise for the component size. */
5514 index = SUBSTITUTE_PLACEHOLDER_IN_EXPR (index, exp);
5515 unit_size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (unit_size, array);
770ae6cc
RK
5516 offset = size_binop (PLUS_EXPR, offset,
5517 size_binop (MULT_EXPR,
5518 convert (sizetype, index),
7156dead 5519 unit_size));
bbf6f052 5520 }
7156dead 5521
c1853da7
RK
5522 /* We can go inside most conversions: all NON_VALUE_EXPRs, all normal
5523 conversions that don't change the mode, and all view conversions
5524 except those that need to "step up" the alignment. */
bbf6f052 5525 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
c1853da7
RK
5526 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5527 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5528 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5529 && STRICT_ALIGNMENT
5530 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5531 < BIGGEST_ALIGNMENT)
5532 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5533 || TYPE_ALIGN_OK (TREE_TYPE
5534 (TREE_OPERAND (exp, 0))))))
bbf6f052
RK
5535 && ! ((TREE_CODE (exp) == NOP_EXPR
5536 || TREE_CODE (exp) == CONVERT_EXPR)
5537 && (TYPE_MODE (TREE_TYPE (exp))
5538 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5539 break;
7bb0943f
RS
5540
5541 /* If any reference in the chain is volatile, the effect is volatile. */
5542 if (TREE_THIS_VOLATILE (exp))
5543 *pvolatilep = 1;
839c4796 5544
bbf6f052
RK
5545 exp = TREE_OPERAND (exp, 0);
5546 }
5547
770ae6cc
RK
5548 /* If OFFSET is constant, see if we can return the whole thing as a
5549 constant bit position. Otherwise, split it up. */
5550 if (host_integerp (offset, 0)
5551 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5552 bitsize_unit_node))
5553 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5554 && host_integerp (tem, 0))
5555 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5556 else
5557 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
b50d17a1 5558
bbf6f052 5559 *pmode = mode;
bbf6f052
RK
5560 return exp;
5561}
921b3427 5562
ed239f5a
RK
5563/* Return 1 if T is an expression that get_inner_reference handles. */
5564
5565int
502b8322 5566handled_component_p (tree t)
ed239f5a
RK
5567{
5568 switch (TREE_CODE (t))
5569 {
5570 case BIT_FIELD_REF:
5571 case COMPONENT_REF:
5572 case ARRAY_REF:
5573 case ARRAY_RANGE_REF:
5574 case NON_LVALUE_EXPR:
5575 case VIEW_CONVERT_EXPR:
5576 return 1;
5577
1a8c4ca6
EB
5578 /* ??? Sure they are handled, but get_inner_reference may return
5579 a different PBITSIZE, depending upon whether the expression is
5580 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
ed239f5a
RK
5581 case NOP_EXPR:
5582 case CONVERT_EXPR:
5583 return (TYPE_MODE (TREE_TYPE (t))
5584 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5585
5586 default:
5587 return 0;
5588 }
5589}
bbf6f052 5590\f
3fe44edd
RK
5591/* Given an rtx VALUE that may contain additions and multiplications, return
5592 an equivalent value that just refers to a register, memory, or constant.
5593 This is done by generating instructions to perform the arithmetic and
5594 returning a pseudo-register containing the value.
c45a13a6
RK
5595
5596 The returned value may be a REG, SUBREG, MEM or constant. */
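/* For instance (illustrative): given a value such as
   (plus (reg R) (const_int 4)), force_operand emits the addition and
   typically returns a pseudo register holding the sum, so the caller is
   left with a plain register rather than an arithmetic expression.  */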
bbf6f052
RK
5597
5598rtx
502b8322 5599force_operand (rtx value, rtx target)
bbf6f052 5600{
8a28dbcc 5601 rtx op1, op2;
bbf6f052 5602 /* Use subtarget as the target for operand 0 of a binary operation. */
b3694847 5603 rtx subtarget = get_subtarget (target);
8a28dbcc 5604 enum rtx_code code = GET_CODE (value);
bbf6f052 5605
50654f6c
ZD
5606 /* Check for subreg applied to an expression produced by loop optimizer. */
5607 if (code == SUBREG
5608 && GET_CODE (SUBREG_REG (value)) != REG
5609 && GET_CODE (SUBREG_REG (value)) != MEM)
5610 {
5611 value = simplify_gen_subreg (GET_MODE (value),
5612 force_reg (GET_MODE (SUBREG_REG (value)),
5613 force_operand (SUBREG_REG (value),
5614 NULL_RTX)),
5615 GET_MODE (SUBREG_REG (value)),
5616 SUBREG_BYTE (value));
5617 code = GET_CODE (value);
5618 }
5619
8b015896 5620 /* Check for a PIC address load. */
8a28dbcc 5621 if ((code == PLUS || code == MINUS)
8b015896
RH
5622 && XEXP (value, 0) == pic_offset_table_rtx
5623 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5624 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5625 || GET_CODE (XEXP (value, 1)) == CONST))
5626 {
5627 if (!subtarget)
5628 subtarget = gen_reg_rtx (GET_MODE (value));
5629 emit_move_insn (subtarget, value);
5630 return subtarget;
5631 }
5632
8a28dbcc 5633 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
bbf6f052 5634 {
8a28dbcc
JH
5635 if (!target)
5636 target = gen_reg_rtx (GET_MODE (value));
ce0f3925 5637 convert_move (target, force_operand (XEXP (value, 0), NULL),
8a28dbcc
JH
5638 code == ZERO_EXTEND);
5639 return target;
bbf6f052
RK
5640 }
5641
ec8e098d 5642 if (ARITHMETIC_P (value))
bbf6f052
RK
5643 {
5644 op2 = XEXP (value, 1);
8a28dbcc 5645 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
bbf6f052 5646 subtarget = 0;
8a28dbcc 5647 if (code == MINUS && GET_CODE (op2) == CONST_INT)
bbf6f052 5648 {
8a28dbcc 5649 code = PLUS;
bbf6f052
RK
5650 op2 = negate_rtx (GET_MODE (value), op2);
5651 }
5652
5653 /* Check for an addition with OP2 a constant integer and our first
8a28dbcc
JH
5654 operand a PLUS of a virtual register and something else. In that
5655 case, we want to emit the sum of the virtual register and the
5656 constant first and then add the other value. This allows virtual
5657 register instantiation to simply modify the constant rather than
5658 creating another one around this addition. */
5659 if (code == PLUS && GET_CODE (op2) == CONST_INT
bbf6f052
RK
5660 && GET_CODE (XEXP (value, 0)) == PLUS
5661 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5662 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5663 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5664 {
8a28dbcc
JH
5665 rtx temp = expand_simple_binop (GET_MODE (value), code,
5666 XEXP (XEXP (value, 0), 0), op2,
5667 subtarget, 0, OPTAB_LIB_WIDEN);
5668 return expand_simple_binop (GET_MODE (value), code, temp,
5669 force_operand (XEXP (XEXP (value,
5670 0), 1), 0),
5671 target, 0, OPTAB_LIB_WIDEN);
bbf6f052 5672 }
3a94c984 5673
8a28dbcc
JH
5674 op1 = force_operand (XEXP (value, 0), subtarget);
5675 op2 = force_operand (op2, NULL_RTX);
5676 switch (code)
5677 {
5678 case MULT:
5679 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5680 case DIV:
5681 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5682 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5683 target, 1, OPTAB_LIB_WIDEN);
5684 else
5685 return expand_divmod (0,
5686 FLOAT_MODE_P (GET_MODE (value))
5687 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5688 GET_MODE (value), op1, op2, target, 0);
5689 break;
5690 case MOD:
5691 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5692 target, 0);
5693 break;
5694 case UDIV:
5695 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5696 target, 1);
5697 break;
5698 case UMOD:
5699 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5700 target, 1);
5701 break;
5702 case ASHIFTRT:
5703 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5704 target, 0, OPTAB_LIB_WIDEN);
5705 break;
5706 default:
5707 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5708 target, 1, OPTAB_LIB_WIDEN);
5709 }
5710 }
ec8e098d 5711 if (UNARY_P (value))
8a28dbcc
JH
5712 {
5713 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5714 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
bbf6f052 5715 }
34e81b5a
RK
5716
5717#ifdef INSN_SCHEDULING
 5718 /* On machines that have insn scheduling, we want all memory references to be
5719 explicit, so we need to deal with such paradoxical SUBREGs. */
5720 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5721 && (GET_MODE_SIZE (GET_MODE (value))
5722 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5723 value
5724 = simplify_gen_subreg (GET_MODE (value),
5725 force_reg (GET_MODE (SUBREG_REG (value)),
5726 force_operand (SUBREG_REG (value),
5727 NULL_RTX)),
5728 GET_MODE (SUBREG_REG (value)),
5729 SUBREG_BYTE (value));
5730#endif
5731
bbf6f052
RK
5732 return value;
5733}
5734\f
bbf6f052 5735/* Subroutine of expand_expr: return nonzero iff there is no way that
e5e809f4
JL
5736 EXP can reference X, which is being modified. TOP_P is nonzero if this
5737 call is going to be used to determine whether we need a temporary
ff439b5f
CB
5738 for EXP, as opposed to a recursive call to this function.
5739
5740 It is always safe for this routine to return zero since it merely
5741 searches for optimization opportunities. */
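/* (For instance, expand_operands below drops its suggested TARGET whenever
   safe_from_p reports that expanding the second operand might clobber it;
   a conservative zero answer there merely costs an extra temporary.)  */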
bbf6f052 5742
8f17b5c5 5743int
502b8322 5744safe_from_p (rtx x, tree exp, int top_p)
bbf6f052
RK
5745{
5746 rtx exp_rtl = 0;
5747 int i, nops;
1da68f56 5748 static tree save_expr_list;
bbf6f052 5749
6676e72f
RK
5750 if (x == 0
5751 /* If EXP has varying size, we MUST use a target since we currently
8f6562d0
PB
5752 have no way of allocating temporaries of variable size
5753 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5754 So we assume here that something at a higher level has prevented a
f4510f37 5755 clash. This is somewhat bogus, but the best we can do. Only
e5e809f4 5756 do this when X is BLKmode and when we are at the top level. */
d0f062fb 5757 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
f4510f37 5758 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
8f6562d0
PB
5759 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5760 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5761 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5762 != INTEGER_CST)
1da68f56
RK
5763 && GET_MODE (x) == BLKmode)
5764 /* If X is in the outgoing argument area, it is always safe. */
5765 || (GET_CODE (x) == MEM
5766 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5767 || (GET_CODE (XEXP (x, 0)) == PLUS
5768 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
bbf6f052
RK
5769 return 1;
5770
5771 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5772 find the underlying pseudo. */
5773 if (GET_CODE (x) == SUBREG)
5774 {
5775 x = SUBREG_REG (x);
5776 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5777 return 0;
5778 }
5779
1da68f56
RK
5780 /* A SAVE_EXPR might appear many times in the expression passed to the
5781 top-level safe_from_p call, and if it has a complex subexpression,
5782 examining it multiple times could result in a combinatorial explosion.
7ef0daad 5783 E.g. on an Alpha running at least 200MHz, a Fortran testcase compiled
1da68f56
RK
5784 with optimization took about 28 minutes to compile -- even though it was
5785 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5786 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5787 we have processed. Note that the only test of top_p was above. */
5788
5789 if (top_p)
5790 {
5791 int rtn;
5792 tree t;
5793
5794 save_expr_list = 0;
5795
5796 rtn = safe_from_p (x, exp, 0);
5797
5798 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5799 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5800
5801 return rtn;
5802 }
bbf6f052 5803
1da68f56 5804 /* Now look at our tree code and possibly recurse. */
bbf6f052
RK
5805 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5806 {
5807 case 'd':
a9772b60 5808 exp_rtl = DECL_RTL_IF_SET (exp);
bbf6f052
RK
5809 break;
5810
5811 case 'c':
5812 return 1;
5813
5814 case 'x':
5815 if (TREE_CODE (exp) == TREE_LIST)
f8d4be57
CE
5816 {
5817 while (1)
5818 {
5819 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5820 return 0;
5821 exp = TREE_CHAIN (exp);
5822 if (!exp)
5823 return 1;
5824 if (TREE_CODE (exp) != TREE_LIST)
5825 return safe_from_p (x, exp, 0);
5826 }
5827 }
ff439b5f
CB
5828 else if (TREE_CODE (exp) == ERROR_MARK)
5829 return 1; /* An already-visited SAVE_EXPR? */
bbf6f052
RK
5830 else
5831 return 0;
5832
bbf6f052
RK
5833 case '2':
5834 case '<':
f8d4be57
CE
5835 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
5836 return 0;
5d3cc252 5837 /* Fall through. */
f8d4be57
CE
5838
5839 case '1':
5840 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
bbf6f052
RK
5841
5842 case 'e':
5843 case 'r':
5844 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5845 the expression. If it is set, we conflict iff we are that rtx or
5846 both are in memory. Otherwise, we check all operands of the
5847 expression recursively. */
5848
5849 switch (TREE_CODE (exp))
5850 {
5851 case ADDR_EXPR:
70072ed9
RK
5852 /* If the operand is static or we are static, we can't conflict.
5853 Likewise if we don't conflict with the operand at all. */
5854 if (staticp (TREE_OPERAND (exp, 0))
5855 || TREE_STATIC (exp)
5856 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5857 return 1;
5858
5859 /* Otherwise, the only way this can conflict is if we are taking
 5860 the address of a DECL whose address is part of X, which is
5861 very rare. */
5862 exp = TREE_OPERAND (exp, 0);
5863 if (DECL_P (exp))
5864 {
5865 if (!DECL_RTL_SET_P (exp)
5866 || GET_CODE (DECL_RTL (exp)) != MEM)
5867 return 0;
5868 else
5869 exp_rtl = XEXP (DECL_RTL (exp), 0);
5870 }
5871 break;
bbf6f052
RK
5872
5873 case INDIRECT_REF:
1da68f56
RK
5874 if (GET_CODE (x) == MEM
5875 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5876 get_alias_set (exp)))
bbf6f052
RK
5877 return 0;
5878 break;
5879
5880 case CALL_EXPR:
f9808f81
MM
5881 /* Assume that the call will clobber all hard registers and
5882 all of memory. */
5883 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5884 || GET_CODE (x) == MEM)
5885 return 0;
bbf6f052
RK
5886 break;
5887
5888 case RTL_EXPR:
3bb5826a
RK
5889 /* If a sequence exists, we would have to scan every instruction
5890 in the sequence to see if it was safe. This is probably not
5891 worthwhile. */
5892 if (RTL_EXPR_SEQUENCE (exp))
bbf6f052
RK
5893 return 0;
5894
3bb5826a 5895 exp_rtl = RTL_EXPR_RTL (exp);
bbf6f052
RK
5896 break;
5897
5898 case WITH_CLEANUP_EXPR:
6ad7895a 5899 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
bbf6f052
RK
5900 break;
5901
5dab5552 5902 case CLEANUP_POINT_EXPR:
e5e809f4 5903 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5dab5552 5904
bbf6f052
RK
5905 case SAVE_EXPR:
5906 exp_rtl = SAVE_EXPR_RTL (exp);
ff439b5f
CB
5907 if (exp_rtl)
5908 break;
5909
1da68f56
RK
5910 /* If we've already scanned this, don't do it again. Otherwise,
5911 show we've scanned it and record for clearing the flag if we're
5912 going on. */
5913 if (TREE_PRIVATE (exp))
5914 return 1;
ff439b5f 5915
1da68f56
RK
5916 TREE_PRIVATE (exp) = 1;
5917 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
ff59bfe6 5918 {
1da68f56
RK
5919 TREE_PRIVATE (exp) = 0;
5920 return 0;
ff59bfe6 5921 }
1da68f56
RK
5922
5923 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
ff439b5f 5924 return 1;
bbf6f052 5925
8129842c
RS
5926 case BIND_EXPR:
5927 /* The only operand we look at is operand 1. The rest aren't
5928 part of the expression. */
e5e809f4 5929 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
8129842c 5930
e9a25f70
JL
5931 default:
5932 break;
bbf6f052
RK
5933 }
5934
5935 /* If we have an rtx, we do not need to scan our operands. */
5936 if (exp_rtl)
5937 break;
5938
8f17b5c5 5939 nops = first_rtl_op (TREE_CODE (exp));
bbf6f052
RK
5940 for (i = 0; i < nops; i++)
5941 if (TREE_OPERAND (exp, i) != 0
e5e809f4 5942 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
bbf6f052 5943 return 0;
8f17b5c5
MM
5944
5945 /* If this is a language-specific tree code, it may require
5946 special handling. */
dbbbbf3b
JDA
5947 if ((unsigned int) TREE_CODE (exp)
5948 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
ae2bcd98 5949 && !lang_hooks.safe_from_p (x, exp))
8f17b5c5 5950 return 0;
bbf6f052
RK
5951 }
5952
5953 /* If we have an rtl, find any enclosed object. Then see if we conflict
5954 with it. */
5955 if (exp_rtl)
5956 {
5957 if (GET_CODE (exp_rtl) == SUBREG)
5958 {
5959 exp_rtl = SUBREG_REG (exp_rtl);
5960 if (GET_CODE (exp_rtl) == REG
5961 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5962 return 0;
5963 }
5964
5965 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
1da68f56 5966 are memory and they conflict. */
bbf6f052
RK
5967 return ! (rtx_equal_p (x, exp_rtl)
5968 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
21117a17 5969 && true_dependence (exp_rtl, VOIDmode, x,
1da68f56 5970 rtx_addr_varies_p)));
bbf6f052
RK
5971 }
5972
5973 /* If we reach here, it is safe. */
5974 return 1;
5975}
5976
01c8a7c8
RK
5977/* Subroutine of expand_expr: return rtx if EXP is a
5978 variable or parameter; else return 0. */
5979
5980static rtx
502b8322 5981var_rtx (tree exp)
01c8a7c8
RK
5982{
5983 STRIP_NOPS (exp);
5984 switch (TREE_CODE (exp))
5985 {
5986 case PARM_DECL:
5987 case VAR_DECL:
5988 return DECL_RTL (exp);
5989 default:
5990 return 0;
5991 }
5992}
14a774a9 5993\f
0d4903b8
RK
5994/* Return the highest power of two that EXP is known to be a multiple of.
5995 This is used in updating alignment of MEMs in array references. */
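/* A worked example (illustrative): for an offset written as  n * 8 + 12,
   the MULT_EXPR operand contributes 8, the constant 12 contributes 4 (its
   lowest set bit), and the PLUS_EXPR case takes the minimum of the two, so
   the result is 4.  */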
5996
9ceca302 5997static unsigned HOST_WIDE_INT
502b8322 5998highest_pow2_factor (tree exp)
0d4903b8 5999{
9ceca302 6000 unsigned HOST_WIDE_INT c0, c1;
0d4903b8
RK
6001
6002 switch (TREE_CODE (exp))
6003 {
6004 case INTEGER_CST:
e0f1be5c
JJ
6005 /* We can find the lowest bit that's a one. If the low
6006 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6007 We need to handle this case since we can find it in a COND_EXPR,
a98ebe2e 6008 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
e0f1be5c 6009 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
3a531a8b 6010 later ICE. */
e0f1be5c 6011 if (TREE_CONSTANT_OVERFLOW (exp))
1ed1b4fb 6012 return BIGGEST_ALIGNMENT;
e0f1be5c 6013 else
0d4903b8 6014 {
e0f1be5c
JJ
6015 /* Note: tree_low_cst is intentionally not used here,
6016 we don't care about the upper bits. */
6017 c0 = TREE_INT_CST_LOW (exp);
6018 c0 &= -c0;
6019 return c0 ? c0 : BIGGEST_ALIGNMENT;
0d4903b8
RK
6020 }
6021 break;
6022
65a07688 6023 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
0d4903b8
RK
6024 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6025 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6026 return MIN (c0, c1);
6027
6028 case MULT_EXPR:
6029 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6030 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6031 return c0 * c1;
6032
6033 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6034 case CEIL_DIV_EXPR:
65a07688
RK
6035 if (integer_pow2p (TREE_OPERAND (exp, 1))
6036 && host_integerp (TREE_OPERAND (exp, 1), 1))
6037 {
6038 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6039 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6040 return MAX (1, c0 / c1);
6041 }
6042 break;
0d4903b8
RK
6043
6044 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6fce44af 6045 case SAVE_EXPR:
0d4903b8
RK
6046 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6047
65a07688
RK
6048 case COMPOUND_EXPR:
6049 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6050
0d4903b8
RK
6051 case COND_EXPR:
6052 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6053 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6054 return MIN (c0, c1);
6055
6056 default:
6057 break;
6058 }
6059
6060 return 1;
6061}
818c0c94 6062
d50a16c4
EB
6063/* Similar, except that the alignment requirements of TARGET are
6064 taken into account. Assume it is at least as aligned as its
6065 type, unless it is a COMPONENT_REF in which case the layout of
6066 the structure gives the alignment. */
818c0c94 6067
9ceca302 6068static unsigned HOST_WIDE_INT
d50a16c4 6069highest_pow2_factor_for_target (tree target, tree exp)
818c0c94 6070{
d50a16c4 6071 unsigned HOST_WIDE_INT target_align, factor;
818c0c94
RH
6072
6073 factor = highest_pow2_factor (exp);
d50a16c4
EB
6074 if (TREE_CODE (target) == COMPONENT_REF)
6075 target_align = DECL_ALIGN (TREE_OPERAND (target, 1)) / BITS_PER_UNIT;
6076 else
6077 target_align = TYPE_ALIGN (TREE_TYPE (target)) / BITS_PER_UNIT;
6078 return MAX (factor, target_align);
818c0c94 6079}
0d4903b8 6080\f
eb698c58
RS
6081/* Subroutine of expand_expr. Expand the two operands of a binary
6082 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6083 The value may be stored in TARGET if TARGET is nonzero. The
6084 MODIFIER argument is as documented by expand_expr. */
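/* (Purely illustrative: for an expression such as  a + a  the two operands
   compare equal via operand_equal_p, so the second rtx is simply a copy of
   the first instead of being expanded twice.)  */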
6085
6086static void
6087expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6088 enum expand_modifier modifier)
6089{
6090 if (! safe_from_p (target, exp1, 1))
6091 target = 0;
6092 if (operand_equal_p (exp0, exp1, 0))
6093 {
6094 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6095 *op1 = copy_rtx (*op0);
6096 }
6097 else
6098 {
c67e6e14
RS
6099 /* If we need to preserve evaluation order, copy exp0 into its own
6100 temporary variable so that it can't be clobbered by exp1. */
6101 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6102 exp0 = save_expr (exp0);
eb698c58
RS
6103 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6104 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6105 }
6106}
6107
f47e9b4e 6108\f
bbf6f052
RK
6109/* expand_expr: generate code for computing expression EXP.
6110 An rtx for the computed value is returned. The value is never null.
6111 In the case of a void EXP, const0_rtx is returned.
6112
6113 The value may be stored in TARGET if TARGET is nonzero.
6114 TARGET is just a suggestion; callers must assume that
6115 the rtx returned may not be the same as TARGET.
6116
6117 If TARGET is CONST0_RTX, it means that the value will be ignored.
6118
6119 If TMODE is not VOIDmode, it suggests generating the
6120 result in mode TMODE. But this is done only when convenient.
 6121 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6122 TMODE is just a suggestion; callers must assume that
6123 the rtx returned may not have mode TMODE.
6124
d6a5ac33
RK
6125 Note that TARGET may have neither TMODE nor MODE. In that case, it
6126 probably will not be used.
bbf6f052
RK
6127
6128 If MODIFIER is EXPAND_SUM then when EXP is an addition
6129 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6130 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6131 products as above, or REG or MEM, or constant.
6132 Ordinarily in such cases we would output mul or add instructions
6133 and then return a pseudo reg containing the sum.
6134
6135 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6136 it also marks a label as absolutely required (it can't be dead).
26fcb35a 6137 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
d6a5ac33
RK
6138 This is used for outputting expressions used in initializers.
6139
6140 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6141 with a constant address even if that address is not normally legitimate.
8403445a
AM
6142 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6143
6144 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6145 a call parameter. Such targets require special care as we haven't yet
6146 marked TARGET so that it's safe from being trashed by libcalls. We
6147 don't want to use TARGET for anything but the final result;
6148 Intermediate values must go elsewhere. Additionally, calls to
0fab64a3
MM
6149 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6150
6151 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6152 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6153 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6154 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6155 recursively. */
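/* An illustrative consequence of the modifiers above: expanding a pointer
   addition with EXPAND_SUM may yield an rtx of the form
   (plus (reg) (const_int ...)) instead of a pseudo holding the sum, which
   lets the caller fold the result directly into an address.  */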
bbf6f052
RK
6156
6157rtx
0fab64a3
MM
6158expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6159 enum expand_modifier modifier, rtx *alt_rtl)
bbf6f052 6160{
b3694847 6161 rtx op0, op1, temp;
bbf6f052
RK
6162 tree type = TREE_TYPE (exp);
6163 int unsignedp = TREE_UNSIGNED (type);
b3694847
SS
6164 enum machine_mode mode;
6165 enum tree_code code = TREE_CODE (exp);
bbf6f052 6166 optab this_optab;
68557e14
ML
6167 rtx subtarget, original_target;
6168 int ignore;
bbf6f052
RK
6169 tree context;
6170
3a94c984 6171 /* Handle ERROR_MARK before anybody tries to access its type. */
85f3d674 6172 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
68557e14
ML
6173 {
6174 op0 = CONST0_RTX (tmode);
6175 if (op0 != 0)
6176 return op0;
6177 return const0_rtx;
6178 }
6179
6180 mode = TYPE_MODE (type);
6181 /* Use subtarget as the target for operand 0 of a binary operation. */
296b4ed9 6182 subtarget = get_subtarget (target);
68557e14
ML
6183 original_target = target;
6184 ignore = (target == const0_rtx
6185 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6186 || code == CONVERT_EXPR || code == REFERENCE_EXPR
ac79cd5a 6187 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
68557e14
ML
6188 && TREE_CODE (type) == VOID_TYPE));
6189
dd27116b
RK
6190 /* If we are going to ignore this result, we need only do something
6191 if there is a side-effect somewhere in the expression. If there
b50d17a1
RK
6192 is, short-circuit the most common cases here. Note that we must
6193 not call expand_expr with anything but const0_rtx in case this
6194 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
bbf6f052 6195
dd27116b
RK
6196 if (ignore)
6197 {
6198 if (! TREE_SIDE_EFFECTS (exp))
6199 return const0_rtx;
6200
14a774a9
RK
6201 /* Ensure we reference a volatile object even if value is ignored, but
6202 don't do this if all we are doing is taking its address. */
dd27116b
RK
6203 if (TREE_THIS_VOLATILE (exp)
6204 && TREE_CODE (exp) != FUNCTION_DECL
14a774a9
RK
6205 && mode != VOIDmode && mode != BLKmode
6206 && modifier != EXPAND_CONST_ADDRESS)
dd27116b 6207 {
37a08a29 6208 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
dd27116b
RK
6209 if (GET_CODE (temp) == MEM)
6210 temp = copy_to_reg (temp);
6211 return const0_rtx;
6212 }
6213
14a774a9
RK
6214 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6215 || code == INDIRECT_REF || code == BUFFER_REF)
37a08a29
RK
6216 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6217 modifier);
6218
14a774a9 6219 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
b4e3fabb 6220 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
dd27116b 6221 {
37a08a29
RK
6222 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6223 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
dd27116b
RK
6224 return const0_rtx;
6225 }
6226 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6227 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6228 /* If the second operand has no side effects, just evaluate
0f41302f 6229 the first. */
37a08a29
RK
6230 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6231 modifier);
14a774a9
RK
6232 else if (code == BIT_FIELD_REF)
6233 {
37a08a29
RK
6234 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6235 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6236 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
14a774a9
RK
6237 return const0_rtx;
6238 }
37a08a29 6239
90764a87 6240 target = 0;
dd27116b 6241 }
bbf6f052 6242
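 /* Illustration of the IGNORE path above (a sketch, not code from this
    file).  For expressions whose values are discarded, such as

        (void) (f () + g ());
        volatile int v;  v;

    only the side effects matter: each operand is expanded with const0_rtx
    as the target, and a volatile MEM is copied into a register so the
    reference is not optimized away.  */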
e44842fe
RK
6243 /* If we are going to do cse, generate all results into pseudo registers
6244 since 1) that allows cse to find more things
6245 and 2) otherwise cse could produce an insn the machine
4977bab6
ZW
6246 cannot support. An exception is a CONSTRUCTOR into a multi-word
6247 MEM: storing directly into the MEM is much more likely to be efficient.
6248 Another is a CALL_EXPR which must return in memory. */
e44842fe 6249
bbf6f052 6250 if (! cse_not_expected && mode != BLKmode && target
c24ae149 6251 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
4977bab6 6252 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
61f71b34 6253 && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
8403445a 6254 target = 0;
bbf6f052 6255
bbf6f052
RK
6256 switch (code)
6257 {
6258 case LABEL_DECL:
b552441b
RS
6259 {
6260 tree function = decl_function_context (exp);
046e4e36
ZW
6261 /* Labels in containing functions, or labels used from initializers,
6262 must be forced. */
6263 if (modifier == EXPAND_INITIALIZER
6264 || (function != current_function_decl
6265 && function != inline_function_decl
6266 && function != 0))
6267 temp = force_label_rtx (exp);
ab87f8c8 6268 else
046e4e36 6269 temp = label_rtx (exp);
c5c76735 6270
046e4e36 6271 temp = gen_rtx_MEM (FUNCTION_MODE, gen_rtx_LABEL_REF (Pmode, temp));
d0977240
RK
6272 if (function != current_function_decl
6273 && function != inline_function_decl && function != 0)
26fcb35a
RS
6274 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6275 return temp;
b552441b 6276 }
bbf6f052
RK
6277
6278 case PARM_DECL:
1877be45 6279 if (!DECL_RTL_SET_P (exp))
bbf6f052 6280 {
ddd2d57e 6281 error ("%Jprior parameter's size depends on '%D'", exp, exp);
4af3895e 6282 return CONST0_RTX (mode);
bbf6f052
RK
6283 }
6284
0f41302f 6285 /* ... fall through ... */
d6a5ac33 6286
bbf6f052 6287 case VAR_DECL:
2dca20cd
RS
6288 /* If a static var's type was incomplete when the decl was written,
6289 but the type is complete now, lay out the decl now. */
ca06cfe6
RH
6290 if (DECL_SIZE (exp) == 0
6291 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
2dca20cd 6292 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
a46666a9 6293 layout_decl (exp, 0);
921b3427 6294
0f41302f 6295 /* ... fall through ... */
d6a5ac33 6296
2dca20cd 6297 case FUNCTION_DECL:
bbf6f052
RK
6298 case RESULT_DECL:
6299 if (DECL_RTL (exp) == 0)
6300 abort ();
d6a5ac33 6301
e44842fe
RK
6302 /* Ensure the variable is marked as used even if it doesn't go through
6303 a parser. If it hasn't been used yet, write out an external
6304 definition. */
6305 if (! TREE_USED (exp))
6306 {
6307 assemble_external (exp);
6308 TREE_USED (exp) = 1;
6309 }
6310
dc6d66b3
RK
6311 /* Show we haven't gotten RTL for this yet. */
6312 temp = 0;
6313
bbf6f052
RK
6314 /* Handle variables inherited from containing functions. */
6315 context = decl_function_context (exp);
6316
6317 /* We treat inline_function_decl as an alias for the current function
6318 because that is the inline function whose vars, types, etc.
6319 are being merged into the current function.
6320 See expand_inline_function. */
d6a5ac33 6321
bbf6f052
RK
6322 if (context != 0 && context != current_function_decl
6323 && context != inline_function_decl
6324 /* If var is static, we don't need a static chain to access it. */
6325 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6326 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6327 {
6328 rtx addr;
6329
6330 /* Mark as non-local and addressable. */
81feeecb 6331 DECL_NONLOCAL (exp) = 1;
38ee6ed9
JM
6332 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6333 abort ();
ae2bcd98 6334 lang_hooks.mark_addressable (exp);
bbf6f052
RK
6335 if (GET_CODE (DECL_RTL (exp)) != MEM)
6336 abort ();
6337 addr = XEXP (DECL_RTL (exp), 0);
6338 if (GET_CODE (addr) == MEM)
792760b9
RK
6339 addr
6340 = replace_equiv_address (addr,
6341 fix_lexical_addr (XEXP (addr, 0), exp));
bbf6f052
RK
6342 else
6343 addr = fix_lexical_addr (addr, exp);
3bdf5ad1 6344
792760b9 6345 temp = replace_equiv_address (DECL_RTL (exp), addr);
bbf6f052 6346 }
4af3895e 6347
bbf6f052
RK
6348 /* This is the case of an array whose size is to be determined
6349 from its initializer, while the initializer is still being parsed.
6350 See expand_decl. */
d6a5ac33 6351
dc6d66b3
RK
6352 else if (GET_CODE (DECL_RTL (exp)) == MEM
6353 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
792760b9 6354 temp = validize_mem (DECL_RTL (exp));
d6a5ac33
RK
6355
6356 /* If DECL_RTL is memory, we are in the normal case and either
6357 the address is not valid or it is not a register and -fforce-addr
6358 is specified, get the address into a register. */
6359
dc6d66b3
RK
6360 else if (GET_CODE (DECL_RTL (exp)) == MEM
6361 && modifier != EXPAND_CONST_ADDRESS
6362 && modifier != EXPAND_SUM
6363 && modifier != EXPAND_INITIALIZER
6364 && (! memory_address_p (DECL_MODE (exp),
6365 XEXP (DECL_RTL (exp), 0))
6366 || (flag_force_addr
6367 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
0fab64a3
MM
6368 {
6369 if (alt_rtl)
6370 *alt_rtl = DECL_RTL (exp);
6371 temp = replace_equiv_address (DECL_RTL (exp),
6372 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6373 }
1499e0a8 6374
dc6d66b3 6375 /* If we got something, return it. But first, set the alignment
04956a1a 6376 if the address is a register. */
dc6d66b3
RK
6377 if (temp != 0)
6378 {
6379 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
bdb429a5 6380 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
dc6d66b3
RK
6381
6382 return temp;
6383 }
6384
1499e0a8
RK
6385 /* If the mode of DECL_RTL does not match that of the decl, it
6386 must be a promoted value. We return a SUBREG of the wanted mode,
6387 but mark it so that we know that it was already extended. */
6388
6389 if (GET_CODE (DECL_RTL (exp)) == REG
7254c5fa 6390 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
1499e0a8 6391 {
1499e0a8
RK
6392 /* Get the signedness used for this variable. Ensure we get the
6393 same mode we got when the variable was declared. */
78911e8b 6394 if (GET_MODE (DECL_RTL (exp))
0fb7aeda 6395 != promote_mode (type, DECL_MODE (exp), &unsignedp,
e8dcd824 6396 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
1499e0a8
RK
6397 abort ();
6398
ddef6bc7 6399 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
1499e0a8 6400 SUBREG_PROMOTED_VAR_P (temp) = 1;
7879b81e 6401 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
1499e0a8
RK
6402 return temp;
6403 }
6404
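      /* Sketch of the promoted-variable case above (illustrative; the exact
         modes depend on the target's PROMOTE_MODE).  On a target that
         promotes narrow locals to SImode, a declaration such as

             short s;

         gets an SImode pseudo as its DECL_RTL.  Reading S in HImode then
         yields (subreg:HI (reg:SI N) 0) with SUBREG_PROMOTED_VAR_P set, so
         later code knows the upper bits already hold a valid extension.  */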
bbf6f052
RK
6405 return DECL_RTL (exp);
6406
6407 case INTEGER_CST:
d8a50944 6408 temp = immed_double_const (TREE_INT_CST_LOW (exp),
05bccae2 6409 TREE_INT_CST_HIGH (exp), mode);
bbf6f052 6410
d8a50944
RH
6411 /* ??? If overflow is set, fold will have done an incomplete job,
6412 which can result in (plus xx (const_int 0)), which can get
6413 simplified by validate_replace_rtx during virtual register
6414 instantiation, which can result in unrecognizable insns.
6415 Avoid this by forcing all overflows into registers. */
c2e9dc85
RH
6416 if (TREE_CONSTANT_OVERFLOW (exp)
6417 && modifier != EXPAND_INITIALIZER)
d8a50944
RH
6418 temp = force_reg (mode, temp);
6419
6420 return temp;
6421
d744e06e
AH
6422 case VECTOR_CST:
6423 return const_vector_from_tree (exp);
6424
bbf6f052 6425 case CONST_DECL:
8403445a 6426 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
bbf6f052
RK
6427
6428 case REAL_CST:
6429 /* If optimized, generate immediate CONST_DOUBLE
3a94c984
KH
6430 which will be turned into memory by reload if necessary.
6431
bbf6f052
RK
6432 We used to force a register so that loop.c could see it. But
6433 this does not allow gen_* patterns to perform optimizations with
6434 the constants. It also produces two insns in cases like "x = 1.0;".
6435 On most machines, floating-point constants are not permitted in
6436 many insns, so we'd end up copying it to a register in any case.
6437
6438 Now, we do the copying in expand_binop, if appropriate. */
5692c7bc
ZW
6439 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6440 TYPE_MODE (TREE_TYPE (exp)));
bbf6f052
RK
6441
6442 case COMPLEX_CST:
9ad58e09
RS
6443 /* Handle evaluating a complex constant in a CONCAT target. */
6444 if (original_target && GET_CODE (original_target) == CONCAT)
6445 {
6446 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6447 rtx rtarg, itarg;
6448
6449 rtarg = XEXP (original_target, 0);
6450 itarg = XEXP (original_target, 1);
6451
6452 /* Move the real and imaginary parts separately. */
6453 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6454 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6455
6456 if (op0 != rtarg)
6457 emit_move_insn (rtarg, op0);
6458 if (op1 != itarg)
6459 emit_move_insn (itarg, op1);
6460
6461 return original_target;
6462 }
6463
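 /* Example of the CONCAT case above (a sketch).  For an initialization like

        _Complex double z = 1.0 + 2.0i;

    with a CONCAT of two DFmode registers as the target, the real part 1.0
    and the imaginary part 2.0 are expanded and moved into the two halves
    separately instead of forming one wide move.  */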
71c0e7fc 6464 /* ... fall through ... */
9ad58e09 6465
bbf6f052 6466 case STRING_CST:
afc6aaab 6467 temp = output_constant_def (exp, 1);
bbf6f052 6468
afc6aaab 6469 /* temp contains a constant address.
bbf6f052
RK
6470 On RISC machines where a constant address isn't valid,
6471 make some insns to get that address into a register. */
afc6aaab 6472 if (modifier != EXPAND_CONST_ADDRESS
bbf6f052
RK
6473 && modifier != EXPAND_INITIALIZER
6474 && modifier != EXPAND_SUM
afc6aaab
ZW
6475 && (! memory_address_p (mode, XEXP (temp, 0))
6476 || flag_force_addr))
6477 return replace_equiv_address (temp,
6478 copy_rtx (XEXP (temp, 0)));
6479 return temp;
bbf6f052 6480
bf1e5319 6481 case EXPR_WITH_FILE_LOCATION:
b24f65cd
APB
6482 {
6483 rtx to_return;
72954a4f
JM
6484 struct file_stack fs;
6485
6486 fs.location = input_location;
6487 fs.next = expr_wfl_stack;
b24f65cd 6488 input_filename = EXPR_WFL_FILENAME (exp);
d479d37f 6489 input_line = EXPR_WFL_LINENO (exp);
72954a4f 6490 expr_wfl_stack = &fs;
b24f65cd 6491 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
0cea056b 6492 emit_line_note (input_location);
6ad7895a 6493 /* Possibly avoid switching back and forth here. */
72954a4f
JM
6494 to_return = expand_expr (EXPR_WFL_NODE (exp),
6495 (ignore ? const0_rtx : target),
6496 tmode, modifier);
6497 if (expr_wfl_stack != &fs)
6498 abort ();
6499 input_location = fs.location;
6500 expr_wfl_stack = fs.next;
b24f65cd
APB
6501 return to_return;
6502 }
bf1e5319 6503
bbf6f052
RK
6504 case SAVE_EXPR:
6505 context = decl_function_context (exp);
d6a5ac33 6506
d0977240
RK
6507 /* If this SAVE_EXPR was at global context, assume we are an
6508 initialization function and move it into our context. */
6509 if (context == 0)
6510 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6511
bbf6f052
RK
6512 /* We treat inline_function_decl as an alias for the current function
6513 because that is the inline function whose vars, types, etc.
6514 are being merged into the current function.
6515 See expand_inline_function. */
6516 if (context == current_function_decl || context == inline_function_decl)
6517 context = 0;
6518
6519 /* If this is non-local, handle it. */
6520 if (context)
6521 {
d0977240
RK
6522 /* The following call just exists to abort if the context is
6523 not of a containing function. */
6524 find_function_data (context);
6525
bbf6f052
RK
6526 temp = SAVE_EXPR_RTL (exp);
6527 if (temp && GET_CODE (temp) == REG)
6528 {
f29a2bd1 6529 put_var_into_stack (exp, /*rescan=*/true);
bbf6f052
RK
6530 temp = SAVE_EXPR_RTL (exp);
6531 }
6532 if (temp == 0 || GET_CODE (temp) != MEM)
6533 abort ();
792760b9
RK
6534 return
6535 replace_equiv_address (temp,
6536 fix_lexical_addr (XEXP (temp, 0), exp));
bbf6f052
RK
6537 }
6538 if (SAVE_EXPR_RTL (exp) == 0)
6539 {
06089a8b
RK
6540 if (mode == VOIDmode)
6541 temp = const0_rtx;
6542 else
1da68f56
RK
6543 temp = assign_temp (build_qualified_type (type,
6544 (TYPE_QUALS (type)
6545 | TYPE_QUAL_CONST)),
6546 3, 0, 0);
1499e0a8 6547
bbf6f052 6548 SAVE_EXPR_RTL (exp) = temp;
bbf6f052 6549 if (!optimize && GET_CODE (temp) == REG)
38a448ca
RH
6550 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6551 save_expr_regs);
ff78f773
RK
6552
6553 /* If the mode of TEMP does not match that of the expression, it
6554 must be a promoted value. We pass store_expr a SUBREG of the
6555 wanted mode but mark it so that we know that it was already
3ac1a319 6556 extended. */
ff78f773
RK
6557
6558 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6559 {
ddef6bc7 6560 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
3ac1a319 6561 promote_mode (type, mode, &unsignedp, 0);
ff78f773 6562 SUBREG_PROMOTED_VAR_P (temp) = 1;
7879b81e 6563 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
ff78f773
RK
6564 }
6565
4c7a0be9 6566 if (temp == const0_rtx)
37a08a29 6567 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4c7a0be9 6568 else
8403445a
AM
6569 store_expr (TREE_OPERAND (exp, 0), temp,
6570 modifier == EXPAND_STACK_PARM ? 2 : 0);
e5e809f4
JL
6571
6572 TREE_USED (exp) = 1;
bbf6f052 6573 }
1499e0a8
RK
6574
6575 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6576 must be a promoted value. We return a SUBREG of the wanted mode,
0f41302f 6577 but mark it so that we know that it was already extended. */
1499e0a8
RK
6578
6579 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6580 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6581 {
e70d22c8
RK
6582 /* Compute the signedness and make the proper SUBREG. */
6583 promote_mode (type, mode, &unsignedp, 0);
ddef6bc7 6584 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
1499e0a8 6585 SUBREG_PROMOTED_VAR_P (temp) = 1;
7879b81e 6586 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
1499e0a8
RK
6587 return temp;
6588 }
6589
bbf6f052
RK
6590 return SAVE_EXPR_RTL (exp);
6591
679163cf
MS
6592 case UNSAVE_EXPR:
6593 {
6594 rtx temp;
6595 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
24965e7a 6596 TREE_OPERAND (exp, 0)
ae2bcd98 6597 = lang_hooks.unsave_expr_now (TREE_OPERAND (exp, 0));
679163cf
MS
6598 return temp;
6599 }
6600
70e6ca43
APB
6601 case GOTO_EXPR:
6602 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6603 expand_goto (TREE_OPERAND (exp, 0));
6604 else
6605 expand_computed_goto (TREE_OPERAND (exp, 0));
6606 return const0_rtx;
6607
bbf6f052 6608 case EXIT_EXPR:
df4ae160 6609 expand_exit_loop_if_false (NULL,
e44842fe 6610 invert_truthvalue (TREE_OPERAND (exp, 0)));
bbf6f052
RK
6611 return const0_rtx;
6612
f42e28dd
APB
6613 case LABELED_BLOCK_EXPR:
6614 if (LABELED_BLOCK_BODY (exp))
b0832fe1 6615 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
30f7a378 6616 /* Should perhaps use expand_label, but this is simpler and safer. */
0a5fee32 6617 do_pending_stack_adjust ();
f42e28dd
APB
6618 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6619 return const0_rtx;
6620
6621 case EXIT_BLOCK_EXPR:
6622 if (EXIT_BLOCK_RETURN (exp))
ab87f8c8 6623 sorry ("returned value in block_exit_expr");
f42e28dd
APB
6624 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6625 return const0_rtx;
6626
bbf6f052 6627 case LOOP_EXPR:
0088fcb1 6628 push_temp_slots ();
bbf6f052 6629 expand_start_loop (1);
b0832fe1 6630 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
bbf6f052 6631 expand_end_loop ();
0088fcb1 6632 pop_temp_slots ();
bbf6f052
RK
6633
6634 return const0_rtx;
6635
6636 case BIND_EXPR:
6637 {
6638 tree vars = TREE_OPERAND (exp, 0);
bbf6f052
RK
6639
6640 /* Need to open a binding contour here because
e976b8b2 6641 if there are any cleanups they must be contained here. */
8e91754e 6642 expand_start_bindings (2);
bbf6f052 6643
2df53c0b
RS
6644 /* Mark the corresponding BLOCK for output in its proper place. */
6645 if (TREE_OPERAND (exp, 2) != 0
6646 && ! TREE_USED (TREE_OPERAND (exp, 2)))
ae2bcd98 6647 lang_hooks.decls.insert_block (TREE_OPERAND (exp, 2));
bbf6f052
RK
6648
6649 /* If VARS have not yet been expanded, expand them now. */
6650 while (vars)
6651 {
19e7881c 6652 if (!DECL_RTL_SET_P (vars))
4977bab6 6653 expand_decl (vars);
bbf6f052
RK
6654 expand_decl_init (vars);
6655 vars = TREE_CHAIN (vars);
6656 }
6657
37a08a29 6658 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
bbf6f052
RK
6659
6660 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6661
6662 return temp;
6663 }
6664
6665 case RTL_EXPR:
83b853c9
JM
6666 if (RTL_EXPR_SEQUENCE (exp))
6667 {
6668 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6669 abort ();
2f937369 6670 emit_insn (RTL_EXPR_SEQUENCE (exp));
83b853c9
JM
6671 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6672 }
64dc53f3
MM
6673 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6674 free_temps_for_rtl_expr (exp);
0fab64a3
MM
6675 if (alt_rtl)
6676 *alt_rtl = RTL_EXPR_ALT_RTL (exp);
bbf6f052
RK
6677 return RTL_EXPR_RTL (exp);
6678
6679 case CONSTRUCTOR:
dd27116b
RK
6680 /* If we don't need the result, just ensure we evaluate any
6681 subexpressions. */
6682 if (ignore)
6683 {
6684 tree elt;
37a08a29 6685
dd27116b 6686 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
37a08a29
RK
6687 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6688
dd27116b
RK
6689 return const0_rtx;
6690 }
3207b172 6691
4af3895e
JVA
6692 /* All elts simple constants => refer to a constant in memory. But
6693 if this is a non-BLKmode mode, let it store a field at a time
6694 since that should make a CONST_INT or CONST_DOUBLE when we
3207b172 6695 fold. Likewise, if we have a target we can use, it is best to
d720b9d1
RK
6696 store directly into the target unless the type is large enough
6697 that memcpy will be used. If we are making an initializer and
00182e1e
AH
6698 all operands are constant, put it in memory as well.
6699
6700 FIXME: Avoid trying to fill vector constructors piecemeal.
6701 Output them with output_constant_def below unless we're sure
6702 they're zeros. This should go away when vector initializers
6703 are treated like VECTOR_CST instead of arrays.
6704 */
dd27116b 6705 else if ((TREE_STATIC (exp)
3207b172 6706 && ((mode == BLKmode
e5e809f4 6707 && ! (target != 0 && safe_from_p (target, exp, 1)))
d720b9d1 6708 || TREE_ADDRESSABLE (exp)
19caa751 6709 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
3a94c984 6710 && (! MOVE_BY_PIECES_P
19caa751
RK
6711 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6712 TYPE_ALIGN (type)))
0fb7aeda
KH
6713 && ((TREE_CODE (type) == VECTOR_TYPE
6714 && !is_zeros_p (exp))
6715 || ! mostly_zeros_p (exp)))))
f59700f9
RK
6716 || ((modifier == EXPAND_INITIALIZER
6717 || modifier == EXPAND_CONST_ADDRESS)
6718 && TREE_CONSTANT (exp)))
bbf6f052 6719 {
bd7cf17e 6720 rtx constructor = output_constant_def (exp, 1);
19caa751 6721
b552441b
RS
6722 if (modifier != EXPAND_CONST_ADDRESS
6723 && modifier != EXPAND_INITIALIZER
792760b9
RK
6724 && modifier != EXPAND_SUM)
6725 constructor = validize_mem (constructor);
6726
bbf6f052
RK
6727 return constructor;
6728 }
bbf6f052
RK
6729 else
6730 {
e9ac02a6
JW
6731 /* Handle calls that pass values in multiple non-contiguous
6732 locations. The Irix 6 ABI has examples of this. */
e5e809f4 6733 if (target == 0 || ! safe_from_p (target, exp, 1)
8403445a
AM
6734 || GET_CODE (target) == PARALLEL
6735 || modifier == EXPAND_STACK_PARM)
1da68f56
RK
6736 target
6737 = assign_temp (build_qualified_type (type,
6738 (TYPE_QUALS (type)
6739 | (TREE_READONLY (exp)
6740 * TYPE_QUAL_CONST))),
c24ae149 6741 0, TREE_ADDRESSABLE (exp), 1);
07604beb 6742
de8920be 6743 store_constructor (exp, target, 0, int_expr_size (exp));
bbf6f052
RK
6744 return target;
6745 }
6746
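 /* Illustrative examples for the CONSTRUCTOR choice above (a sketch; the
    decision also depends on MOVE_BY_PIECES_P and mostly_zeros_p).  An
    all-constant static aggregate such as

        static const struct point p = { 1, 2 };

    is emitted once through output_constant_def and referenced from memory,
    whereas an initializer with a non-constant element such as

        struct point q = { x, 0 };

    is built field by field into the target by store_constructor.  */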
6747 case INDIRECT_REF:
6748 {
6749 tree exp1 = TREE_OPERAND (exp, 0);
7581a30f 6750 tree index;
3a94c984
KH
6751 tree string = string_constant (exp1, &index);
6752
06eaa86f 6753 /* Try to optimize reads from const strings. */
0fb7aeda
KH
6754 if (string
6755 && TREE_CODE (string) == STRING_CST
6756 && TREE_CODE (index) == INTEGER_CST
05bccae2 6757 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
0fb7aeda
KH
6758 && GET_MODE_CLASS (mode) == MODE_INT
6759 && GET_MODE_SIZE (mode) == 1
37a08a29 6760 && modifier != EXPAND_WRITE)
0fb7aeda 6761 return gen_int_mode (TREE_STRING_POINTER (string)
21ef78aa 6762 [TREE_INT_CST_LOW (index)], mode);
bbf6f052 6763
405f0da6
JW
6764 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6765 op0 = memory_address (mode, op0);
38a448ca 6766 temp = gen_rtx_MEM (mode, op0);
3bdf5ad1 6767 set_mem_attributes (temp, exp, 0);
1125706f 6768
14a774a9
RK
6769 /* If we are writing to this object and its type is a record with
6770 readonly fields, we must mark it as readonly so it will
6771 conflict with readonly references to those fields. */
37a08a29 6772 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
14a774a9
RK
6773 RTX_UNCHANGING_P (temp) = 1;
6774
8c8a8e34
JW
6775 return temp;
6776 }
bbf6f052
RK
6777
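 /* Example of the const-string read optimization above (a sketch).  An
    access such as

        char c = *("hello" + 1);

    has a STRING_CST behind the pointer and a constant index, so for a
    one-byte integer mode it expands directly to the character constant 'e'
    instead of emitting a load, provided the access is not a write.  */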
6778 case ARRAY_REF:
742920c7
RK
6779 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6780 abort ();
bbf6f052 6781
bbf6f052 6782 {
742920c7
RK
6783 tree array = TREE_OPERAND (exp, 0);
6784 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6785 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
fed3cef0 6786 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
08293add 6787 HOST_WIDE_INT i;
b50d17a1 6788
d4c89139
PB
6789 /* Optimize the special-case of a zero lower bound.
6790
6791 We convert the low_bound to sizetype to avoid some problems
6792 with constant folding. (E.g. suppose the lower bound is 1,
6793 and its mode is QI. Without the conversion, (ARRAY
6794 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
fed3cef0 6795 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
d4c89139 6796
742920c7 6797 if (! integer_zerop (low_bound))
fed3cef0 6798 index = size_diffop (index, convert (sizetype, low_bound));
742920c7 6799
742920c7 6800 /* Fold an expression like: "foo"[2].
ad2e7dd0
RK
6801 This is not done in fold so it won't happen inside &.
6802 Don't fold if this is for wide characters since it's too
6803 difficult to do correctly and this is a very rare case. */
742920c7 6804
017e1b43
RH
6805 if (modifier != EXPAND_CONST_ADDRESS
6806 && modifier != EXPAND_INITIALIZER
6807 && modifier != EXPAND_MEMORY
cb5fa0f8 6808 && TREE_CODE (array) == STRING_CST
742920c7 6809 && TREE_CODE (index) == INTEGER_CST
05bccae2 6810 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
ad2e7dd0
RK
6811 && GET_MODE_CLASS (mode) == MODE_INT
6812 && GET_MODE_SIZE (mode) == 1)
21ef78aa
DE
6813 return gen_int_mode (TREE_STRING_POINTER (array)
6814 [TREE_INT_CST_LOW (index)], mode);
bbf6f052 6815
742920c7
RK
6816 /* If this is a constant index into a constant array,
6817 just get the value from the array. Handle both the cases when
6818 we have an explicit constructor and when our operand is a variable
6819 that was declared const. */
4af3895e 6820
017e1b43
RH
6821 if (modifier != EXPAND_CONST_ADDRESS
6822 && modifier != EXPAND_INITIALIZER
6823 && modifier != EXPAND_MEMORY
6824 && TREE_CODE (array) == CONSTRUCTOR
6825 && ! TREE_SIDE_EFFECTS (array)
05bccae2 6826 && TREE_CODE (index) == INTEGER_CST
3a94c984 6827 && 0 > compare_tree_int (index,
05bccae2
RK
6828 list_length (CONSTRUCTOR_ELTS
6829 (TREE_OPERAND (exp, 0)))))
742920c7 6830 {
05bccae2
RK
6831 tree elem;
6832
6833 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6834 i = TREE_INT_CST_LOW (index);
6835 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6836 ;
6837
6838 if (elem)
37a08a29
RK
6839 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6840 modifier);
742920c7 6841 }
3a94c984 6842
742920c7 6843 else if (optimize >= 1
cb5fa0f8
RK
6844 && modifier != EXPAND_CONST_ADDRESS
6845 && modifier != EXPAND_INITIALIZER
017e1b43 6846 && modifier != EXPAND_MEMORY
742920c7
RK
6847 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6848 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
beb0c2e0
RH
6849 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
6850 && targetm.binds_local_p (array))
742920c7 6851 {
08293add 6852 if (TREE_CODE (index) == INTEGER_CST)
742920c7
RK
6853 {
6854 tree init = DECL_INITIAL (array);
6855
742920c7
RK
6856 if (TREE_CODE (init) == CONSTRUCTOR)
6857 {
665f2503 6858 tree elem;
742920c7 6859
05bccae2 6860 for (elem = CONSTRUCTOR_ELTS (init);
5cb1bea4
JM
6861 (elem
6862 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
05bccae2
RK
6863 elem = TREE_CHAIN (elem))
6864 ;
6865
c54b0a5e 6866 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
742920c7 6867 return expand_expr (fold (TREE_VALUE (elem)), target,
37a08a29 6868 tmode, modifier);
742920c7
RK
6869 }
6870 else if (TREE_CODE (init) == STRING_CST
05bccae2
RK
6871 && 0 > compare_tree_int (index,
6872 TREE_STRING_LENGTH (init)))
5c80f6e6
JJ
6873 {
6874 tree type = TREE_TYPE (TREE_TYPE (init));
6875 enum machine_mode mode = TYPE_MODE (type);
6876
6877 if (GET_MODE_CLASS (mode) == MODE_INT
6878 && GET_MODE_SIZE (mode) == 1)
21ef78aa
DE
6879 return gen_int_mode (TREE_STRING_POINTER (init)
6880 [TREE_INT_CST_LOW (index)], mode);
5c80f6e6 6881 }
742920c7
RK
6882 }
6883 }
6884 }
afc6aaab 6885 goto normal_inner_ref;
bbf6f052
RK
6886
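 /* Examples of the constant-index folds above (a sketch; the variable case
    needs -O1 or higher and a locally bound array).  With the initializer
    visible,

        static const int tbl[3] = { 2, 3, 5 };
        ... tbl[1] ...

    expands directly to (const_int 3), and a literal access such as
    "foo"[2] expands to the character constant 'o'; no memory reference is
    emitted in either case.  */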
6887 case COMPONENT_REF:
4af3895e 6888 /* If the operand is a CONSTRUCTOR, we can just extract the
afc6aaab
ZW
6889 appropriate field if it is present. */
6890 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
4af3895e
JVA
6891 {
6892 tree elt;
6893
6894 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6895 elt = TREE_CHAIN (elt))
86b5812c
RK
6896 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6897 /* We can normally use the value of the field in the
6898 CONSTRUCTOR. However, if this is a bitfield in
6899 an integral mode that we can fit in a HOST_WIDE_INT,
6900 we must mask only the number of bits in the bitfield,
6901 since this is done implicitly by the constructor. If
6902 the bitfield does not meet either of those conditions,
6903 we can't do this optimization. */
6904 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6905 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6906 == MODE_INT)
6907 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6908 <= HOST_BITS_PER_WIDE_INT))))
6909 {
8403445a
AM
6910 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
6911 && modifier == EXPAND_STACK_PARM)
6912 target = 0;
3a94c984 6913 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
86b5812c
RK
6914 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6915 {
9df2c88c
RK
6916 HOST_WIDE_INT bitsize
6917 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
22273300
JJ
6918 enum machine_mode imode
6919 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
86b5812c
RK
6920
6921 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6922 {
6923 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
22273300 6924 op0 = expand_and (imode, op0, op1, target);
86b5812c
RK
6925 }
6926 else
6927 {
6928 tree count
e5e809f4
JL
6929 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6930 0);
86b5812c
RK
6931
6932 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6933 target, 0);
6934 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6935 target, 0);
6936 }
6937 }
6938
6939 return op0;
6940 }
4af3895e 6941 }
afc6aaab 6942 goto normal_inner_ref;
4af3895e 6943
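 /* Sketch of the bit-field reduction performed above.  For a field of
    BITSIZE bits taken from a constructor value V held in an integral mode
    of WIDTH bits, the unsigned case masks the value:

        v & (((HOST_WIDE_INT) 1 << bitsize) - 1)

    while the signed case sign-extends it with a shift pair using an
    arithmetic right shift:

        (v << (width - bitsize)) >> (width - bitsize)

    which is what the expand_and and the two expand_shift calls generate.  */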
afc6aaab
ZW
6944 case BIT_FIELD_REF:
6945 case ARRAY_RANGE_REF:
6946 normal_inner_ref:
bbf6f052
RK
6947 {
6948 enum machine_mode mode1;
770ae6cc 6949 HOST_WIDE_INT bitsize, bitpos;
7bb0943f 6950 tree offset;
bbf6f052 6951 int volatilep = 0;
839c4796 6952 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
a06ef755 6953 &mode1, &unsignedp, &volatilep);
f47e9b4e 6954 rtx orig_op0;
bbf6f052 6955
e7f3c83f
RK
6956 /* If we got back the original object, something is wrong. Perhaps
6957 we are evaluating an expression too early. In any event, don't
6958 infinitely recurse. */
6959 if (tem == exp)
6960 abort ();
6961
3d27140a 6962 /* If TEM's type is a union of variable size, pass TARGET to the inner
b74f5ff2
RK
6963 computation, since it will need a temporary and TARGET is known
6964 to suffice. This occurs in unchecked conversion in Ada. */
3a94c984 6965
f47e9b4e
RK
6966 orig_op0 = op0
6967 = expand_expr (tem,
6968 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6969 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6970 != INTEGER_CST)
8403445a 6971 && modifier != EXPAND_STACK_PARM
f47e9b4e
RK
6972 ? target : NULL_RTX),
6973 VOIDmode,
6974 (modifier == EXPAND_INITIALIZER
8403445a
AM
6975 || modifier == EXPAND_CONST_ADDRESS
6976 || modifier == EXPAND_STACK_PARM)
f47e9b4e 6977 ? modifier : EXPAND_NORMAL);
bbf6f052 6978
8c8a8e34 6979 /* If this is a constant, put it into a register if it is a
14a774a9 6980 legitimate constant and OFFSET is 0 and memory if it isn't. */
8c8a8e34
JW
6981 if (CONSTANT_P (op0))
6982 {
6983 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
14a774a9
RK
6984 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6985 && offset == 0)
8c8a8e34
JW
6986 op0 = force_reg (mode, op0);
6987 else
6988 op0 = validize_mem (force_const_mem (mode, op0));
6989 }
6990
8d2e5f72
RK
6991 /* Otherwise, if this object is not in memory and we either have an
6992 offset or a BLKmode result, put it there. This case can't occur in
6993 C, but can in Ada if we have unchecked conversion of an expression
6994 from a scalar type to an array or record type or for an
6995 ARRAY_RANGE_REF whose type is BLKmode. */
6996 else if (GET_CODE (op0) != MEM
6997 && (offset != 0
6998 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
6999 {
7000 /* If the operand is a SAVE_EXPR, we can deal with this by
7001 forcing the SAVE_EXPR into memory. */
7002 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7003 {
7004 put_var_into_stack (TREE_OPERAND (exp, 0),
7005 /*rescan=*/true);
7006 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7007 }
7008 else
7009 {
7010 tree nt
7011 = build_qualified_type (TREE_TYPE (tem),
7012 (TYPE_QUALS (TREE_TYPE (tem))
7013 | TYPE_QUAL_CONST));
7014 rtx memloc = assign_temp (nt, 1, 1, 1);
450b1728 7015
8d2e5f72
RK
7016 emit_move_insn (memloc, op0);
7017 op0 = memloc;
7018 }
7019 }
7020
7bb0943f
RS
7021 if (offset != 0)
7022 {
8403445a
AM
7023 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7024 EXPAND_SUM);
7bb0943f
RS
7025
7026 if (GET_CODE (op0) != MEM)
7027 abort ();
2d48c13d 7028
2d48c13d 7029 #ifdef POINTERS_EXTEND_UNSIGNED
4b6c1672 7030 if (GET_MODE (offset_rtx) != Pmode)
267b28bd 7031 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
fa06ab5c
RK
7032 #else
7033 if (GET_MODE (offset_rtx) != ptr_mode)
7034 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
2d48c13d
JL
7035 #endif
7036
e82407b5
EB
7037 if (GET_MODE (op0) == BLKmode
7038 /* A constant address in OP0 can have VOIDmode, we must
7039 not try to call force_reg in that case. */
efd07ca7 7040 && GET_MODE (XEXP (op0, 0)) != VOIDmode
14a774a9 7041 && bitsize != 0
3a94c984 7042 && (bitpos % bitsize) == 0
89752202 7043 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
a06ef755 7044 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
89752202 7045 {
e3c8ea67 7046 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
89752202
HB
7047 bitpos = 0;
7048 }
7049
0d4903b8
RK
7050 op0 = offset_address (op0, offset_rtx,
7051 highest_pow2_factor (offset));
7bb0943f
RS
7052 }
7053
1ce7f3c2
RK
7054 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7055 record its alignment as BIGGEST_ALIGNMENT. */
7056 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7057 && is_aligning_offset (offset, tem))
7058 set_mem_align (op0, BIGGEST_ALIGNMENT);
7059
bbf6f052
RK
7060 /* Don't forget about volatility even if this is a bitfield. */
7061 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7062 {
f47e9b4e
RK
7063 if (op0 == orig_op0)
7064 op0 = copy_rtx (op0);
7065
bbf6f052
RK
7066 MEM_VOLATILE_P (op0) = 1;
7067 }
7068
010f87c4
JJ
7069 /* The following code doesn't handle CONCAT.
7070 Assume only bitpos == 0 can be used for CONCAT, due to
7071 one-element arrays having the same mode as their element. */
7072 if (GET_CODE (op0) == CONCAT)
7073 {
7074 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7075 abort ();
7076 return op0;
7077 }
7078
ccc98036
RS
7079 /* In cases where an aligned union has an unaligned object
7080 as a field, we might be extracting a BLKmode value from
7081 an integer-mode (e.g., SImode) object. Handle this case
7082 by doing the extract into an object as wide as the field
7083 (which we know to be the width of a basic mode), then
cb5fa0f8 7084 storing into memory, and changing the mode to BLKmode. */
bbf6f052 7085 if (mode1 == VOIDmode
ccc98036 7086 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
cb5fa0f8
RK
7087 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7088 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
10c2a453
RK
7089 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7090 && modifier != EXPAND_CONST_ADDRESS
7091 && modifier != EXPAND_INITIALIZER)
cb5fa0f8
RK
7092 /* If the field isn't aligned enough to fetch as a memref,
7093 fetch it as a bit field. */
7094 || (mode1 != BLKmode
9e5f281f 7095 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
e82407b5
EB
7096 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7097 || (GET_CODE (op0) == MEM
7098 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7099 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
a8f3bf52
RK
7100 && ((modifier == EXPAND_CONST_ADDRESS
7101 || modifier == EXPAND_INITIALIZER)
7102 ? STRICT_ALIGNMENT
7103 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
9e5f281f 7104 || (bitpos % BITS_PER_UNIT != 0)))
cb5fa0f8
RK
7105 /* If the type and the field are a constant size and the
7106 size of the type isn't the same size as the bitfield,
7107 we must use bitfield operations. */
7108 || (bitsize >= 0
7109 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7110 == INTEGER_CST)
7111 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
a06ef755 7112 bitsize)))
bbf6f052 7113 {
bbf6f052
RK
7114 enum machine_mode ext_mode = mode;
7115
14a774a9
RK
7116 if (ext_mode == BLKmode
7117 && ! (target != 0 && GET_CODE (op0) == MEM
7118 && GET_CODE (target) == MEM
7119 && bitpos % BITS_PER_UNIT == 0))
bbf6f052
RK
7120 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7121
7122 if (ext_mode == BLKmode)
a281e72d 7123 {
7a06d606
RK
7124 if (target == 0)
7125 target = assign_temp (type, 0, 1, 1);
7126
7127 if (bitsize == 0)
7128 return target;
7129
a281e72d
RK
7130 /* In this case, BITPOS must start at a byte boundary and
7131 TARGET, if specified, must be a MEM. */
7132 if (GET_CODE (op0) != MEM
7133 || (target != 0 && GET_CODE (target) != MEM)
7134 || bitpos % BITS_PER_UNIT != 0)
7135 abort ();
7136
7a06d606
RK
7137 emit_block_move (target,
7138 adjust_address (op0, VOIDmode,
7139 bitpos / BITS_PER_UNIT),
a06ef755 7140 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
44bb111a 7141 / BITS_PER_UNIT),
8403445a
AM
7142 (modifier == EXPAND_STACK_PARM
7143 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
3a94c984 7144
a281e72d
RK
7145 return target;
7146 }
bbf6f052 7147
dc6d66b3
RK
7148 op0 = validize_mem (op0);
7149
7150 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
04050c69 7151 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
dc6d66b3 7152
8403445a
AM
7153 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7154 (modifier == EXPAND_STACK_PARM
7155 ? NULL_RTX : target),
7156 ext_mode, ext_mode,
bbf6f052 7157 int_size_in_bytes (TREE_TYPE (tem)));
ef19912d
RK
7158
7159 /* If the result is a record type and BITSIZE is narrower than
7160 the mode of OP0, an integral mode, and this is a big endian
7161 machine, we must put the field into the high-order bits. */
7162 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7163 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
65a07688 7164 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
ef19912d
RK
7165 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7166 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7167 - bitsize),
7168 op0, 1);
7169
daae8185
RK
7170 /* If the result type is BLKmode, store the data into a temporary
7171 of the appropriate type, but with the mode corresponding to the
7172 mode for the data we have (op0's mode). It's tempting to make
7173 this a constant type, since we know it's only being stored once,
7174 but that can cause problems if we are taking the address of this
7175 COMPONENT_REF because the MEM of any reference via that address
7176 will have flags corresponding to the type, which will not
7177 necessarily be constant. */
bbf6f052
RK
7178 if (mode == BLKmode)
7179 {
daae8185
RK
7180 rtx new
7181 = assign_stack_temp_for_type
7182 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
bbf6f052
RK
7183
7184 emit_move_insn (new, op0);
7185 op0 = copy_rtx (new);
7186 PUT_MODE (op0, BLKmode);
c3d32120 7187 set_mem_attributes (op0, exp, 1);
bbf6f052
RK
7188 }
7189
7190 return op0;
7191 }
7192
05019f83
RK
7193 /* If the result is BLKmode, use that to access the object
7194 now as well. */
7195 if (mode == BLKmode)
7196 mode1 = BLKmode;
7197
bbf6f052
RK
7198 /* Get a reference to just this component. */
7199 if (modifier == EXPAND_CONST_ADDRESS
7200 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
f1ec5147 7201 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
bbf6f052 7202 else
f4ef873c 7203 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
41472af8 7204
f47e9b4e
RK
7205 if (op0 == orig_op0)
7206 op0 = copy_rtx (op0);
7207
3bdf5ad1 7208 set_mem_attributes (op0, exp, 0);
dc6d66b3 7209 if (GET_CODE (XEXP (op0, 0)) == REG)
a06ef755 7210 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
dc6d66b3 7211
bbf6f052 7212 MEM_VOLATILE_P (op0) |= volatilep;
0d15e60c 7213 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
08bbd316 7214 || modifier == EXPAND_CONST_ADDRESS
0d15e60c 7215 || modifier == EXPAND_INITIALIZER)
bbf6f052 7216 return op0;
0d15e60c 7217 else if (target == 0)
bbf6f052 7218 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
0d15e60c 7219
bbf6f052
RK
7220 convert_move (target, op0, unsignedp);
7221 return target;
7222 }
7223
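 /* Illustration of the get_inner_reference decomposition used above (a
    sketch; the exact numbers depend on the target's layout).  For

        struct { int x; short y; } s;
        ... s.y ...

    the containing object is S, with bitpos = 32, bitsize = 16 and
    mode1 = HImode on a typical 32-bit-int layout and no variable offset;
    the access is then rewritten as an adjusted MEM or, if the field is not
    suitably aligned, as a bit-field extraction.  */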
4a8d0c9c
RH
7224 case VTABLE_REF:
7225 {
7226 rtx insn, before = get_last_insn (), vtbl_ref;
7227
7228 /* Evaluate the interior expression. */
7229 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7230 tmode, modifier);
7231
7232 /* Get or create an instruction off which to hang a note. */
7233 if (REG_P (subtarget))
7234 {
7235 target = subtarget;
7236 insn = get_last_insn ();
7237 if (insn == before)
7238 abort ();
7239 if (! INSN_P (insn))
7240 insn = prev_nonnote_insn (insn);
7241 }
7242 else
7243 {
7244 target = gen_reg_rtx (GET_MODE (subtarget));
7245 insn = emit_move_insn (target, subtarget);
7246 }
7247
7248 /* Collect the data for the note. */
7249 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7250 vtbl_ref = plus_constant (vtbl_ref,
7251 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7252 /* Discard the initial CONST that was added. */
7253 vtbl_ref = XEXP (vtbl_ref, 0);
7254
7255 REG_NOTES (insn)
7256 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7257
7258 return target;
7259 }
7260
bbf6f052
RK
7261 /* Intended for a reference to a buffer of a file-object in Pascal.
7262 But it's not certain that a special tree code will really be
7263 necessary for these. INDIRECT_REF might work for them. */
7264 case BUFFER_REF:
7265 abort ();
7266
7308a047 7267 case IN_EXPR:
7308a047 7268 {
d6a5ac33
RK
7269 /* Pascal set IN expression.
7270
7271 Algorithm:
7272 rlo = set_low - (set_low%bits_per_word);
7273 the_word = set [ (index - rlo)/bits_per_word ];
7274 bit_index = index % bits_per_word;
7275 bitmask = 1 << bit_index;
7276 return !!(the_word & bitmask); */
7277
7308a047
RS
7278 tree set = TREE_OPERAND (exp, 0);
7279 tree index = TREE_OPERAND (exp, 1);
d6a5ac33 7280 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7308a047 7281 tree set_type = TREE_TYPE (set);
7308a047
RS
7282 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7283 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
d6a5ac33
RK
7284 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7285 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7286 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7287 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7288 rtx setaddr = XEXP (setval, 0);
7289 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7308a047
RS
7290 rtx rlow;
7291 rtx diff, quo, rem, addr, bit, result;
7308a047 7292
d6a5ac33
RK
7293 /* If domain is empty, answer is no. Likewise if index is constant
7294 and out of bounds. */
51723711 7295 if (((TREE_CODE (set_high_bound) == INTEGER_CST
d6a5ac33 7296 && TREE_CODE (set_low_bound) == INTEGER_CST
51723711 7297 && tree_int_cst_lt (set_high_bound, set_low_bound))
d6a5ac33
RK
7298 || (TREE_CODE (index) == INTEGER_CST
7299 && TREE_CODE (set_low_bound) == INTEGER_CST
7300 && tree_int_cst_lt (index, set_low_bound))
7301 || (TREE_CODE (set_high_bound) == INTEGER_CST
7302 && TREE_CODE (index) == INTEGER_CST
7303 && tree_int_cst_lt (set_high_bound, index))))
7308a047
RS
7304 return const0_rtx;
7305
d6a5ac33
RK
7306 if (target == 0)
7307 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7308a047
RS
7308
7309 /* If we get here, we have to generate the code for both cases
7310 (in range and out of range). */
7311
7312 op0 = gen_label_rtx ();
7313 op1 = gen_label_rtx ();
7314
7315 if (! (GET_CODE (index_val) == CONST_INT
7316 && GET_CODE (lo_r) == CONST_INT))
a06ef755
RK
7317 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7318 GET_MODE (index_val), iunsignedp, op1);
7308a047
RS
7319
7320 if (! (GET_CODE (index_val) == CONST_INT
7321 && GET_CODE (hi_r) == CONST_INT))
a06ef755
RK
7322 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7323 GET_MODE (index_val), iunsignedp, op1);
7308a047
RS
7324
7325 /* Calculate the element number of bit zero in the first word
7326 of the set. */
7327 if (GET_CODE (lo_r) == CONST_INT)
17938e57 7328 rlow = GEN_INT (INTVAL (lo_r)
3a94c984 7329 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7308a047 7330 else
17938e57
RK
7331 rlow = expand_binop (index_mode, and_optab, lo_r,
7332 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
d6a5ac33 7333 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7308a047 7334
d6a5ac33
RK
7335 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7336 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7308a047
RS
7337
7338 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
d6a5ac33 7339 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7308a047 7340 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
d6a5ac33
RK
7341 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7342
7308a047 7343 addr = memory_address (byte_mode,
d6a5ac33
RK
7344 expand_binop (index_mode, add_optab, diff,
7345 setaddr, NULL_RTX, iunsignedp,
17938e57 7346 OPTAB_LIB_WIDEN));
d6a5ac33 7347
3a94c984 7348 /* Extract the bit we want to examine. */
7308a047 7349 bit = expand_shift (RSHIFT_EXPR, byte_mode,
38a448ca 7350 gen_rtx_MEM (byte_mode, addr),
17938e57
RK
7351 make_tree (TREE_TYPE (index), rem),
7352 NULL_RTX, 1);
7353 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7354 GET_MODE (target) == byte_mode ? target : 0,
7308a047 7355 1, OPTAB_LIB_WIDEN);
17938e57
RK
7356
7357 if (result != target)
7358 convert_move (target, result, 1);
7308a047
RS
7359
7360 /* Output the code to handle the out-of-range case. */
7361 emit_jump (op0);
7362 emit_label (op1);
7363 emit_move_insn (target, const0_rtx);
7364 emit_label (op0);
7365 return target;
7366 }
7367
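 /* Plain-C sketch of the set-membership test expanded above (illustrative
    only; note that the generated code works in BITS_PER_UNIT chunks even
    though the comment speaks of words):

        unsigned char *set = ...;                  the set's storage
        int lo = ...;                              lower bound of the domain
        int rlo = lo & ~(BITS_PER_UNIT - 1);
        unsigned char w = set[(index - rlo) / BITS_PER_UNIT];
        int in_set = (w >> (index % BITS_PER_UNIT)) & 1;

    preceded by the two bounds checks emitted for a possibly out-of-range
    INDEX.  */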
bbf6f052 7368 case WITH_CLEANUP_EXPR:
6ad7895a 7369 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
bbf6f052 7370 {
6ad7895a 7371 WITH_CLEANUP_EXPR_RTL (exp)
37a08a29 7372 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
659e5a7a
JM
7373 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7374 CLEANUP_EH_ONLY (exp));
e976b8b2 7375
bbf6f052 7376 /* That's it for this cleanup. */
6ad7895a 7377 TREE_OPERAND (exp, 1) = 0;
bbf6f052 7378 }
6ad7895a 7379 return WITH_CLEANUP_EXPR_RTL (exp);
bbf6f052 7380
5dab5552
MS
7381 case CLEANUP_POINT_EXPR:
7382 {
e976b8b2
MS
7383 /* Start a new binding layer that will keep track of all cleanup
7384 actions to be performed. */
8e91754e 7385 expand_start_bindings (2);
e976b8b2 7386
d93d4205 7387 target_temp_slot_level = temp_slot_level;
e976b8b2 7388
37a08a29 7389 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
f283f66b
JM
7390 /* If we're going to use this value, load it up now. */
7391 if (! ignore)
7392 op0 = force_not_mem (op0);
d93d4205 7393 preserve_temp_slots (op0);
e976b8b2 7394 expand_end_bindings (NULL_TREE, 0, 0);
5dab5552
MS
7395 }
7396 return op0;
7397
bbf6f052
RK
7398 case CALL_EXPR:
7399 /* Check for a built-in function. */
7400 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
d6a5ac33
RK
7401 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7402 == FUNCTION_DECL)
bbf6f052 7403 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
0fb7aeda 7404 {
c70eaeaf
KG
7405 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7406 == BUILT_IN_FRONTEND)
ae2bcd98 7407 /* ??? Use (*fun) form because expand_expr is a macro. */
8403445a 7408 return (*lang_hooks.expand_expr) (exp, original_target,
0fab64a3
MM
7409 tmode, modifier,
7410 alt_rtl);
c70eaeaf
KG
7411 else
7412 return expand_builtin (exp, target, subtarget, tmode, ignore);
7413 }
d6a5ac33 7414
8129842c 7415 return expand_call (exp, target, ignore);
bbf6f052
RK
7416
7417 case NON_LVALUE_EXPR:
7418 case NOP_EXPR:
7419 case CONVERT_EXPR:
7420 case REFERENCE_EXPR:
4a53008b 7421 if (TREE_OPERAND (exp, 0) == error_mark_node)
a592f288 7422 return const0_rtx;
4a53008b 7423
bbf6f052
RK
7424 if (TREE_CODE (type) == UNION_TYPE)
7425 {
7426 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
14a774a9 7427
c3d32120
RK
7428 /* If both input and output are BLKmode, this conversion isn't doing
7429 anything except possibly changing memory attribute. */
7430 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7431 {
7432 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7433 modifier);
7434
7435 result = copy_rtx (result);
7436 set_mem_attributes (result, exp, 0);
7437 return result;
7438 }
14a774a9 7439
bbf6f052 7440 if (target == 0)
cf7cb67e
JH
7441 {
7442 if (TYPE_MODE (type) != BLKmode)
7443 target = gen_reg_rtx (TYPE_MODE (type));
7444 else
7445 target = assign_temp (type, 0, 1, 1);
7446 }
d6a5ac33 7447
bbf6f052
RK
7448 if (GET_CODE (target) == MEM)
7449 /* Store data into beginning of memory target. */
7450 store_expr (TREE_OPERAND (exp, 0),
8403445a
AM
7451 adjust_address (target, TYPE_MODE (valtype), 0),
7452 modifier == EXPAND_STACK_PARM ? 2 : 0);
1499e0a8 7453
bbf6f052
RK
7454 else if (GET_CODE (target) == REG)
7455 /* Store this field into a union of the proper type. */
14a774a9
RK
7456 store_field (target,
7457 MIN ((int_size_in_bytes (TREE_TYPE
7458 (TREE_OPERAND (exp, 0)))
7459 * BITS_PER_UNIT),
8752c357 7460 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
14a774a9 7461 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
a06ef755 7462 VOIDmode, 0, type, 0);
bbf6f052
RK
7463 else
7464 abort ();
7465
7466 /* Return the entire union. */
7467 return target;
7468 }
d6a5ac33 7469
7f62854a
RK
7470 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7471 {
7472 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
37a08a29 7473 modifier);
7f62854a
RK
7474
7475 /* If the signedness of the conversion differs and OP0 is
7476 a promoted SUBREG, clear that indication since we now
7477 have to do the proper extension. */
7478 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7479 && GET_CODE (op0) == SUBREG)
7480 SUBREG_PROMOTED_VAR_P (op0) = 0;
7481
7482 return op0;
7483 }
7484
fdf473ae 7485 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
12342f90
RS
7486 if (GET_MODE (op0) == mode)
7487 return op0;
12342f90 7488
d6a5ac33
RK
7489 /* If OP0 is a constant, just convert it into the proper mode. */
7490 if (CONSTANT_P (op0))
fdf473ae
RH
7491 {
7492 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7493 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7494
0fb7aeda 7495 if (modifier == EXPAND_INITIALIZER)
fdf473ae
RH
7496 return simplify_gen_subreg (mode, op0, inner_mode,
7497 subreg_lowpart_offset (mode,
7498 inner_mode));
7499 else
7500 return convert_modes (mode, inner_mode, op0,
7501 TREE_UNSIGNED (inner_type));
7502 }
12342f90 7503
26fcb35a 7504 if (modifier == EXPAND_INITIALIZER)
38a448ca 7505 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
d6a5ac33 7506
bbf6f052 7507 if (target == 0)
d6a5ac33
RK
7508 return
7509 convert_to_mode (mode, op0,
7510 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
bbf6f052 7511 else
d6a5ac33
RK
7512 convert_move (target, op0,
7513 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
bbf6f052
RK
7514 return target;
7515
ed239f5a 7516 case VIEW_CONVERT_EXPR:
37a08a29 7517 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
ed239f5a
RK
7518
7519 /* If the input and output modes are both the same, we are done.
13cf99ec
RK
7520 Otherwise, if neither mode is BLKmode and both are integral and within
7521 a word, we can use gen_lowpart. If neither is true, make sure the
7522 operand is in memory and convert the MEM to the new mode. */
ed239f5a
RK
7523 if (TYPE_MODE (type) == GET_MODE (op0))
7524 ;
7525 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
13cf99ec
RK
7526 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7527 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
ed239f5a
RK
7528 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7529 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7530 op0 = gen_lowpart (TYPE_MODE (type), op0);
c11c10d8 7531 else if (GET_CODE (op0) != MEM)
ed239f5a 7532 {
c11c10d8
RK
7533 /* If the operand is not a MEM, force it into memory. Since we
7534 are going to be changing the mode of the MEM, don't call
7535 force_const_mem for constants because we don't allow pool
7536 constants to change mode. */
ed239f5a 7537 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
ed239f5a 7538
c11c10d8
RK
7539 if (TREE_ADDRESSABLE (exp))
7540 abort ();
ed239f5a 7541
c11c10d8
RK
7542 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7543 target
7544 = assign_stack_temp_for_type
7545 (TYPE_MODE (inner_type),
7546 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
ed239f5a 7547
c11c10d8
RK
7548 emit_move_insn (target, op0);
7549 op0 = target;
ed239f5a
RK
7550 }
7551
c11c10d8
RK
7552 /* At this point, OP0 is in the correct mode. If the output type is such
7553 that the operand is known to be aligned, indicate that it is.
7554 Otherwise, we need only be concerned about alignment for non-BLKmode
7555 results. */
ed239f5a
RK
7556 if (GET_CODE (op0) == MEM)
7557 {
7558 op0 = copy_rtx (op0);
7559
ed239f5a
RK
7560 if (TYPE_ALIGN_OK (type))
7561 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7562 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7563 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7564 {
7565 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
65a07688
RK
7566 HOST_WIDE_INT temp_size
7567 = MAX (int_size_in_bytes (inner_type),
7568 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
ed239f5a
RK
7569 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7570 temp_size, 0, type);
c4e59f51 7571 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
ed239f5a 7572
c11c10d8
RK
7573 if (TREE_ADDRESSABLE (exp))
7574 abort ();
7575
ed239f5a
RK
7576 if (GET_MODE (op0) == BLKmode)
7577 emit_block_move (new_with_op0_mode, op0,
44bb111a 7578 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
8403445a
AM
7579 (modifier == EXPAND_STACK_PARM
7580 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
ed239f5a
RK
7581 else
7582 emit_move_insn (new_with_op0_mode, op0);
7583
7584 op0 = new;
7585 }
0fb7aeda 7586
c4e59f51 7587 op0 = adjust_address (op0, TYPE_MODE (type), 0);
ed239f5a
RK
7588 }
7589
7590 return op0;
7591
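 /* Illustration of the VIEW_CONVERT_EXPR memory path above (a sketch).
    An Ada-style unchecked conversion that views a record as a scalar,
    similar in spirit to the C union trick

        union { struct rec r; long long bits; } u;   ... u.bits ...

    cannot go through gen_lowpart when one of the modes is BLKmode, so the
    operand is forced into a stack temporary and the resulting MEM is
    simply re-read with the mode of the new type.  */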
bbf6f052 7592 case PLUS_EXPR:
91ce572a 7593 this_optab = ! unsignedp && flag_trapv
a9785c70 7594 && (GET_MODE_CLASS (mode) == MODE_INT)
91ce572a 7595 ? addv_optab : add_optab;
bbf6f052
RK
7596
7597 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7598 something else, make sure we add the register to the constant and
7599 then to the other thing. This case can occur during strength
7600 reduction and doing it this way will produce better code if the
7601 frame pointer or argument pointer is eliminated.
7602
7603 fold-const.c will ensure that the constant is always in the inner
7604 PLUS_EXPR, so the only case we need to do anything about is if
7605 sp, ap, or fp is our second argument, in which case we must swap
7606 the innermost first argument and our second argument. */
7607
7608 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7609 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7610 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7611 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7612 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7613 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7614 {
7615 tree t = TREE_OPERAND (exp, 1);
7616
7617 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7618 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7619 }
7620
88f63c77 7621 /* If the result is to be ptr_mode and we are adding an integer to
bbf6f052
RK
7622 something, we might be forming a constant. So try to use
7623 plus_constant. If it produces a sum and we can't accept it,
7624 use force_operand. This allows P = &ARR[const] to generate
7625 efficient code on machines where a SYMBOL_REF is not a valid
7626 address.
7627
7628 If this is an EXPAND_SUM call, always return the sum. */
c980ac49 7629 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
0fb7aeda 7630 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
bbf6f052 7631 {
8403445a
AM
7632 if (modifier == EXPAND_STACK_PARM)
7633 target = 0;
c980ac49
RS
7634 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7635 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7636 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7637 {
cbbc503e
JL
7638 rtx constant_part;
7639
c980ac49
RS
7640 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7641 EXPAND_SUM);
cbbc503e
JL
7642 /* Use immed_double_const to ensure that the constant is
7643 truncated according to the mode of OP1, then sign extended
7644 to a HOST_WIDE_INT. Using the constant directly can result
7645 in non-canonical RTL in a 64x32 cross compile. */
7646 constant_part
7647 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7648 (HOST_WIDE_INT) 0,
a5efcd63 7649 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7f401c74 7650 op1 = plus_constant (op1, INTVAL (constant_part));
c980ac49
RS
7651 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7652 op1 = force_operand (op1, target);
7653 return op1;
7654 }
bbf6f052 7655
c980ac49
RS
7656 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7657 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7658 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7659 {
cbbc503e
JL
7660 rtx constant_part;
7661
c980ac49 7662 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
70d95bac
RH
7663 (modifier == EXPAND_INITIALIZER
7664 ? EXPAND_INITIALIZER : EXPAND_SUM));
c980ac49
RS
7665 if (! CONSTANT_P (op0))
7666 {
7667 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7668 VOIDmode, modifier);
f0e9957a
RS
7669 /* Return a PLUS if modifier says it's OK. */
7670 if (modifier == EXPAND_SUM
7671 || modifier == EXPAND_INITIALIZER)
7672 return simplify_gen_binary (PLUS, mode, op0, op1);
7673 goto binop2;
c980ac49 7674 }
cbbc503e
JL
7675 /* Use immed_double_const to ensure that the constant is
7676 truncated according to the mode of OP1, then sign extended
7677 to a HOST_WIDE_INT. Using the constant directly can result
7678 in non-canonical RTL in a 64x32 cross compile. */
7679 constant_part
7680 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7681 (HOST_WIDE_INT) 0,
2a94e396 7682 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7f401c74 7683 op0 = plus_constant (op0, INTVAL (constant_part));
c980ac49
RS
7684 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7685 op0 = force_operand (op0, target);
7686 return op0;
7687 }
bbf6f052
RK
7688 }
7689
7690 /* No sense saving up arithmetic to be done
7691 if it's all in the wrong mode to form part of an address.
7692 And force_operand won't know whether to sign-extend or
7693 zero-extend. */
7694 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
88f63c77 7695 || mode != ptr_mode)
4ef7870a 7696 {
eb698c58
RS
7697 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7698 subtarget, &op0, &op1, 0);
6e7727eb
EB
7699 if (op0 == const0_rtx)
7700 return op1;
7701 if (op1 == const0_rtx)
7702 return op0;
4ef7870a
EB
7703 goto binop2;
7704 }
bbf6f052 7705
eb698c58
RS
7706 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7707 subtarget, &op0, &op1, modifier);
f0e9957a 7708 return simplify_gen_binary (PLUS, mode, op0, op1);
bbf6f052
RK
7709
7710 case MINUS_EXPR:
ea87523e
RK
7711 /* For initializers, we are allowed to return a MINUS of two
7712 symbolic constants. Here we handle all cases when both operands
7713 are constant. */
bbf6f052
RK
7714 /* Handle difference of two symbolic constants,
7715 for the sake of an initializer. */
7716 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7717 && really_constant_p (TREE_OPERAND (exp, 0))
7718 && really_constant_p (TREE_OPERAND (exp, 1)))
7719 {
eb698c58
RS
7720 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7721 NULL_RTX, &op0, &op1, modifier);
ea87523e 7722
ea87523e
RK
7723 /* If the last operand is a CONST_INT, use plus_constant of
7724 the negated constant. Else make the MINUS. */
7725 if (GET_CODE (op1) == CONST_INT)
7726 return plus_constant (op0, - INTVAL (op1));
7727 else
38a448ca 7728 return gen_rtx_MINUS (mode, op0, op1);
bbf6f052 7729 }
ae431183 7730
91ce572a
CC
7731 this_optab = ! unsignedp && flag_trapv
7732 && (GET_MODE_CLASS(mode) == MODE_INT)
7733 ? subv_optab : sub_optab;
1717e19e
UW
7734
7735 /* No sense saving up arithmetic to be done
7736 if it's all in the wrong mode to form part of an address.
7737 And force_operand won't know whether to sign-extend or
7738 zero-extend. */
7739 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7740 || mode != ptr_mode)
7741 goto binop;
7742
eb698c58
RS
7743 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7744 subtarget, &op0, &op1, modifier);
1717e19e
UW
7745
7746 /* Convert A - const to A + (-const). */
7747 if (GET_CODE (op1) == CONST_INT)
7748 {
7749 op1 = negate_rtx (mode, op1);
f0e9957a 7750 return simplify_gen_binary (PLUS, mode, op0, op1);
1717e19e
UW
7751 }
7752
7753 goto binop2;
bbf6f052
RK
7754
7755 case MULT_EXPR:
bbf6f052
RK
7756 /* If first operand is constant, swap them.
7757 Thus the following special case checks need only
7758 check the second operand. */
7759 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7760 {
b3694847 7761 tree t1 = TREE_OPERAND (exp, 0);
bbf6f052
RK
7762 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7763 TREE_OPERAND (exp, 1) = t1;
7764 }
7765
7766 /* Attempt to return something suitable for generating an
7767 indexed address, for machines that support that. */
7768
88f63c77 7769 if (modifier == EXPAND_SUM && mode == ptr_mode
3b40e71b 7770 && host_integerp (TREE_OPERAND (exp, 1), 0))
bbf6f052 7771 {
48a5f2fa
DJ
7772 tree exp1 = TREE_OPERAND (exp, 1);
7773
921b3427
RK
7774 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7775 EXPAND_SUM);
bbf6f052 7776
bbf6f052 7777 if (GET_CODE (op0) != REG)
906c4e36 7778 op0 = force_operand (op0, NULL_RTX);
bbf6f052
RK
7779 if (GET_CODE (op0) != REG)
7780 op0 = copy_to_mode_reg (mode, op0);
7781
48a5f2fa
DJ
7782 return gen_rtx_MULT (mode, op0,
7783 gen_int_mode (tree_low_cst (exp1, 0),
7784 TYPE_MODE (TREE_TYPE (exp1))));
bbf6f052
RK
7785 }
7786
8403445a
AM
7787 if (modifier == EXPAND_STACK_PARM)
7788 target = 0;
7789
bbf6f052
RK
7790 /* Check for multiplying things that have been extended
7791 from a narrower type. If this machine supports multiplying
7792 in that narrower type with a result in the desired type,
7793 do it that way, and avoid the explicit type-conversion. */
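	 /* Illustration, not from the original source (hypothetical C):

		int a, b;
		long long p = (long long) a * (long long) b;

	    Both operands reach here wrapped in NOP_EXPRs from a narrower
	    type, so a single widening multiply can be used instead of two
	    explicit extensions followed by a full-width multiply.  */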
7794 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7795 && TREE_CODE (type) == INTEGER_TYPE
7796 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7797 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7798 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7799 && int_fits_type_p (TREE_OPERAND (exp, 1),
7800 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7801 /* Don't use a widening multiply if a shift will do. */
7802 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
906c4e36 7803 > HOST_BITS_PER_WIDE_INT)
bbf6f052
RK
7804 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7805 ||
7806 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7807 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7808 ==
7809 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7810 /* If both operands are extended, they must either both
7811 be zero-extended or both be sign-extended. */
7812 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7813 ==
7814 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7815 {
888d65b5
RS
7816 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
7817 enum machine_mode innermode = TYPE_MODE (op0type);
7818 bool zextend_p = TREE_UNSIGNED (op0type);
7819 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7820 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7821
b10af0c8 7822 if (mode == GET_MODE_WIDER_MODE (innermode))
bbf6f052 7823 {
b10af0c8
TG
7824 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7825 {
b10af0c8 7826 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
eb698c58
RS
7827 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7828 TREE_OPERAND (exp, 1),
7829 NULL_RTX, &op0, &op1, 0);
b10af0c8 7830 else
eb698c58
RS
7831 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7832 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7833 NULL_RTX, &op0, &op1, 0);
b10af0c8
TG
7834 goto binop2;
7835 }
7836 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7837 && innermode == word_mode)
7838 {
888d65b5 7839 rtx htem, hipart;
b10af0c8
TG
7840 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7841 NULL_RTX, VOIDmode, 0);
7842 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8c118062
GK
7843 op1 = convert_modes (innermode, mode,
7844 expand_expr (TREE_OPERAND (exp, 1),
7845 NULL_RTX, VOIDmode, 0),
7846 unsignedp);
b10af0c8
TG
7847 else
7848 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7849 NULL_RTX, VOIDmode, 0);
7850 temp = expand_binop (mode, other_optab, op0, op1, target,
7851 unsignedp, OPTAB_LIB_WIDEN);
888d65b5
RS
7852 hipart = gen_highpart (innermode, temp);
7853 htem = expand_mult_highpart_adjust (innermode, hipart,
7854 op0, op1, hipart,
7855 zextend_p);
7856 if (htem != hipart)
7857 emit_move_insn (hipart, htem);
b10af0c8
TG
7858 return temp;
7859 }
bbf6f052
RK
7860 }
7861 }
eb698c58
RS
7862 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7863 subtarget, &op0, &op1, 0);
bbf6f052
RK
7864 return expand_mult (mode, op0, op1, target, unsignedp);
7865
7866 case TRUNC_DIV_EXPR:
7867 case FLOOR_DIV_EXPR:
7868 case CEIL_DIV_EXPR:
7869 case ROUND_DIV_EXPR:
7870 case EXACT_DIV_EXPR:
8403445a
AM
7871 if (modifier == EXPAND_STACK_PARM)
7872 target = 0;
bbf6f052
RK
7873 /* Possible optimization: compute the dividend with EXPAND_SUM
7874 then if the divisor is constant can optimize the case
7875 where some terms of the dividend have coeffs divisible by it. */
eb698c58
RS
7876 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7877 subtarget, &op0, &op1, 0);
bbf6f052
RK
7878 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7879
7880 case RDIV_EXPR:
b7e9703c
JH
7881 /* Emit a/b as a*(1/b). Later we may manage to CSE the reciprocal,
7882 saving an expensive divide. If not, combine will rebuild the
7883 original computation. */
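	 /* Illustration, not from the original source: under
	    -funsafe-math-optimizations, x/y and z/y both become multiplies
	    by the same (1/y), so a later CSE pass can compute the
	    reciprocal, and hence the division, only once.  */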
7884 if (flag_unsafe_math_optimizations && optimize && !optimize_size
ed7d44bc 7885 && TREE_CODE (type) == REAL_TYPE
b7e9703c
JH
7886 && !real_onep (TREE_OPERAND (exp, 0)))
7887 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7888 build (RDIV_EXPR, type,
7889 build_real (type, dconst1),
7890 TREE_OPERAND (exp, 1))),
8e37cba8 7891 target, tmode, modifier);
ef89d648 7892 this_optab = sdiv_optab;
bbf6f052
RK
7893 goto binop;
7894
7895 case TRUNC_MOD_EXPR:
7896 case FLOOR_MOD_EXPR:
7897 case CEIL_MOD_EXPR:
7898 case ROUND_MOD_EXPR:
8403445a
AM
7899 if (modifier == EXPAND_STACK_PARM)
7900 target = 0;
eb698c58
RS
7901 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7902 subtarget, &op0, &op1, 0);
bbf6f052
RK
7903 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7904
7905 case FIX_ROUND_EXPR:
7906 case FIX_FLOOR_EXPR:
7907 case FIX_CEIL_EXPR:
7908 abort (); /* Not used for C. */
7909
7910 case FIX_TRUNC_EXPR:
906c4e36 7911 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8403445a 7912 if (target == 0 || modifier == EXPAND_STACK_PARM)
bbf6f052
RK
7913 target = gen_reg_rtx (mode);
7914 expand_fix (target, op0, unsignedp);
7915 return target;
7916
7917 case FLOAT_EXPR:
906c4e36 7918 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8403445a 7919 if (target == 0 || modifier == EXPAND_STACK_PARM)
bbf6f052
RK
7920 target = gen_reg_rtx (mode);
7921 /* expand_float can't figure out what to do if FROM has VOIDmode.
7922 So give it the correct mode. With -O, cse will optimize this. */
7923 if (GET_MODE (op0) == VOIDmode)
7924 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7925 op0);
7926 expand_float (target, op0,
7927 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7928 return target;
7929
7930 case NEGATE_EXPR:
5b22bee8 7931 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8403445a
AM
7932 if (modifier == EXPAND_STACK_PARM)
7933 target = 0;
91ce572a 7934 temp = expand_unop (mode,
0fb7aeda
KH
7935 ! unsignedp && flag_trapv
7936 && (GET_MODE_CLASS(mode) == MODE_INT)
7937 ? negv_optab : neg_optab, op0, target, 0);
bbf6f052
RK
7938 if (temp == 0)
7939 abort ();
7940 return temp;
7941
7942 case ABS_EXPR:
7943 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8403445a
AM
7944 if (modifier == EXPAND_STACK_PARM)
7945 target = 0;
bbf6f052 7946
11017cc7 7947 /* ABS_EXPR is not valid for complex arguments. */
d6a5ac33
RK
7948 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7949 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
11017cc7 7950 abort ();
2d7050fd 7951
bbf6f052
RK
7952 /* Unsigned abs is simply the operand. Testing here means we don't
7953 risk generating incorrect code below. */
7954 if (TREE_UNSIGNED (type))
7955 return op0;
7956
91ce572a 7957 return expand_abs (mode, op0, target, unsignedp,
e5e809f4 7958 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
bbf6f052
RK
7959
7960 case MAX_EXPR:
7961 case MIN_EXPR:
7962 target = original_target;
8403445a
AM
7963 if (target == 0
7964 || modifier == EXPAND_STACK_PARM
fc155707 7965 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
d6a5ac33 7966 || GET_MODE (target) != mode
bbf6f052
RK
7967 || (GET_CODE (target) == REG
7968 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7969 target = gen_reg_rtx (mode);
eb698c58
RS
7970 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7971 target, &op0, &op1, 0);
bbf6f052
RK
7972
7973 /* First try to do it with a special MIN or MAX instruction.
7974 If that does not win, use a conditional jump to select the proper
7975 value. */
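      /* Sketch of the fallback sequence emitted below (illustration only,
	 shown for MAX_EXPR; MIN_EXPR uses LE):

	     target = op0;
	     if (target >= op1)
	       goto lab;
	     target = op1;
	   lab:
      */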
288dc1ea 7976 this_optab = (unsignedp
bbf6f052
RK
7977 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7978 : (code == MIN_EXPR ? smin_optab : smax_optab));
7979
7980 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7981 OPTAB_WIDEN);
7982 if (temp != 0)
7983 return temp;
7984
fa2981d8
JW
7985 /* At this point, a MEM target is no longer useful; we will get better
7986 code without it. */
3a94c984 7987
fa2981d8
JW
7988 if (GET_CODE (target) == MEM)
7989 target = gen_reg_rtx (mode);
7990
e3be1116
RS
7991 /* If op1 was placed in target, swap op0 and op1. */
7992 if (target != op0 && target == op1)
7993 {
7994 rtx tem = op0;
7995 op0 = op1;
7996 op1 = tem;
7997 }
7998
ee456b1c
RK
7999 if (target != op0)
8000 emit_move_insn (target, op0);
d6a5ac33 8001
bbf6f052 8002 op0 = gen_label_rtx ();
d6a5ac33 8003
f81497d9
RS
8004 /* If this mode is an integer too wide to compare properly,
8005 compare word by word. Rely on cse to optimize constant cases. */
1eb8759b
RH
8006 if (GET_MODE_CLASS (mode) == MODE_INT
8007 && ! can_compare_p (GE, mode, ccp_jump))
bbf6f052 8008 {
f81497d9 8009 if (code == MAX_EXPR)
288dc1ea
EB
8010 do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
8011 NULL_RTX, op0);
bbf6f052 8012 else
288dc1ea
EB
8013 do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
8014 NULL_RTX, op0);
bbf6f052 8015 }
f81497d9
RS
8016 else
8017 {
b30f05db 8018 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
288dc1ea 8019 unsignedp, mode, NULL_RTX, NULL_RTX, op0);
f81497d9 8020 }
b30f05db 8021 emit_move_insn (target, op1);
bbf6f052
RK
8022 emit_label (op0);
8023 return target;
8024
bbf6f052
RK
8025 case BIT_NOT_EXPR:
8026 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8403445a
AM
8027 if (modifier == EXPAND_STACK_PARM)
8028 target = 0;
bbf6f052
RK
8029 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8030 if (temp == 0)
8031 abort ();
8032 return temp;
8033
d6a5ac33
RK
8034 /* ??? Can optimize bitwise operations with one arg constant.
8035 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8036 and (a bitwise1 b) bitwise2 b (etc)
8037 but that is probably not worth while. */
8038
8039 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8040 boolean values when we want in all cases to compute both of them. In
8041 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8042 as actual zero-or-1 values and then bitwise anding. In cases where
8043 there cannot be any side effects, better code would be made by
8044 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8045 how to recognize those cases. */
8046
bbf6f052
RK
8047 case TRUTH_AND_EXPR:
8048 case BIT_AND_EXPR:
8049 this_optab = and_optab;
8050 goto binop;
8051
bbf6f052
RK
8052 case TRUTH_OR_EXPR:
8053 case BIT_IOR_EXPR:
8054 this_optab = ior_optab;
8055 goto binop;
8056
874726a8 8057 case TRUTH_XOR_EXPR:
bbf6f052
RK
8058 case BIT_XOR_EXPR:
8059 this_optab = xor_optab;
8060 goto binop;
8061
8062 case LSHIFT_EXPR:
8063 case RSHIFT_EXPR:
8064 case LROTATE_EXPR:
8065 case RROTATE_EXPR:
e5e809f4 8066 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052 8067 subtarget = 0;
8403445a
AM
8068 if (modifier == EXPAND_STACK_PARM)
8069 target = 0;
bbf6f052
RK
8070 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8071 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8072 unsignedp);
8073
d6a5ac33
RK
8074 /* Could determine the answer when only additive constants differ. Also,
8075 the addition of one can be handled by changing the condition. */
bbf6f052
RK
8076 case LT_EXPR:
8077 case LE_EXPR:
8078 case GT_EXPR:
8079 case GE_EXPR:
8080 case EQ_EXPR:
8081 case NE_EXPR:
1eb8759b
RH
8082 case UNORDERED_EXPR:
8083 case ORDERED_EXPR:
8084 case UNLT_EXPR:
8085 case UNLE_EXPR:
8086 case UNGT_EXPR:
8087 case UNGE_EXPR:
8088 case UNEQ_EXPR:
8403445a
AM
8089 temp = do_store_flag (exp,
8090 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8091 tmode != VOIDmode ? tmode : mode, 0);
bbf6f052
RK
8092 if (temp != 0)
8093 return temp;
d6a5ac33 8094
0f41302f 8095 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
bbf6f052
RK
8096 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8097 && original_target
8098 && GET_CODE (original_target) == REG
8099 && (GET_MODE (original_target)
8100 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8101 {
d6a5ac33
RK
8102 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8103 VOIDmode, 0);
8104
c0a3eeac
UW
8105 /* If temp is constant, we can just compute the result. */
8106 if (GET_CODE (temp) == CONST_INT)
8107 {
8108 if (INTVAL (temp) != 0)
8109 emit_move_insn (target, const1_rtx);
8110 else
8111 emit_move_insn (target, const0_rtx);
8112
8113 return target;
8114 }
8115
bbf6f052 8116 if (temp != original_target)
c0a3eeac
UW
8117 {
8118 enum machine_mode mode1 = GET_MODE (temp);
8119 if (mode1 == VOIDmode)
8120 mode1 = tmode != VOIDmode ? tmode : mode;
0fb7aeda 8121
c0a3eeac
UW
8122 temp = copy_to_mode_reg (mode1, temp);
8123 }
d6a5ac33 8124
bbf6f052 8125 op1 = gen_label_rtx ();
c5d5d461 8126 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
a06ef755 8127 GET_MODE (temp), unsignedp, op1);
bbf6f052
RK
8128 emit_move_insn (temp, const1_rtx);
8129 emit_label (op1);
8130 return temp;
8131 }
d6a5ac33 8132
bbf6f052
RK
8133 /* If no set-flag instruction, must generate a conditional
8134 store into a temporary variable. Drop through
8135 and handle this like && and ||. */
8136
8137 case TRUTH_ANDIF_EXPR:
8138 case TRUTH_ORIF_EXPR:
e44842fe 8139 if (! ignore
8403445a
AM
8140 && (target == 0
8141 || modifier == EXPAND_STACK_PARM
8142 || ! safe_from_p (target, exp, 1)
e44842fe
RK
8143 /* Make sure we don't have a hard reg (such as function's return
8144 value) live across basic blocks, if not optimizing. */
8145 || (!optimize && GET_CODE (target) == REG
8146 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
bbf6f052 8147 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
e44842fe
RK
8148
8149 if (target)
8150 emit_clr_insn (target);
8151
bbf6f052
RK
8152 op1 = gen_label_rtx ();
8153 jumpifnot (exp, op1);
e44842fe
RK
8154
8155 if (target)
8156 emit_0_to_1_insn (target);
8157
bbf6f052 8158 emit_label (op1);
e44842fe 8159 return ignore ? const0_rtx : target;
bbf6f052
RK
8160
8161 case TRUTH_NOT_EXPR:
8403445a
AM
8162 if (modifier == EXPAND_STACK_PARM)
8163 target = 0;
bbf6f052
RK
8164 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8165 /* The parser is careful to generate TRUTH_NOT_EXPR
8166 only with operands that are always zero or one. */
906c4e36 8167 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
bbf6f052
RK
8168 target, 1, OPTAB_LIB_WIDEN);
8169 if (temp == 0)
8170 abort ();
8171 return temp;
8172
8173 case COMPOUND_EXPR:
8174 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8175 emit_queue ();
0fab64a3
MM
8176 return expand_expr_real (TREE_OPERAND (exp, 1),
8177 (ignore ? const0_rtx : target),
8178 VOIDmode, modifier, alt_rtl);
bbf6f052
RK
8179
8180 case COND_EXPR:
ac01eace
RK
8181 /* If we would have a "singleton" (see below) were it not for a
8182 conversion in each arm, bring that conversion back out. */
8183 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8184 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8185 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8186 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8187 {
d6edb99e
ZW
8188 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8189 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8190
8191 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8192 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8193 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8194 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8195 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8196 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8197 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8198 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
ac01eace 8199 return expand_expr (build1 (NOP_EXPR, type,
d6edb99e 8200 build (COND_EXPR, TREE_TYPE (iftrue),
ac01eace 8201 TREE_OPERAND (exp, 0),
d6edb99e 8202 iftrue, iffalse)),
ac01eace
RK
8203 target, tmode, modifier);
8204 }
8205
bbf6f052
RK
8206 {
8207 /* Note that COND_EXPRs whose type is a structure or union
8208 are required to be constructed to contain assignments of
8209 a temporary variable, so that we can evaluate them here
8210 for side effect only. If type is void, we must do likewise. */
8211
8212 /* If an arm of the branch requires a cleanup,
8213 only that cleanup is performed. */
8214
8215 tree singleton = 0;
8216 tree binary_op = 0, unary_op = 0;
bbf6f052
RK
8217
8218 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8219 convert it to our mode, if necessary. */
8220 if (integer_onep (TREE_OPERAND (exp, 1))
8221 && integer_zerop (TREE_OPERAND (exp, 2))
8222 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8223 {
dd27116b
RK
8224 if (ignore)
8225 {
8226 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
37a08a29 8227 modifier);
dd27116b
RK
8228 return const0_rtx;
8229 }
8230
8403445a
AM
8231 if (modifier == EXPAND_STACK_PARM)
8232 target = 0;
37a08a29 8233 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
bbf6f052
RK
8234 if (GET_MODE (op0) == mode)
8235 return op0;
d6a5ac33 8236
bbf6f052
RK
8237 if (target == 0)
8238 target = gen_reg_rtx (mode);
8239 convert_move (target, op0, unsignedp);
8240 return target;
8241 }
8242
ac01eace
RK
8243 /* Check for X ? A + B : A. If we have this, we can copy A to the
8244 output and conditionally add B. Similarly for unary operations.
8245 Don't do this if X has side-effects because those side effects
8246 might affect A or B and the "?" operation is a sequence point in
8247 ANSI. (operand_equal_p tests for side effects.) */
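	/* Illustration, not from the original source: for
	   "r = x ? a + b : a;" the code below copies A into the target and
	   conditionally adds B, rather than evaluating both arms and
	   selecting between them; likewise for unary cases such as
	   "x ? -a : a".  */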
bbf6f052
RK
8248
8249 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8250 && operand_equal_p (TREE_OPERAND (exp, 2),
8251 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8252 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8253 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8254 && operand_equal_p (TREE_OPERAND (exp, 1),
8255 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8256 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8257 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8258 && operand_equal_p (TREE_OPERAND (exp, 2),
8259 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8260 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8261 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8262 && operand_equal_p (TREE_OPERAND (exp, 1),
8263 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8264 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8265
01c8a7c8
RK
8266 /* If we are not to produce a result, we have no target. Otherwise,
8267 if a target was specified use it; it will not be used as an
3a94c984 8268 intermediate target unless it is safe. If no target, use a
01c8a7c8
RK
8269 temporary. */
8270
8271 if (ignore)
8272 temp = 0;
8403445a
AM
8273 else if (modifier == EXPAND_STACK_PARM)
8274 temp = assign_temp (type, 0, 0, 1);
01c8a7c8 8275 else if (original_target
e5e809f4 8276 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
01c8a7c8
RK
8277 || (singleton && GET_CODE (original_target) == REG
8278 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8279 && original_target == var_rtx (singleton)))
8280 && GET_MODE (original_target) == mode
7c00d1fe
RK
8281#ifdef HAVE_conditional_move
8282 && (! can_conditionally_move_p (mode)
8283 || GET_CODE (original_target) == REG
8284 || TREE_ADDRESSABLE (type))
8285#endif
8125d7e9
BS
8286 && (GET_CODE (original_target) != MEM
8287 || TREE_ADDRESSABLE (type)))
01c8a7c8
RK
8288 temp = original_target;
8289 else if (TREE_ADDRESSABLE (type))
8290 abort ();
8291 else
8292 temp = assign_temp (type, 0, 0, 1);
8293
ac01eace
RK
8294 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8295 do the test of X as a store-flag operation, do this as
8296 A + ((X != 0) << log C). Similarly for other simple binary
8297 operators. Only do for C == 1 if BRANCH_COST is low. */
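	/* Illustration, not from the original source: with a sufficiently
	   high BRANCH_COST, "r = x ? a + 4 : a;" can be emitted branch-free
	   as

	       r = a + ((x != 0) << 2);

	   since 4 is a power of 2 and log2 (4) == 2.  */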
dd27116b 8298 if (temp && singleton && binary_op
bbf6f052
RK
8299 && (TREE_CODE (binary_op) == PLUS_EXPR
8300 || TREE_CODE (binary_op) == MINUS_EXPR
8301 || TREE_CODE (binary_op) == BIT_IOR_EXPR
9fbd9f58 8302 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
ac01eace
RK
8303 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8304 : integer_onep (TREE_OPERAND (binary_op, 1)))
bbf6f052
RK
8305 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8306 {
8307 rtx result;
61f6c84f 8308 tree cond;
91ce572a 8309 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
0fb7aeda
KH
8310 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8311 ? addv_optab : add_optab)
8312 : TREE_CODE (binary_op) == MINUS_EXPR
8313 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8314 ? subv_optab : sub_optab)
8315 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8316 : xor_optab);
bbf6f052 8317
61f6c84f 8318 /* If we had X ? A : A + 1, do this as A + (X == 0). */
bbf6f052 8319 if (singleton == TREE_OPERAND (exp, 1))
61f6c84f
JJ
8320 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8321 else
8322 cond = TREE_OPERAND (exp, 0);
bbf6f052 8323
61f6c84f
JJ
8324 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8325 ? temp : NULL_RTX),
bbf6f052
RK
8326 mode, BRANCH_COST <= 1);
8327
ac01eace
RK
8328 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8329 result = expand_shift (LSHIFT_EXPR, mode, result,
8330 build_int_2 (tree_log2
8331 (TREE_OPERAND
8332 (binary_op, 1)),
8333 0),
e5e809f4 8334 (safe_from_p (temp, singleton, 1)
ac01eace
RK
8335 ? temp : NULL_RTX), 0);
8336
bbf6f052
RK
8337 if (result)
8338 {
906c4e36 8339 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8340 return expand_binop (mode, boptab, op1, result, temp,
8341 unsignedp, OPTAB_LIB_WIDEN);
8342 }
bbf6f052 8343 }
3a94c984 8344
dabf8373 8345 do_pending_stack_adjust ();
bbf6f052
RK
8346 NO_DEFER_POP;
8347 op0 = gen_label_rtx ();
8348
8349 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8350 {
8351 if (temp != 0)
8352 {
8353 /* If the target conflicts with the other operand of the
8354 binary op, we can't use it. Also, we can't use the target
8355 if it is a hard register, because evaluating the condition
8356 might clobber it. */
8357 if ((binary_op
e5e809f4 8358 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
bbf6f052
RK
8359 || (GET_CODE (temp) == REG
8360 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8361 temp = gen_reg_rtx (mode);
8403445a
AM
8362 store_expr (singleton, temp,
8363 modifier == EXPAND_STACK_PARM ? 2 : 0);
bbf6f052
RK
8364 }
8365 else
906c4e36 8366 expand_expr (singleton,
2937cf87 8367 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8368 if (singleton == TREE_OPERAND (exp, 1))
8369 jumpif (TREE_OPERAND (exp, 0), op0);
8370 else
8371 jumpifnot (TREE_OPERAND (exp, 0), op0);
8372
956d6950 8373 start_cleanup_deferral ();
bbf6f052
RK
8374 if (binary_op && temp == 0)
8375 /* Just touch the other operand. */
8376 expand_expr (TREE_OPERAND (binary_op, 1),
906c4e36 8377 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8378 else if (binary_op)
8379 store_expr (build (TREE_CODE (binary_op), type,
8380 make_tree (type, temp),
8381 TREE_OPERAND (binary_op, 1)),
8403445a 8382 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
bbf6f052
RK
8383 else
8384 store_expr (build1 (TREE_CODE (unary_op), type,
8385 make_tree (type, temp)),
8403445a 8386 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
bbf6f052 8387 op1 = op0;
bbf6f052 8388 }
bbf6f052
RK
8389 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8390 comparison operator. If we have one of these cases, set the
8391 output to A, branch on A (cse will merge these two references),
8392 then set the output to FOO. */
8393 else if (temp
8394 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8395 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8396 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8397 TREE_OPERAND (exp, 1), 0)
e9a25f70
JL
8398 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8399 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
e5e809f4 8400 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
bbf6f052 8401 {
3a94c984
KH
8402 if (GET_CODE (temp) == REG
8403 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
bbf6f052 8404 temp = gen_reg_rtx (mode);
8403445a
AM
8405 store_expr (TREE_OPERAND (exp, 1), temp,
8406 modifier == EXPAND_STACK_PARM ? 2 : 0);
bbf6f052 8407 jumpif (TREE_OPERAND (exp, 0), op0);
5dab5552 8408
956d6950 8409 start_cleanup_deferral ();
c37b68d4
RS
8410 if (TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8411 store_expr (TREE_OPERAND (exp, 2), temp,
8412 modifier == EXPAND_STACK_PARM ? 2 : 0);
8413 else
8414 expand_expr (TREE_OPERAND (exp, 2),
8415 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8416 op1 = op0;
8417 }
8418 else if (temp
8419 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8420 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8421 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8422 TREE_OPERAND (exp, 2), 0)
e9a25f70
JL
8423 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8424 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
e5e809f4 8425 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
bbf6f052 8426 {
3a94c984
KH
8427 if (GET_CODE (temp) == REG
8428 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
bbf6f052 8429 temp = gen_reg_rtx (mode);
8403445a
AM
8430 store_expr (TREE_OPERAND (exp, 2), temp,
8431 modifier == EXPAND_STACK_PARM ? 2 : 0);
bbf6f052 8432 jumpifnot (TREE_OPERAND (exp, 0), op0);
5dab5552 8433
956d6950 8434 start_cleanup_deferral ();
c37b68d4
RS
8435 if (TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8436 store_expr (TREE_OPERAND (exp, 1), temp,
8437 modifier == EXPAND_STACK_PARM ? 2 : 0);
8438 else
8439 expand_expr (TREE_OPERAND (exp, 1),
8440 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8441 op1 = op0;
8442 }
8443 else
8444 {
8445 op1 = gen_label_rtx ();
8446 jumpifnot (TREE_OPERAND (exp, 0), op0);
5dab5552 8447
956d6950 8448 start_cleanup_deferral ();
3a94c984 8449
2ac84cfe 8450 /* One branch of the cond can be void, if it never returns. For
3a94c984 8451 example, A ? throw : E. */
2ac84cfe 8452 if (temp != 0
3a94c984 8453 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8403445a
AM
8454 store_expr (TREE_OPERAND (exp, 1), temp,
8455 modifier == EXPAND_STACK_PARM ? 2 : 0);
bbf6f052 8456 else
906c4e36
RK
8457 expand_expr (TREE_OPERAND (exp, 1),
8458 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
956d6950 8459 end_cleanup_deferral ();
bbf6f052
RK
8460 emit_queue ();
8461 emit_jump_insn (gen_jump (op1));
8462 emit_barrier ();
8463 emit_label (op0);
956d6950 8464 start_cleanup_deferral ();
2ac84cfe 8465 if (temp != 0
3a94c984 8466 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8403445a
AM
8467 store_expr (TREE_OPERAND (exp, 2), temp,
8468 modifier == EXPAND_STACK_PARM ? 2 : 0);
bbf6f052 8469 else
906c4e36
RK
8470 expand_expr (TREE_OPERAND (exp, 2),
8471 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8472 }
8473
956d6950 8474 end_cleanup_deferral ();
bbf6f052
RK
8475
8476 emit_queue ();
8477 emit_label (op1);
8478 OK_DEFER_POP;
5dab5552 8479
bbf6f052
RK
8480 return temp;
8481 }
8482
8483 case TARGET_EXPR:
8484 {
8485 /* Something needs to be initialized, but we didn't know
8486 where that thing was when building the tree. For example,
8487 it could be the return value of a function, or a parameter
8488 to a function which is laid down on the stack, or a temporary
8489 variable which must be passed by reference.
8490
8491 We guarantee that the expression will either be constructed
8492 or copied into our original target. */
8493
8494 tree slot = TREE_OPERAND (exp, 0);
2a888d4c 8495 tree cleanups = NULL_TREE;
5c062816 8496 tree exp1;
bbf6f052
RK
8497
8498 if (TREE_CODE (slot) != VAR_DECL)
8499 abort ();
8500
9c51f375
RK
8501 if (! ignore)
8502 target = original_target;
8503
6fbfac92
JM
8504 /* Set this here so that if we get a target that refers to a
8505 register variable that's already been used, put_reg_into_stack
3a94c984 8506 knows that it should fix up those uses. */
6fbfac92
JM
8507 TREE_USED (slot) = 1;
8508
bbf6f052
RK
8509 if (target == 0)
8510 {
19e7881c 8511 if (DECL_RTL_SET_P (slot))
ac993f4f
MS
8512 {
8513 target = DECL_RTL (slot);
5c062816 8514 /* If we have already expanded the slot, don't do
ac993f4f 8515 it again. (mrs) */
5c062816
MS
8516 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8517 return target;
ac993f4f 8518 }
bbf6f052
RK
8519 else
8520 {
e9a25f70 8521 target = assign_temp (type, 2, 0, 1);
bbf6f052
RK
8522 /* All temp slots at this level must not conflict. */
8523 preserve_temp_slots (target);
19e7881c 8524 SET_DECL_RTL (slot, target);
e9a25f70 8525 if (TREE_ADDRESSABLE (slot))
f29a2bd1 8526 put_var_into_stack (slot, /*rescan=*/false);
bbf6f052 8527
e287fd6e
RK
8528 /* Since SLOT is not known to the called function
8529 to belong to its stack frame, we must build an explicit
8530 cleanup. This case occurs when we must build up a reference
8531 to pass the reference as an argument. In this case,
8532 it is very likely that such a reference need not be
8533 built here. */
8534
8535 if (TREE_OPERAND (exp, 2) == 0)
c88770e9 8536 TREE_OPERAND (exp, 2)
ae2bcd98 8537 = lang_hooks.maybe_build_cleanup (slot);
2a888d4c 8538 cleanups = TREE_OPERAND (exp, 2);
e287fd6e 8539 }
bbf6f052
RK
8540 }
8541 else
8542 {
8543 /* This case does occur, when expanding a parameter which
8544 needs to be constructed on the stack. The target
8545 is the actual stack address that we want to initialize.
8546 The function we call will perform the cleanup in this case. */
8547
8c042b47
RS
8548 /* If we have already assigned it space, use that space,
8549 not target that we were passed in, as our target
8550 parameter is only a hint. */
19e7881c 8551 if (DECL_RTL_SET_P (slot))
3a94c984
KH
8552 {
8553 target = DECL_RTL (slot);
8554 /* If we have already expanded the slot, don't do
8c042b47 8555 it again. (mrs) */
3a94c984
KH
8556 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8557 return target;
8c042b47 8558 }
21002281
JW
8559 else
8560 {
19e7881c 8561 SET_DECL_RTL (slot, target);
21002281
JW
8562 /* If we must have an addressable slot, then make sure that
8563 the RTL that we just stored in slot is OK. */
8564 if (TREE_ADDRESSABLE (slot))
f29a2bd1 8565 put_var_into_stack (slot, /*rescan=*/true);
21002281 8566 }
bbf6f052
RK
8567 }
8568
4847c938 8569 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
5c062816
MS
8570 /* Mark it as expanded. */
8571 TREE_OPERAND (exp, 1) = NULL_TREE;
8572
8403445a 8573 store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
61d6b1cc 8574
659e5a7a 8575 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
3a94c984 8576
41531e5b 8577 return target;
bbf6f052
RK
8578 }
8579
8580 case INIT_EXPR:
8581 {
8582 tree lhs = TREE_OPERAND (exp, 0);
8583 tree rhs = TREE_OPERAND (exp, 1);
bbf6f052 8584
b90f141a 8585 temp = expand_assignment (lhs, rhs, ! ignore);
bbf6f052
RK
8586 return temp;
8587 }
8588
8589 case MODIFY_EXPR:
8590 {
8591 /* If lhs is complex, expand calls in rhs before computing it.
6d0a3f67
NS
8592 That's so we don't compute a pointer and save it over a
8593 call. If lhs is simple, compute it first so we can give it
8594 as a target if the rhs is just a call. This avoids an
8595 extra temp and copy, and prevents a partial subsumption
8596 that makes bad code. Actually we could treat
8597 component_ref's of vars like vars. */
bbf6f052
RK
8598
8599 tree lhs = TREE_OPERAND (exp, 0);
8600 tree rhs = TREE_OPERAND (exp, 1);
bbf6f052
RK
8601
8602 temp = 0;
8603
bbf6f052
RK
8604 /* Check for |= or &= of a bitfield of size one into another bitfield
8605 of size 1. In this case, (unless we need the result of the
8606 assignment) we can do this more efficiently with a
8607 test followed by an assignment, if necessary.
8608
8609 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8610 things change so we do, this code should be enhanced to
8611 support it. */
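	/* Illustration, not from the original source (hypothetical C):

	       struct { unsigned a : 1, b : 1; } s;
	       s.a |= s.b;

	   The expansion below tests s.b and, for |=, stores 1 only when
	   s.b is set (for &=, stores 0 only when s.b is clear), instead of
	   doing a full read-modify-write of the destination bitfield.  */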
8612 if (ignore
8613 && TREE_CODE (lhs) == COMPONENT_REF
8614 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8615 || TREE_CODE (rhs) == BIT_AND_EXPR)
8616 && TREE_OPERAND (rhs, 0) == lhs
8617 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
05bccae2
RK
8618 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8619 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
bbf6f052
RK
8620 {
8621 rtx label = gen_label_rtx ();
8622
8623 do_jump (TREE_OPERAND (rhs, 1),
8624 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8625 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8626 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8627 (TREE_CODE (rhs) == BIT_IOR_EXPR
8628 ? integer_one_node
8629 : integer_zero_node)),
b90f141a 8630 0);
e7c33f54 8631 do_pending_stack_adjust ();
bbf6f052
RK
8632 emit_label (label);
8633 return const0_rtx;
8634 }
8635
b90f141a 8636 temp = expand_assignment (lhs, rhs, ! ignore);
0fb7aeda 8637
bbf6f052
RK
8638 return temp;
8639 }
8640
6e7f84a7
APB
8641 case RETURN_EXPR:
8642 if (!TREE_OPERAND (exp, 0))
8643 expand_null_return ();
8644 else
8645 expand_return (TREE_OPERAND (exp, 0));
8646 return const0_rtx;
8647
bbf6f052
RK
8648 case PREINCREMENT_EXPR:
8649 case PREDECREMENT_EXPR:
7b8b9722 8650 return expand_increment (exp, 0, ignore);
bbf6f052
RK
8651
8652 case POSTINCREMENT_EXPR:
8653 case POSTDECREMENT_EXPR:
8654 /* Faster to treat as pre-increment if result is not used. */
7b8b9722 8655 return expand_increment (exp, ! ignore, ignore);
bbf6f052
RK
8656
8657 case ADDR_EXPR:
8403445a
AM
8658 if (modifier == EXPAND_STACK_PARM)
8659 target = 0;
bbf6f052
RK
8660 /* Are we taking the address of a nested function? */
8661 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
38ee6ed9 8662 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
e5e809f4
JL
8663 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8664 && ! TREE_STATIC (exp))
bbf6f052
RK
8665 {
8666 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8667 op0 = force_operand (op0, target);
8668 }
682ba3a6
RK
8669 /* If we are taking the address of something erroneous, just
8670 return a zero. */
8671 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8672 return const0_rtx;
d6b6783b
RK
8673 /* If we are taking the address of a constant and are at the
8674 top level, we have to use output_constant_def since we can't
8675 call force_const_mem at top level. */
8676 else if (cfun == 0
8677 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8678 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8679 == 'c')))
8680 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
bbf6f052
RK
8681 else
8682 {
e287fd6e
RK
8683 /* We make sure to pass const0_rtx down if we came in with
8684 ignore set, to avoid doing the cleanups twice for something. */
8685 op0 = expand_expr (TREE_OPERAND (exp, 0),
8686 ignore ? const0_rtx : NULL_RTX, VOIDmode,
bbf6f052
RK
8687 (modifier == EXPAND_INITIALIZER
8688 ? modifier : EXPAND_CONST_ADDRESS));
896102d0 8689
119af78a
RK
8690 /* If we are going to ignore the result, OP0 will have been set
8691 to const0_rtx, so just return it. Don't get confused and
8692 think we are taking the address of the constant. */
8693 if (ignore)
8694 return op0;
8695
73b7f58c
BS
8696 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8697 clever and returns a REG when given a MEM. */
8698 op0 = protect_from_queue (op0, 1);
3539e816 8699
c5c76735
JL
8700 /* We would like the object in memory. If it is a constant, we can
8701 have it be statically allocated into memory. For a non-constant,
8702 we need to allocate some memory and store the value into it. */
896102d0
RK
8703
8704 if (CONSTANT_P (op0))
8705 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8706 op0);
682ba3a6 8707 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
df6018fd 8708 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
c1853da7 8709 || GET_CODE (op0) == PARALLEL || GET_CODE (op0) == LO_SUM)
896102d0 8710 {
6c7d86ec
RK
8711 /* If the operand is a SAVE_EXPR, we can deal with this by
8712 forcing the SAVE_EXPR into memory. */
8713 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
8714 {
f29a2bd1
MM
8715 put_var_into_stack (TREE_OPERAND (exp, 0),
8716 /*rescan=*/true);
6c7d86ec
RK
8717 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
8718 }
df6018fd 8719 else
6c7d86ec
RK
8720 {
8721 /* If this object is in a register, it can't be BLKmode. */
8722 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
19f90fad 8723 rtx memloc = assign_temp (inner_type, 1, 1, 1);
6c7d86ec
RK
8724
8725 if (GET_CODE (op0) == PARALLEL)
8726 /* Handle calls that pass values in multiple
8727 non-contiguous locations. The Irix 6 ABI has examples
8728 of this. */
6e985040 8729 emit_group_store (memloc, op0, inner_type,
6c7d86ec
RK
8730 int_size_in_bytes (inner_type));
8731 else
8732 emit_move_insn (memloc, op0);
0fb7aeda 8733
6c7d86ec
RK
8734 op0 = memloc;
8735 }
896102d0
RK
8736 }
8737
bbf6f052
RK
8738 if (GET_CODE (op0) != MEM)
8739 abort ();
3a94c984 8740
34e81b5a 8741 mark_temp_addr_taken (op0);
bbf6f052 8742 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
88f63c77 8743 {
34e81b5a 8744 op0 = XEXP (op0, 0);
5ae6cd0d 8745 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
34e81b5a 8746 op0 = convert_memory_address (ptr_mode, op0);
34e81b5a 8747 return op0;
88f63c77 8748 }
987c71d9 8749
c952ff4b
RK
8750 /* If OP0 is not aligned as least as much as the type requires, we
8751 need to make a temporary, copy OP0 to it, and take the address of
8752 the temporary. We want to use the alignment of the type, not of
8753 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8754 the test for BLKmode means that can't happen. The test for
8755 BLKmode is because we never make mis-aligned MEMs with
8756 non-BLKmode.
8757
8758 We don't need to do this at all if the machine doesn't have
8759 strict alignment. */
8760 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8761 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
ed239f5a
RK
8762 > MEM_ALIGN (op0))
8763 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
a06ef755
RK
8764 {
8765 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
bdaa131b 8766 rtx new;
a06ef755 8767
c3d32120
RK
8768 if (TYPE_ALIGN_OK (inner_type))
8769 abort ();
8770
bdaa131b
JM
8771 if (TREE_ADDRESSABLE (inner_type))
8772 {
8773 /* We can't make a bitwise copy of this object, so fail. */
8774 error ("cannot take the address of an unaligned member");
8775 return const0_rtx;
8776 }
8777
8778 new = assign_stack_temp_for_type
8779 (TYPE_MODE (inner_type),
8780 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8781 : int_size_in_bytes (inner_type),
8782 1, build_qualified_type (inner_type,
8783 (TYPE_QUALS (inner_type)
8784 | TYPE_QUAL_CONST)));
8785
44bb111a 8786 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
8403445a
AM
8787 (modifier == EXPAND_STACK_PARM
8788 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
bdaa131b 8789
a06ef755
RK
8790 op0 = new;
8791 }
8792
bbf6f052
RK
8793 op0 = force_operand (XEXP (op0, 0), target);
8794 }
987c71d9 8795
05c8e58b
HPN
8796 if (flag_force_addr
8797 && GET_CODE (op0) != REG
8798 && modifier != EXPAND_CONST_ADDRESS
8799 && modifier != EXPAND_INITIALIZER
8800 && modifier != EXPAND_SUM)
987c71d9
RK
8801 op0 = force_reg (Pmode, op0);
8802
dc6d66b3
RK
8803 if (GET_CODE (op0) == REG
8804 && ! REG_USERVAR_P (op0))
bdb429a5 8805 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
987c71d9 8806
5ae6cd0d 8807 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
9fcfcce7 8808 op0 = convert_memory_address (ptr_mode, op0);
88f63c77 8809
bbf6f052
RK
8810 return op0;
8811
8812 case ENTRY_VALUE_EXPR:
8813 abort ();
8814
7308a047
RS
8815 /* COMPLEX type for Extended Pascal & Fortran */
8816 case COMPLEX_EXPR:
8817 {
8818 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6551fa4d 8819 rtx insns;
7308a047
RS
8820
8821 /* Get the rtx code of the operands. */
8822 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8823 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8824
8825 if (! target)
8826 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8827
6551fa4d 8828 start_sequence ();
7308a047
RS
8829
8830 /* Move the real (op0) and imaginary (op1) parts to their location. */
2d7050fd
RS
8831 emit_move_insn (gen_realpart (mode, target), op0);
8832 emit_move_insn (gen_imagpart (mode, target), op1);
7308a047 8833
6551fa4d
JW
8834 insns = get_insns ();
8835 end_sequence ();
8836
7308a047 8837 /* Complex construction should appear as a single unit. */
6551fa4d
JW
8838 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8839 each with a separate pseudo as destination.
8840 It's not correct for flow to treat them as a unit. */
6d6e61ce 8841 if (GET_CODE (target) != CONCAT)
6551fa4d
JW
8842 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8843 else
2f937369 8844 emit_insn (insns);
7308a047
RS
8845
8846 return target;
8847 }
8848
8849 case REALPART_EXPR:
2d7050fd
RS
8850 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8851 return gen_realpart (mode, op0);
3a94c984 8852
7308a047 8853 case IMAGPART_EXPR:
2d7050fd
RS
8854 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8855 return gen_imagpart (mode, op0);
7308a047
RS
8856
8857 case CONJ_EXPR:
8858 {
62acb978 8859 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7308a047 8860 rtx imag_t;
6551fa4d 8861 rtx insns;
3a94c984
KH
8862
8863 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7308a047
RS
8864
8865 if (! target)
d6a5ac33 8866 target = gen_reg_rtx (mode);
3a94c984 8867
6551fa4d 8868 start_sequence ();
7308a047
RS
8869
8870 /* Store the realpart and the negated imagpart to target. */
62acb978
RK
8871 emit_move_insn (gen_realpart (partmode, target),
8872 gen_realpart (partmode, op0));
7308a047 8873
62acb978 8874 imag_t = gen_imagpart (partmode, target);
91ce572a 8875 temp = expand_unop (partmode,
0fb7aeda
KH
8876 ! unsignedp && flag_trapv
8877 && (GET_MODE_CLASS(partmode) == MODE_INT)
8878 ? negv_optab : neg_optab,
3a94c984 8879 gen_imagpart (partmode, op0), imag_t, 0);
7308a047
RS
8880 if (temp != imag_t)
8881 emit_move_insn (imag_t, temp);
8882
6551fa4d
JW
8883 insns = get_insns ();
8884 end_sequence ();
8885
3a94c984 8886 /* Conjugate should appear as a single unit.
d6a5ac33 8887 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
6551fa4d
JW
8888 each with a separate pseudo as destination.
8889 It's not correct for flow to treat them as a unit. */
6d6e61ce 8890 if (GET_CODE (target) != CONCAT)
6551fa4d
JW
8891 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8892 else
2f937369 8893 emit_insn (insns);
7308a047
RS
8894
8895 return target;
8896 }
8897
e976b8b2
MS
8898 case TRY_CATCH_EXPR:
8899 {
8900 tree handler = TREE_OPERAND (exp, 1);
8901
8902 expand_eh_region_start ();
8903
8904 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8905
52a11cbf 8906 expand_eh_region_end_cleanup (handler);
e976b8b2
MS
8907
8908 return op0;
8909 }
8910
b335b813
PB
8911 case TRY_FINALLY_EXPR:
8912 {
8913 tree try_block = TREE_OPERAND (exp, 0);
8914 tree finally_block = TREE_OPERAND (exp, 1);
b335b813 8915
8ad8135a 8916 if (!optimize || unsafe_for_reeval (finally_block) > 1)
8943a0b4
RH
8917 {
8918 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
8919 is not sufficient, so we cannot expand the block twice.
8920 So we play games with GOTO_SUBROUTINE_EXPR to let us
8921 expand the thing only once. */
8ad8135a
RH
8922 /* When not optimizing, we go ahead with this form since
8923 (1) user breakpoints operate more predictably without
8924 code duplication, and
8925 (2) we're not running any of the global optimizers
8926 that would explode in time/space with the highly
8927 connected CFG created by the indirect branching. */
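	    /* Rough shape of what this branch emits (illustration only):

		   <try block; each cleanup sets return_link to its resume
		    address and jumps to finally_label>
		   goto done_label;
		 finally_label:
		   <finally block>
		   goto *return_link;
		 done_label:
	    */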
8943a0b4
RH
8928
8929 rtx finally_label = gen_label_rtx ();
8930 rtx done_label = gen_label_rtx ();
8931 rtx return_link = gen_reg_rtx (Pmode);
8932 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8933 (tree) finally_label, (tree) return_link);
8934 TREE_SIDE_EFFECTS (cleanup) = 1;
8935
8936 /* Start a new binding layer that will keep track of all cleanup
8937 actions to be performed. */
8938 expand_start_bindings (2);
8939 target_temp_slot_level = temp_slot_level;
8940
8941 expand_decl_cleanup (NULL_TREE, cleanup);
8942 op0 = expand_expr (try_block, target, tmode, modifier);
8943
8944 preserve_temp_slots (op0);
8945 expand_end_bindings (NULL_TREE, 0, 0);
8946 emit_jump (done_label);
8947 emit_label (finally_label);
8948 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8949 emit_indirect_jump (return_link);
8950 emit_label (done_label);
8951 }
8952 else
8953 {
8954 expand_start_bindings (2);
8955 target_temp_slot_level = temp_slot_level;
b335b813 8956
8943a0b4
RH
8957 expand_decl_cleanup (NULL_TREE, finally_block);
8958 op0 = expand_expr (try_block, target, tmode, modifier);
b335b813 8959
8943a0b4
RH
8960 preserve_temp_slots (op0);
8961 expand_end_bindings (NULL_TREE, 0, 0);
8962 }
b335b813 8963
b335b813
PB
8964 return op0;
8965 }
8966
3a94c984 8967 case GOTO_SUBROUTINE_EXPR:
b335b813
PB
8968 {
8969 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8970 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8971 rtx return_address = gen_label_rtx ();
3a94c984
KH
8972 emit_move_insn (return_link,
8973 gen_rtx_LABEL_REF (Pmode, return_address));
b335b813
PB
8974 emit_jump (subr);
8975 emit_label (return_address);
8976 return const0_rtx;
8977 }
8978
d3707adb
RH
8979 case VA_ARG_EXPR:
8980 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8981
52a11cbf 8982 case EXC_PTR_EXPR:
86c99549 8983 return get_exception_pointer (cfun);
52a11cbf 8984
67231816
RH
8985 case FDESC_EXPR:
8986 /* Function descriptors are not valid except for as
8987 initialization constants, and should not be expanded. */
8988 abort ();
8989
bbf6f052 8990 default:
ae2bcd98
RS
8991 /* ??? Use (*fun) form because expand_expr is a macro. */
8992 return (*lang_hooks.expand_expr) (exp, original_target, tmode,
8993 modifier, alt_rtl);
bbf6f052
RK
8994 }
8995
8996 /* Here to do an ordinary binary operator, generating an instruction
8997 from the optab already placed in `this_optab'. */
8998 binop:
eb698c58
RS
8999 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9000 subtarget, &op0, &op1, 0);
bbf6f052 9001 binop2:
8403445a
AM
9002 if (modifier == EXPAND_STACK_PARM)
9003 target = 0;
bbf6f052
RK
9004 temp = expand_binop (mode, this_optab, op0, op1, target,
9005 unsignedp, OPTAB_LIB_WIDEN);
9006 if (temp == 0)
9007 abort ();
9008 return temp;
9009}
b93a436e 9010\f
1ce7f3c2
RK
9011/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9012 when applied to the address of EXP produces an address known to be
9013 aligned more than BIGGEST_ALIGNMENT. */
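/* Illustration, not from the original source: the offsets recognized below
   have the shape

       (-(sizetype) &EXP) & (ALIGN - 1)

   i.e. a NEGATE_EXPR of the address of EXP masked with a constant of the
   form 2**k - 1 that is larger than BIGGEST_ALIGNMENT in bytes; adding such
   an offset to &EXP rounds the address up to the next ALIGN-byte boundary.  */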
9014
9015static int
502b8322 9016is_aligning_offset (tree offset, tree exp)
1ce7f3c2 9017{
6fce44af 9018 /* Strip off any conversions. */
1ce7f3c2
RK
9019 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9020 || TREE_CODE (offset) == NOP_EXPR
6fce44af 9021 || TREE_CODE (offset) == CONVERT_EXPR)
1ce7f3c2
RK
9022 offset = TREE_OPERAND (offset, 0);
9023
9024 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9025 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9026 if (TREE_CODE (offset) != BIT_AND_EXPR
9027 || !host_integerp (TREE_OPERAND (offset, 1), 1)
c0cfc691
OH
9028 || compare_tree_int (TREE_OPERAND (offset, 1),
9029 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
1ce7f3c2
RK
9030 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9031 return 0;
9032
9033 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9034 It must be NEGATE_EXPR. Then strip any more conversions. */
9035 offset = TREE_OPERAND (offset, 0);
9036 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9037 || TREE_CODE (offset) == NOP_EXPR
9038 || TREE_CODE (offset) == CONVERT_EXPR)
9039 offset = TREE_OPERAND (offset, 0);
9040
9041 if (TREE_CODE (offset) != NEGATE_EXPR)
9042 return 0;
9043
9044 offset = TREE_OPERAND (offset, 0);
9045 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9046 || TREE_CODE (offset) == NOP_EXPR
9047 || TREE_CODE (offset) == CONVERT_EXPR)
9048 offset = TREE_OPERAND (offset, 0);
9049
6fce44af
RK
9050 /* This must now be the address of EXP. */
9051 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
1ce7f3c2
RK
9052}
9053\f
e0a2f705 9054/* Return the tree node if an ARG corresponds to a string constant or zero
cc2902df 9055 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
fed3cef0
RK
9056 in bytes within the string that ARG is accessing. The type of the
9057 offset will be `sizetype'. */
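/* Illustration, not from the original source: for an ARG built as
   &"hello"[0] + 2 (an ADDR_EXPR of the STRING_CST plus an INTEGER_CST),
   this returns the STRING_CST node and sets *PTR_OFFSET to a sizetype 2;
   for a plain &"hello" the offset returned is size_zero_node.  */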
b93a436e 9058
28f4ec01 9059tree
502b8322 9060string_constant (tree arg, tree *ptr_offset)
b93a436e
JL
9061{
9062 STRIP_NOPS (arg);
9063
9064 if (TREE_CODE (arg) == ADDR_EXPR
9065 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9066 {
fed3cef0 9067 *ptr_offset = size_zero_node;
b93a436e
JL
9068 return TREE_OPERAND (arg, 0);
9069 }
9070 else if (TREE_CODE (arg) == PLUS_EXPR)
9071 {
9072 tree arg0 = TREE_OPERAND (arg, 0);
9073 tree arg1 = TREE_OPERAND (arg, 1);
9074
9075 STRIP_NOPS (arg0);
9076 STRIP_NOPS (arg1);
9077
9078 if (TREE_CODE (arg0) == ADDR_EXPR
9079 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
bbf6f052 9080 {
fed3cef0 9081 *ptr_offset = convert (sizetype, arg1);
b93a436e 9082 return TREE_OPERAND (arg0, 0);
bbf6f052 9083 }
b93a436e
JL
9084 else if (TREE_CODE (arg1) == ADDR_EXPR
9085 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
bbf6f052 9086 {
fed3cef0 9087 *ptr_offset = convert (sizetype, arg0);
b93a436e 9088 return TREE_OPERAND (arg1, 0);
bbf6f052 9089 }
b93a436e 9090 }
ca695ac9 9091
b93a436e
JL
9092 return 0;
9093}
ca695ac9 9094\f
b93a436e
JL
9095/* Expand code for a post- or pre- increment or decrement
9096 and return the RTX for the result.
9097 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
1499e0a8 9098
b93a436e 9099static rtx
502b8322 9100expand_increment (tree exp, int post, int ignore)
ca695ac9 9101{
b3694847
SS
9102 rtx op0, op1;
9103 rtx temp, value;
9104 tree incremented = TREE_OPERAND (exp, 0);
b93a436e
JL
9105 optab this_optab = add_optab;
9106 int icode;
9107 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9108 int op0_is_copy = 0;
9109 int single_insn = 0;
9110 /* 1 means we can't store into OP0 directly,
9111 because it is a subreg narrower than a word,
9112 and we don't dare clobber the rest of the word. */
9113 int bad_subreg = 0;
1499e0a8 9114
b93a436e
JL
9115 /* Stabilize any component ref that might need to be
9116 evaluated more than once below. */
9117 if (!post
9118 || TREE_CODE (incremented) == BIT_FIELD_REF
9119 || (TREE_CODE (incremented) == COMPONENT_REF
9120 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9121 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9122 incremented = stabilize_reference (incremented);
9123 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9124 ones into save exprs so that they don't accidentally get evaluated
9125 more than once by the code below. */
9126 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9127 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9128 incremented = save_expr (incremented);
e9a25f70 9129
b93a436e
JL
9130 /* Compute the operands as RTX.
9131 Note whether OP0 is the actual lvalue or a copy of it:
9132 I believe it is a copy iff it is a register or subreg
6d2f8887 9133 and insns were generated in computing it. */
e9a25f70 9134
b93a436e 9135 temp = get_last_insn ();
37a08a29 9136 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
e9a25f70 9137
b93a436e
JL
9138 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9139 in place but instead must do sign- or zero-extension during assignment,
9140 so we copy it into a new register and let the code below use it as
9141 a copy.
e9a25f70 9142
b93a436e
JL
9143 Note that we can safely modify this SUBREG since it is known not to be
9144 shared (it was made by the expand_expr call above). */
9145
9146 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9147 {
9148 if (post)
9149 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9150 else
9151 bad_subreg = 1;
9152 }
9153 else if (GET_CODE (op0) == SUBREG
9154 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9155 {
9156 /* We cannot increment this SUBREG in place. If we are
9157 post-incrementing, get a copy of the old value. Otherwise,
9158 just mark that we cannot increment in place. */
9159 if (post)
9160 op0 = copy_to_reg (op0);
9161 else
9162 bad_subreg = 1;
e9a25f70
JL
9163 }
9164
b93a436e
JL
9165 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9166 && temp != get_last_insn ());
37a08a29 9167 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
1499e0a8 9168
b93a436e
JL
9169 /* Decide whether incrementing or decrementing. */
9170 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9171 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9172 this_optab = sub_optab;
9173
9174 /* Convert decrement by a constant into a negative increment. */
9175 if (this_optab == sub_optab
9176 && GET_CODE (op1) == CONST_INT)
ca695ac9 9177 {
3a94c984 9178 op1 = GEN_INT (-INTVAL (op1));
b93a436e 9179 this_optab = add_optab;
ca695ac9 9180 }
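/* E.g. (illustrative) "x -= 4" is handled from here on exactly like
   "x += -4", so the rest of the function only deals with add_optab for
   constant steps.  */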
1499e0a8 9181
91ce572a 9182 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
505ddab6 9183 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
91ce572a 9184
b93a436e
JL
9185 /* For a preincrement, see if we can do this with a single instruction. */
9186 if (!post)
9187 {
9188 icode = (int) this_optab->handlers[(int) mode].insn_code;
9189 if (icode != (int) CODE_FOR_nothing
9190 /* Make sure that OP0 is valid for operands 0 and 1
9191 of the insn we want to queue. */
a995e389
RH
9192 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9193 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9194 && (*insn_data[icode].operand[2].predicate) (op1, mode))
b93a436e
JL
9195 single_insn = 1;
9196 }
bbf6f052 9197
b93a436e
JL
9198 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9199 then we cannot just increment OP0. We must therefore contrive to
9200 increment the original value. Then, for postincrement, we can return
9201 OP0 since it is a copy of the old value. For preincrement, expand here
9202 unless we can do it with a single insn.
bbf6f052 9203
b93a436e
JL
9204 Likewise if storing directly into OP0 would clobber high bits
9205 we need to preserve (bad_subreg). */
9206 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
a358cee0 9207 {
b93a436e
JL
9208 /* This is the easiest way to increment the value wherever it is.
9209 Problems with multiple evaluation of INCREMENTED are prevented
9210 because either (1) it is a component_ref or preincrement,
9211 in which case it was stabilized above, or (2) it is an array_ref
9212 with constant index in an array in a register, which is
9213 safe to reevaluate. */
9214 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9215 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9216 ? MINUS_EXPR : PLUS_EXPR),
9217 TREE_TYPE (exp),
9218 incremented,
9219 TREE_OPERAND (exp, 1));
a358cee0 9220
b93a436e
JL
9221 while (TREE_CODE (incremented) == NOP_EXPR
9222 || TREE_CODE (incremented) == CONVERT_EXPR)
9223 {
9224 newexp = convert (TREE_TYPE (incremented), newexp);
9225 incremented = TREE_OPERAND (incremented, 0);
9226 }
bbf6f052 9227
b90f141a 9228 temp = expand_assignment (incremented, newexp, ! post && ! ignore);
b93a436e
JL
9229 return post ? op0 : temp;
9230 }
bbf6f052 9231
b93a436e
JL
9232 if (post)
9233 {
9234 /* We have a true reference to the value in OP0.
9235 If there is an insn to add or subtract in this mode, queue it.
d91edf86 9236 Queuing the increment insn avoids the register shuffling
b93a436e
JL
9237 that often results if we must increment now and first save
9238 the old value for subsequent use. */
bbf6f052 9239
b93a436e
JL
9240#if 0 /* Turned off to avoid making extra insn for indexed memref. */
9241 op0 = stabilize (op0);
9242#endif
41dfd40c 9243
b93a436e
JL
9244 icode = (int) this_optab->handlers[(int) mode].insn_code;
9245 if (icode != (int) CODE_FOR_nothing
9246 /* Make sure that OP0 is valid for operands 0 and 1
9247 of the insn we want to queue. */
a995e389
RH
9248 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9249 && (*insn_data[icode].operand[1].predicate) (op0, mode))
b93a436e 9250 {
a995e389 9251 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
b93a436e 9252 op1 = force_reg (mode, op1);
bbf6f052 9253
b93a436e
JL
9254 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9255 }
9256 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9257 {
9258 rtx addr = (general_operand (XEXP (op0, 0), mode)
9259 ? force_reg (Pmode, XEXP (op0, 0))
9260 : copy_to_reg (XEXP (op0, 0)));
9261 rtx temp, result;
ca695ac9 9262
792760b9 9263 op0 = replace_equiv_address (op0, addr);
b93a436e 9264 temp = force_reg (GET_MODE (op0), op0);
a995e389 9265 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
b93a436e 9266 op1 = force_reg (mode, op1);
ca695ac9 9267
b93a436e
JL
9268 /* The increment queue is LIFO, thus we have to `queue'
9269 the instructions in reverse order. */
9270 enqueue_insn (op0, gen_move_insn (op0, temp));
9271 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9272 return result;
bbf6f052
RK
9273 }
9274 }
ca695ac9 9275
b93a436e
JL
9276 /* Preincrement, or we can't increment with one simple insn. */
9277 if (post)
9278 /* Save a copy of the value before inc or dec, to return it later. */
9279 temp = value = copy_to_reg (op0);
9280 else
9281 /* Arrange to return the incremented value. */
9282 /* Copy the rtx because expand_binop will protect from the queue,
9283 and the results of that would be invalid for us to return
9284 if our caller does emit_queue before using our result. */
9285 temp = copy_rtx (value = op0);
bbf6f052 9286
b93a436e 9287 /* Increment however we can. */
37a08a29 9288 op1 = expand_binop (mode, this_optab, value, op1, op0,
b93a436e 9289 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
37a08a29 9290
b93a436e
JL
9291 /* Make sure the value is stored into OP0. */
9292 if (op1 != op0)
9293 emit_move_insn (op0, op1);
5718612f 9294
b93a436e
JL
9295 return temp;
9296}
9297\f
b93a436e
JL
9298/* Generate code to calculate EXP using a store-flag instruction
9299 and return an rtx for the result. EXP is either a comparison
9300 or a TRUTH_NOT_EXPR whose operand is a comparison.
ca695ac9 9301
b93a436e 9302 If TARGET is nonzero, store the result there if convenient.
ca695ac9 9303
cc2902df 9304 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
b93a436e 9305 cheap.
ca695ac9 9306
b93a436e
JL
9307 Return zero if there is no suitable set-flag instruction
9308 available on this machine.
ca695ac9 9309
b93a436e
JL
9310 Once expand_expr has been called on the arguments of the comparison,
9311 we are committed to doing the store flag, since it is not safe to
9312 re-evaluate the expression. We emit the store-flag insn by calling
9313 emit_store_flag, but only expand the arguments if we have a reason
9314 to believe that emit_store_flag will be successful. If we think that
9315 it will, but it isn't, we have to simulate the store-flag with a
9316 set/jump/set sequence. */
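/* Illustrative use, assuming a target with a store-flag pattern (not
   taken from the original sources): for "flag = (a < b)" this normally
   reduces to a single call

       emit_store_flag (target, LT, op0, op1, operand_mode, 0, 1);

   and only when that fails do we fall back to the set/jump/set sequence
   described above.  */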
ca695ac9 9317
b93a436e 9318static rtx
502b8322 9319do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
b93a436e
JL
9320{
9321 enum rtx_code code;
9322 tree arg0, arg1, type;
9323 tree tem;
9324 enum machine_mode operand_mode;
9325 int invert = 0;
9326 int unsignedp;
9327 rtx op0, op1;
9328 enum insn_code icode;
9329 rtx subtarget = target;
381127e8 9330 rtx result, label;
ca695ac9 9331
b93a436e
JL
9332 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9333 result at the end. We can't simply invert the test since it would
9334 have already been inverted if it were valid. This case occurs for
9335 some floating-point comparisons. */
ca695ac9 9336
b93a436e
JL
9337 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9338 invert = 1, exp = TREE_OPERAND (exp, 0);
ca695ac9 9339
b93a436e
JL
9340 arg0 = TREE_OPERAND (exp, 0);
9341 arg1 = TREE_OPERAND (exp, 1);
5129d2ce
AH
9342
9343 /* Don't crash if the comparison was erroneous. */
9344 if (arg0 == error_mark_node || arg1 == error_mark_node)
9345 return const0_rtx;
9346
b93a436e
JL
9347 type = TREE_TYPE (arg0);
9348 operand_mode = TYPE_MODE (type);
9349 unsignedp = TREE_UNSIGNED (type);
ca695ac9 9350
b93a436e
JL
9351 /* We won't bother with BLKmode store-flag operations because it would mean
9352 passing a lot of information to emit_store_flag. */
9353 if (operand_mode == BLKmode)
9354 return 0;
ca695ac9 9355
b93a436e
JL
9356 /* We won't bother with store-flag operations involving function pointers
9357 when function pointers must be canonicalized before comparisons. */
9358#ifdef HAVE_canonicalize_funcptr_for_compare
9359 if (HAVE_canonicalize_funcptr_for_compare
9360 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9361 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9362 == FUNCTION_TYPE))
9363 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9364 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9365 == FUNCTION_TYPE))))
9366 return 0;
ca695ac9
JB
9367#endif
9368
b93a436e
JL
9369 STRIP_NOPS (arg0);
9370 STRIP_NOPS (arg1);
ca695ac9 9371
b93a436e
JL
9372 /* Get the rtx comparison code to use. We know that EXP is a comparison
9373 operation of some type. Some comparisons against 1 and -1 can be
9374 converted to comparisons with zero. Do so here so that the tests
9375 below will be aware that we have a comparison with zero. These
9376 tests will not catch constants in the first operand, but constants
9377 are rarely passed as the first operand. */
ca695ac9 9378
b93a436e
JL
9379 switch (TREE_CODE (exp))
9380 {
9381 case EQ_EXPR:
9382 code = EQ;
bbf6f052 9383 break;
b93a436e
JL
9384 case NE_EXPR:
9385 code = NE;
bbf6f052 9386 break;
b93a436e
JL
9387 case LT_EXPR:
9388 if (integer_onep (arg1))
9389 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9390 else
9391 code = unsignedp ? LTU : LT;
ca695ac9 9392 break;
b93a436e
JL
9393 case LE_EXPR:
9394 if (! unsignedp && integer_all_onesp (arg1))
9395 arg1 = integer_zero_node, code = LT;
9396 else
9397 code = unsignedp ? LEU : LE;
ca695ac9 9398 break;
b93a436e
JL
9399 case GT_EXPR:
9400 if (! unsignedp && integer_all_onesp (arg1))
9401 arg1 = integer_zero_node, code = GE;
9402 else
9403 code = unsignedp ? GTU : GT;
9404 break;
9405 case GE_EXPR:
9406 if (integer_onep (arg1))
9407 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9408 else
9409 code = unsignedp ? GEU : GE;
ca695ac9 9410 break;
1eb8759b
RH
9411
9412 case UNORDERED_EXPR:
9413 code = UNORDERED;
9414 break;
9415 case ORDERED_EXPR:
9416 code = ORDERED;
9417 break;
9418 case UNLT_EXPR:
9419 code = UNLT;
9420 break;
9421 case UNLE_EXPR:
9422 code = UNLE;
9423 break;
9424 case UNGT_EXPR:
9425 code = UNGT;
9426 break;
9427 case UNGE_EXPR:
9428 code = UNGE;
9429 break;
9430 case UNEQ_EXPR:
9431 code = UNEQ;
9432 break;
1eb8759b 9433
ca695ac9 9434 default:
b93a436e 9435 abort ();
bbf6f052 9436 }
bbf6f052 9437
b93a436e
JL
9438 /* Put a constant second. */
9439 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9440 {
9441 tem = arg0; arg0 = arg1; arg1 = tem;
9442 code = swap_condition (code);
ca695ac9 9443 }
bbf6f052 9444
b93a436e
JL
9445 /* If this is an equality or inequality test of a single bit, we can
9446 do this by shifting the bit being tested to the low-order bit and
9447 masking the result with the constant 1. If the condition was EQ,
9448 we xor it with 1. This does not require an scc insn and is faster
7960bf22
JL
9449 than an scc insn even if we have it.
9450
9451 The code to make this transformation was moved into fold_single_bit_test,
9452 so we just call into the folder and expand its result. */
d39985fa 9453
b93a436e
JL
9454 if ((code == NE || code == EQ)
9455 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9456 && integer_pow2p (TREE_OPERAND (arg0, 1)))
60cd4dae 9457 {
ae2bcd98 9458 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
60cd4dae 9459 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
450b1728 9460 arg0, arg1, type),
60cd4dae
JL
9461 target, VOIDmode, EXPAND_NORMAL);
9462 }
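/* For example (illustrative): "(x & 8) != 0" is folded into
   "(x >> 3) & 1", and "(x & 8) == 0" into "((x >> 3) & 1) ^ 1", so the
   result is computed with a shift and a mask rather than an scc insn.  */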
bbf6f052 9463
b93a436e 9464 /* Now see if we are likely to be able to do this. Return if not. */
1eb8759b 9465 if (! can_compare_p (code, operand_mode, ccp_store_flag))
b93a436e 9466 return 0;
1eb8759b 9467
b93a436e
JL
9468 icode = setcc_gen_code[(int) code];
9469 if (icode == CODE_FOR_nothing
a995e389 9470 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
ca695ac9 9471 {
b93a436e
JL
9472 /* We can only do this if it is one of the special cases that
9473 can be handled without an scc insn. */
9474 if ((code == LT && integer_zerop (arg1))
9475 || (! only_cheap && code == GE && integer_zerop (arg1)))
9476 ;
9477 else if (BRANCH_COST >= 0
9478 && ! only_cheap && (code == NE || code == EQ)
9479 && TREE_CODE (type) != REAL_TYPE
9480 && ((abs_optab->handlers[(int) operand_mode].insn_code
9481 != CODE_FOR_nothing)
9482 || (ffs_optab->handlers[(int) operand_mode].insn_code
9483 != CODE_FOR_nothing)))
9484 ;
9485 else
9486 return 0;
ca695ac9 9487 }
3a94c984 9488
296b4ed9 9489 if (! get_subtarget (target)
e3be1116 9490 || GET_MODE (subtarget) != operand_mode)
b93a436e
JL
9491 subtarget = 0;
9492
eb698c58 9493 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
b93a436e
JL
9494
9495 if (target == 0)
9496 target = gen_reg_rtx (mode);
9497
9498 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9499 because, if emit_store_flag does anything, it will succeed and
9500 OP0 and OP1 will not be used subsequently. */
ca695ac9 9501
b93a436e
JL
9502 result = emit_store_flag (target, code,
9503 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9504 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9505 operand_mode, unsignedp, 1);
ca695ac9 9506
b93a436e
JL
9507 if (result)
9508 {
9509 if (invert)
9510 result = expand_binop (mode, xor_optab, result, const1_rtx,
9511 result, 0, OPTAB_LIB_WIDEN);
9512 return result;
ca695ac9 9513 }
bbf6f052 9514
b93a436e
JL
9515 /* If this failed, we have to do this with set/compare/jump/set code. */
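/* Illustrative shape of the fallback sequence emitted below, for the
   non-inverted case (CODE stands for the comparison finally used):

       target = 1;
       if (op0 CODE op1) goto label;
       target = 0;
     label:
*/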
9516 if (GET_CODE (target) != REG
9517 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9518 target = gen_reg_rtx (GET_MODE (target));
9519
9520 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9521 result = compare_from_rtx (op0, op1, code, unsignedp,
a06ef755 9522 operand_mode, NULL_RTX);
b93a436e
JL
9523 if (GET_CODE (result) == CONST_INT)
9524 return (((result == const0_rtx && ! invert)
9525 || (result != const0_rtx && invert))
9526 ? const0_rtx : const1_rtx);
ca695ac9 9527
8f08e8c0
JL
9528 /* The code of RESULT may not match CODE if compare_from_rtx
9529 decided to swap its operands and reverse the original code.
9530
9531 We know that compare_from_rtx returns either a CONST_INT or
9532 a new comparison code, so it is safe to just extract the
9533 code from RESULT. */
9534 code = GET_CODE (result);
9535
b93a436e
JL
9536 label = gen_label_rtx ();
9537 if (bcc_gen_fctn[(int) code] == 0)
9538 abort ();
0f41302f 9539
b93a436e
JL
9540 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9541 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9542 emit_label (label);
bbf6f052 9543
b93a436e 9544 return target;
ca695ac9 9545}
b93a436e 9546\f
b93a436e 9547
ad82abb8
ZW
9548/* Stubs in case we haven't got a casesi insn. */
9549#ifndef HAVE_casesi
9550# define HAVE_casesi 0
9551# define gen_casesi(a, b, c, d, e) (0)
9552# define CODE_FOR_casesi CODE_FOR_nothing
9553#endif
9554
9555/* If the machine does not have a case insn that compares the bounds,
9556 this means extra overhead for dispatch tables, which raises the
9557 threshold for using them. */
9558#ifndef CASE_VALUES_THRESHOLD
9559#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9560#endif /* CASE_VALUES_THRESHOLD */
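/* Illustratively, with the defaults above a switch normally needs at
   least 4 case values (5 when there is no casesi pattern) before the
   expander in stmt.c will consider a dispatch table; smaller switches
   are expanded as chains of compare-and-branch insns.  */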
9561
9562unsigned int
502b8322 9563case_values_threshold (void)
ad82abb8
ZW
9564{
9565 return CASE_VALUES_THRESHOLD;
9566}
9567
9568/* Attempt to generate a casesi instruction. Returns 1 if successful,
9569 0 otherwise (i.e. if there is no casesi instruction). */
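/* Assumed semantics of the casesi pattern used below (illustrative):

       if (index < minval || index > minval + range) goto default_label;
       goto *table_label[index - minval];

   i.e. a single insn does both the bounds check and the dispatch.  */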
9570int
502b8322
AJ
9571try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9572 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
ad82abb8
ZW
9573{
9574 enum machine_mode index_mode = SImode;
9575 int index_bits = GET_MODE_BITSIZE (index_mode);
9576 rtx op1, op2, index;
9577 enum machine_mode op_mode;
9578
9579 if (! HAVE_casesi)
9580 return 0;
9581
9582 /* Convert the index to SImode. */
9583 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9584 {
9585 enum machine_mode omode = TYPE_MODE (index_type);
9586 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
9587
9588 /* We must handle the endpoints in the original mode. */
9589 index_expr = build (MINUS_EXPR, index_type,
9590 index_expr, minval);
9591 minval = integer_zero_node;
9592 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9593 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
a06ef755 9594 omode, 1, default_label);
ad82abb8
ZW
9595 /* Now we can safely truncate. */
9596 index = convert_to_mode (index_mode, index, 0);
9597 }
9598 else
9599 {
9600 if (TYPE_MODE (index_type) != index_mode)
9601 {
ae2bcd98 9602 index_expr = convert (lang_hooks.types.type_for_size
b0c48229 9603 (index_bits, 0), index_expr);
ad82abb8
ZW
9604 index_type = TREE_TYPE (index_expr);
9605 }
9606
9607 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9608 }
9609 emit_queue ();
9610 index = protect_from_queue (index, 0);
9611 do_pending_stack_adjust ();
9612
9613 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9614 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9615 (index, op_mode))
9616 index = copy_to_mode_reg (op_mode, index);
e87b4f3f 9617
ad82abb8
ZW
9618 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
9619
9620 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9621 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9622 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
9623 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9624 (op1, op_mode))
9625 op1 = copy_to_mode_reg (op_mode, op1);
9626
9627 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
9628
9629 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9630 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9631 op2, TREE_UNSIGNED (TREE_TYPE (range)));
9632 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9633 (op2, op_mode))
9634 op2 = copy_to_mode_reg (op_mode, op2);
9635
9636 emit_jump_insn (gen_casesi (index, op1, op2,
9637 table_label, default_label));
9638 return 1;
9639}
9640
9641/* Attempt to generate a tablejump instruction; same concept. */
9642#ifndef HAVE_tablejump
9643#define HAVE_tablejump 0
9644#define gen_tablejump(x, y) (0)
9645#endif
9646
9647/* Subroutine of the next function.
9648
9649 INDEX is the value being switched on, with the lowest value
b93a436e
JL
9650 in the table already subtracted.
9651 MODE is its expected mode (needed if INDEX is constant).
9652 RANGE is the length of the jump table.
9653 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
88d3b7f0 9654
b93a436e
JL
9655 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9656 index value is out of range. */
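/* Illustrative shape of the code emitted below, ignoring PIC and
   assuming absolute-address table entries:

       if ((unsigned) index > range) goto default_label;
       temp = *(table_label + index * GET_MODE_SIZE (CASE_VECTOR_MODE));
       goto *temp;
*/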
0f41302f 9657
ad82abb8 9658static void
502b8322
AJ
9659do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9660 rtx default_label)
ca695ac9 9661{
b3694847 9662 rtx temp, vector;
88d3b7f0 9663
74f6d071
JH
9664 if (INTVAL (range) > cfun->max_jumptable_ents)
9665 cfun->max_jumptable_ents = INTVAL (range);
1877be45 9666
b93a436e
JL
9667 /* Do an unsigned comparison (in the proper mode) between the index
9668 expression and the value which represents the length of the range.
9669 Since we just finished subtracting the lower bound of the range
9670 from the index expression, this comparison allows us to simultaneously
9671 check that the original index expression value is both greater than
9672 or equal to the minimum value of the range and less than or equal to
9673 the maximum value of the range. */
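/* Worked example (illustrative): for cases 3..10, RANGE is 7 and 3 has
   already been subtracted from INDEX.  An original index of 2 wraps
   around to a huge unsigned value, so the single unsigned test
   "(unsigned) (index - 3) > 7" rejects both index < 3 and index > 10.  */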
709f5be1 9674
c5d5d461 9675 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
a06ef755 9676 default_label);
bbf6f052 9677
b93a436e
JL
9678 /* If index is in range, it must fit in Pmode.
9679 Convert to Pmode so we can index with it. */
9680 if (mode != Pmode)
9681 index = convert_to_mode (Pmode, index, 1);
bbf6f052 9682
ba228239 9683 /* Don't let a MEM slip through, because then INDEX that comes
b93a436e
JL
9684 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9685 and break_out_memory_refs will go to work on it and mess it up. */
9686#ifdef PIC_CASE_VECTOR_ADDRESS
9687 if (flag_pic && GET_CODE (index) != REG)
9688 index = copy_to_mode_reg (Pmode, index);
9689#endif
ca695ac9 9690
b93a436e
JL
9691 /* If flag_force_addr were to affect this address
9692 it could interfere with the tricky assumptions made
9693 about addresses that contain label-refs,
9694 which may be valid only very near the tablejump itself. */
9695 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9696 GET_MODE_SIZE, because this indicates how large insns are. The other
9697 uses should all be Pmode, because they are addresses. This code
9698 could fail if addresses and insns are not the same size. */
9699 index = gen_rtx_PLUS (Pmode,
9700 gen_rtx_MULT (Pmode, index,
9701 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9702 gen_rtx_LABEL_REF (Pmode, table_label));
9703#ifdef PIC_CASE_VECTOR_ADDRESS
9704 if (flag_pic)
9705 index = PIC_CASE_VECTOR_ADDRESS (index);
9706 else
bbf6f052 9707#endif
b93a436e
JL
9708 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9709 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9710 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
9711 RTX_UNCHANGING_P (vector) = 1;
4da2eb6b 9712 MEM_NOTRAP_P (vector) = 1;
b93a436e
JL
9713 convert_move (temp, vector, 0);
9714
9715 emit_jump_insn (gen_tablejump (temp, table_label));
9716
9717 /* If we are generating PIC code or if the table is PC-relative, the
9718 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9719 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9720 emit_barrier ();
bbf6f052 9721}
b93a436e 9722
ad82abb8 9723int
502b8322
AJ
9724try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9725 rtx table_label, rtx default_label)
ad82abb8
ZW
9726{
9727 rtx index;
9728
9729 if (! HAVE_tablejump)
9730 return 0;
9731
9732 index_expr = fold (build (MINUS_EXPR, index_type,
9733 convert (index_type, index_expr),
9734 convert (index_type, minval)));
9735 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9736 emit_queue ();
9737 index = protect_from_queue (index, 0);
9738 do_pending_stack_adjust ();
9739
9740 do_tablejump (index, TYPE_MODE (index_type),
9741 convert_modes (TYPE_MODE (index_type),
9742 TYPE_MODE (TREE_TYPE (range)),
9743 expand_expr (range, NULL_RTX,
9744 VOIDmode, 0),
9745 TREE_UNSIGNED (TREE_TYPE (range))),
9746 table_label, default_label);
9747 return 1;
9748}
e2500fed 9749
cb2a532e
AH
9750/* Nonzero if the mode is a valid vector mode for this architecture.
9751 This returns nonzero even if there is no hardware support for the
9752 vector mode, but we can emulate with narrower modes. */
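/* For example (illustrative): on a target with no native V2DI support,
   V2DImode is still reported as valid here as long as DImode moves
   exist, since a V2DI move can be emulated with two DImode moves.  */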
9753
9754int
502b8322 9755vector_mode_valid_p (enum machine_mode mode)
cb2a532e
AH
9756{
9757 enum mode_class class = GET_MODE_CLASS (mode);
9758 enum machine_mode innermode;
9759
9760 /* Doh! What's going on? */
9761 if (class != MODE_VECTOR_INT
9762 && class != MODE_VECTOR_FLOAT)
9763 return 0;
9764
9765 /* Hardware support. Woo hoo! */
9766 if (VECTOR_MODE_SUPPORTED_P (mode))
9767 return 1;
9768
9769 innermode = GET_MODE_INNER (mode);
9770
9771 /* We should probably return 1 if requesting V4DI when we have no DI
9772 but do have V2DI; that case, however, is probably very unlikely. */
9773
9774 /* If we have support for the inner mode, we can safely emulate it.
9775 We may not have V2DI, but we can emulate it with a pair of DIs. */
9776 return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
9777}
9778
d744e06e
AH
9779/* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
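/* Illustration, not from the original sources: a V4SI VECTOR_CST whose
   tree lists the elements 1, 2 and 3 becomes
   (const_vector:V4SI [1 2 3 0]); elements the tree omits are filled
   with zero at the end of this function.  */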
9780static rtx
502b8322 9781const_vector_from_tree (tree exp)
d744e06e
AH
9782{
9783 rtvec v;
9784 int units, i;
9785 tree link, elt;
9786 enum machine_mode inner, mode;
9787
9788 mode = TYPE_MODE (TREE_TYPE (exp));
9789
9790 if (is_zeros_p (exp))
9791 return CONST0_RTX (mode);
9792
9793 units = GET_MODE_NUNITS (mode);
9794 inner = GET_MODE_INNER (mode);
9795
9796 v = rtvec_alloc (units);
9797
9798 link = TREE_VECTOR_CST_ELTS (exp);
9799 for (i = 0; link; link = TREE_CHAIN (link), ++i)
9800 {
9801 elt = TREE_VALUE (link);
9802
9803 if (TREE_CODE (elt) == REAL_CST)
9804 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
9805 inner);
9806 else
9807 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
9808 TREE_INT_CST_HIGH (elt),
9809 inner);
9810 }
9811
5f6c070d
AH
9812 /* Initialize remaining elements to 0. */
9813 for (; i < units; ++i)
9814 RTVEC_ELT (v, i) = CONST0_RTX (inner);
9815
d744e06e
AH
9816 return gen_rtx_raw_CONST_VECTOR (mode, v);
9817}
9818
e2500fed 9819#include "gt-expr.h"