]> gcc.gnu.org Git - gcc.git/blame - gcc/expr.c
configure.ac: Fix stupid brain-fade; set default_gnattools_target correctly.
[gcc.git] / gcc / expr.c
CommitLineData
bbf6f052 1/* Convert tree expression to rtl instructions, for GNU compiler.
8752c357 2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
d9221e01 3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
bbf6f052 4
1322177d 5This file is part of GCC.
bbf6f052 6
1322177d
LB
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 2, or (at your option) any later
10version.
bbf6f052 11
1322177d
LB
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
bbf6f052
RK
16
17You should have received a copy of the GNU General Public License
1322177d
LB
18along with GCC; see the file COPYING. If not, write to the Free
19Software Foundation, 59 Temple Place - Suite 330, Boston, MA
2002111-1307, USA. */
bbf6f052 21
bbf6f052 22#include "config.h"
670ee920 23#include "system.h"
4977bab6
ZW
24#include "coretypes.h"
25#include "tm.h"
ca695ac9 26#include "machmode.h"
11ad4784 27#include "real.h"
bbf6f052
RK
28#include "rtl.h"
29#include "tree.h"
30#include "flags.h"
bf76bb5a 31#include "regs.h"
4ed67205 32#include "hard-reg-set.h"
3d195391 33#include "except.h"
bbf6f052 34#include "function.h"
bbf6f052 35#include "insn-config.h"
34e81b5a 36#include "insn-attr.h"
3a94c984 37/* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
d6f4ec51 38#include "expr.h"
e78d8e51
ZW
39#include "optabs.h"
40#include "libfuncs.h"
bbf6f052 41#include "recog.h"
3ef1eef4 42#include "reload.h"
bbf6f052 43#include "output.h"
bbf6f052 44#include "typeclass.h"
10f0ad3d 45#include "toplev.h"
d7db6646 46#include "ggc.h"
ac79cd5a 47#include "langhooks.h"
e2c49ac2 48#include "intl.h"
b1474bb7 49#include "tm_p.h"
c988af2b 50#include "target.h"
bbf6f052 51
bbf6f052 52/* Decide whether a function's arguments should be processed
bbc8a071
RK
53 from first to last or from last to first.
54
55 They should if the stack and args grow in opposite directions, but
56 only if we have push insns. */
bbf6f052 57
bbf6f052 58#ifdef PUSH_ROUNDING
bbc8a071 59
2da4124d 60#ifndef PUSH_ARGS_REVERSED
3319a347 61#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
3a94c984 62#define PUSH_ARGS_REVERSED /* If it's last to first. */
bbf6f052 63#endif
2da4124d 64#endif
bbc8a071 65
bbf6f052
RK
66#endif
67
68#ifndef STACK_PUSH_CODE
69#ifdef STACK_GROWS_DOWNWARD
70#define STACK_PUSH_CODE PRE_DEC
71#else
72#define STACK_PUSH_CODE PRE_INC
73#endif
74#endif
75
4ca79136
RH
76/* Convert defined/undefined to boolean. */
77#ifdef TARGET_MEM_FUNCTIONS
78#undef TARGET_MEM_FUNCTIONS
79#define TARGET_MEM_FUNCTIONS 1
80#else
81#define TARGET_MEM_FUNCTIONS 0
82#endif
83
84
bbf6f052
RK
85/* If this is nonzero, we do not bother generating VOLATILE
86 around volatile memory references, and we are willing to
87 output indirect addresses. If cse is to follow, we reject
88 indirect addresses so a useful potential cse is generated;
89 if it is used only once, instruction combination will produce
90 the same indirect address eventually. */
91int cse_not_expected;
92
4969d05d
RK
93/* This structure is used by move_by_pieces to describe the move to
94 be performed. */
4969d05d
RK
95struct move_by_pieces
96{
97 rtx to;
98 rtx to_addr;
99 int autinc_to;
100 int explicit_inc_to;
101 rtx from;
102 rtx from_addr;
103 int autinc_from;
104 int explicit_inc_from;
3bdf5ad1
RK
105 unsigned HOST_WIDE_INT len;
106 HOST_WIDE_INT offset;
4969d05d
RK
107 int reverse;
108};
109
57814e5e 110/* This structure is used by store_by_pieces to describe the clear to
9de08200
RK
111 be performed. */
112
57814e5e 113struct store_by_pieces
9de08200
RK
114{
115 rtx to;
116 rtx to_addr;
117 int autinc_to;
118 int explicit_inc_to;
3bdf5ad1
RK
119 unsigned HOST_WIDE_INT len;
120 HOST_WIDE_INT offset;
502b8322 121 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
fad205ff 122 void *constfundata;
9de08200
RK
123 int reverse;
124};
125
502b8322
AJ
126static rtx enqueue_insn (rtx, rtx);
127static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
128 unsigned int);
129static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
130 struct move_by_pieces *);
131static bool block_move_libcall_safe_for_call_parm (void);
132static bool emit_block_move_via_movstr (rtx, rtx, rtx, unsigned);
133static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
134static tree emit_block_move_libcall_fn (int);
135static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
136static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
137static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
138static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
139static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
140 struct store_by_pieces *);
141static bool clear_storage_via_clrstr (rtx, rtx, unsigned);
142static rtx clear_storage_via_libcall (rtx, rtx);
143static tree clear_storage_libcall_fn (int);
144static rtx compress_float_constant (rtx, rtx);
145static rtx get_subtarget (rtx);
146static int is_zeros_p (tree);
502b8322
AJ
147static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
148 HOST_WIDE_INT, enum machine_mode,
149 tree, tree, int, int);
150static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
151static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
152 tree, enum machine_mode, int, tree, int);
153static rtx var_rtx (tree);
154
155static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
d50a16c4 156static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
502b8322
AJ
157
158static int is_aligning_offset (tree, tree);
159static rtx expand_increment (tree, int, int);
eb698c58
RS
160static void expand_operands (tree, tree, rtx, rtx*, rtx*,
161 enum expand_modifier);
502b8322 162static rtx do_store_flag (tree, rtx, enum machine_mode, int);
21d93687 163#ifdef PUSH_ROUNDING
502b8322 164static void emit_single_push_insn (enum machine_mode, rtx, tree);
21d93687 165#endif
502b8322
AJ
166static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
167static rtx const_vector_from_tree (tree);
bbf6f052 168
4fa52007
RK
169/* Record for each mode whether we can move a register directly to or
170 from an object of that mode in memory. If we can't, we won't try
171 to use that mode directly when accessing a field of that mode. */
172
173static char direct_load[NUM_MACHINE_MODES];
174static char direct_store[NUM_MACHINE_MODES];
175
51286de6
RH
176/* Record for each mode whether we can float-extend from memory. */
177
178static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
179
fbe1758d 180/* This macro is used to determine whether move_by_pieces should be called
3a94c984 181 to perform a structure copy. */
fbe1758d 182#ifndef MOVE_BY_PIECES_P
19caa751 183#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
8752c357 184 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
fbe1758d
AM
185#endif
186
78762e3b
RS
187/* This macro is used to determine whether clear_by_pieces should be
188 called to clear storage. */
189#ifndef CLEAR_BY_PIECES_P
190#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
191 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
192#endif
193
4977bab6
ZW
194/* This macro is used to determine whether store_by_pieces should be
195 called to "memset" storage with byte values other than zero, or
196 to "memcpy" storage when the source is a constant string. */
197#ifndef STORE_BY_PIECES_P
198#define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
199#endif
200
266007a7 201/* This array records the insn_code of insns to perform block moves. */
e6677db3 202enum insn_code movstr_optab[NUM_MACHINE_MODES];
266007a7 203
9de08200
RK
204/* This array records the insn_code of insns to perform block clears. */
205enum insn_code clrstr_optab[NUM_MACHINE_MODES];
206
118355a0
ZW
207/* These arrays record the insn_code of two different kinds of insns
208 to perform block compares. */
209enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
210enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
211
72954a4f
JM
212/* Stack of EXPR_WITH_FILE_LOCATION nested expressions. */
213struct file_stack *expr_wfl_stack;
214
cc2902df 215/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
e87b4f3f
RS
216
217#ifndef SLOW_UNALIGNED_ACCESS
e1565e65 218#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
e87b4f3f 219#endif
bbf6f052 220\f
4fa52007 221/* This is run once per compilation to set up which modes can be used
266007a7 222 directly in memory and to initialize the block move optab. */
4fa52007
RK
223
224void
502b8322 225init_expr_once (void)
4fa52007
RK
226{
227 rtx insn, pat;
228 enum machine_mode mode;
cff48d8f 229 int num_clobbers;
9ec36da5 230 rtx mem, mem1;
bf1660a6 231 rtx reg;
9ec36da5 232
e2549997
RS
233 /* Try indexing by frame ptr and try by stack ptr.
234 It is known that on the Convex the stack ptr isn't a valid index.
235 With luck, one or the other is valid on any machine. */
9ec36da5
JL
236 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
237 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
4fa52007 238
bf1660a6
JL
239 /* A scratch register we can modify in-place below to avoid
240 useless RTL allocations. */
241 reg = gen_rtx_REG (VOIDmode, -1);
242
1f8c3c5b
RH
243 insn = rtx_alloc (INSN);
244 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
245 PATTERN (insn) = pat;
4fa52007
RK
246
247 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
248 mode = (enum machine_mode) ((int) mode + 1))
249 {
250 int regno;
4fa52007
RK
251
252 direct_load[(int) mode] = direct_store[(int) mode] = 0;
253 PUT_MODE (mem, mode);
e2549997 254 PUT_MODE (mem1, mode);
bf1660a6 255 PUT_MODE (reg, mode);
4fa52007 256
e6fe56a4
RK
257 /* See if there is some register that can be used in this mode and
258 directly loaded or stored from memory. */
259
7308a047
RS
260 if (mode != VOIDmode && mode != BLKmode)
261 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
262 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
263 regno++)
264 {
265 if (! HARD_REGNO_MODE_OK (regno, mode))
266 continue;
e6fe56a4 267
bf1660a6 268 REGNO (reg) = regno;
e6fe56a4 269
7308a047
RS
270 SET_SRC (pat) = mem;
271 SET_DEST (pat) = reg;
272 if (recog (pat, insn, &num_clobbers) >= 0)
273 direct_load[(int) mode] = 1;
e6fe56a4 274
e2549997
RS
275 SET_SRC (pat) = mem1;
276 SET_DEST (pat) = reg;
277 if (recog (pat, insn, &num_clobbers) >= 0)
278 direct_load[(int) mode] = 1;
279
7308a047
RS
280 SET_SRC (pat) = reg;
281 SET_DEST (pat) = mem;
282 if (recog (pat, insn, &num_clobbers) >= 0)
283 direct_store[(int) mode] = 1;
e2549997
RS
284
285 SET_SRC (pat) = reg;
286 SET_DEST (pat) = mem1;
287 if (recog (pat, insn, &num_clobbers) >= 0)
288 direct_store[(int) mode] = 1;
7308a047 289 }
4fa52007
RK
290 }
291
51286de6
RH
292 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
293
294 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
295 mode = GET_MODE_WIDER_MODE (mode))
296 {
297 enum machine_mode srcmode;
298 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
0fb7aeda 299 srcmode = GET_MODE_WIDER_MODE (srcmode))
51286de6
RH
300 {
301 enum insn_code ic;
302
303 ic = can_extend_p (mode, srcmode, 0);
304 if (ic == CODE_FOR_nothing)
305 continue;
306
307 PUT_MODE (mem, srcmode);
0fb7aeda 308
51286de6
RH
309 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
310 float_extend_from_mem[mode][srcmode] = true;
311 }
312 }
4fa52007 313}
cff48d8f 314
bbf6f052
RK
315/* This is run at the start of compiling a function. */
316
317void
502b8322 318init_expr (void)
bbf6f052 319{
3a70d621 320 cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
bbf6f052
RK
321}
322
49ad7cfa 323/* Small sanity check that the queue is empty at the end of a function. */
296b4ed9 324
bbf6f052 325void
502b8322 326finish_expr_for_function (void)
bbf6f052 327{
49ad7cfa
BS
328 if (pending_chain)
329 abort ();
bbf6f052
RK
330}
331\f
332/* Manage the queue of increment instructions to be output
333 for POSTINCREMENT_EXPR expressions, etc. */
334
bbf6f052
RK
335/* Queue up to increment (or change) VAR later. BODY says how:
336 BODY should be the same thing you would pass to emit_insn
337 to increment right away. It will go to emit_insn later on.
338
339 The value is a QUEUED expression to be used in place of VAR
340 where you want to guarantee the pre-incrementation value of VAR. */
341
342static rtx
502b8322 343enqueue_insn (rtx var, rtx body)
bbf6f052 344{
c5c76735
JL
345 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
346 body, pending_chain);
bbf6f052
RK
347 return pending_chain;
348}
349
350/* Use protect_from_queue to convert a QUEUED expression
351 into something that you can put immediately into an instruction.
352 If the queued incrementation has not happened yet,
353 protect_from_queue returns the variable itself.
354 If the incrementation has happened, protect_from_queue returns a temp
355 that contains a copy of the old value of the variable.
356
357 Any time an rtx which might possibly be a QUEUED is to be put
358 into an instruction, it must be passed through protect_from_queue first.
359 QUEUED expressions are not meaningful in instructions.
360
361 Do not pass a value through protect_from_queue and then hold
362 on to it for a while before putting it in an instruction!
363 If the queue is flushed in between, incorrect code will result. */
364
365rtx
502b8322 366protect_from_queue (rtx x, int modify)
bbf6f052 367{
b3694847 368 RTX_CODE code = GET_CODE (x);
bbf6f052
RK
369
370#if 0 /* A QUEUED can hang around after the queue is forced out. */
371 /* Shortcut for most common case. */
372 if (pending_chain == 0)
373 return x;
374#endif
375
376 if (code != QUEUED)
377 {
e9baa644
RK
378 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
379 use of autoincrement. Make a copy of the contents of the memory
380 location rather than a copy of the address, but not if the value is
381 of mode BLKmode. Don't modify X in place since it might be
382 shared. */
bbf6f052
RK
383 if (code == MEM && GET_MODE (x) != BLKmode
384 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
385 {
f1ec5147
RK
386 rtx y = XEXP (x, 0);
387 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
e9baa644 388
bbf6f052
RK
389 if (QUEUED_INSN (y))
390 {
f1ec5147
RK
391 rtx temp = gen_reg_rtx (GET_MODE (x));
392
e9baa644 393 emit_insn_before (gen_move_insn (temp, new),
bbf6f052
RK
394 QUEUED_INSN (y));
395 return temp;
396 }
f1ec5147 397
73b7f58c
BS
398 /* Copy the address into a pseudo, so that the returned value
399 remains correct across calls to emit_queue. */
f1ec5147 400 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
bbf6f052 401 }
f1ec5147 402
bbf6f052
RK
403 /* Otherwise, recursively protect the subexpressions of all
404 the kinds of rtx's that can contain a QUEUED. */
405 if (code == MEM)
3f15938e
RS
406 {
407 rtx tem = protect_from_queue (XEXP (x, 0), 0);
408 if (tem != XEXP (x, 0))
409 {
410 x = copy_rtx (x);
411 XEXP (x, 0) = tem;
412 }
413 }
bbf6f052
RK
414 else if (code == PLUS || code == MULT)
415 {
3f15938e
RS
416 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
417 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
418 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
419 {
420 x = copy_rtx (x);
421 XEXP (x, 0) = new0;
422 XEXP (x, 1) = new1;
423 }
bbf6f052
RK
424 }
425 return x;
426 }
73b7f58c
BS
427 /* If the increment has not happened, use the variable itself. Copy it
428 into a new pseudo so that the value remains correct across calls to
429 emit_queue. */
bbf6f052 430 if (QUEUED_INSN (x) == 0)
73b7f58c 431 return copy_to_reg (QUEUED_VAR (x));
bbf6f052
RK
432 /* If the increment has happened and a pre-increment copy exists,
433 use that copy. */
434 if (QUEUED_COPY (x) != 0)
435 return QUEUED_COPY (x);
436 /* The increment has happened but we haven't set up a pre-increment copy.
437 Set one up now, and use it. */
438 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
439 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
440 QUEUED_INSN (x));
441 return QUEUED_COPY (x);
442}
443
444/* Return nonzero if X contains a QUEUED expression:
445 if it contains anything that will be altered by a queued increment.
446 We handle only combinations of MEM, PLUS, MINUS and MULT operators
447 since memory addresses generally contain only those. */
448
1f06ee8d 449int
502b8322 450queued_subexp_p (rtx x)
bbf6f052 451{
b3694847 452 enum rtx_code code = GET_CODE (x);
bbf6f052
RK
453 switch (code)
454 {
455 case QUEUED:
456 return 1;
457 case MEM:
458 return queued_subexp_p (XEXP (x, 0));
459 case MULT:
460 case PLUS:
461 case MINUS:
e9a25f70
JL
462 return (queued_subexp_p (XEXP (x, 0))
463 || queued_subexp_p (XEXP (x, 1)));
464 default:
465 return 0;
bbf6f052 466 }
bbf6f052
RK
467}
468
1bbd65cd
EB
469/* Retrieve a mark on the queue. */
470
471static rtx
472mark_queue (void)
473{
474 return pending_chain;
475}
bbf6f052 476
1bbd65cd
EB
477/* Perform all the pending incrementations that have been enqueued
478 after MARK was retrieved. If MARK is null, perform all the
479 pending incrementations. */
480
481static void
482emit_insns_enqueued_after_mark (rtx mark)
bbf6f052 483{
b3694847 484 rtx p;
1bbd65cd
EB
485
486 /* The marked incrementation may have been emitted in the meantime
487 through a call to emit_queue. In this case, the mark is not valid
488 anymore so do nothing. */
489 if (mark && ! QUEUED_BODY (mark))
490 return;
491
492 while ((p = pending_chain) != mark)
bbf6f052 493 {
41b083c4
R
494 rtx body = QUEUED_BODY (p);
495
2f937369
DM
496 switch (GET_CODE (body))
497 {
498 case INSN:
499 case JUMP_INSN:
500 case CALL_INSN:
501 case CODE_LABEL:
502 case BARRIER:
503 case NOTE:
504 QUEUED_INSN (p) = body;
505 emit_insn (body);
506 break;
507
508#ifdef ENABLE_CHECKING
509 case SEQUENCE:
510 abort ();
511 break;
512#endif
513
514 default:
515 QUEUED_INSN (p) = emit_insn (body);
516 break;
41b083c4 517 }
2f937369 518
1bbd65cd 519 QUEUED_BODY (p) = 0;
bbf6f052
RK
520 pending_chain = QUEUED_NEXT (p);
521 }
522}
1bbd65cd
EB
523
524/* Perform all the pending incrementations. */
525
526void
527emit_queue (void)
528{
529 emit_insns_enqueued_after_mark (NULL_RTX);
530}
bbf6f052
RK
531\f
532/* Copy data from FROM to TO, where the machine modes are not the same.
533 Both modes may be integer, or both may be floating.
534 UNSIGNEDP should be nonzero if FROM is an unsigned type.
535 This causes zero-extension instead of sign-extension. */
536
537void
502b8322 538convert_move (rtx to, rtx from, int unsignedp)
bbf6f052
RK
539{
540 enum machine_mode to_mode = GET_MODE (to);
541 enum machine_mode from_mode = GET_MODE (from);
542 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
543 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
544 enum insn_code code;
545 rtx libcall;
546
547 /* rtx code for making an equivalent value. */
37d0b254
SE
548 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
549 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
bbf6f052
RK
550
551 to = protect_from_queue (to, 1);
552 from = protect_from_queue (from, 0);
553
554 if (to_real != from_real)
555 abort ();
556
1499e0a8
RK
557 /* If FROM is a SUBREG that indicates that we have already done at least
558 the required extension, strip it. We don't handle such SUBREGs as
559 TO here. */
560
561 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
562 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
563 >= GET_MODE_SIZE (to_mode))
564 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
565 from = gen_lowpart (to_mode, from), from_mode = to_mode;
566
567 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
568 abort ();
569
bbf6f052
RK
570 if (to_mode == from_mode
571 || (from_mode == VOIDmode && CONSTANT_P (from)))
572 {
573 emit_move_insn (to, from);
574 return;
575 }
576
0b4565c9
BS
577 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
578 {
579 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
580 abort ();
3a94c984 581
0b4565c9 582 if (VECTOR_MODE_P (to_mode))
bafe341a 583 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
0b4565c9 584 else
bafe341a 585 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
0b4565c9
BS
586
587 emit_move_insn (to, from);
588 return;
589 }
590
06765df1
R
591 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
592 {
593 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
594 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
595 return;
596 }
597
bbf6f052
RK
598 if (to_real)
599 {
642dfa8b 600 rtx value, insns;
85363ca0 601 convert_optab tab;
81d79e2c 602
e44846d6 603 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
85363ca0 604 tab = sext_optab;
e44846d6 605 else if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
85363ca0
ZW
606 tab = trunc_optab;
607 else
608 abort ();
2b01c326 609
85363ca0 610 /* Try converting directly if the insn is supported. */
2b01c326 611
85363ca0
ZW
612 code = tab->handlers[to_mode][from_mode].insn_code;
613 if (code != CODE_FOR_nothing)
b092b471 614 {
85363ca0
ZW
615 emit_unop_insn (code, to, from,
616 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
b092b471
JW
617 return;
618 }
b092b471 619
85363ca0
ZW
620 /* Otherwise use a libcall. */
621 libcall = tab->handlers[to_mode][from_mode].libfunc;
3a94c984 622
85363ca0 623 if (!libcall)
b092b471 624 /* This conversion is not implemented yet. */
bbf6f052
RK
625 abort ();
626
642dfa8b 627 start_sequence ();
ebb1b59a 628 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
81d79e2c 629 1, from, from_mode);
642dfa8b
BS
630 insns = get_insns ();
631 end_sequence ();
450b1728
EC
632 emit_libcall_block (insns, to, value,
633 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
634 from)
635 : gen_rtx_FLOAT_EXTEND (to_mode, from));
bbf6f052
RK
636 return;
637 }
638
85363ca0
ZW
639 /* Handle pointer conversion. */ /* SPEE 900220. */
640 /* Targets are expected to provide conversion insns between PxImode and
641 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
642 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
643 {
644 enum machine_mode full_mode
645 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
646
647 if (trunc_optab->handlers[to_mode][full_mode].insn_code
648 == CODE_FOR_nothing)
649 abort ();
650
651 if (full_mode != from_mode)
652 from = convert_to_mode (full_mode, from, unsignedp);
653 emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
654 to, from, UNKNOWN);
655 return;
656 }
657 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
658 {
659 enum machine_mode full_mode
660 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
661
662 if (sext_optab->handlers[full_mode][from_mode].insn_code
663 == CODE_FOR_nothing)
664 abort ();
665
666 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
667 to, from, UNKNOWN);
668 if (to_mode == full_mode)
669 return;
670
a1105617 671 /* else proceed to integer conversions below. */
85363ca0
ZW
672 from_mode = full_mode;
673 }
674
bbf6f052
RK
675 /* Now both modes are integers. */
676
677 /* Handle expanding beyond a word. */
678 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
679 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
680 {
681 rtx insns;
682 rtx lowpart;
683 rtx fill_value;
684 rtx lowfrom;
685 int i;
686 enum machine_mode lowpart_mode;
687 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
688
689 /* Try converting directly if the insn is supported. */
690 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
691 != CODE_FOR_nothing)
692 {
cd1b4b44
RK
693 /* If FROM is a SUBREG, put it into a register. Do this
694 so that we always generate the same set of insns for
695 better cse'ing; if an intermediate assignment occurred,
696 we won't be doing the operation directly on the SUBREG. */
697 if (optimize > 0 && GET_CODE (from) == SUBREG)
698 from = force_reg (from_mode, from);
bbf6f052
RK
699 emit_unop_insn (code, to, from, equiv_code);
700 return;
701 }
702 /* Next, try converting via full word. */
703 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
704 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
705 != CODE_FOR_nothing))
706 {
a81fee56 707 if (GET_CODE (to) == REG)
6a2d136b
EB
708 {
709 if (reg_overlap_mentioned_p (to, from))
710 from = force_reg (from_mode, from);
711 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
712 }
bbf6f052
RK
713 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
714 emit_unop_insn (code, to,
715 gen_lowpart (word_mode, to), equiv_code);
716 return;
717 }
718
719 /* No special multiword conversion insn; do it by hand. */
720 start_sequence ();
721
5c5033c3
RK
722 /* Since we will turn this into a no conflict block, we must ensure
723 that the source does not overlap the target. */
724
725 if (reg_overlap_mentioned_p (to, from))
726 from = force_reg (from_mode, from);
727
bbf6f052
RK
728 /* Get a copy of FROM widened to a word, if necessary. */
729 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
730 lowpart_mode = word_mode;
731 else
732 lowpart_mode = from_mode;
733
734 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
735
736 lowpart = gen_lowpart (lowpart_mode, to);
737 emit_move_insn (lowpart, lowfrom);
738
739 /* Compute the value to put in each remaining word. */
740 if (unsignedp)
741 fill_value = const0_rtx;
742 else
743 {
744#ifdef HAVE_slt
745 if (HAVE_slt
a995e389 746 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
bbf6f052
RK
747 && STORE_FLAG_VALUE == -1)
748 {
906c4e36 749 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
a06ef755 750 lowpart_mode, 0);
bbf6f052
RK
751 fill_value = gen_reg_rtx (word_mode);
752 emit_insn (gen_slt (fill_value));
753 }
754 else
755#endif
756 {
757 fill_value
758 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
759 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
906c4e36 760 NULL_RTX, 0);
bbf6f052
RK
761 fill_value = convert_to_mode (word_mode, fill_value, 1);
762 }
763 }
764
765 /* Fill the remaining words. */
766 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
767 {
768 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
769 rtx subword = operand_subword (to, index, 1, to_mode);
770
771 if (subword == 0)
772 abort ();
773
774 if (fill_value != subword)
775 emit_move_insn (subword, fill_value);
776 }
777
778 insns = get_insns ();
779 end_sequence ();
780
906c4e36 781 emit_no_conflict_block (insns, to, from, NULL_RTX,
38a448ca 782 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
bbf6f052
RK
783 return;
784 }
785
d3c64ee3
RS
786 /* Truncating multi-word to a word or less. */
787 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
788 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
bbf6f052 789 {
431a6eca
JW
790 if (!((GET_CODE (from) == MEM
791 && ! MEM_VOLATILE_P (from)
792 && direct_load[(int) to_mode]
793 && ! mode_dependent_address_p (XEXP (from, 0)))
794 || GET_CODE (from) == REG
795 || GET_CODE (from) == SUBREG))
796 from = force_reg (from_mode, from);
bbf6f052
RK
797 convert_move (to, gen_lowpart (word_mode, from), 0);
798 return;
799 }
800
bbf6f052
RK
801 /* Now follow all the conversions between integers
802 no more than a word long. */
803
804 /* For truncation, usually we can just refer to FROM in a narrower mode. */
805 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
806 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
d3c64ee3 807 GET_MODE_BITSIZE (from_mode)))
bbf6f052 808 {
d3c64ee3
RS
809 if (!((GET_CODE (from) == MEM
810 && ! MEM_VOLATILE_P (from)
811 && direct_load[(int) to_mode]
812 && ! mode_dependent_address_p (XEXP (from, 0)))
813 || GET_CODE (from) == REG
814 || GET_CODE (from) == SUBREG))
815 from = force_reg (from_mode, from);
34aa3599
RK
816 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
817 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
818 from = copy_to_reg (from);
bbf6f052
RK
819 emit_move_insn (to, gen_lowpart (to_mode, from));
820 return;
821 }
822
d3c64ee3 823 /* Handle extension. */
bbf6f052
RK
824 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
825 {
826 /* Convert directly if that works. */
827 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
828 != CODE_FOR_nothing)
829 {
9413de45
RK
830 if (flag_force_mem)
831 from = force_not_mem (from);
832
bbf6f052
RK
833 emit_unop_insn (code, to, from, equiv_code);
834 return;
835 }
836 else
837 {
838 enum machine_mode intermediate;
2b28d92e
NC
839 rtx tmp;
840 tree shift_amount;
bbf6f052
RK
841
842 /* Search for a mode to convert via. */
843 for (intermediate = from_mode; intermediate != VOIDmode;
844 intermediate = GET_MODE_WIDER_MODE (intermediate))
930b4e39
RK
845 if (((can_extend_p (to_mode, intermediate, unsignedp)
846 != CODE_FOR_nothing)
847 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
d60eaeff
JL
848 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
849 GET_MODE_BITSIZE (intermediate))))
bbf6f052
RK
850 && (can_extend_p (intermediate, from_mode, unsignedp)
851 != CODE_FOR_nothing))
852 {
853 convert_move (to, convert_to_mode (intermediate, from,
854 unsignedp), unsignedp);
855 return;
856 }
857
2b28d92e 858 /* No suitable intermediate mode.
3a94c984 859 Generate what we need with shifts. */
2b28d92e
NC
860 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
861 - GET_MODE_BITSIZE (from_mode), 0);
862 from = gen_lowpart (to_mode, force_reg (from_mode, from));
863 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
864 to, unsignedp);
3a94c984 865 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
2b28d92e
NC
866 to, unsignedp);
867 if (tmp != to)
868 emit_move_insn (to, tmp);
869 return;
bbf6f052
RK
870 }
871 }
872
3a94c984 873 /* Support special truncate insns for certain modes. */
85363ca0 874 if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
bbf6f052 875 {
85363ca0
ZW
876 emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
877 to, from, UNKNOWN);
b9bcad65
RK
878 return;
879 }
880
bbf6f052
RK
881 /* Handle truncation of volatile memrefs, and so on;
882 the things that couldn't be truncated directly,
85363ca0
ZW
883 and for which there was no special instruction.
884
885 ??? Code above formerly short-circuited this, for most integer
886 mode pairs, with a force_reg in from_mode followed by a recursive
887 call to this routine. Appears always to have been wrong. */
bbf6f052
RK
888 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
889 {
890 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
891 emit_move_insn (to, temp);
892 return;
893 }
894
895 /* Mode combination is not recognized. */
896 abort ();
897}
898
899/* Return an rtx for a value that would result
900 from converting X to mode MODE.
901 Both X and MODE may be floating, or both integer.
902 UNSIGNEDP is nonzero if X is an unsigned value.
903 This can be done by referring to a part of X in place
5d901c31
RS
904 or by copying to a new temporary with conversion.
905
906 This function *must not* call protect_from_queue
907 except when putting X into an insn (in which case convert_move does it). */
bbf6f052
RK
908
909rtx
502b8322 910convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
5ffe63ed
RS
911{
912 return convert_modes (mode, VOIDmode, x, unsignedp);
913}
914
915/* Return an rtx for a value that would result
916 from converting X from mode OLDMODE to mode MODE.
917 Both modes may be floating, or both integer.
918 UNSIGNEDP is nonzero if X is an unsigned value.
919
920 This can be done by referring to a part of X in place
921 or by copying to a new temporary with conversion.
922
923 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
924
925 This function *must not* call protect_from_queue
926 except when putting X into an insn (in which case convert_move does it). */
927
928rtx
502b8322 929convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
bbf6f052 930{
b3694847 931 rtx temp;
5ffe63ed 932
1499e0a8
RK
933 /* If FROM is a SUBREG that indicates that we have already done at least
934 the required extension, strip it. */
935
936 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
937 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
938 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
939 x = gen_lowpart (mode, x);
bbf6f052 940
64791b18
RK
941 if (GET_MODE (x) != VOIDmode)
942 oldmode = GET_MODE (x);
3a94c984 943
5ffe63ed 944 if (mode == oldmode)
bbf6f052
RK
945 return x;
946
947 /* There is one case that we must handle specially: If we are converting
906c4e36 948 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
bbf6f052
RK
949 we are to interpret the constant as unsigned, gen_lowpart will do
950 the wrong if the constant appears negative. What we want to do is
951 make the high-order word of the constant zero, not all ones. */
952
953 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
906c4e36 954 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
bbf6f052 955 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
96ff8a16
ILT
956 {
957 HOST_WIDE_INT val = INTVAL (x);
958
959 if (oldmode != VOIDmode
960 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
961 {
962 int width = GET_MODE_BITSIZE (oldmode);
963
964 /* We need to zero extend VAL. */
965 val &= ((HOST_WIDE_INT) 1 << width) - 1;
966 }
967
968 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
969 }
bbf6f052
RK
970
971 /* We can do this with a gen_lowpart if both desired and current modes
972 are integer, and this is either a constant integer, a register, or a
ba2e110c
RK
973 non-volatile MEM. Except for the constant case where MODE is no
974 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
bbf6f052 975
ba2e110c
RK
976 if ((GET_CODE (x) == CONST_INT
977 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
bbf6f052 978 || (GET_MODE_CLASS (mode) == MODE_INT
5ffe63ed 979 && GET_MODE_CLASS (oldmode) == MODE_INT
bbf6f052 980 && (GET_CODE (x) == CONST_DOUBLE
5ffe63ed 981 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
d57c66da
JW
982 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
983 && direct_load[(int) mode])
2bf29316 984 || (GET_CODE (x) == REG
006c9f4a
SE
985 && (! HARD_REGISTER_P (x)
986 || HARD_REGNO_MODE_OK (REGNO (x), mode))
2bf29316
JW
987 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
988 GET_MODE_BITSIZE (GET_MODE (x)))))))))
ba2e110c
RK
989 {
990 /* ?? If we don't know OLDMODE, we have to assume here that
991 X does not need sign- or zero-extension. This may not be
992 the case, but it's the best we can do. */
993 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
994 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
995 {
996 HOST_WIDE_INT val = INTVAL (x);
997 int width = GET_MODE_BITSIZE (oldmode);
998
999 /* We must sign or zero-extend in this case. Start by
1000 zero-extending, then sign extend if we need to. */
1001 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1002 if (! unsignedp
1003 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1004 val |= (HOST_WIDE_INT) (-1) << width;
1005
2496c7bd 1006 return gen_int_mode (val, mode);
ba2e110c
RK
1007 }
1008
1009 return gen_lowpart (mode, x);
1010 }
bbf6f052 1011
ebe75517
JH
1012 /* Converting from integer constant into mode is always equivalent to an
1013 subreg operation. */
1014 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
1015 {
1016 if (GET_MODE_BITSIZE (mode) != GET_MODE_BITSIZE (oldmode))
1017 abort ();
1018 return simplify_gen_subreg (mode, x, oldmode, 0);
1019 }
1020
bbf6f052
RK
1021 temp = gen_reg_rtx (mode);
1022 convert_move (temp, x, unsignedp);
1023 return temp;
1024}
1025\f
cf5124f6
RS
1026/* STORE_MAX_PIECES is the number of bytes at a time that we can
1027 store efficiently. Due to internal GCC limitations, this is
1028 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
1029 for an immediate constant. */
1030
1031#define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
1032
8fd3cf4e
JJ
1033/* Determine whether the LEN bytes can be moved by using several move
1034 instructions. Return nonzero if a call to move_by_pieces should
1035 succeed. */
1036
1037int
502b8322
AJ
1038can_move_by_pieces (unsigned HOST_WIDE_INT len,
1039 unsigned int align ATTRIBUTE_UNUSED)
8fd3cf4e
JJ
1040{
1041 return MOVE_BY_PIECES_P (len, align);
1042}
1043
21d93687
RK
1044/* Generate several move instructions to copy LEN bytes from block FROM to
1045 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1046 and TO through protect_from_queue before calling.
566aa174 1047
21d93687
RK
1048 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1049 used to push FROM to the stack.
566aa174 1050
8fd3cf4e 1051 ALIGN is maximum stack alignment we can assume.
bbf6f052 1052
8fd3cf4e
JJ
1053 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
1054 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
1055 stpcpy. */
1056
1057rtx
502b8322
AJ
1058move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
1059 unsigned int align, int endp)
bbf6f052
RK
1060{
1061 struct move_by_pieces data;
566aa174 1062 rtx to_addr, from_addr = XEXP (from, 0);
770ae6cc 1063 unsigned int max_size = MOVE_MAX_PIECES + 1;
fbe1758d
AM
1064 enum machine_mode mode = VOIDmode, tmode;
1065 enum insn_code icode;
bbf6f052 1066
f26aca6d
DD
1067 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
1068
bbf6f052 1069 data.offset = 0;
bbf6f052 1070 data.from_addr = from_addr;
566aa174
JH
1071 if (to)
1072 {
1073 to_addr = XEXP (to, 0);
1074 data.to = to;
1075 data.autinc_to
1076 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1077 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1078 data.reverse
1079 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1080 }
1081 else
1082 {
1083 to_addr = NULL_RTX;
1084 data.to = NULL_RTX;
1085 data.autinc_to = 1;
1086#ifdef STACK_GROWS_DOWNWARD
1087 data.reverse = 1;
1088#else
1089 data.reverse = 0;
1090#endif
1091 }
1092 data.to_addr = to_addr;
bbf6f052 1093 data.from = from;
bbf6f052
RK
1094 data.autinc_from
1095 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1096 || GET_CODE (from_addr) == POST_INC
1097 || GET_CODE (from_addr) == POST_DEC);
1098
1099 data.explicit_inc_from = 0;
1100 data.explicit_inc_to = 0;
bbf6f052
RK
1101 if (data.reverse) data.offset = len;
1102 data.len = len;
1103
1104 /* If copying requires more than two move insns,
1105 copy addresses to registers (to make displacements shorter)
1106 and use post-increment if available. */
1107 if (!(data.autinc_from && data.autinc_to)
1108 && move_by_pieces_ninsns (len, align) > 2)
1109 {
3a94c984 1110 /* Find the mode of the largest move... */
fbe1758d
AM
1111 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1112 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1113 if (GET_MODE_SIZE (tmode) < max_size)
1114 mode = tmode;
1115
1116 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
bbf6f052
RK
1117 {
1118 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1119 data.autinc_from = 1;
1120 data.explicit_inc_from = -1;
1121 }
fbe1758d 1122 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
bbf6f052
RK
1123 {
1124 data.from_addr = copy_addr_to_reg (from_addr);
1125 data.autinc_from = 1;
1126 data.explicit_inc_from = 1;
1127 }
bbf6f052
RK
1128 if (!data.autinc_from && CONSTANT_P (from_addr))
1129 data.from_addr = copy_addr_to_reg (from_addr);
fbe1758d 1130 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
bbf6f052
RK
1131 {
1132 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1133 data.autinc_to = 1;
1134 data.explicit_inc_to = -1;
1135 }
fbe1758d 1136 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
bbf6f052
RK
1137 {
1138 data.to_addr = copy_addr_to_reg (to_addr);
1139 data.autinc_to = 1;
1140 data.explicit_inc_to = 1;
1141 }
bbf6f052
RK
1142 if (!data.autinc_to && CONSTANT_P (to_addr))
1143 data.to_addr = copy_addr_to_reg (to_addr);
1144 }
1145
e1565e65 1146 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
19caa751
RK
1147 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1148 align = MOVE_MAX * BITS_PER_UNIT;
bbf6f052
RK
1149
1150 /* First move what we can in the largest integer mode, then go to
1151 successively smaller modes. */
1152
1153 while (max_size > 1)
1154 {
e7c33f54
RK
1155 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1156 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1157 if (GET_MODE_SIZE (tmode) < max_size)
bbf6f052
RK
1158 mode = tmode;
1159
1160 if (mode == VOIDmode)
1161 break;
1162
1163 icode = mov_optab->handlers[(int) mode].insn_code;
19caa751 1164 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
bbf6f052
RK
1165 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1166
1167 max_size = GET_MODE_SIZE (mode);
1168 }
1169
1170 /* The code above should have handled everything. */
2a8e278c 1171 if (data.len > 0)
bbf6f052 1172 abort ();
8fd3cf4e
JJ
1173
1174 if (endp)
1175 {
1176 rtx to1;
1177
1178 if (data.reverse)
1179 abort ();
1180 if (data.autinc_to)
1181 {
1182 if (endp == 2)
1183 {
1184 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1185 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1186 else
1187 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
1188 -1));
1189 }
1190 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1191 data.offset);
1192 }
1193 else
1194 {
1195 if (endp == 2)
1196 --data.offset;
1197 to1 = adjust_address (data.to, QImode, data.offset);
1198 }
1199 return to1;
1200 }
1201 else
1202 return data.to;
bbf6f052
RK
1203}
1204
1205/* Return number of insns required to move L bytes by pieces.
f1eaaf73 1206 ALIGN (in bits) is maximum alignment we can assume. */
bbf6f052 1207
3bdf5ad1 1208static unsigned HOST_WIDE_INT
502b8322 1209move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align)
bbf6f052 1210{
3bdf5ad1
RK
1211 unsigned HOST_WIDE_INT n_insns = 0;
1212 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
bbf6f052 1213
e1565e65 1214 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
19caa751 1215 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
14c78e9b 1216 align = MOVE_MAX * BITS_PER_UNIT;
bbf6f052
RK
1217
1218 while (max_size > 1)
1219 {
1220 enum machine_mode mode = VOIDmode, tmode;
1221 enum insn_code icode;
1222
e7c33f54
RK
1223 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1224 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1225 if (GET_MODE_SIZE (tmode) < max_size)
bbf6f052
RK
1226 mode = tmode;
1227
1228 if (mode == VOIDmode)
1229 break;
1230
1231 icode = mov_optab->handlers[(int) mode].insn_code;
19caa751 1232 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
bbf6f052
RK
1233 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1234
1235 max_size = GET_MODE_SIZE (mode);
1236 }
1237
13c6f0d5
NS
1238 if (l)
1239 abort ();
bbf6f052
RK
1240 return n_insns;
1241}
1242
1243/* Subroutine of move_by_pieces. Move as many bytes as appropriate
1244 with move instructions for mode MODE. GENFUN is the gen_... function
1245 to make a move insn for that mode. DATA has all the other info. */
1246
1247static void
502b8322
AJ
1248move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1249 struct move_by_pieces *data)
bbf6f052 1250{
3bdf5ad1 1251 unsigned int size = GET_MODE_SIZE (mode);
ae0ed63a 1252 rtx to1 = NULL_RTX, from1;
bbf6f052
RK
1253
1254 while (data->len >= size)
1255 {
3bdf5ad1
RK
1256 if (data->reverse)
1257 data->offset -= size;
1258
566aa174 1259 if (data->to)
3bdf5ad1 1260 {
566aa174 1261 if (data->autinc_to)
630036c6
JJ
1262 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1263 data->offset);
566aa174 1264 else
f4ef873c 1265 to1 = adjust_address (data->to, mode, data->offset);
3bdf5ad1 1266 }
3bdf5ad1
RK
1267
1268 if (data->autinc_from)
630036c6
JJ
1269 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1270 data->offset);
3bdf5ad1 1271 else
f4ef873c 1272 from1 = adjust_address (data->from, mode, data->offset);
bbf6f052 1273
940da324 1274 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
3d709fd3
RH
1275 emit_insn (gen_add2_insn (data->to_addr,
1276 GEN_INT (-(HOST_WIDE_INT)size)));
940da324 1277 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
3d709fd3
RH
1278 emit_insn (gen_add2_insn (data->from_addr,
1279 GEN_INT (-(HOST_WIDE_INT)size)));
bbf6f052 1280
566aa174
JH
1281 if (data->to)
1282 emit_insn ((*genfun) (to1, from1));
1283 else
21d93687
RK
1284 {
1285#ifdef PUSH_ROUNDING
1286 emit_single_push_insn (mode, from1, NULL);
1287#else
1288 abort ();
1289#endif
1290 }
3bdf5ad1 1291
940da324 1292 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
906c4e36 1293 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
940da324 1294 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
906c4e36 1295 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
bbf6f052 1296
3bdf5ad1
RK
1297 if (! data->reverse)
1298 data->offset += size;
bbf6f052
RK
1299
1300 data->len -= size;
1301 }
1302}
1303\f
4ca79136
RH
1304/* Emit code to move a block Y to a block X. This may be done with
1305 string-move instructions, with multiple scalar move instructions,
1306 or with a library call.
bbf6f052 1307
4ca79136 1308 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
bbf6f052 1309 SIZE is an rtx that says how long they are.
19caa751 1310 ALIGN is the maximum alignment we can assume they have.
44bb111a 1311 METHOD describes what kind of copy this is, and what mechanisms may be used.
bbf6f052 1312
e9a25f70
JL
1313 Return the address of the new block, if memcpy is called and returns it,
1314 0 otherwise. */
1315
1316rtx
502b8322 1317emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
bbf6f052 1318{
44bb111a 1319 bool may_use_call;
e9a25f70 1320 rtx retval = 0;
44bb111a
RH
1321 unsigned int align;
1322
1323 switch (method)
1324 {
1325 case BLOCK_OP_NORMAL:
1326 may_use_call = true;
1327 break;
1328
1329 case BLOCK_OP_CALL_PARM:
1330 may_use_call = block_move_libcall_safe_for_call_parm ();
1331
1332 /* Make inhibit_defer_pop nonzero around the library call
1333 to force it to pop the arguments right away. */
1334 NO_DEFER_POP;
1335 break;
1336
1337 case BLOCK_OP_NO_LIBCALL:
1338 may_use_call = false;
1339 break;
1340
1341 default:
1342 abort ();
1343 }
1344
1345 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
e9a25f70 1346
bbf6f052
RK
1347 if (GET_MODE (x) != BLKmode)
1348 abort ();
bbf6f052
RK
1349 if (GET_MODE (y) != BLKmode)
1350 abort ();
1351
1352 x = protect_from_queue (x, 1);
1353 y = protect_from_queue (y, 0);
5d901c31 1354 size = protect_from_queue (size, 0);
bbf6f052
RK
1355
1356 if (GET_CODE (x) != MEM)
1357 abort ();
1358 if (GET_CODE (y) != MEM)
1359 abort ();
1360 if (size == 0)
1361 abort ();
1362
cb38fd88
RH
1363 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1364 can be incorrect is coming from __builtin_memcpy. */
1365 if (GET_CODE (size) == CONST_INT)
1366 {
6972c506
JJ
1367 if (INTVAL (size) == 0)
1368 return 0;
1369
cb38fd88
RH
1370 x = shallow_copy_rtx (x);
1371 y = shallow_copy_rtx (y);
1372 set_mem_size (x, size);
1373 set_mem_size (y, size);
1374 }
1375
fbe1758d 1376 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
8fd3cf4e 1377 move_by_pieces (x, y, INTVAL (size), align, 0);
4ca79136
RH
1378 else if (emit_block_move_via_movstr (x, y, size, align))
1379 ;
44bb111a 1380 else if (may_use_call)
4ca79136 1381 retval = emit_block_move_via_libcall (x, y, size);
44bb111a
RH
1382 else
1383 emit_block_move_via_loop (x, y, size, align);
1384
1385 if (method == BLOCK_OP_CALL_PARM)
1386 OK_DEFER_POP;
266007a7 1387
4ca79136
RH
1388 return retval;
1389}
266007a7 1390
502b8322 1391/* A subroutine of emit_block_move. Returns true if calling the
44bb111a
RH
1392 block move libcall will not clobber any parameters which may have
1393 already been placed on the stack. */
1394
1395static bool
502b8322 1396block_move_libcall_safe_for_call_parm (void)
44bb111a 1397{
a357a6d4 1398 /* If arguments are pushed on the stack, then they're safe. */
44bb111a
RH
1399 if (PUSH_ARGS)
1400 return true;
44bb111a 1401
450b1728 1402 /* If registers go on the stack anyway, any argument is sure to clobber
a357a6d4
GK
1403 an outgoing argument. */
1404#if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
1405 {
1406 tree fn = emit_block_move_libcall_fn (false);
1407 (void) fn;
1408 if (REG_PARM_STACK_SPACE (fn) != 0)
1409 return false;
1410 }
44bb111a 1411#endif
44bb111a 1412
a357a6d4
GK
1413 /* If any argument goes in memory, then it might clobber an outgoing
1414 argument. */
1415 {
1416 CUMULATIVE_ARGS args_so_far;
1417 tree fn, arg;
450b1728 1418
a357a6d4 1419 fn = emit_block_move_libcall_fn (false);
0f6937fe 1420 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
450b1728 1421
a357a6d4
GK
1422 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1423 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1424 {
1425 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1426 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1427 if (!tmp || !REG_P (tmp))
44bb111a 1428 return false;
a357a6d4
GK
1429#ifdef FUNCTION_ARG_PARTIAL_NREGS
1430 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
1431 NULL_TREE, 1))
1432 return false;
1433#endif
1434 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1435 }
1436 }
1437 return true;
44bb111a
RH
1438}
1439
502b8322 1440/* A subroutine of emit_block_move. Expand a movstr pattern;
4ca79136 1441 return true if successful. */
3ef1eef4 1442
4ca79136 1443static bool
502b8322 1444emit_block_move_via_movstr (rtx x, rtx y, rtx size, unsigned int align)
4ca79136 1445{
4ca79136 1446 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
a5e9c810 1447 int save_volatile_ok = volatile_ok;
4ca79136 1448 enum machine_mode mode;
266007a7 1449
4ca79136
RH
1450 /* Since this is a move insn, we don't care about volatility. */
1451 volatile_ok = 1;
1452
ee960939
OH
1453 /* Try the most limited insn first, because there's no point
1454 including more than one in the machine description unless
1455 the more limited one has some advantage. */
1456
4ca79136
RH
1457 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1458 mode = GET_MODE_WIDER_MODE (mode))
1459 {
1460 enum insn_code code = movstr_optab[(int) mode];
1461 insn_operand_predicate_fn pred;
1462
1463 if (code != CODE_FOR_nothing
1464 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1465 here because if SIZE is less than the mode mask, as it is
1466 returned by the macro, it will definitely be less than the
1467 actual mode mask. */
1468 && ((GET_CODE (size) == CONST_INT
1469 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1470 <= (GET_MODE_MASK (mode) >> 1)))
1471 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1472 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1473 || (*pred) (x, BLKmode))
1474 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1475 || (*pred) (y, BLKmode))
1476 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1477 || (*pred) (opalign, VOIDmode)))
1478 {
1479 rtx op2;
1480 rtx last = get_last_insn ();
1481 rtx pat;
1482
1483 op2 = convert_to_mode (mode, size, 1);
1484 pred = insn_data[(int) code].operand[2].predicate;
1485 if (pred != 0 && ! (*pred) (op2, mode))
1486 op2 = copy_to_mode_reg (mode, op2);
1487
1488 /* ??? When called via emit_block_move_for_call, it'd be
1489 nice if there were some way to inform the backend, so
1490 that it doesn't fail the expansion because it thinks
1491 emitting the libcall would be more efficient. */
1492
1493 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1494 if (pat)
1495 {
1496 emit_insn (pat);
a5e9c810 1497 volatile_ok = save_volatile_ok;
4ca79136 1498 return true;
bbf6f052 1499 }
4ca79136
RH
1500 else
1501 delete_insns_since (last);
bbf6f052 1502 }
4ca79136 1503 }
bbf6f052 1504
a5e9c810 1505 volatile_ok = save_volatile_ok;
4ca79136
RH
1506 return false;
1507}
3ef1eef4 1508
4ca79136
RH
1509/* A subroutine of emit_block_move. Expand a call to memcpy or bcopy.
1510 Return the return value from memcpy, 0 otherwise. */
4bc973ae 1511
4ca79136 1512static rtx
502b8322 1513emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
4ca79136 1514{
ee960939 1515 rtx dst_addr, src_addr;
4ca79136
RH
1516 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1517 enum machine_mode size_mode;
1518 rtx retval;
4bc973ae 1519
4ca79136 1520 /* DST, SRC, or SIZE may have been passed through protect_from_queue.
4bc973ae 1521
ee960939
OH
1522 It is unsafe to save the value generated by protect_from_queue and reuse
1523 it later. Consider what happens if emit_queue is called before the
1524 return value from protect_from_queue is used.
4bc973ae 1525
ee960939
OH
1526 Expansion of the CALL_EXPR below will call emit_queue before we are
1527 finished emitting RTL for argument setup. So if we are not careful we
1528 could get the wrong value for an argument.
4bc973ae 1529
ee960939
OH
1530 To avoid this problem we go ahead and emit code to copy the addresses of
1531 DST and SRC and SIZE into new pseudos. We can then place those new
1532 pseudos into an RTL_EXPR and use them later, even after a call to
4ca79136 1533 emit_queue.
4bc973ae 1534
ee960939
OH
1535 Note this is not strictly needed for library calls since they do not call
1536 emit_queue before loading their arguments. However, we may need to have
1537 library calls call emit_queue in the future since failing to do so could
1538 cause problems for targets which define SMALL_REGISTER_CLASSES and pass
1539 arguments in registers. */
1540
1541 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1542 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
4ca79136 1543
ee960939
OH
1544 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1545 src_addr = convert_memory_address (ptr_mode, src_addr);
ee960939
OH
1546
1547 dst_tree = make_tree (ptr_type_node, dst_addr);
1548 src_tree = make_tree (ptr_type_node, src_addr);
4ca79136
RH
1549
1550 if (TARGET_MEM_FUNCTIONS)
1551 size_mode = TYPE_MODE (sizetype);
1552 else
1553 size_mode = TYPE_MODE (unsigned_type_node);
ee960939 1554
4ca79136
RH
1555 size = convert_to_mode (size_mode, size, 1);
1556 size = copy_to_mode_reg (size_mode, size);
1557
1558 /* It is incorrect to use the libcall calling conventions to call
1559 memcpy in this context. This could be a user call to memcpy and
1560 the user may wish to examine the return value from memcpy. For
1561 targets where libcalls and normal calls have different conventions
1562 for returning pointers, we could end up generating incorrect code.
1563
1564 For convenience, we generate the call to bcopy this way as well. */
1565
4ca79136
RH
1566 if (TARGET_MEM_FUNCTIONS)
1567 size_tree = make_tree (sizetype, size);
1568 else
1569 size_tree = make_tree (unsigned_type_node, size);
1570
1571 fn = emit_block_move_libcall_fn (true);
1572 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1573 if (TARGET_MEM_FUNCTIONS)
1574 {
1575 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1576 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1577 }
1578 else
1579 {
1580 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1581 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1582 }
1583
1584 /* Now we have to build up the CALL_EXPR itself. */
1585 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1586 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1587 call_expr, arg_list, NULL_TREE);
4ca79136
RH
1588
1589 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1590
ee960939
OH
1591 /* If we are initializing a readonly value, show the above call clobbered
1592 it. Otherwise, a load from it may erroneously be hoisted from a loop, or
1593 the delay slot scheduler might overlook conflicts and take nasty
1594 decisions. */
4ca79136 1595 if (RTX_UNCHANGING_P (dst))
ee960939
OH
1596 add_function_usage_to
1597 (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
1598 gen_rtx_CLOBBER (VOIDmode, dst),
1599 NULL_RTX));
4ca79136 1600
ee960939 1601 return TARGET_MEM_FUNCTIONS ? retval : NULL_RTX;
4ca79136 1602}
52cf7115 1603
4ca79136
RH
1604/* A subroutine of emit_block_move_via_libcall. Create the tree node
1605 for the function we use for block copies. The first time FOR_CALL
1606 is true, we call assemble_external. */
52cf7115 1607
4ca79136
RH
1608static GTY(()) tree block_move_fn;
1609
9661b15f 1610void
502b8322 1611init_block_move_fn (const char *asmspec)
4ca79136 1612{
9661b15f 1613 if (!block_move_fn)
4ca79136 1614 {
8fd3cf4e 1615 tree args, fn;
9661b15f 1616
4ca79136 1617 if (TARGET_MEM_FUNCTIONS)
52cf7115 1618 {
4ca79136
RH
1619 fn = get_identifier ("memcpy");
1620 args = build_function_type_list (ptr_type_node, ptr_type_node,
1621 const_ptr_type_node, sizetype,
1622 NULL_TREE);
1623 }
1624 else
1625 {
1626 fn = get_identifier ("bcopy");
1627 args = build_function_type_list (void_type_node, const_ptr_type_node,
1628 ptr_type_node, unsigned_type_node,
1629 NULL_TREE);
52cf7115
JL
1630 }
1631
4ca79136
RH
1632 fn = build_decl (FUNCTION_DECL, fn, args);
1633 DECL_EXTERNAL (fn) = 1;
1634 TREE_PUBLIC (fn) = 1;
1635 DECL_ARTIFICIAL (fn) = 1;
1636 TREE_NOTHROW (fn) = 1;
66c60e67 1637
4ca79136 1638 block_move_fn = fn;
bbf6f052 1639 }
e9a25f70 1640
9661b15f
JJ
1641 if (asmspec)
1642 {
1643 SET_DECL_RTL (block_move_fn, NULL_RTX);
1644 SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));
1645 }
1646}
1647
1648static tree
502b8322 1649emit_block_move_libcall_fn (int for_call)
9661b15f
JJ
1650{
1651 static bool emitted_extern;
1652
1653 if (!block_move_fn)
1654 init_block_move_fn (NULL);
1655
4ca79136
RH
1656 if (for_call && !emitted_extern)
1657 {
1658 emitted_extern = true;
9661b15f
JJ
1659 make_decl_rtl (block_move_fn, NULL);
1660 assemble_external (block_move_fn);
4ca79136
RH
1661 }
1662
9661b15f 1663 return block_move_fn;
bbf6f052 1664}
44bb111a
RH
1665
1666/* A subroutine of emit_block_move. Copy the data via an explicit
1667 loop. This is used only when libcalls are forbidden. */
1668/* ??? It'd be nice to copy in hunks larger than QImode. */
1669
1670static void
502b8322
AJ
1671emit_block_move_via_loop (rtx x, rtx y, rtx size,
1672 unsigned int align ATTRIBUTE_UNUSED)
44bb111a
RH
1673{
1674 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1675 enum machine_mode iter_mode;
1676
1677 iter_mode = GET_MODE (size);
1678 if (iter_mode == VOIDmode)
1679 iter_mode = word_mode;
1680
1681 top_label = gen_label_rtx ();
1682 cmp_label = gen_label_rtx ();
1683 iter = gen_reg_rtx (iter_mode);
1684
1685 emit_move_insn (iter, const0_rtx);
1686
1687 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1688 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1689 do_pending_stack_adjust ();
1690
2e040219 1691 emit_note (NOTE_INSN_LOOP_BEG);
44bb111a
RH
1692
1693 emit_jump (cmp_label);
1694 emit_label (top_label);
1695
1696 tmp = convert_modes (Pmode, iter_mode, iter, true);
1697 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1698 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1699 x = change_address (x, QImode, x_addr);
1700 y = change_address (y, QImode, y_addr);
1701
1702 emit_move_insn (x, y);
1703
1704 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1705 true, OPTAB_LIB_WIDEN);
1706 if (tmp != iter)
1707 emit_move_insn (iter, tmp);
1708
2e040219 1709 emit_note (NOTE_INSN_LOOP_CONT);
44bb111a
RH
1710 emit_label (cmp_label);
1711
1712 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1713 true, top_label);
1714
2e040219 1715 emit_note (NOTE_INSN_LOOP_END);
44bb111a 1716}
bbf6f052
RK
1717\f
1718/* Copy all or part of a value X into registers starting at REGNO.
1719 The number of registers to be filled is NREGS. */
1720
1721void
502b8322 1722move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
bbf6f052
RK
1723{
1724 int i;
381127e8 1725#ifdef HAVE_load_multiple
3a94c984 1726 rtx pat;
381127e8
RL
1727 rtx last;
1728#endif
bbf6f052 1729
72bb9717
RK
1730 if (nregs == 0)
1731 return;
1732
bbf6f052
RK
1733 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1734 x = validize_mem (force_const_mem (mode, x));
1735
1736 /* See if the machine can do this with a load multiple insn. */
1737#ifdef HAVE_load_multiple
c3a02afe 1738 if (HAVE_load_multiple)
bbf6f052 1739 {
c3a02afe 1740 last = get_last_insn ();
38a448ca 1741 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
c3a02afe
RK
1742 GEN_INT (nregs));
1743 if (pat)
1744 {
1745 emit_insn (pat);
1746 return;
1747 }
1748 else
1749 delete_insns_since (last);
bbf6f052 1750 }
bbf6f052
RK
1751#endif
1752
1753 for (i = 0; i < nregs; i++)
38a448ca 1754 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
bbf6f052
RK
1755 operand_subword_force (x, i, mode));
1756}
1757
1758/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
c6b97fac 1759 The number of registers to be filled is NREGS. */
0040593d 1760
bbf6f052 1761void
502b8322 1762move_block_from_reg (int regno, rtx x, int nregs)
bbf6f052
RK
1763{
1764 int i;
bbf6f052 1765
2954d7db
RK
1766 if (nregs == 0)
1767 return;
1768
bbf6f052
RK
1769 /* See if the machine can do this with a store multiple insn. */
1770#ifdef HAVE_store_multiple
c3a02afe 1771 if (HAVE_store_multiple)
bbf6f052 1772 {
c6b97fac
AM
1773 rtx last = get_last_insn ();
1774 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1775 GEN_INT (nregs));
c3a02afe
RK
1776 if (pat)
1777 {
1778 emit_insn (pat);
1779 return;
1780 }
1781 else
1782 delete_insns_since (last);
bbf6f052 1783 }
bbf6f052
RK
1784#endif
1785
1786 for (i = 0; i < nregs; i++)
1787 {
1788 rtx tem = operand_subword (x, i, 1, BLKmode);
1789
1790 if (tem == 0)
1791 abort ();
1792
38a448ca 1793 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
bbf6f052
RK
1794 }
1795}
1796
084a1106
JDA
1797/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1798 ORIG, where ORIG is a non-consecutive group of registers represented by
1799 a PARALLEL. The clone is identical to the original except in that the
1800 original set of registers is replaced by a new set of pseudo registers.
1801 The new set has the same modes as the original set. */
1802
1803rtx
502b8322 1804gen_group_rtx (rtx orig)
084a1106
JDA
1805{
1806 int i, length;
1807 rtx *tmps;
1808
1809 if (GET_CODE (orig) != PARALLEL)
1810 abort ();
1811
1812 length = XVECLEN (orig, 0);
703ad42b 1813 tmps = alloca (sizeof (rtx) * length);
084a1106
JDA
1814
1815 /* Skip a NULL entry in first slot. */
1816 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1817
1818 if (i)
1819 tmps[0] = 0;
1820
1821 for (; i < length; i++)
1822 {
1823 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1824 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1825
1826 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1827 }
1828
1829 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1830}
1831
6e985040
AM
1832/* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
1833 where DST is non-consecutive registers represented by a PARALLEL.
1834 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
450b1728 1835 if not known. */
fffa9c1d
JW
1836
1837void
6e985040 1838emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
fffa9c1d 1839{
aac5cc16
RH
1840 rtx *tmps, src;
1841 int start, i;
fffa9c1d 1842
aac5cc16 1843 if (GET_CODE (dst) != PARALLEL)
fffa9c1d
JW
1844 abort ();
1845
1846 /* Check for a NULL entry, used to indicate that the parameter goes
1847 both on the stack and in registers. */
aac5cc16
RH
1848 if (XEXP (XVECEXP (dst, 0, 0), 0))
1849 start = 0;
fffa9c1d 1850 else
aac5cc16
RH
1851 start = 1;
1852
703ad42b 1853 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
aac5cc16 1854
aac5cc16
RH
1855 /* Process the pieces. */
1856 for (i = start; i < XVECLEN (dst, 0); i++)
1857 {
1858 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
770ae6cc
RK
1859 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1860 unsigned int bytelen = GET_MODE_SIZE (mode);
aac5cc16
RH
1861 int shift = 0;
1862
1863 /* Handle trailing fragments that run over the size of the struct. */
8752c357 1864 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
aac5cc16 1865 {
6e985040
AM
1866 /* Arrange to shift the fragment to where it belongs.
1867 extract_bit_field loads to the lsb of the reg. */
1868 if (
1869#ifdef BLOCK_REG_PADDING
1870 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1871 == (BYTES_BIG_ENDIAN ? upward : downward)
1872#else
1873 BYTES_BIG_ENDIAN
1874#endif
1875 )
1876 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
aac5cc16
RH
1877 bytelen = ssize - bytepos;
1878 if (bytelen <= 0)
729a2125 1879 abort ();
aac5cc16
RH
1880 }
1881
f3ce87a9
DE
1882 /* If we won't be loading directly from memory, protect the real source
1883 from strange tricks we might play; but make sure that the source can
1884 be loaded directly into the destination. */
1885 src = orig_src;
1886 if (GET_CODE (orig_src) != MEM
1887 && (!CONSTANT_P (orig_src)
1888 || (GET_MODE (orig_src) != mode
1889 && GET_MODE (orig_src) != VOIDmode)))
1890 {
1891 if (GET_MODE (orig_src) == VOIDmode)
1892 src = gen_reg_rtx (mode);
1893 else
1894 src = gen_reg_rtx (GET_MODE (orig_src));
04050c69 1895
f3ce87a9
DE
1896 emit_move_insn (src, orig_src);
1897 }
1898
aac5cc16
RH
1899 /* Optimize the access just a bit. */
1900 if (GET_CODE (src) == MEM
6e985040
AM
1901 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1902 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
729a2125 1903 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
aac5cc16
RH
1904 && bytelen == GET_MODE_SIZE (mode))
1905 {
1906 tmps[i] = gen_reg_rtx (mode);
f4ef873c 1907 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
fffa9c1d 1908 }
7c4a6db0
JW
1909 else if (GET_CODE (src) == CONCAT)
1910 {
015b1ad1
JDA
1911 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1912 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1913
1914 if ((bytepos == 0 && bytelen == slen0)
1915 || (bytepos != 0 && bytepos + bytelen <= slen))
cbb92744 1916 {
015b1ad1
JDA
1917 /* The following assumes that the concatenated objects all
1918 have the same size. In this case, a simple calculation
1919 can be used to determine the object and the bit field
1920 to be extracted. */
1921 tmps[i] = XEXP (src, bytepos / slen0);
cbb92744
JJ
1922 if (! CONSTANT_P (tmps[i])
1923 && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
1924 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
015b1ad1
JDA
1925 (bytepos % slen0) * BITS_PER_UNIT,
1926 1, NULL_RTX, mode, mode, ssize);
cbb92744 1927 }
58f69841
JH
1928 else if (bytepos == 0)
1929 {
015b1ad1 1930 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
58f69841 1931 emit_move_insn (mem, src);
04050c69 1932 tmps[i] = adjust_address (mem, mode, 0);
58f69841 1933 }
7c4a6db0
JW
1934 else
1935 abort ();
1936 }
9c0631a7
AH
1937 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1938 SIMD register, which is currently broken. While we get GCC
1939 to emit proper RTL for these cases, let's dump to memory. */
1940 else if (VECTOR_MODE_P (GET_MODE (dst))
1941 && GET_CODE (src) == REG)
1942 {
1943 int slen = GET_MODE_SIZE (GET_MODE (src));
1944 rtx mem;
1945
1946 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1947 emit_move_insn (mem, src);
1948 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1949 }
d3a16cbd
FJ
1950 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1951 && XVECLEN (dst, 0) > 1)
1952 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
f3ce87a9 1953 else if (CONSTANT_P (src)
2ee5437b
RH
1954 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
1955 tmps[i] = src;
fffa9c1d 1956 else
19caa751
RK
1957 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1958 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
04050c69 1959 mode, mode, ssize);
fffa9c1d 1960
6e985040 1961 if (shift)
19caa751
RK
1962 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1963 tmps[i], 0, OPTAB_WIDEN);
fffa9c1d 1964 }
19caa751 1965
3a94c984 1966 emit_queue ();
aac5cc16
RH
1967
1968 /* Copy the extracted pieces into the proper (probable) hard regs. */
1969 for (i = start; i < XVECLEN (dst, 0); i++)
1970 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
fffa9c1d
JW
1971}
1972
084a1106
JDA
1973/* Emit code to move a block SRC to block DST, where SRC and DST are
1974 non-consecutive groups of registers, each represented by a PARALLEL. */
1975
1976void
502b8322 1977emit_group_move (rtx dst, rtx src)
084a1106
JDA
1978{
1979 int i;
1980
1981 if (GET_CODE (src) != PARALLEL
1982 || GET_CODE (dst) != PARALLEL
1983 || XVECLEN (src, 0) != XVECLEN (dst, 0))
1984 abort ();
1985
1986 /* Skip first entry if NULL. */
1987 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1988 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1989 XEXP (XVECEXP (src, 0, i), 0));
1990}
1991
6e985040
AM
1992/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1993 where SRC is non-consecutive registers represented by a PARALLEL.
1994 SSIZE represents the total size of block ORIG_DST, or -1 if not
1995 known. */
fffa9c1d
JW
1996
1997void
6e985040 1998emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
fffa9c1d 1999{
aac5cc16
RH
2000 rtx *tmps, dst;
2001 int start, i;
fffa9c1d 2002
aac5cc16 2003 if (GET_CODE (src) != PARALLEL)
fffa9c1d
JW
2004 abort ();
2005
2006 /* Check for a NULL entry, used to indicate that the parameter goes
2007 both on the stack and in registers. */
aac5cc16
RH
2008 if (XEXP (XVECEXP (src, 0, 0), 0))
2009 start = 0;
fffa9c1d 2010 else
aac5cc16
RH
2011 start = 1;
2012
703ad42b 2013 tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));
fffa9c1d 2014
aac5cc16
RH
2015 /* Copy the (probable) hard regs into pseudos. */
2016 for (i = start; i < XVECLEN (src, 0); i++)
fffa9c1d 2017 {
aac5cc16
RH
2018 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2019 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2020 emit_move_insn (tmps[i], reg);
2021 }
3a94c984 2022 emit_queue ();
fffa9c1d 2023
aac5cc16
RH
2024 /* If we won't be storing directly into memory, protect the real destination
2025 from strange tricks we might play. */
2026 dst = orig_dst;
10a9f2be
JW
2027 if (GET_CODE (dst) == PARALLEL)
2028 {
2029 rtx temp;
2030
2031 /* We can get a PARALLEL dst if there is a conditional expression in
2032 a return statement. In that case, the dst and src are the same,
2033 so no action is necessary. */
2034 if (rtx_equal_p (dst, src))
2035 return;
2036
2037 /* It is unclear if we can ever reach here, but we may as well handle
2038 it. Allocate a temporary, and split this into a store/load to/from
2039 the temporary. */
2040
2041 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
6e985040
AM
2042 emit_group_store (temp, src, type, ssize);
2043 emit_group_load (dst, temp, type, ssize);
10a9f2be
JW
2044 return;
2045 }
75897075 2046 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
aac5cc16
RH
2047 {
2048 dst = gen_reg_rtx (GET_MODE (orig_dst));
2049 /* Make life a bit easier for combine. */
8ae91fc0 2050 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
aac5cc16 2051 }
aac5cc16
RH
2052
2053 /* Process the pieces. */
2054 for (i = start; i < XVECLEN (src, 0); i++)
2055 {
770ae6cc 2056 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
aac5cc16 2057 enum machine_mode mode = GET_MODE (tmps[i]);
770ae6cc 2058 unsigned int bytelen = GET_MODE_SIZE (mode);
6ddae612 2059 rtx dest = dst;
aac5cc16
RH
2060
2061 /* Handle trailing fragments that run over the size of the struct. */
8752c357 2062 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
71bc0330 2063 {
6e985040
AM
2064 /* store_bit_field always takes its value from the lsb.
2065 Move the fragment to the lsb if it's not already there. */
2066 if (
2067#ifdef BLOCK_REG_PADDING
2068 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2069 == (BYTES_BIG_ENDIAN ? upward : downward)
2070#else
2071 BYTES_BIG_ENDIAN
2072#endif
2073 )
aac5cc16
RH
2074 {
2075 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2076 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2077 tmps[i], 0, OPTAB_WIDEN);
2078 }
2079 bytelen = ssize - bytepos;
71bc0330 2080 }
fffa9c1d 2081
6ddae612
JJ
2082 if (GET_CODE (dst) == CONCAT)
2083 {
2084 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2085 dest = XEXP (dst, 0);
2086 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2087 {
2088 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2089 dest = XEXP (dst, 1);
2090 }
0d446150
JH
2091 else if (bytepos == 0 && XVECLEN (src, 0))
2092 {
2093 dest = assign_stack_temp (GET_MODE (dest),
2094 GET_MODE_SIZE (GET_MODE (dest)), 0);
2095 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2096 tmps[i]);
2097 dst = dest;
2098 break;
2099 }
6ddae612
JJ
2100 else
2101 abort ();
2102 }
2103
aac5cc16 2104 /* Optimize the access just a bit. */
6ddae612 2105 if (GET_CODE (dest) == MEM
6e985040
AM
2106 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2107 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
729a2125 2108 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
aac5cc16 2109 && bytelen == GET_MODE_SIZE (mode))
6ddae612 2110 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
aac5cc16 2111 else
6ddae612 2112 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
04050c69 2113 mode, tmps[i], ssize);
fffa9c1d 2114 }
729a2125 2115
3a94c984 2116 emit_queue ();
aac5cc16
RH
2117
2118 /* Copy from the pseudo into the (probable) hard reg. */
0d446150 2119 if (orig_dst != dst)
aac5cc16 2120 emit_move_insn (orig_dst, dst);
fffa9c1d
JW
2121}
2122
c36fce9a
GRK
2123/* Generate code to copy a BLKmode object of TYPE out of a
2124 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2125 is null, a stack temporary is created. TGTBLK is returned.
2126
c988af2b
RS
2127 The purpose of this routine is to handle functions that return
2128 BLKmode structures in registers. Some machines (the PA for example)
2129 want to return all small structures in registers regardless of the
2130 structure's alignment. */
c36fce9a
GRK
2131
2132rtx
502b8322 2133copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
c36fce9a 2134{
19caa751
RK
2135 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2136 rtx src = NULL, dst = NULL;
2137 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
c988af2b 2138 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
19caa751
RK
2139
2140 if (tgtblk == 0)
2141 {
1da68f56
RK
2142 tgtblk = assign_temp (build_qualified_type (type,
2143 (TYPE_QUALS (type)
2144 | TYPE_QUAL_CONST)),
2145 0, 1, 1);
19caa751
RK
2146 preserve_temp_slots (tgtblk);
2147 }
3a94c984 2148
1ed1b4fb 2149 /* This code assumes srcreg is at least a full word. If it isn't, copy it
9ac3e73b 2150 into a new pseudo which is a full word. */
0d7839da 2151
19caa751
RK
2152 if (GET_MODE (srcreg) != BLKmode
2153 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
8df83eae 2154 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
19caa751 2155
c988af2b
RS
2156 /* If the structure doesn't take up a whole number of words, see whether
2157 SRCREG is padded on the left or on the right. If it's on the left,
2158 set PADDING_CORRECTION to the number of bits to skip.
2159
2160 In most ABIs, the structure will be returned at the least end of
2161 the register, which translates to right padding on little-endian
2162 targets and left padding on big-endian targets. The opposite
2163 holds if the structure is returned at the most significant
2164 end of the register. */
2165 if (bytes % UNITS_PER_WORD != 0
2166 && (targetm.calls.return_in_msb (type)
2167 ? !BYTES_BIG_ENDIAN
2168 : BYTES_BIG_ENDIAN))
2169 padding_correction
19caa751
RK
2170 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2171
2172 /* Copy the structure BITSIZE bites at a time.
3a94c984 2173
19caa751
RK
2174 We could probably emit more efficient code for machines which do not use
2175 strict alignment, but it doesn't seem worth the effort at the current
2176 time. */
c988af2b 2177 for (bitpos = 0, xbitpos = padding_correction;
19caa751
RK
2178 bitpos < bytes * BITS_PER_UNIT;
2179 bitpos += bitsize, xbitpos += bitsize)
2180 {
3a94c984 2181 /* We need a new source operand each time xbitpos is on a
c988af2b 2182 word boundary and when xbitpos == padding_correction
19caa751
RK
2183 (the first time through). */
2184 if (xbitpos % BITS_PER_WORD == 0
c988af2b 2185 || xbitpos == padding_correction)
b47f8cfc
JH
2186 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2187 GET_MODE (srcreg));
19caa751
RK
2188
2189 /* We need a new destination operand each time bitpos is on
2190 a word boundary. */
2191 if (bitpos % BITS_PER_WORD == 0)
2192 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
3a94c984 2193
19caa751
RK
2194 /* Use xbitpos for the source extraction (right justified) and
2195 xbitpos for the destination store (left justified). */
2196 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2197 extract_bit_field (src, bitsize,
2198 xbitpos % BITS_PER_WORD, 1,
2199 NULL_RTX, word_mode, word_mode,
04050c69
RK
2200 BITS_PER_WORD),
2201 BITS_PER_WORD);
19caa751
RK
2202 }
2203
2204 return tgtblk;
c36fce9a
GRK
2205}
2206
94b25f81
RK
2207/* Add a USE expression for REG to the (possibly empty) list pointed
2208 to by CALL_FUSAGE. REG must denote a hard register. */
bbf6f052
RK
2209
2210void
502b8322 2211use_reg (rtx *call_fusage, rtx reg)
b3f8cf4a 2212{
0304dfbb
DE
2213 if (GET_CODE (reg) != REG
2214 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
3a94c984 2215 abort ();
b3f8cf4a
RK
2216
2217 *call_fusage
38a448ca
RH
2218 = gen_rtx_EXPR_LIST (VOIDmode,
2219 gen_rtx_USE (VOIDmode, reg), *call_fusage);
b3f8cf4a
RK
2220}
2221
94b25f81
RK
2222/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2223 starting at REGNO. All of these registers must be hard registers. */
b3f8cf4a
RK
2224
2225void
502b8322 2226use_regs (rtx *call_fusage, int regno, int nregs)
bbf6f052 2227{
0304dfbb 2228 int i;
bbf6f052 2229
0304dfbb
DE
2230 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2231 abort ();
2232
2233 for (i = 0; i < nregs; i++)
e50126e8 2234 use_reg (call_fusage, regno_reg_rtx[regno + i]);
bbf6f052 2235}
fffa9c1d
JW
2236
2237/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2238 PARALLEL REGS. This is for calls that pass values in multiple
2239 non-contiguous locations. The Irix 6 ABI has examples of this. */
2240
2241void
502b8322 2242use_group_regs (rtx *call_fusage, rtx regs)
fffa9c1d
JW
2243{
2244 int i;
2245
6bd35f86
DE
2246 for (i = 0; i < XVECLEN (regs, 0); i++)
2247 {
2248 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
fffa9c1d 2249
6bd35f86
DE
2250 /* A NULL entry means the parameter goes both on the stack and in
2251 registers. This can also be a MEM for targets that pass values
2252 partially on the stack and partially in registers. */
e9a25f70 2253 if (reg != 0 && GET_CODE (reg) == REG)
6bd35f86
DE
2254 use_reg (call_fusage, reg);
2255 }
fffa9c1d 2256}
bbf6f052 2257\f
57814e5e 2258
cf5124f6
RS
2259/* Determine whether the LEN bytes generated by CONSTFUN can be
2260 stored to memory using several move instructions. CONSTFUNDATA is
2261 a pointer which will be passed as argument in every CONSTFUN call.
2262 ALIGN is maximum alignment we can assume. Return nonzero if a
2263 call to store_by_pieces should succeed. */
2264
57814e5e 2265int
502b8322
AJ
2266can_store_by_pieces (unsigned HOST_WIDE_INT len,
2267 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2268 void *constfundata, unsigned int align)
57814e5e 2269{
98166639 2270 unsigned HOST_WIDE_INT max_size, l;
57814e5e
JJ
2271 HOST_WIDE_INT offset = 0;
2272 enum machine_mode mode, tmode;
2273 enum insn_code icode;
2274 int reverse;
2275 rtx cst;
2276
2c430630
RS
2277 if (len == 0)
2278 return 1;
2279
4977bab6 2280 if (! STORE_BY_PIECES_P (len, align))
57814e5e
JJ
2281 return 0;
2282
2283 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2284 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2285 align = MOVE_MAX * BITS_PER_UNIT;
2286
2287 /* We would first store what we can in the largest integer mode, then go to
2288 successively smaller modes. */
2289
2290 for (reverse = 0;
2291 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2292 reverse++)
2293 {
2294 l = len;
2295 mode = VOIDmode;
cf5124f6 2296 max_size = STORE_MAX_PIECES + 1;
57814e5e
JJ
2297 while (max_size > 1)
2298 {
2299 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2300 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2301 if (GET_MODE_SIZE (tmode) < max_size)
2302 mode = tmode;
2303
2304 if (mode == VOIDmode)
2305 break;
2306
2307 icode = mov_optab->handlers[(int) mode].insn_code;
2308 if (icode != CODE_FOR_nothing
2309 && align >= GET_MODE_ALIGNMENT (mode))
2310 {
2311 unsigned int size = GET_MODE_SIZE (mode);
2312
2313 while (l >= size)
2314 {
2315 if (reverse)
2316 offset -= size;
2317
2318 cst = (*constfun) (constfundata, offset, mode);
2319 if (!LEGITIMATE_CONSTANT_P (cst))
2320 return 0;
2321
2322 if (!reverse)
2323 offset += size;
2324
2325 l -= size;
2326 }
2327 }
2328
2329 max_size = GET_MODE_SIZE (mode);
2330 }
2331
2332 /* The code above should have handled everything. */
2333 if (l != 0)
2334 abort ();
2335 }
2336
2337 return 1;
2338}
2339
2340/* Generate several move instructions to store LEN bytes generated by
2341 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2342 pointer which will be passed as argument in every CONSTFUN call.
8fd3cf4e
JJ
2343 ALIGN is maximum alignment we can assume.
2344 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
2345 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
2346 stpcpy. */
57814e5e 2347
8fd3cf4e 2348rtx
502b8322
AJ
2349store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2350 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2351 void *constfundata, unsigned int align, int endp)
57814e5e
JJ
2352{
2353 struct store_by_pieces data;
2354
2c430630
RS
2355 if (len == 0)
2356 {
2357 if (endp == 2)
2358 abort ();
2359 return to;
2360 }
2361
4977bab6 2362 if (! STORE_BY_PIECES_P (len, align))
57814e5e
JJ
2363 abort ();
2364 to = protect_from_queue (to, 1);
2365 data.constfun = constfun;
2366 data.constfundata = constfundata;
2367 data.len = len;
2368 data.to = to;
2369 store_by_pieces_1 (&data, align);
8fd3cf4e
JJ
2370 if (endp)
2371 {
2372 rtx to1;
2373
2374 if (data.reverse)
2375 abort ();
2376 if (data.autinc_to)
2377 {
2378 if (endp == 2)
2379 {
2380 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2381 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2382 else
2383 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2384 -1));
2385 }
2386 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2387 data.offset);
2388 }
2389 else
2390 {
2391 if (endp == 2)
2392 --data.offset;
2393 to1 = adjust_address (data.to, QImode, data.offset);
2394 }
2395 return to1;
2396 }
2397 else
2398 return data.to;
57814e5e
JJ
2399}
2400
19caa751
RK
2401/* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2402 rtx with BLKmode). The caller must pass TO through protect_from_queue
2403 before calling. ALIGN is maximum alignment we can assume. */
9de08200
RK
2404
2405static void
342e2b74 2406clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
9de08200 2407{
57814e5e
JJ
2408 struct store_by_pieces data;
2409
2c430630
RS
2410 if (len == 0)
2411 return;
2412
57814e5e 2413 data.constfun = clear_by_pieces_1;
df4ae160 2414 data.constfundata = NULL;
57814e5e
JJ
2415 data.len = len;
2416 data.to = to;
2417 store_by_pieces_1 (&data, align);
2418}
2419
2420/* Callback routine for clear_by_pieces.
2421 Return const0_rtx unconditionally. */
2422
2423static rtx
502b8322
AJ
2424clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2425 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2426 enum machine_mode mode ATTRIBUTE_UNUSED)
57814e5e
JJ
2427{
2428 return const0_rtx;
2429}
2430
2431/* Subroutine of clear_by_pieces and store_by_pieces.
2432 Generate several move instructions to store LEN bytes of block TO. (A MEM
2433 rtx with BLKmode). The caller must pass TO through protect_from_queue
2434 before calling. ALIGN is maximum alignment we can assume. */
2435
2436static void
502b8322
AJ
2437store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2438 unsigned int align ATTRIBUTE_UNUSED)
57814e5e
JJ
2439{
2440 rtx to_addr = XEXP (data->to, 0);
cf5124f6 2441 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
fbe1758d
AM
2442 enum machine_mode mode = VOIDmode, tmode;
2443 enum insn_code icode;
9de08200 2444
57814e5e
JJ
2445 data->offset = 0;
2446 data->to_addr = to_addr;
2447 data->autinc_to
9de08200
RK
2448 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2449 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2450
57814e5e
JJ
2451 data->explicit_inc_to = 0;
2452 data->reverse
9de08200 2453 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
57814e5e
JJ
2454 if (data->reverse)
2455 data->offset = data->len;
9de08200 2456
57814e5e 2457 /* If storing requires more than two move insns,
9de08200
RK
2458 copy addresses to registers (to make displacements shorter)
2459 and use post-increment if available. */
57814e5e
JJ
2460 if (!data->autinc_to
2461 && move_by_pieces_ninsns (data->len, align) > 2)
9de08200 2462 {
3a94c984 2463 /* Determine the main mode we'll be using. */
fbe1758d
AM
2464 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2465 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2466 if (GET_MODE_SIZE (tmode) < max_size)
2467 mode = tmode;
2468
57814e5e 2469 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
9de08200 2470 {
57814e5e
JJ
2471 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2472 data->autinc_to = 1;
2473 data->explicit_inc_to = -1;
9de08200 2474 }
3bdf5ad1 2475
57814e5e
JJ
2476 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2477 && ! data->autinc_to)
9de08200 2478 {
57814e5e
JJ
2479 data->to_addr = copy_addr_to_reg (to_addr);
2480 data->autinc_to = 1;
2481 data->explicit_inc_to = 1;
9de08200 2482 }
3bdf5ad1 2483
57814e5e
JJ
2484 if ( !data->autinc_to && CONSTANT_P (to_addr))
2485 data->to_addr = copy_addr_to_reg (to_addr);
9de08200
RK
2486 }
2487
e1565e65 2488 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
19caa751 2489 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
bdb429a5 2490 align = MOVE_MAX * BITS_PER_UNIT;
9de08200 2491
57814e5e 2492 /* First store what we can in the largest integer mode, then go to
9de08200
RK
2493 successively smaller modes. */
2494
2495 while (max_size > 1)
2496 {
9de08200
RK
2497 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2498 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2499 if (GET_MODE_SIZE (tmode) < max_size)
2500 mode = tmode;
2501
2502 if (mode == VOIDmode)
2503 break;
2504
2505 icode = mov_optab->handlers[(int) mode].insn_code;
19caa751 2506 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
57814e5e 2507 store_by_pieces_2 (GEN_FCN (icode), mode, data);
9de08200
RK
2508
2509 max_size = GET_MODE_SIZE (mode);
2510 }
2511
2512 /* The code above should have handled everything. */
57814e5e 2513 if (data->len != 0)
9de08200
RK
2514 abort ();
2515}
2516
57814e5e 2517/* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
9de08200
RK
2518 with move instructions for mode MODE. GENFUN is the gen_... function
2519 to make a move insn for that mode. DATA has all the other info. */
2520
2521static void
502b8322
AJ
2522store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2523 struct store_by_pieces *data)
9de08200 2524{
3bdf5ad1 2525 unsigned int size = GET_MODE_SIZE (mode);
57814e5e 2526 rtx to1, cst;
9de08200
RK
2527
2528 while (data->len >= size)
2529 {
3bdf5ad1
RK
2530 if (data->reverse)
2531 data->offset -= size;
9de08200 2532
3bdf5ad1 2533 if (data->autinc_to)
630036c6
JJ
2534 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2535 data->offset);
3a94c984 2536 else
f4ef873c 2537 to1 = adjust_address (data->to, mode, data->offset);
9de08200 2538
940da324 2539 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
57814e5e
JJ
2540 emit_insn (gen_add2_insn (data->to_addr,
2541 GEN_INT (-(HOST_WIDE_INT) size)));
9de08200 2542
57814e5e
JJ
2543 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2544 emit_insn ((*genfun) (to1, cst));
3bdf5ad1 2545
940da324 2546 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
9de08200 2547 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
9de08200 2548
3bdf5ad1
RK
2549 if (! data->reverse)
2550 data->offset += size;
9de08200
RK
2551
2552 data->len -= size;
2553 }
2554}
2555\f
19caa751 2556/* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
8ac61af7 2557 its length in bytes. */
e9a25f70
JL
2558
2559rtx
502b8322 2560clear_storage (rtx object, rtx size)
bbf6f052 2561{
e9a25f70 2562 rtx retval = 0;
8ac61af7
RK
2563 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2564 : GET_MODE_ALIGNMENT (GET_MODE (object)));
e9a25f70 2565
fcf1b822
RK
2566 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2567 just move a zero. Otherwise, do this a piece at a time. */
69ef87e2 2568 if (GET_MODE (object) != BLKmode
fcf1b822 2569 && GET_CODE (size) == CONST_INT
4ca79136 2570 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
fcf1b822
RK
2571 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2572 else
bbf6f052 2573 {
9de08200
RK
2574 object = protect_from_queue (object, 1);
2575 size = protect_from_queue (size, 0);
2576
6972c506 2577 if (size == const0_rtx)
2c430630
RS
2578 ;
2579 else if (GET_CODE (size) == CONST_INT
78762e3b 2580 && CLEAR_BY_PIECES_P (INTVAL (size), align))
9de08200 2581 clear_by_pieces (object, INTVAL (size), align);
4ca79136
RH
2582 else if (clear_storage_via_clrstr (object, size, align))
2583 ;
9de08200 2584 else
4ca79136
RH
2585 retval = clear_storage_via_libcall (object, size);
2586 }
2587
2588 return retval;
2589}
2590
2591/* A subroutine of clear_storage. Expand a clrstr pattern;
2592 return true if successful. */
2593
2594static bool
502b8322 2595clear_storage_via_clrstr (rtx object, rtx size, unsigned int align)
4ca79136
RH
2596{
2597 /* Try the most limited insn first, because there's no point
2598 including more than one in the machine description unless
2599 the more limited one has some advantage. */
2600
2601 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2602 enum machine_mode mode;
2603
2604 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2605 mode = GET_MODE_WIDER_MODE (mode))
2606 {
2607 enum insn_code code = clrstr_optab[(int) mode];
2608 insn_operand_predicate_fn pred;
2609
2610 if (code != CODE_FOR_nothing
2611 /* We don't need MODE to be narrower than
2612 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2613 the mode mask, as it is returned by the macro, it will
2614 definitely be less than the actual mode mask. */
2615 && ((GET_CODE (size) == CONST_INT
2616 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2617 <= (GET_MODE_MASK (mode) >> 1)))
2618 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2619 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2620 || (*pred) (object, BLKmode))
2621 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2622 || (*pred) (opalign, VOIDmode)))
9de08200 2623 {
4ca79136
RH
2624 rtx op1;
2625 rtx last = get_last_insn ();
2626 rtx pat;
9de08200 2627
4ca79136
RH
2628 op1 = convert_to_mode (mode, size, 1);
2629 pred = insn_data[(int) code].operand[1].predicate;
2630 if (pred != 0 && ! (*pred) (op1, mode))
2631 op1 = copy_to_mode_reg (mode, op1);
9de08200 2632
4ca79136
RH
2633 pat = GEN_FCN ((int) code) (object, op1, opalign);
2634 if (pat)
9de08200 2635 {
4ca79136
RH
2636 emit_insn (pat);
2637 return true;
2638 }
2639 else
2640 delete_insns_since (last);
2641 }
2642 }
9de08200 2643
4ca79136
RH
2644 return false;
2645}
9de08200 2646
4ca79136
RH
2647/* A subroutine of clear_storage. Expand a call to memset or bzero.
2648 Return the return value of memset, 0 otherwise. */
9de08200 2649
4ca79136 2650static rtx
502b8322 2651clear_storage_via_libcall (rtx object, rtx size)
4ca79136
RH
2652{
2653 tree call_expr, arg_list, fn, object_tree, size_tree;
2654 enum machine_mode size_mode;
2655 rtx retval;
9de08200 2656
4ca79136 2657 /* OBJECT or SIZE may have been passed through protect_from_queue.
52cf7115 2658
4ca79136
RH
2659 It is unsafe to save the value generated by protect_from_queue
2660 and reuse it later. Consider what happens if emit_queue is
2661 called before the return value from protect_from_queue is used.
52cf7115 2662
4ca79136
RH
2663 Expansion of the CALL_EXPR below will call emit_queue before
2664 we are finished emitting RTL for argument setup. So if we are
2665 not careful we could get the wrong value for an argument.
52cf7115 2666
4ca79136
RH
2667 To avoid this problem we go ahead and emit code to copy OBJECT
2668 and SIZE into new pseudos. We can then place those new pseudos
2669 into an RTL_EXPR and use them later, even after a call to
2670 emit_queue.
52cf7115 2671
4ca79136
RH
2672 Note this is not strictly needed for library calls since they
2673 do not call emit_queue before loading their arguments. However,
2674 we may need to have library calls call emit_queue in the future
2675 since failing to do so could cause problems for targets which
2676 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
52cf7115 2677
4ca79136 2678 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
52cf7115 2679
4ca79136
RH
2680 if (TARGET_MEM_FUNCTIONS)
2681 size_mode = TYPE_MODE (sizetype);
2682 else
2683 size_mode = TYPE_MODE (unsigned_type_node);
2684 size = convert_to_mode (size_mode, size, 1);
2685 size = copy_to_mode_reg (size_mode, size);
52cf7115 2686
4ca79136
RH
2687 /* It is incorrect to use the libcall calling conventions to call
2688 memset in this context. This could be a user call to memset and
2689 the user may wish to examine the return value from memset. For
2690 targets where libcalls and normal calls have different conventions
2691 for returning pointers, we could end up generating incorrect code.
4bc973ae 2692
4ca79136 2693 For convenience, we generate the call to bzero this way as well. */
4bc973ae 2694
4ca79136
RH
2695 object_tree = make_tree (ptr_type_node, object);
2696 if (TARGET_MEM_FUNCTIONS)
2697 size_tree = make_tree (sizetype, size);
2698 else
2699 size_tree = make_tree (unsigned_type_node, size);
2700
2701 fn = clear_storage_libcall_fn (true);
2702 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2703 if (TARGET_MEM_FUNCTIONS)
2704 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2705 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2706
2707 /* Now we have to build up the CALL_EXPR itself. */
2708 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2709 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2710 call_expr, arg_list, NULL_TREE);
4ca79136
RH
2711
2712 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2713
2714 /* If we are initializing a readonly value, show the above call
2715 clobbered it. Otherwise, a load from it may erroneously be
2716 hoisted from a loop. */
2717 if (RTX_UNCHANGING_P (object))
2718 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2719
2720 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
2721}
2722
2723/* A subroutine of clear_storage_via_libcall. Create the tree node
2724 for the function we use for block clears. The first time FOR_CALL
2725 is true, we call assemble_external. */
2726
2727static GTY(()) tree block_clear_fn;
66c60e67 2728
9661b15f 2729void
502b8322 2730init_block_clear_fn (const char *asmspec)
4ca79136 2731{
9661b15f 2732 if (!block_clear_fn)
4ca79136 2733 {
9661b15f
JJ
2734 tree fn, args;
2735
4ca79136
RH
2736 if (TARGET_MEM_FUNCTIONS)
2737 {
2738 fn = get_identifier ("memset");
2739 args = build_function_type_list (ptr_type_node, ptr_type_node,
2740 integer_type_node, sizetype,
2741 NULL_TREE);
2742 }
2743 else
2744 {
2745 fn = get_identifier ("bzero");
2746 args = build_function_type_list (void_type_node, ptr_type_node,
2747 unsigned_type_node, NULL_TREE);
9de08200 2748 }
4ca79136
RH
2749
2750 fn = build_decl (FUNCTION_DECL, fn, args);
2751 DECL_EXTERNAL (fn) = 1;
2752 TREE_PUBLIC (fn) = 1;
2753 DECL_ARTIFICIAL (fn) = 1;
2754 TREE_NOTHROW (fn) = 1;
2755
2756 block_clear_fn = fn;
bbf6f052 2757 }
e9a25f70 2758
9661b15f
JJ
2759 if (asmspec)
2760 {
2761 SET_DECL_RTL (block_clear_fn, NULL_RTX);
2762 SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
2763 }
2764}
2765
2766static tree
502b8322 2767clear_storage_libcall_fn (int for_call)
9661b15f
JJ
2768{
2769 static bool emitted_extern;
2770
2771 if (!block_clear_fn)
2772 init_block_clear_fn (NULL);
2773
4ca79136
RH
2774 if (for_call && !emitted_extern)
2775 {
2776 emitted_extern = true;
9661b15f
JJ
2777 make_decl_rtl (block_clear_fn, NULL);
2778 assemble_external (block_clear_fn);
4ca79136 2779 }
bbf6f052 2780
9661b15f 2781 return block_clear_fn;
4ca79136
RH
2782}
2783\f
bbf6f052
RK
2784/* Generate code to copy Y into X.
2785 Both Y and X must have the same mode, except that
2786 Y can be a constant with VOIDmode.
2787 This mode cannot be BLKmode; use emit_block_move for that.
2788
2789 Return the last instruction emitted. */
2790
2791rtx
502b8322 2792emit_move_insn (rtx x, rtx y)
bbf6f052
RK
2793{
2794 enum machine_mode mode = GET_MODE (x);
de1b33dd 2795 rtx y_cst = NULL_RTX;
0c19a26f 2796 rtx last_insn, set;
bbf6f052
RK
2797
2798 x = protect_from_queue (x, 1);
2799 y = protect_from_queue (y, 0);
2800
2801 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2802 abort ();
2803
ee5332b8
RH
2804 /* Never force constant_p_rtx to memory. */
2805 if (GET_CODE (y) == CONSTANT_P_RTX)
2806 ;
51286de6 2807 else if (CONSTANT_P (y))
de1b33dd 2808 {
51286de6 2809 if (optimize
075fc17a 2810 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
51286de6
RH
2811 && (last_insn = compress_float_constant (x, y)))
2812 return last_insn;
2813
0c19a26f
RS
2814 y_cst = y;
2815
51286de6
RH
2816 if (!LEGITIMATE_CONSTANT_P (y))
2817 {
51286de6 2818 y = force_const_mem (mode, y);
3a04ff64
RH
2819
2820 /* If the target's cannot_force_const_mem prevented the spill,
2821 assume that the target's move expanders will also take care
2822 of the non-legitimate constant. */
2823 if (!y)
2824 y = y_cst;
51286de6 2825 }
de1b33dd 2826 }
bbf6f052
RK
2827
2828 /* If X or Y are memory references, verify that their addresses are valid
2829 for the machine. */
2830 if (GET_CODE (x) == MEM
2831 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2832 && ! push_operand (x, GET_MODE (x)))
2833 || (flag_force_addr
2834 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
792760b9 2835 x = validize_mem (x);
bbf6f052
RK
2836
2837 if (GET_CODE (y) == MEM
2838 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2839 || (flag_force_addr
2840 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
792760b9 2841 y = validize_mem (y);
bbf6f052
RK
2842
2843 if (mode == BLKmode)
2844 abort ();
2845
de1b33dd
AO
2846 last_insn = emit_move_insn_1 (x, y);
2847
0c19a26f
RS
2848 if (y_cst && GET_CODE (x) == REG
2849 && (set = single_set (last_insn)) != NULL_RTX
2850 && SET_DEST (set) == x
2851 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3d238248 2852 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
de1b33dd
AO
2853
2854 return last_insn;
261c4230
RS
2855}
2856
2857/* Low level part of emit_move_insn.
2858 Called just like emit_move_insn, but assumes X and Y
2859 are basically valid. */
2860
2861rtx
502b8322 2862emit_move_insn_1 (rtx x, rtx y)
261c4230
RS
2863{
2864 enum machine_mode mode = GET_MODE (x);
2865 enum machine_mode submode;
2866 enum mode_class class = GET_MODE_CLASS (mode);
261c4230 2867
dbbbbf3b 2868 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
3a94c984 2869 abort ();
76bbe028 2870
bbf6f052
RK
2871 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2872 return
2873 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2874
89742723 2875 /* Expand complex moves by moving real part and imag part, if possible. */
7308a047 2876 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
27e58a70 2877 && BLKmode != (submode = GET_MODE_INNER (mode))
7308a047
RS
2878 && (mov_optab->handlers[(int) submode].insn_code
2879 != CODE_FOR_nothing))
2880 {
2881 /* Don't split destination if it is a stack push. */
2882 int stack = push_operand (x, GET_MODE (x));
7308a047 2883
79ce92d7 2884#ifdef PUSH_ROUNDING
0e9cbd11
KH
2885 /* In case we output to the stack, but the size is smaller than the
2886 machine can push exactly, we need to use move instructions. */
1a06f5fe 2887 if (stack
bb93b973
RK
2888 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2889 != GET_MODE_SIZE (submode)))
1a06f5fe
JH
2890 {
2891 rtx temp;
bb93b973 2892 HOST_WIDE_INT offset1, offset2;
1a06f5fe
JH
2893
2894 /* Do not use anti_adjust_stack, since we don't want to update
2895 stack_pointer_delta. */
2896 temp = expand_binop (Pmode,
2897#ifdef STACK_GROWS_DOWNWARD
2898 sub_optab,
2899#else
2900 add_optab,
2901#endif
2902 stack_pointer_rtx,
2903 GEN_INT
bb93b973
RK
2904 (PUSH_ROUNDING
2905 (GET_MODE_SIZE (GET_MODE (x)))),
2906 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2907
1a06f5fe
JH
2908 if (temp != stack_pointer_rtx)
2909 emit_move_insn (stack_pointer_rtx, temp);
bb93b973 2910
1a06f5fe
JH
2911#ifdef STACK_GROWS_DOWNWARD
2912 offset1 = 0;
2913 offset2 = GET_MODE_SIZE (submode);
2914#else
2915 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2916 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2917 + GET_MODE_SIZE (submode));
2918#endif
bb93b973 2919
1a06f5fe
JH
2920 emit_move_insn (change_address (x, submode,
2921 gen_rtx_PLUS (Pmode,
2922 stack_pointer_rtx,
2923 GEN_INT (offset1))),
2924 gen_realpart (submode, y));
2925 emit_move_insn (change_address (x, submode,
2926 gen_rtx_PLUS (Pmode,
2927 stack_pointer_rtx,
2928 GEN_INT (offset2))),
2929 gen_imagpart (submode, y));
2930 }
e9c0bd54 2931 else
79ce92d7 2932#endif
7308a047
RS
2933 /* If this is a stack, push the highpart first, so it
2934 will be in the argument order.
2935
2936 In that case, change_address is used only to convert
2937 the mode, not to change the address. */
e9c0bd54 2938 if (stack)
c937357e 2939 {
e33c0d66
RS
2940 /* Note that the real part always precedes the imag part in memory
2941 regardless of machine's endianness. */
c937357e 2942#ifdef STACK_GROWS_DOWNWARD
a79b3dc7
RS
2943 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2944 gen_imagpart (submode, y));
2945 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2946 gen_realpart (submode, y));
c937357e 2947#else
a79b3dc7
RS
2948 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2949 gen_realpart (submode, y));
2950 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2951 gen_imagpart (submode, y));
c937357e
RS
2952#endif
2953 }
2954 else
2955 {
235ae7be
DM
2956 rtx realpart_x, realpart_y;
2957 rtx imagpart_x, imagpart_y;
2958
405f63da
MM
2959 /* If this is a complex value with each part being smaller than a
2960 word, the usual calling sequence will likely pack the pieces into
2961 a single register. Unfortunately, SUBREG of hard registers only
2962 deals in terms of words, so we have a problem converting input
2963 arguments to the CONCAT of two registers that is used elsewhere
2964 for complex values. If this is before reload, we can copy it into
2965 memory and reload. FIXME, we should see about using extract and
2966 insert on integer registers, but complex short and complex char
2967 variables should be rarely used. */
3a94c984 2968 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
405f63da
MM
2969 && (reload_in_progress | reload_completed) == 0)
2970 {
bb93b973
RK
2971 int packed_dest_p
2972 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2973 int packed_src_p
2974 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
405f63da
MM
2975
2976 if (packed_dest_p || packed_src_p)
2977 {
2978 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2979 ? MODE_FLOAT : MODE_INT);
2980
1da68f56
RK
2981 enum machine_mode reg_mode
2982 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
405f63da
MM
2983
2984 if (reg_mode != BLKmode)
2985 {
2986 rtx mem = assign_stack_temp (reg_mode,
2987 GET_MODE_SIZE (mode), 0);
f4ef873c 2988 rtx cmem = adjust_address (mem, mode, 0);
405f63da 2989
1da68f56
RK
2990 cfun->cannot_inline
2991 = N_("function using short complex types cannot be inline");
405f63da
MM
2992
2993 if (packed_dest_p)
2994 {
2995 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
bb93b973 2996
405f63da
MM
2997 emit_move_insn_1 (cmem, y);
2998 return emit_move_insn_1 (sreg, mem);
2999 }
3000 else
3001 {
3002 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
bb93b973 3003
405f63da
MM
3004 emit_move_insn_1 (mem, sreg);
3005 return emit_move_insn_1 (x, cmem);
3006 }
3007 }
3008 }
3009 }
3010
235ae7be
DM
3011 realpart_x = gen_realpart (submode, x);
3012 realpart_y = gen_realpart (submode, y);
3013 imagpart_x = gen_imagpart (submode, x);
3014 imagpart_y = gen_imagpart (submode, y);
3015
3016 /* Show the output dies here. This is necessary for SUBREGs
3017 of pseudos since we cannot track their lifetimes correctly;
c14c6529
RH
3018 hard regs shouldn't appear here except as return values.
3019 We never want to emit such a clobber after reload. */
3020 if (x != y
235ae7be
DM
3021 && ! (reload_in_progress || reload_completed)
3022 && (GET_CODE (realpart_x) == SUBREG
3023 || GET_CODE (imagpart_x) == SUBREG))
bb93b973 3024 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2638126a 3025
a79b3dc7
RS
3026 emit_move_insn (realpart_x, realpart_y);
3027 emit_move_insn (imagpart_x, imagpart_y);
c937357e 3028 }
7308a047 3029
7a1ab50a 3030 return get_last_insn ();
7308a047
RS
3031 }
3032
a3600c71
HPN
3033 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
3034 find a mode to do it in. If we have a movcc, use it. Otherwise,
3035 find the MODE_INT mode of the same width. */
3036 else if (GET_MODE_CLASS (mode) == MODE_CC
3037 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
3038 {
3039 enum insn_code insn_code;
3040 enum machine_mode tmode = VOIDmode;
3041 rtx x1 = x, y1 = y;
3042
3043 if (mode != CCmode
3044 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
3045 tmode = CCmode;
3046 else
3047 for (tmode = QImode; tmode != VOIDmode;
3048 tmode = GET_MODE_WIDER_MODE (tmode))
3049 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
3050 break;
3051
3052 if (tmode == VOIDmode)
3053 abort ();
3054
3055 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
3056 may call change_address which is not appropriate if we were
3057 called when a reload was in progress. We don't have to worry
3058 about changing the address since the size in bytes is supposed to
3059 be the same. Copy the MEM to change the mode and move any
3060 substitutions from the old MEM to the new one. */
3061
3062 if (reload_in_progress)
3063 {
3064 x = gen_lowpart_common (tmode, x1);
3065 if (x == 0 && GET_CODE (x1) == MEM)
3066 {
3067 x = adjust_address_nv (x1, tmode, 0);
3068 copy_replacements (x1, x);
3069 }
3070
3071 y = gen_lowpart_common (tmode, y1);
3072 if (y == 0 && GET_CODE (y1) == MEM)
3073 {
3074 y = adjust_address_nv (y1, tmode, 0);
3075 copy_replacements (y1, y);
3076 }
3077 }
3078 else
3079 {
3080 x = gen_lowpart (tmode, x);
3081 y = gen_lowpart (tmode, y);
3082 }
502b8322 3083
a3600c71
HPN
3084 insn_code = mov_optab->handlers[(int) tmode].insn_code;
3085 return emit_insn (GEN_FCN (insn_code) (x, y));
3086 }
3087
5581fc91
RS
3088 /* Try using a move pattern for the corresponding integer mode. This is
3089 only safe when simplify_subreg can convert MODE constants into integer
3090 constants. At present, it can only do this reliably if the value
3091 fits within a HOST_WIDE_INT. */
3092 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3093 && (submode = int_mode_for_mode (mode)) != BLKmode
3094 && mov_optab->handlers[submode].insn_code != CODE_FOR_nothing)
3095 return emit_insn (GEN_FCN (mov_optab->handlers[submode].insn_code)
3096 (simplify_gen_subreg (submode, x, mode, 0),
3097 simplify_gen_subreg (submode, y, mode, 0)));
3098
cffa2189
R
3099 /* This will handle any multi-word or full-word mode that lacks a move_insn
3100 pattern. However, you will get better code if you define such patterns,
bbf6f052 3101 even if they must turn into multiple assembler instructions. */
cffa2189 3102 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
bbf6f052
RK
3103 {
3104 rtx last_insn = 0;
3ef1eef4 3105 rtx seq, inner;
235ae7be 3106 int need_clobber;
bb93b973 3107 int i;
3a94c984 3108
a98c9f1a
RK
3109#ifdef PUSH_ROUNDING
3110
3111 /* If X is a push on the stack, do the push now and replace
3112 X with a reference to the stack pointer. */
3113 if (push_operand (x, GET_MODE (x)))
3114 {
918a6124
GK
3115 rtx temp;
3116 enum rtx_code code;
0fb7aeda 3117
918a6124
GK
3118 /* Do not use anti_adjust_stack, since we don't want to update
3119 stack_pointer_delta. */
3120 temp = expand_binop (Pmode,
3121#ifdef STACK_GROWS_DOWNWARD
3122 sub_optab,
3123#else
3124 add_optab,
3125#endif
3126 stack_pointer_rtx,
3127 GEN_INT
bb93b973
RK
3128 (PUSH_ROUNDING
3129 (GET_MODE_SIZE (GET_MODE (x)))),
a426c92e 3130 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
bb93b973 3131
0fb7aeda
KH
3132 if (temp != stack_pointer_rtx)
3133 emit_move_insn (stack_pointer_rtx, temp);
918a6124
GK
3134
3135 code = GET_CODE (XEXP (x, 0));
bb93b973 3136
918a6124
GK
3137 /* Just hope that small offsets off SP are OK. */
3138 if (code == POST_INC)
0fb7aeda 3139 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
bb93b973
RK
3140 GEN_INT (-((HOST_WIDE_INT)
3141 GET_MODE_SIZE (GET_MODE (x)))));
918a6124 3142 else if (code == POST_DEC)
0fb7aeda 3143 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
918a6124
GK
3144 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3145 else
3146 temp = stack_pointer_rtx;
3147
3148 x = change_address (x, VOIDmode, temp);
a98c9f1a
RK
3149 }
3150#endif
3a94c984 3151
3ef1eef4
RK
3152 /* If we are in reload, see if either operand is a MEM whose address
3153 is scheduled for replacement. */
3154 if (reload_in_progress && GET_CODE (x) == MEM
3155 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
f1ec5147 3156 x = replace_equiv_address_nv (x, inner);
3ef1eef4
RK
3157 if (reload_in_progress && GET_CODE (y) == MEM
3158 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
f1ec5147 3159 y = replace_equiv_address_nv (y, inner);
3ef1eef4 3160
235ae7be 3161 start_sequence ();
15a7a8ec 3162
235ae7be 3163 need_clobber = 0;
bbf6f052 3164 for (i = 0;
3a94c984 3165 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
bbf6f052
RK
3166 i++)
3167 {
3168 rtx xpart = operand_subword (x, i, 1, mode);
3169 rtx ypart = operand_subword (y, i, 1, mode);
3170
3171 /* If we can't get a part of Y, put Y into memory if it is a
3172 constant. Otherwise, force it into a register. If we still
3173 can't get a part of Y, abort. */
3174 if (ypart == 0 && CONSTANT_P (y))
3175 {
3176 y = force_const_mem (mode, y);
3177 ypart = operand_subword (y, i, 1, mode);
3178 }
3179 else if (ypart == 0)
3180 ypart = operand_subword_force (y, i, mode);
3181
3182 if (xpart == 0 || ypart == 0)
3183 abort ();
3184
235ae7be
DM
3185 need_clobber |= (GET_CODE (xpart) == SUBREG);
3186
bbf6f052
RK
3187 last_insn = emit_move_insn (xpart, ypart);
3188 }
6551fa4d 3189
2f937369 3190 seq = get_insns ();
235ae7be
DM
3191 end_sequence ();
3192
3193 /* Show the output dies here. This is necessary for SUBREGs
3194 of pseudos since we cannot track their lifetimes correctly;
3195 hard regs shouldn't appear here except as return values.
3196 We never want to emit such a clobber after reload. */
3197 if (x != y
3198 && ! (reload_in_progress || reload_completed)
3199 && need_clobber != 0)
bb93b973 3200 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
235ae7be
DM
3201
3202 emit_insn (seq);
3203
bbf6f052
RK
3204 return last_insn;
3205 }
3206 else
3207 abort ();
3208}
51286de6
RH
3209
3210/* If Y is representable exactly in a narrower mode, and the target can
3211 perform the extension directly from constant or memory, then emit the
3212 move as an extension. */
3213
3214static rtx
502b8322 3215compress_float_constant (rtx x, rtx y)
51286de6
RH
3216{
3217 enum machine_mode dstmode = GET_MODE (x);
3218 enum machine_mode orig_srcmode = GET_MODE (y);
3219 enum machine_mode srcmode;
3220 REAL_VALUE_TYPE r;
3221
3222 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3223
3224 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3225 srcmode != orig_srcmode;
3226 srcmode = GET_MODE_WIDER_MODE (srcmode))
3227 {
3228 enum insn_code ic;
3229 rtx trunc_y, last_insn;
3230
3231 /* Skip if the target can't extend this way. */
3232 ic = can_extend_p (dstmode, srcmode, 0);
3233 if (ic == CODE_FOR_nothing)
3234 continue;
3235
3236 /* Skip if the narrowed value isn't exact. */
3237 if (! exact_real_truncate (srcmode, &r))
3238 continue;
3239
3240 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3241
3242 if (LEGITIMATE_CONSTANT_P (trunc_y))
3243 {
3244 /* Skip if the target needs extra instructions to perform
3245 the extension. */
3246 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3247 continue;
3248 }
3249 else if (float_extend_from_mem[dstmode][srcmode])
3250 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3251 else
3252 continue;
3253
3254 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3255 last_insn = get_last_insn ();
3256
3257 if (GET_CODE (x) == REG)
0c19a26f 3258 set_unique_reg_note (last_insn, REG_EQUAL, y);
51286de6
RH
3259
3260 return last_insn;
3261 }
3262
3263 return NULL_RTX;
3264}
bbf6f052
RK
3265\f
3266/* Pushing data onto the stack. */
3267
3268/* Push a block of length SIZE (perhaps variable)
3269 and return an rtx to address the beginning of the block.
3270 Note that it is not possible for the value returned to be a QUEUED.
3271 The value may be virtual_outgoing_args_rtx.
3272
3273 EXTRA is the number of bytes of padding to push in addition to SIZE.
3274 BELOW nonzero means this padding comes at low addresses;
3275 otherwise, the padding comes at high addresses. */
3276
3277rtx
502b8322 3278push_block (rtx size, int extra, int below)
bbf6f052 3279{
b3694847 3280 rtx temp;
88f63c77
RK
3281
3282 size = convert_modes (Pmode, ptr_mode, size, 1);
bbf6f052
RK
3283 if (CONSTANT_P (size))
3284 anti_adjust_stack (plus_constant (size, extra));
3285 else if (GET_CODE (size) == REG && extra == 0)
3286 anti_adjust_stack (size);
3287 else
3288 {
ce48579b 3289 temp = copy_to_mode_reg (Pmode, size);
bbf6f052 3290 if (extra != 0)
906c4e36 3291 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
bbf6f052
RK
3292 temp, 0, OPTAB_LIB_WIDEN);
3293 anti_adjust_stack (temp);
3294 }
3295
f73ad30e 3296#ifndef STACK_GROWS_DOWNWARD
f73ad30e 3297 if (0)
f73ad30e
JH
3298#else
3299 if (1)
bbf6f052 3300#endif
f73ad30e 3301 {
f73ad30e
JH
3302 temp = virtual_outgoing_args_rtx;
3303 if (extra != 0 && below)
3304 temp = plus_constant (temp, extra);
3305 }
3306 else
3307 {
3308 if (GET_CODE (size) == CONST_INT)
3309 temp = plus_constant (virtual_outgoing_args_rtx,
3a94c984 3310 -INTVAL (size) - (below ? 0 : extra));
f73ad30e
JH
3311 else if (extra != 0 && !below)
3312 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3bdf5ad1 3313 negate_rtx (Pmode, plus_constant (size, extra)));
f73ad30e
JH
3314 else
3315 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3316 negate_rtx (Pmode, size));
3317 }
bbf6f052
RK
3318
3319 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3320}
3321
21d93687
RK
3322#ifdef PUSH_ROUNDING
3323
566aa174 3324/* Emit single push insn. */
21d93687 3325
566aa174 3326static void
502b8322 3327emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
566aa174 3328{
566aa174 3329 rtx dest_addr;
918a6124 3330 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
566aa174 3331 rtx dest;
371b8fc0
JH
3332 enum insn_code icode;
3333 insn_operand_predicate_fn pred;
566aa174 3334
371b8fc0
JH
3335 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3336 /* If there is push pattern, use it. Otherwise try old way of throwing
3337 MEM representing push operation to move expander. */
3338 icode = push_optab->handlers[(int) mode].insn_code;
3339 if (icode != CODE_FOR_nothing)
3340 {
3341 if (((pred = insn_data[(int) icode].operand[0].predicate)
505ddab6 3342 && !((*pred) (x, mode))))
371b8fc0
JH
3343 x = force_reg (mode, x);
3344 emit_insn (GEN_FCN (icode) (x));
3345 return;
3346 }
566aa174
JH
3347 if (GET_MODE_SIZE (mode) == rounded_size)
3348 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
329d586f
KH
3349 /* If we are to pad downward, adjust the stack pointer first and
3350 then store X into the stack location using an offset. This is
3351 because emit_move_insn does not know how to pad; it does not have
3352 access to type. */
3353 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3354 {
3355 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3356 HOST_WIDE_INT offset;
3357
3358 emit_move_insn (stack_pointer_rtx,
3359 expand_binop (Pmode,
3360#ifdef STACK_GROWS_DOWNWARD
3361 sub_optab,
3362#else
3363 add_optab,
3364#endif
3365 stack_pointer_rtx,
3366 GEN_INT (rounded_size),
3367 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3368
3369 offset = (HOST_WIDE_INT) padding_size;
3370#ifdef STACK_GROWS_DOWNWARD
3371 if (STACK_PUSH_CODE == POST_DEC)
3372 /* We have already decremented the stack pointer, so get the
3373 previous value. */
3374 offset += (HOST_WIDE_INT) rounded_size;
3375#else
3376 if (STACK_PUSH_CODE == POST_INC)
3377 /* We have already incremented the stack pointer, so get the
3378 previous value. */
3379 offset -= (HOST_WIDE_INT) rounded_size;
3380#endif
3381 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3382 }
566aa174
JH
3383 else
3384 {
3385#ifdef STACK_GROWS_DOWNWARD
329d586f 3386 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
566aa174 3387 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
505ddab6 3388 GEN_INT (-(HOST_WIDE_INT) rounded_size));
566aa174 3389#else
329d586f 3390 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
566aa174
JH
3391 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3392 GEN_INT (rounded_size));
3393#endif
3394 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3395 }
3396
3397 dest = gen_rtx_MEM (mode, dest_addr);
3398
566aa174
JH
3399 if (type != 0)
3400 {
3401 set_mem_attributes (dest, type, 1);
c3d32120
RK
3402
3403 if (flag_optimize_sibling_calls)
3404 /* Function incoming arguments may overlap with sibling call
3405 outgoing arguments and we cannot allow reordering of reads
3406 from function arguments with stores to outgoing arguments
3407 of sibling calls. */
3408 set_mem_alias_set (dest, 0);
566aa174
JH
3409 }
3410 emit_move_insn (dest, x);
566aa174 3411}
21d93687 3412#endif
566aa174 3413
bbf6f052
RK
3414/* Generate code to push X onto the stack, assuming it has mode MODE and
3415 type TYPE.
3416 MODE is redundant except when X is a CONST_INT (since they don't
3417 carry mode info).
3418 SIZE is an rtx for the size of data to be copied (in bytes),
3419 needed only if X is BLKmode.
3420
f1eaaf73 3421 ALIGN (in bits) is maximum alignment we can assume.
bbf6f052 3422
cd048831
RK
3423 If PARTIAL and REG are both nonzero, then copy that many of the first
3424 words of X into registers starting with REG, and push the rest of X.
bbf6f052
RK
3425 The amount of space pushed is decreased by PARTIAL words,
3426 rounded *down* to a multiple of PARM_BOUNDARY.
3427 REG must be a hard register in this case.
cd048831
RK
3428 If REG is zero but PARTIAL is not, take any all others actions for an
3429 argument partially in registers, but do not actually load any
3430 registers.
bbf6f052
RK
3431
3432 EXTRA is the amount in bytes of extra space to leave next to this arg.
6dc42e49 3433 This is ignored if an argument block has already been allocated.
bbf6f052
RK
3434
3435 On a machine that lacks real push insns, ARGS_ADDR is the address of
3436 the bottom of the argument block for this call. We use indexing off there
3437 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
3438 argument block has not been preallocated.
3439
e5e809f4
JL
3440 ARGS_SO_FAR is the size of args previously pushed for this call.
3441
3442 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3443 for arguments passed in registers. If nonzero, it will be the number
3444 of bytes required. */
bbf6f052
RK
3445
3446void
502b8322
AJ
3447emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3448 unsigned int align, int partial, rtx reg, int extra,
3449 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3450 rtx alignment_pad)
bbf6f052
RK
3451{
3452 rtx xinner;
3453 enum direction stack_direction
3454#ifdef STACK_GROWS_DOWNWARD
3455 = downward;
3456#else
3457 = upward;
3458#endif
3459
3460 /* Decide where to pad the argument: `downward' for below,
3461 `upward' for above, or `none' for don't pad it.
3462 Default is below for small data on big-endian machines; else above. */
3463 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3464
0fb7aeda 3465 /* Invert direction if stack is post-decrement.
9e0e11bf
GK
3466 FIXME: why? */
3467 if (STACK_PUSH_CODE == POST_DEC)
bbf6f052
RK
3468 if (where_pad != none)
3469 where_pad = (where_pad == downward ? upward : downward);
3470
3471 xinner = x = protect_from_queue (x, 0);
3472
3473 if (mode == BLKmode)
3474 {
3475 /* Copy a block into the stack, entirely or partially. */
3476
b3694847 3477 rtx temp;
bbf6f052 3478 int used = partial * UNITS_PER_WORD;
531547e9 3479 int offset;
bbf6f052 3480 int skip;
3a94c984 3481
531547e9
FJ
3482 if (reg && GET_CODE (reg) == PARALLEL)
3483 {
3484 /* Use the size of the elt to compute offset. */
3485 rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
3486 used = partial * GET_MODE_SIZE (GET_MODE (elt));
3487 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3488 }
3489 else
3490 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3491
bbf6f052
RK
3492 if (size == 0)
3493 abort ();
3494
3495 used -= offset;
3496
3497 /* USED is now the # of bytes we need not copy to the stack
3498 because registers will take care of them. */
3499
3500 if (partial != 0)
f4ef873c 3501 xinner = adjust_address (xinner, BLKmode, used);
bbf6f052
RK
3502
3503 /* If the partial register-part of the arg counts in its stack size,
3504 skip the part of stack space corresponding to the registers.
3505 Otherwise, start copying to the beginning of the stack space,
3506 by setting SKIP to 0. */
e5e809f4 3507 skip = (reg_parm_stack_space == 0) ? 0 : used;
bbf6f052
RK
3508
3509#ifdef PUSH_ROUNDING
3510 /* Do it with several push insns if that doesn't take lots of insns
3511 and if there is no difficulty with push insns that skip bytes
3512 on the stack for alignment purposes. */
3513 if (args_addr == 0
f73ad30e 3514 && PUSH_ARGS
bbf6f052
RK
3515 && GET_CODE (size) == CONST_INT
3516 && skip == 0
f26aca6d 3517 && MEM_ALIGN (xinner) >= align
15914757 3518 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
bbf6f052
RK
3519 /* Here we avoid the case of a structure whose weak alignment
3520 forces many pushes of a small amount of data,
3521 and such small pushes do rounding that causes trouble. */
e1565e65 3522 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
19caa751 3523 || align >= BIGGEST_ALIGNMENT
f1eaaf73
DE
3524 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3525 == (align / BITS_PER_UNIT)))
bbf6f052
RK
3526 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3527 {
3528 /* Push padding now if padding above and stack grows down,
3529 or if padding below and stack grows up.
3530 But if space already allocated, this has already been done. */
3531 if (extra && args_addr == 0
3532 && where_pad != none && where_pad != stack_direction)
906c4e36 3533 anti_adjust_stack (GEN_INT (extra));
bbf6f052 3534
8fd3cf4e 3535 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
bbf6f052
RK
3536 }
3537 else
3a94c984 3538#endif /* PUSH_ROUNDING */
bbf6f052 3539 {
7ab923cc
JJ
3540 rtx target;
3541
bbf6f052
RK
3542 /* Otherwise make space on the stack and copy the data
3543 to the address of that space. */
3544
3545 /* Deduct words put into registers from the size we must copy. */
3546 if (partial != 0)
3547 {
3548 if (GET_CODE (size) == CONST_INT)
906c4e36 3549 size = GEN_INT (INTVAL (size) - used);
bbf6f052
RK
3550 else
3551 size = expand_binop (GET_MODE (size), sub_optab, size,
906c4e36
RK
3552 GEN_INT (used), NULL_RTX, 0,
3553 OPTAB_LIB_WIDEN);
bbf6f052
RK
3554 }
3555
3556 /* Get the address of the stack space.
3557 In this case, we do not deal with EXTRA separately.
3558 A single stack adjust will do. */
3559 if (! args_addr)
3560 {
3561 temp = push_block (size, extra, where_pad == downward);
3562 extra = 0;
3563 }
3564 else if (GET_CODE (args_so_far) == CONST_INT)
3565 temp = memory_address (BLKmode,
3566 plus_constant (args_addr,
3567 skip + INTVAL (args_so_far)));
3568 else
3569 temp = memory_address (BLKmode,
38a448ca
RH
3570 plus_constant (gen_rtx_PLUS (Pmode,
3571 args_addr,
3572 args_so_far),
bbf6f052 3573 skip));
4ca79136
RH
3574
3575 if (!ACCUMULATE_OUTGOING_ARGS)
3576 {
3577 /* If the source is referenced relative to the stack pointer,
3578 copy it to another register to stabilize it. We do not need
3579 to do this if we know that we won't be changing sp. */
3580
3581 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3582 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3583 temp = copy_to_reg (temp);
3584 }
3585
3a94c984 3586 target = gen_rtx_MEM (BLKmode, temp);
7ab923cc 3587
3a94c984
KH
3588 if (type != 0)
3589 {
3590 set_mem_attributes (target, type, 1);
3591 /* Function incoming arguments may overlap with sibling call
3592 outgoing arguments and we cannot allow reordering of reads
3593 from function arguments with stores to outgoing arguments
3594 of sibling calls. */
ba4828e0 3595 set_mem_alias_set (target, 0);
3a94c984 3596 }
4ca79136 3597
44bb111a
RH
3598 /* ALIGN may well be better aligned than TYPE, e.g. due to
3599 PARM_BOUNDARY. Assume the caller isn't lying. */
3600 set_mem_align (target, align);
4ca79136 3601
44bb111a 3602 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
bbf6f052
RK
3603 }
3604 }
3605 else if (partial > 0)
3606 {
3607 /* Scalar partly in registers. */
3608
3609 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3610 int i;
3611 int not_stack;
3612 /* # words of start of argument
3613 that we must make space for but need not store. */
3614 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3615 int args_offset = INTVAL (args_so_far);
3616 int skip;
3617
3618 /* Push padding now if padding above and stack grows down,
3619 or if padding below and stack grows up.
3620 But if space already allocated, this has already been done. */
3621 if (extra && args_addr == 0
3622 && where_pad != none && where_pad != stack_direction)
906c4e36 3623 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
3624
3625 /* If we make space by pushing it, we might as well push
3626 the real data. Otherwise, we can leave OFFSET nonzero
3627 and leave the space uninitialized. */
3628 if (args_addr == 0)
3629 offset = 0;
3630
3631 /* Now NOT_STACK gets the number of words that we don't need to
3632 allocate on the stack. */
3633 not_stack = partial - offset;
3634
3635 /* If the partial register-part of the arg counts in its stack size,
3636 skip the part of stack space corresponding to the registers.
3637 Otherwise, start copying to the beginning of the stack space,
3638 by setting SKIP to 0. */
e5e809f4 3639 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
bbf6f052
RK
3640
3641 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3642 x = validize_mem (force_const_mem (mode, x));
3643
3644 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3645 SUBREGs of such registers are not allowed. */
3646 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3647 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3648 x = copy_to_reg (x);
3649
3650 /* Loop over all the words allocated on the stack for this arg. */
3651 /* We can do it by words, because any scalar bigger than a word
3652 has a size a multiple of a word. */
3653#ifndef PUSH_ARGS_REVERSED
3654 for (i = not_stack; i < size; i++)
3655#else
3656 for (i = size - 1; i >= not_stack; i--)
3657#endif
3658 if (i >= not_stack + offset)
3659 emit_push_insn (operand_subword_force (x, i, mode),
906c4e36
RK
3660 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3661 0, args_addr,
3662 GEN_INT (args_offset + ((i - not_stack + skip)
e5e809f4 3663 * UNITS_PER_WORD)),
4fc026cd 3664 reg_parm_stack_space, alignment_pad);
bbf6f052
RK
3665 }
3666 else
3667 {
3668 rtx addr;
3bdf5ad1 3669 rtx dest;
bbf6f052
RK
3670
3671 /* Push padding now if padding above and stack grows down,
3672 or if padding below and stack grows up.
3673 But if space already allocated, this has already been done. */
3674 if (extra && args_addr == 0
3675 && where_pad != none && where_pad != stack_direction)
906c4e36 3676 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
3677
3678#ifdef PUSH_ROUNDING
f73ad30e 3679 if (args_addr == 0 && PUSH_ARGS)
566aa174 3680 emit_single_push_insn (mode, x, type);
bbf6f052
RK
3681 else
3682#endif
921b3427
RK
3683 {
3684 if (GET_CODE (args_so_far) == CONST_INT)
3685 addr
3686 = memory_address (mode,
3a94c984 3687 plus_constant (args_addr,
921b3427 3688 INTVAL (args_so_far)));
3a94c984 3689 else
38a448ca
RH
3690 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3691 args_so_far));
566aa174
JH
3692 dest = gen_rtx_MEM (mode, addr);
3693 if (type != 0)
3694 {
3695 set_mem_attributes (dest, type, 1);
3696 /* Function incoming arguments may overlap with sibling call
3697 outgoing arguments and we cannot allow reordering of reads
3698 from function arguments with stores to outgoing arguments
3699 of sibling calls. */
ba4828e0 3700 set_mem_alias_set (dest, 0);
566aa174 3701 }
bbf6f052 3702
566aa174 3703 emit_move_insn (dest, x);
566aa174 3704 }
bbf6f052
RK
3705 }
3706
bbf6f052
RK
3707 /* If part should go in registers, copy that part
3708 into the appropriate registers. Do this now, at the end,
3709 since mem-to-mem copies above may do function calls. */
cd048831 3710 if (partial > 0 && reg != 0)
fffa9c1d
JW
3711 {
3712 /* Handle calls that pass values in multiple non-contiguous locations.
3713 The Irix 6 ABI has examples of this. */
3714 if (GET_CODE (reg) == PARALLEL)
6e985040 3715 emit_group_load (reg, x, type, -1);
fffa9c1d
JW
3716 else
3717 move_block_to_reg (REGNO (reg), x, partial, mode);
3718 }
bbf6f052
RK
3719
3720 if (extra && args_addr == 0 && where_pad == stack_direction)
906c4e36 3721 anti_adjust_stack (GEN_INT (extra));
3a94c984 3722
3ea2292a 3723 if (alignment_pad && args_addr == 0)
4fc026cd 3724 anti_adjust_stack (alignment_pad);
bbf6f052
RK
3725}
3726\f
296b4ed9
RK
3727/* Return X if X can be used as a subtarget in a sequence of arithmetic
3728 operations. */
3729
3730static rtx
502b8322 3731get_subtarget (rtx x)
296b4ed9
RK
3732{
3733 return ((x == 0
3734 /* Only registers can be subtargets. */
3735 || GET_CODE (x) != REG
3736 /* If the register is readonly, it can't be set more than once. */
3737 || RTX_UNCHANGING_P (x)
3738 /* Don't use hard regs to avoid extending their life. */
3739 || REGNO (x) < FIRST_PSEUDO_REGISTER
3740 /* Avoid subtargets inside loops,
3741 since they hide some invariant expressions. */
3742 || preserve_subexpressions_p ())
3743 ? 0 : x);
3744}
3745
bbf6f052
RK
3746/* Expand an assignment that stores the value of FROM into TO.
3747 If WANT_VALUE is nonzero, return an rtx for the value of TO.
709f5be1
RS
3748 (This may contain a QUEUED rtx;
3749 if the value is constant, this rtx is a constant.)
b90f141a 3750 Otherwise, the returned value is NULL_RTX. */
bbf6f052
RK
3751
3752rtx
b90f141a 3753expand_assignment (tree to, tree from, int want_value)
bbf6f052 3754{
b3694847 3755 rtx to_rtx = 0;
bbf6f052
RK
3756 rtx result;
3757
3758 /* Don't crash if the lhs of the assignment was erroneous. */
3759
3760 if (TREE_CODE (to) == ERROR_MARK)
709f5be1
RS
3761 {
3762 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3763 return want_value ? result : NULL_RTX;
3764 }
bbf6f052
RK
3765
3766 /* Assignment of a structure component needs special treatment
3767 if the structure component's rtx is not simply a MEM.
6be58303
JW
3768 Assignment of an array element at a constant index, and assignment of
3769 an array element in an unaligned packed structure field, has the same
3770 problem. */
bbf6f052 3771
08293add 3772 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
7c02ae17
DE
3773 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
3774 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
bbf6f052
RK
3775 {
3776 enum machine_mode mode1;
770ae6cc 3777 HOST_WIDE_INT bitsize, bitpos;
a06ef755 3778 rtx orig_to_rtx;
7bb0943f 3779 tree offset;
bbf6f052
RK
3780 int unsignedp;
3781 int volatilep = 0;
0088fcb1
RK
3782 tree tem;
3783
3784 push_temp_slots ();
839c4796 3785 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
a06ef755 3786 &unsignedp, &volatilep);
bbf6f052
RK
3787
3788 /* If we are going to use store_bit_field and extract_bit_field,
3789 make sure to_rtx will be safe for multiple use. */
3790
3791 if (mode1 == VOIDmode && want_value)
3792 tem = stabilize_reference (tem);
3793
1ed1b4fb
RK
3794 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3795
7bb0943f
RS
3796 if (offset != 0)
3797 {
e3c8ea67 3798 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
7bb0943f
RS
3799
3800 if (GET_CODE (to_rtx) != MEM)
3801 abort ();
bd070e1a 3802
bd070e1a 3803#ifdef POINTERS_EXTEND_UNSIGNED
4b6c1672 3804 if (GET_MODE (offset_rtx) != Pmode)
267b28bd 3805 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
fa06ab5c
RK
3806#else
3807 if (GET_MODE (offset_rtx) != ptr_mode)
3808 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
bd070e1a 3809#endif
bd070e1a 3810
9a7b9f4f
JL
3811 /* A constant address in TO_RTX can have VOIDmode, we must not try
3812 to call force_reg for that case. Avoid that case. */
89752202
HB
3813 if (GET_CODE (to_rtx) == MEM
3814 && GET_MODE (to_rtx) == BLKmode
9a7b9f4f 3815 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
a06ef755 3816 && bitsize > 0
3a94c984 3817 && (bitpos % bitsize) == 0
89752202 3818 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
a06ef755 3819 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
89752202 3820 {
e3c8ea67 3821 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
89752202
HB
3822 bitpos = 0;
3823 }
3824
0d4903b8 3825 to_rtx = offset_address (to_rtx, offset_rtx,
d50a16c4
EB
3826 highest_pow2_factor_for_target (to,
3827 offset));
7bb0943f 3828 }
c5c76735 3829
998d7deb
RH
3830 if (GET_CODE (to_rtx) == MEM)
3831 {
998d7deb
RH
3832 /* If the field is at offset zero, we could have been given the
3833 DECL_RTX of the parent struct. Don't munge it. */
3834 to_rtx = shallow_copy_rtx (to_rtx);
3835
6f1087be 3836 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
998d7deb 3837 }
effbcc6a 3838
a06ef755
RK
3839 /* Deal with volatile and readonly fields. The former is only done
3840 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3841 if (volatilep && GET_CODE (to_rtx) == MEM)
3842 {
3843 if (to_rtx == orig_to_rtx)
3844 to_rtx = copy_rtx (to_rtx);
3845 MEM_VOLATILE_P (to_rtx) = 1;
bbf6f052
RK
3846 }
3847
956d6950 3848 if (TREE_CODE (to) == COMPONENT_REF
d76bc29c
EB
3849 && TREE_READONLY (TREE_OPERAND (to, 1))
3850 /* We can't assert that a MEM won't be set more than once
3851 if the component is not addressable because another
3852 non-addressable component may be referenced by the same MEM. */
3853 && ! (GET_CODE (to_rtx) == MEM && ! can_address_p (to)))
956d6950 3854 {
a06ef755 3855 if (to_rtx == orig_to_rtx)
956d6950 3856 to_rtx = copy_rtx (to_rtx);
956d6950
JL
3857 RTX_UNCHANGING_P (to_rtx) = 1;
3858 }
3859
a84b4898 3860 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
a06ef755
RK
3861 {
3862 if (to_rtx == orig_to_rtx)
3863 to_rtx = copy_rtx (to_rtx);
3864 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3865 }
3866
a06ef755
RK
3867 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3868 (want_value
3869 /* Spurious cast for HPUX compiler. */
3870 ? ((enum machine_mode)
3871 TYPE_MODE (TREE_TYPE (to)))
3872 : VOIDmode),
3873 unsignedp, TREE_TYPE (tem), get_alias_set (to));
a69beca1 3874
a06ef755
RK
3875 preserve_temp_slots (result);
3876 free_temp_slots ();
3877 pop_temp_slots ();
a69beca1 3878
a06ef755
RK
3879 /* If the value is meaningful, convert RESULT to the proper mode.
3880 Otherwise, return nothing. */
3881 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3882 TYPE_MODE (TREE_TYPE (from)),
3883 result,
8df83eae 3884 TYPE_UNSIGNED (TREE_TYPE (to)))
a06ef755 3885 : NULL_RTX);
bbf6f052
RK
3886 }
3887
cd1db108
RS
3888 /* If the rhs is a function call and its value is not an aggregate,
3889 call the function before we start to compute the lhs.
3890 This is needed for correct code for cases such as
3891 val = setjmp (buf) on machines where reference to val
1ad87b63
RK
3892 requires loading up part of an address in a separate insn.
3893
1858863b
JW
3894 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3895 since it might be a promoted variable where the zero- or sign- extension
3896 needs to be done. Handling this in the normal way is safe because no
3897 computation is done before the call. */
61f71b34 3898 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
b35cd3c1 3899 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
1858863b
JW
3900 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3901 && GET_CODE (DECL_RTL (to)) == REG))
cd1db108 3902 {
0088fcb1
RK
3903 rtx value;
3904
3905 push_temp_slots ();
3906 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
cd1db108 3907 if (to_rtx == 0)
37a08a29 3908 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
aaf87c45 3909
fffa9c1d
JW
3910 /* Handle calls that return values in multiple non-contiguous locations.
3911 The Irix 6 ABI has examples of this. */
3912 if (GET_CODE (to_rtx) == PARALLEL)
6e985040
AM
3913 emit_group_load (to_rtx, value, TREE_TYPE (from),
3914 int_size_in_bytes (TREE_TYPE (from)));
fffa9c1d 3915 else if (GET_MODE (to_rtx) == BLKmode)
44bb111a 3916 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
aaf87c45 3917 else
6419e5b0 3918 {
5ae6cd0d 3919 if (POINTER_TYPE_P (TREE_TYPE (to)))
6419e5b0 3920 value = convert_memory_address (GET_MODE (to_rtx), value);
6419e5b0
DT
3921 emit_move_insn (to_rtx, value);
3922 }
cd1db108
RS
3923 preserve_temp_slots (to_rtx);
3924 free_temp_slots ();
0088fcb1 3925 pop_temp_slots ();
709f5be1 3926 return want_value ? to_rtx : NULL_RTX;
cd1db108
RS
3927 }
3928
bbf6f052
RK
3929 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3930 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3931
3932 if (to_rtx == 0)
37a08a29 3933 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
bbf6f052 3934
86d38d25 3935 /* Don't move directly into a return register. */
14a774a9
RK
3936 if (TREE_CODE (to) == RESULT_DECL
3937 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
86d38d25 3938 {
0088fcb1
RK
3939 rtx temp;
3940
3941 push_temp_slots ();
3942 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
14a774a9
RK
3943
3944 if (GET_CODE (to_rtx) == PARALLEL)
6e985040
AM
3945 emit_group_load (to_rtx, temp, TREE_TYPE (from),
3946 int_size_in_bytes (TREE_TYPE (from)));
14a774a9
RK
3947 else
3948 emit_move_insn (to_rtx, temp);
3949
86d38d25
RS
3950 preserve_temp_slots (to_rtx);
3951 free_temp_slots ();
0088fcb1 3952 pop_temp_slots ();
709f5be1 3953 return want_value ? to_rtx : NULL_RTX;
86d38d25
RS
3954 }
3955
bbf6f052
RK
3956 /* In case we are returning the contents of an object which overlaps
3957 the place the value is being stored, use a safe function when copying
3958 a value through a pointer into a structure value return block. */
3959 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3960 && current_function_returns_struct
3961 && !current_function_returns_pcc_struct)
3962 {
0088fcb1
RK
3963 rtx from_rtx, size;
3964
3965 push_temp_slots ();
33a20d10 3966 size = expr_size (from);
37a08a29 3967 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
bbf6f052 3968
4ca79136
RH
3969 if (TARGET_MEM_FUNCTIONS)
3970 emit_library_call (memmove_libfunc, LCT_NORMAL,
3971 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3972 XEXP (from_rtx, 0), Pmode,
3973 convert_to_mode (TYPE_MODE (sizetype),
8df83eae 3974 size, TYPE_UNSIGNED (sizetype)),
4ca79136
RH
3975 TYPE_MODE (sizetype));
3976 else
3977 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3978 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3979 XEXP (to_rtx, 0), Pmode,
3980 convert_to_mode (TYPE_MODE (integer_type_node),
3981 size,
8df83eae 3982 TYPE_UNSIGNED (integer_type_node)),
4ca79136 3983 TYPE_MODE (integer_type_node));
bbf6f052
RK
3984
3985 preserve_temp_slots (to_rtx);
3986 free_temp_slots ();
0088fcb1 3987 pop_temp_slots ();
709f5be1 3988 return want_value ? to_rtx : NULL_RTX;
bbf6f052
RK
3989 }
3990
3991 /* Compute FROM and store the value in the rtx we got. */
3992
0088fcb1 3993 push_temp_slots ();
bbf6f052
RK
3994 result = store_expr (from, to_rtx, want_value);
3995 preserve_temp_slots (result);
3996 free_temp_slots ();
0088fcb1 3997 pop_temp_slots ();
709f5be1 3998 return want_value ? result : NULL_RTX;
bbf6f052
RK
3999}
4000
4001/* Generate code for computing expression EXP,
4002 and storing the value into TARGET.
bbf6f052
RK
4003 TARGET may contain a QUEUED rtx.
4004
8403445a 4005 If WANT_VALUE & 1 is nonzero, return a copy of the value
709f5be1
RS
4006 not in TARGET, so that we can be sure to use the proper
4007 value in a containing expression even if TARGET has something
4008 else stored in it. If possible, we copy the value through a pseudo
4009 and return that pseudo. Or, if the value is constant, we try to
4010 return the constant. In some cases, we return a pseudo
4011 copied *from* TARGET.
4012
4013 If the mode is BLKmode then we may return TARGET itself.
4014 It turns out that in BLKmode it doesn't cause a problem.
4015 because C has no operators that could combine two different
4016 assignments into the same BLKmode object with different values
4017 with no sequence point. Will other languages need this to
4018 be more thorough?
4019
8403445a 4020 If WANT_VALUE & 1 is 0, we return NULL, to make sure
709f5be1 4021 to catch quickly any cases where the caller uses the value
8403445a
AM
4022 and fails to set WANT_VALUE.
4023
4024 If WANT_VALUE & 2 is set, this is a store into a call param on the
4025 stack, and block moves may need to be treated specially. */
bbf6f052
RK
4026
4027rtx
502b8322 4028store_expr (tree exp, rtx target, int want_value)
bbf6f052 4029{
b3694847 4030 rtx temp;
0fab64a3 4031 rtx alt_rtl = NULL_RTX;
1bbd65cd 4032 rtx mark = mark_queue ();
bbf6f052 4033 int dont_return_target = 0;
e5408e52 4034 int dont_store_target = 0;
bbf6f052 4035
847311f4
AL
4036 if (VOID_TYPE_P (TREE_TYPE (exp)))
4037 {
4038 /* C++ can generate ?: expressions with a throw expression in one
4039 branch and an rvalue in the other. Here, we resolve attempts to
4d6922ee 4040 store the throw expression's nonexistent result. */
847311f4
AL
4041 if (want_value)
4042 abort ();
4043 expand_expr (exp, const0_rtx, VOIDmode, 0);
4044 return NULL_RTX;
4045 }
bbf6f052
RK
4046 if (TREE_CODE (exp) == COMPOUND_EXPR)
4047 {
4048 /* Perform first part of compound expression, then assign from second
4049 part. */
8403445a
AM
4050 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4051 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
bbf6f052 4052 emit_queue ();
709f5be1 4053 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
bbf6f052
RK
4054 }
4055 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4056 {
4057 /* For conditional expression, get safe form of the target. Then
4058 test the condition, doing the appropriate assignment on either
4059 side. This avoids the creation of unnecessary temporaries.
4060 For non-BLKmode, it is more efficient not to do this. */
4061
4062 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4063
4064 emit_queue ();
4065 target = protect_from_queue (target, 1);
4066
dabf8373 4067 do_pending_stack_adjust ();
bbf6f052
RK
4068 NO_DEFER_POP;
4069 jumpifnot (TREE_OPERAND (exp, 0), lab1);
956d6950 4070 start_cleanup_deferral ();
8403445a 4071 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
956d6950 4072 end_cleanup_deferral ();
bbf6f052
RK
4073 emit_queue ();
4074 emit_jump_insn (gen_jump (lab2));
4075 emit_barrier ();
4076 emit_label (lab1);
956d6950 4077 start_cleanup_deferral ();
8403445a 4078 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
956d6950 4079 end_cleanup_deferral ();
bbf6f052
RK
4080 emit_queue ();
4081 emit_label (lab2);
4082 OK_DEFER_POP;
a3a58acc 4083
8403445a 4084 return want_value & 1 ? target : NULL_RTX;
bbf6f052 4085 }
bbf6f052 4086 else if (queued_subexp_p (target))
709f5be1
RS
4087 /* If target contains a postincrement, let's not risk
4088 using it as the place to generate the rhs. */
bbf6f052
RK
4089 {
4090 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4091 {
4092 /* Expand EXP into a new pseudo. */
4093 temp = gen_reg_rtx (GET_MODE (target));
8403445a
AM
4094 temp = expand_expr (exp, temp, GET_MODE (target),
4095 (want_value & 2
4096 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
bbf6f052
RK
4097 }
4098 else
8403445a
AM
4099 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4100 (want_value & 2
4101 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
709f5be1
RS
4102
4103 /* If target is volatile, ANSI requires accessing the value
4104 *from* the target, if it is accessed. So make that happen.
4105 In no case return the target itself. */
8403445a 4106 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
709f5be1 4107 dont_return_target = 1;
bbf6f052 4108 }
8403445a
AM
4109 else if ((want_value & 1) != 0
4110 && GET_CODE (target) == MEM
4111 && ! MEM_VOLATILE_P (target)
12f06d17
CH
4112 && GET_MODE (target) != BLKmode)
4113 /* If target is in memory and caller wants value in a register instead,
4114 arrange that. Pass TARGET as target for expand_expr so that,
4115 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4116 We know expand_expr will not use the target in that case.
4117 Don't do this if TARGET is volatile because we are supposed
4118 to write it and then read it. */
4119 {
8403445a
AM
4120 temp = expand_expr (exp, target, GET_MODE (target),
4121 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
12f06d17 4122 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
e5408e52
JJ
4123 {
4124 /* If TEMP is already in the desired TARGET, only copy it from
4125 memory and don't store it there again. */
4126 if (temp == target
4127 || (rtx_equal_p (temp, target)
4128 && ! side_effects_p (temp) && ! side_effects_p (target)))
4129 dont_store_target = 1;
4130 temp = copy_to_reg (temp);
4131 }
12f06d17
CH
4132 dont_return_target = 1;
4133 }
1499e0a8 4134 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
09da1532 4135 /* If this is a scalar in a register that is stored in a wider mode
1499e0a8
RK
4136 than the declared mode, compute the result into its declared mode
4137 and then convert to the wider mode. Our value is the computed
4138 expression. */
4139 {
b76b08ef
RK
4140 rtx inner_target = 0;
4141
5a32d038 4142 /* If we don't want a value, we can do the conversion inside EXP,
f635a84d
RK
4143 which will often result in some optimizations. Do the conversion
4144 in two steps: first change the signedness, if needed, then
ab6c58f1
RK
4145 the extend. But don't do this if the type of EXP is a subtype
4146 of something else since then the conversion might involve
4147 more than just converting modes. */
8403445a
AM
4148 if ((want_value & 1) == 0
4149 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
ab6c58f1 4150 && TREE_TYPE (TREE_TYPE (exp)) == 0)
f635a84d 4151 {
8df83eae 4152 if (TYPE_UNSIGNED (TREE_TYPE (exp))
f635a84d 4153 != SUBREG_PROMOTED_UNSIGNED_P (target))
ceef8ce4 4154 exp = convert
ae2bcd98 4155 (lang_hooks.types.signed_or_unsigned_type
ceef8ce4 4156 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
f635a84d 4157
ae2bcd98 4158 exp = convert (lang_hooks.types.type_for_mode
b0c48229
NB
4159 (GET_MODE (SUBREG_REG (target)),
4160 SUBREG_PROMOTED_UNSIGNED_P (target)),
f635a84d 4161 exp);
b76b08ef
RK
4162
4163 inner_target = SUBREG_REG (target);
f635a84d 4164 }
3a94c984 4165
8403445a
AM
4166 temp = expand_expr (exp, inner_target, VOIDmode,
4167 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
b258707c 4168
7abec5be 4169 /* If TEMP is a MEM and we want a result value, make the access
502b8322
AJ
4170 now so it gets done only once. Strictly speaking, this is
4171 only necessary if the MEM is volatile, or if the address
7abec5be
RH
4172 overlaps TARGET. But not performing the load twice also
4173 reduces the amount of rtl we generate and then have to CSE. */
8403445a 4174 if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
766f36c7
RK
4175 temp = copy_to_reg (temp);
4176
b258707c
RS
4177 /* If TEMP is a VOIDmode constant, use convert_modes to make
4178 sure that we properly convert it. */
4179 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
1f1b0541
RH
4180 {
4181 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4182 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4183 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4184 GET_MODE (target), temp,
4185 SUBREG_PROMOTED_UNSIGNED_P (target));
4186 }
b258707c 4187
1499e0a8
RK
4188 convert_move (SUBREG_REG (target), temp,
4189 SUBREG_PROMOTED_UNSIGNED_P (target));
3dbecef9
JW
4190
4191 /* If we promoted a constant, change the mode back down to match
4192 target. Otherwise, the caller might get confused by a result whose
4193 mode is larger than expected. */
4194
8403445a 4195 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
3dbecef9 4196 {
b3ca30df
JJ
4197 if (GET_MODE (temp) != VOIDmode)
4198 {
4199 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4200 SUBREG_PROMOTED_VAR_P (temp) = 1;
0fb7aeda 4201 SUBREG_PROMOTED_UNSIGNED_SET (temp,
7879b81e 4202 SUBREG_PROMOTED_UNSIGNED_P (target));
b3ca30df
JJ
4203 }
4204 else
4205 temp = convert_modes (GET_MODE (target),
4206 GET_MODE (SUBREG_REG (target)),
4207 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
3dbecef9
JW
4208 }
4209
8403445a 4210 return want_value & 1 ? temp : NULL_RTX;
1499e0a8 4211 }
bbf6f052
RK
4212 else
4213 {
0fab64a3
MM
4214 temp = expand_expr_real (exp, target, GET_MODE (target),
4215 (want_value & 2
4216 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4217 &alt_rtl);
766f36c7 4218 /* Return TARGET if it's a specified hardware register.
709f5be1
RS
4219 If TARGET is a volatile mem ref, either return TARGET
4220 or return a reg copied *from* TARGET; ANSI requires this.
4221
4222 Otherwise, if TEMP is not TARGET, return TEMP
4223 if it is constant (for efficiency),
4224 or if we really want the correct value. */
bbf6f052
RK
4225 if (!(target && GET_CODE (target) == REG
4226 && REGNO (target) < FIRST_PSEUDO_REGISTER)
709f5be1 4227 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
effbcc6a 4228 && ! rtx_equal_p (temp, target)
8403445a 4229 && (CONSTANT_P (temp) || (want_value & 1) != 0))
bbf6f052
RK
4230 dont_return_target = 1;
4231 }
4232
b258707c
RS
4233 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4234 the same as that of TARGET, adjust the constant. This is needed, for
4235 example, in case it is a CONST_DOUBLE and we want only a word-sized
4236 value. */
4237 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
c1da1f33 4238 && TREE_CODE (exp) != ERROR_MARK
b258707c
RS
4239 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4240 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
8df83eae 4241 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
b258707c 4242
bbf6f052 4243 /* If value was not generated in the target, store it there.
1bbd65cd
EB
4244 Convert the value to TARGET's type first if necessary and emit the
4245 pending incrementations that have been queued when expanding EXP.
4246 Note that we cannot emit the whole queue blindly because this will
4247 effectively disable the POST_INC optimization later.
4248
37a08a29 4249 If TEMP and TARGET compare equal according to rtx_equal_p, but
f3f2255a
R
4250 one or both of them are volatile memory refs, we have to distinguish
4251 two cases:
4252 - expand_expr has used TARGET. In this case, we must not generate
4253 another copy. This can be detected by TARGET being equal according
4254 to == .
4255 - expand_expr has not used TARGET - that means that the source just
4256 happens to have the same RTX form. Since temp will have been created
4257 by expand_expr, it will compare unequal according to == .
4258 We must generate a copy in this case, to reach the correct number
4259 of volatile memory references. */
bbf6f052 4260
6036acbb 4261 if ((! rtx_equal_p (temp, target)
f3f2255a
R
4262 || (temp != target && (side_effects_p (temp)
4263 || side_effects_p (target))))
e5408e52 4264 && TREE_CODE (exp) != ERROR_MARK
a9772b60 4265 && ! dont_store_target
9c5c5f2c
MM
4266 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4267 but TARGET is not valid memory reference, TEMP will differ
4268 from TARGET although it is really the same location. */
0fab64a3 4269 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
e56fc090
HPN
4270 /* If there's nothing to copy, don't bother. Don't call expr_size
4271 unless necessary, because some front-ends (C++) expr_size-hook
4272 aborts on objects that are not supposed to be bit-copied or
4273 bit-initialized. */
4274 && expr_size (exp) != const0_rtx)
bbf6f052 4275 {
1bbd65cd 4276 emit_insns_enqueued_after_mark (mark);
bbf6f052 4277 target = protect_from_queue (target, 1);
e6d55fd7 4278 temp = protect_from_queue (temp, 0);
bbf6f052 4279 if (GET_MODE (temp) != GET_MODE (target)
f0348c25 4280 && GET_MODE (temp) != VOIDmode)
bbf6f052 4281 {
8df83eae 4282 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
bbf6f052
RK
4283 if (dont_return_target)
4284 {
4285 /* In this case, we will return TEMP,
4286 so make sure it has the proper mode.
4287 But don't forget to store the value into TARGET. */
4288 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4289 emit_move_insn (target, temp);
4290 }
4291 else
4292 convert_move (target, temp, unsignedp);
4293 }
4294
4295 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4296 {
c24ae149
RK
4297 /* Handle copying a string constant into an array. The string
4298 constant may be shorter than the array. So copy just the string's
4299 actual length, and clear the rest. First get the size of the data
4300 type of the string, which is actually the size of the target. */
4301 rtx size = expr_size (exp);
bbf6f052 4302
e87b4f3f
RS
4303 if (GET_CODE (size) == CONST_INT
4304 && INTVAL (size) < TREE_STRING_LENGTH (exp))
8403445a
AM
4305 emit_block_move (target, temp, size,
4306 (want_value & 2
4307 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
e87b4f3f 4308 else
bbf6f052 4309 {
e87b4f3f
RS
4310 /* Compute the size of the data to copy from the string. */
4311 tree copy_size
c03b7665 4312 = size_binop (MIN_EXPR,
b50d17a1 4313 make_tree (sizetype, size),
fed3cef0 4314 size_int (TREE_STRING_LENGTH (exp)));
8403445a
AM
4315 rtx copy_size_rtx
4316 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4317 (want_value & 2
4318 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
e87b4f3f
RS
4319 rtx label = 0;
4320
4321 /* Copy that much. */
267b28bd 4322 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
8df83eae 4323 TYPE_UNSIGNED (sizetype));
8403445a
AM
4324 emit_block_move (target, temp, copy_size_rtx,
4325 (want_value & 2
4326 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
e87b4f3f 4327
88f63c77
RK
4328 /* Figure out how much is left in TARGET that we have to clear.
4329 Do all calculations in ptr_mode. */
e87b4f3f
RS
4330 if (GET_CODE (copy_size_rtx) == CONST_INT)
4331 {
c24ae149
RK
4332 size = plus_constant (size, -INTVAL (copy_size_rtx));
4333 target = adjust_address (target, BLKmode,
4334 INTVAL (copy_size_rtx));
e87b4f3f
RS
4335 }
4336 else
4337 {
fa06ab5c 4338 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
906c4e36
RK
4339 copy_size_rtx, NULL_RTX, 0,
4340 OPTAB_LIB_WIDEN);
e87b4f3f 4341
c24ae149
RK
4342#ifdef POINTERS_EXTEND_UNSIGNED
4343 if (GET_MODE (copy_size_rtx) != Pmode)
267b28bd 4344 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
8df83eae 4345 TYPE_UNSIGNED (sizetype));
c24ae149
RK
4346#endif
4347
4348 target = offset_address (target, copy_size_rtx,
4349 highest_pow2_factor (copy_size));
e87b4f3f 4350 label = gen_label_rtx ();
c5d5d461 4351 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
a06ef755 4352 GET_MODE (size), 0, label);
e87b4f3f
RS
4353 }
4354
4355 if (size != const0_rtx)
37a08a29 4356 clear_storage (target, size);
22619c3f 4357
e87b4f3f
RS
4358 if (label)
4359 emit_label (label);
bbf6f052
RK
4360 }
4361 }
fffa9c1d
JW
4362 /* Handle calls that return values in multiple non-contiguous locations.
4363 The Irix 6 ABI has examples of this. */
4364 else if (GET_CODE (target) == PARALLEL)
6e985040
AM
4365 emit_group_load (target, temp, TREE_TYPE (exp),
4366 int_size_in_bytes (TREE_TYPE (exp)));
bbf6f052 4367 else if (GET_MODE (temp) == BLKmode)
8403445a
AM
4368 emit_block_move (target, temp, expr_size (exp),
4369 (want_value & 2
4370 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
bbf6f052 4371 else
b0dccb00
RH
4372 {
4373 temp = force_operand (temp, target);
4374 if (temp != target)
4375 emit_move_insn (target, temp);
4376 }
bbf6f052 4377 }
709f5be1 4378
766f36c7 4379 /* If we don't want a value, return NULL_RTX. */
8403445a 4380 if ((want_value & 1) == 0)
766f36c7
RK
4381 return NULL_RTX;
4382
4383 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4384 ??? The latter test doesn't seem to make sense. */
4385 else if (dont_return_target && GET_CODE (temp) != MEM)
bbf6f052 4386 return temp;
766f36c7
RK
4387
4388 /* Return TARGET itself if it is a hard register. */
8403445a
AM
4389 else if ((want_value & 1) != 0
4390 && GET_MODE (target) != BLKmode
766f36c7
RK
4391 && ! (GET_CODE (target) == REG
4392 && REGNO (target) < FIRST_PSEUDO_REGISTER))
709f5be1 4393 return copy_to_reg (target);
3a94c984 4394
766f36c7 4395 else
709f5be1 4396 return target;
bbf6f052
RK
4397}
4398\f
40209195 4399/* Return 1 if EXP just contains zeros. FIXME merge with initializer_zerop. */
9de08200
RK
4400
4401static int
502b8322 4402is_zeros_p (tree exp)
9de08200
RK
4403{
4404 tree elt;
4405
4406 switch (TREE_CODE (exp))
4407 {
4408 case CONVERT_EXPR:
4409 case NOP_EXPR:
4410 case NON_LVALUE_EXPR:
ed239f5a 4411 case VIEW_CONVERT_EXPR:
9de08200
RK
4412 return is_zeros_p (TREE_OPERAND (exp, 0));
4413
4414 case INTEGER_CST:
05bccae2 4415 return integer_zerop (exp);
9de08200
RK
4416
4417 case COMPLEX_CST:
4418 return
4419 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4420
4421 case REAL_CST:
41c9120b 4422 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
9de08200 4423
69ef87e2
AH
4424 case VECTOR_CST:
4425 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4426 elt = TREE_CHAIN (elt))
4427 if (!is_zeros_p (TREE_VALUE (elt)))
4428 return 0;
4429
4430 return 1;
4431
9de08200 4432 case CONSTRUCTOR:
e1a43f73
PB
4433 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4434 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
9de08200
RK
4435 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4436 if (! is_zeros_p (TREE_VALUE (elt)))
4437 return 0;
4438
4439 return 1;
3a94c984 4440
e9a25f70
JL
4441 default:
4442 return 0;
9de08200 4443 }
9de08200
RK
4444}
4445
4446/* Return 1 if EXP contains mostly (3/4) zeros. */
4447
40209195 4448int
502b8322 4449mostly_zeros_p (tree exp)
9de08200 4450{
9de08200
RK
4451 if (TREE_CODE (exp) == CONSTRUCTOR)
4452 {
e1a43f73
PB
4453 int elts = 0, zeros = 0;
4454 tree elt = CONSTRUCTOR_ELTS (exp);
4455 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4456 {
4457 /* If there are no ranges of true bits, it is all zero. */
4458 return elt == NULL_TREE;
4459 }
4460 for (; elt; elt = TREE_CHAIN (elt))
4461 {
4462 /* We do not handle the case where the index is a RANGE_EXPR,
4463 so the statistic will be somewhat inaccurate.
4464 We do make a more accurate count in store_constructor itself,
4465 so since this function is only used for nested array elements,
0f41302f 4466 this should be close enough. */
e1a43f73
PB
4467 if (mostly_zeros_p (TREE_VALUE (elt)))
4468 zeros++;
4469 elts++;
4470 }
9de08200
RK
4471
4472 return 4 * zeros >= 3 * elts;
4473 }
4474
4475 return is_zeros_p (exp);
4476}
4477\f
e1a43f73
PB
4478/* Helper function for store_constructor.
4479 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4480 TYPE is the type of the CONSTRUCTOR, not the element type.
04050c69 4481 CLEARED is as for store_constructor.
23cb1766 4482 ALIAS_SET is the alias set to use for any stores.
23ccec44
JW
4483
4484 This provides a recursive shortcut back to store_constructor when it isn't
4485 necessary to go through store_field. This is so that we can pass through
4486 the cleared field to let store_constructor know that we may not have to
4487 clear a substructure if the outer structure has already been cleared. */
e1a43f73
PB
4488
4489static void
502b8322
AJ
4490store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4491 HOST_WIDE_INT bitpos, enum machine_mode mode,
4492 tree exp, tree type, int cleared, int alias_set)
e1a43f73
PB
4493{
4494 if (TREE_CODE (exp) == CONSTRUCTOR
23ccec44 4495 && bitpos % BITS_PER_UNIT == 0
cc2902df 4496 /* If we have a nonzero bitpos for a register target, then we just
23ccec44
JW
4497 let store_field do the bitfield handling. This is unlikely to
4498 generate unnecessary clear instructions anyways. */
4499 && (bitpos == 0 || GET_CODE (target) == MEM))
e1a43f73 4500 {
61cb205c
RK
4501 if (GET_CODE (target) == MEM)
4502 target
4503 = adjust_address (target,
4504 GET_MODE (target) == BLKmode
4505 || 0 != (bitpos
4506 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4507 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
23cb1766 4508
e0339ef7 4509
04050c69 4510 /* Update the alias set, if required. */
10b76d73
RK
4511 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4512 && MEM_ALIAS_SET (target) != 0)
70072ed9
RK
4513 {
4514 target = copy_rtx (target);
4515 set_mem_alias_set (target, alias_set);
4516 }
e0339ef7 4517
04050c69 4518 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
e1a43f73
PB
4519 }
4520 else
a06ef755
RK
4521 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4522 alias_set);
e1a43f73
PB
4523}
4524
bbf6f052 4525/* Store the value of constructor EXP into the rtx TARGET.
04050c69
RK
4526 TARGET is either a REG or a MEM; we know it cannot conflict, since
4527 safe_from_p has been called.
b7010412
RK
4528 CLEARED is true if TARGET is known to have been zero'd.
4529 SIZE is the number of bytes of TARGET we are allowed to modify: this
4530 may not be the same as the size of EXP if we are assigning to a field
4531 which has been packed to exclude padding bits. */
bbf6f052
RK
4532
4533static void
502b8322 4534store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
bbf6f052 4535{
4af3895e 4536 tree type = TREE_TYPE (exp);
a5efcd63 4537#ifdef WORD_REGISTER_OPERATIONS
13eb1f7f 4538 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
a5efcd63 4539#endif
4af3895e 4540
e44842fe
RK
4541 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4542 || TREE_CODE (type) == QUAL_UNION_TYPE)
bbf6f052 4543 {
b3694847 4544 tree elt;
bbf6f052 4545
2c430630
RS
4546 /* If size is zero or the target is already cleared, do nothing. */
4547 if (size == 0 || cleared)
4548 cleared = 1;
04050c69 4549 /* We either clear the aggregate or indicate the value is dead. */
2c430630
RS
4550 else if ((TREE_CODE (type) == UNION_TYPE
4551 || TREE_CODE (type) == QUAL_UNION_TYPE)
4552 && ! CONSTRUCTOR_ELTS (exp))
04050c69 4553 /* If the constructor is empty, clear the union. */
a59f8640 4554 {
04050c69
RK
4555 clear_storage (target, expr_size (exp));
4556 cleared = 1;
a59f8640 4557 }
4af3895e
JVA
4558
4559 /* If we are building a static constructor into a register,
4560 set the initial value as zero so we can fold the value into
67225c15
RK
4561 a constant. But if more than one register is involved,
4562 this probably loses. */
2c430630 4563 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
67225c15 4564 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
9de08200 4565 {
04050c69 4566 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
9de08200
RK
4567 cleared = 1;
4568 }
4569
4570 /* If the constructor has fewer fields than the structure
4571 or if we are initializing the structure to mostly zeros,
0d97bf4c 4572 clear the whole structure first. Don't do this if TARGET is a
fcf1b822
RK
4573 register whose mode size isn't equal to SIZE since clear_storage
4574 can't handle this case. */
2c430630
RS
4575 else if (((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
4576 || mostly_zeros_p (exp))
fcf1b822 4577 && (GET_CODE (target) != REG
04050c69
RK
4578 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4579 == size)))
9de08200 4580 {
337f4314
RK
4581 rtx xtarget = target;
4582
4583 if (readonly_fields_p (type))
4584 {
4585 xtarget = copy_rtx (xtarget);
4586 RTX_UNCHANGING_P (xtarget) = 1;
4587 }
4588
4589 clear_storage (xtarget, GEN_INT (size));
9de08200
RK
4590 cleared = 1;
4591 }
04050c69
RK
4592
4593 if (! cleared)
38a448ca 4594 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
bbf6f052
RK
4595
4596 /* Store each element of the constructor into
4597 the corresponding field of TARGET. */
4598
4599 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4600 {
b3694847 4601 tree field = TREE_PURPOSE (elt);
34c73909 4602 tree value = TREE_VALUE (elt);
b3694847 4603 enum machine_mode mode;
770ae6cc
RK
4604 HOST_WIDE_INT bitsize;
4605 HOST_WIDE_INT bitpos = 0;
770ae6cc 4606 tree offset;
b50d17a1 4607 rtx to_rtx = target;
bbf6f052 4608
f32fd778
RS
4609 /* Just ignore missing fields.
4610 We cleared the whole structure, above,
4611 if any fields are missing. */
4612 if (field == 0)
4613 continue;
4614
8b6000fc 4615 if (cleared && is_zeros_p (value))
e1a43f73 4616 continue;
9de08200 4617
770ae6cc
RK
4618 if (host_integerp (DECL_SIZE (field), 1))
4619 bitsize = tree_low_cst (DECL_SIZE (field), 1);
14a774a9
RK
4620 else
4621 bitsize = -1;
4622
bbf6f052
RK
4623 mode = DECL_MODE (field);
4624 if (DECL_BIT_FIELD (field))
4625 mode = VOIDmode;
4626
770ae6cc
RK
4627 offset = DECL_FIELD_OFFSET (field);
4628 if (host_integerp (offset, 0)
4629 && host_integerp (bit_position (field), 0))
4630 {
4631 bitpos = int_bit_position (field);
4632 offset = 0;
4633 }
b50d17a1 4634 else
770ae6cc 4635 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
3a94c984 4636
b50d17a1
RK
4637 if (offset)
4638 {
4639 rtx offset_rtx;
4640
6fce44af
RK
4641 offset
4642 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4643 make_tree (TREE_TYPE (exp),
4644 target));
bbf6f052 4645
b50d17a1
RK
4646 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4647 if (GET_CODE (to_rtx) != MEM)
4648 abort ();
4649
bd070e1a 4650#ifdef POINTERS_EXTEND_UNSIGNED
4b6c1672 4651 if (GET_MODE (offset_rtx) != Pmode)
267b28bd 4652 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
fa06ab5c
RK
4653#else
4654 if (GET_MODE (offset_rtx) != ptr_mode)
4655 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
bd070e1a 4656#endif
bd070e1a 4657
0d4903b8
RK
4658 to_rtx = offset_address (to_rtx, offset_rtx,
4659 highest_pow2_factor (offset));
b50d17a1 4660 }
c5c76735 4661
4e44c1ef 4662 if (TREE_READONLY (field))
cf04eb80 4663 {
9151b3bf 4664 if (GET_CODE (to_rtx) == MEM)
effbcc6a
RK
4665 to_rtx = copy_rtx (to_rtx);
4666
cf04eb80
RK
4667 RTX_UNCHANGING_P (to_rtx) = 1;
4668 }
4669
34c73909
R
4670#ifdef WORD_REGISTER_OPERATIONS
4671 /* If this initializes a field that is smaller than a word, at the
4672 start of a word, try to widen it to a full word.
4673 This special case allows us to output C++ member function
4674 initializations in a form that the optimizers can understand. */
770ae6cc 4675 if (GET_CODE (target) == REG
34c73909
R
4676 && bitsize < BITS_PER_WORD
4677 && bitpos % BITS_PER_WORD == 0
4678 && GET_MODE_CLASS (mode) == MODE_INT
4679 && TREE_CODE (value) == INTEGER_CST
13eb1f7f
RK
4680 && exp_size >= 0
4681 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
34c73909
R
4682 {
4683 tree type = TREE_TYPE (value);
04050c69 4684
34c73909
R
4685 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4686 {
ae2bcd98 4687 type = lang_hooks.types.type_for_size
8df83eae 4688 (BITS_PER_WORD, TYPE_UNSIGNED (type));
34c73909
R
4689 value = convert (type, value);
4690 }
04050c69 4691
34c73909
R
4692 if (BYTES_BIG_ENDIAN)
4693 value
4694 = fold (build (LSHIFT_EXPR, type, value,
4695 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4696 bitsize = BITS_PER_WORD;
4697 mode = word_mode;
4698 }
4699#endif
10b76d73
RK
4700
4701 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4702 && DECL_NONADDRESSABLE_P (field))
4703 {
4704 to_rtx = copy_rtx (to_rtx);
4705 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4706 }
4707
c5c76735 4708 store_constructor_field (to_rtx, bitsize, bitpos, mode,
8b6000fc 4709 value, type, cleared,
10b76d73 4710 get_alias_set (TREE_TYPE (field)));
bbf6f052
RK
4711 }
4712 }
e6834654
SS
4713 else if (TREE_CODE (type) == ARRAY_TYPE
4714 || TREE_CODE (type) == VECTOR_TYPE)
bbf6f052 4715 {
b3694847
SS
4716 tree elt;
4717 int i;
e1a43f73 4718 int need_to_clear;
4af3895e 4719 tree domain = TYPE_DOMAIN (type);
4af3895e 4720 tree elttype = TREE_TYPE (type);
e6834654 4721 int const_bounds_p;
ae0ed63a
JM
4722 HOST_WIDE_INT minelt = 0;
4723 HOST_WIDE_INT maxelt = 0;
997404de
JH
4724 int icode = 0;
4725 rtx *vector = NULL;
4726 int elt_size = 0;
4727 unsigned n_elts = 0;
85f3d674 4728
e6834654
SS
4729 /* Vectors are like arrays, but the domain is stored via an array
4730 type indirectly. */
4731 if (TREE_CODE (type) == VECTOR_TYPE)
4732 {
4733 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
4734 the same field as TYPE_DOMAIN, we are not guaranteed that
4735 it always will. */
4736 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4737 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
997404de
JH
4738 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
4739 {
4740 enum machine_mode mode = GET_MODE (target);
4741
4742 icode = (int) vec_init_optab->handlers[mode].insn_code;
4743 if (icode != CODE_FOR_nothing)
4744 {
4745 unsigned int i;
4746
4747 elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
4748 n_elts = (GET_MODE_SIZE (mode) / elt_size);
4749 vector = alloca (n_elts);
4750 for (i = 0; i < n_elts; i++)
4751 vector [i] = CONST0_RTX (GET_MODE_INNER (mode));
4752 }
4753 }
e6834654
SS
4754 }
4755
4756 const_bounds_p = (TYPE_MIN_VALUE (domain)
4757 && TYPE_MAX_VALUE (domain)
4758 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4759 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4760
85f3d674
RK
4761 /* If we have constant bounds for the range of the type, get them. */
4762 if (const_bounds_p)
4763 {
4764 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4765 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4766 }
bbf6f052 4767
e1a43f73 4768 /* If the constructor has fewer elements than the array,
38e01259 4769 clear the whole array first. Similarly if this is
e1a43f73
PB
4770 static constructor of a non-BLKmode object. */
4771 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4772 need_to_clear = 1;
4773 else
4774 {
4775 HOST_WIDE_INT count = 0, zero_count = 0;
85f3d674
RK
4776 need_to_clear = ! const_bounds_p;
4777
e1a43f73
PB
4778 /* This loop is a more accurate version of the loop in
4779 mostly_zeros_p (it handles RANGE_EXPR in an index).
4780 It is also needed to check for missing elements. */
4781 for (elt = CONSTRUCTOR_ELTS (exp);
85f3d674 4782 elt != NULL_TREE && ! need_to_clear;
df0faff1 4783 elt = TREE_CHAIN (elt))
e1a43f73
PB
4784 {
4785 tree index = TREE_PURPOSE (elt);
4786 HOST_WIDE_INT this_node_count;
19caa751 4787
e1a43f73
PB
4788 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4789 {
4790 tree lo_index = TREE_OPERAND (index, 0);
4791 tree hi_index = TREE_OPERAND (index, 1);
05bccae2 4792
19caa751
RK
4793 if (! host_integerp (lo_index, 1)
4794 || ! host_integerp (hi_index, 1))
e1a43f73
PB
4795 {
4796 need_to_clear = 1;
4797 break;
4798 }
19caa751
RK
4799
4800 this_node_count = (tree_low_cst (hi_index, 1)
4801 - tree_low_cst (lo_index, 1) + 1);
e1a43f73
PB
4802 }
4803 else
4804 this_node_count = 1;
85f3d674 4805
e1a43f73
PB
4806 count += this_node_count;
4807 if (mostly_zeros_p (TREE_VALUE (elt)))
4808 zero_count += this_node_count;
4809 }
85f3d674 4810
8e958f70 4811 /* Clear the entire array first if there are any missing elements,
0f41302f 4812 or if the incidence of zero elements is >= 75%. */
85f3d674
RK
4813 if (! need_to_clear
4814 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
e1a43f73
PB
4815 need_to_clear = 1;
4816 }
85f3d674 4817
997404de 4818 if (need_to_clear && size > 0 && !vector)
9de08200
RK
4819 {
4820 if (! cleared)
725e58b1
RK
4821 {
4822 if (REG_P (target))
4823 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4824 else
4825 clear_storage (target, GEN_INT (size));
4826 }
9de08200
RK
4827 cleared = 1;
4828 }
df4556a3 4829 else if (REG_P (target))
bbf6f052 4830 /* Inform later passes that the old value is dead. */
38a448ca 4831 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
bbf6f052
RK
4832
4833 /* Store each element of the constructor into
4834 the corresponding element of TARGET, determined
4835 by counting the elements. */
4836 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4837 elt;
4838 elt = TREE_CHAIN (elt), i++)
4839 {
b3694847 4840 enum machine_mode mode;
19caa751
RK
4841 HOST_WIDE_INT bitsize;
4842 HOST_WIDE_INT bitpos;
bbf6f052 4843 int unsignedp;
e1a43f73 4844 tree value = TREE_VALUE (elt);
03dc44a6
RS
4845 tree index = TREE_PURPOSE (elt);
4846 rtx xtarget = target;
bbf6f052 4847
e1a43f73
PB
4848 if (cleared && is_zeros_p (value))
4849 continue;
9de08200 4850
8df83eae 4851 unsignedp = TYPE_UNSIGNED (elttype);
14a774a9
RK
4852 mode = TYPE_MODE (elttype);
4853 if (mode == BLKmode)
19caa751
RK
4854 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4855 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4856 : -1);
14a774a9
RK
4857 else
4858 bitsize = GET_MODE_BITSIZE (mode);
bbf6f052 4859
e1a43f73
PB
4860 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4861 {
4862 tree lo_index = TREE_OPERAND (index, 0);
4863 tree hi_index = TREE_OPERAND (index, 1);
4977bab6 4864 rtx index_r, pos_rtx, loop_end;
e1a43f73 4865 struct nesting *loop;
05c0b405
PB
4866 HOST_WIDE_INT lo, hi, count;
4867 tree position;
e1a43f73 4868
997404de
JH
4869 if (vector)
4870 abort ();
4871
0f41302f 4872 /* If the range is constant and "small", unroll the loop. */
85f3d674
RK
4873 if (const_bounds_p
4874 && host_integerp (lo_index, 0)
19caa751
RK
4875 && host_integerp (hi_index, 0)
4876 && (lo = tree_low_cst (lo_index, 0),
4877 hi = tree_low_cst (hi_index, 0),
05c0b405
PB
4878 count = hi - lo + 1,
4879 (GET_CODE (target) != MEM
4880 || count <= 2
19caa751
RK
4881 || (host_integerp (TYPE_SIZE (elttype), 1)
4882 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4883 <= 40 * 8)))))
e1a43f73 4884 {
05c0b405
PB
4885 lo -= minelt; hi -= minelt;
4886 for (; lo <= hi; lo++)
e1a43f73 4887 {
19caa751 4888 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
10b76d73
RK
4889
4890 if (GET_CODE (target) == MEM
4891 && !MEM_KEEP_ALIAS_SET_P (target)
e6834654 4892 && TREE_CODE (type) == ARRAY_TYPE
10b76d73
RK
4893 && TYPE_NONALIASED_COMPONENT (type))
4894 {
4895 target = copy_rtx (target);
4896 MEM_KEEP_ALIAS_SET_P (target) = 1;
4897 }
4898
23cb1766 4899 store_constructor_field
04050c69
RK
4900 (target, bitsize, bitpos, mode, value, type, cleared,
4901 get_alias_set (elttype));
e1a43f73
PB
4902 }
4903 }
4904 else
4905 {
4977bab6 4906 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
e1a43f73
PB
4907 loop_end = gen_label_rtx ();
4908
8df83eae 4909 unsignedp = TYPE_UNSIGNED (domain);
e1a43f73
PB
4910
4911 index = build_decl (VAR_DECL, NULL_TREE, domain);
4912
19e7881c 4913 index_r
e1a43f73
PB
4914 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4915 &unsignedp, 0));
19e7881c 4916 SET_DECL_RTL (index, index_r);
e1a43f73
PB
4917 if (TREE_CODE (value) == SAVE_EXPR
4918 && SAVE_EXPR_RTL (value) == 0)
4919 {
0f41302f
MS
4920 /* Make sure value gets expanded once before the
4921 loop. */
e1a43f73
PB
4922 expand_expr (value, const0_rtx, VOIDmode, 0);
4923 emit_queue ();
4924 }
4925 store_expr (lo_index, index_r, 0);
4926 loop = expand_start_loop (0);
4927
0f41302f 4928 /* Assign value to element index. */
fed3cef0
RK
4929 position
4930 = convert (ssizetype,
4931 fold (build (MINUS_EXPR, TREE_TYPE (index),
4932 index, TYPE_MIN_VALUE (domain))));
4933 position = size_binop (MULT_EXPR, position,
4934 convert (ssizetype,
4935 TYPE_SIZE_UNIT (elttype)));
4936
e1a43f73 4937 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
0d4903b8
RK
4938 xtarget = offset_address (target, pos_rtx,
4939 highest_pow2_factor (position));
4940 xtarget = adjust_address (xtarget, mode, 0);
e1a43f73 4941 if (TREE_CODE (value) == CONSTRUCTOR)
04050c69 4942 store_constructor (value, xtarget, cleared,
b7010412 4943 bitsize / BITS_PER_UNIT);
e1a43f73
PB
4944 else
4945 store_expr (value, xtarget, 0);
4946
4947 expand_exit_loop_if_false (loop,
4948 build (LT_EXPR, integer_type_node,
4949 index, hi_index));
4950
4951 expand_increment (build (PREINCREMENT_EXPR,
4952 TREE_TYPE (index),
7b8b9722 4953 index, integer_one_node), 0, 0);
e1a43f73
PB
4954 expand_end_loop ();
4955 emit_label (loop_end);
e1a43f73
PB
4956 }
4957 }
19caa751
RK
4958 else if ((index != 0 && ! host_integerp (index, 0))
4959 || ! host_integerp (TYPE_SIZE (elttype), 1))
03dc44a6 4960 {
03dc44a6
RS
4961 tree position;
4962
997404de
JH
4963 if (vector)
4964 abort ();
4965
5b6c44ff 4966 if (index == 0)
fed3cef0 4967 index = ssize_int (1);
5b6c44ff 4968
e1a43f73 4969 if (minelt)
fed3cef0
RK
4970 index = convert (ssizetype,
4971 fold (build (MINUS_EXPR, index,
4972 TYPE_MIN_VALUE (domain))));
19caa751 4973
fed3cef0
RK
4974 position = size_binop (MULT_EXPR, index,
4975 convert (ssizetype,
4976 TYPE_SIZE_UNIT (elttype)));
0d4903b8
RK
4977 xtarget = offset_address (target,
4978 expand_expr (position, 0, VOIDmode, 0),
4979 highest_pow2_factor (position));
4980 xtarget = adjust_address (xtarget, mode, 0);
e1a43f73 4981 store_expr (value, xtarget, 0);
03dc44a6 4982 }
997404de
JH
4983 else if (vector)
4984 {
4985 int pos;
4986
4987 if (index != 0)
4988 pos = tree_low_cst (index, 0) - minelt;
4989 else
4990 pos = i;
4991 vector[pos] = expand_expr (value, NULL_RTX, VOIDmode, 0);
4992 }
03dc44a6
RS
4993 else
4994 {
4995 if (index != 0)
19caa751
RK
4996 bitpos = ((tree_low_cst (index, 0) - minelt)
4997 * tree_low_cst (TYPE_SIZE (elttype), 1));
03dc44a6 4998 else
19caa751
RK
4999 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5000
10b76d73 5001 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
e6834654 5002 && TREE_CODE (type) == ARRAY_TYPE
10b76d73
RK
5003 && TYPE_NONALIASED_COMPONENT (type))
5004 {
5005 target = copy_rtx (target);
5006 MEM_KEEP_ALIAS_SET_P (target) = 1;
5007 }
9b9bd3b2
JH
5008 store_constructor_field (target, bitsize, bitpos, mode, value,
5009 type, cleared, get_alias_set (elttype));
03dc44a6 5010 }
bbf6f052 5011 }
997404de
JH
5012 if (vector)
5013 {
5014 emit_insn (GEN_FCN (icode) (target,
5015 gen_rtx_PARALLEL (GET_MODE (target),
5016 gen_rtvec_v (n_elts, vector))));
5017 }
bbf6f052 5018 }
19caa751 5019
3a94c984 5020 /* Set constructor assignments. */
071a6595
PB
5021 else if (TREE_CODE (type) == SET_TYPE)
5022 {
e1a43f73 5023 tree elt = CONSTRUCTOR_ELTS (exp);
19caa751 5024 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
071a6595
PB
5025 tree domain = TYPE_DOMAIN (type);
5026 tree domain_min, domain_max, bitlength;
5027
9faa82d8 5028 /* The default implementation strategy is to extract the constant
071a6595
PB
5029 parts of the constructor, use that to initialize the target,
5030 and then "or" in whatever non-constant ranges we need in addition.
5031
5032 If a large set is all zero or all ones, it is
5033 probably better to set it using memset (if available) or bzero.
5034 Also, if a large set has just a single range, it may also be
5035 better to first clear all the first clear the set (using
0f41302f 5036 bzero/memset), and set the bits we want. */
3a94c984 5037
0f41302f 5038 /* Check for all zeros. */
9376fcd6 5039 if (elt == NULL_TREE && size > 0)
071a6595 5040 {
e1a43f73 5041 if (!cleared)
8ac61af7 5042 clear_storage (target, GEN_INT (size));
071a6595
PB
5043 return;
5044 }
5045
071a6595
PB
5046 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5047 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5048 bitlength = size_binop (PLUS_EXPR,
fed3cef0
RK
5049 size_diffop (domain_max, domain_min),
5050 ssize_int (1));
071a6595 5051
19caa751 5052 nbits = tree_low_cst (bitlength, 1);
e1a43f73
PB
5053
5054 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5055 are "complicated" (more than one range), initialize (the
3a94c984 5056 constant parts) by copying from a constant. */
e1a43f73
PB
5057 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5058 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
071a6595 5059 {
19caa751 5060 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
b4ee5a72 5061 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
703ad42b 5062 char *bit_buffer = alloca (nbits);
b4ee5a72 5063 HOST_WIDE_INT word = 0;
19caa751
RK
5064 unsigned int bit_pos = 0;
5065 unsigned int ibit = 0;
5066 unsigned int offset = 0; /* In bytes from beginning of set. */
5067
e1a43f73 5068 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
b4ee5a72 5069 for (;;)
071a6595 5070 {
b4ee5a72
PB
5071 if (bit_buffer[ibit])
5072 {
b09f3348 5073 if (BYTES_BIG_ENDIAN)
b4ee5a72
PB
5074 word |= (1 << (set_word_size - 1 - bit_pos));
5075 else
5076 word |= 1 << bit_pos;
5077 }
19caa751 5078
b4ee5a72
PB
5079 bit_pos++; ibit++;
5080 if (bit_pos >= set_word_size || ibit == nbits)
071a6595 5081 {
e1a43f73
PB
5082 if (word != 0 || ! cleared)
5083 {
053ee101 5084 rtx datum = gen_int_mode (word, mode);
e1a43f73 5085 rtx to_rtx;
19caa751 5086
0f41302f
MS
5087 /* The assumption here is that it is safe to use
5088 XEXP if the set is multi-word, but not if
5089 it's single-word. */
e1a43f73 5090 if (GET_CODE (target) == MEM)
f4ef873c 5091 to_rtx = adjust_address (target, mode, offset);
3a94c984 5092 else if (offset == 0)
e1a43f73
PB
5093 to_rtx = target;
5094 else
5095 abort ();
5096 emit_move_insn (to_rtx, datum);
5097 }
19caa751 5098
b4ee5a72
PB
5099 if (ibit == nbits)
5100 break;
5101 word = 0;
5102 bit_pos = 0;
5103 offset += set_word_size / BITS_PER_UNIT;
071a6595
PB
5104 }
5105 }
071a6595 5106 }
e1a43f73 5107 else if (!cleared)
19caa751
RK
5108 /* Don't bother clearing storage if the set is all ones. */
5109 if (TREE_CHAIN (elt) != NULL_TREE
5110 || (TREE_PURPOSE (elt) == NULL_TREE
5111 ? nbits != 1
5112 : ( ! host_integerp (TREE_VALUE (elt), 0)
5113 || ! host_integerp (TREE_PURPOSE (elt), 0)
5114 || (tree_low_cst (TREE_VALUE (elt), 0)
5115 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5116 != (HOST_WIDE_INT) nbits))))
8ac61af7 5117 clear_storage (target, expr_size (exp));
3a94c984 5118
e1a43f73 5119 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
071a6595 5120 {
3a94c984 5121 /* Start of range of element or NULL. */
071a6595 5122 tree startbit = TREE_PURPOSE (elt);
3a94c984 5123 /* End of range of element, or element value. */
071a6595
PB
5124 tree endbit = TREE_VALUE (elt);
5125 HOST_WIDE_INT startb, endb;
19caa751 5126 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
071a6595
PB
5127
5128 bitlength_rtx = expand_expr (bitlength,
19caa751 5129 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
071a6595 5130
3a94c984 5131 /* Handle non-range tuple element like [ expr ]. */
071a6595
PB
5132 if (startbit == NULL_TREE)
5133 {
5134 startbit = save_expr (endbit);
5135 endbit = startbit;
5136 }
19caa751 5137
071a6595
PB
5138 startbit = convert (sizetype, startbit);
5139 endbit = convert (sizetype, endbit);
5140 if (! integer_zerop (domain_min))
5141 {
5142 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5143 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5144 }
3a94c984 5145 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
071a6595 5146 EXPAND_CONST_ADDRESS);
3a94c984 5147 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
071a6595
PB
5148 EXPAND_CONST_ADDRESS);
5149
5150 if (REG_P (target))
5151 {
1da68f56
RK
5152 targetx
5153 = assign_temp
ae2bcd98 5154 ((build_qualified_type (lang_hooks.types.type_for_mode
b0c48229 5155 (GET_MODE (target), 0),
1da68f56
RK
5156 TYPE_QUAL_CONST)),
5157 0, 1, 1);
071a6595
PB
5158 emit_move_insn (targetx, target);
5159 }
19caa751 5160
071a6595
PB
5161 else if (GET_CODE (target) == MEM)
5162 targetx = target;
5163 else
5164 abort ();
5165
4ca79136
RH
5166 /* Optimization: If startbit and endbit are constants divisible
5167 by BITS_PER_UNIT, call memset instead. */
5168 if (TARGET_MEM_FUNCTIONS
5169 && TREE_CODE (startbit) == INTEGER_CST
071a6595
PB
5170 && TREE_CODE (endbit) == INTEGER_CST
5171 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
e1a43f73 5172 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
071a6595 5173 {
ebb1b59a 5174 emit_library_call (memset_libfunc, LCT_NORMAL,
071a6595 5175 VOIDmode, 3,
e1a43f73
PB
5176 plus_constant (XEXP (targetx, 0),
5177 startb / BITS_PER_UNIT),
071a6595 5178 Pmode,
3b6f75e2 5179 constm1_rtx, TYPE_MODE (integer_type_node),
071a6595 5180 GEN_INT ((endb - startb) / BITS_PER_UNIT),
3b6f75e2 5181 TYPE_MODE (sizetype));
071a6595
PB
5182 }
5183 else
68d28100
RH
5184 emit_library_call (setbits_libfunc, LCT_NORMAL,
5185 VOIDmode, 4, XEXP (targetx, 0),
ebb1b59a 5186 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
19caa751
RK
5187 startbit_rtx, TYPE_MODE (sizetype),
5188 endbit_rtx, TYPE_MODE (sizetype));
5189
071a6595
PB
5190 if (REG_P (target))
5191 emit_move_insn (target, targetx);
5192 }
5193 }
bbf6f052
RK
5194
5195 else
5196 abort ();
5197}
5198
5199/* Store the value of EXP (an expression tree)
5200 into a subfield of TARGET which has mode MODE and occupies
5201 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5202 If MODE is VOIDmode, it means that we are storing into a bit-field.
5203
5204 If VALUE_MODE is VOIDmode, return nothing in particular.
5205 UNSIGNEDP is not used in this case.
5206
5207 Otherwise, return an rtx for the value stored. This rtx
5208 has mode VALUE_MODE if that is convenient to do.
5209 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5210
a06ef755 5211 TYPE is the type of the underlying object,
ece32014
MM
5212
5213 ALIAS_SET is the alias set for the destination. This value will
5214 (in general) be different from that for TARGET, since TARGET is a
5215 reference to the containing structure. */
bbf6f052
RK
5216
5217static rtx
502b8322
AJ
5218store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5219 enum machine_mode mode, tree exp, enum machine_mode value_mode,
5220 int unsignedp, tree type, int alias_set)
bbf6f052 5221{
906c4e36 5222 HOST_WIDE_INT width_mask = 0;
bbf6f052 5223
e9a25f70
JL
5224 if (TREE_CODE (exp) == ERROR_MARK)
5225 return const0_rtx;
5226
2be6a7e9
RK
5227 /* If we have nothing to store, do nothing unless the expression has
5228 side-effects. */
5229 if (bitsize == 0)
5230 return expand_expr (exp, const0_rtx, VOIDmode, 0);
6a87d634 5231 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
906c4e36 5232 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
bbf6f052
RK
5233
5234 /* If we are storing into an unaligned field of an aligned union that is
5235 in a register, we may have the mode of TARGET being an integer mode but
5236 MODE == BLKmode. In that case, get an aligned object whose size and
5237 alignment are the same as TARGET and store TARGET into it (we can avoid
5238 the store if the field being stored is the entire width of TARGET). Then
5239 call ourselves recursively to store the field into a BLKmode version of
5240 that object. Finally, load from the object into TARGET. This is not
5241 very efficient in general, but should only be slightly more expensive
5242 than the otherwise-required unaligned accesses. Perhaps this can be
85a43a2f
RK
5243 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5244 twice, once with emit_move_insn and once via store_field. */
bbf6f052
RK
5245
5246 if (mode == BLKmode
5247 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5248 {
85a43a2f 5249 rtx object = assign_temp (type, 0, 1, 1);
c4e59f51 5250 rtx blk_object = adjust_address (object, BLKmode, 0);
bbf6f052 5251
8752c357 5252 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
bbf6f052
RK
5253 emit_move_insn (object, target);
5254
a06ef755
RK
5255 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5256 alias_set);
bbf6f052
RK
5257
5258 emit_move_insn (target, object);
5259
a06ef755 5260 /* We want to return the BLKmode version of the data. */
46093b97 5261 return blk_object;
bbf6f052 5262 }
c3b247b4
JM
5263
5264 if (GET_CODE (target) == CONCAT)
5265 {
5266 /* We're storing into a struct containing a single __complex. */
5267
5268 if (bitpos != 0)
5269 abort ();
5270 return store_expr (exp, target, 0);
5271 }
bbf6f052
RK
5272
5273 /* If the structure is in a register or if the component
5274 is a bit field, we cannot use addressing to access it.
5275 Use bit-field techniques or SUBREG to store in it. */
5276
4fa52007 5277 if (mode == VOIDmode
6ab06cbb
JW
5278 || (mode != BLKmode && ! direct_store[(int) mode]
5279 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5280 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4fa52007 5281 || GET_CODE (target) == REG
c980ac49 5282 || GET_CODE (target) == SUBREG
ccc98036
RS
5283 /* If the field isn't aligned enough to store as an ordinary memref,
5284 store it as a bit field. */
15b19a7d 5285 || (mode != BLKmode
9e5f281f
OH
5286 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5287 || bitpos % GET_MODE_ALIGNMENT (mode))
5288 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
502b8322 5289 || (bitpos % BITS_PER_UNIT != 0)))
14a774a9
RK
5290 /* If the RHS and field are a constant size and the size of the
5291 RHS isn't the same size as the bitfield, we must use bitfield
5292 operations. */
05bccae2
RK
5293 || (bitsize >= 0
5294 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5295 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
bbf6f052 5296 {
906c4e36 5297 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
bbd6cf73 5298
ef19912d
RK
5299 /* If BITSIZE is narrower than the size of the type of EXP
5300 we will be narrowing TEMP. Normally, what's wanted are the
5301 low-order bits. However, if EXP's type is a record and this is
5302 big-endian machine, we want the upper BITSIZE bits. */
5303 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
65a07688 5304 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
ef19912d
RK
5305 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5306 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5307 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5308 - bitsize),
c1853da7 5309 NULL_RTX, 1);
ef19912d 5310
bbd6cf73
RK
5311 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5312 MODE. */
5313 if (mode != VOIDmode && mode != BLKmode
5314 && mode != TYPE_MODE (TREE_TYPE (exp)))
5315 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5316
a281e72d
RK
5317 /* If the modes of TARGET and TEMP are both BLKmode, both
5318 must be in memory and BITPOS must be aligned on a byte
5319 boundary. If so, we simply do a block copy. */
5320 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5321 {
5322 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5323 || bitpos % BITS_PER_UNIT != 0)
5324 abort ();
5325
f4ef873c 5326 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
a281e72d 5327 emit_block_move (target, temp,
a06ef755 5328 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
44bb111a
RH
5329 / BITS_PER_UNIT),
5330 BLOCK_OP_NORMAL);
a281e72d
RK
5331
5332 return value_mode == VOIDmode ? const0_rtx : target;
5333 }
5334
bbf6f052 5335 /* Store the value in the bitfield. */
a06ef755
RK
5336 store_bit_field (target, bitsize, bitpos, mode, temp,
5337 int_size_in_bytes (type));
5338
bbf6f052
RK
5339 if (value_mode != VOIDmode)
5340 {
04050c69
RK
5341 /* The caller wants an rtx for the value.
5342 If possible, avoid refetching from the bitfield itself. */
bbf6f052
RK
5343 if (width_mask != 0
5344 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5c4d7cfb 5345 {
9074de27 5346 tree count;
5c4d7cfb 5347 enum machine_mode tmode;
86a2c12a 5348
5c4d7cfb 5349 tmode = GET_MODE (temp);
86a2c12a
RS
5350 if (tmode == VOIDmode)
5351 tmode = value_mode;
22273300
JJ
5352
5353 if (unsignedp)
5354 return expand_and (tmode, temp,
2496c7bd 5355 gen_int_mode (width_mask, tmode),
22273300
JJ
5356 NULL_RTX);
5357
5c4d7cfb
RS
5358 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5359 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5360 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5361 }
04050c69 5362
bbf6f052 5363 return extract_bit_field (target, bitsize, bitpos, unsignedp,
04050c69 5364 NULL_RTX, value_mode, VOIDmode,
a06ef755 5365 int_size_in_bytes (type));
bbf6f052
RK
5366 }
5367 return const0_rtx;
5368 }
5369 else
5370 {
5371 rtx addr = XEXP (target, 0);
a06ef755 5372 rtx to_rtx = target;
bbf6f052
RK
5373
5374 /* If a value is wanted, it must be the lhs;
5375 so make the address stable for multiple use. */
5376
5377 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5378 && ! CONSTANT_ADDRESS_P (addr)
5379 /* A frame-pointer reference is already stable. */
5380 && ! (GET_CODE (addr) == PLUS
5381 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5382 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5383 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
a06ef755 5384 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
bbf6f052
RK
5385
5386 /* Now build a reference to just the desired component. */
5387
a06ef755
RK
5388 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5389
5390 if (to_rtx == target)
5391 to_rtx = copy_rtx (to_rtx);
792760b9 5392
c6df88cb 5393 MEM_SET_IN_STRUCT_P (to_rtx, 1);
10b76d73 5394 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
a06ef755 5395 set_mem_alias_set (to_rtx, alias_set);
bbf6f052
RK
5396
5397 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5398 }
5399}
5400\f
5401/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
b4e3fabb
RK
5402 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5403 codes and find the ultimate containing object, which we return.
bbf6f052
RK
5404
5405 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5406 bit position, and *PUNSIGNEDP to the signedness of the field.
7bb0943f
RS
5407 If the position of the field is variable, we store a tree
5408 giving the variable offset (in units) in *POFFSET.
5409 This offset is in addition to the bit position.
5410 If the position is not variable, we store 0 in *POFFSET.
bbf6f052
RK
5411
5412 If any of the extraction expressions is volatile,
5413 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5414
5415 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5416 is a mode that can be used to access the field. In that case, *PBITSIZE
e7c33f54
RK
5417 is redundant.
5418
5419 If the field describes a variable-sized object, *PMODE is set to
5420 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
6d2f8887 5421 this case, but the address of the object can be found. */
bbf6f052
RK
5422
5423tree
502b8322
AJ
5424get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5425 HOST_WIDE_INT *pbitpos, tree *poffset,
5426 enum machine_mode *pmode, int *punsignedp,
5427 int *pvolatilep)
bbf6f052
RK
5428{
5429 tree size_tree = 0;
5430 enum machine_mode mode = VOIDmode;
fed3cef0 5431 tree offset = size_zero_node;
770ae6cc 5432 tree bit_offset = bitsize_zero_node;
770ae6cc 5433 tree tem;
bbf6f052 5434
770ae6cc
RK
5435 /* First get the mode, signedness, and size. We do this from just the
5436 outermost expression. */
bbf6f052
RK
5437 if (TREE_CODE (exp) == COMPONENT_REF)
5438 {
5439 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5440 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5441 mode = DECL_MODE (TREE_OPERAND (exp, 1));
770ae6cc 5442
a150de29 5443 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
bbf6f052
RK
5444 }
5445 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5446 {
5447 size_tree = TREE_OPERAND (exp, 1);
a150de29 5448 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
bbf6f052
RK
5449 }
5450 else
5451 {
5452 mode = TYPE_MODE (TREE_TYPE (exp));
8df83eae 5453 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
770ae6cc 5454
ab87f8c8
JL
5455 if (mode == BLKmode)
5456 size_tree = TYPE_SIZE (TREE_TYPE (exp));
770ae6cc
RK
5457 else
5458 *pbitsize = GET_MODE_BITSIZE (mode);
bbf6f052 5459 }
3a94c984 5460
770ae6cc 5461 if (size_tree != 0)
bbf6f052 5462 {
770ae6cc 5463 if (! host_integerp (size_tree, 1))
e7c33f54
RK
5464 mode = BLKmode, *pbitsize = -1;
5465 else
770ae6cc 5466 *pbitsize = tree_low_cst (size_tree, 1);
bbf6f052
RK
5467 }
5468
5469 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5470 and find the ultimate containing object. */
bbf6f052
RK
5471 while (1)
5472 {
770ae6cc
RK
5473 if (TREE_CODE (exp) == BIT_FIELD_REF)
5474 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5475 else if (TREE_CODE (exp) == COMPONENT_REF)
bbf6f052 5476 {
770ae6cc
RK
5477 tree field = TREE_OPERAND (exp, 1);
5478 tree this_offset = DECL_FIELD_OFFSET (field);
bbf6f052 5479
e7f3c83f
RK
5480 /* If this field hasn't been filled in yet, don't go
5481 past it. This should only happen when folding expressions
5482 made during type construction. */
770ae6cc 5483 if (this_offset == 0)
e7f3c83f 5484 break;
6fce44af
RK
5485 else
5486 this_offset = SUBSTITUTE_PLACEHOLDER_IN_EXPR (this_offset, exp);
e7f3c83f 5487
7156dead 5488 offset = size_binop (PLUS_EXPR, offset, this_offset);
770ae6cc
RK
5489 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5490 DECL_FIELD_BIT_OFFSET (field));
e6d8c385 5491
a06ef755 5492 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
bbf6f052 5493 }
7156dead 5494
b4e3fabb
RK
5495 else if (TREE_CODE (exp) == ARRAY_REF
5496 || TREE_CODE (exp) == ARRAY_RANGE_REF)
bbf6f052 5497 {
742920c7 5498 tree index = TREE_OPERAND (exp, 1);
b4e3fabb
RK
5499 tree array = TREE_OPERAND (exp, 0);
5500 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
770ae6cc 5501 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
b4e3fabb 5502 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
742920c7 5503
770ae6cc
RK
5504 /* We assume all arrays have sizes that are a multiple of a byte.
5505 First subtract the lower bound, if any, in the type of the
5506 index, then convert to sizetype and multiply by the size of the
5507 array element. */
5508 if (low_bound != 0 && ! integer_zerop (low_bound))
5509 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5510 index, low_bound));
f8dac6eb 5511
6fce44af
RK
5512 /* If the index has a self-referential type, instantiate it with
5513 the object; likewise fkor the component size. */
5514 index = SUBSTITUTE_PLACEHOLDER_IN_EXPR (index, exp);
5515 unit_size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (unit_size, array);
770ae6cc
RK
5516 offset = size_binop (PLUS_EXPR, offset,
5517 size_binop (MULT_EXPR,
5518 convert (sizetype, index),
7156dead 5519 unit_size));
bbf6f052 5520 }
7156dead 5521
c1853da7
RK
5522 /* We can go inside most conversions: all NON_VALUE_EXPRs, all normal
5523 conversions that don't change the mode, and all view conversions
5524 except those that need to "step up" the alignment. */
bbf6f052 5525 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
c1853da7
RK
5526 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5527 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5528 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5529 && STRICT_ALIGNMENT
5530 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5531 < BIGGEST_ALIGNMENT)
5532 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5533 || TYPE_ALIGN_OK (TREE_TYPE
5534 (TREE_OPERAND (exp, 0))))))
bbf6f052
RK
5535 && ! ((TREE_CODE (exp) == NOP_EXPR
5536 || TREE_CODE (exp) == CONVERT_EXPR)
5537 && (TYPE_MODE (TREE_TYPE (exp))
5538 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5539 break;
7bb0943f
RS
5540
5541 /* If any reference in the chain is volatile, the effect is volatile. */
5542 if (TREE_THIS_VOLATILE (exp))
5543 *pvolatilep = 1;
839c4796 5544
bbf6f052
RK
5545 exp = TREE_OPERAND (exp, 0);
5546 }
5547
770ae6cc
RK
5548 /* If OFFSET is constant, see if we can return the whole thing as a
5549 constant bit position. Otherwise, split it up. */
5550 if (host_integerp (offset, 0)
5551 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5552 bitsize_unit_node))
5553 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5554 && host_integerp (tem, 0))
5555 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5556 else
5557 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
b50d17a1 5558
bbf6f052 5559 *pmode = mode;
bbf6f052
RK
5560 return exp;
5561}
921b3427 5562
ed239f5a
RK
5563/* Return 1 if T is an expression that get_inner_reference handles. */
5564
5565int
502b8322 5566handled_component_p (tree t)
ed239f5a
RK
5567{
5568 switch (TREE_CODE (t))
5569 {
5570 case BIT_FIELD_REF:
5571 case COMPONENT_REF:
5572 case ARRAY_REF:
5573 case ARRAY_RANGE_REF:
5574 case NON_LVALUE_EXPR:
5575 case VIEW_CONVERT_EXPR:
5576 return 1;
5577
1a8c4ca6
EB
5578 /* ??? Sure they are handled, but get_inner_reference may return
5579 a different PBITSIZE, depending upon whether the expression is
5580 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
ed239f5a
RK
5581 case NOP_EXPR:
5582 case CONVERT_EXPR:
5583 return (TYPE_MODE (TREE_TYPE (t))
5584 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5585
5586 default:
5587 return 0;
5588 }
5589}
bbf6f052 5590\f
3fe44edd
RK
5591/* Given an rtx VALUE that may contain additions and multiplications, return
5592 an equivalent value that just refers to a register, memory, or constant.
5593 This is done by generating instructions to perform the arithmetic and
5594 returning a pseudo-register containing the value.
c45a13a6
RK
5595
5596 The returned value may be a REG, SUBREG, MEM or constant. */
bbf6f052
RK
5597
5598rtx
502b8322 5599force_operand (rtx value, rtx target)
bbf6f052 5600{
8a28dbcc 5601 rtx op1, op2;
bbf6f052 5602 /* Use subtarget as the target for operand 0 of a binary operation. */
b3694847 5603 rtx subtarget = get_subtarget (target);
8a28dbcc 5604 enum rtx_code code = GET_CODE (value);
bbf6f052 5605
50654f6c
ZD
5606 /* Check for subreg applied to an expression produced by loop optimizer. */
5607 if (code == SUBREG
5608 && GET_CODE (SUBREG_REG (value)) != REG
5609 && GET_CODE (SUBREG_REG (value)) != MEM)
5610 {
5611 value = simplify_gen_subreg (GET_MODE (value),
5612 force_reg (GET_MODE (SUBREG_REG (value)),
5613 force_operand (SUBREG_REG (value),
5614 NULL_RTX)),
5615 GET_MODE (SUBREG_REG (value)),
5616 SUBREG_BYTE (value));
5617 code = GET_CODE (value);
5618 }
5619
8b015896 5620 /* Check for a PIC address load. */
8a28dbcc 5621 if ((code == PLUS || code == MINUS)
8b015896
RH
5622 && XEXP (value, 0) == pic_offset_table_rtx
5623 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5624 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5625 || GET_CODE (XEXP (value, 1)) == CONST))
5626 {
5627 if (!subtarget)
5628 subtarget = gen_reg_rtx (GET_MODE (value));
5629 emit_move_insn (subtarget, value);
5630 return subtarget;
5631 }
5632
8a28dbcc 5633 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
bbf6f052 5634 {
8a28dbcc
JH
5635 if (!target)
5636 target = gen_reg_rtx (GET_MODE (value));
ce0f3925 5637 convert_move (target, force_operand (XEXP (value, 0), NULL),
8a28dbcc
JH
5638 code == ZERO_EXTEND);
5639 return target;
bbf6f052
RK
5640 }
5641
ec8e098d 5642 if (ARITHMETIC_P (value))
bbf6f052
RK
5643 {
5644 op2 = XEXP (value, 1);
8a28dbcc 5645 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
bbf6f052 5646 subtarget = 0;
8a28dbcc 5647 if (code == MINUS && GET_CODE (op2) == CONST_INT)
bbf6f052 5648 {
8a28dbcc 5649 code = PLUS;
bbf6f052
RK
5650 op2 = negate_rtx (GET_MODE (value), op2);
5651 }
5652
5653 /* Check for an addition with OP2 a constant integer and our first
8a28dbcc
JH
5654 operand a PLUS of a virtual register and something else. In that
5655 case, we want to emit the sum of the virtual register and the
5656 constant first and then add the other value. This allows virtual
5657 register instantiation to simply modify the constant rather than
5658 creating another one around this addition. */
5659 if (code == PLUS && GET_CODE (op2) == CONST_INT
bbf6f052
RK
5660 && GET_CODE (XEXP (value, 0)) == PLUS
5661 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5662 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5663 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5664 {
8a28dbcc
JH
5665 rtx temp = expand_simple_binop (GET_MODE (value), code,
5666 XEXP (XEXP (value, 0), 0), op2,
5667 subtarget, 0, OPTAB_LIB_WIDEN);
5668 return expand_simple_binop (GET_MODE (value), code, temp,
5669 force_operand (XEXP (XEXP (value,
5670 0), 1), 0),
5671 target, 0, OPTAB_LIB_WIDEN);
bbf6f052 5672 }
3a94c984 5673
8a28dbcc
JH
5674 op1 = force_operand (XEXP (value, 0), subtarget);
5675 op2 = force_operand (op2, NULL_RTX);
5676 switch (code)
5677 {
5678 case MULT:
5679 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5680 case DIV:
5681 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5682 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5683 target, 1, OPTAB_LIB_WIDEN);
5684 else
5685 return expand_divmod (0,
5686 FLOAT_MODE_P (GET_MODE (value))
5687 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5688 GET_MODE (value), op1, op2, target, 0);
5689 break;
5690 case MOD:
5691 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5692 target, 0);
5693 break;
5694 case UDIV:
5695 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5696 target, 1);
5697 break;
5698 case UMOD:
5699 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5700 target, 1);
5701 break;
5702 case ASHIFTRT:
5703 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5704 target, 0, OPTAB_LIB_WIDEN);
5705 break;
5706 default:
5707 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5708 target, 1, OPTAB_LIB_WIDEN);
5709 }
5710 }
ec8e098d 5711 if (UNARY_P (value))
8a28dbcc
JH
5712 {
5713 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5714 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
bbf6f052 5715 }
34e81b5a
RK
5716
5717#ifdef INSN_SCHEDULING
5718 /* On machines that have insn scheduling, we want all memory reference to be
5719 explicit, so we need to deal with such paradoxical SUBREGs. */
5720 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5721 && (GET_MODE_SIZE (GET_MODE (value))
5722 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5723 value
5724 = simplify_gen_subreg (GET_MODE (value),
5725 force_reg (GET_MODE (SUBREG_REG (value)),
5726 force_operand (SUBREG_REG (value),
5727 NULL_RTX)),
5728 GET_MODE (SUBREG_REG (value)),
5729 SUBREG_BYTE (value));
5730#endif
5731
bbf6f052
RK
5732 return value;
5733}
5734\f
bbf6f052 5735/* Subroutine of expand_expr: return nonzero iff there is no way that
e5e809f4
JL
5736 EXP can reference X, which is being modified. TOP_P is nonzero if this
5737 call is going to be used to determine whether we need a temporary
ff439b5f
CB
5738 for EXP, as opposed to a recursive call to this function.
5739
5740 It is always safe for this routine to return zero since it merely
5741 searches for optimization opportunities. */
bbf6f052 5742
8f17b5c5 5743int
502b8322 5744safe_from_p (rtx x, tree exp, int top_p)
bbf6f052
RK
5745{
5746 rtx exp_rtl = 0;
5747 int i, nops;
1da68f56 5748 static tree save_expr_list;
bbf6f052 5749
6676e72f
RK
5750 if (x == 0
5751 /* If EXP has varying size, we MUST use a target since we currently
8f6562d0
PB
5752 have no way of allocating temporaries of variable size
5753 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5754 So we assume here that something at a higher level has prevented a
f4510f37 5755 clash. This is somewhat bogus, but the best we can do. Only
e5e809f4 5756 do this when X is BLKmode and when we are at the top level. */
d0f062fb 5757 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
f4510f37 5758 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
8f6562d0
PB
5759 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5760 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5761 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5762 != INTEGER_CST)
1da68f56
RK
5763 && GET_MODE (x) == BLKmode)
5764 /* If X is in the outgoing argument area, it is always safe. */
5765 || (GET_CODE (x) == MEM
5766 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5767 || (GET_CODE (XEXP (x, 0)) == PLUS
5768 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
bbf6f052
RK
5769 return 1;
5770
5771 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5772 find the underlying pseudo. */
5773 if (GET_CODE (x) == SUBREG)
5774 {
5775 x = SUBREG_REG (x);
5776 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5777 return 0;
5778 }
5779
1da68f56
RK
5780 /* A SAVE_EXPR might appear many times in the expression passed to the
5781 top-level safe_from_p call, and if it has a complex subexpression,
5782 examining it multiple times could result in a combinatorial explosion.
7ef0daad 5783 E.g. on an Alpha running at least 200MHz, a Fortran testcase compiled
1da68f56
RK
5784 with optimization took about 28 minutes to compile -- even though it was
5785 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5786 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5787 we have processed. Note that the only test of top_p was above. */
5788
5789 if (top_p)
5790 {
5791 int rtn;
5792 tree t;
5793
5794 save_expr_list = 0;
5795
5796 rtn = safe_from_p (x, exp, 0);
5797
5798 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5799 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5800
5801 return rtn;
5802 }
bbf6f052 5803
1da68f56 5804 /* Now look at our tree code and possibly recurse. */
bbf6f052
RK
5805 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5806 {
5807 case 'd':
a9772b60 5808 exp_rtl = DECL_RTL_IF_SET (exp);
bbf6f052
RK
5809 break;
5810
5811 case 'c':
5812 return 1;
5813
5814 case 'x':
5815 if (TREE_CODE (exp) == TREE_LIST)
f8d4be57
CE
5816 {
5817 while (1)
5818 {
5819 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5820 return 0;
5821 exp = TREE_CHAIN (exp);
5822 if (!exp)
5823 return 1;
5824 if (TREE_CODE (exp) != TREE_LIST)
5825 return safe_from_p (x, exp, 0);
5826 }
5827 }
ff439b5f
CB
5828 else if (TREE_CODE (exp) == ERROR_MARK)
5829 return 1; /* An already-visited SAVE_EXPR? */
bbf6f052
RK
5830 else
5831 return 0;
5832
bbf6f052
RK
5833 case '2':
5834 case '<':
f8d4be57
CE
5835 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
5836 return 0;
5d3cc252 5837 /* Fall through. */
f8d4be57
CE
5838
5839 case '1':
5840 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
bbf6f052
RK
5841
5842 case 'e':
5843 case 'r':
5844 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5845 the expression. If it is set, we conflict iff we are that rtx or
5846 both are in memory. Otherwise, we check all operands of the
5847 expression recursively. */
5848
5849 switch (TREE_CODE (exp))
5850 {
5851 case ADDR_EXPR:
70072ed9
RK
5852 /* If the operand is static or we are static, we can't conflict.
5853 Likewise if we don't conflict with the operand at all. */
5854 if (staticp (TREE_OPERAND (exp, 0))
5855 || TREE_STATIC (exp)
5856 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5857 return 1;
5858
5859 /* Otherwise, the only way this can conflict is if we are taking
5860 the address of a DECL a that address if part of X, which is
5861 very rare. */
5862 exp = TREE_OPERAND (exp, 0);
5863 if (DECL_P (exp))
5864 {
5865 if (!DECL_RTL_SET_P (exp)
5866 || GET_CODE (DECL_RTL (exp)) != MEM)
5867 return 0;
5868 else
5869 exp_rtl = XEXP (DECL_RTL (exp), 0);
5870 }
5871 break;
bbf6f052
RK
5872
5873 case INDIRECT_REF:
1da68f56
RK
5874 if (GET_CODE (x) == MEM
5875 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5876 get_alias_set (exp)))
bbf6f052
RK
5877 return 0;
5878 break;
5879
5880 case CALL_EXPR:
f9808f81
MM
5881 /* Assume that the call will clobber all hard registers and
5882 all of memory. */
5883 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5884 || GET_CODE (x) == MEM)
5885 return 0;
bbf6f052
RK
5886 break;
5887
5888 case RTL_EXPR:
3bb5826a
RK
5889 /* If a sequence exists, we would have to scan every instruction
5890 in the sequence to see if it was safe. This is probably not
5891 worthwhile. */
5892 if (RTL_EXPR_SEQUENCE (exp))
bbf6f052
RK
5893 return 0;
5894
3bb5826a 5895 exp_rtl = RTL_EXPR_RTL (exp);
bbf6f052
RK
5896 break;
5897
5898 case WITH_CLEANUP_EXPR:
6ad7895a 5899 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
bbf6f052
RK
5900 break;
5901
5dab5552 5902 case CLEANUP_POINT_EXPR:
e5e809f4 5903 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5dab5552 5904
bbf6f052
RK
5905 case SAVE_EXPR:
5906 exp_rtl = SAVE_EXPR_RTL (exp);
ff439b5f
CB
5907 if (exp_rtl)
5908 break;
5909
1da68f56
RK
5910 /* If we've already scanned this, don't do it again. Otherwise,
5911 show we've scanned it and record for clearing the flag if we're
5912 going on. */
5913 if (TREE_PRIVATE (exp))
5914 return 1;
ff439b5f 5915
1da68f56
RK
5916 TREE_PRIVATE (exp) = 1;
5917 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
ff59bfe6 5918 {
1da68f56
RK
5919 TREE_PRIVATE (exp) = 0;
5920 return 0;
ff59bfe6 5921 }
1da68f56
RK
5922
5923 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
ff439b5f 5924 return 1;
bbf6f052 5925
8129842c
RS
5926 case BIND_EXPR:
5927 /* The only operand we look at is operand 1. The rest aren't
5928 part of the expression. */
e5e809f4 5929 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
8129842c 5930
e9a25f70
JL
5931 default:
5932 break;
bbf6f052
RK
5933 }
5934
5935 /* If we have an rtx, we do not need to scan our operands. */
5936 if (exp_rtl)
5937 break;
5938
8f17b5c5 5939 nops = first_rtl_op (TREE_CODE (exp));
bbf6f052
RK
5940 for (i = 0; i < nops; i++)
5941 if (TREE_OPERAND (exp, i) != 0
e5e809f4 5942 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
bbf6f052 5943 return 0;
8f17b5c5
MM
5944
5945 /* If this is a language-specific tree code, it may require
5946 special handling. */
dbbbbf3b
JDA
5947 if ((unsigned int) TREE_CODE (exp)
5948 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
ae2bcd98 5949 && !lang_hooks.safe_from_p (x, exp))
8f17b5c5 5950 return 0;
bbf6f052
RK
5951 }
5952
5953 /* If we have an rtl, find any enclosed object. Then see if we conflict
5954 with it. */
5955 if (exp_rtl)
5956 {
5957 if (GET_CODE (exp_rtl) == SUBREG)
5958 {
5959 exp_rtl = SUBREG_REG (exp_rtl);
5960 if (GET_CODE (exp_rtl) == REG
5961 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5962 return 0;
5963 }
5964
5965 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
1da68f56 5966 are memory and they conflict. */
bbf6f052
RK
5967 return ! (rtx_equal_p (x, exp_rtl)
5968 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
21117a17 5969 && true_dependence (exp_rtl, VOIDmode, x,
1da68f56 5970 rtx_addr_varies_p)));
bbf6f052
RK
5971 }
5972
5973 /* If we reach here, it is safe. */
5974 return 1;
5975}
5976
01c8a7c8
RK
5977/* Subroutine of expand_expr: return rtx if EXP is a
5978 variable or parameter; else return 0. */
5979
5980static rtx
502b8322 5981var_rtx (tree exp)
01c8a7c8
RK
5982{
5983 STRIP_NOPS (exp);
5984 switch (TREE_CODE (exp))
5985 {
5986 case PARM_DECL:
5987 case VAR_DECL:
5988 return DECL_RTL (exp);
5989 default:
5990 return 0;
5991 }
5992}
14a774a9 5993\f
0d4903b8
RK
5994/* Return the highest power of two that EXP is known to be a multiple of.
5995 This is used in updating alignment of MEMs in array references. */
5996
9ceca302 5997static unsigned HOST_WIDE_INT
502b8322 5998highest_pow2_factor (tree exp)
0d4903b8 5999{
9ceca302 6000 unsigned HOST_WIDE_INT c0, c1;
0d4903b8
RK
6001
6002 switch (TREE_CODE (exp))
6003 {
6004 case INTEGER_CST:
e0f1be5c
JJ
6005 /* We can find the lowest bit that's a one. If the low
6006 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6007 We need to handle this case since we can find it in a COND_EXPR,
a98ebe2e 6008 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
e0f1be5c 6009 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
3a531a8b 6010 later ICE. */
e0f1be5c 6011 if (TREE_CONSTANT_OVERFLOW (exp))
1ed1b4fb 6012 return BIGGEST_ALIGNMENT;
e0f1be5c 6013 else
0d4903b8 6014 {
e0f1be5c
JJ
6015 /* Note: tree_low_cst is intentionally not used here,
6016 we don't care about the upper bits. */
6017 c0 = TREE_INT_CST_LOW (exp);
6018 c0 &= -c0;
6019 return c0 ? c0 : BIGGEST_ALIGNMENT;
0d4903b8
RK
6020 }
6021 break;
6022
65a07688 6023 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
0d4903b8
RK
6024 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6025 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6026 return MIN (c0, c1);
6027
6028 case MULT_EXPR:
6029 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6030 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6031 return c0 * c1;
6032
6033 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6034 case CEIL_DIV_EXPR:
65a07688
RK
6035 if (integer_pow2p (TREE_OPERAND (exp, 1))
6036 && host_integerp (TREE_OPERAND (exp, 1), 1))
6037 {
6038 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6039 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6040 return MAX (1, c0 / c1);
6041 }
6042 break;
0d4903b8
RK
6043
6044 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6fce44af 6045 case SAVE_EXPR:
0d4903b8
RK
6046 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6047
65a07688
RK
6048 case COMPOUND_EXPR:
6049 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6050
0d4903b8
RK
6051 case COND_EXPR:
6052 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6053 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6054 return MIN (c0, c1);
6055
6056 default:
6057 break;
6058 }
6059
6060 return 1;
6061}
818c0c94 6062
d50a16c4
EB
6063/* Similar, except that the alignment requirements of TARGET are
6064 taken into account. Assume it is at least as aligned as its
6065 type, unless it is a COMPONENT_REF in which case the layout of
6066 the structure gives the alignment. */
818c0c94 6067
9ceca302 6068static unsigned HOST_WIDE_INT
d50a16c4 6069highest_pow2_factor_for_target (tree target, tree exp)
818c0c94 6070{
d50a16c4 6071 unsigned HOST_WIDE_INT target_align, factor;
818c0c94
RH
6072
6073 factor = highest_pow2_factor (exp);
d50a16c4
EB
6074 if (TREE_CODE (target) == COMPONENT_REF)
6075 target_align = DECL_ALIGN (TREE_OPERAND (target, 1)) / BITS_PER_UNIT;
6076 else
6077 target_align = TYPE_ALIGN (TREE_TYPE (target)) / BITS_PER_UNIT;
6078 return MAX (factor, target_align);
818c0c94 6079}
0d4903b8 6080\f
eb698c58
RS
6081/* Subroutine of expand_expr. Expand the two operands of a binary
6082 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6083 The value may be stored in TARGET if TARGET is nonzero. The
6084 MODIFIER argument is as documented by expand_expr. */
6085
6086static void
6087expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6088 enum expand_modifier modifier)
6089{
6090 if (! safe_from_p (target, exp1, 1))
6091 target = 0;
6092 if (operand_equal_p (exp0, exp1, 0))
6093 {
6094 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6095 *op1 = copy_rtx (*op0);
6096 }
6097 else
6098 {
c67e6e14
RS
6099 /* If we need to preserve evaluation order, copy exp0 into its own
6100 temporary variable so that it can't be clobbered by exp1. */
6101 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6102 exp0 = save_expr (exp0);
eb698c58
RS
6103 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6104 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6105 }
6106}
6107
f47e9b4e 6108\f
bbf6f052
RK
6109/* expand_expr: generate code for computing expression EXP.
6110 An rtx for the computed value is returned. The value is never null.
6111 In the case of a void EXP, const0_rtx is returned.
6112
6113 The value may be stored in TARGET if TARGET is nonzero.
6114 TARGET is just a suggestion; callers must assume that
6115 the rtx returned may not be the same as TARGET.
6116
6117 If TARGET is CONST0_RTX, it means that the value will be ignored.
6118
6119 If TMODE is not VOIDmode, it suggests generating the
6120 result in mode TMODE. But this is done only when convenient.
6121 Otherwise, TMODE is ignored and the value generated in its natural mode.
6122 TMODE is just a suggestion; callers must assume that
6123 the rtx returned may not have mode TMODE.
6124
d6a5ac33
RK
6125 Note that TARGET may have neither TMODE nor MODE. In that case, it
6126 probably will not be used.
bbf6f052
RK
6127
6128 If MODIFIER is EXPAND_SUM then when EXP is an addition
6129 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6130 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6131 products as above, or REG or MEM, or constant.
6132 Ordinarily in such cases we would output mul or add instructions
6133 and then return a pseudo reg containing the sum.
6134
6135 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6136 it also marks a label as absolutely required (it can't be dead).
26fcb35a 6137 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
d6a5ac33
RK
6138 This is used for outputting expressions used in initializers.
6139
6140 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6141 with a constant address even if that address is not normally legitimate.
8403445a
AM
6142 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6143
6144 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6145 a call parameter. Such targets require special care as we haven't yet
6146 marked TARGET so that it's safe from being trashed by libcalls. We
6147 don't want to use TARGET for anything but the final result;
6148 Intermediate values must go elsewhere. Additionally, calls to
0fab64a3
MM
6149 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6150
6151 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6152 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6153 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6154 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6155 recursively. */
bbf6f052
RK
6156
6157rtx
0fab64a3
MM
6158expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6159 enum expand_modifier modifier, rtx *alt_rtl)
bbf6f052 6160{
b3694847 6161 rtx op0, op1, temp;
bbf6f052 6162 tree type = TREE_TYPE (exp);
8df83eae 6163 int unsignedp;
b3694847
SS
6164 enum machine_mode mode;
6165 enum tree_code code = TREE_CODE (exp);
bbf6f052 6166 optab this_optab;
68557e14
ML
6167 rtx subtarget, original_target;
6168 int ignore;
bbf6f052
RK
6169 tree context;
6170
3a94c984 6171 /* Handle ERROR_MARK before anybody tries to access its type. */
85f3d674 6172 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
68557e14
ML
6173 {
6174 op0 = CONST0_RTX (tmode);
6175 if (op0 != 0)
6176 return op0;
6177 return const0_rtx;
6178 }
6179
6180 mode = TYPE_MODE (type);
8df83eae
RK
6181 unsignedp = TYPE_UNSIGNED (type);
6182
68557e14 6183 /* Use subtarget as the target for operand 0 of a binary operation. */
296b4ed9 6184 subtarget = get_subtarget (target);
68557e14
ML
6185 original_target = target;
6186 ignore = (target == const0_rtx
6187 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6188 || code == CONVERT_EXPR || code == REFERENCE_EXPR
ac79cd5a 6189 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
68557e14
ML
6190 && TREE_CODE (type) == VOID_TYPE));
6191
dd27116b
RK
6192 /* If we are going to ignore this result, we need only do something
6193 if there is a side-effect somewhere in the expression. If there
b50d17a1
RK
6194 is, short-circuit the most common cases here. Note that we must
6195 not call expand_expr with anything but const0_rtx in case this
6196 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
bbf6f052 6197
dd27116b
RK
6198 if (ignore)
6199 {
6200 if (! TREE_SIDE_EFFECTS (exp))
6201 return const0_rtx;
6202
14a774a9
RK
6203 /* Ensure we reference a volatile object even if value is ignored, but
6204 don't do this if all we are doing is taking its address. */
dd27116b
RK
6205 if (TREE_THIS_VOLATILE (exp)
6206 && TREE_CODE (exp) != FUNCTION_DECL
14a774a9
RK
6207 && mode != VOIDmode && mode != BLKmode
6208 && modifier != EXPAND_CONST_ADDRESS)
dd27116b 6209 {
37a08a29 6210 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
dd27116b
RK
6211 if (GET_CODE (temp) == MEM)
6212 temp = copy_to_reg (temp);
6213 return const0_rtx;
6214 }
6215
14a774a9
RK
6216 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6217 || code == INDIRECT_REF || code == BUFFER_REF)
37a08a29
RK
6218 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6219 modifier);
6220
14a774a9 6221 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
b4e3fabb 6222 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
dd27116b 6223 {
37a08a29
RK
6224 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6225 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
dd27116b
RK
6226 return const0_rtx;
6227 }
6228 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6229 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6230 /* If the second operand has no side effects, just evaluate
0f41302f 6231 the first. */
37a08a29
RK
6232 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6233 modifier);
14a774a9
RK
6234 else if (code == BIT_FIELD_REF)
6235 {
37a08a29
RK
6236 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6237 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6238 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
14a774a9
RK
6239 return const0_rtx;
6240 }
37a08a29 6241
90764a87 6242 target = 0;
dd27116b 6243 }
bbf6f052 6244
e44842fe
RK
6245 /* If will do cse, generate all results into pseudo registers
6246 since 1) that allows cse to find more things
6247 and 2) otherwise cse could produce an insn the machine
4977bab6
ZW
6248 cannot support. An exception is a CONSTRUCTOR into a multi-word
6249 MEM: that's much more likely to be most efficient into the MEM.
6250 Another is a CALL_EXPR which must return in memory. */
e44842fe 6251
bbf6f052 6252 if (! cse_not_expected && mode != BLKmode && target
c24ae149 6253 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
4977bab6 6254 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
61f71b34 6255 && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
8403445a 6256 target = 0;
bbf6f052 6257
bbf6f052
RK
6258 switch (code)
6259 {
6260 case LABEL_DECL:
b552441b
RS
6261 {
6262 tree function = decl_function_context (exp);
046e4e36
ZW
6263 /* Labels in containing functions, or labels used from initializers,
6264 must be forced. */
6265 if (modifier == EXPAND_INITIALIZER
6266 || (function != current_function_decl
6267 && function != inline_function_decl
6268 && function != 0))
6269 temp = force_label_rtx (exp);
ab87f8c8 6270 else
046e4e36 6271 temp = label_rtx (exp);
c5c76735 6272
046e4e36 6273 temp = gen_rtx_MEM (FUNCTION_MODE, gen_rtx_LABEL_REF (Pmode, temp));
d0977240
RK
6274 if (function != current_function_decl
6275 && function != inline_function_decl && function != 0)
26fcb35a
RS
6276 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6277 return temp;
b552441b 6278 }
bbf6f052
RK
6279
6280 case PARM_DECL:
1877be45 6281 if (!DECL_RTL_SET_P (exp))
bbf6f052 6282 {
ddd2d57e 6283 error ("%Jprior parameter's size depends on '%D'", exp, exp);
4af3895e 6284 return CONST0_RTX (mode);
bbf6f052
RK
6285 }
6286
0f41302f 6287 /* ... fall through ... */
d6a5ac33 6288
bbf6f052 6289 case VAR_DECL:
2dca20cd
RS
6290 /* If a static var's type was incomplete when the decl was written,
6291 but the type is complete now, lay out the decl now. */
ca06cfe6
RH
6292 if (DECL_SIZE (exp) == 0
6293 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
2dca20cd 6294 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
a46666a9 6295 layout_decl (exp, 0);
921b3427 6296
0f41302f 6297 /* ... fall through ... */
d6a5ac33 6298
2dca20cd 6299 case FUNCTION_DECL:
bbf6f052
RK
6300 case RESULT_DECL:
6301 if (DECL_RTL (exp) == 0)
6302 abort ();
d6a5ac33 6303
e44842fe
RK
6304 /* Ensure variable marked as used even if it doesn't go through
6305 a parser. If it hasn't be used yet, write out an external
6306 definition. */
6307 if (! TREE_USED (exp))
6308 {
6309 assemble_external (exp);
6310 TREE_USED (exp) = 1;
6311 }
6312
dc6d66b3
RK
6313 /* Show we haven't gotten RTL for this yet. */
6314 temp = 0;
6315
bbf6f052
RK
6316 /* Handle variables inherited from containing functions. */
6317 context = decl_function_context (exp);
6318
6319 /* We treat inline_function_decl as an alias for the current function
6320 because that is the inline function whose vars, types, etc.
6321 are being merged into the current function.
6322 See expand_inline_function. */
d6a5ac33 6323
bbf6f052
RK
6324 if (context != 0 && context != current_function_decl
6325 && context != inline_function_decl
6326 /* If var is static, we don't need a static chain to access it. */
6327 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6328 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6329 {
6330 rtx addr;
6331
6332 /* Mark as non-local and addressable. */
81feeecb 6333 DECL_NONLOCAL (exp) = 1;
38ee6ed9
JM
6334 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6335 abort ();
ae2bcd98 6336 lang_hooks.mark_addressable (exp);
bbf6f052
RK
6337 if (GET_CODE (DECL_RTL (exp)) != MEM)
6338 abort ();
6339 addr = XEXP (DECL_RTL (exp), 0);
6340 if (GET_CODE (addr) == MEM)
792760b9
RK
6341 addr
6342 = replace_equiv_address (addr,
6343 fix_lexical_addr (XEXP (addr, 0), exp));
bbf6f052
RK
6344 else
6345 addr = fix_lexical_addr (addr, exp);
3bdf5ad1 6346
792760b9 6347 temp = replace_equiv_address (DECL_RTL (exp), addr);
bbf6f052 6348 }
4af3895e 6349
bbf6f052
RK
6350 /* This is the case of an array whose size is to be determined
6351 from its initializer, while the initializer is still being parsed.
6352 See expand_decl. */
d6a5ac33 6353
dc6d66b3
RK
6354 else if (GET_CODE (DECL_RTL (exp)) == MEM
6355 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
792760b9 6356 temp = validize_mem (DECL_RTL (exp));
d6a5ac33
RK
6357
6358 /* If DECL_RTL is memory, we are in the normal case and either
6359 the address is not valid or it is not a register and -fforce-addr
6360 is specified, get the address into a register. */
6361
dc6d66b3
RK
6362 else if (GET_CODE (DECL_RTL (exp)) == MEM
6363 && modifier != EXPAND_CONST_ADDRESS
6364 && modifier != EXPAND_SUM
6365 && modifier != EXPAND_INITIALIZER
6366 && (! memory_address_p (DECL_MODE (exp),
6367 XEXP (DECL_RTL (exp), 0))
6368 || (flag_force_addr
6369 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
0fab64a3
MM
6370 {
6371 if (alt_rtl)
6372 *alt_rtl = DECL_RTL (exp);
6373 temp = replace_equiv_address (DECL_RTL (exp),
6374 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6375 }
1499e0a8 6376
dc6d66b3 6377 /* If we got something, return it. But first, set the alignment
04956a1a 6378 if the address is a register. */
dc6d66b3
RK
6379 if (temp != 0)
6380 {
6381 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
bdb429a5 6382 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
dc6d66b3
RK
6383
6384 return temp;
6385 }
6386
1499e0a8
RK
6387 /* If the mode of DECL_RTL does not match that of the decl, it
6388 must be a promoted value. We return a SUBREG of the wanted mode,
6389 but mark it so that we know that it was already extended. */
6390
6391 if (GET_CODE (DECL_RTL (exp)) == REG
7254c5fa 6392 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
1499e0a8 6393 {
1499e0a8
RK
6394 /* Get the signedness used for this variable. Ensure we get the
6395 same mode we got when the variable was declared. */
78911e8b 6396 if (GET_MODE (DECL_RTL (exp))
0fb7aeda 6397 != promote_mode (type, DECL_MODE (exp), &unsignedp,
e8dcd824 6398 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
1499e0a8
RK
6399 abort ();
6400
ddef6bc7 6401 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
1499e0a8 6402 SUBREG_PROMOTED_VAR_P (temp) = 1;
7879b81e 6403 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
1499e0a8
RK
6404 return temp;
6405 }
6406
bbf6f052
RK
6407 return DECL_RTL (exp);
6408
6409 case INTEGER_CST:
d8a50944 6410 temp = immed_double_const (TREE_INT_CST_LOW (exp),
05bccae2 6411 TREE_INT_CST_HIGH (exp), mode);
bbf6f052 6412
d8a50944
RH
6413 /* ??? If overflow is set, fold will have done an incomplete job,
6414 which can result in (plus xx (const_int 0)), which can get
6415 simplified by validate_replace_rtx during virtual register
6416 instantiation, which can result in unrecognizable insns.
6417 Avoid this by forcing all overflows into registers. */
c2e9dc85
RH
6418 if (TREE_CONSTANT_OVERFLOW (exp)
6419 && modifier != EXPAND_INITIALIZER)
d8a50944
RH
6420 temp = force_reg (mode, temp);
6421
6422 return temp;
6423
d744e06e
AH
6424 case VECTOR_CST:
6425 return const_vector_from_tree (exp);
6426
bbf6f052 6427 case CONST_DECL:
8403445a 6428 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
bbf6f052
RK
6429
6430 case REAL_CST:
6431 /* If optimized, generate immediate CONST_DOUBLE
3a94c984
KH
6432 which will be turned into memory by reload if necessary.
6433
bbf6f052
RK
6434 We used to force a register so that loop.c could see it. But
6435 this does not allow gen_* patterns to perform optimizations with
6436 the constants. It also produces two insns in cases like "x = 1.0;".
6437 On most machines, floating-point constants are not permitted in
6438 many insns, so we'd end up copying it to a register in any case.
6439
6440 Now, we do the copying in expand_binop, if appropriate. */
5692c7bc
ZW
6441 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6442 TYPE_MODE (TREE_TYPE (exp)));
bbf6f052
RK
6443
6444 case COMPLEX_CST:
9ad58e09
RS
6445 /* Handle evaluating a complex constant in a CONCAT target. */
6446 if (original_target && GET_CODE (original_target) == CONCAT)
6447 {
6448 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6449 rtx rtarg, itarg;
6450
6451 rtarg = XEXP (original_target, 0);
6452 itarg = XEXP (original_target, 1);
6453
6454 /* Move the real and imaginary parts separately. */
6455 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6456 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6457
6458 if (op0 != rtarg)
6459 emit_move_insn (rtarg, op0);
6460 if (op1 != itarg)
6461 emit_move_insn (itarg, op1);
6462
6463 return original_target;
6464 }
6465
71c0e7fc 6466 /* ... fall through ... */
9ad58e09 6467
bbf6f052 6468 case STRING_CST:
afc6aaab 6469 temp = output_constant_def (exp, 1);
bbf6f052 6470
afc6aaab 6471 /* temp contains a constant address.
bbf6f052
RK
6472 On RISC machines where a constant address isn't valid,
6473 make some insns to get that address into a register. */
afc6aaab 6474 if (modifier != EXPAND_CONST_ADDRESS
bbf6f052
RK
6475 && modifier != EXPAND_INITIALIZER
6476 && modifier != EXPAND_SUM
afc6aaab
ZW
6477 && (! memory_address_p (mode, XEXP (temp, 0))
6478 || flag_force_addr))
6479 return replace_equiv_address (temp,
6480 copy_rtx (XEXP (temp, 0)));
6481 return temp;
bbf6f052 6482
bf1e5319 6483 case EXPR_WITH_FILE_LOCATION:
b24f65cd
APB
6484 {
6485 rtx to_return;
72954a4f
JM
6486 struct file_stack fs;
6487
6488 fs.location = input_location;
6489 fs.next = expr_wfl_stack;
b24f65cd 6490 input_filename = EXPR_WFL_FILENAME (exp);
d479d37f 6491 input_line = EXPR_WFL_LINENO (exp);
72954a4f 6492 expr_wfl_stack = &fs;
b24f65cd 6493 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
0cea056b 6494 emit_line_note (input_location);
6ad7895a 6495 /* Possibly avoid switching back and forth here. */
72954a4f
JM
6496 to_return = expand_expr (EXPR_WFL_NODE (exp),
6497 (ignore ? const0_rtx : target),
6498 tmode, modifier);
6499 if (expr_wfl_stack != &fs)
6500 abort ();
6501 input_location = fs.location;
6502 expr_wfl_stack = fs.next;
b24f65cd
APB
6503 return to_return;
6504 }
bf1e5319 6505
bbf6f052
RK
6506 case SAVE_EXPR:
6507 context = decl_function_context (exp);
d6a5ac33 6508
d0977240
RK
6509 /* If this SAVE_EXPR was at global context, assume we are an
6510 initialization function and move it into our context. */
6511 if (context == 0)
6512 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6513
bbf6f052
RK
6514 /* We treat inline_function_decl as an alias for the current function
6515 because that is the inline function whose vars, types, etc.
6516 are being merged into the current function.
6517 See expand_inline_function. */
6518 if (context == current_function_decl || context == inline_function_decl)
6519 context = 0;
6520
6521 /* If this is non-local, handle it. */
6522 if (context)
6523 {
d0977240
RK
6524 /* The following call just exists to abort if the context is
6525 not of a containing function. */
6526 find_function_data (context);
6527
bbf6f052
RK
6528 temp = SAVE_EXPR_RTL (exp);
6529 if (temp && GET_CODE (temp) == REG)
6530 {
f29a2bd1 6531 put_var_into_stack (exp, /*rescan=*/true);
bbf6f052
RK
6532 temp = SAVE_EXPR_RTL (exp);
6533 }
6534 if (temp == 0 || GET_CODE (temp) != MEM)
6535 abort ();
792760b9
RK
6536 return
6537 replace_equiv_address (temp,
6538 fix_lexical_addr (XEXP (temp, 0), exp));
bbf6f052
RK
6539 }
6540 if (SAVE_EXPR_RTL (exp) == 0)
6541 {
06089a8b
RK
6542 if (mode == VOIDmode)
6543 temp = const0_rtx;
6544 else
1da68f56
RK
6545 temp = assign_temp (build_qualified_type (type,
6546 (TYPE_QUALS (type)
6547 | TYPE_QUAL_CONST)),
6548 3, 0, 0);
1499e0a8 6549
bbf6f052 6550 SAVE_EXPR_RTL (exp) = temp;
bbf6f052 6551 if (!optimize && GET_CODE (temp) == REG)
38a448ca
RH
6552 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6553 save_expr_regs);
ff78f773
RK
6554
6555 /* If the mode of TEMP does not match that of the expression, it
6556 must be a promoted value. We pass store_expr a SUBREG of the
6557 wanted mode but mark it so that we know that it was already
3ac1a319 6558 extended. */
ff78f773
RK
6559
6560 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6561 {
ddef6bc7 6562 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
3ac1a319 6563 promote_mode (type, mode, &unsignedp, 0);
ff78f773 6564 SUBREG_PROMOTED_VAR_P (temp) = 1;
7879b81e 6565 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
ff78f773
RK
6566 }
6567
4c7a0be9 6568 if (temp == const0_rtx)
37a08a29 6569 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4c7a0be9 6570 else
8403445a
AM
6571 store_expr (TREE_OPERAND (exp, 0), temp,
6572 modifier == EXPAND_STACK_PARM ? 2 : 0);
e5e809f4
JL
6573
6574 TREE_USED (exp) = 1;
bbf6f052 6575 }
1499e0a8
RK
6576
6577 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6578 must be a promoted value. We return a SUBREG of the wanted mode,
0f41302f 6579 but mark it so that we know that it was already extended. */
1499e0a8
RK
6580
6581 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6582 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6583 {
e70d22c8
RK
6584 /* Compute the signedness and make the proper SUBREG. */
6585 promote_mode (type, mode, &unsignedp, 0);
ddef6bc7 6586 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
1499e0a8 6587 SUBREG_PROMOTED_VAR_P (temp) = 1;
7879b81e 6588 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
1499e0a8
RK
6589 return temp;
6590 }
6591
bbf6f052
RK
6592 return SAVE_EXPR_RTL (exp);
6593
679163cf
MS
6594 case UNSAVE_EXPR:
6595 {
6596 rtx temp;
6597 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
24965e7a 6598 TREE_OPERAND (exp, 0)
ae2bcd98 6599 = lang_hooks.unsave_expr_now (TREE_OPERAND (exp, 0));
679163cf
MS
6600 return temp;
6601 }
6602
70e6ca43
APB
6603 case GOTO_EXPR:
6604 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6605 expand_goto (TREE_OPERAND (exp, 0));
6606 else
6607 expand_computed_goto (TREE_OPERAND (exp, 0));
6608 return const0_rtx;
6609
bbf6f052 6610 case EXIT_EXPR:
df4ae160 6611 expand_exit_loop_if_false (NULL,
e44842fe 6612 invert_truthvalue (TREE_OPERAND (exp, 0)));
bbf6f052
RK
6613 return const0_rtx;
6614
f42e28dd
APB
6615 case LABELED_BLOCK_EXPR:
6616 if (LABELED_BLOCK_BODY (exp))
b0832fe1 6617 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
30f7a378 6618 /* Should perhaps use expand_label, but this is simpler and safer. */
0a5fee32 6619 do_pending_stack_adjust ();
f42e28dd
APB
6620 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6621 return const0_rtx;
6622
6623 case EXIT_BLOCK_EXPR:
6624 if (EXIT_BLOCK_RETURN (exp))
ab87f8c8 6625 sorry ("returned value in block_exit_expr");
f42e28dd
APB
6626 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6627 return const0_rtx;
6628
bbf6f052 6629 case LOOP_EXPR:
0088fcb1 6630 push_temp_slots ();
bbf6f052 6631 expand_start_loop (1);
b0832fe1 6632 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
bbf6f052 6633 expand_end_loop ();
0088fcb1 6634 pop_temp_slots ();
bbf6f052
RK
6635
6636 return const0_rtx;
6637
6638 case BIND_EXPR:
6639 {
6640 tree vars = TREE_OPERAND (exp, 0);
bbf6f052
RK
6641
6642 /* Need to open a binding contour here because
e976b8b2 6643 if there are any cleanups they must be contained here. */
8e91754e 6644 expand_start_bindings (2);
bbf6f052 6645
2df53c0b
RS
6646 /* Mark the corresponding BLOCK for output in its proper place. */
6647 if (TREE_OPERAND (exp, 2) != 0
6648 && ! TREE_USED (TREE_OPERAND (exp, 2)))
ae2bcd98 6649 lang_hooks.decls.insert_block (TREE_OPERAND (exp, 2));
bbf6f052
RK
6650
6651 /* If VARS have not yet been expanded, expand them now. */
6652 while (vars)
6653 {
19e7881c 6654 if (!DECL_RTL_SET_P (vars))
4977bab6 6655 expand_decl (vars);
bbf6f052
RK
6656 expand_decl_init (vars);
6657 vars = TREE_CHAIN (vars);
6658 }
6659
37a08a29 6660 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
bbf6f052
RK
6661
6662 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6663
6664 return temp;
6665 }
6666
6667 case RTL_EXPR:
83b853c9
JM
6668 if (RTL_EXPR_SEQUENCE (exp))
6669 {
6670 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6671 abort ();
2f937369 6672 emit_insn (RTL_EXPR_SEQUENCE (exp));
83b853c9
JM
6673 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6674 }
64dc53f3
MM
6675 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6676 free_temps_for_rtl_expr (exp);
0fab64a3
MM
6677 if (alt_rtl)
6678 *alt_rtl = RTL_EXPR_ALT_RTL (exp);
bbf6f052
RK
6679 return RTL_EXPR_RTL (exp);
6680
6681 case CONSTRUCTOR:
dd27116b
RK
6682 /* If we don't need the result, just ensure we evaluate any
6683 subexpressions. */
6684 if (ignore)
6685 {
6686 tree elt;
37a08a29 6687
dd27116b 6688 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
37a08a29
RK
6689 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6690
dd27116b
RK
6691 return const0_rtx;
6692 }
3207b172 6693
4af3895e
JVA
6694 /* All elts simple constants => refer to a constant in memory. But
6695 if this is a non-BLKmode mode, let it store a field at a time
6696 since that should make a CONST_INT or CONST_DOUBLE when we
3207b172 6697 fold. Likewise, if we have a target we can use, it is best to
d720b9d1
RK
6698 store directly into the target unless the type is large enough
6699 that memcpy will be used. If we are making an initializer and
00182e1e
AH
6700 all operands are constant, put it in memory as well.
6701
6702 FIXME: Avoid trying to fill vector constructors piece-meal.
6703 Output them with output_constant_def below unless we're sure
6704 they're zeros. This should go away when vector initializers
6705 are treated like VECTOR_CST instead of arrays.
6706 */
dd27116b 6707 else if ((TREE_STATIC (exp)
3207b172 6708 && ((mode == BLKmode
e5e809f4 6709 && ! (target != 0 && safe_from_p (target, exp, 1)))
d720b9d1 6710 || TREE_ADDRESSABLE (exp)
19caa751 6711 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
3a94c984 6712 && (! MOVE_BY_PIECES_P
19caa751
RK
6713 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6714 TYPE_ALIGN (type)))
0fb7aeda
KH
6715 && ((TREE_CODE (type) == VECTOR_TYPE
6716 && !is_zeros_p (exp))
6717 || ! mostly_zeros_p (exp)))))
f59700f9
RK
6718 || ((modifier == EXPAND_INITIALIZER
6719 || modifier == EXPAND_CONST_ADDRESS)
6720 && TREE_CONSTANT (exp)))
bbf6f052 6721 {
bd7cf17e 6722 rtx constructor = output_constant_def (exp, 1);
19caa751 6723
b552441b
RS
6724 if (modifier != EXPAND_CONST_ADDRESS
6725 && modifier != EXPAND_INITIALIZER
792760b9
RK
6726 && modifier != EXPAND_SUM)
6727 constructor = validize_mem (constructor);
6728
bbf6f052
RK
6729 return constructor;
6730 }
bbf6f052
RK
6731 else
6732 {
e9ac02a6
JW
6733 /* Handle calls that pass values in multiple non-contiguous
6734 locations. The Irix 6 ABI has examples of this. */
e5e809f4 6735 if (target == 0 || ! safe_from_p (target, exp, 1)
8403445a
AM
6736 || GET_CODE (target) == PARALLEL
6737 || modifier == EXPAND_STACK_PARM)
1da68f56
RK
6738 target
6739 = assign_temp (build_qualified_type (type,
6740 (TYPE_QUALS (type)
6741 | (TREE_READONLY (exp)
6742 * TYPE_QUAL_CONST))),
c24ae149 6743 0, TREE_ADDRESSABLE (exp), 1);
07604beb 6744
de8920be 6745 store_constructor (exp, target, 0, int_expr_size (exp));
bbf6f052
RK
6746 return target;
6747 }
6748
6749 case INDIRECT_REF:
6750 {
6751 tree exp1 = TREE_OPERAND (exp, 0);
7581a30f 6752 tree index;
3a94c984
KH
6753 tree string = string_constant (exp1, &index);
6754
06eaa86f 6755 /* Try to optimize reads from const strings. */
0fb7aeda
KH
6756 if (string
6757 && TREE_CODE (string) == STRING_CST
6758 && TREE_CODE (index) == INTEGER_CST
05bccae2 6759 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
0fb7aeda
KH
6760 && GET_MODE_CLASS (mode) == MODE_INT
6761 && GET_MODE_SIZE (mode) == 1
37a08a29 6762 && modifier != EXPAND_WRITE)
0fb7aeda 6763 return gen_int_mode (TREE_STRING_POINTER (string)
21ef78aa 6764 [TREE_INT_CST_LOW (index)], mode);
bbf6f052 6765
405f0da6
JW
6766 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6767 op0 = memory_address (mode, op0);
38a448ca 6768 temp = gen_rtx_MEM (mode, op0);
3bdf5ad1 6769 set_mem_attributes (temp, exp, 0);
1125706f 6770
14a774a9
RK
6771 /* If we are writing to this object and its type is a record with
6772 readonly fields, we must mark it as readonly so it will
6773 conflict with readonly references to those fields. */
37a08a29 6774 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
14a774a9
RK
6775 RTX_UNCHANGING_P (temp) = 1;
6776
8c8a8e34
JW
6777 return temp;
6778 }
bbf6f052
RK
6779
6780 case ARRAY_REF:
742920c7
RK
6781 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6782 abort ();
bbf6f052 6783
bbf6f052 6784 {
742920c7
RK
6785 tree array = TREE_OPERAND (exp, 0);
6786 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6787 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
fed3cef0 6788 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
08293add 6789 HOST_WIDE_INT i;
b50d17a1 6790
d4c89139
PB
6791 /* Optimize the special-case of a zero lower bound.
6792
6793 We convert the low_bound to sizetype to avoid some problems
6794 with constant folding. (E.g. suppose the lower bound is 1,
6795 and its mode is QI. Without the conversion, (ARRAY
6796 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
fed3cef0 6797 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
d4c89139 6798
742920c7 6799 if (! integer_zerop (low_bound))
fed3cef0 6800 index = size_diffop (index, convert (sizetype, low_bound));
742920c7 6801
742920c7 6802 /* Fold an expression like: "foo"[2].
ad2e7dd0
RK
6803 This is not done in fold so it won't happen inside &.
6804 Don't fold if this is for wide characters since it's too
6805 difficult to do correctly and this is a very rare case. */
742920c7 6806
017e1b43
RH
6807 if (modifier != EXPAND_CONST_ADDRESS
6808 && modifier != EXPAND_INITIALIZER
6809 && modifier != EXPAND_MEMORY
cb5fa0f8 6810 && TREE_CODE (array) == STRING_CST
742920c7 6811 && TREE_CODE (index) == INTEGER_CST
05bccae2 6812 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
ad2e7dd0
RK
6813 && GET_MODE_CLASS (mode) == MODE_INT
6814 && GET_MODE_SIZE (mode) == 1)
21ef78aa
DE
6815 return gen_int_mode (TREE_STRING_POINTER (array)
6816 [TREE_INT_CST_LOW (index)], mode);
bbf6f052 6817
742920c7
RK
6818 /* If this is a constant index into a constant array,
6819 just get the value from the array. Handle both the cases when
6820 we have an explicit constructor and when our operand is a variable
6821 that was declared const. */
4af3895e 6822
017e1b43
RH
6823 if (modifier != EXPAND_CONST_ADDRESS
6824 && modifier != EXPAND_INITIALIZER
6825 && modifier != EXPAND_MEMORY
6826 && TREE_CODE (array) == CONSTRUCTOR
6827 && ! TREE_SIDE_EFFECTS (array)
05bccae2 6828 && TREE_CODE (index) == INTEGER_CST
3a94c984 6829 && 0 > compare_tree_int (index,
05bccae2
RK
6830 list_length (CONSTRUCTOR_ELTS
6831 (TREE_OPERAND (exp, 0)))))
742920c7 6832 {
05bccae2
RK
6833 tree elem;
6834
6835 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6836 i = TREE_INT_CST_LOW (index);
6837 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6838 ;
6839
6840 if (elem)
37a08a29
RK
6841 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6842 modifier);
742920c7 6843 }
3a94c984 6844
742920c7 6845 else if (optimize >= 1
cb5fa0f8
RK
6846 && modifier != EXPAND_CONST_ADDRESS
6847 && modifier != EXPAND_INITIALIZER
017e1b43 6848 && modifier != EXPAND_MEMORY
742920c7
RK
6849 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6850 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
beb0c2e0
RH
6851 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
6852 && targetm.binds_local_p (array))
742920c7 6853 {
08293add 6854 if (TREE_CODE (index) == INTEGER_CST)
742920c7
RK
6855 {
6856 tree init = DECL_INITIAL (array);
6857
742920c7
RK
6858 if (TREE_CODE (init) == CONSTRUCTOR)
6859 {
665f2503 6860 tree elem;
742920c7 6861
05bccae2 6862 for (elem = CONSTRUCTOR_ELTS (init);
5cb1bea4
JM
6863 (elem
6864 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
05bccae2
RK
6865 elem = TREE_CHAIN (elem))
6866 ;
6867
c54b0a5e 6868 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
742920c7 6869 return expand_expr (fold (TREE_VALUE (elem)), target,
37a08a29 6870 tmode, modifier);
742920c7
RK
6871 }
6872 else if (TREE_CODE (init) == STRING_CST
05bccae2
RK
6873 && 0 > compare_tree_int (index,
6874 TREE_STRING_LENGTH (init)))
5c80f6e6
JJ
6875 {
6876 tree type = TREE_TYPE (TREE_TYPE (init));
6877 enum machine_mode mode = TYPE_MODE (type);
6878
6879 if (GET_MODE_CLASS (mode) == MODE_INT
6880 && GET_MODE_SIZE (mode) == 1)
21ef78aa
DE
6881 return gen_int_mode (TREE_STRING_POINTER (init)
6882 [TREE_INT_CST_LOW (index)], mode);
5c80f6e6 6883 }
742920c7
RK
6884 }
6885 }
6886 }
afc6aaab 6887 goto normal_inner_ref;
bbf6f052
RK
6888
6889 case COMPONENT_REF:
4af3895e 6890 /* If the operand is a CONSTRUCTOR, we can just extract the
afc6aaab
ZW
6891 appropriate field if it is present. */
6892 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
4af3895e
JVA
6893 {
6894 tree elt;
6895
6896 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6897 elt = TREE_CHAIN (elt))
86b5812c
RK
6898 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6899 /* We can normally use the value of the field in the
6900 CONSTRUCTOR. However, if this is a bitfield in
6901 an integral mode that we can fit in a HOST_WIDE_INT,
6902 we must mask only the number of bits in the bitfield,
6903 since this is done implicitly by the constructor. If
6904 the bitfield does not meet either of those conditions,
6905 we can't do this optimization. */
6906 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6907 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6908 == MODE_INT)
6909 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6910 <= HOST_BITS_PER_WIDE_INT))))
6911 {
8403445a
AM
6912 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
6913 && modifier == EXPAND_STACK_PARM)
6914 target = 0;
3a94c984 6915 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
86b5812c
RK
6916 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6917 {
9df2c88c
RK
6918 HOST_WIDE_INT bitsize
6919 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
22273300
JJ
6920 enum machine_mode imode
6921 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
86b5812c 6922
8df83eae 6923 if (TYPE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
86b5812c
RK
6924 {
6925 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
22273300 6926 op0 = expand_and (imode, op0, op1, target);
86b5812c
RK
6927 }
6928 else
6929 {
6930 tree count
e5e809f4
JL
6931 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6932 0);
86b5812c
RK
6933
6934 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6935 target, 0);
6936 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6937 target, 0);
6938 }
6939 }
6940
6941 return op0;
6942 }
4af3895e 6943 }
afc6aaab 6944 goto normal_inner_ref;
4af3895e 6945
afc6aaab
ZW
6946 case BIT_FIELD_REF:
6947 case ARRAY_RANGE_REF:
6948 normal_inner_ref:
bbf6f052
RK
6949 {
6950 enum machine_mode mode1;
770ae6cc 6951 HOST_WIDE_INT bitsize, bitpos;
7bb0943f 6952 tree offset;
bbf6f052 6953 int volatilep = 0;
839c4796 6954 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
a06ef755 6955 &mode1, &unsignedp, &volatilep);
f47e9b4e 6956 rtx orig_op0;
bbf6f052 6957
e7f3c83f
RK
6958 /* If we got back the original object, something is wrong. Perhaps
6959 we are evaluating an expression too early. In any event, don't
6960 infinitely recurse. */
6961 if (tem == exp)
6962 abort ();
6963
3d27140a 6964 /* If TEM's type is a union of variable size, pass TARGET to the inner
b74f5ff2
RK
6965 computation, since it will need a temporary and TARGET is known
6966 to have to do. This occurs in unchecked conversion in Ada. */
3a94c984 6967
f47e9b4e
RK
6968 orig_op0 = op0
6969 = expand_expr (tem,
6970 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6971 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6972 != INTEGER_CST)
8403445a 6973 && modifier != EXPAND_STACK_PARM
f47e9b4e
RK
6974 ? target : NULL_RTX),
6975 VOIDmode,
6976 (modifier == EXPAND_INITIALIZER
8403445a
AM
6977 || modifier == EXPAND_CONST_ADDRESS
6978 || modifier == EXPAND_STACK_PARM)
f47e9b4e 6979 ? modifier : EXPAND_NORMAL);
bbf6f052 6980
8c8a8e34 6981 /* If this is a constant, put it into a register if it is a
14a774a9 6982 legitimate constant and OFFSET is 0 and memory if it isn't. */
8c8a8e34
JW
6983 if (CONSTANT_P (op0))
6984 {
6985 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
14a774a9
RK
6986 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6987 && offset == 0)
8c8a8e34
JW
6988 op0 = force_reg (mode, op0);
6989 else
6990 op0 = validize_mem (force_const_mem (mode, op0));
6991 }
6992
8d2e5f72
RK
6993 /* Otherwise, if this object not in memory and we either have an
6994 offset or a BLKmode result, put it there. This case can't occur in
6995 C, but can in Ada if we have unchecked conversion of an expression
6996 from a scalar type to an array or record type or for an
6997 ARRAY_RANGE_REF whose type is BLKmode. */
6998 else if (GET_CODE (op0) != MEM
6999 && (offset != 0
7000 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7001 {
7002 /* If the operand is a SAVE_EXPR, we can deal with this by
7003 forcing the SAVE_EXPR into memory. */
7004 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7005 {
7006 put_var_into_stack (TREE_OPERAND (exp, 0),
7007 /*rescan=*/true);
7008 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7009 }
7010 else
7011 {
7012 tree nt
7013 = build_qualified_type (TREE_TYPE (tem),
7014 (TYPE_QUALS (TREE_TYPE (tem))
7015 | TYPE_QUAL_CONST));
7016 rtx memloc = assign_temp (nt, 1, 1, 1);
450b1728 7017
8d2e5f72
RK
7018 emit_move_insn (memloc, op0);
7019 op0 = memloc;
7020 }
7021 }
7022
7bb0943f
RS
7023 if (offset != 0)
7024 {
8403445a
AM
7025 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7026 EXPAND_SUM);
7bb0943f
RS
7027
7028 if (GET_CODE (op0) != MEM)
7029 abort ();
2d48c13d 7030
2d48c13d 7031#ifdef POINTERS_EXTEND_UNSIGNED
4b6c1672 7032 if (GET_MODE (offset_rtx) != Pmode)
267b28bd 7033 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
fa06ab5c
RK
7034#else
7035 if (GET_MODE (offset_rtx) != ptr_mode)
7036 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
2d48c13d
JL
7037#endif
7038
e82407b5
EB
7039 if (GET_MODE (op0) == BLKmode
7040 /* A constant address in OP0 can have VOIDmode, we must
7041 not try to call force_reg in that case. */
efd07ca7 7042 && GET_MODE (XEXP (op0, 0)) != VOIDmode
14a774a9 7043 && bitsize != 0
3a94c984 7044 && (bitpos % bitsize) == 0
89752202 7045 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
a06ef755 7046 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
89752202 7047 {
e3c8ea67 7048 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
89752202
HB
7049 bitpos = 0;
7050 }
7051
0d4903b8
RK
7052 op0 = offset_address (op0, offset_rtx,
7053 highest_pow2_factor (offset));
7bb0943f
RS
7054 }
7055
1ce7f3c2
RK
7056 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7057 record its alignment as BIGGEST_ALIGNMENT. */
7058 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7059 && is_aligning_offset (offset, tem))
7060 set_mem_align (op0, BIGGEST_ALIGNMENT);
7061
bbf6f052
RK
7062 /* Don't forget about volatility even if this is a bitfield. */
7063 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7064 {
f47e9b4e
RK
7065 if (op0 == orig_op0)
7066 op0 = copy_rtx (op0);
7067
bbf6f052
RK
7068 MEM_VOLATILE_P (op0) = 1;
7069 }
7070
010f87c4
JJ
7071 /* The following code doesn't handle CONCAT.
7072 Assume only bitpos == 0 can be used for CONCAT, due to
7073 one element arrays having the same mode as its element. */
7074 if (GET_CODE (op0) == CONCAT)
7075 {
7076 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7077 abort ();
7078 return op0;
7079 }
7080
ccc98036
RS
7081 /* In cases where an aligned union has an unaligned object
7082 as a field, we might be extracting a BLKmode value from
7083 an integer-mode (e.g., SImode) object. Handle this case
7084 by doing the extract into an object as wide as the field
7085 (which we know to be the width of a basic mode), then
cb5fa0f8 7086 storing into memory, and changing the mode to BLKmode. */
bbf6f052 7087 if (mode1 == VOIDmode
ccc98036 7088 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
cb5fa0f8
RK
7089 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7090 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
10c2a453
RK
7091 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7092 && modifier != EXPAND_CONST_ADDRESS
7093 && modifier != EXPAND_INITIALIZER)
cb5fa0f8
RK
7094 /* If the field isn't aligned enough to fetch as a memref,
7095 fetch it as a bit field. */
7096 || (mode1 != BLKmode
9e5f281f 7097 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
e82407b5
EB
7098 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7099 || (GET_CODE (op0) == MEM
7100 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7101 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
a8f3bf52
RK
7102 && ((modifier == EXPAND_CONST_ADDRESS
7103 || modifier == EXPAND_INITIALIZER)
7104 ? STRICT_ALIGNMENT
7105 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
9e5f281f 7106 || (bitpos % BITS_PER_UNIT != 0)))
cb5fa0f8
RK
7107 /* If the type and the field are a constant size and the
7108 size of the type isn't the same size as the bitfield,
7109 we must use bitfield operations. */
7110 || (bitsize >= 0
7111 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7112 == INTEGER_CST)
7113 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
a06ef755 7114 bitsize)))
bbf6f052 7115 {
bbf6f052
RK
7116 enum machine_mode ext_mode = mode;
7117
14a774a9
RK
7118 if (ext_mode == BLKmode
7119 && ! (target != 0 && GET_CODE (op0) == MEM
7120 && GET_CODE (target) == MEM
7121 && bitpos % BITS_PER_UNIT == 0))
bbf6f052
RK
7122 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7123
7124 if (ext_mode == BLKmode)
a281e72d 7125 {
7a06d606
RK
7126 if (target == 0)
7127 target = assign_temp (type, 0, 1, 1);
7128
7129 if (bitsize == 0)
7130 return target;
7131
a281e72d
RK
7132 /* In this case, BITPOS must start at a byte boundary and
7133 TARGET, if specified, must be a MEM. */
7134 if (GET_CODE (op0) != MEM
7135 || (target != 0 && GET_CODE (target) != MEM)
7136 || bitpos % BITS_PER_UNIT != 0)
7137 abort ();
7138
7a06d606
RK
7139 emit_block_move (target,
7140 adjust_address (op0, VOIDmode,
7141 bitpos / BITS_PER_UNIT),
a06ef755 7142 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
44bb111a 7143 / BITS_PER_UNIT),
8403445a
AM
7144 (modifier == EXPAND_STACK_PARM
7145 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
3a94c984 7146
a281e72d
RK
7147 return target;
7148 }
bbf6f052 7149
dc6d66b3
RK
7150 op0 = validize_mem (op0);
7151
7152 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
04050c69 7153 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
dc6d66b3 7154
8403445a
AM
7155 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7156 (modifier == EXPAND_STACK_PARM
7157 ? NULL_RTX : target),
7158 ext_mode, ext_mode,
bbf6f052 7159 int_size_in_bytes (TREE_TYPE (tem)));
ef19912d
RK
7160
7161 /* If the result is a record type and BITSIZE is narrower than
7162 the mode of OP0, an integral mode, and this is a big endian
7163 machine, we must put the field into the high-order bits. */
7164 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7165 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
65a07688 7166 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
ef19912d
RK
7167 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7168 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7169 - bitsize),
7170 op0, 1);
7171
daae8185
RK
7172 /* If the result type is BLKmode, store the data into a temporary
7173 of the appropriate type, but with the mode corresponding to the
7174 mode for the data we have (op0's mode). It's tempting to make
7175 this a constant type, since we know it's only being stored once,
7176 but that can cause problems if we are taking the address of this
7177 COMPONENT_REF because the MEM of any reference via that address
7178 will have flags corresponding to the type, which will not
7179 necessarily be constant. */
bbf6f052
RK
7180 if (mode == BLKmode)
7181 {
daae8185
RK
7182 rtx new
7183 = assign_stack_temp_for_type
7184 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
bbf6f052
RK
7185
7186 emit_move_insn (new, op0);
7187 op0 = copy_rtx (new);
7188 PUT_MODE (op0, BLKmode);
c3d32120 7189 set_mem_attributes (op0, exp, 1);
bbf6f052
RK
7190 }
7191
7192 return op0;
7193 }
7194
05019f83
RK
7195 /* If the result is BLKmode, use that to access the object
7196 now as well. */
7197 if (mode == BLKmode)
7198 mode1 = BLKmode;
7199
bbf6f052
RK
7200 /* Get a reference to just this component. */
7201 if (modifier == EXPAND_CONST_ADDRESS
7202 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
f1ec5147 7203 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
bbf6f052 7204 else
f4ef873c 7205 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
41472af8 7206
f47e9b4e
RK
7207 if (op0 == orig_op0)
7208 op0 = copy_rtx (op0);
7209
3bdf5ad1 7210 set_mem_attributes (op0, exp, 0);
dc6d66b3 7211 if (GET_CODE (XEXP (op0, 0)) == REG)
a06ef755 7212 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
dc6d66b3 7213
bbf6f052 7214 MEM_VOLATILE_P (op0) |= volatilep;
0d15e60c 7215 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
08bbd316 7216 || modifier == EXPAND_CONST_ADDRESS
0d15e60c 7217 || modifier == EXPAND_INITIALIZER)
bbf6f052 7218 return op0;
0d15e60c 7219 else if (target == 0)
bbf6f052 7220 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
0d15e60c 7221
bbf6f052
RK
7222 convert_move (target, op0, unsignedp);
7223 return target;
7224 }
7225
4a8d0c9c
RH
7226 case VTABLE_REF:
7227 {
7228 rtx insn, before = get_last_insn (), vtbl_ref;
7229
7230 /* Evaluate the interior expression. */
7231 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7232 tmode, modifier);
7233
7234 /* Get or create an instruction off which to hang a note. */
7235 if (REG_P (subtarget))
7236 {
7237 target = subtarget;
7238 insn = get_last_insn ();
7239 if (insn == before)
7240 abort ();
7241 if (! INSN_P (insn))
7242 insn = prev_nonnote_insn (insn);
7243 }
7244 else
7245 {
7246 target = gen_reg_rtx (GET_MODE (subtarget));
7247 insn = emit_move_insn (target, subtarget);
7248 }
7249
7250 /* Collect the data for the note. */
7251 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7252 vtbl_ref = plus_constant (vtbl_ref,
7253 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7254 /* Discard the initial CONST that was added. */
7255 vtbl_ref = XEXP (vtbl_ref, 0);
7256
7257 REG_NOTES (insn)
7258 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7259
7260 return target;
7261 }
7262
bbf6f052
RK
7263 /* Intended for a reference to a buffer of a file-object in Pascal.
7264 But it's not certain that a special tree code will really be
7265 necessary for these. INDIRECT_REF might work for them. */
7266 case BUFFER_REF:
7267 abort ();
7268
7308a047 7269 case IN_EXPR:
7308a047 7270 {
d6a5ac33
RK
7271 /* Pascal set IN expression.
7272
7273 Algorithm:
7274 rlo = set_low - (set_low%bits_per_word);
7275 the_word = set [ (index - rlo)/bits_per_word ];
7276 bit_index = index % bits_per_word;
7277 bitmask = 1 << bit_index;
7278 return !!(the_word & bitmask); */
7279
7308a047
RS
7280 tree set = TREE_OPERAND (exp, 0);
7281 tree index = TREE_OPERAND (exp, 1);
8df83eae 7282 int iunsignedp = TYPE_UNSIGNED (TREE_TYPE (index));
7308a047 7283 tree set_type = TREE_TYPE (set);
7308a047
RS
7284 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7285 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
d6a5ac33
RK
7286 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7287 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7288 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7289 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7290 rtx setaddr = XEXP (setval, 0);
7291 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7308a047
RS
7292 rtx rlow;
7293 rtx diff, quo, rem, addr, bit, result;
7308a047 7294
d6a5ac33
RK
7295 /* If domain is empty, answer is no. Likewise if index is constant
7296 and out of bounds. */
51723711 7297 if (((TREE_CODE (set_high_bound) == INTEGER_CST
d6a5ac33 7298 && TREE_CODE (set_low_bound) == INTEGER_CST
51723711 7299 && tree_int_cst_lt (set_high_bound, set_low_bound))
d6a5ac33
RK
7300 || (TREE_CODE (index) == INTEGER_CST
7301 && TREE_CODE (set_low_bound) == INTEGER_CST
7302 && tree_int_cst_lt (index, set_low_bound))
7303 || (TREE_CODE (set_high_bound) == INTEGER_CST
7304 && TREE_CODE (index) == INTEGER_CST
7305 && tree_int_cst_lt (set_high_bound, index))))
7308a047
RS
7306 return const0_rtx;
7307
d6a5ac33
RK
7308 if (target == 0)
7309 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7308a047
RS
7310
7311 /* If we get here, we have to generate the code for both cases
7312 (in range and out of range). */
7313
7314 op0 = gen_label_rtx ();
7315 op1 = gen_label_rtx ();
7316
7317 if (! (GET_CODE (index_val) == CONST_INT
7318 && GET_CODE (lo_r) == CONST_INT))
a06ef755
RK
7319 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7320 GET_MODE (index_val), iunsignedp, op1);
7308a047
RS
7321
7322 if (! (GET_CODE (index_val) == CONST_INT
7323 && GET_CODE (hi_r) == CONST_INT))
a06ef755
RK
7324 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7325 GET_MODE (index_val), iunsignedp, op1);
7308a047
RS
7326
7327 /* Calculate the element number of bit zero in the first word
7328 of the set. */
7329 if (GET_CODE (lo_r) == CONST_INT)
17938e57 7330 rlow = GEN_INT (INTVAL (lo_r)
3a94c984 7331 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7308a047 7332 else
17938e57
RK
7333 rlow = expand_binop (index_mode, and_optab, lo_r,
7334 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
d6a5ac33 7335 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7308a047 7336
d6a5ac33
RK
7337 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7338 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7308a047
RS
7339
7340 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
d6a5ac33 7341 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7308a047 7342 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
d6a5ac33
RK
7343 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7344
7308a047 7345 addr = memory_address (byte_mode,
d6a5ac33
RK
7346 expand_binop (index_mode, add_optab, diff,
7347 setaddr, NULL_RTX, iunsignedp,
17938e57 7348 OPTAB_LIB_WIDEN));
d6a5ac33 7349
3a94c984 7350 /* Extract the bit we want to examine. */
7308a047 7351 bit = expand_shift (RSHIFT_EXPR, byte_mode,
38a448ca 7352 gen_rtx_MEM (byte_mode, addr),
17938e57
RK
7353 make_tree (TREE_TYPE (index), rem),
7354 NULL_RTX, 1);
7355 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7356 GET_MODE (target) == byte_mode ? target : 0,
7308a047 7357 1, OPTAB_LIB_WIDEN);
17938e57
RK
7358
7359 if (result != target)
7360 convert_move (target, result, 1);
7308a047
RS
7361
7362 /* Output the code to handle the out-of-range case. */
7363 emit_jump (op0);
7364 emit_label (op1);
7365 emit_move_insn (target, const0_rtx);
7366 emit_label (op0);
7367 return target;
7368 }
7369
bbf6f052 7370 case WITH_CLEANUP_EXPR:
6ad7895a 7371 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
bbf6f052 7372 {
6ad7895a 7373 WITH_CLEANUP_EXPR_RTL (exp)
37a08a29 7374 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
659e5a7a
JM
7375 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7376 CLEANUP_EH_ONLY (exp));
e976b8b2 7377
bbf6f052 7378 /* That's it for this cleanup. */
6ad7895a 7379 TREE_OPERAND (exp, 1) = 0;
bbf6f052 7380 }
6ad7895a 7381 return WITH_CLEANUP_EXPR_RTL (exp);
bbf6f052 7382
5dab5552
MS
7383 case CLEANUP_POINT_EXPR:
7384 {
e976b8b2
MS
7385 /* Start a new binding layer that will keep track of all cleanup
7386 actions to be performed. */
8e91754e 7387 expand_start_bindings (2);
e976b8b2 7388
d93d4205 7389 target_temp_slot_level = temp_slot_level;
e976b8b2 7390
37a08a29 7391 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
f283f66b
JM
7392 /* If we're going to use this value, load it up now. */
7393 if (! ignore)
7394 op0 = force_not_mem (op0);
d93d4205 7395 preserve_temp_slots (op0);
e976b8b2 7396 expand_end_bindings (NULL_TREE, 0, 0);
5dab5552
MS
7397 }
7398 return op0;
7399
bbf6f052
RK
7400 case CALL_EXPR:
7401 /* Check for a built-in function. */
7402 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
d6a5ac33
RK
7403 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7404 == FUNCTION_DECL)
bbf6f052 7405 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
0fb7aeda 7406 {
c70eaeaf
KG
7407 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7408 == BUILT_IN_FRONTEND)
ae2bcd98 7409 /* ??? Use (*fun) form because expand_expr is a macro. */
8403445a 7410 return (*lang_hooks.expand_expr) (exp, original_target,
0fab64a3
MM
7411 tmode, modifier,
7412 alt_rtl);
c70eaeaf
KG
7413 else
7414 return expand_builtin (exp, target, subtarget, tmode, ignore);
7415 }
d6a5ac33 7416
8129842c 7417 return expand_call (exp, target, ignore);
bbf6f052
RK
7418
7419 case NON_LVALUE_EXPR:
7420 case NOP_EXPR:
7421 case CONVERT_EXPR:
7422 case REFERENCE_EXPR:
4a53008b 7423 if (TREE_OPERAND (exp, 0) == error_mark_node)
a592f288 7424 return const0_rtx;
4a53008b 7425
bbf6f052
RK
7426 if (TREE_CODE (type) == UNION_TYPE)
7427 {
7428 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
14a774a9 7429
c3d32120
RK
7430 /* If both input and output are BLKmode, this conversion isn't doing
7431 anything except possibly changing memory attribute. */
7432 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7433 {
7434 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7435 modifier);
7436
7437 result = copy_rtx (result);
7438 set_mem_attributes (result, exp, 0);
7439 return result;
7440 }
14a774a9 7441
bbf6f052 7442 if (target == 0)
cf7cb67e
JH
7443 {
7444 if (TYPE_MODE (type) != BLKmode)
7445 target = gen_reg_rtx (TYPE_MODE (type));
7446 else
7447 target = assign_temp (type, 0, 1, 1);
7448 }
d6a5ac33 7449
bbf6f052
RK
7450 if (GET_CODE (target) == MEM)
7451 /* Store data into beginning of memory target. */
7452 store_expr (TREE_OPERAND (exp, 0),
8403445a
AM
7453 adjust_address (target, TYPE_MODE (valtype), 0),
7454 modifier == EXPAND_STACK_PARM ? 2 : 0);
1499e0a8 7455
bbf6f052
RK
7456 else if (GET_CODE (target) == REG)
7457 /* Store this field into a union of the proper type. */
14a774a9
RK
7458 store_field (target,
7459 MIN ((int_size_in_bytes (TREE_TYPE
7460 (TREE_OPERAND (exp, 0)))
7461 * BITS_PER_UNIT),
8752c357 7462 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
14a774a9 7463 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
a06ef755 7464 VOIDmode, 0, type, 0);
bbf6f052
RK
7465 else
7466 abort ();
7467
7468 /* Return the entire union. */
7469 return target;
7470 }
d6a5ac33 7471
7f62854a
RK
7472 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7473 {
7474 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
37a08a29 7475 modifier);
7f62854a
RK
7476
7477 /* If the signedness of the conversion differs and OP0 is
7478 a promoted SUBREG, clear that indication since we now
7479 have to do the proper extension. */
8df83eae 7480 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7f62854a
RK
7481 && GET_CODE (op0) == SUBREG)
7482 SUBREG_PROMOTED_VAR_P (op0) = 0;
7483
7484 return op0;
7485 }
7486
fdf473ae 7487 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
12342f90
RS
7488 if (GET_MODE (op0) == mode)
7489 return op0;
12342f90 7490
d6a5ac33
RK
7491 /* If OP0 is a constant, just convert it into the proper mode. */
7492 if (CONSTANT_P (op0))
fdf473ae
RH
7493 {
7494 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7495 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7496
0fb7aeda 7497 if (modifier == EXPAND_INITIALIZER)
fdf473ae
RH
7498 return simplify_gen_subreg (mode, op0, inner_mode,
7499 subreg_lowpart_offset (mode,
7500 inner_mode));
7501 else
7502 return convert_modes (mode, inner_mode, op0,
8df83eae 7503 TYPE_UNSIGNED (inner_type));
fdf473ae 7504 }
12342f90 7505
26fcb35a 7506 if (modifier == EXPAND_INITIALIZER)
38a448ca 7507 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
d6a5ac33 7508
bbf6f052 7509 if (target == 0)
d6a5ac33
RK
7510 return
7511 convert_to_mode (mode, op0,
8df83eae 7512 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
bbf6f052 7513 else
d6a5ac33 7514 convert_move (target, op0,
8df83eae 7515 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
bbf6f052
RK
7516 return target;
7517
ed239f5a 7518 case VIEW_CONVERT_EXPR:
37a08a29 7519 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
ed239f5a
RK
7520
7521 /* If the input and output modes are both the same, we are done.
13cf99ec
RK
7522 Otherwise, if neither mode is BLKmode and both are integral and within
7523 a word, we can use gen_lowpart. If neither is true, make sure the
7524 operand is in memory and convert the MEM to the new mode. */
ed239f5a
RK
7525 if (TYPE_MODE (type) == GET_MODE (op0))
7526 ;
7527 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
13cf99ec
RK
7528 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7529 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
ed239f5a
RK
7530 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7531 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7532 op0 = gen_lowpart (TYPE_MODE (type), op0);
c11c10d8 7533 else if (GET_CODE (op0) != MEM)
ed239f5a 7534 {
c11c10d8
RK
7535 /* If the operand is not a MEM, force it into memory. Since we
7536 are going to be be changing the mode of the MEM, don't call
7537 force_const_mem for constants because we don't allow pool
7538 constants to change mode. */
ed239f5a 7539 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
ed239f5a 7540
c11c10d8
RK
7541 if (TREE_ADDRESSABLE (exp))
7542 abort ();
ed239f5a 7543
c11c10d8
RK
7544 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7545 target
7546 = assign_stack_temp_for_type
7547 (TYPE_MODE (inner_type),
7548 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
ed239f5a 7549
c11c10d8
RK
7550 emit_move_insn (target, op0);
7551 op0 = target;
ed239f5a
RK
7552 }
7553
c11c10d8
RK
7554 /* At this point, OP0 is in the correct mode. If the output type is such
7555 that the operand is known to be aligned, indicate that it is.
7556 Otherwise, we need only be concerned about alignment for non-BLKmode
7557 results. */
ed239f5a
RK
7558 if (GET_CODE (op0) == MEM)
7559 {
7560 op0 = copy_rtx (op0);
7561
ed239f5a
RK
7562 if (TYPE_ALIGN_OK (type))
7563 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7564 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7565 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7566 {
7567 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
65a07688
RK
7568 HOST_WIDE_INT temp_size
7569 = MAX (int_size_in_bytes (inner_type),
7570 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
ed239f5a
RK
7571 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7572 temp_size, 0, type);
c4e59f51 7573 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
ed239f5a 7574
c11c10d8
RK
7575 if (TREE_ADDRESSABLE (exp))
7576 abort ();
7577
ed239f5a
RK
7578 if (GET_MODE (op0) == BLKmode)
7579 emit_block_move (new_with_op0_mode, op0,
44bb111a 7580 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
8403445a
AM
7581 (modifier == EXPAND_STACK_PARM
7582 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
ed239f5a
RK
7583 else
7584 emit_move_insn (new_with_op0_mode, op0);
7585
7586 op0 = new;
7587 }
0fb7aeda 7588
c4e59f51 7589 op0 = adjust_address (op0, TYPE_MODE (type), 0);
ed239f5a
RK
7590 }
7591
7592 return op0;
7593
bbf6f052 7594 case PLUS_EXPR:
91ce572a 7595 this_optab = ! unsignedp && flag_trapv
a9785c70 7596 && (GET_MODE_CLASS (mode) == MODE_INT)
91ce572a 7597 ? addv_optab : add_optab;
bbf6f052
RK
7598
7599 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7600 something else, make sure we add the register to the constant and
7601 then to the other thing. This case can occur during strength
7602 reduction and doing it this way will produce better code if the
7603 frame pointer or argument pointer is eliminated.
7604
7605 fold-const.c will ensure that the constant is always in the inner
7606 PLUS_EXPR, so the only case we need to do anything about is if
7607 sp, ap, or fp is our second argument, in which case we must swap
7608 the innermost first argument and our second argument. */
7609
7610 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7611 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7612 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7613 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7614 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7615 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7616 {
7617 tree t = TREE_OPERAND (exp, 1);
7618
7619 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7620 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7621 }
7622
88f63c77 7623 /* If the result is to be ptr_mode and we are adding an integer to
bbf6f052
RK
7624 something, we might be forming a constant. So try to use
7625 plus_constant. If it produces a sum and we can't accept it,
7626 use force_operand. This allows P = &ARR[const] to generate
7627 efficient code on machines where a SYMBOL_REF is not a valid
7628 address.
7629
7630 If this is an EXPAND_SUM call, always return the sum. */
c980ac49 7631 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
0fb7aeda 7632 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
bbf6f052 7633 {
8403445a
AM
7634 if (modifier == EXPAND_STACK_PARM)
7635 target = 0;
c980ac49
RS
7636 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7637 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7638 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7639 {
cbbc503e
JL
7640 rtx constant_part;
7641
c980ac49
RS
7642 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7643 EXPAND_SUM);
cbbc503e
JL
7644 /* Use immed_double_const to ensure that the constant is
7645 truncated according to the mode of OP1, then sign extended
7646 to a HOST_WIDE_INT. Using the constant directly can result
7647 in non-canonical RTL in a 64x32 cross compile. */
7648 constant_part
7649 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7650 (HOST_WIDE_INT) 0,
a5efcd63 7651 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7f401c74 7652 op1 = plus_constant (op1, INTVAL (constant_part));
c980ac49
RS
7653 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7654 op1 = force_operand (op1, target);
7655 return op1;
7656 }
bbf6f052 7657
c980ac49
RS
7658 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7659 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7660 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7661 {
cbbc503e
JL
7662 rtx constant_part;
7663
c980ac49 7664 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
70d95bac
RH
7665 (modifier == EXPAND_INITIALIZER
7666 ? EXPAND_INITIALIZER : EXPAND_SUM));
c980ac49
RS
7667 if (! CONSTANT_P (op0))
7668 {
7669 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7670 VOIDmode, modifier);
f0e9957a
RS
7671 /* Return a PLUS if modifier says it's OK. */
7672 if (modifier == EXPAND_SUM
7673 || modifier == EXPAND_INITIALIZER)
7674 return simplify_gen_binary (PLUS, mode, op0, op1);
7675 goto binop2;
c980ac49 7676 }
cbbc503e
JL
7677 /* Use immed_double_const to ensure that the constant is
7678 truncated according to the mode of OP1, then sign extended
7679 to a HOST_WIDE_INT. Using the constant directly can result
7680 in non-canonical RTL in a 64x32 cross compile. */
7681 constant_part
7682 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7683 (HOST_WIDE_INT) 0,
2a94e396 7684 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7f401c74 7685 op0 = plus_constant (op0, INTVAL (constant_part));
c980ac49
RS
7686 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7687 op0 = force_operand (op0, target);
7688 return op0;
7689 }
bbf6f052
RK
7690 }
7691
7692 /* No sense saving up arithmetic to be done
7693 if it's all in the wrong mode to form part of an address.
7694 And force_operand won't know whether to sign-extend or
7695 zero-extend. */
7696 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
88f63c77 7697 || mode != ptr_mode)
4ef7870a 7698 {
eb698c58
RS
7699 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7700 subtarget, &op0, &op1, 0);
6e7727eb
EB
7701 if (op0 == const0_rtx)
7702 return op1;
7703 if (op1 == const0_rtx)
7704 return op0;
4ef7870a
EB
7705 goto binop2;
7706 }
bbf6f052 7707
eb698c58
RS
7708 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7709 subtarget, &op0, &op1, modifier);
f0e9957a 7710 return simplify_gen_binary (PLUS, mode, op0, op1);
bbf6f052
RK
7711
7712 case MINUS_EXPR:
ea87523e
RK
7713 /* For initializers, we are allowed to return a MINUS of two
7714 symbolic constants. Here we handle all cases when both operands
7715 are constant. */
bbf6f052
RK
7716 /* Handle difference of two symbolic constants,
7717 for the sake of an initializer. */
7718 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7719 && really_constant_p (TREE_OPERAND (exp, 0))
7720 && really_constant_p (TREE_OPERAND (exp, 1)))
7721 {
eb698c58
RS
7722 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7723 NULL_RTX, &op0, &op1, modifier);
ea87523e 7724
ea87523e
RK
7725 /* If the last operand is a CONST_INT, use plus_constant of
7726 the negated constant. Else make the MINUS. */
7727 if (GET_CODE (op1) == CONST_INT)
7728 return plus_constant (op0, - INTVAL (op1));
7729 else
38a448ca 7730 return gen_rtx_MINUS (mode, op0, op1);
bbf6f052 7731 }
ae431183 7732
91ce572a
CC
7733 this_optab = ! unsignedp && flag_trapv
7734 && (GET_MODE_CLASS(mode) == MODE_INT)
7735 ? subv_optab : sub_optab;
1717e19e
UW
7736
7737 /* No sense saving up arithmetic to be done
7738 if it's all in the wrong mode to form part of an address.
7739 And force_operand won't know whether to sign-extend or
7740 zero-extend. */
7741 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7742 || mode != ptr_mode)
7743 goto binop;
7744
eb698c58
RS
7745 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7746 subtarget, &op0, &op1, modifier);
1717e19e
UW
7747
7748 /* Convert A - const to A + (-const). */
7749 if (GET_CODE (op1) == CONST_INT)
7750 {
7751 op1 = negate_rtx (mode, op1);
f0e9957a 7752 return simplify_gen_binary (PLUS, mode, op0, op1);
1717e19e
UW
7753 }
7754
7755 goto binop2;
bbf6f052
RK
7756
7757 case MULT_EXPR:
bbf6f052
RK
7758 /* If first operand is constant, swap them.
7759 Thus the following special case checks need only
7760 check the second operand. */
7761 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7762 {
b3694847 7763 tree t1 = TREE_OPERAND (exp, 0);
bbf6f052
RK
7764 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7765 TREE_OPERAND (exp, 1) = t1;
7766 }
7767
7768 /* Attempt to return something suitable for generating an
7769 indexed address, for machines that support that. */
7770
88f63c77 7771 if (modifier == EXPAND_SUM && mode == ptr_mode
3b40e71b 7772 && host_integerp (TREE_OPERAND (exp, 1), 0))
bbf6f052 7773 {
48a5f2fa
DJ
7774 tree exp1 = TREE_OPERAND (exp, 1);
7775
921b3427
RK
7776 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7777 EXPAND_SUM);
bbf6f052 7778
bbf6f052 7779 if (GET_CODE (op0) != REG)
906c4e36 7780 op0 = force_operand (op0, NULL_RTX);
bbf6f052
RK
7781 if (GET_CODE (op0) != REG)
7782 op0 = copy_to_mode_reg (mode, op0);
7783
48a5f2fa
DJ
7784 return gen_rtx_MULT (mode, op0,
7785 gen_int_mode (tree_low_cst (exp1, 0),
7786 TYPE_MODE (TREE_TYPE (exp1))));
bbf6f052
RK
7787 }
7788
8403445a
AM
7789 if (modifier == EXPAND_STACK_PARM)
7790 target = 0;
7791
bbf6f052
RK
7792 /* Check for multiplying things that have been extended
7793 from a narrower type. If this machine supports multiplying
7794 in that narrower type with a result in the desired type,
7795 do it that way, and avoid the explicit type-conversion. */
7796 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7797 && TREE_CODE (type) == INTEGER_TYPE
7798 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7799 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7800 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7801 && int_fits_type_p (TREE_OPERAND (exp, 1),
7802 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7803 /* Don't use a widening multiply if a shift will do. */
7804 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
906c4e36 7805 > HOST_BITS_PER_WIDE_INT)
bbf6f052
RK
7806 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7807 ||
7808 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8df83eae
RK
7809 && (TYPE_PRECISION (TREE_TYPE
7810 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7811 == TYPE_PRECISION (TREE_TYPE
7812 (TREE_OPERAND
7813 (TREE_OPERAND (exp, 0), 0))))
bbf6f052
RK
7814 /* If both operands are extended, they must either both
7815 be zero-extended or both be sign-extended. */
8df83eae
RK
7816 && (TYPE_UNSIGNED (TREE_TYPE
7817 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7818 == TYPE_UNSIGNED (TREE_TYPE
7819 (TREE_OPERAND
7820 (TREE_OPERAND (exp, 0), 0)))))))
bbf6f052 7821 {
888d65b5
RS
7822 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
7823 enum machine_mode innermode = TYPE_MODE (op0type);
8df83eae 7824 bool zextend_p = TYPE_UNSIGNED (op0type);
888d65b5
RS
7825 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7826 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7827
b10af0c8 7828 if (mode == GET_MODE_WIDER_MODE (innermode))
bbf6f052 7829 {
b10af0c8
TG
7830 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7831 {
b10af0c8 7832 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
eb698c58
RS
7833 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7834 TREE_OPERAND (exp, 1),
7835 NULL_RTX, &op0, &op1, 0);
b10af0c8 7836 else
eb698c58
RS
7837 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7838 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7839 NULL_RTX, &op0, &op1, 0);
b10af0c8
TG
7840 goto binop2;
7841 }
7842 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7843 && innermode == word_mode)
7844 {
888d65b5 7845 rtx htem, hipart;
b10af0c8
TG
7846 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7847 NULL_RTX, VOIDmode, 0);
7848 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8c118062
GK
7849 op1 = convert_modes (innermode, mode,
7850 expand_expr (TREE_OPERAND (exp, 1),
7851 NULL_RTX, VOIDmode, 0),
7852 unsignedp);
b10af0c8
TG
7853 else
7854 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7855 NULL_RTX, VOIDmode, 0);
7856 temp = expand_binop (mode, other_optab, op0, op1, target,
7857 unsignedp, OPTAB_LIB_WIDEN);
888d65b5
RS
7858 hipart = gen_highpart (innermode, temp);
7859 htem = expand_mult_highpart_adjust (innermode, hipart,
7860 op0, op1, hipart,
7861 zextend_p);
7862 if (htem != hipart)
7863 emit_move_insn (hipart, htem);
b10af0c8
TG
7864 return temp;
7865 }
bbf6f052
RK
7866 }
7867 }
eb698c58
RS
7868 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7869 subtarget, &op0, &op1, 0);
bbf6f052
RK
7870 return expand_mult (mode, op0, op1, target, unsignedp);
7871
7872 case TRUNC_DIV_EXPR:
7873 case FLOOR_DIV_EXPR:
7874 case CEIL_DIV_EXPR:
7875 case ROUND_DIV_EXPR:
7876 case EXACT_DIV_EXPR:
8403445a
AM
7877 if (modifier == EXPAND_STACK_PARM)
7878 target = 0;
bbf6f052
RK
7879 /* Possible optimization: compute the dividend with EXPAND_SUM
7880 then if the divisor is constant can optimize the case
7881 where some terms of the dividend have coeffs divisible by it. */
eb698c58
RS
7882 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7883 subtarget, &op0, &op1, 0);
bbf6f052
RK
7884 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7885
7886 case RDIV_EXPR:
b7e9703c
JH
7887 /* Emit a/b as a*(1/b). Later we may manage CSE the reciprocal saving
7888 expensive divide. If not, combine will rebuild the original
7889 computation. */
7890 if (flag_unsafe_math_optimizations && optimize && !optimize_size
ed7d44bc 7891 && TREE_CODE (type) == REAL_TYPE
b7e9703c
JH
7892 && !real_onep (TREE_OPERAND (exp, 0)))
7893 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7894 build (RDIV_EXPR, type,
7895 build_real (type, dconst1),
7896 TREE_OPERAND (exp, 1))),
8e37cba8 7897 target, tmode, modifier);
ef89d648 7898 this_optab = sdiv_optab;
bbf6f052
RK
7899 goto binop;
7900
7901 case TRUNC_MOD_EXPR:
7902 case FLOOR_MOD_EXPR:
7903 case CEIL_MOD_EXPR:
7904 case ROUND_MOD_EXPR:
8403445a
AM
7905 if (modifier == EXPAND_STACK_PARM)
7906 target = 0;
eb698c58
RS
7907 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7908 subtarget, &op0, &op1, 0);
bbf6f052
RK
7909 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7910
7911 case FIX_ROUND_EXPR:
7912 case FIX_FLOOR_EXPR:
7913 case FIX_CEIL_EXPR:
7914 abort (); /* Not used for C. */
7915
7916 case FIX_TRUNC_EXPR:
906c4e36 7917 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8403445a 7918 if (target == 0 || modifier == EXPAND_STACK_PARM)
bbf6f052
RK
7919 target = gen_reg_rtx (mode);
7920 expand_fix (target, op0, unsignedp);
7921 return target;
7922
7923 case FLOAT_EXPR:
906c4e36 7924 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8403445a 7925 if (target == 0 || modifier == EXPAND_STACK_PARM)
bbf6f052
RK
7926 target = gen_reg_rtx (mode);
7927 /* expand_float can't figure out what to do if FROM has VOIDmode.
7928 So give it the correct mode. With -O, cse will optimize this. */
7929 if (GET_MODE (op0) == VOIDmode)
7930 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7931 op0);
7932 expand_float (target, op0,
8df83eae 7933 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
bbf6f052
RK
7934 return target;
7935
7936 case NEGATE_EXPR:
5b22bee8 7937 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8403445a
AM
7938 if (modifier == EXPAND_STACK_PARM)
7939 target = 0;
91ce572a 7940 temp = expand_unop (mode,
0fb7aeda
KH
7941 ! unsignedp && flag_trapv
7942 && (GET_MODE_CLASS(mode) == MODE_INT)
7943 ? negv_optab : neg_optab, op0, target, 0);
bbf6f052
RK
7944 if (temp == 0)
7945 abort ();
7946 return temp;
7947
7948 case ABS_EXPR:
7949 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8403445a
AM
7950 if (modifier == EXPAND_STACK_PARM)
7951 target = 0;
bbf6f052 7952
11017cc7 7953 /* ABS_EXPR is not valid for complex arguments. */
d6a5ac33
RK
7954 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7955 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
11017cc7 7956 abort ();
2d7050fd 7957
bbf6f052
RK
7958 /* Unsigned abs is simply the operand. Testing here means we don't
7959 risk generating incorrect code below. */
8df83eae 7960 if (TYPE_UNSIGNED (type))
bbf6f052
RK
7961 return op0;
7962
91ce572a 7963 return expand_abs (mode, op0, target, unsignedp,
e5e809f4 7964 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
bbf6f052
RK
7965
7966 case MAX_EXPR:
7967 case MIN_EXPR:
7968 target = original_target;
8403445a
AM
7969 if (target == 0
7970 || modifier == EXPAND_STACK_PARM
fc155707 7971 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
d6a5ac33 7972 || GET_MODE (target) != mode
bbf6f052
RK
7973 || (GET_CODE (target) == REG
7974 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7975 target = gen_reg_rtx (mode);
eb698c58
RS
7976 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7977 target, &op0, &op1, 0);
bbf6f052
RK
7978
7979 /* First try to do it with a special MIN or MAX instruction.
7980 If that does not win, use a conditional jump to select the proper
7981 value. */
288dc1ea 7982 this_optab = (unsignedp
bbf6f052
RK
7983 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7984 : (code == MIN_EXPR ? smin_optab : smax_optab));
7985
7986 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7987 OPTAB_WIDEN);
7988 if (temp != 0)
7989 return temp;
7990
fa2981d8
JW
7991 /* At this point, a MEM target is no longer useful; we will get better
7992 code without it. */
3a94c984 7993
fa2981d8
JW
7994 if (GET_CODE (target) == MEM)
7995 target = gen_reg_rtx (mode);
7996
e3be1116
RS
7997 /* If op1 was placed in target, swap op0 and op1. */
7998 if (target != op0 && target == op1)
7999 {
8000 rtx tem = op0;
8001 op0 = op1;
8002 op1 = tem;
8003 }
8004
ee456b1c
RK
8005 if (target != op0)
8006 emit_move_insn (target, op0);
d6a5ac33 8007
bbf6f052 8008 op0 = gen_label_rtx ();
d6a5ac33 8009
f81497d9
RS
8010 /* If this mode is an integer too wide to compare properly,
8011 compare word by word. Rely on cse to optimize constant cases. */
1eb8759b
RH
8012 if (GET_MODE_CLASS (mode) == MODE_INT
8013 && ! can_compare_p (GE, mode, ccp_jump))
bbf6f052 8014 {
f81497d9 8015 if (code == MAX_EXPR)
288dc1ea
EB
8016 do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
8017 NULL_RTX, op0);
bbf6f052 8018 else
288dc1ea
EB
8019 do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
8020 NULL_RTX, op0);
bbf6f052 8021 }
f81497d9
RS
8022 else
8023 {
b30f05db 8024 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
288dc1ea 8025 unsignedp, mode, NULL_RTX, NULL_RTX, op0);
f81497d9 8026 }
b30f05db 8027 emit_move_insn (target, op1);
bbf6f052
RK
8028 emit_label (op0);
8029 return target;
8030
bbf6f052
RK
8031 case BIT_NOT_EXPR:
8032 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8403445a
AM
8033 if (modifier == EXPAND_STACK_PARM)
8034 target = 0;
bbf6f052
RK
8035 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8036 if (temp == 0)
8037 abort ();
8038 return temp;
8039
d6a5ac33
RK
8040 /* ??? Can optimize bitwise operations with one arg constant.
8041 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8042 and (a bitwise1 b) bitwise2 b (etc)
8043 but that is probably not worth while. */
8044
8045 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8046 boolean values when we want in all cases to compute both of them. In
8047 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8048 as actual zero-or-1 values and then bitwise anding. In cases where
8049 there cannot be any side effects, better code would be made by
8050 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8051 how to recognize those cases. */
8052
bbf6f052
RK
8053 case TRUTH_AND_EXPR:
8054 case BIT_AND_EXPR:
8055 this_optab = and_optab;
8056 goto binop;
8057
bbf6f052
RK
8058 case TRUTH_OR_EXPR:
8059 case BIT_IOR_EXPR:
8060 this_optab = ior_optab;
8061 goto binop;
8062
874726a8 8063 case TRUTH_XOR_EXPR:
bbf6f052
RK
8064 case BIT_XOR_EXPR:
8065 this_optab = xor_optab;
8066 goto binop;
8067
8068 case LSHIFT_EXPR:
8069 case RSHIFT_EXPR:
8070 case LROTATE_EXPR:
8071 case RROTATE_EXPR:
e5e809f4 8072 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052 8073 subtarget = 0;
8403445a
AM
8074 if (modifier == EXPAND_STACK_PARM)
8075 target = 0;
bbf6f052
RK
8076 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8077 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8078 unsignedp);
8079
d6a5ac33
RK
8080 /* Could determine the answer when only additive constants differ. Also,
8081 the addition of one can be handled by changing the condition. */
bbf6f052
RK
8082 case LT_EXPR:
8083 case LE_EXPR:
8084 case GT_EXPR:
8085 case GE_EXPR:
8086 case EQ_EXPR:
8087 case NE_EXPR:
1eb8759b
RH
8088 case UNORDERED_EXPR:
8089 case ORDERED_EXPR:
8090 case UNLT_EXPR:
8091 case UNLE_EXPR:
8092 case UNGT_EXPR:
8093 case UNGE_EXPR:
8094 case UNEQ_EXPR:
8403445a
AM
8095 temp = do_store_flag (exp,
8096 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8097 tmode != VOIDmode ? tmode : mode, 0);
bbf6f052
RK
8098 if (temp != 0)
8099 return temp;
d6a5ac33 8100
0f41302f 8101 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
bbf6f052
RK
8102 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8103 && original_target
8104 && GET_CODE (original_target) == REG
8105 && (GET_MODE (original_target)
8106 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8107 {
d6a5ac33
RK
8108 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8109 VOIDmode, 0);
8110
c0a3eeac
UW
8111 /* If temp is constant, we can just compute the result. */
8112 if (GET_CODE (temp) == CONST_INT)
8113 {
8114 if (INTVAL (temp) != 0)
8115 emit_move_insn (target, const1_rtx);
8116 else
8117 emit_move_insn (target, const0_rtx);
8118
8119 return target;
8120 }
8121
bbf6f052 8122 if (temp != original_target)
c0a3eeac
UW
8123 {
8124 enum machine_mode mode1 = GET_MODE (temp);
8125 if (mode1 == VOIDmode)
8126 mode1 = tmode != VOIDmode ? tmode : mode;
0fb7aeda 8127
c0a3eeac
UW
8128 temp = copy_to_mode_reg (mode1, temp);
8129 }
d6a5ac33 8130
bbf6f052 8131 op1 = gen_label_rtx ();
c5d5d461 8132 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
a06ef755 8133 GET_MODE (temp), unsignedp, op1);
bbf6f052
RK
8134 emit_move_insn (temp, const1_rtx);
8135 emit_label (op1);
8136 return temp;
8137 }
d6a5ac33 8138
bbf6f052
RK
8139 /* If no set-flag instruction, must generate a conditional
8140 store into a temporary variable. Drop through
8141 and handle this like && and ||. */
8142
8143 case TRUTH_ANDIF_EXPR:
8144 case TRUTH_ORIF_EXPR:
e44842fe 8145 if (! ignore
8403445a
AM
8146 && (target == 0
8147 || modifier == EXPAND_STACK_PARM
8148 || ! safe_from_p (target, exp, 1)
e44842fe
RK
8149 /* Make sure we don't have a hard reg (such as function's return
8150 value) live across basic blocks, if not optimizing. */
8151 || (!optimize && GET_CODE (target) == REG
8152 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
bbf6f052 8153 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
e44842fe
RK
8154
8155 if (target)
8156 emit_clr_insn (target);
8157
bbf6f052
RK
8158 op1 = gen_label_rtx ();
8159 jumpifnot (exp, op1);
e44842fe
RK
8160
8161 if (target)
8162 emit_0_to_1_insn (target);
8163
bbf6f052 8164 emit_label (op1);
e44842fe 8165 return ignore ? const0_rtx : target;
bbf6f052
RK
8166
8167 case TRUTH_NOT_EXPR:
8403445a
AM
8168 if (modifier == EXPAND_STACK_PARM)
8169 target = 0;
bbf6f052
RK
8170 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8171 /* The parser is careful to generate TRUTH_NOT_EXPR
8172 only with operands that are always zero or one. */
906c4e36 8173 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
bbf6f052
RK
8174 target, 1, OPTAB_LIB_WIDEN);
8175 if (temp == 0)
8176 abort ();
8177 return temp;
8178
8179 case COMPOUND_EXPR:
8180 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8181 emit_queue ();
0fab64a3
MM
8182 return expand_expr_real (TREE_OPERAND (exp, 1),
8183 (ignore ? const0_rtx : target),
8184 VOIDmode, modifier, alt_rtl);
bbf6f052
RK
8185
8186 case COND_EXPR:
ac01eace
RK
8187 /* If we would have a "singleton" (see below) were it not for a
8188 conversion in each arm, bring that conversion back out. */
8189 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8190 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8191 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8192 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8193 {
d6edb99e
ZW
8194 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8195 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8196
8197 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8198 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8199 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8200 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8201 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8202 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8203 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8204 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
ac01eace 8205 return expand_expr (build1 (NOP_EXPR, type,
d6edb99e 8206 build (COND_EXPR, TREE_TYPE (iftrue),
ac01eace 8207 TREE_OPERAND (exp, 0),
d6edb99e 8208 iftrue, iffalse)),
ac01eace
RK
8209 target, tmode, modifier);
8210 }
8211
bbf6f052
RK
8212 {
8213 /* Note that COND_EXPRs whose type is a structure or union
8214 are required to be constructed to contain assignments of
8215 a temporary variable, so that we can evaluate them here
8216 for side effect only. If type is void, we must do likewise. */
8217
8218 /* If an arm of the branch requires a cleanup,
8219 only that cleanup is performed. */
8220
8221 tree singleton = 0;
8222 tree binary_op = 0, unary_op = 0;
bbf6f052
RK
8223
8224 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8225 convert it to our mode, if necessary. */
8226 if (integer_onep (TREE_OPERAND (exp, 1))
8227 && integer_zerop (TREE_OPERAND (exp, 2))
8228 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8229 {
dd27116b
RK
8230 if (ignore)
8231 {
8232 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
37a08a29 8233 modifier);
dd27116b
RK
8234 return const0_rtx;
8235 }
8236
8403445a
AM
8237 if (modifier == EXPAND_STACK_PARM)
8238 target = 0;
37a08a29 8239 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
bbf6f052
RK
8240 if (GET_MODE (op0) == mode)
8241 return op0;
d6a5ac33 8242
bbf6f052
RK
8243 if (target == 0)
8244 target = gen_reg_rtx (mode);
8245 convert_move (target, op0, unsignedp);
8246 return target;
8247 }
8248
ac01eace
RK
8249 /* Check for X ? A + B : A. If we have this, we can copy A to the
8250 output and conditionally add B. Similarly for unary operations.
8251 Don't do this if X has side-effects because those side effects
8252 might affect A or B and the "?" operation is a sequence point in
8253 ANSI. (operand_equal_p tests for side effects.) */
bbf6f052
RK
8254
8255 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8256 && operand_equal_p (TREE_OPERAND (exp, 2),
8257 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8258 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8259 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8260 && operand_equal_p (TREE_OPERAND (exp, 1),
8261 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8262 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8263 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8264 && operand_equal_p (TREE_OPERAND (exp, 2),
8265 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8266 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8267 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8268 && operand_equal_p (TREE_OPERAND (exp, 1),
8269 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8270 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8271
01c8a7c8
RK
8272 /* If we are not to produce a result, we have no target. Otherwise,
8273 if a target was specified use it; it will not be used as an
3a94c984 8274 intermediate target unless it is safe. If no target, use a
01c8a7c8
RK
8275 temporary. */
8276
8277 if (ignore)
8278 temp = 0;
8403445a
AM
8279 else if (modifier == EXPAND_STACK_PARM)
8280 temp = assign_temp (type, 0, 0, 1);
01c8a7c8 8281 else if (original_target
e5e809f4 8282 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
01c8a7c8
RK
8283 || (singleton && GET_CODE (original_target) == REG
8284 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8285 && original_target == var_rtx (singleton)))
8286 && GET_MODE (original_target) == mode
7c00d1fe
RK
8287#ifdef HAVE_conditional_move
8288 && (! can_conditionally_move_p (mode)
8289 || GET_CODE (original_target) == REG
8290 || TREE_ADDRESSABLE (type))
8291#endif
8125d7e9
BS
8292 && (GET_CODE (original_target) != MEM
8293 || TREE_ADDRESSABLE (type)))
01c8a7c8
RK
8294 temp = original_target;
8295 else if (TREE_ADDRESSABLE (type))
8296 abort ();
8297 else
8298 temp = assign_temp (type, 0, 0, 1);
8299
ac01eace
RK
8300 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8301 do the test of X as a store-flag operation, do this as
8302 A + ((X != 0) << log C). Similarly for other simple binary
8303 operators. Only do for C == 1 if BRANCH_COST is low. */
dd27116b 8304 if (temp && singleton && binary_op
bbf6f052
RK
8305 && (TREE_CODE (binary_op) == PLUS_EXPR
8306 || TREE_CODE (binary_op) == MINUS_EXPR
8307 || TREE_CODE (binary_op) == BIT_IOR_EXPR
9fbd9f58 8308 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
ac01eace
RK
8309 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8310 : integer_onep (TREE_OPERAND (binary_op, 1)))
bbf6f052
RK
8311 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8312 {
8313 rtx result;
61f6c84f 8314 tree cond;
91ce572a 8315 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
0fb7aeda
KH
8316 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8317 ? addv_optab : add_optab)
8318 : TREE_CODE (binary_op) == MINUS_EXPR
8319 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8320 ? subv_optab : sub_optab)
8321 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8322 : xor_optab);
bbf6f052 8323
61f6c84f 8324 /* If we had X ? A : A + 1, do this as A + (X == 0). */
bbf6f052 8325 if (singleton == TREE_OPERAND (exp, 1))
61f6c84f
JJ
8326 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8327 else
8328 cond = TREE_OPERAND (exp, 0);
bbf6f052 8329
61f6c84f
JJ
8330 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8331 ? temp : NULL_RTX),
bbf6f052
RK
8332 mode, BRANCH_COST <= 1);
8333
ac01eace
RK
8334 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8335 result = expand_shift (LSHIFT_EXPR, mode, result,
8336 build_int_2 (tree_log2
8337 (TREE_OPERAND
8338 (binary_op, 1)),
8339 0),
e5e809f4 8340 (safe_from_p (temp, singleton, 1)
ac01eace
RK
8341 ? temp : NULL_RTX), 0);
8342
bbf6f052
RK
8343 if (result)
8344 {
906c4e36 8345 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8346 return expand_binop (mode, boptab, op1, result, temp,
8347 unsignedp, OPTAB_LIB_WIDEN);
8348 }
bbf6f052 8349 }
3a94c984 8350
dabf8373 8351 do_pending_stack_adjust ();
bbf6f052
RK
8352 NO_DEFER_POP;
8353 op0 = gen_label_rtx ();
8354
8355 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8356 {
8357 if (temp != 0)
8358 {
8359 /* If the target conflicts with the other operand of the
8360 binary op, we can't use it. Also, we can't use the target
8361 if it is a hard register, because evaluating the condition
8362 might clobber it. */
8363 if ((binary_op
e5e809f4 8364 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
bbf6f052
RK
8365 || (GET_CODE (temp) == REG
8366 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8367 temp = gen_reg_rtx (mode);
8403445a
AM
8368 store_expr (singleton, temp,
8369 modifier == EXPAND_STACK_PARM ? 2 : 0);
bbf6f052
RK
8370 }
8371 else
906c4e36 8372 expand_expr (singleton,
2937cf87 8373 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8374 if (singleton == TREE_OPERAND (exp, 1))
8375 jumpif (TREE_OPERAND (exp, 0), op0);
8376 else
8377 jumpifnot (TREE_OPERAND (exp, 0), op0);
8378
956d6950 8379 start_cleanup_deferral ();
bbf6f052
RK
8380 if (binary_op && temp == 0)
8381 /* Just touch the other operand. */
8382 expand_expr (TREE_OPERAND (binary_op, 1),
906c4e36 8383 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8384 else if (binary_op)
8385 store_expr (build (TREE_CODE (binary_op), type,
8386 make_tree (type, temp),
8387 TREE_OPERAND (binary_op, 1)),
8403445a 8388 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
bbf6f052
RK
8389 else
8390 store_expr (build1 (TREE_CODE (unary_op), type,
8391 make_tree (type, temp)),
8403445a 8392 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
bbf6f052 8393 op1 = op0;
bbf6f052 8394 }
bbf6f052
RK
8395 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8396 comparison operator. If we have one of these cases, set the
8397 output to A, branch on A (cse will merge these two references),
8398 then set the output to FOO. */
8399 else if (temp
8400 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8401 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8402 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8403 TREE_OPERAND (exp, 1), 0)
e9a25f70
JL
8404 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8405 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
e5e809f4 8406 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
bbf6f052 8407 {
3a94c984
KH
8408 if (GET_CODE (temp) == REG
8409 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
bbf6f052 8410 temp = gen_reg_rtx (mode);
8403445a
AM
8411 store_expr (TREE_OPERAND (exp, 1), temp,
8412 modifier == EXPAND_STACK_PARM ? 2 : 0);
bbf6f052 8413 jumpif (TREE_OPERAND (exp, 0), op0);
5dab5552 8414
956d6950 8415 start_cleanup_deferral ();
c37b68d4
RS
8416 if (TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8417 store_expr (TREE_OPERAND (exp, 2), temp,
8418 modifier == EXPAND_STACK_PARM ? 2 : 0);
8419 else
8420 expand_expr (TREE_OPERAND (exp, 2),
8421 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8422 op1 = op0;
8423 }
8424 else if (temp
8425 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8426 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8427 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8428 TREE_OPERAND (exp, 2), 0)
e9a25f70
JL
8429 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8430 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
e5e809f4 8431 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
bbf6f052 8432 {
3a94c984
KH
8433 if (GET_CODE (temp) == REG
8434 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
bbf6f052 8435 temp = gen_reg_rtx (mode);
8403445a
AM
8436 store_expr (TREE_OPERAND (exp, 2), temp,
8437 modifier == EXPAND_STACK_PARM ? 2 : 0);
bbf6f052 8438 jumpifnot (TREE_OPERAND (exp, 0), op0);
5dab5552 8439
956d6950 8440 start_cleanup_deferral ();
c37b68d4
RS
8441 if (TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8442 store_expr (TREE_OPERAND (exp, 1), temp,
8443 modifier == EXPAND_STACK_PARM ? 2 : 0);
8444 else
8445 expand_expr (TREE_OPERAND (exp, 1),
8446 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8447 op1 = op0;
8448 }
8449 else
8450 {
8451 op1 = gen_label_rtx ();
8452 jumpifnot (TREE_OPERAND (exp, 0), op0);
5dab5552 8453
956d6950 8454 start_cleanup_deferral ();
3a94c984 8455
2ac84cfe 8456 /* One branch of the cond can be void, if it never returns. For
3a94c984 8457 example A ? throw : E */
2ac84cfe 8458 if (temp != 0
3a94c984 8459 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8403445a
AM
8460 store_expr (TREE_OPERAND (exp, 1), temp,
8461 modifier == EXPAND_STACK_PARM ? 2 : 0);
bbf6f052 8462 else
906c4e36
RK
8463 expand_expr (TREE_OPERAND (exp, 1),
8464 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
956d6950 8465 end_cleanup_deferral ();
bbf6f052
RK
8466 emit_queue ();
8467 emit_jump_insn (gen_jump (op1));
8468 emit_barrier ();
8469 emit_label (op0);
956d6950 8470 start_cleanup_deferral ();
2ac84cfe 8471 if (temp != 0
3a94c984 8472 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8403445a
AM
8473 store_expr (TREE_OPERAND (exp, 2), temp,
8474 modifier == EXPAND_STACK_PARM ? 2 : 0);
bbf6f052 8475 else
906c4e36
RK
8476 expand_expr (TREE_OPERAND (exp, 2),
8477 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8478 }
8479
956d6950 8480 end_cleanup_deferral ();
bbf6f052
RK
8481
8482 emit_queue ();
8483 emit_label (op1);
8484 OK_DEFER_POP;
5dab5552 8485
bbf6f052
RK
8486 return temp;
8487 }
8488
8489 case TARGET_EXPR:
8490 {
8491 /* Something needs to be initialized, but we didn't know
8492 where that thing was when building the tree. For example,
8493 it could be the return value of a function, or a parameter
8494 to a function which lays down in the stack, or a temporary
8495 variable which must be passed by reference.
8496
8497 We guarantee that the expression will either be constructed
8498 or copied into our original target. */
8499
8500 tree slot = TREE_OPERAND (exp, 0);
2a888d4c 8501 tree cleanups = NULL_TREE;
5c062816 8502 tree exp1;
bbf6f052
RK
8503
8504 if (TREE_CODE (slot) != VAR_DECL)
8505 abort ();
8506
9c51f375
RK
8507 if (! ignore)
8508 target = original_target;
8509
6fbfac92
JM
8510 /* Set this here so that if we get a target that refers to a
8511 register variable that's already been used, put_reg_into_stack
3a94c984 8512 knows that it should fix up those uses. */
6fbfac92
JM
8513 TREE_USED (slot) = 1;
8514
bbf6f052
RK
8515 if (target == 0)
8516 {
19e7881c 8517 if (DECL_RTL_SET_P (slot))
ac993f4f
MS
8518 {
8519 target = DECL_RTL (slot);
5c062816 8520 /* If we have already expanded the slot, so don't do
ac993f4f 8521 it again. (mrs) */
5c062816
MS
8522 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8523 return target;
ac993f4f 8524 }
bbf6f052
RK
8525 else
8526 {
e9a25f70 8527 target = assign_temp (type, 2, 0, 1);
bbf6f052
RK
8528 /* All temp slots at this level must not conflict. */
8529 preserve_temp_slots (target);
19e7881c 8530 SET_DECL_RTL (slot, target);
e9a25f70 8531 if (TREE_ADDRESSABLE (slot))
f29a2bd1 8532 put_var_into_stack (slot, /*rescan=*/false);
bbf6f052 8533
e287fd6e
RK
8534 /* Since SLOT is not known to the called function
8535 to belong to its stack frame, we must build an explicit
8536 cleanup. This case occurs when we must build up a reference
8537 to pass the reference as an argument. In this case,
8538 it is very likely that such a reference need not be
8539 built here. */
8540
8541 if (TREE_OPERAND (exp, 2) == 0)
c88770e9 8542 TREE_OPERAND (exp, 2)
ae2bcd98 8543 = lang_hooks.maybe_build_cleanup (slot);
2a888d4c 8544 cleanups = TREE_OPERAND (exp, 2);
e287fd6e 8545 }
bbf6f052
RK
8546 }
8547 else
8548 {
8549 /* This case does occur, when expanding a parameter which
8550 needs to be constructed on the stack. The target
8551 is the actual stack address that we want to initialize.
8552 The function we call will perform the cleanup in this case. */
8553
8c042b47
RS
8554 /* If we have already assigned it space, use that space,
8555 not target that we were passed in, as our target
8556 parameter is only a hint. */
19e7881c 8557 if (DECL_RTL_SET_P (slot))
3a94c984
KH
8558 {
8559 target = DECL_RTL (slot);
8560 /* If we have already expanded the slot, so don't do
8c042b47 8561 it again. (mrs) */
3a94c984
KH
8562 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8563 return target;
8c042b47 8564 }
21002281
JW
8565 else
8566 {
19e7881c 8567 SET_DECL_RTL (slot, target);
21002281
JW
8568 /* If we must have an addressable slot, then make sure that
8569 the RTL that we just stored in slot is OK. */
8570 if (TREE_ADDRESSABLE (slot))
f29a2bd1 8571 put_var_into_stack (slot, /*rescan=*/true);
21002281 8572 }
bbf6f052
RK
8573 }
8574
4847c938 8575 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
5c062816
MS
8576 /* Mark it as expanded. */
8577 TREE_OPERAND (exp, 1) = NULL_TREE;
8578
8403445a 8579 store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
61d6b1cc 8580
659e5a7a 8581 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
3a94c984 8582
41531e5b 8583 return target;
bbf6f052
RK
8584 }
8585
8586 case INIT_EXPR:
8587 {
8588 tree lhs = TREE_OPERAND (exp, 0);
8589 tree rhs = TREE_OPERAND (exp, 1);
bbf6f052 8590
b90f141a 8591 temp = expand_assignment (lhs, rhs, ! ignore);
bbf6f052
RK
8592 return temp;
8593 }
8594
8595 case MODIFY_EXPR:
8596 {
8597 /* If lhs is complex, expand calls in rhs before computing it.
6d0a3f67
NS
8598 That's so we don't compute a pointer and save it over a
8599 call. If lhs is simple, compute it first so we can give it
8600 as a target if the rhs is just a call. This avoids an
8601 extra temp and copy and that prevents a partial-subsumption
8602 which makes bad code. Actually we could treat
8603 component_ref's of vars like vars. */
bbf6f052
RK
8604
8605 tree lhs = TREE_OPERAND (exp, 0);
8606 tree rhs = TREE_OPERAND (exp, 1);
bbf6f052
RK
8607
8608 temp = 0;
8609
bbf6f052
RK
8610 /* Check for |= or &= of a bitfield of size one into another bitfield
8611 of size 1. In this case, (unless we need the result of the
8612 assignment) we can do this more efficiently with a
8613 test followed by an assignment, if necessary.
8614
8615 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8616 things change so we do, this code should be enhanced to
8617 support it. */
8618 if (ignore
8619 && TREE_CODE (lhs) == COMPONENT_REF
8620 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8621 || TREE_CODE (rhs) == BIT_AND_EXPR)
8622 && TREE_OPERAND (rhs, 0) == lhs
8623 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
05bccae2
RK
8624 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8625 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
bbf6f052
RK
8626 {
8627 rtx label = gen_label_rtx ();
8628
8629 do_jump (TREE_OPERAND (rhs, 1),
8630 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8631 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8632 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8633 (TREE_CODE (rhs) == BIT_IOR_EXPR
8634 ? integer_one_node
8635 : integer_zero_node)),
b90f141a 8636 0);
e7c33f54 8637 do_pending_stack_adjust ();
bbf6f052
RK
8638 emit_label (label);
8639 return const0_rtx;
8640 }
8641
b90f141a 8642 temp = expand_assignment (lhs, rhs, ! ignore);
0fb7aeda 8643
bbf6f052
RK
8644 return temp;
8645 }
8646
6e7f84a7
APB
8647 case RETURN_EXPR:
8648 if (!TREE_OPERAND (exp, 0))
8649 expand_null_return ();
8650 else
8651 expand_return (TREE_OPERAND (exp, 0));
8652 return const0_rtx;
8653
bbf6f052
RK
8654 case PREINCREMENT_EXPR:
8655 case PREDECREMENT_EXPR:
7b8b9722 8656 return expand_increment (exp, 0, ignore);
bbf6f052
RK
8657
8658 case POSTINCREMENT_EXPR:
8659 case POSTDECREMENT_EXPR:
8660 /* Faster to treat as pre-increment if result is not used. */
7b8b9722 8661 return expand_increment (exp, ! ignore, ignore);
bbf6f052
RK
8662
8663 case ADDR_EXPR:
8403445a
AM
8664 if (modifier == EXPAND_STACK_PARM)
8665 target = 0;
bbf6f052
RK
8666 /* Are we taking the address of a nested function? */
8667 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
38ee6ed9 8668 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
e5e809f4
JL
8669 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8670 && ! TREE_STATIC (exp))
bbf6f052
RK
8671 {
8672 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8673 op0 = force_operand (op0, target);
8674 }
682ba3a6
RK
8675 /* If we are taking the address of something erroneous, just
8676 return a zero. */
8677 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8678 return const0_rtx;
d6b6783b
RK
8679 /* If we are taking the address of a constant and are at the
8680 top level, we have to use output_constant_def since we can't
8681 call force_const_mem at top level. */
8682 else if (cfun == 0
8683 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8684 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8685 == 'c')))
8686 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
bbf6f052
RK
8687 else
8688 {
e287fd6e
RK
8689 /* We make sure to pass const0_rtx down if we came in with
8690 ignore set, to avoid doing the cleanups twice for something. */
8691 op0 = expand_expr (TREE_OPERAND (exp, 0),
8692 ignore ? const0_rtx : NULL_RTX, VOIDmode,
bbf6f052
RK
8693 (modifier == EXPAND_INITIALIZER
8694 ? modifier : EXPAND_CONST_ADDRESS));
896102d0 8695
119af78a
RK
8696 /* If we are going to ignore the result, OP0 will have been set
8697 to const0_rtx, so just return it. Don't get confused and
8698 think we are taking the address of the constant. */
8699 if (ignore)
8700 return op0;
8701
73b7f58c
BS
8702 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8703 clever and returns a REG when given a MEM. */
8704 op0 = protect_from_queue (op0, 1);
3539e816 8705
c5c76735
JL
8706 /* We would like the object in memory. If it is a constant, we can
8707 have it be statically allocated into memory. For a non-constant,
8708 we need to allocate some memory and store the value into it. */
896102d0
RK
8709
8710 if (CONSTANT_P (op0))
8711 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8712 op0);
682ba3a6 8713 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
df6018fd 8714 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
c1853da7 8715 || GET_CODE (op0) == PARALLEL || GET_CODE (op0) == LO_SUM)
896102d0 8716 {
6c7d86ec
RK
8717 /* If the operand is a SAVE_EXPR, we can deal with this by
8718 forcing the SAVE_EXPR into memory. */
8719 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
8720 {
f29a2bd1
MM
8721 put_var_into_stack (TREE_OPERAND (exp, 0),
8722 /*rescan=*/true);
6c7d86ec
RK
8723 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
8724 }
df6018fd 8725 else
6c7d86ec
RK
8726 {
8727 /* If this object is in a register, it can't be BLKmode. */
8728 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
19f90fad 8729 rtx memloc = assign_temp (inner_type, 1, 1, 1);
6c7d86ec
RK
8730
8731 if (GET_CODE (op0) == PARALLEL)
8732 /* Handle calls that pass values in multiple
8733 non-contiguous locations. The Irix 6 ABI has examples
8734 of this. */
6e985040 8735 emit_group_store (memloc, op0, inner_type,
6c7d86ec
RK
8736 int_size_in_bytes (inner_type));
8737 else
8738 emit_move_insn (memloc, op0);
0fb7aeda 8739
6c7d86ec
RK
8740 op0 = memloc;
8741 }
896102d0
RK
8742 }
8743
bbf6f052
RK
8744 if (GET_CODE (op0) != MEM)
8745 abort ();
3a94c984 8746
34e81b5a 8747 mark_temp_addr_taken (op0);
bbf6f052 8748 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
88f63c77 8749 {
34e81b5a 8750 op0 = XEXP (op0, 0);
5ae6cd0d 8751 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
34e81b5a 8752 op0 = convert_memory_address (ptr_mode, op0);
34e81b5a 8753 return op0;
88f63c77 8754 }
987c71d9 8755
c952ff4b
RK
8756 /* If OP0 is not aligned as least as much as the type requires, we
8757 need to make a temporary, copy OP0 to it, and take the address of
8758 the temporary. We want to use the alignment of the type, not of
8759 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8760 the test for BLKmode means that can't happen. The test for
8761 BLKmode is because we never make mis-aligned MEMs with
8762 non-BLKmode.
8763
8764 We don't need to do this at all if the machine doesn't have
8765 strict alignment. */
8766 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8767 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
ed239f5a
RK
8768 > MEM_ALIGN (op0))
8769 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
a06ef755
RK
8770 {
8771 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
bdaa131b 8772 rtx new;
a06ef755 8773
c3d32120
RK
8774 if (TYPE_ALIGN_OK (inner_type))
8775 abort ();
8776
bdaa131b
JM
8777 if (TREE_ADDRESSABLE (inner_type))
8778 {
8779 /* We can't make a bitwise copy of this object, so fail. */
8780 error ("cannot take the address of an unaligned member");
8781 return const0_rtx;
8782 }
8783
8784 new = assign_stack_temp_for_type
8785 (TYPE_MODE (inner_type),
8786 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8787 : int_size_in_bytes (inner_type),
8788 1, build_qualified_type (inner_type,
8789 (TYPE_QUALS (inner_type)
8790 | TYPE_QUAL_CONST)));
8791
44bb111a 8792 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
8403445a
AM
8793 (modifier == EXPAND_STACK_PARM
8794 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
bdaa131b 8795
a06ef755
RK
8796 op0 = new;
8797 }
8798
bbf6f052
RK
8799 op0 = force_operand (XEXP (op0, 0), target);
8800 }
987c71d9 8801
05c8e58b
HPN
8802 if (flag_force_addr
8803 && GET_CODE (op0) != REG
8804 && modifier != EXPAND_CONST_ADDRESS
8805 && modifier != EXPAND_INITIALIZER
8806 && modifier != EXPAND_SUM)
987c71d9
RK
8807 op0 = force_reg (Pmode, op0);
8808
dc6d66b3
RK
8809 if (GET_CODE (op0) == REG
8810 && ! REG_USERVAR_P (op0))
bdb429a5 8811 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
987c71d9 8812
5ae6cd0d 8813 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
9fcfcce7 8814 op0 = convert_memory_address (ptr_mode, op0);
88f63c77 8815
bbf6f052
RK
8816 return op0;
8817
8818 case ENTRY_VALUE_EXPR:
8819 abort ();
8820
7308a047
RS
8821 /* COMPLEX type for Extended Pascal & Fortran */
8822 case COMPLEX_EXPR:
8823 {
8824 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6551fa4d 8825 rtx insns;
7308a047
RS
8826
8827 /* Get the rtx code of the operands. */
8828 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8829 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8830
8831 if (! target)
8832 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8833
6551fa4d 8834 start_sequence ();
7308a047
RS
8835
8836 /* Move the real (op0) and imaginary (op1) parts to their location. */
2d7050fd
RS
8837 emit_move_insn (gen_realpart (mode, target), op0);
8838 emit_move_insn (gen_imagpart (mode, target), op1);
7308a047 8839
6551fa4d
JW
8840 insns = get_insns ();
8841 end_sequence ();
8842
7308a047 8843 /* Complex construction should appear as a single unit. */
6551fa4d
JW
8844 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8845 each with a separate pseudo as destination.
8846 It's not correct for flow to treat them as a unit. */
6d6e61ce 8847 if (GET_CODE (target) != CONCAT)
6551fa4d
JW
8848 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8849 else
2f937369 8850 emit_insn (insns);
7308a047
RS
8851
8852 return target;
8853 }
8854
8855 case REALPART_EXPR:
2d7050fd
RS
8856 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8857 return gen_realpart (mode, op0);
3a94c984 8858
7308a047 8859 case IMAGPART_EXPR:
2d7050fd
RS
8860 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8861 return gen_imagpart (mode, op0);
7308a047
RS
8862
8863 case CONJ_EXPR:
8864 {
62acb978 8865 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7308a047 8866 rtx imag_t;
6551fa4d 8867 rtx insns;
3a94c984
KH
8868
8869 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7308a047
RS
8870
8871 if (! target)
d6a5ac33 8872 target = gen_reg_rtx (mode);
3a94c984 8873
6551fa4d 8874 start_sequence ();
7308a047
RS
8875
8876 /* Store the realpart and the negated imagpart to target. */
62acb978
RK
8877 emit_move_insn (gen_realpart (partmode, target),
8878 gen_realpart (partmode, op0));
7308a047 8879
62acb978 8880 imag_t = gen_imagpart (partmode, target);
91ce572a 8881 temp = expand_unop (partmode,
0fb7aeda
KH
8882 ! unsignedp && flag_trapv
8883 && (GET_MODE_CLASS(partmode) == MODE_INT)
8884 ? negv_optab : neg_optab,
3a94c984 8885 gen_imagpart (partmode, op0), imag_t, 0);
7308a047
RS
8886 if (temp != imag_t)
8887 emit_move_insn (imag_t, temp);
8888
6551fa4d
JW
8889 insns = get_insns ();
8890 end_sequence ();
8891
3a94c984 8892 /* Conjugate should appear as a single unit
d6a5ac33 8893 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
6551fa4d
JW
8894 each with a separate pseudo as destination.
8895 It's not correct for flow to treat them as a unit. */
6d6e61ce 8896 if (GET_CODE (target) != CONCAT)
6551fa4d
JW
8897 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8898 else
2f937369 8899 emit_insn (insns);
7308a047
RS
8900
8901 return target;
8902 }
8903
e976b8b2
MS
8904 case TRY_CATCH_EXPR:
8905 {
8906 tree handler = TREE_OPERAND (exp, 1);
8907
8908 expand_eh_region_start ();
8909
8910 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8911
52a11cbf 8912 expand_eh_region_end_cleanup (handler);
e976b8b2
MS
8913
8914 return op0;
8915 }
8916
b335b813
PB
8917 case TRY_FINALLY_EXPR:
8918 {
8919 tree try_block = TREE_OPERAND (exp, 0);
8920 tree finally_block = TREE_OPERAND (exp, 1);
b335b813 8921
8ad8135a 8922 if (!optimize || unsafe_for_reeval (finally_block) > 1)
8943a0b4
RH
8923 {
8924 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
8925 is not sufficient, so we cannot expand the block twice.
8926 So we play games with GOTO_SUBROUTINE_EXPR to let us
8927 expand the thing only once. */
8ad8135a
RH
8928 /* When not optimizing, we go ahead with this form since
8929 (1) user breakpoints operate more predictably without
8930 code duplication, and
8931 (2) we're not running any of the global optimizers
8932 that would explode in time/space with the highly
8933 connected CFG created by the indirect branching. */
8943a0b4
RH
8934
8935 rtx finally_label = gen_label_rtx ();
8936 rtx done_label = gen_label_rtx ();
8937 rtx return_link = gen_reg_rtx (Pmode);
8938 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8939 (tree) finally_label, (tree) return_link);
8940 TREE_SIDE_EFFECTS (cleanup) = 1;
8941
8942 /* Start a new binding layer that will keep track of all cleanup
8943 actions to be performed. */
8944 expand_start_bindings (2);
8945 target_temp_slot_level = temp_slot_level;
8946
8947 expand_decl_cleanup (NULL_TREE, cleanup);
8948 op0 = expand_expr (try_block, target, tmode, modifier);
8949
8950 preserve_temp_slots (op0);
8951 expand_end_bindings (NULL_TREE, 0, 0);
8952 emit_jump (done_label);
8953 emit_label (finally_label);
8954 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8955 emit_indirect_jump (return_link);
8956 emit_label (done_label);
8957 }
8958 else
8959 {
8960 expand_start_bindings (2);
8961 target_temp_slot_level = temp_slot_level;
b335b813 8962
8943a0b4
RH
8963 expand_decl_cleanup (NULL_TREE, finally_block);
8964 op0 = expand_expr (try_block, target, tmode, modifier);
b335b813 8965
8943a0b4
RH
8966 preserve_temp_slots (op0);
8967 expand_end_bindings (NULL_TREE, 0, 0);
8968 }
b335b813 8969
b335b813
PB
8970 return op0;
8971 }
8972
3a94c984 8973 case GOTO_SUBROUTINE_EXPR:
b335b813
PB
8974 {
8975 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8976 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8977 rtx return_address = gen_label_rtx ();
3a94c984
KH
8978 emit_move_insn (return_link,
8979 gen_rtx_LABEL_REF (Pmode, return_address));
b335b813
PB
8980 emit_jump (subr);
8981 emit_label (return_address);
8982 return const0_rtx;
8983 }
8984
d3707adb
RH
8985 case VA_ARG_EXPR:
8986 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8987
52a11cbf 8988 case EXC_PTR_EXPR:
86c99549 8989 return get_exception_pointer (cfun);
52a11cbf 8990
67231816
RH
8991 case FDESC_EXPR:
8992 /* Function descriptors are not valid except for as
8993 initialization constants, and should not be expanded. */
8994 abort ();
8995
bbf6f052 8996 default:
ae2bcd98
RS
8997 /* ??? Use (*fun) form because expand_expr is a macro. */
8998 return (*lang_hooks.expand_expr) (exp, original_target, tmode,
8999 modifier, alt_rtl);
bbf6f052
RK
9000 }
9001
9002 /* Here to do an ordinary binary operator, generating an instruction
9003 from the optab already placed in `this_optab'. */
9004 binop:
eb698c58
RS
9005 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9006 subtarget, &op0, &op1, 0);
bbf6f052 9007 binop2:
8403445a
AM
9008 if (modifier == EXPAND_STACK_PARM)
9009 target = 0;
bbf6f052
RK
9010 temp = expand_binop (mode, this_optab, op0, op1, target,
9011 unsignedp, OPTAB_LIB_WIDEN);
9012 if (temp == 0)
9013 abort ();
9014 return temp;
9015}
b93a436e 9016\f
1ce7f3c2
RK
9017/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9018 when applied to the address of EXP produces an address known to be
9019 aligned more than BIGGEST_ALIGNMENT. */
9020
9021static int
502b8322 9022is_aligning_offset (tree offset, tree exp)
1ce7f3c2 9023{
6fce44af 9024 /* Strip off any conversions. */
1ce7f3c2
RK
9025 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9026 || TREE_CODE (offset) == NOP_EXPR
6fce44af 9027 || TREE_CODE (offset) == CONVERT_EXPR)
1ce7f3c2
RK
9028 offset = TREE_OPERAND (offset, 0);
9029
9030 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9031 power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9032 if (TREE_CODE (offset) != BIT_AND_EXPR
9033 || !host_integerp (TREE_OPERAND (offset, 1), 1)
c0cfc691
OH
9034 || compare_tree_int (TREE_OPERAND (offset, 1),
9035 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
1ce7f3c2
RK
9036 || !exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9037 return 0;
9038
9039 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9040 It must be NEGATE_EXPR. Then strip any more conversions. */
9041 offset = TREE_OPERAND (offset, 0);
9042 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9043 || TREE_CODE (offset) == NOP_EXPR
9044 || TREE_CODE (offset) == CONVERT_EXPR)
9045 offset = TREE_OPERAND (offset, 0);
9046
9047 if (TREE_CODE (offset) != NEGATE_EXPR)
9048 return 0;
9049
9050 offset = TREE_OPERAND (offset, 0);
9051 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9052 || TREE_CODE (offset) == NOP_EXPR
9053 || TREE_CODE (offset) == CONVERT_EXPR)
9054 offset = TREE_OPERAND (offset, 0);
9055
6fce44af
RK
9056 /* This must now be the address of EXP. */
9057 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
1ce7f3c2
RK
9058}
9059\f
e0a2f705 9060/* Return the tree node if an ARG corresponds to a string constant or zero
cc2902df 9061 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
fed3cef0
RK
9062 in bytes within the string that ARG is accessing. The type of the
9063 offset will be `sizetype'. */
b93a436e 9064
28f4ec01 9065tree
502b8322 9066string_constant (tree arg, tree *ptr_offset)
b93a436e
JL
9067{
9068 STRIP_NOPS (arg);
9069
9070 if (TREE_CODE (arg) == ADDR_EXPR
9071 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9072 {
fed3cef0 9073 *ptr_offset = size_zero_node;
b93a436e
JL
9074 return TREE_OPERAND (arg, 0);
9075 }
9076 else if (TREE_CODE (arg) == PLUS_EXPR)
9077 {
9078 tree arg0 = TREE_OPERAND (arg, 0);
9079 tree arg1 = TREE_OPERAND (arg, 1);
9080
9081 STRIP_NOPS (arg0);
9082 STRIP_NOPS (arg1);
9083
9084 if (TREE_CODE (arg0) == ADDR_EXPR
9085 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
bbf6f052 9086 {
fed3cef0 9087 *ptr_offset = convert (sizetype, arg1);
b93a436e 9088 return TREE_OPERAND (arg0, 0);
bbf6f052 9089 }
b93a436e
JL
9090 else if (TREE_CODE (arg1) == ADDR_EXPR
9091 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
bbf6f052 9092 {
fed3cef0 9093 *ptr_offset = convert (sizetype, arg0);
b93a436e 9094 return TREE_OPERAND (arg1, 0);
bbf6f052 9095 }
b93a436e 9096 }
ca695ac9 9097
b93a436e
JL
9098 return 0;
9099}
ca695ac9 9100\f
b93a436e
JL
9101/* Expand code for a post- or pre- increment or decrement
9102 and return the RTX for the result.
9103 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
1499e0a8 9104
b93a436e 9105static rtx
502b8322 9106expand_increment (tree exp, int post, int ignore)
ca695ac9 9107{
b3694847
SS
9108 rtx op0, op1;
9109 rtx temp, value;
9110 tree incremented = TREE_OPERAND (exp, 0);
b93a436e
JL
9111 optab this_optab = add_optab;
9112 int icode;
9113 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9114 int op0_is_copy = 0;
9115 int single_insn = 0;
9116 /* 1 means we can't store into OP0 directly,
9117 because it is a subreg narrower than a word,
9118 and we don't dare clobber the rest of the word. */
9119 int bad_subreg = 0;
1499e0a8 9120
b93a436e
JL
9121 /* Stabilize any component ref that might need to be
9122 evaluated more than once below. */
9123 if (!post
9124 || TREE_CODE (incremented) == BIT_FIELD_REF
9125 || (TREE_CODE (incremented) == COMPONENT_REF
9126 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9127 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9128 incremented = stabilize_reference (incremented);
9129 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9130 ones into save exprs so that they don't accidentally get evaluated
9131 more than once by the code below. */
9132 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9133 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9134 incremented = save_expr (incremented);
e9a25f70 9135
b93a436e
JL
9136 /* Compute the operands as RTX.
9137 Note whether OP0 is the actual lvalue or a copy of it:
9138 I believe it is a copy iff it is a register or subreg
6d2f8887 9139 and insns were generated in computing it. */
e9a25f70 9140
b93a436e 9141 temp = get_last_insn ();
37a08a29 9142 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
e9a25f70 9143
b93a436e
JL
9144 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9145 in place but instead must do sign- or zero-extension during assignment,
9146 so we copy it into a new register and let the code below use it as
9147 a copy.
e9a25f70 9148
b93a436e
JL
9149 Note that we can safely modify this SUBREG since it is know not to be
9150 shared (it was made by the expand_expr call above). */
9151
9152 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9153 {
9154 if (post)
9155 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9156 else
9157 bad_subreg = 1;
9158 }
9159 else if (GET_CODE (op0) == SUBREG
9160 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9161 {
9162 /* We cannot increment this SUBREG in place. If we are
9163 post-incrementing, get a copy of the old value. Otherwise,
9164 just mark that we cannot increment in place. */
9165 if (post)
9166 op0 = copy_to_reg (op0);
9167 else
9168 bad_subreg = 1;
e9a25f70
JL
9169 }
9170
b93a436e
JL
9171 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9172 && temp != get_last_insn ());
37a08a29 9173 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
1499e0a8 9174
b93a436e
JL
9175 /* Decide whether incrementing or decrementing. */
9176 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9177 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9178 this_optab = sub_optab;
9179
9180 /* Convert decrement by a constant into a negative increment. */
9181 if (this_optab == sub_optab
9182 && GET_CODE (op1) == CONST_INT)
ca695ac9 9183 {
3a94c984 9184 op1 = GEN_INT (-INTVAL (op1));
b93a436e 9185 this_optab = add_optab;
ca695ac9 9186 }
1499e0a8 9187
91ce572a 9188 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
505ddab6 9189 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
91ce572a 9190
b93a436e
JL
9191 /* For a preincrement, see if we can do this with a single instruction. */
9192 if (!post)
9193 {
9194 icode = (int) this_optab->handlers[(int) mode].insn_code;
9195 if (icode != (int) CODE_FOR_nothing
9196 /* Make sure that OP0 is valid for operands 0 and 1
9197 of the insn we want to queue. */
a995e389
RH
9198 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9199 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9200 && (*insn_data[icode].operand[2].predicate) (op1, mode))
b93a436e
JL
9201 single_insn = 1;
9202 }
bbf6f052 9203
b93a436e
JL
9204 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9205 then we cannot just increment OP0. We must therefore contrive to
9206 increment the original value. Then, for postincrement, we can return
9207 OP0 since it is a copy of the old value. For preincrement, expand here
9208 unless we can do it with a single insn.
bbf6f052 9209
b93a436e
JL
9210 Likewise if storing directly into OP0 would clobber high bits
9211 we need to preserve (bad_subreg). */
9212 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
a358cee0 9213 {
b93a436e
JL
9214 /* This is the easiest way to increment the value wherever it is.
9215 Problems with multiple evaluation of INCREMENTED are prevented
9216 because either (1) it is a component_ref or preincrement,
9217 in which case it was stabilized above, or (2) it is an array_ref
9218 with constant index in an array in a register, which is
9219 safe to reevaluate. */
9220 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9221 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9222 ? MINUS_EXPR : PLUS_EXPR),
9223 TREE_TYPE (exp),
9224 incremented,
9225 TREE_OPERAND (exp, 1));
a358cee0 9226
b93a436e
JL
9227 while (TREE_CODE (incremented) == NOP_EXPR
9228 || TREE_CODE (incremented) == CONVERT_EXPR)
9229 {
9230 newexp = convert (TREE_TYPE (incremented), newexp);
9231 incremented = TREE_OPERAND (incremented, 0);
9232 }
bbf6f052 9233
b90f141a 9234 temp = expand_assignment (incremented, newexp, ! post && ! ignore);
b93a436e
JL
9235 return post ? op0 : temp;
9236 }
bbf6f052 9237
b93a436e
JL
9238 if (post)
9239 {
9240 /* We have a true reference to the value in OP0.
9241 If there is an insn to add or subtract in this mode, queue it.
d91edf86 9242 Queuing the increment insn avoids the register shuffling
b93a436e
JL
9243 that often results if we must increment now and first save
9244 the old value for subsequent use. */
bbf6f052 9245
b93a436e
JL
9246#if 0 /* Turned off to avoid making extra insn for indexed memref. */
9247 op0 = stabilize (op0);
9248#endif
41dfd40c 9249
b93a436e
JL
9250 icode = (int) this_optab->handlers[(int) mode].insn_code;
9251 if (icode != (int) CODE_FOR_nothing
9252 /* Make sure that OP0 is valid for operands 0 and 1
9253 of the insn we want to queue. */
a995e389
RH
9254 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9255 && (*insn_data[icode].operand[1].predicate) (op0, mode))
b93a436e 9256 {
a995e389 9257 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
b93a436e 9258 op1 = force_reg (mode, op1);
bbf6f052 9259
b93a436e
JL
9260 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9261 }
9262 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9263 {
9264 rtx addr = (general_operand (XEXP (op0, 0), mode)
9265 ? force_reg (Pmode, XEXP (op0, 0))
9266 : copy_to_reg (XEXP (op0, 0)));
9267 rtx temp, result;
ca695ac9 9268
792760b9 9269 op0 = replace_equiv_address (op0, addr);
b93a436e 9270 temp = force_reg (GET_MODE (op0), op0);
a995e389 9271 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
b93a436e 9272 op1 = force_reg (mode, op1);
ca695ac9 9273
b93a436e
JL
9274 /* The increment queue is LIFO, thus we have to `queue'
9275 the instructions in reverse order. */
9276 enqueue_insn (op0, gen_move_insn (op0, temp));
9277 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9278 return result;
bbf6f052
RK
9279 }
9280 }
ca695ac9 9281
b93a436e
JL
9282 /* Preincrement, or we can't increment with one simple insn. */
9283 if (post)
9284 /* Save a copy of the value before inc or dec, to return it later. */
9285 temp = value = copy_to_reg (op0);
9286 else
9287 /* Arrange to return the incremented value. */
9288 /* Copy the rtx because expand_binop will protect from the queue,
9289 and the results of that would be invalid for us to return
9290 if our caller does emit_queue before using our result. */
9291 temp = copy_rtx (value = op0);
bbf6f052 9292
b93a436e 9293 /* Increment however we can. */
37a08a29 9294 op1 = expand_binop (mode, this_optab, value, op1, op0,
8df83eae 9295 TYPE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
37a08a29 9296
b93a436e
JL
9297 /* Make sure the value is stored into OP0. */
9298 if (op1 != op0)
9299 emit_move_insn (op0, op1);
5718612f 9300
b93a436e
JL
9301 return temp;
9302}
9303\f
b93a436e
JL
9304/* Generate code to calculate EXP using a store-flag instruction
9305 and return an rtx for the result. EXP is either a comparison
9306 or a TRUTH_NOT_EXPR whose operand is a comparison.
ca695ac9 9307
b93a436e 9308 If TARGET is nonzero, store the result there if convenient.
ca695ac9 9309
cc2902df 9310 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
b93a436e 9311 cheap.
ca695ac9 9312
b93a436e
JL
9313 Return zero if there is no suitable set-flag instruction
9314 available on this machine.
ca695ac9 9315
b93a436e
JL
9316 Once expand_expr has been called on the arguments of the comparison,
9317 we are committed to doing the store flag, since it is not safe to
9318 re-evaluate the expression. We emit the store-flag insn by calling
9319 emit_store_flag, but only expand the arguments if we have a reason
9320 to believe that emit_store_flag will be successful. If we think that
9321 it will, but it isn't, we have to simulate the store-flag with a
9322 set/jump/set sequence. */
ca695ac9 9323
b93a436e 9324static rtx
502b8322 9325do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
b93a436e
JL
9326{
9327 enum rtx_code code;
9328 tree arg0, arg1, type;
9329 tree tem;
9330 enum machine_mode operand_mode;
9331 int invert = 0;
9332 int unsignedp;
9333 rtx op0, op1;
9334 enum insn_code icode;
9335 rtx subtarget = target;
381127e8 9336 rtx result, label;
ca695ac9 9337
b93a436e
JL
9338 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9339 result at the end. We can't simply invert the test since it would
9340 have already been inverted if it were valid. This case occurs for
9341 some floating-point comparisons. */
ca695ac9 9342
b93a436e
JL
9343 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9344 invert = 1, exp = TREE_OPERAND (exp, 0);
ca695ac9 9345
b93a436e
JL
9346 arg0 = TREE_OPERAND (exp, 0);
9347 arg1 = TREE_OPERAND (exp, 1);
5129d2ce
AH
9348
9349 /* Don't crash if the comparison was erroneous. */
9350 if (arg0 == error_mark_node || arg1 == error_mark_node)
9351 return const0_rtx;
9352
b93a436e
JL
9353 type = TREE_TYPE (arg0);
9354 operand_mode = TYPE_MODE (type);
8df83eae 9355 unsignedp = TYPE_UNSIGNED (type);
ca695ac9 9356
b93a436e
JL
9357 /* We won't bother with BLKmode store-flag operations because it would mean
9358 passing a lot of information to emit_store_flag. */
9359 if (operand_mode == BLKmode)
9360 return 0;
ca695ac9 9361
b93a436e
JL
9362 /* We won't bother with store-flag operations involving function pointers
9363 when function pointers must be canonicalized before comparisons. */
9364#ifdef HAVE_canonicalize_funcptr_for_compare
9365 if (HAVE_canonicalize_funcptr_for_compare
9366 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9367 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9368 == FUNCTION_TYPE))
9369 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9370 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9371 == FUNCTION_TYPE))))
9372 return 0;
ca695ac9
JB
9373#endif
9374
b93a436e
JL
9375 STRIP_NOPS (arg0);
9376 STRIP_NOPS (arg1);
ca695ac9 9377
b93a436e
JL
9378 /* Get the rtx comparison code to use. We know that EXP is a comparison
9379 operation of some type. Some comparisons against 1 and -1 can be
9380 converted to comparisons with zero. Do so here so that the tests
9381 below will be aware that we have a comparison with zero. These
9382 tests will not catch constants in the first operand, but constants
9383 are rarely passed as the first operand. */
ca695ac9 9384
b93a436e
JL
9385 switch (TREE_CODE (exp))
9386 {
9387 case EQ_EXPR:
9388 code = EQ;
bbf6f052 9389 break;
b93a436e
JL
9390 case NE_EXPR:
9391 code = NE;
bbf6f052 9392 break;
b93a436e
JL
9393 case LT_EXPR:
9394 if (integer_onep (arg1))
9395 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9396 else
9397 code = unsignedp ? LTU : LT;
ca695ac9 9398 break;
b93a436e
JL
9399 case LE_EXPR:
9400 if (! unsignedp && integer_all_onesp (arg1))
9401 arg1 = integer_zero_node, code = LT;
9402 else
9403 code = unsignedp ? LEU : LE;
ca695ac9 9404 break;
b93a436e
JL
9405 case GT_EXPR:
9406 if (! unsignedp && integer_all_onesp (arg1))
9407 arg1 = integer_zero_node, code = GE;
9408 else
9409 code = unsignedp ? GTU : GT;
9410 break;
9411 case GE_EXPR:
9412 if (integer_onep (arg1))
9413 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9414 else
9415 code = unsignedp ? GEU : GE;
ca695ac9 9416 break;
1eb8759b
RH
9417
9418 case UNORDERED_EXPR:
9419 code = UNORDERED;
9420 break;
9421 case ORDERED_EXPR:
9422 code = ORDERED;
9423 break;
9424 case UNLT_EXPR:
9425 code = UNLT;
9426 break;
9427 case UNLE_EXPR:
9428 code = UNLE;
9429 break;
9430 case UNGT_EXPR:
9431 code = UNGT;
9432 break;
9433 case UNGE_EXPR:
9434 code = UNGE;
9435 break;
9436 case UNEQ_EXPR:
9437 code = UNEQ;
9438 break;
1eb8759b 9439
ca695ac9 9440 default:
b93a436e 9441 abort ();
bbf6f052 9442 }
bbf6f052 9443
b93a436e
JL
9444 /* Put a constant second. */
9445 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9446 {
9447 tem = arg0; arg0 = arg1; arg1 = tem;
9448 code = swap_condition (code);
ca695ac9 9449 }
bbf6f052 9450
b93a436e
JL
9451 /* If this is an equality or inequality test of a single bit, we can
9452 do this by shifting the bit being tested to the low-order bit and
9453 masking the result with the constant 1. If the condition was EQ,
9454 we xor it with 1. This does not require an scc insn and is faster
7960bf22
JL
9455 than an scc insn even if we have it.
9456
9457 The code to make this transformation was moved into fold_single_bit_test,
9458 so we just call into the folder and expand its result. */
d39985fa 9459
b93a436e
JL
9460 if ((code == NE || code == EQ)
9461 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9462 && integer_pow2p (TREE_OPERAND (arg0, 1)))
60cd4dae 9463 {
ae2bcd98 9464 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
60cd4dae 9465 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
450b1728 9466 arg0, arg1, type),
60cd4dae
JL
9467 target, VOIDmode, EXPAND_NORMAL);
9468 }
bbf6f052 9469
b93a436e 9470 /* Now see if we are likely to be able to do this. Return if not. */
1eb8759b 9471 if (! can_compare_p (code, operand_mode, ccp_store_flag))
b93a436e 9472 return 0;
1eb8759b 9473
b93a436e
JL
9474 icode = setcc_gen_code[(int) code];
9475 if (icode == CODE_FOR_nothing
a995e389 9476 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
ca695ac9 9477 {
b93a436e
JL
9478 /* We can only do this if it is one of the special cases that
9479 can be handled without an scc insn. */
9480 if ((code == LT && integer_zerop (arg1))
9481 || (! only_cheap && code == GE && integer_zerop (arg1)))
9482 ;
9483 else if (BRANCH_COST >= 0
9484 && ! only_cheap && (code == NE || code == EQ)
9485 && TREE_CODE (type) != REAL_TYPE
9486 && ((abs_optab->handlers[(int) operand_mode].insn_code
9487 != CODE_FOR_nothing)
9488 || (ffs_optab->handlers[(int) operand_mode].insn_code
9489 != CODE_FOR_nothing)))
9490 ;
9491 else
9492 return 0;
ca695ac9 9493 }
3a94c984 9494
296b4ed9 9495 if (! get_subtarget (target)
e3be1116 9496 || GET_MODE (subtarget) != operand_mode)
b93a436e
JL
9497 subtarget = 0;
9498
eb698c58 9499 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
b93a436e
JL
9500
9501 if (target == 0)
9502 target = gen_reg_rtx (mode);
9503
9504 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9505 because, if the emit_store_flag does anything it will succeed and
9506 OP0 and OP1 will not be used subsequently. */
ca695ac9 9507
b93a436e
JL
9508 result = emit_store_flag (target, code,
9509 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9510 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9511 operand_mode, unsignedp, 1);
ca695ac9 9512
b93a436e
JL
9513 if (result)
9514 {
9515 if (invert)
9516 result = expand_binop (mode, xor_optab, result, const1_rtx,
9517 result, 0, OPTAB_LIB_WIDEN);
9518 return result;
ca695ac9 9519 }
bbf6f052 9520
b93a436e
JL
9521 /* If this failed, we have to do this with set/compare/jump/set code. */
9522 if (GET_CODE (target) != REG
9523 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9524 target = gen_reg_rtx (GET_MODE (target));
9525
9526 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9527 result = compare_from_rtx (op0, op1, code, unsignedp,
a06ef755 9528 operand_mode, NULL_RTX);
b93a436e
JL
9529 if (GET_CODE (result) == CONST_INT)
9530 return (((result == const0_rtx && ! invert)
9531 || (result != const0_rtx && invert))
9532 ? const0_rtx : const1_rtx);
ca695ac9 9533
8f08e8c0
JL
9534 /* The code of RESULT may not match CODE if compare_from_rtx
9535 decided to swap its operands and reverse the original code.
9536
9537 We know that compare_from_rtx returns either a CONST_INT or
9538 a new comparison code, so it is safe to just extract the
9539 code from RESULT. */
9540 code = GET_CODE (result);
9541
b93a436e
JL
9542 label = gen_label_rtx ();
9543 if (bcc_gen_fctn[(int) code] == 0)
9544 abort ();
0f41302f 9545
b93a436e
JL
9546 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9547 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9548 emit_label (label);
bbf6f052 9549
b93a436e 9550 return target;
ca695ac9 9551}
b93a436e 9552\f
b93a436e 9553
ad82abb8
ZW
9554/* Stubs in case we haven't got a casesi insn. */
9555#ifndef HAVE_casesi
9556# define HAVE_casesi 0
9557# define gen_casesi(a, b, c, d, e) (0)
9558# define CODE_FOR_casesi CODE_FOR_nothing
9559#endif
9560
9561/* If the machine does not have a case insn that compares the bounds,
9562 this means extra overhead for dispatch tables, which raises the
9563 threshold for using them. */
9564#ifndef CASE_VALUES_THRESHOLD
9565#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9566#endif /* CASE_VALUES_THRESHOLD */
9567
9568unsigned int
502b8322 9569case_values_threshold (void)
ad82abb8
ZW
9570{
9571 return CASE_VALUES_THRESHOLD;
9572}
9573
9574/* Attempt to generate a casesi instruction. Returns 1 if successful,
9575 0 otherwise (i.e. if there is no casesi instruction). */
9576int
502b8322
AJ
9577try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9578 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
ad82abb8
ZW
9579{
9580 enum machine_mode index_mode = SImode;
9581 int index_bits = GET_MODE_BITSIZE (index_mode);
9582 rtx op1, op2, index;
9583 enum machine_mode op_mode;
9584
9585 if (! HAVE_casesi)
9586 return 0;
9587
9588 /* Convert the index to SImode. */
9589 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9590 {
9591 enum machine_mode omode = TYPE_MODE (index_type);
9592 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
9593
9594 /* We must handle the endpoints in the original mode. */
9595 index_expr = build (MINUS_EXPR, index_type,
9596 index_expr, minval);
9597 minval = integer_zero_node;
9598 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9599 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
a06ef755 9600 omode, 1, default_label);
ad82abb8
ZW
9601 /* Now we can safely truncate. */
9602 index = convert_to_mode (index_mode, index, 0);
9603 }
9604 else
9605 {
9606 if (TYPE_MODE (index_type) != index_mode)
9607 {
ae2bcd98 9608 index_expr = convert (lang_hooks.types.type_for_size
b0c48229 9609 (index_bits, 0), index_expr);
ad82abb8
ZW
9610 index_type = TREE_TYPE (index_expr);
9611 }
9612
9613 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9614 }
9615 emit_queue ();
9616 index = protect_from_queue (index, 0);
9617 do_pending_stack_adjust ();
9618
9619 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9620 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9621 (index, op_mode))
9622 index = copy_to_mode_reg (op_mode, index);
e87b4f3f 9623
ad82abb8
ZW
9624 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
9625
9626 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9627 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
8df83eae 9628 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
ad82abb8
ZW
9629 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9630 (op1, op_mode))
9631 op1 = copy_to_mode_reg (op_mode, op1);
9632
9633 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
9634
9635 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9636 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
8df83eae 9637 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
ad82abb8
ZW
9638 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9639 (op2, op_mode))
9640 op2 = copy_to_mode_reg (op_mode, op2);
9641
9642 emit_jump_insn (gen_casesi (index, op1, op2,
9643 table_label, default_label));
9644 return 1;
9645}
9646
9647/* Attempt to generate a tablejump instruction; same concept. */
9648#ifndef HAVE_tablejump
9649#define HAVE_tablejump 0
9650#define gen_tablejump(x, y) (0)
9651#endif
9652
9653/* Subroutine of the next function.
9654
9655 INDEX is the value being switched on, with the lowest value
b93a436e
JL
9656 in the table already subtracted.
9657 MODE is its expected mode (needed if INDEX is constant).
9658 RANGE is the length of the jump table.
9659 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
88d3b7f0 9660
b93a436e
JL
9661 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9662 index value is out of range. */
0f41302f 9663
ad82abb8 9664static void
502b8322
AJ
9665do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9666 rtx default_label)
ca695ac9 9667{
b3694847 9668 rtx temp, vector;
88d3b7f0 9669
74f6d071
JH
9670 if (INTVAL (range) > cfun->max_jumptable_ents)
9671 cfun->max_jumptable_ents = INTVAL (range);
1877be45 9672
b93a436e
JL
9673 /* Do an unsigned comparison (in the proper mode) between the index
9674 expression and the value which represents the length of the range.
9675 Since we just finished subtracting the lower bound of the range
9676 from the index expression, this comparison allows us to simultaneously
9677 check that the original index expression value is both greater than
9678 or equal to the minimum value of the range and less than or equal to
9679 the maximum value of the range. */
709f5be1 9680
c5d5d461 9681 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
a06ef755 9682 default_label);
bbf6f052 9683
b93a436e
JL
9684 /* If index is in range, it must fit in Pmode.
9685 Convert to Pmode so we can index with it. */
9686 if (mode != Pmode)
9687 index = convert_to_mode (Pmode, index, 1);
bbf6f052 9688
ba228239 9689 /* Don't let a MEM slip through, because then INDEX that comes
b93a436e
JL
9690 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9691 and break_out_memory_refs will go to work on it and mess it up. */
9692#ifdef PIC_CASE_VECTOR_ADDRESS
9693 if (flag_pic && GET_CODE (index) != REG)
9694 index = copy_to_mode_reg (Pmode, index);
9695#endif
ca695ac9 9696
b93a436e
JL
9697 /* If flag_force_addr were to affect this address
9698 it could interfere with the tricky assumptions made
9699 about addresses that contain label-refs,
9700 which may be valid only very near the tablejump itself. */
9701 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9702 GET_MODE_SIZE, because this indicates how large insns are. The other
9703 uses should all be Pmode, because they are addresses. This code
9704 could fail if addresses and insns are not the same size. */
9705 index = gen_rtx_PLUS (Pmode,
9706 gen_rtx_MULT (Pmode, index,
9707 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9708 gen_rtx_LABEL_REF (Pmode, table_label));
9709#ifdef PIC_CASE_VECTOR_ADDRESS
9710 if (flag_pic)
9711 index = PIC_CASE_VECTOR_ADDRESS (index);
9712 else
bbf6f052 9713#endif
b93a436e
JL
9714 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9715 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9716 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
9717 RTX_UNCHANGING_P (vector) = 1;
4da2eb6b 9718 MEM_NOTRAP_P (vector) = 1;
b93a436e
JL
9719 convert_move (temp, vector, 0);
9720
9721 emit_jump_insn (gen_tablejump (temp, table_label));
9722
9723 /* If we are generating PIC code or if the table is PC-relative, the
9724 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9725 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9726 emit_barrier ();
bbf6f052 9727}
b93a436e 9728
ad82abb8 9729int
502b8322
AJ
9730try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9731 rtx table_label, rtx default_label)
ad82abb8
ZW
9732{
9733 rtx index;
9734
9735 if (! HAVE_tablejump)
9736 return 0;
9737
9738 index_expr = fold (build (MINUS_EXPR, index_type,
9739 convert (index_type, index_expr),
9740 convert (index_type, minval)));
9741 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9742 emit_queue ();
9743 index = protect_from_queue (index, 0);
9744 do_pending_stack_adjust ();
9745
9746 do_tablejump (index, TYPE_MODE (index_type),
9747 convert_modes (TYPE_MODE (index_type),
9748 TYPE_MODE (TREE_TYPE (range)),
9749 expand_expr (range, NULL_RTX,
9750 VOIDmode, 0),
8df83eae 9751 TYPE_UNSIGNED (TREE_TYPE (range))),
ad82abb8
ZW
9752 table_label, default_label);
9753 return 1;
9754}
e2500fed 9755
cb2a532e
AH
9756/* Nonzero if the mode is a valid vector mode for this architecture.
9757 This returns nonzero even if there is no hardware support for the
9758 vector mode, but we can emulate with narrower modes. */
9759
9760int
502b8322 9761vector_mode_valid_p (enum machine_mode mode)
cb2a532e
AH
9762{
9763 enum mode_class class = GET_MODE_CLASS (mode);
9764 enum machine_mode innermode;
9765
9766 /* Doh! What's going on? */
9767 if (class != MODE_VECTOR_INT
9768 && class != MODE_VECTOR_FLOAT)
9769 return 0;
9770
9771 /* Hardware support. Woo hoo! */
9772 if (VECTOR_MODE_SUPPORTED_P (mode))
9773 return 1;
9774
9775 innermode = GET_MODE_INNER (mode);
9776
9777 /* We should probably return 1 if requesting V4DI and we have no DI,
9778 but we have V2DI, but this is probably very unlikely. */
9779
9780 /* If we have support for the inner mode, we can safely emulate it.
9781 We may not have V2DI, but me can emulate with a pair of DIs. */
9782 return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
9783}
9784
d744e06e
AH
9785/* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
9786static rtx
502b8322 9787const_vector_from_tree (tree exp)
d744e06e
AH
9788{
9789 rtvec v;
9790 int units, i;
9791 tree link, elt;
9792 enum machine_mode inner, mode;
9793
9794 mode = TYPE_MODE (TREE_TYPE (exp));
9795
9796 if (is_zeros_p (exp))
9797 return CONST0_RTX (mode);
9798
9799 units = GET_MODE_NUNITS (mode);
9800 inner = GET_MODE_INNER (mode);
9801
9802 v = rtvec_alloc (units);
9803
9804 link = TREE_VECTOR_CST_ELTS (exp);
9805 for (i = 0; link; link = TREE_CHAIN (link), ++i)
9806 {
9807 elt = TREE_VALUE (link);
9808
9809 if (TREE_CODE (elt) == REAL_CST)
9810 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
9811 inner);
9812 else
9813 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
9814 TREE_INT_CST_HIGH (elt),
9815 inner);
9816 }
9817
5f6c070d
AH
9818 /* Initialize remaining elements to 0. */
9819 for (; i < units; ++i)
9820 RTVEC_ELT (v, i) = CONST0_RTX (inner);
9821
d744e06e
AH
9822 return gen_rtx_raw_CONST_VECTOR (mode, v);
9823}
9824
e2500fed 9825#include "gt-expr.h"
This page took 4.330108 seconds and 5 git commands to generate.