]> gcc.gnu.org Git - gcc.git/blame - gcc/expr.c
* doc/cni.sgml: Removed, merged into gcj.texi.
[gcc.git] / gcc / expr.c
CommitLineData
bbf6f052 1/* Convert tree expression to rtl instructions, for GNU compiler.
8752c357 2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
d9221e01 3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
bbf6f052 4
1322177d 5This file is part of GCC.
bbf6f052 6
1322177d
LB
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 2, or (at your option) any later
10version.
bbf6f052 11
1322177d
LB
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
bbf6f052
RK
16
17You should have received a copy of the GNU General Public License
1322177d
LB
18along with GCC; see the file COPYING. If not, write to the Free
19Software Foundation, 59 Temple Place - Suite 330, Boston, MA
2002111-1307, USA. */
bbf6f052 21
bbf6f052 22#include "config.h"
670ee920 23#include "system.h"
4977bab6
ZW
24#include "coretypes.h"
25#include "tm.h"
ca695ac9 26#include "machmode.h"
11ad4784 27#include "real.h"
bbf6f052
RK
28#include "rtl.h"
29#include "tree.h"
30#include "flags.h"
bf76bb5a 31#include "regs.h"
4ed67205 32#include "hard-reg-set.h"
3d195391 33#include "except.h"
bbf6f052 34#include "function.h"
bbf6f052 35#include "insn-config.h"
34e81b5a 36#include "insn-attr.h"
3a94c984 37/* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
d6f4ec51 38#include "expr.h"
e78d8e51
ZW
39#include "optabs.h"
40#include "libfuncs.h"
bbf6f052 41#include "recog.h"
3ef1eef4 42#include "reload.h"
bbf6f052 43#include "output.h"
bbf6f052 44#include "typeclass.h"
10f0ad3d 45#include "toplev.h"
d7db6646 46#include "ggc.h"
ac79cd5a 47#include "langhooks.h"
e2c49ac2 48#include "intl.h"
b1474bb7 49#include "tm_p.h"
6de9cd9a 50#include "tree-iterator.h"
c988af2b 51#include "target.h"
bbf6f052 52
bbf6f052 53/* Decide whether a function's arguments should be processed
bbc8a071
RK
54 from first to last or from last to first.
55
56 They should if the stack and args grow in opposite directions, but
57 only if we have push insns. */
bbf6f052 58
bbf6f052 59#ifdef PUSH_ROUNDING
bbc8a071 60
2da4124d 61#ifndef PUSH_ARGS_REVERSED
3319a347 62#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
3a94c984 63#define PUSH_ARGS_REVERSED /* If it's last to first. */
bbf6f052 64#endif
2da4124d 65#endif
bbc8a071 66
bbf6f052
RK
67#endif
68
69#ifndef STACK_PUSH_CODE
70#ifdef STACK_GROWS_DOWNWARD
71#define STACK_PUSH_CODE PRE_DEC
72#else
73#define STACK_PUSH_CODE PRE_INC
74#endif
75#endif
76
4ca79136
RH
77/* Convert defined/undefined to boolean. */
78#ifdef TARGET_MEM_FUNCTIONS
79#undef TARGET_MEM_FUNCTIONS
80#define TARGET_MEM_FUNCTIONS 1
81#else
82#define TARGET_MEM_FUNCTIONS 0
83#endif
84
85
bbf6f052
RK
86/* If this is nonzero, we do not bother generating VOLATILE
87 around volatile memory references, and we are willing to
88 output indirect addresses. If cse is to follow, we reject
89 indirect addresses so a useful potential cse is generated;
90 if it is used only once, instruction combination will produce
91 the same indirect address eventually. */
92int cse_not_expected;
93
4969d05d
RK
94/* This structure is used by move_by_pieces to describe the move to
95 be performed. */
4969d05d
RK
96struct move_by_pieces
97{
98 rtx to;
99 rtx to_addr;
100 int autinc_to;
101 int explicit_inc_to;
102 rtx from;
103 rtx from_addr;
104 int autinc_from;
105 int explicit_inc_from;
3bdf5ad1
RK
106 unsigned HOST_WIDE_INT len;
107 HOST_WIDE_INT offset;
4969d05d
RK
108 int reverse;
109};
110
57814e5e 111/* This structure is used by store_by_pieces to describe the clear to
9de08200
RK
112 be performed. */
113
57814e5e 114struct store_by_pieces
9de08200
RK
115{
116 rtx to;
117 rtx to_addr;
118 int autinc_to;
119 int explicit_inc_to;
3bdf5ad1
RK
120 unsigned HOST_WIDE_INT len;
121 HOST_WIDE_INT offset;
502b8322 122 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
fad205ff 123 void *constfundata;
9de08200
RK
124 int reverse;
125};
126
502b8322
AJ
127static rtx enqueue_insn (rtx, rtx);
128static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
129 unsigned int);
130static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
131 struct move_by_pieces *);
132static bool block_move_libcall_safe_for_call_parm (void);
133static bool emit_block_move_via_movstr (rtx, rtx, rtx, unsigned);
134static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
135static tree emit_block_move_libcall_fn (int);
136static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
137static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
138static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
139static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
140static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
141 struct store_by_pieces *);
142static bool clear_storage_via_clrstr (rtx, rtx, unsigned);
143static rtx clear_storage_via_libcall (rtx, rtx);
144static tree clear_storage_libcall_fn (int);
145static rtx compress_float_constant (rtx, rtx);
146static rtx get_subtarget (rtx);
502b8322
AJ
147static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
148 HOST_WIDE_INT, enum machine_mode,
149 tree, tree, int, int);
150static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
151static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
152 tree, enum machine_mode, int, tree, int);
153static rtx var_rtx (tree);
154
155static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
d50a16c4 156static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
502b8322
AJ
157
158static int is_aligning_offset (tree, tree);
159static rtx expand_increment (tree, int, int);
eb698c58
RS
160static void expand_operands (tree, tree, rtx, rtx*, rtx*,
161 enum expand_modifier);
502b8322 162static rtx do_store_flag (tree, rtx, enum machine_mode, int);
21d93687 163#ifdef PUSH_ROUNDING
502b8322 164static void emit_single_push_insn (enum machine_mode, rtx, tree);
21d93687 165#endif
502b8322
AJ
166static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
167static rtx const_vector_from_tree (tree);
bbf6f052 168
4fa52007
RK
169/* Record for each mode whether we can move a register directly to or
170 from an object of that mode in memory. If we can't, we won't try
171 to use that mode directly when accessing a field of that mode. */
172
173static char direct_load[NUM_MACHINE_MODES];
174static char direct_store[NUM_MACHINE_MODES];
175
51286de6
RH
176/* Record for each mode whether we can float-extend from memory. */
177
178static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
179
fbe1758d 180/* This macro is used to determine whether move_by_pieces should be called
3a94c984 181 to perform a structure copy. */
fbe1758d 182#ifndef MOVE_BY_PIECES_P
19caa751 183#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
8752c357 184 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
fbe1758d
AM
185#endif
186
78762e3b
RS
187/* This macro is used to determine whether clear_by_pieces should be
188 called to clear storage. */
189#ifndef CLEAR_BY_PIECES_P
190#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
191 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
192#endif
193
4977bab6
ZW
194/* This macro is used to determine whether store_by_pieces should be
195 called to "memset" storage with byte values other than zero, or
196 to "memcpy" storage when the source is a constant string. */
197#ifndef STORE_BY_PIECES_P
198#define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
199#endif
200
266007a7 201/* This array records the insn_code of insns to perform block moves. */
e6677db3 202enum insn_code movstr_optab[NUM_MACHINE_MODES];
266007a7 203
9de08200
RK
204/* This array records the insn_code of insns to perform block clears. */
205enum insn_code clrstr_optab[NUM_MACHINE_MODES];
206
118355a0
ZW
207/* These arrays record the insn_code of two different kinds of insns
208 to perform block compares. */
209enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
210enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
211
cc2902df 212/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
e87b4f3f
RS
213
214#ifndef SLOW_UNALIGNED_ACCESS
e1565e65 215#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
e87b4f3f 216#endif
bbf6f052 217\f
4fa52007 218/* This is run once per compilation to set up which modes can be used
266007a7 219 directly in memory and to initialize the block move optab. */
4fa52007
RK
220
221void
502b8322 222init_expr_once (void)
4fa52007
RK
223{
224 rtx insn, pat;
225 enum machine_mode mode;
cff48d8f 226 int num_clobbers;
9ec36da5 227 rtx mem, mem1;
bf1660a6 228 rtx reg;
9ec36da5 229
e2549997
RS
230 /* Try indexing by frame ptr and try by stack ptr.
231 It is known that on the Convex the stack ptr isn't a valid index.
232 With luck, one or the other is valid on any machine. */
9ec36da5
JL
233 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
234 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
4fa52007 235
bf1660a6
JL
236 /* A scratch register we can modify in-place below to avoid
237 useless RTL allocations. */
238 reg = gen_rtx_REG (VOIDmode, -1);
239
1f8c3c5b
RH
240 insn = rtx_alloc (INSN);
241 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
242 PATTERN (insn) = pat;
4fa52007
RK
243
244 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
245 mode = (enum machine_mode) ((int) mode + 1))
246 {
247 int regno;
4fa52007
RK
248
249 direct_load[(int) mode] = direct_store[(int) mode] = 0;
250 PUT_MODE (mem, mode);
e2549997 251 PUT_MODE (mem1, mode);
bf1660a6 252 PUT_MODE (reg, mode);
4fa52007 253
e6fe56a4
RK
254 /* See if there is some register that can be used in this mode and
255 directly loaded or stored from memory. */
256
7308a047
RS
257 if (mode != VOIDmode && mode != BLKmode)
258 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
259 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
260 regno++)
261 {
262 if (! HARD_REGNO_MODE_OK (regno, mode))
263 continue;
e6fe56a4 264
bf1660a6 265 REGNO (reg) = regno;
e6fe56a4 266
7308a047
RS
267 SET_SRC (pat) = mem;
268 SET_DEST (pat) = reg;
269 if (recog (pat, insn, &num_clobbers) >= 0)
270 direct_load[(int) mode] = 1;
e6fe56a4 271
e2549997
RS
272 SET_SRC (pat) = mem1;
273 SET_DEST (pat) = reg;
274 if (recog (pat, insn, &num_clobbers) >= 0)
275 direct_load[(int) mode] = 1;
276
7308a047
RS
277 SET_SRC (pat) = reg;
278 SET_DEST (pat) = mem;
279 if (recog (pat, insn, &num_clobbers) >= 0)
280 direct_store[(int) mode] = 1;
e2549997
RS
281
282 SET_SRC (pat) = reg;
283 SET_DEST (pat) = mem1;
284 if (recog (pat, insn, &num_clobbers) >= 0)
285 direct_store[(int) mode] = 1;
7308a047 286 }
4fa52007
RK
287 }
288
51286de6
RH
289 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
290
291 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
292 mode = GET_MODE_WIDER_MODE (mode))
293 {
294 enum machine_mode srcmode;
295 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
0fb7aeda 296 srcmode = GET_MODE_WIDER_MODE (srcmode))
51286de6
RH
297 {
298 enum insn_code ic;
299
300 ic = can_extend_p (mode, srcmode, 0);
301 if (ic == CODE_FOR_nothing)
302 continue;
303
304 PUT_MODE (mem, srcmode);
0fb7aeda 305
51286de6
RH
306 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
307 float_extend_from_mem[mode][srcmode] = true;
308 }
309 }
4fa52007 310}
cff48d8f 311
bbf6f052
RK
312/* This is run at the start of compiling a function. */
313
314void
502b8322 315init_expr (void)
bbf6f052 316{
3a70d621 317 cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
bbf6f052
RK
318}
319
49ad7cfa 320/* Small sanity check that the queue is empty at the end of a function. */
296b4ed9 321
bbf6f052 322void
502b8322 323finish_expr_for_function (void)
bbf6f052 324{
49ad7cfa
BS
325 if (pending_chain)
326 abort ();
bbf6f052
RK
327}
328\f
329/* Manage the queue of increment instructions to be output
330 for POSTINCREMENT_EXPR expressions, etc. */
331
bbf6f052
RK
332/* Queue up to increment (or change) VAR later. BODY says how:
333 BODY should be the same thing you would pass to emit_insn
334 to increment right away. It will go to emit_insn later on.
335
336 The value is a QUEUED expression to be used in place of VAR
337 where you want to guarantee the pre-incrementation value of VAR. */
338
339static rtx
502b8322 340enqueue_insn (rtx var, rtx body)
bbf6f052 341{
c5c76735
JL
342 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
343 body, pending_chain);
bbf6f052
RK
344 return pending_chain;
345}
346
347/* Use protect_from_queue to convert a QUEUED expression
348 into something that you can put immediately into an instruction.
349 If the queued incrementation has not happened yet,
350 protect_from_queue returns the variable itself.
351 If the incrementation has happened, protect_from_queue returns a temp
352 that contains a copy of the old value of the variable.
353
354 Any time an rtx which might possibly be a QUEUED is to be put
355 into an instruction, it must be passed through protect_from_queue first.
356 QUEUED expressions are not meaningful in instructions.
357
358 Do not pass a value through protect_from_queue and then hold
359 on to it for a while before putting it in an instruction!
360 If the queue is flushed in between, incorrect code will result. */
361
362rtx
502b8322 363protect_from_queue (rtx x, int modify)
bbf6f052 364{
b3694847 365 RTX_CODE code = GET_CODE (x);
bbf6f052
RK
366
367#if 0 /* A QUEUED can hang around after the queue is forced out. */
368 /* Shortcut for most common case. */
369 if (pending_chain == 0)
370 return x;
371#endif
372
373 if (code != QUEUED)
374 {
e9baa644
RK
375 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
376 use of autoincrement. Make a copy of the contents of the memory
377 location rather than a copy of the address, but not if the value is
378 of mode BLKmode. Don't modify X in place since it might be
379 shared. */
bbf6f052
RK
380 if (code == MEM && GET_MODE (x) != BLKmode
381 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
382 {
f1ec5147
RK
383 rtx y = XEXP (x, 0);
384 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
e9baa644 385
bbf6f052
RK
386 if (QUEUED_INSN (y))
387 {
f1ec5147
RK
388 rtx temp = gen_reg_rtx (GET_MODE (x));
389
e9baa644 390 emit_insn_before (gen_move_insn (temp, new),
bbf6f052
RK
391 QUEUED_INSN (y));
392 return temp;
393 }
f1ec5147 394
73b7f58c
BS
395 /* Copy the address into a pseudo, so that the returned value
396 remains correct across calls to emit_queue. */
f1ec5147 397 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
bbf6f052 398 }
f1ec5147 399
bbf6f052
RK
400 /* Otherwise, recursively protect the subexpressions of all
401 the kinds of rtx's that can contain a QUEUED. */
402 if (code == MEM)
3f15938e
RS
403 {
404 rtx tem = protect_from_queue (XEXP (x, 0), 0);
405 if (tem != XEXP (x, 0))
406 {
407 x = copy_rtx (x);
408 XEXP (x, 0) = tem;
409 }
410 }
bbf6f052
RK
411 else if (code == PLUS || code == MULT)
412 {
3f15938e
RS
413 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
414 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
415 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
416 {
417 x = copy_rtx (x);
418 XEXP (x, 0) = new0;
419 XEXP (x, 1) = new1;
420 }
bbf6f052
RK
421 }
422 return x;
423 }
73b7f58c
BS
424 /* If the increment has not happened, use the variable itself. Copy it
425 into a new pseudo so that the value remains correct across calls to
426 emit_queue. */
bbf6f052 427 if (QUEUED_INSN (x) == 0)
73b7f58c 428 return copy_to_reg (QUEUED_VAR (x));
bbf6f052
RK
429 /* If the increment has happened and a pre-increment copy exists,
430 use that copy. */
431 if (QUEUED_COPY (x) != 0)
432 return QUEUED_COPY (x);
433 /* The increment has happened but we haven't set up a pre-increment copy.
434 Set one up now, and use it. */
435 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
436 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
437 QUEUED_INSN (x));
438 return QUEUED_COPY (x);
439}
440
441/* Return nonzero if X contains a QUEUED expression:
442 if it contains anything that will be altered by a queued increment.
443 We handle only combinations of MEM, PLUS, MINUS and MULT operators
444 since memory addresses generally contain only those. */
445
1f06ee8d 446int
502b8322 447queued_subexp_p (rtx x)
bbf6f052 448{
b3694847 449 enum rtx_code code = GET_CODE (x);
bbf6f052
RK
450 switch (code)
451 {
452 case QUEUED:
453 return 1;
454 case MEM:
455 return queued_subexp_p (XEXP (x, 0));
456 case MULT:
457 case PLUS:
458 case MINUS:
e9a25f70
JL
459 return (queued_subexp_p (XEXP (x, 0))
460 || queued_subexp_p (XEXP (x, 1)));
461 default:
462 return 0;
bbf6f052 463 }
bbf6f052
RK
464}
465
1bbd65cd
EB
466/* Retrieve a mark on the queue. */
467
468static rtx
469mark_queue (void)
470{
471 return pending_chain;
472}
bbf6f052 473
1bbd65cd
EB
474/* Perform all the pending incrementations that have been enqueued
475 after MARK was retrieved. If MARK is null, perform all the
476 pending incrementations. */
477
478static void
479emit_insns_enqueued_after_mark (rtx mark)
bbf6f052 480{
b3694847 481 rtx p;
1bbd65cd
EB
482
483 /* The marked incrementation may have been emitted in the meantime
484 through a call to emit_queue. In this case, the mark is not valid
485 anymore so do nothing. */
486 if (mark && ! QUEUED_BODY (mark))
487 return;
488
489 while ((p = pending_chain) != mark)
bbf6f052 490 {
41b083c4
R
491 rtx body = QUEUED_BODY (p);
492
2f937369
DM
493 switch (GET_CODE (body))
494 {
495 case INSN:
496 case JUMP_INSN:
497 case CALL_INSN:
498 case CODE_LABEL:
499 case BARRIER:
500 case NOTE:
501 QUEUED_INSN (p) = body;
502 emit_insn (body);
503 break;
504
505#ifdef ENABLE_CHECKING
506 case SEQUENCE:
507 abort ();
508 break;
509#endif
510
511 default:
512 QUEUED_INSN (p) = emit_insn (body);
513 break;
41b083c4 514 }
2f937369 515
1bbd65cd 516 QUEUED_BODY (p) = 0;
bbf6f052
RK
517 pending_chain = QUEUED_NEXT (p);
518 }
519}
1bbd65cd
EB
520
521/* Perform all the pending incrementations. */
522
523void
524emit_queue (void)
525{
526 emit_insns_enqueued_after_mark (NULL_RTX);
527}
bbf6f052
RK
528\f
529/* Copy data from FROM to TO, where the machine modes are not the same.
530 Both modes may be integer, or both may be floating.
531 UNSIGNEDP should be nonzero if FROM is an unsigned type.
532 This causes zero-extension instead of sign-extension. */
533
534void
502b8322 535convert_move (rtx to, rtx from, int unsignedp)
bbf6f052
RK
536{
537 enum machine_mode to_mode = GET_MODE (to);
538 enum machine_mode from_mode = GET_MODE (from);
539 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
540 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
541 enum insn_code code;
542 rtx libcall;
543
544 /* rtx code for making an equivalent value. */
37d0b254
SE
545 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
546 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
bbf6f052
RK
547
548 to = protect_from_queue (to, 1);
549 from = protect_from_queue (from, 0);
550
551 if (to_real != from_real)
552 abort ();
553
6de9cd9a
DN
554 /* If the source and destination are already the same, then there's
555 nothing to do. */
556 if (to == from)
557 return;
558
1499e0a8
RK
559 /* If FROM is a SUBREG that indicates that we have already done at least
560 the required extension, strip it. We don't handle such SUBREGs as
561 TO here. */
562
563 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
564 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
565 >= GET_MODE_SIZE (to_mode))
566 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
567 from = gen_lowpart (to_mode, from), from_mode = to_mode;
568
569 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
570 abort ();
571
bbf6f052
RK
572 if (to_mode == from_mode
573 || (from_mode == VOIDmode && CONSTANT_P (from)))
574 {
575 emit_move_insn (to, from);
576 return;
577 }
578
0b4565c9
BS
579 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
580 {
581 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
582 abort ();
3a94c984 583
0b4565c9 584 if (VECTOR_MODE_P (to_mode))
bafe341a 585 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
0b4565c9 586 else
bafe341a 587 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
0b4565c9
BS
588
589 emit_move_insn (to, from);
590 return;
591 }
592
06765df1
R
593 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
594 {
595 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
596 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
597 return;
598 }
599
bbf6f052
RK
600 if (to_real)
601 {
642dfa8b 602 rtx value, insns;
85363ca0 603 convert_optab tab;
81d79e2c 604
e44846d6 605 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
85363ca0 606 tab = sext_optab;
e44846d6 607 else if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
85363ca0
ZW
608 tab = trunc_optab;
609 else
610 abort ();
2b01c326 611
85363ca0 612 /* Try converting directly if the insn is supported. */
2b01c326 613
85363ca0
ZW
614 code = tab->handlers[to_mode][from_mode].insn_code;
615 if (code != CODE_FOR_nothing)
b092b471 616 {
85363ca0
ZW
617 emit_unop_insn (code, to, from,
618 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
b092b471
JW
619 return;
620 }
b092b471 621
85363ca0
ZW
622 /* Otherwise use a libcall. */
623 libcall = tab->handlers[to_mode][from_mode].libfunc;
3a94c984 624
85363ca0 625 if (!libcall)
b092b471 626 /* This conversion is not implemented yet. */
bbf6f052
RK
627 abort ();
628
642dfa8b 629 start_sequence ();
ebb1b59a 630 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
81d79e2c 631 1, from, from_mode);
642dfa8b
BS
632 insns = get_insns ();
633 end_sequence ();
450b1728
EC
634 emit_libcall_block (insns, to, value,
635 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
636 from)
637 : gen_rtx_FLOAT_EXTEND (to_mode, from));
bbf6f052
RK
638 return;
639 }
640
85363ca0
ZW
641 /* Handle pointer conversion. */ /* SPEE 900220. */
642 /* Targets are expected to provide conversion insns between PxImode and
643 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
644 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
645 {
646 enum machine_mode full_mode
647 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
648
649 if (trunc_optab->handlers[to_mode][full_mode].insn_code
650 == CODE_FOR_nothing)
651 abort ();
652
653 if (full_mode != from_mode)
654 from = convert_to_mode (full_mode, from, unsignedp);
655 emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
656 to, from, UNKNOWN);
657 return;
658 }
659 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
660 {
661 enum machine_mode full_mode
662 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
663
664 if (sext_optab->handlers[full_mode][from_mode].insn_code
665 == CODE_FOR_nothing)
666 abort ();
667
668 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
669 to, from, UNKNOWN);
670 if (to_mode == full_mode)
671 return;
672
a1105617 673 /* else proceed to integer conversions below. */
85363ca0
ZW
674 from_mode = full_mode;
675 }
676
bbf6f052
RK
677 /* Now both modes are integers. */
678
679 /* Handle expanding beyond a word. */
680 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
681 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
682 {
683 rtx insns;
684 rtx lowpart;
685 rtx fill_value;
686 rtx lowfrom;
687 int i;
688 enum machine_mode lowpart_mode;
689 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
690
691 /* Try converting directly if the insn is supported. */
692 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
693 != CODE_FOR_nothing)
694 {
cd1b4b44
RK
695 /* If FROM is a SUBREG, put it into a register. Do this
696 so that we always generate the same set of insns for
697 better cse'ing; if an intermediate assignment occurred,
698 we won't be doing the operation directly on the SUBREG. */
699 if (optimize > 0 && GET_CODE (from) == SUBREG)
700 from = force_reg (from_mode, from);
bbf6f052
RK
701 emit_unop_insn (code, to, from, equiv_code);
702 return;
703 }
704 /* Next, try converting via full word. */
705 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
706 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
707 != CODE_FOR_nothing))
708 {
a81fee56 709 if (GET_CODE (to) == REG)
6a2d136b
EB
710 {
711 if (reg_overlap_mentioned_p (to, from))
712 from = force_reg (from_mode, from);
713 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
714 }
bbf6f052
RK
715 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
716 emit_unop_insn (code, to,
717 gen_lowpart (word_mode, to), equiv_code);
718 return;
719 }
720
721 /* No special multiword conversion insn; do it by hand. */
722 start_sequence ();
723
5c5033c3
RK
724 /* Since we will turn this into a no conflict block, we must ensure
725 that the source does not overlap the target. */
726
727 if (reg_overlap_mentioned_p (to, from))
728 from = force_reg (from_mode, from);
729
bbf6f052
RK
730 /* Get a copy of FROM widened to a word, if necessary. */
731 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
732 lowpart_mode = word_mode;
733 else
734 lowpart_mode = from_mode;
735
736 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
737
738 lowpart = gen_lowpart (lowpart_mode, to);
739 emit_move_insn (lowpart, lowfrom);
740
741 /* Compute the value to put in each remaining word. */
742 if (unsignedp)
743 fill_value = const0_rtx;
744 else
745 {
746#ifdef HAVE_slt
747 if (HAVE_slt
a995e389 748 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
bbf6f052
RK
749 && STORE_FLAG_VALUE == -1)
750 {
906c4e36 751 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
a06ef755 752 lowpart_mode, 0);
bbf6f052
RK
753 fill_value = gen_reg_rtx (word_mode);
754 emit_insn (gen_slt (fill_value));
755 }
756 else
757#endif
758 {
759 fill_value
760 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
761 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
906c4e36 762 NULL_RTX, 0);
bbf6f052
RK
763 fill_value = convert_to_mode (word_mode, fill_value, 1);
764 }
765 }
766
767 /* Fill the remaining words. */
768 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
769 {
770 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
771 rtx subword = operand_subword (to, index, 1, to_mode);
772
773 if (subword == 0)
774 abort ();
775
776 if (fill_value != subword)
777 emit_move_insn (subword, fill_value);
778 }
779
780 insns = get_insns ();
781 end_sequence ();
782
906c4e36 783 emit_no_conflict_block (insns, to, from, NULL_RTX,
38a448ca 784 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
bbf6f052
RK
785 return;
786 }
787
d3c64ee3
RS
788 /* Truncating multi-word to a word or less. */
789 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
790 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
bbf6f052 791 {
431a6eca
JW
792 if (!((GET_CODE (from) == MEM
793 && ! MEM_VOLATILE_P (from)
794 && direct_load[(int) to_mode]
795 && ! mode_dependent_address_p (XEXP (from, 0)))
796 || GET_CODE (from) == REG
797 || GET_CODE (from) == SUBREG))
798 from = force_reg (from_mode, from);
bbf6f052
RK
799 convert_move (to, gen_lowpart (word_mode, from), 0);
800 return;
801 }
802
bbf6f052
RK
803 /* Now follow all the conversions between integers
804 no more than a word long. */
805
806 /* For truncation, usually we can just refer to FROM in a narrower mode. */
807 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
808 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
d3c64ee3 809 GET_MODE_BITSIZE (from_mode)))
bbf6f052 810 {
d3c64ee3
RS
811 if (!((GET_CODE (from) == MEM
812 && ! MEM_VOLATILE_P (from)
813 && direct_load[(int) to_mode]
814 && ! mode_dependent_address_p (XEXP (from, 0)))
815 || GET_CODE (from) == REG
816 || GET_CODE (from) == SUBREG))
817 from = force_reg (from_mode, from);
34aa3599
RK
818 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
819 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
820 from = copy_to_reg (from);
bbf6f052
RK
821 emit_move_insn (to, gen_lowpart (to_mode, from));
822 return;
823 }
824
d3c64ee3 825 /* Handle extension. */
bbf6f052
RK
826 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
827 {
828 /* Convert directly if that works. */
829 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
830 != CODE_FOR_nothing)
831 {
9413de45
RK
832 if (flag_force_mem)
833 from = force_not_mem (from);
834
bbf6f052
RK
835 emit_unop_insn (code, to, from, equiv_code);
836 return;
837 }
838 else
839 {
840 enum machine_mode intermediate;
2b28d92e
NC
841 rtx tmp;
842 tree shift_amount;
bbf6f052
RK
843
844 /* Search for a mode to convert via. */
845 for (intermediate = from_mode; intermediate != VOIDmode;
846 intermediate = GET_MODE_WIDER_MODE (intermediate))
930b4e39
RK
847 if (((can_extend_p (to_mode, intermediate, unsignedp)
848 != CODE_FOR_nothing)
849 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
d60eaeff
JL
850 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
851 GET_MODE_BITSIZE (intermediate))))
bbf6f052
RK
852 && (can_extend_p (intermediate, from_mode, unsignedp)
853 != CODE_FOR_nothing))
854 {
855 convert_move (to, convert_to_mode (intermediate, from,
856 unsignedp), unsignedp);
857 return;
858 }
859
2b28d92e 860 /* No suitable intermediate mode.
3a94c984 861 Generate what we need with shifts. */
2b28d92e
NC
862 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
863 - GET_MODE_BITSIZE (from_mode), 0);
864 from = gen_lowpart (to_mode, force_reg (from_mode, from));
865 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
866 to, unsignedp);
3a94c984 867 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
2b28d92e
NC
868 to, unsignedp);
869 if (tmp != to)
870 emit_move_insn (to, tmp);
871 return;
bbf6f052
RK
872 }
873 }
874
3a94c984 875 /* Support special truncate insns for certain modes. */
85363ca0 876 if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
bbf6f052 877 {
85363ca0
ZW
878 emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
879 to, from, UNKNOWN);
b9bcad65
RK
880 return;
881 }
882
bbf6f052
RK
883 /* Handle truncation of volatile memrefs, and so on;
884 the things that couldn't be truncated directly,
85363ca0
ZW
885 and for which there was no special instruction.
886
887 ??? Code above formerly short-circuited this, for most integer
888 mode pairs, with a force_reg in from_mode followed by a recursive
889 call to this routine. Appears always to have been wrong. */
bbf6f052
RK
890 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
891 {
892 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
893 emit_move_insn (to, temp);
894 return;
895 }
896
897 /* Mode combination is not recognized. */
898 abort ();
899}
900
901/* Return an rtx for a value that would result
902 from converting X to mode MODE.
903 Both X and MODE may be floating, or both integer.
904 UNSIGNEDP is nonzero if X is an unsigned value.
905 This can be done by referring to a part of X in place
5d901c31
RS
906 or by copying to a new temporary with conversion.
907
908 This function *must not* call protect_from_queue
909 except when putting X into an insn (in which case convert_move does it). */
bbf6f052
RK
910
911rtx
502b8322 912convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
5ffe63ed
RS
913{
914 return convert_modes (mode, VOIDmode, x, unsignedp);
915}
916
917/* Return an rtx for a value that would result
918 from converting X from mode OLDMODE to mode MODE.
919 Both modes may be floating, or both integer.
920 UNSIGNEDP is nonzero if X is an unsigned value.
921
922 This can be done by referring to a part of X in place
923 or by copying to a new temporary with conversion.
924
925 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
926
927 This function *must not* call protect_from_queue
928 except when putting X into an insn (in which case convert_move does it). */
929
930rtx
502b8322 931convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
bbf6f052 932{
b3694847 933 rtx temp;
5ffe63ed 934
1499e0a8
RK
935 /* If FROM is a SUBREG that indicates that we have already done at least
936 the required extension, strip it. */
937
938 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
939 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
940 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
941 x = gen_lowpart (mode, x);
bbf6f052 942
64791b18
RK
943 if (GET_MODE (x) != VOIDmode)
944 oldmode = GET_MODE (x);
3a94c984 945
5ffe63ed 946 if (mode == oldmode)
bbf6f052
RK
947 return x;
948
949 /* There is one case that we must handle specially: If we are converting
906c4e36 950 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
bbf6f052
RK
951 we are to interpret the constant as unsigned, gen_lowpart will do
952 the wrong if the constant appears negative. What we want to do is
953 make the high-order word of the constant zero, not all ones. */
954
955 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
906c4e36 956 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
bbf6f052 957 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
96ff8a16
ILT
958 {
959 HOST_WIDE_INT val = INTVAL (x);
960
961 if (oldmode != VOIDmode
962 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
963 {
964 int width = GET_MODE_BITSIZE (oldmode);
965
966 /* We need to zero extend VAL. */
967 val &= ((HOST_WIDE_INT) 1 << width) - 1;
968 }
969
970 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
971 }
bbf6f052
RK
972
973 /* We can do this with a gen_lowpart if both desired and current modes
974 are integer, and this is either a constant integer, a register, or a
ba2e110c
RK
975 non-volatile MEM. Except for the constant case where MODE is no
976 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
bbf6f052 977
ba2e110c
RK
978 if ((GET_CODE (x) == CONST_INT
979 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
bbf6f052 980 || (GET_MODE_CLASS (mode) == MODE_INT
5ffe63ed 981 && GET_MODE_CLASS (oldmode) == MODE_INT
bbf6f052 982 && (GET_CODE (x) == CONST_DOUBLE
5ffe63ed 983 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
d57c66da
JW
984 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
985 && direct_load[(int) mode])
2bf29316 986 || (GET_CODE (x) == REG
006c9f4a
SE
987 && (! HARD_REGISTER_P (x)
988 || HARD_REGNO_MODE_OK (REGNO (x), mode))
2bf29316
JW
989 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
990 GET_MODE_BITSIZE (GET_MODE (x)))))))))
ba2e110c
RK
991 {
992 /* ?? If we don't know OLDMODE, we have to assume here that
993 X does not need sign- or zero-extension. This may not be
994 the case, but it's the best we can do. */
995 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
996 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
997 {
998 HOST_WIDE_INT val = INTVAL (x);
999 int width = GET_MODE_BITSIZE (oldmode);
1000
1001 /* We must sign or zero-extend in this case. Start by
1002 zero-extending, then sign extend if we need to. */
1003 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1004 if (! unsignedp
1005 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1006 val |= (HOST_WIDE_INT) (-1) << width;
1007
2496c7bd 1008 return gen_int_mode (val, mode);
ba2e110c
RK
1009 }
1010
1011 return gen_lowpart (mode, x);
1012 }
bbf6f052 1013
ebe75517
JH
1014 /* Converting from integer constant into mode is always equivalent to an
1015 subreg operation. */
1016 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
1017 {
1018 if (GET_MODE_BITSIZE (mode) != GET_MODE_BITSIZE (oldmode))
1019 abort ();
1020 return simplify_gen_subreg (mode, x, oldmode, 0);
1021 }
1022
bbf6f052
RK
1023 temp = gen_reg_rtx (mode);
1024 convert_move (temp, x, unsignedp);
1025 return temp;
1026}
1027\f
cf5124f6
RS
1028/* STORE_MAX_PIECES is the number of bytes at a time that we can
1029 store efficiently. Due to internal GCC limitations, this is
1030 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
1031 for an immediate constant. */
1032
1033#define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
1034
8fd3cf4e
JJ
1035/* Determine whether the LEN bytes can be moved by using several move
1036 instructions. Return nonzero if a call to move_by_pieces should
1037 succeed. */
1038
1039int
502b8322
AJ
1040can_move_by_pieces (unsigned HOST_WIDE_INT len,
1041 unsigned int align ATTRIBUTE_UNUSED)
8fd3cf4e
JJ
1042{
1043 return MOVE_BY_PIECES_P (len, align);
1044}
1045
21d93687
RK
1046/* Generate several move instructions to copy LEN bytes from block FROM to
1047 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1048 and TO through protect_from_queue before calling.
566aa174 1049
21d93687
RK
1050 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1051 used to push FROM to the stack.
566aa174 1052
8fd3cf4e 1053 ALIGN is maximum stack alignment we can assume.
bbf6f052 1054
8fd3cf4e
JJ
1055 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
1056 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
1057 stpcpy. */
1058
1059rtx
502b8322
AJ
1060move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
1061 unsigned int align, int endp)
bbf6f052
RK
1062{
1063 struct move_by_pieces data;
566aa174 1064 rtx to_addr, from_addr = XEXP (from, 0);
770ae6cc 1065 unsigned int max_size = MOVE_MAX_PIECES + 1;
fbe1758d
AM
1066 enum machine_mode mode = VOIDmode, tmode;
1067 enum insn_code icode;
bbf6f052 1068
f26aca6d
DD
1069 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
1070
bbf6f052 1071 data.offset = 0;
bbf6f052 1072 data.from_addr = from_addr;
566aa174
JH
1073 if (to)
1074 {
1075 to_addr = XEXP (to, 0);
1076 data.to = to;
1077 data.autinc_to
1078 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1079 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1080 data.reverse
1081 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1082 }
1083 else
1084 {
1085 to_addr = NULL_RTX;
1086 data.to = NULL_RTX;
1087 data.autinc_to = 1;
1088#ifdef STACK_GROWS_DOWNWARD
1089 data.reverse = 1;
1090#else
1091 data.reverse = 0;
1092#endif
1093 }
1094 data.to_addr = to_addr;
bbf6f052 1095 data.from = from;
bbf6f052
RK
1096 data.autinc_from
1097 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1098 || GET_CODE (from_addr) == POST_INC
1099 || GET_CODE (from_addr) == POST_DEC);
1100
1101 data.explicit_inc_from = 0;
1102 data.explicit_inc_to = 0;
bbf6f052
RK
1103 if (data.reverse) data.offset = len;
1104 data.len = len;
1105
1106 /* If copying requires more than two move insns,
1107 copy addresses to registers (to make displacements shorter)
1108 and use post-increment if available. */
1109 if (!(data.autinc_from && data.autinc_to)
1110 && move_by_pieces_ninsns (len, align) > 2)
1111 {
3a94c984 1112 /* Find the mode of the largest move... */
fbe1758d
AM
1113 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1114 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1115 if (GET_MODE_SIZE (tmode) < max_size)
1116 mode = tmode;
1117
1118 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
bbf6f052
RK
1119 {
1120 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1121 data.autinc_from = 1;
1122 data.explicit_inc_from = -1;
1123 }
fbe1758d 1124 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
bbf6f052
RK
1125 {
1126 data.from_addr = copy_addr_to_reg (from_addr);
1127 data.autinc_from = 1;
1128 data.explicit_inc_from = 1;
1129 }
bbf6f052
RK
1130 if (!data.autinc_from && CONSTANT_P (from_addr))
1131 data.from_addr = copy_addr_to_reg (from_addr);
fbe1758d 1132 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
bbf6f052
RK
1133 {
1134 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1135 data.autinc_to = 1;
1136 data.explicit_inc_to = -1;
1137 }
fbe1758d 1138 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
bbf6f052
RK
1139 {
1140 data.to_addr = copy_addr_to_reg (to_addr);
1141 data.autinc_to = 1;
1142 data.explicit_inc_to = 1;
1143 }
bbf6f052
RK
1144 if (!data.autinc_to && CONSTANT_P (to_addr))
1145 data.to_addr = copy_addr_to_reg (to_addr);
1146 }
1147
e1565e65 1148 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
19caa751
RK
1149 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1150 align = MOVE_MAX * BITS_PER_UNIT;
bbf6f052
RK
1151
1152 /* First move what we can in the largest integer mode, then go to
1153 successively smaller modes. */
1154
1155 while (max_size > 1)
1156 {
e7c33f54
RK
1157 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1158 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1159 if (GET_MODE_SIZE (tmode) < max_size)
bbf6f052
RK
1160 mode = tmode;
1161
1162 if (mode == VOIDmode)
1163 break;
1164
1165 icode = mov_optab->handlers[(int) mode].insn_code;
19caa751 1166 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
bbf6f052
RK
1167 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1168
1169 max_size = GET_MODE_SIZE (mode);
1170 }
1171
1172 /* The code above should have handled everything. */
2a8e278c 1173 if (data.len > 0)
bbf6f052 1174 abort ();
8fd3cf4e
JJ
1175
1176 if (endp)
1177 {
1178 rtx to1;
1179
1180 if (data.reverse)
1181 abort ();
1182 if (data.autinc_to)
1183 {
1184 if (endp == 2)
1185 {
1186 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1187 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1188 else
1189 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
1190 -1));
1191 }
1192 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1193 data.offset);
1194 }
1195 else
1196 {
1197 if (endp == 2)
1198 --data.offset;
1199 to1 = adjust_address (data.to, QImode, data.offset);
1200 }
1201 return to1;
1202 }
1203 else
1204 return data.to;
bbf6f052
RK
1205}
1206
1207/* Return number of insns required to move L bytes by pieces.
f1eaaf73 1208 ALIGN (in bits) is maximum alignment we can assume. */
bbf6f052 1209
3bdf5ad1 1210static unsigned HOST_WIDE_INT
502b8322 1211move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align)
bbf6f052 1212{
3bdf5ad1
RK
1213 unsigned HOST_WIDE_INT n_insns = 0;
1214 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
bbf6f052 1215
e1565e65 1216 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
19caa751 1217 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
14c78e9b 1218 align = MOVE_MAX * BITS_PER_UNIT;
bbf6f052
RK
1219
1220 while (max_size > 1)
1221 {
1222 enum machine_mode mode = VOIDmode, tmode;
1223 enum insn_code icode;
1224
e7c33f54
RK
1225 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1226 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1227 if (GET_MODE_SIZE (tmode) < max_size)
bbf6f052
RK
1228 mode = tmode;
1229
1230 if (mode == VOIDmode)
1231 break;
1232
1233 icode = mov_optab->handlers[(int) mode].insn_code;
19caa751 1234 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
bbf6f052
RK
1235 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1236
1237 max_size = GET_MODE_SIZE (mode);
1238 }
1239
13c6f0d5
NS
1240 if (l)
1241 abort ();
bbf6f052
RK
1242 return n_insns;
1243}
1244
1245/* Subroutine of move_by_pieces. Move as many bytes as appropriate
1246 with move instructions for mode MODE. GENFUN is the gen_... function
1247 to make a move insn for that mode. DATA has all the other info. */
1248
1249static void
502b8322
AJ
1250move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1251 struct move_by_pieces *data)
bbf6f052 1252{
3bdf5ad1 1253 unsigned int size = GET_MODE_SIZE (mode);
ae0ed63a 1254 rtx to1 = NULL_RTX, from1;
bbf6f052
RK
1255
1256 while (data->len >= size)
1257 {
3bdf5ad1
RK
1258 if (data->reverse)
1259 data->offset -= size;
1260
566aa174 1261 if (data->to)
3bdf5ad1 1262 {
566aa174 1263 if (data->autinc_to)
630036c6
JJ
1264 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1265 data->offset);
566aa174 1266 else
f4ef873c 1267 to1 = adjust_address (data->to, mode, data->offset);
3bdf5ad1 1268 }
3bdf5ad1
RK
1269
1270 if (data->autinc_from)
630036c6
JJ
1271 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1272 data->offset);
3bdf5ad1 1273 else
f4ef873c 1274 from1 = adjust_address (data->from, mode, data->offset);
bbf6f052 1275
940da324 1276 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
3d709fd3
RH
1277 emit_insn (gen_add2_insn (data->to_addr,
1278 GEN_INT (-(HOST_WIDE_INT)size)));
940da324 1279 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
3d709fd3
RH
1280 emit_insn (gen_add2_insn (data->from_addr,
1281 GEN_INT (-(HOST_WIDE_INT)size)));
bbf6f052 1282
566aa174
JH
1283 if (data->to)
1284 emit_insn ((*genfun) (to1, from1));
1285 else
21d93687
RK
1286 {
1287#ifdef PUSH_ROUNDING
1288 emit_single_push_insn (mode, from1, NULL);
1289#else
1290 abort ();
1291#endif
1292 }
3bdf5ad1 1293
940da324 1294 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
906c4e36 1295 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
940da324 1296 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
906c4e36 1297 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
bbf6f052 1298
3bdf5ad1
RK
1299 if (! data->reverse)
1300 data->offset += size;
bbf6f052
RK
1301
1302 data->len -= size;
1303 }
1304}
1305\f
4ca79136
RH
1306/* Emit code to move a block Y to a block X. This may be done with
1307 string-move instructions, with multiple scalar move instructions,
1308 or with a library call.
bbf6f052 1309
4ca79136 1310 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
bbf6f052 1311 SIZE is an rtx that says how long they are.
19caa751 1312 ALIGN is the maximum alignment we can assume they have.
44bb111a 1313 METHOD describes what kind of copy this is, and what mechanisms may be used.
bbf6f052 1314
e9a25f70
JL
1315 Return the address of the new block, if memcpy is called and returns it,
1316 0 otherwise. */
1317
1318rtx
502b8322 1319emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
bbf6f052 1320{
44bb111a 1321 bool may_use_call;
e9a25f70 1322 rtx retval = 0;
44bb111a
RH
1323 unsigned int align;
1324
1325 switch (method)
1326 {
1327 case BLOCK_OP_NORMAL:
1328 may_use_call = true;
1329 break;
1330
1331 case BLOCK_OP_CALL_PARM:
1332 may_use_call = block_move_libcall_safe_for_call_parm ();
1333
1334 /* Make inhibit_defer_pop nonzero around the library call
1335 to force it to pop the arguments right away. */
1336 NO_DEFER_POP;
1337 break;
1338
1339 case BLOCK_OP_NO_LIBCALL:
1340 may_use_call = false;
1341 break;
1342
1343 default:
1344 abort ();
1345 }
1346
1347 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
e9a25f70 1348
bbf6f052
RK
1349 if (GET_MODE (x) != BLKmode)
1350 abort ();
bbf6f052
RK
1351 if (GET_MODE (y) != BLKmode)
1352 abort ();
1353
1354 x = protect_from_queue (x, 1);
1355 y = protect_from_queue (y, 0);
5d901c31 1356 size = protect_from_queue (size, 0);
bbf6f052
RK
1357
1358 if (GET_CODE (x) != MEM)
1359 abort ();
1360 if (GET_CODE (y) != MEM)
1361 abort ();
1362 if (size == 0)
1363 abort ();
1364
cb38fd88
RH
1365 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1366 can be incorrect is coming from __builtin_memcpy. */
1367 if (GET_CODE (size) == CONST_INT)
1368 {
6972c506
JJ
1369 if (INTVAL (size) == 0)
1370 return 0;
1371
cb38fd88
RH
1372 x = shallow_copy_rtx (x);
1373 y = shallow_copy_rtx (y);
1374 set_mem_size (x, size);
1375 set_mem_size (y, size);
1376 }
1377
fbe1758d 1378 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
8fd3cf4e 1379 move_by_pieces (x, y, INTVAL (size), align, 0);
4ca79136
RH
1380 else if (emit_block_move_via_movstr (x, y, size, align))
1381 ;
44bb111a 1382 else if (may_use_call)
4ca79136 1383 retval = emit_block_move_via_libcall (x, y, size);
44bb111a
RH
1384 else
1385 emit_block_move_via_loop (x, y, size, align);
1386
1387 if (method == BLOCK_OP_CALL_PARM)
1388 OK_DEFER_POP;
266007a7 1389
4ca79136
RH
1390 return retval;
1391}
266007a7 1392
502b8322 1393/* A subroutine of emit_block_move. Returns true if calling the
44bb111a
RH
1394 block move libcall will not clobber any parameters which may have
1395 already been placed on the stack. */
1396
1397static bool
502b8322 1398block_move_libcall_safe_for_call_parm (void)
44bb111a 1399{
a357a6d4 1400 /* If arguments are pushed on the stack, then they're safe. */
44bb111a
RH
1401 if (PUSH_ARGS)
1402 return true;
44bb111a 1403
450b1728 1404 /* If registers go on the stack anyway, any argument is sure to clobber
a357a6d4
GK
1405 an outgoing argument. */
1406#if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
1407 {
1408 tree fn = emit_block_move_libcall_fn (false);
1409 (void) fn;
1410 if (REG_PARM_STACK_SPACE (fn) != 0)
1411 return false;
1412 }
44bb111a 1413#endif
44bb111a 1414
a357a6d4
GK
1415 /* If any argument goes in memory, then it might clobber an outgoing
1416 argument. */
1417 {
1418 CUMULATIVE_ARGS args_so_far;
1419 tree fn, arg;
450b1728 1420
a357a6d4 1421 fn = emit_block_move_libcall_fn (false);
0f6937fe 1422 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
450b1728 1423
a357a6d4
GK
1424 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1425 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1426 {
1427 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1428 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1429 if (!tmp || !REG_P (tmp))
44bb111a 1430 return false;
a357a6d4
GK
1431#ifdef FUNCTION_ARG_PARTIAL_NREGS
1432 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
1433 NULL_TREE, 1))
1434 return false;
1435#endif
1436 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1437 }
1438 }
1439 return true;
44bb111a
RH
1440}
1441
502b8322 1442/* A subroutine of emit_block_move. Expand a movstr pattern;
4ca79136 1443 return true if successful. */
3ef1eef4 1444
4ca79136 1445static bool
502b8322 1446emit_block_move_via_movstr (rtx x, rtx y, rtx size, unsigned int align)
4ca79136 1447{
4ca79136 1448 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
a5e9c810 1449 int save_volatile_ok = volatile_ok;
4ca79136 1450 enum machine_mode mode;
266007a7 1451
4ca79136
RH
1452 /* Since this is a move insn, we don't care about volatility. */
1453 volatile_ok = 1;
1454
ee960939
OH
1455 /* Try the most limited insn first, because there's no point
1456 including more than one in the machine description unless
1457 the more limited one has some advantage. */
1458
4ca79136
RH
1459 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1460 mode = GET_MODE_WIDER_MODE (mode))
1461 {
1462 enum insn_code code = movstr_optab[(int) mode];
1463 insn_operand_predicate_fn pred;
1464
1465 if (code != CODE_FOR_nothing
1466 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1467 here because if SIZE is less than the mode mask, as it is
1468 returned by the macro, it will definitely be less than the
1469 actual mode mask. */
1470 && ((GET_CODE (size) == CONST_INT
1471 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1472 <= (GET_MODE_MASK (mode) >> 1)))
1473 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1474 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1475 || (*pred) (x, BLKmode))
1476 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1477 || (*pred) (y, BLKmode))
1478 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1479 || (*pred) (opalign, VOIDmode)))
1480 {
1481 rtx op2;
1482 rtx last = get_last_insn ();
1483 rtx pat;
1484
1485 op2 = convert_to_mode (mode, size, 1);
1486 pred = insn_data[(int) code].operand[2].predicate;
1487 if (pred != 0 && ! (*pred) (op2, mode))
1488 op2 = copy_to_mode_reg (mode, op2);
1489
1490 /* ??? When called via emit_block_move_for_call, it'd be
1491 nice if there were some way to inform the backend, so
1492 that it doesn't fail the expansion because it thinks
1493 emitting the libcall would be more efficient. */
1494
1495 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1496 if (pat)
1497 {
1498 emit_insn (pat);
a5e9c810 1499 volatile_ok = save_volatile_ok;
4ca79136 1500 return true;
bbf6f052 1501 }
4ca79136
RH
1502 else
1503 delete_insns_since (last);
bbf6f052 1504 }
4ca79136 1505 }
bbf6f052 1506
a5e9c810 1507 volatile_ok = save_volatile_ok;
4ca79136
RH
1508 return false;
1509}
3ef1eef4 1510
4ca79136
RH
1511/* A subroutine of emit_block_move. Expand a call to memcpy or bcopy.
1512 Return the return value from memcpy, 0 otherwise. */
4bc973ae 1513
4ca79136 1514static rtx
502b8322 1515emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
4ca79136 1516{
ee960939 1517 rtx dst_addr, src_addr;
4ca79136
RH
1518 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1519 enum machine_mode size_mode;
1520 rtx retval;
4bc973ae 1521
4ca79136 1522 /* DST, SRC, or SIZE may have been passed through protect_from_queue.
4bc973ae 1523
ee960939
OH
1524 It is unsafe to save the value generated by protect_from_queue and reuse
1525 it later. Consider what happens if emit_queue is called before the
1526 return value from protect_from_queue is used.
4bc973ae 1527
ee960939
OH
1528 Expansion of the CALL_EXPR below will call emit_queue before we are
1529 finished emitting RTL for argument setup. So if we are not careful we
1530 could get the wrong value for an argument.
4bc973ae 1531
ee960939
OH
1532 To avoid this problem we go ahead and emit code to copy the addresses of
1533 DST and SRC and SIZE into new pseudos. We can then place those new
1534 pseudos into an RTL_EXPR and use them later, even after a call to
4ca79136 1535 emit_queue.
4bc973ae 1536
ee960939
OH
1537 Note this is not strictly needed for library calls since they do not call
1538 emit_queue before loading their arguments. However, we may need to have
1539 library calls call emit_queue in the future since failing to do so could
1540 cause problems for targets which define SMALL_REGISTER_CLASSES and pass
1541 arguments in registers. */
1542
1543 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1544 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
4ca79136 1545
ee960939
OH
1546 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1547 src_addr = convert_memory_address (ptr_mode, src_addr);
ee960939
OH
1548
1549 dst_tree = make_tree (ptr_type_node, dst_addr);
1550 src_tree = make_tree (ptr_type_node, src_addr);
4ca79136
RH
1551
1552 if (TARGET_MEM_FUNCTIONS)
1553 size_mode = TYPE_MODE (sizetype);
1554 else
1555 size_mode = TYPE_MODE (unsigned_type_node);
ee960939 1556
4ca79136
RH
1557 size = convert_to_mode (size_mode, size, 1);
1558 size = copy_to_mode_reg (size_mode, size);
1559
1560 /* It is incorrect to use the libcall calling conventions to call
1561 memcpy in this context. This could be a user call to memcpy and
1562 the user may wish to examine the return value from memcpy. For
1563 targets where libcalls and normal calls have different conventions
1564 for returning pointers, we could end up generating incorrect code.
1565
1566 For convenience, we generate the call to bcopy this way as well. */
1567
4ca79136
RH
1568 if (TARGET_MEM_FUNCTIONS)
1569 size_tree = make_tree (sizetype, size);
1570 else
1571 size_tree = make_tree (unsigned_type_node, size);
1572
1573 fn = emit_block_move_libcall_fn (true);
1574 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1575 if (TARGET_MEM_FUNCTIONS)
1576 {
1577 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1578 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1579 }
1580 else
1581 {
1582 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1583 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1584 }
1585
1586 /* Now we have to build up the CALL_EXPR itself. */
1587 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1588 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1589 call_expr, arg_list, NULL_TREE);
4ca79136
RH
1590
1591 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1592
ee960939
OH
1593 /* If we are initializing a readonly value, show the above call clobbered
1594 it. Otherwise, a load from it may erroneously be hoisted from a loop, or
1595 the delay slot scheduler might overlook conflicts and take nasty
1596 decisions. */
4ca79136 1597 if (RTX_UNCHANGING_P (dst))
ee960939
OH
1598 add_function_usage_to
1599 (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
1600 gen_rtx_CLOBBER (VOIDmode, dst),
1601 NULL_RTX));
4ca79136 1602
ee960939 1603 return TARGET_MEM_FUNCTIONS ? retval : NULL_RTX;
4ca79136 1604}
52cf7115 1605
4ca79136
RH
1606/* A subroutine of emit_block_move_via_libcall. Create the tree node
1607 for the function we use for block copies. The first time FOR_CALL
1608 is true, we call assemble_external. */
52cf7115 1609
4ca79136
RH
1610static GTY(()) tree block_move_fn;
1611
9661b15f 1612void
502b8322 1613init_block_move_fn (const char *asmspec)
4ca79136 1614{
9661b15f 1615 if (!block_move_fn)
4ca79136 1616 {
8fd3cf4e 1617 tree args, fn;
9661b15f 1618
4ca79136 1619 if (TARGET_MEM_FUNCTIONS)
52cf7115 1620 {
4ca79136
RH
1621 fn = get_identifier ("memcpy");
1622 args = build_function_type_list (ptr_type_node, ptr_type_node,
1623 const_ptr_type_node, sizetype,
1624 NULL_TREE);
1625 }
1626 else
1627 {
1628 fn = get_identifier ("bcopy");
1629 args = build_function_type_list (void_type_node, const_ptr_type_node,
1630 ptr_type_node, unsigned_type_node,
1631 NULL_TREE);
52cf7115
JL
1632 }
1633
4ca79136
RH
1634 fn = build_decl (FUNCTION_DECL, fn, args);
1635 DECL_EXTERNAL (fn) = 1;
1636 TREE_PUBLIC (fn) = 1;
1637 DECL_ARTIFICIAL (fn) = 1;
1638 TREE_NOTHROW (fn) = 1;
66c60e67 1639
4ca79136 1640 block_move_fn = fn;
bbf6f052 1641 }
e9a25f70 1642
9661b15f
JJ
1643 if (asmspec)
1644 {
1645 SET_DECL_RTL (block_move_fn, NULL_RTX);
1646 SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));
1647 }
1648}
1649
1650static tree
502b8322 1651emit_block_move_libcall_fn (int for_call)
9661b15f
JJ
1652{
1653 static bool emitted_extern;
1654
1655 if (!block_move_fn)
1656 init_block_move_fn (NULL);
1657
4ca79136
RH
1658 if (for_call && !emitted_extern)
1659 {
1660 emitted_extern = true;
9661b15f
JJ
1661 make_decl_rtl (block_move_fn, NULL);
1662 assemble_external (block_move_fn);
4ca79136
RH
1663 }
1664
9661b15f 1665 return block_move_fn;
bbf6f052 1666}
44bb111a
RH
1667
1668/* A subroutine of emit_block_move. Copy the data via an explicit
1669 loop. This is used only when libcalls are forbidden. */
1670/* ??? It'd be nice to copy in hunks larger than QImode. */
1671
1672static void
502b8322
AJ
1673emit_block_move_via_loop (rtx x, rtx y, rtx size,
1674 unsigned int align ATTRIBUTE_UNUSED)
44bb111a
RH
1675{
1676 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1677 enum machine_mode iter_mode;
1678
1679 iter_mode = GET_MODE (size);
1680 if (iter_mode == VOIDmode)
1681 iter_mode = word_mode;
1682
1683 top_label = gen_label_rtx ();
1684 cmp_label = gen_label_rtx ();
1685 iter = gen_reg_rtx (iter_mode);
1686
1687 emit_move_insn (iter, const0_rtx);
1688
1689 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1690 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1691 do_pending_stack_adjust ();
1692
44bb111a
RH
1693 emit_jump (cmp_label);
1694 emit_label (top_label);
1695
1696 tmp = convert_modes (Pmode, iter_mode, iter, true);
1697 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1698 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1699 x = change_address (x, QImode, x_addr);
1700 y = change_address (y, QImode, y_addr);
1701
1702 emit_move_insn (x, y);
1703
1704 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1705 true, OPTAB_LIB_WIDEN);
1706 if (tmp != iter)
1707 emit_move_insn (iter, tmp);
1708
44bb111a
RH
1709 emit_label (cmp_label);
1710
1711 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1712 true, top_label);
44bb111a 1713}
bbf6f052
RK
1714\f
1715/* Copy all or part of a value X into registers starting at REGNO.
1716 The number of registers to be filled is NREGS. */
1717
1718void
502b8322 1719move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
bbf6f052
RK
1720{
1721 int i;
381127e8 1722#ifdef HAVE_load_multiple
3a94c984 1723 rtx pat;
381127e8
RL
1724 rtx last;
1725#endif
bbf6f052 1726
72bb9717
RK
1727 if (nregs == 0)
1728 return;
1729
bbf6f052
RK
1730 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1731 x = validize_mem (force_const_mem (mode, x));
1732
1733 /* See if the machine can do this with a load multiple insn. */
1734#ifdef HAVE_load_multiple
c3a02afe 1735 if (HAVE_load_multiple)
bbf6f052 1736 {
c3a02afe 1737 last = get_last_insn ();
38a448ca 1738 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
c3a02afe
RK
1739 GEN_INT (nregs));
1740 if (pat)
1741 {
1742 emit_insn (pat);
1743 return;
1744 }
1745 else
1746 delete_insns_since (last);
bbf6f052 1747 }
bbf6f052
RK
1748#endif
1749
1750 for (i = 0; i < nregs; i++)
38a448ca 1751 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
bbf6f052
RK
1752 operand_subword_force (x, i, mode));
1753}
1754
1755/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
c6b97fac 1756 The number of registers to be filled is NREGS. */
0040593d 1757
bbf6f052 1758void
502b8322 1759move_block_from_reg (int regno, rtx x, int nregs)
bbf6f052
RK
1760{
1761 int i;
bbf6f052 1762
2954d7db
RK
1763 if (nregs == 0)
1764 return;
1765
bbf6f052
RK
1766 /* See if the machine can do this with a store multiple insn. */
1767#ifdef HAVE_store_multiple
c3a02afe 1768 if (HAVE_store_multiple)
bbf6f052 1769 {
c6b97fac
AM
1770 rtx last = get_last_insn ();
1771 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1772 GEN_INT (nregs));
c3a02afe
RK
1773 if (pat)
1774 {
1775 emit_insn (pat);
1776 return;
1777 }
1778 else
1779 delete_insns_since (last);
bbf6f052 1780 }
bbf6f052
RK
1781#endif
1782
1783 for (i = 0; i < nregs; i++)
1784 {
1785 rtx tem = operand_subword (x, i, 1, BLKmode);
1786
1787 if (tem == 0)
1788 abort ();
1789
38a448ca 1790 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
bbf6f052
RK
1791 }
1792}
1793
084a1106
JDA
1794/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1795 ORIG, where ORIG is a non-consecutive group of registers represented by
1796 a PARALLEL. The clone is identical to the original except in that the
1797 original set of registers is replaced by a new set of pseudo registers.
1798 The new set has the same modes as the original set. */
1799
1800rtx
502b8322 1801gen_group_rtx (rtx orig)
084a1106
JDA
1802{
1803 int i, length;
1804 rtx *tmps;
1805
1806 if (GET_CODE (orig) != PARALLEL)
1807 abort ();
1808
1809 length = XVECLEN (orig, 0);
703ad42b 1810 tmps = alloca (sizeof (rtx) * length);
084a1106
JDA
1811
1812 /* Skip a NULL entry in first slot. */
1813 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1814
1815 if (i)
1816 tmps[0] = 0;
1817
1818 for (; i < length; i++)
1819 {
1820 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1821 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1822
1823 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1824 }
1825
1826 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1827}
1828
6e985040
AM
1829/* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
1830 where DST is non-consecutive registers represented by a PARALLEL.
1831 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
450b1728 1832 if not known. */
fffa9c1d
JW
1833
1834void
6e985040 1835emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
fffa9c1d 1836{
aac5cc16
RH
1837 rtx *tmps, src;
1838 int start, i;
fffa9c1d 1839
aac5cc16 1840 if (GET_CODE (dst) != PARALLEL)
fffa9c1d
JW
1841 abort ();
1842
1843 /* Check for a NULL entry, used to indicate that the parameter goes
1844 both on the stack and in registers. */
aac5cc16
RH
1845 if (XEXP (XVECEXP (dst, 0, 0), 0))
1846 start = 0;
fffa9c1d 1847 else
aac5cc16
RH
1848 start = 1;
1849
703ad42b 1850 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
aac5cc16 1851
aac5cc16
RH
1852 /* Process the pieces. */
1853 for (i = start; i < XVECLEN (dst, 0); i++)
1854 {
1855 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
770ae6cc
RK
1856 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1857 unsigned int bytelen = GET_MODE_SIZE (mode);
aac5cc16
RH
1858 int shift = 0;
1859
1860 /* Handle trailing fragments that run over the size of the struct. */
8752c357 1861 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
aac5cc16 1862 {
6e985040
AM
1863 /* Arrange to shift the fragment to where it belongs.
1864 extract_bit_field loads to the lsb of the reg. */
1865 if (
1866#ifdef BLOCK_REG_PADDING
1867 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1868 == (BYTES_BIG_ENDIAN ? upward : downward)
1869#else
1870 BYTES_BIG_ENDIAN
1871#endif
1872 )
1873 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
aac5cc16
RH
1874 bytelen = ssize - bytepos;
1875 if (bytelen <= 0)
729a2125 1876 abort ();
aac5cc16
RH
1877 }
1878
f3ce87a9
DE
1879 /* If we won't be loading directly from memory, protect the real source
1880 from strange tricks we might play; but make sure that the source can
1881 be loaded directly into the destination. */
1882 src = orig_src;
1883 if (GET_CODE (orig_src) != MEM
1884 && (!CONSTANT_P (orig_src)
1885 || (GET_MODE (orig_src) != mode
1886 && GET_MODE (orig_src) != VOIDmode)))
1887 {
1888 if (GET_MODE (orig_src) == VOIDmode)
1889 src = gen_reg_rtx (mode);
1890 else
1891 src = gen_reg_rtx (GET_MODE (orig_src));
04050c69 1892
f3ce87a9
DE
1893 emit_move_insn (src, orig_src);
1894 }
1895
aac5cc16
RH
1896 /* Optimize the access just a bit. */
1897 if (GET_CODE (src) == MEM
6e985040
AM
1898 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1899 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
729a2125 1900 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
aac5cc16
RH
1901 && bytelen == GET_MODE_SIZE (mode))
1902 {
1903 tmps[i] = gen_reg_rtx (mode);
f4ef873c 1904 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
fffa9c1d 1905 }
7c4a6db0
JW
1906 else if (GET_CODE (src) == CONCAT)
1907 {
015b1ad1
JDA
1908 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1909 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1910
1911 if ((bytepos == 0 && bytelen == slen0)
1912 || (bytepos != 0 && bytepos + bytelen <= slen))
cbb92744 1913 {
015b1ad1
JDA
1914 /* The following assumes that the concatenated objects all
1915 have the same size. In this case, a simple calculation
1916 can be used to determine the object and the bit field
1917 to be extracted. */
1918 tmps[i] = XEXP (src, bytepos / slen0);
cbb92744
JJ
1919 if (! CONSTANT_P (tmps[i])
1920 && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
1921 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
015b1ad1
JDA
1922 (bytepos % slen0) * BITS_PER_UNIT,
1923 1, NULL_RTX, mode, mode, ssize);
cbb92744 1924 }
58f69841
JH
1925 else if (bytepos == 0)
1926 {
015b1ad1 1927 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
58f69841 1928 emit_move_insn (mem, src);
04050c69 1929 tmps[i] = adjust_address (mem, mode, 0);
58f69841 1930 }
7c4a6db0
JW
1931 else
1932 abort ();
1933 }
9c0631a7
AH
1934 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1935 SIMD register, which is currently broken. While we get GCC
1936 to emit proper RTL for these cases, let's dump to memory. */
1937 else if (VECTOR_MODE_P (GET_MODE (dst))
1938 && GET_CODE (src) == REG)
1939 {
1940 int slen = GET_MODE_SIZE (GET_MODE (src));
1941 rtx mem;
1942
1943 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1944 emit_move_insn (mem, src);
1945 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1946 }
d3a16cbd
FJ
1947 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1948 && XVECLEN (dst, 0) > 1)
1949 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
f3ce87a9 1950 else if (CONSTANT_P (src)
2ee5437b
RH
1951 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
1952 tmps[i] = src;
fffa9c1d 1953 else
19caa751
RK
1954 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1955 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
04050c69 1956 mode, mode, ssize);
fffa9c1d 1957
6e985040 1958 if (shift)
19caa751
RK
1959 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1960 tmps[i], 0, OPTAB_WIDEN);
fffa9c1d 1961 }
19caa751 1962
3a94c984 1963 emit_queue ();
aac5cc16
RH
1964
1965 /* Copy the extracted pieces into the proper (probable) hard regs. */
1966 for (i = start; i < XVECLEN (dst, 0); i++)
1967 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
fffa9c1d
JW
1968}
1969
084a1106
JDA
1970/* Emit code to move a block SRC to block DST, where SRC and DST are
1971 non-consecutive groups of registers, each represented by a PARALLEL. */
1972
1973void
502b8322 1974emit_group_move (rtx dst, rtx src)
084a1106
JDA
1975{
1976 int i;
1977
1978 if (GET_CODE (src) != PARALLEL
1979 || GET_CODE (dst) != PARALLEL
1980 || XVECLEN (src, 0) != XVECLEN (dst, 0))
1981 abort ();
1982
1983 /* Skip first entry if NULL. */
1984 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1985 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1986 XEXP (XVECEXP (src, 0, i), 0));
1987}
1988
6e985040
AM
1989/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1990 where SRC is non-consecutive registers represented by a PARALLEL.
1991 SSIZE represents the total size of block ORIG_DST, or -1 if not
1992 known. */
fffa9c1d
JW
1993
1994void
6e985040 1995emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
fffa9c1d 1996{
aac5cc16
RH
1997 rtx *tmps, dst;
1998 int start, i;
fffa9c1d 1999
aac5cc16 2000 if (GET_CODE (src) != PARALLEL)
fffa9c1d
JW
2001 abort ();
2002
2003 /* Check for a NULL entry, used to indicate that the parameter goes
2004 both on the stack and in registers. */
aac5cc16
RH
2005 if (XEXP (XVECEXP (src, 0, 0), 0))
2006 start = 0;
fffa9c1d 2007 else
aac5cc16
RH
2008 start = 1;
2009
703ad42b 2010 tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));
fffa9c1d 2011
aac5cc16
RH
2012 /* Copy the (probable) hard regs into pseudos. */
2013 for (i = start; i < XVECLEN (src, 0); i++)
fffa9c1d 2014 {
aac5cc16
RH
2015 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2016 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2017 emit_move_insn (tmps[i], reg);
2018 }
3a94c984 2019 emit_queue ();
fffa9c1d 2020
aac5cc16
RH
2021 /* If we won't be storing directly into memory, protect the real destination
2022 from strange tricks we might play. */
2023 dst = orig_dst;
10a9f2be
JW
2024 if (GET_CODE (dst) == PARALLEL)
2025 {
2026 rtx temp;
2027
2028 /* We can get a PARALLEL dst if there is a conditional expression in
2029 a return statement. In that case, the dst and src are the same,
2030 so no action is necessary. */
2031 if (rtx_equal_p (dst, src))
2032 return;
2033
2034 /* It is unclear if we can ever reach here, but we may as well handle
2035 it. Allocate a temporary, and split this into a store/load to/from
2036 the temporary. */
2037
2038 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
6e985040
AM
2039 emit_group_store (temp, src, type, ssize);
2040 emit_group_load (dst, temp, type, ssize);
10a9f2be
JW
2041 return;
2042 }
75897075 2043 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
aac5cc16
RH
2044 {
2045 dst = gen_reg_rtx (GET_MODE (orig_dst));
2046 /* Make life a bit easier for combine. */
8ae91fc0 2047 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
aac5cc16 2048 }
aac5cc16
RH
2049
2050 /* Process the pieces. */
2051 for (i = start; i < XVECLEN (src, 0); i++)
2052 {
770ae6cc 2053 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
aac5cc16 2054 enum machine_mode mode = GET_MODE (tmps[i]);
770ae6cc 2055 unsigned int bytelen = GET_MODE_SIZE (mode);
6ddae612 2056 rtx dest = dst;
aac5cc16
RH
2057
2058 /* Handle trailing fragments that run over the size of the struct. */
8752c357 2059 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
71bc0330 2060 {
6e985040
AM
2061 /* store_bit_field always takes its value from the lsb.
2062 Move the fragment to the lsb if it's not already there. */
2063 if (
2064#ifdef BLOCK_REG_PADDING
2065 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2066 == (BYTES_BIG_ENDIAN ? upward : downward)
2067#else
2068 BYTES_BIG_ENDIAN
2069#endif
2070 )
aac5cc16
RH
2071 {
2072 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2073 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2074 tmps[i], 0, OPTAB_WIDEN);
2075 }
2076 bytelen = ssize - bytepos;
71bc0330 2077 }
fffa9c1d 2078
6ddae612
JJ
2079 if (GET_CODE (dst) == CONCAT)
2080 {
2081 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2082 dest = XEXP (dst, 0);
2083 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2084 {
2085 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2086 dest = XEXP (dst, 1);
2087 }
0d446150
JH
2088 else if (bytepos == 0 && XVECLEN (src, 0))
2089 {
2090 dest = assign_stack_temp (GET_MODE (dest),
2091 GET_MODE_SIZE (GET_MODE (dest)), 0);
2092 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2093 tmps[i]);
2094 dst = dest;
2095 break;
2096 }
6ddae612
JJ
2097 else
2098 abort ();
2099 }
2100
aac5cc16 2101 /* Optimize the access just a bit. */
6ddae612 2102 if (GET_CODE (dest) == MEM
6e985040
AM
2103 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2104 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
729a2125 2105 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
aac5cc16 2106 && bytelen == GET_MODE_SIZE (mode))
6ddae612 2107 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
aac5cc16 2108 else
6ddae612 2109 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
04050c69 2110 mode, tmps[i], ssize);
fffa9c1d 2111 }
729a2125 2112
3a94c984 2113 emit_queue ();
aac5cc16
RH
2114
2115 /* Copy from the pseudo into the (probable) hard reg. */
0d446150 2116 if (orig_dst != dst)
aac5cc16 2117 emit_move_insn (orig_dst, dst);
fffa9c1d
JW
2118}
2119
c36fce9a
GRK
2120/* Generate code to copy a BLKmode object of TYPE out of a
2121 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2122 is null, a stack temporary is created. TGTBLK is returned.
2123
c988af2b
RS
2124 The purpose of this routine is to handle functions that return
2125 BLKmode structures in registers. Some machines (the PA for example)
2126 want to return all small structures in registers regardless of the
2127 structure's alignment. */
c36fce9a
GRK
2128
2129rtx
502b8322 2130copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
c36fce9a 2131{
19caa751
RK
2132 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2133 rtx src = NULL, dst = NULL;
2134 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
c988af2b 2135 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
19caa751
RK
2136
2137 if (tgtblk == 0)
2138 {
1da68f56
RK
2139 tgtblk = assign_temp (build_qualified_type (type,
2140 (TYPE_QUALS (type)
2141 | TYPE_QUAL_CONST)),
2142 0, 1, 1);
19caa751
RK
2143 preserve_temp_slots (tgtblk);
2144 }
3a94c984 2145
1ed1b4fb 2146 /* This code assumes srcreg is at least a full word. If it isn't, copy it
9ac3e73b 2147 into a new pseudo which is a full word. */
0d7839da 2148
19caa751
RK
2149 if (GET_MODE (srcreg) != BLKmode
2150 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
8df83eae 2151 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
19caa751 2152
c988af2b
RS
2153 /* If the structure doesn't take up a whole number of words, see whether
2154 SRCREG is padded on the left or on the right. If it's on the left,
2155 set PADDING_CORRECTION to the number of bits to skip.
2156
2157 In most ABIs, the structure will be returned at the least end of
2158 the register, which translates to right padding on little-endian
2159 targets and left padding on big-endian targets. The opposite
2160 holds if the structure is returned at the most significant
2161 end of the register. */
2162 if (bytes % UNITS_PER_WORD != 0
2163 && (targetm.calls.return_in_msb (type)
2164 ? !BYTES_BIG_ENDIAN
2165 : BYTES_BIG_ENDIAN))
2166 padding_correction
19caa751
RK
2167 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2168
2169 /* Copy the structure BITSIZE bites at a time.
3a94c984 2170
19caa751
RK
2171 We could probably emit more efficient code for machines which do not use
2172 strict alignment, but it doesn't seem worth the effort at the current
2173 time. */
c988af2b 2174 for (bitpos = 0, xbitpos = padding_correction;
19caa751
RK
2175 bitpos < bytes * BITS_PER_UNIT;
2176 bitpos += bitsize, xbitpos += bitsize)
2177 {
3a94c984 2178 /* We need a new source operand each time xbitpos is on a
c988af2b 2179 word boundary and when xbitpos == padding_correction
19caa751
RK
2180 (the first time through). */
2181 if (xbitpos % BITS_PER_WORD == 0
c988af2b 2182 || xbitpos == padding_correction)
b47f8cfc
JH
2183 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2184 GET_MODE (srcreg));
19caa751
RK
2185
2186 /* We need a new destination operand each time bitpos is on
2187 a word boundary. */
2188 if (bitpos % BITS_PER_WORD == 0)
2189 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
3a94c984 2190
19caa751
RK
2191 /* Use xbitpos for the source extraction (right justified) and
2192 xbitpos for the destination store (left justified). */
2193 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2194 extract_bit_field (src, bitsize,
2195 xbitpos % BITS_PER_WORD, 1,
2196 NULL_RTX, word_mode, word_mode,
04050c69
RK
2197 BITS_PER_WORD),
2198 BITS_PER_WORD);
19caa751
RK
2199 }
2200
2201 return tgtblk;
c36fce9a
GRK
2202}
2203
94b25f81
RK
2204/* Add a USE expression for REG to the (possibly empty) list pointed
2205 to by CALL_FUSAGE. REG must denote a hard register. */
bbf6f052
RK
2206
2207void
502b8322 2208use_reg (rtx *call_fusage, rtx reg)
b3f8cf4a 2209{
0304dfbb
DE
2210 if (GET_CODE (reg) != REG
2211 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
3a94c984 2212 abort ();
b3f8cf4a
RK
2213
2214 *call_fusage
38a448ca
RH
2215 = gen_rtx_EXPR_LIST (VOIDmode,
2216 gen_rtx_USE (VOIDmode, reg), *call_fusage);
b3f8cf4a
RK
2217}
2218
94b25f81
RK
2219/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2220 starting at REGNO. All of these registers must be hard registers. */
b3f8cf4a
RK
2221
2222void
502b8322 2223use_regs (rtx *call_fusage, int regno, int nregs)
bbf6f052 2224{
0304dfbb 2225 int i;
bbf6f052 2226
0304dfbb
DE
2227 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2228 abort ();
2229
2230 for (i = 0; i < nregs; i++)
e50126e8 2231 use_reg (call_fusage, regno_reg_rtx[regno + i]);
bbf6f052 2232}
fffa9c1d
JW
2233
2234/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2235 PARALLEL REGS. This is for calls that pass values in multiple
2236 non-contiguous locations. The Irix 6 ABI has examples of this. */
2237
2238void
502b8322 2239use_group_regs (rtx *call_fusage, rtx regs)
fffa9c1d
JW
2240{
2241 int i;
2242
6bd35f86
DE
2243 for (i = 0; i < XVECLEN (regs, 0); i++)
2244 {
2245 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
fffa9c1d 2246
6bd35f86
DE
2247 /* A NULL entry means the parameter goes both on the stack and in
2248 registers. This can also be a MEM for targets that pass values
2249 partially on the stack and partially in registers. */
e9a25f70 2250 if (reg != 0 && GET_CODE (reg) == REG)
6bd35f86
DE
2251 use_reg (call_fusage, reg);
2252 }
fffa9c1d 2253}
bbf6f052 2254\f
57814e5e 2255
cf5124f6
RS
2256/* Determine whether the LEN bytes generated by CONSTFUN can be
2257 stored to memory using several move instructions. CONSTFUNDATA is
2258 a pointer which will be passed as argument in every CONSTFUN call.
2259 ALIGN is maximum alignment we can assume. Return nonzero if a
2260 call to store_by_pieces should succeed. */
2261
57814e5e 2262int
502b8322
AJ
2263can_store_by_pieces (unsigned HOST_WIDE_INT len,
2264 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2265 void *constfundata, unsigned int align)
57814e5e 2266{
98166639 2267 unsigned HOST_WIDE_INT max_size, l;
57814e5e
JJ
2268 HOST_WIDE_INT offset = 0;
2269 enum machine_mode mode, tmode;
2270 enum insn_code icode;
2271 int reverse;
2272 rtx cst;
2273
2c430630
RS
2274 if (len == 0)
2275 return 1;
2276
4977bab6 2277 if (! STORE_BY_PIECES_P (len, align))
57814e5e
JJ
2278 return 0;
2279
2280 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2281 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2282 align = MOVE_MAX * BITS_PER_UNIT;
2283
2284 /* We would first store what we can in the largest integer mode, then go to
2285 successively smaller modes. */
2286
2287 for (reverse = 0;
2288 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2289 reverse++)
2290 {
2291 l = len;
2292 mode = VOIDmode;
cf5124f6 2293 max_size = STORE_MAX_PIECES + 1;
57814e5e
JJ
2294 while (max_size > 1)
2295 {
2296 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2297 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2298 if (GET_MODE_SIZE (tmode) < max_size)
2299 mode = tmode;
2300
2301 if (mode == VOIDmode)
2302 break;
2303
2304 icode = mov_optab->handlers[(int) mode].insn_code;
2305 if (icode != CODE_FOR_nothing
2306 && align >= GET_MODE_ALIGNMENT (mode))
2307 {
2308 unsigned int size = GET_MODE_SIZE (mode);
2309
2310 while (l >= size)
2311 {
2312 if (reverse)
2313 offset -= size;
2314
2315 cst = (*constfun) (constfundata, offset, mode);
2316 if (!LEGITIMATE_CONSTANT_P (cst))
2317 return 0;
2318
2319 if (!reverse)
2320 offset += size;
2321
2322 l -= size;
2323 }
2324 }
2325
2326 max_size = GET_MODE_SIZE (mode);
2327 }
2328
2329 /* The code above should have handled everything. */
2330 if (l != 0)
2331 abort ();
2332 }
2333
2334 return 1;
2335}
2336
2337/* Generate several move instructions to store LEN bytes generated by
2338 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2339 pointer which will be passed as argument in every CONSTFUN call.
8fd3cf4e
JJ
2340 ALIGN is maximum alignment we can assume.
2341 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
2342 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
2343 stpcpy. */
57814e5e 2344
8fd3cf4e 2345rtx
502b8322
AJ
2346store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2347 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2348 void *constfundata, unsigned int align, int endp)
57814e5e
JJ
2349{
2350 struct store_by_pieces data;
2351
2c430630
RS
2352 if (len == 0)
2353 {
2354 if (endp == 2)
2355 abort ();
2356 return to;
2357 }
2358
4977bab6 2359 if (! STORE_BY_PIECES_P (len, align))
57814e5e
JJ
2360 abort ();
2361 to = protect_from_queue (to, 1);
2362 data.constfun = constfun;
2363 data.constfundata = constfundata;
2364 data.len = len;
2365 data.to = to;
2366 store_by_pieces_1 (&data, align);
8fd3cf4e
JJ
2367 if (endp)
2368 {
2369 rtx to1;
2370
2371 if (data.reverse)
2372 abort ();
2373 if (data.autinc_to)
2374 {
2375 if (endp == 2)
2376 {
2377 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2378 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2379 else
2380 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2381 -1));
2382 }
2383 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2384 data.offset);
2385 }
2386 else
2387 {
2388 if (endp == 2)
2389 --data.offset;
2390 to1 = adjust_address (data.to, QImode, data.offset);
2391 }
2392 return to1;
2393 }
2394 else
2395 return data.to;
57814e5e
JJ
2396}
2397
19caa751
RK
2398/* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2399 rtx with BLKmode). The caller must pass TO through protect_from_queue
2400 before calling. ALIGN is maximum alignment we can assume. */
9de08200
RK
2401
2402static void
342e2b74 2403clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
9de08200 2404{
57814e5e
JJ
2405 struct store_by_pieces data;
2406
2c430630
RS
2407 if (len == 0)
2408 return;
2409
57814e5e 2410 data.constfun = clear_by_pieces_1;
df4ae160 2411 data.constfundata = NULL;
57814e5e
JJ
2412 data.len = len;
2413 data.to = to;
2414 store_by_pieces_1 (&data, align);
2415}
2416
2417/* Callback routine for clear_by_pieces.
2418 Return const0_rtx unconditionally. */
2419
2420static rtx
502b8322
AJ
2421clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2422 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2423 enum machine_mode mode ATTRIBUTE_UNUSED)
57814e5e
JJ
2424{
2425 return const0_rtx;
2426}
2427
2428/* Subroutine of clear_by_pieces and store_by_pieces.
2429 Generate several move instructions to store LEN bytes of block TO. (A MEM
2430 rtx with BLKmode). The caller must pass TO through protect_from_queue
2431 before calling. ALIGN is maximum alignment we can assume. */
2432
2433static void
502b8322
AJ
2434store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2435 unsigned int align ATTRIBUTE_UNUSED)
57814e5e
JJ
2436{
2437 rtx to_addr = XEXP (data->to, 0);
cf5124f6 2438 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
fbe1758d
AM
2439 enum machine_mode mode = VOIDmode, tmode;
2440 enum insn_code icode;
9de08200 2441
57814e5e
JJ
2442 data->offset = 0;
2443 data->to_addr = to_addr;
2444 data->autinc_to
9de08200
RK
2445 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2446 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2447
57814e5e
JJ
2448 data->explicit_inc_to = 0;
2449 data->reverse
9de08200 2450 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
57814e5e
JJ
2451 if (data->reverse)
2452 data->offset = data->len;
9de08200 2453
57814e5e 2454 /* If storing requires more than two move insns,
9de08200
RK
2455 copy addresses to registers (to make displacements shorter)
2456 and use post-increment if available. */
57814e5e
JJ
2457 if (!data->autinc_to
2458 && move_by_pieces_ninsns (data->len, align) > 2)
9de08200 2459 {
3a94c984 2460 /* Determine the main mode we'll be using. */
fbe1758d
AM
2461 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2462 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2463 if (GET_MODE_SIZE (tmode) < max_size)
2464 mode = tmode;
2465
57814e5e 2466 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
9de08200 2467 {
57814e5e
JJ
2468 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2469 data->autinc_to = 1;
2470 data->explicit_inc_to = -1;
9de08200 2471 }
3bdf5ad1 2472
57814e5e
JJ
2473 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2474 && ! data->autinc_to)
9de08200 2475 {
57814e5e
JJ
2476 data->to_addr = copy_addr_to_reg (to_addr);
2477 data->autinc_to = 1;
2478 data->explicit_inc_to = 1;
9de08200 2479 }
3bdf5ad1 2480
57814e5e
JJ
2481 if ( !data->autinc_to && CONSTANT_P (to_addr))
2482 data->to_addr = copy_addr_to_reg (to_addr);
9de08200
RK
2483 }
2484
e1565e65 2485 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
19caa751 2486 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
bdb429a5 2487 align = MOVE_MAX * BITS_PER_UNIT;
9de08200 2488
57814e5e 2489 /* First store what we can in the largest integer mode, then go to
9de08200
RK
2490 successively smaller modes. */
2491
2492 while (max_size > 1)
2493 {
9de08200
RK
2494 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2495 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2496 if (GET_MODE_SIZE (tmode) < max_size)
2497 mode = tmode;
2498
2499 if (mode == VOIDmode)
2500 break;
2501
2502 icode = mov_optab->handlers[(int) mode].insn_code;
19caa751 2503 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
57814e5e 2504 store_by_pieces_2 (GEN_FCN (icode), mode, data);
9de08200
RK
2505
2506 max_size = GET_MODE_SIZE (mode);
2507 }
2508
2509 /* The code above should have handled everything. */
57814e5e 2510 if (data->len != 0)
9de08200
RK
2511 abort ();
2512}
2513
57814e5e 2514/* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
9de08200
RK
2515 with move instructions for mode MODE. GENFUN is the gen_... function
2516 to make a move insn for that mode. DATA has all the other info. */
2517
2518static void
502b8322
AJ
2519store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2520 struct store_by_pieces *data)
9de08200 2521{
3bdf5ad1 2522 unsigned int size = GET_MODE_SIZE (mode);
57814e5e 2523 rtx to1, cst;
9de08200
RK
2524
2525 while (data->len >= size)
2526 {
3bdf5ad1
RK
2527 if (data->reverse)
2528 data->offset -= size;
9de08200 2529
3bdf5ad1 2530 if (data->autinc_to)
630036c6
JJ
2531 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2532 data->offset);
3a94c984 2533 else
f4ef873c 2534 to1 = adjust_address (data->to, mode, data->offset);
9de08200 2535
940da324 2536 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
57814e5e
JJ
2537 emit_insn (gen_add2_insn (data->to_addr,
2538 GEN_INT (-(HOST_WIDE_INT) size)));
9de08200 2539
57814e5e
JJ
2540 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2541 emit_insn ((*genfun) (to1, cst));
3bdf5ad1 2542
940da324 2543 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
9de08200 2544 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
9de08200 2545
3bdf5ad1
RK
2546 if (! data->reverse)
2547 data->offset += size;
9de08200
RK
2548
2549 data->len -= size;
2550 }
2551}
2552\f
19caa751 2553/* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
8ac61af7 2554 its length in bytes. */
e9a25f70
JL
2555
2556rtx
502b8322 2557clear_storage (rtx object, rtx size)
bbf6f052 2558{
e9a25f70 2559 rtx retval = 0;
8ac61af7
RK
2560 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2561 : GET_MODE_ALIGNMENT (GET_MODE (object)));
e9a25f70 2562
fcf1b822
RK
2563 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2564 just move a zero. Otherwise, do this a piece at a time. */
69ef87e2 2565 if (GET_MODE (object) != BLKmode
fcf1b822 2566 && GET_CODE (size) == CONST_INT
4ca79136 2567 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
fcf1b822
RK
2568 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2569 else
bbf6f052 2570 {
9de08200
RK
2571 object = protect_from_queue (object, 1);
2572 size = protect_from_queue (size, 0);
2573
6972c506 2574 if (size == const0_rtx)
2c430630
RS
2575 ;
2576 else if (GET_CODE (size) == CONST_INT
78762e3b 2577 && CLEAR_BY_PIECES_P (INTVAL (size), align))
9de08200 2578 clear_by_pieces (object, INTVAL (size), align);
4ca79136
RH
2579 else if (clear_storage_via_clrstr (object, size, align))
2580 ;
9de08200 2581 else
4ca79136
RH
2582 retval = clear_storage_via_libcall (object, size);
2583 }
2584
2585 return retval;
2586}
2587
2588/* A subroutine of clear_storage. Expand a clrstr pattern;
2589 return true if successful. */
2590
2591static bool
502b8322 2592clear_storage_via_clrstr (rtx object, rtx size, unsigned int align)
4ca79136
RH
2593{
2594 /* Try the most limited insn first, because there's no point
2595 including more than one in the machine description unless
2596 the more limited one has some advantage. */
2597
2598 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2599 enum machine_mode mode;
2600
2601 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2602 mode = GET_MODE_WIDER_MODE (mode))
2603 {
2604 enum insn_code code = clrstr_optab[(int) mode];
2605 insn_operand_predicate_fn pred;
2606
2607 if (code != CODE_FOR_nothing
2608 /* We don't need MODE to be narrower than
2609 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2610 the mode mask, as it is returned by the macro, it will
2611 definitely be less than the actual mode mask. */
2612 && ((GET_CODE (size) == CONST_INT
2613 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2614 <= (GET_MODE_MASK (mode) >> 1)))
2615 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2616 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2617 || (*pred) (object, BLKmode))
2618 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2619 || (*pred) (opalign, VOIDmode)))
9de08200 2620 {
4ca79136
RH
2621 rtx op1;
2622 rtx last = get_last_insn ();
2623 rtx pat;
9de08200 2624
4ca79136
RH
2625 op1 = convert_to_mode (mode, size, 1);
2626 pred = insn_data[(int) code].operand[1].predicate;
2627 if (pred != 0 && ! (*pred) (op1, mode))
2628 op1 = copy_to_mode_reg (mode, op1);
9de08200 2629
4ca79136
RH
2630 pat = GEN_FCN ((int) code) (object, op1, opalign);
2631 if (pat)
9de08200 2632 {
4ca79136
RH
2633 emit_insn (pat);
2634 return true;
2635 }
2636 else
2637 delete_insns_since (last);
2638 }
2639 }
9de08200 2640
4ca79136
RH
2641 return false;
2642}
9de08200 2643
4ca79136
RH
2644/* A subroutine of clear_storage. Expand a call to memset or bzero.
2645 Return the return value of memset, 0 otherwise. */
9de08200 2646
4ca79136 2647static rtx
502b8322 2648clear_storage_via_libcall (rtx object, rtx size)
4ca79136
RH
2649{
2650 tree call_expr, arg_list, fn, object_tree, size_tree;
2651 enum machine_mode size_mode;
2652 rtx retval;
9de08200 2653
4ca79136 2654 /* OBJECT or SIZE may have been passed through protect_from_queue.
52cf7115 2655
4ca79136
RH
2656 It is unsafe to save the value generated by protect_from_queue
2657 and reuse it later. Consider what happens if emit_queue is
2658 called before the return value from protect_from_queue is used.
52cf7115 2659
4ca79136
RH
2660 Expansion of the CALL_EXPR below will call emit_queue before
2661 we are finished emitting RTL for argument setup. So if we are
2662 not careful we could get the wrong value for an argument.
52cf7115 2663
4ca79136
RH
2664 To avoid this problem we go ahead and emit code to copy OBJECT
2665 and SIZE into new pseudos. We can then place those new pseudos
2666 into an RTL_EXPR and use them later, even after a call to
2667 emit_queue.
52cf7115 2668
4ca79136
RH
2669 Note this is not strictly needed for library calls since they
2670 do not call emit_queue before loading their arguments. However,
2671 we may need to have library calls call emit_queue in the future
2672 since failing to do so could cause problems for targets which
2673 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
52cf7115 2674
4ca79136 2675 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
52cf7115 2676
4ca79136
RH
2677 if (TARGET_MEM_FUNCTIONS)
2678 size_mode = TYPE_MODE (sizetype);
2679 else
2680 size_mode = TYPE_MODE (unsigned_type_node);
2681 size = convert_to_mode (size_mode, size, 1);
2682 size = copy_to_mode_reg (size_mode, size);
52cf7115 2683
4ca79136
RH
2684 /* It is incorrect to use the libcall calling conventions to call
2685 memset in this context. This could be a user call to memset and
2686 the user may wish to examine the return value from memset. For
2687 targets where libcalls and normal calls have different conventions
2688 for returning pointers, we could end up generating incorrect code.
4bc973ae 2689
4ca79136 2690 For convenience, we generate the call to bzero this way as well. */
4bc973ae 2691
4ca79136
RH
2692 object_tree = make_tree (ptr_type_node, object);
2693 if (TARGET_MEM_FUNCTIONS)
2694 size_tree = make_tree (sizetype, size);
2695 else
2696 size_tree = make_tree (unsigned_type_node, size);
2697
2698 fn = clear_storage_libcall_fn (true);
2699 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2700 if (TARGET_MEM_FUNCTIONS)
2701 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2702 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2703
2704 /* Now we have to build up the CALL_EXPR itself. */
2705 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2706 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2707 call_expr, arg_list, NULL_TREE);
4ca79136
RH
2708
2709 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2710
2711 /* If we are initializing a readonly value, show the above call
2712 clobbered it. Otherwise, a load from it may erroneously be
2713 hoisted from a loop. */
2714 if (RTX_UNCHANGING_P (object))
2715 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2716
2717 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
2718}
2719
2720/* A subroutine of clear_storage_via_libcall. Create the tree node
2721 for the function we use for block clears. The first time FOR_CALL
2722 is true, we call assemble_external. */
2723
2724static GTY(()) tree block_clear_fn;
66c60e67 2725
9661b15f 2726void
502b8322 2727init_block_clear_fn (const char *asmspec)
4ca79136 2728{
9661b15f 2729 if (!block_clear_fn)
4ca79136 2730 {
9661b15f
JJ
2731 tree fn, args;
2732
4ca79136
RH
2733 if (TARGET_MEM_FUNCTIONS)
2734 {
2735 fn = get_identifier ("memset");
2736 args = build_function_type_list (ptr_type_node, ptr_type_node,
2737 integer_type_node, sizetype,
2738 NULL_TREE);
2739 }
2740 else
2741 {
2742 fn = get_identifier ("bzero");
2743 args = build_function_type_list (void_type_node, ptr_type_node,
2744 unsigned_type_node, NULL_TREE);
9de08200 2745 }
4ca79136
RH
2746
2747 fn = build_decl (FUNCTION_DECL, fn, args);
2748 DECL_EXTERNAL (fn) = 1;
2749 TREE_PUBLIC (fn) = 1;
2750 DECL_ARTIFICIAL (fn) = 1;
2751 TREE_NOTHROW (fn) = 1;
2752
2753 block_clear_fn = fn;
bbf6f052 2754 }
e9a25f70 2755
9661b15f
JJ
2756 if (asmspec)
2757 {
2758 SET_DECL_RTL (block_clear_fn, NULL_RTX);
2759 SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
2760 }
2761}
2762
2763static tree
502b8322 2764clear_storage_libcall_fn (int for_call)
9661b15f
JJ
2765{
2766 static bool emitted_extern;
2767
2768 if (!block_clear_fn)
2769 init_block_clear_fn (NULL);
2770
4ca79136
RH
2771 if (for_call && !emitted_extern)
2772 {
2773 emitted_extern = true;
9661b15f
JJ
2774 make_decl_rtl (block_clear_fn, NULL);
2775 assemble_external (block_clear_fn);
4ca79136 2776 }
bbf6f052 2777
9661b15f 2778 return block_clear_fn;
4ca79136
RH
2779}
2780\f
bbf6f052
RK
2781/* Generate code to copy Y into X.
2782 Both Y and X must have the same mode, except that
2783 Y can be a constant with VOIDmode.
2784 This mode cannot be BLKmode; use emit_block_move for that.
2785
2786 Return the last instruction emitted. */
2787
2788rtx
502b8322 2789emit_move_insn (rtx x, rtx y)
bbf6f052
RK
2790{
2791 enum machine_mode mode = GET_MODE (x);
de1b33dd 2792 rtx y_cst = NULL_RTX;
0c19a26f 2793 rtx last_insn, set;
bbf6f052
RK
2794
2795 x = protect_from_queue (x, 1);
2796 y = protect_from_queue (y, 0);
2797
2798 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2799 abort ();
2800
6de9cd9a 2801 if (CONSTANT_P (y))
de1b33dd 2802 {
51286de6 2803 if (optimize
075fc17a 2804 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
51286de6
RH
2805 && (last_insn = compress_float_constant (x, y)))
2806 return last_insn;
2807
0c19a26f
RS
2808 y_cst = y;
2809
51286de6
RH
2810 if (!LEGITIMATE_CONSTANT_P (y))
2811 {
51286de6 2812 y = force_const_mem (mode, y);
3a04ff64
RH
2813
2814 /* If the target's cannot_force_const_mem prevented the spill,
2815 assume that the target's move expanders will also take care
2816 of the non-legitimate constant. */
2817 if (!y)
2818 y = y_cst;
51286de6 2819 }
de1b33dd 2820 }
bbf6f052
RK
2821
2822 /* If X or Y are memory references, verify that their addresses are valid
2823 for the machine. */
2824 if (GET_CODE (x) == MEM
2825 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2826 && ! push_operand (x, GET_MODE (x)))
2827 || (flag_force_addr
2828 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
792760b9 2829 x = validize_mem (x);
bbf6f052
RK
2830
2831 if (GET_CODE (y) == MEM
2832 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2833 || (flag_force_addr
2834 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
792760b9 2835 y = validize_mem (y);
bbf6f052
RK
2836
2837 if (mode == BLKmode)
2838 abort ();
2839
de1b33dd
AO
2840 last_insn = emit_move_insn_1 (x, y);
2841
0c19a26f
RS
2842 if (y_cst && GET_CODE (x) == REG
2843 && (set = single_set (last_insn)) != NULL_RTX
2844 && SET_DEST (set) == x
2845 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3d238248 2846 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
de1b33dd
AO
2847
2848 return last_insn;
261c4230
RS
2849}
2850
2851/* Low level part of emit_move_insn.
2852 Called just like emit_move_insn, but assumes X and Y
2853 are basically valid. */
2854
2855rtx
502b8322 2856emit_move_insn_1 (rtx x, rtx y)
261c4230
RS
2857{
2858 enum machine_mode mode = GET_MODE (x);
2859 enum machine_mode submode;
2860 enum mode_class class = GET_MODE_CLASS (mode);
261c4230 2861
dbbbbf3b 2862 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
3a94c984 2863 abort ();
76bbe028 2864
bbf6f052
RK
2865 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2866 return
2867 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2868
89742723 2869 /* Expand complex moves by moving real part and imag part, if possible. */
7308a047 2870 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
27e58a70 2871 && BLKmode != (submode = GET_MODE_INNER (mode))
7308a047
RS
2872 && (mov_optab->handlers[(int) submode].insn_code
2873 != CODE_FOR_nothing))
2874 {
2875 /* Don't split destination if it is a stack push. */
2876 int stack = push_operand (x, GET_MODE (x));
7308a047 2877
79ce92d7 2878#ifdef PUSH_ROUNDING
0e9cbd11
KH
2879 /* In case we output to the stack, but the size is smaller than the
2880 machine can push exactly, we need to use move instructions. */
1a06f5fe 2881 if (stack
bb93b973
RK
2882 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2883 != GET_MODE_SIZE (submode)))
1a06f5fe
JH
2884 {
2885 rtx temp;
bb93b973 2886 HOST_WIDE_INT offset1, offset2;
1a06f5fe
JH
2887
2888 /* Do not use anti_adjust_stack, since we don't want to update
2889 stack_pointer_delta. */
2890 temp = expand_binop (Pmode,
2891#ifdef STACK_GROWS_DOWNWARD
2892 sub_optab,
2893#else
2894 add_optab,
2895#endif
2896 stack_pointer_rtx,
2897 GEN_INT
bb93b973
RK
2898 (PUSH_ROUNDING
2899 (GET_MODE_SIZE (GET_MODE (x)))),
2900 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2901
1a06f5fe
JH
2902 if (temp != stack_pointer_rtx)
2903 emit_move_insn (stack_pointer_rtx, temp);
bb93b973 2904
1a06f5fe
JH
2905#ifdef STACK_GROWS_DOWNWARD
2906 offset1 = 0;
2907 offset2 = GET_MODE_SIZE (submode);
2908#else
2909 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2910 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2911 + GET_MODE_SIZE (submode));
2912#endif
bb93b973 2913
1a06f5fe
JH
2914 emit_move_insn (change_address (x, submode,
2915 gen_rtx_PLUS (Pmode,
2916 stack_pointer_rtx,
2917 GEN_INT (offset1))),
2918 gen_realpart (submode, y));
2919 emit_move_insn (change_address (x, submode,
2920 gen_rtx_PLUS (Pmode,
2921 stack_pointer_rtx,
2922 GEN_INT (offset2))),
2923 gen_imagpart (submode, y));
2924 }
e9c0bd54 2925 else
79ce92d7 2926#endif
7308a047
RS
2927 /* If this is a stack, push the highpart first, so it
2928 will be in the argument order.
2929
2930 In that case, change_address is used only to convert
2931 the mode, not to change the address. */
e9c0bd54 2932 if (stack)
c937357e 2933 {
e33c0d66
RS
2934 /* Note that the real part always precedes the imag part in memory
2935 regardless of machine's endianness. */
c937357e 2936#ifdef STACK_GROWS_DOWNWARD
a79b3dc7
RS
2937 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2938 gen_imagpart (submode, y));
2939 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2940 gen_realpart (submode, y));
c937357e 2941#else
a79b3dc7
RS
2942 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2943 gen_realpart (submode, y));
2944 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2945 gen_imagpart (submode, y));
c937357e
RS
2946#endif
2947 }
2948 else
2949 {
235ae7be
DM
2950 rtx realpart_x, realpart_y;
2951 rtx imagpart_x, imagpart_y;
2952
405f63da
MM
2953 /* If this is a complex value with each part being smaller than a
2954 word, the usual calling sequence will likely pack the pieces into
2955 a single register. Unfortunately, SUBREG of hard registers only
2956 deals in terms of words, so we have a problem converting input
2957 arguments to the CONCAT of two registers that is used elsewhere
2958 for complex values. If this is before reload, we can copy it into
2959 memory and reload. FIXME, we should see about using extract and
2960 insert on integer registers, but complex short and complex char
2961 variables should be rarely used. */
3a94c984 2962 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
405f63da
MM
2963 && (reload_in_progress | reload_completed) == 0)
2964 {
bb93b973
RK
2965 int packed_dest_p
2966 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2967 int packed_src_p
2968 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
405f63da
MM
2969
2970 if (packed_dest_p || packed_src_p)
2971 {
2972 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2973 ? MODE_FLOAT : MODE_INT);
2974
1da68f56
RK
2975 enum machine_mode reg_mode
2976 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
405f63da
MM
2977
2978 if (reg_mode != BLKmode)
2979 {
2980 rtx mem = assign_stack_temp (reg_mode,
2981 GET_MODE_SIZE (mode), 0);
f4ef873c 2982 rtx cmem = adjust_address (mem, mode, 0);
405f63da 2983
405f63da
MM
2984 if (packed_dest_p)
2985 {
2986 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
bb93b973 2987
405f63da
MM
2988 emit_move_insn_1 (cmem, y);
2989 return emit_move_insn_1 (sreg, mem);
2990 }
2991 else
2992 {
2993 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
bb93b973 2994
405f63da
MM
2995 emit_move_insn_1 (mem, sreg);
2996 return emit_move_insn_1 (x, cmem);
2997 }
2998 }
2999 }
3000 }
3001
235ae7be
DM
3002 realpart_x = gen_realpart (submode, x);
3003 realpart_y = gen_realpart (submode, y);
3004 imagpart_x = gen_imagpart (submode, x);
3005 imagpart_y = gen_imagpart (submode, y);
3006
3007 /* Show the output dies here. This is necessary for SUBREGs
3008 of pseudos since we cannot track their lifetimes correctly;
c14c6529
RH
3009 hard regs shouldn't appear here except as return values.
3010 We never want to emit such a clobber after reload. */
3011 if (x != y
235ae7be
DM
3012 && ! (reload_in_progress || reload_completed)
3013 && (GET_CODE (realpart_x) == SUBREG
3014 || GET_CODE (imagpart_x) == SUBREG))
bb93b973 3015 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2638126a 3016
a79b3dc7
RS
3017 emit_move_insn (realpart_x, realpart_y);
3018 emit_move_insn (imagpart_x, imagpart_y);
c937357e 3019 }
7308a047 3020
7a1ab50a 3021 return get_last_insn ();
7308a047
RS
3022 }
3023
a3600c71
HPN
3024 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
3025 find a mode to do it in. If we have a movcc, use it. Otherwise,
3026 find the MODE_INT mode of the same width. */
3027 else if (GET_MODE_CLASS (mode) == MODE_CC
3028 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
3029 {
3030 enum insn_code insn_code;
3031 enum machine_mode tmode = VOIDmode;
3032 rtx x1 = x, y1 = y;
3033
3034 if (mode != CCmode
3035 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
3036 tmode = CCmode;
3037 else
3038 for (tmode = QImode; tmode != VOIDmode;
3039 tmode = GET_MODE_WIDER_MODE (tmode))
3040 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
3041 break;
3042
3043 if (tmode == VOIDmode)
3044 abort ();
3045
3046 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
3047 may call change_address which is not appropriate if we were
3048 called when a reload was in progress. We don't have to worry
3049 about changing the address since the size in bytes is supposed to
3050 be the same. Copy the MEM to change the mode and move any
3051 substitutions from the old MEM to the new one. */
3052
3053 if (reload_in_progress)
3054 {
3055 x = gen_lowpart_common (tmode, x1);
3056 if (x == 0 && GET_CODE (x1) == MEM)
3057 {
3058 x = adjust_address_nv (x1, tmode, 0);
3059 copy_replacements (x1, x);
3060 }
3061
3062 y = gen_lowpart_common (tmode, y1);
3063 if (y == 0 && GET_CODE (y1) == MEM)
3064 {
3065 y = adjust_address_nv (y1, tmode, 0);
3066 copy_replacements (y1, y);
3067 }
3068 }
3069 else
3070 {
3071 x = gen_lowpart (tmode, x);
3072 y = gen_lowpart (tmode, y);
3073 }
502b8322 3074
a3600c71
HPN
3075 insn_code = mov_optab->handlers[(int) tmode].insn_code;
3076 return emit_insn (GEN_FCN (insn_code) (x, y));
3077 }
3078
5581fc91
RS
3079 /* Try using a move pattern for the corresponding integer mode. This is
3080 only safe when simplify_subreg can convert MODE constants into integer
3081 constants. At present, it can only do this reliably if the value
3082 fits within a HOST_WIDE_INT. */
3083 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3084 && (submode = int_mode_for_mode (mode)) != BLKmode
3085 && mov_optab->handlers[submode].insn_code != CODE_FOR_nothing)
3086 return emit_insn (GEN_FCN (mov_optab->handlers[submode].insn_code)
3087 (simplify_gen_subreg (submode, x, mode, 0),
3088 simplify_gen_subreg (submode, y, mode, 0)));
3089
cffa2189
R
3090 /* This will handle any multi-word or full-word mode that lacks a move_insn
3091 pattern. However, you will get better code if you define such patterns,
bbf6f052 3092 even if they must turn into multiple assembler instructions. */
cffa2189 3093 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
bbf6f052
RK
3094 {
3095 rtx last_insn = 0;
3ef1eef4 3096 rtx seq, inner;
235ae7be 3097 int need_clobber;
bb93b973 3098 int i;
3a94c984 3099
a98c9f1a
RK
3100#ifdef PUSH_ROUNDING
3101
3102 /* If X is a push on the stack, do the push now and replace
3103 X with a reference to the stack pointer. */
3104 if (push_operand (x, GET_MODE (x)))
3105 {
918a6124
GK
3106 rtx temp;
3107 enum rtx_code code;
0fb7aeda 3108
918a6124
GK
3109 /* Do not use anti_adjust_stack, since we don't want to update
3110 stack_pointer_delta. */
3111 temp = expand_binop (Pmode,
3112#ifdef STACK_GROWS_DOWNWARD
3113 sub_optab,
3114#else
3115 add_optab,
3116#endif
3117 stack_pointer_rtx,
3118 GEN_INT
bb93b973
RK
3119 (PUSH_ROUNDING
3120 (GET_MODE_SIZE (GET_MODE (x)))),
a426c92e 3121 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
bb93b973 3122
0fb7aeda
KH
3123 if (temp != stack_pointer_rtx)
3124 emit_move_insn (stack_pointer_rtx, temp);
918a6124
GK
3125
3126 code = GET_CODE (XEXP (x, 0));
bb93b973 3127
918a6124
GK
3128 /* Just hope that small offsets off SP are OK. */
3129 if (code == POST_INC)
0fb7aeda 3130 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
bb93b973
RK
3131 GEN_INT (-((HOST_WIDE_INT)
3132 GET_MODE_SIZE (GET_MODE (x)))));
918a6124 3133 else if (code == POST_DEC)
0fb7aeda 3134 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
918a6124
GK
3135 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3136 else
3137 temp = stack_pointer_rtx;
3138
3139 x = change_address (x, VOIDmode, temp);
a98c9f1a
RK
3140 }
3141#endif
3a94c984 3142
3ef1eef4
RK
3143 /* If we are in reload, see if either operand is a MEM whose address
3144 is scheduled for replacement. */
3145 if (reload_in_progress && GET_CODE (x) == MEM
3146 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
f1ec5147 3147 x = replace_equiv_address_nv (x, inner);
3ef1eef4
RK
3148 if (reload_in_progress && GET_CODE (y) == MEM
3149 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
f1ec5147 3150 y = replace_equiv_address_nv (y, inner);
3ef1eef4 3151
235ae7be 3152 start_sequence ();
15a7a8ec 3153
235ae7be 3154 need_clobber = 0;
bbf6f052 3155 for (i = 0;
3a94c984 3156 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
bbf6f052
RK
3157 i++)
3158 {
3159 rtx xpart = operand_subword (x, i, 1, mode);
3160 rtx ypart = operand_subword (y, i, 1, mode);
3161
3162 /* If we can't get a part of Y, put Y into memory if it is a
3163 constant. Otherwise, force it into a register. If we still
3164 can't get a part of Y, abort. */
3165 if (ypart == 0 && CONSTANT_P (y))
3166 {
3167 y = force_const_mem (mode, y);
3168 ypart = operand_subword (y, i, 1, mode);
3169 }
3170 else if (ypart == 0)
3171 ypart = operand_subword_force (y, i, mode);
3172
3173 if (xpart == 0 || ypart == 0)
3174 abort ();
3175
235ae7be
DM
3176 need_clobber |= (GET_CODE (xpart) == SUBREG);
3177
bbf6f052
RK
3178 last_insn = emit_move_insn (xpart, ypart);
3179 }
6551fa4d 3180
2f937369 3181 seq = get_insns ();
235ae7be
DM
3182 end_sequence ();
3183
3184 /* Show the output dies here. This is necessary for SUBREGs
3185 of pseudos since we cannot track their lifetimes correctly;
3186 hard regs shouldn't appear here except as return values.
3187 We never want to emit such a clobber after reload. */
3188 if (x != y
3189 && ! (reload_in_progress || reload_completed)
3190 && need_clobber != 0)
bb93b973 3191 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
235ae7be
DM
3192
3193 emit_insn (seq);
3194
bbf6f052
RK
3195 return last_insn;
3196 }
3197 else
3198 abort ();
3199}
51286de6
RH
3200
3201/* If Y is representable exactly in a narrower mode, and the target can
3202 perform the extension directly from constant or memory, then emit the
3203 move as an extension. */
3204
3205static rtx
502b8322 3206compress_float_constant (rtx x, rtx y)
51286de6
RH
3207{
3208 enum machine_mode dstmode = GET_MODE (x);
3209 enum machine_mode orig_srcmode = GET_MODE (y);
3210 enum machine_mode srcmode;
3211 REAL_VALUE_TYPE r;
3212
3213 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3214
3215 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3216 srcmode != orig_srcmode;
3217 srcmode = GET_MODE_WIDER_MODE (srcmode))
3218 {
3219 enum insn_code ic;
3220 rtx trunc_y, last_insn;
3221
3222 /* Skip if the target can't extend this way. */
3223 ic = can_extend_p (dstmode, srcmode, 0);
3224 if (ic == CODE_FOR_nothing)
3225 continue;
3226
3227 /* Skip if the narrowed value isn't exact. */
3228 if (! exact_real_truncate (srcmode, &r))
3229 continue;
3230
3231 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3232
3233 if (LEGITIMATE_CONSTANT_P (trunc_y))
3234 {
3235 /* Skip if the target needs extra instructions to perform
3236 the extension. */
3237 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3238 continue;
3239 }
3240 else if (float_extend_from_mem[dstmode][srcmode])
3241 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3242 else
3243 continue;
3244
3245 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3246 last_insn = get_last_insn ();
3247
3248 if (GET_CODE (x) == REG)
0c19a26f 3249 set_unique_reg_note (last_insn, REG_EQUAL, y);
51286de6
RH
3250
3251 return last_insn;
3252 }
3253
3254 return NULL_RTX;
3255}
bbf6f052
RK
3256\f
3257/* Pushing data onto the stack. */
3258
3259/* Push a block of length SIZE (perhaps variable)
3260 and return an rtx to address the beginning of the block.
3261 Note that it is not possible for the value returned to be a QUEUED.
3262 The value may be virtual_outgoing_args_rtx.
3263
3264 EXTRA is the number of bytes of padding to push in addition to SIZE.
3265 BELOW nonzero means this padding comes at low addresses;
3266 otherwise, the padding comes at high addresses. */
3267
3268rtx
502b8322 3269push_block (rtx size, int extra, int below)
bbf6f052 3270{
b3694847 3271 rtx temp;
88f63c77
RK
3272
3273 size = convert_modes (Pmode, ptr_mode, size, 1);
bbf6f052
RK
3274 if (CONSTANT_P (size))
3275 anti_adjust_stack (plus_constant (size, extra));
3276 else if (GET_CODE (size) == REG && extra == 0)
3277 anti_adjust_stack (size);
3278 else
3279 {
ce48579b 3280 temp = copy_to_mode_reg (Pmode, size);
bbf6f052 3281 if (extra != 0)
906c4e36 3282 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
bbf6f052
RK
3283 temp, 0, OPTAB_LIB_WIDEN);
3284 anti_adjust_stack (temp);
3285 }
3286
f73ad30e 3287#ifndef STACK_GROWS_DOWNWARD
f73ad30e 3288 if (0)
f73ad30e
JH
3289#else
3290 if (1)
bbf6f052 3291#endif
f73ad30e 3292 {
f73ad30e
JH
3293 temp = virtual_outgoing_args_rtx;
3294 if (extra != 0 && below)
3295 temp = plus_constant (temp, extra);
3296 }
3297 else
3298 {
3299 if (GET_CODE (size) == CONST_INT)
3300 temp = plus_constant (virtual_outgoing_args_rtx,
3a94c984 3301 -INTVAL (size) - (below ? 0 : extra));
f73ad30e
JH
3302 else if (extra != 0 && !below)
3303 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3bdf5ad1 3304 negate_rtx (Pmode, plus_constant (size, extra)));
f73ad30e
JH
3305 else
3306 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3307 negate_rtx (Pmode, size));
3308 }
bbf6f052
RK
3309
3310 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3311}
3312
21d93687
RK
3313#ifdef PUSH_ROUNDING
3314
566aa174 3315/* Emit single push insn. */
21d93687 3316
566aa174 3317static void
502b8322 3318emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
566aa174 3319{
566aa174 3320 rtx dest_addr;
918a6124 3321 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
566aa174 3322 rtx dest;
371b8fc0
JH
3323 enum insn_code icode;
3324 insn_operand_predicate_fn pred;
566aa174 3325
371b8fc0
JH
3326 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3327 /* If there is push pattern, use it. Otherwise try old way of throwing
3328 MEM representing push operation to move expander. */
3329 icode = push_optab->handlers[(int) mode].insn_code;
3330 if (icode != CODE_FOR_nothing)
3331 {
3332 if (((pred = insn_data[(int) icode].operand[0].predicate)
505ddab6 3333 && !((*pred) (x, mode))))
371b8fc0
JH
3334 x = force_reg (mode, x);
3335 emit_insn (GEN_FCN (icode) (x));
3336 return;
3337 }
566aa174
JH
3338 if (GET_MODE_SIZE (mode) == rounded_size)
3339 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
329d586f
KH
3340 /* If we are to pad downward, adjust the stack pointer first and
3341 then store X into the stack location using an offset. This is
3342 because emit_move_insn does not know how to pad; it does not have
3343 access to type. */
3344 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3345 {
3346 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3347 HOST_WIDE_INT offset;
3348
3349 emit_move_insn (stack_pointer_rtx,
3350 expand_binop (Pmode,
3351#ifdef STACK_GROWS_DOWNWARD
3352 sub_optab,
3353#else
3354 add_optab,
3355#endif
3356 stack_pointer_rtx,
3357 GEN_INT (rounded_size),
3358 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3359
3360 offset = (HOST_WIDE_INT) padding_size;
3361#ifdef STACK_GROWS_DOWNWARD
3362 if (STACK_PUSH_CODE == POST_DEC)
3363 /* We have already decremented the stack pointer, so get the
3364 previous value. */
3365 offset += (HOST_WIDE_INT) rounded_size;
3366#else
3367 if (STACK_PUSH_CODE == POST_INC)
3368 /* We have already incremented the stack pointer, so get the
3369 previous value. */
3370 offset -= (HOST_WIDE_INT) rounded_size;
3371#endif
3372 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3373 }
566aa174
JH
3374 else
3375 {
3376#ifdef STACK_GROWS_DOWNWARD
329d586f 3377 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
566aa174 3378 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
505ddab6 3379 GEN_INT (-(HOST_WIDE_INT) rounded_size));
566aa174 3380#else
329d586f 3381 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
566aa174
JH
3382 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3383 GEN_INT (rounded_size));
3384#endif
3385 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3386 }
3387
3388 dest = gen_rtx_MEM (mode, dest_addr);
3389
566aa174
JH
3390 if (type != 0)
3391 {
3392 set_mem_attributes (dest, type, 1);
c3d32120
RK
3393
3394 if (flag_optimize_sibling_calls)
3395 /* Function incoming arguments may overlap with sibling call
3396 outgoing arguments and we cannot allow reordering of reads
3397 from function arguments with stores to outgoing arguments
3398 of sibling calls. */
3399 set_mem_alias_set (dest, 0);
566aa174
JH
3400 }
3401 emit_move_insn (dest, x);
566aa174 3402}
21d93687 3403#endif
566aa174 3404
bbf6f052
RK
3405/* Generate code to push X onto the stack, assuming it has mode MODE and
3406 type TYPE.
3407 MODE is redundant except when X is a CONST_INT (since they don't
3408 carry mode info).
3409 SIZE is an rtx for the size of data to be copied (in bytes),
3410 needed only if X is BLKmode.
3411
f1eaaf73 3412 ALIGN (in bits) is maximum alignment we can assume.
bbf6f052 3413
cd048831
RK
3414 If PARTIAL and REG are both nonzero, then copy that many of the first
3415 words of X into registers starting with REG, and push the rest of X.
bbf6f052
RK
3416 The amount of space pushed is decreased by PARTIAL words,
3417 rounded *down* to a multiple of PARM_BOUNDARY.
3418 REG must be a hard register in this case.
cd048831
RK
3419 If REG is zero but PARTIAL is not, take any all others actions for an
3420 argument partially in registers, but do not actually load any
3421 registers.
bbf6f052
RK
3422
3423 EXTRA is the amount in bytes of extra space to leave next to this arg.
6dc42e49 3424 This is ignored if an argument block has already been allocated.
bbf6f052
RK
3425
3426 On a machine that lacks real push insns, ARGS_ADDR is the address of
3427 the bottom of the argument block for this call. We use indexing off there
3428 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
3429 argument block has not been preallocated.
3430
e5e809f4
JL
3431 ARGS_SO_FAR is the size of args previously pushed for this call.
3432
3433 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3434 for arguments passed in registers. If nonzero, it will be the number
3435 of bytes required. */
bbf6f052
RK
3436
3437void
502b8322
AJ
3438emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3439 unsigned int align, int partial, rtx reg, int extra,
3440 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3441 rtx alignment_pad)
bbf6f052
RK
3442{
3443 rtx xinner;
3444 enum direction stack_direction
3445#ifdef STACK_GROWS_DOWNWARD
3446 = downward;
3447#else
3448 = upward;
3449#endif
3450
3451 /* Decide where to pad the argument: `downward' for below,
3452 `upward' for above, or `none' for don't pad it.
3453 Default is below for small data on big-endian machines; else above. */
3454 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3455
0fb7aeda 3456 /* Invert direction if stack is post-decrement.
9e0e11bf
GK
3457 FIXME: why? */
3458 if (STACK_PUSH_CODE == POST_DEC)
bbf6f052
RK
3459 if (where_pad != none)
3460 where_pad = (where_pad == downward ? upward : downward);
3461
3462 xinner = x = protect_from_queue (x, 0);
3463
3464 if (mode == BLKmode)
3465 {
3466 /* Copy a block into the stack, entirely or partially. */
3467
b3694847 3468 rtx temp;
bbf6f052 3469 int used = partial * UNITS_PER_WORD;
531547e9 3470 int offset;
bbf6f052 3471 int skip;
3a94c984 3472
531547e9
FJ
3473 if (reg && GET_CODE (reg) == PARALLEL)
3474 {
3475 /* Use the size of the elt to compute offset. */
3476 rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
3477 used = partial * GET_MODE_SIZE (GET_MODE (elt));
3478 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3479 }
3480 else
3481 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3482
bbf6f052
RK
3483 if (size == 0)
3484 abort ();
3485
3486 used -= offset;
3487
3488 /* USED is now the # of bytes we need not copy to the stack
3489 because registers will take care of them. */
3490
3491 if (partial != 0)
f4ef873c 3492 xinner = adjust_address (xinner, BLKmode, used);
bbf6f052
RK
3493
3494 /* If the partial register-part of the arg counts in its stack size,
3495 skip the part of stack space corresponding to the registers.
3496 Otherwise, start copying to the beginning of the stack space,
3497 by setting SKIP to 0. */
e5e809f4 3498 skip = (reg_parm_stack_space == 0) ? 0 : used;
bbf6f052
RK
3499
3500#ifdef PUSH_ROUNDING
3501 /* Do it with several push insns if that doesn't take lots of insns
3502 and if there is no difficulty with push insns that skip bytes
3503 on the stack for alignment purposes. */
3504 if (args_addr == 0
f73ad30e 3505 && PUSH_ARGS
bbf6f052
RK
3506 && GET_CODE (size) == CONST_INT
3507 && skip == 0
f26aca6d 3508 && MEM_ALIGN (xinner) >= align
15914757 3509 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
bbf6f052
RK
3510 /* Here we avoid the case of a structure whose weak alignment
3511 forces many pushes of a small amount of data,
3512 and such small pushes do rounding that causes trouble. */
e1565e65 3513 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
19caa751 3514 || align >= BIGGEST_ALIGNMENT
f1eaaf73
DE
3515 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3516 == (align / BITS_PER_UNIT)))
bbf6f052
RK
3517 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3518 {
3519 /* Push padding now if padding above and stack grows down,
3520 or if padding below and stack grows up.
3521 But if space already allocated, this has already been done. */
3522 if (extra && args_addr == 0
3523 && where_pad != none && where_pad != stack_direction)
906c4e36 3524 anti_adjust_stack (GEN_INT (extra));
bbf6f052 3525
8fd3cf4e 3526 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
bbf6f052
RK
3527 }
3528 else
3a94c984 3529#endif /* PUSH_ROUNDING */
bbf6f052 3530 {
7ab923cc
JJ
3531 rtx target;
3532
bbf6f052
RK
3533 /* Otherwise make space on the stack and copy the data
3534 to the address of that space. */
3535
3536 /* Deduct words put into registers from the size we must copy. */
3537 if (partial != 0)
3538 {
3539 if (GET_CODE (size) == CONST_INT)
906c4e36 3540 size = GEN_INT (INTVAL (size) - used);
bbf6f052
RK
3541 else
3542 size = expand_binop (GET_MODE (size), sub_optab, size,
906c4e36
RK
3543 GEN_INT (used), NULL_RTX, 0,
3544 OPTAB_LIB_WIDEN);
bbf6f052
RK
3545 }
3546
3547 /* Get the address of the stack space.
3548 In this case, we do not deal with EXTRA separately.
3549 A single stack adjust will do. */
3550 if (! args_addr)
3551 {
3552 temp = push_block (size, extra, where_pad == downward);
3553 extra = 0;
3554 }
3555 else if (GET_CODE (args_so_far) == CONST_INT)
3556 temp = memory_address (BLKmode,
3557 plus_constant (args_addr,
3558 skip + INTVAL (args_so_far)));
3559 else
3560 temp = memory_address (BLKmode,
38a448ca
RH
3561 plus_constant (gen_rtx_PLUS (Pmode,
3562 args_addr,
3563 args_so_far),
bbf6f052 3564 skip));
4ca79136
RH
3565
3566 if (!ACCUMULATE_OUTGOING_ARGS)
3567 {
3568 /* If the source is referenced relative to the stack pointer,
3569 copy it to another register to stabilize it. We do not need
3570 to do this if we know that we won't be changing sp. */
3571
3572 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3573 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3574 temp = copy_to_reg (temp);
3575 }
3576
3a94c984 3577 target = gen_rtx_MEM (BLKmode, temp);
7ab923cc 3578
3a94c984
KH
3579 if (type != 0)
3580 {
3581 set_mem_attributes (target, type, 1);
3582 /* Function incoming arguments may overlap with sibling call
3583 outgoing arguments and we cannot allow reordering of reads
3584 from function arguments with stores to outgoing arguments
3585 of sibling calls. */
ba4828e0 3586 set_mem_alias_set (target, 0);
3a94c984 3587 }
4ca79136 3588
44bb111a
RH
3589 /* ALIGN may well be better aligned than TYPE, e.g. due to
3590 PARM_BOUNDARY. Assume the caller isn't lying. */
3591 set_mem_align (target, align);
4ca79136 3592
44bb111a 3593 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
bbf6f052
RK
3594 }
3595 }
3596 else if (partial > 0)
3597 {
3598 /* Scalar partly in registers. */
3599
3600 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3601 int i;
3602 int not_stack;
3603 /* # words of start of argument
3604 that we must make space for but need not store. */
3605 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3606 int args_offset = INTVAL (args_so_far);
3607 int skip;
3608
3609 /* Push padding now if padding above and stack grows down,
3610 or if padding below and stack grows up.
3611 But if space already allocated, this has already been done. */
3612 if (extra && args_addr == 0
3613 && where_pad != none && where_pad != stack_direction)
906c4e36 3614 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
3615
3616 /* If we make space by pushing it, we might as well push
3617 the real data. Otherwise, we can leave OFFSET nonzero
3618 and leave the space uninitialized. */
3619 if (args_addr == 0)
3620 offset = 0;
3621
3622 /* Now NOT_STACK gets the number of words that we don't need to
3623 allocate on the stack. */
3624 not_stack = partial - offset;
3625
3626 /* If the partial register-part of the arg counts in its stack size,
3627 skip the part of stack space corresponding to the registers.
3628 Otherwise, start copying to the beginning of the stack space,
3629 by setting SKIP to 0. */
e5e809f4 3630 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
bbf6f052
RK
3631
3632 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3633 x = validize_mem (force_const_mem (mode, x));
3634
3635 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3636 SUBREGs of such registers are not allowed. */
3637 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3638 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3639 x = copy_to_reg (x);
3640
3641 /* Loop over all the words allocated on the stack for this arg. */
3642 /* We can do it by words, because any scalar bigger than a word
3643 has a size a multiple of a word. */
3644#ifndef PUSH_ARGS_REVERSED
3645 for (i = not_stack; i < size; i++)
3646#else
3647 for (i = size - 1; i >= not_stack; i--)
3648#endif
3649 if (i >= not_stack + offset)
3650 emit_push_insn (operand_subword_force (x, i, mode),
906c4e36
RK
3651 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3652 0, args_addr,
3653 GEN_INT (args_offset + ((i - not_stack + skip)
e5e809f4 3654 * UNITS_PER_WORD)),
4fc026cd 3655 reg_parm_stack_space, alignment_pad);
bbf6f052
RK
3656 }
3657 else
3658 {
3659 rtx addr;
3bdf5ad1 3660 rtx dest;
bbf6f052
RK
3661
3662 /* Push padding now if padding above and stack grows down,
3663 or if padding below and stack grows up.
3664 But if space already allocated, this has already been done. */
3665 if (extra && args_addr == 0
3666 && where_pad != none && where_pad != stack_direction)
906c4e36 3667 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
3668
3669#ifdef PUSH_ROUNDING
f73ad30e 3670 if (args_addr == 0 && PUSH_ARGS)
566aa174 3671 emit_single_push_insn (mode, x, type);
bbf6f052
RK
3672 else
3673#endif
921b3427
RK
3674 {
3675 if (GET_CODE (args_so_far) == CONST_INT)
3676 addr
3677 = memory_address (mode,
3a94c984 3678 plus_constant (args_addr,
921b3427 3679 INTVAL (args_so_far)));
3a94c984 3680 else
38a448ca
RH
3681 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3682 args_so_far));
566aa174
JH
3683 dest = gen_rtx_MEM (mode, addr);
3684 if (type != 0)
3685 {
3686 set_mem_attributes (dest, type, 1);
3687 /* Function incoming arguments may overlap with sibling call
3688 outgoing arguments and we cannot allow reordering of reads
3689 from function arguments with stores to outgoing arguments
3690 of sibling calls. */
ba4828e0 3691 set_mem_alias_set (dest, 0);
566aa174 3692 }
bbf6f052 3693
566aa174 3694 emit_move_insn (dest, x);
566aa174 3695 }
bbf6f052
RK
3696 }
3697
bbf6f052
RK
3698 /* If part should go in registers, copy that part
3699 into the appropriate registers. Do this now, at the end,
3700 since mem-to-mem copies above may do function calls. */
cd048831 3701 if (partial > 0 && reg != 0)
fffa9c1d
JW
3702 {
3703 /* Handle calls that pass values in multiple non-contiguous locations.
3704 The Irix 6 ABI has examples of this. */
3705 if (GET_CODE (reg) == PARALLEL)
6e985040 3706 emit_group_load (reg, x, type, -1);
fffa9c1d
JW
3707 else
3708 move_block_to_reg (REGNO (reg), x, partial, mode);
3709 }
bbf6f052
RK
3710
3711 if (extra && args_addr == 0 && where_pad == stack_direction)
906c4e36 3712 anti_adjust_stack (GEN_INT (extra));
3a94c984 3713
3ea2292a 3714 if (alignment_pad && args_addr == 0)
4fc026cd 3715 anti_adjust_stack (alignment_pad);
bbf6f052
RK
3716}
3717\f
296b4ed9
RK
3718/* Return X if X can be used as a subtarget in a sequence of arithmetic
3719 operations. */
3720
3721static rtx
502b8322 3722get_subtarget (rtx x)
296b4ed9
RK
3723{
3724 return ((x == 0
3725 /* Only registers can be subtargets. */
3726 || GET_CODE (x) != REG
3727 /* If the register is readonly, it can't be set more than once. */
3728 || RTX_UNCHANGING_P (x)
3729 /* Don't use hard regs to avoid extending their life. */
3730 || REGNO (x) < FIRST_PSEUDO_REGISTER
3731 /* Avoid subtargets inside loops,
3732 since they hide some invariant expressions. */
3733 || preserve_subexpressions_p ())
3734 ? 0 : x);
3735}
3736
bbf6f052
RK
3737/* Expand an assignment that stores the value of FROM into TO.
3738 If WANT_VALUE is nonzero, return an rtx for the value of TO.
709f5be1
RS
3739 (This may contain a QUEUED rtx;
3740 if the value is constant, this rtx is a constant.)
b90f141a 3741 Otherwise, the returned value is NULL_RTX. */
bbf6f052
RK
3742
3743rtx
b90f141a 3744expand_assignment (tree to, tree from, int want_value)
bbf6f052 3745{
b3694847 3746 rtx to_rtx = 0;
bbf6f052
RK
3747 rtx result;
3748
3749 /* Don't crash if the lhs of the assignment was erroneous. */
3750
3751 if (TREE_CODE (to) == ERROR_MARK)
709f5be1
RS
3752 {
3753 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3754 return want_value ? result : NULL_RTX;
3755 }
bbf6f052
RK
3756
3757 /* Assignment of a structure component needs special treatment
3758 if the structure component's rtx is not simply a MEM.
6be58303
JW
3759 Assignment of an array element at a constant index, and assignment of
3760 an array element in an unaligned packed structure field, has the same
3761 problem. */
bbf6f052 3762
08293add 3763 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
7c02ae17
DE
3764 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
3765 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
bbf6f052
RK
3766 {
3767 enum machine_mode mode1;
770ae6cc 3768 HOST_WIDE_INT bitsize, bitpos;
a06ef755 3769 rtx orig_to_rtx;
7bb0943f 3770 tree offset;
bbf6f052
RK
3771 int unsignedp;
3772 int volatilep = 0;
0088fcb1
RK
3773 tree tem;
3774
3775 push_temp_slots ();
839c4796 3776 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
a06ef755 3777 &unsignedp, &volatilep);
bbf6f052
RK
3778
3779 /* If we are going to use store_bit_field and extract_bit_field,
3780 make sure to_rtx will be safe for multiple use. */
3781
3782 if (mode1 == VOIDmode && want_value)
3783 tem = stabilize_reference (tem);
3784
1ed1b4fb
RK
3785 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3786
7bb0943f
RS
3787 if (offset != 0)
3788 {
e3c8ea67 3789 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
7bb0943f
RS
3790
3791 if (GET_CODE (to_rtx) != MEM)
3792 abort ();
bd070e1a 3793
bd070e1a 3794#ifdef POINTERS_EXTEND_UNSIGNED
4b6c1672 3795 if (GET_MODE (offset_rtx) != Pmode)
267b28bd 3796 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
fa06ab5c
RK
3797#else
3798 if (GET_MODE (offset_rtx) != ptr_mode)
3799 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
bd070e1a 3800#endif
bd070e1a 3801
9a7b9f4f
JL
3802 /* A constant address in TO_RTX can have VOIDmode, we must not try
3803 to call force_reg for that case. Avoid that case. */
89752202
HB
3804 if (GET_CODE (to_rtx) == MEM
3805 && GET_MODE (to_rtx) == BLKmode
9a7b9f4f 3806 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
a06ef755 3807 && bitsize > 0
3a94c984 3808 && (bitpos % bitsize) == 0
89752202 3809 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
a06ef755 3810 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
89752202 3811 {
e3c8ea67 3812 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
89752202
HB
3813 bitpos = 0;
3814 }
3815
0d4903b8 3816 to_rtx = offset_address (to_rtx, offset_rtx,
d50a16c4
EB
3817 highest_pow2_factor_for_target (to,
3818 offset));
7bb0943f 3819 }
c5c76735 3820
998d7deb
RH
3821 if (GET_CODE (to_rtx) == MEM)
3822 {
998d7deb
RH
3823 /* If the field is at offset zero, we could have been given the
3824 DECL_RTX of the parent struct. Don't munge it. */
3825 to_rtx = shallow_copy_rtx (to_rtx);
3826
6f1087be 3827 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
998d7deb 3828 }
effbcc6a 3829
a06ef755
RK
3830 /* Deal with volatile and readonly fields. The former is only done
3831 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3832 if (volatilep && GET_CODE (to_rtx) == MEM)
3833 {
3834 if (to_rtx == orig_to_rtx)
3835 to_rtx = copy_rtx (to_rtx);
3836 MEM_VOLATILE_P (to_rtx) = 1;
bbf6f052
RK
3837 }
3838
956d6950 3839 if (TREE_CODE (to) == COMPONENT_REF
d76bc29c
EB
3840 && TREE_READONLY (TREE_OPERAND (to, 1))
3841 /* We can't assert that a MEM won't be set more than once
3842 if the component is not addressable because another
3843 non-addressable component may be referenced by the same MEM. */
3844 && ! (GET_CODE (to_rtx) == MEM && ! can_address_p (to)))
956d6950 3845 {
a06ef755 3846 if (to_rtx == orig_to_rtx)
956d6950 3847 to_rtx = copy_rtx (to_rtx);
956d6950
JL
3848 RTX_UNCHANGING_P (to_rtx) = 1;
3849 }
3850
a84b4898 3851 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
a06ef755
RK
3852 {
3853 if (to_rtx == orig_to_rtx)
3854 to_rtx = copy_rtx (to_rtx);
3855 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3856 }
3857
a06ef755
RK
3858 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3859 (want_value
3860 /* Spurious cast for HPUX compiler. */
3861 ? ((enum machine_mode)
3862 TYPE_MODE (TREE_TYPE (to)))
3863 : VOIDmode),
3864 unsignedp, TREE_TYPE (tem), get_alias_set (to));
a69beca1 3865
a06ef755
RK
3866 preserve_temp_slots (result);
3867 free_temp_slots ();
3868 pop_temp_slots ();
a69beca1 3869
a06ef755
RK
3870 /* If the value is meaningful, convert RESULT to the proper mode.
3871 Otherwise, return nothing. */
3872 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3873 TYPE_MODE (TREE_TYPE (from)),
3874 result,
8df83eae 3875 TYPE_UNSIGNED (TREE_TYPE (to)))
a06ef755 3876 : NULL_RTX);
bbf6f052
RK
3877 }
3878
cd1db108
RS
3879 /* If the rhs is a function call and its value is not an aggregate,
3880 call the function before we start to compute the lhs.
3881 This is needed for correct code for cases such as
3882 val = setjmp (buf) on machines where reference to val
1ad87b63
RK
3883 requires loading up part of an address in a separate insn.
3884
1858863b
JW
3885 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3886 since it might be a promoted variable where the zero- or sign- extension
3887 needs to be done. Handling this in the normal way is safe because no
3888 computation is done before the call. */
61f71b34 3889 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
b35cd3c1 3890 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
1858863b
JW
3891 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3892 && GET_CODE (DECL_RTL (to)) == REG))
cd1db108 3893 {
0088fcb1
RK
3894 rtx value;
3895
3896 push_temp_slots ();
3897 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
cd1db108 3898 if (to_rtx == 0)
37a08a29 3899 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
aaf87c45 3900
fffa9c1d
JW
3901 /* Handle calls that return values in multiple non-contiguous locations.
3902 The Irix 6 ABI has examples of this. */
3903 if (GET_CODE (to_rtx) == PARALLEL)
6e985040
AM
3904 emit_group_load (to_rtx, value, TREE_TYPE (from),
3905 int_size_in_bytes (TREE_TYPE (from)));
fffa9c1d 3906 else if (GET_MODE (to_rtx) == BLKmode)
44bb111a 3907 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
aaf87c45 3908 else
6419e5b0 3909 {
5ae6cd0d 3910 if (POINTER_TYPE_P (TREE_TYPE (to)))
6419e5b0 3911 value = convert_memory_address (GET_MODE (to_rtx), value);
6419e5b0
DT
3912 emit_move_insn (to_rtx, value);
3913 }
cd1db108
RS
3914 preserve_temp_slots (to_rtx);
3915 free_temp_slots ();
0088fcb1 3916 pop_temp_slots ();
709f5be1 3917 return want_value ? to_rtx : NULL_RTX;
cd1db108
RS
3918 }
3919
bbf6f052
RK
3920 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3921 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3922
3923 if (to_rtx == 0)
37a08a29 3924 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
bbf6f052 3925
86d38d25 3926 /* Don't move directly into a return register. */
14a774a9
RK
3927 if (TREE_CODE (to) == RESULT_DECL
3928 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
86d38d25 3929 {
0088fcb1
RK
3930 rtx temp;
3931
3932 push_temp_slots ();
3933 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
14a774a9
RK
3934
3935 if (GET_CODE (to_rtx) == PARALLEL)
6e985040
AM
3936 emit_group_load (to_rtx, temp, TREE_TYPE (from),
3937 int_size_in_bytes (TREE_TYPE (from)));
14a774a9
RK
3938 else
3939 emit_move_insn (to_rtx, temp);
3940
86d38d25
RS
3941 preserve_temp_slots (to_rtx);
3942 free_temp_slots ();
0088fcb1 3943 pop_temp_slots ();
709f5be1 3944 return want_value ? to_rtx : NULL_RTX;
86d38d25
RS
3945 }
3946
bbf6f052
RK
3947 /* In case we are returning the contents of an object which overlaps
3948 the place the value is being stored, use a safe function when copying
3949 a value through a pointer into a structure value return block. */
3950 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3951 && current_function_returns_struct
3952 && !current_function_returns_pcc_struct)
3953 {
0088fcb1
RK
3954 rtx from_rtx, size;
3955
3956 push_temp_slots ();
33a20d10 3957 size = expr_size (from);
37a08a29 3958 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
bbf6f052 3959
4ca79136
RH
3960 if (TARGET_MEM_FUNCTIONS)
3961 emit_library_call (memmove_libfunc, LCT_NORMAL,
3962 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3963 XEXP (from_rtx, 0), Pmode,
3964 convert_to_mode (TYPE_MODE (sizetype),
8df83eae 3965 size, TYPE_UNSIGNED (sizetype)),
4ca79136
RH
3966 TYPE_MODE (sizetype));
3967 else
3968 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3969 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3970 XEXP (to_rtx, 0), Pmode,
3971 convert_to_mode (TYPE_MODE (integer_type_node),
3972 size,
8df83eae 3973 TYPE_UNSIGNED (integer_type_node)),
4ca79136 3974 TYPE_MODE (integer_type_node));
bbf6f052
RK
3975
3976 preserve_temp_slots (to_rtx);
3977 free_temp_slots ();
0088fcb1 3978 pop_temp_slots ();
709f5be1 3979 return want_value ? to_rtx : NULL_RTX;
bbf6f052
RK
3980 }
3981
3982 /* Compute FROM and store the value in the rtx we got. */
3983
0088fcb1 3984 push_temp_slots ();
bbf6f052
RK
3985 result = store_expr (from, to_rtx, want_value);
3986 preserve_temp_slots (result);
3987 free_temp_slots ();
0088fcb1 3988 pop_temp_slots ();
709f5be1 3989 return want_value ? result : NULL_RTX;
bbf6f052
RK
3990}
3991
3992/* Generate code for computing expression EXP,
3993 and storing the value into TARGET.
bbf6f052
RK
3994 TARGET may contain a QUEUED rtx.
3995
8403445a 3996 If WANT_VALUE & 1 is nonzero, return a copy of the value
709f5be1
RS
3997 not in TARGET, so that we can be sure to use the proper
3998 value in a containing expression even if TARGET has something
3999 else stored in it. If possible, we copy the value through a pseudo
4000 and return that pseudo. Or, if the value is constant, we try to
4001 return the constant. In some cases, we return a pseudo
4002 copied *from* TARGET.
4003
4004 If the mode is BLKmode then we may return TARGET itself.
4005 It turns out that in BLKmode it doesn't cause a problem.
4006 because C has no operators that could combine two different
4007 assignments into the same BLKmode object with different values
4008 with no sequence point. Will other languages need this to
4009 be more thorough?
4010
8403445a 4011 If WANT_VALUE & 1 is 0, we return NULL, to make sure
709f5be1 4012 to catch quickly any cases where the caller uses the value
8403445a
AM
4013 and fails to set WANT_VALUE.
4014
4015 If WANT_VALUE & 2 is set, this is a store into a call param on the
4016 stack, and block moves may need to be treated specially. */
bbf6f052
RK
4017
4018rtx
502b8322 4019store_expr (tree exp, rtx target, int want_value)
bbf6f052 4020{
b3694847 4021 rtx temp;
0fab64a3 4022 rtx alt_rtl = NULL_RTX;
1bbd65cd 4023 rtx mark = mark_queue ();
bbf6f052 4024 int dont_return_target = 0;
e5408e52 4025 int dont_store_target = 0;
bbf6f052 4026
847311f4
AL
4027 if (VOID_TYPE_P (TREE_TYPE (exp)))
4028 {
4029 /* C++ can generate ?: expressions with a throw expression in one
4030 branch and an rvalue in the other. Here, we resolve attempts to
4d6922ee 4031 store the throw expression's nonexistent result. */
847311f4
AL
4032 if (want_value)
4033 abort ();
4034 expand_expr (exp, const0_rtx, VOIDmode, 0);
4035 return NULL_RTX;
4036 }
bbf6f052
RK
4037 if (TREE_CODE (exp) == COMPOUND_EXPR)
4038 {
4039 /* Perform first part of compound expression, then assign from second
4040 part. */
8403445a
AM
4041 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4042 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
bbf6f052 4043 emit_queue ();
709f5be1 4044 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
bbf6f052
RK
4045 }
4046 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4047 {
4048 /* For conditional expression, get safe form of the target. Then
4049 test the condition, doing the appropriate assignment on either
4050 side. This avoids the creation of unnecessary temporaries.
4051 For non-BLKmode, it is more efficient not to do this. */
4052
4053 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4054
4055 emit_queue ();
4056 target = protect_from_queue (target, 1);
4057
dabf8373 4058 do_pending_stack_adjust ();
bbf6f052
RK
4059 NO_DEFER_POP;
4060 jumpifnot (TREE_OPERAND (exp, 0), lab1);
956d6950 4061 start_cleanup_deferral ();
8403445a 4062 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
956d6950 4063 end_cleanup_deferral ();
bbf6f052
RK
4064 emit_queue ();
4065 emit_jump_insn (gen_jump (lab2));
4066 emit_barrier ();
4067 emit_label (lab1);
956d6950 4068 start_cleanup_deferral ();
8403445a 4069 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
956d6950 4070 end_cleanup_deferral ();
bbf6f052
RK
4071 emit_queue ();
4072 emit_label (lab2);
4073 OK_DEFER_POP;
a3a58acc 4074
8403445a 4075 return want_value & 1 ? target : NULL_RTX;
bbf6f052 4076 }
bbf6f052 4077 else if (queued_subexp_p (target))
709f5be1
RS
4078 /* If target contains a postincrement, let's not risk
4079 using it as the place to generate the rhs. */
bbf6f052
RK
4080 {
4081 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4082 {
4083 /* Expand EXP into a new pseudo. */
4084 temp = gen_reg_rtx (GET_MODE (target));
8403445a
AM
4085 temp = expand_expr (exp, temp, GET_MODE (target),
4086 (want_value & 2
4087 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
bbf6f052
RK
4088 }
4089 else
8403445a
AM
4090 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4091 (want_value & 2
4092 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
709f5be1
RS
4093
4094 /* If target is volatile, ANSI requires accessing the value
4095 *from* the target, if it is accessed. So make that happen.
4096 In no case return the target itself. */
8403445a 4097 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
709f5be1 4098 dont_return_target = 1;
bbf6f052 4099 }
8403445a
AM
4100 else if ((want_value & 1) != 0
4101 && GET_CODE (target) == MEM
4102 && ! MEM_VOLATILE_P (target)
12f06d17
CH
4103 && GET_MODE (target) != BLKmode)
4104 /* If target is in memory and caller wants value in a register instead,
4105 arrange that. Pass TARGET as target for expand_expr so that,
4106 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4107 We know expand_expr will not use the target in that case.
4108 Don't do this if TARGET is volatile because we are supposed
4109 to write it and then read it. */
4110 {
8403445a
AM
4111 temp = expand_expr (exp, target, GET_MODE (target),
4112 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
12f06d17 4113 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
e5408e52
JJ
4114 {
4115 /* If TEMP is already in the desired TARGET, only copy it from
4116 memory and don't store it there again. */
4117 if (temp == target
4118 || (rtx_equal_p (temp, target)
4119 && ! side_effects_p (temp) && ! side_effects_p (target)))
4120 dont_store_target = 1;
4121 temp = copy_to_reg (temp);
4122 }
12f06d17
CH
4123 dont_return_target = 1;
4124 }
1499e0a8 4125 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
09da1532 4126 /* If this is a scalar in a register that is stored in a wider mode
1499e0a8
RK
4127 than the declared mode, compute the result into its declared mode
4128 and then convert to the wider mode. Our value is the computed
4129 expression. */
4130 {
b76b08ef
RK
4131 rtx inner_target = 0;
4132
5a32d038 4133 /* If we don't want a value, we can do the conversion inside EXP,
f635a84d
RK
4134 which will often result in some optimizations. Do the conversion
4135 in two steps: first change the signedness, if needed, then
ab6c58f1
RK
4136 the extend. But don't do this if the type of EXP is a subtype
4137 of something else since then the conversion might involve
4138 more than just converting modes. */
8403445a
AM
4139 if ((want_value & 1) == 0
4140 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
ab6c58f1 4141 && TREE_TYPE (TREE_TYPE (exp)) == 0)
f635a84d 4142 {
8df83eae 4143 if (TYPE_UNSIGNED (TREE_TYPE (exp))
f635a84d 4144 != SUBREG_PROMOTED_UNSIGNED_P (target))
ceef8ce4 4145 exp = convert
ae2bcd98 4146 (lang_hooks.types.signed_or_unsigned_type
ceef8ce4 4147 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
f635a84d 4148
ae2bcd98 4149 exp = convert (lang_hooks.types.type_for_mode
b0c48229
NB
4150 (GET_MODE (SUBREG_REG (target)),
4151 SUBREG_PROMOTED_UNSIGNED_P (target)),
f635a84d 4152 exp);
b76b08ef
RK
4153
4154 inner_target = SUBREG_REG (target);
f635a84d 4155 }
3a94c984 4156
8403445a
AM
4157 temp = expand_expr (exp, inner_target, VOIDmode,
4158 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
b258707c 4159
7abec5be 4160 /* If TEMP is a MEM and we want a result value, make the access
502b8322
AJ
4161 now so it gets done only once. Strictly speaking, this is
4162 only necessary if the MEM is volatile, or if the address
7abec5be
RH
4163 overlaps TARGET. But not performing the load twice also
4164 reduces the amount of rtl we generate and then have to CSE. */
8403445a 4165 if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
766f36c7
RK
4166 temp = copy_to_reg (temp);
4167
b258707c
RS
4168 /* If TEMP is a VOIDmode constant, use convert_modes to make
4169 sure that we properly convert it. */
4170 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
1f1b0541
RH
4171 {
4172 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4173 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4174 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4175 GET_MODE (target), temp,
4176 SUBREG_PROMOTED_UNSIGNED_P (target));
4177 }
b258707c 4178
1499e0a8
RK
4179 convert_move (SUBREG_REG (target), temp,
4180 SUBREG_PROMOTED_UNSIGNED_P (target));
3dbecef9
JW
4181
4182 /* If we promoted a constant, change the mode back down to match
4183 target. Otherwise, the caller might get confused by a result whose
4184 mode is larger than expected. */
4185
8403445a 4186 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
3dbecef9 4187 {
b3ca30df
JJ
4188 if (GET_MODE (temp) != VOIDmode)
4189 {
4190 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4191 SUBREG_PROMOTED_VAR_P (temp) = 1;
0fb7aeda 4192 SUBREG_PROMOTED_UNSIGNED_SET (temp,
7879b81e 4193 SUBREG_PROMOTED_UNSIGNED_P (target));
b3ca30df
JJ
4194 }
4195 else
4196 temp = convert_modes (GET_MODE (target),
4197 GET_MODE (SUBREG_REG (target)),
4198 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
3dbecef9
JW
4199 }
4200
8403445a 4201 return want_value & 1 ? temp : NULL_RTX;
1499e0a8 4202 }
bbf6f052
RK
4203 else
4204 {
0fab64a3
MM
4205 temp = expand_expr_real (exp, target, GET_MODE (target),
4206 (want_value & 2
4207 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4208 &alt_rtl);
766f36c7 4209 /* Return TARGET if it's a specified hardware register.
709f5be1
RS
4210 If TARGET is a volatile mem ref, either return TARGET
4211 or return a reg copied *from* TARGET; ANSI requires this.
4212
4213 Otherwise, if TEMP is not TARGET, return TEMP
4214 if it is constant (for efficiency),
4215 or if we really want the correct value. */
bbf6f052
RK
4216 if (!(target && GET_CODE (target) == REG
4217 && REGNO (target) < FIRST_PSEUDO_REGISTER)
709f5be1 4218 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
effbcc6a 4219 && ! rtx_equal_p (temp, target)
8403445a 4220 && (CONSTANT_P (temp) || (want_value & 1) != 0))
bbf6f052
RK
4221 dont_return_target = 1;
4222 }
4223
b258707c
RS
4224 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4225 the same as that of TARGET, adjust the constant. This is needed, for
4226 example, in case it is a CONST_DOUBLE and we want only a word-sized
4227 value. */
4228 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
c1da1f33 4229 && TREE_CODE (exp) != ERROR_MARK
b258707c
RS
4230 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4231 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
8df83eae 4232 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
b258707c 4233
bbf6f052 4234 /* If value was not generated in the target, store it there.
1bbd65cd
EB
4235 Convert the value to TARGET's type first if necessary and emit the
4236 pending incrementations that have been queued when expanding EXP.
4237 Note that we cannot emit the whole queue blindly because this will
4238 effectively disable the POST_INC optimization later.
4239
37a08a29 4240 If TEMP and TARGET compare equal according to rtx_equal_p, but
f3f2255a
R
4241 one or both of them are volatile memory refs, we have to distinguish
4242 two cases:
4243 - expand_expr has used TARGET. In this case, we must not generate
4244 another copy. This can be detected by TARGET being equal according
4245 to == .
4246 - expand_expr has not used TARGET - that means that the source just
4247 happens to have the same RTX form. Since temp will have been created
4248 by expand_expr, it will compare unequal according to == .
4249 We must generate a copy in this case, to reach the correct number
4250 of volatile memory references. */
bbf6f052 4251
6036acbb 4252 if ((! rtx_equal_p (temp, target)
f3f2255a
R
4253 || (temp != target && (side_effects_p (temp)
4254 || side_effects_p (target))))
e5408e52 4255 && TREE_CODE (exp) != ERROR_MARK
a9772b60 4256 && ! dont_store_target
9c5c5f2c
MM
4257 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4258 but TARGET is not valid memory reference, TEMP will differ
4259 from TARGET although it is really the same location. */
0fab64a3 4260 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
e56fc090
HPN
4261 /* If there's nothing to copy, don't bother. Don't call expr_size
4262 unless necessary, because some front-ends (C++) expr_size-hook
4263 aborts on objects that are not supposed to be bit-copied or
4264 bit-initialized. */
4265 && expr_size (exp) != const0_rtx)
bbf6f052 4266 {
1bbd65cd 4267 emit_insns_enqueued_after_mark (mark);
bbf6f052 4268 target = protect_from_queue (target, 1);
e6d55fd7 4269 temp = protect_from_queue (temp, 0);
bbf6f052 4270 if (GET_MODE (temp) != GET_MODE (target)
f0348c25 4271 && GET_MODE (temp) != VOIDmode)
bbf6f052 4272 {
8df83eae 4273 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
bbf6f052
RK
4274 if (dont_return_target)
4275 {
4276 /* In this case, we will return TEMP,
4277 so make sure it has the proper mode.
4278 But don't forget to store the value into TARGET. */
4279 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4280 emit_move_insn (target, temp);
4281 }
4282 else
4283 convert_move (target, temp, unsignedp);
4284 }
4285
4286 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4287 {
c24ae149
RK
4288 /* Handle copying a string constant into an array. The string
4289 constant may be shorter than the array. So copy just the string's
4290 actual length, and clear the rest. First get the size of the data
4291 type of the string, which is actually the size of the target. */
4292 rtx size = expr_size (exp);
bbf6f052 4293
e87b4f3f
RS
4294 if (GET_CODE (size) == CONST_INT
4295 && INTVAL (size) < TREE_STRING_LENGTH (exp))
8403445a
AM
4296 emit_block_move (target, temp, size,
4297 (want_value & 2
4298 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
e87b4f3f 4299 else
bbf6f052 4300 {
e87b4f3f
RS
4301 /* Compute the size of the data to copy from the string. */
4302 tree copy_size
c03b7665 4303 = size_binop (MIN_EXPR,
b50d17a1 4304 make_tree (sizetype, size),
fed3cef0 4305 size_int (TREE_STRING_LENGTH (exp)));
8403445a
AM
4306 rtx copy_size_rtx
4307 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4308 (want_value & 2
4309 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
e87b4f3f
RS
4310 rtx label = 0;
4311
4312 /* Copy that much. */
267b28bd 4313 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
8df83eae 4314 TYPE_UNSIGNED (sizetype));
8403445a
AM
4315 emit_block_move (target, temp, copy_size_rtx,
4316 (want_value & 2
4317 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
e87b4f3f 4318
88f63c77
RK
4319 /* Figure out how much is left in TARGET that we have to clear.
4320 Do all calculations in ptr_mode. */
e87b4f3f
RS
4321 if (GET_CODE (copy_size_rtx) == CONST_INT)
4322 {
c24ae149
RK
4323 size = plus_constant (size, -INTVAL (copy_size_rtx));
4324 target = adjust_address (target, BLKmode,
4325 INTVAL (copy_size_rtx));
e87b4f3f
RS
4326 }
4327 else
4328 {
fa06ab5c 4329 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
906c4e36
RK
4330 copy_size_rtx, NULL_RTX, 0,
4331 OPTAB_LIB_WIDEN);
e87b4f3f 4332
c24ae149
RK
4333#ifdef POINTERS_EXTEND_UNSIGNED
4334 if (GET_MODE (copy_size_rtx) != Pmode)
267b28bd 4335 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
8df83eae 4336 TYPE_UNSIGNED (sizetype));
c24ae149
RK
4337#endif
4338
4339 target = offset_address (target, copy_size_rtx,
4340 highest_pow2_factor (copy_size));
e87b4f3f 4341 label = gen_label_rtx ();
c5d5d461 4342 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
a06ef755 4343 GET_MODE (size), 0, label);
e87b4f3f
RS
4344 }
4345
4346 if (size != const0_rtx)
37a08a29 4347 clear_storage (target, size);
22619c3f 4348
e87b4f3f
RS
4349 if (label)
4350 emit_label (label);
bbf6f052
RK
4351 }
4352 }
fffa9c1d
JW
4353 /* Handle calls that return values in multiple non-contiguous locations.
4354 The Irix 6 ABI has examples of this. */
4355 else if (GET_CODE (target) == PARALLEL)
6e985040
AM
4356 emit_group_load (target, temp, TREE_TYPE (exp),
4357 int_size_in_bytes (TREE_TYPE (exp)));
bbf6f052 4358 else if (GET_MODE (temp) == BLKmode)
8403445a
AM
4359 emit_block_move (target, temp, expr_size (exp),
4360 (want_value & 2
4361 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
bbf6f052 4362 else
b0dccb00
RH
4363 {
4364 temp = force_operand (temp, target);
4365 if (temp != target)
4366 emit_move_insn (target, temp);
4367 }
bbf6f052 4368 }
709f5be1 4369
766f36c7 4370 /* If we don't want a value, return NULL_RTX. */
8403445a 4371 if ((want_value & 1) == 0)
766f36c7
RK
4372 return NULL_RTX;
4373
4374 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4375 ??? The latter test doesn't seem to make sense. */
4376 else if (dont_return_target && GET_CODE (temp) != MEM)
bbf6f052 4377 return temp;
766f36c7
RK
4378
4379 /* Return TARGET itself if it is a hard register. */
8403445a
AM
4380 else if ((want_value & 1) != 0
4381 && GET_MODE (target) != BLKmode
766f36c7
RK
4382 && ! (GET_CODE (target) == REG
4383 && REGNO (target) < FIRST_PSEUDO_REGISTER))
709f5be1 4384 return copy_to_reg (target);
3a94c984 4385
766f36c7 4386 else
709f5be1 4387 return target;
bbf6f052
RK
4388}
4389\f
6de9cd9a
DN
4390/* Examine CTOR. Discover how many scalar fields are set to non-zero
4391 values and place it in *P_NZ_ELTS. Discover how many scalar fields
4392 are set to non-constant values and place it in *P_NC_ELTS. */
9de08200 4393
6de9cd9a
DN
4394static void
4395categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4396 HOST_WIDE_INT *p_nc_elts)
9de08200 4397{
6de9cd9a
DN
4398 HOST_WIDE_INT nz_elts, nc_elts;
4399 tree list;
9de08200 4400
6de9cd9a
DN
4401 nz_elts = 0;
4402 nc_elts = 0;
4403
4404 for (list = CONSTRUCTOR_ELTS (ctor); list; list = TREE_CHAIN (list))
9de08200 4405 {
6de9cd9a
DN
4406 tree value = TREE_VALUE (list);
4407 tree purpose = TREE_PURPOSE (list);
4408 HOST_WIDE_INT mult;
9de08200 4409
6de9cd9a
DN
4410 mult = 1;
4411 if (TREE_CODE (purpose) == RANGE_EXPR)
4412 {
4413 tree lo_index = TREE_OPERAND (purpose, 0);
4414 tree hi_index = TREE_OPERAND (purpose, 1);
9de08200 4415
6de9cd9a
DN
4416 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4417 mult = (tree_low_cst (hi_index, 1)
4418 - tree_low_cst (lo_index, 1) + 1);
4419 }
9de08200 4420
6de9cd9a
DN
4421 switch (TREE_CODE (value))
4422 {
4423 case CONSTRUCTOR:
4424 {
4425 HOST_WIDE_INT nz = 0, nc = 0;
4426 categorize_ctor_elements_1 (value, &nz, &nc);
4427 nz_elts += mult * nz;
4428 nc_elts += mult * nc;
4429 }
4430 break;
9de08200 4431
6de9cd9a
DN
4432 case INTEGER_CST:
4433 case REAL_CST:
4434 if (!initializer_zerop (value))
4435 nz_elts += mult;
4436 break;
4437 case COMPLEX_CST:
4438 if (!initializer_zerop (TREE_REALPART (value)))
4439 nz_elts += mult;
4440 if (!initializer_zerop (TREE_IMAGPART (value)))
4441 nz_elts += mult;
4442 break;
4443 case VECTOR_CST:
4444 {
4445 tree v;
4446 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4447 if (!initializer_zerop (TREE_VALUE (v)))
4448 nz_elts += mult;
4449 }
4450 break;
69ef87e2 4451
6de9cd9a
DN
4452 default:
4453 nz_elts += mult;
4454 if (!initializer_constant_valid_p (value, TREE_TYPE (value)))
4455 nc_elts += mult;
4456 break;
4457 }
4458 }
69ef87e2 4459
6de9cd9a
DN
4460 *p_nz_elts += nz_elts;
4461 *p_nc_elts += nc_elts;
4462}
4463
4464void
4465categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4466 HOST_WIDE_INT *p_nc_elts)
4467{
4468 *p_nz_elts = 0;
4469 *p_nc_elts = 0;
4470 categorize_ctor_elements_1 (ctor, p_nz_elts, p_nc_elts);
4471}
4472
4473/* Count the number of scalars in TYPE. Return -1 on overflow or
4474 variable-sized. */
4475
4476HOST_WIDE_INT
4477count_type_elements (tree type)
4478{
4479 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4480 switch (TREE_CODE (type))
4481 {
4482 case ARRAY_TYPE:
4483 {
4484 tree telts = array_type_nelts (type);
4485 if (telts && host_integerp (telts, 1))
4486 {
4487 HOST_WIDE_INT n = tree_low_cst (telts, 1);
4488 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type));
4489 if (n == 0)
4490 return 0;
4491 if (max / n < m)
4492 return n * m;
4493 }
4494 return -1;
4495 }
4496
4497 case RECORD_TYPE:
4498 {
4499 HOST_WIDE_INT n = 0, t;
4500 tree f;
4501
4502 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4503 if (TREE_CODE (f) == FIELD_DECL)
4504 {
4505 t = count_type_elements (TREE_TYPE (f));
4506 if (t < 0)
4507 return -1;
4508 n += t;
4509 }
4510
4511 return n;
4512 }
9de08200 4513
6de9cd9a
DN
4514 case UNION_TYPE:
4515 case QUAL_UNION_TYPE:
4516 {
4517 /* Ho hum. How in the world do we guess here? Clearly it isn't
4518 right to count the fields. Guess based on the number of words. */
4519 HOST_WIDE_INT n = int_size_in_bytes (type);
4520 if (n < 0)
4521 return -1;
4522 return n / UNITS_PER_WORD;
4523 }
4524
4525 case COMPLEX_TYPE:
4526 return 2;
4527
4528 case VECTOR_TYPE:
4529 /* ??? This is broke. We should encode the vector width in the tree. */
4530 return GET_MODE_NUNITS (TYPE_MODE (type));
4531
4532 case INTEGER_TYPE:
4533 case REAL_TYPE:
4534 case ENUMERAL_TYPE:
4535 case BOOLEAN_TYPE:
4536 case CHAR_TYPE:
4537 case POINTER_TYPE:
4538 case OFFSET_TYPE:
4539 case REFERENCE_TYPE:
9de08200 4540 return 1;
3a94c984 4541
6de9cd9a
DN
4542 case VOID_TYPE:
4543 case METHOD_TYPE:
4544 case FILE_TYPE:
4545 case SET_TYPE:
4546 case FUNCTION_TYPE:
4547 case LANG_TYPE:
e9a25f70 4548 default:
6de9cd9a 4549 abort ();
9de08200 4550 }
9de08200
RK
4551}
4552
4553/* Return 1 if EXP contains mostly (3/4) zeros. */
4554
40209195 4555int
502b8322 4556mostly_zeros_p (tree exp)
9de08200 4557{
9de08200 4558 if (TREE_CODE (exp) == CONSTRUCTOR)
6de9cd9a 4559
9de08200 4560 {
6de9cd9a
DN
4561 HOST_WIDE_INT nz_elts, nc_elts, elts;
4562
4563 /* If there are no ranges of true bits, it is all zero. */
e1a43f73 4564 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
6de9cd9a
DN
4565 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4566
4567 categorize_ctor_elements (exp, &nz_elts, &nc_elts);
4568 elts = count_type_elements (TREE_TYPE (exp));
9de08200 4569
6de9cd9a 4570 return nz_elts < elts / 4;
9de08200
RK
4571 }
4572
6de9cd9a 4573 return initializer_zerop (exp);
9de08200
RK
4574}
4575\f
e1a43f73
PB
4576/* Helper function for store_constructor.
4577 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4578 TYPE is the type of the CONSTRUCTOR, not the element type.
04050c69 4579 CLEARED is as for store_constructor.
23cb1766 4580 ALIAS_SET is the alias set to use for any stores.
23ccec44
JW
4581
4582 This provides a recursive shortcut back to store_constructor when it isn't
4583 necessary to go through store_field. This is so that we can pass through
4584 the cleared field to let store_constructor know that we may not have to
4585 clear a substructure if the outer structure has already been cleared. */
e1a43f73
PB
4586
4587static void
502b8322
AJ
4588store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4589 HOST_WIDE_INT bitpos, enum machine_mode mode,
4590 tree exp, tree type, int cleared, int alias_set)
e1a43f73
PB
4591{
4592 if (TREE_CODE (exp) == CONSTRUCTOR
6c89c39a
RK
4593 /* We can only call store_constructor recursively if the size and
4594 bit position are on a byte boundary. */
23ccec44 4595 && bitpos % BITS_PER_UNIT == 0
6c89c39a 4596 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
cc2902df 4597 /* If we have a nonzero bitpos for a register target, then we just
23ccec44
JW
4598 let store_field do the bitfield handling. This is unlikely to
4599 generate unnecessary clear instructions anyways. */
4600 && (bitpos == 0 || GET_CODE (target) == MEM))
e1a43f73 4601 {
61cb205c
RK
4602 if (GET_CODE (target) == MEM)
4603 target
4604 = adjust_address (target,
4605 GET_MODE (target) == BLKmode
4606 || 0 != (bitpos
4607 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4608 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
23cb1766 4609
e0339ef7 4610
04050c69 4611 /* Update the alias set, if required. */
10b76d73
RK
4612 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4613 && MEM_ALIAS_SET (target) != 0)
70072ed9
RK
4614 {
4615 target = copy_rtx (target);
4616 set_mem_alias_set (target, alias_set);
4617 }
e0339ef7 4618
dbb5c281 4619 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
e1a43f73
PB
4620 }
4621 else
a06ef755
RK
4622 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4623 alias_set);
e1a43f73
PB
4624}
4625
bbf6f052 4626/* Store the value of constructor EXP into the rtx TARGET.
04050c69
RK
4627 TARGET is either a REG or a MEM; we know it cannot conflict, since
4628 safe_from_p has been called.
dbb5c281
RK
4629 CLEARED is true if TARGET is known to have been zero'd.
4630 SIZE is the number of bytes of TARGET we are allowed to modify: this
b7010412
RK
4631 may not be the same as the size of EXP if we are assigning to a field
4632 which has been packed to exclude padding bits. */
bbf6f052
RK
4633
4634static void
502b8322 4635store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
bbf6f052 4636{
4af3895e 4637 tree type = TREE_TYPE (exp);
a5efcd63 4638#ifdef WORD_REGISTER_OPERATIONS
13eb1f7f 4639 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
a5efcd63 4640#endif
4af3895e 4641
e44842fe
RK
4642 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4643 || TREE_CODE (type) == QUAL_UNION_TYPE)
bbf6f052 4644 {
b3694847 4645 tree elt;
bbf6f052 4646
dbb5c281
RK
4647 /* If size is zero or the target is already cleared, do nothing. */
4648 if (size == 0 || cleared)
2c430630 4649 cleared = 1;
04050c69 4650 /* We either clear the aggregate or indicate the value is dead. */
2c430630
RS
4651 else if ((TREE_CODE (type) == UNION_TYPE
4652 || TREE_CODE (type) == QUAL_UNION_TYPE)
4653 && ! CONSTRUCTOR_ELTS (exp))
04050c69 4654 /* If the constructor is empty, clear the union. */
a59f8640 4655 {
dbb5c281 4656 clear_storage (target, expr_size (exp));
04050c69 4657 cleared = 1;
a59f8640 4658 }
4af3895e
JVA
4659
4660 /* If we are building a static constructor into a register,
4661 set the initial value as zero so we can fold the value into
67225c15
RK
4662 a constant. But if more than one register is involved,
4663 this probably loses. */
2c430630 4664 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
67225c15 4665 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
9de08200 4666 {
04050c69 4667 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
9de08200
RK
4668 cleared = 1;
4669 }
4670
4671 /* If the constructor has fewer fields than the structure
4672 or if we are initializing the structure to mostly zeros,
0d97bf4c 4673 clear the whole structure first. Don't do this if TARGET is a
fcf1b822
RK
4674 register whose mode size isn't equal to SIZE since clear_storage
4675 can't handle this case. */
2c430630
RS
4676 else if (((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
4677 || mostly_zeros_p (exp))
fcf1b822 4678 && (GET_CODE (target) != REG
dbb5c281 4679 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
04050c69 4680 == size)))
9de08200 4681 {
337f4314
RK
4682 rtx xtarget = target;
4683
4684 if (readonly_fields_p (type))
4685 {
4686 xtarget = copy_rtx (xtarget);
4687 RTX_UNCHANGING_P (xtarget) = 1;
4688 }
4689
dbb5c281 4690 clear_storage (xtarget, GEN_INT (size));
9de08200
RK
4691 cleared = 1;
4692 }
dbb5c281
RK
4693
4694 if (! cleared)
4695 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
bbf6f052
RK
4696
4697 /* Store each element of the constructor into
4698 the corresponding field of TARGET. */
4699
4700 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4701 {
b3694847 4702 tree field = TREE_PURPOSE (elt);
34c73909 4703 tree value = TREE_VALUE (elt);
b3694847 4704 enum machine_mode mode;
770ae6cc
RK
4705 HOST_WIDE_INT bitsize;
4706 HOST_WIDE_INT bitpos = 0;
770ae6cc 4707 tree offset;
b50d17a1 4708 rtx to_rtx = target;
bbf6f052 4709
f32fd778
RS
4710 /* Just ignore missing fields.
4711 We cleared the whole structure, above,
4712 if any fields are missing. */
4713 if (field == 0)
4714 continue;
4715
6de9cd9a 4716 if (cleared && initializer_zerop (value))
e1a43f73 4717 continue;
9de08200 4718
770ae6cc
RK
4719 if (host_integerp (DECL_SIZE (field), 1))
4720 bitsize = tree_low_cst (DECL_SIZE (field), 1);
14a774a9
RK
4721 else
4722 bitsize = -1;
4723
bbf6f052
RK
4724 mode = DECL_MODE (field);
4725 if (DECL_BIT_FIELD (field))
4726 mode = VOIDmode;
4727
770ae6cc
RK
4728 offset = DECL_FIELD_OFFSET (field);
4729 if (host_integerp (offset, 0)
4730 && host_integerp (bit_position (field), 0))
4731 {
4732 bitpos = int_bit_position (field);
4733 offset = 0;
4734 }
b50d17a1 4735 else
770ae6cc 4736 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
3a94c984 4737
b50d17a1
RK
4738 if (offset)
4739 {
4740 rtx offset_rtx;
4741
6fce44af
RK
4742 offset
4743 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4744 make_tree (TREE_TYPE (exp),
4745 target));
bbf6f052 4746
b50d17a1
RK
4747 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4748 if (GET_CODE (to_rtx) != MEM)
4749 abort ();
4750
bd070e1a 4751#ifdef POINTERS_EXTEND_UNSIGNED
4b6c1672 4752 if (GET_MODE (offset_rtx) != Pmode)
267b28bd 4753 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
fa06ab5c
RK
4754#else
4755 if (GET_MODE (offset_rtx) != ptr_mode)
4756 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
bd070e1a 4757#endif
bd070e1a 4758
0d4903b8
RK
4759 to_rtx = offset_address (to_rtx, offset_rtx,
4760 highest_pow2_factor (offset));
b50d17a1 4761 }
c5c76735 4762
4e44c1ef 4763 if (TREE_READONLY (field))
cf04eb80 4764 {
9151b3bf 4765 if (GET_CODE (to_rtx) == MEM)
effbcc6a
RK
4766 to_rtx = copy_rtx (to_rtx);
4767
cf04eb80
RK
4768 RTX_UNCHANGING_P (to_rtx) = 1;
4769 }
4770
34c73909
R
4771#ifdef WORD_REGISTER_OPERATIONS
4772 /* If this initializes a field that is smaller than a word, at the
4773 start of a word, try to widen it to a full word.
4774 This special case allows us to output C++ member function
4775 initializations in a form that the optimizers can understand. */
770ae6cc 4776 if (GET_CODE (target) == REG
34c73909
R
4777 && bitsize < BITS_PER_WORD
4778 && bitpos % BITS_PER_WORD == 0
4779 && GET_MODE_CLASS (mode) == MODE_INT
4780 && TREE_CODE (value) == INTEGER_CST
13eb1f7f
RK
4781 && exp_size >= 0
4782 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
34c73909
R
4783 {
4784 tree type = TREE_TYPE (value);
04050c69 4785
34c73909
R
4786 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4787 {
ae2bcd98 4788 type = lang_hooks.types.type_for_size
8df83eae 4789 (BITS_PER_WORD, TYPE_UNSIGNED (type));
34c73909
R
4790 value = convert (type, value);
4791 }
04050c69 4792
34c73909
R
4793 if (BYTES_BIG_ENDIAN)
4794 value
4795 = fold (build (LSHIFT_EXPR, type, value,
4796 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4797 bitsize = BITS_PER_WORD;
4798 mode = word_mode;
4799 }
4800#endif
10b76d73
RK
4801
4802 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4803 && DECL_NONADDRESSABLE_P (field))
4804 {
4805 to_rtx = copy_rtx (to_rtx);
4806 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4807 }
4808
c5c76735 4809 store_constructor_field (to_rtx, bitsize, bitpos, mode,
8b6000fc 4810 value, type, cleared,
10b76d73 4811 get_alias_set (TREE_TYPE (field)));
bbf6f052
RK
4812 }
4813 }
e6834654
SS
4814 else if (TREE_CODE (type) == ARRAY_TYPE
4815 || TREE_CODE (type) == VECTOR_TYPE)
bbf6f052 4816 {
b3694847
SS
4817 tree elt;
4818 int i;
e1a43f73 4819 int need_to_clear;
5c5214a9 4820 tree domain;
4af3895e 4821 tree elttype = TREE_TYPE (type);
e6834654 4822 int const_bounds_p;
ae0ed63a
JM
4823 HOST_WIDE_INT minelt = 0;
4824 HOST_WIDE_INT maxelt = 0;
997404de
JH
4825 int icode = 0;
4826 rtx *vector = NULL;
4827 int elt_size = 0;
4828 unsigned n_elts = 0;
85f3d674 4829
5c5214a9
ZW
4830 if (TREE_CODE (type) == ARRAY_TYPE)
4831 domain = TYPE_DOMAIN (type);
4832 else
4833 /* Vectors do not have domains; look up the domain of
4834 the array embedded in the debug representation type.
4835 FIXME Would probably be more efficient to treat vectors
4836 separately from arrays. */
e6834654 4837 {
e6834654
SS
4838 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4839 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
997404de
JH
4840 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
4841 {
4842 enum machine_mode mode = GET_MODE (target);
4843
4844 icode = (int) vec_init_optab->handlers[mode].insn_code;
4845 if (icode != CODE_FOR_nothing)
4846 {
4847 unsigned int i;
4848
4849 elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
4850 n_elts = (GET_MODE_SIZE (mode) / elt_size);
4851 vector = alloca (n_elts);
4852 for (i = 0; i < n_elts; i++)
4853 vector [i] = CONST0_RTX (GET_MODE_INNER (mode));
4854 }
4855 }
e6834654
SS
4856 }
4857
4858 const_bounds_p = (TYPE_MIN_VALUE (domain)
4859 && TYPE_MAX_VALUE (domain)
4860 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4861 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4862
85f3d674
RK
4863 /* If we have constant bounds for the range of the type, get them. */
4864 if (const_bounds_p)
4865 {
4866 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4867 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4868 }
bbf6f052 4869
e1a43f73 4870 /* If the constructor has fewer elements than the array,
38e01259 4871 clear the whole array first. Similarly if this is
e1a43f73
PB
4872 static constructor of a non-BLKmode object. */
4873 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4874 need_to_clear = 1;
4875 else
4876 {
4877 HOST_WIDE_INT count = 0, zero_count = 0;
85f3d674
RK
4878 need_to_clear = ! const_bounds_p;
4879
e1a43f73
PB
4880 /* This loop is a more accurate version of the loop in
4881 mostly_zeros_p (it handles RANGE_EXPR in an index).
4882 It is also needed to check for missing elements. */
4883 for (elt = CONSTRUCTOR_ELTS (exp);
85f3d674 4884 elt != NULL_TREE && ! need_to_clear;
df0faff1 4885 elt = TREE_CHAIN (elt))
e1a43f73
PB
4886 {
4887 tree index = TREE_PURPOSE (elt);
4888 HOST_WIDE_INT this_node_count;
19caa751 4889
e1a43f73
PB
4890 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4891 {
4892 tree lo_index = TREE_OPERAND (index, 0);
4893 tree hi_index = TREE_OPERAND (index, 1);
05bccae2 4894
19caa751
RK
4895 if (! host_integerp (lo_index, 1)
4896 || ! host_integerp (hi_index, 1))
e1a43f73
PB
4897 {
4898 need_to_clear = 1;
4899 break;
4900 }
19caa751
RK
4901
4902 this_node_count = (tree_low_cst (hi_index, 1)
4903 - tree_low_cst (lo_index, 1) + 1);
e1a43f73
PB
4904 }
4905 else
4906 this_node_count = 1;
85f3d674 4907
e1a43f73
PB
4908 count += this_node_count;
4909 if (mostly_zeros_p (TREE_VALUE (elt)))
4910 zero_count += this_node_count;
4911 }
85f3d674 4912
8e958f70 4913 /* Clear the entire array first if there are any missing elements,
0f41302f 4914 or if the incidence of zero elements is >= 75%. */
85f3d674
RK
4915 if (! need_to_clear
4916 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
e1a43f73
PB
4917 need_to_clear = 1;
4918 }
85f3d674 4919
997404de 4920 if (need_to_clear && size > 0 && !vector)
9de08200
RK
4921 {
4922 if (! cleared)
725e58b1
RK
4923 {
4924 if (REG_P (target))
4925 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4926 else
dbb5c281 4927 clear_storage (target, GEN_INT (size));
725e58b1 4928 }
dbb5c281 4929 cleared = 1;
9de08200 4930 }
df4556a3 4931 else if (REG_P (target))
dbb5c281
RK
4932 /* Inform later passes that the old value is dead. */
4933 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
bbf6f052
RK
4934
4935 /* Store each element of the constructor into
4936 the corresponding element of TARGET, determined
4937 by counting the elements. */
4938 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4939 elt;
4940 elt = TREE_CHAIN (elt), i++)
4941 {
b3694847 4942 enum machine_mode mode;
19caa751
RK
4943 HOST_WIDE_INT bitsize;
4944 HOST_WIDE_INT bitpos;
bbf6f052 4945 int unsignedp;
e1a43f73 4946 tree value = TREE_VALUE (elt);
03dc44a6
RS
4947 tree index = TREE_PURPOSE (elt);
4948 rtx xtarget = target;
bbf6f052 4949
6de9cd9a 4950 if (cleared && initializer_zerop (value))
e1a43f73 4951 continue;
9de08200 4952
8df83eae 4953 unsignedp = TYPE_UNSIGNED (elttype);
14a774a9
RK
4954 mode = TYPE_MODE (elttype);
4955 if (mode == BLKmode)
19caa751
RK
4956 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4957 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4958 : -1);
14a774a9
RK
4959 else
4960 bitsize = GET_MODE_BITSIZE (mode);
bbf6f052 4961
e1a43f73
PB
4962 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4963 {
4964 tree lo_index = TREE_OPERAND (index, 0);
4965 tree hi_index = TREE_OPERAND (index, 1);
4977bab6 4966 rtx index_r, pos_rtx, loop_end;
e1a43f73 4967 struct nesting *loop;
05c0b405
PB
4968 HOST_WIDE_INT lo, hi, count;
4969 tree position;
e1a43f73 4970
997404de
JH
4971 if (vector)
4972 abort ();
4973
0f41302f 4974 /* If the range is constant and "small", unroll the loop. */
85f3d674
RK
4975 if (const_bounds_p
4976 && host_integerp (lo_index, 0)
19caa751
RK
4977 && host_integerp (hi_index, 0)
4978 && (lo = tree_low_cst (lo_index, 0),
4979 hi = tree_low_cst (hi_index, 0),
05c0b405
PB
4980 count = hi - lo + 1,
4981 (GET_CODE (target) != MEM
4982 || count <= 2
19caa751
RK
4983 || (host_integerp (TYPE_SIZE (elttype), 1)
4984 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4985 <= 40 * 8)))))
e1a43f73 4986 {
05c0b405
PB
4987 lo -= minelt; hi -= minelt;
4988 for (; lo <= hi; lo++)
e1a43f73 4989 {
19caa751 4990 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
10b76d73
RK
4991
4992 if (GET_CODE (target) == MEM
4993 && !MEM_KEEP_ALIAS_SET_P (target)
e6834654 4994 && TREE_CODE (type) == ARRAY_TYPE
10b76d73
RK
4995 && TYPE_NONALIASED_COMPONENT (type))
4996 {
4997 target = copy_rtx (target);
4998 MEM_KEEP_ALIAS_SET_P (target) = 1;
4999 }
5000
23cb1766 5001 store_constructor_field
04050c69
RK
5002 (target, bitsize, bitpos, mode, value, type, cleared,
5003 get_alias_set (elttype));
e1a43f73
PB
5004 }
5005 }
5006 else
5007 {
4977bab6 5008 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
e1a43f73
PB
5009 loop_end = gen_label_rtx ();
5010
8df83eae 5011 unsignedp = TYPE_UNSIGNED (domain);
e1a43f73
PB
5012
5013 index = build_decl (VAR_DECL, NULL_TREE, domain);
5014
19e7881c 5015 index_r
e1a43f73
PB
5016 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5017 &unsignedp, 0));
19e7881c 5018 SET_DECL_RTL (index, index_r);
e1a43f73
PB
5019 if (TREE_CODE (value) == SAVE_EXPR
5020 && SAVE_EXPR_RTL (value) == 0)
5021 {
0f41302f
MS
5022 /* Make sure value gets expanded once before the
5023 loop. */
e1a43f73
PB
5024 expand_expr (value, const0_rtx, VOIDmode, 0);
5025 emit_queue ();
5026 }
5027 store_expr (lo_index, index_r, 0);
5028 loop = expand_start_loop (0);
5029
0f41302f 5030 /* Assign value to element index. */
fed3cef0
RK
5031 position
5032 = convert (ssizetype,
5033 fold (build (MINUS_EXPR, TREE_TYPE (index),
5034 index, TYPE_MIN_VALUE (domain))));
5035 position = size_binop (MULT_EXPR, position,
5036 convert (ssizetype,
5037 TYPE_SIZE_UNIT (elttype)));
5038
e1a43f73 5039 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
0d4903b8
RK
5040 xtarget = offset_address (target, pos_rtx,
5041 highest_pow2_factor (position));
5042 xtarget = adjust_address (xtarget, mode, 0);
e1a43f73 5043 if (TREE_CODE (value) == CONSTRUCTOR)
dbb5c281
RK
5044 store_constructor (value, xtarget, cleared,
5045 bitsize / BITS_PER_UNIT);
e1a43f73
PB
5046 else
5047 store_expr (value, xtarget, 0);
5048
5049 expand_exit_loop_if_false (loop,
5050 build (LT_EXPR, integer_type_node,
5051 index, hi_index));
5052
5053 expand_increment (build (PREINCREMENT_EXPR,
5054 TREE_TYPE (index),
7b8b9722 5055 index, integer_one_node), 0, 0);
e1a43f73
PB
5056 expand_end_loop ();
5057 emit_label (loop_end);
e1a43f73
PB
5058 }
5059 }
19caa751
RK
5060 else if ((index != 0 && ! host_integerp (index, 0))
5061 || ! host_integerp (TYPE_SIZE (elttype), 1))
03dc44a6 5062 {
03dc44a6
RS
5063 tree position;
5064
997404de
JH
5065 if (vector)
5066 abort ();
5067
5b6c44ff 5068 if (index == 0)
fed3cef0 5069 index = ssize_int (1);
5b6c44ff 5070
e1a43f73 5071 if (minelt)
fed3cef0
RK
5072 index = convert (ssizetype,
5073 fold (build (MINUS_EXPR, index,
5074 TYPE_MIN_VALUE (domain))));
19caa751 5075
fed3cef0
RK
5076 position = size_binop (MULT_EXPR, index,
5077 convert (ssizetype,
5078 TYPE_SIZE_UNIT (elttype)));
0d4903b8
RK
5079 xtarget = offset_address (target,
5080 expand_expr (position, 0, VOIDmode, 0),
5081 highest_pow2_factor (position));
5082 xtarget = adjust_address (xtarget, mode, 0);
e1a43f73 5083 store_expr (value, xtarget, 0);
03dc44a6 5084 }
997404de
JH
5085 else if (vector)
5086 {
5087 int pos;
5088
5089 if (index != 0)
5090 pos = tree_low_cst (index, 0) - minelt;
5091 else
5092 pos = i;
5093 vector[pos] = expand_expr (value, NULL_RTX, VOIDmode, 0);
5094 }
03dc44a6
RS
5095 else
5096 {
5097 if (index != 0)
19caa751
RK
5098 bitpos = ((tree_low_cst (index, 0) - minelt)
5099 * tree_low_cst (TYPE_SIZE (elttype), 1));
03dc44a6 5100 else
19caa751
RK
5101 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5102
10b76d73 5103 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
e6834654 5104 && TREE_CODE (type) == ARRAY_TYPE
10b76d73
RK
5105 && TYPE_NONALIASED_COMPONENT (type))
5106 {
5107 target = copy_rtx (target);
5108 MEM_KEEP_ALIAS_SET_P (target) = 1;
5109 }
9b9bd3b2
JH
5110 store_constructor_field (target, bitsize, bitpos, mode, value,
5111 type, cleared, get_alias_set (elttype));
03dc44a6 5112 }
bbf6f052 5113 }
997404de
JH
5114 if (vector)
5115 {
5116 emit_insn (GEN_FCN (icode) (target,
5117 gen_rtx_PARALLEL (GET_MODE (target),
5118 gen_rtvec_v (n_elts, vector))));
5119 }
bbf6f052 5120 }
19caa751 5121
3a94c984 5122 /* Set constructor assignments. */
071a6595
PB
5123 else if (TREE_CODE (type) == SET_TYPE)
5124 {
e1a43f73 5125 tree elt = CONSTRUCTOR_ELTS (exp);
19caa751 5126 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
071a6595
PB
5127 tree domain = TYPE_DOMAIN (type);
5128 tree domain_min, domain_max, bitlength;
5129
9faa82d8 5130 /* The default implementation strategy is to extract the constant
071a6595
PB
5131 parts of the constructor, use that to initialize the target,
5132 and then "or" in whatever non-constant ranges we need in addition.
5133
5134 If a large set is all zero or all ones, it is
5135 probably better to set it using memset (if available) or bzero.
5136 Also, if a large set has just a single range, it may also be
5137 better to first clear all the first clear the set (using
0f41302f 5138 bzero/memset), and set the bits we want. */
3a94c984 5139
0f41302f 5140 /* Check for all zeros. */
9376fcd6 5141 if (elt == NULL_TREE && size > 0)
071a6595 5142 {
dbb5c281
RK
5143 if (!cleared)
5144 clear_storage (target, GEN_INT (size));
071a6595
PB
5145 return;
5146 }
5147
071a6595
PB
5148 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5149 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5150 bitlength = size_binop (PLUS_EXPR,
fed3cef0
RK
5151 size_diffop (domain_max, domain_min),
5152 ssize_int (1));
071a6595 5153
19caa751 5154 nbits = tree_low_cst (bitlength, 1);
e1a43f73
PB
5155
5156 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5157 are "complicated" (more than one range), initialize (the
3a94c984 5158 constant parts) by copying from a constant. */
e1a43f73
PB
5159 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5160 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
071a6595 5161 {
19caa751 5162 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
b4ee5a72 5163 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
703ad42b 5164 char *bit_buffer = alloca (nbits);
b4ee5a72 5165 HOST_WIDE_INT word = 0;
19caa751
RK
5166 unsigned int bit_pos = 0;
5167 unsigned int ibit = 0;
5168 unsigned int offset = 0; /* In bytes from beginning of set. */
5169
e1a43f73 5170 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
b4ee5a72 5171 for (;;)
071a6595 5172 {
b4ee5a72
PB
5173 if (bit_buffer[ibit])
5174 {
b09f3348 5175 if (BYTES_BIG_ENDIAN)
b4ee5a72
PB
5176 word |= (1 << (set_word_size - 1 - bit_pos));
5177 else
5178 word |= 1 << bit_pos;
5179 }
19caa751 5180
b4ee5a72
PB
5181 bit_pos++; ibit++;
5182 if (bit_pos >= set_word_size || ibit == nbits)
071a6595 5183 {
dbb5c281 5184 if (word != 0 || ! cleared)
e1a43f73 5185 {
053ee101 5186 rtx datum = gen_int_mode (word, mode);
e1a43f73 5187 rtx to_rtx;
19caa751 5188
0f41302f
MS
5189 /* The assumption here is that it is safe to use
5190 XEXP if the set is multi-word, but not if
5191 it's single-word. */
e1a43f73 5192 if (GET_CODE (target) == MEM)
f4ef873c 5193 to_rtx = adjust_address (target, mode, offset);
3a94c984 5194 else if (offset == 0)
e1a43f73
PB
5195 to_rtx = target;
5196 else
5197 abort ();
5198 emit_move_insn (to_rtx, datum);
5199 }
19caa751 5200
b4ee5a72
PB
5201 if (ibit == nbits)
5202 break;
5203 word = 0;
5204 bit_pos = 0;
5205 offset += set_word_size / BITS_PER_UNIT;
071a6595
PB
5206 }
5207 }
071a6595 5208 }
dbb5c281 5209 else if (!cleared)
19caa751
RK
5210 /* Don't bother clearing storage if the set is all ones. */
5211 if (TREE_CHAIN (elt) != NULL_TREE
5212 || (TREE_PURPOSE (elt) == NULL_TREE
5213 ? nbits != 1
5214 : ( ! host_integerp (TREE_VALUE (elt), 0)
5215 || ! host_integerp (TREE_PURPOSE (elt), 0)
5216 || (tree_low_cst (TREE_VALUE (elt), 0)
5217 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5218 != (HOST_WIDE_INT) nbits))))
dbb5c281 5219 clear_storage (target, expr_size (exp));
3a94c984 5220
e1a43f73 5221 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
071a6595 5222 {
3a94c984 5223 /* Start of range of element or NULL. */
071a6595 5224 tree startbit = TREE_PURPOSE (elt);
3a94c984 5225 /* End of range of element, or element value. */
071a6595
PB
5226 tree endbit = TREE_VALUE (elt);
5227 HOST_WIDE_INT startb, endb;
19caa751 5228 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
071a6595
PB
5229
5230 bitlength_rtx = expand_expr (bitlength,
19caa751 5231 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
071a6595 5232
3a94c984 5233 /* Handle non-range tuple element like [ expr ]. */
071a6595
PB
5234 if (startbit == NULL_TREE)
5235 {
5236 startbit = save_expr (endbit);
5237 endbit = startbit;
5238 }
19caa751 5239
071a6595
PB
5240 startbit = convert (sizetype, startbit);
5241 endbit = convert (sizetype, endbit);
5242 if (! integer_zerop (domain_min))
5243 {
5244 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5245 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5246 }
3a94c984 5247 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
071a6595 5248 EXPAND_CONST_ADDRESS);
3a94c984 5249 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
071a6595
PB
5250 EXPAND_CONST_ADDRESS);
5251
5252 if (REG_P (target))
5253 {
1da68f56
RK
5254 targetx
5255 = assign_temp
ae2bcd98 5256 ((build_qualified_type (lang_hooks.types.type_for_mode
b0c48229 5257 (GET_MODE (target), 0),
1da68f56
RK
5258 TYPE_QUAL_CONST)),
5259 0, 1, 1);
071a6595
PB
5260 emit_move_insn (targetx, target);
5261 }
19caa751 5262
071a6595
PB
5263 else if (GET_CODE (target) == MEM)
5264 targetx = target;
5265 else
5266 abort ();
5267
4ca79136
RH
5268 /* Optimization: If startbit and endbit are constants divisible
5269 by BITS_PER_UNIT, call memset instead. */
5270 if (TARGET_MEM_FUNCTIONS
5271 && TREE_CODE (startbit) == INTEGER_CST
071a6595
PB
5272 && TREE_CODE (endbit) == INTEGER_CST
5273 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
e1a43f73 5274 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
071a6595 5275 {
ebb1b59a 5276 emit_library_call (memset_libfunc, LCT_NORMAL,
071a6595 5277 VOIDmode, 3,
e1a43f73
PB
5278 plus_constant (XEXP (targetx, 0),
5279 startb / BITS_PER_UNIT),
071a6595 5280 Pmode,
3b6f75e2 5281 constm1_rtx, TYPE_MODE (integer_type_node),
071a6595 5282 GEN_INT ((endb - startb) / BITS_PER_UNIT),
3b6f75e2 5283 TYPE_MODE (sizetype));
071a6595
PB
5284 }
5285 else
68d28100
RH
5286 emit_library_call (setbits_libfunc, LCT_NORMAL,
5287 VOIDmode, 4, XEXP (targetx, 0),
ebb1b59a 5288 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
19caa751
RK
5289 startbit_rtx, TYPE_MODE (sizetype),
5290 endbit_rtx, TYPE_MODE (sizetype));
5291
071a6595
PB
5292 if (REG_P (target))
5293 emit_move_insn (target, targetx);
5294 }
5295 }
bbf6f052
RK
5296
5297 else
5298 abort ();
5299}
5300
5301/* Store the value of EXP (an expression tree)
5302 into a subfield of TARGET which has mode MODE and occupies
5303 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5304 If MODE is VOIDmode, it means that we are storing into a bit-field.
5305
5306 If VALUE_MODE is VOIDmode, return nothing in particular.
5307 UNSIGNEDP is not used in this case.
5308
5309 Otherwise, return an rtx for the value stored. This rtx
5310 has mode VALUE_MODE if that is convenient to do.
5311 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5312
a06ef755 5313 TYPE is the type of the underlying object,
ece32014
MM
5314
5315 ALIAS_SET is the alias set for the destination. This value will
5316 (in general) be different from that for TARGET, since TARGET is a
5317 reference to the containing structure. */
bbf6f052
RK
5318
5319static rtx
502b8322
AJ
5320store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5321 enum machine_mode mode, tree exp, enum machine_mode value_mode,
5322 int unsignedp, tree type, int alias_set)
bbf6f052 5323{
906c4e36 5324 HOST_WIDE_INT width_mask = 0;
bbf6f052 5325
e9a25f70
JL
5326 if (TREE_CODE (exp) == ERROR_MARK)
5327 return const0_rtx;
5328
2be6a7e9
RK
5329 /* If we have nothing to store, do nothing unless the expression has
5330 side-effects. */
5331 if (bitsize == 0)
5332 return expand_expr (exp, const0_rtx, VOIDmode, 0);
6a87d634 5333 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
906c4e36 5334 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
bbf6f052
RK
5335
5336 /* If we are storing into an unaligned field of an aligned union that is
5337 in a register, we may have the mode of TARGET being an integer mode but
5338 MODE == BLKmode. In that case, get an aligned object whose size and
5339 alignment are the same as TARGET and store TARGET into it (we can avoid
5340 the store if the field being stored is the entire width of TARGET). Then
5341 call ourselves recursively to store the field into a BLKmode version of
5342 that object. Finally, load from the object into TARGET. This is not
5343 very efficient in general, but should only be slightly more expensive
5344 than the otherwise-required unaligned accesses. Perhaps this can be
85a43a2f
RK
5345 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5346 twice, once with emit_move_insn and once via store_field. */
bbf6f052
RK
5347
5348 if (mode == BLKmode
5349 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5350 {
85a43a2f 5351 rtx object = assign_temp (type, 0, 1, 1);
c4e59f51 5352 rtx blk_object = adjust_address (object, BLKmode, 0);
bbf6f052 5353
8752c357 5354 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
bbf6f052
RK
5355 emit_move_insn (object, target);
5356
a06ef755
RK
5357 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5358 alias_set);
bbf6f052
RK
5359
5360 emit_move_insn (target, object);
5361
a06ef755 5362 /* We want to return the BLKmode version of the data. */
46093b97 5363 return blk_object;
bbf6f052 5364 }
c3b247b4
JM
5365
5366 if (GET_CODE (target) == CONCAT)
5367 {
5368 /* We're storing into a struct containing a single __complex. */
5369
5370 if (bitpos != 0)
5371 abort ();
6de9cd9a 5372 return store_expr (exp, target, value_mode != VOIDmode);
c3b247b4 5373 }
bbf6f052
RK
5374
5375 /* If the structure is in a register or if the component
5376 is a bit field, we cannot use addressing to access it.
5377 Use bit-field techniques or SUBREG to store in it. */
5378
4fa52007 5379 if (mode == VOIDmode
6ab06cbb
JW
5380 || (mode != BLKmode && ! direct_store[(int) mode]
5381 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5382 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4fa52007 5383 || GET_CODE (target) == REG
c980ac49 5384 || GET_CODE (target) == SUBREG
ccc98036
RS
5385 /* If the field isn't aligned enough to store as an ordinary memref,
5386 store it as a bit field. */
15b19a7d 5387 || (mode != BLKmode
9e5f281f
OH
5388 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5389 || bitpos % GET_MODE_ALIGNMENT (mode))
5390 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
502b8322 5391 || (bitpos % BITS_PER_UNIT != 0)))
14a774a9
RK
5392 /* If the RHS and field are a constant size and the size of the
5393 RHS isn't the same size as the bitfield, we must use bitfield
5394 operations. */
05bccae2
RK
5395 || (bitsize >= 0
5396 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5397 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
bbf6f052 5398 {
906c4e36 5399 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
bbd6cf73 5400
ef19912d
RK
5401 /* If BITSIZE is narrower than the size of the type of EXP
5402 we will be narrowing TEMP. Normally, what's wanted are the
5403 low-order bits. However, if EXP's type is a record and this is
5404 big-endian machine, we want the upper BITSIZE bits. */
5405 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
65a07688 5406 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
ef19912d
RK
5407 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5408 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5409 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5410 - bitsize),
c1853da7 5411 NULL_RTX, 1);
ef19912d 5412
bbd6cf73
RK
5413 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5414 MODE. */
5415 if (mode != VOIDmode && mode != BLKmode
5416 && mode != TYPE_MODE (TREE_TYPE (exp)))
5417 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5418
a281e72d
RK
5419 /* If the modes of TARGET and TEMP are both BLKmode, both
5420 must be in memory and BITPOS must be aligned on a byte
5421 boundary. If so, we simply do a block copy. */
5422 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5423 {
5424 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5425 || bitpos % BITS_PER_UNIT != 0)
5426 abort ();
5427
f4ef873c 5428 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
a281e72d 5429 emit_block_move (target, temp,
a06ef755 5430 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
44bb111a
RH
5431 / BITS_PER_UNIT),
5432 BLOCK_OP_NORMAL);
a281e72d
RK
5433
5434 return value_mode == VOIDmode ? const0_rtx : target;
5435 }
5436
bbf6f052 5437 /* Store the value in the bitfield. */
a06ef755
RK
5438 store_bit_field (target, bitsize, bitpos, mode, temp,
5439 int_size_in_bytes (type));
5440
bbf6f052
RK
5441 if (value_mode != VOIDmode)
5442 {
04050c69
RK
5443 /* The caller wants an rtx for the value.
5444 If possible, avoid refetching from the bitfield itself. */
bbf6f052
RK
5445 if (width_mask != 0
5446 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5c4d7cfb 5447 {
9074de27 5448 tree count;
5c4d7cfb 5449 enum machine_mode tmode;
86a2c12a 5450
5c4d7cfb 5451 tmode = GET_MODE (temp);
86a2c12a
RS
5452 if (tmode == VOIDmode)
5453 tmode = value_mode;
22273300
JJ
5454
5455 if (unsignedp)
5456 return expand_and (tmode, temp,
2496c7bd 5457 gen_int_mode (width_mask, tmode),
22273300
JJ
5458 NULL_RTX);
5459
5c4d7cfb
RS
5460 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5461 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5462 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5463 }
04050c69 5464
bbf6f052 5465 return extract_bit_field (target, bitsize, bitpos, unsignedp,
04050c69 5466 NULL_RTX, value_mode, VOIDmode,
a06ef755 5467 int_size_in_bytes (type));
bbf6f052
RK
5468 }
5469 return const0_rtx;
5470 }
5471 else
5472 {
5473 rtx addr = XEXP (target, 0);
a06ef755 5474 rtx to_rtx = target;
bbf6f052
RK
5475
5476 /* If a value is wanted, it must be the lhs;
5477 so make the address stable for multiple use. */
5478
5479 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5480 && ! CONSTANT_ADDRESS_P (addr)
5481 /* A frame-pointer reference is already stable. */
5482 && ! (GET_CODE (addr) == PLUS
5483 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5484 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5485 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
a06ef755 5486 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
bbf6f052
RK
5487
5488 /* Now build a reference to just the desired component. */
5489
a06ef755
RK
5490 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5491
5492 if (to_rtx == target)
5493 to_rtx = copy_rtx (to_rtx);
792760b9 5494
c6df88cb 5495 MEM_SET_IN_STRUCT_P (to_rtx, 1);
10b76d73 5496 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
a06ef755 5497 set_mem_alias_set (to_rtx, alias_set);
bbf6f052
RK
5498
5499 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5500 }
5501}
5502\f
5503/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
b4e3fabb
RK
5504 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5505 codes and find the ultimate containing object, which we return.
bbf6f052
RK
5506
5507 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5508 bit position, and *PUNSIGNEDP to the signedness of the field.
7bb0943f
RS
5509 If the position of the field is variable, we store a tree
5510 giving the variable offset (in units) in *POFFSET.
5511 This offset is in addition to the bit position.
5512 If the position is not variable, we store 0 in *POFFSET.
bbf6f052
RK
5513
5514 If any of the extraction expressions is volatile,
5515 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5516
5517 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5518 is a mode that can be used to access the field. In that case, *PBITSIZE
e7c33f54
RK
5519 is redundant.
5520
5521 If the field describes a variable-sized object, *PMODE is set to
5522 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
6d2f8887 5523 this case, but the address of the object can be found. */
bbf6f052
RK
5524
5525tree
502b8322
AJ
5526get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5527 HOST_WIDE_INT *pbitpos, tree *poffset,
5528 enum machine_mode *pmode, int *punsignedp,
5529 int *pvolatilep)
bbf6f052
RK
5530{
5531 tree size_tree = 0;
5532 enum machine_mode mode = VOIDmode;
fed3cef0 5533 tree offset = size_zero_node;
770ae6cc 5534 tree bit_offset = bitsize_zero_node;
770ae6cc 5535 tree tem;
bbf6f052 5536
770ae6cc
RK
5537 /* First get the mode, signedness, and size. We do this from just the
5538 outermost expression. */
bbf6f052
RK
5539 if (TREE_CODE (exp) == COMPONENT_REF)
5540 {
5541 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5542 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5543 mode = DECL_MODE (TREE_OPERAND (exp, 1));
770ae6cc 5544
a150de29 5545 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
bbf6f052
RK
5546 }
5547 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5548 {
5549 size_tree = TREE_OPERAND (exp, 1);
a150de29 5550 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
bbf6f052
RK
5551 }
5552 else
5553 {
5554 mode = TYPE_MODE (TREE_TYPE (exp));
8df83eae 5555 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
770ae6cc 5556
ab87f8c8
JL
5557 if (mode == BLKmode)
5558 size_tree = TYPE_SIZE (TREE_TYPE (exp));
770ae6cc
RK
5559 else
5560 *pbitsize = GET_MODE_BITSIZE (mode);
bbf6f052 5561 }
3a94c984 5562
770ae6cc 5563 if (size_tree != 0)
bbf6f052 5564 {
770ae6cc 5565 if (! host_integerp (size_tree, 1))
e7c33f54
RK
5566 mode = BLKmode, *pbitsize = -1;
5567 else
770ae6cc 5568 *pbitsize = tree_low_cst (size_tree, 1);
bbf6f052
RK
5569 }
5570
5571 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5572 and find the ultimate containing object. */
bbf6f052
RK
5573 while (1)
5574 {
770ae6cc
RK
5575 if (TREE_CODE (exp) == BIT_FIELD_REF)
5576 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5577 else if (TREE_CODE (exp) == COMPONENT_REF)
bbf6f052 5578 {
770ae6cc
RK
5579 tree field = TREE_OPERAND (exp, 1);
5580 tree this_offset = DECL_FIELD_OFFSET (field);
bbf6f052 5581
e7f3c83f
RK
5582 /* If this field hasn't been filled in yet, don't go
5583 past it. This should only happen when folding expressions
5584 made during type construction. */
770ae6cc 5585 if (this_offset == 0)
e7f3c83f 5586 break;
6fce44af
RK
5587 else
5588 this_offset = SUBSTITUTE_PLACEHOLDER_IN_EXPR (this_offset, exp);
e7f3c83f 5589
7156dead 5590 offset = size_binop (PLUS_EXPR, offset, this_offset);
770ae6cc
RK
5591 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5592 DECL_FIELD_BIT_OFFSET (field));
e6d8c385 5593
a06ef755 5594 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
bbf6f052 5595 }
7156dead 5596
b4e3fabb
RK
5597 else if (TREE_CODE (exp) == ARRAY_REF
5598 || TREE_CODE (exp) == ARRAY_RANGE_REF)
bbf6f052 5599 {
742920c7 5600 tree index = TREE_OPERAND (exp, 1);
b4e3fabb
RK
5601 tree array = TREE_OPERAND (exp, 0);
5602 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
770ae6cc 5603 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
b4e3fabb 5604 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
742920c7 5605
770ae6cc
RK
5606 /* We assume all arrays have sizes that are a multiple of a byte.
5607 First subtract the lower bound, if any, in the type of the
5608 index, then convert to sizetype and multiply by the size of the
5609 array element. */
5610 if (low_bound != 0 && ! integer_zerop (low_bound))
5611 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5612 index, low_bound));
f8dac6eb 5613
6fce44af 5614 /* If the index has a self-referential type, instantiate it with
d7d23035 5615 the object; likewise for the component size. */
6fce44af
RK
5616 index = SUBSTITUTE_PLACEHOLDER_IN_EXPR (index, exp);
5617 unit_size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (unit_size, array);
770ae6cc
RK
5618 offset = size_binop (PLUS_EXPR, offset,
5619 size_binop (MULT_EXPR,
5620 convert (sizetype, index),
7156dead 5621 unit_size));
bbf6f052 5622 }
7156dead 5623
c1853da7
RK
5624 /* We can go inside most conversions: all NON_VALUE_EXPRs, all normal
5625 conversions that don't change the mode, and all view conversions
5626 except those that need to "step up" the alignment. */
bbf6f052 5627 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
c1853da7
RK
5628 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5629 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5630 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5631 && STRICT_ALIGNMENT
5632 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5633 < BIGGEST_ALIGNMENT)
5634 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5635 || TYPE_ALIGN_OK (TREE_TYPE
5636 (TREE_OPERAND (exp, 0))))))
bbf6f052
RK
5637 && ! ((TREE_CODE (exp) == NOP_EXPR
5638 || TREE_CODE (exp) == CONVERT_EXPR)
5639 && (TYPE_MODE (TREE_TYPE (exp))
5640 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5641 break;
7bb0943f
RS
5642
5643 /* If any reference in the chain is volatile, the effect is volatile. */
5644 if (TREE_THIS_VOLATILE (exp))
5645 *pvolatilep = 1;
839c4796 5646
bbf6f052
RK
5647 exp = TREE_OPERAND (exp, 0);
5648 }
5649
770ae6cc
RK
5650 /* If OFFSET is constant, see if we can return the whole thing as a
5651 constant bit position. Otherwise, split it up. */
5652 if (host_integerp (offset, 0)
5653 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5654 bitsize_unit_node))
5655 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5656 && host_integerp (tem, 0))
5657 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5658 else
5659 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
b50d17a1 5660
bbf6f052 5661 *pmode = mode;
bbf6f052
RK
5662 return exp;
5663}
921b3427 5664
ed239f5a
RK
5665/* Return 1 if T is an expression that get_inner_reference handles. */
5666
5667int
502b8322 5668handled_component_p (tree t)
ed239f5a
RK
5669{
5670 switch (TREE_CODE (t))
5671 {
5672 case BIT_FIELD_REF:
5673 case COMPONENT_REF:
5674 case ARRAY_REF:
5675 case ARRAY_RANGE_REF:
5676 case NON_LVALUE_EXPR:
5677 case VIEW_CONVERT_EXPR:
5678 return 1;
5679
1a8c4ca6
EB
5680 /* ??? Sure they are handled, but get_inner_reference may return
5681 a different PBITSIZE, depending upon whether the expression is
5682 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
ed239f5a
RK
5683 case NOP_EXPR:
5684 case CONVERT_EXPR:
5685 return (TYPE_MODE (TREE_TYPE (t))
5686 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5687
5688 default:
5689 return 0;
5690 }
5691}
bbf6f052 5692\f
3fe44edd
RK
5693/* Given an rtx VALUE that may contain additions and multiplications, return
5694 an equivalent value that just refers to a register, memory, or constant.
5695 This is done by generating instructions to perform the arithmetic and
5696 returning a pseudo-register containing the value.
c45a13a6
RK
5697
5698 The returned value may be a REG, SUBREG, MEM or constant. */
bbf6f052
RK
5699
5700rtx
502b8322 5701force_operand (rtx value, rtx target)
bbf6f052 5702{
8a28dbcc 5703 rtx op1, op2;
bbf6f052 5704 /* Use subtarget as the target for operand 0 of a binary operation. */
b3694847 5705 rtx subtarget = get_subtarget (target);
8a28dbcc 5706 enum rtx_code code = GET_CODE (value);
bbf6f052 5707
50654f6c
ZD
5708 /* Check for subreg applied to an expression produced by loop optimizer. */
5709 if (code == SUBREG
5710 && GET_CODE (SUBREG_REG (value)) != REG
5711 && GET_CODE (SUBREG_REG (value)) != MEM)
5712 {
5713 value = simplify_gen_subreg (GET_MODE (value),
5714 force_reg (GET_MODE (SUBREG_REG (value)),
5715 force_operand (SUBREG_REG (value),
5716 NULL_RTX)),
5717 GET_MODE (SUBREG_REG (value)),
5718 SUBREG_BYTE (value));
5719 code = GET_CODE (value);
5720 }
5721
8b015896 5722 /* Check for a PIC address load. */
8a28dbcc 5723 if ((code == PLUS || code == MINUS)
8b015896
RH
5724 && XEXP (value, 0) == pic_offset_table_rtx
5725 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5726 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5727 || GET_CODE (XEXP (value, 1)) == CONST))
5728 {
5729 if (!subtarget)
5730 subtarget = gen_reg_rtx (GET_MODE (value));
5731 emit_move_insn (subtarget, value);
5732 return subtarget;
5733 }
5734
8a28dbcc 5735 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
bbf6f052 5736 {
8a28dbcc
JH
5737 if (!target)
5738 target = gen_reg_rtx (GET_MODE (value));
ce0f3925 5739 convert_move (target, force_operand (XEXP (value, 0), NULL),
8a28dbcc
JH
5740 code == ZERO_EXTEND);
5741 return target;
bbf6f052
RK
5742 }
5743
ec8e098d 5744 if (ARITHMETIC_P (value))
bbf6f052
RK
5745 {
5746 op2 = XEXP (value, 1);
8a28dbcc 5747 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
bbf6f052 5748 subtarget = 0;
8a28dbcc 5749 if (code == MINUS && GET_CODE (op2) == CONST_INT)
bbf6f052 5750 {
8a28dbcc 5751 code = PLUS;
bbf6f052
RK
5752 op2 = negate_rtx (GET_MODE (value), op2);
5753 }
5754
5755 /* Check for an addition with OP2 a constant integer and our first
8a28dbcc
JH
5756 operand a PLUS of a virtual register and something else. In that
5757 case, we want to emit the sum of the virtual register and the
5758 constant first and then add the other value. This allows virtual
5759 register instantiation to simply modify the constant rather than
5760 creating another one around this addition. */
5761 if (code == PLUS && GET_CODE (op2) == CONST_INT
bbf6f052
RK
5762 && GET_CODE (XEXP (value, 0)) == PLUS
5763 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5764 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5765 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5766 {
8a28dbcc
JH
5767 rtx temp = expand_simple_binop (GET_MODE (value), code,
5768 XEXP (XEXP (value, 0), 0), op2,
5769 subtarget, 0, OPTAB_LIB_WIDEN);
5770 return expand_simple_binop (GET_MODE (value), code, temp,
5771 force_operand (XEXP (XEXP (value,
5772 0), 1), 0),
5773 target, 0, OPTAB_LIB_WIDEN);
bbf6f052 5774 }
3a94c984 5775
8a28dbcc
JH
5776 op1 = force_operand (XEXP (value, 0), subtarget);
5777 op2 = force_operand (op2, NULL_RTX);
5778 switch (code)
5779 {
5780 case MULT:
5781 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5782 case DIV:
5783 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5784 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5785 target, 1, OPTAB_LIB_WIDEN);
5786 else
5787 return expand_divmod (0,
5788 FLOAT_MODE_P (GET_MODE (value))
5789 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5790 GET_MODE (value), op1, op2, target, 0);
5791 break;
5792 case MOD:
5793 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5794 target, 0);
5795 break;
5796 case UDIV:
5797 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5798 target, 1);
5799 break;
5800 case UMOD:
5801 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5802 target, 1);
5803 break;
5804 case ASHIFTRT:
5805 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5806 target, 0, OPTAB_LIB_WIDEN);
5807 break;
5808 default:
5809 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5810 target, 1, OPTAB_LIB_WIDEN);
5811 }
5812 }
ec8e098d 5813 if (UNARY_P (value))
8a28dbcc
JH
5814 {
5815 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5816 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
bbf6f052 5817 }
34e81b5a
RK
5818
5819#ifdef INSN_SCHEDULING
5820 /* On machines that have insn scheduling, we want all memory reference to be
5821 explicit, so we need to deal with such paradoxical SUBREGs. */
5822 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5823 && (GET_MODE_SIZE (GET_MODE (value))
5824 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5825 value
5826 = simplify_gen_subreg (GET_MODE (value),
5827 force_reg (GET_MODE (SUBREG_REG (value)),
5828 force_operand (SUBREG_REG (value),
5829 NULL_RTX)),
5830 GET_MODE (SUBREG_REG (value)),
5831 SUBREG_BYTE (value));
5832#endif
5833
bbf6f052
RK
5834 return value;
5835}
5836\f
bbf6f052 5837/* Subroutine of expand_expr: return nonzero iff there is no way that
e5e809f4
JL
5838 EXP can reference X, which is being modified. TOP_P is nonzero if this
5839 call is going to be used to determine whether we need a temporary
ff439b5f
CB
5840 for EXP, as opposed to a recursive call to this function.
5841
5842 It is always safe for this routine to return zero since it merely
5843 searches for optimization opportunities. */
bbf6f052 5844
8f17b5c5 5845int
502b8322 5846safe_from_p (rtx x, tree exp, int top_p)
bbf6f052
RK
5847{
5848 rtx exp_rtl = 0;
5849 int i, nops;
1da68f56 5850 static tree save_expr_list;
bbf6f052 5851
6676e72f
RK
5852 if (x == 0
5853 /* If EXP has varying size, we MUST use a target since we currently
8f6562d0
PB
5854 have no way of allocating temporaries of variable size
5855 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5856 So we assume here that something at a higher level has prevented a
f4510f37 5857 clash. This is somewhat bogus, but the best we can do. Only
e5e809f4 5858 do this when X is BLKmode and when we are at the top level. */
d0f062fb 5859 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
f4510f37 5860 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
8f6562d0
PB
5861 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5862 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5863 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5864 != INTEGER_CST)
1da68f56
RK
5865 && GET_MODE (x) == BLKmode)
5866 /* If X is in the outgoing argument area, it is always safe. */
5867 || (GET_CODE (x) == MEM
5868 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5869 || (GET_CODE (XEXP (x, 0)) == PLUS
5870 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
bbf6f052
RK
5871 return 1;
5872
5873 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5874 find the underlying pseudo. */
5875 if (GET_CODE (x) == SUBREG)
5876 {
5877 x = SUBREG_REG (x);
5878 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5879 return 0;
5880 }
5881
1da68f56
RK
5882 /* A SAVE_EXPR might appear many times in the expression passed to the
5883 top-level safe_from_p call, and if it has a complex subexpression,
5884 examining it multiple times could result in a combinatorial explosion.
7ef0daad 5885 E.g. on an Alpha running at least 200MHz, a Fortran testcase compiled
1da68f56
RK
5886 with optimization took about 28 minutes to compile -- even though it was
5887 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5888 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5889 we have processed. Note that the only test of top_p was above. */
5890
5891 if (top_p)
5892 {
5893 int rtn;
5894 tree t;
5895
5896 save_expr_list = 0;
5897
5898 rtn = safe_from_p (x, exp, 0);
5899
5900 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5901 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5902
5903 return rtn;
5904 }
bbf6f052 5905
1da68f56 5906 /* Now look at our tree code and possibly recurse. */
bbf6f052
RK
5907 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5908 {
5909 case 'd':
a9772b60 5910 exp_rtl = DECL_RTL_IF_SET (exp);
bbf6f052
RK
5911 break;
5912
5913 case 'c':
5914 return 1;
5915
5916 case 'x':
5917 if (TREE_CODE (exp) == TREE_LIST)
f8d4be57
CE
5918 {
5919 while (1)
5920 {
5921 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5922 return 0;
5923 exp = TREE_CHAIN (exp);
5924 if (!exp)
5925 return 1;
5926 if (TREE_CODE (exp) != TREE_LIST)
5927 return safe_from_p (x, exp, 0);
5928 }
5929 }
ff439b5f
CB
5930 else if (TREE_CODE (exp) == ERROR_MARK)
5931 return 1; /* An already-visited SAVE_EXPR? */
bbf6f052
RK
5932 else
5933 return 0;
5934
bbf6f052
RK
5935 case '2':
5936 case '<':
f8d4be57
CE
5937 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
5938 return 0;
5d3cc252 5939 /* Fall through. */
f8d4be57
CE
5940
5941 case '1':
5942 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
bbf6f052
RK
5943
5944 case 'e':
5945 case 'r':
5946 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5947 the expression. If it is set, we conflict iff we are that rtx or
5948 both are in memory. Otherwise, we check all operands of the
5949 expression recursively. */
5950
5951 switch (TREE_CODE (exp))
5952 {
5953 case ADDR_EXPR:
70072ed9
RK
5954 /* If the operand is static or we are static, we can't conflict.
5955 Likewise if we don't conflict with the operand at all. */
5956 if (staticp (TREE_OPERAND (exp, 0))
5957 || TREE_STATIC (exp)
5958 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5959 return 1;
5960
5961 /* Otherwise, the only way this can conflict is if we are taking
5962 the address of a DECL a that address if part of X, which is
5963 very rare. */
5964 exp = TREE_OPERAND (exp, 0);
5965 if (DECL_P (exp))
5966 {
5967 if (!DECL_RTL_SET_P (exp)
5968 || GET_CODE (DECL_RTL (exp)) != MEM)
5969 return 0;
5970 else
5971 exp_rtl = XEXP (DECL_RTL (exp), 0);
5972 }
5973 break;
bbf6f052
RK
5974
5975 case INDIRECT_REF:
1da68f56
RK
5976 if (GET_CODE (x) == MEM
5977 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5978 get_alias_set (exp)))
bbf6f052
RK
5979 return 0;
5980 break;
5981
5982 case CALL_EXPR:
f9808f81
MM
5983 /* Assume that the call will clobber all hard registers and
5984 all of memory. */
5985 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5986 || GET_CODE (x) == MEM)
5987 return 0;
bbf6f052
RK
5988 break;
5989
5990 case RTL_EXPR:
3bb5826a
RK
5991 /* If a sequence exists, we would have to scan every instruction
5992 in the sequence to see if it was safe. This is probably not
5993 worthwhile. */
5994 if (RTL_EXPR_SEQUENCE (exp))
bbf6f052
RK
5995 return 0;
5996
3bb5826a 5997 exp_rtl = RTL_EXPR_RTL (exp);
bbf6f052
RK
5998 break;
5999
6000 case WITH_CLEANUP_EXPR:
6ad7895a 6001 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
bbf6f052
RK
6002 break;
6003
5dab5552 6004 case CLEANUP_POINT_EXPR:
e5e809f4 6005 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5dab5552 6006
bbf6f052
RK
6007 case SAVE_EXPR:
6008 exp_rtl = SAVE_EXPR_RTL (exp);
ff439b5f
CB
6009 if (exp_rtl)
6010 break;
6011
1da68f56
RK
6012 /* If we've already scanned this, don't do it again. Otherwise,
6013 show we've scanned it and record for clearing the flag if we're
6014 going on. */
6015 if (TREE_PRIVATE (exp))
6016 return 1;
ff439b5f 6017
1da68f56
RK
6018 TREE_PRIVATE (exp) = 1;
6019 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
ff59bfe6 6020 {
1da68f56
RK
6021 TREE_PRIVATE (exp) = 0;
6022 return 0;
ff59bfe6 6023 }
1da68f56
RK
6024
6025 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
ff439b5f 6026 return 1;
bbf6f052 6027
8129842c
RS
6028 case BIND_EXPR:
6029 /* The only operand we look at is operand 1. The rest aren't
6030 part of the expression. */
e5e809f4 6031 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
8129842c 6032
e9a25f70
JL
6033 default:
6034 break;
bbf6f052
RK
6035 }
6036
6037 /* If we have an rtx, we do not need to scan our operands. */
6038 if (exp_rtl)
6039 break;
6040
8f17b5c5 6041 nops = first_rtl_op (TREE_CODE (exp));
bbf6f052
RK
6042 for (i = 0; i < nops; i++)
6043 if (TREE_OPERAND (exp, i) != 0
e5e809f4 6044 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
bbf6f052 6045 return 0;
8f17b5c5
MM
6046
6047 /* If this is a language-specific tree code, it may require
6048 special handling. */
dbbbbf3b
JDA
6049 if ((unsigned int) TREE_CODE (exp)
6050 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
ae2bcd98 6051 && !lang_hooks.safe_from_p (x, exp))
8f17b5c5 6052 return 0;
bbf6f052
RK
6053 }
6054
6055 /* If we have an rtl, find any enclosed object. Then see if we conflict
6056 with it. */
6057 if (exp_rtl)
6058 {
6059 if (GET_CODE (exp_rtl) == SUBREG)
6060 {
6061 exp_rtl = SUBREG_REG (exp_rtl);
6062 if (GET_CODE (exp_rtl) == REG
6063 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6064 return 0;
6065 }
6066
6067 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
1da68f56 6068 are memory and they conflict. */
bbf6f052
RK
6069 return ! (rtx_equal_p (x, exp_rtl)
6070 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
21117a17 6071 && true_dependence (exp_rtl, VOIDmode, x,
1da68f56 6072 rtx_addr_varies_p)));
bbf6f052
RK
6073 }
6074
6075 /* If we reach here, it is safe. */
6076 return 1;
6077}
6078
01c8a7c8
RK
6079/* Subroutine of expand_expr: return rtx if EXP is a
6080 variable or parameter; else return 0. */
6081
6082static rtx
502b8322 6083var_rtx (tree exp)
01c8a7c8
RK
6084{
6085 STRIP_NOPS (exp);
6086 switch (TREE_CODE (exp))
6087 {
6088 case PARM_DECL:
6089 case VAR_DECL:
6090 return DECL_RTL (exp);
6091 default:
6092 return 0;
6093 }
6094}
14a774a9 6095\f
0d4903b8
RK
6096/* Return the highest power of two that EXP is known to be a multiple of.
6097 This is used in updating alignment of MEMs in array references. */
6098
9ceca302 6099static unsigned HOST_WIDE_INT
502b8322 6100highest_pow2_factor (tree exp)
0d4903b8 6101{
9ceca302 6102 unsigned HOST_WIDE_INT c0, c1;
0d4903b8
RK
6103
6104 switch (TREE_CODE (exp))
6105 {
6106 case INTEGER_CST:
e0f1be5c
JJ
6107 /* We can find the lowest bit that's a one. If the low
6108 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6109 We need to handle this case since we can find it in a COND_EXPR,
a98ebe2e 6110 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
e0f1be5c 6111 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
3a531a8b 6112 later ICE. */
e0f1be5c 6113 if (TREE_CONSTANT_OVERFLOW (exp))
1ed1b4fb 6114 return BIGGEST_ALIGNMENT;
e0f1be5c 6115 else
0d4903b8 6116 {
e0f1be5c
JJ
6117 /* Note: tree_low_cst is intentionally not used here,
6118 we don't care about the upper bits. */
6119 c0 = TREE_INT_CST_LOW (exp);
6120 c0 &= -c0;
6121 return c0 ? c0 : BIGGEST_ALIGNMENT;
0d4903b8
RK
6122 }
6123 break;
6124
65a07688 6125 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
0d4903b8
RK
6126 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6127 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6128 return MIN (c0, c1);
6129
6130 case MULT_EXPR:
6131 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6132 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6133 return c0 * c1;
6134
6135 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6136 case CEIL_DIV_EXPR:
65a07688
RK
6137 if (integer_pow2p (TREE_OPERAND (exp, 1))
6138 && host_integerp (TREE_OPERAND (exp, 1), 1))
6139 {
6140 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6141 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6142 return MAX (1, c0 / c1);
6143 }
6144 break;
0d4903b8
RK
6145
6146 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6fce44af 6147 case SAVE_EXPR:
0d4903b8
RK
6148 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6149
65a07688
RK
6150 case COMPOUND_EXPR:
6151 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6152
0d4903b8
RK
6153 case COND_EXPR:
6154 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6155 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6156 return MIN (c0, c1);
6157
6158 default:
6159 break;
6160 }
6161
6162 return 1;
6163}
818c0c94 6164
d50a16c4
EB
6165/* Similar, except that the alignment requirements of TARGET are
6166 taken into account. Assume it is at least as aligned as its
6167 type, unless it is a COMPONENT_REF in which case the layout of
6168 the structure gives the alignment. */
818c0c94 6169
9ceca302 6170static unsigned HOST_WIDE_INT
d50a16c4 6171highest_pow2_factor_for_target (tree target, tree exp)
818c0c94 6172{
d50a16c4 6173 unsigned HOST_WIDE_INT target_align, factor;
818c0c94
RH
6174
6175 factor = highest_pow2_factor (exp);
d50a16c4
EB
6176 if (TREE_CODE (target) == COMPONENT_REF)
6177 target_align = DECL_ALIGN (TREE_OPERAND (target, 1)) / BITS_PER_UNIT;
6178 else
6179 target_align = TYPE_ALIGN (TREE_TYPE (target)) / BITS_PER_UNIT;
6180 return MAX (factor, target_align);
818c0c94 6181}
0d4903b8 6182\f
6de9cd9a
DN
6183/* Expands variable VAR. */
6184
6185void
6186expand_var (tree var)
6187{
6188 if (DECL_EXTERNAL (var))
6189 return;
6190
6191 if (TREE_STATIC (var))
6192 /* If this is an inlined copy of a static local variable,
6193 look up the original decl. */
6194 var = DECL_ORIGIN (var);
6195
6196 if (TREE_STATIC (var)
6197 ? !TREE_ASM_WRITTEN (var)
6198 : !DECL_RTL_SET_P (var))
6199 {
6200 if (TREE_CODE (var) == VAR_DECL && DECL_DEFER_OUTPUT (var))
6201 {
6202 /* Prepare a mem & address for the decl. */
6203 rtx x;
6204
6205 if (TREE_STATIC (var))
6206 abort ();
6207
6208 x = gen_rtx_MEM (DECL_MODE (var),
6209 gen_reg_rtx (Pmode));
6210
6211 set_mem_attributes (x, var, 1);
6212 SET_DECL_RTL (var, x);
6213 }
6214 else if ((*lang_hooks.expand_decl) (var))
6215 /* OK. */;
6216 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6217 expand_decl (var);
6218 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
6219 rest_of_decl_compilation (var, NULL, 0, 0);
6220 else if (TREE_CODE (var) == TYPE_DECL
6221 || TREE_CODE (var) == CONST_DECL
6222 || TREE_CODE (var) == FUNCTION_DECL
6223 || TREE_CODE (var) == LABEL_DECL)
6224 /* No expansion needed. */;
6225 else
6226 abort ();
6227 }
6228}
6229
6230/* Expands declarations of variables in list VARS. */
6231
6232static void
6233expand_vars (tree vars)
6234{
6235 for (; vars; vars = TREE_CHAIN (vars))
6236 {
6237 tree var = vars;
6238
6239 if (DECL_EXTERNAL (var))
6240 continue;
6241
6242 expand_var (var);
6243 expand_decl_init (var);
6244 }
6245}
6246
eb698c58
RS
6247/* Subroutine of expand_expr. Expand the two operands of a binary
6248 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6249 The value may be stored in TARGET if TARGET is nonzero. The
6250 MODIFIER argument is as documented by expand_expr. */
6251
6252static void
6253expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6254 enum expand_modifier modifier)
6255{
6256 if (! safe_from_p (target, exp1, 1))
6257 target = 0;
6258 if (operand_equal_p (exp0, exp1, 0))
6259 {
6260 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6261 *op1 = copy_rtx (*op0);
6262 }
6263 else
6264 {
c67e6e14
RS
6265 /* If we need to preserve evaluation order, copy exp0 into its own
6266 temporary variable so that it can't be clobbered by exp1. */
6267 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6268 exp0 = save_expr (exp0);
eb698c58
RS
6269 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6270 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6271 }
6272}
6273
f47e9b4e 6274\f
bbf6f052
RK
6275/* expand_expr: generate code for computing expression EXP.
6276 An rtx for the computed value is returned. The value is never null.
6277 In the case of a void EXP, const0_rtx is returned.
6278
6279 The value may be stored in TARGET if TARGET is nonzero.
6280 TARGET is just a suggestion; callers must assume that
6281 the rtx returned may not be the same as TARGET.
6282
6283 If TARGET is CONST0_RTX, it means that the value will be ignored.
6284
6285 If TMODE is not VOIDmode, it suggests generating the
6286 result in mode TMODE. But this is done only when convenient.
6287 Otherwise, TMODE is ignored and the value generated in its natural mode.
6288 TMODE is just a suggestion; callers must assume that
6289 the rtx returned may not have mode TMODE.
6290
d6a5ac33
RK
6291 Note that TARGET may have neither TMODE nor MODE. In that case, it
6292 probably will not be used.
bbf6f052
RK
6293
6294 If MODIFIER is EXPAND_SUM then when EXP is an addition
6295 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6296 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6297 products as above, or REG or MEM, or constant.
6298 Ordinarily in such cases we would output mul or add instructions
6299 and then return a pseudo reg containing the sum.
6300
6301 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6302 it also marks a label as absolutely required (it can't be dead).
26fcb35a 6303 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
d6a5ac33
RK
6304 This is used for outputting expressions used in initializers.
6305
6306 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6307 with a constant address even if that address is not normally legitimate.
8403445a
AM
6308 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6309
6310 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6311 a call parameter. Such targets require special care as we haven't yet
6312 marked TARGET so that it's safe from being trashed by libcalls. We
6313 don't want to use TARGET for anything but the final result;
6314 Intermediate values must go elsewhere. Additionally, calls to
0fab64a3
MM
6315 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6316
6317 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6318 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6319 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6320 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6321 recursively. */
bbf6f052 6322
6de9cd9a
DN
6323static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6324 enum expand_modifier, rtx *);
6325
bbf6f052 6326rtx
0fab64a3
MM
6327expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6328 enum expand_modifier modifier, rtx *alt_rtl)
6de9cd9a
DN
6329{
6330 int rn = -1;
6331 rtx ret, last = NULL;
6332
6333 /* Handle ERROR_MARK before anybody tries to access its type. */
6334 if (TREE_CODE (exp) == ERROR_MARK
6335 || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
6336 {
6337 ret = CONST0_RTX (tmode);
6338 return ret ? ret : const0_rtx;
6339 }
6340
6341 if (flag_non_call_exceptions)
6342 {
6343 rn = lookup_stmt_eh_region (exp);
6344 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
6345 if (rn >= 0)
6346 last = get_last_insn ();
6347 }
6348
6349 /* If this is an expression of some kind and it has an associated line
6350 number, then emit the line number before expanding the expression.
6351
6352 We need to save and restore the file and line information so that
6353 errors discovered during expansion are emitted with the right
6354 information. It would be better of the diagnostic routines
6355 used the file/line information embedded in the tree nodes rather
6356 than globals. */
6357 if (cfun && EXPR_HAS_LOCATION (exp))
6358 {
6359 location_t saved_location = input_location;
6360 input_location = EXPR_LOCATION (exp);
6361 emit_line_note (input_location);
6362
6363 /* Record where the insns produced belong. */
6364 if (cfun->dont_emit_block_notes)
6365 record_block_change (TREE_BLOCK (exp));
6366
6367 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6368
6369 input_location = saved_location;
6370 }
6371 else
6372 {
6373 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6374 }
6375
6376 /* If using non-call exceptions, mark all insns that may trap.
6377 expand_call() will mark CALL_INSNs before we get to this code,
6378 but it doesn't handle libcalls, and these may trap. */
6379 if (rn >= 0)
6380 {
6381 rtx insn;
6382 for (insn = next_real_insn (last); insn;
6383 insn = next_real_insn (insn))
6384 {
6385 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6386 /* If we want exceptions for non-call insns, any
6387 may_trap_p instruction may throw. */
6388 && GET_CODE (PATTERN (insn)) != CLOBBER
6389 && GET_CODE (PATTERN (insn)) != USE
6390 && (GET_CODE (insn) == CALL_INSN || may_trap_p (PATTERN (insn))))
6391 {
6392 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6393 REG_NOTES (insn));
6394 }
6395 }
6396 }
6397
6398 return ret;
6399}
6400
6401static rtx
6402expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6403 enum expand_modifier modifier, rtx *alt_rtl)
bbf6f052 6404{
b3694847 6405 rtx op0, op1, temp;
bbf6f052 6406 tree type = TREE_TYPE (exp);
8df83eae 6407 int unsignedp;
b3694847
SS
6408 enum machine_mode mode;
6409 enum tree_code code = TREE_CODE (exp);
bbf6f052 6410 optab this_optab;
68557e14
ML
6411 rtx subtarget, original_target;
6412 int ignore;
bbf6f052
RK
6413 tree context;
6414
68557e14 6415 mode = TYPE_MODE (type);
8df83eae
RK
6416 unsignedp = TYPE_UNSIGNED (type);
6417
68557e14 6418 /* Use subtarget as the target for operand 0 of a binary operation. */
296b4ed9 6419 subtarget = get_subtarget (target);
68557e14
ML
6420 original_target = target;
6421 ignore = (target == const0_rtx
6422 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6423 || code == CONVERT_EXPR || code == REFERENCE_EXPR
ac79cd5a 6424 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
68557e14
ML
6425 && TREE_CODE (type) == VOID_TYPE));
6426
dd27116b
RK
6427 /* If we are going to ignore this result, we need only do something
6428 if there is a side-effect somewhere in the expression. If there
b50d17a1
RK
6429 is, short-circuit the most common cases here. Note that we must
6430 not call expand_expr with anything but const0_rtx in case this
6431 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
bbf6f052 6432
dd27116b
RK
6433 if (ignore)
6434 {
6435 if (! TREE_SIDE_EFFECTS (exp))
6436 return const0_rtx;
6437
14a774a9
RK
6438 /* Ensure we reference a volatile object even if value is ignored, but
6439 don't do this if all we are doing is taking its address. */
dd27116b
RK
6440 if (TREE_THIS_VOLATILE (exp)
6441 && TREE_CODE (exp) != FUNCTION_DECL
14a774a9
RK
6442 && mode != VOIDmode && mode != BLKmode
6443 && modifier != EXPAND_CONST_ADDRESS)
dd27116b 6444 {
37a08a29 6445 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
dd27116b
RK
6446 if (GET_CODE (temp) == MEM)
6447 temp = copy_to_reg (temp);
6448 return const0_rtx;
6449 }
6450
14a774a9
RK
6451 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6452 || code == INDIRECT_REF || code == BUFFER_REF)
37a08a29
RK
6453 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6454 modifier);
6455
14a774a9 6456 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
b4e3fabb 6457 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
dd27116b 6458 {
37a08a29
RK
6459 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6460 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
dd27116b
RK
6461 return const0_rtx;
6462 }
6463 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6464 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6465 /* If the second operand has no side effects, just evaluate
0f41302f 6466 the first. */
37a08a29
RK
6467 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6468 modifier);
14a774a9
RK
6469 else if (code == BIT_FIELD_REF)
6470 {
37a08a29
RK
6471 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6472 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6473 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
14a774a9
RK
6474 return const0_rtx;
6475 }
37a08a29 6476
90764a87 6477 target = 0;
dd27116b 6478 }
bbf6f052 6479
e44842fe
RK
6480 /* If will do cse, generate all results into pseudo registers
6481 since 1) that allows cse to find more things
6482 and 2) otherwise cse could produce an insn the machine
4977bab6
ZW
6483 cannot support. An exception is a CONSTRUCTOR into a multi-word
6484 MEM: that's much more likely to be most efficient into the MEM.
6485 Another is a CALL_EXPR which must return in memory. */
e44842fe 6486
bbf6f052 6487 if (! cse_not_expected && mode != BLKmode && target
c24ae149 6488 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
4977bab6 6489 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
61f71b34 6490 && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
8403445a 6491 target = 0;
bbf6f052 6492
bbf6f052
RK
6493 switch (code)
6494 {
6495 case LABEL_DECL:
b552441b
RS
6496 {
6497 tree function = decl_function_context (exp);
c5c76735 6498
6de9cd9a
DN
6499 temp = label_rtx (exp);
6500 temp = gen_rtx_LABEL_REF (Pmode, temp);
6501
d0977240 6502 if (function != current_function_decl
6de9cd9a
DN
6503 && function != 0)
6504 LABEL_REF_NONLOCAL_P (temp) = 1;
6505
6506 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
26fcb35a 6507 return temp;
b552441b 6508 }
bbf6f052
RK
6509
6510 case PARM_DECL:
1877be45 6511 if (!DECL_RTL_SET_P (exp))
bbf6f052 6512 {
ddd2d57e 6513 error ("%Jprior parameter's size depends on '%D'", exp, exp);
4af3895e 6514 return CONST0_RTX (mode);
bbf6f052
RK
6515 }
6516
0f41302f 6517 /* ... fall through ... */
d6a5ac33 6518
bbf6f052 6519 case VAR_DECL:
2dca20cd
RS
6520 /* If a static var's type was incomplete when the decl was written,
6521 but the type is complete now, lay out the decl now. */
ca06cfe6
RH
6522 if (DECL_SIZE (exp) == 0
6523 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
2dca20cd 6524 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
a46666a9 6525 layout_decl (exp, 0);
921b3427 6526
0f41302f 6527 /* ... fall through ... */
d6a5ac33 6528
2dca20cd 6529 case FUNCTION_DECL:
bbf6f052
RK
6530 case RESULT_DECL:
6531 if (DECL_RTL (exp) == 0)
6532 abort ();
d6a5ac33 6533
e44842fe
RK
6534 /* Ensure variable marked as used even if it doesn't go through
6535 a parser. If it hasn't be used yet, write out an external
6536 definition. */
6537 if (! TREE_USED (exp))
6538 {
6539 assemble_external (exp);
6540 TREE_USED (exp) = 1;
6541 }
6542
dc6d66b3
RK
6543 /* Show we haven't gotten RTL for this yet. */
6544 temp = 0;
6545
bbf6f052
RK
6546 /* Handle variables inherited from containing functions. */
6547 context = decl_function_context (exp);
6548
bbf6f052 6549 if (context != 0 && context != current_function_decl
bbf6f052
RK
6550 /* If var is static, we don't need a static chain to access it. */
6551 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6552 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6553 {
6554 rtx addr;
6555
6556 /* Mark as non-local and addressable. */
81feeecb 6557 DECL_NONLOCAL (exp) = 1;
38ee6ed9
JM
6558 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6559 abort ();
ae2bcd98 6560 lang_hooks.mark_addressable (exp);
bbf6f052
RK
6561 if (GET_CODE (DECL_RTL (exp)) != MEM)
6562 abort ();
6563 addr = XEXP (DECL_RTL (exp), 0);
6564 if (GET_CODE (addr) == MEM)
792760b9
RK
6565 addr
6566 = replace_equiv_address (addr,
6567 fix_lexical_addr (XEXP (addr, 0), exp));
bbf6f052
RK
6568 else
6569 addr = fix_lexical_addr (addr, exp);
3bdf5ad1 6570
792760b9 6571 temp = replace_equiv_address (DECL_RTL (exp), addr);
bbf6f052 6572 }
4af3895e 6573
bbf6f052
RK
6574 /* This is the case of an array whose size is to be determined
6575 from its initializer, while the initializer is still being parsed.
6576 See expand_decl. */
d6a5ac33 6577
dc6d66b3
RK
6578 else if (GET_CODE (DECL_RTL (exp)) == MEM
6579 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
792760b9 6580 temp = validize_mem (DECL_RTL (exp));
d6a5ac33
RK
6581
6582 /* If DECL_RTL is memory, we are in the normal case and either
6583 the address is not valid or it is not a register and -fforce-addr
6584 is specified, get the address into a register. */
6585
dc6d66b3
RK
6586 else if (GET_CODE (DECL_RTL (exp)) == MEM
6587 && modifier != EXPAND_CONST_ADDRESS
6588 && modifier != EXPAND_SUM
6589 && modifier != EXPAND_INITIALIZER
6590 && (! memory_address_p (DECL_MODE (exp),
6591 XEXP (DECL_RTL (exp), 0))
6592 || (flag_force_addr
6593 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
0fab64a3
MM
6594 {
6595 if (alt_rtl)
6596 *alt_rtl = DECL_RTL (exp);
6597 temp = replace_equiv_address (DECL_RTL (exp),
6598 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6599 }
1499e0a8 6600
dc6d66b3 6601 /* If we got something, return it. But first, set the alignment
04956a1a 6602 if the address is a register. */
dc6d66b3
RK
6603 if (temp != 0)
6604 {
6605 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
bdb429a5 6606 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
dc6d66b3
RK
6607
6608 return temp;
6609 }
6610
1499e0a8
RK
6611 /* If the mode of DECL_RTL does not match that of the decl, it
6612 must be a promoted value. We return a SUBREG of the wanted mode,
6613 but mark it so that we know that it was already extended. */
6614
6615 if (GET_CODE (DECL_RTL (exp)) == REG
7254c5fa 6616 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
1499e0a8 6617 {
1499e0a8
RK
6618 /* Get the signedness used for this variable. Ensure we get the
6619 same mode we got when the variable was declared. */
78911e8b 6620 if (GET_MODE (DECL_RTL (exp))
0fb7aeda 6621 != promote_mode (type, DECL_MODE (exp), &unsignedp,
e8dcd824 6622 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
1499e0a8
RK
6623 abort ();
6624
ddef6bc7 6625 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
1499e0a8 6626 SUBREG_PROMOTED_VAR_P (temp) = 1;
7879b81e 6627 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
1499e0a8
RK
6628 return temp;
6629 }
6630
bbf6f052
RK
6631 return DECL_RTL (exp);
6632
6633 case INTEGER_CST:
d8a50944 6634 temp = immed_double_const (TREE_INT_CST_LOW (exp),
05bccae2 6635 TREE_INT_CST_HIGH (exp), mode);
bbf6f052 6636
d8a50944
RH
6637 /* ??? If overflow is set, fold will have done an incomplete job,
6638 which can result in (plus xx (const_int 0)), which can get
6639 simplified by validate_replace_rtx during virtual register
6640 instantiation, which can result in unrecognizable insns.
6641 Avoid this by forcing all overflows into registers. */
c2e9dc85
RH
6642 if (TREE_CONSTANT_OVERFLOW (exp)
6643 && modifier != EXPAND_INITIALIZER)
d8a50944
RH
6644 temp = force_reg (mode, temp);
6645
6646 return temp;
6647
d744e06e
AH
6648 case VECTOR_CST:
6649 return const_vector_from_tree (exp);
6650
bbf6f052 6651 case CONST_DECL:
8403445a 6652 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
bbf6f052
RK
6653
6654 case REAL_CST:
6655 /* If optimized, generate immediate CONST_DOUBLE
3a94c984
KH
6656 which will be turned into memory by reload if necessary.
6657
bbf6f052
RK
6658 We used to force a register so that loop.c could see it. But
6659 this does not allow gen_* patterns to perform optimizations with
6660 the constants. It also produces two insns in cases like "x = 1.0;".
6661 On most machines, floating-point constants are not permitted in
6662 many insns, so we'd end up copying it to a register in any case.
6663
6664 Now, we do the copying in expand_binop, if appropriate. */
5692c7bc
ZW
6665 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6666 TYPE_MODE (TREE_TYPE (exp)));
bbf6f052
RK
6667
6668 case COMPLEX_CST:
9ad58e09
RS
6669 /* Handle evaluating a complex constant in a CONCAT target. */
6670 if (original_target && GET_CODE (original_target) == CONCAT)
6671 {
6672 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6673 rtx rtarg, itarg;
6674
6675 rtarg = XEXP (original_target, 0);
6676 itarg = XEXP (original_target, 1);
6677
6678 /* Move the real and imaginary parts separately. */
6679 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6680 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6681
6682 if (op0 != rtarg)
6683 emit_move_insn (rtarg, op0);
6684 if (op1 != itarg)
6685 emit_move_insn (itarg, op1);
6686
6687 return original_target;
6688 }
6689
71c0e7fc 6690 /* ... fall through ... */
9ad58e09 6691
bbf6f052 6692 case STRING_CST:
afc6aaab 6693 temp = output_constant_def (exp, 1);
bbf6f052 6694
afc6aaab 6695 /* temp contains a constant address.
bbf6f052
RK
6696 On RISC machines where a constant address isn't valid,
6697 make some insns to get that address into a register. */
afc6aaab 6698 if (modifier != EXPAND_CONST_ADDRESS
bbf6f052
RK
6699 && modifier != EXPAND_INITIALIZER
6700 && modifier != EXPAND_SUM
afc6aaab
ZW
6701 && (! memory_address_p (mode, XEXP (temp, 0))
6702 || flag_force_addr))
6703 return replace_equiv_address (temp,
6704 copy_rtx (XEXP (temp, 0)));
6705 return temp;
bbf6f052
RK
6706
6707 case SAVE_EXPR:
6708 context = decl_function_context (exp);
d6a5ac33 6709
d0977240
RK
6710 /* If this SAVE_EXPR was at global context, assume we are an
6711 initialization function and move it into our context. */
6712 if (context == 0)
6713 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6714
6de9cd9a 6715 if (context == current_function_decl)
bbf6f052
RK
6716 context = 0;
6717
6718 /* If this is non-local, handle it. */
6719 if (context)
6720 {
d0977240
RK
6721 /* The following call just exists to abort if the context is
6722 not of a containing function. */
6723 find_function_data (context);
6724
bbf6f052
RK
6725 temp = SAVE_EXPR_RTL (exp);
6726 if (temp && GET_CODE (temp) == REG)
6727 {
f29a2bd1 6728 put_var_into_stack (exp, /*rescan=*/true);
bbf6f052
RK
6729 temp = SAVE_EXPR_RTL (exp);
6730 }
6731 if (temp == 0 || GET_CODE (temp) != MEM)
6732 abort ();
792760b9
RK
6733 return
6734 replace_equiv_address (temp,
6735 fix_lexical_addr (XEXP (temp, 0), exp));
bbf6f052
RK
6736 }
6737 if (SAVE_EXPR_RTL (exp) == 0)
6738 {
06089a8b
RK
6739 if (mode == VOIDmode)
6740 temp = const0_rtx;
6741 else
1da68f56
RK
6742 temp = assign_temp (build_qualified_type (type,
6743 (TYPE_QUALS (type)
6744 | TYPE_QUAL_CONST)),
6745 3, 0, 0);
1499e0a8 6746
bbf6f052 6747 SAVE_EXPR_RTL (exp) = temp;
bbf6f052 6748 if (!optimize && GET_CODE (temp) == REG)
38a448ca
RH
6749 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6750 save_expr_regs);
ff78f773
RK
6751
6752 /* If the mode of TEMP does not match that of the expression, it
6753 must be a promoted value. We pass store_expr a SUBREG of the
6754 wanted mode but mark it so that we know that it was already
3ac1a319 6755 extended. */
ff78f773
RK
6756
6757 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6758 {
ddef6bc7 6759 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
3ac1a319 6760 promote_mode (type, mode, &unsignedp, 0);
ff78f773 6761 SUBREG_PROMOTED_VAR_P (temp) = 1;
7879b81e 6762 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
ff78f773
RK
6763 }
6764
4c7a0be9 6765 if (temp == const0_rtx)
37a08a29 6766 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4c7a0be9 6767 else
8403445a
AM
6768 store_expr (TREE_OPERAND (exp, 0), temp,
6769 modifier == EXPAND_STACK_PARM ? 2 : 0);
e5e809f4
JL
6770
6771 TREE_USED (exp) = 1;
bbf6f052 6772 }
1499e0a8
RK
6773
6774 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6775 must be a promoted value. We return a SUBREG of the wanted mode,
0f41302f 6776 but mark it so that we know that it was already extended. */
1499e0a8
RK
6777
6778 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6779 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6780 {
e70d22c8
RK
6781 /* Compute the signedness and make the proper SUBREG. */
6782 promote_mode (type, mode, &unsignedp, 0);
ddef6bc7 6783 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
1499e0a8 6784 SUBREG_PROMOTED_VAR_P (temp) = 1;
7879b81e 6785 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
1499e0a8
RK
6786 return temp;
6787 }
6788
bbf6f052
RK
6789 return SAVE_EXPR_RTL (exp);
6790
679163cf
MS
6791 case UNSAVE_EXPR:
6792 {
6793 rtx temp;
6794 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
24965e7a 6795 TREE_OPERAND (exp, 0)
ae2bcd98 6796 = lang_hooks.unsave_expr_now (TREE_OPERAND (exp, 0));
679163cf
MS
6797 return temp;
6798 }
6799
70e6ca43
APB
6800 case GOTO_EXPR:
6801 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6802 expand_goto (TREE_OPERAND (exp, 0));
6803 else
6804 expand_computed_goto (TREE_OPERAND (exp, 0));
6805 return const0_rtx;
6806
bbf6f052 6807 case EXIT_EXPR:
df4ae160 6808 expand_exit_loop_if_false (NULL,
e44842fe 6809 invert_truthvalue (TREE_OPERAND (exp, 0)));
bbf6f052
RK
6810 return const0_rtx;
6811
f42e28dd
APB
6812 case LABELED_BLOCK_EXPR:
6813 if (LABELED_BLOCK_BODY (exp))
b0832fe1 6814 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
30f7a378 6815 /* Should perhaps use expand_label, but this is simpler and safer. */
0a5fee32 6816 do_pending_stack_adjust ();
f42e28dd
APB
6817 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6818 return const0_rtx;
6819
6820 case EXIT_BLOCK_EXPR:
6821 if (EXIT_BLOCK_RETURN (exp))
ab87f8c8 6822 sorry ("returned value in block_exit_expr");
f42e28dd
APB
6823 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6824 return const0_rtx;
6825
bbf6f052 6826 case LOOP_EXPR:
0088fcb1 6827 push_temp_slots ();
bbf6f052 6828 expand_start_loop (1);
b0832fe1 6829 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
bbf6f052 6830 expand_end_loop ();
0088fcb1 6831 pop_temp_slots ();
bbf6f052
RK
6832
6833 return const0_rtx;
6834
6835 case BIND_EXPR:
6836 {
6de9cd9a
DN
6837 tree block = BIND_EXPR_BLOCK (exp);
6838 int mark_ends;
bbf6f052 6839
6de9cd9a
DN
6840 if (TREE_CODE (BIND_EXPR_BODY (exp)) != RTL_EXPR)
6841 {
6842 /* If we're in functions-as-trees mode, this BIND_EXPR represents
6843 the block, so we need to emit NOTE_INSN_BLOCK_* notes. */
6844 mark_ends = (block != NULL_TREE);
6845 expand_start_bindings_and_block (mark_ends ? 0 : 2, block);
6846 }
6847 else
6848 {
6849 /* If we're not in functions-as-trees mode, we've already emitted
6850 those notes into our RTL_EXPR, so we just want to splice our BLOCK
6851 into the enclosing one. */
6852 mark_ends = 0;
bbf6f052 6853
6de9cd9a
DN
6854 /* Need to open a binding contour here because
6855 if there are any cleanups they must be contained here. */
6856 expand_start_bindings_and_block (2, NULL_TREE);
bbf6f052 6857
6de9cd9a
DN
6858 /* Mark the corresponding BLOCK for output in its proper place. */
6859 if (block)
6860 {
6861 if (TREE_USED (block))
6862 abort ();
6863 (*lang_hooks.decls.insert_block) (block);
6864 }
bbf6f052
RK
6865 }
6866
6de9cd9a
DN
6867 /* If VARS have not yet been expanded, expand them now. */
6868 expand_vars (BIND_EXPR_VARS (exp));
6869
6870 /* TARGET was clobbered early in this function. The correct
6871 indicator or whether or not we need the value of this
6872 expression is the IGNORE variable. */
6873 temp = expand_expr (BIND_EXPR_BODY (exp),
6874 ignore ? const0_rtx : target,
6875 tmode, modifier);
bbf6f052 6876
6de9cd9a 6877 expand_end_bindings (BIND_EXPR_VARS (exp), mark_ends, 0);
bbf6f052
RK
6878
6879 return temp;
6880 }
6881
6882 case RTL_EXPR:
83b853c9
JM
6883 if (RTL_EXPR_SEQUENCE (exp))
6884 {
6885 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6886 abort ();
2f937369 6887 emit_insn (RTL_EXPR_SEQUENCE (exp));
83b853c9
JM
6888 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6889 }
64dc53f3
MM
6890 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6891 free_temps_for_rtl_expr (exp);
0fab64a3
MM
6892 if (alt_rtl)
6893 *alt_rtl = RTL_EXPR_ALT_RTL (exp);
bbf6f052
RK
6894 return RTL_EXPR_RTL (exp);
6895
6896 case CONSTRUCTOR:
dd27116b
RK
6897 /* If we don't need the result, just ensure we evaluate any
6898 subexpressions. */
6899 if (ignore)
6900 {
6901 tree elt;
37a08a29 6902
dd27116b 6903 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
37a08a29
RK
6904 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6905
dd27116b
RK
6906 return const0_rtx;
6907 }
3207b172 6908
4af3895e
JVA
6909 /* All elts simple constants => refer to a constant in memory. But
6910 if this is a non-BLKmode mode, let it store a field at a time
6911 since that should make a CONST_INT or CONST_DOUBLE when we
3207b172 6912 fold. Likewise, if we have a target we can use, it is best to
d720b9d1
RK
6913 store directly into the target unless the type is large enough
6914 that memcpy will be used. If we are making an initializer and
00182e1e
AH
6915 all operands are constant, put it in memory as well.
6916
6917 FIXME: Avoid trying to fill vector constructors piece-meal.
6918 Output them with output_constant_def below unless we're sure
6919 they're zeros. This should go away when vector initializers
6920 are treated like VECTOR_CST instead of arrays.
6921 */
dd27116b 6922 else if ((TREE_STATIC (exp)
3207b172 6923 && ((mode == BLKmode
e5e809f4 6924 && ! (target != 0 && safe_from_p (target, exp, 1)))
d720b9d1 6925 || TREE_ADDRESSABLE (exp)
19caa751 6926 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
3a94c984 6927 && (! MOVE_BY_PIECES_P
19caa751
RK
6928 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6929 TYPE_ALIGN (type)))
6de9cd9a 6930 && ! mostly_zeros_p (exp))))
f59700f9
RK
6931 || ((modifier == EXPAND_INITIALIZER
6932 || modifier == EXPAND_CONST_ADDRESS)
6933 && TREE_CONSTANT (exp)))
bbf6f052 6934 {
bd7cf17e 6935 rtx constructor = output_constant_def (exp, 1);
19caa751 6936
b552441b
RS
6937 if (modifier != EXPAND_CONST_ADDRESS
6938 && modifier != EXPAND_INITIALIZER
792760b9
RK
6939 && modifier != EXPAND_SUM)
6940 constructor = validize_mem (constructor);
6941
bbf6f052
RK
6942 return constructor;
6943 }
bbf6f052
RK
6944 else
6945 {
e9ac02a6
JW
6946 /* Handle calls that pass values in multiple non-contiguous
6947 locations. The Irix 6 ABI has examples of this. */
e5e809f4 6948 if (target == 0 || ! safe_from_p (target, exp, 1)
8403445a
AM
6949 || GET_CODE (target) == PARALLEL
6950 || modifier == EXPAND_STACK_PARM)
1da68f56
RK
6951 target
6952 = assign_temp (build_qualified_type (type,
6953 (TYPE_QUALS (type)
6954 | (TREE_READONLY (exp)
6955 * TYPE_QUAL_CONST))),
c24ae149 6956 0, TREE_ADDRESSABLE (exp), 1);
07604beb 6957
dbb5c281 6958 store_constructor (exp, target, 0, int_expr_size (exp));
bbf6f052
RK
6959 return target;
6960 }
6961
6962 case INDIRECT_REF:
6963 {
6964 tree exp1 = TREE_OPERAND (exp, 0);
3a94c984 6965
6de9cd9a
DN
6966 if (modifier != EXPAND_WRITE)
6967 {
6968 tree t;
6969
6970 t = fold_read_from_constant_string (exp);
6971 if (t)
6972 return expand_expr (t, target, tmode, modifier);
6973 }
bbf6f052 6974
405f0da6
JW
6975 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6976 op0 = memory_address (mode, op0);
38a448ca 6977 temp = gen_rtx_MEM (mode, op0);
3bdf5ad1 6978 set_mem_attributes (temp, exp, 0);
1125706f 6979
14a774a9
RK
6980 /* If we are writing to this object and its type is a record with
6981 readonly fields, we must mark it as readonly so it will
6982 conflict with readonly references to those fields. */
37a08a29 6983 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
14a774a9
RK
6984 RTX_UNCHANGING_P (temp) = 1;
6985
8c8a8e34
JW
6986 return temp;
6987 }
bbf6f052
RK
6988
6989 case ARRAY_REF:
6de9cd9a
DN
6990
6991#ifdef ENABLE_CHECKING
742920c7
RK
6992 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6993 abort ();
6de9cd9a 6994#endif
bbf6f052 6995
bbf6f052 6996 {
742920c7
RK
6997 tree array = TREE_OPERAND (exp, 0);
6998 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6999 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
fed3cef0 7000 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
08293add 7001 HOST_WIDE_INT i;
b50d17a1 7002
d4c89139
PB
7003 /* Optimize the special-case of a zero lower bound.
7004
7005 We convert the low_bound to sizetype to avoid some problems
7006 with constant folding. (E.g. suppose the lower bound is 1,
7007 and its mode is QI. Without the conversion, (ARRAY
7008 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
fed3cef0 7009 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
d4c89139 7010
742920c7 7011 if (! integer_zerop (low_bound))
fed3cef0 7012 index = size_diffop (index, convert (sizetype, low_bound));
742920c7 7013
742920c7 7014 /* Fold an expression like: "foo"[2].
ad2e7dd0
RK
7015 This is not done in fold so it won't happen inside &.
7016 Don't fold if this is for wide characters since it's too
7017 difficult to do correctly and this is a very rare case. */
742920c7 7018
017e1b43
RH
7019 if (modifier != EXPAND_CONST_ADDRESS
7020 && modifier != EXPAND_INITIALIZER
6de9cd9a
DN
7021 && modifier != EXPAND_MEMORY)
7022 {
7023 tree t = fold_read_from_constant_string (exp);
7024
7025 if (t)
7026 return expand_expr (t, target, tmode, modifier);
7027 }
bbf6f052 7028
742920c7
RK
7029 /* If this is a constant index into a constant array,
7030 just get the value from the array. Handle both the cases when
7031 we have an explicit constructor and when our operand is a variable
7032 that was declared const. */
4af3895e 7033
017e1b43
RH
7034 if (modifier != EXPAND_CONST_ADDRESS
7035 && modifier != EXPAND_INITIALIZER
7036 && modifier != EXPAND_MEMORY
7037 && TREE_CODE (array) == CONSTRUCTOR
7038 && ! TREE_SIDE_EFFECTS (array)
05bccae2 7039 && TREE_CODE (index) == INTEGER_CST
3a94c984 7040 && 0 > compare_tree_int (index,
05bccae2
RK
7041 list_length (CONSTRUCTOR_ELTS
7042 (TREE_OPERAND (exp, 0)))))
742920c7 7043 {
05bccae2
RK
7044 tree elem;
7045
7046 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7047 i = TREE_INT_CST_LOW (index);
7048 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
7049 ;
7050
7051 if (elem)
37a08a29
RK
7052 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
7053 modifier);
742920c7 7054 }
3a94c984 7055
742920c7 7056 else if (optimize >= 1
cb5fa0f8
RK
7057 && modifier != EXPAND_CONST_ADDRESS
7058 && modifier != EXPAND_INITIALIZER
017e1b43 7059 && modifier != EXPAND_MEMORY
742920c7
RK
7060 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7061 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
beb0c2e0
RH
7062 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
7063 && targetm.binds_local_p (array))
742920c7 7064 {
08293add 7065 if (TREE_CODE (index) == INTEGER_CST)
742920c7
RK
7066 {
7067 tree init = DECL_INITIAL (array);
7068
742920c7
RK
7069 if (TREE_CODE (init) == CONSTRUCTOR)
7070 {
665f2503 7071 tree elem;
742920c7 7072
05bccae2 7073 for (elem = CONSTRUCTOR_ELTS (init);
5cb1bea4
JM
7074 (elem
7075 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
05bccae2
RK
7076 elem = TREE_CHAIN (elem))
7077 ;
7078
c54b0a5e 7079 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
742920c7 7080 return expand_expr (fold (TREE_VALUE (elem)), target,
37a08a29 7081 tmode, modifier);
742920c7
RK
7082 }
7083 else if (TREE_CODE (init) == STRING_CST
05bccae2
RK
7084 && 0 > compare_tree_int (index,
7085 TREE_STRING_LENGTH (init)))
5c80f6e6
JJ
7086 {
7087 tree type = TREE_TYPE (TREE_TYPE (init));
7088 enum machine_mode mode = TYPE_MODE (type);
7089
7090 if (GET_MODE_CLASS (mode) == MODE_INT
7091 && GET_MODE_SIZE (mode) == 1)
21ef78aa
DE
7092 return gen_int_mode (TREE_STRING_POINTER (init)
7093 [TREE_INT_CST_LOW (index)], mode);
5c80f6e6 7094 }
742920c7
RK
7095 }
7096 }
7097 }
afc6aaab 7098 goto normal_inner_ref;
bbf6f052
RK
7099
7100 case COMPONENT_REF:
4af3895e 7101 /* If the operand is a CONSTRUCTOR, we can just extract the
afc6aaab
ZW
7102 appropriate field if it is present. */
7103 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
4af3895e
JVA
7104 {
7105 tree elt;
7106
7107 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
7108 elt = TREE_CHAIN (elt))
86b5812c
RK
7109 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
7110 /* We can normally use the value of the field in the
7111 CONSTRUCTOR. However, if this is a bitfield in
7112 an integral mode that we can fit in a HOST_WIDE_INT,
7113 we must mask only the number of bits in the bitfield,
7114 since this is done implicitly by the constructor. If
7115 the bitfield does not meet either of those conditions,
7116 we can't do this optimization. */
7117 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
7118 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
7119 == MODE_INT)
7120 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
7121 <= HOST_BITS_PER_WIDE_INT))))
7122 {
8403445a
AM
7123 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
7124 && modifier == EXPAND_STACK_PARM)
7125 target = 0;
3a94c984 7126 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
86b5812c
RK
7127 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
7128 {
9df2c88c
RK
7129 HOST_WIDE_INT bitsize
7130 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
22273300
JJ
7131 enum machine_mode imode
7132 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
86b5812c 7133
8df83eae 7134 if (TYPE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
86b5812c
RK
7135 {
7136 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
22273300 7137 op0 = expand_and (imode, op0, op1, target);
86b5812c
RK
7138 }
7139 else
7140 {
7141 tree count
e5e809f4
JL
7142 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7143 0);
86b5812c
RK
7144
7145 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7146 target, 0);
7147 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7148 target, 0);
7149 }
7150 }
7151
7152 return op0;
7153 }
4af3895e 7154 }
afc6aaab 7155 goto normal_inner_ref;
4af3895e 7156
afc6aaab
ZW
7157 case BIT_FIELD_REF:
7158 case ARRAY_RANGE_REF:
7159 normal_inner_ref:
bbf6f052
RK
7160 {
7161 enum machine_mode mode1;
770ae6cc 7162 HOST_WIDE_INT bitsize, bitpos;
7bb0943f 7163 tree offset;
bbf6f052 7164 int volatilep = 0;
839c4796 7165 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
a06ef755 7166 &mode1, &unsignedp, &volatilep);
f47e9b4e 7167 rtx orig_op0;
bbf6f052 7168
e7f3c83f
RK
7169 /* If we got back the original object, something is wrong. Perhaps
7170 we are evaluating an expression too early. In any event, don't
7171 infinitely recurse. */
7172 if (tem == exp)
7173 abort ();
7174
3d27140a 7175 /* If TEM's type is a union of variable size, pass TARGET to the inner
b74f5ff2
RK
7176 computation, since it will need a temporary and TARGET is known
7177 to have to do. This occurs in unchecked conversion in Ada. */
3a94c984 7178
f47e9b4e
RK
7179 orig_op0 = op0
7180 = expand_expr (tem,
7181 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7182 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7183 != INTEGER_CST)
8403445a 7184 && modifier != EXPAND_STACK_PARM
f47e9b4e
RK
7185 ? target : NULL_RTX),
7186 VOIDmode,
7187 (modifier == EXPAND_INITIALIZER
8403445a
AM
7188 || modifier == EXPAND_CONST_ADDRESS
7189 || modifier == EXPAND_STACK_PARM)
f47e9b4e 7190 ? modifier : EXPAND_NORMAL);
bbf6f052 7191
8c8a8e34 7192 /* If this is a constant, put it into a register if it is a
14a774a9 7193 legitimate constant and OFFSET is 0 and memory if it isn't. */
8c8a8e34
JW
7194 if (CONSTANT_P (op0))
7195 {
7196 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
14a774a9
RK
7197 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7198 && offset == 0)
8c8a8e34
JW
7199 op0 = force_reg (mode, op0);
7200 else
7201 op0 = validize_mem (force_const_mem (mode, op0));
7202 }
7203
8d2e5f72
RK
7204 /* Otherwise, if this object not in memory and we either have an
7205 offset or a BLKmode result, put it there. This case can't occur in
7206 C, but can in Ada if we have unchecked conversion of an expression
7207 from a scalar type to an array or record type or for an
7208 ARRAY_RANGE_REF whose type is BLKmode. */
7209 else if (GET_CODE (op0) != MEM
7210 && (offset != 0
7211 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7212 {
7213 /* If the operand is a SAVE_EXPR, we can deal with this by
7214 forcing the SAVE_EXPR into memory. */
7215 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7216 {
7217 put_var_into_stack (TREE_OPERAND (exp, 0),
7218 /*rescan=*/true);
7219 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7220 }
7221 else
7222 {
7223 tree nt
7224 = build_qualified_type (TREE_TYPE (tem),
7225 (TYPE_QUALS (TREE_TYPE (tem))
7226 | TYPE_QUAL_CONST));
7227 rtx memloc = assign_temp (nt, 1, 1, 1);
450b1728 7228
8d2e5f72
RK
7229 emit_move_insn (memloc, op0);
7230 op0 = memloc;
7231 }
7232 }
7233
7bb0943f
RS
7234 if (offset != 0)
7235 {
8403445a
AM
7236 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7237 EXPAND_SUM);
7bb0943f
RS
7238
7239 if (GET_CODE (op0) != MEM)
7240 abort ();
2d48c13d 7241
2d48c13d 7242#ifdef POINTERS_EXTEND_UNSIGNED
4b6c1672 7243 if (GET_MODE (offset_rtx) != Pmode)
267b28bd 7244 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
fa06ab5c
RK
7245#else
7246 if (GET_MODE (offset_rtx) != ptr_mode)
7247 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
2d48c13d
JL
7248#endif
7249
e82407b5
EB
7250 if (GET_MODE (op0) == BLKmode
7251 /* A constant address in OP0 can have VOIDmode, we must
7252 not try to call force_reg in that case. */
efd07ca7 7253 && GET_MODE (XEXP (op0, 0)) != VOIDmode
14a774a9 7254 && bitsize != 0
3a94c984 7255 && (bitpos % bitsize) == 0
89752202 7256 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
a06ef755 7257 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
89752202 7258 {
e3c8ea67 7259 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
89752202
HB
7260 bitpos = 0;
7261 }
7262
0d4903b8
RK
7263 op0 = offset_address (op0, offset_rtx,
7264 highest_pow2_factor (offset));
7bb0943f
RS
7265 }
7266
1ce7f3c2
RK
7267 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7268 record its alignment as BIGGEST_ALIGNMENT. */
7269 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7270 && is_aligning_offset (offset, tem))
7271 set_mem_align (op0, BIGGEST_ALIGNMENT);
7272
bbf6f052
RK
7273 /* Don't forget about volatility even if this is a bitfield. */
7274 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7275 {
f47e9b4e
RK
7276 if (op0 == orig_op0)
7277 op0 = copy_rtx (op0);
7278
bbf6f052
RK
7279 MEM_VOLATILE_P (op0) = 1;
7280 }
7281
010f87c4
JJ
7282 /* The following code doesn't handle CONCAT.
7283 Assume only bitpos == 0 can be used for CONCAT, due to
7284 one element arrays having the same mode as its element. */
7285 if (GET_CODE (op0) == CONCAT)
7286 {
7287 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7288 abort ();
7289 return op0;
7290 }
7291
ccc98036
RS
7292 /* In cases where an aligned union has an unaligned object
7293 as a field, we might be extracting a BLKmode value from
7294 an integer-mode (e.g., SImode) object. Handle this case
7295 by doing the extract into an object as wide as the field
7296 (which we know to be the width of a basic mode), then
cb5fa0f8 7297 storing into memory, and changing the mode to BLKmode. */
bbf6f052 7298 if (mode1 == VOIDmode
ccc98036 7299 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
cb5fa0f8
RK
7300 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7301 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
10c2a453
RK
7302 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7303 && modifier != EXPAND_CONST_ADDRESS
7304 && modifier != EXPAND_INITIALIZER)
cb5fa0f8
RK
7305 /* If the field isn't aligned enough to fetch as a memref,
7306 fetch it as a bit field. */
7307 || (mode1 != BLKmode
9e5f281f 7308 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
e82407b5
EB
7309 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7310 || (GET_CODE (op0) == MEM
7311 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7312 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
a8f3bf52
RK
7313 && ((modifier == EXPAND_CONST_ADDRESS
7314 || modifier == EXPAND_INITIALIZER)
7315 ? STRICT_ALIGNMENT
7316 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
9e5f281f 7317 || (bitpos % BITS_PER_UNIT != 0)))
cb5fa0f8
RK
7318 /* If the type and the field are a constant size and the
7319 size of the type isn't the same size as the bitfield,
7320 we must use bitfield operations. */
7321 || (bitsize >= 0
7322 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7323 == INTEGER_CST)
7324 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
a06ef755 7325 bitsize)))
bbf6f052 7326 {
bbf6f052
RK
7327 enum machine_mode ext_mode = mode;
7328
14a774a9
RK
7329 if (ext_mode == BLKmode
7330 && ! (target != 0 && GET_CODE (op0) == MEM
7331 && GET_CODE (target) == MEM
7332 && bitpos % BITS_PER_UNIT == 0))
bbf6f052
RK
7333 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7334
7335 if (ext_mode == BLKmode)
a281e72d 7336 {
7a06d606
RK
7337 if (target == 0)
7338 target = assign_temp (type, 0, 1, 1);
7339
7340 if (bitsize == 0)
7341 return target;
7342
a281e72d
RK
7343 /* In this case, BITPOS must start at a byte boundary and
7344 TARGET, if specified, must be a MEM. */
7345 if (GET_CODE (op0) != MEM
7346 || (target != 0 && GET_CODE (target) != MEM)
7347 || bitpos % BITS_PER_UNIT != 0)
7348 abort ();
7349
7a06d606
RK
7350 emit_block_move (target,
7351 adjust_address (op0, VOIDmode,
7352 bitpos / BITS_PER_UNIT),
a06ef755 7353 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
44bb111a 7354 / BITS_PER_UNIT),
8403445a
AM
7355 (modifier == EXPAND_STACK_PARM
7356 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
3a94c984 7357
a281e72d
RK
7358 return target;
7359 }
bbf6f052 7360
dc6d66b3
RK
7361 op0 = validize_mem (op0);
7362
7363 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
04050c69 7364 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
dc6d66b3 7365
8403445a
AM
7366 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7367 (modifier == EXPAND_STACK_PARM
7368 ? NULL_RTX : target),
7369 ext_mode, ext_mode,
bbf6f052 7370 int_size_in_bytes (TREE_TYPE (tem)));
ef19912d
RK
7371
7372 /* If the result is a record type and BITSIZE is narrower than
7373 the mode of OP0, an integral mode, and this is a big endian
7374 machine, we must put the field into the high-order bits. */
7375 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7376 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
65a07688 7377 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
ef19912d
RK
7378 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7379 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7380 - bitsize),
7381 op0, 1);
7382
daae8185
RK
7383 /* If the result type is BLKmode, store the data into a temporary
7384 of the appropriate type, but with the mode corresponding to the
7385 mode for the data we have (op0's mode). It's tempting to make
7386 this a constant type, since we know it's only being stored once,
7387 but that can cause problems if we are taking the address of this
7388 COMPONENT_REF because the MEM of any reference via that address
7389 will have flags corresponding to the type, which will not
7390 necessarily be constant. */
bbf6f052
RK
7391 if (mode == BLKmode)
7392 {
daae8185
RK
7393 rtx new
7394 = assign_stack_temp_for_type
7395 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
bbf6f052
RK
7396
7397 emit_move_insn (new, op0);
7398 op0 = copy_rtx (new);
7399 PUT_MODE (op0, BLKmode);
c3d32120 7400 set_mem_attributes (op0, exp, 1);
bbf6f052
RK
7401 }
7402
7403 return op0;
7404 }
7405
05019f83
RK
7406 /* If the result is BLKmode, use that to access the object
7407 now as well. */
7408 if (mode == BLKmode)
7409 mode1 = BLKmode;
7410
bbf6f052
RK
7411 /* Get a reference to just this component. */
7412 if (modifier == EXPAND_CONST_ADDRESS
7413 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
f1ec5147 7414 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
bbf6f052 7415 else
f4ef873c 7416 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
41472af8 7417
f47e9b4e
RK
7418 if (op0 == orig_op0)
7419 op0 = copy_rtx (op0);
7420
3bdf5ad1 7421 set_mem_attributes (op0, exp, 0);
dc6d66b3 7422 if (GET_CODE (XEXP (op0, 0)) == REG)
a06ef755 7423 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
dc6d66b3 7424
bbf6f052 7425 MEM_VOLATILE_P (op0) |= volatilep;
0d15e60c 7426 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
08bbd316 7427 || modifier == EXPAND_CONST_ADDRESS
0d15e60c 7428 || modifier == EXPAND_INITIALIZER)
bbf6f052 7429 return op0;
0d15e60c 7430 else if (target == 0)
bbf6f052 7431 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
0d15e60c 7432
bbf6f052
RK
7433 convert_move (target, op0, unsignedp);
7434 return target;
7435 }
7436
4a8d0c9c
RH
7437 case VTABLE_REF:
7438 {
7439 rtx insn, before = get_last_insn (), vtbl_ref;
7440
7441 /* Evaluate the interior expression. */
7442 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7443 tmode, modifier);
7444
7445 /* Get or create an instruction off which to hang a note. */
7446 if (REG_P (subtarget))
7447 {
7448 target = subtarget;
7449 insn = get_last_insn ();
7450 if (insn == before)
7451 abort ();
7452 if (! INSN_P (insn))
7453 insn = prev_nonnote_insn (insn);
7454 }
7455 else
7456 {
7457 target = gen_reg_rtx (GET_MODE (subtarget));
7458 insn = emit_move_insn (target, subtarget);
7459 }
7460
7461 /* Collect the data for the note. */
7462 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7463 vtbl_ref = plus_constant (vtbl_ref,
7464 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7465 /* Discard the initial CONST that was added. */
7466 vtbl_ref = XEXP (vtbl_ref, 0);
7467
7468 REG_NOTES (insn)
7469 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7470
7471 return target;
7472 }
7473
bbf6f052
RK
7474 /* Intended for a reference to a buffer of a file-object in Pascal.
7475 But it's not certain that a special tree code will really be
7476 necessary for these. INDIRECT_REF might work for them. */
7477 case BUFFER_REF:
7478 abort ();
7479
7308a047 7480 case IN_EXPR:
7308a047 7481 {
d6a5ac33
RK
7482 /* Pascal set IN expression.
7483
7484 Algorithm:
7485 rlo = set_low - (set_low%bits_per_word);
7486 the_word = set [ (index - rlo)/bits_per_word ];
7487 bit_index = index % bits_per_word;
7488 bitmask = 1 << bit_index;
7489 return !!(the_word & bitmask); */
7490
7308a047
RS
7491 tree set = TREE_OPERAND (exp, 0);
7492 tree index = TREE_OPERAND (exp, 1);
8df83eae 7493 int iunsignedp = TYPE_UNSIGNED (TREE_TYPE (index));
7308a047 7494 tree set_type = TREE_TYPE (set);
7308a047
RS
7495 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7496 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
d6a5ac33
RK
7497 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7498 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7499 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7500 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7501 rtx setaddr = XEXP (setval, 0);
7502 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7308a047
RS
7503 rtx rlow;
7504 rtx diff, quo, rem, addr, bit, result;
7308a047 7505
d6a5ac33
RK
7506 /* If domain is empty, answer is no. Likewise if index is constant
7507 and out of bounds. */
51723711 7508 if (((TREE_CODE (set_high_bound) == INTEGER_CST
d6a5ac33 7509 && TREE_CODE (set_low_bound) == INTEGER_CST
51723711 7510 && tree_int_cst_lt (set_high_bound, set_low_bound))
d6a5ac33
RK
7511 || (TREE_CODE (index) == INTEGER_CST
7512 && TREE_CODE (set_low_bound) == INTEGER_CST
7513 && tree_int_cst_lt (index, set_low_bound))
7514 || (TREE_CODE (set_high_bound) == INTEGER_CST
7515 && TREE_CODE (index) == INTEGER_CST
7516 && tree_int_cst_lt (set_high_bound, index))))
7308a047
RS
7517 return const0_rtx;
7518
d6a5ac33
RK
7519 if (target == 0)
7520 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7308a047
RS
7521
7522 /* If we get here, we have to generate the code for both cases
7523 (in range and out of range). */
7524
7525 op0 = gen_label_rtx ();
7526 op1 = gen_label_rtx ();
7527
7528 if (! (GET_CODE (index_val) == CONST_INT
7529 && GET_CODE (lo_r) == CONST_INT))
a06ef755
RK
7530 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7531 GET_MODE (index_val), iunsignedp, op1);
7308a047
RS
7532
7533 if (! (GET_CODE (index_val) == CONST_INT
7534 && GET_CODE (hi_r) == CONST_INT))
a06ef755
RK
7535 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7536 GET_MODE (index_val), iunsignedp, op1);
7308a047
RS
7537
7538 /* Calculate the element number of bit zero in the first word
7539 of the set. */
7540 if (GET_CODE (lo_r) == CONST_INT)
17938e57 7541 rlow = GEN_INT (INTVAL (lo_r)
3a94c984 7542 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7308a047 7543 else
17938e57
RK
7544 rlow = expand_binop (index_mode, and_optab, lo_r,
7545 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
d6a5ac33 7546 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7308a047 7547
d6a5ac33
RK
7548 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7549 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7308a047
RS
7550
7551 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
d6a5ac33 7552 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7308a047 7553 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
d6a5ac33
RK
7554 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7555
7308a047 7556 addr = memory_address (byte_mode,
d6a5ac33
RK
7557 expand_binop (index_mode, add_optab, diff,
7558 setaddr, NULL_RTX, iunsignedp,
17938e57 7559 OPTAB_LIB_WIDEN));
d6a5ac33 7560
3a94c984 7561 /* Extract the bit we want to examine. */
7308a047 7562 bit = expand_shift (RSHIFT_EXPR, byte_mode,
38a448ca 7563 gen_rtx_MEM (byte_mode, addr),
17938e57
RK
7564 make_tree (TREE_TYPE (index), rem),
7565 NULL_RTX, 1);
7566 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7567 GET_MODE (target) == byte_mode ? target : 0,
7308a047 7568 1, OPTAB_LIB_WIDEN);
17938e57
RK
7569
7570 if (result != target)
7571 convert_move (target, result, 1);
7308a047
RS
7572
7573 /* Output the code to handle the out-of-range case. */
7574 emit_jump (op0);
7575 emit_label (op1);
7576 emit_move_insn (target, const0_rtx);
7577 emit_label (op0);
7578 return target;
7579 }
7580
bbf6f052 7581 case WITH_CLEANUP_EXPR:
6ad7895a 7582 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
bbf6f052 7583 {
6ad7895a 7584 WITH_CLEANUP_EXPR_RTL (exp)
37a08a29 7585 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
659e5a7a
JM
7586 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7587 CLEANUP_EH_ONLY (exp));
e976b8b2 7588
bbf6f052 7589 /* That's it for this cleanup. */
6ad7895a 7590 TREE_OPERAND (exp, 1) = 0;
bbf6f052 7591 }
6ad7895a 7592 return WITH_CLEANUP_EXPR_RTL (exp);
bbf6f052 7593
5dab5552
MS
7594 case CLEANUP_POINT_EXPR:
7595 {
e976b8b2
MS
7596 /* Start a new binding layer that will keep track of all cleanup
7597 actions to be performed. */
8e91754e 7598 expand_start_bindings (2);
e976b8b2 7599
d93d4205 7600 target_temp_slot_level = temp_slot_level;
e976b8b2 7601
37a08a29 7602 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
f283f66b
JM
7603 /* If we're going to use this value, load it up now. */
7604 if (! ignore)
7605 op0 = force_not_mem (op0);
d93d4205 7606 preserve_temp_slots (op0);
e976b8b2 7607 expand_end_bindings (NULL_TREE, 0, 0);
5dab5552
MS
7608 }
7609 return op0;
7610
bbf6f052
RK
7611 case CALL_EXPR:
7612 /* Check for a built-in function. */
7613 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
d6a5ac33
RK
7614 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7615 == FUNCTION_DECL)
bbf6f052 7616 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
0fb7aeda 7617 {
c70eaeaf
KG
7618 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7619 == BUILT_IN_FRONTEND)
ae2bcd98 7620 /* ??? Use (*fun) form because expand_expr is a macro. */
8403445a 7621 return (*lang_hooks.expand_expr) (exp, original_target,
0fab64a3
MM
7622 tmode, modifier,
7623 alt_rtl);
c70eaeaf
KG
7624 else
7625 return expand_builtin (exp, target, subtarget, tmode, ignore);
7626 }
d6a5ac33 7627
8129842c 7628 return expand_call (exp, target, ignore);
bbf6f052
RK
7629
7630 case NON_LVALUE_EXPR:
7631 case NOP_EXPR:
7632 case CONVERT_EXPR:
7633 case REFERENCE_EXPR:
4a53008b 7634 if (TREE_OPERAND (exp, 0) == error_mark_node)
a592f288 7635 return const0_rtx;
4a53008b 7636
bbf6f052
RK
7637 if (TREE_CODE (type) == UNION_TYPE)
7638 {
7639 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
14a774a9 7640
c3d32120
RK
7641 /* If both input and output are BLKmode, this conversion isn't doing
7642 anything except possibly changing memory attribute. */
7643 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7644 {
7645 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7646 modifier);
7647
7648 result = copy_rtx (result);
7649 set_mem_attributes (result, exp, 0);
7650 return result;
7651 }
14a774a9 7652
bbf6f052 7653 if (target == 0)
cf7cb67e
JH
7654 {
7655 if (TYPE_MODE (type) != BLKmode)
7656 target = gen_reg_rtx (TYPE_MODE (type));
7657 else
7658 target = assign_temp (type, 0, 1, 1);
7659 }
d6a5ac33 7660
bbf6f052
RK
7661 if (GET_CODE (target) == MEM)
7662 /* Store data into beginning of memory target. */
7663 store_expr (TREE_OPERAND (exp, 0),
8403445a
AM
7664 adjust_address (target, TYPE_MODE (valtype), 0),
7665 modifier == EXPAND_STACK_PARM ? 2 : 0);
1499e0a8 7666
bbf6f052
RK
7667 else if (GET_CODE (target) == REG)
7668 /* Store this field into a union of the proper type. */
14a774a9
RK
7669 store_field (target,
7670 MIN ((int_size_in_bytes (TREE_TYPE
7671 (TREE_OPERAND (exp, 0)))
7672 * BITS_PER_UNIT),
8752c357 7673 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
14a774a9 7674 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
a06ef755 7675 VOIDmode, 0, type, 0);
bbf6f052
RK
7676 else
7677 abort ();
7678
7679 /* Return the entire union. */
7680 return target;
7681 }
d6a5ac33 7682
7f62854a
RK
7683 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7684 {
7685 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
37a08a29 7686 modifier);
7f62854a
RK
7687
7688 /* If the signedness of the conversion differs and OP0 is
7689 a promoted SUBREG, clear that indication since we now
7690 have to do the proper extension. */
8df83eae 7691 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7f62854a
RK
7692 && GET_CODE (op0) == SUBREG)
7693 SUBREG_PROMOTED_VAR_P (op0) = 0;
7694
7695 return op0;
7696 }
7697
fdf473ae 7698 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
12342f90
RS
7699 if (GET_MODE (op0) == mode)
7700 return op0;
12342f90 7701
d6a5ac33
RK
7702 /* If OP0 is a constant, just convert it into the proper mode. */
7703 if (CONSTANT_P (op0))
fdf473ae
RH
7704 {
7705 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7706 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7707
0fb7aeda 7708 if (modifier == EXPAND_INITIALIZER)
fdf473ae
RH
7709 return simplify_gen_subreg (mode, op0, inner_mode,
7710 subreg_lowpart_offset (mode,
7711 inner_mode));
7712 else
7713 return convert_modes (mode, inner_mode, op0,
8df83eae 7714 TYPE_UNSIGNED (inner_type));
fdf473ae 7715 }
12342f90 7716
26fcb35a 7717 if (modifier == EXPAND_INITIALIZER)
38a448ca 7718 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
d6a5ac33 7719
bbf6f052 7720 if (target == 0)
d6a5ac33
RK
7721 return
7722 convert_to_mode (mode, op0,
8df83eae 7723 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
bbf6f052 7724 else
d6a5ac33 7725 convert_move (target, op0,
8df83eae 7726 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
bbf6f052
RK
7727 return target;
7728
ed239f5a 7729 case VIEW_CONVERT_EXPR:
37a08a29 7730 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
ed239f5a
RK
7731
7732 /* If the input and output modes are both the same, we are done.
13cf99ec
RK
7733 Otherwise, if neither mode is BLKmode and both are integral and within
7734 a word, we can use gen_lowpart. If neither is true, make sure the
7735 operand is in memory and convert the MEM to the new mode. */
ed239f5a
RK
7736 if (TYPE_MODE (type) == GET_MODE (op0))
7737 ;
7738 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
13cf99ec
RK
7739 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7740 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
ed239f5a
RK
7741 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7742 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7743 op0 = gen_lowpart (TYPE_MODE (type), op0);
c11c10d8 7744 else if (GET_CODE (op0) != MEM)
ed239f5a 7745 {
c11c10d8
RK
7746 /* If the operand is not a MEM, force it into memory. Since we
7747 are going to be be changing the mode of the MEM, don't call
7748 force_const_mem for constants because we don't allow pool
7749 constants to change mode. */
ed239f5a 7750 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
ed239f5a 7751
c11c10d8
RK
7752 if (TREE_ADDRESSABLE (exp))
7753 abort ();
ed239f5a 7754
c11c10d8
RK
7755 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7756 target
7757 = assign_stack_temp_for_type
7758 (TYPE_MODE (inner_type),
7759 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
ed239f5a 7760
c11c10d8
RK
7761 emit_move_insn (target, op0);
7762 op0 = target;
ed239f5a
RK
7763 }
7764
c11c10d8
RK
7765 /* At this point, OP0 is in the correct mode. If the output type is such
7766 that the operand is known to be aligned, indicate that it is.
7767 Otherwise, we need only be concerned about alignment for non-BLKmode
7768 results. */
ed239f5a
RK
7769 if (GET_CODE (op0) == MEM)
7770 {
7771 op0 = copy_rtx (op0);
7772
ed239f5a
RK
7773 if (TYPE_ALIGN_OK (type))
7774 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7775 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7776 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7777 {
7778 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
65a07688
RK
7779 HOST_WIDE_INT temp_size
7780 = MAX (int_size_in_bytes (inner_type),
7781 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
ed239f5a
RK
7782 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7783 temp_size, 0, type);
c4e59f51 7784 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
ed239f5a 7785
c11c10d8
RK
7786 if (TREE_ADDRESSABLE (exp))
7787 abort ();
7788
ed239f5a
RK
7789 if (GET_MODE (op0) == BLKmode)
7790 emit_block_move (new_with_op0_mode, op0,
44bb111a 7791 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
8403445a
AM
7792 (modifier == EXPAND_STACK_PARM
7793 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
ed239f5a
RK
7794 else
7795 emit_move_insn (new_with_op0_mode, op0);
7796
7797 op0 = new;
7798 }
0fb7aeda 7799
c4e59f51 7800 op0 = adjust_address (op0, TYPE_MODE (type), 0);
ed239f5a
RK
7801 }
7802
7803 return op0;
7804
bbf6f052 7805 case PLUS_EXPR:
91ce572a 7806 this_optab = ! unsignedp && flag_trapv
a9785c70 7807 && (GET_MODE_CLASS (mode) == MODE_INT)
91ce572a 7808 ? addv_optab : add_optab;
bbf6f052
RK
7809
7810 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7811 something else, make sure we add the register to the constant and
7812 then to the other thing. This case can occur during strength
7813 reduction and doing it this way will produce better code if the
7814 frame pointer or argument pointer is eliminated.
7815
7816 fold-const.c will ensure that the constant is always in the inner
7817 PLUS_EXPR, so the only case we need to do anything about is if
7818 sp, ap, or fp is our second argument, in which case we must swap
7819 the innermost first argument and our second argument. */
7820
7821 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7822 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7823 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7824 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7825 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7826 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7827 {
7828 tree t = TREE_OPERAND (exp, 1);
7829
7830 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7831 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7832 }
7833
88f63c77 7834 /* If the result is to be ptr_mode and we are adding an integer to
bbf6f052
RK
7835 something, we might be forming a constant. So try to use
7836 plus_constant. If it produces a sum and we can't accept it,
7837 use force_operand. This allows P = &ARR[const] to generate
7838 efficient code on machines where a SYMBOL_REF is not a valid
7839 address.
7840
7841 If this is an EXPAND_SUM call, always return the sum. */
c980ac49 7842 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
0fb7aeda 7843 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
bbf6f052 7844 {
8403445a
AM
7845 if (modifier == EXPAND_STACK_PARM)
7846 target = 0;
c980ac49
RS
7847 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7848 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7849 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7850 {
cbbc503e
JL
7851 rtx constant_part;
7852
c980ac49
RS
7853 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7854 EXPAND_SUM);
cbbc503e
JL
7855 /* Use immed_double_const to ensure that the constant is
7856 truncated according to the mode of OP1, then sign extended
7857 to a HOST_WIDE_INT. Using the constant directly can result
7858 in non-canonical RTL in a 64x32 cross compile. */
7859 constant_part
7860 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7861 (HOST_WIDE_INT) 0,
a5efcd63 7862 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7f401c74 7863 op1 = plus_constant (op1, INTVAL (constant_part));
c980ac49
RS
7864 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7865 op1 = force_operand (op1, target);
7866 return op1;
7867 }
bbf6f052 7868
c980ac49
RS
7869 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7870 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7871 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7872 {
cbbc503e
JL
7873 rtx constant_part;
7874
c980ac49 7875 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
70d95bac
RH
7876 (modifier == EXPAND_INITIALIZER
7877 ? EXPAND_INITIALIZER : EXPAND_SUM));
c980ac49
RS
7878 if (! CONSTANT_P (op0))
7879 {
7880 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7881 VOIDmode, modifier);
f0e9957a
RS
7882 /* Return a PLUS if modifier says it's OK. */
7883 if (modifier == EXPAND_SUM
7884 || modifier == EXPAND_INITIALIZER)
7885 return simplify_gen_binary (PLUS, mode, op0, op1);
7886 goto binop2;
c980ac49 7887 }
cbbc503e
JL
7888 /* Use immed_double_const to ensure that the constant is
7889 truncated according to the mode of OP1, then sign extended
7890 to a HOST_WIDE_INT. Using the constant directly can result
7891 in non-canonical RTL in a 64x32 cross compile. */
7892 constant_part
7893 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7894 (HOST_WIDE_INT) 0,
2a94e396 7895 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7f401c74 7896 op0 = plus_constant (op0, INTVAL (constant_part));
c980ac49
RS
7897 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7898 op0 = force_operand (op0, target);
7899 return op0;
7900 }
bbf6f052
RK
7901 }
7902
7903 /* No sense saving up arithmetic to be done
7904 if it's all in the wrong mode to form part of an address.
7905 And force_operand won't know whether to sign-extend or
7906 zero-extend. */
7907 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
88f63c77 7908 || mode != ptr_mode)
4ef7870a 7909 {
eb698c58
RS
7910 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7911 subtarget, &op0, &op1, 0);
6e7727eb
EB
7912 if (op0 == const0_rtx)
7913 return op1;
7914 if (op1 == const0_rtx)
7915 return op0;
4ef7870a
EB
7916 goto binop2;
7917 }
bbf6f052 7918
eb698c58
RS
7919 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7920 subtarget, &op0, &op1, modifier);
f0e9957a 7921 return simplify_gen_binary (PLUS, mode, op0, op1);
bbf6f052
RK
7922
7923 case MINUS_EXPR:
ea87523e
RK
7924 /* For initializers, we are allowed to return a MINUS of two
7925 symbolic constants. Here we handle all cases when both operands
7926 are constant. */
bbf6f052
RK
7927 /* Handle difference of two symbolic constants,
7928 for the sake of an initializer. */
7929 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7930 && really_constant_p (TREE_OPERAND (exp, 0))
7931 && really_constant_p (TREE_OPERAND (exp, 1)))
7932 {
eb698c58
RS
7933 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7934 NULL_RTX, &op0, &op1, modifier);
ea87523e 7935
ea87523e
RK
7936 /* If the last operand is a CONST_INT, use plus_constant of
7937 the negated constant. Else make the MINUS. */
7938 if (GET_CODE (op1) == CONST_INT)
7939 return plus_constant (op0, - INTVAL (op1));
7940 else
38a448ca 7941 return gen_rtx_MINUS (mode, op0, op1);
bbf6f052 7942 }
ae431183 7943
91ce572a
CC
7944 this_optab = ! unsignedp && flag_trapv
7945 && (GET_MODE_CLASS(mode) == MODE_INT)
7946 ? subv_optab : sub_optab;
1717e19e
UW
7947
7948 /* No sense saving up arithmetic to be done
7949 if it's all in the wrong mode to form part of an address.
7950 And force_operand won't know whether to sign-extend or
7951 zero-extend. */
7952 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7953 || mode != ptr_mode)
7954 goto binop;
7955
eb698c58
RS
7956 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7957 subtarget, &op0, &op1, modifier);
1717e19e
UW
7958
7959 /* Convert A - const to A + (-const). */
7960 if (GET_CODE (op1) == CONST_INT)
7961 {
7962 op1 = negate_rtx (mode, op1);
f0e9957a 7963 return simplify_gen_binary (PLUS, mode, op0, op1);
1717e19e
UW
7964 }
7965
7966 goto binop2;
bbf6f052
RK
7967
7968 case MULT_EXPR:
bbf6f052
RK
7969 /* If first operand is constant, swap them.
7970 Thus the following special case checks need only
7971 check the second operand. */
7972 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7973 {
b3694847 7974 tree t1 = TREE_OPERAND (exp, 0);
bbf6f052
RK
7975 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7976 TREE_OPERAND (exp, 1) = t1;
7977 }
7978
7979 /* Attempt to return something suitable for generating an
7980 indexed address, for machines that support that. */
7981
88f63c77 7982 if (modifier == EXPAND_SUM && mode == ptr_mode
3b40e71b 7983 && host_integerp (TREE_OPERAND (exp, 1), 0))
bbf6f052 7984 {
48a5f2fa
DJ
7985 tree exp1 = TREE_OPERAND (exp, 1);
7986
921b3427
RK
7987 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7988 EXPAND_SUM);
bbf6f052 7989
bbf6f052 7990 if (GET_CODE (op0) != REG)
906c4e36 7991 op0 = force_operand (op0, NULL_RTX);
bbf6f052
RK
7992 if (GET_CODE (op0) != REG)
7993 op0 = copy_to_mode_reg (mode, op0);
7994
48a5f2fa
DJ
7995 return gen_rtx_MULT (mode, op0,
7996 gen_int_mode (tree_low_cst (exp1, 0),
7997 TYPE_MODE (TREE_TYPE (exp1))));
bbf6f052
RK
7998 }
7999
8403445a
AM
8000 if (modifier == EXPAND_STACK_PARM)
8001 target = 0;
8002
bbf6f052
RK
8003 /* Check for multiplying things that have been extended
8004 from a narrower type. If this machine supports multiplying
8005 in that narrower type with a result in the desired type,
8006 do it that way, and avoid the explicit type-conversion. */
8007 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8008 && TREE_CODE (type) == INTEGER_TYPE
8009 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8010 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8011 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8012 && int_fits_type_p (TREE_OPERAND (exp, 1),
8013 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8014 /* Don't use a widening multiply if a shift will do. */
8015 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
906c4e36 8016 > HOST_BITS_PER_WIDE_INT)
bbf6f052
RK
8017 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8018 ||
8019 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8df83eae
RK
8020 && (TYPE_PRECISION (TREE_TYPE
8021 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8022 == TYPE_PRECISION (TREE_TYPE
8023 (TREE_OPERAND
8024 (TREE_OPERAND (exp, 0), 0))))
bbf6f052
RK
8025 /* If both operands are extended, they must either both
8026 be zero-extended or both be sign-extended. */
8df83eae
RK
8027 && (TYPE_UNSIGNED (TREE_TYPE
8028 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8029 == TYPE_UNSIGNED (TREE_TYPE
8030 (TREE_OPERAND
8031 (TREE_OPERAND (exp, 0), 0)))))))
bbf6f052 8032 {
888d65b5
RS
8033 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8034 enum machine_mode innermode = TYPE_MODE (op0type);
8df83eae 8035 bool zextend_p = TYPE_UNSIGNED (op0type);
888d65b5
RS
8036 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8037 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8038
b10af0c8 8039 if (mode == GET_MODE_WIDER_MODE (innermode))
bbf6f052 8040 {
b10af0c8
TG
8041 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8042 {
b10af0c8 8043 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
eb698c58
RS
8044 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8045 TREE_OPERAND (exp, 1),
8046 NULL_RTX, &op0, &op1, 0);
b10af0c8 8047 else
eb698c58
RS
8048 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8049 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8050 NULL_RTX, &op0, &op1, 0);
b10af0c8
TG
8051 goto binop2;
8052 }
8053 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
8054 && innermode == word_mode)
8055 {
888d65b5 8056 rtx htem, hipart;
b10af0c8
TG
8057 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8058 NULL_RTX, VOIDmode, 0);
8059 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8c118062
GK
8060 op1 = convert_modes (innermode, mode,
8061 expand_expr (TREE_OPERAND (exp, 1),
8062 NULL_RTX, VOIDmode, 0),
8063 unsignedp);
b10af0c8
TG
8064 else
8065 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8066 NULL_RTX, VOIDmode, 0);
8067 temp = expand_binop (mode, other_optab, op0, op1, target,
8068 unsignedp, OPTAB_LIB_WIDEN);
888d65b5
RS
8069 hipart = gen_highpart (innermode, temp);
8070 htem = expand_mult_highpart_adjust (innermode, hipart,
8071 op0, op1, hipart,
8072 zextend_p);
8073 if (htem != hipart)
8074 emit_move_insn (hipart, htem);
b10af0c8
TG
8075 return temp;
8076 }
bbf6f052
RK
8077 }
8078 }
eb698c58
RS
8079 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8080 subtarget, &op0, &op1, 0);
bbf6f052
RK
8081 return expand_mult (mode, op0, op1, target, unsignedp);
8082
8083 case TRUNC_DIV_EXPR:
8084 case FLOOR_DIV_EXPR:
8085 case CEIL_DIV_EXPR:
8086 case ROUND_DIV_EXPR:
8087 case EXACT_DIV_EXPR:
8403445a
AM
8088 if (modifier == EXPAND_STACK_PARM)
8089 target = 0;
bbf6f052
RK
8090 /* Possible optimization: compute the dividend with EXPAND_SUM
8091 then if the divisor is constant can optimize the case
8092 where some terms of the dividend have coeffs divisible by it. */
eb698c58
RS
8093 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8094 subtarget, &op0, &op1, 0);
bbf6f052
RK
8095 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8096
8097 case RDIV_EXPR:
b7e9703c
JH
8098 /* Emit a/b as a*(1/b). Later we may manage CSE the reciprocal saving
8099 expensive divide. If not, combine will rebuild the original
8100 computation. */
8101 if (flag_unsafe_math_optimizations && optimize && !optimize_size
ed7d44bc 8102 && TREE_CODE (type) == REAL_TYPE
b7e9703c
JH
8103 && !real_onep (TREE_OPERAND (exp, 0)))
8104 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
8105 build (RDIV_EXPR, type,
8106 build_real (type, dconst1),
8107 TREE_OPERAND (exp, 1))),
8e37cba8 8108 target, tmode, modifier);
ef89d648 8109 this_optab = sdiv_optab;
bbf6f052
RK
8110 goto binop;
8111
8112 case TRUNC_MOD_EXPR:
8113 case FLOOR_MOD_EXPR:
8114 case CEIL_MOD_EXPR:
8115 case ROUND_MOD_EXPR:
8403445a
AM
8116 if (modifier == EXPAND_STACK_PARM)
8117 target = 0;
eb698c58
RS
8118 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8119 subtarget, &op0, &op1, 0);
bbf6f052
RK
8120 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8121
8122 case FIX_ROUND_EXPR:
8123 case FIX_FLOOR_EXPR:
8124 case FIX_CEIL_EXPR:
8125 abort (); /* Not used for C. */
8126
8127 case FIX_TRUNC_EXPR:
906c4e36 8128 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8403445a 8129 if (target == 0 || modifier == EXPAND_STACK_PARM)
bbf6f052
RK
8130 target = gen_reg_rtx (mode);
8131 expand_fix (target, op0, unsignedp);
8132 return target;
8133
8134 case FLOAT_EXPR:
906c4e36 8135 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8403445a 8136 if (target == 0 || modifier == EXPAND_STACK_PARM)
bbf6f052
RK
8137 target = gen_reg_rtx (mode);
8138 /* expand_float can't figure out what to do if FROM has VOIDmode.
8139 So give it the correct mode. With -O, cse will optimize this. */
8140 if (GET_MODE (op0) == VOIDmode)
8141 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8142 op0);
8143 expand_float (target, op0,
8df83eae 8144 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
bbf6f052
RK
8145 return target;
8146
8147 case NEGATE_EXPR:
5b22bee8 8148 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8403445a
AM
8149 if (modifier == EXPAND_STACK_PARM)
8150 target = 0;
91ce572a 8151 temp = expand_unop (mode,
0fb7aeda
KH
8152 ! unsignedp && flag_trapv
8153 && (GET_MODE_CLASS(mode) == MODE_INT)
8154 ? negv_optab : neg_optab, op0, target, 0);
bbf6f052
RK
8155 if (temp == 0)
8156 abort ();
8157 return temp;
8158
8159 case ABS_EXPR:
8160 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8403445a
AM
8161 if (modifier == EXPAND_STACK_PARM)
8162 target = 0;
bbf6f052 8163
11017cc7 8164 /* ABS_EXPR is not valid for complex arguments. */
d6a5ac33
RK
8165 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8166 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
11017cc7 8167 abort ();
2d7050fd 8168
bbf6f052
RK
8169 /* Unsigned abs is simply the operand. Testing here means we don't
8170 risk generating incorrect code below. */
8df83eae 8171 if (TYPE_UNSIGNED (type))
bbf6f052
RK
8172 return op0;
8173
91ce572a 8174 return expand_abs (mode, op0, target, unsignedp,
e5e809f4 8175 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
bbf6f052
RK
8176
8177 case MAX_EXPR:
8178 case MIN_EXPR:
8179 target = original_target;
8403445a
AM
8180 if (target == 0
8181 || modifier == EXPAND_STACK_PARM
fc155707 8182 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
d6a5ac33 8183 || GET_MODE (target) != mode
bbf6f052
RK
8184 || (GET_CODE (target) == REG
8185 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8186 target = gen_reg_rtx (mode);
eb698c58
RS
8187 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8188 target, &op0, &op1, 0);
bbf6f052
RK
8189
8190 /* First try to do it with a special MIN or MAX instruction.
8191 If that does not win, use a conditional jump to select the proper
8192 value. */
288dc1ea 8193 this_optab = (unsignedp
bbf6f052
RK
8194 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8195 : (code == MIN_EXPR ? smin_optab : smax_optab));
8196
8197 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8198 OPTAB_WIDEN);
8199 if (temp != 0)
8200 return temp;
8201
fa2981d8
JW
8202 /* At this point, a MEM target is no longer useful; we will get better
8203 code without it. */
3a94c984 8204
fa2981d8
JW
8205 if (GET_CODE (target) == MEM)
8206 target = gen_reg_rtx (mode);
8207
e3be1116
RS
8208 /* If op1 was placed in target, swap op0 and op1. */
8209 if (target != op0 && target == op1)
8210 {
8211 rtx tem = op0;
8212 op0 = op1;
8213 op1 = tem;
8214 }
8215
ee456b1c
RK
8216 if (target != op0)
8217 emit_move_insn (target, op0);
d6a5ac33 8218
bbf6f052 8219 op0 = gen_label_rtx ();
d6a5ac33 8220
f81497d9
RS
8221 /* If this mode is an integer too wide to compare properly,
8222 compare word by word. Rely on cse to optimize constant cases. */
1eb8759b
RH
8223 if (GET_MODE_CLASS (mode) == MODE_INT
8224 && ! can_compare_p (GE, mode, ccp_jump))
bbf6f052 8225 {
f81497d9 8226 if (code == MAX_EXPR)
288dc1ea
EB
8227 do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
8228 NULL_RTX, op0);
bbf6f052 8229 else
288dc1ea
EB
8230 do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
8231 NULL_RTX, op0);
bbf6f052 8232 }
f81497d9
RS
8233 else
8234 {
b30f05db 8235 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
288dc1ea 8236 unsignedp, mode, NULL_RTX, NULL_RTX, op0);
f81497d9 8237 }
b30f05db 8238 emit_move_insn (target, op1);
bbf6f052
RK
8239 emit_label (op0);
8240 return target;
8241
bbf6f052
RK
8242 case BIT_NOT_EXPR:
8243 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8403445a
AM
8244 if (modifier == EXPAND_STACK_PARM)
8245 target = 0;
bbf6f052
RK
8246 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8247 if (temp == 0)
8248 abort ();
8249 return temp;
8250
d6a5ac33
RK
8251 /* ??? Can optimize bitwise operations with one arg constant.
8252 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8253 and (a bitwise1 b) bitwise2 b (etc)
8254 but that is probably not worth while. */
8255
8256 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8257 boolean values when we want in all cases to compute both of them. In
8258 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8259 as actual zero-or-1 values and then bitwise anding. In cases where
8260 there cannot be any side effects, better code would be made by
8261 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8262 how to recognize those cases. */
8263
bbf6f052
RK
8264 case TRUTH_AND_EXPR:
8265 case BIT_AND_EXPR:
8266 this_optab = and_optab;
8267 goto binop;
8268
bbf6f052
RK
8269 case TRUTH_OR_EXPR:
8270 case BIT_IOR_EXPR:
8271 this_optab = ior_optab;
8272 goto binop;
8273
874726a8 8274 case TRUTH_XOR_EXPR:
bbf6f052
RK
8275 case BIT_XOR_EXPR:
8276 this_optab = xor_optab;
8277 goto binop;
8278
8279 case LSHIFT_EXPR:
8280 case RSHIFT_EXPR:
8281 case LROTATE_EXPR:
8282 case RROTATE_EXPR:
e5e809f4 8283 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052 8284 subtarget = 0;
8403445a
AM
8285 if (modifier == EXPAND_STACK_PARM)
8286 target = 0;
bbf6f052
RK
8287 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8288 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8289 unsignedp);
8290
d6a5ac33
RK
8291 /* Could determine the answer when only additive constants differ. Also,
8292 the addition of one can be handled by changing the condition. */
bbf6f052
RK
8293 case LT_EXPR:
8294 case LE_EXPR:
8295 case GT_EXPR:
8296 case GE_EXPR:
8297 case EQ_EXPR:
8298 case NE_EXPR:
1eb8759b
RH
8299 case UNORDERED_EXPR:
8300 case ORDERED_EXPR:
8301 case UNLT_EXPR:
8302 case UNLE_EXPR:
8303 case UNGT_EXPR:
8304 case UNGE_EXPR:
8305 case UNEQ_EXPR:
8403445a
AM
8306 temp = do_store_flag (exp,
8307 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8308 tmode != VOIDmode ? tmode : mode, 0);
bbf6f052
RK
8309 if (temp != 0)
8310 return temp;
d6a5ac33 8311
0f41302f 8312 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
bbf6f052
RK
8313 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8314 && original_target
8315 && GET_CODE (original_target) == REG
8316 && (GET_MODE (original_target)
8317 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8318 {
d6a5ac33
RK
8319 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8320 VOIDmode, 0);
8321
c0a3eeac
UW
8322 /* If temp is constant, we can just compute the result. */
8323 if (GET_CODE (temp) == CONST_INT)
8324 {
8325 if (INTVAL (temp) != 0)
8326 emit_move_insn (target, const1_rtx);
8327 else
8328 emit_move_insn (target, const0_rtx);
8329
8330 return target;
8331 }
8332
bbf6f052 8333 if (temp != original_target)
c0a3eeac
UW
8334 {
8335 enum machine_mode mode1 = GET_MODE (temp);
8336 if (mode1 == VOIDmode)
8337 mode1 = tmode != VOIDmode ? tmode : mode;
0fb7aeda 8338
c0a3eeac
UW
8339 temp = copy_to_mode_reg (mode1, temp);
8340 }
d6a5ac33 8341
bbf6f052 8342 op1 = gen_label_rtx ();
c5d5d461 8343 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
a06ef755 8344 GET_MODE (temp), unsignedp, op1);
bbf6f052
RK
8345 emit_move_insn (temp, const1_rtx);
8346 emit_label (op1);
8347 return temp;
8348 }
d6a5ac33 8349
bbf6f052
RK
8350 /* If no set-flag instruction, must generate a conditional
8351 store into a temporary variable. Drop through
8352 and handle this like && and ||. */
8353
8354 case TRUTH_ANDIF_EXPR:
8355 case TRUTH_ORIF_EXPR:
e44842fe 8356 if (! ignore
8403445a
AM
8357 && (target == 0
8358 || modifier == EXPAND_STACK_PARM
8359 || ! safe_from_p (target, exp, 1)
e44842fe
RK
8360 /* Make sure we don't have a hard reg (such as function's return
8361 value) live across basic blocks, if not optimizing. */
8362 || (!optimize && GET_CODE (target) == REG
8363 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
bbf6f052 8364 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
e44842fe
RK
8365
8366 if (target)
8367 emit_clr_insn (target);
8368
bbf6f052
RK
8369 op1 = gen_label_rtx ();
8370 jumpifnot (exp, op1);
e44842fe
RK
8371
8372 if (target)
8373 emit_0_to_1_insn (target);
8374
bbf6f052 8375 emit_label (op1);
e44842fe 8376 return ignore ? const0_rtx : target;
bbf6f052
RK
8377
8378 case TRUTH_NOT_EXPR:
8403445a
AM
8379 if (modifier == EXPAND_STACK_PARM)
8380 target = 0;
bbf6f052
RK
8381 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8382 /* The parser is careful to generate TRUTH_NOT_EXPR
8383 only with operands that are always zero or one. */
906c4e36 8384 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
bbf6f052
RK
8385 target, 1, OPTAB_LIB_WIDEN);
8386 if (temp == 0)
8387 abort ();
8388 return temp;
8389
8390 case COMPOUND_EXPR:
8391 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8392 emit_queue ();
0fab64a3
MM
8393 return expand_expr_real (TREE_OPERAND (exp, 1),
8394 (ignore ? const0_rtx : target),
8395 VOIDmode, modifier, alt_rtl);
bbf6f052 8396
6de9cd9a
DN
8397 case STATEMENT_LIST:
8398 {
8399 tree_stmt_iterator iter;
8400
8401 if (!ignore)
8402 abort ();
8403
8404 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
8405 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
8406 }
8407 return const0_rtx;
8408
bbf6f052 8409 case COND_EXPR:
6de9cd9a
DN
8410 /* If it's void, we don't need to worry about computing a value. */
8411 if (VOID_TYPE_P (TREE_TYPE (exp)))
8412 {
8413 tree pred = TREE_OPERAND (exp, 0);
8414 tree then_ = TREE_OPERAND (exp, 1);
8415 tree else_ = TREE_OPERAND (exp, 2);
8416
8417 /* If we do not have any pending cleanups or stack_levels
8418 to restore, and at least one arm of the COND_EXPR is a
8419 GOTO_EXPR to a local label, then we can emit more efficient
8420 code by using jumpif/jumpifnot instead of the 'if' machinery. */
8421 if (! optimize
8422 || containing_blocks_have_cleanups_or_stack_level ())
8423 ;
8424 else if (TREE_CODE (then_) == GOTO_EXPR
8425 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
8426 {
8427 jumpif (pred, label_rtx (GOTO_DESTINATION (then_)));
8428 return expand_expr (else_, const0_rtx, VOIDmode, 0);
8429 }
8430 else if (TREE_CODE (else_) == GOTO_EXPR
8431 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
8432 {
8433 jumpifnot (pred, label_rtx (GOTO_DESTINATION (else_)));
8434 return expand_expr (then_, const0_rtx, VOIDmode, 0);
8435 }
8436
8437 /* Just use the 'if' machinery. */
8438 expand_start_cond (pred, 0);
8439 start_cleanup_deferral ();
8440 expand_expr (then_, const0_rtx, VOIDmode, 0);
8441
8442 exp = else_;
8443
8444 /* Iterate over 'else if's instead of recursing. */
8445 for (; TREE_CODE (exp) == COND_EXPR; exp = TREE_OPERAND (exp, 2))
8446 {
8447 expand_start_else ();
8448 if (EXPR_HAS_LOCATION (exp))
8449 {
8450 emit_line_note (EXPR_LOCATION (exp));
8451 if (cfun->dont_emit_block_notes)
8452 record_block_change (TREE_BLOCK (exp));
8453 }
8454 expand_elseif (TREE_OPERAND (exp, 0));
8455 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, 0);
8456 }
8457 /* Don't emit the jump and label if there's no 'else' clause. */
8458 if (TREE_SIDE_EFFECTS (exp))
8459 {
8460 expand_start_else ();
8461 expand_expr (exp, const0_rtx, VOIDmode, 0);
8462 }
8463 end_cleanup_deferral ();
8464 expand_end_cond ();
8465 return const0_rtx;
8466 }
8467
ac01eace
RK
8468 /* If we would have a "singleton" (see below) were it not for a
8469 conversion in each arm, bring that conversion back out. */
8470 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8471 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8472 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8473 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8474 {
d6edb99e
ZW
8475 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8476 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8477
8478 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8479 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8480 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8481 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8482 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8483 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8484 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8485 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
ac01eace 8486 return expand_expr (build1 (NOP_EXPR, type,
d6edb99e 8487 build (COND_EXPR, TREE_TYPE (iftrue),
ac01eace 8488 TREE_OPERAND (exp, 0),
d6edb99e 8489 iftrue, iffalse)),
ac01eace
RK
8490 target, tmode, modifier);
8491 }
8492
bbf6f052
RK
8493 {
8494 /* Note that COND_EXPRs whose type is a structure or union
8495 are required to be constructed to contain assignments of
8496 a temporary variable, so that we can evaluate them here
8497 for side effect only. If type is void, we must do likewise. */
8498
8499 /* If an arm of the branch requires a cleanup,
8500 only that cleanup is performed. */
8501
8502 tree singleton = 0;
8503 tree binary_op = 0, unary_op = 0;
bbf6f052
RK
8504
8505 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8506 convert it to our mode, if necessary. */
8507 if (integer_onep (TREE_OPERAND (exp, 1))
8508 && integer_zerop (TREE_OPERAND (exp, 2))
8509 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8510 {
dd27116b
RK
8511 if (ignore)
8512 {
8513 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
37a08a29 8514 modifier);
dd27116b
RK
8515 return const0_rtx;
8516 }
8517
8403445a
AM
8518 if (modifier == EXPAND_STACK_PARM)
8519 target = 0;
37a08a29 8520 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
bbf6f052
RK
8521 if (GET_MODE (op0) == mode)
8522 return op0;
d6a5ac33 8523
bbf6f052
RK
8524 if (target == 0)
8525 target = gen_reg_rtx (mode);
8526 convert_move (target, op0, unsignedp);
8527 return target;
8528 }
8529
ac01eace
RK
8530 /* Check for X ? A + B : A. If we have this, we can copy A to the
8531 output and conditionally add B. Similarly for unary operations.
8532 Don't do this if X has side-effects because those side effects
8533 might affect A or B and the "?" operation is a sequence point in
8534 ANSI. (operand_equal_p tests for side effects.) */
bbf6f052
RK
8535
8536 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8537 && operand_equal_p (TREE_OPERAND (exp, 2),
8538 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8539 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8540 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8541 && operand_equal_p (TREE_OPERAND (exp, 1),
8542 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8543 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8544 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8545 && operand_equal_p (TREE_OPERAND (exp, 2),
8546 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8547 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8548 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8549 && operand_equal_p (TREE_OPERAND (exp, 1),
8550 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8551 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8552
01c8a7c8
RK
8553 /* If we are not to produce a result, we have no target. Otherwise,
8554 if a target was specified use it; it will not be used as an
3a94c984 8555 intermediate target unless it is safe. If no target, use a
01c8a7c8
RK
8556 temporary. */
8557
8558 if (ignore)
8559 temp = 0;
8403445a
AM
8560 else if (modifier == EXPAND_STACK_PARM)
8561 temp = assign_temp (type, 0, 0, 1);
01c8a7c8 8562 else if (original_target
e5e809f4 8563 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
01c8a7c8
RK
8564 || (singleton && GET_CODE (original_target) == REG
8565 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8566 && original_target == var_rtx (singleton)))
8567 && GET_MODE (original_target) == mode
7c00d1fe
RK
8568#ifdef HAVE_conditional_move
8569 && (! can_conditionally_move_p (mode)
8570 || GET_CODE (original_target) == REG
8571 || TREE_ADDRESSABLE (type))
8572#endif
8125d7e9
BS
8573 && (GET_CODE (original_target) != MEM
8574 || TREE_ADDRESSABLE (type)))
01c8a7c8
RK
8575 temp = original_target;
8576 else if (TREE_ADDRESSABLE (type))
8577 abort ();
8578 else
8579 temp = assign_temp (type, 0, 0, 1);
8580
ac01eace
RK
8581 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8582 do the test of X as a store-flag operation, do this as
8583 A + ((X != 0) << log C). Similarly for other simple binary
8584 operators. Only do for C == 1 if BRANCH_COST is low. */
dd27116b 8585 if (temp && singleton && binary_op
bbf6f052
RK
8586 && (TREE_CODE (binary_op) == PLUS_EXPR
8587 || TREE_CODE (binary_op) == MINUS_EXPR
8588 || TREE_CODE (binary_op) == BIT_IOR_EXPR
9fbd9f58 8589 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
ac01eace
RK
8590 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8591 : integer_onep (TREE_OPERAND (binary_op, 1)))
bbf6f052
RK
8592 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8593 {
8594 rtx result;
61f6c84f 8595 tree cond;
91ce572a 8596 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
0fb7aeda
KH
8597 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8598 ? addv_optab : add_optab)
8599 : TREE_CODE (binary_op) == MINUS_EXPR
8600 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8601 ? subv_optab : sub_optab)
8602 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8603 : xor_optab);
bbf6f052 8604
61f6c84f 8605 /* If we had X ? A : A + 1, do this as A + (X == 0). */
bbf6f052 8606 if (singleton == TREE_OPERAND (exp, 1))
61f6c84f
JJ
8607 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8608 else
8609 cond = TREE_OPERAND (exp, 0);
bbf6f052 8610
61f6c84f
JJ
8611 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8612 ? temp : NULL_RTX),
bbf6f052
RK
8613 mode, BRANCH_COST <= 1);
8614
ac01eace
RK
8615 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8616 result = expand_shift (LSHIFT_EXPR, mode, result,
8617 build_int_2 (tree_log2
8618 (TREE_OPERAND
8619 (binary_op, 1)),
8620 0),
e5e809f4 8621 (safe_from_p (temp, singleton, 1)
ac01eace
RK
8622 ? temp : NULL_RTX), 0);
8623
bbf6f052
RK
8624 if (result)
8625 {
906c4e36 8626 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8627 return expand_binop (mode, boptab, op1, result, temp,
8628 unsignedp, OPTAB_LIB_WIDEN);
8629 }
bbf6f052 8630 }
3a94c984 8631
dabf8373 8632 do_pending_stack_adjust ();
bbf6f052
RK
8633 NO_DEFER_POP;
8634 op0 = gen_label_rtx ();
8635
8636 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8637 {
8638 if (temp != 0)
8639 {
8640 /* If the target conflicts with the other operand of the
8641 binary op, we can't use it. Also, we can't use the target
8642 if it is a hard register, because evaluating the condition
8643 might clobber it. */
8644 if ((binary_op
e5e809f4 8645 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
bbf6f052
RK
8646 || (GET_CODE (temp) == REG
8647 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8648 temp = gen_reg_rtx (mode);
8403445a
AM
8649 store_expr (singleton, temp,
8650 modifier == EXPAND_STACK_PARM ? 2 : 0);
bbf6f052
RK
8651 }
8652 else
906c4e36 8653 expand_expr (singleton,
2937cf87 8654 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8655 if (singleton == TREE_OPERAND (exp, 1))
8656 jumpif (TREE_OPERAND (exp, 0), op0);
8657 else
8658 jumpifnot (TREE_OPERAND (exp, 0), op0);
8659
956d6950 8660 start_cleanup_deferral ();
bbf6f052
RK
8661 if (binary_op && temp == 0)
8662 /* Just touch the other operand. */
8663 expand_expr (TREE_OPERAND (binary_op, 1),
906c4e36 8664 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8665 else if (binary_op)
8666 store_expr (build (TREE_CODE (binary_op), type,
8667 make_tree (type, temp),
8668 TREE_OPERAND (binary_op, 1)),
8403445a 8669 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
bbf6f052
RK
8670 else
8671 store_expr (build1 (TREE_CODE (unary_op), type,
8672 make_tree (type, temp)),
8403445a 8673 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
bbf6f052 8674 op1 = op0;
bbf6f052 8675 }
bbf6f052
RK
8676 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8677 comparison operator. If we have one of these cases, set the
8678 output to A, branch on A (cse will merge these two references),
8679 then set the output to FOO. */
8680 else if (temp
8681 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8682 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8683 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8684 TREE_OPERAND (exp, 1), 0)
e9a25f70
JL
8685 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8686 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
e5e809f4 8687 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
bbf6f052 8688 {
3a94c984
KH
8689 if (GET_CODE (temp) == REG
8690 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
bbf6f052 8691 temp = gen_reg_rtx (mode);
8403445a
AM
8692 store_expr (TREE_OPERAND (exp, 1), temp,
8693 modifier == EXPAND_STACK_PARM ? 2 : 0);
bbf6f052 8694 jumpif (TREE_OPERAND (exp, 0), op0);
5dab5552 8695
956d6950 8696 start_cleanup_deferral ();
c37b68d4
RS
8697 if (TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8698 store_expr (TREE_OPERAND (exp, 2), temp,
8699 modifier == EXPAND_STACK_PARM ? 2 : 0);
8700 else
8701 expand_expr (TREE_OPERAND (exp, 2),
8702 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8703 op1 = op0;
8704 }
8705 else if (temp
8706 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8707 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8708 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8709 TREE_OPERAND (exp, 2), 0)
e9a25f70
JL
8710 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8711 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
e5e809f4 8712 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
bbf6f052 8713 {
3a94c984
KH
8714 if (GET_CODE (temp) == REG
8715 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
bbf6f052 8716 temp = gen_reg_rtx (mode);
8403445a
AM
8717 store_expr (TREE_OPERAND (exp, 2), temp,
8718 modifier == EXPAND_STACK_PARM ? 2 : 0);
bbf6f052 8719 jumpifnot (TREE_OPERAND (exp, 0), op0);
5dab5552 8720
956d6950 8721 start_cleanup_deferral ();
c37b68d4
RS
8722 if (TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8723 store_expr (TREE_OPERAND (exp, 1), temp,
8724 modifier == EXPAND_STACK_PARM ? 2 : 0);
8725 else
8726 expand_expr (TREE_OPERAND (exp, 1),
8727 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8728 op1 = op0;
8729 }
8730 else
8731 {
8732 op1 = gen_label_rtx ();
8733 jumpifnot (TREE_OPERAND (exp, 0), op0);
5dab5552 8734
956d6950 8735 start_cleanup_deferral ();
3a94c984 8736
2ac84cfe 8737 /* One branch of the cond can be void, if it never returns. For
3a94c984 8738 example A ? throw : E */
2ac84cfe 8739 if (temp != 0
3a94c984 8740 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8403445a
AM
8741 store_expr (TREE_OPERAND (exp, 1), temp,
8742 modifier == EXPAND_STACK_PARM ? 2 : 0);
bbf6f052 8743 else
906c4e36
RK
8744 expand_expr (TREE_OPERAND (exp, 1),
8745 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
956d6950 8746 end_cleanup_deferral ();
bbf6f052
RK
8747 emit_queue ();
8748 emit_jump_insn (gen_jump (op1));
8749 emit_barrier ();
8750 emit_label (op0);
956d6950 8751 start_cleanup_deferral ();
2ac84cfe 8752 if (temp != 0
3a94c984 8753 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8403445a
AM
8754 store_expr (TREE_OPERAND (exp, 2), temp,
8755 modifier == EXPAND_STACK_PARM ? 2 : 0);
bbf6f052 8756 else
906c4e36
RK
8757 expand_expr (TREE_OPERAND (exp, 2),
8758 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8759 }
8760
956d6950 8761 end_cleanup_deferral ();
bbf6f052
RK
8762
8763 emit_queue ();
8764 emit_label (op1);
8765 OK_DEFER_POP;
5dab5552 8766
bbf6f052
RK
8767 return temp;
8768 }
8769
8770 case TARGET_EXPR:
8771 {
8772 /* Something needs to be initialized, but we didn't know
8773 where that thing was when building the tree. For example,
8774 it could be the return value of a function, or a parameter
8775 to a function which lays down in the stack, or a temporary
8776 variable which must be passed by reference.
8777
8778 We guarantee that the expression will either be constructed
8779 or copied into our original target. */
8780
8781 tree slot = TREE_OPERAND (exp, 0);
2a888d4c 8782 tree cleanups = NULL_TREE;
5c062816 8783 tree exp1;
bbf6f052
RK
8784
8785 if (TREE_CODE (slot) != VAR_DECL)
8786 abort ();
8787
9c51f375
RK
8788 if (! ignore)
8789 target = original_target;
8790
6fbfac92
JM
8791 /* Set this here so that if we get a target that refers to a
8792 register variable that's already been used, put_reg_into_stack
3a94c984 8793 knows that it should fix up those uses. */
6fbfac92
JM
8794 TREE_USED (slot) = 1;
8795
bbf6f052
RK
8796 if (target == 0)
8797 {
19e7881c 8798 if (DECL_RTL_SET_P (slot))
ac993f4f
MS
8799 {
8800 target = DECL_RTL (slot);
5c062816 8801 /* If we have already expanded the slot, so don't do
ac993f4f 8802 it again. (mrs) */
5c062816
MS
8803 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8804 return target;
ac993f4f 8805 }
bbf6f052
RK
8806 else
8807 {
e9a25f70 8808 target = assign_temp (type, 2, 0, 1);
19e7881c 8809 SET_DECL_RTL (slot, target);
e9a25f70 8810 if (TREE_ADDRESSABLE (slot))
f29a2bd1 8811 put_var_into_stack (slot, /*rescan=*/false);
bbf6f052 8812
e287fd6e
RK
8813 /* Since SLOT is not known to the called function
8814 to belong to its stack frame, we must build an explicit
8815 cleanup. This case occurs when we must build up a reference
8816 to pass the reference as an argument. In this case,
8817 it is very likely that such a reference need not be
8818 built here. */
8819
8820 if (TREE_OPERAND (exp, 2) == 0)
c88770e9 8821 TREE_OPERAND (exp, 2)
ae2bcd98 8822 = lang_hooks.maybe_build_cleanup (slot);
2a888d4c 8823 cleanups = TREE_OPERAND (exp, 2);
e287fd6e 8824 }
bbf6f052
RK
8825 }
8826 else
8827 {
8828 /* This case does occur, when expanding a parameter which
8829 needs to be constructed on the stack. The target
8830 is the actual stack address that we want to initialize.
8831 The function we call will perform the cleanup in this case. */
8832
8c042b47
RS
8833 /* If we have already assigned it space, use that space,
8834 not target that we were passed in, as our target
8835 parameter is only a hint. */
19e7881c 8836 if (DECL_RTL_SET_P (slot))
3a94c984
KH
8837 {
8838 target = DECL_RTL (slot);
8839 /* If we have already expanded the slot, so don't do
8c042b47 8840 it again. (mrs) */
3a94c984
KH
8841 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8842 return target;
8c042b47 8843 }
21002281
JW
8844 else
8845 {
19e7881c 8846 SET_DECL_RTL (slot, target);
21002281
JW
8847 /* If we must have an addressable slot, then make sure that
8848 the RTL that we just stored in slot is OK. */
8849 if (TREE_ADDRESSABLE (slot))
f29a2bd1 8850 put_var_into_stack (slot, /*rescan=*/true);
21002281 8851 }
bbf6f052
RK
8852 }
8853
4847c938 8854 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
5c062816
MS
8855 /* Mark it as expanded. */
8856 TREE_OPERAND (exp, 1) = NULL_TREE;
8857
8403445a 8858 store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
61d6b1cc 8859
659e5a7a 8860 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
3a94c984 8861
41531e5b 8862 return target;
bbf6f052
RK
8863 }
8864
8865 case INIT_EXPR:
8866 {
8867 tree lhs = TREE_OPERAND (exp, 0);
8868 tree rhs = TREE_OPERAND (exp, 1);
bbf6f052 8869
b90f141a 8870 temp = expand_assignment (lhs, rhs, ! ignore);
bbf6f052
RK
8871 return temp;
8872 }
8873
8874 case MODIFY_EXPR:
8875 {
8876 /* If lhs is complex, expand calls in rhs before computing it.
6d0a3f67
NS
8877 That's so we don't compute a pointer and save it over a
8878 call. If lhs is simple, compute it first so we can give it
8879 as a target if the rhs is just a call. This avoids an
8880 extra temp and copy and that prevents a partial-subsumption
8881 which makes bad code. Actually we could treat
8882 component_ref's of vars like vars. */
bbf6f052
RK
8883
8884 tree lhs = TREE_OPERAND (exp, 0);
8885 tree rhs = TREE_OPERAND (exp, 1);
bbf6f052
RK
8886
8887 temp = 0;
8888
bbf6f052
RK
8889 /* Check for |= or &= of a bitfield of size one into another bitfield
8890 of size 1. In this case, (unless we need the result of the
8891 assignment) we can do this more efficiently with a
8892 test followed by an assignment, if necessary.
8893
8894 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8895 things change so we do, this code should be enhanced to
8896 support it. */
8897 if (ignore
8898 && TREE_CODE (lhs) == COMPONENT_REF
8899 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8900 || TREE_CODE (rhs) == BIT_AND_EXPR)
8901 && TREE_OPERAND (rhs, 0) == lhs
8902 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
05bccae2
RK
8903 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8904 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
bbf6f052
RK
8905 {
8906 rtx label = gen_label_rtx ();
8907
8908 do_jump (TREE_OPERAND (rhs, 1),
8909 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8910 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8911 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8912 (TREE_CODE (rhs) == BIT_IOR_EXPR
8913 ? integer_one_node
8914 : integer_zero_node)),
b90f141a 8915 0);
e7c33f54 8916 do_pending_stack_adjust ();
bbf6f052
RK
8917 emit_label (label);
8918 return const0_rtx;
8919 }
8920
b90f141a 8921 temp = expand_assignment (lhs, rhs, ! ignore);
0fb7aeda 8922
bbf6f052
RK
8923 return temp;
8924 }
8925
6e7f84a7
APB
8926 case RETURN_EXPR:
8927 if (!TREE_OPERAND (exp, 0))
8928 expand_null_return ();
8929 else
8930 expand_return (TREE_OPERAND (exp, 0));
8931 return const0_rtx;
8932
bbf6f052
RK
8933 case PREINCREMENT_EXPR:
8934 case PREDECREMENT_EXPR:
7b8b9722 8935 return expand_increment (exp, 0, ignore);
bbf6f052
RK
8936
8937 case POSTINCREMENT_EXPR:
8938 case POSTDECREMENT_EXPR:
8939 /* Faster to treat as pre-increment if result is not used. */
7b8b9722 8940 return expand_increment (exp, ! ignore, ignore);
bbf6f052
RK
8941
8942 case ADDR_EXPR:
8403445a
AM
8943 if (modifier == EXPAND_STACK_PARM)
8944 target = 0;
682ba3a6
RK
8945 /* If we are taking the address of something erroneous, just
8946 return a zero. */
6de9cd9a 8947 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
682ba3a6 8948 return const0_rtx;
d6b6783b
RK
8949 /* If we are taking the address of a constant and are at the
8950 top level, we have to use output_constant_def since we can't
8951 call force_const_mem at top level. */
8952 else if (cfun == 0
8953 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8954 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8955 == 'c')))
8956 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
bbf6f052
RK
8957 else
8958 {
e287fd6e
RK
8959 /* We make sure to pass const0_rtx down if we came in with
8960 ignore set, to avoid doing the cleanups twice for something. */
8961 op0 = expand_expr (TREE_OPERAND (exp, 0),
8962 ignore ? const0_rtx : NULL_RTX, VOIDmode,
bbf6f052
RK
8963 (modifier == EXPAND_INITIALIZER
8964 ? modifier : EXPAND_CONST_ADDRESS));
896102d0 8965
119af78a
RK
8966 /* If we are going to ignore the result, OP0 will have been set
8967 to const0_rtx, so just return it. Don't get confused and
8968 think we are taking the address of the constant. */
8969 if (ignore)
8970 return op0;
8971
73b7f58c
BS
8972 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8973 clever and returns a REG when given a MEM. */
8974 op0 = protect_from_queue (op0, 1);
3539e816 8975
c5c76735
JL
8976 /* We would like the object in memory. If it is a constant, we can
8977 have it be statically allocated into memory. For a non-constant,
8978 we need to allocate some memory and store the value into it. */
896102d0
RK
8979
8980 if (CONSTANT_P (op0))
8981 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8982 op0);
682ba3a6 8983 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
df6018fd 8984 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
c1853da7 8985 || GET_CODE (op0) == PARALLEL || GET_CODE (op0) == LO_SUM)
896102d0 8986 {
6c7d86ec
RK
8987 /* If the operand is a SAVE_EXPR, we can deal with this by
8988 forcing the SAVE_EXPR into memory. */
8989 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
8990 {
f29a2bd1
MM
8991 put_var_into_stack (TREE_OPERAND (exp, 0),
8992 /*rescan=*/true);
6c7d86ec
RK
8993 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
8994 }
df6018fd 8995 else
6c7d86ec
RK
8996 {
8997 /* If this object is in a register, it can't be BLKmode. */
8998 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
19f90fad 8999 rtx memloc = assign_temp (inner_type, 1, 1, 1);
6c7d86ec
RK
9000
9001 if (GET_CODE (op0) == PARALLEL)
9002 /* Handle calls that pass values in multiple
9003 non-contiguous locations. The Irix 6 ABI has examples
9004 of this. */
6e985040 9005 emit_group_store (memloc, op0, inner_type,
6c7d86ec
RK
9006 int_size_in_bytes (inner_type));
9007 else
9008 emit_move_insn (memloc, op0);
0fb7aeda 9009
6c7d86ec
RK
9010 op0 = memloc;
9011 }
896102d0
RK
9012 }
9013
bbf6f052
RK
9014 if (GET_CODE (op0) != MEM)
9015 abort ();
3a94c984 9016
34e81b5a 9017 mark_temp_addr_taken (op0);
bbf6f052 9018 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
88f63c77 9019 {
34e81b5a 9020 op0 = XEXP (op0, 0);
5ae6cd0d 9021 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
34e81b5a 9022 op0 = convert_memory_address (ptr_mode, op0);
34e81b5a 9023 return op0;
88f63c77 9024 }
987c71d9 9025
c952ff4b
RK
9026 /* If OP0 is not aligned as least as much as the type requires, we
9027 need to make a temporary, copy OP0 to it, and take the address of
9028 the temporary. We want to use the alignment of the type, not of
9029 the operand. Note that this is incorrect for FUNCTION_TYPE, but
9030 the test for BLKmode means that can't happen. The test for
9031 BLKmode is because we never make mis-aligned MEMs with
9032 non-BLKmode.
9033
9034 We don't need to do this at all if the machine doesn't have
9035 strict alignment. */
9036 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
9037 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
ed239f5a
RK
9038 > MEM_ALIGN (op0))
9039 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
a06ef755
RK
9040 {
9041 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
bdaa131b 9042 rtx new;
a06ef755 9043
c3d32120
RK
9044 if (TYPE_ALIGN_OK (inner_type))
9045 abort ();
9046
bdaa131b
JM
9047 if (TREE_ADDRESSABLE (inner_type))
9048 {
9049 /* We can't make a bitwise copy of this object, so fail. */
9050 error ("cannot take the address of an unaligned member");
9051 return const0_rtx;
9052 }
9053
9054 new = assign_stack_temp_for_type
9055 (TYPE_MODE (inner_type),
9056 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
9057 : int_size_in_bytes (inner_type),
9058 1, build_qualified_type (inner_type,
9059 (TYPE_QUALS (inner_type)
9060 | TYPE_QUAL_CONST)));
9061
44bb111a 9062 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
8403445a
AM
9063 (modifier == EXPAND_STACK_PARM
9064 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
bdaa131b 9065
a06ef755
RK
9066 op0 = new;
9067 }
9068
bbf6f052
RK
9069 op0 = force_operand (XEXP (op0, 0), target);
9070 }
987c71d9 9071
05c8e58b
HPN
9072 if (flag_force_addr
9073 && GET_CODE (op0) != REG
9074 && modifier != EXPAND_CONST_ADDRESS
9075 && modifier != EXPAND_INITIALIZER
9076 && modifier != EXPAND_SUM)
987c71d9
RK
9077 op0 = force_reg (Pmode, op0);
9078
dc6d66b3
RK
9079 if (GET_CODE (op0) == REG
9080 && ! REG_USERVAR_P (op0))
bdb429a5 9081 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
987c71d9 9082
5ae6cd0d 9083 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
9fcfcce7 9084 op0 = convert_memory_address (ptr_mode, op0);
88f63c77 9085
bbf6f052
RK
9086 return op0;
9087
9088 case ENTRY_VALUE_EXPR:
9089 abort ();
9090
7308a047
RS
9091 /* COMPLEX type for Extended Pascal & Fortran */
9092 case COMPLEX_EXPR:
9093 {
9094 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6551fa4d 9095 rtx insns;
7308a047
RS
9096
9097 /* Get the rtx code of the operands. */
9098 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9099 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
9100
9101 if (! target)
9102 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
9103
6551fa4d 9104 start_sequence ();
7308a047
RS
9105
9106 /* Move the real (op0) and imaginary (op1) parts to their location. */
2d7050fd
RS
9107 emit_move_insn (gen_realpart (mode, target), op0);
9108 emit_move_insn (gen_imagpart (mode, target), op1);
7308a047 9109
6551fa4d
JW
9110 insns = get_insns ();
9111 end_sequence ();
9112
7308a047 9113 /* Complex construction should appear as a single unit. */
6551fa4d
JW
9114 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
9115 each with a separate pseudo as destination.
9116 It's not correct for flow to treat them as a unit. */
6d6e61ce 9117 if (GET_CODE (target) != CONCAT)
6551fa4d
JW
9118 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
9119 else
2f937369 9120 emit_insn (insns);
7308a047
RS
9121
9122 return target;
9123 }
9124
9125 case REALPART_EXPR:
2d7050fd
RS
9126 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9127 return gen_realpart (mode, op0);
3a94c984 9128
7308a047 9129 case IMAGPART_EXPR:
2d7050fd
RS
9130 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9131 return gen_imagpart (mode, op0);
7308a047
RS
9132
9133 case CONJ_EXPR:
9134 {
62acb978 9135 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7308a047 9136 rtx imag_t;
6551fa4d 9137 rtx insns;
3a94c984
KH
9138
9139 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7308a047
RS
9140
9141 if (! target)
d6a5ac33 9142 target = gen_reg_rtx (mode);
3a94c984 9143
6551fa4d 9144 start_sequence ();
7308a047
RS
9145
9146 /* Store the realpart and the negated imagpart to target. */
62acb978
RK
9147 emit_move_insn (gen_realpart (partmode, target),
9148 gen_realpart (partmode, op0));
7308a047 9149
62acb978 9150 imag_t = gen_imagpart (partmode, target);
91ce572a 9151 temp = expand_unop (partmode,
0fb7aeda
KH
9152 ! unsignedp && flag_trapv
9153 && (GET_MODE_CLASS(partmode) == MODE_INT)
9154 ? negv_optab : neg_optab,
3a94c984 9155 gen_imagpart (partmode, op0), imag_t, 0);
7308a047
RS
9156 if (temp != imag_t)
9157 emit_move_insn (imag_t, temp);
9158
6551fa4d
JW
9159 insns = get_insns ();
9160 end_sequence ();
9161
3a94c984 9162 /* Conjugate should appear as a single unit
d6a5ac33 9163 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
6551fa4d
JW
9164 each with a separate pseudo as destination.
9165 It's not correct for flow to treat them as a unit. */
6d6e61ce 9166 if (GET_CODE (target) != CONCAT)
6551fa4d
JW
9167 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
9168 else
2f937369 9169 emit_insn (insns);
7308a047
RS
9170
9171 return target;
9172 }
9173
6de9cd9a
DN
9174 case RESX_EXPR:
9175 expand_resx_expr (exp);
9176 return const0_rtx;
9177
e976b8b2
MS
9178 case TRY_CATCH_EXPR:
9179 {
9180 tree handler = TREE_OPERAND (exp, 1);
9181
9182 expand_eh_region_start ();
e976b8b2 9183 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6de9cd9a 9184 expand_eh_handler (handler);
e976b8b2
MS
9185
9186 return op0;
9187 }
9188
6de9cd9a
DN
9189 case CATCH_EXPR:
9190 expand_start_catch (CATCH_TYPES (exp));
9191 expand_expr (CATCH_BODY (exp), const0_rtx, VOIDmode, 0);
9192 expand_end_catch ();
9193 return const0_rtx;
9194
9195 case EH_FILTER_EXPR:
9196 /* Should have been handled in expand_eh_handler. */
9197 abort ();
9198
b335b813
PB
9199 case TRY_FINALLY_EXPR:
9200 {
9201 tree try_block = TREE_OPERAND (exp, 0);
9202 tree finally_block = TREE_OPERAND (exp, 1);
b335b813 9203
6de9cd9a
DN
9204 if ((!optimize && lang_protect_cleanup_actions == NULL)
9205 || unsafe_for_reeval (finally_block) > 1)
8943a0b4
RH
9206 {
9207 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
9208 is not sufficient, so we cannot expand the block twice.
9209 So we play games with GOTO_SUBROUTINE_EXPR to let us
9210 expand the thing only once. */
8ad8135a
RH
9211 /* When not optimizing, we go ahead with this form since
9212 (1) user breakpoints operate more predictably without
9213 code duplication, and
9214 (2) we're not running any of the global optimizers
9215 that would explode in time/space with the highly
9216 connected CFG created by the indirect branching. */
8943a0b4
RH
9217
9218 rtx finally_label = gen_label_rtx ();
9219 rtx done_label = gen_label_rtx ();
9220 rtx return_link = gen_reg_rtx (Pmode);
9221 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
9222 (tree) finally_label, (tree) return_link);
9223 TREE_SIDE_EFFECTS (cleanup) = 1;
9224
9225 /* Start a new binding layer that will keep track of all cleanup
9226 actions to be performed. */
9227 expand_start_bindings (2);
9228 target_temp_slot_level = temp_slot_level;
9229
9230 expand_decl_cleanup (NULL_TREE, cleanup);
9231 op0 = expand_expr (try_block, target, tmode, modifier);
9232
9233 preserve_temp_slots (op0);
9234 expand_end_bindings (NULL_TREE, 0, 0);
9235 emit_jump (done_label);
9236 emit_label (finally_label);
9237 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
9238 emit_indirect_jump (return_link);
9239 emit_label (done_label);
9240 }
9241 else
9242 {
9243 expand_start_bindings (2);
9244 target_temp_slot_level = temp_slot_level;
b335b813 9245
8943a0b4
RH
9246 expand_decl_cleanup (NULL_TREE, finally_block);
9247 op0 = expand_expr (try_block, target, tmode, modifier);
b335b813 9248
8943a0b4
RH
9249 preserve_temp_slots (op0);
9250 expand_end_bindings (NULL_TREE, 0, 0);
9251 }
b335b813 9252
b335b813
PB
9253 return op0;
9254 }
9255
3a94c984 9256 case GOTO_SUBROUTINE_EXPR:
b335b813
PB
9257 {
9258 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9259 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9260 rtx return_address = gen_label_rtx ();
3a94c984
KH
9261 emit_move_insn (return_link,
9262 gen_rtx_LABEL_REF (Pmode, return_address));
b335b813
PB
9263 emit_jump (subr);
9264 emit_label (return_address);
9265 return const0_rtx;
9266 }
9267
d3707adb
RH
9268 case VA_ARG_EXPR:
9269 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9270
52a11cbf 9271 case EXC_PTR_EXPR:
86c99549 9272 return get_exception_pointer (cfun);
52a11cbf 9273
6de9cd9a
DN
9274 case FILTER_EXPR:
9275 return get_exception_filter (cfun);
9276
67231816
RH
9277 case FDESC_EXPR:
9278 /* Function descriptors are not valid except for as
9279 initialization constants, and should not be expanded. */
9280 abort ();
9281
6de9cd9a
DN
9282 case SWITCH_EXPR:
9283 expand_start_case (0, SWITCH_COND (exp), integer_type_node,
9284 "switch");
9285 if (SWITCH_BODY (exp))
9286 expand_expr_stmt (SWITCH_BODY (exp));
9287 if (SWITCH_LABELS (exp))
9288 {
9289 tree duplicate = 0;
9290 tree vec = SWITCH_LABELS (exp);
9291 size_t i, n = TREE_VEC_LENGTH (vec);
9292
9293 for (i = 0; i < n; ++i)
9294 {
9295 tree elt = TREE_VEC_ELT (vec, i);
9296 tree controlling_expr_type = TREE_TYPE (SWITCH_COND (exp));
9297 tree min_value = TYPE_MIN_VALUE (controlling_expr_type);
9298 tree max_value = TYPE_MAX_VALUE (controlling_expr_type);
9299
9300 tree case_low = CASE_LOW (elt);
9301 tree case_high = CASE_HIGH (elt) ? CASE_HIGH (elt) : case_low;
9302 if (case_low && case_high)
9303 {
9304 /* Case label is less than minimum for type. */
9305 if ((tree_int_cst_compare (case_low, min_value) < 0)
9306 && (tree_int_cst_compare (case_high, min_value) < 0))
9307 {
9308 warning ("case label value %d is less than minimum value for type",
9309 TREE_INT_CST (case_low));
9310 continue;
9311 }
9312
9313 /* Case value is greater than maximum for type. */
9314 if ((tree_int_cst_compare (case_low, max_value) > 0)
9315 && (tree_int_cst_compare (case_high, max_value) > 0))
9316 {
9317 warning ("case label value %d exceeds maximum value for type",
9318 TREE_INT_CST (case_high));
9319 continue;
9320 }
9321
9322 /* Saturate lower case label value to minimum. */
9323 if ((tree_int_cst_compare (case_high, min_value) >= 0)
9324 && (tree_int_cst_compare (case_low, min_value) < 0))
9325 {
9326 warning ("lower value %d in case label range less than minimum value for type",
9327 TREE_INT_CST (case_low));
9328 case_low = min_value;
9329 }
9330
9331 /* Saturate upper case label value to maximum. */
9332 if ((tree_int_cst_compare (case_low, max_value) <= 0)
9333 && (tree_int_cst_compare (case_high, max_value) > 0))
9334 {
9335 warning ("upper value %d in case label range exceeds maximum value for type",
9336 TREE_INT_CST (case_high));
9337 case_high = max_value;
9338 }
9339 }
9340
9341 add_case_node (case_low, case_high, CASE_LABEL (elt), &duplicate, true);
9342 if (duplicate)
9343 abort ();
9344 }
9345 }
9346 expand_end_case_type (SWITCH_COND (exp), TREE_TYPE (exp));
9347 return const0_rtx;
9348
9349 case LABEL_EXPR:
9350 expand_label (TREE_OPERAND (exp, 0));
9351 return const0_rtx;
9352
9353 case CASE_LABEL_EXPR:
9354 {
9355 tree duplicate = 0;
9356 add_case_node (CASE_LOW (exp), CASE_HIGH (exp), CASE_LABEL (exp),
9357 &duplicate, false);
9358 if (duplicate)
9359 abort ();
9360 return const0_rtx;
9361 }
9362
9363 case ASM_EXPR:
9364 expand_asm_expr (exp);
9365 return const0_rtx;
9366
bbf6f052 9367 default:
ae2bcd98
RS
9368 /* ??? Use (*fun) form because expand_expr is a macro. */
9369 return (*lang_hooks.expand_expr) (exp, original_target, tmode,
9370 modifier, alt_rtl);
bbf6f052
RK
9371 }
9372
9373 /* Here to do an ordinary binary operator, generating an instruction
9374 from the optab already placed in `this_optab'. */
9375 binop:
eb698c58
RS
9376 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9377 subtarget, &op0, &op1, 0);
bbf6f052 9378 binop2:
8403445a
AM
9379 if (modifier == EXPAND_STACK_PARM)
9380 target = 0;
bbf6f052
RK
9381 temp = expand_binop (mode, this_optab, op0, op1, target,
9382 unsignedp, OPTAB_LIB_WIDEN);
9383 if (temp == 0)
9384 abort ();
9385 return temp;
9386}
b93a436e 9387\f
1ce7f3c2
RK
9388/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9389 when applied to the address of EXP produces an address known to be
9390 aligned more than BIGGEST_ALIGNMENT. */
9391
9392static int
502b8322 9393is_aligning_offset (tree offset, tree exp)
1ce7f3c2 9394{
6fce44af 9395 /* Strip off any conversions. */
1ce7f3c2
RK
9396 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9397 || TREE_CODE (offset) == NOP_EXPR
6fce44af 9398 || TREE_CODE (offset) == CONVERT_EXPR)
1ce7f3c2
RK
9399 offset = TREE_OPERAND (offset, 0);
9400
9401 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9402 power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9403 if (TREE_CODE (offset) != BIT_AND_EXPR
9404 || !host_integerp (TREE_OPERAND (offset, 1), 1)
c0cfc691
OH
9405 || compare_tree_int (TREE_OPERAND (offset, 1),
9406 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
1ce7f3c2
RK
9407 || !exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9408 return 0;
9409
9410 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9411 It must be NEGATE_EXPR. Then strip any more conversions. */
9412 offset = TREE_OPERAND (offset, 0);
9413 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9414 || TREE_CODE (offset) == NOP_EXPR
9415 || TREE_CODE (offset) == CONVERT_EXPR)
9416 offset = TREE_OPERAND (offset, 0);
9417
9418 if (TREE_CODE (offset) != NEGATE_EXPR)
9419 return 0;
9420
9421 offset = TREE_OPERAND (offset, 0);
9422 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9423 || TREE_CODE (offset) == NOP_EXPR
9424 || TREE_CODE (offset) == CONVERT_EXPR)
9425 offset = TREE_OPERAND (offset, 0);
9426
6fce44af
RK
9427 /* This must now be the address of EXP. */
9428 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
1ce7f3c2
RK
9429}
9430\f
e0a2f705 9431/* Return the tree node if an ARG corresponds to a string constant or zero
cc2902df 9432 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
fed3cef0
RK
9433 in bytes within the string that ARG is accessing. The type of the
9434 offset will be `sizetype'. */
b93a436e 9435
28f4ec01 9436tree
502b8322 9437string_constant (tree arg, tree *ptr_offset)
b93a436e
JL
9438{
9439 STRIP_NOPS (arg);
9440
9441 if (TREE_CODE (arg) == ADDR_EXPR
9442 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9443 {
fed3cef0 9444 *ptr_offset = size_zero_node;
b93a436e
JL
9445 return TREE_OPERAND (arg, 0);
9446 }
6de9cd9a
DN
9447 if (TREE_CODE (arg) == ADDR_EXPR
9448 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
9449 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg, 0), 0)) == STRING_CST)
9450 {
9451 *ptr_offset = convert (sizetype, TREE_OPERAND (TREE_OPERAND (arg, 0), 1));
9452 return TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
9453 }
b93a436e
JL
9454 else if (TREE_CODE (arg) == PLUS_EXPR)
9455 {
9456 tree arg0 = TREE_OPERAND (arg, 0);
9457 tree arg1 = TREE_OPERAND (arg, 1);
9458
9459 STRIP_NOPS (arg0);
9460 STRIP_NOPS (arg1);
9461
9462 if (TREE_CODE (arg0) == ADDR_EXPR
9463 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
bbf6f052 9464 {
fed3cef0 9465 *ptr_offset = convert (sizetype, arg1);
b93a436e 9466 return TREE_OPERAND (arg0, 0);
bbf6f052 9467 }
b93a436e
JL
9468 else if (TREE_CODE (arg1) == ADDR_EXPR
9469 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
bbf6f052 9470 {
fed3cef0 9471 *ptr_offset = convert (sizetype, arg0);
b93a436e 9472 return TREE_OPERAND (arg1, 0);
bbf6f052 9473 }
b93a436e 9474 }
ca695ac9 9475
b93a436e
JL
9476 return 0;
9477}
ca695ac9 9478\f
b93a436e
JL
9479/* Expand code for a post- or pre- increment or decrement
9480 and return the RTX for the result.
9481 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
1499e0a8 9482
b93a436e 9483static rtx
502b8322 9484expand_increment (tree exp, int post, int ignore)
ca695ac9 9485{
b3694847
SS
9486 rtx op0, op1;
9487 rtx temp, value;
9488 tree incremented = TREE_OPERAND (exp, 0);
b93a436e
JL
9489 optab this_optab = add_optab;
9490 int icode;
9491 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9492 int op0_is_copy = 0;
9493 int single_insn = 0;
9494 /* 1 means we can't store into OP0 directly,
9495 because it is a subreg narrower than a word,
9496 and we don't dare clobber the rest of the word. */
9497 int bad_subreg = 0;
1499e0a8 9498
b93a436e
JL
9499 /* Stabilize any component ref that might need to be
9500 evaluated more than once below. */
9501 if (!post
9502 || TREE_CODE (incremented) == BIT_FIELD_REF
9503 || (TREE_CODE (incremented) == COMPONENT_REF
9504 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9505 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9506 incremented = stabilize_reference (incremented);
9507 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9508 ones into save exprs so that they don't accidentally get evaluated
9509 more than once by the code below. */
9510 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9511 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9512 incremented = save_expr (incremented);
e9a25f70 9513
b93a436e
JL
9514 /* Compute the operands as RTX.
9515 Note whether OP0 is the actual lvalue or a copy of it:
9516 I believe it is a copy iff it is a register or subreg
6d2f8887 9517 and insns were generated in computing it. */
e9a25f70 9518
b93a436e 9519 temp = get_last_insn ();
37a08a29 9520 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
e9a25f70 9521
b93a436e
JL
9522 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9523 in place but instead must do sign- or zero-extension during assignment,
9524 so we copy it into a new register and let the code below use it as
9525 a copy.
e9a25f70 9526
b93a436e
JL
9527 Note that we can safely modify this SUBREG since it is know not to be
9528 shared (it was made by the expand_expr call above). */
9529
9530 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9531 {
9532 if (post)
9533 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9534 else
9535 bad_subreg = 1;
9536 }
9537 else if (GET_CODE (op0) == SUBREG
9538 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9539 {
9540 /* We cannot increment this SUBREG in place. If we are
9541 post-incrementing, get a copy of the old value. Otherwise,
9542 just mark that we cannot increment in place. */
9543 if (post)
9544 op0 = copy_to_reg (op0);
9545 else
9546 bad_subreg = 1;
e9a25f70
JL
9547 }
9548
b93a436e
JL
9549 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9550 && temp != get_last_insn ());
37a08a29 9551 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
1499e0a8 9552
b93a436e
JL
9553 /* Decide whether incrementing or decrementing. */
9554 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9555 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9556 this_optab = sub_optab;
9557
9558 /* Convert decrement by a constant into a negative increment. */
9559 if (this_optab == sub_optab
9560 && GET_CODE (op1) == CONST_INT)
ca695ac9 9561 {
3a94c984 9562 op1 = GEN_INT (-INTVAL (op1));
b93a436e 9563 this_optab = add_optab;
ca695ac9 9564 }
1499e0a8 9565
91ce572a 9566 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
505ddab6 9567 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
91ce572a 9568
b93a436e
JL
9569 /* For a preincrement, see if we can do this with a single instruction. */
9570 if (!post)
9571 {
9572 icode = (int) this_optab->handlers[(int) mode].insn_code;
9573 if (icode != (int) CODE_FOR_nothing
9574 /* Make sure that OP0 is valid for operands 0 and 1
9575 of the insn we want to queue. */
a995e389
RH
9576 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9577 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9578 && (*insn_data[icode].operand[2].predicate) (op1, mode))
b93a436e
JL
9579 single_insn = 1;
9580 }
bbf6f052 9581
b93a436e
JL
9582 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9583 then we cannot just increment OP0. We must therefore contrive to
9584 increment the original value. Then, for postincrement, we can return
9585 OP0 since it is a copy of the old value. For preincrement, expand here
9586 unless we can do it with a single insn.
bbf6f052 9587
b93a436e
JL
9588 Likewise if storing directly into OP0 would clobber high bits
9589 we need to preserve (bad_subreg). */
9590 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
a358cee0 9591 {
b93a436e
JL
9592 /* This is the easiest way to increment the value wherever it is.
9593 Problems with multiple evaluation of INCREMENTED are prevented
9594 because either (1) it is a component_ref or preincrement,
9595 in which case it was stabilized above, or (2) it is an array_ref
9596 with constant index in an array in a register, which is
9597 safe to reevaluate. */
9598 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9599 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9600 ? MINUS_EXPR : PLUS_EXPR),
9601 TREE_TYPE (exp),
9602 incremented,
9603 TREE_OPERAND (exp, 1));
a358cee0 9604
b93a436e
JL
9605 while (TREE_CODE (incremented) == NOP_EXPR
9606 || TREE_CODE (incremented) == CONVERT_EXPR)
9607 {
9608 newexp = convert (TREE_TYPE (incremented), newexp);
9609 incremented = TREE_OPERAND (incremented, 0);
9610 }
bbf6f052 9611
b90f141a 9612 temp = expand_assignment (incremented, newexp, ! post && ! ignore);
b93a436e
JL
9613 return post ? op0 : temp;
9614 }
bbf6f052 9615
b93a436e
JL
9616 if (post)
9617 {
9618 /* We have a true reference to the value in OP0.
9619 If there is an insn to add or subtract in this mode, queue it.
d91edf86 9620 Queuing the increment insn avoids the register shuffling
b93a436e
JL
9621 that often results if we must increment now and first save
9622 the old value for subsequent use. */
bbf6f052 9623
b93a436e
JL
9624#if 0 /* Turned off to avoid making extra insn for indexed memref. */
9625 op0 = stabilize (op0);
9626#endif
41dfd40c 9627
b93a436e
JL
9628 icode = (int) this_optab->handlers[(int) mode].insn_code;
9629 if (icode != (int) CODE_FOR_nothing
9630 /* Make sure that OP0 is valid for operands 0 and 1
9631 of the insn we want to queue. */
a995e389
RH
9632 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9633 && (*insn_data[icode].operand[1].predicate) (op0, mode))
b93a436e 9634 {
a995e389 9635 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
b93a436e 9636 op1 = force_reg (mode, op1);
bbf6f052 9637
b93a436e
JL
9638 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9639 }
9640 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9641 {
9642 rtx addr = (general_operand (XEXP (op0, 0), mode)
9643 ? force_reg (Pmode, XEXP (op0, 0))
9644 : copy_to_reg (XEXP (op0, 0)));
9645 rtx temp, result;
ca695ac9 9646
792760b9 9647 op0 = replace_equiv_address (op0, addr);
b93a436e 9648 temp = force_reg (GET_MODE (op0), op0);
a995e389 9649 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
b93a436e 9650 op1 = force_reg (mode, op1);
ca695ac9 9651
b93a436e
JL
9652 /* The increment queue is LIFO, thus we have to `queue'
9653 the instructions in reverse order. */
9654 enqueue_insn (op0, gen_move_insn (op0, temp));
9655 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9656 return result;
bbf6f052
RK
9657 }
9658 }
ca695ac9 9659
b93a436e
JL
9660 /* Preincrement, or we can't increment with one simple insn. */
9661 if (post)
9662 /* Save a copy of the value before inc or dec, to return it later. */
9663 temp = value = copy_to_reg (op0);
9664 else
9665 /* Arrange to return the incremented value. */
9666 /* Copy the rtx because expand_binop will protect from the queue,
9667 and the results of that would be invalid for us to return
9668 if our caller does emit_queue before using our result. */
9669 temp = copy_rtx (value = op0);
bbf6f052 9670
b93a436e 9671 /* Increment however we can. */
37a08a29 9672 op1 = expand_binop (mode, this_optab, value, op1, op0,
8df83eae 9673 TYPE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
37a08a29 9674
b93a436e
JL
9675 /* Make sure the value is stored into OP0. */
9676 if (op1 != op0)
9677 emit_move_insn (op0, op1);
5718612f 9678
b93a436e
JL
9679 return temp;
9680}
9681\f
b93a436e
JL
9682/* Generate code to calculate EXP using a store-flag instruction
9683 and return an rtx for the result. EXP is either a comparison
9684 or a TRUTH_NOT_EXPR whose operand is a comparison.
ca695ac9 9685
b93a436e 9686 If TARGET is nonzero, store the result there if convenient.
ca695ac9 9687
cc2902df 9688 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
b93a436e 9689 cheap.
ca695ac9 9690
b93a436e
JL
9691 Return zero if there is no suitable set-flag instruction
9692 available on this machine.
ca695ac9 9693
b93a436e
JL
9694 Once expand_expr has been called on the arguments of the comparison,
9695 we are committed to doing the store flag, since it is not safe to
9696 re-evaluate the expression. We emit the store-flag insn by calling
9697 emit_store_flag, but only expand the arguments if we have a reason
9698 to believe that emit_store_flag will be successful. If we think that
9699 it will, but it isn't, we have to simulate the store-flag with a
9700 set/jump/set sequence. */
ca695ac9 9701
b93a436e 9702static rtx
502b8322 9703do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
b93a436e
JL
9704{
9705 enum rtx_code code;
9706 tree arg0, arg1, type;
9707 tree tem;
9708 enum machine_mode operand_mode;
9709 int invert = 0;
9710 int unsignedp;
9711 rtx op0, op1;
9712 enum insn_code icode;
9713 rtx subtarget = target;
381127e8 9714 rtx result, label;
ca695ac9 9715
b93a436e
JL
9716 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9717 result at the end. We can't simply invert the test since it would
9718 have already been inverted if it were valid. This case occurs for
9719 some floating-point comparisons. */
ca695ac9 9720
b93a436e
JL
9721 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9722 invert = 1, exp = TREE_OPERAND (exp, 0);
ca695ac9 9723
b93a436e
JL
9724 arg0 = TREE_OPERAND (exp, 0);
9725 arg1 = TREE_OPERAND (exp, 1);
5129d2ce
AH
9726
9727 /* Don't crash if the comparison was erroneous. */
9728 if (arg0 == error_mark_node || arg1 == error_mark_node)
9729 return const0_rtx;
9730
b93a436e
JL
9731 type = TREE_TYPE (arg0);
9732 operand_mode = TYPE_MODE (type);
8df83eae 9733 unsignedp = TYPE_UNSIGNED (type);
ca695ac9 9734
b93a436e
JL
9735 /* We won't bother with BLKmode store-flag operations because it would mean
9736 passing a lot of information to emit_store_flag. */
9737 if (operand_mode == BLKmode)
9738 return 0;
ca695ac9 9739
b93a436e
JL
9740 /* We won't bother with store-flag operations involving function pointers
9741 when function pointers must be canonicalized before comparisons. */
9742#ifdef HAVE_canonicalize_funcptr_for_compare
9743 if (HAVE_canonicalize_funcptr_for_compare
9744 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9745 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9746 == FUNCTION_TYPE))
9747 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9748 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9749 == FUNCTION_TYPE))))
9750 return 0;
ca695ac9
JB
9751#endif
9752
b93a436e
JL
9753 STRIP_NOPS (arg0);
9754 STRIP_NOPS (arg1);
ca695ac9 9755
b93a436e
JL
9756 /* Get the rtx comparison code to use. We know that EXP is a comparison
9757 operation of some type. Some comparisons against 1 and -1 can be
9758 converted to comparisons with zero. Do so here so that the tests
9759 below will be aware that we have a comparison with zero. These
9760 tests will not catch constants in the first operand, but constants
9761 are rarely passed as the first operand. */
ca695ac9 9762
b93a436e
JL
9763 switch (TREE_CODE (exp))
9764 {
9765 case EQ_EXPR:
9766 code = EQ;
bbf6f052 9767 break;
b93a436e
JL
9768 case NE_EXPR:
9769 code = NE;
bbf6f052 9770 break;
b93a436e
JL
9771 case LT_EXPR:
9772 if (integer_onep (arg1))
9773 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9774 else
9775 code = unsignedp ? LTU : LT;
ca695ac9 9776 break;
b93a436e
JL
9777 case LE_EXPR:
9778 if (! unsignedp && integer_all_onesp (arg1))
9779 arg1 = integer_zero_node, code = LT;
9780 else
9781 code = unsignedp ? LEU : LE;
ca695ac9 9782 break;
b93a436e
JL
9783 case GT_EXPR:
9784 if (! unsignedp && integer_all_onesp (arg1))
9785 arg1 = integer_zero_node, code = GE;
9786 else
9787 code = unsignedp ? GTU : GT;
9788 break;
9789 case GE_EXPR:
9790 if (integer_onep (arg1))
9791 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9792 else
9793 code = unsignedp ? GEU : GE;
ca695ac9 9794 break;
1eb8759b
RH
9795
9796 case UNORDERED_EXPR:
9797 code = UNORDERED;
9798 break;
9799 case ORDERED_EXPR:
9800 code = ORDERED;
9801 break;
9802 case UNLT_EXPR:
9803 code = UNLT;
9804 break;
9805 case UNLE_EXPR:
9806 code = UNLE;
9807 break;
9808 case UNGT_EXPR:
9809 code = UNGT;
9810 break;
9811 case UNGE_EXPR:
9812 code = UNGE;
9813 break;
9814 case UNEQ_EXPR:
9815 code = UNEQ;
9816 break;
1eb8759b 9817
ca695ac9 9818 default:
b93a436e 9819 abort ();
bbf6f052 9820 }
bbf6f052 9821
b93a436e
JL
9822 /* Put a constant second. */
9823 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9824 {
9825 tem = arg0; arg0 = arg1; arg1 = tem;
9826 code = swap_condition (code);
ca695ac9 9827 }
bbf6f052 9828
b93a436e
JL
9829 /* If this is an equality or inequality test of a single bit, we can
9830 do this by shifting the bit being tested to the low-order bit and
9831 masking the result with the constant 1. If the condition was EQ,
9832 we xor it with 1. This does not require an scc insn and is faster
7960bf22
JL
9833 than an scc insn even if we have it.
9834
9835 The code to make this transformation was moved into fold_single_bit_test,
9836 so we just call into the folder and expand its result. */
d39985fa 9837
b93a436e
JL
9838 if ((code == NE || code == EQ)
9839 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9840 && integer_pow2p (TREE_OPERAND (arg0, 1)))
60cd4dae 9841 {
ae2bcd98 9842 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
60cd4dae 9843 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
450b1728 9844 arg0, arg1, type),
60cd4dae
JL
9845 target, VOIDmode, EXPAND_NORMAL);
9846 }
bbf6f052 9847
b93a436e 9848 /* Now see if we are likely to be able to do this. Return if not. */
1eb8759b 9849 if (! can_compare_p (code, operand_mode, ccp_store_flag))
b93a436e 9850 return 0;
1eb8759b 9851
b93a436e
JL
9852 icode = setcc_gen_code[(int) code];
9853 if (icode == CODE_FOR_nothing
a995e389 9854 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
ca695ac9 9855 {
b93a436e
JL
9856 /* We can only do this if it is one of the special cases that
9857 can be handled without an scc insn. */
9858 if ((code == LT && integer_zerop (arg1))
9859 || (! only_cheap && code == GE && integer_zerop (arg1)))
9860 ;
9861 else if (BRANCH_COST >= 0
9862 && ! only_cheap && (code == NE || code == EQ)
9863 && TREE_CODE (type) != REAL_TYPE
9864 && ((abs_optab->handlers[(int) operand_mode].insn_code
9865 != CODE_FOR_nothing)
9866 || (ffs_optab->handlers[(int) operand_mode].insn_code
9867 != CODE_FOR_nothing)))
9868 ;
9869 else
9870 return 0;
ca695ac9 9871 }
3a94c984 9872
296b4ed9 9873 if (! get_subtarget (target)
e3be1116 9874 || GET_MODE (subtarget) != operand_mode)
b93a436e
JL
9875 subtarget = 0;
9876
eb698c58 9877 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
b93a436e
JL
9878
9879 if (target == 0)
9880 target = gen_reg_rtx (mode);
9881
9882 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9883 because, if the emit_store_flag does anything it will succeed and
9884 OP0 and OP1 will not be used subsequently. */
ca695ac9 9885
b93a436e
JL
9886 result = emit_store_flag (target, code,
9887 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9888 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9889 operand_mode, unsignedp, 1);
ca695ac9 9890
b93a436e
JL
9891 if (result)
9892 {
9893 if (invert)
9894 result = expand_binop (mode, xor_optab, result, const1_rtx,
9895 result, 0, OPTAB_LIB_WIDEN);
9896 return result;
ca695ac9 9897 }
bbf6f052 9898
b93a436e
JL
9899 /* If this failed, we have to do this with set/compare/jump/set code. */
9900 if (GET_CODE (target) != REG
9901 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9902 target = gen_reg_rtx (GET_MODE (target));
9903
9904 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9905 result = compare_from_rtx (op0, op1, code, unsignedp,
a06ef755 9906 operand_mode, NULL_RTX);
b93a436e
JL
9907 if (GET_CODE (result) == CONST_INT)
9908 return (((result == const0_rtx && ! invert)
9909 || (result != const0_rtx && invert))
9910 ? const0_rtx : const1_rtx);
ca695ac9 9911
8f08e8c0
JL
9912 /* The code of RESULT may not match CODE if compare_from_rtx
9913 decided to swap its operands and reverse the original code.
9914
9915 We know that compare_from_rtx returns either a CONST_INT or
9916 a new comparison code, so it is safe to just extract the
9917 code from RESULT. */
9918 code = GET_CODE (result);
9919
b93a436e
JL
9920 label = gen_label_rtx ();
9921 if (bcc_gen_fctn[(int) code] == 0)
9922 abort ();
0f41302f 9923
b93a436e
JL
9924 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9925 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9926 emit_label (label);
bbf6f052 9927
b93a436e 9928 return target;
ca695ac9 9929}
b93a436e 9930\f
b93a436e 9931
ad82abb8
ZW
9932/* Stubs in case we haven't got a casesi insn. */
9933#ifndef HAVE_casesi
9934# define HAVE_casesi 0
9935# define gen_casesi(a, b, c, d, e) (0)
9936# define CODE_FOR_casesi CODE_FOR_nothing
9937#endif
9938
9939/* If the machine does not have a case insn that compares the bounds,
9940 this means extra overhead for dispatch tables, which raises the
9941 threshold for using them. */
9942#ifndef CASE_VALUES_THRESHOLD
9943#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9944#endif /* CASE_VALUES_THRESHOLD */
9945
9946unsigned int
502b8322 9947case_values_threshold (void)
ad82abb8
ZW
9948{
9949 return CASE_VALUES_THRESHOLD;
9950}
9951
9952/* Attempt to generate a casesi instruction. Returns 1 if successful,
9953 0 otherwise (i.e. if there is no casesi instruction). */
9954int
502b8322
AJ
9955try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9956 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
ad82abb8
ZW
9957{
9958 enum machine_mode index_mode = SImode;
9959 int index_bits = GET_MODE_BITSIZE (index_mode);
9960 rtx op1, op2, index;
9961 enum machine_mode op_mode;
9962
9963 if (! HAVE_casesi)
9964 return 0;
9965
9966 /* Convert the index to SImode. */
9967 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9968 {
9969 enum machine_mode omode = TYPE_MODE (index_type);
9970 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
9971
9972 /* We must handle the endpoints in the original mode. */
9973 index_expr = build (MINUS_EXPR, index_type,
9974 index_expr, minval);
9975 minval = integer_zero_node;
9976 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9977 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
a06ef755 9978 omode, 1, default_label);
ad82abb8
ZW
9979 /* Now we can safely truncate. */
9980 index = convert_to_mode (index_mode, index, 0);
9981 }
9982 else
9983 {
9984 if (TYPE_MODE (index_type) != index_mode)
9985 {
ae2bcd98 9986 index_expr = convert (lang_hooks.types.type_for_size
b0c48229 9987 (index_bits, 0), index_expr);
ad82abb8
ZW
9988 index_type = TREE_TYPE (index_expr);
9989 }
9990
9991 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9992 }
9993 emit_queue ();
9994 index = protect_from_queue (index, 0);
9995 do_pending_stack_adjust ();
9996
9997 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9998 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9999 (index, op_mode))
10000 index = copy_to_mode_reg (op_mode, index);
e87b4f3f 10001
ad82abb8
ZW
10002 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
10003
10004 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
10005 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
8df83eae 10006 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
ad82abb8
ZW
10007 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
10008 (op1, op_mode))
10009 op1 = copy_to_mode_reg (op_mode, op1);
10010
10011 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
10012
10013 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
10014 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
8df83eae 10015 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
ad82abb8
ZW
10016 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
10017 (op2, op_mode))
10018 op2 = copy_to_mode_reg (op_mode, op2);
10019
10020 emit_jump_insn (gen_casesi (index, op1, op2,
10021 table_label, default_label));
10022 return 1;
10023}
10024
10025/* Attempt to generate a tablejump instruction; same concept. */
10026#ifndef HAVE_tablejump
10027#define HAVE_tablejump 0
10028#define gen_tablejump(x, y) (0)
10029#endif
10030
10031/* Subroutine of the next function.
10032
10033 INDEX is the value being switched on, with the lowest value
b93a436e
JL
10034 in the table already subtracted.
10035 MODE is its expected mode (needed if INDEX is constant).
10036 RANGE is the length of the jump table.
10037 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
88d3b7f0 10038
b93a436e
JL
10039 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10040 index value is out of range. */
0f41302f 10041
ad82abb8 10042static void
502b8322
AJ
10043do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
10044 rtx default_label)
ca695ac9 10045{
b3694847 10046 rtx temp, vector;
88d3b7f0 10047
74f6d071
JH
10048 if (INTVAL (range) > cfun->max_jumptable_ents)
10049 cfun->max_jumptable_ents = INTVAL (range);
1877be45 10050
b93a436e
JL
10051 /* Do an unsigned comparison (in the proper mode) between the index
10052 expression and the value which represents the length of the range.
10053 Since we just finished subtracting the lower bound of the range
10054 from the index expression, this comparison allows us to simultaneously
10055 check that the original index expression value is both greater than
10056 or equal to the minimum value of the range and less than or equal to
10057 the maximum value of the range. */
709f5be1 10058
c5d5d461 10059 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
a06ef755 10060 default_label);
bbf6f052 10061
b93a436e
JL
10062 /* If index is in range, it must fit in Pmode.
10063 Convert to Pmode so we can index with it. */
10064 if (mode != Pmode)
10065 index = convert_to_mode (Pmode, index, 1);
bbf6f052 10066
ba228239 10067 /* Don't let a MEM slip through, because then INDEX that comes
b93a436e
JL
10068 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10069 and break_out_memory_refs will go to work on it and mess it up. */
10070#ifdef PIC_CASE_VECTOR_ADDRESS
10071 if (flag_pic && GET_CODE (index) != REG)
10072 index = copy_to_mode_reg (Pmode, index);
10073#endif
ca695ac9 10074
b93a436e
JL
10075 /* If flag_force_addr were to affect this address
10076 it could interfere with the tricky assumptions made
10077 about addresses that contain label-refs,
10078 which may be valid only very near the tablejump itself. */
10079 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10080 GET_MODE_SIZE, because this indicates how large insns are. The other
10081 uses should all be Pmode, because they are addresses. This code
10082 could fail if addresses and insns are not the same size. */
10083 index = gen_rtx_PLUS (Pmode,
10084 gen_rtx_MULT (Pmode, index,
10085 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10086 gen_rtx_LABEL_REF (Pmode, table_label));
10087#ifdef PIC_CASE_VECTOR_ADDRESS
10088 if (flag_pic)
10089 index = PIC_CASE_VECTOR_ADDRESS (index);
10090 else
bbf6f052 10091#endif
b93a436e
JL
10092 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10093 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10094 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10095 RTX_UNCHANGING_P (vector) = 1;
4da2eb6b 10096 MEM_NOTRAP_P (vector) = 1;
b93a436e
JL
10097 convert_move (temp, vector, 0);
10098
10099 emit_jump_insn (gen_tablejump (temp, table_label));
10100
10101 /* If we are generating PIC code or if the table is PC-relative, the
10102 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10103 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10104 emit_barrier ();
bbf6f052 10105}
b93a436e 10106
ad82abb8 10107int
502b8322
AJ
10108try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
10109 rtx table_label, rtx default_label)
ad82abb8
ZW
10110{
10111 rtx index;
10112
10113 if (! HAVE_tablejump)
10114 return 0;
10115
10116 index_expr = fold (build (MINUS_EXPR, index_type,
10117 convert (index_type, index_expr),
10118 convert (index_type, minval)));
10119 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10120 emit_queue ();
10121 index = protect_from_queue (index, 0);
10122 do_pending_stack_adjust ();
10123
10124 do_tablejump (index, TYPE_MODE (index_type),
10125 convert_modes (TYPE_MODE (index_type),
10126 TYPE_MODE (TREE_TYPE (range)),
10127 expand_expr (range, NULL_RTX,
10128 VOIDmode, 0),
8df83eae 10129 TYPE_UNSIGNED (TREE_TYPE (range))),
ad82abb8
ZW
10130 table_label, default_label);
10131 return 1;
10132}
e2500fed 10133
cb2a532e
AH
10134/* Nonzero if the mode is a valid vector mode for this architecture.
10135 This returns nonzero even if there is no hardware support for the
10136 vector mode, but we can emulate with narrower modes. */
10137
10138int
502b8322 10139vector_mode_valid_p (enum machine_mode mode)
cb2a532e
AH
10140{
10141 enum mode_class class = GET_MODE_CLASS (mode);
10142 enum machine_mode innermode;
10143
10144 /* Doh! What's going on? */
10145 if (class != MODE_VECTOR_INT
10146 && class != MODE_VECTOR_FLOAT)
10147 return 0;
10148
10149 /* Hardware support. Woo hoo! */
10150 if (VECTOR_MODE_SUPPORTED_P (mode))
10151 return 1;
10152
10153 innermode = GET_MODE_INNER (mode);
10154
10155 /* We should probably return 1 if requesting V4DI and we have no DI,
10156 but we have V2DI, but this is probably very unlikely. */
10157
10158 /* If we have support for the inner mode, we can safely emulate it.
10159 We may not have V2DI, but me can emulate with a pair of DIs. */
10160 return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
10161}
10162
d744e06e
AH
10163/* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
10164static rtx
502b8322 10165const_vector_from_tree (tree exp)
d744e06e
AH
10166{
10167 rtvec v;
10168 int units, i;
10169 tree link, elt;
10170 enum machine_mode inner, mode;
10171
10172 mode = TYPE_MODE (TREE_TYPE (exp));
10173
6de9cd9a 10174 if (initializer_zerop (exp))
d744e06e
AH
10175 return CONST0_RTX (mode);
10176
10177 units = GET_MODE_NUNITS (mode);
10178 inner = GET_MODE_INNER (mode);
10179
10180 v = rtvec_alloc (units);
10181
10182 link = TREE_VECTOR_CST_ELTS (exp);
10183 for (i = 0; link; link = TREE_CHAIN (link), ++i)
10184 {
10185 elt = TREE_VALUE (link);
10186
10187 if (TREE_CODE (elt) == REAL_CST)
10188 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
10189 inner);
10190 else
10191 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
10192 TREE_INT_CST_HIGH (elt),
10193 inner);
10194 }
10195
5f6c070d
AH
10196 /* Initialize remaining elements to 0. */
10197 for (; i < units; ++i)
10198 RTVEC_ELT (v, i) = CONST0_RTX (inner);
10199
d744e06e
AH
10200 return gen_rtx_raw_CONST_VECTOR (mode, v);
10201}
10202
e2500fed 10203#include "gt-expr.h"
This page took 4.180718 seconds and 5 git commands to generate.