/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "real.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "reload.h"
#include "output.h"
#include "typeclass.h"
#include "toplev.h"
#include "ggc.h"
#include "langhooks.h"
#include "intl.h"
#include "tm_p.h"

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* Convert defined/undefined to boolean.  */
#ifdef TARGET_MEM_FUNCTIONS
#undef TARGET_MEM_FUNCTIONS
#define TARGET_MEM_FUNCTIONS 1
#else
#define TARGET_MEM_FUNCTIONS 0
#endif

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
tree placeholder_list = 0;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the store to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};

static rtx enqueue_insn (rtx, rtx);
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
						     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
			      struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movstr (rtx, rtx, rtx, unsigned);
static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
			       struct store_by_pieces *);
static bool clear_storage_via_clrstr (rtx, rtx, unsigned);
static rtx clear_storage_via_libcall (rtx, rtx);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static int is_zeros_p (tree);
static int mostly_zeros_p (tree);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
				     HOST_WIDE_INT, enum machine_mode,
				     tree, tree, int, int);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
			tree, enum machine_mode, int, tree, int);
static rtx var_rtx (tree);

static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
static unsigned HOST_WIDE_INT highest_pow2_factor_for_type (tree, tree);

static int is_aligning_offset (tree, tree);
static rtx expand_increment (tree, int, int);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
#ifdef PUSH_ROUNDING
static void emit_single_push_insn (enum machine_mode, rtx, tree);
#endif
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];

/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif

/* If a clear memory operation would take CLEAR_RATIO or more simple
   move-instruction sequences, we will do a clrstr or libcall instead.  */

#ifndef CLEAR_RATIO
#if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
#define CLEAR_RATIO 2
#else
/* If we are optimizing for space, cut down the default clear ratio.  */
#define CLEAR_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
#endif

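/* Illustration (an added gloss, not original commentary; the numbers
   are made-up examples): with the default MOVE_RATIO of 15 when not
   optimizing for size, MOVE_BY_PIECES_P (16, 32) asks
   move_by_pieces_ninsns how many piecewise move insns a 16-byte copy
   at 32-bit alignment would take, and is nonzero only if that count
   is below 15.  emit_block_move below uses exactly this test to
   choose between expanding a copy inline, emitting a movstr pattern,
   and calling the memcpy libcall.  */
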
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
\f
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    REGNO (reg) = regno;

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}

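/* Illustrative note (an added gloss, not original commentary): the
   routine above recycles a single statically allocated
   (set (reg) (mem)) pattern, repointing its operands and asking recog
   whether some hard register accepts a direct load or store in each
   mode; the first successful probe is enough to set direct_load or
   direct_store for that mode.  */
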
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  cfun->expr = (struct expr_status *) ggc_alloc (sizeof (struct expr_status));

  pending_chain = 0;
  pending_stack_adjust = 0;
  stack_pointer_delta = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function (void)
{
  if (pending_chain)
    abort ();
}
\f
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (rtx var, rtx body)
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
				  body, pending_chain);
  return pending_chain;
}

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (rtx x, int modify)
{
  RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  rtx y = XEXP (x, 0);
	  rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

	  if (QUEUED_INSN (y))
	    {
	      rtx temp = gen_reg_rtx (GET_MODE (x));

	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }

	  /* Copy the address into a pseudo, so that the returned value
	     remains correct across calls to emit_queue.  */
	  return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
	}

      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
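
/* A small usage sketch (hypothetical caller, for illustration only):

	rtx src = protect_from_queue (x, 0);	(read access)
	rtx dst = protect_from_queue (y, 1);	(write access)
	emit_move_insn (dst, src);

   As the comment above warns, SRC and DST must go into an insn right
   away; an intervening emit_queue would flush the queue and leave
   them referring to stale values.  */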

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (rtx x)
{
  enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}

/* Perform all the pending incrementations.  */

void
emit_queue (void)
{
  rtx p;
  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      switch (GET_CODE (body))
	{
	case INSN:
	case JUMP_INSN:
	case CALL_INSN:
	case CODE_LABEL:
	case BARRIER:
	case NOTE:
	  QUEUED_INSN (p) = body;
	  emit_insn (body);
	  break;

#ifdef ENABLE_CHECKING
	case SEQUENCE:
	  abort ();
	  break;
#endif

	default:
	  QUEUED_INSN (p) = emit_insn (body);
	  break;
	}

      pending_chain = QUEUED_NEXT (p);
    }
}
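
/* Illustrative lifecycle (an added sketch, not original commentary):
   expanding a post-increment like V++ queues the increment with
   enqueue_insn (v, gen_add2_insn (v, const1_rtx)), later uses of V
   are filtered through protect_from_queue, and the expander finally
   calls emit_queue at a statement boundary, which emits each queued
   body and records it in QUEUED_INSN.  */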
\f
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
	abort ();

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  if (to_real != from_real)
    abort ();

  if (to_real)
    {
      rtx value, insns;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif

      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	default:
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
								    from));
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
	from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
	{
	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
	{
	  from = convert_to_mode (QImode, from, unsignedp);
	  from_mode = QImode;
	}
      else
	{
#ifdef HAVE_extendpqiqi2
	  if (HAVE_extendpqiqi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpqiqi2 */
	  abort ();
	}
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (! unsignedp && HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
#ifdef HAVE_zero_extendpsisi2
	  if (unsignedp && HAVE_zero_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_zero_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  if (flag_force_mem)
	    from = force_not_mem (from);

	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
				      - GET_MODE_BITSIZE (from_mode), 0);
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
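
/* Example (illustrative, with hypothetical operands): for an SImode
   register TO and a QImode register FROM, convert_move (to, from, 1)
   takes the integer-extension path above: it emits a single extension
   insn when can_extend_p reports one, otherwise widens via word_mode
   or an intermediate mode, and as a last resort synthesizes the
   extension with a left/right shift pair.  */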

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && (! HARD_REGISTER_P (x)
			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return gen_int_mode (val, mode);
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
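
/* Example (illustrative, with hypothetical operands): a caller
   holding a QImode register REG can request a zero-extended SImode
   copy with

	rtx wide = convert_modes (SImode, QImode, reg, 1);

   which returns REG itself if the modes already agree, adjusts
   CONST_INTs in place, and otherwise allocates a fresh SImode pseudo
   and emits the extension through convert_move.  */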
\f
/* This macro is used to determine the largest unit size that
   move_by_pieces can use.  */

/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES   MOVE_MAX
#endif

/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))

/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
		    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}

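/* For illustration: can_move_by_pieces (8, 64) simply evaluates
   MOVE_BY_PIECES_P (8, 64), comparing the insn count computed by
   move_by_pieces_ninsns against MOVE_RATIO, so callers can find out
   up front whether a later call to move_by_pieces will succeed.  */
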
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode.)  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is the maximum stack alignment we can assume.

   If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
		unsigned int align, int endp)
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();

  if (endp)
    {
      rtx to1;

      if (data.reverse)
	abort ();
      if (data.autinc_to)
	{
	  if (endp == 2)
	    {
	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
	      else
		data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
								-1));
	    }
	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
					   data.offset);
	}
      else
	{
	  if (endp == 2)
	    --data.offset;
	  to1 = adjust_address (data.to, QImode, data.offset);
	}
      return to1;
    }
  else
    return data.to;
}
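
/* Usage sketch (hypothetical operands, for illustration): to copy a
   16-byte BLKmode block SRC into DST with 32-bit alignment known,

	move_by_pieces (dst, src, 16, 32, 0);

   returns DST after emitting the widest available moves first.
   Passing ENDP as 1 or 2 instead returns a QImode reference just past
   (or to) the last byte written, matching mempcpy and stpcpy.  */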

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  if (l)
    abort ();
  return n_insns;
}
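
/* Worked example (illustrative): on a target with 32-bit words and
   unrestricted alignment, move_by_pieces_ninsns (10, 32) counts
   10/4 = 2 SImode moves leaving 2 bytes, then 1 HImode move, for a
   total of 3; modes whose alignment requirement exceeds ALIGN are
   simply skipped.  */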

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
		  struct move_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  abort ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
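
/* Illustrative note (an added gloss): when move_by_pieces set
   explicit_inc_to or explicit_inc_from, each iteration above brackets
   the GENFUN move with gen_add2_insn calls that step the address
   register by SIZE (negatively, before the move, for a reversed
   copy); otherwise the running offset is folded into the MEMs by
   adjust_address or adjust_automodify_address.  */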
\f
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   The alignment we can assume is the minimum of the MEM_ALIGNs of X and Y.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
         to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      abort ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  if (GET_MODE (x) != BLKmode)
    abort ();
  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movstr (x, y, size, align))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
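
/* A hedged usage sketch (hypothetical caller, not part of this file):

     rtx dst = gen_rtx_MEM (BLKmode, dst_addr);
     rtx src = gen_rtx_MEM (BLKmode, src_addr);
     set_mem_align (dst, 32);
     set_mem_align (src, 32);
     emit_block_move (dst, src, GEN_INT (24), BLOCK_OP_NORMAL);

   DST_ADDR and SRC_ADDR are assumed to be valid Pmode addresses.  With
   a small constant size like this, move_by_pieces normally wins; the
   movstr, libcall and loop strategies are the fallbacks tried in turn.  */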

/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
  if (PUSH_ARGS)
    return true;
  else
    {
      /* Check to see whether memcpy takes all register arguments.  */
      static enum {
        takes_regs_uninit, takes_regs_no, takes_regs_yes
      } takes_regs = takes_regs_uninit;

      switch (takes_regs)
        {
        case takes_regs_uninit:
          {
            CUMULATIVE_ARGS args_so_far;
            tree fn, arg;

            fn = emit_block_move_libcall_fn (false);
            INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0);

            arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
            for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
              {
                enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
                rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
                if (!tmp || !REG_P (tmp))
                  goto fail_takes_regs;
#ifdef FUNCTION_ARG_PARTIAL_NREGS
                if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
                                                NULL_TREE, 1))
                  goto fail_takes_regs;
#endif
                FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
              }
          }
          takes_regs = takes_regs_yes;
          /* FALLTHRU */

        case takes_regs_yes:
          return true;

        fail_takes_regs:
          takes_regs = takes_regs_no;
          /* FALLTHRU */
        case takes_regs_no:
          return false;

        default:
          abort ();
        }
    }
}
1859
502b8322 1860/* A subroutine of emit_block_move. Expand a movstr pattern;
4ca79136 1861 return true if successful. */
3ef1eef4 1862
4ca79136 1863static bool
502b8322 1864emit_block_move_via_movstr (rtx x, rtx y, rtx size, unsigned int align)
4ca79136 1865{
4ca79136
RH
1866 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1867 enum machine_mode mode;
266007a7 1868
4ca79136
RH
1869 /* Since this is a move insn, we don't care about volatility. */
1870 volatile_ok = 1;
1871
ee960939
OH
1872 /* Try the most limited insn first, because there's no point
1873 including more than one in the machine description unless
1874 the more limited one has some advantage. */
1875
4ca79136
RH
1876 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1877 mode = GET_MODE_WIDER_MODE (mode))
1878 {
1879 enum insn_code code = movstr_optab[(int) mode];
1880 insn_operand_predicate_fn pred;
1881
1882 if (code != CODE_FOR_nothing
1883 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1884 here because if SIZE is less than the mode mask, as it is
1885 returned by the macro, it will definitely be less than the
1886 actual mode mask. */
1887 && ((GET_CODE (size) == CONST_INT
1888 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1889 <= (GET_MODE_MASK (mode) >> 1)))
1890 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1891 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1892 || (*pred) (x, BLKmode))
1893 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1894 || (*pred) (y, BLKmode))
1895 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1896 || (*pred) (opalign, VOIDmode)))
1897 {
1898 rtx op2;
1899 rtx last = get_last_insn ();
1900 rtx pat;
1901
1902 op2 = convert_to_mode (mode, size, 1);
1903 pred = insn_data[(int) code].operand[2].predicate;
1904 if (pred != 0 && ! (*pred) (op2, mode))
1905 op2 = copy_to_mode_reg (mode, op2);
1906
1907 /* ??? When called via emit_block_move_for_call, it'd be
1908 nice if there were some way to inform the backend, so
1909 that it doesn't fail the expansion because it thinks
1910 emitting the libcall would be more efficient. */
1911
1912 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1913 if (pat)
1914 {
1915 emit_insn (pat);
1916 volatile_ok = 0;
1917 return true;
bbf6f052 1918 }
4ca79136
RH
1919 else
1920 delete_insns_since (last);
bbf6f052 1921 }
4ca79136 1922 }
bbf6f052 1923
4ca79136
RH
1924 volatile_ok = 0;
1925 return false;
1926}

/* A subroutine of emit_block_move.  Expand a call to memcpy or bcopy.
   Return the return value from memcpy, 0 otherwise.  */

static rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
{
  rtx dst_addr, src_addr;
  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* DST, SRC, or SIZE may have been passed through protect_from_queue.

     It is unsafe to save the value generated by protect_from_queue and reuse
     it later.  Consider what happens if emit_queue is called before the
     return value from protect_from_queue is used.

     Expansion of the CALL_EXPR below will call emit_queue before we are
     finished emitting RTL for argument setup.  So if we are not careful we
     could get the wrong value for an argument.

     To avoid this problem we go ahead and emit code to copy the addresses of
     DST and SRC and SIZE into new pseudos.  We can then place those new
     pseudos into an RTL_EXPR and use them later, even after a call to
     emit_queue.

     Note this is not strictly needed for library calls since they do not call
     emit_queue before loading their arguments.  However, we may need to have
     library calls call emit_queue in the future since failing to do so could
     cause problems for targets which define SMALL_REGISTER_CLASSES and pass
     arguments in registers.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

#ifdef POINTERS_EXTEND_UNSIGNED
  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);
#endif

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  if (TARGET_MEM_FUNCTIONS)
    size_mode = TYPE_MODE (sizetype);
  else
    size_mode = TYPE_MODE (unsigned_type_node);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.

     For convenience, we generate the call to bcopy this way as well.  */

  if (TARGET_MEM_FUNCTIONS)
    size_tree = make_tree (sizetype, size);
  else
    size_tree = make_tree (unsigned_type_node, size);

  fn = emit_block_move_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  if (TARGET_MEM_FUNCTIONS)
    {
      arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
      arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
    }
  else
    {
      arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
      arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
    }

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
                     call_expr, arg_list, NULL_TREE);
  TREE_SIDE_EFFECTS (call_expr) = 1;

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);

  /* If we are initializing a readonly value, show the above call clobbered
     it.  Otherwise, a load from it may erroneously be hoisted from a loop, or
     the delay slot scheduler might overlook conflicts and take nasty
     decisions.  */
  if (RTX_UNCHANGING_P (dst))
    add_function_usage_to
      (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
                                             gen_rtx_CLOBBER (VOIDmode, dst),
                                             NULL_RTX));

  return TARGET_MEM_FUNCTIONS ? retval : NULL_RTX;
}

/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      if (TARGET_MEM_FUNCTIONS)
        {
          fn = get_identifier ("memcpy");
          args = build_function_type_list (ptr_type_node, ptr_type_node,
                                           const_ptr_type_node, sizetype,
                                           NULL_TREE);
        }
      else
        {
          fn = get_identifier ("bcopy");
          args = build_function_type_list (void_type_node, const_ptr_type_node,
                                           ptr_type_node, unsigned_type_node,
                                           NULL_TREE);
        }

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    {
      SET_DECL_RTL (block_move_fn, NULL_RTX);
      SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));
    }
}

static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn, NULL);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}

/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
                          unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_note (NOTE_INSN_LOOP_BEG);

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
                             true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_note (NOTE_INSN_LOOP_CONT);
  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
                           true, top_label);

  emit_note (NOTE_INSN_LOOP_END);
}
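
/* Shape of the code emitted above, in pseudo-code (illustration only,
   not part of the original source):

             iter = 0;
             goto cmp;
     top:    *(x_addr + iter) = *(y_addr + iter);    (one QImode move)
             iter = iter + 1;
     cmp:    if (iter < size) goto top;
*/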
\f
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
                               GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
                    operand_subword_force (x, i, mode));
}
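
/* Usage sketch (hypothetical, for illustration): spreading a DImode
   value X across two consecutive word registers starting at hard
   register 3 would be

     move_block_to_reg (3, x, 2, DImode);

   which uses a load_multiple pattern when the target provides one and
   falls back to word-sized moves otherwise.  */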

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be copied is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
                                    GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
        abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}

/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  if (GET_CODE (orig) != PARALLEL)
    abort ();

  length = XVECLEN (orig, 0);
  tmps = (rtx *) alloca (sizeof (rtx) * length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
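
/* For illustration (register numbers are hypothetical): a group for a
   DImode value split across two SImode hard registers looks like

     (parallel [(expr_list (reg:SI 3) (const_int 0))
                (expr_list (reg:SI 4) (const_int 4))])

   where the constants are byte offsets.  gen_group_rtx keeps this shape
   but substitutes fresh pseudos for (reg:SI 3) and (reg:SI 4).  */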

/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block SRC in bytes, or -1 if not known.  */
/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
   the balance will be in what would be the low-order memory addresses, i.e.
   left justified for big endian, right justified for little endian.  This
   happens to be true for the targets currently using this support.  If this
   ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
   would be needed.  */

void
emit_group_load (rtx dst, rtx orig_src, int ssize)
{
  rtx *tmps, src;
  int start, i;

  if (GET_CODE (dst) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
          bytelen = ssize - bytepos;
          if (bytelen <= 0)
            abort ();
        }

      /* If we won't be loading directly from memory, protect the real source
         from strange tricks we might play; but make sure that the source can
         be loaded directly into the destination.  */
      src = orig_src;
      if (GET_CODE (orig_src) != MEM
          && (!CONSTANT_P (orig_src)
              || (GET_MODE (orig_src) != mode
                  && GET_MODE (orig_src) != VOIDmode)))
        {
          if (GET_MODE (orig_src) == VOIDmode)
            src = gen_reg_rtx (mode);
          else
            src = gen_reg_rtx (GET_MODE (orig_src));

          emit_move_insn (src, orig_src);
        }

      /* Optimize the access just a bit.  */
      if (GET_CODE (src) == MEM
          && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        {
          tmps[i] = gen_reg_rtx (mode);
          emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
        }
      else if (GET_CODE (src) == CONCAT)
        {
          unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
          unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

          if ((bytepos == 0 && bytelen == slen0)
              || (bytepos != 0 && bytepos + bytelen <= slen))
            {
              /* The following assumes that the concatenated objects all
                 have the same size.  In this case, a simple calculation
                 can be used to determine the object and the bit field
                 to be extracted.  */
              tmps[i] = XEXP (src, bytepos / slen0);
              if (! CONSTANT_P (tmps[i])
                  && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
                tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
                                             (bytepos % slen0) * BITS_PER_UNIT,
                                             1, NULL_RTX, mode, mode, ssize);
            }
          else if (bytepos == 0)
            {
              rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
              emit_move_insn (mem, src);
              tmps[i] = adjust_address (mem, mode, 0);
            }
          else
            abort ();
        }
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
         SIMD register, which is currently broken.  While we get GCC
         to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
               && GET_CODE (src) == REG)
        {
          int slen = GET_MODE_SIZE (GET_MODE (src));
          rtx mem;

          mem = assign_stack_temp (GET_MODE (src), slen, 0);
          emit_move_insn (mem, src);
          tmps[i] = adjust_address (mem, mode, (int) bytepos);
        }
      else if (CONSTANT_P (src)
               || (GET_CODE (src) == REG && GET_MODE (src) == mode))
        tmps[i] = src;
      else
        tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
                                     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
                                     mode, mode, ssize);

      if (BYTES_BIG_ENDIAN && shift)
        expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
                      tmps[i], 0, OPTAB_WIDEN);
    }

  emit_queue ();

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
}
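
/* Usage sketch (hypothetical): loading a 16-byte structure SRC_MEM into
   a register group DST of the shape shown before gen_group_rtx would be

     emit_group_load (dst, src_mem, 16);

   after which each register named in DST holds its piece of the block.  */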

/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;

  if (GET_CODE (src) != PARALLEL
      || GET_CODE (dst) != PARALLEL
      || XVECLEN (src, 0) != XVECLEN (dst, 0))
    abort ();

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
                    XEXP (XVECEXP (src, 0, i), 0));
}

/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block DST, or -1 if not known.  */

void
emit_group_store (rtx orig_dst, rtx src, int ssize)
{
  rtx *tmps, dst;
  int start, i;

  if (GET_CODE (src) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);
    }
  emit_queue ();

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
         a return statement.  In that case, the dst and src are the same,
         so no action is necessary.  */
      if (rtx_equal_p (dst, src))
        return;

      /* It is unclear if we can ever reach here, but we may as well handle
         it.  Allocate a temporary, and split this into a store/load to/from
         the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, ssize);
      emit_group_load (dst, temp, ssize);
      return;
    }
  else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
    {
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          if (BYTES_BIG_ENDIAN)
            {
              int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
              expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
                            tmps[i], 0, OPTAB_WIDEN);
            }
          bytelen = ssize - bytepos;
        }

      if (GET_CODE (dst) == CONCAT)
        {
          if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            dest = XEXP (dst, 0);
          else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            {
              bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
              dest = XEXP (dst, 1);
            }
          else if (bytepos == 0 && XVECLEN (src, 0))
            {
              dest = assign_stack_temp (GET_MODE (dest),
                                        GET_MODE_SIZE (GET_MODE (dest)), 0);
              emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
                              tmps[i]);
              dst = dest;
              break;
            }
          else
            abort ();
        }

      /* Optimize the access just a bit.  */
      if (GET_CODE (dest) == MEM
          && MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
      else
        store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
                         mode, tmps[i], ssize);
    }

  emit_queue ();

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (orig_dst != dst)
    emit_move_insn (orig_dst, dst);
}

/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The primary purpose of this routine is to handle functions
   that return BLKmode structures in registers.  Some machines
   (the PA for example) want to return all small structures
   in registers regardless of the structure's alignment.  */

rtx
copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;

  if (tgtblk == 0)
    {
      tgtblk = assign_temp (build_qualified_type (type,
                                                  (TYPE_QUALS (type)
                                                   | TYPE_QUAL_CONST)),
                            0, 1, 1);
      preserve_temp_slots (tgtblk);
    }

  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
     into a new pseudo which is a full word.  */

  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));

  /* Structures whose size is not a multiple of a word are aligned
     to the least significant byte (to the right).  On a BYTES_BIG_ENDIAN
     machine, this means we must skip the empty high order bytes when
     calculating the bit offset.  */
  if (BYTES_BIG_ENDIAN
      && bytes % UNITS_PER_WORD)
    big_endian_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));

  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */
  for (bitpos = 0, xbitpos = big_endian_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
         word boundary and when xbitpos == big_endian_correction
         (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
          || xbitpos == big_endian_correction)
        src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
                                     GET_MODE (srcreg));

      /* We need a new destination operand each time bitpos is on
         a word boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
        dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);

      /* Use xbitpos for the source extraction (right justified) and
         bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
                       extract_bit_field (src, bitsize,
                                          xbitpos % BITS_PER_WORD, 1,
                                          NULL_RTX, word_mode, word_mode,
                                          BITS_PER_WORD),
                       BITS_PER_WORD);
    }

  return tgtblk;
}

/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (rtx *call_fusage, rtx reg)
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
                         gen_rtx_USE (VOIDmode, reg), *call_fusage);
}

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (rtx *call_fusage, int regno, int nregs)
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, regno_reg_rtx[regno + i]);
}

/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (rtx *call_fusage, rtx regs)
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
         registers.  This can also be a MEM for targets that pass values
         partially on the stack and partially in registers.  */
      if (reg != 0 && GET_CODE (reg) == REG)
        use_reg (call_fusage, reg);
    }
}
\f
/* Determine whether the LEN bytes generated by CONSTFUN can be
   stored to memory using several move instructions.  CONSTFUNDATA is
   a pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  Return nonzero if a
   call to store_by_pieces should succeed.  */

int
can_store_by_pieces (unsigned HOST_WIDE_INT len,
                     rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
                     void *constfundata, unsigned int align)
{
  unsigned HOST_WIDE_INT max_size, l;
  HOST_WIDE_INT offset = 0;
  enum machine_mode mode, tmode;
  enum insn_code icode;
  int reverse;
  rtx cst;

  if (! STORE_BY_PIECES_P (len, align))
    return 0;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* We would first store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  for (reverse = 0;
       reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
       reverse++)
    {
      l = len;
      mode = VOIDmode;
      max_size = STORE_MAX_PIECES + 1;
      while (max_size > 1)
        {
          for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
               tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
            if (GET_MODE_SIZE (tmode) < max_size)
              mode = tmode;

          if (mode == VOIDmode)
            break;

          icode = mov_optab->handlers[(int) mode].insn_code;
          if (icode != CODE_FOR_nothing
              && align >= GET_MODE_ALIGNMENT (mode))
            {
              unsigned int size = GET_MODE_SIZE (mode);

              while (l >= size)
                {
                  if (reverse)
                    offset -= size;

                  cst = (*constfun) (constfundata, offset, mode);
                  if (!LEGITIMATE_CONSTANT_P (cst))
                    return 0;

                  if (!reverse)
                    offset += size;

                  l -= size;
                }
            }

          max_size = GET_MODE_SIZE (mode);
        }

      /* The code above should have handled everything.  */
      if (l != 0)
        abort ();
    }

  return 1;
}
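
/* A hedged sketch of the intended calling pattern (the callback shown
   here is hypothetical, not part of this file):

     static rtx
     zero_constfun (void *data ATTRIBUTE_UNUSED,
                    HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                    enum machine_mode mode)
     {
       return CONST0_RTX (mode);
     }

     ...
     if (can_store_by_pieces (len, zero_constfun, NULL, align))
       store_by_pieces (mem, len, zero_constfun, NULL, align, 0);

   CONSTFUN must return a legitimate constant in MODE for every
   (offset, mode) pair it may be asked about; can_store_by_pieces
   verifies exactly that before store_by_pieces commits to the moves.  */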

/* Generate several move instructions to store LEN bytes generated by
   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
   pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.
   If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
   stpcpy.  */

rtx
store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
                 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
                 void *constfundata, unsigned int align, int endp)
{
  struct store_by_pieces data;

  if (! STORE_BY_PIECES_P (len, align))
    abort ();
  to = protect_from_queue (to, 1);
  data.constfun = constfun;
  data.constfundata = constfundata;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
  if (endp)
    {
      rtx to1;

      if (data.reverse)
        abort ();
      if (data.autinc_to)
        {
          if (endp == 2)
            {
              if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
                emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
              else
                data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
                                                                -1));
            }
          to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
                                           data.offset);
        }
      else
        {
          if (endp == 2)
            --data.offset;
          to1 = adjust_address (data.to, QImode, data.offset);
        }
      return to1;
    }
  else
    return data.to;
}

/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  The caller must pass TO through protect_from_queue
   before calling.  ALIGN is maximum alignment we can assume.  */

static void
clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
{
  struct store_by_pieces data;

  data.constfun = clear_by_pieces_1;
  data.constfundata = NULL;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
}

/* Callback routine for clear_by_pieces.
   Return const0_rtx unconditionally.  */

static rtx
clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
                   HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                   enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return const0_rtx;
}

/* Subroutine of clear_by_pieces and store_by_pieces.
   Generate several move instructions to store LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  The caller must pass TO through protect_from_queue
   before calling.  ALIGN is maximum alignment we can assume.  */

static void
store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
                   unsigned int align ATTRIBUTE_UNUSED)
{
  rtx to_addr = XEXP (data->to, 0);
  unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data->offset = 0;
  data->to_addr = to_addr;
  data->autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);

  data->explicit_inc_to = 0;
  data->reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data->reverse)
    data->offset = data->len;

  /* If storing requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!data->autinc_to
      && move_by_pieces_ninsns (data->len, align) > 2)
    {
      /* Determine the main mode we'll be using.  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
        {
          data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
          data->autinc_to = 1;
          data->explicit_inc_to = -1;
        }

      if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
          && ! data->autinc_to)
        {
          data->to_addr = copy_addr_to_reg (to_addr);
          data->autinc_to = 1;
          data->explicit_inc_to = 1;
        }

      if ( !data->autinc_to && CONSTANT_P (to_addr))
        data->to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        store_by_pieces_2 (GEN_FCN (icode), mode, data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data->len != 0)
    abort ();
}

/* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
                   struct store_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1, cst;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->autinc_to)
        to1 = adjust_automodify_address (data->to, mode, data->to_addr,
                                         data->offset);
      else
        to1 = adjust_address (data->to, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  GEN_INT (-(HOST_WIDE_INT) size)));

      cst = (*data->constfun) (data->constfundata, data->offset, mode);
      emit_insn ((*genfun) (to1, cst));

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}
\f
/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
   its length in bytes.  */

rtx
clear_storage (rtx object, rtx size)
{
  rtx retval = 0;
  unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
                        : GET_MODE_ALIGNMENT (GET_MODE (object)));

  /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
     just move a zero.  Otherwise, do this a piece at a time.  */
  if (GET_MODE (object) != BLKmode
      && GET_CODE (size) == CONST_INT
      && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
    emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
  else
    {
      object = protect_from_queue (object, 1);
      size = protect_from_queue (size, 0);

      if (GET_CODE (size) == CONST_INT
          && CLEAR_BY_PIECES_P (INTVAL (size), align))
        clear_by_pieces (object, INTVAL (size), align);
      else if (clear_storage_via_clrstr (object, size, align))
        ;
      else
        retval = clear_storage_via_libcall (object, size);
    }

  return retval;
}
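
/* Usage sketch (hypothetical): zeroing a 64-byte BLKmode temporary:

     rtx mem = assign_stack_temp (BLKmode, 64, 0);
     clear_storage (mem, GEN_INT (64));

   A constant size is cleared by pieces when profitable, then via a
   clrstr pattern, and only then via the memset/bzero libcall.  */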

/* A subroutine of clear_storage.  Expand a clrstr pattern;
   return true if successful.  */

static bool
clear_storage_via_clrstr (rtx object, rtx size, unsigned int align)
{
  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  enum machine_mode mode;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = clrstr_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
          /* We don't need MODE to be narrower than
             BITS_PER_HOST_WIDE_INT here because if SIZE is less than
             the mode mask, as it is returned by the macro, it will
             definitely be less than the actual mode mask.  */
          && ((GET_CODE (size) == CONST_INT
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
                   <= (GET_MODE_MASK (mode) >> 1)))
              || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
          && ((pred = insn_data[(int) code].operand[0].predicate) == 0
              || (*pred) (object, BLKmode))
          && ((pred = insn_data[(int) code].operand[2].predicate) == 0
              || (*pred) (opalign, VOIDmode)))
        {
          rtx op1;
          rtx last = get_last_insn ();
          rtx pat;

          op1 = convert_to_mode (mode, size, 1);
          pred = insn_data[(int) code].operand[1].predicate;
          if (pred != 0 && ! (*pred) (op1, mode))
            op1 = copy_to_mode_reg (mode, op1);

          pat = GEN_FCN ((int) code) (object, op1, opalign);
          if (pat)
            {
              emit_insn (pat);
              return true;
            }
          else
            delete_insns_since (last);
        }
    }

  return false;
}

/* A subroutine of clear_storage.  Expand a call to memset or bzero.
   Return the return value of memset, 0 otherwise.  */

static rtx
clear_storage_via_libcall (rtx object, rtx size)
{
  tree call_expr, arg_list, fn, object_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* OBJECT or SIZE may have been passed through protect_from_queue.

     It is unsafe to save the value generated by protect_from_queue
     and reuse it later.  Consider what happens if emit_queue is
     called before the return value from protect_from_queue is used.

     Expansion of the CALL_EXPR below will call emit_queue before
     we are finished emitting RTL for argument setup.  So if we are
     not careful we could get the wrong value for an argument.

     To avoid this problem we go ahead and emit code to copy OBJECT
     and SIZE into new pseudos.  We can then place those new pseudos
     into an RTL_EXPR and use them later, even after a call to
     emit_queue.

     Note this is not strictly needed for library calls since they
     do not call emit_queue before loading their arguments.  However,
     we may need to have library calls call emit_queue in the future
     since failing to do so could cause problems for targets which
     define SMALL_REGISTER_CLASSES and pass arguments in registers.  */

  object = copy_to_mode_reg (Pmode, XEXP (object, 0));

  if (TARGET_MEM_FUNCTIONS)
    size_mode = TYPE_MODE (sizetype);
  else
    size_mode = TYPE_MODE (unsigned_type_node);
  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memset in this context.  This could be a user call to memset and
     the user may wish to examine the return value from memset.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.

     For convenience, we generate the call to bzero this way as well.  */

  object_tree = make_tree (ptr_type_node, object);
  if (TARGET_MEM_FUNCTIONS)
    size_tree = make_tree (sizetype, size);
  else
    size_tree = make_tree (unsigned_type_node, size);

  fn = clear_storage_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  if (TARGET_MEM_FUNCTIONS)
    arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
  arg_list = tree_cons (NULL_TREE, object_tree, arg_list);

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
                     call_expr, arg_list, NULL_TREE);
  TREE_SIDE_EFFECTS (call_expr) = 1;

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);

  /* If we are initializing a readonly value, show the above call
     clobbered it.  Otherwise, a load from it may erroneously be
     hoisted from a loop.  */
  if (RTX_UNCHANGING_P (object))
    emit_insn (gen_rtx_CLOBBER (VOIDmode, object));

  return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
}

/* A subroutine of clear_storage_via_libcall.  Create the tree node
   for the function we use for block clears.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_clear_fn;

void
init_block_clear_fn (const char *asmspec)
{
  if (!block_clear_fn)
    {
      tree fn, args;

      if (TARGET_MEM_FUNCTIONS)
        {
          fn = get_identifier ("memset");
          args = build_function_type_list (ptr_type_node, ptr_type_node,
                                           integer_type_node, sizetype,
                                           NULL_TREE);
        }
      else
        {
          fn = get_identifier ("bzero");
          args = build_function_type_list (void_type_node, ptr_type_node,
                                           unsigned_type_node, NULL_TREE);
        }

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      block_clear_fn = fn;
    }

  if (asmspec)
    {
      SET_DECL_RTL (block_clear_fn, NULL_RTX);
      SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
    }
}

static tree
clear_storage_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_clear_fn)
    init_block_clear_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_clear_fn, NULL);
      assemble_external (block_clear_fn);
    }

  return block_clear_fn;
}
\f
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (rtx x, rtx y)
{
  enum machine_mode mode = GET_MODE (x);
  rtx y_cst = NULL_RTX;
  rtx last_insn, set;

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  /* Never force constant_p_rtx to memory.  */
  if (GET_CODE (y) == CONSTANT_P_RTX)
    ;
  else if (CONSTANT_P (y))
    {
      if (optimize
          && SCALAR_FLOAT_MODE_P (GET_MODE (x))
          && (last_insn = compress_float_constant (x, y)))
        return last_insn;

      y_cst = y;

      if (!LEGITIMATE_CONSTANT_P (y))
        {
          y = force_const_mem (mode, y);

          /* If the target's cannot_force_const_mem prevented the spill,
             assume that the target's move expanders will also take care
             of the non-legitimate constant.  */
          if (!y)
            y = y_cst;
        }
    }

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
           && ! push_operand (x, GET_MODE (x)))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = validize_mem (x);

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = validize_mem (y);

  if (mode == BLKmode)
    abort ();

  last_insn = emit_move_insn_1 (x, y);

  if (y_cst && GET_CODE (x) == REG
      && (set = single_set (last_insn)) != NULL_RTX
      && SET_DEST (set) == x
      && ! rtx_equal_p (y_cst, SET_SRC (set)))
    set_unique_reg_note (last_insn, REG_EQUAL, y_cst);

  return last_insn;
}
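
/* Usage sketch (hypothetical):

     rtx reg = gen_reg_rtx (SImode);
     emit_move_insn (reg, GEN_INT (42));

   If the constant were not LEGITIMATE_CONSTANT_P for the mode, it would
   be spilled to the constant pool (or left to the target's move
   expander) before the move is emitted.  */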
3237
3238/* Low level part of emit_move_insn.
3239 Called just like emit_move_insn, but assumes X and Y
3240 are basically valid. */
3241
3242rtx
502b8322 3243emit_move_insn_1 (rtx x, rtx y)
261c4230
RS
3244{
3245 enum machine_mode mode = GET_MODE (x);
3246 enum machine_mode submode;
3247 enum mode_class class = GET_MODE_CLASS (mode);
261c4230 3248
dbbbbf3b 3249 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
3a94c984 3250 abort ();
76bbe028 3251
bbf6f052
RK
3252 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
3253 return
3254 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
3255
89742723 3256 /* Expand complex moves by moving real part and imag part, if possible. */
7308a047 3257 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
27e58a70 3258 && BLKmode != (submode = GET_MODE_INNER (mode))
7308a047
RS
3259 && (mov_optab->handlers[(int) submode].insn_code
3260 != CODE_FOR_nothing))
3261 {
3262 /* Don't split destination if it is a stack push. */
3263 int stack = push_operand (x, GET_MODE (x));
7308a047 3264
79ce92d7 3265#ifdef PUSH_ROUNDING
0e9cbd11
KH
3266 /* In case we output to the stack, but the size is smaller than the
3267 machine can push exactly, we need to use move instructions. */
1a06f5fe 3268 if (stack
bb93b973
RK
3269 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
3270 != GET_MODE_SIZE (submode)))
1a06f5fe
JH
3271 {
3272 rtx temp;
bb93b973 3273 HOST_WIDE_INT offset1, offset2;
1a06f5fe
JH
3274
3275 /* Do not use anti_adjust_stack, since we don't want to update
3276 stack_pointer_delta. */
3277 temp = expand_binop (Pmode,
3278#ifdef STACK_GROWS_DOWNWARD
3279 sub_optab,
3280#else
3281 add_optab,
3282#endif
3283 stack_pointer_rtx,
3284 GEN_INT
bb93b973
RK
3285 (PUSH_ROUNDING
3286 (GET_MODE_SIZE (GET_MODE (x)))),
3287 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3288
1a06f5fe
JH
3289 if (temp != stack_pointer_rtx)
3290 emit_move_insn (stack_pointer_rtx, temp);
bb93b973 3291
1a06f5fe
JH
3292#ifdef STACK_GROWS_DOWNWARD
3293 offset1 = 0;
3294 offset2 = GET_MODE_SIZE (submode);
3295#else
3296 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
3297 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
3298 + GET_MODE_SIZE (submode));
3299#endif
bb93b973 3300
1a06f5fe
JH
3301 emit_move_insn (change_address (x, submode,
3302 gen_rtx_PLUS (Pmode,
3303 stack_pointer_rtx,
3304 GEN_INT (offset1))),
3305 gen_realpart (submode, y));
3306 emit_move_insn (change_address (x, submode,
3307 gen_rtx_PLUS (Pmode,
3308 stack_pointer_rtx,
3309 GEN_INT (offset2))),
3310 gen_imagpart (submode, y));
3311 }
e9c0bd54 3312 else
79ce92d7 3313#endif
7308a047
RS
3314 /* If this is a stack, push the highpart first, so it
3315 will be in the argument order.
3316
3317 In that case, change_address is used only to convert
3318 the mode, not to change the address. */
e9c0bd54 3319 if (stack)
c937357e 3320 {
e33c0d66
RS
3321 /* Note that the real part always precedes the imag part in memory
3322 regardless of machine's endianness. */
c937357e 3323#ifdef STACK_GROWS_DOWNWARD
a79b3dc7
RS
3324 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3325 gen_imagpart (submode, y));
3326 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3327 gen_realpart (submode, y));
c937357e 3328#else
a79b3dc7
RS
3329 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3330 gen_realpart (submode, y));
3331 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3332 gen_imagpart (submode, y));
c937357e
RS
3333#endif
3334 }
3335 else
3336 {
235ae7be
DM
3337 rtx realpart_x, realpart_y;
3338 rtx imagpart_x, imagpart_y;
3339
405f63da
MM
3340 /* If this is a complex value with each part being smaller than a
3341 word, the usual calling sequence will likely pack the pieces into
3342 a single register. Unfortunately, SUBREG of hard registers only
3343 deals in terms of words, so we have a problem converting input
3344 arguments to the CONCAT of two registers that is used elsewhere
3345 for complex values. If this is before reload, we can copy it into
3346 memory and reload. FIXME, we should see about using extract and
3347 insert on integer registers, but complex short and complex char
3348 variables should be rarely used. */
3a94c984 3349 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
405f63da
MM
3350 && (reload_in_progress | reload_completed) == 0)
3351 {
bb93b973
RK
3352 int packed_dest_p
3353 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
3354 int packed_src_p
3355 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
405f63da
MM
3356
3357 if (packed_dest_p || packed_src_p)
3358 {
3359 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
3360 ? MODE_FLOAT : MODE_INT);
3361
1da68f56
RK
3362 enum machine_mode reg_mode
3363 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
405f63da
MM
3364
3365 if (reg_mode != BLKmode)
3366 {
3367 rtx mem = assign_stack_temp (reg_mode,
3368 GET_MODE_SIZE (mode), 0);
f4ef873c 3369 rtx cmem = adjust_address (mem, mode, 0);
405f63da 3370
1da68f56
RK
3371 cfun->cannot_inline
3372 = N_("function using short complex types cannot be inline");
405f63da
MM
3373
3374 if (packed_dest_p)
3375 {
3376 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
bb93b973 3377
405f63da
MM
3378 emit_move_insn_1 (cmem, y);
3379 return emit_move_insn_1 (sreg, mem);
3380 }
3381 else
3382 {
3383 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
bb93b973 3384
405f63da
MM
3385 emit_move_insn_1 (mem, sreg);
3386 return emit_move_insn_1 (x, cmem);
3387 }
3388 }
3389 }
3390 }
3391
235ae7be
DM
3392 realpart_x = gen_realpart (submode, x);
3393 realpart_y = gen_realpart (submode, y);
3394 imagpart_x = gen_imagpart (submode, x);
3395 imagpart_y = gen_imagpart (submode, y);
3396
3397 /* Show the output dies here. This is necessary for SUBREGs
3398 of pseudos since we cannot track their lifetimes correctly;
c14c6529
RH
3399 hard regs shouldn't appear here except as return values.
3400 We never want to emit such a clobber after reload. */
3401 if (x != y
235ae7be
DM
3402 && ! (reload_in_progress || reload_completed)
3403 && (GET_CODE (realpart_x) == SUBREG
3404 || GET_CODE (imagpart_x) == SUBREG))
bb93b973 3405 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2638126a 3406
a79b3dc7
RS
3407 emit_move_insn (realpart_x, realpart_y);
3408 emit_move_insn (imagpart_x, imagpart_y);
c937357e 3409 }
7308a047 3410
7a1ab50a 3411 return get_last_insn ();
7308a047
RS
3412 }
3413
a3600c71
HPN
3414 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
3415 find a mode to do it in. If we have a movcc, use it. Otherwise,
3416 find the MODE_INT mode of the same width. */
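 /* A sketch with an invented mode name: for a 4-byte CC_FOOmode with
    no movcc_foo pattern, the value is moved in plain CCmode if that
    has a move pattern; otherwise the loop below walks QImode, HImode,
    SImode, ... and settles on SImode, the first integer mode with the
    same 4-byte size.  */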
3417 else if (GET_MODE_CLASS (mode) == MODE_CC
3418 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
3419 {
3420 enum insn_code insn_code;
3421 enum machine_mode tmode = VOIDmode;
3422 rtx x1 = x, y1 = y;
3423
3424 if (mode != CCmode
3425 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
3426 tmode = CCmode;
3427 else
3428 for (tmode = QImode; tmode != VOIDmode;
3429 tmode = GET_MODE_WIDER_MODE (tmode))
3430 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
3431 break;
3432
3433 if (tmode == VOIDmode)
3434 abort ();
3435
3436 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
3437 may call change_address which is not appropriate if we were
3438 called when a reload was in progress. We don't have to worry
3439 about changing the address since the size in bytes is supposed to
3440 be the same. Copy the MEM to change the mode and move any
3441 substitutions from the old MEM to the new one. */
3442
3443 if (reload_in_progress)
3444 {
3445 x = gen_lowpart_common (tmode, x1);
3446 if (x == 0 && GET_CODE (x1) == MEM)
3447 {
3448 x = adjust_address_nv (x1, tmode, 0);
3449 copy_replacements (x1, x);
3450 }
3451
3452 y = gen_lowpart_common (tmode, y1);
3453 if (y == 0 && GET_CODE (y1) == MEM)
3454 {
3455 y = adjust_address_nv (y1, tmode, 0);
3456 copy_replacements (y1, y);
3457 }
3458 }
3459 else
3460 {
3461 x = gen_lowpart (tmode, x);
3462 y = gen_lowpart (tmode, y);
3463 }
502b8322 3464
a3600c71
HPN
3465 insn_code = mov_optab->handlers[(int) tmode].insn_code;
3466 return emit_insn (GEN_FCN (insn_code) (x, y));
3467 }
3468
cffa2189
R
3469 /* This will handle any multi-word or full-word mode that lacks a move_insn
3470 pattern. However, you will get better code if you define such patterns,
bbf6f052 3471 even if they must turn into multiple assembler instructions. */
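 /* E.g. a DImode move on a 32-bit target with no movdi pattern is
    split below into two SImode word moves via operand_subword, with
    the whole sequence emitted after an optional CLOBBER of the
    destination.  */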
cffa2189 3472 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
bbf6f052
RK
3473 {
3474 rtx last_insn = 0;
3ef1eef4 3475 rtx seq, inner;
235ae7be 3476 int need_clobber;
bb93b973 3477 int i;
3a94c984 3478
a98c9f1a
RK
3479#ifdef PUSH_ROUNDING
3480
3481 /* If X is a push on the stack, do the push now and replace
3482 X with a reference to the stack pointer. */
3483 if (push_operand (x, GET_MODE (x)))
3484 {
918a6124
GK
3485 rtx temp;
3486 enum rtx_code code;
0fb7aeda 3487
918a6124
GK
3488 /* Do not use anti_adjust_stack, since we don't want to update
3489 stack_pointer_delta. */
3490 temp = expand_binop (Pmode,
3491#ifdef STACK_GROWS_DOWNWARD
3492 sub_optab,
3493#else
3494 add_optab,
3495#endif
3496 stack_pointer_rtx,
3497 GEN_INT
bb93b973
RK
3498 (PUSH_ROUNDING
3499 (GET_MODE_SIZE (GET_MODE (x)))),
a426c92e 3500 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
bb93b973 3501
0fb7aeda
KH
3502 if (temp != stack_pointer_rtx)
3503 emit_move_insn (stack_pointer_rtx, temp);
918a6124
GK
3504
3505 code = GET_CODE (XEXP (x, 0));
bb93b973 3506
918a6124
GK
3507 /* Just hope that small offsets off SP are OK. */
3508 if (code == POST_INC)
0fb7aeda 3509 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
bb93b973
RK
3510 GEN_INT (-((HOST_WIDE_INT)
3511 GET_MODE_SIZE (GET_MODE (x)))));
918a6124 3512 else if (code == POST_DEC)
0fb7aeda 3513 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
918a6124
GK
3514 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3515 else
3516 temp = stack_pointer_rtx;
3517
3518 x = change_address (x, VOIDmode, temp);
a98c9f1a
RK
3519 }
3520#endif
3a94c984 3521
3ef1eef4
RK
3522 /* If we are in reload, see if either operand is a MEM whose address
3523 is scheduled for replacement. */
3524 if (reload_in_progress && GET_CODE (x) == MEM
3525 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
f1ec5147 3526 x = replace_equiv_address_nv (x, inner);
3ef1eef4
RK
3527 if (reload_in_progress && GET_CODE (y) == MEM
3528 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
f1ec5147 3529 y = replace_equiv_address_nv (y, inner);
3ef1eef4 3530
235ae7be 3531 start_sequence ();
15a7a8ec 3532
235ae7be 3533 need_clobber = 0;
bbf6f052 3534 for (i = 0;
3a94c984 3535 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
bbf6f052
RK
3536 i++)
3537 {
3538 rtx xpart = operand_subword (x, i, 1, mode);
3539 rtx ypart = operand_subword (y, i, 1, mode);
3540
3541 /* If we can't get a part of Y, put Y into memory if it is a
3542 constant. Otherwise, force it into a register. If we still
3543 can't get a part of Y, abort. */
3544 if (ypart == 0 && CONSTANT_P (y))
3545 {
3546 y = force_const_mem (mode, y);
3547 ypart = operand_subword (y, i, 1, mode);
3548 }
3549 else if (ypart == 0)
3550 ypart = operand_subword_force (y, i, mode);
3551
3552 if (xpart == 0 || ypart == 0)
3553 abort ();
3554
235ae7be
DM
3555 need_clobber |= (GET_CODE (xpart) == SUBREG);
3556
bbf6f052
RK
3557 last_insn = emit_move_insn (xpart, ypart);
3558 }
6551fa4d 3559
2f937369 3560 seq = get_insns ();
235ae7be
DM
3561 end_sequence ();
3562
3563 /* Show the output dies here. This is necessary for SUBREGs
3564 of pseudos since we cannot track their lifetimes correctly;
3565 hard regs shouldn't appear here except as return values.
3566 We never want to emit such a clobber after reload. */
3567 if (x != y
3568 && ! (reload_in_progress || reload_completed)
3569 && need_clobber != 0)
bb93b973 3570 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
235ae7be
DM
3571
3572 emit_insn (seq);
3573
bbf6f052
RK
3574 return last_insn;
3575 }
3576 else
3577 abort ();
3578}
51286de6
RH
3579
3580/* If Y is representable exactly in a narrower mode, and the target can
3581 perform the extension directly from constant or memory, then emit the
3582 move as an extension. */
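/* Example: a move of the DFmode constant 2.5 truncates exactly to
   SFmode, so on a target whose extendsfdf2 accepts a constant or
   memory operand the move is emitted as an SF->DF extension of the
   SFmode constant, and the constant pool holds 4 bytes instead of 8.  */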
3583
3584static rtx
502b8322 3585compress_float_constant (rtx x, rtx y)
51286de6
RH
3586{
3587 enum machine_mode dstmode = GET_MODE (x);
3588 enum machine_mode orig_srcmode = GET_MODE (y);
3589 enum machine_mode srcmode;
3590 REAL_VALUE_TYPE r;
3591
3592 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3593
3594 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3595 srcmode != orig_srcmode;
3596 srcmode = GET_MODE_WIDER_MODE (srcmode))
3597 {
3598 enum insn_code ic;
3599 rtx trunc_y, last_insn;
3600
3601 /* Skip if the target can't extend this way. */
3602 ic = can_extend_p (dstmode, srcmode, 0);
3603 if (ic == CODE_FOR_nothing)
3604 continue;
3605
3606 /* Skip if the narrowed value isn't exact. */
3607 if (! exact_real_truncate (srcmode, &r))
3608 continue;
3609
3610 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3611
3612 if (LEGITIMATE_CONSTANT_P (trunc_y))
3613 {
3614 /* Skip if the target needs extra instructions to perform
3615 the extension. */
3616 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3617 continue;
3618 }
3619 else if (float_extend_from_mem[dstmode][srcmode])
3620 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3621 else
3622 continue;
3623
3624 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3625 last_insn = get_last_insn ();
3626
3627 if (GET_CODE (x) == REG)
0c19a26f 3628 set_unique_reg_note (last_insn, REG_EQUAL, y);
51286de6
RH
3629
3630 return last_insn;
3631 }
3632
3633 return NULL_RTX;
3634}
bbf6f052
RK
3635\f
3636/* Pushing data onto the stack. */
3637
3638/* Push a block of length SIZE (perhaps variable)
3639 and return an rtx to address the beginning of the block.
3640 Note that it is not possible for the value returned to be a QUEUED.
3641 The value may be virtual_outgoing_args_rtx.
3642
3643 EXTRA is the number of bytes of padding to push in addition to SIZE.
3644 BELOW nonzero means this padding comes at low addresses;
3645 otherwise, the padding comes at high addresses. */
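/* For instance, with SIZE = 16, EXTRA = 4 and BELOW nonzero on a
   downward-growing stack, the stack pointer moves down by 20 bytes
   and the returned address is 4 bytes above the new bottom of the
   outgoing args area, leaving the padding at the low addresses.  */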
3646
3647rtx
502b8322 3648push_block (rtx size, int extra, int below)
bbf6f052 3649{
b3694847 3650 rtx temp;
88f63c77
RK
3651
3652 size = convert_modes (Pmode, ptr_mode, size, 1);
bbf6f052
RK
3653 if (CONSTANT_P (size))
3654 anti_adjust_stack (plus_constant (size, extra));
3655 else if (GET_CODE (size) == REG && extra == 0)
3656 anti_adjust_stack (size);
3657 else
3658 {
ce48579b 3659 temp = copy_to_mode_reg (Pmode, size);
bbf6f052 3660 if (extra != 0)
906c4e36 3661 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
bbf6f052
RK
3662 temp, 0, OPTAB_LIB_WIDEN);
3663 anti_adjust_stack (temp);
3664 }
3665
f73ad30e 3666#ifndef STACK_GROWS_DOWNWARD
f73ad30e 3667 if (0)
f73ad30e
JH
3668#else
3669 if (1)
bbf6f052 3670#endif
f73ad30e 3671 {
f73ad30e
JH
3672 temp = virtual_outgoing_args_rtx;
3673 if (extra != 0 && below)
3674 temp = plus_constant (temp, extra);
3675 }
3676 else
3677 {
3678 if (GET_CODE (size) == CONST_INT)
3679 temp = plus_constant (virtual_outgoing_args_rtx,
3a94c984 3680 -INTVAL (size) - (below ? 0 : extra));
f73ad30e
JH
3681 else if (extra != 0 && !below)
3682 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3bdf5ad1 3683 negate_rtx (Pmode, plus_constant (size, extra)));
f73ad30e
JH
3684 else
3685 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3686 negate_rtx (Pmode, size));
3687 }
bbf6f052
RK
3688
3689 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3690}
3691
21d93687
RK
3692#ifdef PUSH_ROUNDING
3693
566aa174 3694/* Emit single push insn. */
21d93687 3695
566aa174 3696static void
502b8322 3697emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
566aa174 3698{
566aa174 3699 rtx dest_addr;
918a6124 3700 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
566aa174 3701 rtx dest;
371b8fc0
JH
3702 enum insn_code icode;
3703 insn_operand_predicate_fn pred;
566aa174 3704
371b8fc0
JH
3705 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3706 /* If there is a push pattern, use it. Otherwise fall back to handing a
3707 MEM that represents the push operation to the move expander. */
3708 icode = push_optab->handlers[(int) mode].insn_code;
3709 if (icode != CODE_FOR_nothing)
3710 {
3711 if (((pred = insn_data[(int) icode].operand[0].predicate)
505ddab6 3712 && !((*pred) (x, mode))))
371b8fc0
JH
3713 x = force_reg (mode, x);
3714 emit_insn (GEN_FCN (icode) (x));
3715 return;
3716 }
566aa174
JH
3717 if (GET_MODE_SIZE (mode) == rounded_size)
3718 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
329d586f
KH
3719 /* If we are to pad downward, adjust the stack pointer first and
3720 then store X into the stack location using an offset. This is
3721 because emit_move_insn does not know how to pad; it does not have
3722 access to type. */
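 /* Concretely: a 2-byte mode rounded up to a 4-byte slot gives
    padding_size == 2, so after the explicit adjustment below the value
    is stored at sp + 2 (plus rounded_size if a post-decrement push has
    already moved sp), leaving the two pad bytes at the low end of the
    slot.  */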
3723 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3724 {
3725 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3726 HOST_WIDE_INT offset;
3727
3728 emit_move_insn (stack_pointer_rtx,
3729 expand_binop (Pmode,
3730#ifdef STACK_GROWS_DOWNWARD
3731 sub_optab,
3732#else
3733 add_optab,
3734#endif
3735 stack_pointer_rtx,
3736 GEN_INT (rounded_size),
3737 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3738
3739 offset = (HOST_WIDE_INT) padding_size;
3740#ifdef STACK_GROWS_DOWNWARD
3741 if (STACK_PUSH_CODE == POST_DEC)
3742 /* We have already decremented the stack pointer, so get the
3743 previous value. */
3744 offset += (HOST_WIDE_INT) rounded_size;
3745#else
3746 if (STACK_PUSH_CODE == POST_INC)
3747 /* We have already incremented the stack pointer, so get the
3748 previous value. */
3749 offset -= (HOST_WIDE_INT) rounded_size;
3750#endif
3751 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3752 }
566aa174
JH
3753 else
3754 {
3755#ifdef STACK_GROWS_DOWNWARD
329d586f 3756 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
566aa174 3757 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
505ddab6 3758 GEN_INT (-(HOST_WIDE_INT) rounded_size));
566aa174 3759#else
329d586f 3760 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
566aa174
JH
3761 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3762 GEN_INT (rounded_size));
3763#endif
3764 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3765 }
3766
3767 dest = gen_rtx_MEM (mode, dest_addr);
3768
566aa174
JH
3769 if (type != 0)
3770 {
3771 set_mem_attributes (dest, type, 1);
c3d32120
RK
3772
3773 if (flag_optimize_sibling_calls)
3774 /* Function incoming arguments may overlap with sibling call
3775 outgoing arguments and we cannot allow reordering of reads
3776 from function arguments with stores to outgoing arguments
3777 of sibling calls. */
3778 set_mem_alias_set (dest, 0);
566aa174
JH
3779 }
3780 emit_move_insn (dest, x);
566aa174 3781}
21d93687 3782#endif
566aa174 3783
bbf6f052
RK
3784/* Generate code to push X onto the stack, assuming it has mode MODE and
3785 type TYPE.
3786 MODE is redundant except when X is a CONST_INT (since they don't
3787 carry mode info).
3788 SIZE is an rtx for the size of data to be copied (in bytes),
3789 needed only if X is BLKmode.
3790
f1eaaf73 3791 ALIGN (in bits) is maximum alignment we can assume.
bbf6f052 3792
cd048831
RK
3793 If PARTIAL and REG are both nonzero, then copy that many of the first
3794 words of X into registers starting with REG, and push the rest of X.
bbf6f052
RK
3795 The amount of space pushed is decreased by PARTIAL words,
3796 rounded *down* to a multiple of PARM_BOUNDARY.
3797 REG must be a hard register in this case.
cd048831
RK
3798 If REG is zero but PARTIAL is not, take all other actions for an
3799 argument partially in registers, but do not actually load any
3800 registers.
bbf6f052
RK
3801
3802 EXTRA is the amount in bytes of extra space to leave next to this arg.
6dc42e49 3803 This is ignored if an argument block has already been allocated.
bbf6f052
RK
3804
3805 On a machine that lacks real push insns, ARGS_ADDR is the address of
3806 the bottom of the argument block for this call. We use indexing off there
3807 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3808 argument block has not been preallocated.
3809
e5e809f4
JL
3810 ARGS_SO_FAR is the size of args previously pushed for this call.
3811
3812 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3813 for arguments passed in registers. If nonzero, it will be the number
3814 of bytes required. */
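/* For example, with UNITS_PER_WORD == 4, PARTIAL == 2 and REG a hard
   register, the first two words of X are loaded into registers at the
   end of this function and only the rest is pushed; the space pushed
   shrinks by those two words, rounded down to a multiple of
   PARM_BOUNDARY.  */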
bbf6f052
RK
3815
3816void
502b8322
AJ
3817emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3818 unsigned int align, int partial, rtx reg, int extra,
3819 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3820 rtx alignment_pad)
bbf6f052
RK
3821{
3822 rtx xinner;
3823 enum direction stack_direction
3824#ifdef STACK_GROWS_DOWNWARD
3825 = downward;
3826#else
3827 = upward;
3828#endif
3829
3830 /* Decide where to pad the argument: `downward' for below,
3831 `upward' for above, or `none' for don't pad it.
3832 Default is below for small data on big-endian machines; else above. */
3833 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3834
0fb7aeda 3835 /* Invert direction if stack is post-decrement.
9e0e11bf
GK
3836 FIXME: why? */
3837 if (STACK_PUSH_CODE == POST_DEC)
bbf6f052
RK
3838 if (where_pad != none)
3839 where_pad = (where_pad == downward ? upward : downward);
3840
3841 xinner = x = protect_from_queue (x, 0);
3842
3843 if (mode == BLKmode)
3844 {
3845 /* Copy a block into the stack, entirely or partially. */
3846
b3694847 3847 rtx temp;
bbf6f052
RK
3848 int used = partial * UNITS_PER_WORD;
3849 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3850 int skip;
3a94c984 3851
bbf6f052
RK
3852 if (size == 0)
3853 abort ();
3854
3855 used -= offset;
3856
3857 /* USED is now the # of bytes we need not copy to the stack
3858 because registers will take care of them. */
3859
3860 if (partial != 0)
f4ef873c 3861 xinner = adjust_address (xinner, BLKmode, used);
bbf6f052
RK
3862
3863 /* If the partial register-part of the arg counts in its stack size,
3864 skip the part of stack space corresponding to the registers.
3865 Otherwise, start copying to the beginning of the stack space,
3866 by setting SKIP to 0. */
e5e809f4 3867 skip = (reg_parm_stack_space == 0) ? 0 : used;
bbf6f052
RK
3868
3869#ifdef PUSH_ROUNDING
3870 /* Do it with several push insns if that doesn't take lots of insns
3871 and if there is no difficulty with push insns that skip bytes
3872 on the stack for alignment purposes. */
3873 if (args_addr == 0
f73ad30e 3874 && PUSH_ARGS
bbf6f052
RK
3875 && GET_CODE (size) == CONST_INT
3876 && skip == 0
f26aca6d 3877 && MEM_ALIGN (xinner) >= align
15914757 3878 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
bbf6f052
RK
3879 /* Here we avoid the case of a structure whose weak alignment
3880 forces many pushes of a small amount of data,
3881 and such small pushes do rounding that causes trouble. */
e1565e65 3882 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
19caa751 3883 || align >= BIGGEST_ALIGNMENT
f1eaaf73
DE
3884 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3885 == (align / BITS_PER_UNIT)))
bbf6f052
RK
3886 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3887 {
3888 /* Push padding now if padding above and stack grows down,
3889 or if padding below and stack grows up.
3890 But if space already allocated, this has already been done. */
3891 if (extra && args_addr == 0
3892 && where_pad != none && where_pad != stack_direction)
906c4e36 3893 anti_adjust_stack (GEN_INT (extra));
bbf6f052 3894
8fd3cf4e 3895 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
bbf6f052
RK
3896 }
3897 else
3a94c984 3898#endif /* PUSH_ROUNDING */
bbf6f052 3899 {
7ab923cc
JJ
3900 rtx target;
3901
bbf6f052
RK
3902 /* Otherwise make space on the stack and copy the data
3903 to the address of that space. */
3904
3905 /* Deduct words put into registers from the size we must copy. */
3906 if (partial != 0)
3907 {
3908 if (GET_CODE (size) == CONST_INT)
906c4e36 3909 size = GEN_INT (INTVAL (size) - used);
bbf6f052
RK
3910 else
3911 size = expand_binop (GET_MODE (size), sub_optab, size,
906c4e36
RK
3912 GEN_INT (used), NULL_RTX, 0,
3913 OPTAB_LIB_WIDEN);
bbf6f052
RK
3914 }
3915
3916 /* Get the address of the stack space.
3917 In this case, we do not deal with EXTRA separately.
3918 A single stack adjust will do. */
3919 if (! args_addr)
3920 {
3921 temp = push_block (size, extra, where_pad == downward);
3922 extra = 0;
3923 }
3924 else if (GET_CODE (args_so_far) == CONST_INT)
3925 temp = memory_address (BLKmode,
3926 plus_constant (args_addr,
3927 skip + INTVAL (args_so_far)));
3928 else
3929 temp = memory_address (BLKmode,
38a448ca
RH
3930 plus_constant (gen_rtx_PLUS (Pmode,
3931 args_addr,
3932 args_so_far),
bbf6f052 3933 skip));
4ca79136
RH
3934
3935 if (!ACCUMULATE_OUTGOING_ARGS)
3936 {
3937 /* If the source is referenced relative to the stack pointer,
3938 copy it to another register to stabilize it. We do not need
3939 to do this if we know that we won't be changing sp. */
3940
3941 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3942 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3943 temp = copy_to_reg (temp);
3944 }
3945
3a94c984 3946 target = gen_rtx_MEM (BLKmode, temp);
7ab923cc 3947
3a94c984
KH
3948 if (type != 0)
3949 {
3950 set_mem_attributes (target, type, 1);
3951 /* Function incoming arguments may overlap with sibling call
3952 outgoing arguments and we cannot allow reordering of reads
3953 from function arguments with stores to outgoing arguments
3954 of sibling calls. */
ba4828e0 3955 set_mem_alias_set (target, 0);
3a94c984 3956 }
4ca79136 3957
44bb111a
RH
3958 /* ALIGN may well be better aligned than TYPE, e.g. due to
3959 PARM_BOUNDARY. Assume the caller isn't lying. */
3960 set_mem_align (target, align);
4ca79136 3961
44bb111a 3962 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
bbf6f052
RK
3963 }
3964 }
3965 else if (partial > 0)
3966 {
3967 /* Scalar partly in registers. */
3968
3969 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3970 int i;
3971 int not_stack;
3972 /* # words at the start of the argument
3973 that we must make space for but need not store. */
3974 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3975 int args_offset = INTVAL (args_so_far);
3976 int skip;
3977
3978 /* Push padding now if padding above and stack grows down,
3979 or if padding below and stack grows up.
3980 But if space already allocated, this has already been done. */
3981 if (extra && args_addr == 0
3982 && where_pad != none && where_pad != stack_direction)
906c4e36 3983 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
3984
3985 /* If we make space by pushing it, we might as well push
3986 the real data. Otherwise, we can leave OFFSET nonzero
3987 and leave the space uninitialized. */
3988 if (args_addr == 0)
3989 offset = 0;
3990
3991 /* Now NOT_STACK gets the number of words that we don't need to
3992 allocate on the stack. */
3993 not_stack = partial - offset;
3994
3995 /* If the partial register-part of the arg counts in its stack size,
3996 skip the part of stack space corresponding to the registers.
3997 Otherwise, start copying to the beginning of the stack space,
3998 by setting SKIP to 0. */
e5e809f4 3999 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
bbf6f052
RK
4000
4001 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
4002 x = validize_mem (force_const_mem (mode, x));
4003
4004 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4005 SUBREGs of such registers are not allowed. */
4006 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
4007 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4008 x = copy_to_reg (x);
4009
4010 /* Loop over all the words allocated on the stack for this arg. */
4011 /* We can do it by words, because any scalar bigger than a word
4012 has a size that is a multiple of a word. */
4013#ifndef PUSH_ARGS_REVERSED
4014 for (i = not_stack; i < size; i++)
4015#else
4016 for (i = size - 1; i >= not_stack; i--)
4017#endif
4018 if (i >= not_stack + offset)
4019 emit_push_insn (operand_subword_force (x, i, mode),
906c4e36
RK
4020 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4021 0, args_addr,
4022 GEN_INT (args_offset + ((i - not_stack + skip)
e5e809f4 4023 * UNITS_PER_WORD)),
4fc026cd 4024 reg_parm_stack_space, alignment_pad);
bbf6f052
RK
4025 }
4026 else
4027 {
4028 rtx addr;
3bdf5ad1 4029 rtx dest;
bbf6f052
RK
4030
4031 /* Push padding now if padding above and stack grows down,
4032 or if padding below and stack grows up.
4033 But if space already allocated, this has already been done. */
4034 if (extra && args_addr == 0
4035 && where_pad != none && where_pad != stack_direction)
906c4e36 4036 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
4037
4038#ifdef PUSH_ROUNDING
f73ad30e 4039 if (args_addr == 0 && PUSH_ARGS)
566aa174 4040 emit_single_push_insn (mode, x, type);
bbf6f052
RK
4041 else
4042#endif
921b3427
RK
4043 {
4044 if (GET_CODE (args_so_far) == CONST_INT)
4045 addr
4046 = memory_address (mode,
3a94c984 4047 plus_constant (args_addr,
921b3427 4048 INTVAL (args_so_far)));
3a94c984 4049 else
38a448ca
RH
4050 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4051 args_so_far));
566aa174
JH
4052 dest = gen_rtx_MEM (mode, addr);
4053 if (type != 0)
4054 {
4055 set_mem_attributes (dest, type, 1);
4056 /* Function incoming arguments may overlap with sibling call
4057 outgoing arguments and we cannot allow reordering of reads
4058 from function arguments with stores to outgoing arguments
4059 of sibling calls. */
ba4828e0 4060 set_mem_alias_set (dest, 0);
566aa174 4061 }
bbf6f052 4062
566aa174 4063 emit_move_insn (dest, x);
566aa174 4064 }
bbf6f052
RK
4065 }
4066
bbf6f052
RK
4067 /* If part should go in registers, copy that part
4068 into the appropriate registers. Do this now, at the end,
4069 since mem-to-mem copies above may do function calls. */
cd048831 4070 if (partial > 0 && reg != 0)
fffa9c1d
JW
4071 {
4072 /* Handle calls that pass values in multiple non-contiguous locations.
4073 The Irix 6 ABI has examples of this. */
4074 if (GET_CODE (reg) == PARALLEL)
04050c69 4075 emit_group_load (reg, x, -1); /* ??? size? */
fffa9c1d
JW
4076 else
4077 move_block_to_reg (REGNO (reg), x, partial, mode);
4078 }
bbf6f052
RK
4079
4080 if (extra && args_addr == 0 && where_pad == stack_direction)
906c4e36 4081 anti_adjust_stack (GEN_INT (extra));
3a94c984 4082
3ea2292a 4083 if (alignment_pad && args_addr == 0)
4fc026cd 4084 anti_adjust_stack (alignment_pad);
bbf6f052
RK
4085}
4086\f
296b4ed9
RK
4087/* Return X if X can be used as a subtarget in a sequence of arithmetic
4088 operations. */
4089
4090static rtx
502b8322 4091get_subtarget (rtx x)
296b4ed9
RK
4092{
4093 return ((x == 0
4094 /* Only registers can be subtargets. */
4095 || GET_CODE (x) != REG
4096 /* If the register is readonly, it can't be set more than once. */
4097 || RTX_UNCHANGING_P (x)
4098 /* Don't use hard regs to avoid extending their life. */
4099 || REGNO (x) < FIRST_PSEUDO_REGISTER
4100 /* Avoid subtargets inside loops,
4101 since they hide some invariant expressions. */
4102 || preserve_subexpressions_p ())
4103 ? 0 : x);
4104}
4105
bbf6f052
RK
4106/* Expand an assignment that stores the value of FROM into TO.
4107 If WANT_VALUE is nonzero, return an rtx for the value of TO.
709f5be1
RS
4108 (This may contain a QUEUED rtx;
4109 if the value is constant, this rtx is a constant.)
4110 Otherwise, the returned value is NULL_RTX.
bbf6f052
RK
4111
4112 SUGGEST_REG is no longer actually used.
4113 It used to mean, copy the value through a register
4114 and return that register, if that is possible.
709f5be1 4115 We now use WANT_VALUE to decide whether to do this. */
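/* A sketch of the two calling styles: a statement-level assignment
   passes WANT_VALUE == 0 and ignores the result, while an assignment
   used as a subexpression passes WANT_VALUE == 1 and consumes the
   returned rtx (which, as noted above, may be a QUEUED or a
   constant).  */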
bbf6f052
RK
4116
4117rtx
502b8322
AJ
4118expand_assignment (tree to, tree from, int want_value,
4119 int suggest_reg ATTRIBUTE_UNUSED)
bbf6f052 4120{
b3694847 4121 rtx to_rtx = 0;
bbf6f052
RK
4122 rtx result;
4123
4124 /* Don't crash if the lhs of the assignment was erroneous. */
4125
4126 if (TREE_CODE (to) == ERROR_MARK)
709f5be1
RS
4127 {
4128 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
4129 return want_value ? result : NULL_RTX;
4130 }
bbf6f052
RK
4131
4132 /* Assignment of a structure component needs special treatment
4133 if the structure component's rtx is not simply a MEM.
6be58303
JW
4134 Assignment of an array element at a constant index, and assignment of
4135 an array element in an unaligned packed structure field, has the same
4136 problem. */
bbf6f052 4137
08293add 4138 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
7c02ae17
DE
4139 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
4140 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
bbf6f052
RK
4141 {
4142 enum machine_mode mode1;
770ae6cc 4143 HOST_WIDE_INT bitsize, bitpos;
a06ef755 4144 rtx orig_to_rtx;
7bb0943f 4145 tree offset;
bbf6f052
RK
4146 int unsignedp;
4147 int volatilep = 0;
0088fcb1
RK
4148 tree tem;
4149
4150 push_temp_slots ();
839c4796 4151 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
a06ef755 4152 &unsignedp, &volatilep);
bbf6f052
RK
4153
4154 /* If we are going to use store_bit_field and extract_bit_field,
4155 make sure to_rtx will be safe for multiple use. */
4156
4157 if (mode1 == VOIDmode && want_value)
4158 tem = stabilize_reference (tem);
4159
1ed1b4fb
RK
4160 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
4161
7bb0943f
RS
4162 if (offset != 0)
4163 {
e3c8ea67 4164 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
7bb0943f
RS
4165
4166 if (GET_CODE (to_rtx) != MEM)
4167 abort ();
bd070e1a 4168
bd070e1a 4169#ifdef POINTERS_EXTEND_UNSIGNED
4b6c1672 4170 if (GET_MODE (offset_rtx) != Pmode)
267b28bd 4171 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
fa06ab5c
RK
4172#else
4173 if (GET_MODE (offset_rtx) != ptr_mode)
4174 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
bd070e1a 4175#endif
bd070e1a 4176
9a7b9f4f
JL
4177 /* A constant address in TO_RTX can have VOIDmode, we must not try
4178 to call force_reg for that case. Avoid that case. */
89752202
HB
4179 if (GET_CODE (to_rtx) == MEM
4180 && GET_MODE (to_rtx) == BLKmode
9a7b9f4f 4181 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
a06ef755 4182 && bitsize > 0
3a94c984 4183 && (bitpos % bitsize) == 0
89752202 4184 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
a06ef755 4185 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
89752202 4186 {
e3c8ea67 4187 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
89752202
HB
4188 bitpos = 0;
4189 }
4190
0d4903b8 4191 to_rtx = offset_address (to_rtx, offset_rtx,
818c0c94
RH
4192 highest_pow2_factor_for_type (TREE_TYPE (to),
4193 offset));
7bb0943f 4194 }
c5c76735 4195
998d7deb
RH
4196 if (GET_CODE (to_rtx) == MEM)
4197 {
998d7deb
RH
4198 /* If the field is at offset zero, we could have been given the
4199 DECL_RTX of the parent struct. Don't munge it. */
4200 to_rtx = shallow_copy_rtx (to_rtx);
4201
6f1087be 4202 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
998d7deb 4203 }
effbcc6a 4204
a06ef755
RK
4205 /* Deal with volatile and readonly fields. The former is only done
4206 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4207 if (volatilep && GET_CODE (to_rtx) == MEM)
4208 {
4209 if (to_rtx == orig_to_rtx)
4210 to_rtx = copy_rtx (to_rtx);
4211 MEM_VOLATILE_P (to_rtx) = 1;
bbf6f052
RK
4212 }
4213
956d6950
JL
4214 if (TREE_CODE (to) == COMPONENT_REF
4215 && TREE_READONLY (TREE_OPERAND (to, 1)))
4216 {
a06ef755 4217 if (to_rtx == orig_to_rtx)
956d6950 4218 to_rtx = copy_rtx (to_rtx);
956d6950
JL
4219 RTX_UNCHANGING_P (to_rtx) = 1;
4220 }
4221
a84b4898 4222 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
a06ef755
RK
4223 {
4224 if (to_rtx == orig_to_rtx)
4225 to_rtx = copy_rtx (to_rtx);
4226 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4227 }
4228
a06ef755
RK
4229 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4230 (want_value
4231 /* Spurious cast for HPUX compiler. */
4232 ? ((enum machine_mode)
4233 TYPE_MODE (TREE_TYPE (to)))
4234 : VOIDmode),
4235 unsignedp, TREE_TYPE (tem), get_alias_set (to));
a69beca1 4236
a06ef755
RK
4237 preserve_temp_slots (result);
4238 free_temp_slots ();
4239 pop_temp_slots ();
a69beca1 4240
a06ef755
RK
4241 /* If the value is meaningful, convert RESULT to the proper mode.
4242 Otherwise, return nothing. */
4243 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
4244 TYPE_MODE (TREE_TYPE (from)),
4245 result,
4246 TREE_UNSIGNED (TREE_TYPE (to)))
4247 : NULL_RTX);
bbf6f052
RK
4248 }
4249
cd1db108
RS
4250 /* If the rhs is a function call and its value is not an aggregate,
4251 call the function before we start to compute the lhs.
4252 This is needed for correct code for cases such as
4253 val = setjmp (buf) on machines where reference to val
1ad87b63
RK
4254 requires loading up part of an address in a separate insn.
4255
1858863b
JW
4256 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4257 since it might be a promoted variable where the zero- or sign-extension
4258 needs to be done. Handling this in the normal way is safe because no
4259 computation is done before the call. */
1ad87b63 4260 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
b35cd3c1 4261 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
1858863b
JW
4262 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4263 && GET_CODE (DECL_RTL (to)) == REG))
cd1db108 4264 {
0088fcb1
RK
4265 rtx value;
4266
4267 push_temp_slots ();
4268 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
cd1db108 4269 if (to_rtx == 0)
37a08a29 4270 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
aaf87c45 4271
fffa9c1d
JW
4272 /* Handle calls that return values in multiple non-contiguous locations.
4273 The Irix 6 ABI has examples of this. */
4274 if (GET_CODE (to_rtx) == PARALLEL)
04050c69 4275 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
fffa9c1d 4276 else if (GET_MODE (to_rtx) == BLKmode)
44bb111a 4277 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
aaf87c45 4278 else
6419e5b0
DT
4279 {
4280#ifdef POINTERS_EXTEND_UNSIGNED
0d4903b8
RK
4281 if (POINTER_TYPE_P (TREE_TYPE (to))
4282 && GET_MODE (to_rtx) != GET_MODE (value))
6419e5b0
DT
4283 value = convert_memory_address (GET_MODE (to_rtx), value);
4284#endif
4285 emit_move_insn (to_rtx, value);
4286 }
cd1db108
RS
4287 preserve_temp_slots (to_rtx);
4288 free_temp_slots ();
0088fcb1 4289 pop_temp_slots ();
709f5be1 4290 return want_value ? to_rtx : NULL_RTX;
cd1db108
RS
4291 }
4292
bbf6f052
RK
4293 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4294 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4295
4296 if (to_rtx == 0)
37a08a29 4297 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
bbf6f052 4298
86d38d25 4299 /* Don't move directly into a return register. */
14a774a9
RK
4300 if (TREE_CODE (to) == RESULT_DECL
4301 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
86d38d25 4302 {
0088fcb1
RK
4303 rtx temp;
4304
4305 push_temp_slots ();
4306 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
14a774a9
RK
4307
4308 if (GET_CODE (to_rtx) == PARALLEL)
04050c69 4309 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
14a774a9
RK
4310 else
4311 emit_move_insn (to_rtx, temp);
4312
86d38d25
RS
4313 preserve_temp_slots (to_rtx);
4314 free_temp_slots ();
0088fcb1 4315 pop_temp_slots ();
709f5be1 4316 return want_value ? to_rtx : NULL_RTX;
86d38d25
RS
4317 }
4318
bbf6f052
RK
4319 /* In case we are returning the contents of an object which overlaps
4320 the place the value is being stored, use a safe function when copying
4321 a value through a pointer into a structure value return block. */
4322 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4323 && current_function_returns_struct
4324 && !current_function_returns_pcc_struct)
4325 {
0088fcb1
RK
4326 rtx from_rtx, size;
4327
4328 push_temp_slots ();
33a20d10 4329 size = expr_size (from);
37a08a29 4330 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
bbf6f052 4331
4ca79136
RH
4332 if (TARGET_MEM_FUNCTIONS)
4333 emit_library_call (memmove_libfunc, LCT_NORMAL,
4334 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4335 XEXP (from_rtx, 0), Pmode,
4336 convert_to_mode (TYPE_MODE (sizetype),
4337 size, TREE_UNSIGNED (sizetype)),
4338 TYPE_MODE (sizetype));
4339 else
4340 emit_library_call (bcopy_libfunc, LCT_NORMAL,
4341 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
4342 XEXP (to_rtx, 0), Pmode,
4343 convert_to_mode (TYPE_MODE (integer_type_node),
4344 size,
4345 TREE_UNSIGNED (integer_type_node)),
4346 TYPE_MODE (integer_type_node));
bbf6f052
RK
4347
4348 preserve_temp_slots (to_rtx);
4349 free_temp_slots ();
0088fcb1 4350 pop_temp_slots ();
709f5be1 4351 return want_value ? to_rtx : NULL_RTX;
bbf6f052
RK
4352 }
4353
4354 /* Compute FROM and store the value in the rtx we got. */
4355
0088fcb1 4356 push_temp_slots ();
bbf6f052
RK
4357 result = store_expr (from, to_rtx, want_value);
4358 preserve_temp_slots (result);
4359 free_temp_slots ();
0088fcb1 4360 pop_temp_slots ();
709f5be1 4361 return want_value ? result : NULL_RTX;
bbf6f052
RK
4362}
4363
4364/* Generate code for computing expression EXP,
4365 and storing the value into TARGET.
bbf6f052
RK
4366 TARGET may contain a QUEUED rtx.
4367
8403445a 4368 If WANT_VALUE & 1 is nonzero, return a copy of the value
709f5be1
RS
4369 not in TARGET, so that we can be sure to use the proper
4370 value in a containing expression even if TARGET has something
4371 else stored in it. If possible, we copy the value through a pseudo
4372 and return that pseudo. Or, if the value is constant, we try to
4373 return the constant. In some cases, we return a pseudo
4374 copied *from* TARGET.
4375
4376 If the mode is BLKmode then we may return TARGET itself.
4377 It turns out that in BLKmode it doesn't cause a problem,
4378 because C has no operators that could combine two different
4379 assignments into the same BLKmode object with different values
4380 with no sequence point. Will other languages need this to
4381 be more thorough?
4382
8403445a 4383 If WANT_VALUE & 1 is 0, we return NULL, to make sure
709f5be1 4384 to catch quickly any cases where the caller uses the value
8403445a
AM
4385 and fails to set WANT_VALUE.
4386
4387 If WANT_VALUE & 2 is set, this is a store into a call param on the
4388 stack, and block moves may need to be treated specially. */
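/* Summarizing the WANT_VALUE bits: pass 0 to discard the value, 1 to
   get a safe copy of it back, and or-in 2 when storing a call
   parameter on the stack (e.g. WANT_VALUE == 3 when that parameter's
   value is also needed). Bit 2 selects EXPAND_STACK_PARM for the
   recursive expansions and BLOCK_OP_CALL_PARM for the block moves
   below.  */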
bbf6f052
RK
4389
4390rtx
502b8322 4391store_expr (tree exp, rtx target, int want_value)
bbf6f052 4392{
b3694847 4393 rtx temp;
bbf6f052 4394 int dont_return_target = 0;
e5408e52 4395 int dont_store_target = 0;
bbf6f052 4396
847311f4
AL
4397 if (VOID_TYPE_P (TREE_TYPE (exp)))
4398 {
4399 /* C++ can generate ?: expressions with a throw expression in one
4400 branch and an rvalue in the other. Here, we resolve attempts to
71c0e7fc 4401 store the throw expression's nonexistent result. */
847311f4
AL
4402 if (want_value)
4403 abort ();
4404 expand_expr (exp, const0_rtx, VOIDmode, 0);
4405 return NULL_RTX;
4406 }
bbf6f052
RK
4407 if (TREE_CODE (exp) == COMPOUND_EXPR)
4408 {
4409 /* Perform first part of compound expression, then assign from second
4410 part. */
8403445a
AM
4411 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4412 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
bbf6f052 4413 emit_queue ();
709f5be1 4414 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
bbf6f052
RK
4415 }
4416 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4417 {
4418 /* For conditional expression, get safe form of the target. Then
4419 test the condition, doing the appropriate assignment on either
4420 side. This avoids the creation of unnecessary temporaries.
4421 For non-BLKmode, it is more efficient not to do this. */
4422
4423 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4424
4425 emit_queue ();
4426 target = protect_from_queue (target, 1);
4427
dabf8373 4428 do_pending_stack_adjust ();
bbf6f052
RK
4429 NO_DEFER_POP;
4430 jumpifnot (TREE_OPERAND (exp, 0), lab1);
956d6950 4431 start_cleanup_deferral ();
8403445a 4432 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
956d6950 4433 end_cleanup_deferral ();
bbf6f052
RK
4434 emit_queue ();
4435 emit_jump_insn (gen_jump (lab2));
4436 emit_barrier ();
4437 emit_label (lab1);
956d6950 4438 start_cleanup_deferral ();
8403445a 4439 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
956d6950 4440 end_cleanup_deferral ();
bbf6f052
RK
4441 emit_queue ();
4442 emit_label (lab2);
4443 OK_DEFER_POP;
a3a58acc 4444
8403445a 4445 return want_value & 1 ? target : NULL_RTX;
bbf6f052 4446 }
bbf6f052 4447 else if (queued_subexp_p (target))
709f5be1
RS
4448 /* If target contains a postincrement, let's not risk
4449 using it as the place to generate the rhs. */
bbf6f052
RK
4450 {
4451 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4452 {
4453 /* Expand EXP into a new pseudo. */
4454 temp = gen_reg_rtx (GET_MODE (target));
8403445a
AM
4455 temp = expand_expr (exp, temp, GET_MODE (target),
4456 (want_value & 2
4457 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
bbf6f052
RK
4458 }
4459 else
8403445a
AM
4460 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4461 (want_value & 2
4462 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
709f5be1
RS
4463
4464 /* If target is volatile, ANSI requires accessing the value
4465 *from* the target, if it is accessed. So make that happen.
4466 In no case return the target itself. */
8403445a 4467 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
709f5be1 4468 dont_return_target = 1;
bbf6f052 4469 }
8403445a
AM
4470 else if ((want_value & 1) != 0
4471 && GET_CODE (target) == MEM
4472 && ! MEM_VOLATILE_P (target)
12f06d17
CH
4473 && GET_MODE (target) != BLKmode)
4474 /* If target is in memory and caller wants value in a register instead,
4475 arrange that. Pass TARGET as target for expand_expr so that,
4476 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4477 We know expand_expr will not use the target in that case.
4478 Don't do this if TARGET is volatile because we are supposed
4479 to write it and then read it. */
4480 {
8403445a
AM
4481 temp = expand_expr (exp, target, GET_MODE (target),
4482 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
12f06d17 4483 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
e5408e52
JJ
4484 {
4485 /* If TEMP is already in the desired TARGET, only copy it from
4486 memory and don't store it there again. */
4487 if (temp == target
4488 || (rtx_equal_p (temp, target)
4489 && ! side_effects_p (temp) && ! side_effects_p (target)))
4490 dont_store_target = 1;
4491 temp = copy_to_reg (temp);
4492 }
12f06d17
CH
4493 dont_return_target = 1;
4494 }
1499e0a8 4495 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
09da1532 4496 /* If this is a scalar in a register that is stored in a wider mode
1499e0a8
RK
4497 than the declared mode, compute the result into its declared mode
4498 and then convert to the wider mode. Our value is the computed
4499 expression. */
4500 {
b76b08ef
RK
4501 rtx inner_target = 0;
4502
5a32d038 4503 /* If we don't want a value, we can do the conversion inside EXP,
f635a84d
RK
4504 which will often result in some optimizations. Do the conversion
4505 in two steps: first change the signedness, if needed, then
ab6c58f1
RK
4506 the extend. But don't do this if the type of EXP is a subtype
4507 of something else since then the conversion might involve
4508 more than just converting modes. */
8403445a
AM
4509 if ((want_value & 1) == 0
4510 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
ab6c58f1 4511 && TREE_TYPE (TREE_TYPE (exp)) == 0)
f635a84d
RK
4512 {
4513 if (TREE_UNSIGNED (TREE_TYPE (exp))
4514 != SUBREG_PROMOTED_UNSIGNED_P (target))
ceef8ce4
NB
4515 exp = convert
4516 ((*lang_hooks.types.signed_or_unsigned_type)
4517 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
f635a84d 4518
b0c48229
NB
4519 exp = convert ((*lang_hooks.types.type_for_mode)
4520 (GET_MODE (SUBREG_REG (target)),
4521 SUBREG_PROMOTED_UNSIGNED_P (target)),
f635a84d 4522 exp);
b76b08ef
RK
4523
4524 inner_target = SUBREG_REG (target);
f635a84d 4525 }
3a94c984 4526
8403445a
AM
4527 temp = expand_expr (exp, inner_target, VOIDmode,
4528 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
b258707c 4529
7abec5be 4530 /* If TEMP is a MEM and we want a result value, make the access
502b8322
AJ
4531 now so it gets done only once. Strictly speaking, this is
4532 only necessary if the MEM is volatile, or if the address
7abec5be
RH
4533 overlaps TARGET. But not performing the load twice also
4534 reduces the amount of rtl we generate and then have to CSE. */
8403445a 4535 if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
766f36c7
RK
4536 temp = copy_to_reg (temp);
4537
b258707c
RS
4538 /* If TEMP is a VOIDmode constant, use convert_modes to make
4539 sure that we properly convert it. */
4540 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
1f1b0541
RH
4541 {
4542 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4543 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4544 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4545 GET_MODE (target), temp,
4546 SUBREG_PROMOTED_UNSIGNED_P (target));
4547 }
b258707c 4548
1499e0a8
RK
4549 convert_move (SUBREG_REG (target), temp,
4550 SUBREG_PROMOTED_UNSIGNED_P (target));
3dbecef9
JW
4551
4552 /* If we promoted a constant, change the mode back down to match
4553 target. Otherwise, the caller might get confused by a result whose
4554 mode is larger than expected. */
4555
8403445a 4556 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
3dbecef9 4557 {
b3ca30df
JJ
4558 if (GET_MODE (temp) != VOIDmode)
4559 {
4560 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4561 SUBREG_PROMOTED_VAR_P (temp) = 1;
0fb7aeda 4562 SUBREG_PROMOTED_UNSIGNED_SET (temp,
7879b81e 4563 SUBREG_PROMOTED_UNSIGNED_P (target));
b3ca30df
JJ
4564 }
4565 else
4566 temp = convert_modes (GET_MODE (target),
4567 GET_MODE (SUBREG_REG (target)),
4568 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
3dbecef9
JW
4569 }
4570
8403445a 4571 return want_value & 1 ? temp : NULL_RTX;
1499e0a8 4572 }
bbf6f052
RK
4573 else
4574 {
8403445a
AM
4575 temp = expand_expr (exp, target, GET_MODE (target),
4576 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
766f36c7 4577 /* Return TARGET if it's a specified hardware register.
709f5be1
RS
4578 If TARGET is a volatile mem ref, either return TARGET
4579 or return a reg copied *from* TARGET; ANSI requires this.
4580
4581 Otherwise, if TEMP is not TARGET, return TEMP
4582 if it is constant (for efficiency),
4583 or if we really want the correct value. */
bbf6f052
RK
4584 if (!(target && GET_CODE (target) == REG
4585 && REGNO (target) < FIRST_PSEUDO_REGISTER)
709f5be1 4586 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
effbcc6a 4587 && ! rtx_equal_p (temp, target)
8403445a 4588 && (CONSTANT_P (temp) || (want_value & 1) != 0))
bbf6f052
RK
4589 dont_return_target = 1;
4590 }
4591
b258707c
RS
4592 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4593 the same as that of TARGET, adjust the constant. This is needed, for
4594 example, in case it is a CONST_DOUBLE and we want only a word-sized
4595 value. */
4596 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
c1da1f33 4597 && TREE_CODE (exp) != ERROR_MARK
b258707c
RS
4598 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4599 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4600 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4601
bbf6f052 4602 /* If value was not generated in the target, store it there.
37a08a29
RK
4603 Convert the value to TARGET's type first if necessary.
4604 If TEMP and TARGET compare equal according to rtx_equal_p, but
f3f2255a
R
4605 one or both of them are volatile memory refs, we have to distinguish
4606 two cases:
4607 - expand_expr has used TARGET. In this case, we must not generate
4608 another copy. This can be detected by TARGET being equal according
4609 to == .
4610 - expand_expr has not used TARGET - that means that the source just
4611 happens to have the same RTX form. Since temp will have been created
4612 by expand_expr, it will compare unequal according to == .
4613 We must generate a copy in this case, to reach the correct number
4614 of volatile memory references. */
bbf6f052 4615
6036acbb 4616 if ((! rtx_equal_p (temp, target)
f3f2255a
R
4617 || (temp != target && (side_effects_p (temp)
4618 || side_effects_p (target))))
e5408e52 4619 && TREE_CODE (exp) != ERROR_MARK
a9772b60
JJ
4620 && ! dont_store_target
4621 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4622 but TARGET is not a valid memory reference, TEMP will differ
4623 from TARGET although it is really the same location. */
4624 && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
e56fc090
HPN
4625 || target != DECL_RTL_IF_SET (exp))
4626 /* If there's nothing to copy, don't bother. Don't call expr_size
4627 unless necessary, because the expr_size hook of some front ends
4628 (C++) aborts on objects that are not supposed to be bit-copied or
4629 bit-initialized. */
4630 && expr_size (exp) != const0_rtx)
bbf6f052
RK
4631 {
4632 target = protect_from_queue (target, 1);
4633 if (GET_MODE (temp) != GET_MODE (target)
f0348c25 4634 && GET_MODE (temp) != VOIDmode)
bbf6f052
RK
4635 {
4636 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4637 if (dont_return_target)
4638 {
4639 /* In this case, we will return TEMP,
4640 so make sure it has the proper mode.
4641 But don't forget to store the value into TARGET. */
4642 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4643 emit_move_insn (target, temp);
4644 }
4645 else
4646 convert_move (target, temp, unsignedp);
4647 }
4648
4649 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4650 {
c24ae149
RK
4651 /* Handle copying a string constant into an array. The string
4652 constant may be shorter than the array. So copy just the string's
4653 actual length, and clear the rest. First get the size of the data
4654 type of the string, which is actually the size of the target. */
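 /* E.g. for char a[8] = "abc": SIZE is 8 and TREE_STRING_LENGTH is 4
    (counting the terminating '\0'), so the block move copies 4 bytes
    and clear_storage zeros the remaining 4.  */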
4655 rtx size = expr_size (exp);
bbf6f052 4656
e87b4f3f
RS
4657 if (GET_CODE (size) == CONST_INT
4658 && INTVAL (size) < TREE_STRING_LENGTH (exp))
8403445a
AM
4659 emit_block_move (target, temp, size,
4660 (want_value & 2
4661 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
e87b4f3f 4662 else
bbf6f052 4663 {
e87b4f3f
RS
4664 /* Compute the size of the data to copy from the string. */
4665 tree copy_size
c03b7665 4666 = size_binop (MIN_EXPR,
b50d17a1 4667 make_tree (sizetype, size),
fed3cef0 4668 size_int (TREE_STRING_LENGTH (exp)));
8403445a
AM
4669 rtx copy_size_rtx
4670 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4671 (want_value & 2
4672 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
e87b4f3f
RS
4673 rtx label = 0;
4674
4675 /* Copy that much. */
267b28bd
SE
4676 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4677 TREE_UNSIGNED (sizetype));
8403445a
AM
4678 emit_block_move (target, temp, copy_size_rtx,
4679 (want_value & 2
4680 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
e87b4f3f 4681
88f63c77
RK
4682 /* Figure out how much is left in TARGET that we have to clear.
4683 Do all calculations in ptr_mode. */
e87b4f3f
RS
4684 if (GET_CODE (copy_size_rtx) == CONST_INT)
4685 {
c24ae149
RK
4686 size = plus_constant (size, -INTVAL (copy_size_rtx));
4687 target = adjust_address (target, BLKmode,
4688 INTVAL (copy_size_rtx));
e87b4f3f
RS
4689 }
4690 else
4691 {
fa06ab5c 4692 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
906c4e36
RK
4693 copy_size_rtx, NULL_RTX, 0,
4694 OPTAB_LIB_WIDEN);
e87b4f3f 4695
c24ae149
RK
4696#ifdef POINTERS_EXTEND_UNSIGNED
4697 if (GET_MODE (copy_size_rtx) != Pmode)
267b28bd
SE
4698 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4699 TREE_UNSIGNED (sizetype));
c24ae149
RK
4700#endif
4701
4702 target = offset_address (target, copy_size_rtx,
4703 highest_pow2_factor (copy_size));
e87b4f3f 4704 label = gen_label_rtx ();
c5d5d461 4705 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
a06ef755 4706 GET_MODE (size), 0, label);
e87b4f3f
RS
4707 }
4708
4709 if (size != const0_rtx)
37a08a29 4710 clear_storage (target, size);
22619c3f 4711
e87b4f3f
RS
4712 if (label)
4713 emit_label (label);
bbf6f052
RK
4714 }
4715 }
fffa9c1d
JW
4716 /* Handle calls that return values in multiple non-contiguous locations.
4717 The Irix 6 ABI has examples of this. */
4718 else if (GET_CODE (target) == PARALLEL)
04050c69 4719 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
bbf6f052 4720 else if (GET_MODE (temp) == BLKmode)
8403445a
AM
4721 emit_block_move (target, temp, expr_size (exp),
4722 (want_value & 2
4723 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
bbf6f052
RK
4724 else
4725 emit_move_insn (target, temp);
4726 }
709f5be1 4727
766f36c7 4728 /* If we don't want a value, return NULL_RTX. */
8403445a 4729 if ((want_value & 1) == 0)
766f36c7
RK
4730 return NULL_RTX;
4731
4732 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4733 ??? The latter test doesn't seem to make sense. */
4734 else if (dont_return_target && GET_CODE (temp) != MEM)
bbf6f052 4735 return temp;
766f36c7
RK
4736
4737 /* Return TARGET itself if it is a hard register. */
8403445a
AM
4738 else if ((want_value & 1) != 0
4739 && GET_MODE (target) != BLKmode
766f36c7
RK
4740 && ! (GET_CODE (target) == REG
4741 && REGNO (target) < FIRST_PSEUDO_REGISTER))
709f5be1 4742 return copy_to_reg (target);
3a94c984 4743
766f36c7 4744 else
709f5be1 4745 return target;
bbf6f052
RK
4746}
4747\f
9de08200
RK
4748/* Return 1 if EXP just contains zeros. */
4749
4750static int
502b8322 4751is_zeros_p (tree exp)
9de08200
RK
4752{
4753 tree elt;
4754
4755 switch (TREE_CODE (exp))
4756 {
4757 case CONVERT_EXPR:
4758 case NOP_EXPR:
4759 case NON_LVALUE_EXPR:
ed239f5a 4760 case VIEW_CONVERT_EXPR:
9de08200
RK
4761 return is_zeros_p (TREE_OPERAND (exp, 0));
4762
4763 case INTEGER_CST:
05bccae2 4764 return integer_zerop (exp);
9de08200
RK
4765
4766 case COMPLEX_CST:
4767 return
4768 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4769
4770 case REAL_CST:
41c9120b 4771 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
9de08200 4772
69ef87e2
AH
4773 case VECTOR_CST:
4774 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4775 elt = TREE_CHAIN (elt))
4776 if (!is_zeros_p (TREE_VALUE (elt)))
4777 return 0;
4778
4779 return 1;
4780
9de08200 4781 case CONSTRUCTOR:
e1a43f73
PB
4782 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4783 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
9de08200
RK
4784 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4785 if (! is_zeros_p (TREE_VALUE (elt)))
4786 return 0;
4787
4788 return 1;
3a94c984 4789
e9a25f70
JL
4790 default:
4791 return 0;
9de08200 4792 }
9de08200
RK
4793}
4794
4795/* Return 1 if EXP contains mostly (3/4) zeros. */
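/* E.g. a constructor with 16 elements of which 12 are zero passes the
   test below, since 4 * 12 >= 3 * 16; with only 11 zeros it fails,
   since 44 < 48.  */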
4796
4797static int
502b8322 4798mostly_zeros_p (tree exp)
9de08200 4799{
9de08200
RK
4800 if (TREE_CODE (exp) == CONSTRUCTOR)
4801 {
e1a43f73
PB
4802 int elts = 0, zeros = 0;
4803 tree elt = CONSTRUCTOR_ELTS (exp);
4804 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4805 {
4806 /* If there are no ranges of true bits, it is all zero. */
4807 return elt == NULL_TREE;
4808 }
4809 for (; elt; elt = TREE_CHAIN (elt))
4810 {
4811 /* We do not handle the case where the index is a RANGE_EXPR,
4812 so the statistic will be somewhat inaccurate.
4813 We do make a more accurate count in store_constructor itself,
4814 and since this function is only used for nested array elements,
0f41302f 4815 this should be close enough. */
e1a43f73
PB
4816 if (mostly_zeros_p (TREE_VALUE (elt)))
4817 zeros++;
4818 elts++;
4819 }
9de08200
RK
4820
4821 return 4 * zeros >= 3 * elts;
4822 }
4823
4824 return is_zeros_p (exp);
4825}
4826\f
e1a43f73
PB
4827/* Helper function for store_constructor.
4828 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4829 TYPE is the type of the CONSTRUCTOR, not the element type.
04050c69 4830 CLEARED is as for store_constructor.
23cb1766 4831 ALIAS_SET is the alias set to use for any stores.
23ccec44
JW
4832
4833 This provides a recursive shortcut back to store_constructor when it isn't
4834 necessary to go through store_field. This is so that we can pass through
4835 the cleared field to let store_constructor know that we may not have to
4836 clear a substructure if the outer structure has already been cleared. */
e1a43f73
PB
4837
4838static void
502b8322
AJ
4839store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4840 HOST_WIDE_INT bitpos, enum machine_mode mode,
4841 tree exp, tree type, int cleared, int alias_set)
e1a43f73
PB
4842{
4843 if (TREE_CODE (exp) == CONSTRUCTOR
23ccec44 4844 && bitpos % BITS_PER_UNIT == 0
cc2902df 4845 /* If we have a nonzero bitpos for a register target, then we just
23ccec44
JW
4846 let store_field do the bitfield handling. This is unlikely to
4847 generate unnecessary clear instructions anyways. */
4848 && (bitpos == 0 || GET_CODE (target) == MEM))
e1a43f73 4849 {
61cb205c
RK
4850 if (GET_CODE (target) == MEM)
4851 target
4852 = adjust_address (target,
4853 GET_MODE (target) == BLKmode
4854 || 0 != (bitpos
4855 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4856 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
23cb1766 4857
e0339ef7 4858
04050c69 4859 /* Update the alias set, if required. */
10b76d73
RK
4860 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4861 && MEM_ALIAS_SET (target) != 0)
70072ed9
RK
4862 {
4863 target = copy_rtx (target);
4864 set_mem_alias_set (target, alias_set);
4865 }
e0339ef7 4866
04050c69 4867 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
e1a43f73
PB
4868 }
4869 else
a06ef755
RK
4870 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4871 alias_set);
e1a43f73
PB
4872}
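/* A minimal caller sketch (illustrative only; FIELD and VALUE are
   hypothetical).  For one field of a record constructor, the record
   loop in store_constructor below makes essentially this call:

     store_constructor_field (to_rtx,
                              tree_low_cst (DECL_SIZE (field), 1),
                              int_bit_position (field),
                              DECL_MODE (field), value, type, cleared,
                              get_alias_set (TREE_TYPE (field)));

   so a CONSTRUCTOR-valued FIELD recurses into store_constructor and
   inherits CLEARED instead of re-clearing its substructure.  */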
4873
bbf6f052 4874/* Store the value of constructor EXP into the rtx TARGET.
04050c69
RK
4875 TARGET is either a REG or a MEM; we know it cannot conflict, since
4876 safe_from_p has been called.
b7010412
RK
4877 CLEARED is true if TARGET is known to have been zeroed.
4878 SIZE is the number of bytes of TARGET we are allowed to modify: this
4879 may not be the same as the size of EXP if we are assigning to a field
4880 which has been packed to exclude padding bits. */
bbf6f052
RK
4881
4882static void
502b8322 4883store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
bbf6f052 4884{
4af3895e 4885 tree type = TREE_TYPE (exp);
a5efcd63 4886#ifdef WORD_REGISTER_OPERATIONS
13eb1f7f 4887 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
a5efcd63 4888#endif
4af3895e 4889
e44842fe
RK
4890 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4891 || TREE_CODE (type) == QUAL_UNION_TYPE)
bbf6f052 4892 {
b3694847 4893 tree elt;
bbf6f052 4894
04050c69 4895 /* We either clear the aggregate or indicate the value is dead. */
dd1db5ec
RK
4896 if ((TREE_CODE (type) == UNION_TYPE
4897 || TREE_CODE (type) == QUAL_UNION_TYPE)
04050c69
RK
4898 && ! cleared
4899 && ! CONSTRUCTOR_ELTS (exp))
4900 /* If the constructor is empty, clear the union. */
a59f8640 4901 {
04050c69
RK
4902 clear_storage (target, expr_size (exp));
4903 cleared = 1;
a59f8640 4904 }
4af3895e
JVA
4905
4906 /* If we are building a static constructor into a register,
4907 set the initial value as zero so we can fold the value into
67225c15
RK
4908 a constant. But if more than one register is involved,
4909 this probably loses. */
04050c69 4910 else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
67225c15 4911 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
9de08200 4912 {
04050c69 4913 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
9de08200
RK
4914 cleared = 1;
4915 }
4916
4917 /* If the constructor has fewer fields than the structure
4918 or if we are initializing the structure to mostly zeros,
0d97bf4c 4919 clear the whole structure first. Don't do this if TARGET is a
fcf1b822
RK
4920 register whose mode size isn't equal to SIZE since clear_storage
4921 can't handle this case. */
04050c69 4922 else if (! cleared && size > 0
9376fcd6 4923 && ((list_length (CONSTRUCTOR_ELTS (exp))
c3b247b4 4924 != fields_length (type))
fcf1b822
RK
4925 || mostly_zeros_p (exp))
4926 && (GET_CODE (target) != REG
04050c69
RK
4927 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4928 == size)))
9de08200 4929 {
337f4314
RK
4930 rtx xtarget = target;
4931
4932 if (readonly_fields_p (type))
4933 {
4934 xtarget = copy_rtx (xtarget);
4935 RTX_UNCHANGING_P (xtarget) = 1;
4936 }
4937
4938 clear_storage (xtarget, GEN_INT (size));
9de08200
RK
4939 cleared = 1;
4940 }
04050c69
RK
4941
4942 if (! cleared)
38a448ca 4943 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
bbf6f052
RK
4944
4945 /* Store each element of the constructor into
4946 the corresponding field of TARGET. */
4947
4948 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4949 {
b3694847 4950 tree field = TREE_PURPOSE (elt);
34c73909 4951 tree value = TREE_VALUE (elt);
b3694847 4952 enum machine_mode mode;
770ae6cc
RK
4953 HOST_WIDE_INT bitsize;
4954 HOST_WIDE_INT bitpos = 0;
770ae6cc 4955 tree offset;
b50d17a1 4956 rtx to_rtx = target;
bbf6f052 4957
f32fd778
RS
4958 /* Just ignore missing fields.
4959 We cleared the whole structure, above,
4960 if any fields are missing. */
4961 if (field == 0)
4962 continue;
4963
8b6000fc 4964 if (cleared && is_zeros_p (value))
e1a43f73 4965 continue;
9de08200 4966
770ae6cc
RK
4967 if (host_integerp (DECL_SIZE (field), 1))
4968 bitsize = tree_low_cst (DECL_SIZE (field), 1);
14a774a9
RK
4969 else
4970 bitsize = -1;
4971
bbf6f052
RK
4972 mode = DECL_MODE (field);
4973 if (DECL_BIT_FIELD (field))
4974 mode = VOIDmode;
4975
770ae6cc
RK
4976 offset = DECL_FIELD_OFFSET (field);
4977 if (host_integerp (offset, 0)
4978 && host_integerp (bit_position (field), 0))
4979 {
4980 bitpos = int_bit_position (field);
4981 offset = 0;
4982 }
b50d17a1 4983 else
770ae6cc 4984 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
3a94c984 4985
b50d17a1
RK
4986 if (offset)
4987 {
4988 rtx offset_rtx;
4989
7a6cdb44 4990 if (CONTAINS_PLACEHOLDER_P (offset))
7fa96708 4991 offset = build (WITH_RECORD_EXPR, sizetype,
956d6950 4992 offset, make_tree (TREE_TYPE (exp), target));
bbf6f052 4993
b50d17a1
RK
4994 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4995 if (GET_CODE (to_rtx) != MEM)
4996 abort ();
4997
bd070e1a 4998#ifdef POINTERS_EXTEND_UNSIGNED
4b6c1672 4999 if (GET_MODE (offset_rtx) != Pmode)
267b28bd 5000 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
fa06ab5c
RK
5001#else
5002 if (GET_MODE (offset_rtx) != ptr_mode)
5003 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
bd070e1a 5004#endif
bd070e1a 5005
0d4903b8
RK
5006 to_rtx = offset_address (to_rtx, offset_rtx,
5007 highest_pow2_factor (offset));
b50d17a1 5008 }
c5c76735 5009
cf04eb80
RK
5010 if (TREE_READONLY (field))
5011 {
9151b3bf 5012 if (GET_CODE (to_rtx) == MEM)
effbcc6a
RK
5013 to_rtx = copy_rtx (to_rtx);
5014
cf04eb80
RK
5015 RTX_UNCHANGING_P (to_rtx) = 1;
5016 }
5017
34c73909
R
5018#ifdef WORD_REGISTER_OPERATIONS
5019 /* If this initializes a field that is smaller than a word, at the
5020 start of a word, try to widen it to a full word.
5021 This special case allows us to output C++ member function
5022 initializations in a form that the optimizers can understand. */
770ae6cc 5023 if (GET_CODE (target) == REG
34c73909
R
5024 && bitsize < BITS_PER_WORD
5025 && bitpos % BITS_PER_WORD == 0
5026 && GET_MODE_CLASS (mode) == MODE_INT
5027 && TREE_CODE (value) == INTEGER_CST
13eb1f7f
RK
5028 && exp_size >= 0
5029 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
34c73909
R
5030 {
5031 tree type = TREE_TYPE (value);
04050c69 5032
34c73909
R
5033 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5034 {
b0c48229
NB
5035 type = (*lang_hooks.types.type_for_size)
5036 (BITS_PER_WORD, TREE_UNSIGNED (type));
34c73909
R
5037 value = convert (type, value);
5038 }
04050c69 5039
34c73909
R
5040 if (BYTES_BIG_ENDIAN)
5041 value
5042 = fold (build (LSHIFT_EXPR, type, value,
5043 build_int_2 (BITS_PER_WORD - bitsize, 0)));
5044 bitsize = BITS_PER_WORD;
5045 mode = word_mode;
5046 }
5047#endif
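/* Example of the widening above (illustrative; assumes a 32-bit word
   and a hypothetical struct { short a, b; } held in a register):
   initializing A with the constant 3 gives bitsize == 16 at
   bitpos == 0, so the value is converted to the 32-bit type and, on a
   big-endian target, shifted left by 16; the store then becomes a
   plain word move rather than a bit-field insertion.  */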
10b76d73
RK
5048
5049 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5050 && DECL_NONADDRESSABLE_P (field))
5051 {
5052 to_rtx = copy_rtx (to_rtx);
5053 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5054 }
5055
c5c76735 5056 store_constructor_field (to_rtx, bitsize, bitpos, mode,
8b6000fc 5057 value, type, cleared,
10b76d73 5058 get_alias_set (TREE_TYPE (field)));
bbf6f052
RK
5059 }
5060 }
e6834654
SS
5061 else if (TREE_CODE (type) == ARRAY_TYPE
5062 || TREE_CODE (type) == VECTOR_TYPE)
bbf6f052 5063 {
b3694847
SS
5064 tree elt;
5065 int i;
e1a43f73 5066 int need_to_clear;
4af3895e 5067 tree domain = TYPE_DOMAIN (type);
4af3895e 5068 tree elttype = TREE_TYPE (type);
e6834654 5069 int const_bounds_p;
ae0ed63a
JM
5070 HOST_WIDE_INT minelt = 0;
5071 HOST_WIDE_INT maxelt = 0;
85f3d674 5072
e6834654
SS
5073 /* Vectors are like arrays, but the domain is stored via an array
5074 type indirectly. */
5075 if (TREE_CODE (type) == VECTOR_TYPE)
5076 {
5077 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
5078 the same field as TYPE_DOMAIN, we are not guaranteed that
5079 it always will. */
5080 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
5081 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
5082 }
5083
5084 const_bounds_p = (TYPE_MIN_VALUE (domain)
5085 && TYPE_MAX_VALUE (domain)
5086 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5087 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5088
85f3d674
RK
5089 /* If we have constant bounds for the range of the type, get them. */
5090 if (const_bounds_p)
5091 {
5092 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5093 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5094 }
bbf6f052 5095
e1a43f73 5096 /* If the constructor has fewer elements than the array,
38e01259 5097 clear the whole array first. Similarly if this is
e1a43f73
PB
5098 a static constructor of a non-BLKmode object. */
5099 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
5100 need_to_clear = 1;
5101 else
5102 {
5103 HOST_WIDE_INT count = 0, zero_count = 0;
85f3d674
RK
5104 need_to_clear = ! const_bounds_p;
5105
e1a43f73
PB
5106 /* This loop is a more accurate version of the loop in
5107 mostly_zeros_p (it handles RANGE_EXPR in an index).
5108 It is also needed to check for missing elements. */
5109 for (elt = CONSTRUCTOR_ELTS (exp);
85f3d674 5110 elt != NULL_TREE && ! need_to_clear;
df0faff1 5111 elt = TREE_CHAIN (elt))
e1a43f73
PB
5112 {
5113 tree index = TREE_PURPOSE (elt);
5114 HOST_WIDE_INT this_node_count;
19caa751 5115
e1a43f73
PB
5116 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5117 {
5118 tree lo_index = TREE_OPERAND (index, 0);
5119 tree hi_index = TREE_OPERAND (index, 1);
05bccae2 5120
19caa751
RK
5121 if (! host_integerp (lo_index, 1)
5122 || ! host_integerp (hi_index, 1))
e1a43f73
PB
5123 {
5124 need_to_clear = 1;
5125 break;
5126 }
19caa751
RK
5127
5128 this_node_count = (tree_low_cst (hi_index, 1)
5129 - tree_low_cst (lo_index, 1) + 1);
e1a43f73
PB
5130 }
5131 else
5132 this_node_count = 1;
85f3d674 5133
e1a43f73
PB
5134 count += this_node_count;
5135 if (mostly_zeros_p (TREE_VALUE (elt)))
5136 zero_count += this_node_count;
5137 }
85f3d674 5138
8e958f70 5139 /* Clear the entire array first if there are any missing elements,
0f41302f 5140 or if the incidence of zero elements is >= 75%. */
85f3d674
RK
5141 if (! need_to_clear
5142 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
e1a43f73
PB
5143 need_to_clear = 1;
5144 }
85f3d674 5145
9376fcd6 5146 if (need_to_clear && size > 0)
9de08200
RK
5147 {
5148 if (! cleared)
725e58b1
RK
5149 {
5150 if (REG_P (target))
5151 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5152 else
5153 clear_storage (target, GEN_INT (size));
5154 }
9de08200
RK
5155 cleared = 1;
5156 }
df4556a3 5157 else if (REG_P (target))
bbf6f052 5158 /* Inform later passes that the old value is dead. */
38a448ca 5159 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
bbf6f052
RK
5160
5161 /* Store each element of the constructor into
5162 the corresponding element of TARGET, determined
5163 by counting the elements. */
5164 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
5165 elt;
5166 elt = TREE_CHAIN (elt), i++)
5167 {
b3694847 5168 enum machine_mode mode;
19caa751
RK
5169 HOST_WIDE_INT bitsize;
5170 HOST_WIDE_INT bitpos;
bbf6f052 5171 int unsignedp;
e1a43f73 5172 tree value = TREE_VALUE (elt);
03dc44a6
RS
5173 tree index = TREE_PURPOSE (elt);
5174 rtx xtarget = target;
bbf6f052 5175
e1a43f73
PB
5176 if (cleared && is_zeros_p (value))
5177 continue;
9de08200 5178
bbf6f052 5179 unsignedp = TREE_UNSIGNED (elttype);
14a774a9
RK
5180 mode = TYPE_MODE (elttype);
5181 if (mode == BLKmode)
19caa751
RK
5182 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5183 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5184 : -1);
14a774a9
RK
5185 else
5186 bitsize = GET_MODE_BITSIZE (mode);
bbf6f052 5187
e1a43f73
PB
5188 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5189 {
5190 tree lo_index = TREE_OPERAND (index, 0);
5191 tree hi_index = TREE_OPERAND (index, 1);
4977bab6 5192 rtx index_r, pos_rtx, loop_end;
e1a43f73 5193 struct nesting *loop;
05c0b405
PB
5194 HOST_WIDE_INT lo, hi, count;
5195 tree position;
e1a43f73 5196
0f41302f 5197 /* If the range is constant and "small", unroll the loop. */
85f3d674
RK
5198 if (const_bounds_p
5199 && host_integerp (lo_index, 0)
19caa751
RK
5200 && host_integerp (hi_index, 0)
5201 && (lo = tree_low_cst (lo_index, 0),
5202 hi = tree_low_cst (hi_index, 0),
05c0b405
PB
5203 count = hi - lo + 1,
5204 (GET_CODE (target) != MEM
5205 || count <= 2
19caa751
RK
5206 || (host_integerp (TYPE_SIZE (elttype), 1)
5207 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5208 <= 40 * 8)))))
e1a43f73 5209 {
05c0b405
PB
5210 lo -= minelt; hi -= minelt;
5211 for (; lo <= hi; lo++)
e1a43f73 5212 {
19caa751 5213 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
10b76d73
RK
5214
5215 if (GET_CODE (target) == MEM
5216 && !MEM_KEEP_ALIAS_SET_P (target)
e6834654 5217 && TREE_CODE (type) == ARRAY_TYPE
10b76d73
RK
5218 && TYPE_NONALIASED_COMPONENT (type))
5219 {
5220 target = copy_rtx (target);
5221 MEM_KEEP_ALIAS_SET_P (target) = 1;
5222 }
5223
23cb1766 5224 store_constructor_field
04050c69
RK
5225 (target, bitsize, bitpos, mode, value, type, cleared,
5226 get_alias_set (elttype));
e1a43f73
PB
5227 }
5228 }
5229 else
5230 {
4977bab6 5231 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
e1a43f73
PB
5232 loop_end = gen_label_rtx ();
5233
5234 unsignedp = TREE_UNSIGNED (domain);
5235
5236 index = build_decl (VAR_DECL, NULL_TREE, domain);
5237
19e7881c 5238 index_r
e1a43f73
PB
5239 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5240 &unsignedp, 0));
19e7881c 5241 SET_DECL_RTL (index, index_r);
e1a43f73
PB
5242 if (TREE_CODE (value) == SAVE_EXPR
5243 && SAVE_EXPR_RTL (value) == 0)
5244 {
0f41302f
MS
5245 /* Make sure value gets expanded once before the
5246 loop. */
e1a43f73
PB
5247 expand_expr (value, const0_rtx, VOIDmode, 0);
5248 emit_queue ();
5249 }
5250 store_expr (lo_index, index_r, 0);
5251 loop = expand_start_loop (0);
5252
0f41302f 5253 /* Assign value to element index. */
fed3cef0
RK
5254 position
5255 = convert (ssizetype,
5256 fold (build (MINUS_EXPR, TREE_TYPE (index),
5257 index, TYPE_MIN_VALUE (domain))));
5258 position = size_binop (MULT_EXPR, position,
5259 convert (ssizetype,
5260 TYPE_SIZE_UNIT (elttype)));
5261
e1a43f73 5262 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
0d4903b8
RK
5263 xtarget = offset_address (target, pos_rtx,
5264 highest_pow2_factor (position));
5265 xtarget = adjust_address (xtarget, mode, 0);
e1a43f73 5266 if (TREE_CODE (value) == CONSTRUCTOR)
04050c69 5267 store_constructor (value, xtarget, cleared,
b7010412 5268 bitsize / BITS_PER_UNIT);
e1a43f73
PB
5269 else
5270 store_expr (value, xtarget, 0);
5271
5272 expand_exit_loop_if_false (loop,
5273 build (LT_EXPR, integer_type_node,
5274 index, hi_index));
5275
5276 expand_increment (build (PREINCREMENT_EXPR,
5277 TREE_TYPE (index),
7b8b9722 5278 index, integer_one_node), 0, 0);
e1a43f73
PB
5279 expand_end_loop ();
5280 emit_label (loop_end);
e1a43f73
PB
5281 }
5282 }
19caa751
RK
5283 else if ((index != 0 && ! host_integerp (index, 0))
5284 || ! host_integerp (TYPE_SIZE (elttype), 1))
03dc44a6 5285 {
03dc44a6
RS
5286 tree position;
5287
5b6c44ff 5288 if (index == 0)
fed3cef0 5289 index = ssize_int (1);
5b6c44ff 5290
e1a43f73 5291 if (minelt)
fed3cef0
RK
5292 index = convert (ssizetype,
5293 fold (build (MINUS_EXPR, index,
5294 TYPE_MIN_VALUE (domain))));
19caa751 5295
fed3cef0
RK
5296 position = size_binop (MULT_EXPR, index,
5297 convert (ssizetype,
5298 TYPE_SIZE_UNIT (elttype)));
0d4903b8
RK
5299 xtarget = offset_address (target,
5300 expand_expr (position, 0, VOIDmode, 0),
5301 highest_pow2_factor (position));
5302 xtarget = adjust_address (xtarget, mode, 0);
e1a43f73 5303 store_expr (value, xtarget, 0);
03dc44a6
RS
5304 }
5305 else
5306 {
5307 if (index != 0)
19caa751
RK
5308 bitpos = ((tree_low_cst (index, 0) - minelt)
5309 * tree_low_cst (TYPE_SIZE (elttype), 1));
03dc44a6 5310 else
19caa751
RK
5311 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5312
10b76d73 5313 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
e6834654 5314 && TREE_CODE (type) == ARRAY_TYPE
10b76d73
RK
5315 && TYPE_NONALIASED_COMPONENT (type))
5316 {
5317 target = copy_rtx (target);
5318 MEM_KEEP_ALIAS_SET_P (target) = 1;
5319 }
5320
c5c76735 5321 store_constructor_field (target, bitsize, bitpos, mode, value,
04050c69 5322 type, cleared, get_alias_set (elttype));
23cb1766 5323
03dc44a6 5324 }
bbf6f052
RK
5325 }
5326 }
19caa751 5327
3a94c984 5328 /* Set constructor assignments. */
071a6595
PB
5329 else if (TREE_CODE (type) == SET_TYPE)
5330 {
e1a43f73 5331 tree elt = CONSTRUCTOR_ELTS (exp);
19caa751 5332 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
071a6595
PB
5333 tree domain = TYPE_DOMAIN (type);
5334 tree domain_min, domain_max, bitlength;
5335
9faa82d8 5336 /* The default implementation strategy is to extract the constant
071a6595
PB
5337 parts of the constructor, use that to initialize the target,
5338 and then "or" in whatever non-constant ranges we need in addition.
5339
5340 If a large set is all zero or all ones, it is
5341 probably better to set it using memset (if available) or bzero.
5342 Also, if a large set has just a single range, it may also be
5343 better to first clear the whole set (using
0f41302f 5344 bzero/memset), and then set the bits we want. */
3a94c984 5345
0f41302f 5346 /* Check for all zeros. */
9376fcd6 5347 if (elt == NULL_TREE && size > 0)
071a6595 5348 {
e1a43f73 5349 if (!cleared)
8ac61af7 5350 clear_storage (target, GEN_INT (size));
071a6595
PB
5351 return;
5352 }
5353
071a6595
PB
5354 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5355 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5356 bitlength = size_binop (PLUS_EXPR,
fed3cef0
RK
5357 size_diffop (domain_max, domain_min),
5358 ssize_int (1));
071a6595 5359
19caa751 5360 nbits = tree_low_cst (bitlength, 1);
e1a43f73
PB
5361
5362 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5363 are "complicated" (more than one range), initialize (the
3a94c984 5364 constant parts) by copying from a constant. */
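/* Worked example for the packing loop below (illustrative, assuming
   an 8-bit set word on a little-endian target): for a set with bits 1
   and 3 set, get_set_constructor_bits fills bit_buffer with
   { 0, 1, 0, 1, 0, 0, 0, 0 } and the loop accumulates
   word == (1 << 1) | (1 << 3) == 0x0a before emitting the move.  */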
e1a43f73
PB
5365 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5366 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
071a6595 5367 {
19caa751 5368 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
b4ee5a72 5369 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
0f41302f 5370 char *bit_buffer = (char *) alloca (nbits);
b4ee5a72 5371 HOST_WIDE_INT word = 0;
19caa751
RK
5372 unsigned int bit_pos = 0;
5373 unsigned int ibit = 0;
5374 unsigned int offset = 0; /* In bytes from beginning of set. */
5375
e1a43f73 5376 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
b4ee5a72 5377 for (;;)
071a6595 5378 {
b4ee5a72
PB
5379 if (bit_buffer[ibit])
5380 {
b09f3348 5381 if (BYTES_BIG_ENDIAN)
b4ee5a72
PB
5382 word |= (1 << (set_word_size - 1 - bit_pos));
5383 else
5384 word |= 1 << bit_pos;
5385 }
19caa751 5386
b4ee5a72
PB
5387 bit_pos++; ibit++;
5388 if (bit_pos >= set_word_size || ibit == nbits)
071a6595 5389 {
e1a43f73
PB
5390 if (word != 0 || ! cleared)
5391 {
5392 rtx datum = GEN_INT (word);
5393 rtx to_rtx;
19caa751 5394
0f41302f
MS
5395 /* The assumption here is that it is safe to use
5396 XEXP if the set is multi-word, but not if
5397 it's single-word. */
e1a43f73 5398 if (GET_CODE (target) == MEM)
f4ef873c 5399 to_rtx = adjust_address (target, mode, offset);
3a94c984 5400 else if (offset == 0)
e1a43f73
PB
5401 to_rtx = target;
5402 else
5403 abort ();
5404 emit_move_insn (to_rtx, datum);
5405 }
19caa751 5406
b4ee5a72
PB
5407 if (ibit == nbits)
5408 break;
5409 word = 0;
5410 bit_pos = 0;
5411 offset += set_word_size / BITS_PER_UNIT;
071a6595
PB
5412 }
5413 }
071a6595 5414 }
e1a43f73 5415 else if (!cleared)
19caa751
RK
5416 /* Don't bother clearing storage if the set is all ones. */
5417 if (TREE_CHAIN (elt) != NULL_TREE
5418 || (TREE_PURPOSE (elt) == NULL_TREE
5419 ? nbits != 1
5420 : ( ! host_integerp (TREE_VALUE (elt), 0)
5421 || ! host_integerp (TREE_PURPOSE (elt), 0)
5422 || (tree_low_cst (TREE_VALUE (elt), 0)
5423 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5424 != (HOST_WIDE_INT) nbits))))
8ac61af7 5425 clear_storage (target, expr_size (exp));
3a94c984 5426
e1a43f73 5427 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
071a6595 5428 {
3a94c984 5429 /* Start of range of element or NULL. */
071a6595 5430 tree startbit = TREE_PURPOSE (elt);
3a94c984 5431 /* End of range of element, or element value. */
071a6595
PB
5432 tree endbit = TREE_VALUE (elt);
5433 HOST_WIDE_INT startb, endb;
19caa751 5434 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
071a6595
PB
5435
5436 bitlength_rtx = expand_expr (bitlength,
19caa751 5437 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
071a6595 5438
3a94c984 5439 /* Handle non-range tuple element like [ expr ]. */
071a6595
PB
5440 if (startbit == NULL_TREE)
5441 {
5442 startbit = save_expr (endbit);
5443 endbit = startbit;
5444 }
19caa751 5445
071a6595
PB
5446 startbit = convert (sizetype, startbit);
5447 endbit = convert (sizetype, endbit);
5448 if (! integer_zerop (domain_min))
5449 {
5450 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5451 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5452 }
3a94c984 5453 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
071a6595 5454 EXPAND_CONST_ADDRESS);
3a94c984 5455 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
071a6595
PB
5456 EXPAND_CONST_ADDRESS);
5457
5458 if (REG_P (target))
5459 {
1da68f56
RK
5460 targetx
5461 = assign_temp
b0c48229
NB
5462 ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5463 (GET_MODE (target), 0),
1da68f56
RK
5464 TYPE_QUAL_CONST)),
5465 0, 1, 1);
071a6595
PB
5466 emit_move_insn (targetx, target);
5467 }
19caa751 5468
071a6595
PB
5469 else if (GET_CODE (target) == MEM)
5470 targetx = target;
5471 else
5472 abort ();
5473
4ca79136
RH
5474 /* Optimization: If startbit and endbit are constants divisible
5475 by BITS_PER_UNIT, call memset instead. */
5476 if (TARGET_MEM_FUNCTIONS
5477 && TREE_CODE (startbit) == INTEGER_CST
071a6595
PB
5478 && TREE_CODE (endbit) == INTEGER_CST
5479 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
e1a43f73 5480 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
071a6595 5481 {
ebb1b59a 5482 emit_library_call (memset_libfunc, LCT_NORMAL,
071a6595 5483 VOIDmode, 3,
e1a43f73
PB
5484 plus_constant (XEXP (targetx, 0),
5485 startb / BITS_PER_UNIT),
071a6595 5486 Pmode,
3b6f75e2 5487 constm1_rtx, TYPE_MODE (integer_type_node),
071a6595 5488 GEN_INT ((endb - startb) / BITS_PER_UNIT),
3b6f75e2 5489 TYPE_MODE (sizetype));
071a6595
PB
5490 }
5491 else
68d28100
RH
5492 emit_library_call (setbits_libfunc, LCT_NORMAL,
5493 VOIDmode, 4, XEXP (targetx, 0),
ebb1b59a 5494 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
19caa751
RK
5495 startbit_rtx, TYPE_MODE (sizetype),
5496 endbit_rtx, TYPE_MODE (sizetype));
5497
071a6595
PB
5498 if (REG_P (target))
5499 emit_move_insn (target, targetx);
5500 }
5501 }
bbf6f052
RK
5502
5503 else
5504 abort ();
5505}
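/* Worked example (illustrative): for a hypothetical initializer
   `int a[100] = { [3] = 7 };' the array branch above sees count == 1
   for 100 elements, so need_to_clear is set; the target is cleared
   once with clear_storage and a single store_constructor_field call
   then stores the 7 at bitpos 3 * 32, assuming a 32-bit int.  */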
5506
5507/* Store the value of EXP (an expression tree)
5508 into a subfield of TARGET which has mode MODE and occupies
5509 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5510 If MODE is VOIDmode, it means that we are storing into a bit-field.
5511
5512 If VALUE_MODE is VOIDmode, return nothing in particular.
5513 UNSIGNEDP is not used in this case.
5514
5515 Otherwise, return an rtx for the value stored. This rtx
5516 has mode VALUE_MODE if that is convenient to do.
5517 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5518
a06ef755 5519 TYPE is the type of the underlying object.
ece32014
MM
5520
5521 ALIAS_SET is the alias set for the destination. This value will
5522 (in general) be different from that for TARGET, since TARGET is a
5523 reference to the containing structure. */
bbf6f052
RK
5524
5525static rtx
502b8322
AJ
5526store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5527 enum machine_mode mode, tree exp, enum machine_mode value_mode,
5528 int unsignedp, tree type, int alias_set)
bbf6f052 5529{
906c4e36 5530 HOST_WIDE_INT width_mask = 0;
bbf6f052 5531
e9a25f70
JL
5532 if (TREE_CODE (exp) == ERROR_MARK)
5533 return const0_rtx;
5534
2be6a7e9
RK
5535 /* If we have nothing to store, do nothing unless the expression has
5536 side-effects. */
5537 if (bitsize == 0)
5538 return expand_expr (exp, const0_rtx, VOIDmode, 0);
6a87d634 5539 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
906c4e36 5540 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
bbf6f052
RK
5541
5542 /* If we are storing into an unaligned field of an aligned union that is
5543 in a register, we may have the mode of TARGET being an integer mode but
5544 MODE == BLKmode. In that case, get an aligned object whose size and
5545 alignment are the same as TARGET and store TARGET into it (we can avoid
5546 the store if the field being stored is the entire width of TARGET). Then
5547 call ourselves recursively to store the field into a BLKmode version of
5548 that object. Finally, load from the object into TARGET. This is not
5549 very efficient in general, but should only be slightly more expensive
5550 than the otherwise-required unaligned accesses. Perhaps this can be
85a43a2f
RK
5551 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5552 twice, once with emit_move_insn and once via store_field. */
bbf6f052
RK
5553
5554 if (mode == BLKmode
5555 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5556 {
85a43a2f 5557 rtx object = assign_temp (type, 0, 1, 1);
c4e59f51 5558 rtx blk_object = adjust_address (object, BLKmode, 0);
bbf6f052 5559
8752c357 5560 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
bbf6f052
RK
5561 emit_move_insn (object, target);
5562
a06ef755
RK
5563 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5564 alias_set);
bbf6f052
RK
5565
5566 emit_move_insn (target, object);
5567
a06ef755 5568 /* We want to return the BLKmode version of the data. */
46093b97 5569 return blk_object;
bbf6f052 5570 }
c3b247b4
JM
5571
5572 if (GET_CODE (target) == CONCAT)
5573 {
5574 /* We're storing into a struct containing a single __complex. */
5575
5576 if (bitpos != 0)
5577 abort ();
5578 return store_expr (exp, target, 0);
5579 }
bbf6f052
RK
5580
5581 /* If the structure is in a register or if the component
5582 is a bit field, we cannot use addressing to access it.
5583 Use bit-field techniques or SUBREG to store in it. */
5584
4fa52007 5585 if (mode == VOIDmode
6ab06cbb
JW
5586 || (mode != BLKmode && ! direct_store[(int) mode]
5587 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5588 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4fa52007 5589 || GET_CODE (target) == REG
c980ac49 5590 || GET_CODE (target) == SUBREG
ccc98036
RS
5591 /* If the field isn't aligned enough to store as an ordinary memref,
5592 store it as a bit field. */
15b19a7d 5593 || (mode != BLKmode
9e5f281f
OH
5594 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5595 || bitpos % GET_MODE_ALIGNMENT (mode))
5596 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
502b8322 5597 || (bitpos % BITS_PER_UNIT != 0)))
14a774a9
RK
5598 /* If the RHS and field are a constant size and the size of the
5599 RHS isn't the same size as the bitfield, we must use bitfield
5600 operations. */
05bccae2
RK
5601 || (bitsize >= 0
5602 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5603 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
bbf6f052 5604 {
906c4e36 5605 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
bbd6cf73 5606
ef19912d
RK
5607 /* If BITSIZE is narrower than the size of the type of EXP
5608 we will be narrowing TEMP. Normally, what's wanted are the
5609 low-order bits. However, if EXP's type is a record and this is
5610 a big-endian machine, we want the upper BITSIZE bits. */
5611 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
65a07688 5612 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
ef19912d
RK
5613 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5614 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5615 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5616 - bitsize),
c1853da7 5617 NULL_RTX, 1);
ef19912d 5618
bbd6cf73
RK
5619 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5620 MODE. */
5621 if (mode != VOIDmode && mode != BLKmode
5622 && mode != TYPE_MODE (TREE_TYPE (exp)))
5623 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5624
a281e72d
RK
5625 /* If the modes of TARGET and TEMP are both BLKmode, both
5626 must be in memory and BITPOS must be aligned on a byte
5627 boundary. If so, we simply do a block copy. */
5628 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5629 {
5630 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5631 || bitpos % BITS_PER_UNIT != 0)
5632 abort ();
5633
f4ef873c 5634 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
a281e72d 5635 emit_block_move (target, temp,
a06ef755 5636 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
44bb111a
RH
5637 / BITS_PER_UNIT),
5638 BLOCK_OP_NORMAL);
a281e72d
RK
5639
5640 return value_mode == VOIDmode ? const0_rtx : target;
5641 }
5642
bbf6f052 5643 /* Store the value in the bitfield. */
a06ef755
RK
5644 store_bit_field (target, bitsize, bitpos, mode, temp,
5645 int_size_in_bytes (type));
5646
bbf6f052
RK
5647 if (value_mode != VOIDmode)
5648 {
04050c69
RK
5649 /* The caller wants an rtx for the value.
5650 If possible, avoid refetching from the bitfield itself. */
bbf6f052
RK
5651 if (width_mask != 0
5652 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5c4d7cfb 5653 {
9074de27 5654 tree count;
5c4d7cfb 5655 enum machine_mode tmode;
86a2c12a 5656
5c4d7cfb 5657 tmode = GET_MODE (temp);
86a2c12a
RS
5658 if (tmode == VOIDmode)
5659 tmode = value_mode;
22273300
JJ
5660
5661 if (unsignedp)
5662 return expand_and (tmode, temp,
2496c7bd 5663 gen_int_mode (width_mask, tmode),
22273300
JJ
5664 NULL_RTX);
5665
5c4d7cfb
RS
5666 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5667 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5668 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5669 }
04050c69 5670
bbf6f052 5671 return extract_bit_field (target, bitsize, bitpos, unsignedp,
04050c69 5672 NULL_RTX, value_mode, VOIDmode,
a06ef755 5673 int_size_in_bytes (type));
bbf6f052
RK
5674 }
5675 return const0_rtx;
5676 }
5677 else
5678 {
5679 rtx addr = XEXP (target, 0);
a06ef755 5680 rtx to_rtx = target;
bbf6f052
RK
5681
5682 /* If a value is wanted, it must be the lhs;
5683 so make the address stable for multiple use. */
5684
5685 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5686 && ! CONSTANT_ADDRESS_P (addr)
5687 /* A frame-pointer reference is already stable. */
5688 && ! (GET_CODE (addr) == PLUS
5689 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5690 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5691 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
a06ef755 5692 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
bbf6f052
RK
5693
5694 /* Now build a reference to just the desired component. */
5695
a06ef755
RK
5696 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5697
5698 if (to_rtx == target)
5699 to_rtx = copy_rtx (to_rtx);
792760b9 5700
c6df88cb 5701 MEM_SET_IN_STRUCT_P (to_rtx, 1);
10b76d73 5702 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
a06ef755 5703 set_mem_alias_set (to_rtx, alias_set);
bbf6f052
RK
5704
5705 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5706 }
5707}
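/* Usage sketch (illustrative; the 3-bit field and the names
   CONTAINING_TYPE and FIELD_TYPE are hypothetical).  Storing EXP into
   a 3-bit field at bit offset 2 of TARGET, with no value wanted back:

     store_field (target, 3, 2, VOIDmode, exp, VOIDmode, 0,
                  containing_type, get_alias_set (field_type));

   MODE is VOIDmode because the destination is a bit-field, so the
   assignment is routed through store_bit_field above.  */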
5708\f
5709/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
b4e3fabb
RK
5710 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5711 codes and find the ultimate containing object, which we return.
bbf6f052
RK
5712
5713 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5714 bit position, and *PUNSIGNEDP to the signedness of the field.
7bb0943f
RS
5715 If the position of the field is variable, we store a tree
5716 giving the variable offset (in units) in *POFFSET.
5717 This offset is in addition to the bit position.
5718 If the position is not variable, we store 0 in *POFFSET.
bbf6f052
RK
5719
5720 If any of the extraction expressions is volatile,
5721 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5722
5723 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5724 is a mode that can be used to access the field. In that case, *PBITSIZE
e7c33f54
RK
5725 is redundant.
5726
5727 If the field describes a variable-sized object, *PMODE is set to
5728 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
6d2f8887 5729 this case, but the address of the object can be found. */
bbf6f052
RK
5730
5731tree
502b8322
AJ
5732get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5733 HOST_WIDE_INT *pbitpos, tree *poffset,
5734 enum machine_mode *pmode, int *punsignedp,
5735 int *pvolatilep)
bbf6f052
RK
5736{
5737 tree size_tree = 0;
5738 enum machine_mode mode = VOIDmode;
fed3cef0 5739 tree offset = size_zero_node;
770ae6cc 5740 tree bit_offset = bitsize_zero_node;
738cc472 5741 tree placeholder_ptr = 0;
770ae6cc 5742 tree tem;
bbf6f052 5743
770ae6cc
RK
5744 /* First get the mode, signedness, and size. We do this from just the
5745 outermost expression. */
bbf6f052
RK
5746 if (TREE_CODE (exp) == COMPONENT_REF)
5747 {
5748 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5749 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5750 mode = DECL_MODE (TREE_OPERAND (exp, 1));
770ae6cc 5751
bbf6f052
RK
5752 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5753 }
5754 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5755 {
5756 size_tree = TREE_OPERAND (exp, 1);
5757 *punsignedp = TREE_UNSIGNED (exp);
5758 }
5759 else
5760 {
5761 mode = TYPE_MODE (TREE_TYPE (exp));
770ae6cc
RK
5762 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5763
ab87f8c8
JL
5764 if (mode == BLKmode)
5765 size_tree = TYPE_SIZE (TREE_TYPE (exp));
770ae6cc
RK
5766 else
5767 *pbitsize = GET_MODE_BITSIZE (mode);
bbf6f052 5768 }
3a94c984 5769
770ae6cc 5770 if (size_tree != 0)
bbf6f052 5771 {
770ae6cc 5772 if (! host_integerp (size_tree, 1))
e7c33f54
RK
5773 mode = BLKmode, *pbitsize = -1;
5774 else
770ae6cc 5775 *pbitsize = tree_low_cst (size_tree, 1);
bbf6f052
RK
5776 }
5777
5778 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5779 and find the ultimate containing object. */
bbf6f052
RK
5780 while (1)
5781 {
770ae6cc
RK
5782 if (TREE_CODE (exp) == BIT_FIELD_REF)
5783 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5784 else if (TREE_CODE (exp) == COMPONENT_REF)
bbf6f052 5785 {
770ae6cc
RK
5786 tree field = TREE_OPERAND (exp, 1);
5787 tree this_offset = DECL_FIELD_OFFSET (field);
bbf6f052 5788
e7f3c83f
RK
5789 /* If this field hasn't been filled in yet, don't go
5790 past it. This should only happen when folding expressions
5791 made during type construction. */
770ae6cc 5792 if (this_offset == 0)
e7f3c83f 5793 break;
7a6cdb44 5794 else if (CONTAINS_PLACEHOLDER_P (this_offset))
770ae6cc 5795 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
e7f3c83f 5796
7156dead 5797 offset = size_binop (PLUS_EXPR, offset, this_offset);
770ae6cc
RK
5798 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5799 DECL_FIELD_BIT_OFFSET (field));
e6d8c385 5800
a06ef755 5801 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
bbf6f052 5802 }
7156dead 5803
b4e3fabb
RK
5804 else if (TREE_CODE (exp) == ARRAY_REF
5805 || TREE_CODE (exp) == ARRAY_RANGE_REF)
bbf6f052 5806 {
742920c7 5807 tree index = TREE_OPERAND (exp, 1);
b4e3fabb
RK
5808 tree array = TREE_OPERAND (exp, 0);
5809 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
770ae6cc 5810 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
b4e3fabb 5811 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
742920c7 5812
770ae6cc
RK
5813 /* We assume all arrays have sizes that are a multiple of a byte.
5814 First subtract the lower bound, if any, in the type of the
5815 index, then convert to sizetype and multiply by the size of the
5816 array element. */
5817 if (low_bound != 0 && ! integer_zerop (low_bound))
5818 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5819 index, low_bound));
f8dac6eb 5820
7156dead
RK
5821 /* If the index has a self-referential type, pass it to a
5822 WITH_RECORD_EXPR; if the component size does, pass our
5823 component to one. */
7a6cdb44 5824 if (CONTAINS_PLACEHOLDER_P (index))
770ae6cc 5825 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
7a6cdb44 5826 if (CONTAINS_PLACEHOLDER_P (unit_size))
b4e3fabb 5827 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
742920c7 5828
770ae6cc
RK
5829 offset = size_binop (PLUS_EXPR, offset,
5830 size_binop (MULT_EXPR,
5831 convert (sizetype, index),
7156dead 5832 unit_size));
bbf6f052 5833 }
7156dead 5834
738cc472
RK
5835 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5836 {
70072ed9
RK
5837 tree new = find_placeholder (exp, &placeholder_ptr);
5838
5839 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5840 We might have been called from tree optimization where we
5841 haven't set up an object yet. */
5842 if (new == 0)
5843 break;
5844 else
5845 exp = new;
5846
738cc472
RK
5847 continue;
5848 }
c1853da7
RK
5849
5850 /* We can go inside most conversions: all NON_VALUE_EXPRs, all normal
5851 conversions that don't change the mode, and all view conversions
5852 except those that need to "step up" the alignment. */
bbf6f052 5853 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
c1853da7
RK
5854 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5855 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5856 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5857 && STRICT_ALIGNMENT
5858 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5859 < BIGGEST_ALIGNMENT)
5860 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5861 || TYPE_ALIGN_OK (TREE_TYPE
5862 (TREE_OPERAND (exp, 0))))))
bbf6f052
RK
5863 && ! ((TREE_CODE (exp) == NOP_EXPR
5864 || TREE_CODE (exp) == CONVERT_EXPR)
5865 && (TYPE_MODE (TREE_TYPE (exp))
5866 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5867 break;
7bb0943f
RS
5868
5869 /* If any reference in the chain is volatile, the effect is volatile. */
5870 if (TREE_THIS_VOLATILE (exp))
5871 *pvolatilep = 1;
839c4796 5872
bbf6f052
RK
5873 exp = TREE_OPERAND (exp, 0);
5874 }
5875
770ae6cc
RK
5876 /* If OFFSET is constant, see if we can return the whole thing as a
5877 constant bit position. Otherwise, split it up. */
5878 if (host_integerp (offset, 0)
5879 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5880 bitsize_unit_node))
5881 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5882 && host_integerp (tem, 0))
5883 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5884 else
5885 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
b50d17a1 5886
bbf6f052 5887 *pmode = mode;
bbf6f052
RK
5888 return exp;
5889}
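/* A minimal caller sketch (illustrative).  Decomposing a reference
   expression REF:

     HOST_WIDE_INT bitsize, bitpos;
     tree offset;
     enum machine_mode mode;
     int unsignedp, volatilep = 0;
     tree inner = get_inner_reference (ref, &bitsize, &bitpos, &offset,
                                       &mode, &unsignedp, &volatilep);

   For a hypothetical COMPONENT_REF `s.f' with F a 32-bit int at byte
   offset 4, INNER is `s', BITSIZE is 32, BITPOS is 32, OFFSET is 0,
   and MODE is the int's mode.  */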
921b3427 5890
ed239f5a
RK
5891/* Return 1 if T is an expression that get_inner_reference handles. */
5892
5893int
502b8322 5894handled_component_p (tree t)
ed239f5a
RK
5895{
5896 switch (TREE_CODE (t))
5897 {
5898 case BIT_FIELD_REF:
5899 case COMPONENT_REF:
5900 case ARRAY_REF:
5901 case ARRAY_RANGE_REF:
5902 case NON_LVALUE_EXPR:
5903 case VIEW_CONVERT_EXPR:
5904 return 1;
5905
1a8c4ca6
EB
5906 /* ??? Sure they are handled, but get_inner_reference may return
5907 a different PBITSIZE, depending upon whether the expression is
5908 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
ed239f5a
RK
5909 case NOP_EXPR:
5910 case CONVERT_EXPR:
5911 return (TYPE_MODE (TREE_TYPE (t))
5912 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5913
5914 default:
5915 return 0;
5916 }
5917}
bbf6f052 5918\f
3fe44edd
RK
5919/* Given an rtx VALUE that may contain additions and multiplications, return
5920 an equivalent value that just refers to a register, memory, or constant.
5921 This is done by generating instructions to perform the arithmetic and
5922 returning a pseudo-register containing the value.
c45a13a6
RK
5923
5924 The returned value may be a REG, SUBREG, MEM or constant. */
bbf6f052
RK
5925
5926rtx
502b8322 5927force_operand (rtx value, rtx target)
bbf6f052 5928{
8a28dbcc 5929 rtx op1, op2;
bbf6f052 5930 /* Use subtarget as the target for operand 0 of a binary operation. */
b3694847 5931 rtx subtarget = get_subtarget (target);
8a28dbcc 5932 enum rtx_code code = GET_CODE (value);
bbf6f052 5933
8b015896 5934 /* Check for a PIC address load. */
8a28dbcc 5935 if ((code == PLUS || code == MINUS)
8b015896
RH
5936 && XEXP (value, 0) == pic_offset_table_rtx
5937 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5938 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5939 || GET_CODE (XEXP (value, 1)) == CONST))
5940 {
5941 if (!subtarget)
5942 subtarget = gen_reg_rtx (GET_MODE (value));
5943 emit_move_insn (subtarget, value);
5944 return subtarget;
5945 }
5946
8a28dbcc 5947 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
bbf6f052 5948 {
8a28dbcc
JH
5949 if (!target)
5950 target = gen_reg_rtx (GET_MODE (value));
ce0f3925 5951 convert_move (target, force_operand (XEXP (value, 0), NULL),
8a28dbcc
JH
5952 code == ZERO_EXTEND);
5953 return target;
bbf6f052
RK
5954 }
5955
8a28dbcc 5956 if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
bbf6f052
RK
5957 {
5958 op2 = XEXP (value, 1);
8a28dbcc 5959 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
bbf6f052 5960 subtarget = 0;
8a28dbcc 5961 if (code == MINUS && GET_CODE (op2) == CONST_INT)
bbf6f052 5962 {
8a28dbcc 5963 code = PLUS;
bbf6f052
RK
5964 op2 = negate_rtx (GET_MODE (value), op2);
5965 }
5966
5967 /* Check for an addition with OP2 a constant integer and our first
8a28dbcc
JH
5968 operand a PLUS of a virtual register and something else. In that
5969 case, we want to emit the sum of the virtual register and the
5970 constant first and then add the other value. This allows virtual
5971 register instantiation to simply modify the constant rather than
5972 creating another one around this addition. */
5973 if (code == PLUS && GET_CODE (op2) == CONST_INT
bbf6f052
RK
5974 && GET_CODE (XEXP (value, 0)) == PLUS
5975 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5976 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5977 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5978 {
8a28dbcc
JH
5979 rtx temp = expand_simple_binop (GET_MODE (value), code,
5980 XEXP (XEXP (value, 0), 0), op2,
5981 subtarget, 0, OPTAB_LIB_WIDEN);
5982 return expand_simple_binop (GET_MODE (value), code, temp,
5983 force_operand (XEXP (XEXP (value,
5984 0), 1), 0),
5985 target, 0, OPTAB_LIB_WIDEN);
bbf6f052 5986 }
3a94c984 5987
8a28dbcc
JH
5988 op1 = force_operand (XEXP (value, 0), subtarget);
5989 op2 = force_operand (op2, NULL_RTX);
5990 switch (code)
5991 {
5992 case MULT:
5993 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5994 case DIV:
5995 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5996 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5997 target, 1, OPTAB_LIB_WIDEN);
5998 else
5999 return expand_divmod (0,
6000 FLOAT_MODE_P (GET_MODE (value))
6001 ? RDIV_EXPR : TRUNC_DIV_EXPR,
6002 GET_MODE (value), op1, op2, target, 0);
6003 break;
6004 case MOD:
6005 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6006 target, 0);
6007 break;
6008 case UDIV:
6009 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6010 target, 1);
6011 break;
6012 case UMOD:
6013 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6014 target, 1);
6015 break;
6016 case ASHIFTRT:
6017 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6018 target, 0, OPTAB_LIB_WIDEN);
6019 break;
6020 default:
6021 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6022 target, 1, OPTAB_LIB_WIDEN);
6023 }
6024 }
6025 if (GET_RTX_CLASS (code) == '1')
6026 {
6027 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6028 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
bbf6f052 6029 }
34e81b5a
RK
6030
6031#ifdef INSN_SCHEDULING
6032 /* On machines that have insn scheduling, we want all memory references to be
6033 explicit, so we need to deal with such paradoxical SUBREGs. */
6034 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
6035 && (GET_MODE_SIZE (GET_MODE (value))
6036 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6037 value
6038 = simplify_gen_subreg (GET_MODE (value),
6039 force_reg (GET_MODE (SUBREG_REG (value)),
6040 force_operand (SUBREG_REG (value),
6041 NULL_RTX)),
6042 GET_MODE (SUBREG_REG (value)),
6043 SUBREG_BYTE (value));
6044#endif
6045
bbf6f052
RK
6046 return value;
6047}
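/* Example (illustrative): for VALUE == (plus (mult (reg R) (const_int 4))
   (const_int 8)), the PLUS branch above forces the MULT operand first,
   expand_mult emits the multiply, and expand_simple_binop emits the
   add, so the caller gets back a pseudo register holding R * 4 + 8.  */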
6048\f
bbf6f052 6049/* Subroutine of expand_expr: return nonzero iff there is no way that
e5e809f4
JL
6050 EXP can reference X, which is being modified. TOP_P is nonzero if this
6051 call is going to be used to determine whether we need a temporary
ff439b5f
CB
6052 for EXP, as opposed to a recursive call to this function.
6053
6054 It is always safe for this routine to return zero since it merely
6055 searches for optimization opportunities. */
bbf6f052 6056
8f17b5c5 6057int
502b8322 6058safe_from_p (rtx x, tree exp, int top_p)
bbf6f052
RK
6059{
6060 rtx exp_rtl = 0;
6061 int i, nops;
1da68f56 6062 static tree save_expr_list;
bbf6f052 6063
6676e72f
RK
6064 if (x == 0
6065 /* If EXP has varying size, we MUST use a target since we currently
8f6562d0
PB
6066 have no way of allocating temporaries of variable size
6067 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6068 So we assume here that something at a higher level has prevented a
f4510f37 6069 clash. This is somewhat bogus, but the best we can do. Only
e5e809f4 6070 do this when X is BLKmode and when we are at the top level. */
d0f062fb 6071 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
f4510f37 6072 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
8f6562d0
PB
6073 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6074 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6075 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6076 != INTEGER_CST)
1da68f56
RK
6077 && GET_MODE (x) == BLKmode)
6078 /* If X is in the outgoing argument area, it is always safe. */
6079 || (GET_CODE (x) == MEM
6080 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6081 || (GET_CODE (XEXP (x, 0)) == PLUS
6082 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
bbf6f052
RK
6083 return 1;
6084
6085 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6086 find the underlying pseudo. */
6087 if (GET_CODE (x) == SUBREG)
6088 {
6089 x = SUBREG_REG (x);
6090 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6091 return 0;
6092 }
6093
1da68f56
RK
6094 /* A SAVE_EXPR might appear many times in the expression passed to the
6095 top-level safe_from_p call, and if it has a complex subexpression,
6096 examining it multiple times could result in a combinatorial explosion.
6097 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
6098 with optimization took about 28 minutes to compile -- even though it was
6099 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
6100 and turn that off when we are done. We keep a list of the SAVE_EXPRs
6101 we have processed. Note that the only test of top_p was above. */
6102
6103 if (top_p)
6104 {
6105 int rtn;
6106 tree t;
6107
6108 save_expr_list = 0;
6109
6110 rtn = safe_from_p (x, exp, 0);
6111
6112 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
6113 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
6114
6115 return rtn;
6116 }
bbf6f052 6117
1da68f56 6118 /* Now look at our tree code and possibly recurse. */
bbf6f052
RK
6119 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6120 {
6121 case 'd':
a9772b60 6122 exp_rtl = DECL_RTL_IF_SET (exp);
bbf6f052
RK
6123 break;
6124
6125 case 'c':
6126 return 1;
6127
6128 case 'x':
6129 if (TREE_CODE (exp) == TREE_LIST)
f8d4be57
CE
6130 {
6131 while (1)
6132 {
6133 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6134 return 0;
6135 exp = TREE_CHAIN (exp);
6136 if (!exp)
6137 return 1;
6138 if (TREE_CODE (exp) != TREE_LIST)
6139 return safe_from_p (x, exp, 0);
6140 }
6141 }
ff439b5f
CB
6142 else if (TREE_CODE (exp) == ERROR_MARK)
6143 return 1; /* An already-visited SAVE_EXPR? */
bbf6f052
RK
6144 else
6145 return 0;
6146
bbf6f052
RK
6147 case '2':
6148 case '<':
f8d4be57
CE
6149 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6150 return 0;
6151 /* FALLTHRU */
6152
6153 case '1':
6154 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
bbf6f052
RK
6155
6156 case 'e':
6157 case 'r':
6158 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6159 the expression. If it is set, we conflict iff we are that rtx or
6160 both are in memory. Otherwise, we check all operands of the
6161 expression recursively. */
6162
6163 switch (TREE_CODE (exp))
6164 {
6165 case ADDR_EXPR:
70072ed9
RK
6166 /* If the operand is static or we are static, we can't conflict.
6167 Likewise if we don't conflict with the operand at all. */
6168 if (staticp (TREE_OPERAND (exp, 0))
6169 || TREE_STATIC (exp)
6170 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6171 return 1;
6172
6173 /* Otherwise, the only way this can conflict is if we are taking
6174 the address of a DECL and that address is part of X, which is
6175 very rare. */
6176 exp = TREE_OPERAND (exp, 0);
6177 if (DECL_P (exp))
6178 {
6179 if (!DECL_RTL_SET_P (exp)
6180 || GET_CODE (DECL_RTL (exp)) != MEM)
6181 return 0;
6182 else
6183 exp_rtl = XEXP (DECL_RTL (exp), 0);
6184 }
6185 break;
bbf6f052
RK
6186
6187 case INDIRECT_REF:
1da68f56
RK
6188 if (GET_CODE (x) == MEM
6189 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6190 get_alias_set (exp)))
bbf6f052
RK
6191 return 0;
6192 break;
6193
6194 case CALL_EXPR:
f9808f81
MM
6195 /* Assume that the call will clobber all hard registers and
6196 all of memory. */
6197 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6198 || GET_CODE (x) == MEM)
6199 return 0;
bbf6f052
RK
6200 break;
6201
6202 case RTL_EXPR:
3bb5826a
RK
6203 /* If a sequence exists, we would have to scan every instruction
6204 in the sequence to see if it was safe. This is probably not
6205 worthwhile. */
6206 if (RTL_EXPR_SEQUENCE (exp))
bbf6f052
RK
6207 return 0;
6208
3bb5826a 6209 exp_rtl = RTL_EXPR_RTL (exp);
bbf6f052
RK
6210 break;
6211
6212 case WITH_CLEANUP_EXPR:
6ad7895a 6213 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
bbf6f052
RK
6214 break;
6215
5dab5552 6216 case CLEANUP_POINT_EXPR:
e5e809f4 6217 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5dab5552 6218
bbf6f052
RK
6219 case SAVE_EXPR:
6220 exp_rtl = SAVE_EXPR_RTL (exp);
ff439b5f
CB
6221 if (exp_rtl)
6222 break;
6223
1da68f56
RK
6224 /* If we've already scanned this, don't do it again. Otherwise,
6225 show we've scanned it and record for clearing the flag if we're
6226 going on. */
6227 if (TREE_PRIVATE (exp))
6228 return 1;
ff439b5f 6229
1da68f56
RK
6230 TREE_PRIVATE (exp) = 1;
6231 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
ff59bfe6 6232 {
1da68f56
RK
6233 TREE_PRIVATE (exp) = 0;
6234 return 0;
ff59bfe6 6235 }
1da68f56
RK
6236
6237 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
ff439b5f 6238 return 1;
bbf6f052 6239
8129842c
RS
6240 case BIND_EXPR:
6241 /* The only operand we look at is operand 1. The rest aren't
6242 part of the expression. */
e5e809f4 6243 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
8129842c 6244
bbf6f052 6245 case METHOD_CALL_EXPR:
4fe9b91c 6246 /* This takes an rtx argument, but shouldn't appear here. */
bbf6f052 6247 abort ();
3a94c984 6248
e9a25f70
JL
6249 default:
6250 break;
bbf6f052
RK
6251 }
6252
6253 /* If we have an rtx, we do not need to scan our operands. */
6254 if (exp_rtl)
6255 break;
6256
8f17b5c5 6257 nops = first_rtl_op (TREE_CODE (exp));
bbf6f052
RK
6258 for (i = 0; i < nops; i++)
6259 if (TREE_OPERAND (exp, i) != 0
e5e809f4 6260 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
bbf6f052 6261 return 0;
8f17b5c5
MM
6262
6263 /* If this is a language-specific tree code, it may require
6264 special handling. */
dbbbbf3b
JDA
6265 if ((unsigned int) TREE_CODE (exp)
6266 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
ac79cd5a 6267 && !(*lang_hooks.safe_from_p) (x, exp))
8f17b5c5 6268 return 0;
bbf6f052
RK
6269 }
6270
6271 /* If we have an rtl, find any enclosed object. Then see if we conflict
6272 with it. */
6273 if (exp_rtl)
6274 {
6275 if (GET_CODE (exp_rtl) == SUBREG)
6276 {
6277 exp_rtl = SUBREG_REG (exp_rtl);
6278 if (GET_CODE (exp_rtl) == REG
6279 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6280 return 0;
6281 }
6282
6283 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
1da68f56 6284 are memory and they conflict. */
bbf6f052
RK
6285 return ! (rtx_equal_p (x, exp_rtl)
6286 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
21117a17 6287 && true_dependence (exp_rtl, VOIDmode, x,
1da68f56 6288 rtx_addr_varies_p)));
bbf6f052
RK
6289 }
6290
6291 /* If we reach here, it is safe. */
6292 return 1;
6293}
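/* Typical use (illustrative): before expanding the right-hand side of
   an assignment into TARGET, a caller can do

     rtx temp = safe_from_p (target, rhs, 1) ? target : NULL_RTX;

   and expand into TEMP, falling back to a fresh temporary when the
   check fails; returning 0 here is always conservative.  */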
6294
01c8a7c8
RK
6295/* Subroutine of expand_expr: return rtx if EXP is a
6296 variable or parameter; else return 0. */
6297
6298static rtx
502b8322 6299var_rtx (tree exp)
01c8a7c8
RK
6300{
6301 STRIP_NOPS (exp);
6302 switch (TREE_CODE (exp))
6303 {
6304 case PARM_DECL:
6305 case VAR_DECL:
6306 return DECL_RTL (exp);
6307 default:
6308 return 0;
6309 }
6310}
dbecbbe4
JL
6311
6312#ifdef MAX_INTEGER_COMPUTATION_MODE
400500c4 6313
dbecbbe4 6314void
502b8322 6315check_max_integer_computation_mode (tree exp)
dbecbbe4 6316{
5f652c07 6317 enum tree_code code;
dbecbbe4
JL
6318 enum machine_mode mode;
6319
5f652c07
JM
6320 /* Strip any NOPs that don't change the mode. */
6321 STRIP_NOPS (exp);
6322 code = TREE_CODE (exp);
6323
71bca506
JL
6324 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
6325 if (code == NOP_EXPR
6326 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6327 return;
6328
dbecbbe4
JL
6329 /* First check the type of the overall operation. We need only look at
6330 unary, binary and relational operations. */
6331 if (TREE_CODE_CLASS (code) == '1'
6332 || TREE_CODE_CLASS (code) == '2'
6333 || TREE_CODE_CLASS (code) == '<')
6334 {
6335 mode = TYPE_MODE (TREE_TYPE (exp));
6336 if (GET_MODE_CLASS (mode) == MODE_INT
6337 && mode > MAX_INTEGER_COMPUTATION_MODE)
400500c4 6338 internal_error ("unsupported wide integer operation");
dbecbbe4
JL
6339 }
6340
6341 /* Check operand of a unary op. */
6342 if (TREE_CODE_CLASS (code) == '1')
6343 {
6344 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6345 if (GET_MODE_CLASS (mode) == MODE_INT
6346 && mode > MAX_INTEGER_COMPUTATION_MODE)
400500c4 6347 internal_error ("unsupported wide integer operation");
dbecbbe4 6348 }
3a94c984 6349
dbecbbe4
JL
6350 /* Check operands of a binary/comparison op. */
6351 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
6352 {
6353 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6354 if (GET_MODE_CLASS (mode) == MODE_INT
6355 && mode > MAX_INTEGER_COMPUTATION_MODE)
400500c4 6356 internal_error ("unsupported wide integer operation");
dbecbbe4
JL
6357
6358 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
6359 if (GET_MODE_CLASS (mode) == MODE_INT
6360 && mode > MAX_INTEGER_COMPUTATION_MODE)
400500c4 6361 internal_error ("unsupported wide integer operation");
dbecbbe4
JL
6362 }
6363}
6364#endif
14a774a9 6365\f
0d4903b8
RK
6366/* Return the highest power of two that EXP is known to be a multiple of.
6367 This is used in updating alignment of MEMs in array references. */
6368
9ceca302 6369static unsigned HOST_WIDE_INT
502b8322 6370highest_pow2_factor (tree exp)
0d4903b8 6371{
9ceca302 6372 unsigned HOST_WIDE_INT c0, c1;
0d4903b8
RK
6373
6374 switch (TREE_CODE (exp))
6375 {
6376 case INTEGER_CST:
e0f1be5c
JJ
6377 /* We can find the lowest bit that's a one. If the low
6378 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6379 We need to handle this case since we can find it in a COND_EXPR,
6380 a MIN_EXPR, or a MAX_EXPR. If the constant overlows, we have an
6381 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
3a531a8b 6382 later ICE. */
e0f1be5c 6383 if (TREE_CONSTANT_OVERFLOW (exp))
1ed1b4fb 6384 return BIGGEST_ALIGNMENT;
e0f1be5c 6385 else
0d4903b8 6386 {
e0f1be5c
JJ
6387 /* Note: tree_low_cst is intentionally not used here,
6388 we don't care about the upper bits. */
6389 c0 = TREE_INT_CST_LOW (exp);
6390 c0 &= -c0;
6391 return c0 ? c0 : BIGGEST_ALIGNMENT;
0d4903b8
RK
6392 }
6393 break;
6394
65a07688 6395 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
0d4903b8
RK
6396 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6397 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6398 return MIN (c0, c1);
6399
6400 case MULT_EXPR:
6401 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6402 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6403 return c0 * c1;
6404
6405 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6406 case CEIL_DIV_EXPR:
65a07688
RK
6407 if (integer_pow2p (TREE_OPERAND (exp, 1))
6408 && host_integerp (TREE_OPERAND (exp, 1), 1))
6409 {
6410 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6411 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6412 return MAX (1, c0 / c1);
6413 }
6414 break;
0d4903b8
RK
6415
6416 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
65a07688 6417 case SAVE_EXPR: case WITH_RECORD_EXPR:
0d4903b8
RK
6418 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6419
65a07688
RK
6420 case COMPOUND_EXPR:
6421 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6422
0d4903b8
RK
6423 case COND_EXPR:
6424 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6425 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6426 return MIN (c0, c1);
6427
6428 default:
6429 break;
6430 }
6431
6432 return 1;
6433}
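
A standalone sketch (not part of expr.c; names are hypothetical) showing the
same identities on plain integers: c & -c isolates the lowest set bit of a
constant, a sum or difference is divisible by the smaller of its operands'
power-of-two factors, and a product by their product.

    #include <stdio.h>

    /* Largest power of two known to divide V; 1 if nothing better is known. */
    static unsigned long pow2_factor (long v)
    {
      unsigned long u = (unsigned long) v;
      return u ? (u & -u) : 1;              /* same trick as c0 &= -c0 above */
    }

    int main (void)
    {
      long a = 48, b = 80;                  /* factors 16 and 16 */
      unsigned long fa = pow2_factor (a), fb = pow2_factor (b);
      unsigned long fmin = fa < fb ? fa : fb;
      printf ("%lu\n", fa);                 /* 16 */
      printf ("%lu\n", (a + b) % fmin);     /* 0: PLUS_EXPR keeps MIN (c0, c1) */
      printf ("%lu\n", (a * b) % (fa * fb)); /* 0: MULT_EXPR keeps c0 * c1 */
      return 0;
    }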
818c0c94
RH
6434
6435/* Similar, except that it is known that the expression must be a multiple
6436 of the alignment of TYPE. */
6437
9ceca302 6438static unsigned HOST_WIDE_INT
502b8322 6439highest_pow2_factor_for_type (tree type, tree exp)
818c0c94 6440{
9ceca302 6441 unsigned HOST_WIDE_INT type_align, factor;
818c0c94
RH
6442
6443 factor = highest_pow2_factor (exp);
6444 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6445 return MAX (factor, type_align);
6446}
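
A matching sketch for the type-alignment variant (hypothetical and
self-contained): since the expression is additionally known to be a multiple
of the type's alignment, the known factor can only grow, hence the MAX.

    /* Sketch: combine a value's own power-of-two factor with a known
       alignment in bytes; the result is the larger of the two.  */
    static unsigned long factor_for_align (long v, unsigned long align_bytes)
    {
      unsigned long f = v ? ((unsigned long) v & -(unsigned long) v) : 1;
      return f > align_bytes ? f : align_bytes;
    }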
0d4903b8 6447\f
f47e9b4e
RK
6448/* Return an object on the placeholder list that matches EXP, a
6449 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
738cc472 6450 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
70072ed9
RK
6451	   tree.def.  If no such object is found, return 0.  If PLIST is nonzero, it
6452	   points to a starting position in the placeholder list (zero meaning the
738cc472
RK
6453	   start of the list); on return, *PLIST is set to the placeholder list
6454	   entry at which the object was found.  */
f47e9b4e
RK
6455
6456tree
502b8322 6457find_placeholder (tree exp, tree *plist)
f47e9b4e
RK
6458{
6459 tree type = TREE_TYPE (exp);
6460 tree placeholder_expr;
6461
738cc472
RK
6462 for (placeholder_expr
6463 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6464 placeholder_expr != 0;
f47e9b4e
RK
6465 placeholder_expr = TREE_CHAIN (placeholder_expr))
6466 {
6467 tree need_type = TYPE_MAIN_VARIANT (type);
6468 tree elt;
6469
6470 /* Find the outermost reference that is of the type we want. If none,
6471 see if any object has a type that is a pointer to the type we
6472 want. */
6473 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6474 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6475 || TREE_CODE (elt) == COND_EXPR)
6476 ? TREE_OPERAND (elt, 1)
6477 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6478 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6479 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6480 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6481 ? TREE_OPERAND (elt, 0) : 0))
6482 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6483 {
6484 if (plist)
6485 *plist = placeholder_expr;
6486 return elt;
6487 }
6488
6489 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6490 elt
6491 = ((TREE_CODE (elt) == COMPOUND_EXPR
6492 || TREE_CODE (elt) == COND_EXPR)
6493 ? TREE_OPERAND (elt, 1)
6494 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6495 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6496 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6497 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6498 ? TREE_OPERAND (elt, 0) : 0))
6499 if (POINTER_TYPE_P (TREE_TYPE (elt))
6500 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6501 == need_type))
6502 {
6503 if (plist)
6504 *plist = placeholder_expr;
6505 return build1 (INDIRECT_REF, need_type, elt);
6506 }
6507 }
6508
70072ed9 6509 return 0;
f47e9b4e
RK
6510}
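
A simplified, self-contained sketch of the search pattern used here
(hypothetical struct types; the real code walks GCC trees and also retries
looking for pointers to the wanted type): scan a chain of candidates,
descending through each candidate's operands for a node of the wanted type.

    #include <stddef.h>

    struct node  { int type; struct node *inner; };   /* stand-in tree node */
    struct chain { struct node *obj; struct chain *next; };

    /* Return the outermost sub-node with the wanted type, or NULL,
       mirroring the outer loop over placeholder_list and the inner
       descent through TREE_OPERANDs.  */
    static struct node *find_match (struct chain *list, int want)
    {
      struct node *elt;
      for (; list != NULL; list = list->next)
        for (elt = list->obj; elt != NULL; elt = elt->inner)
          if (elt->type == want)
            return elt;
      return NULL;
    }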
6511\f
bbf6f052
RK
6512/* expand_expr: generate code for computing expression EXP.
6513 An rtx for the computed value is returned. The value is never null.
6514 In the case of a void EXP, const0_rtx is returned.
6515
6516 The value may be stored in TARGET if TARGET is nonzero.
6517 TARGET is just a suggestion; callers must assume that
6518 the rtx returned may not be the same as TARGET.
6519
6520 If TARGET is CONST0_RTX, it means that the value will be ignored.
6521
6522 If TMODE is not VOIDmode, it suggests generating the
6523 result in mode TMODE. But this is done only when convenient.
6524	   Otherwise, TMODE is ignored and the value is generated in its natural mode.
6525 TMODE is just a suggestion; callers must assume that
6526 the rtx returned may not have mode TMODE.
6527
d6a5ac33
RK
6528 Note that TARGET may have neither TMODE nor MODE. In that case, it
6529 probably will not be used.
bbf6f052
RK
6530
6531 If MODIFIER is EXPAND_SUM then when EXP is an addition
6532 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6533 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6534 products as above, or REG or MEM, or constant.
6535 Ordinarily in such cases we would output mul or add instructions
6536 and then return a pseudo reg containing the sum.
6537
6538 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6539 it also marks a label as absolutely required (it can't be dead).
26fcb35a 6540 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
d6a5ac33
RK
6541 This is used for outputting expressions used in initializers.
6542
6543 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6544 with a constant address even if that address is not normally legitimate.
8403445a
AM
6545 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6546
6547 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6548 a call parameter. Such targets require special care as we haven't yet
6549 marked TARGET so that it's safe from being trashed by libcalls. We
6550 don't want to use TARGET for anything but the final result;
6551	   intermediate values must go elsewhere.  Additionally, calls to
6552 emit_block_move will be flagged with BLOCK_OP_CALL_PARM. */
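
One consequence of this contract is the usual caller idiom: treat the
returned rtx as authoritative and copy into the requested target only when
the hint was not taken.  A sketch of that idiom (illustration only, not a
function in expr.c):

    /* TARGET and TMODE are only suggestions, so check what came back.  */
    static rtx expand_into (tree exp, rtx target, enum machine_mode tmode)
    {
      rtx val = expand_expr (exp, target, tmode, EXPAND_NORMAL);
      if (val != target)                /* the suggestion was not taken */
        emit_move_insn (target, val);
      return target;
    }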
bbf6f052
RK
6553
6554rtx
502b8322 6555expand_expr (tree exp, rtx target, enum machine_mode tmode, enum expand_modifier modifier)
bbf6f052 6556{
b3694847 6557 rtx op0, op1, temp;
bbf6f052
RK
6558 tree type = TREE_TYPE (exp);
6559 int unsignedp = TREE_UNSIGNED (type);
b3694847
SS
6560 enum machine_mode mode;
6561 enum tree_code code = TREE_CODE (exp);
bbf6f052 6562 optab this_optab;
68557e14
ML
6563 rtx subtarget, original_target;
6564 int ignore;
bbf6f052
RK
6565 tree context;
6566
3a94c984 6567 /* Handle ERROR_MARK before anybody tries to access its type. */
85f3d674 6568 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
68557e14
ML
6569 {
6570 op0 = CONST0_RTX (tmode);
6571 if (op0 != 0)
6572 return op0;
6573 return const0_rtx;
6574 }
6575
6576 mode = TYPE_MODE (type);
6577 /* Use subtarget as the target for operand 0 of a binary operation. */
296b4ed9 6578 subtarget = get_subtarget (target);
68557e14
ML
6579 original_target = target;
6580 ignore = (target == const0_rtx
6581 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6582 || code == CONVERT_EXPR || code == REFERENCE_EXPR
ac79cd5a 6583 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
68557e14
ML
6584 && TREE_CODE (type) == VOID_TYPE));
6585
dd27116b
RK
6586 /* If we are going to ignore this result, we need only do something
6587 if there is a side-effect somewhere in the expression. If there
b50d17a1
RK
6588 is, short-circuit the most common cases here. Note that we must
6589 not call expand_expr with anything but const0_rtx in case this
6590 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
bbf6f052 6591
dd27116b
RK
6592 if (ignore)
6593 {
6594 if (! TREE_SIDE_EFFECTS (exp))
6595 return const0_rtx;
6596
14a774a9
RK
6597 /* Ensure we reference a volatile object even if value is ignored, but
6598 don't do this if all we are doing is taking its address. */
dd27116b
RK
6599 if (TREE_THIS_VOLATILE (exp)
6600 && TREE_CODE (exp) != FUNCTION_DECL
14a774a9
RK
6601 && mode != VOIDmode && mode != BLKmode
6602 && modifier != EXPAND_CONST_ADDRESS)
dd27116b 6603 {
37a08a29 6604 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
dd27116b
RK
6605 if (GET_CODE (temp) == MEM)
6606 temp = copy_to_reg (temp);
6607 return const0_rtx;
6608 }
6609
14a774a9
RK
6610 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6611 || code == INDIRECT_REF || code == BUFFER_REF)
37a08a29
RK
6612 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6613 modifier);
6614
14a774a9 6615 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
b4e3fabb 6616 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
dd27116b 6617 {
37a08a29
RK
6618 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6619 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
dd27116b
RK
6620 return const0_rtx;
6621 }
6622 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6623 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6624 /* If the second operand has no side effects, just evaluate
0f41302f 6625 the first. */
37a08a29
RK
6626 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6627 modifier);
14a774a9
RK
6628 else if (code == BIT_FIELD_REF)
6629 {
37a08a29
RK
6630 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6631 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6632 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
14a774a9
RK
6633 return const0_rtx;
6634 }
37a08a29 6635
90764a87 6636 target = 0;
dd27116b 6637 }
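
At the source level this corresponds to an expression statement whose value
is discarded: no result register is needed, but side effects in the operands
must still be emitted.  A standalone illustration (hypothetical example):

    #include <stdio.h>

    static int next (void)
    {
      puts ("side effect");             /* must still run */
      return 42;
    }

    int main (void)
    {
      (void) (next () + 1);             /* sum ignored; call still expanded */
      return 0;
    }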
bbf6f052 6638
dbecbbe4 6639#ifdef MAX_INTEGER_COMPUTATION_MODE
5f652c07 6640 /* Only check stuff here if the mode we want is different from the mode
fbe5a4a6 6641 of the expression; if it's the same, check_max_integer_computation_mode
5f652c07
JM
6642 will handle it. Do we really need to check this stuff at all? */
6643
ce3c0b53 6644 if (target
5f652c07 6645 && GET_MODE (target) != mode
ce3c0b53
JL
6646 && TREE_CODE (exp) != INTEGER_CST
6647 && TREE_CODE (exp) != PARM_DECL
ee06cc21 6648 && TREE_CODE (exp) != ARRAY_REF
b4e3fabb 6649 && TREE_CODE (exp) != ARRAY_RANGE_REF
ee06cc21
JL
6650 && TREE_CODE (exp) != COMPONENT_REF
6651 && TREE_CODE (exp) != BIT_FIELD_REF
6652 && TREE_CODE (exp) != INDIRECT_REF
6bcd94ae 6653 && TREE_CODE (exp) != CALL_EXPR
6ab46dff
GRK
6654 && TREE_CODE (exp) != VAR_DECL
6655 && TREE_CODE (exp) != RTL_EXPR)
dbecbbe4
JL
6656 {
6657 enum machine_mode mode = GET_MODE (target);
6658
6659 if (GET_MODE_CLASS (mode) == MODE_INT
6660 && mode > MAX_INTEGER_COMPUTATION_MODE)
400500c4 6661 internal_error ("unsupported wide integer operation");
dbecbbe4
JL
6662 }
6663
5f652c07
JM
6664 if (tmode != mode
6665 && TREE_CODE (exp) != INTEGER_CST
ce3c0b53 6666 && TREE_CODE (exp) != PARM_DECL
ee06cc21 6667 && TREE_CODE (exp) != ARRAY_REF
b4e3fabb 6668 && TREE_CODE (exp) != ARRAY_RANGE_REF
ee06cc21
JL
6669 && TREE_CODE (exp) != COMPONENT_REF
6670 && TREE_CODE (exp) != BIT_FIELD_REF
6671 && TREE_CODE (exp) != INDIRECT_REF
ce3c0b53 6672 && TREE_CODE (exp) != VAR_DECL
6bcd94ae 6673 && TREE_CODE (exp) != CALL_EXPR
6ab46dff 6674 && TREE_CODE (exp) != RTL_EXPR
71bca506 6675 && GET_MODE_CLASS (tmode) == MODE_INT
dbecbbe4 6676 && tmode > MAX_INTEGER_COMPUTATION_MODE)
400500c4 6677 internal_error ("unsupported wide integer operation");
dbecbbe4
JL
6678
6679 check_max_integer_computation_mode (exp);
6680#endif
6681
e44842fe
RK
6682	  /* If we will do cse, generate all results into pseudo registers
6683 since 1) that allows cse to find more things
6684 and 2) otherwise cse could produce an insn the machine
4977bab6
ZW
6685 cannot support. An exception is a CONSTRUCTOR into a multi-word
6686 MEM: that's much more likely to be most efficient into the MEM.
6687 Another is a CALL_EXPR which must return in memory. */
e44842fe 6688
bbf6f052 6689 if (! cse_not_expected && mode != BLKmode && target
c24ae149 6690 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
4977bab6
ZW
6691 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6692 && ! (code == CALL_EXPR && aggregate_value_p (exp)))
8403445a 6693 target = 0;
bbf6f052 6694
bbf6f052
RK
6695 switch (code)
6696 {
6697 case LABEL_DECL:
b552441b
RS
6698 {
6699 tree function = decl_function_context (exp);
046e4e36
ZW
6700 /* Labels in containing functions, or labels used from initializers,
6701 must be forced. */
6702 if (modifier == EXPAND_INITIALIZER
6703 || (function != current_function_decl
6704 && function != inline_function_decl
6705 && function != 0))
6706 temp = force_label_rtx (exp);
ab87f8c8 6707 else
046e4e36 6708 temp = label_rtx (exp);
c5c76735 6709
046e4e36 6710 temp = gen_rtx_MEM (FUNCTION_MODE, gen_rtx_LABEL_REF (Pmode, temp));
d0977240
RK
6711 if (function != current_function_decl
6712 && function != inline_function_decl && function != 0)
26fcb35a
RS
6713 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6714 return temp;
b552441b 6715 }
bbf6f052
RK
6716
6717 case PARM_DECL:
1877be45 6718 if (!DECL_RTL_SET_P (exp))
bbf6f052
RK
6719 {
6720 error_with_decl (exp, "prior parameter's size depends on `%s'");
4af3895e 6721 return CONST0_RTX (mode);
bbf6f052
RK
6722 }
6723
0f41302f 6724 /* ... fall through ... */
d6a5ac33 6725
bbf6f052 6726 case VAR_DECL:
2dca20cd
RS
6727 /* If a static var's type was incomplete when the decl was written,
6728 but the type is complete now, lay out the decl now. */
ca06cfe6
RH
6729 if (DECL_SIZE (exp) == 0
6730 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
2dca20cd 6731 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
a46666a9 6732 layout_decl (exp, 0);
921b3427 6733
0f41302f 6734 /* ... fall through ... */
d6a5ac33 6735
2dca20cd 6736 case FUNCTION_DECL:
bbf6f052
RK
6737 case RESULT_DECL:
6738 if (DECL_RTL (exp) == 0)
6739 abort ();
d6a5ac33 6740
e44842fe
RK
6741	      /* Ensure the variable is marked as used even if it doesn't go through
6742	         a parser.  If it hasn't been used yet, write out an external
6743 definition. */
6744 if (! TREE_USED (exp))
6745 {
6746 assemble_external (exp);
6747 TREE_USED (exp) = 1;
6748 }
6749
dc6d66b3
RK
6750 /* Show we haven't gotten RTL for this yet. */
6751 temp = 0;
6752
bbf6f052
RK
6753 /* Handle variables inherited from containing functions. */
6754 context = decl_function_context (exp);
6755
6756 /* We treat inline_function_decl as an alias for the current function
6757 because that is the inline function whose vars, types, etc.
6758 are being merged into the current function.
6759 See expand_inline_function. */
d6a5ac33 6760
bbf6f052
RK
6761 if (context != 0 && context != current_function_decl
6762 && context != inline_function_decl
6763 /* If var is static, we don't need a static chain to access it. */
6764 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6765 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6766 {
6767 rtx addr;
6768
6769 /* Mark as non-local and addressable. */
81feeecb 6770 DECL_NONLOCAL (exp) = 1;
38ee6ed9
JM
6771 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6772 abort ();
dffd7eb6 6773 (*lang_hooks.mark_addressable) (exp);
bbf6f052
RK
6774 if (GET_CODE (DECL_RTL (exp)) != MEM)
6775 abort ();
6776 addr = XEXP (DECL_RTL (exp), 0);
6777 if (GET_CODE (addr) == MEM)
792760b9
RK
6778 addr
6779 = replace_equiv_address (addr,
6780 fix_lexical_addr (XEXP (addr, 0), exp));
bbf6f052
RK
6781 else
6782 addr = fix_lexical_addr (addr, exp);
3bdf5ad1 6783
792760b9 6784 temp = replace_equiv_address (DECL_RTL (exp), addr);
bbf6f052 6785 }
4af3895e 6786
bbf6f052
RK
6787 /* This is the case of an array whose size is to be determined
6788 from its initializer, while the initializer is still being parsed.
6789 See expand_decl. */
d6a5ac33 6790
dc6d66b3
RK
6791 else if (GET_CODE (DECL_RTL (exp)) == MEM
6792 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
792760b9 6793 temp = validize_mem (DECL_RTL (exp));
d6a5ac33
RK
6794
6795 /* If DECL_RTL is memory, we are in the normal case and either
6796 the address is not valid or it is not a register and -fforce-addr
6797 is specified, get the address into a register. */
6798
dc6d66b3
RK
6799 else if (GET_CODE (DECL_RTL (exp)) == MEM
6800 && modifier != EXPAND_CONST_ADDRESS
6801 && modifier != EXPAND_SUM
6802 && modifier != EXPAND_INITIALIZER
6803 && (! memory_address_p (DECL_MODE (exp),
6804 XEXP (DECL_RTL (exp), 0))
6805 || (flag_force_addr
6806 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
792760b9
RK
6807 temp = replace_equiv_address (DECL_RTL (exp),
6808 copy_rtx (XEXP (DECL_RTL (exp), 0)));
1499e0a8 6809
dc6d66b3 6810 /* If we got something, return it. But first, set the alignment
04956a1a 6811 if the address is a register. */
dc6d66b3
RK
6812 if (temp != 0)
6813 {
6814 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
bdb429a5 6815 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
dc6d66b3
RK
6816
6817 return temp;
6818 }
6819
1499e0a8
RK
6820 /* If the mode of DECL_RTL does not match that of the decl, it
6821 must be a promoted value. We return a SUBREG of the wanted mode,
6822 but mark it so that we know that it was already extended. */
6823
6824 if (GET_CODE (DECL_RTL (exp)) == REG
7254c5fa 6825 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
1499e0a8 6826 {
1499e0a8
RK
6827 /* Get the signedness used for this variable. Ensure we get the
6828 same mode we got when the variable was declared. */
78911e8b 6829 if (GET_MODE (DECL_RTL (exp))
0fb7aeda 6830 != promote_mode (type, DECL_MODE (exp), &unsignedp,
e8dcd824 6831 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
1499e0a8
RK
6832 abort ();
6833
ddef6bc7 6834 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
1499e0a8 6835 SUBREG_PROMOTED_VAR_P (temp) = 1;
7879b81e 6836 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
1499e0a8
RK
6837 return temp;
6838 }
6839
bbf6f052
RK
6840 return DECL_RTL (exp);
6841
6842 case INTEGER_CST:
d8a50944 6843 temp = immed_double_const (TREE_INT_CST_LOW (exp),
05bccae2 6844 TREE_INT_CST_HIGH (exp), mode);
bbf6f052 6845
d8a50944
RH
6846 /* ??? If overflow is set, fold will have done an incomplete job,
6847 which can result in (plus xx (const_int 0)), which can get
6848 simplified by validate_replace_rtx during virtual register
6849 instantiation, which can result in unrecognizable insns.
6850 Avoid this by forcing all overflows into registers. */
c2e9dc85
RH
6851 if (TREE_CONSTANT_OVERFLOW (exp)
6852 && modifier != EXPAND_INITIALIZER)
d8a50944
RH
6853 temp = force_reg (mode, temp);
6854
6855 return temp;
6856
d744e06e
AH
6857 case VECTOR_CST:
6858 return const_vector_from_tree (exp);
6859
bbf6f052 6860 case CONST_DECL:
8403445a 6861 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
bbf6f052
RK
6862
6863 case REAL_CST:
6864 /* If optimized, generate immediate CONST_DOUBLE
3a94c984
KH
6865 which will be turned into memory by reload if necessary.
6866
bbf6f052
RK
6867 We used to force a register so that loop.c could see it. But
6868 this does not allow gen_* patterns to perform optimizations with
6869 the constants. It also produces two insns in cases like "x = 1.0;".
6870 On most machines, floating-point constants are not permitted in
6871 many insns, so we'd end up copying it to a register in any case.
6872
6873 Now, we do the copying in expand_binop, if appropriate. */
5692c7bc
ZW
6874 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6875 TYPE_MODE (TREE_TYPE (exp)));
bbf6f052
RK
6876
6877 case COMPLEX_CST:
9ad58e09
RS
6878 /* Handle evaluating a complex constant in a CONCAT target. */
6879 if (original_target && GET_CODE (original_target) == CONCAT)
6880 {
6881 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6882 rtx rtarg, itarg;
6883
6884 rtarg = XEXP (original_target, 0);
6885 itarg = XEXP (original_target, 1);
6886
6887 /* Move the real and imaginary parts separately. */
6888 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6889 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6890
6891 if (op0 != rtarg)
6892 emit_move_insn (rtarg, op0);
6893 if (op1 != itarg)
6894 emit_move_insn (itarg, op1);
6895
6896 return original_target;
6897 }
6898
71c0e7fc 6899 /* ... fall through ... */
9ad58e09 6900
bbf6f052 6901 case STRING_CST:
afc6aaab 6902 temp = output_constant_def (exp, 1);
bbf6f052 6903
afc6aaab 6904 /* temp contains a constant address.
bbf6f052
RK
6905 On RISC machines where a constant address isn't valid,
6906 make some insns to get that address into a register. */
afc6aaab 6907 if (modifier != EXPAND_CONST_ADDRESS
bbf6f052
RK
6908 && modifier != EXPAND_INITIALIZER
6909 && modifier != EXPAND_SUM
afc6aaab
ZW
6910 && (! memory_address_p (mode, XEXP (temp, 0))
6911 || flag_force_addr))
6912 return replace_equiv_address (temp,
6913 copy_rtx (XEXP (temp, 0)));
6914 return temp;
bbf6f052 6915
bf1e5319 6916 case EXPR_WITH_FILE_LOCATION:
b24f65cd
APB
6917 {
6918 rtx to_return;
070588f0 6919 location_t saved_loc = input_location;
b24f65cd 6920 input_filename = EXPR_WFL_FILENAME (exp);
d479d37f 6921 input_line = EXPR_WFL_LINENO (exp);
b24f65cd 6922 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
d479d37f 6923 emit_line_note (input_filename, input_line);
6ad7895a 6924 /* Possibly avoid switching back and forth here. */
b0ca54af 6925 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
070588f0 6926 input_location = saved_loc;
b24f65cd
APB
6927 return to_return;
6928 }
bf1e5319 6929
bbf6f052
RK
6930 case SAVE_EXPR:
6931 context = decl_function_context (exp);
d6a5ac33 6932
d0977240
RK
6933 /* If this SAVE_EXPR was at global context, assume we are an
6934 initialization function and move it into our context. */
6935 if (context == 0)
6936 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6937
bbf6f052
RK
6938 /* We treat inline_function_decl as an alias for the current function
6939 because that is the inline function whose vars, types, etc.
6940 are being merged into the current function.
6941 See expand_inline_function. */
6942 if (context == current_function_decl || context == inline_function_decl)
6943 context = 0;
6944
6945 /* If this is non-local, handle it. */
6946 if (context)
6947 {
d0977240
RK
6948 /* The following call just exists to abort if the context is
6949 not of a containing function. */
6950 find_function_data (context);
6951
bbf6f052
RK
6952 temp = SAVE_EXPR_RTL (exp);
6953 if (temp && GET_CODE (temp) == REG)
6954 {
f29a2bd1 6955 put_var_into_stack (exp, /*rescan=*/true);
bbf6f052
RK
6956 temp = SAVE_EXPR_RTL (exp);
6957 }
6958 if (temp == 0 || GET_CODE (temp) != MEM)
6959 abort ();
792760b9
RK
6960 return
6961 replace_equiv_address (temp,
6962 fix_lexical_addr (XEXP (temp, 0), exp));
bbf6f052
RK
6963 }
6964 if (SAVE_EXPR_RTL (exp) == 0)
6965 {
06089a8b
RK
6966 if (mode == VOIDmode)
6967 temp = const0_rtx;
6968 else
1da68f56
RK
6969 temp = assign_temp (build_qualified_type (type,
6970 (TYPE_QUALS (type)
6971 | TYPE_QUAL_CONST)),
6972 3, 0, 0);
1499e0a8 6973
bbf6f052 6974 SAVE_EXPR_RTL (exp) = temp;
bbf6f052 6975 if (!optimize && GET_CODE (temp) == REG)
38a448ca
RH
6976 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6977 save_expr_regs);
ff78f773
RK
6978
6979 /* If the mode of TEMP does not match that of the expression, it
6980 must be a promoted value. We pass store_expr a SUBREG of the
6981 wanted mode but mark it so that we know that it was already
3ac1a319 6982 extended. */
ff78f773
RK
6983
6984 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6985 {
ddef6bc7 6986 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
3ac1a319 6987 promote_mode (type, mode, &unsignedp, 0);
ff78f773 6988 SUBREG_PROMOTED_VAR_P (temp) = 1;
7879b81e 6989 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
ff78f773
RK
6990 }
6991
4c7a0be9 6992 if (temp == const0_rtx)
37a08a29 6993 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4c7a0be9 6994 else
8403445a
AM
6995 store_expr (TREE_OPERAND (exp, 0), temp,
6996 modifier == EXPAND_STACK_PARM ? 2 : 0);
e5e809f4
JL
6997
6998 TREE_USED (exp) = 1;
bbf6f052 6999 }
1499e0a8
RK
7000
7001 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
7002 must be a promoted value. We return a SUBREG of the wanted mode,
0f41302f 7003 but mark it so that we know that it was already extended. */
1499e0a8
RK
7004
7005 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
7006 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
7007 {
e70d22c8
RK
7008 /* Compute the signedness and make the proper SUBREG. */
7009 promote_mode (type, mode, &unsignedp, 0);
ddef6bc7 7010 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
1499e0a8 7011 SUBREG_PROMOTED_VAR_P (temp) = 1;
7879b81e 7012 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
1499e0a8
RK
7013 return temp;
7014 }
7015
bbf6f052
RK
7016 return SAVE_EXPR_RTL (exp);
7017
679163cf
MS
7018 case UNSAVE_EXPR:
7019 {
7020 rtx temp;
7021 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
24965e7a
NB
7022 TREE_OPERAND (exp, 0)
7023 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
679163cf
MS
7024 return temp;
7025 }
7026
b50d17a1 7027 case PLACEHOLDER_EXPR:
e9a25f70 7028 {
f47e9b4e 7029 tree old_list = placeholder_list;
738cc472 7030 tree placeholder_expr = 0;
e9a25f70 7031
f47e9b4e 7032 exp = find_placeholder (exp, &placeholder_expr);
70072ed9
RK
7033 if (exp == 0)
7034 abort ();
7035
f47e9b4e 7036 placeholder_list = TREE_CHAIN (placeholder_expr);
37a08a29 7037 temp = expand_expr (exp, original_target, tmode, modifier);
f47e9b4e
RK
7038 placeholder_list = old_list;
7039 return temp;
e9a25f70 7040 }
b50d17a1 7041
b50d17a1
RK
7042 case WITH_RECORD_EXPR:
7043 /* Put the object on the placeholder list, expand our first operand,
7044 and pop the list. */
7045 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
7046 placeholder_list);
37a08a29
RK
7047 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
7048 modifier);
b50d17a1
RK
7049 placeholder_list = TREE_CHAIN (placeholder_list);
7050 return target;
7051
70e6ca43
APB
7052 case GOTO_EXPR:
7053 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
7054 expand_goto (TREE_OPERAND (exp, 0));
7055 else
7056 expand_computed_goto (TREE_OPERAND (exp, 0));
7057 return const0_rtx;
7058
bbf6f052 7059 case EXIT_EXPR:
df4ae160 7060 expand_exit_loop_if_false (NULL,
e44842fe 7061 invert_truthvalue (TREE_OPERAND (exp, 0)));
bbf6f052
RK
7062 return const0_rtx;
7063
f42e28dd
APB
7064 case LABELED_BLOCK_EXPR:
7065 if (LABELED_BLOCK_BODY (exp))
b0832fe1 7066 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
30f7a378 7067 /* Should perhaps use expand_label, but this is simpler and safer. */
0a5fee32 7068 do_pending_stack_adjust ();
f42e28dd
APB
7069 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
7070 return const0_rtx;
7071
7072 case EXIT_BLOCK_EXPR:
7073 if (EXIT_BLOCK_RETURN (exp))
ab87f8c8 7074 sorry ("returned value in block_exit_expr");
f42e28dd
APB
7075 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
7076 return const0_rtx;
7077
bbf6f052 7078 case LOOP_EXPR:
0088fcb1 7079 push_temp_slots ();
bbf6f052 7080 expand_start_loop (1);
b0832fe1 7081 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
bbf6f052 7082 expand_end_loop ();
0088fcb1 7083 pop_temp_slots ();
bbf6f052
RK
7084
7085 return const0_rtx;
7086
7087 case BIND_EXPR:
7088 {
7089 tree vars = TREE_OPERAND (exp, 0);
bbf6f052
RK
7090
7091 /* Need to open a binding contour here because
e976b8b2 7092 if there are any cleanups they must be contained here. */
8e91754e 7093 expand_start_bindings (2);
bbf6f052 7094
2df53c0b
RS
7095 /* Mark the corresponding BLOCK for output in its proper place. */
7096 if (TREE_OPERAND (exp, 2) != 0
7097 && ! TREE_USED (TREE_OPERAND (exp, 2)))
43577e6b 7098 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
bbf6f052
RK
7099
7100 /* If VARS have not yet been expanded, expand them now. */
7101 while (vars)
7102 {
19e7881c 7103 if (!DECL_RTL_SET_P (vars))
4977bab6 7104 expand_decl (vars);
bbf6f052
RK
7105 expand_decl_init (vars);
7106 vars = TREE_CHAIN (vars);
7107 }
7108
37a08a29 7109 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
bbf6f052
RK
7110
7111 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
7112
7113 return temp;
7114 }
7115
7116 case RTL_EXPR:
83b853c9
JM
7117 if (RTL_EXPR_SEQUENCE (exp))
7118 {
7119 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
7120 abort ();
2f937369 7121 emit_insn (RTL_EXPR_SEQUENCE (exp));
83b853c9
JM
7122 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
7123 }
64dc53f3
MM
7124 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
7125 free_temps_for_rtl_expr (exp);
bbf6f052
RK
7126 return RTL_EXPR_RTL (exp);
7127
7128 case CONSTRUCTOR:
dd27116b
RK
7129 /* If we don't need the result, just ensure we evaluate any
7130 subexpressions. */
7131 if (ignore)
7132 {
7133 tree elt;
37a08a29 7134
dd27116b 7135 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
37a08a29
RK
7136 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
7137
dd27116b
RK
7138 return const0_rtx;
7139 }
3207b172 7140
4af3895e
JVA
7141 /* All elts simple constants => refer to a constant in memory. But
7142 if this is a non-BLKmode mode, let it store a field at a time
7143 since that should make a CONST_INT or CONST_DOUBLE when we
3207b172 7144 fold. Likewise, if we have a target we can use, it is best to
d720b9d1
RK
7145 store directly into the target unless the type is large enough
7146 that memcpy will be used. If we are making an initializer and
00182e1e
AH
7147 all operands are constant, put it in memory as well.
7148
7149 FIXME: Avoid trying to fill vector constructors piece-meal.
7150 Output them with output_constant_def below unless we're sure
7151 they're zeros. This should go away when vector initializers
7152 are treated like VECTOR_CST instead of arrays.
7153 */
dd27116b 7154 else if ((TREE_STATIC (exp)
3207b172 7155 && ((mode == BLKmode
e5e809f4 7156 && ! (target != 0 && safe_from_p (target, exp, 1)))
d720b9d1 7157 || TREE_ADDRESSABLE (exp)
19caa751 7158 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
3a94c984 7159 && (! MOVE_BY_PIECES_P
19caa751
RK
7160 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7161 TYPE_ALIGN (type)))
0fb7aeda
KH
7162 && ((TREE_CODE (type) == VECTOR_TYPE
7163 && !is_zeros_p (exp))
7164 || ! mostly_zeros_p (exp)))))
f59700f9
RK
7165 || ((modifier == EXPAND_INITIALIZER
7166 || modifier == EXPAND_CONST_ADDRESS)
7167 && TREE_CONSTANT (exp)))
bbf6f052 7168 {
bd7cf17e 7169 rtx constructor = output_constant_def (exp, 1);
19caa751 7170
b552441b
RS
7171 if (modifier != EXPAND_CONST_ADDRESS
7172 && modifier != EXPAND_INITIALIZER
792760b9
RK
7173 && modifier != EXPAND_SUM)
7174 constructor = validize_mem (constructor);
7175
bbf6f052
RK
7176 return constructor;
7177 }
bbf6f052
RK
7178 else
7179 {
e9ac02a6
JW
7180 /* Handle calls that pass values in multiple non-contiguous
7181 locations. The Irix 6 ABI has examples of this. */
e5e809f4 7182 if (target == 0 || ! safe_from_p (target, exp, 1)
8403445a
AM
7183 || GET_CODE (target) == PARALLEL
7184 || modifier == EXPAND_STACK_PARM)
1da68f56
RK
7185 target
7186 = assign_temp (build_qualified_type (type,
7187 (TYPE_QUALS (type)
7188 | (TREE_READONLY (exp)
7189 * TYPE_QUAL_CONST))),
c24ae149 7190 0, TREE_ADDRESSABLE (exp), 1);
07604beb 7191
de8920be 7192 store_constructor (exp, target, 0, int_expr_size (exp));
bbf6f052
RK
7193 return target;
7194 }
7195
7196 case INDIRECT_REF:
7197 {
7198 tree exp1 = TREE_OPERAND (exp, 0);
7581a30f 7199 tree index;
3a94c984
KH
7200 tree string = string_constant (exp1, &index);
7201
06eaa86f 7202 /* Try to optimize reads from const strings. */
0fb7aeda
KH
7203 if (string
7204 && TREE_CODE (string) == STRING_CST
7205 && TREE_CODE (index) == INTEGER_CST
05bccae2 7206 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
0fb7aeda
KH
7207 && GET_MODE_CLASS (mode) == MODE_INT
7208 && GET_MODE_SIZE (mode) == 1
37a08a29 7209 && modifier != EXPAND_WRITE)
0fb7aeda 7210 return gen_int_mode (TREE_STRING_POINTER (string)
21ef78aa 7211 [TREE_INT_CST_LOW (index)], mode);
bbf6f052 7212
405f0da6
JW
7213 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7214 op0 = memory_address (mode, op0);
38a448ca 7215 temp = gen_rtx_MEM (mode, op0);
3bdf5ad1 7216 set_mem_attributes (temp, exp, 0);
1125706f 7217
14a774a9
RK
7218 /* If we are writing to this object and its type is a record with
7219 readonly fields, we must mark it as readonly so it will
7220 conflict with readonly references to those fields. */
37a08a29 7221 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
14a774a9
RK
7222 RTX_UNCHANGING_P (temp) = 1;
7223
8c8a8e34
JW
7224 return temp;
7225 }
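
The read-from-constant-string optimization above folds a constant index into
a string literal straight to the character value.  A standalone illustration
(hypothetical):

    #include <stdio.h>

    int main (void)
    {
      char c = "hello"[1];    /* constant address and index: folds to 'e' */
      printf ("%c\n", c);
      return 0;
    }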
bbf6f052
RK
7226
7227 case ARRAY_REF:
742920c7
RK
7228 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
7229 abort ();
bbf6f052 7230
bbf6f052 7231 {
742920c7
RK
7232 tree array = TREE_OPERAND (exp, 0);
7233 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
7234 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
fed3cef0 7235 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
08293add 7236 HOST_WIDE_INT i;
b50d17a1 7237
d4c89139
PB
7238	    /* Optimize the special case of a zero lower bound.
7239
7240 We convert the low_bound to sizetype to avoid some problems
7241 with constant folding. (E.g. suppose the lower bound is 1,
7242 and its mode is QI. Without the conversion, (ARRAY
7243 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
fed3cef0 7244 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
d4c89139 7245
742920c7 7246 if (! integer_zerop (low_bound))
fed3cef0 7247 index = size_diffop (index, convert (sizetype, low_bound));
742920c7 7248
742920c7 7249 /* Fold an expression like: "foo"[2].
ad2e7dd0
RK
7250 This is not done in fold so it won't happen inside &.
7251 Don't fold if this is for wide characters since it's too
7252 difficult to do correctly and this is a very rare case. */
742920c7 7253
017e1b43
RH
7254 if (modifier != EXPAND_CONST_ADDRESS
7255 && modifier != EXPAND_INITIALIZER
7256 && modifier != EXPAND_MEMORY
cb5fa0f8 7257 && TREE_CODE (array) == STRING_CST
742920c7 7258 && TREE_CODE (index) == INTEGER_CST
05bccae2 7259 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
ad2e7dd0
RK
7260 && GET_MODE_CLASS (mode) == MODE_INT
7261 && GET_MODE_SIZE (mode) == 1)
21ef78aa
DE
7262 return gen_int_mode (TREE_STRING_POINTER (array)
7263 [TREE_INT_CST_LOW (index)], mode);
bbf6f052 7264
742920c7
RK
7265 /* If this is a constant index into a constant array,
7266 just get the value from the array. Handle both the cases when
7267 we have an explicit constructor and when our operand is a variable
7268 that was declared const. */
4af3895e 7269
017e1b43
RH
7270 if (modifier != EXPAND_CONST_ADDRESS
7271 && modifier != EXPAND_INITIALIZER
7272 && modifier != EXPAND_MEMORY
7273 && TREE_CODE (array) == CONSTRUCTOR
7274 && ! TREE_SIDE_EFFECTS (array)
05bccae2 7275 && TREE_CODE (index) == INTEGER_CST
3a94c984 7276 && 0 > compare_tree_int (index,
05bccae2
RK
7277 list_length (CONSTRUCTOR_ELTS
7278 (TREE_OPERAND (exp, 0)))))
742920c7 7279 {
05bccae2
RK
7280 tree elem;
7281
7282 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7283 i = TREE_INT_CST_LOW (index);
7284 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
7285 ;
7286
7287 if (elem)
37a08a29
RK
7288 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
7289 modifier);
742920c7 7290 }
3a94c984 7291
742920c7 7292 else if (optimize >= 1
cb5fa0f8
RK
7293 && modifier != EXPAND_CONST_ADDRESS
7294 && modifier != EXPAND_INITIALIZER
017e1b43 7295 && modifier != EXPAND_MEMORY
742920c7
RK
7296 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7297 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7298 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
7299 {
08293add 7300 if (TREE_CODE (index) == INTEGER_CST)
742920c7
RK
7301 {
7302 tree init = DECL_INITIAL (array);
7303
742920c7
RK
7304 if (TREE_CODE (init) == CONSTRUCTOR)
7305 {
665f2503 7306 tree elem;
742920c7 7307
05bccae2 7308 for (elem = CONSTRUCTOR_ELTS (init);
5cb1bea4
JM
7309 (elem
7310 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
05bccae2
RK
7311 elem = TREE_CHAIN (elem))
7312 ;
7313
c54b0a5e 7314 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
742920c7 7315 return expand_expr (fold (TREE_VALUE (elem)), target,
37a08a29 7316 tmode, modifier);
742920c7
RK
7317 }
7318 else if (TREE_CODE (init) == STRING_CST
05bccae2
RK
7319 && 0 > compare_tree_int (index,
7320 TREE_STRING_LENGTH (init)))
5c80f6e6
JJ
7321 {
7322 tree type = TREE_TYPE (TREE_TYPE (init));
7323 enum machine_mode mode = TYPE_MODE (type);
7324
7325 if (GET_MODE_CLASS (mode) == MODE_INT
7326 && GET_MODE_SIZE (mode) == 1)
21ef78aa
DE
7327 return gen_int_mode (TREE_STRING_POINTER (init)
7328 [TREE_INT_CST_LOW (index)], mode);
5c80f6e6 7329 }
742920c7
RK
7330 }
7331 }
7332 }
afc6aaab 7333 goto normal_inner_ref;
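
Likewise for a constant index into a constant array: with optimization
enabled, the value is taken straight from the initializer.  A standalone
sketch (hypothetical):

    #include <stdio.h>

    static const int table[3] = { 10, 20, 30 };  /* readonly, known initializer */

    int main (void)
    {
      printf ("%d\n", table[1]);    /* can expand to the constant 20 */
      return 0;
    }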
bbf6f052
RK
7334
7335 case COMPONENT_REF:
4af3895e 7336 /* If the operand is a CONSTRUCTOR, we can just extract the
afc6aaab
ZW
7337 appropriate field if it is present. */
7338 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
4af3895e
JVA
7339 {
7340 tree elt;
7341
7342 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
7343 elt = TREE_CHAIN (elt))
86b5812c
RK
7344 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
7345 /* We can normally use the value of the field in the
7346 CONSTRUCTOR. However, if this is a bitfield in
7347 an integral mode that we can fit in a HOST_WIDE_INT,
7348 we must mask only the number of bits in the bitfield,
7349 since this is done implicitly by the constructor. If
7350 the bitfield does not meet either of those conditions,
7351 we can't do this optimization. */
7352 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
7353 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
7354 == MODE_INT)
7355 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
7356 <= HOST_BITS_PER_WIDE_INT))))
7357 {
8403445a
AM
7358 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
7359 && modifier == EXPAND_STACK_PARM)
7360 target = 0;
3a94c984 7361 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
86b5812c
RK
7362 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
7363 {
9df2c88c
RK
7364 HOST_WIDE_INT bitsize
7365 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
22273300
JJ
7366 enum machine_mode imode
7367 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
86b5812c
RK
7368
7369 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
7370 {
7371 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
22273300 7372 op0 = expand_and (imode, op0, op1, target);
86b5812c
RK
7373 }
7374 else
7375 {
7376 tree count
e5e809f4
JL
7377 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7378 0);
86b5812c
RK
7379
7380 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7381 target, 0);
7382 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7383 target, 0);
7384 }
7385 }
7386
7387 return op0;
7388 }
4af3895e 7389 }
afc6aaab 7390 goto normal_inner_ref;
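
The AND mask and the shift pair above are the standard zero- and
sign-extension of an n-bit field held in a wider value.  A standalone model
(hypothetical names; the unsigned cast sidesteps the signed-shift pitfall the
generated code avoids by working in the target's integer mode):

    #include <stdio.h>

    static unsigned zext (unsigned v, int bitsize)  /* the unsigned branch */
    {
      return v & ((1u << bitsize) - 1);             /* (1 << bitsize) - 1 mask */
    }

    static int sext (int v, int bitsize)            /* the signed branch */
    {
      int count = (int) sizeof (int) * 8 - bitsize; /* GET_MODE_BITSIZE - bitsize */
      return (int) ((unsigned) v << count) >> count; /* LSHIFT, then RSHIFT */
    }

    int main (void)
    {
      printf ("%u %d\n", zext (0xFF, 4), sext (0xF, 4));  /* 15 -1 */
      return 0;
    }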
4af3895e 7391
afc6aaab
ZW
7392 case BIT_FIELD_REF:
7393 case ARRAY_RANGE_REF:
7394 normal_inner_ref:
bbf6f052
RK
7395 {
7396 enum machine_mode mode1;
770ae6cc 7397 HOST_WIDE_INT bitsize, bitpos;
7bb0943f 7398 tree offset;
bbf6f052 7399 int volatilep = 0;
839c4796 7400 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
a06ef755 7401 &mode1, &unsignedp, &volatilep);
f47e9b4e 7402 rtx orig_op0;
bbf6f052 7403
e7f3c83f
RK
7404 /* If we got back the original object, something is wrong. Perhaps
7405 we are evaluating an expression too early. In any event, don't
7406 infinitely recurse. */
7407 if (tem == exp)
7408 abort ();
7409
3d27140a 7410 /* If TEM's type is a union of variable size, pass TARGET to the inner
b74f5ff2
RK
7411	       computation, since it will need a temporary and TARGET is known
7412	       to be usable as one.  This occurs in unchecked conversion in Ada.  */
3a94c984 7413
f47e9b4e
RK
7414 orig_op0 = op0
7415 = expand_expr (tem,
7416 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7417 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7418 != INTEGER_CST)
8403445a 7419 && modifier != EXPAND_STACK_PARM
f47e9b4e
RK
7420 ? target : NULL_RTX),
7421 VOIDmode,
7422 (modifier == EXPAND_INITIALIZER
8403445a
AM
7423 || modifier == EXPAND_CONST_ADDRESS
7424 || modifier == EXPAND_STACK_PARM)
f47e9b4e 7425 ? modifier : EXPAND_NORMAL);
bbf6f052 7426
8c8a8e34 7427 /* If this is a constant, put it into a register if it is a
14a774a9 7428 legitimate constant and OFFSET is 0 and memory if it isn't. */
8c8a8e34
JW
7429 if (CONSTANT_P (op0))
7430 {
7431 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
14a774a9
RK
7432 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7433 && offset == 0)
8c8a8e34
JW
7434 op0 = force_reg (mode, op0);
7435 else
7436 op0 = validize_mem (force_const_mem (mode, op0));
7437 }
7438
7bb0943f
RS
7439 if (offset != 0)
7440 {
8403445a
AM
7441 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7442 EXPAND_SUM);
7bb0943f 7443
a2725049 7444 /* If this object is in a register, put it into memory.
14a774a9
RK
7445 This case can't occur in C, but can in Ada if we have
7446 unchecked conversion of an expression from a scalar type to
7447 an array or record type. */
7448 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7449 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7450 {
d04218c0
RK
7451 /* If the operand is a SAVE_EXPR, we can deal with this by
7452 forcing the SAVE_EXPR into memory. */
7453 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
eeb35b45 7454 {
502b8322 7455 put_var_into_stack (TREE_OPERAND (exp, 0),
f29a2bd1 7456 /*rescan=*/true);
eeb35b45
RK
7457 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7458 }
d04218c0
RK
7459 else
7460 {
7461 tree nt
7462 = build_qualified_type (TREE_TYPE (tem),
7463 (TYPE_QUALS (TREE_TYPE (tem))
7464 | TYPE_QUAL_CONST));
7465 rtx memloc = assign_temp (nt, 1, 1, 1);
7466
d04218c0
RK
7467 emit_move_insn (memloc, op0);
7468 op0 = memloc;
7469 }
14a774a9
RK
7470 }
7471
7bb0943f
RS
7472 if (GET_CODE (op0) != MEM)
7473 abort ();
2d48c13d 7474
2d48c13d 7475#ifdef POINTERS_EXTEND_UNSIGNED
4b6c1672 7476 if (GET_MODE (offset_rtx) != Pmode)
267b28bd 7477 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
fa06ab5c
RK
7478#else
7479 if (GET_MODE (offset_rtx) != ptr_mode)
7480 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
2d48c13d
JL
7481#endif
7482
14a774a9 7483 /* A constant address in OP0 can have VOIDmode, we must not try
efd07ca7 7484 to call force_reg for that case. Avoid that case. */
89752202
HB
7485 if (GET_CODE (op0) == MEM
7486 && GET_MODE (op0) == BLKmode
efd07ca7 7487 && GET_MODE (XEXP (op0, 0)) != VOIDmode
14a774a9 7488 && bitsize != 0
3a94c984 7489 && (bitpos % bitsize) == 0
89752202 7490 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
a06ef755 7491 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
89752202 7492 {
e3c8ea67 7493 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
89752202
HB
7494 bitpos = 0;
7495 }
7496
0d4903b8
RK
7497 op0 = offset_address (op0, offset_rtx,
7498 highest_pow2_factor (offset));
7bb0943f
RS
7499 }
7500
1ce7f3c2
RK
7501 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7502 record its alignment as BIGGEST_ALIGNMENT. */
7503 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7504 && is_aligning_offset (offset, tem))
7505 set_mem_align (op0, BIGGEST_ALIGNMENT);
7506
bbf6f052
RK
7507 /* Don't forget about volatility even if this is a bitfield. */
7508 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7509 {
f47e9b4e
RK
7510 if (op0 == orig_op0)
7511 op0 = copy_rtx (op0);
7512
bbf6f052
RK
7513 MEM_VOLATILE_P (op0) = 1;
7514 }
7515
010f87c4
JJ
7516 /* The following code doesn't handle CONCAT.
7517 Assume only bitpos == 0 can be used for CONCAT, due to
7518	   one-element arrays having the same mode as their element.  */
7519 if (GET_CODE (op0) == CONCAT)
7520 {
7521 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7522 abort ();
7523 return op0;
7524 }
7525
ccc98036
RS
7526 /* In cases where an aligned union has an unaligned object
7527 as a field, we might be extracting a BLKmode value from
7528 an integer-mode (e.g., SImode) object. Handle this case
7529 by doing the extract into an object as wide as the field
7530 (which we know to be the width of a basic mode), then
cb5fa0f8 7531 storing into memory, and changing the mode to BLKmode. */
bbf6f052 7532 if (mode1 == VOIDmode
ccc98036 7533 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
cb5fa0f8
RK
7534 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7535 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
10c2a453
RK
7536 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7537 && modifier != EXPAND_CONST_ADDRESS
7538 && modifier != EXPAND_INITIALIZER)
cb5fa0f8
RK
7539 /* If the field isn't aligned enough to fetch as a memref,
7540 fetch it as a bit field. */
7541 || (mode1 != BLKmode
9e5f281f
OH
7542 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7543 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))
a8f3bf52
RK
7544 && ((modifier == EXPAND_CONST_ADDRESS
7545 || modifier == EXPAND_INITIALIZER)
7546 ? STRICT_ALIGNMENT
7547 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
9e5f281f 7548 || (bitpos % BITS_PER_UNIT != 0)))
cb5fa0f8
RK
7549 /* If the type and the field are a constant size and the
7550 size of the type isn't the same size as the bitfield,
7551 we must use bitfield operations. */
7552 || (bitsize >= 0
7553 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7554 == INTEGER_CST)
7555 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
a06ef755 7556 bitsize)))
bbf6f052 7557 {
bbf6f052
RK
7558 enum machine_mode ext_mode = mode;
7559
14a774a9
RK
7560 if (ext_mode == BLKmode
7561 && ! (target != 0 && GET_CODE (op0) == MEM
7562 && GET_CODE (target) == MEM
7563 && bitpos % BITS_PER_UNIT == 0))
bbf6f052
RK
7564 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7565
7566 if (ext_mode == BLKmode)
a281e72d
RK
7567 {
7568 /* In this case, BITPOS must start at a byte boundary and
7569 TARGET, if specified, must be a MEM. */
7570 if (GET_CODE (op0) != MEM
7571 || (target != 0 && GET_CODE (target) != MEM)
7572 || bitpos % BITS_PER_UNIT != 0)
7573 abort ();
7574
f4ef873c 7575 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
a281e72d
RK
7576 if (target == 0)
7577 target = assign_temp (type, 0, 1, 1);
7578
7579 emit_block_move (target, op0,
a06ef755 7580 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
44bb111a 7581 / BITS_PER_UNIT),
8403445a
AM
7582 (modifier == EXPAND_STACK_PARM
7583 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
3a94c984 7584
a281e72d
RK
7585 return target;
7586 }
bbf6f052 7587
dc6d66b3
RK
7588 op0 = validize_mem (op0);
7589
7590 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
04050c69 7591 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
dc6d66b3 7592
8403445a
AM
7593 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7594 (modifier == EXPAND_STACK_PARM
7595 ? NULL_RTX : target),
7596 ext_mode, ext_mode,
bbf6f052 7597 int_size_in_bytes (TREE_TYPE (tem)));
ef19912d
RK
7598
7599 /* If the result is a record type and BITSIZE is narrower than
7600 the mode of OP0, an integral mode, and this is a big endian
7601 machine, we must put the field into the high-order bits. */
7602 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7603 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
65a07688 7604 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
ef19912d
RK
7605 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7606 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7607 - bitsize),
7608 op0, 1);
7609
bbf6f052
RK
7610 if (mode == BLKmode)
7611 {
c3d32120 7612 rtx new = assign_temp (build_qualified_type
b0c48229
NB
7613 ((*lang_hooks.types.type_for_mode)
7614 (ext_mode, 0),
c3d32120 7615 TYPE_QUAL_CONST), 0, 1, 1);
bbf6f052
RK
7616
7617 emit_move_insn (new, op0);
7618 op0 = copy_rtx (new);
7619 PUT_MODE (op0, BLKmode);
c3d32120 7620 set_mem_attributes (op0, exp, 1);
bbf6f052
RK
7621 }
7622
7623 return op0;
7624 }
7625
05019f83
RK
7626 /* If the result is BLKmode, use that to access the object
7627 now as well. */
7628 if (mode == BLKmode)
7629 mode1 = BLKmode;
7630
bbf6f052
RK
7631 /* Get a reference to just this component. */
7632 if (modifier == EXPAND_CONST_ADDRESS
7633 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
f1ec5147 7634 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
bbf6f052 7635 else
f4ef873c 7636 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
41472af8 7637
f47e9b4e
RK
7638 if (op0 == orig_op0)
7639 op0 = copy_rtx (op0);
7640
3bdf5ad1 7641 set_mem_attributes (op0, exp, 0);
dc6d66b3 7642 if (GET_CODE (XEXP (op0, 0)) == REG)
a06ef755 7643 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
dc6d66b3 7644
bbf6f052 7645 MEM_VOLATILE_P (op0) |= volatilep;
0d15e60c 7646 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
08bbd316 7647 || modifier == EXPAND_CONST_ADDRESS
0d15e60c 7648 || modifier == EXPAND_INITIALIZER)
bbf6f052 7649 return op0;
0d15e60c 7650 else if (target == 0)
bbf6f052 7651 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
0d15e60c 7652
bbf6f052
RK
7653 convert_move (target, op0, unsignedp);
7654 return target;
7655 }
7656
4a8d0c9c
RH
7657 case VTABLE_REF:
7658 {
7659 rtx insn, before = get_last_insn (), vtbl_ref;
7660
7661 /* Evaluate the interior expression. */
7662 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7663 tmode, modifier);
7664
7665 /* Get or create an instruction off which to hang a note. */
7666 if (REG_P (subtarget))
7667 {
7668 target = subtarget;
7669 insn = get_last_insn ();
7670 if (insn == before)
7671 abort ();
7672 if (! INSN_P (insn))
7673 insn = prev_nonnote_insn (insn);
7674 }
7675 else
7676 {
7677 target = gen_reg_rtx (GET_MODE (subtarget));
7678 insn = emit_move_insn (target, subtarget);
7679 }
7680
7681 /* Collect the data for the note. */
7682 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7683 vtbl_ref = plus_constant (vtbl_ref,
7684 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7685 /* Discard the initial CONST that was added. */
7686 vtbl_ref = XEXP (vtbl_ref, 0);
7687
7688 REG_NOTES (insn)
7689 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7690
7691 return target;
7692 }
7693
bbf6f052
RK
7694 /* Intended for a reference to a buffer of a file-object in Pascal.
7695 But it's not certain that a special tree code will really be
7696 necessary for these. INDIRECT_REF might work for them. */
7697 case BUFFER_REF:
7698 abort ();
7699
7308a047 7700 case IN_EXPR:
7308a047 7701 {
d6a5ac33
RK
7702 /* Pascal set IN expression.
7703
7704 Algorithm:
7705 rlo = set_low - (set_low%bits_per_word);
7706 the_word = set [ (index - rlo)/bits_per_word ];
7707 bit_index = index % bits_per_word;
7708 bitmask = 1 << bit_index;
7709 return !!(the_word & bitmask); */
7710
7308a047
RS
7711 tree set = TREE_OPERAND (exp, 0);
7712 tree index = TREE_OPERAND (exp, 1);
d6a5ac33 7713 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7308a047 7714 tree set_type = TREE_TYPE (set);
7308a047
RS
7715 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7716 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
d6a5ac33
RK
7717 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7718 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7719 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7720 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7721 rtx setaddr = XEXP (setval, 0);
7722 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7308a047
RS
7723 rtx rlow;
7724 rtx diff, quo, rem, addr, bit, result;
7308a047 7725
d6a5ac33
RK
7726 /* If domain is empty, answer is no. Likewise if index is constant
7727 and out of bounds. */
51723711 7728 if (((TREE_CODE (set_high_bound) == INTEGER_CST
d6a5ac33 7729 && TREE_CODE (set_low_bound) == INTEGER_CST
51723711 7730 && tree_int_cst_lt (set_high_bound, set_low_bound))
d6a5ac33
RK
7731 || (TREE_CODE (index) == INTEGER_CST
7732 && TREE_CODE (set_low_bound) == INTEGER_CST
7733 && tree_int_cst_lt (index, set_low_bound))
7734 || (TREE_CODE (set_high_bound) == INTEGER_CST
7735 && TREE_CODE (index) == INTEGER_CST
7736 && tree_int_cst_lt (set_high_bound, index))))
7308a047
RS
7737 return const0_rtx;
7738
d6a5ac33
RK
7739 if (target == 0)
7740 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7308a047
RS
7741
7742 /* If we get here, we have to generate the code for both cases
7743 (in range and out of range). */
7744
7745 op0 = gen_label_rtx ();
7746 op1 = gen_label_rtx ();
7747
7748 if (! (GET_CODE (index_val) == CONST_INT
7749 && GET_CODE (lo_r) == CONST_INT))
a06ef755
RK
7750 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7751 GET_MODE (index_val), iunsignedp, op1);
7308a047
RS
7752
7753 if (! (GET_CODE (index_val) == CONST_INT
7754 && GET_CODE (hi_r) == CONST_INT))
a06ef755
RK
7755 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7756 GET_MODE (index_val), iunsignedp, op1);
7308a047
RS
7757
7758 /* Calculate the element number of bit zero in the first word
7759 of the set. */
7760 if (GET_CODE (lo_r) == CONST_INT)
17938e57 7761 rlow = GEN_INT (INTVAL (lo_r)
3a94c984 7762 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7308a047 7763 else
17938e57
RK
7764 rlow = expand_binop (index_mode, and_optab, lo_r,
7765 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
d6a5ac33 7766 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7308a047 7767
d6a5ac33
RK
7768 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7769 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7308a047
RS
7770
7771 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
d6a5ac33 7772 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7308a047 7773 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
d6a5ac33
RK
7774 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7775
7308a047 7776 addr = memory_address (byte_mode,
d6a5ac33
RK
7777 expand_binop (index_mode, add_optab, diff,
7778 setaddr, NULL_RTX, iunsignedp,
17938e57 7779 OPTAB_LIB_WIDEN));
d6a5ac33 7780
3a94c984 7781 /* Extract the bit we want to examine. */
7308a047 7782 bit = expand_shift (RSHIFT_EXPR, byte_mode,
38a448ca 7783 gen_rtx_MEM (byte_mode, addr),
17938e57
RK
7784 make_tree (TREE_TYPE (index), rem),
7785 NULL_RTX, 1);
7786 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7787 GET_MODE (target) == byte_mode ? target : 0,
7308a047 7788 1, OPTAB_LIB_WIDEN);
17938e57
RK
7789
7790 if (result != target)
7791 convert_move (target, result, 1);
7308a047
RS
7792
7793 /* Output the code to handle the out-of-range case. */
7794 emit_jump (op0);
7795 emit_label (op1);
7796 emit_move_insn (target, const0_rtx);
7797 emit_label (op0);
7798 return target;
7799 }
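
The comment's algorithm, as a standalone C model of membership in a
byte-addressed bit set (hypothetical and simplified: the first bit of the
vector stands for LOW, and 8-bit units model the BITS_PER_UNIT arithmetic):

    #include <stdio.h>

    static int set_member (const unsigned char *set, int low, int high, int index)
    {
      if (index < low || index > high)
        return 0;                                /* the out-of-range path */
      int diff = index - low;                    /* diff, quo, rem above */
      return (set[diff / 8] >> (diff % 8)) & 1;  /* the_word & bitmask */
    }

    int main (void)
    {
      unsigned char set[2] = { 0x05, 0 };        /* members: low+0, low+2 */
      printf ("%d %d\n", set_member (set, 3, 18, 5),   /* 1 */
                         set_member (set, 3, 18, 6));  /* 0 */
      return 0;
    }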
7800
bbf6f052 7801 case WITH_CLEANUP_EXPR:
6ad7895a 7802 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
bbf6f052 7803 {
6ad7895a 7804 WITH_CLEANUP_EXPR_RTL (exp)
37a08a29 7805 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
659e5a7a
JM
7806 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7807 CLEANUP_EH_ONLY (exp));
e976b8b2 7808
bbf6f052 7809 /* That's it for this cleanup. */
6ad7895a 7810 TREE_OPERAND (exp, 1) = 0;
bbf6f052 7811 }
6ad7895a 7812 return WITH_CLEANUP_EXPR_RTL (exp);
bbf6f052 7813
5dab5552
MS
7814 case CLEANUP_POINT_EXPR:
7815 {
e976b8b2
MS
7816 /* Start a new binding layer that will keep track of all cleanup
7817 actions to be performed. */
8e91754e 7818 expand_start_bindings (2);
e976b8b2 7819
d93d4205 7820 target_temp_slot_level = temp_slot_level;
e976b8b2 7821
37a08a29 7822 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
f283f66b
JM
7823 /* If we're going to use this value, load it up now. */
7824 if (! ignore)
7825 op0 = force_not_mem (op0);
d93d4205 7826 preserve_temp_slots (op0);
e976b8b2 7827 expand_end_bindings (NULL_TREE, 0, 0);
5dab5552
MS
7828 }
7829 return op0;
7830
bbf6f052
RK
7831 case CALL_EXPR:
7832 /* Check for a built-in function. */
7833 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
d6a5ac33
RK
7834 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7835 == FUNCTION_DECL)
bbf6f052 7836 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
0fb7aeda 7837 {
c70eaeaf
KG
7838 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7839 == BUILT_IN_FRONTEND)
8403445a
AM
7840 return (*lang_hooks.expand_expr) (exp, original_target,
7841 tmode, modifier);
c70eaeaf
KG
7842 else
7843 return expand_builtin (exp, target, subtarget, tmode, ignore);
7844 }
d6a5ac33 7845
8129842c 7846 return expand_call (exp, target, ignore);
bbf6f052
RK
7847
7848 case NON_LVALUE_EXPR:
7849 case NOP_EXPR:
7850 case CONVERT_EXPR:
7851 case REFERENCE_EXPR:
4a53008b 7852 if (TREE_OPERAND (exp, 0) == error_mark_node)
a592f288 7853 return const0_rtx;
4a53008b 7854
bbf6f052
RK
7855 if (TREE_CODE (type) == UNION_TYPE)
7856 {
7857 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
14a774a9 7858
c3d32120
RK
7859 /* If both input and output are BLKmode, this conversion isn't doing
7860 anything except possibly changing memory attribute. */
7861 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7862 {
7863 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7864 modifier);
7865
7866 result = copy_rtx (result);
7867 set_mem_attributes (result, exp, 0);
7868 return result;
7869 }
14a774a9 7870
bbf6f052 7871 if (target == 0)
1da68f56 7872 target = assign_temp (type, 0, 1, 1);
d6a5ac33 7873
bbf6f052
RK
7874 if (GET_CODE (target) == MEM)
7875 /* Store data into beginning of memory target. */
7876 store_expr (TREE_OPERAND (exp, 0),
8403445a
AM
7877 adjust_address (target, TYPE_MODE (valtype), 0),
7878 modifier == EXPAND_STACK_PARM ? 2 : 0);
1499e0a8 7879
bbf6f052
RK
7880 else if (GET_CODE (target) == REG)
7881 /* Store this field into a union of the proper type. */
14a774a9
RK
7882 store_field (target,
7883 MIN ((int_size_in_bytes (TREE_TYPE
7884 (TREE_OPERAND (exp, 0)))
7885 * BITS_PER_UNIT),
8752c357 7886 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
14a774a9 7887 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
a06ef755 7888 VOIDmode, 0, type, 0);
bbf6f052
RK
7889 else
7890 abort ();
7891
7892 /* Return the entire union. */
7893 return target;
7894 }
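      /* For illustration: the union branch above is what handles a cast to
	 a union type (a GNU C extension), e.g.

	     union u { int i; double d; };
	     ... (union u) some_int ...

	 The operand is stored at the start of a temporary for the union
	 (MEM case) or into a field of a register-held union (REG case),
	 and the union as a whole is the result.  */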

      if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	{
	  op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
			     modifier);

	  /* If the signedness of the conversion differs and OP0 is
	     a promoted SUBREG, clear that indication since we now
	     have to do the proper extension.  */
	  if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
	      && GET_CODE (op0) == SUBREG)
	    SUBREG_PROMOTED_VAR_P (op0) = 0;

	  return op0;
	}

      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
      if (GET_MODE (op0) == mode)
	return op0;

      /* If OP0 is a constant, just convert it into the proper mode.  */
      if (CONSTANT_P (op0))
	{
	  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
	  enum machine_mode inner_mode = TYPE_MODE (inner_type);

	  if (modifier == EXPAND_INITIALIZER)
	    return simplify_gen_subreg (mode, op0, inner_mode,
					subreg_lowpart_offset (mode,
							       inner_mode));
	  else
	    return convert_modes (mode, inner_mode, op0,
				  TREE_UNSIGNED (inner_type));
	}

      if (modifier == EXPAND_INITIALIZER)
	return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);

      if (target == 0)
	return
	  convert_to_mode (mode, op0,
			   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      else
	convert_move (target, op0,
		      TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;

    case VIEW_CONVERT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);

      /* If the input and output modes are both the same, we are done.
	 Otherwise, if neither mode is BLKmode and both are integral and within
	 a word, we can use gen_lowpart.  If neither is true, make sure the
	 operand is in memory and convert the MEM to the new mode.  */
      if (TYPE_MODE (type) == GET_MODE (op0))
	;
      else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
	       && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
	       && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
	       && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
	       && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
	op0 = gen_lowpart (TYPE_MODE (type), op0);
      else if (GET_CODE (op0) != MEM)
	{
	  /* If the operand is not a MEM, force it into memory.  Since we
	     are going to be changing the mode of the MEM, don't call
	     force_const_mem for constants because we don't allow pool
	     constants to change mode.  */
	  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

	  if (TREE_ADDRESSABLE (exp))
	    abort ();

	  if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
	    target
	      = assign_stack_temp_for_type
		(TYPE_MODE (inner_type),
		 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);

	  emit_move_insn (target, op0);
	  op0 = target;
	}

      /* At this point, OP0 is in the correct mode.  If the output type is such
	 that the operand is known to be aligned, indicate that it is.
	 Otherwise, we need only be concerned about alignment for non-BLKmode
	 results.  */
      if (GET_CODE (op0) == MEM)
	{
	  op0 = copy_rtx (op0);

	  if (TYPE_ALIGN_OK (type))
	    set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
	  else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
		   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
	    {
	      tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
	      HOST_WIDE_INT temp_size
		= MAX (int_size_in_bytes (inner_type),
		       (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
	      rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
						    temp_size, 0, type);
	      rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);

	      if (TREE_ADDRESSABLE (exp))
		abort ();

	      if (GET_MODE (op0) == BLKmode)
		emit_block_move (new_with_op0_mode, op0,
				 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
				 (modifier == EXPAND_STACK_PARM
				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
	      else
		emit_move_insn (new_with_op0_mode, op0);

	      op0 = new;
	    }

	  op0 = adjust_address (op0, TYPE_MODE (type), 0);
	}

      return op0;
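      /* For illustration: VIEW_CONVERT_EXPR reinterprets a value's bits in
	 a new type, roughly what  int i = *(int *) &f;  asks for with a
	 float F (assuming both are 32 bits wide).  Since SFmode is not
	 MODE_INT, such a case takes the memory path above: the value is
	 spilled to a stack temporary and the MEM is then viewed in the
	 new mode.  */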

    case PLUS_EXPR:
      this_optab = ! unsignedp && flag_trapv
		   && (GET_MODE_CLASS (mode) == MODE_INT)
		   ? addv_optab : add_optab;

      /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
	 something else, make sure we add the register to the constant and
	 then to the other thing.  This case can occur during strength
	 reduction and doing it this way will produce better code if the
	 frame pointer or argument pointer is eliminated.

	 fold-const.c will ensure that the constant is always in the inner
	 PLUS_EXPR, so the only case we need to do anything about is if
	 sp, ap, or fp is our second argument, in which case we must swap
	 the innermost first argument and our second argument.  */

      if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
	  && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
	      || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
	      || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
	{
	  tree t = TREE_OPERAND (exp, 1);

	  TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
	  TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
	}

      /* If the result is to be ptr_mode and we are adding an integer to
	 something, we might be forming a constant.  So try to use
	 plus_constant.  If it produces a sum and we can't accept it,
	 use force_operand.  This allows P = &ARR[const] to generate
	 efficient code on machines where a SYMBOL_REF is not a valid
	 address.

	 If this is an EXPAND_SUM call, always return the sum.  */
      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
	  || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
	{
	  if (modifier == EXPAND_STACK_PARM)
	    target = 0;
	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	      && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
	    {
	      rtx constant_part;

	      op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
				 EXPAND_SUM);
	      /* Use immed_double_const to ensure that the constant is
		 truncated according to the mode of OP1, then sign extended
		 to a HOST_WIDE_INT.  Using the constant directly can result
		 in non-canonical RTL in a 64x32 cross compile.  */
	      constant_part
		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
				      (HOST_WIDE_INT) 0,
				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
	      op1 = plus_constant (op1, INTVAL (constant_part));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op1 = force_operand (op1, target);
	      return op1;
	    }
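	  /* For illustration: in a 64-bit-host, 32-bit-target cross,
	     expanding &arr[10] for a global char array ARR produces
	     roughly

	       (const:SI (plus:SI (symbol_ref:SI ("arr")) (const_int 10)))

	     via plus_constant; immed_double_const first forces the offset
	     into canonical SImode form so that host-wide constants do not
	     leak non-canonical bits into the RTL.  */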

	  else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
		   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
		   && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
	    {
	      rtx constant_part;

	      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
				 (modifier == EXPAND_INITIALIZER
				  ? EXPAND_INITIALIZER : EXPAND_SUM));
	      if (! CONSTANT_P (op0))
		{
		  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
				     VOIDmode, modifier);
		  /* Don't go to both_summands if modifier
		     says it's not right to return a PLUS.  */
		  if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		    goto binop2;
		  goto both_summands;
		}
	      /* Use immed_double_const to ensure that the constant is
		 truncated according to the mode of OP1, then sign extended
		 to a HOST_WIDE_INT.  Using the constant directly can result
		 in non-canonical RTL in a 64x32 cross compile.  */
	      constant_part
		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
				      (HOST_WIDE_INT) 0,
				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
	      op0 = plus_constant (op0, INTVAL (constant_part));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op0 = force_operand (op0, target);
	      return op0;
	    }
	}

      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;

      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
	  || mode != ptr_mode)
	{
	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
	  if (! operand_equal_p (TREE_OPERAND (exp, 0),
				 TREE_OPERAND (exp, 1), 0))
	    op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
	  else
	    op1 = op0;
	  if (op0 == const0_rtx)
	    return op1;
	  if (op1 == const0_rtx)
	    return op0;
	  goto binop2;
	}

      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
      if (! operand_equal_p (TREE_OPERAND (exp, 0),
			     TREE_OPERAND (exp, 1), 0))
	op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
			   VOIDmode, modifier);
      else
	op1 = op0;

      /* We come here from MINUS_EXPR when the second operand is a
	 constant.  */
    both_summands:
      /* Make sure any term that's a sum with a constant comes last.  */
      if (GET_CODE (op0) == PLUS
	  && CONSTANT_P (XEXP (op0, 1)))
	{
	  temp = op0;
	  op0 = op1;
	  op1 = temp;
	}
      /* If adding to a sum including a constant,
	 associate it to put the constant outside.  */
      if (GET_CODE (op1) == PLUS
	  && CONSTANT_P (XEXP (op1, 1)))
	{
	  rtx constant_term = const0_rtx;

	  temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
	  if (temp != 0)
	    op0 = temp;
	  /* Ensure that MULT comes first if there is one.  */
	  else if (GET_CODE (op0) == MULT)
	    op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
	  else
	    op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);

	  /* Let's also eliminate constants from op0 if possible.  */
	  op0 = eliminate_constant_term (op0, &constant_term);

	  /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
	     their sum should be a constant.  Form it into OP1, since the
	     result we want will then be OP0 + OP1.  */

	  temp = simplify_binary_operation (PLUS, mode, constant_term,
					    XEXP (op1, 1));
	  if (temp != 0)
	    op1 = temp;
	  else
	    op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
	}

      /* Put a constant term last and put a multiplication first.  */
      if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
	temp = op1, op1 = op0, op0 = temp;

      temp = simplify_binary_operation (PLUS, mode, op0, op1);
      return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
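      /* For illustration: the reassociation above turns, e.g.,

	   op0 = (reg:SI 60)
	   op1 = (plus:SI (reg:SI 61) (const_int 8))

	 into (plus:SI (plus:SI (reg:SI 60) (reg:SI 61)) (const_int 8)),
	 keeping any MULT first and the constant outermost, so the result
	 matches the canonical base + index*scale + displacement shape
	 that address recognition expects.  */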

    case MINUS_EXPR:
      /* For initializers, we are allowed to return a MINUS of two
	 symbolic constants.  Here we handle all cases when both operands
	 are constant.  */
      /* Handle difference of two symbolic constants,
	 for the sake of an initializer.  */
      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  && really_constant_p (TREE_OPERAND (exp, 0))
	  && really_constant_p (TREE_OPERAND (exp, 1)))
	{
	  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
				 modifier);
	  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
				 modifier);

	  /* If the last operand is a CONST_INT, use plus_constant of
	     the negated constant.  Else make the MINUS.  */
	  if (GET_CODE (op1) == CONST_INT)
	    return plus_constant (op0, - INTVAL (op1));
	  else
	    return gen_rtx_MINUS (mode, op0, op1);
	}

      this_optab = ! unsignedp && flag_trapv
		   && (GET_MODE_CLASS (mode) == MODE_INT)
		   ? subv_optab : sub_optab;

      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
	  || mode != ptr_mode)
	goto binop;

      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;

      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);

      /* Convert A - const to A + (-const).  */
      if (GET_CODE (op1) == CONST_INT)
	{
	  op1 = negate_rtx (mode, op1);
	  goto both_summands;
	}

      goto binop2;
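      /* For illustration: in pointer-mode arithmetic, p - 4 is rewritten
	 here as p + (-4): op1 becomes (const_int -4) and control falls
	 into both_summands, so the same PLUS canonicalization and address
	 formation is reused for subtraction of constants.  */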

    case MULT_EXPR:
      /* If first operand is constant, swap them.
	 Thus the following special case checks need only
	 check the second operand.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
	{
	  tree t1 = TREE_OPERAND (exp, 0);
	  TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
	  TREE_OPERAND (exp, 1) = t1;
	}

      /* Attempt to return something suitable for generating an
	 indexed address, for machines that support that.  */

      if (modifier == EXPAND_SUM && mode == ptr_mode
	  && host_integerp (TREE_OPERAND (exp, 1), 0))
	{
	  tree exp1 = TREE_OPERAND (exp, 1);

	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
			     EXPAND_SUM);

	  /* If we knew for certain that this is arithmetic for an array
	     reference, and we knew the bounds of the array, then we could
	     apply the distributive law across (PLUS X C) for constant C.
	     Without such knowledge, we risk overflowing the computation
	     when both X and C are large, but X+C isn't.  */
	  /* ??? Could perhaps special-case EXP being unsigned and C being
	     positive.  In that case we are certain that X+C is no smaller
	     than X and so the transformed expression will overflow iff the
	     original would have.  */

	  if (GET_CODE (op0) != REG)
	    op0 = force_operand (op0, NULL_RTX);
	  if (GET_CODE (op0) != REG)
	    op0 = copy_to_mode_reg (mode, op0);

	  return gen_rtx_MULT (mode, op0,
			       gen_int_mode (tree_low_cst (exp1, 0),
					     TYPE_MODE (TREE_TYPE (exp1))));
	}

      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;

      if (modifier == EXPAND_STACK_PARM)
	target = 0;

      /* Check for multiplying things that have been extended
	 from a narrower type.  If this machine supports multiplying
	 in that narrower type with a result in the desired type,
	 do it that way, and avoid the explicit type-conversion.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
	  && TREE_CODE (type) == INTEGER_TYPE
	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
	       && int_fits_type_p (TREE_OPERAND (exp, 1),
				   TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	       /* Don't use a widening multiply if a shift will do.  */
	       && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		    > HOST_BITS_PER_WIDE_INT)
		   || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
	      ||
	      (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
	       && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
		   ==
		   TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
	       /* If both operands are extended, they must either both
		  be zero-extended or both be sign-extended.  */
	       && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
		   ==
		   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
	{
	  enum machine_mode innermode
	    = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
	  optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
			       ? smul_widen_optab : umul_widen_optab);
	  this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
			? umul_widen_optab : smul_widen_optab);
	  if (mode == GET_MODE_WIDER_MODE (innermode))
	    {
	      if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
		{
		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     NULL_RTX, VOIDmode, 0);
		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
		    op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
				       VOIDmode, 0);
		  else
		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
				       NULL_RTX, VOIDmode, 0);
		  goto binop2;
		}
	      else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
		       && innermode == word_mode)
		{
		  rtx htem;
		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     NULL_RTX, VOIDmode, 0);
		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
		    op1 = convert_modes (innermode, mode,
					 expand_expr (TREE_OPERAND (exp, 1),
						      NULL_RTX, VOIDmode, 0),
					 unsignedp);
		  else
		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
				       NULL_RTX, VOIDmode, 0);
		  temp = expand_binop (mode, other_optab, op0, op1, target,
				       unsignedp, OPTAB_LIB_WIDEN);
		  htem = expand_mult_highpart_adjust (innermode,
						      gen_highpart (innermode, temp),
						      op0, op1,
						      gen_highpart (innermode, temp),
						      unsignedp);
		  emit_move_insn (gen_highpart (innermode, temp), htem);
		  return temp;
		}
	    }
	}
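      /* For illustration: on a 32-bit target, code such as

	   short a, b;
	   int c = (int) a * (int) b;

	 matches the test above: both operands are extensions from HImode,
	 so a single widening multiply (a mulhisi3-style pattern, where the
	 target provides one) computes the SImode product directly instead
	 of extending both operands first.  */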
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (! operand_equal_p (TREE_OPERAND (exp, 0),
			     TREE_OPERAND (exp, 1), 0))
	op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      else
	op1 = op0;
      return expand_mult (mode, op0, op1, target, unsignedp);

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      /* Possible optimization: compute the dividend with EXPAND_SUM
	 then if the divisor is constant can optimize the case
	 where some terms of the dividend have coeffs divisible by it.  */
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);

    case RDIV_EXPR:
      /* Emit a/b as a*(1/b).  Later we may be able to CSE the reciprocal,
	 saving an expensive divide.  If not, combine will rebuild the
	 original computation.  */
      if (flag_unsafe_math_optimizations && optimize && !optimize_size
	  && TREE_CODE (type) == REAL_TYPE
	  && !real_onep (TREE_OPERAND (exp, 0)))
	return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
				   build (RDIV_EXPR, type,
					  build_real (type, dconst1),
					  TREE_OPERAND (exp, 1))),
			    target, tmode, modifier);
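      /* For illustration: with -funsafe-math-optimizations, the rewrite
	 above turns

	   double f (double a, double b) { return a / b; }

	 into a * (1.0 / b); if several divisions share the divisor, CSE
	 can then compute 1.0 / b once.  The real_onep check skips the
	 case where A is already 1.0, for which the rewrite would gain
	 nothing.  */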
      this_optab = sdiv_optab;
      goto binop;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);

    case FIX_ROUND_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_CEIL_EXPR:
      abort ();			/* Not used for C.  */

    case FIX_TRUNC_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
	target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);
      return target;

    case FLOAT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
	target = gen_reg_rtx (mode);
      /* expand_float can't figure out what to do if FROM has VOIDmode.
	 So give it the correct mode.  With -O, cse will optimize this.  */
      if (GET_MODE (op0) == VOIDmode)
	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
				op0);
      expand_float (target, op0,
		    TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;

    case NEGATE_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      temp = expand_unop (mode,
			  ! unsignedp && flag_trapv
			  && (GET_MODE_CLASS (mode) == MODE_INT)
			  ? negv_optab : neg_optab, op0, target, 0);
      if (temp == 0)
	abort ();
      return temp;

    case ABS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;

      /* Handle complex values specially.  */
      if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
	return expand_complex_abs (mode, op0, target, unsignedp);

      /* Unsigned abs is simply the operand.  Testing here means we don't
	 risk generating incorrect code below.  */
      if (TREE_UNSIGNED (type))
	return op0;

      return expand_abs (mode, op0, target, unsignedp,
			 safe_from_p (target, TREE_OPERAND (exp, 0), 1));

    case MAX_EXPR:
    case MIN_EXPR:
      target = original_target;
      if (target == 0
	  || modifier == EXPAND_STACK_PARM
	  || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
	  || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
	  || GET_MODE (target) != mode
	  || (GET_CODE (target) == REG
	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
	target = gen_reg_rtx (mode);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);

      /* First try to do it with a special MIN or MAX instruction.
	 If that does not win, use a conditional jump to select the proper
	 value.  */
      this_optab = (TREE_UNSIGNED (type)
		    ? (code == MIN_EXPR ? umin_optab : umax_optab)
		    : (code == MIN_EXPR ? smin_optab : smax_optab));

      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
			   OPTAB_WIDEN);
      if (temp != 0)
	return temp;

      /* At this point, a MEM target is no longer useful; we will get better
	 code without it.  */

      if (GET_CODE (target) == MEM)
	target = gen_reg_rtx (mode);

      if (target != op0)
	emit_move_insn (target, op0);

      op0 = gen_label_rtx ();

      /* If this mode is an integer too wide to compare properly,
	 compare word by word.  Rely on cse to optimize constant cases.  */
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && ! can_compare_p (GE, mode, ccp_jump))
	{
	  if (code == MAX_EXPR)
	    do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
					  target, op1, NULL_RTX, op0);
	  else
	    do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
					  op1, target, NULL_RTX, op0);
	}
      else
	{
	  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
	  do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
				   unsignedp, mode, NULL_RTX, NULL_RTX,
				   op0);
	}
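      /* For illustration: when no min/max instruction exists, MAX (a, b)
	 is emitted essentially as

	     target = a;
	     if (target >= b) goto done;
	     target = b;
	   done:

	 which is the compare-and-jump sequence completed just below.  */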
      emit_move_insn (target, op1);
      emit_label (op0);
      return target;

    case BIT_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;

    case FFS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      temp = expand_unop (mode, ffs_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;

    case CLZ_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, clz_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;

    case CTZ_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, ctz_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;

    case POPCOUNT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, popcount_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;

    case PARITY_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, parity_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;

      /* ??? Can optimize bitwise operations with one arg constant.
	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
	 and (a bitwise1 b) bitwise2 b (etc)
	 but that is probably not worth while.  */

      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
	 boolean values when we want in all cases to compute both of them.  In
	 general it is fastest to do TRUTH_AND_EXPR by computing both operands
	 as actual zero-or-1 values and then bitwise anding.  In cases where
	 there cannot be any side effects, better code would be made by
	 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
	 how to recognize those cases.  */

    case TRUTH_AND_EXPR:
    case BIT_AND_EXPR:
      this_optab = and_optab;
      goto binop;

    case TRUTH_OR_EXPR:
    case BIT_IOR_EXPR:
      this_optab = ior_optab;
      goto binop;

    case TRUTH_XOR_EXPR:
    case BIT_XOR_EXPR:
      this_optab = xor_optab;
      goto binop;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
			   unsignedp);

      /* Could determine the answer when only additive constants differ.  Also,
	 the addition of one can be handled by changing the condition.  */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
      temp = do_store_flag (exp,
			    modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
			    tmode != VOIDmode ? tmode : mode, 0);
      if (temp != 0)
	return temp;

      /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
	  && original_target
	  && GET_CODE (original_target) == REG
	  && (GET_MODE (original_target)
	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	{
	  temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
			      VOIDmode, 0);

	  /* If temp is constant, we can just compute the result.  */
	  if (GET_CODE (temp) == CONST_INT)
	    {
	      if (INTVAL (temp) != 0)
		emit_move_insn (target, const1_rtx);
	      else
		emit_move_insn (target, const0_rtx);

	      return target;
	    }

	  if (temp != original_target)
	    {
	      enum machine_mode mode1 = GET_MODE (temp);
	      if (mode1 == VOIDmode)
		mode1 = tmode != VOIDmode ? tmode : mode;

	      temp = copy_to_mode_reg (mode1, temp);
	    }

	  op1 = gen_label_rtx ();
	  emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
				   GET_MODE (temp), unsignedp, op1);
	  emit_move_insn (temp, const1_rtx);
	  emit_label (op1);
	  return temp;
	}
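      /* For illustration: for  int f (int x) { return x != 0; }  when
	 do_store_flag cannot produce the value directly, the fallback
	 above loads X into the return register and emits roughly

	     if (temp == 0) goto L1;
	     temp = 1;
	   L1:

	 so only a compare-and-branch is needed, with no separate
	 flag-materializing instruction.  */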

      /* If no set-flag instruction, must generate a conditional
	 store into a temporary variable.  Drop through
	 and handle this like && and ||.  */

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      if (! ignore
	  && (target == 0
	      || modifier == EXPAND_STACK_PARM
	      || ! safe_from_p (target, exp, 1)
	      /* Make sure we don't have a hard reg (such as function's return
		 value) live across basic blocks, if not optimizing.  */
	      || (!optimize && GET_CODE (target) == REG
		  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

      if (target)
	emit_clr_insn (target);

      op1 = gen_label_rtx ();
      jumpifnot (exp, op1);

      if (target)
	emit_0_to_1_insn (target);

      emit_label (op1);
      return ignore ? const0_rtx : target;

    case TRUTH_NOT_EXPR:
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
      /* The parser is careful to generate TRUTH_NOT_EXPR
	 only with operands that are always zero or one.  */
      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
			   target, 1, OPTAB_LIB_WIDEN);
      if (temp == 0)
	abort ();
      return temp;

    case COMPOUND_EXPR:
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      emit_queue ();
      return expand_expr (TREE_OPERAND (exp, 1),
			  (ignore ? const0_rtx : target),
			  VOIDmode, modifier);

    case COND_EXPR:
      /* If we would have a "singleton" (see below) were it not for a
	 conversion in each arm, bring that conversion back out.  */
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
	  && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
	  && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
	      == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
	{
	  tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
	  tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);

	  if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
	       && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
	      || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
		  && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
	      || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
		  && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
	      || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
		  && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
	    return expand_expr (build1 (NOP_EXPR, type,
					build (COND_EXPR, TREE_TYPE (iftrue),
					       TREE_OPERAND (exp, 0),
					       iftrue, iffalse)),
				target, tmode, modifier);
	}

      {
	/* Note that COND_EXPRs whose type is a structure or union
	   are required to be constructed to contain assignments of
	   a temporary variable, so that we can evaluate them here
	   for side effect only.  If type is void, we must do likewise.  */

	/* If an arm of the branch requires a cleanup,
	   only that cleanup is performed.  */

	tree singleton = 0;
	tree binary_op = 0, unary_op = 0;

	/* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
	   convert it to our mode, if necessary.  */
	if (integer_onep (TREE_OPERAND (exp, 1))
	    && integer_zerop (TREE_OPERAND (exp, 2))
	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
	  {
	    if (ignore)
	      {
		expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			     modifier);
		return const0_rtx;
	      }

	    if (modifier == EXPAND_STACK_PARM)
	      target = 0;
	    op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
	    if (GET_MODE (op0) == mode)
	      return op0;

	    if (target == 0)
	      target = gen_reg_rtx (mode);
	    convert_move (target, op0, unsignedp);
	    return target;
	  }

	/* Check for X ? A + B : A.  If we have this, we can copy A to the
	   output and conditionally add B.  Similarly for unary operations.
	   Don't do this if X has side-effects because those side effects
	   might affect A or B and the "?" operation is a sequence point in
	   ANSI.  (operand_equal_p tests for side effects.)  */

	if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
	    && operand_equal_p (TREE_OPERAND (exp, 2),
				TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
	  singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
		 && operand_equal_p (TREE_OPERAND (exp, 1),
				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
	  singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
		 && operand_equal_p (TREE_OPERAND (exp, 2),
				     TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
	  singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
		 && operand_equal_p (TREE_OPERAND (exp, 1),
				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
	  singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);

	/* If we are not to produce a result, we have no target.  Otherwise,
	   if a target was specified use it; it will not be used as an
	   intermediate target unless it is safe.  If no target, use a
	   temporary.  */

	if (ignore)
	  temp = 0;
	else if (modifier == EXPAND_STACK_PARM)
	  temp = assign_temp (type, 0, 0, 1);
	else if (original_target
		 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
		     || (singleton && GET_CODE (original_target) == REG
			 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
			 && original_target == var_rtx (singleton)))
		 && GET_MODE (original_target) == mode
#ifdef HAVE_conditional_move
		 && (! can_conditionally_move_p (mode)
		     || GET_CODE (original_target) == REG
		     || TREE_ADDRESSABLE (type))
#endif
		 && (GET_CODE (original_target) != MEM
		     || TREE_ADDRESSABLE (type)))
	  temp = original_target;
	else if (TREE_ADDRESSABLE (type))
	  abort ();
	else
	  temp = assign_temp (type, 0, 0, 1);

	/* If we had X ? A + C : A, with C a constant power of 2, and we can
	   do the test of X as a store-flag operation, do this as
	   A + ((X != 0) << log C).  Similarly for other simple binary
	   operators.  Only do for C == 1 if BRANCH_COST is low.  */
	if (temp && singleton && binary_op
	    && (TREE_CODE (binary_op) == PLUS_EXPR
		|| TREE_CODE (binary_op) == MINUS_EXPR
		|| TREE_CODE (binary_op) == BIT_IOR_EXPR
		|| TREE_CODE (binary_op) == BIT_XOR_EXPR)
	    && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
		: integer_onep (TREE_OPERAND (binary_op, 1)))
	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
	  {
	    rtx result;
	    tree cond;
	    optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
			    ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
			       ? addv_optab : add_optab)
			    : TREE_CODE (binary_op) == MINUS_EXPR
			    ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
			       ? subv_optab : sub_optab)
			    : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
			    : xor_optab);

	    /* If we had X ? A : A + 1, do this as A + (X == 0).  */
	    if (singleton == TREE_OPERAND (exp, 1))
	      cond = invert_truthvalue (TREE_OPERAND (exp, 0));
	    else
	      cond = TREE_OPERAND (exp, 0);

	    result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
					   ? temp : NULL_RTX),
				    mode, BRANCH_COST <= 1);

	    if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
	      result = expand_shift (LSHIFT_EXPR, mode, result,
				     build_int_2 (tree_log2
						  (TREE_OPERAND
						   (binary_op, 1)),
						  0),
				     (safe_from_p (temp, singleton, 1)
				      ? temp : NULL_RTX), 0);

	    if (result)
	      {
		op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
		return expand_binop (mode, boptab, op1, result, temp,
				     unsignedp, OPTAB_LIB_WIDEN);
	      }
	  }
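	/* For illustration: given

	     int f (int x, int a) { return x ? a + 4 : a; }

	   the transformation above computes a + ((x != 0) << 2) with a
	   store-flag and a shift, avoiding a branch entirely when the
	   target can materialize x != 0 cheaply.  */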

	do_pending_stack_adjust ();
	NO_DEFER_POP;
	op0 = gen_label_rtx ();

	if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
	  {
	    if (temp != 0)
	      {
		/* If the target conflicts with the other operand of the
		   binary op, we can't use it.  Also, we can't use the target
		   if it is a hard register, because evaluating the condition
		   might clobber it.  */
		if ((binary_op
		     && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
		    || (GET_CODE (temp) == REG
			&& REGNO (temp) < FIRST_PSEUDO_REGISTER))
		  temp = gen_reg_rtx (mode);
		store_expr (singleton, temp,
			    modifier == EXPAND_STACK_PARM ? 2 : 0);
	      }
	    else
	      expand_expr (singleton,
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	    if (singleton == TREE_OPERAND (exp, 1))
	      jumpif (TREE_OPERAND (exp, 0), op0);
	    else
	      jumpifnot (TREE_OPERAND (exp, 0), op0);

	    start_cleanup_deferral ();
	    if (binary_op && temp == 0)
	      /* Just touch the other operand.  */
	      expand_expr (TREE_OPERAND (binary_op, 1),
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	    else if (binary_op)
	      store_expr (build (TREE_CODE (binary_op), type,
				 make_tree (type, temp),
				 TREE_OPERAND (binary_op, 1)),
			  temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
	    else
	      store_expr (build1 (TREE_CODE (unary_op), type,
				  make_tree (type, temp)),
			  temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
	    op1 = op0;
	  }
	/* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
	   comparison operator.  If we have one of these cases, set the
	   output to A, branch on A (cse will merge these two references),
	   then set the output to FOO.  */
	else if (temp
		 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
		 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
		 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     TREE_OPERAND (exp, 1), 0)
		 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
		     || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
		 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
	  {
	    if (GET_CODE (temp) == REG
		&& REGNO (temp) < FIRST_PSEUDO_REGISTER)
	      temp = gen_reg_rtx (mode);
	    store_expr (TREE_OPERAND (exp, 1), temp,
			modifier == EXPAND_STACK_PARM ? 2 : 0);
	    jumpif (TREE_OPERAND (exp, 0), op0);

	    start_cleanup_deferral ();
	    store_expr (TREE_OPERAND (exp, 2), temp,
			modifier == EXPAND_STACK_PARM ? 2 : 0);
	    op1 = op0;
	  }
	else if (temp
		 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
		 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
		 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     TREE_OPERAND (exp, 2), 0)
		 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
		     || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
		 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
	  {
	    if (GET_CODE (temp) == REG
		&& REGNO (temp) < FIRST_PSEUDO_REGISTER)
	      temp = gen_reg_rtx (mode);
	    store_expr (TREE_OPERAND (exp, 2), temp,
			modifier == EXPAND_STACK_PARM ? 2 : 0);
	    jumpifnot (TREE_OPERAND (exp, 0), op0);

	    start_cleanup_deferral ();
	    store_expr (TREE_OPERAND (exp, 1), temp,
			modifier == EXPAND_STACK_PARM ? 2 : 0);
	    op1 = op0;
	  }
	else
	  {
	    op1 = gen_label_rtx ();
	    jumpifnot (TREE_OPERAND (exp, 0), op0);

	    start_cleanup_deferral ();

	    /* One branch of the cond can be void, if it never returns.
	       For example, A ? throw : E.  */
	    if (temp != 0
		&& TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
	      store_expr (TREE_OPERAND (exp, 1), temp,
			  modifier == EXPAND_STACK_PARM ? 2 : 0);
	    else
	      expand_expr (TREE_OPERAND (exp, 1),
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	    end_cleanup_deferral ();
	    emit_queue ();
	    emit_jump_insn (gen_jump (op1));
	    emit_barrier ();
	    emit_label (op0);
	    start_cleanup_deferral ();
	    if (temp != 0
		&& TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
	      store_expr (TREE_OPERAND (exp, 2), temp,
			  modifier == EXPAND_STACK_PARM ? 2 : 0);
	    else
	      expand_expr (TREE_OPERAND (exp, 2),
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	  }

	end_cleanup_deferral ();

	emit_queue ();
	emit_label (op1);
	OK_DEFER_POP;

	return temp;
      }

    case TARGET_EXPR:
      {
	/* Something needs to be initialized, but we didn't know
	   where that thing was when building the tree.  For example,
	   it could be the return value of a function, or a parameter
	   to a function which is laid out on the stack, or a temporary
	   variable which must be passed by reference.

	   We guarantee that the expression will either be constructed
	   or copied into our original target.  */

	tree slot = TREE_OPERAND (exp, 0);
	tree cleanups = NULL_TREE;
	tree exp1;

	if (TREE_CODE (slot) != VAR_DECL)
	  abort ();

	if (! ignore)
	  target = original_target;

	/* Set this here so that if we get a target that refers to a
	   register variable that's already been used, put_reg_into_stack
	   knows that it should fix up those uses.  */
	TREE_USED (slot) = 1;

	if (target == 0)
	  {
	    if (DECL_RTL_SET_P (slot))
	      {
		target = DECL_RTL (slot);
		/* If we have already expanded the slot, don't do
		   it again.  (mrs)  */
		if (TREE_OPERAND (exp, 1) == NULL_TREE)
		  return target;
	      }
	    else
	      {
		target = assign_temp (type, 2, 0, 1);
		/* All temp slots at this level must not conflict.  */
		preserve_temp_slots (target);
		SET_DECL_RTL (slot, target);
		if (TREE_ADDRESSABLE (slot))
		  put_var_into_stack (slot, /*rescan=*/false);

		/* Since SLOT is not known to the called function
		   to belong to its stack frame, we must build an explicit
		   cleanup.  This case occurs when we must build up a reference
		   to pass the reference as an argument.  In this case,
		   it is very likely that such a reference need not be
		   built here.  */

		if (TREE_OPERAND (exp, 2) == 0)
		  TREE_OPERAND (exp, 2)
		    = (*lang_hooks.maybe_build_cleanup) (slot);
		cleanups = TREE_OPERAND (exp, 2);
	      }
	  }
	else
	  {
	    /* This case does occur, when expanding a parameter which
	       needs to be constructed on the stack.  The target
	       is the actual stack address that we want to initialize.
	       The function we call will perform the cleanup in this case.  */

	    /* If we have already assigned it space, use that space,
	       not target that we were passed in, as our target
	       parameter is only a hint.  */
	    if (DECL_RTL_SET_P (slot))
	      {
		target = DECL_RTL (slot);
		/* If we have already expanded the slot, don't do
		   it again.  (mrs)  */
		if (TREE_OPERAND (exp, 1) == NULL_TREE)
		  return target;
	      }
	    else
	      {
		SET_DECL_RTL (slot, target);
		/* If we must have an addressable slot, then make sure that
		   the RTL that we just stored in slot is OK.  */
		if (TREE_ADDRESSABLE (slot))
		  put_var_into_stack (slot, /*rescan=*/true);
	      }
	  }

	exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
	/* Mark it as expanded.  */
	TREE_OPERAND (exp, 1) = NULL_TREE;

	store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);

	expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));

	return target;
      }

    case INIT_EXPR:
      {
	tree lhs = TREE_OPERAND (exp, 0);
	tree rhs = TREE_OPERAND (exp, 1);

	temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
	return temp;
      }

    case MODIFY_EXPR:
      {
	/* If lhs is complex, expand calls in rhs before computing it.
	   That's so we don't compute a pointer and save it over a
	   call.  If lhs is simple, compute it first so we can give it
	   as a target if the rhs is just a call.  This avoids an
	   extra temp and copy and that prevents a partial-subsumption
	   which makes bad code.  Actually we could treat
	   component_ref's of vars like vars.  */

	tree lhs = TREE_OPERAND (exp, 0);
	tree rhs = TREE_OPERAND (exp, 1);

	temp = 0;

	/* Check for |= or &= of a bitfield of size one into another bitfield
	   of size 1.  In this case, (unless we need the result of the
	   assignment) we can do this more efficiently with a
	   test followed by an assignment, if necessary.

	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
	   things change so we do, this code should be enhanced to
	   support it.  */
	if (ignore
	    && TREE_CODE (lhs) == COMPONENT_REF
	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
		|| TREE_CODE (rhs) == BIT_AND_EXPR)
	    && TREE_OPERAND (rhs, 0) == lhs
	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
	    && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
	    && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
	  {
	    rtx label = gen_label_rtx ();

	    do_jump (TREE_OPERAND (rhs, 1),
		     TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
		     TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
	    expand_assignment (lhs, convert (TREE_TYPE (rhs),
					     (TREE_CODE (rhs) == BIT_IOR_EXPR
					      ? integer_one_node
					      : integer_zero_node)),
			       0, 0);
	    do_pending_stack_adjust ();
	    emit_label (label);
	    return const0_rtx;
	  }
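	/* For illustration: for one-bit fields, code such as

	     struct s { unsigned a : 1, b : 1; } *p;
	     p->a |= p->b;

	   becomes essentially "if (p->b) p->a = 1;" here: jump on the
	   tested bit, then store a constant, rather than emitting a
	   load/or/store sequence.  */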
9171
bbf6f052 9172 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
0fb7aeda 9173
bbf6f052
RK
9174 return temp;
9175 }
9176
6e7f84a7
APB
9177 case RETURN_EXPR:
9178 if (!TREE_OPERAND (exp, 0))
9179 expand_null_return ();
9180 else
9181 expand_return (TREE_OPERAND (exp, 0));
9182 return const0_rtx;
9183
bbf6f052
RK
9184 case PREINCREMENT_EXPR:
9185 case PREDECREMENT_EXPR:
7b8b9722 9186 return expand_increment (exp, 0, ignore);
bbf6f052
RK
9187
9188 case POSTINCREMENT_EXPR:
9189 case POSTDECREMENT_EXPR:
9190 /* Faster to treat as pre-increment if result is not used. */
7b8b9722 9191 return expand_increment (exp, ! ignore, ignore);
bbf6f052
RK
9192
9193 case ADDR_EXPR:
8403445a
AM
9194 if (modifier == EXPAND_STACK_PARM)
9195 target = 0;
bbf6f052
RK
9196 /* Are we taking the address of a nested function? */
9197 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
38ee6ed9 9198 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
e5e809f4
JL
9199 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
9200 && ! TREE_STATIC (exp))
bbf6f052
RK
9201 {
9202 op0 = trampoline_address (TREE_OPERAND (exp, 0));
9203 op0 = force_operand (op0, target);
9204 }
682ba3a6
RK
9205 /* If we are taking the address of something erroneous, just
9206 return a zero. */
9207 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9208 return const0_rtx;
d6b6783b
RK
9209 /* If we are taking the address of a constant and are at the
9210 top level, we have to use output_constant_def since we can't
9211 call force_const_mem at top level. */
9212 else if (cfun == 0
9213 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
9214 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
9215 == 'c')))
9216 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
bbf6f052
RK
9217 else
9218 {
e287fd6e
RK
9219 /* We make sure to pass const0_rtx down if we came in with
9220 ignore set, to avoid doing the cleanups twice for something. */
        op0 = expand_expr (TREE_OPERAND (exp, 0),
                           ignore ? const0_rtx : NULL_RTX, VOIDmode,
                           (modifier == EXPAND_INITIALIZER
                            ? modifier : EXPAND_CONST_ADDRESS));

        /* If we are going to ignore the result, OP0 will have been set
           to const0_rtx, so just return it.  Don't get confused and
           think we are taking the address of the constant.  */
        if (ignore)
          return op0;

        /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
           clever and returns a REG when given a MEM.  */
        op0 = protect_from_queue (op0, 1);

        /* We would like the object in memory.  If it is a constant, we can
           have it be statically allocated into memory.  For a non-constant,
           we need to allocate some memory and store the value into it.  */

        if (CONSTANT_P (op0))
          op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
                                 op0);
        else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
                 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
                 || GET_CODE (op0) == PARALLEL || GET_CODE (op0) == LO_SUM)
          {
            /* If the operand is a SAVE_EXPR, we can deal with this by
               forcing the SAVE_EXPR into memory.  */
            if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
              {
                put_var_into_stack (TREE_OPERAND (exp, 0),
                                    /*rescan=*/true);
                op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
              }
            else
              {
                /* If this object is in a register, it can't be BLKmode.  */
                tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
                rtx memloc = assign_temp (inner_type, 1, 1, 1);

                if (GET_CODE (op0) == PARALLEL)
                  /* Handle calls that pass values in multiple
                     non-contiguous locations.  The Irix 6 ABI has examples
                     of this.  */
                  emit_group_store (memloc, op0,
                                    int_size_in_bytes (inner_type));
                else
                  emit_move_insn (memloc, op0);

                op0 = memloc;
              }
          }

        if (GET_CODE (op0) != MEM)
          abort ();

        mark_temp_addr_taken (op0);
        if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
          {
            op0 = XEXP (op0, 0);
#ifdef POINTERS_EXTEND_UNSIGNED
            if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
                && mode == ptr_mode)
              op0 = convert_memory_address (ptr_mode, op0);
#endif
            return op0;
          }

        /* If OP0 is not aligned at least as much as the type requires, we
           need to make a temporary, copy OP0 to it, and take the address of
           the temporary.  We want to use the alignment of the type, not of
           the operand.  Note that this is incorrect for FUNCTION_TYPE, but
           the test for BLKmode means that can't happen.  The test for
           BLKmode is because we never make mis-aligned MEMs with
           non-BLKmode.

           We don't need to do this at all if the machine doesn't have
           strict alignment.  */
        if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
            && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
                > MEM_ALIGN (op0))
            && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
          {
            tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
            rtx new;

            if (TYPE_ALIGN_OK (inner_type))
              abort ();

            if (TREE_ADDRESSABLE (inner_type))
              {
                /* We can't make a bitwise copy of this object, so fail.  */
                error ("cannot take the address of an unaligned member");
                return const0_rtx;
              }

            new = assign_stack_temp_for_type
              (TYPE_MODE (inner_type),
               MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
               : int_size_in_bytes (inner_type),
               1, build_qualified_type (inner_type,
                                        (TYPE_QUALS (inner_type)
                                         | TYPE_QUAL_CONST)));

            emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
                             (modifier == EXPAND_STACK_PARM
                              ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

            op0 = new;
          }

        op0 = force_operand (XEXP (op0, 0), target);
      }

      if (flag_force_addr
          && GET_CODE (op0) != REG
          && modifier != EXPAND_CONST_ADDRESS
          && modifier != EXPAND_INITIALIZER
          && modifier != EXPAND_SUM)
        op0 = force_reg (Pmode, op0);

      if (GET_CODE (op0) == REG
          && ! REG_USERVAR_P (op0))
        mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));

#ifdef POINTERS_EXTEND_UNSIGNED
      if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
          && mode == ptr_mode)
        op0 = convert_memory_address (ptr_mode, op0);
#endif

      return op0;

    case ENTRY_VALUE_EXPR:
      abort ();

      /* COMPLEX type for Extended Pascal & Fortran.  */
    case COMPLEX_EXPR:
      {
        enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
        rtx insns;

        /* Get the rtx code of the operands.  */
        op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
        op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);

        if (! target)
          target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

        start_sequence ();

        /* Move the real (op0) and imaginary (op1) parts to their location.  */
        emit_move_insn (gen_realpart (mode, target), op0);
        emit_move_insn (gen_imagpart (mode, target), op1);

        insns = get_insns ();
        end_sequence ();

        /* Complex construction should appear as a single unit.  */
        /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
           each with a separate pseudo as destination.
           It's not correct for flow to treat them as a unit.  */
        if (GET_CODE (target) != CONCAT)
          emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
        else
          emit_insn (insns);

        return target;
      }
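
      /* For illustration, a sketch of what the case above produces: for a
         GNU C fragment such as

             __complex__ double z = 1.5 + 2.5i;

         it emits roughly two moves,

             (set (real part of Z) 1.5)
             (set (imag part of Z) 2.5)

         and, when Z is not a CONCAT, wraps them with
         emit_no_conflict_block so that later passes treat the
         construction as a single unit.  */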

    case REALPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return gen_realpart (mode, op0);

    case IMAGPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return gen_imagpart (mode, op0);

    case CONJ_EXPR:
      {
        enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
        rtx imag_t;
        rtx insns;

        op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);

        if (! target)
          target = gen_reg_rtx (mode);

        start_sequence ();

        /* Store the realpart and the negated imagpart to target.  */
        emit_move_insn (gen_realpart (partmode, target),
                        gen_realpart (partmode, op0));

        imag_t = gen_imagpart (partmode, target);
        temp = expand_unop (partmode,
                            ! unsignedp && flag_trapv
                            && (GET_MODE_CLASS (partmode) == MODE_INT)
                            ? negv_optab : neg_optab,
                            gen_imagpart (partmode, op0), imag_t, 0);
        if (temp != imag_t)
          emit_move_insn (imag_t, temp);

        insns = get_insns ();
        end_sequence ();

        /* Conjugate should appear as a single unit.
           If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
           each with a separate pseudo as destination.
           It's not correct for flow to treat them as a unit.  */
        if (GET_CODE (target) != CONCAT)
          emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
        else
          emit_insn (insns);

        return target;
      }

    case TRY_CATCH_EXPR:
      {
        tree handler = TREE_OPERAND (exp, 1);

        expand_eh_region_start ();

        op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);

        expand_eh_region_end_cleanup (handler);

        return op0;
      }

    case TRY_FINALLY_EXPR:
      {
        tree try_block = TREE_OPERAND (exp, 0);
        tree finally_block = TREE_OPERAND (exp, 1);

        if (!optimize || unsafe_for_reeval (finally_block) > 1)
          {
            /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
               is not sufficient, so we cannot expand the block twice.
               So we play games with GOTO_SUBROUTINE_EXPR to let us
               expand the thing only once.  */
            /* When not optimizing, we go ahead with this form since
               (1) user breakpoints operate more predictably without
               code duplication, and
               (2) we're not running any of the global optimizers
               that would explode in time/space with the highly
               connected CFG created by the indirect branching.  */

            rtx finally_label = gen_label_rtx ();
            rtx done_label = gen_label_rtx ();
            rtx return_link = gen_reg_rtx (Pmode);
            tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
                                  (tree) finally_label, (tree) return_link);
            TREE_SIDE_EFFECTS (cleanup) = 1;

            /* Start a new binding layer that will keep track of all cleanup
               actions to be performed.  */
            expand_start_bindings (2);
            target_temp_slot_level = temp_slot_level;

            expand_decl_cleanup (NULL_TREE, cleanup);
            op0 = expand_expr (try_block, target, tmode, modifier);

            preserve_temp_slots (op0);
            expand_end_bindings (NULL_TREE, 0, 0);
            emit_jump (done_label);
            emit_label (finally_label);
            expand_expr (finally_block, const0_rtx, VOIDmode, 0);
            emit_indirect_jump (return_link);
            emit_label (done_label);
          }
        else
          {
            expand_start_bindings (2);
            target_temp_slot_level = temp_slot_level;

            expand_decl_cleanup (NULL_TREE, finally_block);
            op0 = expand_expr (try_block, target, tmode, modifier);

            preserve_temp_slots (op0);
            expand_end_bindings (NULL_TREE, 0, 0);
          }

        return op0;
      }

    case GOTO_SUBROUTINE_EXPR:
      {
        rtx subr = (rtx) TREE_OPERAND (exp, 0);
        rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
        rtx return_address = gen_label_rtx ();
        emit_move_insn (return_link,
                        gen_rtx_LABEL_REF (Pmode, return_address));
        emit_jump (subr);
        emit_label (return_address);
        return const0_rtx;
      }

    case VA_ARG_EXPR:
      return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);

    case EXC_PTR_EXPR:
      return get_exception_pointer (cfun);

    case FDESC_EXPR:
      /* Function descriptors are not valid except as initialization
         constants, and should not be expanded.  */
      abort ();

    default:
      return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
    }

  /* Here to do an ordinary binary operator, generating an instruction
     from the optab already placed in `this_optab'.  */
 binop:
  if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
    subtarget = 0;
  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
 binop2:
  if (modifier == EXPAND_STACK_PARM)
    target = 0;
  temp = expand_binop (mode, this_optab, op0, op1, target,
                       unsignedp, OPTAB_LIB_WIDEN);
  if (temp == 0)
    abort ();
  return temp;
}
\f
/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
   when applied to the address of EXP produces an address known to be
   aligned more than BIGGEST_ALIGNMENT.  */

static int
is_aligning_offset (tree offset, tree exp)
{
  /* Strip off any conversions and WITH_RECORD_EXPR nodes.  */
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
         || TREE_CODE (offset) == NOP_EXPR
         || TREE_CODE (offset) == CONVERT_EXPR
         || TREE_CODE (offset) == WITH_RECORD_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* We must now have a BIT_AND_EXPR with a constant that is one less than
     a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
  if (TREE_CODE (offset) != BIT_AND_EXPR
      || !host_integerp (TREE_OPERAND (offset, 1), 1)
      || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
      || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
    return 0;

  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
     It must be NEGATE_EXPR.  Then strip any more conversions.  */
  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
         || TREE_CODE (offset) == NOP_EXPR
         || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  if (TREE_CODE (offset) != NEGATE_EXPR)
    return 0;

  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
         || TREE_CODE (offset) == NOP_EXPR
         || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
     whose type is the same as EXP.  */
  return (TREE_CODE (offset) == ADDR_EXPR
          && (TREE_OPERAND (offset, 0) == exp
              || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
                  && (TREE_TYPE (TREE_OPERAND (offset, 0))
                      == TREE_TYPE (exp)))));
}
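
/* For illustration: the pattern recognized above is roughly the tree
   form of

       offset = (- (intptr_t) &exp) & (C - 1)

   where the BIT_AND_EXPR constant is C - 1, with C a power of 2 and
   C - 1 larger than BIGGEST_ALIGNMENT.  Since &exp + offset rounds the
   address up to a multiple of C, the resulting address is known to be
   C-aligned.  */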
\f
/* Return the tree node if ARG corresponds to a string constant or zero
   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */

tree
string_constant (tree arg, tree *ptr_offset)
{
  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
    {
      *ptr_offset = size_zero_node;
      return TREE_OPERAND (arg, 0);
    }
  else if (TREE_CODE (arg) == PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
        {
          *ptr_offset = convert (sizetype, arg1);
          return TREE_OPERAND (arg0, 0);
        }
      else if (TREE_CODE (arg1) == ADDR_EXPR
               && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
        {
          *ptr_offset = convert (sizetype, arg0);
          return TREE_OPERAND (arg1, 0);
        }
    }

  return 0;
}
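
/* For illustration: for the C expression

       "hello" + 2

   ARG is a PLUS_EXPR; the code above returns the STRING_CST "hello"
   and sets *PTR_OFFSET to a sizetype constant 2, which lets callers
   such as the builtin string expanders read directly out of the
   string's body.  */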
\f
/* Expand code for a post- or pre- increment or decrement
   and return the RTX for the result.
   POST is 1 for postinc/decrements and 0 for preinc/decrements.  */

static rtx
expand_increment (tree exp, int post, int ignore)
{
  rtx op0, op1;
  rtx temp, value;
  tree incremented = TREE_OPERAND (exp, 0);
  optab this_optab = add_optab;
  int icode;
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  int op0_is_copy = 0;
  int single_insn = 0;
  /* 1 means we can't store into OP0 directly,
     because it is a subreg narrower than a word,
     and we don't dare clobber the rest of the word.  */
  int bad_subreg = 0;

  /* Stabilize any component ref that might need to be
     evaluated more than once below.  */
  if (!post
      || TREE_CODE (incremented) == BIT_FIELD_REF
      || (TREE_CODE (incremented) == COMPONENT_REF
          && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
              || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
    incremented = stabilize_reference (incremented);
  /* Nested *INCREMENT_EXPRs can happen in C++.  We must force innermost
     ones into save exprs so that they don't accidentally get evaluated
     more than once by the code below.  */
  if (TREE_CODE (incremented) == PREINCREMENT_EXPR
      || TREE_CODE (incremented) == PREDECREMENT_EXPR)
    incremented = save_expr (incremented);

  /* Compute the operands as RTX.
     Note whether OP0 is the actual lvalue or a copy of it:
     I believe it is a copy iff it is a register or subreg
     and insns were generated in computing it.  */

  temp = get_last_insn ();
  op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);

  /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
     in place but instead must do sign- or zero-extension during assignment,
     so we copy it into a new register and let the code below use it as
     a copy.

     Note that we can safely modify this SUBREG since it is known not to be
     shared (it was made by the expand_expr call above).  */

  if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
    {
      if (post)
        SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
      else
        bad_subreg = 1;
    }
  else if (GET_CODE (op0) == SUBREG
           && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
    {
      /* We cannot increment this SUBREG in place.  If we are
         post-incrementing, get a copy of the old value.  Otherwise,
         just mark that we cannot increment in place.  */
      if (post)
        op0 = copy_to_reg (op0);
      else
        bad_subreg = 1;
    }

  op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
                 && temp != get_last_insn ());
  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);

  /* Decide whether incrementing or decrementing.  */
  if (TREE_CODE (exp) == POSTDECREMENT_EXPR
      || TREE_CODE (exp) == PREDECREMENT_EXPR)
    this_optab = sub_optab;

  /* Convert decrement by a constant into a negative increment.  */
  if (this_optab == sub_optab
      && GET_CODE (op1) == CONST_INT)
    {
      op1 = GEN_INT (-INTVAL (op1));
      this_optab = add_optab;
    }

  if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
    this_optab = this_optab == add_optab ? addv_optab : subv_optab;

  /* For a preincrement, see if we can do this with a single instruction.  */
  if (!post)
    {
      icode = (int) this_optab->handlers[(int) mode].insn_code;
      if (icode != (int) CODE_FOR_nothing
          /* Make sure that OP0 is valid for operands 0 and 1
             of the insn we want to queue.  */
          && (*insn_data[icode].operand[0].predicate) (op0, mode)
          && (*insn_data[icode].operand[1].predicate) (op0, mode)
          && (*insn_data[icode].operand[2].predicate) (op1, mode))
        single_insn = 1;
    }

  /* If OP0 is not the actual lvalue, but rather a copy in a register,
     then we cannot just increment OP0.  We must therefore contrive to
     increment the original value.  Then, for postincrement, we can return
     OP0 since it is a copy of the old value.  For preincrement, expand here
     unless we can do it with a single insn.

     Likewise if storing directly into OP0 would clobber high bits
     we need to preserve (bad_subreg).  */
  if (op0_is_copy || (!post && !single_insn) || bad_subreg)
    {
      /* This is the easiest way to increment the value wherever it is.
         Problems with multiple evaluation of INCREMENTED are prevented
         because either (1) it is a component_ref or preincrement,
         in which case it was stabilized above, or (2) it is an array_ref
         with constant index in an array in a register, which is
         safe to reevaluate.  */
      tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
                             || TREE_CODE (exp) == PREDECREMENT_EXPR)
                            ? MINUS_EXPR : PLUS_EXPR),
                           TREE_TYPE (exp),
                           incremented,
                           TREE_OPERAND (exp, 1));

      while (TREE_CODE (incremented) == NOP_EXPR
             || TREE_CODE (incremented) == CONVERT_EXPR)
        {
          newexp = convert (TREE_TYPE (incremented), newexp);
          incremented = TREE_OPERAND (incremented, 0);
        }

      temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
      return post ? op0 : temp;
    }

  if (post)
    {
      /* We have a true reference to the value in OP0.
         If there is an insn to add or subtract in this mode, queue it.
         Queueing the increment insn avoids the register shuffling
         that often results if we must increment now and first save
         the old value for subsequent use.  */

#if 0 /* Turned off to avoid making extra insn for indexed memref.  */
      op0 = stabilize (op0);
#endif

      icode = (int) this_optab->handlers[(int) mode].insn_code;
      if (icode != (int) CODE_FOR_nothing
          /* Make sure that OP0 is valid for operands 0 and 1
             of the insn we want to queue.  */
          && (*insn_data[icode].operand[0].predicate) (op0, mode)
          && (*insn_data[icode].operand[1].predicate) (op0, mode))
        {
          if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
            op1 = force_reg (mode, op1);

          return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
        }
      if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
        {
          rtx addr = (general_operand (XEXP (op0, 0), mode)
                      ? force_reg (Pmode, XEXP (op0, 0))
                      : copy_to_reg (XEXP (op0, 0)));
          rtx temp, result;

          op0 = replace_equiv_address (op0, addr);
          temp = force_reg (GET_MODE (op0), op0);
          if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
            op1 = force_reg (mode, op1);

          /* The increment queue is LIFO, thus we have to `queue'
             the instructions in reverse order.  */
          enqueue_insn (op0, gen_move_insn (op0, temp));
          result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
          return result;
        }
    }

  /* Preincrement, or we can't increment with one simple insn.  */
  if (post)
    /* Save a copy of the value before inc or dec, to return it later.  */
    temp = value = copy_to_reg (op0);
  else
    /* Arrange to return the incremented value.  */
    /* Copy the rtx because expand_binop will protect from the queue,
       and the results of that would be invalid for us to return
       if our caller does emit_queue before using our result.  */
    temp = copy_rtx (value = op0);

  /* Increment however we can.  */
  op1 = expand_binop (mode, this_optab, value, op1, op0,
                      TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);

  /* Make sure the value is stored into OP0.  */
  if (op1 != op0)
    emit_move_insn (op0, op1);

  return temp;
}
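
/* For illustration: for a postincrement such as

       a[i++] = 0;

   where the target has a matching add pattern, the code above queues
   the increment with enqueue_insn, so uses of the result see the value
   of I from before the increment and the queued add is only emitted by
   a later emit_queue; this avoids shuffling the old value through an
   extra register.  */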
9844\f
b93a436e
JL
9845/* Generate code to calculate EXP using a store-flag instruction
9846 and return an rtx for the result. EXP is either a comparison
9847 or a TRUTH_NOT_EXPR whose operand is a comparison.
ca695ac9 9848
b93a436e 9849 If TARGET is nonzero, store the result there if convenient.
ca695ac9 9850
cc2902df 9851 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
b93a436e 9852 cheap.
ca695ac9 9853
b93a436e
JL
9854 Return zero if there is no suitable set-flag instruction
9855 available on this machine.
ca695ac9 9856
b93a436e
JL
9857 Once expand_expr has been called on the arguments of the comparison,
9858 we are committed to doing the store flag, since it is not safe to
9859 re-evaluate the expression. We emit the store-flag insn by calling
9860 emit_store_flag, but only expand the arguments if we have a reason
9861 to believe that emit_store_flag will be successful. If we think that
9862 it will, but it isn't, we have to simulate the store-flag with a
9863 set/jump/set sequence. */
ca695ac9 9864
b93a436e 9865static rtx
502b8322 9866do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
b93a436e
JL
9867{
9868 enum rtx_code code;
9869 tree arg0, arg1, type;
9870 tree tem;
9871 enum machine_mode operand_mode;
9872 int invert = 0;
9873 int unsignedp;
9874 rtx op0, op1;
9875 enum insn_code icode;
9876 rtx subtarget = target;
381127e8 9877 rtx result, label;
ca695ac9 9878
b93a436e
JL
9879 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9880 result at the end. We can't simply invert the test since it would
9881 have already been inverted if it were valid. This case occurs for
9882 some floating-point comparisons. */
ca695ac9 9883
b93a436e
JL
9884 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9885 invert = 1, exp = TREE_OPERAND (exp, 0);
ca695ac9 9886
b93a436e
JL
9887 arg0 = TREE_OPERAND (exp, 0);
9888 arg1 = TREE_OPERAND (exp, 1);
5129d2ce
AH
9889
9890 /* Don't crash if the comparison was erroneous. */
9891 if (arg0 == error_mark_node || arg1 == error_mark_node)
9892 return const0_rtx;
9893
b93a436e
JL
9894 type = TREE_TYPE (arg0);
9895 operand_mode = TYPE_MODE (type);
9896 unsignedp = TREE_UNSIGNED (type);
ca695ac9 9897
b93a436e
JL
9898 /* We won't bother with BLKmode store-flag operations because it would mean
9899 passing a lot of information to emit_store_flag. */
9900 if (operand_mode == BLKmode)
9901 return 0;
ca695ac9 9902
b93a436e
JL
9903 /* We won't bother with store-flag operations involving function pointers
9904 when function pointers must be canonicalized before comparisons. */
9905#ifdef HAVE_canonicalize_funcptr_for_compare
9906 if (HAVE_canonicalize_funcptr_for_compare
9907 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9908 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9909 == FUNCTION_TYPE))
9910 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9911 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9912 == FUNCTION_TYPE))))
9913 return 0;
ca695ac9
JB
9914#endif
9915
b93a436e
JL
9916 STRIP_NOPS (arg0);
9917 STRIP_NOPS (arg1);
ca695ac9 9918
b93a436e
JL
9919 /* Get the rtx comparison code to use. We know that EXP is a comparison
9920 operation of some type. Some comparisons against 1 and -1 can be
9921 converted to comparisons with zero. Do so here so that the tests
9922 below will be aware that we have a comparison with zero. These
9923 tests will not catch constants in the first operand, but constants
9924 are rarely passed as the first operand. */
ca695ac9 9925
b93a436e
JL
9926 switch (TREE_CODE (exp))
9927 {
9928 case EQ_EXPR:
9929 code = EQ;
bbf6f052 9930 break;
b93a436e
JL
9931 case NE_EXPR:
9932 code = NE;
bbf6f052 9933 break;
b93a436e
JL
9934 case LT_EXPR:
9935 if (integer_onep (arg1))
9936 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9937 else
9938 code = unsignedp ? LTU : LT;
ca695ac9 9939 break;
b93a436e
JL
9940 case LE_EXPR:
9941 if (! unsignedp && integer_all_onesp (arg1))
9942 arg1 = integer_zero_node, code = LT;
9943 else
9944 code = unsignedp ? LEU : LE;
ca695ac9 9945 break;
b93a436e
JL
9946 case GT_EXPR:
9947 if (! unsignedp && integer_all_onesp (arg1))
9948 arg1 = integer_zero_node, code = GE;
9949 else
9950 code = unsignedp ? GTU : GT;
9951 break;
9952 case GE_EXPR:
9953 if (integer_onep (arg1))
9954 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9955 else
9956 code = unsignedp ? GEU : GE;
ca695ac9 9957 break;
1eb8759b
RH
9958
9959 case UNORDERED_EXPR:
9960 code = UNORDERED;
9961 break;
9962 case ORDERED_EXPR:
9963 code = ORDERED;
9964 break;
9965 case UNLT_EXPR:
9966 code = UNLT;
9967 break;
9968 case UNLE_EXPR:
9969 code = UNLE;
9970 break;
9971 case UNGT_EXPR:
9972 code = UNGT;
9973 break;
9974 case UNGE_EXPR:
9975 code = UNGE;
9976 break;
9977 case UNEQ_EXPR:
9978 code = UNEQ;
9979 break;
1eb8759b 9980
ca695ac9 9981 default:
b93a436e 9982 abort ();
bbf6f052 9983 }
bbf6f052 9984
b93a436e
JL
9985 /* Put a constant second. */
9986 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9987 {
9988 tem = arg0; arg0 = arg1; arg1 = tem;
9989 code = swap_condition (code);
ca695ac9 9990 }
bbf6f052 9991
b93a436e
JL
9992 /* If this is an equality or inequality test of a single bit, we can
9993 do this by shifting the bit being tested to the low-order bit and
9994 masking the result with the constant 1. If the condition was EQ,
9995 we xor it with 1. This does not require an scc insn and is faster
7960bf22
JL
9996 than an scc insn even if we have it.
9997
9998 The code to make this transformation was moved into fold_single_bit_test,
9999 so we just call into the folder and expand its result. */
d39985fa 10000
b93a436e
JL
10001 if ((code == NE || code == EQ)
10002 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10003 && integer_pow2p (TREE_OPERAND (arg0, 1)))
60cd4dae
JL
10004 {
10005 tree type = (*lang_hooks.types.type_for_mode) (mode, unsignedp);
10006 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
10007 arg0, arg1, type),
10008 target, VOIDmode, EXPAND_NORMAL);
10009 }
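
  /* For illustration: under the transformation above, a test such as

         (x & 8) != 0

     is rewritten by fold_single_bit_test into roughly

         (x >> 3) & 1

     (xor'd with 1 for the EQ case), which needs only a shift and a
     mask instead of a store-flag insn.  */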
bbf6f052 10010
b93a436e 10011 /* Now see if we are likely to be able to do this. Return if not. */
1eb8759b 10012 if (! can_compare_p (code, operand_mode, ccp_store_flag))
b93a436e 10013 return 0;
1eb8759b 10014
b93a436e
JL
10015 icode = setcc_gen_code[(int) code];
10016 if (icode == CODE_FOR_nothing
a995e389 10017 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
ca695ac9 10018 {
b93a436e
JL
10019 /* We can only do this if it is one of the special cases that
10020 can be handled without an scc insn. */
10021 if ((code == LT && integer_zerop (arg1))
10022 || (! only_cheap && code == GE && integer_zerop (arg1)))
10023 ;
10024 else if (BRANCH_COST >= 0
10025 && ! only_cheap && (code == NE || code == EQ)
10026 && TREE_CODE (type) != REAL_TYPE
10027 && ((abs_optab->handlers[(int) operand_mode].insn_code
10028 != CODE_FOR_nothing)
10029 || (ffs_optab->handlers[(int) operand_mode].insn_code
10030 != CODE_FOR_nothing)))
10031 ;
10032 else
10033 return 0;
ca695ac9 10034 }
3a94c984 10035
296b4ed9 10036 if (! get_subtarget (target)
a47fed55 10037 || GET_MODE (subtarget) != operand_mode
e5e809f4 10038 || ! safe_from_p (subtarget, arg1, 1))
b93a436e
JL
10039 subtarget = 0;
10040
10041 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10042 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10043
10044 if (target == 0)
10045 target = gen_reg_rtx (mode);
10046
10047 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10048 because, if the emit_store_flag does anything it will succeed and
10049 OP0 and OP1 will not be used subsequently. */
ca695ac9 10050
b93a436e
JL
10051 result = emit_store_flag (target, code,
10052 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10053 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10054 operand_mode, unsignedp, 1);
ca695ac9 10055
b93a436e
JL
10056 if (result)
10057 {
10058 if (invert)
10059 result = expand_binop (mode, xor_optab, result, const1_rtx,
10060 result, 0, OPTAB_LIB_WIDEN);
10061 return result;
ca695ac9 10062 }
bbf6f052 10063
b93a436e
JL
10064 /* If this failed, we have to do this with set/compare/jump/set code. */
10065 if (GET_CODE (target) != REG
10066 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10067 target = gen_reg_rtx (GET_MODE (target));
10068
10069 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10070 result = compare_from_rtx (op0, op1, code, unsignedp,
a06ef755 10071 operand_mode, NULL_RTX);
b93a436e
JL
10072 if (GET_CODE (result) == CONST_INT)
10073 return (((result == const0_rtx && ! invert)
10074 || (result != const0_rtx && invert))
10075 ? const0_rtx : const1_rtx);
ca695ac9 10076
8f08e8c0
JL
10077 /* The code of RESULT may not match CODE if compare_from_rtx
10078 decided to swap its operands and reverse the original code.
10079
10080 We know that compare_from_rtx returns either a CONST_INT or
10081 a new comparison code, so it is safe to just extract the
10082 code from RESULT. */
10083 code = GET_CODE (result);
10084
b93a436e
JL
10085 label = gen_label_rtx ();
10086 if (bcc_gen_fctn[(int) code] == 0)
10087 abort ();
0f41302f 10088
b93a436e
JL
10089 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10090 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10091 emit_label (label);
bbf6f052 10092
b93a436e 10093 return target;
ca695ac9 10094}
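
/* For illustration: when emit_store_flag fails, the set/jump/set
   fallback above computes `target = (a < b)' as roughly

       target = 1;
       if (a < b) goto L;
       target = 0;
     L:

   with the two constants exchanged when INVERT is set.  */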
\f

/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */

unsigned int
case_values_threshold (void)
{
  return CASE_VALUES_THRESHOLD;
}

/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */
int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
            rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build (MINUS_EXPR, index_type,
                          index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
                               omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
        {
          index_expr = convert ((*lang_hooks.types.type_for_size)
                                (index_bits, 0), index_expr);
          index_type = TREE_TYPE (index_expr);
        }

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
                       op1, TREE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
                       op2, TREE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
                              table_label, default_label));
  return 1;
}
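
/* For illustration: for a C switch statement like

       switch (c) { case 10: ... case 17: ... }

   try_casesi would be handed MINVAL = 10 and RANGE = 7; the generated
   casesi insn dispatches through TABLE_LABEL when the index falls in
   [10, 17] and jumps to DEFAULT_LABEL otherwise.  */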

/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif

/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
              rtx default_label)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->max_jumptable_ents)
    cfun->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                           default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip thru, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  MEM_NOTRAP_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
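
/* For illustration: on a non-PIC target whose case vectors hold
   absolute addresses, the dispatch address computed above is roughly

       *(TABLE_LABEL + INDEX * GET_MODE_SIZE (CASE_VECTOR_MODE))

   i.e. the INDEXth slot of the table is loaded into a register and
   jumped through.  */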

int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
               rtx table_label, rtx default_label)
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold (build (MINUS_EXPR, index_type,
                            convert (index_type, index_expr),
                            convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
                convert_modes (TYPE_MODE (index_type),
                               TYPE_MODE (TREE_TYPE (range)),
                               expand_expr (range, NULL_RTX,
                                            VOIDmode, 0),
                               TREE_UNSIGNED (TREE_TYPE (range))),
                table_label, default_label);
  return 1;
}

/* Nonzero if the mode is a valid vector mode for this architecture.
   This returns nonzero even if there is no hardware support for the
   vector mode, but we can emulate with narrower modes.  */

int
vector_mode_valid_p (enum machine_mode mode)
{
  enum mode_class class = GET_MODE_CLASS (mode);
  enum machine_mode innermode;

  /* Doh!  What's going on?  */
  if (class != MODE_VECTOR_INT
      && class != MODE_VECTOR_FLOAT)
    return 0;

  /* Hardware support.  Woo hoo!  */
  if (VECTOR_MODE_SUPPORTED_P (mode))
    return 1;

  innermode = GET_MODE_INNER (mode);

  /* We should probably return 1 if requesting V4DI and we have no DI,
     but we have V2DI, but this is probably very unlikely.  */

  /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate with a pair of DIs.  */
  return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
}
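
/* For illustration: on a target with DImode moves but no V2DImode
   hardware, vector_mode_valid_p (V2DImode) still returns nonzero, and
   a V2DI move is later emulated as two DI moves.  */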

/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */
static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  int units, i;
  tree link, elt;
  enum machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (is_zeros_p (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  link = TREE_VECTOR_CST_ELTS (exp);
  for (i = 0; link; link = TREE_CHAIN (link), ++i)
    {
      elt = TREE_VALUE (link);

      if (TREE_CODE (elt) == REAL_CST)
        RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
                                                         inner);
      else
        RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
                                               TREE_INT_CST_HIGH (elt),
                                               inner);
    }

  /* Initialize remaining elements to 0.  */
  for (; i < units; ++i)
    RTVEC_ELT (v, i) = CONST0_RTX (inner);

  return gen_rtx_raw_CONST_VECTOR (mode, v);
}
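
/* For illustration: a V4SImode VECTOR_CST listing only the elements
   {1, 2} becomes

       (const_vector:V4SI [1 2 0 0])

   with the trailing lanes zero-filled by the loop above.  */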

#include "gt-expr.h"