/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "reload.h"
#include "output.h"
#include "typeclass.h"
#include "toplev.h"
#include "ggc.h"
#include "langhooks.h"
#include "intl.h"
#include "tm_p.h"

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
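
/* For illustration (figures hypothetical, not from any particular
   target): with a downward-growing stack, STACK_PUSH_CODE is PRE_DEC,
   so a push of a SImode register REGNO is emitted as a store through a
   pre-decremented stack pointer, roughly

	(set (mem:SI (pre_dec:Pmode (reg sp))) (reg:SI REGNO))

   while an upward-growing stack uses PRE_INC instead.  */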

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
static tree placeholder_list = 0;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
  PTR constfundata;
  int reverse;
};

extern struct obstack permanent_obstack;

static rtx enqueue_insn PARAMS ((rtx, rtx));
static unsigned HOST_WIDE_INT move_by_pieces_ninsns
	PARAMS ((unsigned HOST_WIDE_INT, unsigned int));
static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
				      struct move_by_pieces *));
static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
				      enum machine_mode));
static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
				     unsigned int));
static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
				       unsigned int));
static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
				       enum machine_mode,
				       struct store_by_pieces *));
static rtx get_subtarget PARAMS ((rtx));
static int is_zeros_p PARAMS ((tree));
static int mostly_zeros_p PARAMS ((tree));
static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
					     HOST_WIDE_INT, enum machine_mode,
					     tree, tree, int, int));
static void store_constructor PARAMS ((tree, rtx, int, HOST_WIDE_INT));
static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
				HOST_WIDE_INT, enum machine_mode,
				tree, enum machine_mode, int, tree,
				int));
static rtx var_rtx PARAMS ((tree));
static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
static rtx expand_increment PARAMS ((tree, int, int));
static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
					 rtx, rtx));
static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
#ifdef PUSH_ROUNDING
static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
#endif
static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif
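
/* For example (illustrative figures only): with 4-byte words, a
   16-byte word-aligned copy costs move_by_pieces_ninsns (16, 32) == 4
   word moves, which is below the default MOVE_RATIO of 15, so the copy
   is done inline by move_by_pieces; under -Os, where MOVE_RATIO drops
   to 3, the same copy falls back to a movstr pattern or a libcall.  */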

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif

/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;

  start_sequence ();

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx_REG (mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  end_sequence ();
}

/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));

  pending_chain = 0;
  pending_stack_adjust = 0;
  stack_pointer_delta = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

void
mark_expr_status (p)
     struct expr_status *p;
{
  if (p == NULL)
    return;

  ggc_mark_rtx (p->x_saveregs_value);
  ggc_mark_rtx (p->x_apply_args_value);
  ggc_mark_rtx (p->x_forced_labels);
}

void
free_expr_status (f)
     struct function *f;
{
  free (f->expr);
  f->expr = NULL;
}

/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function ()
{
  if (pending_chain)
    abort ();
}

/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
				  body, pending_chain);
  return pending_chain;
}

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     rtx x;
     int modify;
{
  RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  rtx y = XEXP (x, 0);
	  rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

	  if (QUEUED_INSN (y))
	    {
	      rtx temp = gen_reg_rtx (GET_MODE (x));

	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }

	  /* Copy the address into a pseudo, so that the returned value
	     remains correct across calls to emit_queue.  */
	  return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
	}

      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (x)
     rtx x;
{
  enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  rtx p;
  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      if (GET_CODE (body) == SEQUENCE)
	{
	  QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
	  emit_insn (QUEUED_BODY (p));
	}
      else
	QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}
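
/* A minimal sketch of the queueing protocol above, for illustration
   only; example_queue_usage is hypothetical (real callers reach
   enqueue_insn through the expanders later in this file, e.g.
   expand_increment).  */
#if 0
static void
example_queue_usage (var, inc)
     rtx var, inc;
{
  /* Queue "VAR = VAR + INC" to be emitted later; Q stands for VAR.  */
  rtx q = enqueue_insn (var, gen_rtx_SET (VOIDmode, var,
					  gen_rtx_PLUS (GET_MODE (var),
							var, inc)));
  /* Get something usable in an insn right now; since the increment
     has not yet happened, this is the pre-increment value of VAR.  */
  rtx old = protect_from_queue (q, 0);

  emit_move_insn (gen_reg_rtx (GET_MODE (var)), old);

  /* Flush all pending increments.  */
  emit_queue ();
}
#endif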

/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
	abort ();

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (to_real != from_real)
    abort ();

  if (to_real)
    {
      rtx value, insns;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif

      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	default:
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
								    from));
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
	from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
	{
	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
	{
	  from = convert_to_mode (QImode, from, unsignedp);
	  from_mode = QImode;
	}
      else
	{
#ifdef HAVE_extendpqiqi2
	  if (HAVE_extendpqiqi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpqiqi2 */
	  abort ();
	}
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (! unsignedp && HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
#ifdef HAVE_zero_extendpsisi2
	  if (unsignedp && HAVE_zero_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_zero_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  if (flag_force_mem)
	    from = force_not_mem (from);

	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.  Generate what we need with
	     shifts: e.g. to widen QImode to SImode this way, we shift
	     left by 24 bits and then shift right by 24 bits again,
	     arithmetically or logically according to UNSIGNEDP.  */
	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
				      - GET_MODE_BITSIZE (from_mode), 0);
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
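
/* An illustrative (hypothetical) use of convert_move: widen a QImode
   pseudo to SImode with sign extension.  Depending on the target this
   becomes a single extendqisi2-style insn, a conversion via word_mode
   or an intermediate mode, or the shift sequence above.  */
#if 0
static void
example_convert_move ()
{
  rtx src = gen_reg_rtx (QImode);
  rtx dst = gen_reg_rtx (SImode);

  convert_move (dst, src, 0);	/* UNSIGNEDP == 0: sign extend.  */
}
#endif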

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return GEN_INT (trunc_int_for_mode (val, mode));
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}

/* This macro determines the largest unit size that move_by_pieces
   can use.  */

/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES   MOVE_MAX
#endif

/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is the maximum alignment we can assume.  */

void
move_by_pieces (to, from, len, align)
     rtx to, from;
     unsigned HOST_WIDE_INT len;
     unsigned int align;
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}
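
/* An illustrative (hypothetical) use of move_by_pieces: inline-copy
   eight bytes between two word-aligned BLKmode references.  Real
   callers pass TO and FROM through protect_from_queue first, as the
   comment above requires.  */
#if 0
static void
example_move_by_pieces (to_addr, from_addr)
     rtx to_addr, from_addr;
{
  rtx to = gen_rtx_MEM (BLKmode, to_addr);
  rtx from = gen_rtx_MEM (BLKmode, from_addr);

  move_by_pieces (to, from, 8, BITS_PER_WORD);
}
#endif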

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (l, align)
     unsigned HOST_WIDE_INT l;
     unsigned int align;
{
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  if (l)
    abort ();
  return n_insns;
}
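
/* For example (illustrative figures only): with an 8-byte MOVE_MAX and
   DImode, HImode and QImode all movable at the given alignment, an
   11-byte fully aligned block decomposes greedily as 11 = 8 + 2 + 1,
   so move_by_pieces_ninsns returns 3.  */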

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PARAMS ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT) size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT) size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  abort ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}

/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (x, y, size)
     rtx x, y;
     rtx size;
{
  rtx retval = 0;
#ifdef TARGET_MEM_FUNCTIONS
  static tree fn;
  tree call_expr, arg_list;
#endif
  unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align / BITS_PER_UNIT);
      enum machine_mode mode;

      /* Since this is a move insn, we don't care about volatility.  */
      volatile_ok = 1;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];
	  insn_operand_predicate_fn pred;

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && ((GET_CODE (size) == CONST_INT
		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
		       <= (GET_MODE_MASK (mode) >> 1)))
		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	      && ((pred = insn_data[(int) code].operand[0].predicate) == 0
		  || (*pred) (x, BLKmode))
	      && ((pred = insn_data[(int) code].operand[1].predicate) == 0
		  || (*pred) (y, BLKmode))
	      && ((pred = insn_data[(int) code].operand[3].predicate) == 0
		  || (*pred) (opalign, VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      pred = insn_data[(int) code].operand[2].predicate;
	      if (pred != 0 && ! (*pred) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  volatile_ok = 0;
		  return 0;
		}
	      else
		delete_insns_since (last);
	    }
	}

      volatile_ok = 0;

      /* X, Y, or SIZE may have been passed through protect_from_queue.

	 It is unsafe to save the value generated by protect_from_queue
	 and reuse it later.  Consider what happens if emit_queue is
	 called before the return value from protect_from_queue is used.

	 Expansion of the CALL_EXPR below will call emit_queue before
	 we are finished emitting RTL for argument setup.  So if we are
	 not careful we could get the wrong value for an argument.

	 To avoid this problem we go ahead and emit code to copy X, Y &
	 SIZE into new pseudos.  We can then place those new pseudos
	 into an RTL_EXPR and use them later, even after a call to
	 emit_queue.

	 Note this is not strictly needed for library calls since they
	 do not call emit_queue before loading their arguments.  However,
	 we may need to have library calls call emit_queue in the future
	 since failing to do so could cause problems for targets which
	 define SMALL_REGISTER_CLASSES and pass arguments in registers.  */
      x = copy_to_mode_reg (Pmode, XEXP (x, 0));
      y = copy_to_mode_reg (Pmode, XEXP (y, 0));

#ifdef TARGET_MEM_FUNCTIONS
      size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
#else
      size = convert_to_mode (TYPE_MODE (integer_type_node), size,
			      TREE_UNSIGNED (integer_type_node));
      size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
#endif

#ifdef TARGET_MEM_FUNCTIONS
      /* It is incorrect to use the libcall calling conventions to call
	 memcpy in this context.

	 This could be a user call to memcpy and the user may wish to
	 examine the return value from memcpy.

	 For targets where libcalls and normal calls have different
	 conventions for returning pointers, we could end up generating
	 incorrect code.

	 So instead of using a libcall sequence we build up a suitable
	 CALL_EXPR and expand the call in the normal fashion.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;

	  /* This was copied from except.c, I don't know if all this is
	     necessary in this context or not.  */
	  fn = get_identifier ("memcpy");
	  fntype = build_pointer_type (void_type_node);
	  fntype = build_function_type (fntype, NULL_TREE);
	  fn = build_decl (FUNCTION_DECL, fn, fntype);
	  ggc_add_tree_root (&fn, 1);
	  DECL_EXTERNAL (fn) = 1;
	  TREE_PUBLIC (fn) = 1;
	  DECL_ARTIFICIAL (fn) = 1;
	  TREE_NOTHROW (fn) = 1;
	  make_decl_rtl (fn, NULL);
	  assemble_external (fn);
	}

      /* We need to make an argument list for the function call.

	 memcpy has three arguments, the first two are void * addresses and
	 the last is a size_t byte count for the copy.  */
      arg_list
	= build_tree_list (NULL_TREE,
			   make_tree (build_pointer_type (void_type_node), x));
      TREE_CHAIN (arg_list)
	= build_tree_list (NULL_TREE,
			   make_tree (build_pointer_type (void_type_node), y));
      TREE_CHAIN (TREE_CHAIN (arg_list))
	= build_tree_list (NULL_TREE, make_tree (sizetype, size));
      TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;

      /* Now we have to build up the CALL_EXPR itself.  */
      call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
			 call_expr, arg_list, NULL_TREE);
      TREE_SIDE_EFFECTS (call_expr) = 1;

      retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
#else
      emit_library_call (bcopy_libfunc, LCT_NORMAL,
			 VOIDmode, 3, y, Pmode, x, Pmode,
			 convert_to_mode (TYPE_MODE (integer_type_node), size,
					  TREE_UNSIGNED (integer_type_node)),
			 TYPE_MODE (integer_type_node));
#endif

      /* If we are initializing a readonly value, show the above call
	 clobbered it.  Otherwise, a load from it may erroneously be
	 hoisted from a loop.  */
      if (RTX_UNCHANGING_P (x))
	emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
    }

  return retval;
}
\f
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

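/* For instance (an illustrative sketch, assuming 32-bit words), a target
   returning a DImode value in two consecutive word registers might use
   move_block_to_reg (4, x, 2, DImode) to load (reg:SI 4) and (reg:SI 5)
   from the two words of X; the register number here is hypothetical.  */
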
void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
#ifdef HAVE_store_multiple
  rtx pat;
  rtx last;
#endif
  enum machine_mode mode;

  if (nregs == 0)
    return;

  /* If SIZE is that of a mode no bigger than a word, just use that
     mode's store operation.  */
  if (size <= UNITS_PER_WORD
      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode
      && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
    {
      emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
      return;
    }

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  Note that the previous test
     doesn't handle all cases (e.g. SIZE == 3).  */
  if (size < UNITS_PER_WORD
      && BYTES_BIG_ENDIAN
      && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
	abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx_REG (word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }
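
  /* To illustrate the shift above (a sketch, assuming a 32-bit big-endian
     word): for SIZE == 3 no 3-byte integer mode exists, so the register is
     shifted left by (4 - 3) * 8 = 8 bits, moving its three significant
     bytes to the low memory addresses of the stored word.  */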

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}

/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block SRC in bytes, or -1 if not known.  */
/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
   the balance will be in what would be the low-order memory addresses, i.e.
   left justified for big endian, right justified for little endian.  This
   happens to be true for the targets currently using this support.  If this
   ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
   would be needed.  */

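/* As an illustration (a sketch, not any particular target's ABI), a
   12-byte block split between a floating and an integer register might
   arrive as

	(parallel [(expr_list (reg:DF 32) (const_int 0))
		   (expr_list (reg:SI 4) (const_int 8))])

   where each CONST_INT is the byte offset of its piece within the block;
   a null first entry would mean the value also lives on the stack.  */
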
void
emit_group_load (dst, orig_src, ssize)
     rtx dst, orig_src;
     int ssize;
{
  rtx *tmps, src;
  int start, i;

  if (GET_CODE (dst) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;
	  if (bytelen <= 0)
	    abort ();
	}

      /* If we won't be loading directly from memory, protect the real source
	 from strange tricks we might play; but make sure that the source can
	 be loaded directly into the destination.  */
      src = orig_src;
      if (GET_CODE (orig_src) != MEM
	  && (!CONSTANT_P (orig_src)
	      || (GET_MODE (orig_src) != mode
		  && GET_MODE (orig_src) != VOIDmode)))
	{
	  if (GET_MODE (orig_src) == VOIDmode)
	    src = gen_reg_rtx (mode);
	  else
	    src = gen_reg_rtx (GET_MODE (orig_src));

	  emit_move_insn (src, orig_src);
	}

      /* Optimize the access just a bit.  */
      if (GET_CODE (src) == MEM
	  && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	{
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
	}
      else if (GET_CODE (src) == CONCAT)
	{
	  if ((bytepos == 0
	       && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
	      || (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
		  && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1)))))
	    {
	      tmps[i] = XEXP (src, bytepos != 0);
	      if (! CONSTANT_P (tmps[i])
		  && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
					     0, 1, NULL_RTX, mode, mode, ssize);
	    }
	  else if (bytepos == 0)
	    {
	      rtx mem = assign_stack_temp (GET_MODE (src),
					   GET_MODE_SIZE (GET_MODE (src)), 0);
	      emit_move_insn (mem, src);
	      tmps[i] = adjust_address (mem, mode, 0);
	    }
	  else
	    abort ();
	}
      else if (CONSTANT_P (src)
	       || (GET_CODE (src) == REG && GET_MODE (src) == mode))
	tmps[i] = src;
      else
	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
				     mode, mode, ssize);

      if (BYTES_BIG_ENDIAN && shift)
	expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
		      tmps[i], 0, OPTAB_WIDEN);
    }

  emit_queue ();

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
}

/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block DST, or -1 if not known.  */

void
emit_group_store (orig_dst, src, ssize)
     rtx orig_dst, src;
     int ssize;
{
  rtx *tmps, dst;
  int start, i;

  if (GET_CODE (src) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);
    }
  emit_queue ();

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
	 a return statement.  In that case, the dst and src are the same,
	 so no action is necessary.  */
      if (rtx_equal_p (dst, src))
	return;

      /* It is unclear if we can ever reach here, but we may as well handle
	 it.  Allocate a temporary, and split this into a store/load to/from
	 the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, ssize);
      emit_group_load (dst, temp, ssize);
      return;
    }
  else if (GET_CODE (dst) != MEM)
    {
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, const0_rtx);
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  if (BYTES_BIG_ENDIAN)
	    {
	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	      expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
			    tmps[i], 0, OPTAB_WIDEN);
	    }
	  bytelen = ssize - bytepos;
	}

      /* Optimize the access just a bit.  */
      if (GET_CODE (dst) == MEM
	  && MEM_ALIGN (dst) >= GET_MODE_ALIGNMENT (mode)
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	emit_move_insn (adjust_address (dst, mode, bytepos), tmps[i]);
      else
	store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
			 mode, tmps[i], ssize);
    }

  emit_queue ();

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (GET_CODE (dst) == REG)
    emit_move_insn (orig_dst, dst);
}

/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The primary purpose of this routine is to handle functions
   that return BLKmode structures in registers.  Some machines
   (the PA for example) want to return all small structures
   in registers regardless of the structure's alignment.  */

rtx
copy_blkmode_from_reg (tgtblk, srcreg, type)
     rtx tgtblk;
     rtx srcreg;
     tree type;
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;

  if (tgtblk == 0)
    {
      tgtblk = assign_temp (build_qualified_type (type,
						  (TYPE_QUALS (type)
						   | TYPE_QUAL_CONST)),
			    0, 1, 1);
      preserve_temp_slots (tgtblk);
    }

  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
     into a new pseudo which is a full word.

     If FUNCTION_ARG_REG_LITTLE_ENDIAN is set and convert_to_mode does a copy,
     the wrong part of the register gets copied so we fake a type conversion
     in place.  */
  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    {
      if (FUNCTION_ARG_REG_LITTLE_ENDIAN)
	srcreg = simplify_gen_subreg (word_mode, srcreg, GET_MODE (srcreg), 0);
      else
	srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
    }

  /* Structures whose size is not a multiple of a word are aligned
     to the least significant byte (to the right).  On a BYTES_BIG_ENDIAN
     machine, this means we must skip the empty high order bytes when
     calculating the bit offset.  */
  if (BYTES_BIG_ENDIAN
      && !FUNCTION_ARG_REG_LITTLE_ENDIAN
      && bytes % UNITS_PER_WORD)
    big_endian_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
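
  /* For example (a sketch, assuming 32-bit words): a 6-byte structure has
     bytes % UNITS_PER_WORD == 2, giving a correction of 32 - 16 = 16 bits,
     so the first extraction below starts 16 bits into the first source
     word.  */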

  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */
  for (bitpos = 0, xbitpos = big_endian_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
	 word boundary and when xbitpos == big_endian_correction
	 (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
	  || xbitpos == big_endian_correction)
	src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
				     GET_MODE (srcreg));

      /* We need a new destination operand each time bitpos is on
	 a word boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
	dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);

      /* Use xbitpos for the source extraction (right justified) and
	 bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
		       extract_bit_field (src, bitsize,
					  xbitpos % BITS_PER_WORD, 1,
					  NULL_RTX, word_mode, word_mode,
					  BITS_PER_WORD),
		       BITS_PER_WORD);
    }

  return tgtblk;
}

/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (call_fusage, reg)
     rtx *call_fusage, reg;
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
			 gen_rtx_USE (VOIDmode, reg), *call_fusage);
}
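
/* Each call prepends to the chain, so after use_reg (&fusage, r4) followed
   by use_reg (&fusage, r5) the list looks like (a sketch; r4/r5 stand for
   hard register rtxen)

	(expr_list (use (reg:SI 5))
		   (expr_list (use (reg:SI 4))
			      <old chain>))  */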

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (call_fusage, regno, nregs)
     rtx *call_fusage;
     int regno;
     int nregs;
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
}

/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (call_fusage, regs)
     rtx *call_fusage;
     rtx regs;
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
	 registers.  This can also be a MEM for targets that pass values
	 partially on the stack and partially in registers.  */
      if (reg != 0 && GET_CODE (reg) == REG)
	use_reg (call_fusage, reg);
    }
}
\f
/* Determine whether the LEN bytes generated by CONSTFUN can be
   stored to memory using several move instructions.  CONSTFUNDATA is
   a pointer which will be passed as argument in every CONSTFUN call;
   ALIGN is maximum alignment we can assume.  Return nonzero if a
   call to store_by_pieces should succeed.  */

int
can_store_by_pieces (len, constfun, constfundata, align)
     unsigned HOST_WIDE_INT len;
     rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
     PTR constfundata;
     unsigned int align;
{
  unsigned HOST_WIDE_INT max_size, l;
  HOST_WIDE_INT offset = 0;
  enum machine_mode mode, tmode;
  enum insn_code icode;
  int reverse;
  rtx cst;

  if (! MOVE_BY_PIECES_P (len, align))
    return 0;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* We would first store what we can in the largest integer mode, then go to
     successively smaller modes.  */

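  /* E.g. (a sketch, assuming 32-bit words and adequate alignment):
     LEN == 7 is checked as one SImode, one HImode and one QImode store,
     CONSTFUN being consulted at offsets 0, 4 and 6 respectively.  */
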
  for (reverse = 0;
       reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
       reverse++)
    {
      l = len;
      mode = VOIDmode;
      max_size = MOVE_MAX_PIECES + 1;
      while (max_size > 1)
	{
	  for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	       tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	    if (GET_MODE_SIZE (tmode) < max_size)
	      mode = tmode;

	  if (mode == VOIDmode)
	    break;

	  icode = mov_optab->handlers[(int) mode].insn_code;
	  if (icode != CODE_FOR_nothing
	      && align >= GET_MODE_ALIGNMENT (mode))
	    {
	      unsigned int size = GET_MODE_SIZE (mode);

	      while (l >= size)
		{
		  if (reverse)
		    offset -= size;

		  cst = (*constfun) (constfundata, offset, mode);
		  if (!LEGITIMATE_CONSTANT_P (cst))
		    return 0;

		  if (!reverse)
		    offset += size;

		  l -= size;
		}
	    }

	  max_size = GET_MODE_SIZE (mode);
	}

      /* The code above should have handled everything.  */
      if (l != 0)
	abort ();
    }

  return 1;
}

/* Generate several move instructions to store LEN bytes generated by
   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
   pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  */

void
store_by_pieces (to, len, constfun, constfundata, align)
     rtx to;
     unsigned HOST_WIDE_INT len;
     rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
     PTR constfundata;
     unsigned int align;
{
  struct store_by_pieces data;

  if (! MOVE_BY_PIECES_P (len, align))
    abort ();
  to = protect_from_queue (to, 1);
  data.constfun = constfun;
  data.constfundata = constfundata;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
}
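
/* The simplest CONSTFUN is clear_by_pieces_1 below, which returns
   const0_rtx for every piece.  A hypothetical callback that feeds the
   bytes of a string (a sketch only; the real string expanders live in
   builtins.c) might read

	static rtx
	example_read_str (data, offset, mode)
	     PTR data;
	     HOST_WIDE_INT offset;
	     enum machine_mode mode;
	{
	  return c_readstr (((const char *) data) + offset, mode);
	}  */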

/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  The caller must pass TO through protect_from_queue
   before calling.  ALIGN is maximum alignment we can assume.  */

static void
clear_by_pieces (to, len, align)
     rtx to;
     unsigned HOST_WIDE_INT len;
     unsigned int align;
{
  struct store_by_pieces data;

  data.constfun = clear_by_pieces_1;
  data.constfundata = NULL;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
}

/* Callback routine for clear_by_pieces.
   Return const0_rtx unconditionally.  */

static rtx
clear_by_pieces_1 (data, offset, mode)
     PTR data ATTRIBUTE_UNUSED;
     HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  return const0_rtx;
}

/* Subroutine of clear_by_pieces and store_by_pieces.
   Generate several move instructions to store LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  The caller must pass TO through protect_from_queue
   before calling.  ALIGN is maximum alignment we can assume.  */

static void
store_by_pieces_1 (data, align)
     struct store_by_pieces *data;
     unsigned int align;
{
  rtx to_addr = XEXP (data->to, 0);
  unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data->offset = 0;
  data->to_addr = to_addr;
  data->autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);

  data->explicit_inc_to = 0;
  data->reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data->reverse)
    data->offset = data->len;

  /* If storing requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!data->autinc_to
      && move_by_pieces_ninsns (data->len, align) > 2)
    {
      /* Determine the main mode we'll be using.  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
	{
	  data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
	  data->autinc_to = 1;
	  data->explicit_inc_to = -1;
	}

      if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
	  && ! data->autinc_to)
	{
	  data->to_addr = copy_addr_to_reg (to_addr);
	  data->autinc_to = 1;
	  data->explicit_inc_to = 1;
	}

      if (!data->autinc_to && CONSTANT_P (to_addr))
	data->to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	store_by_pieces_2 (GEN_FCN (icode), mode, data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data->len != 0)
    abort ();
}

/* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
store_by_pieces_2 (genfun, mode, data)
     rtx (*genfun) PARAMS ((rtx, ...));
     enum machine_mode mode;
     struct store_by_pieces *data;
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1, cst;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->autinc_to)
	to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					 data->offset);
      else
	to1 = adjust_address (data->to, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT) size)));

      cst = (*data->constfun) (data->constfundata, data->offset, mode);
      emit_insn ((*genfun) (to1, cst));

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
\f
/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
   its length in bytes.  */

rtx
clear_storage (object, size)
     rtx object;
     rtx size;
{
#ifdef TARGET_MEM_FUNCTIONS
  static tree fn;
  tree call_expr, arg_list;
#endif
  rtx retval = 0;
  unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
			: GET_MODE_ALIGNMENT (GET_MODE (object)));

  /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
     just move a zero.  Otherwise, do this a piece at a time.  */
  if (GET_MODE (object) != BLKmode
      && GET_CODE (size) == CONST_INT
      && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
    emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
  else
    {
      object = protect_from_queue (object, 1);
      size = protect_from_queue (size, 0);

      if (GET_CODE (size) == CONST_INT
	  && MOVE_BY_PIECES_P (INTVAL (size), align))
	clear_by_pieces (object, INTVAL (size), align);
      else
	{
	  /* Try the most limited insn first, because there's no point
	     including more than one in the machine description unless
	     the more limited one has some advantage.  */

	  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
	  enum machine_mode mode;

	  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	       mode = GET_MODE_WIDER_MODE (mode))
	    {
	      enum insn_code code = clrstr_optab[(int) mode];
	      insn_operand_predicate_fn pred;

	      if (code != CODE_FOR_nothing
		  /* We don't need MODE to be narrower than
		     BITS_PER_HOST_WIDE_INT here because if SIZE is less than
		     the mode mask, as it is returned by the macro, it will
		     definitely be less than the actual mode mask.  */
		  && ((GET_CODE (size) == CONST_INT
		       && ((unsigned HOST_WIDE_INT) INTVAL (size)
			   <= (GET_MODE_MASK (mode) >> 1)))
		      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
		  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
		      || (*pred) (object, BLKmode))
		  && ((pred = insn_data[(int) code].operand[2].predicate) == 0
		      || (*pred) (opalign, VOIDmode)))
		{
		  rtx op1;
		  rtx last = get_last_insn ();
		  rtx pat;

		  op1 = convert_to_mode (mode, size, 1);
		  pred = insn_data[(int) code].operand[1].predicate;
		  if (pred != 0 && ! (*pred) (op1, mode))
		    op1 = copy_to_mode_reg (mode, op1);

		  pat = GEN_FCN ((int) code) (object, op1, opalign);
		  if (pat)
		    {
		      emit_insn (pat);
		      return 0;
		    }
		  else
		    delete_insns_since (last);
		}
	    }

	  /* OBJECT or SIZE may have been passed through protect_from_queue.

	     It is unsafe to save the value generated by protect_from_queue
	     and reuse it later.  Consider what happens if emit_queue is
	     called before the return value from protect_from_queue is used.

	     Expansion of the CALL_EXPR below will call emit_queue before
	     we are finished emitting RTL for argument setup.  So if we are
	     not careful we could get the wrong value for an argument.

	     To avoid this problem we go ahead and emit code to copy OBJECT
	     and SIZE into new pseudos.  We can then place those new pseudos
	     into an RTL_EXPR and use them later, even after a call to
	     emit_queue.

	     Note this is not strictly needed for library calls since they
	     do not call emit_queue before loading their arguments.  However,
	     we may need to have library calls call emit_queue in the future
	     since failing to do so could cause problems for targets which
	     define SMALL_REGISTER_CLASSES and pass arguments in registers.  */
	  object = copy_to_mode_reg (Pmode, XEXP (object, 0));

#ifdef TARGET_MEM_FUNCTIONS
	  size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
#else
	  size = convert_to_mode (TYPE_MODE (integer_type_node), size,
				  TREE_UNSIGNED (integer_type_node));
	  size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
#endif

#ifdef TARGET_MEM_FUNCTIONS
	  /* It is incorrect to use the libcall calling conventions to call
	     memset in this context.

	     This could be a user call to memset and the user may wish to
	     examine the return value from memset.

	     For targets where libcalls and normal calls have different
	     conventions for returning pointers, we could end up generating
	     incorrect code.

	     So instead of using a libcall sequence we build up a suitable
	     CALL_EXPR and expand the call in the normal fashion.  */
	  if (fn == NULL_TREE)
	    {
	      tree fntype;

	      /* This was copied from except.c, I don't know if all this is
		 necessary in this context or not.  */
	      fn = get_identifier ("memset");
	      fntype = build_pointer_type (void_type_node);
	      fntype = build_function_type (fntype, NULL_TREE);
	      fn = build_decl (FUNCTION_DECL, fn, fntype);
	      ggc_add_tree_root (&fn, 1);
	      DECL_EXTERNAL (fn) = 1;
	      TREE_PUBLIC (fn) = 1;
	      DECL_ARTIFICIAL (fn) = 1;
	      TREE_NOTHROW (fn) = 1;
	      make_decl_rtl (fn, NULL);
	      assemble_external (fn);
	    }

	  /* We need to make an argument list for the function call.

	     memset has three arguments, the first is a void * address, the
	     second an integer with the initialization value, the last is a
	     size_t byte count for the copy.  */
	  arg_list
	    = build_tree_list (NULL_TREE,
			       make_tree (build_pointer_type (void_type_node),
					  object));
	  TREE_CHAIN (arg_list)
	    = build_tree_list (NULL_TREE,
			       make_tree (integer_type_node, const0_rtx));
	  TREE_CHAIN (TREE_CHAIN (arg_list))
	    = build_tree_list (NULL_TREE, make_tree (sizetype, size));
	  TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;

	  /* Now we have to build up the CALL_EXPR itself.  */
	  call_expr = build1 (ADDR_EXPR,
			      build_pointer_type (TREE_TYPE (fn)), fn);
	  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
			     call_expr, arg_list, NULL_TREE);
	  TREE_SIDE_EFFECTS (call_expr) = 1;

	  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
#else
	  emit_library_call (bzero_libfunc, LCT_NORMAL,
			     VOIDmode, 2, object, Pmode, size,
			     TYPE_MODE (integer_type_node));
#endif

	  /* If we are initializing a readonly value, show the above call
	     clobbered it.  Otherwise, a load from it may erroneously be
	     hoisted from a loop.  */
	  if (RTX_UNCHANGING_P (object))
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
	}
    }

  return retval;
}

/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  rtx y_cst = NULL_RTX;
  rtx last_insn;

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  /* Never force constant_p_rtx to memory.  */
  if (GET_CODE (y) == CONSTANT_P_RTX)
    ;
  else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
    {
      y_cst = y;
      y = force_const_mem (mode, y);
    }

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
	   && ! push_operand (x, GET_MODE (x)))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = validize_mem (x);

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = validize_mem (y);

  if (mode == BLKmode)
    abort ();

  last_insn = emit_move_insn_1 (x, y);

  if (y_cst && GET_CODE (x) == REG)
    set_unique_reg_note (last_insn, REG_EQUAL, y_cst);

  return last_insn;
}

/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx
emit_move_insn_1 (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);

  if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
    abort ();

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));

  /* Expand complex moves by moving real part and imag part, if possible.  */
  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
	   && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
						    * BITS_PER_UNIT),
						   (class == MODE_COMPLEX_INT
						    ? MODE_INT : MODE_FLOAT),
						   0))
	   && (mov_optab->handlers[(int) submode].insn_code
	       != CODE_FOR_nothing))
    {
      /* Don't split destination if it is a stack push.  */
      int stack = push_operand (x, GET_MODE (x));

#ifdef PUSH_ROUNDING
      /* In case we output to the stack, but the size is smaller than the
	 machine can push exactly, we need to use move instructions.  */
      if (stack
	  && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
	      != GET_MODE_SIZE (submode)))
	{
	  rtx temp;
	  HOST_WIDE_INT offset1, offset2;

	  /* Do not use anti_adjust_stack, since we don't want to update
	     stack_pointer_delta.  */
	  temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
			       sub_optab,
#else
			       add_optab,
#endif
			       stack_pointer_rtx,
			       GEN_INT
				 (PUSH_ROUNDING
				  (GET_MODE_SIZE (GET_MODE (x)))),
			       stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);

	  if (temp != stack_pointer_rtx)
	    emit_move_insn (stack_pointer_rtx, temp);

#ifdef STACK_GROWS_DOWNWARD
	  offset1 = 0;
	  offset2 = GET_MODE_SIZE (submode);
#else
	  offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
	  offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
		     + GET_MODE_SIZE (submode));
#endif

	  emit_move_insn (change_address (x, submode,
					  gen_rtx_PLUS (Pmode,
							stack_pointer_rtx,
							GEN_INT (offset1))),
			  gen_realpart (submode, y));
	  emit_move_insn (change_address (x, submode,
					  gen_rtx_PLUS (Pmode,
							stack_pointer_rtx,
							GEN_INT (offset2))),
			  gen_imagpart (submode, y));
	}
      else
#endif
      /* If this is a stack, push the highpart first, so it
	 will be in the argument order.

	 In that case, change_address is used only to convert
	 the mode, not to change the address.  */
      if (stack)
	{
	  /* Note that the real part always precedes the imag part in memory
	     regardless of machine's endianness.  */
#ifdef STACK_GROWS_DOWNWARD
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx_MEM (submode, XEXP (x, 0)),
		      gen_imagpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx_MEM (submode, XEXP (x, 0)),
		      gen_realpart (submode, y)));
#else
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx_MEM (submode, XEXP (x, 0)),
		      gen_realpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx_MEM (submode, XEXP (x, 0)),
		      gen_imagpart (submode, y)));
#endif
	}
      else
	{
	  rtx realpart_x, realpart_y;
	  rtx imagpart_x, imagpart_y;

	  /* If this is a complex value with each part being smaller than a
	     word, the usual calling sequence will likely pack the pieces into
	     a single register.  Unfortunately, SUBREG of hard registers only
	     deals in terms of words, so we have a problem converting input
	     arguments to the CONCAT of two registers that is used elsewhere
	     for complex values.  If this is before reload, we can copy it into
	     memory and reload.  FIXME, we should see about using extract and
	     insert on integer registers, but complex short and complex char
	     variables should be rarely used.  */
	  if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
	      && (reload_in_progress | reload_completed) == 0)
	    {
	      int packed_dest_p
		= (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
	      int packed_src_p
		= (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);

	      if (packed_dest_p || packed_src_p)
		{
		  enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
					       ? MODE_FLOAT : MODE_INT);

		  enum machine_mode reg_mode
		    = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);

		  if (reg_mode != BLKmode)
		    {
		      rtx mem = assign_stack_temp (reg_mode,
						   GET_MODE_SIZE (mode), 0);
		      rtx cmem = adjust_address (mem, mode, 0);

		      cfun->cannot_inline
			= N_("function using short complex types cannot be inline");

		      if (packed_dest_p)
			{
			  rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);

			  emit_move_insn_1 (cmem, y);
			  return emit_move_insn_1 (sreg, mem);
			}
		      else
			{
			  rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);

			  emit_move_insn_1 (mem, sreg);
			  return emit_move_insn_1 (x, cmem);
			}
		    }
		}
	    }

	  realpart_x = gen_realpart (submode, x);
	  realpart_y = gen_realpart (submode, y);
	  imagpart_x = gen_imagpart (submode, x);
	  imagpart_y = gen_imagpart (submode, y);

	  /* Show the output dies here.  This is necessary for SUBREGs
	     of pseudos since we cannot track their lifetimes correctly;
	     hard regs shouldn't appear here except as return values.
	     We never want to emit such a clobber after reload.  */
	  if (x != y
	      && ! (reload_in_progress || reload_completed)
	      && (GET_CODE (realpart_x) == SUBREG
		  || GET_CODE (imagpart_x) == SUBREG))
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, x));

	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (realpart_x, realpart_y));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (imagpart_x, imagpart_y));
	}

      return get_last_insn ();
    }

  /* This will handle any multi-word mode that lacks a move_insn pattern.
     However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    {
      rtx last_insn = 0;
      rtx seq, inner;
      int need_clobber;
      int i;

#ifdef PUSH_ROUNDING

      /* If X is a push on the stack, do the push now and replace
	 X with a reference to the stack pointer.  */
      if (push_operand (x, GET_MODE (x)))
	{
	  rtx temp;
	  enum rtx_code code;

	  /* Do not use anti_adjust_stack, since we don't want to update
	     stack_pointer_delta.  */
	  temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
			       sub_optab,
#else
			       add_optab,
#endif
			       stack_pointer_rtx,
			       GEN_INT
				 (PUSH_ROUNDING
				  (GET_MODE_SIZE (GET_MODE (x)))),
			       stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);

	  if (temp != stack_pointer_rtx)
	    emit_move_insn (stack_pointer_rtx, temp);

	  code = GET_CODE (XEXP (x, 0));

	  /* Just hope that small offsets off SP are OK.  */
	  if (code == POST_INC)
	    temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
				 GEN_INT (-((HOST_WIDE_INT)
					    GET_MODE_SIZE (GET_MODE (x)))));
	  else if (code == POST_DEC)
	    temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
				 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
	  else
	    temp = stack_pointer_rtx;

	  x = change_address (x, VOIDmode, temp);
	}
#endif

      /* If we are in reload, see if either operand is a MEM whose address
	 is scheduled for replacement.  */
      if (reload_in_progress && GET_CODE (x) == MEM
	  && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
	x = replace_equiv_address_nv (x, inner);
      if (reload_in_progress && GET_CODE (y) == MEM
	  && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
	y = replace_equiv_address_nv (y, inner);

      start_sequence ();

      need_clobber = 0;
      for (i = 0;
	   i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
	   i++)
	{
	  rtx xpart = operand_subword (x, i, 1, mode);
	  rtx ypart = operand_subword (y, i, 1, mode);

	  /* If we can't get a part of Y, put Y into memory if it is a
	     constant.  Otherwise, force it into a register.  If we still
	     can't get a part of Y, abort.  */
	  if (ypart == 0 && CONSTANT_P (y))
	    {
	      y = force_const_mem (mode, y);
	      ypart = operand_subword (y, i, 1, mode);
	    }
	  else if (ypart == 0)
	    ypart = operand_subword_force (y, i, mode);

	  if (xpart == 0 || ypart == 0)
	    abort ();

	  need_clobber |= (GET_CODE (xpart) == SUBREG);

	  last_insn = emit_move_insn (xpart, ypart);
	}

      seq = gen_sequence ();
      end_sequence ();

      /* Show the output dies here.  This is necessary for SUBREGs
	 of pseudos since we cannot track their lifetimes correctly;
	 hard regs shouldn't appear here except as return values.
	 We never want to emit such a clobber after reload.  */
      if (x != y
	  && ! (reload_in_progress || reload_completed)
	  && need_clobber != 0)
	emit_insn (gen_rtx_CLOBBER (VOIDmode, x));

      emit_insn (seq);

      return last_insn;
    }
  else
    abort ();
}
\f
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

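/* For instance (an illustrative sketch), push_block (GEN_INT (16), 4, 0)
   makes room for 20 bytes and returns the address of the 16-byte block,
   the 4 bytes of padding lying at the high-address end.  */
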
rtx
push_block (size, extra, below)
     rtx size;
     int extra, below;
{
  rtx temp;

  size = convert_modes (Pmode, ptr_mode, size, 1);
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);
  else
    {
      temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
	temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
			     temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

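  /* The block just allocated is addressed relative to
     virtual_outgoing_args_rtx; the preprocessed if (0) / if (1) below picks
     at build time whether that address can be used directly (stack grows
     downward) or whether the block starts SIZE bytes before it (stack
     grows upward).  */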
#ifndef STACK_GROWS_DOWNWARD
  if (0)
#else
  if (1)
#endif
    {
      temp = virtual_outgoing_args_rtx;
      if (extra != 0 && below)
	temp = plus_constant (temp, extra);
    }
  else
    {
      if (GET_CODE (size) == CONST_INT)
	temp = plus_constant (virtual_outgoing_args_rtx,
			      -INTVAL (size) - (below ? 0 : extra));
      else if (extra != 0 && !below)
	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
			     negate_rtx (Pmode, plus_constant (size, extra)));
      else
	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
			     negate_rtx (Pmode, size));
    }

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}

#ifdef PUSH_ROUNDING

/* Emit single push insn.  */

static void
emit_single_push_insn (mode, x, type)
     rtx x;
     enum machine_mode mode;
     tree type;
{
  rtx dest_addr;
  unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
  rtx dest;
  enum insn_code icode;
  insn_operand_predicate_fn pred;

  stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
  /* If there is a push pattern, use it.  Otherwise try the old way of
     throwing a MEM representing the push operation to the move expander.  */
  icode = push_optab->handlers[(int) mode].insn_code;
  if (icode != CODE_FOR_nothing)
    {
      if (((pred = insn_data[(int) icode].operand[0].predicate)
	   && !((*pred) (x, mode))))
	x = force_reg (mode, x);
      emit_insn (GEN_FCN (icode) (x));
      return;
    }
  if (GET_MODE_SIZE (mode) == rounded_size)
    dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
  else
    {
#ifdef STACK_GROWS_DOWNWARD
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
				GEN_INT (-(HOST_WIDE_INT) rounded_size));
#else
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
				GEN_INT (rounded_size));
#endif
      dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
    }

  dest = gen_rtx_MEM (mode, dest_addr);

  if (type != 0)
    {
      set_mem_attributes (dest, type, 1);

      if (flag_optimize_sibling_calls)
	/* Function incoming arguments may overlap with sibling call
	   outgoing arguments and we cannot allow reordering of reads
	   from function arguments with stores to outgoing arguments
	   of sibling calls.  */
	set_mem_alias_set (dest, 0);
    }
  emit_move_insn (dest, x);
}
#endif

/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bits) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   words of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.

   REG_PARM_STACK_SPACE is nonzero if functions require stack space
   for arguments passed in registers.  If nonzero, it will be the number
   of bytes required.  */
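
/* For example (an illustrative sketch, assuming 32-bit words): pushing a
   12-byte BLKmode argument with PARTIAL == 1 copies the first word into
   REG and pushes only the remaining 8 bytes, USED below then being
   PARTIAL * UNITS_PER_WORD == 4.  */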
bbf6f052
RK
3236
3237void
3238emit_push_insn (x, mode, type, size, align, partial, reg, extra,
4fc026cd
CM
3239 args_addr, args_so_far, reg_parm_stack_space,
3240 alignment_pad)
b3694847 3241 rtx x;
bbf6f052
RK
3242 enum machine_mode mode;
3243 tree type;
3244 rtx size;
729a2125 3245 unsigned int align;
bbf6f052
RK
3246 int partial;
3247 rtx reg;
3248 int extra;
3249 rtx args_addr;
3250 rtx args_so_far;
e5e809f4 3251 int reg_parm_stack_space;
4fc026cd 3252 rtx alignment_pad;
bbf6f052
RK
3253{
3254 rtx xinner;
3255 enum direction stack_direction
3256#ifdef STACK_GROWS_DOWNWARD
3257 = downward;
3258#else
3259 = upward;
3260#endif
3261
3262 /* Decide where to pad the argument: `downward' for below,
3263 `upward' for above, or `none' for don't pad it.
3264 Default is below for small data on big-endian machines; else above. */
3265 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3266
9e0e11bf
GK
3267 /* Invert direction if stack is post-decrement.
3268 FIXME: why? */
3269 if (STACK_PUSH_CODE == POST_DEC)
bbf6f052
RK
3270 if (where_pad != none)
3271 where_pad = (where_pad == downward ? upward : downward);
3272
3273 xinner = x = protect_from_queue (x, 0);
3274
3275 if (mode == BLKmode)
3276 {
3277 /* Copy a block into the stack, entirely or partially. */
3278
b3694847 3279 rtx temp;
bbf6f052
RK
3280 int used = partial * UNITS_PER_WORD;
3281 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3282 int skip;
3a94c984 3283
bbf6f052
RK
3284 if (size == 0)
3285 abort ();
3286
3287 used -= offset;
3288
3289 /* USED is now the # of bytes we need not copy to the stack
3290 because registers will take care of them. */
3291
3292 if (partial != 0)
f4ef873c 3293 xinner = adjust_address (xinner, BLKmode, used);
bbf6f052
RK
3294
3295 /* If the partial register-part of the arg counts in its stack size,
3296 skip the part of stack space corresponding to the registers.
3297 Otherwise, start copying to the beginning of the stack space,
3298 by setting SKIP to 0. */
e5e809f4 3299 skip = (reg_parm_stack_space == 0) ? 0 : used;
bbf6f052
RK
3300
3301#ifdef PUSH_ROUNDING
3302 /* Do it with several push insns if that doesn't take lots of insns
3303 and if there is no difficulty with push insns that skip bytes
3304 on the stack for alignment purposes. */
3305 if (args_addr == 0
f73ad30e 3306 && PUSH_ARGS
bbf6f052
RK
3307 && GET_CODE (size) == CONST_INT
3308 && skip == 0
15914757 3309 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
bbf6f052
RK
3310 /* Here we avoid the case of a structure whose weak alignment
3311 forces many pushes of a small amount of data,
3312 and such small pushes do rounding that causes trouble. */
e1565e65 3313 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
19caa751 3314 || align >= BIGGEST_ALIGNMENT
f1eaaf73
DE
3315 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3316 == (align / BITS_PER_UNIT)))
bbf6f052
RK
3317 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3318 {
3319 /* Push padding now if padding above and stack grows down,
3320 or if padding below and stack grows up.
3321 But if space already allocated, this has already been done. */
3322 if (extra && args_addr == 0
3323 && where_pad != none && where_pad != stack_direction)
906c4e36 3324 anti_adjust_stack (GEN_INT (extra));
bbf6f052 3325
566aa174 3326 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
bbf6f052
RK
3327 }
3328 else
3a94c984 3329#endif /* PUSH_ROUNDING */
bbf6f052 3330 {
7ab923cc
JJ
3331 rtx target;
3332
bbf6f052
RK
3333 /* Otherwise make space on the stack and copy the data
3334 to the address of that space. */
3335
3336 /* Deduct words put into registers from the size we must copy. */
3337 if (partial != 0)
3338 {
3339 if (GET_CODE (size) == CONST_INT)
906c4e36 3340 size = GEN_INT (INTVAL (size) - used);
bbf6f052
RK
3341 else
3342 size = expand_binop (GET_MODE (size), sub_optab, size,
3343 GEN_INT (used), NULL_RTX, 0,
3344 OPTAB_LIB_WIDEN);
3345 }
3346
3347 /* Get the address of the stack space.
3348 In this case, we do not deal with EXTRA separately.
3349 A single stack adjust will do. */
3350 if (! args_addr)
3351 {
3352 temp = push_block (size, extra, where_pad == downward);
3353 extra = 0;
3354 }
3355 else if (GET_CODE (args_so_far) == CONST_INT)
3356 temp = memory_address (BLKmode,
3357 plus_constant (args_addr,
3358 skip + INTVAL (args_so_far)));
3359 else
3360 temp = memory_address (BLKmode,
3361 plus_constant (gen_rtx_PLUS (Pmode,
3362 args_addr,
3363 args_so_far),
bbf6f052 3364 skip));
3a94c984 3365 target = gen_rtx_MEM (BLKmode, temp);
7ab923cc 3366
3367 if (type != 0)
3368 {
3369 set_mem_attributes (target, type, 1);
3370 /* Function incoming arguments may overlap with sibling call
3371 outgoing arguments and we cannot allow reordering of reads
3372 from function arguments with stores to outgoing arguments
3373 of sibling calls. */
ba4828e0 3374 set_mem_alias_set (target, 0);
3a94c984 3375 }
3376 else
3377 set_mem_align (target, align);
7ab923cc 3378
3379 /* TEMP is the address of the block. Copy the data there. */
3380 if (GET_CODE (size) == CONST_INT
729a2125 3381 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
bbf6f052 3382 {
7ab923cc 3383 move_by_pieces (target, xinner, INTVAL (size), align);
3384 goto ret;
3385 }
e5e809f4 3386 else
bbf6f052 3387 {
19caa751 3388 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
e5e809f4 3389 enum machine_mode mode;
3bdf5ad1 3390
3391 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3392 mode != VOIDmode;
3393 mode = GET_MODE_WIDER_MODE (mode))
c841050e 3394 {
e5e809f4 3395 enum insn_code code = movstr_optab[(int) mode];
a995e389 3396 insn_operand_predicate_fn pred;
3397
3398 if (code != CODE_FOR_nothing
3399 && ((GET_CODE (size) == CONST_INT
3400 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3401 <= (GET_MODE_MASK (mode) >> 1)))
3402 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3403 && (!(pred = insn_data[(int) code].operand[0].predicate)
3404 || ((*pred) (target, BLKmode)))
3405 && (!(pred = insn_data[(int) code].operand[1].predicate)
3406 || ((*pred) (xinner, BLKmode)))
3407 && (!(pred = insn_data[(int) code].operand[3].predicate)
3408 || ((*pred) (opalign, VOIDmode))))
3409 {
3410 rtx op2 = convert_to_mode (mode, size, 1);
3411 rtx last = get_last_insn ();
3412 rtx pat;
3413
3414 pred = insn_data[(int) code].operand[2].predicate;
3415 if (pred != 0 && ! (*pred) (op2, mode))
3416 op2 = copy_to_mode_reg (mode, op2);
3417
3418 pat = GEN_FCN ((int) code) (target, xinner,
3419 op2, opalign);
3420 if (pat)
3421 {
3422 emit_insn (pat);
3423 goto ret;
3424 }
3425 else
3426 delete_insns_since (last);
3427 }
c841050e 3428 }
bbf6f052 3429 }
bbf6f052 3430
3431 if (!ACCUMULATE_OUTGOING_ARGS)
3432 {
3433 /* If the source is referenced relative to the stack pointer,
3434 copy it to another register to stabilize it. We do not need
3435 to do this if we know that we won't be changing sp. */
bbf6f052 3436
3437 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3438 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3439 temp = copy_to_reg (temp);
3440 }
3441
3442 /* Make inhibit_defer_pop nonzero around the library call
3443 to force it to pop the bcopy-arguments right away. */
3444 NO_DEFER_POP;
3445#ifdef TARGET_MEM_FUNCTIONS
ebb1b59a 3446 emit_library_call (memcpy_libfunc, LCT_NORMAL,
bbf6f052 3447 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3448 convert_to_mode (TYPE_MODE (sizetype),
3449 size, TREE_UNSIGNED (sizetype)),
26ba80fc 3450 TYPE_MODE (sizetype));
bbf6f052 3451#else
ebb1b59a 3452 emit_library_call (bcopy_libfunc, LCT_NORMAL,
bbf6f052 3453 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3454 convert_to_mode (TYPE_MODE (integer_type_node),
3455 size,
3456 TREE_UNSIGNED (integer_type_node)),
3457 TYPE_MODE (integer_type_node));
3458#endif
3459 OK_DEFER_POP;
3460 }
3461 }
3462 else if (partial > 0)
3463 {
3464 /* Scalar partly in registers. */
3465
3466 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3467 int i;
3468 int not_stack;
3469 /* # words of start of argument
3470 that we must make space for but need not store. */
3471 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3472 int args_offset = INTVAL (args_so_far);
3473 int skip;
3474
3475 /* Push padding now if padding above and stack grows down,
3476 or if padding below and stack grows up.
3477 But if space already allocated, this has already been done. */
3478 if (extra && args_addr == 0
3479 && where_pad != none && where_pad != stack_direction)
906c4e36 3480 anti_adjust_stack (GEN_INT (extra));
3481
3482 /* If we make space by pushing it, we might as well push
3483 the real data. Otherwise, we can leave OFFSET nonzero
3484 and leave the space uninitialized. */
3485 if (args_addr == 0)
3486 offset = 0;
3487
3488 /* Now NOT_STACK gets the number of words that we don't need to
3489 allocate on the stack. */
3490 not_stack = partial - offset;
3491
3492 /* If the partial register-part of the arg counts in its stack size,
3493 skip the part of stack space corresponding to the registers.
3494 Otherwise, start copying to the beginning of the stack space,
3495 by setting SKIP to 0. */
e5e809f4 3496 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3497
3498 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3499 x = validize_mem (force_const_mem (mode, x));
3500
3501 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3502 SUBREGs of such registers are not allowed. */
3503 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3504 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3505 x = copy_to_reg (x);
3506
3507 /* Loop over all the words allocated on the stack for this arg. */
3508 /* We can do it by words, because any scalar bigger than a word
3509 has a size a multiple of a word. */
3510#ifndef PUSH_ARGS_REVERSED
3511 for (i = not_stack; i < size; i++)
3512#else
3513 for (i = size - 1; i >= not_stack; i--)
3514#endif
3515 if (i >= not_stack + offset)
3516 emit_push_insn (operand_subword_force (x, i, mode),
3517 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3518 0, args_addr,
3519 GEN_INT (args_offset + ((i - not_stack + skip)
e5e809f4 3520 * UNITS_PER_WORD)),
4fc026cd 3521 reg_parm_stack_space, alignment_pad);
3522 }
3523 else
3524 {
3525 rtx addr;
921b3427 3526 rtx target = NULL_RTX;
3bdf5ad1 3527 rtx dest;
3528
3529 /* Push padding now if padding above and stack grows down,
3530 or if padding below and stack grows up.
3531 But if space already allocated, this has already been done. */
3532 if (extra && args_addr == 0
3533 && where_pad != none && where_pad != stack_direction)
906c4e36 3534 anti_adjust_stack (GEN_INT (extra));
3535
3536#ifdef PUSH_ROUNDING
f73ad30e 3537 if (args_addr == 0 && PUSH_ARGS)
566aa174 3538 emit_single_push_insn (mode, x, type);
3539 else
3540#endif
3541 {
3542 if (GET_CODE (args_so_far) == CONST_INT)
3543 addr
3544 = memory_address (mode,
3a94c984 3545 plus_constant (args_addr,
921b3427 3546 INTVAL (args_so_far)));
3a94c984 3547 else
3548 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3549 args_so_far));
921b3427 3550 target = addr;
3551 dest = gen_rtx_MEM (mode, addr);
3552 if (type != 0)
3553 {
3554 set_mem_attributes (dest, type, 1);
3555 /* Function incoming arguments may overlap with sibling call
3556 outgoing arguments and we cannot allow reordering of reads
3557 from function arguments with stores to outgoing arguments
3558 of sibling calls. */
ba4828e0 3559 set_mem_alias_set (dest, 0);
566aa174 3560 }
bbf6f052 3561
566aa174 3562 emit_move_insn (dest, x);
566aa174 3563 }
921b3427 3564
3565 }
3566
3567 ret:
3568 /* If part should go in registers, copy that part
3569 into the appropriate registers. Do this now, at the end,
3570 since mem-to-mem copies above may do function calls. */
cd048831 3571 if (partial > 0 && reg != 0)
3572 {
3573 /* Handle calls that pass values in multiple non-contiguous locations.
3574 The Irix 6 ABI has examples of this. */
3575 if (GET_CODE (reg) == PARALLEL)
04050c69 3576 emit_group_load (reg, x, -1); /* ??? size? */
3577 else
3578 move_block_to_reg (REGNO (reg), x, partial, mode);
3579 }
3580
3581 if (extra && args_addr == 0 && where_pad == stack_direction)
906c4e36 3582 anti_adjust_stack (GEN_INT (extra));
3a94c984 3583
3ea2292a 3584 if (alignment_pad && args_addr == 0)
4fc026cd 3585 anti_adjust_stack (alignment_pad);
3586}
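/* Reader's note (annotation, not part of the original expr.c source):
   the function above has three main paths.  BLKmode arguments are
   copied onto the stack with move_by_pieces, with a matching movstr
   expander, or with a memcpy/bcopy library call as a last resort.
   Scalars that live partly in registers are pushed one word at a time
   through the recursive emit_push_insn calls.  Plain scalars go
   through emit_single_push_insn or an ordinary emit_move_insn into
   computed stack memory.  All paths share the final register copy and
   the EXTRA/ALIGNMENT_PAD anti_adjust_stack fixups at the `ret' label.  */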
3587\f
3588/* Return X if X can be used as a subtarget in a sequence of arithmetic
3589 operations. */
3590
3591static rtx
3592get_subtarget (x)
3593 rtx x;
3594{
3595 return ((x == 0
3596 /* Only registers can be subtargets. */
3597 || GET_CODE (x) != REG
3598 /* If the register is readonly, it can't be set more than once. */
3599 || RTX_UNCHANGING_P (x)
3600 /* Don't use hard regs to avoid extending their life. */
3601 || REGNO (x) < FIRST_PSEUDO_REGISTER
3602 /* Avoid subtargets inside loops,
3603 since they hide some invariant expressions. */
3604 || preserve_subexpressions_p ())
3605 ? 0 : x);
3606}
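/* Reader's note (annotation, not part of the original expr.c source):
   the inverted test above means get_subtarget hands X back only when
   X is a writable pseudo register and preserve_subexpressions_p is
   false (roughly, outside of loops); any failing condition yields 0,
   telling the caller to let expand_expr pick a fresh temporary.  A
   minimal, hypothetical caller sketch:

     rtx op0 = expand_expr (inner, get_subtarget (target), VOIDmode, 0);
*/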
3607
3608/* Expand an assignment that stores the value of FROM into TO.
3609 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3610 (This may contain a QUEUED rtx;
3611 if the value is constant, this rtx is a constant.)
3612 Otherwise, the returned value is NULL_RTX.
3613
3614 SUGGEST_REG is no longer actually used.
3615 It used to mean, copy the value through a register
3616 and return that register, if that is possible.
709f5be1 3617 We now use WANT_VALUE to decide whether to do this. */
3618
3619rtx
3620expand_assignment (to, from, want_value, suggest_reg)
3621 tree to, from;
3622 int want_value;
c5c76735 3623 int suggest_reg ATTRIBUTE_UNUSED;
bbf6f052 3624{
b3694847 3625 rtx to_rtx = 0;
3626 rtx result;
3627
3628 /* Don't crash if the lhs of the assignment was erroneous. */
3629
3630 if (TREE_CODE (to) == ERROR_MARK)
3631 {
3632 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3633 return want_value ? result : NULL_RTX;
3634 }
3635
3636 /* Assignment of a structure component needs special treatment
3637 if the structure component's rtx is not simply a MEM.
3638 Assignment of an array element at a constant index, and assignment of
3639 an array element in an unaligned packed structure field, has the same
3640 problem. */
bbf6f052 3641
08293add 3642 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
b4e3fabb 3643 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
3644 {
3645 enum machine_mode mode1;
770ae6cc 3646 HOST_WIDE_INT bitsize, bitpos;
a06ef755 3647 rtx orig_to_rtx;
7bb0943f 3648 tree offset;
3649 int unsignedp;
3650 int volatilep = 0;
3651 tree tem;
3652
3653 push_temp_slots ();
839c4796 3654 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
a06ef755 3655 &unsignedp, &volatilep);
3656
3657 /* If we are going to use store_bit_field and extract_bit_field,
3658 make sure to_rtx will be safe for multiple use. */
3659
3660 if (mode1 == VOIDmode && want_value)
3661 tem = stabilize_reference (tem);
3662
3663 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3664
3665 if (offset != 0)
3666 {
e3c8ea67 3667 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3668
3669 if (GET_CODE (to_rtx) != MEM)
3670 abort ();
3671
3672 if (GET_MODE (offset_rtx) != ptr_mode)
3673 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3674
bd070e1a 3675#ifdef POINTERS_EXTEND_UNSIGNED
3676 if (GET_MODE (offset_rtx) != Pmode)
3677 offset_rtx = convert_memory_address (Pmode, offset_rtx);
bd070e1a 3678#endif
bd070e1a 3679
3680 /* A constant address in TO_RTX can have VOIDmode, we must not try
3681 to call force_reg for that case. Avoid that case. */
3682 if (GET_CODE (to_rtx) == MEM
3683 && GET_MODE (to_rtx) == BLKmode
9a7b9f4f 3684 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
a06ef755 3685 && bitsize > 0
3a94c984 3686 && (bitpos % bitsize) == 0
89752202 3687 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
a06ef755 3688 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
89752202 3689 {
e3c8ea67 3690 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3691 bitpos = 0;
3692 }
3693
3694 to_rtx = offset_address (to_rtx, offset_rtx,
3695 highest_pow2_factor (offset));
7bb0943f 3696 }
c5c76735 3697
3698 if (GET_CODE (to_rtx) == MEM)
3699 {
3700 tree old_expr = MEM_EXPR (to_rtx);
3701
3702 /* If the field is at offset zero, we could have been given the
3703 DECL_RTX of the parent struct. Don't munge it. */
3704 to_rtx = shallow_copy_rtx (to_rtx);
3705
3706 set_mem_attributes (to_rtx, to, 0);
3707
3708 /* If we changed MEM_EXPR, that means we're now referencing
3709 the COMPONENT_REF, which means that MEM_OFFSET must be
3710 relative to that field. But we've not yet reflected BITPOS
3711 in TO_RTX. This will be done in store_field. Adjust for
3712 that by biasing MEM_OFFSET by -bitpos. */
3713 if (MEM_EXPR (to_rtx) != old_expr && MEM_OFFSET (to_rtx)
3714 && (bitpos / BITS_PER_UNIT) != 0)
3715 set_mem_offset (to_rtx, GEN_INT (INTVAL (MEM_OFFSET (to_rtx))
3716 - (bitpos / BITS_PER_UNIT)));
3717 }
effbcc6a 3718
3719 /* Deal with volatile and readonly fields. The former is only done
3720 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3721 if (volatilep && GET_CODE (to_rtx) == MEM)
3722 {
3723 if (to_rtx == orig_to_rtx)
3724 to_rtx = copy_rtx (to_rtx);
3725 MEM_VOLATILE_P (to_rtx) = 1;
3726 }
3727
3728 if (TREE_CODE (to) == COMPONENT_REF
3729 && TREE_READONLY (TREE_OPERAND (to, 1)))
3730 {
a06ef755 3731 if (to_rtx == orig_to_rtx)
956d6950 3732 to_rtx = copy_rtx (to_rtx);
3733 RTX_UNCHANGING_P (to_rtx) = 1;
3734 }
3735
a84b4898 3736 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
3737 {
3738 if (to_rtx == orig_to_rtx)
3739 to_rtx = copy_rtx (to_rtx);
3740 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3741 }
3742
3743 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3744 (want_value
3745 /* Spurious cast for HPUX compiler. */
3746 ? ((enum machine_mode)
3747 TYPE_MODE (TREE_TYPE (to)))
3748 : VOIDmode),
3749 unsignedp, TREE_TYPE (tem), get_alias_set (to));
a69beca1 3750
3751 preserve_temp_slots (result);
3752 free_temp_slots ();
3753 pop_temp_slots ();
a69beca1 3754
3755 /* If the value is meaningful, convert RESULT to the proper mode.
3756 Otherwise, return nothing. */
3757 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3758 TYPE_MODE (TREE_TYPE (from)),
3759 result,
3760 TREE_UNSIGNED (TREE_TYPE (to)))
3761 : NULL_RTX);
3762 }
3763
3764 /* If the rhs is a function call and its value is not an aggregate,
3765 call the function before we start to compute the lhs.
3766 This is needed for correct code for cases such as
3767 val = setjmp (buf) on machines where reference to val
3768 requires loading up part of an address in a separate insn.
3769
3770 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3771 since it might be a promoted variable where the zero- or sign- extension
3772 needs to be done. Handling this in the normal way is safe because no
3773 computation is done before the call. */
1ad87b63 3774 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
b35cd3c1 3775 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3776 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3777 && GET_CODE (DECL_RTL (to)) == REG))
cd1db108 3778 {
3779 rtx value;
3780
3781 push_temp_slots ();
3782 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
cd1db108 3783 if (to_rtx == 0)
37a08a29 3784 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
aaf87c45 3785
3786 /* Handle calls that return values in multiple non-contiguous locations.
3787 The Irix 6 ABI has examples of this. */
3788 if (GET_CODE (to_rtx) == PARALLEL)
04050c69 3789 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
fffa9c1d 3790 else if (GET_MODE (to_rtx) == BLKmode)
8ac61af7 3791 emit_block_move (to_rtx, value, expr_size (from));
aaf87c45 3792 else
3793 {
3794#ifdef POINTERS_EXTEND_UNSIGNED
3795 if (POINTER_TYPE_P (TREE_TYPE (to))
3796 && GET_MODE (to_rtx) != GET_MODE (value))
3797 value = convert_memory_address (GET_MODE (to_rtx), value);
3798#endif
3799 emit_move_insn (to_rtx, value);
3800 }
3801 preserve_temp_slots (to_rtx);
3802 free_temp_slots ();
0088fcb1 3803 pop_temp_slots ();
709f5be1 3804 return want_value ? to_rtx : NULL_RTX;
cd1db108
RS
3805 }
3806
3807 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3808 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3809
3810 if (to_rtx == 0)
37a08a29 3811 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
bbf6f052 3812
86d38d25 3813 /* Don't move directly into a return register. */
3814 if (TREE_CODE (to) == RESULT_DECL
3815 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
86d38d25 3816 {
3817 rtx temp;
3818
3819 push_temp_slots ();
3820 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3821
3822 if (GET_CODE (to_rtx) == PARALLEL)
04050c69 3823 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
3824 else
3825 emit_move_insn (to_rtx, temp);
3826
3827 preserve_temp_slots (to_rtx);
3828 free_temp_slots ();
0088fcb1 3829 pop_temp_slots ();
709f5be1 3830 return want_value ? to_rtx : NULL_RTX;
3831 }
3832
3833 /* In case we are returning the contents of an object which overlaps
3834 the place the value is being stored, use a safe function when copying
3835 a value through a pointer into a structure value return block. */
3836 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3837 && current_function_returns_struct
3838 && !current_function_returns_pcc_struct)
3839 {
3840 rtx from_rtx, size;
3841
3842 push_temp_slots ();
33a20d10 3843 size = expr_size (from);
37a08a29 3844 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3845
3846#ifdef TARGET_MEM_FUNCTIONS
b215b52e 3847 emit_library_call (memmove_libfunc, LCT_NORMAL,
3848 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3849 XEXP (from_rtx, 0), Pmode,
3850 convert_to_mode (TYPE_MODE (sizetype),
3851 size, TREE_UNSIGNED (sizetype)),
26ba80fc 3852 TYPE_MODE (sizetype));
bbf6f052 3853#else
ebb1b59a 3854 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3855 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3856 XEXP (to_rtx, 0), Pmode,
3857 convert_to_mode (TYPE_MODE (integer_type_node),
3858 size, TREE_UNSIGNED (integer_type_node)),
3859 TYPE_MODE (integer_type_node));
3860#endif
3861
3862 preserve_temp_slots (to_rtx);
3863 free_temp_slots ();
0088fcb1 3864 pop_temp_slots ();
709f5be1 3865 return want_value ? to_rtx : NULL_RTX;
3866 }
3867
3868 /* Compute FROM and store the value in the rtx we got. */
3869
0088fcb1 3870 push_temp_slots ();
3871 result = store_expr (from, to_rtx, want_value);
3872 preserve_temp_slots (result);
3873 free_temp_slots ();
0088fcb1 3874 pop_temp_slots ();
709f5be1 3875 return want_value ? result : NULL_RTX;
3876}
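/* Reader's note (annotation, not part of the original expr.c source):
   in outline, expand_assignment handles four cases.  Component,
   array, and bit-field references on the left-hand side go through
   get_inner_reference and store_field.  A non-aggregate CALL_EXPR on
   the right is expanded before the left-hand side is touched (the
   setjmp case described above).  Stores into a RESULT_DECL get the
   two special cases for return registers and overlapping structure
   returns.  Everything else falls through to store_expr.  */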
3877
3878/* Generate code for computing expression EXP,
3879 and storing the value into TARGET.
3880 TARGET may contain a QUEUED rtx.
3881
3882 If WANT_VALUE is nonzero, return a copy of the value
3883 not in TARGET, so that we can be sure to use the proper
3884 value in a containing expression even if TARGET has something
3885 else stored in it. If possible, we copy the value through a pseudo
3886 and return that pseudo. Or, if the value is constant, we try to
3887 return the constant. In some cases, we return a pseudo
3888 copied *from* TARGET.
3889
3890 If the mode is BLKmode then we may return TARGET itself.
3891 It turns out that in BLKmode it doesn't cause a problem,
3892 because C has no operators that could combine two different
3893 assignments into the same BLKmode object with different values
3894 with no sequence point. Will other languages need this to
3895 be more thorough?
3896
3897 If WANT_VALUE is 0, we return NULL, to make sure
3898 to catch quickly any cases where the caller uses the value
3899 and fails to set WANT_VALUE. */
3900
3901rtx
709f5be1 3902store_expr (exp, target, want_value)
3903 tree exp;
3904 rtx target;
709f5be1 3905 int want_value;
bbf6f052 3906{
b3694847 3907 rtx temp;
bbf6f052 3908 int dont_return_target = 0;
e5408e52 3909 int dont_store_target = 0;
3910
3911 if (TREE_CODE (exp) == COMPOUND_EXPR)
3912 {
3913 /* Perform first part of compound expression, then assign from second
3914 part. */
3915 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3916 emit_queue ();
709f5be1 3917 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3918 }
3919 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3920 {
3921 /* For conditional expression, get safe form of the target. Then
3922 test the condition, doing the appropriate assignment on either
3923 side. This avoids the creation of unnecessary temporaries.
3924 For non-BLKmode, it is more efficient not to do this. */
3925
3926 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3927
3928 emit_queue ();
3929 target = protect_from_queue (target, 1);
3930
dabf8373 3931 do_pending_stack_adjust ();
3932 NO_DEFER_POP;
3933 jumpifnot (TREE_OPERAND (exp, 0), lab1);
956d6950 3934 start_cleanup_deferral ();
709f5be1 3935 store_expr (TREE_OPERAND (exp, 1), target, 0);
956d6950 3936 end_cleanup_deferral ();
3937 emit_queue ();
3938 emit_jump_insn (gen_jump (lab2));
3939 emit_barrier ();
3940 emit_label (lab1);
956d6950 3941 start_cleanup_deferral ();
709f5be1 3942 store_expr (TREE_OPERAND (exp, 2), target, 0);
956d6950 3943 end_cleanup_deferral ();
3944 emit_queue ();
3945 emit_label (lab2);
3946 OK_DEFER_POP;
a3a58acc 3947
709f5be1 3948 return want_value ? target : NULL_RTX;
bbf6f052 3949 }
bbf6f052 3950 else if (queued_subexp_p (target))
3951 /* If target contains a postincrement, let's not risk
3952 using it as the place to generate the rhs. */
3953 {
3954 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3955 {
3956 /* Expand EXP into a new pseudo. */
3957 temp = gen_reg_rtx (GET_MODE (target));
3958 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3959 }
3960 else
906c4e36 3961 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3962
3963 /* If target is volatile, ANSI requires accessing the value
3964 *from* the target, if it is accessed. So make that happen.
3965 In no case return the target itself. */
3966 if (! MEM_VOLATILE_P (target) && want_value)
3967 dont_return_target = 1;
bbf6f052 3968 }
3969 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3970 && GET_MODE (target) != BLKmode)
3971 /* If target is in memory and caller wants value in a register instead,
3972 arrange that. Pass TARGET as target for expand_expr so that,
3973 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3974 We know expand_expr will not use the target in that case.
3975 Don't do this if TARGET is volatile because we are supposed
3976 to write it and then read it. */
3977 {
1da93fe0 3978 temp = expand_expr (exp, target, GET_MODE (target), 0);
12f06d17 3979 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3980 {
3981 /* If TEMP is already in the desired TARGET, only copy it from
3982 memory and don't store it there again. */
3983 if (temp == target
3984 || (rtx_equal_p (temp, target)
3985 && ! side_effects_p (temp) && ! side_effects_p (target)))
3986 dont_store_target = 1;
3987 temp = copy_to_reg (temp);
3988 }
3989 dont_return_target = 1;
3990 }
3991 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3992 /* If this is a scalar in a register that is stored in a wider mode
3993 than the declared mode, compute the result into its declared mode
3994 and then convert to the wider mode. Our value is the computed
3995 expression. */
3996 {
3997 rtx inner_target = 0;
3998
5a32d038 3999 /* If we don't want a value, we can do the conversion inside EXP,
4000 which will often result in some optimizations. Do the conversion
4001 in two steps: first change the signedness, if needed, then
4002 the extend. But don't do this if the type of EXP is a subtype
4003 of something else since then the conversion might involve
4004 more than just converting modes. */
4005 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4006 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4007 {
4008 if (TREE_UNSIGNED (TREE_TYPE (exp))
4009 != SUBREG_PROMOTED_UNSIGNED_P (target))
4010 exp
4011 = convert
4012 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
4013 TREE_TYPE (exp)),
4014 exp);
4015
4016 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
4017 SUBREG_PROMOTED_UNSIGNED_P (target)),
4018 exp);
4019
4020 inner_target = SUBREG_REG (target);
f635a84d 4021 }
3a94c984 4022
b76b08ef 4023 temp = expand_expr (exp, inner_target, VOIDmode, 0);
b258707c 4024
766f36c7 4025 /* If TEMP is a volatile MEM and we want a result value, make
4026 the access now so it gets done only once. Likewise if
4027 it contains TARGET. */
4028 if (GET_CODE (temp) == MEM && want_value
4029 && (MEM_VOLATILE_P (temp)
4030 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4031 temp = copy_to_reg (temp);
4032
4033 /* If TEMP is a VOIDmode constant, use convert_modes to make
4034 sure that we properly convert it. */
4035 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4036 {
4037 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4038 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4039 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4040 GET_MODE (target), temp,
4041 SUBREG_PROMOTED_UNSIGNED_P (target));
4042 }
b258707c 4043
4044 convert_move (SUBREG_REG (target), temp,
4045 SUBREG_PROMOTED_UNSIGNED_P (target));
4046
4047 /* If we promoted a constant, change the mode back down to match
4048 target. Otherwise, the caller might get confused by a result whose
4049 mode is larger than expected. */
4050
b3ca30df 4051 if (want_value && GET_MODE (temp) != GET_MODE (target))
3dbecef9 4052 {
4053 if (GET_MODE (temp) != VOIDmode)
4054 {
4055 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4056 SUBREG_PROMOTED_VAR_P (temp) = 1;
4057 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4058 SUBREG_PROMOTED_UNSIGNED_P (target));
4059 }
4060 else
4061 temp = convert_modes (GET_MODE (target),
4062 GET_MODE (SUBREG_REG (target)),
4063 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4064 }
4065
709f5be1 4066 return want_value ? temp : NULL_RTX;
1499e0a8 4067 }
4068 else
4069 {
4070 temp = expand_expr (exp, target, GET_MODE (target), 0);
766f36c7 4071 /* Return TARGET if it's a specified hardware register.
4072 If TARGET is a volatile mem ref, either return TARGET
4073 or return a reg copied *from* TARGET; ANSI requires this.
4074
4075 Otherwise, if TEMP is not TARGET, return TEMP
4076 if it is constant (for efficiency),
4077 or if we really want the correct value. */
4078 if (!(target && GET_CODE (target) == REG
4079 && REGNO (target) < FIRST_PSEUDO_REGISTER)
709f5be1 4080 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
effbcc6a 4081 && ! rtx_equal_p (temp, target)
709f5be1 4082 && (CONSTANT_P (temp) || want_value))
4083 dont_return_target = 1;
4084 }
4085
4086 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4087 the same as that of TARGET, adjust the constant. This is needed, for
4088 example, in case it is a CONST_DOUBLE and we want only a word-sized
4089 value. */
4090 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
c1da1f33 4091 && TREE_CODE (exp) != ERROR_MARK
4092 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4093 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4094 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4095
bbf6f052 4096 /* If value was not generated in the target, store it there.
4097 Convert the value to TARGET's type first if necessary.
4098 If TEMP and TARGET compare equal according to rtx_equal_p, but
4099 one or both of them are volatile memory refs, we have to distinguish
4100 two cases:
4101 - expand_expr has used TARGET. In this case, we must not generate
4102 another copy. This can be detected by TARGET being equal according
4103 to == .
4104 - expand_expr has not used TARGET - that means that the source just
4105 happens to have the same RTX form. Since temp will have been created
4106 by expand_expr, it will compare unequal according to == .
4107 We must generate a copy in this case, to reach the correct number
4108 of volatile memory references. */
bbf6f052 4109
6036acbb 4110 if ((! rtx_equal_p (temp, target)
4111 || (temp != target && (side_effects_p (temp)
4112 || side_effects_p (target))))
4113 && TREE_CODE (exp) != ERROR_MARK
4114 && ! dont_store_target)
4115 {
4116 target = protect_from_queue (target, 1);
4117 if (GET_MODE (temp) != GET_MODE (target)
f0348c25 4118 && GET_MODE (temp) != VOIDmode)
4119 {
4120 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4121 if (dont_return_target)
4122 {
4123 /* In this case, we will return TEMP,
4124 so make sure it has the proper mode.
4125 But don't forget to store the value into TARGET. */
4126 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4127 emit_move_insn (target, temp);
4128 }
4129 else
4130 convert_move (target, temp, unsignedp);
4131 }
4132
4133 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4134 {
4135 /* Handle copying a string constant into an array. The string
4136 constant may be shorter than the array. So copy just the string's
4137 actual length, and clear the rest. First get the size of the data
4138 type of the string, which is actually the size of the target. */
4139 rtx size = expr_size (exp);
bbf6f052 4140
4141 if (GET_CODE (size) == CONST_INT
4142 && INTVAL (size) < TREE_STRING_LENGTH (exp))
8ac61af7 4143 emit_block_move (target, temp, size);
e87b4f3f 4144 else
bbf6f052 4145 {
4146 /* Compute the size of the data to copy from the string. */
4147 tree copy_size
c03b7665 4148 = size_binop (MIN_EXPR,
b50d17a1 4149 make_tree (sizetype, size),
fed3cef0 4150 size_int (TREE_STRING_LENGTH (exp)));
4151 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4152 VOIDmode, 0);
4153 rtx label = 0;
4154
4155 /* Copy that much. */
c24ae149 4156 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx, 0);
8ac61af7 4157 emit_block_move (target, temp, copy_size_rtx);
e87b4f3f 4158
4159 /* Figure out how much is left in TARGET that we have to clear.
4160 Do all calculations in ptr_mode. */
4161 if (GET_CODE (copy_size_rtx) == CONST_INT)
4162 {
4163 size = plus_constant (size, -INTVAL (copy_size_rtx));
4164 target = adjust_address (target, BLKmode,
4165 INTVAL (copy_size_rtx));
e87b4f3f
RS
4166 }
4167 else
4168 {
88f63c77 4169 size = expand_binop (ptr_mode, sub_optab, size,
4170 copy_size_rtx, NULL_RTX, 0,
4171 OPTAB_LIB_WIDEN);
e87b4f3f 4172
4173#ifdef POINTERS_EXTEND_UNSIGNED
4174 if (GET_MODE (copy_size_rtx) != Pmode)
4175 copy_size_rtx = convert_memory_address (Pmode,
4176 copy_size_rtx);
4177#endif
4178
4179 target = offset_address (target, copy_size_rtx,
4180 highest_pow2_factor (copy_size));
e87b4f3f 4181 label = gen_label_rtx ();
c5d5d461 4182 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
a06ef755 4183 GET_MODE (size), 0, label);
4184 }
4185
4186 if (size != const0_rtx)
37a08a29 4187 clear_storage (target, size);
22619c3f 4188
4189 if (label)
4190 emit_label (label);
4191 }
4192 }
4193 /* Handle calls that return values in multiple non-contiguous locations.
4194 The Irix 6 ABI has examples of this. */
4195 else if (GET_CODE (target) == PARALLEL)
04050c69 4196 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
bbf6f052 4197 else if (GET_MODE (temp) == BLKmode)
8ac61af7 4198 emit_block_move (target, temp, expr_size (exp));
4199 else
4200 emit_move_insn (target, temp);
4201 }
709f5be1 4202
4203 /* If we don't want a value, return NULL_RTX. */
4204 if (! want_value)
4205 return NULL_RTX;
4206
4207 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4208 ??? The latter test doesn't seem to make sense. */
4209 else if (dont_return_target && GET_CODE (temp) != MEM)
bbf6f052 4210 return temp;
4211
4212 /* Return TARGET itself if it is a hard register. */
4213 else if (want_value && GET_MODE (target) != BLKmode
4214 && ! (GET_CODE (target) == REG
4215 && REGNO (target) < FIRST_PSEUDO_REGISTER))
709f5be1 4216 return copy_to_reg (target);
3a94c984 4217
766f36c7 4218 else
709f5be1 4219 return target;
4220}
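/* Reader's note (annotation, not part of the original expr.c source):
   a worked example for the STRING_CST branch above.  For the C
   initializer `char buf[8] = "hi";', expr_size (exp) is 8 while
   TREE_STRING_LENGTH (exp) is 3 (the two characters plus the
   terminating NUL), so copy_size becomes MIN (8, 3) = 3;
   emit_block_move copies those 3 bytes and clear_storage zeros the
   remaining 5.  */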
4221\f
4222/* Return 1 if EXP just contains zeros. */
4223
4224static int
4225is_zeros_p (exp)
4226 tree exp;
4227{
4228 tree elt;
4229
4230 switch (TREE_CODE (exp))
4231 {
4232 case CONVERT_EXPR:
4233 case NOP_EXPR:
4234 case NON_LVALUE_EXPR:
ed239f5a 4235 case VIEW_CONVERT_EXPR:
4236 return is_zeros_p (TREE_OPERAND (exp, 0));
4237
4238 case INTEGER_CST:
05bccae2 4239 return integer_zerop (exp);
4240
4241 case COMPLEX_CST:
4242 return
4243 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4244
4245 case REAL_CST:
41c9120b 4246 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
9de08200 4247
4248 case VECTOR_CST:
4249 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4250 elt = TREE_CHAIN (elt))
4251 if (!is_zeros_p (TREE_VALUE (elt)))
4252 return 0;
4253
4254 return 1;
4255
9de08200 4256 case CONSTRUCTOR:
4257 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4258 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4259 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4260 if (! is_zeros_p (TREE_VALUE (elt)))
4261 return 0;
4262
4263 return 1;
3a94c984 4264
4265 default:
4266 return 0;
9de08200 4267 }
4268}
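/* Reader's note (annotation, not part of the original expr.c source):
   is_zeros_p answers 1 for the integer constant 0, for 0.0, for
   complex or vector constants whose parts are all zero, and for a
   CONSTRUCTOR whose elements are all themselves zeros, looking
   through simple conversions.  */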
4269
4270/* Return 1 if EXP contains mostly (3/4) zeros. */
4271
4272static int
4273mostly_zeros_p (exp)
4274 tree exp;
4275{
4276 if (TREE_CODE (exp) == CONSTRUCTOR)
4277 {
4278 int elts = 0, zeros = 0;
4279 tree elt = CONSTRUCTOR_ELTS (exp);
4280 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4281 {
4282 /* If there are no ranges of true bits, it is all zero. */
4283 return elt == NULL_TREE;
4284 }
4285 for (; elt; elt = TREE_CHAIN (elt))
4286 {
4287 /* We do not handle the case where the index is a RANGE_EXPR,
4288 so the statistic will be somewhat inaccurate.
4289 We do make a more accurate count in store_constructor itself,
4290 and since this function is only used for nested array elements,
0f41302f 4291 this should be close enough. */
4292 if (mostly_zeros_p (TREE_VALUE (elt)))
4293 zeros++;
4294 elts++;
4295 }
4296
4297 return 4 * zeros >= 3 * elts;
4298 }
4299
4300 return is_zeros_p (exp);
4301}
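/* Reader's note (annotation, not part of the original expr.c source):
   the 3/4 test above in numbers: an initializer such as
   { 0, 0, 0, 5 } gives elts == 4 and zeros == 3, and 4 * 3 >= 3 * 4
   holds, so the whole object is cleared first and only the nonzero
   element is stored afterwards.  */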
4302\f
4303/* Helper function for store_constructor.
4304 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4305 TYPE is the type of the CONSTRUCTOR, not the element type.
04050c69 4306 CLEARED is as for store_constructor.
23cb1766 4307 ALIAS_SET is the alias set to use for any stores.
4308
4309 This provides a recursive shortcut back to store_constructor when it isn't
4310 necessary to go through store_field. This is so that we can pass through
4311 the cleared field to let store_constructor know that we may not have to
4312 clear a substructure if the outer structure has already been cleared. */
4313
4314static void
4315store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
4316 alias_set)
e1a43f73 4317 rtx target;
4318 unsigned HOST_WIDE_INT bitsize;
4319 HOST_WIDE_INT bitpos;
4320 enum machine_mode mode;
4321 tree exp, type;
4322 int cleared;
23cb1766 4323 int alias_set;
4324{
4325 if (TREE_CODE (exp) == CONSTRUCTOR
4326 && bitpos % BITS_PER_UNIT == 0
4327 /* If we have a non-zero bitpos for a register target, then we just
4328 let store_field do the bitfield handling. This is unlikely to
4329 generate unnecessary clear instructions anyways. */
4330 && (bitpos == 0 || GET_CODE (target) == MEM))
e1a43f73 4331 {
4332 if (GET_CODE (target) == MEM)
4333 target
4334 = adjust_address (target,
4335 GET_MODE (target) == BLKmode
4336 || 0 != (bitpos
4337 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4338 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
23cb1766 4339
e0339ef7 4340
04050c69 4341 /* Update the alias set, if required. */
4342 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4343 && MEM_ALIAS_SET (target) != 0)
4344 {
4345 target = copy_rtx (target);
4346 set_mem_alias_set (target, alias_set);
4347 }
e0339ef7 4348
04050c69 4349 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4350 }
4351 else
4352 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4353 alias_set);
4354}
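/* Reader's note (annotation, not part of the original expr.c source):
   the recursive shortcut above matters for nested aggregates.  When
   the outer CONSTRUCTOR has already been cleared, the inner
   store_constructor call inherits CLEARED and can skip clearing the
   inner aggregate, something a detour through store_field could not
   know about.  */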
4355
bbf6f052 4356/* Store the value of constructor EXP into the rtx TARGET.
4357 TARGET is either a REG or a MEM; we know it cannot conflict, since
4358 safe_from_p has been called.
4359 CLEARED is true if TARGET is known to have been zero'd.
4360 SIZE is the number of bytes of TARGET we are allowed to modify: this
4361 may not be the same as the size of EXP if we are assigning to a field
4362 which has been packed to exclude padding bits. */
4363
4364static void
04050c69 4365store_constructor (exp, target, cleared, size)
4366 tree exp;
4367 rtx target;
e1a43f73 4368 int cleared;
13eb1f7f 4369 HOST_WIDE_INT size;
bbf6f052 4370{
4af3895e 4371 tree type = TREE_TYPE (exp);
a5efcd63 4372#ifdef WORD_REGISTER_OPERATIONS
13eb1f7f 4373 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
a5efcd63 4374#endif
4af3895e 4375
4376 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4377 || TREE_CODE (type) == QUAL_UNION_TYPE)
bbf6f052 4378 {
b3694847 4379 tree elt;
bbf6f052 4380
04050c69 4381 /* We either clear the aggregate or indicate the value is dead. */
4382 if ((TREE_CODE (type) == UNION_TYPE
4383 || TREE_CODE (type) == QUAL_UNION_TYPE)
4384 && ! cleared
4385 && ! CONSTRUCTOR_ELTS (exp))
4386 /* If the constructor is empty, clear the union. */
a59f8640 4387 {
04050c69
RK
4388 clear_storage (target, expr_size (exp));
4389 cleared = 1;
a59f8640 4390 }
4391
4392 /* If we are building a static constructor into a register,
4393 set the initial value as zero so we can fold the value into
4394 a constant. But if more than one register is involved,
4395 this probably loses. */
04050c69 4396 else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
67225c15 4397 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
9de08200 4398 {
04050c69 4399 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4400 cleared = 1;
4401 }
4402
4403 /* If the constructor has fewer fields than the structure
4404 or if we are initializing the structure to mostly zeros,
0d97bf4c 4405 clear the whole structure first. Don't do this if TARGET is a
4406 register whose mode size isn't equal to SIZE since clear_storage
4407 can't handle this case. */
04050c69 4408 else if (! cleared && size > 0
9376fcd6 4409 && ((list_length (CONSTRUCTOR_ELTS (exp))
c3b247b4 4410 != fields_length (type))
4411 || mostly_zeros_p (exp))
4412 && (GET_CODE (target) != REG
4413 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4414 == size)))
9de08200 4415 {
04050c69 4416 clear_storage (target, GEN_INT (size));
4417 cleared = 1;
4418 }
4419
4420 if (! cleared)
38a448ca 4421 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4422
4423 /* Store each element of the constructor into
4424 the corresponding field of TARGET. */
4425
4426 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4427 {
b3694847 4428 tree field = TREE_PURPOSE (elt);
34c73909 4429 tree value = TREE_VALUE (elt);
b3694847 4430 enum machine_mode mode;
4431 HOST_WIDE_INT bitsize;
4432 HOST_WIDE_INT bitpos = 0;
bbf6f052 4433 int unsignedp;
770ae6cc 4434 tree offset;
b50d17a1 4435 rtx to_rtx = target;
bbf6f052 4436
4437 /* Just ignore missing fields.
4438 We cleared the whole structure, above,
4439 if any fields are missing. */
4440 if (field == 0)
4441 continue;
4442
8b6000fc 4443 if (cleared && is_zeros_p (value))
e1a43f73 4444 continue;
9de08200 4445
4446 if (host_integerp (DECL_SIZE (field), 1))
4447 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4448 else
4449 bitsize = -1;
4450
4451 unsignedp = TREE_UNSIGNED (field);
4452 mode = DECL_MODE (field);
4453 if (DECL_BIT_FIELD (field))
4454 mode = VOIDmode;
4455
4456 offset = DECL_FIELD_OFFSET (field);
4457 if (host_integerp (offset, 0)
4458 && host_integerp (bit_position (field), 0))
4459 {
4460 bitpos = int_bit_position (field);
4461 offset = 0;
4462 }
b50d17a1 4463 else
770ae6cc 4464 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
3a94c984 4465
4466 if (offset)
4467 {
4468 rtx offset_rtx;
4469
4470 if (contains_placeholder_p (offset))
7fa96708 4471 offset = build (WITH_RECORD_EXPR, sizetype,
956d6950 4472 offset, make_tree (TREE_TYPE (exp), target));
bbf6f052 4473
4474 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4475 if (GET_CODE (to_rtx) != MEM)
4476 abort ();
4477
3a94c984 4478 if (GET_MODE (offset_rtx) != ptr_mode)
0d4903b8
RK
4479 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4480
bd070e1a 4481#ifdef POINTERS_EXTEND_UNSIGNED
4482 if (GET_MODE (offset_rtx) != Pmode)
4483 offset_rtx = convert_memory_address (Pmode, offset_rtx);
bd070e1a 4484#endif
bd070e1a 4485
4486 to_rtx = offset_address (to_rtx, offset_rtx,
4487 highest_pow2_factor (offset));
b50d17a1 4488 }
c5c76735 4489
4490 if (TREE_READONLY (field))
4491 {
9151b3bf 4492 if (GET_CODE (to_rtx) == MEM)
4493 to_rtx = copy_rtx (to_rtx);
4494
4495 RTX_UNCHANGING_P (to_rtx) = 1;
4496 }
4497
4498#ifdef WORD_REGISTER_OPERATIONS
4499 /* If this initializes a field that is smaller than a word, at the
4500 start of a word, try to widen it to a full word.
4501 This special case allows us to output C++ member function
4502 initializations in a form that the optimizers can understand. */
770ae6cc 4503 if (GET_CODE (target) == REG
4504 && bitsize < BITS_PER_WORD
4505 && bitpos % BITS_PER_WORD == 0
4506 && GET_MODE_CLASS (mode) == MODE_INT
4507 && TREE_CODE (value) == INTEGER_CST
4508 && exp_size >= 0
4509 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4510 {
4511 tree type = TREE_TYPE (value);
04050c69 4512
4513 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4514 {
4515 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4516 value = convert (type, value);
4517 }
04050c69 4518
4519 if (BYTES_BIG_ENDIAN)
4520 value
4521 = fold (build (LSHIFT_EXPR, type, value,
4522 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4523 bitsize = BITS_PER_WORD;
4524 mode = word_mode;
4525 }
4526#endif
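/* Reader's note (annotation, not part of the original expr.c source):
   the widening above turns, e.g., the initialization of a leading
   char member of a struct held in a word-sized register into a
   full-word store; on BYTES_BIG_ENDIAN targets the constant is first
   shifted left by BITS_PER_WORD - bitsize so it lands in the correct
   part of the word.  */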
4527
4528 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4529 && DECL_NONADDRESSABLE_P (field))
4530 {
4531 to_rtx = copy_rtx (to_rtx);
4532 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4533 }
4534
c5c76735 4535 store_constructor_field (to_rtx, bitsize, bitpos, mode,
8b6000fc 4536 value, type, cleared,
10b76d73 4537 get_alias_set (TREE_TYPE (field)));
4538 }
4539 }
4540 else if (TREE_CODE (type) == ARRAY_TYPE
4541 || TREE_CODE (type) == VECTOR_TYPE)
bbf6f052 4542 {
4543 tree elt;
4544 int i;
e1a43f73 4545 int need_to_clear;
4af3895e 4546 tree domain = TYPE_DOMAIN (type);
4af3895e 4547 tree elttype = TREE_TYPE (type);
e6834654 4548 int const_bounds_p;
4549 HOST_WIDE_INT minelt = 0;
4550 HOST_WIDE_INT maxelt = 0;
85f3d674 4551
4552 /* Vectors are like arrays, but the domain is stored via an array
4553 type indirectly. */
4554 if (TREE_CODE (type) == VECTOR_TYPE)
4555 {
4556 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
4557 the same field as TYPE_DOMAIN, we are not guaranteed that
4558 it always will. */
4559 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4560 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4561 }
4562
4563 const_bounds_p = (TYPE_MIN_VALUE (domain)
4564 && TYPE_MAX_VALUE (domain)
4565 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4566 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4567
4568 /* If we have constant bounds for the range of the type, get them. */
4569 if (const_bounds_p)
4570 {
4571 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4572 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4573 }
bbf6f052 4574
e1a43f73 4575 /* If the constructor has fewer elements than the array,
38e01259 4576 clear the whole array first. Similarly if this is
e1a43f73
PB
4577 static constructor of a non-BLKmode object. */
4578 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4579 need_to_clear = 1;
4580 else
4581 {
4582 HOST_WIDE_INT count = 0, zero_count = 0;
4583 need_to_clear = ! const_bounds_p;
4584
4585 /* This loop is a more accurate version of the loop in
4586 mostly_zeros_p (it handles RANGE_EXPR in an index).
4587 It is also needed to check for missing elements. */
4588 for (elt = CONSTRUCTOR_ELTS (exp);
85f3d674 4589 elt != NULL_TREE && ! need_to_clear;
df0faff1 4590 elt = TREE_CHAIN (elt))
4591 {
4592 tree index = TREE_PURPOSE (elt);
4593 HOST_WIDE_INT this_node_count;
19caa751 4594
4595 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4596 {
4597 tree lo_index = TREE_OPERAND (index, 0);
4598 tree hi_index = TREE_OPERAND (index, 1);
05bccae2 4599
4600 if (! host_integerp (lo_index, 1)
4601 || ! host_integerp (hi_index, 1))
4602 {
4603 need_to_clear = 1;
4604 break;
4605 }
4606
4607 this_node_count = (tree_low_cst (hi_index, 1)
4608 - tree_low_cst (lo_index, 1) + 1);
4609 }
4610 else
4611 this_node_count = 1;
85f3d674 4612
4613 count += this_node_count;
4614 if (mostly_zeros_p (TREE_VALUE (elt)))
4615 zero_count += this_node_count;
4616 }
85f3d674 4617
8e958f70 4618 /* Clear the entire array first if there are any missing elements,
0f41302f 4619 or if the incidence of zero elements is >= 75%. */
4620 if (! need_to_clear
4621 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4622 need_to_clear = 1;
4623 }
85f3d674 4624
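/* Reader's note (annotation, not part of the original expr.c source):
   at this point NEED_TO_CLEAR encodes "any elements missing, or at
   least 75% of them zero" -- the same heuristic as mostly_zeros_p.
   For example, 12 mostly-zero elements out of 16 give
   4 * 12 >= 3 * 16, so the array is cleared once up front and only
   the nonzero elements are stored afterwards.  */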
9376fcd6 4625 if (need_to_clear && size > 0)
4626 {
4627 if (! cleared)
4628 {
4629 if (REG_P (target))
4630 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4631 else
4632 clear_storage (target, GEN_INT (size));
4633 }
4634 cleared = 1;
4635 }
df4556a3 4636 else if (REG_P (target))
bbf6f052 4637 /* Inform later passes that the old value is dead. */
38a448ca 4638 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4639
4640 /* Store each element of the constructor into
4641 the corresponding element of TARGET, determined
4642 by counting the elements. */
4643 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4644 elt;
4645 elt = TREE_CHAIN (elt), i++)
4646 {
b3694847 4647 enum machine_mode mode;
4648 HOST_WIDE_INT bitsize;
4649 HOST_WIDE_INT bitpos;
bbf6f052 4650 int unsignedp;
e1a43f73 4651 tree value = TREE_VALUE (elt);
4652 tree index = TREE_PURPOSE (elt);
4653 rtx xtarget = target;
bbf6f052 4654
4655 if (cleared && is_zeros_p (value))
4656 continue;
9de08200 4657
bbf6f052 4658 unsignedp = TREE_UNSIGNED (elttype);
4659 mode = TYPE_MODE (elttype);
4660 if (mode == BLKmode)
4661 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4662 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4663 : -1);
4664 else
4665 bitsize = GET_MODE_BITSIZE (mode);
bbf6f052 4666
4667 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4668 {
4669 tree lo_index = TREE_OPERAND (index, 0);
4670 tree hi_index = TREE_OPERAND (index, 1);
0d4903b8 4671 rtx index_r, pos_rtx, hi_r, loop_top, loop_end;
e1a43f73 4672 struct nesting *loop;
4673 HOST_WIDE_INT lo, hi, count;
4674 tree position;
e1a43f73 4675
0f41302f 4676 /* If the range is constant and "small", unroll the loop. */
4677 if (const_bounds_p
4678 && host_integerp (lo_index, 0)
4679 && host_integerp (hi_index, 0)
4680 && (lo = tree_low_cst (lo_index, 0),
4681 hi = tree_low_cst (hi_index, 0),
4682 count = hi - lo + 1,
4683 (GET_CODE (target) != MEM
4684 || count <= 2
4685 || (host_integerp (TYPE_SIZE (elttype), 1)
4686 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4687 <= 40 * 8)))))
e1a43f73 4688 {
4689 lo -= minelt; hi -= minelt;
4690 for (; lo <= hi; lo++)
e1a43f73 4691 {
19caa751 4692 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4693
4694 if (GET_CODE (target) == MEM
4695 && !MEM_KEEP_ALIAS_SET_P (target)
e6834654 4696 && TREE_CODE (type) == ARRAY_TYPE
4697 && TYPE_NONALIASED_COMPONENT (type))
4698 {
4699 target = copy_rtx (target);
4700 MEM_KEEP_ALIAS_SET_P (target) = 1;
4701 }
4702
23cb1766 4703 store_constructor_field
4704 (target, bitsize, bitpos, mode, value, type, cleared,
4705 get_alias_set (elttype));
4706 }
4707 }
4708 else
4709 {
4710 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4711 loop_top = gen_label_rtx ();
4712 loop_end = gen_label_rtx ();
4713
4714 unsignedp = TREE_UNSIGNED (domain);
4715
4716 index = build_decl (VAR_DECL, NULL_TREE, domain);
4717
19e7881c 4718 index_r
e1a43f73
PB
4719 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4720 &unsignedp, 0));
19e7881c 4721 SET_DECL_RTL (index, index_r);
4722 if (TREE_CODE (value) == SAVE_EXPR
4723 && SAVE_EXPR_RTL (value) == 0)
4724 {
4725 /* Make sure value gets expanded once before the
4726 loop. */
4727 expand_expr (value, const0_rtx, VOIDmode, 0);
4728 emit_queue ();
4729 }
4730 store_expr (lo_index, index_r, 0);
4731 loop = expand_start_loop (0);
4732
0f41302f 4733 /* Assign value to element index. */
4734 position
4735 = convert (ssizetype,
4736 fold (build (MINUS_EXPR, TREE_TYPE (index),
4737 index, TYPE_MIN_VALUE (domain))));
4738 position = size_binop (MULT_EXPR, position,
4739 convert (ssizetype,
4740 TYPE_SIZE_UNIT (elttype)));
4741
e1a43f73 4742 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4743 xtarget = offset_address (target, pos_rtx,
4744 highest_pow2_factor (position));
4745 xtarget = adjust_address (xtarget, mode, 0);
e1a43f73 4746 if (TREE_CODE (value) == CONSTRUCTOR)
04050c69 4747 store_constructor (value, xtarget, cleared,
b7010412 4748 bitsize / BITS_PER_UNIT);
4749 else
4750 store_expr (value, xtarget, 0);
4751
4752 expand_exit_loop_if_false (loop,
4753 build (LT_EXPR, integer_type_node,
4754 index, hi_index));
4755
4756 expand_increment (build (PREINCREMENT_EXPR,
4757 TREE_TYPE (index),
7b8b9722 4758 index, integer_one_node), 0, 0);
4759 expand_end_loop ();
4760 emit_label (loop_end);
4761 }
4762 }
4763 else if ((index != 0 && ! host_integerp (index, 0))
4764 || ! host_integerp (TYPE_SIZE (elttype), 1))
03dc44a6 4765 {
4766 tree position;
4767
5b6c44ff 4768 if (index == 0)
fed3cef0 4769 index = ssize_int (1);
5b6c44ff 4770
e1a43f73 4771 if (minelt)
fed3cef0
RK
4772 index = convert (ssizetype,
4773 fold (build (MINUS_EXPR, index,
4774 TYPE_MIN_VALUE (domain))));
19caa751 4775
fed3cef0
RK
4776 position = size_binop (MULT_EXPR, index,
4777 convert (ssizetype,
4778 TYPE_SIZE_UNIT (elttype)));
4779 xtarget = offset_address (target,
4780 expand_expr (position, 0, VOIDmode, 0),
4781 highest_pow2_factor (position));
4782 xtarget = adjust_address (xtarget, mode, 0);
e1a43f73 4783 store_expr (value, xtarget, 0);
4784 }
4785 else
4786 {
4787 if (index != 0)
4788 bitpos = ((tree_low_cst (index, 0) - minelt)
4789 * tree_low_cst (TYPE_SIZE (elttype), 1));
03dc44a6 4790 else
4791 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4792
10b76d73 4793 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
e6834654 4794 && TREE_CODE (type) == ARRAY_TYPE
4795 && TYPE_NONALIASED_COMPONENT (type))
4796 {
4797 target = copy_rtx (target);
4798 MEM_KEEP_ALIAS_SET_P (target) = 1;
4799 }
4800
c5c76735 4801 store_constructor_field (target, bitsize, bitpos, mode, value,
04050c69 4802 type, cleared, get_alias_set (elttype));
23cb1766 4803
03dc44a6 4804 }
4805 }
4806 }
19caa751 4807
3a94c984 4808 /* Set constructor assignments. */
4809 else if (TREE_CODE (type) == SET_TYPE)
4810 {
e1a43f73 4811 tree elt = CONSTRUCTOR_ELTS (exp);
19caa751 4812 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4813 tree domain = TYPE_DOMAIN (type);
4814 tree domain_min, domain_max, bitlength;
4815
9faa82d8 4816 /* The default implementation strategy is to extract the constant
4817 parts of the constructor, use that to initialize the target,
4818 and then "or" in whatever non-constant ranges we need in addition.
4819
4820 If a large set is all zero or all ones, it is
4821 probably better to set it using memset (if available) or bzero.
4822 Also, if a large set has just a single range, it may also be
4823 better to first clear the whole set (using
0f41302f 4824 bzero/memset), and set the bits we want. */
3a94c984 4825
0f41302f 4826 /* Check for all zeros. */
9376fcd6 4827 if (elt == NULL_TREE && size > 0)
071a6595 4828 {
e1a43f73 4829 if (!cleared)
8ac61af7 4830 clear_storage (target, GEN_INT (size));
4831 return;
4832 }
4833
4834 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4835 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4836 bitlength = size_binop (PLUS_EXPR,
4837 size_diffop (domain_max, domain_min),
4838 ssize_int (1));
071a6595 4839
19caa751 4840 nbits = tree_low_cst (bitlength, 1);
4841
4842 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4843 are "complicated" (more than one range), initialize (the
3a94c984 4844 constant parts) by copying from a constant. */
4845 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4846 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
071a6595 4847 {
19caa751 4848 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
b4ee5a72 4849 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
0f41302f 4850 char *bit_buffer = (char *) alloca (nbits);
b4ee5a72 4851 HOST_WIDE_INT word = 0;
4852 unsigned int bit_pos = 0;
4853 unsigned int ibit = 0;
4854 unsigned int offset = 0; /* In bytes from beginning of set. */
4855
e1a43f73 4856 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
b4ee5a72 4857 for (;;)
071a6595 4858 {
4859 if (bit_buffer[ibit])
4860 {
b09f3348 4861 if (BYTES_BIG_ENDIAN)
b4ee5a72
PB
4862 word |= (1 << (set_word_size - 1 - bit_pos));
4863 else
4864 word |= 1 << bit_pos;
4865 }
19caa751 4866
b4ee5a72
PB
4867 bit_pos++; ibit++;
4868 if (bit_pos >= set_word_size || ibit == nbits)
071a6595 4869 {
e1a43f73
PB
4870 if (word != 0 || ! cleared)
4871 {
4872 rtx datum = GEN_INT (word);
4873 rtx to_rtx;
19caa751 4874
0f41302f
MS
4875 /* The assumption here is that it is safe to use
4876 XEXP if the set is multi-word, but not if
4877 it's single-word. */
e1a43f73 4878 if (GET_CODE (target) == MEM)
f4ef873c 4879 to_rtx = adjust_address (target, mode, offset);
3a94c984 4880 else if (offset == 0)
e1a43f73
PB
4881 to_rtx = target;
4882 else
4883 abort ();
4884 emit_move_insn (to_rtx, datum);
4885 }
19caa751 4886
b4ee5a72
PB
4887 if (ibit == nbits)
4888 break;
4889 word = 0;
4890 bit_pos = 0;
4891 offset += set_word_size / BITS_PER_UNIT;
071a6595
PB
4892 }
4893 }
071a6595 4894 }
e1a43f73 4895 else if (!cleared)
19caa751
RK
4896 /* Don't bother clearing storage if the set is all ones. */
4897 if (TREE_CHAIN (elt) != NULL_TREE
4898 || (TREE_PURPOSE (elt) == NULL_TREE
4899 ? nbits != 1
4900 : ( ! host_integerp (TREE_VALUE (elt), 0)
4901 || ! host_integerp (TREE_PURPOSE (elt), 0)
4902 || (tree_low_cst (TREE_VALUE (elt), 0)
4903 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
4904 != (HOST_WIDE_INT) nbits))))
8ac61af7 4905 clear_storage (target, expr_size (exp));
3a94c984 4906
e1a43f73 4907 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
071a6595 4908 {
3a94c984 4909 /* Start of range of element or NULL. */
071a6595 4910 tree startbit = TREE_PURPOSE (elt);
3a94c984 4911 /* End of range of element, or element value. */
071a6595 4912 tree endbit = TREE_VALUE (elt);
381127e8 4913#ifdef TARGET_MEM_FUNCTIONS
071a6595 4914 HOST_WIDE_INT startb, endb;
381127e8 4915#endif
19caa751 4916 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
071a6595
PB
4917
4918 bitlength_rtx = expand_expr (bitlength,
19caa751 4919 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
071a6595 4920
3a94c984 4921 /* Handle non-range tuple element like [ expr ]. */
071a6595
PB
4922 if (startbit == NULL_TREE)
4923 {
4924 startbit = save_expr (endbit);
4925 endbit = startbit;
4926 }
19caa751 4927
071a6595
PB
4928 startbit = convert (sizetype, startbit);
4929 endbit = convert (sizetype, endbit);
4930 if (! integer_zerop (domain_min))
4931 {
4932 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4933 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4934 }
3a94c984 4935 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
071a6595 4936 EXPAND_CONST_ADDRESS);
3a94c984 4937 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
071a6595
PB
4938 EXPAND_CONST_ADDRESS);
4939
4940 if (REG_P (target))
4941 {
1da68f56
RK
4942 targetx
4943 = assign_temp
4944 ((build_qualified_type (type_for_mode (GET_MODE (target), 0),
4945 TYPE_QUAL_CONST)),
4946 0, 1, 1);
071a6595
PB
4947 emit_move_insn (targetx, target);
4948 }
19caa751 4949
071a6595
PB
4950 else if (GET_CODE (target) == MEM)
4951 targetx = target;
4952 else
4953 abort ();
4954
4955#ifdef TARGET_MEM_FUNCTIONS
4956 /* Optimization: If startbit and endbit are
9faa82d8 4957 constants divisible by BITS_PER_UNIT,
0f41302f 4958 call memset instead. */
071a6595
PB
4959 if (TREE_CODE (startbit) == INTEGER_CST
4960 && TREE_CODE (endbit) == INTEGER_CST
4961 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
e1a43f73 4962 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
071a6595 4963 {
ebb1b59a 4964 emit_library_call (memset_libfunc, LCT_NORMAL,
071a6595 4965 VOIDmode, 3,
e1a43f73
PB
4966 plus_constant (XEXP (targetx, 0),
4967 startb / BITS_PER_UNIT),
071a6595 4968 Pmode,
3b6f75e2 4969 constm1_rtx, TYPE_MODE (integer_type_node),
071a6595 4970 GEN_INT ((endb - startb) / BITS_PER_UNIT),
3b6f75e2 4971 TYPE_MODE (sizetype));
071a6595
PB
4972 }
4973 else
4974#endif
19caa751 4975 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
ebb1b59a
BS
4976 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
4977 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
19caa751
RK
4978 startbit_rtx, TYPE_MODE (sizetype),
4979 endbit_rtx, TYPE_MODE (sizetype));
4980
071a6595
PB
4981 if (REG_P (target))
4982 emit_move_insn (target, targetx);
4983 }
4984 }
bbf6f052
RK
4985
4986 else
4987 abort ();
4988}
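
/* A standalone illustration of the word-building loop above (not part of
   expr.c proper): one bit per set element is packed into a host word -- at
   the high end on big-endian targets, at the low end otherwise -- and the
   word is flushed whenever it fills or the bits run out.  A minimal model
   with a fixed 32-bit word; the names pack_set_bits and SET_WORD_SIZE are
   hypothetical.  */
#if 0
#include <stdio.h>

#define SET_WORD_SIZE 32

/* Pack NBITS bits from BIT_BUFFER into 32-bit words, printing each word
   as it is completed (standing in for emit_move_insn above).  */
static void
pack_set_bits (const char *bit_buffer, unsigned int nbits, int big_endian_p)
{
  unsigned long word = 0;
  unsigned int bit_pos = 0, ibit = 0;

  for (;;)
    {
      if (bit_buffer[ibit])
        {
          if (big_endian_p)
            word |= 1UL << (SET_WORD_SIZE - 1 - bit_pos);
          else
            word |= 1UL << bit_pos;
        }

      bit_pos++; ibit++;
      if (bit_pos >= SET_WORD_SIZE || ibit == nbits)
        {
          printf ("word at byte %u: 0x%08lx\n",
                  (ibit - 1) / SET_WORD_SIZE * (SET_WORD_SIZE / 8), word);
          if (ibit == nbits)
            break;
          word = 0;
          bit_pos = 0;
        }
    }
}

int
main (void)
{
  char bits[40] = { 0 };

  bits[0] = bits[3] = bits[35] = 1;
  pack_set_bits (bits, 40, 0);  /* prints 0x00000009, then 0x00000008 */
  return 0;
}
#endif
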
4989
4990/* Store the value of EXP (an expression tree)
4991 into a subfield of TARGET which has mode MODE and occupies
4992 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4993 If MODE is VOIDmode, it means that we are storing into a bit-field.
4994
4995 If VALUE_MODE is VOIDmode, return nothing in particular.
4996 UNSIGNEDP is not used in this case.
4997
4998 Otherwise, return an rtx for the value stored. This rtx
4999 has mode VALUE_MODE if that is convenient to do.
5000 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5001
a06ef755 5002 TYPE is the type of the underlying object.
ece32014
MM
5003
5004 ALIAS_SET is the alias set for the destination. This value will
5005 (in general) be different from that for TARGET, since TARGET is a
5006 reference to the containing structure. */
bbf6f052
RK
5007
5008static rtx
a06ef755
RK
5009store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
5010 alias_set)
bbf6f052 5011 rtx target;
770ae6cc
RK
5012 HOST_WIDE_INT bitsize;
5013 HOST_WIDE_INT bitpos;
bbf6f052
RK
5014 enum machine_mode mode;
5015 tree exp;
5016 enum machine_mode value_mode;
5017 int unsignedp;
a06ef755 5018 tree type;
ece32014 5019 int alias_set;
bbf6f052 5020{
906c4e36 5021 HOST_WIDE_INT width_mask = 0;
bbf6f052 5022
e9a25f70
JL
5023 if (TREE_CODE (exp) == ERROR_MARK)
5024 return const0_rtx;
5025
2be6a7e9
RK
5026 /* If we have nothing to store, do nothing unless the expression has
5027 side-effects. */
5028 if (bitsize == 0)
5029 return expand_expr (exp, const0_rtx, VOIDmode, 0);
a06ef755 5030 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
906c4e36 5031 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
bbf6f052
RK
5032
5033 /* If we are storing into an unaligned field of an aligned union that is
5034 in a register, we may have the mode of TARGET being an integer mode but
5035 MODE == BLKmode. In that case, get an aligned object whose size and
5036 alignment are the same as TARGET and store TARGET into it (we can avoid
5037 the store if the field being stored is the entire width of TARGET). Then
5038 call ourselves recursively to store the field into a BLKmode version of
5039 that object. Finally, load from the object into TARGET. This is not
5040 very efficient in general, but should only be slightly more expensive
5041 than the otherwise-required unaligned accesses. Perhaps this can be
5042 cleaned up later. */
5043
5044 if (mode == BLKmode
5045 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5046 {
1da68f56
RK
5047 rtx object
5048 = assign_temp
a06ef755 5049 (build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST),
1da68f56 5050 0, 1, 1);
c4e59f51 5051 rtx blk_object = adjust_address (object, BLKmode, 0);
bbf6f052 5052
8752c357 5053 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
bbf6f052
RK
5054 emit_move_insn (object, target);
5055
a06ef755
RK
5056 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5057 alias_set);
bbf6f052
RK
5058
5059 emit_move_insn (target, object);
5060
a06ef755 5061 /* We want to return the BLKmode version of the data. */
46093b97 5062 return blk_object;
bbf6f052 5063 }
c3b247b4
JM
5064
5065 if (GET_CODE (target) == CONCAT)
5066 {
5067 /* We're storing into a struct containing a single __complex. */
5068
5069 if (bitpos != 0)
5070 abort ();
5071 return store_expr (exp, target, 0);
5072 }
bbf6f052
RK
5073
5074 /* If the structure is in a register or if the component
5075 is a bit field, we cannot use addressing to access it.
5076 Use bit-field techniques or SUBREG to store in it. */
5077
4fa52007 5078 if (mode == VOIDmode
6ab06cbb
JW
5079 || (mode != BLKmode && ! direct_store[(int) mode]
5080 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5081 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4fa52007 5082 || GET_CODE (target) == REG
c980ac49 5083 || GET_CODE (target) == SUBREG
ccc98036
RS
5084 /* If the field isn't aligned enough to store as an ordinary memref,
5085 store it as a bit field. */
04050c69
RK
5086 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
5087 && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)
14a774a9 5088 || bitpos % GET_MODE_ALIGNMENT (mode)))
14a774a9
RK
5089 /* If the RHS and field are a constant size and the size of the
5090 RHS isn't the same size as the bitfield, we must use bitfield
5091 operations. */
05bccae2
RK
5092 || (bitsize >= 0
5093 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5094 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
bbf6f052 5095 {
906c4e36 5096 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
bbd6cf73 5097
ef19912d
RK
5098 /* If BITSIZE is narrower than the size of the type of EXP
5099 we will be narrowing TEMP. Normally, what's wanted are the
5100 low-order bits. However, if EXP's type is a record and this is a
5101 big-endian machine, we want the upper BITSIZE bits. */
5102 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
65a07688 5103 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
ef19912d
RK
5104 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5105 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5106 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5107 - bitsize),
5108 temp, 1);
5109
bbd6cf73
RK
5110 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5111 MODE. */
5112 if (mode != VOIDmode && mode != BLKmode
5113 && mode != TYPE_MODE (TREE_TYPE (exp)))
5114 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5115
a281e72d
RK
5116 /* If the modes of TARGET and TEMP are both BLKmode, both
5117 must be in memory and BITPOS must be aligned on a byte
5118 boundary. If so, we simply do a block copy. */
5119 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5120 {
5121 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5122 || bitpos % BITS_PER_UNIT != 0)
5123 abort ();
5124
f4ef873c 5125 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
a281e72d 5126 emit_block_move (target, temp,
a06ef755
RK
5127 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5128 / BITS_PER_UNIT));
a281e72d
RK
5129
5130 return value_mode == VOIDmode ? const0_rtx : target;
5131 }
5132
bbf6f052 5133 /* Store the value in the bitfield. */
a06ef755
RK
5134 store_bit_field (target, bitsize, bitpos, mode, temp,
5135 int_size_in_bytes (type));
5136
bbf6f052
RK
5137 if (value_mode != VOIDmode)
5138 {
04050c69
RK
5139 /* The caller wants an rtx for the value.
5140 If possible, avoid refetching from the bitfield itself. */
bbf6f052
RK
5141 if (width_mask != 0
5142 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5c4d7cfb 5143 {
9074de27 5144 tree count;
5c4d7cfb 5145 enum machine_mode tmode;
86a2c12a 5146
5c4d7cfb 5147 tmode = GET_MODE (temp);
86a2c12a
RS
5148 if (tmode == VOIDmode)
5149 tmode = value_mode;
22273300
JJ
5150
5151 if (unsignedp)
5152 return expand_and (tmode, temp,
5153 GEN_INT (trunc_int_for_mode (width_mask,
5154 tmode)),
5155 NULL_RTX);
5156
5c4d7cfb
RS
5157 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5158 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5159 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5160 }
04050c69 5161
bbf6f052 5162 return extract_bit_field (target, bitsize, bitpos, unsignedp,
04050c69 5163 NULL_RTX, value_mode, VOIDmode,
a06ef755 5164 int_size_in_bytes (type));
bbf6f052
RK
5165 }
5166 return const0_rtx;
5167 }
5168 else
5169 {
5170 rtx addr = XEXP (target, 0);
a06ef755 5171 rtx to_rtx = target;
bbf6f052
RK
5172
5173 /* If a value is wanted, it must be the lhs;
5174 so make the address stable for multiple use. */
5175
5176 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5177 && ! CONSTANT_ADDRESS_P (addr)
5178 /* A frame-pointer reference is already stable. */
5179 && ! (GET_CODE (addr) == PLUS
5180 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5181 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5182 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
a06ef755 5183 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
bbf6f052
RK
5184
5185 /* Now build a reference to just the desired component. */
5186
a06ef755
RK
5187 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5188
5189 if (to_rtx == target)
5190 to_rtx = copy_rtx (to_rtx);
792760b9 5191
c6df88cb 5192 MEM_SET_IN_STRUCT_P (to_rtx, 1);
10b76d73 5193 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
a06ef755 5194 set_mem_alias_set (to_rtx, alias_set);
bbf6f052
RK
5195
5196 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5197 }
5198}
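
/* A standalone illustration of the value-return path above (not part of
   expr.c proper): rather than re-reading the bit-field just stored, the
   code masks TEMP with WIDTH_MASK for unsigned fields, or sign-extends it
   with a left shift followed by an arithmetic right shift.  A model on a
   32-bit int; the name narrow_bitfield_value is hypothetical.  */
#if 0
#include <stdio.h>

/* Reduce VAL to its low BITSIZE bits, zero- or sign-extending.  (The
   RTL-level shifts operate on the machine mode; in strict C the signed
   right shift is implementation-defined, arithmetic on the usual
   two's-complement targets.)  */
static int
narrow_bitfield_value (int val, int bitsize, int unsignedp)
{
  if (unsignedp)
    {
      unsigned int width_mask = ((unsigned int) 1 << bitsize) - 1;

      return (int) ((unsigned int) val & width_mask);
    }
  else
    {
      int count = 32 - bitsize;   /* GET_MODE_BITSIZE (tmode) - bitsize */

      return (int) ((unsigned int) val << count) >> count;
    }
}

int
main (void)
{
  printf ("%d\n", narrow_bitfield_value (0x2b, 4, 1));  /* prints 11 */
  printf ("%d\n", narrow_bitfield_value (0x0b, 4, 0));  /* prints -5 */
  return 0;
}
#endif
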
5199\f
5200/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
b4e3fabb
RK
5201 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5202 codes and find the ultimate containing object, which we return.
bbf6f052
RK
5203
5204 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5205 bit position, and *PUNSIGNEDP to the signedness of the field.
7bb0943f
RS
5206 If the position of the field is variable, we store a tree
5207 giving the variable offset (in units) in *POFFSET.
5208 This offset is in addition to the bit position.
5209 If the position is not variable, we store 0 in *POFFSET.
bbf6f052
RK
5210
5211 If any of the extraction expressions is volatile,
5212 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5213
5214 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5215 is a mode that can be used to access the field. In that case, *PBITSIZE
e7c33f54
RK
5216 is redundant.
5217
5218 If the field describes a variable-sized object, *PMODE is set to
5219 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
6d2f8887 5220 this case, but the address of the object can be found. */
bbf6f052
RK
5221
5222tree
4969d05d 5223get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
a06ef755 5224 punsignedp, pvolatilep)
bbf6f052 5225 tree exp;
770ae6cc
RK
5226 HOST_WIDE_INT *pbitsize;
5227 HOST_WIDE_INT *pbitpos;
7bb0943f 5228 tree *poffset;
bbf6f052
RK
5229 enum machine_mode *pmode;
5230 int *punsignedp;
5231 int *pvolatilep;
5232{
5233 tree size_tree = 0;
5234 enum machine_mode mode = VOIDmode;
fed3cef0 5235 tree offset = size_zero_node;
770ae6cc 5236 tree bit_offset = bitsize_zero_node;
738cc472 5237 tree placeholder_ptr = 0;
770ae6cc 5238 tree tem;
bbf6f052 5239
770ae6cc
RK
5240 /* First get the mode, signedness, and size. We do this from just the
5241 outermost expression. */
bbf6f052
RK
5242 if (TREE_CODE (exp) == COMPONENT_REF)
5243 {
5244 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5245 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5246 mode = DECL_MODE (TREE_OPERAND (exp, 1));
770ae6cc 5247
bbf6f052
RK
5248 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5249 }
5250 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5251 {
5252 size_tree = TREE_OPERAND (exp, 1);
5253 *punsignedp = TREE_UNSIGNED (exp);
5254 }
5255 else
5256 {
5257 mode = TYPE_MODE (TREE_TYPE (exp));
770ae6cc
RK
5258 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5259
ab87f8c8
JL
5260 if (mode == BLKmode)
5261 size_tree = TYPE_SIZE (TREE_TYPE (exp));
770ae6cc
RK
5262 else
5263 *pbitsize = GET_MODE_BITSIZE (mode);
bbf6f052 5264 }
3a94c984 5265
770ae6cc 5266 if (size_tree != 0)
bbf6f052 5267 {
770ae6cc 5268 if (! host_integerp (size_tree, 1))
e7c33f54
RK
5269 mode = BLKmode, *pbitsize = -1;
5270 else
770ae6cc 5271 *pbitsize = tree_low_cst (size_tree, 1);
bbf6f052
RK
5272 }
5273
5274 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5275 and find the ultimate containing object. */
bbf6f052
RK
5276 while (1)
5277 {
770ae6cc
RK
5278 if (TREE_CODE (exp) == BIT_FIELD_REF)
5279 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5280 else if (TREE_CODE (exp) == COMPONENT_REF)
bbf6f052 5281 {
770ae6cc
RK
5282 tree field = TREE_OPERAND (exp, 1);
5283 tree this_offset = DECL_FIELD_OFFSET (field);
bbf6f052 5284
e7f3c83f
RK
5285 /* If this field hasn't been filled in yet, don't go
5286 past it. This should only happen when folding expressions
5287 made during type construction. */
770ae6cc 5288 if (this_offset == 0)
e7f3c83f 5289 break;
770ae6cc
RK
5290 else if (! TREE_CONSTANT (this_offset)
5291 && contains_placeholder_p (this_offset))
5292 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
e7f3c83f 5293
7156dead 5294 offset = size_binop (PLUS_EXPR, offset, this_offset);
770ae6cc
RK
5295 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5296 DECL_FIELD_BIT_OFFSET (field));
e6d8c385 5297
a06ef755 5298 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
bbf6f052 5299 }
7156dead 5300
b4e3fabb
RK
5301 else if (TREE_CODE (exp) == ARRAY_REF
5302 || TREE_CODE (exp) == ARRAY_RANGE_REF)
bbf6f052 5303 {
742920c7 5304 tree index = TREE_OPERAND (exp, 1);
b4e3fabb
RK
5305 tree array = TREE_OPERAND (exp, 0);
5306 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
770ae6cc 5307 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
b4e3fabb 5308 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
742920c7 5309
770ae6cc
RK
5310 /* We assume all arrays have sizes that are a multiple of a byte.
5311 First subtract the lower bound, if any, in the type of the
5312 index, then convert to sizetype and multiply by the size of the
5313 array element. */
5314 if (low_bound != 0 && ! integer_zerop (low_bound))
5315 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5316 index, low_bound));
f8dac6eb 5317
7156dead
RK
5318 /* If the index has a self-referential type, pass it to a
5319 WITH_RECORD_EXPR; if the component size is self-referential,
5320 pass our component to one. */
770ae6cc
RK
5321 if (! TREE_CONSTANT (index)
5322 && contains_placeholder_p (index))
5323 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
7156dead
RK
5324 if (! TREE_CONSTANT (unit_size)
5325 && contains_placeholder_p (unit_size))
b4e3fabb 5326 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
742920c7 5327
770ae6cc
RK
5328 offset = size_binop (PLUS_EXPR, offset,
5329 size_binop (MULT_EXPR,
5330 convert (sizetype, index),
7156dead 5331 unit_size));
bbf6f052 5332 }
7156dead 5333
738cc472
RK
5334 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5335 {
70072ed9
RK
5336 tree new = find_placeholder (exp, &placeholder_ptr);
5337
5338 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5339 We might have been called from tree optimization where we
5340 haven't set up an object yet. */
5341 if (new == 0)
5342 break;
5343 else
5344 exp = new;
5345
738cc472
RK
5346 continue;
5347 }
bbf6f052 5348 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
ed239f5a 5349 && TREE_CODE (exp) != VIEW_CONVERT_EXPR
bbf6f052
RK
5350 && ! ((TREE_CODE (exp) == NOP_EXPR
5351 || TREE_CODE (exp) == CONVERT_EXPR)
5352 && (TYPE_MODE (TREE_TYPE (exp))
5353 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5354 break;
7bb0943f
RS
5355
5356 /* If any reference in the chain is volatile, the effect is volatile. */
5357 if (TREE_THIS_VOLATILE (exp))
5358 *pvolatilep = 1;
839c4796 5359
bbf6f052
RK
5360 exp = TREE_OPERAND (exp, 0);
5361 }
5362
770ae6cc
RK
5363 /* If OFFSET is constant, see if we can return the whole thing as a
5364 constant bit position. Otherwise, split it up. */
5365 if (host_integerp (offset, 0)
5366 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5367 bitsize_unit_node))
5368 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5369 && host_integerp (tem, 0))
5370 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5371 else
5372 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
b50d17a1 5373
bbf6f052 5374 *pmode = mode;
bbf6f052
RK
5375 return exp;
5376}
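
/* A standalone illustration of the walk above (not part of expr.c
   proper): for a constant-index reference such as a[i].f, the
   accumulated position reduces to (i - low_bound) * element_size_in_bits
   plus the field's bit offset; any variable part would instead be split
   off into *POFFSET.  The name inner_ref_bitpos is hypothetical.  */
#if 0
#include <stdio.h>

/* Bit position of a field at FIELD_BIT_OFFSET inside element INDEX of
   an array whose domain starts at LOW_BOUND, elements ELT_BITS wide.  */
static long
inner_ref_bitpos (long index, long low_bound, long elt_bits,
                  long field_bit_offset)
{
  return (index - low_bound) * elt_bits + field_bit_offset;
}

int
main (void)
{
  /* struct { int x; short f; } a[10]: position of a[3].f, assuming
     64-bit elements with f at bit offset 32.  */
  printf ("%ld\n", inner_ref_bitpos (3, 0, 64, 32));  /* prints 224 */
  return 0;
}
#endif
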
921b3427 5377
ed239f5a
RK
5378/* Return 1 if T is an expression that get_inner_reference handles. */
5379
5380int
5381handled_component_p (t)
5382 tree t;
5383{
5384 switch (TREE_CODE (t))
5385 {
5386 case BIT_FIELD_REF:
5387 case COMPONENT_REF:
5388 case ARRAY_REF:
5389 case ARRAY_RANGE_REF:
5390 case NON_LVALUE_EXPR:
5391 case VIEW_CONVERT_EXPR:
5392 return 1;
5393
5394 case NOP_EXPR:
5395 case CONVERT_EXPR:
5396 return (TYPE_MODE (TREE_TYPE (t))
5397 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5398
5399 default:
5400 return 0;
5401 }
5402}
bbf6f052 5403\f
3fe44edd
RK
5404/* Given an rtx VALUE that may contain additions and multiplications, return
5405 an equivalent value that just refers to a register, memory, or constant.
5406 This is done by generating instructions to perform the arithmetic and
5407 returning a pseudo-register containing the value.
c45a13a6
RK
5408
5409 The returned value may be a REG, SUBREG, MEM or constant. */
bbf6f052
RK
5410
5411rtx
5412force_operand (value, target)
5413 rtx value, target;
5414{
b3694847 5415 optab binoptab = 0;
bbf6f052
RK
5416 /* Use a temporary to force order of execution of calls to
5417 `force_operand'. */
5418 rtx tmp;
b3694847 5419 rtx op2;
bbf6f052 5420 /* Use subtarget as the target for operand 0 of a binary operation. */
b3694847 5421 rtx subtarget = get_subtarget (target);
bbf6f052 5422
8b015896 5423 /* Check for a PIC address load. */
12beba6f 5424 if ((GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
8b015896
RH
5425 && XEXP (value, 0) == pic_offset_table_rtx
5426 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5427 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5428 || GET_CODE (XEXP (value, 1)) == CONST))
5429 {
5430 if (!subtarget)
5431 subtarget = gen_reg_rtx (GET_MODE (value));
5432 emit_move_insn (subtarget, value);
5433 return subtarget;
5434 }
5435
bbf6f052
RK
5436 if (GET_CODE (value) == PLUS)
5437 binoptab = add_optab;
5438 else if (GET_CODE (value) == MINUS)
5439 binoptab = sub_optab;
5440 else if (GET_CODE (value) == MULT)
5441 {
5442 op2 = XEXP (value, 1);
5443 if (!CONSTANT_P (op2)
5444 && !(GET_CODE (op2) == REG && op2 != subtarget))
5445 subtarget = 0;
5446 tmp = force_operand (XEXP (value, 0), subtarget);
5447 return expand_mult (GET_MODE (value), tmp,
906c4e36 5448 force_operand (op2, NULL_RTX),
91ce572a 5449 target, 1);
bbf6f052
RK
5450 }
5451
5452 if (binoptab)
5453 {
5454 op2 = XEXP (value, 1);
5455 if (!CONSTANT_P (op2)
5456 && !(GET_CODE (op2) == REG && op2 != subtarget))
5457 subtarget = 0;
5458 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5459 {
5460 binoptab = add_optab;
5461 op2 = negate_rtx (GET_MODE (value), op2);
5462 }
5463
5464 /* Check for an addition with OP2 a constant integer and our first
5465 operand a PLUS of a virtual register and something else. In that
5466 case, we want to emit the sum of the virtual register and the
5467 constant first and then add the other value. This allows virtual
5468 register instantiation to simply modify the constant rather than
5469 creating another one around this addition. */
5470 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5471 && GET_CODE (XEXP (value, 0)) == PLUS
5472 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5473 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5474 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5475 {
5476 rtx temp = expand_binop (GET_MODE (value), binoptab,
5477 XEXP (XEXP (value, 0), 0), op2,
5478 subtarget, 0, OPTAB_LIB_WIDEN);
5479 return expand_binop (GET_MODE (value), binoptab, temp,
5480 force_operand (XEXP (XEXP (value, 0), 1), 0),
5481 target, 0, OPTAB_LIB_WIDEN);
5482 }
3a94c984 5483
bbf6f052
RK
5484 tmp = force_operand (XEXP (value, 0), subtarget);
5485 return expand_binop (GET_MODE (value), binoptab, tmp,
906c4e36 5486 force_operand (op2, NULL_RTX),
bbf6f052 5487 target, 0, OPTAB_LIB_WIDEN);
8008b228 5488 /* We give UNSIGNEDP = 0 to expand_binop
bbf6f052
RK
5489 because the only operations we are expanding here are signed ones. */
5490 }
34e81b5a
RK
5491
5492#ifdef INSN_SCHEDULING
5493 /* On machines that have insn scheduling, we want all memory references to be
5494 explicit, so we need to deal with such paradoxical SUBREGs. */
5495 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5496 && (GET_MODE_SIZE (GET_MODE (value))
5497 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5498 value
5499 = simplify_gen_subreg (GET_MODE (value),
5500 force_reg (GET_MODE (SUBREG_REG (value)),
5501 force_operand (SUBREG_REG (value),
5502 NULL_RTX)),
5503 GET_MODE (SUBREG_REG (value)),
5504 SUBREG_BYTE (value));
5505#endif
5506
bbf6f052
RK
5507 return value;
5508}
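
/* A standalone illustration of two of the rewrites above (not part of
   expr.c proper): a MINUS with a CONST_INT becomes a PLUS of the negated
   constant, and ((vreg + x) + c) is regrouped as ((vreg + c) + x) so that
   virtual-register instantiation can later fold C into an existing offset
   instead of needing a new constant.  Names here are hypothetical.  */
#if 0
#include <stdio.h>

/* x - c  ==>  x + (-c): binoptab switches from sub_optab to add_optab.  */
static long
minus_const (long x, long c)
{
  return x + (-c);
}

/* (base + x) + c  ==>  (base + c) + x: sum the virtual register and the
   constant first, then add the remaining operand.  */
static long
regroup_sum (long base, long x, long c)
{
  long folded = base + c;   /* the part instantiation can fold */

  return folded + x;
}

int
main (void)
{
  printf ("%ld\n", minus_const (10, 3));       /* prints 7 */
  printf ("%ld\n", regroup_sum (100, 5, 8));   /* prints 113 */
  return 0;
}
#endif
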
5509\f
bbf6f052 5510/* Subroutine of expand_expr: return nonzero iff there is no way that
e5e809f4
JL
5511 EXP can reference X, which is being modified. TOP_P is nonzero if this
5512 call is going to be used to determine whether we need a temporary
ff439b5f
CB
5513 for EXP, as opposed to a recursive call to this function.
5514
5515 It is always safe for this routine to return zero since it merely
5516 searches for optimization opportunities. */
bbf6f052 5517
8f17b5c5 5518int
e5e809f4 5519safe_from_p (x, exp, top_p)
bbf6f052
RK
5520 rtx x;
5521 tree exp;
e5e809f4 5522 int top_p;
bbf6f052
RK
5523{
5524 rtx exp_rtl = 0;
5525 int i, nops;
1da68f56 5526 static tree save_expr_list;
bbf6f052 5527
6676e72f
RK
5528 if (x == 0
5529 /* If EXP has varying size, we MUST use a target since we currently
8f6562d0
PB
5530 have no way of allocating temporaries of variable size
5531 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5532 So we assume here that something at a higher level has prevented a
f4510f37 5533 clash. This is somewhat bogus, but the best we can do. Only
e5e809f4 5534 do this when X is BLKmode and when we are at the top level. */
d0f062fb 5535 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
f4510f37 5536 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
8f6562d0
PB
5537 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5538 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5539 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5540 != INTEGER_CST)
1da68f56
RK
5541 && GET_MODE (x) == BLKmode)
5542 /* If X is in the outgoing argument area, it is always safe. */
5543 || (GET_CODE (x) == MEM
5544 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5545 || (GET_CODE (XEXP (x, 0)) == PLUS
5546 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
bbf6f052
RK
5547 return 1;
5548
5549 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5550 find the underlying pseudo. */
5551 if (GET_CODE (x) == SUBREG)
5552 {
5553 x = SUBREG_REG (x);
5554 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5555 return 0;
5556 }
5557
1da68f56
RK
5558 /* A SAVE_EXPR might appear many times in the expression passed to the
5559 top-level safe_from_p call, and if it has a complex subexpression,
5560 examining it multiple times could result in a combinatorial explosion.
5561 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5562 with optimization took about 28 minutes to compile -- even though it was
5563 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5564 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5565 we have processed. Note that the only test of top_p was above. */
5566
5567 if (top_p)
5568 {
5569 int rtn;
5570 tree t;
5571
5572 save_expr_list = 0;
5573
5574 rtn = safe_from_p (x, exp, 0);
5575
5576 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5577 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5578
5579 return rtn;
5580 }
bbf6f052 5581
1da68f56 5582 /* Now look at our tree code and possibly recurse. */
bbf6f052
RK
5583 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5584 {
5585 case 'd':
19e7881c 5586 exp_rtl = DECL_RTL_SET_P (exp) ? DECL_RTL (exp) : NULL_RTX;
bbf6f052
RK
5587 break;
5588
5589 case 'c':
5590 return 1;
5591
5592 case 'x':
5593 if (TREE_CODE (exp) == TREE_LIST)
f32fd778 5594 return ((TREE_VALUE (exp) == 0
e5e809f4 5595 || safe_from_p (x, TREE_VALUE (exp), 0))
bbf6f052 5596 && (TREE_CHAIN (exp) == 0
e5e809f4 5597 || safe_from_p (x, TREE_CHAIN (exp), 0)));
ff439b5f
CB
5598 else if (TREE_CODE (exp) == ERROR_MARK)
5599 return 1; /* An already-visited SAVE_EXPR? */
bbf6f052
RK
5600 else
5601 return 0;
5602
5603 case '1':
e5e809f4 5604 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
bbf6f052
RK
5605
5606 case '2':
5607 case '<':
e5e809f4
JL
5608 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5609 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
bbf6f052
RK
5610
5611 case 'e':
5612 case 'r':
5613 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5614 the expression. If it is set, we conflict iff we are that rtx or
5615 both are in memory. Otherwise, we check all operands of the
5616 expression recursively. */
5617
5618 switch (TREE_CODE (exp))
5619 {
5620 case ADDR_EXPR:
70072ed9
RK
5621 /* If the operand is static or we are static, we can't conflict.
5622 Likewise if we don't conflict with the operand at all. */
5623 if (staticp (TREE_OPERAND (exp, 0))
5624 || TREE_STATIC (exp)
5625 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5626 return 1;
5627
5628 /* Otherwise, the only way this can conflict is if we are taking
5629 the address of a DECL whose address is part of X, which is
5630 very rare. */
5631 exp = TREE_OPERAND (exp, 0);
5632 if (DECL_P (exp))
5633 {
5634 if (!DECL_RTL_SET_P (exp)
5635 || GET_CODE (DECL_RTL (exp)) != MEM)
5636 return 0;
5637 else
5638 exp_rtl = XEXP (DECL_RTL (exp), 0);
5639 }
5640 break;
bbf6f052
RK
5641
5642 case INDIRECT_REF:
1da68f56
RK
5643 if (GET_CODE (x) == MEM
5644 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5645 get_alias_set (exp)))
bbf6f052
RK
5646 return 0;
5647 break;
5648
5649 case CALL_EXPR:
f9808f81
MM
5650 /* Assume that the call will clobber all hard registers and
5651 all of memory. */
5652 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5653 || GET_CODE (x) == MEM)
5654 return 0;
bbf6f052
RK
5655 break;
5656
5657 case RTL_EXPR:
3bb5826a
RK
5658 /* If a sequence exists, we would have to scan every instruction
5659 in the sequence to see if it was safe. This is probably not
5660 worthwhile. */
5661 if (RTL_EXPR_SEQUENCE (exp))
bbf6f052
RK
5662 return 0;
5663
3bb5826a 5664 exp_rtl = RTL_EXPR_RTL (exp);
bbf6f052
RK
5665 break;
5666
5667 case WITH_CLEANUP_EXPR:
6ad7895a 5668 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
bbf6f052
RK
5669 break;
5670
5dab5552 5671 case CLEANUP_POINT_EXPR:
e5e809f4 5672 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5dab5552 5673
bbf6f052
RK
5674 case SAVE_EXPR:
5675 exp_rtl = SAVE_EXPR_RTL (exp);
ff439b5f
CB
5676 if (exp_rtl)
5677 break;
5678
1da68f56
RK
5679 /* If we've already scanned this, don't do it again. Otherwise,
5680 show we've scanned it and record for clearing the flag if we're
5681 going on. */
5682 if (TREE_PRIVATE (exp))
5683 return 1;
ff439b5f 5684
1da68f56
RK
5685 TREE_PRIVATE (exp) = 1;
5686 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
ff59bfe6 5687 {
1da68f56
RK
5688 TREE_PRIVATE (exp) = 0;
5689 return 0;
ff59bfe6 5690 }
1da68f56
RK
5691
5692 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
ff439b5f 5693 return 1;
bbf6f052 5694
8129842c
RS
5695 case BIND_EXPR:
5696 /* The only operand we look at is operand 1. The rest aren't
5697 part of the expression. */
e5e809f4 5698 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
8129842c 5699
bbf6f052 5700 case METHOD_CALL_EXPR:
4fe9b91c 5701 /* This takes an rtx argument, but shouldn't appear here. */
bbf6f052 5702 abort ();
3a94c984 5703
e9a25f70
JL
5704 default:
5705 break;
bbf6f052
RK
5706 }
5707
5708 /* If we have an rtx, we do not need to scan our operands. */
5709 if (exp_rtl)
5710 break;
5711
8f17b5c5 5712 nops = first_rtl_op (TREE_CODE (exp));
bbf6f052
RK
5713 for (i = 0; i < nops; i++)
5714 if (TREE_OPERAND (exp, i) != 0
e5e809f4 5715 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
bbf6f052 5716 return 0;
8f17b5c5
MM
5717
5718 /* If this is a language-specific tree code, it may require
5719 special handling. */
dbbbbf3b
JDA
5720 if ((unsigned int) TREE_CODE (exp)
5721 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
ac79cd5a 5722 && !(*lang_hooks.safe_from_p) (x, exp))
8f17b5c5 5723 return 0;
bbf6f052
RK
5724 }
5725
5726 /* If we have an rtl, find any enclosed object. Then see if we conflict
5727 with it. */
5728 if (exp_rtl)
5729 {
5730 if (GET_CODE (exp_rtl) == SUBREG)
5731 {
5732 exp_rtl = SUBREG_REG (exp_rtl);
5733 if (GET_CODE (exp_rtl) == REG
5734 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5735 return 0;
5736 }
5737
5738 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
1da68f56 5739 are memory and they conflict. */
bbf6f052
RK
5740 return ! (rtx_equal_p (x, exp_rtl)
5741 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
21117a17 5742 && true_dependence (exp_rtl, VOIDmode, x,
1da68f56 5743 rtx_addr_varies_p)));
bbf6f052
RK
5744 }
5745
5746 /* If we reach here, it is safe. */
5747 return 1;
5748}
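
/* A standalone illustration of the SAVE_EXPR marking above (not part of
   expr.c proper): flagging each shared node on first visit stops the scan
   from exploding combinatorially when the same subexpression is reachable
   along many paths; the flags are cleared once the top-level call is
   done.  A minimal model on a DAG; all names are hypothetical.  */
#if 0
#include <stdio.h>
#include <stddef.h>

struct node { int visited; struct node *kid[2]; };

/* Scan N and everything below it, visiting each shared node once.  */
static void
scan (struct node *n, long *scans)
{
  if (n == NULL || n->visited)
    return;
  n->visited = 1;
  ++*scans;
  scan (n->kid[0], scans);
  scan (n->kid[1], scans);
}

int
main (void)
{
  static struct node ns[30];    /* zero-initialized */
  long scans = 0;
  int i;

  /* A chain of diamonds: both children of node I point at node I + 1,
     so there are 2^29 paths but only 30 nodes.  */
  for (i = 0; i < 29; i++)
    ns[i].kid[0] = ns[i].kid[1] = &ns[i + 1];

  scan (&ns[0], &scans);
  printf ("%ld scans\n", scans);   /* prints "30 scans", not 2^29 */
  return 0;
}
#endif
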
5749
01c8a7c8
RK
5750/* Subroutine of expand_expr: return rtx if EXP is a
5751 variable or parameter; else return 0. */
5752
5753static rtx
5754var_rtx (exp)
5755 tree exp;
5756{
5757 STRIP_NOPS (exp);
5758 switch (TREE_CODE (exp))
5759 {
5760 case PARM_DECL:
5761 case VAR_DECL:
5762 return DECL_RTL (exp);
5763 default:
5764 return 0;
5765 }
5766}
dbecbbe4
JL
5767
5768#ifdef MAX_INTEGER_COMPUTATION_MODE
400500c4 5769
dbecbbe4
JL
5770void
5771check_max_integer_computation_mode (exp)
3a94c984 5772 tree exp;
dbecbbe4 5773{
5f652c07 5774 enum tree_code code;
dbecbbe4
JL
5775 enum machine_mode mode;
5776
5f652c07
JM
5777 /* Strip any NOPs that don't change the mode. */
5778 STRIP_NOPS (exp);
5779 code = TREE_CODE (exp);
5780
71bca506
JL
5781 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5782 if (code == NOP_EXPR
5783 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5784 return;
5785
dbecbbe4
JL
5786 /* First check the type of the overall operation. We need only look at
5787 unary, binary and relational operations. */
5788 if (TREE_CODE_CLASS (code) == '1'
5789 || TREE_CODE_CLASS (code) == '2'
5790 || TREE_CODE_CLASS (code) == '<')
5791 {
5792 mode = TYPE_MODE (TREE_TYPE (exp));
5793 if (GET_MODE_CLASS (mode) == MODE_INT
5794 && mode > MAX_INTEGER_COMPUTATION_MODE)
400500c4 5795 internal_error ("unsupported wide integer operation");
dbecbbe4
JL
5796 }
5797
5798 /* Check operand of a unary op. */
5799 if (TREE_CODE_CLASS (code) == '1')
5800 {
5801 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5802 if (GET_MODE_CLASS (mode) == MODE_INT
5803 && mode > MAX_INTEGER_COMPUTATION_MODE)
400500c4 5804 internal_error ("unsupported wide integer operation");
dbecbbe4 5805 }
3a94c984 5806
dbecbbe4
JL
5807 /* Check operands of a binary/comparison op. */
5808 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5809 {
5810 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5811 if (GET_MODE_CLASS (mode) == MODE_INT
5812 && mode > MAX_INTEGER_COMPUTATION_MODE)
400500c4 5813 internal_error ("unsupported wide integer operation");
dbecbbe4
JL
5814
5815 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5816 if (GET_MODE_CLASS (mode) == MODE_INT
5817 && mode > MAX_INTEGER_COMPUTATION_MODE)
400500c4 5818 internal_error ("unsupported wide integer operation");
dbecbbe4
JL
5819 }
5820}
5821#endif
14a774a9 5822\f
0d4903b8
RK
5823/* Return the highest power of two that EXP is known to be a multiple of.
5824 This is used in updating alignment of MEMs in array references. */
5825
5826static HOST_WIDE_INT
5827highest_pow2_factor (exp)
5828 tree exp;
5829{
5830 HOST_WIDE_INT c0, c1;
5831
5832 switch (TREE_CODE (exp))
5833 {
5834 case INTEGER_CST:
e0f1be5c
JJ
5835 /* We can find the lowest bit that's a one. If the low
5836 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
5837 We need to handle this case since we can find it in a COND_EXPR,
5838 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
5839 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
3a531a8b 5840 later ICE. */
e0f1be5c 5841 if (TREE_CONSTANT_OVERFLOW (exp))
1ed1b4fb 5842 return BIGGEST_ALIGNMENT;
e0f1be5c 5843 else
0d4903b8 5844 {
e0f1be5c
JJ
5845 /* Note: tree_low_cst is intentionally not used here,
5846 we don't care about the upper bits. */
5847 c0 = TREE_INT_CST_LOW (exp);
5848 c0 &= -c0;
5849 return c0 ? c0 : BIGGEST_ALIGNMENT;
0d4903b8
RK
5850 }
5851 break;
5852
65a07688 5853 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
0d4903b8
RK
5854 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5855 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5856 return MIN (c0, c1);
5857
5858 case MULT_EXPR:
5859 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5860 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5861 return c0 * c1;
5862
5863 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
5864 case CEIL_DIV_EXPR:
65a07688
RK
5865 if (integer_pow2p (TREE_OPERAND (exp, 1))
5866 && host_integerp (TREE_OPERAND (exp, 1), 1))
5867 {
5868 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5869 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
5870 return MAX (1, c0 / c1);
5871 }
5872 break;
0d4903b8
RK
5873
5874 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
65a07688 5875 case SAVE_EXPR: case WITH_RECORD_EXPR:
0d4903b8
RK
5876 return highest_pow2_factor (TREE_OPERAND (exp, 0));
5877
65a07688
RK
5878 case COMPOUND_EXPR:
5879 return highest_pow2_factor (TREE_OPERAND (exp, 1));
5880
0d4903b8
RK
5881 case COND_EXPR:
5882 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5883 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
5884 return MIN (c0, c1);
5885
5886 default:
5887 break;
5888 }
5889
5890 return 1;
5891}
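
/* A standalone illustration of the factor computation above (not part of
   expr.c proper): the INTEGER_CST case isolates the lowest set bit with
   the two's-complement trick C0 &= -C0, and the recursive cases combine
   factors -- MIN for PLUS/MINUS, multiplication for MULT.  A model over
   plain integers; the name pow2_factor is hypothetical.  */
#if 0
#include <stdio.h>

/* Largest power of two known to divide N (N > 0).  */
static unsigned long
pow2_factor (unsigned long n)
{
  return n & -n;   /* keeps only the lowest set bit */
}

int
main (void)
{
  unsigned long a = 24, b = 40;   /* pow2 factors 8 and 8 */
  unsigned long fa = pow2_factor (a), fb = pow2_factor (b);

  printf ("%lu\n", fa);                  /* prints 8 */
  printf ("%lu\n", fa < fb ? fa : fb);   /* PLUS combines with MIN: 8 */
  printf ("%lu\n", fa * fb);             /* MULT multiplies: 64 */
  return 0;
}
#endif
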
5892\f
f47e9b4e
RK
5893/* Return an object on the placeholder list that matches EXP, a
5894 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
738cc472 5895 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
70072ed9
RK
5896 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
5897 is a location which initially points to a starting location in the
738cc472
RK
5898 placeholder list (zero means start of the list); on return it points to
5899 the entry in the placeholder list at which the object was found. */
f47e9b4e
RK
5900
5901tree
5902find_placeholder (exp, plist)
5903 tree exp;
5904 tree *plist;
5905{
5906 tree type = TREE_TYPE (exp);
5907 tree placeholder_expr;
5908
738cc472
RK
5909 for (placeholder_expr
5910 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
5911 placeholder_expr != 0;
f47e9b4e
RK
5912 placeholder_expr = TREE_CHAIN (placeholder_expr))
5913 {
5914 tree need_type = TYPE_MAIN_VARIANT (type);
5915 tree elt;
5916
5917 /* Find the outermost reference that is of the type we want. If none,
5918 see if any object has a type that is a pointer to the type we
5919 want. */
5920 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
5921 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
5922 || TREE_CODE (elt) == COND_EXPR)
5923 ? TREE_OPERAND (elt, 1)
5924 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5925 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5926 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5927 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5928 ? TREE_OPERAND (elt, 0) : 0))
5929 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
5930 {
5931 if (plist)
5932 *plist = placeholder_expr;
5933 return elt;
5934 }
5935
5936 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
5937 elt
5938 = ((TREE_CODE (elt) == COMPOUND_EXPR
5939 || TREE_CODE (elt) == COND_EXPR)
5940 ? TREE_OPERAND (elt, 1)
5941 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5942 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5943 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5944 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5945 ? TREE_OPERAND (elt, 0) : 0))
5946 if (POINTER_TYPE_P (TREE_TYPE (elt))
5947 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
5948 == need_type))
5949 {
5950 if (plist)
5951 *plist = placeholder_expr;
5952 return build1 (INDIRECT_REF, need_type, elt);
5953 }
5954 }
5955
70072ed9 5956 return 0;
f47e9b4e
RK
5957}
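
/* A standalone illustration of the PLIST protocol above (not part of
   expr.c proper): the search both resumes from a caller-supplied position
   and reports where the match was found, so repeated calls step through
   successive matches.  A minimal resumable list search; all names are
   hypothetical.  */
#if 0
#include <stdio.h>
#include <stddef.h>

struct cell { int value; struct cell *chain; };

/* Find the first cell holding VALUE after *PLIST (NULL means start at
   HEAD); store the position of the match back through PLIST.  */
static struct cell *
find_from (struct cell *head, int value, struct cell **plist)
{
  struct cell *c = (plist && *plist) ? (*plist)->chain : head;

  for (; c != NULL; c = c->chain)
    if (c->value == value)
      {
        if (plist)
          *plist = c;
        return c;
      }
  return NULL;
}

int
main (void)
{
  struct cell c3 = { 7, NULL }, c2 = { 5, &c3 }, c1 = { 7, &c2 };
  struct cell *pos = NULL;

  find_from (&c1, 7, &pos);      /* finds c1 */
  find_from (&c1, 7, &pos);      /* resumes past c1, finds c3 */
  printf ("%d\n", pos == &c3);   /* prints 1 */
  return 0;
}
#endif
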
5958\f
bbf6f052
RK
5959/* expand_expr: generate code for computing expression EXP.
5960 An rtx for the computed value is returned. The value is never null.
5961 In the case of a void EXP, const0_rtx is returned.
5962
5963 The value may be stored in TARGET if TARGET is nonzero.
5964 TARGET is just a suggestion; callers must assume that
5965 the rtx returned may not be the same as TARGET.
5966
5967 If TARGET is CONST0_RTX, it means that the value will be ignored.
5968
5969 If TMODE is not VOIDmode, it suggests generating the
5970 result in mode TMODE. But this is done only when convenient.
5971 Otherwise, TMODE is ignored and the value is generated in its natural mode.
5972 TMODE is just a suggestion; callers must assume that
5973 the rtx returned may not have mode TMODE.
5974
d6a5ac33
RK
5975 Note that TARGET may have neither TMODE nor MODE. In that case, it
5976 probably will not be used.
bbf6f052
RK
5977
5978 If MODIFIER is EXPAND_SUM then when EXP is an addition
5979 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5980 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5981 products as above, or REG or MEM, or constant.
5982 Ordinarily in such cases we would output mul or add instructions
5983 and then return a pseudo reg containing the sum.
5984
5985 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5986 it also marks a label as absolutely required (it can't be dead).
26fcb35a 5987 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
d6a5ac33
RK
5988 This is used for outputting expressions used in initializers.
5989
5990 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5991 with a constant address even if that address is not normally legitimate.
5992 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
bbf6f052
RK
5993
5994rtx
5995expand_expr (exp, target, tmode, modifier)
b3694847 5996 tree exp;
bbf6f052
RK
5997 rtx target;
5998 enum machine_mode tmode;
5999 enum expand_modifier modifier;
6000{
b3694847 6001 rtx op0, op1, temp;
bbf6f052
RK
6002 tree type = TREE_TYPE (exp);
6003 int unsignedp = TREE_UNSIGNED (type);
b3694847
SS
6004 enum machine_mode mode;
6005 enum tree_code code = TREE_CODE (exp);
bbf6f052 6006 optab this_optab;
68557e14
ML
6007 rtx subtarget, original_target;
6008 int ignore;
bbf6f052
RK
6009 tree context;
6010
3a94c984 6011 /* Handle ERROR_MARK before anybody tries to access its type. */
85f3d674 6012 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
68557e14
ML
6013 {
6014 op0 = CONST0_RTX (tmode);
6015 if (op0 != 0)
6016 return op0;
6017 return const0_rtx;
6018 }
6019
6020 mode = TYPE_MODE (type);
6021 /* Use subtarget as the target for operand 0 of a binary operation. */
296b4ed9 6022 subtarget = get_subtarget (target);
68557e14
ML
6023 original_target = target;
6024 ignore = (target == const0_rtx
6025 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6026 || code == CONVERT_EXPR || code == REFERENCE_EXPR
ac79cd5a 6027 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
68557e14
ML
6028 && TREE_CODE (type) == VOID_TYPE));
6029
dd27116b
RK
6030 /* If we are going to ignore this result, we need only do something
6031 if there is a side-effect somewhere in the expression. If there
b50d17a1
RK
6032 is, short-circuit the most common cases here. Note that we must
6033 not call expand_expr with anything but const0_rtx in case this
6034 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
bbf6f052 6035
dd27116b
RK
6036 if (ignore)
6037 {
6038 if (! TREE_SIDE_EFFECTS (exp))
6039 return const0_rtx;
6040
14a774a9
RK
6041 /* Ensure we reference a volatile object even if value is ignored, but
6042 don't do this if all we are doing is taking its address. */
dd27116b
RK
6043 if (TREE_THIS_VOLATILE (exp)
6044 && TREE_CODE (exp) != FUNCTION_DECL
14a774a9
RK
6045 && mode != VOIDmode && mode != BLKmode
6046 && modifier != EXPAND_CONST_ADDRESS)
dd27116b 6047 {
37a08a29 6048 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
dd27116b
RK
6049 if (GET_CODE (temp) == MEM)
6050 temp = copy_to_reg (temp);
6051 return const0_rtx;
6052 }
6053
14a774a9
RK
6054 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6055 || code == INDIRECT_REF || code == BUFFER_REF)
37a08a29
RK
6056 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6057 modifier);
6058
14a774a9 6059 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
b4e3fabb 6060 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
dd27116b 6061 {
37a08a29
RK
6062 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6063 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
dd27116b
RK
6064 return const0_rtx;
6065 }
6066 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6067 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6068 /* If the second operand has no side effects, just evaluate
0f41302f 6069 the first. */
37a08a29
RK
6070 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6071 modifier);
14a774a9
RK
6072 else if (code == BIT_FIELD_REF)
6073 {
37a08a29
RK
6074 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6075 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6076 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
14a774a9
RK
6077 return const0_rtx;
6078 }
37a08a29 6079
90764a87 6080 target = 0;
dd27116b 6081 }
bbf6f052 6082
dbecbbe4 6083#ifdef MAX_INTEGER_COMPUTATION_MODE
5f652c07
JM
6084 /* Only check stuff here if the mode we want is different from the mode
6085 of the expression; if it's the same, check_max_integer_computation_mode
6086 will handle it. Do we really need to check this stuff at all? */
6087
ce3c0b53 6088 if (target
5f652c07 6089 && GET_MODE (target) != mode
ce3c0b53
JL
6090 && TREE_CODE (exp) != INTEGER_CST
6091 && TREE_CODE (exp) != PARM_DECL
ee06cc21 6092 && TREE_CODE (exp) != ARRAY_REF
b4e3fabb 6093 && TREE_CODE (exp) != ARRAY_RANGE_REF
ee06cc21
JL
6094 && TREE_CODE (exp) != COMPONENT_REF
6095 && TREE_CODE (exp) != BIT_FIELD_REF
6096 && TREE_CODE (exp) != INDIRECT_REF
6bcd94ae 6097 && TREE_CODE (exp) != CALL_EXPR
6ab46dff
GRK
6098 && TREE_CODE (exp) != VAR_DECL
6099 && TREE_CODE (exp) != RTL_EXPR)
dbecbbe4
JL
6100 {
6101 enum machine_mode mode = GET_MODE (target);
6102
6103 if (GET_MODE_CLASS (mode) == MODE_INT
6104 && mode > MAX_INTEGER_COMPUTATION_MODE)
400500c4 6105 internal_error ("unsupported wide integer operation");
dbecbbe4
JL
6106 }
6107
5f652c07
JM
6108 if (tmode != mode
6109 && TREE_CODE (exp) != INTEGER_CST
ce3c0b53 6110 && TREE_CODE (exp) != PARM_DECL
ee06cc21 6111 && TREE_CODE (exp) != ARRAY_REF
b4e3fabb 6112 && TREE_CODE (exp) != ARRAY_RANGE_REF
ee06cc21
JL
6113 && TREE_CODE (exp) != COMPONENT_REF
6114 && TREE_CODE (exp) != BIT_FIELD_REF
6115 && TREE_CODE (exp) != INDIRECT_REF
ce3c0b53 6116 && TREE_CODE (exp) != VAR_DECL
6bcd94ae 6117 && TREE_CODE (exp) != CALL_EXPR
6ab46dff 6118 && TREE_CODE (exp) != RTL_EXPR
71bca506 6119 && GET_MODE_CLASS (tmode) == MODE_INT
dbecbbe4 6120 && tmode > MAX_INTEGER_COMPUTATION_MODE)
400500c4 6121 internal_error ("unsupported wide integer operation");
dbecbbe4
JL
6122
6123 check_max_integer_computation_mode (exp);
6124#endif
6125
e44842fe
RK
6126 /* If we will do cse, generate all results into pseudo registers
6127 since 1) that allows cse to find more things
6128 and 2) otherwise cse could produce an insn the machine
c24ae149
RK
6129 cannot support. An exception is a CONSTRUCTOR into a multi-word
6130 MEM: that's much more likely to be most efficient into the MEM. */
e44842fe 6131
bbf6f052 6132 if (! cse_not_expected && mode != BLKmode && target
c24ae149
RK
6133 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6134 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD))
bbf6f052
RK
6135 target = subtarget;
6136
bbf6f052
RK
6137 switch (code)
6138 {
6139 case LABEL_DECL:
b552441b
RS
6140 {
6141 tree function = decl_function_context (exp);
6142 /* Handle using a label in a containing function. */
d0977240
RK
6143 if (function != current_function_decl
6144 && function != inline_function_decl && function != 0)
b552441b
RS
6145 {
6146 struct function *p = find_function_data (function);
49ad7cfa
BS
6147 p->expr->x_forced_labels
6148 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6149 p->expr->x_forced_labels);
b552441b 6150 }
ab87f8c8
JL
6151 else
6152 {
ab87f8c8
JL
6153 if (modifier == EXPAND_INITIALIZER)
6154 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6155 label_rtx (exp),
6156 forced_labels);
6157 }
c5c76735 6158
38a448ca
RH
6159 temp = gen_rtx_MEM (FUNCTION_MODE,
6160 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
d0977240
RK
6161 if (function != current_function_decl
6162 && function != inline_function_decl && function != 0)
26fcb35a
RS
6163 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6164 return temp;
b552441b 6165 }
bbf6f052
RK
6166
6167 case PARM_DECL:
6168 if (DECL_RTL (exp) == 0)
6169 {
6170 error_with_decl (exp, "prior parameter's size depends on `%s'");
4af3895e 6171 return CONST0_RTX (mode);
bbf6f052
RK
6172 }
6173
0f41302f 6174 /* ... fall through ... */
d6a5ac33 6175
bbf6f052 6176 case VAR_DECL:
2dca20cd
RS
6177 /* If a static var's type was incomplete when the decl was written,
6178 but the type is complete now, lay out the decl now. */
d0f062fb 6179 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
2dca20cd
RS
6180 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6181 {
ed239f5a
RK
6182 rtx value = DECL_RTL_IF_SET (exp);
6183
2dca20cd 6184 layout_decl (exp, 0);
ed239f5a
RK
6185
6186 /* If the RTL was already set, update its mode and memory
6187 attributes. */
6188 if (value != 0)
6189 {
6190 PUT_MODE (value, DECL_MODE (exp));
6191 SET_DECL_RTL (exp, 0);
6192 set_mem_attributes (value, exp, 1);
6193 SET_DECL_RTL (exp, value);
6194 }
505ddab6 6195 }
921b3427 6196
0f41302f 6197 /* ... fall through ... */
d6a5ac33 6198
2dca20cd 6199 case FUNCTION_DECL:
bbf6f052
RK
6200 case RESULT_DECL:
6201 if (DECL_RTL (exp) == 0)
6202 abort ();
d6a5ac33 6203
e44842fe
RK
6204 /* Ensure the variable is marked as used even if it doesn't go through
6205 a parser. If it hasn't been used yet, write out an external
6206 definition. */
6207 if (! TREE_USED (exp))
6208 {
6209 assemble_external (exp);
6210 TREE_USED (exp) = 1;
6211 }
6212
dc6d66b3
RK
6213 /* Show we haven't gotten RTL for this yet. */
6214 temp = 0;
6215
bbf6f052
RK
6216 /* Handle variables inherited from containing functions. */
6217 context = decl_function_context (exp);
6218
6219 /* We treat inline_function_decl as an alias for the current function
6220 because that is the inline function whose vars, types, etc.
6221 are being merged into the current function.
6222 See expand_inline_function. */
d6a5ac33 6223
bbf6f052
RK
6224 if (context != 0 && context != current_function_decl
6225 && context != inline_function_decl
6226 /* If var is static, we don't need a static chain to access it. */
6227 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6228 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6229 {
6230 rtx addr;
6231
6232 /* Mark as non-local and addressable. */
81feeecb 6233 DECL_NONLOCAL (exp) = 1;
38ee6ed9
JM
6234 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6235 abort ();
bbf6f052
RK
6236 mark_addressable (exp);
6237 if (GET_CODE (DECL_RTL (exp)) != MEM)
6238 abort ();
6239 addr = XEXP (DECL_RTL (exp), 0);
6240 if (GET_CODE (addr) == MEM)
792760b9
RK
6241 addr
6242 = replace_equiv_address (addr,
6243 fix_lexical_addr (XEXP (addr, 0), exp));
bbf6f052
RK
6244 else
6245 addr = fix_lexical_addr (addr, exp);
3bdf5ad1 6246
792760b9 6247 temp = replace_equiv_address (DECL_RTL (exp), addr);
bbf6f052 6248 }
4af3895e 6249
bbf6f052
RK
6250 /* This is the case of an array whose size is to be determined
6251 from its initializer, while the initializer is still being parsed.
6252 See expand_decl. */
d6a5ac33 6253
dc6d66b3
RK
6254 else if (GET_CODE (DECL_RTL (exp)) == MEM
6255 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
792760b9 6256 temp = validize_mem (DECL_RTL (exp));
d6a5ac33
RK
6257
6258 /* If DECL_RTL is memory, we are in the normal case, and either
6259 the address is not valid or it is not a register and -fforce-addr
6260 is specified; get the address into a register. */
6261
dc6d66b3
RK
6262 else if (GET_CODE (DECL_RTL (exp)) == MEM
6263 && modifier != EXPAND_CONST_ADDRESS
6264 && modifier != EXPAND_SUM
6265 && modifier != EXPAND_INITIALIZER
6266 && (! memory_address_p (DECL_MODE (exp),
6267 XEXP (DECL_RTL (exp), 0))
6268 || (flag_force_addr
6269 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
792760b9
RK
6270 temp = replace_equiv_address (DECL_RTL (exp),
6271 copy_rtx (XEXP (DECL_RTL (exp), 0)));
1499e0a8 6272
dc6d66b3 6273 /* If we got something, return it. But first, set the alignment
04956a1a 6274 if the address is a register. */
dc6d66b3
RK
6275 if (temp != 0)
6276 {
6277 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
bdb429a5 6278 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
dc6d66b3
RK
6279
6280 return temp;
6281 }
6282
1499e0a8
RK
6283 /* If the mode of DECL_RTL does not match that of the decl, it
6284 must be a promoted value. We return a SUBREG of the wanted mode,
6285 but mark it so that we know that it was already extended. */
6286
6287 if (GET_CODE (DECL_RTL (exp)) == REG
7254c5fa 6288 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
1499e0a8 6289 {
1499e0a8
RK
6290 /* Get the signedness used for this variable. Ensure we get the
6291 same mode we got when the variable was declared. */
78911e8b
RK
6292 if (GET_MODE (DECL_RTL (exp))
6293 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
1499e0a8
RK
6294 abort ();
6295
ddef6bc7 6296 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
1499e0a8 6297 SUBREG_PROMOTED_VAR_P (temp) = 1;
7879b81e 6298 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
1499e0a8
RK
6299 return temp;
6300 }
6301
bbf6f052
RK
6302 return DECL_RTL (exp);
6303
6304 case INTEGER_CST:
6305 return immed_double_const (TREE_INT_CST_LOW (exp),
05bccae2 6306 TREE_INT_CST_HIGH (exp), mode);
bbf6f052
RK
6307
6308 case CONST_DECL:
37a08a29 6309 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
bbf6f052
RK
6310
6311 case REAL_CST:
6312 /* If optimized, generate immediate CONST_DOUBLE
3a94c984
KH
6313 which will be turned into memory by reload if necessary.
6314
bbf6f052
RK
6315 We used to force a register so that loop.c could see it. But
6316 this does not allow gen_* patterns to perform optimizations with
6317 the constants. It also produces two insns in cases like "x = 1.0;".
6318 On most machines, floating-point constants are not permitted in
6319 many insns, so we'd end up copying it to a register in any case.
6320
6321 Now, we do the copying in expand_binop, if appropriate. */
6322 return immed_real_const (exp);
6323
6324 case COMPLEX_CST:
6325 case STRING_CST:
6326 if (! TREE_CST_RTL (exp))
bd7cf17e 6327 output_constant_def (exp, 1);
bbf6f052
RK
6328
6329 /* TREE_CST_RTL probably contains a constant address.
6330 On RISC machines where a constant address isn't valid,
6331 make some insns to get that address into a register. */
6332 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6333 && modifier != EXPAND_CONST_ADDRESS
6334 && modifier != EXPAND_INITIALIZER
6335 && modifier != EXPAND_SUM
d6a5ac33
RK
6336 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6337 || (flag_force_addr
6338 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
792760b9
RK
6339 return replace_equiv_address (TREE_CST_RTL (exp),
6340 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
bbf6f052
RK
6341 return TREE_CST_RTL (exp);
6342
bf1e5319 6343 case EXPR_WITH_FILE_LOCATION:
b24f65cd
APB
6344 {
6345 rtx to_return;
3b304f5b 6346 const char *saved_input_filename = input_filename;
b24f65cd
APB
6347 int saved_lineno = lineno;
6348 input_filename = EXPR_WFL_FILENAME (exp);
6349 lineno = EXPR_WFL_LINENO (exp);
6350 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6351 emit_line_note (input_filename, lineno);
6ad7895a 6352 /* Possibly avoid switching back and forth here. */
b0ca54af 6353 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
b24f65cd
APB
6354 input_filename = saved_input_filename;
6355 lineno = saved_lineno;
6356 return to_return;
6357 }
bf1e5319 6358
bbf6f052
RK
6359 case SAVE_EXPR:
6360 context = decl_function_context (exp);
d6a5ac33 6361
d0977240
RK
6362 /* If this SAVE_EXPR was at global context, assume we are an
6363 initialization function and move it into our context. */
6364 if (context == 0)
6365 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6366
bbf6f052
RK
6367 /* We treat inline_function_decl as an alias for the current function
6368 because that is the inline function whose vars, types, etc.
6369 are being merged into the current function.
6370 See expand_inline_function. */
6371 if (context == current_function_decl || context == inline_function_decl)
6372 context = 0;
6373
6374 /* If this is non-local, handle it. */
6375 if (context)
6376 {
d0977240
RK
6377 /* The following call just exists to abort if the context is
6378 not of a containing function. */
6379 find_function_data (context);
6380
bbf6f052
RK
6381 temp = SAVE_EXPR_RTL (exp);
6382 if (temp && GET_CODE (temp) == REG)
6383 {
6384 put_var_into_stack (exp);
6385 temp = SAVE_EXPR_RTL (exp);
6386 }
6387 if (temp == 0 || GET_CODE (temp) != MEM)
6388 abort ();
792760b9
RK
6389 return
6390 replace_equiv_address (temp,
6391 fix_lexical_addr (XEXP (temp, 0), exp));
bbf6f052
RK
6392 }
6393 if (SAVE_EXPR_RTL (exp) == 0)
6394 {
06089a8b
RK
6395 if (mode == VOIDmode)
6396 temp = const0_rtx;
6397 else
1da68f56
RK
6398 temp = assign_temp (build_qualified_type (type,
6399 (TYPE_QUALS (type)
6400 | TYPE_QUAL_CONST)),
6401 3, 0, 0);
1499e0a8 6402
bbf6f052 6403 SAVE_EXPR_RTL (exp) = temp;
bbf6f052 6404 if (!optimize && GET_CODE (temp) == REG)
38a448ca
RH
6405 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6406 save_expr_regs);
ff78f773
RK
6407
6408 /* If the mode of TEMP does not match that of the expression, it
6409 must be a promoted value. We pass store_expr a SUBREG of the
6410 wanted mode but mark it so that we know that it was already
6411 extended. Note that `unsignedp' was modified above in
6412 this case. */
6413
6414 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6415 {
ddef6bc7 6416 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
ff78f773 6417 SUBREG_PROMOTED_VAR_P (temp) = 1;
7879b81e 6418 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
ff78f773
RK
6419 }
6420
4c7a0be9 6421 if (temp == const0_rtx)
37a08a29 6422 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4c7a0be9
JW
6423 else
6424 store_expr (TREE_OPERAND (exp, 0), temp, 0);
e5e809f4
JL
6425
6426 TREE_USED (exp) = 1;
bbf6f052 6427 }
1499e0a8
RK
6428
6429 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6430 must be a promoted value. We return a SUBREG of the wanted mode,
0f41302f 6431 but mark it so that we know that it was already extended. */
1499e0a8
RK
6432
6433 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6434 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6435 {
e70d22c8
RK
6436 /* Compute the signedness and make the proper SUBREG. */
6437 promote_mode (type, mode, &unsignedp, 0);
ddef6bc7 6438 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
1499e0a8 6439 SUBREG_PROMOTED_VAR_P (temp) = 1;
7879b81e 6440 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
1499e0a8
RK
6441 return temp;
6442 }
6443
bbf6f052
RK
6444 return SAVE_EXPR_RTL (exp);
6445
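/* A source-level analogue (a sketch, not part of expr.c): a SAVE_EXPR
   caches its operand in SAVE_EXPR_RTL so it is evaluated only once.  The
   function f below is a hypothetical stand-in for any expensive operand.  */
#if 0
extern int f (int);

static int
save_expr_analogue (int x)
{
  int tmp = f (x);	/* first expansion computes and caches the value */
  return tmp + tmp;	/* later uses reuse the cached rtx, not f(x) again */
}
#endif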
679163cf
MS
6446 case UNSAVE_EXPR:
6447 {
6448 rtx temp;
6449 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
24965e7a
NB
6450 TREE_OPERAND (exp, 0)
6451 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
679163cf
MS
6452 return temp;
6453 }
6454
b50d17a1 6455 case PLACEHOLDER_EXPR:
e9a25f70 6456 {
f47e9b4e 6457 tree old_list = placeholder_list;
738cc472 6458 tree placeholder_expr = 0;
e9a25f70 6459
f47e9b4e 6460 exp = find_placeholder (exp, &placeholder_expr);
70072ed9
RK
6461 if (exp == 0)
6462 abort ();
6463
f47e9b4e 6464 placeholder_list = TREE_CHAIN (placeholder_expr);
37a08a29 6465 temp = expand_expr (exp, original_target, tmode, modifier);
f47e9b4e
RK
6466 placeholder_list = old_list;
6467 return temp;
e9a25f70 6468 }
b50d17a1
RK
6469
6470 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6471 abort ();
6472
6473 case WITH_RECORD_EXPR:
6474 /* Put the object on the placeholder list, expand our first operand,
6475 and pop the list. */
6476 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6477 placeholder_list);
37a08a29
RK
6478 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
6479 modifier);
b50d17a1
RK
6480 placeholder_list = TREE_CHAIN (placeholder_list);
6481 return target;
6482
70e6ca43
APB
6483 case GOTO_EXPR:
6484 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6485 expand_goto (TREE_OPERAND (exp, 0));
6486 else
6487 expand_computed_goto (TREE_OPERAND (exp, 0));
6488 return const0_rtx;
6489
bbf6f052 6490 case EXIT_EXPR:
df4ae160 6491 expand_exit_loop_if_false (NULL,
e44842fe 6492 invert_truthvalue (TREE_OPERAND (exp, 0)));
bbf6f052
RK
6493 return const0_rtx;
6494
f42e28dd
APB
6495 case LABELED_BLOCK_EXPR:
6496 if (LABELED_BLOCK_BODY (exp))
b0832fe1 6497 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
30f7a378 6498 /* Should perhaps use expand_label, but this is simpler and safer. */
0a5fee32 6499 do_pending_stack_adjust ();
f42e28dd
APB
6500 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6501 return const0_rtx;
6502
6503 case EXIT_BLOCK_EXPR:
6504 if (EXIT_BLOCK_RETURN (exp))
ab87f8c8 6505 sorry ("returned value in block_exit_expr");
f42e28dd
APB
6506 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6507 return const0_rtx;
6508
bbf6f052 6509 case LOOP_EXPR:
0088fcb1 6510 push_temp_slots ();
bbf6f052 6511 expand_start_loop (1);
b0832fe1 6512 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
bbf6f052 6513 expand_end_loop ();
0088fcb1 6514 pop_temp_slots ();
bbf6f052
RK
6515
6516 return const0_rtx;
6517
6518 case BIND_EXPR:
6519 {
6520 tree vars = TREE_OPERAND (exp, 0);
6521 int vars_need_expansion = 0;
6522
6523 /* Need to open a binding contour here because
e976b8b2 6524 if there are any cleanups they must be contained here. */
8e91754e 6525 expand_start_bindings (2);
bbf6f052 6526
2df53c0b
RS
6527 /* Mark the corresponding BLOCK for output in its proper place. */
6528 if (TREE_OPERAND (exp, 2) != 0
6529 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6530 insert_block (TREE_OPERAND (exp, 2));
bbf6f052
RK
6531
6532 /* If VARS have not yet been expanded, expand them now. */
6533 while (vars)
6534 {
19e7881c 6535 if (!DECL_RTL_SET_P (vars))
bbf6f052
RK
6536 {
6537 vars_need_expansion = 1;
6538 expand_decl (vars);
6539 }
6540 expand_decl_init (vars);
6541 vars = TREE_CHAIN (vars);
6542 }
6543
37a08a29 6544 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
bbf6f052
RK
6545
6546 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6547
6548 return temp;
6549 }
6550
6551 case RTL_EXPR:
83b853c9
JM
6552 if (RTL_EXPR_SEQUENCE (exp))
6553 {
6554 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6555 abort ();
6556 emit_insns (RTL_EXPR_SEQUENCE (exp));
6557 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6558 }
64dc53f3
MM
6559 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6560 free_temps_for_rtl_expr (exp);
bbf6f052
RK
6561 return RTL_EXPR_RTL (exp);
6562
6563 case CONSTRUCTOR:
dd27116b
RK
6564 /* If we don't need the result, just ensure we evaluate any
6565 subexpressions. */
6566 if (ignore)
6567 {
6568 tree elt;
37a08a29 6569
dd27116b 6570 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
37a08a29
RK
6571 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6572
dd27116b
RK
6573 return const0_rtx;
6574 }
3207b172 6575
4af3895e
JVA
6576 /* All elts simple constants => refer to a constant in memory. But
6577 if this is a non-BLKmode mode, let it store a field at a time
6578 since that should make a CONST_INT or CONST_DOUBLE when we
3207b172 6579 fold. Likewise, if we have a target we can use, it is best to
d720b9d1
RK
6580 store directly into the target unless the type is large enough
6581 that memcpy will be used. If we are making an initializer and
3207b172 6582 all operands are constant, put it in memory as well. */
dd27116b 6583 else if ((TREE_STATIC (exp)
3207b172 6584 && ((mode == BLKmode
e5e809f4 6585 && ! (target != 0 && safe_from_p (target, exp, 1)))
d720b9d1 6586 || TREE_ADDRESSABLE (exp)
19caa751 6587 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
3a94c984 6588 && (! MOVE_BY_PIECES_P
19caa751
RK
6589 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6590 TYPE_ALIGN (type)))
9de08200 6591 && ! mostly_zeros_p (exp))))
dd27116b 6592 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
bbf6f052 6593 {
bd7cf17e 6594 rtx constructor = output_constant_def (exp, 1);
19caa751 6595
b552441b
RS
6596 if (modifier != EXPAND_CONST_ADDRESS
6597 && modifier != EXPAND_INITIALIZER
792760b9
RK
6598 && modifier != EXPAND_SUM)
6599 constructor = validize_mem (constructor);
6600
bbf6f052
RK
6601 return constructor;
6602 }
bbf6f052
RK
6603 else
6604 {
e9ac02a6
JW
6605 /* Handle calls that pass values in multiple non-contiguous
6606 locations. The Irix 6 ABI has examples of this. */
e5e809f4 6607 if (target == 0 || ! safe_from_p (target, exp, 1)
e9ac02a6 6608 || GET_CODE (target) == PARALLEL)
1da68f56
RK
6609 target
6610 = assign_temp (build_qualified_type (type,
6611 (TYPE_QUALS (type)
6612 | (TREE_READONLY (exp)
6613 * TYPE_QUAL_CONST))),
c24ae149 6614 0, TREE_ADDRESSABLE (exp), 1);
07604beb 6615
04050c69 6616 store_constructor (exp, target, 0,
b7010412 6617 int_size_in_bytes (TREE_TYPE (exp)));
bbf6f052
RK
6618 return target;
6619 }
6620
6621 case INDIRECT_REF:
6622 {
6623 tree exp1 = TREE_OPERAND (exp, 0);
7581a30f 6624 tree index;
3a94c984
KH
6625 tree string = string_constant (exp1, &index);
6626
06eaa86f 6627 /* Try to optimize reads from const strings. */
7581a30f
JW
6628 if (string
6629 && TREE_CODE (string) == STRING_CST
6630 && TREE_CODE (index) == INTEGER_CST
05bccae2 6631 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
7581a30f 6632 && GET_MODE_CLASS (mode) == MODE_INT
06eaa86f 6633 && GET_MODE_SIZE (mode) == 1
37a08a29 6634 && modifier != EXPAND_WRITE)
05bccae2
RK
6635 return
6636 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
bbf6f052 6637
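/* An example of the fold above (a sketch, not part of expr.c): a constant
   index into a string literal becomes an immediate character constant,
   with no memory reference emitted.  */
#if 0
static int
read_const_string (void)
{
  return "hello"[1];	/* expanded as GEN_INT ('e'), i.e. 101 */
}
#endif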
405f0da6
JW
6638 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6639 op0 = memory_address (mode, op0);
38a448ca 6640 temp = gen_rtx_MEM (mode, op0);
3bdf5ad1 6641 set_mem_attributes (temp, exp, 0);
1125706f 6642
14a774a9
RK
6643 /* If we are writing to this object and its type is a record with
6644 readonly fields, we must mark it as readonly so it will
6645 conflict with readonly references to those fields. */
37a08a29 6646 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
14a774a9
RK
6647 RTX_UNCHANGING_P (temp) = 1;
6648
8c8a8e34
JW
6649 return temp;
6650 }
bbf6f052
RK
6651
6652 case ARRAY_REF:
742920c7
RK
6653 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6654 abort ();
bbf6f052 6655
bbf6f052 6656 {
742920c7
RK
6657 tree array = TREE_OPERAND (exp, 0);
6658 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6659 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
fed3cef0 6660 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
08293add 6661 HOST_WIDE_INT i;
b50d17a1 6662
d4c89139
PB
6663 /* Optimize the special-case of a zero lower bound.
6664
6665 We convert the low_bound to sizetype to avoid some problems
6666 with constant folding. (E.g. suppose the lower bound is 1,
6667 and its mode is QI. Without the conversion, (ARRAY
6668 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
fed3cef0 6669 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
d4c89139 6670
742920c7 6671 if (! integer_zerop (low_bound))
fed3cef0 6672 index = size_diffop (index, convert (sizetype, low_bound));
742920c7 6673
742920c7 6674 /* Fold an expression like: "foo"[2].
ad2e7dd0
RK
6675 This is not done in fold so it won't happen inside &.
6676 Don't fold if this is for wide characters since it's too
6677 difficult to do correctly and this is a very rare case. */
742920c7 6678
cb5fa0f8
RK
6679 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6680 && TREE_CODE (array) == STRING_CST
742920c7 6681 && TREE_CODE (index) == INTEGER_CST
05bccae2 6682 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
ad2e7dd0
RK
6683 && GET_MODE_CLASS (mode) == MODE_INT
6684 && GET_MODE_SIZE (mode) == 1)
05bccae2
RK
6685 return
6686 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
bbf6f052 6687
742920c7
RK
6688 /* If this is a constant index into a constant array,
6689 just get the value from the array. Handle both the cases when
6690 we have an explicit constructor and when our operand is a variable
6691 that was declared const. */
4af3895e 6692
cb5fa0f8
RK
6693 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6694 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
05bccae2 6695 && TREE_CODE (index) == INTEGER_CST
3a94c984 6696 && 0 > compare_tree_int (index,
05bccae2
RK
6697 list_length (CONSTRUCTOR_ELTS
6698 (TREE_OPERAND (exp, 0)))))
742920c7 6699 {
05bccae2
RK
6700 tree elem;
6701
6702 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6703 i = TREE_INT_CST_LOW (index);
6704 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6705 ;
6706
6707 if (elem)
37a08a29
RK
6708 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6709 modifier);
742920c7 6710 }
3a94c984 6711
742920c7 6712 else if (optimize >= 1
cb5fa0f8
RK
6713 && modifier != EXPAND_CONST_ADDRESS
6714 && modifier != EXPAND_INITIALIZER
742920c7
RK
6715 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6716 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6717 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6718 {
08293add 6719 if (TREE_CODE (index) == INTEGER_CST)
742920c7
RK
6720 {
6721 tree init = DECL_INITIAL (array);
6722
742920c7
RK
6723 if (TREE_CODE (init) == CONSTRUCTOR)
6724 {
665f2503 6725 tree elem;
742920c7 6726
05bccae2 6727 for (elem = CONSTRUCTOR_ELTS (init);
5cb1bea4
JM
6728 (elem
6729 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
05bccae2
RK
6730 elem = TREE_CHAIN (elem))
6731 ;
6732
c54b0a5e 6733 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
742920c7 6734 return expand_expr (fold (TREE_VALUE (elem)), target,
37a08a29 6735 tmode, modifier);
742920c7
RK
6736 }
6737 else if (TREE_CODE (init) == STRING_CST
05bccae2
RK
6738 && 0 > compare_tree_int (index,
6739 TREE_STRING_LENGTH (init)))
5c80f6e6
JJ
6740 {
6741 tree type = TREE_TYPE (TREE_TYPE (init));
6742 enum machine_mode mode = TYPE_MODE (type);
6743
6744 if (GET_MODE_CLASS (mode) == MODE_INT
6745 && GET_MODE_SIZE (mode) == 1)
6746 return (GEN_INT
6747 (TREE_STRING_POINTER
6748 (init)[TREE_INT_CST_LOW (index)]));
6749 }
742920c7
RK
6750 }
6751 }
6752 }
3a94c984 6753 /* Fall through. */
bbf6f052
RK
6754
6755 case COMPONENT_REF:
6756 case BIT_FIELD_REF:
b4e3fabb 6757 case ARRAY_RANGE_REF:
4af3895e 6758 /* If the operand is a CONSTRUCTOR, we can just extract the
7a0b7b9a
RK
6759 appropriate field if it is present. Don't do this if we have
6760 already written the data since we want to refer to that copy
6761 and varasm.c assumes that's what we'll do. */
b4e3fabb 6762 if (code == COMPONENT_REF
7a0b7b9a
RK
6763 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6764 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
4af3895e
JVA
6765 {
6766 tree elt;
6767
6768 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6769 elt = TREE_CHAIN (elt))
86b5812c
RK
6770 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6771 /* We can normally use the value of the field in the
6772 CONSTRUCTOR. However, if this is a bitfield in
6773 an integral mode that we can fit in a HOST_WIDE_INT,
6774 we must mask only the number of bits in the bitfield,
6775 since this is done implicitly by the constructor. If
6776 the bitfield does not meet either of those conditions,
6777 we can't do this optimization. */
6778 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6779 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6780 == MODE_INT)
6781 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6782 <= HOST_BITS_PER_WIDE_INT))))
6783 {
3a94c984 6784 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
86b5812c
RK
6785 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6786 {
9df2c88c
RK
6787 HOST_WIDE_INT bitsize
6788 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
22273300
JJ
6789 enum machine_mode imode
6790 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
86b5812c
RK
6791
6792 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6793 {
6794 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
22273300 6795 op0 = expand_and (imode, op0, op1, target);
86b5812c
RK
6796 }
6797 else
6798 {
6799 tree count
e5e809f4
JL
6800 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6801 0);
86b5812c
RK
6802
6803 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6804 target, 0);
6805 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6806 target, 0);
6807 }
6808 }
6809
6810 return op0;
6811 }
4af3895e
JVA
6812 }
6813
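/* A sketch (not part of expr.c) of the bitfield masking done above, on a
   plain long instead of an rtx; mode_bits plays the role of
   GET_MODE_BITSIZE (imode) and must exceed bitsize.  The signed branch
   relies on an arithmetic right shift, which is what the pair of
   expand_shift calls produces.  */
#if 0
static long
mask_bitfield (long val, int bitsize, int unsignedp, int mode_bits)
{
  if (unsignedp)
    return val & (((long) 1 << bitsize) - 1);

  /* Signed: push the field to the top, then shift back to sign-extend.  */
  return (val << (mode_bits - bitsize)) >> (mode_bits - bitsize);
}
#endif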
bbf6f052
RK
6814 {
6815 enum machine_mode mode1;
770ae6cc 6816 HOST_WIDE_INT bitsize, bitpos;
7bb0943f 6817 tree offset;
bbf6f052 6818 int volatilep = 0;
839c4796 6819 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
a06ef755 6820 &mode1, &unsignedp, &volatilep);
f47e9b4e 6821 rtx orig_op0;
bbf6f052 6822
e7f3c83f
RK
6823 /* If we got back the original object, something is wrong. Perhaps
6824 we are evaluating an expression too early. In any event, don't
6825 infinitely recurse. */
6826 if (tem == exp)
6827 abort ();
6828
3d27140a 6829 /* If TEM's type is a union of variable size, pass TARGET to the inner
b74f5ff2
RK
6830 computation, since it will need a temporary and TARGET is known
6831 to suffice. This occurs in unchecked conversion in Ada. */
3a94c984 6832
f47e9b4e
RK
6833 orig_op0 = op0
6834 = expand_expr (tem,
6835 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6836 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6837 != INTEGER_CST)
6838 ? target : NULL_RTX),
6839 VOIDmode,
6840 (modifier == EXPAND_INITIALIZER
6841 || modifier == EXPAND_CONST_ADDRESS)
6842 ? modifier : EXPAND_NORMAL);
bbf6f052 6843
8c8a8e34 6844 /* If this is a constant, put it into a register if it is a
14a774a9 6845 legitimate constant and OFFSET is 0 and memory if it isn't. */
8c8a8e34
JW
6846 if (CONSTANT_P (op0))
6847 {
6848 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
14a774a9
RK
6849 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6850 && offset == 0)
8c8a8e34
JW
6851 op0 = force_reg (mode, op0);
6852 else
6853 op0 = validize_mem (force_const_mem (mode, op0));
6854 }
6855
7bb0943f
RS
6856 if (offset != 0)
6857 {
e3c8ea67 6858 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
7bb0943f 6859
a2725049 6860 /* If this object is in a register, put it into memory.
14a774a9
RK
6861 This case can't occur in C, but can in Ada if we have
6862 unchecked conversion of an expression from a scalar type to
6863 an array or record type. */
6864 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6865 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6866 {
d04218c0
RK
6867 /* If the operand is a SAVE_EXPR, we can deal with this by
6868 forcing the SAVE_EXPR into memory. */
6869 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
eeb35b45
RK
6870 {
6871 put_var_into_stack (TREE_OPERAND (exp, 0));
6872 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
6873 }
d04218c0
RK
6874 else
6875 {
6876 tree nt
6877 = build_qualified_type (TREE_TYPE (tem),
6878 (TYPE_QUALS (TREE_TYPE (tem))
6879 | TYPE_QUAL_CONST));
6880 rtx memloc = assign_temp (nt, 1, 1, 1);
6881
d04218c0
RK
6882 emit_move_insn (memloc, op0);
6883 op0 = memloc;
6884 }
14a774a9
RK
6885 }
6886
7bb0943f
RS
6887 if (GET_CODE (op0) != MEM)
6888 abort ();
2d48c13d
JL
6889
6890 if (GET_MODE (offset_rtx) != ptr_mode)
0d4903b8
RK
6891 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6892
2d48c13d 6893#ifdef POINTERS_EXTEND_UNSIGNED
4b6c1672
RK
6894 if (GET_MODE (offset_rtx) != Pmode)
6895 offset_rtx = convert_memory_address (Pmode, offset_rtx);
2d48c13d
JL
6896#endif
6897
14a774a9 6898 /* A constant address in OP0 can have VOIDmode, we must not try
efd07ca7 6899 to call force_reg for that case. Avoid that case. */
89752202
HB
6900 if (GET_CODE (op0) == MEM
6901 && GET_MODE (op0) == BLKmode
efd07ca7 6902 && GET_MODE (XEXP (op0, 0)) != VOIDmode
14a774a9 6903 && bitsize != 0
3a94c984 6904 && (bitpos % bitsize) == 0
89752202 6905 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
a06ef755 6906 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
89752202 6907 {
e3c8ea67 6908 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
89752202
HB
6909 bitpos = 0;
6910 }
6911
0d4903b8
RK
6912 op0 = offset_address (op0, offset_rtx,
6913 highest_pow2_factor (offset));
7bb0943f
RS
6914 }
6915
bbf6f052
RK
6916 /* Don't forget about volatility even if this is a bitfield. */
6917 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6918 {
f47e9b4e
RK
6919 if (op0 == orig_op0)
6920 op0 = copy_rtx (op0);
6921
bbf6f052
RK
6922 MEM_VOLATILE_P (op0) = 1;
6923 }
6924
ccc98036
RS
6925 /* In cases where an aligned union has an unaligned object
6926 as a field, we might be extracting a BLKmode value from
6927 an integer-mode (e.g., SImode) object. Handle this case
6928 by doing the extract into an object as wide as the field
6929 (which we know to be the width of a basic mode), then
cb5fa0f8 6930 storing into memory, and changing the mode to BLKmode. */
bbf6f052 6931 if (mode1 == VOIDmode
ccc98036 6932 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
cb5fa0f8
RK
6933 || (mode1 != BLKmode && ! direct_load[(int) mode1]
6934 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
10c2a453
RK
6935 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
6936 && modifier != EXPAND_CONST_ADDRESS
6937 && modifier != EXPAND_INITIALIZER)
cb5fa0f8
RK
6938 /* If the field isn't aligned enough to fetch as a memref,
6939 fetch it as a bit field. */
6940 || (mode1 != BLKmode
38b3baae 6941 && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))
cb5fa0f8
RK
6942 && ((TYPE_ALIGN (TREE_TYPE (tem))
6943 < GET_MODE_ALIGNMENT (mode))
6944 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
6945 /* If the type and the field are a constant size and the
6946 size of the type isn't the same size as the bitfield,
6947 we must use bitfield operations. */
6948 || (bitsize >= 0
6949 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
6950 == INTEGER_CST)
6951 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
a06ef755 6952 bitsize)))
bbf6f052 6953 {
bbf6f052
RK
6954 enum machine_mode ext_mode = mode;
6955
14a774a9
RK
6956 if (ext_mode == BLKmode
6957 && ! (target != 0 && GET_CODE (op0) == MEM
6958 && GET_CODE (target) == MEM
6959 && bitpos % BITS_PER_UNIT == 0))
bbf6f052
RK
6960 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6961
6962 if (ext_mode == BLKmode)
a281e72d
RK
6963 {
6964 /* In this case, BITPOS must start at a byte boundary and
6965 TARGET, if specified, must be a MEM. */
6966 if (GET_CODE (op0) != MEM
6967 || (target != 0 && GET_CODE (target) != MEM)
6968 || bitpos % BITS_PER_UNIT != 0)
6969 abort ();
6970
f4ef873c 6971 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
a281e72d
RK
6972 if (target == 0)
6973 target = assign_temp (type, 0, 1, 1);
6974
6975 emit_block_move (target, op0,
a06ef755
RK
6976 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6977 / BITS_PER_UNIT));
3a94c984 6978
a281e72d
RK
6979 return target;
6980 }
bbf6f052 6981
dc6d66b3
RK
6982 op0 = validize_mem (op0);
6983
6984 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
04050c69 6985 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
dc6d66b3
RK
6986
6987 op0 = extract_bit_field (op0, bitsize, bitpos,
bbf6f052 6988 unsignedp, target, ext_mode, ext_mode,
bbf6f052 6989 int_size_in_bytes (TREE_TYPE (tem)));
ef19912d
RK
6990
6991 /* If the result is a record type and BITSIZE is narrower than
6992 the mode of OP0, an integral mode, and this is a big endian
6993 machine, we must put the field into the high-order bits. */
6994 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6995 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
65a07688 6996 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
ef19912d
RK
6997 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6998 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6999 - bitsize),
7000 op0, 1);
7001
bbf6f052
RK
7002 if (mode == BLKmode)
7003 {
c3d32120
RK
7004 rtx new = assign_temp (build_qualified_type
7005 (type_for_mode (ext_mode, 0),
7006 TYPE_QUAL_CONST), 0, 1, 1);
bbf6f052
RK
7007
7008 emit_move_insn (new, op0);
7009 op0 = copy_rtx (new);
7010 PUT_MODE (op0, BLKmode);
c3d32120 7011 set_mem_attributes (op0, exp, 1);
bbf6f052
RK
7012 }
7013
7014 return op0;
7015 }
7016
05019f83
RK
7017 /* If the result is BLKmode, use that to access the object
7018 now as well. */
7019 if (mode == BLKmode)
7020 mode1 = BLKmode;
7021
bbf6f052
RK
7022 /* Get a reference to just this component. */
7023 if (modifier == EXPAND_CONST_ADDRESS
7024 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
f1ec5147 7025 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
bbf6f052 7026 else
f4ef873c 7027 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
41472af8 7028
f47e9b4e
RK
7029 if (op0 == orig_op0)
7030 op0 = copy_rtx (op0);
7031
3bdf5ad1 7032 set_mem_attributes (op0, exp, 0);
dc6d66b3 7033 if (GET_CODE (XEXP (op0, 0)) == REG)
a06ef755 7034 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
dc6d66b3 7035
bbf6f052 7036 MEM_VOLATILE_P (op0) |= volatilep;
0d15e60c 7037 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
08bbd316 7038 || modifier == EXPAND_CONST_ADDRESS
0d15e60c 7039 || modifier == EXPAND_INITIALIZER)
bbf6f052 7040 return op0;
0d15e60c 7041 else if (target == 0)
bbf6f052 7042 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
0d15e60c 7043
bbf6f052
RK
7044 convert_move (target, op0, unsignedp);
7045 return target;
7046 }
7047
4a8d0c9c
RH
7048 case VTABLE_REF:
7049 {
7050 rtx insn, before = get_last_insn (), vtbl_ref;
7051
7052 /* Evaluate the interior expression. */
7053 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7054 tmode, modifier);
7055
7056 /* Get or create an instruction off which to hang a note. */
7057 if (REG_P (subtarget))
7058 {
7059 target = subtarget;
7060 insn = get_last_insn ();
7061 if (insn == before)
7062 abort ();
7063 if (! INSN_P (insn))
7064 insn = prev_nonnote_insn (insn);
7065 }
7066 else
7067 {
7068 target = gen_reg_rtx (GET_MODE (subtarget));
7069 insn = emit_move_insn (target, subtarget);
7070 }
7071
7072 /* Collect the data for the note. */
7073 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7074 vtbl_ref = plus_constant (vtbl_ref,
7075 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7076 /* Discard the initial CONST that was added. */
7077 vtbl_ref = XEXP (vtbl_ref, 0);
7078
7079 REG_NOTES (insn)
7080 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7081
7082 return target;
7083 }
7084
bbf6f052
RK
7085 /* Intended for a reference to a buffer of a file-object in Pascal.
7086 But it's not certain that a special tree code will really be
7087 necessary for these. INDIRECT_REF might work for them. */
7088 case BUFFER_REF:
7089 abort ();
7090
7308a047 7091 case IN_EXPR:
7308a047 7092 {
d6a5ac33
RK
7093 /* Pascal set IN expression.
7094
7095 Algorithm:
7096 rlo = set_low - (set_low%bits_per_word);
7097 the_word = set [ (index - rlo)/bits_per_word ];
7098 bit_index = index % bits_per_word;
7099 bitmask = 1 << bit_index;
7100 return !!(the_word & bitmask); */
7101
7308a047
RS
7102 tree set = TREE_OPERAND (exp, 0);
7103 tree index = TREE_OPERAND (exp, 1);
d6a5ac33 7104 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7308a047 7105 tree set_type = TREE_TYPE (set);
7308a047
RS
7106 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7107 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
d6a5ac33
RK
7108 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7109 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7110 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7111 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7112 rtx setaddr = XEXP (setval, 0);
7113 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7308a047
RS
7114 rtx rlow;
7115 rtx diff, quo, rem, addr, bit, result;
7308a047 7116
d6a5ac33
RK
7117 /* If domain is empty, answer is no. Likewise if index is constant
7118 and out of bounds. */
51723711 7119 if (((TREE_CODE (set_high_bound) == INTEGER_CST
d6a5ac33 7120 && TREE_CODE (set_low_bound) == INTEGER_CST
51723711 7121 && tree_int_cst_lt (set_high_bound, set_low_bound))
d6a5ac33
RK
7122 || (TREE_CODE (index) == INTEGER_CST
7123 && TREE_CODE (set_low_bound) == INTEGER_CST
7124 && tree_int_cst_lt (index, set_low_bound))
7125 || (TREE_CODE (set_high_bound) == INTEGER_CST
7126 && TREE_CODE (index) == INTEGER_CST
7127 && tree_int_cst_lt (set_high_bound, index))))
7308a047
RS
7128 return const0_rtx;
7129
d6a5ac33
RK
7130 if (target == 0)
7131 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7308a047
RS
7132
7133 /* If we get here, we have to generate the code for both cases
7134 (in range and out of range). */
7135
7136 op0 = gen_label_rtx ();
7137 op1 = gen_label_rtx ();
7138
7139 if (! (GET_CODE (index_val) == CONST_INT
7140 && GET_CODE (lo_r) == CONST_INT))
a06ef755
RK
7141 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7142 GET_MODE (index_val), iunsignedp, op1);
7308a047
RS
7143
7144 if (! (GET_CODE (index_val) == CONST_INT
7145 && GET_CODE (hi_r) == CONST_INT))
a06ef755
RK
7146 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7147 GET_MODE (index_val), iunsignedp, op1);
7308a047
RS
7148
7149 /* Calculate the element number of bit zero in the first word
7150 of the set. */
7151 if (GET_CODE (lo_r) == CONST_INT)
17938e57 7152 rlow = GEN_INT (INTVAL (lo_r)
3a94c984 7153 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7308a047 7154 else
17938e57
RK
7155 rlow = expand_binop (index_mode, and_optab, lo_r,
7156 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
d6a5ac33 7157 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7308a047 7158
d6a5ac33
RK
7159 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7160 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7308a047
RS
7161
7162 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
d6a5ac33 7163 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7308a047 7164 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
d6a5ac33
RK
7165 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7166
7308a047 7167 addr = memory_address (byte_mode,
d6a5ac33
RK
7168 expand_binop (index_mode, add_optab, diff,
7169 setaddr, NULL_RTX, iunsignedp,
17938e57 7170 OPTAB_LIB_WIDEN));
d6a5ac33 7171
3a94c984 7172 /* Extract the bit we want to examine. */
7308a047 7173 bit = expand_shift (RSHIFT_EXPR, byte_mode,
38a448ca 7174 gen_rtx_MEM (byte_mode, addr),
17938e57
RK
7175 make_tree (TREE_TYPE (index), rem),
7176 NULL_RTX, 1);
7177 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7178 GET_MODE (target) == byte_mode ? target : 0,
7308a047 7179 1, OPTAB_LIB_WIDEN);
17938e57
RK
7180
7181 if (result != target)
7182 convert_move (target, result, 1);
7308a047
RS
7183
7184 /* Output the code to handle the out-of-range case. */
7185 emit_jump (op0);
7186 emit_label (op1);
7187 emit_move_insn (target, const0_rtx);
7188 emit_label (op0);
7189 return target;
7190 }
7191
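/* A C-level sketch (not part of expr.c) of the membership test expanded
   above, with the set stored as a byte array, BITS_PER_UNIT assumed to be
   8, and a non-negative lower bound.  */
#if 0
static int
set_contains (const unsigned char *set, long set_low, long index)
{
  long rlo = set_low - (set_low % 8);	/* element number of bit zero */
  unsigned char the_word = set[(index - rlo) / 8];
  int bit_index = index % 8;

  return (the_word >> bit_index) & 1;
}
#endif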
bbf6f052 7192 case WITH_CLEANUP_EXPR:
6ad7895a 7193 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
bbf6f052 7194 {
6ad7895a 7195 WITH_CLEANUP_EXPR_RTL (exp)
37a08a29 7196 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6ad7895a 7197 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 1));
e976b8b2 7198
bbf6f052 7199 /* That's it for this cleanup. */
6ad7895a 7200 TREE_OPERAND (exp, 1) = 0;
bbf6f052 7201 }
6ad7895a 7202 return WITH_CLEANUP_EXPR_RTL (exp);
bbf6f052 7203
5dab5552
MS
7204 case CLEANUP_POINT_EXPR:
7205 {
e976b8b2
MS
7206 /* Start a new binding layer that will keep track of all cleanup
7207 actions to be performed. */
8e91754e 7208 expand_start_bindings (2);
e976b8b2 7209
d93d4205 7210 target_temp_slot_level = temp_slot_level;
e976b8b2 7211
37a08a29 7212 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
f283f66b
JM
7213 /* If we're going to use this value, load it up now. */
7214 if (! ignore)
7215 op0 = force_not_mem (op0);
d93d4205 7216 preserve_temp_slots (op0);
e976b8b2 7217 expand_end_bindings (NULL_TREE, 0, 0);
5dab5552
MS
7218 }
7219 return op0;
7220
bbf6f052
RK
7221 case CALL_EXPR:
7222 /* Check for a built-in function. */
7223 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
d6a5ac33
RK
7224 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7225 == FUNCTION_DECL)
bbf6f052 7226 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
c70eaeaf
KG
7227 {
7228 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7229 == BUILT_IN_FRONTEND)
7230 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7231 else
7232 return expand_builtin (exp, target, subtarget, tmode, ignore);
7233 }
d6a5ac33 7234
8129842c 7235 return expand_call (exp, target, ignore);
bbf6f052
RK
7236
7237 case NON_LVALUE_EXPR:
7238 case NOP_EXPR:
7239 case CONVERT_EXPR:
7240 case REFERENCE_EXPR:
4a53008b 7241 if (TREE_OPERAND (exp, 0) == error_mark_node)
a592f288 7242 return const0_rtx;
4a53008b 7243
bbf6f052
RK
7244 if (TREE_CODE (type) == UNION_TYPE)
7245 {
7246 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
14a774a9 7247
c3d32120
RK
7248 /* If both input and output are BLKmode, this conversion isn't doing
7249 anything except possibly changing memory attributes. */
7250 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7251 {
7252 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7253 modifier);
7254
7255 result = copy_rtx (result);
7256 set_mem_attributes (result, exp, 0);
7257 return result;
7258 }
14a774a9 7259
bbf6f052 7260 if (target == 0)
1da68f56 7261 target = assign_temp (type, 0, 1, 1);
d6a5ac33 7262
bbf6f052
RK
7263 if (GET_CODE (target) == MEM)
7264 /* Store data into beginning of memory target. */
7265 store_expr (TREE_OPERAND (exp, 0),
f4ef873c 7266 adjust_address (target, TYPE_MODE (valtype), 0), 0);
1499e0a8 7267
bbf6f052
RK
7268 else if (GET_CODE (target) == REG)
7269 /* Store this field into a union of the proper type. */
14a774a9
RK
7270 store_field (target,
7271 MIN ((int_size_in_bytes (TREE_TYPE
7272 (TREE_OPERAND (exp, 0)))
7273 * BITS_PER_UNIT),
8752c357 7274 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
14a774a9 7275 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
a06ef755 7276 VOIDmode, 0, type, 0);
bbf6f052
RK
7277 else
7278 abort ();
7279
7280 /* Return the entire union. */
7281 return target;
7282 }
d6a5ac33 7283
7f62854a
RK
7284 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7285 {
7286 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
37a08a29 7287 modifier);
7f62854a
RK
7288
7289 /* If the signedness of the conversion differs and OP0 is
7290 a promoted SUBREG, clear that indication since we now
7291 have to do the proper extension. */
7292 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7293 && GET_CODE (op0) == SUBREG)
7294 SUBREG_PROMOTED_VAR_P (op0) = 0;
7295
7296 return op0;
7297 }
7298
fdf473ae 7299 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
12342f90
RS
7300 if (GET_MODE (op0) == mode)
7301 return op0;
12342f90 7302
d6a5ac33
RK
7303 /* If OP0 is a constant, just convert it into the proper mode. */
7304 if (CONSTANT_P (op0))
fdf473ae
RH
7305 {
7306 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7307 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7308
7309 if (modifier == EXPAND_INITIALIZER)
7310 return simplify_gen_subreg (mode, op0, inner_mode,
7311 subreg_lowpart_offset (mode,
7312 inner_mode));
7313 else
7314 return convert_modes (mode, inner_mode, op0,
7315 TREE_UNSIGNED (inner_type));
7316 }
12342f90 7317
26fcb35a 7318 if (modifier == EXPAND_INITIALIZER)
38a448ca 7319 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
d6a5ac33 7320
bbf6f052 7321 if (target == 0)
d6a5ac33
RK
7322 return
7323 convert_to_mode (mode, op0,
7324 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
bbf6f052 7325 else
d6a5ac33
RK
7326 convert_move (target, op0,
7327 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
bbf6f052
RK
7328 return target;
7329
ed239f5a 7330 case VIEW_CONVERT_EXPR:
37a08a29 7331 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
ed239f5a
RK
7332
7333 /* If the input and output modes are both the same, we are done.
7334 Otherwise, if neither mode is BLKmode and both are within a word, we
c11c10d8
RK
7335 can use gen_lowpart. If neither is true, make sure the operand is
7336 in memory and convert the MEM to the new mode. */
ed239f5a
RK
7337 if (TYPE_MODE (type) == GET_MODE (op0))
7338 ;
7339 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7340 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7341 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7342 op0 = gen_lowpart (TYPE_MODE (type), op0);
c11c10d8 7343 else if (GET_CODE (op0) != MEM)
ed239f5a 7344 {
c11c10d8
RK
7345 /* If the operand is not a MEM, force it into memory. Since we
7346 are going to be changing the mode of the MEM, don't call
7347 force_const_mem for constants because we don't allow pool
7348 constants to change mode. */
ed239f5a 7349 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
ed239f5a 7350
c11c10d8
RK
7351 if (TREE_ADDRESSABLE (exp))
7352 abort ();
ed239f5a 7353
c11c10d8
RK
7354 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7355 target
7356 = assign_stack_temp_for_type
7357 (TYPE_MODE (inner_type),
7358 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
ed239f5a 7359
c11c10d8
RK
7360 emit_move_insn (target, op0);
7361 op0 = target;
ed239f5a
RK
7362 }
7363
c11c10d8
RK
7364 /* At this point, OP0 is in the correct mode. If the output type is such
7365 that the operand is known to be aligned, indicate that it is.
7366 Otherwise, we need only be concerned about alignment for non-BLKmode
7367 results. */
ed239f5a
RK
7368 if (GET_CODE (op0) == MEM)
7369 {
7370 op0 = copy_rtx (op0);
7371
ed239f5a
RK
7372 if (TYPE_ALIGN_OK (type))
7373 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7374 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7375 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7376 {
7377 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
65a07688
RK
7378 HOST_WIDE_INT temp_size
7379 = MAX (int_size_in_bytes (inner_type),
7380 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
ed239f5a
RK
7381 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7382 temp_size, 0, type);
c4e59f51 7383 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
ed239f5a 7384
c11c10d8
RK
7385 if (TREE_ADDRESSABLE (exp))
7386 abort ();
7387
ed239f5a
RK
7388 if (GET_MODE (op0) == BLKmode)
7389 emit_block_move (new_with_op0_mode, op0,
7390 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))));
7391 else
7392 emit_move_insn (new_with_op0_mode, op0);
7393
7394 op0 = new;
7395 }
7396
c4e59f51 7397 op0 = adjust_address (op0, TYPE_MODE (type), 0);
ed239f5a
RK
7398 }
7399
7400 return op0;
7401
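/* A source-level analogue (a sketch, not part of expr.c): a
   VIEW_CONVERT_EXPR reinterprets the same bits in a new type, much like
   type-punning through a union; no value conversion happens.  Assumes
   float and unsigned int have the same size.  */
#if 0
static unsigned int
bits_of_float (float f)
{
  union { float f; unsigned int u; } v;

  v.f = f;
  return v.u;	/* same bit pattern, new type */
}
#endif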
bbf6f052 7402 case PLUS_EXPR:
0f41302f
MS
7403 /* We come here from MINUS_EXPR when the second operand is a
7404 constant. */
bbf6f052 7405 plus_expr:
91ce572a 7406 this_optab = ! unsignedp && flag_trapv
a9785c70 7407 && (GET_MODE_CLASS (mode) == MODE_INT)
91ce572a 7408 ? addv_optab : add_optab;
bbf6f052
RK
7409
7410 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7411 something else, make sure we add the register to the constant and
7412 then to the other thing. This case can occur during strength
7413 reduction and doing it this way will produce better code if the
7414 frame pointer or argument pointer is eliminated.
7415
7416 fold-const.c will ensure that the constant is always in the inner
7417 PLUS_EXPR, so the only case we need to do anything about is if
7418 sp, ap, or fp is our second argument, in which case we must swap
7419 the innermost first argument and our second argument. */
7420
7421 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7422 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7423 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7424 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7425 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7426 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7427 {
7428 tree t = TREE_OPERAND (exp, 1);
7429
7430 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7431 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7432 }
7433
88f63c77 7434 /* If the result is to be ptr_mode and we are adding an integer to
bbf6f052
RK
7435 something, we might be forming a constant. So try to use
7436 plus_constant. If it produces a sum and we can't accept it,
7437 use force_operand. This allows P = &ARR[const] to generate
7438 efficient code on machines where a SYMBOL_REF is not a valid
7439 address.
7440
7441 If this is an EXPAND_SUM call, always return the sum. */
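/* An example of what this path buys (a sketch, not part of expr.c; arr
   and p are illustrative names, 4-byte ints assumed):  */
#if 0
static int arr[10];
static int *p = &arr[3];	/* plus_constant folds the address to
				   (plus (symbol_ref "arr") (const_int 12)) */
#endif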
c980ac49 7442 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
91ce572a 7443 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
bbf6f052 7444 {
c980ac49
RS
7445 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7446 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7447 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7448 {
cbbc503e
JL
7449 rtx constant_part;
7450
c980ac49
RS
7451 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7452 EXPAND_SUM);
cbbc503e
JL
7453 /* Use immed_double_const to ensure that the constant is
7454 truncated according to the mode of OP1, then sign extended
7455 to a HOST_WIDE_INT. Using the constant directly can result
7456 in non-canonical RTL in a 64x32 cross compile. */
7457 constant_part
7458 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7459 (HOST_WIDE_INT) 0,
a5efcd63 7460 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7f401c74 7461 op1 = plus_constant (op1, INTVAL (constant_part));
c980ac49
RS
7462 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7463 op1 = force_operand (op1, target);
7464 return op1;
7465 }
bbf6f052 7466
c980ac49
RS
7467 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7468 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7469 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7470 {
cbbc503e
JL
7471 rtx constant_part;
7472
c980ac49
RS
7473 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7474 EXPAND_SUM);
7475 if (! CONSTANT_P (op0))
7476 {
7477 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7478 VOIDmode, modifier);
709f5be1
RS
7479 /* Don't go to both_summands if modifier
7480 says it's not right to return a PLUS. */
7481 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7482 goto binop2;
c980ac49
RS
7483 goto both_summands;
7484 }
cbbc503e
JL
7485 /* Use immed_double_const to ensure that the constant is
7486 truncated according to the mode of OP1, then sign extended
7487 to a HOST_WIDE_INT. Using the constant directly can result
7488 in non-canonical RTL in a 64x32 cross compile. */
7489 constant_part
7490 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7491 (HOST_WIDE_INT) 0,
2a94e396 7492 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7f401c74 7493 op0 = plus_constant (op0, INTVAL (constant_part));
c980ac49
RS
7494 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7495 op0 = force_operand (op0, target);
7496 return op0;
7497 }
bbf6f052
RK
7498 }
7499
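/* A sketch (not part of expr.c) of the truncate-then-sign-extend
   canonicalization immed_double_const performs, written for a 64-bit
   HOST_WIDE_INT and a constant of mode_bits < 64 bits.  Relies on an
   arithmetic right shift of the signed type.  */
#if 0
static long long
canonical_wide (long long val, int mode_bits)
{
  int shift = 64 - mode_bits;

  return (val << shift) >> shift;	/* high bits copy the sign bit */
}
#endif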
7500 /* No sense saving up arithmetic to be done
7501 if it's all in the wrong mode to form part of an address.
7502 And force_operand won't know whether to sign-extend or
7503 zero-extend. */
7504 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
88f63c77 7505 || mode != ptr_mode)
c980ac49 7506 goto binop;
bbf6f052 7507
e5e809f4 7508 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
7509 subtarget = 0;
7510
37a08a29
RK
7511 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
7512 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
bbf6f052 7513
c980ac49 7514 both_summands:
bbf6f052
RK
7515 /* Make sure any term that's a sum with a constant comes last. */
7516 if (GET_CODE (op0) == PLUS
7517 && CONSTANT_P (XEXP (op0, 1)))
7518 {
7519 temp = op0;
7520 op0 = op1;
7521 op1 = temp;
7522 }
7523 /* If adding to a sum including a constant,
7524 associate it to put the constant outside. */
7525 if (GET_CODE (op1) == PLUS
7526 && CONSTANT_P (XEXP (op1, 1)))
7527 {
7528 rtx constant_term = const0_rtx;
7529
7530 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7531 if (temp != 0)
7532 op0 = temp;
6f90e075
JW
7533 /* Ensure that MULT comes first if there is one. */
7534 else if (GET_CODE (op0) == MULT)
38a448ca 7535 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
bbf6f052 7536 else
38a448ca 7537 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
bbf6f052
RK
7538
7539 /* Let's also eliminate constants from op0 if possible. */
7540 op0 = eliminate_constant_term (op0, &constant_term);
7541
7542 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
3a94c984 7543 their sum should be a constant. Form it into OP1, since the
bbf6f052
RK
7544 result we want will then be OP0 + OP1. */
7545
7546 temp = simplify_binary_operation (PLUS, mode, constant_term,
7547 XEXP (op1, 1));
7548 if (temp != 0)
7549 op1 = temp;
7550 else
38a448ca 7551 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
bbf6f052
RK
7552 }
7553
7554 /* Put a constant term last and put a multiplication first. */
7555 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7556 temp = op1, op1 = op0, op0 = temp;
7557
7558 temp = simplify_binary_operation (PLUS, mode, op0, op1);
38a448ca 7559 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
bbf6f052
RK
7560
7561 case MINUS_EXPR:
ea87523e
RK
7562 /* For initializers, we are allowed to return a MINUS of two
7563 symbolic constants. Here we handle all cases when both operands
7564 are constant. */
bbf6f052
RK
7565 /* Handle difference of two symbolic constants,
7566 for the sake of an initializer. */
7567 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7568 && really_constant_p (TREE_OPERAND (exp, 0))
7569 && really_constant_p (TREE_OPERAND (exp, 1)))
7570 {
37a08a29
RK
7571 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
7572 modifier);
7573 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
7574 modifier);
ea87523e 7575
ea87523e
RK
7576 /* If the last operand is a CONST_INT, use plus_constant of
7577 the negated constant. Else make the MINUS. */
7578 if (GET_CODE (op1) == CONST_INT)
7579 return plus_constant (op0, - INTVAL (op1));
7580 else
38a448ca 7581 return gen_rtx_MINUS (mode, op0, op1);
bbf6f052
RK
7582 }
7583 /* Convert A - const to A + (-const). */
7584 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7585 {
ae431183
RK
7586 tree negated = fold (build1 (NEGATE_EXPR, type,
7587 TREE_OPERAND (exp, 1)));
7588
ae431183 7589 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
6fbfac92
JM
7590 /* If we can't negate the constant in TYPE, leave it alone and
7591 expand_binop will negate it for us. We used to try to do it
7592 here in the signed version of TYPE, but that doesn't work
7593 on POINTER_TYPEs. */;
ae431183
RK
7594 else
7595 {
7596 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7597 goto plus_expr;
7598 }
bbf6f052 7599 }
91ce572a
CC
7600 this_optab = ! unsignedp && flag_trapv
7601 && (GET_MODE_CLASS(mode) == MODE_INT)
7602 ? subv_optab : sub_optab;
bbf6f052
RK
7603 goto binop;
7604
7605 case MULT_EXPR:
bbf6f052
RK
7606 /* If first operand is constant, swap them.
7607 Thus the following special case checks need only
7608 check the second operand. */
7609 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7610 {
b3694847 7611 tree t1 = TREE_OPERAND (exp, 0);
bbf6f052
RK
7612 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7613 TREE_OPERAND (exp, 1) = t1;
7614 }
7615
7616 /* Attempt to return something suitable for generating an
7617 indexed address, for machines that support that. */
7618
88f63c77 7619 if (modifier == EXPAND_SUM && mode == ptr_mode
3b40e71b 7620 && host_integerp (TREE_OPERAND (exp, 1), 0))
bbf6f052 7621 {
921b3427
RK
7622 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7623 EXPAND_SUM);
bbf6f052 7624
3b40e71b
RH
7625 /* If we knew for certain that this is arithmetic for an array
7626 reference, and we knew the bounds of the array, then we could
7627 apply the distributive law across (PLUS X C) for constant C.
7628 Without such knowledge, we risk overflowing the computation
7629 when both X and C are large, but X+C isn't. */
7630 /* ??? Could perhaps special-case EXP being unsigned and C being
7631 positive. In that case we are certain that X+C is no smaller
7632 than X and so the transformed expression will overflow iff the
7633 original would have. */
bbf6f052
RK
7634
7635 if (GET_CODE (op0) != REG)
906c4e36 7636 op0 = force_operand (op0, NULL_RTX);
bbf6f052
RK
7637 if (GET_CODE (op0) != REG)
7638 op0 = copy_to_mode_reg (mode, op0);
7639
c5c76735
JL
7640 return
7641 gen_rtx_MULT (mode, op0,
3b40e71b 7642 GEN_INT (tree_low_cst (TREE_OPERAND (exp, 1), 0)));
bbf6f052
RK
7643 }
7644
e5e809f4 7645 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
7646 subtarget = 0;
7647
7648 /* Check for multiplying things that have been extended
7649 from a narrower type. If this machine supports multiplying
7650 in that narrower type with a result in the desired type,
7651 do it that way, and avoid the explicit type-conversion. */
7652 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7653 && TREE_CODE (type) == INTEGER_TYPE
7654 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7655 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7656 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7657 && int_fits_type_p (TREE_OPERAND (exp, 1),
7658 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7659 /* Don't use a widening multiply if a shift will do. */
7660 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
906c4e36 7661 > HOST_BITS_PER_WIDE_INT)
bbf6f052
RK
7662 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7663 ||
7664 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7665 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7666 ==
7667 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7668 /* If both operands are extended, they must either both
7669 be zero-extended or both be sign-extended. */
7670 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7671 ==
7672 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7673 {
7674 enum machine_mode innermode
7675 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
b10af0c8
TG
7676 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7677 ? smul_widen_optab : umul_widen_optab);
bbf6f052
RK
7678 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7679 ? umul_widen_optab : smul_widen_optab);
b10af0c8 7680 if (mode == GET_MODE_WIDER_MODE (innermode))
bbf6f052 7681 {
b10af0c8
TG
7682 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7683 {
7684 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7685 NULL_RTX, VOIDmode, 0);
7686 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7687 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7688 VOIDmode, 0);
7689 else
7690 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7691 NULL_RTX, VOIDmode, 0);
7692 goto binop2;
7693 }
7694 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7695 && innermode == word_mode)
7696 {
7697 rtx htem;
7698 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7699 NULL_RTX, VOIDmode, 0);
7700 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8c118062
GK
7701 op1 = convert_modes (innermode, mode,
7702 expand_expr (TREE_OPERAND (exp, 1),
7703 NULL_RTX, VOIDmode, 0),
7704 unsignedp);
b10af0c8
TG
7705 else
7706 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7707 NULL_RTX, VOIDmode, 0);
7708 temp = expand_binop (mode, other_optab, op0, op1, target,
7709 unsignedp, OPTAB_LIB_WIDEN);
7710 htem = expand_mult_highpart_adjust (innermode,
7711 gen_highpart (innermode, temp),
7712 op0, op1,
7713 gen_highpart (innermode, temp),
7714 unsignedp);
7715 emit_move_insn (gen_highpart (innermode, temp), htem);
7716 return temp;
7717 }
bbf6f052
RK
7718 }
7719 }
7720 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 7721 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7722 return expand_mult (mode, op0, op1, target, unsignedp);
7723
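/* The source pattern the test above looks for (a sketch, not part of
   expr.c): both operands extended from a narrower type, so a single
   widening multiply (e.g. HImode x HImode -> SImode) can be used instead
   of extending first.  */
#if 0
static int
widening_mult (short a, short b)
{
  return (int) a * (int) b;
}
#endif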
7724 case TRUNC_DIV_EXPR:
7725 case FLOOR_DIV_EXPR:
7726 case CEIL_DIV_EXPR:
7727 case ROUND_DIV_EXPR:
7728 case EXACT_DIV_EXPR:
e5e809f4 7729 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
7730 subtarget = 0;
7731 /* Possible optimization: compute the dividend with EXPAND_SUM
7732 then, if the divisor is constant, we can optimize the case
7733 where some terms of the dividend have coefficients divisible by it. */
7734 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 7735 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7736 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7737
7738 case RDIV_EXPR:
b7e9703c
JH
7739 /* Emit a/b as a*(1/b). Later we may manage to CSE the reciprocal,
7740 saving an expensive divide. If not, combine will rebuild the original
7741 computation. */
7742 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7743 && !real_onep (TREE_OPERAND (exp, 0)))
7744 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7745 build (RDIV_EXPR, type,
7746 build_real (type, dconst1),
7747 TREE_OPERAND (exp, 1))),
7748 target, tmode, unsignedp);
ef89d648 7749 this_optab = sdiv_optab;
bbf6f052
RK
7750 goto binop;
7751
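/* The payoff of the unsafe-math rewrite above (a sketch, not part of
   expr.c): dividing several values by the same b shares one reciprocal
   once CSE runs.  */
#if 0
static double
rdiv_rewrite (double a, double c, double b)
{
  double recip = 1.0 / b;	/* the RDIV_EXPR (1/b) built above */

  return a * recip + c * recip;	/* two divides become one */
}
#endif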
7752 case TRUNC_MOD_EXPR:
7753 case FLOOR_MOD_EXPR:
7754 case CEIL_MOD_EXPR:
7755 case ROUND_MOD_EXPR:
e5e809f4 7756 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
7757 subtarget = 0;
7758 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 7759 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7760 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7761
7762 case FIX_ROUND_EXPR:
7763 case FIX_FLOOR_EXPR:
7764 case FIX_CEIL_EXPR:
7765 abort (); /* Not used for C. */
7766
7767 case FIX_TRUNC_EXPR:
906c4e36 7768 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7769 if (target == 0)
7770 target = gen_reg_rtx (mode);
7771 expand_fix (target, op0, unsignedp);
7772 return target;
7773
7774 case FLOAT_EXPR:
906c4e36 7775 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7776 if (target == 0)
7777 target = gen_reg_rtx (mode);
7778 /* expand_float can't figure out what to do if FROM has VOIDmode.
7779 So give it the correct mode. With -O, cse will optimize this. */
7780 if (GET_MODE (op0) == VOIDmode)
7781 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7782 op0);
7783 expand_float (target, op0,
7784 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7785 return target;
7786
7787 case NEGATE_EXPR:
5b22bee8 7788 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
91ce572a
CC
7789 temp = expand_unop (mode,
7790 ! unsignedp && flag_trapv
7791 && (GET_MODE_CLASS(mode) == MODE_INT)
7792 ? negv_optab : neg_optab, op0, target, 0);
bbf6f052
RK
7793 if (temp == 0)
7794 abort ();
7795 return temp;
7796
7797 case ABS_EXPR:
7798 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7799
2d7050fd 7800 /* Handle complex values specially. */
d6a5ac33
RK
7801 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7802 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7803 return expand_complex_abs (mode, op0, target, unsignedp);
2d7050fd 7804
bbf6f052
RK
7805 /* Unsigned abs is simply the operand. Testing here means we don't
7806 risk generating incorrect code below. */
7807 if (TREE_UNSIGNED (type))
7808 return op0;
7809
91ce572a 7810 return expand_abs (mode, op0, target, unsignedp,
e5e809f4 7811 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
bbf6f052
RK
7812
7813 case MAX_EXPR:
7814 case MIN_EXPR:
7815 target = original_target;
e5e809f4 7816 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
fc155707 7817 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
d6a5ac33 7818 || GET_MODE (target) != mode
bbf6f052
RK
7819 || (GET_CODE (target) == REG
7820 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7821 target = gen_reg_rtx (mode);
906c4e36 7822 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7823 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7824
7825 /* First try to do it with a special MIN or MAX instruction.
7826 If that does not win, use a conditional jump to select the proper
7827 value. */
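/* The conditional-jump fallback below amounts to, for MAX_EXPR:
   target = op0; if (target >= op1) goto lab; target = op1; lab:
   with the comparison signed or unsigned according to TYPE.  */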
7828 this_optab = (TREE_UNSIGNED (type)
7829 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7830 : (code == MIN_EXPR ? smin_optab : smax_optab));
7831
7832 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7833 OPTAB_WIDEN);
7834 if (temp != 0)
7835 return temp;
7836
fa2981d8
JW
7837 /* At this point, a MEM target is no longer useful; we will get better
7838 code without it. */
3a94c984 7839
fa2981d8
JW
7840 if (GET_CODE (target) == MEM)
7841 target = gen_reg_rtx (mode);
7842
ee456b1c
RK
7843 if (target != op0)
7844 emit_move_insn (target, op0);
d6a5ac33 7845
bbf6f052 7846 op0 = gen_label_rtx ();
d6a5ac33 7847
f81497d9
RS
7848 /* If this mode is an integer too wide to compare properly,
7849 compare word by word. Rely on cse to optimize constant cases. */
1eb8759b
RH
7850 if (GET_MODE_CLASS (mode) == MODE_INT
7851 && ! can_compare_p (GE, mode, ccp_jump))
bbf6f052 7852 {
f81497d9 7853 if (code == MAX_EXPR)
d6a5ac33
RK
7854 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7855 target, op1, NULL_RTX, op0);
bbf6f052 7856 else
d6a5ac33
RK
7857 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7858 op1, target, NULL_RTX, op0);
bbf6f052 7859 }
f81497d9
RS
7860 else
7861 {
b30f05db
BS
7862 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7863 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
a06ef755 7864 unsignedp, mode, NULL_RTX, NULL_RTX,
b30f05db 7865 op0);
f81497d9 7866 }
b30f05db 7867 emit_move_insn (target, op1);
bbf6f052
RK
7868 emit_label (op0);
7869 return target;
7870
bbf6f052
RK
7871 case BIT_NOT_EXPR:
7872 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7873 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7874 if (temp == 0)
7875 abort ();
7876 return temp;
7877
7878 case FFS_EXPR:
7879 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7880 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7881 if (temp == 0)
7882 abort ();
7883 return temp;
7884
d6a5ac33
RK
7885 /* ??? Can optimize bitwise operations with one arg constant.
7886 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7887 and (a bitwise1 b) bitwise2 b (etc)
7888 but that is probably not worthwhile. */
7889
7890 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7891 boolean values when we want in all cases to compute both of them. In
7892 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7893 as actual zero-or-1 values and then bitwise anding. In cases where
7894 there cannot be any side effects, better code would be made by
7895 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7896 how to recognize those cases. */
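/* For instance, a front end may emit TRUTH_AND_EXPR for `p && q'
   when neither operand has side effects; it is then expanded just
   like the bitwise AND below, with no branch at all.  */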
7897
bbf6f052
RK
7898 case TRUTH_AND_EXPR:
7899 case BIT_AND_EXPR:
7900 this_optab = and_optab;
7901 goto binop;
7902
bbf6f052
RK
7903 case TRUTH_OR_EXPR:
7904 case BIT_IOR_EXPR:
7905 this_optab = ior_optab;
7906 goto binop;
7907
874726a8 7908 case TRUTH_XOR_EXPR:
bbf6f052
RK
7909 case BIT_XOR_EXPR:
7910 this_optab = xor_optab;
7911 goto binop;
7912
7913 case LSHIFT_EXPR:
7914 case RSHIFT_EXPR:
7915 case LROTATE_EXPR:
7916 case RROTATE_EXPR:
e5e809f4 7917 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
7918 subtarget = 0;
7919 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7920 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7921 unsignedp);
7922
d6a5ac33
RK
7923 /* Could determine the answer when only additive constants differ. Also,
7924 the addition of one can be handled by changing the condition. */
bbf6f052
RK
7925 case LT_EXPR:
7926 case LE_EXPR:
7927 case GT_EXPR:
7928 case GE_EXPR:
7929 case EQ_EXPR:
7930 case NE_EXPR:
1eb8759b
RH
7931 case UNORDERED_EXPR:
7932 case ORDERED_EXPR:
7933 case UNLT_EXPR:
7934 case UNLE_EXPR:
7935 case UNGT_EXPR:
7936 case UNGE_EXPR:
7937 case UNEQ_EXPR:
bbf6f052
RK
7938 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7939 if (temp != 0)
7940 return temp;
d6a5ac33 7941
0f41302f 7942 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
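/* That is, for `r = (foo != 0)' the sequence emitted below is roughly:
   r = foo; if (r == 0) goto lab; r = 1; lab:
   reusing the result register to hold FOO itself.  */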
bbf6f052
RK
7943 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7944 && original_target
7945 && GET_CODE (original_target) == REG
7946 && (GET_MODE (original_target)
7947 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7948 {
d6a5ac33
RK
7949 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7950 VOIDmode, 0);
7951
c0a3eeac
UW
7952 /* If temp is constant, we can just compute the result. */
7953 if (GET_CODE (temp) == CONST_INT)
7954 {
7955 if (INTVAL (temp) != 0)
7956 emit_move_insn (target, const1_rtx);
7957 else
7958 emit_move_insn (target, const0_rtx);
7959
7960 return target;
7961 }
7962
bbf6f052 7963 if (temp != original_target)
c0a3eeac
UW
7964 {
7965 enum machine_mode mode1 = GET_MODE (temp);
7966 if (mode1 == VOIDmode)
7967 mode1 = tmode != VOIDmode ? tmode : mode;
7968
7969 temp = copy_to_mode_reg (mode1, temp);
7970 }
d6a5ac33 7971
bbf6f052 7972 op1 = gen_label_rtx ();
c5d5d461 7973 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
a06ef755 7974 GET_MODE (temp), unsignedp, op1);
bbf6f052
RK
7975 emit_move_insn (temp, const1_rtx);
7976 emit_label (op1);
7977 return temp;
7978 }
d6a5ac33 7979
bbf6f052
RK
7980 /* If no set-flag instruction, must generate a conditional
7981 store into a temporary variable. Drop through
7982 and handle this like && and ||. */
7983
7984 case TRUTH_ANDIF_EXPR:
7985 case TRUTH_ORIF_EXPR:
e44842fe 7986 if (! ignore
e5e809f4 7987 && (target == 0 || ! safe_from_p (target, exp, 1)
e44842fe
RK
7988 /* Make sure we don't have a hard reg (such as function's return
7989 value) live across basic blocks, if not optimizing. */
7990 || (!optimize && GET_CODE (target) == REG
7991 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
bbf6f052 7992 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
e44842fe
RK
7993
7994 if (target)
7995 emit_clr_insn (target);
7996
bbf6f052
RK
7997 op1 = gen_label_rtx ();
7998 jumpifnot (exp, op1);
e44842fe
RK
7999
8000 if (target)
8001 emit_0_to_1_insn (target);
8002
bbf6f052 8003 emit_label (op1);
e44842fe 8004 return ignore ? const0_rtx : target;
bbf6f052
RK
8005
8006 case TRUTH_NOT_EXPR:
8007 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8008 /* The parser is careful to generate TRUTH_NOT_EXPR
8009 only with operands that are always zero or one. */
906c4e36 8010 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
bbf6f052
RK
8011 target, 1, OPTAB_LIB_WIDEN);
8012 if (temp == 0)
8013 abort ();
8014 return temp;
8015
8016 case COMPOUND_EXPR:
8017 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8018 emit_queue ();
8019 return expand_expr (TREE_OPERAND (exp, 1),
8020 (ignore ? const0_rtx : target),
8021 VOIDmode, 0);
8022
8023 case COND_EXPR:
ac01eace
RK
8024 /* If we would have a "singleton" (see below) were it not for a
8025 conversion in each arm, bring that conversion back out. */
8026 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8027 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8028 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8029 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8030 {
d6edb99e
ZW
8031 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8032 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8033
8034 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8035 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8036 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8037 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8038 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8039 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8040 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8041 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
ac01eace 8042 return expand_expr (build1 (NOP_EXPR, type,
d6edb99e 8043 build (COND_EXPR, TREE_TYPE (iftrue),
ac01eace 8044 TREE_OPERAND (exp, 0),
d6edb99e 8045 iftrue, iffalse)),
ac01eace
RK
8046 target, tmode, modifier);
8047 }
8048
bbf6f052
RK
8049 {
8050 /* Note that COND_EXPRs whose type is a structure or union
8051 are required to be constructed to contain assignments of
8052 a temporary variable, so that we can evaluate them here
8053 for side effect only. If type is void, we must do likewise. */
8054
8055 /* If an arm of the branch requires a cleanup,
8056 only that cleanup is performed. */
8057
8058 tree singleton = 0;
8059 tree binary_op = 0, unary_op = 0;
bbf6f052
RK
8060
8061 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8062 convert it to our mode, if necessary. */
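/* E.g. `flag = (x < y) ? 1 : 0' expands exactly like
   `flag = (x < y)'; at most a mode conversion remains.  */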
8063 if (integer_onep (TREE_OPERAND (exp, 1))
8064 && integer_zerop (TREE_OPERAND (exp, 2))
8065 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8066 {
dd27116b
RK
8067 if (ignore)
8068 {
8069 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
37a08a29 8070 modifier);
dd27116b
RK
8071 return const0_rtx;
8072 }
8073
37a08a29 8074 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
bbf6f052
RK
8075 if (GET_MODE (op0) == mode)
8076 return op0;
d6a5ac33 8077
bbf6f052
RK
8078 if (target == 0)
8079 target = gen_reg_rtx (mode);
8080 convert_move (target, op0, unsignedp);
8081 return target;
8082 }
8083
ac01eace
RK
8084 /* Check for X ? A + B : A. If we have this, we can copy A to the
8085 output and conditionally add B. Similarly for unary operations.
8086 Don't do this if X has side-effects because those side effects
8087 might affect A or B and the "?" operation is a sequence point in
8088 ANSI. (operand_equal_p tests for side effects.) */
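/* E.g. the binary `x ? a + b : a' and the unary `x ? -a : a' both
   qualify, but not when the condition has side effects, as in
   `f () ? a + b : a'.  */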
bbf6f052
RK
8089
8090 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8091 && operand_equal_p (TREE_OPERAND (exp, 2),
8092 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8093 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8094 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8095 && operand_equal_p (TREE_OPERAND (exp, 1),
8096 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8097 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8098 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8099 && operand_equal_p (TREE_OPERAND (exp, 2),
8100 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8101 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8102 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8103 && operand_equal_p (TREE_OPERAND (exp, 1),
8104 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8105 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8106
01c8a7c8
RK
8107 /* If we are not to produce a result, we have no target. Otherwise,
8108 if a target was specified use it; it will not be used as an
3a94c984 8109 intermediate target unless it is safe. If no target, use a
01c8a7c8
RK
8110 temporary. */
8111
8112 if (ignore)
8113 temp = 0;
8114 else if (original_target
e5e809f4 8115 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
01c8a7c8
RK
8116 || (singleton && GET_CODE (original_target) == REG
8117 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8118 && original_target == var_rtx (singleton)))
8119 && GET_MODE (original_target) == mode
7c00d1fe
RK
8120#ifdef HAVE_conditional_move
8121 && (! can_conditionally_move_p (mode)
8122 || GET_CODE (original_target) == REG
8123 || TREE_ADDRESSABLE (type))
8124#endif
8125d7e9
BS
8125 && (GET_CODE (original_target) != MEM
8126 || TREE_ADDRESSABLE (type)))
01c8a7c8
RK
8127 temp = original_target;
8128 else if (TREE_ADDRESSABLE (type))
8129 abort ();
8130 else
8131 temp = assign_temp (type, 0, 0, 1);
8132
ac01eace
RK
8133 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8134 do the test of X as a store-flag operation, do this as
8135 A + ((X != 0) << log C). Similarly for other simple binary
8136 operators. Only do for C == 1 if BRANCH_COST is low. */
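/* E.g. `x ? a + 4 : a' becomes `a + ((x != 0) << 2)', trading the
   branch for a store-flag and a shift; for C == 1 the shift
   disappears entirely.  */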
dd27116b 8137 if (temp && singleton && binary_op
bbf6f052
RK
8138 && (TREE_CODE (binary_op) == PLUS_EXPR
8139 || TREE_CODE (binary_op) == MINUS_EXPR
8140 || TREE_CODE (binary_op) == BIT_IOR_EXPR
9fbd9f58 8141 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
ac01eace
RK
8142 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8143 : integer_onep (TREE_OPERAND (binary_op, 1)))
bbf6f052
RK
8144 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8145 {
8146 rtx result;
91ce572a
CC
8147 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8148 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8149 ? addv_optab : add_optab)
8150 : TREE_CODE (binary_op) == MINUS_EXPR
8151 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8152 ? subv_optab : sub_optab)
8153 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8154 : xor_optab);
bbf6f052
RK
8155
8156 /* If we had X ? A : A + 1, do this as A + (X == 0).
8157
8158 We have to invert the truth value here and then put it
8159 back later if do_store_flag fails. We cannot simply copy
8160 TREE_OPERAND (exp, 0) to another variable and modify that
8161 because invert_truthvalue can modify the tree pointed to
8162 by its argument. */
8163 if (singleton == TREE_OPERAND (exp, 1))
8164 TREE_OPERAND (exp, 0)
8165 = invert_truthvalue (TREE_OPERAND (exp, 0));
8166
8167 result = do_store_flag (TREE_OPERAND (exp, 0),
e5e809f4 8168 (safe_from_p (temp, singleton, 1)
906c4e36 8169 ? temp : NULL_RTX),
bbf6f052
RK
8170 mode, BRANCH_COST <= 1);
8171
ac01eace
RK
8172 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8173 result = expand_shift (LSHIFT_EXPR, mode, result,
8174 build_int_2 (tree_log2
8175 (TREE_OPERAND
8176 (binary_op, 1)),
8177 0),
e5e809f4 8178 (safe_from_p (temp, singleton, 1)
ac01eace
RK
8179 ? temp : NULL_RTX), 0);
8180
bbf6f052
RK
8181 if (result)
8182 {
906c4e36 8183 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8184 return expand_binop (mode, boptab, op1, result, temp,
8185 unsignedp, OPTAB_LIB_WIDEN);
8186 }
8187 else if (singleton == TREE_OPERAND (exp, 1))
8188 TREE_OPERAND (exp, 0)
8189 = invert_truthvalue (TREE_OPERAND (exp, 0));
8190 }
3a94c984 8191
dabf8373 8192 do_pending_stack_adjust ();
bbf6f052
RK
8193 NO_DEFER_POP;
8194 op0 = gen_label_rtx ();
8195
8196 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8197 {
8198 if (temp != 0)
8199 {
8200 /* If the target conflicts with the other operand of the
8201 binary op, we can't use it. Also, we can't use the target
8202 if it is a hard register, because evaluating the condition
8203 might clobber it. */
8204 if ((binary_op
e5e809f4 8205 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
bbf6f052
RK
8206 || (GET_CODE (temp) == REG
8207 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8208 temp = gen_reg_rtx (mode);
8209 store_expr (singleton, temp, 0);
8210 }
8211 else
906c4e36 8212 expand_expr (singleton,
2937cf87 8213 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8214 if (singleton == TREE_OPERAND (exp, 1))
8215 jumpif (TREE_OPERAND (exp, 0), op0);
8216 else
8217 jumpifnot (TREE_OPERAND (exp, 0), op0);
8218
956d6950 8219 start_cleanup_deferral ();
bbf6f052
RK
8220 if (binary_op && temp == 0)
8221 /* Just touch the other operand. */
8222 expand_expr (TREE_OPERAND (binary_op, 1),
906c4e36 8223 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8224 else if (binary_op)
8225 store_expr (build (TREE_CODE (binary_op), type,
8226 make_tree (type, temp),
8227 TREE_OPERAND (binary_op, 1)),
8228 temp, 0);
8229 else
8230 store_expr (build1 (TREE_CODE (unary_op), type,
8231 make_tree (type, temp)),
8232 temp, 0);
8233 op1 = op0;
bbf6f052 8234 }
bbf6f052
RK
8235 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8236 comparison operator. If we have one of these cases, set the
8237 output to A, branch on A (cse will merge these two references),
8238 then set the output to FOO. */
8239 else if (temp
8240 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8241 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8242 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8243 TREE_OPERAND (exp, 1), 0)
e9a25f70
JL
8244 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8245 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
e5e809f4 8246 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
bbf6f052 8247 {
3a94c984
KH
8248 if (GET_CODE (temp) == REG
8249 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
bbf6f052
RK
8250 temp = gen_reg_rtx (mode);
8251 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8252 jumpif (TREE_OPERAND (exp, 0), op0);
5dab5552 8253
956d6950 8254 start_cleanup_deferral ();
bbf6f052
RK
8255 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8256 op1 = op0;
8257 }
8258 else if (temp
8259 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8260 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8261 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8262 TREE_OPERAND (exp, 2), 0)
e9a25f70
JL
8263 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8264 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
e5e809f4 8265 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
bbf6f052 8266 {
3a94c984
KH
8267 if (GET_CODE (temp) == REG
8268 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
bbf6f052
RK
8269 temp = gen_reg_rtx (mode);
8270 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8271 jumpifnot (TREE_OPERAND (exp, 0), op0);
5dab5552 8272
956d6950 8273 start_cleanup_deferral ();
bbf6f052
RK
8274 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8275 op1 = op0;
8276 }
8277 else
8278 {
8279 op1 = gen_label_rtx ();
8280 jumpifnot (TREE_OPERAND (exp, 0), op0);
5dab5552 8281
956d6950 8282 start_cleanup_deferral ();
3a94c984 8283
2ac84cfe 8284 /* One branch of the cond can be void, if it never returns. For
3a94c984 8285 example, A ? throw : E. */
2ac84cfe 8286 if (temp != 0
3a94c984 8287 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
bbf6f052
RK
8288 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8289 else
906c4e36
RK
8290 expand_expr (TREE_OPERAND (exp, 1),
8291 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
956d6950 8292 end_cleanup_deferral ();
bbf6f052
RK
8293 emit_queue ();
8294 emit_jump_insn (gen_jump (op1));
8295 emit_barrier ();
8296 emit_label (op0);
956d6950 8297 start_cleanup_deferral ();
2ac84cfe 8298 if (temp != 0
3a94c984 8299 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
bbf6f052
RK
8300 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8301 else
906c4e36
RK
8302 expand_expr (TREE_OPERAND (exp, 2),
8303 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8304 }
8305
956d6950 8306 end_cleanup_deferral ();
bbf6f052
RK
8307
8308 emit_queue ();
8309 emit_label (op1);
8310 OK_DEFER_POP;
5dab5552 8311
bbf6f052
RK
8312 return temp;
8313 }
8314
8315 case TARGET_EXPR:
8316 {
8317 /* Something needs to be initialized, but we didn't know
8318 where that thing was when building the tree. For example,
8319 it could be the return value of a function, or a parameter
8320 to a function that is laid out on the stack, or a temporary
8321 variable which must be passed by reference.
8322
8323 We guarantee that the expression will either be constructed
8324 or copied into our original target. */
8325
8326 tree slot = TREE_OPERAND (exp, 0);
2a888d4c 8327 tree cleanups = NULL_TREE;
5c062816 8328 tree exp1;
bbf6f052
RK
8329
8330 if (TREE_CODE (slot) != VAR_DECL)
8331 abort ();
8332
9c51f375
RK
8333 if (! ignore)
8334 target = original_target;
8335
6fbfac92
JM
8336 /* Set this here so that if we get a target that refers to a
8337 register variable that's already been used, put_reg_into_stack
3a94c984 8338 knows that it should fix up those uses. */
6fbfac92
JM
8339 TREE_USED (slot) = 1;
8340
bbf6f052
RK
8341 if (target == 0)
8342 {
19e7881c 8343 if (DECL_RTL_SET_P (slot))
ac993f4f
MS
8344 {
8345 target = DECL_RTL (slot);
5c062816 8346 /* If we have already expanded the slot, don't do
ac993f4f 8347 it again. (mrs) */
5c062816
MS
8348 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8349 return target;
ac993f4f 8350 }
bbf6f052
RK
8351 else
8352 {
e9a25f70 8353 target = assign_temp (type, 2, 0, 1);
bbf6f052
RK
8354 /* All temp slots at this level must not conflict. */
8355 preserve_temp_slots (target);
19e7881c 8356 SET_DECL_RTL (slot, target);
e9a25f70 8357 if (TREE_ADDRESSABLE (slot))
4361b41d 8358 put_var_into_stack (slot);
bbf6f052 8359
e287fd6e
RK
8360 /* Since SLOT is not known to the called function
8361 to belong to its stack frame, we must build an explicit
8362 cleanup. This case occurs when we must build up a reference
8363 to pass the reference as an argument. In this case,
8364 it is very likely that such a reference need not be
8365 built here. */
8366
8367 if (TREE_OPERAND (exp, 2) == 0)
8368 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
2a888d4c 8369 cleanups = TREE_OPERAND (exp, 2);
e287fd6e 8370 }
bbf6f052
RK
8371 }
8372 else
8373 {
8374 /* This case does occur, when expanding a parameter which
8375 needs to be constructed on the stack. The target
8376 is the actual stack address that we want to initialize.
8377 The function we call will perform the cleanup in this case. */
8378
8c042b47
RS
8379 /* If we have already assigned it space, use that space,
8380 not target that we were passed in, as our target
8381 parameter is only a hint. */
19e7881c 8382 if (DECL_RTL_SET_P (slot))
3a94c984
KH
8383 {
8384 target = DECL_RTL (slot);
8385 /* If we have already expanded the slot, don't do
8c042b47 8386 it again. (mrs) */
3a94c984
KH
8387 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8388 return target;
8c042b47 8389 }
21002281
JW
8390 else
8391 {
19e7881c 8392 SET_DECL_RTL (slot, target);
21002281
JW
8393 /* If we must have an addressable slot, then make sure that
8394 the RTL that we just stored in slot is OK. */
8395 if (TREE_ADDRESSABLE (slot))
4361b41d 8396 put_var_into_stack (slot);
21002281 8397 }
bbf6f052
RK
8398 }
8399
4847c938 8400 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
5c062816
MS
8401 /* Mark it as expanded. */
8402 TREE_OPERAND (exp, 1) = NULL_TREE;
8403
41531e5b 8404 store_expr (exp1, target, 0);
61d6b1cc 8405
e976b8b2 8406 expand_decl_cleanup (NULL_TREE, cleanups);
3a94c984 8407
41531e5b 8408 return target;
bbf6f052
RK
8409 }
8410
8411 case INIT_EXPR:
8412 {
8413 tree lhs = TREE_OPERAND (exp, 0);
8414 tree rhs = TREE_OPERAND (exp, 1);
bbf6f052
RK
8415
8416 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
bbf6f052
RK
8417 return temp;
8418 }
8419
8420 case MODIFY_EXPR:
8421 {
8422 /* If lhs is complex, expand calls in rhs before computing it.
6d0a3f67
NS
8423 That's so we don't compute a pointer and save it over a
8424 call. If lhs is simple, compute it first so we can give it
8425 as a target if the rhs is just a call. This avoids an
8426 extra temp and copy, and prevents a partial subsumption
8427 that makes bad code. Actually we could treat
8428 component_ref's of vars like vars. */
bbf6f052
RK
8429
8430 tree lhs = TREE_OPERAND (exp, 0);
8431 tree rhs = TREE_OPERAND (exp, 1);
bbf6f052
RK
8432
8433 temp = 0;
8434
bbf6f052
RK
8435 /* Check for |= or &= of a bitfield of size one into another bitfield
8436 of size 1. In this case, (unless we need the result of the
8437 assignment) we can do this more efficiently with a
8438 test followed by an assignment, if necessary.
8439
8440 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8441 things change so we do, this code should be enhanced to
8442 support it. */
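/* E.g. with one-bit fields, `s.a |= s.b;' comes out as
   `if (s.b) s.a = 1;' and `s.a &= s.b;' as `if (! s.b) s.a = 0;',
   avoiding a read-modify-write of S.A.  */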
8443 if (ignore
8444 && TREE_CODE (lhs) == COMPONENT_REF
8445 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8446 || TREE_CODE (rhs) == BIT_AND_EXPR)
8447 && TREE_OPERAND (rhs, 0) == lhs
8448 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
05bccae2
RK
8449 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8450 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
bbf6f052
RK
8451 {
8452 rtx label = gen_label_rtx ();
8453
8454 do_jump (TREE_OPERAND (rhs, 1),
8455 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8456 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8457 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8458 (TREE_CODE (rhs) == BIT_IOR_EXPR
8459 ? integer_one_node
8460 : integer_zero_node)),
8461 0, 0);
e7c33f54 8462 do_pending_stack_adjust ();
bbf6f052
RK
8463 emit_label (label);
8464 return const0_rtx;
8465 }
8466
bbf6f052 8467 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
6d0a3f67 8468
bbf6f052
RK
8469 return temp;
8470 }
8471
6e7f84a7
APB
8472 case RETURN_EXPR:
8473 if (!TREE_OPERAND (exp, 0))
8474 expand_null_return ();
8475 else
8476 expand_return (TREE_OPERAND (exp, 0));
8477 return const0_rtx;
8478
bbf6f052
RK
8479 case PREINCREMENT_EXPR:
8480 case PREDECREMENT_EXPR:
7b8b9722 8481 return expand_increment (exp, 0, ignore);
bbf6f052
RK
8482
8483 case POSTINCREMENT_EXPR:
8484 case POSTDECREMENT_EXPR:
8485 /* Faster to treat as pre-increment if result is not used. */
7b8b9722 8486 return expand_increment (exp, ! ignore, ignore);
bbf6f052
RK
8487
8488 case ADDR_EXPR:
8489 /* Are we taking the address of a nested function? */
8490 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
38ee6ed9 8491 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
e5e809f4
JL
8492 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8493 && ! TREE_STATIC (exp))
bbf6f052
RK
8494 {
8495 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8496 op0 = force_operand (op0, target);
8497 }
682ba3a6
RK
8498 /* If we are taking the address of something erroneous, just
8499 return a zero. */
8500 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8501 return const0_rtx;
d6b6783b
RK
8502 /* If we are taking the address of a constant and are at the
8503 top level, we have to use output_constant_def since we can't
8504 call force_const_mem at top level. */
8505 else if (cfun == 0
8506 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8507 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8508 == 'c')))
8509 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
bbf6f052
RK
8510 else
8511 {
e287fd6e
RK
8512 /* We make sure to pass const0_rtx down if we came in with
8513 ignore set, to avoid doing the cleanups twice for something. */
8514 op0 = expand_expr (TREE_OPERAND (exp, 0),
8515 ignore ? const0_rtx : NULL_RTX, VOIDmode,
bbf6f052
RK
8516 (modifier == EXPAND_INITIALIZER
8517 ? modifier : EXPAND_CONST_ADDRESS));
896102d0 8518
119af78a
RK
8519 /* If we are going to ignore the result, OP0 will have been set
8520 to const0_rtx, so just return it. Don't get confused and
8521 think we are taking the address of the constant. */
8522 if (ignore)
8523 return op0;
8524
73b7f58c
BS
8525 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8526 clever and returns a REG when given a MEM. */
8527 op0 = protect_from_queue (op0, 1);
3539e816 8528
c5c76735
JL
8529 /* We would like the object in memory. If it is a constant, we can
8530 have it be statically allocated into memory. For a non-constant,
8531 we need to allocate some memory and store the value into it. */
896102d0
RK
8532
8533 if (CONSTANT_P (op0))
8534 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8535 op0);
682ba3a6 8536 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
df6018fd
JJ
8537 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8538 || GET_CODE (op0) == PARALLEL)
896102d0 8539 {
6c7d86ec
RK
8540 /* If the operand is a SAVE_EXPR, we can deal with this by
8541 forcing the SAVE_EXPR into memory. */
8542 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
8543 {
8544 put_var_into_stack (TREE_OPERAND (exp, 0));
8545 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
8546 }
df6018fd 8547 else
6c7d86ec
RK
8548 {
8549 /* If this object is in a register, it can't be BLKmode. */
8550 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
19f90fad 8551 rtx memloc = assign_temp (inner_type, 1, 1, 1);
6c7d86ec
RK
8552
8553 if (GET_CODE (op0) == PARALLEL)
8554 /* Handle calls that pass values in multiple
8555 non-contiguous locations. The Irix 6 ABI has examples
8556 of this. */
8557 emit_group_store (memloc, op0,
8558 int_size_in_bytes (inner_type));
8559 else
8560 emit_move_insn (memloc, op0);
8561
8562 op0 = memloc;
8563 }
896102d0
RK
8564 }
8565
bbf6f052
RK
8566 if (GET_CODE (op0) != MEM)
8567 abort ();
3a94c984 8568
34e81b5a 8569 mark_temp_addr_taken (op0);
bbf6f052 8570 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
88f63c77 8571 {
34e81b5a 8572 op0 = XEXP (op0, 0);
88f63c77 8573#ifdef POINTERS_EXTEND_UNSIGNED
34e81b5a 8574 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
88f63c77 8575 && mode == ptr_mode)
34e81b5a 8576 op0 = convert_memory_address (ptr_mode, op0);
88f63c77 8577#endif
34e81b5a 8578 return op0;
88f63c77 8579 }
987c71d9 8580
c952ff4b
RK
8581 /* If OP0 is not aligned at least as much as the type requires, we
8582 need to make a temporary, copy OP0 to it, and take the address of
8583 the temporary. We want to use the alignment of the type, not of
8584 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8585 the test for BLKmode means that can't happen. The test for
8586 BLKmode is because we never make mis-aligned MEMs with
8587 non-BLKmode.
8588
8589 We don't need to do this at all if the machine doesn't have
8590 strict alignment. */
8591 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8592 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
ed239f5a
RK
8593 > MEM_ALIGN (op0))
8594 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
a06ef755
RK
8595 {
8596 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8597 rtx new
8598 = assign_stack_temp_for_type
8599 (TYPE_MODE (inner_type),
8600 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
e27cc84b 8601 : int_size_in_bytes (inner_type),
a06ef755
RK
8602 1, build_qualified_type (inner_type,
8603 (TYPE_QUALS (inner_type)
8604 | TYPE_QUAL_CONST)));
8605
c3d32120
RK
8606 if (TYPE_ALIGN_OK (inner_type))
8607 abort ();
8608
a06ef755
RK
8609 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)));
8610 op0 = new;
8611 }
8612
bbf6f052
RK
8613 op0 = force_operand (XEXP (op0, 0), target);
8614 }
987c71d9 8615
05c8e58b
HPN
8616 if (flag_force_addr
8617 && GET_CODE (op0) != REG
8618 && modifier != EXPAND_CONST_ADDRESS
8619 && modifier != EXPAND_INITIALIZER
8620 && modifier != EXPAND_SUM)
987c71d9
RK
8621 op0 = force_reg (Pmode, op0);
8622
dc6d66b3
RK
8623 if (GET_CODE (op0) == REG
8624 && ! REG_USERVAR_P (op0))
bdb429a5 8625 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
987c71d9 8626
88f63c77
RK
8627#ifdef POINTERS_EXTEND_UNSIGNED
8628 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8629 && mode == ptr_mode)
9fcfcce7 8630 op0 = convert_memory_address (ptr_mode, op0);
88f63c77
RK
8631#endif
8632
bbf6f052
RK
8633 return op0;
8634
8635 case ENTRY_VALUE_EXPR:
8636 abort ();
8637
7308a047
RS
8638 /* COMPLEX type for Extended Pascal & Fortran */
8639 case COMPLEX_EXPR:
8640 {
8641 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6551fa4d 8642 rtx insns;
7308a047
RS
8643
8644 /* Get the rtx code of the operands. */
8645 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8646 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8647
8648 if (! target)
8649 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8650
6551fa4d 8651 start_sequence ();
7308a047
RS
8652
8653 /* Move the real (op0) and imaginary (op1) parts to their locations. */
2d7050fd
RS
8654 emit_move_insn (gen_realpart (mode, target), op0);
8655 emit_move_insn (gen_imagpart (mode, target), op1);
7308a047 8656
6551fa4d
JW
8657 insns = get_insns ();
8658 end_sequence ();
8659
7308a047 8660 /* Complex construction should appear as a single unit. */
6551fa4d
JW
8661 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8662 each with a separate pseudo as destination.
8663 It's not correct for flow to treat them as a unit. */
6d6e61ce 8664 if (GET_CODE (target) != CONCAT)
6551fa4d
JW
8665 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8666 else
8667 emit_insns (insns);
7308a047
RS
8668
8669 return target;
8670 }
8671
8672 case REALPART_EXPR:
2d7050fd
RS
8673 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8674 return gen_realpart (mode, op0);
3a94c984 8675
7308a047 8676 case IMAGPART_EXPR:
2d7050fd
RS
8677 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8678 return gen_imagpart (mode, op0);
7308a047
RS
8679
8680 case CONJ_EXPR:
8681 {
62acb978 8682 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7308a047 8683 rtx imag_t;
6551fa4d 8684 rtx insns;
3a94c984
KH
8685
8686 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7308a047
RS
8687
8688 if (! target)
d6a5ac33 8689 target = gen_reg_rtx (mode);
3a94c984 8690
6551fa4d 8691 start_sequence ();
7308a047
RS
8692
8693 /* Store the realpart and the negated imagpart to target. */
62acb978
RK
8694 emit_move_insn (gen_realpart (partmode, target),
8695 gen_realpart (partmode, op0));
7308a047 8696
62acb978 8697 imag_t = gen_imagpart (partmode, target);
91ce572a
CC
8698 temp = expand_unop (partmode,
8699 ! unsignedp && flag_trapv
8700 && (GET_MODE_CLASS(partmode) == MODE_INT)
8701 ? negv_optab : neg_optab,
3a94c984 8702 gen_imagpart (partmode, op0), imag_t, 0);
7308a047
RS
8703 if (temp != imag_t)
8704 emit_move_insn (imag_t, temp);
8705
6551fa4d
JW
8706 insns = get_insns ();
8707 end_sequence ();
8708
3a94c984 8709 /* Conjugate should appear as a single unit.
d6a5ac33 8710 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
6551fa4d
JW
8711 each with a separate pseudo as destination.
8712 It's not correct for flow to treat them as a unit. */
6d6e61ce 8713 if (GET_CODE (target) != CONCAT)
6551fa4d
JW
8714 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8715 else
8716 emit_insns (insns);
7308a047
RS
8717
8718 return target;
8719 }
8720
e976b8b2
MS
8721 case TRY_CATCH_EXPR:
8722 {
8723 tree handler = TREE_OPERAND (exp, 1);
8724
8725 expand_eh_region_start ();
8726
8727 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8728
52a11cbf 8729 expand_eh_region_end_cleanup (handler);
e976b8b2
MS
8730
8731 return op0;
8732 }
8733
b335b813
PB
8734 case TRY_FINALLY_EXPR:
8735 {
8736 tree try_block = TREE_OPERAND (exp, 0);
8737 tree finally_block = TREE_OPERAND (exp, 1);
8738 rtx finally_label = gen_label_rtx ();
8739 rtx done_label = gen_label_rtx ();
8740 rtx return_link = gen_reg_rtx (Pmode);
8741 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8742 (tree) finally_label, (tree) return_link);
8743 TREE_SIDE_EFFECTS (cleanup) = 1;
8744
8745 /* Start a new binding layer that will keep track of all cleanup
8746 actions to be performed. */
8e91754e 8747 expand_start_bindings (2);
b335b813
PB
8748
8749 target_temp_slot_level = temp_slot_level;
8750
8751 expand_decl_cleanup (NULL_TREE, cleanup);
8752 op0 = expand_expr (try_block, target, tmode, modifier);
8753
8754 preserve_temp_slots (op0);
8755 expand_end_bindings (NULL_TREE, 0, 0);
8756 emit_jump (done_label);
8757 emit_label (finally_label);
8758 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8759 emit_indirect_jump (return_link);
8760 emit_label (done_label);
8761 return op0;
8762 }
8763
3a94c984 8764 case GOTO_SUBROUTINE_EXPR:
b335b813
PB
8765 {
8766 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8767 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8768 rtx return_address = gen_label_rtx ();
3a94c984
KH
8769 emit_move_insn (return_link,
8770 gen_rtx_LABEL_REF (Pmode, return_address));
b335b813
PB
8771 emit_jump (subr);
8772 emit_label (return_address);
8773 return const0_rtx;
8774 }
8775
d3707adb
RH
8776 case VA_ARG_EXPR:
8777 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8778
52a11cbf 8779 case EXC_PTR_EXPR:
86c99549 8780 return get_exception_pointer (cfun);
52a11cbf 8781
67231816
RH
8782 case FDESC_EXPR:
8783 /* Function descriptors are not valid except for as
8784 initialization constants, and should not be expanded. */
8785 abort ();
8786
bbf6f052 8787 default:
90764a87 8788 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
bbf6f052
RK
8789 }
8790
8791 /* Here to do an ordinary binary operator, generating an instruction
8792 from the optab already placed in `this_optab'. */
8793 binop:
e5e809f4 8794 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
8795 subtarget = 0;
8796 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 8797 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8798 binop2:
8799 temp = expand_binop (mode, this_optab, op0, op1, target,
8800 unsignedp, OPTAB_LIB_WIDEN);
8801 if (temp == 0)
8802 abort ();
8803 return temp;
8804}
b93a436e 8805\f
fed3cef0
RK
8806/* Return the tree node if ARG corresponds to a string constant or zero
8807 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
8808 in bytes within the string that ARG is accessing. The type of the
8809 offset will be `sizetype'. */
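/* For instance, handed the argument tree of strlen ("hello" + 2),
   this returns the STRING_CST "hello" and sets *PTR_OFFSET to 2,
   letting a caller such as the built-in strlen expander fold
   operations on constant strings.  */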
b93a436e 8810
28f4ec01 8811tree
b93a436e
JL
8812string_constant (arg, ptr_offset)
8813 tree arg;
8814 tree *ptr_offset;
8815{
8816 STRIP_NOPS (arg);
8817
8818 if (TREE_CODE (arg) == ADDR_EXPR
8819 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8820 {
fed3cef0 8821 *ptr_offset = size_zero_node;
b93a436e
JL
8822 return TREE_OPERAND (arg, 0);
8823 }
8824 else if (TREE_CODE (arg) == PLUS_EXPR)
8825 {
8826 tree arg0 = TREE_OPERAND (arg, 0);
8827 tree arg1 = TREE_OPERAND (arg, 1);
8828
8829 STRIP_NOPS (arg0);
8830 STRIP_NOPS (arg1);
8831
8832 if (TREE_CODE (arg0) == ADDR_EXPR
8833 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
bbf6f052 8834 {
fed3cef0 8835 *ptr_offset = convert (sizetype, arg1);
b93a436e 8836 return TREE_OPERAND (arg0, 0);
bbf6f052 8837 }
b93a436e
JL
8838 else if (TREE_CODE (arg1) == ADDR_EXPR
8839 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
bbf6f052 8840 {
fed3cef0 8841 *ptr_offset = convert (sizetype, arg0);
b93a436e 8842 return TREE_OPERAND (arg1, 0);
bbf6f052 8843 }
b93a436e 8844 }
ca695ac9 8845
b93a436e
JL
8846 return 0;
8847}
ca695ac9 8848\f
b93a436e
JL
8849/* Expand code for a post- or pre- increment or decrement
8850 and return the RTX for the result.
8851 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
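/* E.g. for `b = a++;' (POST nonzero) the caller receives the old
   value of A, either via the queued increment or from an explicit
   copy made below, while A itself ends up incremented.  */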
1499e0a8 8852
b93a436e
JL
8853static rtx
8854expand_increment (exp, post, ignore)
b3694847 8855 tree exp;
b93a436e 8856 int post, ignore;
ca695ac9 8857{
b3694847
SS
8858 rtx op0, op1;
8859 rtx temp, value;
8860 tree incremented = TREE_OPERAND (exp, 0);
b93a436e
JL
8861 optab this_optab = add_optab;
8862 int icode;
8863 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8864 int op0_is_copy = 0;
8865 int single_insn = 0;
8866 /* 1 means we can't store into OP0 directly,
8867 because it is a subreg narrower than a word,
8868 and we don't dare clobber the rest of the word. */
8869 int bad_subreg = 0;
1499e0a8 8870
b93a436e
JL
8871 /* Stabilize any component ref that might need to be
8872 evaluated more than once below. */
8873 if (!post
8874 || TREE_CODE (incremented) == BIT_FIELD_REF
8875 || (TREE_CODE (incremented) == COMPONENT_REF
8876 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8877 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8878 incremented = stabilize_reference (incremented);
8879 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8880 ones into save exprs so that they don't accidentally get evaluated
8881 more than once by the code below. */
8882 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8883 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8884 incremented = save_expr (incremented);
e9a25f70 8885
b93a436e
JL
8886 /* Compute the operands as RTX.
8887 Note whether OP0 is the actual lvalue or a copy of it:
8888 I believe it is a copy iff it is a register or subreg
6d2f8887 8889 and insns were generated in computing it. */
e9a25f70 8890
b93a436e 8891 temp = get_last_insn ();
37a08a29 8892 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
e9a25f70 8893
b93a436e
JL
8894 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8895 in place but instead must do sign- or zero-extension during assignment,
8896 so we copy it into a new register and let the code below use it as
8897 a copy.
e9a25f70 8898
b93a436e
JL
8899 Note that we can safely modify this SUBREG since it is known not to be
8900 shared (it was made by the expand_expr call above). */
8901
8902 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8903 {
8904 if (post)
8905 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8906 else
8907 bad_subreg = 1;
8908 }
8909 else if (GET_CODE (op0) == SUBREG
8910 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8911 {
8912 /* We cannot increment this SUBREG in place. If we are
8913 post-incrementing, get a copy of the old value. Otherwise,
8914 just mark that we cannot increment in place. */
8915 if (post)
8916 op0 = copy_to_reg (op0);
8917 else
8918 bad_subreg = 1;
e9a25f70
JL
8919 }
8920
b93a436e
JL
8921 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8922 && temp != get_last_insn ());
37a08a29 8923 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
1499e0a8 8924
b93a436e
JL
8925 /* Decide whether incrementing or decrementing. */
8926 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8927 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8928 this_optab = sub_optab;
8929
8930 /* Convert decrement by a constant into a negative increment. */
8931 if (this_optab == sub_optab
8932 && GET_CODE (op1) == CONST_INT)
ca695ac9 8933 {
3a94c984 8934 op1 = GEN_INT (-INTVAL (op1));
b93a436e 8935 this_optab = add_optab;
ca695ac9 8936 }
1499e0a8 8937
91ce572a 8938 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
505ddab6 8939 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
91ce572a 8940
b93a436e
JL
8941 /* For a preincrement, see if we can do this with a single instruction. */
8942 if (!post)
8943 {
8944 icode = (int) this_optab->handlers[(int) mode].insn_code;
8945 if (icode != (int) CODE_FOR_nothing
8946 /* Make sure that OP0 is valid for operands 0 and 1
8947 of the insn we want to queue. */
a995e389
RH
8948 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8949 && (*insn_data[icode].operand[1].predicate) (op0, mode)
8950 && (*insn_data[icode].operand[2].predicate) (op1, mode))
b93a436e
JL
8951 single_insn = 1;
8952 }
bbf6f052 8953
b93a436e
JL
8954 /* If OP0 is not the actual lvalue, but rather a copy in a register,
8955 then we cannot just increment OP0. We must therefore contrive to
8956 increment the original value. Then, for postincrement, we can return
8957 OP0 since it is a copy of the old value. For preincrement, expand here
8958 unless we can do it with a single insn.
bbf6f052 8959
b93a436e
JL
8960 Likewise if storing directly into OP0 would clobber high bits
8961 we need to preserve (bad_subreg). */
8962 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
a358cee0 8963 {
b93a436e
JL
8964 /* This is the easiest way to increment the value wherever it is.
8965 Problems with multiple evaluation of INCREMENTED are prevented
8966 because either (1) it is a component_ref or preincrement,
8967 in which case it was stabilized above, or (2) it is an array_ref
8968 with constant index in an array in a register, which is
8969 safe to reevaluate. */
8970 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
8971 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8972 ? MINUS_EXPR : PLUS_EXPR),
8973 TREE_TYPE (exp),
8974 incremented,
8975 TREE_OPERAND (exp, 1));
a358cee0 8976
b93a436e
JL
8977 while (TREE_CODE (incremented) == NOP_EXPR
8978 || TREE_CODE (incremented) == CONVERT_EXPR)
8979 {
8980 newexp = convert (TREE_TYPE (incremented), newexp);
8981 incremented = TREE_OPERAND (incremented, 0);
8982 }
bbf6f052 8983
b93a436e
JL
8984 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
8985 return post ? op0 : temp;
8986 }
bbf6f052 8987
b93a436e
JL
8988 if (post)
8989 {
8990 /* We have a true reference to the value in OP0.
8991 If there is an insn to add or subtract in this mode, queue it.
8992 Queueing the increment insn avoids the register shuffling
8993 that often results if we must increment now and first save
8994 the old value for subsequent use. */
bbf6f052 8995
b93a436e
JL
8996#if 0 /* Turned off to avoid making extra insn for indexed memref. */
8997 op0 = stabilize (op0);
8998#endif
41dfd40c 8999
b93a436e
JL
9000 icode = (int) this_optab->handlers[(int) mode].insn_code;
9001 if (icode != (int) CODE_FOR_nothing
9002 /* Make sure that OP0 is valid for operands 0 and 1
9003 of the insn we want to queue. */
a995e389
RH
9004 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9005 && (*insn_data[icode].operand[1].predicate) (op0, mode))
b93a436e 9006 {
a995e389 9007 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
b93a436e 9008 op1 = force_reg (mode, op1);
bbf6f052 9009
b93a436e
JL
9010 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9011 }
9012 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9013 {
9014 rtx addr = (general_operand (XEXP (op0, 0), mode)
9015 ? force_reg (Pmode, XEXP (op0, 0))
9016 : copy_to_reg (XEXP (op0, 0)));
9017 rtx temp, result;
ca695ac9 9018
792760b9 9019 op0 = replace_equiv_address (op0, addr);
b93a436e 9020 temp = force_reg (GET_MODE (op0), op0);
a995e389 9021 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
b93a436e 9022 op1 = force_reg (mode, op1);
ca695ac9 9023
b93a436e
JL
9024 /* The increment queue is LIFO, thus we have to `queue'
9025 the instructions in reverse order. */
9026 enqueue_insn (op0, gen_move_insn (op0, temp));
9027 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9028 return result;
bbf6f052
RK
9029 }
9030 }
ca695ac9 9031
b93a436e
JL
9032 /* Preincrement, or we can't increment with one simple insn. */
9033 if (post)
9034 /* Save a copy of the value before inc or dec, to return it later. */
9035 temp = value = copy_to_reg (op0);
9036 else
9037 /* Arrange to return the incremented value. */
9038 /* Copy the rtx because expand_binop will protect from the queue,
9039 and the results of that would be invalid for us to return
9040 if our caller does emit_queue before using our result. */
9041 temp = copy_rtx (value = op0);
bbf6f052 9042
b93a436e 9043 /* Increment however we can. */
37a08a29 9044 op1 = expand_binop (mode, this_optab, value, op1, op0,
b93a436e 9045 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
37a08a29 9046
b93a436e
JL
9047 /* Make sure the value is stored into OP0. */
9048 if (op1 != op0)
9049 emit_move_insn (op0, op1);
5718612f 9050
b93a436e
JL
9051 return temp;
9052}
9053\f
b93a436e
JL
9054/* At the start of a function, record that we have no previously-pushed
9055 arguments waiting to be popped. */
bbf6f052 9056
b93a436e
JL
9057void
9058init_pending_stack_adjust ()
9059{
9060 pending_stack_adjust = 0;
9061}
bbf6f052 9062
b93a436e 9063/* When exiting from a function, if safe, clear out any pending stack adjust
060fbabf
JL
9064 so the adjustment won't get done.
9065
9066 Note, if the current function calls alloca, then it must have a
9067 frame pointer regardless of the value of flag_omit_frame_pointer. */
bbf6f052 9068
b93a436e
JL
9069void
9070clear_pending_stack_adjust ()
9071{
9072#ifdef EXIT_IGNORE_STACK
9073 if (optimize > 0
060fbabf
JL
9074 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9075 && EXIT_IGNORE_STACK
b93a436e
JL
9076 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9077 && ! flag_inline_functions)
1503a7ec
JH
9078 {
9079 stack_pointer_delta -= pending_stack_adjust,
9080 pending_stack_adjust = 0;
9081 }
b93a436e
JL
9082#endif
9083}
bbf6f052 9084
b93a436e
JL
9085/* Pop any previously-pushed arguments that have not been popped yet. */
9086
9087void
9088do_pending_stack_adjust ()
9089{
9090 if (inhibit_defer_pop == 0)
ca695ac9 9091 {
b93a436e
JL
9092 if (pending_stack_adjust != 0)
9093 adjust_stack (GEN_INT (pending_stack_adjust));
9094 pending_stack_adjust = 0;
bbf6f052 9095 }
bbf6f052
RK
9096}
9097\f
b93a436e 9098/* Expand conditional expressions. */
bbf6f052 9099
b93a436e
JL
9100/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9101 LABEL is an rtx of code CODE_LABEL, in this function and all the
9102 functions here. */
bbf6f052 9103
b93a436e
JL
9104void
9105jumpifnot (exp, label)
ca695ac9 9106 tree exp;
b93a436e 9107 rtx label;
bbf6f052 9108{
b93a436e
JL
9109 do_jump (exp, label, NULL_RTX);
9110}
bbf6f052 9111
b93a436e 9112/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
ca695ac9 9113
b93a436e
JL
9114void
9115jumpif (exp, label)
9116 tree exp;
9117 rtx label;
9118{
9119 do_jump (exp, NULL_RTX, label);
9120}
ca695ac9 9121
b93a436e
JL
9122/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9123 the result is zero, or IF_TRUE_LABEL if the result is one.
9124 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9125 meaning fall through in that case.
ca695ac9 9126
b93a436e
JL
9127 do_jump always does any pending stack adjust except when it does not
9128 actually perform a jump. An example where there is no jump
9129 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
ca695ac9 9130
b93a436e
JL
9131 This function is responsible for optimizing cases such as
9132 &&, || and comparison operators in EXP. */
5718612f 9133
b93a436e
JL
9134void
9135do_jump (exp, if_false_label, if_true_label)
9136 tree exp;
9137 rtx if_false_label, if_true_label;
9138{
b3694847 9139 enum tree_code code = TREE_CODE (exp);
b93a436e
JL
9140 /* Some cases need to create a label to jump to
9141 in order to properly fall through.
9142 These cases set DROP_THROUGH_LABEL nonzero. */
9143 rtx drop_through_label = 0;
9144 rtx temp;
b93a436e
JL
9145 int i;
9146 tree type;
9147 enum machine_mode mode;
ca695ac9 9148
dbecbbe4
JL
9149#ifdef MAX_INTEGER_COMPUTATION_MODE
9150 check_max_integer_computation_mode (exp);
9151#endif
9152
b93a436e 9153 emit_queue ();
ca695ac9 9154
b93a436e 9155 switch (code)
ca695ac9 9156 {
b93a436e 9157 case ERROR_MARK:
ca695ac9 9158 break;
bbf6f052 9159
b93a436e
JL
9160 case INTEGER_CST:
9161 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9162 if (temp)
9163 emit_jump (temp);
9164 break;
bbf6f052 9165
b93a436e
JL
9166#if 0
9167 /* This is not true with #pragma weak */
9168 case ADDR_EXPR:
9169 /* The address of something can never be zero. */
9170 if (if_true_label)
9171 emit_jump (if_true_label);
9172 break;
9173#endif
bbf6f052 9174
b93a436e
JL
9175 case NOP_EXPR:
9176 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9177 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
b4e3fabb
RK
9178 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9179 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
b93a436e
JL
9180 goto normal;
9181 case CONVERT_EXPR:
9182 /* If we are narrowing the operand, we have to do the compare in the
9183 narrower mode. */
9184 if ((TYPE_PRECISION (TREE_TYPE (exp))
9185 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9186 goto normal;
9187 case NON_LVALUE_EXPR:
9188 case REFERENCE_EXPR:
9189 case ABS_EXPR:
9190 case NEGATE_EXPR:
9191 case LROTATE_EXPR:
9192 case RROTATE_EXPR:
9193 /* These cannot change zero->non-zero or vice versa. */
9194 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9195 break;
bbf6f052 9196
14a774a9
RK
9197 case WITH_RECORD_EXPR:
9198 /* Put the object on the placeholder list, recurse through our first
9199 operand, and pop the list. */
9200 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9201 placeholder_list);
9202 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9203 placeholder_list = TREE_CHAIN (placeholder_list);
9204 break;
9205
b93a436e
JL
9206#if 0
9207 /* This is never less insns than evaluating the PLUS_EXPR followed by
9208 a test and can be longer if the test is eliminated. */
9209 case PLUS_EXPR:
9210 /* Reduce to minus. */
9211 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9212 TREE_OPERAND (exp, 0),
9213 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9214 TREE_OPERAND (exp, 1))));
9215 /* Process as MINUS. */
ca695ac9 9216#endif
bbf6f052 9217
b93a436e
JL
9218 case MINUS_EXPR:
9219 /* Non-zero iff operands of minus differ. */
b30f05db
BS
9220 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9221 TREE_OPERAND (exp, 0),
9222 TREE_OPERAND (exp, 1)),
9223 NE, NE, if_false_label, if_true_label);
b93a436e 9224 break;
bbf6f052 9225
b93a436e
JL
9226 case BIT_AND_EXPR:
9227 /* If we are AND'ing with a small constant, do this comparison in the
9228 smallest type that fits. If the machine doesn't have comparisons
9229 that small, it will be converted back to the wider comparison.
9230 This helps if we are testing the sign bit of a narrower object.
9231 combine can't do this for us because it can't know whether a
9232 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
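/* E.g. in `if (x & 0x80)' with X an int, 0x80 fits in 8 bits, so the
   test is done as a QImode comparison; this is how the sign bit of a
   byte-sized object gets tested cheaply.  */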
bbf6f052 9233
b93a436e
JL
9234 if (! SLOW_BYTE_ACCESS
9235 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9236 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
05bccae2 9237 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
b93a436e
JL
9238 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9239 && (type = type_for_mode (mode, 1)) != 0
9240 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9241 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9242 != CODE_FOR_nothing))
9243 {
9244 do_jump (convert (type, exp), if_false_label, if_true_label);
9245 break;
9246 }
9247 goto normal;
bbf6f052 9248
b93a436e
JL
9249 case TRUTH_NOT_EXPR:
9250 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9251 break;
bbf6f052 9252
b93a436e
JL
9253 case TRUTH_ANDIF_EXPR:
9254 if (if_false_label == 0)
9255 if_false_label = drop_through_label = gen_label_rtx ();
9256 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9257 start_cleanup_deferral ();
9258 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9259 end_cleanup_deferral ();
9260 break;
bbf6f052 9261
b93a436e
JL
9262 case TRUTH_ORIF_EXPR:
9263 if (if_true_label == 0)
9264 if_true_label = drop_through_label = gen_label_rtx ();
9265 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9266 start_cleanup_deferral ();
9267 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9268 end_cleanup_deferral ();
9269 break;
bbf6f052 9270
b93a436e
JL
9271 case COMPOUND_EXPR:
9272 push_temp_slots ();
9273 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9274 preserve_temp_slots (NULL_RTX);
9275 free_temp_slots ();
9276 pop_temp_slots ();
9277 emit_queue ();
9278 do_pending_stack_adjust ();
9279 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9280 break;
bbf6f052 9281
b93a436e
JL
9282 case COMPONENT_REF:
9283 case BIT_FIELD_REF:
9284 case ARRAY_REF:
b4e3fabb 9285 case ARRAY_RANGE_REF:
b93a436e 9286 {
770ae6cc
RK
9287 HOST_WIDE_INT bitsize, bitpos;
9288 int unsignedp;
b93a436e
JL
9289 enum machine_mode mode;
9290 tree type;
9291 tree offset;
9292 int volatilep = 0;
bbf6f052 9293
b93a436e
JL
9294 /* Get description of this reference. We don't actually care
9295 about the underlying object here. */
19caa751 9296 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
a06ef755 9297 &unsignedp, &volatilep);
bbf6f052 9298
b93a436e
JL
9299 type = type_for_size (bitsize, unsignedp);
9300 if (! SLOW_BYTE_ACCESS
9301 && type != 0 && bitsize >= 0
9302 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9303 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9304 != CODE_FOR_nothing))
9305 {
9306 do_jump (convert (type, exp), if_false_label, if_true_label);
9307 break;
9308 }
9309 goto normal;
9310 }
bbf6f052 9311
b93a436e
JL
9312 case COND_EXPR:
9313 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9314 if (integer_onep (TREE_OPERAND (exp, 1))
9315 && integer_zerop (TREE_OPERAND (exp, 2)))
9316 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
bbf6f052 9317
b93a436e
JL
9318 else if (integer_zerop (TREE_OPERAND (exp, 1))
9319 && integer_onep (TREE_OPERAND (exp, 2)))
9320 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
bbf6f052 9321
b93a436e
JL
9322 else
9323 {
b3694847 9324 rtx label1 = gen_label_rtx ();
b93a436e 9325 drop_through_label = gen_label_rtx ();
bbf6f052 9326
b93a436e 9327 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
bbf6f052 9328
b93a436e
JL
9329 start_cleanup_deferral ();
9330 /* Now the THEN-expression. */
9331 do_jump (TREE_OPERAND (exp, 1),
9332 if_false_label ? if_false_label : drop_through_label,
9333 if_true_label ? if_true_label : drop_through_label);
9334 /* In case the do_jump just above never jumps. */
9335 do_pending_stack_adjust ();
9336 emit_label (label1);
bbf6f052 9337
b93a436e
JL
9338 /* Now the ELSE-expression. */
9339 do_jump (TREE_OPERAND (exp, 2),
9340 if_false_label ? if_false_label : drop_through_label,
9341 if_true_label ? if_true_label : drop_through_label);
9342 end_cleanup_deferral ();
9343 }
9344 break;
bbf6f052 9345
b93a436e
JL
9346 case EQ_EXPR:
9347 {
9348 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
bbf6f052 9349
9ec36da5
JL
9350 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9351 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
8d62b411
AS
9352 {
9353 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9354 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9355 do_jump
9356 (fold
9357 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9358 fold (build (EQ_EXPR, TREE_TYPE (exp),
9359 fold (build1 (REALPART_EXPR,
9360 TREE_TYPE (inner_type),
9361 exp0)),
9362 fold (build1 (REALPART_EXPR,
9363 TREE_TYPE (inner_type),
9364 exp1)))),
9365 fold (build (EQ_EXPR, TREE_TYPE (exp),
9366 fold (build1 (IMAGPART_EXPR,
9367 TREE_TYPE (inner_type),
9368 exp0)),
9369 fold (build1 (IMAGPART_EXPR,
9370 TREE_TYPE (inner_type),
9371 exp1)))))),
9372 if_false_label, if_true_label);
9373 }
9ec36da5
JL
9374
9375 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9376 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9377
b93a436e 9378 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
1eb8759b 9379 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
b93a436e
JL
9380 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9381 else
b30f05db 9382 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
b93a436e
JL
9383 break;
9384 }
bbf6f052 9385
b93a436e
JL
9386 case NE_EXPR:
9387 {
9388 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
bbf6f052 9389
9ec36da5
JL
9390 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9391 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
8d62b411
AS
9392 {
9393 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9394 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9395 do_jump
9396 (fold
9397 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9398 fold (build (NE_EXPR, TREE_TYPE (exp),
9399 fold (build1 (REALPART_EXPR,
9400 TREE_TYPE (inner_type),
9401 exp0)),
9402 fold (build1 (REALPART_EXPR,
9403 TREE_TYPE (inner_type),
9404 exp1)))),
9405 fold (build (NE_EXPR, TREE_TYPE (exp),
9406 fold (build1 (IMAGPART_EXPR,
9407 TREE_TYPE (inner_type),
9408 exp0)),
9409 fold (build1 (IMAGPART_EXPR,
9410 TREE_TYPE (inner_type),
9411 exp1)))))),
9412 if_false_label, if_true_label);
9413 }
9ec36da5
JL
9414
9415 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9416 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9417
b93a436e 9418 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
1eb8759b 9419 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
b93a436e
JL
9420 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9421 else
b30f05db 9422 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
b93a436e
JL
9423 break;
9424 }
bbf6f052 9425
b93a436e 9426 case LT_EXPR:
1c0290ea
BS
9427 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9428 if (GET_MODE_CLASS (mode) == MODE_INT
1eb8759b 9429 && ! can_compare_p (LT, mode, ccp_jump))
b93a436e
JL
9430 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9431 else
b30f05db 9432 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
b93a436e 9433 break;
bbf6f052 9434
b93a436e 9435 case LE_EXPR:
1c0290ea
BS
9436 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9437 if (GET_MODE_CLASS (mode) == MODE_INT
1eb8759b 9438 && ! can_compare_p (LE, mode, ccp_jump))
b93a436e
JL
9439 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9440 else
b30f05db 9441 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
b93a436e 9442 break;
bbf6f052 9443
b93a436e 9444 case GT_EXPR:
1c0290ea
BS
9445 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9446 if (GET_MODE_CLASS (mode) == MODE_INT
1eb8759b 9447 && ! can_compare_p (GT, mode, ccp_jump))
b93a436e
JL
9448 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9449 else
b30f05db 9450 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
b93a436e 9451 break;
bbf6f052 9452
b93a436e 9453 case GE_EXPR:
1c0290ea
BS
9454 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9455 if (GET_MODE_CLASS (mode) == MODE_INT
1eb8759b 9456 && ! can_compare_p (GE, mode, ccp_jump))
b93a436e
JL
9457 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9458 else
b30f05db 9459 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
b93a436e 9460 break;
bbf6f052 9461
1eb8759b
RH
9462 case UNORDERED_EXPR:
9463 case ORDERED_EXPR:
9464 {
9465 enum rtx_code cmp, rcmp;
9466 int do_rev;
9467
9468 if (code == UNORDERED_EXPR)
9469 cmp = UNORDERED, rcmp = ORDERED;
9470 else
9471 cmp = ORDERED, rcmp = UNORDERED;
3a94c984 9472 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
1eb8759b
RH
9473
9474 do_rev = 0;
9475 if (! can_compare_p (cmp, mode, ccp_jump)
9476 && (can_compare_p (rcmp, mode, ccp_jump)
9477 /* If the target doesn't provide either UNORDERED or ORDERED
9478 comparisons, canonicalize on UNORDERED for the library. */
9479 || rcmp == UNORDERED))
9480 do_rev = 1;
9481
9482 if (! do_rev)
9483 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9484 else
9485 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9486 }
9487 break;
9488
9489 {
9490 enum rtx_code rcode1;
9491 enum tree_code tcode2;
9492
9493 case UNLT_EXPR:
9494 rcode1 = UNLT;
9495 tcode2 = LT_EXPR;
9496 goto unordered_bcc;
9497 case UNLE_EXPR:
9498 rcode1 = UNLE;
9499 tcode2 = LE_EXPR;
9500 goto unordered_bcc;
9501 case UNGT_EXPR:
9502 rcode1 = UNGT;
9503 tcode2 = GT_EXPR;
9504 goto unordered_bcc;
9505 case UNGE_EXPR:
9506 rcode1 = UNGE;
9507 tcode2 = GE_EXPR;
9508 goto unordered_bcc;
9509 case UNEQ_EXPR:
9510 rcode1 = UNEQ;
9511 tcode2 = EQ_EXPR;
9512 goto unordered_bcc;
7913f3d0 9513
1eb8759b
RH
9514 unordered_bcc:
9515 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9516 if (can_compare_p (rcode1, mode, ccp_jump))
9517 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9518 if_true_label);
9519 else
9520 {
9521 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9522 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9523 tree cmp0, cmp1;
9524
3a94c984 9525 /* If the target doesn't support combined unordered
1eb8759b
RH
9526 compares, decompose into UNORDERED + comparison. */
9527 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9528 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9529 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9530 do_jump (exp, if_false_label, if_true_label);
9531 }
9532 }
9533 break;
9534
5f2d6cfa
MM
9535 /* Special case:
9536 __builtin_expect (<test>, 0) and
9537 __builtin_expect (<test>, 1)
9538
9539 We need to do this here, so that <test> is not converted to a SCC
9540 operation on machines that use condition code registers and COMPARE
9541 like the PowerPC, and then the jump is done based on whether the SCC
9542 operation produced a 1 or 0. */
9543 case CALL_EXPR:
9544 /* Check for a built-in function. */
9545 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
9546 {
9547 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9548 tree arglist = TREE_OPERAND (exp, 1);
9549
9550 if (TREE_CODE (fndecl) == FUNCTION_DECL
9551 && DECL_BUILT_IN (fndecl)
9552 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
9553 && arglist != NULL_TREE
9554 && TREE_CHAIN (arglist) != NULL_TREE)
9555 {
9556 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
9557 if_true_label);
9558
9559 if (seq != NULL_RTX)
9560 {
9561 emit_insn (seq);
9562 return;
9563 }
9564 }
9565 }
9566 /* Fall through and generate the normal code. */
9567
b93a436e
JL
9568 default:
9569 normal:
9570 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9571#if 0
9572 /* This is not needed any more and causes poor code since it causes
9573 comparisons and tests from non-SI objects to have different code
9574 sequences. */
9575 /* Copy to register to avoid generating bad insns by cse
9576 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9577 if (!cse_not_expected && GET_CODE (temp) == MEM)
9578 temp = copy_to_reg (temp);
ca695ac9 9579#endif
b93a436e 9580 do_pending_stack_adjust ();
b30f05db
BS
9581 /* Do any postincrements in the expression that was tested. */
9582 emit_queue ();
9583
998a298e
GK
9584 if (GET_CODE (temp) == CONST_INT
9585 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
9586 || GET_CODE (temp) == LABEL_REF)
b30f05db
BS
9587 {
9588 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9589 if (target)
9590 emit_jump (target);
9591 }
b93a436e 9592 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
1eb8759b 9593 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
b93a436e
JL
9594 /* Note swapping the labels gives us not-equal. */
9595 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9596 else if (GET_MODE (temp) != VOIDmode)
b30f05db
BS
9597 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9598 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
a06ef755 9599 GET_MODE (temp), NULL_RTX,
b30f05db 9600 if_false_label, if_true_label);
b93a436e
JL
9601 else
9602 abort ();
9603 }
bbf6f052 9604
b93a436e
JL
9605 if (drop_through_label)
9606 {
9607 /* If do_jump produces code that might be jumped around,
9608 do any stack adjusts from that code, before the place
9609 where control merges in. */
9610 do_pending_stack_adjust ();
9611 emit_label (drop_through_label);
9612 }
bbf6f052 9613}
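/* Illustrative sketch, not part of the original expr.c: the control flow
   do_jump emits for a TRUTH_ANDIF_EXPR such as `a && b'.  The first
   operand jumps straight to the false label, so the second operand is
   never evaluated when `a' is false.  The function and label names are
   hypothetical.  */
#if 0
static void
andif_jump_sketch (int a, int b)
{
  if (! a)
    goto if_false_label;   /* do_jump (op0, if_false_label, NULL_RTX) */
  if (! b)
    goto if_false_label;   /* do_jump (op1, if_false_label, if_true_label) */
  /* if_true_label: code for the taken branch would follow here.  */
  return;
 if_false_label:
  return;
}
#endif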
b93a436e
JL
9614\f
9615/* Given a comparison expression EXP for values too wide to be compared
9616 with one insn, test the comparison and jump to the appropriate label.
9617 The code of EXP is ignored; we always test GT if SWAP is 0,
9618 and LT if SWAP is 1. */
bbf6f052 9619
b93a436e
JL
9620static void
9621do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9622 tree exp;
9623 int swap;
9624 rtx if_false_label, if_true_label;
9625{
9626 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9627 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9628 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
b93a436e 9629 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
bbf6f052 9630
b30f05db 9631 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
f81497d9
RS
9632}
9633
b93a436e
JL
9634/* Compare OP0 with OP1, word at a time, in mode MODE.
9635 UNSIGNEDP says to do unsigned comparison.
9636 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
f81497d9 9637
b93a436e
JL
9638void
9639do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9640 enum machine_mode mode;
9641 int unsignedp;
9642 rtx op0, op1;
9643 rtx if_false_label, if_true_label;
f81497d9 9644{
b93a436e
JL
9645 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9646 rtx drop_through_label = 0;
9647 int i;
f81497d9 9648
b93a436e
JL
9649 if (! if_true_label || ! if_false_label)
9650 drop_through_label = gen_label_rtx ();
9651 if (! if_true_label)
9652 if_true_label = drop_through_label;
9653 if (! if_false_label)
9654 if_false_label = drop_through_label;
f81497d9 9655
b93a436e
JL
9656 /* Compare a word at a time, high order first. */
9657 for (i = 0; i < nwords; i++)
9658 {
b93a436e 9659 rtx op0_word, op1_word;
bbf6f052 9660
b93a436e
JL
9661 if (WORDS_BIG_ENDIAN)
9662 {
9663 op0_word = operand_subword_force (op0, i, mode);
9664 op1_word = operand_subword_force (op1, i, mode);
9665 }
9666 else
9667 {
9668 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9669 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9670 }
bbf6f052 9671
b93a436e 9672 /* All but high-order word must be compared as unsigned. */
b30f05db 9673 do_compare_rtx_and_jump (op0_word, op1_word, GT,
a06ef755 9674 (unsignedp || i > 0), word_mode, NULL_RTX,
b30f05db 9675 NULL_RTX, if_true_label);
bbf6f052 9676
b93a436e 9677 /* Consider lower words only if these are equal. */
b30f05db 9678 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
a06ef755 9679 NULL_RTX, NULL_RTX, if_false_label);
b93a436e 9680 }
bbf6f052 9681
b93a436e
JL
9682 if (if_false_label)
9683 emit_jump (if_false_label);
9684 if (drop_through_label)
9685 emit_label (drop_through_label);
bbf6f052
RK
9686}
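/* Illustrative sketch, not part of the original expr.c: the word-at-a-time
   GT comparison above, written in plain C for a two-word value.  The
   high-order word uses the caller's signedness; every lower word is
   compared unsigned, and is consulted only when all higher words are
   equal.  */
#if 0
static int
greater_by_parts_sketch (long hi0, unsigned long lo0,
                         long hi1, unsigned long lo1)
{
  if (hi0 > hi1)		/* signed compare of the high-order word */
    return 1;			/* jump to if_true_label */
  if (hi0 != hi1)
    return 0;			/* jump to if_false_label */
  return lo0 > lo1;		/* lower words are always unsigned */
}
#endif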
9687
b93a436e
JL
9688/* Given an EQ_EXPR expression EXP for values too wide to be compared
9689 with one insn, test the comparison and jump to the appropriate label. */
bbf6f052 9690
b93a436e
JL
9691static void
9692do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9693 tree exp;
9694 rtx if_false_label, if_true_label;
bbf6f052 9695{
b93a436e
JL
9696 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9697 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9698 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9699 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9700 int i;
9701 rtx drop_through_label = 0;
bbf6f052 9702
b93a436e
JL
9703 if (! if_false_label)
9704 drop_through_label = if_false_label = gen_label_rtx ();
bbf6f052 9705
b93a436e 9706 for (i = 0; i < nwords; i++)
b30f05db
BS
9707 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9708 operand_subword_force (op1, i, mode),
9709 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
a06ef755 9710 word_mode, NULL_RTX, if_false_label, NULL_RTX);
bbf6f052 9711
b93a436e
JL
9712 if (if_true_label)
9713 emit_jump (if_true_label);
9714 if (drop_through_label)
9715 emit_label (drop_through_label);
bbf6f052 9716}
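/* Illustrative sketch, not part of the original expr.c: the loop above in
   plain C for a two-word operand.  Any word mismatch proves inequality,
   so each word gets one EQ test that jumps to the false label.  */
#if 0
static int
equal_by_parts_sketch (unsigned long a0, unsigned long a1,
                       unsigned long b0, unsigned long b1)
{
  if (a0 != b0)
    return 0;			/* jump to if_false_label */
  if (a1 != b1)
    return 0;
  return 1;			/* drop through to if_true_label */
}
#endif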
b93a436e
JL
9717\f
9718/* Jump according to whether OP0 is 0.
9719 We assume that OP0 has an integer mode that is too wide
9720 for the available compare insns. */
bbf6f052 9721
f5963e61 9722void
b93a436e
JL
9723do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9724 rtx op0;
9725 rtx if_false_label, if_true_label;
ca695ac9 9726{
b93a436e
JL
9727 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9728 rtx part;
9729 int i;
9730 rtx drop_through_label = 0;
bbf6f052 9731
b93a436e
JL
9732 /* The fastest way of doing this comparison on almost any machine is to
9733 "or" all the words and compare the result. If all have to be loaded
9734 from memory and this is a very wide item, it's possible this may
9735 be slower, but that's highly unlikely. */
bbf6f052 9736
b93a436e
JL
9737 part = gen_reg_rtx (word_mode);
9738 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9739 for (i = 1; i < nwords && part != 0; i++)
9740 part = expand_binop (word_mode, ior_optab, part,
9741 operand_subword_force (op0, i, GET_MODE (op0)),
9742 part, 1, OPTAB_WIDEN);
bbf6f052 9743
b93a436e
JL
9744 if (part != 0)
9745 {
b30f05db 9746 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
a06ef755 9747 NULL_RTX, if_false_label, if_true_label);
bbf6f052 9748
b93a436e
JL
9749 return;
9750 }
bbf6f052 9751
b93a436e
JL
9752 /* If we couldn't do the "or" simply, do this with a series of compares. */
9753 if (! if_false_label)
9754 drop_through_label = if_false_label = gen_label_rtx ();
bbf6f052 9755
b93a436e 9756 for (i = 0; i < nwords; i++)
b30f05db 9757 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
a06ef755 9758 const0_rtx, EQ, 1, word_mode, NULL_RTX,
b30f05db 9759 if_false_label, NULL_RTX);
bbf6f052 9760
b93a436e
JL
9761 if (if_true_label)
9762 emit_jump (if_true_label);
0f41302f 9763
b93a436e
JL
9764 if (drop_through_label)
9765 emit_label (drop_through_label);
bbf6f052 9766}
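/* Illustrative sketch, not part of the original expr.c: the "or all the
   words" zero test built above, for a four-word value.  A chain of IORs
   plus one compare against zero replaces four separate compares.  */
#if 0
static int
zero_by_ior_sketch (unsigned long w0, unsigned long w1,
                    unsigned long w2, unsigned long w3)
{
  return (w0 | w1 | w2 | w3) == 0;
}
#endif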
b93a436e 9767\f
b30f05db 9768/* Generate code for a comparison of OP0 and OP1 with rtx code CODE
b93a436e
JL
9769 (including code to compute the values to be compared)
9770 and set (CC0) according to the result.
b30f05db 9771 The decision as to signed or unsigned comparison must be made by the caller.
bbf6f052 9772
b93a436e 9773 We force a stack adjustment unless there are currently
b30f05db 9774 things pushed on the stack that aren't yet used.
ca695ac9 9775
b30f05db 9776 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
a06ef755 9777 compared. */
b30f05db
BS
9778
9779rtx
a06ef755 9780compare_from_rtx (op0, op1, code, unsignedp, mode, size)
b3694847 9781 rtx op0, op1;
b30f05db
BS
9782 enum rtx_code code;
9783 int unsignedp;
9784 enum machine_mode mode;
9785 rtx size;
b93a436e 9786{
b30f05db 9787 rtx tem;
76bbe028 9788
b30f05db
BS
9789 /* If one operand is constant, make it the second one. Only do this
9790 if the other operand is not constant as well. */
ca695ac9 9791
8c9864f3 9792 if (swap_commutative_operands_p (op0, op1))
bbf6f052 9793 {
b30f05db
BS
9794 tem = op0;
9795 op0 = op1;
9796 op1 = tem;
9797 code = swap_condition (code);
ca695ac9 9798 }
bbf6f052 9799
b30f05db 9800 if (flag_force_mem)
b93a436e 9801 {
b30f05db
BS
9802 op0 = force_not_mem (op0);
9803 op1 = force_not_mem (op1);
9804 }
bbf6f052 9805
b30f05db
BS
9806 do_pending_stack_adjust ();
9807
9808 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9809 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9810 return tem;
9811
9812#if 0
9813 /* There's no need to do this now that combine.c can eliminate lots of
9814 sign extensions. This can be less efficient in certain cases on other
9815 machines. */
9816
9817 /* If this is a signed equality comparison, we can do it as an
9818 unsigned comparison since zero-extension is cheaper than sign
9819 extension and comparisons with zero are done as unsigned. This is
9820 the case even on machines that can do fast sign extension, since
9821 zero-extension is easier to combine with other operations than
9822 sign-extension is. If we are comparing against a constant, we must
9823 convert it to what it would look like unsigned. */
9824 if ((code == EQ || code == NE) && ! unsignedp
9825 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9826 {
9827 if (GET_CODE (op1) == CONST_INT
9828 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9829 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9830 unsignedp = 1;
b93a436e
JL
9831 }
9832#endif
3a94c984 9833
a06ef755 9834 emit_cmp_insn (op0, op1, code, size, mode, unsignedp);
0f41302f 9835
b30f05db 9836 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
ca695ac9 9837}
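/* Illustrative sketch, not part of the original expr.c: the operand
   canonicalization performed above.  Moving a constant into the second
   position only preserves meaning if the comparison code is reversed
   with it, as swap_condition does: `5 < x' becomes `x > 5'.  The enum
   and names below are hypothetical.  */
#if 0
enum sketch_code { SK_LT, SK_GT };

static void
canonicalize_operands_sketch (int *op0, int *op1, enum sketch_code *code)
{
  int tem = *op0;
  *op0 = *op1;
  *op1 = tem;
  *code = (*code == SK_LT ? SK_GT : SK_LT);	/* swap_condition */
}
#endif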
bbf6f052 9838
b30f05db 9839/* Like do_compare_and_jump but expects the values to compare as two rtx's.
b93a436e 9840 The decision as to signed or unsigned comparison must be made by the caller.
bbf6f052 9841
b93a436e 9842 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
a06ef755 9843 compared. */
ca695ac9 9844
b30f05db 9845void
a06ef755 9846do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size,
b30f05db 9847 if_false_label, if_true_label)
b3694847 9848 rtx op0, op1;
b93a436e
JL
9849 enum rtx_code code;
9850 int unsignedp;
9851 enum machine_mode mode;
9852 rtx size;
b30f05db 9853 rtx if_false_label, if_true_label;
bbf6f052 9854{
b93a436e 9855 rtx tem;
b30f05db
BS
9856 int dummy_true_label = 0;
9857
9858 /* Reverse the comparison if that is safe and we want to jump if it is
9859 false. */
9860 if (! if_true_label && ! FLOAT_MODE_P (mode))
9861 {
9862 if_true_label = if_false_label;
9863 if_false_label = 0;
9864 code = reverse_condition (code);
9865 }
bbf6f052 9866
b93a436e
JL
9867 /* If one operand is constant, make it the second one. Only do this
9868 if the other operand is not constant as well. */
e7c33f54 9869
8c9864f3 9870 if (swap_commutative_operands_p (op0, op1))
ca695ac9 9871 {
b93a436e
JL
9872 tem = op0;
9873 op0 = op1;
9874 op1 = tem;
9875 code = swap_condition (code);
9876 }
bbf6f052 9877
b93a436e
JL
9878 if (flag_force_mem)
9879 {
9880 op0 = force_not_mem (op0);
9881 op1 = force_not_mem (op1);
9882 }
bbf6f052 9883
b93a436e 9884 do_pending_stack_adjust ();
ca695ac9 9885
b93a436e
JL
9886 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9887 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
b30f05db
BS
9888 {
9889 if (tem == const_true_rtx)
9890 {
9891 if (if_true_label)
9892 emit_jump (if_true_label);
9893 }
9894 else
9895 {
9896 if (if_false_label)
9897 emit_jump (if_false_label);
9898 }
9899 return;
9900 }
ca695ac9 9901
b93a436e
JL
9902#if 0
9903 /* There's no need to do this now that combine.c can eliminate lots of
9904 sign extensions. This can be less efficient in certain cases on other
9905 machines. */
ca695ac9 9906
b93a436e
JL
9907 /* If this is a signed equality comparison, we can do it as an
9908 unsigned comparison since zero-extension is cheaper than sign
9909 extension and comparisons with zero are done as unsigned. This is
9910 the case even on machines that can do fast sign extension, since
9911 zero-extension is easier to combine with other operations than
9912 sign-extension is. If we are comparing against a constant, we must
9913 convert it to what it would look like unsigned. */
9914 if ((code == EQ || code == NE) && ! unsignedp
9915 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9916 {
9917 if (GET_CODE (op1) == CONST_INT
9918 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9919 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9920 unsignedp = 1;
9921 }
9922#endif
ca695ac9 9923
b30f05db
BS
9924 if (! if_true_label)
9925 {
9926 dummy_true_label = 1;
9927 if_true_label = gen_label_rtx ();
9928 }
9929
a06ef755 9930 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
b30f05db
BS
9931 if_true_label);
9932
9933 if (if_false_label)
9934 emit_jump (if_false_label);
9935 if (dummy_true_label)
9936 emit_label (if_true_label);
9937}
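/* Illustrative sketch, not part of the original expr.c: why the label
   reversal above is fenced off for floating modes.  For integers,
   "jump if (a < b) is false" is exactly "jump if (a >= b) is true",
   but when either operand is a NaN, `a < b' and `a >= b' are both
   false, so the rewrite would change behavior.  */
#if 0
static int
reverse_condition_sketch (int a, int b)
{
  return ! (a < b) == (a >= b);	/* always 1 for integer operands */
}
#endif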
9938
9939/* Generate code for a comparison expression EXP (including code to compute
9940 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
9941 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
9942 generated code will drop through.
9943 SIGNED_CODE should be the rtx operation for this comparison for
9944 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
9945
9946 We force a stack adjustment unless there are currently
9947 things pushed on the stack that aren't yet used. */
9948
9949static void
9950do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
9951 if_true_label)
b3694847 9952 tree exp;
b30f05db
BS
9953 enum rtx_code signed_code, unsigned_code;
9954 rtx if_false_label, if_true_label;
9955{
b3694847
SS
9956 rtx op0, op1;
9957 tree type;
9958 enum machine_mode mode;
b30f05db
BS
9959 int unsignedp;
9960 enum rtx_code code;
9961
9962 /* Don't crash if the comparison was erroneous. */
a06ef755 9963 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
b30f05db
BS
9964 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9965 return;
9966
a06ef755 9967 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6b16805e
JJ
9968 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
9969 return;
9970
b30f05db
BS
9971 type = TREE_TYPE (TREE_OPERAND (exp, 0));
9972 mode = TYPE_MODE (type);
6b16805e
JJ
9973 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
9974 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
9975 || (GET_MODE_BITSIZE (mode)
31a7659b
JDA
9976 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
9977 1)))))))
6b16805e
JJ
9978 {
9979 /* op0 might have been replaced by promoted constant, in which
9980 case the type of second argument should be used. */
9981 type = TREE_TYPE (TREE_OPERAND (exp, 1));
9982 mode = TYPE_MODE (type);
9983 }
b30f05db
BS
9984 unsignedp = TREE_UNSIGNED (type);
9985 code = unsignedp ? unsigned_code : signed_code;
9986
9987#ifdef HAVE_canonicalize_funcptr_for_compare
9988 /* If function pointers need to be "canonicalized" before they can
9989 be reliably compared, then canonicalize them. */
9990 if (HAVE_canonicalize_funcptr_for_compare
9991 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9992 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9993 == FUNCTION_TYPE))
9994 {
9995 rtx new_op0 = gen_reg_rtx (mode);
9996
9997 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
9998 op0 = new_op0;
9999 }
10000
10001 if (HAVE_canonicalize_funcptr_for_compare
10002 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10003 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10004 == FUNCTION_TYPE))
10005 {
10006 rtx new_op1 = gen_reg_rtx (mode);
10007
10008 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10009 op1 = new_op1;
10010 }
10011#endif
10012
10013 /* Do any postincrements in the expression that was tested. */
10014 emit_queue ();
10015
10016 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10017 ((mode == BLKmode)
10018 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
b30f05db 10019 if_false_label, if_true_label);
b93a436e
JL
10020}
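/* Illustrative sketch, not part of the original expr.c: the
   signed_code/unsigned_code selection above.  The same source-level `<'
   must become a signed (LT) or unsigned (LTU) machine comparison
   depending on TREE_UNSIGNED of the operand type.  */
#if 0
static int
less_than_sketch (int a, int b, int unsignedp)
{
  if (unsignedp)
    return (unsigned int) a < (unsigned int) b;	/* LTU */
  return a < b;					/* LT */
}
#endif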
10021\f
10022/* Generate code to calculate EXP using a store-flag instruction
10023 and return an rtx for the result. EXP is either a comparison
10024 or a TRUTH_NOT_EXPR whose operand is a comparison.
ca695ac9 10025
b93a436e 10026 If TARGET is nonzero, store the result there if convenient.
ca695ac9 10027
b93a436e
JL
10028 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
10029 cheap.
ca695ac9 10030
b93a436e
JL
10031 Return zero if there is no suitable set-flag instruction
10032 available on this machine.
ca695ac9 10033
b93a436e
JL
10034 Once expand_expr has been called on the arguments of the comparison,
10035 we are committed to doing the store flag, since it is not safe to
10036 re-evaluate the expression. We emit the store-flag insn by calling
10037 emit_store_flag, but only expand the arguments if we have a reason
10038 to believe that emit_store_flag will be successful. If we think that
10039 it will, but it isn't, we have to simulate the store-flag with a
10040 set/jump/set sequence. */
ca695ac9 10041
b93a436e
JL
10042static rtx
10043do_store_flag (exp, target, mode, only_cheap)
10044 tree exp;
10045 rtx target;
10046 enum machine_mode mode;
10047 int only_cheap;
10048{
10049 enum rtx_code code;
10050 tree arg0, arg1, type;
10051 tree tem;
10052 enum machine_mode operand_mode;
10053 int invert = 0;
10054 int unsignedp;
10055 rtx op0, op1;
10056 enum insn_code icode;
10057 rtx subtarget = target;
381127e8 10058 rtx result, label;
ca695ac9 10059
b93a436e
JL
10060 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10061 result at the end. We can't simply invert the test since it would
10062 have already been inverted if it were valid. This case occurs for
10063 some floating-point comparisons. */
ca695ac9 10064
b93a436e
JL
10065 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10066 invert = 1, exp = TREE_OPERAND (exp, 0);
ca695ac9 10067
b93a436e
JL
10068 arg0 = TREE_OPERAND (exp, 0);
10069 arg1 = TREE_OPERAND (exp, 1);
5129d2ce
AH
10070
10071 /* Don't crash if the comparison was erroneous. */
10072 if (arg0 == error_mark_node || arg1 == error_mark_node)
10073 return const0_rtx;
10074
b93a436e
JL
10075 type = TREE_TYPE (arg0);
10076 operand_mode = TYPE_MODE (type);
10077 unsignedp = TREE_UNSIGNED (type);
ca695ac9 10078
b93a436e
JL
10079 /* We won't bother with BLKmode store-flag operations because it would mean
10080 passing a lot of information to emit_store_flag. */
10081 if (operand_mode == BLKmode)
10082 return 0;
ca695ac9 10083
b93a436e
JL
10084 /* We won't bother with store-flag operations involving function pointers
10085 when function pointers must be canonicalized before comparisons. */
10086#ifdef HAVE_canonicalize_funcptr_for_compare
10087 if (HAVE_canonicalize_funcptr_for_compare
10088 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10089 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10090 == FUNCTION_TYPE))
10091 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10092 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10093 == FUNCTION_TYPE))))
10094 return 0;
ca695ac9
JB
10095#endif
10096
b93a436e
JL
10097 STRIP_NOPS (arg0);
10098 STRIP_NOPS (arg1);
ca695ac9 10099
b93a436e
JL
10100 /* Get the rtx comparison code to use. We know that EXP is a comparison
10101 operation of some type. Some comparisons against 1 and -1 can be
10102 converted to comparisons with zero. Do so here so that the tests
10103 below will be aware that we have a comparison with zero. These
10104 tests will not catch constants in the first operand, but constants
10105 are rarely passed as the first operand. */
ca695ac9 10106
b93a436e
JL
10107 switch (TREE_CODE (exp))
10108 {
10109 case EQ_EXPR:
10110 code = EQ;
bbf6f052 10111 break;
b93a436e
JL
10112 case NE_EXPR:
10113 code = NE;
bbf6f052 10114 break;
b93a436e
JL
10115 case LT_EXPR:
10116 if (integer_onep (arg1))
10117 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10118 else
10119 code = unsignedp ? LTU : LT;
ca695ac9 10120 break;
b93a436e
JL
10121 case LE_EXPR:
10122 if (! unsignedp && integer_all_onesp (arg1))
10123 arg1 = integer_zero_node, code = LT;
10124 else
10125 code = unsignedp ? LEU : LE;
ca695ac9 10126 break;
b93a436e
JL
10127 case GT_EXPR:
10128 if (! unsignedp && integer_all_onesp (arg1))
10129 arg1 = integer_zero_node, code = GE;
10130 else
10131 code = unsignedp ? GTU : GT;
10132 break;
10133 case GE_EXPR:
10134 if (integer_onep (arg1))
10135 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10136 else
10137 code = unsignedp ? GEU : GE;
ca695ac9 10138 break;
1eb8759b
RH
10139
10140 case UNORDERED_EXPR:
10141 code = UNORDERED;
10142 break;
10143 case ORDERED_EXPR:
10144 code = ORDERED;
10145 break;
10146 case UNLT_EXPR:
10147 code = UNLT;
10148 break;
10149 case UNLE_EXPR:
10150 code = UNLE;
10151 break;
10152 case UNGT_EXPR:
10153 code = UNGT;
10154 break;
10155 case UNGE_EXPR:
10156 code = UNGE;
10157 break;
10158 case UNEQ_EXPR:
10159 code = UNEQ;
10160 break;
1eb8759b 10161
ca695ac9 10162 default:
b93a436e 10163 abort ();
bbf6f052 10164 }
bbf6f052 10165
b93a436e
JL
10166 /* Put a constant second. */
10167 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10168 {
10169 tem = arg0; arg0 = arg1; arg1 = tem;
10170 code = swap_condition (code);
ca695ac9 10171 }
bbf6f052 10172
b93a436e
JL
10173 /* If this is an equality or inequality test of a single bit, we can
10174 do this by shifting the bit being tested to the low-order bit and
10175 masking the result with the constant 1. If the condition was EQ,
10176 we xor it with 1. This does not require an scc insn and is faster
10177 than an scc insn even if we have it. */
d39985fa 10178
b93a436e
JL
10179 if ((code == NE || code == EQ)
10180 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10181 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10182 {
10183 tree inner = TREE_OPERAND (arg0, 0);
10184 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10185 int ops_unsignedp;
bbf6f052 10186
b93a436e
JL
10187 /* If INNER is a right shift of a constant and it plus BITNUM does
10188 not overflow, adjust BITNUM and INNER. */
ca695ac9 10189
b93a436e
JL
10190 if (TREE_CODE (inner) == RSHIFT_EXPR
10191 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10192 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
05bccae2
RK
10193 && bitnum < TYPE_PRECISION (type)
10194 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10195 bitnum - TYPE_PRECISION (type)))
ca695ac9 10196 {
b93a436e
JL
10197 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10198 inner = TREE_OPERAND (inner, 0);
ca695ac9 10199 }
ca695ac9 10200
b93a436e
JL
10201 /* If we are going to be able to omit the AND below, we must do our
10202 operations as unsigned. If we must use the AND, we have a choice.
10203 Normally unsigned is faster, but for some machines signed is. */
10204 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10205#ifdef LOAD_EXTEND_OP
10206 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10207#else
10208 : 1
10209#endif
10210 );
bbf6f052 10211
296b4ed9 10212 if (! get_subtarget (subtarget)
a47fed55 10213 || GET_MODE (subtarget) != operand_mode
e5e809f4 10214 || ! safe_from_p (subtarget, inner, 1))
b93a436e 10215 subtarget = 0;
bbf6f052 10216
b93a436e 10217 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
bbf6f052 10218
b93a436e 10219 if (bitnum != 0)
681cb233 10220 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
b93a436e 10221 size_int (bitnum), subtarget, ops_unsignedp);
bbf6f052 10222
b93a436e
JL
10223 if (GET_MODE (op0) != mode)
10224 op0 = convert_to_mode (mode, op0, ops_unsignedp);
bbf6f052 10225
b93a436e
JL
10226 if ((code == EQ && ! invert) || (code == NE && invert))
10227 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10228 ops_unsignedp, OPTAB_LIB_WIDEN);
bbf6f052 10229
b93a436e
JL
10230 /* Put the AND last so it can combine with more things. */
10231 if (bitnum != TYPE_PRECISION (type) - 1)
22273300 10232 op0 = expand_and (mode, op0, const1_rtx, subtarget);
bbf6f052 10233
b93a436e
JL
10234 return op0;
10235 }
bbf6f052 10236
b93a436e 10237 /* Now see if we are likely to be able to do this. Return if not. */
1eb8759b 10238 if (! can_compare_p (code, operand_mode, ccp_store_flag))
b93a436e 10239 return 0;
1eb8759b 10240
b93a436e
JL
10241 icode = setcc_gen_code[(int) code];
10242 if (icode == CODE_FOR_nothing
a995e389 10243 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
ca695ac9 10244 {
b93a436e
JL
10245 /* We can only do this if it is one of the special cases that
10246 can be handled without an scc insn. */
10247 if ((code == LT && integer_zerop (arg1))
10248 || (! only_cheap && code == GE && integer_zerop (arg1)))
10249 ;
10250 else if (BRANCH_COST >= 0
10251 && ! only_cheap && (code == NE || code == EQ)
10252 && TREE_CODE (type) != REAL_TYPE
10253 && ((abs_optab->handlers[(int) operand_mode].insn_code
10254 != CODE_FOR_nothing)
10255 || (ffs_optab->handlers[(int) operand_mode].insn_code
10256 != CODE_FOR_nothing)))
10257 ;
10258 else
10259 return 0;
ca695ac9 10260 }
3a94c984 10261
296b4ed9 10262 if (! get_subtarget (target)
a47fed55 10263 || GET_MODE (subtarget) != operand_mode
e5e809f4 10264 || ! safe_from_p (subtarget, arg1, 1))
b93a436e
JL
10265 subtarget = 0;
10266
10267 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10268 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10269
10270 if (target == 0)
10271 target = gen_reg_rtx (mode);
10272
10273 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10274 because, if emit_store_flag does anything, it will succeed and
10275 OP0 and OP1 will not be used subsequently. */
ca695ac9 10276
b93a436e
JL
10277 result = emit_store_flag (target, code,
10278 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10279 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10280 operand_mode, unsignedp, 1);
ca695ac9 10281
b93a436e
JL
10282 if (result)
10283 {
10284 if (invert)
10285 result = expand_binop (mode, xor_optab, result, const1_rtx,
10286 result, 0, OPTAB_LIB_WIDEN);
10287 return result;
ca695ac9 10288 }
bbf6f052 10289
b93a436e
JL
10290 /* If this failed, we have to do this with set/compare/jump/set code. */
10291 if (GET_CODE (target) != REG
10292 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10293 target = gen_reg_rtx (GET_MODE (target));
10294
10295 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10296 result = compare_from_rtx (op0, op1, code, unsignedp,
a06ef755 10297 operand_mode, NULL_RTX);
b93a436e
JL
10298 if (GET_CODE (result) == CONST_INT)
10299 return (((result == const0_rtx && ! invert)
10300 || (result != const0_rtx && invert))
10301 ? const0_rtx : const1_rtx);
ca695ac9 10302
8f08e8c0
JL
10303 /* The code of RESULT may not match CODE if compare_from_rtx
10304 decided to swap its operands and reverse the original code.
10305
10306 We know that compare_from_rtx returns either a CONST_INT or
10307 a new comparison code, so it is safe to just extract the
10308 code from RESULT. */
10309 code = GET_CODE (result);
10310
b93a436e
JL
10311 label = gen_label_rtx ();
10312 if (bcc_gen_fctn[(int) code] == 0)
10313 abort ();
0f41302f 10314
b93a436e
JL
10315 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10316 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10317 emit_label (label);
bbf6f052 10318
b93a436e 10319 return target;
ca695ac9 10320}
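/* Illustrative sketch, not part of the original expr.c: the single-bit
   special case near the top of do_store_flag.  `(x & (1 << n)) != 0' is
   computed by shifting the tested bit down to position 0 and masking
   with 1; an EQ test additionally XORs the result with 1.  No scc
   instruction is required.  */
#if 0
static unsigned int
store_flag_bit_sketch (unsigned int x, int bitnum, int test_eq)
{
  unsigned int flag = (x >> bitnum) & 1;	/* the NE result */
  if (test_eq)
    flag ^= 1;					/* invert for EQ */
  return flag;
}
#endif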
b93a436e 10321\f
b93a436e 10322
ad82abb8
ZW
10323/* Stubs in case we haven't got a casesi insn. */
10324#ifndef HAVE_casesi
10325# define HAVE_casesi 0
10326# define gen_casesi(a, b, c, d, e) (0)
10327# define CODE_FOR_casesi CODE_FOR_nothing
10328#endif
10329
10330/* If the machine does not have a case insn that compares the bounds,
10331 this means extra overhead for dispatch tables, which raises the
10332 threshold for using them. */
10333#ifndef CASE_VALUES_THRESHOLD
10334#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
10335#endif /* CASE_VALUES_THRESHOLD */
10336
10337unsigned int
10338case_values_threshold ()
10339{
10340 return CASE_VALUES_THRESHOLD;
10341}
10342
10343/* Attempt to generate a casesi instruction. Returns 1 if successful,
10344 0 otherwise (i.e. if there is no casesi instruction). */
10345int
10346try_casesi (index_type, index_expr, minval, range,
10347 table_label, default_label)
10348 tree index_type, index_expr, minval, range;
10349 rtx table_label ATTRIBUTE_UNUSED;
10350 rtx default_label;
10351{
10352 enum machine_mode index_mode = SImode;
10353 int index_bits = GET_MODE_BITSIZE (index_mode);
10354 rtx op1, op2, index;
10355 enum machine_mode op_mode;
10356
10357 if (! HAVE_casesi)
10358 return 0;
10359
10360 /* Convert the index to SImode. */
10361 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10362 {
10363 enum machine_mode omode = TYPE_MODE (index_type);
10364 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
10365
10366 /* We must handle the endpoints in the original mode. */
10367 index_expr = build (MINUS_EXPR, index_type,
10368 index_expr, minval);
10369 minval = integer_zero_node;
10370 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10371 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
a06ef755 10372 omode, 1, default_label);
ad82abb8
ZW
10373 /* Now we can safely truncate. */
10374 index = convert_to_mode (index_mode, index, 0);
10375 }
10376 else
10377 {
10378 if (TYPE_MODE (index_type) != index_mode)
10379 {
10380 index_expr = convert (type_for_size (index_bits, 0),
10381 index_expr);
10382 index_type = TREE_TYPE (index_expr);
10383 }
10384
10385 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10386 }
10387 emit_queue ();
10388 index = protect_from_queue (index, 0);
10389 do_pending_stack_adjust ();
10390
10391 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
10392 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
10393 (index, op_mode))
10394 index = copy_to_mode_reg (op_mode, index);
e87b4f3f 10395
ad82abb8
ZW
10396 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
10397
10398 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
10399 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
10400 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
10401 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
10402 (op1, op_mode))
10403 op1 = copy_to_mode_reg (op_mode, op1);
10404
10405 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
10406
10407 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
10408 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
10409 op2, TREE_UNSIGNED (TREE_TYPE (range)));
10410 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
10411 (op2, op_mode))
10412 op2 = copy_to_mode_reg (op_mode, op2);
10413
10414 emit_jump_insn (gen_casesi (index, op1, op2,
10415 table_label, default_label));
10416 return 1;
10417}
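/* Illustrative sketch, not part of the original expr.c: the wide-index
   path of try_casesi, assuming `unsigned long' is wider than the
   `unsigned int' standing in for SImode.  The endpoints are handled in
   the original mode -- subtract MINVAL and bounds-check against RANGE
   first -- so truncating afterwards cannot discard significant bits.  */
#if 0
static int
narrow_index_sketch (unsigned long index, unsigned long minval,
                     unsigned long range, unsigned int *narrowed)
{
  unsigned long rel = index - minval;	/* MINUS_EXPR in the wide mode */
  if (range < rel)
    return 0;				/* jump to default_label */
  *narrowed = (unsigned int) rel;	/* now safe to truncate */
  return 1;
}
#endif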
10418
10419/* Attempt to generate a tablejump instruction; same concept. */
10420#ifndef HAVE_tablejump
10421#define HAVE_tablejump 0
10422#define gen_tablejump(x, y) (0)
10423#endif
10424
10425/* Subroutine of the next function.
10426
10427 INDEX is the value being switched on, with the lowest value
b93a436e
JL
10428 in the table already subtracted.
10429 MODE is its expected mode (needed if INDEX is constant).
10430 RANGE is the length of the jump table.
10431 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
88d3b7f0 10432
b93a436e
JL
10433 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10434 index value is out of range. */
0f41302f 10435
ad82abb8 10436static void
b93a436e
JL
10437do_tablejump (index, mode, range, table_label, default_label)
10438 rtx index, range, table_label, default_label;
10439 enum machine_mode mode;
ca695ac9 10440{
b3694847 10441 rtx temp, vector;
88d3b7f0 10442
b93a436e
JL
10443 /* Do an unsigned comparison (in the proper mode) between the index
10444 expression and the value which represents the length of the range.
10445 Since we just finished subtracting the lower bound of the range
10446 from the index expression, this comparison allows us to simultaneously
10447 check that the original index expression value is both greater than
10448 or equal to the minimum value of the range and less than or equal to
10449 the maximum value of the range. */
709f5be1 10450
c5d5d461 10451 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
a06ef755 10452 default_label);
bbf6f052 10453
b93a436e
JL
10454 /* If index is in range, it must fit in Pmode.
10455 Convert to Pmode so we can index with it. */
10456 if (mode != Pmode)
10457 index = convert_to_mode (Pmode, index, 1);
bbf6f052 10458
b93a436e
JL
10459 /* Don't let a MEM slip through, because then INDEX that comes
10460 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10461 and break_out_memory_refs will go to work on it and mess it up. */
10462#ifdef PIC_CASE_VECTOR_ADDRESS
10463 if (flag_pic && GET_CODE (index) != REG)
10464 index = copy_to_mode_reg (Pmode, index);
10465#endif
ca695ac9 10466
b93a436e
JL
10467 /* If flag_force_addr were to affect this address
10468 it could interfere with the tricky assumptions made
10469 about addresses that contain label-refs,
10470 which may be valid only very near the tablejump itself. */
10471 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10472 GET_MODE_SIZE, because this indicates how large insns are. The other
10473 uses should all be Pmode, because they are addresses. This code
10474 could fail if addresses and insns are not the same size. */
10475 index = gen_rtx_PLUS (Pmode,
10476 gen_rtx_MULT (Pmode, index,
10477 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10478 gen_rtx_LABEL_REF (Pmode, table_label));
10479#ifdef PIC_CASE_VECTOR_ADDRESS
10480 if (flag_pic)
10481 index = PIC_CASE_VECTOR_ADDRESS (index);
10482 else
bbf6f052 10483#endif
b93a436e
JL
10484 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10485 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10486 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10487 RTX_UNCHANGING_P (vector) = 1;
10488 convert_move (temp, vector, 0);
10489
10490 emit_jump_insn (gen_tablejump (temp, table_label));
10491
10492 /* If we are generating PIC code or if the table is PC-relative, the
10493 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10494 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10495 emit_barrier ();
bbf6f052 10496}
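/* Illustrative sketch, not part of the original expr.c: the single
   unsigned comparison emitted above checks both bounds at once.  With
   the low bound already subtracted, an index below it wraps to a huge
   unsigned value, so one GTU test against the range length rejects both
   underflow and overflow before the table is indexed.  The table
   contents below are hypothetical.  */
#if 0
static void case0_stub (void) {}
static void case1_stub (void) {}

static void
tablejump_sketch (int i)
{
  static void (*const table[2]) (void) = { case0_stub, case1_stub };
  unsigned int rel = (unsigned int) (i - 10);	/* 10 = lowest case value */
  if (rel > 1u)					/* GTU against the range */
    return;					/* default_label */
  table[rel] ();				/* indexed jump */
}
#endif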
b93a436e 10497
ad82abb8
ZW
10498int
10499try_tablejump (index_type, index_expr, minval, range,
10500 table_label, default_label)
10501 tree index_type, index_expr, minval, range;
10502 rtx table_label, default_label;
10503{
10504 rtx index;
10505
10506 if (! HAVE_tablejump)
10507 return 0;
10508
10509 index_expr = fold (build (MINUS_EXPR, index_type,
10510 convert (index_type, index_expr),
10511 convert (index_type, minval)));
10512 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10513 emit_queue ();
10514 index = protect_from_queue (index, 0);
10515 do_pending_stack_adjust ();
10516
10517 do_tablejump (index, TYPE_MODE (index_type),
10518 convert_modes (TYPE_MODE (index_type),
10519 TYPE_MODE (TREE_TYPE (range)),
10520 expand_expr (range, NULL_RTX,
10521 VOIDmode, 0),
10522 TREE_UNSIGNED (TREE_TYPE (range))),
10523 table_label, default_label);
10524 return 1;
10525}