/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-config.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "recog.h"
#include "reload.h"
#include "output.h"
#include "typeclass.h"
#include "toplev.h"
#include "ggc.h"
#include "intl.h"
#include "tm_p.h"

#ifndef ACCUMULATE_OUTGOING_ARGS
#define ACCUMULATE_OUTGOING_ARGS 0
#endif

/* Supply a default definition for PUSH_ARGS.  */
#ifndef PUSH_ARGS
#ifdef PUSH_ROUNDING
#define PUSH_ARGS !ACCUMULATE_OUTGOING_ARGS
#else
#define PUSH_ARGS 0
#endif
#endif

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* Hook called by safe_from_p for language-specific tree codes.  It is
   up to the language front-end to install a hook if it has any such
   codes that safe_from_p needs to know about.  Since safe_from_p will
   recursively explore the TREE_OPERANDs of an expression, this hook
   should not reexamine those pieces.  This routine may recursively
   call safe_from_p; it should always pass `0' as the TOP_P
   parameter.  */
int (*lang_safe_from_p) PARAMS ((rtx, tree));

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Don't check memory usage, since code is being emitted to check memory
   usage.  Used when current_function_check_memory_usage is true, to avoid
   infinite recursion.  */
static int in_check_memory_usage;

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
static tree placeholder_list = 0;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

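/* Editorial note on the fields above: AUTINC_TO/AUTINC_FROM are
   nonzero when TO_ADDR/FROM_ADDR are auto-increment addresses, and
   EXPLICIT_INC_TO/EXPLICIT_INC_FROM are -1 when move_by_pieces itself
   emits a pre-decrement before each piece, +1 when it emits a
   post-increment after each piece, and 0 when the address is instead
   adjusted through OFFSET (see move_by_pieces and move_by_pieces_1
   below).  */
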
/* This structure is used by store_by_pieces to describe the store (or
   clear) to be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
  PTR constfundata;
  int reverse;
};

extern struct obstack permanent_obstack;

static rtx get_push_address	PARAMS ((int));

static rtx enqueue_insn		PARAMS ((rtx, rtx));
static unsigned HOST_WIDE_INT move_by_pieces_ninsns
				PARAMS ((unsigned HOST_WIDE_INT,
					 unsigned int));
static void move_by_pieces_1	PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
					 struct move_by_pieces *));
static rtx clear_by_pieces_1	PARAMS ((PTR, HOST_WIDE_INT,
					 enum machine_mode));
static void clear_by_pieces	PARAMS ((rtx, unsigned HOST_WIDE_INT,
					 unsigned int));
static void store_by_pieces_1	PARAMS ((struct store_by_pieces *,
					 unsigned int));
static void store_by_pieces_2	PARAMS ((rtx (*) (rtx, ...),
					 enum machine_mode,
					 struct store_by_pieces *));
static rtx get_subtarget	PARAMS ((rtx));
static int is_zeros_p		PARAMS ((tree));
static int mostly_zeros_p	PARAMS ((tree));
static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
					     HOST_WIDE_INT, enum machine_mode,
					     tree, tree, unsigned int, int,
					     int));
static void store_constructor	PARAMS ((tree, rtx, unsigned int, int,
					 HOST_WIDE_INT));
static rtx store_field		PARAMS ((rtx, HOST_WIDE_INT,
					 HOST_WIDE_INT, enum machine_mode,
					 tree, enum machine_mode, int,
					 unsigned int, HOST_WIDE_INT, int));
static enum memory_use_mode
  get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
static tree save_noncopied_parts PARAMS ((tree, tree));
static tree init_noncopied_parts PARAMS ((tree, tree));
static int fixed_type_p		PARAMS ((tree));
static rtx var_rtx		PARAMS ((tree));
static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
static rtx expand_increment	PARAMS ((tree, int, int));
static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
static void do_compare_and_jump	PARAMS ((tree, enum rtx_code, enum rtx_code,
					 rtx, rtx));
static rtx do_store_flag	PARAMS ((tree, rtx, enum machine_mode, int));

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif

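/* Editorial note: a worked example under assumed parameters.  On a
   hypothetical 32-bit target with MOVE_MAX 4 and the default speed
   MOVE_RATIO of 15, a word-aligned 16-byte copy takes four SImode
   moves, so MOVE_BY_PIECES_P (16, 32) holds and the copy is expanded
   inline; under -Os the ratio drops to 3, and the same copy would go
   through a movstr pattern or a library call instead.  */
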
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
\f
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;

  start_sequence ();

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
             regno++)
          {
            if (! HARD_REGNO_MODE_OK (regno, mode))
              continue;

            reg = gen_rtx_REG (mode, regno);

            SET_SRC (pat) = mem;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
          }
    }

  end_sequence ();
}

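/* Editorial note: the direct_load/direct_store tables built above are
   consulted throughout this file; convert_move and convert_modes, for
   example, only reference a MEM in a narrower mode when
   direct_load[(int) to_mode] confirms that a register can be loaded
   from memory in that mode (see the truncation paths below).  */
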
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));

  pending_chain = 0;
  pending_stack_adjust = 0;
  stack_pointer_delta = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Mark the rtl reachable from the per-function expr_status P for
   garbage collection.  */

void
mark_expr_status (p)
     struct expr_status *p;
{
  if (p == NULL)
    return;

  ggc_mark_rtx (p->x_saveregs_value);
  ggc_mark_rtx (p->x_apply_args_value);
  ggc_mark_rtx (p->x_forced_labels);
}

/* Free the expr_status of function F.  */

void
free_expr_status (f)
     struct function *f;
{
  free (f->expr);
  f->expr = NULL;
}

/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function ()
{
  if (pending_chain)
    abort ();
}
\f
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
                                  body, pending_chain);
  return pending_chain;
}

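/* Editorial note: a sketch of the intended use.  When expanding an
   expression such as `a[i++]', the increment of `i' is deferred with
   enqueue_insn, and the QUEUED rtx it returns stands for the
   pre-increment value of `i' in the address computation; emit_queue
   later emits the deferred add.  Any rtx that might contain a QUEUED
   must be passed through protect_from_queue below before being placed
   in an instruction.  */
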
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
         use of autoincrement.  Make a copy of the contents of the memory
         location rather than a copy of the address, but not if the value is
         of mode BLKmode.  Don't modify X in place since it might be
         shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
          && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
        {
          register rtx y = XEXP (x, 0);
          register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));

          MEM_COPY_ATTRIBUTES (new, x);

          if (QUEUED_INSN (y))
            {
              register rtx temp = gen_reg_rtx (GET_MODE (new));
              emit_insn_before (gen_move_insn (temp, new),
                                QUEUED_INSN (y));
              return temp;
            }
          return new;
        }
      /* Otherwise, recursively protect the subexpressions of all
         the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
        {
          rtx tem = protect_from_queue (XEXP (x, 0), 0);
          if (tem != XEXP (x, 0))
            {
              x = copy_rtx (x);
              XEXP (x, 0) = tem;
            }
        }
      else if (code == PLUS || code == MULT)
        {
          rtx new0 = protect_from_queue (XEXP (x, 0), 0);
          rtx new1 = protect_from_queue (XEXP (x, 1), 0);
          if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
            {
              x = copy_rtx (x);
              XEXP (x, 0) = new0;
              XEXP (x, 1) = new1;
            }
        }
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
                    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
              || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      if (GET_CODE (body) == SEQUENCE)
        {
          QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
          emit_insn (QUEUED_BODY (p));
        }
      else
        QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}
\f
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
        abort ();

      if (VECTOR_MODE_P (to_mode))
        from = gen_rtx_SUBREG (to_mode, from, 0);
      else
        to = gen_rtx_SUBREG (from_mode, to, 0);

      emit_move_insn (to, from);
      return;
    }

  if (to_real != from_real)
    abort ();

  if (to_real)
    {
      rtx value, insns;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
        {
          /* Try converting directly if the insn is supported.  */
          if ((code = can_extend_p (to_mode, from_mode, 0))
              != CODE_FOR_nothing)
            {
              emit_unop_insn (code, to, from, UNKNOWN);
              return;
            }
        }

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
          return;
        }
#endif

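      /* Editorial note: the pattern names above follow the usual optab
         naming scheme truncMN2, with M the source-mode and N the
         destination-mode suffix; each block merely asks whether the
         target defines that named insn before resorting to the library
         calls selected below.  */
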
      libcall = (rtx) 0;
      switch (from_mode)
        {
        case SFmode:
          switch (to_mode)
            {
            case DFmode:
              libcall = extendsfdf2_libfunc;
              break;

            case XFmode:
              libcall = extendsfxf2_libfunc;
              break;

            case TFmode:
              libcall = extendsftf2_libfunc;
              break;

            default:
              break;
            }
          break;

        case DFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = truncdfsf2_libfunc;
              break;

            case XFmode:
              libcall = extenddfxf2_libfunc;
              break;

            case TFmode:
              libcall = extenddftf2_libfunc;
              break;

            default:
              break;
            }
          break;

        case XFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = truncxfsf2_libfunc;
              break;

            case DFmode:
              libcall = truncxfdf2_libfunc;
              break;

            default:
              break;
            }
          break;

        case TFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = trunctfsf2_libfunc;
              break;

            case DFmode:
              libcall = trunctfdf2_libfunc;
              break;

            default:
              break;
            }
          break;

        default:
          break;
        }

      if (libcall == (rtx) 0)
        /* This conversion is not implemented yet.  */
        abort ();

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
                                       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
                                                                    from));
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          if (GET_CODE (to) == REG)
            emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
          convert_move (gen_lowpart (word_mode, to), from, unsignedp);
          emit_unop_insn (code, to,
                          gen_lowpart (word_mode, to), equiv_code);
          return;
        }

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
         that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
        from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
      else
        {
#ifdef HAVE_slt
          if (HAVE_slt
              && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
              && STORE_FLAG_VALUE == -1)
            {
              emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
                             lowpart_mode, 0, 0);
              fill_value = gen_reg_rtx (word_mode);
              emit_insn (gen_slt (fill_value));
            }
          else
#endif
            {
              fill_value
                = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
                                size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
                                NULL_RTX, 0);
              fill_value = convert_to_mode (word_mode, fill_value, 1);
            }
        }

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          if (subword == 0)
            abort ();

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
                              gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

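  /* Editorial note: a worked example of the sign-fill above, assuming
     16-bit words.  Extending the HImode value -5 (0xfffb) to DImode
     signed copies 0xfffb into the low word, and the arithmetic right
     shift by 15 produces 0xffff, which becomes FILL_VALUE for every
     remaining word, yielding the 64-bit value 0xfffffffffffffffb.  */
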
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
        from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
        {
          emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
        {
          from = convert_to_mode (QImode, from, unsignedp);
          from_mode = QImode;
        }
      else
        {
#ifdef HAVE_extendpqiqi2
          if (HAVE_extendpqiqi2)
            {
              emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpqiqi2 */
          abort ();
        }
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
        from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
        {
          emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
        {
          from = convert_to_mode (SImode, from, unsignedp);
          from_mode = SImode;
        }
      else
        {
#ifdef HAVE_extendpsisi2
          if (! unsignedp && HAVE_extendpsisi2)
            {
              emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpsisi2 */
#ifdef HAVE_zero_extendpsisi2
          if (unsignedp && HAVE_zero_extendpsisi2)
            {
              emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_zero_extendpsisi2 */
          abort ();
        }
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
        from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
        {
          emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
        {
          from = convert_to_mode (DImode, from, unsignedp);
          from_mode = DImode;
        }
      else
        {
#ifdef HAVE_extendpdidi2
          if (HAVE_extendpdidi2)
            {
              emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpdidi2 */
          abort ();
        }
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
          && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
        from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          enum machine_mode intermediate;
          rtx tmp;
          tree shift_amount;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if (((can_extend_p (to_mode, intermediate, unsignedp)
                  != CODE_FOR_nothing)
                 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
                     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                               GET_MODE_BITSIZE (intermediate))))
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.
             Generate what we need with shifts.  */
          shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
                                      - GET_MODE_BITSIZE (from_mode), 0);
          from = gen_lowpart (to_mode, force_reg (from_mode, from));
          tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
                              to, unsignedp);
          tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
                              to, unsignedp);
          if (tmp != to)
            emit_move_insn (to, tmp);
          return;
        }
    }

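  /* Editorial note: a worked example of the shift fallback above,
     assuming a target with no extendqisi2-style insn.  Extending a
     QImode value to SImode shifts it left by 24 bits and back right by
     24; expand_shift emits an arithmetic right shift for the signed
     case (UNSIGNEDP zero) and a logical one for the unsigned case.  */
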
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
        {
          emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
        {
          emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
        {
          emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
        {
          emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
        {
          emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
        {
          emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
        {
          emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
        {
          emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
        {
          emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
        {
          emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
          && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
        {
          int width = GET_MODE_BITSIZE (oldmode);

          /* We need to zero extend VAL.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
        }

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (oldmode) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
                  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
                       && direct_load[(int) mode])
                      || (GET_CODE (x) == REG
                          && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                                                    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
         X does not need sign- or zero-extension.  This may not be
         the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
        {
          HOST_WIDE_INT val = INTVAL (x);
          int width = GET_MODE_BITSIZE (oldmode);

          /* We must sign or zero-extend in this case.  Start by
             zero-extending, then sign extend if we need to.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
          if (! unsignedp
              && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
            val |= (HOST_WIDE_INT) (-1) << width;

          return GEN_INT (val);
        }

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
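
/* Editorial note: a worked example of the CONST_INT handling in
   convert_modes, assuming 8-bit QImode and wider HImode.  Converting
   (const_int -1) from QImode to HImode unsigned masks the value down
   to 0xff, while the signed conversion re-extends it to -1; without
   that masking, gen_lowpart would hand back all-ones in the wider
   mode.  */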
\f
/* This macro determines the largest unit size that
   move_by_pieces can use.  */

/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES   MOVE_MAX
#endif

/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN is the maximum alignment we can assume.  */

void
move_by_pieces (to, from, len, align)
     rtx to, from;
     unsigned HOST_WIDE_INT len;
     unsigned int align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse)
    data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}
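
/* Editorial note: a worked example of the mode walk above, assuming a
   32-bit target with MOVE_MAX_PIECES of 4 and word-aligned operands.
   A 9-byte copy performs two SImode moves in the first pass of the
   while loop, max_size then shrinks past HImode (the remaining byte is
   too small for it), and a final QImode move handles the ninth byte,
   leaving data.len at zero.  */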

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is the maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (l, align)
     unsigned HOST_WIDE_INT l;
     unsigned int align;
{
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  if (l)
    abort ();
  return n_insns;
}

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PARAMS ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->autinc_to)
        {
          to1 = gen_rtx_MEM (mode, data->to_addr);
          MEM_COPY_ATTRIBUTES (to1, data->to);
        }
      else
        to1 = change_address (data->to, mode,
                              plus_constant (data->to_addr, data->offset));

      if (data->autinc_from)
        {
          from1 = gen_rtx_MEM (mode, data->from_addr);
          MEM_COPY_ATTRIBUTES (from1, data->from);
        }
      else
        from1 = change_address (data->from, mode,
                                plus_constant (data->from_addr, data->offset));

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));

      emit_insn ((*genfun) (to1, from1));

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}
\f
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     unsigned int align;
{
  rtx retval = 0;
#ifdef TARGET_MEM_FUNCTIONS
  static tree fn;
  tree call_expr, arg_list;
#endif

  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
         including more than one in the machine description unless
         the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align / BITS_PER_UNIT);
      enum machine_mode mode;

      /* Since this is a move insn, we don't care about volatility.  */
      volatile_ok = 1;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        {
          enum insn_code code = movstr_optab[(int) mode];
          insn_operand_predicate_fn pred;

          if (code != CODE_FOR_nothing
              /* We don't need MODE to be narrower than HOST_BITS_PER_WIDE_INT
                 here because if SIZE is less than the mode mask, as it is
                 returned by the macro, it will definitely be less than the
                 actual mode mask.  */
              && ((GET_CODE (size) == CONST_INT
                   && ((unsigned HOST_WIDE_INT) INTVAL (size)
                       <= (GET_MODE_MASK (mode) >> 1)))
                  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
              && ((pred = insn_data[(int) code].operand[0].predicate) == 0
                  || (*pred) (x, BLKmode))
              && ((pred = insn_data[(int) code].operand[1].predicate) == 0
                  || (*pred) (y, BLKmode))
              && ((pred = insn_data[(int) code].operand[3].predicate) == 0
                  || (*pred) (opalign, VOIDmode)))
            {
              rtx op2;
              rtx last = get_last_insn ();
              rtx pat;

              op2 = convert_to_mode (mode, size, 1);
              pred = insn_data[(int) code].operand[2].predicate;
              if (pred != 0 && ! (*pred) (op2, mode))
                op2 = copy_to_mode_reg (mode, op2);

              pat = GEN_FCN ((int) code) (x, y, op2, opalign);
              if (pat)
                {
                  emit_insn (pat);
                  volatile_ok = 0;
                  return 0;
                }
              else
                delete_insns_since (last);
            }
        }

3ef1eef4
RK
1701 volatile_ok = 0;
1702
4bc973ae
JL
1703 /* X, Y, or SIZE may have been passed through protect_from_queue.
1704
1705 It is unsafe to save the value generated by protect_from_queue
1706 and reuse it later. Consider what happens if emit_queue is
1707 called before the return value from protect_from_queue is used.
1708
1709 Expansion of the CALL_EXPR below will call emit_queue before
1710 we are finished emitting RTL for argument setup. So if we are
1711 not careful we could get the wrong value for an argument.
1712
1713 To avoid this problem we go ahead and emit code to copy X, Y &
1714 SIZE into new pseudos. We can then place those new pseudos
1715 into an RTL_EXPR and use them later, even after a call to
3a94c984 1716 emit_queue.
4bc973ae
JL
1717
1718 Note this is not strictly needed for library calls since they
1719 do not call emit_queue before loading their arguments. However,
1720 we may need to have library calls call emit_queue in the future
1721 since failing to do so could cause problems for targets which
1722 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1723 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1724 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1725
1726#ifdef TARGET_MEM_FUNCTIONS
1727 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1728#else
1729 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1730 TREE_UNSIGNED (integer_type_node));
f3dc586a 1731 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
4bc973ae
JL
1732#endif
1733
bbf6f052 1734#ifdef TARGET_MEM_FUNCTIONS
52cf7115
JL
1735 /* It is incorrect to use the libcall calling conventions to call
1736 memcpy in this context.
1737
1738 This could be a user call to memcpy and the user may wish to
1739 examine the return value from memcpy.
1740
1741 For targets where libcalls and normal calls have different conventions
3a94c984 1742 for returning pointers, we could end up generating incorrect code.
52cf7115
JL
1743
1744 So instead of using a libcall sequence we build up a suitable
1745 CALL_EXPR and expand the call in the normal fashion. */
1746 if (fn == NULL_TREE)
1747 {
1748 tree fntype;
1749
1750 /* This was copied from except.c, I don't know if all this is
1751 necessary in this context or not. */
1752 fn = get_identifier ("memcpy");
52cf7115
JL
1753 fntype = build_pointer_type (void_type_node);
1754 fntype = build_function_type (fntype, NULL_TREE);
1755 fn = build_decl (FUNCTION_DECL, fn, fntype);
3a94c984 1756 ggc_add_tree_root (&fn, 1);
52cf7115
JL
1757 DECL_EXTERNAL (fn) = 1;
1758 TREE_PUBLIC (fn) = 1;
1759 DECL_ARTIFICIAL (fn) = 1;
6c418184 1760 make_decl_rtl (fn, NULL_PTR);
52cf7115 1761 assemble_external (fn);
52cf7115
JL
1762 }
1763
3a94c984 1764 /* We need to make an argument list for the function call.
52cf7115
JL
1765
1766 memcpy has three arguments, the first two are void * addresses and
1767 the last is a size_t byte count for the copy. */
1768 arg_list
1769 = build_tree_list (NULL_TREE,
4bc973ae 1770 make_tree (build_pointer_type (void_type_node), x));
52cf7115
JL
1771 TREE_CHAIN (arg_list)
1772 = build_tree_list (NULL_TREE,
4bc973ae 1773 make_tree (build_pointer_type (void_type_node), y));
52cf7115
JL
1774 TREE_CHAIN (TREE_CHAIN (arg_list))
1775 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1776 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1777
1778 /* Now we have to build up the CALL_EXPR itself. */
1779 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1780 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1781 call_expr, arg_list, NULL_TREE);
1782 TREE_SIDE_EFFECTS (call_expr) = 1;
1783
1784 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
bbf6f052 1785#else
ebb1b59a 1786 emit_library_call (bcopy_libfunc, LCT_NORMAL,
fe7bbd2a 1787 VOIDmode, 3, y, Pmode, x, Pmode,
3b6f75e2
JW
1788 convert_to_mode (TYPE_MODE (integer_type_node), size,
1789 TREE_UNSIGNED (integer_type_node)),
1790 TYPE_MODE (integer_type_node));
bbf6f052
RK
1791#endif
1792 }
e9a25f70
JL
1793
1794 return retval;
bbf6f052
RK
1795}
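
/* For illustration only: a caller wanting a 16-byte, word-aligned block
   copy might write something like the following (the MEM operands, the
   size, and the 32-bit alignment are made-up values, and ALIGN is taken
   to be in bits, matching the align / BITS_PER_UNIT conversions used for
   opalign in this file):

	emit_block_move (dst_mem, src_mem, GEN_INT (16), 32);

   The function first tries move_by_pieces, then a movstrM pattern from
   the machine description, and finally falls back to the memcpy/bcopy
   call built above.  */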

/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
#ifdef HAVE_store_multiple
  rtx pat;
  rtx last;
#endif
  enum machine_mode mode;

  if (nregs == 0)
    return;

  /* If SIZE is that of a mode no bigger than a word, just use that
     mode's store operation.  */
  if (size <= UNITS_PER_WORD
      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
    {
      emit_move_insn (change_address (x, mode, NULL),
		      gen_rtx_REG (mode, regno));
      return;
    }

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  Note that the previous test
     doesn't handle all cases (e.g. SIZE == 3).  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
	abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx_REG (word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
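
/* Worked example for the BYTES_BIG_ENDIAN case above: with
   UNITS_PER_WORD == 4 and SIZE == 3, the three significant bytes sit at
   the low-order end of the register, so the code shifts left by
   (4 - 3) * 8 == 8 bits to left justify them before the word-sized
   store.  (Numbers chosen purely for illustration.)  */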

/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block SRC in bytes, or -1 if not known.  ALIGN is the known alignment of
   SRC in bits.  */
/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
   the balance will be in what would be the low-order memory addresses, i.e.
   left justified for big endian, right justified for little endian.  This
   happens to be true for the targets currently using this support.  If this
   ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
   would be needed.  */

void
emit_group_load (dst, orig_src, ssize, align)
     rtx dst, orig_src;
     unsigned int align;
     int ssize;
{
  rtx *tmps, src;
  int start, i;

  if (GET_CODE (dst) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));

  /* If we won't be loading directly from memory, protect the real source
     from strange tricks we might play.  */
  src = orig_src;
  if (GET_CODE (src) != MEM && ! CONSTANT_P (src))
    {
      if (GET_MODE (src) == VOIDmode)
	src = gen_reg_rtx (GET_MODE (dst));
      else
	src = gen_reg_rtx (GET_MODE (orig_src));
      emit_move_insn (src, orig_src);
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;
	  if (bytelen <= 0)
	    abort ();
	}

      /* Optimize the access just a bit.  */
      if (GET_CODE (src) == MEM
	  && align >= GET_MODE_ALIGNMENT (mode)
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	{
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i],
			  change_address (src, mode,
					  plus_constant (XEXP (src, 0),
							 bytepos)));
	}
      else if (GET_CODE (src) == CONCAT)
	{
	  if (bytepos == 0
	      && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
	    tmps[i] = XEXP (src, 0);
	  else if (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
		   && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
	    tmps[i] = XEXP (src, 1);
	  else
	    abort ();
	}
      else if ((CONSTANT_P (src)
		&& (GET_MODE (src) == VOIDmode || GET_MODE (src) == mode))
	       || (GET_CODE (src) == REG && GET_MODE (src) == mode))
	tmps[i] = src;
      else
	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
				     mode, mode, align, ssize);

      if (BYTES_BIG_ENDIAN && shift)
	expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
		      tmps[i], 0, OPTAB_WIDEN);
    }

  emit_queue ();

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
}
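
/* For illustration, DST might look like this for a 16-byte structure
   returned in two 8-byte registers (the register numbers are made up):

	(parallel [(expr_list (reg:DI 3) (const_int 0))
		   (expr_list (reg:DI 4) (const_int 8))])

   Each element pairs a register with the byte offset of the piece it
   receives; a null register in the first element marks a parameter that
   also lives partly on the stack, which is why the loops above may start
   at index 1.  */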

/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block DST, or -1 if not known.  ALIGN is the known alignment of DST.  */

void
emit_group_store (orig_dst, src, ssize, align)
     rtx orig_dst, src;
     int ssize;
     unsigned int align;
{
  rtx *tmps, dst;
  int start, i;

  if (GET_CODE (src) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);
    }
  emit_queue ();

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
	 a return statement.  In that case, the dst and src are the same,
	 so no action is necessary.  */
      if (rtx_equal_p (dst, src))
	return;

      /* It is unclear if we can ever reach here, but we may as well handle
	 it.  Allocate a temporary, and split this into a store/load to/from
	 the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, ssize, align);
      emit_group_load (dst, temp, ssize, align);
      return;
    }
  else if (GET_CODE (dst) != MEM)
    {
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, const0_rtx);
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  if (BYTES_BIG_ENDIAN)
	    {
	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	      expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
			    tmps[i], 0, OPTAB_WIDEN);
	    }
	  bytelen = ssize - bytepos;
	}

      /* Optimize the access just a bit.  */
      if (GET_CODE (dst) == MEM
	  && align >= GET_MODE_ALIGNMENT (mode)
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	emit_move_insn (change_address (dst, mode,
					plus_constant (XEXP (dst, 0),
						       bytepos)),
			tmps[i]);
      else
	store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
			 mode, tmps[i], align, ssize);
    }

  emit_queue ();

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (GET_CODE (dst) == REG)
    emit_move_insn (orig_dst, dst);
}

/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The primary purpose of this routine is to handle functions
   that return BLKmode structures in registers.  Some machines
   (the PA for example) want to return all small structures
   in registers regardless of the structure's alignment.  */

rtx
copy_blkmode_from_reg (tgtblk, srcreg, type)
     rtx tgtblk;
     rtx srcreg;
     tree type;
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;

  if (tgtblk == 0)
    {
      tgtblk = assign_temp (build_qualified_type (type,
						  (TYPE_QUALS (type)
						   | TYPE_QUAL_CONST)),
			    0, 1, 1);
      preserve_temp_slots (tgtblk);
    }

  /* This code assumes srcreg is at least a full word.  If it isn't,
     copy it into a new pseudo which is a full word.  */
  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));

  /* Structures whose size is not a multiple of a word are aligned
     to the least significant byte (to the right).  On a BYTES_BIG_ENDIAN
     machine, this means we must skip the empty high order bytes when
     calculating the bit offset.  */
  if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
    big_endian_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));

  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */
  for (bitpos = 0, xbitpos = big_endian_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
	 word boundary and when xbitpos == big_endian_correction
	 (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
	  || xbitpos == big_endian_correction)
	src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, BLKmode);

      /* We need a new destination operand each time bitpos is on
	 a word boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
	dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);

      /* Use xbitpos for the source extraction (right justified) and
	 bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
		       extract_bit_field (src, bitsize,
					  xbitpos % BITS_PER_WORD, 1,
					  NULL_RTX, word_mode, word_mode,
					  bitsize, BITS_PER_WORD),
		       bitsize, BITS_PER_WORD);
    }

  return tgtblk;
}
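
/* Worked example of BIG_ENDIAN_CORRECTION above: for a 6-byte structure
   with UNITS_PER_WORD == 4 (so BITS_PER_WORD == 32), bytes %
   UNITS_PER_WORD == 2, giving a correction of 32 - 2 * 8 == 16 bits;
   the extraction therefore skips the 16 unused high-order bits of the
   first source word.  (Sizes chosen purely for illustration.)  */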

/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (call_fusage, reg)
     rtx *call_fusage, reg;
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
			 gen_rtx_USE (VOIDmode, reg), *call_fusage);
}

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (call_fusage, regno, nregs)
     rtx *call_fusage;
     int regno;
     int nregs;
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
}

/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (call_fusage, regs)
     rtx *call_fusage;
     rtx regs;
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
	 registers.  This can also be a MEM for targets that pass values
	 partially on the stack and partially in registers.  */
      if (reg != 0 && GET_CODE (reg) == REG)
	use_reg (call_fusage, reg);
    }
}
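
/* The list built by these routines ends up as the
   CALL_INSN_FUNCTION_USAGE of a call insn; for two argument registers
   it would look roughly like this (illustrative only):

	(expr_list (use (reg:SI 4))
		   (expr_list (use (reg:SI 5)) (nil)))  */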

/* Determine whether the LEN bytes generated by CONSTFUN can be
   stored to memory using several move instructions.  CONSTFUNDATA is
   a pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  Return nonzero if a
   call to store_by_pieces should succeed.  */

int
can_store_by_pieces (len, constfun, constfundata, align)
     unsigned HOST_WIDE_INT len;
     rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
     PTR constfundata;
     unsigned int align;
{
  unsigned HOST_WIDE_INT max_size, l;
  HOST_WIDE_INT offset = 0;
  enum machine_mode mode, tmode;
  enum insn_code icode;
  int reverse;
  rtx cst;

  if (! MOVE_BY_PIECES_P (len, align))
    return 0;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* We would first store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  for (reverse = 0;
       reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
       reverse++)
    {
      l = len;
      mode = VOIDmode;
      max_size = MOVE_MAX_PIECES + 1;
      while (max_size > 1)
	{
	  for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	       tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	    if (GET_MODE_SIZE (tmode) < max_size)
	      mode = tmode;

	  if (mode == VOIDmode)
	    break;

	  icode = mov_optab->handlers[(int) mode].insn_code;
	  if (icode != CODE_FOR_nothing
	      && align >= GET_MODE_ALIGNMENT (mode))
	    {
	      unsigned int size = GET_MODE_SIZE (mode);

	      while (l >= size)
		{
		  if (reverse)
		    offset -= size;

		  cst = (*constfun) (constfundata, offset, mode);
		  if (!LEGITIMATE_CONSTANT_P (cst))
		    return 0;

		  if (!reverse)
		    offset += size;

		  l -= size;
		}
	    }

	  max_size = GET_MODE_SIZE (mode);
	}

      /* The code above should have handled everything.  */
      if (l != 0)
	abort ();
    }

  return 1;
}
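
/* A minimal sketch of a CONSTFUN callback such as a caller might pass to
   can_store_by_pieces/store_by_pieces; the function name and the repeated
   0xAA byte are hypothetical, for illustration only, and the block is
   guarded out so it does not enter the build.  */
#if 0
static rtx
repeated_byte_cst (data, offset, mode)
     PTR data ATTRIBUTE_UNUSED;
     HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
     enum machine_mode mode;
{
  /* Replicate the byte 0xAA across a MODE-sized integer constant.  */
  HOST_WIDE_INT val = 0;
  unsigned int i;

  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    val = (val << BITS_PER_UNIT) | 0xAA;
  return GEN_INT (trunc_int_for_mode (val, mode));
}
#endif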

/* Generate several move instructions to store LEN bytes generated by
   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
   pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  */

void
store_by_pieces (to, len, constfun, constfundata, align)
     rtx to;
     unsigned HOST_WIDE_INT len;
     rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
     PTR constfundata;
     unsigned int align;
{
  struct store_by_pieces data;

  if (! MOVE_BY_PIECES_P (len, align))
    abort ();
  to = protect_from_queue (to, 1);
  data.constfun = constfun;
  data.constfundata = constfundata;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
}

/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  The caller must pass TO through protect_from_queue
   before calling.  ALIGN is maximum alignment we can assume.  */

static void
clear_by_pieces (to, len, align)
     rtx to;
     unsigned HOST_WIDE_INT len;
     unsigned int align;
{
  struct store_by_pieces data;

  data.constfun = clear_by_pieces_1;
  data.constfundata = NULL_PTR;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
}

/* Callback routine for clear_by_pieces.
   Return const0_rtx unconditionally.  */

static rtx
clear_by_pieces_1 (data, offset, mode)
     PTR data ATTRIBUTE_UNUSED;
     HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  return const0_rtx;
}

/* Subroutine of clear_by_pieces and store_by_pieces.
   Generate several move instructions to store LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  The caller must pass TO through protect_from_queue
   before calling.  ALIGN is maximum alignment we can assume.  */

static void
store_by_pieces_1 (data, align)
     struct store_by_pieces *data;
     unsigned int align;
{
  rtx to_addr = XEXP (data->to, 0);
  unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data->offset = 0;
  data->to_addr = to_addr;
  data->autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);

  data->explicit_inc_to = 0;
  data->reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data->reverse)
    data->offset = data->len;

  /* If storing requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!data->autinc_to
      && move_by_pieces_ninsns (data->len, align) > 2)
    {
      /* Determine the main mode we'll be using.  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
	{
	  data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
	  data->autinc_to = 1;
	  data->explicit_inc_to = -1;
	}

      if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
	  && ! data->autinc_to)
	{
	  data->to_addr = copy_addr_to_reg (to_addr);
	  data->autinc_to = 1;
	  data->explicit_inc_to = 1;
	}

      if (! data->autinc_to && CONSTANT_P (to_addr))
	data->to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	store_by_pieces_2 (GEN_FCN (icode), mode, data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data->len != 0)
    abort ();
}

/* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
store_by_pieces_2 (genfun, mode, data)
     rtx (*genfun) PARAMS ((rtx, ...));
     enum machine_mode mode;
     struct store_by_pieces *data;
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1, cst;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->autinc_to)
	{
	  to1 = gen_rtx_MEM (mode, data->to_addr);
	  MEM_COPY_ATTRIBUTES (to1, data->to);
	}
      else
	to1 = change_address (data->to, mode,
			      plus_constant (data->to_addr, data->offset));

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT) size)));

      cst = (*data->constfun) (data->constfundata, data->offset, mode);
      emit_insn ((*genfun) (to1, cst));

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}

/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
   its length in bytes and ALIGN is the maximum alignment we can assume.

   If we call a function that returns the length of the block, return it.  */

rtx
clear_storage (object, size, align)
     rtx object;
     rtx size;
     unsigned int align;
{
#ifdef TARGET_MEM_FUNCTIONS
  static tree fn;
  tree call_expr, arg_list;
#endif
  rtx retval = 0;

  /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
     just move a zero.  Otherwise, do this a piece at a time.  */
  if (GET_MODE (object) != BLKmode
      && GET_CODE (size) == CONST_INT
      && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
    emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
  else
    {
      object = protect_from_queue (object, 1);
      size = protect_from_queue (size, 0);

      if (GET_CODE (size) == CONST_INT
	  && MOVE_BY_PIECES_P (INTVAL (size), align))
	clear_by_pieces (object, INTVAL (size), align);
      else
	{
	  /* Try the most limited insn first, because there's no point
	     including more than one in the machine description unless
	     the more limited one has some advantage.  */

	  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
	  enum machine_mode mode;

	  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	       mode = GET_MODE_WIDER_MODE (mode))
	    {
	      enum insn_code code = clrstr_optab[(int) mode];
	      insn_operand_predicate_fn pred;

	      if (code != CODE_FOR_nothing
		  /* We don't need MODE to be narrower than
		     BITS_PER_HOST_WIDE_INT here because if SIZE is less than
		     the mode mask, as it is returned by the macro, it will
		     definitely be less than the actual mode mask.  */
		  && ((GET_CODE (size) == CONST_INT
		       && ((unsigned HOST_WIDE_INT) INTVAL (size)
			   <= (GET_MODE_MASK (mode) >> 1)))
		      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
		  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
		      || (*pred) (object, BLKmode))
		  && ((pred = insn_data[(int) code].operand[2].predicate) == 0
		      || (*pred) (opalign, VOIDmode)))
		{
		  rtx op1;
		  rtx last = get_last_insn ();
		  rtx pat;

		  op1 = convert_to_mode (mode, size, 1);
		  pred = insn_data[(int) code].operand[1].predicate;
		  if (pred != 0 && ! (*pred) (op1, mode))
		    op1 = copy_to_mode_reg (mode, op1);

		  pat = GEN_FCN ((int) code) (object, op1, opalign);
		  if (pat)
		    {
		      emit_insn (pat);
		      return 0;
		    }
		  else
		    delete_insns_since (last);
		}
	    }

	  /* OBJECT or SIZE may have been passed through protect_from_queue.

	     It is unsafe to save the value generated by protect_from_queue
	     and reuse it later.  Consider what happens if emit_queue is
	     called before the return value from protect_from_queue is used.

	     Expansion of the CALL_EXPR below will call emit_queue before
	     we are finished emitting RTL for argument setup.  So if we are
	     not careful we could get the wrong value for an argument.

	     To avoid this problem we go ahead and emit code to copy OBJECT
	     and SIZE into new pseudos.  We can then place those new pseudos
	     into an RTL_EXPR and use them later, even after a call to
	     emit_queue.

	     Note this is not strictly needed for library calls since they
	     do not call emit_queue before loading their arguments.  However,
	     we may need to have library calls call emit_queue in the future
	     since failing to do so could cause problems for targets which
	     define SMALL_REGISTER_CLASSES and pass arguments in registers.  */
	  object = copy_to_mode_reg (Pmode, XEXP (object, 0));

#ifdef TARGET_MEM_FUNCTIONS
	  size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
#else
	  size = convert_to_mode (TYPE_MODE (integer_type_node), size,
				  TREE_UNSIGNED (integer_type_node));
	  size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
#endif

#ifdef TARGET_MEM_FUNCTIONS
	  /* It is incorrect to use the libcall calling conventions to call
	     memset in this context.

	     This could be a user call to memset and the user may wish to
	     examine the return value from memset.

	     For targets where libcalls and normal calls have different
	     conventions for returning pointers, we could end up generating
	     incorrect code.

	     So instead of using a libcall sequence we build up a suitable
	     CALL_EXPR and expand the call in the normal fashion.  */
	  if (fn == NULL_TREE)
	    {
	      tree fntype;

	      /* This was copied from except.c, I don't know if all this is
		 necessary in this context or not.  */
	      fn = get_identifier ("memset");
	      fntype = build_pointer_type (void_type_node);
	      fntype = build_function_type (fntype, NULL_TREE);
	      fn = build_decl (FUNCTION_DECL, fn, fntype);
	      ggc_add_tree_root (&fn, 1);
	      DECL_EXTERNAL (fn) = 1;
	      TREE_PUBLIC (fn) = 1;
	      DECL_ARTIFICIAL (fn) = 1;
	      make_decl_rtl (fn, NULL_PTR);
	      assemble_external (fn);
	    }

	  /* We need to make an argument list for the function call.

	     memset has three arguments, the first is a void * address, the
	     second is an integer with the initialization value, and the last
	     is a size_t byte count for the copy.  */
	  arg_list
	    = build_tree_list (NULL_TREE,
			       make_tree (build_pointer_type (void_type_node),
					  object));
	  TREE_CHAIN (arg_list)
	    = build_tree_list (NULL_TREE,
			       make_tree (integer_type_node, const0_rtx));
	  TREE_CHAIN (TREE_CHAIN (arg_list))
	    = build_tree_list (NULL_TREE, make_tree (sizetype, size));
	  TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;

	  /* Now we have to build up the CALL_EXPR itself.  */
	  call_expr = build1 (ADDR_EXPR,
			      build_pointer_type (TREE_TYPE (fn)), fn);
	  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
			     call_expr, arg_list, NULL_TREE);
	  TREE_SIDE_EFFECTS (call_expr) = 1;

	  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
#else
	  emit_library_call (bzero_libfunc, LCT_NORMAL,
			     VOIDmode, 2, object, Pmode, size,
			     TYPE_MODE (integer_type_node));
#endif
	}
    }

  return retval;
}
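
/* For illustration only: zeroing a 32-byte BLKmode object with known
   64-bit alignment would be requested as (operands made up):

	clear_storage (obj_mem, GEN_INT (32), 64);

   A non-BLKmode object whose mode matches SIZE is zeroed with a single
   move; small constant sizes go through clear_by_pieces; everything
   else tries a clrstrM pattern and finally the memset/bzero call built
   above.  */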

/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  rtx y_cst = NULL_RTX;
  rtx last_insn;

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  /* Never force constant_p_rtx to memory.  */
  if (GET_CODE (y) == CONSTANT_P_RTX)
    ;
  else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
    {
      y_cst = y;
      y = force_const_mem (mode, y);
    }

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
	   && ! push_operand (x, GET_MODE (x)))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = change_address (x, VOIDmode, XEXP (x, 0));

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = change_address (y, VOIDmode, XEXP (y, 0));

  if (mode == BLKmode)
    abort ();

  last_insn = emit_move_insn_1 (x, y);

  if (y_cst && GET_CODE (x) == REG)
    REG_NOTES (last_insn)
      = gen_rtx_EXPR_LIST (REG_EQUAL, y_cst, REG_NOTES (last_insn));

  return last_insn;
}

/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx
emit_move_insn_1 (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);
  unsigned int i;

  if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
    abort ();

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));

  /* Expand complex moves by moving real part and imag part, if possible.  */
  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
	   && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
						    * BITS_PER_UNIT),
						   (class == MODE_COMPLEX_INT
						    ? MODE_INT : MODE_FLOAT),
						   0))
	   && (mov_optab->handlers[(int) submode].insn_code
	       != CODE_FOR_nothing))
    {
      /* Don't split destination if it is a stack push.  */
      int stack = push_operand (x, GET_MODE (x));

#ifdef PUSH_ROUNDING
      /* In case we output to the stack, but the size is smaller than the
	 machine can push exactly, we need to use move instructions.  */
      if (stack
	  && PUSH_ROUNDING (GET_MODE_SIZE (submode)) != GET_MODE_SIZE (submode))
	{
	  rtx temp;
	  int offset1, offset2;

	  /* Do not use anti_adjust_stack, since we don't want to update
	     stack_pointer_delta.  */
	  temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
			       sub_optab,
#else
			       add_optab,
#endif
			       stack_pointer_rtx,
			       GEN_INT
			       (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
			       stack_pointer_rtx,
			       0,
			       OPTAB_LIB_WIDEN);
	  if (temp != stack_pointer_rtx)
	    emit_move_insn (stack_pointer_rtx, temp);
#ifdef STACK_GROWS_DOWNWARD
	  offset1 = 0;
	  offset2 = GET_MODE_SIZE (submode);
#else
	  offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
	  offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
		     + GET_MODE_SIZE (submode));
#endif
	  emit_move_insn (change_address (x, submode,
					  gen_rtx_PLUS (Pmode,
							stack_pointer_rtx,
							GEN_INT (offset1))),
			  gen_realpart (submode, y));
	  emit_move_insn (change_address (x, submode,
					  gen_rtx_PLUS (Pmode,
							stack_pointer_rtx,
							GEN_INT (offset2))),
			  gen_imagpart (submode, y));
	}
#endif
      /* If this is a stack, push the highpart first, so it
	 will be in the argument order.

	 In that case, change_address is used only to convert
	 the mode, not to change the address.  */
      else if (stack)
	{
	  /* Note that the real part always precedes the imag part in memory
	     regardless of machine's endianness.  */
#ifdef STACK_GROWS_DOWNWARD
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx_MEM (submode, XEXP (x, 0)),
		      gen_imagpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx_MEM (submode, XEXP (x, 0)),
		      gen_realpart (submode, y)));
#else
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx_MEM (submode, XEXP (x, 0)),
		      gen_realpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx_MEM (submode, XEXP (x, 0)),
		      gen_imagpart (submode, y)));
#endif
	}
      else
	{
	  rtx realpart_x, realpart_y;
	  rtx imagpart_x, imagpart_y;

	  /* If this is a complex value with each part being smaller than a
	     word, the usual calling sequence will likely pack the pieces into
	     a single register.  Unfortunately, SUBREG of hard registers only
	     deals in terms of words, so we have a problem converting input
	     arguments to the CONCAT of two registers that is used elsewhere
	     for complex values.  If this is before reload, we can copy it into
	     memory and reload.  FIXME, we should see about using extract and
	     insert on integer registers, but complex short and complex char
	     variables should be rarely used.  */
	  if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
	      && (reload_in_progress | reload_completed) == 0)
	    {
	      int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
	      int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);

	      if (packed_dest_p || packed_src_p)
		{
		  enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
					       ? MODE_FLOAT : MODE_INT);

		  enum machine_mode reg_mode
		    = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);

		  if (reg_mode != BLKmode)
		    {
		      rtx mem = assign_stack_temp (reg_mode,
						   GET_MODE_SIZE (mode), 0);
		      rtx cmem = change_address (mem, mode, NULL_RTX);

		      cfun->cannot_inline
			= N_("function using short complex types cannot be inline");

		      if (packed_dest_p)
			{
			  rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
			  emit_move_insn_1 (cmem, y);
			  return emit_move_insn_1 (sreg, mem);
			}
		      else
			{
			  rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
			  emit_move_insn_1 (mem, sreg);
			  return emit_move_insn_1 (x, cmem);
			}
		    }
		}
	    }

	  realpart_x = gen_realpart (submode, x);
	  realpart_y = gen_realpart (submode, y);
	  imagpart_x = gen_imagpart (submode, x);
	  imagpart_y = gen_imagpart (submode, y);

	  /* Show the output dies here.  This is necessary for SUBREGs
	     of pseudos since we cannot track their lifetimes correctly;
	     hard regs shouldn't appear here except as return values.
	     We never want to emit such a clobber after reload.  */
	  if (x != y
	      && ! (reload_in_progress || reload_completed)
	      && (GET_CODE (realpart_x) == SUBREG
		  || GET_CODE (imagpart_x) == SUBREG))
	    {
	      emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
	    }

	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (realpart_x, realpart_y));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (imagpart_x, imagpart_y));
	}

      return get_last_insn ();
    }

  /* This will handle any multi-word mode that lacks a move_insn pattern.
     However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    {
      rtx last_insn = 0;
      rtx seq, inner;
      int need_clobber;

#ifdef PUSH_ROUNDING

      /* If X is a push on the stack, do the push now and replace
	 X with a reference to the stack pointer.  */
      if (push_operand (x, GET_MODE (x)))
	{
	  anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
	  x = change_address (x, VOIDmode, stack_pointer_rtx);
	}
#endif

      /* If we are in reload, see if either operand is a MEM whose address
	 is scheduled for replacement.  */
      if (reload_in_progress && GET_CODE (x) == MEM
	  && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
	{
	  rtx new = gen_rtx_MEM (GET_MODE (x), inner);

	  MEM_COPY_ATTRIBUTES (new, x);
	  x = new;
	}
      if (reload_in_progress && GET_CODE (y) == MEM
	  && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
	{
	  rtx new = gen_rtx_MEM (GET_MODE (y), inner);

	  MEM_COPY_ATTRIBUTES (new, y);
	  y = new;
	}

      start_sequence ();

      need_clobber = 0;
      for (i = 0;
	   i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
	   i++)
	{
	  rtx xpart = operand_subword (x, i, 1, mode);
	  rtx ypart = operand_subword (y, i, 1, mode);

	  /* If we can't get a part of Y, put Y into memory if it is a
	     constant.  Otherwise, force it into a register.  If we still
	     can't get a part of Y, abort.  */
	  if (ypart == 0 && CONSTANT_P (y))
	    {
	      y = force_const_mem (mode, y);
	      ypart = operand_subword (y, i, 1, mode);
	    }
	  else if (ypart == 0)
	    ypart = operand_subword_force (y, i, mode);

	  if (xpart == 0 || ypart == 0)
	    abort ();

	  need_clobber |= (GET_CODE (xpart) == SUBREG);

	  last_insn = emit_move_insn (xpart, ypart);
	}

      seq = gen_sequence ();
      end_sequence ();

      /* Show the output dies here.  This is necessary for SUBREGs
	 of pseudos since we cannot track their lifetimes correctly;
	 hard regs shouldn't appear here except as return values.
	 We never want to emit such a clobber after reload.  */
      if (x != y
	  && ! (reload_in_progress || reload_completed)
	  && need_clobber != 0)
	{
	  emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
	}

      emit_insn (seq);

      return last_insn;
    }
  else
    abort ();
}
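
/* For example (illustrative): an SCmode move on a target without an
   SCmode move pattern is split by the code above into two SFmode moves,
   one through gen_realpart and one through gen_imagpart, while a DImode
   move on a 32-bit target without a DImode pattern falls into the
   multi-word case and becomes two word-sized moves via
   operand_subword.  */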

/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (size, extra, below)
     rtx size;
     int extra, below;
{
  register rtx temp;

  size = convert_modes (Pmode, ptr_mode, size, 1);
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);
  else
    {
      temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
	temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
			     temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifndef STACK_GROWS_DOWNWARD
#ifdef ARGS_GROW_DOWNWARD
  if (!ACCUMULATE_OUTGOING_ARGS)
#else
  if (0)
#endif
#else
  if (1)
#endif
    {
      /* Return the lowest stack address when STACK or ARGS grow downward and
	 we are not accumulating outgoing arguments (the c4x port uses such
	 conventions).  */
      temp = virtual_outgoing_args_rtx;
      if (extra != 0 && below)
	temp = plus_constant (temp, extra);
    }
  else
    {
      if (GET_CODE (size) == CONST_INT)
	temp = plus_constant (virtual_outgoing_args_rtx,
			      -INTVAL (size) - (below ? 0 : extra));
      else if (extra != 0 && !below)
	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
			     negate_rtx (Pmode, plus_constant (size, extra)));
      else
	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
			     negate_rtx (Pmode, size));
    }

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}
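
/* Worked example (numbers illustrative): with a downward-growing stack
   and EXTRA == 0, a constant SIZE of 24 produces a single
   anti_adjust_stack of 24 bytes, after which the returned address is
   based on virtual_outgoing_args_rtx, i.e. the lowest address of the
   freshly allocated block.  */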

/* Return an rtx that applies STACK_PUSH_CODE to the stack pointer;
   a MEM of this serves as a push destination.  */

rtx
gen_push_operand ()
{
  return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
}

/* Return an rtx for the address of the beginning of an as-if-it-was-pushed
   block of SIZE bytes.  */

static rtx
get_push_address (size)
     int size;
{
  register rtx temp;

  if (STACK_PUSH_CODE == POST_DEC)
    temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
  else if (STACK_PUSH_CODE == POST_INC)
    temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
  else
    temp = stack_pointer_rtx;

  return copy_to_reg (temp);
}
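
/* Example (illustrative): with STACK_PUSH_CODE == POST_DEC each push
   stores and then decrements, so once SIZE bytes have been pushed the
   block begins SIZE bytes above the new stack pointer; the PLUS above
   reconstructs that address, and it is copied to a register so later
   stack adjustments do not invalidate it.  */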
3117
bbf6f052
RK
3118/* Generate code to push X onto the stack, assuming it has mode MODE and
3119 type TYPE.
3120 MODE is redundant except when X is a CONST_INT (since they don't
3121 carry mode info).
3122 SIZE is an rtx for the size of data to be copied (in bytes),
3123 needed only if X is BLKmode.
3124
19caa751 3125 ALIGN is maximum alignment we can assume.
bbf6f052 3126
cd048831
RK
3127 If PARTIAL and REG are both nonzero, then copy that many of the first
3128 words of X into registers starting with REG, and push the rest of X.
bbf6f052
RK
3129 The amount of space pushed is decreased by PARTIAL words,
3130 rounded *down* to a multiple of PARM_BOUNDARY.
3131 REG must be a hard register in this case.
cd048831
RK
3132 If REG is zero but PARTIAL is not, take any all others actions for an
3133 argument partially in registers, but do not actually load any
3134 registers.
bbf6f052
RK
3135
3136 EXTRA is the amount in bytes of extra space to leave next to this arg.
6dc42e49 3137 This is ignored if an argument block has already been allocated.
bbf6f052
RK
3138
3139 On a machine that lacks real push insns, ARGS_ADDR is the address of
3140 the bottom of the argument block for this call. We use indexing off there
3141 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
3142 argument block has not been preallocated.
3143
e5e809f4
JL
3144 ARGS_SO_FAR is the size of args previously pushed for this call.
3145
3146 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3147 for arguments passed in registers. If nonzero, it will be the number
3148 of bytes required. */
bbf6f052
RK
3149
3150void
3151emit_push_insn (x, mode, type, size, align, partial, reg, extra,
4fc026cd
CM
3152 args_addr, args_so_far, reg_parm_stack_space,
3153 alignment_pad)
bbf6f052
RK
3154 register rtx x;
3155 enum machine_mode mode;
3156 tree type;
3157 rtx size;
729a2125 3158 unsigned int align;
bbf6f052
RK
3159 int partial;
3160 rtx reg;
3161 int extra;
3162 rtx args_addr;
3163 rtx args_so_far;
e5e809f4 3164 int reg_parm_stack_space;
4fc026cd 3165 rtx alignment_pad;
bbf6f052
RK
3166{
3167 rtx xinner;
3168 enum direction stack_direction
3169#ifdef STACK_GROWS_DOWNWARD
3170 = downward;
3171#else
3172 = upward;
3173#endif
3174
3175 /* Decide where to pad the argument: `downward' for below,
3176 `upward' for above, or `none' for don't pad it.
3177 Default is below for small data on big-endian machines; else above. */
3178 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3179
3180 /* Invert direction if stack is post-update. */
3181 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
3182 if (where_pad != none)
3183 where_pad = (where_pad == downward ? upward : downward);
3184
3185 xinner = x = protect_from_queue (x, 0);
3186
3187 if (mode == BLKmode)
3188 {
3189 /* Copy a block into the stack, entirely or partially. */
3190
3191 register rtx temp;
3192 int used = partial * UNITS_PER_WORD;
3193 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3194 int skip;
3a94c984 3195
bbf6f052
RK
3196 if (size == 0)
3197 abort ();
3198
3199 used -= offset;
3200
3201 /* USED is now the # of bytes we need not copy to the stack
3202 because registers will take care of them. */
3203
3204 if (partial != 0)
3205 xinner = change_address (xinner, BLKmode,
3206 plus_constant (XEXP (xinner, 0), used));
3207
3208 /* If the partial register-part of the arg counts in its stack size,
3209 skip the part of stack space corresponding to the registers.
3210 Otherwise, start copying to the beginning of the stack space,
3211 by setting SKIP to 0. */
e5e809f4 3212 skip = (reg_parm_stack_space == 0) ? 0 : used;
bbf6f052
RK
3213
3214#ifdef PUSH_ROUNDING
3215 /* Do it with several push insns if that doesn't take lots of insns
3216 and if there is no difficulty with push insns that skip bytes
3217 on the stack for alignment purposes. */
3218 if (args_addr == 0
f73ad30e 3219 && PUSH_ARGS
bbf6f052
RK
3220 && GET_CODE (size) == CONST_INT
3221 && skip == 0
15914757 3222 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
bbf6f052
RK
3223 /* Here we avoid the case of a structure whose weak alignment
3224 forces many pushes of a small amount of data,
3225 and such small pushes do rounding that causes trouble. */
e1565e65 3226 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
19caa751 3227 || align >= BIGGEST_ALIGNMENT
bbf6f052 3228 || PUSH_ROUNDING (align) == align)
bbf6f052
RK
3229 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3230 {
3231 /* Push padding now if padding above and stack grows down,
3232 or if padding below and stack grows up.
3233 But if space already allocated, this has already been done. */
3234 if (extra && args_addr == 0
3235 && where_pad != none && where_pad != stack_direction)
906c4e36 3236 anti_adjust_stack (GEN_INT (extra));
bbf6f052 3237
1503a7ec 3238 stack_pointer_delta += INTVAL (size) - used;
38a448ca 3239 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
bbf6f052 3240 INTVAL (size) - used, align);
921b3427 3241
7d384cc0 3242 if (current_function_check_memory_usage && ! in_check_memory_usage)
921b3427
RK
3243 {
3244 rtx temp;
3a94c984 3245
956d6950 3246 in_check_memory_usage = 1;
3a94c984 3247 temp = get_push_address (INTVAL (size) - used);
c85f7c16 3248 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
ebb1b59a
BS
3249 emit_library_call (chkr_copy_bitmap_libfunc,
3250 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3251 Pmode, XEXP (xinner, 0), Pmode,
3a94c984 3252 GEN_INT (INTVAL (size) - used),
921b3427
RK
			       TYPE_MODE (sizetype));
	  else
	    emit_library_call (chkr_set_right_libfunc,
			       LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
			       Pmode, GEN_INT (INTVAL (size) - used),
			       TYPE_MODE (sizetype),
			       GEN_INT (MEMORY_USE_RW),
			       TYPE_MODE (integer_type_node));
	  in_check_memory_usage = 0;
	}
    }
  else
#endif /* PUSH_ROUNDING */
    {
      rtx target;

      /* Otherwise make space on the stack and copy the data
	 to the address of that space.  */

      /* Deduct words put into registers from the size we must copy.  */
      if (partial != 0)
	{
	  if (GET_CODE (size) == CONST_INT)
	    size = GEN_INT (INTVAL (size) - used);
	  else
	    size = expand_binop (GET_MODE (size), sub_optab, size,
				 GEN_INT (used), NULL_RTX, 0,
				 OPTAB_LIB_WIDEN);
	}

      /* Get the address of the stack space.
	 In this case, we do not deal with EXTRA separately.
	 A single stack adjust will do.  */
      if (! args_addr)
	{
	  temp = push_block (size, extra, where_pad == downward);
	  extra = 0;
	}
      else if (GET_CODE (args_so_far) == CONST_INT)
	temp = memory_address (BLKmode,
			       plus_constant (args_addr,
					      skip + INTVAL (args_so_far)));
      else
	temp = memory_address (BLKmode,
			       plus_constant (gen_rtx_PLUS (Pmode,
							    args_addr,
							    args_so_far),
					      skip));
      if (current_function_check_memory_usage && ! in_check_memory_usage)
	{
	  in_check_memory_usage = 1;
	  target = copy_to_reg (temp);
	  if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
	    emit_library_call (chkr_copy_bitmap_libfunc,
			       LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
			       target, Pmode,
			       XEXP (xinner, 0), Pmode,
			       size, TYPE_MODE (sizetype));
	  else
	    emit_library_call (chkr_set_right_libfunc,
			       LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
			       target, Pmode,
			       size, TYPE_MODE (sizetype),
			       GEN_INT (MEMORY_USE_RW),
			       TYPE_MODE (integer_type_node));
	  in_check_memory_usage = 0;
	}

      target = gen_rtx_MEM (BLKmode, temp);

      if (type != 0)
	{
	  set_mem_attributes (target, type, 1);
	  /* Function incoming arguments may overlap with sibling call
	     outgoing arguments and we cannot allow reordering of reads
	     from function arguments with stores to outgoing arguments
	     of sibling calls.  */
	  MEM_ALIAS_SET (target) = 0;
	}

      /* TEMP is the address of the block.  Copy the data there.  */
      if (GET_CODE (size) == CONST_INT
	  && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
	{
	  move_by_pieces (target, xinner, INTVAL (size), align);
	  goto ret;
	}
      else
	{
	  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
	  enum machine_mode mode;

	  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	       mode != VOIDmode;
	       mode = GET_MODE_WIDER_MODE (mode))
	    {
	      enum insn_code code = movstr_optab[(int) mode];
	      insn_operand_predicate_fn pred;

	      if (code != CODE_FOR_nothing
		  && ((GET_CODE (size) == CONST_INT
		       && ((unsigned HOST_WIDE_INT) INTVAL (size)
			   <= (GET_MODE_MASK (mode) >> 1)))
		      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
		  && (!(pred = insn_data[(int) code].operand[0].predicate)
		      || ((*pred) (target, BLKmode)))
		  && (!(pred = insn_data[(int) code].operand[1].predicate)
		      || ((*pred) (xinner, BLKmode)))
		  && (!(pred = insn_data[(int) code].operand[3].predicate)
		      || ((*pred) (opalign, VOIDmode))))
		{
		  rtx op2 = convert_to_mode (mode, size, 1);
		  rtx last = get_last_insn ();
		  rtx pat;

		  pred = insn_data[(int) code].operand[2].predicate;
		  if (pred != 0 && ! (*pred) (op2, mode))
		    op2 = copy_to_mode_reg (mode, op2);

		  pat = GEN_FCN ((int) code) (target, xinner,
					      op2, opalign);
		  if (pat)
		    {
		      emit_insn (pat);
		      goto ret;
		    }
		  else
		    delete_insns_since (last);
		}
	    }
	}

      if (!ACCUMULATE_OUTGOING_ARGS)
	{
	  /* If the source is referenced relative to the stack pointer,
	     copy it to another register to stabilize it.  We do not need
	     to do this if we know that we won't be changing sp.  */

	  if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
	      || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
	    temp = copy_to_reg (temp);
	}

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the bcopy-arguments right away.  */
      NO_DEFER_POP;
#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, LCT_NORMAL,
			 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype),
					  size, TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, LCT_NORMAL,
			 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
			 convert_to_mode (TYPE_MODE (integer_type_node),
					  size,
					  TREE_UNSIGNED (integer_type_node)),
			 TYPE_MODE (integer_type_node));
#endif
      OK_DEFER_POP;
    }
}
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # words of start of argument
	 that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

      /* If we make space by pushing it, we might as well push
	 the real data.  Otherwise, we can leave OFFSET nonzero
	 and leave the space uninitialized.  */
      if (args_addr == 0)
	offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
	 allocate on the stack.  */
      not_stack = partial - offset;

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;

      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
	x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
	 SUBREGs of such registers are not allowed.  */
      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
	x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
	 has a size a multiple of a word.  */
#ifndef PUSH_ARGS_REVERSED
      for (i = not_stack; i < size; i++)
#else
      for (i = size - 1; i >= not_stack; i--)
#endif
	if (i >= not_stack + offset)
	  emit_push_insn (operand_subword_force (x, i, mode),
			  word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
			  0, args_addr,
			  GEN_INT (args_offset + ((i - not_stack + skip)
						  * UNITS_PER_WORD)),
			  reg_parm_stack_space, alignment_pad);
    }
  else
    {
      rtx addr;
      rtx target = NULL_RTX;
      rtx dest;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0 && PUSH_ARGS)
	{
	  addr = gen_push_operand ();
	  stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
	}
      else
#endif
	{
	  if (GET_CODE (args_so_far) == CONST_INT)
	    addr
	      = memory_address (mode,
				plus_constant (args_addr,
					       INTVAL (args_so_far)));
	  else
	    addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
						       args_so_far));
	  target = addr;
	}

      dest = gen_rtx_MEM (mode, addr);
      if (type != 0)
	{
	  set_mem_attributes (dest, type, 1);
	  /* Function incoming arguments may overlap with sibling call
	     outgoing arguments and we cannot allow reordering of reads
	     from function arguments with stores to outgoing arguments
	     of sibling calls.  */
	  MEM_ALIAS_SET (dest) = 0;
	}

      emit_move_insn (dest, x);

      if (current_function_check_memory_usage && ! in_check_memory_usage)
	{
	  in_check_memory_usage = 1;
	  if (target == 0)
	    target = get_push_address (GET_MODE_SIZE (mode));

	  if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
	    emit_library_call (chkr_copy_bitmap_libfunc,
			       LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
			       Pmode, XEXP (x, 0), Pmode,
			       GEN_INT (GET_MODE_SIZE (mode)),
			       TYPE_MODE (sizetype));
	  else
	    emit_library_call (chkr_set_right_libfunc,
			       LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
			       Pmode, GEN_INT (GET_MODE_SIZE (mode)),
			       TYPE_MODE (sizetype),
			       GEN_INT (MEMORY_USE_RW),
			       TYPE_MODE (integer_type_node));
	  in_check_memory_usage = 0;
	}
    }

 ret:
  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
  if (partial > 0 && reg != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (reg) == PARALLEL)
	emit_group_load (reg, x, -1, align);  /* ??? size?  */
      else
	move_block_to_reg (REGNO (reg), x, partial, mode);
    }

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (GEN_INT (extra));

  if (alignment_pad && args_addr == 0)
    anti_adjust_stack (alignment_pad);
}
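
/* Editorial aside, not part of expr.c: a minimal standalone sketch of the
   strategy in the block-move loop above, which walks the movstr patterns
   (movstrqi, movstrhi, ...) from the narrowest integer mode to the widest,
   checks that the byte count fits the pattern's count operand, and falls
   back to the next candidate (ultimately memcpy/bcopy) when a pattern
   cannot be used.  The table and try_move below are hypothetical stand-ins
   for movstr_optab and GEN_FCN; only the control flow mirrors the real
   code.  */
#if 0
#include <stdio.h>

struct fake_pattern
{
  const char *name;
  unsigned long max_count;	/* Largest count the operand can encode.  */
  int available;		/* Analogue of code != CODE_FOR_nothing.  */
};

/* Return 1 on success, 0 to make the caller try a wider mode,
   mimicking a pattern that FAILs.  */
static int
try_move (const struct fake_pattern *p, unsigned long nbytes)
{
  if (nbytes > p->max_count)
    return 0;
  printf ("copied %lu bytes with %s\n", nbytes, p->name);
  return 1;
}

int
main (void)
{
  static const struct fake_pattern table[] = {
    { "movstrqi", 255ul, 1 },		/* QImode count operand.  */
    { "movstrhi", 65535ul, 0 },		/* Not available on this target.  */
    { "movstrsi", 2147483647ul, 1 }	/* SImode count operand.  */
  };
  unsigned long nbytes = 100000ul;
  size_t i;

  for (i = 0; i < sizeof table / sizeof table[0]; i++)
    if (table[i].available && try_move (&table[i], nbytes))
      return 0;

  printf ("no pattern matched; fall back to memcpy/bcopy\n");
  return 0;
}
#endif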
\f
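/* Editorial aside, not part of expr.c: the index arithmetic of the
   "scalar partly in registers" case above, as a standalone program.  The
   numbers are hypothetical (a target with 32-bit words, PARM_BOUNDARY of
   64 bits, a five-word argument with three words passed in registers),
   and the scenario is the preallocated-argument-block case (args_addr
   nonzero), where OFFSET words get stack space but are never stored.  */
#if 0
#include <stdio.h>

int
main (void)
{
  const int units_per_word = 4;		/* UNITS_PER_WORD.  */
  const int boundary_words = 2;		/* PARM_BOUNDARY / BITS_PER_WORD.  */
  int size = 5;				/* Total words in the argument.  */
  int partial = 3;			/* Words already in registers.  */
  int reg_parm_stack_space = 0;		/* As on most targets.  */

  /* Register words at the start that get stack space anyway, because
     the register part is rounded up to the parameter boundary.  */
  int offset = partial % boundary_words;
  int not_stack = partial - offset;
  int skip = reg_parm_stack_space == 0 ? 0 : not_stack;
  int i;

  for (i = 0; i < size; i++)
    {
      if (i < not_stack)
	printf ("word %d: in registers, no stack slot\n", i);
      else if (i < not_stack + offset)
	printf ("word %d: in a register; slot reserved, not stored\n", i);
      else
	printf ("word %d: stored at byte offset %d\n",
		i, (i - not_stack + skip) * units_per_word);
    }
  return 0;
}
#endif
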
/* Return X if X can be used as a subtarget in a sequence of arithmetic
   operations.  */

static rtx
get_subtarget (x)
     rtx x;
{
  return ((x == 0
	   /* Only registers can be subtargets.  */
	   || GET_CODE (x) != REG
	   /* If the register is readonly, it can't be set more than once.  */
	   || RTX_UNCHANGING_P (x)
	   /* Don't use hard regs to avoid extending their life.  */
	   || REGNO (x) < FIRST_PSEUDO_REGISTER
	   /* Avoid subtargets inside loops,
	      since they hide some invariant expressions.  */
	   || preserve_subexpressions_p ())
	  ? 0 : x);
}

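/* Editorial aside, not part of expr.c: the same reuse test written as a
   standalone predicate over a toy description of a candidate location.
   The struct and its fields are hypothetical; the point is only that any
   one disqualifying property makes the caller allocate a fresh temporary
   (return 0) instead of computing into X.  */
#if 0
#include <stdio.h>

struct toy_loc
{
  int is_reg;		/* Only registers can be subtargets.  */
  int unchanging;	/* Read-only: may be set at most once.  */
  int is_hard_reg;	/* Don't extend the life of hard registers.  */
  int inside_loop;	/* Subtargets hide loop-invariant expressions.  */
};

static const struct toy_loc *
toy_get_subtarget (const struct toy_loc *x)
{
  if (x == 0 || ! x->is_reg || x->unchanging
      || x->is_hard_reg || x->inside_loop)
    return 0;
  return x;
}

int
main (void)
{
  struct toy_loc pseudo = { 1, 0, 0, 0 };
  struct toy_loc hard_reg = { 1, 0, 1, 0 };

  printf ("pseudo reusable: %d\n", toy_get_subtarget (&pseudo) != 0);	  /* 1 */
  printf ("hard reg reusable: %d\n", toy_get_subtarget (&hard_reg) != 0); /* 0 */
  return 0;
}
#endif
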
/* Expand an assignment that stores the value of FROM into TO.
   If WANT_VALUE is nonzero, return an rtx for the value of TO.
   (This may contain a QUEUED rtx;
   if the value is constant, this rtx is a constant.)
   Otherwise, the returned value is NULL_RTX.

   SUGGEST_REG is no longer actually used.
   It used to mean, copy the value through a register
   and return that register, if that is possible.
   We now use WANT_VALUE to decide whether to do this.  */

rtx
expand_assignment (to, from, want_value, suggest_reg)
     tree to, from;
     int want_value;
     int suggest_reg ATTRIBUTE_UNUSED;
{
  register rtx to_rtx = 0;
  rtx result;

  /* Don't crash if the lhs of the assignment was erroneous.  */

  if (TREE_CODE (to) == ERROR_MARK)
    {
      result = expand_expr (from, NULL_RTX, VOIDmode, 0);
      return want_value ? result : NULL_RTX;
    }

  /* Assignment of a structure component needs special treatment
     if the structure component's rtx is not simply a MEM.
     Assignment of an array element at a constant index, and assignment of
     an array element in an unaligned packed structure field, have the same
     problem.  */

  if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
      || TREE_CODE (to) == ARRAY_REF)
    {
      enum machine_mode mode1;
      HOST_WIDE_INT bitsize, bitpos;
      tree offset;
      int unsignedp;
      int volatilep = 0;
      tree tem;
      unsigned int alignment;

      push_temp_slots ();
      tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
				 &unsignedp, &volatilep, &alignment);

      /* If we are going to use store_bit_field and extract_bit_field,
	 make sure to_rtx will be safe for multiple use.  */

      if (mode1 == VOIDmode && want_value)
	tem = stabilize_reference (tem);

      to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
      if (offset != 0)
	{
	  rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);

	  if (GET_CODE (to_rtx) != MEM)
	    abort ();

	  if (GET_MODE (offset_rtx) != ptr_mode)
	    {
#ifdef POINTERS_EXTEND_UNSIGNED
	      offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
#else
	      offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif
	    }

	  /* A constant address in TO_RTX can have VOIDmode; we must not try
	     to call force_reg for that case.  Avoid that case.  */
	  if (GET_CODE (to_rtx) == MEM
	      && GET_MODE (to_rtx) == BLKmode
	      && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
	      && bitsize
	      && (bitpos % bitsize) == 0
	      && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
	      && alignment == GET_MODE_ALIGNMENT (mode1))
	    {
	      rtx temp = change_address (to_rtx, mode1,
					 plus_constant (XEXP (to_rtx, 0),
							(bitpos /
							 BITS_PER_UNIT)));
	      if (GET_CODE (XEXP (temp, 0)) == REG)
		to_rtx = temp;
	      else
		to_rtx = change_address (to_rtx, mode1,
					 force_reg (GET_MODE (XEXP (temp, 0)),
						    XEXP (temp, 0)));
	      bitpos = 0;
	    }

	  to_rtx = change_address (to_rtx, VOIDmode,
				   gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
						 force_reg (ptr_mode,
							    offset_rtx)));
	}

      if (volatilep)
	{
	  if (GET_CODE (to_rtx) == MEM)
	    {
	      /* When the offset is zero, to_rtx is the address of the
		 structure we are storing into, and hence may be shared.
		 We must make a new MEM before setting the volatile bit.  */
	      if (offset == 0)
		to_rtx = copy_rtx (to_rtx);

	      MEM_VOLATILE_P (to_rtx) = 1;
	    }
#if 0  /* This was turned off because, when a field is volatile
	  in an object which is not volatile, the object may be in a register,
	  and then we would abort over here.  */
	  else
	    abort ();
#endif
	}

      if (TREE_CODE (to) == COMPONENT_REF
	  && TREE_READONLY (TREE_OPERAND (to, 1)))
	{
	  if (offset == 0)
	    to_rtx = copy_rtx (to_rtx);

	  RTX_UNCHANGING_P (to_rtx) = 1;
	}

      /* Check the access.  */
      if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
	{
	  rtx to_addr;
	  int size;
	  int best_mode_size;
	  enum machine_mode best_mode;

	  best_mode = get_best_mode (bitsize, bitpos,
				     TYPE_ALIGN (TREE_TYPE (tem)),
				     mode1, volatilep);
	  if (best_mode == VOIDmode)
	    best_mode = QImode;

	  best_mode_size = GET_MODE_BITSIZE (best_mode);
	  to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
	  size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
	  size *= GET_MODE_SIZE (best_mode);

	  /* Check the access right of the pointer.  */
	  in_check_memory_usage = 1;
	  if (size)
	    emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
			       VOIDmode, 3, to_addr, Pmode,
			       GEN_INT (size), TYPE_MODE (sizetype),
			       GEN_INT (MEMORY_USE_WO),
			       TYPE_MODE (integer_type_node));
	  in_check_memory_usage = 0;
	}

      /* If this is a varying-length object, we must get the address of
	 the source and do an explicit block move.  */
      if (bitsize < 0)
	{
	  unsigned int from_align;
	  rtx from_rtx = expand_expr_unaligned (from, &from_align);
	  rtx inner_to_rtx
	    = change_address (to_rtx, VOIDmode,
			      plus_constant (XEXP (to_rtx, 0),
					     bitpos / BITS_PER_UNIT));

	  emit_block_move (inner_to_rtx, from_rtx, expr_size (from),
			   MIN (alignment, from_align));
	  free_temp_slots ();
	  pop_temp_slots ();
	  return to_rtx;
	}
      else
	{
	  result = store_field (to_rtx, bitsize, bitpos, mode1, from,
				(want_value
				 /* Spurious cast for HPUX compiler.  */
				 ? ((enum machine_mode)
				    TYPE_MODE (TREE_TYPE (to)))
				 : VOIDmode),
				unsignedp,
				alignment,
				int_size_in_bytes (TREE_TYPE (tem)),
				get_alias_set (to));

	  preserve_temp_slots (result);
	  free_temp_slots ();
	  pop_temp_slots ();

	  /* If the value is meaningful, convert RESULT to the proper mode.
	     Otherwise, return nothing.  */
	  return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
					      TYPE_MODE (TREE_TYPE (from)),
					      result,
					      TREE_UNSIGNED (TREE_TYPE (to)))
		  : NULL_RTX);
	}
    }

  /* If the rhs is a function call and its value is not an aggregate,
     call the function before we start to compute the lhs.
     This is needed for correct code for cases such as
     val = setjmp (buf) on machines where reference to val
     requires loading up part of an address in a separate insn.

     Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
     since it might be a promoted variable where the zero- or sign-extension
     needs to be done.  Handling this in the normal way is safe because no
     computation is done before the call.  */
  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
      && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
	    && GET_CODE (DECL_RTL (to)) == REG))
    {
      rtx value;

      push_temp_slots ();
      value = expand_expr (from, NULL_RTX, VOIDmode, 0);
      if (to_rtx == 0)
	to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);

      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (to_rtx) == PARALLEL)
	emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
			 TYPE_ALIGN (TREE_TYPE (from)));
      else if (GET_MODE (to_rtx) == BLKmode)
	emit_block_move (to_rtx, value, expr_size (from),
			 TYPE_ALIGN (TREE_TYPE (from)));
      else
	{
#ifdef POINTERS_EXTEND_UNSIGNED
	  if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
	      || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
	    value = convert_memory_address (GET_MODE (to_rtx), value);
#endif
	  emit_move_insn (to_rtx, value);
	}
      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */

  if (to_rtx == 0)
    {
      to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
      if (GET_CODE (to_rtx) == MEM)
	MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
    }

  /* Don't move directly into a return register.  */
  if (TREE_CODE (to) == RESULT_DECL
      && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
    {
      rtx temp;

      push_temp_slots ();
      temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);

      if (GET_CODE (to_rtx) == PARALLEL)
	emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
			 TYPE_ALIGN (TREE_TYPE (from)));
      else
	emit_move_insn (to_rtx, temp);

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* In case we are returning the contents of an object which overlaps
     the place the value is being stored, use a safe function when copying
     a value through a pointer into a structure value return block.  */
  if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
      && current_function_returns_struct
      && !current_function_returns_pcc_struct)
    {
      rtx from_rtx, size;

      push_temp_slots ();
      size = expr_size (from);
      from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
			      EXPAND_MEMORY_USE_DONT);

      /* Copy the rights of the bitmap.  */
      if (current_function_check_memory_usage)
	emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
			   VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
			   XEXP (from_rtx, 0), Pmode,
			   convert_to_mode (TYPE_MODE (sizetype),
					    size, TREE_UNSIGNED (sizetype)),
			   TYPE_MODE (sizetype));

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, LCT_NORMAL,
			 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
			 XEXP (from_rtx, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype),
					  size, TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, LCT_NORMAL,
			 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
			 XEXP (to_rtx, 0), Pmode,
			 convert_to_mode (TYPE_MODE (integer_type_node),
					  size,
					  TREE_UNSIGNED (integer_type_node)),
			 TYPE_MODE (integer_type_node));
#endif

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* Compute FROM and store the value in the rtx we got.  */

  push_temp_slots ();
  result = store_expr (from, to_rtx, want_value);
  preserve_temp_slots (result);
  free_temp_slots ();
  pop_temp_slots ();
  return want_value ? result : NULL_RTX;
}

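/* Editorial aside, not part of expr.c: the bit-field path above ends up in
   store_field/store_bit_field, which handle many RTL-level cases (BLKmode,
   unaligned references, registers).  For an aligned field inside one
   32-bit word, the read-modify-write they perform reduces to the
   shift-and-mask below; a standalone sketch, not the real routine.  */
#if 0
#include <stdio.h>

/* Store the low BITSIZE bits of VALUE at bit position BITPOS of WORD,
   leaving all other bits of WORD intact.  Requires bitpos + bitsize <= 32.  */
static unsigned int
store_bits_demo (unsigned int word, int bitpos, int bitsize,
		 unsigned int value)
{
  unsigned int mask = bitsize >= 32 ? ~0u : (1u << bitsize) - 1;

  return (word & ~(mask << bitpos)) | ((value & mask) << bitpos);
}

int
main (void)
{
  /* Store the 4-bit value 0xa at bit position 8 of an all-ones word;
     prints fffffaff.  */
  printf ("%08x\n", store_bits_demo (0xffffffffu, 8, 4, 0xau));
  return 0;
}
#endif
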
/* Generate code for computing expression EXP,
   and storing the value into TARGET.
   TARGET may contain a QUEUED rtx.

   If WANT_VALUE is nonzero, return a copy of the value
   not in TARGET, so that we can be sure to use the proper
   value in a containing expression even if TARGET has something
   else stored in it.  If possible, we copy the value through a pseudo
   and return that pseudo.  Or, if the value is constant, we try to
   return the constant.  In some cases, we return a pseudo
   copied *from* TARGET.

   If the mode is BLKmode then we may return TARGET itself.
   It turns out that in BLKmode it doesn't cause a problem,
   because C has no operators that could combine two different
   assignments into the same BLKmode object with different values
   with no sequence point.  Will other languages need this to
   be more thorough?

   If WANT_VALUE is 0, we return NULL, to make sure
   to catch quickly any cases where the caller uses the value
   and fails to set WANT_VALUE.  */

rtx
store_expr (exp, target, want_value)
     register tree exp;
     register rtx target;
     int want_value;
{
  register rtx temp;
  int dont_return_target = 0;
  int dont_store_target = 0;

  if (TREE_CODE (exp) == COMPOUND_EXPR)
    {
      /* Perform first part of compound expression, then assign from second
	 part.  */
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      emit_queue ();
      return store_expr (TREE_OPERAND (exp, 1), target, want_value);
    }
  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
    {
      /* For conditional expression, get safe form of the target.  Then
	 test the condition, doing the appropriate assignment on either
	 side.  This avoids the creation of unnecessary temporaries.
	 For non-BLKmode, it is more efficient not to do this.  */

      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();

      emit_queue ();
      target = protect_from_queue (target, 1);

      do_pending_stack_adjust ();
      NO_DEFER_POP;
      jumpifnot (TREE_OPERAND (exp, 0), lab1);
      start_cleanup_deferral ();
      store_expr (TREE_OPERAND (exp, 1), target, 0);
      end_cleanup_deferral ();
      emit_queue ();
      emit_jump_insn (gen_jump (lab2));
      emit_barrier ();
      emit_label (lab1);
      start_cleanup_deferral ();
      store_expr (TREE_OPERAND (exp, 2), target, 0);
      end_cleanup_deferral ();
      emit_queue ();
      emit_label (lab2);
      OK_DEFER_POP;

      return want_value ? target : NULL_RTX;
    }
  else if (queued_subexp_p (target))
    /* If target contains a postincrement, let's not risk
       using it as the place to generate the rhs.  */
    {
      if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
	{
	  /* Expand EXP into a new pseudo.  */
	  temp = gen_reg_rtx (GET_MODE (target));
	  temp = expand_expr (exp, temp, GET_MODE (target), 0);
	}
      else
	temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);

      /* If target is volatile, ANSI requires accessing the value
	 *from* the target, if it is accessed.  So make that happen.
	 In no case return the target itself.  */
      if (! MEM_VOLATILE_P (target) && want_value)
	dont_return_target = 1;
    }
  else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
	   && GET_MODE (target) != BLKmode)
    /* If target is in memory and caller wants value in a register instead,
       arrange that.  Pass TARGET as target for expand_expr so that,
       if EXP is another assignment, WANT_VALUE will be nonzero for it.
       We know expand_expr will not use the target in that case.
       Don't do this if TARGET is volatile because we are supposed
       to write it and then read it.  */
    {
      temp = expand_expr (exp, target, GET_MODE (target), 0);
      if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
	{
	  /* If TEMP is already in the desired TARGET, only copy it from
	     memory and don't store it there again.  */
	  if (temp == target
	      || (rtx_equal_p (temp, target)
		  && ! side_effects_p (temp) && ! side_effects_p (target)))
	    dont_store_target = 1;
	  temp = copy_to_reg (temp);
	}
      dont_return_target = 1;
    }
  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
    /* If this is a scalar in a register that is stored in a wider mode
       than the declared mode, compute the result into its declared mode
       and then convert to the wider mode.  Our value is the computed
       expression.  */
    {
      /* If we don't want a value, we can do the conversion inside EXP,
	 which will often result in some optimizations.  Do the conversion
	 in two steps: first change the signedness, if needed, then
	 the extend.  But don't do this if the type of EXP is a subtype
	 of something else since then the conversion might involve
	 more than just converting modes.  */
      if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
	  && TREE_TYPE (TREE_TYPE (exp)) == 0)
	{
	  if (TREE_UNSIGNED (TREE_TYPE (exp))
	      != SUBREG_PROMOTED_UNSIGNED_P (target))
	    exp
	      = convert
		(signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
					  TREE_TYPE (exp)),
		 exp);

	  exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
					SUBREG_PROMOTED_UNSIGNED_P (target)),
			 exp);
	}

      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);

      /* If TEMP is a volatile MEM and we want a result value, make
	 the access now so it gets done only once.  Likewise if
	 it contains TARGET.  */
      if (GET_CODE (temp) == MEM && want_value
	  && (MEM_VOLATILE_P (temp)
	      || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
	temp = copy_to_reg (temp);

      /* If TEMP is a VOIDmode constant, use convert_modes to make
	 sure that we properly convert it.  */
      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
	temp = convert_modes (GET_MODE (SUBREG_REG (target)),
			      TYPE_MODE (TREE_TYPE (exp)), temp,
			      SUBREG_PROMOTED_UNSIGNED_P (target));

      convert_move (SUBREG_REG (target), temp,
		    SUBREG_PROMOTED_UNSIGNED_P (target));

      /* If we promoted a constant, change the mode back down to match
	 target.  Otherwise, the caller might get confused by a result whose
	 mode is larger than expected.  */

      if (want_value && GET_MODE (temp) != GET_MODE (target)
	  && GET_MODE (temp) != VOIDmode)
	{
	  temp = gen_rtx_SUBREG (GET_MODE (target), temp, 0);
	  SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_UNSIGNED_P (temp)
	    = SUBREG_PROMOTED_UNSIGNED_P (target);
	}

      return want_value ? temp : NULL_RTX;
    }
  else
    {
      temp = expand_expr (exp, target, GET_MODE (target), 0);
      /* Return TARGET if it's a specified hardware register.
	 If TARGET is a volatile mem ref, either return TARGET
	 or return a reg copied *from* TARGET; ANSI requires this.

	 Otherwise, if TEMP is not TARGET, return TEMP
	 if it is constant (for efficiency),
	 or if we really want the correct value.  */
      if (!(target && GET_CODE (target) == REG
	    && REGNO (target) < FIRST_PSEUDO_REGISTER)
	  && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
	  && ! rtx_equal_p (temp, target)
	  && (CONSTANT_P (temp) || want_value))
	dont_return_target = 1;
    }

  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
     the same as that of TARGET, adjust the constant.  This is needed, for
     example, in case it is a CONST_DOUBLE and we want only a word-sized
     value.  */
  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
      && TREE_CODE (exp) != ERROR_MARK
      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
			  temp, TREE_UNSIGNED (TREE_TYPE (exp)));

  if (current_function_check_memory_usage
      && GET_CODE (target) == MEM
      && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
    {
      in_check_memory_usage = 1;
      if (GET_CODE (temp) == MEM)
	emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
			   VOIDmode, 3, XEXP (target, 0), Pmode,
			   XEXP (temp, 0), Pmode,
			   expr_size (exp), TYPE_MODE (sizetype));
      else
	emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
			   VOIDmode, 3, XEXP (target, 0), Pmode,
			   expr_size (exp), TYPE_MODE (sizetype),
			   GEN_INT (MEMORY_USE_WO),
			   TYPE_MODE (integer_type_node));
      in_check_memory_usage = 0;
    }

  /* If value was not generated in the target, store it there.
     Convert the value to TARGET's type first if necessary.  */
  /* If TEMP and TARGET compare equal according to rtx_equal_p, but
     one or both of them are volatile memory refs, we have to distinguish
     two cases:
     - expand_expr has used TARGET.  In this case, we must not generate
       another copy.  This can be detected by TARGET being equal according
       to == .
     - expand_expr has not used TARGET - that means that the source just
       happens to have the same RTX form.  Since temp will have been created
       by expand_expr, it will compare unequal according to == .
       We must generate a copy in this case, to reach the correct number
       of volatile memory references.  */

  if ((! rtx_equal_p (temp, target)
       || (temp != target && (side_effects_p (temp)
			      || side_effects_p (target))))
      && TREE_CODE (exp) != ERROR_MARK
      && ! dont_store_target)
    {
      target = protect_from_queue (target, 1);
      if (GET_MODE (temp) != GET_MODE (target)
	  && GET_MODE (temp) != VOIDmode)
	{
	  int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
	  if (dont_return_target)
	    {
	      /* In this case, we will return TEMP,
		 so make sure it has the proper mode.
		 But don't forget to store the value into TARGET.  */
	      temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
	      emit_move_insn (target, temp);
	    }
	  else
	    convert_move (target, temp, unsignedp);
	}

      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
	{
	  /* Handle copying a string constant into an array.
	     The string constant may be shorter than the array.
	     So copy just the string's actual length, and clear the rest.  */
	  rtx size;
	  rtx addr;

	  /* Get the size of the data type of the string,
	     which is actually the size of the target.  */
	  size = expr_size (exp);
	  if (GET_CODE (size) == CONST_INT
	      && INTVAL (size) < TREE_STRING_LENGTH (exp))
	    emit_block_move (target, temp, size, TYPE_ALIGN (TREE_TYPE (exp)));
	  else
	    {
	      /* Compute the size of the data to copy from the string.  */
	      tree copy_size
		= size_binop (MIN_EXPR,
			      make_tree (sizetype, size),
			      size_int (TREE_STRING_LENGTH (exp)));
	      unsigned int align = TYPE_ALIGN (TREE_TYPE (exp));
	      rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
					       VOIDmode, 0);
	      rtx label = 0;

	      /* Copy that much.  */
	      emit_block_move (target, temp, copy_size_rtx,
			       TYPE_ALIGN (TREE_TYPE (exp)));

	      /* Figure out how much is left in TARGET that we have to clear.
		 Do all calculations in ptr_mode.  */

	      addr = XEXP (target, 0);
	      addr = convert_modes (ptr_mode, Pmode, addr, 1);

	      if (GET_CODE (copy_size_rtx) == CONST_INT)
		{
		  addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
		  size = plus_constant (size, -TREE_STRING_LENGTH (exp));
		  align = MIN (align,
			       (unsigned int) (BITS_PER_UNIT
					       * (INTVAL (copy_size_rtx)
						  & - INTVAL (copy_size_rtx))));
		}
	      else
		{
		  addr = force_reg (ptr_mode, addr);
		  addr = expand_binop (ptr_mode, add_optab, addr,
				       copy_size_rtx, NULL_RTX, 0,
				       OPTAB_LIB_WIDEN);

		  size = expand_binop (ptr_mode, sub_optab, size,
				       copy_size_rtx, NULL_RTX, 0,
				       OPTAB_LIB_WIDEN);

		  align = BITS_PER_UNIT;
		  label = gen_label_rtx ();
		  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
					   GET_MODE (size), 0, 0, label);
		}
	      align = MIN (align, expr_align (copy_size));

	      if (size != const0_rtx)
		{
		  rtx dest = gen_rtx_MEM (BLKmode, addr);

		  MEM_COPY_ATTRIBUTES (dest, target);

		  /* Be sure we can write on ADDR.  */
		  in_check_memory_usage = 1;
		  if (current_function_check_memory_usage)
		    emit_library_call (chkr_check_addr_libfunc,
				       LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
				       addr, Pmode,
				       size, TYPE_MODE (sizetype),
				       GEN_INT (MEMORY_USE_WO),
				       TYPE_MODE (integer_type_node));
		  in_check_memory_usage = 0;
		  clear_storage (dest, size, align);
		}

	      if (label)
		emit_label (label);
	    }
	}
      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      else if (GET_CODE (target) == PARALLEL)
	emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
			 TYPE_ALIGN (TREE_TYPE (exp)));
      else if (GET_MODE (temp) == BLKmode)
	emit_block_move (target, temp, expr_size (exp),
			 TYPE_ALIGN (TREE_TYPE (exp)));
      else
	emit_move_insn (target, temp);
    }

  /* If we don't want a value, return NULL_RTX.  */
  if (! want_value)
    return NULL_RTX;

  /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
     ??? The latter test doesn't seem to make sense.  */
  else if (dont_return_target && GET_CODE (temp) != MEM)
    return temp;

  /* Return TARGET itself if it is a hard register.  */
  else if (want_value && GET_MODE (target) != BLKmode
	   && ! (GET_CODE (target) == REG
		 && REGNO (target) < FIRST_PSEUDO_REGISTER))
    return copy_to_reg (target);

  else
    return target;
}
\f
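/* Editorial aside, not part of expr.c: the STRING_CST case above in
   ordinary C terms.  When a string constant initializes a larger array,
   only the constant's TREE_STRING_LENGTH bytes are copied and the tail is
   cleared; below is a standalone equivalent of that emit_block_move plus
   clear_storage sequence, using memcpy and memset.  */
#if 0
#include <stdio.h>
#include <string.h>

static void
init_array_from_string (char *target, size_t target_size,
			const char *str, size_t str_len)
{
  size_t copy_size = str_len < target_size ? str_len : target_size;

  memcpy (target, str, copy_size);	/* The emit_block_move part.  */
  if (copy_size < target_size)		/* The clear_storage part.  */
    memset (target + copy_size, 0, target_size - copy_size);
}

int
main (void)
{
  char buf[16];

  /* Six bytes of data ("hello" plus its NUL), ten bytes cleared.  */
  init_array_from_string (buf, sizeof buf, "hello", 6);
  printf ("%s\n", buf);
  return 0;
}
#endif
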
/* Return 1 if EXP just contains zeros.  */

static int
is_zeros_p (exp)
     tree exp;
{
  tree elt;

  switch (TREE_CODE (exp))
    {
    case CONVERT_EXPR:
    case NOP_EXPR:
    case NON_LVALUE_EXPR:
      return is_zeros_p (TREE_OPERAND (exp, 0));

    case INTEGER_CST:
      return integer_zerop (exp);

    case COMPLEX_CST:
      return
	is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));

    case REAL_CST:
      return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);

    case CONSTRUCTOR:
      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
	return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	if (! is_zeros_p (TREE_VALUE (elt)))
	  return 0;

      return 1;

    default:
      return 0;
    }
}

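/* Editorial aside, not part of expr.c: the same recursive shape as
   is_zeros_p, over a hypothetical initializer tree instead of GCC trees.
   A node is either a scalar with a value or a constructor with children;
   the whole tree is "all zeros" iff every leaf is zero.  */
#if 0
#include <stdio.h>

struct toy_init
{
  int is_constructor;
  long value;			/* Used when !is_constructor.  */
  struct toy_init **elts;	/* NULL-terminated when is_constructor.  */
};

static int
toy_is_zeros (const struct toy_init *exp)
{
  struct toy_init **p;

  if (! exp->is_constructor)
    return exp->value == 0;

  if (exp->elts)
    for (p = exp->elts; *p; p++)
      if (! toy_is_zeros (*p))
	return 0;

  return 1;
}

int
main (void)
{
  struct toy_init zero = { 0, 0, 0 };
  struct toy_init one = { 0, 1, 0 };
  struct toy_init *inner_elts[] = { &zero, &zero, 0 };
  struct toy_init inner = { 1, 0, inner_elts };
  struct toy_init *outer_elts[] = { &inner, &one, 0 };
  struct toy_init outer = { 1, 0, outer_elts };

  printf ("inner: %d\n", toy_is_zeros (&inner));	/* 1 */
  printf ("outer: %d\n", toy_is_zeros (&outer));	/* 0 */
  return 0;
}
#endif
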
/* Return 1 if EXP contains mostly (3/4) zeros.  */

static int
mostly_zeros_p (exp)
     tree exp;
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      int elts = 0, zeros = 0;
      tree elt = CONSTRUCTOR_ELTS (exp);
      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
	{
	  /* If there are no ranges of true bits, it is all zero.  */
	  return elt == NULL_TREE;
	}
      for (; elt; elt = TREE_CHAIN (elt))
	{
	  /* We do not handle the case where the index is a RANGE_EXPR,
	     so the statistic will be somewhat inaccurate.
	     We do make a more accurate count in store_constructor itself,
	     so since this function is only used for nested array elements,
	     this should be close enough.  */
	  if (mostly_zeros_p (TREE_VALUE (elt)))
	    zeros++;
	  elts++;
	}

      return 4 * zeros >= 3 * elts;
    }

  return is_zeros_p (exp);
}
\f
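/* Editorial aside, not part of expr.c: the 3/4 threshold above, worked
   through in the same division-free integer arithmetic.  With 16
   elements, twelve zeros qualify (4*12 = 48 >= 3*16 = 48) but eleven do
   not (44 < 48).  Standalone sketch only.  */
#if 0
#include <stdio.h>

static int
mostly_zeros_demo (int zeros, int elts)
{
  return 4 * zeros >= 3 * elts;
}

int
main (void)
{
  printf ("%d\n", mostly_zeros_demo (12, 16));	/* 1 */
  printf ("%d\n", mostly_zeros_demo (11, 16));	/* 0 */
  return 0;
}
#endif
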
/* Helper function for store_constructor.
   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
   TYPE is the type of the CONSTRUCTOR, not the element type.
   ALIGN and CLEARED are as for store_constructor.
   ALIAS_SET is the alias set to use for any stores.

   This provides a recursive shortcut back to store_constructor when it isn't
   necessary to go through store_field.  This is so that we can pass through
   the cleared field to let store_constructor know that we may not have to
   clear a substructure if the outer structure has already been cleared.  */

static void
store_constructor_field (target, bitsize, bitpos,
			 mode, exp, type, align, cleared, alias_set)
     rtx target;
     unsigned HOST_WIDE_INT bitsize;
     HOST_WIDE_INT bitpos;
     enum machine_mode mode;
     tree exp, type;
     unsigned int align;
     int cleared;
     int alias_set;
{
  if (TREE_CODE (exp) == CONSTRUCTOR
      && bitpos % BITS_PER_UNIT == 0
      /* If we have a non-zero bitpos for a register target, then we just
	 let store_field do the bitfield handling.  This is unlikely to
	 generate unnecessary clear instructions anyways.  */
      && (bitpos == 0 || GET_CODE (target) == MEM))
    {
      if (bitpos != 0)
	target
	  = change_address (target,
			    GET_MODE (target) == BLKmode
			    || 0 != (bitpos
				     % GET_MODE_ALIGNMENT (GET_MODE (target)))
			    ? BLKmode : VOIDmode,
			    plus_constant (XEXP (target, 0),
					   bitpos / BITS_PER_UNIT));

      /* Show the alignment may no longer be what it was and update the alias
	 set, if required.  */
      if (bitpos != 0)
	align = MIN (align, (unsigned int) bitpos & - bitpos);
      if (GET_CODE (target) == MEM)
	MEM_ALIAS_SET (target) = alias_set;

      store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
    }
  else
    store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, align,
		 int_size_in_bytes (type), alias_set);
}

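/* Editorial aside, not part of expr.c: why the function above can refine
   the known alignment with "bitpos & -bitpos".  In two's complement that
   expression isolates the lowest set bit, and an object BITPOS bits into
   an aligned block can only be guaranteed aligned to that power of two.
   A standalone check of a few values:  */
#if 0
#include <stdio.h>

int
main (void)
{
  unsigned int bitpos;

  for (bitpos = 8; bitpos <= 96; bitpos += 24)
    printf ("bitpos %3u -> alignment guarantee %u bits\n",
	    bitpos, bitpos & - bitpos);
  /* Prints 8 -> 8, 32 -> 32, 56 -> 8, 80 -> 16.  */
  return 0;
}
#endif
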
bbf6f052 4426/* Store the value of constructor EXP into the rtx TARGET.
e1a43f73 4427 TARGET is either a REG or a MEM.
19caa751 4428 ALIGN is the maximum known alignment for TARGET.
b7010412
RK
4429 CLEARED is true if TARGET is known to have been zero'd.
4430 SIZE is the number of bytes of TARGET we are allowed to modify: this
4431 may not be the same as the size of EXP if we are assigning to a field
4432 which has been packed to exclude padding bits. */
bbf6f052
RK
4433
4434static void
b7010412 4435store_constructor (exp, target, align, cleared, size)
bbf6f052
RK
4436 tree exp;
4437 rtx target;
729a2125 4438 unsigned int align;
e1a43f73 4439 int cleared;
13eb1f7f 4440 HOST_WIDE_INT size;
bbf6f052 4441{
4af3895e 4442 tree type = TREE_TYPE (exp);
a5efcd63 4443#ifdef WORD_REGISTER_OPERATIONS
13eb1f7f 4444 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
a5efcd63 4445#endif
4af3895e 4446
bbf6f052
RK
4447 /* We know our target cannot conflict, since safe_from_p has been called. */
4448#if 0
4449 /* Don't try copying piece by piece into a hard register
4450 since that is vulnerable to being clobbered by EXP.
4451 Instead, construct in a pseudo register and then copy it all. */
4452 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4453 {
4454 rtx temp = gen_reg_rtx (GET_MODE (target));
7205485e 4455 store_constructor (exp, temp, align, cleared, size);
bbf6f052
RK
4456 emit_move_insn (target, temp);
4457 return;
4458 }
4459#endif
4460
e44842fe
RK
4461 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4462 || TREE_CODE (type) == QUAL_UNION_TYPE)
bbf6f052
RK
4463 {
4464 register tree elt;
4465
4af3895e 4466 /* Inform later passes that the whole union value is dead. */
dd1db5ec
RK
4467 if ((TREE_CODE (type) == UNION_TYPE
4468 || TREE_CODE (type) == QUAL_UNION_TYPE)
4469 && ! cleared)
a59f8640
R
4470 {
4471 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4472
4473 /* If the constructor is empty, clear the union. */
4474 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
19caa751 4475 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
a59f8640 4476 }
4af3895e
JVA
4477
4478 /* If we are building a static constructor into a register,
4479 set the initial value as zero so we can fold the value into
67225c15
RK
4480 a constant. But if more than one register is involved,
4481 this probably loses. */
4482 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4483 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
9de08200
RK
4484 {
4485 if (! cleared)
e9a25f70 4486 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4af3895e 4487
9de08200
RK
4488 cleared = 1;
4489 }
4490
4491 /* If the constructor has fewer fields than the structure
4492 or if we are initializing the structure to mostly zeros,
fcf1b822
RK
4493 clear the whole structure first. Don't do this is TARGET is
4494 register whose mode size isn't equal to SIZE since clear_storage
4495 can't handle this case. */
9376fcd6
RK
4496 else if (size > 0
4497 && ((list_length (CONSTRUCTOR_ELTS (exp))
c3b247b4 4498 != fields_length (type))
fcf1b822
RK
4499 || mostly_zeros_p (exp))
4500 && (GET_CODE (target) != REG
8752c357 4501 || (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target)) == size))
9de08200
RK
4502 {
4503 if (! cleared)
19caa751 4504 clear_storage (target, GEN_INT (size), align);
9de08200
RK
4505
4506 cleared = 1;
4507 }
dd1db5ec 4508 else if (! cleared)
bbf6f052 4509 /* Inform later passes that the old value is dead. */
38a448ca 4510 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
bbf6f052
RK
4511
4512 /* Store each element of the constructor into
4513 the corresponding field of TARGET. */
4514
4515 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4516 {
4517 register tree field = TREE_PURPOSE (elt);
c5c76735 4518#ifdef WORD_REGISTER_OPERATIONS
34c73909 4519 tree value = TREE_VALUE (elt);
c5c76735 4520#endif
bbf6f052 4521 register enum machine_mode mode;
770ae6cc
RK
4522 HOST_WIDE_INT bitsize;
4523 HOST_WIDE_INT bitpos = 0;
bbf6f052 4524 int unsignedp;
770ae6cc 4525 tree offset;
b50d17a1 4526 rtx to_rtx = target;
bbf6f052 4527
f32fd778
RS
4528 /* Just ignore missing fields.
4529 We cleared the whole structure, above,
4530 if any fields are missing. */
4531 if (field == 0)
4532 continue;
4533
e1a43f73
PB
4534 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4535 continue;
9de08200 4536
770ae6cc
RK
4537 if (host_integerp (DECL_SIZE (field), 1))
4538 bitsize = tree_low_cst (DECL_SIZE (field), 1);
14a774a9
RK
4539 else
4540 bitsize = -1;
4541
bbf6f052
RK
4542 unsignedp = TREE_UNSIGNED (field);
4543 mode = DECL_MODE (field);
4544 if (DECL_BIT_FIELD (field))
4545 mode = VOIDmode;
4546
770ae6cc
RK
4547 offset = DECL_FIELD_OFFSET (field);
4548 if (host_integerp (offset, 0)
4549 && host_integerp (bit_position (field), 0))
4550 {
4551 bitpos = int_bit_position (field);
4552 offset = 0;
4553 }
b50d17a1 4554 else
770ae6cc 4555 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
3a94c984 4556
b50d17a1
RK
4557 if (offset)
4558 {
4559 rtx offset_rtx;
4560
4561 if (contains_placeholder_p (offset))
7fa96708 4562 offset = build (WITH_RECORD_EXPR, sizetype,
956d6950 4563 offset, make_tree (TREE_TYPE (exp), target));
bbf6f052 4564
b50d17a1
RK
4565 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4566 if (GET_CODE (to_rtx) != MEM)
4567 abort ();
4568
3a94c984
KH
4569 if (GET_MODE (offset_rtx) != ptr_mode)
4570 {
bd070e1a 4571#ifdef POINTERS_EXTEND_UNSIGNED
822a3443 4572 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
bd070e1a
RH
4573#else
4574 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4575#endif
4576 }
4577
b50d17a1
RK
4578 to_rtx
4579 = change_address (to_rtx, VOIDmode,
38a448ca 4580 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
c5c76735
JL
4581 force_reg (ptr_mode,
4582 offset_rtx)));
7fa96708 4583 align = DECL_OFFSET_ALIGN (field);
b50d17a1 4584 }
c5c76735 4585
cf04eb80
RK
4586 if (TREE_READONLY (field))
4587 {
9151b3bf 4588 if (GET_CODE (to_rtx) == MEM)
effbcc6a
RK
4589 to_rtx = copy_rtx (to_rtx);
4590
cf04eb80
RK
4591 RTX_UNCHANGING_P (to_rtx) = 1;
4592 }
4593
34c73909
R
4594#ifdef WORD_REGISTER_OPERATIONS
4595 /* If this initializes a field that is smaller than a word, at the
4596 start of a word, try to widen it to a full word.
4597 This special case allows us to output C++ member function
4598 initializations in a form that the optimizers can understand. */
770ae6cc 4599 if (GET_CODE (target) == REG
34c73909
R
4600 && bitsize < BITS_PER_WORD
4601 && bitpos % BITS_PER_WORD == 0
4602 && GET_MODE_CLASS (mode) == MODE_INT
4603 && TREE_CODE (value) == INTEGER_CST
13eb1f7f
RK
4604 && exp_size >= 0
4605 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
34c73909
R
4606 {
4607 tree type = TREE_TYPE (value);
4608 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4609 {
4610 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4611 value = convert (type, value);
4612 }
4613 if (BYTES_BIG_ENDIAN)
4614 value
4615 = fold (build (LSHIFT_EXPR, type, value,
4616 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4617 bitsize = BITS_PER_WORD;
4618 mode = word_mode;
4619 }
4620#endif
c5c76735 4621 store_constructor_field (to_rtx, bitsize, bitpos, mode,
23cb1766 4622 TREE_VALUE (elt), type, align, cleared,
963a2a84 4623 (DECL_NONADDRESSABLE_P (field)
1ccfe3fa 4624 && GET_CODE (to_rtx) == MEM)
23cb1766
RK
4625 ? MEM_ALIAS_SET (to_rtx)
4626 : get_alias_set (TREE_TYPE (field)));
bbf6f052
RK
4627 }
4628 }
4af3895e 4629 else if (TREE_CODE (type) == ARRAY_TYPE)
bbf6f052
RK
4630 {
4631 register tree elt;
4632 register int i;
e1a43f73 4633 int need_to_clear;
4af3895e 4634 tree domain = TYPE_DOMAIN (type);
4af3895e 4635 tree elttype = TREE_TYPE (type);
85f3d674
RK
4636 int const_bounds_p = (host_integerp (TYPE_MIN_VALUE (domain), 0)
4637 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4638 HOST_WIDE_INT minelt;
4639 HOST_WIDE_INT maxelt;
4640
4641 /* If we have constant bounds for the range of the type, get them. */
4642 if (const_bounds_p)
4643 {
4644 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4645 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4646 }
bbf6f052 4647
e1a43f73 4648 /* If the constructor has fewer elements than the array,
38e01259 4649 clear the whole array first. Similarly if this is
e1a43f73
PB
4650 static constructor of a non-BLKmode object. */
4651 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4652 need_to_clear = 1;
4653 else
4654 {
4655 HOST_WIDE_INT count = 0, zero_count = 0;
85f3d674
RK
4656 need_to_clear = ! const_bounds_p;
4657
e1a43f73
PB
4658 /* This loop is a more accurate version of the loop in
4659 mostly_zeros_p (it handles RANGE_EXPR in an index).
4660 It is also needed to check for missing elements. */
4661 for (elt = CONSTRUCTOR_ELTS (exp);
85f3d674 4662 elt != NULL_TREE && ! need_to_clear;
df0faff1 4663 elt = TREE_CHAIN (elt))
e1a43f73
PB
4664 {
4665 tree index = TREE_PURPOSE (elt);
4666 HOST_WIDE_INT this_node_count;
19caa751 4667
e1a43f73
PB
4668 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4669 {
4670 tree lo_index = TREE_OPERAND (index, 0);
4671 tree hi_index = TREE_OPERAND (index, 1);
05bccae2 4672
19caa751
RK
4673 if (! host_integerp (lo_index, 1)
4674 || ! host_integerp (hi_index, 1))
e1a43f73
PB
4675 {
4676 need_to_clear = 1;
4677 break;
4678 }
19caa751
RK
4679
4680 this_node_count = (tree_low_cst (hi_index, 1)
4681 - tree_low_cst (lo_index, 1) + 1);
e1a43f73
PB
4682 }
4683 else
4684 this_node_count = 1;
85f3d674 4685
e1a43f73
PB
4686 count += this_node_count;
4687 if (mostly_zeros_p (TREE_VALUE (elt)))
4688 zero_count += this_node_count;
4689 }
85f3d674 4690
8e958f70 4691 /* Clear the entire array first if there are any missing elements,
0f41302f 4692 or if the incidence of zero elements is >= 75%. */
85f3d674
RK
4693 if (! need_to_clear
4694 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
e1a43f73
PB
4695 need_to_clear = 1;
4696 }
85f3d674 4697
9376fcd6 4698 if (need_to_clear && size > 0)
9de08200
RK
4699 {
4700 if (! cleared)
19caa751 4701 clear_storage (target, GEN_INT (size), align);
9de08200
RK
4702 cleared = 1;
4703 }
bbf6f052
RK
4704 else
4705 /* Inform later passes that the old value is dead. */
38a448ca 4706 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
bbf6f052
RK
4707
4708 /* Store each element of the constructor into
4709 the corresponding element of TARGET, determined
4710 by counting the elements. */
4711 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4712 elt;
4713 elt = TREE_CHAIN (elt), i++)
4714 {
4715 register enum machine_mode mode;
19caa751
RK
4716 HOST_WIDE_INT bitsize;
4717 HOST_WIDE_INT bitpos;
bbf6f052 4718 int unsignedp;
e1a43f73 4719 tree value = TREE_VALUE (elt);
729a2125 4720 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
03dc44a6
RS
4721 tree index = TREE_PURPOSE (elt);
4722 rtx xtarget = target;
bbf6f052 4723
e1a43f73
PB
4724 if (cleared && is_zeros_p (value))
4725 continue;
9de08200 4726
bbf6f052 4727 unsignedp = TREE_UNSIGNED (elttype);
14a774a9
RK
4728 mode = TYPE_MODE (elttype);
4729 if (mode == BLKmode)
19caa751
RK
4730 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4731 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4732 : -1);
14a774a9
RK
4733 else
4734 bitsize = GET_MODE_BITSIZE (mode);
bbf6f052 4735
e1a43f73
PB
4736 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4737 {
4738 tree lo_index = TREE_OPERAND (index, 0);
4739 tree hi_index = TREE_OPERAND (index, 1);
4740 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4741 struct nesting *loop;
05c0b405
PB
4742 HOST_WIDE_INT lo, hi, count;
4743 tree position;
e1a43f73 4744
0f41302f 4745 /* If the range is constant and "small", unroll the loop. */
85f3d674
RK
4746 if (const_bounds_p
4747 && host_integerp (lo_index, 0)
19caa751
RK
4748 && host_integerp (hi_index, 0)
4749 && (lo = tree_low_cst (lo_index, 0),
4750 hi = tree_low_cst (hi_index, 0),
05c0b405
PB
4751 count = hi - lo + 1,
4752 (GET_CODE (target) != MEM
4753 || count <= 2
19caa751
RK
4754 || (host_integerp (TYPE_SIZE (elttype), 1)
4755 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4756 <= 40 * 8)))))
e1a43f73 4757 {
05c0b405
PB
4758 lo -= minelt; hi -= minelt;
4759 for (; lo <= hi; lo++)
e1a43f73 4760 {
19caa751 4761 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
23cb1766
RK
4762 store_constructor_field
4763 (target, bitsize, bitpos, mode, value, type, align,
4764 cleared,
4765 TYPE_NONALIASED_COMPONENT (type)
4766 ? MEM_ALIAS_SET (target) : get_alias_set (elttype));
e1a43f73
PB
4767 }
4768 }
4769 else
4770 {
4771 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4772 loop_top = gen_label_rtx ();
4773 loop_end = gen_label_rtx ();
4774
4775 unsignedp = TREE_UNSIGNED (domain);
4776
4777 index = build_decl (VAR_DECL, NULL_TREE, domain);
4778
4779 DECL_RTL (index) = index_r
4780 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4781 &unsignedp, 0));
4782
4783 if (TREE_CODE (value) == SAVE_EXPR
4784 && SAVE_EXPR_RTL (value) == 0)
4785 {
0f41302f
MS
4786 /* Make sure value gets expanded once before the
4787 loop. */
e1a43f73
PB
4788 expand_expr (value, const0_rtx, VOIDmode, 0);
4789 emit_queue ();
4790 }
4791 store_expr (lo_index, index_r, 0);
4792 loop = expand_start_loop (0);
4793
0f41302f 4794 /* Assign value to element index. */
fed3cef0
RK
4795 position
4796 = convert (ssizetype,
4797 fold (build (MINUS_EXPR, TREE_TYPE (index),
4798 index, TYPE_MIN_VALUE (domain))));
4799 position = size_binop (MULT_EXPR, position,
4800 convert (ssizetype,
4801 TYPE_SIZE_UNIT (elttype)));
4802
e1a43f73 4803 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
38a448ca 4804 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
e1a43f73
PB
4805 xtarget = change_address (target, mode, addr);
4806 if (TREE_CODE (value) == CONSTRUCTOR)
b7010412
RK
4807 store_constructor (value, xtarget, align, cleared,
4808 bitsize / BITS_PER_UNIT);
e1a43f73
PB
4809 else
4810 store_expr (value, xtarget, 0);
4811
4812 expand_exit_loop_if_false (loop,
4813 build (LT_EXPR, integer_type_node,
4814 index, hi_index));
4815
4816 expand_increment (build (PREINCREMENT_EXPR,
4817 TREE_TYPE (index),
7b8b9722 4818 index, integer_one_node), 0, 0);
e1a43f73
PB
4819 expand_end_loop ();
4820 emit_label (loop_end);
e1a43f73
PB
4821 }
4822 }
19caa751
RK
4823 else if ((index != 0 && ! host_integerp (index, 0))
4824 || ! host_integerp (TYPE_SIZE (elttype), 1))
03dc44a6 4825 {
e1a43f73 4826 rtx pos_rtx, addr;
03dc44a6
RS
4827 tree position;
4828
5b6c44ff 4829 if (index == 0)
fed3cef0 4830 index = ssize_int (1);
5b6c44ff 4831
e1a43f73 4832 if (minelt)
fed3cef0
RK
4833 index = convert (ssizetype,
4834 fold (build (MINUS_EXPR, index,
4835 TYPE_MIN_VALUE (domain))));
19caa751 4836
fed3cef0
RK
4837 position = size_binop (MULT_EXPR, index,
4838 convert (ssizetype,
4839 TYPE_SIZE_UNIT (elttype)));
03dc44a6 4840 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
38a448ca 4841 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
03dc44a6 4842 xtarget = change_address (target, mode, addr);
e1a43f73 4843 store_expr (value, xtarget, 0);
03dc44a6
RS
4844 }
4845 else
4846 {
4847 if (index != 0)
19caa751
RK
4848 bitpos = ((tree_low_cst (index, 0) - minelt)
4849 * tree_low_cst (TYPE_SIZE (elttype), 1));
03dc44a6 4850 else
19caa751
RK
4851 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4852
c5c76735 4853 store_constructor_field (target, bitsize, bitpos, mode, value,
23cb1766
RK
4854 type, align, cleared,
4855 TYPE_NONALIASED_COMPONENT (type)
831ecbd4 4856 && GET_CODE (target) == MEM
23cb1766
RK
4857 ? MEM_ALIAS_SET (target) :
4858 get_alias_set (elttype));
4859
03dc44a6 4860 }
bbf6f052
RK
4861 }
4862 }
19caa751 4863
3a94c984 4864 /* Set constructor assignments. */
071a6595
PB
4865 else if (TREE_CODE (type) == SET_TYPE)
4866 {
e1a43f73 4867 tree elt = CONSTRUCTOR_ELTS (exp);
19caa751 4868 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
071a6595
PB
4869 tree domain = TYPE_DOMAIN (type);
4870 tree domain_min, domain_max, bitlength;
4871
9faa82d8 4872 /* The default implementation strategy is to extract the constant
071a6595
PB
4873 parts of the constructor, use that to initialize the target,
4874 and then "or" in whatever non-constant ranges we need in addition.
4875
4876 If a large set is all zero or all ones, it is
4877 probably better to set it using memset (if available) or bzero.
4878 Also, if a large set has just a single range, it may also be
4879 better to first clear the whole set (using
4880 bzero/memset), and then set the bits we want. */
4881
4882 /* Check for all zeros. */
4883 if (elt == NULL_TREE && size > 0)
4884 {
4885 if (!cleared)
4886 clear_storage (target, GEN_INT (size), TYPE_ALIGN (type));
4887 return;
4888 }
4889
4890 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4891 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4892 bitlength = size_binop (PLUS_EXPR,
4893 size_diffop (domain_max, domain_min),
4894 ssize_int (1));
4895
4896 nbits = tree_low_cst (bitlength, 1);
4897
4898 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4899 are "complicated" (more than one range), initialize (the
4900 constant parts) by copying from a constant. */
4901 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4902 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4903 {
4904 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4905 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4906 char *bit_buffer = (char *) alloca (nbits);
4907 HOST_WIDE_INT word = 0;
4908 unsigned int bit_pos = 0;
4909 unsigned int ibit = 0;
4910 unsigned int offset = 0; /* In bytes from beginning of set. */
4911
4912 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4913 for (;;)
4914 {
4915 if (bit_buffer[ibit])
4916 {
4917 if (BYTES_BIG_ENDIAN)
4918 word |= (1 << (set_word_size - 1 - bit_pos));
4919 else
4920 word |= 1 << bit_pos;
4921 }
4922
4923 bit_pos++; ibit++;
4924 if (bit_pos >= set_word_size || ibit == nbits)
4925 {
4926 if (word != 0 || ! cleared)
4927 {
4928 rtx datum = GEN_INT (word);
4929 rtx to_rtx;
4930
4931 /* The assumption here is that it is safe to use
4932 XEXP if the set is multi-word, but not if
4933 it's single-word. */
4934 if (GET_CODE (target) == MEM)
4935 {
4936 to_rtx = plus_constant (XEXP (target, 0), offset);
4937 to_rtx = change_address (target, mode, to_rtx);
4938 }
4939 else if (offset == 0)
4940 to_rtx = target;
4941 else
4942 abort ();
4943 emit_move_insn (to_rtx, datum);
4944 }
4945
4946 if (ibit == nbits)
4947 break;
4948 word = 0;
4949 bit_pos = 0;
4950 offset += set_word_size / BITS_PER_UNIT;
4951 }
4952 }
4953 }
4954 else if (!cleared)
4955 /* Don't bother clearing storage if the set is all ones. */
4956 if (TREE_CHAIN (elt) != NULL_TREE
4957 || (TREE_PURPOSE (elt) == NULL_TREE
4958 ? nbits != 1
4959 : ( ! host_integerp (TREE_VALUE (elt), 0)
4960 || ! host_integerp (TREE_PURPOSE (elt), 0)
4961 || (tree_low_cst (TREE_VALUE (elt), 0)
4962 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
4963 != (HOST_WIDE_INT) nbits))))
4964 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4965
4966 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4967 {
4968 /* Start of range of element or NULL. */
4969 tree startbit = TREE_PURPOSE (elt);
4970 /* End of range of element, or element value. */
4971 tree endbit = TREE_VALUE (elt);
4972#ifdef TARGET_MEM_FUNCTIONS
4973 HOST_WIDE_INT startb, endb;
4974#endif
4975 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4976
4977 bitlength_rtx = expand_expr (bitlength,
4978 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4979
4980 /* Handle non-range tuple element like [ expr ]. */
4981 if (startbit == NULL_TREE)
4982 {
4983 startbit = save_expr (endbit);
4984 endbit = startbit;
4985 }
4986
4987 startbit = convert (sizetype, startbit);
4988 endbit = convert (sizetype, endbit);
4989 if (! integer_zerop (domain_min))
4990 {
4991 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4992 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4993 }
4994 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4995 EXPAND_CONST_ADDRESS);
4996 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4997 EXPAND_CONST_ADDRESS);
4998
4999 if (REG_P (target))
5000 {
5001 targetx
5002 = assign_temp
5003 ((build_qualified_type (type_for_mode (GET_MODE (target), 0),
5004 TYPE_QUAL_CONST)),
5005 0, 1, 1);
5006 emit_move_insn (targetx, target);
5007 }
5008
5009 else if (GET_CODE (target) == MEM)
5010 targetx = target;
5011 else
5012 abort ();
5013
5014#ifdef TARGET_MEM_FUNCTIONS
5015 /* Optimization: If startbit and endbit are
5016 constants divisible by BITS_PER_UNIT,
5017 call memset instead. */
5018 if (TREE_CODE (startbit) == INTEGER_CST
5019 && TREE_CODE (endbit) == INTEGER_CST
5020 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5021 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5022 {
5023 emit_library_call (memset_libfunc, LCT_NORMAL,
5024 VOIDmode, 3,
5025 plus_constant (XEXP (targetx, 0),
5026 startb / BITS_PER_UNIT),
5027 Pmode,
5028 constm1_rtx, TYPE_MODE (integer_type_node),
5029 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5030 TYPE_MODE (sizetype));
5031 }
5032 else
5033#endif
5034 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
5035 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
5036 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5037 startbit_rtx, TYPE_MODE (sizetype),
5038 endbit_rtx, TYPE_MODE (sizetype));
5039
5040 if (REG_P (target))
5041 emit_move_insn (target, targetx);
5042 }
5043 }
5044
5045 else
5046 abort ();
5047}
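/* The word-packing loop in the SET_TYPE case above can be read in
   isolation.  Below is a minimal, self-contained sketch of the same
   scheme: bits are accumulated into a host word, honoring big-endian
   bit numbering when requested, and flushed one word at a time.  The
   helper name, the fixed 32-bit word size and the plain array output
   are illustrative assumptions, not GCC code.  */

#if 0 /* illustration only */
static void
pack_set_bits (const char *bit_buffer, unsigned int nbits,
	       unsigned int big_endian, unsigned int *out_words)
{
  unsigned int word = 0, bit_pos = 0, ibit = 0, nwords = 0;
  const unsigned int set_word_size = 32;

  for (;;)
    {
      if (bit_buffer[ibit])
	word |= big_endian
		? 1u << (set_word_size - 1 - bit_pos)
		: 1u << bit_pos;

      bit_pos++, ibit++;
      if (bit_pos >= set_word_size || ibit == nbits)
	{
	  out_words[nwords++] = word;	/* stands in for emit_move_insn */
	  if (ibit == nbits)
	    break;
	  word = 0;
	  bit_pos = 0;
	}
    }
}
#endif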
5048
5049/* Store the value of EXP (an expression tree)
5050 into a subfield of TARGET which has mode MODE and occupies
5051 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5052 If MODE is VOIDmode, it means that we are storing into a bit-field.
5053
5054 If VALUE_MODE is VOIDmode, return nothing in particular.
5055 UNSIGNEDP is not used in this case.
5056
5057 Otherwise, return an rtx for the value stored. This rtx
5058 has mode VALUE_MODE if that is convenient to do.
5059 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5060
5061 ALIGN is the alignment that TARGET is known to have.
5062 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
5063
5064 ALIAS_SET is the alias set for the destination. This value will
5065 (in general) be different from that for TARGET, since TARGET is a
5066 reference to the containing structure. */
5067
5068static rtx
5069store_field (target, bitsize, bitpos, mode, exp, value_mode,
5070 unsignedp, align, total_size, alias_set)
5071 rtx target;
5072 HOST_WIDE_INT bitsize;
5073 HOST_WIDE_INT bitpos;
5074 enum machine_mode mode;
5075 tree exp;
5076 enum machine_mode value_mode;
5077 int unsignedp;
5078 unsigned int align;
5079 HOST_WIDE_INT total_size;
5080 int alias_set;
5081{
5082 HOST_WIDE_INT width_mask = 0;
5083
5084 if (TREE_CODE (exp) == ERROR_MARK)
5085 return const0_rtx;
5086
5087 if (bitsize < HOST_BITS_PER_WIDE_INT)
5088 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
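  /* For example, bitsize == 5 gives width_mask == 0x1f.  The guard
     above matters because a shift by HOST_BITS_PER_WIDE_INT would be
     undefined.  */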
5089
5090 /* If we are storing into an unaligned field of an aligned union that is
5091 in a register, we may have the mode of TARGET being an integer mode but
5092 MODE == BLKmode. In that case, get an aligned object whose size and
5093 alignment are the same as TARGET and store TARGET into it (we can avoid
5094 the store if the field being stored is the entire width of TARGET). Then
5095 call ourselves recursively to store the field into a BLKmode version of
5096 that object. Finally, load from the object into TARGET. This is not
5097 very efficient in general, but should only be slightly more expensive
5098 than the otherwise-required unaligned accesses. Perhaps this can be
5099 cleaned up later. */
5100
5101 if (mode == BLKmode
5102 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5103 {
5104 rtx object
5105 = assign_temp
5106 (build_qualified_type (type_for_mode (GET_MODE (target), 0),
5107 TYPE_QUAL_CONST),
5108 0, 1, 1);
5109 rtx blk_object = copy_rtx (object);
5110
5111 PUT_MODE (blk_object, BLKmode);
5112
5113 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5114 emit_move_insn (object, target);
5115
5116 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
5117 align, total_size, alias_set);
5118
5119 /* Even though we aren't returning target, we need to
5120 give it the updated value. */
5121 emit_move_insn (target, object);
5122
5123 return blk_object;
5124 }
5125
5126 if (GET_CODE (target) == CONCAT)
5127 {
5128 /* We're storing into a struct containing a single __complex. */
5129
5130 if (bitpos != 0)
5131 abort ();
5132 return store_expr (exp, target, 0);
5133 }
5134
5135 /* If the structure is in a register or if the component
5136 is a bit field, we cannot use addressing to access it.
5137 Use bit-field techniques or SUBREG to store in it. */
5138
5139 if (mode == VOIDmode
5140 || (mode != BLKmode && ! direct_store[(int) mode]
5141 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5142 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5143 || GET_CODE (target) == REG
5144 || GET_CODE (target) == SUBREG
5145 /* If the field isn't aligned enough to store as an ordinary memref,
5146 store it as a bit field. */
5147 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5148 && (align < GET_MODE_ALIGNMENT (mode)
5149 || bitpos % GET_MODE_ALIGNMENT (mode)))
5150 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5151 && (TYPE_ALIGN (TREE_TYPE (exp)) > align
5152 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
5153 /* If the RHS and field are a constant size and the size of the
5154 RHS isn't the same size as the bitfield, we must use bitfield
5155 operations. */
5156 || (bitsize >= 0
5157 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5158 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5159 {
5160 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5161
5162 /* If BITSIZE is narrower than the size of the type of EXP
5163 we will be narrowing TEMP. Normally, what's wanted are the
5164 low-order bits. However, if EXP's type is a record and this is
5165 big-endian machine, we want the upper BITSIZE bits. */
5166 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5167 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
5168 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5169 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5170 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5171 - bitsize),
5172 temp, 1);
5173
5174 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5175 MODE. */
5176 if (mode != VOIDmode && mode != BLKmode
5177 && mode != TYPE_MODE (TREE_TYPE (exp)))
5178 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5179
5180 /* If the modes of TARGET and TEMP are both BLKmode, both
5181 must be in memory and BITPOS must be aligned on a byte
5182 boundary. If so, we simply do a block copy. */
5183 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5184 {
5185 unsigned int exp_align = expr_align (exp);
5186
5187 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5188 || bitpos % BITS_PER_UNIT != 0)
5189 abort ();
5190
5191 target = change_address (target, VOIDmode,
5192 plus_constant (XEXP (target, 0),
5193 bitpos / BITS_PER_UNIT));
5194
5195 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
5196 align = MIN (exp_align, align);
5197
5198 /* Find an alignment that is consistent with the bit position. */
5199 while ((bitpos % align) != 0)
5200 align >>= 1;
5201
5202 emit_block_move (target, temp,
5203 bitsize == -1 ? expr_size (exp)
5204 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5205 / BITS_PER_UNIT),
5206 align);
5207
5208 return value_mode == VOIDmode ? const0_rtx : target;
5209 }
5210
5211 /* Store the value in the bitfield. */
5212 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
5213 if (value_mode != VOIDmode)
5214 {
5215 /* The caller wants an rtx for the value. */
5216 /* If possible, avoid refetching from the bitfield itself. */
5217 if (width_mask != 0
5218 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5219 {
5220 tree count;
5221 enum machine_mode tmode;
5222
5223 if (unsignedp)
5224 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
5225 tmode = GET_MODE (temp);
5226 if (tmode == VOIDmode)
5227 tmode = value_mode;
5228 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5229 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5230 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5231 }
5232 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5233 NULL_RTX, value_mode, 0, align,
5234 total_size);
5235 }
5236 return const0_rtx;
5237 }
5238 else
5239 {
5240 rtx addr = XEXP (target, 0);
5241 rtx to_rtx;
5242
5243 /* If a value is wanted, it must be the lhs;
5244 so make the address stable for multiple use. */
5245
5246 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5247 && ! CONSTANT_ADDRESS_P (addr)
5248 /* A frame-pointer reference is already stable. */
5249 && ! (GET_CODE (addr) == PLUS
5250 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5251 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5252 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5253 addr = copy_to_reg (addr);
5254
5255 /* Now build a reference to just the desired component. */
5256
5257 to_rtx = copy_rtx (change_address (target, mode,
5258 plus_constant (addr,
5259 (bitpos
5260 / BITS_PER_UNIT))));
5261 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5262 MEM_ALIAS_SET (to_rtx) = alias_set;
5263
5264 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5265 }
5266}
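/* A sketch of the double-shift sign extension performed at lines
   5228-5230 above, on plain host integers rather than rtx values in
   mode TMODE (the helper name and the use of `int' are illustrative
   assumptions):  */

#if 0 /* illustration only */
#include <limits.h>

static int
sign_extend_field (unsigned int word, int bitsize)
{
  int count = (int) (sizeof (unsigned int) * CHAR_BIT) - bitsize;

  /* Shift left as unsigned to avoid signed overflow, then right as
     signed so the shift is arithmetic, duplicating the LSHIFT_EXPR /
     RSHIFT_EXPR pair used above.  */
  return ((int) (word << count)) >> count;
}
#endif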
5267\f
5268/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5269 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
5270 ARRAY_REFs and find the ultimate containing object, which we return.
5271
5272 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5273 bit position, and *PUNSIGNEDP to the signedness of the field.
5274 If the position of the field is variable, we store a tree
5275 giving the variable offset (in units) in *POFFSET.
5276 This offset is in addition to the bit position.
5277 If the position is not variable, we store 0 in *POFFSET.
5278 We set *PALIGNMENT to the alignment of the address that will be
5279 computed. This is the alignment of the thing we return if *POFFSET
5280 is zero, but it can be less strictly aligned if *POFFSET is nonzero.
5281
5282 If any of the extraction expressions is volatile,
5283 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5284
5285 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5286 is a mode that can be used to access the field. In that case, *PBITSIZE
5287 is redundant.
5288
5289 If the field describes a variable-sized object, *PMODE is set to
5290 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
839c4796 5291 this case, but the address of the object can be found. */
5292
5293tree
5294get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5295 punsignedp, pvolatilep, palignment)
5296 tree exp;
5297 HOST_WIDE_INT *pbitsize;
5298 HOST_WIDE_INT *pbitpos;
5299 tree *poffset;
5300 enum machine_mode *pmode;
5301 int *punsignedp;
5302 int *pvolatilep;
5303 unsigned int *palignment;
5304{
5305 tree size_tree = 0;
5306 enum machine_mode mode = VOIDmode;
5307 tree offset = size_zero_node;
5308 tree bit_offset = bitsize_zero_node;
5309 unsigned int alignment = BIGGEST_ALIGNMENT;
5310 tree tem;
5311
5312 /* First get the mode, signedness, and size. We do this from just the
5313 outermost expression. */
5314 if (TREE_CODE (exp) == COMPONENT_REF)
5315 {
5316 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5317 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5318 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5319
5320 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5321 }
5322 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5323 {
5324 size_tree = TREE_OPERAND (exp, 1);
5325 *punsignedp = TREE_UNSIGNED (exp);
5326 }
5327 else
5328 {
5329 mode = TYPE_MODE (TREE_TYPE (exp));
5330 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5331
5332 if (mode == BLKmode)
5333 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5334 else
5335 *pbitsize = GET_MODE_BITSIZE (mode);
5336 }
5337
5338 if (size_tree != 0)
5339 {
5340 if (! host_integerp (size_tree, 1))
5341 mode = BLKmode, *pbitsize = -1;
5342 else
5343 *pbitsize = tree_low_cst (size_tree, 1);
5344 }
5345
5346 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5347 and find the ultimate containing object. */
5348 while (1)
5349 {
5350 if (TREE_CODE (exp) == BIT_FIELD_REF)
5351 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5352 else if (TREE_CODE (exp) == COMPONENT_REF)
5353 {
5354 tree field = TREE_OPERAND (exp, 1);
5355 tree this_offset = DECL_FIELD_OFFSET (field);
5356
5357 /* If this field hasn't been filled in yet, don't go
5358 past it. This should only happen when folding expressions
5359 made during type construction. */
5360 if (this_offset == 0)
5361 break;
5362 else if (! TREE_CONSTANT (this_offset)
5363 && contains_placeholder_p (this_offset))
5364 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5365
5366 offset = size_binop (PLUS_EXPR, offset, this_offset);
5367 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5368 DECL_FIELD_BIT_OFFSET (field));
5369
5370 if (! host_integerp (offset, 0))
5371 alignment = MIN (alignment, DECL_OFFSET_ALIGN (field));
5372 }
5373
5374 else if (TREE_CODE (exp) == ARRAY_REF)
5375 {
5376 tree index = TREE_OPERAND (exp, 1);
5377 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5378 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5379 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (exp));
5380
5381 /* We assume all arrays have sizes that are a multiple of a byte.
5382 First subtract the lower bound, if any, in the type of the
5383 index, then convert to sizetype and multiply by the size of the
5384 array element. */
5385 if (low_bound != 0 && ! integer_zerop (low_bound))
5386 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5387 index, low_bound));
5388
5389 /* If the index has a self-referential type, pass it to a
5390 WITH_RECORD_EXPR; if the component size is self-referential,
5391 pass our component to one. */
5392 if (! TREE_CONSTANT (index)
5393 && contains_placeholder_p (index))
5394 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5395 if (! TREE_CONSTANT (unit_size)
5396 && contains_placeholder_p (unit_size))
5397 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size,
5398 TREE_OPERAND (exp, 0));
5399
5400 offset = size_binop (PLUS_EXPR, offset,
5401 size_binop (MULT_EXPR,
5402 convert (sizetype, index),
5403 unit_size));
5404 }
5405
5406 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5407 && ! ((TREE_CODE (exp) == NOP_EXPR
5408 || TREE_CODE (exp) == CONVERT_EXPR)
5409 && (TYPE_MODE (TREE_TYPE (exp))
5410 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5411 break;
5412
5413 /* If any reference in the chain is volatile, the effect is volatile. */
5414 if (TREE_THIS_VOLATILE (exp))
5415 *pvolatilep = 1;
5416
5417 /* If the offset is non-constant already, then we can't assume any
5418 alignment more than the alignment here. */
5419 if (! TREE_CONSTANT (offset))
5420 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5421
5422 exp = TREE_OPERAND (exp, 0);
5423 }
5424
5425 if (DECL_P (exp))
5426 alignment = MIN (alignment, DECL_ALIGN (exp));
5427 else if (TREE_TYPE (exp) != 0)
5428 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5429
5430 /* If OFFSET is constant, see if we can return the whole thing as a
5431 constant bit position. Otherwise, split it up. */
5432 if (host_integerp (offset, 0)
5433 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5434 bitsize_unit_node))
5435 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5436 && host_integerp (tem, 0))
5437 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5438 else
5439 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5440
5441 *pmode = mode;
5442 *palignment = alignment;
5443 return exp;
5444}
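/* A worked example of the decomposition above, using hypothetical C
   types.  When every component is constant the whole position folds
   into *PBITPOS; a variable array index instead leaves a sizetype
   product in *POFFSET.  */

#if 0 /* illustration only */
#include <stddef.h>

struct inner { char pad; int b; };
struct outer { short pad2; struct inner in; };

static long
constant_bitpos (void)
{
  /* offsetof collapses the nested COMPONENT_REFs the same way the
     loop above sums DECL_FIELD_OFFSET and DECL_FIELD_BIT_OFFSET.  */
  return (long) offsetof (struct outer, in.b) * 8;
}
#endif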
5445
5446/* Subroutine of expand_expr: compute memory_usage from modifier. */
5447
5448static enum memory_use_mode
5449get_memory_usage_from_modifier (modifier)
5450 enum expand_modifier modifier;
5451{
5452 switch (modifier)
5453 {
5454 case EXPAND_NORMAL:
5455 case EXPAND_SUM:
5456 return MEMORY_USE_RO;
5457 break;
5458 case EXPAND_MEMORY_USE_WO:
5459 return MEMORY_USE_WO;
5460 break;
5461 case EXPAND_MEMORY_USE_RW:
5462 return MEMORY_USE_RW;
5463 break;
5464 case EXPAND_MEMORY_USE_DONT:
5465 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5466 MEMORY_USE_DONT, because they are modifiers to a call of
5467 expand_expr in the ADDR_EXPR case of expand_expr. */
5468 case EXPAND_CONST_ADDRESS:
5469 case EXPAND_INITIALIZER:
5470 return MEMORY_USE_DONT;
5471 case EXPAND_MEMORY_USE_BAD:
5472 default:
5473 abort ();
5474 }
5475}
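/* In summary, the mapping implemented above is:
     EXPAND_NORMAL, EXPAND_SUM                 -> MEMORY_USE_RO
     EXPAND_MEMORY_USE_WO                      -> MEMORY_USE_WO
     EXPAND_MEMORY_USE_RW                      -> MEMORY_USE_RW
     EXPAND_MEMORY_USE_DONT, EXPAND_CONST_ADDRESS,
     EXPAND_INITIALIZER                        -> MEMORY_USE_DONT
     EXPAND_MEMORY_USE_BAD, anything else      -> abort ()  */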
5476\f
5477/* Given an rtx VALUE that may contain additions and multiplications, return
5478 an equivalent value that just refers to a register, memory, or constant.
5479 This is done by generating instructions to perform the arithmetic and
5480 returning a pseudo-register containing the value.
5481
5482 The returned value may be a REG, SUBREG, MEM or constant. */
5483
5484rtx
5485force_operand (value, target)
5486 rtx value, target;
5487{
5488 register optab binoptab = 0;
5489 /* Use a temporary to force order of execution of calls to
5490 `force_operand'. */
5491 rtx tmp;
5492 register rtx op2;
5493 /* Use subtarget as the target for operand 0 of a binary operation. */
5494 register rtx subtarget = get_subtarget (target);
5495
5496 /* Check for a PIC address load. */
5497 if (flag_pic
5498 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5499 && XEXP (value, 0) == pic_offset_table_rtx
5500 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5501 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5502 || GET_CODE (XEXP (value, 1)) == CONST))
5503 {
5504 if (!subtarget)
5505 subtarget = gen_reg_rtx (GET_MODE (value));
5506 emit_move_insn (subtarget, value);
5507 return subtarget;
5508 }
5509
5510 if (GET_CODE (value) == PLUS)
5511 binoptab = add_optab;
5512 else if (GET_CODE (value) == MINUS)
5513 binoptab = sub_optab;
5514 else if (GET_CODE (value) == MULT)
5515 {
5516 op2 = XEXP (value, 1);
5517 if (!CONSTANT_P (op2)
5518 && !(GET_CODE (op2) == REG && op2 != subtarget))
5519 subtarget = 0;
5520 tmp = force_operand (XEXP (value, 0), subtarget);
5521 return expand_mult (GET_MODE (value), tmp,
5522 force_operand (op2, NULL_RTX),
5523 target, 1);
5524 }
5525
5526 if (binoptab)
5527 {
5528 op2 = XEXP (value, 1);
5529 if (!CONSTANT_P (op2)
5530 && !(GET_CODE (op2) == REG && op2 != subtarget))
5531 subtarget = 0;
5532 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5533 {
5534 binoptab = add_optab;
5535 op2 = negate_rtx (GET_MODE (value), op2);
5536 }
5537
5538 /* Check for an addition with OP2 a constant integer and our first
5539 operand a PLUS of a virtual register and something else. In that
5540 case, we want to emit the sum of the virtual register and the
5541 constant first and then add the other value. This allows virtual
5542 register instantiation to simply modify the constant rather than
5543 creating another one around this addition. */
5544 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5545 && GET_CODE (XEXP (value, 0)) == PLUS
5546 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5547 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5548 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5549 {
5550 rtx temp = expand_binop (GET_MODE (value), binoptab,
5551 XEXP (XEXP (value, 0), 0), op2,
5552 subtarget, 0, OPTAB_LIB_WIDEN);
5553 return expand_binop (GET_MODE (value), binoptab, temp,
5554 force_operand (XEXP (XEXP (value, 0), 1), 0),
5555 target, 0, OPTAB_LIB_WIDEN);
5556 }
5557
5558 tmp = force_operand (XEXP (value, 0), subtarget);
5559 return expand_binop (GET_MODE (value), binoptab, tmp,
5560 force_operand (op2, NULL_RTX),
5561 target, 0, OPTAB_LIB_WIDEN);
5562 /* We give UNSIGNEDP = 0 to expand_binop
5563 because the only operations we are expanding here are signed ones. */
5564 }
5565 return value;
5566}
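/* Hypothetical usage sketch for force_operand: the rtx constructors
   are the ones used elsewhere in this file, but the wrapper function
   and the SImode assumption are illustrative only.  */

#if 0 /* illustration only */
static rtx
force_reg_plus_4 (rtx x)
{
  rtx sum = gen_rtx_PLUS (SImode, x, GEN_INT (4));

  /* Emits the addition (or, for a PIC address, a plain move) and
     returns an operand-legal rtx, typically a pseudo register.  */
  return force_operand (sum, NULL_RTX);
}
#endif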
5567\f
5568/* Subroutine of expand_expr:
5569 save the non-copied parts (LIST) of an expr (LHS), and return a list
5570 which can restore these values to their previous values,
5571 should something modify their storage. */
5572
5573static tree
5574save_noncopied_parts (lhs, list)
5575 tree lhs;
5576 tree list;
5577{
5578 tree tail;
5579 tree parts = 0;
5580
5581 for (tail = list; tail; tail = TREE_CHAIN (tail))
5582 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5583 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5584 else
5585 {
5586 tree part = TREE_VALUE (tail);
5587 tree part_type = TREE_TYPE (part);
5588 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5589 rtx target
5590 = assign_temp (build_qualified_type (part_type,
5591 (TYPE_QUALS (part_type)
5592 | TYPE_QUAL_CONST)),
5593 0, 1, 1);
5594
5595 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5596 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5597 parts = tree_cons (to_be_saved,
5598 build (RTL_EXPR, part_type, NULL_TREE,
5599 (tree) target),
5600 parts);
5601 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5602 }
5603 return parts;
5604}
5605
5606/* Subroutine of expand_expr:
5607 record the non-copied parts (LIST) of an expr (LHS), and return a list
5608 which specifies the initial values of these parts. */
5609
5610static tree
5611init_noncopied_parts (lhs, list)
5612 tree lhs;
5613 tree list;
5614{
5615 tree tail;
5616 tree parts = 0;
5617
5618 for (tail = list; tail; tail = TREE_CHAIN (tail))
5619 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5620 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5621 else if (TREE_PURPOSE (tail))
5622 {
5623 tree part = TREE_VALUE (tail);
5624 tree part_type = TREE_TYPE (part);
5625 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5626 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5627 }
5628 return parts;
5629}
5630
5631/* Subroutine of expand_expr: return nonzero iff there is no way that
5632 EXP can reference X, which is being modified. TOP_P is nonzero if this
5633 call is going to be used to determine whether we need a temporary
5634 for EXP, as opposed to a recursive call to this function.
5635
5636 It is always safe for this routine to return zero since it merely
5637 searches for optimization opportunities. */
5638
5639int
5640safe_from_p (x, exp, top_p)
5641 rtx x;
5642 tree exp;
5643 int top_p;
5644{
5645 rtx exp_rtl = 0;
5646 int i, nops;
5647 static tree save_expr_list;
5648
5649 if (x == 0
5650 /* If EXP has varying size, we MUST use a target since we currently
5651 have no way of allocating temporaries of variable size
5652 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5653 So we assume here that something at a higher level has prevented a
5654 clash. This is somewhat bogus, but the best we can do. Only
5655 do this when X is BLKmode and when we are at the top level. */
5656 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5657 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5658 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5659 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5660 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5661 != INTEGER_CST)
5662 && GET_MODE (x) == BLKmode)
5663 /* If X is in the outgoing argument area, it is always safe. */
5664 || (GET_CODE (x) == MEM
5665 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5666 || (GET_CODE (XEXP (x, 0)) == PLUS
5667 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5668 return 1;
5669
5670 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5671 find the underlying pseudo. */
5672 if (GET_CODE (x) == SUBREG)
5673 {
5674 x = SUBREG_REG (x);
5675 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5676 return 0;
5677 }
5678
5679 /* A SAVE_EXPR might appear many times in the expression passed to the
5680 top-level safe_from_p call, and if it has a complex subexpression,
5681 examining it multiple times could result in a combinatorial explosion.
5682 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5683 with optimization took about 28 minutes to compile -- even though it was
5684 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5685 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5686 we have processed. Note that the only test of top_p was above. */
5687
5688 if (top_p)
5689 {
5690 int rtn;
5691 tree t;
5692
5693 save_expr_list = 0;
5694
5695 rtn = safe_from_p (x, exp, 0);
5696
5697 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5698 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5699
5700 return rtn;
5701 }
5702
5703 /* Now look at our tree code and possibly recurse. */
5704 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5705 {
5706 case 'd':
5707 exp_rtl = DECL_RTL (exp);
5708 break;
5709
5710 case 'c':
5711 return 1;
5712
5713 case 'x':
5714 if (TREE_CODE (exp) == TREE_LIST)
5715 return ((TREE_VALUE (exp) == 0
5716 || safe_from_p (x, TREE_VALUE (exp), 0))
5717 && (TREE_CHAIN (exp) == 0
5718 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5719 else if (TREE_CODE (exp) == ERROR_MARK)
5720 return 1; /* An already-visited SAVE_EXPR? */
5721 else
5722 return 0;
5723
5724 case '1':
5725 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5726
5727 case '2':
5728 case '<':
5729 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5730 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5731
5732 case 'e':
5733 case 'r':
5734 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5735 the expression. If it is set, we conflict iff we are that rtx or
5736 both are in memory. Otherwise, we check all operands of the
5737 expression recursively. */
5738
5739 switch (TREE_CODE (exp))
5740 {
5741 case ADDR_EXPR:
5742 return (staticp (TREE_OPERAND (exp, 0))
5743 || TREE_STATIC (exp)
5744 || safe_from_p (x, TREE_OPERAND (exp, 0), 0));
5745
5746 case INDIRECT_REF:
5747 if (GET_CODE (x) == MEM
5748 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5749 get_alias_set (exp)))
5750 return 0;
5751 break;
5752
5753 case CALL_EXPR:
5754 /* Assume that the call will clobber all hard registers and
5755 all of memory. */
5756 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5757 || GET_CODE (x) == MEM)
5758 return 0;
5759 break;
5760
5761 case RTL_EXPR:
5762 /* If a sequence exists, we would have to scan every instruction
5763 in the sequence to see if it was safe. This is probably not
5764 worthwhile. */
5765 if (RTL_EXPR_SEQUENCE (exp))
5766 return 0;
5767
5768 exp_rtl = RTL_EXPR_RTL (exp);
5769 break;
5770
5771 case WITH_CLEANUP_EXPR:
5772 exp_rtl = RTL_EXPR_RTL (exp);
5773 break;
5774
5775 case CLEANUP_POINT_EXPR:
5776 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5777
5778 case SAVE_EXPR:
5779 exp_rtl = SAVE_EXPR_RTL (exp);
5780 if (exp_rtl)
5781 break;
5782
5783 /* If we've already scanned this, don't do it again. Otherwise,
5784 show we've scanned it and record for clearing the flag if we're
5785 going on. */
5786 if (TREE_PRIVATE (exp))
5787 return 1;
5788
5789 TREE_PRIVATE (exp) = 1;
5790 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5791 {
5792 TREE_PRIVATE (exp) = 0;
5793 return 0;
5794 }
5795
5796 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5797 return 1;
5798
5799 case BIND_EXPR:
5800 /* The only operand we look at is operand 1. The rest aren't
5801 part of the expression. */
5802 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5803
5804 case METHOD_CALL_EXPR:
5805 /* This takes an rtx argument, but shouldn't appear here. */
5806 abort ();
5807
5808 default:
5809 break;
5810 }
5811
5812 /* If we have an rtx, we do not need to scan our operands. */
5813 if (exp_rtl)
5814 break;
5815
5816 nops = first_rtl_op (TREE_CODE (exp));
5817 for (i = 0; i < nops; i++)
5818 if (TREE_OPERAND (exp, i) != 0
5819 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5820 return 0;
5821
5822 /* If this is a language-specific tree code, it may require
5823 special handling. */
5824 if ((unsigned int) TREE_CODE (exp)
5825 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5826 && lang_safe_from_p
5827 && !(*lang_safe_from_p) (x, exp))
5828 return 0;
5829 }
5830
5831 /* If we have an rtl, find any enclosed object. Then see if we conflict
5832 with it. */
5833 if (exp_rtl)
5834 {
5835 if (GET_CODE (exp_rtl) == SUBREG)
5836 {
5837 exp_rtl = SUBREG_REG (exp_rtl);
5838 if (GET_CODE (exp_rtl) == REG
5839 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5840 return 0;
5841 }
5842
5843 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5844 are memory and they conflict. */
5845 return ! (rtx_equal_p (x, exp_rtl)
5846 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5847 && true_dependence (exp_rtl, GET_MODE (x), x,
5848 rtx_addr_varies_p)));
5849 }
5850
5851 /* If we reach here, it is safe. */
5852 return 1;
5853}
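/* Hedged usage sketch: a caller deciding whether TARGET can double as
   the scratch while expanding RHS.  The wrapper is hypothetical; only
   the safe_from_p call reflects this file.  */

#if 0 /* illustration only */
static rtx
choose_rhs_target (rtx target, tree rhs)
{
  /* A nonzero result means RHS can never reference TARGET, so partial
     stores into TARGET cannot clobber inputs of RHS.  TOP_P == 1
     enables the SAVE_EXPR bookkeeping above.  */
  return safe_from_p (target, rhs, 1) ? target : NULL_RTX;
}
#endif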
5854
5855/* Subroutine of expand_expr: return nonzero iff EXP is an
5856 expression whose type is statically determinable. */
5857
5858static int
5859fixed_type_p (exp)
5860 tree exp;
5861{
5862 if (TREE_CODE (exp) == PARM_DECL
5863 || TREE_CODE (exp) == VAR_DECL
5864 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5865 || TREE_CODE (exp) == COMPONENT_REF
5866 || TREE_CODE (exp) == ARRAY_REF)
5867 return 1;
5868 return 0;
5869}
5870
5871/* Subroutine of expand_expr: return rtx if EXP is a
5872 variable or parameter; else return 0. */
5873
5874static rtx
5875var_rtx (exp)
5876 tree exp;
5877{
5878 STRIP_NOPS (exp);
5879 switch (TREE_CODE (exp))
5880 {
5881 case PARM_DECL:
5882 case VAR_DECL:
5883 return DECL_RTL (exp);
5884 default:
5885 return 0;
5886 }
5887}
5888
5889#ifdef MAX_INTEGER_COMPUTATION_MODE
5890
5891void
5892check_max_integer_computation_mode (exp)
5893 tree exp;
5894{
5895 enum tree_code code;
5896 enum machine_mode mode;
5897
5898 /* Strip any NOPs that don't change the mode. */
5899 STRIP_NOPS (exp);
5900 code = TREE_CODE (exp);
5901
5902 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5903 if (code == NOP_EXPR
5904 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5905 return;
5906
5907 /* First check the type of the overall operation. We need only look at
5908 unary, binary and relational operations. */
5909 if (TREE_CODE_CLASS (code) == '1'
5910 || TREE_CODE_CLASS (code) == '2'
5911 || TREE_CODE_CLASS (code) == '<')
5912 {
5913 mode = TYPE_MODE (TREE_TYPE (exp));
5914 if (GET_MODE_CLASS (mode) == MODE_INT
5915 && mode > MAX_INTEGER_COMPUTATION_MODE)
5916 internal_error ("unsupported wide integer operation");
5917 }
5918
5919 /* Check operand of a unary op. */
5920 if (TREE_CODE_CLASS (code) == '1')
5921 {
5922 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5923 if (GET_MODE_CLASS (mode) == MODE_INT
5924 && mode > MAX_INTEGER_COMPUTATION_MODE)
5925 internal_error ("unsupported wide integer operation");
5926 }
5927
5928 /* Check operands of a binary/comparison op. */
5929 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5930 {
5931 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5932 if (GET_MODE_CLASS (mode) == MODE_INT
5933 && mode > MAX_INTEGER_COMPUTATION_MODE)
5934 internal_error ("unsupported wide integer operation");
5935
5936 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5937 if (GET_MODE_CLASS (mode) == MODE_INT
5938 && mode > MAX_INTEGER_COMPUTATION_MODE)
5939 internal_error ("unsupported wide integer operation");
5940 }
5941}
5942#endif
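/* For instance, a target could (hypothetically) limit integer
   arithmetic to 32 bits with

     #define MAX_INTEGER_COMPUTATION_MODE SImode

   in its target header; a DImode addition in the source would then
   reach one of the internal_error calls above.  */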
5943\f
5944/* expand_expr: generate code for computing expression EXP.
5945 An rtx for the computed value is returned. The value is never null.
5946 In the case of a void EXP, const0_rtx is returned.
5947
5948 The value may be stored in TARGET if TARGET is nonzero.
5949 TARGET is just a suggestion; callers must assume that
5950 the rtx returned may not be the same as TARGET.
5951
5952 If TARGET is CONST0_RTX, it means that the value will be ignored.
5953
5954 If TMODE is not VOIDmode, it suggests generating the
5955 result in mode TMODE. But this is done only when convenient.
5956 Otherwise, TMODE is ignored and the value is generated in its natural mode.
5957 TMODE is just a suggestion; callers must assume that
5958 the rtx returned may not have mode TMODE.
5959
5960 Note that TARGET may have neither TMODE nor MODE. In that case, it
5961 probably will not be used.
5962
5963 If MODIFIER is EXPAND_SUM then when EXP is an addition
5964 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5965 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5966 products as above, or REG or MEM, or constant.
5967 Ordinarily in such cases we would output mul or add instructions
5968 and then return a pseudo reg containing the sum.
5969
5970 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5971 it also marks a label as absolutely required (it can't be dead).
5972 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5973 This is used for outputting expressions used in initializers.
5974
5975 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5976 with a constant address even if that address is not normally legitimate.
5977 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
5978
5979rtx
5980expand_expr (exp, target, tmode, modifier)
5981 register tree exp;
5982 rtx target;
5983 enum machine_mode tmode;
5984 enum expand_modifier modifier;
5985{
5986 register rtx op0, op1, temp;
5987 tree type = TREE_TYPE (exp);
5988 int unsignedp = TREE_UNSIGNED (type);
5989 register enum machine_mode mode;
5990 register enum tree_code code = TREE_CODE (exp);
5991 optab this_optab;
5992 rtx subtarget, original_target;
5993 int ignore;
5994 tree context;
5995 /* Used by check-memory-usage to make modifier read only. */
5996 enum expand_modifier ro_modifier;
5997
5998 /* Handle ERROR_MARK before anybody tries to access its type. */
5999 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6000 {
6001 op0 = CONST0_RTX (tmode);
6002 if (op0 != 0)
6003 return op0;
6004 return const0_rtx;
6005 }
6006
6007 mode = TYPE_MODE (type);
6008 /* Use subtarget as the target for operand 0 of a binary operation. */
6009 subtarget = get_subtarget (target);
6010 original_target = target;
6011 ignore = (target == const0_rtx
6012 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6013 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6014 || code == COND_EXPR)
6015 && TREE_CODE (type) == VOID_TYPE));
6016
6017 /* Make a read-only version of the modifier. */
6018 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
6019 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
6020 ro_modifier = modifier;
6021 else
6022 ro_modifier = EXPAND_NORMAL;
6023
6024 /* If we are going to ignore this result, we need only do something
6025 if there is a side-effect somewhere in the expression. If there
6026 is, short-circuit the most common cases here. Note that we must
6027 not call expand_expr with anything but const0_rtx in case this
6028 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6029
6030 if (ignore)
6031 {
6032 if (! TREE_SIDE_EFFECTS (exp))
6033 return const0_rtx;
6034
6035 /* Ensure we reference a volatile object even if value is ignored, but
6036 don't do this if all we are doing is taking its address. */
6037 if (TREE_THIS_VOLATILE (exp)
6038 && TREE_CODE (exp) != FUNCTION_DECL
6039 && mode != VOIDmode && mode != BLKmode
6040 && modifier != EXPAND_CONST_ADDRESS)
6041 {
6042 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
6043 if (GET_CODE (temp) == MEM)
6044 temp = copy_to_reg (temp);
6045 return const0_rtx;
6046 }
6047
6048 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6049 || code == INDIRECT_REF || code == BUFFER_REF)
6050 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6051 VOIDmode, ro_modifier);
6052 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6053 || code == ARRAY_REF)
6054 {
6055 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
6056 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
6057 return const0_rtx;
6058 }
6059 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6060 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6061 /* If the second operand has no side effects, just evaluate
6062 the first. */
6063 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6064 VOIDmode, ro_modifier);
6065 else if (code == BIT_FIELD_REF)
6066 {
6067 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
6068 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
6069 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, ro_modifier);
6070 return const0_rtx;
6071 }
6072 ;
6073 target = 0;
6074 }
6075
6076#ifdef MAX_INTEGER_COMPUTATION_MODE
6077 /* Only check stuff here if the mode we want is different from the mode
6078 of the expression; if it's the same, check_max_integer_computation_mode
6079 will handle it. Do we really need to check this stuff at all? */
6080
6081 if (target
6082 && GET_MODE (target) != mode
6083 && TREE_CODE (exp) != INTEGER_CST
6084 && TREE_CODE (exp) != PARM_DECL
6085 && TREE_CODE (exp) != ARRAY_REF
6086 && TREE_CODE (exp) != COMPONENT_REF
6087 && TREE_CODE (exp) != BIT_FIELD_REF
6088 && TREE_CODE (exp) != INDIRECT_REF
6089 && TREE_CODE (exp) != CALL_EXPR
6090 && TREE_CODE (exp) != VAR_DECL
6091 && TREE_CODE (exp) != RTL_EXPR)
6092 {
6093 enum machine_mode mode = GET_MODE (target);
6094
6095 if (GET_MODE_CLASS (mode) == MODE_INT
6096 && mode > MAX_INTEGER_COMPUTATION_MODE)
6097 internal_error ("unsupported wide integer operation");
6098 }
6099
6100 if (tmode != mode
6101 && TREE_CODE (exp) != INTEGER_CST
6102 && TREE_CODE (exp) != PARM_DECL
6103 && TREE_CODE (exp) != ARRAY_REF
6104 && TREE_CODE (exp) != COMPONENT_REF
6105 && TREE_CODE (exp) != BIT_FIELD_REF
6106 && TREE_CODE (exp) != INDIRECT_REF
6107 && TREE_CODE (exp) != VAR_DECL
6108 && TREE_CODE (exp) != CALL_EXPR
6109 && TREE_CODE (exp) != RTL_EXPR
6110 && GET_MODE_CLASS (tmode) == MODE_INT
6111 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6112 internal_error ("unsupported wide integer operation");
6113
6114 check_max_integer_computation_mode (exp);
6115#endif
6116
6117 /* If will do cse, generate all results into pseudo registers
6118 since 1) that allows cse to find more things
6119 and 2) otherwise cse could produce an insn the machine
6120 cannot support. */
6121
6122 if (! cse_not_expected && mode != BLKmode && target
6123 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
6124 target = subtarget;
6125
6126 switch (code)
6127 {
6128 case LABEL_DECL:
6129 {
6130 tree function = decl_function_context (exp);
6131 /* Handle using a label in a containing function. */
6132 if (function != current_function_decl
6133 && function != inline_function_decl && function != 0)
6134 {
6135 struct function *p = find_function_data (function);
6136 p->expr->x_forced_labels
6137 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6138 p->expr->x_forced_labels);
6139 }
6140 else
6141 {
6142 if (modifier == EXPAND_INITIALIZER)
6143 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6144 label_rtx (exp),
6145 forced_labels);
6146 }
6147
6148 temp = gen_rtx_MEM (FUNCTION_MODE,
6149 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6150 if (function != current_function_decl
6151 && function != inline_function_decl && function != 0)
6152 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6153 return temp;
6154 }
6155
6156 case PARM_DECL:
6157 if (DECL_RTL (exp) == 0)
6158 {
6159 error_with_decl (exp, "prior parameter's size depends on `%s'");
6160 return CONST0_RTX (mode);
6161 }
6162
6163 /* ... fall through ... */
6164
6165 case VAR_DECL:
6166 /* If a static var's type was incomplete when the decl was written,
6167 but the type is complete now, lay out the decl now. */
6168 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6169 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6170 {
6171 layout_decl (exp, 0);
6172 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
6173 }
6174
6175 /* Although static-storage variables start off initialized, according to
6176 ANSI C, a memcpy could overwrite them with uninitialized values. So
6177 we check them too. This also lets us check for read-only variables
6178 accessed via a non-const declaration, in case it won't be detected
6179 any other way (e.g., in an embedded system or OS kernel without
6180 memory protection).
6181
6182 Aggregates are not checked here; they're handled elsewhere. */
6183 if (cfun && current_function_check_memory_usage
6184 && code == VAR_DECL
6185 && GET_CODE (DECL_RTL (exp)) == MEM
6186 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6187 {
6188 enum memory_use_mode memory_usage;
6189 memory_usage = get_memory_usage_from_modifier (modifier);
6190
6191 in_check_memory_usage = 1;
6192 if (memory_usage != MEMORY_USE_DONT)
6193 emit_library_call (chkr_check_addr_libfunc,
6194 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
6195 XEXP (DECL_RTL (exp), 0), Pmode,
6196 GEN_INT (int_size_in_bytes (type)),
6197 TYPE_MODE (sizetype),
6198 GEN_INT (memory_usage),
6199 TYPE_MODE (integer_type_node));
6200 in_check_memory_usage = 0;
6201 }
6202
6203 /* ... fall through ... */
6204
6205 case FUNCTION_DECL:
6206 case RESULT_DECL:
6207 if (DECL_RTL (exp) == 0)
6208 abort ();
6209
6210 /* Ensure the variable is marked as used even if it doesn't go through
6211 a parser. If it hasn't been used yet, write out an external
6212 definition. */
6213 if (! TREE_USED (exp))
6214 {
6215 assemble_external (exp);
6216 TREE_USED (exp) = 1;
6217 }
6218
6219 /* Show we haven't gotten RTL for this yet. */
6220 temp = 0;
6221
6222 /* Handle variables inherited from containing functions. */
6223 context = decl_function_context (exp);
6224
6225 /* We treat inline_function_decl as an alias for the current function
6226 because that is the inline function whose vars, types, etc.
6227 are being merged into the current function.
6228 See expand_inline_function. */
6229
6230 if (context != 0 && context != current_function_decl
6231 && context != inline_function_decl
6232 /* If var is static, we don't need a static chain to access it. */
6233 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6234 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6235 {
6236 rtx addr;
6237
6238 /* Mark as non-local and addressable. */
6239 DECL_NONLOCAL (exp) = 1;
6240 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6241 abort ();
6242 mark_addressable (exp);
6243 if (GET_CODE (DECL_RTL (exp)) != MEM)
6244 abort ();
6245 addr = XEXP (DECL_RTL (exp), 0);
6246 if (GET_CODE (addr) == MEM)
6247 addr = change_address (addr, Pmode,
6248 fix_lexical_addr (XEXP (addr, 0), exp));
6249 else
6250 addr = fix_lexical_addr (addr, exp);
6251
6252 temp = change_address (DECL_RTL (exp), mode, addr);
6253 }
6254
6255 /* This is the case of an array whose size is to be determined
6256 from its initializer, while the initializer is still being parsed.
6257 See expand_decl. */
6258
6259 else if (GET_CODE (DECL_RTL (exp)) == MEM
6260 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6261 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
6262 XEXP (DECL_RTL (exp), 0));
6263
6264 /* If DECL_RTL is memory, we are in the normal case and either
6265 the address is not valid or it is not a register and -fforce-addr
6266 is specified, get the address into a register. */
6267
6268 else if (GET_CODE (DECL_RTL (exp)) == MEM
6269 && modifier != EXPAND_CONST_ADDRESS
6270 && modifier != EXPAND_SUM
6271 && modifier != EXPAND_INITIALIZER
6272 && (! memory_address_p (DECL_MODE (exp),
6273 XEXP (DECL_RTL (exp), 0))
6274 || (flag_force_addr
6275 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6276 temp = change_address (DECL_RTL (exp), VOIDmode,
6277 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6278
6279 /* If we got something, return it. But first, set the alignment
6280 if the address is a register. */
6281 if (temp != 0)
6282 {
6283 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6284 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6285
6286 return temp;
6287 }
6288
6289 /* If the mode of DECL_RTL does not match that of the decl, it
6290 must be a promoted value. We return a SUBREG of the wanted mode,
6291 but mark it so that we know that it was already extended. */
6292
6293 if (GET_CODE (DECL_RTL (exp)) == REG
6294 && GET_MODE (DECL_RTL (exp)) != mode)
6295 {
6296 /* Get the signedness used for this variable. Ensure we get the
6297 same mode we got when the variable was declared. */
6298 if (GET_MODE (DECL_RTL (exp))
6299 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6300 abort ();
6301
6302 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
6303 SUBREG_PROMOTED_VAR_P (temp) = 1;
6304 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6305 return temp;
6306 }
6307
6308 return DECL_RTL (exp);
6309
6310 case INTEGER_CST:
6311 return immed_double_const (TREE_INT_CST_LOW (exp),
6312 TREE_INT_CST_HIGH (exp), mode);
6313
6314 case CONST_DECL:
6315 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6316 EXPAND_MEMORY_USE_BAD);
6317
6318 case REAL_CST:
6319 /* If optimized, generate immediate CONST_DOUBLE
6320 which will be turned into memory by reload if necessary.
6321
6322 We used to force a register so that loop.c could see it. But
6323 this does not allow gen_* patterns to perform optimizations with
6324 the constants. It also produces two insns in cases like "x = 1.0;".
6325 On most machines, floating-point constants are not permitted in
6326 many insns, so we'd end up copying it to a register in any case.
6327
6328 Now, we do the copying in expand_binop, if appropriate. */
6329 return immed_real_const (exp);
6330
6331 case COMPLEX_CST:
6332 case STRING_CST:
6333 if (! TREE_CST_RTL (exp))
6334 output_constant_def (exp, 1);
6335
6336 /* TREE_CST_RTL probably contains a constant address.
6337 On RISC machines where a constant address isn't valid,
6338 make some insns to get that address into a register. */
6339 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6340 && modifier != EXPAND_CONST_ADDRESS
6341 && modifier != EXPAND_INITIALIZER
6342 && modifier != EXPAND_SUM
6343 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6344 || (flag_force_addr
6345 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6346 return change_address (TREE_CST_RTL (exp), VOIDmode,
6347 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6348 return TREE_CST_RTL (exp);
6349
6350 case EXPR_WITH_FILE_LOCATION:
6351 {
6352 rtx to_return;
6353 const char *saved_input_filename = input_filename;
6354 int saved_lineno = lineno;
6355 input_filename = EXPR_WFL_FILENAME (exp);
6356 lineno = EXPR_WFL_LINENO (exp);
6357 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6358 emit_line_note (input_filename, lineno);
6359 /* Possibly avoid switching back and forth here. */
6360 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6361 input_filename = saved_input_filename;
6362 lineno = saved_lineno;
6363 return to_return;
6364 }
6365
6366 case SAVE_EXPR:
6367 context = decl_function_context (exp);
6368
6369 /* If this SAVE_EXPR was at global context, assume we are an
6370 initialization function and move it into our context. */
6371 if (context == 0)
6372 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6373
bbf6f052
RK
6374 /* We treat inline_function_decl as an alias for the current function
6375 because that is the inline function whose vars, types, etc.
6376 are being merged into the current function.
6377 See expand_inline_function. */
6378 if (context == current_function_decl || context == inline_function_decl)
6379 context = 0;
6380
6381 /* If this is non-local, handle it. */
6382 if (context)
6383 {
d0977240
RK
6384 /* The following call just exists to abort if the context is
6385 not of a containing function. */
6386 find_function_data (context);
6387
bbf6f052
RK
6388 temp = SAVE_EXPR_RTL (exp);
6389 if (temp && GET_CODE (temp) == REG)
6390 {
6391 put_var_into_stack (exp);
6392 temp = SAVE_EXPR_RTL (exp);
6393 }
6394 if (temp == 0 || GET_CODE (temp) != MEM)
6395 abort ();
6396 return change_address (temp, mode,
6397 fix_lexical_addr (XEXP (temp, 0), exp));
6398 }
6399 if (SAVE_EXPR_RTL (exp) == 0)
6400 {
06089a8b
RK
6401 if (mode == VOIDmode)
6402 temp = const0_rtx;
6403 else
1da68f56
RK
6404 temp = assign_temp (build_qualified_type (type,
6405 (TYPE_QUALS (type)
6406 | TYPE_QUAL_CONST)),
6407 3, 0, 0);
1499e0a8 6408
bbf6f052 6409 SAVE_EXPR_RTL (exp) = temp;
bbf6f052 6410 if (!optimize && GET_CODE (temp) == REG)
38a448ca
RH
6411 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6412 save_expr_regs);
ff78f773
RK
6413
6414 /* If the mode of TEMP does not match that of the expression, it
6415 must be a promoted value. We pass store_expr a SUBREG of the
6416 wanted mode but mark it so that we know that it was already
6417 extended. Note that `unsignedp' was modified above in
6418 this case. */
6419
6420 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6421 {
38a448ca 6422 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
ff78f773
RK
6423 SUBREG_PROMOTED_VAR_P (temp) = 1;
6424 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6425 }
6426
4c7a0be9 6427 if (temp == const0_rtx)
921b3427
RK
6428 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6429 EXPAND_MEMORY_USE_BAD);
4c7a0be9
JW
6430 else
6431 store_expr (TREE_OPERAND (exp, 0), temp, 0);
e5e809f4
JL
6432
6433 TREE_USED (exp) = 1;
bbf6f052 6434 }
1499e0a8
RK
6435
6436 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6437 must be a promoted value. We return a SUBREG of the wanted mode,
0f41302f 6438 but mark it so that we know that it was already extended. */
1499e0a8
RK
6439
6440 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6441 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6442 {
e70d22c8
RK
6443 /* Compute the signedness and make the proper SUBREG. */
6444 promote_mode (type, mode, &unsignedp, 0);
38a448ca 6445 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
1499e0a8
RK
6446 SUBREG_PROMOTED_VAR_P (temp) = 1;
6447 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6448 return temp;
6449 }
6450
bbf6f052
RK
6451 return SAVE_EXPR_RTL (exp);
6452
679163cf
MS
6453 case UNSAVE_EXPR:
6454 {
6455 rtx temp;
6456 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6457 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6458 return temp;
6459 }
6460
b50d17a1 6461 case PLACEHOLDER_EXPR:
e9a25f70
JL
6462 {
6463 tree placeholder_expr;
6464
6465 /* If there is an object on the head of the placeholder list,
e5e809f4 6466 see if some object in it is of type TYPE or a pointer to it. For
e9a25f70
JL
6467 further information, see tree.def. */
6468 for (placeholder_expr = placeholder_list;
6469 placeholder_expr != 0;
6470 placeholder_expr = TREE_CHAIN (placeholder_expr))
6471 {
6472 tree need_type = TYPE_MAIN_VARIANT (type);
6473 tree object = 0;
6474 tree old_list = placeholder_list;
6475 tree elt;
6476
e5e809f4 6477 /* Find the outermost reference that is of the type we want.
3a94c984 6478 If none, see if any object has a type that is a pointer to
e5e809f4
JL
6479 the type we want. */
6480 for (elt = TREE_PURPOSE (placeholder_expr);
6481 elt != 0 && object == 0;
6482 elt
6483 = ((TREE_CODE (elt) == COMPOUND_EXPR
6484 || TREE_CODE (elt) == COND_EXPR)
6485 ? TREE_OPERAND (elt, 1)
6486 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6487 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6488 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6489 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6490 ? TREE_OPERAND (elt, 0) : 0))
6491 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6492 object = elt;
e9a25f70 6493
e9a25f70 6494 for (elt = TREE_PURPOSE (placeholder_expr);
e5e809f4
JL
6495 elt != 0 && object == 0;
6496 elt
6497 = ((TREE_CODE (elt) == COMPOUND_EXPR
6498 || TREE_CODE (elt) == COND_EXPR)
6499 ? TREE_OPERAND (elt, 1)
6500 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6501 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6502 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6503 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6504 ? TREE_OPERAND (elt, 0) : 0))
6505 if (POINTER_TYPE_P (TREE_TYPE (elt))
6506 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
e9a25f70 6507 == need_type))
e5e809f4 6508 object = build1 (INDIRECT_REF, need_type, elt);
dc500fa1 6509
e9a25f70 6510 if (object != 0)
2cde2255 6511 {
e9a25f70
JL
6512 /* Expand this object skipping the list entries before
6513 it was found in case it is also a PLACEHOLDER_EXPR.
6514 In that case, we want to translate it using subsequent
6515 entries. */
6516 placeholder_list = TREE_CHAIN (placeholder_expr);
6517 temp = expand_expr (object, original_target, tmode,
6518 ro_modifier);
6519 placeholder_list = old_list;
6520 return temp;
2cde2255 6521 }
e9a25f70
JL
6522 }
6523 }
b50d17a1
RK
6524
6525 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6526 abort ();
6527
6528 case WITH_RECORD_EXPR:
6529 /* Put the object on the placeholder list, expand our first operand,
6530 and pop the list. */
6531 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6532 placeholder_list);
6533 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
921b3427 6534 tmode, ro_modifier);
b50d17a1
RK
6535 placeholder_list = TREE_CHAIN (placeholder_list);
6536 return target;
6537
70e6ca43
APB
6538 case GOTO_EXPR:
6539 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6540 expand_goto (TREE_OPERAND (exp, 0));
6541 else
6542 expand_computed_goto (TREE_OPERAND (exp, 0));
6543 return const0_rtx;
6544
bbf6f052 6545 case EXIT_EXPR:
e44842fe
RK
6546 expand_exit_loop_if_false (NULL_PTR,
6547 invert_truthvalue (TREE_OPERAND (exp, 0)));
bbf6f052
RK
6548 return const0_rtx;
6549
f42e28dd
APB
6550 case LABELED_BLOCK_EXPR:
6551 if (LABELED_BLOCK_BODY (exp))
6552 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6553 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6554 return const0_rtx;
6555
6556 case EXIT_BLOCK_EXPR:
6557 if (EXIT_BLOCK_RETURN (exp))
ab87f8c8 6558 sorry ("returned value in block_exit_expr");
f42e28dd
APB
6559 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6560 return const0_rtx;
6561
bbf6f052 6562 case LOOP_EXPR:
0088fcb1 6563 push_temp_slots ();
bbf6f052
RK
6564 expand_start_loop (1);
6565 expand_expr_stmt (TREE_OPERAND (exp, 0));
6566 expand_end_loop ();
0088fcb1 6567 pop_temp_slots ();
bbf6f052
RK
6568
6569 return const0_rtx;
6570
6571 case BIND_EXPR:
6572 {
6573 tree vars = TREE_OPERAND (exp, 0);
6574 int vars_need_expansion = 0;
6575
6576 /* Need to open a binding contour here because
e976b8b2 6577 if there are any cleanups they must be contained here. */
8e91754e 6578 expand_start_bindings (2);
bbf6f052 6579
2df53c0b
RS
6580 /* Mark the corresponding BLOCK for output in its proper place. */
6581 if (TREE_OPERAND (exp, 2) != 0
6582 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6583 insert_block (TREE_OPERAND (exp, 2));
bbf6f052
RK
6584
6585 /* If VARS have not yet been expanded, expand them now. */
6586 while (vars)
6587 {
6588 if (DECL_RTL (vars) == 0)
6589 {
6590 vars_need_expansion = 1;
6591 expand_decl (vars);
6592 }
6593 expand_decl_init (vars);
6594 vars = TREE_CHAIN (vars);
6595 }
6596
921b3427 6597 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
bbf6f052
RK
6598
6599 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6600
6601 return temp;
6602 }
6603
6604 case RTL_EXPR:
83b853c9
JM
6605 if (RTL_EXPR_SEQUENCE (exp))
6606 {
6607 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6608 abort ();
6609 emit_insns (RTL_EXPR_SEQUENCE (exp));
6610 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6611 }
64dc53f3
MM
6612 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6613 free_temps_for_rtl_expr (exp);
bbf6f052
RK
6614 return RTL_EXPR_RTL (exp);
6615
6616 case CONSTRUCTOR:
dd27116b
RK
6617 /* If we don't need the result, just ensure we evaluate any
6618 subexpressions. */
6619 if (ignore)
6620 {
6621 tree elt;
6622 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
921b3427
RK
6623 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6624 EXPAND_MEMORY_USE_BAD);
dd27116b
RK
6625 return const0_rtx;
6626 }
3207b172 6627
4af3895e
JVA
6628 /* All elts simple constants => refer to a constant in memory. But
6629 if this is a non-BLKmode mode, let it store a field at a time
6630 since that should make a CONST_INT or CONST_DOUBLE when we
3207b172 6631 fold. Likewise, if we have a target we can use, it is best to
d720b9d1
RK
6632 store directly into the target unless the type is large enough
6633 that memcpy will be used. If we are making an initializer and
3207b172 6634 all operands are constant, put it in memory as well. */
dd27116b 6635 else if ((TREE_STATIC (exp)
3207b172 6636 && ((mode == BLKmode
e5e809f4 6637 && ! (target != 0 && safe_from_p (target, exp, 1)))
d720b9d1 6638 || TREE_ADDRESSABLE (exp)
19caa751 6639 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
3a94c984 6640 && (! MOVE_BY_PIECES_P
19caa751
RK
6641 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6642 TYPE_ALIGN (type)))
9de08200 6643 && ! mostly_zeros_p (exp))))
dd27116b 6644 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
bbf6f052 6645 {
bd7cf17e 6646 rtx constructor = output_constant_def (exp, 1);
19caa751 6647
b552441b
RS
6648 if (modifier != EXPAND_CONST_ADDRESS
6649 && modifier != EXPAND_INITIALIZER
6650 && modifier != EXPAND_SUM
d6a5ac33
RK
6651 && (! memory_address_p (GET_MODE (constructor),
6652 XEXP (constructor, 0))
6653 || (flag_force_addr
6654 && GET_CODE (XEXP (constructor, 0)) != REG)))
bbf6f052
RK
6655 constructor = change_address (constructor, VOIDmode,
6656 XEXP (constructor, 0));
6657 return constructor;
6658 }
bbf6f052
RK
6659 else
6660 {
e9ac02a6
JW
6661 /* Handle calls that pass values in multiple non-contiguous
6662 locations. The Irix 6 ABI has examples of this. */
e5e809f4 6663 if (target == 0 || ! safe_from_p (target, exp, 1)
e9ac02a6 6664 || GET_CODE (target) == PARALLEL)
1da68f56
RK
6665 target
6666 = assign_temp (build_qualified_type (type,
6667 (TYPE_QUALS (type)
6668 | (TREE_READONLY (exp)
6669 * TYPE_QUAL_CONST))),
6670 TREE_ADDRESSABLE (exp), 1, 1);
07604beb 6671
b7010412
RK
6672 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6673 int_size_in_bytes (TREE_TYPE (exp)));
bbf6f052
RK
6674 return target;
6675 }
6676
6677 case INDIRECT_REF:
6678 {
6679 tree exp1 = TREE_OPERAND (exp, 0);
7581a30f 6680 tree index;
3a94c984
KH
6681 tree string = string_constant (exp1, &index);
6682
06eaa86f 6683 /* Try to optimize reads from const strings. */
7581a30f
JW
6684 if (string
6685 && TREE_CODE (string) == STRING_CST
6686 && TREE_CODE (index) == INTEGER_CST
05bccae2 6687 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
7581a30f 6688 && GET_MODE_CLASS (mode) == MODE_INT
06eaa86f
JW
6689 && GET_MODE_SIZE (mode) == 1
6690 && modifier != EXPAND_MEMORY_USE_WO)
05bccae2
RK
6691 return
6692 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
bbf6f052 6693
405f0da6
JW
6694 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6695 op0 = memory_address (mode, op0);
8c8a8e34 6696
01d939e8 6697 if (cfun && current_function_check_memory_usage
49ad7cfa 6698 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
921b3427
RK
6699 {
6700 enum memory_use_mode memory_usage;
6701 memory_usage = get_memory_usage_from_modifier (modifier);
6702
6703 if (memory_usage != MEMORY_USE_DONT)
c85f7c16
JL
6704 {
6705 in_check_memory_usage = 1;
ebb1b59a
BS
6706 emit_library_call (chkr_check_addr_libfunc,
6707 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, op0,
6708 Pmode, GEN_INT (int_size_in_bytes (type)),
c85f7c16
JL
6709 TYPE_MODE (sizetype),
6710 GEN_INT (memory_usage),
6711 TYPE_MODE (integer_type_node));
6712 in_check_memory_usage = 0;
6713 }
921b3427
RK
6714 }
6715
38a448ca 6716 temp = gen_rtx_MEM (mode, op0);
3bdf5ad1 6717 set_mem_attributes (temp, exp, 0);
1125706f
RK
6718
6719 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6720 here, because, in C and C++, the fact that a location is accessed
6721 through a pointer to const does not mean that the value there can
6722 never change. Languages where it can never change should
6723 also set TREE_STATIC. */
5cb7a25a 6724 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
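 /* Worked example (ours, not in the original source):

 int x = 1;
 const int *p = &x;
 int a = *p; (reads 1)
 x = 2;
 int b = *p; (reads 2; the location did change)

 So TREE_READONLY on the reference alone must not imply
 RTX_UNCHANGING_P; TREE_STATIC must hold as well. */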
14a774a9
RK
6725
6726 /* If we are writing to this object and its type is a record with
6727 readonly fields, we must mark it as readonly so it will
6728 conflict with readonly references to those fields. */
1da68f56 6729 if (modifier == EXPAND_MEMORY_USE_WO && readonly_fields_p (type))
14a774a9
RK
6730 RTX_UNCHANGING_P (temp) = 1;
6731
8c8a8e34
JW
6732 return temp;
6733 }
bbf6f052
RK
6734
6735 case ARRAY_REF:
742920c7
RK
6736 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6737 abort ();
bbf6f052 6738
bbf6f052 6739 {
742920c7
RK
6740 tree array = TREE_OPERAND (exp, 0);
6741 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6742 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
fed3cef0 6743 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
08293add 6744 HOST_WIDE_INT i;
b50d17a1 6745
d4c89139
PB
6746 /* Optimize the special case of a zero lower bound.
6747
6748 We convert the low_bound to sizetype to avoid some problems
6749 with constant folding. (E.g. suppose the lower bound is 1,
6750 and its mode is QI. Without the conversion, (ARRAY
6751 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
fed3cef0 6752 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
d4c89139 6753
742920c7 6754 if (! integer_zerop (low_bound))
fed3cef0 6755 index = size_diffop (index, convert (sizetype, low_bound));
742920c7 6756
742920c7 6757 /* Fold an expression like: "foo"[2].
ad2e7dd0
RK
6758 This is not done in fold so it won't happen inside &.
6759 Don't fold if this is for wide characters since it's too
6760 difficult to do correctly and this is a very rare case. */
742920c7
RK
6761
6762 if (TREE_CODE (array) == STRING_CST
6763 && TREE_CODE (index) == INTEGER_CST
05bccae2 6764 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
ad2e7dd0
RK
6765 && GET_MODE_CLASS (mode) == MODE_INT
6766 && GET_MODE_SIZE (mode) == 1)
05bccae2
RK
6767 return
6768 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
bbf6f052 6769
742920c7
RK
6770 /* If this is a constant index into a constant array,
6771 just get the value from the array. Handle both the cases when
6772 we have an explicit constructor and when our operand is a variable
6773 that was declared const. */
4af3895e 6774
05bccae2
RK
6775 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6776 && TREE_CODE (index) == INTEGER_CST
3a94c984 6777 && 0 > compare_tree_int (index,
05bccae2
RK
6778 list_length (CONSTRUCTOR_ELTS
6779 (TREE_OPERAND (exp, 0)))))
742920c7 6780 {
05bccae2
RK
6781 tree elem;
6782
6783 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6784 i = TREE_INT_CST_LOW (index);
6785 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6786 ;
6787
6788 if (elem)
6789 return expand_expr (fold (TREE_VALUE (elem)), target,
6790 tmode, ro_modifier);
742920c7 6791 }
3a94c984 6792
742920c7
RK
6793 else if (optimize >= 1
6794 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6795 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6796 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6797 {
08293add 6798 if (TREE_CODE (index) == INTEGER_CST)
742920c7
RK
6799 {
6800 tree init = DECL_INITIAL (array);
6801
742920c7
RK
6802 if (TREE_CODE (init) == CONSTRUCTOR)
6803 {
665f2503 6804 tree elem;
742920c7 6805
05bccae2 6806 for (elem = CONSTRUCTOR_ELTS (init);
5cb1bea4
JM
6807 (elem
6808 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
05bccae2
RK
6809 elem = TREE_CHAIN (elem))
6810 ;
6811
742920c7
RK
6812 if (elem)
6813 return expand_expr (fold (TREE_VALUE (elem)), target,
921b3427 6814 tmode, ro_modifier);
742920c7
RK
6815 }
6816 else if (TREE_CODE (init) == STRING_CST
05bccae2
RK
6817 && 0 > compare_tree_int (index,
6818 TREE_STRING_LENGTH (init)))
5c80f6e6
JJ
6819 {
6820 tree type = TREE_TYPE (TREE_TYPE (init));
6821 enum machine_mode mode = TYPE_MODE (type);
6822
6823 if (GET_MODE_CLASS (mode) == MODE_INT
6824 && GET_MODE_SIZE (mode) == 1)
6825 return (GEN_INT
6826 (TREE_STRING_POINTER
6827 (init)[TREE_INT_CST_LOW (index)]));
6828 }
742920c7
RK
6829 }
6830 }
6831 }
3a94c984 6832 /* Fall through. */
bbf6f052
RK
6833
6834 case COMPONENT_REF:
6835 case BIT_FIELD_REF:
4af3895e 6836 /* If the operand is a CONSTRUCTOR, we can just extract the
7a0b7b9a
RK
6837 appropriate field if it is present. Don't do this if we have
6838 already written the data since we want to refer to that copy
6839 and varasm.c assumes that's what we'll do. */
4af3895e 6840 if (code != ARRAY_REF
7a0b7b9a
RK
6841 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6842 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
4af3895e
JVA
6843 {
6844 tree elt;
6845
6846 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6847 elt = TREE_CHAIN (elt))
86b5812c
RK
6848 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6849 /* We can normally use the value of the field in the
6850 CONSTRUCTOR. However, if this is a bitfield in
6851 an integral mode that we can fit in a HOST_WIDE_INT,
6852 we must mask only the number of bits in the bitfield,
6853 since this is done implicitly by the constructor. If
6854 the bitfield does not meet either of those conditions,
6855 we can't do this optimization. */
6856 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6857 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6858 == MODE_INT)
6859 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6860 <= HOST_BITS_PER_WIDE_INT))))
6861 {
3a94c984 6862 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
86b5812c
RK
6863 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6864 {
9df2c88c
RK
6865 HOST_WIDE_INT bitsize
6866 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
86b5812c
RK
6867
6868 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6869 {
6870 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6871 op0 = expand_and (op0, op1, target);
6872 }
6873 else
6874 {
e5e809f4
JL
6875 enum machine_mode imode
6876 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
86b5812c 6877 tree count
e5e809f4
JL
6878 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6879 0);
86b5812c
RK
6880
6881 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6882 target, 0);
6883 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6884 target, 0);
6885 }
6886 }
6887
6888 return op0;
6889 }
4af3895e
JVA
6890 }
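 /* Worked example (ours, not in the original): for a 3-bit signed
 bitfield whose CONSTRUCTOR value expanded to the SImode constant
 5 (binary 101), the code above uses count = 32 - 3 = 29 and
 computes (5 << 29) >> 29 = -3 with an arithmetic right shift,
 sign-extending the field from its own width. An unsigned field
 is instead masked with (1 << 3) - 1 = 7, giving 5. */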
6891
bbf6f052
RK
6892 {
6893 enum machine_mode mode1;
770ae6cc 6894 HOST_WIDE_INT bitsize, bitpos;
7bb0943f 6895 tree offset;
bbf6f052 6896 int volatilep = 0;
729a2125 6897 unsigned int alignment;
839c4796
RK
6898 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6899 &mode1, &unsignedp, &volatilep,
6900 &alignment);
bbf6f052 6901
e7f3c83f
RK
6902 /* If we got back the original object, something is wrong. Perhaps
6903 we are evaluating an expression too early. In any event, don't
6904 infinitely recurse. */
6905 if (tem == exp)
6906 abort ();
6907
3d27140a 6908 /* If TEM's type is a union of variable size, pass TARGET to the inner
b74f5ff2
RK
6909 computation, since it will need a temporary and TARGET is known
6910 to be safe to use. This occurs in unchecked conversion in Ada. */
3a94c984 6911
b74f5ff2
RK
6912 op0 = expand_expr (tem,
6913 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6914 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6915 != INTEGER_CST)
6916 ? target : NULL_RTX),
4ed67205 6917 VOIDmode,
14a774a9
RK
6918 (modifier == EXPAND_INITIALIZER
6919 || modifier == EXPAND_CONST_ADDRESS)
e5e809f4 6920 ? modifier : EXPAND_NORMAL);
bbf6f052 6921
8c8a8e34 6922 /* If this is a constant, put it into a register if it is a
14a774a9 6923 legitimate constant and OFFSET is 0, and into memory if it isn't. */
8c8a8e34
JW
6924 if (CONSTANT_P (op0))
6925 {
6926 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
14a774a9
RK
6927 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6928 && offset == 0)
8c8a8e34
JW
6929 op0 = force_reg (mode, op0);
6930 else
6931 op0 = validize_mem (force_const_mem (mode, op0));
6932 }
6933
7bb0943f
RS
6934 if (offset != 0)
6935 {
906c4e36 6936 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7bb0943f 6937
14a774a9
RK
6938 /* If this object is in memory, put it into a register.
6939 This case can't occur in C, but can in Ada if we have
6940 unchecked conversion of an expression from a scalar type to
6941 an array or record type. */
6942 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6943 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6944 {
1da68f56
RK
6945 tree nt = build_qualified_type (TREE_TYPE (tem),
6946 (TYPE_QUALS (TREE_TYPE (tem))
6947 | TYPE_QUAL_CONST));
6948 rtx memloc = assign_temp (nt, 1, 1, 1);
14a774a9
RK
6949
6950 mark_temp_addr_taken (memloc);
6951 emit_move_insn (memloc, op0);
6952 op0 = memloc;
6953 }
6954
7bb0943f
RS
6955 if (GET_CODE (op0) != MEM)
6956 abort ();
2d48c13d
JL
6957
6958 if (GET_MODE (offset_rtx) != ptr_mode)
bd070e1a 6959 {
2d48c13d 6960#ifdef POINTERS_EXTEND_UNSIGNED
822a3443 6961 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
2d48c13d 6962#else
bd070e1a 6963 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
2d48c13d 6964#endif
bd070e1a 6965 }
2d48c13d 6966
14a774a9 6967 /* A constant address in OP0 can have VOIDmode, so we must not
efd07ca7 6968 call force_reg on it; avoid that case. */
89752202
HB
6969 if (GET_CODE (op0) == MEM
6970 && GET_MODE (op0) == BLKmode
efd07ca7 6971 && GET_MODE (XEXP (op0, 0)) != VOIDmode
14a774a9 6972 && bitsize != 0
3a94c984 6973 && (bitpos % bitsize) == 0
89752202 6974 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
19caa751 6975 && alignment == GET_MODE_ALIGNMENT (mode1))
89752202
HB
6976 {
6977 rtx temp = change_address (op0, mode1,
6978 plus_constant (XEXP (op0, 0),
6979 (bitpos /
6980 BITS_PER_UNIT)));
6981 if (GET_CODE (XEXP (temp, 0)) == REG)
6982 op0 = temp;
6983 else
6984 op0 = change_address (op0, mode1,
6985 force_reg (GET_MODE (XEXP (temp, 0)),
6986 XEXP (temp, 0)));
6987 bitpos = 0;
6988 }
6989
7bb0943f 6990 op0 = change_address (op0, VOIDmode,
38a448ca 6991 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
c5c76735
JL
6992 force_reg (ptr_mode,
6993 offset_rtx)));
7bb0943f
RS
6994 }
6995
bbf6f052
RK
6996 /* Don't forget about volatility even if this is a bitfield. */
6997 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6998 {
6999 op0 = copy_rtx (op0);
7000 MEM_VOLATILE_P (op0) = 1;
7001 }
7002
921b3427 7003 /* Check the access. */
32919a0d
RK
7004 if (cfun != 0 && current_function_check_memory_usage
7005 && GET_CODE (op0) == MEM)
3a94c984 7006 {
921b3427
RK
7007 enum memory_use_mode memory_usage;
7008 memory_usage = get_memory_usage_from_modifier (modifier);
7009
7010 if (memory_usage != MEMORY_USE_DONT)
7011 {
7012 rtx to;
7013 int size;
7014
7015 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
7016 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
7017
7018 /* Check the access rights of the pointer. */
ea4da9db 7019 in_check_memory_usage = 1;
e9a25f70 7020 if (size > BITS_PER_UNIT)
ebb1b59a
BS
7021 emit_library_call (chkr_check_addr_libfunc,
7022 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, to,
7023 Pmode, GEN_INT (size / BITS_PER_UNIT),
e9a25f70 7024 TYPE_MODE (sizetype),
3a94c984 7025 GEN_INT (memory_usage),
956d6950 7026 TYPE_MODE (integer_type_node));
ea4da9db 7027 in_check_memory_usage = 0;
921b3427
RK
7028 }
7029 }
7030
ccc98036
RS
7031 /* In cases where an aligned union has an unaligned object
7032 as a field, we might be extracting a BLKmode value from
7033 an integer-mode (e.g., SImode) object. Handle this case
7034 by doing the extract into an object as wide as the field
7035 (which we know to be the width of a basic mode), then
f2420d0b
JW
7036 storing into memory, and changing the mode to BLKmode.
7037 If we ultimately want the address (EXPAND_CONST_ADDRESS or
7038 EXPAND_INITIALIZER), then we must not copy to a temporary. */
bbf6f052 7039 if (mode1 == VOIDmode
ccc98036 7040 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
f9409c3a 7041 || (modifier != EXPAND_CONST_ADDRESS
f9409c3a 7042 && modifier != EXPAND_INITIALIZER
c2722ef6
RK
7043 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
7044 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7045 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
f9409c3a
JW
7046 /* If the field isn't aligned enough to fetch as a memref,
7047 fetch it as a bit field. */
e1565e65
DE
7048 || (mode1 != BLKmode
7049 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
14a774a9 7050 && ((TYPE_ALIGN (TREE_TYPE (tem))
19caa751 7051 < GET_MODE_ALIGNMENT (mode))
dd841181
RK
7052 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7053 /* If the type and the field are a constant size and the
7054 size of the type isn't the same size as the bitfield,
7055 we must use bitfield operations. */
7056 || ((bitsize >= 0
7057 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7058 == INTEGER_CST)
05bccae2
RK
7059 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7060 bitsize)))))
14a774a9
RK
7061 || (modifier != EXPAND_CONST_ADDRESS
7062 && modifier != EXPAND_INITIALIZER
7063 && mode == BLKmode
e1565e65 7064 && SLOW_UNALIGNED_ACCESS (mode, alignment)
19caa751 7065 && (TYPE_ALIGN (type) > alignment
14a774a9 7066 || bitpos % TYPE_ALIGN (type) != 0)))
bbf6f052 7067 {
bbf6f052
RK
7068 enum machine_mode ext_mode = mode;
7069
14a774a9
RK
7070 if (ext_mode == BLKmode
7071 && ! (target != 0 && GET_CODE (op0) == MEM
7072 && GET_CODE (target) == MEM
7073 && bitpos % BITS_PER_UNIT == 0))
bbf6f052
RK
7074 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7075
7076 if (ext_mode == BLKmode)
a281e72d
RK
7077 {
7078 /* In this case, BITPOS must start at a byte boundary and
7079 TARGET, if specified, must be a MEM. */
7080 if (GET_CODE (op0) != MEM
7081 || (target != 0 && GET_CODE (target) != MEM)
7082 || bitpos % BITS_PER_UNIT != 0)
7083 abort ();
7084
7085 op0 = change_address (op0, VOIDmode,
7086 plus_constant (XEXP (op0, 0),
7087 bitpos / BITS_PER_UNIT));
7088 if (target == 0)
7089 target = assign_temp (type, 0, 1, 1);
7090
7091 emit_block_move (target, op0,
bd5dab53
RK
7092 bitsize == -1 ? expr_size (exp)
7093 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7094 / BITS_PER_UNIT),
19caa751 7095 BITS_PER_UNIT);
3a94c984 7096
a281e72d
RK
7097 return target;
7098 }
bbf6f052 7099
dc6d66b3
RK
7100 op0 = validize_mem (op0);
7101
7102 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
bdb429a5 7103 mark_reg_pointer (XEXP (op0, 0), alignment);
dc6d66b3
RK
7104
7105 op0 = extract_bit_field (op0, bitsize, bitpos,
bbf6f052 7106 unsignedp, target, ext_mode, ext_mode,
034f9101 7107 alignment,
bbf6f052 7108 int_size_in_bytes (TREE_TYPE (tem)));
ef19912d
RK
7109
7110 /* If the result is a record type and BITSIZE is narrower than
7111 the mode of OP0, an integral mode, and this is a big endian
7112 machine, we must put the field into the high-order bits. */
7113 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7114 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7115 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
7116 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7117 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7118 - bitsize),
7119 op0, 1);
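 /* Illustrative sketch (ours): an 8-bit record field extracted
 into an SImode OP0 on a big-endian machine is shifted left by
 32 - 8 = 24 bits, so it lands in the high-order byte, which is
 where big-endian BLKmode layout expects it. */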
7120
bbf6f052
RK
7121 if (mode == BLKmode)
7122 {
27fb3e16 7123 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
1da68f56
RK
7124 TYPE_QUAL_CONST);
7125 rtx new = assign_temp (nt, 0, 1, 1);
bbf6f052
RK
7126
7127 emit_move_insn (new, op0);
7128 op0 = copy_rtx (new);
7129 PUT_MODE (op0, BLKmode);
7130 }
7131
7132 return op0;
7133 }
7134
05019f83
RK
7135 /* If the result is BLKmode, use that to access the object
7136 now as well. */
7137 if (mode == BLKmode)
7138 mode1 = BLKmode;
7139
bbf6f052
RK
7140 /* Get a reference to just this component. */
7141 if (modifier == EXPAND_CONST_ADDRESS
7142 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
fe7a8445
RK
7143 {
7144 rtx new = gen_rtx_MEM (mode1,
7145 plus_constant (XEXP (op0, 0),
7146 (bitpos / BITS_PER_UNIT)));
7147
7148 MEM_COPY_ATTRIBUTES (new, op0);
7149 op0 = new;
7150 }
bbf6f052
RK
7151 else
7152 op0 = change_address (op0, mode1,
7153 plus_constant (XEXP (op0, 0),
7154 (bitpos / BITS_PER_UNIT)));
41472af8 7155
3bdf5ad1 7156 set_mem_attributes (op0, exp, 0);
dc6d66b3 7157 if (GET_CODE (XEXP (op0, 0)) == REG)
bdb429a5 7158 mark_reg_pointer (XEXP (op0, 0), alignment);
dc6d66b3 7159
bbf6f052 7160 MEM_VOLATILE_P (op0) |= volatilep;
0d15e60c 7161 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
08bbd316 7162 || modifier == EXPAND_CONST_ADDRESS
0d15e60c 7163 || modifier == EXPAND_INITIALIZER)
bbf6f052 7164 return op0;
0d15e60c 7165 else if (target == 0)
bbf6f052 7166 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
0d15e60c 7167
bbf6f052
RK
7168 convert_move (target, op0, unsignedp);
7169 return target;
7170 }
7171
bbf6f052
RK
7172 /* Intended for a reference to a buffer of a file-object in Pascal.
7173 But it's not certain that a special tree code will really be
7174 necessary for these. INDIRECT_REF might work for them. */
7175 case BUFFER_REF:
7176 abort ();
7177
7308a047 7178 case IN_EXPR:
7308a047 7179 {
d6a5ac33
RK
7180 /* Pascal set IN expression.
7181
7182 Algorithm:
7183 rlo = set_low - (set_low%bits_per_word);
7184 the_word = set [ (index - rlo)/bits_per_word ];
7185 bit_index = index % bits_per_word;
7186 bitmask = 1 << bit_index;
7187 return !!(the_word & bitmask); */
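 /* Worked example (ours, assuming 8-bit words): for `10 in s'
 with set_low == 0,

 rlo = 0 - (0 % 8) = 0;
 the_word = s[(10 - 0) / 8] = s[1];
 bit_index = 10 % 8 = 2;
 bitmask = 1 << 2 = 4;
 result = !!(s[1] & 4); */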
7188
7308a047
RS
7189 tree set = TREE_OPERAND (exp, 0);
7190 tree index = TREE_OPERAND (exp, 1);
d6a5ac33 7191 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7308a047 7192 tree set_type = TREE_TYPE (set);
7308a047
RS
7193 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7194 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
d6a5ac33
RK
7195 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7196 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7197 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7198 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7199 rtx setaddr = XEXP (setval, 0);
7200 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7308a047
RS
7201 rtx rlow;
7202 rtx diff, quo, rem, addr, bit, result;
7308a047 7203
d6a5ac33
RK
7204 /* If domain is empty, answer is no. Likewise if index is constant
7205 and out of bounds. */
51723711 7206 if (((TREE_CODE (set_high_bound) == INTEGER_CST
d6a5ac33 7207 && TREE_CODE (set_low_bound) == INTEGER_CST
51723711 7208 && tree_int_cst_lt (set_high_bound, set_low_bound))
d6a5ac33
RK
7209 || (TREE_CODE (index) == INTEGER_CST
7210 && TREE_CODE (set_low_bound) == INTEGER_CST
7211 && tree_int_cst_lt (index, set_low_bound))
7212 || (TREE_CODE (set_high_bound) == INTEGER_CST
7213 && TREE_CODE (index) == INTEGER_CST
7214 && tree_int_cst_lt (set_high_bound, index))))
7308a047
RS
7215 return const0_rtx;
7216
d6a5ac33
RK
7217 if (target == 0)
7218 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7308a047
RS
7219
7220 /* If we get here, we have to generate the code for both cases
7221 (in range and out of range). */
7222
7223 op0 = gen_label_rtx ();
7224 op1 = gen_label_rtx ();
7225
7226 if (! (GET_CODE (index_val) == CONST_INT
7227 && GET_CODE (lo_r) == CONST_INT))
7228 {
c5d5d461
JL
7229 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7230 GET_MODE (index_val), iunsignedp, 0, op1);
7308a047
RS
7231 }
7232
7233 if (! (GET_CODE (index_val) == CONST_INT
7234 && GET_CODE (hi_r) == CONST_INT))
7235 {
c5d5d461
JL
7236 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7237 GET_MODE (index_val), iunsignedp, 0, op1);
7308a047
RS
7238 }
7239
7240 /* Calculate the element number of bit zero in the first word
7241 of the set. */
7242 if (GET_CODE (lo_r) == CONST_INT)
17938e57 7243 rlow = GEN_INT (INTVAL (lo_r)
3a94c984 7244 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7308a047 7245 else
17938e57
RK
7246 rlow = expand_binop (index_mode, and_optab, lo_r,
7247 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
d6a5ac33 7248 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7308a047 7249
d6a5ac33
RK
7250 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7251 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7308a047
RS
7252
7253 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
d6a5ac33 7254 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7308a047 7255 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
d6a5ac33
RK
7256 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7257
7308a047 7258 addr = memory_address (byte_mode,
d6a5ac33
RK
7259 expand_binop (index_mode, add_optab, diff,
7260 setaddr, NULL_RTX, iunsignedp,
17938e57 7261 OPTAB_LIB_WIDEN));
d6a5ac33 7262
3a94c984 7263 /* Extract the bit we want to examine. */
7308a047 7264 bit = expand_shift (RSHIFT_EXPR, byte_mode,
38a448ca 7265 gen_rtx_MEM (byte_mode, addr),
17938e57
RK
7266 make_tree (TREE_TYPE (index), rem),
7267 NULL_RTX, 1);
7268 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7269 GET_MODE (target) == byte_mode ? target : 0,
7308a047 7270 1, OPTAB_LIB_WIDEN);
17938e57
RK
7271
7272 if (result != target)
7273 convert_move (target, result, 1);
7308a047
RS
7274
7275 /* Output the code to handle the out-of-range case. */
7276 emit_jump (op0);
7277 emit_label (op1);
7278 emit_move_insn (target, const0_rtx);
7279 emit_label (op0);
7280 return target;
7281 }
7282
bbf6f052
RK
7283 case WITH_CLEANUP_EXPR:
7284 if (RTL_EXPR_RTL (exp) == 0)
7285 {
7286 RTL_EXPR_RTL (exp)
921b3427 7287 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
e976b8b2
MS
7288 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
7289
bbf6f052
RK
7290 /* That's it for this cleanup. */
7291 TREE_OPERAND (exp, 2) = 0;
7292 }
7293 return RTL_EXPR_RTL (exp);
7294
5dab5552
MS
7295 case CLEANUP_POINT_EXPR:
7296 {
e976b8b2
MS
7297 /* Start a new binding layer that will keep track of all cleanup
7298 actions to be performed. */
8e91754e 7299 expand_start_bindings (2);
e976b8b2 7300
d93d4205 7301 target_temp_slot_level = temp_slot_level;
e976b8b2 7302
921b3427 7303 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
f283f66b
JM
7304 /* If we're going to use this value, load it up now. */
7305 if (! ignore)
7306 op0 = force_not_mem (op0);
d93d4205 7307 preserve_temp_slots (op0);
e976b8b2 7308 expand_end_bindings (NULL_TREE, 0, 0);
5dab5552
MS
7309 }
7310 return op0;
7311
bbf6f052
RK
7312 case CALL_EXPR:
7313 /* Check for a built-in function. */
7314 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
d6a5ac33
RK
7315 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7316 == FUNCTION_DECL)
bbf6f052 7317 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
c70eaeaf
KG
7318 {
7319 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7320 == BUILT_IN_FRONTEND)
7321 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7322 else
7323 return expand_builtin (exp, target, subtarget, tmode, ignore);
7324 }
d6a5ac33 7325
8129842c 7326 return expand_call (exp, target, ignore);
bbf6f052
RK
7327
7328 case NON_LVALUE_EXPR:
7329 case NOP_EXPR:
7330 case CONVERT_EXPR:
7331 case REFERENCE_EXPR:
4a53008b 7332 if (TREE_OPERAND (exp, 0) == error_mark_node)
a592f288 7333 return const0_rtx;
4a53008b 7334
bbf6f052
RK
7335 if (TREE_CODE (type) == UNION_TYPE)
7336 {
7337 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
14a774a9
RK
7338
7339 /* If both input and output are BLKmode, this conversion
7340 isn't actually doing anything unless we need to make the
7341 alignment stricter. */
7342 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7343 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7344 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7345 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7346 modifier);
7347
bbf6f052 7348 if (target == 0)
1da68f56 7349 target = assign_temp (type, 0, 1, 1);
d6a5ac33 7350
bbf6f052
RK
7351 if (GET_CODE (target) == MEM)
7352 /* Store data into beginning of memory target. */
7353 store_expr (TREE_OPERAND (exp, 0),
1499e0a8
RK
7354 change_address (target, TYPE_MODE (valtype), 0), 0);
7355
bbf6f052
RK
7356 else if (GET_CODE (target) == REG)
7357 /* Store this field into a union of the proper type. */
14a774a9
RK
7358 store_field (target,
7359 MIN ((int_size_in_bytes (TREE_TYPE
7360 (TREE_OPERAND (exp, 0)))
7361 * BITS_PER_UNIT),
8752c357 7362 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
14a774a9 7363 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7156dead
RK
7364 VOIDmode, 0, BITS_PER_UNIT,
7365 int_size_in_bytes (type), 0);
bbf6f052
RK
7366 else
7367 abort ();
7368
7369 /* Return the entire union. */
7370 return target;
7371 }
d6a5ac33 7372
7f62854a
RK
7373 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7374 {
7375 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
921b3427 7376 ro_modifier);
7f62854a
RK
7377
7378 /* If the signedness of the conversion differs and OP0 is
7379 a promoted SUBREG, clear that indication since we now
7380 have to do the proper extension. */
7381 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7382 && GET_CODE (op0) == SUBREG)
7383 SUBREG_PROMOTED_VAR_P (op0) = 0;
7384
7385 return op0;
7386 }
7387
1499e0a8 7388 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
12342f90
RS
7389 if (GET_MODE (op0) == mode)
7390 return op0;
12342f90 7391
d6a5ac33
RK
7392 /* If OP0 is a constant, just convert it into the proper mode. */
7393 if (CONSTANT_P (op0))
7394 return
7395 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7396 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
12342f90 7397
26fcb35a 7398 if (modifier == EXPAND_INITIALIZER)
38a448ca 7399 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
d6a5ac33 7400
bbf6f052 7401 if (target == 0)
d6a5ac33
RK
7402 return
7403 convert_to_mode (mode, op0,
7404 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
bbf6f052 7405 else
d6a5ac33
RK
7406 convert_move (target, op0,
7407 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
bbf6f052
RK
7408 return target;
7409
7410 case PLUS_EXPR:
0f41302f
MS
7411 /* We come here from MINUS_EXPR when the second operand is a
7412 constant. */
bbf6f052 7413 plus_expr:
91ce572a
CC
7414 this_optab = ! unsignedp && flag_trapv
7415 && (GET_MODE_CLASS (mode) == MODE_INT)
7416 ? addv_optab : add_optab;
bbf6f052
RK
7417
7418 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7419 something else, make sure we add the register to the constant and
7420 then to the other thing. This case can occur during strength
7421 reduction and doing it this way will produce better code if the
7422 frame pointer or argument pointer is eliminated.
7423
7424 fold-const.c will ensure that the constant is always in the inner
7425 PLUS_EXPR, so the only case we need to do anything about is if
7426 sp, ap, or fp is our second argument, in which case we must swap
7427 the innermost first argument and our second argument. */
7428
7429 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7430 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7431 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7432 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7433 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7434 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7435 {
7436 tree t = TREE_OPERAND (exp, 1);
7437
7438 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7439 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7440 }
7441
88f63c77 7442 /* If the result is to be ptr_mode and we are adding an integer to
bbf6f052
RK
7443 something, we might be forming a constant. So try to use
7444 plus_constant. If it produces a sum and we can't accept it,
7445 use force_operand. This allows P = &ARR[const] to generate
7446 efficient code on machines where a SYMBOL_REF is not a valid
7447 address.
7448
7449 If this is an EXPAND_SUM call, always return the sum. */
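 /* Worked example (ours): for `P = &ARR[5]' with 4-byte elements
 the integer addend is 20, so plus_constant can fold the whole
 address into something like

 (const (plus (symbol_ref "ARR") (const_int 20)))

 instead of emitting an explicit addition at run time. */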
c980ac49 7450 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
91ce572a 7451 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
bbf6f052 7452 {
c980ac49
RS
7453 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7454 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7455 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7456 {
cbbc503e
JL
7457 rtx constant_part;
7458
c980ac49
RS
7459 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7460 EXPAND_SUM);
cbbc503e
JL
7461 /* Use immed_double_const to ensure that the constant is
7462 truncated according to the mode of OP1, then sign extended
7463 to a HOST_WIDE_INT. Using the constant directly can result
7464 in non-canonical RTL in a 64x32 cross compile. */
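 /* Sketch of the failure mode (ours): with a 64-bit
 HOST_WIDE_INT and a 32-bit target, the SImode constant
 0xffffffff must be represented as the canonical
 (const_int -1); using the low part directly would leave
 the non-canonical value 0xffffffff in the RTL. */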
7465 constant_part
7466 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7467 (HOST_WIDE_INT) 0,
a5efcd63 7468 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7f401c74 7469 op1 = plus_constant (op1, INTVAL (constant_part));
c980ac49
RS
7470 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7471 op1 = force_operand (op1, target);
7472 return op1;
7473 }
bbf6f052 7474
c980ac49
RS
7475 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7476 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7477 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7478 {
cbbc503e
JL
7479 rtx constant_part;
7480
c980ac49
RS
7481 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7482 EXPAND_SUM);
7483 if (! CONSTANT_P (op0))
7484 {
7485 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7486 VOIDmode, modifier);
709f5be1
RS
7487 /* Don't go to both_summands if modifier
7488 says it's not right to return a PLUS. */
7489 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7490 goto binop2;
c980ac49
RS
7491 goto both_summands;
7492 }
cbbc503e
JL
7493 /* Use immed_double_const to ensure that the constant is
7494 truncated according to the mode of OP1, then sign extended
7495 to a HOST_WIDE_INT. Using the constant directly can result
7496 in non-canonical RTL in a 64x32 cross compile. */
7497 constant_part
7498 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7499 (HOST_WIDE_INT) 0,
2a94e396 7500 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7f401c74 7501 op0 = plus_constant (op0, INTVAL (constant_part));
c980ac49
RS
7502 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7503 op0 = force_operand (op0, target);
7504 return op0;
7505 }
bbf6f052
RK
7506 }
7507
7508 /* No sense saving up arithmetic to be done
7509 if it's all in the wrong mode to form part of an address.
7510 And force_operand won't know whether to sign-extend or
7511 zero-extend. */
7512 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
88f63c77 7513 || mode != ptr_mode)
c980ac49 7514 goto binop;
bbf6f052 7515
e5e809f4 7516 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
7517 subtarget = 0;
7518
921b3427
RK
7519 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7520 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
bbf6f052 7521
c980ac49 7522 both_summands:
bbf6f052
RK
7523 /* Make sure any term that's a sum with a constant comes last. */
7524 if (GET_CODE (op0) == PLUS
7525 && CONSTANT_P (XEXP (op0, 1)))
7526 {
7527 temp = op0;
7528 op0 = op1;
7529 op1 = temp;
7530 }
7531 /* If adding to a sum including a constant,
7532 associate it to put the constant outside. */
7533 if (GET_CODE (op1) == PLUS
7534 && CONSTANT_P (XEXP (op1, 1)))
7535 {
7536 rtx constant_term = const0_rtx;
7537
7538 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7539 if (temp != 0)
7540 op0 = temp;
6f90e075
JW
7541 /* Ensure that MULT comes first if there is one. */
7542 else if (GET_CODE (op0) == MULT)
38a448ca 7543 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
bbf6f052 7544 else
38a448ca 7545 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
bbf6f052
RK
7546
7547 /* Let's also eliminate constants from op0 if possible. */
7548 op0 = eliminate_constant_term (op0, &constant_term);
7549
7550 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
3a94c984 7551 their sum should be a constant. Form it into OP1, since the
bbf6f052
RK
7552 result we want will then be OP0 + OP1. */
7553
7554 temp = simplify_binary_operation (PLUS, mode, constant_term,
7555 XEXP (op1, 1));
7556 if (temp != 0)
7557 op1 = temp;
7558 else
38a448ca 7559 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
bbf6f052
RK
7560 }
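 /* Illustrative sketch (ours): if OP1 is (plus X c), the code
 above rewrites OP0 + (plus X c) as (plus (plus X OP0) c'),
 where c' also absorbs any constant term pulled out of OP0, so
 all constants migrate to one place where they can be folded. */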
7561
7562 /* Put a constant term last and put a multiplication first. */
7563 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7564 temp = op1, op1 = op0, op0 = temp;
7565
7566 temp = simplify_binary_operation (PLUS, mode, op0, op1);
38a448ca 7567 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
bbf6f052
RK
7568
7569 case MINUS_EXPR:
ea87523e
RK
7570 /* For initializers, we are allowed to return a MINUS of two
7571 symbolic constants. Here we handle all cases when both operands
7572 are constant. */
bbf6f052
RK
7573 /* Handle difference of two symbolic constants,
7574 for the sake of an initializer. */
7575 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7576 && really_constant_p (TREE_OPERAND (exp, 0))
7577 && really_constant_p (TREE_OPERAND (exp, 1)))
7578 {
906c4e36 7579 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
921b3427 7580 VOIDmode, ro_modifier);
906c4e36 7581 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
921b3427 7582 VOIDmode, ro_modifier);
ea87523e 7583
ea87523e
RK
7584 /* If the last operand is a CONST_INT, use plus_constant of
7585 the negated constant. Else make the MINUS. */
7586 if (GET_CODE (op1) == CONST_INT)
7587 return plus_constant (op0, - INTVAL (op1));
7588 else
38a448ca 7589 return gen_rtx_MINUS (mode, op0, op1);
bbf6f052
RK
7590 }
7591 /* Convert A - const to A + (-const). */
7592 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7593 {
ae431183
RK
7594 tree negated = fold (build1 (NEGATE_EXPR, type,
7595 TREE_OPERAND (exp, 1)));
7596
ae431183 7597 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
6fbfac92
JM
7598 /* If we can't negate the constant in TYPE, leave it alone and
7599 expand_binop will negate it for us. We used to try to do it
7600 here in the signed version of TYPE, but that doesn't work
7601 on POINTER_TYPEs. */;
ae431183
RK
7602 else
7603 {
7604 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7605 goto plus_expr;
7606 }
bbf6f052 7607 }
91ce572a
CC
7608 this_optab = ! unsignedp && flag_trapv
7609 && (GET_MODE_CLASS (mode) == MODE_INT)
7610 ? subv_optab : sub_optab;
bbf6f052
RK
7611 goto binop;
7612
7613 case MULT_EXPR:
bbf6f052
RK
7614 /* If first operand is constant, swap them.
7615 Thus the following special case checks need only
7616 check the second operand. */
7617 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7618 {
7619 register tree t1 = TREE_OPERAND (exp, 0);
7620 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7621 TREE_OPERAND (exp, 1) = t1;
7622 }
7623
7624 /* Attempt to return something suitable for generating an
7625 indexed address, for machines that support that. */
7626
88f63c77 7627 if (modifier == EXPAND_SUM && mode == ptr_mode
bbf6f052 7628 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
906c4e36 7629 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
bbf6f052 7630 {
921b3427
RK
7631 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7632 EXPAND_SUM);
bbf6f052
RK
7633
7634 /* Apply distributive law if OP0 is x+c. */
7635 if (GET_CODE (op0) == PLUS
7636 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
c5c76735
JL
7637 return
7638 gen_rtx_PLUS
7639 (mode,
7640 gen_rtx_MULT
7641 (mode, XEXP (op0, 0),
7642 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7643 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7644 * INTVAL (XEXP (op0, 1))));
bbf6f052
RK
7645
7646 if (GET_CODE (op0) != REG)
906c4e36 7647 op0 = force_operand (op0, NULL_RTX);
bbf6f052
RK
7648 if (GET_CODE (op0) != REG)
7649 op0 = copy_to_mode_reg (mode, op0);
7650
c5c76735
JL
7651 return
7652 gen_rtx_MULT (mode, op0,
7653 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
bbf6f052
RK
7654 }
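 /* Worked example (ours) of the distributive rewrite above:
 (X + 4) * 3 becomes (plus (mult X 3) (const_int 12)), keeping
 the sum-of-products shape that EXPAND_SUM callers want for
 address arithmetic. */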
7655
e5e809f4 7656 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
7657 subtarget = 0;
7658
7659 /* Check for multiplying things that have been extended
7660 from a narrower type. If this machine supports multiplying
7661 in that narrower type with a result in the desired type,
7662 do it that way, and avoid the explicit type-conversion. */
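 /* Hedged example (ours): for `(int) a * (int) b' where a and b
 are `short', a machine providing an HImode-to-SImode widening
 multiply lets us emit one instruction instead of two
 sign-extensions plus a full SImode multiply. */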
7663 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7664 && TREE_CODE (type) == INTEGER_TYPE
7665 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7666 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7667 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7668 && int_fits_type_p (TREE_OPERAND (exp, 1),
7669 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7670 /* Don't use a widening multiply if a shift will do. */
7671 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
906c4e36 7672 > HOST_BITS_PER_WIDE_INT)
bbf6f052
RK
7673 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7674 ||
7675 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7676 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7677 ==
7678 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7679 /* If both operands are extended, they must either both
7680 be zero-extended or both be sign-extended. */
7681 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7682 ==
7683 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7684 {
7685 enum machine_mode innermode
7686 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
b10af0c8
TG
7687 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7688 ? smul_widen_optab : umul_widen_optab);
bbf6f052
RK
7689 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7690 ? umul_widen_optab : smul_widen_optab);
b10af0c8 7691 if (mode == GET_MODE_WIDER_MODE (innermode))
bbf6f052 7692 {
b10af0c8
TG
7693 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7694 {
7695 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7696 NULL_RTX, VOIDmode, 0);
7697 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7698 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7699 VOIDmode, 0);
7700 else
7701 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7702 NULL_RTX, VOIDmode, 0);
7703 goto binop2;
7704 }
7705 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7706 && innermode == word_mode)
7707 {
7708 rtx htem;
7709 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7710 NULL_RTX, VOIDmode, 0);
7711 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8c118062
GK
7712 op1 = convert_modes (innermode, mode,
7713 expand_expr (TREE_OPERAND (exp, 1),
7714 NULL_RTX, VOIDmode, 0),
7715 unsignedp);
b10af0c8
TG
7716 else
7717 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7718 NULL_RTX, VOIDmode, 0);
7719 temp = expand_binop (mode, other_optab, op0, op1, target,
7720 unsignedp, OPTAB_LIB_WIDEN);
7721 htem = expand_mult_highpart_adjust (innermode,
7722 gen_highpart (innermode, temp),
7723 op0, op1,
7724 gen_highpart (innermode, temp),
7725 unsignedp);
7726 emit_move_insn (gen_highpart (innermode, temp), htem);
7727 return temp;
7728 }
bbf6f052
RK
7729 }
7730 }
7731 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 7732 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7733 return expand_mult (mode, op0, op1, target, unsignedp);
7734
7735 case TRUNC_DIV_EXPR:
7736 case FLOOR_DIV_EXPR:
7737 case CEIL_DIV_EXPR:
7738 case ROUND_DIV_EXPR:
7739 case EXACT_DIV_EXPR:
e5e809f4 7740 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
7741 subtarget = 0;
7742 /* Possible optimization: compute the dividend with EXPAND_SUM
7743 then, if the divisor is constant, we can optimize the case
7744 where some terms of the dividend have coefficients divisible by it. */
7745 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 7746 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7747 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7748
7749 case RDIV_EXPR:
7750 this_optab = flodiv_optab;
7751 goto binop;
7752
7753 case TRUNC_MOD_EXPR:
7754 case FLOOR_MOD_EXPR:
7755 case CEIL_MOD_EXPR:
7756 case ROUND_MOD_EXPR:
e5e809f4 7757 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
7758 subtarget = 0;
7759 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 7760 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7761 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7762
7763 case FIX_ROUND_EXPR:
7764 case FIX_FLOOR_EXPR:
7765 case FIX_CEIL_EXPR:
7766 abort (); /* Not used for C. */
7767
7768 case FIX_TRUNC_EXPR:
906c4e36 7769 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7770 if (target == 0)
7771 target = gen_reg_rtx (mode);
7772 expand_fix (target, op0, unsignedp);
7773 return target;
7774
7775 case FLOAT_EXPR:
906c4e36 7776 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7777 if (target == 0)
7778 target = gen_reg_rtx (mode);
7779 /* expand_float can't figure out what to do if FROM has VOIDmode.
7780 So give it the correct mode. With -O, cse will optimize this. */
7781 if (GET_MODE (op0) == VOIDmode)
7782 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7783 op0);
7784 expand_float (target, op0,
7785 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7786 return target;
7787
7788 case NEGATE_EXPR:
5b22bee8 7789 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
91ce572a
CC
7790 temp = expand_unop (mode,
7791 ! unsignedp && flag_trapv
7792 && (GET_MODE_CLASS (mode) == MODE_INT)
7793 ? negv_optab : neg_optab, op0, target, 0);
bbf6f052
RK
7794 if (temp == 0)
7795 abort ();
7796 return temp;
7797
7798 case ABS_EXPR:
7799 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7800
2d7050fd 7801 /* Handle complex values specially. */
d6a5ac33
RK
7802 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7803 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7804 return expand_complex_abs (mode, op0, target, unsignedp);
2d7050fd 7805
bbf6f052
RK
7806 /* Unsigned abs is simply the operand. Testing here means we don't
7807 risk generating incorrect code below. */
7808 if (TREE_UNSIGNED (type))
7809 return op0;
7810
91ce572a 7811 return expand_abs (mode, op0, target, unsignedp,
e5e809f4 7812 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
bbf6f052
RK
7813
7814 case MAX_EXPR:
7815 case MIN_EXPR:
7816 target = original_target;
e5e809f4 7817 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
fc155707 7818 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
d6a5ac33 7819 || GET_MODE (target) != mode
bbf6f052
RK
7820 || (GET_CODE (target) == REG
7821 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7822 target = gen_reg_rtx (mode);
906c4e36 7823 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7824 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7825
7826 /* First try to do it with a special MIN or MAX instruction.
7827 If that does not win, use a conditional jump to select the proper
7828 value. */
      this_optab = (TREE_UNSIGNED (type)
                    ? (code == MIN_EXPR ? umin_optab : umax_optab)
                    : (code == MIN_EXPR ? smin_optab : smax_optab));

      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
                           OPTAB_WIDEN);
      if (temp != 0)
        return temp;

      /* At this point, a MEM target is no longer useful; we will get better
         code without it.  */

      if (GET_CODE (target) == MEM)
        target = gen_reg_rtx (mode);

      if (target != op0)
        emit_move_insn (target, op0);

      op0 = gen_label_rtx ();

      /* If this mode is an integer too wide to compare properly,
         compare word by word.  Rely on cse to optimize constant cases.  */
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GE, mode, ccp_jump))
        {
          if (code == MAX_EXPR)
            do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
                                          target, op1, NULL_RTX, op0);
          else
            do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
                                          op1, target, NULL_RTX, op0);
        }
      else
        {
          int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
          do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
                                   unsignedp, mode, NULL_RTX, 0, NULL_RTX,
                                   op0);
        }
      emit_move_insn (target, op1);
      emit_label (op0);
      return target;

    case BIT_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
      if (temp == 0)
        abort ();
      return temp;

    case FFS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, ffs_optab, op0, target, 1);
      if (temp == 0)
        abort ();
      return temp;

      /* ??? Can optimize bitwise operations with one arg constant.
         Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
         and (a bitwise1 b) bitwise2 b (etc)
         but that is probably not worth while.  */

      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
         boolean values when we want in all cases to compute both of them.  In
         general it is fastest to do TRUTH_AND_EXPR by computing both operands
         as actual zero-or-1 values and then bitwise anding.  In cases where
         there cannot be any side effects, better code would be made by
         treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
         how to recognize those cases.  */
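
      /* Illustration (not in the original source): a front end typically
         emits `a && b' as TRUTH_ANDIF_EXPR, which must short-circuit and
         so costs a conditional jump; for side-effect-free operands it may
         emit TRUTH_AND_EXPR instead, which the code below expands
         branch-free as a bitwise AND of two zero-or-one values.  */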

    case TRUTH_AND_EXPR:
    case BIT_AND_EXPR:
      this_optab = and_optab;
      goto binop;

    case TRUTH_OR_EXPR:
    case BIT_IOR_EXPR:
      this_optab = ior_optab;
      goto binop;

    case TRUTH_XOR_EXPR:
    case BIT_XOR_EXPR:
      this_optab = xor_optab;
      goto binop;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
        subtarget = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
                           unsignedp);

      /* Could determine the answer when only additive constants differ.  Also,
         the addition of one can be handled by changing the condition.  */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
      temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
      if (temp != 0)
        return temp;

      /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
          && original_target
          && GET_CODE (original_target) == REG
          && (GET_MODE (original_target)
              == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        {
          temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
                              VOIDmode, 0);

          if (temp != original_target)
            temp = copy_to_reg (temp);

          op1 = gen_label_rtx ();
          emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
                                   GET_MODE (temp), unsignedp, 0, op1);
          emit_move_insn (temp, const1_rtx);
          emit_label (op1);
          return temp;
        }

      /* If no set-flag instruction, must generate a conditional
         store into a temporary variable.  Drop through
         and handle this like && and ||.  */

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      if (! ignore
          && (target == 0 || ! safe_from_p (target, exp, 1)
              /* Make sure we don't have a hard reg (such as function's return
                 value) live across basic blocks, if not optimizing.  */
              || (!optimize && GET_CODE (target) == REG
                  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
        target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

      if (target)
        emit_clr_insn (target);

      op1 = gen_label_rtx ();
      jumpifnot (exp, op1);

      if (target)
        emit_0_to_1_insn (target);

      emit_label (op1);
      return ignore ? const0_rtx : target;

    case TRUTH_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
      /* The parser is careful to generate TRUTH_NOT_EXPR
         only with operands that are always zero or one.  */
      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
                           target, 1, OPTAB_LIB_WIDEN);
      if (temp == 0)
        abort ();
      return temp;

    case COMPOUND_EXPR:
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      emit_queue ();
      return expand_expr (TREE_OPERAND (exp, 1),
                          (ignore ? const0_rtx : target),
                          VOIDmode, 0);

    case COND_EXPR:
      /* If we would have a "singleton" (see below) were it not for a
         conversion in each arm, bring that conversion back out.  */
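
      /* Illustration (not in the original source):
         `c ? (long) (a + b) : (long) a' is rewritten here as
         `(long) (c ? a + b : a)' so that the inner COND_EXPR matches the
         "singleton" patterns handled below.  */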
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
          && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
          && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
              == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
        {
          tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
          tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);

          if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
               && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
              || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
                  && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
              || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
                  && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
              || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
                  && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
            return expand_expr (build1 (NOP_EXPR, type,
                                        build (COND_EXPR, TREE_TYPE (iftrue),
                                               TREE_OPERAND (exp, 0),
                                               iftrue, iffalse)),
                                target, tmode, modifier);
        }

      {
        /* Note that COND_EXPRs whose type is a structure or union
           are required to be constructed to contain assignments of
           a temporary variable, so that we can evaluate them here
           for side effect only.  If type is void, we must do likewise.  */

        /* If an arm of the branch requires a cleanup,
           only that cleanup is performed.  */

        tree singleton = 0;
        tree binary_op = 0, unary_op = 0;

        /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
           convert it to our mode, if necessary.  */
        if (integer_onep (TREE_OPERAND (exp, 1))
            && integer_zerop (TREE_OPERAND (exp, 2))
            && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
          {
            if (ignore)
              {
                expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
                             ro_modifier);
                return const0_rtx;
              }

            op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
            if (GET_MODE (op0) == mode)
              return op0;

            if (target == 0)
              target = gen_reg_rtx (mode);
            convert_move (target, op0, unsignedp);
            return target;
          }

        /* Check for X ? A + B : A.  If we have this, we can copy A to the
           output and conditionally add B.  Similarly for unary operations.
           Don't do this if X has side-effects because those side effects
           might affect A or B and the "?" operation is a sequence point in
           ANSI.  (operand_equal_p tests for side effects.)  */
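
        /* Illustration (not in the original source): in `x ? a + b : a'
           the arm `a' is the singleton; we can store `a' into the target
           unconditionally and then perform `+ b' only on the taken path,
           rather than evaluating two complete arms.  */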

        if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
            && operand_equal_p (TREE_OPERAND (exp, 2),
                                TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
          singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
        else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
                 && operand_equal_p (TREE_OPERAND (exp, 1),
                                     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
          singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
        else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
                 && operand_equal_p (TREE_OPERAND (exp, 2),
                                     TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
          singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
        else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
                 && operand_equal_p (TREE_OPERAND (exp, 1),
                                     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
          singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);

        /* If we are not to produce a result, we have no target.  Otherwise,
           if a target was specified use it; it will not be used as an
           intermediate target unless it is safe.  If no target, use a
           temporary.  */

        if (ignore)
          temp = 0;
        else if (original_target
                 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
                     || (singleton && GET_CODE (original_target) == REG
                         && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
                         && original_target == var_rtx (singleton)))
                 && GET_MODE (original_target) == mode
#ifdef HAVE_conditional_move
                 && (! can_conditionally_move_p (mode)
                     || GET_CODE (original_target) == REG
                     || TREE_ADDRESSABLE (type))
#endif
                 && ! (GET_CODE (original_target) == MEM
                       && MEM_VOLATILE_P (original_target)))
          temp = original_target;
        else if (TREE_ADDRESSABLE (type))
          abort ();
        else
          temp = assign_temp (type, 0, 0, 1);

        /* If we had X ? A + C : A, with C a constant power of 2, and we can
           do the test of X as a store-flag operation, do this as
           A + ((X != 0) << log C).  Similarly for other simple binary
           operators.  Only do for C == 1 if BRANCH_COST is low.  */
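
        /* Illustration (not in the original source): with C == 4,
           `x ? a + 4 : a' becomes `a + ((x != 0) << 2)', trading the
           conditional branch for a store-flag and a shift.  */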
        if (temp && singleton && binary_op
            && (TREE_CODE (binary_op) == PLUS_EXPR
                || TREE_CODE (binary_op) == MINUS_EXPR
                || TREE_CODE (binary_op) == BIT_IOR_EXPR
                || TREE_CODE (binary_op) == BIT_XOR_EXPR)
            && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
                : integer_onep (TREE_OPERAND (binary_op, 1)))
            && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
          {
            rtx result;
            optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
                            ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
                               ? addv_optab : add_optab)
                            : TREE_CODE (binary_op) == MINUS_EXPR
                            ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
                               ? subv_optab : sub_optab)
                            : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
                            : xor_optab);

            /* If we had X ? A : A + 1, do this as A + (X == 0).

               We have to invert the truth value here and then put it
               back later if do_store_flag fails.  We cannot simply copy
               TREE_OPERAND (exp, 0) to another variable and modify that
               because invert_truthvalue can modify the tree pointed to
               by its argument.  */
            if (singleton == TREE_OPERAND (exp, 1))
              TREE_OPERAND (exp, 0)
                = invert_truthvalue (TREE_OPERAND (exp, 0));

            result = do_store_flag (TREE_OPERAND (exp, 0),
                                    (safe_from_p (temp, singleton, 1)
                                     ? temp : NULL_RTX),
                                    mode, BRANCH_COST <= 1);

            if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
              result = expand_shift (LSHIFT_EXPR, mode, result,
                                     build_int_2 (tree_log2
                                                  (TREE_OPERAND
                                                   (binary_op, 1)),
                                                  0),
                                     (safe_from_p (temp, singleton, 1)
                                      ? temp : NULL_RTX), 0);

            if (result)
              {
                op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
                return expand_binop (mode, boptab, op1, result, temp,
                                     unsignedp, OPTAB_LIB_WIDEN);
              }
            else if (singleton == TREE_OPERAND (exp, 1))
              TREE_OPERAND (exp, 0)
                = invert_truthvalue (TREE_OPERAND (exp, 0));
          }

        do_pending_stack_adjust ();
        NO_DEFER_POP;
        op0 = gen_label_rtx ();

        if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
          {
            if (temp != 0)
              {
                /* If the target conflicts with the other operand of the
                   binary op, we can't use it.  Also, we can't use the target
                   if it is a hard register, because evaluating the condition
                   might clobber it.  */
                if ((binary_op
                     && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
                    || (GET_CODE (temp) == REG
                        && REGNO (temp) < FIRST_PSEUDO_REGISTER))
                  temp = gen_reg_rtx (mode);
                store_expr (singleton, temp, 0);
              }
            else
              expand_expr (singleton,
                           ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
            if (singleton == TREE_OPERAND (exp, 1))
              jumpif (TREE_OPERAND (exp, 0), op0);
            else
              jumpifnot (TREE_OPERAND (exp, 0), op0);

            start_cleanup_deferral ();
            if (binary_op && temp == 0)
              /* Just touch the other operand.  */
              expand_expr (TREE_OPERAND (binary_op, 1),
                           ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
            else if (binary_op)
              store_expr (build (TREE_CODE (binary_op), type,
                                 make_tree (type, temp),
                                 TREE_OPERAND (binary_op, 1)),
                          temp, 0);
            else
              store_expr (build1 (TREE_CODE (unary_op), type,
                                  make_tree (type, temp)),
                          temp, 0);
            op1 = op0;
          }
        /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
           comparison operator.  If we have one of these cases, set the
           output to A, branch on A (cse will merge these two references),
           then set the output to FOO.  */
        else if (temp
                 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
                 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
                 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
                                     TREE_OPERAND (exp, 1), 0)
                 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
                     || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
                 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
          {
            if (GET_CODE (temp) == REG
                && REGNO (temp) < FIRST_PSEUDO_REGISTER)
              temp = gen_reg_rtx (mode);
            store_expr (TREE_OPERAND (exp, 1), temp, 0);
            jumpif (TREE_OPERAND (exp, 0), op0);

            start_cleanup_deferral ();
            store_expr (TREE_OPERAND (exp, 2), temp, 0);
            op1 = op0;
          }
        else if (temp
                 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
                 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
                 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
                                     TREE_OPERAND (exp, 2), 0)
                 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
                     || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
                 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
          {
            if (GET_CODE (temp) == REG
                && REGNO (temp) < FIRST_PSEUDO_REGISTER)
              temp = gen_reg_rtx (mode);
            store_expr (TREE_OPERAND (exp, 2), temp, 0);
            jumpifnot (TREE_OPERAND (exp, 0), op0);

            start_cleanup_deferral ();
            store_expr (TREE_OPERAND (exp, 1), temp, 0);
            op1 = op0;
          }
        else
          {
            op1 = gen_label_rtx ();
            jumpifnot (TREE_OPERAND (exp, 0), op0);

            start_cleanup_deferral ();

            /* One branch of the cond can be void, if it never returns.  For
               example A ? throw : E.  */
            if (temp != 0
                && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
              store_expr (TREE_OPERAND (exp, 1), temp, 0);
            else
              expand_expr (TREE_OPERAND (exp, 1),
                           ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
            end_cleanup_deferral ();
            emit_queue ();
            emit_jump_insn (gen_jump (op1));
            emit_barrier ();
            emit_label (op0);
            start_cleanup_deferral ();
            if (temp != 0
                && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
              store_expr (TREE_OPERAND (exp, 2), temp, 0);
            else
              expand_expr (TREE_OPERAND (exp, 2),
                           ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
          }

        end_cleanup_deferral ();

        emit_queue ();
        emit_label (op1);
        OK_DEFER_POP;

        return temp;
      }

    case TARGET_EXPR:
      {
        /* Something needs to be initialized, but we didn't know
           where that thing was when building the tree.  For example,
           it could be the return value of a function, or a parameter
           to a function which is laid out on the stack, or a temporary
           variable which must be passed by reference.

           We guarantee that the expression will either be constructed
           or copied into our original target.  */

        tree slot = TREE_OPERAND (exp, 0);
        tree cleanups = NULL_TREE;
        tree exp1;

        if (TREE_CODE (slot) != VAR_DECL)
          abort ();

        if (! ignore)
          target = original_target;

        /* Set this here so that if we get a target that refers to a
           register variable that's already been used, put_reg_into_stack
           knows that it should fix up those uses.  */
        TREE_USED (slot) = 1;

        if (target == 0)
          {
            if (DECL_RTL (slot) != 0)
              {
                target = DECL_RTL (slot);
                /* If we have already expanded the slot, don't do
                   it again.  (mrs)  */
                if (TREE_OPERAND (exp, 1) == NULL_TREE)
                  return target;
              }
            else
              {
                target = assign_temp (type, 2, 0, 1);
                /* All temp slots at this level must not conflict.  */
                preserve_temp_slots (target);
                DECL_RTL (slot) = target;
                if (TREE_ADDRESSABLE (slot))
                  put_var_into_stack (slot);

                /* Since SLOT is not known to the called function
                   to belong to its stack frame, we must build an explicit
                   cleanup.  This case occurs when we must build up a reference
                   to pass the reference as an argument.  In this case,
                   it is very likely that such a reference need not be
                   built here.  */

                if (TREE_OPERAND (exp, 2) == 0)
                  TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
                cleanups = TREE_OPERAND (exp, 2);
              }
          }
        else
          {
            /* This case does occur, when expanding a parameter which
               needs to be constructed on the stack.  The target
               is the actual stack address that we want to initialize.
               The function we call will perform the cleanup in this case.  */

            /* If we have already assigned it space, use that space,
               not target that we were passed in, as our target
               parameter is only a hint.  */
            if (DECL_RTL (slot) != 0)
              {
                target = DECL_RTL (slot);
                /* If we have already expanded the slot, don't do
                   it again.  (mrs)  */
                if (TREE_OPERAND (exp, 1) == NULL_TREE)
                  return target;
              }
            else
              {
                DECL_RTL (slot) = target;
                /* If we must have an addressable slot, then make sure that
                   the RTL that we just stored in slot is OK.  */
                if (TREE_ADDRESSABLE (slot))
                  put_var_into_stack (slot);
              }
          }

        exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
        /* Mark it as expanded.  */
        TREE_OPERAND (exp, 1) = NULL_TREE;

        store_expr (exp1, target, 0);

        expand_decl_cleanup (NULL_TREE, cleanups);

        return target;
      }

    case INIT_EXPR:
      {
        tree lhs = TREE_OPERAND (exp, 0);
        tree rhs = TREE_OPERAND (exp, 1);
        tree noncopied_parts = 0;
        tree lhs_type = TREE_TYPE (lhs);

        temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
        if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
          noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
                                                  TYPE_NONCOPIED_PARTS (lhs_type));
        while (noncopied_parts != 0)
          {
            expand_assignment (TREE_VALUE (noncopied_parts),
                               TREE_PURPOSE (noncopied_parts), 0, 0);
            noncopied_parts = TREE_CHAIN (noncopied_parts);
          }
        return temp;
      }

    case MODIFY_EXPR:
      {
        /* If lhs is complex, expand calls in rhs before computing it.
           That's so we don't compute a pointer and save it over a call.
           If lhs is simple, compute it first so we can give it as a
           target if the rhs is just a call.  This avoids an extra temp and copy
           and that prevents a partial-subsumption which makes bad code.
           Actually we could treat component_ref's of vars like vars.  */

        tree lhs = TREE_OPERAND (exp, 0);
        tree rhs = TREE_OPERAND (exp, 1);
        tree noncopied_parts = 0;
        tree lhs_type = TREE_TYPE (lhs);

        temp = 0;

        /* Check for |= or &= of a bitfield of size one into another bitfield
           of size 1.  In this case, (unless we need the result of the
           assignment) we can do this more efficiently with a
           test followed by an assignment, if necessary.

           ??? At this point, we can't get a BIT_FIELD_REF here.  But if
           things change so we do, this code should be enhanced to
           support it.  */
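
        /* Illustration (not in the original source): for one-bit fields
           `s.a |= s.b' this emits the equivalent of

               if (s.b) s.a = 1;

           a test plus a conditional store of a constant, instead of
           loading, OR-ing and storing both bitfields.  */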
        if (ignore
            && TREE_CODE (lhs) == COMPONENT_REF
            && (TREE_CODE (rhs) == BIT_IOR_EXPR
                || TREE_CODE (rhs) == BIT_AND_EXPR)
            && TREE_OPERAND (rhs, 0) == lhs
            && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
            && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
            && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
          {
            rtx label = gen_label_rtx ();

            do_jump (TREE_OPERAND (rhs, 1),
                     TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
                     TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
            expand_assignment (lhs, convert (TREE_TYPE (rhs),
                                             (TREE_CODE (rhs) == BIT_IOR_EXPR
                                              ? integer_one_node
                                              : integer_zero_node)),
                               0, 0);
            do_pending_stack_adjust ();
            emit_label (label);
            return const0_rtx;
          }

        if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
            && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
          noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
                                                  TYPE_NONCOPIED_PARTS (lhs_type));

        temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
        while (noncopied_parts != 0)
          {
            expand_assignment (TREE_PURPOSE (noncopied_parts),
                               TREE_VALUE (noncopied_parts), 0, 0);
            noncopied_parts = TREE_CHAIN (noncopied_parts);
          }
        return temp;
      }

    case RETURN_EXPR:
      if (!TREE_OPERAND (exp, 0))
        expand_null_return ();
      else
        expand_return (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
      return expand_increment (exp, 0, ignore);

    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
      /* Faster to treat as pre-increment if result is not used.  */
      return expand_increment (exp, ! ignore, ignore);

    case ADDR_EXPR:
      /* If nonzero, TEMP will be set to the address of something that might
         be a MEM corresponding to a stack slot.  */
      temp = 0;

      /* Are we taking the address of a nested function?  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
          && decl_function_context (TREE_OPERAND (exp, 0)) != 0
          && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
          && ! TREE_STATIC (exp))
        {
          op0 = trampoline_address (TREE_OPERAND (exp, 0));
          op0 = force_operand (op0, target);
        }
      /* If we are taking the address of something erroneous, just
         return a zero.  */
      else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
        return const0_rtx;
      else
        {
          /* We make sure to pass const0_rtx down if we came in with
             ignore set, to avoid doing the cleanups twice for something.  */
          op0 = expand_expr (TREE_OPERAND (exp, 0),
                             ignore ? const0_rtx : NULL_RTX, VOIDmode,
                             (modifier == EXPAND_INITIALIZER
                              ? modifier : EXPAND_CONST_ADDRESS));

          /* If we are going to ignore the result, OP0 will have been set
             to const0_rtx, so just return it.  Don't get confused and
             think we are taking the address of the constant.  */
          if (ignore)
            return op0;

          op0 = protect_from_queue (op0, 0);

          /* We would like the object in memory.  If it is a constant, we can
             have it be statically allocated into memory.  For a non-constant,
             we need to allocate some memory and store the value into it.  */

          if (CONSTANT_P (op0))
            op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
                                   op0);
          else if (GET_CODE (op0) == MEM)
            {
              mark_temp_addr_taken (op0);
              temp = XEXP (op0, 0);
            }

          else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
                   || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
                   || GET_CODE (op0) == PARALLEL)
            {
              /* If this object is in a register, it must not
                 be BLKmode.  */
              tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
              tree nt = build_qualified_type (inner_type,
                                              (TYPE_QUALS (inner_type)
                                               | TYPE_QUAL_CONST));
              rtx memloc = assign_temp (nt, 1, 1, 1);

              mark_temp_addr_taken (memloc);
              if (GET_CODE (op0) == PARALLEL)
                /* Handle calls that pass values in multiple non-contiguous
                   locations.  The Irix 6 ABI has examples of this.  */
                emit_group_store (memloc, op0,
                                  int_size_in_bytes (inner_type),
                                  TYPE_ALIGN (inner_type));
              else
                emit_move_insn (memloc, op0);
              op0 = memloc;
            }

          if (GET_CODE (op0) != MEM)
            abort ();

          if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
            {
              temp = XEXP (op0, 0);
#ifdef POINTERS_EXTEND_UNSIGNED
              if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
                  && mode == ptr_mode)
                temp = convert_memory_address (ptr_mode, temp);
#endif
              return temp;
            }

          op0 = force_operand (XEXP (op0, 0), target);
        }

      if (flag_force_addr && GET_CODE (op0) != REG)
        op0 = force_reg (Pmode, op0);

      if (GET_CODE (op0) == REG
          && ! REG_USERVAR_P (op0))
        mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));

      /* If we might have had a temp slot, add an equivalent address
         for it.  */
      if (temp != 0)
        update_temp_slot_address (temp, op0);

#ifdef POINTERS_EXTEND_UNSIGNED
      if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
          && mode == ptr_mode)
        op0 = convert_memory_address (ptr_mode, op0);
#endif

      return op0;

    case ENTRY_VALUE_EXPR:
      abort ();

      /* COMPLEX type for Extended Pascal & Fortran.  */
    case COMPLEX_EXPR:
      {
        enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
        rtx insns;

        /* Get the rtx code of the operands.  */
        op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
        op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);

        if (! target)
          target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

        start_sequence ();

        /* Move the real (op0) and imaginary (op1) parts to their location.  */
        emit_move_insn (gen_realpart (mode, target), op0);
        emit_move_insn (gen_imagpart (mode, target), op1);

        insns = get_insns ();
        end_sequence ();

        /* Complex construction should appear as a single unit.  */
        /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
           each with a separate pseudo as destination.
           It's not correct for flow to treat them as a unit.  */
        if (GET_CODE (target) != CONCAT)
          emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
        else
          emit_insns (insns);

        return target;
      }

    case REALPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return gen_realpart (mode, op0);

    case IMAGPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return gen_imagpart (mode, op0);

    case CONJ_EXPR:
      {
        enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
        rtx imag_t;
        rtx insns;

        op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);

        if (! target)
          target = gen_reg_rtx (mode);

        start_sequence ();

        /* Store the realpart and the negated imagpart to target.  */
        emit_move_insn (gen_realpart (partmode, target),
                        gen_realpart (partmode, op0));

        imag_t = gen_imagpart (partmode, target);
        temp = expand_unop (partmode,
                            ! unsignedp && flag_trapv
                            && (GET_MODE_CLASS (partmode) == MODE_INT)
                            ? negv_optab : neg_optab,
                            gen_imagpart (partmode, op0), imag_t, 0);
        if (temp != imag_t)
          emit_move_insn (imag_t, temp);

        insns = get_insns ();
        end_sequence ();

        /* Conjugate should appear as a single unit.
           If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
           each with a separate pseudo as destination.
           It's not correct for flow to treat them as a unit.  */
        if (GET_CODE (target) != CONCAT)
          emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
        else
          emit_insns (insns);

        return target;
      }

    case TRY_CATCH_EXPR:
      {
        tree handler = TREE_OPERAND (exp, 1);

        expand_eh_region_start ();

        op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);

        expand_eh_region_end (handler);

        return op0;
      }

    case TRY_FINALLY_EXPR:
      {
        tree try_block = TREE_OPERAND (exp, 0);
        tree finally_block = TREE_OPERAND (exp, 1);
        rtx finally_label = gen_label_rtx ();
        rtx done_label = gen_label_rtx ();
        rtx return_link = gen_reg_rtx (Pmode);
        tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
                              (tree) finally_label, (tree) return_link);
        TREE_SIDE_EFFECTS (cleanup) = 1;

        /* Start a new binding layer that will keep track of all cleanup
           actions to be performed.  */
        expand_start_bindings (2);

        target_temp_slot_level = temp_slot_level;

        expand_decl_cleanup (NULL_TREE, cleanup);
        op0 = expand_expr (try_block, target, tmode, modifier);

        preserve_temp_slots (op0);
        expand_end_bindings (NULL_TREE, 0, 0);
        emit_jump (done_label);
        emit_label (finally_label);
        expand_expr (finally_block, const0_rtx, VOIDmode, 0);
        emit_indirect_jump (return_link);
        emit_label (done_label);
        return op0;
      }
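
      /* Illustration (not in the original source): the TRY_FINALLY_EXPR
         case above emits the finally block only once, as an out-of-line
         subroutine; RETURN_LINK holds the label to jump back to.  The
         GOTO_SUBROUTINE_EXPR case below emits the calling side of that
         protocol: store the return label, jump to the subroutine, and
         place the return label.  */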

    case GOTO_SUBROUTINE_EXPR:
      {
        rtx subr = (rtx) TREE_OPERAND (exp, 0);
        rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
        rtx return_address = gen_label_rtx ();
        emit_move_insn (return_link,
                        gen_rtx_LABEL_REF (Pmode, return_address));
        emit_jump (subr);
        emit_label (return_address);
        return const0_rtx;
      }

    case POPDCC_EXPR:
      {
        rtx dcc = get_dynamic_cleanup_chain ();
        emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
        return const0_rtx;
      }

    case POPDHC_EXPR:
      {
        rtx dhc = get_dynamic_handler_chain ();
        emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
        return const0_rtx;
      }

    case VA_ARG_EXPR:
      return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);

    default:
      return (*lang_expand_expr) (exp, original_target, tmode, modifier);
    }

  /* Here to do an ordinary binary operator, generating an instruction
     from the optab already placed in `this_optab'.  */
 binop:
  if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
    subtarget = 0;
  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
 binop2:
  temp = expand_binop (mode, this_optab, op0, op1, target,
                       unsignedp, OPTAB_LIB_WIDEN);
  if (temp == 0)
    abort ();
  return temp;
}
\f
/* Similar to expand_expr, except that we don't specify a target, target
   mode, or modifier and we return the alignment of the inner type.  This is
   used in cases where it is not necessary to align the result to the
   alignment of its type as long as we know the alignment of the result, for
   example for comparisons of BLKmode values.  */

static rtx
expand_expr_unaligned (exp, palign)
     register tree exp;
     unsigned int *palign;
{
  register rtx op0;
  tree type = TREE_TYPE (exp);
  register enum machine_mode mode = TYPE_MODE (type);

  /* Default the alignment we return to that of the type.  */
  *palign = TYPE_ALIGN (type);

  /* The only case in which we do anything special is when the resulting
     mode is BLKmode.  */
  if (mode != BLKmode)
    return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);

  switch (TREE_CODE (exp))
    {
    case CONVERT_EXPR:
    case NOP_EXPR:
    case NON_LVALUE_EXPR:
      /* Conversions between BLKmode values don't change the underlying
         alignment or value.  */
      if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
        return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
      break;

    case ARRAY_REF:
      /* Much of the code for this case is copied directly from expand_expr.
         We need to duplicate it here because we will do something different
         in the fall-through case, so we need to handle the same exceptions
         it does.  */
      {
        tree array = TREE_OPERAND (exp, 0);
        tree domain = TYPE_DOMAIN (TREE_TYPE (array));
        tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
        tree index = convert (sizetype, TREE_OPERAND (exp, 1));
        HOST_WIDE_INT i;

        if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
          abort ();

        /* Optimize the special-case of a zero lower bound.

           We convert the low_bound to sizetype to avoid some problems
           with constant folding.  (E.g. suppose the lower bound is 1,
           and its mode is QI.  Without the conversion, (ARRAY
           +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
           +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */

        if (! integer_zerop (low_bound))
          index = size_diffop (index, convert (sizetype, low_bound));

        /* If this is a constant index into a constant array,
           just get the value from the array.  Handle both the cases when
           we have an explicit constructor and when our operand is a variable
           that was declared const.  */

        if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
            && host_integerp (index, 0)
            && 0 > compare_tree_int (index,
                                     list_length (CONSTRUCTOR_ELTS
                                                  (TREE_OPERAND (exp, 0)))))
          {
            tree elem;

            for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
                 i = tree_low_cst (index, 0);
                 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
              ;

            if (elem)
              return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
          }

        else if (optimize >= 1
                 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
                 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
                 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
          {
            if (TREE_CODE (index) == INTEGER_CST)
              {
                tree init = DECL_INITIAL (array);

                if (TREE_CODE (init) == CONSTRUCTOR)
                  {
                    tree elem;

                    for (elem = CONSTRUCTOR_ELTS (init);
                         ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
                         elem = TREE_CHAIN (elem))
                      ;

                    if (elem)
                      return expand_expr_unaligned (fold (TREE_VALUE (elem)),
                                                    palign);
                  }
              }
          }
      }
      /* Fall through.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
      /* If the operand is a CONSTRUCTOR, we can just extract the
         appropriate field if it is present.  Don't do this if we have
         already written the data since we want to refer to that copy
         and varasm.c assumes that's what we'll do.  */
      if (TREE_CODE (exp) != ARRAY_REF
          && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
          && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
        {
          tree elt;

          for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
               elt = TREE_CHAIN (elt))
            if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
              /* Note that unlike the case in expand_expr, we know this is
                 BLKmode and hence not an integer.  */
              return expand_expr_unaligned (TREE_VALUE (elt), palign);
        }

      {
        enum machine_mode mode1;
        HOST_WIDE_INT bitsize, bitpos;
        tree offset;
        int volatilep = 0;
        unsigned int alignment;
        int unsignedp;
        tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                                        &mode1, &unsignedp, &volatilep,
                                        &alignment);

        /* If we got back the original object, something is wrong.  Perhaps
           we are evaluating an expression too early.  In any event, don't
           infinitely recurse.  */
        if (tem == exp)
          abort ();

        op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);

        /* If this is a constant, put it into a register if it is a
           legitimate constant and OFFSET is 0 and memory if it isn't.  */
        if (CONSTANT_P (op0))
          {
            enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));

            if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
                && offset == 0)
              op0 = force_reg (inner_mode, op0);
            else
              op0 = validize_mem (force_const_mem (inner_mode, op0));
          }

        if (offset != 0)
          {
            rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);

            /* If this object is in a register, put it into memory.
               This case can't occur in C, but can in Ada if we have
               unchecked conversion of an expression from a scalar type to
               an array or record type.  */
            if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
                || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
              {
                tree nt = build_qualified_type (TREE_TYPE (tem),
                                                (TYPE_QUALS (TREE_TYPE (tem))
                                                 | TYPE_QUAL_CONST));
                rtx memloc = assign_temp (nt, 1, 1, 1);

                mark_temp_addr_taken (memloc);
                emit_move_insn (memloc, op0);
                op0 = memloc;
              }

            if (GET_CODE (op0) != MEM)
              abort ();

            if (GET_MODE (offset_rtx) != ptr_mode)
              {
#ifdef POINTERS_EXTEND_UNSIGNED
                offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
#else
                offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif
              }

            op0 = change_address (op0, VOIDmode,
                                  gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
                                                force_reg (ptr_mode,
                                                           offset_rtx)));
          }

        /* Don't forget about volatility even if this is a bitfield.  */
        if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
          {
            op0 = copy_rtx (op0);
            MEM_VOLATILE_P (op0) = 1;
          }

        /* Check the access.  */
        if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
          {
            rtx to;
            int size;

            to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
            size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;

            /* Check the access right of the pointer.  */
            in_check_memory_usage = 1;
            if (size > BITS_PER_UNIT)
              emit_library_call (chkr_check_addr_libfunc,
                                 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
                                 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
                                 TYPE_MODE (sizetype),
                                 GEN_INT (MEMORY_USE_RO),
                                 TYPE_MODE (integer_type_node));
            in_check_memory_usage = 0;
          }

        /* In cases where an aligned union has an unaligned object
           as a field, we might be extracting a BLKmode value from
           an integer-mode (e.g., SImode) object.  Handle this case
           by doing the extract into an object as wide as the field
           (which we know to be the width of a basic mode), then
           storing into memory, and changing the mode to BLKmode.
           If we ultimately want the address (EXPAND_CONST_ADDRESS or
           EXPAND_INITIALIZER), then we must not copy to a temporary.  */
        if (mode1 == VOIDmode
            || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
            || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
                && (TYPE_ALIGN (type) > alignment
                    || bitpos % TYPE_ALIGN (type) != 0)))
          {
            enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);

            if (ext_mode == BLKmode)
              {
                /* In this case, BITPOS must start at a byte boundary.  */
                if (GET_CODE (op0) != MEM
                    || bitpos % BITS_PER_UNIT != 0)
                  abort ();

                op0 = change_address (op0, VOIDmode,
                                      plus_constant (XEXP (op0, 0),
                                                     bitpos / BITS_PER_UNIT));
              }
            else
              {
                tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
                                                TYPE_QUAL_CONST);
                rtx new = assign_temp (nt, 0, 1, 1);

                op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
                                         unsignedp, NULL_RTX, ext_mode,
                                         ext_mode, alignment,
                                         int_size_in_bytes (TREE_TYPE (tem)));

                /* If the result is a record type and BITSIZE is narrower than
                   the mode of OP0, an integral mode, and this is a big endian
                   machine, we must put the field into the high-order bits.  */
                if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
                    && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
                    && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
                  op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
                                      size_int (GET_MODE_BITSIZE
                                                (GET_MODE (op0))
                                                - bitsize),
                                      op0, 1);

                emit_move_insn (new, op0);
                op0 = copy_rtx (new);
                PUT_MODE (op0, BLKmode);
              }
          }
        else
          /* Get a reference to just this component.  */
          op0 = change_address (op0, mode1,
                                plus_constant (XEXP (op0, 0),
                                               (bitpos / BITS_PER_UNIT)));

        MEM_ALIAS_SET (op0) = get_alias_set (exp);

        /* Adjust the alignment in case the bit position is not
           a multiple of the alignment of the inner object.  */
        while (bitpos % alignment != 0)
          alignment >>= 1;

        if (GET_CODE (XEXP (op0, 0)) == REG)
          mark_reg_pointer (XEXP (op0, 0), alignment);

        MEM_IN_STRUCT_P (op0) = 1;
        MEM_VOLATILE_P (op0) |= volatilep;

        *palign = alignment;
        return op0;
      }

    default:
      break;

    }

  return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
}
\f
/* Return the tree node if ARG corresponds to a string constant or zero
   if it doesn't.  If we return non-zero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */

tree
string_constant (arg, ptr_offset)
     tree arg;
     tree *ptr_offset;
{
  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
    {
      *ptr_offset = size_zero_node;
      return TREE_OPERAND (arg, 0);
    }
  else if (TREE_CODE (arg) == PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
        {
          *ptr_offset = convert (sizetype, arg1);
          return TREE_OPERAND (arg0, 0);
        }
      else if (TREE_CODE (arg1) == ADDR_EXPR
               && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
        {
          *ptr_offset = convert (sizetype, arg0);
          return TREE_OPERAND (arg1, 0);
        }
    }

  return 0;
}
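
/* Illustration (not in the original source): for the argument of
   strlen ("hello" + 2), i.e. PLUS_EXPR <ADDR_EXPR <STRING_CST "hello">, 2>,
   string_constant returns the STRING_CST and sets *PTR_OFFSET to the
   sizetype constant 2.  */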

/* Expand code for a post- or pre- increment or decrement
   and return the RTX for the result.
   POST is 1 for postinc/decrements and 0 for preinc/decrements.  */
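
/* Illustration (not in the original source): for `y = x++;' the
   post-increment case below does not add 1 immediately; it enqueues the
   add so that the old value of x can be used directly and the increment
   is only emitted when the queue is flushed, which can later be combined
   with a target's post-increment addressing mode.  */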
1499e0a8 9140
b93a436e
JL
9141static rtx
9142expand_increment (exp, post, ignore)
9143 register tree exp;
9144 int post, ignore;
ca695ac9 9145{
b93a436e
JL
9146 register rtx op0, op1;
9147 register rtx temp, value;
9148 register tree incremented = TREE_OPERAND (exp, 0);
9149 optab this_optab = add_optab;
9150 int icode;
9151 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9152 int op0_is_copy = 0;
9153 int single_insn = 0;
9154 /* 1 means we can't store into OP0 directly,
9155 because it is a subreg narrower than a word,
9156 and we don't dare clobber the rest of the word. */
9157 int bad_subreg = 0;
1499e0a8 9158
b93a436e
JL
9159 /* Stabilize any component ref that might need to be
9160 evaluated more than once below. */
9161 if (!post
9162 || TREE_CODE (incremented) == BIT_FIELD_REF
9163 || (TREE_CODE (incremented) == COMPONENT_REF
9164 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9165 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9166 incremented = stabilize_reference (incremented);
9167 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9168 ones into save exprs so that they don't accidentally get evaluated
9169 more than once by the code below. */
9170 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9171 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9172 incremented = save_expr (incremented);
e9a25f70 9173
b93a436e
JL
9174 /* Compute the operands as RTX.
9175 Note whether OP0 is the actual lvalue or a copy of it:
9176 I believe it is a copy iff it is a register or subreg
9177 and insns were generated in computing it. */
e9a25f70 9178
b93a436e
JL
9179 temp = get_last_insn ();
9180 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
e9a25f70 9181
b93a436e
JL
9182 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9183 in place but instead must do sign- or zero-extension during assignment,
9184 so we copy it into a new register and let the code below use it as
9185 a copy.
e9a25f70 9186
b93a436e
JL
9187 Note that we can safely modify this SUBREG since it is know not to be
9188 shared (it was made by the expand_expr call above). */
9189
9190 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9191 {
9192 if (post)
9193 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9194 else
9195 bad_subreg = 1;
9196 }
9197 else if (GET_CODE (op0) == SUBREG
9198 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9199 {
9200 /* We cannot increment this SUBREG in place. If we are
9201 post-incrementing, get a copy of the old value. Otherwise,
9202 just mark that we cannot increment in place. */
9203 if (post)
9204 op0 = copy_to_reg (op0);
9205 else
9206 bad_subreg = 1;
e9a25f70
JL
9207 }
9208
b93a436e
JL
9209 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9210 && temp != get_last_insn ());
9211 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
9212 EXPAND_MEMORY_USE_BAD);
1499e0a8 9213
b93a436e
JL
9214 /* Decide whether incrementing or decrementing. */
9215 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9216 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9217 this_optab = sub_optab;
9218
9219 /* Convert decrement by a constant into a negative increment. */
9220 if (this_optab == sub_optab
9221 && GET_CODE (op1) == CONST_INT)
ca695ac9 9222 {
3a94c984 9223 op1 = GEN_INT (-INTVAL (op1));
b93a436e 9224 this_optab = add_optab;
ca695ac9 9225 }
1499e0a8 9226
91ce572a
CC
9227 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9228 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9229
b93a436e
JL
9230 /* For a preincrement, see if we can do this with a single instruction. */
9231 if (!post)
9232 {
9233 icode = (int) this_optab->handlers[(int) mode].insn_code;
9234 if (icode != (int) CODE_FOR_nothing
9235 /* Make sure that OP0 is valid for operands 0 and 1
9236 of the insn we want to queue. */
a995e389
RH
9237 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9238 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9239 && (*insn_data[icode].operand[2].predicate) (op1, mode))
b93a436e
JL
9240 single_insn = 1;
9241 }
bbf6f052 9242
b93a436e
JL
9243 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9244 then we cannot just increment OP0. We must therefore contrive to
9245 increment the original value. Then, for postincrement, we can return
9246 OP0 since it is a copy of the old value. For preincrement, expand here
9247 unless we can do it with a single insn.
bbf6f052 9248
b93a436e
JL
9249 Likewise if storing directly into OP0 would clobber high bits
9250 we need to preserve (bad_subreg). */
9251 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
a358cee0 9252 {
b93a436e
JL
9253 /* This is the easiest way to increment the value wherever it is.
9254 Problems with multiple evaluation of INCREMENTED are prevented
9255 because either (1) it is a component_ref or preincrement,
9256 in which case it was stabilized above, or (2) it is an array_ref
9257 with constant index in an array in a register, which is
9258 safe to reevaluate. */
9259 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9260 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9261 ? MINUS_EXPR : PLUS_EXPR),
9262 TREE_TYPE (exp),
9263 incremented,
9264 TREE_OPERAND (exp, 1));
a358cee0 9265
b93a436e
JL
9266 while (TREE_CODE (incremented) == NOP_EXPR
9267 || TREE_CODE (incremented) == CONVERT_EXPR)
9268 {
9269 newexp = convert (TREE_TYPE (incremented), newexp);
9270 incremented = TREE_OPERAND (incremented, 0);
9271 }
bbf6f052 9272
b93a436e
JL
9273 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
9274 return post ? op0 : temp;
9275 }
bbf6f052 9276
b93a436e
JL
9277 if (post)
9278 {
9279 /* We have a true reference to the value in OP0.
9280 If there is an insn to add or subtract in this mode, queue it.
9281 Queueing the increment insn avoids the register shuffling
9282 that often results if we must increment now and first save
9283 the old value for subsequent use. */
bbf6f052 9284
b93a436e
JL
9285#if 0 /* Turned off to avoid making extra insn for indexed memref. */
9286 op0 = stabilize (op0);
9287#endif
41dfd40c 9288
b93a436e
JL
9289 icode = (int) this_optab->handlers[(int) mode].insn_code;
9290 if (icode != (int) CODE_FOR_nothing
9291 /* Make sure that OP0 is valid for operands 0 and 1
9292 of the insn we want to queue. */
a995e389
RH
9293 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9294 && (*insn_data[icode].operand[1].predicate) (op0, mode))
b93a436e 9295 {
a995e389 9296 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
b93a436e 9297 op1 = force_reg (mode, op1);
bbf6f052 9298
b93a436e
JL
9299 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9300 }
9301 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9302 {
9303 rtx addr = (general_operand (XEXP (op0, 0), mode)
9304 ? force_reg (Pmode, XEXP (op0, 0))
9305 : copy_to_reg (XEXP (op0, 0)));
9306 rtx temp, result;
ca695ac9 9307
b93a436e
JL
9308 op0 = change_address (op0, VOIDmode, addr);
9309 temp = force_reg (GET_MODE (op0), op0);
a995e389 9310 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
b93a436e 9311 op1 = force_reg (mode, op1);
ca695ac9 9312
b93a436e
JL
9313 /* The increment queue is LIFO, thus we have to `queue'
9314 the instructions in reverse order. */
9315 enqueue_insn (op0, gen_move_insn (op0, temp));
9316 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9317 return result;
bbf6f052
RK
9318 }
9319 }
ca695ac9 9320
b93a436e
JL
9321 /* Preincrement, or we can't increment with one simple insn. */
9322 if (post)
9323 /* Save a copy of the value before inc or dec, to return it later. */
9324 temp = value = copy_to_reg (op0);
9325 else
9326 /* Arrange to return the incremented value. */
9327 /* Copy the rtx because expand_binop will protect from the queue,
9328 and the results of that would be invalid for us to return
9329 if our caller does emit_queue before using our result. */
9330 temp = copy_rtx (value = op0);
bbf6f052 9331
b93a436e
JL
9332 /* Increment however we can. */
9333 op1 = expand_binop (mode, this_optab, value, op1,
3a94c984 9334 current_function_check_memory_usage ? NULL_RTX : op0,
b93a436e
JL
9335 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9336 /* Make sure the value is stored into OP0. */
9337 if (op1 != op0)
9338 emit_move_insn (op0, op1);
5718612f 9339
b93a436e
JL
9340 return temp;
9341}
9342\f
b93a436e
JL
9343/* At the start of a function, record that we have no previously-pushed
9344 arguments waiting to be popped. */
bbf6f052 9345
b93a436e
JL
9346void
9347init_pending_stack_adjust ()
9348{
9349 pending_stack_adjust = 0;
9350}
bbf6f052 9351
b93a436e 9352/* When exiting from function, if safe, clear out any pending stack adjust
060fbabf
JL
9353 so the adjustment won't get done.
9354
9355 Note, if the current function calls alloca, then it must have a
9356 frame pointer regardless of the value of flag_omit_frame_pointer. */
bbf6f052 9357
b93a436e
JL
9358void
9359clear_pending_stack_adjust ()
9360{
9361#ifdef EXIT_IGNORE_STACK
9362 if (optimize > 0
060fbabf
JL
9363 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9364 && EXIT_IGNORE_STACK
b93a436e
JL
9365 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9366 && ! flag_inline_functions)
1503a7ec
JH
9367 {
9368 stack_pointer_delta -= pending_stack_adjust,
9369 pending_stack_adjust = 0;
9370 }
b93a436e
JL
9371#endif
9372}

/* Pop any previously-pushed arguments that have not been popped yet.  */

void
do_pending_stack_adjust ()
{
  if (inhibit_defer_pop == 0)
    {
      if (pending_stack_adjust != 0)
	adjust_stack (GEN_INT (pending_stack_adjust));
      pending_stack_adjust = 0;
    }
}
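
/* For example (an illustration, not output from any particular target):
   if three consecutive calls each leave 16 bytes of arguments pushed
   and their pops are deferred, pending_stack_adjust accumulates to 48
   and a single adjust_stack (GEN_INT (48)) is emitted here, instead of
   three separate 16-byte pops.  */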
\f
/* Expand conditional expressions.  */

/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
   LABEL is an rtx of code CODE_LABEL, in this function and all the
   functions here.  */

void
jumpifnot (exp, label)
     tree exp;
     rtx label;
{
  do_jump (exp, label, NULL_RTX);
}

/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */

void
jumpif (exp, label)
     tree exp;
     rtx label;
{
  do_jump (exp, NULL_RTX, label);
}
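
/* Usage sketch (a hypothetical caller, not quoted from the statement
   expanders): to expand `if (cond) body;' one would emit

	jumpifnot (cond, else_label);
	... expand BODY ...
	emit_label (else_label);

   so that control falls through into BODY when COND is nonzero.  */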

/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
   the result is zero, or IF_TRUE_LABEL if the result is one.
   Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
   meaning fall through in that case.

   do_jump always does any pending stack adjust except when it does not
   actually perform a jump.  An example where there is no jump
   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.

   This function is responsible for optimizing cases such as
   &&, || and comparison operators in EXP.  */

void
do_jump (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  register enum tree_code code = TREE_CODE (exp);
  /* Some cases need to create a label to jump to
     in order to properly fall through.
     These cases set DROP_THROUGH_LABEL nonzero.  */
  rtx drop_through_label = 0;
  rtx temp;
  int i;
  tree type;
  enum machine_mode mode;

#ifdef MAX_INTEGER_COMPUTATION_MODE
  check_max_integer_computation_mode (exp);
#endif

  emit_queue ();

  switch (code)
    {
    case ERROR_MARK:
      break;

    case INTEGER_CST:
      temp = integer_zerop (exp) ? if_false_label : if_true_label;
      if (temp)
	emit_jump (temp);
      break;

#if 0
      /* This is not true with #pragma weak.  */
    case ADDR_EXPR:
      /* The address of something can never be zero.  */
      if (if_true_label)
	emit_jump (if_true_label);
      break;
#endif

    case NOP_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
	  || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
	  || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
	goto normal;
    case CONVERT_EXPR:
      /* If we are narrowing the operand, we have to do the compare in the
	 narrower mode.  */
      if ((TYPE_PRECISION (TREE_TYPE (exp))
	   < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	goto normal;
    case NON_LVALUE_EXPR:
    case REFERENCE_EXPR:
    case ABS_EXPR:
    case NEGATE_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* These cannot change zero->non-zero or vice versa.  */
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      break;

    case WITH_RECORD_EXPR:
      /* Put the object on the placeholder list, recurse through our first
	 operand, and pop the list.  */
      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
				    placeholder_list);
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      placeholder_list = TREE_CHAIN (placeholder_list);
      break;

#if 0
      /* This is never fewer insns than evaluating the PLUS_EXPR followed by
	 a test and can be longer if the test is eliminated.  */
    case PLUS_EXPR:
      /* Reduce to minus.  */
      exp = build (MINUS_EXPR, TREE_TYPE (exp),
		   TREE_OPERAND (exp, 0),
		   fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
				 TREE_OPERAND (exp, 1))));
      /* Process as MINUS.  */
#endif

    case MINUS_EXPR:
      /* Non-zero iff operands of minus differ.  */
      do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
				  TREE_OPERAND (exp, 0),
				  TREE_OPERAND (exp, 1)),
			   NE, NE, if_false_label, if_true_label);
      break;

    case BIT_AND_EXPR:
      /* If we are AND'ing with a small constant, do this comparison in the
	 smallest type that fits.  If the machine doesn't have comparisons
	 that small, it will be converted back to the wider comparison.
	 This helps if we are testing the sign bit of a narrower object.
	 combine can't do this for us because it can't know whether a
	 ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */

      if (! SLOW_BYTE_ACCESS
	  && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
	  && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
	  && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
	  && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
	  && (type = type_for_mode (mode, 1)) != 0
	  && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
	  && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
	      != CODE_FOR_nothing))
	{
	  do_jump (convert (type, exp), if_false_label, if_true_label);
	  break;
	}
      goto normal;

    case TRUTH_NOT_EXPR:
      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
      break;

    case TRUTH_ANDIF_EXPR:
      if (if_false_label == 0)
	if_false_label = drop_through_label = gen_label_rtx ();
      do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
      start_cleanup_deferral ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      end_cleanup_deferral ();
      break;

    case TRUTH_ORIF_EXPR:
      if (if_true_label == 0)
	if_true_label = drop_through_label = gen_label_rtx ();
      do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
      start_cleanup_deferral ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      end_cleanup_deferral ();
      break;

    case COMPOUND_EXPR:
      push_temp_slots ();
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      preserve_temp_slots (NULL_RTX);
      free_temp_slots ();
      pop_temp_slots ();
      emit_queue ();
      do_pending_stack_adjust ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      break;

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_REF:
      {
	HOST_WIDE_INT bitsize, bitpos;
	int unsignedp;
	enum machine_mode mode;
	tree type;
	tree offset;
	int volatilep = 0;
	unsigned int alignment;

	/* Get description of this reference.  We don't actually care
	   about the underlying object here.  */
	get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &volatilep, &alignment);

	type = type_for_size (bitsize, unsignedp);
	if (! SLOW_BYTE_ACCESS
	    && type != 0 && bitsize >= 0
	    && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
	    && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
		!= CODE_FOR_nothing))
	  {
	    do_jump (convert (type, exp), if_false_label, if_true_label);
	    break;
	  }
	goto normal;
      }

    case COND_EXPR:
      /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases.  */
      if (integer_onep (TREE_OPERAND (exp, 1))
	  && integer_zerop (TREE_OPERAND (exp, 2)))
	do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);

      else if (integer_zerop (TREE_OPERAND (exp, 1))
	       && integer_onep (TREE_OPERAND (exp, 2)))
	do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);

      else
	{
	  register rtx label1 = gen_label_rtx ();
	  drop_through_label = gen_label_rtx ();

	  do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);

	  start_cleanup_deferral ();
	  /* Now the THEN-expression.  */
	  do_jump (TREE_OPERAND (exp, 1),
		   if_false_label ? if_false_label : drop_through_label,
		   if_true_label ? if_true_label : drop_through_label);
	  /* In case the do_jump just above never jumps.  */
	  do_pending_stack_adjust ();
	  emit_label (label1);

	  /* Now the ELSE-expression.  */
	  do_jump (TREE_OPERAND (exp, 2),
		   if_false_label ? if_false_label : drop_through_label,
		   if_true_label ? if_true_label : drop_through_label);
	  end_cleanup_deferral ();
	}
      break;

    case EQ_EXPR:
      {
	tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

	if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
	    || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
	  {
	    tree exp0 = save_expr (TREE_OPERAND (exp, 0));
	    tree exp1 = save_expr (TREE_OPERAND (exp, 1));
	    do_jump
	      (fold
	       (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
		       fold (build (EQ_EXPR, TREE_TYPE (exp),
				    fold (build1 (REALPART_EXPR,
						  TREE_TYPE (inner_type),
						  exp0)),
				    fold (build1 (REALPART_EXPR,
						  TREE_TYPE (inner_type),
						  exp1)))),
		       fold (build (EQ_EXPR, TREE_TYPE (exp),
				    fold (build1 (IMAGPART_EXPR,
						  TREE_TYPE (inner_type),
						  exp0)),
				    fold (build1 (IMAGPART_EXPR,
						  TREE_TYPE (inner_type),
						  exp1)))))),
	       if_false_label, if_true_label);
	  }

	else if (integer_zerop (TREE_OPERAND (exp, 1)))
	  do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);

	else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
		 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
	  do_jump_by_parts_equality (exp, if_false_label, if_true_label);
	else
	  do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
	break;
      }

    case NE_EXPR:
      {
	tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

	if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
	    || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
	  {
	    tree exp0 = save_expr (TREE_OPERAND (exp, 0));
	    tree exp1 = save_expr (TREE_OPERAND (exp, 1));
	    do_jump
	      (fold
	       (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
		       fold (build (NE_EXPR, TREE_TYPE (exp),
				    fold (build1 (REALPART_EXPR,
						  TREE_TYPE (inner_type),
						  exp0)),
				    fold (build1 (REALPART_EXPR,
						  TREE_TYPE (inner_type),
						  exp1)))),
		       fold (build (NE_EXPR, TREE_TYPE (exp),
				    fold (build1 (IMAGPART_EXPR,
						  TREE_TYPE (inner_type),
						  exp0)),
				    fold (build1 (IMAGPART_EXPR,
						  TREE_TYPE (inner_type),
						  exp1)))))),
	       if_false_label, if_true_label);
	  }

	else if (integer_zerop (TREE_OPERAND (exp, 1)))
	  do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);

	else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
		 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
	  do_jump_by_parts_equality (exp, if_true_label, if_false_label);
	else
	  do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
	break;
      }

    case LT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && ! can_compare_p (LT, mode, ccp_jump))
	do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
      else
	do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
      break;

    case LE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && ! can_compare_p (LE, mode, ccp_jump))
	do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
      else
	do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
      break;

    case GT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && ! can_compare_p (GT, mode, ccp_jump))
	do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
      else
	do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
      break;

    case GE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && ! can_compare_p (GE, mode, ccp_jump))
	do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
      else
	do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
      break;

    case UNORDERED_EXPR:
    case ORDERED_EXPR:
      {
	enum rtx_code cmp, rcmp;
	int do_rev;

	if (code == UNORDERED_EXPR)
	  cmp = UNORDERED, rcmp = ORDERED;
	else
	  cmp = ORDERED, rcmp = UNORDERED;
	mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));

	do_rev = 0;
	if (! can_compare_p (cmp, mode, ccp_jump)
	    && (can_compare_p (rcmp, mode, ccp_jump)
		/* If the target doesn't provide either UNORDERED or ORDERED
		   comparisons, canonicalize on UNORDERED for the library.  */
		|| rcmp == UNORDERED))
	  do_rev = 1;

	if (! do_rev)
	  do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
	else
	  do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
      }
      break;

    {
      enum rtx_code rcode1;
      enum tree_code tcode2;

      case UNLT_EXPR:
	rcode1 = UNLT;
	tcode2 = LT_EXPR;
	goto unordered_bcc;
      case UNLE_EXPR:
	rcode1 = UNLE;
	tcode2 = LE_EXPR;
	goto unordered_bcc;
      case UNGT_EXPR:
	rcode1 = UNGT;
	tcode2 = GT_EXPR;
	goto unordered_bcc;
      case UNGE_EXPR:
	rcode1 = UNGE;
	tcode2 = GE_EXPR;
	goto unordered_bcc;
      case UNEQ_EXPR:
	rcode1 = UNEQ;
	tcode2 = EQ_EXPR;
	goto unordered_bcc;

      unordered_bcc:
	mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
	if (can_compare_p (rcode1, mode, ccp_jump))
	  do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
			       if_true_label);
	else
	  {
	    tree op0 = save_expr (TREE_OPERAND (exp, 0));
	    tree op1 = save_expr (TREE_OPERAND (exp, 1));
	    tree cmp0, cmp1;

	    /* If the target doesn't support combined unordered
	       compares, decompose into UNORDERED + comparison.  */
	    cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
	    cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
	    exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
	    do_jump (exp, if_false_label, if_true_label);
	  }
    }
    break;

    default:
    normal:
      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
#if 0
      /* This is not needed any more and causes poor code since it causes
	 comparisons and tests from non-SI objects to have different code
	 sequences.  */
      /* Copy to register to avoid generating bad insns by cse
	 from (set (mem ...) (arithop))  (set (cc0) (mem ...)).  */
      if (!cse_not_expected && GET_CODE (temp) == MEM)
	temp = copy_to_reg (temp);
#endif
      do_pending_stack_adjust ();
      /* Do any postincrements in the expression that was tested.  */
      emit_queue ();

      if (GET_CODE (temp) == CONST_INT
	  || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
	  || GET_CODE (temp) == LABEL_REF)
	{
	  rtx target = temp == const0_rtx ? if_false_label : if_true_label;
	  if (target)
	    emit_jump (target);
	}
      else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
	       && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
	/* Note swapping the labels gives us not-equal.  */
	do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
      else if (GET_MODE (temp) != VOIDmode)
	do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
				 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
				 GET_MODE (temp), NULL_RTX, 0,
				 if_false_label, if_true_label);
      else
	abort ();
    }

  if (drop_through_label)
    {
      /* If do_jump produces code that might be jumped around,
	 do any stack adjusts from that code, before the place
	 where control merges in.  */
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
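
/* Worked example for the short-circuit cases above (illustrative only):
   for `a && b' with both labels present, the TRUTH_ANDIF_EXPR case
   emits

	do_jump (a, if_false_label, NULL_RTX);	 jump away if A is zero
	do_jump (b, if_false_label, if_true_label);

   so B is evaluated only when A is nonzero, and no intermediate
   boolean value is ever materialized in a register.  */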
\f
/* Given a comparison expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.
   The code of EXP is ignored; we always test GT if SWAP is 0,
   and LT if SWAP is 1.  */

static void
do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
     tree exp;
     int swap;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));

  do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1,
				if_false_label, if_true_label);
}

/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */

void
do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
			      if_true_label)
     enum machine_mode mode;
     int unsignedp;
     rtx op0, op1;
     rtx if_false_label, if_true_label;
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
	{
	  op0_word = operand_subword_force (op0, i, mode);
	  op1_word = operand_subword_force (op1, i, mode);
	}
      else
	{
	  op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
	  op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
	}

      /* All but high-order word must be compared as unsigned.  */
      do_compare_rtx_and_jump (op0_word, op1_word, GT,
			       (unsignedp || i > 0), word_mode, NULL_RTX, 0,
			       NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
			       NULL_RTX, 0, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
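
/* Worked example (an illustration, assuming a 32-bit target so that
   DImode has two words): a signed `a > b' on DImode operands expands
   roughly to

	compare high words:  GT (signed)    -> if_true_label
			     NE (signed)    -> if_false_label
	compare low words:   GT (unsigned)  -> if_true_label
			     NE (unsigned)  -> if_false_label
	fall out of the loop (all equal)    -> if_false_label

   Only the high-order word is compared with the operands' own
   signedness; all lower words are compared unsigned.  */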

/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
			     operand_subword_force (op1, i, mode),
			     EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
			     word_mode, NULL_RTX, 0, if_false_label,
			     NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
\f
/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */

void
do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
     rtx op0;
     rtx if_false_label, if_true_label;
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */

  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
			 operand_subword_force (op0, i, GET_MODE (op0)),
			 part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
			       NULL_RTX, 0, if_false_label, if_true_label);

      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
			     const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
			     if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}
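
/* Illustration (added sketch, not from the original sources): testing a
   DImode X against zero on a 32-bit host reduces to

	part = x_low | x_high;
	if (part == 0) goto if_false_label; else goto if_true_label;

   i.e. one IOR and one word-sized compare instead of two compares and
   two conditional branches.  */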
\f
/* Generate code for a comparison of OP0 and OP1 with rtx code CODE
   (including code to compute the values to be compared)
   and set (CC0) according to the result.
   The decision as to signed or unsigned comparison must be made by the caller.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     unsigned int align;
{
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
      || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    return tem;

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);

  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
}
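
/* Note on the return value (an added explanatory sketch): the rtx built
   above, e.g. (gt (cc0) (const_int 0)), merely names the condition for
   a later branch or store-flag insn; the compare itself has already
   been emitted by emit_cmp_insn.  When both operands fold at compile
   time, the result is instead a bare CONST_INT, which callers such as
   do_store_flag below check for before emitting any branch.  */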

/* Like do_compare_and_jump but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

void
do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
			 if_false_label, if_true_label)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     unsigned int align;
     rtx if_false_label, if_true_label;
{
  rtx tem;
  int dummy_true_label = 0;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  */
  if (! if_true_label && ! FLOAT_MODE_P (mode))
    {
      if_true_label = if_false_label;
      if_false_label = 0;
      code = reverse_condition (code);
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
      || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    {
      if (tem == const_true_rtx)
	{
	  if (if_true_label)
	    emit_jump (if_true_label);
	}
      else
	{
	  if (if_false_label)
	    emit_jump (if_false_label);
	}
      return;
    }

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  if (! if_true_label)
    {
      dummy_true_label = 1;
      if_true_label = gen_label_rtx ();
    }

  emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
			   if_true_label);

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_true_label)
    emit_label (if_true_label);
}
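
/* Example of the label-reversal logic above (illustrative): a call with
   code == EQ and only IF_FALSE_LABEL set is rewritten, for integer
   modes, into code == NE with the false label serving as the sole true
   label, so a single conditional branch suffices.  For floating modes
   the reversal is unsafe in general under IEEE semantics (e.g. !(a > b)
   is not (a <= b) when either operand is a NaN), so a dummy true label
   is created and emitted just after the branch instead.  */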

/* Generate code for a comparison expression EXP (including code to compute
   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
   generated code will drop through.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static void
do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
		     if_true_label)
     register tree exp;
     enum rtx_code signed_code, unsigned_code;
     rtx if_false_label, if_true_label;
{
  unsigned int align0, align1;
  register rtx op0, op1;
  register tree type;
  register enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
    return;

  op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
  if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
    return;

  type = TREE_TYPE (TREE_OPERAND (exp, 0));
  mode = TYPE_MODE (type);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
	  || (GET_MODE_BITSIZE (mode)
	      > GET_MODE_BITSIZE (TYPE_MODE
				  (TREE_TYPE (TREE_OPERAND (exp, 1)))))))
    {
      /* op0 might have been replaced by a promoted constant, in which
	 case the type of the second argument should be used.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      mode = TYPE_MODE (type);
    }
  unsignedp = TREE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
	  == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
			   ((mode == BLKmode)
			    ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
			   MIN (align0, align1),
			   if_false_label, if_true_label);
}
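
/* For instance (illustrative): for `a < b' the callers in do_jump pass
   signed_code == LT and unsigned_code == LTU, and the function above
   selects between them from TREE_UNSIGNED of the operands' type, so the
   same tree code maps onto a signed or an unsigned RTL comparison as
   appropriate.  */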
\f
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	       == FUNCTION_TYPE))
	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;

    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
	  && bitnum < TYPE_PRECISION (type)
	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
				   bitnum - TYPE_PRECISION (type)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
		       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
		       : 1
#endif
		       );

      if (! get_subtarget (subtarget)
	  || GET_MODE (subtarget) != operand_mode
	  || ! safe_from_p (subtarget, inner, 1))
	subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
	op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
			    size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
	op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
	op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
			    ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
	op0 = expand_and (op0, const1_rtx, subtarget);

      return op0;
    }

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
	 can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
      else if (BRANCH_COST >= 0
	       && ! only_cheap && (code == NE || code == EQ)
	       && TREE_CODE (type) != REAL_TYPE
	       && ((abs_optab->handlers[(int) operand_mode].insn_code
		    != CODE_FOR_nothing)
		   || (ffs_optab->handlers[(int) operand_mode].insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
			    queued_subexp_p (op0) ? copy_rtx (op0) : op0,
			    queued_subexp_p (op1) ? copy_rtx (op1) : op1,
			    operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
			     operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
	     || (result != const0_rtx && invert))
	    ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
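
/* Worked example of the single-bit path above (illustrative): for
   `(x & 8) != 0', BITNUM is 3, so the code emits roughly

	t = x >> 3;		shift the tested bit to bit 0
	t = t ^ 1;		only for the `(x & 8) == 0' form
	t = t & 1;		mask away the higher bits

   When the tested bit is the sign bit (BITNUM == TYPE_PRECISION (type)
   - 1), the trailing AND is omitted, since the logical shift already
   leaves only that bit.  */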
\f
/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			   0, default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
			gen_rtx_MULT (Pmode, index,
				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
			gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
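
/* Sketch of the emitted sequence (illustrative; the exact insns are
   target-specific):

	if ((unsigned) index > range) goto default_label;
	index = index * GET_MODE_SIZE (CASE_VECTOR_MODE)
		+ &table_label;		(PIC-adjusted where needed)
	temp = *index;			(load the dispatch-table entry)
	tablejump temp;

   followed by a BARRIER unless the table is PC-relative or we are
   generating PIC code.  */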

#endif /* HAVE_tablejump */