/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "reload.h"
#include "output.h"
#include "typeclass.h"
#include "toplev.h"
#include "ggc.h"
#include "intl.h"
#include "tm_p.h"

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* Hook called by safe_from_p for language-specific tree codes.  It is
   up to the language front-end to install a hook if it has any such
   codes that safe_from_p needs to know about.  Since safe_from_p will
   recursively explore the TREE_OPERANDs of an expression, this hook
   should not reexamine those pieces.  This routine may recursively
   call safe_from_p; it should always pass `0' as the TOP_P
   parameter.  */
int (*lang_safe_from_p) PARAMS ((rtx, tree));

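/* Illustrative sketch (not part of the original file): a front end
   installs the hook roughly as below.  `lang_specific_safe_from_p' and
   LANG_SPECIFIC_CODE are hypothetical names; the predicate must return
   nonzero only when EXP cannot be affected by a store into TARGET, and
   must pass 0 for TOP_P on any recursive call to safe_from_p.  */
#if 0
static int
lang_specific_safe_from_p (target, exp)
     rtx target;
     tree exp;
{
  if (TREE_CODE (exp) == LANG_SPECIFIC_CODE)
    return safe_from_p (target, TREE_OPERAND (exp, 0), /*top_p=*/0);
  return 1;
}

/* In the front end's initialization code:
   lang_safe_from_p = lang_specific_safe_from_p;  */
#endif
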
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Don't check memory usage, since code is being emitted to check memory
   usage.  Used when current_function_check_memory_usage is true, to avoid
   infinite recursion.  */
static int in_check_memory_usage;

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
static tree placeholder_list = 0;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the store to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
  PTR constfundata;
  int reverse;
};

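/* Illustrative sketch (not from the original source): CONSTFUN is
   expected to return, as an rtx in mode MODE, the constant that
   belongs at byte OFFSET of the store.  A caller clearing memory
   could pass something like this hypothetical callback:  */
#if 0
static rtx
zero_constfun (data, offset, mode)
     PTR data ATTRIBUTE_UNUSED;
     HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
     enum machine_mode mode;
{
  return CONST0_RTX (mode);
}
#endif
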
extern struct obstack permanent_obstack;

static rtx get_push_address	PARAMS ((int));

static rtx enqueue_insn		PARAMS ((rtx, rtx));
static unsigned HOST_WIDE_INT move_by_pieces_ninsns
				PARAMS ((unsigned HOST_WIDE_INT,
					 unsigned int));
static void move_by_pieces_1	PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
					 struct move_by_pieces *));
static rtx clear_by_pieces_1	PARAMS ((PTR, HOST_WIDE_INT,
					 enum machine_mode));
static void clear_by_pieces	PARAMS ((rtx, unsigned HOST_WIDE_INT,
					 unsigned int));
static void store_by_pieces_1	PARAMS ((struct store_by_pieces *,
					 unsigned int));
static void store_by_pieces_2	PARAMS ((rtx (*) (rtx, ...),
					 enum machine_mode,
					 struct store_by_pieces *));
static rtx get_subtarget	PARAMS ((rtx));
static int is_zeros_p		PARAMS ((tree));
static int mostly_zeros_p	PARAMS ((tree));
static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
					     HOST_WIDE_INT, enum machine_mode,
					     tree, tree, unsigned int, int,
					     int));
static void store_constructor	PARAMS ((tree, rtx, unsigned int, int,
					 HOST_WIDE_INT));
static rtx store_field		PARAMS ((rtx, HOST_WIDE_INT,
					 HOST_WIDE_INT, enum machine_mode,
					 tree, enum machine_mode, int,
					 unsigned int, HOST_WIDE_INT, int));
static enum memory_use_mode
  get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
static tree save_noncopied_parts PARAMS ((tree, tree));
static tree init_noncopied_parts PARAMS ((tree, tree));
static int fixed_type_p		PARAMS ((tree));
static rtx var_rtx		PARAMS ((tree));
static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
static rtx expand_increment	PARAMS ((tree, int, int));
static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
static void do_compare_and_jump	PARAMS ((tree, enum rtx_code, enum rtx_code,
					 rtx, rtx));
static rtx do_store_flag	PARAMS ((tree, rtx, enum machine_mode, int));
#ifdef PUSH_ROUNDING
static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
#endif
static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif

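/* Worked example (added for illustration): an 11-byte, word-aligned
   copy on a 32-bit target takes two SImode moves, one HImode move and
   one QImode move, so move_by_pieces_ninsns returns 4.  Since 4 is
   less than the default MOVE_RATIO of 15 (when not optimizing for
   size), MOVE_BY_PIECES_P is true and the copy is expanded inline.  */
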
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
\f
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;

  start_sequence ();

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx_REG (mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  end_sequence ();
}

/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));

  pending_chain = 0;
  pending_stack_adjust = 0;
  stack_pointer_delta = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

void
mark_expr_status (p)
     struct expr_status *p;
{
  if (p == NULL)
    return;

  ggc_mark_rtx (p->x_saveregs_value);
  ggc_mark_rtx (p->x_apply_args_value);
  ggc_mark_rtx (p->x_forced_labels);
}

void
free_expr_status (f)
     struct function *f;
{
  free (f->expr);
  f->expr = NULL;
}

/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function ()
{
  if (pending_chain)
    abort ();
}
\f
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
				  body, pending_chain);
  return pending_chain;
}

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  rtx y = XEXP (x, 0);
	  rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

	  if (QUEUED_INSN (y))
	    {
	      rtx temp = gen_reg_rtx (GET_MODE (x));

	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }

	  /* Copy the address into a pseudo, so that the returned value
	     remains correct across calls to emit_queue.  */
	  return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
	}

      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}

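/* Illustrative sketch (not part of the original file): the intended
   calling sequence for the queue.  V and DEST are hypothetical rtx's;
   the point is that the protected value is used immediately, before
   the queue can be flushed.  */
#if 0
  {
    rtx q = enqueue_insn (v, gen_add2_insn (v, const1_rtx)); /* queue v++ */
    rtx old = protect_from_queue (q, 0); /* pre-increment value of V */
    emit_move_insn (dest, old);		 /* use it at once; do not hold it */
    emit_queue ();			 /* now the increment is emitted */
  }
#endif
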
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      if (GET_CODE (body) == SEQUENCE)
	{
	  QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
	  emit_insn (QUEUED_BODY (p));
	}
      else
	QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}
\f
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
	abort ();

      if (VECTOR_MODE_P (to_mode))
	from = gen_rtx_SUBREG (to_mode, from, 0);
      else
	to = gen_rtx_SUBREG (from_mode, to, 0);

      emit_move_insn (to, from);
      return;
    }

  if (to_real != from_real)
    abort ();

  if (to_real)
    {
      rtx value, insns;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif

      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	default:
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
								    from));
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
	from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
	{
	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
	{
	  from = convert_to_mode (QImode, from, unsignedp);
	  from_mode = QImode;
	}
      else
	{
#ifdef HAVE_extendpqiqi2
	  if (HAVE_extendpqiqi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpqiqi2 */
	  abort ();
	}
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (! unsignedp && HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
#ifdef HAVE_zero_extendpsisi2
	  if (unsignedp && HAVE_zero_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_zero_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
				      - GET_MODE_BITSIZE (from_mode), 0);
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}

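/* Illustrative sketch (not from the original source): a typical use
   of convert_move, sign-extending a hypothetical QImode register QREG
   into a fresh SImode pseudo.  */
#if 0
  {
    rtx wide = gen_reg_rtx (SImode);
    convert_move (wide, qreg, /*unsignedp=*/0); /* emits an extend insn or
						   the shift fallback above */
  }
#endif
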
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return GEN_INT (trunc_int_for_mode (val, mode));
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
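
/* Illustrative sketch (not from the original source): converting a
   constant through convert_modes.  Because the CONST_INT case above is
   handled without emitting insns, zero-extending QImode -1 to SImode
   simply returns (const_int 255).  */
#if 0
  rtx widened = convert_modes (SImode, QImode, GEN_INT (-1),
			       /*unsignedp=*/1);
#endif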
\f
/* This macro is used to determine the largest unit size that
   move_by_pieces can use.  */

/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES   MOVE_MAX
#endif

/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum alignment we can assume.  */

void
move_by_pieces (to, from, len, align)
     rtx to, from;
     unsigned HOST_WIDE_INT len;
     unsigned int align;
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}

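/* Worked example (added for illustration): on a 32-bit target a 7-byte
   copy is consumed largest mode first -- one SImode move (4 bytes),
   then one HImode move (2 bytes), then one QImode move (1 byte) --
   leaving data.len zero as required.  */
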
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (l, align)
     unsigned HOST_WIDE_INT l;
     unsigned int align;
{
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  if (l)
    abort ();
  return n_insns;
}

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PARAMS ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    {
	      to1 = replace_equiv_address (data->to, data->to_addr);
	      to1 = adjust_address (to1, mode, 0);
	    }
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	{
	  from1 = replace_equiv_address (data->from, data->from_addr);
	  from1 = adjust_address (from1, mode, 0);
	}
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  abort ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
\f
1627/* Emit code to move a block Y to a block X.
1628 This may be done with string-move instructions,
1629 with multiple scalar move instructions, or with a library call.
1630
1631 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1632 with mode BLKmode.
1633 SIZE is an rtx that says how long they are.
19caa751 1634 ALIGN is the maximum alignment we can assume they have.
bbf6f052 1635
e9a25f70
JL
1636 Return the address of the new block, if memcpy is called and returns it,
1637 0 otherwise. */
1638
1639rtx
bbf6f052
RK
1640emit_block_move (x, y, size, align)
1641 rtx x, y;
1642 rtx size;
729a2125 1643 unsigned int align;
bbf6f052 1644{
e9a25f70 1645 rtx retval = 0;
52cf7115
JL
1646#ifdef TARGET_MEM_FUNCTIONS
1647 static tree fn;
1648 tree call_expr, arg_list;
1649#endif
e9a25f70 1650
bbf6f052
RK
1651 if (GET_MODE (x) != BLKmode)
1652 abort ();
1653
1654 if (GET_MODE (y) != BLKmode)
1655 abort ();
1656
1657 x = protect_from_queue (x, 1);
1658 y = protect_from_queue (y, 0);
5d901c31 1659 size = protect_from_queue (size, 0);
bbf6f052
RK
1660
1661 if (GET_CODE (x) != MEM)
1662 abort ();
1663 if (GET_CODE (y) != MEM)
1664 abort ();
1665 if (size == 0)
1666 abort ();
1667
fbe1758d 1668 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
bbf6f052
RK
1669 move_by_pieces (x, y, INTVAL (size), align);
1670 else
1671 {
1672 /* Try the most limited insn first, because there's no point
1673 including more than one in the machine description unless
1674 the more limited one has some advantage. */
266007a7 1675
19caa751 1676 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
266007a7
RK
1677 enum machine_mode mode;
1678
3ef1eef4
RK
1679 /* Since this is a move insn, we don't care about volatility. */
1680 volatile_ok = 1;
1681
266007a7
RK
1682 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1683 mode = GET_MODE_WIDER_MODE (mode))
bbf6f052 1684 {
266007a7 1685 enum insn_code code = movstr_optab[(int) mode];
a995e389 1686 insn_operand_predicate_fn pred;
266007a7
RK
1687
1688 if (code != CODE_FOR_nothing
803090c4
RK
1689 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1690 here because if SIZE is less than the mode mask, as it is
8008b228 1691 returned by the macro, it will definitely be less than the
803090c4 1692 actual mode mask. */
8ca00751
RK
1693 && ((GET_CODE (size) == CONST_INT
1694 && ((unsigned HOST_WIDE_INT) INTVAL (size)
e5e809f4 1695 <= (GET_MODE_MASK (mode) >> 1)))
8ca00751 1696 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
a995e389
RH
1697 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1698 || (*pred) (x, BLKmode))
1699 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1700 || (*pred) (y, BLKmode))
1701 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1702 || (*pred) (opalign, VOIDmode)))
bbf6f052 1703 {
1ba1e2a8 1704 rtx op2;
266007a7
RK
1705 rtx last = get_last_insn ();
1706 rtx pat;
1707
1ba1e2a8 1708 op2 = convert_to_mode (mode, size, 1);
a995e389
RH
1709 pred = insn_data[(int) code].operand[2].predicate;
1710 if (pred != 0 && ! (*pred) (op2, mode))
266007a7
RK
1711 op2 = copy_to_mode_reg (mode, op2);
1712
1713 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1714 if (pat)
1715 {
1716 emit_insn (pat);
3ef1eef4 1717 volatile_ok = 0;
e9a25f70 1718 return 0;
266007a7
RK
1719 }
1720 else
1721 delete_insns_since (last);
bbf6f052
RK
1722 }
1723 }
bbf6f052 1724
3ef1eef4
RK
1725 volatile_ok = 0;
1726
4bc973ae
JL
1727 /* X, Y, or SIZE may have been passed through protect_from_queue.
1728
1729 It is unsafe to save the value generated by protect_from_queue
1730 and reuse it later. Consider what happens if emit_queue is
1731 called before the return value from protect_from_queue is used.
1732
1733 Expansion of the CALL_EXPR below will call emit_queue before
1734 we are finished emitting RTL for argument setup. So if we are
1735 not careful we could get the wrong value for an argument.
1736
1737 To avoid this problem we go ahead and emit code to copy X, Y &
1738 SIZE into new pseudos. We can then place those new pseudos
1739 into an RTL_EXPR and use them later, even after a call to
3a94c984 1740 emit_queue.
4bc973ae
JL
1741
1742 Note this is not strictly needed for library calls since they
1743 do not call emit_queue before loading their arguments. However,
1744 we may need to have library calls call emit_queue in the future
1745 since failing to do so could cause problems for targets which
1746 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1747 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1748 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1749
1750#ifdef TARGET_MEM_FUNCTIONS
1751 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1752#else
1753 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1754 TREE_UNSIGNED (integer_type_node));
f3dc586a 1755 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
4bc973ae
JL
1756#endif
1757
bbf6f052 1758#ifdef TARGET_MEM_FUNCTIONS
52cf7115
JL
1759 /* It is incorrect to use the libcall calling conventions to call
1760 memcpy in this context.
1761
1762 This could be a user call to memcpy and the user may wish to
1763 examine the return value from memcpy.
1764
1765 For targets where libcalls and normal calls have different conventions
3a94c984 1766 for returning pointers, we could end up generating incorrect code.
52cf7115
JL
1767
1768 So instead of using a libcall sequence we build up a suitable
1769 CALL_EXPR and expand the call in the normal fashion. */
1770 if (fn == NULL_TREE)
1771 {
1772 tree fntype;
1773
1774 /* This was copied from except.c, I don't know if all this is
1775 necessary in this context or not. */
1776 fn = get_identifier ("memcpy");
52cf7115
JL
1777 fntype = build_pointer_type (void_type_node);
1778 fntype = build_function_type (fntype, NULL_TREE);
1779 fn = build_decl (FUNCTION_DECL, fn, fntype);
3a94c984 1780 ggc_add_tree_root (&fn, 1);
52cf7115
JL
1781 DECL_EXTERNAL (fn) = 1;
1782 TREE_PUBLIC (fn) = 1;
1783 DECL_ARTIFICIAL (fn) = 1;
0d97bf4c 1784 TREE_NOTHROW (fn) = 1;
6496a589 1785 make_decl_rtl (fn, NULL);
52cf7115 1786 assemble_external (fn);
52cf7115
JL
1787 }
1788
3a94c984 1789 /* We need to make an argument list for the function call.
52cf7115
JL
1790
1791 memcpy has three arguments, the first two are void * addresses and
1792 the last is a size_t byte count for the copy. */
1793 arg_list
1794 = build_tree_list (NULL_TREE,
4bc973ae 1795 make_tree (build_pointer_type (void_type_node), x));
52cf7115
JL
1796 TREE_CHAIN (arg_list)
1797 = build_tree_list (NULL_TREE,
4bc973ae 1798 make_tree (build_pointer_type (void_type_node), y));
52cf7115
JL
1799 TREE_CHAIN (TREE_CHAIN (arg_list))
1800 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1801 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1802
1803 /* Now we have to build up the CALL_EXPR itself. */
1804 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1805 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1806 call_expr, arg_list, NULL_TREE);
1807 TREE_SIDE_EFFECTS (call_expr) = 1;
1808
1809 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
bbf6f052 1810#else
ebb1b59a 1811 emit_library_call (bcopy_libfunc, LCT_NORMAL,
fe7bbd2a 1812 VOIDmode, 3, y, Pmode, x, Pmode,
3b6f75e2
JW
1813 convert_to_mode (TYPE_MODE (integer_type_node), size,
1814 TREE_UNSIGNED (integer_type_node)),
1815 TYPE_MODE (integer_type_node));
bbf6f052
RK
1816#endif
1817 }
e9a25f70
JL
1818
1819 return retval;
bbf6f052
RK
1820}
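/* Editor's illustrative sketch (not part of the original source): a
   typical call to the block-move routine above, assuming ADDR_X and
   ADDR_Y are Pmode pseudos holding the destination and source
   addresses and that a 32-bit alignment is known.  ALIGN is in bits,
   matching the opalign computation above.

     rtx x = gen_rtx_MEM (BLKmode, addr_x);
     rtx y = gen_rtx_MEM (BLKmode, addr_y);
     emit_block_move (x, y, GEN_INT (16), 32);

   The movstr expansion is tried first; the memcpy/bcopy fallback is
   used only when no pattern matches.  */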
1821\f
1822/* Copy all or part of a value X into registers starting at REGNO.
1823 The number of registers to be filled is NREGS. */
1824
1825void
1826move_block_to_reg (regno, x, nregs, mode)
1827 int regno;
1828 rtx x;
1829 int nregs;
1830 enum machine_mode mode;
1831{
1832 int i;
381127e8 1833#ifdef HAVE_load_multiple
3a94c984 1834 rtx pat;
381127e8
RL
1835 rtx last;
1836#endif
bbf6f052 1837
72bb9717
RK
1838 if (nregs == 0)
1839 return;
1840
bbf6f052
RK
1841 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1842 x = validize_mem (force_const_mem (mode, x));
1843
1844 /* See if the machine can do this with a load multiple insn. */
1845#ifdef HAVE_load_multiple
c3a02afe 1846 if (HAVE_load_multiple)
bbf6f052 1847 {
c3a02afe 1848 last = get_last_insn ();
38a448ca 1849 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
c3a02afe
RK
1850 GEN_INT (nregs));
1851 if (pat)
1852 {
1853 emit_insn (pat);
1854 return;
1855 }
1856 else
1857 delete_insns_since (last);
bbf6f052 1858 }
bbf6f052
RK
1859#endif
1860
1861 for (i = 0; i < nregs; i++)
38a448ca 1862 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
bbf6f052
RK
1863 operand_subword_force (x, i, mode));
1864}
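/* Editor's illustrative sketch (not original): copy a two-word DImode
   value X into hard registers 4 and 5 (register numbers hypothetical).

     move_block_to_reg (4, x, 2, DImode);

   Each word is fetched with operand_subword_force, so X may be a REG,
   a MEM, or a constant.  */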
1865
1866/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
0040593d
JW
1867 The number of registers to be filled is NREGS. SIZE indicates the number
1868 of bytes in the object X. */
1869
bbf6f052 1870void
0040593d 1871move_block_from_reg (regno, x, nregs, size)
bbf6f052
RK
1872 int regno;
1873 rtx x;
1874 int nregs;
0040593d 1875 int size;
bbf6f052
RK
1876{
1877 int i;
381127e8 1878#ifdef HAVE_store_multiple
3a94c984 1879 rtx pat;
381127e8
RL
1880 rtx last;
1881#endif
58a32c5c 1882 enum machine_mode mode;
bbf6f052 1883
2954d7db
RK
1884 if (nregs == 0)
1885 return;
1886
58a32c5c
DE
1887 /* If SIZE is that of a mode no bigger than a word, just use that
1888 mode's store operation. */
1889 if (size <= UNITS_PER_WORD
1890 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1891 {
792760b9 1892 emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
58a32c5c
DE
1893 return;
1894 }
3a94c984 1895
0040593d 1896 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
58a32c5c
DE
1897 to the left before storing to memory. Note that the previous test
1898 doesn't handle all cases (e.g. SIZE == 3). */
0040593d
JW
1899 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1900 {
1901 rtx tem = operand_subword (x, 0, 1, BLKmode);
1902 rtx shift;
1903
1904 if (tem == 0)
1905 abort ();
1906
1907 shift = expand_shift (LSHIFT_EXPR, word_mode,
38a448ca 1908 gen_rtx_REG (word_mode, regno),
0040593d
JW
1909 build_int_2 ((UNITS_PER_WORD - size)
1910 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1911 emit_move_insn (tem, shift);
1912 return;
1913 }
1914
bbf6f052
RK
1915 /* See if the machine can do this with a store multiple insn. */
1916#ifdef HAVE_store_multiple
c3a02afe 1917 if (HAVE_store_multiple)
bbf6f052 1918 {
c3a02afe 1919 last = get_last_insn ();
38a448ca 1920 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
c3a02afe
RK
1921 GEN_INT (nregs));
1922 if (pat)
1923 {
1924 emit_insn (pat);
1925 return;
1926 }
1927 else
1928 delete_insns_since (last);
bbf6f052 1929 }
bbf6f052
RK
1930#endif
1931
1932 for (i = 0; i < nregs; i++)
1933 {
1934 rtx tem = operand_subword (x, i, 1, BLKmode);
1935
1936 if (tem == 0)
1937 abort ();
1938
38a448ca 1939 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
bbf6f052
RK
1940 }
1941}
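/* Editor's illustrative sketch (not original): spill an 8-byte BLKmode
   value returned in two hard registers back to memory.

     move_block_from_reg (REGNO (retreg), tgt_mem, 2, 8);

   When SIZE fits in a single integer mode (e.g. SIZE == 4 on a 32-bit
   target) the single-move shortcut above is taken instead.  */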
1942
aac5cc16
RH
1943/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1944 registers represented by a PARALLEL. SSIZE represents the total size of
1945 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1946 SRC in bits. */
 1947/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1948 the balance will be in what would be the low-order memory addresses, i.e.
1949 left justified for big endian, right justified for little endian. This
1950 happens to be true for the targets currently using this support. If this
1951 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1952 would be needed. */
fffa9c1d
JW
1953
1954void
aac5cc16
RH
1955emit_group_load (dst, orig_src, ssize, align)
1956 rtx dst, orig_src;
729a2125
RK
1957 unsigned int align;
1958 int ssize;
fffa9c1d 1959{
aac5cc16
RH
1960 rtx *tmps, src;
1961 int start, i;
fffa9c1d 1962
aac5cc16 1963 if (GET_CODE (dst) != PARALLEL)
fffa9c1d
JW
1964 abort ();
1965
1966 /* Check for a NULL entry, used to indicate that the parameter goes
1967 both on the stack and in registers. */
aac5cc16
RH
1968 if (XEXP (XVECEXP (dst, 0, 0), 0))
1969 start = 0;
fffa9c1d 1970 else
aac5cc16
RH
1971 start = 1;
1972
3a94c984 1973 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
aac5cc16 1974
aac5cc16
RH
1975 /* Process the pieces. */
1976 for (i = start; i < XVECLEN (dst, 0); i++)
1977 {
1978 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
770ae6cc
RK
1979 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1980 unsigned int bytelen = GET_MODE_SIZE (mode);
aac5cc16
RH
1981 int shift = 0;
1982
1983 /* Handle trailing fragments that run over the size of the struct. */
8752c357 1984 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
aac5cc16
RH
1985 {
1986 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1987 bytelen = ssize - bytepos;
1988 if (bytelen <= 0)
729a2125 1989 abort ();
aac5cc16
RH
1990 }
1991
f3ce87a9
DE
1992 /* If we won't be loading directly from memory, protect the real source
1993 from strange tricks we might play; but make sure that the source can
1994 be loaded directly into the destination. */
1995 src = orig_src;
1996 if (GET_CODE (orig_src) != MEM
1997 && (!CONSTANT_P (orig_src)
1998 || (GET_MODE (orig_src) != mode
1999 && GET_MODE (orig_src) != VOIDmode)))
2000 {
2001 if (GET_MODE (orig_src) == VOIDmode)
2002 src = gen_reg_rtx (mode);
2003 else
2004 src = gen_reg_rtx (GET_MODE (orig_src));
2005 emit_move_insn (src, orig_src);
2006 }
2007
aac5cc16
RH
2008 /* Optimize the access just a bit. */
2009 if (GET_CODE (src) == MEM
19caa751 2010 && align >= GET_MODE_ALIGNMENT (mode)
729a2125 2011 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
aac5cc16
RH
2012 && bytelen == GET_MODE_SIZE (mode))
2013 {
2014 tmps[i] = gen_reg_rtx (mode);
f4ef873c 2015 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
fffa9c1d 2016 }
7c4a6db0
JW
2017 else if (GET_CODE (src) == CONCAT)
2018 {
2019 if (bytepos == 0
2020 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
2021 tmps[i] = XEXP (src, 0);
8752c357 2022 else if (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
7c4a6db0
JW
2023 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
2024 tmps[i] = XEXP (src, 1);
2025 else
2026 abort ();
2027 }
f3ce87a9 2028 else if (CONSTANT_P (src)
2ee5437b
RH
2029 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2030 tmps[i] = src;
fffa9c1d 2031 else
19caa751
RK
2032 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2033 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2034 mode, mode, align, ssize);
fffa9c1d 2035
aac5cc16 2036 if (BYTES_BIG_ENDIAN && shift)
19caa751
RK
2037 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2038 tmps[i], 0, OPTAB_WIDEN);
fffa9c1d 2039 }
19caa751 2040
3a94c984 2041 emit_queue ();
aac5cc16
RH
2042
2043 /* Copy the extracted pieces into the proper (probable) hard regs. */
2044 for (i = start; i < XVECLEN (dst, 0); i++)
2045 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
fffa9c1d
JW
2046}
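/* Editor's illustrative sketch (not original): the kind of PARALLEL
   emit_group_load consumes, describing a 16-byte value whose first
   half lives in one register and whose second half in another
   (register numbers and modes hypothetical).

     rtx dst
       = gen_rtx_PARALLEL (VOIDmode,
                           gen_rtvec (2,
                                      gen_rtx_EXPR_LIST (VOIDmode,
                                                         gen_rtx_REG (DImode, 4),
                                                         GEN_INT (0)),
                                      gen_rtx_EXPR_LIST (VOIDmode,
                                                         gen_rtx_REG (DImode, 5),
                                                         GEN_INT (8))));
     emit_group_load (dst, src_mem, 16, 64);

   Each EXPR_LIST pairs a register with its byte offset within the
   source block; SSIZE is 16 and the known alignment is 64 bits.  */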
2047
aac5cc16
RH
2048/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2049 registers represented by a PARALLEL. SSIZE represents the total size of
2050 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
fffa9c1d
JW
2051
2052void
aac5cc16
RH
2053emit_group_store (orig_dst, src, ssize, align)
2054 rtx orig_dst, src;
729a2125
RK
2055 int ssize;
2056 unsigned int align;
fffa9c1d 2057{
aac5cc16
RH
2058 rtx *tmps, dst;
2059 int start, i;
fffa9c1d 2060
aac5cc16 2061 if (GET_CODE (src) != PARALLEL)
fffa9c1d
JW
2062 abort ();
2063
2064 /* Check for a NULL entry, used to indicate that the parameter goes
2065 both on the stack and in registers. */
aac5cc16
RH
2066 if (XEXP (XVECEXP (src, 0, 0), 0))
2067 start = 0;
fffa9c1d 2068 else
aac5cc16
RH
2069 start = 1;
2070
3a94c984 2071 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
fffa9c1d 2072
aac5cc16
RH
2073 /* Copy the (probable) hard regs into pseudos. */
2074 for (i = start; i < XVECLEN (src, 0); i++)
fffa9c1d 2075 {
aac5cc16
RH
2076 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2077 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2078 emit_move_insn (tmps[i], reg);
2079 }
3a94c984 2080 emit_queue ();
fffa9c1d 2081
aac5cc16
RH
2082 /* If we won't be storing directly into memory, protect the real destination
2083 from strange tricks we might play. */
2084 dst = orig_dst;
10a9f2be
JW
2085 if (GET_CODE (dst) == PARALLEL)
2086 {
2087 rtx temp;
2088
2089 /* We can get a PARALLEL dst if there is a conditional expression in
2090 a return statement. In that case, the dst and src are the same,
2091 so no action is necessary. */
2092 if (rtx_equal_p (dst, src))
2093 return;
2094
2095 /* It is unclear if we can ever reach here, but we may as well handle
2096 it. Allocate a temporary, and split this into a store/load to/from
2097 the temporary. */
2098
2099 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2100 emit_group_store (temp, src, ssize, align);
2101 emit_group_load (dst, temp, ssize, align);
2102 return;
2103 }
2104 else if (GET_CODE (dst) != MEM)
aac5cc16
RH
2105 {
2106 dst = gen_reg_rtx (GET_MODE (orig_dst));
2107 /* Make life a bit easier for combine. */
2108 emit_move_insn (dst, const0_rtx);
2109 }
aac5cc16
RH
2110
2111 /* Process the pieces. */
2112 for (i = start; i < XVECLEN (src, 0); i++)
2113 {
770ae6cc 2114 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
aac5cc16 2115 enum machine_mode mode = GET_MODE (tmps[i]);
770ae6cc 2116 unsigned int bytelen = GET_MODE_SIZE (mode);
aac5cc16
RH
2117
2118 /* Handle trailing fragments that run over the size of the struct. */
8752c357 2119 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
71bc0330 2120 {
aac5cc16
RH
2121 if (BYTES_BIG_ENDIAN)
2122 {
2123 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2124 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2125 tmps[i], 0, OPTAB_WIDEN);
2126 }
2127 bytelen = ssize - bytepos;
71bc0330 2128 }
fffa9c1d 2129
aac5cc16
RH
2130 /* Optimize the access just a bit. */
2131 if (GET_CODE (dst) == MEM
19caa751 2132 && align >= GET_MODE_ALIGNMENT (mode)
729a2125 2133 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
aac5cc16 2134 && bytelen == GET_MODE_SIZE (mode))
f4ef873c 2135 emit_move_insn (adjust_address (dst, mode, bytepos), tmps[i]);
aac5cc16 2136 else
729a2125 2137 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
f4ef873c 2138 mode, tmps[i], align, ssize);
fffa9c1d 2139 }
729a2125 2140
3a94c984 2141 emit_queue ();
aac5cc16
RH
2142
2143 /* Copy from the pseudo into the (probable) hard reg. */
2144 if (GET_CODE (dst) == REG)
2145 emit_move_insn (orig_dst, dst);
fffa9c1d
JW
2146}
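/* Editor's note (illustrative): the inverse direction uses the same
   PARALLEL shape, e.g.

     emit_group_store (dst_mem, src_parallel, 16, 64);

   storing each register of SRC_PARALLEL at its recorded byte offset
   within DST_MEM.  */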
2147
c36fce9a
GRK
2148/* Generate code to copy a BLKmode object of TYPE out of a
2149 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2150 is null, a stack temporary is created. TGTBLK is returned.
2151
2152 The primary purpose of this routine is to handle functions
2153 that return BLKmode structures in registers. Some machines
2154 (the PA for example) want to return all small structures
3a94c984 2155 in registers regardless of the structure's alignment. */
c36fce9a
GRK
2156
2157rtx
19caa751 2158copy_blkmode_from_reg (tgtblk, srcreg, type)
c36fce9a
GRK
2159 rtx tgtblk;
2160 rtx srcreg;
2161 tree type;
2162{
19caa751
RK
2163 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2164 rtx src = NULL, dst = NULL;
2165 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2166 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2167
2168 if (tgtblk == 0)
2169 {
1da68f56
RK
2170 tgtblk = assign_temp (build_qualified_type (type,
2171 (TYPE_QUALS (type)
2172 | TYPE_QUAL_CONST)),
2173 0, 1, 1);
19caa751
RK
2174 preserve_temp_slots (tgtblk);
2175 }
3a94c984 2176
19caa751
RK
2177 /* This code assumes srcreg is at least a full word. If it isn't,
2178 copy it into a new pseudo which is a full word. */
2179 if (GET_MODE (srcreg) != BLKmode
2180 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2181 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2182
2183 /* Structures whose size is not a multiple of a word are aligned
2184 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2185 machine, this means we must skip the empty high order bytes when
2186 calculating the bit offset. */
2187 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2188 big_endian_correction
2189 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2190
 2191 /* Copy the structure BITSIZE bits at a time.
3a94c984 2192
19caa751
RK
2193 We could probably emit more efficient code for machines which do not use
2194 strict alignment, but it doesn't seem worth the effort at the current
2195 time. */
2196 for (bitpos = 0, xbitpos = big_endian_correction;
2197 bitpos < bytes * BITS_PER_UNIT;
2198 bitpos += bitsize, xbitpos += bitsize)
2199 {
3a94c984 2200 /* We need a new source operand each time xbitpos is on a
19caa751
RK
2201 word boundary and when xbitpos == big_endian_correction
2202 (the first time through). */
2203 if (xbitpos % BITS_PER_WORD == 0
2204 || xbitpos == big_endian_correction)
b47f8cfc
JH
2205 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2206 GET_MODE (srcreg));
19caa751
RK
2207
2208 /* We need a new destination operand each time bitpos is on
2209 a word boundary. */
2210 if (bitpos % BITS_PER_WORD == 0)
2211 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
3a94c984 2212
19caa751
RK
2213 /* Use xbitpos for the source extraction (right justified) and
 2214 bitpos for the destination store (left justified). */
2215 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2216 extract_bit_field (src, bitsize,
2217 xbitpos % BITS_PER_WORD, 1,
2218 NULL_RTX, word_mode, word_mode,
2219 bitsize, BITS_PER_WORD),
2220 bitsize, BITS_PER_WORD);
2221 }
2222
2223 return tgtblk;
c36fce9a
GRK
2224}
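/* Editor's illustrative sketch (not original): unpack a BLKmode
   function return value.  Passing a null TGTBLK makes the routine
   allocate a const-qualified stack temporary itself.

     rtx blk = copy_blkmode_from_reg (NULL_RTX, result_reg, type);
*/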
2225
94b25f81
RK
2226/* Add a USE expression for REG to the (possibly empty) list pointed
2227 to by CALL_FUSAGE. REG must denote a hard register. */
bbf6f052
RK
2228
2229void
b3f8cf4a
RK
2230use_reg (call_fusage, reg)
2231 rtx *call_fusage, reg;
2232{
0304dfbb
DE
2233 if (GET_CODE (reg) != REG
2234 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
3a94c984 2235 abort ();
b3f8cf4a
RK
2236
2237 *call_fusage
38a448ca
RH
2238 = gen_rtx_EXPR_LIST (VOIDmode,
2239 gen_rtx_USE (VOIDmode, reg), *call_fusage);
b3f8cf4a
RK
2240}
2241
94b25f81
RK
2242/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2243 starting at REGNO. All of these registers must be hard registers. */
b3f8cf4a
RK
2244
2245void
0304dfbb
DE
2246use_regs (call_fusage, regno, nregs)
2247 rtx *call_fusage;
bbf6f052
RK
2248 int regno;
2249 int nregs;
2250{
0304dfbb 2251 int i;
bbf6f052 2252
0304dfbb
DE
2253 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2254 abort ();
2255
2256 for (i = 0; i < nregs; i++)
38a448ca 2257 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
bbf6f052 2258}
fffa9c1d
JW
2259
2260/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2261 PARALLEL REGS. This is for calls that pass values in multiple
2262 non-contiguous locations. The Irix 6 ABI has examples of this. */
2263
2264void
2265use_group_regs (call_fusage, regs)
2266 rtx *call_fusage;
2267 rtx regs;
2268{
2269 int i;
2270
6bd35f86
DE
2271 for (i = 0; i < XVECLEN (regs, 0); i++)
2272 {
2273 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
fffa9c1d 2274
6bd35f86
DE
2275 /* A NULL entry means the parameter goes both on the stack and in
2276 registers. This can also be a MEM for targets that pass values
2277 partially on the stack and partially in registers. */
e9a25f70 2278 if (reg != 0 && GET_CODE (reg) == REG)
6bd35f86
DE
2279 use_reg (call_fusage, reg);
2280 }
fffa9c1d 2281}
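/* Editor's illustrative sketch (not original): recording register
   uses for a call, assuming hard registers 4 and 5 carry arguments
   (register numbers hypothetical).

     rtx call_fusage = NULL_RTX;
     use_regs (&call_fusage, 4, 2);

   CALL_FUSAGE is then attached to the CALL_INSN so flow analysis
   knows those registers are live at the call.  */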
bbf6f052 2282\f
57814e5e
JJ
2283
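/* Determine whether the LEN bytes generated by CONSTFUN can be
   stored to memory using several move instructions.  CONSTFUNDATA is
   a pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  Return nonzero if a
   call to store_by_pieces should succeed.  */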
2284int
2285can_store_by_pieces (len, constfun, constfundata, align)
2286 unsigned HOST_WIDE_INT len;
2287 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2288 PTR constfundata;
2289 unsigned int align;
2290{
98166639 2291 unsigned HOST_WIDE_INT max_size, l;
57814e5e
JJ
2292 HOST_WIDE_INT offset = 0;
2293 enum machine_mode mode, tmode;
2294 enum insn_code icode;
2295 int reverse;
2296 rtx cst;
2297
2298 if (! MOVE_BY_PIECES_P (len, align))
2299 return 0;
2300
2301 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2302 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2303 align = MOVE_MAX * BITS_PER_UNIT;
2304
2305 /* We would first store what we can in the largest integer mode, then go to
2306 successively smaller modes. */
2307
2308 for (reverse = 0;
2309 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2310 reverse++)
2311 {
2312 l = len;
2313 mode = VOIDmode;
98166639 2314 max_size = MOVE_MAX_PIECES + 1;
57814e5e
JJ
2315 while (max_size > 1)
2316 {
2317 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2318 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2319 if (GET_MODE_SIZE (tmode) < max_size)
2320 mode = tmode;
2321
2322 if (mode == VOIDmode)
2323 break;
2324
2325 icode = mov_optab->handlers[(int) mode].insn_code;
2326 if (icode != CODE_FOR_nothing
2327 && align >= GET_MODE_ALIGNMENT (mode))
2328 {
2329 unsigned int size = GET_MODE_SIZE (mode);
2330
2331 while (l >= size)
2332 {
2333 if (reverse)
2334 offset -= size;
2335
2336 cst = (*constfun) (constfundata, offset, mode);
2337 if (!LEGITIMATE_CONSTANT_P (cst))
2338 return 0;
2339
2340 if (!reverse)
2341 offset += size;
2342
2343 l -= size;
2344 }
2345 }
2346
2347 max_size = GET_MODE_SIZE (mode);
2348 }
2349
2350 /* The code above should have handled everything. */
2351 if (l != 0)
2352 abort ();
2353 }
2354
2355 return 1;
2356}
2357
2358/* Generate several move instructions to store LEN bytes generated by
2359 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2360 pointer which will be passed as argument in every CONSTFUN call.
2361 ALIGN is maximum alignment we can assume. */
2362
2363void
2364store_by_pieces (to, len, constfun, constfundata, align)
2365 rtx to;
2366 unsigned HOST_WIDE_INT len;
2367 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2368 PTR constfundata;
2369 unsigned int align;
2370{
2371 struct store_by_pieces data;
2372
2373 if (! MOVE_BY_PIECES_P (len, align))
2374 abort ();
2375 to = protect_from_queue (to, 1);
2376 data.constfun = constfun;
2377 data.constfundata = constfundata;
2378 data.len = len;
2379 data.to = to;
2380 store_by_pieces_1 (&data, align);
2381}
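/* Editor's illustrative sketch (not original): a guarded use of the
   by-pieces interface, reusing the trivial clear_by_pieces_1 callback
   defined below as the CONSTFUN.

     if (can_store_by_pieces (len, clear_by_pieces_1, NULL, align))
       store_by_pieces (to, len, clear_by_pieces_1, NULL, align);
     else
       clear_storage (to, GEN_INT (len), align);

   can_store_by_pieces returns zero whenever store_by_pieces would
   abort, and also when some generated constant is not legitimate, so
   the guard is required.  */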
2382
19caa751
RK
2383/* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2384 rtx with BLKmode). The caller must pass TO through protect_from_queue
2385 before calling. ALIGN is maximum alignment we can assume. */
9de08200
RK
2386
2387static void
2388clear_by_pieces (to, len, align)
2389 rtx to;
3bdf5ad1 2390 unsigned HOST_WIDE_INT len;
729a2125 2391 unsigned int align;
9de08200 2392{
57814e5e
JJ
2393 struct store_by_pieces data;
2394
2395 data.constfun = clear_by_pieces_1;
df4ae160 2396 data.constfundata = NULL;
57814e5e
JJ
2397 data.len = len;
2398 data.to = to;
2399 store_by_pieces_1 (&data, align);
2400}
2401
2402/* Callback routine for clear_by_pieces.
2403 Return const0_rtx unconditionally. */
2404
2405static rtx
2406clear_by_pieces_1 (data, offset, mode)
2407 PTR data ATTRIBUTE_UNUSED;
2408 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2409 enum machine_mode mode ATTRIBUTE_UNUSED;
2410{
2411 return const0_rtx;
2412}
2413
2414/* Subroutine of clear_by_pieces and store_by_pieces.
2415 Generate several move instructions to store LEN bytes of block TO. (A MEM
2416 rtx with BLKmode). The caller must pass TO through protect_from_queue
2417 before calling. ALIGN is maximum alignment we can assume. */
2418
2419static void
2420store_by_pieces_1 (data, align)
2421 struct store_by_pieces *data;
2422 unsigned int align;
2423{
2424 rtx to_addr = XEXP (data->to, 0);
3bdf5ad1 2425 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
fbe1758d
AM
2426 enum machine_mode mode = VOIDmode, tmode;
2427 enum insn_code icode;
9de08200 2428
57814e5e
JJ
2429 data->offset = 0;
2430 data->to_addr = to_addr;
2431 data->autinc_to
9de08200
RK
2432 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2433 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2434
57814e5e
JJ
2435 data->explicit_inc_to = 0;
2436 data->reverse
9de08200 2437 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
57814e5e
JJ
2438 if (data->reverse)
2439 data->offset = data->len;
9de08200 2440
57814e5e 2441 /* If storing requires more than two move insns,
9de08200
RK
2442 copy addresses to registers (to make displacements shorter)
2443 and use post-increment if available. */
57814e5e
JJ
2444 if (!data->autinc_to
2445 && move_by_pieces_ninsns (data->len, align) > 2)
9de08200 2446 {
3a94c984 2447 /* Determine the main mode we'll be using. */
fbe1758d
AM
2448 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2449 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2450 if (GET_MODE_SIZE (tmode) < max_size)
2451 mode = tmode;
2452
57814e5e 2453 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
9de08200 2454 {
57814e5e
JJ
2455 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2456 data->autinc_to = 1;
2457 data->explicit_inc_to = -1;
9de08200 2458 }
3bdf5ad1 2459
57814e5e
JJ
2460 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2461 && ! data->autinc_to)
9de08200 2462 {
57814e5e
JJ
2463 data->to_addr = copy_addr_to_reg (to_addr);
2464 data->autinc_to = 1;
2465 data->explicit_inc_to = 1;
9de08200 2466 }
3bdf5ad1 2467
57814e5e
JJ
2468 if ( !data->autinc_to && CONSTANT_P (to_addr))
2469 data->to_addr = copy_addr_to_reg (to_addr);
9de08200
RK
2470 }
2471
e1565e65 2472 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
19caa751 2473 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
bdb429a5 2474 align = MOVE_MAX * BITS_PER_UNIT;
9de08200 2475
57814e5e 2476 /* First store what we can in the largest integer mode, then go to
9de08200
RK
2477 successively smaller modes. */
2478
2479 while (max_size > 1)
2480 {
9de08200
RK
2481 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2482 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2483 if (GET_MODE_SIZE (tmode) < max_size)
2484 mode = tmode;
2485
2486 if (mode == VOIDmode)
2487 break;
2488
2489 icode = mov_optab->handlers[(int) mode].insn_code;
19caa751 2490 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
57814e5e 2491 store_by_pieces_2 (GEN_FCN (icode), mode, data);
9de08200
RK
2492
2493 max_size = GET_MODE_SIZE (mode);
2494 }
2495
2496 /* The code above should have handled everything. */
57814e5e 2497 if (data->len != 0)
9de08200
RK
2498 abort ();
2499}
2500
57814e5e 2501/* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
9de08200
RK
2502 with move instructions for mode MODE. GENFUN is the gen_... function
2503 to make a move insn for that mode. DATA has all the other info. */
2504
2505static void
57814e5e 2506store_by_pieces_2 (genfun, mode, data)
711d877c 2507 rtx (*genfun) PARAMS ((rtx, ...));
9de08200 2508 enum machine_mode mode;
57814e5e 2509 struct store_by_pieces *data;
9de08200 2510{
3bdf5ad1 2511 unsigned int size = GET_MODE_SIZE (mode);
57814e5e 2512 rtx to1, cst;
9de08200
RK
2513
2514 while (data->len >= size)
2515 {
3bdf5ad1
RK
2516 if (data->reverse)
2517 data->offset -= size;
9de08200 2518
3bdf5ad1
RK
2519 if (data->autinc_to)
2520 {
f1ec5147
RK
2521 to1 = replace_equiv_address (data->to, data->to_addr);
2522 to1 = adjust_address (to1, mode, 0);
3bdf5ad1 2523 }
3a94c984 2524 else
f4ef873c 2525 to1 = adjust_address (data->to, mode, data->offset);
9de08200 2526
940da324 2527 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
57814e5e
JJ
2528 emit_insn (gen_add2_insn (data->to_addr,
2529 GEN_INT (-(HOST_WIDE_INT) size)));
9de08200 2530
57814e5e
JJ
2531 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2532 emit_insn ((*genfun) (to1, cst));
3bdf5ad1 2533
940da324 2534 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
9de08200 2535 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
9de08200 2536
3bdf5ad1
RK
2537 if (! data->reverse)
2538 data->offset += size;
9de08200
RK
2539
2540 data->len -= size;
2541 }
2542}
2543\f
19caa751
RK
2544/* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
 2545 its length in bytes and ALIGN is the maximum alignment we can assume.
bbf6f052 2546
e9a25f70
JL
2547 If we call a function that returns the length of the block, return it. */
2548
2549rtx
9de08200 2550clear_storage (object, size, align)
bbf6f052 2551 rtx object;
4c08eef0 2552 rtx size;
729a2125 2553 unsigned int align;
bbf6f052 2554{
52cf7115
JL
2555#ifdef TARGET_MEM_FUNCTIONS
2556 static tree fn;
2557 tree call_expr, arg_list;
2558#endif
e9a25f70
JL
2559 rtx retval = 0;
2560
fcf1b822
RK
2561 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2562 just move a zero. Otherwise, do this a piece at a time. */
2563 if (GET_MODE (object) != BLKmode
2564 && GET_CODE (size) == CONST_INT
8752c357 2565 && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
fcf1b822
RK
2566 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2567 else
bbf6f052 2568 {
9de08200
RK
2569 object = protect_from_queue (object, 1);
2570 size = protect_from_queue (size, 0);
2571
2572 if (GET_CODE (size) == CONST_INT
fbe1758d 2573 && MOVE_BY_PIECES_P (INTVAL (size), align))
9de08200 2574 clear_by_pieces (object, INTVAL (size), align);
9de08200
RK
2575 else
2576 {
2577 /* Try the most limited insn first, because there's no point
2578 including more than one in the machine description unless
2579 the more limited one has some advantage. */
2580
19caa751 2581 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
9de08200
RK
2582 enum machine_mode mode;
2583
2584 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2585 mode = GET_MODE_WIDER_MODE (mode))
2586 {
2587 enum insn_code code = clrstr_optab[(int) mode];
a995e389 2588 insn_operand_predicate_fn pred;
9de08200
RK
2589
2590 if (code != CODE_FOR_nothing
2591 /* We don't need MODE to be narrower than
2592 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2593 the mode mask, as it is returned by the macro, it will
2594 definitely be less than the actual mode mask. */
2595 && ((GET_CODE (size) == CONST_INT
2596 && ((unsigned HOST_WIDE_INT) INTVAL (size)
e5e809f4 2597 <= (GET_MODE_MASK (mode) >> 1)))
9de08200 2598 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
a995e389
RH
2599 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2600 || (*pred) (object, BLKmode))
2601 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2602 || (*pred) (opalign, VOIDmode)))
9de08200
RK
2603 {
2604 rtx op1;
2605 rtx last = get_last_insn ();
2606 rtx pat;
2607
2608 op1 = convert_to_mode (mode, size, 1);
a995e389
RH
2609 pred = insn_data[(int) code].operand[1].predicate;
2610 if (pred != 0 && ! (*pred) (op1, mode))
9de08200
RK
2611 op1 = copy_to_mode_reg (mode, op1);
2612
2613 pat = GEN_FCN ((int) code) (object, op1, opalign);
2614 if (pat)
2615 {
2616 emit_insn (pat);
e9a25f70 2617 return 0;
9de08200
RK
2618 }
2619 else
2620 delete_insns_since (last);
2621 }
2622 }
2623
4bc973ae 2624 /* OBJECT or SIZE may have been passed through protect_from_queue.
9de08200 2625
4bc973ae
JL
2626 It is unsafe to save the value generated by protect_from_queue
2627 and reuse it later. Consider what happens if emit_queue is
2628 called before the return value from protect_from_queue is used.
52cf7115 2629
4bc973ae
JL
2630 Expansion of the CALL_EXPR below will call emit_queue before
2631 we are finished emitting RTL for argument setup. So if we are
2632 not careful we could get the wrong value for an argument.
52cf7115 2633
4bc973ae
JL
2634 To avoid this problem we go ahead and emit code to copy OBJECT
2635 and SIZE into new pseudos. We can then place those new pseudos
2636 into an RTL_EXPR and use them later, even after a call to
2637 emit_queue.
52cf7115 2638
4bc973ae
JL
2639 Note this is not strictly needed for library calls since they
2640 do not call emit_queue before loading their arguments. However,
2641 we may need to have library calls call emit_queue in the future
2642 since failing to do so could cause problems for targets which
2643 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2644 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
52cf7115 2645
4bc973ae
JL
2646#ifdef TARGET_MEM_FUNCTIONS
2647 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2648#else
2649 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2650 TREE_UNSIGNED (integer_type_node));
f3dc586a 2651 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
4bc973ae 2652#endif
52cf7115 2653
4bc973ae
JL
2654#ifdef TARGET_MEM_FUNCTIONS
2655 /* It is incorrect to use the libcall calling conventions to call
2656 memset in this context.
52cf7115 2657
4bc973ae
JL
2658 This could be a user call to memset and the user may wish to
2659 examine the return value from memset.
52cf7115 2660
4bc973ae
JL
2661 For targets where libcalls and normal calls have different
2662 conventions for returning pointers, we could end up generating
0d97bf4c 2663 incorrect code.
4bc973ae
JL
2664
2665 So instead of using a libcall sequence we build up a suitable
2666 CALL_EXPR and expand the call in the normal fashion. */
2667 if (fn == NULL_TREE)
2668 {
2669 tree fntype;
2670
2671 /* This was copied from except.c, I don't know if all this is
2672 necessary in this context or not. */
2673 fn = get_identifier ("memset");
4bc973ae
JL
2674 fntype = build_pointer_type (void_type_node);
2675 fntype = build_function_type (fntype, NULL_TREE);
2676 fn = build_decl (FUNCTION_DECL, fn, fntype);
d7db6646 2677 ggc_add_tree_root (&fn, 1);
4bc973ae
JL
2678 DECL_EXTERNAL (fn) = 1;
2679 TREE_PUBLIC (fn) = 1;
2680 DECL_ARTIFICIAL (fn) = 1;
0d97bf4c 2681 TREE_NOTHROW (fn) = 1;
6496a589 2682 make_decl_rtl (fn, NULL);
4bc973ae 2683 assemble_external (fn);
4bc973ae
JL
2684 }
2685
3a94c984 2686 /* We need to make an argument list for the function call.
4bc973ae
JL
2687
 2688 memset has three arguments, the first is a void * address, the
 2689 second an integer with the initialization value, and the last a
 2690 size_t byte count for the copy. */
2691 arg_list
2692 = build_tree_list (NULL_TREE,
2693 make_tree (build_pointer_type (void_type_node),
2694 object));
2695 TREE_CHAIN (arg_list)
2696 = build_tree_list (NULL_TREE,
3a94c984 2697 make_tree (integer_type_node, const0_rtx));
4bc973ae
JL
2698 TREE_CHAIN (TREE_CHAIN (arg_list))
2699 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2700 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2701
2702 /* Now we have to build up the CALL_EXPR itself. */
2703 call_expr = build1 (ADDR_EXPR,
2704 build_pointer_type (TREE_TYPE (fn)), fn);
2705 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2706 call_expr, arg_list, NULL_TREE);
2707 TREE_SIDE_EFFECTS (call_expr) = 1;
2708
2709 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
bbf6f052 2710#else
ebb1b59a 2711 emit_library_call (bzero_libfunc, LCT_NORMAL,
fe7bbd2a 2712 VOIDmode, 2, object, Pmode, size,
9de08200 2713 TYPE_MODE (integer_type_node));
bbf6f052 2714#endif
9de08200 2715 }
bbf6f052 2716 }
e9a25f70
JL
2717
2718 return retval;
bbf6f052
RK
2719}
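/* Editor's illustrative sketch (not original): zero a 24-byte stack
   temporary with the routine above, assuming 32-bit alignment.

     rtx mem = assign_stack_temp (BLKmode, 24, 0);
     clear_storage (mem, GEN_INT (24), 32);

   Small constant sizes go through clear_by_pieces; larger or variable
   sizes fall back to a clrstr pattern or a memset/bzero call.  */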
2720
2721/* Generate code to copy Y into X.
2722 Both Y and X must have the same mode, except that
2723 Y can be a constant with VOIDmode.
2724 This mode cannot be BLKmode; use emit_block_move for that.
2725
2726 Return the last instruction emitted. */
2727
2728rtx
2729emit_move_insn (x, y)
2730 rtx x, y;
2731{
2732 enum machine_mode mode = GET_MODE (x);
de1b33dd
AO
2733 rtx y_cst = NULL_RTX;
2734 rtx last_insn;
bbf6f052
RK
2735
2736 x = protect_from_queue (x, 1);
2737 y = protect_from_queue (y, 0);
2738
2739 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2740 abort ();
2741
ee5332b8
RH
2742 /* Never force constant_p_rtx to memory. */
2743 if (GET_CODE (y) == CONSTANT_P_RTX)
2744 ;
2745 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
de1b33dd
AO
2746 {
2747 y_cst = y;
2748 y = force_const_mem (mode, y);
2749 }
bbf6f052
RK
2750
2751 /* If X or Y are memory references, verify that their addresses are valid
2752 for the machine. */
2753 if (GET_CODE (x) == MEM
2754 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2755 && ! push_operand (x, GET_MODE (x)))
2756 || (flag_force_addr
2757 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
792760b9 2758 x = validize_mem (x);
bbf6f052
RK
2759
2760 if (GET_CODE (y) == MEM
2761 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2762 || (flag_force_addr
2763 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
792760b9 2764 y = validize_mem (y);
bbf6f052
RK
2765
2766 if (mode == BLKmode)
2767 abort ();
2768
de1b33dd
AO
2769 last_insn = emit_move_insn_1 (x, y);
2770
2771 if (y_cst && GET_CODE (x) == REG)
2772 REG_NOTES (last_insn)
2773 = gen_rtx_EXPR_LIST (REG_EQUAL, y_cst, REG_NOTES (last_insn));
2774
2775 return last_insn;
261c4230
RS
2776}
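/* Editor's illustrative sketch (not original): the common scalar case.

     rtx tmp = gen_reg_rtx (SImode);
     emit_move_insn (tmp, GEN_INT (42));

   A REG_EQUAL note recording the original constant is added when the
   constant had to be forced into memory first.  */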
2777
2778/* Low level part of emit_move_insn.
2779 Called just like emit_move_insn, but assumes X and Y
2780 are basically valid. */
2781
2782rtx
2783emit_move_insn_1 (x, y)
2784 rtx x, y;
2785{
2786 enum machine_mode mode = GET_MODE (x);
2787 enum machine_mode submode;
2788 enum mode_class class = GET_MODE_CLASS (mode);
770ae6cc 2789 unsigned int i;
261c4230 2790
dbbbbf3b 2791 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
3a94c984 2792 abort ();
76bbe028 2793
bbf6f052
RK
2794 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2795 return
2796 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2797
89742723 2798 /* Expand complex moves by moving real part and imag part, if possible. */
7308a047 2799 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
d0c76654
RK
2800 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2801 * BITS_PER_UNIT),
2802 (class == MODE_COMPLEX_INT
2803 ? MODE_INT : MODE_FLOAT),
2804 0))
7308a047
RS
2805 && (mov_optab->handlers[(int) submode].insn_code
2806 != CODE_FOR_nothing))
2807 {
2808 /* Don't split destination if it is a stack push. */
2809 int stack = push_operand (x, GET_MODE (x));
7308a047 2810
79ce92d7 2811#ifdef PUSH_ROUNDING
1a06f5fe
JH
 2812 /* In case we output to the stack, but the size is smaller than the
 2813 machine can push exactly, we need to use move instructions. */
2814 if (stack
2815 && PUSH_ROUNDING (GET_MODE_SIZE (submode)) != GET_MODE_SIZE (submode))
2816 {
2817 rtx temp;
2818 int offset1, offset2;
2819
2820 /* Do not use anti_adjust_stack, since we don't want to update
2821 stack_pointer_delta. */
2822 temp = expand_binop (Pmode,
2823#ifdef STACK_GROWS_DOWNWARD
2824 sub_optab,
2825#else
2826 add_optab,
2827#endif
2828 stack_pointer_rtx,
2829 GEN_INT
2830 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
2831 stack_pointer_rtx,
2832 0,
2833 OPTAB_LIB_WIDEN);
2834 if (temp != stack_pointer_rtx)
2835 emit_move_insn (stack_pointer_rtx, temp);
2836#ifdef STACK_GROWS_DOWNWARD
2837 offset1 = 0;
2838 offset2 = GET_MODE_SIZE (submode);
2839#else
2840 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2841 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2842 + GET_MODE_SIZE (submode));
2843#endif
2844 emit_move_insn (change_address (x, submode,
2845 gen_rtx_PLUS (Pmode,
2846 stack_pointer_rtx,
2847 GEN_INT (offset1))),
2848 gen_realpart (submode, y));
2849 emit_move_insn (change_address (x, submode,
2850 gen_rtx_PLUS (Pmode,
2851 stack_pointer_rtx,
2852 GEN_INT (offset2))),
2853 gen_imagpart (submode, y));
2854 }
e9c0bd54 2855 else
79ce92d7 2856#endif
7308a047
RS
2857 /* If this is a stack, push the highpart first, so it
2858 will be in the argument order.
2859
2860 In that case, change_address is used only to convert
2861 the mode, not to change the address. */
e9c0bd54 2862 if (stack)
c937357e 2863 {
e33c0d66
RS
2864 /* Note that the real part always precedes the imag part in memory
2865 regardless of machine's endianness. */
c937357e
RS
2866#ifdef STACK_GROWS_DOWNWARD
2867 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3bdf5ad1 2868 (gen_rtx_MEM (submode, XEXP (x, 0)),
e33c0d66 2869 gen_imagpart (submode, y)));
c937357e 2870 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3bdf5ad1 2871 (gen_rtx_MEM (submode, XEXP (x, 0)),
e33c0d66 2872 gen_realpart (submode, y)));
c937357e
RS
2873#else
2874 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3bdf5ad1 2875 (gen_rtx_MEM (submode, XEXP (x, 0)),
e33c0d66 2876 gen_realpart (submode, y)));
c937357e 2877 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3bdf5ad1 2878 (gen_rtx_MEM (submode, XEXP (x, 0)),
e33c0d66 2879 gen_imagpart (submode, y)));
c937357e
RS
2880#endif
2881 }
2882 else
2883 {
235ae7be
DM
2884 rtx realpart_x, realpart_y;
2885 rtx imagpart_x, imagpart_y;
2886
405f63da
MM
2887 /* If this is a complex value with each part being smaller than a
2888 word, the usual calling sequence will likely pack the pieces into
2889 a single register. Unfortunately, SUBREG of hard registers only
2890 deals in terms of words, so we have a problem converting input
2891 arguments to the CONCAT of two registers that is used elsewhere
2892 for complex values. If this is before reload, we can copy it into
2893 memory and reload. FIXME, we should see about using extract and
2894 insert on integer registers, but complex short and complex char
2895 variables should be rarely used. */
3a94c984 2896 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
405f63da
MM
2897 && (reload_in_progress | reload_completed) == 0)
2898 {
2899 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2900 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2901
2902 if (packed_dest_p || packed_src_p)
2903 {
2904 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2905 ? MODE_FLOAT : MODE_INT);
2906
1da68f56
RK
2907 enum machine_mode reg_mode
2908 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
405f63da
MM
2909
2910 if (reg_mode != BLKmode)
2911 {
2912 rtx mem = assign_stack_temp (reg_mode,
2913 GET_MODE_SIZE (mode), 0);
f4ef873c 2914 rtx cmem = adjust_address (mem, mode, 0);
405f63da 2915
1da68f56
RK
2916 cfun->cannot_inline
2917 = N_("function using short complex types cannot be inline");
405f63da
MM
2918
2919 if (packed_dest_p)
2920 {
2921 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2922 emit_move_insn_1 (cmem, y);
2923 return emit_move_insn_1 (sreg, mem);
2924 }
2925 else
2926 {
2927 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2928 emit_move_insn_1 (mem, sreg);
2929 return emit_move_insn_1 (x, cmem);
2930 }
2931 }
2932 }
2933 }
2934
235ae7be
DM
2935 realpart_x = gen_realpart (submode, x);
2936 realpart_y = gen_realpart (submode, y);
2937 imagpart_x = gen_imagpart (submode, x);
2938 imagpart_y = gen_imagpart (submode, y);
2939
2940 /* Show the output dies here. This is necessary for SUBREGs
2941 of pseudos since we cannot track their lifetimes correctly;
c14c6529
RH
2942 hard regs shouldn't appear here except as return values.
2943 We never want to emit such a clobber after reload. */
2944 if (x != y
235ae7be
DM
2945 && ! (reload_in_progress || reload_completed)
2946 && (GET_CODE (realpart_x) == SUBREG
2947 || GET_CODE (imagpart_x) == SUBREG))
b2e7e6fb 2948 {
c14c6529 2949 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
b2e7e6fb 2950 }
2638126a 2951
c937357e 2952 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
235ae7be 2953 (realpart_x, realpart_y));
c937357e 2954 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
235ae7be 2955 (imagpart_x, imagpart_y));
c937357e 2956 }
7308a047 2957
7a1ab50a 2958 return get_last_insn ();
7308a047
RS
2959 }
2960
bbf6f052
RK
2961 /* This will handle any multi-word mode that lacks a move_insn pattern.
2962 However, you will get better code if you define such patterns,
2963 even if they must turn into multiple assembler instructions. */
a4320483 2964 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
bbf6f052
RK
2965 {
2966 rtx last_insn = 0;
3ef1eef4 2967 rtx seq, inner;
235ae7be 2968 int need_clobber;
3a94c984 2969
a98c9f1a
RK
2970#ifdef PUSH_ROUNDING
2971
2972 /* If X is a push on the stack, do the push now and replace
2973 X with a reference to the stack pointer. */
2974 if (push_operand (x, GET_MODE (x)))
2975 {
918a6124
GK
2976 rtx temp;
2977 enum rtx_code code;
2978
2979 /* Do not use anti_adjust_stack, since we don't want to update
2980 stack_pointer_delta. */
2981 temp = expand_binop (Pmode,
2982#ifdef STACK_GROWS_DOWNWARD
2983 sub_optab,
2984#else
2985 add_optab,
2986#endif
2987 stack_pointer_rtx,
2988 GEN_INT
2989 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
2990 stack_pointer_rtx,
2991 0,
2992 OPTAB_LIB_WIDEN);
2993 if (temp != stack_pointer_rtx)
2994 emit_move_insn (stack_pointer_rtx, temp);
2995
2996 code = GET_CODE (XEXP (x, 0));
2997 /* Just hope that small offsets off SP are OK. */
2998 if (code == POST_INC)
2999 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3000 GEN_INT (-(HOST_WIDE_INT)
3001 GET_MODE_SIZE (GET_MODE (x))));
3002 else if (code == POST_DEC)
3003 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3004 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3005 else
3006 temp = stack_pointer_rtx;
3007
3008 x = change_address (x, VOIDmode, temp);
a98c9f1a
RK
3009 }
3010#endif
3a94c984 3011
3ef1eef4
RK
3012 /* If we are in reload, see if either operand is a MEM whose address
3013 is scheduled for replacement. */
3014 if (reload_in_progress && GET_CODE (x) == MEM
3015 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
f1ec5147 3016 x = replace_equiv_address_nv (x, inner);
3ef1eef4
RK
3017 if (reload_in_progress && GET_CODE (y) == MEM
3018 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
f1ec5147 3019 y = replace_equiv_address_nv (y, inner);
3ef1eef4 3020
235ae7be 3021 start_sequence ();
15a7a8ec 3022
235ae7be 3023 need_clobber = 0;
bbf6f052 3024 for (i = 0;
3a94c984 3025 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
bbf6f052
RK
3026 i++)
3027 {
3028 rtx xpart = operand_subword (x, i, 1, mode);
3029 rtx ypart = operand_subword (y, i, 1, mode);
3030
3031 /* If we can't get a part of Y, put Y into memory if it is a
3032 constant. Otherwise, force it into a register. If we still
3033 can't get a part of Y, abort. */
3034 if (ypart == 0 && CONSTANT_P (y))
3035 {
3036 y = force_const_mem (mode, y);
3037 ypart = operand_subword (y, i, 1, mode);
3038 }
3039 else if (ypart == 0)
3040 ypart = operand_subword_force (y, i, mode);
3041
3042 if (xpart == 0 || ypart == 0)
3043 abort ();
3044
235ae7be
DM
3045 need_clobber |= (GET_CODE (xpart) == SUBREG);
3046
bbf6f052
RK
3047 last_insn = emit_move_insn (xpart, ypart);
3048 }
6551fa4d 3049
235ae7be
DM
3050 seq = gen_sequence ();
3051 end_sequence ();
3052
3053 /* Show the output dies here. This is necessary for SUBREGs
3054 of pseudos since we cannot track their lifetimes correctly;
3055 hard regs shouldn't appear here except as return values.
3056 We never want to emit such a clobber after reload. */
3057 if (x != y
3058 && ! (reload_in_progress || reload_completed)
3059 && need_clobber != 0)
3060 {
3061 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3062 }
3063
3064 emit_insn (seq);
3065
bbf6f052
RK
3066 return last_insn;
3067 }
3068 else
3069 abort ();
3070}
3071\f
3072/* Pushing data onto the stack. */
3073
3074/* Push a block of length SIZE (perhaps variable)
3075 and return an rtx to address the beginning of the block.
3076 Note that it is not possible for the value returned to be a QUEUED.
3077 The value may be virtual_outgoing_args_rtx.
3078
3079 EXTRA is the number of bytes of padding to push in addition to SIZE.
3080 BELOW nonzero means this padding comes at low addresses;
3081 otherwise, the padding comes at high addresses. */
3082
3083rtx
3084push_block (size, extra, below)
3085 rtx size;
3086 int extra, below;
3087{
3088 register rtx temp;
88f63c77
RK
3089
3090 size = convert_modes (Pmode, ptr_mode, size, 1);
bbf6f052
RK
3091 if (CONSTANT_P (size))
3092 anti_adjust_stack (plus_constant (size, extra));
3093 else if (GET_CODE (size) == REG && extra == 0)
3094 anti_adjust_stack (size);
3095 else
3096 {
ce48579b 3097 temp = copy_to_mode_reg (Pmode, size);
bbf6f052 3098 if (extra != 0)
906c4e36 3099 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
bbf6f052
RK
3100 temp, 0, OPTAB_LIB_WIDEN);
3101 anti_adjust_stack (temp);
3102 }
3103
f73ad30e
JH
3104#ifndef STACK_GROWS_DOWNWARD
3105#ifdef ARGS_GROW_DOWNWARD
3106 if (!ACCUMULATE_OUTGOING_ARGS)
bbf6f052 3107#else
f73ad30e
JH
3108 if (0)
3109#endif
3110#else
3111 if (1)
bbf6f052 3112#endif
f73ad30e 3113 {
f73ad30e
JH
3114 /* Return the lowest stack address when STACK or ARGS grow downward and
 3115 we are not accumulating outgoing arguments (the c4x port uses such
3116 conventions). */
3117 temp = virtual_outgoing_args_rtx;
3118 if (extra != 0 && below)
3119 temp = plus_constant (temp, extra);
3120 }
3121 else
3122 {
3123 if (GET_CODE (size) == CONST_INT)
3124 temp = plus_constant (virtual_outgoing_args_rtx,
3a94c984 3125 -INTVAL (size) - (below ? 0 : extra));
f73ad30e
JH
3126 else if (extra != 0 && !below)
3127 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3bdf5ad1 3128 negate_rtx (Pmode, plus_constant (size, extra)));
f73ad30e
JH
3129 else
3130 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3131 negate_rtx (Pmode, size));
3132 }
bbf6f052
RK
3133
3134 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3135}
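/* Editor's illustrative sketch (not original): reserve 32 bytes of
   outgoing-argument space and obtain its base address.

     rtx block = push_block (GEN_INT (32), 0, 0);

   The result may be virtual_outgoing_args_rtx and is never a QUEUED,
   as the comment above notes.  */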
3136
bbf6f052 3137
921b3427
RK
 3138/* Return an rtx for the address of the beginning of an as-if-it-was-pushed
3139 block of SIZE bytes. */
3140
3141static rtx
3142get_push_address (size)
3a94c984 3143 int size;
921b3427
RK
3144{
3145 register rtx temp;
3146
3147 if (STACK_PUSH_CODE == POST_DEC)
38a448ca 3148 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
921b3427 3149 else if (STACK_PUSH_CODE == POST_INC)
38a448ca 3150 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
921b3427
RK
3151 else
3152 temp = stack_pointer_rtx;
3153
c85f7c16 3154 return copy_to_reg (temp);
921b3427
RK
3155}
3156
21d93687
RK
3157#ifdef PUSH_ROUNDING
3158
566aa174 3159/* Emit single push insn. */
21d93687 3160
566aa174
JH
3161static void
3162emit_single_push_insn (mode, x, type)
3163 rtx x;
3164 enum machine_mode mode;
3165 tree type;
3166{
566aa174 3167 rtx dest_addr;
918a6124 3168 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
566aa174 3169 rtx dest;
371b8fc0
JH
3170 enum insn_code icode;
3171 insn_operand_predicate_fn pred;
566aa174 3172
371b8fc0
JH
3173 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
 3174 /* If there is a push pattern, use it. Otherwise try the old way of
 3175 throwing a MEM representing the push operation to the move expander. */
3176 icode = push_optab->handlers[(int) mode].insn_code;
3177 if (icode != CODE_FOR_nothing)
3178 {
3179 if (((pred = insn_data[(int) icode].operand[0].predicate)
3180 && !((*pred) (x, mode))))
3181 x = force_reg (mode, x);
3182 emit_insn (GEN_FCN (icode) (x));
3183 return;
3184 }
566aa174
JH
3185 if (GET_MODE_SIZE (mode) == rounded_size)
3186 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3187 else
3188 {
3189#ifdef STACK_GROWS_DOWNWARD
3190 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
918a6124 3191 GEN_INT (-(HOST_WIDE_INT)rounded_size));
566aa174
JH
3192#else
3193 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3194 GEN_INT (rounded_size));
3195#endif
3196 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3197 }
3198
3199 dest = gen_rtx_MEM (mode, dest_addr);
3200
566aa174
JH
3201 if (type != 0)
3202 {
3203 set_mem_attributes (dest, type, 1);
3204 /* Function incoming arguments may overlap with sibling call
3205 outgoing arguments and we cannot allow reordering of reads
3206 from function arguments with stores to outgoing arguments
3207 of sibling calls. */
ba4828e0 3208 set_mem_alias_set (dest, 0);
566aa174
JH
3209 }
3210 emit_move_insn (dest, x);
566aa174 3211}
21d93687 3212#endif
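/* Editor's note (illustrative): under PUSH_ROUNDING the routine above
   pushes a single value, e.g.

     emit_single_push_insn (SImode, GEN_INT (1), NULL_TREE);

   preferring a target push pattern via push_optab and otherwise
   storing through a STACK_PUSH_CODE or PRE_MODIFY address.  */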
566aa174 3213
bbf6f052
RK
3214/* Generate code to push X onto the stack, assuming it has mode MODE and
3215 type TYPE.
3216 MODE is redundant except when X is a CONST_INT (since they don't
3217 carry mode info).
3218 SIZE is an rtx for the size of data to be copied (in bytes),
3219 needed only if X is BLKmode.
3220
f1eaaf73 3221 ALIGN (in bits) is maximum alignment we can assume.
bbf6f052 3222
cd048831
RK
3223 If PARTIAL and REG are both nonzero, then copy that many of the first
3224 words of X into registers starting with REG, and push the rest of X.
bbf6f052
RK
3225 The amount of space pushed is decreased by PARTIAL words,
3226 rounded *down* to a multiple of PARM_BOUNDARY.
3227 REG must be a hard register in this case.
cd048831
RK
 3228 If REG is zero but PARTIAL is not, take all other actions for an
3229 argument partially in registers, but do not actually load any
3230 registers.
bbf6f052
RK
3231
3232 EXTRA is the amount in bytes of extra space to leave next to this arg.
6dc42e49 3233 This is ignored if an argument block has already been allocated.
bbf6f052
RK
3234
3235 On a machine that lacks real push insns, ARGS_ADDR is the address of
3236 the bottom of the argument block for this call. We use indexing off there
3237 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
3238 argument block has not been preallocated.
3239
e5e809f4
JL
3240 ARGS_SO_FAR is the size of args previously pushed for this call.
3241
3242 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3243 for arguments passed in registers. If nonzero, it will be the number
3244 of bytes required. */
bbf6f052
RK
3245
3246void
3247emit_push_insn (x, mode, type, size, align, partial, reg, extra,
4fc026cd
CM
3248 args_addr, args_so_far, reg_parm_stack_space,
3249 alignment_pad)
bbf6f052
RK
3250 register rtx x;
3251 enum machine_mode mode;
3252 tree type;
3253 rtx size;
729a2125 3254 unsigned int align;
bbf6f052
RK
3255 int partial;
3256 rtx reg;
3257 int extra;
3258 rtx args_addr;
3259 rtx args_so_far;
e5e809f4 3260 int reg_parm_stack_space;
4fc026cd 3261 rtx alignment_pad;
bbf6f052
RK
3262{
3263 rtx xinner;
3264 enum direction stack_direction
3265#ifdef STACK_GROWS_DOWNWARD
3266 = downward;
3267#else
3268 = upward;
3269#endif
3270
3271 /* Decide where to pad the argument: `downward' for below,
3272 `upward' for above, or `none' for don't pad it.
3273 Default is below for small data on big-endian machines; else above. */
3274 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3275
3276 /* Invert direction if stack is post-update. */
3277 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
3278 if (where_pad != none)
3279 where_pad = (where_pad == downward ? upward : downward);
3280
3281 xinner = x = protect_from_queue (x, 0);
3282
3283 if (mode == BLKmode)
3284 {
3285 /* Copy a block into the stack, entirely or partially. */
3286
3287 register rtx temp;
3288 int used = partial * UNITS_PER_WORD;
3289 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3290 int skip;
3a94c984 3291
bbf6f052
RK
3292 if (size == 0)
3293 abort ();
3294
3295 used -= offset;
3296
3297 /* USED is now the # of bytes we need not copy to the stack
3298 because registers will take care of them. */
3299
3300 if (partial != 0)
f4ef873c 3301 xinner = adjust_address (xinner, BLKmode, used);
bbf6f052
RK
3302
3303 /* If the partial register-part of the arg counts in its stack size,
3304 skip the part of stack space corresponding to the registers.
3305 Otherwise, start copying to the beginning of the stack space,
3306 by setting SKIP to 0. */
e5e809f4 3307 skip = (reg_parm_stack_space == 0) ? 0 : used;
bbf6f052
RK
3308
3309#ifdef PUSH_ROUNDING
3310 /* Do it with several push insns if that doesn't take lots of insns
3311 and if there is no difficulty with push insns that skip bytes
3312 on the stack for alignment purposes. */
3313 if (args_addr == 0
f73ad30e 3314 && PUSH_ARGS
bbf6f052
RK
3315 && GET_CODE (size) == CONST_INT
3316 && skip == 0
15914757 3317 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
bbf6f052
RK
3318 /* Here we avoid the case of a structure whose weak alignment
3319 forces many pushes of a small amount of data,
3320 and such small pushes do rounding that causes trouble. */
e1565e65 3321 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
19caa751 3322 || align >= BIGGEST_ALIGNMENT
f1eaaf73
DE
3323 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3324 == (align / BITS_PER_UNIT)))
bbf6f052
RK
3325 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3326 {
3327 /* Push padding now if padding above and stack grows down,
3328 or if padding below and stack grows up.
3329 But if space already allocated, this has already been done. */
3330 if (extra && args_addr == 0
3331 && where_pad != none && where_pad != stack_direction)
906c4e36 3332 anti_adjust_stack (GEN_INT (extra));
bbf6f052 3333
566aa174 3334 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
921b3427 3335
7d384cc0 3336 if (current_function_check_memory_usage && ! in_check_memory_usage)
921b3427
RK
3337 {
3338 rtx temp;
3a94c984 3339
956d6950 3340 in_check_memory_usage = 1;
3a94c984 3341 temp = get_push_address (INTVAL (size) - used);
c85f7c16 3342 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
ebb1b59a
BS
3343 emit_library_call (chkr_copy_bitmap_libfunc,
3344 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3345 Pmode, XEXP (xinner, 0), Pmode,
3a94c984 3346 GEN_INT (INTVAL (size) - used),
921b3427
RK
3347 TYPE_MODE (sizetype));
3348 else
ebb1b59a
BS
3349 emit_library_call (chkr_set_right_libfunc,
3350 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3351 Pmode, GEN_INT (INTVAL (size) - used),
921b3427 3352 TYPE_MODE (sizetype),
956d6950
JL
3353 GEN_INT (MEMORY_USE_RW),
3354 TYPE_MODE (integer_type_node));
3355 in_check_memory_usage = 0;
921b3427 3356 }
bbf6f052
RK
3357 }
3358 else
3a94c984 3359#endif /* PUSH_ROUNDING */
bbf6f052 3360 {
7ab923cc
JJ
3361 rtx target;
3362
bbf6f052
RK
3363 /* Otherwise make space on the stack and copy the data
3364 to the address of that space. */
3365
3366 /* Deduct words put into registers from the size we must copy. */
3367 if (partial != 0)
3368 {
3369 if (GET_CODE (size) == CONST_INT)
906c4e36 3370 size = GEN_INT (INTVAL (size) - used);
bbf6f052
RK
3371 else
3372 size = expand_binop (GET_MODE (size), sub_optab, size,
906c4e36
RK
3373 GEN_INT (used), NULL_RTX, 0,
3374 OPTAB_LIB_WIDEN);
bbf6f052
RK
3375 }
3376
3377 /* Get the address of the stack space.
3378 In this case, we do not deal with EXTRA separately.
3379 A single stack adjust will do. */
3380 if (! args_addr)
3381 {
3382 temp = push_block (size, extra, where_pad == downward);
3383 extra = 0;
3384 }
3385 else if (GET_CODE (args_so_far) == CONST_INT)
3386 temp = memory_address (BLKmode,
3387 plus_constant (args_addr,
3388 skip + INTVAL (args_so_far)));
3389 else
3390 temp = memory_address (BLKmode,
38a448ca
RH
3391 plus_constant (gen_rtx_PLUS (Pmode,
3392 args_addr,
3393 args_so_far),
bbf6f052 3394 skip));
7d384cc0 3395 if (current_function_check_memory_usage && ! in_check_memory_usage)
921b3427 3396 {
956d6950 3397 in_check_memory_usage = 1;
921b3427 3398 target = copy_to_reg (temp);
c85f7c16 3399 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
ebb1b59a
BS
3400 emit_library_call (chkr_copy_bitmap_libfunc,
3401 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
6a9c4aed
MK
3402 target, Pmode,
3403 XEXP (xinner, 0), Pmode,
921b3427
RK
3404 size, TYPE_MODE (sizetype));
3405 else
ebb1b59a
BS
3406 emit_library_call (chkr_set_right_libfunc,
3407 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
6a9c4aed 3408 target, Pmode,
921b3427 3409 size, TYPE_MODE (sizetype),
956d6950
JL
3410 GEN_INT (MEMORY_USE_RW),
3411 TYPE_MODE (integer_type_node));
3412 in_check_memory_usage = 0;
921b3427 3413 }
bbf6f052 3414
3a94c984 3415 target = gen_rtx_MEM (BLKmode, temp);
7ab923cc 3416
3a94c984
KH
3417 if (type != 0)
3418 {
3419 set_mem_attributes (target, type, 1);
3420 /* Function incoming arguments may overlap with sibling call
3421 outgoing arguments and we cannot allow reordering of reads
3422 from function arguments with stores to outgoing arguments
3423 of sibling calls. */
ba4828e0 3424 set_mem_alias_set (target, 0);
3a94c984 3425 }
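	    /* Hypothetical example of the hazard above (editorial): in a
	       sibling call such as `return f (a->x);' the read of A->X, an
	       incoming argument slot, must not be reordered past the store
	       filling F's outgoing argument, which may reuse the very same
	       stack slot; alias set 0 forces the two references to
	       conflict.  */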
7ab923cc 3426
bbf6f052
RK
3427 /* TEMP is the address of the block. Copy the data there. */
3428 if (GET_CODE (size) == CONST_INT
729a2125 3429 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
bbf6f052 3430 {
7ab923cc 3431 move_by_pieces (target, xinner, INTVAL (size), align);
bbf6f052
RK
3432 goto ret;
3433 }
e5e809f4 3434 else
bbf6f052 3435 {
19caa751 3436 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
e5e809f4 3437 enum machine_mode mode;
3bdf5ad1 3438
e5e809f4
JL
3439 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3440 mode != VOIDmode;
3441 mode = GET_MODE_WIDER_MODE (mode))
c841050e 3442 {
e5e809f4 3443 enum insn_code code = movstr_optab[(int) mode];
a995e389 3444 insn_operand_predicate_fn pred;
e5e809f4
JL
3445
3446 if (code != CODE_FOR_nothing
3447 && ((GET_CODE (size) == CONST_INT
3448 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3449 <= (GET_MODE_MASK (mode) >> 1)))
3450 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
a995e389
RH
3451 && (!(pred = insn_data[(int) code].operand[0].predicate)
3452 || ((*pred) (target, BLKmode)))
3453 && (!(pred = insn_data[(int) code].operand[1].predicate)
3454 || ((*pred) (xinner, BLKmode)))
3455 && (!(pred = insn_data[(int) code].operand[3].predicate)
3456 || ((*pred) (opalign, VOIDmode))))
e5e809f4
JL
3457 {
3458 rtx op2 = convert_to_mode (mode, size, 1);
3459 rtx last = get_last_insn ();
3460 rtx pat;
3461
a995e389
RH
3462 pred = insn_data[(int) code].operand[2].predicate;
3463 if (pred != 0 && ! (*pred) (op2, mode))
e5e809f4
JL
3464 op2 = copy_to_mode_reg (mode, op2);
3465
3466 pat = GEN_FCN ((int) code) (target, xinner,
3467 op2, opalign);
3468 if (pat)
3469 {
3470 emit_insn (pat);
3471 goto ret;
3472 }
3473 else
3474 delete_insns_since (last);
3475 }
c841050e 3476 }
bbf6f052 3477 }
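	  /* Editorial note on the loop above: it walks the integer modes
	     from narrowest to widest looking for a movstrM pattern whose
	     count operand can hold SIZE; movstrqi, for instance, is only
	     tried for a constant size <= GET_MODE_MASK (QImode) >> 1.
	     When a generator fails, delete_insns_since discards the trial
	     insns and the next wider mode is attempted.  */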
bbf6f052 3478
f73ad30e
JH
3479 if (!ACCUMULATE_OUTGOING_ARGS)
3480 {
3481 /* If the source is referenced relative to the stack pointer,
3482 copy it to another register to stabilize it. We do not need
3483 to do this if we know that we won't be changing sp. */
bbf6f052 3484
f73ad30e
JH
3485 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3486 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3487 temp = copy_to_reg (temp);
3488 }
bbf6f052
RK
3489
3490 /* Make inhibit_defer_pop nonzero around the library call
3491 to force it to pop the bcopy-arguments right away. */
3492 NO_DEFER_POP;
3493#ifdef TARGET_MEM_FUNCTIONS
ebb1b59a 3494 emit_library_call (memcpy_libfunc, LCT_NORMAL,
bbf6f052 3495 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
0fa83258
RK
3496 convert_to_mode (TYPE_MODE (sizetype),
3497 size, TREE_UNSIGNED (sizetype)),
26ba80fc 3498 TYPE_MODE (sizetype));
bbf6f052 3499#else
ebb1b59a 3500 emit_library_call (bcopy_libfunc, LCT_NORMAL,
bbf6f052 3501 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3b6f75e2
JW
3502 convert_to_mode (TYPE_MODE (integer_type_node),
3503 size,
3504 TREE_UNSIGNED (integer_type_node)),
3505 TYPE_MODE (integer_type_node));
bbf6f052
RK
3506#endif
3507 OK_DEFER_POP;
3508 }
3509 }
3510 else if (partial > 0)
3511 {
3512 /* Scalar partly in registers. */
3513
3514 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3515 int i;
3516 int not_stack;
3517 /* # words of start of argument
3518 that we must make space for but need not store. */
3519 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3520 int args_offset = INTVAL (args_so_far);
3521 int skip;
3522
3523 /* Push padding now if padding above and stack grows down,
3524 or if padding below and stack grows up.
3525 But if space already allocated, this has already been done. */
3526 if (extra && args_addr == 0
3527 && where_pad != none && where_pad != stack_direction)
906c4e36 3528 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
3529
3530 /* If we make space by pushing it, we might as well push
3531 the real data. Otherwise, we can leave OFFSET nonzero
3532 and leave the space uninitialized. */
3533 if (args_addr == 0)
3534 offset = 0;
3535
3536 /* Now NOT_STACK gets the number of words that we don't need to
3537 allocate on the stack. */
3538 not_stack = partial - offset;
3539
3540 /* If the partial register-part of the arg counts in its stack size,
3541 skip the part of stack space corresponding to the registers.
3542 Otherwise, start copying to the beginning of the stack space,
3543 by setting SKIP to 0. */
e5e809f4 3544 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
bbf6f052
RK
3545
3546 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3547 x = validize_mem (force_const_mem (mode, x));
3548
3549 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3550 SUBREGs of such registers are not allowed. */
3551 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3552 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3553 x = copy_to_reg (x);
3554
3555 /* Loop over all the words allocated on the stack for this arg. */
3556 /* We can do it by words, because any scalar bigger than a word
3557 has a size that is a multiple of a word. */
3558#ifndef PUSH_ARGS_REVERSED
3559 for (i = not_stack; i < size; i++)
3560#else
3561 for (i = size - 1; i >= not_stack; i--)
3562#endif
3563 if (i >= not_stack + offset)
3564 emit_push_insn (operand_subword_force (x, i, mode),
906c4e36
RK
3565 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3566 0, args_addr,
3567 GEN_INT (args_offset + ((i - not_stack + skip)
e5e809f4 3568 * UNITS_PER_WORD)),
4fc026cd 3569 reg_parm_stack_space, alignment_pad);
bbf6f052
RK
3570 }
3571 else
3572 {
3573 rtx addr;
921b3427 3574 rtx target = NULL_RTX;
3bdf5ad1 3575 rtx dest;
bbf6f052
RK
3576
3577 /* Push padding now if padding above and stack grows down,
3578 or if padding below and stack grows up.
3579 But if space already allocated, this has already been done. */
3580 if (extra && args_addr == 0
3581 && where_pad != none && where_pad != stack_direction)
906c4e36 3582 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
3583
3584#ifdef PUSH_ROUNDING
f73ad30e 3585 if (args_addr == 0 && PUSH_ARGS)
566aa174 3586 emit_single_push_insn (mode, x, type);
bbf6f052
RK
3587 else
3588#endif
921b3427
RK
3589 {
3590 if (GET_CODE (args_so_far) == CONST_INT)
3591 addr
3592 = memory_address (mode,
3a94c984 3593 plus_constant (args_addr,
921b3427 3594 INTVAL (args_so_far)));
3a94c984 3595 else
38a448ca
RH
3596 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3597 args_so_far));
921b3427 3598 target = addr;
566aa174
JH
3599 dest = gen_rtx_MEM (mode, addr);
3600 if (type != 0)
3601 {
3602 set_mem_attributes (dest, type, 1);
3603 /* Function incoming arguments may overlap with sibling call
3604 outgoing arguments and we cannot allow reordering of reads
3605 from function arguments with stores to outgoing arguments
3606 of sibling calls. */
ba4828e0 3607 set_mem_alias_set (dest, 0);
566aa174 3608 }
bbf6f052 3609
566aa174 3610 emit_move_insn (dest, x);
3bdf5ad1 3611
566aa174 3612 }
921b3427 3613
7d384cc0 3614 if (current_function_check_memory_usage && ! in_check_memory_usage)
921b3427 3615 {
956d6950 3616 in_check_memory_usage = 1;
921b3427
RK
3617 if (target == 0)
3618 target = get_push_address (GET_MODE_SIZE (mode));
3619
c85f7c16 3620 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
ebb1b59a
BS
3621 emit_library_call (chkr_copy_bitmap_libfunc,
3622 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3623 Pmode, XEXP (x, 0), Pmode,
921b3427
RK
3624 GEN_INT (GET_MODE_SIZE (mode)),
3625 TYPE_MODE (sizetype));
3626 else
ebb1b59a
BS
3627 emit_library_call (chkr_set_right_libfunc,
3628 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3629 Pmode, GEN_INT (GET_MODE_SIZE (mode)),
921b3427 3630 TYPE_MODE (sizetype),
956d6950
JL
3631 GEN_INT (MEMORY_USE_RW),
3632 TYPE_MODE (integer_type_node));
3633 in_check_memory_usage = 0;
921b3427 3634 }
bbf6f052
RK
3635 }
3636
3637 ret:
3638 /* If part should go in registers, copy that part
3639 into the appropriate registers. Do this now, at the end,
3640 since mem-to-mem copies above may do function calls. */
cd048831 3641 if (partial > 0 && reg != 0)
fffa9c1d
JW
3642 {
3643 /* Handle calls that pass values in multiple non-contiguous locations.
3644 The Irix 6 ABI has examples of this. */
3645 if (GET_CODE (reg) == PARALLEL)
aac5cc16 3646 emit_group_load (reg, x, -1, align); /* ??? size? */
fffa9c1d
JW
3647 else
3648 move_block_to_reg (REGNO (reg), x, partial, mode);
3649 }
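  /* Editorial example of a PARALLEL reg: an ABI may split one argument
     across unlike registers, e.g.
	(parallel [(expr_list (reg:DF $f0) (const_int 0))
		   (expr_list (reg:DI $4) (const_int 8))])
     (register names hypothetical), and emit_group_load then moves each
     piece of X at the given byte offset into its own register.  */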
bbf6f052
RK
3650
3651 if (extra && args_addr == 0 && where_pad == stack_direction)
906c4e36 3652 anti_adjust_stack (GEN_INT (extra));
3a94c984 3653
3ea2292a 3654 if (alignment_pad && args_addr == 0)
4fc026cd 3655 anti_adjust_stack (alignment_pad);
bbf6f052
RK
3656}
3657\f
296b4ed9
RK
3658/* Return X if X can be used as a subtarget in a sequence of arithmetic
3659 operations. */
3660
3661static rtx
3662get_subtarget (x)
3663 rtx x;
3664{
3665 return ((x == 0
3666 /* Only registers can be subtargets. */
3667 || GET_CODE (x) != REG
3668 /* If the register is readonly, it can't be set more than once. */
3669 || RTX_UNCHANGING_P (x)
3670 /* Don't use hard regs to avoid extending their life. */
3671 || REGNO (x) < FIRST_PSEUDO_REGISTER
3672 /* Avoid subtargets inside loops,
3673 since they hide some invariant expressions. */
3674 || preserve_subexpressions_p ())
3675 ? 0 : x);
3676}
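/* Hypothetical usage sketch (editorial): an expander computing A = B + C
   can pass `get_subtarget (a_rtx)' as the target of expand_binop; a zero
   result simply makes expand_binop allocate a fresh pseudo instead of
   reusing A's rtx as scratch.  */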
3677
bbf6f052
RK
3678/* Expand an assignment that stores the value of FROM into TO.
3679 If WANT_VALUE is nonzero, return an rtx for the value of TO.
709f5be1
RS
3680 (This may contain a QUEUED rtx;
3681 if the value is constant, this rtx is a constant.)
3682 Otherwise, the returned value is NULL_RTX.
bbf6f052
RK
3683
3684 SUGGEST_REG is no longer actually used.
3685 It used to mean, copy the value through a register
3686 and return that register, if that is possible.
709f5be1 3687 We now use WANT_VALUE to decide whether to do this. */
bbf6f052
RK
3688
3689rtx
3690expand_assignment (to, from, want_value, suggest_reg)
3691 tree to, from;
3692 int want_value;
c5c76735 3693 int suggest_reg ATTRIBUTE_UNUSED;
bbf6f052
RK
3694{
3695 register rtx to_rtx = 0;
3696 rtx result;
3697
3698 /* Don't crash if the lhs of the assignment was erroneous. */
3699
3700 if (TREE_CODE (to) == ERROR_MARK)
709f5be1
RS
3701 {
3702 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3703 return want_value ? result : NULL_RTX;
3704 }
bbf6f052
RK
3705
3706 /* Assignment of a structure component needs special treatment
3707 if the structure component's rtx is not simply a MEM.
6be58303
JW
3708 Assignment of an array element at a constant index, and assignment of
3709 an array element in an unaligned packed structure field, has the same
3710 problem. */
bbf6f052 3711
08293add 3712 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
b4e3fabb 3713 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
bbf6f052
RK
3714 {
3715 enum machine_mode mode1;
770ae6cc 3716 HOST_WIDE_INT bitsize, bitpos;
7bb0943f 3717 tree offset;
bbf6f052
RK
3718 int unsignedp;
3719 int volatilep = 0;
0088fcb1 3720 tree tem;
729a2125 3721 unsigned int alignment;
0088fcb1
RK
3722
3723 push_temp_slots ();
839c4796
RK
3724 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3725 &unsignedp, &volatilep, &alignment);
bbf6f052
RK
3726
3727 /* If we are going to use store_bit_field and extract_bit_field,
3728 make sure to_rtx will be safe for multiple use. */
3729
3730 if (mode1 == VOIDmode && want_value)
3731 tem = stabilize_reference (tem);
3732
921b3427 3733 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
7bb0943f
RS
3734 if (offset != 0)
3735 {
906c4e36 3736 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7bb0943f
RS
3737
3738 if (GET_CODE (to_rtx) != MEM)
3739 abort ();
bd070e1a
RH
3740
3741 if (GET_MODE (offset_rtx) != ptr_mode)
3742 {
3743#ifdef POINTERS_EXTEND_UNSIGNED
822a3443 3744 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
bd070e1a
RH
3745#else
3746 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3747#endif
3748 }
3749
9a7b9f4f
JL
3750 /* A constant address in TO_RTX can have VOIDmode, we must not try
3751 to call force_reg for that case. Avoid that case. */
89752202
HB
3752 if (GET_CODE (to_rtx) == MEM
3753 && GET_MODE (to_rtx) == BLKmode
9a7b9f4f 3754 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
89752202 3755 && bitsize
3a94c984 3756 && (bitpos % bitsize) == 0
89752202 3757 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
19caa751 3758 && alignment == GET_MODE_ALIGNMENT (mode1))
89752202 3759 {
f4ef873c
RK
3760 rtx temp
3761 = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3762
89752202
HB
3763 if (GET_CODE (XEXP (temp, 0)) == REG)
3764 to_rtx = temp;
3765 else
792760b9
RK
3766 to_rtx = (replace_equiv_address
3767 (to_rtx, force_reg (GET_MODE (XEXP (temp, 0)),
3768 XEXP (temp, 0))));
89752202
HB
3769 bitpos = 0;
3770 }
3771
7bb0943f 3772 to_rtx = change_address (to_rtx, VOIDmode,
38a448ca 3773 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
c5c76735
JL
3774 force_reg (ptr_mode,
3775 offset_rtx)));
7bb0943f 3776 }
c5c76735 3777
bbf6f052
RK
3778 if (volatilep)
3779 {
3780 if (GET_CODE (to_rtx) == MEM)
01188446
JW
3781 {
3782 /* When the offset is zero, to_rtx is the address of the
3783 structure we are storing into, and hence may be shared.
3784 We must make a new MEM before setting the volatile bit. */
3785 if (offset == 0)
effbcc6a
RK
3786 to_rtx = copy_rtx (to_rtx);
3787
01188446
JW
3788 MEM_VOLATILE_P (to_rtx) = 1;
3789 }
bbf6f052
RK
3790#if 0 /* This was turned off because, when a field is volatile
3791 in an object which is not volatile, the object may be in a register,
3792 and then we would abort here. */
3793 else
3794 abort ();
3795#endif
3796 }
3797
956d6950
JL
3798 if (TREE_CODE (to) == COMPONENT_REF
3799 && TREE_READONLY (TREE_OPERAND (to, 1)))
3800 {
8bd6ecc2 3801 if (offset == 0)
956d6950
JL
3802 to_rtx = copy_rtx (to_rtx);
3803
3804 RTX_UNCHANGING_P (to_rtx) = 1;
3805 }
3806
921b3427 3807 /* Check the access. */
7d384cc0 3808 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
921b3427
RK
3809 {
3810 rtx to_addr;
3811 int size;
3812 int best_mode_size;
3813 enum machine_mode best_mode;
3814
3815 best_mode = get_best_mode (bitsize, bitpos,
3816 TYPE_ALIGN (TREE_TYPE (tem)),
3817 mode1, volatilep);
3818 if (best_mode == VOIDmode)
3819 best_mode = QImode;
3820
3821 best_mode_size = GET_MODE_BITSIZE (best_mode);
3822 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3823 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3824 size *= GET_MODE_SIZE (best_mode);
3825
3826 /* Check the access right of the pointer. */
ea4da9db 3827 in_check_memory_usage = 1;
e9a25f70 3828 if (size)
ebb1b59a
BS
3829 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
3830 VOIDmode, 3, to_addr, Pmode,
e9a25f70 3831 GEN_INT (size), TYPE_MODE (sizetype),
956d6950
JL
3832 GEN_INT (MEMORY_USE_WO),
3833 TYPE_MODE (integer_type_node));
ea4da9db 3834 in_check_memory_usage = 0;
921b3427
RK
3835 }
3836
a69beca1
RK
3837 /* If this is a varying-length object, we must get the address of
3838 the source and do an explicit block move. */
3839 if (bitsize < 0)
3840 {
3841 unsigned int from_align;
3842 rtx from_rtx = expand_expr_unaligned (from, &from_align);
3843 rtx inner_to_rtx
f4ef873c 3844 = adjust_address (to_rtx, BLKmode, bitpos / BITS_PER_UNIT);
a69beca1
RK
3845
3846 emit_block_move (inner_to_rtx, from_rtx, expr_size (from),
19caa751 3847 MIN (alignment, from_align));
a69beca1
RK
3848 free_temp_slots ();
3849 pop_temp_slots ();
3850 return to_rtx;
3851 }
3852 else
3853 {
3854 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3855 (want_value
3856 /* Spurious cast for HPUX compiler. */
3857 ? ((enum machine_mode)
3858 TYPE_MODE (TREE_TYPE (to)))
3859 : VOIDmode),
3860 unsignedp,
a69beca1
RK
3861 alignment,
3862 int_size_in_bytes (TREE_TYPE (tem)),
3863 get_alias_set (to));
3864
3865 preserve_temp_slots (result);
3866 free_temp_slots ();
3867 pop_temp_slots ();
3868
3869 /* If the value is meaningful, convert RESULT to the proper mode.
3870 Otherwise, return nothing. */
3871 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3872 TYPE_MODE (TREE_TYPE (from)),
3873 result,
3874 TREE_UNSIGNED (TREE_TYPE (to)))
3875 : NULL_RTX);
3876 }
bbf6f052
RK
3877 }
3878
cd1db108
RS
3879 /* If the rhs is a function call and its value is not an aggregate,
3880 call the function before we start to compute the lhs.
3881 This is needed for correct code for cases such as
3882 val = setjmp (buf) on machines where reference to val
1ad87b63
RK
3883 requires loading up part of an address in a separate insn.
3884
1858863b
JW
3885 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3886 since it might be a promoted variable where the zero- or sign- extension
3887 needs to be done. Handling this in the normal way is safe because no
3888 computation is done before the call. */
1ad87b63 3889 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
b35cd3c1 3890 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
1858863b
JW
3891 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3892 && GET_CODE (DECL_RTL (to)) == REG))
cd1db108 3893 {
0088fcb1
RK
3894 rtx value;
3895
3896 push_temp_slots ();
3897 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
cd1db108 3898 if (to_rtx == 0)
921b3427 3899 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
aaf87c45 3900
fffa9c1d
JW
3901 /* Handle calls that return values in multiple non-contiguous locations.
3902 The Irix 6 ABI has examples of this. */
3903 if (GET_CODE (to_rtx) == PARALLEL)
aac5cc16 3904 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
19caa751 3905 TYPE_ALIGN (TREE_TYPE (from)));
fffa9c1d 3906 else if (GET_MODE (to_rtx) == BLKmode)
db3ec607 3907 emit_block_move (to_rtx, value, expr_size (from),
19caa751 3908 TYPE_ALIGN (TREE_TYPE (from)));
aaf87c45 3909 else
6419e5b0
DT
3910 {
3911#ifdef POINTERS_EXTEND_UNSIGNED
ab40f612
DT
3912 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3913 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
6419e5b0
DT
3914 value = convert_memory_address (GET_MODE (to_rtx), value);
3915#endif
3916 emit_move_insn (to_rtx, value);
3917 }
cd1db108
RS
3918 preserve_temp_slots (to_rtx);
3919 free_temp_slots ();
0088fcb1 3920 pop_temp_slots ();
709f5be1 3921 return want_value ? to_rtx : NULL_RTX;
cd1db108
RS
3922 }
3923
bbf6f052
RK
3924 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3925 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3926
3927 if (to_rtx == 0)
41472af8
MM
3928 {
3929 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3930 if (GET_CODE (to_rtx) == MEM)
ba4828e0 3931 set_mem_alias_set (to_rtx, get_alias_set (to));
41472af8 3932 }
bbf6f052 3933
86d38d25 3934 /* Don't move directly into a return register. */
14a774a9
RK
3935 if (TREE_CODE (to) == RESULT_DECL
3936 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
86d38d25 3937 {
0088fcb1
RK
3938 rtx temp;
3939
3940 push_temp_slots ();
3941 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
14a774a9
RK
3942
3943 if (GET_CODE (to_rtx) == PARALLEL)
3944 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
19caa751 3945 TYPE_ALIGN (TREE_TYPE (from)));
14a774a9
RK
3946 else
3947 emit_move_insn (to_rtx, temp);
3948
86d38d25
RS
3949 preserve_temp_slots (to_rtx);
3950 free_temp_slots ();
0088fcb1 3951 pop_temp_slots ();
709f5be1 3952 return want_value ? to_rtx : NULL_RTX;
86d38d25
RS
3953 }
3954
bbf6f052
RK
3955 /* In case we are returning the contents of an object which overlaps
3956 the place the value is being stored, use a safe function when copying
3957 a value through a pointer into a structure value return block. */
3958 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3959 && current_function_returns_struct
3960 && !current_function_returns_pcc_struct)
3961 {
0088fcb1
RK
3962 rtx from_rtx, size;
3963
3964 push_temp_slots ();
33a20d10 3965 size = expr_size (from);
921b3427
RK
3966 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3967 EXPAND_MEMORY_USE_DONT);
3968
3969 /* Copy the rights of the bitmap. */
7d384cc0 3970 if (current_function_check_memory_usage)
ebb1b59a
BS
3971 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
3972 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
6a9c4aed 3973 XEXP (from_rtx, 0), Pmode,
921b3427
RK
3974 convert_to_mode (TYPE_MODE (sizetype),
3975 size, TREE_UNSIGNED (sizetype)),
3976 TYPE_MODE (sizetype));
bbf6f052
RK
3977
3978#ifdef TARGET_MEM_FUNCTIONS
b215b52e 3979 emit_library_call (memmove_libfunc, LCT_NORMAL,
bbf6f052
RK
3980 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3981 XEXP (from_rtx, 0), Pmode,
0fa83258
RK
3982 convert_to_mode (TYPE_MODE (sizetype),
3983 size, TREE_UNSIGNED (sizetype)),
26ba80fc 3984 TYPE_MODE (sizetype));
bbf6f052 3985#else
ebb1b59a 3986 emit_library_call (bcopy_libfunc, LCT_NORMAL,
bbf6f052
RK
3987 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3988 XEXP (to_rtx, 0), Pmode,
3b6f75e2
JW
3989 convert_to_mode (TYPE_MODE (integer_type_node),
3990 size, TREE_UNSIGNED (integer_type_node)),
3991 TYPE_MODE (integer_type_node));
bbf6f052
RK
3992#endif
3993
3994 preserve_temp_slots (to_rtx);
3995 free_temp_slots ();
0088fcb1 3996 pop_temp_slots ();
709f5be1 3997 return want_value ? to_rtx : NULL_RTX;
bbf6f052
RK
3998 }
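  /* Editorial example of the overlap handled above: in
     `struct S f () { return *p; }' P may point into f's own return-value
     block, so a forward memcpy could clobber source bytes before reading
     them; memmove (or bcopy, which likewise tolerates overlap) is safe.  */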
3999
4000 /* Compute FROM and store the value in the rtx we got. */
4001
0088fcb1 4002 push_temp_slots ();
bbf6f052
RK
4003 result = store_expr (from, to_rtx, want_value);
4004 preserve_temp_slots (result);
4005 free_temp_slots ();
0088fcb1 4006 pop_temp_slots ();
709f5be1 4007 return want_value ? result : NULL_RTX;
bbf6f052
RK
4008}
4009
4010/* Generate code for computing expression EXP,
4011 and storing the value into TARGET.
bbf6f052
RK
4012 TARGET may contain a QUEUED rtx.
4013
709f5be1
RS
4014 If WANT_VALUE is nonzero, return a copy of the value
4015 not in TARGET, so that we can be sure to use the proper
4016 value in a containing expression even if TARGET has something
4017 else stored in it. If possible, we copy the value through a pseudo
4018 and return that pseudo. Or, if the value is constant, we try to
4019 return the constant. In some cases, we return a pseudo
4020 copied *from* TARGET.
4021
4022 If the mode is BLKmode then we may return TARGET itself.
4023 It turns out that in BLKmode it doesn't cause a problem,
4024 because C has no operators that could combine two different
4025 assignments into the same BLKmode object with different values
4026 with no sequence point. Will other languages need this to
4027 be more thorough?
4028
4029 If WANT_VALUE is 0, we return NULL, to make sure
4030 to catch quickly any cases where the caller uses the value
4031 and fails to set WANT_VALUE. */
bbf6f052
RK
4032
4033rtx
709f5be1 4034store_expr (exp, target, want_value)
bbf6f052
RK
4035 register tree exp;
4036 register rtx target;
709f5be1 4037 int want_value;
bbf6f052
RK
4038{
4039 register rtx temp;
4040 int dont_return_target = 0;
e5408e52 4041 int dont_store_target = 0;
bbf6f052
RK
4042
4043 if (TREE_CODE (exp) == COMPOUND_EXPR)
4044 {
4045 /* Perform first part of compound expression, then assign from second
4046 part. */
4047 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4048 emit_queue ();
709f5be1 4049 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
bbf6f052
RK
4050 }
4051 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4052 {
4053 /* For conditional expression, get safe form of the target. Then
4054 test the condition, doing the appropriate assignment on either
4055 side. This avoids the creation of unnecessary temporaries.
4056 For non-BLKmode, it is more efficient not to do this. */
4057
4058 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4059
4060 emit_queue ();
4061 target = protect_from_queue (target, 1);
4062
dabf8373 4063 do_pending_stack_adjust ();
bbf6f052
RK
4064 NO_DEFER_POP;
4065 jumpifnot (TREE_OPERAND (exp, 0), lab1);
956d6950 4066 start_cleanup_deferral ();
709f5be1 4067 store_expr (TREE_OPERAND (exp, 1), target, 0);
956d6950 4068 end_cleanup_deferral ();
bbf6f052
RK
4069 emit_queue ();
4070 emit_jump_insn (gen_jump (lab2));
4071 emit_barrier ();
4072 emit_label (lab1);
956d6950 4073 start_cleanup_deferral ();
709f5be1 4074 store_expr (TREE_OPERAND (exp, 2), target, 0);
956d6950 4075 end_cleanup_deferral ();
bbf6f052
RK
4076 emit_queue ();
4077 emit_label (lab2);
4078 OK_DEFER_POP;
a3a58acc 4079
709f5be1 4080 return want_value ? target : NULL_RTX;
bbf6f052 4081 }
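  /* Editorial sketch of the code emitted by the COND_EXPR arm above:

	   jumpifnot COND -> lab1
	   <store the THEN value into TARGET>
	   jump lab2
	 lab1:
	   <store the ELSE value into TARGET>
	 lab2:

     each arm assigns TARGET directly, so no BLKmode temporary is ever
     materialized.  */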
bbf6f052 4082 else if (queued_subexp_p (target))
709f5be1
RS
4083 /* If target contains a postincrement, let's not risk
4084 using it as the place to generate the rhs. */
bbf6f052
RK
4085 {
4086 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4087 {
4088 /* Expand EXP into a new pseudo. */
4089 temp = gen_reg_rtx (GET_MODE (target));
4090 temp = expand_expr (exp, temp, GET_MODE (target), 0);
4091 }
4092 else
906c4e36 4093 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
709f5be1
RS
4094
4095 /* If target is volatile, ANSI requires accessing the value
4096 *from* the target, if it is accessed. So make that happen.
4097 In no case return the target itself. */
4098 if (! MEM_VOLATILE_P (target) && want_value)
4099 dont_return_target = 1;
bbf6f052 4100 }
12f06d17
CH
4101 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
4102 && GET_MODE (target) != BLKmode)
4103 /* If target is in memory and caller wants value in a register instead,
4104 arrange that. Pass TARGET as target for expand_expr so that,
4105 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4106 We know expand_expr will not use the target in that case.
4107 Don't do this if TARGET is volatile because we are supposed
4108 to write it and then read it. */
4109 {
1da93fe0 4110 temp = expand_expr (exp, target, GET_MODE (target), 0);
12f06d17 4111 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
e5408e52
JJ
4112 {
4113 /* If TEMP is already in the desired TARGET, only copy it from
4114 memory and don't store it there again. */
4115 if (temp == target
4116 || (rtx_equal_p (temp, target)
4117 && ! side_effects_p (temp) && ! side_effects_p (target)))
4118 dont_store_target = 1;
4119 temp = copy_to_reg (temp);
4120 }
12f06d17
CH
4121 dont_return_target = 1;
4122 }
1499e0a8
RK
4123 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4124 /* If this is a scalar in a register that is stored in a wider mode
4125 than the declared mode, compute the result into its declared mode
4126 and then convert to the wider mode. Our value is the computed
4127 expression. */
4128 {
5a32d038 4129 /* If we don't want a value, we can do the conversion inside EXP,
f635a84d
RK
4130 which will often result in some optimizations. Do the conversion
4131 in two steps: first change the signedness, if needed, then
ab6c58f1
RK
4132 the extend. But don't do this if the type of EXP is a subtype
4133 of something else since then the conversion might involve
4134 more than just converting modes. */
4135 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4136 && TREE_TYPE (TREE_TYPE (exp)) == 0)
f635a84d
RK
4137 {
4138 if (TREE_UNSIGNED (TREE_TYPE (exp))
4139 != SUBREG_PROMOTED_UNSIGNED_P (target))
4140 exp
4141 = convert
4142 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
4143 TREE_TYPE (exp)),
4144 exp);
4145
4146 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
4147 SUBREG_PROMOTED_UNSIGNED_P (target)),
4148 exp);
4149 }
3a94c984 4150
1499e0a8 4151 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
b258707c 4152
766f36c7 4153 /* If TEMP is a volatile MEM and we want a result value, make
f29369b9
RK
4154 the access now so it gets done only once. Likewise if
4155 it contains TARGET. */
4156 if (GET_CODE (temp) == MEM && want_value
4157 && (MEM_VOLATILE_P (temp)
4158 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
766f36c7
RK
4159 temp = copy_to_reg (temp);
4160
b258707c
RS
4161 /* If TEMP is a VOIDmode constant, use convert_modes to make
4162 sure that we properly convert it. */
4163 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4164 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4165 TYPE_MODE (TREE_TYPE (exp)), temp,
4166 SUBREG_PROMOTED_UNSIGNED_P (target));
4167
1499e0a8
RK
4168 convert_move (SUBREG_REG (target), temp,
4169 SUBREG_PROMOTED_UNSIGNED_P (target));
3dbecef9
JW
4170
4171 /* If we promoted a constant, change the mode back down to match
4172 target. Otherwise, the caller might get confused by a result whose
4173 mode is larger than expected. */
4174
4175 if (want_value && GET_MODE (temp) != GET_MODE (target)
4176 && GET_MODE (temp) != VOIDmode)
4177 {
ddef6bc7 4178 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
3dbecef9
JW
4179 SUBREG_PROMOTED_VAR_P (temp) = 1;
4180 SUBREG_PROMOTED_UNSIGNED_P (temp)
4181 = SUBREG_PROMOTED_UNSIGNED_P (target);
4182 }
4183
709f5be1 4184 return want_value ? temp : NULL_RTX;
1499e0a8 4185 }
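  /* Editorial example of the promoted-SUBREG case above: a `short'
     variable promoted into an SImode pseudo has DECL_RTL of the form
     (subreg:HI (reg:SI N) 0) with SUBREG_PROMOTED_VAR_P set; the value is
     computed in HImode and convert_move then sign- or zero-extends it
     into the whole SImode register, as SUBREG_PROMOTED_UNSIGNED_P
     directs.  */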
bbf6f052
RK
4186 else
4187 {
4188 temp = expand_expr (exp, target, GET_MODE (target), 0);
766f36c7 4189 /* Return TARGET if it's a specified hardware register.
709f5be1
RS
4190 If TARGET is a volatile mem ref, either return TARGET
4191 or return a reg copied *from* TARGET; ANSI requires this.
4192
4193 Otherwise, if TEMP is not TARGET, return TEMP
4194 if it is constant (for efficiency),
4195 or if we really want the correct value. */
bbf6f052
RK
4196 if (!(target && GET_CODE (target) == REG
4197 && REGNO (target) < FIRST_PSEUDO_REGISTER)
709f5be1 4198 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
effbcc6a 4199 && ! rtx_equal_p (temp, target)
709f5be1 4200 && (CONSTANT_P (temp) || want_value))
bbf6f052
RK
4201 dont_return_target = 1;
4202 }
4203
b258707c
RS
4204 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4205 the same as that of TARGET, adjust the constant. This is needed, for
4206 example, in case it is a CONST_DOUBLE and we want only a word-sized
4207 value. */
4208 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
c1da1f33 4209 && TREE_CODE (exp) != ERROR_MARK
b258707c
RS
4210 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4211 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4212 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4213
7d384cc0 4214 if (current_function_check_memory_usage
921b3427
RK
4215 && GET_CODE (target) == MEM
4216 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
4217 {
ea4da9db 4218 in_check_memory_usage = 1;
921b3427 4219 if (GET_CODE (temp) == MEM)
ebb1b59a
BS
4220 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
4221 VOIDmode, 3, XEXP (target, 0), Pmode,
6a9c4aed 4222 XEXP (temp, 0), Pmode,
921b3427
RK
4223 expr_size (exp), TYPE_MODE (sizetype));
4224 else
ebb1b59a
BS
4225 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
4226 VOIDmode, 3, XEXP (target, 0), Pmode,
921b3427 4227 expr_size (exp), TYPE_MODE (sizetype),
3a94c984 4228 GEN_INT (MEMORY_USE_WO),
956d6950 4229 TYPE_MODE (integer_type_node));
ea4da9db 4230 in_check_memory_usage = 0;
921b3427
RK
4231 }
4232
bbf6f052
RK
4233 /* If value was not generated in the target, store it there.
4234 Convert the value to TARGET's type first if nec. */
f3f2255a
R
4235 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
4236 one or both of them are volatile memory refs, we have to distinguish
4237 two cases:
4238 - expand_expr has used TARGET. In this case, we must not generate
4239 another copy. This can be detected by TARGET being equal according
4240 to == .
4241 - expand_expr has not used TARGET - that means that the source just
4242 happens to have the same RTX form. Since temp will have been created
4243 by expand_expr, it will compare unequal according to == .
4244 We must generate a copy in this case, to reach the correct number
4245 of volatile memory references. */
bbf6f052 4246
6036acbb 4247 if ((! rtx_equal_p (temp, target)
f3f2255a
R
4248 || (temp != target && (side_effects_p (temp)
4249 || side_effects_p (target))))
e5408e52
JJ
4250 && TREE_CODE (exp) != ERROR_MARK
4251 && ! dont_store_target)
bbf6f052
RK
4252 {
4253 target = protect_from_queue (target, 1);
4254 if (GET_MODE (temp) != GET_MODE (target)
f0348c25 4255 && GET_MODE (temp) != VOIDmode)
bbf6f052
RK
4256 {
4257 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4258 if (dont_return_target)
4259 {
4260 /* In this case, we will return TEMP,
4261 so make sure it has the proper mode.
4262 But don't forget to store the value into TARGET. */
4263 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4264 emit_move_insn (target, temp);
4265 }
4266 else
4267 convert_move (target, temp, unsignedp);
4268 }
4269
4270 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4271 {
4272 /* Handle copying a string constant into an array.
4273 The string constant may be shorter than the array.
4274 So copy just the string's actual length, and clear the rest. */
4275 rtx size;
22619c3f 4276 rtx addr;
bbf6f052 4277
e87b4f3f
RS
4278 /* Get the size of the data type of the string,
4279 which is actually the size of the target. */
4280 size = expr_size (exp);
4281 if (GET_CODE (size) == CONST_INT
4282 && INTVAL (size) < TREE_STRING_LENGTH (exp))
19caa751 4283 emit_block_move (target, temp, size, TYPE_ALIGN (TREE_TYPE (exp)));
e87b4f3f 4284 else
bbf6f052 4285 {
e87b4f3f
RS
4286 /* Compute the size of the data to copy from the string. */
4287 tree copy_size
c03b7665 4288 = size_binop (MIN_EXPR,
b50d17a1 4289 make_tree (sizetype, size),
fed3cef0 4290 size_int (TREE_STRING_LENGTH (exp)));
f9e158c3 4291 unsigned int align = TYPE_ALIGN (TREE_TYPE (exp));
906c4e36
RK
4292 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4293 VOIDmode, 0);
e87b4f3f
RS
4294 rtx label = 0;
4295
4296 /* Copy that much. */
4297 emit_block_move (target, temp, copy_size_rtx,
19caa751 4298 TYPE_ALIGN (TREE_TYPE (exp)));
e87b4f3f 4299
88f63c77
RK
4300 /* Figure out how much is left in TARGET that we have to clear.
4301 Do all calculations in ptr_mode. */
4302
4303 addr = XEXP (target, 0);
4304 addr = convert_modes (ptr_mode, Pmode, addr, 1);
4305
e87b4f3f
RS
4306 if (GET_CODE (copy_size_rtx) == CONST_INT)
4307 {
88f63c77 4308 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3a94c984 4309 size = plus_constant (size, -TREE_STRING_LENGTH (exp));
8752c357
AJ
4310 align = MIN (align,
4311 (unsigned int) (BITS_PER_UNIT
4312 * (INTVAL (copy_size_rtx)
4313 & - INTVAL (copy_size_rtx))));
e87b4f3f
RS
4314 }
4315 else
4316 {
88f63c77
RK
4317 addr = force_reg (ptr_mode, addr);
4318 addr = expand_binop (ptr_mode, add_optab, addr,
906c4e36
RK
4319 copy_size_rtx, NULL_RTX, 0,
4320 OPTAB_LIB_WIDEN);
e87b4f3f 4321
88f63c77 4322 size = expand_binop (ptr_mode, sub_optab, size,
906c4e36
RK
4323 copy_size_rtx, NULL_RTX, 0,
4324 OPTAB_LIB_WIDEN);
e87b4f3f 4325
2a5b96fd 4326 align = BITS_PER_UNIT;
e87b4f3f 4327 label = gen_label_rtx ();
c5d5d461
JL
4328 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4329 GET_MODE (size), 0, 0, label);
e87b4f3f 4330 }
2a5b96fd 4331 align = MIN (align, expr_align (copy_size));
e87b4f3f
RS
4332
4333 if (size != const0_rtx)
4334 {
3bdf5ad1
RK
4335 rtx dest = gen_rtx_MEM (BLKmode, addr);
4336
4337 MEM_COPY_ATTRIBUTES (dest, target);
4338
921b3427 4339 /* Be sure we can write on ADDR. */
ea4da9db 4340 in_check_memory_usage = 1;
7d384cc0 4341 if (current_function_check_memory_usage)
ebb1b59a
BS
4342 emit_library_call (chkr_check_addr_libfunc,
4343 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
6a9c4aed 4344 addr, Pmode,
921b3427 4345 size, TYPE_MODE (sizetype),
3a94c984 4346 GEN_INT (MEMORY_USE_WO),
956d6950 4347 TYPE_MODE (integer_type_node));
ea4da9db 4348 in_check_memory_usage = 0;
051ffad5 4349 clear_storage (dest, size, align);
e87b4f3f 4350 }
22619c3f 4351
e87b4f3f
RS
4352 if (label)
4353 emit_label (label);
bbf6f052
RK
4354 }
4355 }
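	  /* Worked example (editorial): for `char buf[8] = "hi";' the
	     string's TREE_STRING_LENGTH is 3, counting the trailing null,
	     so the code above block-copies 3 bytes and clears the
	     remaining 5 bytes of BUF.  */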
fffa9c1d
JW
4356 /* Handle calls that return values in multiple non-contiguous locations.
4357 The Irix 6 ABI has examples of this. */
4358 else if (GET_CODE (target) == PARALLEL)
aac5cc16 4359 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
19caa751 4360 TYPE_ALIGN (TREE_TYPE (exp)));
bbf6f052
RK
4361 else if (GET_MODE (temp) == BLKmode)
4362 emit_block_move (target, temp, expr_size (exp),
19caa751 4363 TYPE_ALIGN (TREE_TYPE (exp)));
bbf6f052
RK
4364 else
4365 emit_move_insn (target, temp);
4366 }
709f5be1 4367
766f36c7
RK
4368 /* If we don't want a value, return NULL_RTX. */
4369 if (! want_value)
4370 return NULL_RTX;
4371
4372 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4373 ??? The latter test doesn't seem to make sense. */
4374 else if (dont_return_target && GET_CODE (temp) != MEM)
bbf6f052 4375 return temp;
766f36c7
RK
4376
4377 /* Return TARGET itself if it is a hard register. */
4378 else if (want_value && GET_MODE (target) != BLKmode
4379 && ! (GET_CODE (target) == REG
4380 && REGNO (target) < FIRST_PSEUDO_REGISTER))
709f5be1 4381 return copy_to_reg (target);
3a94c984 4382
766f36c7 4383 else
709f5be1 4384 return target;
bbf6f052
RK
4385}
4386\f
9de08200
RK
4387/* Return 1 if EXP just contains zeros. */
4388
4389static int
4390is_zeros_p (exp)
4391 tree exp;
4392{
4393 tree elt;
4394
4395 switch (TREE_CODE (exp))
4396 {
4397 case CONVERT_EXPR:
4398 case NOP_EXPR:
4399 case NON_LVALUE_EXPR:
4400 return is_zeros_p (TREE_OPERAND (exp, 0));
4401
4402 case INTEGER_CST:
05bccae2 4403 return integer_zerop (exp);
9de08200
RK
4404
4405 case COMPLEX_CST:
4406 return
4407 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4408
4409 case REAL_CST:
41c9120b 4410 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
9de08200
RK
4411
4412 case CONSTRUCTOR:
e1a43f73
PB
4413 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4414 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
9de08200
RK
4415 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4416 if (! is_zeros_p (TREE_VALUE (elt)))
4417 return 0;
4418
4419 return 1;
3a94c984 4420
e9a25f70
JL
4421 default:
4422 return 0;
9de08200 4423 }
9de08200
RK
4424}
4425
4426/* Return 1 if EXP contains mostly (3/4) zeros. */
4427
4428static int
4429mostly_zeros_p (exp)
4430 tree exp;
4431{
9de08200
RK
4432 if (TREE_CODE (exp) == CONSTRUCTOR)
4433 {
e1a43f73
PB
4434 int elts = 0, zeros = 0;
4435 tree elt = CONSTRUCTOR_ELTS (exp);
4436 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4437 {
4438 /* If there are no ranges of true bits, it is all zero. */
4439 return elt == NULL_TREE;
4440 }
4441 for (; elt; elt = TREE_CHAIN (elt))
4442 {
4443 /* We do not handle the case where the index is a RANGE_EXPR,
4444 so the statistic will be somewhat inaccurate.
4445 We do make a more accurate count in store_constructor itself,
4446 and since this function is only used for nested array elements,
0f41302f 4447 this should be close enough. */
e1a43f73
PB
4448 if (mostly_zeros_p (TREE_VALUE (elt)))
4449 zeros++;
4450 elts++;
4451 }
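      /* Editorial arithmetic note: the test below is the integer form of
	 zeros / elts >= 3/4; e.g. 12 mostly-zero elements out of 16
	 satisfy it, since 4 * 12 >= 3 * 16.  */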
9de08200
RK
4452
4453 return 4 * zeros >= 3 * elts;
4454 }
4455
4456 return is_zeros_p (exp);
4457}
4458\f
e1a43f73
PB
4459/* Helper function for store_constructor.
4460 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4461 TYPE is the type of the CONSTRUCTOR, not the element type.
c5c76735 4462 ALIGN and CLEARED are as for store_constructor.
23cb1766 4463 ALIAS_SET is the alias set to use for any stores.
23ccec44
JW
4464
4465 This provides a recursive shortcut back to store_constructor when it isn't
4466 necessary to go through store_field. This is so that we can pass through
4467 the cleared field to let store_constructor know that we may not have to
4468 clear a substructure if the outer structure has already been cleared. */
e1a43f73
PB
4469
4470static void
4471store_constructor_field (target, bitsize, bitpos,
23cb1766 4472 mode, exp, type, align, cleared, alias_set)
e1a43f73 4473 rtx target;
770ae6cc
RK
4474 unsigned HOST_WIDE_INT bitsize;
4475 HOST_WIDE_INT bitpos;
e1a43f73
PB
4476 enum machine_mode mode;
4477 tree exp, type;
729a2125 4478 unsigned int align;
e1a43f73 4479 int cleared;
23cb1766 4480 int alias_set;
e1a43f73
PB
4481{
4482 if (TREE_CODE (exp) == CONSTRUCTOR
23ccec44
JW
4483 && bitpos % BITS_PER_UNIT == 0
4484 /* If we have a non-zero bitpos for a register target, then we just
4485 let store_field do the bitfield handling. This is unlikely to
4486 generate unnecessary clear instructions anyway. */
4487 && (bitpos == 0 || GET_CODE (target) == MEM))
e1a43f73 4488 {
126e5b0d 4489 if (bitpos != 0)
ce64861e 4490 target
f4ef873c 4491 = adjust_address (target,
ce64861e
RK
4492 GET_MODE (target) == BLKmode
4493 || 0 != (bitpos
4494 % GET_MODE_ALIGNMENT (GET_MODE (target)))
f4ef873c 4495 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
23cb1766 4496
e0339ef7
RK
4497
4498 /* Show the alignment may no longer be what it was and update the alias
4499 set, if required. */
eeebb824 4500 if (bitpos != 0)
8752c357 4501 align = MIN (align, (unsigned int) bitpos & - bitpos);
832ea3b3 4502 if (GET_CODE (target) == MEM)
ba4828e0 4503 set_mem_alias_set (target, alias_set);
e0339ef7 4504
b7010412 4505 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
e1a43f73
PB
4506 }
4507 else
19caa751 4508 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, align,
23cb1766 4509 int_size_in_bytes (type), alias_set);
e1a43f73
PB
4510}
4511
bbf6f052 4512/* Store the value of constructor EXP into the rtx TARGET.
e1a43f73 4513 TARGET is either a REG or a MEM.
19caa751 4514 ALIGN is the maximum known alignment for TARGET.
b7010412
RK
4515 CLEARED is true if TARGET is known to have been zeroed.
4516 SIZE is the number of bytes of TARGET we are allowed to modify: this
4517 may not be the same as the size of EXP if we are assigning to a field
4518 which has been packed to exclude padding bits. */
bbf6f052
RK
4519
4520static void
b7010412 4521store_constructor (exp, target, align, cleared, size)
bbf6f052
RK
4522 tree exp;
4523 rtx target;
729a2125 4524 unsigned int align;
e1a43f73 4525 int cleared;
13eb1f7f 4526 HOST_WIDE_INT size;
bbf6f052 4527{
4af3895e 4528 tree type = TREE_TYPE (exp);
a5efcd63 4529#ifdef WORD_REGISTER_OPERATIONS
13eb1f7f 4530 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
a5efcd63 4531#endif
4af3895e 4532
bbf6f052
RK
4533 /* We know our target cannot conflict, since safe_from_p has been called. */
4534#if 0
4535 /* Don't try copying piece by piece into a hard register
4536 since that is vulnerable to being clobbered by EXP.
4537 Instead, construct in a pseudo register and then copy it all. */
4538 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4539 {
4540 rtx temp = gen_reg_rtx (GET_MODE (target));
7205485e 4541 store_constructor (exp, temp, align, cleared, size);
bbf6f052
RK
4542 emit_move_insn (target, temp);
4543 return;
4544 }
4545#endif
4546
e44842fe
RK
4547 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4548 || TREE_CODE (type) == QUAL_UNION_TYPE)
bbf6f052
RK
4549 {
4550 register tree elt;
4551
4af3895e 4552 /* Inform later passes that the whole union value is dead. */
dd1db5ec
RK
4553 if ((TREE_CODE (type) == UNION_TYPE
4554 || TREE_CODE (type) == QUAL_UNION_TYPE)
4555 && ! cleared)
a59f8640
R
4556 {
4557 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4558
4559 /* If the constructor is empty, clear the union. */
4560 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
19caa751 4561 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
a59f8640 4562 }
4af3895e
JVA
4563
4564 /* If we are building a static constructor into a register,
4565 set the initial value to zero so we can fold the value into
67225c15
RK
4566 a constant. But if more than one register is involved,
4567 this probably loses. */
4568 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4569 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
9de08200
RK
4570 {
4571 if (! cleared)
e9a25f70 4572 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4af3895e 4573
9de08200
RK
4574 cleared = 1;
4575 }
4576
4577 /* If the constructor has fewer fields than the structure
4578 or if we are initializing the structure to mostly zeros,
0d97bf4c 4579 clear the whole structure first. Don't do this if TARGET is a
fcf1b822
RK
4580 register whose mode size isn't equal to SIZE since clear_storage
4581 can't handle this case. */
9376fcd6
RK
4582 else if (size > 0
4583 && ((list_length (CONSTRUCTOR_ELTS (exp))
c3b247b4 4584 != fields_length (type))
fcf1b822
RK
4585 || mostly_zeros_p (exp))
4586 && (GET_CODE (target) != REG
8752c357 4587 || (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target)) == size))
9de08200
RK
4588 {
4589 if (! cleared)
19caa751 4590 clear_storage (target, GEN_INT (size), align);
9de08200
RK
4591
4592 cleared = 1;
4593 }
dd1db5ec 4594 else if (! cleared)
bbf6f052 4595 /* Inform later passes that the old value is dead. */
38a448ca 4596 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
bbf6f052
RK
4597
4598 /* Store each element of the constructor into
4599 the corresponding field of TARGET. */
4600
4601 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4602 {
4603 register tree field = TREE_PURPOSE (elt);
c5c76735 4604#ifdef WORD_REGISTER_OPERATIONS
34c73909 4605 tree value = TREE_VALUE (elt);
c5c76735 4606#endif
bbf6f052 4607 register enum machine_mode mode;
770ae6cc
RK
4608 HOST_WIDE_INT bitsize;
4609 HOST_WIDE_INT bitpos = 0;
bbf6f052 4610 int unsignedp;
770ae6cc 4611 tree offset;
b50d17a1 4612 rtx to_rtx = target;
bbf6f052 4613
f32fd778
RS
4614 /* Just ignore missing fields.
4615 We cleared the whole structure, above,
4616 if any fields are missing. */
4617 if (field == 0)
4618 continue;
4619
e1a43f73
PB
4620 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4621 continue;
9de08200 4622
770ae6cc
RK
4623 if (host_integerp (DECL_SIZE (field), 1))
4624 bitsize = tree_low_cst (DECL_SIZE (field), 1);
14a774a9
RK
4625 else
4626 bitsize = -1;
4627
bbf6f052
RK
4628 unsignedp = TREE_UNSIGNED (field);
4629 mode = DECL_MODE (field);
4630 if (DECL_BIT_FIELD (field))
4631 mode = VOIDmode;
4632
770ae6cc
RK
4633 offset = DECL_FIELD_OFFSET (field);
4634 if (host_integerp (offset, 0)
4635 && host_integerp (bit_position (field), 0))
4636 {
4637 bitpos = int_bit_position (field);
4638 offset = 0;
4639 }
b50d17a1 4640 else
770ae6cc 4641 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
3a94c984 4642
b50d17a1
RK
4643 if (offset)
4644 {
4645 rtx offset_rtx;
4646
4647 if (contains_placeholder_p (offset))
7fa96708 4648 offset = build (WITH_RECORD_EXPR, sizetype,
956d6950 4649 offset, make_tree (TREE_TYPE (exp), target));
bbf6f052 4650
b50d17a1
RK
4651 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4652 if (GET_CODE (to_rtx) != MEM)
4653 abort ();
4654
3a94c984
KH
4655 if (GET_MODE (offset_rtx) != ptr_mode)
4656 {
bd070e1a 4657#ifdef POINTERS_EXTEND_UNSIGNED
822a3443 4658 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
bd070e1a
RH
4659#else
4660 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4661#endif
4662 }
4663
b50d17a1
RK
4664 to_rtx
4665 = change_address (to_rtx, VOIDmode,
38a448ca 4666 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
c5c76735
JL
4667 force_reg (ptr_mode,
4668 offset_rtx)));
7fa96708 4669 align = DECL_OFFSET_ALIGN (field);
b50d17a1 4670 }
c5c76735 4671
cf04eb80
RK
4672 if (TREE_READONLY (field))
4673 {
9151b3bf 4674 if (GET_CODE (to_rtx) == MEM)
effbcc6a
RK
4675 to_rtx = copy_rtx (to_rtx);
4676
cf04eb80
RK
4677 RTX_UNCHANGING_P (to_rtx) = 1;
4678 }
4679
34c73909
R
4680#ifdef WORD_REGISTER_OPERATIONS
4681 /* If this initializes a field that is smaller than a word, at the
4682 start of a word, try to widen it to a full word.
4683 This special case allows us to output C++ member function
4684 initializations in a form that the optimizers can understand. */
770ae6cc 4685 if (GET_CODE (target) == REG
34c73909
R
4686 && bitsize < BITS_PER_WORD
4687 && bitpos % BITS_PER_WORD == 0
4688 && GET_MODE_CLASS (mode) == MODE_INT
4689 && TREE_CODE (value) == INTEGER_CST
13eb1f7f
RK
4690 && exp_size >= 0
4691 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
34c73909
R
4692 {
4693 tree type = TREE_TYPE (value);
4694 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4695 {
4696 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4697 value = convert (type, value);
4698 }
4699 if (BYTES_BIG_ENDIAN)
4700 value
4701 = fold (build (LSHIFT_EXPR, type, value,
4702 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4703 bitsize = BITS_PER_WORD;
4704 mode = word_mode;
4705 }
4706#endif
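	      /* Editorial example (assuming a 32-bit big-endian target with
		 WORD_REGISTER_OPERATIONS): initializing an 8-bit field at
		 bit 0 of a register-held structure with the constant 5 is
		 rewritten as a full-word store of 5 << 24, a form the
		 optimizers follow much more readily than a bitfield
		 insertion.  */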
c5c76735 4707 store_constructor_field (to_rtx, bitsize, bitpos, mode,
23cb1766 4708 TREE_VALUE (elt), type, align, cleared,
963a2a84 4709 (DECL_NONADDRESSABLE_P (field)
1ccfe3fa 4710 && GET_CODE (to_rtx) == MEM)
23cb1766
RK
4711 ? MEM_ALIAS_SET (to_rtx)
4712 : get_alias_set (TREE_TYPE (field)));
bbf6f052
RK
4713 }
4714 }
4af3895e 4715 else if (TREE_CODE (type) == ARRAY_TYPE)
bbf6f052
RK
4716 {
4717 register tree elt;
4718 register int i;
e1a43f73 4719 int need_to_clear;
4af3895e 4720 tree domain = TYPE_DOMAIN (type);
4af3895e 4721 tree elttype = TREE_TYPE (type);
85f3d674
RK
4722 int const_bounds_p = (host_integerp (TYPE_MIN_VALUE (domain), 0)
4723 && host_integerp (TYPE_MAX_VALUE (domain), 0));
ae0ed63a
JM
4724 HOST_WIDE_INT minelt = 0;
4725 HOST_WIDE_INT maxelt = 0;
85f3d674
RK
4726
4727 /* If we have constant bounds for the range of the type, get them. */
4728 if (const_bounds_p)
4729 {
4730 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4731 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4732 }
bbf6f052 4733
e1a43f73 4734 /* If the constructor has fewer elements than the array,
38e01259 4735 clear the whole array first. Similarly if this is
e1a43f73
PB
4736 a static constructor of a non-BLKmode object. */
4737 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4738 need_to_clear = 1;
4739 else
4740 {
4741 HOST_WIDE_INT count = 0, zero_count = 0;
85f3d674
RK
4742 need_to_clear = ! const_bounds_p;
4743
e1a43f73
PB
4744 /* This loop is a more accurate version of the loop in
4745 mostly_zeros_p (it handles RANGE_EXPR in an index).
4746 It is also needed to check for missing elements. */
4747 for (elt = CONSTRUCTOR_ELTS (exp);
85f3d674 4748 elt != NULL_TREE && ! need_to_clear;
df0faff1 4749 elt = TREE_CHAIN (elt))
e1a43f73
PB
4750 {
4751 tree index = TREE_PURPOSE (elt);
4752 HOST_WIDE_INT this_node_count;
19caa751 4753
e1a43f73
PB
4754 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4755 {
4756 tree lo_index = TREE_OPERAND (index, 0);
4757 tree hi_index = TREE_OPERAND (index, 1);
05bccae2 4758
19caa751
RK
4759 if (! host_integerp (lo_index, 1)
4760 || ! host_integerp (hi_index, 1))
e1a43f73
PB
4761 {
4762 need_to_clear = 1;
4763 break;
4764 }
19caa751
RK
4765
4766 this_node_count = (tree_low_cst (hi_index, 1)
4767 - tree_low_cst (lo_index, 1) + 1);
e1a43f73
PB
4768 }
4769 else
4770 this_node_count = 1;
85f3d674 4771
e1a43f73
PB
4772 count += this_node_count;
4773 if (mostly_zeros_p (TREE_VALUE (elt)))
4774 zero_count += this_node_count;
4775 }
85f3d674 4776
8e958f70 4777 /* Clear the entire array first if there are any missing elements,
0f41302f 4778 or if the incidence of zero elements is >= 75%. */
85f3d674
RK
4779 if (! need_to_clear
4780 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
e1a43f73
PB
4781 need_to_clear = 1;
4782 }
85f3d674 4783
9376fcd6 4784 if (need_to_clear && size > 0)
9de08200
RK
4785 {
4786 if (! cleared)
19caa751 4787 clear_storage (target, GEN_INT (size), align);
9de08200
RK
4788 cleared = 1;
4789 }
df4556a3 4790 else if (REG_P (target))
bbf6f052 4791 /* Inform later passes that the old value is dead. */
38a448ca 4792 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
bbf6f052
RK
4793
4794 /* Store each element of the constructor into
4795 the corresponding element of TARGET, determined
4796 by counting the elements. */
4797 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4798 elt;
4799 elt = TREE_CHAIN (elt), i++)
4800 {
4801 register enum machine_mode mode;
19caa751
RK
4802 HOST_WIDE_INT bitsize;
4803 HOST_WIDE_INT bitpos;
bbf6f052 4804 int unsignedp;
e1a43f73 4805 tree value = TREE_VALUE (elt);
729a2125 4806 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
03dc44a6
RS
4807 tree index = TREE_PURPOSE (elt);
4808 rtx xtarget = target;
bbf6f052 4809
e1a43f73
PB
4810 if (cleared && is_zeros_p (value))
4811 continue;
9de08200 4812
bbf6f052 4813 unsignedp = TREE_UNSIGNED (elttype);
14a774a9
RK
4814 mode = TYPE_MODE (elttype);
4815 if (mode == BLKmode)
19caa751
RK
4816 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4817 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4818 : -1);
14a774a9
RK
4819 else
4820 bitsize = GET_MODE_BITSIZE (mode);
bbf6f052 4821
e1a43f73
PB
4822 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4823 {
4824 tree lo_index = TREE_OPERAND (index, 0);
4825 tree hi_index = TREE_OPERAND (index, 1);
4826 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4827 struct nesting *loop;
05c0b405
PB
4828 HOST_WIDE_INT lo, hi, count;
4829 tree position;
e1a43f73 4830
0f41302f 4831 /* If the range is constant and "small", unroll the loop. */
85f3d674
RK
4832 if (const_bounds_p
4833 && host_integerp (lo_index, 0)
19caa751
RK
4834 && host_integerp (hi_index, 0)
4835 && (lo = tree_low_cst (lo_index, 0),
4836 hi = tree_low_cst (hi_index, 0),
05c0b405
PB
4837 count = hi - lo + 1,
4838 (GET_CODE (target) != MEM
4839 || count <= 2
19caa751
RK
4840 || (host_integerp (TYPE_SIZE (elttype), 1)
4841 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4842 <= 40 * 8)))))
e1a43f73 4843 {
05c0b405
PB
4844 lo -= minelt; hi -= minelt;
4845 for (; lo <= hi; lo++)
e1a43f73 4846 {
19caa751 4847 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
23cb1766
RK
4848 store_constructor_field
4849 (target, bitsize, bitpos, mode, value, type, align,
4850 cleared,
4851 TYPE_NONALIASED_COMPONENT (type)
4852 ? MEM_ALIAS_SET (target) : get_alias_set (elttype));
e1a43f73
PB
4853 }
4854 }
4855 else
4856 {
4857 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4858 loop_top = gen_label_rtx ();
4859 loop_end = gen_label_rtx ();
4860
4861 unsignedp = TREE_UNSIGNED (domain);
4862
4863 index = build_decl (VAR_DECL, NULL_TREE, domain);
4864
19e7881c 4865 index_r
e1a43f73
PB
4866 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4867 &unsignedp, 0));
19e7881c 4868 SET_DECL_RTL (index, index_r);
e1a43f73
PB
4869 if (TREE_CODE (value) == SAVE_EXPR
4870 && SAVE_EXPR_RTL (value) == 0)
4871 {
0f41302f
MS
4872 /* Make sure value gets expanded once before the
4873 loop. */
e1a43f73
PB
4874 expand_expr (value, const0_rtx, VOIDmode, 0);
4875 emit_queue ();
4876 }
4877 store_expr (lo_index, index_r, 0);
4878 loop = expand_start_loop (0);
4879
0f41302f 4880 /* Assign value to element index. */
fed3cef0
RK
4881 position
4882 = convert (ssizetype,
4883 fold (build (MINUS_EXPR, TREE_TYPE (index),
4884 index, TYPE_MIN_VALUE (domain))));
4885 position = size_binop (MULT_EXPR, position,
4886 convert (ssizetype,
4887 TYPE_SIZE_UNIT (elttype)));
4888
e1a43f73 4889 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
38a448ca 4890 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
e1a43f73
PB
4891 xtarget = change_address (target, mode, addr);
4892 if (TREE_CODE (value) == CONSTRUCTOR)
b7010412
RK
4893 store_constructor (value, xtarget, align, cleared,
4894 bitsize / BITS_PER_UNIT);
e1a43f73
PB
4895 else
4896 store_expr (value, xtarget, 0);
4897
4898 expand_exit_loop_if_false (loop,
4899 build (LT_EXPR, integer_type_node,
4900 index, hi_index));
4901
4902 expand_increment (build (PREINCREMENT_EXPR,
4903 TREE_TYPE (index),
7b8b9722 4904 index, integer_one_node), 0, 0);
e1a43f73
PB
4905 expand_end_loop ();
4906 emit_label (loop_end);
e1a43f73
PB
4907 }
4908 }
19caa751
RK
4909 else if ((index != 0 && ! host_integerp (index, 0))
4910 || ! host_integerp (TYPE_SIZE (elttype), 1))
03dc44a6 4911 {
e1a43f73 4912 rtx pos_rtx, addr;
03dc44a6
RS
4913 tree position;
4914
5b6c44ff 4915 if (index == 0)
fed3cef0 4916 index = ssize_int (1);
5b6c44ff 4917
e1a43f73 4918 if (minelt)
fed3cef0
RK
4919 index = convert (ssizetype,
4920 fold (build (MINUS_EXPR, index,
4921 TYPE_MIN_VALUE (domain))));
19caa751 4922
fed3cef0
RK
4923 position = size_binop (MULT_EXPR, index,
4924 convert (ssizetype,
4925 TYPE_SIZE_UNIT (elttype)));
03dc44a6 4926 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
38a448ca 4927 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
03dc44a6 4928 xtarget = change_address (target, mode, addr);
e1a43f73 4929 store_expr (value, xtarget, 0);
03dc44a6
RS
4930 }
4931 else
4932 {
4933 if (index != 0)
19caa751
RK
4934 bitpos = ((tree_low_cst (index, 0) - minelt)
4935 * tree_low_cst (TYPE_SIZE (elttype), 1));
03dc44a6 4936 else
19caa751
RK
4937 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4938
c5c76735 4939 store_constructor_field (target, bitsize, bitpos, mode, value,
23cb1766
RK
4940 type, align, cleared,
4941 TYPE_NONALIASED_COMPONENT (type)
831ecbd4 4942 && GET_CODE (target) == MEM
23cb1766
RK
4943 ? MEM_ALIAS_SET (target) :
4944 get_alias_set (elttype));
4945
03dc44a6 4946 }
bbf6f052
RK
4947 }
4948 }
19caa751 4949
3a94c984 4950 /* Set constructor assignments. */
071a6595
PB
4951 else if (TREE_CODE (type) == SET_TYPE)
4952 {
e1a43f73 4953 tree elt = CONSTRUCTOR_ELTS (exp);
19caa751 4954 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
071a6595
PB
4955 tree domain = TYPE_DOMAIN (type);
4956 tree domain_min, domain_max, bitlength;
4957
9faa82d8 4958 /* The default implementation strategy is to extract the constant
071a6595
PB
4959 parts of the constructor, use that to initialize the target,
4960 and then "or" in whatever non-constant ranges we need in addition.
4961
4962 If a large set is all zero or all ones, it is
4963 probably better to set it using memset (if available) or bzero.
4964 Also, if a large set has just a single range, it may also be
4965 better to first clear the set (using
0f41302f 4966 bzero/memset), and then set the bits we want. */
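/* A minimal sketch of the constant case, with assumed example values:
   for an 8-bit set over the domain [0..7] holding { 3, 4, 5 },
   get_set_constructor_bits fills bit_buffer with 0,0,0,1,1,1,0,0, and
   the word-assembly loop below builds

	word = (1 << 3) | (1 << 4) | (1 << 5);

   i.e. 0x38 with !BYTES_BIG_ENDIAN bit numbering, which is then
   stored by a single emit_move_insn. */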
3a94c984 4967
0f41302f 4968 /* Check for all zeros. */
9376fcd6 4969 if (elt == NULL_TREE && size > 0)
071a6595 4970 {
e1a43f73 4971 if (!cleared)
19caa751 4972 clear_storage (target, GEN_INT (size), TYPE_ALIGN (type));
071a6595
PB
4973 return;
4974 }
4975
071a6595
PB
4976 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4977 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4978 bitlength = size_binop (PLUS_EXPR,
fed3cef0
RK
4979 size_diffop (domain_max, domain_min),
4980 ssize_int (1));
071a6595 4981
19caa751 4982 nbits = tree_low_cst (bitlength, 1);
e1a43f73
PB
4983
4984 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4985 are "complicated" (more than one range), initialize (the
3a94c984 4986 constant parts) by copying from a constant. */
e1a43f73
PB
4987 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4988 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
071a6595 4989 {
19caa751 4990 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
b4ee5a72 4991 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
0f41302f 4992 char *bit_buffer = (char *) alloca (nbits);
b4ee5a72 4993 HOST_WIDE_INT word = 0;
19caa751
RK
4994 unsigned int bit_pos = 0;
4995 unsigned int ibit = 0;
4996 unsigned int offset = 0; /* In bytes from beginning of set. */
4997
e1a43f73 4998 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
b4ee5a72 4999 for (;;)
071a6595 5000 {
b4ee5a72
PB
5001 if (bit_buffer[ibit])
5002 {
b09f3348 5003 if (BYTES_BIG_ENDIAN)
b4ee5a72
PB
5004 word |= (1 << (set_word_size - 1 - bit_pos));
5005 else
5006 word |= 1 << bit_pos;
5007 }
19caa751 5008
b4ee5a72
PB
5009 bit_pos++; ibit++;
5010 if (bit_pos >= set_word_size || ibit == nbits)
071a6595 5011 {
e1a43f73
PB
5012 if (word != 0 || ! cleared)
5013 {
5014 rtx datum = GEN_INT (word);
5015 rtx to_rtx;
19caa751 5016
0f41302f
MS
5017 /* The assumption here is that it is safe to use
5018 XEXP if the set is multi-word, but not if
5019 it's single-word. */
e1a43f73 5020 if (GET_CODE (target) == MEM)
f4ef873c 5021 to_rtx = adjust_address (target, mode, offset);
3a94c984 5022 else if (offset == 0)
e1a43f73
PB
5023 to_rtx = target;
5024 else
5025 abort ();
5026 emit_move_insn (to_rtx, datum);
5027 }
19caa751 5028
b4ee5a72
PB
5029 if (ibit == nbits)
5030 break;
5031 word = 0;
5032 bit_pos = 0;
5033 offset += set_word_size / BITS_PER_UNIT;
071a6595
PB
5034 }
5035 }
071a6595 5036 }
e1a43f73 5037 else if (!cleared)
19caa751
RK
5038 /* Don't bother clearing storage if the set is all ones. */
5039 if (TREE_CHAIN (elt) != NULL_TREE
5040 || (TREE_PURPOSE (elt) == NULL_TREE
5041 ? nbits != 1
5042 : ( ! host_integerp (TREE_VALUE (elt), 0)
5043 || ! host_integerp (TREE_PURPOSE (elt), 0)
5044 || (tree_low_cst (TREE_VALUE (elt), 0)
5045 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5046 != (HOST_WIDE_INT) nbits))))
5047 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
3a94c984 5048
e1a43f73 5049 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
071a6595 5050 {
3a94c984 5051 /* Start of range of element or NULL. */
071a6595 5052 tree startbit = TREE_PURPOSE (elt);
3a94c984 5053 /* End of range of element, or element value. */
071a6595 5054 tree endbit = TREE_VALUE (elt);
381127e8 5055#ifdef TARGET_MEM_FUNCTIONS
071a6595 5056 HOST_WIDE_INT startb, endb;
381127e8 5057#endif
19caa751 5058 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
071a6595
PB
5059
5060 bitlength_rtx = expand_expr (bitlength,
19caa751 5061 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
071a6595 5062
3a94c984 5063 /* Handle non-range tuple element like [ expr ]. */
071a6595
PB
5064 if (startbit == NULL_TREE)
5065 {
5066 startbit = save_expr (endbit);
5067 endbit = startbit;
5068 }
19caa751 5069
071a6595
PB
5070 startbit = convert (sizetype, startbit);
5071 endbit = convert (sizetype, endbit);
5072 if (! integer_zerop (domain_min))
5073 {
5074 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5075 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5076 }
3a94c984 5077 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
071a6595 5078 EXPAND_CONST_ADDRESS);
3a94c984 5079 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
071a6595
PB
5080 EXPAND_CONST_ADDRESS);
5081
5082 if (REG_P (target))
5083 {
1da68f56
RK
5084 targetx
5085 = assign_temp
5086 ((build_qualified_type (type_for_mode (GET_MODE (target), 0),
5087 TYPE_QUAL_CONST)),
5088 0, 1, 1);
071a6595
PB
5089 emit_move_insn (targetx, target);
5090 }
19caa751 5091
071a6595
PB
5092 else if (GET_CODE (target) == MEM)
5093 targetx = target;
5094 else
5095 abort ();
5096
5097#ifdef TARGET_MEM_FUNCTIONS
5098 /* Optimization: If startbit and endbit are
9faa82d8 5099 constants divisible by BITS_PER_UNIT,
0f41302f 5100 call memset instead. */
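/* For example, with 8-bit bytes, startbit == 8 and endbit == 31 give
   startb == 8 and endb == 32, so the call below memsets the
   (32 - 8) / 8 == 3 bytes starting 1 byte into TARGETX to all ones
   (constm1_rtx). */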
071a6595
PB
5101 if (TREE_CODE (startbit) == INTEGER_CST
5102 && TREE_CODE (endbit) == INTEGER_CST
5103 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
e1a43f73 5104 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
071a6595 5105 {
ebb1b59a 5106 emit_library_call (memset_libfunc, LCT_NORMAL,
071a6595 5107 VOIDmode, 3,
e1a43f73
PB
5108 plus_constant (XEXP (targetx, 0),
5109 startb / BITS_PER_UNIT),
071a6595 5110 Pmode,
3b6f75e2 5111 constm1_rtx, TYPE_MODE (integer_type_node),
071a6595 5112 GEN_INT ((endb - startb) / BITS_PER_UNIT),
3b6f75e2 5113 TYPE_MODE (sizetype));
071a6595
PB
5114 }
5115 else
5116#endif
19caa751 5117 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
ebb1b59a
BS
5118 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
5119 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
19caa751
RK
5120 startbit_rtx, TYPE_MODE (sizetype),
5121 endbit_rtx, TYPE_MODE (sizetype));
5122
071a6595
PB
5123 if (REG_P (target))
5124 emit_move_insn (target, targetx);
5125 }
5126 }
bbf6f052
RK
5127
5128 else
5129 abort ();
5130}
5131
5132/* Store the value of EXP (an expression tree)
5133 into a subfield of TARGET which has mode MODE and occupies
5134 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5135 If MODE is VOIDmode, it means that we are storing into a bit-field.
5136
5137 If VALUE_MODE is VOIDmode, return nothing in particular.
5138 UNSIGNEDP is not used in this case.
5139
5140 Otherwise, return an rtx for the value stored. This rtx
5141 has mode VALUE_MODE if that is convenient to do.
5142 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5143
19caa751 5144 ALIGN is the alignment that TARGET is known to have.
3a94c984 5145 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
ece32014
MM
5146
5147 ALIAS_SET is the alias set for the destination. This value will
5148 (in general) be different from that for TARGET, since TARGET is a
5149 reference to the containing structure. */
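/* Purely as an illustration (the names S and F are hypothetical): for
   a 5-bit field F placed 3 bits into a structure S, an assignment
   "s.f = x" reaches here roughly as

	store_field (s_rtx, 5, 3, VOIDmode, x_tree, VOIDmode,
		     0, align, total_size, alias_set);

   with MODE == VOIDmode because F is a bit-field, so the store is
   done by store_bit_field rather than through an ordinary memref. */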
bbf6f052
RK
5150
5151static rtx
5152store_field (target, bitsize, bitpos, mode, exp, value_mode,
ece32014 5153 unsignedp, align, total_size, alias_set)
bbf6f052 5154 rtx target;
770ae6cc
RK
5155 HOST_WIDE_INT bitsize;
5156 HOST_WIDE_INT bitpos;
bbf6f052
RK
5157 enum machine_mode mode;
5158 tree exp;
5159 enum machine_mode value_mode;
5160 int unsignedp;
729a2125 5161 unsigned int align;
770ae6cc 5162 HOST_WIDE_INT total_size;
ece32014 5163 int alias_set;
bbf6f052 5164{
906c4e36 5165 HOST_WIDE_INT width_mask = 0;
bbf6f052 5166
e9a25f70
JL
5167 if (TREE_CODE (exp) == ERROR_MARK)
5168 return const0_rtx;
5169
2be6a7e9
RK
5170 /* If we have nothing to store, do nothing unless the expression has
5171 side-effects. */
5172 if (bitsize == 0)
5173 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5174
906c4e36
RK
5175 if (bitsize < HOST_BITS_PER_WIDE_INT)
5176 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
bbf6f052
RK
5177
5178 /* If we are storing into an unaligned field of an aligned union that is
5179 in a register, we may have the mode of TARGET being an integer mode but
5180 MODE == BLKmode. In that case, get an aligned object whose size and
5181 alignment are the same as TARGET and store TARGET into it (we can avoid
5182 the store if the field being stored is the entire width of TARGET). Then
5183 call ourselves recursively to store the field into a BLKmode version of
5184 that object. Finally, load from the object into TARGET. This is not
5185 very efficient in general, but should only be slightly more expensive
5186 than the otherwise-required unaligned accesses. Perhaps this can be
5187 cleaned up later. */
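/* In outline: OBJECT is a fresh temporary with TARGET's integer mode,
   BLK_OBJECT the same slot seen in BLKmode; copy TARGET into OBJECT
   (unless the field covers all of TARGET), recurse to store into
   BLK_OBJECT, then copy OBJECT back into TARGET. */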
5188
5189 if (mode == BLKmode
5190 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5191 {
1da68f56
RK
5192 rtx object
5193 = assign_temp
5194 (build_qualified_type (type_for_mode (GET_MODE (target), 0),
5195 TYPE_QUAL_CONST),
5196 0, 1, 1);
bbf6f052
RK
5197 rtx blk_object = copy_rtx (object);
5198
5199 PUT_MODE (blk_object, BLKmode);
5200
8752c357 5201 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
bbf6f052
RK
5202 emit_move_insn (object, target);
5203
5204 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
ece32014 5205 align, total_size, alias_set);
bbf6f052 5206
46093b97
RS
5207 /* Even though we aren't returning target, we need to
5208 give it the updated value. */
bbf6f052
RK
5209 emit_move_insn (target, object);
5210
46093b97 5211 return blk_object;
bbf6f052 5212 }
c3b247b4
JM
5213
5214 if (GET_CODE (target) == CONCAT)
5215 {
5216 /* We're storing into a struct containing a single __complex. */
5217
5218 if (bitpos != 0)
5219 abort ();
5220 return store_expr (exp, target, 0);
5221 }
bbf6f052
RK
5222
5223 /* If the structure is in a register or if the component
5224 is a bit field, we cannot use addressing to access it.
5225 Use bit-field techniques or SUBREG to store in it. */
5226
4fa52007 5227 if (mode == VOIDmode
6ab06cbb
JW
5228 || (mode != BLKmode && ! direct_store[(int) mode]
5229 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5230 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4fa52007 5231 || GET_CODE (target) == REG
c980ac49 5232 || GET_CODE (target) == SUBREG
ccc98036
RS
5233 /* If the field isn't aligned enough to store as an ordinary memref,
5234 store it as a bit field. */
e1565e65 5235 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
19caa751 5236 && (align < GET_MODE_ALIGNMENT (mode)
14a774a9 5237 || bitpos % GET_MODE_ALIGNMENT (mode)))
e1565e65 5238 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
19caa751 5239 && (TYPE_ALIGN (TREE_TYPE (exp)) > align
14a774a9
RK
5240 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
5241 /* If the RHS and field are a constant size and the size of the
5242 RHS isn't the same size as the bitfield, we must use bitfield
5243 operations. */
05bccae2
RK
5244 || (bitsize >= 0
5245 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5246 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
bbf6f052 5247 {
906c4e36 5248 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
bbd6cf73 5249
ef19912d
RK
5250 /* If BITSIZE is narrower than the size of the type of EXP
5251 we will be narrowing TEMP. Normally, what's wanted are the
5252 low-order bits. However, if EXP's type is a record and this is
5253 a big-endian machine, we want the upper BITSIZE bits. */
5254 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5255 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
5256 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5257 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5258 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5259 - bitsize),
5260 temp, 1);
5261
bbd6cf73
RK
5262 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5263 MODE. */
5264 if (mode != VOIDmode && mode != BLKmode
5265 && mode != TYPE_MODE (TREE_TYPE (exp)))
5266 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5267
a281e72d
RK
5268 /* If the modes of TARGET and TEMP are both BLKmode, both
5269 must be in memory and BITPOS must be aligned on a byte
5270 boundary. If so, we simply do a block copy. */
5271 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5272 {
19caa751 5273 unsigned int exp_align = expr_align (exp);
729a2125 5274
a281e72d
RK
5275 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5276 || bitpos % BITS_PER_UNIT != 0)
5277 abort ();
5278
f4ef873c 5279 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
a281e72d 5280
729a2125
RK
5281 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
5282 align = MIN (exp_align, align);
c297a34e 5283
14a774a9 5284 /* Find an alignment that is consistent with the bit position. */
19caa751 5285 while ((bitpos % align) != 0)
14a774a9
RK
5286 align >>= 1;
5287
a281e72d 5288 emit_block_move (target, temp,
bd5dab53
RK
5289 bitsize == -1 ? expr_size (exp)
5290 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5291 / BITS_PER_UNIT),
14a774a9 5292 align);
a281e72d
RK
5293
5294 return value_mode == VOIDmode ? const0_rtx : target;
5295 }
5296
bbf6f052
RK
5297 /* Store the value in the bitfield. */
5298 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
5299 if (value_mode != VOIDmode)
5300 {
5301 /* The caller wants an rtx for the value. */
5302 /* If possible, avoid refetching from the bitfield itself. */
5303 if (width_mask != 0
5304 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5c4d7cfb 5305 {
9074de27 5306 tree count;
5c4d7cfb 5307 enum machine_mode tmode;
86a2c12a 5308
5c4d7cfb 5309 if (unsignedp)
69107307
AO
5310 return expand_and (temp,
5311 GEN_INT
5312 (trunc_int_for_mode
5313 (width_mask,
5314 GET_MODE (temp) == VOIDmode
5315 ? value_mode
5316 : GET_MODE (temp))), NULL_RTX);
5c4d7cfb 5317 tmode = GET_MODE (temp);
86a2c12a
RS
5318 if (tmode == VOIDmode)
5319 tmode = value_mode;
5c4d7cfb
RS
5320 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5321 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5322 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5323 }
bbf6f052 5324 return extract_bit_field (target, bitsize, bitpos, unsignedp,
906c4e36
RK
5325 NULL_RTX, value_mode, 0, align,
5326 total_size);
bbf6f052
RK
5327 }
5328 return const0_rtx;
5329 }
5330 else
5331 {
5332 rtx addr = XEXP (target, 0);
5333 rtx to_rtx;
5334
5335 /* If a value is wanted, it must be the lhs;
5336 so make the address stable for multiple use. */
5337
5338 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5339 && ! CONSTANT_ADDRESS_P (addr)
5340 /* A frame-pointer reference is already stable. */
5341 && ! (GET_CODE (addr) == PLUS
5342 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5343 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5344 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
792760b9 5345 target = replace_equiv_address (target, copy_to_reg (addr));
bbf6f052
RK
5346
5347 /* Now build a reference to just the desired component. */
5348
792760b9
RK
5349 to_rtx = copy_rtx (adjust_address (target, mode,
5350 bitpos / BITS_PER_UNIT));
5351
c6df88cb 5352 MEM_SET_IN_STRUCT_P (to_rtx, 1);
0ea834c1
MM
5353 /* If the address of the structure varies, then it might be on
5354 the stack. And, stack slots may be shared across scopes.
5355 So, two different structures, of different types, can end up
5356 at the same location. We will give the structures alias set
5357 zero; here we must be careful not to give non-zero alias sets
5358 to their fields. */
ba4828e0
RK
5359 set_mem_alias_set (to_rtx,
5360 rtx_varies_p (addr, /*for_alias=*/0)
5361 ? 0 : alias_set);
bbf6f052
RK
5362
5363 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5364 }
5365}
5366\f
5367/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
b4e3fabb
RK
5368 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5369 codes and find the ultimate containing object, which we return.
bbf6f052
RK
5370
5371 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5372 bit position, and *PUNSIGNEDP to the signedness of the field.
7bb0943f
RS
5373 If the position of the field is variable, we store a tree
5374 giving the variable offset (in units) in *POFFSET.
5375 This offset is in addition to the bit position.
5376 If the position is not variable, we store 0 in *POFFSET.
19caa751 5377 We set *PALIGNMENT to the alignment of the address that will be
839c4796
RK
5378 computed. This is the alignment of the thing we return if *POFFSET
5379 is zero, but can be less strictly aligned if *POFFSET is nonzero.
bbf6f052
RK
5380
5381 If any of the extraction expressions is volatile,
5382 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5383
5384 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5385 is a mode that can be used to access the field. In that case, *PBITSIZE
e7c33f54
RK
5386 is redundant.
5387
5388 If the field describes a variable-sized object, *PMODE is set to
5389 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
839c4796 5390 this case, but the address of the object can be found. */
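/* An illustrative example (S and F hypothetical): if EXP is the
   COMPONENT_REF for "s.f", where F is a 3-bit bit-field placed 17
   bits into S, the tree for S is returned with *PBITSIZE == 3,
   *PBITPOS == 17, *POFFSET == 0, and *PMODE == VOIDmode since F is a
   bit-field. */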
bbf6f052
RK
5391
5392tree
4969d05d 5393get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
839c4796 5394 punsignedp, pvolatilep, palignment)
bbf6f052 5395 tree exp;
770ae6cc
RK
5396 HOST_WIDE_INT *pbitsize;
5397 HOST_WIDE_INT *pbitpos;
7bb0943f 5398 tree *poffset;
bbf6f052
RK
5399 enum machine_mode *pmode;
5400 int *punsignedp;
5401 int *pvolatilep;
729a2125 5402 unsigned int *palignment;
bbf6f052
RK
5403{
5404 tree size_tree = 0;
5405 enum machine_mode mode = VOIDmode;
fed3cef0 5406 tree offset = size_zero_node;
770ae6cc 5407 tree bit_offset = bitsize_zero_node;
c84e2712 5408 unsigned int alignment = BIGGEST_ALIGNMENT;
770ae6cc 5409 tree tem;
bbf6f052 5410
770ae6cc
RK
5411 /* First get the mode, signedness, and size. We do this from just the
5412 outermost expression. */
bbf6f052
RK
5413 if (TREE_CODE (exp) == COMPONENT_REF)
5414 {
5415 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5416 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5417 mode = DECL_MODE (TREE_OPERAND (exp, 1));
770ae6cc 5418
bbf6f052
RK
5419 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5420 }
5421 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5422 {
5423 size_tree = TREE_OPERAND (exp, 1);
5424 *punsignedp = TREE_UNSIGNED (exp);
5425 }
5426 else
5427 {
5428 mode = TYPE_MODE (TREE_TYPE (exp));
770ae6cc
RK
5429 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5430
ab87f8c8
JL
5431 if (mode == BLKmode)
5432 size_tree = TYPE_SIZE (TREE_TYPE (exp));
770ae6cc
RK
5433 else
5434 *pbitsize = GET_MODE_BITSIZE (mode);
bbf6f052 5435 }
3a94c984 5436
770ae6cc 5437 if (size_tree != 0)
bbf6f052 5438 {
770ae6cc 5439 if (! host_integerp (size_tree, 1))
e7c33f54
RK
5440 mode = BLKmode, *pbitsize = -1;
5441 else
770ae6cc 5442 *pbitsize = tree_low_cst (size_tree, 1);
bbf6f052
RK
5443 }
5444
5445 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5446 and find the ultimate containing object. */
bbf6f052
RK
5447 while (1)
5448 {
770ae6cc
RK
5449 if (TREE_CODE (exp) == BIT_FIELD_REF)
5450 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5451 else if (TREE_CODE (exp) == COMPONENT_REF)
bbf6f052 5452 {
770ae6cc
RK
5453 tree field = TREE_OPERAND (exp, 1);
5454 tree this_offset = DECL_FIELD_OFFSET (field);
bbf6f052 5455
e7f3c83f
RK
5456 /* If this field hasn't been filled in yet, don't go
5457 past it. This should only happen when folding expressions
5458 made during type construction. */
770ae6cc 5459 if (this_offset == 0)
e7f3c83f 5460 break;
770ae6cc
RK
5461 else if (! TREE_CONSTANT (this_offset)
5462 && contains_placeholder_p (this_offset))
5463 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
e7f3c83f 5464
7156dead 5465 offset = size_binop (PLUS_EXPR, offset, this_offset);
770ae6cc
RK
5466 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5467 DECL_FIELD_BIT_OFFSET (field));
e6d8c385 5468
770ae6cc
RK
5469 if (! host_integerp (offset, 0))
5470 alignment = MIN (alignment, DECL_OFFSET_ALIGN (field));
bbf6f052 5471 }
7156dead 5472
b4e3fabb
RK
5473 else if (TREE_CODE (exp) == ARRAY_REF
5474 || TREE_CODE (exp) == ARRAY_RANGE_REF)
bbf6f052 5475 {
742920c7 5476 tree index = TREE_OPERAND (exp, 1);
b4e3fabb
RK
5477 tree array = TREE_OPERAND (exp, 0);
5478 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
770ae6cc 5479 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
b4e3fabb 5480 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
742920c7 5481
770ae6cc
RK
5482 /* We assume all arrays have sizes that are a multiple of a byte.
5483 First subtract the lower bound, if any, in the type of the
5484 index, then convert to sizetype and multiply by the size of the
5485 array element. */
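/* In other words, OFFSET accumulates (INDEX - LOW_BOUND) * UNIT_SIZE
   bytes; e.g. for a hypothetical "int a[5..10]" with 4-byte ints,
   a[i] contributes (i - 5) * 4. */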
5486 if (low_bound != 0 && ! integer_zerop (low_bound))
5487 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5488 index, low_bound));
f8dac6eb 5489
7156dead
RK
5490 /* If the index has a self-referential type, pass it to a
5491 WITH_RECORD_EXPR; if the component size does, pass our
5492 component to one. */
770ae6cc
RK
5493 if (! TREE_CONSTANT (index)
5494 && contains_placeholder_p (index))
5495 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
7156dead
RK
5496 if (! TREE_CONSTANT (unit_size)
5497 && contains_placeholder_p (unit_size))
b4e3fabb 5498 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
742920c7 5499
770ae6cc
RK
5500 offset = size_binop (PLUS_EXPR, offset,
5501 size_binop (MULT_EXPR,
5502 convert (sizetype, index),
7156dead 5503 unit_size));
bbf6f052 5504 }
7156dead 5505
bbf6f052
RK
5506 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5507 && ! ((TREE_CODE (exp) == NOP_EXPR
5508 || TREE_CODE (exp) == CONVERT_EXPR)
5509 && (TYPE_MODE (TREE_TYPE (exp))
5510 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5511 break;
7bb0943f
RS
5512
5513 /* If any reference in the chain is volatile, the effect is volatile. */
5514 if (TREE_THIS_VOLATILE (exp))
5515 *pvolatilep = 1;
839c4796
RK
5516
5517 /* If the offset is non-constant already, then we can't assume any
5518 alignment more than the alignment here. */
770ae6cc 5519 if (! TREE_CONSTANT (offset))
839c4796
RK
5520 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5521
bbf6f052
RK
5522 exp = TREE_OPERAND (exp, 0);
5523 }
5524
2f939d94 5525 if (DECL_P (exp))
839c4796 5526 alignment = MIN (alignment, DECL_ALIGN (exp));
9293498f 5527 else if (TREE_TYPE (exp) != 0)
839c4796
RK
5528 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5529
770ae6cc
RK
5530 /* If OFFSET is constant, see if we can return the whole thing as a
5531 constant bit position. Otherwise, split it up. */
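/* The constant case yields *PBITPOS = OFFSET * BITS_PER_UNIT
   + BIT_OFFSET, provided both the product and the sum fit in a
   HOST_WIDE_INT. */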
5532 if (host_integerp (offset, 0)
5533 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5534 bitsize_unit_node))
5535 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5536 && host_integerp (tem, 0))
5537 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5538 else
5539 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
b50d17a1 5540
bbf6f052 5541 *pmode = mode;
19caa751 5542 *palignment = alignment;
bbf6f052
RK
5543 return exp;
5544}
921b3427
RK
5545
5546 /* Subroutine of expand_expr: compute memory_usage from modifier. */
770ae6cc 5547
921b3427
RK
5548static enum memory_use_mode
5549get_memory_usage_from_modifier (modifier)
5550 enum expand_modifier modifier;
5551{
5552 switch (modifier)
5553 {
5554 case EXPAND_NORMAL:
e5e809f4 5555 case EXPAND_SUM:
921b3427
RK
5556 return MEMORY_USE_RO;
5557 break;
5558 case EXPAND_MEMORY_USE_WO:
5559 return MEMORY_USE_WO;
5560 break;
5561 case EXPAND_MEMORY_USE_RW:
5562 return MEMORY_USE_RW;
5563 break;
921b3427 5564 case EXPAND_MEMORY_USE_DONT:
e5e809f4
JL
5565 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5566 MEMORY_USE_DONT, because they are modifiers to a call of
5567 expand_expr in the ADDR_EXPR case of expand_expr. */
921b3427 5568 case EXPAND_CONST_ADDRESS:
e5e809f4 5569 case EXPAND_INITIALIZER:
921b3427
RK
5570 return MEMORY_USE_DONT;
5571 case EXPAND_MEMORY_USE_BAD:
5572 default:
5573 abort ();
5574 }
5575}
bbf6f052 5576\f
3fe44edd
RK
5577/* Given an rtx VALUE that may contain additions and multiplications, return
5578 an equivalent value that just refers to a register, memory, or constant.
5579 This is done by generating instructions to perform the arithmetic and
5580 returning a pseudo-register containing the value.
c45a13a6
RK
5581
5582 The returned value may be a REG, SUBREG, MEM or constant. */
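/* For instance, (plus (reg X) (const_int 4)) causes an add to be
   emitted via expand_binop, and the pseudo holding the sum is
   returned; a VALUE that is already a register, memory reference or
   constant comes back unchanged. */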
bbf6f052
RK
5583
5584rtx
5585force_operand (value, target)
5586 rtx value, target;
5587{
5588 register optab binoptab = 0;
5589 /* Use a temporary to force order of execution of calls to
5590 `force_operand'. */
5591 rtx tmp;
5592 register rtx op2;
5593 /* Use subtarget as the target for operand 0 of a binary operation. */
296b4ed9 5594 register rtx subtarget = get_subtarget (target);
bbf6f052 5595
8b015896
RH
5596 /* Check for a PIC address load. */
5597 if (flag_pic
5598 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5599 && XEXP (value, 0) == pic_offset_table_rtx
5600 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5601 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5602 || GET_CODE (XEXP (value, 1)) == CONST))
5603 {
5604 if (!subtarget)
5605 subtarget = gen_reg_rtx (GET_MODE (value));
5606 emit_move_insn (subtarget, value);
5607 return subtarget;
5608 }
5609
bbf6f052
RK
5610 if (GET_CODE (value) == PLUS)
5611 binoptab = add_optab;
5612 else if (GET_CODE (value) == MINUS)
5613 binoptab = sub_optab;
5614 else if (GET_CODE (value) == MULT)
5615 {
5616 op2 = XEXP (value, 1);
5617 if (!CONSTANT_P (op2)
5618 && !(GET_CODE (op2) == REG && op2 != subtarget))
5619 subtarget = 0;
5620 tmp = force_operand (XEXP (value, 0), subtarget);
5621 return expand_mult (GET_MODE (value), tmp,
906c4e36 5622 force_operand (op2, NULL_RTX),
91ce572a 5623 target, 1);
bbf6f052
RK
5624 }
5625
5626 if (binoptab)
5627 {
5628 op2 = XEXP (value, 1);
5629 if (!CONSTANT_P (op2)
5630 && !(GET_CODE (op2) == REG && op2 != subtarget))
5631 subtarget = 0;
5632 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5633 {
5634 binoptab = add_optab;
5635 op2 = negate_rtx (GET_MODE (value), op2);
5636 }
5637
5638 /* Check for an addition with OP2 a constant integer and our first
5639 operand a PLUS of a virtual register and something else. In that
5640 case, we want to emit the sum of the virtual register and the
5641 constant first and then add the other value. This allows virtual
5642 register instantiation to simply modify the constant rather than
5643 creating another one around this addition. */
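/* E.g. for (plus (plus (virtual-stack-vars) (reg Y)) (const_int 8)),
   emit virtual-stack-vars + 8 first -- instantiation can later fold
   that into a single frame-pointer offset -- and then add (reg Y). */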
5644 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5645 && GET_CODE (XEXP (value, 0)) == PLUS
5646 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5647 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5648 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5649 {
5650 rtx temp = expand_binop (GET_MODE (value), binoptab,
5651 XEXP (XEXP (value, 0), 0), op2,
5652 subtarget, 0, OPTAB_LIB_WIDEN);
5653 return expand_binop (GET_MODE (value), binoptab, temp,
5654 force_operand (XEXP (XEXP (value, 0), 1), 0),
5655 target, 0, OPTAB_LIB_WIDEN);
5656 }
3a94c984 5657
bbf6f052
RK
5658 tmp = force_operand (XEXP (value, 0), subtarget);
5659 return expand_binop (GET_MODE (value), binoptab, tmp,
906c4e36 5660 force_operand (op2, NULL_RTX),
bbf6f052 5661 target, 0, OPTAB_LIB_WIDEN);
8008b228 5662 /* We give UNSIGNEDP = 0 to expand_binop
bbf6f052
RK
5663 because the only operations we are expanding here are signed ones. */
5664 }
5665 return value;
5666}
5667\f
5668/* Subroutine of expand_expr:
5669 save the non-copied parts (LIST) of an expr (LHS), and return a list
5670 which can restore these values to their previous values,
5671 should something modify their storage. */
5672
5673static tree
5674save_noncopied_parts (lhs, list)
5675 tree lhs;
5676 tree list;
5677{
5678 tree tail;
5679 tree parts = 0;
5680
5681 for (tail = list; tail; tail = TREE_CHAIN (tail))
5682 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5683 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5684 else
5685 {
5686 tree part = TREE_VALUE (tail);
5687 tree part_type = TREE_TYPE (part);
906c4e36 5688 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
1da68f56
RK
5689 rtx target
5690 = assign_temp (build_qualified_type (part_type,
5691 (TYPE_QUALS (part_type)
5692 | TYPE_QUAL_CONST)),
5693 0, 1, 1);
5694
bbf6f052 5695 parts = tree_cons (to_be_saved,
906c4e36 5696 build (RTL_EXPR, part_type, NULL_TREE,
792760b9 5697 (tree) validize_mem (target)),
bbf6f052 5698 parts);
792760b9
RK
5699 store_expr (TREE_PURPOSE (parts),
5700 RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
bbf6f052
RK
5701 }
5702 return parts;
5703}
5704
5705/* Subroutine of expand_expr:
5706 record the non-copied parts (LIST) of an expr (LHS), and return a list
5707 which specifies the initial values of these parts. */
5708
5709static tree
5710init_noncopied_parts (lhs, list)
5711 tree lhs;
5712 tree list;
5713{
5714 tree tail;
5715 tree parts = 0;
5716
5717 for (tail = list; tail; tail = TREE_CHAIN (tail))
5718 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5719 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
c15398de 5720 else if (TREE_PURPOSE (tail))
bbf6f052
RK
5721 {
5722 tree part = TREE_VALUE (tail);
5723 tree part_type = TREE_TYPE (part);
906c4e36 5724 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
bbf6f052
RK
5725 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5726 }
5727 return parts;
5728}
5729
5730/* Subroutine of expand_expr: return nonzero iff there is no way that
e5e809f4
JL
5731 EXP can reference X, which is being modified. TOP_P is nonzero if this
5732 call is going to be used to determine whether we need a temporary
ff439b5f
CB
5733 for EXP, as opposed to a recursive call to this function.
5734
5735 It is always safe for this routine to return zero since it merely
5736 searches for optimization opportunities. */
bbf6f052 5737
8f17b5c5 5738int
e5e809f4 5739safe_from_p (x, exp, top_p)
bbf6f052
RK
5740 rtx x;
5741 tree exp;
e5e809f4 5742 int top_p;
bbf6f052
RK
5743{
5744 rtx exp_rtl = 0;
5745 int i, nops;
1da68f56 5746 static tree save_expr_list;
bbf6f052 5747
6676e72f
RK
5748 if (x == 0
5749 /* If EXP has varying size, we MUST use a target since we currently
8f6562d0
PB
5750 have no way of allocating temporaries of variable size
5751 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5752 So we assume here that something at a higher level has prevented a
f4510f37 5753 clash. This is somewhat bogus, but the best we can do. Only
e5e809f4 5754 do this when X is BLKmode and when we are at the top level. */
d0f062fb 5755 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
f4510f37 5756 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
8f6562d0
PB
5757 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5758 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5759 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5760 != INTEGER_CST)
1da68f56
RK
5761 && GET_MODE (x) == BLKmode)
5762 /* If X is in the outgoing argument area, it is always safe. */
5763 || (GET_CODE (x) == MEM
5764 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5765 || (GET_CODE (XEXP (x, 0)) == PLUS
5766 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
bbf6f052
RK
5767 return 1;
5768
5769 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5770 find the underlying pseudo. */
5771 if (GET_CODE (x) == SUBREG)
5772 {
5773 x = SUBREG_REG (x);
5774 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5775 return 0;
5776 }
5777
1da68f56
RK
5778 /* A SAVE_EXPR might appear many times in the expression passed to the
5779 top-level safe_from_p call, and if it has a complex subexpression,
5780 examining it multiple times could result in a combinatorial explosion.
5781 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5782 with optimization took about 28 minutes to compile -- even though it was
5783 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5784 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5785 we have processed. Note that the only test of top_p was above. */
5786
5787 if (top_p)
5788 {
5789 int rtn;
5790 tree t;
5791
5792 save_expr_list = 0;
5793
5794 rtn = safe_from_p (x, exp, 0);
5795
5796 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5797 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5798
5799 return rtn;
5800 }
bbf6f052 5801
1da68f56 5802 /* Now look at our tree code and possibly recurse. */
bbf6f052
RK
5803 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5804 {
5805 case 'd':
19e7881c 5806 exp_rtl = DECL_RTL_SET_P (exp) ? DECL_RTL (exp) : NULL_RTX;
bbf6f052
RK
5807 break;
5808
5809 case 'c':
5810 return 1;
5811
5812 case 'x':
5813 if (TREE_CODE (exp) == TREE_LIST)
f32fd778 5814 return ((TREE_VALUE (exp) == 0
e5e809f4 5815 || safe_from_p (x, TREE_VALUE (exp), 0))
bbf6f052 5816 && (TREE_CHAIN (exp) == 0
e5e809f4 5817 || safe_from_p (x, TREE_CHAIN (exp), 0)));
ff439b5f
CB
5818 else if (TREE_CODE (exp) == ERROR_MARK)
5819 return 1; /* An already-visited SAVE_EXPR? */
bbf6f052
RK
5820 else
5821 return 0;
5822
5823 case '1':
e5e809f4 5824 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
bbf6f052
RK
5825
5826 case '2':
5827 case '<':
e5e809f4
JL
5828 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5829 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
bbf6f052
RK
5830
5831 case 'e':
5832 case 'r':
5833 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5834 the expression. If it is set, we conflict iff we are that rtx or
5835 both are in memory. Otherwise, we check all operands of the
5836 expression recursively. */
5837
5838 switch (TREE_CODE (exp))
5839 {
5840 case ADDR_EXPR:
e44842fe 5841 return (staticp (TREE_OPERAND (exp, 0))
1da68f56
RK
5842 || TREE_STATIC (exp)
5843 || safe_from_p (x, TREE_OPERAND (exp, 0), 0));
bbf6f052
RK
5844
5845 case INDIRECT_REF:
1da68f56
RK
5846 if (GET_CODE (x) == MEM
5847 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5848 get_alias_set (exp)))
bbf6f052
RK
5849 return 0;
5850 break;
5851
5852 case CALL_EXPR:
f9808f81
MM
5853 /* Assume that the call will clobber all hard registers and
5854 all of memory. */
5855 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5856 || GET_CODE (x) == MEM)
5857 return 0;
bbf6f052
RK
5858 break;
5859
5860 case RTL_EXPR:
3bb5826a
RK
5861 /* If a sequence exists, we would have to scan every instruction
5862 in the sequence to see if it was safe. This is probably not
5863 worthwhile. */
5864 if (RTL_EXPR_SEQUENCE (exp))
bbf6f052
RK
5865 return 0;
5866
3bb5826a 5867 exp_rtl = RTL_EXPR_RTL (exp);
bbf6f052
RK
5868 break;
5869
5870 case WITH_CLEANUP_EXPR:
6ad7895a 5871 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
bbf6f052
RK
5872 break;
5873
5dab5552 5874 case CLEANUP_POINT_EXPR:
e5e809f4 5875 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5dab5552 5876
bbf6f052
RK
5877 case SAVE_EXPR:
5878 exp_rtl = SAVE_EXPR_RTL (exp);
ff439b5f
CB
5879 if (exp_rtl)
5880 break;
5881
1da68f56
RK
5882 /* If we've already scanned this, don't do it again. Otherwise,
5883 show we've scanned it and record for clearing the flag if we're
5884 going on. */
5885 if (TREE_PRIVATE (exp))
5886 return 1;
ff439b5f 5887
1da68f56
RK
5888 TREE_PRIVATE (exp) = 1;
5889 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
ff59bfe6 5890 {
1da68f56
RK
5891 TREE_PRIVATE (exp) = 0;
5892 return 0;
ff59bfe6 5893 }
1da68f56
RK
5894
5895 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
ff439b5f 5896 return 1;
bbf6f052 5897
8129842c
RS
5898 case BIND_EXPR:
5899 /* The only operand we look at is operand 1. The rest aren't
5900 part of the expression. */
e5e809f4 5901 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
8129842c 5902
bbf6f052 5903 case METHOD_CALL_EXPR:
0f41302f 5904 /* This takes an rtx argument, but shouldn't appear here. */
bbf6f052 5905 abort ();
3a94c984 5906
e9a25f70
JL
5907 default:
5908 break;
bbf6f052
RK
5909 }
5910
5911 /* If we have an rtx, we do not need to scan our operands. */
5912 if (exp_rtl)
5913 break;
5914
8f17b5c5 5915 nops = first_rtl_op (TREE_CODE (exp));
bbf6f052
RK
5916 for (i = 0; i < nops; i++)
5917 if (TREE_OPERAND (exp, i) != 0
e5e809f4 5918 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
bbf6f052 5919 return 0;
8f17b5c5
MM
5920
5921 /* If this is a language-specific tree code, it may require
5922 special handling. */
dbbbbf3b
JDA
5923 if ((unsigned int) TREE_CODE (exp)
5924 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
8f17b5c5
MM
5925 && lang_safe_from_p
5926 && !(*lang_safe_from_p) (x, exp))
5927 return 0;
bbf6f052
RK
5928 }
5929
5930 /* If we have an rtl, find any enclosed object. Then see if we conflict
5931 with it. */
5932 if (exp_rtl)
5933 {
5934 if (GET_CODE (exp_rtl) == SUBREG)
5935 {
5936 exp_rtl = SUBREG_REG (exp_rtl);
5937 if (GET_CODE (exp_rtl) == REG
5938 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5939 return 0;
5940 }
5941
5942 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
1da68f56 5943 are memory and they conflict. */
bbf6f052
RK
5944 return ! (rtx_equal_p (x, exp_rtl)
5945 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
1da68f56
RK
5946 && true_dependence (exp_rtl, GET_MODE (x), x,
5947 rtx_addr_varies_p)));
bbf6f052
RK
5948 }
5949
5950 /* If we reach here, it is safe. */
5951 return 1;
5952}
5953
5954/* Subroutine of expand_expr: return nonzero iff EXP is an
5955 expression whose type is statically determinable. */
5956
5957static int
5958fixed_type_p (exp)
5959 tree exp;
5960{
5961 if (TREE_CODE (exp) == PARM_DECL
5962 || TREE_CODE (exp) == VAR_DECL
5963 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5964 || TREE_CODE (exp) == COMPONENT_REF
5965 || TREE_CODE (exp) == ARRAY_REF)
5966 return 1;
5967 return 0;
5968}
01c8a7c8
RK
5969
5970/* Subroutine of expand_expr: return rtx if EXP is a
5971 variable or parameter; else return 0. */
5972
5973static rtx
5974var_rtx (exp)
5975 tree exp;
5976{
5977 STRIP_NOPS (exp);
5978 switch (TREE_CODE (exp))
5979 {
5980 case PARM_DECL:
5981 case VAR_DECL:
5982 return DECL_RTL (exp);
5983 default:
5984 return 0;
5985 }
5986}
dbecbbe4
JL
5987
5988#ifdef MAX_INTEGER_COMPUTATION_MODE
400500c4 5989
dbecbbe4
JL
5990void
5991check_max_integer_computation_mode (exp)
3a94c984 5992 tree exp;
dbecbbe4 5993{
5f652c07 5994 enum tree_code code;
dbecbbe4
JL
5995 enum machine_mode mode;
5996
5f652c07
JM
5997 /* Strip any NOPs that don't change the mode. */
5998 STRIP_NOPS (exp);
5999 code = TREE_CODE (exp);
6000
71bca506
JL
6001 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
6002 if (code == NOP_EXPR
6003 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6004 return;
6005
dbecbbe4
JL
6006 /* First check the type of the overall operation. We need only look at
6007 unary, binary and relational operations. */
6008 if (TREE_CODE_CLASS (code) == '1'
6009 || TREE_CODE_CLASS (code) == '2'
6010 || TREE_CODE_CLASS (code) == '<')
6011 {
6012 mode = TYPE_MODE (TREE_TYPE (exp));
6013 if (GET_MODE_CLASS (mode) == MODE_INT
6014 && mode > MAX_INTEGER_COMPUTATION_MODE)
400500c4 6015 internal_error ("unsupported wide integer operation");
dbecbbe4
JL
6016 }
6017
6018 /* Check operand of a unary op. */
6019 if (TREE_CODE_CLASS (code) == '1')
6020 {
6021 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6022 if (GET_MODE_CLASS (mode) == MODE_INT
6023 && mode > MAX_INTEGER_COMPUTATION_MODE)
400500c4 6024 internal_error ("unsupported wide integer operation");
dbecbbe4 6025 }
3a94c984 6026
dbecbbe4
JL
6027 /* Check operands of a binary/comparison op. */
6028 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
6029 {
6030 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6031 if (GET_MODE_CLASS (mode) == MODE_INT
6032 && mode > MAX_INTEGER_COMPUTATION_MODE)
400500c4 6033 internal_error ("unsupported wide integer operation");
dbecbbe4
JL
6034
6035 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
6036 if (GET_MODE_CLASS (mode) == MODE_INT
6037 && mode > MAX_INTEGER_COMPUTATION_MODE)
400500c4 6038 internal_error ("unsupported wide integer operation");
dbecbbe4
JL
6039 }
6040}
6041#endif
14a774a9 6042\f
bbf6f052
RK
6043/* expand_expr: generate code for computing expression EXP.
6044 An rtx for the computed value is returned. The value is never null.
6045 In the case of a void EXP, const0_rtx is returned.
6046
6047 The value may be stored in TARGET if TARGET is nonzero.
6048 TARGET is just a suggestion; callers must assume that
6049 the rtx returned may not be the same as TARGET.
6050
6051 If TARGET is CONST0_RTX, it means that the value will be ignored.
6052
6053 If TMODE is not VOIDmode, it suggests generating the
6054 result in mode TMODE. But this is done only when convenient.
6055 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6056 TMODE is just a suggestion; callers must assume that
6057 the rtx returned may not have mode TMODE.
6058
d6a5ac33
RK
6059 Note that TARGET may have neither TMODE nor MODE. In that case, it
6060 probably will not be used.
bbf6f052
RK
6061
6062 If MODIFIER is EXPAND_SUM then when EXP is an addition
6063 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6064 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6065 products as above, or REG or MEM, or constant.
6066 Ordinarily in such cases we would output mul or add instructions
6067 and then return a pseudo reg containing the sum.
6068
6069 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6070 it also marks a label as absolutely required (it can't be dead).
26fcb35a 6071 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
d6a5ac33
RK
6072 This is used for outputting expressions used in initializers.
6073
6074 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6075 with a constant address even if that address is not normally legitimate.
6076 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
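/* The common call pattern, as at most call sites in this file, is

	temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);

   i.e. no suggested target, no suggested mode, and modifier 0
   (EXPAND_NORMAL). */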
bbf6f052
RK
6077
6078rtx
6079expand_expr (exp, target, tmode, modifier)
6080 register tree exp;
6081 rtx target;
6082 enum machine_mode tmode;
6083 enum expand_modifier modifier;
6084{
6085 register rtx op0, op1, temp;
6086 tree type = TREE_TYPE (exp);
6087 int unsignedp = TREE_UNSIGNED (type);
68557e14 6088 register enum machine_mode mode;
bbf6f052
RK
6089 register enum tree_code code = TREE_CODE (exp);
6090 optab this_optab;
68557e14
ML
6091 rtx subtarget, original_target;
6092 int ignore;
bbf6f052 6093 tree context;
921b3427
RK
6094 /* Used by check-memory-usage to make modifier read only. */
6095 enum expand_modifier ro_modifier;
bbf6f052 6096
3a94c984 6097 /* Handle ERROR_MARK before anybody tries to access its type. */
85f3d674 6098 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
68557e14
ML
6099 {
6100 op0 = CONST0_RTX (tmode);
6101 if (op0 != 0)
6102 return op0;
6103 return const0_rtx;
6104 }
6105
6106 mode = TYPE_MODE (type);
6107 /* Use subtarget as the target for operand 0 of a binary operation. */
296b4ed9 6108 subtarget = get_subtarget (target);
68557e14
ML
6109 original_target = target;
6110 ignore = (target == const0_rtx
6111 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6112 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6113 || code == COND_EXPR)
6114 && TREE_CODE (type) == VOID_TYPE));
6115
921b3427
RK
6116 /* Make a read-only version of the modifier. */
6117 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
6118 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
6119 ro_modifier = modifier;
6120 else
6121 ro_modifier = EXPAND_NORMAL;
ca695ac9 6122
dd27116b
RK
6123 /* If we are going to ignore this result, we need only do something
6124 if there is a side-effect somewhere in the expression. If there
b50d17a1
RK
6125 is, short-circuit the most common cases here. Note that we must
6126 not call expand_expr with anything but const0_rtx in case this
6127 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
bbf6f052 6128
dd27116b
RK
6129 if (ignore)
6130 {
6131 if (! TREE_SIDE_EFFECTS (exp))
6132 return const0_rtx;
6133
14a774a9
RK
6134 /* Ensure we reference a volatile object even if value is ignored, but
6135 don't do this if all we are doing is taking its address. */
dd27116b
RK
6136 if (TREE_THIS_VOLATILE (exp)
6137 && TREE_CODE (exp) != FUNCTION_DECL
14a774a9
RK
6138 && mode != VOIDmode && mode != BLKmode
6139 && modifier != EXPAND_CONST_ADDRESS)
dd27116b 6140 {
921b3427 6141 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
dd27116b
RK
6142 if (GET_CODE (temp) == MEM)
6143 temp = copy_to_reg (temp);
6144 return const0_rtx;
6145 }
6146
14a774a9
RK
6147 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6148 || code == INDIRECT_REF || code == BUFFER_REF)
dd27116b 6149 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
921b3427 6150 VOIDmode, ro_modifier);
14a774a9 6151 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
b4e3fabb 6152 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
dd27116b 6153 {
b4e3fabb
RK
6154 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6155 ro_modifier);
6156 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
6157 ro_modifier);
dd27116b
RK
6158 return const0_rtx;
6159 }
6160 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6161 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6162 /* If the second operand has no side effects, just evaluate
0f41302f 6163 the first. */
dd27116b 6164 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
921b3427 6165 VOIDmode, ro_modifier);
14a774a9
RK
6166 else if (code == BIT_FIELD_REF)
6167 {
b4e3fabb
RK
6168 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6169 ro_modifier);
6170 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
6171 ro_modifier);
6172 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode,
6173 ro_modifier);
14a774a9
RK
6174 return const0_rtx;
6175 }
3a94c984 6176 ;
90764a87 6177 target = 0;
dd27116b 6178 }
bbf6f052 6179
dbecbbe4 6180#ifdef MAX_INTEGER_COMPUTATION_MODE
5f652c07
JM
6181 /* Only check stuff here if the mode we want is different from the mode
6182 of the expression; if it's the same, check_max_integer_computation_mode
6183 will handle it. Do we really need to check this stuff at all? */
6184
ce3c0b53 6185 if (target
5f652c07 6186 && GET_MODE (target) != mode
ce3c0b53
JL
6187 && TREE_CODE (exp) != INTEGER_CST
6188 && TREE_CODE (exp) != PARM_DECL
ee06cc21 6189 && TREE_CODE (exp) != ARRAY_REF
b4e3fabb 6190 && TREE_CODE (exp) != ARRAY_RANGE_REF
ee06cc21
JL
6191 && TREE_CODE (exp) != COMPONENT_REF
6192 && TREE_CODE (exp) != BIT_FIELD_REF
6193 && TREE_CODE (exp) != INDIRECT_REF
6bcd94ae 6194 && TREE_CODE (exp) != CALL_EXPR
6ab46dff
GRK
6195 && TREE_CODE (exp) != VAR_DECL
6196 && TREE_CODE (exp) != RTL_EXPR)
dbecbbe4
JL
6197 {
6198 enum machine_mode mode = GET_MODE (target);
6199
6200 if (GET_MODE_CLASS (mode) == MODE_INT
6201 && mode > MAX_INTEGER_COMPUTATION_MODE)
400500c4 6202 internal_error ("unsupported wide integer operation");
dbecbbe4
JL
6203 }
6204
5f652c07
JM
6205 if (tmode != mode
6206 && TREE_CODE (exp) != INTEGER_CST
ce3c0b53 6207 && TREE_CODE (exp) != PARM_DECL
ee06cc21 6208 && TREE_CODE (exp) != ARRAY_REF
b4e3fabb 6209 && TREE_CODE (exp) != ARRAY_RANGE_REF
ee06cc21
JL
6210 && TREE_CODE (exp) != COMPONENT_REF
6211 && TREE_CODE (exp) != BIT_FIELD_REF
6212 && TREE_CODE (exp) != INDIRECT_REF
ce3c0b53 6213 && TREE_CODE (exp) != VAR_DECL
6bcd94ae 6214 && TREE_CODE (exp) != CALL_EXPR
6ab46dff 6215 && TREE_CODE (exp) != RTL_EXPR
71bca506 6216 && GET_MODE_CLASS (tmode) == MODE_INT
dbecbbe4 6217 && tmode > MAX_INTEGER_COMPUTATION_MODE)
400500c4 6218 internal_error ("unsupported wide integer operation");
dbecbbe4
JL
6219
6220 check_max_integer_computation_mode (exp);
6221#endif
6222
e44842fe
RK
6223 /* If we will do cse, generate all results into pseudo registers
6224 since 1) that allows cse to find more things
6225 and 2) otherwise cse could produce an insn the machine
6226 cannot support. */
6227
bbf6f052
RK
6228 if (! cse_not_expected && mode != BLKmode && target
6229 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
6230 target = subtarget;
6231
bbf6f052
RK
6232 switch (code)
6233 {
6234 case LABEL_DECL:
b552441b
RS
6235 {
6236 tree function = decl_function_context (exp);
6237 /* Handle using a label in a containing function. */
d0977240
RK
6238 if (function != current_function_decl
6239 && function != inline_function_decl && function != 0)
b552441b
RS
6240 {
6241 struct function *p = find_function_data (function);
49ad7cfa
BS
6242 p->expr->x_forced_labels
6243 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6244 p->expr->x_forced_labels);
b552441b 6245 }
ab87f8c8
JL
6246 else
6247 {
ab87f8c8
JL
6248 if (modifier == EXPAND_INITIALIZER)
6249 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6250 label_rtx (exp),
6251 forced_labels);
6252 }
c5c76735 6253
38a448ca
RH
6254 temp = gen_rtx_MEM (FUNCTION_MODE,
6255 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
d0977240
RK
6256 if (function != current_function_decl
6257 && function != inline_function_decl && function != 0)
26fcb35a
RS
6258 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6259 return temp;
b552441b 6260 }
bbf6f052
RK
6261
6262 case PARM_DECL:
6263 if (DECL_RTL (exp) == 0)
6264 {
6265 error_with_decl (exp, "prior parameter's size depends on `%s'");
4af3895e 6266 return CONST0_RTX (mode);
bbf6f052
RK
6267 }
6268
0f41302f 6269 /* ... fall through ... */
d6a5ac33 6270
bbf6f052 6271 case VAR_DECL:
2dca20cd
RS
6272 /* If a static var's type was incomplete when the decl was written,
6273 but the type is complete now, lay out the decl now. */
d0f062fb 6274 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
2dca20cd
RS
6275 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6276 {
2dca20cd
RS
6277 layout_decl (exp, 0);
6278 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
2dca20cd 6279 }
d6a5ac33 6280
7d384cc0
KR
6281 /* Although static-storage variables start off initialized, according to
6282 ANSI C, a memcpy could overwrite them with uninitialized values. So
6283 we check them too. This also lets us check for read-only variables
6284 accessed via a non-const declaration, in case it won't be detected
6285 any other way (e.g., in an embedded system or OS kernel without
6286 memory protection).
6287
6288 Aggregates are not checked here; they're handled elsewhere. */
01d939e8 6289 if (cfun && current_function_check_memory_usage
49ad7cfa 6290 && code == VAR_DECL
921b3427 6291 && GET_CODE (DECL_RTL (exp)) == MEM
921b3427
RK
6292 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6293 {
6294 enum memory_use_mode memory_usage;
6295 memory_usage = get_memory_usage_from_modifier (modifier);
6296
ea4da9db 6297 in_check_memory_usage = 1;
921b3427 6298 if (memory_usage != MEMORY_USE_DONT)
ebb1b59a
BS
6299 emit_library_call (chkr_check_addr_libfunc,
6300 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
6a9c4aed 6301 XEXP (DECL_RTL (exp), 0), Pmode,
921b3427
RK
6302 GEN_INT (int_size_in_bytes (type)),
6303 TYPE_MODE (sizetype),
956d6950
JL
6304 GEN_INT (memory_usage),
6305 TYPE_MODE (integer_type_node));
ea4da9db 6306 in_check_memory_usage = 0;
921b3427
RK
6307 }
6308
0f41302f 6309 /* ... fall through ... */
d6a5ac33 6310
2dca20cd 6311 case FUNCTION_DECL:
bbf6f052
RK
6312 case RESULT_DECL:
6313 if (DECL_RTL (exp) == 0)
6314 abort ();
d6a5ac33 6315
e44842fe
RK
6316 /* Ensure variable marked as used even if it doesn't go through
6317 a parser. If it hasn't been used yet, write out an external
6318 definition. */
6319 if (! TREE_USED (exp))
6320 {
6321 assemble_external (exp);
6322 TREE_USED (exp) = 1;
6323 }
6324
dc6d66b3
RK
6325 /* Show we haven't gotten RTL for this yet. */
6326 temp = 0;
6327
bbf6f052
RK
6328 /* Handle variables inherited from containing functions. */
6329 context = decl_function_context (exp);
6330
6331 /* We treat inline_function_decl as an alias for the current function
6332 because that is the inline function whose vars, types, etc.
6333 are being merged into the current function.
6334 See expand_inline_function. */
d6a5ac33 6335
bbf6f052
RK
6336 if (context != 0 && context != current_function_decl
6337 && context != inline_function_decl
6338 /* If var is static, we don't need a static chain to access it. */
6339 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6340 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6341 {
6342 rtx addr;
6343
6344 /* Mark as non-local and addressable. */
81feeecb 6345 DECL_NONLOCAL (exp) = 1;
38ee6ed9
JM
6346 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6347 abort ();
bbf6f052
RK
6348 mark_addressable (exp);
6349 if (GET_CODE (DECL_RTL (exp)) != MEM)
6350 abort ();
6351 addr = XEXP (DECL_RTL (exp), 0);
6352 if (GET_CODE (addr) == MEM)
792760b9
RK
6353 addr
6354 = replace_equiv_address (addr,
6355 fix_lexical_addr (XEXP (addr, 0), exp));
bbf6f052
RK
6356 else
6357 addr = fix_lexical_addr (addr, exp);
3bdf5ad1 6358
792760b9 6359 temp = replace_equiv_address (DECL_RTL (exp), addr);
bbf6f052 6360 }
4af3895e 6361
bbf6f052
RK
6362 /* This is the case of an array whose size is to be determined
6363 from its initializer, while the initializer is still being parsed.
6364 See expand_decl. */
d6a5ac33 6365
dc6d66b3
RK
6366 else if (GET_CODE (DECL_RTL (exp)) == MEM
6367 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
792760b9 6368 temp = validize_mem (DECL_RTL (exp));
d6a5ac33
RK
6369
6370 /* If DECL_RTL is memory, we are in the normal case and either
6371 the address is not valid or it is not a register and -fforce-addr
6372 is specified, get the address into a register. */
6373
dc6d66b3
RK
6374 else if (GET_CODE (DECL_RTL (exp)) == MEM
6375 && modifier != EXPAND_CONST_ADDRESS
6376 && modifier != EXPAND_SUM
6377 && modifier != EXPAND_INITIALIZER
6378 && (! memory_address_p (DECL_MODE (exp),
6379 XEXP (DECL_RTL (exp), 0))
6380 || (flag_force_addr
6381 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
792760b9
RK
6382 temp = replace_equiv_address (DECL_RTL (exp),
6383 copy_rtx (XEXP (DECL_RTL (exp), 0)));
1499e0a8 6384
dc6d66b3 6385 /* If we got something, return it. But first, set the alignment
04956a1a 6386 if the address is a register. */
dc6d66b3
RK
6387 if (temp != 0)
6388 {
6389 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
bdb429a5 6390 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
dc6d66b3
RK
6391
6392 return temp;
6393 }
6394
1499e0a8
RK
6395 /* If the mode of DECL_RTL does not match that of the decl, it
6396 must be a promoted value. We return a SUBREG of the wanted mode,
6397 but mark it so that we know that it was already extended. */
6398
6399 if (GET_CODE (DECL_RTL (exp)) == REG
6400 && GET_MODE (DECL_RTL (exp)) != mode)
6401 {
1499e0a8
RK
6402 /* Get the signedness used for this variable. Ensure we get the
6403 same mode we got when the variable was declared. */
78911e8b
RK
6404 if (GET_MODE (DECL_RTL (exp))
6405 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
1499e0a8
RK
6406 abort ();
6407
ddef6bc7 6408 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
1499e0a8
RK
6409 SUBREG_PROMOTED_VAR_P (temp) = 1;
6410 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6411 return temp;
6412 }
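      /* For instance, on a 64-bit RISC target that keeps an 'int'
	 variable promoted in a 64-bit register, a sketch of the RTL is:

	   DECL_RTL (exp):   (reg:DI 42)
	   value returned:   (subreg:SI (reg:DI 42) 0)

	 with SUBREG_PROMOTED_VAR_P set, so users of the value know the
	 high part already holds the sign or zero extension.  */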
6413
bbf6f052
RK
6414 return DECL_RTL (exp);
6415
6416 case INTEGER_CST:
6417 return immed_double_const (TREE_INT_CST_LOW (exp),
05bccae2 6418 TREE_INT_CST_HIGH (exp), mode);
bbf6f052
RK
6419
6420 case CONST_DECL:
921b3427 6421 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
3a94c984 6422 EXPAND_MEMORY_USE_BAD);
bbf6f052
RK
6423
6424 case REAL_CST:
6425 /* If optimized, generate immediate CONST_DOUBLE
3a94c984
KH
6426 which will be turned into memory by reload if necessary.
6427
bbf6f052
RK
6428 We used to force a register so that loop.c could see it. But
6429 this does not allow gen_* patterns to perform optimizations with
6430 the constants. It also produces two insns in cases like "x = 1.0;".
6431 On most machines, floating-point constants are not permitted in
6432 many insns, so we'd end up copying it to a register in any case.
6433
6434 Now, we do the copying in expand_binop, if appropriate. */
6435 return immed_real_const (exp);
6436
6437 case COMPLEX_CST:
6438 case STRING_CST:
6439 if (! TREE_CST_RTL (exp))
bd7cf17e 6440 output_constant_def (exp, 1);
bbf6f052
RK
6441
6442 /* TREE_CST_RTL probably contains a constant address.
6443 On RISC machines where a constant address isn't valid,
6444 make some insns to get that address into a register. */
6445 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6446 && modifier != EXPAND_CONST_ADDRESS
6447 && modifier != EXPAND_INITIALIZER
6448 && modifier != EXPAND_SUM
d6a5ac33
RK
6449 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6450 || (flag_force_addr
6451 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
792760b9
RK
6452 return replace_equiv_address (TREE_CST_RTL (exp),
6453 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
bbf6f052
RK
6454 return TREE_CST_RTL (exp);
6455
bf1e5319 6456 case EXPR_WITH_FILE_LOCATION:
b24f65cd
APB
6457 {
6458 rtx to_return;
3b304f5b 6459 const char *saved_input_filename = input_filename;
b24f65cd
APB
6460 int saved_lineno = lineno;
6461 input_filename = EXPR_WFL_FILENAME (exp);
6462 lineno = EXPR_WFL_LINENO (exp);
6463 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6464 emit_line_note (input_filename, lineno);
6ad7895a 6465 /* Possibly avoid switching back and forth here. */
b24f65cd
APB
6466 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6467 input_filename = saved_input_filename;
6468 lineno = saved_lineno;
6469 return to_return;
6470 }
bf1e5319 6471
bbf6f052
RK
6472 case SAVE_EXPR:
6473 context = decl_function_context (exp);
d6a5ac33 6474
d0977240
RK
6475 /* If this SAVE_EXPR was at global context, assume we are an
6476 initialization function and move it into our context. */
6477 if (context == 0)
6478 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6479
bbf6f052
RK
6480 /* We treat inline_function_decl as an alias for the current function
6481 because that is the inline function whose vars, types, etc.
6482 are being merged into the current function.
6483 See expand_inline_function. */
6484 if (context == current_function_decl || context == inline_function_decl)
6485 context = 0;
6486
6487 /* If this is non-local, handle it. */
6488 if (context)
6489 {
d0977240
RK
6490 /* The following call just exists to abort if the context is
6491 not of a containing function. */
6492 find_function_data (context);
6493
bbf6f052
RK
6494 temp = SAVE_EXPR_RTL (exp);
6495 if (temp && GET_CODE (temp) == REG)
6496 {
6497 put_var_into_stack (exp);
6498 temp = SAVE_EXPR_RTL (exp);
6499 }
6500 if (temp == 0 || GET_CODE (temp) != MEM)
6501 abort ();
792760b9
RK
6502 return
6503 replace_equiv_address (temp,
6504 fix_lexical_addr (XEXP (temp, 0), exp));
bbf6f052
RK
6505 }
6506 if (SAVE_EXPR_RTL (exp) == 0)
6507 {
06089a8b
RK
6508 if (mode == VOIDmode)
6509 temp = const0_rtx;
6510 else
1da68f56
RK
6511 temp = assign_temp (build_qualified_type (type,
6512 (TYPE_QUALS (type)
6513 | TYPE_QUAL_CONST)),
6514 3, 0, 0);
1499e0a8 6515
bbf6f052 6516 SAVE_EXPR_RTL (exp) = temp;
bbf6f052 6517 if (!optimize && GET_CODE (temp) == REG)
38a448ca
RH
6518 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6519 save_expr_regs);
ff78f773
RK
6520
6521 /* If the mode of TEMP does not match that of the expression, it
6522 must be a promoted value. We pass store_expr a SUBREG of the
6523 wanted mode but mark it so that we know that it was already
6524 extended. Note that `unsignedp' was modified above in
6525 this case. */
6526
6527 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6528 {
ddef6bc7 6529 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
ff78f773
RK
6530 SUBREG_PROMOTED_VAR_P (temp) = 1;
6531 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6532 }
6533
4c7a0be9 6534 if (temp == const0_rtx)
921b3427
RK
6535 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6536 EXPAND_MEMORY_USE_BAD);
4c7a0be9
JW
6537 else
6538 store_expr (TREE_OPERAND (exp, 0), temp, 0);
e5e809f4
JL
6539
6540 TREE_USED (exp) = 1;
bbf6f052 6541 }
1499e0a8
RK
6542
6543 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6544 must be a promoted value. We return a SUBREG of the wanted mode,
0f41302f 6545 but mark it so that we know that it was already extended. */
1499e0a8
RK
6546
6547 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6548 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6549 {
e70d22c8
RK
6550 /* Compute the signedness and make the proper SUBREG. */
6551 promote_mode (type, mode, &unsignedp, 0);
ddef6bc7 6552 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
1499e0a8
RK
6553 SUBREG_PROMOTED_VAR_P (temp) = 1;
6554 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6555 return temp;
6556 }
6557
bbf6f052
RK
6558 return SAVE_EXPR_RTL (exp);
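      /* As an illustration, SAVE_EXPRs commonly wrap size expressions
	 that must be evaluated exactly once, e.g. for a variable-length
	 array (hypothetical source):

	   void f (int n) { int a[n]; ... }

	 the size computed from N is wrapped in a SAVE_EXPR; its first
	 expansion fills in SAVE_EXPR_RTL, and every later reference
	 reuses that RTL instead of re-evaluating the expression.  */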
6559
679163cf
MS
6560 case UNSAVE_EXPR:
6561 {
6562 rtx temp;
6563 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6564 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6565 return temp;
6566 }
6567
b50d17a1 6568 case PLACEHOLDER_EXPR:
e9a25f70
JL
6569 {
6570 tree placeholder_expr;
6571
6572 /* If there is an object on the head of the placeholder list,
e5e809f4 6573 see if any object in it is of type TYPE or a pointer to it. For
e9a25f70
JL
6574 further information, see tree.def. */
6575 for (placeholder_expr = placeholder_list;
6576 placeholder_expr != 0;
6577 placeholder_expr = TREE_CHAIN (placeholder_expr))
6578 {
6579 tree need_type = TYPE_MAIN_VARIANT (type);
6580 tree object = 0;
6581 tree old_list = placeholder_list;
6582 tree elt;
6583
e5e809f4 6584 /* Find the outermost reference that is of the type we want.
3a94c984 6585 If none, see if any object has a type that is a pointer to
e5e809f4
JL
6586 the type we want. */
6587 for (elt = TREE_PURPOSE (placeholder_expr);
6588 elt != 0 && object == 0;
6589 elt
6590 = ((TREE_CODE (elt) == COMPOUND_EXPR
6591 || TREE_CODE (elt) == COND_EXPR)
6592 ? TREE_OPERAND (elt, 1)
6593 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6594 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6595 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6596 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6597 ? TREE_OPERAND (elt, 0) : 0))
6598 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6599 object = elt;
e9a25f70 6600
e9a25f70 6601 for (elt = TREE_PURPOSE (placeholder_expr);
e5e809f4
JL
6602 elt != 0 && object == 0;
6603 elt
6604 = ((TREE_CODE (elt) == COMPOUND_EXPR
6605 || TREE_CODE (elt) == COND_EXPR)
6606 ? TREE_OPERAND (elt, 1)
6607 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6608 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6609 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6610 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6611 ? TREE_OPERAND (elt, 0) : 0))
6612 if (POINTER_TYPE_P (TREE_TYPE (elt))
6613 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
e9a25f70 6614 == need_type))
e5e809f4 6615 object = build1 (INDIRECT_REF, need_type, elt);
dc500fa1 6616
e9a25f70 6617 if (object != 0)
2cde2255 6618 {
e9a25f70
JL
6619 /* Expand this object skipping the list entries before
6620 it was found in case it is also a PLACEHOLDER_EXPR.
6621 In that case, we want to translate it using subsequent
6622 entries. */
6623 placeholder_list = TREE_CHAIN (placeholder_expr);
6624 temp = expand_expr (object, original_target, tmode,
6625 ro_modifier);
6626 placeholder_list = old_list;
6627 return temp;
2cde2255 6628 }
e9a25f70
JL
6629 }
6630 }
b50d17a1
RK
6631
6632 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6633 abort ();
6634
6635 case WITH_RECORD_EXPR:
6636 /* Put the object on the placeholder list, expand our first operand,
6637 and pop the list. */
6638 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6639 placeholder_list);
6640 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
921b3427 6641 tmode, ro_modifier);
b50d17a1
RK
6642 placeholder_list = TREE_CHAIN (placeholder_list);
6643 return target;
6644
70e6ca43
APB
6645 case GOTO_EXPR:
6646 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6647 expand_goto (TREE_OPERAND (exp, 0));
6648 else
6649 expand_computed_goto (TREE_OPERAND (exp, 0));
6650 return const0_rtx;
6651
bbf6f052 6652 case EXIT_EXPR:
df4ae160 6653 expand_exit_loop_if_false (NULL,
e44842fe 6654 invert_truthvalue (TREE_OPERAND (exp, 0)));
bbf6f052
RK
6655 return const0_rtx;
6656
f42e28dd
APB
6657 case LABELED_BLOCK_EXPR:
6658 if (LABELED_BLOCK_BODY (exp))
6659 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
30f7a378 6660 /* Should perhaps use expand_label, but this is simpler and safer. */
0a5fee32 6661 do_pending_stack_adjust ();
f42e28dd
APB
6662 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6663 return const0_rtx;
6664
6665 case EXIT_BLOCK_EXPR:
6666 if (EXIT_BLOCK_RETURN (exp))
ab87f8c8 6667 sorry ("returned value in block_exit_expr");
f42e28dd
APB
6668 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6669 return const0_rtx;
6670
bbf6f052 6671 case LOOP_EXPR:
0088fcb1 6672 push_temp_slots ();
bbf6f052
RK
6673 expand_start_loop (1);
6674 expand_expr_stmt (TREE_OPERAND (exp, 0));
6675 expand_end_loop ();
0088fcb1 6676 pop_temp_slots ();
bbf6f052
RK
6677
6678 return const0_rtx;
6679
6680 case BIND_EXPR:
6681 {
6682 tree vars = TREE_OPERAND (exp, 0);
6683 int vars_need_expansion = 0;
6684
6685 /* Need to open a binding contour here because
e976b8b2 6686 if there are any cleanups they must be contained here. */
8e91754e 6687 expand_start_bindings (2);
bbf6f052 6688
2df53c0b
RS
6689 /* Mark the corresponding BLOCK for output in its proper place. */
6690 if (TREE_OPERAND (exp, 2) != 0
6691 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6692 insert_block (TREE_OPERAND (exp, 2));
bbf6f052
RK
6693
6694 /* If VARS have not yet been expanded, expand them now. */
6695 while (vars)
6696 {
19e7881c 6697 if (!DECL_RTL_SET_P (vars))
bbf6f052
RK
6698 {
6699 vars_need_expansion = 1;
6700 expand_decl (vars);
6701 }
6702 expand_decl_init (vars);
6703 vars = TREE_CHAIN (vars);
6704 }
6705
921b3427 6706 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
bbf6f052
RK
6707
6708 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6709
6710 return temp;
6711 }
6712
6713 case RTL_EXPR:
83b853c9
JM
6714 if (RTL_EXPR_SEQUENCE (exp))
6715 {
6716 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6717 abort ();
6718 emit_insns (RTL_EXPR_SEQUENCE (exp));
6719 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6720 }
64dc53f3
MM
6721 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6722 free_temps_for_rtl_expr (exp);
bbf6f052
RK
6723 return RTL_EXPR_RTL (exp);
6724
6725 case CONSTRUCTOR:
dd27116b
RK
6726 /* If we don't need the result, just ensure we evaluate any
6727 subexpressions. */
6728 if (ignore)
6729 {
6730 tree elt;
6731 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
921b3427
RK
6732 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6733 EXPAND_MEMORY_USE_BAD);
dd27116b
RK
6734 return const0_rtx;
6735 }
3207b172 6736
4af3895e
JVA
6737 /* All elts simple constants => refer to a constant in memory. But
6738 if this is a non-BLKmode mode, let it store a field at a time
6739 since that should make a CONST_INT or CONST_DOUBLE when we
3207b172 6740 fold. Likewise, if we have a target we can use, it is best to
d720b9d1
RK
6741 store directly into the target unless the type is large enough
6742 that memcpy will be used. If we are making an initializer and
3207b172 6743 all operands are constant, put it in memory as well. */
dd27116b 6744 else if ((TREE_STATIC (exp)
3207b172 6745 && ((mode == BLKmode
e5e809f4 6746 && ! (target != 0 && safe_from_p (target, exp, 1)))
d720b9d1 6747 || TREE_ADDRESSABLE (exp)
19caa751 6748 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
3a94c984 6749 && (! MOVE_BY_PIECES_P
19caa751
RK
6750 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6751 TYPE_ALIGN (type)))
9de08200 6752 && ! mostly_zeros_p (exp))))
dd27116b 6753 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
bbf6f052 6754 {
bd7cf17e 6755 rtx constructor = output_constant_def (exp, 1);
19caa751 6756
b552441b
RS
6757 if (modifier != EXPAND_CONST_ADDRESS
6758 && modifier != EXPAND_INITIALIZER
792760b9
RK
6759 && modifier != EXPAND_SUM)
6760 constructor = validize_mem (constructor);
6761
bbf6f052
RK
6762 return constructor;
6763 }
bbf6f052
RK
6764 else
6765 {
e9ac02a6
JW
6766 /* Handle calls that pass values in multiple non-contiguous
6767 locations. The Irix 6 ABI has examples of this. */
e5e809f4 6768 if (target == 0 || ! safe_from_p (target, exp, 1)
e9ac02a6 6769 || GET_CODE (target) == PARALLEL)
1da68f56
RK
6770 target
6771 = assign_temp (build_qualified_type (type,
6772 (TYPE_QUALS (type)
6773 | (TREE_READONLY (exp)
6774 * TYPE_QUAL_CONST))),
6775 TREE_ADDRESSABLE (exp), 1, 1);
07604beb 6776
b7010412
RK
6777 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6778 int_size_in_bytes (TREE_TYPE (exp)));
bbf6f052
RK
6779 return target;
6780 }
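      /* A source-level sketch of the two paths above (hypothetical):

	   static struct s v = { 1, 2 };   all elements constant and the
	     decl static, so it is emitted once via output_constant_def;

	   struct s w = { 1, x };   not all constant (or no suitable
	     TARGET), so store_constructor expands it as per-field stores
	     into a temporary or into TARGET.  */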
6781
6782 case INDIRECT_REF:
6783 {
6784 tree exp1 = TREE_OPERAND (exp, 0);
7581a30f 6785 tree index;
3a94c984
KH
6786 tree string = string_constant (exp1, &index);
6787
06eaa86f 6788 /* Try to optimize reads from const strings. */
7581a30f
JW
6789 if (string
6790 && TREE_CODE (string) == STRING_CST
6791 && TREE_CODE (index) == INTEGER_CST
05bccae2 6792 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
7581a30f 6793 && GET_MODE_CLASS (mode) == MODE_INT
06eaa86f
JW
6794 && GET_MODE_SIZE (mode) == 1
6795 && modifier != EXPAND_MEMORY_USE_WO)
05bccae2
RK
6796 return
6797 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
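	  /* E.g. (hypothetical source)  char c = *("abc" + 1);  expands
	     directly to the constant 'b' instead of a load from the
	     string's memory image.  */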
bbf6f052 6798
405f0da6
JW
6799 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6800 op0 = memory_address (mode, op0);
8c8a8e34 6801
01d939e8 6802 if (cfun && current_function_check_memory_usage
49ad7cfa 6803 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
921b3427
RK
6804 {
6805 enum memory_use_mode memory_usage;
6806 memory_usage = get_memory_usage_from_modifier (modifier);
6807
6808 if (memory_usage != MEMORY_USE_DONT)
c85f7c16
JL
6809 {
6810 in_check_memory_usage = 1;
ebb1b59a
BS
6811 emit_library_call (chkr_check_addr_libfunc,
6812 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, op0,
6813 Pmode, GEN_INT (int_size_in_bytes (type)),
c85f7c16
JL
6814 TYPE_MODE (sizetype),
6815 GEN_INT (memory_usage),
6816 TYPE_MODE (integer_type_node));
6817 in_check_memory_usage = 0;
6818 }
921b3427
RK
6819 }
6820
38a448ca 6821 temp = gen_rtx_MEM (mode, op0);
3bdf5ad1 6822 set_mem_attributes (temp, exp, 0);
1125706f
RK
6823
6824 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6825 here, because, in C and C++, the fact that a location is accessed
6826 through a pointer to const does not mean that the value there can
6827 never change. Languages where it can never change should
6828 also set TREE_STATIC. */
5cb7a25a 6829 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
14a774a9
RK
6830
6831 /* If we are writing to this object and its type is a record with
6832 readonly fields, we must mark it as readonly so it will
6833 conflict with readonly references to those fields. */
1da68f56 6834 if (modifier == EXPAND_MEMORY_USE_WO && readonly_fields_p (type))
14a774a9
RK
6835 RTX_UNCHANGING_P (temp) = 1;
6836
8c8a8e34
JW
6837 return temp;
6838 }
bbf6f052
RK
6839
6840 case ARRAY_REF:
742920c7
RK
6841 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6842 abort ();
bbf6f052 6843
bbf6f052 6844 {
742920c7
RK
6845 tree array = TREE_OPERAND (exp, 0);
6846 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6847 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
fed3cef0 6848 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
08293add 6849 HOST_WIDE_INT i;
b50d17a1 6850
d4c89139
PB
6851 /* Optimize the special-case of a zero lower bound.
6852
6853 We convert the low_bound to sizetype to avoid some problems
6854 with constant folding. (E.g. suppose the lower bound is 1,
6855 and its mode is QI. Without the conversion, (ARRAY
6856 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
fed3cef0 6857 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
d4c89139 6858
742920c7 6859 if (! integer_zerop (low_bound))
fed3cef0 6860 index = size_diffop (index, convert (sizetype, low_bound));
742920c7 6861
742920c7 6862 /* Fold an expression like: "foo"[2].
ad2e7dd0
RK
6863 This is not done in fold so it won't happen inside &.
6864 Don't fold if this is for wide characters since it's too
6865 difficult to do correctly and this is a very rare case. */
742920c7 6866
cb5fa0f8
RK
6867 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6868 && TREE_CODE (array) == STRING_CST
742920c7 6869 && TREE_CODE (index) == INTEGER_CST
05bccae2 6870 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
ad2e7dd0
RK
6871 && GET_MODE_CLASS (mode) == MODE_INT
6872 && GET_MODE_SIZE (mode) == 1)
05bccae2
RK
6873 return
6874 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
bbf6f052 6875
742920c7
RK
6876 /* If this is a constant index into a constant array,
6877 just get the value from the array. Handle both the cases when
6878 we have an explicit constructor and when our operand is a variable
6879 that was declared const. */
4af3895e 6880
cb5fa0f8
RK
6881 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6882 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
05bccae2 6883 && TREE_CODE (index) == INTEGER_CST
3a94c984 6884 && 0 > compare_tree_int (index,
05bccae2
RK
6885 list_length (CONSTRUCTOR_ELTS
6886 (TREE_OPERAND (exp, 0)))))
742920c7 6887 {
05bccae2
RK
6888 tree elem;
6889
6890 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6891 i = TREE_INT_CST_LOW (index);
6892 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6893 ;
6894
6895 if (elem)
6896 return expand_expr (fold (TREE_VALUE (elem)), target,
6897 tmode, ro_modifier);
742920c7 6898 }
3a94c984 6899
742920c7 6900 else if (optimize >= 1
cb5fa0f8
RK
6901 && modifier != EXPAND_CONST_ADDRESS
6902 && modifier != EXPAND_INITIALIZER
742920c7
RK
6903 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6904 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6905 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6906 {
08293add 6907 if (TREE_CODE (index) == INTEGER_CST)
742920c7
RK
6908 {
6909 tree init = DECL_INITIAL (array);
6910
742920c7
RK
6911 if (TREE_CODE (init) == CONSTRUCTOR)
6912 {
665f2503 6913 tree elem;
742920c7 6914
05bccae2 6915 for (elem = CONSTRUCTOR_ELTS (init);
5cb1bea4
JM
6916 (elem
6917 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
05bccae2
RK
6918 elem = TREE_CHAIN (elem))
6919 ;
6920
c54b0a5e 6921 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
742920c7 6922 return expand_expr (fold (TREE_VALUE (elem)), target,
921b3427 6923 tmode, ro_modifier);
742920c7
RK
6924 }
6925 else if (TREE_CODE (init) == STRING_CST
05bccae2
RK
6926 && 0 > compare_tree_int (index,
6927 TREE_STRING_LENGTH (init)))
5c80f6e6
JJ
6928 {
6929 tree type = TREE_TYPE (TREE_TYPE (init));
6930 enum machine_mode mode = TYPE_MODE (type);
6931
6932 if (GET_MODE_CLASS (mode) == MODE_INT
6933 && GET_MODE_SIZE (mode) == 1)
6934 return (GEN_INT
6935 (TREE_STRING_POINTER
6936 (init)[TREE_INT_CST_LOW (index)]));
6937 }
742920c7
RK
6938 }
6939 }
6940 }
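	/* E.g. (hypothetical source):

	     static const int tbl[] = { 10, 20, 30 };
	     ... tbl[1] ...

	   expands straight to the constant 20, either from the explicit
	   CONSTRUCTOR or, when optimizing, from the DECL_INITIAL of a
	   const declaration.  */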
3a94c984 6941 /* Fall through. */
bbf6f052
RK
6942
6943 case COMPONENT_REF:
6944 case BIT_FIELD_REF:
b4e3fabb 6945 case ARRAY_RANGE_REF:
4af3895e 6946 /* If the operand is a CONSTRUCTOR, we can just extract the
7a0b7b9a
RK
6947 appropriate field if it is present. Don't do this if we have
6948 already written the data since we want to refer to that copy
6949 and varasm.c assumes that's what we'll do. */
b4e3fabb 6950 if (code == COMPONENT_REF
7a0b7b9a
RK
6951 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6952 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
4af3895e
JVA
6953 {
6954 tree elt;
6955
6956 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6957 elt = TREE_CHAIN (elt))
86b5812c
RK
6958 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6959 /* We can normally use the value of the field in the
6960 CONSTRUCTOR. However, if this is a bitfield in
6961 an integral mode that we can fit in a HOST_WIDE_INT,
6962 we must mask only the number of bits in the bitfield,
6963 since this is done implicitly by the constructor. If
6964 the bitfield does not meet either of those conditions,
6965 we can't do this optimization. */
6966 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6967 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6968 == MODE_INT)
6969 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6970 <= HOST_BITS_PER_WIDE_INT))))
6971 {
3a94c984 6972 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
86b5812c
RK
6973 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6974 {
9df2c88c
RK
6975 HOST_WIDE_INT bitsize
6976 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
86b5812c
RK
6977
6978 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6979 {
6980 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6981 op0 = expand_and (op0, op1, target);
6982 }
6983 else
6984 {
e5e809f4
JL
6985 enum machine_mode imode
6986 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
86b5812c 6987 tree count
e5e809f4
JL
6988 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6989 0);
86b5812c
RK
6990
6991 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6992 target, 0);
6993 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6994 target, 0);
6995 }
6996 }
6997
6998 return op0;
6999 }
4af3895e
JVA
7000 }
7001
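      /* A plain-C sketch of the masking and sign-extension done just
	 above on a HOST_WIDE_INT-sized value (illustrative only):

	   static long
	   extract_bits (long val, int bitsize, int unsignedp)
	   {
	     int shift = sizeof (long) * CHAR_BIT - bitsize;
	     if (unsignedp)
	       return val & (((long) 1 << bitsize) - 1);
	     return (long) ((unsigned long) val << shift) >> shift;
	   }
       */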
bbf6f052
RK
7002 {
7003 enum machine_mode mode1;
770ae6cc 7004 HOST_WIDE_INT bitsize, bitpos;
7bb0943f 7005 tree offset;
bbf6f052 7006 int volatilep = 0;
729a2125 7007 unsigned int alignment;
839c4796
RK
7008 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7009 &mode1, &unsignedp, &volatilep,
7010 &alignment);
bbf6f052 7011
e7f3c83f
RK
7012 /* If we got back the original object, something is wrong. Perhaps
7013 we are evaluating an expression too early. In any event, don't
7014 infinitely recurse. */
7015 if (tem == exp)
7016 abort ();
7017
3d27140a 7018 /* If TEM's type is a union of variable size, pass TARGET to the inner
b74f5ff2
RK
 7019 computation, since it will need a temporary and TARGET is known
 7020 to be safe to use for that. This occurs in unchecked conversion in Ada. */
3a94c984 7021
b74f5ff2
RK
7022 op0 = expand_expr (tem,
7023 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7024 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7025 != INTEGER_CST)
7026 ? target : NULL_RTX),
4ed67205 7027 VOIDmode,
14a774a9
RK
7028 (modifier == EXPAND_INITIALIZER
7029 || modifier == EXPAND_CONST_ADDRESS)
e5e809f4 7030 ? modifier : EXPAND_NORMAL);
bbf6f052 7031
8c8a8e34 7032 /* If this is a constant, put it into a register if it is a
14a774a9 7033 legitimate constant and OFFSET is 0 and memory if it isn't. */
8c8a8e34
JW
7034 if (CONSTANT_P (op0))
7035 {
7036 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
14a774a9
RK
7037 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7038 && offset == 0)
8c8a8e34
JW
7039 op0 = force_reg (mode, op0);
7040 else
7041 op0 = validize_mem (force_const_mem (mode, op0));
7042 }
7043
7bb0943f
RS
7044 if (offset != 0)
7045 {
906c4e36 7046 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7bb0943f 7047
a2725049 7048 /* If this object is in a register, put it into memory.
14a774a9
RK
7049 This case can't occur in C, but can in Ada if we have
7050 unchecked conversion of an expression from a scalar type to
7051 an array or record type. */
7052 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7053 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7054 {
d04218c0
RK
7055 /* If the operand is a SAVE_EXPR, we can deal with this by
7056 forcing the SAVE_EXPR into memory. */
7057 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
eeb35b45
RK
7058 {
7059 put_var_into_stack (TREE_OPERAND (exp, 0));
7060 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7061 }
d04218c0
RK
7062 else
7063 {
7064 tree nt
7065 = build_qualified_type (TREE_TYPE (tem),
7066 (TYPE_QUALS (TREE_TYPE (tem))
7067 | TYPE_QUAL_CONST));
7068 rtx memloc = assign_temp (nt, 1, 1, 1);
7069
7070 mark_temp_addr_taken (memloc);
7071 emit_move_insn (memloc, op0);
7072 op0 = memloc;
7073 }
14a774a9
RK
7074 }
7075
7bb0943f
RS
7076 if (GET_CODE (op0) != MEM)
7077 abort ();
2d48c13d
JL
7078
7079 if (GET_MODE (offset_rtx) != ptr_mode)
bd070e1a 7080 {
2d48c13d 7081#ifdef POINTERS_EXTEND_UNSIGNED
822a3443 7082 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
2d48c13d 7083#else
bd070e1a 7084 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
2d48c13d 7085#endif
bd070e1a 7086 }
2d48c13d 7087
14a774a9 7088 /* A constant address in OP0 can have VOIDmode; we must not
efd07ca7 7089 try to call force_reg in that case, so avoid it. */
89752202
HB
7090 if (GET_CODE (op0) == MEM
7091 && GET_MODE (op0) == BLKmode
efd07ca7 7092 && GET_MODE (XEXP (op0, 0)) != VOIDmode
14a774a9 7093 && bitsize != 0
3a94c984 7094 && (bitpos % bitsize) == 0
89752202 7095 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
19caa751 7096 && alignment == GET_MODE_ALIGNMENT (mode1))
89752202 7097 {
f4ef873c
RK
7098 rtx temp = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7099
89752202
HB
7100 if (GET_CODE (XEXP (temp, 0)) == REG)
7101 op0 = temp;
7102 else
792760b9
RK
7103 op0 = (replace_equiv_address
7104 (op0,
7105 force_reg (GET_MODE (XEXP (temp, 0)),
7106 XEXP (temp, 0))));
89752202
HB
7107 bitpos = 0;
7108 }
7109
7bb0943f 7110 op0 = change_address (op0, VOIDmode,
38a448ca 7111 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
c5c76735
JL
7112 force_reg (ptr_mode,
7113 offset_rtx)));
7bb0943f
RS
7114 }
7115
bbf6f052
RK
7116 /* Don't forget about volatility even if this is a bitfield. */
7117 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7118 {
7119 op0 = copy_rtx (op0);
7120 MEM_VOLATILE_P (op0) = 1;
7121 }
7122
921b3427 7123 /* Check the access. */
32919a0d
RK
7124 if (cfun != 0 && current_function_check_memory_usage
7125 && GET_CODE (op0) == MEM)
3a94c984 7126 {
921b3427
RK
7127 enum memory_use_mode memory_usage;
7128 memory_usage = get_memory_usage_from_modifier (modifier);
7129
7130 if (memory_usage != MEMORY_USE_DONT)
7131 {
7132 rtx to;
7133 int size;
7134
7135 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
7136 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
7137
7138 /* Check the access right of the pointer. */
ea4da9db 7139 in_check_memory_usage = 1;
e9a25f70 7140 if (size > BITS_PER_UNIT)
ebb1b59a
BS
7141 emit_library_call (chkr_check_addr_libfunc,
7142 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, to,
7143 Pmode, GEN_INT (size / BITS_PER_UNIT),
e9a25f70 7144 TYPE_MODE (sizetype),
3a94c984 7145 GEN_INT (memory_usage),
956d6950 7146 TYPE_MODE (integer_type_node));
ea4da9db 7147 in_check_memory_usage = 0;
921b3427
RK
7148 }
7149 }
7150
ccc98036
RS
7151 /* In cases where an aligned union has an unaligned object
7152 as a field, we might be extracting a BLKmode value from
7153 an integer-mode (e.g., SImode) object. Handle this case
7154 by doing the extract into an object as wide as the field
7155 (which we know to be the width of a basic mode), then
cb5fa0f8 7156 storing into memory, and changing the mode to BLKmode. */
bbf6f052 7157 if (mode1 == VOIDmode
ccc98036 7158 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
cb5fa0f8
RK
7159 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7160 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
10c2a453
RK
7161 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7162 && modifier != EXPAND_CONST_ADDRESS
7163 && modifier != EXPAND_INITIALIZER)
cb5fa0f8
RK
7164 /* If the field isn't aligned enough to fetch as a memref,
7165 fetch it as a bit field. */
7166 || (mode1 != BLKmode
7167 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
7168 && ((TYPE_ALIGN (TREE_TYPE (tem))
7169 < GET_MODE_ALIGNMENT (mode))
7170 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7171 /* If the type and the field are a constant size and the
7172 size of the type isn't the same size as the bitfield,
7173 we must use bitfield operations. */
7174 || (bitsize >= 0
7175 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7176 == INTEGER_CST)
7177 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7178 bitsize))
7179 || (mode == BLKmode
e1565e65 7180 && SLOW_UNALIGNED_ACCESS (mode, alignment)
19caa751 7181 && (TYPE_ALIGN (type) > alignment
14a774a9 7182 || bitpos % TYPE_ALIGN (type) != 0)))
bbf6f052 7183 {
bbf6f052
RK
7184 enum machine_mode ext_mode = mode;
7185
14a774a9
RK
7186 if (ext_mode == BLKmode
7187 && ! (target != 0 && GET_CODE (op0) == MEM
7188 && GET_CODE (target) == MEM
7189 && bitpos % BITS_PER_UNIT == 0))
bbf6f052
RK
7190 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7191
7192 if (ext_mode == BLKmode)
a281e72d
RK
7193 {
7194 /* In this case, BITPOS must start at a byte boundary and
7195 TARGET, if specified, must be a MEM. */
7196 if (GET_CODE (op0) != MEM
7197 || (target != 0 && GET_CODE (target) != MEM)
7198 || bitpos % BITS_PER_UNIT != 0)
7199 abort ();
7200
f4ef873c 7201 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
a281e72d
RK
7202 if (target == 0)
7203 target = assign_temp (type, 0, 1, 1);
7204
7205 emit_block_move (target, op0,
bd5dab53
RK
7206 bitsize == -1 ? expr_size (exp)
7207 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7208 / BITS_PER_UNIT),
19caa751 7209 BITS_PER_UNIT);
3a94c984 7210
a281e72d
RK
7211 return target;
7212 }
bbf6f052 7213
dc6d66b3
RK
7214 op0 = validize_mem (op0);
7215
7216 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
bdb429a5 7217 mark_reg_pointer (XEXP (op0, 0), alignment);
dc6d66b3
RK
7218
7219 op0 = extract_bit_field (op0, bitsize, bitpos,
bbf6f052 7220 unsignedp, target, ext_mode, ext_mode,
034f9101 7221 alignment,
bbf6f052 7222 int_size_in_bytes (TREE_TYPE (tem)));
ef19912d
RK
7223
7224 /* If the result is a record type and BITSIZE is narrower than
7225 the mode of OP0, an integral mode, and this is a big endian
7226 machine, we must put the field into the high-order bits. */
7227 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7228 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7229 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
7230 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7231 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7232 - bitsize),
7233 op0, 1);
7234
bbf6f052
RK
7235 if (mode == BLKmode)
7236 {
27fb3e16 7237 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
1da68f56
RK
7238 TYPE_QUAL_CONST);
7239 rtx new = assign_temp (nt, 0, 1, 1);
bbf6f052
RK
7240
7241 emit_move_insn (new, op0);
7242 op0 = copy_rtx (new);
7243 PUT_MODE (op0, BLKmode);
7244 }
7245
7246 return op0;
7247 }
7248
05019f83
RK
7249 /* If the result is BLKmode, use that to access the object
7250 now as well. */
7251 if (mode == BLKmode)
7252 mode1 = BLKmode;
7253
bbf6f052
RK
7254 /* Get a reference to just this component. */
7255 if (modifier == EXPAND_CONST_ADDRESS
7256 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
f1ec5147 7257 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
bbf6f052 7258 else
f4ef873c 7259 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
41472af8 7260
3bdf5ad1 7261 set_mem_attributes (op0, exp, 0);
dc6d66b3 7262 if (GET_CODE (XEXP (op0, 0)) == REG)
bdb429a5 7263 mark_reg_pointer (XEXP (op0, 0), alignment);
dc6d66b3 7264
bbf6f052 7265 MEM_VOLATILE_P (op0) |= volatilep;
0d15e60c 7266 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
08bbd316 7267 || modifier == EXPAND_CONST_ADDRESS
0d15e60c 7268 || modifier == EXPAND_INITIALIZER)
bbf6f052 7269 return op0;
0d15e60c 7270 else if (target == 0)
bbf6f052 7271 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
0d15e60c 7272
bbf6f052
RK
7273 convert_move (target, op0, unsignedp);
7274 return target;
7275 }
7276
bbf6f052
RK
7277 /* Intended for a reference to a buffer of a file-object in Pascal.
7278 But it's not certain that a special tree code will really be
7279 necessary for these. INDIRECT_REF might work for them. */
7280 case BUFFER_REF:
7281 abort ();
7282
7308a047 7283 case IN_EXPR:
7308a047 7284 {
d6a5ac33
RK
7285 /* Pascal set IN expression.
7286
7287 Algorithm:
7288 rlo = set_low - (set_low%bits_per_word);
7289 the_word = set [ (index - rlo)/bits_per_word ];
7290 bit_index = index % bits_per_word;
7291 bitmask = 1 << bit_index;
7292 return !!(the_word & bitmask); */
7293
7308a047
RS
7294 tree set = TREE_OPERAND (exp, 0);
7295 tree index = TREE_OPERAND (exp, 1);
d6a5ac33 7296 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7308a047 7297 tree set_type = TREE_TYPE (set);
7308a047
RS
7298 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7299 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
d6a5ac33
RK
7300 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7301 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7302 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7303 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7304 rtx setaddr = XEXP (setval, 0);
7305 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7308a047
RS
7306 rtx rlow;
7307 rtx diff, quo, rem, addr, bit, result;
7308a047 7308
d6a5ac33
RK
7309 /* If domain is empty, answer is no. Likewise if index is constant
7310 and out of bounds. */
51723711 7311 if (((TREE_CODE (set_high_bound) == INTEGER_CST
d6a5ac33 7312 && TREE_CODE (set_low_bound) == INTEGER_CST
51723711 7313 && tree_int_cst_lt (set_high_bound, set_low_bound))
d6a5ac33
RK
7314 || (TREE_CODE (index) == INTEGER_CST
7315 && TREE_CODE (set_low_bound) == INTEGER_CST
7316 && tree_int_cst_lt (index, set_low_bound))
7317 || (TREE_CODE (set_high_bound) == INTEGER_CST
7318 && TREE_CODE (index) == INTEGER_CST
7319 && tree_int_cst_lt (set_high_bound, index))))
7308a047
RS
7320 return const0_rtx;
7321
d6a5ac33
RK
7322 if (target == 0)
7323 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7308a047
RS
7324
7325 /* If we get here, we have to generate the code for both cases
7326 (in range and out of range). */
7327
7328 op0 = gen_label_rtx ();
7329 op1 = gen_label_rtx ();
7330
7331 if (! (GET_CODE (index_val) == CONST_INT
7332 && GET_CODE (lo_r) == CONST_INT))
7333 {
c5d5d461
JL
7334 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7335 GET_MODE (index_val), iunsignedp, 0, op1);
7308a047
RS
7336 }
7337
7338 if (! (GET_CODE (index_val) == CONST_INT
7339 && GET_CODE (hi_r) == CONST_INT))
7340 {
c5d5d461
JL
7341 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7342 GET_MODE (index_val), iunsignedp, 0, op1);
7308a047
RS
7343 }
7344
7345 /* Calculate the element number of bit zero in the first word
7346 of the set. */
7347 if (GET_CODE (lo_r) == CONST_INT)
17938e57 7348 rlow = GEN_INT (INTVAL (lo_r)
3a94c984 7349 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7308a047 7350 else
17938e57
RK
7351 rlow = expand_binop (index_mode, and_optab, lo_r,
7352 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
d6a5ac33 7353 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7308a047 7354
d6a5ac33
RK
7355 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7356 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7308a047
RS
7357
7358 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
d6a5ac33 7359 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7308a047 7360 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
d6a5ac33
RK
7361 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7362
7308a047 7363 addr = memory_address (byte_mode,
d6a5ac33
RK
7364 expand_binop (index_mode, add_optab, diff,
7365 setaddr, NULL_RTX, iunsignedp,
17938e57 7366 OPTAB_LIB_WIDEN));
d6a5ac33 7367
3a94c984 7368 /* Extract the bit we want to examine. */
7308a047 7369 bit = expand_shift (RSHIFT_EXPR, byte_mode,
38a448ca 7370 gen_rtx_MEM (byte_mode, addr),
17938e57
RK
7371 make_tree (TREE_TYPE (index), rem),
7372 NULL_RTX, 1);
7373 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7374 GET_MODE (target) == byte_mode ? target : 0,
7308a047 7375 1, OPTAB_LIB_WIDEN);
17938e57
RK
7376
7377 if (result != target)
7378 convert_move (target, result, 1);
7308a047
RS
7379
7380 /* Output the code to handle the out-of-range case. */
7381 emit_jump (op0);
7382 emit_label (op1);
7383 emit_move_insn (target, const0_rtx);
7384 emit_label (op0);
7385 return target;
7386 }
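      /* A self-contained C rendering of the algorithm in the comment at
	 the top of this case, with bits_per_word fixed at 8 to match the
	 byte_mode accesses emitted here (names are illustrative):

	   static int
	   set_contains (const unsigned char *set, int set_low, int index)
	   {
	     int rlo = set_low - (set_low % 8);
	     unsigned char the_word = set[(index - rlo) / 8];
	     int bit_index = index % 8;
	     return (the_word & (1 << bit_index)) != 0;
	   }
       */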
7387
bbf6f052 7388 case WITH_CLEANUP_EXPR:
6ad7895a 7389 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
bbf6f052 7390 {
6ad7895a 7391 WITH_CLEANUP_EXPR_RTL (exp)
921b3427 7392 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6ad7895a 7393 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 1));
e976b8b2 7394
bbf6f052 7395 /* That's it for this cleanup. */
6ad7895a 7396 TREE_OPERAND (exp, 1) = 0;
bbf6f052 7397 }
6ad7895a 7398 return WITH_CLEANUP_EXPR_RTL (exp);
bbf6f052 7399
5dab5552
MS
7400 case CLEANUP_POINT_EXPR:
7401 {
e976b8b2
MS
7402 /* Start a new binding layer that will keep track of all cleanup
7403 actions to be performed. */
8e91754e 7404 expand_start_bindings (2);
e976b8b2 7405
d93d4205 7406 target_temp_slot_level = temp_slot_level;
e976b8b2 7407
921b3427 7408 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
f283f66b
JM
7409 /* If we're going to use this value, load it up now. */
7410 if (! ignore)
7411 op0 = force_not_mem (op0);
d93d4205 7412 preserve_temp_slots (op0);
e976b8b2 7413 expand_end_bindings (NULL_TREE, 0, 0);
5dab5552
MS
7414 }
7415 return op0;
7416
bbf6f052
RK
7417 case CALL_EXPR:
7418 /* Check for a built-in function. */
7419 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
d6a5ac33
RK
7420 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7421 == FUNCTION_DECL)
bbf6f052 7422 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
c70eaeaf
KG
7423 {
7424 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7425 == BUILT_IN_FRONTEND)
7426 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7427 else
7428 return expand_builtin (exp, target, subtarget, tmode, ignore);
7429 }
d6a5ac33 7430
8129842c 7431 return expand_call (exp, target, ignore);
bbf6f052
RK
7432
7433 case NON_LVALUE_EXPR:
7434 case NOP_EXPR:
7435 case CONVERT_EXPR:
7436 case REFERENCE_EXPR:
4a53008b 7437 if (TREE_OPERAND (exp, 0) == error_mark_node)
a592f288 7438 return const0_rtx;
4a53008b 7439
bbf6f052
RK
7440 if (TREE_CODE (type) == UNION_TYPE)
7441 {
7442 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
14a774a9
RK
7443
7444 /* If both input and output are BLKmode, this conversion
7445 isn't actually doing anything unless we need to make the
7446 alignment stricter. */
7447 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7448 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7449 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7450 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7451 modifier);
7452
bbf6f052 7453 if (target == 0)
1da68f56 7454 target = assign_temp (type, 0, 1, 1);
d6a5ac33 7455
bbf6f052
RK
7456 if (GET_CODE (target) == MEM)
7457 /* Store data into beginning of memory target. */
7458 store_expr (TREE_OPERAND (exp, 0),
f4ef873c 7459 adjust_address (target, TYPE_MODE (valtype), 0), 0);
1499e0a8 7460
bbf6f052
RK
7461 else if (GET_CODE (target) == REG)
7462 /* Store this field into a union of the proper type. */
14a774a9
RK
7463 store_field (target,
7464 MIN ((int_size_in_bytes (TREE_TYPE
7465 (TREE_OPERAND (exp, 0)))
7466 * BITS_PER_UNIT),
8752c357 7467 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
14a774a9 7468 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7156dead
RK
7469 VOIDmode, 0, BITS_PER_UNIT,
7470 int_size_in_bytes (type), 0);
bbf6f052
RK
7471 else
7472 abort ();
7473
7474 /* Return the entire union. */
7475 return target;
7476 }
d6a5ac33 7477
7f62854a
RK
7478 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7479 {
7480 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
921b3427 7481 ro_modifier);
7f62854a
RK
7482
7483 /* If the signedness of the conversion differs and OP0 is
7484 a promoted SUBREG, clear that indication since we now
7485 have to do the proper extension. */
7486 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7487 && GET_CODE (op0) == SUBREG)
7488 SUBREG_PROMOTED_VAR_P (op0) = 0;
7489
7490 return op0;
7491 }
7492
1499e0a8 7493 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
12342f90
RS
7494 if (GET_MODE (op0) == mode)
7495 return op0;
12342f90 7496
d6a5ac33
RK
7497 /* If OP0 is a constant, just convert it into the proper mode. */
7498 if (CONSTANT_P (op0))
7499 return
7500 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7501 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
12342f90 7502
26fcb35a 7503 if (modifier == EXPAND_INITIALIZER)
38a448ca 7504 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
d6a5ac33 7505
bbf6f052 7506 if (target == 0)
d6a5ac33
RK
7507 return
7508 convert_to_mode (mode, op0,
7509 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
bbf6f052 7510 else
d6a5ac33
RK
7511 convert_move (target, op0,
7512 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
bbf6f052
RK
7513 return target;
7514
7515 case PLUS_EXPR:
0f41302f
MS
7516 /* We come here from MINUS_EXPR when the second operand is a
7517 constant. */
bbf6f052 7518 plus_expr:
91ce572a
CC
7519 this_optab = ! unsignedp && flag_trapv
7520 && (GET_MODE_CLASS(mode) == MODE_INT)
7521 ? addv_optab : add_optab;
bbf6f052
RK
7522
7523 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7524 something else, make sure we add the register to the constant and
7525 then to the other thing. This case can occur during strength
7526 reduction and doing it this way will produce better code if the
7527 frame pointer or argument pointer is eliminated.
7528
7529 fold-const.c will ensure that the constant is always in the inner
7530 PLUS_EXPR, so the only case we need to do anything about is if
7531 sp, ap, or fp is our second argument, in which case we must swap
7532 the innermost first argument and our second argument. */
7533
7534 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7535 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7536 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7537 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7538 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7539 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7540 {
7541 tree t = TREE_OPERAND (exp, 1);
7542
7543 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7544 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7545 }
7546
88f63c77 7547 /* If the result is to be ptr_mode and we are adding an integer to
bbf6f052
RK
7548 something, we might be forming a constant. So try to use
7549 plus_constant. If it produces a sum and we can't accept it,
7550 use force_operand. This allows P = &ARR[const] to generate
7551 efficient code on machines where a SYMBOL_REF is not a valid
7552 address.
7553
7554 If this is an EXPAND_SUM call, always return the sum. */
c980ac49 7555 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
91ce572a 7556 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
bbf6f052 7557 {
c980ac49
RS
7558 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7559 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7560 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7561 {
cbbc503e
JL
7562 rtx constant_part;
7563
c980ac49
RS
7564 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7565 EXPAND_SUM);
cbbc503e
JL
7566 /* Use immed_double_const to ensure that the constant is
7567 truncated according to the mode of OP1, then sign extended
7568 to a HOST_WIDE_INT. Using the constant directly can result
7569 in non-canonical RTL in a 64x32 cross compile. */
7570 constant_part
7571 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7572 (HOST_WIDE_INT) 0,
a5efcd63 7573 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7f401c74 7574 op1 = plus_constant (op1, INTVAL (constant_part));
c980ac49
RS
7575 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7576 op1 = force_operand (op1, target);
7577 return op1;
7578 }
bbf6f052 7579
c980ac49
RS
7580 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7581 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7582 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7583 {
cbbc503e
JL
7584 rtx constant_part;
7585
c980ac49
RS
7586 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7587 EXPAND_SUM);
7588 if (! CONSTANT_P (op0))
7589 {
7590 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7591 VOIDmode, modifier);
709f5be1
RS
7592 /* Don't go to both_summands if modifier
7593 says it's not right to return a PLUS. */
7594 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7595 goto binop2;
c980ac49
RS
7596 goto both_summands;
7597 }
cbbc503e
JL
7598 /* Use immed_double_const to ensure that the constant is
7599 truncated according to the mode of OP1, then sign extended
7600 to a HOST_WIDE_INT. Using the constant directly can result
7601 in non-canonical RTL in a 64x32 cross compile. */
7602 constant_part
7603 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7604 (HOST_WIDE_INT) 0,
2a94e396 7605 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7f401c74 7606 op0 = plus_constant (op0, INTVAL (constant_part));
c980ac49
RS
7607 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7608 op0 = force_operand (op0, target);
7609 return op0;
7610 }
bbf6f052
RK
7611 }
7612
7613 /* No sense saving up arithmetic to be done
7614 if it's all in the wrong mode to form part of an address.
7615 And force_operand won't know whether to sign-extend or
7616 zero-extend. */
7617 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
88f63c77 7618 || mode != ptr_mode)
c980ac49 7619 goto binop;
bbf6f052 7620
e5e809f4 7621 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
7622 subtarget = 0;
7623
921b3427
RK
7624 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7625 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
bbf6f052 7626
c980ac49 7627 both_summands:
bbf6f052
RK
7628 /* Make sure any term that's a sum with a constant comes last. */
7629 if (GET_CODE (op0) == PLUS
7630 && CONSTANT_P (XEXP (op0, 1)))
7631 {
7632 temp = op0;
7633 op0 = op1;
7634 op1 = temp;
7635 }
7636 /* If adding to a sum including a constant,
7637 associate it to put the constant outside. */
7638 if (GET_CODE (op1) == PLUS
7639 && CONSTANT_P (XEXP (op1, 1)))
7640 {
7641 rtx constant_term = const0_rtx;
7642
7643 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7644 if (temp != 0)
7645 op0 = temp;
6f90e075
JW
7646 /* Ensure that MULT comes first if there is one. */
7647 else if (GET_CODE (op0) == MULT)
38a448ca 7648 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
bbf6f052 7649 else
38a448ca 7650 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
bbf6f052
RK
7651
7652 /* Let's also eliminate constants from op0 if possible. */
7653 op0 = eliminate_constant_term (op0, &constant_term);
7654
7655 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
3a94c984 7656 their sum should be a constant. Form it into OP1, since the
bbf6f052
RK
7657 result we want will then be OP0 + OP1. */
7658
7659 temp = simplify_binary_operation (PLUS, mode, constant_term,
7660 XEXP (op1, 1));
7661 if (temp != 0)
7662 op1 = temp;
7663 else
38a448ca 7664 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
bbf6f052
RK
7665 }
7666
7667 /* Put a constant term last and put a multiplication first. */
7668 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7669 temp = op1, op1 = op0, op0 = temp;
7670
7671 temp = simplify_binary_operation (PLUS, mode, op0, op1);
38a448ca 7672 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
bbf6f052
RK
7673
7674 case MINUS_EXPR:
ea87523e
RK
7675 /* For initializers, we are allowed to return a MINUS of two
7676 symbolic constants. Here we handle all cases when both operands
7677 are constant. */
bbf6f052
RK
7678 /* Handle difference of two symbolic constants,
7679 for the sake of an initializer. */
7680 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7681 && really_constant_p (TREE_OPERAND (exp, 0))
7682 && really_constant_p (TREE_OPERAND (exp, 1)))
7683 {
906c4e36 7684 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
921b3427 7685 VOIDmode, ro_modifier);
906c4e36 7686 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
921b3427 7687 VOIDmode, ro_modifier);
ea87523e 7688
ea87523e
RK
7689 /* If the last operand is a CONST_INT, use plus_constant of
7690 the negated constant. Else make the MINUS. */
7691 if (GET_CODE (op1) == CONST_INT)
7692 return plus_constant (op0, - INTVAL (op1));
7693 else
38a448ca 7694 return gen_rtx_MINUS (mode, op0, op1);
bbf6f052
RK
7695 }
7696 /* Convert A - const to A + (-const). */
7697 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7698 {
ae431183
RK
7699 tree negated = fold (build1 (NEGATE_EXPR, type,
7700 TREE_OPERAND (exp, 1)));
7701
ae431183 7702 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
6fbfac92
JM
7703 /* If we can't negate the constant in TYPE, leave it alone and
7704 expand_binop will negate it for us. We used to try to do it
7705 here in the signed version of TYPE, but that doesn't work
7706 on POINTER_TYPEs. */;
ae431183
RK
7707 else
7708 {
7709 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7710 goto plus_expr;
7711 }
bbf6f052 7712 }
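      /* E.g.  x - 5  becomes  x + (-5),  so the PLUS_EXPR machinery
	 above (plus_constant, address formation) applies to it too.  */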
91ce572a
CC
7713 this_optab = ! unsignedp && flag_trapv
7714 && (GET_MODE_CLASS(mode) == MODE_INT)
7715 ? subv_optab : sub_optab;
bbf6f052
RK
7716 goto binop;
7717
7718 case MULT_EXPR:
bbf6f052
RK
7719 /* If first operand is constant, swap them.
7720 Thus the following special case checks need only
7721 check the second operand. */
7722 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7723 {
7724 register tree t1 = TREE_OPERAND (exp, 0);
7725 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7726 TREE_OPERAND (exp, 1) = t1;
7727 }
7728
7729 /* Attempt to return something suitable for generating an
7730 indexed address, for machines that support that. */
7731
88f63c77 7732 if (modifier == EXPAND_SUM && mode == ptr_mode
bbf6f052 7733 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
906c4e36 7734 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
bbf6f052 7735 {
921b3427
RK
7736 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7737 EXPAND_SUM);
bbf6f052
RK
7738
7739 /* Apply distributive law if OP0 is x+c. */
7740 if (GET_CODE (op0) == PLUS
7741 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
c5c76735
JL
7742 return
7743 gen_rtx_PLUS
7744 (mode,
7745 gen_rtx_MULT
7746 (mode, XEXP (op0, 0),
7747 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7748 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7749 * INTVAL (XEXP (op0, 1))));
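	  /* E.g. for  p[i + 2]  with 4-byte elements, (i + 2) * 4 comes
	     back as  (plus (mult i 4) 8),  letting the constant 8 be
	     folded into an addressing-mode displacement.  */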
bbf6f052
RK
7750
7751 if (GET_CODE (op0) != REG)
906c4e36 7752 op0 = force_operand (op0, NULL_RTX);
bbf6f052
RK
7753 if (GET_CODE (op0) != REG)
7754 op0 = copy_to_mode_reg (mode, op0);
7755
c5c76735
JL
7756 return
7757 gen_rtx_MULT (mode, op0,
7758 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
bbf6f052
RK
7759 }
7760
e5e809f4 7761 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
7762 subtarget = 0;
7763
7764 /* Check for multiplying things that have been extended
7765 from a narrower type. If this machine supports multiplying
7766 in that narrower type with a result in the desired type,
7767 do it that way, and avoid the explicit type-conversion. */
7768 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7769 && TREE_CODE (type) == INTEGER_TYPE
7770 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7771 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7772 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7773 && int_fits_type_p (TREE_OPERAND (exp, 1),
7774 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7775 /* Don't use a widening multiply if a shift will do. */
7776 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
906c4e36 7777 > HOST_BITS_PER_WIDE_INT)
bbf6f052
RK
7778 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7779 ||
7780 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7781 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7782 ==
7783 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7784 /* If both operands are extended, they must either both
7785 be zero-extended or both be sign-extended. */
7786 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7787 ==
7788 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7789 {
7790 enum machine_mode innermode
7791 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
b10af0c8
TG
7792 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7793 ? smul_widen_optab : umul_widen_optab);
bbf6f052
RK
7794 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7795 ? umul_widen_optab : smul_widen_optab);
b10af0c8 7796 if (mode == GET_MODE_WIDER_MODE (innermode))
bbf6f052 7797 {
b10af0c8
TG
7798 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7799 {
7800 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7801 NULL_RTX, VOIDmode, 0);
7802 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7803 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7804 VOIDmode, 0);
7805 else
7806 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7807 NULL_RTX, VOIDmode, 0);
7808 goto binop2;
7809 }
7810 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7811 && innermode == word_mode)
7812 {
7813 rtx htem;
7814 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7815 NULL_RTX, VOIDmode, 0);
7816 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8c118062
GK
7817 op1 = convert_modes (innermode, mode,
7818 expand_expr (TREE_OPERAND (exp, 1),
7819 NULL_RTX, VOIDmode, 0),
7820 unsignedp);
b10af0c8
TG
7821 else
7822 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7823 NULL_RTX, VOIDmode, 0);
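 /* OTHER_OPTAB has the opposite signedness from our operands, so the
 low part of the product below is correct but the high part is not;
 expand_mult_highpart_adjust computes the corrected high word, which
 is then copied back into TEMP. */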
7824 temp = expand_binop (mode, other_optab, op0, op1, target,
7825 unsignedp, OPTAB_LIB_WIDEN);
7826 htem = expand_mult_highpart_adjust (innermode,
7827 gen_highpart (innermode, temp),
7828 op0, op1,
7829 gen_highpart (innermode, temp),
7830 unsignedp);
7831 emit_move_insn (gen_highpart (innermode, temp), htem);
7832 return temp;
7833 }
7834 }
7835 }
7836 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7837 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7838 return expand_mult (mode, op0, op1, target, unsignedp);
7839
7840 case TRUNC_DIV_EXPR:
7841 case FLOOR_DIV_EXPR:
7842 case CEIL_DIV_EXPR:
7843 case ROUND_DIV_EXPR:
7844 case EXACT_DIV_EXPR:
7845 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7846 subtarget = 0;
7847 /* Possible optimization: compute the dividend with EXPAND_SUM
7848 then if the divisor is constant can optimize the case
7849 where some terms of the dividend have coeffs divisible by it. */
7850 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7851 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7852 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7853
7854 case RDIV_EXPR:
7855 /* Emit a/b as a*(1/b). Later we may manage to CSE the reciprocal,
7856 saving an expensive divide. If not, combine will rebuild the
7857 original computation. */
7858 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7859 && !real_onep (TREE_OPERAND (exp, 0)))
7860 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7861 build (RDIV_EXPR, type,
7862 build_real (type, dconst1),
7863 TREE_OPERAND (exp, 1))),
7864 target, tmode, unsignedp);
7865 this_optab = sdiv_optab;
7866 goto binop;
7867
7868 case TRUNC_MOD_EXPR:
7869 case FLOOR_MOD_EXPR:
7870 case CEIL_MOD_EXPR:
7871 case ROUND_MOD_EXPR:
7872 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7873 subtarget = 0;
7874 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7875 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7876 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7877
7878 case FIX_ROUND_EXPR:
7879 case FIX_FLOOR_EXPR:
7880 case FIX_CEIL_EXPR:
7881 abort (); /* Not used for C. */
7882
7883 case FIX_TRUNC_EXPR:
7884 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7885 if (target == 0)
7886 target = gen_reg_rtx (mode);
7887 expand_fix (target, op0, unsignedp);
7888 return target;
7889
7890 case FLOAT_EXPR:
7891 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7892 if (target == 0)
7893 target = gen_reg_rtx (mode);
7894 /* expand_float can't figure out what to do if FROM has VOIDmode.
7895 So give it the correct mode. With -O, cse will optimize this. */
7896 if (GET_MODE (op0) == VOIDmode)
7897 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7898 op0);
7899 expand_float (target, op0,
7900 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7901 return target;
7902
7903 case NEGATE_EXPR:
7904 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7905 temp = expand_unop (mode,
7906 ! unsignedp && flag_trapv
7907 && (GET_MODE_CLASS(mode) == MODE_INT)
7908 ? negv_optab : neg_optab, op0, target, 0);
7909 if (temp == 0)
7910 abort ();
7911 return temp;
7912
7913 case ABS_EXPR:
7914 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7915
7916 /* Handle complex values specially. */
7917 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7918 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7919 return expand_complex_abs (mode, op0, target, unsignedp);
7920
7921 /* Unsigned abs is simply the operand. Testing here means we don't
7922 risk generating incorrect code below. */
7923 if (TREE_UNSIGNED (type))
7924 return op0;
7925
7926 return expand_abs (mode, op0, target, unsignedp,
7927 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7928
7929 case MAX_EXPR:
7930 case MIN_EXPR:
7931 target = original_target;
7932 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7933 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7934 || GET_MODE (target) != mode
7935 || (GET_CODE (target) == REG
7936 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7937 target = gen_reg_rtx (mode);
7938 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7939 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7940
7941 /* First try to do it with a special MIN or MAX instruction.
7942 If that does not win, use a conditional jump to select the proper
7943 value. */
7944 this_optab = (TREE_UNSIGNED (type)
7945 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7946 : (code == MIN_EXPR ? smin_optab : smax_optab));
7947
7948 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7949 OPTAB_WIDEN);
7950 if (temp != 0)
7951 return temp;
7952
7953 /* At this point, a MEM target is no longer useful; we will get better
7954 code without it. */
7955
7956 if (GET_CODE (target) == MEM)
7957 target = gen_reg_rtx (mode);
7958
7959 if (target != op0)
7960 emit_move_insn (target, op0);
7961
7962 op0 = gen_label_rtx ();
7963
7964 /* If this mode is an integer too wide to compare properly,
7965 compare word by word. Rely on cse to optimize constant cases. */
7966 if (GET_MODE_CLASS (mode) == MODE_INT
7967 && ! can_compare_p (GE, mode, ccp_jump))
7968 {
7969 if (code == MAX_EXPR)
7970 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7971 target, op1, NULL_RTX, op0);
7972 else
7973 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7974 op1, target, NULL_RTX, op0);
7975 }
7976 else
7977 {
7978 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7979 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7980 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7981 op0);
7982 }
7983 emit_move_insn (target, op1);
7984 emit_label (op0);
7985 return target;
7986
7987 case BIT_NOT_EXPR:
7988 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7989 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7990 if (temp == 0)
7991 abort ();
7992 return temp;
7993
7994 case FFS_EXPR:
7995 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7996 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7997 if (temp == 0)
7998 abort ();
7999 return temp;
8000
8001 /* ??? Can optimize bitwise operations with one arg constant.
8002 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8003 and (a bitwise1 b) bitwise2 b (etc)
8004 but that is probably not worth while. */
8005
8006 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8007 boolean values when we want in all cases to compute both of them. In
8008 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8009 as actual zero-or-1 values and then bitwise anding. In cases where
8010 there cannot be any side effects, better code would be made by
8011 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8012 how to recognize those cases. */
8013
8014 case TRUTH_AND_EXPR:
8015 case BIT_AND_EXPR:
8016 this_optab = and_optab;
8017 goto binop;
8018
8019 case TRUTH_OR_EXPR:
8020 case BIT_IOR_EXPR:
8021 this_optab = ior_optab;
8022 goto binop;
8023
8024 case TRUTH_XOR_EXPR:
8025 case BIT_XOR_EXPR:
8026 this_optab = xor_optab;
8027 goto binop;
8028
8029 case LSHIFT_EXPR:
8030 case RSHIFT_EXPR:
8031 case LROTATE_EXPR:
8032 case RROTATE_EXPR:
8033 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8034 subtarget = 0;
8035 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8036 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8037 unsignedp);
8038
8039 /* Could determine the answer when only additive constants differ. Also,
8040 the addition of one can be handled by changing the condition. */
8041 case LT_EXPR:
8042 case LE_EXPR:
8043 case GT_EXPR:
8044 case GE_EXPR:
8045 case EQ_EXPR:
8046 case NE_EXPR:
8047 case UNORDERED_EXPR:
8048 case ORDERED_EXPR:
8049 case UNLT_EXPR:
8050 case UNLE_EXPR:
8051 case UNGT_EXPR:
8052 case UNGE_EXPR:
8053 case UNEQ_EXPR:
8054 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
8055 if (temp != 0)
8056 return temp;
8057
8058 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8059 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8060 && original_target
8061 && GET_CODE (original_target) == REG
8062 && (GET_MODE (original_target)
8063 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8064 {
8065 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8066 VOIDmode, 0);
8067
8068 if (temp != original_target)
8069 temp = copy_to_reg (temp);
8070
8071 op1 = gen_label_rtx ();
8072 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8073 GET_MODE (temp), unsignedp, 0, op1);
8074 emit_move_insn (temp, const1_rtx);
8075 emit_label (op1);
8076 return temp;
8077 }
8078
8079 /* If no set-flag instruction, must generate a conditional
8080 store into a temporary variable. Drop through
8081 and handle this like && and ||. */
8082
8083 case TRUTH_ANDIF_EXPR:
8084 case TRUTH_ORIF_EXPR:
8085 if (! ignore
8086 && (target == 0 || ! safe_from_p (target, exp, 1)
8087 /* Make sure we don't have a hard reg (such as function's return
8088 value) live across basic blocks, if not optimizing. */
8089 || (!optimize && GET_CODE (target) == REG
8090 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8091 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8092
8093 if (target)
8094 emit_clr_insn (target);
8095
8096 op1 = gen_label_rtx ();
8097 jumpifnot (exp, op1);
8098
8099 if (target)
8100 emit_0_to_1_insn (target);
8101
8102 emit_label (op1);
8103 return ignore ? const0_rtx : target;
8104
8105 case TRUTH_NOT_EXPR:
8106 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8107 /* The parser is careful to generate TRUTH_NOT_EXPR
8108 only with operands that are always zero or one. */
8109 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8110 target, 1, OPTAB_LIB_WIDEN);
8111 if (temp == 0)
8112 abort ();
8113 return temp;
8114
8115 case COMPOUND_EXPR:
8116 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8117 emit_queue ();
8118 return expand_expr (TREE_OPERAND (exp, 1),
8119 (ignore ? const0_rtx : target),
8120 VOIDmode, 0);
8121
8122 case COND_EXPR:
8123 /* If we would have a "singleton" (see below) were it not for a
8124 conversion in each arm, bring that conversion back out. */
8125 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8126 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8127 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8128 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8129 {
8130 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8131 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8132
8133 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8134 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8135 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8136 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8137 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8138 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8139 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8140 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8141 return expand_expr (build1 (NOP_EXPR, type,
8142 build (COND_EXPR, TREE_TYPE (iftrue),
8143 TREE_OPERAND (exp, 0),
8144 iftrue, iffalse)),
8145 target, tmode, modifier);
8146 }
8147
8148 {
8149 /* Note that COND_EXPRs whose type is a structure or union
8150 are required to be constructed to contain assignments of
8151 a temporary variable, so that we can evaluate them here
8152 for side effect only. If type is void, we must do likewise. */
8153
8154 /* If an arm of the branch requires a cleanup,
8155 only that cleanup is performed. */
8156
8157 tree singleton = 0;
8158 tree binary_op = 0, unary_op = 0;
8159
8160 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8161 convert it to our mode, if necessary. */
8162 if (integer_onep (TREE_OPERAND (exp, 1))
8163 && integer_zerop (TREE_OPERAND (exp, 2))
8164 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8165 {
8166 if (ignore)
8167 {
8168 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8169 ro_modifier);
8170 return const0_rtx;
8171 }
8172
8173 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
8174 if (GET_MODE (op0) == mode)
8175 return op0;
8176
8177 if (target == 0)
8178 target = gen_reg_rtx (mode);
8179 convert_move (target, op0, unsignedp);
8180 return target;
8181 }
8182
8183 /* Check for X ? A + B : A. If we have this, we can copy A to the
8184 output and conditionally add B. Similarly for unary operations.
8185 Don't do this if X has side-effects because those side effects
8186 might affect A or B and the "?" operation is a sequence point in
8187 ANSI. (operand_equal_p tests for side effects.) */
8188
8189 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8190 && operand_equal_p (TREE_OPERAND (exp, 2),
8191 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8192 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8193 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8194 && operand_equal_p (TREE_OPERAND (exp, 1),
8195 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8196 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8197 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8198 && operand_equal_p (TREE_OPERAND (exp, 2),
8199 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8200 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8201 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8202 && operand_equal_p (TREE_OPERAND (exp, 1),
8203 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8204 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8205
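 /* At this point SINGLETON, if nonzero, is the arm that is plain A,
 and BINARY_OP or UNARY_OP is the other arm, which computes a value
 from A. */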
8206 /* If we are not to produce a result, we have no target. Otherwise,
8207 if a target was specified use it; it will not be used as an
8208 intermediate target unless it is safe. If no target, use a
8209 temporary. */
8210
8211 if (ignore)
8212 temp = 0;
8213 else if (original_target
8214 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8215 || (singleton && GET_CODE (original_target) == REG
8216 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8217 && original_target == var_rtx (singleton)))
8218 && GET_MODE (original_target) == mode
8219#ifdef HAVE_conditional_move
8220 && (! can_conditionally_move_p (mode)
8221 || GET_CODE (original_target) == REG
8222 || TREE_ADDRESSABLE (type))
8223#endif
8224 && (GET_CODE (original_target) != MEM
8225 || TREE_ADDRESSABLE (type)))
8226 temp = original_target;
8227 else if (TREE_ADDRESSABLE (type))
8228 abort ();
8229 else
8230 temp = assign_temp (type, 0, 0, 1);
8231
8232 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8233 do the test of X as a store-flag operation, do this as
8234 A + ((X != 0) << log C). Similarly for other simple binary
8235 operators. Only do for C == 1 if BRANCH_COST is low. */
8236 if (temp && singleton && binary_op
8237 && (TREE_CODE (binary_op) == PLUS_EXPR
8238 || TREE_CODE (binary_op) == MINUS_EXPR
8239 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8240 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8241 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8242 : integer_onep (TREE_OPERAND (binary_op, 1)))
8243 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8244 {
8245 rtx result;
8246 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8247 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8248 ? addv_optab : add_optab)
8249 : TREE_CODE (binary_op) == MINUS_EXPR
8250 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8251 ? subv_optab : sub_optab)
8252 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8253 : xor_optab);
8254
8255 /* If we had X ? A : A + 1, do this as A + (X == 0).
8256
8257 We have to invert the truth value here and then put it
8258 back later if do_store_flag fails. We cannot simply copy
8259 TREE_OPERAND (exp, 0) to another variable and modify that
8260 because invert_truthvalue can modify the tree pointed to
8261 by its argument. */
8262 if (singleton == TREE_OPERAND (exp, 1))
8263 TREE_OPERAND (exp, 0)
8264 = invert_truthvalue (TREE_OPERAND (exp, 0));
8265
8266 result = do_store_flag (TREE_OPERAND (exp, 0),
8267 (safe_from_p (temp, singleton, 1)
8268 ? temp : NULL_RTX),
8269 mode, BRANCH_COST <= 1);
8270
8271 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8272 result = expand_shift (LSHIFT_EXPR, mode, result,
8273 build_int_2 (tree_log2
8274 (TREE_OPERAND
8275 (binary_op, 1)),
8276 0),
8277 (safe_from_p (temp, singleton, 1)
8278 ? temp : NULL_RTX), 0);
8279
8280 if (result)
8281 {
8282 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8283 return expand_binop (mode, boptab, op1, result, temp,
8284 unsignedp, OPTAB_LIB_WIDEN);
8285 }
8286 else if (singleton == TREE_OPERAND (exp, 1))
8287 TREE_OPERAND (exp, 0)
8288 = invert_truthvalue (TREE_OPERAND (exp, 0));
8289 }
8290
8291 do_pending_stack_adjust ();
8292 NO_DEFER_POP;
8293 op0 = gen_label_rtx ();
8294
8295 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8296 {
8297 if (temp != 0)
8298 {
8299 /* If the target conflicts with the other operand of the
8300 binary op, we can't use it. Also, we can't use the target
8301 if it is a hard register, because evaluating the condition
8302 might clobber it. */
8303 if ((binary_op
8304 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8305 || (GET_CODE (temp) == REG
8306 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8307 temp = gen_reg_rtx (mode);
8308 store_expr (singleton, temp, 0);
8309 }
8310 else
8311 expand_expr (singleton,
8312 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8313 if (singleton == TREE_OPERAND (exp, 1))
8314 jumpif (TREE_OPERAND (exp, 0), op0);
8315 else
8316 jumpifnot (TREE_OPERAND (exp, 0), op0);
8317
8318 start_cleanup_deferral ();
8319 if (binary_op && temp == 0)
8320 /* Just touch the other operand. */
8321 expand_expr (TREE_OPERAND (binary_op, 1),
8322 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8323 else if (binary_op)
8324 store_expr (build (TREE_CODE (binary_op), type,
8325 make_tree (type, temp),
8326 TREE_OPERAND (binary_op, 1)),
8327 temp, 0);
8328 else
8329 store_expr (build1 (TREE_CODE (unary_op), type,
8330 make_tree (type, temp)),
8331 temp, 0);
8332 op1 = op0;
8333 }
8334 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8335 comparison operator. If we have one of these cases, set the
8336 output to A, branch on A (cse will merge these two references),
8337 then set the output to FOO. */
8338 else if (temp
8339 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8340 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8341 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8342 TREE_OPERAND (exp, 1), 0)
8343 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8344 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8345 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8346 {
8347 if (GET_CODE (temp) == REG
8348 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8349 temp = gen_reg_rtx (mode);
8350 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8351 jumpif (TREE_OPERAND (exp, 0), op0);
8352
8353 start_cleanup_deferral ();
8354 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8355 op1 = op0;
8356 }
8357 else if (temp
8358 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8359 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8360 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8361 TREE_OPERAND (exp, 2), 0)
8362 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8363 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8364 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8365 {
8366 if (GET_CODE (temp) == REG
8367 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8368 temp = gen_reg_rtx (mode);
8369 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8370 jumpifnot (TREE_OPERAND (exp, 0), op0);
8371
8372 start_cleanup_deferral ();
8373 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8374 op1 = op0;
8375 }
8376 else
8377 {
8378 op1 = gen_label_rtx ();
8379 jumpifnot (TREE_OPERAND (exp, 0), op0);
8380
8381 start_cleanup_deferral ();
8382
8383 /* One branch of the cond can be void, if it never returns. For
8384 example, A ? throw : E. */
8385 if (temp != 0
8386 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8387 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8388 else
8389 expand_expr (TREE_OPERAND (exp, 1),
8390 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8391 end_cleanup_deferral ();
8392 emit_queue ();
8393 emit_jump_insn (gen_jump (op1));
8394 emit_barrier ();
8395 emit_label (op0);
8396 start_cleanup_deferral ();
8397 if (temp != 0
8398 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8399 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8400 else
8401 expand_expr (TREE_OPERAND (exp, 2),
8402 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8403 }
8404
8405 end_cleanup_deferral ();
8406
8407 emit_queue ();
8408 emit_label (op1);
8409 OK_DEFER_POP;
8410
8411 return temp;
8412 }
8413
8414 case TARGET_EXPR:
8415 {
8416 /* Something needs to be initialized, but we didn't know
8417 where that thing was when building the tree. For example,
8418 it could be the return value of a function, or a parameter
8419 to a function which is laid out on the stack, or a temporary
8420 variable which must be passed by reference.
8421
8422 We guarantee that the expression will either be constructed
8423 or copied into our original target. */
8424
8425 tree slot = TREE_OPERAND (exp, 0);
8426 tree cleanups = NULL_TREE;
8427 tree exp1;
8428
8429 if (TREE_CODE (slot) != VAR_DECL)
8430 abort ();
8431
8432 if (! ignore)
8433 target = original_target;
8434
8435 /* Set this here so that if we get a target that refers to a
8436 register variable that's already been used, put_reg_into_stack
8437 knows that it should fix up those uses. */
8438 TREE_USED (slot) = 1;
8439
8440 if (target == 0)
8441 {
8442 if (DECL_RTL_SET_P (slot))
8443 {
8444 target = DECL_RTL (slot);
8445 /* If we have already expanded the slot, don't do
8446 it again. (mrs) */
8447 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8448 return target;
8449 }
8450 else
8451 {
8452 target = assign_temp (type, 2, 0, 1);
8453 /* All temp slots at this level must not conflict. */
8454 preserve_temp_slots (target);
8455 SET_DECL_RTL (slot, target);
8456 if (TREE_ADDRESSABLE (slot))
8457 put_var_into_stack (slot);
8458
8459 /* Since SLOT is not known to the called function
8460 to belong to its stack frame, we must build an explicit
8461 cleanup. This case occurs when we must build up a reference
8462 to pass the reference as an argument. In this case,
8463 it is very likely that such a reference need not be
8464 built here. */
8465
8466 if (TREE_OPERAND (exp, 2) == 0)
8467 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8468 cleanups = TREE_OPERAND (exp, 2);
8469 }
8470 }
8471 else
8472 {
8473 /* This case does occur, when expanding a parameter which
8474 needs to be constructed on the stack. The target
8475 is the actual stack address that we want to initialize.
8476 The function we call will perform the cleanup in this case. */
8477
8478 /* If we have already assigned it space, use that space,
8479 not target that we were passed in, as our target
8480 parameter is only a hint. */
8481 if (DECL_RTL_SET_P (slot))
8482 {
8483 target = DECL_RTL (slot);
8484 /* If we have already expanded the slot, don't do
8485 it again. (mrs) */
8486 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8487 return target;
8488 }
8489 else
8490 {
8491 SET_DECL_RTL (slot, target);
8492 /* If we must have an addressable slot, then make sure that
8493 the RTL that we just stored in slot is OK. */
8494 if (TREE_ADDRESSABLE (slot))
8495 put_var_into_stack (slot);
8496 }
8497 }
8498
8499 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8500 /* Mark it as expanded. */
8501 TREE_OPERAND (exp, 1) = NULL_TREE;
8502
8503 store_expr (exp1, target, 0);
8504
8505 expand_decl_cleanup (NULL_TREE, cleanups);
8506
8507 return target;
8508 }
8509
8510 case INIT_EXPR:
8511 {
8512 tree lhs = TREE_OPERAND (exp, 0);
8513 tree rhs = TREE_OPERAND (exp, 1);
8514 tree noncopied_parts = 0;
8515 tree lhs_type = TREE_TYPE (lhs);
8516
8517 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8518 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
8519 noncopied_parts
8520 = init_noncopied_parts (stabilize_reference (lhs),
8521 TYPE_NONCOPIED_PARTS (lhs_type));
8522
8523 while (noncopied_parts != 0)
8524 {
8525 expand_assignment (TREE_VALUE (noncopied_parts),
8526 TREE_PURPOSE (noncopied_parts), 0, 0);
8527 noncopied_parts = TREE_CHAIN (noncopied_parts);
8528 }
8529 return temp;
8530 }
8531
8532 case MODIFY_EXPR:
8533 {
8534 /* If lhs is complex, expand calls in rhs before computing it.
8535 That's so we don't compute a pointer and save it over a call.
8536 If lhs is simple, compute it first so we can give it as a
8537 target if the rhs is just a call. This avoids an extra temp and copy,
8538 and prevents a partial subsumption that makes bad code.
8539 Actually we could treat component_ref's of vars like vars. */
8540
8541 tree lhs = TREE_OPERAND (exp, 0);
8542 tree rhs = TREE_OPERAND (exp, 1);
8543 tree noncopied_parts = 0;
8544 tree lhs_type = TREE_TYPE (lhs);
8545
8546 temp = 0;
8547
8548 /* Check for |= or &= of a bitfield of size one into another bitfield
8549 of size 1. In this case, (unless we need the result of the
8550 assignment) we can do this more efficiently with a
8551 test followed by an assignment, if necessary.
8552
8553 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8554 things change so we do, this code should be enhanced to
8555 support it. */
8556 if (ignore
8557 && TREE_CODE (lhs) == COMPONENT_REF
8558 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8559 || TREE_CODE (rhs) == BIT_AND_EXPR)
8560 && TREE_OPERAND (rhs, 0) == lhs
8561 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8562 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8563 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8564 {
8565 rtx label = gen_label_rtx ();
8566
8567 do_jump (TREE_OPERAND (rhs, 1),
8568 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8569 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8570 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8571 (TREE_CODE (rhs) == BIT_IOR_EXPR
8572 ? integer_one_node
8573 : integer_zero_node)),
8574 0, 0);
8575 do_pending_stack_adjust ();
8576 emit_label (label);
8577 return const0_rtx;
8578 }
8579
8580 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8581 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8582 noncopied_parts
8583 = save_noncopied_parts (stabilize_reference (lhs),
8584 TYPE_NONCOPIED_PARTS (lhs_type));
8585
8586 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8587 while (noncopied_parts != 0)
8588 {
8589 expand_assignment (TREE_PURPOSE (noncopied_parts),
8590 TREE_VALUE (noncopied_parts), 0, 0);
8591 noncopied_parts = TREE_CHAIN (noncopied_parts);
8592 }
8593 return temp;
8594 }
8595
8596 case RETURN_EXPR:
8597 if (!TREE_OPERAND (exp, 0))
8598 expand_null_return ();
8599 else
8600 expand_return (TREE_OPERAND (exp, 0));
8601 return const0_rtx;
8602
8603 case PREINCREMENT_EXPR:
8604 case PREDECREMENT_EXPR:
8605 return expand_increment (exp, 0, ignore);
8606
8607 case POSTINCREMENT_EXPR:
8608 case POSTDECREMENT_EXPR:
8609 /* Faster to treat as pre-increment if result is not used. */
8610 return expand_increment (exp, ! ignore, ignore);
8611
8612 case ADDR_EXPR:
8613 /* If nonzero, TEMP will be set to the address of something that might
8614 be a MEM corresponding to a stack slot. */
8615 temp = 0;
8616
8617 /* Are we taking the address of a nested function? */
8618 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8619 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8620 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8621 && ! TREE_STATIC (exp))
8622 {
8623 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8624 op0 = force_operand (op0, target);
8625 }
8626 /* If we are taking the address of something erroneous, just
8627 return a zero. */
8628 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8629 return const0_rtx;
8630 else
8631 {
8632 /* We make sure to pass const0_rtx down if we came in with
8633 ignore set, to avoid doing the cleanups twice for something. */
8634 op0 = expand_expr (TREE_OPERAND (exp, 0),
8635 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8636 (modifier == EXPAND_INITIALIZER
8637 ? modifier : EXPAND_CONST_ADDRESS));
8638
8639 /* If we are going to ignore the result, OP0 will have been set
8640 to const0_rtx, so just return it. Don't get confused and
8641 think we are taking the address of the constant. */
8642 if (ignore)
8643 return op0;
8644
8645 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8646 clever and returns a REG when given a MEM. */
8647 op0 = protect_from_queue (op0, 1);
8648
8649 /* We would like the object in memory. If it is a constant, we can
8650 have it be statically allocated into memory. For a non-constant,
8651 we need to allocate some memory and store the value into it. */
8652
8653 if (CONSTANT_P (op0))
8654 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8655 op0);
8656 else if (GET_CODE (op0) == MEM)
8657 {
8658 mark_temp_addr_taken (op0);
8659 temp = XEXP (op0, 0);
8660 }
8661
8662 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8663 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8664 || GET_CODE (op0) == PARALLEL)
8665 {
8666 /* If this object is in a register, it must not
8667 be BLKmode. */
8668 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8669 tree nt = build_qualified_type (inner_type,
8670 (TYPE_QUALS (inner_type)
8671 | TYPE_QUAL_CONST));
8672 rtx memloc = assign_temp (nt, 1, 1, 1);
8673
8674 mark_temp_addr_taken (memloc);
8675 if (GET_CODE (op0) == PARALLEL)
8676 /* Handle calls that pass values in multiple non-contiguous
8677 locations. The Irix 6 ABI has examples of this. */
8678 emit_group_store (memloc, op0,
8679 int_size_in_bytes (inner_type),
8680 TYPE_ALIGN (inner_type));
8681 else
8682 emit_move_insn (memloc, op0);
8683 op0 = memloc;
8684 }
8685
8686 if (GET_CODE (op0) != MEM)
8687 abort ();
8688
8689 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8690 {
8691 temp = XEXP (op0, 0);
8692#ifdef POINTERS_EXTEND_UNSIGNED
8693 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8694 && mode == ptr_mode)
8695 temp = convert_memory_address (ptr_mode, temp);
8696#endif
8697 return temp;
8698 }
8699
8700 op0 = force_operand (XEXP (op0, 0), target);
8701 }
8702
8703 if (flag_force_addr && GET_CODE (op0) != REG)
8704 op0 = force_reg (Pmode, op0);
8705
8706 if (GET_CODE (op0) == REG
8707 && ! REG_USERVAR_P (op0))
8708 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8709
8710 /* If we might have had a temp slot, add an equivalent address
8711 for it. */
8712 if (temp != 0)
8713 update_temp_slot_address (temp, op0);
8714
8715#ifdef POINTERS_EXTEND_UNSIGNED
8716 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8717 && mode == ptr_mode)
8718 op0 = convert_memory_address (ptr_mode, op0);
8719#endif
8720
8721 return op0;
8722
8723 case ENTRY_VALUE_EXPR:
8724 abort ();
8725
8726 /* COMPLEX type for Extended Pascal & Fortran */
8727 case COMPLEX_EXPR:
8728 {
8729 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8730 rtx insns;
8731
8732 /* Get the rtx code of the operands. */
8733 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8734 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8735
8736 if (! target)
8737 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8738
8739 start_sequence ();
8740
8741 /* Move the real (op0) and imaginary (op1) parts to their location. */
8742 emit_move_insn (gen_realpart (mode, target), op0);
8743 emit_move_insn (gen_imagpart (mode, target), op1);
8744
8745 insns = get_insns ();
8746 end_sequence ();
8747
8748 /* Complex construction should appear as a single unit. */
8749 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8750 each with a separate pseudo as destination.
8751 It's not correct for flow to treat them as a unit. */
8752 if (GET_CODE (target) != CONCAT)
8753 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8754 else
8755 emit_insns (insns);
8756
8757 return target;
8758 }
8759
8760 case REALPART_EXPR:
8761 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8762 return gen_realpart (mode, op0);
8763
8764 case IMAGPART_EXPR:
8765 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8766 return gen_imagpart (mode, op0);
8767
8768 case CONJ_EXPR:
8769 {
8770 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8771 rtx imag_t;
8772 rtx insns;
8773
8774 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8775
8776 if (! target)
8777 target = gen_reg_rtx (mode);
8778
8779 start_sequence ();
8780
8781 /* Store the realpart and the negated imagpart to target. */
8782 emit_move_insn (gen_realpart (partmode, target),
8783 gen_realpart (partmode, op0));
8784
8785 imag_t = gen_imagpart (partmode, target);
8786 temp = expand_unop (partmode,
8787 ! unsignedp && flag_trapv
8788 && (GET_MODE_CLASS(partmode) == MODE_INT)
8789 ? negv_optab : neg_optab,
8790 gen_imagpart (partmode, op0), imag_t, 0);
8791 if (temp != imag_t)
8792 emit_move_insn (imag_t, temp);
8793
8794 insns = get_insns ();
8795 end_sequence ();
8796
8797 /* Conjugate should appear as a single unit.
8798 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8799 each with a separate pseudo as destination.
8800 It's not correct for flow to treat them as a unit. */
8801 if (GET_CODE (target) != CONCAT)
8802 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8803 else
8804 emit_insns (insns);
8805
8806 return target;
8807 }
8808
8809 case TRY_CATCH_EXPR:
8810 {
8811 tree handler = TREE_OPERAND (exp, 1);
8812
8813 expand_eh_region_start ();
8814
8815 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8816
8817 expand_eh_region_end_cleanup (handler);
8818
8819 return op0;
8820 }
8821
8822 case TRY_FINALLY_EXPR:
8823 {
8824 tree try_block = TREE_OPERAND (exp, 0);
8825 tree finally_block = TREE_OPERAND (exp, 1);
8826 rtx finally_label = gen_label_rtx ();
8827 rtx done_label = gen_label_rtx ();
8828 rtx return_link = gen_reg_rtx (Pmode);
8829 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8830 (tree) finally_label, (tree) return_link);
8831 TREE_SIDE_EFFECTS (cleanup) = 1;
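 /* CLEANUP is a GOTO_SUBROUTINE_EXPR, so any exit from the bindings
 below runs the finally block as a subroutine: control jumps to
 FINALLY_LABEL with the resume address in RETURN_LINK, and the
 emit_indirect_jump (return_link) at the end returns to the caller. */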
8832
8833 /* Start a new binding layer that will keep track of all cleanup
8834 actions to be performed. */
8835 expand_start_bindings (2);
8836
8837 target_temp_slot_level = temp_slot_level;
8838
8839 expand_decl_cleanup (NULL_TREE, cleanup);
8840 op0 = expand_expr (try_block, target, tmode, modifier);
8841
8842 preserve_temp_slots (op0);
8843 expand_end_bindings (NULL_TREE, 0, 0);
8844 emit_jump (done_label);
8845 emit_label (finally_label);
8846 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8847 emit_indirect_jump (return_link);
8848 emit_label (done_label);
8849 return op0;
8850 }
8851
8852 case GOTO_SUBROUTINE_EXPR:
8853 {
8854 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8855 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8856 rtx return_address = gen_label_rtx ();
8857 emit_move_insn (return_link,
8858 gen_rtx_LABEL_REF (Pmode, return_address));
8859 emit_jump (subr);
8860 emit_label (return_address);
8861 return const0_rtx;
8862 }
8863
8864 case VA_ARG_EXPR:
8865 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8866
8867 case EXC_PTR_EXPR:
8868 return get_exception_pointer (cfun);
8869
8870 default:
8871 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8872 }
8873
8874 /* Here to do an ordinary binary operator, generating an instruction
8875 from the optab already placed in `this_optab'. */
8876 binop:
8877 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8878 subtarget = 0;
8879 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8880 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8881 binop2:
8882 temp = expand_binop (mode, this_optab, op0, op1, target,
8883 unsignedp, OPTAB_LIB_WIDEN);
8884 if (temp == 0)
8885 abort ();
8886 return temp;
8887}
8888\f
8889/* Similar to expand_expr, except that we don't specify a target, target
8890 mode, or modifier and we return the alignment of the inner type. This is
8891 used in cases where it is not necessary to align the result to the
8892 alignment of its type as long as we know the alignment of the result, for
8893 example for comparisons of BLKmode values. */
8894
8895static rtx
8896expand_expr_unaligned (exp, palign)
8897 register tree exp;
8898 unsigned int *palign;
8899{
8900 register rtx op0;
8901 tree type = TREE_TYPE (exp);
8902 register enum machine_mode mode = TYPE_MODE (type);
8903
8904 /* Default the alignment we return to that of the type. */
8905 *palign = TYPE_ALIGN (type);
8906
8907 /* The only case in which we do anything special is when the resulting
8908 mode is BLKmode. */
8909 if (mode != BLKmode)
8910 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8911
8912 switch (TREE_CODE (exp))
8913 {
8914 case CONVERT_EXPR:
8915 case NOP_EXPR:
8916 case NON_LVALUE_EXPR:
8917 /* Conversions between BLKmode values don't change the underlying
8918 alignment or value. */
8919 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8920 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8921 break;
8922
8923 case ARRAY_REF:
8924 /* Much of the code for this case is copied directly from expand_expr.
8925 We need to duplicate it here because we will do something different
8926 in the fall-through case, so we need to handle the same exceptions
8927 it does. */
8928 {
8929 tree array = TREE_OPERAND (exp, 0);
8930 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8931 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8932 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
8933 HOST_WIDE_INT i;
8934
8935 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8936 abort ();
8937
8938 /* Optimize the special-case of a zero lower bound.
8939
8940 We convert the low_bound to sizetype to avoid some problems
8941 with constant folding. (E.g. suppose the lower bound is 1,
8942 and its mode is QI. Without the conversion, (ARRAY
8943 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8944 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8945
8946 if (! integer_zerop (low_bound))
8947 index = size_diffop (index, convert (sizetype, low_bound));
8948
8949 /* If this is a constant index into a constant array,
8950 just get the value from the array. Handle both the cases when
8951 we have an explicit constructor and when our operand is a variable
8952 that was declared const. */
8953
8954 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
8955 && host_integerp (index, 0)
8956 && 0 > compare_tree_int (index,
8957 list_length (CONSTRUCTOR_ELTS
8958 (TREE_OPERAND (exp, 0)))))
8959 {
8960 tree elem;
8961
8962 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
8963 i = tree_low_cst (index, 0);
8964 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
8965 ;
8966
8967 if (elem)
8968 return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
8969 }
8970
8971 else if (optimize >= 1
8972 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8973 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8974 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8975 {
8976 if (TREE_CODE (index) == INTEGER_CST)
8977 {
8978 tree init = DECL_INITIAL (array);
8979
8980 if (TREE_CODE (init) == CONSTRUCTOR)
8981 {
8982 tree elem;
8983
8984 for (elem = CONSTRUCTOR_ELTS (init);
8985 ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
8986 elem = TREE_CHAIN (elem))
8987 ;
8988
8989 if (elem)
8990 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8991 palign);
8992 }
8993 }
8994 }
8995 }
8996 /* Fall through. */
8997
8998 case COMPONENT_REF:
8999 case BIT_FIELD_REF:
9000 case ARRAY_RANGE_REF:
9001 /* If the operand is a CONSTRUCTOR, we can just extract the
9002 appropriate field if it is present. Don't do this if we have
9003 already written the data since we want to refer to that copy
9004 and varasm.c assumes that's what we'll do. */
9005 if (TREE_CODE (exp) == COMPONENT_REF
9006 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
9007 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
9008 {
9009 tree elt;
9010
9011 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
9012 elt = TREE_CHAIN (elt))
9013 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
9014 /* Note that unlike the case in expand_expr, we know this is
9015 BLKmode and hence not an integer. */
9016 return expand_expr_unaligned (TREE_VALUE (elt), palign);
9017 }
9018
9019 {
9020 enum machine_mode mode1;
9021 HOST_WIDE_INT bitsize, bitpos;
9022 tree offset;
9023 int volatilep = 0;
9024 unsigned int alignment;
9025 int unsignedp;
9026 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
9027 &mode1, &unsignedp, &volatilep,
9028 &alignment);
9029
9030 /* If we got back the original object, something is wrong. Perhaps
9031 we are evaluating an expression too early. In any event, don't
9032 infinitely recurse. */
9033 if (tem == exp)
9034 abort ();
9035
9036 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
9037
9038 /* If this is a constant, put it into a register if it is a
9039 legitimate constant and OFFSET is 0 and memory if it isn't. */
9040 if (CONSTANT_P (op0))
9041 {
9042 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
9043
9044 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
9045 && offset == 0)
9046 op0 = force_reg (inner_mode, op0);
9047 else
9048 op0 = validize_mem (force_const_mem (inner_mode, op0));
9049 }
9050
9051 if (offset != 0)
9052 {
9053 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
9054
9055 /* If this object is in a register, put it into memory.
9056 This case can't occur in C, but can in Ada if we have
9057 unchecked conversion of an expression from a scalar type to
9058 an array or record type. */
9059 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9060 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
9061 {
9062 tree nt = build_qualified_type (TREE_TYPE (tem),
9063 (TYPE_QUALS (TREE_TYPE (tem))
9064 | TYPE_QUAL_CONST));
9065 rtx memloc = assign_temp (nt, 1, 1, 1);
9066
9067 mark_temp_addr_taken (memloc);
9068 emit_move_insn (memloc, op0);
9069 op0 = memloc;
9070 }
9071
9072 if (GET_CODE (op0) != MEM)
9073 abort ();
9074
9075 if (GET_MODE (offset_rtx) != ptr_mode)
9076 {
9077#ifdef POINTERS_EXTEND_UNSIGNED
9078 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
9079#else
9080 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
9081#endif
9082 }
9083
9084 op0 = change_address (op0, VOIDmode,
9085 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
9086 force_reg (ptr_mode,
9087 offset_rtx)));
9088 }
9089
9090 /* Don't forget about volatility even if this is a bitfield. */
9091 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
9092 {
9093 op0 = copy_rtx (op0);
9094 MEM_VOLATILE_P (op0) = 1;
9095 }
9096
9097 /* Check the access. */
9098 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
9099 {
9100 rtx to;
9101 int size;
9102
9103 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
9104 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
9105
9106 /* Check the access right of the pointer. */
9107 in_check_memory_usage = 1;
9108 if (size > BITS_PER_UNIT)
9109 emit_library_call (chkr_check_addr_libfunc,
9110 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
9111 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
9112 TYPE_MODE (sizetype),
9113 GEN_INT (MEMORY_USE_RO),
9114 TYPE_MODE (integer_type_node));
9115 in_check_memory_usage = 0;
9116 }
9117
9118 /* In cases where an aligned union has an unaligned object
9119 as a field, we might be extracting a BLKmode value from
9120 an integer-mode (e.g., SImode) object. Handle this case
9121 by doing the extract into an object as wide as the field
9122 (which we know to be the width of a basic mode), then
9123 storing into memory, and changing the mode to BLKmode.
9124 If we ultimately want the address (EXPAND_CONST_ADDRESS or
9125 EXPAND_INITIALIZER), then we must not copy to a temporary. */
9126 if (mode1 == VOIDmode
9127 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9128 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
9129 && (TYPE_ALIGN (type) > alignment
9130 || bitpos % TYPE_ALIGN (type) != 0)))
9131 {
9132 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
9133
9134 if (ext_mode == BLKmode)
9135 {
9136 /* In this case, BITPOS must start at a byte boundary. */
9137 if (GET_CODE (op0) != MEM
9138 || bitpos % BITS_PER_UNIT != 0)
9139 abort ();
9140
9141 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
9142 }
9143 else
9144 {
9145 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
9146 TYPE_QUAL_CONST);
9147 rtx new = assign_temp (nt, 0, 1, 1);
9148
9149 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
9150 unsignedp, NULL_RTX, ext_mode,
9151 ext_mode, alignment,
9152 int_size_in_bytes (TREE_TYPE (tem)));
9153
9154 /* If the result is a record type and BITSIZE is narrower than
9155 the mode of OP0, an integral mode, and this is a big endian
9156 machine, we must put the field into the high-order bits. */
9157 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
9158 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9159 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
9160 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
9161 size_int (GET_MODE_BITSIZE
9162 (GET_MODE (op0))
9163 - bitsize),
9164 op0, 1);
9165
9166 emit_move_insn (new, op0);
9167 op0 = copy_rtx (new);
9168 PUT_MODE (op0, BLKmode);
9169 }
9170 }
9171 else
9172 /* Get a reference to just this component. */
9173 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9174
9175 set_mem_alias_set (op0, get_alias_set (exp));
9176
9177 /* Adjust the alignment in case the bit position is not
9178 a multiple of the alignment of the inner object. */
9179 while (bitpos % alignment != 0)
9180 alignment >>= 1;
9181
9182 if (GET_CODE (XEXP (op0, 0)) == REG)
9183 mark_reg_pointer (XEXP (op0, 0), alignment);
9184
9185 MEM_IN_STRUCT_P (op0) = 1;
9186 MEM_VOLATILE_P (op0) |= volatilep;
9187
9188 *palign = alignment;
9189 return op0;
9190 }
9191
9192 default:
9193 break;
9194
9195 }
9196
9197 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
9198}
9199\f
9200/* Return the tree node if ARG corresponds to a string constant or zero
9201 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
9202 in bytes within the string that ARG is accessing. The type of the
9203 offset will be `sizetype'. */
9204
9205 tree
9206string_constant (arg, ptr_offset)
9207 tree arg;
9208 tree *ptr_offset;
9209{
9210 STRIP_NOPS (arg);
9211
9212 if (TREE_CODE (arg) == ADDR_EXPR
9213 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9214 {
9215 *ptr_offset = size_zero_node;
9216 return TREE_OPERAND (arg, 0);
9217 }
9218 else if (TREE_CODE (arg) == PLUS_EXPR)
9219 {
9220 tree arg0 = TREE_OPERAND (arg, 0);
9221 tree arg1 = TREE_OPERAND (arg, 1);
9222
9223 STRIP_NOPS (arg0);
9224 STRIP_NOPS (arg1);
9225
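 /* Either operand of the PLUS_EXPR may be the string address; both
 "abc" + i and i + "abc" reach here, so the two arms below are
 symmetric. */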
9226 if (TREE_CODE (arg0) == ADDR_EXPR
9227 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9228 {
9229 *ptr_offset = convert (sizetype, arg1);
9230 return TREE_OPERAND (arg0, 0);
9231 }
9232 else if (TREE_CODE (arg1) == ADDR_EXPR
9233 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9234 {
9235 *ptr_offset = convert (sizetype, arg0);
9236 return TREE_OPERAND (arg1, 0);
9237 }
9238 }
9239
9240 return 0;
9241}
9242\f
9243/* Expand code for a post- or pre- increment or decrement
9244 and return the RTX for the result.
9245 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9246
9247static rtx
9248expand_increment (exp, post, ignore)
9249 register tree exp;
9250 int post, ignore;
9251 {
9252 register rtx op0, op1;
9253 register rtx temp, value;
9254 register tree incremented = TREE_OPERAND (exp, 0);
9255 optab this_optab = add_optab;
9256 int icode;
9257 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9258 int op0_is_copy = 0;
9259 int single_insn = 0;
9260 /* 1 means we can't store into OP0 directly,
9261 because it is a subreg narrower than a word,
9262 and we don't dare clobber the rest of the word. */
9263 int bad_subreg = 0;
9264
9265 /* Stabilize any component ref that might need to be
9266 evaluated more than once below. */
9267 if (!post
9268 || TREE_CODE (incremented) == BIT_FIELD_REF
9269 || (TREE_CODE (incremented) == COMPONENT_REF
9270 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9271 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9272 incremented = stabilize_reference (incremented);
9273 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9274 ones into save exprs so that they don't accidentally get evaluated
9275 more than once by the code below. */
9276 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9277 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9278 incremented = save_expr (incremented);
9279
9280 /* Compute the operands as RTX.
9281 Note whether OP0 is the actual lvalue or a copy of it:
9282 I believe it is a copy iff it is a register or subreg
9283 and insns were generated in computing it. */
9284
9285 temp = get_last_insn ();
9286 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
9287
9288 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9289 in place but instead must do sign- or zero-extension during assignment,
9290 so we copy it into a new register and let the code below use it as
9291 a copy.
9292
9293 Note that we can safely modify this SUBREG since it is known not to be
9294 shared (it was made by the expand_expr call above). */
9295
9296 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9297 {
9298 if (post)
9299 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9300 else
9301 bad_subreg = 1;
9302 }
9303 else if (GET_CODE (op0) == SUBREG
9304 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9305 {
9306 /* We cannot increment this SUBREG in place. If we are
9307 post-incrementing, get a copy of the old value. Otherwise,
9308 just mark that we cannot increment in place. */
9309 if (post)
9310 op0 = copy_to_reg (op0);
9311 else
9312 bad_subreg = 1;
9313 }
9314
9315 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9316 && temp != get_last_insn ());
9317 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
9318 EXPAND_MEMORY_USE_BAD);
1499e0a8 9319
b93a436e
JL
9320 /* Decide whether incrementing or decrementing. */
9321 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9322 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9323 this_optab = sub_optab;
9324
9325 /* Convert decrement by a constant into a negative increment. */
9326 if (this_optab == sub_optab
9327 && GET_CODE (op1) == CONST_INT)
9328 {
9329 op1 = GEN_INT (-INTVAL (op1));
9330 this_optab = add_optab;
9331 }
9332
9333 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9334 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9335
9336 /* For a preincrement, see if we can do this with a single instruction. */
9337 if (!post)
9338 {
9339 icode = (int) this_optab->handlers[(int) mode].insn_code;
9340 if (icode != (int) CODE_FOR_nothing
9341 /* Make sure that OP0 is valid for operands 0 and 1
9342 of the insn we want to queue. */
9343 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9344 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9345 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9346 single_insn = 1;
9347 }
bbf6f052 9348
b93a436e
JL
9349 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9350 then we cannot just increment OP0. We must therefore contrive to
9351 increment the original value. Then, for postincrement, we can return
9352 OP0 since it is a copy of the old value. For preincrement, expand here
9353 unless we can do it with a single insn.
bbf6f052 9354
b93a436e
JL
9355 Likewise if storing directly into OP0 would clobber high bits
9356 we need to preserve (bad_subreg). */
9357 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
a358cee0 9358 {
b93a436e
JL
9359 /* This is the easiest way to increment the value wherever it is.
9360 Problems with multiple evaluation of INCREMENTED are prevented
9361 because either (1) it is a component_ref or preincrement,
9362 in which case it was stabilized above, or (2) it is an array_ref
9363 with constant index in an array in a register, which is
9364 safe to reevaluate. */
9365 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9366 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9367 ? MINUS_EXPR : PLUS_EXPR),
9368 TREE_TYPE (exp),
9369 incremented,
9370 TREE_OPERAND (exp, 1));
a358cee0 9371
b93a436e
JL
9372 while (TREE_CODE (incremented) == NOP_EXPR
9373 || TREE_CODE (incremented) == CONVERT_EXPR)
9374 {
9375 newexp = convert (TREE_TYPE (incremented), newexp);
9376 incremented = TREE_OPERAND (incremented, 0);
9377 }
bbf6f052 9378
b93a436e
JL
9379 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9380 return post ? op0 : temp;
9381 }
bbf6f052 9382
b93a436e
JL
9383 if (post)
9384 {
9385 /* We have a true reference to the value in OP0.
9386 If there is an insn to add or subtract in this mode, queue it.
9387 Queueing the increment insn avoids the register shuffling
9388 that often results if we must increment now and first save
9389 the old value for subsequent use. */
bbf6f052 9390
b93a436e
JL
9391#if 0 /* Turned off to avoid making extra insn for indexed memref. */
9392 op0 = stabilize (op0);
9393#endif
41dfd40c 9394
b93a436e
JL
9395 icode = (int) this_optab->handlers[(int) mode].insn_code;
9396 if (icode != (int) CODE_FOR_nothing
9397 /* Make sure that OP0 is valid for operands 0 and 1
9398 of the insn we want to queue. */
a995e389
RH
9399 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9400 && (*insn_data[icode].operand[1].predicate) (op0, mode))
b93a436e 9401 {
a995e389 9402 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
b93a436e 9403 op1 = force_reg (mode, op1);
bbf6f052 9404
b93a436e
JL
9405 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9406 }
9407 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9408 {
9409 rtx addr = (general_operand (XEXP (op0, 0), mode)
9410 ? force_reg (Pmode, XEXP (op0, 0))
9411 : copy_to_reg (XEXP (op0, 0)));
9412 rtx temp, result;
ca695ac9 9413
792760b9 9414 op0 = replace_equiv_address (op0, addr);
b93a436e 9415 temp = force_reg (GET_MODE (op0), op0);
a995e389 9416 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
b93a436e 9417 op1 = force_reg (mode, op1);
ca695ac9 9418
b93a436e
JL
9419 /* The increment queue is LIFO, thus we have to `queue'
9420 the instructions in reverse order. */
9421 enqueue_insn (op0, gen_move_insn (op0, temp));
9422 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9423 return result;
bbf6f052
RK
9424 }
9425 }
ca695ac9 9426
b93a436e
JL
9427 /* Preincrement, or we can't increment with one simple insn. */
9428 if (post)
9429 /* Save a copy of the value before inc or dec, to return it later. */
9430 temp = value = copy_to_reg (op0);
9431 else
9432 /* Arrange to return the incremented value. */
9433 /* Copy the rtx because expand_binop will protect from the queue,
9434 and the results of that would be invalid for us to return
9435 if our caller does emit_queue before using our result. */
9436 temp = copy_rtx (value = op0);
bbf6f052 9437
b93a436e
JL
9438 /* Increment however we can. */
9439 op1 = expand_binop (mode, this_optab, value, op1,
3a94c984 9440 current_function_check_memory_usage ? NULL_RTX : op0,
b93a436e
JL
9441 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9442 /* Make sure the value is stored into OP0. */
9443 if (op1 != op0)
9444 emit_move_insn (op0, op1);
5718612f 9445
b93a436e
JL
9446 return temp;
9447}
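
/* An illustrative C-level sketch of the strategy above (an editorial
   analogue under assumed int operands, not part of the original source):
   when OP0 must not be modified in place, a postincrement saves a copy
   first and the real lvalue is updated through the rebuilt PLUS_EXPR.  */
#if 0
static int postincrement_sketch (int *lvalue)
{
  int old = *lvalue;		/* the copy that `i++' yields */
  *lvalue = old + 1;		/* increment the true lvalue */
  return old;			/* preincrement would return old + 1 */
}
#endif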
9448\f
b93a436e
JL
9449/* At the start of a function, record that we have no previously-pushed
9450 arguments waiting to be popped. */
bbf6f052 9451
b93a436e
JL
9452void
9453init_pending_stack_adjust ()
9454{
9455 pending_stack_adjust = 0;
9456}
bbf6f052 9457
b93a436e 9458/* When exiting from a function, if safe, clear out any pending stack adjust
060fbabf
JL
9459 so the adjustment won't get done.
9460
9461 Note, if the current function calls alloca, then it must have a
9462 frame pointer regardless of the value of flag_omit_frame_pointer. */
bbf6f052 9463
b93a436e
JL
9464void
9465clear_pending_stack_adjust ()
9466{
9467#ifdef EXIT_IGNORE_STACK
9468 if (optimize > 0
060fbabf
JL
9469 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9470 && EXIT_IGNORE_STACK
b93a436e
JL
9471 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9472 && ! flag_inline_functions)
1503a7ec
JH
9473 {
9474 stack_pointer_delta -= pending_stack_adjust,
9475 pending_stack_adjust = 0;
9476 }
b93a436e
JL
9477#endif
9478}
bbf6f052 9479
b93a436e
JL
9480/* Pop any previously-pushed arguments that have not been popped yet. */
9481
9482void
9483do_pending_stack_adjust ()
9484{
9485 if (inhibit_defer_pop == 0)
ca695ac9 9486 {
b93a436e
JL
9487 if (pending_stack_adjust != 0)
9488 adjust_stack (GEN_INT (pending_stack_adjust));
9489 pending_stack_adjust = 0;
bbf6f052 9490 }
bbf6f052
RK
9491}
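
/* For instance (a hedged sketch, not from the original file): on a target
   where the caller pops argument bytes, deferring the pops lets two calls
   share the single stack adjustment emitted here.  */
#if 0
extern void f (int), g (int);

static void two_calls_sketch (void)
{
  f (1);	/* pop of f's argument bytes is deferred... */
  g (2);	/* ...accumulated with g's, and done as one adjust_stack.  */
}
#endif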
9492\f
b93a436e 9493/* Expand conditional expressions. */
bbf6f052 9494
b93a436e
JL
9495/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9496 LABEL is an rtx of code CODE_LABEL, in this function and all the
9497 functions here. */
bbf6f052 9498
b93a436e
JL
9499void
9500jumpifnot (exp, label)
ca695ac9 9501 tree exp;
b93a436e 9502 rtx label;
bbf6f052 9503{
b93a436e
JL
9504 do_jump (exp, label, NULL_RTX);
9505}
bbf6f052 9506
b93a436e 9507/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
ca695ac9 9508
b93a436e
JL
9509void
9510jumpif (exp, label)
9511 tree exp;
9512 rtx label;
9513{
9514 do_jump (exp, NULL_RTX, label);
9515}
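
/* do_jump, below, is what turns the short-circuit operators into control
   flow.  An illustrative C analogue (an editorial sketch, not original
   code): for `a && b', the second operand is never evaluated when the
   first is zero.  */
#if 0
extern void then_part (void);

static void andif_sketch (int a, int b)
{
  if (a == 0)
    goto drop_through;		/* TRUTH_ANDIF_EXPR: jump false on A */
  if (b == 0)
    goto drop_through;		/* then test B the same way */
  then_part ();
 drop_through:;
}
#endif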
ca695ac9 9516
b93a436e
JL
9517/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9518 the result is zero, or IF_TRUE_LABEL if the result is one.
9519 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9520 meaning fall through in that case.
ca695ac9 9521
b93a436e
JL
9522 do_jump always does any pending stack adjust except when it does not
9523 actually perform a jump. An example where there is no jump
9524 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
ca695ac9 9525
b93a436e
JL
9526 This function is responsible for optimizing cases such as
9527 &&, || and comparison operators in EXP. */
5718612f 9528
b93a436e
JL
9529void
9530do_jump (exp, if_false_label, if_true_label)
9531 tree exp;
9532 rtx if_false_label, if_true_label;
9533{
9534 register enum tree_code code = TREE_CODE (exp);
9535 /* Some cases need to create a label to jump to
9536 in order to properly fall through.
9537 These cases set DROP_THROUGH_LABEL nonzero. */
9538 rtx drop_through_label = 0;
9539 rtx temp;
b93a436e
JL
9540 int i;
9541 tree type;
9542 enum machine_mode mode;
ca695ac9 9543
dbecbbe4
JL
9544#ifdef MAX_INTEGER_COMPUTATION_MODE
9545 check_max_integer_computation_mode (exp);
9546#endif
9547
b93a436e 9548 emit_queue ();
ca695ac9 9549
b93a436e 9550 switch (code)
ca695ac9 9551 {
b93a436e 9552 case ERROR_MARK:
ca695ac9 9553 break;
bbf6f052 9554
b93a436e
JL
9555 case INTEGER_CST:
9556 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9557 if (temp)
9558 emit_jump (temp);
9559 break;
bbf6f052 9560
b93a436e
JL
9561#if 0
9562 /* This is not true with #pragma weak */
9563 case ADDR_EXPR:
9564 /* The address of something can never be zero. */
9565 if (if_true_label)
9566 emit_jump (if_true_label);
9567 break;
9568#endif
bbf6f052 9569
b93a436e
JL
9570 case NOP_EXPR:
9571 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9572 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
b4e3fabb
RK
9573 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9574 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
b93a436e
JL
9575 goto normal;
9576 case CONVERT_EXPR:
9577 /* If we are narrowing the operand, we have to do the compare in the
9578 narrower mode. */
9579 if ((TYPE_PRECISION (TREE_TYPE (exp))
9580 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9581 goto normal;
9582 case NON_LVALUE_EXPR:
9583 case REFERENCE_EXPR:
9584 case ABS_EXPR:
9585 case NEGATE_EXPR:
9586 case LROTATE_EXPR:
9587 case RROTATE_EXPR:
9588 /* These cannot change zero->non-zero or vice versa. */
9589 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9590 break;
bbf6f052 9591
14a774a9
RK
9592 case WITH_RECORD_EXPR:
9593 /* Put the object on the placeholder list, recurse through our first
9594 operand, and pop the list. */
9595 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9596 placeholder_list);
9597 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9598 placeholder_list = TREE_CHAIN (placeholder_list);
9599 break;
9600
b93a436e
JL
9601#if 0
9602 /* This never takes fewer insns than evaluating the PLUS_EXPR followed by
9603 a test and can be longer if the test is eliminated. */
9604 case PLUS_EXPR:
9605 /* Reduce to minus. */
9606 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9607 TREE_OPERAND (exp, 0),
9608 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9609 TREE_OPERAND (exp, 1))));
9610 /* Process as MINUS. */
ca695ac9 9611#endif
bbf6f052 9612
b93a436e
JL
9613 case MINUS_EXPR:
9614 /* Non-zero iff operands of minus differ. */
b30f05db
BS
9615 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9616 TREE_OPERAND (exp, 0),
9617 TREE_OPERAND (exp, 1)),
9618 NE, NE, if_false_label, if_true_label);
b93a436e 9619 break;
bbf6f052 9620
b93a436e
JL
9621 case BIT_AND_EXPR:
9622 /* If we are AND'ing with a small constant, do this comparison in the
9623 smallest type that fits. If the machine doesn't have comparisons
9624 that small, it will be converted back to the wider comparison.
9625 This helps if we are testing the sign bit of a narrower object.
9626 combine can't do this for us because it can't know whether a
9627 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
bbf6f052 9628
b93a436e
JL
9629 if (! SLOW_BYTE_ACCESS
9630 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9631 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
05bccae2 9632 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
b93a436e
JL
9633 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9634 && (type = type_for_mode (mode, 1)) != 0
9635 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9636 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9637 != CODE_FOR_nothing))
9638 {
9639 do_jump (convert (type, exp), if_false_label, if_true_label);
9640 break;
9641 }
9642 goto normal;
bbf6f052 9643
b93a436e
JL
9644 case TRUTH_NOT_EXPR:
9645 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9646 break;
bbf6f052 9647
b93a436e
JL
9648 case TRUTH_ANDIF_EXPR:
9649 if (if_false_label == 0)
9650 if_false_label = drop_through_label = gen_label_rtx ();
9651 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9652 start_cleanup_deferral ();
9653 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9654 end_cleanup_deferral ();
9655 break;
bbf6f052 9656
b93a436e
JL
9657 case TRUTH_ORIF_EXPR:
9658 if (if_true_label == 0)
9659 if_true_label = drop_through_label = gen_label_rtx ();
9660 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9661 start_cleanup_deferral ();
9662 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9663 end_cleanup_deferral ();
9664 break;
bbf6f052 9665
b93a436e
JL
9666 case COMPOUND_EXPR:
9667 push_temp_slots ();
9668 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9669 preserve_temp_slots (NULL_RTX);
9670 free_temp_slots ();
9671 pop_temp_slots ();
9672 emit_queue ();
9673 do_pending_stack_adjust ();
9674 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9675 break;
bbf6f052 9676
b93a436e
JL
9677 case COMPONENT_REF:
9678 case BIT_FIELD_REF:
9679 case ARRAY_REF:
b4e3fabb 9680 case ARRAY_RANGE_REF:
b93a436e 9681 {
770ae6cc
RK
9682 HOST_WIDE_INT bitsize, bitpos;
9683 int unsignedp;
b93a436e
JL
9684 enum machine_mode mode;
9685 tree type;
9686 tree offset;
9687 int volatilep = 0;
729a2125 9688 unsigned int alignment;
bbf6f052 9689
b93a436e
JL
9690 /* Get description of this reference. We don't actually care
9691 about the underlying object here. */
19caa751
RK
9692 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9693 &unsignedp, &volatilep, &alignment);
bbf6f052 9694
b93a436e
JL
9695 type = type_for_size (bitsize, unsignedp);
9696 if (! SLOW_BYTE_ACCESS
9697 && type != 0 && bitsize >= 0
9698 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9699 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9700 != CODE_FOR_nothing))
9701 {
9702 do_jump (convert (type, exp), if_false_label, if_true_label);
9703 break;
9704 }
9705 goto normal;
9706 }
bbf6f052 9707
b93a436e
JL
9708 case COND_EXPR:
9709 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9710 if (integer_onep (TREE_OPERAND (exp, 1))
9711 && integer_zerop (TREE_OPERAND (exp, 2)))
9712 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
bbf6f052 9713
b93a436e
JL
9714 else if (integer_zerop (TREE_OPERAND (exp, 1))
9715 && integer_onep (TREE_OPERAND (exp, 2)))
9716 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
bbf6f052 9717
b93a436e
JL
9718 else
9719 {
9720 register rtx label1 = gen_label_rtx ();
9721 drop_through_label = gen_label_rtx ();
bbf6f052 9722
b93a436e 9723 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
bbf6f052 9724
b93a436e
JL
9725 start_cleanup_deferral ();
9726 /* Now the THEN-expression. */
9727 do_jump (TREE_OPERAND (exp, 1),
9728 if_false_label ? if_false_label : drop_through_label,
9729 if_true_label ? if_true_label : drop_through_label);
9730 /* In case the do_jump just above never jumps. */
9731 do_pending_stack_adjust ();
9732 emit_label (label1);
bbf6f052 9733
b93a436e
JL
9734 /* Now the ELSE-expression. */
9735 do_jump (TREE_OPERAND (exp, 2),
9736 if_false_label ? if_false_label : drop_through_label,
9737 if_true_label ? if_true_label : drop_through_label);
9738 end_cleanup_deferral ();
9739 }
9740 break;
bbf6f052 9741
b93a436e
JL
9742 case EQ_EXPR:
9743 {
9744 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
bbf6f052 9745
9ec36da5
JL
9746 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9747 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
8d62b411
AS
9748 {
9749 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9750 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9751 do_jump
9752 (fold
9753 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9754 fold (build (EQ_EXPR, TREE_TYPE (exp),
9755 fold (build1 (REALPART_EXPR,
9756 TREE_TYPE (inner_type),
9757 exp0)),
9758 fold (build1 (REALPART_EXPR,
9759 TREE_TYPE (inner_type),
9760 exp1)))),
9761 fold (build (EQ_EXPR, TREE_TYPE (exp),
9762 fold (build1 (IMAGPART_EXPR,
9763 TREE_TYPE (inner_type),
9764 exp0)),
9765 fold (build1 (IMAGPART_EXPR,
9766 TREE_TYPE (inner_type),
9767 exp1)))))),
9768 if_false_label, if_true_label);
9769 }
9ec36da5
JL
9770
9771 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9772 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9773
b93a436e 9774 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
1eb8759b 9775 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
b93a436e
JL
9776 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9777 else
b30f05db 9778 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
b93a436e
JL
9779 break;
9780 }
bbf6f052 9781
b93a436e
JL
9782 case NE_EXPR:
9783 {
9784 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
bbf6f052 9785
9ec36da5
JL
9786 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9787 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
8d62b411
AS
9788 {
9789 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9790 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9791 do_jump
9792 (fold
9793 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9794 fold (build (NE_EXPR, TREE_TYPE (exp),
9795 fold (build1 (REALPART_EXPR,
9796 TREE_TYPE (inner_type),
9797 exp0)),
9798 fold (build1 (REALPART_EXPR,
9799 TREE_TYPE (inner_type),
9800 exp1)))),
9801 fold (build (NE_EXPR, TREE_TYPE (exp),
9802 fold (build1 (IMAGPART_EXPR,
9803 TREE_TYPE (inner_type),
9804 exp0)),
9805 fold (build1 (IMAGPART_EXPR,
9806 TREE_TYPE (inner_type),
9807 exp1)))))),
9808 if_false_label, if_true_label);
9809 }
9ec36da5
JL
9810
9811 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9812 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9813
b93a436e 9814 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
1eb8759b 9815 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
b93a436e
JL
9816 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9817 else
b30f05db 9818 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
b93a436e
JL
9819 break;
9820 }
bbf6f052 9821
b93a436e 9822 case LT_EXPR:
1c0290ea
BS
9823 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9824 if (GET_MODE_CLASS (mode) == MODE_INT
1eb8759b 9825 && ! can_compare_p (LT, mode, ccp_jump))
b93a436e
JL
9826 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9827 else
b30f05db 9828 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
b93a436e 9829 break;
bbf6f052 9830
b93a436e 9831 case LE_EXPR:
1c0290ea
BS
9832 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9833 if (GET_MODE_CLASS (mode) == MODE_INT
1eb8759b 9834 && ! can_compare_p (LE, mode, ccp_jump))
b93a436e
JL
9835 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9836 else
b30f05db 9837 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
b93a436e 9838 break;
bbf6f052 9839
b93a436e 9840 case GT_EXPR:
1c0290ea
BS
9841 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9842 if (GET_MODE_CLASS (mode) == MODE_INT
1eb8759b 9843 && ! can_compare_p (GT, mode, ccp_jump))
b93a436e
JL
9844 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9845 else
b30f05db 9846 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
b93a436e 9847 break;
bbf6f052 9848
b93a436e 9849 case GE_EXPR:
1c0290ea
BS
9850 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9851 if (GET_MODE_CLASS (mode) == MODE_INT
1eb8759b 9852 && ! can_compare_p (GE, mode, ccp_jump))
b93a436e
JL
9853 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9854 else
b30f05db 9855 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
b93a436e 9856 break;
bbf6f052 9857
1eb8759b
RH
9858 case UNORDERED_EXPR:
9859 case ORDERED_EXPR:
9860 {
9861 enum rtx_code cmp, rcmp;
9862 int do_rev;
9863
9864 if (code == UNORDERED_EXPR)
9865 cmp = UNORDERED, rcmp = ORDERED;
9866 else
9867 cmp = ORDERED, rcmp = UNORDERED;
3a94c984 9868 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
1eb8759b
RH
9869
9870 do_rev = 0;
9871 if (! can_compare_p (cmp, mode, ccp_jump)
9872 && (can_compare_p (rcmp, mode, ccp_jump)
9873 /* If the target doesn't provide either UNORDERED or ORDERED
9874 comparisons, canonicalize on UNORDERED for the library. */
9875 || rcmp == UNORDERED))
9876 do_rev = 1;
9877
9878 if (! do_rev)
9879 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9880 else
9881 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9882 }
9883 break;
9884
9885 {
9886 enum rtx_code rcode1;
9887 enum tree_code tcode2;
9888
9889 case UNLT_EXPR:
9890 rcode1 = UNLT;
9891 tcode2 = LT_EXPR;
9892 goto unordered_bcc;
9893 case UNLE_EXPR:
9894 rcode1 = UNLE;
9895 tcode2 = LE_EXPR;
9896 goto unordered_bcc;
9897 case UNGT_EXPR:
9898 rcode1 = UNGT;
9899 tcode2 = GT_EXPR;
9900 goto unordered_bcc;
9901 case UNGE_EXPR:
9902 rcode1 = UNGE;
9903 tcode2 = GE_EXPR;
9904 goto unordered_bcc;
9905 case UNEQ_EXPR:
9906 rcode1 = UNEQ;
9907 tcode2 = EQ_EXPR;
9908 goto unordered_bcc;
7913f3d0 9909
1eb8759b
RH
9910 unordered_bcc:
9911 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9912 if (can_compare_p (rcode1, mode, ccp_jump))
9913 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9914 if_true_label);
9915 else
9916 {
9917 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9918 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9919 tree cmp0, cmp1;
9920
3a94c984 9921 /* If the target doesn't support combined unordered
1eb8759b
RH
9922 compares, decompose into UNORDERED + comparison. */
9923 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9924 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9925 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9926 do_jump (exp, if_false_label, if_true_label);
9927 }
9928 }
9929 break;
9930
5f2d6cfa
MM
9931 /* Special case:
9932 __builtin_expect (<test>, 0) and
9933 __builtin_expect (<test>, 1)
9934
9935 We need to do this here, so that <test> is not converted to an SCC
9936 operation on machines that use condition code registers and COMPARE
9937 (like the PowerPC), with the jump then made according to whether the
9938 SCC operation produced a 1 or a 0. */
9939 case CALL_EXPR:
9940 /* Check for a built-in function. */
9941 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
9942 {
9943 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9944 tree arglist = TREE_OPERAND (exp, 1);
9945
9946 if (TREE_CODE (fndecl) == FUNCTION_DECL
9947 && DECL_BUILT_IN (fndecl)
9948 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
9949 && arglist != NULL_TREE
9950 && TREE_CHAIN (arglist) != NULL_TREE)
9951 {
9952 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
9953 if_true_label);
9954
9955 if (seq != NULL_RTX)
9956 {
9957 emit_insn (seq);
9958 return;
9959 }
9960 }
9961 }
9962 /* fall through and generate the normal code. */
9963
b93a436e
JL
9964 default:
9965 normal:
9966 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9967#if 0
9968 /* This is not needed any more and causes poor code since it causes
9969 comparisons and tests from non-SI objects to have different code
9970 sequences. */
9971 /* Copy to register to avoid generating bad insns by cse
9972 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9973 if (!cse_not_expected && GET_CODE (temp) == MEM)
9974 temp = copy_to_reg (temp);
ca695ac9 9975#endif
b93a436e 9976 do_pending_stack_adjust ();
b30f05db
BS
9977 /* Do any postincrements in the expression that was tested. */
9978 emit_queue ();
9979
998a298e
GK
9980 if (GET_CODE (temp) == CONST_INT
9981 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
9982 || GET_CODE (temp) == LABEL_REF)
b30f05db
BS
9983 {
9984 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9985 if (target)
9986 emit_jump (target);
9987 }
b93a436e 9988 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
1eb8759b 9989 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
b93a436e
JL
9990 /* Note swapping the labels gives us not-equal. */
9991 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9992 else if (GET_MODE (temp) != VOIDmode)
b30f05db
BS
9993 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9994 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9995 GET_MODE (temp), NULL_RTX, 0,
9996 if_false_label, if_true_label);
b93a436e
JL
9997 else
9998 abort ();
9999 }
bbf6f052 10000
b93a436e
JL
10001 if (drop_through_label)
10002 {
10003 /* If do_jump produces code that might be jumped around,
10004 do any stack adjusts from that code, before the place
10005 where control merges in. */
10006 do_pending_stack_adjust ();
10007 emit_label (drop_through_label);
10008 }
bbf6f052 10009}
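
/* The complex EQ_EXPR/NE_EXPR cases above reduce to tests on the real and
   imaginary parts.  A C sketch of the identity (editorial illustration;
   the struct merely stands in for a complex mode):  */
#if 0
struct complex_sketch { double re, im; };

static int complex_eq_sketch (struct complex_sketch a, struct complex_sketch b)
{
  /* EQ combines the part tests with TRUTH_ANDIF_EXPR; NE is the dual,
     using TRUTH_ORIF_EXPR on != subtests.  */
  return a.re == b.re && a.im == b.im;
}
#endif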
b93a436e
JL
10010\f
10011/* Given a comparison expression EXP for values too wide to be compared
10012 with one insn, test the comparison and jump to the appropriate label.
10013 The code of EXP is ignored; we always test GT if SWAP is 0,
10014 and LT if SWAP is 1. */
bbf6f052 10015
b93a436e
JL
10016static void
10017do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10018 tree exp;
10019 int swap;
10020 rtx if_false_label, if_true_label;
10021{
10022 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10023 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10024 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
b93a436e 10025 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
bbf6f052 10026
b30f05db 10027 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
f81497d9
RS
10028}
10029
b93a436e
JL
10030/* Compare OP0 with OP1, word at a time, in mode MODE.
10031 UNSIGNEDP says to do unsigned comparison.
10032 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
f81497d9 10033
b93a436e
JL
10034void
10035do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10036 enum machine_mode mode;
10037 int unsignedp;
10038 rtx op0, op1;
10039 rtx if_false_label, if_true_label;
f81497d9 10040{
b93a436e
JL
10041 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10042 rtx drop_through_label = 0;
10043 int i;
f81497d9 10044
b93a436e
JL
10045 if (! if_true_label || ! if_false_label)
10046 drop_through_label = gen_label_rtx ();
10047 if (! if_true_label)
10048 if_true_label = drop_through_label;
10049 if (! if_false_label)
10050 if_false_label = drop_through_label;
f81497d9 10051
b93a436e
JL
10052 /* Compare a word at a time, high order first. */
10053 for (i = 0; i < nwords; i++)
10054 {
b93a436e 10055 rtx op0_word, op1_word;
bbf6f052 10056
b93a436e
JL
10057 if (WORDS_BIG_ENDIAN)
10058 {
10059 op0_word = operand_subword_force (op0, i, mode);
10060 op1_word = operand_subword_force (op1, i, mode);
10061 }
10062 else
10063 {
10064 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10065 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10066 }
bbf6f052 10067
b93a436e 10068 /* All but high-order word must be compared as unsigned. */
b30f05db
BS
10069 do_compare_rtx_and_jump (op0_word, op1_word, GT,
10070 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
10071 NULL_RTX, if_true_label);
bbf6f052 10072
b93a436e 10073 /* Consider lower words only if these are equal. */
b30f05db
BS
10074 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
10075 NULL_RTX, 0, NULL_RTX, if_false_label);
b93a436e 10076 }
bbf6f052 10077
b93a436e
JL
10078 if (if_false_label)
10079 emit_jump (if_false_label);
10080 if (drop_through_label)
10081 emit_label (drop_through_label);
bbf6f052
RK
10082}
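
/* A C sketch of the word-at-a-time loop above, for the unsigned two-word
   case (editorial illustration; the real loop treats only the high-order
   word as signed when UNSIGNEDP is zero):  */
#if 0
static int gt_by_parts_sketch (unsigned long hi0, unsigned long lo0,
			       unsigned long hi1, unsigned long lo1)
{
  if (hi0 > hi1)
    return 1;			/* high-order words decide it */
  if (hi0 != hi1)
    return 0;			/* already known to be less */
  return lo0 > lo1;		/* lower words count only on equality */
}
#endif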
10083
b93a436e
JL
10084/* Given an EQ_EXPR expression EXP for values too wide to be compared
10085 with one insn, test the comparison and jump to the appropriate label. */
bbf6f052 10086
b93a436e
JL
10087static void
10088do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10089 tree exp;
10090 rtx if_false_label, if_true_label;
bbf6f052 10091{
b93a436e
JL
10092 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10093 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10094 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10095 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10096 int i;
10097 rtx drop_through_label = 0;
bbf6f052 10098
b93a436e
JL
10099 if (! if_false_label)
10100 drop_through_label = if_false_label = gen_label_rtx ();
bbf6f052 10101
b93a436e 10102 for (i = 0; i < nwords; i++)
b30f05db
BS
10103 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
10104 operand_subword_force (op1, i, mode),
10105 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10106 word_mode, NULL_RTX, 0, if_false_label,
10107 NULL_RTX);
bbf6f052 10108
b93a436e
JL
10109 if (if_true_label)
10110 emit_jump (if_true_label);
10111 if (drop_through_label)
10112 emit_label (drop_through_label);
bbf6f052 10113}
b93a436e
JL
10114\f
10115/* Jump according to whether OP0 is 0.
10116 We assume that OP0 has an integer mode that is too wide
10117 for the available compare insns. */
bbf6f052 10118
f5963e61 10119void
b93a436e
JL
10120do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10121 rtx op0;
10122 rtx if_false_label, if_true_label;
ca695ac9 10123{
b93a436e
JL
10124 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10125 rtx part;
10126 int i;
10127 rtx drop_through_label = 0;
bbf6f052 10128
b93a436e
JL
10129 /* The fastest way of doing this comparison on almost any machine is to
10130 "or" all the words and compare the result. If all have to be loaded
10131 from memory and this is a very wide item, this may
10132 be slower, but that's highly unlikely. */
bbf6f052 10133
b93a436e
JL
10134 part = gen_reg_rtx (word_mode);
10135 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
10136 for (i = 1; i < nwords && part != 0; i++)
10137 part = expand_binop (word_mode, ior_optab, part,
10138 operand_subword_force (op0, i, GET_MODE (op0)),
10139 part, 1, OPTAB_WIDEN);
bbf6f052 10140
b93a436e
JL
10141 if (part != 0)
10142 {
b30f05db
BS
10143 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
10144 NULL_RTX, 0, if_false_label, if_true_label);
bbf6f052 10145
b93a436e
JL
10146 return;
10147 }
bbf6f052 10148
b93a436e
JL
10149 /* If we couldn't do the "or" simply, do this with a series of compares. */
10150 if (! if_false_label)
10151 drop_through_label = if_false_label = gen_label_rtx ();
bbf6f052 10152
b93a436e 10153 for (i = 0; i < nwords; i++)
b30f05db
BS
10154 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
10155 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
10156 if_false_label, NULL_RTX);
bbf6f052 10157
b93a436e
JL
10158 if (if_true_label)
10159 emit_jump (if_true_label);
0f41302f 10160
b93a436e
JL
10161 if (drop_through_label)
10162 emit_label (drop_through_label);
bbf6f052 10163}
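
/* The `or all the words' test above, as a C sketch (editorial
   illustration for the two-word case):  */
#if 0
static int zero_by_parts_sketch (unsigned long w0, unsigned long w1)
{
  /* One IOR and one compare replace a compare-and-branch per word.  */
  return (w0 | w1) == 0;
}
#endif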
b93a436e 10164\f
b30f05db 10165/* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
b93a436e
JL
10166 (including code to compute the values to be compared)
10167 and set (CC0) according to the result.
b30f05db 10168 The decision as to signed or unsigned comparison must be made by the caller.
bbf6f052 10169
b93a436e 10170 We force a stack adjustment unless there are currently
b30f05db 10171 things pushed on the stack that aren't yet used.
ca695ac9 10172
b30f05db
BS
10173 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10174 compared.
10175
10176 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10177 size of MODE should be used. */
10178
10179rtx
10180compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
10181 register rtx op0, op1;
10182 enum rtx_code code;
10183 int unsignedp;
10184 enum machine_mode mode;
10185 rtx size;
729a2125 10186 unsigned int align;
b93a436e 10187{
b30f05db 10188 rtx tem;
76bbe028 10189
b30f05db
BS
10190 /* If one operand is constant, make it the second one. Only do this
10191 if the other operand is not constant as well. */
ca695ac9 10192
8c9864f3 10193 if (swap_commutative_operands_p (op0, op1))
bbf6f052 10194 {
b30f05db
BS
10195 tem = op0;
10196 op0 = op1;
10197 op1 = tem;
10198 code = swap_condition (code);
ca695ac9 10199 }
bbf6f052 10200
b30f05db 10201 if (flag_force_mem)
b93a436e 10202 {
b30f05db
BS
10203 op0 = force_not_mem (op0);
10204 op1 = force_not_mem (op1);
10205 }
bbf6f052 10206
b30f05db
BS
10207 do_pending_stack_adjust ();
10208
10209 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10210 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10211 return tem;
10212
10213#if 0
10214 /* There's no need to do this now that combine.c can eliminate lots of
10215 sign extensions. This can be less efficient in certain cases on other
10216 machines. */
10217
10218 /* If this is a signed equality comparison, we can do it as an
10219 unsigned comparison since zero-extension is cheaper than sign
10220 extension and comparisons with zero are done as unsigned. This is
10221 the case even on machines that can do fast sign extension, since
10222 zero-extension is easier to combine with other operations than
10223 sign-extension is. If we are comparing against a constant, we must
10224 convert it to what it would look like unsigned. */
10225 if ((code == EQ || code == NE) && ! unsignedp
10226 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10227 {
10228 if (GET_CODE (op1) == CONST_INT
10229 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10230 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10231 unsignedp = 1;
b93a436e
JL
10232 }
10233#endif
3a94c984 10234
b30f05db 10235 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
0f41302f 10236
b30f05db 10237 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
ca695ac9 10238}
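
/* Note the distinction drawn above: swapping the operands swaps the
   condition, it does not reverse it.  An editorial C illustration:  */
#if 0
/* `5 < x' and `x > 5' are the same test, so moving the constant to the
   second position turns LT into GT via swap_condition; the *reverse*
   of LT would instead be GE.  */
static int swapped_sketch (int x) { return x > 5; }	/* from `5 < x' */
#endif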
bbf6f052 10239
b30f05db 10240/* Like do_compare_and_jump but expects the values to compare as two rtx's.
b93a436e 10241 The decision as to signed or unsigned comparison must be made by the caller.
bbf6f052 10242
b93a436e
JL
10243 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10244 compared.
bbf6f052 10245
b93a436e
JL
10246 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10247 size of MODE should be used. */
ca695ac9 10248
b30f05db
BS
10249void
10250do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
10251 if_false_label, if_true_label)
b93a436e
JL
10252 register rtx op0, op1;
10253 enum rtx_code code;
10254 int unsignedp;
10255 enum machine_mode mode;
10256 rtx size;
729a2125 10257 unsigned int align;
b30f05db 10258 rtx if_false_label, if_true_label;
bbf6f052 10259{
b93a436e 10260 rtx tem;
b30f05db
BS
10261 int dummy_true_label = 0;
10262
10263 /* Reverse the comparison if that is safe and we want to jump if it is
10264 false. */
10265 if (! if_true_label && ! FLOAT_MODE_P (mode))
10266 {
10267 if_true_label = if_false_label;
10268 if_false_label = 0;
10269 code = reverse_condition (code);
10270 }
bbf6f052 10271
b93a436e
JL
10272 /* If one operand is constant, make it the second one. Only do this
10273 if the other operand is not constant as well. */
e7c33f54 10274
8c9864f3 10275 if (swap_commutative_operands_p (op0, op1))
ca695ac9 10276 {
b93a436e
JL
10277 tem = op0;
10278 op0 = op1;
10279 op1 = tem;
10280 code = swap_condition (code);
10281 }
bbf6f052 10282
b93a436e
JL
10283 if (flag_force_mem)
10284 {
10285 op0 = force_not_mem (op0);
10286 op1 = force_not_mem (op1);
10287 }
bbf6f052 10288
b93a436e 10289 do_pending_stack_adjust ();
ca695ac9 10290
b93a436e
JL
10291 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10292 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
b30f05db
BS
10293 {
10294 if (tem == const_true_rtx)
10295 {
10296 if (if_true_label)
10297 emit_jump (if_true_label);
10298 }
10299 else
10300 {
10301 if (if_false_label)
10302 emit_jump (if_false_label);
10303 }
10304 return;
10305 }
ca695ac9 10306
b93a436e
JL
10307#if 0
10308 /* There's no need to do this now that combine.c can eliminate lots of
10309 sign extensions. This can be less efficient in certain cases on other
10310 machines. */
ca695ac9 10311
b93a436e
JL
10312 /* If this is a signed equality comparison, we can do it as an
10313 unsigned comparison since zero-extension is cheaper than sign
10314 extension and comparisons with zero are done as unsigned. This is
10315 the case even on machines that can do fast sign extension, since
10316 zero-extension is easier to combine with other operations than
10317 sign-extension is. If we are comparing against a constant, we must
10318 convert it to what it would look like unsigned. */
10319 if ((code == EQ || code == NE) && ! unsignedp
10320 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10321 {
10322 if (GET_CODE (op1) == CONST_INT
10323 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10324 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10325 unsignedp = 1;
10326 }
10327#endif
ca695ac9 10328
b30f05db
BS
10329 if (! if_true_label)
10330 {
10331 dummy_true_label = 1;
10332 if_true_label = gen_label_rtx ();
10333 }
10334
10335 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
10336 if_true_label);
10337
10338 if (if_false_label)
10339 emit_jump (if_false_label);
10340 if (dummy_true_label)
10341 emit_label (if_true_label);
10342}
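
/* Reversing (as done at the top of do_compare_rtx_and_jump above, when
   only a false label is given) uses the opposite test on the same
   operands.  A C sketch (editorial; not valid for IEEE floats, hence
   the ! FLOAT_MODE_P guard):  */
#if 0
/* `if (! (a < b)) goto label;' becomes `if (a >= b) goto label;'.  */
static int jump_if_false_lt_sketch (int a, int b) { return a >= b; }
#endif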
10343
10344/* Generate code for a comparison expression EXP (including code to compute
10345 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10346 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
10347 generated code will drop through.
10348 SIGNED_CODE should be the rtx operation for this comparison for
10349 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10350
10351 We force a stack adjustment unless there are currently
10352 things pushed on the stack that aren't yet used. */
10353
10354static void
10355do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10356 if_true_label)
10357 register tree exp;
10358 enum rtx_code signed_code, unsigned_code;
10359 rtx if_false_label, if_true_label;
10360{
729a2125 10361 unsigned int align0, align1;
b30f05db
BS
10362 register rtx op0, op1;
10363 register tree type;
10364 register enum machine_mode mode;
10365 int unsignedp;
10366 enum rtx_code code;
10367
10368 /* Don't crash if the comparison was erroneous. */
14a774a9 10369 op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
b30f05db
BS
10370 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10371 return;
10372
14a774a9 10373 op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
6b16805e
JJ
10374 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
10375 return;
10376
b30f05db
BS
10377 type = TREE_TYPE (TREE_OPERAND (exp, 0));
10378 mode = TYPE_MODE (type);
6b16805e
JJ
10379 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
10380 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
10381 || (GET_MODE_BITSIZE (mode)
31a7659b
JDA
10382 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
10383 1)))))))
6b16805e
JJ
10384 {
10385 /* op0 might have been replaced by a promoted constant, in which
10386 case the type of the second argument should be used. */
10387 type = TREE_TYPE (TREE_OPERAND (exp, 1));
10388 mode = TYPE_MODE (type);
10389 }
b30f05db
BS
10390 unsignedp = TREE_UNSIGNED (type);
10391 code = unsignedp ? unsigned_code : signed_code;
10392
10393#ifdef HAVE_canonicalize_funcptr_for_compare
10394 /* If function pointers need to be "canonicalized" before they can
10395 be reliably compared, then canonicalize them. */
10396 if (HAVE_canonicalize_funcptr_for_compare
10397 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10398 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10399 == FUNCTION_TYPE))
10400 {
10401 rtx new_op0 = gen_reg_rtx (mode);
10402
10403 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10404 op0 = new_op0;
10405 }
10406
10407 if (HAVE_canonicalize_funcptr_for_compare
10408 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10409 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10410 == FUNCTION_TYPE))
10411 {
10412 rtx new_op1 = gen_reg_rtx (mode);
10413
10414 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10415 op1 = new_op1;
10416 }
10417#endif
10418
10419 /* Do any postincrements in the expression that was tested. */
10420 emit_queue ();
10421
10422 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10423 ((mode == BLKmode)
10424 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
19caa751 10425 MIN (align0, align1),
b30f05db 10426 if_false_label, if_true_label);
b93a436e
JL
10427}
10428\f
10429/* Generate code to calculate EXP using a store-flag instruction
10430 and return an rtx for the result. EXP is either a comparison
10431 or a TRUTH_NOT_EXPR whose operand is a comparison.
ca695ac9 10432
b93a436e 10433 If TARGET is nonzero, store the result there if convenient.
ca695ac9 10434
b93a436e
JL
10435 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10436 cheap.
ca695ac9 10437
b93a436e
JL
10438 Return zero if there is no suitable set-flag instruction
10439 available on this machine.
ca695ac9 10440
b93a436e
JL
10441 Once expand_expr has been called on the arguments of the comparison,
10442 we are committed to doing the store flag, since it is not safe to
10443 re-evaluate the expression. We emit the store-flag insn by calling
10444 emit_store_flag, but only expand the arguments if we have a reason
10445 to believe that emit_store_flag will be successful. If we think that
10446 it will, but it isn't, we have to simulate the store-flag with a
10447 set/jump/set sequence. */
ca695ac9 10448
b93a436e
JL
10449static rtx
10450do_store_flag (exp, target, mode, only_cheap)
10451 tree exp;
10452 rtx target;
10453 enum machine_mode mode;
10454 int only_cheap;
10455{
10456 enum rtx_code code;
10457 tree arg0, arg1, type;
10458 tree tem;
10459 enum machine_mode operand_mode;
10460 int invert = 0;
10461 int unsignedp;
10462 rtx op0, op1;
10463 enum insn_code icode;
10464 rtx subtarget = target;
381127e8 10465 rtx result, label;
ca695ac9 10466
b93a436e
JL
10467 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10468 result at the end. We can't simply invert the test since it would
10469 have already been inverted if it were valid. This case occurs for
10470 some floating-point comparisons. */
ca695ac9 10471
b93a436e
JL
10472 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10473 invert = 1, exp = TREE_OPERAND (exp, 0);
ca695ac9 10474
b93a436e
JL
10475 arg0 = TREE_OPERAND (exp, 0);
10476 arg1 = TREE_OPERAND (exp, 1);
5129d2ce
AH
10477
10478 /* Don't crash if the comparison was erroneous. */
10479 if (arg0 == error_mark_node || arg1 == error_mark_node)
10480 return const0_rtx;
10481
b93a436e
JL
10482 type = TREE_TYPE (arg0);
10483 operand_mode = TYPE_MODE (type);
10484 unsignedp = TREE_UNSIGNED (type);
ca695ac9 10485
b93a436e
JL
10486 /* We won't bother with BLKmode store-flag operations because it would mean
10487 passing a lot of information to emit_store_flag. */
10488 if (operand_mode == BLKmode)
10489 return 0;
ca695ac9 10490
b93a436e
JL
10491 /* We won't bother with store-flag operations involving function pointers
10492 when function pointers must be canonicalized before comparisons. */
10493#ifdef HAVE_canonicalize_funcptr_for_compare
10494 if (HAVE_canonicalize_funcptr_for_compare
10495 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10496 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10497 == FUNCTION_TYPE))
10498 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10499 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10500 == FUNCTION_TYPE))))
10501 return 0;
ca695ac9
JB
10502#endif
10503
b93a436e
JL
10504 STRIP_NOPS (arg0);
10505 STRIP_NOPS (arg1);
ca695ac9 10506
b93a436e
JL
10507 /* Get the rtx comparison code to use. We know that EXP is a comparison
10508 operation of some type. Some comparisons against 1 and -1 can be
10509 converted to comparisons with zero. Do so here so that the tests
10510 below will be aware that we have a comparison with zero. These
10511 tests will not catch constants in the first operand, but constants
10512 are rarely passed as the first operand. */
ca695ac9 10513
b93a436e
JL
10514 switch (TREE_CODE (exp))
10515 {
10516 case EQ_EXPR:
10517 code = EQ;
bbf6f052 10518 break;
b93a436e
JL
10519 case NE_EXPR:
10520 code = NE;
bbf6f052 10521 break;
b93a436e
JL
10522 case LT_EXPR:
10523 if (integer_onep (arg1))
10524 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10525 else
10526 code = unsignedp ? LTU : LT;
ca695ac9 10527 break;
b93a436e
JL
10528 case LE_EXPR:
10529 if (! unsignedp && integer_all_onesp (arg1))
10530 arg1 = integer_zero_node, code = LT;
10531 else
10532 code = unsignedp ? LEU : LE;
ca695ac9 10533 break;
b93a436e
JL
10534 case GT_EXPR:
10535 if (! unsignedp && integer_all_onesp (arg1))
10536 arg1 = integer_zero_node, code = GE;
10537 else
10538 code = unsignedp ? GTU : GT;
10539 break;
10540 case GE_EXPR:
10541 if (integer_onep (arg1))
10542 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10543 else
10544 code = unsignedp ? GEU : GE;
ca695ac9 10545 break;
1eb8759b
RH
10546
10547 case UNORDERED_EXPR:
10548 code = UNORDERED;
10549 break;
10550 case ORDERED_EXPR:
10551 code = ORDERED;
10552 break;
10553 case UNLT_EXPR:
10554 code = UNLT;
10555 break;
10556 case UNLE_EXPR:
10557 code = UNLE;
10558 break;
10559 case UNGT_EXPR:
10560 code = UNGT;
10561 break;
10562 case UNGE_EXPR:
10563 code = UNGE;
10564 break;
10565 case UNEQ_EXPR:
10566 code = UNEQ;
10567 break;
1eb8759b 10568
ca695ac9 10569 default:
b93a436e 10570 abort ();
bbf6f052 10571 }
bbf6f052 10572
b93a436e
JL
10573 /* Put a constant second. */
10574 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10575 {
10576 tem = arg0; arg0 = arg1; arg1 = tem;
10577 code = swap_condition (code);
ca695ac9 10578 }
bbf6f052 10579
b93a436e
JL
10580 /* If this is an equality or inequality test of a single bit, we can
10581 do this by shifting the bit being tested to the low-order bit and
10582 masking the result with the constant 1. If the condition was EQ,
10583 we xor it with 1. This does not require an scc insn and is faster
10584 than an scc insn even if we have it. */
d39985fa 10585
b93a436e
JL
10586 if ((code == NE || code == EQ)
10587 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10588 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10589 {
10590 tree inner = TREE_OPERAND (arg0, 0);
10591 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10592 int ops_unsignedp;
bbf6f052 10593
b93a436e
JL
10594 /* If INNER is a right shift of a constant and it plus BITNUM does
10595 not overflow, adjust BITNUM and INNER. */
ca695ac9 10596
b93a436e
JL
10597 if (TREE_CODE (inner) == RSHIFT_EXPR
10598 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10599 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
05bccae2
RK
10600 && bitnum < TYPE_PRECISION (type)
10601 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10602 bitnum - TYPE_PRECISION (type)))
ca695ac9 10603 {
b93a436e
JL
10604 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10605 inner = TREE_OPERAND (inner, 0);
ca695ac9 10606 }
ca695ac9 10607
b93a436e
JL
10608 /* If we are going to be able to omit the AND below, we must do our
10609 operations as unsigned. If we must use the AND, we have a choice.
10610 Normally unsigned is faster, but for some machines signed is. */
10611 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10612#ifdef LOAD_EXTEND_OP
10613 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10614#else
10615 : 1
10616#endif
10617 );
bbf6f052 10618
296b4ed9 10619 if (! get_subtarget (subtarget)
a47fed55 10620 || GET_MODE (subtarget) != operand_mode
e5e809f4 10621 || ! safe_from_p (subtarget, inner, 1))
b93a436e 10622 subtarget = 0;
bbf6f052 10623
b93a436e 10624 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
bbf6f052 10625
b93a436e 10626 if (bitnum != 0)
681cb233 10627 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
b93a436e 10628 size_int (bitnum), subtarget, ops_unsignedp);
bbf6f052 10629
b93a436e
JL
10630 if (GET_MODE (op0) != mode)
10631 op0 = convert_to_mode (mode, op0, ops_unsignedp);
bbf6f052 10632
b93a436e
JL
10633 if ((code == EQ && ! invert) || (code == NE && invert))
10634 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10635 ops_unsignedp, OPTAB_LIB_WIDEN);
bbf6f052 10636
b93a436e
JL
10637 /* Put the AND last so it can combine with more things. */
10638 if (bitnum != TYPE_PRECISION (type) - 1)
10639 op0 = expand_and (op0, const1_rtx, subtarget);
bbf6f052 10640
b93a436e
JL
10641 return op0;
10642 }
bbf6f052 10643
b93a436e 10644 /* Now see if we are likely to be able to do this. Return if not. */
1eb8759b 10645 if (! can_compare_p (code, operand_mode, ccp_store_flag))
b93a436e 10646 return 0;
1eb8759b 10647
b93a436e
JL
10648 icode = setcc_gen_code[(int) code];
10649 if (icode == CODE_FOR_nothing
a995e389 10650 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
ca695ac9 10651 {
b93a436e
JL
10652 /* We can only do this if it is one of the special cases that
10653 can be handled without an scc insn. */
10654 if ((code == LT && integer_zerop (arg1))
10655 || (! only_cheap && code == GE && integer_zerop (arg1)))
10656 ;
10657 else if (BRANCH_COST >= 0
10658 && ! only_cheap && (code == NE || code == EQ)
10659 && TREE_CODE (type) != REAL_TYPE
10660 && ((abs_optab->handlers[(int) operand_mode].insn_code
10661 != CODE_FOR_nothing)
10662 || (ffs_optab->handlers[(int) operand_mode].insn_code
10663 != CODE_FOR_nothing)))
10664 ;
10665 else
10666 return 0;
ca695ac9 10667 }
3a94c984 10668
296b4ed9 10669 if (! get_subtarget (target)
a47fed55 10670 || GET_MODE (subtarget) != operand_mode
e5e809f4 10671 || ! safe_from_p (subtarget, arg1, 1))
b93a436e
JL
10672 subtarget = 0;
10673
10674 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10675 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10676
10677 if (target == 0)
10678 target = gen_reg_rtx (mode);
10679
10680 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10681 because, if emit_store_flag does anything, it will succeed and
10682 OP0 and OP1 will not be used subsequently. */
ca695ac9 10683
b93a436e
JL
10684 result = emit_store_flag (target, code,
10685 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10686 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10687 operand_mode, unsignedp, 1);
ca695ac9 10688
b93a436e
JL
10689 if (result)
10690 {
10691 if (invert)
10692 result = expand_binop (mode, xor_optab, result, const1_rtx,
10693 result, 0, OPTAB_LIB_WIDEN);
10694 return result;
ca695ac9 10695 }
bbf6f052 10696
b93a436e
JL
10697 /* If this failed, we have to do this with set/compare/jump/set code. */
10698 if (GET_CODE (target) != REG
10699 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10700 target = gen_reg_rtx (GET_MODE (target));
10701
10702 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10703 result = compare_from_rtx (op0, op1, code, unsignedp,
10704 operand_mode, NULL_RTX, 0);
10705 if (GET_CODE (result) == CONST_INT)
10706 return (((result == const0_rtx && ! invert)
10707 || (result != const0_rtx && invert))
10708 ? const0_rtx : const1_rtx);
ca695ac9 10709
b93a436e
JL
10710 label = gen_label_rtx ();
10711 if (bcc_gen_fctn[(int) code] == 0)
10712 abort ();
0f41302f 10713
b93a436e
JL
10714 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10715 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10716 emit_label (label);
bbf6f052 10717
b93a436e 10718 return target;
ca695ac9 10719}
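
/* The single-bit branch of do_store_flag amounts to this C identity
   (editorial sketch; BITNUM is the bit selected by the power-of-two
   mask):  */
#if 0
static int bit_test_ne_sketch (unsigned int x, int bitnum)
{
  /* (x & (1 << bitnum)) != 0  ==>  shift the bit down and mask with 1;
     no scc instruction is needed.  */
  return (x >> bitnum) & 1;
}

static int bit_test_eq_sketch (unsigned int x, int bitnum)
{
  /* For EQ the result is flipped with XOR 1, the AND staying last so
     it can combine with more things.  */
  return ((x >> bitnum) ^ 1) & 1;
}
#endif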
b93a436e 10720\f
b93a436e 10721
ad82abb8
ZW
10722/* Stubs in case we haven't got a casesi insn. */
10723#ifndef HAVE_casesi
10724# define HAVE_casesi 0
10725# define gen_casesi(a, b, c, d, e) (0)
10726# define CODE_FOR_casesi CODE_FOR_nothing
10727#endif
10728
10729/* If the machine does not have a case insn that compares the bounds,
10730 this means extra overhead for dispatch tables, which raises the
10731 threshold for using them. */
10732#ifndef CASE_VALUES_THRESHOLD
10733#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
10734#endif /* CASE_VALUES_THRESHOLD */
10735
10736unsigned int
10737case_values_threshold ()
10738{
10739 return CASE_VALUES_THRESHOLD;
10740}
10741
10742/* Attempt to generate a casesi instruction. Returns 1 if successful,
10743 0 otherwise (i.e. if there is no casesi instruction). */
10744int
10745try_casesi (index_type, index_expr, minval, range,
10746 table_label, default_label)
10747 tree index_type, index_expr, minval, range;
10748 rtx table_label ATTRIBUTE_UNUSED;
10749 rtx default_label;
10750{
10751 enum machine_mode index_mode = SImode;
10752 int index_bits = GET_MODE_BITSIZE (index_mode);
10753 rtx op1, op2, index;
10754 enum machine_mode op_mode;
10755
10756 if (! HAVE_casesi)
10757 return 0;
10758
10759 /* Convert the index to SImode. */
10760 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10761 {
10762 enum machine_mode omode = TYPE_MODE (index_type);
10763 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
10764
10765 /* We must handle the endpoints in the original mode. */
10766 index_expr = build (MINUS_EXPR, index_type,
10767 index_expr, minval);
10768 minval = integer_zero_node;
10769 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10770 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10771 omode, 1, 0, default_label);
10772 /* Now we can safely truncate. */
10773 index = convert_to_mode (index_mode, index, 0);
10774 }
10775 else
10776 {
10777 if (TYPE_MODE (index_type) != index_mode)
10778 {
10779 index_expr = convert (type_for_size (index_bits, 0),
10780 index_expr);
10781 index_type = TREE_TYPE (index_expr);
10782 }
10783
10784 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10785 }
10786 emit_queue ();
10787 index = protect_from_queue (index, 0);
10788 do_pending_stack_adjust ();
10789
10790 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
10791 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
10792 (index, op_mode))
10793 index = copy_to_mode_reg (op_mode, index);
e87b4f3f 10794
ad82abb8
ZW
10795 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
10796
10797 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
10798 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
10799 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
10800 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
10801 (op1, op_mode))
10802 op1 = copy_to_mode_reg (op_mode, op1);
10803
10804 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
10805
10806 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
10807 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
10808 op2, TREE_UNSIGNED (TREE_TYPE (range)));
10809 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
10810 (op2, op_mode))
10811 op2 = copy_to_mode_reg (op_mode, op2);
10812
10813 emit_jump_insn (gen_casesi (index, op1, op2,
10814 table_label, default_label));
10815 return 1;
10816}
10817
10818/* Attempt to generate a tablejump instruction; same concept. */
10819#ifndef HAVE_tablejump
10820#define HAVE_tablejump 0
10821#define gen_tablejump(x, y) (0)
10822#endif
10823
10824/* Subroutine of the next function.
10825
10826 INDEX is the value being switched on, with the lowest value
b93a436e
JL
10827 in the table already subtracted.
10828 MODE is its expected mode (needed if INDEX is constant).
10829 RANGE is the length of the jump table.
10830 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
88d3b7f0 10831
b93a436e
JL
10832 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10833 index value is out of range. */
0f41302f 10834
ad82abb8 10835static void
b93a436e
JL
10836do_tablejump (index, mode, range, table_label, default_label)
10837 rtx index, range, table_label, default_label;
10838 enum machine_mode mode;
ca695ac9 10839{
b93a436e 10840 register rtx temp, vector;
88d3b7f0 10841
b93a436e
JL
10842 /* Do an unsigned comparison (in the proper mode) between the index
10843 expression and the value which represents the length of the range.
10844 Since we just finished subtracting the lower bound of the range
10845 from the index expression, this comparison allows us to simultaneously
10846 check that the original index expression value is both greater than
10847 or equal to the minimum value of the range and less than or equal to
10848 the maximum value of the range. */
709f5be1 10849
c5d5d461
JL
10850 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10851 0, default_label);
bbf6f052 10852
b93a436e
JL
10853 /* If index is in range, it must fit in Pmode.
10854 Convert to Pmode so we can index with it. */
10855 if (mode != Pmode)
10856 index = convert_to_mode (Pmode, index, 1);
bbf6f052 10857
b93a436e
JL
10858 /* Don't let a MEM slip thru, because then INDEX that comes
10859 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10860 and break_out_memory_refs will go to work on it and mess it up. */
10861#ifdef PIC_CASE_VECTOR_ADDRESS
10862 if (flag_pic && GET_CODE (index) != REG)
10863 index = copy_to_mode_reg (Pmode, index);
10864#endif
ca695ac9 10865
b93a436e
JL
10866 /* If flag_force_addr were to affect this address
10867 it could interfere with the tricky assumptions made
10868 about addresses that contain label-refs,
10869 which may be valid only very near the tablejump itself. */
10870 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10871 GET_MODE_SIZE, because this indicates how large insns are. The other
10872 uses should all be Pmode, because they are addresses. This code
10873 could fail if addresses and insns are not the same size. */
10874 index = gen_rtx_PLUS (Pmode,
10875 gen_rtx_MULT (Pmode, index,
10876 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10877 gen_rtx_LABEL_REF (Pmode, table_label));
10878#ifdef PIC_CASE_VECTOR_ADDRESS
10879 if (flag_pic)
10880 index = PIC_CASE_VECTOR_ADDRESS (index);
10881 else
bbf6f052 10882#endif
b93a436e
JL
10883 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10884 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10885 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10886 RTX_UNCHANGING_P (vector) = 1;
10887 convert_move (temp, vector, 0);
10888
10889 emit_jump_insn (gen_tablejump (temp, table_label));
10890
10891 /* If we are generating PIC code or if the table is PC-relative, the
10892 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10893 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10894 emit_barrier ();
bbf6f052 10895}
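
/* The single GTU comparison above implements the classic unsigned
   range-check trick.  A C sketch (editorial; INDEX already has the
   table's minimum subtracted):  */
#if 0
static int in_range_sketch (unsigned int index_minus_min, unsigned int range)
{
  /* An index below the minimum wrapped around to a huge unsigned value,
     so one unsigned compare rejects both out-of-range directions.  */
  return index_minus_min <= range;
}
#endif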
b93a436e 10896
ad82abb8
ZW
10897int
10898try_tablejump (index_type, index_expr, minval, range,
10899 table_label, default_label)
10900 tree index_type, index_expr, minval, range;
10901 rtx table_label, default_label;
10902{
10903 rtx index;
10904
10905 if (! HAVE_tablejump)
10906 return 0;
10907
10908 index_expr = fold (build (MINUS_EXPR, index_type,
10909 convert (index_type, index_expr),
10910 convert (index_type, minval)));
10911 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10912 emit_queue ();
10913 index = protect_from_queue (index, 0);
10914 do_pending_stack_adjust ();
10915
10916 do_tablejump (index, TYPE_MODE (index_type),
10917 convert_modes (TYPE_MODE (index_type),
10918 TYPE_MODE (TREE_TYPE (range)),
10919 expand_expr (range, NULL_RTX,
10920 VOIDmode, 0),
10921 TREE_UNSIGNED (TREE_TYPE (range))),
10922 table_label, default_label);
10923 return 1;
10924}