/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "recog.h"
#include "reload.h"
#include "output.h"
#include "typeclass.h"
#include "toplev.h"
#include "ggc.h"
#include "intl.h"
#include "tm_p.h"

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* Hook called by safe_from_p for language-specific tree codes.  It is
   up to the language front-end to install a hook if it has any such
   codes that safe_from_p needs to know about.  Since safe_from_p will
   recursively explore the TREE_OPERANDs of an expression, this hook
   should not reexamine those pieces.  This routine may recursively
   call safe_from_p; it should always pass `0' as the TOP_P
   parameter.  */
int (*lang_safe_from_p) PARAMS ((rtx, tree));

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Don't check memory usage, since code is being emitted to check memory
   usage.  Used when current_function_check_memory_usage is true, to avoid
   infinite recursion.  */
static int in_check_memory_usage;

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
static tree placeholder_list = 0;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the store to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
  PTR constfundata;
  int reverse;
};

extern struct obstack permanent_obstack;

static rtx get_push_address PARAMS ((int));

static rtx enqueue_insn PARAMS ((rtx, rtx));
static unsigned HOST_WIDE_INT move_by_pieces_ninsns
  PARAMS ((unsigned HOST_WIDE_INT, unsigned int));
static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
                                      struct move_by_pieces *));
static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
                                      enum machine_mode));
static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
                                     unsigned int));
static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
                                       unsigned int));
static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
                                       enum machine_mode,
                                       struct store_by_pieces *));
static rtx get_subtarget PARAMS ((rtx));
static int is_zeros_p PARAMS ((tree));
static int mostly_zeros_p PARAMS ((tree));
static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
                                             HOST_WIDE_INT, enum machine_mode,
                                             tree, tree, unsigned int, int,
                                             int));
static void store_constructor PARAMS ((tree, rtx, unsigned int, int,
                                       HOST_WIDE_INT));
static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
                                HOST_WIDE_INT, enum machine_mode,
                                tree, enum machine_mode, int,
                                unsigned int, HOST_WIDE_INT, int));
static enum memory_use_mode
  get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
static tree save_noncopied_parts PARAMS ((tree, tree));
static tree init_noncopied_parts PARAMS ((tree, tree));
static int fixed_type_p PARAMS ((tree));
static rtx var_rtx PARAMS ((tree));
static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
static rtx expand_increment PARAMS ((tree, int, int));
static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
                                         rtx, rtx));
static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif

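/* Illustrative example (not part of the original source): on a
   hypothetical 32-bit target with MOVE_MAX == 4 and the default
   MOVE_RATIO of 15, a word-aligned 16-byte copy takes four SImode
   moves, so MOVE_BY_PIECES_P (16, 32) evaluates to (4 < 15), i.e.
   nonzero, and the copy is expanded inline; a much larger block
   instead falls through to a movstr pattern or a library call.  */
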
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
\f
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;

  start_sequence ();

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
             regno++)
          {
            if (! HARD_REGNO_MODE_OK (regno, mode))
              continue;

            reg = gen_rtx_REG (mode, regno);

            SET_SRC (pat) = mem;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
          }
    }

  end_sequence ();
}

/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));

  pending_chain = 0;
  pending_stack_adjust = 0;
  stack_pointer_delta = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

void
mark_expr_status (p)
     struct expr_status *p;
{
  if (p == NULL)
    return;

  ggc_mark_rtx (p->x_saveregs_value);
  ggc_mark_rtx (p->x_apply_args_value);
  ggc_mark_rtx (p->x_forced_labels);
}

void
free_expr_status (f)
     struct function *f;
{
  free (f->expr);
  f->expr = NULL;
}

/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function ()
{
  if (pending_chain)
    abort ();
}
\f
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
                                  body, pending_chain);
  return pending_chain;
}

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
         use of autoincrement.  Make a copy of the contents of the memory
         location rather than a copy of the address, but not if the value is
         of mode BLKmode.  Don't modify X in place since it might be
         shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
          && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
        {
          register rtx y = XEXP (x, 0);
          register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));

          MEM_COPY_ATTRIBUTES (new, x);

          if (QUEUED_INSN (y))
            {
              register rtx temp = gen_reg_rtx (GET_MODE (new));
              emit_insn_before (gen_move_insn (temp, new),
                                QUEUED_INSN (y));
              return temp;
            }
          /* Copy the address into a pseudo, so that the returned value
             remains correct across calls to emit_queue.  */
          XEXP (new, 0) = copy_to_reg (XEXP (new, 0));
          return new;
        }
      /* Otherwise, recursively protect the subexpressions of all
         the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
        {
          rtx tem = protect_from_queue (XEXP (x, 0), 0);
          if (tem != XEXP (x, 0))
            {
              x = copy_rtx (x);
              XEXP (x, 0) = tem;
            }
        }
      else if (code == PLUS || code == MULT)
        {
          rtx new0 = protect_from_queue (XEXP (x, 0), 0);
          rtx new1 = protect_from_queue (XEXP (x, 1), 0);
          if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
            {
              x = copy_rtx (x);
              XEXP (x, 0) = new0;
              XEXP (x, 1) = new1;
            }
        }
      return x;
    }
  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
                    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
              || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      if (GET_CODE (body) == SEQUENCE)
        {
          QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
          emit_insn (QUEUED_BODY (p));
        }
      else
        QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}
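
/* Illustrative sketch (not part of the original source): expanding
   `v++' as an rvalue might, assuming V lives in register V_RTX, do

     rtx q = enqueue_insn (v_rtx, gen_add2_insn (v_rtx, const1_rtx));
     ...consume protect_from_queue (q, 0) in an insn...
     emit_queue ();

   The consumer then sees the pre-increment value of V, while the
   addition itself is emitted when emit_queue flushes the chain.  */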
\f
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
        abort ();

      if (VECTOR_MODE_P (to_mode))
        from = gen_rtx_SUBREG (to_mode, from, 0);
      else
        to = gen_rtx_SUBREG (from_mode, to, 0);

      emit_move_insn (to, from);
      return;
    }

  if (to_real != from_real)
    abort ();

  if (to_real)
    {
      rtx value, insns;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
        {
          /* Try converting directly if the insn is supported.  */
          if ((code = can_extend_p (to_mode, from_mode, 0))
              != CODE_FOR_nothing)
            {
              emit_unop_insn (code, to, from, UNKNOWN);
              return;
            }
        }

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
          return;
        }
#endif

      libcall = (rtx) 0;
      switch (from_mode)
        {
        case SFmode:
          switch (to_mode)
            {
            case DFmode:
              libcall = extendsfdf2_libfunc;
              break;

            case XFmode:
              libcall = extendsfxf2_libfunc;
              break;

            case TFmode:
              libcall = extendsftf2_libfunc;
              break;

            default:
              break;
            }
          break;

        case DFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = truncdfsf2_libfunc;
              break;

            case XFmode:
              libcall = extenddfxf2_libfunc;
              break;

            case TFmode:
              libcall = extenddftf2_libfunc;
              break;

            default:
              break;
            }
          break;

        case XFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = truncxfsf2_libfunc;
              break;

            case DFmode:
              libcall = truncxfdf2_libfunc;
              break;

            default:
              break;
            }
          break;

        case TFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = trunctfsf2_libfunc;
              break;

            case DFmode:
              libcall = trunctfdf2_libfunc;
              break;

            default:
              break;
            }
          break;

        default:
          break;
        }

      if (libcall == (rtx) 0)
        /* This conversion is not implemented yet.  */
        abort ();

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
                                       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
                                                                    from));
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          if (GET_CODE (to) == REG)
            emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
          convert_move (gen_lowpart (word_mode, to), from, unsignedp);
          emit_unop_insn (code, to,
                          gen_lowpart (word_mode, to), equiv_code);
          return;
        }

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
         that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
        from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
      else
        {
#ifdef HAVE_slt
          if (HAVE_slt
              && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
              && STORE_FLAG_VALUE == -1)
            {
              emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
                             lowpart_mode, 0, 0);
              fill_value = gen_reg_rtx (word_mode);
              emit_insn (gen_slt (fill_value));
            }
          else
#endif
            {
              fill_value
                = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
                                size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
                                NULL_RTX, 0);
              fill_value = convert_to_mode (word_mode, fill_value, 1);
            }
        }

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          if (subword == 0)
            abort ();

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
                              gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion.  */		/* SPEE 900220.  */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
        from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
        {
          emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
        {
          from = convert_to_mode (QImode, from, unsignedp);
          from_mode = QImode;
        }
      else
        {
#ifdef HAVE_extendpqiqi2
          if (HAVE_extendpqiqi2)
            {
              emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpqiqi2 */
          abort ();
        }
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
        from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
        {
          emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
        {
          from = convert_to_mode (SImode, from, unsignedp);
          from_mode = SImode;
        }
      else
        {
#ifdef HAVE_extendpsisi2
          if (! unsignedp && HAVE_extendpsisi2)
            {
              emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpsisi2 */
#ifdef HAVE_zero_extendpsisi2
          if (unsignedp && HAVE_zero_extendpsisi2)
            {
              emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_zero_extendpsisi2 */
          abort ();
        }
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
        from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
        {
          emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
        {
          from = convert_to_mode (DImode, from, unsignedp);
          from_mode = DImode;
        }
      else
        {
#ifdef HAVE_extendpdidi2
          if (HAVE_extendpdidi2)
            {
              emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpdidi2 */
          abort ();
        }
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
          && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
        from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          enum machine_mode intermediate;
          rtx tmp;
          tree shift_amount;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if (((can_extend_p (to_mode, intermediate, unsignedp)
                  != CODE_FOR_nothing)
                 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
                     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                               GET_MODE_BITSIZE (intermediate))))
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.
             Generate what we need with shifts.  */
          shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
                                      - GET_MODE_BITSIZE (from_mode), 0);
          from = gen_lowpart (to_mode, force_reg (from_mode, from));
          tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
                              to, unsignedp);
          tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
                              to, unsignedp);
          if (tmp != to)
            emit_move_insn (to, tmp);
          return;
        }
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
        {
          emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
        {
          emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
        {
          emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
        {
          emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
        {
          emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
        {
          emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
        {
          emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
        {
          emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
        {
          emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
        {
          emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
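
/* Illustrative example (not part of the original source):

     convert_move (di_reg, si_reg, 0);

   sign-extends a SImode register into a DImode register, using
   extendsidi2 when the target provides it and otherwise the
   multi-word path above (low word plus a shift-derived fill).  */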

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
          && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
        {
          int width = GET_MODE_BITSIZE (oldmode);

          /* We need to zero extend VAL.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
        }

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (oldmode) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
                  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
                       && direct_load[(int) mode])
                      || (GET_CODE (x) == REG
                          && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                                                    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
         X does not need sign- or zero-extension.  This may not be
         the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
        {
          HOST_WIDE_INT val = INTVAL (x);
          int width = GET_MODE_BITSIZE (oldmode);

          /* We must sign or zero-extend in this case.  Start by
             zero-extending, then sign extend if we need to.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
          if (! unsignedp
              && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
            val |= (HOST_WIDE_INT) (-1) << width;

          return GEN_INT (trunc_int_for_mode (val, mode));
        }

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
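
/* Illustrative example (not part of the original source):

     rtx x = convert_modes (SImode, QImode, GEN_INT (-1), 1);

   returns (const_int 255): the CONST_INT case above zero-extends
   VAL from OLDMODE before rebuilding the constant, instead of
   letting gen_lowpart reproduce the negative value.  */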
\f
/* This macro determines the largest unit size that move_by_pieces
   can use.  */

/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES   MOVE_MAX
#endif

/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.

   When TO is NULL, emit_single_push_insn is used to push FROM
   onto the stack.

   ALIGN is maximum alignment we can assume.  */

void
move_by_pieces (to, from, len, align)
     rtx to, from;
     unsigned HOST_WIDE_INT len;
     unsigned int align;
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
        = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
           || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
        = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}

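/* Worked example (not part of the original source): an 11-byte
   word-aligned copy on a target with MOVE_MAX_PIECES == 4 becomes
   two SImode moves, one HImode move and one QImode move, since the
   loop above retries each successively narrower mode on the
   remainder.  */
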
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (l, align)
     unsigned HOST_WIDE_INT l;
     unsigned int align;
{
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  if (l)
    abort ();
  return n_insns;
}

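/* Worked example (not part of the original source): with MOVE_MAX == 4
   and word-aligned operands, move_by_pieces_ninsns (10, 32) counts
   10/4 = 2 SImode moves, then 2/2 = 1 HImode move, for 3 insns total,
   so MOVE_BY_PIECES_P would accept such a copy under the default
   MOVE_RATIO.  */
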
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PARAMS ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->to)
        {
          if (data->autinc_to)
            {
              to1 = gen_rtx_MEM (mode, data->to_addr);
              MEM_COPY_ATTRIBUTES (to1, data->to);
            }
          else
            to1 = change_address (data->to, mode,
                                  plus_constant (data->to_addr, data->offset));
        }

      if (data->autinc_from)
        {
          from1 = gen_rtx_MEM (mode, data->from_addr);
          MEM_COPY_ATTRIBUTES (from1, data->from);
        }
      else
        from1 = change_address (data->from, mode,
                                plus_constant (data->from_addr, data->offset));

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));

      if (data->to)
        emit_insn ((*genfun) (to1, from1));
      else
        emit_single_push_insn (mode, from1, NULL);

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}
1618\f
1619/* Emit code to move a block Y to a block X.
1620 This may be done with string-move instructions,
1621 with multiple scalar move instructions, or with a library call.
1622
1623 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1624 with mode BLKmode.
1625 SIZE is an rtx that says how long they are.
19caa751 1626 ALIGN is the maximum alignment we can assume they have.
bbf6f052 1627
e9a25f70
JL
1628 Return the address of the new block, if memcpy is called and returns it,
1629 0 otherwise. */
1630
1631rtx
bbf6f052
RK
1632emit_block_move (x, y, size, align)
1633 rtx x, y;
1634 rtx size;
729a2125 1635 unsigned int align;
bbf6f052 1636{
e9a25f70 1637 rtx retval = 0;
52cf7115
JL
1638#ifdef TARGET_MEM_FUNCTIONS
1639 static tree fn;
1640 tree call_expr, arg_list;
1641#endif
e9a25f70 1642
bbf6f052
RK
1643 if (GET_MODE (x) != BLKmode)
1644 abort ();
1645
1646 if (GET_MODE (y) != BLKmode)
1647 abort ();
1648
1649 x = protect_from_queue (x, 1);
1650 y = protect_from_queue (y, 0);
5d901c31 1651 size = protect_from_queue (size, 0);
bbf6f052
RK
1652
1653 if (GET_CODE (x) != MEM)
1654 abort ();
1655 if (GET_CODE (y) != MEM)
1656 abort ();
1657 if (size == 0)
1658 abort ();
1659
fbe1758d 1660 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
bbf6f052
RK
1661 move_by_pieces (x, y, INTVAL (size), align);
1662 else
1663 {
1664 /* Try the most limited insn first, because there's no point
1665 including more than one in the machine description unless
1666 the more limited one has some advantage. */
266007a7 1667
19caa751 1668 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
266007a7
RK
1669 enum machine_mode mode;
1670
3ef1eef4
RK
1671 /* Since this is a move insn, we don't care about volatility. */
1672 volatile_ok = 1;
1673
266007a7
RK
1674 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1675 mode = GET_MODE_WIDER_MODE (mode))
bbf6f052 1676 {
266007a7 1677 enum insn_code code = movstr_optab[(int) mode];
a995e389 1678 insn_operand_predicate_fn pred;
266007a7
RK
1679
1680 if (code != CODE_FOR_nothing
803090c4
RK
1681 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1682 here because if SIZE is less than the mode mask, as it is
8008b228 1683 returned by the macro, it will definitely be less than the
803090c4 1684 actual mode mask. */
8ca00751
RK
1685 && ((GET_CODE (size) == CONST_INT
1686 && ((unsigned HOST_WIDE_INT) INTVAL (size)
e5e809f4 1687 <= (GET_MODE_MASK (mode) >> 1)))
8ca00751 1688 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
a995e389
RH
1689 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1690 || (*pred) (x, BLKmode))
1691 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1692 || (*pred) (y, BLKmode))
1693 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1694 || (*pred) (opalign, VOIDmode)))
bbf6f052 1695 {
1ba1e2a8 1696 rtx op2;
266007a7
RK
1697 rtx last = get_last_insn ();
1698 rtx pat;
1699
1ba1e2a8 1700 op2 = convert_to_mode (mode, size, 1);
a995e389
RH
1701 pred = insn_data[(int) code].operand[2].predicate;
1702 if (pred != 0 && ! (*pred) (op2, mode))
266007a7
RK
1703 op2 = copy_to_mode_reg (mode, op2);
1704
1705 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1706 if (pat)
1707 {
1708 emit_insn (pat);
3ef1eef4 1709 volatile_ok = 0;
e9a25f70 1710 return 0;
266007a7
RK
1711 }
1712 else
1713 delete_insns_since (last);
bbf6f052
RK
1714 }
1715 }

      volatile_ok = 0;

      /* X, Y, or SIZE may have been passed through protect_from_queue.

         It is unsafe to save the value generated by protect_from_queue
         and reuse it later.  Consider what happens if emit_queue is
         called before the return value from protect_from_queue is used.

         Expansion of the CALL_EXPR below will call emit_queue before
         we are finished emitting RTL for argument setup.  So if we are
         not careful we could get the wrong value for an argument.

         To avoid this problem we go ahead and emit code to copy X, Y &
         SIZE into new pseudos.  We can then place those new pseudos
         into an RTL_EXPR and use them later, even after a call to
         emit_queue.

         Note this is not strictly needed for library calls since they
         do not call emit_queue before loading their arguments.  However,
         we may need to have library calls call emit_queue in the future
         since failing to do so could cause problems for targets which
         define SMALL_REGISTER_CLASSES and pass arguments in registers.  */
      x = copy_to_mode_reg (Pmode, XEXP (x, 0));
      y = copy_to_mode_reg (Pmode, XEXP (y, 0));

#ifdef TARGET_MEM_FUNCTIONS
      size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
#else
      size = convert_to_mode (TYPE_MODE (integer_type_node), size,
                              TREE_UNSIGNED (integer_type_node));
      size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
#endif

#ifdef TARGET_MEM_FUNCTIONS
      /* It is incorrect to use the libcall calling conventions to call
         memcpy in this context.

         This could be a user call to memcpy and the user may wish to
         examine the return value from memcpy.

         For targets where libcalls and normal calls have different conventions
         for returning pointers, we could end up generating incorrect code.

         So instead of using a libcall sequence we build up a suitable
         CALL_EXPR and expand the call in the normal fashion.  */
      if (fn == NULL_TREE)
        {
          tree fntype;

          /* This was copied from except.c; I don't know if all this is
             necessary in this context or not.  */
          fn = get_identifier ("memcpy");
          fntype = build_pointer_type (void_type_node);
          fntype = build_function_type (fntype, NULL_TREE);
          fn = build_decl (FUNCTION_DECL, fn, fntype);
          ggc_add_tree_root (&fn, 1);
          DECL_EXTERNAL (fn) = 1;
          TREE_PUBLIC (fn) = 1;
          DECL_ARTIFICIAL (fn) = 1;
          TREE_NOTHROW (fn) = 1;
          make_decl_rtl (fn, NULL);
          assemble_external (fn);
        }

      /* We need to make an argument list for the function call.

         memcpy has three arguments: the first two are void * addresses and
         the last is a size_t byte count for the copy.  */
      arg_list
        = build_tree_list (NULL_TREE,
                           make_tree (build_pointer_type (void_type_node), x));
      TREE_CHAIN (arg_list)
        = build_tree_list (NULL_TREE,
                           make_tree (build_pointer_type (void_type_node), y));
      TREE_CHAIN (TREE_CHAIN (arg_list))
        = build_tree_list (NULL_TREE, make_tree (sizetype, size));
      TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;

      /* Now we have to build up the CALL_EXPR itself.  */
      call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
                         call_expr, arg_list, NULL_TREE);
      TREE_SIDE_EFFECTS (call_expr) = 1;

      retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
#else
      emit_library_call (bcopy_libfunc, LCT_NORMAL,
                         VOIDmode, 3, y, Pmode, x, Pmode,
                         convert_to_mode (TYPE_MODE (integer_type_node), size,
                                          TREE_UNSIGNED (integer_type_node)),
                         TYPE_MODE (integer_type_node));
#endif
    }

  return retval;
}
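
/* Usage sketch (illustrative only; DEST_ADDR and SRC_ADDR are hypothetical
   Pmode addresses, not names used in this file).  A caller with two BLKmode
   MEMs copies a 64-byte block of 32-bit-aligned storage with

        rtx x = gen_rtx_MEM (BLKmode, dest_addr);
        rtx y = gen_rtx_MEM (BLKmode, src_addr);
        emit_block_move (x, y, GEN_INT (64), 32);

   As the code above shows, each movstrMM pattern is tried from the
   narrowest integer mode upward, and only when no pattern produces RTL
   do we fall back to an expanded memcpy (or bcopy) call.  */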
\f
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
                               GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
                    operand_subword_force (x, i, mode));
}
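
/* For instance (a sketch; register number 4 and the rtx SRC are
   hypothetical), a target that passes a two-word argument in registers
   4 and 5 could load it with

        move_block_to_reg (4, src, 2, DImode);

   which uses a load_multiple pattern when the target provides one and
   otherwise falls back to one word_mode move per register, as above.  */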

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
#ifdef HAVE_store_multiple
  rtx pat;
  rtx last;
#endif
  enum machine_mode mode;

  if (nregs == 0)
    return;

  /* If SIZE is that of a mode no bigger than a word, just use that
     mode's store operation.  */
  if (size <= UNITS_PER_WORD
      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
    {
      emit_move_insn (change_address (x, mode, NULL),
                      gen_rtx_REG (mode, regno));
      return;
    }

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  Note that the previous test
     doesn't handle all cases (e.g. SIZE == 3).  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
        abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
                            gen_rtx_REG (word_mode, regno),
                            build_int_2 ((UNITS_PER_WORD - size)
                                         * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
                                GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
        abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
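
/* Sketch of the BYTES_BIG_ENDIAN case handled above (numbers are
   hypothetical): with UNITS_PER_WORD == 4 and SIZE == 3, the register
   value is shifted left by (4 - 3) * BITS_PER_UNIT == 8 bits first, so
   that the three meaningful bytes land at the low memory addresses of X;
   the earlier mode_for_size test cannot handle such sizes because no
   3-byte integer mode exists.  */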

/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block SRC in bytes, or -1 if not known.  ALIGN is the known alignment of
   SRC in bits.  */
/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
   the balance will be in what would be the low-order memory addresses, i.e.
   left justified for big endian, right justified for little endian.  This
   happens to be true for the targets currently using this support.  If this
   ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
   would be needed.  */

void
emit_group_load (dst, orig_src, ssize, align)
     rtx dst, orig_src;
     unsigned int align;
     int ssize;
{
  rtx *tmps, src;
  int start, i;

  if (GET_CODE (dst) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
          bytelen = ssize - bytepos;
          if (bytelen <= 0)
            abort ();
        }

      /* If we won't be loading directly from memory, protect the real source
         from strange tricks we might play; but make sure that the source can
         be loaded directly into the destination.  */
      src = orig_src;
      if (GET_CODE (orig_src) != MEM
          && (!CONSTANT_P (orig_src)
              || (GET_MODE (orig_src) != mode
                  && GET_MODE (orig_src) != VOIDmode)))
        {
          if (GET_MODE (orig_src) == VOIDmode)
            src = gen_reg_rtx (mode);
          else
            src = gen_reg_rtx (GET_MODE (orig_src));
          emit_move_insn (src, orig_src);
        }

      /* Optimize the access just a bit.  */
      if (GET_CODE (src) == MEM
          && align >= GET_MODE_ALIGNMENT (mode)
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        {
          tmps[i] = gen_reg_rtx (mode);
          emit_move_insn (tmps[i],
                          change_address (src, mode,
                                          plus_constant (XEXP (src, 0),
                                                         bytepos)));
        }
      else if (GET_CODE (src) == CONCAT)
        {
          if (bytepos == 0
              && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
            tmps[i] = XEXP (src, 0);
          else if (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
                   && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
            tmps[i] = XEXP (src, 1);
          else
            abort ();
        }
      else if (CONSTANT_P (src)
               || (GET_CODE (src) == REG && GET_MODE (src) == mode))
        tmps[i] = src;
      else
        tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
                                     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
                                     mode, mode, align, ssize);

      if (BYTES_BIG_ENDIAN && shift)
        expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
                      tmps[i], 0, OPTAB_WIDEN);
    }

  emit_queue ();

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
}
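
/* The PARALLEL destination above looks like, e.g. (a sketch; the register
   numbers and modes are hypothetical, in the style of the Irix 6 ABI):

        (parallel [(expr_list (reg:DF 32) (const_int 0))
                   (expr_list (reg:DI 5) (const_int 8))])

   i.e. each element pairs a register with the byte offset of the piece it
   should receive, and the loop above fills one TMPS entry per element
   before the final moves into the (probable) hard registers.  */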

/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block DST, or -1 if not known.  ALIGN is the known alignment of DST.  */

void
emit_group_store (orig_dst, src, ssize, align)
     rtx orig_dst, src;
     int ssize;
     unsigned int align;
{
  rtx *tmps, dst;
  int start, i;

  if (GET_CODE (src) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);
    }
  emit_queue ();

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
         a return statement.  In that case, the dst and src are the same,
         so no action is necessary.  */
      if (rtx_equal_p (dst, src))
        return;

      /* It is unclear if we can ever reach here, but we may as well handle
         it.  Allocate a temporary, and split this into a store/load to/from
         the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, ssize, align);
      emit_group_load (dst, temp, ssize, align);
      return;
    }
  else if (GET_CODE (dst) != MEM)
    {
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, const0_rtx);
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          if (BYTES_BIG_ENDIAN)
            {
              int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
              expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
                            tmps[i], 0, OPTAB_WIDEN);
            }
          bytelen = ssize - bytepos;
        }

      /* Optimize the access just a bit.  */
      if (GET_CODE (dst) == MEM
          && align >= GET_MODE_ALIGNMENT (mode)
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        emit_move_insn (change_address (dst, mode,
                                        plus_constant (XEXP (dst, 0),
                                                       bytepos)),
                        tmps[i]);
      else
        store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
                         mode, tmps[i], align, ssize);
    }

  emit_queue ();

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (GET_CODE (dst) == REG)
    emit_move_insn (orig_dst, dst);
}
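
/* emit_group_store is the mirror of emit_group_load: a value returned in
   the non-consecutive registers of a PARALLEL is stored back into an
   ordinary block.  A sketch with hypothetical operands:

        emit_group_store (result_mem, parallel_rtx, 16, 64);

   stores a 16-byte value described by PARALLEL_RTX into RESULT_MEM, whose
   alignment is known to be 64 bits.  */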

/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The primary purpose of this routine is to handle functions
   that return BLKmode structures in registers.  Some machines
   (the PA for example) want to return all small structures
   in registers regardless of the structure's alignment.  */

rtx
copy_blkmode_from_reg (tgtblk, srcreg, type)
     rtx tgtblk;
     rtx srcreg;
     tree type;
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;

  if (tgtblk == 0)
    {
      tgtblk = assign_temp (build_qualified_type (type,
                                                  (TYPE_QUALS (type)
                                                   | TYPE_QUAL_CONST)),
                            0, 1, 1);
      preserve_temp_slots (tgtblk);
    }

  /* This code assumes srcreg is at least a full word.  If it isn't,
     copy it into a new pseudo which is a full word.  */
  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));

  /* Structures whose size is not a multiple of a word are aligned
     to the least significant byte (to the right).  On a BYTES_BIG_ENDIAN
     machine, this means we must skip the empty high order bytes when
     calculating the bit offset.  */
  if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
    big_endian_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));

  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */
  for (bitpos = 0, xbitpos = big_endian_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
         word boundary and when xbitpos == big_endian_correction
         (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
          || xbitpos == big_endian_correction)
        src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
                                     GET_MODE (srcreg));

      /* We need a new destination operand each time bitpos is on
         a word boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
        dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);

      /* Use xbitpos for the source extraction (right justified) and
         bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
                       extract_bit_field (src, bitsize,
                                          xbitpos % BITS_PER_WORD, 1,
                                          NULL_RTX, word_mode, word_mode,
                                          bitsize, BITS_PER_WORD),
                       bitsize, BITS_PER_WORD);
    }

  return tgtblk;
}
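
/* A worked example of the correction above (hypothetical numbers): for a
   6-byte structure on a 32-bit BYTES_BIG_ENDIAN target,
   bytes % UNITS_PER_WORD == 2, so big_endian_correction is
   32 - 2 * 8 == 16.  The loop therefore starts extracting 16 bits into
   the first source word, skipping its empty high-order bytes, while the
   stores into TGTBLK still begin at bit 0.  */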

/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (call_fusage, reg)
     rtx *call_fusage, reg;
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
                         gen_rtx_USE (VOIDmode, reg), *call_fusage);
}

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (call_fusage, regno, nregs)
     rtx *call_fusage;
     int regno;
     int nregs;
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
}

/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (call_fusage, regs)
     rtx *call_fusage;
     rtx regs;
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
         registers.  This can also be a MEM for targets that pass values
         partially on the stack and partially in registers.  */
      if (reg != 0 && GET_CODE (reg) == REG)
        use_reg (call_fusage, reg);
    }
}
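
/* The USE lists built here end up attached to the call insn (as its
   CALL_INSN_FUNCTION_USAGE) so that flow knows the argument registers
   are live at the call.  E.g. (a sketch; the hard register number is
   hypothetical), a caller that loaded an argument into registers 4 and 5
   would record that with

        use_regs (&call_fusage, 4, 2);

   before emitting the call that consumes CALL_FUSAGE.  */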
\f

/* Determine whether the LEN bytes generated by CONSTFUN can be stored
   to memory using several move instructions.  CONSTFUNDATA is a pointer
   which will be passed as argument in every CONSTFUN call.  ALIGN is
   maximum alignment we can assume.  Return nonzero if a call to
   store_by_pieces should succeed.  */

int
can_store_by_pieces (len, constfun, constfundata, align)
     unsigned HOST_WIDE_INT len;
     rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
     PTR constfundata;
     unsigned int align;
{
  unsigned HOST_WIDE_INT max_size, l;
  HOST_WIDE_INT offset = 0;
  enum machine_mode mode, tmode;
  enum insn_code icode;
  int reverse;
  rtx cst;

  if (! MOVE_BY_PIECES_P (len, align))
    return 0;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* We would first store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  for (reverse = 0;
       reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
       reverse++)
    {
      l = len;
      mode = VOIDmode;
      max_size = MOVE_MAX_PIECES + 1;
      while (max_size > 1)
        {
          for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
               tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
            if (GET_MODE_SIZE (tmode) < max_size)
              mode = tmode;

          if (mode == VOIDmode)
            break;

          icode = mov_optab->handlers[(int) mode].insn_code;
          if (icode != CODE_FOR_nothing
              && align >= GET_MODE_ALIGNMENT (mode))
            {
              unsigned int size = GET_MODE_SIZE (mode);

              while (l >= size)
                {
                  if (reverse)
                    offset -= size;

                  cst = (*constfun) (constfundata, offset, mode);
                  if (!LEGITIMATE_CONSTANT_P (cst))
                    return 0;

                  if (!reverse)
                    offset += size;

                  l -= size;
                }
            }

          max_size = GET_MODE_SIZE (mode);
        }

      /* The code above should have handled everything.  */
      if (l != 0)
        abort ();
    }

  return 1;
}

/* Generate several move instructions to store LEN bytes generated by
   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
   pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  */

void
store_by_pieces (to, len, constfun, constfundata, align)
     rtx to;
     unsigned HOST_WIDE_INT len;
     rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
     PTR constfundata;
     unsigned int align;
{
  struct store_by_pieces data;

  if (! MOVE_BY_PIECES_P (len, align))
    abort ();
  to = protect_from_queue (to, 1);
  data.constfun = constfun;
  data.constfundata = constfundata;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
}
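
/* A CONSTFUN receives (DATA, OFFSET, MODE) and must return an rtx constant
   for the MODE-sized piece at OFFSET.  A hypothetical callback (a sketch,
   not used in this file) that always stores zero, in the manner of
   clear_by_pieces_1 below, would be

        static rtx
        zero_constfun (data, offset, mode)
             PTR data ATTRIBUTE_UNUSED;
             HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
             enum machine_mode mode ATTRIBUTE_UNUSED;
        {
          return const0_rtx;
        }

   A caller should first check can_store_by_pieces (len, zero_constfun,
   NULL, align) and only then call store_by_pieces with the same arguments
   plus the destination MEM, since store_by_pieces aborts when
   MOVE_BY_PIECES_P is false.  */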

/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  The caller must pass TO through protect_from_queue
   before calling.  ALIGN is maximum alignment we can assume.  */

static void
clear_by_pieces (to, len, align)
     rtx to;
     unsigned HOST_WIDE_INT len;
     unsigned int align;
{
  struct store_by_pieces data;

  data.constfun = clear_by_pieces_1;
  data.constfundata = NULL;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
}

/* Callback routine for clear_by_pieces.
   Return const0_rtx unconditionally.  */

static rtx
clear_by_pieces_1 (data, offset, mode)
     PTR data ATTRIBUTE_UNUSED;
     HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  return const0_rtx;
}

/* Subroutine of clear_by_pieces and store_by_pieces.
   Generate several move instructions to store LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  The caller must pass TO through protect_from_queue
   before calling.  ALIGN is maximum alignment we can assume.  */

static void
store_by_pieces_1 (data, align)
     struct store_by_pieces *data;
     unsigned int align;
{
  rtx to_addr = XEXP (data->to, 0);
  unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data->offset = 0;
  data->to_addr = to_addr;
  data->autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);

  data->explicit_inc_to = 0;
  data->reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data->reverse)
    data->offset = data->len;

  /* If storing requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!data->autinc_to
      && move_by_pieces_ninsns (data->len, align) > 2)
    {
      /* Determine the main mode we'll be using.  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
        {
          data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
          data->autinc_to = 1;
          data->explicit_inc_to = -1;
        }

      if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
          && ! data->autinc_to)
        {
          data->to_addr = copy_addr_to_reg (to_addr);
          data->autinc_to = 1;
          data->explicit_inc_to = 1;
        }

      if (! data->autinc_to && CONSTANT_P (to_addr))
        data->to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        store_by_pieces_2 (GEN_FCN (icode), mode, data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data->len != 0)
    abort ();
}
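
/* For example (a hypothetical case): storing 12 bytes at word alignment
   on a 32-bit target needs three SImode moves, so the code above copies
   TO_ADDR into a register and, when the target has post-increment,
   arranges for store_by_pieces_2 to emit a "store, then bump the address
   register" pair per piece (explicit_inc_to == 1) instead of three
   distinct constant-offset addresses.  */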

/* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
store_by_pieces_2 (genfun, mode, data)
     rtx (*genfun) PARAMS ((rtx, ...));
     enum machine_mode mode;
     struct store_by_pieces *data;
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1, cst;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->autinc_to)
        {
          to1 = gen_rtx_MEM (mode, data->to_addr);
          MEM_COPY_ATTRIBUTES (to1, data->to);
        }
      else
        to1 = change_address (data->to, mode,
                              plus_constant (data->to_addr, data->offset));

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  GEN_INT (-(HOST_WIDE_INT) size)));

      cst = (*data->constfun) (data->constfundata, data->offset, mode);
      emit_insn ((*genfun) (to1, cst));

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}
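
/* GENFUN here is just the move pattern's generator, i.e. what GEN_FCN
   yields for mov_optab->handlers[(int) mode].insn_code in
   store_by_pieces_1 above; each call emits one insn that stores a
   MODE-sized constant obtained from the callback.  */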
\f
/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
   its length in bytes and ALIGN is the maximum alignment we can assume it
   has.

   If we call a function that returns the length of the block, return it.  */

rtx
clear_storage (object, size, align)
     rtx object;
     rtx size;
     unsigned int align;
{
#ifdef TARGET_MEM_FUNCTIONS
  static tree fn;
  tree call_expr, arg_list;
#endif
  rtx retval = 0;

  /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
     just move a zero.  Otherwise, do this a piece at a time.  */
  if (GET_MODE (object) != BLKmode
      && GET_CODE (size) == CONST_INT
      && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
    emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
  else
    {
      object = protect_from_queue (object, 1);
      size = protect_from_queue (size, 0);

      if (GET_CODE (size) == CONST_INT
          && MOVE_BY_PIECES_P (INTVAL (size), align))
        clear_by_pieces (object, INTVAL (size), align);
      else
        {
          /* Try the most limited insn first, because there's no point
             including more than one in the machine description unless
             the more limited one has some advantage.  */

          rtx opalign = GEN_INT (align / BITS_PER_UNIT);
          enum machine_mode mode;

          for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
               mode = GET_MODE_WIDER_MODE (mode))
            {
              enum insn_code code = clrstr_optab[(int) mode];
              insn_operand_predicate_fn pred;

              if (code != CODE_FOR_nothing
                  /* We don't need MODE to be narrower than
                     BITS_PER_HOST_WIDE_INT here because if SIZE is less than
                     the mode mask, as it is returned by the macro, it will
                     definitely be less than the actual mode mask.  */
                  && ((GET_CODE (size) == CONST_INT
                       && ((unsigned HOST_WIDE_INT) INTVAL (size)
                           <= (GET_MODE_MASK (mode) >> 1)))
                      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
                  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
                      || (*pred) (object, BLKmode))
                  && ((pred = insn_data[(int) code].operand[2].predicate) == 0
                      || (*pred) (opalign, VOIDmode)))
                {
                  rtx op1;
                  rtx last = get_last_insn ();
                  rtx pat;

                  op1 = convert_to_mode (mode, size, 1);
                  pred = insn_data[(int) code].operand[1].predicate;
                  if (pred != 0 && ! (*pred) (op1, mode))
                    op1 = copy_to_mode_reg (mode, op1);

                  pat = GEN_FCN ((int) code) (object, op1, opalign);
                  if (pat)
                    {
                      emit_insn (pat);
                      return 0;
                    }
                  else
                    delete_insns_since (last);
                }
            }

          /* OBJECT or SIZE may have been passed through protect_from_queue.

             It is unsafe to save the value generated by protect_from_queue
             and reuse it later.  Consider what happens if emit_queue is
             called before the return value from protect_from_queue is used.

             Expansion of the CALL_EXPR below will call emit_queue before
             we are finished emitting RTL for argument setup.  So if we are
             not careful we could get the wrong value for an argument.

             To avoid this problem we go ahead and emit code to copy OBJECT
             and SIZE into new pseudos.  We can then place those new pseudos
             into an RTL_EXPR and use them later, even after a call to
             emit_queue.

             Note this is not strictly needed for library calls since they
             do not call emit_queue before loading their arguments.  However,
             we may need to have library calls call emit_queue in the future
             since failing to do so could cause problems for targets which
             define SMALL_REGISTER_CLASSES and pass arguments in registers.  */
          object = copy_to_mode_reg (Pmode, XEXP (object, 0));

#ifdef TARGET_MEM_FUNCTIONS
          size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
#else
          size = convert_to_mode (TYPE_MODE (integer_type_node), size,
                                  TREE_UNSIGNED (integer_type_node));
          size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
#endif

#ifdef TARGET_MEM_FUNCTIONS
          /* It is incorrect to use the libcall calling conventions to call
             memset in this context.

             This could be a user call to memset and the user may wish to
             examine the return value from memset.

             For targets where libcalls and normal calls have different
             conventions for returning pointers, we could end up generating
             incorrect code.

             So instead of using a libcall sequence we build up a suitable
             CALL_EXPR and expand the call in the normal fashion.  */
          if (fn == NULL_TREE)
            {
              tree fntype;

              /* This was copied from except.c; I don't know if all this is
                 necessary in this context or not.  */
              fn = get_identifier ("memset");
              fntype = build_pointer_type (void_type_node);
              fntype = build_function_type (fntype, NULL_TREE);
              fn = build_decl (FUNCTION_DECL, fn, fntype);
              ggc_add_tree_root (&fn, 1);
              DECL_EXTERNAL (fn) = 1;
              TREE_PUBLIC (fn) = 1;
              DECL_ARTIFICIAL (fn) = 1;
              TREE_NOTHROW (fn) = 1;
              make_decl_rtl (fn, NULL);
              assemble_external (fn);
            }

          /* We need to make an argument list for the function call.

             memset has three arguments: the first is a void * address, the
             second an integer with the initialization value, and the last
             is a size_t byte count for the copy.  */
          arg_list
            = build_tree_list (NULL_TREE,
                               make_tree (build_pointer_type (void_type_node),
                                          object));
          TREE_CHAIN (arg_list)
            = build_tree_list (NULL_TREE,
                               make_tree (integer_type_node, const0_rtx));
          TREE_CHAIN (TREE_CHAIN (arg_list))
            = build_tree_list (NULL_TREE, make_tree (sizetype, size));
          TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;

          /* Now we have to build up the CALL_EXPR itself.  */
          call_expr = build1 (ADDR_EXPR,
                              build_pointer_type (TREE_TYPE (fn)), fn);
          call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
                             call_expr, arg_list, NULL_TREE);
          TREE_SIDE_EFFECTS (call_expr) = 1;

          retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
#else
          emit_library_call (bzero_libfunc, LCT_NORMAL,
                             VOIDmode, 2, object, Pmode, size,
                             TYPE_MODE (integer_type_node));
#endif
        }
    }

  return retval;
}
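
/* Usage sketch (hypothetical operands): zeroing a 16-byte BLKmode MEM
   with 32-bit alignment is

        clear_storage (obj, GEN_INT (16), 32);

   which, per the code above, first tries clear_by_pieces, then each
   clrstrMM pattern from the narrowest mode up, and finally an expanded
   memset (or bzero) call.  */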

/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  rtx y_cst = NULL_RTX;
  rtx last_insn;

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  /* Never force constant_p_rtx to memory.  */
  if (GET_CODE (y) == CONSTANT_P_RTX)
    ;
  else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
    {
      y_cst = y;
      y = force_const_mem (mode, y);
    }

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
           && ! push_operand (x, GET_MODE (x)))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = change_address (x, VOIDmode, XEXP (x, 0));

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = change_address (y, VOIDmode, XEXP (y, 0));

  if (mode == BLKmode)
    abort ();

  last_insn = emit_move_insn_1 (x, y);

  if (y_cst && GET_CODE (x) == REG)
    REG_NOTES (last_insn)
      = gen_rtx_EXPR_LIST (REG_EQUAL, y_cst, REG_NOTES (last_insn));

  return last_insn;
}

/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx
emit_move_insn_1 (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);
  unsigned int i;

  if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
    abort ();

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));

  /* Expand complex moves by moving real part and imag part, if possible.  */
  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
           && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
                                                    * BITS_PER_UNIT),
                                                   (class == MODE_COMPLEX_INT
                                                    ? MODE_INT : MODE_FLOAT),
                                                   0))
           && (mov_optab->handlers[(int) submode].insn_code
               != CODE_FOR_nothing))
    {
      /* Don't split destination if it is a stack push.  */
      int stack = push_operand (x, GET_MODE (x));

#ifdef PUSH_ROUNDING
      /* In case we output to the stack, but the size is smaller than the
         machine can push exactly, we need to use move instructions.  */
      if (stack
          && PUSH_ROUNDING (GET_MODE_SIZE (submode)) != GET_MODE_SIZE (submode))
        {
          rtx temp;
          int offset1, offset2;

          /* Do not use anti_adjust_stack, since we don't want to update
             stack_pointer_delta.  */
          temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
                               sub_optab,
#else
                               add_optab,
#endif
                               stack_pointer_rtx,
                               GEN_INT
                               (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
                               stack_pointer_rtx,
                               0,
                               OPTAB_LIB_WIDEN);
          if (temp != stack_pointer_rtx)
            emit_move_insn (stack_pointer_rtx, temp);
#ifdef STACK_GROWS_DOWNWARD
          offset1 = 0;
          offset2 = GET_MODE_SIZE (submode);
#else
          offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
          offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
                     + GET_MODE_SIZE (submode));
#endif
          emit_move_insn (change_address (x, submode,
                                          gen_rtx_PLUS (Pmode,
                                                        stack_pointer_rtx,
                                                        GEN_INT (offset1))),
                          gen_realpart (submode, y));
          emit_move_insn (change_address (x, submode,
                                          gen_rtx_PLUS (Pmode,
                                                        stack_pointer_rtx,
                                                        GEN_INT (offset2))),
                          gen_imagpart (submode, y));
        }
      else
#endif
      /* If this is a stack push, push the highpart first, so it
         will be in the argument order.

         In that case, change_address is used only to convert
         the mode, not to change the address.  */
      if (stack)
        {
          /* Note that the real part always precedes the imag part in memory
             regardless of machine's endianness.  */
#ifdef STACK_GROWS_DOWNWARD
          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (gen_rtx_MEM (submode, XEXP (x, 0)),
                      gen_imagpart (submode, y)));
          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (gen_rtx_MEM (submode, XEXP (x, 0)),
                      gen_realpart (submode, y)));
#else
          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (gen_rtx_MEM (submode, XEXP (x, 0)),
                      gen_realpart (submode, y)));
          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (gen_rtx_MEM (submode, XEXP (x, 0)),
                      gen_imagpart (submode, y)));
#endif
        }
      else
        {
          rtx realpart_x, realpart_y;
          rtx imagpart_x, imagpart_y;

          /* If this is a complex value with each part being smaller than a
             word, the usual calling sequence will likely pack the pieces into
             a single register.  Unfortunately, SUBREG of hard registers only
             deals in terms of words, so we have a problem converting input
             arguments to the CONCAT of two registers that is used elsewhere
             for complex values.  If this is before reload, we can copy it into
             memory and reload.  FIXME, we should see about using extract and
             insert on integer registers, but complex short and complex char
             variables should be rarely used.  */
          if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
              && (reload_in_progress | reload_completed) == 0)
            {
              int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
              int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);

              if (packed_dest_p || packed_src_p)
                {
                  enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
                                               ? MODE_FLOAT : MODE_INT);

                  enum machine_mode reg_mode
                    = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);

                  if (reg_mode != BLKmode)
                    {
                      rtx mem = assign_stack_temp (reg_mode,
                                                   GET_MODE_SIZE (mode), 0);
                      rtx cmem = change_address (mem, mode, NULL_RTX);

                      cfun->cannot_inline
                        = N_("function using short complex types cannot be inline");

                      if (packed_dest_p)
                        {
                          rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
                          emit_move_insn_1 (cmem, y);
                          return emit_move_insn_1 (sreg, mem);
                        }
                      else
                        {
                          rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
                          emit_move_insn_1 (mem, sreg);
                          return emit_move_insn_1 (x, cmem);
                        }
                    }
                }
            }

          realpart_x = gen_realpart (submode, x);
          realpart_y = gen_realpart (submode, y);
          imagpart_x = gen_imagpart (submode, x);
          imagpart_y = gen_imagpart (submode, y);

          /* Show the output dies here.  This is necessary for SUBREGs
             of pseudos since we cannot track their lifetimes correctly;
             hard regs shouldn't appear here except as return values.
             We never want to emit such a clobber after reload.  */
          if (x != y
              && ! (reload_in_progress || reload_completed)
              && (GET_CODE (realpart_x) == SUBREG
                  || GET_CODE (imagpart_x) == SUBREG))
            {
              emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
            }

          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (realpart_x, realpart_y));
          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (imagpart_x, imagpart_y));
        }

      return get_last_insn ();
    }

  /* This will handle any multi-word mode that lacks a move_insn pattern.
     However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    {
      rtx last_insn = 0;
      rtx seq, inner;
      int need_clobber;

#ifdef PUSH_ROUNDING

      /* If X is a push on the stack, do the push now and replace
         X with a reference to the stack pointer.  */
      if (push_operand (x, GET_MODE (x)))
        {
          anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
          x = change_address (x, VOIDmode, stack_pointer_rtx);
        }
#endif

      /* If we are in reload, see if either operand is a MEM whose address
         is scheduled for replacement.  */
      if (reload_in_progress && GET_CODE (x) == MEM
          && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
        {
          rtx new = gen_rtx_MEM (GET_MODE (x), inner);

          MEM_COPY_ATTRIBUTES (new, x);
          x = new;
        }
      if (reload_in_progress && GET_CODE (y) == MEM
          && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
        {
          rtx new = gen_rtx_MEM (GET_MODE (y), inner);

          MEM_COPY_ATTRIBUTES (new, y);
          y = new;
        }

      start_sequence ();

      need_clobber = 0;
      for (i = 0;
           i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
           i++)
        {
          rtx xpart = operand_subword (x, i, 1, mode);
          rtx ypart = operand_subword (y, i, 1, mode);

          /* If we can't get a part of Y, put Y into memory if it is a
             constant.  Otherwise, force it into a register.  If we still
             can't get a part of Y, abort.  */
          if (ypart == 0 && CONSTANT_P (y))
            {
              y = force_const_mem (mode, y);
              ypart = operand_subword (y, i, 1, mode);
            }
          else if (ypart == 0)
            ypart = operand_subword_force (y, i, mode);

          if (xpart == 0 || ypart == 0)
            abort ();

          need_clobber |= (GET_CODE (xpart) == SUBREG);

          last_insn = emit_move_insn (xpart, ypart);
        }

      seq = gen_sequence ();
      end_sequence ();

      /* Show the output dies here.  This is necessary for SUBREGs
         of pseudos since we cannot track their lifetimes correctly;
         hard regs shouldn't appear here except as return values.
         We never want to emit such a clobber after reload.  */
      if (x != y
          && ! (reload_in_progress || reload_completed)
          && need_clobber != 0)
        {
          emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
        }

      emit_insn (seq);

      return last_insn;
    }
  else
    abort ();
}
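
/* An example of the multi-word fallback above (hypothetical case): moving
   a DImode value on a 32-bit target that defines no movdi pattern
   decomposes into two word_mode moves via operand_subword, preceded by a
   CLOBBER of the destination when some part is a SUBREG so that the
   lifetime of the full register stays visible to flow.  */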
\f
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (size, extra, below)
     rtx size;
     int extra, below;
{
  register rtx temp;

  size = convert_modes (Pmode, ptr_mode, size, 1);
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);
  else
    {
      temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
        temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
                             temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifndef STACK_GROWS_DOWNWARD
#ifdef ARGS_GROW_DOWNWARD
  if (!ACCUMULATE_OUTGOING_ARGS)
#else
  if (0)
#endif
#else
  if (1)
#endif
    {
      /* Return the lowest stack address when STACK or ARGS grow downward and
         we are not accumulating outgoing arguments (the c4x port uses such
         conventions).  */
      temp = virtual_outgoing_args_rtx;
      if (extra != 0 && below)
        temp = plus_constant (temp, extra);
    }
  else
    {
      if (GET_CODE (size) == CONST_INT)
        temp = plus_constant (virtual_outgoing_args_rtx,
                              -INTVAL (size) - (below ? 0 : extra));
      else if (extra != 0 && !below)
        temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
                             negate_rtx (Pmode, plus_constant (size, extra)));
      else
        temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
                             negate_rtx (Pmode, size));
    }

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}
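
/* Sketch (hypothetical numbers): on a STACK_GROWS_DOWNWARD target,
   push_block (GEN_INT (32), 0, 0) emits anti_adjust_stack (GEN_INT (32))
   and returns a memory address based on virtual_outgoing_args_rtx, which
   the caller then uses as the destination of a block move.  */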

/* Return an rtx for the address of the beginning of an as-if-it-was-pushed
   block of SIZE bytes.  */

static rtx
get_push_address (size)
     int size;
{
  register rtx temp;

  if (STACK_PUSH_CODE == POST_DEC)
    temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
  else if (STACK_PUSH_CODE == POST_INC)
    temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
  else
    temp = stack_pointer_rtx;

  return copy_to_reg (temp);
}

/* Emit single push insn.  */

static void
emit_single_push_insn (mode, x, type)
     rtx x;
     enum machine_mode mode;
     tree type;
{
#ifdef PUSH_ROUNDING
  rtx dest_addr;
  int rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
  rtx dest;

  if (GET_MODE_SIZE (mode) == rounded_size)
    dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
  else
    {
#ifdef STACK_GROWS_DOWNWARD
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                GEN_INT (-rounded_size));
#else
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                GEN_INT (rounded_size));
#endif
      dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
    }

  dest = gen_rtx_MEM (mode, dest_addr);

  stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));

  if (type != 0)
    {
      set_mem_attributes (dest, type, 1);
      /* Function incoming arguments may overlap with sibling call
         outgoing arguments and we cannot allow reordering of reads
         from function arguments with stores to outgoing arguments
         of sibling calls.  */
      MEM_ALIAS_SET (dest) = 0;
    }
  emit_move_insn (dest, x);
#else
  abort ();
#endif
}
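
/* For instance (hypothetical target numbers): pushing an HImode value
   where PUSH_ROUNDING rounds 2 bytes up to 4 cannot use the bare
   STACK_PUSH_CODE address, so the code above builds
   (pre_modify sp (plus sp -4)) and stores through that instead.  */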

/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bits) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   words of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.

   REG_PARM_STACK_SPACE is nonzero if functions require stack space
   for arguments passed in registers.  If nonzero, it will be the number
   of bytes required.  */

void
emit_push_insn (x, mode, type, size, align, partial, reg, extra,
                args_addr, args_so_far, reg_parm_stack_space,
                alignment_pad)
     register rtx x;
     enum machine_mode mode;
     tree type;
     rtx size;
     unsigned int align;
     int partial;
     rtx reg;
     int extra;
     rtx args_addr;
     rtx args_so_far;
     int reg_parm_stack_space;
     rtx alignment_pad;
{
  rtx xinner;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-update.  */
  if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x = protect_from_queue (x, 0);

  if (mode == BLKmode)
    {
      /* Copy a block into the stack, entirely or partially.  */

      register rtx temp;
      int used = partial * UNITS_PER_WORD;
      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
      int skip;

      if (size == 0)
        abort ();

      used -= offset;

      /* USED is now the # of bytes we need not copy to the stack
         because registers will take care of them.  */

      if (partial != 0)
        xinner = change_address (xinner, BLKmode,
                                 plus_constant (XEXP (xinner, 0), used));

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : used;

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
         and if there is no difficulty with push insns that skip bytes
         on the stack for alignment purposes.  */
      if (args_addr == 0
          && PUSH_ARGS
          && GET_CODE (size) == CONST_INT
          && skip == 0
          && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
          /* Here we avoid the case of a structure whose weak alignment
             forces many pushes of a small amount of data,
             and such small pushes do rounding that causes trouble.  */
          && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
              || align >= BIGGEST_ALIGNMENT
              || (PUSH_ROUNDING (align / BITS_PER_UNIT)
                  == (align / BITS_PER_UNIT)))
          && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
        {
          /* Push padding now if padding above and stack grows down,
             or if padding below and stack grows up.
             But if space already allocated, this has already been done.  */
          if (extra && args_addr == 0
              && where_pad != none && where_pad != stack_direction)
            anti_adjust_stack (GEN_INT (extra));

          move_by_pieces (NULL, xinner, INTVAL (size) - used, align);

          if (current_function_check_memory_usage && ! in_check_memory_usage)
            {
              rtx temp;

              in_check_memory_usage = 1;
              temp = get_push_address (INTVAL (size) - used);
              if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
                emit_library_call (chkr_copy_bitmap_libfunc,
                                   LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
                                   Pmode, XEXP (xinner, 0), Pmode,
                                   GEN_INT (INTVAL (size) - used),
                                   TYPE_MODE (sizetype));
              else
                emit_library_call (chkr_set_right_libfunc,
                                   LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
                                   Pmode, GEN_INT (INTVAL (size) - used),
                                   TYPE_MODE (sizetype),
                                   GEN_INT (MEMORY_USE_RW),
                                   TYPE_MODE (integer_type_node));
              in_check_memory_usage = 0;
            }
        }
      else
#endif /* PUSH_ROUNDING  */
        {
          rtx target;

          /* Otherwise make space on the stack and copy the data
             to the address of that space.  */

          /* Deduct words put into registers from the size we must copy.  */
          if (partial != 0)
            {
              if (GET_CODE (size) == CONST_INT)
                size = GEN_INT (INTVAL (size) - used);
              else
                size = expand_binop (GET_MODE (size), sub_optab, size,
                                     GEN_INT (used), NULL_RTX, 0,
                                     OPTAB_LIB_WIDEN);
            }

          /* Get the address of the stack space.
             In this case, we do not deal with EXTRA separately.
             A single stack adjust will do.  */
          if (! args_addr)
            {
              temp = push_block (size, extra, where_pad == downward);
              extra = 0;
            }
          else if (GET_CODE (args_so_far) == CONST_INT)
            temp = memory_address (BLKmode,
                                   plus_constant (args_addr,
                                                  skip + INTVAL (args_so_far)));
          else
            temp = memory_address (BLKmode,
                                   plus_constant (gen_rtx_PLUS (Pmode,
                                                                args_addr,
                                                                args_so_far),
                                                  skip));
          if (current_function_check_memory_usage && ! in_check_memory_usage)
            {
              in_check_memory_usage = 1;
              target = copy_to_reg (temp);
              if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
                emit_library_call (chkr_copy_bitmap_libfunc,
                                   LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
                                   target, Pmode,
                                   XEXP (xinner, 0), Pmode,
                                   size, TYPE_MODE (sizetype));
              else
                emit_library_call (chkr_set_right_libfunc,
                                   LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
                                   target, Pmode,
                                   size, TYPE_MODE (sizetype),
                                   GEN_INT (MEMORY_USE_RW),
                                   TYPE_MODE (integer_type_node));
              in_check_memory_usage = 0;
            }

          target = gen_rtx_MEM (BLKmode, temp);

          if (type != 0)
            {
              set_mem_attributes (target, type, 1);
              /* Function incoming arguments may overlap with sibling call
                 outgoing arguments and we cannot allow reordering of reads
                 from function arguments with stores to outgoing arguments
                 of sibling calls.  */
              MEM_ALIAS_SET (target) = 0;
            }

          /* TEMP is the address of the block.  Copy the data there.  */
          if (GET_CODE (size) == CONST_INT
              && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
            {
              move_by_pieces (target, xinner, INTVAL (size), align);
              goto ret;
            }
          else
            {
              rtx opalign = GEN_INT (align / BITS_PER_UNIT);
              enum machine_mode mode;

              for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
                   mode != VOIDmode;
                   mode = GET_MODE_WIDER_MODE (mode))
                {
                  enum insn_code code = movstr_optab[(int) mode];
                  insn_operand_predicate_fn pred;

                  if (code != CODE_FOR_nothing
                      && ((GET_CODE (size) == CONST_INT
                           && ((unsigned HOST_WIDE_INT) INTVAL (size)
                               <= (GET_MODE_MASK (mode) >> 1)))
                          || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
                      && (!(pred = insn_data[(int) code].operand[0].predicate)
                          || ((*pred) (target, BLKmode)))
                      && (!(pred = insn_data[(int) code].operand[1].predicate)
                          || ((*pred) (xinner, BLKmode)))
                      && (!(pred = insn_data[(int) code].operand[3].predicate)
                          || ((*pred) (opalign, VOIDmode))))
                    {
                      rtx op2 = convert_to_mode (mode, size, 1);
                      rtx last = get_last_insn ();
                      rtx pat;

                      pred = insn_data[(int) code].operand[2].predicate;
                      if (pred != 0 && ! (*pred) (op2, mode))
                        op2 = copy_to_mode_reg (mode, op2);

                      pat = GEN_FCN ((int) code) (target, xinner,
                                                  op2, opalign);
                      if (pat)
                        {
                          emit_insn (pat);
                          goto ret;
                        }
                      else
                        delete_insns_since (last);
                    }
                }
            }

          if (!ACCUMULATE_OUTGOING_ARGS)
            {
              /* If the source is referenced relative to the stack pointer,
                 copy it to another register to stabilize it.  We do not need
                 to do this if we know that we won't be changing sp.  */

              if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
                  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
                temp = copy_to_reg (temp);
            }

          /* Make inhibit_defer_pop nonzero around the library call
             to force it to pop the bcopy-arguments right away.  */
          NO_DEFER_POP;
#ifdef TARGET_MEM_FUNCTIONS
          emit_library_call (memcpy_libfunc, LCT_NORMAL,
ebb1b59a 3462 emit_library_call (memcpy_libfunc, LCT_NORMAL,
bbf6f052 3463 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
0fa83258
RK
3464 convert_to_mode (TYPE_MODE (sizetype),
3465 size, TREE_UNSIGNED (sizetype)),
26ba80fc 3466 TYPE_MODE (sizetype));
bbf6f052 3467#else
ebb1b59a 3468 emit_library_call (bcopy_libfunc, LCT_NORMAL,
bbf6f052 3469 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3b6f75e2
JW
3470 convert_to_mode (TYPE_MODE (integer_type_node),
3471 size,
3472 TREE_UNSIGNED (integer_type_node)),
3473 TYPE_MODE (integer_type_node));
bbf6f052
RK
3474#endif
3475 OK_DEFER_POP;
3476 }
3477 }
3478 else if (partial > 0)
3479 {
3480 /* Scalar partly in registers. */
3481
3482 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3483 int i;
3484 int not_stack;
 3485	  /* Number of words at the start of the argument
3486 that we must make space for but need not store. */
3487 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3488 int args_offset = INTVAL (args_so_far);
3489 int skip;
3490
3491 /* Push padding now if padding above and stack grows down,
3492 or if padding below and stack grows up.
3493 But if space already allocated, this has already been done. */
3494 if (extra && args_addr == 0
3495 && where_pad != none && where_pad != stack_direction)
906c4e36 3496 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
3497
3498 /* If we make space by pushing it, we might as well push
3499 the real data. Otherwise, we can leave OFFSET nonzero
3500 and leave the space uninitialized. */
3501 if (args_addr == 0)
3502 offset = 0;
3503
3504 /* Now NOT_STACK gets the number of words that we don't need to
3505 allocate on the stack. */
3506 not_stack = partial - offset;
3507
3508 /* If the partial register-part of the arg counts in its stack size,
3509 skip the part of stack space corresponding to the registers.
3510 Otherwise, start copying to the beginning of the stack space,
3511 by setting SKIP to 0. */
e5e809f4 3512 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
bbf6f052
RK
3513
3514 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3515 x = validize_mem (force_const_mem (mode, x));
3516
3517 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3518 SUBREGs of such registers are not allowed. */
3519 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3520 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3521 x = copy_to_reg (x);
3522
3523 /* Loop over all the words allocated on the stack for this arg. */
3524 /* We can do it by words, because any scalar bigger than a word
 3525	 has a size that is a multiple of a word.  */
3526#ifndef PUSH_ARGS_REVERSED
3527 for (i = not_stack; i < size; i++)
3528#else
3529 for (i = size - 1; i >= not_stack; i--)
3530#endif
3531 if (i >= not_stack + offset)
3532 emit_push_insn (operand_subword_force (x, i, mode),
906c4e36
RK
3533 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3534 0, args_addr,
3535 GEN_INT (args_offset + ((i - not_stack + skip)
e5e809f4 3536 * UNITS_PER_WORD)),
4fc026cd 3537 reg_parm_stack_space, alignment_pad);
bbf6f052
RK
3538 }
3539 else
3540 {
3541 rtx addr;
921b3427 3542 rtx target = NULL_RTX;
3bdf5ad1 3543 rtx dest;
bbf6f052
RK
3544
3545 /* Push padding now if padding above and stack grows down,
3546 or if padding below and stack grows up.
3547 But if space already allocated, this has already been done. */
3548 if (extra && args_addr == 0
3549 && where_pad != none && where_pad != stack_direction)
906c4e36 3550 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
3551
3552#ifdef PUSH_ROUNDING
f73ad30e 3553 if (args_addr == 0 && PUSH_ARGS)
566aa174 3554 emit_single_push_insn (mode, x, type);
bbf6f052
RK
3555 else
3556#endif
921b3427
RK
3557 {
3558 if (GET_CODE (args_so_far) == CONST_INT)
3559 addr
3560 = memory_address (mode,
3a94c984 3561 plus_constant (args_addr,
921b3427 3562 INTVAL (args_so_far)));
3a94c984 3563 else
38a448ca
RH
3564 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3565 args_so_far));
921b3427 3566 target = addr;
566aa174
JH
3567 dest = gen_rtx_MEM (mode, addr);
3568 if (type != 0)
3569 {
3570 set_mem_attributes (dest, type, 1);
3571 /* Function incoming arguments may overlap with sibling call
3572 outgoing arguments and we cannot allow reordering of reads
3573 from function arguments with stores to outgoing arguments
3574 of sibling calls. */
3575 MEM_ALIAS_SET (dest) = 0;
3576 }
bbf6f052 3577
566aa174 3578 emit_move_insn (dest, x);
3bdf5ad1 3579
566aa174 3580 }
921b3427 3581
7d384cc0 3582 if (current_function_check_memory_usage && ! in_check_memory_usage)
921b3427 3583 {
956d6950 3584 in_check_memory_usage = 1;
921b3427
RK
3585 if (target == 0)
3586 target = get_push_address (GET_MODE_SIZE (mode));
3587
c85f7c16 3588 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
ebb1b59a
BS
3589 emit_library_call (chkr_copy_bitmap_libfunc,
3590 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3591 Pmode, XEXP (x, 0), Pmode,
921b3427
RK
3592 GEN_INT (GET_MODE_SIZE (mode)),
3593 TYPE_MODE (sizetype));
3594 else
ebb1b59a
BS
3595 emit_library_call (chkr_set_right_libfunc,
3596 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3597 Pmode, GEN_INT (GET_MODE_SIZE (mode)),
921b3427 3598 TYPE_MODE (sizetype),
956d6950
JL
3599 GEN_INT (MEMORY_USE_RW),
3600 TYPE_MODE (integer_type_node));
3601 in_check_memory_usage = 0;
921b3427 3602 }
bbf6f052
RK
3603 }
3604
3605 ret:
3606 /* If part should go in registers, copy that part
3607 into the appropriate registers. Do this now, at the end,
3608 since mem-to-mem copies above may do function calls. */
cd048831 3609 if (partial > 0 && reg != 0)
fffa9c1d
JW
3610 {
3611 /* Handle calls that pass values in multiple non-contiguous locations.
3612 The Irix 6 ABI has examples of this. */
3613 if (GET_CODE (reg) == PARALLEL)
aac5cc16 3614 emit_group_load (reg, x, -1, align); /* ??? size? */
fffa9c1d
JW
3615 else
3616 move_block_to_reg (REGNO (reg), x, partial, mode);
3617 }
bbf6f052
RK
3618
3619 if (extra && args_addr == 0 && where_pad == stack_direction)
906c4e36 3620 anti_adjust_stack (GEN_INT (extra));
3a94c984 3621
3ea2292a 3622 if (alignment_pad && args_addr == 0)
4fc026cd 3623 anti_adjust_stack (alignment_pad);
bbf6f052
RK
3624}
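
/* A minimal, self-contained model of the "scalar partly in registers"
   word loop above: SIZE words total, the first NOT_STACK words travel in
   registers, and the remaining words land at consecutive word slots from
   ARGS_OFFSET (the OFFSET/SKIP refinements are left out).  Every name in
   it is a toy stand-in invented for illustration, not an rtl-level
   helper, so the block is guarded out of compilation.  */
#if 0
#include <stdio.h>
#include <string.h>

#define TOY_UNITS_PER_WORD 4

static void
toy_push_partial_arg (const unsigned char *arg, int size, int not_stack,
		      unsigned char *stack, int args_offset)
{
  int i;

  /* Same direction as the non-PUSH_ARGS_REVERSED loop: low words first.  */
  for (i = not_stack; i < size; i++)
    memcpy (stack + args_offset + (i - not_stack) * TOY_UNITS_PER_WORD,
	    arg + i * TOY_UNITS_PER_WORD, TOY_UNITS_PER_WORD);
}

int
main (void)
{
  unsigned char arg[16] = "ABCDEFGHIJKLMNOP";	/* four 4-byte words */
  unsigned char stack[32];

  memset (stack, 0, sizeof stack);
  /* Word 0 goes in a register; words 1..3 are stored on the stack.  */
  toy_push_partial_arg (arg, 4, 1, stack, 0);
  printf ("%.12s\n", (char *) stack);		/* prints EFGHIJKLMNOP */
  return 0;
}
#endif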
3625\f
296b4ed9
RK
3626/* Return X if X can be used as a subtarget in a sequence of arithmetic
3627 operations. */
3628
3629static rtx
3630get_subtarget (x)
3631 rtx x;
3632{
3633 return ((x == 0
3634 /* Only registers can be subtargets. */
3635 || GET_CODE (x) != REG
3636 /* If the register is readonly, it can't be set more than once. */
3637 || RTX_UNCHANGING_P (x)
3638 /* Don't use hard regs to avoid extending their life. */
3639 || REGNO (x) < FIRST_PSEUDO_REGISTER
3640 /* Avoid subtargets inside loops,
3641 since they hide some invariant expressions. */
3642 || preserve_subexpressions_p ())
3643 ? 0 : x);
3644}
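
/* A hypothetical rendition of the same filter over plain flags, to make
   the rejection cases concrete; the toy_* names and flag fields are
   invented for illustration only, so the block is guarded out.  */
#if 0
#include <stdio.h>

#define TOY_FIRST_PSEUDO_REGISTER 64

struct toy_reg
{
  int is_reg;		/* is this a REG at all?  */
  int unchanging;	/* RTX_UNCHANGING_P: may be set only once */
  int regno;
};

static struct toy_reg *
toy_get_subtarget (struct toy_reg *x, int inside_loop)
{
  if (x == 0
      || ! x->is_reg				/* only registers qualify */
      || x->unchanging				/* read-only register */
      || x->regno < TOY_FIRST_PSEUDO_REGISTER	/* don't lengthen hard-reg lives */
      || inside_loop)				/* subtargets hide invariants */
    return 0;
  return x;
}

int
main (void)
{
  struct toy_reg pseudo = { 1, 0, 100 };
  struct toy_reg hard = { 1, 0, 3 };

  printf ("%d %d\n",
	  toy_get_subtarget (&pseudo, 0) != 0,	/* 1: usable subtarget */
	  toy_get_subtarget (&hard, 0) != 0);	/* 0: hard reg rejected */
  return 0;
}
#endif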
3645
bbf6f052
RK
3646/* Expand an assignment that stores the value of FROM into TO.
3647 If WANT_VALUE is nonzero, return an rtx for the value of TO.
709f5be1
RS
3648 (This may contain a QUEUED rtx;
3649 if the value is constant, this rtx is a constant.)
3650 Otherwise, the returned value is NULL_RTX.
bbf6f052
RK
3651
3652 SUGGEST_REG is no longer actually used.
3653 It used to mean, copy the value through a register
3654 and return that register, if that is possible.
709f5be1 3655 We now use WANT_VALUE to decide whether to do this. */
bbf6f052
RK
3656
3657rtx
3658expand_assignment (to, from, want_value, suggest_reg)
3659 tree to, from;
3660 int want_value;
c5c76735 3661 int suggest_reg ATTRIBUTE_UNUSED;
bbf6f052
RK
3662{
3663 register rtx to_rtx = 0;
3664 rtx result;
3665
3666 /* Don't crash if the lhs of the assignment was erroneous. */
3667
3668 if (TREE_CODE (to) == ERROR_MARK)
709f5be1
RS
3669 {
3670 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3671 return want_value ? result : NULL_RTX;
3672 }
bbf6f052
RK
3673
3674 /* Assignment of a structure component needs special treatment
3675 if the structure component's rtx is not simply a MEM.
6be58303
JW
3676 Assignment of an array element at a constant index, and assignment of
 3677	 an array element in an unaligned packed structure field, have the same
3678 problem. */
bbf6f052 3679
08293add 3680 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
b4e3fabb 3681 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
bbf6f052
RK
3682 {
3683 enum machine_mode mode1;
770ae6cc 3684 HOST_WIDE_INT bitsize, bitpos;
7bb0943f 3685 tree offset;
bbf6f052
RK
3686 int unsignedp;
3687 int volatilep = 0;
0088fcb1 3688 tree tem;
729a2125 3689 unsigned int alignment;
0088fcb1
RK
3690
3691 push_temp_slots ();
839c4796
RK
3692 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3693 &unsignedp, &volatilep, &alignment);
bbf6f052
RK
3694
3695 /* If we are going to use store_bit_field and extract_bit_field,
3696 make sure to_rtx will be safe for multiple use. */
3697
3698 if (mode1 == VOIDmode && want_value)
3699 tem = stabilize_reference (tem);
3700
921b3427 3701 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
7bb0943f
RS
3702 if (offset != 0)
3703 {
906c4e36 3704 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7bb0943f
RS
3705
3706 if (GET_CODE (to_rtx) != MEM)
3707 abort ();
bd070e1a
RH
3708
3709 if (GET_MODE (offset_rtx) != ptr_mode)
3710 {
3711#ifdef POINTERS_EXTEND_UNSIGNED
822a3443 3712 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
bd070e1a
RH
3713#else
3714 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3715#endif
3716 }
3717
9a7b9f4f
JL
3718 /* A constant address in TO_RTX can have VOIDmode, we must not try
3719 to call force_reg for that case. Avoid that case. */
89752202
HB
3720 if (GET_CODE (to_rtx) == MEM
3721 && GET_MODE (to_rtx) == BLKmode
9a7b9f4f 3722 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
89752202 3723 && bitsize
3a94c984 3724 && (bitpos % bitsize) == 0
89752202 3725 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
19caa751 3726 && alignment == GET_MODE_ALIGNMENT (mode1))
89752202
HB
3727 {
3728 rtx temp = change_address (to_rtx, mode1,
3729 plus_constant (XEXP (to_rtx, 0),
3730 (bitpos /
3731 BITS_PER_UNIT)));
3732 if (GET_CODE (XEXP (temp, 0)) == REG)
3733 to_rtx = temp;
3734 else
3735 to_rtx = change_address (to_rtx, mode1,
3736 force_reg (GET_MODE (XEXP (temp, 0)),
3737 XEXP (temp, 0)));
3738 bitpos = 0;
3739 }
3740
7bb0943f 3741 to_rtx = change_address (to_rtx, VOIDmode,
38a448ca 3742 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
c5c76735
JL
3743 force_reg (ptr_mode,
3744 offset_rtx)));
7bb0943f 3745 }
c5c76735 3746
bbf6f052
RK
3747 if (volatilep)
3748 {
3749 if (GET_CODE (to_rtx) == MEM)
01188446
JW
3750 {
3751 /* When the offset is zero, to_rtx is the address of the
3752 structure we are storing into, and hence may be shared.
3753 We must make a new MEM before setting the volatile bit. */
3754 if (offset == 0)
effbcc6a
RK
3755 to_rtx = copy_rtx (to_rtx);
3756
01188446
JW
3757 MEM_VOLATILE_P (to_rtx) = 1;
3758 }
bbf6f052
RK
3759#if 0 /* This was turned off because, when a field is volatile
3760 in an object which is not volatile, the object may be in a register,
3761 and then we would abort over here. */
3762 else
3763 abort ();
3764#endif
3765 }
3766
956d6950
JL
3767 if (TREE_CODE (to) == COMPONENT_REF
3768 && TREE_READONLY (TREE_OPERAND (to, 1)))
3769 {
8bd6ecc2 3770 if (offset == 0)
956d6950
JL
3771 to_rtx = copy_rtx (to_rtx);
3772
3773 RTX_UNCHANGING_P (to_rtx) = 1;
3774 }
3775
921b3427 3776 /* Check the access. */
7d384cc0 3777 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
921b3427
RK
3778 {
3779 rtx to_addr;
3780 int size;
3781 int best_mode_size;
3782 enum machine_mode best_mode;
3783
3784 best_mode = get_best_mode (bitsize, bitpos,
3785 TYPE_ALIGN (TREE_TYPE (tem)),
3786 mode1, volatilep);
3787 if (best_mode == VOIDmode)
3788 best_mode = QImode;
3789
3790 best_mode_size = GET_MODE_BITSIZE (best_mode);
3791 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3792 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3793 size *= GET_MODE_SIZE (best_mode);
3794
3795 /* Check the access right of the pointer. */
ea4da9db 3796 in_check_memory_usage = 1;
e9a25f70 3797 if (size)
ebb1b59a
BS
3798 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
3799 VOIDmode, 3, to_addr, Pmode,
e9a25f70 3800 GEN_INT (size), TYPE_MODE (sizetype),
956d6950
JL
3801 GEN_INT (MEMORY_USE_WO),
3802 TYPE_MODE (integer_type_node));
ea4da9db 3803 in_check_memory_usage = 0;
921b3427
RK
3804 }
3805
a69beca1
RK
3806 /* If this is a varying-length object, we must get the address of
3807 the source and do an explicit block move. */
3808 if (bitsize < 0)
3809 {
3810 unsigned int from_align;
3811 rtx from_rtx = expand_expr_unaligned (from, &from_align);
3812 rtx inner_to_rtx
3813 = change_address (to_rtx, VOIDmode,
3814 plus_constant (XEXP (to_rtx, 0),
3815 bitpos / BITS_PER_UNIT));
3816
3817 emit_block_move (inner_to_rtx, from_rtx, expr_size (from),
19caa751 3818 MIN (alignment, from_align));
a69beca1
RK
3819 free_temp_slots ();
3820 pop_temp_slots ();
3821 return to_rtx;
3822 }
3823 else
3824 {
3825 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3826 (want_value
3827 /* Spurious cast for HPUX compiler. */
3828 ? ((enum machine_mode)
3829 TYPE_MODE (TREE_TYPE (to)))
3830 : VOIDmode),
3831 unsignedp,
a69beca1
RK
3832 alignment,
3833 int_size_in_bytes (TREE_TYPE (tem)),
3834 get_alias_set (to));
3835
3836 preserve_temp_slots (result);
3837 free_temp_slots ();
3838 pop_temp_slots ();
3839
3840 /* If the value is meaningful, convert RESULT to the proper mode.
3841 Otherwise, return nothing. */
3842 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3843 TYPE_MODE (TREE_TYPE (from)),
3844 result,
3845 TREE_UNSIGNED (TREE_TYPE (to)))
3846 : NULL_RTX);
3847 }
bbf6f052
RK
3848 }
3849
cd1db108
RS
3850 /* If the rhs is a function call and its value is not an aggregate,
3851 call the function before we start to compute the lhs.
3852 This is needed for correct code for cases such as
3853 val = setjmp (buf) on machines where reference to val
1ad87b63
RK
3854 requires loading up part of an address in a separate insn.
3855
1858863b
JW
3856 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3857 since it might be a promoted variable where the zero- or sign- extension
3858 needs to be done. Handling this in the normal way is safe because no
3859 computation is done before the call. */
1ad87b63 3860 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
b35cd3c1 3861 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
1858863b
JW
3862 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3863 && GET_CODE (DECL_RTL (to)) == REG))
cd1db108 3864 {
0088fcb1
RK
3865 rtx value;
3866
3867 push_temp_slots ();
3868 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
cd1db108 3869 if (to_rtx == 0)
921b3427 3870 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
aaf87c45 3871
fffa9c1d
JW
3872 /* Handle calls that return values in multiple non-contiguous locations.
3873 The Irix 6 ABI has examples of this. */
3874 if (GET_CODE (to_rtx) == PARALLEL)
aac5cc16 3875 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
19caa751 3876 TYPE_ALIGN (TREE_TYPE (from)));
fffa9c1d 3877 else if (GET_MODE (to_rtx) == BLKmode)
db3ec607 3878 emit_block_move (to_rtx, value, expr_size (from),
19caa751 3879 TYPE_ALIGN (TREE_TYPE (from)));
aaf87c45 3880 else
6419e5b0
DT
3881 {
3882#ifdef POINTERS_EXTEND_UNSIGNED
ab40f612
DT
3883 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3884 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
6419e5b0
DT
3885 value = convert_memory_address (GET_MODE (to_rtx), value);
3886#endif
3887 emit_move_insn (to_rtx, value);
3888 }
cd1db108
RS
3889 preserve_temp_slots (to_rtx);
3890 free_temp_slots ();
0088fcb1 3891 pop_temp_slots ();
709f5be1 3892 return want_value ? to_rtx : NULL_RTX;
cd1db108
RS
3893 }
3894
bbf6f052
RK
3895 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3896 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3897
3898 if (to_rtx == 0)
41472af8
MM
3899 {
3900 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3901 if (GET_CODE (to_rtx) == MEM)
3902 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3903 }
bbf6f052 3904
86d38d25 3905 /* Don't move directly into a return register. */
14a774a9
RK
3906 if (TREE_CODE (to) == RESULT_DECL
3907 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
86d38d25 3908 {
0088fcb1
RK
3909 rtx temp;
3910
3911 push_temp_slots ();
3912 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
14a774a9
RK
3913
3914 if (GET_CODE (to_rtx) == PARALLEL)
3915 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
19caa751 3916 TYPE_ALIGN (TREE_TYPE (from)));
14a774a9
RK
3917 else
3918 emit_move_insn (to_rtx, temp);
3919
86d38d25
RS
3920 preserve_temp_slots (to_rtx);
3921 free_temp_slots ();
0088fcb1 3922 pop_temp_slots ();
709f5be1 3923 return want_value ? to_rtx : NULL_RTX;
86d38d25
RS
3924 }
3925
bbf6f052
RK
3926 /* In case we are returning the contents of an object which overlaps
3927 the place the value is being stored, use a safe function when copying
3928 a value through a pointer into a structure value return block. */
3929 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3930 && current_function_returns_struct
3931 && !current_function_returns_pcc_struct)
3932 {
0088fcb1
RK
3933 rtx from_rtx, size;
3934
3935 push_temp_slots ();
33a20d10 3936 size = expr_size (from);
921b3427
RK
3937 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3938 EXPAND_MEMORY_USE_DONT);
3939
3940 /* Copy the rights of the bitmap. */
7d384cc0 3941 if (current_function_check_memory_usage)
ebb1b59a
BS
3942 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
3943 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
6a9c4aed 3944 XEXP (from_rtx, 0), Pmode,
921b3427
RK
3945 convert_to_mode (TYPE_MODE (sizetype),
3946 size, TREE_UNSIGNED (sizetype)),
3947 TYPE_MODE (sizetype));
bbf6f052
RK
3948
3949#ifdef TARGET_MEM_FUNCTIONS
b215b52e 3950 emit_library_call (memmove_libfunc, LCT_NORMAL,
bbf6f052
RK
3951 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3952 XEXP (from_rtx, 0), Pmode,
0fa83258
RK
3953 convert_to_mode (TYPE_MODE (sizetype),
3954 size, TREE_UNSIGNED (sizetype)),
26ba80fc 3955 TYPE_MODE (sizetype));
bbf6f052 3956#else
ebb1b59a 3957 emit_library_call (bcopy_libfunc, LCT_NORMAL,
bbf6f052
RK
3958 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3959 XEXP (to_rtx, 0), Pmode,
3b6f75e2
JW
3960 convert_to_mode (TYPE_MODE (integer_type_node),
3961 size, TREE_UNSIGNED (integer_type_node)),
3962 TYPE_MODE (integer_type_node));
bbf6f052
RK
3963#endif
3964
3965 preserve_temp_slots (to_rtx);
3966 free_temp_slots ();
0088fcb1 3967 pop_temp_slots ();
709f5be1 3968 return want_value ? to_rtx : NULL_RTX;
bbf6f052
RK
3969 }
3970
3971 /* Compute FROM and store the value in the rtx we got. */
3972
0088fcb1 3973 push_temp_slots ();
bbf6f052
RK
3974 result = store_expr (from, to_rtx, want_value);
3975 preserve_temp_slots (result);
3976 free_temp_slots ();
0088fcb1 3977 pop_temp_slots ();
709f5be1 3978 return want_value ? result : NULL_RTX;
bbf6f052
RK
3979}
3980
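/* Source-level illustration of two cases expand_assignment singles out:
   a bit-field component on the lhs (which must go through store_field
   rather than a plain move) and a CALL_EXPR rhs such as the setjmp case
   named in the comment above (expanded before the lhs is touched).  A
   standalone sketch, not code produced by the compiler; the struct and
   variable names are invented.  */
#if 0
#include <setjmp.h>
#include <stdio.h>

struct packed_flags
{
  unsigned ready : 1;
  unsigned count : 7;
};

static jmp_buf buf;

int
main (void)
{
  struct packed_flags f = { 0, 0 };
  int val;

  f.count = 42;		/* COMPONENT_REF lhs: the bit-field store path */
  val = setjmp (buf);	/* CALL_EXPR rhs, as in the comment's example */
  if (val == 0)
    longjmp (buf, 7);

  printf ("%d %d\n", f.count, val);	/* prints 42 7 */
  return 0;
}
#endif
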
3981/* Generate code for computing expression EXP,
3982 and storing the value into TARGET.
bbf6f052
RK
3983 TARGET may contain a QUEUED rtx.
3984
709f5be1
RS
3985 If WANT_VALUE is nonzero, return a copy of the value
3986 not in TARGET, so that we can be sure to use the proper
3987 value in a containing expression even if TARGET has something
3988 else stored in it. If possible, we copy the value through a pseudo
3989 and return that pseudo. Or, if the value is constant, we try to
3990 return the constant. In some cases, we return a pseudo
3991 copied *from* TARGET.
3992
3993 If the mode is BLKmode then we may return TARGET itself.
 3994	 It turns out that in BLKmode this doesn't cause a problem,
 3995	 because C has no operators that could combine two different
 3996	 assignments into the same BLKmode object with different values
 3997	 without an intervening sequence point.  Will other languages need this to
3998 be more thorough?
3999
4000 If WANT_VALUE is 0, we return NULL, to make sure
4001 to catch quickly any cases where the caller uses the value
4002 and fails to set WANT_VALUE. */
bbf6f052
RK
4003
4004rtx
709f5be1 4005store_expr (exp, target, want_value)
bbf6f052
RK
4006 register tree exp;
4007 register rtx target;
709f5be1 4008 int want_value;
bbf6f052
RK
4009{
4010 register rtx temp;
4011 int dont_return_target = 0;
e5408e52 4012 int dont_store_target = 0;
bbf6f052
RK
4013
4014 if (TREE_CODE (exp) == COMPOUND_EXPR)
4015 {
4016 /* Perform first part of compound expression, then assign from second
4017 part. */
4018 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4019 emit_queue ();
709f5be1 4020 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
bbf6f052
RK
4021 }
4022 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4023 {
4024 /* For conditional expression, get safe form of the target. Then
4025 test the condition, doing the appropriate assignment on either
4026 side. This avoids the creation of unnecessary temporaries.
4027 For non-BLKmode, it is more efficient not to do this. */
4028
4029 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4030
4031 emit_queue ();
4032 target = protect_from_queue (target, 1);
4033
dabf8373 4034 do_pending_stack_adjust ();
bbf6f052
RK
4035 NO_DEFER_POP;
4036 jumpifnot (TREE_OPERAND (exp, 0), lab1);
956d6950 4037 start_cleanup_deferral ();
709f5be1 4038 store_expr (TREE_OPERAND (exp, 1), target, 0);
956d6950 4039 end_cleanup_deferral ();
bbf6f052
RK
4040 emit_queue ();
4041 emit_jump_insn (gen_jump (lab2));
4042 emit_barrier ();
4043 emit_label (lab1);
956d6950 4044 start_cleanup_deferral ();
709f5be1 4045 store_expr (TREE_OPERAND (exp, 2), target, 0);
956d6950 4046 end_cleanup_deferral ();
bbf6f052
RK
4047 emit_queue ();
4048 emit_label (lab2);
4049 OK_DEFER_POP;
a3a58acc 4050
709f5be1 4051 return want_value ? target : NULL_RTX;
bbf6f052 4052 }
bbf6f052 4053 else if (queued_subexp_p (target))
709f5be1
RS
4054 /* If target contains a postincrement, let's not risk
4055 using it as the place to generate the rhs. */
bbf6f052
RK
4056 {
4057 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4058 {
4059 /* Expand EXP into a new pseudo. */
4060 temp = gen_reg_rtx (GET_MODE (target));
4061 temp = expand_expr (exp, temp, GET_MODE (target), 0);
4062 }
4063 else
906c4e36 4064 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
709f5be1
RS
4065
4066 /* If target is volatile, ANSI requires accessing the value
4067 *from* the target, if it is accessed. So make that happen.
4068 In no case return the target itself. */
4069 if (! MEM_VOLATILE_P (target) && want_value)
4070 dont_return_target = 1;
bbf6f052 4071 }
12f06d17
CH
4072 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
4073 && GET_MODE (target) != BLKmode)
4074 /* If target is in memory and caller wants value in a register instead,
4075 arrange that. Pass TARGET as target for expand_expr so that,
4076 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4077 We know expand_expr will not use the target in that case.
4078 Don't do this if TARGET is volatile because we are supposed
4079 to write it and then read it. */
4080 {
1da93fe0 4081 temp = expand_expr (exp, target, GET_MODE (target), 0);
12f06d17 4082 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
e5408e52
JJ
4083 {
4084 /* If TEMP is already in the desired TARGET, only copy it from
4085 memory and don't store it there again. */
4086 if (temp == target
4087 || (rtx_equal_p (temp, target)
4088 && ! side_effects_p (temp) && ! side_effects_p (target)))
4089 dont_store_target = 1;
4090 temp = copy_to_reg (temp);
4091 }
12f06d17
CH
4092 dont_return_target = 1;
4093 }
1499e0a8
RK
4094 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
 4095	 /* If this is a scalar in a register that is stored in a wider mode
4096 than the declared mode, compute the result into its declared mode
4097 and then convert to the wider mode. Our value is the computed
4098 expression. */
4099 {
5a32d038 4100 /* If we don't want a value, we can do the conversion inside EXP,
f635a84d
RK
4101 which will often result in some optimizations. Do the conversion
4102 in two steps: first change the signedness, if needed, then
ab6c58f1
RK
4103 the extend. But don't do this if the type of EXP is a subtype
4104 of something else since then the conversion might involve
4105 more than just converting modes. */
4106 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4107 && TREE_TYPE (TREE_TYPE (exp)) == 0)
f635a84d
RK
4108 {
4109 if (TREE_UNSIGNED (TREE_TYPE (exp))
4110 != SUBREG_PROMOTED_UNSIGNED_P (target))
4111 exp
4112 = convert
4113 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
4114 TREE_TYPE (exp)),
4115 exp);
4116
4117 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
4118 SUBREG_PROMOTED_UNSIGNED_P (target)),
4119 exp);
4120 }
3a94c984 4121
1499e0a8 4122 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
b258707c 4123
766f36c7 4124 /* If TEMP is a volatile MEM and we want a result value, make
f29369b9
RK
4125 the access now so it gets done only once. Likewise if
4126 it contains TARGET. */
4127 if (GET_CODE (temp) == MEM && want_value
4128 && (MEM_VOLATILE_P (temp)
4129 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
766f36c7
RK
4130 temp = copy_to_reg (temp);
4131
b258707c
RS
4132 /* If TEMP is a VOIDmode constant, use convert_modes to make
4133 sure that we properly convert it. */
4134 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4135 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4136 TYPE_MODE (TREE_TYPE (exp)), temp,
4137 SUBREG_PROMOTED_UNSIGNED_P (target));
4138
1499e0a8
RK
4139 convert_move (SUBREG_REG (target), temp,
4140 SUBREG_PROMOTED_UNSIGNED_P (target));
3dbecef9
JW
4141
4142 /* If we promoted a constant, change the mode back down to match
4143 target. Otherwise, the caller might get confused by a result whose
4144 mode is larger than expected. */
4145
4146 if (want_value && GET_MODE (temp) != GET_MODE (target)
4147 && GET_MODE (temp) != VOIDmode)
4148 {
ddef6bc7 4149 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
3dbecef9
JW
4150 SUBREG_PROMOTED_VAR_P (temp) = 1;
4151 SUBREG_PROMOTED_UNSIGNED_P (temp)
4152 = SUBREG_PROMOTED_UNSIGNED_P (target);
4153 }
4154
709f5be1 4155 return want_value ? temp : NULL_RTX;
1499e0a8 4156 }
bbf6f052
RK
4157 else
4158 {
4159 temp = expand_expr (exp, target, GET_MODE (target), 0);
766f36c7 4160 /* Return TARGET if it's a specified hardware register.
709f5be1
RS
4161 If TARGET is a volatile mem ref, either return TARGET
4162 or return a reg copied *from* TARGET; ANSI requires this.
4163
4164 Otherwise, if TEMP is not TARGET, return TEMP
4165 if it is constant (for efficiency),
4166 or if we really want the correct value. */
bbf6f052
RK
4167 if (!(target && GET_CODE (target) == REG
4168 && REGNO (target) < FIRST_PSEUDO_REGISTER)
709f5be1 4169 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
effbcc6a 4170 && ! rtx_equal_p (temp, target)
709f5be1 4171 && (CONSTANT_P (temp) || want_value))
bbf6f052
RK
4172 dont_return_target = 1;
4173 }
4174
b258707c
RS
4175 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4176 the same as that of TARGET, adjust the constant. This is needed, for
4177 example, in case it is a CONST_DOUBLE and we want only a word-sized
4178 value. */
4179 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
c1da1f33 4180 && TREE_CODE (exp) != ERROR_MARK
b258707c
RS
4181 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4182 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4183 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4184
7d384cc0 4185 if (current_function_check_memory_usage
921b3427
RK
4186 && GET_CODE (target) == MEM
4187 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
4188 {
ea4da9db 4189 in_check_memory_usage = 1;
921b3427 4190 if (GET_CODE (temp) == MEM)
ebb1b59a
BS
4191 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
4192 VOIDmode, 3, XEXP (target, 0), Pmode,
6a9c4aed 4193 XEXP (temp, 0), Pmode,
921b3427
RK
4194 expr_size (exp), TYPE_MODE (sizetype));
4195 else
ebb1b59a
BS
4196 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
4197 VOIDmode, 3, XEXP (target, 0), Pmode,
921b3427 4198 expr_size (exp), TYPE_MODE (sizetype),
3a94c984 4199 GEN_INT (MEMORY_USE_WO),
956d6950 4200 TYPE_MODE (integer_type_node));
ea4da9db 4201 in_check_memory_usage = 0;
921b3427
RK
4202 }
4203
bbf6f052
RK
4204 /* If value was not generated in the target, store it there.
 4205	 Convert the value to TARGET's type first if necessary.  */
f3f2255a
R
4206 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
4207 one or both of them are volatile memory refs, we have to distinguish
4208 two cases:
 4209	 - expand_expr has used TARGET.  In this case, we must not generate
 4210	 another copy.  This can be detected by TEMP and TARGET being
 4211	 equal according to == .
4212 - expand_expr has not used TARGET - that means that the source just
4213 happens to have the same RTX form. Since temp will have been created
4214 by expand_expr, it will compare unequal according to == .
4215 We must generate a copy in this case, to reach the correct number
4216 of volatile memory references. */
bbf6f052 4217
6036acbb 4218 if ((! rtx_equal_p (temp, target)
f3f2255a
R
4219 || (temp != target && (side_effects_p (temp)
4220 || side_effects_p (target))))
e5408e52
JJ
4221 && TREE_CODE (exp) != ERROR_MARK
4222 && ! dont_store_target)
bbf6f052
RK
4223 {
4224 target = protect_from_queue (target, 1);
4225 if (GET_MODE (temp) != GET_MODE (target)
f0348c25 4226 && GET_MODE (temp) != VOIDmode)
bbf6f052
RK
4227 {
4228 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4229 if (dont_return_target)
4230 {
4231 /* In this case, we will return TEMP,
4232 so make sure it has the proper mode.
4233 But don't forget to store the value into TARGET. */
4234 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4235 emit_move_insn (target, temp);
4236 }
4237 else
4238 convert_move (target, temp, unsignedp);
4239 }
4240
4241 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4242 {
4243 /* Handle copying a string constant into an array.
4244 The string constant may be shorter than the array.
4245 So copy just the string's actual length, and clear the rest. */
4246 rtx size;
22619c3f 4247 rtx addr;
bbf6f052 4248
e87b4f3f
RS
4249 /* Get the size of the data type of the string,
4250 which is actually the size of the target. */
4251 size = expr_size (exp);
4252 if (GET_CODE (size) == CONST_INT
4253 && INTVAL (size) < TREE_STRING_LENGTH (exp))
19caa751 4254 emit_block_move (target, temp, size, TYPE_ALIGN (TREE_TYPE (exp)));
e87b4f3f 4255 else
bbf6f052 4256 {
e87b4f3f
RS
4257 /* Compute the size of the data to copy from the string. */
4258 tree copy_size
c03b7665 4259 = size_binop (MIN_EXPR,
b50d17a1 4260 make_tree (sizetype, size),
fed3cef0 4261 size_int (TREE_STRING_LENGTH (exp)));
f9e158c3 4262 unsigned int align = TYPE_ALIGN (TREE_TYPE (exp));
906c4e36
RK
4263 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4264 VOIDmode, 0);
e87b4f3f
RS
4265 rtx label = 0;
4266
4267 /* Copy that much. */
4268 emit_block_move (target, temp, copy_size_rtx,
19caa751 4269 TYPE_ALIGN (TREE_TYPE (exp)));
e87b4f3f 4270
88f63c77
RK
4271 /* Figure out how much is left in TARGET that we have to clear.
4272 Do all calculations in ptr_mode. */
4273
4274 addr = XEXP (target, 0);
4275 addr = convert_modes (ptr_mode, Pmode, addr, 1);
4276
e87b4f3f
RS
4277 if (GET_CODE (copy_size_rtx) == CONST_INT)
4278 {
88f63c77 4279 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3a94c984 4280 size = plus_constant (size, -TREE_STRING_LENGTH (exp));
8752c357
AJ
4281 align = MIN (align,
4282 (unsigned int) (BITS_PER_UNIT
4283 * (INTVAL (copy_size_rtx)
4284 & - INTVAL (copy_size_rtx))));
e87b4f3f
RS
4285 }
4286 else
4287 {
88f63c77
RK
4288 addr = force_reg (ptr_mode, addr);
4289 addr = expand_binop (ptr_mode, add_optab, addr,
906c4e36
RK
4290 copy_size_rtx, NULL_RTX, 0,
4291 OPTAB_LIB_WIDEN);
e87b4f3f 4292
88f63c77 4293 size = expand_binop (ptr_mode, sub_optab, size,
906c4e36
RK
4294 copy_size_rtx, NULL_RTX, 0,
4295 OPTAB_LIB_WIDEN);
e87b4f3f 4296
2a5b96fd 4297 align = BITS_PER_UNIT;
e87b4f3f 4298 label = gen_label_rtx ();
c5d5d461
JL
4299 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4300 GET_MODE (size), 0, 0, label);
e87b4f3f 4301 }
2a5b96fd 4302 align = MIN (align, expr_align (copy_size));
e87b4f3f
RS
4303
4304 if (size != const0_rtx)
4305 {
3bdf5ad1
RK
4306 rtx dest = gen_rtx_MEM (BLKmode, addr);
4307
4308 MEM_COPY_ATTRIBUTES (dest, target);
4309
921b3427 4310 /* Be sure we can write on ADDR. */
ea4da9db 4311 in_check_memory_usage = 1;
7d384cc0 4312 if (current_function_check_memory_usage)
ebb1b59a
BS
4313 emit_library_call (chkr_check_addr_libfunc,
4314 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
6a9c4aed 4315 addr, Pmode,
921b3427 4316 size, TYPE_MODE (sizetype),
3a94c984 4317 GEN_INT (MEMORY_USE_WO),
956d6950 4318 TYPE_MODE (integer_type_node));
ea4da9db 4319 in_check_memory_usage = 0;
051ffad5 4320 clear_storage (dest, size, align);
e87b4f3f 4321 }
22619c3f 4322
e87b4f3f
RS
4323 if (label)
4324 emit_label (label);
bbf6f052
RK
4325 }
4326 }
fffa9c1d
JW
4327 /* Handle calls that return values in multiple non-contiguous locations.
4328 The Irix 6 ABI has examples of this. */
4329 else if (GET_CODE (target) == PARALLEL)
aac5cc16 4330 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
19caa751 4331 TYPE_ALIGN (TREE_TYPE (exp)));
bbf6f052
RK
4332 else if (GET_MODE (temp) == BLKmode)
4333 emit_block_move (target, temp, expr_size (exp),
19caa751 4334 TYPE_ALIGN (TREE_TYPE (exp)));
bbf6f052
RK
4335 else
4336 emit_move_insn (target, temp);
4337 }
709f5be1 4338
766f36c7
RK
4339 /* If we don't want a value, return NULL_RTX. */
4340 if (! want_value)
4341 return NULL_RTX;
4342
4343 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4344 ??? The latter test doesn't seem to make sense. */
4345 else if (dont_return_target && GET_CODE (temp) != MEM)
bbf6f052 4346 return temp;
766f36c7
RK
4347
4348 /* Return TARGET itself if it is a hard register. */
4349 else if (want_value && GET_MODE (target) != BLKmode
4350 && ! (GET_CODE (target) == REG
4351 && REGNO (target) < FIRST_PSEUDO_REGISTER))
709f5be1 4352 return copy_to_reg (target);
3a94c984 4353
766f36c7 4354 else
709f5be1 4355 return target;
bbf6f052
RK
4356}
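
/* The STRING_CST branch above, written out at the library level: copy
   only the bytes the string constant provides, then clear the tail of
   the target, so `char buf[8] = "hi"' leaves buf[2..7] zero.  memcpy and
   memset stand in for emit_block_move and clear_storage; a sketch only,
   guarded out of compilation.  */
#if 0
#include <stdio.h>
#include <string.h>

int
main (void)
{
  char buf[8];
  const char init[] = "hi";	/* string shorter than the array */
  size_t copy = sizeof init;	/* bytes the string provides: 3, with NUL */

  memcpy (buf, init, copy);			/* the emit_block_move part */
  memset (buf + copy, 0, sizeof buf - copy);	/* the clear_storage part */

  printf ("%s %d\n", buf, buf[7]);	/* prints: hi 0 */
  return 0;
}
#endif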
4357\f
9de08200
RK
 4358/* Return 1 if EXP contains nothing but zeros.  */
4359
4360static int
4361is_zeros_p (exp)
4362 tree exp;
4363{
4364 tree elt;
4365
4366 switch (TREE_CODE (exp))
4367 {
4368 case CONVERT_EXPR:
4369 case NOP_EXPR:
4370 case NON_LVALUE_EXPR:
4371 return is_zeros_p (TREE_OPERAND (exp, 0));
4372
4373 case INTEGER_CST:
05bccae2 4374 return integer_zerop (exp);
9de08200
RK
4375
4376 case COMPLEX_CST:
4377 return
4378 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4379
4380 case REAL_CST:
41c9120b 4381 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
9de08200
RK
4382
4383 case CONSTRUCTOR:
e1a43f73
PB
4384 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4385 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
9de08200
RK
4386 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4387 if (! is_zeros_p (TREE_VALUE (elt)))
4388 return 0;
4389
4390 return 1;
3a94c984 4391
e9a25f70
JL
4392 default:
4393 return 0;
9de08200 4394 }
9de08200
RK
4395}
4396
 4397/* Return 1 if EXP contains mostly (at least 3/4) zeros.  */
4398
4399static int
4400mostly_zeros_p (exp)
4401 tree exp;
4402{
9de08200
RK
4403 if (TREE_CODE (exp) == CONSTRUCTOR)
4404 {
e1a43f73
PB
4405 int elts = 0, zeros = 0;
4406 tree elt = CONSTRUCTOR_ELTS (exp);
4407 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4408 {
4409 /* If there are no ranges of true bits, it is all zero. */
4410 return elt == NULL_TREE;
4411 }
4412 for (; elt; elt = TREE_CHAIN (elt))
4413 {
4414 /* We do not handle the case where the index is a RANGE_EXPR,
4415 so the statistic will be somewhat inaccurate.
4416 We do make a more accurate count in store_constructor itself,
 4417	 and since this function is only used for nested array elements,
0f41302f 4418 this should be close enough. */
e1a43f73
PB
4419 if (mostly_zeros_p (TREE_VALUE (elt)))
4420 zeros++;
4421 elts++;
4422 }
9de08200
RK
4423
4424 return 4 * zeros >= 3 * elts;
4425 }
4426
4427 return is_zeros_p (exp);
4428}
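
/* The 3/4 test above in isolation: an aggregate counts as "mostly zeros"
   when 4 * zeros >= 3 * elts, i.e. at least three quarters of its
   elements are (recursively) zero.  A toy rendition over an int array,
   with invented names, guarded out of compilation.  */
#if 0
#include <stdio.h>

static int
toy_mostly_zeros_p (const int *v, int n)
{
  int zeros = 0, i;

  for (i = 0; i < n; i++)
    if (v[i] == 0)
      zeros++;

  return 4 * zeros >= 3 * n;	/* all-integer form of zeros/n >= 3/4 */
}

int
main (void)
{
  int a[8] = { 0, 0, 0, 0, 0, 0, 1, 2 };	/* 6 of 8 zero: 24 >= 24 */
  int b[8] = { 0, 0, 0, 0, 0, 1, 2, 3 };	/* 5 of 8 zero: 20 <  24 */

  printf ("%d %d\n", toy_mostly_zeros_p (a, 8), toy_mostly_zeros_p (b, 8));
  return 0;
}
#endif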
4429\f
e1a43f73
PB
4430/* Helper function for store_constructor.
4431 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4432 TYPE is the type of the CONSTRUCTOR, not the element type.
c5c76735 4433 ALIGN and CLEARED are as for store_constructor.
23cb1766 4434 ALIAS_SET is the alias set to use for any stores.
23ccec44
JW
4435
4436 This provides a recursive shortcut back to store_constructor when it isn't
4437 necessary to go through store_field. This is so that we can pass through
4438 the cleared field to let store_constructor know that we may not have to
4439 clear a substructure if the outer structure has already been cleared. */
e1a43f73
PB
4440
4441static void
4442store_constructor_field (target, bitsize, bitpos,
23cb1766 4443 mode, exp, type, align, cleared, alias_set)
e1a43f73 4444 rtx target;
770ae6cc
RK
4445 unsigned HOST_WIDE_INT bitsize;
4446 HOST_WIDE_INT bitpos;
e1a43f73
PB
4447 enum machine_mode mode;
4448 tree exp, type;
729a2125 4449 unsigned int align;
e1a43f73 4450 int cleared;
23cb1766 4451 int alias_set;
e1a43f73
PB
4452{
4453 if (TREE_CODE (exp) == CONSTRUCTOR
23ccec44
JW
4454 && bitpos % BITS_PER_UNIT == 0
4455 /* If we have a non-zero bitpos for a register target, then we just
4456 let store_field do the bitfield handling. This is unlikely to
 4457	 generate unnecessary clear instructions anyway.  */
4458 && (bitpos == 0 || GET_CODE (target) == MEM))
e1a43f73 4459 {
126e5b0d 4460 if (bitpos != 0)
ce64861e
RK
4461 target
4462 = change_address (target,
4463 GET_MODE (target) == BLKmode
4464 || 0 != (bitpos
4465 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4466 ? BLKmode : VOIDmode,
4467 plus_constant (XEXP (target, 0),
4468 bitpos / BITS_PER_UNIT));
23cb1766 4469
e0339ef7
RK
4470
4471 /* Show the alignment may no longer be what it was and update the alias
4472 set, if required. */
eeebb824 4473 if (bitpos != 0)
8752c357 4474 align = MIN (align, (unsigned int) bitpos & - bitpos);
832ea3b3
FS
4475 if (GET_CODE (target) == MEM)
4476 MEM_ALIAS_SET (target) = alias_set;
e0339ef7 4477
b7010412 4478 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
e1a43f73
PB
4479 }
4480 else
19caa751 4481 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, align,
23cb1766 4482 int_size_in_bytes (type), alias_set);
e1a43f73
PB
4483}
4484
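/* The kind of initializer that exercises the shortcut above: the inner
   braces are themselves a CONSTRUCTOR, so store_constructor_field can
   re-enter store_constructor with CLEARED still set (one clear of the
   whole object covers both the omitted .c and the zeros inside .in)
   instead of spilling through store_field.  Source-level illustration
   with invented type names, guarded out.  */
#if 0
struct toy_inner { int a, b; };
struct toy_outer { struct toy_inner in; int c; };

struct toy_outer
toy_make_outer (void)
{
  struct toy_outer o = { { 1, 2 } };	/* .c omitted: relies on clearing */
  return o;
}
#endif
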
bbf6f052 4485/* Store the value of constructor EXP into the rtx TARGET.
e1a43f73 4486 TARGET is either a REG or a MEM.
19caa751 4487 ALIGN is the maximum known alignment for TARGET.
b7010412
RK
 4488	 CLEARED is true if TARGET is known to have been zeroed.
4489 SIZE is the number of bytes of TARGET we are allowed to modify: this
4490 may not be the same as the size of EXP if we are assigning to a field
4491 which has been packed to exclude padding bits. */
bbf6f052
RK
4492
4493static void
b7010412 4494store_constructor (exp, target, align, cleared, size)
bbf6f052
RK
4495 tree exp;
4496 rtx target;
729a2125 4497 unsigned int align;
e1a43f73 4498 int cleared;
13eb1f7f 4499 HOST_WIDE_INT size;
bbf6f052 4500{
4af3895e 4501 tree type = TREE_TYPE (exp);
a5efcd63 4502#ifdef WORD_REGISTER_OPERATIONS
13eb1f7f 4503 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
a5efcd63 4504#endif
4af3895e 4505
bbf6f052
RK
4506 /* We know our target cannot conflict, since safe_from_p has been called. */
4507#if 0
4508 /* Don't try copying piece by piece into a hard register
4509 since that is vulnerable to being clobbered by EXP.
4510 Instead, construct in a pseudo register and then copy it all. */
4511 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4512 {
4513 rtx temp = gen_reg_rtx (GET_MODE (target));
7205485e 4514 store_constructor (exp, temp, align, cleared, size);
bbf6f052
RK
4515 emit_move_insn (target, temp);
4516 return;
4517 }
4518#endif
4519
e44842fe
RK
4520 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4521 || TREE_CODE (type) == QUAL_UNION_TYPE)
bbf6f052
RK
4522 {
4523 register tree elt;
4524
4af3895e 4525 /* Inform later passes that the whole union value is dead. */
dd1db5ec
RK
4526 if ((TREE_CODE (type) == UNION_TYPE
4527 || TREE_CODE (type) == QUAL_UNION_TYPE)
4528 && ! cleared)
a59f8640
R
4529 {
4530 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4531
4532 /* If the constructor is empty, clear the union. */
4533 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
19caa751 4534 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
a59f8640 4535 }
4af3895e
JVA
4536
4537 /* If we are building a static constructor into a register,
 4538	 set the initial value to zero so we can fold the value into
67225c15
RK
4539 a constant. But if more than one register is involved,
4540 this probably loses. */
4541 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4542 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
9de08200
RK
4543 {
4544 if (! cleared)
e9a25f70 4545 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4af3895e 4546
9de08200
RK
4547 cleared = 1;
4548 }
4549
4550 /* If the constructor has fewer fields than the structure
4551 or if we are initializing the structure to mostly zeros,
0d97bf4c 4552 clear the whole structure first. Don't do this if TARGET is a
fcf1b822
RK
4553 register whose mode size isn't equal to SIZE since clear_storage
4554 can't handle this case. */
9376fcd6
RK
4555 else if (size > 0
4556 && ((list_length (CONSTRUCTOR_ELTS (exp))
c3b247b4 4557 != fields_length (type))
fcf1b822
RK
4558 || mostly_zeros_p (exp))
4559 && (GET_CODE (target) != REG
8752c357 4560 || (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target)) == size))
9de08200
RK
4561 {
4562 if (! cleared)
19caa751 4563 clear_storage (target, GEN_INT (size), align);
9de08200
RK
4564
4565 cleared = 1;
4566 }
dd1db5ec 4567 else if (! cleared)
bbf6f052 4568 /* Inform later passes that the old value is dead. */
38a448ca 4569 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
bbf6f052
RK
4570
4571 /* Store each element of the constructor into
4572 the corresponding field of TARGET. */
4573
4574 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4575 {
4576 register tree field = TREE_PURPOSE (elt);
c5c76735 4577#ifdef WORD_REGISTER_OPERATIONS
34c73909 4578 tree value = TREE_VALUE (elt);
c5c76735 4579#endif
bbf6f052 4580 register enum machine_mode mode;
770ae6cc
RK
4581 HOST_WIDE_INT bitsize;
4582 HOST_WIDE_INT bitpos = 0;
bbf6f052 4583 int unsignedp;
770ae6cc 4584 tree offset;
b50d17a1 4585 rtx to_rtx = target;
bbf6f052 4586
f32fd778
RS
4587 /* Just ignore missing fields.
4588 We cleared the whole structure, above,
4589 if any fields are missing. */
4590 if (field == 0)
4591 continue;
4592
e1a43f73
PB
4593 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4594 continue;
9de08200 4595
770ae6cc
RK
4596 if (host_integerp (DECL_SIZE (field), 1))
4597 bitsize = tree_low_cst (DECL_SIZE (field), 1);
14a774a9
RK
4598 else
4599 bitsize = -1;
4600
bbf6f052
RK
4601 unsignedp = TREE_UNSIGNED (field);
4602 mode = DECL_MODE (field);
4603 if (DECL_BIT_FIELD (field))
4604 mode = VOIDmode;
4605
770ae6cc
RK
4606 offset = DECL_FIELD_OFFSET (field);
4607 if (host_integerp (offset, 0)
4608 && host_integerp (bit_position (field), 0))
4609 {
4610 bitpos = int_bit_position (field);
4611 offset = 0;
4612 }
b50d17a1 4613 else
770ae6cc 4614 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
3a94c984 4615
b50d17a1
RK
4616 if (offset)
4617 {
4618 rtx offset_rtx;
4619
4620 if (contains_placeholder_p (offset))
7fa96708 4621 offset = build (WITH_RECORD_EXPR, sizetype,
956d6950 4622 offset, make_tree (TREE_TYPE (exp), target));
bbf6f052 4623
b50d17a1
RK
4624 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4625 if (GET_CODE (to_rtx) != MEM)
4626 abort ();
4627
3a94c984
KH
4628 if (GET_MODE (offset_rtx) != ptr_mode)
4629 {
bd070e1a 4630#ifdef POINTERS_EXTEND_UNSIGNED
822a3443 4631 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
bd070e1a
RH
4632#else
4633 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4634#endif
4635 }
4636
b50d17a1
RK
4637 to_rtx
4638 = change_address (to_rtx, VOIDmode,
38a448ca 4639 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
c5c76735
JL
4640 force_reg (ptr_mode,
4641 offset_rtx)));
7fa96708 4642 align = DECL_OFFSET_ALIGN (field);
b50d17a1 4643 }
c5c76735 4644
cf04eb80
RK
4645 if (TREE_READONLY (field))
4646 {
9151b3bf 4647 if (GET_CODE (to_rtx) == MEM)
effbcc6a
RK
4648 to_rtx = copy_rtx (to_rtx);
4649
cf04eb80
RK
4650 RTX_UNCHANGING_P (to_rtx) = 1;
4651 }
4652
34c73909
R
4653#ifdef WORD_REGISTER_OPERATIONS
4654 /* If this initializes a field that is smaller than a word, at the
4655 start of a word, try to widen it to a full word.
4656 This special case allows us to output C++ member function
4657 initializations in a form that the optimizers can understand. */
770ae6cc 4658 if (GET_CODE (target) == REG
34c73909
R
4659 && bitsize < BITS_PER_WORD
4660 && bitpos % BITS_PER_WORD == 0
4661 && GET_MODE_CLASS (mode) == MODE_INT
4662 && TREE_CODE (value) == INTEGER_CST
13eb1f7f
RK
4663 && exp_size >= 0
4664 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
34c73909
R
4665 {
4666 tree type = TREE_TYPE (value);
4667 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4668 {
4669 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4670 value = convert (type, value);
4671 }
4672 if (BYTES_BIG_ENDIAN)
4673 value
4674 = fold (build (LSHIFT_EXPR, type, value,
4675 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4676 bitsize = BITS_PER_WORD;
4677 mode = word_mode;
4678 }
4679#endif
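	  /* The shift arithmetic behind the widening above, in isolation:
	     on a big-endian target a BITSIZE-bit field at the start of a
	     word occupies the high-order bits, so the constant moves left
	     by BITS_PER_WORD - BITSIZE before being stored as a full word.
	     Toy constants and invented names, guarded out.  */
#if 0
#include <stdio.h>

#define TOY_BITS_PER_WORD 32

int
main (void)
{
  unsigned int bitsize = 8;
  unsigned int value = 0x5a;
  unsigned int word = value << (TOY_BITS_PER_WORD - bitsize);

  printf ("0x%08x\n", word);	/* prints 0x5a000000 */
  return 0;
}
#endif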
c5c76735 4680 store_constructor_field (to_rtx, bitsize, bitpos, mode,
23cb1766 4681 TREE_VALUE (elt), type, align, cleared,
963a2a84 4682 (DECL_NONADDRESSABLE_P (field)
1ccfe3fa 4683 && GET_CODE (to_rtx) == MEM)
23cb1766
RK
4684 ? MEM_ALIAS_SET (to_rtx)
4685 : get_alias_set (TREE_TYPE (field)));
bbf6f052
RK
4686 }
4687 }
4af3895e 4688 else if (TREE_CODE (type) == ARRAY_TYPE)
bbf6f052
RK
4689 {
4690 register tree elt;
4691 register int i;
e1a43f73 4692 int need_to_clear;
4af3895e 4693 tree domain = TYPE_DOMAIN (type);
4af3895e 4694 tree elttype = TREE_TYPE (type);
85f3d674
RK
4695 int const_bounds_p = (host_integerp (TYPE_MIN_VALUE (domain), 0)
4696 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4697 HOST_WIDE_INT minelt;
4698 HOST_WIDE_INT maxelt;
4699
4700 /* If we have constant bounds for the range of the type, get them. */
4701 if (const_bounds_p)
4702 {
4703 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4704 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4705 }
bbf6f052 4706
e1a43f73 4707 /* If the constructor has fewer elements than the array,
38e01259 4708 clear the whole array first. Similarly if this is
e1a43f73
PB
 4709	 a static constructor of a non-BLKmode object.  */
4710 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4711 need_to_clear = 1;
4712 else
4713 {
4714 HOST_WIDE_INT count = 0, zero_count = 0;
85f3d674
RK
4715 need_to_clear = ! const_bounds_p;
4716
e1a43f73
PB
4717 /* This loop is a more accurate version of the loop in
4718 mostly_zeros_p (it handles RANGE_EXPR in an index).
4719 It is also needed to check for missing elements. */
4720 for (elt = CONSTRUCTOR_ELTS (exp);
85f3d674 4721 elt != NULL_TREE && ! need_to_clear;
df0faff1 4722 elt = TREE_CHAIN (elt))
e1a43f73
PB
4723 {
4724 tree index = TREE_PURPOSE (elt);
4725 HOST_WIDE_INT this_node_count;
19caa751 4726
e1a43f73
PB
4727 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4728 {
4729 tree lo_index = TREE_OPERAND (index, 0);
4730 tree hi_index = TREE_OPERAND (index, 1);
05bccae2 4731
19caa751
RK
4732 if (! host_integerp (lo_index, 1)
4733 || ! host_integerp (hi_index, 1))
e1a43f73
PB
4734 {
4735 need_to_clear = 1;
4736 break;
4737 }
19caa751
RK
4738
4739 this_node_count = (tree_low_cst (hi_index, 1)
4740 - tree_low_cst (lo_index, 1) + 1);
e1a43f73
PB
4741 }
4742 else
4743 this_node_count = 1;
85f3d674 4744
e1a43f73
PB
4745 count += this_node_count;
4746 if (mostly_zeros_p (TREE_VALUE (elt)))
4747 zero_count += this_node_count;
4748 }
85f3d674 4749
8e958f70 4750 /* Clear the entire array first if there are any missing elements,
0f41302f 4751 or if the incidence of zero elements is >= 75%. */
85f3d674
RK
4752 if (! need_to_clear
4753 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
e1a43f73
PB
4754 need_to_clear = 1;
4755 }
85f3d674 4756
9376fcd6 4757 if (need_to_clear && size > 0)
9de08200
RK
4758 {
4759 if (! cleared)
19caa751 4760 clear_storage (target, GEN_INT (size), align);
9de08200
RK
4761 cleared = 1;
4762 }
bbf6f052
RK
4763 else
4764 /* Inform later passes that the old value is dead. */
38a448ca 4765 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
bbf6f052
RK
4766
4767 /* Store each element of the constructor into
4768 the corresponding element of TARGET, determined
4769 by counting the elements. */
4770 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4771 elt;
4772 elt = TREE_CHAIN (elt), i++)
4773 {
4774 register enum machine_mode mode;
19caa751
RK
4775 HOST_WIDE_INT bitsize;
4776 HOST_WIDE_INT bitpos;
bbf6f052 4777 int unsignedp;
e1a43f73 4778 tree value = TREE_VALUE (elt);
729a2125 4779 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
03dc44a6
RS
4780 tree index = TREE_PURPOSE (elt);
4781 rtx xtarget = target;
bbf6f052 4782
e1a43f73
PB
4783 if (cleared && is_zeros_p (value))
4784 continue;
9de08200 4785
bbf6f052 4786 unsignedp = TREE_UNSIGNED (elttype);
14a774a9
RK
4787 mode = TYPE_MODE (elttype);
4788 if (mode == BLKmode)
19caa751
RK
4789 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4790 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4791 : -1);
14a774a9
RK
4792 else
4793 bitsize = GET_MODE_BITSIZE (mode);
bbf6f052 4794
e1a43f73
PB
4795 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4796 {
4797 tree lo_index = TREE_OPERAND (index, 0);
4798 tree hi_index = TREE_OPERAND (index, 1);
4799 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4800 struct nesting *loop;
05c0b405
PB
4801 HOST_WIDE_INT lo, hi, count;
4802 tree position;
e1a43f73 4803
0f41302f 4804 /* If the range is constant and "small", unroll the loop. */
85f3d674
RK
4805 if (const_bounds_p
4806 && host_integerp (lo_index, 0)
19caa751
RK
4807 && host_integerp (hi_index, 0)
4808 && (lo = tree_low_cst (lo_index, 0),
4809 hi = tree_low_cst (hi_index, 0),
05c0b405
PB
4810 count = hi - lo + 1,
4811 (GET_CODE (target) != MEM
4812 || count <= 2
19caa751
RK
4813 || (host_integerp (TYPE_SIZE (elttype), 1)
4814 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4815 <= 40 * 8)))))
e1a43f73 4816 {
05c0b405
PB
4817 lo -= minelt; hi -= minelt;
4818 for (; lo <= hi; lo++)
e1a43f73 4819 {
19caa751 4820 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
23cb1766
RK
4821 store_constructor_field
4822 (target, bitsize, bitpos, mode, value, type, align,
4823 cleared,
4824 TYPE_NONALIASED_COMPONENT (type)
4825 ? MEM_ALIAS_SET (target) : get_alias_set (elttype));
e1a43f73
PB
4826 }
4827 }
4828 else
4829 {
4830 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4831 loop_top = gen_label_rtx ();
4832 loop_end = gen_label_rtx ();
4833
4834 unsignedp = TREE_UNSIGNED (domain);
4835
4836 index = build_decl (VAR_DECL, NULL_TREE, domain);
4837
19e7881c 4838 index_r
e1a43f73
PB
4839 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4840 &unsignedp, 0));
19e7881c 4841 SET_DECL_RTL (index, index_r);
e1a43f73
PB
4842 if (TREE_CODE (value) == SAVE_EXPR
4843 && SAVE_EXPR_RTL (value) == 0)
4844 {
0f41302f
MS
4845 /* Make sure value gets expanded once before the
4846 loop. */
e1a43f73
PB
4847 expand_expr (value, const0_rtx, VOIDmode, 0);
4848 emit_queue ();
4849 }
4850 store_expr (lo_index, index_r, 0);
4851 loop = expand_start_loop (0);
4852
0f41302f 4853 /* Assign value to element index. */
fed3cef0
RK
4854 position
4855 = convert (ssizetype,
4856 fold (build (MINUS_EXPR, TREE_TYPE (index),
4857 index, TYPE_MIN_VALUE (domain))));
4858 position = size_binop (MULT_EXPR, position,
4859 convert (ssizetype,
4860 TYPE_SIZE_UNIT (elttype)));
4861
e1a43f73 4862 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
38a448ca 4863 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
e1a43f73
PB
4864 xtarget = change_address (target, mode, addr);
4865 if (TREE_CODE (value) == CONSTRUCTOR)
b7010412
RK
4866 store_constructor (value, xtarget, align, cleared,
4867 bitsize / BITS_PER_UNIT);
e1a43f73
PB
4868 else
4869 store_expr (value, xtarget, 0);
4870
4871 expand_exit_loop_if_false (loop,
4872 build (LT_EXPR, integer_type_node,
4873 index, hi_index));
4874
4875 expand_increment (build (PREINCREMENT_EXPR,
4876 TREE_TYPE (index),
7b8b9722 4877 index, integer_one_node), 0, 0);
e1a43f73
PB
4878 expand_end_loop ();
4879 emit_label (loop_end);
e1a43f73
PB
4880 }
4881 }
19caa751
RK
4882 else if ((index != 0 && ! host_integerp (index, 0))
4883 || ! host_integerp (TYPE_SIZE (elttype), 1))
03dc44a6 4884 {
e1a43f73 4885 rtx pos_rtx, addr;
03dc44a6
RS
4886 tree position;
4887
5b6c44ff 4888 if (index == 0)
fed3cef0 4889 index = ssize_int (1);
5b6c44ff 4890
e1a43f73 4891 if (minelt)
fed3cef0
RK
4892 index = convert (ssizetype,
4893 fold (build (MINUS_EXPR, index,
4894 TYPE_MIN_VALUE (domain))));
19caa751 4895
fed3cef0
RK
4896 position = size_binop (MULT_EXPR, index,
4897 convert (ssizetype,
4898 TYPE_SIZE_UNIT (elttype)));
03dc44a6 4899 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
38a448ca 4900 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
03dc44a6 4901 xtarget = change_address (target, mode, addr);
e1a43f73 4902 store_expr (value, xtarget, 0);
03dc44a6
RS
4903 }
4904 else
4905 {
4906 if (index != 0)
19caa751
RK
4907 bitpos = ((tree_low_cst (index, 0) - minelt)
4908 * tree_low_cst (TYPE_SIZE (elttype), 1));
03dc44a6 4909 else
19caa751
RK
4910 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4911
c5c76735 4912 store_constructor_field (target, bitsize, bitpos, mode, value,
23cb1766
RK
4913 type, align, cleared,
4914 TYPE_NONALIASED_COMPONENT (type)
831ecbd4 4915 && GET_CODE (target) == MEM
23cb1766
RK
4916 ? MEM_ALIAS_SET (target) :
4917 get_alias_set (elttype));
4918
03dc44a6 4919 }
bbf6f052
RK
4920 }
4921 }
19caa751 4922
3a94c984 4923 /* Set constructor assignments. */
071a6595
PB
4924 else if (TREE_CODE (type) == SET_TYPE)
4925 {
e1a43f73 4926 tree elt = CONSTRUCTOR_ELTS (exp);
19caa751 4927 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
071a6595
PB
4928 tree domain = TYPE_DOMAIN (type);
4929 tree domain_min, domain_max, bitlength;
4930
9faa82d8 4931 /* The default implementation strategy is to extract the constant
071a6595
PB
4932 parts of the constructor, use that to initialize the target,
4933 and then "or" in whatever non-constant ranges we need in addition.
4934
4935 If a large set is all zero or all ones, it is
4936 probably better to set it using memset (if available) or bzero.
4937 Also, if a large set has just a single range, it may be better
4938 to first clear the whole set (using bzero/memset) and then set
0f41302f 4939 the bits we want. */
3a94c984 4940
0f41302f 4941 /* Check for all zeros. */
9376fcd6 4942 if (elt == NULL_TREE && size > 0)
071a6595 4943 {
e1a43f73 4944 if (!cleared)
19caa751 4945 clear_storage (target, GEN_INT (size), TYPE_ALIGN (type));
071a6595
PB
4946 return;
4947 }
4948
071a6595
PB
4949 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4950 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4951 bitlength = size_binop (PLUS_EXPR,
fed3cef0
RK
4952 size_diffop (domain_max, domain_min),
4953 ssize_int (1));
071a6595 4954
19caa751 4955 nbits = tree_low_cst (bitlength, 1);
e1a43f73
PB
4956
4957 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4958 are "complicated" (more than one range), initialize (the
3a94c984 4959 constant parts) by copying from a constant. */
e1a43f73
PB
4960 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4961 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
071a6595 4962 {
19caa751 4963 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
b4ee5a72 4964 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
0f41302f 4965 char *bit_buffer = (char *) alloca (nbits);
b4ee5a72 4966 HOST_WIDE_INT word = 0;
19caa751
RK
4967 unsigned int bit_pos = 0;
4968 unsigned int ibit = 0;
4969 unsigned int offset = 0; /* In bytes from beginning of set. */
4970
e1a43f73 4971 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
b4ee5a72 4972 for (;;)
071a6595 4973 {
b4ee5a72
PB
4974 if (bit_buffer[ibit])
4975 {
b09f3348 4976 if (BYTES_BIG_ENDIAN)
b4ee5a72
PB
4977 word |= (1 << (set_word_size - 1 - bit_pos));
4978 else
4979 word |= 1 << bit_pos;
4980 }
19caa751 4981
b4ee5a72
PB
4982 bit_pos++; ibit++;
4983 if (bit_pos >= set_word_size || ibit == nbits)
071a6595 4984 {
e1a43f73
PB
4985 if (word != 0 || ! cleared)
4986 {
4987 rtx datum = GEN_INT (word);
4988 rtx to_rtx;
19caa751 4989
0f41302f
MS
4990 /* The assumption here is that it is safe to use
4991 XEXP if the set is multi-word, but not if
4992 it's single-word. */
e1a43f73
PB
4993 if (GET_CODE (target) == MEM)
4994 {
4995 to_rtx = plus_constant (XEXP (target, 0), offset);
4996 to_rtx = change_address (target, mode, to_rtx);
4997 }
3a94c984 4998 else if (offset == 0)
e1a43f73
PB
4999 to_rtx = target;
5000 else
5001 abort ();
5002 emit_move_insn (to_rtx, datum);
5003 }
19caa751 5004
b4ee5a72
PB
5005 if (ibit == nbits)
5006 break;
5007 word = 0;
5008 bit_pos = 0;
5009 offset += set_word_size / BITS_PER_UNIT;
071a6595
PB
5010 }
5011 }
071a6595 5012 }
e1a43f73 5013 else if (!cleared)
19caa751
RK
5014 /* Don't bother clearing storage if the set is all ones. */
5015 if (TREE_CHAIN (elt) != NULL_TREE
5016 || (TREE_PURPOSE (elt) == NULL_TREE
5017 ? nbits != 1
5018 : ( ! host_integerp (TREE_VALUE (elt), 0)
5019 || ! host_integerp (TREE_PURPOSE (elt), 0)
5020 || (tree_low_cst (TREE_VALUE (elt), 0)
5021 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5022 != (HOST_WIDE_INT) nbits))))
5023 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
3a94c984 5024
e1a43f73 5025 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
071a6595 5026 {
3a94c984 5027 /* Start of range of element or NULL. */
071a6595 5028 tree startbit = TREE_PURPOSE (elt);
3a94c984 5029 /* End of range of element, or element value. */
071a6595 5030 tree endbit = TREE_VALUE (elt);
381127e8 5031#ifdef TARGET_MEM_FUNCTIONS
071a6595 5032 HOST_WIDE_INT startb, endb;
381127e8 5033#endif
19caa751 5034 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
071a6595
PB
5035
5036 bitlength_rtx = expand_expr (bitlength,
19caa751 5037 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
071a6595 5038
3a94c984 5039 /* Handle non-range tuple element like [ expr ]. */
071a6595
PB
5040 if (startbit == NULL_TREE)
5041 {
5042 startbit = save_expr (endbit);
5043 endbit = startbit;
5044 }
19caa751 5045
071a6595
PB
5046 startbit = convert (sizetype, startbit);
5047 endbit = convert (sizetype, endbit);
5048 if (! integer_zerop (domain_min))
5049 {
5050 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5051 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5052 }
3a94c984 5053 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
071a6595 5054 EXPAND_CONST_ADDRESS);
3a94c984 5055 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
071a6595
PB
5056 EXPAND_CONST_ADDRESS);
5057
5058 if (REG_P (target))
5059 {
1da68f56
RK
5060 targetx
5061 = assign_temp
5062 ((build_qualified_type (type_for_mode (GET_MODE (target), 0),
5063 TYPE_QUAL_CONST)),
5064 0, 1, 1);
071a6595
PB
5065 emit_move_insn (targetx, target);
5066 }
19caa751 5067
071a6595
PB
5068 else if (GET_CODE (target) == MEM)
5069 targetx = target;
5070 else
5071 abort ();
5072
5073#ifdef TARGET_MEM_FUNCTIONS
5074 /* Optimization: If startbit and endbit are
9faa82d8 5075 constants divisible by BITS_PER_UNIT,
0f41302f 5076 call memset instead. */
071a6595
PB
5077 if (TREE_CODE (startbit) == INTEGER_CST
5078 && TREE_CODE (endbit) == INTEGER_CST
5079 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
e1a43f73 5080 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
071a6595 5081 {
ebb1b59a 5082 emit_library_call (memset_libfunc, LCT_NORMAL,
071a6595 5083 VOIDmode, 3,
e1a43f73
PB
5084 plus_constant (XEXP (targetx, 0),
5085 startb / BITS_PER_UNIT),
071a6595 5086 Pmode,
3b6f75e2 5087 constm1_rtx, TYPE_MODE (integer_type_node),
071a6595 5088 GEN_INT ((endb - startb) / BITS_PER_UNIT),
3b6f75e2 5089 TYPE_MODE (sizetype));
071a6595
PB
5090 }
5091 else
5092#endif
19caa751 5093 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
ebb1b59a
BS
5094 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
5095 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
19caa751
RK
5096 startbit_rtx, TYPE_MODE (sizetype),
5097 endbit_rtx, TYPE_MODE (sizetype));
5098
071a6595
PB
5099 if (REG_P (target))
5100 emit_move_insn (target, targetx);
5101 }
5102 }
bbf6f052
RK
5103
5104 else
5105 abort ();
5106}
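/* Editor's illustrative sketch (disabled; `demo_pack_set_word' is a
   hypothetical name, not part of the compiler): how the SET_TYPE arm
   above packs constructor bits into host words.  Given a buffer of
   0/1 flags, accumulate up to SET_WORD_SIZE bits into one word,
   honoring BYTES_BIG_ENDIAN exactly as the loop above does.  */
#if 0
static HOST_WIDE_INT
demo_pack_set_word (bit_buffer, nbits, set_word_size)
     const char *bit_buffer;
     unsigned int nbits;
     unsigned int set_word_size;
{
  HOST_WIDE_INT word = 0;
  unsigned int bit_pos;

  for (bit_pos = 0; bit_pos < nbits && bit_pos < set_word_size; bit_pos++)
    if (bit_buffer[bit_pos])
      {
	/* Big-endian targets number set bit 0 at the most
	   significant end of the word.  */
	if (BYTES_BIG_ENDIAN)
	  word |= (HOST_WIDE_INT) 1 << (set_word_size - 1 - bit_pos);
	else
	  word |= (HOST_WIDE_INT) 1 << bit_pos;
      }

  return word;
}
#endif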
5107
5108/* Store the value of EXP (an expression tree)
5109 into a subfield of TARGET which has mode MODE and occupies
5110 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5111 If MODE is VOIDmode, it means that we are storing into a bit-field.
5112
5113 If VALUE_MODE is VOIDmode, return nothing in particular.
5114 UNSIGNEDP is not used in this case.
5115
5116 Otherwise, return an rtx for the value stored. This rtx
5117 has mode VALUE_MODE if that is convenient to do.
5118 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5119
19caa751 5120 ALIGN is the alignment that TARGET is known to have.
3a94c984 5121 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
ece32014
MM
5122
5123 ALIAS_SET is the alias set for the destination. This value will
5124 (in general) be different from that for TARGET, since TARGET is a
5125 reference to the containing structure. */
bbf6f052
RK
5126
5127static rtx
5128store_field (target, bitsize, bitpos, mode, exp, value_mode,
ece32014 5129 unsignedp, align, total_size, alias_set)
bbf6f052 5130 rtx target;
770ae6cc
RK
5131 HOST_WIDE_INT bitsize;
5132 HOST_WIDE_INT bitpos;
bbf6f052
RK
5133 enum machine_mode mode;
5134 tree exp;
5135 enum machine_mode value_mode;
5136 int unsignedp;
729a2125 5137 unsigned int align;
770ae6cc 5138 HOST_WIDE_INT total_size;
ece32014 5139 int alias_set;
bbf6f052 5140{
906c4e36 5141 HOST_WIDE_INT width_mask = 0;
bbf6f052 5142
e9a25f70
JL
5143 if (TREE_CODE (exp) == ERROR_MARK)
5144 return const0_rtx;
5145
2be6a7e9
RK
5146 /* If we have nothing to store, do nothing unless the expression has
5147 side-effects. */
5148 if (bitsize == 0)
5149 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5150
906c4e36
RK
5151 if (bitsize < HOST_BITS_PER_WIDE_INT)
5152 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
bbf6f052
RK
5153
5154 /* If we are storing into an unaligned field of an aligned union that is
5155 in a register, we may have the mode of TARGET being an integer mode but
5156 MODE == BLKmode. In that case, get an aligned object whose size and
5157 alignment are the same as TARGET and store TARGET into it (we can avoid
5158 the store if the field being stored is the entire width of TARGET). Then
5159 call ourselves recursively to store the field into a BLKmode version of
5160 that object. Finally, load from the object into TARGET. This is not
5161 very efficient in general, but should only be slightly more expensive
5162 than the otherwise-required unaligned accesses. Perhaps this can be
5163 cleaned up later. */
5164
5165 if (mode == BLKmode
5166 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5167 {
1da68f56
RK
5168 rtx object
5169 = assign_temp
5170 (build_qualified_type (type_for_mode (GET_MODE (target), 0),
5171 TYPE_QUAL_CONST),
5172 0, 1, 1);
bbf6f052
RK
5173 rtx blk_object = copy_rtx (object);
5174
5175 PUT_MODE (blk_object, BLKmode);
5176
8752c357 5177 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
bbf6f052
RK
5178 emit_move_insn (object, target);
5179
5180 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
ece32014 5181 align, total_size, alias_set);
bbf6f052 5182
46093b97
RS
5183 /* Even though we aren't returning target, we need to
5184 give it the updated value. */
bbf6f052
RK
5185 emit_move_insn (target, object);
5186
46093b97 5187 return blk_object;
bbf6f052 5188 }
c3b247b4
JM
5189
5190 if (GET_CODE (target) == CONCAT)
5191 {
5192 /* We're storing into a struct containing a single __complex. */
5193
5194 if (bitpos != 0)
5195 abort ();
5196 return store_expr (exp, target, 0);
5197 }
bbf6f052
RK
5198
5199 /* If the structure is in a register or if the component
5200 is a bit field, we cannot use addressing to access it.
5201 Use bit-field techniques or SUBREG to store in it. */
5202
4fa52007 5203 if (mode == VOIDmode
6ab06cbb
JW
5204 || (mode != BLKmode && ! direct_store[(int) mode]
5205 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5206 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4fa52007 5207 || GET_CODE (target) == REG
c980ac49 5208 || GET_CODE (target) == SUBREG
ccc98036
RS
5209 /* If the field isn't aligned enough to store as an ordinary memref,
5210 store it as a bit field. */
e1565e65 5211 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
19caa751 5212 && (align < GET_MODE_ALIGNMENT (mode)
14a774a9 5213 || bitpos % GET_MODE_ALIGNMENT (mode)))
e1565e65 5214 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
19caa751 5215 && (TYPE_ALIGN (TREE_TYPE (exp)) > align
14a774a9
RK
5216 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
5217 /* If the RHS and field are a constant size and the size of the
5218 RHS isn't the same size as the bitfield, we must use bitfield
5219 operations. */
05bccae2
RK
5220 || (bitsize >= 0
5221 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5222 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
bbf6f052 5223 {
906c4e36 5224 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
bbd6cf73 5225
ef19912d
RK
5226 /* If BITSIZE is narrower than the size of the type of EXP
5227 we will be narrowing TEMP. Normally, what's wanted are the
5228 low-order bits. However, if EXP's type is a record and this is
5229 big-endian machine, we want the upper BITSIZE bits. */
5230 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5231 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
5232 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5233 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5234 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5235 - bitsize),
5236 temp, 1);
5237
bbd6cf73
RK
5238 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5239 MODE. */
5240 if (mode != VOIDmode && mode != BLKmode
5241 && mode != TYPE_MODE (TREE_TYPE (exp)))
5242 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5243
a281e72d
RK
5244 /* If the modes of TARGET and TEMP are both BLKmode, both
5245 must be in memory and BITPOS must be aligned on a byte
5246 boundary. If so, we simply do a block copy. */
5247 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5248 {
19caa751 5249 unsigned int exp_align = expr_align (exp);
729a2125 5250
a281e72d
RK
5251 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5252 || bitpos % BITS_PER_UNIT != 0)
5253 abort ();
5254
0086427c
RK
5255 target = change_address (target, VOIDmode,
5256 plus_constant (XEXP (target, 0),
a281e72d
RK
5257 bitpos / BITS_PER_UNIT));
5258
729a2125
RK
5259 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
5260 align = MIN (exp_align, align);
c297a34e 5261
14a774a9 5262 /* Find an alignment that is consistent with the bit position. */
19caa751 5263 while ((bitpos % align) != 0)
14a774a9
RK
5264 align >>= 1;
5265
a281e72d 5266 emit_block_move (target, temp,
bd5dab53
RK
5267 bitsize == -1 ? expr_size (exp)
5268 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5269 / BITS_PER_UNIT),
14a774a9 5270 align);
a281e72d
RK
5271
5272 return value_mode == VOIDmode ? const0_rtx : target;
5273 }
5274
bbf6f052
RK
5275 /* Store the value in the bitfield. */
5276 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
5277 if (value_mode != VOIDmode)
5278 {
5279 /* The caller wants an rtx for the value. */
5280 /* If possible, avoid refetching from the bitfield itself. */
5281 if (width_mask != 0
5282 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5c4d7cfb 5283 {
9074de27 5284 tree count;
5c4d7cfb 5285 enum machine_mode tmode;
86a2c12a 5286
5c4d7cfb 5287 if (unsignedp)
69107307
AO
5288 return expand_and (temp,
5289 GEN_INT
5290 (trunc_int_for_mode
5291 (width_mask,
5292 GET_MODE (temp) == VOIDmode
5293 ? value_mode
5294 : GET_MODE (temp))), NULL_RTX);
5c4d7cfb 5295 tmode = GET_MODE (temp);
86a2c12a
RS
5296 if (tmode == VOIDmode)
5297 tmode = value_mode;
5c4d7cfb
RS
5298 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5299 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5300 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5301 }
bbf6f052 5302 return extract_bit_field (target, bitsize, bitpos, unsignedp,
906c4e36
RK
5303 NULL_RTX, value_mode, 0, align,
5304 total_size);
bbf6f052
RK
5305 }
5306 return const0_rtx;
5307 }
5308 else
5309 {
5310 rtx addr = XEXP (target, 0);
5311 rtx to_rtx;
5312
5313 /* If a value is wanted, it must be the lhs;
5314 so make the address stable for multiple use. */
5315
5316 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5317 && ! CONSTANT_ADDRESS_P (addr)
5318 /* A frame-pointer reference is already stable. */
5319 && ! (GET_CODE (addr) == PLUS
5320 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5321 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5322 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5323 addr = copy_to_reg (addr);
5324
5325 /* Now build a reference to just the desired component. */
5326
effbcc6a
RK
5327 to_rtx = copy_rtx (change_address (target, mode,
5328 plus_constant (addr,
5329 (bitpos
5330 / BITS_PER_UNIT))));
c6df88cb 5331 MEM_SET_IN_STRUCT_P (to_rtx, 1);
0ea834c1
MM
5332 /* If the address of the structure varies, then it might be on
5333 the stack. And, stack slots may be shared across scopes.
5334 So, two different structures, of different types, can end up
5335 at the same location. We will give the structures alias set
5336 zero; here we must be careful not to give non-zero alias sets
5337 to their fields. */
5338 if (!rtx_varies_p (addr, /*for_alias=*/0))
5339 MEM_ALIAS_SET (to_rtx) = alias_set;
5340 else
5341 MEM_ALIAS_SET (to_rtx) = 0;
bbf6f052
RK
5342
5343 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5344 }
5345}
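/* Editor's illustrative sketch (disabled; `demo_store_small_bitfield'
   is a hypothetical helper): one plausible call to store_field above,
   storing EXP into a 3-bit field starting at bit 5 of TARGET.
   Passing VOIDmode as MODE requests bit-field techniques; VOIDmode as
   VALUE_MODE says no rtx result is wanted; -1 as TOTAL_SIZE says the
   containing structure's size is unknown.  */
#if 0
static void
demo_store_small_bitfield (target, exp, align)
     rtx target;
     tree exp;
     unsigned int align;
{
  store_field (target, 3, 5, VOIDmode, exp, VOIDmode, 0, align,
	       -1, get_alias_set (TREE_TYPE (exp)));
}
#endif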
5346\f
5347/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
b4e3fabb
RK
5348 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5349 codes and find the ultimate containing object, which we return.
bbf6f052
RK
5350
5351 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5352 bit position, and *PUNSIGNEDP to the signedness of the field.
7bb0943f
RS
5353 If the position of the field is variable, we store a tree
5354 giving the variable offset (in units) in *POFFSET.
5355 This offset is in addition to the bit position.
5356 If the position is not variable, we store 0 in *POFFSET.
19caa751 5357 We set *PALIGNMENT to the alignment of the address that will be
839c4796
RK
5358 computed. This is the alignment of the thing we return if *POFFSET
5359 is zero, but can be less strictly aligned if *POFFSET is nonzero.
bbf6f052
RK
5360
5361 If any of the extraction expressions is volatile,
5362 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5363
5364 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5365 is a mode that can be used to access the field. In that case, *PBITSIZE
e7c33f54
RK
5366 is redundant.
5367
5368 If the field describes a variable-sized object, *PMODE is set to
5369 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
839c4796 5370 this case, but the address of the object can be found. */
bbf6f052
RK
5371
5372tree
4969d05d 5373get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
839c4796 5374 punsignedp, pvolatilep, palignment)
bbf6f052 5375 tree exp;
770ae6cc
RK
5376 HOST_WIDE_INT *pbitsize;
5377 HOST_WIDE_INT *pbitpos;
7bb0943f 5378 tree *poffset;
bbf6f052
RK
5379 enum machine_mode *pmode;
5380 int *punsignedp;
5381 int *pvolatilep;
729a2125 5382 unsigned int *palignment;
bbf6f052
RK
5383{
5384 tree size_tree = 0;
5385 enum machine_mode mode = VOIDmode;
fed3cef0 5386 tree offset = size_zero_node;
770ae6cc 5387 tree bit_offset = bitsize_zero_node;
c84e2712 5388 unsigned int alignment = BIGGEST_ALIGNMENT;
770ae6cc 5389 tree tem;
bbf6f052 5390
770ae6cc
RK
5391 /* First get the mode, signedness, and size. We do this from just the
5392 outermost expression. */
bbf6f052
RK
5393 if (TREE_CODE (exp) == COMPONENT_REF)
5394 {
5395 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5396 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5397 mode = DECL_MODE (TREE_OPERAND (exp, 1));
770ae6cc 5398
bbf6f052
RK
5399 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5400 }
5401 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5402 {
5403 size_tree = TREE_OPERAND (exp, 1);
5404 *punsignedp = TREE_UNSIGNED (exp);
5405 }
5406 else
5407 {
5408 mode = TYPE_MODE (TREE_TYPE (exp));
770ae6cc
RK
5409 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5410
ab87f8c8
JL
5411 if (mode == BLKmode)
5412 size_tree = TYPE_SIZE (TREE_TYPE (exp));
770ae6cc
RK
5413 else
5414 *pbitsize = GET_MODE_BITSIZE (mode);
bbf6f052 5415 }
3a94c984 5416
770ae6cc 5417 if (size_tree != 0)
bbf6f052 5418 {
770ae6cc 5419 if (! host_integerp (size_tree, 1))
e7c33f54
RK
5420 mode = BLKmode, *pbitsize = -1;
5421 else
770ae6cc 5422 *pbitsize = tree_low_cst (size_tree, 1);
bbf6f052
RK
5423 }
5424
5425 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5426 and find the ultimate containing object. */
bbf6f052
RK
5427 while (1)
5428 {
770ae6cc
RK
5429 if (TREE_CODE (exp) == BIT_FIELD_REF)
5430 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5431 else if (TREE_CODE (exp) == COMPONENT_REF)
bbf6f052 5432 {
770ae6cc
RK
5433 tree field = TREE_OPERAND (exp, 1);
5434 tree this_offset = DECL_FIELD_OFFSET (field);
bbf6f052 5435
e7f3c83f
RK
5436 /* If this field hasn't been filled in yet, don't go
5437 past it. This should only happen when folding expressions
5438 made during type construction. */
770ae6cc 5439 if (this_offset == 0)
e7f3c83f 5440 break;
770ae6cc
RK
5441 else if (! TREE_CONSTANT (this_offset)
5442 && contains_placeholder_p (this_offset))
5443 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
e7f3c83f 5444
7156dead 5445 offset = size_binop (PLUS_EXPR, offset, this_offset);
770ae6cc
RK
5446 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5447 DECL_FIELD_BIT_OFFSET (field));
e6d8c385 5448
770ae6cc
RK
5449 if (! host_integerp (offset, 0))
5450 alignment = MIN (alignment, DECL_OFFSET_ALIGN (field));
bbf6f052 5451 }
7156dead 5452
b4e3fabb
RK
5453 else if (TREE_CODE (exp) == ARRAY_REF
5454 || TREE_CODE (exp) == ARRAY_RANGE_REF)
bbf6f052 5455 {
742920c7 5456 tree index = TREE_OPERAND (exp, 1);
b4e3fabb
RK
5457 tree array = TREE_OPERAND (exp, 0);
5458 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
770ae6cc 5459 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
b4e3fabb 5460 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
742920c7 5461
770ae6cc
RK
5462 /* We assume all arrays have sizes that are a multiple of a byte.
5463 First subtract the lower bound, if any, in the type of the
5464 index, then convert to sizetype and multiply by the size of the
5465 array element. */
5466 if (low_bound != 0 && ! integer_zerop (low_bound))
5467 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5468 index, low_bound));
f8dac6eb 5469
7156dead
RK
5470 /* If the index has a self-referential type, pass it to a
5471 WITH_RECORD_EXPR; if the component size is self-referential,
5472 pass our component to one. */
770ae6cc
RK
5473 if (! TREE_CONSTANT (index)
5474 && contains_placeholder_p (index))
5475 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
7156dead
RK
5476 if (! TREE_CONSTANT (unit_size)
5477 && contains_placeholder_p (unit_size))
b4e3fabb 5478 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
742920c7 5479
770ae6cc
RK
5480 offset = size_binop (PLUS_EXPR, offset,
5481 size_binop (MULT_EXPR,
5482 convert (sizetype, index),
7156dead 5483 unit_size));
bbf6f052 5484 }
7156dead 5485
bbf6f052
RK
5486 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5487 && ! ((TREE_CODE (exp) == NOP_EXPR
5488 || TREE_CODE (exp) == CONVERT_EXPR)
5489 && (TYPE_MODE (TREE_TYPE (exp))
5490 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5491 break;
7bb0943f
RS
5492
5493 /* If any reference in the chain is volatile, the effect is volatile. */
5494 if (TREE_THIS_VOLATILE (exp))
5495 *pvolatilep = 1;
839c4796
RK
5496
5497 /* If the offset is non-constant already, then we can't assume any
5498 alignment more than the alignment here. */
770ae6cc 5499 if (! TREE_CONSTANT (offset))
839c4796
RK
5500 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5501
bbf6f052
RK
5502 exp = TREE_OPERAND (exp, 0);
5503 }
5504
2f939d94 5505 if (DECL_P (exp))
839c4796 5506 alignment = MIN (alignment, DECL_ALIGN (exp));
9293498f 5507 else if (TREE_TYPE (exp) != 0)
839c4796
RK
5508 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5509
770ae6cc
RK
5510 /* If OFFSET is constant, see if we can return the whole thing as a
5511 constant bit position. Otherwise, split it up. */
5512 if (host_integerp (offset, 0)
5513 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5514 bitsize_unit_node))
5515 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5516 && host_integerp (tem, 0))
5517 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5518 else
5519 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
b50d17a1 5520
bbf6f052 5521 *pmode = mode;
19caa751 5522 *palignment = alignment;
bbf6f052
RK
5523 return exp;
5524}
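/* Editor's illustrative sketch (disabled; names are hypothetical): a
   typical call to get_inner_reference above.  For a reference such as
   a COMPONENT_REF it peels the nest of refs down to the containing
   object and splits the displacement into a constant bit position
   plus an optional variable byte offset.  */
#if 0
static tree
demo_decompose_reference (exp)
     tree exp;
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode;
  int unsignedp, volatilep = 0;
  unsigned int alignment;

  /* On return, BITPOS holds the constant part of the position and
     OFFSET the variable part (or 0 if the position is constant).  */
  return get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			      &unsignedp, &volatilep, &alignment);
}
#endif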
921b3427
RK
5525
5526/* Subroutine of expand_expr: compute memory_usage from modifier. */
770ae6cc 5527
921b3427
RK
5528static enum memory_use_mode
5529get_memory_usage_from_modifier (modifier)
5530 enum expand_modifier modifier;
5531{
5532 switch (modifier)
5533 {
5534 case EXPAND_NORMAL:
e5e809f4 5535 case EXPAND_SUM:
921b3427
RK
5536 return MEMORY_USE_RO;
5537 break;
5538 case EXPAND_MEMORY_USE_WO:
5539 return MEMORY_USE_WO;
5540 break;
5541 case EXPAND_MEMORY_USE_RW:
5542 return MEMORY_USE_RW;
5543 break;
921b3427 5544 case EXPAND_MEMORY_USE_DONT:
e5e809f4
JL
5545 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5546 MEMORY_USE_DONT, because they are modifiers to a call of
5547 expand_expr in the ADDR_EXPR case of expand_expr. */
921b3427 5548 case EXPAND_CONST_ADDRESS:
e5e809f4 5549 case EXPAND_INITIALIZER:
921b3427
RK
5550 return MEMORY_USE_DONT;
5551 case EXPAND_MEMORY_USE_BAD:
5552 default:
5553 abort ();
5554 }
5555}
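/* Editor's note with a small disabled sketch (hypothetical name): the
   mapping above is what lets -fcheck-memory-usage classify accesses.
   Reads are expanded with EXPAND_NORMAL and checked as MEMORY_USE_RO,
   while stores use EXPAND_MEMORY_USE_WO and are checked as writes.  */
#if 0
static int
demo_access_is_checked (modifier)
     enum expand_modifier modifier;
{
  return get_memory_usage_from_modifier (modifier) != MEMORY_USE_DONT;
}
#endif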
bbf6f052 5556\f
3fe44edd
RK
5557/* Given an rtx VALUE that may contain additions and multiplications, return
5558 an equivalent value that just refers to a register, memory, or constant.
5559 This is done by generating instructions to perform the arithmetic and
5560 returning a pseudo-register containing the value.
c45a13a6
RK
5561
5562 The returned value may be a REG, SUBREG, MEM or constant. */
bbf6f052
RK
5563
5564rtx
5565force_operand (value, target)
5566 rtx value, target;
5567{
5568 register optab binoptab = 0;
5569 /* Use a temporary to force order of execution of calls to
5570 `force_operand'. */
5571 rtx tmp;
5572 register rtx op2;
5573 /* Use subtarget as the target for operand 0 of a binary operation. */
296b4ed9 5574 register rtx subtarget = get_subtarget (target);
bbf6f052 5575
8b015896
RH
5576 /* Check for a PIC address load. */
5577 if (flag_pic
5578 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5579 && XEXP (value, 0) == pic_offset_table_rtx
5580 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5581 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5582 || GET_CODE (XEXP (value, 1)) == CONST))
5583 {
5584 if (!subtarget)
5585 subtarget = gen_reg_rtx (GET_MODE (value));
5586 emit_move_insn (subtarget, value);
5587 return subtarget;
5588 }
5589
bbf6f052
RK
5590 if (GET_CODE (value) == PLUS)
5591 binoptab = add_optab;
5592 else if (GET_CODE (value) == MINUS)
5593 binoptab = sub_optab;
5594 else if (GET_CODE (value) == MULT)
5595 {
5596 op2 = XEXP (value, 1);
5597 if (!CONSTANT_P (op2)
5598 && !(GET_CODE (op2) == REG && op2 != subtarget))
5599 subtarget = 0;
5600 tmp = force_operand (XEXP (value, 0), subtarget);
5601 return expand_mult (GET_MODE (value), tmp,
906c4e36 5602 force_operand (op2, NULL_RTX),
91ce572a 5603 target, 1);
bbf6f052
RK
5604 }
5605
5606 if (binoptab)
5607 {
5608 op2 = XEXP (value, 1);
5609 if (!CONSTANT_P (op2)
5610 && !(GET_CODE (op2) == REG && op2 != subtarget))
5611 subtarget = 0;
5612 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5613 {
5614 binoptab = add_optab;
5615 op2 = negate_rtx (GET_MODE (value), op2);
5616 }
5617
5618 /* Check for an addition with OP2 a constant integer and our first
5619 operand a PLUS of a virtual register and something else. In that
5620 case, we want to emit the sum of the virtual register and the
5621 constant first and then add the other value. This allows virtual
5622 register instantiation to simply modify the constant rather than
5623 creating another one around this addition. */
5624 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5625 && GET_CODE (XEXP (value, 0)) == PLUS
5626 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5627 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5628 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5629 {
5630 rtx temp = expand_binop (GET_MODE (value), binoptab,
5631 XEXP (XEXP (value, 0), 0), op2,
5632 subtarget, 0, OPTAB_LIB_WIDEN);
5633 return expand_binop (GET_MODE (value), binoptab, temp,
5634 force_operand (XEXP (XEXP (value, 0), 1), 0),
5635 target, 0, OPTAB_LIB_WIDEN);
5636 }
3a94c984 5637
bbf6f052
RK
5638 tmp = force_operand (XEXP (value, 0), subtarget);
5639 return expand_binop (GET_MODE (value), binoptab, tmp,
906c4e36 5640 force_operand (op2, NULL_RTX),
bbf6f052 5641 target, 0, OPTAB_LIB_WIDEN);
8008b228 5642 /* We give UNSIGNEDP = 0 to expand_binop
bbf6f052
RK
5643 because the only operations we are expanding here are signed ones. */
5644 }
5645 return value;
5646}
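/* Editor's illustrative sketch (disabled; `demo_force_sum' is a
   hypothetical helper): force_operand above turns an arithmetic rtx
   such as (plus REG CONST_INT), which most insns cannot accept as an
   operand, into a pseudo register holding the computed value.  */
#if 0
static rtx
demo_force_sum (reg, offset)
     rtx reg;
     HOST_WIDE_INT offset;
{
  rtx sum = gen_rtx_PLUS (GET_MODE (reg), reg, GEN_INT (offset));

  /* Emits an add insn if needed and returns a REG (or the original
     value when it is already a valid operand).  */
  return force_operand (sum, NULL_RTX);
}
#endif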
5647\f
5648/* Subroutine of expand_expr:
5649 save the non-copied parts (LIST) of an expr (LHS), and return a list
5650 which can restore these values to their previous values,
5651 should something modify their storage. */
5652
5653static tree
5654save_noncopied_parts (lhs, list)
5655 tree lhs;
5656 tree list;
5657{
5658 tree tail;
5659 tree parts = 0;
5660
5661 for (tail = list; tail; tail = TREE_CHAIN (tail))
5662 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5663 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5664 else
5665 {
5666 tree part = TREE_VALUE (tail);
5667 tree part_type = TREE_TYPE (part);
906c4e36 5668 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
1da68f56
RK
5669 rtx target
5670 = assign_temp (build_qualified_type (part_type,
5671 (TYPE_QUALS (part_type)
5672 | TYPE_QUAL_CONST)),
5673 0, 1, 1);
5674
bbf6f052 5675 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
906c4e36 5676 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
bbf6f052 5677 parts = tree_cons (to_be_saved,
906c4e36
RK
5678 build (RTL_EXPR, part_type, NULL_TREE,
5679 (tree) target),
bbf6f052
RK
5680 parts);
5681 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5682 }
5683 return parts;
5684}
5685
5686/* Subroutine of expand_expr:
5687 record the non-copied parts (LIST) of an expr (LHS), and return a list
5688 which specifies the initial values of these parts. */
5689
5690static tree
5691init_noncopied_parts (lhs, list)
5692 tree lhs;
5693 tree list;
5694{
5695 tree tail;
5696 tree parts = 0;
5697
5698 for (tail = list; tail; tail = TREE_CHAIN (tail))
5699 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5700 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
c15398de 5701 else if (TREE_PURPOSE (tail))
bbf6f052
RK
5702 {
5703 tree part = TREE_VALUE (tail);
5704 tree part_type = TREE_TYPE (part);
906c4e36 5705 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
bbf6f052
RK
5706 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5707 }
5708 return parts;
5709}
5710
5711/* Subroutine of expand_expr: return nonzero iff there is no way that
e5e809f4
JL
5712 EXP can reference X, which is being modified. TOP_P is nonzero if this
5713 call is going to be used to determine whether we need a temporary
ff439b5f
CB
5714 for EXP, as opposed to a recursive call to this function.
5715
5716 It is always safe for this routine to return zero since it merely
5717 searches for optimization opportunities. */
bbf6f052 5718
8f17b5c5 5719int
e5e809f4 5720safe_from_p (x, exp, top_p)
bbf6f052
RK
5721 rtx x;
5722 tree exp;
e5e809f4 5723 int top_p;
bbf6f052
RK
5724{
5725 rtx exp_rtl = 0;
5726 int i, nops;
1da68f56 5727 static tree save_expr_list;
bbf6f052 5728
6676e72f
RK
5729 if (x == 0
5730 /* If EXP has varying size, we MUST use a target since we currently
8f6562d0
PB
5731 have no way of allocating temporaries of variable size
5732 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5733 So we assume here that something at a higher level has prevented a
f4510f37 5734 clash. This is somewhat bogus, but the best we can do. Only
e5e809f4 5735 do this when X is BLKmode and when we are at the top level. */
d0f062fb 5736 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
f4510f37 5737 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
8f6562d0
PB
5738 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5739 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5740 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5741 != INTEGER_CST)
1da68f56
RK
5742 && GET_MODE (x) == BLKmode)
5743 /* If X is in the outgoing argument area, it is always safe. */
5744 || (GET_CODE (x) == MEM
5745 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5746 || (GET_CODE (XEXP (x, 0)) == PLUS
5747 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
bbf6f052
RK
5748 return 1;
5749
5750 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5751 find the underlying pseudo. */
5752 if (GET_CODE (x) == SUBREG)
5753 {
5754 x = SUBREG_REG (x);
5755 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5756 return 0;
5757 }
5758
1da68f56
RK
5759 /* A SAVE_EXPR might appear many times in the expression passed to the
5760 top-level safe_from_p call, and if it has a complex subexpression,
5761 examining it multiple times could result in a combinatorial explosion.
5762 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5763 with optimization took about 28 minutes to compile -- even though it was
5764 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5765 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5766 we have processed. Note that the only test of top_p was above. */
5767
5768 if (top_p)
5769 {
5770 int rtn;
5771 tree t;
5772
5773 save_expr_list = 0;
5774
5775 rtn = safe_from_p (x, exp, 0);
5776
5777 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5778 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5779
5780 return rtn;
5781 }
bbf6f052 5782
1da68f56 5783 /* Now look at our tree code and possibly recurse. */
bbf6f052
RK
5784 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5785 {
5786 case 'd':
19e7881c 5787 exp_rtl = DECL_RTL_SET_P (exp) ? DECL_RTL (exp) : NULL_RTX;
bbf6f052
RK
5788 break;
5789
5790 case 'c':
5791 return 1;
5792
5793 case 'x':
5794 if (TREE_CODE (exp) == TREE_LIST)
f32fd778 5795 return ((TREE_VALUE (exp) == 0
e5e809f4 5796 || safe_from_p (x, TREE_VALUE (exp), 0))
bbf6f052 5797 && (TREE_CHAIN (exp) == 0
e5e809f4 5798 || safe_from_p (x, TREE_CHAIN (exp), 0)));
ff439b5f
CB
5799 else if (TREE_CODE (exp) == ERROR_MARK)
5800 return 1; /* An already-visited SAVE_EXPR? */
bbf6f052
RK
5801 else
5802 return 0;
5803
5804 case '1':
e5e809f4 5805 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
bbf6f052
RK
5806
5807 case '2':
5808 case '<':
e5e809f4
JL
5809 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5810 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
bbf6f052
RK
5811
5812 case 'e':
5813 case 'r':
5814 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5815 the expression. If it is set, we conflict iff we are that rtx or
5816 both are in memory. Otherwise, we check all operands of the
5817 expression recursively. */
5818
5819 switch (TREE_CODE (exp))
5820 {
5821 case ADDR_EXPR:
e44842fe 5822 return (staticp (TREE_OPERAND (exp, 0))
1da68f56
RK
5823 || TREE_STATIC (exp)
5824 || safe_from_p (x, TREE_OPERAND (exp, 0), 0));
bbf6f052
RK
5825
5826 case INDIRECT_REF:
1da68f56
RK
5827 if (GET_CODE (x) == MEM
5828 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5829 get_alias_set (exp)))
bbf6f052
RK
5830 return 0;
5831 break;
5832
5833 case CALL_EXPR:
f9808f81
MM
5834 /* Assume that the call will clobber all hard registers and
5835 all of memory. */
5836 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5837 || GET_CODE (x) == MEM)
5838 return 0;
bbf6f052
RK
5839 break;
5840
5841 case RTL_EXPR:
3bb5826a
RK
5842 /* If a sequence exists, we would have to scan every instruction
5843 in the sequence to see if it was safe. This is probably not
5844 worthwhile. */
5845 if (RTL_EXPR_SEQUENCE (exp))
bbf6f052
RK
5846 return 0;
5847
3bb5826a 5848 exp_rtl = RTL_EXPR_RTL (exp);
bbf6f052
RK
5849 break;
5850
5851 case WITH_CLEANUP_EXPR:
5852 exp_rtl = RTL_EXPR_RTL (exp);
5853 break;
5854
5dab5552 5855 case CLEANUP_POINT_EXPR:
e5e809f4 5856 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5dab5552 5857
bbf6f052
RK
5858 case SAVE_EXPR:
5859 exp_rtl = SAVE_EXPR_RTL (exp);
ff439b5f
CB
5860 if (exp_rtl)
5861 break;
5862
1da68f56
RK
5863 /* If we've already scanned this, don't do it again. Otherwise,
5864 show we've scanned it and record for clearing the flag if we're
5865 going on. */
5866 if (TREE_PRIVATE (exp))
5867 return 1;
ff439b5f 5868
1da68f56
RK
5869 TREE_PRIVATE (exp) = 1;
5870 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
ff59bfe6 5871 {
1da68f56
RK
5872 TREE_PRIVATE (exp) = 0;
5873 return 0;
ff59bfe6 5874 }
1da68f56
RK
5875
5876 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
ff439b5f 5877 return 1;
bbf6f052 5878
8129842c
RS
5879 case BIND_EXPR:
5880 /* The only operand we look at is operand 1. The rest aren't
5881 part of the expression. */
e5e809f4 5882 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
8129842c 5883
bbf6f052 5884 case METHOD_CALL_EXPR:
0f41302f 5885 /* This takes a rtx argument, but shouldn't appear here. */
bbf6f052 5886 abort ();
3a94c984 5887
e9a25f70
JL
5888 default:
5889 break;
bbf6f052
RK
5890 }
5891
5892 /* If we have an rtx, we do not need to scan our operands. */
5893 if (exp_rtl)
5894 break;
5895
8f17b5c5 5896 nops = first_rtl_op (TREE_CODE (exp));
bbf6f052
RK
5897 for (i = 0; i < nops; i++)
5898 if (TREE_OPERAND (exp, i) != 0
e5e809f4 5899 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
bbf6f052 5900 return 0;
8f17b5c5
MM
5901
5902 /* If this is a language-specific tree code, it may require
5903 special handling. */
dbbbbf3b
JDA
5904 if ((unsigned int) TREE_CODE (exp)
5905 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
8f17b5c5
MM
5906 && lang_safe_from_p
5907 && !(*lang_safe_from_p) (x, exp))
5908 return 0;
bbf6f052
RK
5909 }
5910
5911 /* If we have an rtl, find any enclosed object. Then see if we conflict
5912 with it. */
5913 if (exp_rtl)
5914 {
5915 if (GET_CODE (exp_rtl) == SUBREG)
5916 {
5917 exp_rtl = SUBREG_REG (exp_rtl);
5918 if (GET_CODE (exp_rtl) == REG
5919 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5920 return 0;
5921 }
5922
5923 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
1da68f56 5924 are memory and they conflict. */
bbf6f052
RK
5925 return ! (rtx_equal_p (x, exp_rtl)
5926 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
1da68f56
RK
5927 && true_dependence (exp_rtl, GET_MODE (x), x,
5928 rtx_addr_varies_p)));
bbf6f052
RK
5929 }
5930
5931 /* If we reach here, it is safe. */
5932 return 1;
5933}
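/* Editor's illustrative sketch (disabled; hypothetical helper): the
   usual pattern for using safe_from_p above -- reuse a suggested
   target register for an intermediate result only when evaluating the
   other operand cannot clobber it.  */
#if 0
static rtx
demo_choose_subtarget (target, other_operand, type)
     rtx target;
     tree other_operand;
     tree type;
{
  if (target != 0 && GET_CODE (target) == REG
      && safe_from_p (target, other_operand, 1))
    return target;

  /* Not provably safe: fall back to a fresh pseudo.  */
  return gen_reg_rtx (TYPE_MODE (type));
}
#endif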
5934
5935/* Subroutine of expand_expr: return nonzero iff EXP is an
5936 expression whose type is statically determinable. */
5937
5938static int
5939fixed_type_p (exp)
5940 tree exp;
5941{
5942 if (TREE_CODE (exp) == PARM_DECL
5943 || TREE_CODE (exp) == VAR_DECL
5944 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5945 || TREE_CODE (exp) == COMPONENT_REF
5946 || TREE_CODE (exp) == ARRAY_REF)
5947 return 1;
5948 return 0;
5949}
01c8a7c8
RK
5950
5951/* Subroutine of expand_expr: return rtx if EXP is a
5952 variable or parameter; else return 0. */
5953
5954static rtx
5955var_rtx (exp)
5956 tree exp;
5957{
5958 STRIP_NOPS (exp);
5959 switch (TREE_CODE (exp))
5960 {
5961 case PARM_DECL:
5962 case VAR_DECL:
5963 return DECL_RTL (exp);
5964 default:
5965 return 0;
5966 }
5967}
dbecbbe4
JL
5968
5969#ifdef MAX_INTEGER_COMPUTATION_MODE
400500c4 5970
dbecbbe4
JL
5971void
5972check_max_integer_computation_mode (exp)
3a94c984 5973 tree exp;
dbecbbe4 5974{
5f652c07 5975 enum tree_code code;
dbecbbe4
JL
5976 enum machine_mode mode;
5977
5f652c07
JM
5978 /* Strip any NOPs that don't change the mode. */
5979 STRIP_NOPS (exp);
5980 code = TREE_CODE (exp);
5981
71bca506
JL
5982 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5983 if (code == NOP_EXPR
5984 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5985 return;
5986
dbecbbe4
JL
5987 /* First check the type of the overall operation. We need only look at
5988 unary, binary and relational operations. */
5989 if (TREE_CODE_CLASS (code) == '1'
5990 || TREE_CODE_CLASS (code) == '2'
5991 || TREE_CODE_CLASS (code) == '<')
5992 {
5993 mode = TYPE_MODE (TREE_TYPE (exp));
5994 if (GET_MODE_CLASS (mode) == MODE_INT
5995 && mode > MAX_INTEGER_COMPUTATION_MODE)
400500c4 5996 internal_error ("unsupported wide integer operation");
dbecbbe4
JL
5997 }
5998
5999 /* Check operand of a unary op. */
6000 if (TREE_CODE_CLASS (code) == '1')
6001 {
6002 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6003 if (GET_MODE_CLASS (mode) == MODE_INT
6004 && mode > MAX_INTEGER_COMPUTATION_MODE)
400500c4 6005 internal_error ("unsupported wide integer operation");
dbecbbe4 6006 }
3a94c984 6007
dbecbbe4
JL
6008 /* Check operands of a binary/comparison op. */
6009 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
6010 {
6011 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6012 if (GET_MODE_CLASS (mode) == MODE_INT
6013 && mode > MAX_INTEGER_COMPUTATION_MODE)
400500c4 6014 internal_error ("unsupported wide integer operation");
dbecbbe4
JL
6015
6016 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
6017 if (GET_MODE_CLASS (mode) == MODE_INT
6018 && mode > MAX_INTEGER_COMPUTATION_MODE)
400500c4 6019 internal_error ("unsupported wide integer operation");
dbecbbe4
JL
6020 }
6021}
6022#endif
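/* Editor's illustrative sketch (disabled; `demo_expand_plus' is a
   hypothetical helper, and both operands are assumed to share one
   type): a typical client of expand_expr, defined below.  Each
   operand is expanded with EXPAND_NORMAL -- target and mode are only
   suggestions -- and the sum is then computed with expand_binop.  */
#if 0
static rtx
demo_expand_plus (a, b)
     tree a, b;
{
  rtx op0 = expand_expr (a, NULL_RTX, VOIDmode, EXPAND_NORMAL);
  rtx op1 = expand_expr (b, NULL_RTX, VOIDmode, EXPAND_NORMAL);

  /* UNSIGNEDP is 0 because we expand a signed addition.  */
  return expand_binop (TYPE_MODE (TREE_TYPE (a)), add_optab,
		       op0, op1, NULL_RTX, 0, OPTAB_LIB_WIDEN);
}
#endif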
14a774a9 6023\f
bbf6f052
RK
6024/* expand_expr: generate code for computing expression EXP.
6025 An rtx for the computed value is returned. The value is never null.
6026 In the case of a void EXP, const0_rtx is returned.
6027
6028 The value may be stored in TARGET if TARGET is nonzero.
6029 TARGET is just a suggestion; callers must assume that
6030 the rtx returned may not be the same as TARGET.
6031
6032 If TARGET is CONST0_RTX, it means that the value will be ignored.
6033
6034 If TMODE is not VOIDmode, it suggests generating the
6035 result in mode TMODE. But this is done only when convenient.
6036 Otherwise, TMODE is ignored and the value generated in its natural mode.
6037 TMODE is just a suggestion; callers must assume that
6038 the rtx returned may not have mode TMODE.
6039
d6a5ac33
RK
6040 Note that TARGET may have neither TMODE nor MODE. In that case, it
6041 probably will not be used.
bbf6f052
RK
6042
6043 If MODIFIER is EXPAND_SUM then when EXP is an addition
6044 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6045 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6046 products as above, or REG or MEM, or constant.
6047 Ordinarily in such cases we would output mul or add instructions
6048 and then return a pseudo reg containing the sum.
6049
6050 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6051 it also marks a label as absolutely required (it can't be dead).
26fcb35a 6052 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
d6a5ac33
RK
6053 This is used for outputting expressions used in initializers.
6054
6055 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6056 with a constant address even if that address is not normally legitimate.
6057 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
bbf6f052
RK
6058
6059rtx
6060expand_expr (exp, target, tmode, modifier)
6061 register tree exp;
6062 rtx target;
6063 enum machine_mode tmode;
6064 enum expand_modifier modifier;
6065{
6066 register rtx op0, op1, temp;
6067 tree type = TREE_TYPE (exp);
6068 int unsignedp = TREE_UNSIGNED (type);
68557e14 6069 register enum machine_mode mode;
bbf6f052
RK
6070 register enum tree_code code = TREE_CODE (exp);
6071 optab this_optab;
68557e14
ML
6072 rtx subtarget, original_target;
6073 int ignore;
bbf6f052 6074 tree context;
921b3427
RK
6075 /* Used by check-memory-usage to make modifier read only. */
6076 enum expand_modifier ro_modifier;
bbf6f052 6077
3a94c984 6078 /* Handle ERROR_MARK before anybody tries to access its type. */
85f3d674 6079 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
68557e14
ML
6080 {
6081 op0 = CONST0_RTX (tmode);
6082 if (op0 != 0)
6083 return op0;
6084 return const0_rtx;
6085 }
6086
6087 mode = TYPE_MODE (type);
6088 /* Use subtarget as the target for operand 0 of a binary operation. */
296b4ed9 6089 subtarget = get_subtarget (target);
68557e14
ML
6090 original_target = target;
6091 ignore = (target == const0_rtx
6092 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6093 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6094 || code == COND_EXPR)
6095 && TREE_CODE (type) == VOID_TYPE));
6096
921b3427
RK
6097 /* Make a read-only version of the modifier. */
6098 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
6099 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
6100 ro_modifier = modifier;
6101 else
6102 ro_modifier = EXPAND_NORMAL;
ca695ac9 6103
dd27116b
RK
6104 /* If we are going to ignore this result, we need only do something
6105 if there is a side-effect somewhere in the expression. If there
b50d17a1
RK
6106 is, short-circuit the most common cases here. Note that we must
6107 not call expand_expr with anything but const0_rtx in case this
6108 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
bbf6f052 6109
dd27116b
RK
6110 if (ignore)
6111 {
6112 if (! TREE_SIDE_EFFECTS (exp))
6113 return const0_rtx;
6114
14a774a9
RK
6115 /* Ensure we reference a volatile object even if value is ignored, but
6116 don't do this if all we are doing is taking its address. */
dd27116b
RK
6117 if (TREE_THIS_VOLATILE (exp)
6118 && TREE_CODE (exp) != FUNCTION_DECL
14a774a9
RK
6119 && mode != VOIDmode && mode != BLKmode
6120 && modifier != EXPAND_CONST_ADDRESS)
dd27116b 6121 {
921b3427 6122 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
dd27116b
RK
6123 if (GET_CODE (temp) == MEM)
6124 temp = copy_to_reg (temp);
6125 return const0_rtx;
6126 }
6127
14a774a9
RK
6128 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6129 || code == INDIRECT_REF || code == BUFFER_REF)
dd27116b 6130 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
921b3427 6131 VOIDmode, ro_modifier);
14a774a9 6132 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
b4e3fabb 6133 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
dd27116b 6134 {
b4e3fabb
RK
6135 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6136 ro_modifier);
6137 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
6138 ro_modifier);
dd27116b
RK
6139 return const0_rtx;
6140 }
6141 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6142 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6143 /* If the second operand has no side effects, just evaluate
0f41302f 6144 the first. */
dd27116b 6145 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
921b3427 6146 VOIDmode, ro_modifier);
14a774a9
RK
6147 else if (code == BIT_FIELD_REF)
6148 {
b4e3fabb
RK
6149 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6150 ro_modifier);
6151 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
6152 ro_modifier);
6153 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode,
6154 ro_modifier);
14a774a9
RK
6155 return const0_rtx;
6156 }
3a94c984 6157
90764a87 6158 target = 0;
dd27116b 6159 }
bbf6f052 6160
dbecbbe4 6161#ifdef MAX_INTEGER_COMPUTATION_MODE
5f652c07
JM
6162 /* Only check stuff here if the mode we want is different from the mode
6163 of the expression; if it's the same, check_max_integer_computation_mode
6164 will handle it. Do we really need to check this stuff at all? */
6165
ce3c0b53 6166 if (target
5f652c07 6167 && GET_MODE (target) != mode
ce3c0b53
JL
6168 && TREE_CODE (exp) != INTEGER_CST
6169 && TREE_CODE (exp) != PARM_DECL
ee06cc21 6170 && TREE_CODE (exp) != ARRAY_REF
b4e3fabb 6171 && TREE_CODE (exp) != ARRAY_RANGE_REF
ee06cc21
JL
6172 && TREE_CODE (exp) != COMPONENT_REF
6173 && TREE_CODE (exp) != BIT_FIELD_REF
6174 && TREE_CODE (exp) != INDIRECT_REF
6bcd94ae 6175 && TREE_CODE (exp) != CALL_EXPR
6ab46dff
GRK
6176 && TREE_CODE (exp) != VAR_DECL
6177 && TREE_CODE (exp) != RTL_EXPR)
dbecbbe4
JL
6178 {
6179 enum machine_mode mode = GET_MODE (target);
6180
6181 if (GET_MODE_CLASS (mode) == MODE_INT
6182 && mode > MAX_INTEGER_COMPUTATION_MODE)
400500c4 6183 internal_error ("unsupported wide integer operation");
dbecbbe4
JL
6184 }
6185
5f652c07
JM
6186 if (tmode != mode
6187 && TREE_CODE (exp) != INTEGER_CST
ce3c0b53 6188 && TREE_CODE (exp) != PARM_DECL
ee06cc21 6189 && TREE_CODE (exp) != ARRAY_REF
b4e3fabb 6190 && TREE_CODE (exp) != ARRAY_RANGE_REF
ee06cc21
JL
6191 && TREE_CODE (exp) != COMPONENT_REF
6192 && TREE_CODE (exp) != BIT_FIELD_REF
6193 && TREE_CODE (exp) != INDIRECT_REF
ce3c0b53 6194 && TREE_CODE (exp) != VAR_DECL
6bcd94ae 6195 && TREE_CODE (exp) != CALL_EXPR
6ab46dff 6196 && TREE_CODE (exp) != RTL_EXPR
71bca506 6197 && GET_MODE_CLASS (tmode) == MODE_INT
dbecbbe4 6198 && tmode > MAX_INTEGER_COMPUTATION_MODE)
400500c4 6199 internal_error ("unsupported wide integer operation");
dbecbbe4
JL
6200
6201 check_max_integer_computation_mode (exp);
6202#endif
6203
e44842fe
RK
6204 /* If will do cse, generate all results into pseudo registers
6205 since 1) that allows cse to find more things
6206 and 2) otherwise cse could produce an insn the machine
6207 cannot support. */
6208
bbf6f052
RK
6209 if (! cse_not_expected && mode != BLKmode && target
6210 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
6211 target = subtarget;
6212
bbf6f052
RK
6213 switch (code)
6214 {
6215 case LABEL_DECL:
b552441b
RS
6216 {
6217 tree function = decl_function_context (exp);
6218 /* Handle using a label in a containing function. */
d0977240
RK
6219 if (function != current_function_decl
6220 && function != inline_function_decl && function != 0)
b552441b
RS
6221 {
6222 struct function *p = find_function_data (function);
49ad7cfa
BS
6223 p->expr->x_forced_labels
6224 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6225 p->expr->x_forced_labels);
b552441b 6226 }
ab87f8c8
JL
6227 else
6228 {
ab87f8c8
JL
6229 if (modifier == EXPAND_INITIALIZER)
6230 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6231 label_rtx (exp),
6232 forced_labels);
6233 }
c5c76735 6234
38a448ca
RH
6235 temp = gen_rtx_MEM (FUNCTION_MODE,
6236 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
d0977240
RK
6237 if (function != current_function_decl
6238 && function != inline_function_decl && function != 0)
26fcb35a
RS
6239 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6240 return temp;
b552441b 6241 }
bbf6f052
RK
6242
6243 case PARM_DECL:
6244 if (DECL_RTL (exp) == 0)
6245 {
6246 error_with_decl (exp, "prior parameter's size depends on `%s'");
4af3895e 6247 return CONST0_RTX (mode);
bbf6f052
RK
6248 }
6249
0f41302f 6250 /* ... fall through ... */
d6a5ac33 6251
bbf6f052 6252 case VAR_DECL:
2dca20cd
RS
6253 /* If a static var's type was incomplete when the decl was written,
6254 but the type is complete now, lay out the decl now. */
d0f062fb 6255 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
2dca20cd
RS
6256 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6257 {
2dca20cd
RS
6258 layout_decl (exp, 0);
6259 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
2dca20cd 6260 }
d6a5ac33 6261
7d384cc0
KR
6262 /* Although static-storage variables start off initialized, according to
6263 ANSI C, a memcpy could overwrite them with uninitialized values. So
6264 we check them too. This also lets us check for read-only variables
6265 accessed via a non-const declaration, in case it won't be detected
6266 any other way (e.g., in an embedded system or OS kernel without
6267 memory protection).
6268
6269 Aggregates are not checked here; they're handled elsewhere. */
01d939e8 6270 if (cfun && current_function_check_memory_usage
49ad7cfa 6271 && code == VAR_DECL
921b3427 6272 && GET_CODE (DECL_RTL (exp)) == MEM
921b3427
RK
6273 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6274 {
6275 enum memory_use_mode memory_usage;
6276 memory_usage = get_memory_usage_from_modifier (modifier);
6277
ea4da9db 6278 in_check_memory_usage = 1;
921b3427 6279 if (memory_usage != MEMORY_USE_DONT)
ebb1b59a
BS
6280 emit_library_call (chkr_check_addr_libfunc,
6281 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
6a9c4aed 6282 XEXP (DECL_RTL (exp), 0), Pmode,
921b3427
RK
6283 GEN_INT (int_size_in_bytes (type)),
6284 TYPE_MODE (sizetype),
956d6950
JL
6285 GEN_INT (memory_usage),
6286 TYPE_MODE (integer_type_node));
ea4da9db 6287 in_check_memory_usage = 0;
921b3427
RK
6288 }
6289
0f41302f 6290 /* ... fall through ... */
d6a5ac33 6291
2dca20cd 6292 case FUNCTION_DECL:
bbf6f052
RK
6293 case RESULT_DECL:
6294 if (DECL_RTL (exp) == 0)
6295 abort ();
d6a5ac33 6296
e44842fe
RK
6297 /* Ensure variable marked as used even if it doesn't go through
6298 a parser. If it hasn't been used yet, write out an external
6299 definition. */
6300 if (! TREE_USED (exp))
6301 {
6302 assemble_external (exp);
6303 TREE_USED (exp) = 1;
6304 }
6305
dc6d66b3
RK
6306 /* Show we haven't gotten RTL for this yet. */
6307 temp = 0;
6308
bbf6f052
RK
6309 /* Handle variables inherited from containing functions. */
6310 context = decl_function_context (exp);
6311
6312 /* We treat inline_function_decl as an alias for the current function
6313 because that is the inline function whose vars, types, etc.
6314 are being merged into the current function.
6315 See expand_inline_function. */
d6a5ac33 6316
bbf6f052
RK
6317 if (context != 0 && context != current_function_decl
6318 && context != inline_function_decl
6319 /* If var is static, we don't need a static chain to access it. */
6320 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6321 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6322 {
6323 rtx addr;
6324
6325 /* Mark as non-local and addressable. */
81feeecb 6326 DECL_NONLOCAL (exp) = 1;
38ee6ed9
JM
6327 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6328 abort ();
bbf6f052
RK
6329 mark_addressable (exp);
6330 if (GET_CODE (DECL_RTL (exp)) != MEM)
6331 abort ();
6332 addr = XEXP (DECL_RTL (exp), 0);
6333 if (GET_CODE (addr) == MEM)
3a94c984 6334 addr = change_address (addr, Pmode,
3bdf5ad1 6335 fix_lexical_addr (XEXP (addr, 0), exp));
bbf6f052
RK
6336 else
6337 addr = fix_lexical_addr (addr, exp);
3bdf5ad1 6338
dc6d66b3 6339 temp = change_address (DECL_RTL (exp), mode, addr);
bbf6f052 6340 }
4af3895e 6341
bbf6f052
RK
6342 /* This is the case of an array whose size is to be determined
6343 from its initializer, while the initializer is still being parsed.
6344 See expand_decl. */
d6a5ac33 6345
dc6d66b3
RK
6346 else if (GET_CODE (DECL_RTL (exp)) == MEM
6347 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6348 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
bbf6f052 6349 XEXP (DECL_RTL (exp), 0));
d6a5ac33
RK
6350
6351 /* If DECL_RTL is memory, we are in the normal case and either
6352 the address is not valid or it is not a register and -fforce-addr
6353 is specified, get the address into a register. */
6354
dc6d66b3
RK
6355 else if (GET_CODE (DECL_RTL (exp)) == MEM
6356 && modifier != EXPAND_CONST_ADDRESS
6357 && modifier != EXPAND_SUM
6358 && modifier != EXPAND_INITIALIZER
6359 && (! memory_address_p (DECL_MODE (exp),
6360 XEXP (DECL_RTL (exp), 0))
6361 || (flag_force_addr
6362 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6363 temp = change_address (DECL_RTL (exp), VOIDmode,
d6a5ac33 6364 copy_rtx (XEXP (DECL_RTL (exp), 0)));
1499e0a8 6365
dc6d66b3 6366 /* If we got something, return it. But first, set the alignment
04956a1a 6367 if the address is a register. */
dc6d66b3
RK
6368 if (temp != 0)
6369 {
6370 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
bdb429a5 6371 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
dc6d66b3
RK
6372
6373 return temp;
6374 }
6375
1499e0a8
RK
6376 /* If the mode of DECL_RTL does not match that of the decl, it
6377 must be a promoted value. We return a SUBREG of the wanted mode,
6378 but mark it so that we know that it was already extended. */
6379
6380 if (GET_CODE (DECL_RTL (exp)) == REG
6381 && GET_MODE (DECL_RTL (exp)) != mode)
6382 {
1499e0a8
RK
6383 /* Get the signedness used for this variable. Ensure we get the
6384 same mode we got when the variable was declared. */
78911e8b
RK
6385 if (GET_MODE (DECL_RTL (exp))
6386 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
1499e0a8
RK
6387 abort ();
6388
ddef6bc7 6389 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
1499e0a8
RK
6390 SUBREG_PROMOTED_VAR_P (temp) = 1;
6391 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6392 return temp;
6393 }
6394
bbf6f052
RK
6395 return DECL_RTL (exp);
6396
6397 case INTEGER_CST:
6398 return immed_double_const (TREE_INT_CST_LOW (exp),
05bccae2 6399 TREE_INT_CST_HIGH (exp), mode);
bbf6f052
RK
6400
6401 case CONST_DECL:
921b3427 6402 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
3a94c984 6403 EXPAND_MEMORY_USE_BAD);
bbf6f052
RK
6404
6405 case REAL_CST:
6406 /* If optimized, generate immediate CONST_DOUBLE
3a94c984
KH
6407 which will be turned into memory by reload if necessary.
6408
bbf6f052
RK
6409 We used to force a register so that loop.c could see it. But
6410 this does not allow gen_* patterns to perform optimizations with
6411 the constants. It also produces two insns in cases like "x = 1.0;".
6412 On most machines, floating-point constants are not permitted in
6413 many insns, so we'd end up copying it to a register in any case.
6414
6415 Now, we do the copying in expand_binop, if appropriate. */
6416 return immed_real_const (exp);
6417
6418 case COMPLEX_CST:
6419 case STRING_CST:
6420 if (! TREE_CST_RTL (exp))
bd7cf17e 6421 output_constant_def (exp, 1);
bbf6f052
RK
6422
6423 /* TREE_CST_RTL probably contains a constant address.
6424 On RISC machines where a constant address isn't valid,
6425 make some insns to get that address into a register. */
6426 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6427 && modifier != EXPAND_CONST_ADDRESS
6428 && modifier != EXPAND_INITIALIZER
6429 && modifier != EXPAND_SUM
d6a5ac33
RK
6430 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6431 || (flag_force_addr
6432 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
bbf6f052
RK
6433 return change_address (TREE_CST_RTL (exp), VOIDmode,
6434 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6435 return TREE_CST_RTL (exp);
6436
bf1e5319 6437 case EXPR_WITH_FILE_LOCATION:
b24f65cd
APB
6438 {
6439 rtx to_return;
3b304f5b 6440 const char *saved_input_filename = input_filename;
b24f65cd
APB
6441 int saved_lineno = lineno;
6442 input_filename = EXPR_WFL_FILENAME (exp);
6443 lineno = EXPR_WFL_LINENO (exp);
6444 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6445 emit_line_note (input_filename, lineno);
3a94c984 6446 /* Possibly avoid switching back and forth here. */
b24f65cd
APB
6447 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6448 input_filename = saved_input_filename;
6449 lineno = saved_lineno;
6450 return to_return;
6451 }
bf1e5319 6452
bbf6f052
RK
6453 case SAVE_EXPR:
6454 context = decl_function_context (exp);
d6a5ac33 6455
d0977240
RK
6456 /* If this SAVE_EXPR was at global context, assume we are an
6457 initialization function and move it into our context. */
6458 if (context == 0)
6459 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6460
bbf6f052
RK
6461 /* We treat inline_function_decl as an alias for the current function
6462 because that is the inline function whose vars, types, etc.
6463 are being merged into the current function.
6464 See expand_inline_function. */
6465 if (context == current_function_decl || context == inline_function_decl)
6466 context = 0;
6467
6468 /* If this is non-local, handle it. */
6469 if (context)
6470 {
d0977240
RK
6471 /* The following call just exists to abort if the context is
6472 not of a containing function. */
6473 find_function_data (context);
6474
bbf6f052
RK
6475 temp = SAVE_EXPR_RTL (exp);
6476 if (temp && GET_CODE (temp) == REG)
6477 {
6478 put_var_into_stack (exp);
6479 temp = SAVE_EXPR_RTL (exp);
6480 }
6481 if (temp == 0 || GET_CODE (temp) != MEM)
6482 abort ();
6483 return change_address (temp, mode,
6484 fix_lexical_addr (XEXP (temp, 0), exp));
6485 }
6486 if (SAVE_EXPR_RTL (exp) == 0)
6487 {
06089a8b
RK
6488 if (mode == VOIDmode)
6489 temp = const0_rtx;
6490 else
1da68f56
RK
6491 temp = assign_temp (build_qualified_type (type,
6492 (TYPE_QUALS (type)
6493 | TYPE_QUAL_CONST)),
6494 3, 0, 0);
1499e0a8 6495
bbf6f052 6496 SAVE_EXPR_RTL (exp) = temp;
bbf6f052 6497 if (!optimize && GET_CODE (temp) == REG)
38a448ca
RH
6498 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6499 save_expr_regs);
ff78f773
RK
6500
6501 /* If the mode of TEMP does not match that of the expression, it
6502 must be a promoted value. We pass store_expr a SUBREG of the
6503 wanted mode but mark it so that we know that it was already
6504 extended. Note that `unsignedp' was modified above in
6505 this case. */
6506
6507 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6508 {
ddef6bc7 6509 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
ff78f773
RK
6510 SUBREG_PROMOTED_VAR_P (temp) = 1;
6511 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6512 }
6513
4c7a0be9 6514 if (temp == const0_rtx)
921b3427
RK
6515 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6516 EXPAND_MEMORY_USE_BAD);
4c7a0be9
JW
6517 else
6518 store_expr (TREE_OPERAND (exp, 0), temp, 0);
e5e809f4
JL
6519
6520 TREE_USED (exp) = 1;
bbf6f052 6521 }
1499e0a8
RK
6522
6523 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6524 must be a promoted value. We return a SUBREG of the wanted mode,
0f41302f 6525 but mark it so that we know that it was already extended. */
1499e0a8
RK
6526
6527 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6528 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6529 {
e70d22c8
RK
6530 /* Compute the signedness and make the proper SUBREG. */
6531 promote_mode (type, mode, &unsignedp, 0);
ddef6bc7 6532 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
1499e0a8
RK
6533 SUBREG_PROMOTED_VAR_P (temp) = 1;
6534 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6535 return temp;
6536 }
6537
bbf6f052
RK
6538 return SAVE_EXPR_RTL (exp);
6539
679163cf
MS
6540 case UNSAVE_EXPR:
6541 {
6542 rtx temp;
6543 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6544 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6545 return temp;
6546 }
6547
b50d17a1 6548 case PLACEHOLDER_EXPR:
e9a25f70
JL
6549 {
6550 tree placeholder_expr;
6551
6552 /* If there is an object on the head of the placeholder list,
e5e809f4 6553 see if some object in it is of type TYPE or a pointer to it. For
e9a25f70
JL
6554 further information, see tree.def. */
6555 for (placeholder_expr = placeholder_list;
6556 placeholder_expr != 0;
6557 placeholder_expr = TREE_CHAIN (placeholder_expr))
6558 {
6559 tree need_type = TYPE_MAIN_VARIANT (type);
6560 tree object = 0;
6561 tree old_list = placeholder_list;
6562 tree elt;
6563
e5e809f4 6564 /* Find the outermost reference that is of the type we want.
3a94c984 6565 If none, see if any object has a type that is a pointer to
e5e809f4
JL
6566 the type we want. */
6567 for (elt = TREE_PURPOSE (placeholder_expr);
6568 elt != 0 && object == 0;
6569 elt
6570 = ((TREE_CODE (elt) == COMPOUND_EXPR
6571 || TREE_CODE (elt) == COND_EXPR)
6572 ? TREE_OPERAND (elt, 1)
6573 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6574 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6575 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6576 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6577 ? TREE_OPERAND (elt, 0) : 0))
6578 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6579 object = elt;
e9a25f70 6580
e9a25f70 6581 for (elt = TREE_PURPOSE (placeholder_expr);
e5e809f4
JL
6582 elt != 0 && object == 0;
6583 elt
6584 = ((TREE_CODE (elt) == COMPOUND_EXPR
6585 || TREE_CODE (elt) == COND_EXPR)
6586 ? TREE_OPERAND (elt, 1)
6587 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6588 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6589 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6590 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6591 ? TREE_OPERAND (elt, 0) : 0))
6592 if (POINTER_TYPE_P (TREE_TYPE (elt))
6593 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
e9a25f70 6594 == need_type))
e5e809f4 6595 object = build1 (INDIRECT_REF, need_type, elt);
dc500fa1 6596
e9a25f70 6597 if (object != 0)
2cde2255 6598 {
e9a25f70
JL
6599 /* Expand this object skipping the list entries before
6600 it was found in case it is also a PLACEHOLDER_EXPR.
6601 In that case, we want to translate it using subsequent
6602 entries. */
6603 placeholder_list = TREE_CHAIN (placeholder_expr);
6604 temp = expand_expr (object, original_target, tmode,
6605 ro_modifier);
6606 placeholder_list = old_list;
6607 return temp;
2cde2255 6608 }
e9a25f70
JL
6609 }
6610 }
b50d17a1
RK
6611
6612 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6613 abort ();
6614
6615 case WITH_RECORD_EXPR:
6616 /* Put the object on the placeholder list, expand our first operand,
6617 and pop the list. */
6618 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6619 placeholder_list);
6620 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
921b3427 6621 tmode, ro_modifier);
b50d17a1
RK
6622 placeholder_list = TREE_CHAIN (placeholder_list);
6623 return target;
6624
70e6ca43
APB
6625 case GOTO_EXPR:
6626 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6627 expand_goto (TREE_OPERAND (exp, 0));
6628 else
6629 expand_computed_goto (TREE_OPERAND (exp, 0));
6630 return const0_rtx;
6631
bbf6f052 6632 case EXIT_EXPR:
df4ae160 6633 expand_exit_loop_if_false (NULL,
e44842fe 6634 invert_truthvalue (TREE_OPERAND (exp, 0)));
bbf6f052
RK
6635 return const0_rtx;
6636
f42e28dd
APB
6637 case LABELED_BLOCK_EXPR:
6638 if (LABELED_BLOCK_BODY (exp))
6639 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
0a5fee32
PB
6640 /* Should perhaps use expand_label, but this is simpler and safer. */
6641 do_pending_stack_adjust ();
f42e28dd
APB
6642 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6643 return const0_rtx;
6644
6645 case EXIT_BLOCK_EXPR:
6646 if (EXIT_BLOCK_RETURN (exp))
ab87f8c8 6647 sorry ("returned value in block_exit_expr");
f42e28dd
APB
6648 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6649 return const0_rtx;
6650
bbf6f052 6651 case LOOP_EXPR:
0088fcb1 6652 push_temp_slots ();
bbf6f052
RK
6653 expand_start_loop (1);
6654 expand_expr_stmt (TREE_OPERAND (exp, 0));
6655 expand_end_loop ();
0088fcb1 6656 pop_temp_slots ();
bbf6f052
RK
6657
6658 return const0_rtx;
6659
6660 case BIND_EXPR:
6661 {
6662 tree vars = TREE_OPERAND (exp, 0);
6663 int vars_need_expansion = 0;
6664
6665 /* Need to open a binding contour here because
e976b8b2 6666 if there are any cleanups they must be contained here. */
8e91754e 6667 expand_start_bindings (2);
bbf6f052 6668
2df53c0b
RS
6669 /* Mark the corresponding BLOCK for output in its proper place. */
6670 if (TREE_OPERAND (exp, 2) != 0
6671 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6672 insert_block (TREE_OPERAND (exp, 2));
bbf6f052
RK
6673
6674 /* If VARS have not yet been expanded, expand them now. */
6675 while (vars)
6676 {
19e7881c 6677 if (!DECL_RTL_SET_P (vars))
bbf6f052
RK
6678 {
6679 vars_need_expansion = 1;
6680 expand_decl (vars);
6681 }
6682 expand_decl_init (vars);
6683 vars = TREE_CHAIN (vars);
6684 }
6685
921b3427 6686 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
bbf6f052
RK
6687
6688 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6689
6690 return temp;
6691 }
6692
6693 case RTL_EXPR:
83b853c9
JM
6694 if (RTL_EXPR_SEQUENCE (exp))
6695 {
6696 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6697 abort ();
6698 emit_insns (RTL_EXPR_SEQUENCE (exp));
6699 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6700 }
64dc53f3
MM
6701 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6702 free_temps_for_rtl_expr (exp);
bbf6f052
RK
6703 return RTL_EXPR_RTL (exp);
6704
6705 case CONSTRUCTOR:
dd27116b
RK
6706 /* If we don't need the result, just ensure we evaluate any
6707 subexpressions. */
6708 if (ignore)
6709 {
6710 tree elt;
6711 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
921b3427
RK
6712 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6713 EXPAND_MEMORY_USE_BAD);
dd27116b
RK
6714 return const0_rtx;
6715 }
3207b172 6716
4af3895e
JVA
6717 /* All elts simple constants => refer to a constant in memory. But
6718 if this is a non-BLKmode mode, let it store a field at a time
6719 since that should make a CONST_INT or CONST_DOUBLE when we
3207b172 6720 fold. Likewise, if we have a target we can use, it is best to
d720b9d1
RK
6721 store directly into the target unless the type is large enough
6722 that memcpy will be used. If we are making an initializer and
3207b172 6723 all operands are constant, put it in memory as well. */
dd27116b 6724 else if ((TREE_STATIC (exp)
3207b172 6725 && ((mode == BLKmode
e5e809f4 6726 && ! (target != 0 && safe_from_p (target, exp, 1)))
d720b9d1 6727 || TREE_ADDRESSABLE (exp)
19caa751 6728 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
3a94c984 6729 && (! MOVE_BY_PIECES_P
19caa751
RK
6730 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6731 TYPE_ALIGN (type)))
9de08200 6732 && ! mostly_zeros_p (exp))))
dd27116b 6733 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
bbf6f052 6734 {
bd7cf17e 6735 rtx constructor = output_constant_def (exp, 1);
19caa751 6736
b552441b
RS
6737 if (modifier != EXPAND_CONST_ADDRESS
6738 && modifier != EXPAND_INITIALIZER
6739 && modifier != EXPAND_SUM
d6a5ac33
RK
6740 && (! memory_address_p (GET_MODE (constructor),
6741 XEXP (constructor, 0))
6742 || (flag_force_addr
6743 && GET_CODE (XEXP (constructor, 0)) != REG)))
bbf6f052
RK
6744 constructor = change_address (constructor, VOIDmode,
6745 XEXP (constructor, 0));
6746 return constructor;
6747 }
bbf6f052
RK
6748 else
6749 {
e9ac02a6
JW
6750 /* Handle calls that pass values in multiple non-contiguous
6751 locations. The Irix 6 ABI has examples of this. */
e5e809f4 6752 if (target == 0 || ! safe_from_p (target, exp, 1)
e9ac02a6 6753 || GET_CODE (target) == PARALLEL)
1da68f56
RK
6754 target
6755 = assign_temp (build_qualified_type (type,
6756 (TYPE_QUALS (type)
6757 | (TREE_READONLY (exp)
6758 * TYPE_QUAL_CONST))),
6759 TREE_ADDRESSABLE (exp), 1, 1);
07604beb 6760
b7010412
RK
6761 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6762 int_size_in_bytes (TREE_TYPE (exp)));
bbf6f052
RK
6763 return target;
6764 }
6765
6766 case INDIRECT_REF:
6767 {
6768 tree exp1 = TREE_OPERAND (exp, 0);
7581a30f 6769 tree index;
3a94c984
KH
6770 tree string = string_constant (exp1, &index);
6771
06eaa86f 6772 /* Try to optimize reads from const strings. */
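/* E.g., *("abc" + 1) folds to the character constant GEN_INT ('b').  */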
7581a30f
JW
6773 if (string
6774 && TREE_CODE (string) == STRING_CST
6775 && TREE_CODE (index) == INTEGER_CST
05bccae2 6776 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
7581a30f 6777 && GET_MODE_CLASS (mode) == MODE_INT
06eaa86f
JW
6778 && GET_MODE_SIZE (mode) == 1
6779 && modifier != EXPAND_MEMORY_USE_WO)
05bccae2
RK
6780 return
6781 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
bbf6f052 6782
405f0da6
JW
6783 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6784 op0 = memory_address (mode, op0);
8c8a8e34 6785
01d939e8 6786 if (cfun && current_function_check_memory_usage
49ad7cfa 6787 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
921b3427
RK
6788 {
6789 enum memory_use_mode memory_usage;
6790 memory_usage = get_memory_usage_from_modifier (modifier);
6791
6792 if (memory_usage != MEMORY_USE_DONT)
c85f7c16
JL
6793 {
6794 in_check_memory_usage = 1;
ebb1b59a
BS
6795 emit_library_call (chkr_check_addr_libfunc,
6796 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, op0,
6797 Pmode, GEN_INT (int_size_in_bytes (type)),
c85f7c16
JL
6798 TYPE_MODE (sizetype),
6799 GEN_INT (memory_usage),
6800 TYPE_MODE (integer_type_node));
6801 in_check_memory_usage = 0;
6802 }
921b3427
RK
6803 }
6804
38a448ca 6805 temp = gen_rtx_MEM (mode, op0);
3bdf5ad1 6806 set_mem_attributes (temp, exp, 0);
1125706f
RK
6807
6808 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6809 here, because, in C and C++, the fact that a location is accessed
6810 through a pointer to const does not mean that the value there can
6811 never change. Languages where it can never change should
6812 also set TREE_STATIC. */
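/* E.g., in C, `const int *p' only forbids stores through P; the
   object it points to may still be modified through another lvalue.  */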
5cb7a25a 6813 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
14a774a9
RK
6814
6815 /* If we are writing to this object and its type is a record with
6816 readonly fields, we must mark it as readonly so it will
6817 conflict with readonly references to those fields. */
1da68f56 6818 if (modifier == EXPAND_MEMORY_USE_WO && readonly_fields_p (type))
14a774a9
RK
6819 RTX_UNCHANGING_P (temp) = 1;
6820
8c8a8e34
JW
6821 return temp;
6822 }
bbf6f052
RK
6823
6824 case ARRAY_REF:
742920c7
RK
6825 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6826 abort ();
bbf6f052 6827
bbf6f052 6828 {
742920c7
RK
6829 tree array = TREE_OPERAND (exp, 0);
6830 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6831 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
fed3cef0 6832 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
08293add 6833 HOST_WIDE_INT i;
b50d17a1 6834
d4c89139
PB
6835 /* Optimize the special case of a zero lower bound.
6836
6837 We convert the low_bound to sizetype to avoid some problems
6838 with constant folding. (E.g. suppose the lower bound is 1,
6839 and its mode is QI. Without the conversion, (ARRAY
6840 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
fed3cef0 6841 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
d4c89139 6842
742920c7 6843 if (! integer_zerop (low_bound))
fed3cef0 6844 index = size_diffop (index, convert (sizetype, low_bound));
742920c7 6845
742920c7 6846 /* Fold an expression like: "foo"[2].
ad2e7dd0
RK
6847 This is not done in fold so it won't happen inside &.
6848 Don't fold if this is for wide characters since it's too
6849 difficult to do correctly and this is a very rare case. */
742920c7 6850
cb5fa0f8
RK
6851 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6852 && TREE_CODE (array) == STRING_CST
742920c7 6853 && TREE_CODE (index) == INTEGER_CST
05bccae2 6854 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
ad2e7dd0
RK
6855 && GET_MODE_CLASS (mode) == MODE_INT
6856 && GET_MODE_SIZE (mode) == 1)
05bccae2
RK
6857 return
6858 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
bbf6f052 6859
742920c7
RK
6860 /* If this is a constant index into a constant array,
6861 just get the value from the array. Handle both the cases when
6862 we have an explicit constructor and when our operand is a variable
6863 that was declared const. */
4af3895e 6864
cb5fa0f8
RK
6865 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6866 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
05bccae2 6867 && TREE_CODE (index) == INTEGER_CST
3a94c984 6868 && 0 > compare_tree_int (index,
05bccae2
RK
6869 list_length (CONSTRUCTOR_ELTS
6870 (TREE_OPERAND (exp, 0)))))
742920c7 6871 {
05bccae2
RK
6872 tree elem;
6873
6874 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6875 i = TREE_INT_CST_LOW (index);
6876 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6877 ;
6878
6879 if (elem)
6880 return expand_expr (fold (TREE_VALUE (elem)), target,
6881 tmode, ro_modifier);
742920c7 6882 }
3a94c984 6883
742920c7 6884 else if (optimize >= 1
cb5fa0f8
RK
6885 && modifier != EXPAND_CONST_ADDRESS
6886 && modifier != EXPAND_INITIALIZER
742920c7
RK
6887 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6888 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6889 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6890 {
08293add 6891 if (TREE_CODE (index) == INTEGER_CST)
742920c7
RK
6892 {
6893 tree init = DECL_INITIAL (array);
6894
742920c7
RK
6895 if (TREE_CODE (init) == CONSTRUCTOR)
6896 {
665f2503 6897 tree elem;
742920c7 6898
05bccae2 6899 for (elem = CONSTRUCTOR_ELTS (init);
5cb1bea4
JM
6900 (elem
6901 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
05bccae2
RK
6902 elem = TREE_CHAIN (elem))
6903 ;
6904
e69decfd 6905 if (elem && !TREE_SIDE_EFFECTS (elem))
742920c7 6906 return expand_expr (fold (TREE_VALUE (elem)), target,
921b3427 6907 tmode, ro_modifier);
742920c7
RK
6908 }
6909 else if (TREE_CODE (init) == STRING_CST
05bccae2
RK
6910 && 0 > compare_tree_int (index,
6911 TREE_STRING_LENGTH (init)))
5c80f6e6
JJ
6912 {
6913 tree type = TREE_TYPE (TREE_TYPE (init));
6914 enum machine_mode mode = TYPE_MODE (type);
6915
6916 if (GET_MODE_CLASS (mode) == MODE_INT
6917 && GET_MODE_SIZE (mode) == 1)
6918 return (GEN_INT
6919 (TREE_STRING_POINTER
6920 (init)[TREE_INT_CST_LOW (index)]));
6921 }
742920c7
RK
6922 }
6923 }
6924 }
3a94c984 6925 /* Fall through. */
bbf6f052
RK
6926
6927 case COMPONENT_REF:
6928 case BIT_FIELD_REF:
b4e3fabb 6929 case ARRAY_RANGE_REF:
4af3895e 6930 /* If the operand is a CONSTRUCTOR, we can just extract the
7a0b7b9a
RK
6931 appropriate field if it is present. Don't do this if we have
6932 already written the data since we want to refer to that copy
6933 and varasm.c assumes that's what we'll do. */
b4e3fabb 6934 if (code == COMPONENT_REF
7a0b7b9a
RK
6935 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6936 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
4af3895e
JVA
6937 {
6938 tree elt;
6939
6940 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6941 elt = TREE_CHAIN (elt))
86b5812c
RK
6942 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6943 /* We can normally use the value of the field in the
6944 CONSTRUCTOR. However, if this is a bitfield in
6945 an integral mode that we can fit in a HOST_WIDE_INT,
6946 we must mask only the number of bits in the bitfield,
6947 since this is done implicitly by the constructor. If
6948 the bitfield does not meet either of those conditions,
6949 we can't do this optimization. */
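/* Illustrative widths: an unsigned 3-bit field is masked with 7,
   while a signed 3-bit field in SImode is shifted left 29 and then
   arithmetic-shifted right 29 so its sign bit propagates.  */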
6950 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6951 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6952 == MODE_INT)
6953 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6954 <= HOST_BITS_PER_WIDE_INT))))
6955 {
3a94c984 6956 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
86b5812c
RK
6957 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6958 {
9df2c88c
RK
6959 HOST_WIDE_INT bitsize
6960 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
86b5812c
RK
6961
6962 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6963 {
6964 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6965 op0 = expand_and (op0, op1, target);
6966 }
6967 else
6968 {
e5e809f4
JL
6969 enum machine_mode imode
6970 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
86b5812c 6971 tree count
e5e809f4
JL
6972 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6973 0);
86b5812c
RK
6974
6975 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6976 target, 0);
6977 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6978 target, 0);
6979 }
6980 }
6981
6982 return op0;
6983 }
4af3895e
JVA
6984 }
6985
bbf6f052
RK
6986 {
6987 enum machine_mode mode1;
770ae6cc 6988 HOST_WIDE_INT bitsize, bitpos;
7bb0943f 6989 tree offset;
bbf6f052 6990 int volatilep = 0;
729a2125 6991 unsigned int alignment;
839c4796
RK
6992 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6993 &mode1, &unsignedp, &volatilep,
6994 &alignment);
bbf6f052 6995
e7f3c83f
RK
6996 /* If we got back the original object, something is wrong. Perhaps
6997 we are evaluating an expression too early. In any event, don't
6998 infinitely recurse. */
6999 if (tem == exp)
7000 abort ();
7001
3d27140a 7002 /* If TEM's type is a union of variable size, pass TARGET to the inner
b74f5ff2
RK
7003 computation, since it will need a temporary and TARGET is known
7004 to be safe to use. This occurs in unchecked conversion in Ada. */
3a94c984 7005
b74f5ff2
RK
7006 op0 = expand_expr (tem,
7007 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7008 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7009 != INTEGER_CST)
7010 ? target : NULL_RTX),
4ed67205 7011 VOIDmode,
14a774a9
RK
7012 (modifier == EXPAND_INITIALIZER
7013 || modifier == EXPAND_CONST_ADDRESS)
e5e809f4 7014 ? modifier : EXPAND_NORMAL);
bbf6f052 7015
8c8a8e34 7016 /* If this is a constant, put it into a register if it is a
14a774a9 7017 legitimate constant and OFFSET is 0; put it into memory if not. */
8c8a8e34
JW
7018 if (CONSTANT_P (op0))
7019 {
7020 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
14a774a9
RK
7021 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7022 && offset == 0)
8c8a8e34
JW
7023 op0 = force_reg (mode, op0);
7024 else
7025 op0 = validize_mem (force_const_mem (mode, op0));
7026 }
7027
7bb0943f
RS
7028 if (offset != 0)
7029 {
906c4e36 7030 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7bb0943f 7031
a2725049 7032 /* If this object is in a register, put it into memory.
14a774a9
RK
7033 This case can't occur in C, but can in Ada if we have
7034 unchecked conversion of an expression from a scalar type to
7035 an array or record type. */
7036 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7037 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7038 {
d04218c0
RK
7039 /* If the operand is a SAVE_EXPR, we can deal with this by
7040 forcing the SAVE_EXPR into memory. */
7041 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7042 put_var_into_stack (TREE_OPERAND (exp, 0));
7043 else
7044 {
7045 tree nt
7046 = build_qualified_type (TREE_TYPE (tem),
7047 (TYPE_QUALS (TREE_TYPE (tem))
7048 | TYPE_QUAL_CONST));
7049 rtx memloc = assign_temp (nt, 1, 1, 1);
7050
7051 mark_temp_addr_taken (memloc);
7052 emit_move_insn (memloc, op0);
7053 op0 = memloc;
7054 }
14a774a9
RK
7055 }
7056
7bb0943f
RS
7057 if (GET_CODE (op0) != MEM)
7058 abort ();
2d48c13d
JL
7059
7060 if (GET_MODE (offset_rtx) != ptr_mode)
bd070e1a 7061 {
2d48c13d 7062#ifdef POINTERS_EXTEND_UNSIGNED
822a3443 7063 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
2d48c13d 7064#else
bd070e1a 7065 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
2d48c13d 7066#endif
bd070e1a 7067 }
2d48c13d 7068
14a774a9 7069 /* A constant address in OP0 can have VOIDmode; we must not try
efd07ca7 7070 to call force_reg in that case, so avoid it. */
89752202
HB
7071 if (GET_CODE (op0) == MEM
7072 && GET_MODE (op0) == BLKmode
efd07ca7 7073 && GET_MODE (XEXP (op0, 0)) != VOIDmode
14a774a9 7074 && bitsize != 0
3a94c984 7075 && (bitpos % bitsize) == 0
89752202 7076 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
19caa751 7077 && alignment == GET_MODE_ALIGNMENT (mode1))
89752202
HB
7078 {
7079 rtx temp = change_address (op0, mode1,
7080 plus_constant (XEXP (op0, 0),
7081 (bitpos /
7082 BITS_PER_UNIT)));
7083 if (GET_CODE (XEXP (temp, 0)) == REG)
7084 op0 = temp;
7085 else
7086 op0 = change_address (op0, mode1,
7087 force_reg (GET_MODE (XEXP (temp, 0)),
7088 XEXP (temp, 0)));
7089 bitpos = 0;
7090 }
7091
7bb0943f 7092 op0 = change_address (op0, VOIDmode,
38a448ca 7093 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
c5c76735
JL
7094 force_reg (ptr_mode,
7095 offset_rtx)));
7bb0943f
RS
7096 }
7097
bbf6f052
RK
7098 /* Don't forget about volatility even if this is a bitfield. */
7099 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7100 {
7101 op0 = copy_rtx (op0);
7102 MEM_VOLATILE_P (op0) = 1;
7103 }
7104
921b3427 7105 /* Check the access. */
32919a0d
RK
7106 if (cfun != 0 && current_function_check_memory_usage
7107 && GET_CODE (op0) == MEM)
3a94c984 7108 {
921b3427
RK
7109 enum memory_use_mode memory_usage;
7110 memory_usage = get_memory_usage_from_modifier (modifier);
7111
7112 if (memory_usage != MEMORY_USE_DONT)
7113 {
7114 rtx to;
7115 int size;
7116
7117 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
7118 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
7119
7120 /* Check the access right of the pointer. */
ea4da9db 7121 in_check_memory_usage = 1;
e9a25f70 7122 if (size > BITS_PER_UNIT)
ebb1b59a
BS
7123 emit_library_call (chkr_check_addr_libfunc,
7124 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, to,
7125 Pmode, GEN_INT (size / BITS_PER_UNIT),
e9a25f70 7126 TYPE_MODE (sizetype),
3a94c984 7127 GEN_INT (memory_usage),
956d6950 7128 TYPE_MODE (integer_type_node));
ea4da9db 7129 in_check_memory_usage = 0;
921b3427
RK
7130 }
7131 }
7132
ccc98036
RS
7133 /* In cases where an aligned union has an unaligned object
7134 as a field, we might be extracting a BLKmode value from
7135 an integer-mode (e.g., SImode) object. Handle this case
7136 by doing the extract into an object as wide as the field
7137 (which we know to be the width of a basic mode), then
cb5fa0f8 7138 storing into memory, and changing the mode to BLKmode. */
bbf6f052 7139 if (mode1 == VOIDmode
ccc98036 7140 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
cb5fa0f8
RK
7141 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7142 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7143 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
7144 /* If the field isn't aligned enough to fetch as a memref,
7145 fetch it as a bit field. */
7146 || (mode1 != BLKmode
7147 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
7148 && ((TYPE_ALIGN (TREE_TYPE (tem))
7149 < GET_MODE_ALIGNMENT (mode))
7150 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7151 /* If the type and the field are a constant size and the
7152 size of the type isn't the same size as the bitfield,
7153 we must use bitfield operations. */
7154 || (bitsize >= 0
7155 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7156 == INTEGER_CST)
7157 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7158 bitsize))
7159 || (mode == BLKmode
e1565e65 7160 && SLOW_UNALIGNED_ACCESS (mode, alignment)
19caa751 7161 && (TYPE_ALIGN (type) > alignment
14a774a9 7162 || bitpos % TYPE_ALIGN (type) != 0)))
bbf6f052 7163 {
bbf6f052
RK
7164 enum machine_mode ext_mode = mode;
7165
14a774a9
RK
7166 if (ext_mode == BLKmode
7167 && ! (target != 0 && GET_CODE (op0) == MEM
7168 && GET_CODE (target) == MEM
7169 && bitpos % BITS_PER_UNIT == 0))
bbf6f052
RK
7170 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7171
7172 if (ext_mode == BLKmode)
a281e72d
RK
7173 {
7174 /* In this case, BITPOS must start at a byte boundary and
7175 TARGET, if specified, must be a MEM. */
7176 if (GET_CODE (op0) != MEM
7177 || (target != 0 && GET_CODE (target) != MEM)
7178 || bitpos % BITS_PER_UNIT != 0)
7179 abort ();
7180
7181 op0 = change_address (op0, VOIDmode,
7182 plus_constant (XEXP (op0, 0),
7183 bitpos / BITS_PER_UNIT));
7184 if (target == 0)
7185 target = assign_temp (type, 0, 1, 1);
7186
7187 emit_block_move (target, op0,
bd5dab53
RK
7188 bitsize == -1 ? expr_size (exp)
7189 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7190 / BITS_PER_UNIT),
19caa751 7191 BITS_PER_UNIT);
3a94c984 7192
a281e72d
RK
7193 return target;
7194 }
bbf6f052 7195
dc6d66b3
RK
7196 op0 = validize_mem (op0);
7197
7198 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
bdb429a5 7199 mark_reg_pointer (XEXP (op0, 0), alignment);
dc6d66b3
RK
7200
7201 op0 = extract_bit_field (op0, bitsize, bitpos,
bbf6f052 7202 unsignedp, target, ext_mode, ext_mode,
034f9101 7203 alignment,
bbf6f052 7204 int_size_in_bytes (TREE_TYPE (tem)));
ef19912d
RK
7205
7206 /* If the result is a record type and BITSIZE is narrower than
7207 the mode of OP0, an integral mode, and this is a big endian
7208 machine, we must put the field into the high-order bits. */
7209 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7210 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7211 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
7212 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7213 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7214 - bitsize),
7215 op0, 1);
7216
bbf6f052
RK
7217 if (mode == BLKmode)
7218 {
27fb3e16 7219 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
1da68f56
RK
7220 TYPE_QUAL_CONST);
7221 rtx new = assign_temp (nt, 0, 1, 1);
bbf6f052
RK
7222
7223 emit_move_insn (new, op0);
7224 op0 = copy_rtx (new);
7225 PUT_MODE (op0, BLKmode);
7226 }
7227
7228 return op0;
7229 }
7230
05019f83
RK
7231 /* If the result is BLKmode, use that to access the object
7232 now as well. */
7233 if (mode == BLKmode)
7234 mode1 = BLKmode;
7235
bbf6f052
RK
7236 /* Get a reference to just this component. */
7237 if (modifier == EXPAND_CONST_ADDRESS
7238 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
fe7a8445
RK
7239 {
7240 rtx new = gen_rtx_MEM (mode1,
7241 plus_constant (XEXP (op0, 0),
7242 (bitpos / BITS_PER_UNIT)));
7243
7244 MEM_COPY_ATTRIBUTES (new, op0);
7245 op0 = new;
7246 }
bbf6f052
RK
7247 else
7248 op0 = change_address (op0, mode1,
7249 plus_constant (XEXP (op0, 0),
7250 (bitpos / BITS_PER_UNIT)));
41472af8 7251
3bdf5ad1 7252 set_mem_attributes (op0, exp, 0);
dc6d66b3 7253 if (GET_CODE (XEXP (op0, 0)) == REG)
bdb429a5 7254 mark_reg_pointer (XEXP (op0, 0), alignment);
dc6d66b3 7255
bbf6f052 7256 MEM_VOLATILE_P (op0) |= volatilep;
0d15e60c 7257 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
08bbd316 7258 || modifier == EXPAND_CONST_ADDRESS
0d15e60c 7259 || modifier == EXPAND_INITIALIZER)
bbf6f052 7260 return op0;
0d15e60c 7261 else if (target == 0)
bbf6f052 7262 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
0d15e60c 7263
bbf6f052
RK
7264 convert_move (target, op0, unsignedp);
7265 return target;
7266 }
7267
bbf6f052
RK
7268 /* Intended for a reference to a buffer of a file-object in Pascal.
7269 But it's not certain that a special tree code will really be
7270 necessary for these. INDIRECT_REF might work for them. */
7271 case BUFFER_REF:
7272 abort ();
7273
7308a047 7274 case IN_EXPR:
7308a047 7275 {
d6a5ac33
RK
7276 /* Pascal set IN expression.
7277
7278 Algorithm:
7279 rlo = set_low - (set_low%bits_per_word);
7280 the_word = set [ (index - rlo)/bits_per_word ];
7281 bit_index = index % bits_per_word;
7282 bitmask = 1 << bit_index;
7283 return !!(the_word & bitmask); */
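/* Worked instance of the sketch above, assuming 8-bit units:
   index 19 with rlo 0 gives the_word = set[2] and bit_index = 3,
   i.e. we test bit 3 of the third unit of the set.  */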
7284
7308a047
RS
7285 tree set = TREE_OPERAND (exp, 0);
7286 tree index = TREE_OPERAND (exp, 1);
d6a5ac33 7287 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7308a047 7288 tree set_type = TREE_TYPE (set);
7308a047
RS
7289 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7290 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
d6a5ac33
RK
7291 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7292 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7293 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7294 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7295 rtx setaddr = XEXP (setval, 0);
7296 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7308a047
RS
7297 rtx rlow;
7298 rtx diff, quo, rem, addr, bit, result;
7308a047 7299
d6a5ac33
RK
7300 /* If domain is empty, answer is no. Likewise if index is constant
7301 and out of bounds. */
51723711 7302 if (((TREE_CODE (set_high_bound) == INTEGER_CST
d6a5ac33 7303 && TREE_CODE (set_low_bound) == INTEGER_CST
51723711 7304 && tree_int_cst_lt (set_high_bound, set_low_bound))
d6a5ac33
RK
7305 || (TREE_CODE (index) == INTEGER_CST
7306 && TREE_CODE (set_low_bound) == INTEGER_CST
7307 && tree_int_cst_lt (index, set_low_bound))
7308 || (TREE_CODE (set_high_bound) == INTEGER_CST
7309 && TREE_CODE (index) == INTEGER_CST
7310 && tree_int_cst_lt (set_high_bound, index))))
7308a047
RS
7311 return const0_rtx;
7312
d6a5ac33
RK
7313 if (target == 0)
7314 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7308a047
RS
7315
7316 /* If we get here, we have to generate the code for both cases
7317 (in range and out of range). */
7318
7319 op0 = gen_label_rtx ();
7320 op1 = gen_label_rtx ();
7321
7322 if (! (GET_CODE (index_val) == CONST_INT
7323 && GET_CODE (lo_r) == CONST_INT))
7324 {
c5d5d461
JL
7325 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7326 GET_MODE (index_val), iunsignedp, 0, op1);
7308a047
RS
7327 }
7328
7329 if (! (GET_CODE (index_val) == CONST_INT
7330 && GET_CODE (hi_r) == CONST_INT))
7331 {
c5d5d461
JL
7332 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7333 GET_MODE (index_val), iunsignedp, 0, op1);
7308a047
RS
7334 }
7335
7336 /* Calculate the element number of bit zero in the first word
7337 of the set. */
7338 if (GET_CODE (lo_r) == CONST_INT)
17938e57 7339 rlow = GEN_INT (INTVAL (lo_r)
3a94c984 7340 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7308a047 7341 else
17938e57
RK
7342 rlow = expand_binop (index_mode, and_optab, lo_r,
7343 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
d6a5ac33 7344 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7308a047 7345
d6a5ac33
RK
7346 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7347 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7308a047
RS
7348
7349 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
d6a5ac33 7350 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7308a047 7351 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
d6a5ac33
RK
7352 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7353
7308a047 7354 addr = memory_address (byte_mode,
d6a5ac33
RK
7355 expand_binop (index_mode, add_optab, diff,
7356 setaddr, NULL_RTX, iunsignedp,
17938e57 7357 OPTAB_LIB_WIDEN));
d6a5ac33 7358
3a94c984 7359 /* Extract the bit we want to examine. */
7308a047 7360 bit = expand_shift (RSHIFT_EXPR, byte_mode,
38a448ca 7361 gen_rtx_MEM (byte_mode, addr),
17938e57
RK
7362 make_tree (TREE_TYPE (index), rem),
7363 NULL_RTX, 1);
7364 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7365 GET_MODE (target) == byte_mode ? target : 0,
7308a047 7366 1, OPTAB_LIB_WIDEN);
17938e57
RK
7367
7368 if (result != target)
7369 convert_move (target, result, 1);
7308a047
RS
7370
7371 /* Output the code to handle the out-of-range case. */
7372 emit_jump (op0);
7373 emit_label (op1);
7374 emit_move_insn (target, const0_rtx);
7375 emit_label (op0);
7376 return target;
7377 }
7378
bbf6f052
RK
7379 case WITH_CLEANUP_EXPR:
7380 if (RTL_EXPR_RTL (exp) == 0)
7381 {
7382 RTL_EXPR_RTL (exp)
921b3427 7383 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
e976b8b2
MS
7384 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
7385
bbf6f052
RK
7386 /* That's it for this cleanup. */
7387 TREE_OPERAND (exp, 2) = 0;
7388 }
7389 return RTL_EXPR_RTL (exp);
7390
5dab5552
MS
7391 case CLEANUP_POINT_EXPR:
7392 {
e976b8b2
MS
7393 /* Start a new binding layer that will keep track of all cleanup
7394 actions to be performed. */
8e91754e 7395 expand_start_bindings (2);
e976b8b2 7396
d93d4205 7397 target_temp_slot_level = temp_slot_level;
e976b8b2 7398
921b3427 7399 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
f283f66b
JM
7400 /* If we're going to use this value, load it up now. */
7401 if (! ignore)
7402 op0 = force_not_mem (op0);
d93d4205 7403 preserve_temp_slots (op0);
e976b8b2 7404 expand_end_bindings (NULL_TREE, 0, 0);
5dab5552
MS
7405 }
7406 return op0;
7407
bbf6f052
RK
7408 case CALL_EXPR:
7409 /* Check for a built-in function. */
7410 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
d6a5ac33
RK
7411 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7412 == FUNCTION_DECL)
bbf6f052 7413 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
c70eaeaf
KG
7414 {
7415 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7416 == BUILT_IN_FRONTEND)
7417 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7418 else
7419 return expand_builtin (exp, target, subtarget, tmode, ignore);
7420 }
d6a5ac33 7421
8129842c 7422 return expand_call (exp, target, ignore);
bbf6f052
RK
7423
7424 case NON_LVALUE_EXPR:
7425 case NOP_EXPR:
7426 case CONVERT_EXPR:
7427 case REFERENCE_EXPR:
4a53008b 7428 if (TREE_OPERAND (exp, 0) == error_mark_node)
a592f288 7429 return const0_rtx;
4a53008b 7430
bbf6f052
RK
7431 if (TREE_CODE (type) == UNION_TYPE)
7432 {
7433 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
14a774a9
RK
7434
7435 /* If both input and output are BLKmode, this conversion
7436 isn't actually doing anything unless we need to make the
7437 alignment stricter. */
7438 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7439 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7440 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7441 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7442 modifier);
7443
bbf6f052 7444 if (target == 0)
1da68f56 7445 target = assign_temp (type, 0, 1, 1);
d6a5ac33 7446
bbf6f052
RK
7447 if (GET_CODE (target) == MEM)
7448 /* Store data into beginning of memory target. */
7449 store_expr (TREE_OPERAND (exp, 0),
1499e0a8
RK
7450 change_address (target, TYPE_MODE (valtype), 0), 0);
7451
bbf6f052
RK
7452 else if (GET_CODE (target) == REG)
7453 /* Store this field into a union of the proper type. */
14a774a9
RK
7454 store_field (target,
7455 MIN ((int_size_in_bytes (TREE_TYPE
7456 (TREE_OPERAND (exp, 0)))
7457 * BITS_PER_UNIT),
8752c357 7458 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
14a774a9 7459 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7156dead
RK
7460 VOIDmode, 0, BITS_PER_UNIT,
7461 int_size_in_bytes (type), 0);
bbf6f052
RK
7462 else
7463 abort ();
7464
7465 /* Return the entire union. */
7466 return target;
7467 }
d6a5ac33 7468
7f62854a
RK
7469 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7470 {
7471 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
921b3427 7472 ro_modifier);
7f62854a
RK
7473
7474 /* If the signedness of the conversion differs and OP0 is
7475 a promoted SUBREG, clear that indication since we now
7476 have to do the proper extension. */
7477 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7478 && GET_CODE (op0) == SUBREG)
7479 SUBREG_PROMOTED_VAR_P (op0) = 0;
7480
7481 return op0;
7482 }
7483
1499e0a8 7484 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
12342f90
RS
7485 if (GET_MODE (op0) == mode)
7486 return op0;
12342f90 7487
d6a5ac33
RK
7488 /* If OP0 is a constant, just convert it into the proper mode. */
7489 if (CONSTANT_P (op0))
7490 return
7491 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7492 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
12342f90 7493
26fcb35a 7494 if (modifier == EXPAND_INITIALIZER)
38a448ca 7495 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
d6a5ac33 7496
bbf6f052 7497 if (target == 0)
d6a5ac33
RK
7498 return
7499 convert_to_mode (mode, op0,
7500 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
bbf6f052 7501 else
d6a5ac33
RK
7502 convert_move (target, op0,
7503 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
bbf6f052
RK
7504 return target;
7505
7506 case PLUS_EXPR:
0f41302f
MS
7507 /* We come here from MINUS_EXPR when the second operand is a
7508 constant. */
bbf6f052 7509 plus_expr:
91ce572a
CC
7510 this_optab = ! unsignedp && flag_trapv
7511 && (GET_MODE_CLASS(mode) == MODE_INT)
7512 ? addv_optab : add_optab;
bbf6f052
RK
7513
7514 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7515 something else, make sure we add the register to the constant and
7516 then to the other thing. This case can occur during strength
7517 reduction and doing it this way will produce better code if the
7518 frame pointer or argument pointer is eliminated.
7519
7520 fold-const.c will ensure that the constant is always in the inner
7521 PLUS_EXPR, so the only case we need to do anything about is if
7522 sp, ap, or fp is our second argument, in which case we must swap
7523 the innermost first argument and our second argument. */
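/* Concretely: (PLUS (PLUS x const) fp) is rewritten here as
   (PLUS (PLUS fp const) x), so the constant can be combined with
   the frame pointer once that register is eliminated.  */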
7524
7525 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7526 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7527 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7528 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7529 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7530 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7531 {
7532 tree t = TREE_OPERAND (exp, 1);
7533
7534 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7535 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7536 }
7537
88f63c77 7538 /* If the result is to be ptr_mode and we are adding an integer to
bbf6f052
RK
7539 something, we might be forming a constant. So try to use
7540 plus_constant. If it produces a sum and we can't accept it,
7541 use force_operand. This allows P = &ARR[const] to generate
7542 efficient code on machines where a SYMBOL_REF is not a valid
7543 address.
7544
7545 If this is an EXPAND_SUM call, always return the sum. */
c980ac49 7546 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
91ce572a 7547 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
bbf6f052 7548 {
c980ac49
RS
7549 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7550 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7551 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7552 {
cbbc503e
JL
7553 rtx constant_part;
7554
c980ac49
RS
7555 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7556 EXPAND_SUM);
cbbc503e
JL
7557 /* Use immed_double_const to ensure that the constant is
7558 truncated according to the mode of OP1, then sign extended
7559 to a HOST_WIDE_INT. Using the constant directly can result
7560 in non-canonical RTL in a 64x32 cross compile. */
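/* E.g., with a 64-bit HOST_WIDE_INT and 32-bit SImode, the value
   0x80000000 must be represented as the sign-extended CONST_INT
   -2147483648 for the RTL to remain canonical.  */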
7561 constant_part
7562 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7563 (HOST_WIDE_INT) 0,
a5efcd63 7564 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7f401c74 7565 op1 = plus_constant (op1, INTVAL (constant_part));
c980ac49
RS
7566 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7567 op1 = force_operand (op1, target);
7568 return op1;
7569 }
bbf6f052 7570
c980ac49
RS
7571 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7572 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7573 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7574 {
cbbc503e
JL
7575 rtx constant_part;
7576
c980ac49
RS
7577 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7578 EXPAND_SUM);
7579 if (! CONSTANT_P (op0))
7580 {
7581 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7582 VOIDmode, modifier);
709f5be1
RS
7583 /* Don't go to both_summands if modifier
7584 says it's not right to return a PLUS. */
7585 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7586 goto binop2;
c980ac49
RS
7587 goto both_summands;
7588 }
cbbc503e
JL
7589 /* Use immed_double_const to ensure that the constant is
7590 truncated according to the mode of OP1, then sign extended
7591 to a HOST_WIDE_INT. Using the constant directly can result
7592 in non-canonical RTL in a 64x32 cross compile. */
7593 constant_part
7594 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7595 (HOST_WIDE_INT) 0,
2a94e396 7596 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7f401c74 7597 op0 = plus_constant (op0, INTVAL (constant_part));
c980ac49
RS
7598 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7599 op0 = force_operand (op0, target);
7600 return op0;
7601 }
bbf6f052
RK
7602 }
7603
7604 /* No sense saving up arithmetic to be done
7605 if it's all in the wrong mode to form part of an address.
7606 And force_operand won't know whether to sign-extend or
7607 zero-extend. */
7608 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
88f63c77 7609 || mode != ptr_mode)
c980ac49 7610 goto binop;
bbf6f052 7611
e5e809f4 7612 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
7613 subtarget = 0;
7614
921b3427
RK
7615 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7616 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
bbf6f052 7617
c980ac49 7618 both_summands:
bbf6f052
RK
7619 /* Make sure any term that's a sum with a constant comes last. */
7620 if (GET_CODE (op0) == PLUS
7621 && CONSTANT_P (XEXP (op0, 1)))
7622 {
7623 temp = op0;
7624 op0 = op1;
7625 op1 = temp;
7626 }
7627 /* If adding to a sum including a constant,
7628 associate it to put the constant outside. */
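/* E.g., op0 + (x + 7) is reassociated toward (op0 + x) + 7, so the
   constant can end up as an address displacement.  */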
7629 if (GET_CODE (op1) == PLUS
7630 && CONSTANT_P (XEXP (op1, 1)))
7631 {
7632 rtx constant_term = const0_rtx;
7633
7634 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7635 if (temp != 0)
7636 op0 = temp;
6f90e075
JW
7637 /* Ensure that MULT comes first if there is one. */
7638 else if (GET_CODE (op0) == MULT)
38a448ca 7639 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
bbf6f052 7640 else
38a448ca 7641 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
bbf6f052
RK
7642
7643 /* Let's also eliminate constants from op0 if possible. */
7644 op0 = eliminate_constant_term (op0, &constant_term);
7645
7646 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
3a94c984 7647 their sum should be a constant. Form it into OP1, since the
bbf6f052
RK
7648 result we want will then be OP0 + OP1. */
7649
7650 temp = simplify_binary_operation (PLUS, mode, constant_term,
7651 XEXP (op1, 1));
7652 if (temp != 0)
7653 op1 = temp;
7654 else
38a448ca 7655 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
bbf6f052
RK
7656 }
7657
7658 /* Put a constant term last and put a multiplication first. */
7659 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7660 temp = op1, op1 = op0, op0 = temp;
7661
7662 temp = simplify_binary_operation (PLUS, mode, op0, op1);
38a448ca 7663 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
bbf6f052
RK
7664
7665 case MINUS_EXPR:
ea87523e
RK
7666 /* For initializers, we are allowed to return a MINUS of two
7667 symbolic constants. Here we handle all cases when both operands
7668 are constant. */
bbf6f052
RK
7669 /* Handle difference of two symbolic constants,
7670 for the sake of an initializer. */
7671 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7672 && really_constant_p (TREE_OPERAND (exp, 0))
7673 && really_constant_p (TREE_OPERAND (exp, 1)))
7674 {
906c4e36 7675 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
921b3427 7676 VOIDmode, ro_modifier);
906c4e36 7677 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
921b3427 7678 VOIDmode, ro_modifier);
ea87523e 7679
ea87523e
RK
7680 /* If the last operand is a CONST_INT, use plus_constant of
7681 the negated constant. Else make the MINUS. */
7682 if (GET_CODE (op1) == CONST_INT)
7683 return plus_constant (op0, - INTVAL (op1));
7684 else
38a448ca 7685 return gen_rtx_MINUS (mode, op0, op1);
bbf6f052
RK
7686 }
7687 /* Convert A - const to A + (-const). */
7688 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7689 {
ae431183
RK
7690 tree negated = fold (build1 (NEGATE_EXPR, type,
7691 TREE_OPERAND (exp, 1)));
7692
ae431183 7693 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
6fbfac92
JM
7694 /* If we can't negate the constant in TYPE, leave it alone and
7695 expand_binop will negate it for us. We used to try to do it
7696 here in the signed version of TYPE, but that doesn't work
7697 on POINTER_TYPEs. */;
ae431183
RK
7698 else
7699 {
7700 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7701 goto plus_expr;
7702 }
bbf6f052 7703 }
91ce572a
CC
7704 this_optab = ! unsignedp && flag_trapv
7705 && (GET_MODE_CLASS(mode) == MODE_INT)
7706 ? subv_optab : sub_optab;
bbf6f052
RK
7707 goto binop;
7708
7709 case MULT_EXPR:
bbf6f052
RK
7710 /* If first operand is constant, swap them.
7711 Thus the following special case checks need only
7712 check the second operand. */
7713 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7714 {
7715 register tree t1 = TREE_OPERAND (exp, 0);
7716 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7717 TREE_OPERAND (exp, 1) = t1;
7718 }
7719
7720 /* Attempt to return something suitable for generating an
7721 indexed address, for machines that support that. */
7722
88f63c77 7723 if (modifier == EXPAND_SUM && mode == ptr_mode
bbf6f052 7724 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
906c4e36 7725 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
bbf6f052 7726 {
921b3427
RK
7727 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7728 EXPAND_SUM);
bbf6f052
RK
7729
7730 /* Apply distributive law if OP0 is x+c. */
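/* E.g., (x + 4) * 8 becomes (plus (mult x 8) 32), a shape that can
   be used directly inside an address (illustrative values).  */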
7731 if (GET_CODE (op0) == PLUS
7732 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
c5c76735
JL
7733 return
7734 gen_rtx_PLUS
7735 (mode,
7736 gen_rtx_MULT
7737 (mode, XEXP (op0, 0),
7738 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7739 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7740 * INTVAL (XEXP (op0, 1))));
bbf6f052
RK
7741
7742 if (GET_CODE (op0) != REG)
906c4e36 7743 op0 = force_operand (op0, NULL_RTX);
bbf6f052
RK
7744 if (GET_CODE (op0) != REG)
7745 op0 = copy_to_mode_reg (mode, op0);
7746
c5c76735
JL
7747 return
7748 gen_rtx_MULT (mode, op0,
7749 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
bbf6f052
RK
7750 }
7751
e5e809f4 7752 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
7753 subtarget = 0;
7754
7755 /* Check for multiplying things that have been extended
7756 from a narrower type. If this machine supports multiplying
7757 in that narrower type with a result in the desired type,
7758 do it that way, and avoid the explicit type-conversion. */
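/* E.g., two HImode values sign-extended to SImode can feed a
   mulhisi-style widening-multiply pattern, when the target provides
   one, instead of being extended first and multiplied in SImode.  */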
7759 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7760 && TREE_CODE (type) == INTEGER_TYPE
7761 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7762 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7763 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7764 && int_fits_type_p (TREE_OPERAND (exp, 1),
7765 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7766 /* Don't use a widening multiply if a shift will do. */
7767 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
906c4e36 7768 > HOST_BITS_PER_WIDE_INT)
bbf6f052
RK
7769 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7770 ||
7771 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7772 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7773 ==
7774 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7775 /* If both operands are extended, they must either both
7776 be zero-extended or both be sign-extended. */
7777 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7778 ==
7779 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7780 {
7781 enum machine_mode innermode
7782 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
b10af0c8
TG
7783 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7784 ? smul_widen_optab : umul_widen_optab);
bbf6f052
RK
7785 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7786 ? umul_widen_optab : smul_widen_optab);
b10af0c8 7787 if (mode == GET_MODE_WIDER_MODE (innermode))
bbf6f052 7788 {
b10af0c8
TG
7789 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7790 {
7791 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7792 NULL_RTX, VOIDmode, 0);
7793 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7794 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7795 VOIDmode, 0);
7796 else
7797 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7798 NULL_RTX, VOIDmode, 0);
7799 goto binop2;
7800 }
7801 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7802 && innermode == word_mode)
7803 {
7804 rtx htem;
7805 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7806 NULL_RTX, VOIDmode, 0);
7807 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8c118062
GK
7808 op1 = convert_modes (innermode, mode,
7809 expand_expr (TREE_OPERAND (exp, 1),
7810 NULL_RTX, VOIDmode, 0),
7811 unsignedp);
b10af0c8
TG
7812 else
7813 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7814 NULL_RTX, VOIDmode, 0);
7815 temp = expand_binop (mode, other_optab, op0, op1, target,
7816 unsignedp, OPTAB_LIB_WIDEN);
7817 htem = expand_mult_highpart_adjust (innermode,
7818 gen_highpart (innermode, temp),
7819 op0, op1,
7820 gen_highpart (innermode, temp),
7821 unsignedp);
7822 emit_move_insn (gen_highpart (innermode, temp), htem);
7823 return temp;
7824 }
bbf6f052
RK
7825 }
7826 }
7827 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 7828 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7829 return expand_mult (mode, op0, op1, target, unsignedp);
7830
7831 case TRUNC_DIV_EXPR:
7832 case FLOOR_DIV_EXPR:
7833 case CEIL_DIV_EXPR:
7834 case ROUND_DIV_EXPR:
7835 case EXACT_DIV_EXPR:
e5e809f4 7836 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
7837 subtarget = 0;
7838 /* Possible optimization: compute the dividend with EXPAND_SUM
7839 then if the divisor is constant can optimize the case
7840 where some terms of the dividend have coeffs divisible by it. */
7841 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 7842 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7843 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);

    case RDIV_EXPR:
      this_optab = flodiv_optab;
      goto binop;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);

    case FIX_ROUND_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_CEIL_EXPR:
      abort ();			/* Not used for C.  */

    case FIX_TRUNC_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0)
	target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);
      return target;

    case FLOAT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0)
	target = gen_reg_rtx (mode);
      /* expand_float can't figure out what to do if FROM has VOIDmode.
	 So give it the correct mode.  With -O, cse will optimize this.  */
      if (GET_MODE (op0) == VOIDmode)
	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
				op0);
      expand_float (target, op0,
		    TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;
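
      /* Illustrative aside (not part of the original expr.c): C's
	 conversions truncate toward zero, so FIX_TRUNC_EXPR and
	 FLOAT_EXPR are the only float/integer conversion codes the C
	 front end generates:

	     int    to_int    (double d) { return d; }   ... FIX_TRUNC_EXPR
	     double to_double (int i)    { return i; }   ... FLOAT_EXPR

	 The rounding, floor, and ceiling variants above are reserved
	 for languages with other conversion semantics, hence the
	 abort ().  */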

    case NEGATE_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode,
			  ! unsignedp && flag_trapv
			  && (GET_MODE_CLASS (mode) == MODE_INT)
			  ? negv_optab : neg_optab, op0, target, 0);
      if (temp == 0)
	abort ();
      return temp;

    case ABS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);

      /* Handle complex values specially.  */
      if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
	return expand_complex_abs (mode, op0, target, unsignedp);

      /* Unsigned abs is simply the operand.  Testing here means we don't
	 risk generating incorrect code below.  */
      if (TREE_UNSIGNED (type))
	return op0;

      return expand_abs (mode, op0, target, unsignedp,
			 safe_from_p (target, TREE_OPERAND (exp, 0), 1));

    case MAX_EXPR:
    case MIN_EXPR:
      target = original_target;
      if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
	  || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
	  || GET_MODE (target) != mode
	  || (GET_CODE (target) == REG
	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
	target = gen_reg_rtx (mode);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);

      /* First try to do it with a special MIN or MAX instruction.
	 If that does not win, use a conditional jump to select the proper
	 value.  */
      this_optab = (TREE_UNSIGNED (type)
		    ? (code == MIN_EXPR ? umin_optab : umax_optab)
		    : (code == MIN_EXPR ? smin_optab : smax_optab));

      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
			   OPTAB_WIDEN);
      if (temp != 0)
	return temp;

      /* At this point, a MEM target is no longer useful; we will get better
	 code without it.  */

      if (GET_CODE (target) == MEM)
	target = gen_reg_rtx (mode);

      if (target != op0)
	emit_move_insn (target, op0);

      op0 = gen_label_rtx ();

      /* If this mode is an integer too wide to compare properly,
	 compare word by word.  Rely on cse to optimize constant cases.  */
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && ! can_compare_p (GE, mode, ccp_jump))
	{
	  if (code == MAX_EXPR)
	    do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
					  target, op1, NULL_RTX, op0);
	  else
	    do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
					  op1, target, NULL_RTX, op0);
	}
      else
	{
	  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
	  do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
				   unsignedp, mode, NULL_RTX, 0, NULL_RTX,
				   op0);
	}
      emit_move_insn (target, op1);
      emit_label (op0);
      return target;
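
      /* Illustrative aside (not part of the original expr.c): MIN_EXPR
	 and MAX_EXPR typically come from front-end folding of simple
	 conditionals (e.g. fold can rewrite "a > b ? a : b" as a
	 MAX_EXPR) or from the GNU C++ minimum/maximum operators
	 "a <? b" and "a >? b".  On a target with an smax pattern the
	 expand_binop call above emits it directly; otherwise the
	 compare-and-branch fallback just shown is used.  */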

    case BIT_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;

    case FFS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, ffs_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;

      /* ??? Can optimize bitwise operations with one arg constant.
	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
	 and (a bitwise1 b) bitwise2 b (etc)
	 but that is probably not worth while.  */

      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
	 boolean values when we want in all cases to compute both of them.  In
	 general it is fastest to do TRUTH_AND_EXPR by computing both operands
	 as actual zero-or-1 values and then bitwise anding.  In cases where
	 there cannot be any side effects, better code would be made by
	 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
	 how to recognize those cases.  */

    case TRUTH_AND_EXPR:
    case BIT_AND_EXPR:
      this_optab = and_optab;
      goto binop;

    case TRUTH_OR_EXPR:
    case BIT_IOR_EXPR:
      this_optab = ior_optab;
      goto binop;

    case TRUTH_XOR_EXPR:
    case BIT_XOR_EXPR:
      this_optab = xor_optab;
      goto binop;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
			   unsignedp);

      /* Could determine the answer when only additive constants differ.  Also,
	 the addition of one can be handled by changing the condition.  */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
      temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
      if (temp != 0)
	return temp;

      /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
	  && original_target
	  && GET_CODE (original_target) == REG
	  && (GET_MODE (original_target)
	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	{
	  temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
			      VOIDmode, 0);

	  if (temp != original_target)
	    temp = copy_to_reg (temp);

	  op1 = gen_label_rtx ();
	  emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
				   GET_MODE (temp), unsignedp, 0, op1);
	  emit_move_insn (temp, const1_rtx);
	  emit_label (op1);
	  return temp;
	}
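
      /* Illustrative aside (not part of the original expr.c): the
	 special case just above turns "foo != 0" into a load plus a
	 conditional skip, roughly

	     temp = foo;
	     if (temp == 0) goto L;
	     temp = 1;
	   L:

	 which avoids materializing a full store-flag sequence when the
	 target register already has foo's mode.  */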

      /* If no set-flag instruction, must generate a conditional
	 store into a temporary variable.  Drop through
	 and handle this like && and ||.  */

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      if (! ignore
	  && (target == 0 || ! safe_from_p (target, exp, 1)
	      /* Make sure we don't have a hard reg (such as function's return
		 value) live across basic blocks, if not optimizing.  */
	      || (!optimize && GET_CODE (target) == REG
		  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

      if (target)
	emit_clr_insn (target);

      op1 = gen_label_rtx ();
      jumpifnot (exp, op1);

      if (target)
	emit_0_to_1_insn (target);

      emit_label (op1);
      return ignore ? const0_rtx : target;

    case TRUTH_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
      /* The parser is careful to generate TRUTH_NOT_EXPR
	 only with operands that are always zero or one.  */
      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
			   target, 1, OPTAB_LIB_WIDEN);
      if (temp == 0)
	abort ();
      return temp;

    case COMPOUND_EXPR:
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      emit_queue ();
      return expand_expr (TREE_OPERAND (exp, 1),
			  (ignore ? const0_rtx : target),
			  VOIDmode, 0);

    case COND_EXPR:
      /* If we would have a "singleton" (see below) were it not for a
	 conversion in each arm, bring that conversion back out.  */
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
	  && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
	  && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
	      == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
	{
	  tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
	  tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);

	  if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
	       && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
	      || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
		  && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
	      || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
		  && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
	      || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
		  && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
	    return expand_expr (build1 (NOP_EXPR, type,
					build (COND_EXPR, TREE_TYPE (iftrue),
					       TREE_OPERAND (exp, 0),
					       iftrue, iffalse)),
				target, tmode, modifier);
	}

      {
	/* Note that COND_EXPRs whose type is a structure or union
	   are required to be constructed to contain assignments of
	   a temporary variable, so that we can evaluate them here
	   for side effect only.  If type is void, we must do likewise.  */

	/* If an arm of the branch requires a cleanup,
	   only that cleanup is performed.  */

	tree singleton = 0;
	tree binary_op = 0, unary_op = 0;

	/* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
	   convert it to our mode, if necessary.  */
	if (integer_onep (TREE_OPERAND (exp, 1))
	    && integer_zerop (TREE_OPERAND (exp, 2))
	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
	  {
	    if (ignore)
	      {
		expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			     ro_modifier);
		return const0_rtx;
	      }

	    op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
	    if (GET_MODE (op0) == mode)
	      return op0;

	    if (target == 0)
	      target = gen_reg_rtx (mode);
	    convert_move (target, op0, unsignedp);
	    return target;
	  }

	/* Check for X ? A + B : A.  If we have this, we can copy A to the
	   output and conditionally add B.  Similarly for unary operations.
	   Don't do this if X has side-effects because those side effects
	   might affect A or B and the "?" operation is a sequence point in
	   ANSI.  (operand_equal_p tests for side effects.)  */

	if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
	    && operand_equal_p (TREE_OPERAND (exp, 2),
				TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
	  singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
		 && operand_equal_p (TREE_OPERAND (exp, 1),
				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
	  singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
		 && operand_equal_p (TREE_OPERAND (exp, 2),
				     TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
	  singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
		 && operand_equal_p (TREE_OPERAND (exp, 1),
				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
	  singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);

	/* If we are not to produce a result, we have no target.  Otherwise,
	   if a target was specified use it; it will not be used as an
	   intermediate target unless it is safe.  If no target, use a
	   temporary.  */

	if (ignore)
	  temp = 0;
	else if (original_target
		 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
		     || (singleton && GET_CODE (original_target) == REG
			 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
			 && original_target == var_rtx (singleton)))
		 && GET_MODE (original_target) == mode
#ifdef HAVE_conditional_move
		 && (! can_conditionally_move_p (mode)
		     || GET_CODE (original_target) == REG
		     || TREE_ADDRESSABLE (type))
#endif
		 && ! (GET_CODE (original_target) == MEM
		       && MEM_VOLATILE_P (original_target)))
	  temp = original_target;
	else if (TREE_ADDRESSABLE (type))
	  abort ();
	else
	  temp = assign_temp (type, 0, 0, 1);

	/* If we had X ? A + C : A, with C a constant power of 2, and we can
	   do the test of X as a store-flag operation, do this as
	   A + ((X != 0) << log C).  Similarly for other simple binary
	   operators.  Only do for C == 1 if BRANCH_COST is low.  */
	if (temp && singleton && binary_op
	    && (TREE_CODE (binary_op) == PLUS_EXPR
		|| TREE_CODE (binary_op) == MINUS_EXPR
		|| TREE_CODE (binary_op) == BIT_IOR_EXPR
		|| TREE_CODE (binary_op) == BIT_XOR_EXPR)
	    && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
		: integer_onep (TREE_OPERAND (binary_op, 1)))
	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
	  {
	    rtx result;
	    optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
			    ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
			       ? addv_optab : add_optab)
			    : TREE_CODE (binary_op) == MINUS_EXPR
			    ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
			       ? subv_optab : sub_optab)
			    : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
			    : xor_optab);

	    /* If we had X ? A : A + 1, do this as A + (X == 0).

	       We have to invert the truth value here and then put it
	       back later if do_store_flag fails.  We cannot simply copy
	       TREE_OPERAND (exp, 0) to another variable and modify that
	       because invert_truthvalue can modify the tree pointed to
	       by its argument.  */
	    if (singleton == TREE_OPERAND (exp, 1))
	      TREE_OPERAND (exp, 0)
		= invert_truthvalue (TREE_OPERAND (exp, 0));

	    result = do_store_flag (TREE_OPERAND (exp, 0),
				    (safe_from_p (temp, singleton, 1)
				     ? temp : NULL_RTX),
				    mode, BRANCH_COST <= 1);

	    if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
	      result = expand_shift (LSHIFT_EXPR, mode, result,
				     build_int_2 (tree_log2
						  (TREE_OPERAND
						   (binary_op, 1)),
						  0),
				     (safe_from_p (temp, singleton, 1)
				      ? temp : NULL_RTX), 0);

	    if (result)
	      {
		op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
		return expand_binop (mode, boptab, op1, result, temp,
				     unsignedp, OPTAB_LIB_WIDEN);
	      }
	    else if (singleton == TREE_OPERAND (exp, 1))
	      TREE_OPERAND (exp, 0)
		= invert_truthvalue (TREE_OPERAND (exp, 0));
	  }

	do_pending_stack_adjust ();
	NO_DEFER_POP;
	op0 = gen_label_rtx ();

	if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
	  {
	    if (temp != 0)
	      {
		/* If the target conflicts with the other operand of the
		   binary op, we can't use it.  Also, we can't use the target
		   if it is a hard register, because evaluating the condition
		   might clobber it.  */
		if ((binary_op
		     && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
		    || (GET_CODE (temp) == REG
			&& REGNO (temp) < FIRST_PSEUDO_REGISTER))
		  temp = gen_reg_rtx (mode);
		store_expr (singleton, temp, 0);
	      }
	    else
	      expand_expr (singleton,
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	    if (singleton == TREE_OPERAND (exp, 1))
	      jumpif (TREE_OPERAND (exp, 0), op0);
	    else
	      jumpifnot (TREE_OPERAND (exp, 0), op0);

	    start_cleanup_deferral ();
	    if (binary_op && temp == 0)
	      /* Just touch the other operand.  */
	      expand_expr (TREE_OPERAND (binary_op, 1),
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	    else if (binary_op)
	      store_expr (build (TREE_CODE (binary_op), type,
				 make_tree (type, temp),
				 TREE_OPERAND (binary_op, 1)),
			  temp, 0);
	    else
	      store_expr (build1 (TREE_CODE (unary_op), type,
				  make_tree (type, temp)),
			  temp, 0);
	    op1 = op0;
	  }
	/* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
	   comparison operator.  If we have one of these cases, set the
	   output to A, branch on A (cse will merge these two references),
	   then set the output to FOO.  */
	else if (temp
		 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
		 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
		 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     TREE_OPERAND (exp, 1), 0)
		 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
		     || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
		 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
	  {
	    if (GET_CODE (temp) == REG
		&& REGNO (temp) < FIRST_PSEUDO_REGISTER)
	      temp = gen_reg_rtx (mode);
	    store_expr (TREE_OPERAND (exp, 1), temp, 0);
	    jumpif (TREE_OPERAND (exp, 0), op0);

	    start_cleanup_deferral ();
	    store_expr (TREE_OPERAND (exp, 2), temp, 0);
	    op1 = op0;
	  }
	else if (temp
		 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
		 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
		 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     TREE_OPERAND (exp, 2), 0)
		 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
		     || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
		 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
	  {
	    if (GET_CODE (temp) == REG
		&& REGNO (temp) < FIRST_PSEUDO_REGISTER)
	      temp = gen_reg_rtx (mode);
	    store_expr (TREE_OPERAND (exp, 2), temp, 0);
	    jumpifnot (TREE_OPERAND (exp, 0), op0);

	    start_cleanup_deferral ();
	    store_expr (TREE_OPERAND (exp, 1), temp, 0);
	    op1 = op0;
	  }
	else
	  {
	    op1 = gen_label_rtx ();
	    jumpifnot (TREE_OPERAND (exp, 0), op0);

	    start_cleanup_deferral ();

	    /* One branch of the cond can be void, if it never returns.  For
	       example A ? throw : E  */
	    if (temp != 0
		&& TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
	      store_expr (TREE_OPERAND (exp, 1), temp, 0);
	    else
	      expand_expr (TREE_OPERAND (exp, 1),
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	    end_cleanup_deferral ();
	    emit_queue ();
	    emit_jump_insn (gen_jump (op1));
	    emit_barrier ();
	    emit_label (op0);
	    start_cleanup_deferral ();
	    if (temp != 0
		&& TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
	      store_expr (TREE_OPERAND (exp, 2), temp, 0);
	    else
	      expand_expr (TREE_OPERAND (exp, 2),
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	  }

	end_cleanup_deferral ();

	emit_queue ();
	emit_label (op1);
	OK_DEFER_POP;

	return temp;
      }
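
      /* Illustrative aside (not part of the original expr.c): the
	 "singleton" transformation above rewrites a conditional
	 increment as straight-line code when the condition can be
	 computed as a store-flag.  For example, with BRANCH_COST >= 3,

	     n = flag ? n + 4 : n;

	 becomes, in effect, n + ((flag != 0) << 2), trading a
	 conditional branch for a store-flag and a shift.  */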

    case TARGET_EXPR:
      {
	/* Something needs to be initialized, but we didn't know
	   where that thing was when building the tree.  For example,
	   it could be the return value of a function, or a parameter
	   to a function which must be laid out on the stack, or a
	   temporary variable which must be passed by reference.

	   We guarantee that the expression will either be constructed
	   or copied into our original target.  */

	tree slot = TREE_OPERAND (exp, 0);
	tree cleanups = NULL_TREE;
	tree exp1;

	if (TREE_CODE (slot) != VAR_DECL)
	  abort ();

	if (! ignore)
	  target = original_target;

	/* Set this here so that if we get a target that refers to a
	   register variable that's already been used, put_reg_into_stack
	   knows that it should fix up those uses.  */
	TREE_USED (slot) = 1;

	if (target == 0)
	  {
	    if (DECL_RTL_SET_P (slot))
	      {
		target = DECL_RTL (slot);
		/* If we have already expanded the slot, don't do
		   it again.  (mrs)  */
		if (TREE_OPERAND (exp, 1) == NULL_TREE)
		  return target;
	      }
	    else
	      {
		target = assign_temp (type, 2, 0, 1);
		/* All temp slots at this level must not conflict.  */
		preserve_temp_slots (target);
		SET_DECL_RTL (slot, target);
		if (TREE_ADDRESSABLE (slot))
		  put_var_into_stack (slot);

		/* Since SLOT is not known to the called function
		   to belong to its stack frame, we must build an explicit
		   cleanup.  This case occurs when we must build up a reference
		   to pass the reference as an argument.  In this case,
		   it is very likely that such a reference need not be
		   built here.  */

		if (TREE_OPERAND (exp, 2) == 0)
		  TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
		cleanups = TREE_OPERAND (exp, 2);
	      }
	  }
	else
	  {
	    /* This case does occur, when expanding a parameter which
	       needs to be constructed on the stack.  The target
	       is the actual stack address that we want to initialize.
	       The function we call will perform the cleanup in this case.  */

	    /* If we have already assigned it space, use that space,
	       not the target that we were passed in, as our target
	       parameter is only a hint.  */
	    if (DECL_RTL_SET_P (slot))
	      {
		target = DECL_RTL (slot);
		/* If we have already expanded the slot, don't do
		   it again.  (mrs)  */
		if (TREE_OPERAND (exp, 1) == NULL_TREE)
		  return target;
	      }
	    else
	      {
		SET_DECL_RTL (slot, target);
		/* If we must have an addressable slot, then make sure that
		   the RTL that we just stored in slot is OK.  */
		if (TREE_ADDRESSABLE (slot))
		  put_var_into_stack (slot);
	      }
	  }

	exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
	/* Mark it as expanded.  */
	TREE_OPERAND (exp, 1) = NULL_TREE;

	store_expr (exp1, target, 0);

	expand_decl_cleanup (NULL_TREE, cleanups);

	return target;
      }

    case INIT_EXPR:
      {
	tree lhs = TREE_OPERAND (exp, 0);
	tree rhs = TREE_OPERAND (exp, 1);
	tree noncopied_parts = 0;
	tree lhs_type = TREE_TYPE (lhs);

	temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
	if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
	  noncopied_parts
	    = init_noncopied_parts (stabilize_reference (lhs),
				    TYPE_NONCOPIED_PARTS (lhs_type));

	while (noncopied_parts != 0)
	  {
	    expand_assignment (TREE_VALUE (noncopied_parts),
			       TREE_PURPOSE (noncopied_parts), 0, 0);
	    noncopied_parts = TREE_CHAIN (noncopied_parts);
	  }
	return temp;
      }

    case MODIFY_EXPR:
      {
	/* If lhs is complex, expand calls in rhs before computing it.
	   That's so we don't compute a pointer and save it over a call.
	   If lhs is simple, compute it first so we can give it as a
	   target if the rhs is just a call.  This avoids an extra temp and copy
	   and that prevents a partial-subsumption which makes bad code.
	   Actually we could treat component_ref's of vars like vars.  */

	tree lhs = TREE_OPERAND (exp, 0);
	tree rhs = TREE_OPERAND (exp, 1);
	tree noncopied_parts = 0;
	tree lhs_type = TREE_TYPE (lhs);

	temp = 0;

	/* Check for |= or &= of a bitfield of size one into another bitfield
	   of size 1.  In this case, (unless we need the result of the
	   assignment) we can do this more efficiently with a
	   test followed by an assignment, if necessary.

	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
	   things change so we do, this code should be enhanced to
	   support it.  */
	if (ignore
	    && TREE_CODE (lhs) == COMPONENT_REF
	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
		|| TREE_CODE (rhs) == BIT_AND_EXPR)
	    && TREE_OPERAND (rhs, 0) == lhs
	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
	    && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
	    && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
	  {
	    rtx label = gen_label_rtx ();

	    do_jump (TREE_OPERAND (rhs, 1),
		     TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
		     TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
	    expand_assignment (lhs, convert (TREE_TYPE (rhs),
					     (TREE_CODE (rhs) == BIT_IOR_EXPR
					      ? integer_one_node
					      : integer_zero_node)),
			       0, 0);
	    do_pending_stack_adjust ();
	    emit_label (label);
	    return const0_rtx;
	  }

	if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
	    && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
	  noncopied_parts
	    = save_noncopied_parts (stabilize_reference (lhs),
				    TYPE_NONCOPIED_PARTS (lhs_type));

	temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
	while (noncopied_parts != 0)
	  {
	    expand_assignment (TREE_PURPOSE (noncopied_parts),
			       TREE_VALUE (noncopied_parts), 0, 0);
	    noncopied_parts = TREE_CHAIN (noncopied_parts);
	  }
	return temp;
      }
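
      /* Illustrative aside (not part of the original expr.c): the
	 one-bit bitfield special case above applies to ignored
	 assignments such as

	     struct s { unsigned a : 1, b : 1; } x;
	     ...
	     x.a |= x.b;

	 Rather than extracting both fields and or-ing them, it jumps
	 on x.b and stores the constant 1 (or, for &=, 0) only when a
	 store is actually needed.  */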

    case RETURN_EXPR:
      if (!TREE_OPERAND (exp, 0))
	expand_null_return ();
      else
	expand_return (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
      return expand_increment (exp, 0, ignore);

    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
      /* Faster to treat as pre-increment if result is not used.  */
      return expand_increment (exp, ! ignore, ignore);

    case ADDR_EXPR:
      /* If nonzero, TEMP will be set to the address of something that might
	 be a MEM corresponding to a stack slot.  */
      temp = 0;

      /* Are we taking the address of a nested function?  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
	  && decl_function_context (TREE_OPERAND (exp, 0)) != 0
	  && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
	  && ! TREE_STATIC (exp))
	{
	  op0 = trampoline_address (TREE_OPERAND (exp, 0));
	  op0 = force_operand (op0, target);
	}
      /* If we are taking the address of something erroneous, just
	 return a zero.  */
      else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
	return const0_rtx;
      else
	{
	  /* We make sure to pass const0_rtx down if we came in with
	     ignore set, to avoid doing the cleanups twice for something.  */
	  op0 = expand_expr (TREE_OPERAND (exp, 0),
			     ignore ? const0_rtx : NULL_RTX, VOIDmode,
			     (modifier == EXPAND_INITIALIZER
			      ? modifier : EXPAND_CONST_ADDRESS));

	  /* If we are going to ignore the result, OP0 will have been set
	     to const0_rtx, so just return it.  Don't get confused and
	     think we are taking the address of the constant.  */
	  if (ignore)
	    return op0;

	  /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
	     clever and returns a REG when given a MEM.  */
	  op0 = protect_from_queue (op0, 1);

	  /* We would like the object in memory.  If it is a constant, we can
	     have it be statically allocated into memory.  For a non-constant,
	     we need to allocate some memory and store the value into it.  */

	  if (CONSTANT_P (op0))
	    op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
				   op0);
	  else if (GET_CODE (op0) == MEM)
	    {
	      mark_temp_addr_taken (op0);
	      temp = XEXP (op0, 0);
	    }

	  else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
		   || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
		   || GET_CODE (op0) == PARALLEL)
	    {
	      /* If this object is in a register, it must not
		 be BLKmode.  */
	      tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
	      tree nt = build_qualified_type (inner_type,
					      (TYPE_QUALS (inner_type)
					       | TYPE_QUAL_CONST));
	      rtx memloc = assign_temp (nt, 1, 1, 1);

	      mark_temp_addr_taken (memloc);
	      if (GET_CODE (op0) == PARALLEL)
		/* Handle calls that pass values in multiple non-contiguous
		   locations.  The Irix 6 ABI has examples of this.  */
		emit_group_store (memloc, op0,
				  int_size_in_bytes (inner_type),
				  TYPE_ALIGN (inner_type));
	      else
		emit_move_insn (memloc, op0);
	      op0 = memloc;
	    }

	  if (GET_CODE (op0) != MEM)
	    abort ();

	  if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	    {
	      temp = XEXP (op0, 0);
#ifdef POINTERS_EXTEND_UNSIGNED
	      if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
		  && mode == ptr_mode)
		temp = convert_memory_address (ptr_mode, temp);
#endif
	      return temp;
	    }

	  op0 = force_operand (XEXP (op0, 0), target);
	}

      if (flag_force_addr && GET_CODE (op0) != REG)
	op0 = force_reg (Pmode, op0);

      if (GET_CODE (op0) == REG
	  && ! REG_USERVAR_P (op0))
	mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));

      /* If we might have had a temp slot, add an equivalent address
	 for it.  */
      if (temp != 0)
	update_temp_slot_address (temp, op0);

#ifdef POINTERS_EXTEND_UNSIGNED
      if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
	  && mode == ptr_mode)
	op0 = convert_memory_address (ptr_mode, op0);
#endif

      return op0;

    case ENTRY_VALUE_EXPR:
      abort ();

      /* COMPLEX type for Extended Pascal & Fortran.  */
    case COMPLEX_EXPR:
      {
	enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	rtx insns;

	/* Get the rtx code of the operands.  */
	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
	op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);

	if (! target)
	  target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

	start_sequence ();

	/* Move the real (op0) and imaginary (op1) parts to their location.  */
	emit_move_insn (gen_realpart (mode, target), op0);
	emit_move_insn (gen_imagpart (mode, target), op1);

	insns = get_insns ();
	end_sequence ();

	/* Complex construction should appear as a single unit.
	   If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
	   each with a separate pseudo as destination.
	   It's not correct for flow to treat them as a unit.  */
	if (GET_CODE (target) != CONCAT)
	  emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
	else
	  emit_insns (insns);

	return target;
      }

    case REALPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return gen_realpart (mode, op0);

    case IMAGPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return gen_imagpart (mode, op0);

    case CONJ_EXPR:
      {
	enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	rtx imag_t;
	rtx insns;

	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);

	if (! target)
	  target = gen_reg_rtx (mode);

	start_sequence ();

	/* Store the realpart and the negated imagpart to target.  */
	emit_move_insn (gen_realpart (partmode, target),
			gen_realpart (partmode, op0));

	imag_t = gen_imagpart (partmode, target);
	temp = expand_unop (partmode,
			    ! unsignedp && flag_trapv
			    && (GET_MODE_CLASS (partmode) == MODE_INT)
			    ? negv_optab : neg_optab,
			    gen_imagpart (partmode, op0), imag_t, 0);
	if (temp != imag_t)
	  emit_move_insn (imag_t, temp);

	insns = get_insns ();
	end_sequence ();

	/* Conjugate should appear as a single unit.
	   If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
	   each with a separate pseudo as destination.
	   It's not correct for flow to treat them as a unit.  */
	if (GET_CODE (target) != CONCAT)
	  emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
	else
	  emit_insns (insns);

	return target;
      }
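
      /* Illustrative aside (not part of the original expr.c): in GNU C
	 these tree codes come from the __complex__ extension;
	 REALPART_EXPR and IMAGPART_EXPR correspond to __real__ and
	 __imag__, and CONJ_EXPR to "~" applied to a complex value:

	     __complex__ double conj_ex (__complex__ double z)
	     {
	       return ~z;   ... real part kept, imaginary part negated
	     }
      */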

    case TRY_CATCH_EXPR:
      {
	tree handler = TREE_OPERAND (exp, 1);

	expand_eh_region_start ();

	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);

	expand_eh_region_end_cleanup (handler);

	return op0;
      }

    case TRY_FINALLY_EXPR:
      {
	tree try_block = TREE_OPERAND (exp, 0);
	tree finally_block = TREE_OPERAND (exp, 1);
	rtx finally_label = gen_label_rtx ();
	rtx done_label = gen_label_rtx ();
	rtx return_link = gen_reg_rtx (Pmode);
	tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
			      (tree) finally_label, (tree) return_link);
	TREE_SIDE_EFFECTS (cleanup) = 1;

	/* Start a new binding layer that will keep track of all cleanup
	   actions to be performed.  */
	expand_start_bindings (2);

	target_temp_slot_level = temp_slot_level;

	expand_decl_cleanup (NULL_TREE, cleanup);
	op0 = expand_expr (try_block, target, tmode, modifier);

	preserve_temp_slots (op0);
	expand_end_bindings (NULL_TREE, 0, 0);
	emit_jump (done_label);
	emit_label (finally_label);
	expand_expr (finally_block, const0_rtx, VOIDmode, 0);
	emit_indirect_jump (return_link);
	emit_label (done_label);
	return op0;
      }

    case GOTO_SUBROUTINE_EXPR:
      {
	rtx subr = (rtx) TREE_OPERAND (exp, 0);
	rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
	rtx return_address = gen_label_rtx ();
	emit_move_insn (return_link,
			gen_rtx_LABEL_REF (Pmode, return_address));
	emit_jump (subr);
	emit_label (return_address);
	return const0_rtx;
      }

    case VA_ARG_EXPR:
      return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);

    case EXC_PTR_EXPR:
      return get_exception_pointer (cfun);

    default:
      return (*lang_expand_expr) (exp, original_target, tmode, modifier);
    }

  /* Here to do an ordinary binary operator, generating an instruction
     from the optab already placed in `this_optab'.  */
 binop:
  if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
    subtarget = 0;
  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
 binop2:
  temp = expand_binop (mode, this_optab, op0, op1, target,
		       unsignedp, OPTAB_LIB_WIDEN);
  if (temp == 0)
    abort ();
  return temp;
}
\f
/* Similar to expand_expr, except that we don't specify a target, target
   mode, or modifier and we return the alignment of the inner type.  This is
   used in cases where it is not necessary to align the result to the
   alignment of its type as long as we know the alignment of the result, for
   example for comparisons of BLKmode values.  */

static rtx
expand_expr_unaligned (exp, palign)
     register tree exp;
     unsigned int *palign;
{
  register rtx op0;
  tree type = TREE_TYPE (exp);
  register enum machine_mode mode = TYPE_MODE (type);

  /* Default the alignment we return to that of the type.  */
  *palign = TYPE_ALIGN (type);

  /* The only case in which we do anything special is if the resulting mode
     is BLKmode.  */
  if (mode != BLKmode)
    return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);

  switch (TREE_CODE (exp))
    {
    case CONVERT_EXPR:
    case NOP_EXPR:
    case NON_LVALUE_EXPR:
      /* Conversions between BLKmode values don't change the underlying
	 alignment or value.  */
      if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
	return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
      break;

    case ARRAY_REF:
      /* Much of the code for this case is copied directly from expand_expr.
	 We need to duplicate it here because we will do something different
	 in the fall-through case, so we need to handle the same exceptions
	 it does.  */
      {
	tree array = TREE_OPERAND (exp, 0);
	tree domain = TYPE_DOMAIN (TREE_TYPE (array));
	tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
	tree index = convert (sizetype, TREE_OPERAND (exp, 1));
	HOST_WIDE_INT i;

	if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
	  abort ();

	/* Optimize the special case of a zero lower bound.

	   We convert the low_bound to sizetype to avoid some problems
	   with constant folding.  (E.g. suppose the lower bound is 1,
	   and its mode is QI.  Without the conversion, (ARRAY
	   +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	   +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */

	if (! integer_zerop (low_bound))
	  index = size_diffop (index, convert (sizetype, low_bound));

	/* If this is a constant index into a constant array,
	   just get the value from the array.  Handle both the cases when
	   we have an explicit constructor and when our operand is a variable
	   that was declared const.  */

	if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
	    && host_integerp (index, 0)
	    && 0 > compare_tree_int (index,
				     list_length (CONSTRUCTOR_ELTS
						  (TREE_OPERAND (exp, 0)))))
	  {
	    tree elem;

	    for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
		 i = tree_low_cst (index, 0);
		 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
	      ;

	    if (elem)
	      return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
	  }

	else if (optimize >= 1
		 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
		 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
		 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
	  {
	    if (TREE_CODE (index) == INTEGER_CST)
	      {
		tree init = DECL_INITIAL (array);

		if (TREE_CODE (init) == CONSTRUCTOR)
		  {
		    tree elem;

		    for (elem = CONSTRUCTOR_ELTS (init);
			 ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
			 elem = TREE_CHAIN (elem))
		      ;

		    if (elem)
		      return expand_expr_unaligned (fold (TREE_VALUE (elem)),
						    palign);
		  }
	      }
	  }
      }
      /* Fall through.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_RANGE_REF:
      /* If the operand is a CONSTRUCTOR, we can just extract the
	 appropriate field if it is present.  Don't do this if we have
	 already written the data since we want to refer to that copy
	 and varasm.c assumes that's what we'll do.  */
      if (TREE_CODE (exp) == COMPONENT_REF
	  && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
	  && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
	{
	  tree elt;

	  for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
	       elt = TREE_CHAIN (elt))
	    if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
	      /* Note that unlike the case in expand_expr, we know this is
		 BLKmode and hence not an integer.  */
	      return expand_expr_unaligned (TREE_VALUE (elt), palign);
	}

      {
	enum machine_mode mode1;
	HOST_WIDE_INT bitsize, bitpos;
	tree offset;
	int volatilep = 0;
	unsigned int alignment;
	int unsignedp;
	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
					&mode1, &unsignedp, &volatilep,
					&alignment);

	/* If we got back the original object, something is wrong.  Perhaps
	   we are evaluating an expression too early.  In any event, don't
	   infinitely recurse.  */
	if (tem == exp)
	  abort ();

	op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);

	/* If this is a constant, put it into a register if it is a
	   legitimate constant and OFFSET is 0 and memory if it isn't.  */
	if (CONSTANT_P (op0))
	  {
	    enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));

	    if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
		&& offset == 0)
	      op0 = force_reg (inner_mode, op0);
	    else
	      op0 = validize_mem (force_const_mem (inner_mode, op0));
	  }

	if (offset != 0)
	  {
	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);

	    /* If this object is in a register, put it into memory.
	       This case can't occur in C, but can in Ada if we have
	       unchecked conversion of an expression from a scalar type to
	       an array or record type.  */
	    if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
		|| GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
	      {
		tree nt = build_qualified_type (TREE_TYPE (tem),
						(TYPE_QUALS (TREE_TYPE (tem))
						 | TYPE_QUAL_CONST));
		rtx memloc = assign_temp (nt, 1, 1, 1);

		mark_temp_addr_taken (memloc);
		emit_move_insn (memloc, op0);
		op0 = memloc;
	      }

	    if (GET_CODE (op0) != MEM)
	      abort ();

	    if (GET_MODE (offset_rtx) != ptr_mode)
	      {
#ifdef POINTERS_EXTEND_UNSIGNED
		offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
#else
		offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif
	      }

	    op0 = change_address (op0, VOIDmode,
				  gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
						force_reg (ptr_mode,
							   offset_rtx)));
	  }

	/* Don't forget about volatility even if this is a bitfield.  */
	if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
	  {
	    op0 = copy_rtx (op0);
	    MEM_VOLATILE_P (op0) = 1;
	  }

	/* Check the access.  */
	if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
	  {
	    rtx to;
	    int size;

	    to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
	    size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;

	    /* Check the access right of the pointer.  */
	    in_check_memory_usage = 1;
	    if (size > BITS_PER_UNIT)
	      emit_library_call (chkr_check_addr_libfunc,
				 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
				 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
				 TYPE_MODE (sizetype),
				 GEN_INT (MEMORY_USE_RO),
				 TYPE_MODE (integer_type_node));
	    in_check_memory_usage = 0;
	  }

	/* In cases where an aligned union has an unaligned object
	   as a field, we might be extracting a BLKmode value from
	   an integer-mode (e.g., SImode) object.  Handle this case
	   by doing the extract into an object as wide as the field
	   (which we know to be the width of a basic mode), then
	   storing into memory, and changing the mode to BLKmode.
	   If we ultimately want the address (EXPAND_CONST_ADDRESS or
	   EXPAND_INITIALIZER), then we must not copy to a temporary.  */
	if (mode1 == VOIDmode
	    || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
	    || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
		&& (TYPE_ALIGN (type) > alignment
		    || bitpos % TYPE_ALIGN (type) != 0)))
	  {
	    enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);

	    if (ext_mode == BLKmode)
	      {
		/* In this case, BITPOS must start at a byte boundary.  */
		if (GET_CODE (op0) != MEM
		    || bitpos % BITS_PER_UNIT != 0)
		  abort ();

		op0 = change_address (op0, VOIDmode,
				      plus_constant (XEXP (op0, 0),
						     bitpos / BITS_PER_UNIT));
	      }
	    else
	      {
		tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
						TYPE_QUAL_CONST);
		rtx new = assign_temp (nt, 0, 1, 1);

		op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
					 unsignedp, NULL_RTX, ext_mode,
					 ext_mode, alignment,
					 int_size_in_bytes (TREE_TYPE (tem)));

		/* If the result is a record type and BITSIZE is narrower than
		   the mode of OP0, an integral mode, and this is a big endian
		   machine, we must put the field into the high-order bits.  */
		if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
		    && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
		    && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
		  op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
				      size_int (GET_MODE_BITSIZE
						(GET_MODE (op0))
						- bitsize),
				      op0, 1);

		emit_move_insn (new, op0);
		op0 = copy_rtx (new);
		PUT_MODE (op0, BLKmode);
	      }
	  }
	else
	  /* Get a reference to just this component.  */
	  op0 = change_address (op0, mode1,
				plus_constant (XEXP (op0, 0),
					       (bitpos / BITS_PER_UNIT)));

	MEM_ALIAS_SET (op0) = get_alias_set (exp);

	/* Adjust the alignment in case the bit position is not
	   a multiple of the alignment of the inner object.  */
	while (bitpos % alignment != 0)
	  alignment >>= 1;

	if (GET_CODE (XEXP (op0, 0)) == REG)
	  mark_reg_pointer (XEXP (op0, 0), alignment);

	MEM_IN_STRUCT_P (op0) = 1;
	MEM_VOLATILE_P (op0) |= volatilep;

	*palign = alignment;
	return op0;
      }

    default:
      break;

    }

  return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
}
\f
/* Return the tree node if ARG corresponds to a string constant, or zero
   if it doesn't.  If we return non-zero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */

tree
string_constant (arg, ptr_offset)
     tree arg;
     tree *ptr_offset;
{
  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
    {
      *ptr_offset = size_zero_node;
      return TREE_OPERAND (arg, 0);
    }
  else if (TREE_CODE (arg) == PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
	{
	  *ptr_offset = convert (sizetype, arg1);
	  return TREE_OPERAND (arg0, 0);
	}
      else if (TREE_CODE (arg1) == ADDR_EXPR
	       && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
	{
	  *ptr_offset = convert (sizetype, arg0);
	  return TREE_OPERAND (arg1, 0);
	}
    }

  return 0;
}
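
/* Illustrative aside (not part of the original expr.c): the builtin
   expanders use string_constant to fold accesses into literals, e.g.
   strlen ("abc" + 1).  A sketch of a caller:

       tree offset;
       tree str = string_constant (arg, &offset);
       if (str != 0)
	 ... TREE_STRING_POINTER (str) plus OFFSET names the bytes accessed

   Both the plain ADDR_EXPR form (&"abc") and the PLUS_EXPR form
   (&"abc" + index, with the operands in either order) are recognized
   above.  */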
\f
/* Expand code for a post- or pre- increment or decrement
   and return the RTX for the result.
   POST is 1 for postinc/decrements and 0 for preinc/decrements.  */

static rtx
expand_increment (exp, post, ignore)
     register tree exp;
     int post, ignore;
{
  register rtx op0, op1;
  register rtx temp, value;
  register tree incremented = TREE_OPERAND (exp, 0);
  optab this_optab = add_optab;
  int icode;
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  int op0_is_copy = 0;
  int single_insn = 0;
  /* 1 means we can't store into OP0 directly,
     because it is a subreg narrower than a word,
     and we don't dare clobber the rest of the word.  */
  int bad_subreg = 0;

  /* Stabilize any component ref that might need to be
     evaluated more than once below.  */
  if (!post
      || TREE_CODE (incremented) == BIT_FIELD_REF
      || (TREE_CODE (incremented) == COMPONENT_REF
	  && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
	      || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
    incremented = stabilize_reference (incremented);
  /* Nested *INCREMENT_EXPRs can happen in C++.  We must force innermost
     ones into save exprs so that they don't accidentally get evaluated
     more than once by the code below.  */
  if (TREE_CODE (incremented) == PREINCREMENT_EXPR
      || TREE_CODE (incremented) == PREDECREMENT_EXPR)
    incremented = save_expr (incremented);

  /* Compute the operands as RTX.
     Note whether OP0 is the actual lvalue or a copy of it:
     I believe it is a copy iff it is a register or subreg
     and insns were generated in computing it.  */

  temp = get_last_insn ();
  op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);

  /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
     in place but instead must do sign- or zero-extension during assignment,
     so we copy it into a new register and let the code below use it as
     a copy.

     Note that we can safely modify this SUBREG since it is known not to be
     shared (it was made by the expand_expr call above).  */

  if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
    {
      if (post)
	SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
      else
	bad_subreg = 1;
    }
  else if (GET_CODE (op0) == SUBREG
	   && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
    {
      /* We cannot increment this SUBREG in place.  If we are
	 post-incrementing, get a copy of the old value.  Otherwise,
	 just mark that we cannot increment in place.  */
      if (post)
	op0 = copy_to_reg (op0);
      else
	bad_subreg = 1;
    }

  op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
		 && temp != get_last_insn ());
  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
		     EXPAND_MEMORY_USE_BAD);

  /* Decide whether incrementing or decrementing.  */
  if (TREE_CODE (exp) == POSTDECREMENT_EXPR
      || TREE_CODE (exp) == PREDECREMENT_EXPR)
    this_optab = sub_optab;

  /* Convert decrement by a constant into a negative increment.  */
  if (this_optab == sub_optab
      && GET_CODE (op1) == CONST_INT)
    {
      op1 = GEN_INT (-INTVAL (op1));
      this_optab = add_optab;
    }
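
  /* Illustrative aside (not part of the original expr.c): thanks to
     the conversion just above, a constant decrement such as "--x" or
     "x -= 4" is expanded as an addition of a negative constant, so
     only the add patterns (add_optab, or addv_optab under -ftrapv)
     need be queried below.  */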
1499e0a8 9317
91ce572a
CC
9318 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9319 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9320
b93a436e
JL
9321 /* For a preincrement, see if we can do this with a single instruction. */
9322 if (!post)
9323 {
9324 icode = (int) this_optab->handlers[(int) mode].insn_code;
9325 if (icode != (int) CODE_FOR_nothing
9326 /* Make sure that OP0 is valid for operands 0 and 1
9327 of the insn we want to queue. */
a995e389
RH
9328 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9329 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9330 && (*insn_data[icode].operand[2].predicate) (op1, mode))
b93a436e
JL
9331 single_insn = 1;
9332 }
bbf6f052 9333
b93a436e
JL
9334 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9335 then we cannot just increment OP0. We must therefore contrive to
9336 increment the original value. Then, for postincrement, we can return
9337 OP0 since it is a copy of the old value. For preincrement, expand here
9338 unless we can do it with a single insn.
bbf6f052 9339
b93a436e
JL
9340 Likewise if storing directly into OP0 would clobber high bits
9341 we need to preserve (bad_subreg). */
9342 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
a358cee0 9343 {
b93a436e
JL
9344 /* This is the easiest way to increment the value wherever it is.
9345 Problems with multiple evaluation of INCREMENTED are prevented
9346 because either (1) it is a component_ref or preincrement,
9347 in which case it was stabilized above, or (2) it is an array_ref
9348 with constant index in an array in a register, which is
9349 safe to reevaluate. */
9350 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9351 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9352 ? MINUS_EXPR : PLUS_EXPR),
9353 TREE_TYPE (exp),
9354 incremented,
9355 TREE_OPERAND (exp, 1));
a358cee0 9356
b93a436e
JL
9357 while (TREE_CODE (incremented) == NOP_EXPR
9358 || TREE_CODE (incremented) == CONVERT_EXPR)
9359 {
9360 newexp = convert (TREE_TYPE (incremented), newexp);
9361 incremented = TREE_OPERAND (incremented, 0);
9362 }
bbf6f052 9363
b93a436e
JL
9364 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
9365 return post ? op0 : temp;
9366 }

  if (post)
    {
      /* We have a true reference to the value in OP0.
         If there is an insn to add or subtract in this mode, queue it.
         Queueing the increment insn avoids the register shuffling
         that often results if we must increment now and first save
         the old value for subsequent use.  */

#if 0 /* Turned off to avoid making extra insn for indexed memref.  */
      op0 = stabilize (op0);
#endif

      icode = (int) this_optab->handlers[(int) mode].insn_code;
      if (icode != (int) CODE_FOR_nothing
          /* Make sure that OP0 is valid for operands 0 and 1
             of the insn we want to queue.  */
          && (*insn_data[icode].operand[0].predicate) (op0, mode)
          && (*insn_data[icode].operand[1].predicate) (op0, mode))
        {
          if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
            op1 = force_reg (mode, op1);

          return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
        }
      if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
        {
          rtx addr = (general_operand (XEXP (op0, 0), mode)
                      ? force_reg (Pmode, XEXP (op0, 0))
                      : copy_to_reg (XEXP (op0, 0)));
          rtx temp, result;

          op0 = change_address (op0, VOIDmode, addr);
          temp = force_reg (GET_MODE (op0), op0);
          if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
            op1 = force_reg (mode, op1);

          /* The increment queue is LIFO, thus we have to `queue'
             the instructions in reverse order.  */
          enqueue_insn (op0, gen_move_insn (op0, temp));
          result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
          return result;
        }
    }

  /* Preincrement, or we can't increment with one simple insn.  */
  if (post)
    /* Save a copy of the value before inc or dec, to return it later.  */
    temp = value = copy_to_reg (op0);
  else
    /* Arrange to return the incremented value.  */
    /* Copy the rtx because expand_binop will protect from the queue,
       and the results of that would be invalid for us to return
       if our caller does emit_queue before using our result.  */
    temp = copy_rtx (value = op0);

  /* Increment however we can.  */
  op1 = expand_binop (mode, this_optab, value, op1,
                      current_function_check_memory_usage ? NULL_RTX : op0,
                      TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
  /* Make sure the value is stored into OP0.  */
  if (op1 != op0)
    emit_move_insn (op0, op1);

  return temp;
}
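
/* Illustrative sketch (hypothetical, not compiler code): the general
   fallback just above -- copy the old value, add, store back -- is the
   familiar postincrement recipe at the source level.  The function name
   is made up for illustration.  */
#if 0
static int
example_postinc (int *p)
{
  int old = *p;                 /* temp = value = copy_to_reg (op0) */
  *p = old + 1;                 /* expand_binop then emit_move_insn */
  return old;                   /* return temp */
}
#endif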
\f
/* At the start of a function, record that we have no previously-pushed
   arguments waiting to be popped.  */

void
init_pending_stack_adjust ()
{
  pending_stack_adjust = 0;
}

/* When exiting from function, if safe, clear out any pending stack adjust
   so the adjustment won't get done.

   Note, if the current function calls alloca, then it must have a
   frame pointer regardless of the value of flag_omit_frame_pointer.  */

void
clear_pending_stack_adjust ()
{
#ifdef EXIT_IGNORE_STACK
  if (optimize > 0
      && (! flag_omit_frame_pointer || current_function_calls_alloca)
      && EXIT_IGNORE_STACK
      && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
      && ! flag_inline_functions)
    {
      stack_pointer_delta -= pending_stack_adjust,
      pending_stack_adjust = 0;
    }
#endif
}

/* Pop any previously-pushed arguments that have not been popped yet.  */

void
do_pending_stack_adjust ()
{
  if (inhibit_defer_pop == 0)
    {
      if (pending_stack_adjust != 0)
        adjust_stack (GEN_INT (pending_stack_adjust));
      pending_stack_adjust = 0;
    }
}
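
/* Illustrative sketch (hypothetical, a simplification): deferring pops
   lets several argument-popping adjustments collapse into one.  For
   three calls that each leave 4 bytes of arguments on the stack,
   instead of an "add sp, 4" after every call, the three pops can
   accumulate in pending_stack_adjust and flush as a single
   adjust_stack (GEN_INT (12)) when do_pending_stack_adjust runs.  */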
\f
/* Expand conditional expressions.  */

/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
   LABEL is an rtx of code CODE_LABEL, in this function and all the
   functions here.  */

void
jumpifnot (exp, label)
     tree exp;
     rtx label;
{
  do_jump (exp, label, NULL_RTX);
}

/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */

void
jumpif (exp, label)
     tree exp;
     rtx label;
{
  do_jump (exp, NULL_RTX, label);
}

/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
   the result is zero, or IF_TRUE_LABEL if the result is one.
   Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
   meaning fall through in that case.

   do_jump always does any pending stack adjust except when it does not
   actually perform a jump.  An example where there is no jump
   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.

   This function is responsible for optimizing cases such as
   &&, || and comparison operators in EXP.  */

void
do_jump (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  register enum tree_code code = TREE_CODE (exp);
  /* Some cases need to create a label to jump to
     in order to properly fall through.
     These cases set DROP_THROUGH_LABEL nonzero.  */
  rtx drop_through_label = 0;
  rtx temp;
  int i;
  tree type;
  enum machine_mode mode;

#ifdef MAX_INTEGER_COMPUTATION_MODE
  check_max_integer_computation_mode (exp);
#endif

  emit_queue ();

  switch (code)
    {
    case ERROR_MARK:
      break;

    case INTEGER_CST:
      temp = integer_zerop (exp) ? if_false_label : if_true_label;
      if (temp)
        emit_jump (temp);
      break;

#if 0
      /* This is not true with #pragma weak  */
    case ADDR_EXPR:
      /* The address of something can never be zero.  */
      if (if_true_label)
        emit_jump (if_true_label);
      break;
#endif

    case NOP_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
        goto normal;
    case CONVERT_EXPR:
      /* If we are narrowing the operand, we have to do the compare in the
         narrower mode.  */
      if ((TYPE_PRECISION (TREE_TYPE (exp))
           < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        goto normal;
    case NON_LVALUE_EXPR:
    case REFERENCE_EXPR:
    case ABS_EXPR:
    case NEGATE_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* These cannot change zero->non-zero or vice versa.  */
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      break;

    case WITH_RECORD_EXPR:
      /* Put the object on the placeholder list, recurse through our first
         operand, and pop the list.  */
      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
                                    placeholder_list);
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      placeholder_list = TREE_CHAIN (placeholder_list);
      break;

#if 0
      /* This is never less insns than evaluating the PLUS_EXPR followed by
         a test and can be longer if the test is eliminated.  */
    case PLUS_EXPR:
      /* Reduce to minus.  */
      exp = build (MINUS_EXPR, TREE_TYPE (exp),
                   TREE_OPERAND (exp, 0),
                   fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
                                 TREE_OPERAND (exp, 1))));
      /* Process as MINUS.  */
#endif

    case MINUS_EXPR:
      /* Non-zero iff operands of minus differ.  */
      do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
                                  TREE_OPERAND (exp, 0),
                                  TREE_OPERAND (exp, 1)),
                           NE, NE, if_false_label, if_true_label);
      break;

    case BIT_AND_EXPR:
      /* If we are AND'ing with a small constant, do this comparison in the
         smallest type that fits.  If the machine doesn't have comparisons
         that small, it will be converted back to the wider comparison.
         This helps if we are testing the sign bit of a narrower object.
         combine can't do this for us because it can't know whether a
         ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */

      if (! SLOW_BYTE_ACCESS
          && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
          && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
          && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
          && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
          && (type = type_for_mode (mode, 1)) != 0
          && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
          && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
              != CODE_FOR_nothing))
        {
          do_jump (convert (type, exp), if_false_label, if_true_label);
          break;
        }
      goto normal;
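
      /* Illustrative sketch (hypothetical): given "if (x & 0x80)" on an
         int X, the narrowing above performs the test as roughly
         "if ((unsigned char) (x & 0x80))", a QImode compare that can
         test the sign bit of the byte directly.  */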

    case TRUTH_NOT_EXPR:
      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
      break;

    case TRUTH_ANDIF_EXPR:
      if (if_false_label == 0)
        if_false_label = drop_through_label = gen_label_rtx ();
      do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
      start_cleanup_deferral ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      end_cleanup_deferral ();
      break;

    case TRUTH_ORIF_EXPR:
      if (if_true_label == 0)
        if_true_label = drop_through_label = gen_label_rtx ();
      do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
      start_cleanup_deferral ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      end_cleanup_deferral ();
      break;
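
      /* Illustrative sketch (hypothetical): the two cases above thread
         labels instead of computing a boolean value, so "if (a && b)"
         expands along the lines of

                if (! a) goto false_label;
                if (! b) goto false_label;
                ... true arm ...

         and TRUTH_ORIF_EXPR dually jumps to the true label as soon as
         one operand is known nonzero.  */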

    case COMPOUND_EXPR:
      push_temp_slots ();
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      preserve_temp_slots (NULL_RTX);
      free_temp_slots ();
      pop_temp_slots ();
      emit_queue ();
      do_pending_stack_adjust ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      break;

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      {
        HOST_WIDE_INT bitsize, bitpos;
        int unsignedp;
        enum machine_mode mode;
        tree type;
        tree offset;
        int volatilep = 0;
        unsigned int alignment;

        /* Get description of this reference.  We don't actually care
           about the underlying object here.  */
        get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
                             &unsignedp, &volatilep, &alignment);

        type = type_for_size (bitsize, unsignedp);
        if (! SLOW_BYTE_ACCESS
            && type != 0 && bitsize >= 0
            && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
            && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
                != CODE_FOR_nothing))
          {
            do_jump (convert (type, exp), if_false_label, if_true_label);
            break;
          }
        goto normal;
      }

    case COND_EXPR:
      /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases.  */
      if (integer_onep (TREE_OPERAND (exp, 1))
          && integer_zerop (TREE_OPERAND (exp, 2)))
        do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);

      else if (integer_zerop (TREE_OPERAND (exp, 1))
               && integer_onep (TREE_OPERAND (exp, 2)))
        do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);

      else
        {
          register rtx label1 = gen_label_rtx ();
          drop_through_label = gen_label_rtx ();

          do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);

          start_cleanup_deferral ();
          /* Now the THEN-expression.  */
          do_jump (TREE_OPERAND (exp, 1),
                   if_false_label ? if_false_label : drop_through_label,
                   if_true_label ? if_true_label : drop_through_label);
          /* In case the do_jump just above never jumps.  */
          do_pending_stack_adjust ();
          emit_label (label1);

          /* Now the ELSE-expression.  */
          do_jump (TREE_OPERAND (exp, 2),
                   if_false_label ? if_false_label : drop_through_label,
                   if_true_label ? if_true_label : drop_through_label);
          end_cleanup_deferral ();
        }
      break;

    case EQ_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

        if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
            || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
          {
            tree exp0 = save_expr (TREE_OPERAND (exp, 0));
            tree exp1 = save_expr (TREE_OPERAND (exp, 1));
            do_jump
              (fold
               (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
                       fold (build (EQ_EXPR, TREE_TYPE (exp),
                                    fold (build1 (REALPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp0)),
                                    fold (build1 (REALPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp1)))),
                       fold (build (EQ_EXPR, TREE_TYPE (exp),
                                    fold (build1 (IMAGPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp0)),
                                    fold (build1 (IMAGPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp1)))))),
               if_false_label, if_true_label);
          }

        else if (integer_zerop (TREE_OPERAND (exp, 1)))
          do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);

        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
          do_jump_by_parts_equality (exp, if_false_label, if_true_label);
        else
          do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
        break;
      }
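
      /* Illustrative sketch (hypothetical): for complex operands the
         rewrite above is the source-level identity

                a == b  <==>  (__real__ a == __real__ b
                               && __imag__ a == __imag__ b)

         and the NE_EXPR case below dually joins the two parts with ||.  */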

    case NE_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

        if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
            || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
          {
            tree exp0 = save_expr (TREE_OPERAND (exp, 0));
            tree exp1 = save_expr (TREE_OPERAND (exp, 1));
            do_jump
              (fold
               (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
                       fold (build (NE_EXPR, TREE_TYPE (exp),
                                    fold (build1 (REALPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp0)),
                                    fold (build1 (REALPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp1)))),
                       fold (build (NE_EXPR, TREE_TYPE (exp),
                                    fold (build1 (IMAGPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp0)),
                                    fold (build1 (IMAGPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp1)))))),
               if_false_label, if_true_label);
          }

        else if (integer_zerop (TREE_OPERAND (exp, 1)))
          do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);

        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
          do_jump_by_parts_equality (exp, if_true_label, if_false_label);
        else
          do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
        break;
      }

    case LT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LT, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
      else
        do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
      break;

    case LE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LE, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
      else
        do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
      break;

    case GT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GT, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
      else
        do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
      break;

    case GE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GE, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
      else
        do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
      break;

    case UNORDERED_EXPR:
    case ORDERED_EXPR:
      {
        enum rtx_code cmp, rcmp;
        int do_rev;

        if (code == UNORDERED_EXPR)
          cmp = UNORDERED, rcmp = ORDERED;
        else
          cmp = ORDERED, rcmp = UNORDERED;
        mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));

        do_rev = 0;
        if (! can_compare_p (cmp, mode, ccp_jump)
            && (can_compare_p (rcmp, mode, ccp_jump)
                /* If the target doesn't provide either UNORDERED or ORDERED
                   comparisons, canonicalize on UNORDERED for the library.  */
                || rcmp == UNORDERED))
          do_rev = 1;

        if (! do_rev)
          do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
        else
          do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
      }
      break;

      {
        enum rtx_code rcode1;
        enum tree_code tcode2;

      case UNLT_EXPR:
        rcode1 = UNLT;
        tcode2 = LT_EXPR;
        goto unordered_bcc;
      case UNLE_EXPR:
        rcode1 = UNLE;
        tcode2 = LE_EXPR;
        goto unordered_bcc;
      case UNGT_EXPR:
        rcode1 = UNGT;
        tcode2 = GT_EXPR;
        goto unordered_bcc;
      case UNGE_EXPR:
        rcode1 = UNGE;
        tcode2 = GE_EXPR;
        goto unordered_bcc;
      case UNEQ_EXPR:
        rcode1 = UNEQ;
        tcode2 = EQ_EXPR;
        goto unordered_bcc;

      unordered_bcc:
        mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
        if (can_compare_p (rcode1, mode, ccp_jump))
          do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
                               if_true_label);
        else
          {
            tree op0 = save_expr (TREE_OPERAND (exp, 0));
            tree op1 = save_expr (TREE_OPERAND (exp, 1));
            tree cmp0, cmp1;

            /* If the target doesn't support combined unordered
               compares, decompose into UNORDERED + comparison.  */
            cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
            cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
            exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
            do_jump (exp, if_false_label, if_true_label);
          }
      }
      break;
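
      /* Illustrative sketch (hypothetical): when no combined unordered
         branch exists, the decomposition above rewrites e.g. UNLT_EXPR
         as the equivalent of

                __builtin_isunordered (a, b) || a < b

         and lets the TRUTH_ORIF_EXPR machinery emit the short-circuit
         jumps.  */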

      /* Special case:
         __builtin_expect (<test>, 0)  and
         __builtin_expect (<test>, 1)

         We need to do this here, so that <test> is not converted to a SCC
         operation on machines that use condition code registers and COMPARE
         like the PowerPC, and then the jump is done based on whether the SCC
         operation produced a 1 or 0.  */
    case CALL_EXPR:
      /* Check for a built-in function.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
        {
          tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
          tree arglist = TREE_OPERAND (exp, 1);

          if (TREE_CODE (fndecl) == FUNCTION_DECL
              && DECL_BUILT_IN (fndecl)
              && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
              && arglist != NULL_TREE
              && TREE_CHAIN (arglist) != NULL_TREE)
            {
              rtx seq = expand_builtin_expect_jump (exp, if_false_label,
                                                    if_true_label);

              if (seq != NULL_RTX)
                {
                  emit_insn (seq);
                  return;
                }
            }
        }
      /* fall through and generate the normal code.  */

    default:
    normal:
      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
#if 0
      /* This is not needed any more and causes poor code since it causes
         comparisons and tests from non-SI objects to have different code
         sequences.  */
      /* Copy to register to avoid generating bad insns by cse
         from (set (mem ...) (arithop))  (set (cc0) (mem ...)).  */
      if (!cse_not_expected && GET_CODE (temp) == MEM)
        temp = copy_to_reg (temp);
#endif
      do_pending_stack_adjust ();
      /* Do any postincrements in the expression that was tested.  */
      emit_queue ();

      if (GET_CODE (temp) == CONST_INT
          || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
          || GET_CODE (temp) == LABEL_REF)
        {
          rtx target = temp == const0_rtx ? if_false_label : if_true_label;
          if (target)
            emit_jump (target);
        }
      else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
               && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
        /* Note swapping the labels gives us not-equal.  */
        do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
      else if (GET_MODE (temp) != VOIDmode)
        do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
                                 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
                                 GET_MODE (temp), NULL_RTX, 0,
                                 if_false_label, if_true_label);
      else
        abort ();
    }

  if (drop_through_label)
    {
      /* If do_jump produces code that might be jumped around,
         do any stack adjusts from that code, before the place
         where control merges in.  */
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
\f
/* Given a comparison expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.
   The code of EXP is ignored; we always test GT if SWAP is 0,
   and LT if SWAP is 1.  */

static void
do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
     tree exp;
     int swap;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));

  do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1,
                                if_false_label, if_true_label);
}

/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */

void
do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
                              if_true_label)
     enum machine_mode mode;
     int unsignedp;
     rtx op0, op1;
     rtx if_false_label, if_true_label;
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
        {
          op0_word = operand_subword_force (op0, i, mode);
          op1_word = operand_subword_force (op1, i, mode);
        }
      else
        {
          op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
          op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
        }

      /* All but high-order word must be compared as unsigned.  */
      do_compare_rtx_and_jump (op0_word, op1_word, GT,
                               (unsignedp || i > 0), word_mode, NULL_RTX, 0,
                               NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
                               NULL_RTX, 0, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
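
/* Illustrative sketch (hypothetical, not compiler code): for a two-word
   unsigned operand the compare ladder emitted above behaves like this
   made-up function.  */
#if 0
static int
example_two_word_gtu (unsigned long hi0, unsigned long lo0,
                      unsigned long hi1, unsigned long lo1)
{
  if (hi0 > hi1)                /* high-order words decide when unequal */
    return 1;
  if (hi0 != hi1)               /* here hi0 < hi1 */
    return 0;
  return lo0 > lo1;             /* low words compared unsigned */
}
#endif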

/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
                             operand_subword_force (op1, i, mode),
                             EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
                             word_mode, NULL_RTX, 0, if_false_label,
                             NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
\f
/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */

void
do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
     rtx op0;
     rtx if_false_label, if_true_label;
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */

  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
                         operand_subword_force (op0, i, GET_MODE (op0)),
                         part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
                               NULL_RTX, 0, if_false_label, if_true_label);

      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
                             const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
                             if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}
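
/* Illustrative sketch (hypothetical, not compiler code): the "or all
   the words" strategy above reduces a multiword zero test to a single
   compare; for two words it behaves like this made-up function.  */
#if 0
static int
example_two_word_zerop (unsigned long lo, unsigned long hi)
{
  return (lo | hi) == 0;        /* one branch instead of one per word */
}
#endif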
\f
/* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
   (including code to compute the values to be compared)
   and set (CC0) according to the result.
   The decision as to signed or unsigned comparison must be made by the caller.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     unsigned int align;
{
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    return tem;

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);

  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
}

/* Like do_compare_and_jump but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

void
do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
                         if_false_label, if_true_label)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     unsigned int align;
     rtx if_false_label, if_true_label;
{
  rtx tem;
  int dummy_true_label = 0;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  */
  if (! if_true_label && ! FLOAT_MODE_P (mode))
    {
      if_true_label = if_false_label;
      if_false_label = 0;
      code = reverse_condition (code);
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    {
      if (tem == const_true_rtx)
        {
          if (if_true_label)
            emit_jump (if_true_label);
        }
      else
        {
          if (if_false_label)
            emit_jump (if_false_label);
        }
      return;
    }

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  if (! if_true_label)
    {
      dummy_true_label = 1;
      if_true_label = gen_label_rtx ();
    }

  emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
                           if_true_label);

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_true_label)
    emit_label (if_true_label);
}

/* Generate code for a comparison expression EXP (including code to compute
   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
   generated code will drop through.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static void
do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
                     if_true_label)
     register tree exp;
     enum rtx_code signed_code, unsigned_code;
     rtx if_false_label, if_true_label;
{
  unsigned int align0, align1;
  register rtx op0, op1;
  register tree type;
  register enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
    return;

  op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
  if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
    return;

  type = TREE_TYPE (TREE_OPERAND (exp, 0));
  mode = TYPE_MODE (type);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
          || (GET_MODE_BITSIZE (mode)
              > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
                                                                      1)))))))
    {
      /* op0 might have been replaced by promoted constant, in which
         case the type of second argument should be used.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      mode = TYPE_MODE (type);
    }
  unsignedp = TREE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
          == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
          == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
                           MIN (align0, align1),
                           if_false_label, if_true_label);
}
\f
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
           && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
               == FUNCTION_TYPE))
          || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
              && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = LT;
      else
        code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = GE;
      else
        code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;

    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && bitnum < TYPE_PRECISION (type)
          && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
                                   bitnum - TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
                       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
                       : 1
#endif
                       );

      if (! get_subtarget (subtarget)
          || GET_MODE (subtarget) != operand_mode
          || ! safe_from_p (subtarget, inner, 1))
        subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
        op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
                            size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
        op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
        op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
                            ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
        op0 = expand_and (op0, const1_rtx, subtarget);

      return op0;
    }
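
  /* Illustrative sketch (hypothetical): the single-bit case above turns
     "flag = ((x & 8) != 0)" into "flag = (x >> 3) & 1", and the EQ
     variant into "flag = ((x >> 3) & 1) ^ 1"; the trailing AND is
     omitted when the tested bit is the sign bit.  */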

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (BRANCH_COST >= 0
               && ! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
                            queued_subexp_p (op0) ? copy_rtx (op0) : op0,
                            queued_subexp_p (op1) ? copy_rtx (op1) : op1,
                            operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
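
/* Illustrative sketch (hypothetical, not compiler code): the
   set/compare/jump/set fallback just above is equivalent to this
   made-up source-level pattern for a "<" store-flag.  */
#if 0
static int
example_scc_fallback (int a, int b)
{
  int flag = 1;                 /* presume the condition true */
  if (a < b)                    /* branch around the clear when it holds */
    goto done;
  flag = 0;                     /* condition was false */
 done:
  return flag;
}
#endif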
\f
/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                           0, default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
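
/* Illustrative sketch (hypothetical, GNU computed-goto notation): after
   the range check above, the dispatch amounts to

        if ((unsigned) index > range) goto default_label;
        goto *table[index];

   where table[] holds one label per case value and each entry is
   GET_MODE_SIZE (CASE_VECTOR_MODE) bytes wide.  */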

#endif /* HAVE_tablejump */