[gcc.git] / gcc / expr.c
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "optabs.h"
40 #include "libfuncs.h"
41 #include "recog.h"
42 #include "reload.h"
43 #include "output.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "langhooks.h"
48 #include "intl.h"
49 #include "tm_p.h"
50 #include "target.h"
51
52 /* Decide whether a function's arguments should be processed
53 from first to last or from last to first.
54
55 They should if the stack and args grow in opposite directions, but
56 only if we have push insns. */
57
58 #ifdef PUSH_ROUNDING
59
60 #ifndef PUSH_ARGS_REVERSED
61 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
62 #define PUSH_ARGS_REVERSED /* If it's last to first. */
63 #endif
64 #endif
65
66 #endif
67
68 #ifndef STACK_PUSH_CODE
69 #ifdef STACK_GROWS_DOWNWARD
70 #define STACK_PUSH_CODE PRE_DEC
71 #else
72 #define STACK_PUSH_CODE PRE_INC
73 #endif
74 #endif
75
76 /* Convert defined/undefined to boolean. */
77 #ifdef TARGET_MEM_FUNCTIONS
78 #undef TARGET_MEM_FUNCTIONS
79 #define TARGET_MEM_FUNCTIONS 1
80 #else
81 #define TARGET_MEM_FUNCTIONS 0
82 #endif
83
84
85 /* If this is nonzero, we do not bother generating VOLATILE
86 around volatile memory references, and we are willing to
87 output indirect addresses. If cse is to follow, we reject
88 indirect addresses so a useful potential cse is generated;
89 if it is used only once, instruction combination will produce
90 the same indirect address eventually. */
91 int cse_not_expected;
92
93 /* This structure is used by move_by_pieces to describe the move to
94 be performed. */
95 struct move_by_pieces
96 {
97 rtx to;
98 rtx to_addr;
99 int autinc_to;
100 int explicit_inc_to;
101 rtx from;
102 rtx from_addr;
103 int autinc_from;
104 int explicit_inc_from;
105 unsigned HOST_WIDE_INT len;
106 HOST_WIDE_INT offset;
107 int reverse;
108 };
109
110 /* This structure is used by store_by_pieces to describe the clear to
111 be performed. */
112
113 struct store_by_pieces
114 {
115 rtx to;
116 rtx to_addr;
117 int autinc_to;
118 int explicit_inc_to;
119 unsigned HOST_WIDE_INT len;
120 HOST_WIDE_INT offset;
121 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
122 void *constfundata;
123 int reverse;
124 };
125
126 static rtx enqueue_insn (rtx, rtx);
127 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
128 unsigned int);
129 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
130 struct move_by_pieces *);
131 static bool block_move_libcall_safe_for_call_parm (void);
132 static bool emit_block_move_via_movstr (rtx, rtx, rtx, unsigned);
133 static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
134 static tree emit_block_move_libcall_fn (int);
135 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
136 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
137 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
138 static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
139 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
140 struct store_by_pieces *);
141 static bool clear_storage_via_clrstr (rtx, rtx, unsigned);
142 static rtx clear_storage_via_libcall (rtx, rtx);
143 static tree clear_storage_libcall_fn (int);
144 static rtx compress_float_constant (rtx, rtx);
145 static rtx get_subtarget (rtx);
146 static int is_zeros_p (tree);
147 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
148 HOST_WIDE_INT, enum machine_mode,
149 tree, tree, int, int);
150 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
151 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
152 tree, enum machine_mode, int, tree, int);
153 static rtx var_rtx (tree);
154
155 static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
156 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
157
158 static int is_aligning_offset (tree, tree);
159 static rtx expand_increment (tree, int, int);
160 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
161 enum expand_modifier);
162 static rtx do_store_flag (tree, rtx, enum machine_mode, int);
163 #ifdef PUSH_ROUNDING
164 static void emit_single_push_insn (enum machine_mode, rtx, tree);
165 #endif
166 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
167 static rtx const_vector_from_tree (tree);
168
169 /* Record for each mode whether we can move a register directly to or
170 from an object of that mode in memory. If we can't, we won't try
171 to use that mode directly when accessing a field of that mode. */
172
173 static char direct_load[NUM_MACHINE_MODES];
174 static char direct_store[NUM_MACHINE_MODES];
175
176 /* Record for each mode whether we can float-extend from memory. */
177
178 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
179
180 /* This macro is used to determine whether move_by_pieces should be called
181 to perform a structure copy. */
182 #ifndef MOVE_BY_PIECES_P
183 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
184 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
185 #endif
186
187 /* This macro is used to determine whether clear_by_pieces should be
188 called to clear storage. */
189 #ifndef CLEAR_BY_PIECES_P
190 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
191 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
192 #endif
193
194 /* This macro is used to determine whether store_by_pieces should be
195 called to "memset" storage with byte values other than zero, or
196 to "memcpy" storage when the source is a constant string. */
197 #ifndef STORE_BY_PIECES_P
198 #define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
199 #endif
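/* Editor's note -- an illustrative sketch, not part of the original source:
   because the defaults above are guarded by #ifndef, a target header may
   supply its own heuristic, for example (hypothetical values)

     #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
       ((SIZE) <= 8 && (ALIGN) >= BITS_PER_WORD)

   to restrict inline expansion to small, well-aligned blocks and leave
   everything else to a movstr pattern or a memcpy libcall.  */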
200
201 /* This array records the insn_code of insns to perform block moves. */
202 enum insn_code movstr_optab[NUM_MACHINE_MODES];
203
204 /* This array records the insn_code of insns to perform block clears. */
205 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
206
207 /* These arrays record the insn_code of two different kinds of insns
208 to perform block compares. */
209 enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
210 enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
211
212 /* Stack of EXPR_WITH_FILE_LOCATION nested expressions. */
213 struct file_stack *expr_wfl_stack;
214
215 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
216
217 #ifndef SLOW_UNALIGNED_ACCESS
218 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
219 #endif
220 \f
221 /* This is run once per compilation to set up which modes can be used
222 directly in memory and to initialize the block move optab. */
223
224 void
225 init_expr_once (void)
226 {
227 rtx insn, pat;
228 enum machine_mode mode;
229 int num_clobbers;
230 rtx mem, mem1;
231 rtx reg;
232
233 /* Try indexing by frame ptr and try by stack ptr.
234 It is known that on the Convex the stack ptr isn't a valid index.
235 With luck, one or the other is valid on any machine. */
236 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
237 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
238
239 /* A scratch register we can modify in-place below to avoid
240 useless RTL allocations. */
241 reg = gen_rtx_REG (VOIDmode, -1);
242
243 insn = rtx_alloc (INSN);
244 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
245 PATTERN (insn) = pat;
246
247 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
248 mode = (enum machine_mode) ((int) mode + 1))
249 {
250 int regno;
251
252 direct_load[(int) mode] = direct_store[(int) mode] = 0;
253 PUT_MODE (mem, mode);
254 PUT_MODE (mem1, mode);
255 PUT_MODE (reg, mode);
256
257 /* See if there is some register that can be used in this mode and
258 directly loaded or stored from memory. */
259
260 if (mode != VOIDmode && mode != BLKmode)
261 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
262 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
263 regno++)
264 {
265 if (! HARD_REGNO_MODE_OK (regno, mode))
266 continue;
267
268 REGNO (reg) = regno;
269
270 SET_SRC (pat) = mem;
271 SET_DEST (pat) = reg;
272 if (recog (pat, insn, &num_clobbers) >= 0)
273 direct_load[(int) mode] = 1;
274
275 SET_SRC (pat) = mem1;
276 SET_DEST (pat) = reg;
277 if (recog (pat, insn, &num_clobbers) >= 0)
278 direct_load[(int) mode] = 1;
279
280 SET_SRC (pat) = reg;
281 SET_DEST (pat) = mem;
282 if (recog (pat, insn, &num_clobbers) >= 0)
283 direct_store[(int) mode] = 1;
284
285 SET_SRC (pat) = reg;
286 SET_DEST (pat) = mem1;
287 if (recog (pat, insn, &num_clobbers) >= 0)
288 direct_store[(int) mode] = 1;
289 }
290 }
291
292 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
293
294 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
295 mode = GET_MODE_WIDER_MODE (mode))
296 {
297 enum machine_mode srcmode;
298 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
299 srcmode = GET_MODE_WIDER_MODE (srcmode))
300 {
301 enum insn_code ic;
302
303 ic = can_extend_p (mode, srcmode, 0);
304 if (ic == CODE_FOR_nothing)
305 continue;
306
307 PUT_MODE (mem, srcmode);
308
309 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
310 float_extend_from_mem[mode][srcmode] = true;
311 }
312 }
313 }
314
315 /* This is run at the start of compiling a function. */
316
317 void
318 init_expr (void)
319 {
320 cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
321 }
322
323 /* Small sanity check that the queue is empty at the end of a function. */
324
325 void
326 finish_expr_for_function (void)
327 {
328 if (pending_chain)
329 abort ();
330 }
331 \f
332 /* Manage the queue of increment instructions to be output
333 for POSTINCREMENT_EXPR expressions, etc. */
334
335 /* Queue up to increment (or change) VAR later. BODY says how:
336 BODY should be the same thing you would pass to emit_insn
337 to increment right away. It will go to emit_insn later on.
338
339 The value is a QUEUED expression to be used in place of VAR
340 where you want to guarantee the pre-incrementation value of VAR. */
341
342 static rtx
343 enqueue_insn (rtx var, rtx body)
344 {
345 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
346 body, pending_chain);
347 return pending_chain;
348 }
349
350 /* Use protect_from_queue to convert a QUEUED expression
351 into something that you can put immediately into an instruction.
352 If the queued incrementation has not happened yet,
353 protect_from_queue returns the variable itself.
354 If the incrementation has happened, protect_from_queue returns a temp
355 that contains a copy of the old value of the variable.
356
357 Any time an rtx which might possibly be a QUEUED is to be put
358 into an instruction, it must be passed through protect_from_queue first.
359 QUEUED expressions are not meaningful in instructions.
360
361 Do not pass a value through protect_from_queue and then hold
362 on to it for a while before putting it in an instruction!
363 If the queue is flushed in between, incorrect code will result. */
364
365 rtx
366 protect_from_queue (rtx x, int modify)
367 {
368 RTX_CODE code = GET_CODE (x);
369
370 #if 0 /* A QUEUED can hang around after the queue is forced out. */
371 /* Shortcut for most common case. */
372 if (pending_chain == 0)
373 return x;
374 #endif
375
376 if (code != QUEUED)
377 {
378 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
379 use of autoincrement. Make a copy of the contents of the memory
380 location rather than a copy of the address, but not if the value is
381 of mode BLKmode. Don't modify X in place since it might be
382 shared. */
383 if (code == MEM && GET_MODE (x) != BLKmode
384 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
385 {
386 rtx y = XEXP (x, 0);
387 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
388
389 if (QUEUED_INSN (y))
390 {
391 rtx temp = gen_reg_rtx (GET_MODE (x));
392
393 emit_insn_before (gen_move_insn (temp, new),
394 QUEUED_INSN (y));
395 return temp;
396 }
397
398 /* Copy the address into a pseudo, so that the returned value
399 remains correct across calls to emit_queue. */
400 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
401 }
402
403 /* Otherwise, recursively protect the subexpressions of all
404 the kinds of rtx's that can contain a QUEUED. */
405 if (code == MEM)
406 {
407 rtx tem = protect_from_queue (XEXP (x, 0), 0);
408 if (tem != XEXP (x, 0))
409 {
410 x = copy_rtx (x);
411 XEXP (x, 0) = tem;
412 }
413 }
414 else if (code == PLUS || code == MULT)
415 {
416 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
417 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
418 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
419 {
420 x = copy_rtx (x);
421 XEXP (x, 0) = new0;
422 XEXP (x, 1) = new1;
423 }
424 }
425 return x;
426 }
427 /* If the increment has not happened, use the variable itself. Copy it
428 into a new pseudo so that the value remains correct across calls to
429 emit_queue. */
430 if (QUEUED_INSN (x) == 0)
431 return copy_to_reg (QUEUED_VAR (x));
432 /* If the increment has happened and a pre-increment copy exists,
433 use that copy. */
434 if (QUEUED_COPY (x) != 0)
435 return QUEUED_COPY (x);
436 /* The increment has happened but we haven't set up a pre-increment copy.
437 Set one up now, and use it. */
438 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
439 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
440 QUEUED_INSN (x));
441 return QUEUED_COPY (x);
442 }
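/* Editor's note -- an illustrative sketch of the protocol documented above,
   not part of the original source.  A hypothetical caller whose operands may
   carry queued POSTINCREMENT_EXPR side effects would do roughly:

     op0 = protect_from_queue (op0, 0);
     op1 = protect_from_queue (op1, 0);
     result = expand_binop (mode, add_optab, op0, op1, target,
                            unsignedp, OPTAB_LIB_WIDEN);
     emit_queue ();

   That is, each rtx is protected immediately before being placed in an insn,
   and the queued increments are flushed with emit_queue afterwards; holding a
   protected value across an intervening emit_queue would yield incorrect
   code, as the comment above warns.  */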
443
444 /* Return nonzero if X contains a QUEUED expression:
445 if it contains anything that will be altered by a queued increment.
446 We handle only combinations of MEM, PLUS, MINUS and MULT operators
447 since memory addresses generally contain only those. */
448
449 int
450 queued_subexp_p (rtx x)
451 {
452 enum rtx_code code = GET_CODE (x);
453 switch (code)
454 {
455 case QUEUED:
456 return 1;
457 case MEM:
458 return queued_subexp_p (XEXP (x, 0));
459 case MULT:
460 case PLUS:
461 case MINUS:
462 return (queued_subexp_p (XEXP (x, 0))
463 || queued_subexp_p (XEXP (x, 1)));
464 default:
465 return 0;
466 }
467 }
468
469 /* Retrieve a mark on the queue. */
470
471 static rtx
472 mark_queue (void)
473 {
474 return pending_chain;
475 }
476
477 /* Perform all the pending incrementations that have been enqueued
478 after MARK was retrieved. If MARK is null, perform all the
479 pending incrementations. */
480
481 static void
482 emit_insns_enqueued_after_mark (rtx mark)
483 {
484 rtx p;
485
486 /* The marked incrementation may have been emitted in the meantime
487 through a call to emit_queue. In this case, the mark is not valid
488 anymore so do nothing. */
489 if (mark && ! QUEUED_BODY (mark))
490 return;
491
492 while ((p = pending_chain) != mark)
493 {
494 rtx body = QUEUED_BODY (p);
495
496 switch (GET_CODE (body))
497 {
498 case INSN:
499 case JUMP_INSN:
500 case CALL_INSN:
501 case CODE_LABEL:
502 case BARRIER:
503 case NOTE:
504 QUEUED_INSN (p) = body;
505 emit_insn (body);
506 break;
507
508 #ifdef ENABLE_CHECKING
509 case SEQUENCE:
510 abort ();
511 break;
512 #endif
513
514 default:
515 QUEUED_INSN (p) = emit_insn (body);
516 break;
517 }
518
519 QUEUED_BODY (p) = 0;
520 pending_chain = QUEUED_NEXT (p);
521 }
522 }
523
524 /* Perform all the pending incrementations. */
525
526 void
527 emit_queue (void)
528 {
529 emit_insns_enqueued_after_mark (NULL_RTX);
530 }
531 \f
532 /* Copy data from FROM to TO, where the machine modes are not the same.
533 Both modes may be integer, or both may be floating.
534 UNSIGNEDP should be nonzero if FROM is an unsigned type.
535 This causes zero-extension instead of sign-extension. */
536
537 void
538 convert_move (rtx to, rtx from, int unsignedp)
539 {
540 enum machine_mode to_mode = GET_MODE (to);
541 enum machine_mode from_mode = GET_MODE (from);
542 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
543 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
544 enum insn_code code;
545 rtx libcall;
546
547 /* rtx code for making an equivalent value. */
548 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
549 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
550
551 to = protect_from_queue (to, 1);
552 from = protect_from_queue (from, 0);
553
554 if (to_real != from_real)
555 abort ();
556
557 /* If FROM is a SUBREG that indicates that we have already done at least
558 the required extension, strip it. We don't handle such SUBREGs as
559 TO here. */
560
561 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
562 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
563 >= GET_MODE_SIZE (to_mode))
564 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
565 from = gen_lowpart (to_mode, from), from_mode = to_mode;
566
567 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
568 abort ();
569
570 if (to_mode == from_mode
571 || (from_mode == VOIDmode && CONSTANT_P (from)))
572 {
573 emit_move_insn (to, from);
574 return;
575 }
576
577 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
578 {
579 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
580 abort ();
581
582 if (VECTOR_MODE_P (to_mode))
583 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
584 else
585 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
586
587 emit_move_insn (to, from);
588 return;
589 }
590
591 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
592 {
593 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
594 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
595 return;
596 }
597
598 if (to_real)
599 {
600 rtx value, insns;
601 convert_optab tab;
602
603 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
604 tab = sext_optab;
605 else if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
606 tab = trunc_optab;
607 else
608 abort ();
609
610 /* Try converting directly if the insn is supported. */
611
612 code = tab->handlers[to_mode][from_mode].insn_code;
613 if (code != CODE_FOR_nothing)
614 {
615 emit_unop_insn (code, to, from,
616 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
617 return;
618 }
619
620 /* Otherwise use a libcall. */
621 libcall = tab->handlers[to_mode][from_mode].libfunc;
622
623 if (!libcall)
624 /* This conversion is not implemented yet. */
625 abort ();
626
627 start_sequence ();
628 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
629 1, from, from_mode);
630 insns = get_insns ();
631 end_sequence ();
632 emit_libcall_block (insns, to, value,
633 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
634 from)
635 : gen_rtx_FLOAT_EXTEND (to_mode, from));
636 return;
637 }
638
639 /* Handle pointer conversion. */ /* SPEE 900220. */
640 /* Targets are expected to provide conversion insns between PxImode and
641 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
642 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
643 {
644 enum machine_mode full_mode
645 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
646
647 if (trunc_optab->handlers[to_mode][full_mode].insn_code
648 == CODE_FOR_nothing)
649 abort ();
650
651 if (full_mode != from_mode)
652 from = convert_to_mode (full_mode, from, unsignedp);
653 emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
654 to, from, UNKNOWN);
655 return;
656 }
657 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
658 {
659 enum machine_mode full_mode
660 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
661
662 if (sext_optab->handlers[full_mode][from_mode].insn_code
663 == CODE_FOR_nothing)
664 abort ();
665
666 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
667 to, from, UNKNOWN);
668 if (to_mode == full_mode)
669 return;
670
671 /* else proceed to integer conversions below. */
672 from_mode = full_mode;
673 }
674
675 /* Now both modes are integers. */
676
677 /* Handle expanding beyond a word. */
678 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
679 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
680 {
681 rtx insns;
682 rtx lowpart;
683 rtx fill_value;
684 rtx lowfrom;
685 int i;
686 enum machine_mode lowpart_mode;
687 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
688
689 /* Try converting directly if the insn is supported. */
690 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
691 != CODE_FOR_nothing)
692 {
693 /* If FROM is a SUBREG, put it into a register. Do this
694 so that we always generate the same set of insns for
695 better cse'ing; if an intermediate assignment occurred,
696 we won't be doing the operation directly on the SUBREG. */
697 if (optimize > 0 && GET_CODE (from) == SUBREG)
698 from = force_reg (from_mode, from);
699 emit_unop_insn (code, to, from, equiv_code);
700 return;
701 }
702 /* Next, try converting via full word. */
703 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
704 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
705 != CODE_FOR_nothing))
706 {
707 if (GET_CODE (to) == REG)
708 {
709 if (reg_overlap_mentioned_p (to, from))
710 from = force_reg (from_mode, from);
711 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
712 }
713 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
714 emit_unop_insn (code, to,
715 gen_lowpart (word_mode, to), equiv_code);
716 return;
717 }
718
719 /* No special multiword conversion insn; do it by hand. */
720 start_sequence ();
721
722 /* Since we will turn this into a no conflict block, we must ensure
723 that the source does not overlap the target. */
724
725 if (reg_overlap_mentioned_p (to, from))
726 from = force_reg (from_mode, from);
727
728 /* Get a copy of FROM widened to a word, if necessary. */
729 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
730 lowpart_mode = word_mode;
731 else
732 lowpart_mode = from_mode;
733
734 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
735
736 lowpart = gen_lowpart (lowpart_mode, to);
737 emit_move_insn (lowpart, lowfrom);
738
739 /* Compute the value to put in each remaining word. */
740 if (unsignedp)
741 fill_value = const0_rtx;
742 else
743 {
744 #ifdef HAVE_slt
745 if (HAVE_slt
746 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
747 && STORE_FLAG_VALUE == -1)
748 {
749 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
750 lowpart_mode, 0);
751 fill_value = gen_reg_rtx (word_mode);
752 emit_insn (gen_slt (fill_value));
753 }
754 else
755 #endif
756 {
757 fill_value
758 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
759 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
760 NULL_RTX, 0);
761 fill_value = convert_to_mode (word_mode, fill_value, 1);
762 }
763 }
764
765 /* Fill the remaining words. */
766 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
767 {
768 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
769 rtx subword = operand_subword (to, index, 1, to_mode);
770
771 if (subword == 0)
772 abort ();
773
774 if (fill_value != subword)
775 emit_move_insn (subword, fill_value);
776 }
777
778 insns = get_insns ();
779 end_sequence ();
780
781 emit_no_conflict_block (insns, to, from, NULL_RTX,
782 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
783 return;
784 }
785
786 /* Truncating multi-word to a word or less. */
787 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
788 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
789 {
790 if (!((GET_CODE (from) == MEM
791 && ! MEM_VOLATILE_P (from)
792 && direct_load[(int) to_mode]
793 && ! mode_dependent_address_p (XEXP (from, 0)))
794 || GET_CODE (from) == REG
795 || GET_CODE (from) == SUBREG))
796 from = force_reg (from_mode, from);
797 convert_move (to, gen_lowpart (word_mode, from), 0);
798 return;
799 }
800
801 /* Now follow all the conversions between integers
802 no more than a word long. */
803
804 /* For truncation, usually we can just refer to FROM in a narrower mode. */
805 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
806 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
807 GET_MODE_BITSIZE (from_mode)))
808 {
809 if (!((GET_CODE (from) == MEM
810 && ! MEM_VOLATILE_P (from)
811 && direct_load[(int) to_mode]
812 && ! mode_dependent_address_p (XEXP (from, 0)))
813 || GET_CODE (from) == REG
814 || GET_CODE (from) == SUBREG))
815 from = force_reg (from_mode, from);
816 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
817 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
818 from = copy_to_reg (from);
819 emit_move_insn (to, gen_lowpart (to_mode, from));
820 return;
821 }
822
823 /* Handle extension. */
824 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
825 {
826 /* Convert directly if that works. */
827 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
828 != CODE_FOR_nothing)
829 {
830 if (flag_force_mem)
831 from = force_not_mem (from);
832
833 emit_unop_insn (code, to, from, equiv_code);
834 return;
835 }
836 else
837 {
838 enum machine_mode intermediate;
839 rtx tmp;
840 tree shift_amount;
841
842 /* Search for a mode to convert via. */
843 for (intermediate = from_mode; intermediate != VOIDmode;
844 intermediate = GET_MODE_WIDER_MODE (intermediate))
845 if (((can_extend_p (to_mode, intermediate, unsignedp)
846 != CODE_FOR_nothing)
847 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
848 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
849 GET_MODE_BITSIZE (intermediate))))
850 && (can_extend_p (intermediate, from_mode, unsignedp)
851 != CODE_FOR_nothing))
852 {
853 convert_move (to, convert_to_mode (intermediate, from,
854 unsignedp), unsignedp);
855 return;
856 }
857
858 /* No suitable intermediate mode.
859 Generate what we need with shifts. */
860 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
861 - GET_MODE_BITSIZE (from_mode), 0);
862 from = gen_lowpart (to_mode, force_reg (from_mode, from));
863 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
864 to, unsignedp);
865 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
866 to, unsignedp);
867 if (tmp != to)
868 emit_move_insn (to, tmp);
869 return;
870 }
871 }
872
873 /* Support special truncate insns for certain modes. */
874 if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
875 {
876 emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
877 to, from, UNKNOWN);
878 return;
879 }
880
881 /* Handle truncation of volatile memrefs, and so on;
882 the things that couldn't be truncated directly,
883 and for which there was no special instruction.
884
885 ??? Code above formerly short-circuited this, for most integer
886 mode pairs, with a force_reg in from_mode followed by a recursive
887 call to this routine. Appears always to have been wrong. */
888 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
889 {
890 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
891 emit_move_insn (to, temp);
892 return;
893 }
894
895 /* Mode combination is not recognized. */
896 abort ();
897 }
898
899 /* Return an rtx for a value that would result
900 from converting X to mode MODE.
901 Both X and MODE may be floating, or both integer.
902 UNSIGNEDP is nonzero if X is an unsigned value.
903 This can be done by referring to a part of X in place
904 or by copying to a new temporary with conversion.
905
906 This function *must not* call protect_from_queue
907 except when putting X into an insn (in which case convert_move does it). */
908
909 rtx
910 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
911 {
912 return convert_modes (mode, VOIDmode, x, unsignedp);
913 }
914
915 /* Return an rtx for a value that would result
916 from converting X from mode OLDMODE to mode MODE.
917 Both modes may be floating, or both integer.
918 UNSIGNEDP is nonzero if X is an unsigned value.
919
920 This can be done by referring to a part of X in place
921 or by copying to a new temporary with conversion.
922
923 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
924
925 This function *must not* call protect_from_queue
926 except when putting X into an insn (in which case convert_move does it). */
927
928 rtx
929 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
930 {
931 rtx temp;
932
933 /* If FROM is a SUBREG that indicates that we have already done at least
934 the required extension, strip it. */
935
936 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
937 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
938 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
939 x = gen_lowpart (mode, x);
940
941 if (GET_MODE (x) != VOIDmode)
942 oldmode = GET_MODE (x);
943
944 if (mode == oldmode)
945 return x;
946
947 /* There is one case that we must handle specially: If we are converting
948 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
949 we are to interpret the constant as unsigned, gen_lowpart will do
950 the wrong if the constant appears negative. What we want to do is
951 make the high-order word of the constant zero, not all ones. */
952
953 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
954 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
955 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
956 {
957 HOST_WIDE_INT val = INTVAL (x);
958
959 if (oldmode != VOIDmode
960 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
961 {
962 int width = GET_MODE_BITSIZE (oldmode);
963
964 /* We need to zero extend VAL. */
965 val &= ((HOST_WIDE_INT) 1 << width) - 1;
966 }
967
968 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
969 }
970
971 /* We can do this with a gen_lowpart if both desired and current modes
972 are integer, and this is either a constant integer, a register, or a
973 non-volatile MEM. Except for the constant case where MODE is no
974 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
975
976 if ((GET_CODE (x) == CONST_INT
977 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
978 || (GET_MODE_CLASS (mode) == MODE_INT
979 && GET_MODE_CLASS (oldmode) == MODE_INT
980 && (GET_CODE (x) == CONST_DOUBLE
981 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
982 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
983 && direct_load[(int) mode])
984 || (GET_CODE (x) == REG
985 && (! HARD_REGISTER_P (x)
986 || HARD_REGNO_MODE_OK (REGNO (x), mode))
987 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
988 GET_MODE_BITSIZE (GET_MODE (x)))))))))
989 {
990 /* ??? If we don't know OLDMODE, we have to assume here that
991 X does not need sign- or zero-extension. This may not be
992 the case, but it's the best we can do. */
993 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
994 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
995 {
996 HOST_WIDE_INT val = INTVAL (x);
997 int width = GET_MODE_BITSIZE (oldmode);
998
999 /* We must sign or zero-extend in this case. Start by
1000 zero-extending, then sign extend if we need to. */
1001 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1002 if (! unsignedp
1003 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1004 val |= (HOST_WIDE_INT) (-1) << width;
1005
1006 return gen_int_mode (val, mode);
1007 }
1008
1009 return gen_lowpart (mode, x);
1010 }
1011
1012 /* Converting from an integer constant into MODE is always equivalent to a
1013 subreg operation. */
1014 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
1015 {
1016 if (GET_MODE_BITSIZE (mode) != GET_MODE_BITSIZE (oldmode))
1017 abort ();
1018 return simplify_gen_subreg (mode, x, oldmode, 0);
1019 }
1020
1021 temp = gen_reg_rtx (mode);
1022 convert_move (temp, x, unsignedp);
1023 return temp;
1024 }
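/* Editor's note -- an illustrative usage sketch, not part of the original
   source:

     rtx byte = gen_reg_rtx (QImode);
     ...
     rtx word = convert_to_mode (SImode, byte, 1);

   With UNSIGNEDP == 1 this widening goes through convert_move, which
   allocates a fresh SImode pseudo and emits a zero-extension; a narrowing
   conversion of a register or non-volatile MEM would instead hit the
   gen_lowpart case above and may return a low part of X itself.  */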
1025 \f
1026 /* STORE_MAX_PIECES is the number of bytes at a time that we can
1027 store efficiently. Due to internal GCC limitations, this is
1028 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
1029 for an immediate constant. */
1030
1031 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
1032
1033 /* Determine whether the LEN bytes can be moved by using several move
1034 instructions. Return nonzero if a call to move_by_pieces should
1035 succeed. */
1036
1037 int
1038 can_move_by_pieces (unsigned HOST_WIDE_INT len,
1039 unsigned int align ATTRIBUTE_UNUSED)
1040 {
1041 return MOVE_BY_PIECES_P (len, align);
1042 }
1043
1044 /* Generate several move instructions to copy LEN bytes from block FROM to
1045 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1046 and TO through protect_from_queue before calling.
1047
1048 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1049 used to push FROM to the stack.
1050
1051 ALIGN is maximum stack alignment we can assume.
1052
1053 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
1054 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
1055 stpcpy. */
1056
1057 rtx
1058 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
1059 unsigned int align, int endp)
1060 {
1061 struct move_by_pieces data;
1062 rtx to_addr, from_addr = XEXP (from, 0);
1063 unsigned int max_size = MOVE_MAX_PIECES + 1;
1064 enum machine_mode mode = VOIDmode, tmode;
1065 enum insn_code icode;
1066
1067 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
1068
1069 data.offset = 0;
1070 data.from_addr = from_addr;
1071 if (to)
1072 {
1073 to_addr = XEXP (to, 0);
1074 data.to = to;
1075 data.autinc_to
1076 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1077 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1078 data.reverse
1079 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1080 }
1081 else
1082 {
1083 to_addr = NULL_RTX;
1084 data.to = NULL_RTX;
1085 data.autinc_to = 1;
1086 #ifdef STACK_GROWS_DOWNWARD
1087 data.reverse = 1;
1088 #else
1089 data.reverse = 0;
1090 #endif
1091 }
1092 data.to_addr = to_addr;
1093 data.from = from;
1094 data.autinc_from
1095 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1096 || GET_CODE (from_addr) == POST_INC
1097 || GET_CODE (from_addr) == POST_DEC);
1098
1099 data.explicit_inc_from = 0;
1100 data.explicit_inc_to = 0;
1101 if (data.reverse) data.offset = len;
1102 data.len = len;
1103
1104 /* If copying requires more than two move insns,
1105 copy addresses to registers (to make displacements shorter)
1106 and use post-increment if available. */
1107 if (!(data.autinc_from && data.autinc_to)
1108 && move_by_pieces_ninsns (len, align) > 2)
1109 {
1110 /* Find the mode of the largest move... */
1111 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1112 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1113 if (GET_MODE_SIZE (tmode) < max_size)
1114 mode = tmode;
1115
1116 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1117 {
1118 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1119 data.autinc_from = 1;
1120 data.explicit_inc_from = -1;
1121 }
1122 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1123 {
1124 data.from_addr = copy_addr_to_reg (from_addr);
1125 data.autinc_from = 1;
1126 data.explicit_inc_from = 1;
1127 }
1128 if (!data.autinc_from && CONSTANT_P (from_addr))
1129 data.from_addr = copy_addr_to_reg (from_addr);
1130 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1131 {
1132 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1133 data.autinc_to = 1;
1134 data.explicit_inc_to = -1;
1135 }
1136 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1137 {
1138 data.to_addr = copy_addr_to_reg (to_addr);
1139 data.autinc_to = 1;
1140 data.explicit_inc_to = 1;
1141 }
1142 if (!data.autinc_to && CONSTANT_P (to_addr))
1143 data.to_addr = copy_addr_to_reg (to_addr);
1144 }
1145
1146 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1147 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1148 align = MOVE_MAX * BITS_PER_UNIT;
1149
1150 /* First move what we can in the largest integer mode, then go to
1151 successively smaller modes. */
1152
1153 while (max_size > 1)
1154 {
1155 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1156 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1157 if (GET_MODE_SIZE (tmode) < max_size)
1158 mode = tmode;
1159
1160 if (mode == VOIDmode)
1161 break;
1162
1163 icode = mov_optab->handlers[(int) mode].insn_code;
1164 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1165 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1166
1167 max_size = GET_MODE_SIZE (mode);
1168 }
1169
1170 /* The code above should have handled everything. */
1171 if (data.len > 0)
1172 abort ();
1173
1174 if (endp)
1175 {
1176 rtx to1;
1177
1178 if (data.reverse)
1179 abort ();
1180 if (data.autinc_to)
1181 {
1182 if (endp == 2)
1183 {
1184 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1185 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1186 else
1187 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
1188 -1));
1189 }
1190 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1191 data.offset);
1192 }
1193 else
1194 {
1195 if (endp == 2)
1196 --data.offset;
1197 to1 = adjust_address (data.to, QImode, data.offset);
1198 }
1199 return to1;
1200 }
1201 else
1202 return data.to;
1203 }
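/* Editor's note -- an illustrative sketch of the ENDP convention documented
   above, not part of the original source.  DST and SRC are BLKmode MEMs that
   have already been passed through protect_from_queue:

     if (can_move_by_pieces (len, align))
       {
         rtx end = move_by_pieces (dst, src, len, align, 1);
         ...
       }

   With ENDP == 1, END is a QImode MEM addressing the first byte past the
   copied block (a mempcpy-style result); ENDP == 2 gives the last byte of
   the block (stpcpy-style), and ENDP == 0 simply returns DST.  */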
1204
1205 /* Return number of insns required to move L bytes by pieces.
1206 ALIGN (in bits) is maximum alignment we can assume. */
1207
1208 static unsigned HOST_WIDE_INT
1209 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align)
1210 {
1211 unsigned HOST_WIDE_INT n_insns = 0;
1212 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1213
1214 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1215 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1216 align = MOVE_MAX * BITS_PER_UNIT;
1217
1218 while (max_size > 1)
1219 {
1220 enum machine_mode mode = VOIDmode, tmode;
1221 enum insn_code icode;
1222
1223 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1224 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1225 if (GET_MODE_SIZE (tmode) < max_size)
1226 mode = tmode;
1227
1228 if (mode == VOIDmode)
1229 break;
1230
1231 icode = mov_optab->handlers[(int) mode].insn_code;
1232 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1233 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1234
1235 max_size = GET_MODE_SIZE (mode);
1236 }
1237
1238 if (l)
1239 abort ();
1240 return n_insns;
1241 }
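/* Editor's note -- an illustrative worked example, not part of the original
   source: on a hypothetical 32-bit target (MOVE_MAX == 4, word_mode ==
   SImode), a 10-byte copy at word alignment costs

     move_by_pieces_ninsns (10, 32) == 2 SImode moves + 1 HImode move == 3

   and MOVE_BY_PIECES_P (10, 32) compares that 3 against MOVE_RATIO to decide
   whether the copy is expanded inline or left to emit_block_move's other
   strategies.  */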
1242
1243 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1244 with move instructions for mode MODE. GENFUN is the gen_... function
1245 to make a move insn for that mode. DATA has all the other info. */
1246
1247 static void
1248 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1249 struct move_by_pieces *data)
1250 {
1251 unsigned int size = GET_MODE_SIZE (mode);
1252 rtx to1 = NULL_RTX, from1;
1253
1254 while (data->len >= size)
1255 {
1256 if (data->reverse)
1257 data->offset -= size;
1258
1259 if (data->to)
1260 {
1261 if (data->autinc_to)
1262 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1263 data->offset);
1264 else
1265 to1 = adjust_address (data->to, mode, data->offset);
1266 }
1267
1268 if (data->autinc_from)
1269 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1270 data->offset);
1271 else
1272 from1 = adjust_address (data->from, mode, data->offset);
1273
1274 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1275 emit_insn (gen_add2_insn (data->to_addr,
1276 GEN_INT (-(HOST_WIDE_INT)size)));
1277 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1278 emit_insn (gen_add2_insn (data->from_addr,
1279 GEN_INT (-(HOST_WIDE_INT)size)));
1280
1281 if (data->to)
1282 emit_insn ((*genfun) (to1, from1));
1283 else
1284 {
1285 #ifdef PUSH_ROUNDING
1286 emit_single_push_insn (mode, from1, NULL);
1287 #else
1288 abort ();
1289 #endif
1290 }
1291
1292 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1293 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1294 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1295 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1296
1297 if (! data->reverse)
1298 data->offset += size;
1299
1300 data->len -= size;
1301 }
1302 }
1303 \f
1304 /* Emit code to move a block Y to a block X. This may be done with
1305 string-move instructions, with multiple scalar move instructions,
1306 or with a library call.
1307
1308 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1309 SIZE is an rtx that says how long they are.
1310 ALIGN is the maximum alignment we can assume they have.
1311 METHOD describes what kind of copy this is, and what mechanisms may be used.
1312
1313 Return the address of the new block, if memcpy is called and returns it,
1314 0 otherwise. */
1315
1316 rtx
1317 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1318 {
1319 bool may_use_call;
1320 rtx retval = 0;
1321 unsigned int align;
1322
1323 switch (method)
1324 {
1325 case BLOCK_OP_NORMAL:
1326 may_use_call = true;
1327 break;
1328
1329 case BLOCK_OP_CALL_PARM:
1330 may_use_call = block_move_libcall_safe_for_call_parm ();
1331
1332 /* Make inhibit_defer_pop nonzero around the library call
1333 to force it to pop the arguments right away. */
1334 NO_DEFER_POP;
1335 break;
1336
1337 case BLOCK_OP_NO_LIBCALL:
1338 may_use_call = false;
1339 break;
1340
1341 default:
1342 abort ();
1343 }
1344
1345 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1346
1347 if (GET_MODE (x) != BLKmode)
1348 abort ();
1349 if (GET_MODE (y) != BLKmode)
1350 abort ();
1351
1352 x = protect_from_queue (x, 1);
1353 y = protect_from_queue (y, 0);
1354 size = protect_from_queue (size, 0);
1355
1356 if (GET_CODE (x) != MEM)
1357 abort ();
1358 if (GET_CODE (y) != MEM)
1359 abort ();
1360 if (size == 0)
1361 abort ();
1362
1363 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1364 can be incorrect is coming from __builtin_memcpy. */
1365 if (GET_CODE (size) == CONST_INT)
1366 {
1367 if (INTVAL (size) == 0)
1368 return 0;
1369
1370 x = shallow_copy_rtx (x);
1371 y = shallow_copy_rtx (y);
1372 set_mem_size (x, size);
1373 set_mem_size (y, size);
1374 }
1375
1376 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1377 move_by_pieces (x, y, INTVAL (size), align, 0);
1378 else if (emit_block_move_via_movstr (x, y, size, align))
1379 ;
1380 else if (may_use_call)
1381 retval = emit_block_move_via_libcall (x, y, size);
1382 else
1383 emit_block_move_via_loop (x, y, size, align);
1384
1385 if (method == BLOCK_OP_CALL_PARM)
1386 OK_DEFER_POP;
1387
1388 return retval;
1389 }
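/* Editor's note -- an illustrative sketch, not part of the original source:
   a block copy for a structure assignment is typically emitted as

     emit_block_move (to_rtx, from_rtx, expr_size (exp), BLOCK_OP_NORMAL);

   whereas code that sets up outgoing call arguments passes
   BLOCK_OP_CALL_PARM, so that the memcpy/bcopy fallback is used only when
   block_move_libcall_safe_for_call_parm (below) confirms the libcall cannot
   clobber argument registers or stack slots already loaded, while
   BLOCK_OP_NO_LIBCALL forbids the libcall and falls back to the open-coded
   loop when neither by-pieces copying nor a movstr pattern applies.  */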
1390
1391 /* A subroutine of emit_block_move. Returns true if calling the
1392 block move libcall will not clobber any parameters which may have
1393 already been placed on the stack. */
1394
1395 static bool
1396 block_move_libcall_safe_for_call_parm (void)
1397 {
1398 /* If arguments are pushed on the stack, then they're safe. */
1399 if (PUSH_ARGS)
1400 return true;
1401
1402 /* If registers go on the stack anyway, any argument is sure to clobber
1403 an outgoing argument. */
1404 #if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
1405 {
1406 tree fn = emit_block_move_libcall_fn (false);
1407 (void) fn;
1408 if (REG_PARM_STACK_SPACE (fn) != 0)
1409 return false;
1410 }
1411 #endif
1412
1413 /* If any argument goes in memory, then it might clobber an outgoing
1414 argument. */
1415 {
1416 CUMULATIVE_ARGS args_so_far;
1417 tree fn, arg;
1418
1419 fn = emit_block_move_libcall_fn (false);
1420 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
1421
1422 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1423 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1424 {
1425 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1426 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1427 if (!tmp || !REG_P (tmp))
1428 return false;
1429 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1430 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
1431 NULL_TREE, 1))
1432 return false;
1433 #endif
1434 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1435 }
1436 }
1437 return true;
1438 }
1439
1440 /* A subroutine of emit_block_move. Expand a movstr pattern;
1441 return true if successful. */
1442
1443 static bool
1444 emit_block_move_via_movstr (rtx x, rtx y, rtx size, unsigned int align)
1445 {
1446 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1447 int save_volatile_ok = volatile_ok;
1448 enum machine_mode mode;
1449
1450 /* Since this is a move insn, we don't care about volatility. */
1451 volatile_ok = 1;
1452
1453 /* Try the most limited insn first, because there's no point
1454 including more than one in the machine description unless
1455 the more limited one has some advantage. */
1456
1457 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1458 mode = GET_MODE_WIDER_MODE (mode))
1459 {
1460 enum insn_code code = movstr_optab[(int) mode];
1461 insn_operand_predicate_fn pred;
1462
1463 if (code != CODE_FOR_nothing
1464 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1465 here because if SIZE is less than the mode mask, as it is
1466 returned by the macro, it will definitely be less than the
1467 actual mode mask. */
1468 && ((GET_CODE (size) == CONST_INT
1469 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1470 <= (GET_MODE_MASK (mode) >> 1)))
1471 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1472 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1473 || (*pred) (x, BLKmode))
1474 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1475 || (*pred) (y, BLKmode))
1476 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1477 || (*pred) (opalign, VOIDmode)))
1478 {
1479 rtx op2;
1480 rtx last = get_last_insn ();
1481 rtx pat;
1482
1483 op2 = convert_to_mode (mode, size, 1);
1484 pred = insn_data[(int) code].operand[2].predicate;
1485 if (pred != 0 && ! (*pred) (op2, mode))
1486 op2 = copy_to_mode_reg (mode, op2);
1487
1488 /* ??? When called via emit_block_move_for_call, it'd be
1489 nice if there were some way to inform the backend, so
1490 that it doesn't fail the expansion because it thinks
1491 emitting the libcall would be more efficient. */
1492
1493 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1494 if (pat)
1495 {
1496 emit_insn (pat);
1497 volatile_ok = save_volatile_ok;
1498 return true;
1499 }
1500 else
1501 delete_insns_since (last);
1502 }
1503 }
1504
1505 volatile_ok = save_volatile_ok;
1506 return false;
1507 }
1508
1509 /* A subroutine of emit_block_move. Expand a call to memcpy or bcopy.
1510 Return the return value from memcpy, 0 otherwise. */
1511
1512 static rtx
1513 emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
1514 {
1515 rtx dst_addr, src_addr;
1516 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1517 enum machine_mode size_mode;
1518 rtx retval;
1519
1520 /* DST, SRC, or SIZE may have been passed through protect_from_queue.
1521
1522 It is unsafe to save the value generated by protect_from_queue and reuse
1523 it later. Consider what happens if emit_queue is called before the
1524 return value from protect_from_queue is used.
1525
1526 Expansion of the CALL_EXPR below will call emit_queue before we are
1527 finished emitting RTL for argument setup. So if we are not careful we
1528 could get the wrong value for an argument.
1529
1530 To avoid this problem we go ahead and emit code to copy the addresses of
1531 DST and SRC and SIZE into new pseudos. We can then place those new
1532 pseudos into an RTL_EXPR and use them later, even after a call to
1533 emit_queue.
1534
1535 Note this is not strictly needed for library calls since they do not call
1536 emit_queue before loading their arguments. However, we may need to have
1537 library calls call emit_queue in the future since failing to do so could
1538 cause problems for targets which define SMALL_REGISTER_CLASSES and pass
1539 arguments in registers. */
1540
1541 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1542 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1543
1544 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1545 src_addr = convert_memory_address (ptr_mode, src_addr);
1546
1547 dst_tree = make_tree (ptr_type_node, dst_addr);
1548 src_tree = make_tree (ptr_type_node, src_addr);
1549
1550 if (TARGET_MEM_FUNCTIONS)
1551 size_mode = TYPE_MODE (sizetype);
1552 else
1553 size_mode = TYPE_MODE (unsigned_type_node);
1554
1555 size = convert_to_mode (size_mode, size, 1);
1556 size = copy_to_mode_reg (size_mode, size);
1557
1558 /* It is incorrect to use the libcall calling conventions to call
1559 memcpy in this context. This could be a user call to memcpy and
1560 the user may wish to examine the return value from memcpy. For
1561 targets where libcalls and normal calls have different conventions
1562 for returning pointers, we could end up generating incorrect code.
1563
1564 For convenience, we generate the call to bcopy this way as well. */
1565
1566 if (TARGET_MEM_FUNCTIONS)
1567 size_tree = make_tree (sizetype, size);
1568 else
1569 size_tree = make_tree (unsigned_type_node, size);
1570
1571 fn = emit_block_move_libcall_fn (true);
1572 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1573 if (TARGET_MEM_FUNCTIONS)
1574 {
1575 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1576 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1577 }
1578 else
1579 {
1580 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1581 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1582 }
1583
1584 /* Now we have to build up the CALL_EXPR itself. */
1585 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1586 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1587 call_expr, arg_list, NULL_TREE);
1588
1589 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1590
1591 /* If we are initializing a readonly value, show the above call clobbered
1592 it. Otherwise, a load from it may erroneously be hoisted from a loop, or
1593 the delay slot scheduler might overlook conflicts and take nasty
1594 decisions. */
1595 if (RTX_UNCHANGING_P (dst))
1596 add_function_usage_to
1597 (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
1598 gen_rtx_CLOBBER (VOIDmode, dst),
1599 NULL_RTX));
1600
1601 return TARGET_MEM_FUNCTIONS ? retval : NULL_RTX;
1602 }
1603
1604 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1605 for the function we use for block copies. The first time FOR_CALL
1606 is true, we call assemble_external. */
1607
1608 static GTY(()) tree block_move_fn;
1609
1610 void
1611 init_block_move_fn (const char *asmspec)
1612 {
1613 if (!block_move_fn)
1614 {
1615 tree args, fn;
1616
1617 if (TARGET_MEM_FUNCTIONS)
1618 {
1619 fn = get_identifier ("memcpy");
1620 args = build_function_type_list (ptr_type_node, ptr_type_node,
1621 const_ptr_type_node, sizetype,
1622 NULL_TREE);
1623 }
1624 else
1625 {
1626 fn = get_identifier ("bcopy");
1627 args = build_function_type_list (void_type_node, const_ptr_type_node,
1628 ptr_type_node, unsigned_type_node,
1629 NULL_TREE);
1630 }
1631
1632 fn = build_decl (FUNCTION_DECL, fn, args);
1633 DECL_EXTERNAL (fn) = 1;
1634 TREE_PUBLIC (fn) = 1;
1635 DECL_ARTIFICIAL (fn) = 1;
1636 TREE_NOTHROW (fn) = 1;
1637
1638 block_move_fn = fn;
1639 }
1640
1641 if (asmspec)
1642 {
1643 SET_DECL_RTL (block_move_fn, NULL_RTX);
1644 SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));
1645 }
1646 }
1647
1648 static tree
1649 emit_block_move_libcall_fn (int for_call)
1650 {
1651 static bool emitted_extern;
1652
1653 if (!block_move_fn)
1654 init_block_move_fn (NULL);
1655
1656 if (for_call && !emitted_extern)
1657 {
1658 emitted_extern = true;
1659 make_decl_rtl (block_move_fn, NULL);
1660 assemble_external (block_move_fn);
1661 }
1662
1663 return block_move_fn;
1664 }
1665
1666 /* A subroutine of emit_block_move. Copy the data via an explicit
1667 loop. This is used only when libcalls are forbidden. */
1668 /* ??? It'd be nice to copy in hunks larger than QImode. */
1669
1670 static void
1671 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1672 unsigned int align ATTRIBUTE_UNUSED)
1673 {
1674 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1675 enum machine_mode iter_mode;
1676
1677 iter_mode = GET_MODE (size);
1678 if (iter_mode == VOIDmode)
1679 iter_mode = word_mode;
1680
1681 top_label = gen_label_rtx ();
1682 cmp_label = gen_label_rtx ();
1683 iter = gen_reg_rtx (iter_mode);
1684
1685 emit_move_insn (iter, const0_rtx);
1686
1687 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1688 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1689 do_pending_stack_adjust ();
1690
1691 emit_note (NOTE_INSN_LOOP_BEG);
1692
1693 emit_jump (cmp_label);
1694 emit_label (top_label);
1695
1696 tmp = convert_modes (Pmode, iter_mode, iter, true);
1697 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1698 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1699 x = change_address (x, QImode, x_addr);
1700 y = change_address (y, QImode, y_addr);
1701
1702 emit_move_insn (x, y);
1703
1704 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1705 true, OPTAB_LIB_WIDEN);
1706 if (tmp != iter)
1707 emit_move_insn (iter, tmp);
1708
1709 emit_note (NOTE_INSN_LOOP_CONT);
1710 emit_label (cmp_label);
1711
1712 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1713 true, top_label);
1714
1715 emit_note (NOTE_INSN_LOOP_END);
1716 }
1717 \f
1718 /* Copy all or part of a value X into registers starting at REGNO.
1719 The number of registers to be filled is NREGS. */
1720
1721 void
1722 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1723 {
1724 int i;
1725 #ifdef HAVE_load_multiple
1726 rtx pat;
1727 rtx last;
1728 #endif
1729
1730 if (nregs == 0)
1731 return;
1732
1733 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1734 x = validize_mem (force_const_mem (mode, x));
1735
1736 /* See if the machine can do this with a load multiple insn. */
1737 #ifdef HAVE_load_multiple
1738 if (HAVE_load_multiple)
1739 {
1740 last = get_last_insn ();
1741 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1742 GEN_INT (nregs));
1743 if (pat)
1744 {
1745 emit_insn (pat);
1746 return;
1747 }
1748 else
1749 delete_insns_since (last);
1750 }
1751 #endif
1752
1753 for (i = 0; i < nregs; i++)
1754 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1755 operand_subword_force (x, i, mode));
1756 }
1757
1758 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1759 The number of registers to be filled is NREGS. */
1760
1761 void
1762 move_block_from_reg (int regno, rtx x, int nregs)
1763 {
1764 int i;
1765
1766 if (nregs == 0)
1767 return;
1768
1769 /* See if the machine can do this with a store multiple insn. */
1770 #ifdef HAVE_store_multiple
1771 if (HAVE_store_multiple)
1772 {
1773 rtx last = get_last_insn ();
1774 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1775 GEN_INT (nregs));
1776 if (pat)
1777 {
1778 emit_insn (pat);
1779 return;
1780 }
1781 else
1782 delete_insns_since (last);
1783 }
1784 #endif
1785
1786 for (i = 0; i < nregs; i++)
1787 {
1788 rtx tem = operand_subword (x, i, 1, BLKmode);
1789
1790 if (tem == 0)
1791 abort ();
1792
1793 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1794 }
1795 }
1796
1797 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1798 ORIG, where ORIG is a non-consecutive group of registers represented by
1799 a PARALLEL. The clone is identical to the original except in that the
1800 original set of registers is replaced by a new set of pseudo registers.
1801 The new set has the same modes as the original set. */
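/* Illustrative sketch (assumed shapes, not taken from this file): if ORIG is
       (parallel [(expr_list (reg:DI 3) (const_int 0))
                  (expr_list (reg:DI 4) (const_int 8))])
   the clone returned here has the same layout and modes, but with fresh
   pseudo registers in place of registers 3 and 4.  */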
1802
1803 rtx
1804 gen_group_rtx (rtx orig)
1805 {
1806 int i, length;
1807 rtx *tmps;
1808
1809 if (GET_CODE (orig) != PARALLEL)
1810 abort ();
1811
1812 length = XVECLEN (orig, 0);
1813 tmps = alloca (sizeof (rtx) * length);
1814
1815 /* Skip a NULL entry in first slot. */
1816 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1817
1818 if (i)
1819 tmps[0] = 0;
1820
1821 for (; i < length; i++)
1822 {
1823 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1824 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1825
1826 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1827 }
1828
1829 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1830 }
1831
1832 /* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
1833 where DST is non-consecutive registers represented by a PARALLEL.
1834 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1835 if not known. */
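/* A minimal usage sketch (hypothetical values, not from this file):
       emit_group_load (dst, src_mem, type, 12);
   where DST is a PARALLEL of two DImode pieces at offsets 0 and 8 and
   SSIZE == 12, so the second piece is a trailing fragment and the code
   below must avoid reading past the end of the 12-byte structure.  */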
1836
1837 void
1838 emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
1839 {
1840 rtx *tmps, src;
1841 int start, i;
1842
1843 if (GET_CODE (dst) != PARALLEL)
1844 abort ();
1845
1846 /* Check for a NULL entry, used to indicate that the parameter goes
1847 both on the stack and in registers. */
1848 if (XEXP (XVECEXP (dst, 0, 0), 0))
1849 start = 0;
1850 else
1851 start = 1;
1852
1853 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1854
1855 /* Process the pieces. */
1856 for (i = start; i < XVECLEN (dst, 0); i++)
1857 {
1858 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1859 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1860 unsigned int bytelen = GET_MODE_SIZE (mode);
1861 int shift = 0;
1862
1863 /* Handle trailing fragments that run over the size of the struct. */
1864 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1865 {
1866 /* Arrange to shift the fragment to where it belongs.
1867 extract_bit_field loads to the lsb of the reg. */
1868 if (
1869 #ifdef BLOCK_REG_PADDING
1870 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1871 == (BYTES_BIG_ENDIAN ? upward : downward)
1872 #else
1873 BYTES_BIG_ENDIAN
1874 #endif
1875 )
1876 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1877 bytelen = ssize - bytepos;
1878 if (bytelen <= 0)
1879 abort ();
1880 }
1881
1882 /* If we won't be loading directly from memory, protect the real source
1883 from strange tricks we might play; but make sure that the source can
1884 be loaded directly into the destination. */
1885 src = orig_src;
1886 if (GET_CODE (orig_src) != MEM
1887 && (!CONSTANT_P (orig_src)
1888 || (GET_MODE (orig_src) != mode
1889 && GET_MODE (orig_src) != VOIDmode)))
1890 {
1891 if (GET_MODE (orig_src) == VOIDmode)
1892 src = gen_reg_rtx (mode);
1893 else
1894 src = gen_reg_rtx (GET_MODE (orig_src));
1895
1896 emit_move_insn (src, orig_src);
1897 }
1898
1899 /* Optimize the access just a bit. */
1900 if (GET_CODE (src) == MEM
1901 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1902 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1903 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1904 && bytelen == GET_MODE_SIZE (mode))
1905 {
1906 tmps[i] = gen_reg_rtx (mode);
1907 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1908 }
1909 else if (GET_CODE (src) == CONCAT)
1910 {
1911 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1912 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1913
1914 if ((bytepos == 0 && bytelen == slen0)
1915 || (bytepos != 0 && bytepos + bytelen <= slen))
1916 {
1917 /* The following assumes that the concatenated objects all
1918 have the same size. In this case, a simple calculation
1919 can be used to determine the object and the bit field
1920 to be extracted. */
1921 tmps[i] = XEXP (src, bytepos / slen0);
1922 if (! CONSTANT_P (tmps[i])
1923 && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
1924 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1925 (bytepos % slen0) * BITS_PER_UNIT,
1926 1, NULL_RTX, mode, mode, ssize);
1927 }
1928 else if (bytepos == 0)
1929 {
1930 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
1931 emit_move_insn (mem, src);
1932 tmps[i] = adjust_address (mem, mode, 0);
1933 }
1934 else
1935 abort ();
1936 }
1937 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1938 SIMD register, which is currently broken. While we get GCC
1939 to emit proper RTL for these cases, let's dump to memory. */
1940 else if (VECTOR_MODE_P (GET_MODE (dst))
1941 && GET_CODE (src) == REG)
1942 {
1943 int slen = GET_MODE_SIZE (GET_MODE (src));
1944 rtx mem;
1945
1946 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1947 emit_move_insn (mem, src);
1948 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1949 }
1950 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1951 && XVECLEN (dst, 0) > 1)
1952 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
1953 else if (CONSTANT_P (src)
1954 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
1955 tmps[i] = src;
1956 else
1957 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1958 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1959 mode, mode, ssize);
1960
1961 if (shift)
1962 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1963 tmps[i], 0, OPTAB_WIDEN);
1964 }
1965
1966 emit_queue ();
1967
1968 /* Copy the extracted pieces into the proper (probable) hard regs. */
1969 for (i = start; i < XVECLEN (dst, 0); i++)
1970 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
1971 }
1972
1973 /* Emit code to move a block SRC to block DST, where SRC and DST are
1974 non-consecutive groups of registers, each represented by a PARALLEL. */
1975
1976 void
1977 emit_group_move (rtx dst, rtx src)
1978 {
1979 int i;
1980
1981 if (GET_CODE (src) != PARALLEL
1982 || GET_CODE (dst) != PARALLEL
1983 || XVECLEN (src, 0) != XVECLEN (dst, 0))
1984 abort ();
1985
1986 /* Skip first entry if NULL. */
1987 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1988 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1989 XEXP (XVECEXP (src, 0, i), 0));
1990 }
1991
1992 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1993 where SRC is non-consecutive registers represented by a PARALLEL.
1994 SSIZE represents the total size of block ORIG_DST, or -1 if not
1995 known. */
1996
1997 void
1998 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1999 {
2000 rtx *tmps, dst;
2001 int start, i;
2002
2003 if (GET_CODE (src) != PARALLEL)
2004 abort ();
2005
2006 /* Check for a NULL entry, used to indicate that the parameter goes
2007 both on the stack and in registers. */
2008 if (XEXP (XVECEXP (src, 0, 0), 0))
2009 start = 0;
2010 else
2011 start = 1;
2012
2013 tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));
2014
2015 /* Copy the (probable) hard regs into pseudos. */
2016 for (i = start; i < XVECLEN (src, 0); i++)
2017 {
2018 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2019 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2020 emit_move_insn (tmps[i], reg);
2021 }
2022 emit_queue ();
2023
2024 /* If we won't be storing directly into memory, protect the real destination
2025 from strange tricks we might play. */
2026 dst = orig_dst;
2027 if (GET_CODE (dst) == PARALLEL)
2028 {
2029 rtx temp;
2030
2031 /* We can get a PARALLEL dst if there is a conditional expression in
2032 a return statement. In that case, the dst and src are the same,
2033 so no action is necessary. */
2034 if (rtx_equal_p (dst, src))
2035 return;
2036
2037 /* It is unclear if we can ever reach here, but we may as well handle
2038 it. Allocate a temporary, and split this into a store/load to/from
2039 the temporary. */
2040
2041 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2042 emit_group_store (temp, src, type, ssize);
2043 emit_group_load (dst, temp, type, ssize);
2044 return;
2045 }
2046 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2047 {
2048 dst = gen_reg_rtx (GET_MODE (orig_dst));
2049 /* Make life a bit easier for combine. */
2050 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
2051 }
2052
2053 /* Process the pieces. */
2054 for (i = start; i < XVECLEN (src, 0); i++)
2055 {
2056 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2057 enum machine_mode mode = GET_MODE (tmps[i]);
2058 unsigned int bytelen = GET_MODE_SIZE (mode);
2059 rtx dest = dst;
2060
2061 /* Handle trailing fragments that run over the size of the struct. */
2062 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2063 {
2064 /* store_bit_field always takes its value from the lsb.
2065 Move the fragment to the lsb if it's not already there. */
2066 if (
2067 #ifdef BLOCK_REG_PADDING
2068 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2069 == (BYTES_BIG_ENDIAN ? upward : downward)
2070 #else
2071 BYTES_BIG_ENDIAN
2072 #endif
2073 )
2074 {
2075 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2076 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2077 tmps[i], 0, OPTAB_WIDEN);
2078 }
2079 bytelen = ssize - bytepos;
2080 }
2081
2082 if (GET_CODE (dst) == CONCAT)
2083 {
2084 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2085 dest = XEXP (dst, 0);
2086 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2087 {
2088 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2089 dest = XEXP (dst, 1);
2090 }
2091 else if (bytepos == 0 && XVECLEN (src, 0))
2092 {
2093 dest = assign_stack_temp (GET_MODE (dest),
2094 GET_MODE_SIZE (GET_MODE (dest)), 0);
2095 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2096 tmps[i]);
2097 dst = dest;
2098 break;
2099 }
2100 else
2101 abort ();
2102 }
2103
2104 /* Optimize the access just a bit. */
2105 if (GET_CODE (dest) == MEM
2106 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2107 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2108 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2109 && bytelen == GET_MODE_SIZE (mode))
2110 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2111 else
2112 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2113 mode, tmps[i], ssize);
2114 }
2115
2116 emit_queue ();
2117
2118 /* Copy from the pseudo into the (probable) hard reg. */
2119 if (orig_dst != dst)
2120 emit_move_insn (orig_dst, dst);
2121 }
2122
2123 /* Generate code to copy a BLKmode object of TYPE out of a
2124 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2125 is null, a stack temporary is created. TGTBLK is returned.
2126
2127 The purpose of this routine is to handle functions that return
2128 BLKmode structures in registers. Some machines (the PA for example)
2129 want to return all small structures in registers regardless of the
2130 structure's alignment. */
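/* Worked example of the padding logic below (assumed 32-bit big-endian
   target, not specific to any port): for a 6-byte structure returned at
   the least significant end of the registers, bytes % UNITS_PER_WORD == 2,
   so padding_correction == 32 - 2 * BITS_PER_UNIT == 16 and the first 16
   bits of SRCREG are skipped when extracting.  */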
2131
2132 rtx
2133 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2134 {
2135 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2136 rtx src = NULL, dst = NULL;
2137 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2138 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2139
2140 if (tgtblk == 0)
2141 {
2142 tgtblk = assign_temp (build_qualified_type (type,
2143 (TYPE_QUALS (type)
2144 | TYPE_QUAL_CONST)),
2145 0, 1, 1);
2146 preserve_temp_slots (tgtblk);
2147 }
2148
2149 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2150 into a new pseudo which is a full word. */
2151
2152 if (GET_MODE (srcreg) != BLKmode
2153 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2154 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2155
2156 /* If the structure doesn't take up a whole number of words, see whether
2157 SRCREG is padded on the left or on the right. If it's on the left,
2158 set PADDING_CORRECTION to the number of bits to skip.
2159
2160 In most ABIs, the structure will be returned at the least significant end of
2161 the register, which translates to right padding on little-endian
2162 targets and left padding on big-endian targets. The opposite
2163 holds if the structure is returned at the most significant
2164 end of the register. */
2165 if (bytes % UNITS_PER_WORD != 0
2166 && (targetm.calls.return_in_msb (type)
2167 ? !BYTES_BIG_ENDIAN
2168 : BYTES_BIG_ENDIAN))
2169 padding_correction
2170 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2171
2172 /* Copy the structure BITSIZE bits at a time.
2173
2174 We could probably emit more efficient code for machines which do not use
2175 strict alignment, but it doesn't seem worth the effort at the current
2176 time. */
2177 for (bitpos = 0, xbitpos = padding_correction;
2178 bitpos < bytes * BITS_PER_UNIT;
2179 bitpos += bitsize, xbitpos += bitsize)
2180 {
2181 /* We need a new source operand each time xbitpos is on a
2182 word boundary and when xbitpos == padding_correction
2183 (the first time through). */
2184 if (xbitpos % BITS_PER_WORD == 0
2185 || xbitpos == padding_correction)
2186 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2187 GET_MODE (srcreg));
2188
2189 /* We need a new destination operand each time bitpos is on
2190 a word boundary. */
2191 if (bitpos % BITS_PER_WORD == 0)
2192 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2193
2194 /* Use xbitpos for the source extraction (right justified) and
2195 bitpos for the destination store (left justified). */
2196 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2197 extract_bit_field (src, bitsize,
2198 xbitpos % BITS_PER_WORD, 1,
2199 NULL_RTX, word_mode, word_mode,
2200 BITS_PER_WORD),
2201 BITS_PER_WORD);
2202 }
2203
2204 return tgtblk;
2205 }
2206
2207 /* Add a USE expression for REG to the (possibly empty) list pointed
2208 to by CALL_FUSAGE. REG must denote a hard register. */
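/* Usage sketch (hypothetical register number): a call expander might do
       use_reg (&call_fusage, gen_rtx_REG (SImode, 3));
   to record that hard register 3 carries an argument, so that the USE list
   eventually attached to the CALL_INSN keeps it live.  */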
2209
2210 void
2211 use_reg (rtx *call_fusage, rtx reg)
2212 {
2213 if (GET_CODE (reg) != REG
2214 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2215 abort ();
2216
2217 *call_fusage
2218 = gen_rtx_EXPR_LIST (VOIDmode,
2219 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2220 }
2221
2222 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2223 starting at REGNO. All of these registers must be hard registers. */
2224
2225 void
2226 use_regs (rtx *call_fusage, int regno, int nregs)
2227 {
2228 int i;
2229
2230 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2231 abort ();
2232
2233 for (i = 0; i < nregs; i++)
2234 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2235 }
2236
2237 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2238 PARALLEL REGS. This is for calls that pass values in multiple
2239 non-contiguous locations. The Irix 6 ABI has examples of this. */
2240
2241 void
2242 use_group_regs (rtx *call_fusage, rtx regs)
2243 {
2244 int i;
2245
2246 for (i = 0; i < XVECLEN (regs, 0); i++)
2247 {
2248 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2249
2250 /* A NULL entry means the parameter goes both on the stack and in
2251 registers. This can also be a MEM for targets that pass values
2252 partially on the stack and partially in registers. */
2253 if (reg != 0 && GET_CODE (reg) == REG)
2254 use_reg (call_fusage, reg);
2255 }
2256 }
2257 \f
2258
2259 /* Determine whether the LEN bytes generated by CONSTFUN can be
2260 stored to memory using several move instructions. CONSTFUNDATA is
2261 a pointer which will be passed as argument in every CONSTFUN call.
2262 ALIGN is maximum alignment we can assume. Return nonzero if a
2263 call to store_by_pieces should succeed. */
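/* CONSTFUN is a callback such as clear_by_pieces_1 below; a hypothetical
   caller wanting to know whether a 16-byte clear could be done piecewise
   might write
       can_store_by_pieces (16, clear_by_pieces_1, NULL, align);
   (clear_by_pieces_1 is static here, so callers outside this file would
   supply their own callback of the same shape.)  */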
2264
2265 int
2266 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2267 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2268 void *constfundata, unsigned int align)
2269 {
2270 unsigned HOST_WIDE_INT max_size, l;
2271 HOST_WIDE_INT offset = 0;
2272 enum machine_mode mode, tmode;
2273 enum insn_code icode;
2274 int reverse;
2275 rtx cst;
2276
2277 if (len == 0)
2278 return 1;
2279
2280 if (! STORE_BY_PIECES_P (len, align))
2281 return 0;
2282
2283 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2284 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2285 align = MOVE_MAX * BITS_PER_UNIT;
2286
2287 /* We would first store what we can in the largest integer mode, then go to
2288 successively smaller modes. */
2289
2290 for (reverse = 0;
2291 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2292 reverse++)
2293 {
2294 l = len;
2295 mode = VOIDmode;
2296 max_size = STORE_MAX_PIECES + 1;
2297 while (max_size > 1)
2298 {
2299 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2300 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2301 if (GET_MODE_SIZE (tmode) < max_size)
2302 mode = tmode;
2303
2304 if (mode == VOIDmode)
2305 break;
2306
2307 icode = mov_optab->handlers[(int) mode].insn_code;
2308 if (icode != CODE_FOR_nothing
2309 && align >= GET_MODE_ALIGNMENT (mode))
2310 {
2311 unsigned int size = GET_MODE_SIZE (mode);
2312
2313 while (l >= size)
2314 {
2315 if (reverse)
2316 offset -= size;
2317
2318 cst = (*constfun) (constfundata, offset, mode);
2319 if (!LEGITIMATE_CONSTANT_P (cst))
2320 return 0;
2321
2322 if (!reverse)
2323 offset += size;
2324
2325 l -= size;
2326 }
2327 }
2328
2329 max_size = GET_MODE_SIZE (mode);
2330 }
2331
2332 /* The code above should have handled everything. */
2333 if (l != 0)
2334 abort ();
2335 }
2336
2337 return 1;
2338 }
2339
2340 /* Generate several move instructions to store LEN bytes generated by
2341 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2342 pointer which will be passed as argument in every CONSTFUN call.
2343 ALIGN is maximum alignment we can assume.
2344 If ENDP is 0 return TO, if ENDP is 1 return memory at the end a la
2345 mempcpy, and if ENDP is 2 return memory at the end minus one byte a la
2346 stpcpy. */
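/* Example of the ENDP convention (assumed values): after storing 5 constant
   bytes at TO, ENDP == 1 returns the address TO + 5 (as for mempcpy) and
   ENDP == 2 returns TO + 4 (as for stpcpy, which points at the last byte
   stored).  */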
2347
2348 rtx
2349 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2350 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2351 void *constfundata, unsigned int align, int endp)
2352 {
2353 struct store_by_pieces data;
2354
2355 if (len == 0)
2356 {
2357 if (endp == 2)
2358 abort ();
2359 return to;
2360 }
2361
2362 if (! STORE_BY_PIECES_P (len, align))
2363 abort ();
2364 to = protect_from_queue (to, 1);
2365 data.constfun = constfun;
2366 data.constfundata = constfundata;
2367 data.len = len;
2368 data.to = to;
2369 store_by_pieces_1 (&data, align);
2370 if (endp)
2371 {
2372 rtx to1;
2373
2374 if (data.reverse)
2375 abort ();
2376 if (data.autinc_to)
2377 {
2378 if (endp == 2)
2379 {
2380 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2381 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2382 else
2383 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2384 -1));
2385 }
2386 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2387 data.offset);
2388 }
2389 else
2390 {
2391 if (endp == 2)
2392 --data.offset;
2393 to1 = adjust_address (data.to, QImode, data.offset);
2394 }
2395 return to1;
2396 }
2397 else
2398 return data.to;
2399 }
2400
2401 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2402 rtx with BLKmode). The caller must pass TO through protect_from_queue
2403 before calling. ALIGN is maximum alignment we can assume. */
2404
2405 static void
2406 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2407 {
2408 struct store_by_pieces data;
2409
2410 if (len == 0)
2411 return;
2412
2413 data.constfun = clear_by_pieces_1;
2414 data.constfundata = NULL;
2415 data.len = len;
2416 data.to = to;
2417 store_by_pieces_1 (&data, align);
2418 }
2419
2420 /* Callback routine for clear_by_pieces.
2421 Return const0_rtx unconditionally. */
2422
2423 static rtx
2424 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2425 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2426 enum machine_mode mode ATTRIBUTE_UNUSED)
2427 {
2428 return const0_rtx;
2429 }
2430
2431 /* Subroutine of clear_by_pieces and store_by_pieces.
2432 Generate several move instructions to store LEN bytes of block TO. (A MEM
2433 rtx with BLKmode). The caller must pass TO through protect_from_queue
2434 before calling. ALIGN is maximum alignment we can assume. */
2435
2436 static void
2437 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2438 unsigned int align ATTRIBUTE_UNUSED)
2439 {
2440 rtx to_addr = XEXP (data->to, 0);
2441 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2442 enum machine_mode mode = VOIDmode, tmode;
2443 enum insn_code icode;
2444
2445 data->offset = 0;
2446 data->to_addr = to_addr;
2447 data->autinc_to
2448 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2449 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2450
2451 data->explicit_inc_to = 0;
2452 data->reverse
2453 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2454 if (data->reverse)
2455 data->offset = data->len;
2456
2457 /* If storing requires more than two move insns,
2458 copy addresses to registers (to make displacements shorter)
2459 and use post-increment if available. */
2460 if (!data->autinc_to
2461 && move_by_pieces_ninsns (data->len, align) > 2)
2462 {
2463 /* Determine the main mode we'll be using. */
2464 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2465 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2466 if (GET_MODE_SIZE (tmode) < max_size)
2467 mode = tmode;
2468
2469 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2470 {
2471 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2472 data->autinc_to = 1;
2473 data->explicit_inc_to = -1;
2474 }
2475
2476 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2477 && ! data->autinc_to)
2478 {
2479 data->to_addr = copy_addr_to_reg (to_addr);
2480 data->autinc_to = 1;
2481 data->explicit_inc_to = 1;
2482 }
2483
2484 if (!data->autinc_to && CONSTANT_P (to_addr))
2485 data->to_addr = copy_addr_to_reg (to_addr);
2486 }
2487
2488 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2489 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2490 align = MOVE_MAX * BITS_PER_UNIT;
2491
2492 /* First store what we can in the largest integer mode, then go to
2493 successively smaller modes. */
2494
2495 while (max_size > 1)
2496 {
2497 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2498 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2499 if (GET_MODE_SIZE (tmode) < max_size)
2500 mode = tmode;
2501
2502 if (mode == VOIDmode)
2503 break;
2504
2505 icode = mov_optab->handlers[(int) mode].insn_code;
2506 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2507 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2508
2509 max_size = GET_MODE_SIZE (mode);
2510 }
2511
2512 /* The code above should have handled everything. */
2513 if (data->len != 0)
2514 abort ();
2515 }
2516
2517 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2518 with move instructions for mode MODE. GENFUN is the gen_... function
2519 to make a move insn for that mode. DATA has all the other info. */
2520
2521 static void
2522 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2523 struct store_by_pieces *data)
2524 {
2525 unsigned int size = GET_MODE_SIZE (mode);
2526 rtx to1, cst;
2527
2528 while (data->len >= size)
2529 {
2530 if (data->reverse)
2531 data->offset -= size;
2532
2533 if (data->autinc_to)
2534 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2535 data->offset);
2536 else
2537 to1 = adjust_address (data->to, mode, data->offset);
2538
2539 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2540 emit_insn (gen_add2_insn (data->to_addr,
2541 GEN_INT (-(HOST_WIDE_INT) size)));
2542
2543 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2544 emit_insn ((*genfun) (to1, cst));
2545
2546 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2547 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2548
2549 if (! data->reverse)
2550 data->offset += size;
2551
2552 data->len -= size;
2553 }
2554 }
2555 \f
2556 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2557 its length in bytes. */
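/* Typical call (sketch, values assumed): clearing a 32-byte BLKmode object
   would be
       clear_storage (object, GEN_INT (32));
   which falls through to clear_by_pieces, a clrstr pattern, or a
   memset/bzero libcall, in that order of preference.  */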
2558
2559 rtx
2560 clear_storage (rtx object, rtx size)
2561 {
2562 rtx retval = 0;
2563 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2564 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2565
2566 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2567 just move a zero. Otherwise, do this a piece at a time. */
2568 if (GET_MODE (object) != BLKmode
2569 && GET_CODE (size) == CONST_INT
2570 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2571 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2572 else
2573 {
2574 object = protect_from_queue (object, 1);
2575 size = protect_from_queue (size, 0);
2576
2577 if (size == const0_rtx)
2578 ;
2579 else if (GET_CODE (size) == CONST_INT
2580 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2581 clear_by_pieces (object, INTVAL (size), align);
2582 else if (clear_storage_via_clrstr (object, size, align))
2583 ;
2584 else
2585 retval = clear_storage_via_libcall (object, size);
2586 }
2587
2588 return retval;
2589 }
2590
2591 /* A subroutine of clear_storage. Expand a clrstr pattern;
2592 return true if successful. */
2593
2594 static bool
2595 clear_storage_via_clrstr (rtx object, rtx size, unsigned int align)
2596 {
2597 /* Try the most limited insn first, because there's no point
2598 including more than one in the machine description unless
2599 the more limited one has some advantage. */
2600
2601 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2602 enum machine_mode mode;
2603
2604 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2605 mode = GET_MODE_WIDER_MODE (mode))
2606 {
2607 enum insn_code code = clrstr_optab[(int) mode];
2608 insn_operand_predicate_fn pred;
2609
2610 if (code != CODE_FOR_nothing
2611 /* We don't need MODE to be narrower than
2612 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2613 the mode mask, as it is returned by the macro, it will
2614 definitely be less than the actual mode mask. */
2615 && ((GET_CODE (size) == CONST_INT
2616 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2617 <= (GET_MODE_MASK (mode) >> 1)))
2618 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2619 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2620 || (*pred) (object, BLKmode))
2621 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2622 || (*pred) (opalign, VOIDmode)))
2623 {
2624 rtx op1;
2625 rtx last = get_last_insn ();
2626 rtx pat;
2627
2628 op1 = convert_to_mode (mode, size, 1);
2629 pred = insn_data[(int) code].operand[1].predicate;
2630 if (pred != 0 && ! (*pred) (op1, mode))
2631 op1 = copy_to_mode_reg (mode, op1);
2632
2633 pat = GEN_FCN ((int) code) (object, op1, opalign);
2634 if (pat)
2635 {
2636 emit_insn (pat);
2637 return true;
2638 }
2639 else
2640 delete_insns_since (last);
2641 }
2642 }
2643
2644 return false;
2645 }
2646
2647 /* A subroutine of clear_storage. Expand a call to memset or bzero.
2648 Return the return value of memset, 0 otherwise. */
2649
2650 static rtx
2651 clear_storage_via_libcall (rtx object, rtx size)
2652 {
2653 tree call_expr, arg_list, fn, object_tree, size_tree;
2654 enum machine_mode size_mode;
2655 rtx retval;
2656
2657 /* OBJECT or SIZE may have been passed through protect_from_queue.
2658
2659 It is unsafe to save the value generated by protect_from_queue
2660 and reuse it later. Consider what happens if emit_queue is
2661 called before the return value from protect_from_queue is used.
2662
2663 Expansion of the CALL_EXPR below will call emit_queue before
2664 we are finished emitting RTL for argument setup. So if we are
2665 not careful we could get the wrong value for an argument.
2666
2667 To avoid this problem we go ahead and emit code to copy OBJECT
2668 and SIZE into new pseudos. We can then place those new pseudos
2669 into an RTL_EXPR and use them later, even after a call to
2670 emit_queue.
2671
2672 Note this is not strictly needed for library calls since they
2673 do not call emit_queue before loading their arguments. However,
2674 we may need to have library calls call emit_queue in the future
2675 since failing to do so could cause problems for targets which
2676 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2677
2678 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2679
2680 if (TARGET_MEM_FUNCTIONS)
2681 size_mode = TYPE_MODE (sizetype);
2682 else
2683 size_mode = TYPE_MODE (unsigned_type_node);
2684 size = convert_to_mode (size_mode, size, 1);
2685 size = copy_to_mode_reg (size_mode, size);
2686
2687 /* It is incorrect to use the libcall calling conventions to call
2688 memset in this context. This could be a user call to memset and
2689 the user may wish to examine the return value from memset. For
2690 targets where libcalls and normal calls have different conventions
2691 for returning pointers, we could end up generating incorrect code.
2692
2693 For convenience, we generate the call to bzero this way as well. */
2694
2695 object_tree = make_tree (ptr_type_node, object);
2696 if (TARGET_MEM_FUNCTIONS)
2697 size_tree = make_tree (sizetype, size);
2698 else
2699 size_tree = make_tree (unsigned_type_node, size);
2700
2701 fn = clear_storage_libcall_fn (true);
2702 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2703 if (TARGET_MEM_FUNCTIONS)
2704 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2705 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2706
2707 /* Now we have to build up the CALL_EXPR itself. */
2708 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2709 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2710 call_expr, arg_list, NULL_TREE);
2711
2712 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2713
2714 /* If we are initializing a readonly value, show the above call
2715 clobbered it. Otherwise, a load from it may erroneously be
2716 hoisted from a loop. */
2717 if (RTX_UNCHANGING_P (object))
2718 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2719
2720 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
2721 }
2722
2723 /* A subroutine of clear_storage_via_libcall. Create the tree node
2724 for the function we use for block clears. The first time FOR_CALL
2725 is true, we call assemble_external. */
2726
2727 static GTY(()) tree block_clear_fn;
2728
2729 void
2730 init_block_clear_fn (const char *asmspec)
2731 {
2732 if (!block_clear_fn)
2733 {
2734 tree fn, args;
2735
2736 if (TARGET_MEM_FUNCTIONS)
2737 {
2738 fn = get_identifier ("memset");
2739 args = build_function_type_list (ptr_type_node, ptr_type_node,
2740 integer_type_node, sizetype,
2741 NULL_TREE);
2742 }
2743 else
2744 {
2745 fn = get_identifier ("bzero");
2746 args = build_function_type_list (void_type_node, ptr_type_node,
2747 unsigned_type_node, NULL_TREE);
2748 }
2749
2750 fn = build_decl (FUNCTION_DECL, fn, args);
2751 DECL_EXTERNAL (fn) = 1;
2752 TREE_PUBLIC (fn) = 1;
2753 DECL_ARTIFICIAL (fn) = 1;
2754 TREE_NOTHROW (fn) = 1;
2755
2756 block_clear_fn = fn;
2757 }
2758
2759 if (asmspec)
2760 {
2761 SET_DECL_RTL (block_clear_fn, NULL_RTX);
2762 SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
2763 }
2764 }
2765
2766 static tree
2767 clear_storage_libcall_fn (int for_call)
2768 {
2769 static bool emitted_extern;
2770
2771 if (!block_clear_fn)
2772 init_block_clear_fn (NULL);
2773
2774 if (for_call && !emitted_extern)
2775 {
2776 emitted_extern = true;
2777 make_decl_rtl (block_clear_fn, NULL);
2778 assemble_external (block_clear_fn);
2779 }
2780
2781 return block_clear_fn;
2782 }
2783 \f
2784 /* Generate code to copy Y into X.
2785 Both Y and X must have the same mode, except that
2786 Y can be a constant with VOIDmode.
2787 This mode cannot be BLKmode; use emit_block_move for that.
2788
2789 Return the last instruction emitted. */
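/* Minimal usage sketch (assumed operands): moving a constant into a fresh
   pseudo,
       emit_move_insn (gen_reg_rtx (SImode), GEN_INT (42));
   constants that are not LEGITIMATE_CONSTANT_P are forced into the constant
   pool below before the move is emitted.  */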
2790
2791 rtx
2792 emit_move_insn (rtx x, rtx y)
2793 {
2794 enum machine_mode mode = GET_MODE (x);
2795 rtx y_cst = NULL_RTX;
2796 rtx last_insn, set;
2797
2798 x = protect_from_queue (x, 1);
2799 y = protect_from_queue (y, 0);
2800
2801 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2802 abort ();
2803
2804 /* Never force constant_p_rtx to memory. */
2805 if (GET_CODE (y) == CONSTANT_P_RTX)
2806 ;
2807 else if (CONSTANT_P (y))
2808 {
2809 if (optimize
2810 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
2811 && (last_insn = compress_float_constant (x, y)))
2812 return last_insn;
2813
2814 y_cst = y;
2815
2816 if (!LEGITIMATE_CONSTANT_P (y))
2817 {
2818 y = force_const_mem (mode, y);
2819
2820 /* If the target's cannot_force_const_mem prevented the spill,
2821 assume that the target's move expanders will also take care
2822 of the non-legitimate constant. */
2823 if (!y)
2824 y = y_cst;
2825 }
2826 }
2827
2828 /* If X or Y are memory references, verify that their addresses are valid
2829 for the machine. */
2830 if (GET_CODE (x) == MEM
2831 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2832 && ! push_operand (x, GET_MODE (x)))
2833 || (flag_force_addr
2834 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2835 x = validize_mem (x);
2836
2837 if (GET_CODE (y) == MEM
2838 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2839 || (flag_force_addr
2840 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2841 y = validize_mem (y);
2842
2843 if (mode == BLKmode)
2844 abort ();
2845
2846 last_insn = emit_move_insn_1 (x, y);
2847
2848 if (y_cst && GET_CODE (x) == REG
2849 && (set = single_set (last_insn)) != NULL_RTX
2850 && SET_DEST (set) == x
2851 && ! rtx_equal_p (y_cst, SET_SRC (set)))
2852 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2853
2854 return last_insn;
2855 }
2856
2857 /* Low level part of emit_move_insn.
2858 Called just like emit_move_insn, but assumes X and Y
2859 are basically valid. */
2860
2861 rtx
2862 emit_move_insn_1 (rtx x, rtx y)
2863 {
2864 enum machine_mode mode = GET_MODE (x);
2865 enum machine_mode submode;
2866 enum mode_class class = GET_MODE_CLASS (mode);
2867
2868 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2869 abort ();
2870
2871 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2872 return
2873 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2874
2875 /* Expand complex moves by moving real part and imag part, if possible. */
2876 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2877 && BLKmode != (submode = GET_MODE_INNER (mode))
2878 && (mov_optab->handlers[(int) submode].insn_code
2879 != CODE_FOR_nothing))
2880 {
2881 /* Don't split destination if it is a stack push. */
2882 int stack = push_operand (x, GET_MODE (x));
2883
2884 #ifdef PUSH_ROUNDING
2885 /* If we output to the stack, but the size is smaller than what the
2886 machine can push exactly, we need to use move instructions. */
2887 if (stack
2888 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2889 != GET_MODE_SIZE (submode)))
2890 {
2891 rtx temp;
2892 HOST_WIDE_INT offset1, offset2;
2893
2894 /* Do not use anti_adjust_stack, since we don't want to update
2895 stack_pointer_delta. */
2896 temp = expand_binop (Pmode,
2897 #ifdef STACK_GROWS_DOWNWARD
2898 sub_optab,
2899 #else
2900 add_optab,
2901 #endif
2902 stack_pointer_rtx,
2903 GEN_INT
2904 (PUSH_ROUNDING
2905 (GET_MODE_SIZE (GET_MODE (x)))),
2906 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2907
2908 if (temp != stack_pointer_rtx)
2909 emit_move_insn (stack_pointer_rtx, temp);
2910
2911 #ifdef STACK_GROWS_DOWNWARD
2912 offset1 = 0;
2913 offset2 = GET_MODE_SIZE (submode);
2914 #else
2915 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2916 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2917 + GET_MODE_SIZE (submode));
2918 #endif
2919
2920 emit_move_insn (change_address (x, submode,
2921 gen_rtx_PLUS (Pmode,
2922 stack_pointer_rtx,
2923 GEN_INT (offset1))),
2924 gen_realpart (submode, y));
2925 emit_move_insn (change_address (x, submode,
2926 gen_rtx_PLUS (Pmode,
2927 stack_pointer_rtx,
2928 GEN_INT (offset2))),
2929 gen_imagpart (submode, y));
2930 }
2931 else
2932 #endif
2933 /* If this is a stack push, push the highpart first, so it
2934 will be in the argument order.
2935
2936 In that case, change_address is used only to convert
2937 the mode, not to change the address. */
2938 if (stack)
2939 {
2940 /* Note that the real part always precedes the imag part in memory
2941 regardless of machine's endianness. */
2942 #ifdef STACK_GROWS_DOWNWARD
2943 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2944 gen_imagpart (submode, y));
2945 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2946 gen_realpart (submode, y));
2947 #else
2948 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2949 gen_realpart (submode, y));
2950 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2951 gen_imagpart (submode, y));
2952 #endif
2953 }
2954 else
2955 {
2956 rtx realpart_x, realpart_y;
2957 rtx imagpart_x, imagpart_y;
2958
2959 /* If this is a complex value with each part being smaller than a
2960 word, the usual calling sequence will likely pack the pieces into
2961 a single register. Unfortunately, SUBREG of hard registers only
2962 deals in terms of words, so we have a problem converting input
2963 arguments to the CONCAT of two registers that is used elsewhere
2964 for complex values. If this is before reload, we can copy it into
2965 memory and reload. FIXME, we should see about using extract and
2966 insert on integer registers, but complex short and complex char
2967 variables should be rarely used. */
2968 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2969 && (reload_in_progress | reload_completed) == 0)
2970 {
2971 int packed_dest_p
2972 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2973 int packed_src_p
2974 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2975
2976 if (packed_dest_p || packed_src_p)
2977 {
2978 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2979 ? MODE_FLOAT : MODE_INT);
2980
2981 enum machine_mode reg_mode
2982 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2983
2984 if (reg_mode != BLKmode)
2985 {
2986 rtx mem = assign_stack_temp (reg_mode,
2987 GET_MODE_SIZE (mode), 0);
2988 rtx cmem = adjust_address (mem, mode, 0);
2989
2990 cfun->cannot_inline
2991 = N_("function using short complex types cannot be inline");
2992
2993 if (packed_dest_p)
2994 {
2995 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2996
2997 emit_move_insn_1 (cmem, y);
2998 return emit_move_insn_1 (sreg, mem);
2999 }
3000 else
3001 {
3002 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
3003
3004 emit_move_insn_1 (mem, sreg);
3005 return emit_move_insn_1 (x, cmem);
3006 }
3007 }
3008 }
3009 }
3010
3011 realpart_x = gen_realpart (submode, x);
3012 realpart_y = gen_realpart (submode, y);
3013 imagpart_x = gen_imagpart (submode, x);
3014 imagpart_y = gen_imagpart (submode, y);
3015
3016 /* Show the output dies here. This is necessary for SUBREGs
3017 of pseudos since we cannot track their lifetimes correctly;
3018 hard regs shouldn't appear here except as return values.
3019 We never want to emit such a clobber after reload. */
3020 if (x != y
3021 && ! (reload_in_progress || reload_completed)
3022 && (GET_CODE (realpart_x) == SUBREG
3023 || GET_CODE (imagpart_x) == SUBREG))
3024 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3025
3026 emit_move_insn (realpart_x, realpart_y);
3027 emit_move_insn (imagpart_x, imagpart_y);
3028 }
3029
3030 return get_last_insn ();
3031 }
3032
3033 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
3034 find a mode to do it in. If we have a movcc, use it. Otherwise,
3035 find the MODE_INT mode of the same width. */
3036 else if (GET_MODE_CLASS (mode) == MODE_CC
3037 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
3038 {
3039 enum insn_code insn_code;
3040 enum machine_mode tmode = VOIDmode;
3041 rtx x1 = x, y1 = y;
3042
3043 if (mode != CCmode
3044 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
3045 tmode = CCmode;
3046 else
3047 for (tmode = QImode; tmode != VOIDmode;
3048 tmode = GET_MODE_WIDER_MODE (tmode))
3049 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
3050 break;
3051
3052 if (tmode == VOIDmode)
3053 abort ();
3054
3055 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
3056 may call change_address which is not appropriate if we were
3057 called when a reload was in progress. We don't have to worry
3058 about changing the address since the size in bytes is supposed to
3059 be the same. Copy the MEM to change the mode and move any
3060 substitutions from the old MEM to the new one. */
3061
3062 if (reload_in_progress)
3063 {
3064 x = gen_lowpart_common (tmode, x1);
3065 if (x == 0 && GET_CODE (x1) == MEM)
3066 {
3067 x = adjust_address_nv (x1, tmode, 0);
3068 copy_replacements (x1, x);
3069 }
3070
3071 y = gen_lowpart_common (tmode, y1);
3072 if (y == 0 && GET_CODE (y1) == MEM)
3073 {
3074 y = adjust_address_nv (y1, tmode, 0);
3075 copy_replacements (y1, y);
3076 }
3077 }
3078 else
3079 {
3080 x = gen_lowpart (tmode, x);
3081 y = gen_lowpart (tmode, y);
3082 }
3083
3084 insn_code = mov_optab->handlers[(int) tmode].insn_code;
3085 return emit_insn (GEN_FCN (insn_code) (x, y));
3086 }
3087
3088 /* Try using a move pattern for the corresponding integer mode. This is
3089 only safe when simplify_subreg can convert MODE constants into integer
3090 constants. At present, it can only do this reliably if the value
3091 fits within a HOST_WIDE_INT. */
3092 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3093 && (submode = int_mode_for_mode (mode)) != BLKmode
3094 && mov_optab->handlers[submode].insn_code != CODE_FOR_nothing)
3095 return emit_insn (GEN_FCN (mov_optab->handlers[submode].insn_code)
3096 (simplify_gen_subreg (submode, x, mode, 0),
3097 simplify_gen_subreg (submode, y, mode, 0)));
3098
3099 /* This will handle any multi-word or full-word mode that lacks a move_insn
3100 pattern. However, you will get better code if you define such patterns,
3101 even if they must turn into multiple assembler instructions. */
3102 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3103 {
3104 rtx last_insn = 0;
3105 rtx seq, inner;
3106 int need_clobber;
3107 int i;
3108
3109 #ifdef PUSH_ROUNDING
3110
3111 /* If X is a push on the stack, do the push now and replace
3112 X with a reference to the stack pointer. */
3113 if (push_operand (x, GET_MODE (x)))
3114 {
3115 rtx temp;
3116 enum rtx_code code;
3117
3118 /* Do not use anti_adjust_stack, since we don't want to update
3119 stack_pointer_delta. */
3120 temp = expand_binop (Pmode,
3121 #ifdef STACK_GROWS_DOWNWARD
3122 sub_optab,
3123 #else
3124 add_optab,
3125 #endif
3126 stack_pointer_rtx,
3127 GEN_INT
3128 (PUSH_ROUNDING
3129 (GET_MODE_SIZE (GET_MODE (x)))),
3130 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3131
3132 if (temp != stack_pointer_rtx)
3133 emit_move_insn (stack_pointer_rtx, temp);
3134
3135 code = GET_CODE (XEXP (x, 0));
3136
3137 /* Just hope that small offsets off SP are OK. */
3138 if (code == POST_INC)
3139 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3140 GEN_INT (-((HOST_WIDE_INT)
3141 GET_MODE_SIZE (GET_MODE (x)))));
3142 else if (code == POST_DEC)
3143 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3144 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3145 else
3146 temp = stack_pointer_rtx;
3147
3148 x = change_address (x, VOIDmode, temp);
3149 }
3150 #endif
3151
3152 /* If we are in reload, see if either operand is a MEM whose address
3153 is scheduled for replacement. */
3154 if (reload_in_progress && GET_CODE (x) == MEM
3155 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3156 x = replace_equiv_address_nv (x, inner);
3157 if (reload_in_progress && GET_CODE (y) == MEM
3158 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3159 y = replace_equiv_address_nv (y, inner);
3160
3161 start_sequence ();
3162
3163 need_clobber = 0;
3164 for (i = 0;
3165 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3166 i++)
3167 {
3168 rtx xpart = operand_subword (x, i, 1, mode);
3169 rtx ypart = operand_subword (y, i, 1, mode);
3170
3171 /* If we can't get a part of Y, put Y into memory if it is a
3172 constant. Otherwise, force it into a register. If we still
3173 can't get a part of Y, abort. */
3174 if (ypart == 0 && CONSTANT_P (y))
3175 {
3176 y = force_const_mem (mode, y);
3177 ypart = operand_subword (y, i, 1, mode);
3178 }
3179 else if (ypart == 0)
3180 ypart = operand_subword_force (y, i, mode);
3181
3182 if (xpart == 0 || ypart == 0)
3183 abort ();
3184
3185 need_clobber |= (GET_CODE (xpart) == SUBREG);
3186
3187 last_insn = emit_move_insn (xpart, ypart);
3188 }
3189
3190 seq = get_insns ();
3191 end_sequence ();
3192
3193 /* Show the output dies here. This is necessary for SUBREGs
3194 of pseudos since we cannot track their lifetimes correctly;
3195 hard regs shouldn't appear here except as return values.
3196 We never want to emit such a clobber after reload. */
3197 if (x != y
3198 && ! (reload_in_progress || reload_completed)
3199 && need_clobber != 0)
3200 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3201
3202 emit_insn (seq);
3203
3204 return last_insn;
3205 }
3206 else
3207 abort ();
3208 }
3209
3210 /* If Y is representable exactly in a narrower mode, and the target can
3211 perform the extension directly from constant or memory, then emit the
3212 move as an extension. */
3213
3214 static rtx
3215 compress_float_constant (rtx x, rtx y)
3216 {
3217 enum machine_mode dstmode = GET_MODE (x);
3218 enum machine_mode orig_srcmode = GET_MODE (y);
3219 enum machine_mode srcmode;
3220 REAL_VALUE_TYPE r;
3221
3222 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3223
3224 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3225 srcmode != orig_srcmode;
3226 srcmode = GET_MODE_WIDER_MODE (srcmode))
3227 {
3228 enum insn_code ic;
3229 rtx trunc_y, last_insn;
3230
3231 /* Skip if the target can't extend this way. */
3232 ic = can_extend_p (dstmode, srcmode, 0);
3233 if (ic == CODE_FOR_nothing)
3234 continue;
3235
3236 /* Skip if the narrowed value isn't exact. */
3237 if (! exact_real_truncate (srcmode, &r))
3238 continue;
3239
3240 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3241
3242 if (LEGITIMATE_CONSTANT_P (trunc_y))
3243 {
3244 /* Skip if the target needs extra instructions to perform
3245 the extension. */
3246 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3247 continue;
3248 }
3249 else if (float_extend_from_mem[dstmode][srcmode])
3250 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3251 else
3252 continue;
3253
3254 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3255 last_insn = get_last_insn ();
3256
3257 if (GET_CODE (x) == REG)
3258 set_unique_reg_note (last_insn, REG_EQUAL, y);
3259
3260 return last_insn;
3261 }
3262
3263 return NULL_RTX;
3264 }
3265 \f
3266 /* Pushing data onto the stack. */
3267
3268 /* Push a block of length SIZE (perhaps variable)
3269 and return an rtx to address the beginning of the block.
3270 Note that it is not possible for the value returned to be a QUEUED.
3271 The value may be virtual_outgoing_args_rtx.
3272
3273 EXTRA is the number of bytes of padding to push in addition to SIZE.
3274 BELOW nonzero means this padding comes at low addresses;
3275 otherwise, the padding comes at high addresses. */
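/* Usage sketch (hypothetical size): push_block (GEN_INT (64), 0, 0)
   allocates 64 bytes of stack space and returns an address for the
   beginning of the new block, suitable as the destination of a block
   move of outgoing arguments.  */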
3276
3277 rtx
3278 push_block (rtx size, int extra, int below)
3279 {
3280 rtx temp;
3281
3282 size = convert_modes (Pmode, ptr_mode, size, 1);
3283 if (CONSTANT_P (size))
3284 anti_adjust_stack (plus_constant (size, extra));
3285 else if (GET_CODE (size) == REG && extra == 0)
3286 anti_adjust_stack (size);
3287 else
3288 {
3289 temp = copy_to_mode_reg (Pmode, size);
3290 if (extra != 0)
3291 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3292 temp, 0, OPTAB_LIB_WIDEN);
3293 anti_adjust_stack (temp);
3294 }
3295
3296 #ifndef STACK_GROWS_DOWNWARD
3297 if (0)
3298 #else
3299 if (1)
3300 #endif
3301 {
3302 temp = virtual_outgoing_args_rtx;
3303 if (extra != 0 && below)
3304 temp = plus_constant (temp, extra);
3305 }
3306 else
3307 {
3308 if (GET_CODE (size) == CONST_INT)
3309 temp = plus_constant (virtual_outgoing_args_rtx,
3310 -INTVAL (size) - (below ? 0 : extra));
3311 else if (extra != 0 && !below)
3312 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3313 negate_rtx (Pmode, plus_constant (size, extra)));
3314 else
3315 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3316 negate_rtx (Pmode, size));
3317 }
3318
3319 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3320 }
3321
3322 #ifdef PUSH_ROUNDING
3323
3324 /* Emit single push insn. */
3325
3326 static void
3327 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3328 {
3329 rtx dest_addr;
3330 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3331 rtx dest;
3332 enum insn_code icode;
3333 insn_operand_predicate_fn pred;
3334
3335 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3336 /* If there is a push pattern, use it. Otherwise try the old way of handing
3337 a MEM representing the push operation to the move expander. */
3338 icode = push_optab->handlers[(int) mode].insn_code;
3339 if (icode != CODE_FOR_nothing)
3340 {
3341 if (((pred = insn_data[(int) icode].operand[0].predicate)
3342 && !((*pred) (x, mode))))
3343 x = force_reg (mode, x);
3344 emit_insn (GEN_FCN (icode) (x));
3345 return;
3346 }
3347 if (GET_MODE_SIZE (mode) == rounded_size)
3348 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3349 /* If we are to pad downward, adjust the stack pointer first and
3350 then store X into the stack location using an offset. This is
3351 because emit_move_insn does not know how to pad; it does not have
3352 access to type. */
3353 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3354 {
3355 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3356 HOST_WIDE_INT offset;
3357
3358 emit_move_insn (stack_pointer_rtx,
3359 expand_binop (Pmode,
3360 #ifdef STACK_GROWS_DOWNWARD
3361 sub_optab,
3362 #else
3363 add_optab,
3364 #endif
3365 stack_pointer_rtx,
3366 GEN_INT (rounded_size),
3367 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3368
3369 offset = (HOST_WIDE_INT) padding_size;
3370 #ifdef STACK_GROWS_DOWNWARD
3371 if (STACK_PUSH_CODE == POST_DEC)
3372 /* We have already decremented the stack pointer, so get the
3373 previous value. */
3374 offset += (HOST_WIDE_INT) rounded_size;
3375 #else
3376 if (STACK_PUSH_CODE == POST_INC)
3377 /* We have already incremented the stack pointer, so get the
3378 previous value. */
3379 offset -= (HOST_WIDE_INT) rounded_size;
3380 #endif
3381 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3382 }
3383 else
3384 {
3385 #ifdef STACK_GROWS_DOWNWARD
3386 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3387 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3388 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3389 #else
3390 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3391 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3392 GEN_INT (rounded_size));
3393 #endif
3394 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3395 }
3396
3397 dest = gen_rtx_MEM (mode, dest_addr);
3398
3399 if (type != 0)
3400 {
3401 set_mem_attributes (dest, type, 1);
3402
3403 if (flag_optimize_sibling_calls)
3404 /* Function incoming arguments may overlap with sibling call
3405 outgoing arguments and we cannot allow reordering of reads
3406 from function arguments with stores to outgoing arguments
3407 of sibling calls. */
3408 set_mem_alias_set (dest, 0);
3409 }
3410 emit_move_insn (dest, x);
3411 }
3412 #endif
3413
3414 /* Generate code to push X onto the stack, assuming it has mode MODE and
3415 type TYPE.
3416 MODE is redundant except when X is a CONST_INT (since they don't
3417 carry mode info).
3418 SIZE is an rtx for the size of data to be copied (in bytes),
3419 needed only if X is BLKmode.
3420
3421 ALIGN (in bits) is maximum alignment we can assume.
3422
3423 If PARTIAL and REG are both nonzero, then copy that many of the first
3424 words of X into registers starting with REG, and push the rest of X.
3425 The amount of space pushed is decreased by PARTIAL words,
3426 rounded *down* to a multiple of PARM_BOUNDARY.
3427 REG must be a hard register in this case.
3428 If REG is zero but PARTIAL is not, take all other actions for an
3429 argument partially in registers, but do not actually load any
3430 registers.
3431
3432 EXTRA is the amount in bytes of extra space to leave next to this arg.
3433 This is ignored if an argument block has already been allocated.
3434
3435 On a machine that lacks real push insns, ARGS_ADDR is the address of
3436 the bottom of the argument block for this call. We use indexing off there
3437 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3438 argument block has not been preallocated.
3439
3440 ARGS_SO_FAR is the size of args previously pushed for this call.
3441
3442 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3443 for arguments passed in registers. If nonzero, it will be the number
3444 of bytes required. */
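/* Example of the PARTIAL/REG interaction (assumed values): with PARTIAL == 2
   and REG a hard register, the first two words of X are loaded into REG and
   the following register, and only the remainder of X is pushed onto the
   stack.  */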
3445
3446 void
3447 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3448 unsigned int align, int partial, rtx reg, int extra,
3449 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3450 rtx alignment_pad)
3451 {
3452 rtx xinner;
3453 enum direction stack_direction
3454 #ifdef STACK_GROWS_DOWNWARD
3455 = downward;
3456 #else
3457 = upward;
3458 #endif
3459
3460 /* Decide where to pad the argument: `downward' for below,
3461 `upward' for above, or `none' for don't pad it.
3462 Default is below for small data on big-endian machines; else above. */
3463 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3464
3465 /* Invert direction if stack is post-decrement.
3466 FIXME: why? */
3467 if (STACK_PUSH_CODE == POST_DEC)
3468 if (where_pad != none)
3469 where_pad = (where_pad == downward ? upward : downward);
3470
3471 xinner = x = protect_from_queue (x, 0);
3472
3473 if (mode == BLKmode)
3474 {
3475 /* Copy a block into the stack, entirely or partially. */
3476
3477 rtx temp;
3478 int used = partial * UNITS_PER_WORD;
3479 int offset;
3480 int skip;
3481
3482 if (reg && GET_CODE (reg) == PARALLEL)
3483 {
3484 /* Use the size of the elt to compute offset. */
3485 rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
3486 used = partial * GET_MODE_SIZE (GET_MODE (elt));
3487 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3488 }
3489 else
3490 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3491
3492 if (size == 0)
3493 abort ();
3494
3495 used -= offset;
3496
3497 /* USED is now the # of bytes we need not copy to the stack
3498 because registers will take care of them. */
3499
3500 if (partial != 0)
3501 xinner = adjust_address (xinner, BLKmode, used);
3502
3503 /* If the partial register-part of the arg counts in its stack size,
3504 skip the part of stack space corresponding to the registers.
3505 Otherwise, start copying to the beginning of the stack space,
3506 by setting SKIP to 0. */
3507 skip = (reg_parm_stack_space == 0) ? 0 : used;
3508
3509 #ifdef PUSH_ROUNDING
3510 /* Do it with several push insns if that doesn't take lots of insns
3511 and if there is no difficulty with push insns that skip bytes
3512 on the stack for alignment purposes. */
3513 if (args_addr == 0
3514 && PUSH_ARGS
3515 && GET_CODE (size) == CONST_INT
3516 && skip == 0
3517 && MEM_ALIGN (xinner) >= align
3518 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3519 /* Here we avoid the case of a structure whose weak alignment
3520 forces many pushes of a small amount of data,
3521 and such small pushes do rounding that causes trouble. */
3522 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3523 || align >= BIGGEST_ALIGNMENT
3524 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3525 == (align / BITS_PER_UNIT)))
3526 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3527 {
3528 /* Push padding now if padding above and stack grows down,
3529 or if padding below and stack grows up.
3530 But if space already allocated, this has already been done. */
3531 if (extra && args_addr == 0
3532 && where_pad != none && where_pad != stack_direction)
3533 anti_adjust_stack (GEN_INT (extra));
3534
3535 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3536 }
3537 else
3538 #endif /* PUSH_ROUNDING */
3539 {
3540 rtx target;
3541
3542 /* Otherwise make space on the stack and copy the data
3543 to the address of that space. */
3544
3545 /* Deduct words put into registers from the size we must copy. */
3546 if (partial != 0)
3547 {
3548 if (GET_CODE (size) == CONST_INT)
3549 size = GEN_INT (INTVAL (size) - used);
3550 else
3551 size = expand_binop (GET_MODE (size), sub_optab, size,
3552 GEN_INT (used), NULL_RTX, 0,
3553 OPTAB_LIB_WIDEN);
3554 }
3555
3556 /* Get the address of the stack space.
3557 In this case, we do not deal with EXTRA separately.
3558 A single stack adjust will do. */
3559 if (! args_addr)
3560 {
3561 temp = push_block (size, extra, where_pad == downward);
3562 extra = 0;
3563 }
3564 else if (GET_CODE (args_so_far) == CONST_INT)
3565 temp = memory_address (BLKmode,
3566 plus_constant (args_addr,
3567 skip + INTVAL (args_so_far)));
3568 else
3569 temp = memory_address (BLKmode,
3570 plus_constant (gen_rtx_PLUS (Pmode,
3571 args_addr,
3572 args_so_far),
3573 skip));
3574
3575 if (!ACCUMULATE_OUTGOING_ARGS)
3576 {
3577 /* If the source is referenced relative to the stack pointer,
3578 copy it to another register to stabilize it. We do not need
3579 to do this if we know that we won't be changing sp. */
3580
3581 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3582 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3583 temp = copy_to_reg (temp);
3584 }
3585
3586 target = gen_rtx_MEM (BLKmode, temp);
3587
3588 if (type != 0)
3589 {
3590 set_mem_attributes (target, type, 1);
3591 /* Function incoming arguments may overlap with sibling call
3592 outgoing arguments and we cannot allow reordering of reads
3593 from function arguments with stores to outgoing arguments
3594 of sibling calls. */
3595 set_mem_alias_set (target, 0);
3596 }
3597
3598 /* ALIGN may well be better aligned than TYPE, e.g. due to
3599 PARM_BOUNDARY. Assume the caller isn't lying. */
3600 set_mem_align (target, align);
3601
3602 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3603 }
3604 }
3605 else if (partial > 0)
3606 {
3607 /* Scalar partly in registers. */
3608
3609 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3610 int i;
3611 int not_stack;
3612 /* # words of start of argument
3613 that we must make space for but need not store. */
3614 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3615 int args_offset = INTVAL (args_so_far);
3616 int skip;
3617
3618 /* Push padding now if padding above and stack grows down,
3619 or if padding below and stack grows up.
3620 But if space already allocated, this has already been done. */
3621 if (extra && args_addr == 0
3622 && where_pad != none && where_pad != stack_direction)
3623 anti_adjust_stack (GEN_INT (extra));
3624
3625 /* If we make space by pushing it, we might as well push
3626 the real data. Otherwise, we can leave OFFSET nonzero
3627 and leave the space uninitialized. */
3628 if (args_addr == 0)
3629 offset = 0;
3630
3631 /* Now NOT_STACK gets the number of words that we don't need to
3632 allocate on the stack. */
3633 not_stack = partial - offset;
3634
3635 /* If the partial register-part of the arg counts in its stack size,
3636 skip the part of stack space corresponding to the registers.
3637 Otherwise, start copying to the beginning of the stack space,
3638 by setting SKIP to 0. */
3639 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3640
3641 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3642 x = validize_mem (force_const_mem (mode, x));
3643
3644 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3645 SUBREGs of such registers are not allowed. */
3646 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3647 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3648 x = copy_to_reg (x);
3649
3650 /* Loop over all the words allocated on the stack for this arg. */
3651 /* We can do it by words, because any scalar bigger than a word
3652 has a size a multiple of a word. */
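      /* Hedged illustration: on a hypothetical 32-bit target where the last
	 argument register receives the first word of a 64-bit scalar,
	 PARTIAL is 1, so the loop below pushes only the second word; the
	 word that belongs in the register is copied there at the end of
	 this function by move_block_to_reg.  */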
3653 #ifndef PUSH_ARGS_REVERSED
3654 for (i = not_stack; i < size; i++)
3655 #else
3656 for (i = size - 1; i >= not_stack; i--)
3657 #endif
3658 if (i >= not_stack + offset)
3659 emit_push_insn (operand_subword_force (x, i, mode),
3660 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3661 0, args_addr,
3662 GEN_INT (args_offset + ((i - not_stack + skip)
3663 * UNITS_PER_WORD)),
3664 reg_parm_stack_space, alignment_pad);
3665 }
3666 else
3667 {
3668 rtx addr;
3669 rtx dest;
3670
3671 /* Push padding now if padding above and stack grows down,
3672 or if padding below and stack grows up.
3673 But if space already allocated, this has already been done. */
3674 if (extra && args_addr == 0
3675 && where_pad != none && where_pad != stack_direction)
3676 anti_adjust_stack (GEN_INT (extra));
3677
3678 #ifdef PUSH_ROUNDING
3679 if (args_addr == 0 && PUSH_ARGS)
3680 emit_single_push_insn (mode, x, type);
3681 else
3682 #endif
3683 {
3684 if (GET_CODE (args_so_far) == CONST_INT)
3685 addr
3686 = memory_address (mode,
3687 plus_constant (args_addr,
3688 INTVAL (args_so_far)));
3689 else
3690 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3691 args_so_far));
3692 dest = gen_rtx_MEM (mode, addr);
3693 if (type != 0)
3694 {
3695 set_mem_attributes (dest, type, 1);
3696 /* Function incoming arguments may overlap with sibling call
3697 outgoing arguments and we cannot allow reordering of reads
3698 from function arguments with stores to outgoing arguments
3699 of sibling calls. */
3700 set_mem_alias_set (dest, 0);
3701 }
3702
3703 emit_move_insn (dest, x);
3704 }
3705 }
3706
3707 /* If part should go in registers, copy that part
3708 into the appropriate registers. Do this now, at the end,
3709 since mem-to-mem copies above may do function calls. */
3710 if (partial > 0 && reg != 0)
3711 {
3712 /* Handle calls that pass values in multiple non-contiguous locations.
3713 The Irix 6 ABI has examples of this. */
3714 if (GET_CODE (reg) == PARALLEL)
3715 emit_group_load (reg, x, type, -1);
3716 else
3717 move_block_to_reg (REGNO (reg), x, partial, mode);
3718 }
3719
3720 if (extra && args_addr == 0 && where_pad == stack_direction)
3721 anti_adjust_stack (GEN_INT (extra));
3722
3723 if (alignment_pad && args_addr == 0)
3724 anti_adjust_stack (alignment_pad);
3725 }
3726 \f
3727 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3728 operations. */
3729
3730 static rtx
3731 get_subtarget (rtx x)
3732 {
3733 return ((x == 0
3734 /* Only registers can be subtargets. */
3735 || GET_CODE (x) != REG
3736 /* If the register is readonly, it can't be set more than once. */
3737 || RTX_UNCHANGING_P (x)
3738 /* Don't use hard regs to avoid extending their life. */
3739 || REGNO (x) < FIRST_PSEUDO_REGISTER
3740 /* Avoid subtargets inside loops,
3741 since they hide some invariant expressions. */
3742 || preserve_subexpressions_p ())
3743 ? 0 : x);
3744 }
3745
3746 /* Expand an assignment that stores the value of FROM into TO.
3747 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3748 (This may contain a QUEUED rtx;
3749 if the value is constant, this rtx is a constant.)
3750 Otherwise, the returned value is NULL_RTX. */
3751
3752 rtx
3753 expand_assignment (tree to, tree from, int want_value)
3754 {
3755 rtx to_rtx = 0;
3756 rtx result;
3757
3758 /* Don't crash if the lhs of the assignment was erroneous. */
3759
3760 if (TREE_CODE (to) == ERROR_MARK)
3761 {
3762 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3763 return want_value ? result : NULL_RTX;
3764 }
3765
3766 /* Assignment of a structure component needs special treatment
3767 if the structure component's rtx is not simply a MEM.
3768 Assignment of an array element at a constant index, and assignment of
3769 an array element in an unaligned packed structure field, have the same
3770 problem. */
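  /* Hedged examples (hypothetical user code) of left-hand sides that take
     this path:

	 struct __attribute__ ((packed)) s { char c; int f; } v;
	 int a[10];

	 v.f = 1;	COMPONENT_REF; the field is not naturally aligned
	 a[3] = 2;	ARRAY_REF at a constant index

     get_inner_reference below decomposes such a reference into a base
     object plus a bit offset, and store_field performs the actual store.  */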
3771
3772 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3773 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
3774 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
3775 {
3776 enum machine_mode mode1;
3777 HOST_WIDE_INT bitsize, bitpos;
3778 rtx orig_to_rtx;
3779 tree offset;
3780 int unsignedp;
3781 int volatilep = 0;
3782 tree tem;
3783
3784 push_temp_slots ();
3785 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3786 &unsignedp, &volatilep);
3787
3788 /* If we are going to use store_bit_field and extract_bit_field,
3789 make sure to_rtx will be safe for multiple use. */
3790
3791 if (mode1 == VOIDmode && want_value)
3792 tem = stabilize_reference (tem);
3793
3794 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3795
3796 if (offset != 0)
3797 {
3798 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3799
3800 if (GET_CODE (to_rtx) != MEM)
3801 abort ();
3802
3803 #ifdef POINTERS_EXTEND_UNSIGNED
3804 if (GET_MODE (offset_rtx) != Pmode)
3805 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
3806 #else
3807 if (GET_MODE (offset_rtx) != ptr_mode)
3808 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3809 #endif
3810
3811 /* A constant address in TO_RTX can have VOIDmode; we must not try
3812 to call force_reg for that case. Avoid that case. */
3813 if (GET_CODE (to_rtx) == MEM
3814 && GET_MODE (to_rtx) == BLKmode
3815 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3816 && bitsize > 0
3817 && (bitpos % bitsize) == 0
3818 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3819 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3820 {
3821 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3822 bitpos = 0;
3823 }
3824
3825 to_rtx = offset_address (to_rtx, offset_rtx,
3826 highest_pow2_factor_for_target (to,
3827 offset));
3828 }
3829
3830 if (GET_CODE (to_rtx) == MEM)
3831 {
3832 /* If the field is at offset zero, we could have been given the
3833 DECL_RTX of the parent struct. Don't munge it. */
3834 to_rtx = shallow_copy_rtx (to_rtx);
3835
3836 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
3837 }
3838
3839 /* Deal with volatile and readonly fields. The former is only done
3840 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3841 if (volatilep && GET_CODE (to_rtx) == MEM)
3842 {
3843 if (to_rtx == orig_to_rtx)
3844 to_rtx = copy_rtx (to_rtx);
3845 MEM_VOLATILE_P (to_rtx) = 1;
3846 }
3847
3848 if (TREE_CODE (to) == COMPONENT_REF
3849 && TREE_READONLY (TREE_OPERAND (to, 1))
3850 /* We can't assert that a MEM won't be set more than once
3851 if the component is not addressable because another
3852 non-addressable component may be referenced by the same MEM. */
3853 && ! (GET_CODE (to_rtx) == MEM && ! can_address_p (to)))
3854 {
3855 if (to_rtx == orig_to_rtx)
3856 to_rtx = copy_rtx (to_rtx);
3857 RTX_UNCHANGING_P (to_rtx) = 1;
3858 }
3859
3860 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
3861 {
3862 if (to_rtx == orig_to_rtx)
3863 to_rtx = copy_rtx (to_rtx);
3864 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3865 }
3866
3867 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3868 (want_value
3869 /* Spurious cast for HPUX compiler. */
3870 ? ((enum machine_mode)
3871 TYPE_MODE (TREE_TYPE (to)))
3872 : VOIDmode),
3873 unsignedp, TREE_TYPE (tem), get_alias_set (to));
3874
3875 preserve_temp_slots (result);
3876 free_temp_slots ();
3877 pop_temp_slots ();
3878
3879 /* If the value is meaningful, convert RESULT to the proper mode.
3880 Otherwise, return nothing. */
3881 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3882 TYPE_MODE (TREE_TYPE (from)),
3883 result,
3884 TYPE_UNSIGNED (TREE_TYPE (to)))
3885 : NULL_RTX);
3886 }
3887
3888 /* If the rhs is a function call and its value is not an aggregate,
3889 call the function before we start to compute the lhs.
3890 This is needed for correct code for cases such as
3891 val = setjmp (buf) on machines where reference to val
3892 requires loading up part of an address in a separate insn.
3893
3894 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3895 since it might be a promoted variable where the zero- or sign- extension
3896 needs to be done. Handling this in the normal way is safe because no
3897 computation is done before the call. */
3898 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
3899 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3900 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3901 && GET_CODE (DECL_RTL (to)) == REG))
3902 {
3903 rtx value;
3904
3905 push_temp_slots ();
3906 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3907 if (to_rtx == 0)
3908 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3909
3910 /* Handle calls that return values in multiple non-contiguous locations.
3911 The Irix 6 ABI has examples of this. */
3912 if (GET_CODE (to_rtx) == PARALLEL)
3913 emit_group_load (to_rtx, value, TREE_TYPE (from),
3914 int_size_in_bytes (TREE_TYPE (from)));
3915 else if (GET_MODE (to_rtx) == BLKmode)
3916 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
3917 else
3918 {
3919 if (POINTER_TYPE_P (TREE_TYPE (to)))
3920 value = convert_memory_address (GET_MODE (to_rtx), value);
3921 emit_move_insn (to_rtx, value);
3922 }
3923 preserve_temp_slots (to_rtx);
3924 free_temp_slots ();
3925 pop_temp_slots ();
3926 return want_value ? to_rtx : NULL_RTX;
3927 }
3928
3929 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3930 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3931
3932 if (to_rtx == 0)
3933 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3934
3935 /* Don't move directly into a return register. */
3936 if (TREE_CODE (to) == RESULT_DECL
3937 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3938 {
3939 rtx temp;
3940
3941 push_temp_slots ();
3942 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3943
3944 if (GET_CODE (to_rtx) == PARALLEL)
3945 emit_group_load (to_rtx, temp, TREE_TYPE (from),
3946 int_size_in_bytes (TREE_TYPE (from)));
3947 else
3948 emit_move_insn (to_rtx, temp);
3949
3950 preserve_temp_slots (to_rtx);
3951 free_temp_slots ();
3952 pop_temp_slots ();
3953 return want_value ? to_rtx : NULL_RTX;
3954 }
3955
3956 /* In case we are returning the contents of an object which overlaps
3957 the place the value is being stored, use a safe function when copying
3958 a value through a pointer into a structure value return block. */
3959 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3960 && current_function_returns_struct
3961 && !current_function_returns_pcc_struct)
3962 {
3963 rtx from_rtx, size;
3964
3965 push_temp_slots ();
3966 size = expr_size (from);
3967 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3968
3969 if (TARGET_MEM_FUNCTIONS)
3970 emit_library_call (memmove_libfunc, LCT_NORMAL,
3971 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3972 XEXP (from_rtx, 0), Pmode,
3973 convert_to_mode (TYPE_MODE (sizetype),
3974 size, TYPE_UNSIGNED (sizetype)),
3975 TYPE_MODE (sizetype));
3976 else
3977 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3978 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3979 XEXP (to_rtx, 0), Pmode,
3980 convert_to_mode (TYPE_MODE (integer_type_node),
3981 size,
3982 TYPE_UNSIGNED (integer_type_node)),
3983 TYPE_MODE (integer_type_node));
3984
3985 preserve_temp_slots (to_rtx);
3986 free_temp_slots ();
3987 pop_temp_slots ();
3988 return want_value ? to_rtx : NULL_RTX;
3989 }
3990
3991 /* Compute FROM and store the value in the rtx we got. */
3992
3993 push_temp_slots ();
3994 result = store_expr (from, to_rtx, want_value);
3995 preserve_temp_slots (result);
3996 free_temp_slots ();
3997 pop_temp_slots ();
3998 return want_value ? result : NULL_RTX;
3999 }
4000
4001 /* Generate code for computing expression EXP,
4002 and storing the value into TARGET.
4003 TARGET may contain a QUEUED rtx.
4004
4005 If WANT_VALUE & 1 is nonzero, return a copy of the value
4006 not in TARGET, so that we can be sure to use the proper
4007 value in a containing expression even if TARGET has something
4008 else stored in it. If possible, we copy the value through a pseudo
4009 and return that pseudo. Or, if the value is constant, we try to
4010 return the constant. In some cases, we return a pseudo
4011 copied *from* TARGET.
4012
4013 If the mode is BLKmode then we may return TARGET itself.
4014 It turns out that in BLKmode it doesn't cause a problem,
4015 because C has no operators that could combine two different
4016 assignments into the same BLKmode object with different values
4017 with no sequence point. Will other languages need this to
4018 be more thorough?
4019
4020 If WANT_VALUE & 1 is 0, we return NULL, to make sure
4021 to catch quickly any cases where the caller uses the value
4022 and fails to set WANT_VALUE.
4023
4024 If WANT_VALUE & 2 is set, this is a store into a call param on the
4025 stack, and block moves may need to be treated specially. */
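
/* Hedged usage sketch (grounded in the WANT_VALUE description above):
   a plain assignment reaches this function as store_expr (from, to_rtx,
   want_value) from expand_assignment, while a caller storing a call
   parameter on the stack sets the WANT_VALUE & 2 bit, so the block moves
   below use BLOCK_OP_CALL_PARM instead of BLOCK_OP_NORMAL.  */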
4026
4027 rtx
4028 store_expr (tree exp, rtx target, int want_value)
4029 {
4030 rtx temp;
4031 rtx alt_rtl = NULL_RTX;
4032 rtx mark = mark_queue ();
4033 int dont_return_target = 0;
4034 int dont_store_target = 0;
4035
4036 if (VOID_TYPE_P (TREE_TYPE (exp)))
4037 {
4038 /* C++ can generate ?: expressions with a throw expression in one
4039 branch and an rvalue in the other. Here, we resolve attempts to
4040 store the throw expression's nonexistent result. */
4041 if (want_value)
4042 abort ();
4043 expand_expr (exp, const0_rtx, VOIDmode, 0);
4044 return NULL_RTX;
4045 }
4046 if (TREE_CODE (exp) == COMPOUND_EXPR)
4047 {
4048 /* Perform first part of compound expression, then assign from second
4049 part. */
4050 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4051 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4052 emit_queue ();
4053 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4054 }
4055 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4056 {
4057 /* For conditional expression, get safe form of the target. Then
4058 test the condition, doing the appropriate assignment on either
4059 side. This avoids the creation of unnecessary temporaries.
4060 For non-BLKmode, it is more efficient not to do this. */
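
      /* Hedged example (hypothetical user code):

	     struct big r, s1, s2;
	     r = flag ? s1 : s2;

	 The rhs is a BLKmode COND_EXPR; each arm is stored directly into
	 R under its own label below, so no temporary copy of the
	 unselected arm is ever created.  */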
4061
4062 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4063
4064 emit_queue ();
4065 target = protect_from_queue (target, 1);
4066
4067 do_pending_stack_adjust ();
4068 NO_DEFER_POP;
4069 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4070 start_cleanup_deferral ();
4071 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
4072 end_cleanup_deferral ();
4073 emit_queue ();
4074 emit_jump_insn (gen_jump (lab2));
4075 emit_barrier ();
4076 emit_label (lab1);
4077 start_cleanup_deferral ();
4078 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
4079 end_cleanup_deferral ();
4080 emit_queue ();
4081 emit_label (lab2);
4082 OK_DEFER_POP;
4083
4084 return want_value & 1 ? target : NULL_RTX;
4085 }
4086 else if (queued_subexp_p (target))
4087 /* If target contains a postincrement, let's not risk
4088 using it as the place to generate the rhs. */
4089 {
4090 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4091 {
4092 /* Expand EXP into a new pseudo. */
4093 temp = gen_reg_rtx (GET_MODE (target));
4094 temp = expand_expr (exp, temp, GET_MODE (target),
4095 (want_value & 2
4096 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4097 }
4098 else
4099 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4100 (want_value & 2
4101 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4102
4103 /* If target is volatile, ANSI requires accessing the value
4104 *from* the target, if it is accessed. So make that happen.
4105 In no case return the target itself. */
4106 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
4107 dont_return_target = 1;
4108 }
4109 else if ((want_value & 1) != 0
4110 && GET_CODE (target) == MEM
4111 && ! MEM_VOLATILE_P (target)
4112 && GET_MODE (target) != BLKmode)
4113 /* If target is in memory and caller wants value in a register instead,
4114 arrange that. Pass TARGET as target for expand_expr so that,
4115 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4116 We know expand_expr will not use the target in that case.
4117 Don't do this if TARGET is volatile because we are supposed
4118 to write it and then read it. */
4119 {
4120 temp = expand_expr (exp, target, GET_MODE (target),
4121 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4122 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4123 {
4124 /* If TEMP is already in the desired TARGET, only copy it from
4125 memory and don't store it there again. */
4126 if (temp == target
4127 || (rtx_equal_p (temp, target)
4128 && ! side_effects_p (temp) && ! side_effects_p (target)))
4129 dont_store_target = 1;
4130 temp = copy_to_reg (temp);
4131 }
4132 dont_return_target = 1;
4133 }
4134 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4135 /* If this is a scalar in a register that is stored in a wider mode
4136 than the declared mode, compute the result into its declared mode
4137 and then convert to the wider mode. Our value is the computed
4138 expression. */
4139 {
4140 rtx inner_target = 0;
4141
4142 /* If we don't want a value, we can do the conversion inside EXP,
4143 which will often result in some optimizations. Do the conversion
4144 in two steps: first change the signedness, if needed, then
4145 the extend. But don't do this if the type of EXP is a subtype
4146 of something else since then the conversion might involve
4147 more than just converting modes. */
4148 if ((want_value & 1) == 0
4149 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4150 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4151 {
4152 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4153 != SUBREG_PROMOTED_UNSIGNED_P (target))
4154 exp = convert
4155 (lang_hooks.types.signed_or_unsigned_type
4156 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4157
4158 exp = convert (lang_hooks.types.type_for_mode
4159 (GET_MODE (SUBREG_REG (target)),
4160 SUBREG_PROMOTED_UNSIGNED_P (target)),
4161 exp);
4162
4163 inner_target = SUBREG_REG (target);
4164 }
4165
4166 temp = expand_expr (exp, inner_target, VOIDmode,
4167 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4168
4169 /* If TEMP is a MEM and we want a result value, make the access
4170 now so it gets done only once. Strictly speaking, this is
4171 only necessary if the MEM is volatile, or if the address
4172 overlaps TARGET. But not performing the load twice also
4173 reduces the amount of rtl we generate and then have to CSE. */
4174 if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
4175 temp = copy_to_reg (temp);
4176
4177 /* If TEMP is a VOIDmode constant, use convert_modes to make
4178 sure that we properly convert it. */
4179 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4180 {
4181 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4182 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4183 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4184 GET_MODE (target), temp,
4185 SUBREG_PROMOTED_UNSIGNED_P (target));
4186 }
4187
4188 convert_move (SUBREG_REG (target), temp,
4189 SUBREG_PROMOTED_UNSIGNED_P (target));
4190
4191 /* If we promoted a constant, change the mode back down to match
4192 target. Otherwise, the caller might get confused by a result whose
4193 mode is larger than expected. */
4194
4195 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
4196 {
4197 if (GET_MODE (temp) != VOIDmode)
4198 {
4199 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4200 SUBREG_PROMOTED_VAR_P (temp) = 1;
4201 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4202 SUBREG_PROMOTED_UNSIGNED_P (target));
4203 }
4204 else
4205 temp = convert_modes (GET_MODE (target),
4206 GET_MODE (SUBREG_REG (target)),
4207 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4208 }
4209
4210 return want_value & 1 ? temp : NULL_RTX;
4211 }
4212 else
4213 {
4214 temp = expand_expr_real (exp, target, GET_MODE (target),
4215 (want_value & 2
4216 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4217 &alt_rtl);
4218 /* Return TARGET if it's a specified hardware register.
4219 If TARGET is a volatile mem ref, either return TARGET
4220 or return a reg copied *from* TARGET; ANSI requires this.
4221
4222 Otherwise, if TEMP is not TARGET, return TEMP
4223 if it is constant (for efficiency),
4224 or if we really want the correct value. */
4225 if (!(target && GET_CODE (target) == REG
4226 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4227 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4228 && ! rtx_equal_p (temp, target)
4229 && (CONSTANT_P (temp) || (want_value & 1) != 0))
4230 dont_return_target = 1;
4231 }
4232
4233 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4234 the same as that of TARGET, adjust the constant. This is needed, for
4235 example, in case it is a CONST_DOUBLE and we want only a word-sized
4236 value. */
4237 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4238 && TREE_CODE (exp) != ERROR_MARK
4239 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4240 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4241 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4242
4243 /* If value was not generated in the target, store it there.
4244 Convert the value to TARGET's type first if necessary and emit the
4245 pending incrementations that have been queued when expanding EXP.
4246 Note that we cannot emit the whole queue blindly because this will
4247 effectively disable the POST_INC optimization later.
4248
4249 If TEMP and TARGET compare equal according to rtx_equal_p, but
4250 one or both of them are volatile memory refs, we have to distinguish
4251 two cases:
4252 - expand_expr has used TARGET. In this case, we must not generate
4253 another copy. This can be detected by TARGET being equal according
4254 to == .
4255 - expand_expr has not used TARGET - that means that the source just
4256 happens to have the same RTX form. Since temp will have been created
4257 by expand_expr, it will compare unequal according to == .
4258 We must generate a copy in this case, to reach the correct number
4259 of volatile memory references. */
4260
4261 if ((! rtx_equal_p (temp, target)
4262 || (temp != target && (side_effects_p (temp)
4263 || side_effects_p (target))))
4264 && TREE_CODE (exp) != ERROR_MARK
4265 && ! dont_store_target
4266 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4267 but TARGET is not valid memory reference, TEMP will differ
4268 from TARGET although it is really the same location. */
4269 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4270 /* If there's nothing to copy, don't bother. Don't call expr_size
4271 unless necessary, because some front ends' (C++) expr_size hook
4272 aborts on objects that are not supposed to be bit-copied or
4273 bit-initialized. */
4274 && expr_size (exp) != const0_rtx)
4275 {
4276 emit_insns_enqueued_after_mark (mark);
4277 target = protect_from_queue (target, 1);
4278 temp = protect_from_queue (temp, 0);
4279 if (GET_MODE (temp) != GET_MODE (target)
4280 && GET_MODE (temp) != VOIDmode)
4281 {
4282 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4283 if (dont_return_target)
4284 {
4285 /* In this case, we will return TEMP,
4286 so make sure it has the proper mode.
4287 But don't forget to store the value into TARGET. */
4288 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4289 emit_move_insn (target, temp);
4290 }
4291 else
4292 convert_move (target, temp, unsignedp);
4293 }
4294
4295 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4296 {
4297 /* Handle copying a string constant into an array. The string
4298 constant may be shorter than the array. So copy just the string's
4299 actual length, and clear the rest. First get the size of the data
4300 type of the string, which is actually the size of the target. */
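	  /* Hedged example (hypothetical user code):

		 char buf[16] = "hi";

	     TREE_STRING_LENGTH is 3 (including the trailing NUL), so three
	     bytes are block-copied and the remaining 13 bytes of BUF are
	     cleared by the clear_storage call below.  */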
4301 rtx size = expr_size (exp);
4302
4303 if (GET_CODE (size) == CONST_INT
4304 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4305 emit_block_move (target, temp, size,
4306 (want_value & 2
4307 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4308 else
4309 {
4310 /* Compute the size of the data to copy from the string. */
4311 tree copy_size
4312 = size_binop (MIN_EXPR,
4313 make_tree (sizetype, size),
4314 size_int (TREE_STRING_LENGTH (exp)));
4315 rtx copy_size_rtx
4316 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4317 (want_value & 2
4318 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4319 rtx label = 0;
4320
4321 /* Copy that much. */
4322 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4323 TYPE_UNSIGNED (sizetype));
4324 emit_block_move (target, temp, copy_size_rtx,
4325 (want_value & 2
4326 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4327
4328 /* Figure out how much is left in TARGET that we have to clear.
4329 Do all calculations in ptr_mode. */
4330 if (GET_CODE (copy_size_rtx) == CONST_INT)
4331 {
4332 size = plus_constant (size, -INTVAL (copy_size_rtx));
4333 target = adjust_address (target, BLKmode,
4334 INTVAL (copy_size_rtx));
4335 }
4336 else
4337 {
4338 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4339 copy_size_rtx, NULL_RTX, 0,
4340 OPTAB_LIB_WIDEN);
4341
4342 #ifdef POINTERS_EXTEND_UNSIGNED
4343 if (GET_MODE (copy_size_rtx) != Pmode)
4344 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4345 TYPE_UNSIGNED (sizetype));
4346 #endif
4347
4348 target = offset_address (target, copy_size_rtx,
4349 highest_pow2_factor (copy_size));
4350 label = gen_label_rtx ();
4351 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4352 GET_MODE (size), 0, label);
4353 }
4354
4355 if (size != const0_rtx)
4356 clear_storage (target, size);
4357
4358 if (label)
4359 emit_label (label);
4360 }
4361 }
4362 /* Handle calls that return values in multiple non-contiguous locations.
4363 The Irix 6 ABI has examples of this. */
4364 else if (GET_CODE (target) == PARALLEL)
4365 emit_group_load (target, temp, TREE_TYPE (exp),
4366 int_size_in_bytes (TREE_TYPE (exp)));
4367 else if (GET_MODE (temp) == BLKmode)
4368 emit_block_move (target, temp, expr_size (exp),
4369 (want_value & 2
4370 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4371 else
4372 {
4373 temp = force_operand (temp, target);
4374 if (temp != target)
4375 emit_move_insn (target, temp);
4376 }
4377 }
4378
4379 /* If we don't want a value, return NULL_RTX. */
4380 if ((want_value & 1) == 0)
4381 return NULL_RTX;
4382
4383 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4384 ??? The latter test doesn't seem to make sense. */
4385 else if (dont_return_target && GET_CODE (temp) != MEM)
4386 return temp;
4387
4388 /* Return TARGET itself if it is a hard register. */
4389 else if ((want_value & 1) != 0
4390 && GET_MODE (target) != BLKmode
4391 && ! (GET_CODE (target) == REG
4392 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4393 return copy_to_reg (target);
4394
4395 else
4396 return target;
4397 }
4398 \f
4399 /* Return 1 if EXP just contains zeros. FIXME merge with initializer_zerop. */
4400
4401 static int
4402 is_zeros_p (tree exp)
4403 {
4404 tree elt;
4405
4406 switch (TREE_CODE (exp))
4407 {
4408 case CONVERT_EXPR:
4409 case NOP_EXPR:
4410 case NON_LVALUE_EXPR:
4411 case VIEW_CONVERT_EXPR:
4412 return is_zeros_p (TREE_OPERAND (exp, 0));
4413
4414 case INTEGER_CST:
4415 return integer_zerop (exp);
4416
4417 case COMPLEX_CST:
4418 return
4419 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4420
4421 case REAL_CST:
4422 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4423
4424 case VECTOR_CST:
4425 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4426 elt = TREE_CHAIN (elt))
4427 if (!is_zeros_p (TREE_VALUE (elt)))
4428 return 0;
4429
4430 return 1;
4431
4432 case CONSTRUCTOR:
4433 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4434 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4435 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4436 if (! is_zeros_p (TREE_VALUE (elt)))
4437 return 0;
4438
4439 return 1;
4440
4441 default:
4442 return 0;
4443 }
4444 }
4445
4446 /* Return 1 if EXP contains mostly (3/4) zeros. */
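
/* Hedged example (hypothetical user code): for

       int a[8] = { 0, 9, 0, 0, 0, 0, 0, 0 };

   seven of the eight constructor elements are zero, so 4 * zeros >= 3 * elts
   holds and store_constructor will prefer to clear the whole array first
   and then store only the nonzero element.  */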
4447
4448 int
4449 mostly_zeros_p (tree exp)
4450 {
4451 if (TREE_CODE (exp) == CONSTRUCTOR)
4452 {
4453 int elts = 0, zeros = 0;
4454 tree elt = CONSTRUCTOR_ELTS (exp);
4455 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4456 {
4457 /* If there are no ranges of true bits, it is all zero. */
4458 return elt == NULL_TREE;
4459 }
4460 for (; elt; elt = TREE_CHAIN (elt))
4461 {
4462 /* We do not handle the case where the index is a RANGE_EXPR,
4463 so the statistic will be somewhat inaccurate.
4464 We do make a more accurate count in store_constructor itself,
4465 and since this function is only used for nested array elements,
4466 this should be close enough. */
4467 if (mostly_zeros_p (TREE_VALUE (elt)))
4468 zeros++;
4469 elts++;
4470 }
4471
4472 return 4 * zeros >= 3 * elts;
4473 }
4474
4475 return is_zeros_p (exp);
4476 }
4477 \f
4478 /* Helper function for store_constructor.
4479 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4480 TYPE is the type of the CONSTRUCTOR, not the element type.
4481 CLEARED is as for store_constructor.
4482 ALIAS_SET is the alias set to use for any stores.
4483
4484 This provides a recursive shortcut back to store_constructor when it isn't
4485 necessary to go through store_field. This is so that we can pass through
4486 the cleared field to let store_constructor know that we may not have to
4487 clear a substructure if the outer structure has already been cleared. */
4488
4489 static void
4490 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4491 HOST_WIDE_INT bitpos, enum machine_mode mode,
4492 tree exp, tree type, int cleared, int alias_set)
4493 {
4494 if (TREE_CODE (exp) == CONSTRUCTOR
4495 && bitpos % BITS_PER_UNIT == 0
4496 /* If we have a nonzero bitpos for a register target, then we just
4497 let store_field do the bitfield handling. This is unlikely to
4498 generate unnecessary clear instructions anyway. */
4499 && (bitpos == 0 || GET_CODE (target) == MEM))
4500 {
4501 if (GET_CODE (target) == MEM)
4502 target
4503 = adjust_address (target,
4504 GET_MODE (target) == BLKmode
4505 || 0 != (bitpos
4506 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4507 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4508
4509
4510 /* Update the alias set, if required. */
4511 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4512 && MEM_ALIAS_SET (target) != 0)
4513 {
4514 target = copy_rtx (target);
4515 set_mem_alias_set (target, alias_set);
4516 }
4517
4518 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4519 }
4520 else
4521 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4522 alias_set);
4523 }
4524
4525 /* Store the value of constructor EXP into the rtx TARGET.
4526 TARGET is either a REG or a MEM; we know it cannot conflict, since
4527 safe_from_p has been called.
4528 CLEARED is true if TARGET is known to have been zero'd.
4529 SIZE is the number of bytes of TARGET we are allowed to modify: this
4530 may not be the same as the size of EXP if we are assigning to a field
4531 which has been packed to exclude padding bits. */
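
/* Hedged example (hypothetical user code): for

       struct pt { int x, y, z; };
       struct pt p = { 1 };

   the CONSTRUCTOR lists fewer elements than the type has fields, so the
   record branch below clears the whole object first and then stores only
   the X field.  */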
4532
4533 static void
4534 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4535 {
4536 tree type = TREE_TYPE (exp);
4537 #ifdef WORD_REGISTER_OPERATIONS
4538 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4539 #endif
4540
4541 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4542 || TREE_CODE (type) == QUAL_UNION_TYPE)
4543 {
4544 tree elt;
4545
4546 /* If size is zero or the target is already cleared, do nothing. */
4547 if (size == 0 || cleared)
4548 cleared = 1;
4549 /* We either clear the aggregate or indicate the value is dead. */
4550 else if ((TREE_CODE (type) == UNION_TYPE
4551 || TREE_CODE (type) == QUAL_UNION_TYPE)
4552 && ! CONSTRUCTOR_ELTS (exp))
4553 /* If the constructor is empty, clear the union. */
4554 {
4555 clear_storage (target, expr_size (exp));
4556 cleared = 1;
4557 }
4558
4559 /* If we are building a static constructor into a register,
4560 set the initial value as zero so we can fold the value into
4561 a constant. But if more than one register is involved,
4562 this probably loses. */
4563 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4564 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4565 {
4566 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4567 cleared = 1;
4568 }
4569
4570 /* If the constructor has fewer fields than the structure
4571 or if we are initializing the structure to mostly zeros,
4572 clear the whole structure first. Don't do this if TARGET is a
4573 register whose mode size isn't equal to SIZE since clear_storage
4574 can't handle this case. */
4575 else if (((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
4576 || mostly_zeros_p (exp))
4577 && (GET_CODE (target) != REG
4578 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4579 == size)))
4580 {
4581 rtx xtarget = target;
4582
4583 if (readonly_fields_p (type))
4584 {
4585 xtarget = copy_rtx (xtarget);
4586 RTX_UNCHANGING_P (xtarget) = 1;
4587 }
4588
4589 clear_storage (xtarget, GEN_INT (size));
4590 cleared = 1;
4591 }
4592
4593 if (! cleared)
4594 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4595
4596 /* Store each element of the constructor into
4597 the corresponding field of TARGET. */
4598
4599 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4600 {
4601 tree field = TREE_PURPOSE (elt);
4602 tree value = TREE_VALUE (elt);
4603 enum machine_mode mode;
4604 HOST_WIDE_INT bitsize;
4605 HOST_WIDE_INT bitpos = 0;
4606 tree offset;
4607 rtx to_rtx = target;
4608
4609 /* Just ignore missing fields.
4610 We cleared the whole structure, above,
4611 if any fields are missing. */
4612 if (field == 0)
4613 continue;
4614
4615 if (cleared && is_zeros_p (value))
4616 continue;
4617
4618 if (host_integerp (DECL_SIZE (field), 1))
4619 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4620 else
4621 bitsize = -1;
4622
4623 mode = DECL_MODE (field);
4624 if (DECL_BIT_FIELD (field))
4625 mode = VOIDmode;
4626
4627 offset = DECL_FIELD_OFFSET (field);
4628 if (host_integerp (offset, 0)
4629 && host_integerp (bit_position (field), 0))
4630 {
4631 bitpos = int_bit_position (field);
4632 offset = 0;
4633 }
4634 else
4635 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4636
4637 if (offset)
4638 {
4639 rtx offset_rtx;
4640
4641 offset
4642 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4643 make_tree (TREE_TYPE (exp),
4644 target));
4645
4646 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4647 if (GET_CODE (to_rtx) != MEM)
4648 abort ();
4649
4650 #ifdef POINTERS_EXTEND_UNSIGNED
4651 if (GET_MODE (offset_rtx) != Pmode)
4652 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4653 #else
4654 if (GET_MODE (offset_rtx) != ptr_mode)
4655 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4656 #endif
4657
4658 to_rtx = offset_address (to_rtx, offset_rtx,
4659 highest_pow2_factor (offset));
4660 }
4661
4662 if (TREE_READONLY (field))
4663 {
4664 if (GET_CODE (to_rtx) == MEM)
4665 to_rtx = copy_rtx (to_rtx);
4666
4667 RTX_UNCHANGING_P (to_rtx) = 1;
4668 }
4669
4670 #ifdef WORD_REGISTER_OPERATIONS
4671 /* If this initializes a field that is smaller than a word, at the
4672 start of a word, try to widen it to a full word.
4673 This special case allows us to output C++ member function
4674 initializations in a form that the optimizers can understand. */
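	  /* Hedged illustration: if a structure that fits in a register is
	     being initialized and VALUE is an INTEGER_CST for a 16-bit
	     field at the start of a word, the constant is widened to a
	     full-word type (and shifted left on big-endian targets) so
	     the store below becomes a plain word move rather than a
	     bit-field insertion.  */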
4675 if (GET_CODE (target) == REG
4676 && bitsize < BITS_PER_WORD
4677 && bitpos % BITS_PER_WORD == 0
4678 && GET_MODE_CLASS (mode) == MODE_INT
4679 && TREE_CODE (value) == INTEGER_CST
4680 && exp_size >= 0
4681 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4682 {
4683 tree type = TREE_TYPE (value);
4684
4685 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4686 {
4687 type = lang_hooks.types.type_for_size
4688 (BITS_PER_WORD, TYPE_UNSIGNED (type));
4689 value = convert (type, value);
4690 }
4691
4692 if (BYTES_BIG_ENDIAN)
4693 value
4694 = fold (build (LSHIFT_EXPR, type, value,
4695 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4696 bitsize = BITS_PER_WORD;
4697 mode = word_mode;
4698 }
4699 #endif
4700
4701 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4702 && DECL_NONADDRESSABLE_P (field))
4703 {
4704 to_rtx = copy_rtx (to_rtx);
4705 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4706 }
4707
4708 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4709 value, type, cleared,
4710 get_alias_set (TREE_TYPE (field)));
4711 }
4712 }
4713 else if (TREE_CODE (type) == ARRAY_TYPE
4714 || TREE_CODE (type) == VECTOR_TYPE)
4715 {
4716 tree elt;
4717 int i;
4718 int need_to_clear;
4719 tree domain = TYPE_DOMAIN (type);
4720 tree elttype = TREE_TYPE (type);
4721 int const_bounds_p;
4722 HOST_WIDE_INT minelt = 0;
4723 HOST_WIDE_INT maxelt = 0;
4724 int icode = 0;
4725 rtx *vector = NULL;
4726 int elt_size = 0;
4727 unsigned n_elts = 0;
4728
4729 /* Vectors are like arrays, but the domain is stored via an array
4730 type indirectly. */
4731 if (TREE_CODE (type) == VECTOR_TYPE)
4732 {
4733 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
4734 the same field as TYPE_DOMAIN, we are not guaranteed that
4735 it always will. */
4736 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4737 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4738 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
4739 {
4740 enum machine_mode mode = GET_MODE (target);
4741
4742 icode = (int) vec_init_optab->handlers[mode].insn_code;
4743 if (icode != CODE_FOR_nothing)
4744 {
4745 unsigned int i;
4746
4747 elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
4748 n_elts = (GET_MODE_SIZE (mode) / elt_size);
4749 vector = alloca (n_elts * sizeof (rtx));
4750 for (i = 0; i < n_elts; i++)
4751 vector [i] = CONST0_RTX (GET_MODE_INNER (mode));
4752 }
4753 }
4754 }
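
      /* Hedged example (GCC vector extension, hypothetical user code):

	     typedef int v4si __attribute__ ((vector_size (16)));
	     v4si v = { 1, 2, 3, 4 };

	 When TARGET is a vector register and the backend provides a
	 vec_init pattern, each constructor element is expanded into
	 VECTOR[] below and a single vec_init insn is emitted at the
	 end of this block.  */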
4755
4756 const_bounds_p = (TYPE_MIN_VALUE (domain)
4757 && TYPE_MAX_VALUE (domain)
4758 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4759 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4760
4761 /* If we have constant bounds for the range of the type, get them. */
4762 if (const_bounds_p)
4763 {
4764 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4765 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4766 }
4767
4768 /* If the constructor has fewer elements than the array,
4769 clear the whole array first. Similarly if this is
4770 a static constructor of a non-BLKmode object. */
4771 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4772 need_to_clear = 1;
4773 else
4774 {
4775 HOST_WIDE_INT count = 0, zero_count = 0;
4776 need_to_clear = ! const_bounds_p;
4777
4778 /* This loop is a more accurate version of the loop in
4779 mostly_zeros_p (it handles RANGE_EXPR in an index).
4780 It is also needed to check for missing elements. */
4781 for (elt = CONSTRUCTOR_ELTS (exp);
4782 elt != NULL_TREE && ! need_to_clear;
4783 elt = TREE_CHAIN (elt))
4784 {
4785 tree index = TREE_PURPOSE (elt);
4786 HOST_WIDE_INT this_node_count;
4787
4788 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4789 {
4790 tree lo_index = TREE_OPERAND (index, 0);
4791 tree hi_index = TREE_OPERAND (index, 1);
4792
4793 if (! host_integerp (lo_index, 1)
4794 || ! host_integerp (hi_index, 1))
4795 {
4796 need_to_clear = 1;
4797 break;
4798 }
4799
4800 this_node_count = (tree_low_cst (hi_index, 1)
4801 - tree_low_cst (lo_index, 1) + 1);
4802 }
4803 else
4804 this_node_count = 1;
4805
4806 count += this_node_count;
4807 if (mostly_zeros_p (TREE_VALUE (elt)))
4808 zero_count += this_node_count;
4809 }
4810
4811 /* Clear the entire array first if there are any missing elements,
4812 or if the incidence of zero elements is >= 75%. */
4813 if (! need_to_clear
4814 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4815 need_to_clear = 1;
4816 }
4817
4818 if (need_to_clear && size > 0 && !vector)
4819 {
4820 if (! cleared)
4821 {
4822 if (REG_P (target))
4823 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4824 else
4825 clear_storage (target, GEN_INT (size));
4826 }
4827 cleared = 1;
4828 }
4829 else if (REG_P (target))
4830 /* Inform later passes that the old value is dead. */
4831 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4832
4833 /* Store each element of the constructor into
4834 the corresponding element of TARGET, determined
4835 by counting the elements. */
4836 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4837 elt;
4838 elt = TREE_CHAIN (elt), i++)
4839 {
4840 enum machine_mode mode;
4841 HOST_WIDE_INT bitsize;
4842 HOST_WIDE_INT bitpos;
4843 int unsignedp;
4844 tree value = TREE_VALUE (elt);
4845 tree index = TREE_PURPOSE (elt);
4846 rtx xtarget = target;
4847
4848 if (cleared && is_zeros_p (value))
4849 continue;
4850
4851 unsignedp = TYPE_UNSIGNED (elttype);
4852 mode = TYPE_MODE (elttype);
4853 if (mode == BLKmode)
4854 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4855 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4856 : -1);
4857 else
4858 bitsize = GET_MODE_BITSIZE (mode);
4859
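	  /* A RANGE_EXPR index comes from a GNU C designated range
	     initializer, e.g. (hypothetical user code)

		 int a[16] = { [0 ... 3] = 7 };

	     where lo_index is 0 and hi_index is 3.  Small constant ranges
	     are unrolled below; larger or variable ranges are filled with
	     a runtime loop.  */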
4860 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4861 {
4862 tree lo_index = TREE_OPERAND (index, 0);
4863 tree hi_index = TREE_OPERAND (index, 1);
4864 rtx index_r, pos_rtx, loop_end;
4865 struct nesting *loop;
4866 HOST_WIDE_INT lo, hi, count;
4867 tree position;
4868
4869 if (vector)
4870 abort ();
4871
4872 /* If the range is constant and "small", unroll the loop. */
4873 if (const_bounds_p
4874 && host_integerp (lo_index, 0)
4875 && host_integerp (hi_index, 0)
4876 && (lo = tree_low_cst (lo_index, 0),
4877 hi = tree_low_cst (hi_index, 0),
4878 count = hi - lo + 1,
4879 (GET_CODE (target) != MEM
4880 || count <= 2
4881 || (host_integerp (TYPE_SIZE (elttype), 1)
4882 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4883 <= 40 * 8)))))
4884 {
4885 lo -= minelt; hi -= minelt;
4886 for (; lo <= hi; lo++)
4887 {
4888 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4889
4890 if (GET_CODE (target) == MEM
4891 && !MEM_KEEP_ALIAS_SET_P (target)
4892 && TREE_CODE (type) == ARRAY_TYPE
4893 && TYPE_NONALIASED_COMPONENT (type))
4894 {
4895 target = copy_rtx (target);
4896 MEM_KEEP_ALIAS_SET_P (target) = 1;
4897 }
4898
4899 store_constructor_field
4900 (target, bitsize, bitpos, mode, value, type, cleared,
4901 get_alias_set (elttype));
4902 }
4903 }
4904 else
4905 {
4906 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4907 loop_end = gen_label_rtx ();
4908
4909 unsignedp = TYPE_UNSIGNED (domain);
4910
4911 index = build_decl (VAR_DECL, NULL_TREE, domain);
4912
4913 index_r
4914 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4915 &unsignedp, 0));
4916 SET_DECL_RTL (index, index_r);
4917 if (TREE_CODE (value) == SAVE_EXPR
4918 && SAVE_EXPR_RTL (value) == 0)
4919 {
4920 /* Make sure value gets expanded once before the
4921 loop. */
4922 expand_expr (value, const0_rtx, VOIDmode, 0);
4923 emit_queue ();
4924 }
4925 store_expr (lo_index, index_r, 0);
4926 loop = expand_start_loop (0);
4927
4928 /* Assign value to element index. */
4929 position
4930 = convert (ssizetype,
4931 fold (build (MINUS_EXPR, TREE_TYPE (index),
4932 index, TYPE_MIN_VALUE (domain))));
4933 position = size_binop (MULT_EXPR, position,
4934 convert (ssizetype,
4935 TYPE_SIZE_UNIT (elttype)));
4936
4937 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4938 xtarget = offset_address (target, pos_rtx,
4939 highest_pow2_factor (position));
4940 xtarget = adjust_address (xtarget, mode, 0);
4941 if (TREE_CODE (value) == CONSTRUCTOR)
4942 store_constructor (value, xtarget, cleared,
4943 bitsize / BITS_PER_UNIT);
4944 else
4945 store_expr (value, xtarget, 0);
4946
4947 expand_exit_loop_if_false (loop,
4948 build (LT_EXPR, integer_type_node,
4949 index, hi_index));
4950
4951 expand_increment (build (PREINCREMENT_EXPR,
4952 TREE_TYPE (index),
4953 index, integer_one_node), 0, 0);
4954 expand_end_loop ();
4955 emit_label (loop_end);
4956 }
4957 }
4958 else if ((index != 0 && ! host_integerp (index, 0))
4959 || ! host_integerp (TYPE_SIZE (elttype), 1))
4960 {
4961 tree position;
4962
4963 if (vector)
4964 abort ();
4965
4966 if (index == 0)
4967 index = ssize_int (1);
4968
4969 if (minelt)
4970 index = convert (ssizetype,
4971 fold (build (MINUS_EXPR, TREE_TYPE (index),
4972 index, TYPE_MIN_VALUE (domain))));
4973
4974 position = size_binop (MULT_EXPR, index,
4975 convert (ssizetype,
4976 TYPE_SIZE_UNIT (elttype)));
4977 xtarget = offset_address (target,
4978 expand_expr (position, 0, VOIDmode, 0),
4979 highest_pow2_factor (position));
4980 xtarget = adjust_address (xtarget, mode, 0);
4981 store_expr (value, xtarget, 0);
4982 }
4983 else if (vector)
4984 {
4985 int pos;
4986
4987 if (index != 0)
4988 pos = tree_low_cst (index, 0) - minelt;
4989 else
4990 pos = i;
4991 vector[pos] = expand_expr (value, NULL_RTX, VOIDmode, 0);
4992 }
4993 else
4994 {
4995 if (index != 0)
4996 bitpos = ((tree_low_cst (index, 0) - minelt)
4997 * tree_low_cst (TYPE_SIZE (elttype), 1));
4998 else
4999 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5000
5001 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
5002 && TREE_CODE (type) == ARRAY_TYPE
5003 && TYPE_NONALIASED_COMPONENT (type))
5004 {
5005 target = copy_rtx (target);
5006 MEM_KEEP_ALIAS_SET_P (target) = 1;
5007 }
5008 store_constructor_field (target, bitsize, bitpos, mode, value,
5009 type, cleared, get_alias_set (elttype));
5010 }
5011 }
5012 if (vector)
5013 {
5014 emit_insn (GEN_FCN (icode) (target,
5015 gen_rtx_PARALLEL (GET_MODE (target),
5016 gen_rtvec_v (n_elts, vector))));
5017 }
5018 }
5019
5020 /* Set constructor assignments. */
5021 else if (TREE_CODE (type) == SET_TYPE)
5022 {
5023 tree elt = CONSTRUCTOR_ELTS (exp);
5024 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
5025 tree domain = TYPE_DOMAIN (type);
5026 tree domain_min, domain_max, bitlength;
5027
5028 /* The default implementation strategy is to extract the constant
5029 parts of the constructor, use that to initialize the target,
5030 and then "or" in whatever non-constant ranges we need in addition.
5031
5032 If a large set is all zero or all ones, it is
5033 probably better to set it using memset (if available) or bzero.
5034 Also, if a large set has just a single range, it may also be
5035 better to first clear the whole set (using bzero/memset)
5036 and then set the bits we want. */
5037
5038 /* Check for all zeros. */
5039 if (elt == NULL_TREE && size > 0)
5040 {
5041 if (!cleared)
5042 clear_storage (target, GEN_INT (size));
5043 return;
5044 }
5045
5046 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5047 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5048 bitlength = size_binop (PLUS_EXPR,
5049 size_diffop (domain_max, domain_min),
5050 ssize_int (1));
5051
5052 nbits = tree_low_cst (bitlength, 1);
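
      /* Hedged worked example: for a set type whose domain is [0 .. 39],
	 BITLENGTH is 39 - 0 + 1 = 40, so NBITS is 40.  Assuming a 32-bit
	 set_word_size, the constant bits are emitted below one word at a
	 time, typically as two stores (bits 0-31, then bits 32-39).  */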
5053
5054 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5055 are "complicated" (more than one range), initialize (the
5056 constant parts) by copying from a constant. */
5057 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5058 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5059 {
5060 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5061 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5062 char *bit_buffer = alloca (nbits);
5063 HOST_WIDE_INT word = 0;
5064 unsigned int bit_pos = 0;
5065 unsigned int ibit = 0;
5066 unsigned int offset = 0; /* In bytes from beginning of set. */
5067
5068 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
5069 for (;;)
5070 {
5071 if (bit_buffer[ibit])
5072 {
5073 if (BYTES_BIG_ENDIAN)
5074 word |= (1 << (set_word_size - 1 - bit_pos));
5075 else
5076 word |= 1 << bit_pos;
5077 }
5078
5079 bit_pos++; ibit++;
5080 if (bit_pos >= set_word_size || ibit == nbits)
5081 {
5082 if (word != 0 || ! cleared)
5083 {
5084 rtx datum = gen_int_mode (word, mode);
5085 rtx to_rtx;
5086
5087 /* The assumption here is that it is safe to use
5088 XEXP if the set is multi-word, but not if
5089 it's single-word. */
5090 if (GET_CODE (target) == MEM)
5091 to_rtx = adjust_address (target, mode, offset);
5092 else if (offset == 0)
5093 to_rtx = target;
5094 else
5095 abort ();
5096 emit_move_insn (to_rtx, datum);
5097 }
5098
5099 if (ibit == nbits)
5100 break;
5101 word = 0;
5102 bit_pos = 0;
5103 offset += set_word_size / BITS_PER_UNIT;
5104 }
5105 }
5106 }
5107 else if (!cleared)
5108 /* Don't bother clearing storage if the set is all ones. */
5109 if (TREE_CHAIN (elt) != NULL_TREE
5110 || (TREE_PURPOSE (elt) == NULL_TREE
5111 ? nbits != 1
5112 : ( ! host_integerp (TREE_VALUE (elt), 0)
5113 || ! host_integerp (TREE_PURPOSE (elt), 0)
5114 || (tree_low_cst (TREE_VALUE (elt), 0)
5115 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5116 != (HOST_WIDE_INT) nbits))))
5117 clear_storage (target, expr_size (exp));
5118
5119 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5120 {
5121 /* Start of range of element or NULL. */
5122 tree startbit = TREE_PURPOSE (elt);
5123 /* End of range of element, or element value. */
5124 tree endbit = TREE_VALUE (elt);
5125 HOST_WIDE_INT startb, endb;
5126 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5127
5128 bitlength_rtx = expand_expr (bitlength,
5129 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5130
5131 /* Handle non-range tuple element like [ expr ]. */
5132 if (startbit == NULL_TREE)
5133 {
5134 startbit = save_expr (endbit);
5135 endbit = startbit;
5136 }
5137
5138 startbit = convert (sizetype, startbit);
5139 endbit = convert (sizetype, endbit);
5140 if (! integer_zerop (domain_min))
5141 {
5142 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5143 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5144 }
5145 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5146 EXPAND_CONST_ADDRESS);
5147 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5148 EXPAND_CONST_ADDRESS);
5149
5150 if (REG_P (target))
5151 {
5152 targetx
5153 = assign_temp
5154 ((build_qualified_type (lang_hooks.types.type_for_mode
5155 (GET_MODE (target), 0),
5156 TYPE_QUAL_CONST)),
5157 0, 1, 1);
5158 emit_move_insn (targetx, target);
5159 }
5160
5161 else if (GET_CODE (target) == MEM)
5162 targetx = target;
5163 else
5164 abort ();
5165
5166 /* Optimization: If startbit and endbit are constants divisible
5167 by BITS_PER_UNIT, call memset instead. */
5168 if (TARGET_MEM_FUNCTIONS
5169 && TREE_CODE (startbit) == INTEGER_CST
5170 && TREE_CODE (endbit) == INTEGER_CST
5171 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5172 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5173 {
5174 emit_library_call (memset_libfunc, LCT_NORMAL,
5175 VOIDmode, 3,
5176 plus_constant (XEXP (targetx, 0),
5177 startb / BITS_PER_UNIT),
5178 Pmode,
5179 constm1_rtx, TYPE_MODE (integer_type_node),
5180 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5181 TYPE_MODE (sizetype));
5182 }
5183 else
5184 emit_library_call (setbits_libfunc, LCT_NORMAL,
5185 VOIDmode, 4, XEXP (targetx, 0),
5186 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5187 startbit_rtx, TYPE_MODE (sizetype),
5188 endbit_rtx, TYPE_MODE (sizetype));
5189
5190 if (REG_P (target))
5191 emit_move_insn (target, targetx);
5192 }
5193 }
5194
5195 else
5196 abort ();
5197 }
5198
5199 /* Store the value of EXP (an expression tree)
5200 into a subfield of TARGET which has mode MODE and occupies
5201 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5202 If MODE is VOIDmode, it means that we are storing into a bit-field.
5203
5204 If VALUE_MODE is VOIDmode, return nothing in particular.
5205 UNSIGNEDP is not used in this case.
5206
5207 Otherwise, return an rtx for the value stored. This rtx
5208 has mode VALUE_MODE if that is convenient to do.
5209 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5210
5211 TYPE is the type of the underlying object,
5212
5213 ALIAS_SET is the alias set for the destination. This value will
5214 (in general) be different from that for TARGET, since TARGET is a
5215 reference to the containing structure. */
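
/* Hedged example (hypothetical user code): for

       struct { unsigned f : 3; unsigned g : 5; } s;
       s.f = 5;

   expand_assignment arrives here with BITSIZE == 3, BITPOS == 0 and
   MODE == VOIDmode, so the store is done with store_bit_field rather
   than an ordinary memory reference.  */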
5216
5217 static rtx
5218 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5219 enum machine_mode mode, tree exp, enum machine_mode value_mode,
5220 int unsignedp, tree type, int alias_set)
5221 {
5222 HOST_WIDE_INT width_mask = 0;
5223
5224 if (TREE_CODE (exp) == ERROR_MARK)
5225 return const0_rtx;
5226
5227 /* If we have nothing to store, do nothing unless the expression has
5228 side-effects. */
5229 if (bitsize == 0)
5230 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5231 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5232 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5233
5234 /* If we are storing into an unaligned field of an aligned union that is
5235 in a register, we may have the mode of TARGET being an integer mode but
5236 MODE == BLKmode. In that case, get an aligned object whose size and
5237 alignment are the same as TARGET and store TARGET into it (we can avoid
5238 the store if the field being stored is the entire width of TARGET). Then
5239 call ourselves recursively to store the field into a BLKmode version of
5240 that object. Finally, load from the object into TARGET. This is not
5241 very efficient in general, but should only be slightly more expensive
5242 than the otherwise-required unaligned accesses. Perhaps this can be
5243 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5244 twice, once with emit_move_insn and once via store_field. */
5245
5246 if (mode == BLKmode
5247 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5248 {
5249 rtx object = assign_temp (type, 0, 1, 1);
5250 rtx blk_object = adjust_address (object, BLKmode, 0);
5251
5252 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5253 emit_move_insn (object, target);
5254
5255 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5256 alias_set);
5257
5258 emit_move_insn (target, object);
5259
5260 /* We want to return the BLKmode version of the data. */
5261 return blk_object;
5262 }
5263
5264 if (GET_CODE (target) == CONCAT)
5265 {
5266 /* We're storing into a struct containing a single __complex. */
5267
5268 if (bitpos != 0)
5269 abort ();
5270 return store_expr (exp, target, 0);
5271 }
5272
5273 /* If the structure is in a register or if the component
5274 is a bit field, we cannot use addressing to access it.
5275 Use bit-field techniques or SUBREG to store in it. */
5276
5277 if (mode == VOIDmode
5278 || (mode != BLKmode && ! direct_store[(int) mode]
5279 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5280 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5281 || GET_CODE (target) == REG
5282 || GET_CODE (target) == SUBREG
5283 /* If the field isn't aligned enough to store as an ordinary memref,
5284 store it as a bit field. */
5285 || (mode != BLKmode
5286 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5287 || bitpos % GET_MODE_ALIGNMENT (mode))
5288 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5289 || (bitpos % BITS_PER_UNIT != 0)))
5290 /* If the RHS and field are a constant size and the size of the
5291 RHS isn't the same size as the bitfield, we must use bitfield
5292 operations. */
5293 || (bitsize >= 0
5294 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5295 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5296 {
5297 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5298
5299 /* If BITSIZE is narrower than the size of the type of EXP
5300 we will be narrowing TEMP. Normally, what's wanted are the
5301 low-order bits. However, if EXP's type is a record and this is
5302 a big-endian machine, we want the upper BITSIZE bits. */
5303 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5304 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5305 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5306 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5307 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5308 - bitsize),
5309 NULL_RTX, 1);
5310
5311 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5312 MODE. */
5313 if (mode != VOIDmode && mode != BLKmode
5314 && mode != TYPE_MODE (TREE_TYPE (exp)))
5315 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5316
5317 /* If the modes of TARGET and TEMP are both BLKmode, both
5318 must be in memory and BITPOS must be aligned on a byte
5319 boundary. If so, we simply do a block copy. */
5320 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5321 {
5322 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5323 || bitpos % BITS_PER_UNIT != 0)
5324 abort ();
5325
5326 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5327 emit_block_move (target, temp,
5328 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5329 / BITS_PER_UNIT),
5330 BLOCK_OP_NORMAL);
5331
5332 return value_mode == VOIDmode ? const0_rtx : target;
5333 }
5334
5335 /* Store the value in the bitfield. */
5336 store_bit_field (target, bitsize, bitpos, mode, temp,
5337 int_size_in_bytes (type));
5338
5339 if (value_mode != VOIDmode)
5340 {
5341 /* The caller wants an rtx for the value.
5342 If possible, avoid refetching from the bitfield itself. */
5343 if (width_mask != 0
5344 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5345 {
5346 tree count;
5347 enum machine_mode tmode;
5348
5349 tmode = GET_MODE (temp);
5350 if (tmode == VOIDmode)
5351 tmode = value_mode;
5352
5353 if (unsignedp)
5354 return expand_and (tmode, temp,
5355 gen_int_mode (width_mask, tmode),
5356 NULL_RTX);
5357
5358 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5359 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5360 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5361 }
5362
5363 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5364 NULL_RTX, value_mode, VOIDmode,
5365 int_size_in_bytes (type));
5366 }
5367 return const0_rtx;
5368 }
5369 else
5370 {
5371 rtx addr = XEXP (target, 0);
5372 rtx to_rtx = target;
5373
5374 /* If a value is wanted, it must be the lhs;
5375 so make the address stable for multiple use. */
5376
5377 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5378 && ! CONSTANT_ADDRESS_P (addr)
5379 /* A frame-pointer reference is already stable. */
5380 && ! (GET_CODE (addr) == PLUS
5381 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5382 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5383 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5384 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5385
5386 /* Now build a reference to just the desired component. */
5387
5388 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5389
5390 if (to_rtx == target)
5391 to_rtx = copy_rtx (to_rtx);
5392
5393 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5394 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5395 set_mem_alias_set (to_rtx, alias_set);
5396
5397 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5398 }
5399 }
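/* An illustrative sketch, not part of the original source: for an
   assignment such as

       struct s { int a : 3; int b : 5; } x;   x.b = v;

   the caller would typically reach store_field with BITSIZE == 5,
   BITPOS == 3 (on a common layout) and MODE == VOIDmode, i.e. a
   bit-field store, so the bit-field path above is taken and
   store_bit_field emits the masked insertion.  When VALUE_MODE is not
   VOIDmode, the code above also hands back an rtx for the stored
   value, reusing TEMP rather than refetching the bit-field when that
   is safe.  */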
5400 \f
5401 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5402 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5403 codes and find the ultimate containing object, which we return.
5404
5405 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5406 bit position, and *PUNSIGNEDP to the signedness of the field.
5407 If the position of the field is variable, we store a tree
5408 giving the variable offset (in units) in *POFFSET.
5409 This offset is in addition to the bit position.
5410 If the position is not variable, we store 0 in *POFFSET.
5411
5412 If any of the extraction expressions is volatile,
5413 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5414
5415 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5416 is a mode that can be used to access the field. In that case, *PBITSIZE
5417 is redundant.
5418
5419 If the field describes a variable-sized object, *PMODE is set to
5420 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5421 this case, but the address of the object can be found. */
5422
5423 tree
5424 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5425 HOST_WIDE_INT *pbitpos, tree *poffset,
5426 enum machine_mode *pmode, int *punsignedp,
5427 int *pvolatilep)
5428 {
5429 tree size_tree = 0;
5430 enum machine_mode mode = VOIDmode;
5431 tree offset = size_zero_node;
5432 tree bit_offset = bitsize_zero_node;
5433 tree tem;
5434
5435 /* First get the mode, signedness, and size. We do this from just the
5436 outermost expression. */
5437 if (TREE_CODE (exp) == COMPONENT_REF)
5438 {
5439 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5440 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5441 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5442
5443 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5444 }
5445 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5446 {
5447 size_tree = TREE_OPERAND (exp, 1);
5448 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5449 }
5450 else
5451 {
5452 mode = TYPE_MODE (TREE_TYPE (exp));
5453 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5454
5455 if (mode == BLKmode)
5456 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5457 else
5458 *pbitsize = GET_MODE_BITSIZE (mode);
5459 }
5460
5461 if (size_tree != 0)
5462 {
5463 if (! host_integerp (size_tree, 1))
5464 mode = BLKmode, *pbitsize = -1;
5465 else
5466 *pbitsize = tree_low_cst (size_tree, 1);
5467 }
5468
5469 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5470 and find the ultimate containing object. */
5471 while (1)
5472 {
5473 if (TREE_CODE (exp) == BIT_FIELD_REF)
5474 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5475 else if (TREE_CODE (exp) == COMPONENT_REF)
5476 {
5477 tree field = TREE_OPERAND (exp, 1);
5478 tree this_offset = DECL_FIELD_OFFSET (field);
5479
5480 /* If this field hasn't been filled in yet, don't go
5481 past it. This should only happen when folding expressions
5482 made during type construction. */
5483 if (this_offset == 0)
5484 break;
5485 else
5486 this_offset = SUBSTITUTE_PLACEHOLDER_IN_EXPR (this_offset, exp);
5487
5488 offset = size_binop (PLUS_EXPR, offset, this_offset);
5489 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5490 DECL_FIELD_BIT_OFFSET (field));
5491
5492 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5493 }
5494
5495 else if (TREE_CODE (exp) == ARRAY_REF
5496 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5497 {
5498 tree index = TREE_OPERAND (exp, 1);
5499 tree array = TREE_OPERAND (exp, 0);
5500 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5501 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5502 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5503
5504 /* We assume all arrays have sizes that are a multiple of a byte.
5505 First subtract the lower bound, if any, in the type of the
5506 index, then convert to sizetype and multiply by the size of the
5507 array element. */
5508 if (low_bound != 0 && ! integer_zerop (low_bound))
5509 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5510 index, low_bound));
5511
5512 /* If the index has a self-referential type, instantiate it with
5513 the object; likewise for the component size. */
5514 index = SUBSTITUTE_PLACEHOLDER_IN_EXPR (index, exp);
5515 unit_size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (unit_size, array);
5516 offset = size_binop (PLUS_EXPR, offset,
5517 size_binop (MULT_EXPR,
5518 convert (sizetype, index),
5519 unit_size));
5520 }
5521
5522 /* We can go inside most conversions: all NON_LVALUE_EXPRs, all normal
5523 conversions that don't change the mode, and all view conversions
5524 except those that need to "step up" the alignment. */
5525 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5526 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5527 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5528 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5529 && STRICT_ALIGNMENT
5530 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5531 < BIGGEST_ALIGNMENT)
5532 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5533 || TYPE_ALIGN_OK (TREE_TYPE
5534 (TREE_OPERAND (exp, 0))))))
5535 && ! ((TREE_CODE (exp) == NOP_EXPR
5536 || TREE_CODE (exp) == CONVERT_EXPR)
5537 && (TYPE_MODE (TREE_TYPE (exp))
5538 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5539 break;
5540
5541 /* If any reference in the chain is volatile, the effect is volatile. */
5542 if (TREE_THIS_VOLATILE (exp))
5543 *pvolatilep = 1;
5544
5545 exp = TREE_OPERAND (exp, 0);
5546 }
5547
5548 /* If OFFSET is constant, see if we can return the whole thing as a
5549 constant bit position. Otherwise, split it up. */
5550 if (host_integerp (offset, 0)
5551 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5552 bitsize_unit_node))
5553 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5554 && host_integerp (tem, 0))
5555 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5556 else
5557 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5558
5559 *pmode = mode;
5560 return exp;
5561 }
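/* Two illustrative decompositions, assuming a hypothetical target
   where int is a 32-bit SImode quantity:

     struct s { int x; int y; } v;    for the reference v.y:
       returns the tree for `v', with *PBITSIZE = 32, *PBITPOS = 32,
       *POFFSET = 0 and *PMODE = SImode (not a bit-field).

     int a[100];                      for a[i] with non-constant i:
       returns the tree for `a', with *PBITSIZE = 32, *PBITPOS = 0,
       *POFFSET = the sizetype tree `(sizetype) i * 4' and
       *PMODE = SImode.

   *PUNSIGNEDP reflects the signedness of the referenced field or
   type, and *PVOLATILEP is set if any reference in the chain is
   volatile, as described above.  */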
5562
5563 /* Return 1 if T is an expression that get_inner_reference handles. */
5564
5565 int
5566 handled_component_p (tree t)
5567 {
5568 switch (TREE_CODE (t))
5569 {
5570 case BIT_FIELD_REF:
5571 case COMPONENT_REF:
5572 case ARRAY_REF:
5573 case ARRAY_RANGE_REF:
5574 case NON_LVALUE_EXPR:
5575 case VIEW_CONVERT_EXPR:
5576 return 1;
5577
5578 /* ??? Sure they are handled, but get_inner_reference may return
5579 a different PBITSIZE, depending upon whether the expression is
5580 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
5581 case NOP_EXPR:
5582 case CONVERT_EXPR:
5583 return (TYPE_MODE (TREE_TYPE (t))
5584 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5585
5586 default:
5587 return 0;
5588 }
5589 }
5590 \f
5591 /* Given an rtx VALUE that may contain additions and multiplications, return
5592 an equivalent value that just refers to a register, memory, or constant.
5593 This is done by generating instructions to perform the arithmetic and
5594 returning a pseudo-register containing the value.
5595
5596 The returned value may be a REG, SUBREG, MEM or constant. */
5597
5598 rtx
5599 force_operand (rtx value, rtx target)
5600 {
5601 rtx op1, op2;
5602 /* Use subtarget as the target for operand 0 of a binary operation. */
5603 rtx subtarget = get_subtarget (target);
5604 enum rtx_code code = GET_CODE (value);
5605
5606 /* Check for subreg applied to an expression produced by loop optimizer. */
5607 if (code == SUBREG
5608 && GET_CODE (SUBREG_REG (value)) != REG
5609 && GET_CODE (SUBREG_REG (value)) != MEM)
5610 {
5611 value = simplify_gen_subreg (GET_MODE (value),
5612 force_reg (GET_MODE (SUBREG_REG (value)),
5613 force_operand (SUBREG_REG (value),
5614 NULL_RTX)),
5615 GET_MODE (SUBREG_REG (value)),
5616 SUBREG_BYTE (value));
5617 code = GET_CODE (value);
5618 }
5619
5620 /* Check for a PIC address load. */
5621 if ((code == PLUS || code == MINUS)
5622 && XEXP (value, 0) == pic_offset_table_rtx
5623 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5624 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5625 || GET_CODE (XEXP (value, 1)) == CONST))
5626 {
5627 if (!subtarget)
5628 subtarget = gen_reg_rtx (GET_MODE (value));
5629 emit_move_insn (subtarget, value);
5630 return subtarget;
5631 }
5632
5633 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5634 {
5635 if (!target)
5636 target = gen_reg_rtx (GET_MODE (value));
5637 convert_move (target, force_operand (XEXP (value, 0), NULL),
5638 code == ZERO_EXTEND);
5639 return target;
5640 }
5641
5642 if (ARITHMETIC_P (value))
5643 {
5644 op2 = XEXP (value, 1);
5645 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
5646 subtarget = 0;
5647 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5648 {
5649 code = PLUS;
5650 op2 = negate_rtx (GET_MODE (value), op2);
5651 }
5652
5653 /* Check for an addition with OP2 a constant integer and our first
5654 operand a PLUS of a virtual register and something else. In that
5655 case, we want to emit the sum of the virtual register and the
5656 constant first and then add the other value. This allows virtual
5657 register instantiation to simply modify the constant rather than
5658 creating another one around this addition. */
5659 if (code == PLUS && GET_CODE (op2) == CONST_INT
5660 && GET_CODE (XEXP (value, 0)) == PLUS
5661 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5662 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5663 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5664 {
5665 rtx temp = expand_simple_binop (GET_MODE (value), code,
5666 XEXP (XEXP (value, 0), 0), op2,
5667 subtarget, 0, OPTAB_LIB_WIDEN);
5668 return expand_simple_binop (GET_MODE (value), code, temp,
5669 force_operand (XEXP (XEXP (value,
5670 0), 1), 0),
5671 target, 0, OPTAB_LIB_WIDEN);
5672 }
5673
5674 op1 = force_operand (XEXP (value, 0), subtarget);
5675 op2 = force_operand (op2, NULL_RTX);
5676 switch (code)
5677 {
5678 case MULT:
5679 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5680 case DIV:
5681 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5682 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5683 target, 1, OPTAB_LIB_WIDEN);
5684 else
5685 return expand_divmod (0,
5686 FLOAT_MODE_P (GET_MODE (value))
5687 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5688 GET_MODE (value), op1, op2, target, 0);
5689 break;
5690 case MOD:
5691 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5692 target, 0);
5693 break;
5694 case UDIV:
5695 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5696 target, 1);
5697 break;
5698 case UMOD:
5699 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5700 target, 1);
5701 break;
5702 case ASHIFTRT:
5703 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5704 target, 0, OPTAB_LIB_WIDEN);
5705 break;
5706 default:
5707 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5708 target, 1, OPTAB_LIB_WIDEN);
5709 }
5710 }
5711 if (UNARY_P (value))
5712 {
5713 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5714 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5715 }
5716
5717 #ifdef INSN_SCHEDULING
5718 /* On machines that have insn scheduling, we want all memory references to be
5719 explicit, so we need to deal with such paradoxical SUBREGs. */
5720 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5721 && (GET_MODE_SIZE (GET_MODE (value))
5722 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5723 value
5724 = simplify_gen_subreg (GET_MODE (value),
5725 force_reg (GET_MODE (SUBREG_REG (value)),
5726 force_operand (SUBREG_REG (value),
5727 NULL_RTX)),
5728 GET_MODE (SUBREG_REG (value)),
5729 SUBREG_BYTE (value));
5730 #endif
5731
5732 return value;
5733 }
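/* An illustrative use, with hypothetical operands: given
   VALUE = (plus:SI (mult:SI (reg:SI 60) (const_int 4)) (reg:SI 61)),
   force_operand emits the multiply and the addition as real insns and
   returns a register (TARGET if convenient) holding the sum, so the
   caller can use the result wherever a general operand is needed.  A
   VALUE that is already a plain REG, MEM or constant is returned
   unchanged.  */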
5734 \f
5735 /* Subroutine of expand_expr: return nonzero iff there is no way that
5736 EXP can reference X, which is being modified. TOP_P is nonzero if this
5737 call is going to be used to determine whether we need a temporary
5738 for EXP, as opposed to a recursive call to this function.
5739
5740 It is always safe for this routine to return zero since it merely
5741 searches for optimization opportunities. */
5742
5743 int
5744 safe_from_p (rtx x, tree exp, int top_p)
5745 {
5746 rtx exp_rtl = 0;
5747 int i, nops;
5748 static tree save_expr_list;
5749
5750 if (x == 0
5751 /* If EXP has varying size, we MUST use a target since we currently
5752 have no way of allocating temporaries of variable size
5753 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5754 So we assume here that something at a higher level has prevented a
5755 clash. This is somewhat bogus, but the best we can do. Only
5756 do this when X is BLKmode and when we are at the top level. */
5757 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5758 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5759 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5760 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5761 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5762 != INTEGER_CST)
5763 && GET_MODE (x) == BLKmode)
5764 /* If X is in the outgoing argument area, it is always safe. */
5765 || (GET_CODE (x) == MEM
5766 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5767 || (GET_CODE (XEXP (x, 0)) == PLUS
5768 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5769 return 1;
5770
5771 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5772 find the underlying pseudo. */
5773 if (GET_CODE (x) == SUBREG)
5774 {
5775 x = SUBREG_REG (x);
5776 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5777 return 0;
5778 }
5779
5780 /* A SAVE_EXPR might appear many times in the expression passed to the
5781 top-level safe_from_p call, and if it has a complex subexpression,
5782 examining it multiple times could result in a combinatorial explosion.
5783 E.g. on an Alpha running at least 200MHz, a Fortran testcase compiled
5784 with optimization took about 28 minutes to compile -- even though it was
5785 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5786 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5787 we have processed. Note that the only test of top_p was above. */
5788
5789 if (top_p)
5790 {
5791 int rtn;
5792 tree t;
5793
5794 save_expr_list = 0;
5795
5796 rtn = safe_from_p (x, exp, 0);
5797
5798 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5799 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5800
5801 return rtn;
5802 }
5803
5804 /* Now look at our tree code and possibly recurse. */
5805 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5806 {
5807 case 'd':
5808 exp_rtl = DECL_RTL_IF_SET (exp);
5809 break;
5810
5811 case 'c':
5812 return 1;
5813
5814 case 'x':
5815 if (TREE_CODE (exp) == TREE_LIST)
5816 {
5817 while (1)
5818 {
5819 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5820 return 0;
5821 exp = TREE_CHAIN (exp);
5822 if (!exp)
5823 return 1;
5824 if (TREE_CODE (exp) != TREE_LIST)
5825 return safe_from_p (x, exp, 0);
5826 }
5827 }
5828 else if (TREE_CODE (exp) == ERROR_MARK)
5829 return 1; /* An already-visited SAVE_EXPR? */
5830 else
5831 return 0;
5832
5833 case '2':
5834 case '<':
5835 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
5836 return 0;
5837 /* Fall through. */
5838
5839 case '1':
5840 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5841
5842 case 'e':
5843 case 'r':
5844 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5845 the expression. If it is set, we conflict iff we are that rtx or
5846 both are in memory. Otherwise, we check all operands of the
5847 expression recursively. */
5848
5849 switch (TREE_CODE (exp))
5850 {
5851 case ADDR_EXPR:
5852 /* If the operand is static or we are static, we can't conflict.
5853 Likewise if we don't conflict with the operand at all. */
5854 if (staticp (TREE_OPERAND (exp, 0))
5855 || TREE_STATIC (exp)
5856 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5857 return 1;
5858
5859 /* Otherwise, the only way this can conflict is if we are taking
5860 the address of a DECL and that address is part of X, which is
5861 very rare. */
5862 exp = TREE_OPERAND (exp, 0);
5863 if (DECL_P (exp))
5864 {
5865 if (!DECL_RTL_SET_P (exp)
5866 || GET_CODE (DECL_RTL (exp)) != MEM)
5867 return 0;
5868 else
5869 exp_rtl = XEXP (DECL_RTL (exp), 0);
5870 }
5871 break;
5872
5873 case INDIRECT_REF:
5874 if (GET_CODE (x) == MEM
5875 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5876 get_alias_set (exp)))
5877 return 0;
5878 break;
5879
5880 case CALL_EXPR:
5881 /* Assume that the call will clobber all hard registers and
5882 all of memory. */
5883 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5884 || GET_CODE (x) == MEM)
5885 return 0;
5886 break;
5887
5888 case RTL_EXPR:
5889 /* If a sequence exists, we would have to scan every instruction
5890 in the sequence to see if it was safe. This is probably not
5891 worthwhile. */
5892 if (RTL_EXPR_SEQUENCE (exp))
5893 return 0;
5894
5895 exp_rtl = RTL_EXPR_RTL (exp);
5896 break;
5897
5898 case WITH_CLEANUP_EXPR:
5899 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5900 break;
5901
5902 case CLEANUP_POINT_EXPR:
5903 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5904
5905 case SAVE_EXPR:
5906 exp_rtl = SAVE_EXPR_RTL (exp);
5907 if (exp_rtl)
5908 break;
5909
5910 /* If we've already scanned this, don't do it again. Otherwise,
5911 show we've scanned it and record for clearing the flag if we're
5912 going on. */
5913 if (TREE_PRIVATE (exp))
5914 return 1;
5915
5916 TREE_PRIVATE (exp) = 1;
5917 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5918 {
5919 TREE_PRIVATE (exp) = 0;
5920 return 0;
5921 }
5922
5923 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5924 return 1;
5925
5926 case BIND_EXPR:
5927 /* The only operand we look at is operand 1. The rest aren't
5928 part of the expression. */
5929 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5930
5931 default:
5932 break;
5933 }
5934
5935 /* If we have an rtx, we do not need to scan our operands. */
5936 if (exp_rtl)
5937 break;
5938
5939 nops = first_rtl_op (TREE_CODE (exp));
5940 for (i = 0; i < nops; i++)
5941 if (TREE_OPERAND (exp, i) != 0
5942 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5943 return 0;
5944
5945 /* If this is a language-specific tree code, it may require
5946 special handling. */
5947 if ((unsigned int) TREE_CODE (exp)
5948 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5949 && !lang_hooks.safe_from_p (x, exp))
5950 return 0;
5951 }
5952
5953 /* If we have an rtl, find any enclosed object. Then see if we conflict
5954 with it. */
5955 if (exp_rtl)
5956 {
5957 if (GET_CODE (exp_rtl) == SUBREG)
5958 {
5959 exp_rtl = SUBREG_REG (exp_rtl);
5960 if (GET_CODE (exp_rtl) == REG
5961 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5962 return 0;
5963 }
5964
5965 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5966 are memory and they conflict. */
5967 return ! (rtx_equal_p (x, exp_rtl)
5968 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5969 && true_dependence (exp_rtl, VOIDmode, x,
5970 rtx_addr_varies_p)));
5971 }
5972
5973 /* If we reach here, it is safe. */
5974 return 1;
5975 }
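/* Illustrative cases, with hypothetical X and EXP: if X is a pseudo
   register and EXP is `a + b' where neither operand's DECL_RTL is
   that register, both operands are scanned recursively and the result
   is 1.  If EXP contains a CALL_EXPR and X is a hard register or a
   MEM, the call is assumed to clobber X and the result is 0.  As
   noted above, returning 0 is always the conservative answer.  */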
5976
5977 /* Subroutine of expand_expr: return rtx if EXP is a
5978 variable or parameter; else return 0. */
5979
5980 static rtx
5981 var_rtx (tree exp)
5982 {
5983 STRIP_NOPS (exp);
5984 switch (TREE_CODE (exp))
5985 {
5986 case PARM_DECL:
5987 case VAR_DECL:
5988 return DECL_RTL (exp);
5989 default:
5990 return 0;
5991 }
5992 }
5993 \f
5994 /* Return the highest power of two that EXP is known to be a multiple of.
5995 This is used in updating alignment of MEMs in array references. */
5996
5997 static unsigned HOST_WIDE_INT
5998 highest_pow2_factor (tree exp)
5999 {
6000 unsigned HOST_WIDE_INT c0, c1;
6001
6002 switch (TREE_CODE (exp))
6003 {
6004 case INTEGER_CST:
6005 /* We can find the lowest bit that's a one. If the low
6006 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6007 We need to handle this case since we can find it in a COND_EXPR,
6008 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6009 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6010 later ICE. */
6011 if (TREE_CONSTANT_OVERFLOW (exp))
6012 return BIGGEST_ALIGNMENT;
6013 else
6014 {
6015 /* Note: tree_low_cst is intentionally not used here, since
6016 we don't care about the upper bits. */
6017 c0 = TREE_INT_CST_LOW (exp);
6018 c0 &= -c0;
6019 return c0 ? c0 : BIGGEST_ALIGNMENT;
6020 }
6021 break;
6022
6023 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6024 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6025 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6026 return MIN (c0, c1);
6027
6028 case MULT_EXPR:
6029 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6030 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6031 return c0 * c1;
6032
6033 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6034 case CEIL_DIV_EXPR:
6035 if (integer_pow2p (TREE_OPERAND (exp, 1))
6036 && host_integerp (TREE_OPERAND (exp, 1), 1))
6037 {
6038 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6039 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6040 return MAX (1, c0 / c1);
6041 }
6042 break;
6043
6044 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6045 case SAVE_EXPR:
6046 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6047
6048 case COMPOUND_EXPR:
6049 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6050
6051 case COND_EXPR:
6052 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6053 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6054 return MIN (c0, c1);
6055
6056 default:
6057 break;
6058 }
6059
6060 return 1;
6061 }
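/* A worked example, for a hypothetical EXP representing i * 12 + 8:
   the MULT_EXPR contributes 1 * 4 = 4 (the unknown factor of `i'
   defaults to 1, and the lowest set bit of 12 is 4), the INTEGER_CST
   8 contributes 8, and the PLUS_EXPR takes MIN (4, 8), so the result
   is 4: the expression is known to be a multiple of 4 but of no
   higher power of two.  */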
6062
6063 /* Similar, except that the alignment requirements of TARGET are
6064 taken into account. Assume it is at least as aligned as its
6065 type, unless it is a COMPONENT_REF in which case the layout of
6066 the structure gives the alignment. */
6067
6068 static unsigned HOST_WIDE_INT
6069 highest_pow2_factor_for_target (tree target, tree exp)
6070 {
6071 unsigned HOST_WIDE_INT target_align, factor;
6072
6073 factor = highest_pow2_factor (exp);
6074 if (TREE_CODE (target) == COMPONENT_REF)
6075 target_align = DECL_ALIGN (TREE_OPERAND (target, 1)) / BITS_PER_UNIT;
6076 else
6077 target_align = TYPE_ALIGN (TREE_TYPE (target)) / BITS_PER_UNIT;
6078 return MAX (factor, target_align);
6079 }
6080 \f
6081 /* Subroutine of expand_expr. Expand the two operands of a binary
6082 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6083 The value may be stored in TARGET if TARGET is nonzero. The
6084 MODIFIER argument is as documented by expand_expr. */
6085
6086 static void
6087 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6088 enum expand_modifier modifier)
6089 {
6090 if (! safe_from_p (target, exp1, 1))
6091 target = 0;
6092 if (operand_equal_p (exp0, exp1, 0))
6093 {
6094 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6095 *op1 = copy_rtx (*op0);
6096 }
6097 else
6098 {
6099 /* If we need to preserve evaluation order, copy exp0 into its own
6100 temporary variable so that it can't be clobbered by exp1. */
6101 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6102 exp0 = save_expr (exp0);
6103 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6104 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6105 }
6106 }
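/* For example, with hypothetical operands `*p + f ()' where f () has
   side effects and flag_evaluation_order is set, EXP0 is wrapped in a
   SAVE_EXPR, so the value of *p is captured in a temporary before
   f () is expanded; otherwise OP0 could still refer to memory that
   f () modifies.  */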
6107
6108 \f
6109 /* expand_expr: generate code for computing expression EXP.
6110 An rtx for the computed value is returned. The value is never null.
6111 In the case of a void EXP, const0_rtx is returned.
6112
6113 The value may be stored in TARGET if TARGET is nonzero.
6114 TARGET is just a suggestion; callers must assume that
6115 the rtx returned may not be the same as TARGET.
6116
6117 If TARGET is CONST0_RTX, it means that the value will be ignored.
6118
6119 If TMODE is not VOIDmode, it suggests generating the
6120 result in mode TMODE. But this is done only when convenient.
6121 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6122 TMODE is just a suggestion; callers must assume that
6123 the rtx returned may not have mode TMODE.
6124
6125 Note that TARGET may have neither TMODE nor MODE. In that case, it
6126 probably will not be used.
6127
6128 If MODIFIER is EXPAND_SUM then when EXP is an addition
6129 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6130 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6131 products as above, or REG or MEM, or constant.
6132 Ordinarily in such cases we would output mul or add instructions
6133 and then return a pseudo reg containing the sum.
6134
6135 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6136 it also marks a label as absolutely required (it can't be dead).
6137 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6138 This is used for outputting expressions used in initializers.
6139
6140 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6141 with a constant address even if that address is not normally legitimate.
6142 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6143
6144 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6145 a call parameter. Such targets require special care as we haven't yet
6146 marked TARGET so that it's safe from being trashed by libcalls. We
6147 don't want to use TARGET for anything but the final result;
6148 intermediate values must go elsewhere. Additionally, calls to
6149 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6150
6151 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6152 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6153 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6154 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6155 recursively. */
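/* As an illustration of EXPAND_SUM, with hypothetical operands:
   expanding `a + b * 4' under EXPAND_SUM may simply return
   (plus:SI (reg:SI 58) (mult:SI (reg:SI 59) (const_int 4)))
   without emitting add or mult insns, so the caller can fold the
   whole expression into an address; under EXPAND_NORMAL the sum
   would instead be computed into a pseudo register.  */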
6156
6157 rtx
6158 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6159 enum expand_modifier modifier, rtx *alt_rtl)
6160 {
6161 rtx op0, op1, temp;
6162 tree type = TREE_TYPE (exp);
6163 int unsignedp;
6164 enum machine_mode mode;
6165 enum tree_code code = TREE_CODE (exp);
6166 optab this_optab;
6167 rtx subtarget, original_target;
6168 int ignore;
6169 tree context;
6170
6171 /* Handle ERROR_MARK before anybody tries to access its type. */
6172 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6173 {
6174 op0 = CONST0_RTX (tmode);
6175 if (op0 != 0)
6176 return op0;
6177 return const0_rtx;
6178 }
6179
6180 mode = TYPE_MODE (type);
6181 unsignedp = TYPE_UNSIGNED (type);
6182
6183 /* Use subtarget as the target for operand 0 of a binary operation. */
6184 subtarget = get_subtarget (target);
6185 original_target = target;
6186 ignore = (target == const0_rtx
6187 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6188 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6189 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6190 && TREE_CODE (type) == VOID_TYPE));
6191
6192 /* If we are going to ignore this result, we need only do something
6193 if there is a side-effect somewhere in the expression. If there
6194 is, short-circuit the most common cases here. Note that we must
6195 not call expand_expr with anything but const0_rtx in case this
6196 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6197
6198 if (ignore)
6199 {
6200 if (! TREE_SIDE_EFFECTS (exp))
6201 return const0_rtx;
6202
6203 /* Ensure we reference a volatile object even if value is ignored, but
6204 don't do this if all we are doing is taking its address. */
6205 if (TREE_THIS_VOLATILE (exp)
6206 && TREE_CODE (exp) != FUNCTION_DECL
6207 && mode != VOIDmode && mode != BLKmode
6208 && modifier != EXPAND_CONST_ADDRESS)
6209 {
6210 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6211 if (GET_CODE (temp) == MEM)
6212 temp = copy_to_reg (temp);
6213 return const0_rtx;
6214 }
6215
6216 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6217 || code == INDIRECT_REF || code == BUFFER_REF)
6218 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6219 modifier);
6220
6221 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6222 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6223 {
6224 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6225 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6226 return const0_rtx;
6227 }
6228 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6229 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6230 /* If the second operand has no side effects, just evaluate
6231 the first. */
6232 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6233 modifier);
6234 else if (code == BIT_FIELD_REF)
6235 {
6236 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6237 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6238 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6239 return const0_rtx;
6240 }
6241
6242 target = 0;
6243 }
6244
6245 /* If we will do cse, generate all results into pseudo registers
6246 since 1) that allows cse to find more things
6247 and 2) otherwise cse could produce an insn the machine
6248 cannot support. An exception is a CONSTRUCTOR into a multi-word
6249 MEM: that's much more likely to be most efficient into the MEM.
6250 Another is a CALL_EXPR which must return in memory. */
6251
6252 if (! cse_not_expected && mode != BLKmode && target
6253 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6254 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6255 && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
6256 target = 0;
6257
6258 switch (code)
6259 {
6260 case LABEL_DECL:
6261 {
6262 tree function = decl_function_context (exp);
6263 /* Labels in containing functions, or labels used from initializers,
6264 must be forced. */
6265 if (modifier == EXPAND_INITIALIZER
6266 || (function != current_function_decl
6267 && function != inline_function_decl
6268 && function != 0))
6269 temp = force_label_rtx (exp);
6270 else
6271 temp = label_rtx (exp);
6272
6273 temp = gen_rtx_MEM (FUNCTION_MODE, gen_rtx_LABEL_REF (Pmode, temp));
6274 if (function != current_function_decl
6275 && function != inline_function_decl && function != 0)
6276 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6277 return temp;
6278 }
6279
6280 case PARM_DECL:
6281 if (!DECL_RTL_SET_P (exp))
6282 {
6283 error ("%Jprior parameter's size depends on '%D'", exp, exp);
6284 return CONST0_RTX (mode);
6285 }
6286
6287 /* ... fall through ... */
6288
6289 case VAR_DECL:
6290 /* If a static var's type was incomplete when the decl was written,
6291 but the type is complete now, lay out the decl now. */
6292 if (DECL_SIZE (exp) == 0
6293 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6294 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6295 layout_decl (exp, 0);
6296
6297 /* ... fall through ... */
6298
6299 case FUNCTION_DECL:
6300 case RESULT_DECL:
6301 if (DECL_RTL (exp) == 0)
6302 abort ();
6303
6304 /* Ensure the variable is marked as used even if it doesn't go through
6305 a parser. If it hasn't been used yet, write out an external
6306 definition. */
6307 if (! TREE_USED (exp))
6308 {
6309 assemble_external (exp);
6310 TREE_USED (exp) = 1;
6311 }
6312
6313 /* Show we haven't gotten RTL for this yet. */
6314 temp = 0;
6315
6316 /* Handle variables inherited from containing functions. */
6317 context = decl_function_context (exp);
6318
6319 /* We treat inline_function_decl as an alias for the current function
6320 because that is the inline function whose vars, types, etc.
6321 are being merged into the current function.
6322 See expand_inline_function. */
6323
6324 if (context != 0 && context != current_function_decl
6325 && context != inline_function_decl
6326 /* If var is static, we don't need a static chain to access it. */
6327 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6328 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6329 {
6330 rtx addr;
6331
6332 /* Mark as non-local and addressable. */
6333 DECL_NONLOCAL (exp) = 1;
6334 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6335 abort ();
6336 lang_hooks.mark_addressable (exp);
6337 if (GET_CODE (DECL_RTL (exp)) != MEM)
6338 abort ();
6339 addr = XEXP (DECL_RTL (exp), 0);
6340 if (GET_CODE (addr) == MEM)
6341 addr
6342 = replace_equiv_address (addr,
6343 fix_lexical_addr (XEXP (addr, 0), exp));
6344 else
6345 addr = fix_lexical_addr (addr, exp);
6346
6347 temp = replace_equiv_address (DECL_RTL (exp), addr);
6348 }
6349
6350 /* This is the case of an array whose size is to be determined
6351 from its initializer, while the initializer is still being parsed.
6352 See expand_decl. */
6353
6354 else if (GET_CODE (DECL_RTL (exp)) == MEM
6355 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6356 temp = validize_mem (DECL_RTL (exp));
6357
6358 /* If DECL_RTL is memory, we are in the normal case; if either
6359 the address is not valid, or it is not a register and -fforce-addr
6360 is specified, get the address into a register. */
6361
6362 else if (GET_CODE (DECL_RTL (exp)) == MEM
6363 && modifier != EXPAND_CONST_ADDRESS
6364 && modifier != EXPAND_SUM
6365 && modifier != EXPAND_INITIALIZER
6366 && (! memory_address_p (DECL_MODE (exp),
6367 XEXP (DECL_RTL (exp), 0))
6368 || (flag_force_addr
6369 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6370 {
6371 if (alt_rtl)
6372 *alt_rtl = DECL_RTL (exp);
6373 temp = replace_equiv_address (DECL_RTL (exp),
6374 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6375 }
6376
6377 /* If we got something, return it. But first, set the alignment
6378 if the address is a register. */
6379 if (temp != 0)
6380 {
6381 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6382 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6383
6384 return temp;
6385 }
6386
6387 /* If the mode of DECL_RTL does not match that of the decl, it
6388 must be a promoted value. We return a SUBREG of the wanted mode,
6389 but mark it so that we know that it was already extended. */
6390
6391 if (GET_CODE (DECL_RTL (exp)) == REG
6392 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6393 {
6394 /* Get the signedness used for this variable. Ensure we get the
6395 same mode we got when the variable was declared. */
6396 if (GET_MODE (DECL_RTL (exp))
6397 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6398 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6399 abort ();
6400
6401 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6402 SUBREG_PROMOTED_VAR_P (temp) = 1;
6403 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6404 return temp;
6405 }
6406
6407 return DECL_RTL (exp);
6408
6409 case INTEGER_CST:
6410 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6411 TREE_INT_CST_HIGH (exp), mode);
6412
6413 /* ??? If overflow is set, fold will have done an incomplete job,
6414 which can result in (plus xx (const_int 0)), which can get
6415 simplified by validate_replace_rtx during virtual register
6416 instantiation, which can result in unrecognizable insns.
6417 Avoid this by forcing all overflows into registers. */
6418 if (TREE_CONSTANT_OVERFLOW (exp)
6419 && modifier != EXPAND_INITIALIZER)
6420 temp = force_reg (mode, temp);
6421
6422 return temp;
6423
6424 case VECTOR_CST:
6425 return const_vector_from_tree (exp);
6426
6427 case CONST_DECL:
6428 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6429
6430 case REAL_CST:
6431 /* If optimized, generate immediate CONST_DOUBLE
6432 which will be turned into memory by reload if necessary.
6433
6434 We used to force a register so that loop.c could see it. But
6435 this does not allow gen_* patterns to perform optimizations with
6436 the constants. It also produces two insns in cases like "x = 1.0;".
6437 On most machines, floating-point constants are not permitted in
6438 many insns, so we'd end up copying it to a register in any case.
6439
6440 Now, we do the copying in expand_binop, if appropriate. */
6441 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6442 TYPE_MODE (TREE_TYPE (exp)));
6443
6444 case COMPLEX_CST:
6445 /* Handle evaluating a complex constant in a CONCAT target. */
6446 if (original_target && GET_CODE (original_target) == CONCAT)
6447 {
6448 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6449 rtx rtarg, itarg;
6450
6451 rtarg = XEXP (original_target, 0);
6452 itarg = XEXP (original_target, 1);
6453
6454 /* Move the real and imaginary parts separately. */
6455 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6456 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6457
6458 if (op0 != rtarg)
6459 emit_move_insn (rtarg, op0);
6460 if (op1 != itarg)
6461 emit_move_insn (itarg, op1);
6462
6463 return original_target;
6464 }
6465
6466 /* ... fall through ... */
6467
6468 case STRING_CST:
6469 temp = output_constant_def (exp, 1);
6470
6471 /* temp contains a constant address.
6472 On RISC machines where a constant address isn't valid,
6473 make some insns to get that address into a register. */
6474 if (modifier != EXPAND_CONST_ADDRESS
6475 && modifier != EXPAND_INITIALIZER
6476 && modifier != EXPAND_SUM
6477 && (! memory_address_p (mode, XEXP (temp, 0))
6478 || flag_force_addr))
6479 return replace_equiv_address (temp,
6480 copy_rtx (XEXP (temp, 0)));
6481 return temp;
6482
6483 case EXPR_WITH_FILE_LOCATION:
6484 {
6485 rtx to_return;
6486 struct file_stack fs;
6487
6488 fs.location = input_location;
6489 fs.next = expr_wfl_stack;
6490 input_filename = EXPR_WFL_FILENAME (exp);
6491 input_line = EXPR_WFL_LINENO (exp);
6492 expr_wfl_stack = &fs;
6493 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6494 emit_line_note (input_location);
6495 /* Possibly avoid switching back and forth here. */
6496 to_return = expand_expr (EXPR_WFL_NODE (exp),
6497 (ignore ? const0_rtx : target),
6498 tmode, modifier);
6499 if (expr_wfl_stack != &fs)
6500 abort ();
6501 input_location = fs.location;
6502 expr_wfl_stack = fs.next;
6503 return to_return;
6504 }
6505
6506 case SAVE_EXPR:
6507 context = decl_function_context (exp);
6508
6509 /* If this SAVE_EXPR was at global context, assume we are an
6510 initialization function and move it into our context. */
6511 if (context == 0)
6512 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6513
6514 /* We treat inline_function_decl as an alias for the current function
6515 because that is the inline function whose vars, types, etc.
6516 are being merged into the current function.
6517 See expand_inline_function. */
6518 if (context == current_function_decl || context == inline_function_decl)
6519 context = 0;
6520
6521 /* If this is non-local, handle it. */
6522 if (context)
6523 {
6524 /* The following call just exists to abort if the context is
6525 not of a containing function. */
6526 find_function_data (context);
6527
6528 temp = SAVE_EXPR_RTL (exp);
6529 if (temp && GET_CODE (temp) == REG)
6530 {
6531 put_var_into_stack (exp, /*rescan=*/true);
6532 temp = SAVE_EXPR_RTL (exp);
6533 }
6534 if (temp == 0 || GET_CODE (temp) != MEM)
6535 abort ();
6536 return
6537 replace_equiv_address (temp,
6538 fix_lexical_addr (XEXP (temp, 0), exp));
6539 }
6540 if (SAVE_EXPR_RTL (exp) == 0)
6541 {
6542 if (mode == VOIDmode)
6543 temp = const0_rtx;
6544 else
6545 temp = assign_temp (build_qualified_type (type,
6546 (TYPE_QUALS (type)
6547 | TYPE_QUAL_CONST)),
6548 3, 0, 0);
6549
6550 SAVE_EXPR_RTL (exp) = temp;
6551 if (!optimize && GET_CODE (temp) == REG)
6552 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6553 save_expr_regs);
6554
6555 /* If the mode of TEMP does not match that of the expression, it
6556 must be a promoted value. We pass store_expr a SUBREG of the
6557 wanted mode but mark it so that we know that it was already
6558 extended. */
6559
6560 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6561 {
6562 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6563 promote_mode (type, mode, &unsignedp, 0);
6564 SUBREG_PROMOTED_VAR_P (temp) = 1;
6565 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6566 }
6567
6568 if (temp == const0_rtx)
6569 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6570 else
6571 store_expr (TREE_OPERAND (exp, 0), temp,
6572 modifier == EXPAND_STACK_PARM ? 2 : 0);
6573
6574 TREE_USED (exp) = 1;
6575 }
6576
6577 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6578 must be a promoted value. We return a SUBREG of the wanted mode,
6579 but mark it so that we know that it was already extended. */
6580
6581 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6582 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6583 {
6584 /* Compute the signedness and make the proper SUBREG. */
6585 promote_mode (type, mode, &unsignedp, 0);
6586 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6587 SUBREG_PROMOTED_VAR_P (temp) = 1;
6588 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6589 return temp;
6590 }
6591
6592 return SAVE_EXPR_RTL (exp);
6593
6594 case UNSAVE_EXPR:
6595 {
6596 rtx temp;
6597 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6598 TREE_OPERAND (exp, 0)
6599 = lang_hooks.unsave_expr_now (TREE_OPERAND (exp, 0));
6600 return temp;
6601 }
6602
6603 case GOTO_EXPR:
6604 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6605 expand_goto (TREE_OPERAND (exp, 0));
6606 else
6607 expand_computed_goto (TREE_OPERAND (exp, 0));
6608 return const0_rtx;
6609
6610 case EXIT_EXPR:
6611 expand_exit_loop_if_false (NULL,
6612 invert_truthvalue (TREE_OPERAND (exp, 0)));
6613 return const0_rtx;
6614
6615 case LABELED_BLOCK_EXPR:
6616 if (LABELED_BLOCK_BODY (exp))
6617 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
6618 /* Should perhaps use expand_label, but this is simpler and safer. */
6619 do_pending_stack_adjust ();
6620 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6621 return const0_rtx;
6622
6623 case EXIT_BLOCK_EXPR:
6624 if (EXIT_BLOCK_RETURN (exp))
6625 sorry ("returned value in block_exit_expr");
6626 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6627 return const0_rtx;
6628
6629 case LOOP_EXPR:
6630 push_temp_slots ();
6631 expand_start_loop (1);
6632 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
6633 expand_end_loop ();
6634 pop_temp_slots ();
6635
6636 return const0_rtx;
6637
6638 case BIND_EXPR:
6639 {
6640 tree vars = TREE_OPERAND (exp, 0);
6641
6642 /* Need to open a binding contour here because
6643 if there are any cleanups they must be contained here. */
6644 expand_start_bindings (2);
6645
6646 /* Mark the corresponding BLOCK for output in its proper place. */
6647 if (TREE_OPERAND (exp, 2) != 0
6648 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6649 lang_hooks.decls.insert_block (TREE_OPERAND (exp, 2));
6650
6651 /* If VARS have not yet been expanded, expand them now. */
6652 while (vars)
6653 {
6654 if (!DECL_RTL_SET_P (vars))
6655 expand_decl (vars);
6656 expand_decl_init (vars);
6657 vars = TREE_CHAIN (vars);
6658 }
6659
6660 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
6661
6662 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6663
6664 return temp;
6665 }
6666
6667 case RTL_EXPR:
6668 if (RTL_EXPR_SEQUENCE (exp))
6669 {
6670 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6671 abort ();
6672 emit_insn (RTL_EXPR_SEQUENCE (exp));
6673 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6674 }
6675 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6676 free_temps_for_rtl_expr (exp);
6677 if (alt_rtl)
6678 *alt_rtl = RTL_EXPR_ALT_RTL (exp);
6679 return RTL_EXPR_RTL (exp);
6680
6681 case CONSTRUCTOR:
6682 /* If we don't need the result, just ensure we evaluate any
6683 subexpressions. */
6684 if (ignore)
6685 {
6686 tree elt;
6687
6688 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6689 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6690
6691 return const0_rtx;
6692 }
6693
6694 /* All elts simple constants => refer to a constant in memory. But
6695 if this is a non-BLKmode mode, let it store a field at a time
6696 since that should make a CONST_INT or CONST_DOUBLE when we
6697 fold. Likewise, if we have a target we can use, it is best to
6698 store directly into the target unless the type is large enough
6699 that memcpy will be used. If we are making an initializer and
6700 all operands are constant, put it in memory as well.
6701
6702 FIXME: Avoid trying to fill vector constructors piece-meal.
6703 Output them with output_constant_def below unless we're sure
6704 they're zeros. This should go away when vector initializers
6705 are treated like VECTOR_CST instead of arrays.
6706 */
6707 else if ((TREE_STATIC (exp)
6708 && ((mode == BLKmode
6709 && ! (target != 0 && safe_from_p (target, exp, 1)))
6710 || TREE_ADDRESSABLE (exp)
6711 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6712 && (! MOVE_BY_PIECES_P
6713 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6714 TYPE_ALIGN (type)))
6715 && ((TREE_CODE (type) == VECTOR_TYPE
6716 && !is_zeros_p (exp))
6717 || ! mostly_zeros_p (exp)))))
6718 || ((modifier == EXPAND_INITIALIZER
6719 || modifier == EXPAND_CONST_ADDRESS)
6720 && TREE_CONSTANT (exp)))
6721 {
6722 rtx constructor = output_constant_def (exp, 1);
6723
6724 if (modifier != EXPAND_CONST_ADDRESS
6725 && modifier != EXPAND_INITIALIZER
6726 && modifier != EXPAND_SUM)
6727 constructor = validize_mem (constructor);
6728
6729 return constructor;
6730 }
6731 else
6732 {
6733 /* Handle calls that pass values in multiple non-contiguous
6734 locations. The Irix 6 ABI has examples of this. */
6735 if (target == 0 || ! safe_from_p (target, exp, 1)
6736 || GET_CODE (target) == PARALLEL
6737 || modifier == EXPAND_STACK_PARM)
6738 target
6739 = assign_temp (build_qualified_type (type,
6740 (TYPE_QUALS (type)
6741 | (TREE_READONLY (exp)
6742 * TYPE_QUAL_CONST))),
6743 0, TREE_ADDRESSABLE (exp), 1);
6744
6745 store_constructor (exp, target, 0, int_expr_size (exp));
6746 return target;
6747 }
6748
6749 case INDIRECT_REF:
6750 {
6751 tree exp1 = TREE_OPERAND (exp, 0);
6752 tree index;
6753 tree string = string_constant (exp1, &index);
6754
6755 /* Try to optimize reads from const strings. */
6756 if (string
6757 && TREE_CODE (string) == STRING_CST
6758 && TREE_CODE (index) == INTEGER_CST
6759 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6760 && GET_MODE_CLASS (mode) == MODE_INT
6761 && GET_MODE_SIZE (mode) == 1
6762 && modifier != EXPAND_WRITE)
6763 return gen_int_mode (TREE_STRING_POINTER (string)
6764 [TREE_INT_CST_LOW (index)], mode);
6765
6766 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6767 op0 = memory_address (mode, op0);
6768 temp = gen_rtx_MEM (mode, op0);
6769 set_mem_attributes (temp, exp, 0);
6770
6771 /* If we are writing to this object and its type is a record with
6772 readonly fields, we must mark it as readonly so it will
6773 conflict with readonly references to those fields. */
6774 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
6775 RTX_UNCHANGING_P (temp) = 1;
6776
6777 return temp;
6778 }
6779
6780 case ARRAY_REF:
6781 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6782 abort ();
6783
6784 {
6785 tree array = TREE_OPERAND (exp, 0);
6786 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6787 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6788 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6789 HOST_WIDE_INT i;
6790
6791 /* Optimize the special-case of a zero lower bound.
6792
6793 We convert the low_bound to sizetype to avoid some problems
6794 with constant folding. (E.g. suppose the lower bound is 1,
6795 and its mode is QI. Without the conversion, (ARRAY
6796 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6797 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6798
6799 if (! integer_zerop (low_bound))
6800 index = size_diffop (index, convert (sizetype, low_bound));
6801
6802 /* Fold an expression like: "foo"[2].
6803 This is not done in fold so it won't happen inside &.
6804 Don't fold if this is for wide characters since it's too
6805 difficult to do correctly and this is a very rare case. */
6806
6807 if (modifier != EXPAND_CONST_ADDRESS
6808 && modifier != EXPAND_INITIALIZER
6809 && modifier != EXPAND_MEMORY
6810 && TREE_CODE (array) == STRING_CST
6811 && TREE_CODE (index) == INTEGER_CST
6812 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6813 && GET_MODE_CLASS (mode) == MODE_INT
6814 && GET_MODE_SIZE (mode) == 1)
6815 return gen_int_mode (TREE_STRING_POINTER (array)
6816 [TREE_INT_CST_LOW (index)], mode);
6817
6818 /* If this is a constant index into a constant array,
6819 just get the value from the array. Handle both the cases when
6820 we have an explicit constructor and when our operand is a variable
6821 that was declared const. */
6822
6823 if (modifier != EXPAND_CONST_ADDRESS
6824 && modifier != EXPAND_INITIALIZER
6825 && modifier != EXPAND_MEMORY
6826 && TREE_CODE (array) == CONSTRUCTOR
6827 && ! TREE_SIDE_EFFECTS (array)
6828 && TREE_CODE (index) == INTEGER_CST
6829 && 0 > compare_tree_int (index,
6830 list_length (CONSTRUCTOR_ELTS
6831 (TREE_OPERAND (exp, 0)))))
6832 {
6833 tree elem;
6834
6835 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6836 i = TREE_INT_CST_LOW (index);
6837 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6838 ;
6839
6840 if (elem)
6841 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6842 modifier);
6843 }
6844
6845 else if (optimize >= 1
6846 && modifier != EXPAND_CONST_ADDRESS
6847 && modifier != EXPAND_INITIALIZER
6848 && modifier != EXPAND_MEMORY
6849 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6850 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6851 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
6852 && targetm.binds_local_p (array))
6853 {
6854 if (TREE_CODE (index) == INTEGER_CST)
6855 {
6856 tree init = DECL_INITIAL (array);
6857
6858 if (TREE_CODE (init) == CONSTRUCTOR)
6859 {
6860 tree elem;
6861
6862 for (elem = CONSTRUCTOR_ELTS (init);
6863 (elem
6864 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6865 elem = TREE_CHAIN (elem))
6866 ;
6867
6868 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6869 return expand_expr (fold (TREE_VALUE (elem)), target,
6870 tmode, modifier);
6871 }
6872 else if (TREE_CODE (init) == STRING_CST
6873 && 0 > compare_tree_int (index,
6874 TREE_STRING_LENGTH (init)))
6875 {
6876 tree type = TREE_TYPE (TREE_TYPE (init));
6877 enum machine_mode mode = TYPE_MODE (type);
6878
6879 if (GET_MODE_CLASS (mode) == MODE_INT
6880 && GET_MODE_SIZE (mode) == 1)
6881 return gen_int_mode (TREE_STRING_POINTER (init)
6882 [TREE_INT_CST_LOW (index)], mode);
6883 }
6884 }
6885 }
6886 }
6887 goto normal_inner_ref;
6888
6889 case COMPONENT_REF:
6890 /* If the operand is a CONSTRUCTOR, we can just extract the
6891 appropriate field if it is present. */
6892 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
6893 {
6894 tree elt;
6895
6896 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6897 elt = TREE_CHAIN (elt))
6898 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6899 /* We can normally use the value of the field in the
6900 CONSTRUCTOR. However, if this is a bitfield in
6901 an integral mode that we can fit in a HOST_WIDE_INT,
6902 we must mask only the number of bits in the bitfield,
6903 since this is done implicitly by the constructor. If
6904 the bitfield does not meet either of those conditions,
6905 we can't do this optimization. */
6906 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6907 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6908 == MODE_INT)
6909 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6910 <= HOST_BITS_PER_WIDE_INT))))
6911 {
6912 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
6913 && modifier == EXPAND_STACK_PARM)
6914 target = 0;
6915 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6916 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6917 {
6918 HOST_WIDE_INT bitsize
6919 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6920 enum machine_mode imode
6921 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6922
6923 if (TYPE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6924 {
6925 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6926 op0 = expand_and (imode, op0, op1, target);
6927 }
6928 else
6929 {
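/* Sign-extend the field by shifting it to the top of the mode and
   arithmetic-shifting it back down.  For example, a 3-bit signed
   field holding the bits 101 (-3) in an 8-bit mode is shifted left
   by 5 to 10100000 and then right by 5, giving 11111101 = -3.  */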
6930 tree count
6931 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6932 0);
6933
6934 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6935 target, 0);
6936 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6937 target, 0);
6938 }
6939 }
6940
6941 return op0;
6942 }
6943 }
6944 goto normal_inner_ref;
6945
6946 case BIT_FIELD_REF:
6947 case ARRAY_RANGE_REF:
6948 normal_inner_ref:
6949 {
6950 enum machine_mode mode1;
6951 HOST_WIDE_INT bitsize, bitpos;
6952 tree offset;
6953 int volatilep = 0;
6954 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6955 &mode1, &unsignedp, &volatilep);
6956 rtx orig_op0;
6957
6958 /* If we got back the original object, something is wrong. Perhaps
6959 we are evaluating an expression too early. In any event, don't
6960 infinitely recurse. */
6961 if (tem == exp)
6962 abort ();
6963
6964 /* If TEM's type is a union of variable size, pass TARGET to the inner
6965 computation, since it will need a temporary and TARGET is known
6966 to suffice. This occurs in unchecked conversion in Ada. */
6967
6968 orig_op0 = op0
6969 = expand_expr (tem,
6970 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6971 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6972 != INTEGER_CST)
6973 && modifier != EXPAND_STACK_PARM
6974 ? target : NULL_RTX),
6975 VOIDmode,
6976 (modifier == EXPAND_INITIALIZER
6977 || modifier == EXPAND_CONST_ADDRESS
6978 || modifier == EXPAND_STACK_PARM)
6979 ? modifier : EXPAND_NORMAL);
6980
6981 /* If this is a constant, put it into a register if it is a
6982 legitimate constant and OFFSET is 0 and memory if it isn't. */
6983 if (CONSTANT_P (op0))
6984 {
6985 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6986 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6987 && offset == 0)
6988 op0 = force_reg (mode, op0);
6989 else
6990 op0 = validize_mem (force_const_mem (mode, op0));
6991 }
6992
6993 /* Otherwise, if this object is not in memory and we either have an
6994 offset or a BLKmode result, put it there. This case can't occur in
6995 C, but can in Ada if we have unchecked conversion of an expression
6996 from a scalar type to an array or record type or for an
6997 ARRAY_RANGE_REF whose type is BLKmode. */
6998 else if (GET_CODE (op0) != MEM
6999 && (offset != 0
7000 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7001 {
7002 /* If the operand is a SAVE_EXPR, we can deal with this by
7003 forcing the SAVE_EXPR into memory. */
7004 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7005 {
7006 put_var_into_stack (TREE_OPERAND (exp, 0),
7007 /*rescan=*/true);
7008 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7009 }
7010 else
7011 {
7012 tree nt
7013 = build_qualified_type (TREE_TYPE (tem),
7014 (TYPE_QUALS (TREE_TYPE (tem))
7015 | TYPE_QUAL_CONST));
7016 rtx memloc = assign_temp (nt, 1, 1, 1);
7017
7018 emit_move_insn (memloc, op0);
7019 op0 = memloc;
7020 }
7021 }
7022
7023 if (offset != 0)
7024 {
7025 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7026 EXPAND_SUM);
7027
7028 if (GET_CODE (op0) != MEM)
7029 abort ();
7030
7031 #ifdef POINTERS_EXTEND_UNSIGNED
7032 if (GET_MODE (offset_rtx) != Pmode)
7033 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7034 #else
7035 if (GET_MODE (offset_rtx) != ptr_mode)
7036 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7037 #endif
7038
7039 if (GET_MODE (op0) == BLKmode
7040 /* A constant address in OP0 can have VOIDmode; we must
7041 not try to call force_reg in that case. */
7042 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7043 && bitsize != 0
7044 && (bitpos % bitsize) == 0
7045 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7046 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7047 {
7048 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7049 bitpos = 0;
7050 }
7051
7052 op0 = offset_address (op0, offset_rtx,
7053 highest_pow2_factor (offset));
7054 }
7055
7056 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7057 record its alignment as BIGGEST_ALIGNMENT. */
7058 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7059 && is_aligning_offset (offset, tem))
7060 set_mem_align (op0, BIGGEST_ALIGNMENT);
7061
7062 /* Don't forget about volatility even if this is a bitfield. */
7063 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7064 {
7065 if (op0 == orig_op0)
7066 op0 = copy_rtx (op0);
7067
7068 MEM_VOLATILE_P (op0) = 1;
7069 }
7070
7071 /* The following code doesn't handle CONCAT.
7072 Assume only bitpos == 0 can be used for CONCAT, due to
7073 one-element arrays having the same mode as their element. */
7074 if (GET_CODE (op0) == CONCAT)
7075 {
7076 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7077 abort ();
7078 return op0;
7079 }
7080
7081 /* In cases where an aligned union has an unaligned object
7082 as a field, we might be extracting a BLKmode value from
7083 an integer-mode (e.g., SImode) object. Handle this case
7084 by doing the extract into an object as wide as the field
7085 (which we know to be the width of a basic mode), then
7086 storing into memory, and changing the mode to BLKmode. */
7087 if (mode1 == VOIDmode
7088 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7089 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7090 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7091 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7092 && modifier != EXPAND_CONST_ADDRESS
7093 && modifier != EXPAND_INITIALIZER)
7094 /* If the field isn't aligned enough to fetch as a memref,
7095 fetch it as a bit field. */
7096 || (mode1 != BLKmode
7097 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7098 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7099 || (GET_CODE (op0) == MEM
7100 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7101 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7102 && ((modifier == EXPAND_CONST_ADDRESS
7103 || modifier == EXPAND_INITIALIZER)
7104 ? STRICT_ALIGNMENT
7105 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7106 || (bitpos % BITS_PER_UNIT != 0)))
7107 /* If the type and the field are a constant size and the
7108 size of the type isn't the same size as the bitfield,
7109 we must use bitfield operations. */
7110 || (bitsize >= 0
7111 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7112 == INTEGER_CST)
7113 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7114 bitsize)))
7115 {
7116 enum machine_mode ext_mode = mode;
7117
7118 if (ext_mode == BLKmode
7119 && ! (target != 0 && GET_CODE (op0) == MEM
7120 && GET_CODE (target) == MEM
7121 && bitpos % BITS_PER_UNIT == 0))
7122 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7123
7124 if (ext_mode == BLKmode)
7125 {
7126 if (target == 0)
7127 target = assign_temp (type, 0, 1, 1);
7128
7129 if (bitsize == 0)
7130 return target;
7131
7132 /* In this case, BITPOS must start at a byte boundary and
7133 TARGET, if specified, must be a MEM. */
7134 if (GET_CODE (op0) != MEM
7135 || (target != 0 && GET_CODE (target) != MEM)
7136 || bitpos % BITS_PER_UNIT != 0)
7137 abort ();
7138
7139 emit_block_move (target,
7140 adjust_address (op0, VOIDmode,
7141 bitpos / BITS_PER_UNIT),
7142 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7143 / BITS_PER_UNIT),
7144 (modifier == EXPAND_STACK_PARM
7145 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7146
7147 return target;
7148 }
7149
7150 op0 = validize_mem (op0);
7151
7152 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7153 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7154
7155 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7156 (modifier == EXPAND_STACK_PARM
7157 ? NULL_RTX : target),
7158 ext_mode, ext_mode,
7159 int_size_in_bytes (TREE_TYPE (tem)));
7160
7161 /* If the result is a record type and BITSIZE is narrower than
7162 the mode of OP0, an integral mode, and this is a big endian
7163 machine, we must put the field into the high-order bits. */
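/* For instance, a 24-bit field extracted into an SImode value on a
   big-endian target is shifted left by 32 - 24 = 8 bits so that its
   bytes end up in the most significant, lowest-addressed part of
   the word.  */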
7164 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7165 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7166 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7167 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7168 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7169 - bitsize),
7170 op0, 1);
7171
7172 /* If the result type is BLKmode, store the data into a temporary
7173 of the appropriate type, but with the mode corresponding to the
7174 mode for the data we have (op0's mode). It's tempting to make
7175 this a constant type, since we know it's only being stored once,
7176 but that can cause problems if we are taking the address of this
7177 COMPONENT_REF because the MEM of any reference via that address
7178 will have flags corresponding to the type, which will not
7179 necessarily be constant. */
7180 if (mode == BLKmode)
7181 {
7182 rtx new
7183 = assign_stack_temp_for_type
7184 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
7185
7186 emit_move_insn (new, op0);
7187 op0 = copy_rtx (new);
7188 PUT_MODE (op0, BLKmode);
7189 set_mem_attributes (op0, exp, 1);
7190 }
7191
7192 return op0;
7193 }
7194
7195 /* If the result is BLKmode, use that to access the object
7196 now as well. */
7197 if (mode == BLKmode)
7198 mode1 = BLKmode;
7199
7200 /* Get a reference to just this component. */
7201 if (modifier == EXPAND_CONST_ADDRESS
7202 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7203 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7204 else
7205 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7206
7207 if (op0 == orig_op0)
7208 op0 = copy_rtx (op0);
7209
7210 set_mem_attributes (op0, exp, 0);
7211 if (GET_CODE (XEXP (op0, 0)) == REG)
7212 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7213
7214 MEM_VOLATILE_P (op0) |= volatilep;
7215 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7216 || modifier == EXPAND_CONST_ADDRESS
7217 || modifier == EXPAND_INITIALIZER)
7218 return op0;
7219 else if (target == 0)
7220 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7221
7222 convert_move (target, op0, unsignedp);
7223 return target;
7224 }
7225
7226 case VTABLE_REF:
7227 {
7228 rtx insn, before = get_last_insn (), vtbl_ref;
7229
7230 /* Evaluate the interior expression. */
7231 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7232 tmode, modifier);
7233
7234 /* Get or create an instruction off which to hang a note. */
7235 if (REG_P (subtarget))
7236 {
7237 target = subtarget;
7238 insn = get_last_insn ();
7239 if (insn == before)
7240 abort ();
7241 if (! INSN_P (insn))
7242 insn = prev_nonnote_insn (insn);
7243 }
7244 else
7245 {
7246 target = gen_reg_rtx (GET_MODE (subtarget));
7247 insn = emit_move_insn (target, subtarget);
7248 }
7249
7250 /* Collect the data for the note. */
7251 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7252 vtbl_ref = plus_constant (vtbl_ref,
7253 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7254 /* Discard the initial CONST that was added. */
7255 vtbl_ref = XEXP (vtbl_ref, 0);
7256
7257 REG_NOTES (insn)
7258 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7259
7260 return target;
7261 }
7262
7263 /* Intended for a reference to a buffer of a file-object in Pascal.
7264 But it's not certain that a special tree code will really be
7265 necessary for these. INDIRECT_REF might work for them. */
7266 case BUFFER_REF:
7267 abort ();
7268
7269 case IN_EXPR:
7270 {
7271 /* Pascal set IN expression.
7272
7273 Algorithm:
7274 rlo = set_low - (set_low%bits_per_word);
7275 the_word = set [ (index - rlo)/bits_per_word ];
7276 bit_index = index % bits_per_word;
7277 bitmask = 1 << bit_index;
7278 return !!(the_word & bitmask); */
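/* For example, with set_low == 3, bits_per_word == 8 and index == 10:
   rlo = 3 - (3 % 8) = 0, the_word = set[(10 - 0) / 8] = set[1],
   bit_index = 10 % 8 = 2, so the result tests bit 2 of set[1].  */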
7279
7280 tree set = TREE_OPERAND (exp, 0);
7281 tree index = TREE_OPERAND (exp, 1);
7282 int iunsignedp = TYPE_UNSIGNED (TREE_TYPE (index));
7283 tree set_type = TREE_TYPE (set);
7284 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7285 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7286 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7287 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7288 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7289 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7290 rtx setaddr = XEXP (setval, 0);
7291 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7292 rtx rlow;
7293 rtx diff, quo, rem, addr, bit, result;
7294
7295 /* If domain is empty, answer is no. Likewise if index is constant
7296 and out of bounds. */
7297 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7298 && TREE_CODE (set_low_bound) == INTEGER_CST
7299 && tree_int_cst_lt (set_high_bound, set_low_bound))
7300 || (TREE_CODE (index) == INTEGER_CST
7301 && TREE_CODE (set_low_bound) == INTEGER_CST
7302 && tree_int_cst_lt (index, set_low_bound))
7303 || (TREE_CODE (set_high_bound) == INTEGER_CST
7304 && TREE_CODE (index) == INTEGER_CST
7305 && tree_int_cst_lt (set_high_bound, index))))
7306 return const0_rtx;
7307
7308 if (target == 0)
7309 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7310
7311 /* If we get here, we have to generate the code for both cases
7312 (in range and out of range). */
7313
7314 op0 = gen_label_rtx ();
7315 op1 = gen_label_rtx ();
7316
7317 if (! (GET_CODE (index_val) == CONST_INT
7318 && GET_CODE (lo_r) == CONST_INT))
7319 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7320 GET_MODE (index_val), iunsignedp, op1);
7321
7322 if (! (GET_CODE (index_val) == CONST_INT
7323 && GET_CODE (hi_r) == CONST_INT))
7324 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7325 GET_MODE (index_val), iunsignedp, op1);
7326
7327 /* Calculate the element number of bit zero in the first word
7328 of the set. */
7329 if (GET_CODE (lo_r) == CONST_INT)
7330 rlow = GEN_INT (INTVAL (lo_r)
7331 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7332 else
7333 rlow = expand_binop (index_mode, and_optab, lo_r,
7334 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7335 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7336
7337 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7338 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7339
7340 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7341 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7342 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7343 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7344
7345 addr = memory_address (byte_mode,
7346 expand_binop (index_mode, add_optab, diff,
7347 setaddr, NULL_RTX, iunsignedp,
7348 OPTAB_LIB_WIDEN));
7349
7350 /* Extract the bit we want to examine. */
7351 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7352 gen_rtx_MEM (byte_mode, addr),
7353 make_tree (TREE_TYPE (index), rem),
7354 NULL_RTX, 1);
7355 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7356 GET_MODE (target) == byte_mode ? target : 0,
7357 1, OPTAB_LIB_WIDEN);
7358
7359 if (result != target)
7360 convert_move (target, result, 1);
7361
7362 /* Output the code to handle the out-of-range case. */
7363 emit_jump (op0);
7364 emit_label (op1);
7365 emit_move_insn (target, const0_rtx);
7366 emit_label (op0);
7367 return target;
7368 }
7369
7370 case WITH_CLEANUP_EXPR:
7371 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7372 {
7373 WITH_CLEANUP_EXPR_RTL (exp)
7374 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7375 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7376 CLEANUP_EH_ONLY (exp));
7377
7378 /* That's it for this cleanup. */
7379 TREE_OPERAND (exp, 1) = 0;
7380 }
7381 return WITH_CLEANUP_EXPR_RTL (exp);
7382
7383 case CLEANUP_POINT_EXPR:
7384 {
7385 /* Start a new binding layer that will keep track of all cleanup
7386 actions to be performed. */
7387 expand_start_bindings (2);
7388
7389 target_temp_slot_level = temp_slot_level;
7390
7391 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7392 /* If we're going to use this value, load it up now. */
7393 if (! ignore)
7394 op0 = force_not_mem (op0);
7395 preserve_temp_slots (op0);
7396 expand_end_bindings (NULL_TREE, 0, 0);
7397 }
7398 return op0;
7399
7400 case CALL_EXPR:
7401 /* Check for a built-in function. */
7402 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7403 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7404 == FUNCTION_DECL)
7405 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7406 {
7407 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7408 == BUILT_IN_FRONTEND)
7409 /* ??? Use (*fun) form because expand_expr is a macro. */
7410 return (*lang_hooks.expand_expr) (exp, original_target,
7411 tmode, modifier,
7412 alt_rtl);
7413 else
7414 return expand_builtin (exp, target, subtarget, tmode, ignore);
7415 }
7416
7417 return expand_call (exp, target, ignore);
7418
7419 case NON_LVALUE_EXPR:
7420 case NOP_EXPR:
7421 case CONVERT_EXPR:
7422 case REFERENCE_EXPR:
7423 if (TREE_OPERAND (exp, 0) == error_mark_node)
7424 return const0_rtx;
7425
7426 if (TREE_CODE (type) == UNION_TYPE)
7427 {
7428 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7429
7430 /* If both input and output are BLKmode, this conversion isn't doing
7431 anything except possibly changing memory attribute. */
7432 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7433 {
7434 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7435 modifier);
7436
7437 result = copy_rtx (result);
7438 set_mem_attributes (result, exp, 0);
7439 return result;
7440 }
7441
7442 if (target == 0)
7443 {
7444 if (TYPE_MODE (type) != BLKmode)
7445 target = gen_reg_rtx (TYPE_MODE (type));
7446 else
7447 target = assign_temp (type, 0, 1, 1);
7448 }
7449
7450 if (GET_CODE (target) == MEM)
7451 /* Store data into beginning of memory target. */
7452 store_expr (TREE_OPERAND (exp, 0),
7453 adjust_address (target, TYPE_MODE (valtype), 0),
7454 modifier == EXPAND_STACK_PARM ? 2 : 0);
7455
7456 else if (GET_CODE (target) == REG)
7457 /* Store this field into a union of the proper type. */
7458 store_field (target,
7459 MIN ((int_size_in_bytes (TREE_TYPE
7460 (TREE_OPERAND (exp, 0)))
7461 * BITS_PER_UNIT),
7462 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7463 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7464 VOIDmode, 0, type, 0);
7465 else
7466 abort ();
7467
7468 /* Return the entire union. */
7469 return target;
7470 }
7471
7472 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7473 {
7474 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7475 modifier);
7476
7477 /* If the signedness of the conversion differs and OP0 is
7478 a promoted SUBREG, clear that indication since we now
7479 have to do the proper extension. */
7480 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7481 && GET_CODE (op0) == SUBREG)
7482 SUBREG_PROMOTED_VAR_P (op0) = 0;
7483
7484 return op0;
7485 }
7486
7487 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7488 if (GET_MODE (op0) == mode)
7489 return op0;
7490
7491 /* If OP0 is a constant, just convert it into the proper mode. */
7492 if (CONSTANT_P (op0))
7493 {
7494 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7495 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7496
7497 if (modifier == EXPAND_INITIALIZER)
7498 return simplify_gen_subreg (mode, op0, inner_mode,
7499 subreg_lowpart_offset (mode,
7500 inner_mode));
7501 else
7502 return convert_modes (mode, inner_mode, op0,
7503 TYPE_UNSIGNED (inner_type));
7504 }
7505
7506 if (modifier == EXPAND_INITIALIZER)
7507 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7508
7509 if (target == 0)
7510 return
7511 convert_to_mode (mode, op0,
7512 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7513 else
7514 convert_move (target, op0,
7515 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7516 return target;
7517
7518 case VIEW_CONVERT_EXPR:
7519 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7520
7521 /* If the input and output modes are both the same, we are done.
7522 Otherwise, if neither mode is BLKmode and both are integral and within
7523 a word, we can use gen_lowpart. If neither is true, make sure the
7524 operand is in memory and convert the MEM to the new mode. */
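/* For example, view-converting between two types that share SImode
   needs no work at all, while view-converting a float to an int
   (SFmode to SImode) is not an integer-to-integer case, so the
   value is forced into memory (if it is not already there) and
   reread in the new mode.  */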
7525 if (TYPE_MODE (type) == GET_MODE (op0))
7526 ;
7527 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7528 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7529 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7530 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7531 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7532 op0 = gen_lowpart (TYPE_MODE (type), op0);
7533 else if (GET_CODE (op0) != MEM)
7534 {
7535 /* If the operand is not a MEM, force it into memory. Since we
7536 are going to be changing the mode of the MEM, don't call
7537 force_const_mem for constants because we don't allow pool
7538 constants to change mode. */
7539 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7540
7541 if (TREE_ADDRESSABLE (exp))
7542 abort ();
7543
7544 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7545 target
7546 = assign_stack_temp_for_type
7547 (TYPE_MODE (inner_type),
7548 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7549
7550 emit_move_insn (target, op0);
7551 op0 = target;
7552 }
7553
7554 /* At this point, OP0 is in the correct mode. If the output type is such
7555 that the operand is known to be aligned, indicate that it is.
7556 Otherwise, we need only be concerned about alignment for non-BLKmode
7557 results. */
7558 if (GET_CODE (op0) == MEM)
7559 {
7560 op0 = copy_rtx (op0);
7561
7562 if (TYPE_ALIGN_OK (type))
7563 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7564 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7565 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7566 {
7567 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7568 HOST_WIDE_INT temp_size
7569 = MAX (int_size_in_bytes (inner_type),
7570 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7571 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7572 temp_size, 0, type);
7573 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7574
7575 if (TREE_ADDRESSABLE (exp))
7576 abort ();
7577
7578 if (GET_MODE (op0) == BLKmode)
7579 emit_block_move (new_with_op0_mode, op0,
7580 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7581 (modifier == EXPAND_STACK_PARM
7582 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7583 else
7584 emit_move_insn (new_with_op0_mode, op0);
7585
7586 op0 = new;
7587 }
7588
7589 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7590 }
7591
7592 return op0;
7593
7594 case PLUS_EXPR:
7595 this_optab = ! unsignedp && flag_trapv
7596 && (GET_MODE_CLASS (mode) == MODE_INT)
7597 ? addv_optab : add_optab;
7598
7599 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7600 something else, make sure we add the register to the constant and
7601 then to the other thing. This case can occur during strength
7602 reduction and doing it this way will produce better code if the
7603 frame pointer or argument pointer is eliminated.
7604
7605 fold-const.c will ensure that the constant is always in the inner
7606 PLUS_EXPR, so the only case we need to do anything about is if
7607 sp, ap, or fp is our second argument, in which case we must swap
7608 the innermost first argument and our second argument. */
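/* For example, (tmp + 4) + fp is rearranged here into (fp + 4) + tmp,
   so that the frame pointer and the constant can be combined first.  */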
7609
7610 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7611 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7612 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7613 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7614 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7615 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7616 {
7617 tree t = TREE_OPERAND (exp, 1);
7618
7619 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7620 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7621 }
7622
7623 /* If the result is to be ptr_mode and we are adding an integer to
7624 something, we might be forming a constant. So try to use
7625 plus_constant. If it produces a sum and we can't accept it,
7626 use force_operand. This allows P = &ARR[const] to generate
7627 efficient code on machines where a SYMBOL_REF is not a valid
7628 address.
7629
7630 If this is an EXPAND_SUM call, always return the sum. */
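/* For example, for P = &ARR[5] with 4-byte elements, plus_constant can
   fold the address into (const (plus (symbol_ref "ARR") (const_int 20)))
   rather than emitting an explicit addition.  */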
7631 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7632 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7633 {
7634 if (modifier == EXPAND_STACK_PARM)
7635 target = 0;
7636 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7637 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7638 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7639 {
7640 rtx constant_part;
7641
7642 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7643 EXPAND_SUM);
7644 /* Use immed_double_const to ensure that the constant is
7645 truncated according to the mode of OP1, then sign extended
7646 to a HOST_WIDE_INT. Using the constant directly can result
7647 in non-canonical RTL in a 64x32 cross compile. */
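/* For example, with a 64-bit HOST_WIDE_INT and a 32-bit target, the
   SImode value 0xffffffff must be represented as the CONST_INT -1,
   not as the CONST_INT 0xffffffff.  */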
7648 constant_part
7649 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7650 (HOST_WIDE_INT) 0,
7651 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7652 op1 = plus_constant (op1, INTVAL (constant_part));
7653 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7654 op1 = force_operand (op1, target);
7655 return op1;
7656 }
7657
7658 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7659 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7660 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7661 {
7662 rtx constant_part;
7663
7664 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7665 (modifier == EXPAND_INITIALIZER
7666 ? EXPAND_INITIALIZER : EXPAND_SUM));
7667 if (! CONSTANT_P (op0))
7668 {
7669 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7670 VOIDmode, modifier);
7671 /* Return a PLUS if modifier says it's OK. */
7672 if (modifier == EXPAND_SUM
7673 || modifier == EXPAND_INITIALIZER)
7674 return simplify_gen_binary (PLUS, mode, op0, op1);
7675 goto binop2;
7676 }
7677 /* Use immed_double_const to ensure that the constant is
7678 truncated according to the mode of OP1, then sign extended
7679 to a HOST_WIDE_INT. Using the constant directly can result
7680 in non-canonical RTL in a 64x32 cross compile. */
7681 constant_part
7682 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7683 (HOST_WIDE_INT) 0,
7684 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7685 op0 = plus_constant (op0, INTVAL (constant_part));
7686 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7687 op0 = force_operand (op0, target);
7688 return op0;
7689 }
7690 }
7691
7692 /* No sense saving up arithmetic to be done
7693 if it's all in the wrong mode to form part of an address.
7694 And force_operand won't know whether to sign-extend or
7695 zero-extend. */
7696 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7697 || mode != ptr_mode)
7698 {
7699 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7700 subtarget, &op0, &op1, 0);
7701 if (op0 == const0_rtx)
7702 return op1;
7703 if (op1 == const0_rtx)
7704 return op0;
7705 goto binop2;
7706 }
7707
7708 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7709 subtarget, &op0, &op1, modifier);
7710 return simplify_gen_binary (PLUS, mode, op0, op1);
7711
7712 case MINUS_EXPR:
7713 /* For initializers, we are allowed to return a MINUS of two
7714 symbolic constants. Here we handle all cases when both operands
7715 are constant. */
7716 /* Handle difference of two symbolic constants,
7717 for the sake of an initializer. */
7718 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7719 && really_constant_p (TREE_OPERAND (exp, 0))
7720 && really_constant_p (TREE_OPERAND (exp, 1)))
7721 {
7722 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7723 NULL_RTX, &op0, &op1, modifier);
7724
7725 /* If the last operand is a CONST_INT, use plus_constant of
7726 the negated constant. Else make the MINUS. */
7727 if (GET_CODE (op1) == CONST_INT)
7728 return plus_constant (op0, - INTVAL (op1));
7729 else
7730 return gen_rtx_MINUS (mode, op0, op1);
7731 }
7732
7733 this_optab = ! unsignedp && flag_trapv
7734 && (GET_MODE_CLASS(mode) == MODE_INT)
7735 ? subv_optab : sub_optab;
7736
7737 /* No sense saving up arithmetic to be done
7738 if it's all in the wrong mode to form part of an address.
7739 And force_operand won't know whether to sign-extend or
7740 zero-extend. */
7741 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7742 || mode != ptr_mode)
7743 goto binop;
7744
7745 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7746 subtarget, &op0, &op1, modifier);
7747
7748 /* Convert A - const to A + (-const). */
7749 if (GET_CODE (op1) == CONST_INT)
7750 {
7751 op1 = negate_rtx (mode, op1);
7752 return simplify_gen_binary (PLUS, mode, op0, op1);
7753 }
7754
7755 goto binop2;
7756
7757 case MULT_EXPR:
7758 /* If first operand is constant, swap them.
7759 Thus the following special case checks need only
7760 check the second operand. */
7761 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7762 {
7763 tree t1 = TREE_OPERAND (exp, 0);
7764 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7765 TREE_OPERAND (exp, 1) = t1;
7766 }
7767
7768 /* Attempt to return something suitable for generating an
7769 indexed address, for machines that support that. */
7770
7771 if (modifier == EXPAND_SUM && mode == ptr_mode
7772 && host_integerp (TREE_OPERAND (exp, 1), 0))
7773 {
7774 tree exp1 = TREE_OPERAND (exp, 1);
7775
7776 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7777 EXPAND_SUM);
7778
7779 if (GET_CODE (op0) != REG)
7780 op0 = force_operand (op0, NULL_RTX);
7781 if (GET_CODE (op0) != REG)
7782 op0 = copy_to_mode_reg (mode, op0);
7783
7784 return gen_rtx_MULT (mode, op0,
7785 gen_int_mode (tree_low_cst (exp1, 0),
7786 TYPE_MODE (TREE_TYPE (exp1))));
7787 }
7788
7789 if (modifier == EXPAND_STACK_PARM)
7790 target = 0;
7791
7792 /* Check for multiplying things that have been extended
7793 from a narrower type. If this machine supports multiplying
7794 in that narrower type with a result in the desired type,
7795 do it that way, and avoid the explicit type-conversion. */
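/* For example, (int) a * (int) b where A and B are shorts can use a
   HImode-by-HImode widening multiply (such as a mulhisi3 pattern, if
   the target provides one) instead of first widening both operands
   to SImode.  */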
7796 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7797 && TREE_CODE (type) == INTEGER_TYPE
7798 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7799 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7800 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7801 && int_fits_type_p (TREE_OPERAND (exp, 1),
7802 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7803 /* Don't use a widening multiply if a shift will do. */
7804 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7805 > HOST_BITS_PER_WIDE_INT)
7806 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7807 ||
7808 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7809 && (TYPE_PRECISION (TREE_TYPE
7810 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7811 == TYPE_PRECISION (TREE_TYPE
7812 (TREE_OPERAND
7813 (TREE_OPERAND (exp, 0), 0))))
7814 /* If both operands are extended, they must either both
7815 be zero-extended or both be sign-extended. */
7816 && (TYPE_UNSIGNED (TREE_TYPE
7817 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7818 == TYPE_UNSIGNED (TREE_TYPE
7819 (TREE_OPERAND
7820 (TREE_OPERAND (exp, 0), 0)))))))
7821 {
7822 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
7823 enum machine_mode innermode = TYPE_MODE (op0type);
7824 bool zextend_p = TYPE_UNSIGNED (op0type);
7825 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7826 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7827
7828 if (mode == GET_MODE_WIDER_MODE (innermode))
7829 {
7830 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7831 {
7832 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7833 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7834 TREE_OPERAND (exp, 1),
7835 NULL_RTX, &op0, &op1, 0);
7836 else
7837 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7838 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7839 NULL_RTX, &op0, &op1, 0);
7840 goto binop2;
7841 }
7842 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7843 && innermode == word_mode)
7844 {
7845 rtx htem, hipart;
7846 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7847 NULL_RTX, VOIDmode, 0);
7848 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7849 op1 = convert_modes (innermode, mode,
7850 expand_expr (TREE_OPERAND (exp, 1),
7851 NULL_RTX, VOIDmode, 0),
7852 unsignedp);
7853 else
7854 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7855 NULL_RTX, VOIDmode, 0);
7856 temp = expand_binop (mode, other_optab, op0, op1, target,
7857 unsignedp, OPTAB_LIB_WIDEN);
7858 hipart = gen_highpart (innermode, temp);
7859 htem = expand_mult_highpart_adjust (innermode, hipart,
7860 op0, op1, hipart,
7861 zextend_p);
7862 if (htem != hipart)
7863 emit_move_insn (hipart, htem);
7864 return temp;
7865 }
7866 }
7867 }
7868 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7869 subtarget, &op0, &op1, 0);
7870 return expand_mult (mode, op0, op1, target, unsignedp);
7871
7872 case TRUNC_DIV_EXPR:
7873 case FLOOR_DIV_EXPR:
7874 case CEIL_DIV_EXPR:
7875 case ROUND_DIV_EXPR:
7876 case EXACT_DIV_EXPR:
7877 if (modifier == EXPAND_STACK_PARM)
7878 target = 0;
7879 /* Possible optimization: compute the dividend with EXPAND_SUM
7880 then, if the divisor is constant, we can optimize the case
7881 where some terms of the dividend have coefficients divisible by it. */
7882 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7883 subtarget, &op0, &op1, 0);
7884 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7885
7886 case RDIV_EXPR:
7887 /* Emit a/b as a*(1/b). Later we may be able to CSE the reciprocal,
7888 saving an expensive divide. If not, combine will rebuild the original
7889 computation. */
7890 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7891 && TREE_CODE (type) == REAL_TYPE
7892 && !real_onep (TREE_OPERAND (exp, 0)))
7893 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7894 build (RDIV_EXPR, type,
7895 build_real (type, dconst1),
7896 TREE_OPERAND (exp, 1))),
7897 target, tmode, modifier);
7898 this_optab = sdiv_optab;
7899 goto binop;
7900
7901 case TRUNC_MOD_EXPR:
7902 case FLOOR_MOD_EXPR:
7903 case CEIL_MOD_EXPR:
7904 case ROUND_MOD_EXPR:
7905 if (modifier == EXPAND_STACK_PARM)
7906 target = 0;
7907 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7908 subtarget, &op0, &op1, 0);
7909 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7910
7911 case FIX_ROUND_EXPR:
7912 case FIX_FLOOR_EXPR:
7913 case FIX_CEIL_EXPR:
7914 abort (); /* Not used for C. */
7915
7916 case FIX_TRUNC_EXPR:
7917 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7918 if (target == 0 || modifier == EXPAND_STACK_PARM)
7919 target = gen_reg_rtx (mode);
7920 expand_fix (target, op0, unsignedp);
7921 return target;
7922
7923 case FLOAT_EXPR:
7924 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7925 if (target == 0 || modifier == EXPAND_STACK_PARM)
7926 target = gen_reg_rtx (mode);
7927 /* expand_float can't figure out what to do if FROM has VOIDmode.
7928 So give it the correct mode. With -O, cse will optimize this. */
7929 if (GET_MODE (op0) == VOIDmode)
7930 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7931 op0);
7932 expand_float (target, op0,
7933 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7934 return target;
7935
7936 case NEGATE_EXPR:
7937 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7938 if (modifier == EXPAND_STACK_PARM)
7939 target = 0;
7940 temp = expand_unop (mode,
7941 ! unsignedp && flag_trapv
7942 && (GET_MODE_CLASS(mode) == MODE_INT)
7943 ? negv_optab : neg_optab, op0, target, 0);
7944 if (temp == 0)
7945 abort ();
7946 return temp;
7947
7948 case ABS_EXPR:
7949 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7950 if (modifier == EXPAND_STACK_PARM)
7951 target = 0;
7952
7953 /* ABS_EXPR is not valid for complex arguments. */
7954 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7955 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7956 abort ();
7957
7958 /* Unsigned abs is simply the operand. Testing here means we don't
7959 risk generating incorrect code below. */
7960 if (TYPE_UNSIGNED (type))
7961 return op0;
7962
7963 return expand_abs (mode, op0, target, unsignedp,
7964 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7965
7966 case MAX_EXPR:
7967 case MIN_EXPR:
7968 target = original_target;
7969 if (target == 0
7970 || modifier == EXPAND_STACK_PARM
7971 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7972 || GET_MODE (target) != mode
7973 || (GET_CODE (target) == REG
7974 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7975 target = gen_reg_rtx (mode);
7976 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7977 target, &op0, &op1, 0);
7978
7979 /* First try to do it with a special MIN or MAX instruction.
7980 If that does not win, use a conditional jump to select the proper
7981 value. */
7982 this_optab = (unsignedp
7983 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7984 : (code == MIN_EXPR ? smin_optab : smax_optab));
7985
7986 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7987 OPTAB_WIDEN);
7988 if (temp != 0)
7989 return temp;
7990
7991 /* At this point, a MEM target is no longer useful; we will get better
7992 code without it. */
7993
7994 if (GET_CODE (target) == MEM)
7995 target = gen_reg_rtx (mode);
7996
7997 /* If op1 was placed in target, swap op0 and op1. */
7998 if (target != op0 && target == op1)
7999 {
8000 rtx tem = op0;
8001 op0 = op1;
8002 op1 = tem;
8003 }
8004
8005 if (target != op0)
8006 emit_move_insn (target, op0);
8007
8008 op0 = gen_label_rtx ();
8009
8010 /* If this mode is an integer too wide to compare properly,
8011 compare word by word. Rely on cse to optimize constant cases. */
8012 if (GET_MODE_CLASS (mode) == MODE_INT
8013 && ! can_compare_p (GE, mode, ccp_jump))
8014 {
8015 if (code == MAX_EXPR)
8016 do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
8017 NULL_RTX, op0);
8018 else
8019 do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
8020 NULL_RTX, op0);
8021 }
8022 else
8023 {
8024 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8025 unsignedp, mode, NULL_RTX, NULL_RTX, op0);
8026 }
8027 emit_move_insn (target, op1);
8028 emit_label (op0);
8029 return target;
8030
8031 case BIT_NOT_EXPR:
8032 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8033 if (modifier == EXPAND_STACK_PARM)
8034 target = 0;
8035 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8036 if (temp == 0)
8037 abort ();
8038 return temp;
8039
8040 /* ??? Can optimize bitwise operations with one arg constant.
8041 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8042 and (a bitwise1 b) bitwise2 b (etc)
8043 but that is probably not worth while. */
8044
8045 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8046 boolean values when we want in all cases to compute both of them. In
8047 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8048 as actual zero-or-1 values and then bitwise anding. In cases where
8049 there cannot be any side effects, better code would be made by
8050 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8051 how to recognize those cases. */
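/* For example, A && B expressed as a TRUTH_AND_EXPR reduces both A
   and B to 0 or 1 and combines them with a bitwise AND, so both
   operands are always evaluated; as a TRUTH_ANDIF_EXPR it would
   instead branch and skip B whenever A is false.  */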
8052
8053 case TRUTH_AND_EXPR:
8054 case BIT_AND_EXPR:
8055 this_optab = and_optab;
8056 goto binop;
8057
8058 case TRUTH_OR_EXPR:
8059 case BIT_IOR_EXPR:
8060 this_optab = ior_optab;
8061 goto binop;
8062
8063 case TRUTH_XOR_EXPR:
8064 case BIT_XOR_EXPR:
8065 this_optab = xor_optab;
8066 goto binop;
8067
8068 case LSHIFT_EXPR:
8069 case RSHIFT_EXPR:
8070 case LROTATE_EXPR:
8071 case RROTATE_EXPR:
8072 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8073 subtarget = 0;
8074 if (modifier == EXPAND_STACK_PARM)
8075 target = 0;
8076 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8077 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8078 unsignedp);
8079
8080 /* Could determine the answer when only additive constants differ. Also,
8081 the addition of one can be handled by changing the condition. */
8082 case LT_EXPR:
8083 case LE_EXPR:
8084 case GT_EXPR:
8085 case GE_EXPR:
8086 case EQ_EXPR:
8087 case NE_EXPR:
8088 case UNORDERED_EXPR:
8089 case ORDERED_EXPR:
8090 case UNLT_EXPR:
8091 case UNLE_EXPR:
8092 case UNGT_EXPR:
8093 case UNGE_EXPR:
8094 case UNEQ_EXPR:
8095 temp = do_store_flag (exp,
8096 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8097 tmode != VOIDmode ? tmode : mode, 0);
8098 if (temp != 0)
8099 return temp;
8100
8101 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8102 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8103 && original_target
8104 && GET_CODE (original_target) == REG
8105 && (GET_MODE (original_target)
8106 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8107 {
8108 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8109 VOIDmode, 0);
8110
8111 /* If temp is constant, we can just compute the result. */
8112 if (GET_CODE (temp) == CONST_INT)
8113 {
8114 if (INTVAL (temp) != 0)
8115 emit_move_insn (target, const1_rtx);
8116 else
8117 emit_move_insn (target, const0_rtx);
8118
8119 return target;
8120 }
8121
8122 if (temp != original_target)
8123 {
8124 enum machine_mode mode1 = GET_MODE (temp);
8125 if (mode1 == VOIDmode)
8126 mode1 = tmode != VOIDmode ? tmode : mode;
8127
8128 temp = copy_to_mode_reg (mode1, temp);
8129 }
8130
8131 op1 = gen_label_rtx ();
8132 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8133 GET_MODE (temp), unsignedp, op1);
8134 emit_move_insn (temp, const1_rtx);
8135 emit_label (op1);
8136 return temp;
8137 }
8138
8139 /* If no set-flag instruction, must generate a conditional
8140 store into a temporary variable. Drop through
8141 and handle this like && and ||. */
8142
8143 case TRUTH_ANDIF_EXPR:
8144 case TRUTH_ORIF_EXPR:
8145 if (! ignore
8146 && (target == 0
8147 || modifier == EXPAND_STACK_PARM
8148 || ! safe_from_p (target, exp, 1)
8149 /* Make sure we don't have a hard reg (such as the function's return
8150 value) live across basic blocks, if not optimizing. */
8151 || (!optimize && GET_CODE (target) == REG
8152 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8153 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8154
8155 if (target)
8156 emit_clr_insn (target);
8157
8158 op1 = gen_label_rtx ();
8159 jumpifnot (exp, op1);
8160
8161 if (target)
8162 emit_0_to_1_insn (target);
8163
8164 emit_label (op1);
8165 return ignore ? const0_rtx : target;
8166
8167 case TRUTH_NOT_EXPR:
8168 if (modifier == EXPAND_STACK_PARM)
8169 target = 0;
8170 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8171 /* The parser is careful to generate TRUTH_NOT_EXPR
8172 only with operands that are always zero or one. */
8173 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8174 target, 1, OPTAB_LIB_WIDEN);
8175 if (temp == 0)
8176 abort ();
8177 return temp;
8178
8179 case COMPOUND_EXPR:
8180 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8181 emit_queue ();
8182 return expand_expr_real (TREE_OPERAND (exp, 1),
8183 (ignore ? const0_rtx : target),
8184 VOIDmode, modifier, alt_rtl);
8185
8186 case COND_EXPR:
8187 /* If we would have a "singleton" (see below) were it not for a
8188 conversion in each arm, bring that conversion back out. */
8189 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8190 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8191 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8192 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8193 {
8194 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8195 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8196
8197 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8198 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8199 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8200 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8201 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8202 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8203 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8204 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8205 return expand_expr (build1 (NOP_EXPR, type,
8206 build (COND_EXPR, TREE_TYPE (iftrue),
8207 TREE_OPERAND (exp, 0),
8208 iftrue, iffalse)),
8209 target, tmode, modifier);
8210 }
8211
8212 {
8213 /* Note that COND_EXPRs whose type is a structure or union
8214 are required to be constructed to contain assignments to
8215 a temporary variable, so that we can evaluate them here
8216 for side effect only. If type is void, we must do likewise. */
8217
8218 /* If an arm of the branch requires a cleanup,
8219 only that cleanup is performed. */
8220
8221 tree singleton = 0;
8222 tree binary_op = 0, unary_op = 0;
8223
8224 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8225 convert it to our mode, if necessary. */
8226 if (integer_onep (TREE_OPERAND (exp, 1))
8227 && integer_zerop (TREE_OPERAND (exp, 2))
8228 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8229 {
8230 if (ignore)
8231 {
8232 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8233 modifier);
8234 return const0_rtx;
8235 }
8236
8237 if (modifier == EXPAND_STACK_PARM)
8238 target = 0;
8239 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8240 if (GET_MODE (op0) == mode)
8241 return op0;
8242
8243 if (target == 0)
8244 target = gen_reg_rtx (mode);
8245 convert_move (target, op0, unsignedp);
8246 return target;
8247 }
8248
8249 /* Check for X ? A + B : A. If we have this, we can copy A to the
8250 output and conditionally add B. Similarly for unary operations.
8251 Don't do this if X has side-effects because those side effects
8252 might affect A or B and the "?" operation is a sequence point in
8253 ANSI. (operand_equal_p tests for side effects.) */
8254
8255 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8256 && operand_equal_p (TREE_OPERAND (exp, 2),
8257 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8258 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8259 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8260 && operand_equal_p (TREE_OPERAND (exp, 1),
8261 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8262 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8263 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8264 && operand_equal_p (TREE_OPERAND (exp, 2),
8265 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8266 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8267 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8268 && operand_equal_p (TREE_OPERAND (exp, 1),
8269 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8270 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8271
8272 /* If we are not to produce a result, we have no target. Otherwise,
8273 if a target was specified use it; it will not be used as an
8274 intermediate target unless it is safe. If no target, use a
8275 temporary. */
8276
8277 if (ignore)
8278 temp = 0;
8279 else if (modifier == EXPAND_STACK_PARM)
8280 temp = assign_temp (type, 0, 0, 1);
8281 else if (original_target
8282 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8283 || (singleton && GET_CODE (original_target) == REG
8284 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8285 && original_target == var_rtx (singleton)))
8286 && GET_MODE (original_target) == mode
8287 #ifdef HAVE_conditional_move
8288 && (! can_conditionally_move_p (mode)
8289 || GET_CODE (original_target) == REG
8290 || TREE_ADDRESSABLE (type))
8291 #endif
8292 && (GET_CODE (original_target) != MEM
8293 || TREE_ADDRESSABLE (type)))
8294 temp = original_target;
8295 else if (TREE_ADDRESSABLE (type))
8296 abort ();
8297 else
8298 temp = assign_temp (type, 0, 0, 1);
8299
8300 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8301 do the test of X as a store-flag operation, do this as
8302 A + ((X != 0) << log C). Similarly for other simple binary
8303 operators. Only do for C == 1 if BRANCH_COST is low. */
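/* For example, X ? A + 4 : A becomes A + ((X != 0) << 2), replacing
   the branch with a store-flag operation and a shift.  */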
8304 if (temp && singleton && binary_op
8305 && (TREE_CODE (binary_op) == PLUS_EXPR
8306 || TREE_CODE (binary_op) == MINUS_EXPR
8307 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8308 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8309 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8310 : integer_onep (TREE_OPERAND (binary_op, 1)))
8311 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8312 {
8313 rtx result;
8314 tree cond;
8315 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8316 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8317 ? addv_optab : add_optab)
8318 : TREE_CODE (binary_op) == MINUS_EXPR
8319 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8320 ? subv_optab : sub_optab)
8321 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8322 : xor_optab);
8323
8324 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8325 if (singleton == TREE_OPERAND (exp, 1))
8326 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8327 else
8328 cond = TREE_OPERAND (exp, 0);
8329
8330 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8331 ? temp : NULL_RTX),
8332 mode, BRANCH_COST <= 1);
8333
8334 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8335 result = expand_shift (LSHIFT_EXPR, mode, result,
8336 build_int_2 (tree_log2
8337 (TREE_OPERAND
8338 (binary_op, 1)),
8339 0),
8340 (safe_from_p (temp, singleton, 1)
8341 ? temp : NULL_RTX), 0);
8342
8343 if (result)
8344 {
8345 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8346 return expand_binop (mode, boptab, op1, result, temp,
8347 unsignedp, OPTAB_LIB_WIDEN);
8348 }
8349 }
8350
8351 do_pending_stack_adjust ();
8352 NO_DEFER_POP;
8353 op0 = gen_label_rtx ();
8354
8355 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8356 {
8357 if (temp != 0)
8358 {
8359 /* If the target conflicts with the other operand of the
8360 binary op, we can't use it. Also, we can't use the target
8361 if it is a hard register, because evaluating the condition
8362 might clobber it. */
8363 if ((binary_op
8364 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8365 || (GET_CODE (temp) == REG
8366 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8367 temp = gen_reg_rtx (mode);
8368 store_expr (singleton, temp,
8369 modifier == EXPAND_STACK_PARM ? 2 : 0);
8370 }
8371 else
8372 expand_expr (singleton,
8373 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8374 if (singleton == TREE_OPERAND (exp, 1))
8375 jumpif (TREE_OPERAND (exp, 0), op0);
8376 else
8377 jumpifnot (TREE_OPERAND (exp, 0), op0);
8378
8379 start_cleanup_deferral ();
8380 if (binary_op && temp == 0)
8381 /* Just touch the other operand. */
8382 expand_expr (TREE_OPERAND (binary_op, 1),
8383 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8384 else if (binary_op)
8385 store_expr (build (TREE_CODE (binary_op), type,
8386 make_tree (type, temp),
8387 TREE_OPERAND (binary_op, 1)),
8388 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8389 else
8390 store_expr (build1 (TREE_CODE (unary_op), type,
8391 make_tree (type, temp)),
8392 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8393 op1 = op0;
8394 }
8395 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8396 comparison operator. If we have one of these cases, set the
8397 output to A, branch on A (cse will merge these two references),
8398 then set the output to FOO. */
8399 else if (temp
8400 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8401 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8402 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8403 TREE_OPERAND (exp, 1), 0)
8404 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8405 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8406 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8407 {
8408 if (GET_CODE (temp) == REG
8409 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8410 temp = gen_reg_rtx (mode);
8411 store_expr (TREE_OPERAND (exp, 1), temp,
8412 modifier == EXPAND_STACK_PARM ? 2 : 0);
8413 jumpif (TREE_OPERAND (exp, 0), op0);
8414
8415 start_cleanup_deferral ();
8416 if (TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8417 store_expr (TREE_OPERAND (exp, 2), temp,
8418 modifier == EXPAND_STACK_PARM ? 2 : 0);
8419 else
8420 expand_expr (TREE_OPERAND (exp, 2),
8421 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8422 op1 = op0;
8423 }
8424 else if (temp
8425 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8426 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8427 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8428 TREE_OPERAND (exp, 2), 0)
8429 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8430 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8431 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8432 {
8433 if (GET_CODE (temp) == REG
8434 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8435 temp = gen_reg_rtx (mode);
8436 store_expr (TREE_OPERAND (exp, 2), temp,
8437 modifier == EXPAND_STACK_PARM ? 2 : 0);
8438 jumpifnot (TREE_OPERAND (exp, 0), op0);
8439
8440 start_cleanup_deferral ();
8441 if (TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8442 store_expr (TREE_OPERAND (exp, 1), temp,
8443 modifier == EXPAND_STACK_PARM ? 2 : 0);
8444 else
8445 expand_expr (TREE_OPERAND (exp, 1),
8446 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8447 op1 = op0;
8448 }
8449 else
8450 {
8451 op1 = gen_label_rtx ();
8452 jumpifnot (TREE_OPERAND (exp, 0), op0);
8453
8454 start_cleanup_deferral ();
8455
8456 /* One branch of the cond can be void, if it never returns. For
8457 example, A ? throw : E. */
8458 if (temp != 0
8459 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8460 store_expr (TREE_OPERAND (exp, 1), temp,
8461 modifier == EXPAND_STACK_PARM ? 2 : 0);
8462 else
8463 expand_expr (TREE_OPERAND (exp, 1),
8464 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8465 end_cleanup_deferral ();
8466 emit_queue ();
8467 emit_jump_insn (gen_jump (op1));
8468 emit_barrier ();
8469 emit_label (op0);
8470 start_cleanup_deferral ();
8471 if (temp != 0
8472 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8473 store_expr (TREE_OPERAND (exp, 2), temp,
8474 modifier == EXPAND_STACK_PARM ? 2 : 0);
8475 else
8476 expand_expr (TREE_OPERAND (exp, 2),
8477 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8478 }
8479
8480 end_cleanup_deferral ();
8481
8482 emit_queue ();
8483 emit_label (op1);
8484 OK_DEFER_POP;
8485
8486 return temp;
8487 }
8488
8489 case TARGET_EXPR:
8490 {
8491 /* Something needs to be initialized, but we didn't know
8492 where that thing was when building the tree. For example,
8493 it could be the return value of a function, or a parameter
8494 to a function that is placed on the stack, or a temporary
8495 variable which must be passed by reference.
8496
8497 We guarantee that the expression will either be constructed
8498 or copied into our original target. */
8499
8500 tree slot = TREE_OPERAND (exp, 0);
8501 tree cleanups = NULL_TREE;
8502 tree exp1;
8503
8504 if (TREE_CODE (slot) != VAR_DECL)
8505 abort ();
8506
8507 if (! ignore)
8508 target = original_target;
8509
8510 /* Set this here so that if we get a target that refers to a
8511 register variable that's already been used, put_reg_into_stack
8512 knows that it should fix up those uses. */
8513 TREE_USED (slot) = 1;
8514
8515 if (target == 0)
8516 {
8517 if (DECL_RTL_SET_P (slot))
8518 {
8519 target = DECL_RTL (slot);
8520 /* If we have already expanded the slot, don't do
8521 it again. (mrs) */
8522 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8523 return target;
8524 }
8525 else
8526 {
8527 target = assign_temp (type, 2, 0, 1);
8528 /* All temp slots at this level must not conflict. */
8529 preserve_temp_slots (target);
8530 SET_DECL_RTL (slot, target);
8531 if (TREE_ADDRESSABLE (slot))
8532 put_var_into_stack (slot, /*rescan=*/false);
8533
8534 /* Since SLOT is not known to the called function
8535 to belong to its stack frame, we must build an explicit
8536 cleanup. This case occurs when we must build up a reference
8537 to pass the reference as an argument. In this case,
8538 it is very likely that such a reference need not be
8539 built here. */
8540
8541 if (TREE_OPERAND (exp, 2) == 0)
8542 TREE_OPERAND (exp, 2)
8543 = lang_hooks.maybe_build_cleanup (slot);
8544 cleanups = TREE_OPERAND (exp, 2);
8545 }
8546 }
8547 else
8548 {
8549 /* This case does occur when expanding a parameter which
8550 needs to be constructed on the stack. The target
8551 is the actual stack address that we want to initialize.
8552 The function we call will perform the cleanup in this case. */
8553
8554 /* If we have already assigned it space, use that space,
8555 not the target that we were passed in, as our target
8556 parameter is only a hint. */
8557 if (DECL_RTL_SET_P (slot))
8558 {
8559 target = DECL_RTL (slot);
8560 /* If we have already expanded the slot, don't do
8561 it again. (mrs) */
8562 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8563 return target;
8564 }
8565 else
8566 {
8567 SET_DECL_RTL (slot, target);
8568 /* If we must have an addressable slot, then make sure that
8569 the RTL that we just stored in slot is OK. */
8570 if (TREE_ADDRESSABLE (slot))
8571 put_var_into_stack (slot, /*rescan=*/true);
8572 }
8573 }
8574
8575 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8576 /* Mark it as expanded. */
8577 TREE_OPERAND (exp, 1) = NULL_TREE;
8578
8579 store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
8580
8581 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
8582
8583 return target;
8584 }
8585
8586 case INIT_EXPR:
8587 {
8588 tree lhs = TREE_OPERAND (exp, 0);
8589 tree rhs = TREE_OPERAND (exp, 1);
8590
8591 temp = expand_assignment (lhs, rhs, ! ignore);
8592 return temp;
8593 }
8594
8595 case MODIFY_EXPR:
8596 {
8597 /* If lhs is complex, expand calls in rhs before computing it.
8598 That's so we don't compute a pointer and save it over a
8599 call. If lhs is simple, compute it first so we can give it
8600 as a target if the rhs is just a call. This avoids an
8601 extra temp and copy, and prevents a partial subsumption
8602 that makes bad code. Actually we could treat
8603 component_ref's of vars like vars. */
8604
8605 tree lhs = TREE_OPERAND (exp, 0);
8606 tree rhs = TREE_OPERAND (exp, 1);
8607
8608 temp = 0;
8609
8610 /* Check for |= or &= of a bitfield of size one into another bitfield
8611 of size 1. In this case, (unless we need the result of the
8612 assignment) we can do this more efficiently with a
8613 test followed by an assignment, if necessary.
8614
8615 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8616 things change so we do, this code should be enhanced to
8617 support it. */
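	 /* A source-level sketch of the transformation, with hypothetical
	    field names.  Given

		struct s { unsigned int a : 1; unsigned int b : 1; };

	    the statement p->a |= p->b is emitted roughly as

		if (p->b) p->a = 1;

	    and p->a &= p->b roughly as

		if (! p->b) p->a = 0;  */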
8618 if (ignore
8619 && TREE_CODE (lhs) == COMPONENT_REF
8620 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8621 || TREE_CODE (rhs) == BIT_AND_EXPR)
8622 && TREE_OPERAND (rhs, 0) == lhs
8623 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8624 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8625 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8626 {
8627 rtx label = gen_label_rtx ();
8628
8629 do_jump (TREE_OPERAND (rhs, 1),
8630 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8631 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8632 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8633 (TREE_CODE (rhs) == BIT_IOR_EXPR
8634 ? integer_one_node
8635 : integer_zero_node)),
8636 0);
8637 do_pending_stack_adjust ();
8638 emit_label (label);
8639 return const0_rtx;
8640 }
8641
8642 temp = expand_assignment (lhs, rhs, ! ignore);
8643
8644 return temp;
8645 }
8646
8647 case RETURN_EXPR:
8648 if (!TREE_OPERAND (exp, 0))
8649 expand_null_return ();
8650 else
8651 expand_return (TREE_OPERAND (exp, 0));
8652 return const0_rtx;
8653
8654 case PREINCREMENT_EXPR:
8655 case PREDECREMENT_EXPR:
8656 return expand_increment (exp, 0, ignore);
8657
8658 case POSTINCREMENT_EXPR:
8659 case POSTDECREMENT_EXPR:
8660 /* Faster to treat as pre-increment if result is not used. */
8661 return expand_increment (exp, ! ignore, ignore);
8662
8663 case ADDR_EXPR:
8664 if (modifier == EXPAND_STACK_PARM)
8665 target = 0;
8666 /* Are we taking the address of a nested function? */
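	 /* A minimal GNU C sketch of when this triggers (hypothetical
	    names):

		void f (int x)
		{
		  int g (void) { return x; }
		  int (*fp) (void) = g;
		  ...
		}

	    Taking the address of g needs a trampoline because g uses
	    the static chain to reach x.  */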
8667 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8668 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8669 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8670 && ! TREE_STATIC (exp))
8671 {
8672 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8673 op0 = force_operand (op0, target);
8674 }
8675 /* If we are taking the address of something erroneous, just
8676 return a zero. */
8677 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8678 return const0_rtx;
8679 /* If we are taking the address of a constant and are at the
8680 top level, we have to use output_constant_def since we can't
8681 call force_const_mem at top level. */
8682 else if (cfun == 0
8683 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8684 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8685 == 'c')))
8686 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8687 else
8688 {
8689 /* We make sure to pass const0_rtx down if we came in with
8690 ignore set, to avoid doing the cleanups twice for something. */
8691 op0 = expand_expr (TREE_OPERAND (exp, 0),
8692 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8693 (modifier == EXPAND_INITIALIZER
8694 ? modifier : EXPAND_CONST_ADDRESS));
8695
8696 /* If we are going to ignore the result, OP0 will have been set
8697 to const0_rtx, so just return it. Don't get confused and
8698 think we are taking the address of the constant. */
8699 if (ignore)
8700 return op0;
8701
8702 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8703 clever and return a REG when given a MEM. */
8704 op0 = protect_from_queue (op0, 1);
8705
8706 /* We would like the object in memory. If it is a constant, we can
8707 have it be statically allocated into memory. For a non-constant,
8708 we need to allocate some memory and store the value into it. */
8709
8710 if (CONSTANT_P (op0))
8711 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8712 op0);
8713 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8714 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8715 || GET_CODE (op0) == PARALLEL || GET_CODE (op0) == LO_SUM)
8716 {
8717 /* If the operand is a SAVE_EXPR, we can deal with this by
8718 forcing the SAVE_EXPR into memory. */
8719 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
8720 {
8721 put_var_into_stack (TREE_OPERAND (exp, 0),
8722 /*rescan=*/true);
8723 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
8724 }
8725 else
8726 {
8727 /* If this object is in a register, it can't be BLKmode. */
8728 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8729 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8730
8731 if (GET_CODE (op0) == PARALLEL)
8732 /* Handle calls that pass values in multiple
8733 non-contiguous locations. The Irix 6 ABI has examples
8734 of this. */
8735 emit_group_store (memloc, op0, inner_type,
8736 int_size_in_bytes (inner_type));
8737 else
8738 emit_move_insn (memloc, op0);
8739
8740 op0 = memloc;
8741 }
8742 }
8743
8744 if (GET_CODE (op0) != MEM)
8745 abort ();
8746
8747 mark_temp_addr_taken (op0);
8748 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8749 {
8750 op0 = XEXP (op0, 0);
8751 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
8752 op0 = convert_memory_address (ptr_mode, op0);
8753 return op0;
8754 }
8755
8756 /* If OP0 is not aligned at least as much as the type requires, we
8757 need to make a temporary, copy OP0 to it, and take the address of
8758 the temporary. We want to use the alignment of the type, not of
8759 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8760 the test for BLKmode means that can't happen. The test for
8761 BLKmode is because we never make mis-aligned MEMs with
8762 non-BLKmode.
8763
8764 We don't need to do this at all if the machine doesn't have
8765 strict alignment. */
8766 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8767 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8768 > MEM_ALIGN (op0))
8769 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8770 {
8771 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8772 rtx new;
8773
8774 if (TYPE_ALIGN_OK (inner_type))
8775 abort ();
8776
8777 if (TREE_ADDRESSABLE (inner_type))
8778 {
8779 /* We can't make a bitwise copy of this object, so fail. */
8780 error ("cannot take the address of an unaligned member");
8781 return const0_rtx;
8782 }
8783
8784 new = assign_stack_temp_for_type
8785 (TYPE_MODE (inner_type),
8786 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8787 : int_size_in_bytes (inner_type),
8788 1, build_qualified_type (inner_type,
8789 (TYPE_QUALS (inner_type)
8790 | TYPE_QUAL_CONST)));
8791
8792 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
8793 (modifier == EXPAND_STACK_PARM
8794 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
8795
8796 op0 = new;
8797 }
8798
8799 op0 = force_operand (XEXP (op0, 0), target);
8800 }
8801
8802 if (flag_force_addr
8803 && GET_CODE (op0) != REG
8804 && modifier != EXPAND_CONST_ADDRESS
8805 && modifier != EXPAND_INITIALIZER
8806 && modifier != EXPAND_SUM)
8807 op0 = force_reg (Pmode, op0);
8808
8809 if (GET_CODE (op0) == REG
8810 && ! REG_USERVAR_P (op0))
8811 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8812
8813 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
8814 op0 = convert_memory_address (ptr_mode, op0);
8815
8816 return op0;
8817
8818 case ENTRY_VALUE_EXPR:
8819 abort ();
8820
8821 /* COMPLEX type for Extended Pascal & Fortran */
8822 case COMPLEX_EXPR:
8823 {
8824 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8825 rtx insns;
8826
8827 /* Get the rtx code of the operands. */
8828 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8829 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8830
8831 if (! target)
8832 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8833
8834 start_sequence ();
8835
8836 /* Move the real (op0) and imaginary (op1) parts to their location. */
8837 emit_move_insn (gen_realpart (mode, target), op0);
8838 emit_move_insn (gen_imagpart (mode, target), op1);
8839
8840 insns = get_insns ();
8841 end_sequence ();
8842
8843 /* Complex construction should appear as a single unit. */
8844 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8845 each with a separate pseudo as destination.
8846 It's not correct for flow to treat them as a unit. */
8847 if (GET_CODE (target) != CONCAT)
8848 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8849 else
8850 emit_insn (insns);
8851
8852 return target;
8853 }
8854
8855 case REALPART_EXPR:
8856 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8857 return gen_realpart (mode, op0);
8858
8859 case IMAGPART_EXPR:
8860 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8861 return gen_imagpart (mode, op0);
8862
8863 case CONJ_EXPR:
8864 {
8865 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8866 rtx imag_t;
8867 rtx insns;
8868
8869 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8870
8871 if (! target)
8872 target = gen_reg_rtx (mode);
8873
8874 start_sequence ();
8875
8876 /* Store the realpart and the negated imagpart to target. */
8877 emit_move_insn (gen_realpart (partmode, target),
8878 gen_realpart (partmode, op0));
8879
8880 imag_t = gen_imagpart (partmode, target);
8881 temp = expand_unop (partmode,
8882 ! unsignedp && flag_trapv
8883 && (GET_MODE_CLASS(partmode) == MODE_INT)
8884 ? negv_optab : neg_optab,
8885 gen_imagpart (partmode, op0), imag_t, 0);
8886 if (temp != imag_t)
8887 emit_move_insn (imag_t, temp);
8888
8889 insns = get_insns ();
8890 end_sequence ();
8891
8892 /* Conjugate should appear as a single unit.
8893 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8894 each with a separate pseudo as destination.
8895 It's not correct for flow to treat them as a unit. */
8896 if (GET_CODE (target) != CONCAT)
8897 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8898 else
8899 emit_insn (insns);
8900
8901 return target;
8902 }
8903
8904 case TRY_CATCH_EXPR:
8905 {
8906 tree handler = TREE_OPERAND (exp, 1);
8907
8908 expand_eh_region_start ();
8909
8910 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8911
8912 expand_eh_region_end_cleanup (handler);
8913
8914 return op0;
8915 }
8916
8917 case TRY_FINALLY_EXPR:
8918 {
8919 tree try_block = TREE_OPERAND (exp, 0);
8920 tree finally_block = TREE_OPERAND (exp, 1);
8921
8922 if (!optimize || unsafe_for_reeval (finally_block) > 1)
8923 {
8924 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
8925 is not sufficient, so we cannot expand the block twice.
8926 Instead we play games with GOTO_SUBROUTINE_EXPR to let us
8927 expand the thing only once. */
8928 /* When not optimizing, we go ahead with this form since
8929 (1) user breakpoints operate more predictably without
8930 code duplication, and
8931 (2) we're not running any of the global optimizers
8932 that would explode in time/space with the highly
8933 connected CFG created by the indirect branching. */
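	     /* The shape of the code emitted below, using computed-goto
		notation only as a rough analogy:

		    <try block>
		    return_link = &&resume;	(the registered cleanup)
		    goto finally;
		  resume:
		    goto done;
		  finally:
		    <finally block>
		    goto *return_link;
		  done:
		    ;
	     */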
8934
8935 rtx finally_label = gen_label_rtx ();
8936 rtx done_label = gen_label_rtx ();
8937 rtx return_link = gen_reg_rtx (Pmode);
8938 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8939 (tree) finally_label, (tree) return_link);
8940 TREE_SIDE_EFFECTS (cleanup) = 1;
8941
8942 /* Start a new binding layer that will keep track of all cleanup
8943 actions to be performed. */
8944 expand_start_bindings (2);
8945 target_temp_slot_level = temp_slot_level;
8946
8947 expand_decl_cleanup (NULL_TREE, cleanup);
8948 op0 = expand_expr (try_block, target, tmode, modifier);
8949
8950 preserve_temp_slots (op0);
8951 expand_end_bindings (NULL_TREE, 0, 0);
8952 emit_jump (done_label);
8953 emit_label (finally_label);
8954 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8955 emit_indirect_jump (return_link);
8956 emit_label (done_label);
8957 }
8958 else
8959 {
8960 expand_start_bindings (2);
8961 target_temp_slot_level = temp_slot_level;
8962
8963 expand_decl_cleanup (NULL_TREE, finally_block);
8964 op0 = expand_expr (try_block, target, tmode, modifier);
8965
8966 preserve_temp_slots (op0);
8967 expand_end_bindings (NULL_TREE, 0, 0);
8968 }
8969
8970 return op0;
8971 }
8972
8973 case GOTO_SUBROUTINE_EXPR:
8974 {
8975 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8976 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8977 rtx return_address = gen_label_rtx ();
8978 emit_move_insn (return_link,
8979 gen_rtx_LABEL_REF (Pmode, return_address));
8980 emit_jump (subr);
8981 emit_label (return_address);
8982 return const0_rtx;
8983 }
8984
8985 case VA_ARG_EXPR:
8986 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8987
8988 case EXC_PTR_EXPR:
8989 return get_exception_pointer (cfun);
8990
8991 case FDESC_EXPR:
8992 /* Function descriptors are not valid except for as
8993 initialization constants, and should not be expanded. */
8994 abort ();
8995
8996 default:
8997 /* ??? Use (*fun) form because expand_expr is a macro. */
8998 return (*lang_hooks.expand_expr) (exp, original_target, tmode,
8999 modifier, alt_rtl);
9000 }
9001
9002 /* Here to do an ordinary binary operator, generating an instruction
9003 from the optab already placed in `this_optab'. */
9004 binop:
9005 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9006 subtarget, &op0, &op1, 0);
9007 binop2:
9008 if (modifier == EXPAND_STACK_PARM)
9009 target = 0;
9010 temp = expand_binop (mode, this_optab, op0, op1, target,
9011 unsignedp, OPTAB_LIB_WIDEN);
9012 if (temp == 0)
9013 abort ();
9014 return temp;
9015 }
9016 \f
9017 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9018 when applied to the address of EXP produces an address known to be
9019 aligned more than BIGGEST_ALIGNMENT. */
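/* In other words, OFFSET must have (modulo conversions) the shape

	(- (long) &EXP) & MASK

   where MASK is a compile-time constant of the form 2**N - 1 larger than
   BIGGEST_ALIGNMENT / BITS_PER_UNIT; adding such an offset to the address
   of EXP rounds it up to a 2**N-byte boundary.  (A sketch, in source
   notation, of the pattern matched below.)  */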
9020
9021 static int
9022 is_aligning_offset (tree offset, tree exp)
9023 {
9024 /* Strip off any conversions. */
9025 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9026 || TREE_CODE (offset) == NOP_EXPR
9027 || TREE_CODE (offset) == CONVERT_EXPR)
9028 offset = TREE_OPERAND (offset, 0);
9029
9030 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9031 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9032 if (TREE_CODE (offset) != BIT_AND_EXPR
9033 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9034 || compare_tree_int (TREE_OPERAND (offset, 1),
9035 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
9036 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9037 return 0;
9038
9039 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9040 It must be NEGATE_EXPR. Then strip any more conversions. */
9041 offset = TREE_OPERAND (offset, 0);
9042 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9043 || TREE_CODE (offset) == NOP_EXPR
9044 || TREE_CODE (offset) == CONVERT_EXPR)
9045 offset = TREE_OPERAND (offset, 0);
9046
9047 if (TREE_CODE (offset) != NEGATE_EXPR)
9048 return 0;
9049
9050 offset = TREE_OPERAND (offset, 0);
9051 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9052 || TREE_CODE (offset) == NOP_EXPR
9053 || TREE_CODE (offset) == CONVERT_EXPR)
9054 offset = TREE_OPERAND (offset, 0);
9055
9056 /* This must now be the address of EXP. */
9057 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
9058 }
9059 \f
9060 /* Return the tree node if an ARG corresponds to a string constant or zero
9061 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9062 in bytes within the string that ARG is accessing. The type of the
9063 offset will be `sizetype'. */
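/* For example, if ARG represents "hello" + 2 (a PLUS_EXPR of an ADDR_EXPR
   of the STRING_CST and the constant 2), this returns the STRING_CST
   "hello" and sets *PTR_OFFSET to 2.  */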
9064
9065 tree
9066 string_constant (tree arg, tree *ptr_offset)
9067 {
9068 STRIP_NOPS (arg);
9069
9070 if (TREE_CODE (arg) == ADDR_EXPR
9071 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9072 {
9073 *ptr_offset = size_zero_node;
9074 return TREE_OPERAND (arg, 0);
9075 }
9076 else if (TREE_CODE (arg) == PLUS_EXPR)
9077 {
9078 tree arg0 = TREE_OPERAND (arg, 0);
9079 tree arg1 = TREE_OPERAND (arg, 1);
9080
9081 STRIP_NOPS (arg0);
9082 STRIP_NOPS (arg1);
9083
9084 if (TREE_CODE (arg0) == ADDR_EXPR
9085 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9086 {
9087 *ptr_offset = convert (sizetype, arg1);
9088 return TREE_OPERAND (arg0, 0);
9089 }
9090 else if (TREE_CODE (arg1) == ADDR_EXPR
9091 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9092 {
9093 *ptr_offset = convert (sizetype, arg0);
9094 return TREE_OPERAND (arg1, 0);
9095 }
9096 }
9097
9098 return 0;
9099 }
9100 \f
9101 /* Expand code for a post- or pre- increment or decrement
9102 and return the RTX for the result.
9103 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
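/* For instance, when the value is used, i++ yields the old value of i
   (POST is 1) while ++i yields the new one (POST is 0).  In the hard
   cases the code below falls back to expanding the equivalent
   assignment i = i + 1 (or i = i - 1) built with PLUS_EXPR/MINUS_EXPR.  */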
9104
9105 static rtx
9106 expand_increment (tree exp, int post, int ignore)
9107 {
9108 rtx op0, op1;
9109 rtx temp, value;
9110 tree incremented = TREE_OPERAND (exp, 0);
9111 optab this_optab = add_optab;
9112 int icode;
9113 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9114 int op0_is_copy = 0;
9115 int single_insn = 0;
9116 /* 1 means we can't store into OP0 directly,
9117 because it is a subreg narrower than a word,
9118 and we don't dare clobber the rest of the word. */
9119 int bad_subreg = 0;
9120
9121 /* Stabilize any component ref that might need to be
9122 evaluated more than once below. */
9123 if (!post
9124 || TREE_CODE (incremented) == BIT_FIELD_REF
9125 || (TREE_CODE (incremented) == COMPONENT_REF
9126 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9127 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9128 incremented = stabilize_reference (incremented);
9129 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9130 ones into save exprs so that they don't accidentally get evaluated
9131 more than once by the code below. */
9132 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9133 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9134 incremented = save_expr (incremented);
9135
9136 /* Compute the operands as RTX.
9137 Note whether OP0 is the actual lvalue or a copy of it:
9138 I believe it is a copy iff it is a register or subreg
9139 and insns were generated in computing it. */
9140
9141 temp = get_last_insn ();
9142 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9143
9144 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9145 in place but instead must do sign- or zero-extension during assignment,
9146 so we copy it into a new register and let the code below use it as
9147 a copy.
9148
9149 Note that we can safely modify this SUBREG since it is known not to be
9150 shared (it was made by the expand_expr call above). */
9151
9152 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9153 {
9154 if (post)
9155 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9156 else
9157 bad_subreg = 1;
9158 }
9159 else if (GET_CODE (op0) == SUBREG
9160 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9161 {
9162 /* We cannot increment this SUBREG in place. If we are
9163 post-incrementing, get a copy of the old value. Otherwise,
9164 just mark that we cannot increment in place. */
9165 if (post)
9166 op0 = copy_to_reg (op0);
9167 else
9168 bad_subreg = 1;
9169 }
9170
9171 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9172 && temp != get_last_insn ());
9173 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9174
9175 /* Decide whether incrementing or decrementing. */
9176 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9177 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9178 this_optab = sub_optab;
9179
9180 /* Convert decrement by a constant into a negative increment. */
9181 if (this_optab == sub_optab
9182 && GET_CODE (op1) == CONST_INT)
9183 {
9184 op1 = GEN_INT (-INTVAL (op1));
9185 this_optab = add_optab;
9186 }
9187
9188 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9189 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9190
9191 /* For a preincrement, see if we can do this with a single instruction. */
9192 if (!post)
9193 {
9194 icode = (int) this_optab->handlers[(int) mode].insn_code;
9195 if (icode != (int) CODE_FOR_nothing
9196 /* Make sure that OP0 is valid for operands 0 and 1
9197 of the insn we want to queue. */
9198 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9199 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9200 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9201 single_insn = 1;
9202 }
9203
9204 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9205 then we cannot just increment OP0. We must therefore contrive to
9206 increment the original value. Then, for postincrement, we can return
9207 OP0 since it is a copy of the old value. For preincrement, expand here
9208 unless we can do it with a single insn.
9209
9210 Likewise if storing directly into OP0 would clobber high bits
9211 we need to preserve (bad_subreg). */
9212 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9213 {
9214 /* This is the easiest way to increment the value wherever it is.
9215 Problems with multiple evaluation of INCREMENTED are prevented
9216 because either (1) it is a component_ref or preincrement,
9217 in which case it was stabilized above, or (2) it is an array_ref
9218 with constant index in an array in a register, which is
9219 safe to reevaluate. */
9220 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9221 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9222 ? MINUS_EXPR : PLUS_EXPR),
9223 TREE_TYPE (exp),
9224 incremented,
9225 TREE_OPERAND (exp, 1));
9226
9227 while (TREE_CODE (incremented) == NOP_EXPR
9228 || TREE_CODE (incremented) == CONVERT_EXPR)
9229 {
9230 newexp = convert (TREE_TYPE (incremented), newexp);
9231 incremented = TREE_OPERAND (incremented, 0);
9232 }
9233
9234 temp = expand_assignment (incremented, newexp, ! post && ! ignore);
9235 return post ? op0 : temp;
9236 }
9237
9238 if (post)
9239 {
9240 /* We have a true reference to the value in OP0.
9241 If there is an insn to add or subtract in this mode, queue it.
9242 Queuing the increment insn avoids the register shuffling
9243 that often results if we must increment now and first save
9244 the old value for subsequent use. */
9245
9246 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9247 op0 = stabilize (op0);
9248 #endif
9249
9250 icode = (int) this_optab->handlers[(int) mode].insn_code;
9251 if (icode != (int) CODE_FOR_nothing
9252 /* Make sure that OP0 is valid for operands 0 and 1
9253 of the insn we want to queue. */
9254 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9255 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9256 {
9257 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9258 op1 = force_reg (mode, op1);
9259
9260 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9261 }
9262 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9263 {
9264 rtx addr = (general_operand (XEXP (op0, 0), mode)
9265 ? force_reg (Pmode, XEXP (op0, 0))
9266 : copy_to_reg (XEXP (op0, 0)));
9267 rtx temp, result;
9268
9269 op0 = replace_equiv_address (op0, addr);
9270 temp = force_reg (GET_MODE (op0), op0);
9271 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9272 op1 = force_reg (mode, op1);
9273
9274 /* The increment queue is LIFO, thus we have to `queue'
9275 the instructions in reverse order. */
9276 enqueue_insn (op0, gen_move_insn (op0, temp));
9277 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9278 return result;
9279 }
9280 }
9281
9282 /* Preincrement, or we can't increment with one simple insn. */
9283 if (post)
9284 /* Save a copy of the value before inc or dec, to return it later. */
9285 temp = value = copy_to_reg (op0);
9286 else
9287 /* Arrange to return the incremented value. */
9288 /* Copy the rtx because expand_binop will protect from the queue,
9289 and the results of that would be invalid for us to return
9290 if our caller does emit_queue before using our result. */
9291 temp = copy_rtx (value = op0);
9292
9293 /* Increment however we can. */
9294 op1 = expand_binop (mode, this_optab, value, op1, op0,
9295 TYPE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9296
9297 /* Make sure the value is stored into OP0. */
9298 if (op1 != op0)
9299 emit_move_insn (op0, op1);
9300
9301 return temp;
9302 }
9303 \f
9304 /* Generate code to calculate EXP using a store-flag instruction
9305 and return an rtx for the result. EXP is either a comparison
9306 or a TRUTH_NOT_EXPR whose operand is a comparison.
9307
9308 If TARGET is nonzero, store the result there if convenient.
9309
9310 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9311 cheap.
9312
9313 Return zero if there is no suitable set-flag instruction
9314 available on this machine.
9315
9316 Once expand_expr has been called on the arguments of the comparison,
9317 we are committed to doing the store flag, since it is not safe to
9318 re-evaluate the expression. We emit the store-flag insn by calling
9319 emit_store_flag, but only expand the arguments if we have a reason
9320 to believe that emit_store_flag will be successful. If we think that
9321 it will, but it isn't, we have to simulate the store-flag with a
9322 set/jump/set sequence. */
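/* For example, for R = (A < B) we try to emit a single store-flag (scc)
   instruction; if that fails, the fallback at the end emits, roughly,

	R = 1;  if (A < B) goto L;  R = 0;  L: ;

   (or the inverse when a TRUTH_NOT_EXPR asked for the inverted result).  */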
9323
9324 static rtx
9325 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
9326 {
9327 enum rtx_code code;
9328 tree arg0, arg1, type;
9329 tree tem;
9330 enum machine_mode operand_mode;
9331 int invert = 0;
9332 int unsignedp;
9333 rtx op0, op1;
9334 enum insn_code icode;
9335 rtx subtarget = target;
9336 rtx result, label;
9337
9338 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9339 result at the end. We can't simply invert the test since it would
9340 have already been inverted if it were valid. This case occurs for
9341 some floating-point comparisons. */
9342
9343 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9344 invert = 1, exp = TREE_OPERAND (exp, 0);
9345
9346 arg0 = TREE_OPERAND (exp, 0);
9347 arg1 = TREE_OPERAND (exp, 1);
9348
9349 /* Don't crash if the comparison was erroneous. */
9350 if (arg0 == error_mark_node || arg1 == error_mark_node)
9351 return const0_rtx;
9352
9353 type = TREE_TYPE (arg0);
9354 operand_mode = TYPE_MODE (type);
9355 unsignedp = TYPE_UNSIGNED (type);
9356
9357 /* We won't bother with BLKmode store-flag operations because it would mean
9358 passing a lot of information to emit_store_flag. */
9359 if (operand_mode == BLKmode)
9360 return 0;
9361
9362 /* We won't bother with store-flag operations involving function pointers
9363 when function pointers must be canonicalized before comparisons. */
9364 #ifdef HAVE_canonicalize_funcptr_for_compare
9365 if (HAVE_canonicalize_funcptr_for_compare
9366 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9367 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9368 == FUNCTION_TYPE))
9369 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9370 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9371 == FUNCTION_TYPE))))
9372 return 0;
9373 #endif
9374
9375 STRIP_NOPS (arg0);
9376 STRIP_NOPS (arg1);
9377
9378 /* Get the rtx comparison code to use. We know that EXP is a comparison
9379 operation of some type. Some comparisons against 1 and -1 can be
9380 converted to comparisons with zero. Do so here so that the tests
9381 below will be aware that we have a comparison with zero. These
9382 tests will not catch constants in the first operand, but constants
9383 are rarely passed as the first operand. */
9384
9385 switch (TREE_CODE (exp))
9386 {
9387 case EQ_EXPR:
9388 code = EQ;
9389 break;
9390 case NE_EXPR:
9391 code = NE;
9392 break;
9393 case LT_EXPR:
9394 if (integer_onep (arg1))
9395 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9396 else
9397 code = unsignedp ? LTU : LT;
9398 break;
9399 case LE_EXPR:
9400 if (! unsignedp && integer_all_onesp (arg1))
9401 arg1 = integer_zero_node, code = LT;
9402 else
9403 code = unsignedp ? LEU : LE;
9404 break;
9405 case GT_EXPR:
9406 if (! unsignedp && integer_all_onesp (arg1))
9407 arg1 = integer_zero_node, code = GE;
9408 else
9409 code = unsignedp ? GTU : GT;
9410 break;
9411 case GE_EXPR:
9412 if (integer_onep (arg1))
9413 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9414 else
9415 code = unsignedp ? GEU : GE;
9416 break;
9417
9418 case UNORDERED_EXPR:
9419 code = UNORDERED;
9420 break;
9421 case ORDERED_EXPR:
9422 code = ORDERED;
9423 break;
9424 case UNLT_EXPR:
9425 code = UNLT;
9426 break;
9427 case UNLE_EXPR:
9428 code = UNLE;
9429 break;
9430 case UNGT_EXPR:
9431 code = UNGT;
9432 break;
9433 case UNGE_EXPR:
9434 code = UNGE;
9435 break;
9436 case UNEQ_EXPR:
9437 code = UNEQ;
9438 break;
9439
9440 default:
9441 abort ();
9442 }
9443
9444 /* Put a constant second. */
9445 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9446 {
9447 tem = arg0; arg0 = arg1; arg1 = tem;
9448 code = swap_condition (code);
9449 }
9450
9451 /* If this is an equality or inequality test of a single bit, we can
9452 do this by shifting the bit being tested to the low-order bit and
9453 masking the result with the constant 1. If the condition was EQ,
9454 we xor it with 1. This does not require an scc insn and is faster
9455 than an scc insn even if we have it.
9456
9457 The code to make this transformation was moved into fold_single_bit_test,
9458 so we just call into the folder and expand its result. */
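  /* E.g. (X & 8) != 0 becomes (X >> 3) & 1, and (X & 8) == 0 becomes
     ((X >> 3) & 1) ^ 1; these are sketches of what fold_single_bit_test
     produces for a tested bit at position 3.  */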
9459
9460 if ((code == NE || code == EQ)
9461 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9462 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9463 {
9464 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
9465 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9466 arg0, arg1, type),
9467 target, VOIDmode, EXPAND_NORMAL);
9468 }
9469
9470 /* Now see if we are likely to be able to do this. Return if not. */
9471 if (! can_compare_p (code, operand_mode, ccp_store_flag))
9472 return 0;
9473
9474 icode = setcc_gen_code[(int) code];
9475 if (icode == CODE_FOR_nothing
9476 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9477 {
9478 /* We can only do this if it is one of the special cases that
9479 can be handled without an scc insn. */
9480 if ((code == LT && integer_zerop (arg1))
9481 || (! only_cheap && code == GE && integer_zerop (arg1)))
9482 ;
9483 else if (BRANCH_COST >= 0
9484 && ! only_cheap && (code == NE || code == EQ)
9485 && TREE_CODE (type) != REAL_TYPE
9486 && ((abs_optab->handlers[(int) operand_mode].insn_code
9487 != CODE_FOR_nothing)
9488 || (ffs_optab->handlers[(int) operand_mode].insn_code
9489 != CODE_FOR_nothing)))
9490 ;
9491 else
9492 return 0;
9493 }
9494
9495 if (! get_subtarget (target)
9496 || GET_MODE (subtarget) != operand_mode)
9497 subtarget = 0;
9498
9499 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
9500
9501 if (target == 0)
9502 target = gen_reg_rtx (mode);
9503
9504 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9505 because, if emit_store_flag does anything, it will succeed and
9506 OP0 and OP1 will not be used subsequently. */
9507
9508 result = emit_store_flag (target, code,
9509 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9510 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9511 operand_mode, unsignedp, 1);
9512
9513 if (result)
9514 {
9515 if (invert)
9516 result = expand_binop (mode, xor_optab, result, const1_rtx,
9517 result, 0, OPTAB_LIB_WIDEN);
9518 return result;
9519 }
9520
9521 /* If this failed, we have to do this with set/compare/jump/set code. */
9522 if (GET_CODE (target) != REG
9523 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9524 target = gen_reg_rtx (GET_MODE (target));
9525
9526 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9527 result = compare_from_rtx (op0, op1, code, unsignedp,
9528 operand_mode, NULL_RTX);
9529 if (GET_CODE (result) == CONST_INT)
9530 return (((result == const0_rtx && ! invert)
9531 || (result != const0_rtx && invert))
9532 ? const0_rtx : const1_rtx);
9533
9534 /* The code of RESULT may not match CODE if compare_from_rtx
9535 decided to swap its operands and reverse the original code.
9536
9537 We know that compare_from_rtx returns either a CONST_INT or
9538 a new comparison code, so it is safe to just extract the
9539 code from RESULT. */
9540 code = GET_CODE (result);
9541
9542 label = gen_label_rtx ();
9543 if (bcc_gen_fctn[(int) code] == 0)
9544 abort ();
9545
9546 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9547 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9548 emit_label (label);
9549
9550 return target;
9551 }
9552 \f
9553
9554 /* Stubs in case we haven't got a casesi insn. */
9555 #ifndef HAVE_casesi
9556 # define HAVE_casesi 0
9557 # define gen_casesi(a, b, c, d, e) (0)
9558 # define CODE_FOR_casesi CODE_FOR_nothing
9559 #endif
9560
9561 /* If the machine does not have a case insn that compares the bounds,
9562 this means extra overhead for dispatch tables, which raises the
9563 threshold for using them. */
9564 #ifndef CASE_VALUES_THRESHOLD
9565 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9566 #endif /* CASE_VALUES_THRESHOLD */
9567
9568 unsigned int
9569 case_values_threshold (void)
9570 {
9571 return CASE_VALUES_THRESHOLD;
9572 }
9573
9574 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9575 0 otherwise (i.e. if there is no casesi instruction). */
9576 int
9577 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9578 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
9579 {
9580 enum machine_mode index_mode = SImode;
9581 int index_bits = GET_MODE_BITSIZE (index_mode);
9582 rtx op1, op2, index;
9583 enum machine_mode op_mode;
9584
9585 if (! HAVE_casesi)
9586 return 0;
9587
9588 /* Convert the index to SImode. */
9589 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9590 {
9591 enum machine_mode omode = TYPE_MODE (index_type);
9592 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
9593
9594 /* We must handle the endpoints in the original mode. */
9595 index_expr = build (MINUS_EXPR, index_type,
9596 index_expr, minval);
9597 minval = integer_zero_node;
9598 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9599 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9600 omode, 1, default_label);
9601 /* Now we can safely truncate. */
9602 index = convert_to_mode (index_mode, index, 0);
9603 }
9604 else
9605 {
9606 if (TYPE_MODE (index_type) != index_mode)
9607 {
9608 index_expr = convert (lang_hooks.types.type_for_size
9609 (index_bits, 0), index_expr);
9610 index_type = TREE_TYPE (index_expr);
9611 }
9612
9613 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9614 }
9615 emit_queue ();
9616 index = protect_from_queue (index, 0);
9617 do_pending_stack_adjust ();
9618
9619 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9620 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9621 (index, op_mode))
9622 index = copy_to_mode_reg (op_mode, index);
9623
9624 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
9625
9626 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9627 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9628 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
9629 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9630 (op1, op_mode))
9631 op1 = copy_to_mode_reg (op_mode, op1);
9632
9633 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
9634
9635 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9636 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9637 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
9638 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9639 (op2, op_mode))
9640 op2 = copy_to_mode_reg (op_mode, op2);
9641
9642 emit_jump_insn (gen_casesi (index, op1, op2,
9643 table_label, default_label));
9644 return 1;
9645 }
9646
9647 /* Attempt to generate a tablejump instruction; same concept. */
9648 #ifndef HAVE_tablejump
9649 #define HAVE_tablejump 0
9650 #define gen_tablejump(x, y) (0)
9651 #endif
9652
9653 /* Subroutine of the next function.
9654
9655 INDEX is the value being switched on, with the lowest value
9656 in the table already subtracted.
9657 MODE is its expected mode (needed if INDEX is constant).
9658 RANGE is the length of the jump table.
9659 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9660
9661 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9662 index value is out of range. */
9663
9664 static void
9665 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9666 rtx default_label)
9667 {
9668 rtx temp, vector;
9669
9670 if (INTVAL (range) > cfun->max_jumptable_ents)
9671 cfun->max_jumptable_ents = INTVAL (range);
9672
9673 /* Do an unsigned comparison (in the proper mode) between the index
9674 expression and the value which represents the length of the range.
9675 Since we just finished subtracting the lower bound of the range
9676 from the index expression, this comparison allows us to simultaneously
9677 check that the original index expression value is both greater than
9678 or equal to the minimum value of the range and less than or equal to
9679 the maximum value of the range. */
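  /* For instance, with case values 3 through 10 the caller passes
     INDEX - 3 and RANGE = 7; the single unsigned test (INDEX - 3) > 7
     rejects both INDEX < 3 (the subtraction wraps to a huge unsigned
     value) and INDEX > 10.  */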
9680
9681 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9682 default_label);
9683
9684 /* If index is in range, it must fit in Pmode.
9685 Convert to Pmode so we can index with it. */
9686 if (mode != Pmode)
9687 index = convert_to_mode (Pmode, index, 1);
9688
9689 /* Don't let a MEM slip through, because then INDEX that comes
9690 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9691 and break_out_memory_refs will go to work on it and mess it up. */
9692 #ifdef PIC_CASE_VECTOR_ADDRESS
9693 if (flag_pic && GET_CODE (index) != REG)
9694 index = copy_to_mode_reg (Pmode, index);
9695 #endif
9696
9697 /* If flag_force_addr were to affect this address
9698 it could interfere with the tricky assumptions made
9699 about addresses that contain label-refs,
9700 which may be valid only very near the tablejump itself. */
9701 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9702 GET_MODE_SIZE, because this indicates how large insns are. The other
9703 uses should all be Pmode, because they are addresses. This code
9704 could fail if addresses and insns are not the same size. */
9705 index = gen_rtx_PLUS (Pmode,
9706 gen_rtx_MULT (Pmode, index,
9707 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9708 gen_rtx_LABEL_REF (Pmode, table_label));
9709 #ifdef PIC_CASE_VECTOR_ADDRESS
9710 if (flag_pic)
9711 index = PIC_CASE_VECTOR_ADDRESS (index);
9712 else
9713 #endif
9714 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9715 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9716 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
9717 RTX_UNCHANGING_P (vector) = 1;
9718 MEM_NOTRAP_P (vector) = 1;
9719 convert_move (temp, vector, 0);
9720
9721 emit_jump_insn (gen_tablejump (temp, table_label));
9722
9723 /* If we are generating PIC code or if the table is PC-relative, the
9724 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9725 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9726 emit_barrier ();
9727 }
9728
9729 int
9730 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9731 rtx table_label, rtx default_label)
9732 {
9733 rtx index;
9734
9735 if (! HAVE_tablejump)
9736 return 0;
9737
9738 index_expr = fold (build (MINUS_EXPR, index_type,
9739 convert (index_type, index_expr),
9740 convert (index_type, minval)));
9741 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9742 emit_queue ();
9743 index = protect_from_queue (index, 0);
9744 do_pending_stack_adjust ();
9745
9746 do_tablejump (index, TYPE_MODE (index_type),
9747 convert_modes (TYPE_MODE (index_type),
9748 TYPE_MODE (TREE_TYPE (range)),
9749 expand_expr (range, NULL_RTX,
9750 VOIDmode, 0),
9751 TYPE_UNSIGNED (TREE_TYPE (range))),
9752 table_label, default_label);
9753 return 1;
9754 }
9755
9756 /* Nonzero if the mode is a valid vector mode for this architecture.
9757 This returns nonzero even if there is no hardware support for the
9758 vector mode, but we can emulate with narrower modes. */
9759
9760 int
9761 vector_mode_valid_p (enum machine_mode mode)
9762 {
9763 enum mode_class class = GET_MODE_CLASS (mode);
9764 enum machine_mode innermode;
9765
9766 /* Doh! What's going on? */
9767 if (class != MODE_VECTOR_INT
9768 && class != MODE_VECTOR_FLOAT)
9769 return 0;
9770
9771 /* Hardware support. Woo hoo! */
9772 if (VECTOR_MODE_SUPPORTED_P (mode))
9773 return 1;
9774
9775 innermode = GET_MODE_INNER (mode);
9776
9777 /* We should probably return 1 if requesting V4DI and we have no DI
9778 but do have V2DI; however, this is probably very unlikely. */
9779
9780 /* If we have support for the inner mode, we can safely emulate it.
9781 We may not have V2DI, but we can emulate with a pair of DIs. */
9782 return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
9783 }
9784
9785 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
9786 static rtx
9787 const_vector_from_tree (tree exp)
9788 {
9789 rtvec v;
9790 int units, i;
9791 tree link, elt;
9792 enum machine_mode inner, mode;
9793
9794 mode = TYPE_MODE (TREE_TYPE (exp));
9795
9796 if (is_zeros_p (exp))
9797 return CONST0_RTX (mode);
9798
9799 units = GET_MODE_NUNITS (mode);
9800 inner = GET_MODE_INNER (mode);
9801
9802 v = rtvec_alloc (units);
9803
9804 link = TREE_VECTOR_CST_ELTS (exp);
9805 for (i = 0; link; link = TREE_CHAIN (link), ++i)
9806 {
9807 elt = TREE_VALUE (link);
9808
9809 if (TREE_CODE (elt) == REAL_CST)
9810 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
9811 inner);
9812 else
9813 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
9814 TREE_INT_CST_HIGH (elt),
9815 inner);
9816 }
9817
9818 /* Initialize remaining elements to 0. */
9819 for (; i < units; ++i)
9820 RTVEC_ELT (v, i) = CONST0_RTX (inner);
9821
9822 return gen_rtx_raw_CONST_VECTOR (mode, v);
9823 }
9824
9825 #include "gt-expr.h"