gcc/expr.c
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "optabs.h"
40 #include "libfuncs.h"
41 #include "recog.h"
42 #include "reload.h"
43 #include "output.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "langhooks.h"
48 #include "intl.h"
49 #include "tm_p.h"
50 #include "tree-iterator.h"
51 #include "tree-pass.h"
52 #include "tree-flow.h"
53 #include "target.h"
54 #include "timevar.h"
55
56 /* Decide whether a function's arguments should be processed
57 from first to last or from last to first.
58
59 They should if the stack and args grow in opposite directions, but
60 only if we have push insns. */
61
62 #ifdef PUSH_ROUNDING
63
64 #ifndef PUSH_ARGS_REVERSED
65 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
66 #define PUSH_ARGS_REVERSED /* If it's last to first. */
67 #endif
68 #endif
69
70 #endif
71
72 #ifndef STACK_PUSH_CODE
73 #ifdef STACK_GROWS_DOWNWARD
74 #define STACK_PUSH_CODE PRE_DEC
75 #else
76 #define STACK_PUSH_CODE PRE_INC
77 #endif
78 #endif
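/* Illustrative sketch, assuming a hypothetical target that has push insns
   (defines PUSH_ROUNDING), whose stack grows downward while its argument
   area grows upward, and which defines neither macro itself.  The defaults
   above then resolve as if the target headers contained:

       #define PUSH_ARGS_REVERSED
       #define STACK_PUSH_CODE PRE_DEC

   so arguments are processed last to first and each push pre-decrements
   the stack pointer.  A target defining either macro overrides this.  */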
79
80
81 /* If this is nonzero, we do not bother generating VOLATILE
82 around volatile memory references, and we are willing to
83 output indirect addresses. If cse is to follow, we reject
84 indirect addresses so a useful potential cse is generated;
85 if it is used only once, instruction combination will produce
86 the same indirect address eventually. */
87 int cse_not_expected;
88
89 /* This structure is used by move_by_pieces to describe the move to
90 be performed. */
91 struct move_by_pieces
92 {
93 rtx to;
94 rtx to_addr;
95 int autinc_to;
96 int explicit_inc_to;
97 rtx from;
98 rtx from_addr;
99 int autinc_from;
100 int explicit_inc_from;
101 unsigned HOST_WIDE_INT len;
102 HOST_WIDE_INT offset;
103 int reverse;
104 };
105
106 /* This structure is used by store_by_pieces to describe the store or
107 clear to be performed. */
108
109 struct store_by_pieces
110 {
111 rtx to;
112 rtx to_addr;
113 int autinc_to;
114 int explicit_inc_to;
115 unsigned HOST_WIDE_INT len;
116 HOST_WIDE_INT offset;
117 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
118 void *constfundata;
119 int reverse;
120 };
121
122 static rtx enqueue_insn (rtx, rtx);
123 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
124 unsigned int);
125 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
126 struct move_by_pieces *);
127 static bool block_move_libcall_safe_for_call_parm (void);
128 static bool emit_block_move_via_movstr (rtx, rtx, rtx, unsigned);
129 static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
130 static tree emit_block_move_libcall_fn (int);
131 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
132 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
133 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
134 static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
135 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
136 struct store_by_pieces *);
137 static bool clear_storage_via_clrstr (rtx, rtx, unsigned);
138 static rtx clear_storage_via_libcall (rtx, rtx);
139 static tree clear_storage_libcall_fn (int);
140 static rtx compress_float_constant (rtx, rtx);
141 static rtx get_subtarget (rtx);
142 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
143 HOST_WIDE_INT, enum machine_mode,
144 tree, tree, int, int);
145 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
146 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
147 tree, enum machine_mode, int, tree, int);
148 static rtx var_rtx (tree);
149
150 static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
151 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
152
153 static int is_aligning_offset (tree, tree);
154 static rtx expand_increment (tree, int, int);
155 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
156 enum expand_modifier);
157 static rtx do_store_flag (tree, rtx, enum machine_mode, int);
158 #ifdef PUSH_ROUNDING
159 static void emit_single_push_insn (enum machine_mode, rtx, tree);
160 #endif
161 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
162 static rtx const_vector_from_tree (tree);
163
164 /* Record for each mode whether we can move a register directly to or
165 from an object of that mode in memory. If we can't, we won't try
166 to use that mode directly when accessing a field of that mode. */
167
168 static char direct_load[NUM_MACHINE_MODES];
169 static char direct_store[NUM_MACHINE_MODES];
170
171 /* Record for each mode whether we can float-extend from memory. */
172
173 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
174
175 /* This macro is used to determine whether move_by_pieces should be called
176 to perform a structure copy. */
177 #ifndef MOVE_BY_PIECES_P
178 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
179 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
180 #endif
181
182 /* This macro is used to determine whether clear_by_pieces should be
183 called to clear storage. */
184 #ifndef CLEAR_BY_PIECES_P
185 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
186 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
187 #endif
188
189 /* This macro is used to determine whether store_by_pieces should be
190 called to "memset" storage with byte values other than zero, or
191 to "memcpy" storage when the source is a constant string. */
192 #ifndef STORE_BY_PIECES_P
193 #define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
194 #endif
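/* Worked example, assuming a hypothetical 32-bit target with MOVE_MAX == 4
   and word-aligned operands: copying a 16-byte structure takes
   move_by_pieces_ninsns (16, 32) == 4 SImode moves, so

       MOVE_BY_PIECES_P (16, 32)

   is true only when 4 < MOVE_RATIO, i.e. the copy is expanded inline only
   if the target's MOVE_RATIO says four scalar moves beat a block-move
   insn or a memcpy call.  */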
195
196 /* This array records the insn_code of insns to perform block moves. */
197 enum insn_code movstr_optab[NUM_MACHINE_MODES];
198
199 /* This array records the insn_code of insns to perform block clears. */
200 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
201
202 /* These arrays record the insn_code of two different kinds of insns
203 to perform block compares. */
204 enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
205 enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
206
207 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
208
209 #ifndef SLOW_UNALIGNED_ACCESS
210 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
211 #endif
212 \f
213 /* This is run once per compilation to set up which modes can be used
214 directly in memory and to initialize the block move optab. */
215
216 void
217 init_expr_once (void)
218 {
219 rtx insn, pat;
220 enum machine_mode mode;
221 int num_clobbers;
222 rtx mem, mem1;
223 rtx reg;
224
225 /* Try indexing by frame ptr and try by stack ptr.
226 It is known that on the Convex the stack ptr isn't a valid index.
227 With luck, one or the other is valid on any machine. */
228 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
229 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
230
231 /* A scratch register we can modify in-place below to avoid
232 useless RTL allocations. */
233 reg = gen_rtx_REG (VOIDmode, -1);
234
235 insn = rtx_alloc (INSN);
236 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
237 PATTERN (insn) = pat;
238
239 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
240 mode = (enum machine_mode) ((int) mode + 1))
241 {
242 int regno;
243
244 direct_load[(int) mode] = direct_store[(int) mode] = 0;
245 PUT_MODE (mem, mode);
246 PUT_MODE (mem1, mode);
247 PUT_MODE (reg, mode);
248
249 /* See if there is some register that can be used in this mode and
250 directly loaded or stored from memory. */
251
252 if (mode != VOIDmode && mode != BLKmode)
253 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
254 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
255 regno++)
256 {
257 if (! HARD_REGNO_MODE_OK (regno, mode))
258 continue;
259
260 REGNO (reg) = regno;
261
262 SET_SRC (pat) = mem;
263 SET_DEST (pat) = reg;
264 if (recog (pat, insn, &num_clobbers) >= 0)
265 direct_load[(int) mode] = 1;
266
267 SET_SRC (pat) = mem1;
268 SET_DEST (pat) = reg;
269 if (recog (pat, insn, &num_clobbers) >= 0)
270 direct_load[(int) mode] = 1;
271
272 SET_SRC (pat) = reg;
273 SET_DEST (pat) = mem;
274 if (recog (pat, insn, &num_clobbers) >= 0)
275 direct_store[(int) mode] = 1;
276
277 SET_SRC (pat) = reg;
278 SET_DEST (pat) = mem1;
279 if (recog (pat, insn, &num_clobbers) >= 0)
280 direct_store[(int) mode] = 1;
281 }
282 }
283
284 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
285
286 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
287 mode = GET_MODE_WIDER_MODE (mode))
288 {
289 enum machine_mode srcmode;
290 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
291 srcmode = GET_MODE_WIDER_MODE (srcmode))
292 {
293 enum insn_code ic;
294
295 ic = can_extend_p (mode, srcmode, 0);
296 if (ic == CODE_FOR_nothing)
297 continue;
298
299 PUT_MODE (mem, srcmode);
300
301 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
302 float_extend_from_mem[mode][srcmode] = true;
303 }
304 }
305 }
306
307 /* This is run at the start of compiling a function. */
308
309 void
310 init_expr (void)
311 {
312 cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
313 }
314
315 /* Small sanity check that the queue is empty at the end of a function. */
316
317 void
318 finish_expr_for_function (void)
319 {
320 if (pending_chain)
321 abort ();
322 }
323 \f
324 /* Manage the queue of increment instructions to be output
325 for POSTINCREMENT_EXPR expressions, etc. */
326
327 /* Queue up to increment (or change) VAR later. BODY says how:
328 BODY should be the same thing you would pass to emit_insn
329 to increment right away. It will go to emit_insn later on.
330
331 The value is a QUEUED expression to be used in place of VAR
332 where you want to guarantee the pre-incrementation value of VAR. */
333
334 static rtx
335 enqueue_insn (rtx var, rtx body)
336 {
337 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
338 body, pending_chain);
339 return pending_chain;
340 }
341
342 /* Use protect_from_queue to convert a QUEUED expression
343 into something that you can put immediately into an instruction.
344 If the queued incrementation has not happened yet,
345 protect_from_queue returns the variable itself.
346 If the incrementation has happened, protect_from_queue returns a temp
347 that contains a copy of the old value of the variable.
348
349 Any time an rtx which might possibly be a QUEUED is to be put
350 into an instruction, it must be passed through protect_from_queue first.
351 QUEUED expressions are not meaningful in instructions.
352
353 Do not pass a value through protect_from_queue and then hold
354 on to it for a while before putting it in an instruction!
355 If the queue is flushed in between, incorrect code will result. */
356
357 rtx
358 protect_from_queue (rtx x, int modify)
359 {
360 RTX_CODE code = GET_CODE (x);
361
362 #if 0 /* A QUEUED can hang around after the queue is forced out. */
363 /* Shortcut for most common case. */
364 if (pending_chain == 0)
365 return x;
366 #endif
367
368 if (code != QUEUED)
369 {
370 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
371 use of autoincrement. Make a copy of the contents of the memory
372 location rather than a copy of the address, but not if the value is
373 of mode BLKmode. Don't modify X in place since it might be
374 shared. */
375 if (code == MEM && GET_MODE (x) != BLKmode
376 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
377 {
378 rtx y = XEXP (x, 0);
379 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
380
381 if (QUEUED_INSN (y))
382 {
383 rtx temp = gen_reg_rtx (GET_MODE (x));
384
385 emit_insn_before (gen_move_insn (temp, new),
386 QUEUED_INSN (y));
387 return temp;
388 }
389
390 /* Copy the address into a pseudo, so that the returned value
391 remains correct across calls to emit_queue. */
392 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
393 }
394
395 /* Otherwise, recursively protect the subexpressions of all
396 the kinds of rtx's that can contain a QUEUED. */
397 if (code == MEM)
398 {
399 rtx tem = protect_from_queue (XEXP (x, 0), 0);
400 if (tem != XEXP (x, 0))
401 {
402 x = copy_rtx (x);
403 XEXP (x, 0) = tem;
404 }
405 }
406 else if (code == PLUS || code == MULT)
407 {
408 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
409 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
410 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
411 {
412 x = copy_rtx (x);
413 XEXP (x, 0) = new0;
414 XEXP (x, 1) = new1;
415 }
416 }
417 return x;
418 }
419 /* If the increment has not happened, use the variable itself. Copy it
420 into a new pseudo so that the value remains correct across calls to
421 emit_queue. */
422 if (QUEUED_INSN (x) == 0)
423 return copy_to_reg (QUEUED_VAR (x));
424 /* If the increment has happened and a pre-increment copy exists,
425 use that copy. */
426 if (QUEUED_COPY (x) != 0)
427 return QUEUED_COPY (x);
428 /* The increment has happened but we haven't set up a pre-increment copy.
429 Set one up now, and use it. */
430 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
431 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
432 QUEUED_INSN (x));
433 return QUEUED_COPY (x);
434 }
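/* A minimal usage sketch of the queue protocol, with hypothetical operands
   and using this file's internal helpers (real callers go through
   expand_increment and expand_expr):

       rtx q    = enqueue_insn (var, gen_add2_insn (var, const1_rtx));
       rtx safe = protect_from_queue (q, 0);
       ... put SAFE into an insn immediately ...
       emit_queue ();

   SAFE carries the pre-increment value of VAR; holding it across another
   emit_queue would be wrong, as the comment above explains.  */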
435
436 /* Return nonzero if X contains a QUEUED expression:
437 if it contains anything that will be altered by a queued increment.
438 We handle only combinations of MEM, PLUS, MINUS and MULT operators
439 since memory addresses generally contain only those. */
440
441 int
442 queued_subexp_p (rtx x)
443 {
444 enum rtx_code code = GET_CODE (x);
445 switch (code)
446 {
447 case QUEUED:
448 return 1;
449 case MEM:
450 return queued_subexp_p (XEXP (x, 0));
451 case MULT:
452 case PLUS:
453 case MINUS:
454 return (queued_subexp_p (XEXP (x, 0))
455 || queued_subexp_p (XEXP (x, 1)));
456 default:
457 return 0;
458 }
459 }
460
461 /* Retrieve a mark on the queue. */
462
463 static rtx
464 mark_queue (void)
465 {
466 return pending_chain;
467 }
468
469 /* Perform all the pending incrementations that have been enqueued
470 after MARK was retrieved. If MARK is null, perform all the
471 pending incrementations. */
472
473 static void
474 emit_insns_enqueued_after_mark (rtx mark)
475 {
476 rtx p;
477
478 /* The marked incrementation may have been emitted in the meantime
479 through a call to emit_queue. In this case, the mark is not valid
480 anymore so do nothing. */
481 if (mark && ! QUEUED_BODY (mark))
482 return;
483
484 while ((p = pending_chain) != mark)
485 {
486 rtx body = QUEUED_BODY (p);
487
488 switch (GET_CODE (body))
489 {
490 case INSN:
491 case JUMP_INSN:
492 case CALL_INSN:
493 case CODE_LABEL:
494 case BARRIER:
495 case NOTE:
496 QUEUED_INSN (p) = body;
497 emit_insn (body);
498 break;
499
500 #ifdef ENABLE_CHECKING
501 case SEQUENCE:
502 abort ();
503 break;
504 #endif
505
506 default:
507 QUEUED_INSN (p) = emit_insn (body);
508 break;
509 }
510
511 QUEUED_BODY (p) = 0;
512 pending_chain = QUEUED_NEXT (p);
513 }
514 }
515
516 /* Perform all the pending incrementations. */
517
518 void
519 emit_queue (void)
520 {
521 emit_insns_enqueued_after_mark (NULL_RTX);
522 }
523 \f
524 /* Copy data from FROM to TO, where the machine modes are not the same.
525 Both modes may be integer, or both may be floating.
526 UNSIGNEDP should be nonzero if FROM is an unsigned type.
527 This causes zero-extension instead of sign-extension. */
528
529 void
530 convert_move (rtx to, rtx from, int unsignedp)
531 {
532 enum machine_mode to_mode = GET_MODE (to);
533 enum machine_mode from_mode = GET_MODE (from);
534 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
535 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
536 enum insn_code code;
537 rtx libcall;
538
539 /* rtx code for making an equivalent value. */
540 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
541 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
542
543 to = protect_from_queue (to, 1);
544 from = protect_from_queue (from, 0);
545
546 if (to_real != from_real)
547 abort ();
548
549 /* If the source and destination are already the same, then there's
550 nothing to do. */
551 if (to == from)
552 return;
553
554 /* If FROM is a SUBREG that indicates that we have already done at least
555 the required extension, strip it. We don't handle such SUBREGs as
556 TO here. */
557
558 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
559 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
560 >= GET_MODE_SIZE (to_mode))
561 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
562 from = gen_lowpart (to_mode, from), from_mode = to_mode;
563
564 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
565 abort ();
566
567 if (to_mode == from_mode
568 || (from_mode == VOIDmode && CONSTANT_P (from)))
569 {
570 emit_move_insn (to, from);
571 return;
572 }
573
574 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
575 {
576 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
577 abort ();
578
579 if (VECTOR_MODE_P (to_mode))
580 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
581 else
582 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
583
584 emit_move_insn (to, from);
585 return;
586 }
587
588 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
589 {
590 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
591 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
592 return;
593 }
594
595 if (to_real)
596 {
597 rtx value, insns;
598 convert_optab tab;
599
600 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
601 tab = sext_optab;
602 else if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
603 tab = trunc_optab;
604 else
605 abort ();
606
607 /* Try converting directly if the insn is supported. */
608
609 code = tab->handlers[to_mode][from_mode].insn_code;
610 if (code != CODE_FOR_nothing)
611 {
612 emit_unop_insn (code, to, from,
613 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
614 return;
615 }
616
617 /* Otherwise use a libcall. */
618 libcall = tab->handlers[to_mode][from_mode].libfunc;
619
620 if (!libcall)
621 /* This conversion is not implemented yet. */
622 abort ();
623
624 start_sequence ();
625 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
626 1, from, from_mode);
627 insns = get_insns ();
628 end_sequence ();
629 emit_libcall_block (insns, to, value,
630 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
631 from)
632 : gen_rtx_FLOAT_EXTEND (to_mode, from));
633 return;
634 }
635
636 /* Handle pointer conversion. */ /* SPEE 900220. */
637 /* Targets are expected to provide conversion insns between PxImode and
638 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
639 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
640 {
641 enum machine_mode full_mode
642 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
643
644 if (trunc_optab->handlers[to_mode][full_mode].insn_code
645 == CODE_FOR_nothing)
646 abort ();
647
648 if (full_mode != from_mode)
649 from = convert_to_mode (full_mode, from, unsignedp);
650 emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
651 to, from, UNKNOWN);
652 return;
653 }
654 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
655 {
656 enum machine_mode full_mode
657 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
658
659 if (sext_optab->handlers[full_mode][from_mode].insn_code
660 == CODE_FOR_nothing)
661 abort ();
662
663 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
664 to, from, UNKNOWN);
665 if (to_mode == full_mode)
666 return;
667
668 /* else proceed to integer conversions below. */
669 from_mode = full_mode;
670 }
671
672 /* Now both modes are integers. */
673
674 /* Handle expanding beyond a word. */
675 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
676 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
677 {
678 rtx insns;
679 rtx lowpart;
680 rtx fill_value;
681 rtx lowfrom;
682 int i;
683 enum machine_mode lowpart_mode;
684 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
685
686 /* Try converting directly if the insn is supported. */
687 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
688 != CODE_FOR_nothing)
689 {
690 /* If FROM is a SUBREG, put it into a register. Do this
691 so that we always generate the same set of insns for
692 better cse'ing; if an intermediate assignment occurred,
693 we won't be doing the operation directly on the SUBREG. */
694 if (optimize > 0 && GET_CODE (from) == SUBREG)
695 from = force_reg (from_mode, from);
696 emit_unop_insn (code, to, from, equiv_code);
697 return;
698 }
699 /* Next, try converting via full word. */
700 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
701 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
702 != CODE_FOR_nothing))
703 {
704 if (REG_P (to))
705 {
706 if (reg_overlap_mentioned_p (to, from))
707 from = force_reg (from_mode, from);
708 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
709 }
710 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
711 emit_unop_insn (code, to,
712 gen_lowpart (word_mode, to), equiv_code);
713 return;
714 }
715
716 /* No special multiword conversion insn; do it by hand. */
717 start_sequence ();
718
719 /* Since we will turn this into a no conflict block, we must ensure
720 that the source does not overlap the target. */
721
722 if (reg_overlap_mentioned_p (to, from))
723 from = force_reg (from_mode, from);
724
725 /* Get a copy of FROM widened to a word, if necessary. */
726 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
727 lowpart_mode = word_mode;
728 else
729 lowpart_mode = from_mode;
730
731 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
732
733 lowpart = gen_lowpart (lowpart_mode, to);
734 emit_move_insn (lowpart, lowfrom);
735
736 /* Compute the value to put in each remaining word. */
737 if (unsignedp)
738 fill_value = const0_rtx;
739 else
740 {
741 #ifdef HAVE_slt
742 if (HAVE_slt
743 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
744 && STORE_FLAG_VALUE == -1)
745 {
746 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
747 lowpart_mode, 0);
748 fill_value = gen_reg_rtx (word_mode);
749 emit_insn (gen_slt (fill_value));
750 }
751 else
752 #endif
753 {
754 fill_value
755 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
756 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
757 NULL_RTX, 0);
758 fill_value = convert_to_mode (word_mode, fill_value, 1);
759 }
760 }
761
762 /* Fill the remaining words. */
763 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
764 {
765 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
766 rtx subword = operand_subword (to, index, 1, to_mode);
767
768 if (subword == 0)
769 abort ();
770
771 if (fill_value != subword)
772 emit_move_insn (subword, fill_value);
773 }
774
775 insns = get_insns ();
776 end_sequence ();
777
778 emit_no_conflict_block (insns, to, from, NULL_RTX,
779 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
780 return;
781 }
782
783 /* Truncating multi-word to a word or less. */
784 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
785 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
786 {
787 if (!((MEM_P (from)
788 && ! MEM_VOLATILE_P (from)
789 && direct_load[(int) to_mode]
790 && ! mode_dependent_address_p (XEXP (from, 0)))
791 || REG_P (from)
792 || GET_CODE (from) == SUBREG))
793 from = force_reg (from_mode, from);
794 convert_move (to, gen_lowpart (word_mode, from), 0);
795 return;
796 }
797
798 /* Now follow all the conversions between integers
799 no more than a word long. */
800
801 /* For truncation, usually we can just refer to FROM in a narrower mode. */
802 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
803 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
804 GET_MODE_BITSIZE (from_mode)))
805 {
806 if (!((MEM_P (from)
807 && ! MEM_VOLATILE_P (from)
808 && direct_load[(int) to_mode]
809 && ! mode_dependent_address_p (XEXP (from, 0)))
810 || REG_P (from)
811 || GET_CODE (from) == SUBREG))
812 from = force_reg (from_mode, from);
813 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
814 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
815 from = copy_to_reg (from);
816 emit_move_insn (to, gen_lowpart (to_mode, from));
817 return;
818 }
819
820 /* Handle extension. */
821 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
822 {
823 /* Convert directly if that works. */
824 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
825 != CODE_FOR_nothing)
826 {
827 if (flag_force_mem)
828 from = force_not_mem (from);
829
830 emit_unop_insn (code, to, from, equiv_code);
831 return;
832 }
833 else
834 {
835 enum machine_mode intermediate;
836 rtx tmp;
837 tree shift_amount;
838
839 /* Search for a mode to convert via. */
840 for (intermediate = from_mode; intermediate != VOIDmode;
841 intermediate = GET_MODE_WIDER_MODE (intermediate))
842 if (((can_extend_p (to_mode, intermediate, unsignedp)
843 != CODE_FOR_nothing)
844 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
845 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
846 GET_MODE_BITSIZE (intermediate))))
847 && (can_extend_p (intermediate, from_mode, unsignedp)
848 != CODE_FOR_nothing))
849 {
850 convert_move (to, convert_to_mode (intermediate, from,
851 unsignedp), unsignedp);
852 return;
853 }
854
855 /* No suitable intermediate mode.
856 Generate what we need with shifts. */
857 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
858 - GET_MODE_BITSIZE (from_mode), 0);
859 from = gen_lowpart (to_mode, force_reg (from_mode, from));
860 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
861 to, unsignedp);
862 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
863 to, unsignedp);
864 if (tmp != to)
865 emit_move_insn (to, tmp);
866 return;
867 }
868 }
869
870 /* Support special truncate insns for certain modes. */
871 if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
872 {
873 emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
874 to, from, UNKNOWN);
875 return;
876 }
877
878 /* Handle truncation of volatile memrefs, and so on;
879 the things that couldn't be truncated directly,
880 and for which there was no special instruction.
881
882 ??? Code above formerly short-circuited this, for most integer
883 mode pairs, with a force_reg in from_mode followed by a recursive
884 call to this routine. Appears always to have been wrong. */
885 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
886 {
887 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
888 emit_move_insn (to, temp);
889 return;
890 }
891
892 /* Mode combination is not recognized. */
893 abort ();
894 }
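/* Illustrative example of the shift fallback above, assuming a hypothetical
   target with no HImode-to-SImode extension insn and no usable intermediate
   mode: sign-extending a 16-bit value into a 32-bit register is emitted as

       to = (from << 16) >> 16;

   with shift_amount = GET_MODE_BITSIZE (SImode) - GET_MODE_BITSIZE (HImode)
   == 16, and expand_shift choosing a logical rather than arithmetic right
   shift when UNSIGNEDP is nonzero.  */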
895
896 /* Return an rtx for a value that would result
897 from converting X to mode MODE.
898 Both X and MODE may be floating, or both integer.
899 UNSIGNEDP is nonzero if X is an unsigned value.
900 This can be done by referring to a part of X in place
901 or by copying to a new temporary with conversion.
902
903 This function *must not* call protect_from_queue
904 except when putting X into an insn (in which case convert_move does it). */
905
906 rtx
907 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
908 {
909 return convert_modes (mode, VOIDmode, x, unsignedp);
910 }
911
912 /* Return an rtx for a value that would result
913 from converting X from mode OLDMODE to mode MODE.
914 Both modes may be floating, or both integer.
915 UNSIGNEDP is nonzero if X is an unsigned value.
916
917 This can be done by referring to a part of X in place
918 or by copying to a new temporary with conversion.
919
920 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
921
922 This function *must not* call protect_from_queue
923 except when putting X into an insn (in which case convert_move does it). */
924
925 rtx
926 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
927 {
928 rtx temp;
929
930 /* If FROM is a SUBREG that indicates that we have already done at least
931 the required extension, strip it. */
932
933 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
934 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
935 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
936 x = gen_lowpart (mode, x);
937
938 if (GET_MODE (x) != VOIDmode)
939 oldmode = GET_MODE (x);
940
941 if (mode == oldmode)
942 return x;
943
944 /* There is one case that we must handle specially: If we are converting
945 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
946 we are to interpret the constant as unsigned, gen_lowpart will do
947 the wrong thing if the constant appears negative. What we want to do is
948 make the high-order word of the constant zero, not all ones. */
949
950 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
951 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
952 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
953 {
954 HOST_WIDE_INT val = INTVAL (x);
955
956 if (oldmode != VOIDmode
957 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
958 {
959 int width = GET_MODE_BITSIZE (oldmode);
960
961 /* We need to zero extend VAL. */
962 val &= ((HOST_WIDE_INT) 1 << width) - 1;
963 }
964
965 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
966 }
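/* A concrete case of the special handling above, assuming a 64-bit
   HOST_WIDE_INT and a 128-bit integer MODE: converting (const_int -1)
   of QImode as an unsigned value must produce the constant 0xff, i.e.
   a low word of 0xff and a high word of zero, whereas gen_lowpart
   would have produced a constant whose high word is all ones.  */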
967
968 /* We can do this with a gen_lowpart if both desired and current modes
969 are integer, and this is either a constant integer, a register, or a
970 non-volatile MEM. Except for the constant case where MODE is no
971 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
972
973 if ((GET_CODE (x) == CONST_INT
974 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
975 || (GET_MODE_CLASS (mode) == MODE_INT
976 && GET_MODE_CLASS (oldmode) == MODE_INT
977 && (GET_CODE (x) == CONST_DOUBLE
978 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
979 && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
980 && direct_load[(int) mode])
981 || (REG_P (x)
982 && (! HARD_REGISTER_P (x)
983 || HARD_REGNO_MODE_OK (REGNO (x), mode))
984 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
985 GET_MODE_BITSIZE (GET_MODE (x)))))))))
986 {
987 /* ?? If we don't know OLDMODE, we have to assume here that
988 X does not need sign- or zero-extension. This may not be
989 the case, but it's the best we can do. */
990 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
991 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
992 {
993 HOST_WIDE_INT val = INTVAL (x);
994 int width = GET_MODE_BITSIZE (oldmode);
995
996 /* We must sign or zero-extend in this case. Start by
997 zero-extending, then sign extend if we need to. */
998 val &= ((HOST_WIDE_INT) 1 << width) - 1;
999 if (! unsignedp
1000 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1001 val |= (HOST_WIDE_INT) (-1) << width;
1002
1003 return gen_int_mode (val, mode);
1004 }
1005
1006 return gen_lowpart (mode, x);
1007 }
1008
1009 /* Converting an integer constant into a vector mode is always equivalent
1010 to a subreg operation. */
1011 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
1012 {
1013 if (GET_MODE_BITSIZE (mode) != GET_MODE_BITSIZE (oldmode))
1014 abort ();
1015 return simplify_gen_subreg (mode, x, oldmode, 0);
1016 }
1017
1018 temp = gen_reg_rtx (mode);
1019 convert_move (temp, x, unsignedp);
1020 return temp;
1021 }
1022 \f
1023 /* STORE_MAX_PIECES is the number of bytes at a time that we can
1024 store efficiently. Due to internal GCC limitations, this is
1025 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
1026 for an immediate constant. */
1027
1028 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
1029
1030 /* Determine whether the LEN bytes can be moved by using several move
1031 instructions. Return nonzero if a call to move_by_pieces should
1032 succeed. */
1033
1034 int
1035 can_move_by_pieces (unsigned HOST_WIDE_INT len,
1036 unsigned int align ATTRIBUTE_UNUSED)
1037 {
1038 return MOVE_BY_PIECES_P (len, align);
1039 }
1040
1041 /* Generate several move instructions to copy LEN bytes from block FROM to
1042 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1043 and TO through protect_from_queue before calling.
1044
1045 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1046 used to push FROM to the stack.
1047
1048 ALIGN is maximum stack alignment we can assume.
1049
1050 If ENDP is 0, return TO; if ENDP is 1, return the memory at the end,
1051 a la mempcpy; and if ENDP is 2, return the memory at the end minus one
1052 byte, a la stpcpy. */
1053
1054 rtx
1055 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
1056 unsigned int align, int endp)
1057 {
1058 struct move_by_pieces data;
1059 rtx to_addr, from_addr = XEXP (from, 0);
1060 unsigned int max_size = MOVE_MAX_PIECES + 1;
1061 enum machine_mode mode = VOIDmode, tmode;
1062 enum insn_code icode;
1063
1064 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
1065
1066 data.offset = 0;
1067 data.from_addr = from_addr;
1068 if (to)
1069 {
1070 to_addr = XEXP (to, 0);
1071 data.to = to;
1072 data.autinc_to
1073 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1074 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1075 data.reverse
1076 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1077 }
1078 else
1079 {
1080 to_addr = NULL_RTX;
1081 data.to = NULL_RTX;
1082 data.autinc_to = 1;
1083 #ifdef STACK_GROWS_DOWNWARD
1084 data.reverse = 1;
1085 #else
1086 data.reverse = 0;
1087 #endif
1088 }
1089 data.to_addr = to_addr;
1090 data.from = from;
1091 data.autinc_from
1092 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1093 || GET_CODE (from_addr) == POST_INC
1094 || GET_CODE (from_addr) == POST_DEC);
1095
1096 data.explicit_inc_from = 0;
1097 data.explicit_inc_to = 0;
1098 if (data.reverse) data.offset = len;
1099 data.len = len;
1100
1101 /* If copying requires more than two move insns,
1102 copy addresses to registers (to make displacements shorter)
1103 and use post-increment if available. */
1104 if (!(data.autinc_from && data.autinc_to)
1105 && move_by_pieces_ninsns (len, align) > 2)
1106 {
1107 /* Find the mode of the largest move... */
1108 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1109 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1110 if (GET_MODE_SIZE (tmode) < max_size)
1111 mode = tmode;
1112
1113 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1114 {
1115 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1116 data.autinc_from = 1;
1117 data.explicit_inc_from = -1;
1118 }
1119 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1120 {
1121 data.from_addr = copy_addr_to_reg (from_addr);
1122 data.autinc_from = 1;
1123 data.explicit_inc_from = 1;
1124 }
1125 if (!data.autinc_from && CONSTANT_P (from_addr))
1126 data.from_addr = copy_addr_to_reg (from_addr);
1127 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1128 {
1129 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1130 data.autinc_to = 1;
1131 data.explicit_inc_to = -1;
1132 }
1133 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1134 {
1135 data.to_addr = copy_addr_to_reg (to_addr);
1136 data.autinc_to = 1;
1137 data.explicit_inc_to = 1;
1138 }
1139 if (!data.autinc_to && CONSTANT_P (to_addr))
1140 data.to_addr = copy_addr_to_reg (to_addr);
1141 }
1142
1143 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1144 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1145 align = MOVE_MAX * BITS_PER_UNIT;
1146
1147 /* First move what we can in the largest integer mode, then go to
1148 successively smaller modes. */
1149
1150 while (max_size > 1)
1151 {
1152 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1153 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1154 if (GET_MODE_SIZE (tmode) < max_size)
1155 mode = tmode;
1156
1157 if (mode == VOIDmode)
1158 break;
1159
1160 icode = mov_optab->handlers[(int) mode].insn_code;
1161 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1162 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1163
1164 max_size = GET_MODE_SIZE (mode);
1165 }
1166
1167 /* The code above should have handled everything. */
1168 if (data.len > 0)
1169 abort ();
1170
1171 if (endp)
1172 {
1173 rtx to1;
1174
1175 if (data.reverse)
1176 abort ();
1177 if (data.autinc_to)
1178 {
1179 if (endp == 2)
1180 {
1181 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1182 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1183 else
1184 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
1185 -1));
1186 }
1187 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1188 data.offset);
1189 }
1190 else
1191 {
1192 if (endp == 2)
1193 --data.offset;
1194 to1 = adjust_address (data.to, QImode, data.offset);
1195 }
1196 return to1;
1197 }
1198 else
1199 return data.to;
1200 }
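/* Worked example, assuming a hypothetical 32-bit target with
   MOVE_MAX_PIECES == 4 and 4-byte-aligned operands: a 7-byte copy is
   expanded by the loop above as

       one SImode move   (bytes 0-3)
       one HImode move   (bytes 4-5)
       one QImode move   (byte 6)

   via successive calls to move_by_pieces_1 with ever narrower modes.  */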
1201
1202 /* Return number of insns required to move L bytes by pieces.
1203 ALIGN (in bits) is maximum alignment we can assume. */
1204
1205 static unsigned HOST_WIDE_INT
1206 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align)
1207 {
1208 unsigned HOST_WIDE_INT n_insns = 0;
1209 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1210
1211 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1212 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1213 align = MOVE_MAX * BITS_PER_UNIT;
1214
1215 while (max_size > 1)
1216 {
1217 enum machine_mode mode = VOIDmode, tmode;
1218 enum insn_code icode;
1219
1220 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1221 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1222 if (GET_MODE_SIZE (tmode) < max_size)
1223 mode = tmode;
1224
1225 if (mode == VOIDmode)
1226 break;
1227
1228 icode = mov_optab->handlers[(int) mode].insn_code;
1229 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1230 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1231
1232 max_size = GET_MODE_SIZE (mode);
1233 }
1234
1235 if (l)
1236 abort ();
1237 return n_insns;
1238 }
1239
1240 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1241 with move instructions for mode MODE. GENFUN is the gen_... function
1242 to make a move insn for that mode. DATA has all the other info. */
1243
1244 static void
1245 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1246 struct move_by_pieces *data)
1247 {
1248 unsigned int size = GET_MODE_SIZE (mode);
1249 rtx to1 = NULL_RTX, from1;
1250
1251 while (data->len >= size)
1252 {
1253 if (data->reverse)
1254 data->offset -= size;
1255
1256 if (data->to)
1257 {
1258 if (data->autinc_to)
1259 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1260 data->offset);
1261 else
1262 to1 = adjust_address (data->to, mode, data->offset);
1263 }
1264
1265 if (data->autinc_from)
1266 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1267 data->offset);
1268 else
1269 from1 = adjust_address (data->from, mode, data->offset);
1270
1271 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1272 emit_insn (gen_add2_insn (data->to_addr,
1273 GEN_INT (-(HOST_WIDE_INT)size)));
1274 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1275 emit_insn (gen_add2_insn (data->from_addr,
1276 GEN_INT (-(HOST_WIDE_INT)size)));
1277
1278 if (data->to)
1279 emit_insn ((*genfun) (to1, from1));
1280 else
1281 {
1282 #ifdef PUSH_ROUNDING
1283 emit_single_push_insn (mode, from1, NULL);
1284 #else
1285 abort ();
1286 #endif
1287 }
1288
1289 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1290 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1291 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1292 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1293
1294 if (! data->reverse)
1295 data->offset += size;
1296
1297 data->len -= size;
1298 }
1299 }
1300 \f
1301 /* Emit code to move a block Y to a block X. This may be done with
1302 string-move instructions, with multiple scalar move instructions,
1303 or with a library call.
1304
1305 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1306 SIZE is an rtx that says how long they are.
1307 ALIGN is the maximum alignment we can assume they have.
1308 METHOD describes what kind of copy this is, and what mechanisms may be used.
1309
1310 Return the address of the new block, if memcpy is called and returns it,
1311 0 otherwise. */
1312
1313 rtx
1314 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1315 {
1316 bool may_use_call;
1317 rtx retval = 0;
1318 unsigned int align;
1319
1320 switch (method)
1321 {
1322 case BLOCK_OP_NORMAL:
1323 may_use_call = true;
1324 break;
1325
1326 case BLOCK_OP_CALL_PARM:
1327 may_use_call = block_move_libcall_safe_for_call_parm ();
1328
1329 /* Make inhibit_defer_pop nonzero around the library call
1330 to force it to pop the arguments right away. */
1331 NO_DEFER_POP;
1332 break;
1333
1334 case BLOCK_OP_NO_LIBCALL:
1335 may_use_call = false;
1336 break;
1337
1338 default:
1339 abort ();
1340 }
1341
1342 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1343
1344 x = protect_from_queue (x, 1);
1345 y = protect_from_queue (y, 0);
1346 size = protect_from_queue (size, 0);
1347
1348 if (!MEM_P (x))
1349 abort ();
1350 if (!MEM_P (y))
1351 abort ();
1352 if (size == 0)
1353 abort ();
1354
1355 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1356 block copy is more efficient for other large modes, e.g. DCmode. */
1357 x = adjust_address (x, BLKmode, 0);
1358 y = adjust_address (y, BLKmode, 0);
1359
1360 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1361 can be incorrect is coming from __builtin_memcpy. */
1362 if (GET_CODE (size) == CONST_INT)
1363 {
1364 if (INTVAL (size) == 0)
1365 return 0;
1366
1367 x = shallow_copy_rtx (x);
1368 y = shallow_copy_rtx (y);
1369 set_mem_size (x, size);
1370 set_mem_size (y, size);
1371 }
1372
1373 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1374 move_by_pieces (x, y, INTVAL (size), align, 0);
1375 else if (emit_block_move_via_movstr (x, y, size, align))
1376 ;
1377 else if (may_use_call)
1378 retval = emit_block_move_via_libcall (x, y, size);
1379 else
1380 emit_block_move_via_loop (x, y, size, align);
1381
1382 if (method == BLOCK_OP_CALL_PARM)
1383 OK_DEFER_POP;
1384
1385 return retval;
1386 }
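/* A minimal usage sketch, with hypothetical DST_MEM and SRC_MEM (BLKmode
   MEMs built by the caller) and byte count NBYTES:

       emit_block_move (dst_mem, src_mem, GEN_INT (nbytes), BLOCK_OP_NORMAL);

   BLOCK_OP_NORMAL may fall back to a memcpy libcall; BLOCK_OP_NO_LIBCALL
   restricts the expansion to the by-pieces, movstr and loop strategies;
   BLOCK_OP_CALL_PARM is used while outgoing arguments are being pushed,
   where a libcall is only emitted if it cannot clobber them.  */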
1387
1388 /* A subroutine of emit_block_move. Returns true if calling the
1389 block move libcall will not clobber any parameters which may have
1390 already been placed on the stack. */
1391
1392 static bool
1393 block_move_libcall_safe_for_call_parm (void)
1394 {
1395 /* If arguments are pushed on the stack, then they're safe. */
1396 if (PUSH_ARGS)
1397 return true;
1398
1399 /* If registers go on the stack anyway, any argument is sure to clobber
1400 an outgoing argument. */
1401 #if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
1402 {
1403 tree fn = emit_block_move_libcall_fn (false);
1404 (void) fn;
1405 if (REG_PARM_STACK_SPACE (fn) != 0)
1406 return false;
1407 }
1408 #endif
1409
1410 /* If any argument goes in memory, then it might clobber an outgoing
1411 argument. */
1412 {
1413 CUMULATIVE_ARGS args_so_far;
1414 tree fn, arg;
1415
1416 fn = emit_block_move_libcall_fn (false);
1417 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
1418
1419 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1420 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1421 {
1422 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1423 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1424 if (!tmp || !REG_P (tmp))
1425 return false;
1426 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1427 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
1428 NULL_TREE, 1))
1429 return false;
1430 #endif
1431 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1432 }
1433 }
1434 return true;
1435 }
1436
1437 /* A subroutine of emit_block_move. Expand a movstr pattern;
1438 return true if successful. */
1439
1440 static bool
1441 emit_block_move_via_movstr (rtx x, rtx y, rtx size, unsigned int align)
1442 {
1443 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1444 int save_volatile_ok = volatile_ok;
1445 enum machine_mode mode;
1446
1447 /* Since this is a move insn, we don't care about volatility. */
1448 volatile_ok = 1;
1449
1450 /* Try the most limited insn first, because there's no point
1451 including more than one in the machine description unless
1452 the more limited one has some advantage. */
1453
1454 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1455 mode = GET_MODE_WIDER_MODE (mode))
1456 {
1457 enum insn_code code = movstr_optab[(int) mode];
1458 insn_operand_predicate_fn pred;
1459
1460 if (code != CODE_FOR_nothing
1461 /* We don't need MODE to be narrower than HOST_BITS_PER_WIDE_INT
1462 here because if SIZE is less than the mode mask, as it is
1463 returned by the macro, it will definitely be less than the
1464 actual mode mask. */
1465 && ((GET_CODE (size) == CONST_INT
1466 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1467 <= (GET_MODE_MASK (mode) >> 1)))
1468 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1469 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1470 || (*pred) (x, BLKmode))
1471 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1472 || (*pred) (y, BLKmode))
1473 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1474 || (*pred) (opalign, VOIDmode)))
1475 {
1476 rtx op2;
1477 rtx last = get_last_insn ();
1478 rtx pat;
1479
1480 op2 = convert_to_mode (mode, size, 1);
1481 pred = insn_data[(int) code].operand[2].predicate;
1482 if (pred != 0 && ! (*pred) (op2, mode))
1483 op2 = copy_to_mode_reg (mode, op2);
1484
1485 /* ??? When called via emit_block_move_for_call, it'd be
1486 nice if there were some way to inform the backend, so
1487 that it doesn't fail the expansion because it thinks
1488 emitting the libcall would be more efficient. */
1489
1490 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1491 if (pat)
1492 {
1493 emit_insn (pat);
1494 volatile_ok = save_volatile_ok;
1495 return true;
1496 }
1497 else
1498 delete_insns_since (last);
1499 }
1500 }
1501
1502 volatile_ok = save_volatile_ok;
1503 return false;
1504 }
1505
1506 /* A subroutine of emit_block_move. Expand a call to memcpy.
1507 Return the return value from memcpy, 0 otherwise. */
1508
1509 static rtx
1510 emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
1511 {
1512 rtx dst_addr, src_addr;
1513 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1514 enum machine_mode size_mode;
1515 rtx retval;
1516
1517 /* DST, SRC, or SIZE may have been passed through protect_from_queue.
1518
1519 It is unsafe to save the value generated by protect_from_queue and reuse
1520 it later. Consider what happens if emit_queue is called before the
1521 return value from protect_from_queue is used.
1522
1523 Expansion of the CALL_EXPR below will call emit_queue before we are
1524 finished emitting RTL for argument setup. So if we are not careful we
1525 could get the wrong value for an argument.
1526
1527 To avoid this problem we go ahead and emit code to copy the addresses of
1528 DST and SRC and SIZE into new pseudos.
1529
1530 Note this is not strictly needed for library calls since they do not call
1531 emit_queue before loading their arguments. However, we may need to have
1532 library calls call emit_queue in the future since failing to do so could
1533 cause problems for targets which define SMALL_REGISTER_CLASSES and pass
1534 arguments in registers. */
1535
1536 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1537 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1538
1539 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1540 src_addr = convert_memory_address (ptr_mode, src_addr);
1541
1542 dst_tree = make_tree (ptr_type_node, dst_addr);
1543 src_tree = make_tree (ptr_type_node, src_addr);
1544
1545 size_mode = TYPE_MODE (sizetype);
1546
1547 size = convert_to_mode (size_mode, size, 1);
1548 size = copy_to_mode_reg (size_mode, size);
1549
1550 /* It is incorrect to use the libcall calling conventions to call
1551 memcpy in this context. This could be a user call to memcpy and
1552 the user may wish to examine the return value from memcpy. For
1553 targets where libcalls and normal calls have different conventions
1554 for returning pointers, we could end up generating incorrect code. */
1555
1556 size_tree = make_tree (sizetype, size);
1557
1558 fn = emit_block_move_libcall_fn (true);
1559 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1560 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1561 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1562
1563 /* Now we have to build up the CALL_EXPR itself. */
1564 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1565 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1566 call_expr, arg_list, NULL_TREE);
1567
1568 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1569
1570 /* If we are initializing a readonly value, show the above call clobbered
1571 it. Otherwise, a load from it may erroneously be hoisted from a loop, or
1572 the delay slot scheduler might overlook conflicts and take nasty
1573 decisions. */
1574 if (RTX_UNCHANGING_P (dst))
1575 add_function_usage_to
1576 (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
1577 gen_rtx_CLOBBER (VOIDmode, dst),
1578 NULL_RTX));
1579
1580 return retval;
1581 }
1582
1583 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1584 for the function we use for block copies. The first time FOR_CALL
1585 is true, we call assemble_external. */
1586
1587 static GTY(()) tree block_move_fn;
1588
1589 void
1590 init_block_move_fn (const char *asmspec)
1591 {
1592 if (!block_move_fn)
1593 {
1594 tree args, fn;
1595
1596 fn = get_identifier ("memcpy");
1597 args = build_function_type_list (ptr_type_node, ptr_type_node,
1598 const_ptr_type_node, sizetype,
1599 NULL_TREE);
1600
1601 fn = build_decl (FUNCTION_DECL, fn, args);
1602 DECL_EXTERNAL (fn) = 1;
1603 TREE_PUBLIC (fn) = 1;
1604 DECL_ARTIFICIAL (fn) = 1;
1605 TREE_NOTHROW (fn) = 1;
1606
1607 block_move_fn = fn;
1608 }
1609
1610 if (asmspec)
1611 {
1612 SET_DECL_RTL (block_move_fn, NULL_RTX);
1613 SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));
1614 }
1615 }
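/* The type built above corresponds to the C prototype

       void *memcpy (void *, const void *, size_t);

   with sizetype standing in for size_t.  When ASMSPEC is given, the call
   is redirected to that assembler name instead of plain "memcpy".  */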
1616
1617 static tree
1618 emit_block_move_libcall_fn (int for_call)
1619 {
1620 static bool emitted_extern;
1621
1622 if (!block_move_fn)
1623 init_block_move_fn (NULL);
1624
1625 if (for_call && !emitted_extern)
1626 {
1627 emitted_extern = true;
1628 make_decl_rtl (block_move_fn, NULL);
1629 assemble_external (block_move_fn);
1630 }
1631
1632 return block_move_fn;
1633 }
1634
1635 /* A subroutine of emit_block_move. Copy the data via an explicit
1636 loop. This is used only when libcalls are forbidden. */
1637 /* ??? It'd be nice to copy in hunks larger than QImode. */
1638
1639 static void
1640 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1641 unsigned int align ATTRIBUTE_UNUSED)
1642 {
1643 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1644 enum machine_mode iter_mode;
1645
1646 iter_mode = GET_MODE (size);
1647 if (iter_mode == VOIDmode)
1648 iter_mode = word_mode;
1649
1650 top_label = gen_label_rtx ();
1651 cmp_label = gen_label_rtx ();
1652 iter = gen_reg_rtx (iter_mode);
1653
1654 emit_move_insn (iter, const0_rtx);
1655
1656 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1657 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1658 do_pending_stack_adjust ();
1659
1660 emit_jump (cmp_label);
1661 emit_label (top_label);
1662
1663 tmp = convert_modes (Pmode, iter_mode, iter, true);
1664 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1665 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1666 x = change_address (x, QImode, x_addr);
1667 y = change_address (y, QImode, y_addr);
1668
1669 emit_move_insn (x, y);
1670
1671 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1672 true, OPTAB_LIB_WIDEN);
1673 if (tmp != iter)
1674 emit_move_insn (iter, tmp);
1675
1676 emit_label (cmp_label);
1677
1678 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1679 true, top_label);
1680 }
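/* The RTL emitted above behaves like this C sketch (a simplified view,
   ignoring modes and addressing; iter, x and y as in the function):

       size_t iter = 0;
       goto cmp;
     top:
       ((char *) x)[iter] = ((char *) y)[iter];
       iter++;
     cmp:
       if (iter < size)
         goto top;

   so a zero SIZE copies nothing and the comparison is done unsigned.  */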
1681 \f
1682 /* Copy all or part of a value X into registers starting at REGNO.
1683 The number of registers to be filled is NREGS. */
1684
1685 void
1686 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1687 {
1688 int i;
1689 #ifdef HAVE_load_multiple
1690 rtx pat;
1691 rtx last;
1692 #endif
1693
1694 if (nregs == 0)
1695 return;
1696
1697 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1698 x = validize_mem (force_const_mem (mode, x));
1699
1700 /* See if the machine can do this with a load multiple insn. */
1701 #ifdef HAVE_load_multiple
1702 if (HAVE_load_multiple)
1703 {
1704 last = get_last_insn ();
1705 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1706 GEN_INT (nregs));
1707 if (pat)
1708 {
1709 emit_insn (pat);
1710 return;
1711 }
1712 else
1713 delete_insns_since (last);
1714 }
1715 #endif
1716
1717 for (i = 0; i < nregs; i++)
1718 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1719 operand_subword_force (x, i, mode));
1720 }
1721
1722 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1723 The number of registers to be filled is NREGS. */
1724
1725 void
1726 move_block_from_reg (int regno, rtx x, int nregs)
1727 {
1728 int i;
1729
1730 if (nregs == 0)
1731 return;
1732
1733 /* See if the machine can do this with a store multiple insn. */
1734 #ifdef HAVE_store_multiple
1735 if (HAVE_store_multiple)
1736 {
1737 rtx last = get_last_insn ();
1738 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1739 GEN_INT (nregs));
1740 if (pat)
1741 {
1742 emit_insn (pat);
1743 return;
1744 }
1745 else
1746 delete_insns_since (last);
1747 }
1748 #endif
1749
1750 for (i = 0; i < nregs; i++)
1751 {
1752 rtx tem = operand_subword (x, i, 1, BLKmode);
1753
1754 if (tem == 0)
1755 abort ();
1756
1757 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1758 }
1759 }
1760
1761 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1762 ORIG, where ORIG is a non-consecutive group of registers represented by
1763 a PARALLEL. The clone is identical to the original except in that the
1764 original set of registers is replaced by a new set of pseudo registers.
1765 The new set has the same modes as the original set. */
1766
1767 rtx
1768 gen_group_rtx (rtx orig)
1769 {
1770 int i, length;
1771 rtx *tmps;
1772
1773 if (GET_CODE (orig) != PARALLEL)
1774 abort ();
1775
1776 length = XVECLEN (orig, 0);
1777 tmps = alloca (sizeof (rtx) * length);
1778
1779 /* Skip a NULL entry in first slot. */
1780 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1781
1782 if (i)
1783 tmps[0] = 0;
1784
1785 for (; i < length; i++)
1786 {
1787 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1788 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1789
1790 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1791 }
1792
1793 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1794 }
1795
1796 /* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
1797 where DST is non-consecutive registers represented by a PARALLEL.
1798 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1799 if not known. */
1800
1801 void
1802 emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
1803 {
1804 rtx *tmps, src;
1805 int start, i;
1806
1807 if (GET_CODE (dst) != PARALLEL)
1808 abort ();
1809
1810 /* Check for a NULL entry, used to indicate that the parameter goes
1811 both on the stack and in registers. */
1812 if (XEXP (XVECEXP (dst, 0, 0), 0))
1813 start = 0;
1814 else
1815 start = 1;
1816
1817 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1818
1819 /* Process the pieces. */
1820 for (i = start; i < XVECLEN (dst, 0); i++)
1821 {
1822 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1823 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1824 unsigned int bytelen = GET_MODE_SIZE (mode);
1825 int shift = 0;
1826
1827 /* Handle trailing fragments that run over the size of the struct. */
1828 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1829 {
1830 /* Arrange to shift the fragment to where it belongs.
1831 extract_bit_field loads to the lsb of the reg. */
1832 if (
1833 #ifdef BLOCK_REG_PADDING
1834 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1835 == (BYTES_BIG_ENDIAN ? upward : downward)
1836 #else
1837 BYTES_BIG_ENDIAN
1838 #endif
1839 )
1840 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1841 bytelen = ssize - bytepos;
1842 if (bytelen <= 0)
1843 abort ();
1844 }
1845
1846 /* If we won't be loading directly from memory, protect the real source
1847 from strange tricks we might play; but make sure that the source can
1848 be loaded directly into the destination. */
1849 src = orig_src;
1850 if (!MEM_P (orig_src)
1851 && (!CONSTANT_P (orig_src)
1852 || (GET_MODE (orig_src) != mode
1853 && GET_MODE (orig_src) != VOIDmode)))
1854 {
1855 if (GET_MODE (orig_src) == VOIDmode)
1856 src = gen_reg_rtx (mode);
1857 else
1858 src = gen_reg_rtx (GET_MODE (orig_src));
1859
1860 emit_move_insn (src, orig_src);
1861 }
1862
1863 /* Optimize the access just a bit. */
1864 if (MEM_P (src)
1865 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1866 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1867 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1868 && bytelen == GET_MODE_SIZE (mode))
1869 {
1870 tmps[i] = gen_reg_rtx (mode);
1871 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1872 }
1873 else if (GET_CODE (src) == CONCAT)
1874 {
1875 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1876 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1877
1878 if ((bytepos == 0 && bytelen == slen0)
1879 || (bytepos != 0 && bytepos + bytelen <= slen))
1880 {
1881 /* The following assumes that the concatenated objects all
1882 have the same size. In this case, a simple calculation
1883 can be used to determine the object and the bit field
1884 to be extracted. */
1885 tmps[i] = XEXP (src, bytepos / slen0);
1886 if (! CONSTANT_P (tmps[i])
1887 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1888 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1889 (bytepos % slen0) * BITS_PER_UNIT,
1890 1, NULL_RTX, mode, mode, ssize);
1891 }
1892 else if (bytepos == 0)
1893 {
1894 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
1895 emit_move_insn (mem, src);
1896 tmps[i] = adjust_address (mem, mode, 0);
1897 }
1898 else
1899 abort ();
1900 }
1901 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1902 SIMD register, which is currently broken. Until we get GCC
1903 to emit proper RTL for these cases, let's dump to memory. */
1904 else if (VECTOR_MODE_P (GET_MODE (dst))
1905 && REG_P (src))
1906 {
1907 int slen = GET_MODE_SIZE (GET_MODE (src));
1908 rtx mem;
1909
1910 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1911 emit_move_insn (mem, src);
1912 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1913 }
1914 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1915 && XVECLEN (dst, 0) > 1)
1916 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
1917 else if (CONSTANT_P (src)
1918 || (REG_P (src) && GET_MODE (src) == mode))
1919 tmps[i] = src;
1920 else
1921 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1922 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1923 mode, mode, ssize);
1924
1925 if (shift)
1926 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1927 tmps[i], 0, OPTAB_WIDEN);
1928 }
1929
1930 emit_queue ();
1931
1932 /* Copy the extracted pieces into the proper (probable) hard regs. */
1933 for (i = start; i < XVECLEN (dst, 0); i++)
1934 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
1935 }
1936
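/* Illustrative sketch only -- not part of the original sources: building a
   PARALLEL that describes a 16-byte structure split into two DImode hard
   registers at byte offsets 0 and 8, then loading the structure into it.
   The register numbers 4 and 5 are made up for the example.  */
#if 0
static void
example_group_load (rtx struct_mem, tree struct_type)
{
  rtx dst
    = gen_rtx_PARALLEL (BLKmode,
                        gen_rtvec (2,
                                   gen_rtx_EXPR_LIST (VOIDmode,
                                                      gen_rtx_REG (DImode, 4),
                                                      GEN_INT (0)),
                                   gen_rtx_EXPR_LIST (VOIDmode,
                                                      gen_rtx_REG (DImode, 5),
                                                      GEN_INT (8))));

  /* Extract each piece of STRUCT_MEM and move it into its register.  */
  emit_group_load (dst, struct_mem, struct_type, 16);
}
#endif
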
1937 /* Emit code to move a block SRC to block DST, where SRC and DST are
1938 non-consecutive groups of registers, each represented by a PARALLEL. */
1939
1940 void
1941 emit_group_move (rtx dst, rtx src)
1942 {
1943 int i;
1944
1945 if (GET_CODE (src) != PARALLEL
1946 || GET_CODE (dst) != PARALLEL
1947 || XVECLEN (src, 0) != XVECLEN (dst, 0))
1948 abort ();
1949
1950 /* Skip first entry if NULL. */
1951 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1952 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1953 XEXP (XVECEXP (src, 0, i), 0));
1954 }
1955
1956 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1957 where SRC is non-consecutive registers represented by a PARALLEL.
1958 SSIZE represents the total size of block ORIG_DST, or -1 if not
1959 known. */
1960
1961 void
1962 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1963 {
1964 rtx *tmps, dst;
1965 int start, i;
1966
1967 if (GET_CODE (src) != PARALLEL)
1968 abort ();
1969
1970 /* Check for a NULL entry, used to indicate that the parameter goes
1971 both on the stack and in registers. */
1972 if (XEXP (XVECEXP (src, 0, 0), 0))
1973 start = 0;
1974 else
1975 start = 1;
1976
1977 tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));
1978
1979 /* Copy the (probable) hard regs into pseudos. */
1980 for (i = start; i < XVECLEN (src, 0); i++)
1981 {
1982 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1983 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1984 emit_move_insn (tmps[i], reg);
1985 }
1986 emit_queue ();
1987
1988 /* If we won't be storing directly into memory, protect the real destination
1989 from strange tricks we might play. */
1990 dst = orig_dst;
1991 if (GET_CODE (dst) == PARALLEL)
1992 {
1993 rtx temp;
1994
1995 /* We can get a PARALLEL dst if there is a conditional expression in
1996 a return statement. In that case, the dst and src are the same,
1997 so no action is necessary. */
1998 if (rtx_equal_p (dst, src))
1999 return;
2000
2001 /* It is unclear if we can ever reach here, but we may as well handle
2002 it. Allocate a temporary, and split this into a store/load to/from
2003 the temporary. */
2004
2005 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2006 emit_group_store (temp, src, type, ssize);
2007 emit_group_load (dst, temp, type, ssize);
2008 return;
2009 }
2010 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
2011 {
2012 dst = gen_reg_rtx (GET_MODE (orig_dst));
2013 /* Make life a bit easier for combine. */
2014 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
2015 }
2016
2017 /* Process the pieces. */
2018 for (i = start; i < XVECLEN (src, 0); i++)
2019 {
2020 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2021 enum machine_mode mode = GET_MODE (tmps[i]);
2022 unsigned int bytelen = GET_MODE_SIZE (mode);
2023 rtx dest = dst;
2024
2025 /* Handle trailing fragments that run over the size of the struct. */
2026 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2027 {
2028 /* store_bit_field always takes its value from the lsb.
2029 Move the fragment to the lsb if it's not already there. */
2030 if (
2031 #ifdef BLOCK_REG_PADDING
2032 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2033 == (BYTES_BIG_ENDIAN ? upward : downward)
2034 #else
2035 BYTES_BIG_ENDIAN
2036 #endif
2037 )
2038 {
2039 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2040 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2041 tmps[i], 0, OPTAB_WIDEN);
2042 }
2043 bytelen = ssize - bytepos;
2044 }
2045
2046 if (GET_CODE (dst) == CONCAT)
2047 {
2048 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2049 dest = XEXP (dst, 0);
2050 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2051 {
2052 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2053 dest = XEXP (dst, 1);
2054 }
2055 else if (bytepos == 0 && XVECLEN (src, 0))
2056 {
2057 dest = assign_stack_temp (GET_MODE (dest),
2058 GET_MODE_SIZE (GET_MODE (dest)), 0);
2059 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2060 tmps[i]);
2061 dst = dest;
2062 break;
2063 }
2064 else
2065 abort ();
2066 }
2067
2068 /* Optimize the access just a bit. */
2069 if (MEM_P (dest)
2070 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2071 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2072 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2073 && bytelen == GET_MODE_SIZE (mode))
2074 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2075 else
2076 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2077 mode, tmps[i], ssize);
2078 }
2079
2080 emit_queue ();
2081
2082 /* Copy from the pseudo into the (probable) hard reg. */
2083 if (orig_dst != dst)
2084 emit_move_insn (orig_dst, dst);
2085 }
2086
2087 /* Generate code to copy a BLKmode object of TYPE out of a
2088 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2089 is null, a stack temporary is created. TGTBLK is returned.
2090
2091 The purpose of this routine is to handle functions that return
2092 BLKmode structures in registers. Some machines (the PA for example)
2093 want to return all small structures in registers regardless of the
2094 structure's alignment. */
2095
2096 rtx
2097 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2098 {
2099 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2100 rtx src = NULL, dst = NULL;
2101 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2102 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2103
2104 if (tgtblk == 0)
2105 {
2106 tgtblk = assign_temp (build_qualified_type (type,
2107 (TYPE_QUALS (type)
2108 | TYPE_QUAL_CONST)),
2109 0, 1, 1);
2110 preserve_temp_slots (tgtblk);
2111 }
2112
2113 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2114 into a new pseudo which is a full word. */
2115
2116 if (GET_MODE (srcreg) != BLKmode
2117 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2118 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2119
2120 /* If the structure doesn't take up a whole number of words, see whether
2121 SRCREG is padded on the left or on the right. If it's on the left,
2122 set PADDING_CORRECTION to the number of bits to skip.
2123
2124 In most ABIs, the structure will be returned at the least significant end of
2125 the register, which translates to right padding on little-endian
2126 targets and left padding on big-endian targets. The opposite
2127 holds if the structure is returned at the most significant
2128 end of the register. */
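  /* For example, on a 32-bit big-endian target that returns the value at
     the least significant end of the register, a 6-byte structure has
     bytes % UNITS_PER_WORD == 2, so PADDING_CORRECTION below works out to
     32 - 2 * 8 == 16 bits.  */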
2129 if (bytes % UNITS_PER_WORD != 0
2130 && (targetm.calls.return_in_msb (type)
2131 ? !BYTES_BIG_ENDIAN
2132 : BYTES_BIG_ENDIAN))
2133 padding_correction
2134 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2135
2136 /* Copy the structure BITSIZE bits at a time.
2137
2138 We could probably emit more efficient code for machines which do not use
2139 strict alignment, but it doesn't seem worth the effort at the current
2140 time. */
2141 for (bitpos = 0, xbitpos = padding_correction;
2142 bitpos < bytes * BITS_PER_UNIT;
2143 bitpos += bitsize, xbitpos += bitsize)
2144 {
2145 /* We need a new source operand each time xbitpos is on a
2146 word boundary and when xbitpos == padding_correction
2147 (the first time through). */
2148 if (xbitpos % BITS_PER_WORD == 0
2149 || xbitpos == padding_correction)
2150 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2151 GET_MODE (srcreg));
2152
2153 /* We need a new destination operand each time bitpos is on
2154 a word boundary. */
2155 if (bitpos % BITS_PER_WORD == 0)
2156 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2157
2158 /* Use xbitpos for the source extraction (right justified) and
2159 bitpos for the destination store (left justified). */
2160 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2161 extract_bit_field (src, bitsize,
2162 xbitpos % BITS_PER_WORD, 1,
2163 NULL_RTX, word_mode, word_mode,
2164 BITS_PER_WORD),
2165 BITS_PER_WORD);
2166 }
2167
2168 return tgtblk;
2169 }
2170
2171 /* Add a USE expression for REG to the (possibly empty) list pointed
2172 to by CALL_FUSAGE. REG must denote a hard register. */
2173
2174 void
2175 use_reg (rtx *call_fusage, rtx reg)
2176 {
2177 if (!REG_P (reg)
2178 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2179 abort ();
2180
2181 *call_fusage
2182 = gen_rtx_EXPR_LIST (VOIDmode,
2183 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2184 }
2185
2186 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2187 starting at REGNO. All of these registers must be hard registers. */
2188
2189 void
2190 use_regs (rtx *call_fusage, int regno, int nregs)
2191 {
2192 int i;
2193
2194 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2195 abort ();
2196
2197 for (i = 0; i < nregs; i++)
2198 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2199 }
2200
2201 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2202 PARALLEL REGS. This is for calls that pass values in multiple
2203 non-contiguous locations. The Irix 6 ABI has examples of this. */
2204
2205 void
2206 use_group_regs (rtx *call_fusage, rtx regs)
2207 {
2208 int i;
2209
2210 for (i = 0; i < XVECLEN (regs, 0); i++)
2211 {
2212 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2213
2214 /* A NULL entry means the parameter goes both on the stack and in
2215 registers. This can also be a MEM for targets that pass values
2216 partially on the stack and partially in registers. */
2217 if (reg != 0 && REG_P (reg))
2218 use_reg (call_fusage, reg);
2219 }
2220 }
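
/* Illustrative sketch only -- not part of the original sources: recording
   that a call reads two consecutive argument registers, starting at the
   made-up hard register number 3, so they are kept live up to the call.  */
#if 0
static void
example_use_arg_regs (rtx *call_fusage)
{
  use_regs (call_fusage, 3, 2);
}
#endif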
2221 \f
2222
2223 /* Determine whether the LEN bytes generated by CONSTFUN can be
2224 stored to memory using several move instructions. CONSTFUNDATA is
2225 a pointer which will be passed as an argument in every CONSTFUN call.
2226 ALIGN is the maximum alignment we can assume. Return nonzero if a
2227 call to store_by_pieces should succeed. */
2228
2229 int
2230 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2231 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2232 void *constfundata, unsigned int align)
2233 {
2234 unsigned HOST_WIDE_INT max_size, l;
2235 HOST_WIDE_INT offset = 0;
2236 enum machine_mode mode, tmode;
2237 enum insn_code icode;
2238 int reverse;
2239 rtx cst;
2240
2241 if (len == 0)
2242 return 1;
2243
2244 if (! STORE_BY_PIECES_P (len, align))
2245 return 0;
2246
2247 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2248 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2249 align = MOVE_MAX * BITS_PER_UNIT;
2250
2251 /* We would first store what we can in the largest integer mode, then go to
2252 successively smaller modes. */
2253
2254 for (reverse = 0;
2255 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2256 reverse++)
2257 {
2258 l = len;
2259 mode = VOIDmode;
2260 max_size = STORE_MAX_PIECES + 1;
2261 while (max_size > 1)
2262 {
2263 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2264 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2265 if (GET_MODE_SIZE (tmode) < max_size)
2266 mode = tmode;
2267
2268 if (mode == VOIDmode)
2269 break;
2270
2271 icode = mov_optab->handlers[(int) mode].insn_code;
2272 if (icode != CODE_FOR_nothing
2273 && align >= GET_MODE_ALIGNMENT (mode))
2274 {
2275 unsigned int size = GET_MODE_SIZE (mode);
2276
2277 while (l >= size)
2278 {
2279 if (reverse)
2280 offset -= size;
2281
2282 cst = (*constfun) (constfundata, offset, mode);
2283 if (!LEGITIMATE_CONSTANT_P (cst))
2284 return 0;
2285
2286 if (!reverse)
2287 offset += size;
2288
2289 l -= size;
2290 }
2291 }
2292
2293 max_size = GET_MODE_SIZE (mode);
2294 }
2295
2296 /* The code above should have handled everything. */
2297 if (l != 0)
2298 abort ();
2299 }
2300
2301 return 1;
2302 }
2303
2304 /* Generate several move instructions to store LEN bytes generated by
2305 CONSTFUN to block TO (a MEM rtx with BLKmode). CONSTFUNDATA is a
2306 pointer which will be passed as an argument in every CONSTFUN call.
2307 ALIGN is the maximum alignment we can assume.
2308 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2309 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2310 stpcpy. */
2311
2312 rtx
2313 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2314 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2315 void *constfundata, unsigned int align, int endp)
2316 {
2317 struct store_by_pieces data;
2318
2319 if (len == 0)
2320 {
2321 if (endp == 2)
2322 abort ();
2323 return to;
2324 }
2325
2326 if (! STORE_BY_PIECES_P (len, align))
2327 abort ();
2328 to = protect_from_queue (to, 1);
2329 data.constfun = constfun;
2330 data.constfundata = constfundata;
2331 data.len = len;
2332 data.to = to;
2333 store_by_pieces_1 (&data, align);
2334 if (endp)
2335 {
2336 rtx to1;
2337
2338 if (data.reverse)
2339 abort ();
2340 if (data.autinc_to)
2341 {
2342 if (endp == 2)
2343 {
2344 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2345 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2346 else
2347 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2348 -1));
2349 }
2350 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2351 data.offset);
2352 }
2353 else
2354 {
2355 if (endp == 2)
2356 --data.offset;
2357 to1 = adjust_address (data.to, QImode, data.offset);
2358 }
2359 return to1;
2360 }
2361 else
2362 return data.to;
2363 }
2364
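/* Illustrative sketch only -- not part of the original sources: a constant
   generator callback that reads bytes out of a host string, in the style
   of the builtin string expanders, together with the usual
   can_store_by_pieces check before store_by_pieces is called.  The
   function names are made up for the example.  */
#if 0
static rtx
example_read_str (void *data, HOST_WIDE_INT offset, enum machine_mode mode)
{
  /* DATA is assumed to point to at least OFFSET + GET_MODE_SIZE (MODE)
     bytes of constant data.  */
  return c_readstr ((const char *) data + offset, mode);
}

static void
example_store_string (rtx dest, const char *str, unsigned int align)
{
  unsigned HOST_WIDE_INT len = strlen (str) + 1;

  if (can_store_by_pieces (len, example_read_str, (void *) str, align))
    store_by_pieces (dest, len, example_read_str, (void *) str, align, 0);
}
#endif
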
2365 /* Generate several move instructions to clear LEN bytes of block TO (a MEM
2366 rtx with BLKmode). The caller must pass TO through protect_from_queue
2367 before calling. ALIGN is the maximum alignment we can assume. */
2368
2369 static void
2370 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2371 {
2372 struct store_by_pieces data;
2373
2374 if (len == 0)
2375 return;
2376
2377 data.constfun = clear_by_pieces_1;
2378 data.constfundata = NULL;
2379 data.len = len;
2380 data.to = to;
2381 store_by_pieces_1 (&data, align);
2382 }
2383
2384 /* Callback routine for clear_by_pieces.
2385 Return const0_rtx unconditionally. */
2386
2387 static rtx
2388 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2389 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2390 enum machine_mode mode ATTRIBUTE_UNUSED)
2391 {
2392 return const0_rtx;
2393 }
2394
2395 /* Subroutine of clear_by_pieces and store_by_pieces.
2396 Generate several move instructions to store LEN bytes of block TO (a MEM
2397 rtx with BLKmode). The caller must pass TO through protect_from_queue
2398 before calling. ALIGN is the maximum alignment we can assume. */
2399
2400 static void
2401 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2402 unsigned int align ATTRIBUTE_UNUSED)
2403 {
2404 rtx to_addr = XEXP (data->to, 0);
2405 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2406 enum machine_mode mode = VOIDmode, tmode;
2407 enum insn_code icode;
2408
2409 data->offset = 0;
2410 data->to_addr = to_addr;
2411 data->autinc_to
2412 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2413 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2414
2415 data->explicit_inc_to = 0;
2416 data->reverse
2417 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2418 if (data->reverse)
2419 data->offset = data->len;
2420
2421 /* If storing requires more than two move insns,
2422 copy addresses to registers (to make displacements shorter)
2423 and use post-increment if available. */
2424 if (!data->autinc_to
2425 && move_by_pieces_ninsns (data->len, align) > 2)
2426 {
2427 /* Determine the main mode we'll be using. */
2428 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2429 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2430 if (GET_MODE_SIZE (tmode) < max_size)
2431 mode = tmode;
2432
2433 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2434 {
2435 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2436 data->autinc_to = 1;
2437 data->explicit_inc_to = -1;
2438 }
2439
2440 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2441 && ! data->autinc_to)
2442 {
2443 data->to_addr = copy_addr_to_reg (to_addr);
2444 data->autinc_to = 1;
2445 data->explicit_inc_to = 1;
2446 }
2447
2448 if (!data->autinc_to && CONSTANT_P (to_addr))
2449 data->to_addr = copy_addr_to_reg (to_addr);
2450 }
2451
2452 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2453 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2454 align = MOVE_MAX * BITS_PER_UNIT;
2455
2456 /* First store what we can in the largest integer mode, then go to
2457 successively smaller modes. */
2458
2459 while (max_size > 1)
2460 {
2461 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2462 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2463 if (GET_MODE_SIZE (tmode) < max_size)
2464 mode = tmode;
2465
2466 if (mode == VOIDmode)
2467 break;
2468
2469 icode = mov_optab->handlers[(int) mode].insn_code;
2470 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2471 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2472
2473 max_size = GET_MODE_SIZE (mode);
2474 }
2475
2476 /* The code above should have handled everything. */
2477 if (data->len != 0)
2478 abort ();
2479 }
2480
2481 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2482 with move instructions for mode MODE. GENFUN is the gen_... function
2483 to make a move insn for that mode. DATA has all the other info. */
2484
2485 static void
2486 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2487 struct store_by_pieces *data)
2488 {
2489 unsigned int size = GET_MODE_SIZE (mode);
2490 rtx to1, cst;
2491
2492 while (data->len >= size)
2493 {
2494 if (data->reverse)
2495 data->offset -= size;
2496
2497 if (data->autinc_to)
2498 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2499 data->offset);
2500 else
2501 to1 = adjust_address (data->to, mode, data->offset);
2502
2503 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2504 emit_insn (gen_add2_insn (data->to_addr,
2505 GEN_INT (-(HOST_WIDE_INT) size)));
2506
2507 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2508 emit_insn ((*genfun) (to1, cst));
2509
2510 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2511 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2512
2513 if (! data->reverse)
2514 data->offset += size;
2515
2516 data->len -= size;
2517 }
2518 }
2519 \f
2520 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2521 its length in bytes. */
2522
2523 rtx
2524 clear_storage (rtx object, rtx size)
2525 {
2526 rtx retval = 0;
2527 unsigned int align = (MEM_P (object) ? MEM_ALIGN (object)
2528 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2529
2530 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2531 just move a zero. Otherwise, do this a piece at a time. */
2532 if (GET_MODE (object) != BLKmode
2533 && GET_CODE (size) == CONST_INT
2534 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2535 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2536 else
2537 {
2538 object = protect_from_queue (object, 1);
2539 size = protect_from_queue (size, 0);
2540
2541 if (size == const0_rtx)
2542 ;
2543 else if (GET_CODE (size) == CONST_INT
2544 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2545 clear_by_pieces (object, INTVAL (size), align);
2546 else if (clear_storage_via_clrstr (object, size, align))
2547 ;
2548 else
2549 retval = clear_storage_via_libcall (object, size);
2550 }
2551
2552 return retval;
2553 }
2554
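/* Illustrative sketch only -- not part of the original sources: zeroing a
   16-byte BLKmode stack temporary.  clear_storage chooses between
   clear_by_pieces, a clrstr pattern and the memset libcall depending on
   the size and the target.  */
#if 0
static void
example_clear_temp (void)
{
  rtx temp = assign_stack_temp (BLKmode, 16, 0);

  clear_storage (temp, GEN_INT (16));
}
#endif
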
2555 /* A subroutine of clear_storage. Expand a clrstr pattern;
2556 return true if successful. */
2557
2558 static bool
2559 clear_storage_via_clrstr (rtx object, rtx size, unsigned int align)
2560 {
2561 /* Try the most limited insn first, because there's no point
2562 including more than one in the machine description unless
2563 the more limited one has some advantage. */
2564
2565 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2566 enum machine_mode mode;
2567
2568 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2569 mode = GET_MODE_WIDER_MODE (mode))
2570 {
2571 enum insn_code code = clrstr_optab[(int) mode];
2572 insn_operand_predicate_fn pred;
2573
2574 if (code != CODE_FOR_nothing
2575 /* We don't need MODE to be narrower than
2576 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2577 the mode mask, as it is returned by the macro, it will
2578 definitely be less than the actual mode mask. */
2579 && ((GET_CODE (size) == CONST_INT
2580 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2581 <= (GET_MODE_MASK (mode) >> 1)))
2582 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2583 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2584 || (*pred) (object, BLKmode))
2585 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2586 || (*pred) (opalign, VOIDmode)))
2587 {
2588 rtx op1;
2589 rtx last = get_last_insn ();
2590 rtx pat;
2591
2592 op1 = convert_to_mode (mode, size, 1);
2593 pred = insn_data[(int) code].operand[1].predicate;
2594 if (pred != 0 && ! (*pred) (op1, mode))
2595 op1 = copy_to_mode_reg (mode, op1);
2596
2597 pat = GEN_FCN ((int) code) (object, op1, opalign);
2598 if (pat)
2599 {
2600 emit_insn (pat);
2601 return true;
2602 }
2603 else
2604 delete_insns_since (last);
2605 }
2606 }
2607
2608 return false;
2609 }
2610
2611 /* A subroutine of clear_storage. Expand a call to memset.
2612 Return the return value of memset, 0 otherwise. */
2613
2614 static rtx
2615 clear_storage_via_libcall (rtx object, rtx size)
2616 {
2617 tree call_expr, arg_list, fn, object_tree, size_tree;
2618 enum machine_mode size_mode;
2619 rtx retval;
2620
2621 /* OBJECT or SIZE may have been passed through protect_from_queue.
2622
2623 It is unsafe to save the value generated by protect_from_queue
2624 and reuse it later. Consider what happens if emit_queue is
2625 called before the return value from protect_from_queue is used.
2626
2627 Expansion of the CALL_EXPR below will call emit_queue before
2628 we are finished emitting RTL for argument setup. So if we are
2629 not careful we could get the wrong value for an argument.
2630
2631 To avoid this problem we go ahead and emit code to copy OBJECT
2632 and SIZE into new pseudos.
2633
2634 Note this is not strictly needed for library calls since they
2635 do not call emit_queue before loading their arguments. However,
2636 we may need to have library calls call emit_queue in the future
2637 since failing to do so could cause problems for targets which
2638 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2639
2640 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2641
2642 size_mode = TYPE_MODE (sizetype);
2643 size = convert_to_mode (size_mode, size, 1);
2644 size = copy_to_mode_reg (size_mode, size);
2645
2646 /* It is incorrect to use the libcall calling conventions to call
2647 memset in this context. This could be a user call to memset and
2648 the user may wish to examine the return value from memset. For
2649 targets where libcalls and normal calls have different conventions
2650 for returning pointers, we could end up generating incorrect code. */
2651
2652 object_tree = make_tree (ptr_type_node, object);
2653 size_tree = make_tree (sizetype, size);
2654
2655 fn = clear_storage_libcall_fn (true);
2656 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2657 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2658 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2659
2660 /* Now we have to build up the CALL_EXPR itself. */
2661 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2662 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2663 call_expr, arg_list, NULL_TREE);
2664
2665 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2666
2667 /* If we are initializing a readonly value, show the above call
2668 clobbered it. Otherwise, a load from it may erroneously be
2669 hoisted from a loop. */
2670 if (RTX_UNCHANGING_P (object))
2671 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2672
2673 return retval;
2674 }
2675
2676 /* A subroutine of clear_storage_via_libcall. Create the tree node
2677 for the function we use for block clears. The first time FOR_CALL
2678 is true, we call assemble_external. */
2679
2680 static GTY(()) tree block_clear_fn;
2681
2682 void
2683 init_block_clear_fn (const char *asmspec)
2684 {
2685 if (!block_clear_fn)
2686 {
2687 tree fn, args;
2688
2689 fn = get_identifier ("memset");
2690 args = build_function_type_list (ptr_type_node, ptr_type_node,
2691 integer_type_node, sizetype,
2692 NULL_TREE);
2693
2694 fn = build_decl (FUNCTION_DECL, fn, args);
2695 DECL_EXTERNAL (fn) = 1;
2696 TREE_PUBLIC (fn) = 1;
2697 DECL_ARTIFICIAL (fn) = 1;
2698 TREE_NOTHROW (fn) = 1;
2699
2700 block_clear_fn = fn;
2701 }
2702
2703 if (asmspec)
2704 {
2705 SET_DECL_RTL (block_clear_fn, NULL_RTX);
2706 SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
2707 }
2708 }
2709
2710 static tree
2711 clear_storage_libcall_fn (int for_call)
2712 {
2713 static bool emitted_extern;
2714
2715 if (!block_clear_fn)
2716 init_block_clear_fn (NULL);
2717
2718 if (for_call && !emitted_extern)
2719 {
2720 emitted_extern = true;
2721 make_decl_rtl (block_clear_fn, NULL);
2722 assemble_external (block_clear_fn);
2723 }
2724
2725 return block_clear_fn;
2726 }
2727 \f
2728 /* Generate code to copy Y into X.
2729 Both Y and X must have the same mode, except that
2730 Y can be a constant with VOIDmode.
2731 This mode cannot be BLKmode; use emit_block_move for that.
2732
2733 Return the last instruction emitted. */
2734
2735 rtx
2736 emit_move_insn (rtx x, rtx y)
2737 {
2738 enum machine_mode mode = GET_MODE (x);
2739 rtx y_cst = NULL_RTX;
2740 rtx last_insn, set;
2741
2742 x = protect_from_queue (x, 1);
2743 y = protect_from_queue (y, 0);
2744
2745 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2746 abort ();
2747
2748 if (CONSTANT_P (y))
2749 {
2750 if (optimize
2751 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
2752 && (last_insn = compress_float_constant (x, y)))
2753 return last_insn;
2754
2755 y_cst = y;
2756
2757 if (!LEGITIMATE_CONSTANT_P (y))
2758 {
2759 y = force_const_mem (mode, y);
2760
2761 /* If the target's cannot_force_const_mem prevented the spill,
2762 assume that the target's move expanders will also take care
2763 of the non-legitimate constant. */
2764 if (!y)
2765 y = y_cst;
2766 }
2767 }
2768
2769 /* If X or Y are memory references, verify that their addresses are valid
2770 for the machine. */
2771 if (MEM_P (x)
2772 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2773 && ! push_operand (x, GET_MODE (x)))
2774 || (flag_force_addr
2775 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2776 x = validize_mem (x);
2777
2778 if (MEM_P (y)
2779 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2780 || (flag_force_addr
2781 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2782 y = validize_mem (y);
2783
2784 if (mode == BLKmode)
2785 abort ();
2786
2787 last_insn = emit_move_insn_1 (x, y);
2788
2789 if (y_cst && REG_P (x)
2790 && (set = single_set (last_insn)) != NULL_RTX
2791 && SET_DEST (set) == x
2792 && ! rtx_equal_p (y_cst, SET_SRC (set)))
2793 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2794
2795 return last_insn;
2796 }
2797
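/* Illustrative sketch only -- not part of the original sources: the common
   pattern of moving an integer constant into a fresh pseudo register.  */
#if 0
static rtx
example_load_constant (void)
{
  rtx reg = gen_reg_rtx (SImode);

  /* A CONST_INT has VOIDmode, which emit_move_insn accepts; constants
     that are not legitimate are forced into the constant pool first.  */
  emit_move_insn (reg, GEN_INT (42));
  return reg;
}
#endif
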
2798 /* Low level part of emit_move_insn.
2799 Called just like emit_move_insn, but assumes X and Y
2800 are basically valid. */
2801
2802 rtx
2803 emit_move_insn_1 (rtx x, rtx y)
2804 {
2805 enum machine_mode mode = GET_MODE (x);
2806 enum machine_mode submode;
2807 enum mode_class class = GET_MODE_CLASS (mode);
2808
2809 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2810 abort ();
2811
2812 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2813 return
2814 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2815
2816 /* Expand complex moves by moving real part and imag part, if possible. */
2817 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2818 && BLKmode != (submode = GET_MODE_INNER (mode))
2819 && (mov_optab->handlers[(int) submode].insn_code
2820 != CODE_FOR_nothing))
2821 {
2822 /* Don't split destination if it is a stack push. */
2823 int stack = push_operand (x, GET_MODE (x));
2824
2825 #ifdef PUSH_ROUNDING
2826 /* If we are pushing to the stack, but the size is smaller than what the
2827 machine can push exactly, we need to use move instructions. */
2828 if (stack
2829 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2830 != GET_MODE_SIZE (submode)))
2831 {
2832 rtx temp;
2833 HOST_WIDE_INT offset1, offset2;
2834
2835 /* Do not use anti_adjust_stack, since we don't want to update
2836 stack_pointer_delta. */
2837 temp = expand_binop (Pmode,
2838 #ifdef STACK_GROWS_DOWNWARD
2839 sub_optab,
2840 #else
2841 add_optab,
2842 #endif
2843 stack_pointer_rtx,
2844 GEN_INT
2845 (PUSH_ROUNDING
2846 (GET_MODE_SIZE (GET_MODE (x)))),
2847 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2848
2849 if (temp != stack_pointer_rtx)
2850 emit_move_insn (stack_pointer_rtx, temp);
2851
2852 #ifdef STACK_GROWS_DOWNWARD
2853 offset1 = 0;
2854 offset2 = GET_MODE_SIZE (submode);
2855 #else
2856 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2857 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2858 + GET_MODE_SIZE (submode));
2859 #endif
2860
2861 emit_move_insn (change_address (x, submode,
2862 gen_rtx_PLUS (Pmode,
2863 stack_pointer_rtx,
2864 GEN_INT (offset1))),
2865 gen_realpart (submode, y));
2866 emit_move_insn (change_address (x, submode,
2867 gen_rtx_PLUS (Pmode,
2868 stack_pointer_rtx,
2869 GEN_INT (offset2))),
2870 gen_imagpart (submode, y));
2871 }
2872 else
2873 #endif
2874 /* If this is a stack push, push the highpart first, so it
2875 will be in the argument order.
2876
2877 In that case, change_address is used only to convert
2878 the mode, not to change the address. */
2879 if (stack)
2880 {
2881 /* Note that the real part always precedes the imag part in memory
2882 regardless of the machine's endianness. */
2883 #ifdef STACK_GROWS_DOWNWARD
2884 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2885 gen_imagpart (submode, y));
2886 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2887 gen_realpart (submode, y));
2888 #else
2889 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2890 gen_realpart (submode, y));
2891 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2892 gen_imagpart (submode, y));
2893 #endif
2894 }
2895 else
2896 {
2897 rtx realpart_x, realpart_y;
2898 rtx imagpart_x, imagpart_y;
2899
2900 /* If this is a complex value with each part being smaller than a
2901 word, the usual calling sequence will likely pack the pieces into
2902 a single register. Unfortunately, SUBREG of hard registers only
2903 deals in terms of words, so we have a problem converting input
2904 arguments to the CONCAT of two registers that is used elsewhere
2905 for complex values. If this is before reload, we can copy it into
2906 memory and reload. FIXME, we should see about using extract and
2907 insert on integer registers, but complex short and complex char
2908 variables should be rarely used. */
2909 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2910 && (reload_in_progress | reload_completed) == 0)
2911 {
2912 int packed_dest_p
2913 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2914 int packed_src_p
2915 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2916
2917 if (packed_dest_p || packed_src_p)
2918 {
2919 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2920 ? MODE_FLOAT : MODE_INT);
2921
2922 enum machine_mode reg_mode
2923 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2924
2925 if (reg_mode != BLKmode)
2926 {
2927 rtx mem = assign_stack_temp (reg_mode,
2928 GET_MODE_SIZE (mode), 0);
2929 rtx cmem = adjust_address (mem, mode, 0);
2930
2931 if (packed_dest_p)
2932 {
2933 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2934
2935 emit_move_insn_1 (cmem, y);
2936 return emit_move_insn_1 (sreg, mem);
2937 }
2938 else
2939 {
2940 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2941
2942 emit_move_insn_1 (mem, sreg);
2943 return emit_move_insn_1 (x, cmem);
2944 }
2945 }
2946 }
2947 }
2948
2949 realpart_x = gen_realpart (submode, x);
2950 realpart_y = gen_realpart (submode, y);
2951 imagpart_x = gen_imagpart (submode, x);
2952 imagpart_y = gen_imagpart (submode, y);
2953
2954 /* Show the output dies here. This is necessary for SUBREGs
2955 of pseudos since we cannot track their lifetimes correctly;
2956 hard regs shouldn't appear here except as return values.
2957 We never want to emit such a clobber after reload. */
2958 if (x != y
2959 && ! (reload_in_progress || reload_completed)
2960 && (GET_CODE (realpart_x) == SUBREG
2961 || GET_CODE (imagpart_x) == SUBREG))
2962 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2963
2964 emit_move_insn (realpart_x, realpart_y);
2965 emit_move_insn (imagpart_x, imagpart_y);
2966 }
2967
2968 return get_last_insn ();
2969 }
2970
2971 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
2972 find a mode to do it in. If we have a movcc, use it. Otherwise,
2973 find the MODE_INT mode of the same width. */
2974 else if (GET_MODE_CLASS (mode) == MODE_CC
2975 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2976 {
2977 enum insn_code insn_code;
2978 enum machine_mode tmode = VOIDmode;
2979 rtx x1 = x, y1 = y;
2980
2981 if (mode != CCmode
2982 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
2983 tmode = CCmode;
2984 else
2985 for (tmode = QImode; tmode != VOIDmode;
2986 tmode = GET_MODE_WIDER_MODE (tmode))
2987 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
2988 break;
2989
2990 if (tmode == VOIDmode)
2991 abort ();
2992
2993 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
2994 may call change_address which is not appropriate if we were
2995 called when a reload was in progress. We don't have to worry
2996 about changing the address since the size in bytes is supposed to
2997 be the same. Copy the MEM to change the mode and move any
2998 substitutions from the old MEM to the new one. */
2999
3000 if (reload_in_progress)
3001 {
3002 x = gen_lowpart_common (tmode, x1);
3003 if (x == 0 && MEM_P (x1))
3004 {
3005 x = adjust_address_nv (x1, tmode, 0);
3006 copy_replacements (x1, x);
3007 }
3008
3009 y = gen_lowpart_common (tmode, y1);
3010 if (y == 0 && MEM_P (y1))
3011 {
3012 y = adjust_address_nv (y1, tmode, 0);
3013 copy_replacements (y1, y);
3014 }
3015 }
3016 else
3017 {
3018 x = gen_lowpart (tmode, x);
3019 y = gen_lowpart (tmode, y);
3020 }
3021
3022 insn_code = mov_optab->handlers[(int) tmode].insn_code;
3023 return emit_insn (GEN_FCN (insn_code) (x, y));
3024 }
3025
3026 /* Try using a move pattern for the corresponding integer mode. This is
3027 only safe when simplify_subreg can convert MODE constants into integer
3028 constants. At present, it can only do this reliably if the value
3029 fits within a HOST_WIDE_INT. */
3030 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3031 && (submode = int_mode_for_mode (mode)) != BLKmode
3032 && mov_optab->handlers[submode].insn_code != CODE_FOR_nothing)
3033 return emit_insn (GEN_FCN (mov_optab->handlers[submode].insn_code)
3034 (simplify_gen_subreg (submode, x, mode, 0),
3035 simplify_gen_subreg (submode, y, mode, 0)));
3036
3037 /* This will handle any multi-word or full-word mode that lacks a move_insn
3038 pattern. However, you will get better code if you define such patterns,
3039 even if they must turn into multiple assembler instructions. */
3040 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3041 {
3042 rtx last_insn = 0;
3043 rtx seq, inner;
3044 int need_clobber;
3045 int i;
3046
3047 #ifdef PUSH_ROUNDING
3048
3049 /* If X is a push on the stack, do the push now and replace
3050 X with a reference to the stack pointer. */
3051 if (push_operand (x, GET_MODE (x)))
3052 {
3053 rtx temp;
3054 enum rtx_code code;
3055
3056 /* Do not use anti_adjust_stack, since we don't want to update
3057 stack_pointer_delta. */
3058 temp = expand_binop (Pmode,
3059 #ifdef STACK_GROWS_DOWNWARD
3060 sub_optab,
3061 #else
3062 add_optab,
3063 #endif
3064 stack_pointer_rtx,
3065 GEN_INT
3066 (PUSH_ROUNDING
3067 (GET_MODE_SIZE (GET_MODE (x)))),
3068 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3069
3070 if (temp != stack_pointer_rtx)
3071 emit_move_insn (stack_pointer_rtx, temp);
3072
3073 code = GET_CODE (XEXP (x, 0));
3074
3075 /* Just hope that small offsets off SP are OK. */
3076 if (code == POST_INC)
3077 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3078 GEN_INT (-((HOST_WIDE_INT)
3079 GET_MODE_SIZE (GET_MODE (x)))));
3080 else if (code == POST_DEC)
3081 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3082 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3083 else
3084 temp = stack_pointer_rtx;
3085
3086 x = change_address (x, VOIDmode, temp);
3087 }
3088 #endif
3089
3090 /* If we are in reload, see if either operand is a MEM whose address
3091 is scheduled for replacement. */
3092 if (reload_in_progress && MEM_P (x)
3093 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3094 x = replace_equiv_address_nv (x, inner);
3095 if (reload_in_progress && MEM_P (y)
3096 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3097 y = replace_equiv_address_nv (y, inner);
3098
3099 start_sequence ();
3100
3101 need_clobber = 0;
3102 for (i = 0;
3103 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3104 i++)
3105 {
3106 rtx xpart = operand_subword (x, i, 1, mode);
3107 rtx ypart = operand_subword (y, i, 1, mode);
3108
3109 /* If we can't get a part of Y, put Y into memory if it is a
3110 constant. Otherwise, force it into a register. If we still
3111 can't get a part of Y, abort. */
3112 if (ypart == 0 && CONSTANT_P (y))
3113 {
3114 y = force_const_mem (mode, y);
3115 ypart = operand_subword (y, i, 1, mode);
3116 }
3117 else if (ypart == 0)
3118 ypart = operand_subword_force (y, i, mode);
3119
3120 if (xpart == 0 || ypart == 0)
3121 abort ();
3122
3123 need_clobber |= (GET_CODE (xpart) == SUBREG);
3124
3125 last_insn = emit_move_insn (xpart, ypart);
3126 }
3127
3128 seq = get_insns ();
3129 end_sequence ();
3130
3131 /* Show the output dies here. This is necessary for SUBREGs
3132 of pseudos since we cannot track their lifetimes correctly;
3133 hard regs shouldn't appear here except as return values.
3134 We never want to emit such a clobber after reload. */
3135 if (x != y
3136 && ! (reload_in_progress || reload_completed)
3137 && need_clobber != 0)
3138 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3139
3140 emit_insn (seq);
3141
3142 return last_insn;
3143 }
3144 else
3145 abort ();
3146 }
3147
3148 /* If Y is representable exactly in a narrower mode, and the target can
3149 perform the extension directly from constant or memory, then emit the
3150 move as an extension. */
3151
3152 static rtx
3153 compress_float_constant (rtx x, rtx y)
3154 {
3155 enum machine_mode dstmode = GET_MODE (x);
3156 enum machine_mode orig_srcmode = GET_MODE (y);
3157 enum machine_mode srcmode;
3158 REAL_VALUE_TYPE r;
3159
3160 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3161
3162 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3163 srcmode != orig_srcmode;
3164 srcmode = GET_MODE_WIDER_MODE (srcmode))
3165 {
3166 enum insn_code ic;
3167 rtx trunc_y, last_insn;
3168
3169 /* Skip if the target can't extend this way. */
3170 ic = can_extend_p (dstmode, srcmode, 0);
3171 if (ic == CODE_FOR_nothing)
3172 continue;
3173
3174 /* Skip if the narrowed value isn't exact. */
3175 if (! exact_real_truncate (srcmode, &r))
3176 continue;
3177
3178 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3179
3180 if (LEGITIMATE_CONSTANT_P (trunc_y))
3181 {
3182 /* Skip if the target needs extra instructions to perform
3183 the extension. */
3184 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3185 continue;
3186 }
3187 else if (float_extend_from_mem[dstmode][srcmode])
3188 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3189 else
3190 continue;
3191
3192 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3193 last_insn = get_last_insn ();
3194
3195 if (REG_P (x))
3196 set_unique_reg_note (last_insn, REG_EQUAL, y);
3197
3198 return last_insn;
3199 }
3200
3201 return NULL_RTX;
3202 }
3203 \f
3204 /* Pushing data onto the stack. */
3205
3206 /* Push a block of length SIZE (perhaps variable)
3207 and return an rtx to address the beginning of the block.
3208 Note that it is not possible for the value returned to be a QUEUED.
3209 The value may be virtual_outgoing_args_rtx.
3210
3211 EXTRA is the number of bytes of padding to push in addition to SIZE.
3212 BELOW nonzero means this padding comes at low addresses;
3213 otherwise, the padding comes at high addresses. */
3214
3215 rtx
3216 push_block (rtx size, int extra, int below)
3217 {
3218 rtx temp;
3219
3220 size = convert_modes (Pmode, ptr_mode, size, 1);
3221 if (CONSTANT_P (size))
3222 anti_adjust_stack (plus_constant (size, extra));
3223 else if (REG_P (size) && extra == 0)
3224 anti_adjust_stack (size);
3225 else
3226 {
3227 temp = copy_to_mode_reg (Pmode, size);
3228 if (extra != 0)
3229 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3230 temp, 0, OPTAB_LIB_WIDEN);
3231 anti_adjust_stack (temp);
3232 }
3233
3234 #ifndef STACK_GROWS_DOWNWARD
3235 if (0)
3236 #else
3237 if (1)
3238 #endif
3239 {
3240 temp = virtual_outgoing_args_rtx;
3241 if (extra != 0 && below)
3242 temp = plus_constant (temp, extra);
3243 }
3244 else
3245 {
3246 if (GET_CODE (size) == CONST_INT)
3247 temp = plus_constant (virtual_outgoing_args_rtx,
3248 -INTVAL (size) - (below ? 0 : extra));
3249 else if (extra != 0 && !below)
3250 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3251 negate_rtx (Pmode, plus_constant (size, extra)));
3252 else
3253 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3254 negate_rtx (Pmode, size));
3255 }
3256
3257 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3258 }
3259
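/* Illustrative sketch only -- not part of the original sources: reserving
   16 bytes of outgoing argument space plus 4 bytes of padding placed at
   the low end of the block.  The byte counts are made up for the example.  */
#if 0
static rtx
example_push_block (void)
{
  return push_block (GEN_INT (16), 4, 1);
}
#endif
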
3260 #ifdef PUSH_ROUNDING
3261
3262 /* Emit single push insn. */
3263
3264 static void
3265 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3266 {
3267 rtx dest_addr;
3268 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3269 rtx dest;
3270 enum insn_code icode;
3271 insn_operand_predicate_fn pred;
3272
3273 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3274 /* If there is a push pattern, use it. Otherwise fall back to the old way of
3275 handing a MEM that represents the push operation to the move expander. */
3276 icode = push_optab->handlers[(int) mode].insn_code;
3277 if (icode != CODE_FOR_nothing)
3278 {
3279 if (((pred = insn_data[(int) icode].operand[0].predicate)
3280 && !((*pred) (x, mode))))
3281 x = force_reg (mode, x);
3282 emit_insn (GEN_FCN (icode) (x));
3283 return;
3284 }
3285 if (GET_MODE_SIZE (mode) == rounded_size)
3286 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3287 /* If we are to pad downward, adjust the stack pointer first and
3288 then store X into the stack location using an offset. This is
3289 because emit_move_insn does not know how to pad; it does not have
3290 access to TYPE. */
3291 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3292 {
3293 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3294 HOST_WIDE_INT offset;
3295
3296 emit_move_insn (stack_pointer_rtx,
3297 expand_binop (Pmode,
3298 #ifdef STACK_GROWS_DOWNWARD
3299 sub_optab,
3300 #else
3301 add_optab,
3302 #endif
3303 stack_pointer_rtx,
3304 GEN_INT (rounded_size),
3305 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3306
3307 offset = (HOST_WIDE_INT) padding_size;
3308 #ifdef STACK_GROWS_DOWNWARD
3309 if (STACK_PUSH_CODE == POST_DEC)
3310 /* We have already decremented the stack pointer, so get the
3311 previous value. */
3312 offset += (HOST_WIDE_INT) rounded_size;
3313 #else
3314 if (STACK_PUSH_CODE == POST_INC)
3315 /* We have already incremented the stack pointer, so get the
3316 previous value. */
3317 offset -= (HOST_WIDE_INT) rounded_size;
3318 #endif
3319 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3320 }
3321 else
3322 {
3323 #ifdef STACK_GROWS_DOWNWARD
3324 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3325 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3326 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3327 #else
3328 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3329 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3330 GEN_INT (rounded_size));
3331 #endif
3332 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3333 }
3334
3335 dest = gen_rtx_MEM (mode, dest_addr);
3336
3337 if (type != 0)
3338 {
3339 set_mem_attributes (dest, type, 1);
3340
3341 if (flag_optimize_sibling_calls)
3342 /* Function incoming arguments may overlap with sibling call
3343 outgoing arguments and we cannot allow reordering of reads
3344 from function arguments with stores to outgoing arguments
3345 of sibling calls. */
3346 set_mem_alias_set (dest, 0);
3347 }
3348 emit_move_insn (dest, x);
3349 }
3350 #endif
3351
3352 /* Generate code to push X onto the stack, assuming it has mode MODE and
3353 type TYPE.
3354 MODE is redundant except when X is a CONST_INT (since they don't
3355 carry mode info).
3356 SIZE is an rtx for the size of data to be copied (in bytes),
3357 needed only if X is BLKmode.
3358
3359 ALIGN (in bits) is the maximum alignment we can assume.
3360
3361 If PARTIAL and REG are both nonzero, then copy that many of the first
3362 words of X into registers starting with REG, and push the rest of X.
3363 The amount of space pushed is decreased by PARTIAL words,
3364 rounded *down* to a multiple of PARM_BOUNDARY.
3365 REG must be a hard register in this case.
3366 If REG is zero but PARTIAL is not, take all other actions for an
3367 argument partially in registers, but do not actually load any
3368 registers.
3369
3370 EXTRA is the amount in bytes of extra space to leave next to this arg.
3371 This is ignored if an argument block has already been allocated.
3372
3373 On a machine that lacks real push insns, ARGS_ADDR is the address of
3374 the bottom of the argument block for this call. We use indexing off there
3375 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3376 argument block has not been preallocated.
3377
3378 ARGS_SO_FAR is the size of args previously pushed for this call.
3379
3380 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3381 for arguments passed in registers. If nonzero, it will be the number
3382 of bytes required. */
3383
3384 void
3385 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3386 unsigned int align, int partial, rtx reg, int extra,
3387 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3388 rtx alignment_pad)
3389 {
3390 rtx xinner;
3391 enum direction stack_direction
3392 #ifdef STACK_GROWS_DOWNWARD
3393 = downward;
3394 #else
3395 = upward;
3396 #endif
3397
3398 /* Decide where to pad the argument: `downward' for below,
3399 `upward' for above, or `none' for no padding.
3400 Default is below for small data on big-endian machines; else above. */
3401 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3402
3403 /* Invert direction if stack is post-decrement.
3404 FIXME: why? */
3405 if (STACK_PUSH_CODE == POST_DEC)
3406 if (where_pad != none)
3407 where_pad = (where_pad == downward ? upward : downward);
3408
3409 xinner = x = protect_from_queue (x, 0);
3410
3411 if (mode == BLKmode)
3412 {
3413 /* Copy a block into the stack, entirely or partially. */
3414
3415 rtx temp;
3416 int used = partial * UNITS_PER_WORD;
3417 int offset;
3418 int skip;
3419
3420 if (reg && GET_CODE (reg) == PARALLEL)
3421 {
3422 /* Use the size of the elt to compute offset. */
3423 rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
3424 used = partial * GET_MODE_SIZE (GET_MODE (elt));
3425 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3426 }
3427 else
3428 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3429
3430 if (size == 0)
3431 abort ();
3432
3433 used -= offset;
3434
3435 /* USED is now the # of bytes we need not copy to the stack
3436 because registers will take care of them. */
3437
3438 if (partial != 0)
3439 xinner = adjust_address (xinner, BLKmode, used);
3440
3441 /* If the partial register-part of the arg counts in its stack size,
3442 skip the part of stack space corresponding to the registers.
3443 Otherwise, start copying to the beginning of the stack space,
3444 by setting SKIP to 0. */
3445 skip = (reg_parm_stack_space == 0) ? 0 : used;
3446
3447 #ifdef PUSH_ROUNDING
3448 /* Do it with several push insns if that doesn't take lots of insns
3449 and if there is no difficulty with push insns that skip bytes
3450 on the stack for alignment purposes. */
3451 if (args_addr == 0
3452 && PUSH_ARGS
3453 && GET_CODE (size) == CONST_INT
3454 && skip == 0
3455 && MEM_ALIGN (xinner) >= align
3456 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3457 /* Here we avoid the case of a structure whose weak alignment
3458 forces many pushes of a small amount of data,
3459 and such small pushes do rounding that causes trouble. */
3460 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3461 || align >= BIGGEST_ALIGNMENT
3462 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3463 == (align / BITS_PER_UNIT)))
3464 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3465 {
3466 /* Push padding now if padding above and stack grows down,
3467 or if padding below and stack grows up.
3468 But if space already allocated, this has already been done. */
3469 if (extra && args_addr == 0
3470 && where_pad != none && where_pad != stack_direction)
3471 anti_adjust_stack (GEN_INT (extra));
3472
3473 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3474 }
3475 else
3476 #endif /* PUSH_ROUNDING */
3477 {
3478 rtx target;
3479
3480 /* Otherwise make space on the stack and copy the data
3481 to the address of that space. */
3482
3483 /* Deduct words put into registers from the size we must copy. */
3484 if (partial != 0)
3485 {
3486 if (GET_CODE (size) == CONST_INT)
3487 size = GEN_INT (INTVAL (size) - used);
3488 else
3489 size = expand_binop (GET_MODE (size), sub_optab, size,
3490 GEN_INT (used), NULL_RTX, 0,
3491 OPTAB_LIB_WIDEN);
3492 }
3493
3494 /* Get the address of the stack space.
3495 In this case, we do not deal with EXTRA separately.
3496 A single stack adjust will do. */
3497 if (! args_addr)
3498 {
3499 temp = push_block (size, extra, where_pad == downward);
3500 extra = 0;
3501 }
3502 else if (GET_CODE (args_so_far) == CONST_INT)
3503 temp = memory_address (BLKmode,
3504 plus_constant (args_addr,
3505 skip + INTVAL (args_so_far)));
3506 else
3507 temp = memory_address (BLKmode,
3508 plus_constant (gen_rtx_PLUS (Pmode,
3509 args_addr,
3510 args_so_far),
3511 skip));
3512
3513 if (!ACCUMULATE_OUTGOING_ARGS)
3514 {
3515 /* If the source is referenced relative to the stack pointer,
3516 copy it to another register to stabilize it. We do not need
3517 to do this if we know that we won't be changing sp. */
3518
3519 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3520 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3521 temp = copy_to_reg (temp);
3522 }
3523
3524 target = gen_rtx_MEM (BLKmode, temp);
3525
3526 if (type != 0)
3527 {
3528 set_mem_attributes (target, type, 1);
3529 /* Function incoming arguments may overlap with sibling call
3530 outgoing arguments and we cannot allow reordering of reads
3531 from function arguments with stores to outgoing arguments
3532 of sibling calls. */
3533 set_mem_alias_set (target, 0);
3534 }
3535
3536 /* ALIGN may well be better aligned than TYPE, e.g. due to
3537 PARM_BOUNDARY. Assume the caller isn't lying. */
3538 set_mem_align (target, align);
3539
3540 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3541 }
3542 }
3543 else if (partial > 0)
3544 {
3545 /* Scalar partly in registers. */
3546
3547 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3548 int i;
3549 int not_stack;
3550 /* # words of start of argument
3551 that we must make space for but need not store. */
3552 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3553 int args_offset = INTVAL (args_so_far);
3554 int skip;
3555
3556 /* Push padding now if padding above and stack grows down,
3557 or if padding below and stack grows up.
3558 But if space already allocated, this has already been done. */
3559 if (extra && args_addr == 0
3560 && where_pad != none && where_pad != stack_direction)
3561 anti_adjust_stack (GEN_INT (extra));
3562
3563 /* If we make space by pushing it, we might as well push
3564 the real data. Otherwise, we can leave OFFSET nonzero
3565 and leave the space uninitialized. */
3566 if (args_addr == 0)
3567 offset = 0;
3568
3569 /* Now NOT_STACK gets the number of words that we don't need to
3570 allocate on the stack. */
3571 not_stack = partial - offset;
3572
3573 /* If the partial register-part of the arg counts in its stack size,
3574 skip the part of stack space corresponding to the registers.
3575 Otherwise, start copying to the beginning of the stack space,
3576 by setting SKIP to 0. */
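/* For example, ABIs that reserve stack slots even for register-passed
   arguments (the AIX PowerPC parameter save area is one such case)
   give REG_PARM_STACK_SPACE a nonzero value, so the register part of
   the argument must be skipped here.  */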
3577 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3578
3579 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3580 x = validize_mem (force_const_mem (mode, x));
3581
3582 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3583 SUBREGs of such registers are not allowed. */
3584 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3585 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3586 x = copy_to_reg (x);
3587
3588 /* Loop over all the words allocated on the stack for this arg. */
3589 /* We can do it by words, because any scalar bigger than a word
3590 has a size a multiple of a word. */
3591 #ifndef PUSH_ARGS_REVERSED
3592 for (i = not_stack; i < size; i++)
3593 #else
3594 for (i = size - 1; i >= not_stack; i--)
3595 #endif
3596 if (i >= not_stack + offset)
3597 emit_push_insn (operand_subword_force (x, i, mode),
3598 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3599 0, args_addr,
3600 GEN_INT (args_offset + ((i - not_stack + skip)
3601 * UNITS_PER_WORD)),
3602 reg_parm_stack_space, alignment_pad);
3603 }
3604 else
3605 {
3606 rtx addr;
3607 rtx dest;
3608
3609 /* Push padding now if padding above and stack grows down,
3610 or if padding below and stack grows up.
3611 But if space already allocated, this has already been done. */
3612 if (extra && args_addr == 0
3613 && where_pad != none && where_pad != stack_direction)
3614 anti_adjust_stack (GEN_INT (extra));
3615
3616 #ifdef PUSH_ROUNDING
3617 if (args_addr == 0 && PUSH_ARGS)
3618 emit_single_push_insn (mode, x, type);
3619 else
3620 #endif
3621 {
3622 if (GET_CODE (args_so_far) == CONST_INT)
3623 addr
3624 = memory_address (mode,
3625 plus_constant (args_addr,
3626 INTVAL (args_so_far)));
3627 else
3628 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3629 args_so_far));
3630 dest = gen_rtx_MEM (mode, addr);
3631 if (type != 0)
3632 {
3633 set_mem_attributes (dest, type, 1);
3634 /* Function incoming arguments may overlap with sibling call
3635 outgoing arguments and we cannot allow reordering of reads
3636 from function arguments with stores to outgoing arguments
3637 of sibling calls. */
3638 set_mem_alias_set (dest, 0);
3639 }
3640
3641 emit_move_insn (dest, x);
3642 }
3643 }
3644
3645 /* If part should go in registers, copy that part
3646 into the appropriate registers. Do this now, at the end,
3647 since mem-to-mem copies above may do function calls. */
3648 if (partial > 0 && reg != 0)
3649 {
3650 /* Handle calls that pass values in multiple non-contiguous locations.
3651 The Irix 6 ABI has examples of this. */
3652 if (GET_CODE (reg) == PARALLEL)
3653 emit_group_load (reg, x, type, -1);
3654 else
3655 move_block_to_reg (REGNO (reg), x, partial, mode);
3656 }
3657
3658 if (extra && args_addr == 0 && where_pad == stack_direction)
3659 anti_adjust_stack (GEN_INT (extra));
3660
3661 if (alignment_pad && args_addr == 0)
3662 anti_adjust_stack (alignment_pad);
3663 }
3664 \f
3665 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3666 operations. */
3667
3668 static rtx
3669 get_subtarget (rtx x)
3670 {
3671 return ((x == 0
3672 /* Only registers can be subtargets. */
3673 || !REG_P (x)
3674 /* If the register is readonly, it can't be set more than once. */
3675 || RTX_UNCHANGING_P (x)
3676 /* Don't use hard regs to avoid extending their life. */
3677 || REGNO (x) < FIRST_PSEUDO_REGISTER
3678 /* Avoid subtargets inside loops,
3679 since they hide some invariant expressions. */
3680 || preserve_subexpressions_p ())
3681 ? 0 : x);
3682 }
3683
3684 /* Expand an assignment that stores the value of FROM into TO.
3685 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3686 (This may contain a QUEUED rtx;
3687 if the value is constant, this rtx is a constant.)
3688 Otherwise, the returned value is NULL_RTX. */
3689
3690 rtx
3691 expand_assignment (tree to, tree from, int want_value)
3692 {
3693 rtx to_rtx = 0;
3694 rtx result;
3695
3696 /* Don't crash if the lhs of the assignment was erroneous. */
3697
3698 if (TREE_CODE (to) == ERROR_MARK)
3699 {
3700 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3701 return want_value ? result : NULL_RTX;
3702 }
3703
3704 /* Assignment of a structure component needs special treatment
3705 if the structure component's rtx is not simply a MEM.
3706 Assignment of an array element at a constant index, and assignment of
3707 an array element in an unaligned packed structure field, has the same
3708 problem. */
3709
3710 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3711 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
3712 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
3713 {
3714 enum machine_mode mode1;
3715 HOST_WIDE_INT bitsize, bitpos;
3716 rtx orig_to_rtx;
3717 tree offset;
3718 int unsignedp;
3719 int volatilep = 0;
3720 tree tem;
3721
3722 push_temp_slots ();
3723 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3724 &unsignedp, &volatilep);
3725
3726 /* If we are going to use store_bit_field and extract_bit_field,
3727 make sure to_rtx will be safe for multiple use. */
3728
3729 if (mode1 == VOIDmode && want_value)
3730 tem = stabilize_reference (tem);
3731
3732 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3733
3734 if (offset != 0)
3735 {
3736 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3737
3738 if (!MEM_P (to_rtx))
3739 abort ();
3740
3741 #ifdef POINTERS_EXTEND_UNSIGNED
3742 if (GET_MODE (offset_rtx) != Pmode)
3743 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
3744 #else
3745 if (GET_MODE (offset_rtx) != ptr_mode)
3746 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3747 #endif
3748
3749 /* A constant address in TO_RTX can have VOIDmode, we must not try
3750 to call force_reg for that case. Avoid that case. */
3751 if (MEM_P (to_rtx)
3752 && GET_MODE (to_rtx) == BLKmode
3753 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3754 && bitsize > 0
3755 && (bitpos % bitsize) == 0
3756 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3757 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3758 {
3759 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3760 bitpos = 0;
3761 }
3762
3763 to_rtx = offset_address (to_rtx, offset_rtx,
3764 highest_pow2_factor_for_target (to,
3765 offset));
3766 }
3767
3768 if (MEM_P (to_rtx))
3769 {
3770 /* If the field is at offset zero, we could have been given the
3771 DECL_RTX of the parent struct. Don't munge it. */
3772 to_rtx = shallow_copy_rtx (to_rtx);
3773
3774 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
3775 }
3776
3777 /* Deal with volatile and readonly fields. The former is only done
3778 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3779 if (volatilep && MEM_P (to_rtx))
3780 {
3781 if (to_rtx == orig_to_rtx)
3782 to_rtx = copy_rtx (to_rtx);
3783 MEM_VOLATILE_P (to_rtx) = 1;
3784 }
3785
3786 if (TREE_CODE (to) == COMPONENT_REF
3787 && TREE_READONLY (TREE_OPERAND (to, 1))
3788 /* We can't assert that a MEM won't be set more than once
3789 if the component is not addressable because another
3790 non-addressable component may be referenced by the same MEM. */
3791 && ! (MEM_P (to_rtx) && ! can_address_p (to)))
3792 {
3793 if (to_rtx == orig_to_rtx)
3794 to_rtx = copy_rtx (to_rtx);
3795 RTX_UNCHANGING_P (to_rtx) = 1;
3796 }
3797
3798 if (MEM_P (to_rtx) && ! can_address_p (to))
3799 {
3800 if (to_rtx == orig_to_rtx)
3801 to_rtx = copy_rtx (to_rtx);
3802 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3803 }
3804
3805 while (mode1 == VOIDmode && !want_value
3806 && bitpos + bitsize <= BITS_PER_WORD
3807 && bitsize < BITS_PER_WORD
3808 && GET_MODE_BITSIZE (GET_MODE (to_rtx)) <= BITS_PER_WORD
3809 && !TREE_SIDE_EFFECTS (to)
3810 && !TREE_THIS_VOLATILE (to))
3811 {
3812 tree src, op0, op1;
3813 rtx value;
3814 HOST_WIDE_INT count = bitpos;
3815 optab binop;
3816
3817 src = from;
3818 STRIP_NOPS (src);
3819 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE
3820 || TREE_CODE_CLASS (TREE_CODE (src)) != '2')
3821 break;
3822
3823 op0 = TREE_OPERAND (src, 0);
3824 op1 = TREE_OPERAND (src, 1);
3825 STRIP_NOPS (op0);
3826
3827 if (! operand_equal_p (to, op0, 0))
3828 break;
3829
3830 if (BYTES_BIG_ENDIAN)
3831 count = GET_MODE_BITSIZE (GET_MODE (to_rtx)) - bitpos - bitsize;
3832
3833 /* Special case some bitfield op= exp. */
3834 switch (TREE_CODE (src))
3835 {
3836 case PLUS_EXPR:
3837 case MINUS_EXPR:
3838 if (count <= 0)
3839 break;
3840
3841 /* For now, just optimize the case of the topmost bitfield
3842 where we don't need to do any masking and also
3843 1 bit bitfields where xor can be used.
3844 We might win by one instruction for the other bitfields
3845 too if insv/extv instructions aren't used, so that
3846 can be added later. */
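/* For example, "x.f op= n" where F occupies the most significant bits
   of its word becomes a full-word add or subtract of (n << count):
   carries out of the field simply fall off the top of the word.  A
   1-bit field elsewhere in the word, with a constant operand, is
   flipped with an xor of (n & 1) << count instead.  */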
3847 if (count + bitsize != GET_MODE_BITSIZE (GET_MODE (to_rtx))
3848 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
3849 break;
3850 value = expand_expr (op1, NULL_RTX, VOIDmode, 0);
3851 value = protect_from_queue (value, 0);
3852 to_rtx = protect_from_queue (to_rtx, 1);
3853 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
3854 if (bitsize == 1
3855 && count + bitsize != GET_MODE_BITSIZE (GET_MODE (to_rtx)))
3856 {
3857 value = expand_and (GET_MODE (to_rtx), value, const1_rtx,
3858 NULL_RTX);
3859 binop = xor_optab;
3860 }
3861 value = expand_shift (LSHIFT_EXPR, GET_MODE (to_rtx),
3862 value, build_int_2 (count, 0),
3863 NULL_RTX, 1);
3864 result = expand_binop (GET_MODE (to_rtx), binop, to_rtx,
3865 value, to_rtx, 1, OPTAB_WIDEN);
3866 if (result != to_rtx)
3867 emit_move_insn (to_rtx, result);
3868 free_temp_slots ();
3869 pop_temp_slots ();
3870 return NULL_RTX;
3871 default:
3872 break;
3873 }
3874
3875 break;
3876 }
3877
3878 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3879 (want_value
3880 /* Spurious cast for HPUX compiler. */
3881 ? ((enum machine_mode)
3882 TYPE_MODE (TREE_TYPE (to)))
3883 : VOIDmode),
3884 unsignedp, TREE_TYPE (tem), get_alias_set (to));
3885
3886 preserve_temp_slots (result);
3887 free_temp_slots ();
3888 pop_temp_slots ();
3889
3890 /* If the value is meaningful, convert RESULT to the proper mode.
3891 Otherwise, return nothing. */
3892 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3893 TYPE_MODE (TREE_TYPE (from)),
3894 result,
3895 TYPE_UNSIGNED (TREE_TYPE (to)))
3896 : NULL_RTX);
3897 }
3898
3899 /* If the rhs is a function call and its value is not an aggregate,
3900 call the function before we start to compute the lhs.
3901 This is needed for correct code for cases such as
3902 val = setjmp (buf) on machines where reference to val
3903 requires loading up part of an address in a separate insn.
3904
3905 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3906 since it might be a promoted variable where the zero- or sign- extension
3907 needs to be done. Handling this in the normal way is safe because no
3908 computation is done before the call. */
3909 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
3910 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3911 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3912 && REG_P (DECL_RTL (to))))
3913 {
3914 rtx value;
3915
3916 push_temp_slots ();
3917 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3918 if (to_rtx == 0)
3919 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3920
3921 /* Handle calls that return values in multiple non-contiguous locations.
3922 The Irix 6 ABI has examples of this. */
3923 if (GET_CODE (to_rtx) == PARALLEL)
3924 emit_group_load (to_rtx, value, TREE_TYPE (from),
3925 int_size_in_bytes (TREE_TYPE (from)));
3926 else if (GET_MODE (to_rtx) == BLKmode)
3927 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
3928 else
3929 {
3930 if (POINTER_TYPE_P (TREE_TYPE (to)))
3931 value = convert_memory_address (GET_MODE (to_rtx), value);
3932 emit_move_insn (to_rtx, value);
3933 }
3934 preserve_temp_slots (to_rtx);
3935 free_temp_slots ();
3936 pop_temp_slots ();
3937 return want_value ? to_rtx : NULL_RTX;
3938 }
3939
3940 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3941 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3942
3943 if (to_rtx == 0)
3944 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3945
3946 /* Don't move directly into a return register. */
3947 if (TREE_CODE (to) == RESULT_DECL
3948 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
3949 {
3950 rtx temp;
3951
3952 push_temp_slots ();
3953 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3954
3955 if (GET_CODE (to_rtx) == PARALLEL)
3956 emit_group_load (to_rtx, temp, TREE_TYPE (from),
3957 int_size_in_bytes (TREE_TYPE (from)));
3958 else
3959 emit_move_insn (to_rtx, temp);
3960
3961 preserve_temp_slots (to_rtx);
3962 free_temp_slots ();
3963 pop_temp_slots ();
3964 return want_value ? to_rtx : NULL_RTX;
3965 }
3966
3967 /* In case we are returning the contents of an object which overlaps
3968 the place the value is being stored, use a safe function when copying
3969 a value through a pointer into a structure value return block. */
3970 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3971 && current_function_returns_struct
3972 && !current_function_returns_pcc_struct)
3973 {
3974 rtx from_rtx, size;
3975
3976 push_temp_slots ();
3977 size = expr_size (from);
3978 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3979
3980 emit_library_call (memmove_libfunc, LCT_NORMAL,
3981 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3982 XEXP (from_rtx, 0), Pmode,
3983 convert_to_mode (TYPE_MODE (sizetype),
3984 size, TYPE_UNSIGNED (sizetype)),
3985 TYPE_MODE (sizetype));
3986
3987 preserve_temp_slots (to_rtx);
3988 free_temp_slots ();
3989 pop_temp_slots ();
3990 return want_value ? to_rtx : NULL_RTX;
3991 }
3992
3993 /* Compute FROM and store the value in the rtx we got. */
3994
3995 push_temp_slots ();
3996 result = store_expr (from, to_rtx, want_value);
3997 preserve_temp_slots (result);
3998 free_temp_slots ();
3999 pop_temp_slots ();
4000 return want_value ? result : NULL_RTX;
4001 }
4002
4003 /* Generate code for computing expression EXP,
4004 and storing the value into TARGET.
4005 TARGET may contain a QUEUED rtx.
4006
4007 If WANT_VALUE & 1 is nonzero, return a copy of the value
4008 not in TARGET, so that we can be sure to use the proper
4009 value in a containing expression even if TARGET has something
4010 else stored in it. If possible, we copy the value through a pseudo
4011 and return that pseudo. Or, if the value is constant, we try to
4012 return the constant. In some cases, we return a pseudo
4013 copied *from* TARGET.
4014
4015 If the mode is BLKmode then we may return TARGET itself.
4016 It turns out that in BLKmode it doesn't cause a problem,
4017 because C has no operators that could combine two different
4018 assignments into the same BLKmode object with different values
4019 with no sequence point. Will other languages need this to
4020 be more thorough?
4021
4022 If WANT_VALUE & 1 is 0, we return NULL, to make sure
4023 to catch quickly any cases where the caller uses the value
4024 and fails to set WANT_VALUE.
4025
4026 If WANT_VALUE & 2 is set, this is a store into a call param on the
4027 stack, and block moves may need to be treated specially. */
4028
4029 rtx
4030 store_expr (tree exp, rtx target, int want_value)
4031 {
4032 rtx temp;
4033 rtx alt_rtl = NULL_RTX;
4034 rtx mark = mark_queue ();
4035 int dont_return_target = 0;
4036 int dont_store_target = 0;
4037
4038 if (VOID_TYPE_P (TREE_TYPE (exp)))
4039 {
4040 /* C++ can generate ?: expressions with a throw expression in one
4041 branch and an rvalue in the other. Here, we resolve attempts to
4042 store the throw expression's nonexistent result. */
4043 if (want_value)
4044 abort ();
4045 expand_expr (exp, const0_rtx, VOIDmode, 0);
4046 return NULL_RTX;
4047 }
4048 if (TREE_CODE (exp) == COMPOUND_EXPR)
4049 {
4050 /* Perform first part of compound expression, then assign from second
4051 part. */
4052 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4053 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4054 emit_queue ();
4055 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4056 }
4057 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4058 {
4059 /* For conditional expression, get safe form of the target. Then
4060 test the condition, doing the appropriate assignment on either
4061 side. This avoids the creation of unnecessary temporaries.
4062 For non-BLKmode, it is more efficient not to do this. */
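/* For example, "d = c ? a : b" where D, A and B are structures stores
   A or B directly into D on the branch taken, rather than building the
   selected value in a temporary and block-copying it into D.  */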
4063
4064 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4065
4066 emit_queue ();
4067 target = protect_from_queue (target, 1);
4068
4069 do_pending_stack_adjust ();
4070 NO_DEFER_POP;
4071 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4072 start_cleanup_deferral ();
4073 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
4074 end_cleanup_deferral ();
4075 emit_queue ();
4076 emit_jump_insn (gen_jump (lab2));
4077 emit_barrier ();
4078 emit_label (lab1);
4079 start_cleanup_deferral ();
4080 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
4081 end_cleanup_deferral ();
4082 emit_queue ();
4083 emit_label (lab2);
4084 OK_DEFER_POP;
4085
4086 return want_value & 1 ? target : NULL_RTX;
4087 }
4088 else if (queued_subexp_p (target))
4089 /* If target contains a postincrement, let's not risk
4090 using it as the place to generate the rhs. */
4091 {
4092 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4093 {
4094 /* Expand EXP into a new pseudo. */
4095 temp = gen_reg_rtx (GET_MODE (target));
4096 temp = expand_expr (exp, temp, GET_MODE (target),
4097 (want_value & 2
4098 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4099 }
4100 else
4101 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4102 (want_value & 2
4103 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4104
4105 /* If target is volatile, ANSI requires accessing the value
4106 *from* the target, if it is accessed. So make that happen.
4107 In no case return the target itself. */
4108 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
4109 dont_return_target = 1;
4110 }
4111 else if ((want_value & 1) != 0
4112 && MEM_P (target)
4113 && ! MEM_VOLATILE_P (target)
4114 && GET_MODE (target) != BLKmode)
4115 /* If target is in memory and caller wants value in a register instead,
4116 arrange that. Pass TARGET as target for expand_expr so that,
4117 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4118 We know expand_expr will not use the target in that case.
4119 Don't do this if TARGET is volatile because we are supposed
4120 to write it and then read it. */
4121 {
4122 temp = expand_expr (exp, target, GET_MODE (target),
4123 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4124 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4125 {
4126 /* If TEMP is already in the desired TARGET, only copy it from
4127 memory and don't store it there again. */
4128 if (temp == target
4129 || (rtx_equal_p (temp, target)
4130 && ! side_effects_p (temp) && ! side_effects_p (target)))
4131 dont_store_target = 1;
4132 temp = copy_to_reg (temp);
4133 }
4134 dont_return_target = 1;
4135 }
4136 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4137 /* If this is a scalar in a register that is stored in a wider mode
4138 than the declared mode, compute the result into its declared mode
4139 and then convert to the wider mode. Our value is the computed
4140 expression. */
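/* For example, on a target whose PROMOTE_MODE widens HImode locals to
   SImode registers, a "short" variable is computed here in HImode and
   then sign- or zero-extended (per SUBREG_PROMOTED_UNSIGNED_P) into
   the wider register.  */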
4141 {
4142 rtx inner_target = 0;
4143
4144 /* If we don't want a value, we can do the conversion inside EXP,
4145 which will often result in some optimizations. Do the conversion
4146 in two steps: first change the signedness, if needed, then
4147 the extend. But don't do this if the type of EXP is a subtype
4148 of something else since then the conversion might involve
4149 more than just converting modes. */
4150 if ((want_value & 1) == 0
4151 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4152 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4153 {
4154 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4155 != SUBREG_PROMOTED_UNSIGNED_P (target))
4156 exp = convert
4157 (lang_hooks.types.signed_or_unsigned_type
4158 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4159
4160 exp = convert (lang_hooks.types.type_for_mode
4161 (GET_MODE (SUBREG_REG (target)),
4162 SUBREG_PROMOTED_UNSIGNED_P (target)),
4163 exp);
4164
4165 inner_target = SUBREG_REG (target);
4166 }
4167
4168 temp = expand_expr (exp, inner_target, VOIDmode,
4169 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4170
4171 /* If TEMP is a MEM and we want a result value, make the access
4172 now so it gets done only once. Strictly speaking, this is
4173 only necessary if the MEM is volatile, or if the address
4174 overlaps TARGET. But not performing the load twice also
4175 reduces the amount of rtl we generate and then have to CSE. */
4176 if (MEM_P (temp) && (want_value & 1) != 0)
4177 temp = copy_to_reg (temp);
4178
4179 /* If TEMP is a VOIDmode constant, use convert_modes to make
4180 sure that we properly convert it. */
4181 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4182 {
4183 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4184 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4185 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4186 GET_MODE (target), temp,
4187 SUBREG_PROMOTED_UNSIGNED_P (target));
4188 }
4189
4190 convert_move (SUBREG_REG (target), temp,
4191 SUBREG_PROMOTED_UNSIGNED_P (target));
4192
4193 /* If we promoted a constant, change the mode back down to match
4194 target. Otherwise, the caller might get confused by a result whose
4195 mode is larger than expected. */
4196
4197 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
4198 {
4199 if (GET_MODE (temp) != VOIDmode)
4200 {
4201 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4202 SUBREG_PROMOTED_VAR_P (temp) = 1;
4203 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4204 SUBREG_PROMOTED_UNSIGNED_P (target));
4205 }
4206 else
4207 temp = convert_modes (GET_MODE (target),
4208 GET_MODE (SUBREG_REG (target)),
4209 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4210 }
4211
4212 return want_value & 1 ? temp : NULL_RTX;
4213 }
4214 else
4215 {
4216 temp = expand_expr_real (exp, target, GET_MODE (target),
4217 (want_value & 2
4218 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4219 &alt_rtl);
4220 /* Return TARGET if it's a specified hardware register.
4221 If TARGET is a volatile mem ref, either return TARGET
4222 or return a reg copied *from* TARGET; ANSI requires this.
4223
4224 Otherwise, if TEMP is not TARGET, return TEMP
4225 if it is constant (for efficiency),
4226 or if we really want the correct value. */
4227 if (!(target && REG_P (target)
4228 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4229 && !(MEM_P (target) && MEM_VOLATILE_P (target))
4230 && ! rtx_equal_p (temp, target)
4231 && (CONSTANT_P (temp) || (want_value & 1) != 0))
4232 dont_return_target = 1;
4233 }
4234
4235 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4236 the same as that of TARGET, adjust the constant. This is needed, for
4237 example, in case it is a CONST_DOUBLE and we want only a word-sized
4238 value. */
4239 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4240 && TREE_CODE (exp) != ERROR_MARK
4241 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4242 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4243 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4244
4245 /* If value was not generated in the target, store it there.
4246 Convert the value to TARGET's type first if necessary and emit the
4247 pending incrementations that have been queued when expanding EXP.
4248 Note that we cannot emit the whole queue blindly because this will
4249 effectively disable the POST_INC optimization later.
4250
4251 If TEMP and TARGET compare equal according to rtx_equal_p, but
4252 one or both of them are volatile memory refs, we have to distinguish
4253 two cases:
4254 - expand_expr has used TARGET. In this case, we must not generate
4255 another copy. This can be detected by TARGET being equal according
4256 to == .
4257 - expand_expr has not used TARGET - that means that the source just
4258 happens to have the same RTX form. Since temp will have been created
4259 by expand_expr, it will compare unequal according to == .
4260 We must generate a copy in this case, to reach the correct number
4261 of volatile memory references. */
4262
4263 if ((! rtx_equal_p (temp, target)
4264 || (temp != target && (side_effects_p (temp)
4265 || side_effects_p (target))))
4266 && TREE_CODE (exp) != ERROR_MARK
4267 && ! dont_store_target
4268 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4269 but TARGET is not a valid memory reference, TEMP will differ
4270 from TARGET although it is really the same location. */
4271 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4272 /* If there's nothing to copy, don't bother. Don't call expr_size
4273 unless necessary, because some front ends' expr_size hook (e.g. C++'s)
4274 aborts on objects that are not supposed to be bit-copied or
4275 bit-initialized. */
4276 && expr_size (exp) != const0_rtx)
4277 {
4278 emit_insns_enqueued_after_mark (mark);
4279 target = protect_from_queue (target, 1);
4280 temp = protect_from_queue (temp, 0);
4281 if (GET_MODE (temp) != GET_MODE (target)
4282 && GET_MODE (temp) != VOIDmode)
4283 {
4284 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4285 if (dont_return_target)
4286 {
4287 /* In this case, we will return TEMP,
4288 so make sure it has the proper mode.
4289 But don't forget to store the value into TARGET. */
4290 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4291 emit_move_insn (target, temp);
4292 }
4293 else
4294 convert_move (target, temp, unsignedp);
4295 }
4296
4297 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4298 {
4299 /* Handle copying a string constant into an array. The string
4300 constant may be shorter than the array. So copy just the string's
4301 actual length, and clear the rest. First get the size of the data
4302 type of the string, which is actually the size of the target. */
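/* For example, "char buf[16] = "abc";" copies the four bytes of the
   string constant (including the trailing NUL) and then clears the
   remaining twelve bytes of BUF.  */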
4303 rtx size = expr_size (exp);
4304
4305 if (GET_CODE (size) == CONST_INT
4306 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4307 emit_block_move (target, temp, size,
4308 (want_value & 2
4309 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4310 else
4311 {
4312 /* Compute the size of the data to copy from the string. */
4313 tree copy_size
4314 = size_binop (MIN_EXPR,
4315 make_tree (sizetype, size),
4316 size_int (TREE_STRING_LENGTH (exp)));
4317 rtx copy_size_rtx
4318 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4319 (want_value & 2
4320 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4321 rtx label = 0;
4322
4323 /* Copy that much. */
4324 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4325 TYPE_UNSIGNED (sizetype));
4326 emit_block_move (target, temp, copy_size_rtx,
4327 (want_value & 2
4328 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4329
4330 /* Figure out how much is left in TARGET that we have to clear.
4331 Do all calculations in ptr_mode. */
4332 if (GET_CODE (copy_size_rtx) == CONST_INT)
4333 {
4334 size = plus_constant (size, -INTVAL (copy_size_rtx));
4335 target = adjust_address (target, BLKmode,
4336 INTVAL (copy_size_rtx));
4337 }
4338 else
4339 {
4340 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4341 copy_size_rtx, NULL_RTX, 0,
4342 OPTAB_LIB_WIDEN);
4343
4344 #ifdef POINTERS_EXTEND_UNSIGNED
4345 if (GET_MODE (copy_size_rtx) != Pmode)
4346 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4347 TYPE_UNSIGNED (sizetype));
4348 #endif
4349
4350 target = offset_address (target, copy_size_rtx,
4351 highest_pow2_factor (copy_size));
4352 label = gen_label_rtx ();
4353 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4354 GET_MODE (size), 0, label);
4355 }
4356
4357 if (size != const0_rtx)
4358 clear_storage (target, size);
4359
4360 if (label)
4361 emit_label (label);
4362 }
4363 }
4364 /* Handle calls that return values in multiple non-contiguous locations.
4365 The Irix 6 ABI has examples of this. */
4366 else if (GET_CODE (target) == PARALLEL)
4367 emit_group_load (target, temp, TREE_TYPE (exp),
4368 int_size_in_bytes (TREE_TYPE (exp)));
4369 else if (GET_MODE (temp) == BLKmode)
4370 emit_block_move (target, temp, expr_size (exp),
4371 (want_value & 2
4372 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4373 else
4374 {
4375 temp = force_operand (temp, target);
4376 if (temp != target)
4377 emit_move_insn (target, temp);
4378 }
4379 }
4380
4381 /* If we don't want a value, return NULL_RTX. */
4382 if ((want_value & 1) == 0)
4383 return NULL_RTX;
4384
4385 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4386 ??? The latter test doesn't seem to make sense. */
4387 else if (dont_return_target && !MEM_P (temp))
4388 return temp;
4389
4390 /* Return TARGET itself if it is a hard register. */
4391 else if ((want_value & 1) != 0
4392 && GET_MODE (target) != BLKmode
4393 && ! (REG_P (target)
4394 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4395 return copy_to_reg (target);
4396
4397 else
4398 return target;
4399 }
4400 \f
4401 /* Examine CTOR. Count how many scalar fields are set to nonzero
4402 values and place that count in *P_NZ_ELTS. Count how many scalar
4403 fields are set to non-constant values and place that count in *P_NC_ELTS. */
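/* For example, the constructor "{ [0 ... 9] = 1 }" counts as ten
   nonzero elements: a RANGE_EXPR index multiplies the counts for its
   value by the number of elements in the range.  */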
4404
4405 static void
4406 categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4407 HOST_WIDE_INT *p_nc_elts)
4408 {
4409 HOST_WIDE_INT nz_elts, nc_elts;
4410 tree list;
4411
4412 nz_elts = 0;
4413 nc_elts = 0;
4414
4415 for (list = CONSTRUCTOR_ELTS (ctor); list; list = TREE_CHAIN (list))
4416 {
4417 tree value = TREE_VALUE (list);
4418 tree purpose = TREE_PURPOSE (list);
4419 HOST_WIDE_INT mult;
4420
4421 mult = 1;
4422 if (TREE_CODE (purpose) == RANGE_EXPR)
4423 {
4424 tree lo_index = TREE_OPERAND (purpose, 0);
4425 tree hi_index = TREE_OPERAND (purpose, 1);
4426
4427 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4428 mult = (tree_low_cst (hi_index, 1)
4429 - tree_low_cst (lo_index, 1) + 1);
4430 }
4431
4432 switch (TREE_CODE (value))
4433 {
4434 case CONSTRUCTOR:
4435 {
4436 HOST_WIDE_INT nz = 0, nc = 0;
4437 categorize_ctor_elements_1 (value, &nz, &nc);
4438 nz_elts += mult * nz;
4439 nc_elts += mult * nc;
4440 }
4441 break;
4442
4443 case INTEGER_CST:
4444 case REAL_CST:
4445 if (!initializer_zerop (value))
4446 nz_elts += mult;
4447 break;
4448 case COMPLEX_CST:
4449 if (!initializer_zerop (TREE_REALPART (value)))
4450 nz_elts += mult;
4451 if (!initializer_zerop (TREE_IMAGPART (value)))
4452 nz_elts += mult;
4453 break;
4454 case VECTOR_CST:
4455 {
4456 tree v;
4457 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4458 if (!initializer_zerop (TREE_VALUE (v)))
4459 nz_elts += mult;
4460 }
4461 break;
4462
4463 default:
4464 nz_elts += mult;
4465 if (!initializer_constant_valid_p (value, TREE_TYPE (value)))
4466 nc_elts += mult;
4467 break;
4468 }
4469 }
4470
4471 *p_nz_elts += nz_elts;
4472 *p_nc_elts += nc_elts;
4473 }
4474
4475 void
4476 categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4477 HOST_WIDE_INT *p_nc_elts)
4478 {
4479 *p_nz_elts = 0;
4480 *p_nc_elts = 0;
4481 categorize_ctor_elements_1 (ctor, p_nz_elts, p_nc_elts);
4482 }
4483
4484 /* Count the number of scalars in TYPE. Return -1 if the count
4485 overflows or if TYPE is variable-sized. */
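/* For example, a structure containing an int, a double and a char[3]
   member counts as 1 + 1 + 3 = 5 scalars.  */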
4486
4487 HOST_WIDE_INT
4488 count_type_elements (tree type)
4489 {
4490 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4491 switch (TREE_CODE (type))
4492 {
4493 case ARRAY_TYPE:
4494 {
4495 tree telts = array_type_nelts (type);
4496 if (telts && host_integerp (telts, 1))
4497 {
4498 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4499 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type));
4500 if (n == 0)
4501 return 0;
4502 if (max / n > m)
4503 return n * m;
4504 }
4505 return -1;
4506 }
4507
4508 case RECORD_TYPE:
4509 {
4510 HOST_WIDE_INT n = 0, t;
4511 tree f;
4512
4513 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4514 if (TREE_CODE (f) == FIELD_DECL)
4515 {
4516 t = count_type_elements (TREE_TYPE (f));
4517 if (t < 0)
4518 return -1;
4519 n += t;
4520 }
4521
4522 return n;
4523 }
4524
4525 case UNION_TYPE:
4526 case QUAL_UNION_TYPE:
4527 {
4528 /* Ho hum. How in the world do we guess here? Clearly it isn't
4529 right to count the fields. Guess based on the number of words. */
4530 HOST_WIDE_INT n = int_size_in_bytes (type);
4531 if (n < 0)
4532 return -1;
4533 return n / UNITS_PER_WORD;
4534 }
4535
4536 case COMPLEX_TYPE:
4537 return 2;
4538
4539 case VECTOR_TYPE:
4540 /* ??? This is broken. We should encode the vector width in the tree. */
4541 return GET_MODE_NUNITS (TYPE_MODE (type));
4542
4543 case INTEGER_TYPE:
4544 case REAL_TYPE:
4545 case ENUMERAL_TYPE:
4546 case BOOLEAN_TYPE:
4547 case CHAR_TYPE:
4548 case POINTER_TYPE:
4549 case OFFSET_TYPE:
4550 case REFERENCE_TYPE:
4551 return 1;
4552
4553 case VOID_TYPE:
4554 case METHOD_TYPE:
4555 case FILE_TYPE:
4556 case SET_TYPE:
4557 case FUNCTION_TYPE:
4558 case LANG_TYPE:
4559 default:
4560 abort ();
4561 }
4562 }
4563
4564 /* Return 1 if EXP contains mostly (3/4) zeros. */
4565
4566 int
4567 mostly_zeros_p (tree exp)
4568 {
4569 if (TREE_CODE (exp) == CONSTRUCTOR)
4570
4571 {
4572 HOST_WIDE_INT nz_elts, nc_elts, elts;
4573
4574 /* If there are no ranges of true bits, it is all zero. */
4575 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4576 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4577
4578 categorize_ctor_elements (exp, &nz_elts, &nc_elts);
4579 elts = count_type_elements (TREE_TYPE (exp));
4580
4581 return nz_elts < elts / 4;
4582 }
4583
4584 return initializer_zerop (exp);
4585 }
4586 \f
4587 /* Helper function for store_constructor.
4588 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4589 TYPE is the type of the CONSTRUCTOR, not the element type.
4590 CLEARED is as for store_constructor.
4591 ALIAS_SET is the alias set to use for any stores.
4592
4593 This provides a recursive shortcut back to store_constructor when it isn't
4594 necessary to go through store_field. This is so that we can pass through
4595 the cleared field to let store_constructor know that we may not have to
4596 clear a substructure if the outer structure has already been cleared. */
4597
4598 static void
4599 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4600 HOST_WIDE_INT bitpos, enum machine_mode mode,
4601 tree exp, tree type, int cleared, int alias_set)
4602 {
4603 if (TREE_CODE (exp) == CONSTRUCTOR
4604 /* We can only call store_constructor recursively if the size and
4605 bit position are on a byte boundary. */
4606 && bitpos % BITS_PER_UNIT == 0
4607 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
4608 /* If we have a nonzero bitpos for a register target, then we just
4609 let store_field do the bitfield handling. This is unlikely to
4610 generate unnecessary clear instructions anyways. */
4611 && (bitpos == 0 || MEM_P (target)))
4612 {
4613 if (MEM_P (target))
4614 target
4615 = adjust_address (target,
4616 GET_MODE (target) == BLKmode
4617 || 0 != (bitpos
4618 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4619 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4620
4621
4622 /* Update the alias set, if required. */
4623 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
4624 && MEM_ALIAS_SET (target) != 0)
4625 {
4626 target = copy_rtx (target);
4627 set_mem_alias_set (target, alias_set);
4628 }
4629
4630 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4631 }
4632 else
4633 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4634 alias_set);
4635 }
4636
4637 /* Store the value of constructor EXP into the rtx TARGET.
4638 TARGET is either a REG or a MEM; we know it cannot conflict, since
4639 safe_from_p has been called.
4640 CLEARED is true if TARGET is known to have been zero'd.
4641 SIZE is the number of bytes of TARGET we are allowed to modify: this
4642 may not be the same as the size of EXP if we are assigning to a field
4643 which has been packed to exclude padding bits. */
4644
4645 static void
4646 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4647 {
4648 tree type = TREE_TYPE (exp);
4649 #ifdef WORD_REGISTER_OPERATIONS
4650 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4651 #endif
4652
4653 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4654 || TREE_CODE (type) == QUAL_UNION_TYPE)
4655 {
4656 tree elt;
4657
4658 /* If size is zero or the target is already cleared, do nothing. */
4659 if (size == 0 || cleared)
4660 cleared = 1;
4661 /* We either clear the aggregate or indicate the value is dead. */
4662 else if ((TREE_CODE (type) == UNION_TYPE
4663 || TREE_CODE (type) == QUAL_UNION_TYPE)
4664 && ! CONSTRUCTOR_ELTS (exp))
4665 /* If the constructor is empty, clear the union. */
4666 {
4667 clear_storage (target, expr_size (exp));
4668 cleared = 1;
4669 }
4670
4671 /* If we are building a static constructor into a register,
4672 set the initial value as zero so we can fold the value into
4673 a constant. But if more than one register is involved,
4674 this probably loses. */
4675 else if (REG_P (target) && TREE_STATIC (exp)
4676 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4677 {
4678 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4679 cleared = 1;
4680 }
4681
4682 /* If the constructor has fewer fields than the structure
4683 or if we are initializing the structure to mostly zeros,
4684 clear the whole structure first. Don't do this if TARGET is a
4685 register whose mode size isn't equal to SIZE since clear_storage
4686 can't handle this case. */
4687 else if (size > 0
4688 && ((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
4689 || mostly_zeros_p (exp))
4690 && (!REG_P (target)
4691 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4692 == size)))
4693 {
4694 rtx xtarget = target;
4695
4696 if (readonly_fields_p (type))
4697 {
4698 xtarget = copy_rtx (xtarget);
4699 RTX_UNCHANGING_P (xtarget) = 1;
4700 }
4701
4702 clear_storage (xtarget, GEN_INT (size));
4703 cleared = 1;
4704 }
4705
4706 if (! cleared)
4707 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4708
4709 /* Store each element of the constructor into
4710 the corresponding field of TARGET. */
4711
4712 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4713 {
4714 tree field = TREE_PURPOSE (elt);
4715 tree value = TREE_VALUE (elt);
4716 enum machine_mode mode;
4717 HOST_WIDE_INT bitsize;
4718 HOST_WIDE_INT bitpos = 0;
4719 tree offset;
4720 rtx to_rtx = target;
4721
4722 /* Just ignore missing fields.
4723 We cleared the whole structure, above,
4724 if any fields are missing. */
4725 if (field == 0)
4726 continue;
4727
4728 if (cleared && initializer_zerop (value))
4729 continue;
4730
4731 if (host_integerp (DECL_SIZE (field), 1))
4732 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4733 else
4734 bitsize = -1;
4735
4736 mode = DECL_MODE (field);
4737 if (DECL_BIT_FIELD (field))
4738 mode = VOIDmode;
4739
4740 offset = DECL_FIELD_OFFSET (field);
4741 if (host_integerp (offset, 0)
4742 && host_integerp (bit_position (field), 0))
4743 {
4744 bitpos = int_bit_position (field);
4745 offset = 0;
4746 }
4747 else
4748 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4749
4750 if (offset)
4751 {
4752 rtx offset_rtx;
4753
4754 offset
4755 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4756 make_tree (TREE_TYPE (exp),
4757 target));
4758
4759 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4760 if (!MEM_P (to_rtx))
4761 abort ();
4762
4763 #ifdef POINTERS_EXTEND_UNSIGNED
4764 if (GET_MODE (offset_rtx) != Pmode)
4765 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4766 #else
4767 if (GET_MODE (offset_rtx) != ptr_mode)
4768 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4769 #endif
4770
4771 to_rtx = offset_address (to_rtx, offset_rtx,
4772 highest_pow2_factor (offset));
4773 }
4774
4775 if (TREE_READONLY (field))
4776 {
4777 if (MEM_P (to_rtx))
4778 to_rtx = copy_rtx (to_rtx);
4779
4780 RTX_UNCHANGING_P (to_rtx) = 1;
4781 }
4782
4783 #ifdef WORD_REGISTER_OPERATIONS
4784 /* If this initializes a field that is smaller than a word, at the
4785 start of a word, try to widen it to a full word.
4786 This special case allows us to output C++ member function
4787 initializations in a form that the optimizers can understand. */
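/* For example, a constant stored into a 16-bit field at bit offset 0
   of a register-held structure is widened here to a full-word constant
   store (shifted up on big-endian targets), which the RTL optimizers
   handle better than a bit-field insertion.  */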
4788 if (REG_P (target)
4789 && bitsize < BITS_PER_WORD
4790 && bitpos % BITS_PER_WORD == 0
4791 && GET_MODE_CLASS (mode) == MODE_INT
4792 && TREE_CODE (value) == INTEGER_CST
4793 && exp_size >= 0
4794 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4795 {
4796 tree type = TREE_TYPE (value);
4797
4798 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4799 {
4800 type = lang_hooks.types.type_for_size
4801 (BITS_PER_WORD, TYPE_UNSIGNED (type));
4802 value = convert (type, value);
4803 }
4804
4805 if (BYTES_BIG_ENDIAN)
4806 value
4807 = fold (build (LSHIFT_EXPR, type, value,
4808 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4809 bitsize = BITS_PER_WORD;
4810 mode = word_mode;
4811 }
4812 #endif
4813
4814 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4815 && DECL_NONADDRESSABLE_P (field))
4816 {
4817 to_rtx = copy_rtx (to_rtx);
4818 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4819 }
4820
4821 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4822 value, type, cleared,
4823 get_alias_set (TREE_TYPE (field)));
4824 }
4825 }
4826 else if (TREE_CODE (type) == ARRAY_TYPE
4827 || TREE_CODE (type) == VECTOR_TYPE)
4828 {
4829 tree elt;
4830 int i;
4831 int need_to_clear;
4832 tree domain;
4833 tree elttype = TREE_TYPE (type);
4834 int const_bounds_p;
4835 HOST_WIDE_INT minelt = 0;
4836 HOST_WIDE_INT maxelt = 0;
4837 int icode = 0;
4838 rtx *vector = NULL;
4839 int elt_size = 0;
4840 unsigned n_elts = 0;
4841
4842 if (TREE_CODE (type) == ARRAY_TYPE)
4843 domain = TYPE_DOMAIN (type);
4844 else
4845 /* Vectors do not have domains; look up the domain of
4846 the array embedded in the debug representation type.
4847 FIXME Would probably be more efficient to treat vectors
4848 separately from arrays. */
4849 {
4850 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4851 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4852 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
4853 {
4854 enum machine_mode mode = GET_MODE (target);
4855
4856 icode = (int) vec_init_optab->handlers[mode].insn_code;
4857 if (icode != CODE_FOR_nothing)
4858 {
4859 unsigned int i;
4860
4861 elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
4862 n_elts = (GET_MODE_SIZE (mode) / elt_size);
4863 vector = alloca (n_elts * sizeof (rtx));
4864 for (i = 0; i < n_elts; i++)
4865 vector [i] = CONST0_RTX (GET_MODE_INNER (mode));
4866 }
4867 }
4868 }
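/* When the target is a vector register and the backend provides a
   vec_init pattern, the element values are collected in VECTOR[] and
   emitted at the end as a single vec_init insn rather than being
   stored one element at a time.  */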
4869
4870 const_bounds_p = (TYPE_MIN_VALUE (domain)
4871 && TYPE_MAX_VALUE (domain)
4872 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4873 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4874
4875 /* If we have constant bounds for the range of the type, get them. */
4876 if (const_bounds_p)
4877 {
4878 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4879 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4880 }
4881
4882 /* If the constructor has fewer elements than the array,
4883 clear the whole array first. Similarly if this is
4884 static constructor of a non-BLKmode object. */
4885 if (cleared || (REG_P (target) && TREE_STATIC (exp)))
4886 need_to_clear = 1;
4887 else
4888 {
4889 HOST_WIDE_INT count = 0, zero_count = 0;
4890 need_to_clear = ! const_bounds_p;
4891
4892 /* This loop is a more accurate version of the loop in
4893 mostly_zeros_p (it handles RANGE_EXPR in an index).
4894 It is also needed to check for missing elements. */
4895 for (elt = CONSTRUCTOR_ELTS (exp);
4896 elt != NULL_TREE && ! need_to_clear;
4897 elt = TREE_CHAIN (elt))
4898 {
4899 tree index = TREE_PURPOSE (elt);
4900 HOST_WIDE_INT this_node_count;
4901
4902 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4903 {
4904 tree lo_index = TREE_OPERAND (index, 0);
4905 tree hi_index = TREE_OPERAND (index, 1);
4906
4907 if (! host_integerp (lo_index, 1)
4908 || ! host_integerp (hi_index, 1))
4909 {
4910 need_to_clear = 1;
4911 break;
4912 }
4913
4914 this_node_count = (tree_low_cst (hi_index, 1)
4915 - tree_low_cst (lo_index, 1) + 1);
4916 }
4917 else
4918 this_node_count = 1;
4919
4920 count += this_node_count;
4921 if (mostly_zeros_p (TREE_VALUE (elt)))
4922 zero_count += this_node_count;
4923 }
4924
4925 /* Clear the entire array first if there are any missing elements,
4926 or if the incidence of zero elements is >= 75%. */
4927 if (! need_to_clear
4928 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4929 need_to_clear = 1;
4930 }
4931
4932 if (need_to_clear && size > 0 && !vector)
4933 {
4934 if (! cleared)
4935 {
4936 if (REG_P (target))
4937 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4938 else
4939 clear_storage (target, GEN_INT (size));
4940 }
4941 cleared = 1;
4942 }
4943 else if (REG_P (target))
4944 /* Inform later passes that the old value is dead. */
4945 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4946
4947 /* Store each element of the constructor into
4948 the corresponding element of TARGET, determined
4949 by counting the elements. */
4950 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4951 elt;
4952 elt = TREE_CHAIN (elt), i++)
4953 {
4954 enum machine_mode mode;
4955 HOST_WIDE_INT bitsize;
4956 HOST_WIDE_INT bitpos;
4957 int unsignedp;
4958 tree value = TREE_VALUE (elt);
4959 tree index = TREE_PURPOSE (elt);
4960 rtx xtarget = target;
4961
4962 if (cleared && initializer_zerop (value))
4963 continue;
4964
4965 unsignedp = TYPE_UNSIGNED (elttype);
4966 mode = TYPE_MODE (elttype);
4967 if (mode == BLKmode)
4968 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4969 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4970 : -1);
4971 else
4972 bitsize = GET_MODE_BITSIZE (mode);
4973
4974 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4975 {
4976 tree lo_index = TREE_OPERAND (index, 0);
4977 tree hi_index = TREE_OPERAND (index, 1);
4978 rtx index_r, pos_rtx;
4979 HOST_WIDE_INT lo, hi, count;
4980 tree position;
4981
4982 if (vector)
4983 abort ();
4984
4985 /* If the range is constant and "small", unroll the loop. */
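/* For example, the GNU C range initializer "[2 ... 4] = 7" with
   constant bounds stores its three elements directly; larger or
   non-constant ranges fall through to the runtime loop below.  */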
4986 if (const_bounds_p
4987 && host_integerp (lo_index, 0)
4988 && host_integerp (hi_index, 0)
4989 && (lo = tree_low_cst (lo_index, 0),
4990 hi = tree_low_cst (hi_index, 0),
4991 count = hi - lo + 1,
4992 (!MEM_P (target)
4993 || count <= 2
4994 || (host_integerp (TYPE_SIZE (elttype), 1)
4995 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4996 <= 40 * 8)))))
4997 {
4998 lo -= minelt; hi -= minelt;
4999 for (; lo <= hi; lo++)
5000 {
5001 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5002
5003 if (MEM_P (target)
5004 && !MEM_KEEP_ALIAS_SET_P (target)
5005 && TREE_CODE (type) == ARRAY_TYPE
5006 && TYPE_NONALIASED_COMPONENT (type))
5007 {
5008 target = copy_rtx (target);
5009 MEM_KEEP_ALIAS_SET_P (target) = 1;
5010 }
5011
5012 store_constructor_field
5013 (target, bitsize, bitpos, mode, value, type, cleared,
5014 get_alias_set (elttype));
5015 }
5016 }
5017 else
5018 {
5019 rtx loop_start = gen_label_rtx ();
5020 rtx loop_end = gen_label_rtx ();
5021 tree exit_cond;
5022
5023 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
5024 unsignedp = TYPE_UNSIGNED (domain);
5025
5026 index = build_decl (VAR_DECL, NULL_TREE, domain);
5027
5028 index_r
5029 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5030 &unsignedp, 0));
5031 SET_DECL_RTL (index, index_r);
5032 store_expr (lo_index, index_r, 0);
5033
5034 /* Build the head of the loop. */
5035 do_pending_stack_adjust ();
5036 emit_queue ();
5037 emit_label (loop_start);
5038
5039 /* Assign value to element index. */
5040 position
5041 = convert (ssizetype,
5042 fold (build (MINUS_EXPR, TREE_TYPE (index),
5043 index, TYPE_MIN_VALUE (domain))));
5044 position = size_binop (MULT_EXPR, position,
5045 convert (ssizetype,
5046 TYPE_SIZE_UNIT (elttype)));
5047
5048 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
5049 xtarget = offset_address (target, pos_rtx,
5050 highest_pow2_factor (position));
5051 xtarget = adjust_address (xtarget, mode, 0);
5052 if (TREE_CODE (value) == CONSTRUCTOR)
5053 store_constructor (value, xtarget, cleared,
5054 bitsize / BITS_PER_UNIT);
5055 else
5056 store_expr (value, xtarget, 0);
5057
5058 /* Generate a conditional jump to exit the loop. */
5059 exit_cond = build (LT_EXPR, integer_type_node,
5060 index, hi_index);
5061 jumpif (exit_cond, loop_end);
5062
5063 /* Update the loop counter, and jump to the head of
5064 the loop. */
5065 expand_increment (build (PREINCREMENT_EXPR,
5066 TREE_TYPE (index),
5067 index, integer_one_node), 0, 0);
5068 emit_jump (loop_start);
5069
5070 /* Build the end of the loop. */
5071 emit_label (loop_end);
5072 }
5073 }
5074 else if ((index != 0 && ! host_integerp (index, 0))
5075 || ! host_integerp (TYPE_SIZE (elttype), 1))
5076 {
5077 tree position;
5078
5079 if (vector)
5080 abort ();
5081
5082 if (index == 0)
5083 index = ssize_int (1);
5084
5085 if (minelt)
5086 index = convert (ssizetype,
5087 fold (build (MINUS_EXPR, index,
5088 TYPE_MIN_VALUE (domain))));
5089
5090 position = size_binop (MULT_EXPR, index,
5091 convert (ssizetype,
5092 TYPE_SIZE_UNIT (elttype)));
5093 xtarget = offset_address (target,
5094 expand_expr (position, 0, VOIDmode, 0),
5095 highest_pow2_factor (position));
5096 xtarget = adjust_address (xtarget, mode, 0);
5097 store_expr (value, xtarget, 0);
5098 }
5099 else if (vector)
5100 {
5101 int pos;
5102
5103 if (index != 0)
5104 pos = tree_low_cst (index, 0) - minelt;
5105 else
5106 pos = i;
5107 vector[pos] = expand_expr (value, NULL_RTX, VOIDmode, 0);
5108 }
5109 else
5110 {
5111 if (index != 0)
5112 bitpos = ((tree_low_cst (index, 0) - minelt)
5113 * tree_low_cst (TYPE_SIZE (elttype), 1));
5114 else
5115 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5116
5117 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5118 && TREE_CODE (type) == ARRAY_TYPE
5119 && TYPE_NONALIASED_COMPONENT (type))
5120 {
5121 target = copy_rtx (target);
5122 MEM_KEEP_ALIAS_SET_P (target) = 1;
5123 }
5124 store_constructor_field (target, bitsize, bitpos, mode, value,
5125 type, cleared, get_alias_set (elttype));
5126 }
5127 }
5128 if (vector)
5129 {
5130 emit_insn (GEN_FCN (icode) (target,
5131 gen_rtx_PARALLEL (GET_MODE (target),
5132 gen_rtvec_v (n_elts, vector))));
5133 }
5134 }
5135
5136 /* Set constructor assignments. */
5137 else if (TREE_CODE (type) == SET_TYPE)
5138 {
5139 tree elt = CONSTRUCTOR_ELTS (exp);
5140 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
5141 tree domain = TYPE_DOMAIN (type);
5142 tree domain_min, domain_max, bitlength;
5143
5144 /* The default implementation strategy is to extract the constant
5145 parts of the constructor, use that to initialize the target,
5146 and then "or" in whatever non-constant ranges we need in addition.
5147
5148 If a large set is all zero or all ones, it is
5149 probably better to set it using memset.
5150 Also, if a large set has just a single range, it may also be
5151 better to first clear the whole set (using memset), and then
5152 set the bits we want. */
5153
5154 /* Check for all zeros. */
5155 if (elt == NULL_TREE && size > 0)
5156 {
5157 if (!cleared)
5158 clear_storage (target, GEN_INT (size));
5159 return;
5160 }
5161
5162 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5163 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5164 bitlength = size_binop (PLUS_EXPR,
5165 size_diffop (domain_max, domain_min),
5166 ssize_int (1));
5167
5168 nbits = tree_low_cst (bitlength, 1);
5169
5170 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5171 are "complicated" (more than one range), initialize (the
5172 constant parts) by copying from a constant. */
5173 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5174 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5175 {
5176 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5177 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5178 char *bit_buffer = alloca (nbits);
5179 HOST_WIDE_INT word = 0;
5180 unsigned int bit_pos = 0;
5181 unsigned int ibit = 0;
5182 unsigned int offset = 0; /* In bytes from beginning of set. */
5183
5184 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
5185 for (;;)
5186 {
5187 if (bit_buffer[ibit])
5188 {
5189 if (BYTES_BIG_ENDIAN)
5190 word |= (1 << (set_word_size - 1 - bit_pos));
5191 else
5192 word |= 1 << bit_pos;
5193 }
5194
5195 bit_pos++; ibit++;
5196 if (bit_pos >= set_word_size || ibit == nbits)
5197 {
5198 if (word != 0 || ! cleared)
5199 {
5200 rtx datum = gen_int_mode (word, mode);
5201 rtx to_rtx;
5202
5203 /* The assumption here is that it is safe to use
5204 XEXP if the set is multi-word, but not if
5205 it's single-word. */
5206 if (MEM_P (target))
5207 to_rtx = adjust_address (target, mode, offset);
5208 else if (offset == 0)
5209 to_rtx = target;
5210 else
5211 abort ();
5212 emit_move_insn (to_rtx, datum);
5213 }
5214
5215 if (ibit == nbits)
5216 break;
5217 word = 0;
5218 bit_pos = 0;
5219 offset += set_word_size / BITS_PER_UNIT;
5220 }
5221 }
5222 }
5223 else if (!cleared)
5224 /* Don't bother clearing storage if the set is all ones. */
5225 if (TREE_CHAIN (elt) != NULL_TREE
5226 || (TREE_PURPOSE (elt) == NULL_TREE
5227 ? nbits != 1
5228 : ( ! host_integerp (TREE_VALUE (elt), 0)
5229 || ! host_integerp (TREE_PURPOSE (elt), 0)
5230 || (tree_low_cst (TREE_VALUE (elt), 0)
5231 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5232 != (HOST_WIDE_INT) nbits))))
5233 clear_storage (target, expr_size (exp));
5234
5235 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5236 {
5237 /* Start of range of element or NULL. */
5238 tree startbit = TREE_PURPOSE (elt);
5239 /* End of range of element, or element value. */
5240 tree endbit = TREE_VALUE (elt);
5241 HOST_WIDE_INT startb, endb;
5242 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5243
5244 bitlength_rtx = expand_expr (bitlength,
5245 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5246
5247 /* Handle non-range tuple element like [ expr ]. */
5248 if (startbit == NULL_TREE)
5249 {
5250 startbit = save_expr (endbit);
5251 endbit = startbit;
5252 }
5253
5254 startbit = convert (sizetype, startbit);
5255 endbit = convert (sizetype, endbit);
5256 if (! integer_zerop (domain_min))
5257 {
5258 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5259 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5260 }
5261 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5262 EXPAND_CONST_ADDRESS);
5263 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5264 EXPAND_CONST_ADDRESS);
5265
5266 if (REG_P (target))
5267 {
5268 targetx
5269 = assign_temp
5270 ((build_qualified_type (lang_hooks.types.type_for_mode
5271 (GET_MODE (target), 0),
5272 TYPE_QUAL_CONST)),
5273 0, 1, 1);
5274 emit_move_insn (targetx, target);
5275 }
5276
5277 else if (MEM_P (target))
5278 targetx = target;
5279 else
5280 abort ();
5281
5282 /* Optimization: If startbit and endbit are constants divisible
5283 by BITS_PER_UNIT, call memset instead. */
5284 if (TREE_CODE (startbit) == INTEGER_CST
5285 && TREE_CODE (endbit) == INTEGER_CST
5286 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5287 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5288 {
5289 emit_library_call (memset_libfunc, LCT_NORMAL,
5290 VOIDmode, 3,
5291 plus_constant (XEXP (targetx, 0),
5292 startb / BITS_PER_UNIT),
5293 Pmode,
5294 constm1_rtx, TYPE_MODE (integer_type_node),
5295 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5296 TYPE_MODE (sizetype));
5297 }
5298 else
5299 emit_library_call (setbits_libfunc, LCT_NORMAL,
5300 VOIDmode, 4, XEXP (targetx, 0),
5301 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5302 startbit_rtx, TYPE_MODE (sizetype),
5303 endbit_rtx, TYPE_MODE (sizetype));
5304
5305 if (REG_P (target))
5306 emit_move_insn (target, targetx);
5307 }
5308 }
5309
5310 else
5311 abort ();
5312 }
5313
5314 /* Store the value of EXP (an expression tree)
5315 into a subfield of TARGET which has mode MODE and occupies
5316 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5317 If MODE is VOIDmode, it means that we are storing into a bit-field.
5318
5319 If VALUE_MODE is VOIDmode, return nothing in particular.
5320 UNSIGNEDP is not used in this case.
5321
5322 Otherwise, return an rtx for the value stored. This rtx
5323 has mode VALUE_MODE if that is convenient to do.
5324 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5325
5326    TYPE is the type of the underlying object.
5327
5328 ALIAS_SET is the alias set for the destination. This value will
5329 (in general) be different from that for TARGET, since TARGET is a
5330 reference to the containing structure. */
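/* As a minimal illustration (hypothetical values, not taken from a real
   caller): storing EXP into a 3-bit field that starts 17 bits into a
   structure living in memory at TO_RTX, with no value wanted back, might
   look like

       store_field (to_rtx, 3, 17, VOIDmode, exp, VOIDmode, 0,
                    struct_type, get_alias_set (field_type));

   where STRUCT_TYPE and FIELD_TYPE stand in for the containing structure's
   type and the field's type.  */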
5331
5332 static rtx
5333 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5334 enum machine_mode mode, tree exp, enum machine_mode value_mode,
5335 int unsignedp, tree type, int alias_set)
5336 {
5337 HOST_WIDE_INT width_mask = 0;
5338
5339 if (TREE_CODE (exp) == ERROR_MARK)
5340 return const0_rtx;
5341
5342 /* If we have nothing to store, do nothing unless the expression has
5343 side-effects. */
5344 if (bitsize == 0)
5345 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5346 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5347 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5348
5349 /* If we are storing into an unaligned field of an aligned union that is
5350 in a register, we may have the mode of TARGET being an integer mode but
5351 MODE == BLKmode. In that case, get an aligned object whose size and
5352 alignment are the same as TARGET and store TARGET into it (we can avoid
5353 the store if the field being stored is the entire width of TARGET). Then
5354 call ourselves recursively to store the field into a BLKmode version of
5355 that object. Finally, load from the object into TARGET. This is not
5356 very efficient in general, but should only be slightly more expensive
5357 than the otherwise-required unaligned accesses. Perhaps this can be
5358 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5359 twice, once with emit_move_insn and once via store_field. */
5360
5361 if (mode == BLKmode
5362 && (REG_P (target) || GET_CODE (target) == SUBREG))
5363 {
5364 rtx object = assign_temp (type, 0, 1, 1);
5365 rtx blk_object = adjust_address (object, BLKmode, 0);
5366
5367 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5368 emit_move_insn (object, target);
5369
5370 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5371 alias_set);
5372
5373 emit_move_insn (target, object);
5374
5375 /* We want to return the BLKmode version of the data. */
5376 return blk_object;
5377 }
5378
5379 if (GET_CODE (target) == CONCAT)
5380 {
5381 /* We're storing into a struct containing a single __complex. */
5382
5383 if (bitpos != 0)
5384 abort ();
5385 return store_expr (exp, target, value_mode != VOIDmode);
5386 }
5387
5388 /* If the structure is in a register or if the component
5389 is a bit field, we cannot use addressing to access it.
5390 Use bit-field techniques or SUBREG to store in it. */
5391
5392 if (mode == VOIDmode
5393 || (mode != BLKmode && ! direct_store[(int) mode]
5394 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5395 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5396 || REG_P (target)
5397 || GET_CODE (target) == SUBREG
5398 /* If the field isn't aligned enough to store as an ordinary memref,
5399 store it as a bit field. */
5400 || (mode != BLKmode
5401 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5402 || bitpos % GET_MODE_ALIGNMENT (mode))
5403 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5404 || (bitpos % BITS_PER_UNIT != 0)))
5405 /* If the RHS and field are a constant size and the size of the
5406 RHS isn't the same size as the bitfield, we must use bitfield
5407 operations. */
5408 || (bitsize >= 0
5409 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5410 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5411 {
5412 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5413
5414 /* If BITSIZE is narrower than the size of the type of EXP
5415 we will be narrowing TEMP. Normally, what's wanted are the
5416          low-order bits. However, if EXP's type is a record and this is a
5417          big-endian machine, we want the upper BITSIZE bits.
5418 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5419 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5420 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5421 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5422 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5423 - bitsize),
5424 NULL_RTX, 1);
5425
5426 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5427 MODE. */
5428 if (mode != VOIDmode && mode != BLKmode
5429 && mode != TYPE_MODE (TREE_TYPE (exp)))
5430 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5431
5432 /* If the modes of TARGET and TEMP are both BLKmode, both
5433 must be in memory and BITPOS must be aligned on a byte
5434 boundary. If so, we simply do a block copy. */
5435 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5436 {
5437 if (!MEM_P (target) || !MEM_P (temp)
5438 || bitpos % BITS_PER_UNIT != 0)
5439 abort ();
5440
5441 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5442 emit_block_move (target, temp,
5443 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5444 / BITS_PER_UNIT),
5445 BLOCK_OP_NORMAL);
5446
5447 return value_mode == VOIDmode ? const0_rtx : target;
5448 }
5449
5450 /* Store the value in the bitfield. */
5451 store_bit_field (target, bitsize, bitpos, mode, temp,
5452 int_size_in_bytes (type));
5453
5454 if (value_mode != VOIDmode)
5455 {
5456 /* The caller wants an rtx for the value.
5457 If possible, avoid refetching from the bitfield itself. */
5458 if (width_mask != 0
5459 && ! (MEM_P (target) && MEM_VOLATILE_P (target)))
5460 {
5461 tree count;
5462 enum machine_mode tmode;
5463
5464 tmode = GET_MODE (temp);
5465 if (tmode == VOIDmode)
5466 tmode = value_mode;
5467
5468 if (unsignedp)
5469 return expand_and (tmode, temp,
5470 gen_int_mode (width_mask, tmode),
5471 NULL_RTX);
5472
5473 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5474 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5475 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5476 }
5477
5478 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5479 NULL_RTX, value_mode, VOIDmode,
5480 int_size_in_bytes (type));
5481 }
5482 return const0_rtx;
5483 }
5484 else
5485 {
5486 rtx addr = XEXP (target, 0);
5487 rtx to_rtx = target;
5488
5489 /* If a value is wanted, it must be the lhs;
5490 so make the address stable for multiple use. */
5491
5492 if (value_mode != VOIDmode && !REG_P (addr)
5493 && ! CONSTANT_ADDRESS_P (addr)
5494 /* A frame-pointer reference is already stable. */
5495 && ! (GET_CODE (addr) == PLUS
5496 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5497 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5498 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5499 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5500
5501 /* Now build a reference to just the desired component. */
5502
5503 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5504
5505 if (to_rtx == target)
5506 to_rtx = copy_rtx (to_rtx);
5507
5508 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5509 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5510 set_mem_alias_set (to_rtx, alias_set);
5511
5512 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5513 }
5514 }
5515 \f
5516 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5517 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5518 codes and find the ultimate containing object, which we return.
5519
5520 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5521 bit position, and *PUNSIGNEDP to the signedness of the field.
5522 If the position of the field is variable, we store a tree
5523 giving the variable offset (in units) in *POFFSET.
5524 This offset is in addition to the bit position.
5525 If the position is not variable, we store 0 in *POFFSET.
5526
5527 If any of the extraction expressions is volatile,
5528 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5529
5530 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5531 is a mode that can be used to access the field. In that case, *PBITSIZE
5532 is redundant.
5533
5534 If the field describes a variable-sized object, *PMODE is set to
5535 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5536 this case, but the address of the object can be found. */
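/* Worked example (hypothetical layout, assuming BITS_PER_UNIT == 8): for a
   COMPONENT_REF s.f, where F is a 3-bit unsigned bit-field placed 2 bytes
   plus 1 bit into S, this returns the decl for S and sets *PBITSIZE = 3,
   *PBITPOS = 17, *POFFSET = 0, *PMODE = VOIDmode (a bit-field) and
   *PUNSIGNEDP = 1.  */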
5537
5538 tree
5539 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5540 HOST_WIDE_INT *pbitpos, tree *poffset,
5541 enum machine_mode *pmode, int *punsignedp,
5542 int *pvolatilep)
5543 {
5544 tree size_tree = 0;
5545 enum machine_mode mode = VOIDmode;
5546 tree offset = size_zero_node;
5547 tree bit_offset = bitsize_zero_node;
5548 tree tem;
5549
5550 /* First get the mode, signedness, and size. We do this from just the
5551 outermost expression. */
5552 if (TREE_CODE (exp) == COMPONENT_REF)
5553 {
5554 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5555 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5556 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5557
5558 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5559 }
5560 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5561 {
5562 size_tree = TREE_OPERAND (exp, 1);
5563 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5564 }
5565 else
5566 {
5567 mode = TYPE_MODE (TREE_TYPE (exp));
5568 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5569
5570 if (mode == BLKmode)
5571 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5572 else
5573 *pbitsize = GET_MODE_BITSIZE (mode);
5574 }
5575
5576 if (size_tree != 0)
5577 {
5578 if (! host_integerp (size_tree, 1))
5579 mode = BLKmode, *pbitsize = -1;
5580 else
5581 *pbitsize = tree_low_cst (size_tree, 1);
5582 }
5583
5584 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5585 and find the ultimate containing object. */
5586 while (1)
5587 {
5588 if (TREE_CODE (exp) == BIT_FIELD_REF)
5589 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5590 else if (TREE_CODE (exp) == COMPONENT_REF)
5591 {
5592 tree field = TREE_OPERAND (exp, 1);
5593 tree this_offset = component_ref_field_offset (exp);
5594
5595 /* If this field hasn't been filled in yet, don't go
5596 past it. This should only happen when folding expressions
5597 made during type construction. */
5598 if (this_offset == 0)
5599 break;
5600
5601 offset = size_binop (PLUS_EXPR, offset, this_offset);
5602 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5603 DECL_FIELD_BIT_OFFSET (field));
5604
5605 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5606 }
5607
5608 else if (TREE_CODE (exp) == ARRAY_REF
5609 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5610 {
5611 tree index = TREE_OPERAND (exp, 1);
5612 tree low_bound = array_ref_low_bound (exp);
5613 tree unit_size = array_ref_element_size (exp);
5614
5615 /* We assume all arrays have sizes that are a multiple of a byte.
5616 First subtract the lower bound, if any, in the type of the
5617 index, then convert to sizetype and multiply by the size of the
5618 array element. */
5619 if (! integer_zerop (low_bound))
5620 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5621 index, low_bound));
5622
5623 offset = size_binop (PLUS_EXPR, offset,
5624 size_binop (MULT_EXPR,
5625 convert (sizetype, index),
5626 unit_size));
5627 }
5628
5629       /* We can go inside most conversions: all NON_LVALUE_EXPRs, all normal
5630 conversions that don't change the mode, and all view conversions
5631 except those that need to "step up" the alignment. */
5632 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5633 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5634 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5635 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5636 && STRICT_ALIGNMENT
5637 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5638 < BIGGEST_ALIGNMENT)
5639 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5640 || TYPE_ALIGN_OK (TREE_TYPE
5641 (TREE_OPERAND (exp, 0))))))
5642 && ! ((TREE_CODE (exp) == NOP_EXPR
5643 || TREE_CODE (exp) == CONVERT_EXPR)
5644 && (TYPE_MODE (TREE_TYPE (exp))
5645 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5646 break;
5647
5648 /* If any reference in the chain is volatile, the effect is volatile. */
5649 if (TREE_THIS_VOLATILE (exp))
5650 *pvolatilep = 1;
5651
5652 exp = TREE_OPERAND (exp, 0);
5653 }
5654
5655 /* If OFFSET is constant, see if we can return the whole thing as a
5656 constant bit position. Otherwise, split it up. */
5657 if (host_integerp (offset, 0)
5658 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5659 bitsize_unit_node))
5660 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5661 && host_integerp (tem, 0))
5662 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5663 else
5664 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5665
5666 *pmode = mode;
5667 return exp;
5668 }
5669
5670 /* Return a tree of sizetype representing the size, in bytes, of the element
5671 of EXP, an ARRAY_REF. */
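/* For instance (assuming BITS_PER_UNIT == 8): if operand 3 of the ARRAY_REF
   is 2 and TYPE_ALIGN of the element type is 32 bits, the element size is
   2 * (32 / 8) == 8 bytes; with no operand 3, TYPE_SIZE_UNIT of the element
   type is used instead.  */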
5672
5673 tree
5674 array_ref_element_size (tree exp)
5675 {
5676 tree aligned_size = TREE_OPERAND (exp, 3);
5677 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5678
5679 /* If a size was specified in the ARRAY_REF, it's the size measured
5680 in alignment units of the element type. So multiply by that value. */
5681 if (aligned_size)
5682 return size_binop (MULT_EXPR, aligned_size,
5683 size_int (TYPE_ALIGN (elmt_type) / BITS_PER_UNIT));
5684
5685 /* Otherwise, take the size from that of the element type. Substitute
5686 any PLACEHOLDER_EXPR that we have. */
5687 else
5688 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
5689 }
5690
5691 /* Return a tree representing the lower bound of the array mentioned in
5692 EXP, an ARRAY_REF. */
5693
5694 tree
5695 array_ref_low_bound (tree exp)
5696 {
5697 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5698
5699 /* If a lower bound is specified in EXP, use it. */
5700 if (TREE_OPERAND (exp, 2))
5701 return TREE_OPERAND (exp, 2);
5702
5703 /* Otherwise, if there is a domain type and it has a lower bound, use it,
5704 substituting for a PLACEHOLDER_EXPR as needed. */
5705 if (domain_type && TYPE_MIN_VALUE (domain_type))
5706 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5707
5708 /* Otherwise, return a zero of the appropriate type. */
5709 return fold_convert (TREE_TYPE (TREE_OPERAND (exp, 1)), integer_zero_node);
5710 }
5711
5712 /* Return a tree representing the offset, in bytes, of the field referenced
5713 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
5714
5715 tree
5716 component_ref_field_offset (tree exp)
5717 {
5718 tree aligned_offset = TREE_OPERAND (exp, 2);
5719 tree field = TREE_OPERAND (exp, 1);
5720
5721 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5722 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
5723 value. */
5724 if (aligned_offset)
5725 return size_binop (MULT_EXPR, aligned_offset,
5726 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5727
5728 /* Otherwise, take the offset from that of the field. Substitute
5729 any PLACEHOLDER_EXPR that we have. */
5730 else
5731 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
5732 }
5733
5734 /* Return 1 if T is an expression that get_inner_reference handles. */
5735
5736 int
5737 handled_component_p (tree t)
5738 {
5739 switch (TREE_CODE (t))
5740 {
5741 case BIT_FIELD_REF:
5742 case COMPONENT_REF:
5743 case ARRAY_REF:
5744 case ARRAY_RANGE_REF:
5745 case NON_LVALUE_EXPR:
5746 case VIEW_CONVERT_EXPR:
5747 return 1;
5748
5749 /* ??? Sure they are handled, but get_inner_reference may return
5750 a different PBITSIZE, depending upon whether the expression is
5751 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
5752 case NOP_EXPR:
5753 case CONVERT_EXPR:
5754 return (TYPE_MODE (TREE_TYPE (t))
5755 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5756
5757 default:
5758 return 0;
5759 }
5760 }
5761 \f
5762 /* Given an rtx VALUE that may contain additions and multiplications, return
5763 an equivalent value that just refers to a register, memory, or constant.
5764 This is done by generating instructions to perform the arithmetic and
5765 returning a pseudo-register containing the value.
5766
5767 The returned value may be a REG, SUBREG, MEM or constant. */
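/* Sketch of typical behavior (hypothetical pseudo numbers): for
   VALUE == (plus:SI (mult:SI (reg:SI 60) (const_int 4)) (reg:SI 61)),
   force_operand emits the multiply and the addition and returns a pseudo
   holding the sum; a plain REG, MEM or constant is returned unchanged.  */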
5768
5769 rtx
5770 force_operand (rtx value, rtx target)
5771 {
5772 rtx op1, op2;
5773 /* Use subtarget as the target for operand 0 of a binary operation. */
5774 rtx subtarget = get_subtarget (target);
5775 enum rtx_code code = GET_CODE (value);
5776
5777   /* Check for subreg applied to an expression produced by the loop optimizer. */
5778 if (code == SUBREG
5779 && !REG_P (SUBREG_REG (value))
5780 && !MEM_P (SUBREG_REG (value)))
5781 {
5782 value = simplify_gen_subreg (GET_MODE (value),
5783 force_reg (GET_MODE (SUBREG_REG (value)),
5784 force_operand (SUBREG_REG (value),
5785 NULL_RTX)),
5786 GET_MODE (SUBREG_REG (value)),
5787 SUBREG_BYTE (value));
5788 code = GET_CODE (value);
5789 }
5790
5791 /* Check for a PIC address load. */
5792 if ((code == PLUS || code == MINUS)
5793 && XEXP (value, 0) == pic_offset_table_rtx
5794 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5795 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5796 || GET_CODE (XEXP (value, 1)) == CONST))
5797 {
5798 if (!subtarget)
5799 subtarget = gen_reg_rtx (GET_MODE (value));
5800 emit_move_insn (subtarget, value);
5801 return subtarget;
5802 }
5803
5804 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5805 {
5806 if (!target)
5807 target = gen_reg_rtx (GET_MODE (value));
5808 convert_move (target, force_operand (XEXP (value, 0), NULL),
5809 code == ZERO_EXTEND);
5810 return target;
5811 }
5812
5813 if (ARITHMETIC_P (value))
5814 {
5815 op2 = XEXP (value, 1);
5816 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
5817 subtarget = 0;
5818 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5819 {
5820 code = PLUS;
5821 op2 = negate_rtx (GET_MODE (value), op2);
5822 }
5823
5824 /* Check for an addition with OP2 a constant integer and our first
5825 operand a PLUS of a virtual register and something else. In that
5826 case, we want to emit the sum of the virtual register and the
5827 constant first and then add the other value. This allows virtual
5828 register instantiation to simply modify the constant rather than
5829 creating another one around this addition. */
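      /* For example (hypothetical pseudo numbers): given
         (plus:SI (plus:SI (reg:SI virtual-stack-vars) (reg:SI 60))
                  (const_int 8))
         we first emit virtual-stack-vars + 8, which instantiation can fold
         into a single frame-pointer offset, and only then add (reg:SI 60).  */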
5830 if (code == PLUS && GET_CODE (op2) == CONST_INT
5831 && GET_CODE (XEXP (value, 0)) == PLUS
5832 && REG_P (XEXP (XEXP (value, 0), 0))
5833 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5834 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5835 {
5836 rtx temp = expand_simple_binop (GET_MODE (value), code,
5837 XEXP (XEXP (value, 0), 0), op2,
5838 subtarget, 0, OPTAB_LIB_WIDEN);
5839 return expand_simple_binop (GET_MODE (value), code, temp,
5840 force_operand (XEXP (XEXP (value,
5841 0), 1), 0),
5842 target, 0, OPTAB_LIB_WIDEN);
5843 }
5844
5845 op1 = force_operand (XEXP (value, 0), subtarget);
5846 op2 = force_operand (op2, NULL_RTX);
5847 switch (code)
5848 {
5849 case MULT:
5850 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5851 case DIV:
5852 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5853 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5854 target, 1, OPTAB_LIB_WIDEN);
5855 else
5856 return expand_divmod (0,
5857 FLOAT_MODE_P (GET_MODE (value))
5858 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5859 GET_MODE (value), op1, op2, target, 0);
5860 break;
5861 case MOD:
5862 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5863 target, 0);
5864 break;
5865 case UDIV:
5866 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5867 target, 1);
5868 break;
5869 case UMOD:
5870 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5871 target, 1);
5872 break;
5873 case ASHIFTRT:
5874 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5875 target, 0, OPTAB_LIB_WIDEN);
5876 break;
5877 default:
5878 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5879 target, 1, OPTAB_LIB_WIDEN);
5880 }
5881 }
5882 if (UNARY_P (value))
5883 {
5884 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5885 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5886 }
5887
5888 #ifdef INSN_SCHEDULING
5889   /* On machines that have insn scheduling, we want all memory references to be
5890 explicit, so we need to deal with such paradoxical SUBREGs. */
5891 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
5892 && (GET_MODE_SIZE (GET_MODE (value))
5893 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5894 value
5895 = simplify_gen_subreg (GET_MODE (value),
5896 force_reg (GET_MODE (SUBREG_REG (value)),
5897 force_operand (SUBREG_REG (value),
5898 NULL_RTX)),
5899 GET_MODE (SUBREG_REG (value)),
5900 SUBREG_BYTE (value));
5901 #endif
5902
5903 return value;
5904 }
5905 \f
5906 /* Subroutine of expand_expr: return nonzero iff there is no way that
5907 EXP can reference X, which is being modified. TOP_P is nonzero if this
5908 call is going to be used to determine whether we need a temporary
5909 for EXP, as opposed to a recursive call to this function.
5910
5911 It is always safe for this routine to return zero since it merely
5912 searches for optimization opportunities. */
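/* The typical caller pattern (as in expand_operands further down) is a
   conservative check before reusing a suggested target, where OTHER_OPERAND
   stands for the tree operand that has not been expanded yet:

       if (! safe_from_p (target, other_operand, 1))
         target = 0;

   i.e. fall back to a fresh pseudo.  A zero return here only costs a
   temporary; a wrong nonzero return would be a correctness bug.  */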
5913
5914 int
5915 safe_from_p (rtx x, tree exp, int top_p)
5916 {
5917 rtx exp_rtl = 0;
5918 int i, nops;
5919
5920 if (x == 0
5921 /* If EXP has varying size, we MUST use a target since we currently
5922 have no way of allocating temporaries of variable size
5923 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5924 So we assume here that something at a higher level has prevented a
5925 clash. This is somewhat bogus, but the best we can do. Only
5926 do this when X is BLKmode and when we are at the top level. */
5927 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5928 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5929 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5930 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5931 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5932 != INTEGER_CST)
5933 && GET_MODE (x) == BLKmode)
5934 /* If X is in the outgoing argument area, it is always safe. */
5935 || (MEM_P (x)
5936 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5937 || (GET_CODE (XEXP (x, 0)) == PLUS
5938 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5939 return 1;
5940
5941   /* If this is a subreg of a hard register, declare it unsafe; otherwise,
5942 find the underlying pseudo. */
5943 if (GET_CODE (x) == SUBREG)
5944 {
5945 x = SUBREG_REG (x);
5946 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5947 return 0;
5948 }
5949
5950 /* Now look at our tree code and possibly recurse. */
5951 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5952 {
5953 case 'd':
5954 exp_rtl = DECL_RTL_IF_SET (exp);
5955 break;
5956
5957 case 'c':
5958 return 1;
5959
5960 case 'x':
5961 if (TREE_CODE (exp) == TREE_LIST)
5962 {
5963 while (1)
5964 {
5965 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5966 return 0;
5967 exp = TREE_CHAIN (exp);
5968 if (!exp)
5969 return 1;
5970 if (TREE_CODE (exp) != TREE_LIST)
5971 return safe_from_p (x, exp, 0);
5972 }
5973 }
5974 else if (TREE_CODE (exp) == ERROR_MARK)
5975 return 1; /* An already-visited SAVE_EXPR? */
5976 else
5977 return 0;
5978
5979 case 's':
5980 /* The only case we look at here is the DECL_INITIAL inside a
5981 DECL_EXPR. */
5982 return (TREE_CODE (exp) != DECL_EXPR
5983 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
5984 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
5985 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
5986
5987 case '2':
5988 case '<':
5989 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
5990 return 0;
5991 /* Fall through. */
5992
5993 case '1':
5994 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5995
5996 case 'e':
5997 case 'r':
5998 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5999 the expression. If it is set, we conflict iff we are that rtx or
6000 both are in memory. Otherwise, we check all operands of the
6001 expression recursively. */
6002
6003 switch (TREE_CODE (exp))
6004 {
6005 case ADDR_EXPR:
6006 /* If the operand is static or we are static, we can't conflict.
6007 Likewise if we don't conflict with the operand at all. */
6008 if (staticp (TREE_OPERAND (exp, 0))
6009 || TREE_STATIC (exp)
6010 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6011 return 1;
6012
6013 /* Otherwise, the only way this can conflict is if we are taking
6014            the address of a DECL whose address is part of X, which is
6015 very rare. */
6016 exp = TREE_OPERAND (exp, 0);
6017 if (DECL_P (exp))
6018 {
6019 if (!DECL_RTL_SET_P (exp)
6020 || !MEM_P (DECL_RTL (exp)))
6021 return 0;
6022 else
6023 exp_rtl = XEXP (DECL_RTL (exp), 0);
6024 }
6025 break;
6026
6027 case INDIRECT_REF:
6028 if (MEM_P (x)
6029 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6030 get_alias_set (exp)))
6031 return 0;
6032 break;
6033
6034 case CALL_EXPR:
6035 /* Assume that the call will clobber all hard registers and
6036 all of memory. */
6037 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6038 || MEM_P (x))
6039 return 0;
6040 break;
6041
6042 case WITH_CLEANUP_EXPR:
6043 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
6044 break;
6045
6046 case CLEANUP_POINT_EXPR:
6047 case SAVE_EXPR:
6048 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6049
6050 case BIND_EXPR:
6051 /* The only operand we look at is operand 1. The rest aren't
6052 part of the expression. */
6053 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
6054
6055 default:
6056 break;
6057 }
6058
6059 /* If we have an rtx, we do not need to scan our operands. */
6060 if (exp_rtl)
6061 break;
6062
6063 nops = first_rtl_op (TREE_CODE (exp));
6064 for (i = 0; i < nops; i++)
6065 if (TREE_OPERAND (exp, i) != 0
6066 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6067 return 0;
6068
6069 /* If this is a language-specific tree code, it may require
6070 special handling. */
6071 if ((unsigned int) TREE_CODE (exp)
6072 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6073 && !lang_hooks.safe_from_p (x, exp))
6074 return 0;
6075 }
6076
6077 /* If we have an rtl, find any enclosed object. Then see if we conflict
6078 with it. */
6079 if (exp_rtl)
6080 {
6081 if (GET_CODE (exp_rtl) == SUBREG)
6082 {
6083 exp_rtl = SUBREG_REG (exp_rtl);
6084 if (REG_P (exp_rtl)
6085 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6086 return 0;
6087 }
6088
6089 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6090 are memory and they conflict. */
6091 return ! (rtx_equal_p (x, exp_rtl)
6092 || (MEM_P (x) && MEM_P (exp_rtl)
6093 && true_dependence (exp_rtl, VOIDmode, x,
6094 rtx_addr_varies_p)));
6095 }
6096
6097 /* If we reach here, it is safe. */
6098 return 1;
6099 }
6100
6101 /* Subroutine of expand_expr: return rtx if EXP is a
6102 variable or parameter; else return 0. */
6103
6104 static rtx
6105 var_rtx (tree exp)
6106 {
6107 STRIP_NOPS (exp);
6108 switch (TREE_CODE (exp))
6109 {
6110 case PARM_DECL:
6111 case VAR_DECL:
6112 return DECL_RTL (exp);
6113 default:
6114 return 0;
6115 }
6116 }
6117 \f
6118 /* Return the highest power of two that EXP is known to be a multiple of.
6119 This is used in updating alignment of MEMs in array references. */
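/* A couple of worked cases: for INTEGER_CST 24 the result is 24 & -24 == 8;
   for an expression like I * 12 + 4 with I a variable, the MULT_EXPR case
   gives 1 * 4 == 4 and the enclosing PLUS_EXPR case gives MIN (4, 4) == 4.  */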
6120
6121 static unsigned HOST_WIDE_INT
6122 highest_pow2_factor (tree exp)
6123 {
6124 unsigned HOST_WIDE_INT c0, c1;
6125
6126 switch (TREE_CODE (exp))
6127 {
6128 case INTEGER_CST:
6129 /* We can find the lowest bit that's a one. If the low
6130 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6131 We need to handle this case since we can find it in a COND_EXPR,
6132 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6133 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6134 later ICE. */
6135 if (TREE_CONSTANT_OVERFLOW (exp))
6136 return BIGGEST_ALIGNMENT;
6137 else
6138 {
6139         /* Note: tree_low_cst is intentionally not used here;
6140 we don't care about the upper bits. */
6141 c0 = TREE_INT_CST_LOW (exp);
6142 c0 &= -c0;
6143 return c0 ? c0 : BIGGEST_ALIGNMENT;
6144 }
6145 break;
6146
6147 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6148 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6149 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6150 return MIN (c0, c1);
6151
6152 case MULT_EXPR:
6153 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6154 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6155 return c0 * c1;
6156
6157 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6158 case CEIL_DIV_EXPR:
6159 if (integer_pow2p (TREE_OPERAND (exp, 1))
6160 && host_integerp (TREE_OPERAND (exp, 1), 1))
6161 {
6162 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6163 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6164 return MAX (1, c0 / c1);
6165 }
6166 break;
6167
6168 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6169 case SAVE_EXPR:
6170 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6171
6172 case COMPOUND_EXPR:
6173 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6174
6175 case COND_EXPR:
6176 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6177 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6178 return MIN (c0, c1);
6179
6180 default:
6181 break;
6182 }
6183
6184 return 1;
6185 }
6186
6187 /* Similar, except that the alignment requirements of TARGET are
6188 taken into account. Assume it is at least as aligned as its
6189 type, unless it is a COMPONENT_REF in which case the layout of
6190 the structure gives the alignment. */
6191
6192 static unsigned HOST_WIDE_INT
6193 highest_pow2_factor_for_target (tree target, tree exp)
6194 {
6195 unsigned HOST_WIDE_INT target_align, factor;
6196
6197 factor = highest_pow2_factor (exp);
6198 if (TREE_CODE (target) == COMPONENT_REF)
6199 target_align = DECL_ALIGN (TREE_OPERAND (target, 1)) / BITS_PER_UNIT;
6200 else
6201 target_align = TYPE_ALIGN (TREE_TYPE (target)) / BITS_PER_UNIT;
6202 return MAX (factor, target_align);
6203 }
6204 \f
6205 /* Expands variable VAR. */
6206
6207 void
6208 expand_var (tree var)
6209 {
6210 if (DECL_EXTERNAL (var))
6211 return;
6212
6213 if (TREE_STATIC (var))
6214 /* If this is an inlined copy of a static local variable,
6215 look up the original decl. */
6216 var = DECL_ORIGIN (var);
6217
6218 if (TREE_STATIC (var)
6219 ? !TREE_ASM_WRITTEN (var)
6220 : !DECL_RTL_SET_P (var))
6221 {
6222 if (TREE_CODE (var) == VAR_DECL && DECL_DEFER_OUTPUT (var))
6223 {
6224 /* Prepare a mem & address for the decl. */
6225 rtx x;
6226
6227 if (TREE_STATIC (var))
6228 abort ();
6229
6230 x = gen_rtx_MEM (DECL_MODE (var),
6231 gen_reg_rtx (Pmode));
6232
6233 set_mem_attributes (x, var, 1);
6234 SET_DECL_RTL (var, x);
6235 }
6236 else if (lang_hooks.expand_decl (var))
6237 /* OK. */;
6238 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6239 expand_decl (var);
6240 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
6241 rest_of_decl_compilation (var, NULL, 0, 0);
6242 else if (TREE_CODE (var) == TYPE_DECL
6243 || TREE_CODE (var) == CONST_DECL
6244 || TREE_CODE (var) == FUNCTION_DECL
6245 || TREE_CODE (var) == LABEL_DECL)
6246 /* No expansion needed. */;
6247 else
6248 abort ();
6249 }
6250 }
6251
6252 /* Expands declarations of variables in list VARS. */
6253
6254 static void
6255 expand_vars (tree vars)
6256 {
6257 for (; vars; vars = TREE_CHAIN (vars))
6258 {
6259 tree var = vars;
6260
6261 if (DECL_EXTERNAL (var))
6262 continue;
6263
6264 expand_var (var);
6265 expand_decl_init (var);
6266 }
6267 }
6268
6269 /* Subroutine of expand_expr. Expand the two operands of a binary
6270 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6271 The value may be stored in TARGET if TARGET is nonzero. The
6272 MODIFIER argument is as documented by expand_expr. */
6273
6274 static void
6275 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6276 enum expand_modifier modifier)
6277 {
6278 if (! safe_from_p (target, exp1, 1))
6279 target = 0;
6280 if (operand_equal_p (exp0, exp1, 0))
6281 {
6282 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6283 *op1 = copy_rtx (*op0);
6284 }
6285 else
6286 {
6287 /* If we need to preserve evaluation order, copy exp0 into its own
6288 temporary variable so that it can't be clobbered by exp1. */
6289 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6290 exp0 = save_expr (exp0);
6291 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6292 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6293 }
6294 }
6295
6296 \f
6297 /* expand_expr: generate code for computing expression EXP.
6298 An rtx for the computed value is returned. The value is never null.
6299 In the case of a void EXP, const0_rtx is returned.
6300
6301 The value may be stored in TARGET if TARGET is nonzero.
6302 TARGET is just a suggestion; callers must assume that
6303 the rtx returned may not be the same as TARGET.
6304
6305 If TARGET is CONST0_RTX, it means that the value will be ignored.
6306
6307 If TMODE is not VOIDmode, it suggests generating the
6308 result in mode TMODE. But this is done only when convenient.
6309    Otherwise, TMODE is ignored and the value is generated in its natural mode.
6310 TMODE is just a suggestion; callers must assume that
6311 the rtx returned may not have mode TMODE.
6312
6313 Note that TARGET may have neither TMODE nor MODE. In that case, it
6314 probably will not be used.
6315
6316 If MODIFIER is EXPAND_SUM then when EXP is an addition
6317 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6318 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6319 products as above, or REG or MEM, or constant.
6320 Ordinarily in such cases we would output mul or add instructions
6321 and then return a pseudo reg containing the sum.
6322
6323 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6324 it also marks a label as absolutely required (it can't be dead).
6325 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6326 This is used for outputting expressions used in initializers.
6327
6328 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6329 with a constant address even if that address is not normally legitimate.
6330 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6331
6332 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6333 a call parameter. Such targets require special care as we haven't yet
6334 marked TARGET so that it's safe from being trashed by libcalls. We
6335 don't want to use TARGET for anything but the final result;
6336    intermediate values must go elsewhere. Additionally, calls to
6337 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6338
6339 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6340 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6341 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6342 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6343 recursively. */
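/* A minimal usage sketch (hypothetical caller): expanding an expression into
   whatever register or memory the expander prefers would be

       rtx val = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);

   Since TARGET and TMODE are only suggestions, the caller must always use
   the returned rtx rather than assume the result landed in TARGET.  */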
6344
6345 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6346 enum expand_modifier, rtx *);
6347
6348 rtx
6349 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6350 enum expand_modifier modifier, rtx *alt_rtl)
6351 {
6352 int rn = -1;
6353 rtx ret, last = NULL;
6354
6355 /* Handle ERROR_MARK before anybody tries to access its type. */
6356 if (TREE_CODE (exp) == ERROR_MARK
6357 || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
6358 {
6359 ret = CONST0_RTX (tmode);
6360 return ret ? ret : const0_rtx;
6361 }
6362
6363 if (flag_non_call_exceptions)
6364 {
6365 rn = lookup_stmt_eh_region (exp);
6366 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
6367 if (rn >= 0)
6368 last = get_last_insn ();
6369 }
6370
6371 /* If this is an expression of some kind and it has an associated line
6372 number, then emit the line number before expanding the expression.
6373
6374 We need to save and restore the file and line information so that
6375 errors discovered during expansion are emitted with the right
6376      information. It would be better if the diagnostic routines
6377 used the file/line information embedded in the tree nodes rather
6378 than globals. */
6379 if (cfun && EXPR_HAS_LOCATION (exp))
6380 {
6381 location_t saved_location = input_location;
6382 input_location = EXPR_LOCATION (exp);
6383 emit_line_note (input_location);
6384
6385 /* Record where the insns produced belong. */
6386 record_block_change (TREE_BLOCK (exp));
6387
6388 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6389
6390 input_location = saved_location;
6391 }
6392 else
6393 {
6394 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6395 }
6396
6397 /* If using non-call exceptions, mark all insns that may trap.
6398 expand_call() will mark CALL_INSNs before we get to this code,
6399 but it doesn't handle libcalls, and these may trap. */
6400 if (rn >= 0)
6401 {
6402 rtx insn;
6403 for (insn = next_real_insn (last); insn;
6404 insn = next_real_insn (insn))
6405 {
6406 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6407 /* If we want exceptions for non-call insns, any
6408 may_trap_p instruction may throw. */
6409 && GET_CODE (PATTERN (insn)) != CLOBBER
6410 && GET_CODE (PATTERN (insn)) != USE
6411 && (GET_CODE (insn) == CALL_INSN || may_trap_p (PATTERN (insn))))
6412 {
6413 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6414 REG_NOTES (insn));
6415 }
6416 }
6417 }
6418
6419 return ret;
6420 }
6421
6422 static rtx
6423 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6424 enum expand_modifier modifier, rtx *alt_rtl)
6425 {
6426 rtx op0, op1, temp;
6427 tree type = TREE_TYPE (exp);
6428 int unsignedp;
6429 enum machine_mode mode;
6430 enum tree_code code = TREE_CODE (exp);
6431 optab this_optab;
6432 rtx subtarget, original_target;
6433 int ignore;
6434 tree context;
6435
6436 mode = TYPE_MODE (type);
6437 unsignedp = TYPE_UNSIGNED (type);
6438
6439 /* Use subtarget as the target for operand 0 of a binary operation. */
6440 subtarget = get_subtarget (target);
6441 original_target = target;
6442 ignore = (target == const0_rtx
6443 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6444 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6445 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6446 && TREE_CODE (type) == VOID_TYPE));
6447
6448 /* If we are going to ignore this result, we need only do something
6449 if there is a side-effect somewhere in the expression. If there
6450 is, short-circuit the most common cases here. Note that we must
6451 not call expand_expr with anything but const0_rtx in case this
6452 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6453
6454 if (ignore)
6455 {
6456 if (! TREE_SIDE_EFFECTS (exp))
6457 return const0_rtx;
6458
6459 /* Ensure we reference a volatile object even if value is ignored, but
6460 don't do this if all we are doing is taking its address. */
6461 if (TREE_THIS_VOLATILE (exp)
6462 && TREE_CODE (exp) != FUNCTION_DECL
6463 && mode != VOIDmode && mode != BLKmode
6464 && modifier != EXPAND_CONST_ADDRESS)
6465 {
6466 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6467 if (MEM_P (temp))
6468 temp = copy_to_reg (temp);
6469 return const0_rtx;
6470 }
6471
6472 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6473 || code == INDIRECT_REF || code == BUFFER_REF)
6474 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6475 modifier);
6476
6477 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6478 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6479 {
6480 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6481 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6482 return const0_rtx;
6483 }
6484 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6485 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6486 /* If the second operand has no side effects, just evaluate
6487 the first. */
6488 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6489 modifier);
6490 else if (code == BIT_FIELD_REF)
6491 {
6492 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6493 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6494 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6495 return const0_rtx;
6496 }
6497
6498 target = 0;
6499 }
6500
6501   /* If we will do cse, generate all results into pseudo registers
6502 since 1) that allows cse to find more things
6503 and 2) otherwise cse could produce an insn the machine
6504 cannot support. An exception is a CONSTRUCTOR into a multi-word
6505 MEM: that's much more likely to be most efficient into the MEM.
6506 Another is a CALL_EXPR which must return in memory. */
6507
6508 if (! cse_not_expected && mode != BLKmode && target
6509 && (!REG_P (target) || REGNO (target) < FIRST_PSEUDO_REGISTER)
6510 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6511 && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
6512 target = 0;
6513
6514 switch (code)
6515 {
6516 case LABEL_DECL:
6517 {
6518 tree function = decl_function_context (exp);
6519
6520 temp = label_rtx (exp);
6521 temp = gen_rtx_LABEL_REF (Pmode, temp);
6522
6523 if (function != current_function_decl
6524 && function != 0)
6525 LABEL_REF_NONLOCAL_P (temp) = 1;
6526
6527 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
6528 return temp;
6529 }
6530
6531 case PARM_DECL:
6532 if (!DECL_RTL_SET_P (exp))
6533 {
6534 error ("%Jprior parameter's size depends on '%D'", exp, exp);
6535 return CONST0_RTX (mode);
6536 }
6537
6538 /* ... fall through ... */
6539
6540 case VAR_DECL:
6541 /* If a static var's type was incomplete when the decl was written,
6542 but the type is complete now, lay out the decl now. */
6543 if (DECL_SIZE (exp) == 0
6544 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6545 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6546 layout_decl (exp, 0);
6547
6548 /* ... fall through ... */
6549
6550 case FUNCTION_DECL:
6551 case RESULT_DECL:
6552 if (DECL_RTL (exp) == 0)
6553 abort ();
6554
6555       /* Ensure the variable is marked as used even if it doesn't go through
6556          a parser. If it hasn't been used yet, write out an external
6557 definition. */
6558 if (! TREE_USED (exp))
6559 {
6560 assemble_external (exp);
6561 TREE_USED (exp) = 1;
6562 }
6563
6564 /* Show we haven't gotten RTL for this yet. */
6565 temp = 0;
6566
6567 /* Handle variables inherited from containing functions. */
6568 context = decl_function_context (exp);
6569
6570 if (context != 0 && context != current_function_decl
6571 /* If var is static, we don't need a static chain to access it. */
6572 && ! (MEM_P (DECL_RTL (exp))
6573 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6574 {
6575 rtx addr;
6576
6577 /* Mark as non-local and addressable. */
6578 DECL_NONLOCAL (exp) = 1;
6579 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6580 abort ();
6581 lang_hooks.mark_addressable (exp);
6582 if (!MEM_P (DECL_RTL (exp)))
6583 abort ();
6584 addr = XEXP (DECL_RTL (exp), 0);
6585 if (MEM_P (addr))
6586 addr
6587 = replace_equiv_address (addr,
6588 fix_lexical_addr (XEXP (addr, 0), exp));
6589 else
6590 addr = fix_lexical_addr (addr, exp);
6591
6592 temp = replace_equiv_address (DECL_RTL (exp), addr);
6593 }
6594
6595 /* This is the case of an array whose size is to be determined
6596 from its initializer, while the initializer is still being parsed.
6597 See expand_decl. */
6598
6599 else if (MEM_P (DECL_RTL (exp))
6600 && REG_P (XEXP (DECL_RTL (exp), 0)))
6601 temp = validize_mem (DECL_RTL (exp));
6602
6603       /* If DECL_RTL is memory, we are in the normal case. If the address
6604          is not valid, or it is not a register and -fforce-addr is
6605          specified, get the address into a register. */
6606
6607 else if (MEM_P (DECL_RTL (exp))
6608 && modifier != EXPAND_CONST_ADDRESS
6609 && modifier != EXPAND_SUM
6610 && modifier != EXPAND_INITIALIZER
6611 && (! memory_address_p (DECL_MODE (exp),
6612 XEXP (DECL_RTL (exp), 0))
6613 || (flag_force_addr
6614 && !REG_P (XEXP (DECL_RTL (exp), 0)))))
6615 {
6616 if (alt_rtl)
6617 *alt_rtl = DECL_RTL (exp);
6618 temp = replace_equiv_address (DECL_RTL (exp),
6619 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6620 }
6621
6622 /* If we got something, return it. But first, set the alignment
6623 if the address is a register. */
6624 if (temp != 0)
6625 {
6626 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
6627 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6628
6629 return temp;
6630 }
6631
6632 /* If the mode of DECL_RTL does not match that of the decl, it
6633 must be a promoted value. We return a SUBREG of the wanted mode,
6634 but mark it so that we know that it was already extended. */
6635
6636 if (REG_P (DECL_RTL (exp))
6637 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6638 {
6639 /* Get the signedness used for this variable. Ensure we get the
6640 same mode we got when the variable was declared. */
6641 if (GET_MODE (DECL_RTL (exp))
6642 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6643 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6644 abort ();
6645
6646 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6647 SUBREG_PROMOTED_VAR_P (temp) = 1;
6648 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6649 return temp;
6650 }
6651
6652 return DECL_RTL (exp);
6653
6654 case INTEGER_CST:
6655 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6656 TREE_INT_CST_HIGH (exp), mode);
6657
6658 /* ??? If overflow is set, fold will have done an incomplete job,
6659 which can result in (plus xx (const_int 0)), which can get
6660 simplified by validate_replace_rtx during virtual register
6661 instantiation, which can result in unrecognizable insns.
6662 Avoid this by forcing all overflows into registers. */
6663 if (TREE_CONSTANT_OVERFLOW (exp)
6664 && modifier != EXPAND_INITIALIZER)
6665 temp = force_reg (mode, temp);
6666
6667 return temp;
6668
6669 case VECTOR_CST:
6670 return const_vector_from_tree (exp);
6671
6672 case CONST_DECL:
6673 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6674
6675 case REAL_CST:
6676 /* If optimized, generate immediate CONST_DOUBLE
6677 which will be turned into memory by reload if necessary.
6678
6679 We used to force a register so that loop.c could see it. But
6680 this does not allow gen_* patterns to perform optimizations with
6681 the constants. It also produces two insns in cases like "x = 1.0;".
6682 On most machines, floating-point constants are not permitted in
6683 many insns, so we'd end up copying it to a register in any case.
6684
6685 Now, we do the copying in expand_binop, if appropriate. */
6686 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6687 TYPE_MODE (TREE_TYPE (exp)));
6688
6689 case COMPLEX_CST:
6690 /* Handle evaluating a complex constant in a CONCAT target. */
6691 if (original_target && GET_CODE (original_target) == CONCAT)
6692 {
6693 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6694 rtx rtarg, itarg;
6695
6696 rtarg = XEXP (original_target, 0);
6697 itarg = XEXP (original_target, 1);
6698
6699 /* Move the real and imaginary parts separately. */
6700 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6701 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6702
6703 if (op0 != rtarg)
6704 emit_move_insn (rtarg, op0);
6705 if (op1 != itarg)
6706 emit_move_insn (itarg, op1);
6707
6708 return original_target;
6709 }
6710
6711 /* ... fall through ... */
6712
6713 case STRING_CST:
6714 temp = output_constant_def (exp, 1);
6715
6716 /* temp contains a constant address.
6717 On RISC machines where a constant address isn't valid,
6718 make some insns to get that address into a register. */
6719 if (modifier != EXPAND_CONST_ADDRESS
6720 && modifier != EXPAND_INITIALIZER
6721 && modifier != EXPAND_SUM
6722 && (! memory_address_p (mode, XEXP (temp, 0))
6723 || flag_force_addr))
6724 return replace_equiv_address (temp,
6725 copy_rtx (XEXP (temp, 0)));
6726 return temp;
6727
6728 case SAVE_EXPR:
6729 {
6730 tree val = TREE_OPERAND (exp, 0);
6731 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
6732
6733 if (TREE_CODE (val) != VAR_DECL || !DECL_ARTIFICIAL (val))
6734 {
6735 /* We can indeed still hit this case, typically via builtin
6736 expanders calling save_expr immediately before expanding
6737 something. Assume this means that we only have to deal
6738 with non-BLKmode values. */
6739 if (GET_MODE (ret) == BLKmode)
6740 abort ();
6741
6742 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
6743 DECL_ARTIFICIAL (val) = 1;
6744 TREE_OPERAND (exp, 0) = val;
6745
6746 if (!CONSTANT_P (ret))
6747 ret = copy_to_reg (ret);
6748 SET_DECL_RTL (val, ret);
6749 }
6750
6751 return ret;
6752 }
6753
6754 case UNSAVE_EXPR:
6755 {
6756 rtx temp;
6757 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6758 TREE_OPERAND (exp, 0)
6759 = lang_hooks.unsave_expr_now (TREE_OPERAND (exp, 0));
6760 return temp;
6761 }
6762
6763 case GOTO_EXPR:
6764 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6765 expand_goto (TREE_OPERAND (exp, 0));
6766 else
6767 expand_computed_goto (TREE_OPERAND (exp, 0));
6768 return const0_rtx;
6769
6770 /* These are lowered during gimplification, so we should never ever
6771 see them here. */
6772 case LOOP_EXPR:
6773 case EXIT_EXPR:
6774 abort ();
6775
6776 case LABELED_BLOCK_EXPR:
6777 if (LABELED_BLOCK_BODY (exp))
6778 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6779 /* Should perhaps use expand_label, but this is simpler and safer. */
6780 do_pending_stack_adjust ();
6781 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6782 return const0_rtx;
6783
6784 case EXIT_BLOCK_EXPR:
6785 if (EXIT_BLOCK_RETURN (exp))
6786 sorry ("returned value in block_exit_expr");
6787 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6788 return const0_rtx;
6789
6790 case BIND_EXPR:
6791 {
6792 tree block = BIND_EXPR_BLOCK (exp);
6793 int mark_ends;
6794
6795 /* If we're in functions-as-trees mode, this BIND_EXPR represents
6796 the block, so we need to emit NOTE_INSN_BLOCK_* notes. */
6797 mark_ends = (block != NULL_TREE);
6798 expand_start_bindings_and_block (mark_ends ? 0 : 2, block);
6799
6800 /* If VARS have not yet been expanded, expand them now. */
6801 expand_vars (BIND_EXPR_VARS (exp));
6802
6803 /* TARGET was clobbered early in this function. The correct
6804            indicator of whether or not we need the value of this
6805 expression is the IGNORE variable. */
6806 temp = expand_expr (BIND_EXPR_BODY (exp),
6807 ignore ? const0_rtx : target,
6808 tmode, modifier);
6809
6810 expand_end_bindings (BIND_EXPR_VARS (exp), mark_ends, 0);
6811
6812 return temp;
6813 }
6814
6815 case CONSTRUCTOR:
6816 /* If we don't need the result, just ensure we evaluate any
6817 subexpressions. */
6818 if (ignore)
6819 {
6820 tree elt;
6821
6822 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6823 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6824
6825 return const0_rtx;
6826 }
6827
6828 /* All elts simple constants => refer to a constant in memory. But
6829 if this is a non-BLKmode mode, let it store a field at a time
6830 since that should make a CONST_INT or CONST_DOUBLE when we
6831 fold. Likewise, if we have a target we can use, it is best to
6832 store directly into the target unless the type is large enough
6833 that memcpy will be used. If we are making an initializer and
6834 all operands are constant, put it in memory as well.
6835
6836 FIXME: Avoid trying to fill vector constructors piece-meal.
6837 Output them with output_constant_def below unless we're sure
6838 they're zeros. This should go away when vector initializers
6839 are treated like VECTOR_CST instead of arrays.
6840 */
6841 else if ((TREE_STATIC (exp)
6842 && ((mode == BLKmode
6843 && ! (target != 0 && safe_from_p (target, exp, 1)))
6844 || TREE_ADDRESSABLE (exp)
6845 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6846 && (! MOVE_BY_PIECES_P
6847 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6848 TYPE_ALIGN (type)))
6849 && ! mostly_zeros_p (exp))))
6850 || ((modifier == EXPAND_INITIALIZER
6851 || modifier == EXPAND_CONST_ADDRESS)
6852 && TREE_CONSTANT (exp)))
6853 {
6854 rtx constructor = output_constant_def (exp, 1);
6855
6856 if (modifier != EXPAND_CONST_ADDRESS
6857 && modifier != EXPAND_INITIALIZER
6858 && modifier != EXPAND_SUM)
6859 constructor = validize_mem (constructor);
6860
6861 return constructor;
6862 }
6863 else
6864 {
6865 /* Handle calls that pass values in multiple non-contiguous
6866 locations. The Irix 6 ABI has examples of this. */
6867 if (target == 0 || ! safe_from_p (target, exp, 1)
6868 || GET_CODE (target) == PARALLEL
6869 || modifier == EXPAND_STACK_PARM)
6870 target
6871 = assign_temp (build_qualified_type (type,
6872 (TYPE_QUALS (type)
6873 | (TREE_READONLY (exp)
6874 * TYPE_QUAL_CONST))),
6875 0, TREE_ADDRESSABLE (exp), 1);
6876
6877 store_constructor (exp, target, 0, int_expr_size (exp));
6878 return target;
6879 }
6880
6881 case INDIRECT_REF:
6882 {
6883 tree exp1 = TREE_OPERAND (exp, 0);
6884
6885 if (modifier != EXPAND_WRITE)
6886 {
6887 tree t;
6888
6889 t = fold_read_from_constant_string (exp);
6890 if (t)
6891 return expand_expr (t, target, tmode, modifier);
6892 }
6893
6894 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6895 op0 = memory_address (mode, op0);
6896 temp = gen_rtx_MEM (mode, op0);
6897 set_mem_attributes (temp, exp, 0);
6898
6899 /* If we are writing to this object and its type is a record with
6900 readonly fields, we must mark it as readonly so it will
6901 conflict with readonly references to those fields. */
6902 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
6903 RTX_UNCHANGING_P (temp) = 1;
6904
6905 return temp;
6906 }
6907
6908 case ARRAY_REF:
6909
6910 #ifdef ENABLE_CHECKING
6911 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6912 abort ();
6913 #endif
6914
6915 {
6916 tree array = TREE_OPERAND (exp, 0);
6917 tree low_bound = array_ref_low_bound (exp);
6918 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6919 HOST_WIDE_INT i;
6920
6921         /* Optimize the special case of a zero lower bound.
6922
6923 We convert the low_bound to sizetype to avoid some problems
6924 with constant folding. (E.g. suppose the lower bound is 1,
6925 and its mode is QI. Without the conversion, (ARRAY
6926 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6927 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6928
6929 if (! integer_zerop (low_bound))
6930 index = size_diffop (index, convert (sizetype, low_bound));
6931
6932 /* Fold an expression like: "foo"[2].
6933 This is not done in fold so it won't happen inside &.
6934 Don't fold if this is for wide characters since it's too
6935 difficult to do correctly and this is a very rare case. */
6936
6937 if (modifier != EXPAND_CONST_ADDRESS
6938 && modifier != EXPAND_INITIALIZER
6939 && modifier != EXPAND_MEMORY)
6940 {
6941 tree t = fold_read_from_constant_string (exp);
6942
6943 if (t)
6944 return expand_expr (t, target, tmode, modifier);
6945 }
6946
6947 /* If this is a constant index into a constant array,
6948 just get the value from the array. Handle both the cases when
6949 we have an explicit constructor and when our operand is a variable
6950 that was declared const. */
6951
6952 if (modifier != EXPAND_CONST_ADDRESS
6953 && modifier != EXPAND_INITIALIZER
6954 && modifier != EXPAND_MEMORY
6955 && TREE_CODE (array) == CONSTRUCTOR
6956 && ! TREE_SIDE_EFFECTS (array)
6957 && TREE_CODE (index) == INTEGER_CST
6958 && 0 > compare_tree_int (index,
6959 list_length (CONSTRUCTOR_ELTS
6960 (TREE_OPERAND (exp, 0)))))
6961 {
6962 tree elem;
6963
6964 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6965 i = TREE_INT_CST_LOW (index);
6966 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6967 ;
6968
6969 if (elem)
6970 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6971 modifier);
6972 }
6973
6974 else if (optimize >= 1
6975 && modifier != EXPAND_CONST_ADDRESS
6976 && modifier != EXPAND_INITIALIZER
6977 && modifier != EXPAND_MEMORY
6978 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6979 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6980 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
6981 && targetm.binds_local_p (array))
6982 {
6983 if (TREE_CODE (index) == INTEGER_CST)
6984 {
6985 tree init = DECL_INITIAL (array);
6986
6987 if (TREE_CODE (init) == CONSTRUCTOR)
6988 {
6989 tree elem;
6990
6991 for (elem = CONSTRUCTOR_ELTS (init);
6992 (elem
6993 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6994 elem = TREE_CHAIN (elem))
6995 ;
6996
6997 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6998 return expand_expr (fold (TREE_VALUE (elem)), target,
6999 tmode, modifier);
7000 }
7001 else if (TREE_CODE (init) == STRING_CST
7002 && 0 > compare_tree_int (index,
7003 TREE_STRING_LENGTH (init)))
7004 {
7005 tree type = TREE_TYPE (TREE_TYPE (init));
7006 enum machine_mode mode = TYPE_MODE (type);
7007
7008 if (GET_MODE_CLASS (mode) == MODE_INT
7009 && GET_MODE_SIZE (mode) == 1)
7010 return gen_int_mode (TREE_STRING_POINTER (init)
7011 [TREE_INT_CST_LOW (index)], mode);
7012 }
7013 }
7014 }
7015 }
7016 goto normal_inner_ref;
7017
7018 case COMPONENT_REF:
7019 /* If the operand is a CONSTRUCTOR, we can just extract the
7020 appropriate field if it is present. */
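/* For instance, with a hypothetical struct point { int x, y; }, a
   C99 compound-literal access such as ((struct point) { 1, 2 }).y
   can be expanded by pulling the value 2 straight out of the
   CONSTRUCTOR, without building the whole aggregate in memory.  */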
7021 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7022 {
7023 tree elt;
7024
7025 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
7026 elt = TREE_CHAIN (elt))
7027 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
7028 /* We can normally use the value of the field in the
7029 CONSTRUCTOR. However, if this is a bitfield in
7030 an integral mode that we can fit in a HOST_WIDE_INT,
7031 we must mask only the number of bits in the bitfield,
7032 since this is done implicitly by the constructor. If
7033 the bitfield does not meet either of those conditions,
7034 we can't do this optimization. */
7035 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
7036 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
7037 == MODE_INT)
7038 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
7039 <= HOST_BITS_PER_WIDE_INT))))
7040 {
7041 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
7042 && modifier == EXPAND_STACK_PARM)
7043 target = 0;
7044 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
7045 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
7046 {
7047 HOST_WIDE_INT bitsize
7048 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
7049 enum machine_mode imode
7050 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
7051
7052 if (TYPE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
7053 {
7054 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7055 op0 = expand_and (imode, op0, op1, target);
7056 }
7057 else
7058 {
7059 tree count
7060 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7061 0);
7062
7063 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7064 target, 0);
7065 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7066 target, 0);
7067 }
7068 }
7069
7070 return op0;
7071 }
7072 }
7073 goto normal_inner_ref;
7074
7075 case BIT_FIELD_REF:
7076 case ARRAY_RANGE_REF:
7077 normal_inner_ref:
7078 {
7079 enum machine_mode mode1;
7080 HOST_WIDE_INT bitsize, bitpos;
7081 tree offset;
7082 int volatilep = 0;
7083 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7084 &mode1, &unsignedp, &volatilep);
7085 rtx orig_op0;
7086
7087 /* If we got back the original object, something is wrong. Perhaps
7088 we are evaluating an expression too early. In any event, don't
7089 infinitely recurse. */
7090 if (tem == exp)
7091 abort ();
7092
7093 /* If TEM's type is a union of variable size, pass TARGET to the inner
7094 computation, since it will need a temporary and TARGET is known
7095 to suffice for that. This occurs in unchecked conversion in Ada. */
7096
7097 orig_op0 = op0
7098 = expand_expr (tem,
7099 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7100 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7101 != INTEGER_CST)
7102 && modifier != EXPAND_STACK_PARM
7103 ? target : NULL_RTX),
7104 VOIDmode,
7105 (modifier == EXPAND_INITIALIZER
7106 || modifier == EXPAND_CONST_ADDRESS
7107 || modifier == EXPAND_STACK_PARM)
7108 ? modifier : EXPAND_NORMAL);
7109
7110 /* If this is a constant, put it into a register if it is a
7111 legitimate constant and OFFSET is 0, and into memory if it isn't. */
7112 if (CONSTANT_P (op0))
7113 {
7114 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7115 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7116 && offset == 0)
7117 op0 = force_reg (mode, op0);
7118 else
7119 op0 = validize_mem (force_const_mem (mode, op0));
7120 }
7121
7122 /* Otherwise, if this object is not in memory and we either have an
7123 offset or a BLKmode result, put it there. This case can't occur in
7124 C, but can in Ada if we have unchecked conversion of an expression
7125 from a scalar type to an array or record type or for an
7126 ARRAY_RANGE_REF whose type is BLKmode. */
7127 else if (!MEM_P (op0)
7128 && (offset != 0
7129 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7130 {
7131 tree nt = build_qualified_type (TREE_TYPE (tem),
7132 (TYPE_QUALS (TREE_TYPE (tem))
7133 | TYPE_QUAL_CONST));
7134 rtx memloc = assign_temp (nt, 1, 1, 1);
7135
7136 emit_move_insn (memloc, op0);
7137 op0 = memloc;
7138 }
7139
7140 if (offset != 0)
7141 {
7142 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7143 EXPAND_SUM);
7144
7145 if (!MEM_P (op0))
7146 abort ();
7147
7148 #ifdef POINTERS_EXTEND_UNSIGNED
7149 if (GET_MODE (offset_rtx) != Pmode)
7150 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7151 #else
7152 if (GET_MODE (offset_rtx) != ptr_mode)
7153 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7154 #endif
7155
7156 if (GET_MODE (op0) == BLKmode
7157 /* A constant address in OP0 can have VOIDmode; we must
7158 not try to call force_reg in that case. */
7159 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7160 && bitsize != 0
7161 && (bitpos % bitsize) == 0
7162 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7163 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7164 {
7165 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7166 bitpos = 0;
7167 }
7168
7169 op0 = offset_address (op0, offset_rtx,
7170 highest_pow2_factor (offset));
7171 }
7172
7173 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7174 record its alignment as BIGGEST_ALIGNMENT. */
7175 if (MEM_P (op0) && bitpos == 0 && offset != 0
7176 && is_aligning_offset (offset, tem))
7177 set_mem_align (op0, BIGGEST_ALIGNMENT);
7178
7179 /* Don't forget about volatility even if this is a bitfield. */
7180 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
7181 {
7182 if (op0 == orig_op0)
7183 op0 = copy_rtx (op0);
7184
7185 MEM_VOLATILE_P (op0) = 1;
7186 }
7187
7188 /* The following code doesn't handle CONCAT.
7189 Assume only bitpos == 0 can be used for CONCAT, due to
7190 one-element arrays having the same mode as their element. */
7191 if (GET_CODE (op0) == CONCAT)
7192 {
7193 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7194 abort ();
7195 return op0;
7196 }
7197
7198 /* In cases where an aligned union has an unaligned object
7199 as a field, we might be extracting a BLKmode value from
7200 an integer-mode (e.g., SImode) object. Handle this case
7201 by doing the extract into an object as wide as the field
7202 (which we know to be the width of a basic mode), then
7203 storing into memory, and changing the mode to BLKmode. */
7204 if (mode1 == VOIDmode
7205 || REG_P (op0) || GET_CODE (op0) == SUBREG
7206 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7207 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7208 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7209 && modifier != EXPAND_CONST_ADDRESS
7210 && modifier != EXPAND_INITIALIZER)
7211 /* If the field isn't aligned enough to fetch as a memref,
7212 fetch it as a bit field. */
7213 || (mode1 != BLKmode
7214 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7215 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7216 || (MEM_P (op0)
7217 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7218 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7219 && ((modifier == EXPAND_CONST_ADDRESS
7220 || modifier == EXPAND_INITIALIZER)
7221 ? STRICT_ALIGNMENT
7222 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7223 || (bitpos % BITS_PER_UNIT != 0)))
7224 /* If the type and the field have a constant size and the
7225 size of the type isn't the same as that of the bitfield,
7226 we must use bitfield operations. */
7227 || (bitsize >= 0
7228 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7229 == INTEGER_CST)
7230 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7231 bitsize)))
7232 {
7233 enum machine_mode ext_mode = mode;
7234
7235 if (ext_mode == BLKmode
7236 && ! (target != 0 && MEM_P (op0)
7237 && MEM_P (target)
7238 && bitpos % BITS_PER_UNIT == 0))
7239 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7240
7241 if (ext_mode == BLKmode)
7242 {
7243 if (target == 0)
7244 target = assign_temp (type, 0, 1, 1);
7245
7246 if (bitsize == 0)
7247 return target;
7248
7249 /* In this case, BITPOS must start at a byte boundary and
7250 TARGET, if specified, must be a MEM. */
7251 if (!MEM_P (op0)
7252 || (target != 0 && !MEM_P (target))
7253 || bitpos % BITS_PER_UNIT != 0)
7254 abort ();
7255
7256 emit_block_move (target,
7257 adjust_address (op0, VOIDmode,
7258 bitpos / BITS_PER_UNIT),
7259 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7260 / BITS_PER_UNIT),
7261 (modifier == EXPAND_STACK_PARM
7262 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7263
7264 return target;
7265 }
7266
7267 op0 = validize_mem (op0);
7268
7269 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7270 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7271
7272 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7273 (modifier == EXPAND_STACK_PARM
7274 ? NULL_RTX : target),
7275 ext_mode, ext_mode,
7276 int_size_in_bytes (TREE_TYPE (tem)));
7277
7278 /* If the result is a record type and BITSIZE is narrower than
7279 the mode of OP0, an integral mode, and this is a big endian
7280 machine, we must put the field into the high-order bits. */
7281 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7282 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7283 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7284 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7285 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7286 - bitsize),
7287 op0, 1);
7288
7289 /* If the result type is BLKmode, store the data into a temporary
7290 of the appropriate type, but with the mode corresponding to the
7291 mode for the data we have (op0's mode). It's tempting to make
7292 this a constant type, since we know it's only being stored once,
7293 but that can cause problems if we are taking the address of this
7294 COMPONENT_REF because the MEM of any reference via that address
7295 will have flags corresponding to the type, which will not
7296 necessarily be constant. */
7297 if (mode == BLKmode)
7298 {
7299 rtx new
7300 = assign_stack_temp_for_type
7301 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
7302
7303 emit_move_insn (new, op0);
7304 op0 = copy_rtx (new);
7305 PUT_MODE (op0, BLKmode);
7306 set_mem_attributes (op0, exp, 1);
7307 }
7308
7309 return op0;
7310 }
7311
7312 /* If the result is BLKmode, use that to access the object
7313 now as well. */
7314 if (mode == BLKmode)
7315 mode1 = BLKmode;
7316
7317 /* Get a reference to just this component. */
7318 if (modifier == EXPAND_CONST_ADDRESS
7319 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7320 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7321 else
7322 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7323
7324 if (op0 == orig_op0)
7325 op0 = copy_rtx (op0);
7326
7327 set_mem_attributes (op0, exp, 0);
7328 if (REG_P (XEXP (op0, 0)))
7329 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7330
7331 MEM_VOLATILE_P (op0) |= volatilep;
7332 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7333 || modifier == EXPAND_CONST_ADDRESS
7334 || modifier == EXPAND_INITIALIZER)
7335 return op0;
7336 else if (target == 0)
7337 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7338
7339 convert_move (target, op0, unsignedp);
7340 return target;
7341 }
7342
7343 case OBJ_TYPE_REF:
7344 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
7345
7346 /* Intended for a reference to a buffer of a file-object in Pascal.
7347 But it's not certain that a special tree code will really be
7348 necessary for these. INDIRECT_REF might work for them. */
7349 case BUFFER_REF:
7350 abort ();
7351
7352 case IN_EXPR:
7353 {
7354 /* Pascal set IN expression.
7355
7356 Algorithm:
7357 rlo = set_low - (set_low%bits_per_word);
7358 the_word = set [ (index - rlo)/bits_per_word ];
7359 bit_index = index % bits_per_word;
7360 bitmask = 1 << bit_index;
7361 return !!(the_word & bitmask); */
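/* A small worked example of the algorithm above, assuming
   bits_per_word == 8: with set_low == 3 and index == 12 we get
   rlo == 0, the_word == set[1], bit_index == 4 and bitmask == 0x10,
   so the test reduces to (set[1] & 0x10) != 0.  */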
7362
7363 tree set = TREE_OPERAND (exp, 0);
7364 tree index = TREE_OPERAND (exp, 1);
7365 int iunsignedp = TYPE_UNSIGNED (TREE_TYPE (index));
7366 tree set_type = TREE_TYPE (set);
7367 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7368 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7369 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7370 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7371 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7372 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7373 rtx setaddr = XEXP (setval, 0);
7374 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7375 rtx rlow;
7376 rtx diff, quo, rem, addr, bit, result;
7377
7378 /* If domain is empty, answer is no. Likewise if index is constant
7379 and out of bounds. */
7380 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7381 && TREE_CODE (set_low_bound) == INTEGER_CST
7382 && tree_int_cst_lt (set_high_bound, set_low_bound))
7383 || (TREE_CODE (index) == INTEGER_CST
7384 && TREE_CODE (set_low_bound) == INTEGER_CST
7385 && tree_int_cst_lt (index, set_low_bound))
7386 || (TREE_CODE (set_high_bound) == INTEGER_CST
7387 && TREE_CODE (index) == INTEGER_CST
7388 && tree_int_cst_lt (set_high_bound, index))))
7389 return const0_rtx;
7390
7391 if (target == 0)
7392 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7393
7394 /* If we get here, we have to generate the code for both cases
7395 (in range and out of range). */
7396
7397 op0 = gen_label_rtx ();
7398 op1 = gen_label_rtx ();
7399
7400 if (! (GET_CODE (index_val) == CONST_INT
7401 && GET_CODE (lo_r) == CONST_INT))
7402 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7403 GET_MODE (index_val), iunsignedp, op1);
7404
7405 if (! (GET_CODE (index_val) == CONST_INT
7406 && GET_CODE (hi_r) == CONST_INT))
7407 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7408 GET_MODE (index_val), iunsignedp, op1);
7409
7410 /* Calculate the element number of bit zero in the first word
7411 of the set. */
7412 if (GET_CODE (lo_r) == CONST_INT)
7413 rlow = GEN_INT (INTVAL (lo_r)
7414 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7415 else
7416 rlow = expand_binop (index_mode, and_optab, lo_r,
7417 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7418 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7419
7420 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7421 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7422
7423 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7424 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7425 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7426 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7427
7428 addr = memory_address (byte_mode,
7429 expand_binop (index_mode, add_optab, diff,
7430 setaddr, NULL_RTX, iunsignedp,
7431 OPTAB_LIB_WIDEN));
7432
7433 /* Extract the bit we want to examine. */
7434 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7435 gen_rtx_MEM (byte_mode, addr),
7436 make_tree (TREE_TYPE (index), rem),
7437 NULL_RTX, 1);
7438 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7439 GET_MODE (target) == byte_mode ? target : 0,
7440 1, OPTAB_LIB_WIDEN);
7441
7442 if (result != target)
7443 convert_move (target, result, 1);
7444
7445 /* Output the code to handle the out-of-range case. */
7446 emit_jump (op0);
7447 emit_label (op1);
7448 emit_move_insn (target, const0_rtx);
7449 emit_label (op0);
7450 return target;
7451 }
7452
7453 case WITH_CLEANUP_EXPR:
7454 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7455 {
7456 WITH_CLEANUP_EXPR_RTL (exp)
7457 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7458 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7459 CLEANUP_EH_ONLY (exp));
7460
7461 /* That's it for this cleanup. */
7462 TREE_OPERAND (exp, 1) = 0;
7463 }
7464 return WITH_CLEANUP_EXPR_RTL (exp);
7465
7466 case CLEANUP_POINT_EXPR:
7467 {
7468 /* Start a new binding layer that will keep track of all cleanup
7469 actions to be performed. */
7470 expand_start_bindings (2);
7471
7472 target_temp_slot_level = temp_slot_level;
7473
7474 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7475 /* If we're going to use this value, load it up now. */
7476 if (! ignore)
7477 op0 = force_not_mem (op0);
7478 preserve_temp_slots (op0);
7479 expand_end_bindings (NULL_TREE, 0, 0);
7480 }
7481 return op0;
7482
7483 case CALL_EXPR:
7484 /* Check for a built-in function. */
7485 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7486 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7487 == FUNCTION_DECL)
7488 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7489 {
7490 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7491 == BUILT_IN_FRONTEND)
7492 return lang_hooks.expand_expr (exp, original_target,
7493 tmode, modifier,
7494 alt_rtl);
7495 else
7496 return expand_builtin (exp, target, subtarget, tmode, ignore);
7497 }
7498
7499 return expand_call (exp, target, ignore);
7500
7501 case NON_LVALUE_EXPR:
7502 case NOP_EXPR:
7503 case CONVERT_EXPR:
7504 case REFERENCE_EXPR:
7505 if (TREE_OPERAND (exp, 0) == error_mark_node)
7506 return const0_rtx;
7507
7508 if (TREE_CODE (type) == UNION_TYPE)
7509 {
7510 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7511
7512 /* If both input and output are BLKmode, this conversion isn't doing
7513 anything except possibly changing the memory attributes. */
7514 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7515 {
7516 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7517 modifier);
7518
7519 result = copy_rtx (result);
7520 set_mem_attributes (result, exp, 0);
7521 return result;
7522 }
7523
7524 if (target == 0)
7525 {
7526 if (TYPE_MODE (type) != BLKmode)
7527 target = gen_reg_rtx (TYPE_MODE (type));
7528 else
7529 target = assign_temp (type, 0, 1, 1);
7530 }
7531
7532 if (MEM_P (target))
7533 /* Store data into beginning of memory target. */
7534 store_expr (TREE_OPERAND (exp, 0),
7535 adjust_address (target, TYPE_MODE (valtype), 0),
7536 modifier == EXPAND_STACK_PARM ? 2 : 0);
7537
7538 else if (REG_P (target))
7539 /* Store this field into a union of the proper type. */
7540 store_field (target,
7541 MIN ((int_size_in_bytes (TREE_TYPE
7542 (TREE_OPERAND (exp, 0)))
7543 * BITS_PER_UNIT),
7544 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7545 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7546 VOIDmode, 0, type, 0);
7547 else
7548 abort ();
7549
7550 /* Return the entire union. */
7551 return target;
7552 }
7553
7554 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7555 {
7556 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7557 modifier);
7558
7559 /* If the signedness of the conversion differs and OP0 is
7560 a promoted SUBREG, clear that indication since we now
7561 have to do the proper extension. */
7562 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7563 && GET_CODE (op0) == SUBREG)
7564 SUBREG_PROMOTED_VAR_P (op0) = 0;
7565
7566 return op0;
7567 }
7568
7569 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7570 if (GET_MODE (op0) == mode)
7571 return op0;
7572
7573 /* If OP0 is a constant, just convert it into the proper mode. */
7574 if (CONSTANT_P (op0))
7575 {
7576 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7577 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7578
7579 if (modifier == EXPAND_INITIALIZER)
7580 return simplify_gen_subreg (mode, op0, inner_mode,
7581 subreg_lowpart_offset (mode,
7582 inner_mode));
7583 else
7584 return convert_modes (mode, inner_mode, op0,
7585 TYPE_UNSIGNED (inner_type));
7586 }
7587
7588 if (modifier == EXPAND_INITIALIZER)
7589 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7590
7591 if (target == 0)
7592 return
7593 convert_to_mode (mode, op0,
7594 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7595 else
7596 convert_move (target, op0,
7597 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7598 return target;
7599
7600 case VIEW_CONVERT_EXPR:
7601 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7602
7603 /* If the input and output modes are both the same, we are done.
7604 Otherwise, if neither mode is BLKmode and both are integral and within
7605 a word, we can use gen_lowpart. If neither is true, make sure the
7606 operand is in memory and convert the MEM to the new mode. */
7607 if (TYPE_MODE (type) == GET_MODE (op0))
7608 ;
7609 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7610 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7611 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7612 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7613 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7614 op0 = gen_lowpart (TYPE_MODE (type), op0);
7615 else if (!MEM_P (op0))
7616 {
7617 /* If the operand is not a MEM, force it into memory. Since we
7618 are going to be changing the mode of the MEM, don't call
7619 force_const_mem for constants because we don't allow pool
7620 constants to change mode. */
7621 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7622
7623 if (TREE_ADDRESSABLE (exp))
7624 abort ();
7625
7626 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7627 target
7628 = assign_stack_temp_for_type
7629 (TYPE_MODE (inner_type),
7630 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7631
7632 emit_move_insn (target, op0);
7633 op0 = target;
7634 }
7635
7636 /* At this point, OP0 is in the correct mode. If the output type is such
7637 that the operand is known to be aligned, indicate that it is.
7638 Otherwise, we need only be concerned about alignment for non-BLKmode
7639 results. */
7640 if (MEM_P (op0))
7641 {
7642 op0 = copy_rtx (op0);
7643
7644 if (TYPE_ALIGN_OK (type))
7645 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7646 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7647 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7648 {
7649 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7650 HOST_WIDE_INT temp_size
7651 = MAX (int_size_in_bytes (inner_type),
7652 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7653 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7654 temp_size, 0, type);
7655 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7656
7657 if (TREE_ADDRESSABLE (exp))
7658 abort ();
7659
7660 if (GET_MODE (op0) == BLKmode)
7661 emit_block_move (new_with_op0_mode, op0,
7662 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7663 (modifier == EXPAND_STACK_PARM
7664 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7665 else
7666 emit_move_insn (new_with_op0_mode, op0);
7667
7668 op0 = new;
7669 }
7670
7671 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7672 }
7673
7674 return op0;
7675
7676 case PLUS_EXPR:
7677 this_optab = ! unsignedp && flag_trapv
7678 && (GET_MODE_CLASS (mode) == MODE_INT)
7679 ? addv_optab : add_optab;
7680
7681 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7682 something else, make sure we add the register to the constant and
7683 then to the other thing. This case can occur during strength
7684 reduction and doing it this way will produce better code if the
7685 frame pointer or argument pointer is eliminated.
7686
7687 fold-const.c will ensure that the constant is always in the inner
7688 PLUS_EXPR, so the only case we need to do anything about is if
7689 sp, ap, or fp is our second argument, in which case we must swap
7690 the innermost first argument and our second argument. */
7691
7692 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7693 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7694 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
7695 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7696 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7697 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7698 {
7699 tree t = TREE_OPERAND (exp, 1);
7700
7701 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7702 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7703 }
7704
7705 /* If the result is to be ptr_mode and we are adding an integer to
7706 something, we might be forming a constant. So try to use
7707 plus_constant. If it produces a sum and we can't accept it,
7708 use force_operand. This allows P = &ARR[const] to generate
7709 efficient code on machines where a SYMBOL_REF is not a valid
7710 address.
7711
7712 If this is an EXPAND_SUM call, always return the sum. */
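/* For instance, given a hypothetical "static int arr[10];", the
   address &arr[3] can be returned from here roughly as
   (plus (symbol_ref "arr") (const_int 12)) (assuming 4-byte ints),
   letting the caller fold it into an addressing mode instead of
   emitting a run-time addition.  */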
7713 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7714 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7715 {
7716 if (modifier == EXPAND_STACK_PARM)
7717 target = 0;
7718 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7719 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7720 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7721 {
7722 rtx constant_part;
7723
7724 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7725 EXPAND_SUM);
7726 /* Use immed_double_const to ensure that the constant is
7727 truncated according to the mode of OP1, then sign extended
7728 to a HOST_WIDE_INT. Using the constant directly can result
7729 in non-canonical RTL in a 64x32 cross compile. */
7730 constant_part
7731 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7732 (HOST_WIDE_INT) 0,
7733 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7734 op1 = plus_constant (op1, INTVAL (constant_part));
7735 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7736 op1 = force_operand (op1, target);
7737 return op1;
7738 }
7739
7740 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7741 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7742 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7743 {
7744 rtx constant_part;
7745
7746 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7747 (modifier == EXPAND_INITIALIZER
7748 ? EXPAND_INITIALIZER : EXPAND_SUM));
7749 if (! CONSTANT_P (op0))
7750 {
7751 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7752 VOIDmode, modifier);
7753 /* Return a PLUS if modifier says it's OK. */
7754 if (modifier == EXPAND_SUM
7755 || modifier == EXPAND_INITIALIZER)
7756 return simplify_gen_binary (PLUS, mode, op0, op1);
7757 goto binop2;
7758 }
7759 /* Use immed_double_const to ensure that the constant is
7760 truncated according to the mode of OP0, then sign extended
7761 to a HOST_WIDE_INT. Using the constant directly can result
7762 in non-canonical RTL in a 64x32 cross compile. */
7763 constant_part
7764 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7765 (HOST_WIDE_INT) 0,
7766 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7767 op0 = plus_constant (op0, INTVAL (constant_part));
7768 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7769 op0 = force_operand (op0, target);
7770 return op0;
7771 }
7772 }
7773
7774 /* No sense saving up arithmetic to be done
7775 if it's all in the wrong mode to form part of an address.
7776 And force_operand won't know whether to sign-extend or
7777 zero-extend. */
7778 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7779 || mode != ptr_mode)
7780 {
7781 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7782 subtarget, &op0, &op1, 0);
7783 if (op0 == const0_rtx)
7784 return op1;
7785 if (op1 == const0_rtx)
7786 return op0;
7787 goto binop2;
7788 }
7789
7790 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7791 subtarget, &op0, &op1, modifier);
7792 return simplify_gen_binary (PLUS, mode, op0, op1);
7793
7794 case MINUS_EXPR:
7795 /* For initializers, we are allowed to return a MINUS of two
7796 symbolic constants. Here we handle the case where both
7797 operands are constant, emitting the difference of two
7798 symbolic constants directly, for the sake of an
7799 initializer, instead of computing it at run time. */
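/* For instance, the GNU "labels as values" extension allows a
   hypothetical threaded-interpreter table such as
       static const int off[] = { &&l1 - &&l0, &&l2 - &&l0 };
   where each entry can be emitted as a MINUS of two symbolic
   constants for the assembler to resolve, with no run-time
   subtraction.  */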
7800 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7801 && really_constant_p (TREE_OPERAND (exp, 0))
7802 && really_constant_p (TREE_OPERAND (exp, 1)))
7803 {
7804 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7805 NULL_RTX, &op0, &op1, modifier);
7806
7807 /* If the last operand is a CONST_INT, use plus_constant of
7808 the negated constant. Else make the MINUS. */
7809 if (GET_CODE (op1) == CONST_INT)
7810 return plus_constant (op0, - INTVAL (op1));
7811 else
7812 return gen_rtx_MINUS (mode, op0, op1);
7813 }
7814
7815 this_optab = ! unsignedp && flag_trapv
7816 && (GET_MODE_CLASS(mode) == MODE_INT)
7817 ? subv_optab : sub_optab;
7818
7819 /* No sense saving up arithmetic to be done
7820 if it's all in the wrong mode to form part of an address.
7821 And force_operand won't know whether to sign-extend or
7822 zero-extend. */
7823 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7824 || mode != ptr_mode)
7825 goto binop;
7826
7827 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7828 subtarget, &op0, &op1, modifier);
7829
7830 /* Convert A - const to A + (-const). */
7831 if (GET_CODE (op1) == CONST_INT)
7832 {
7833 op1 = negate_rtx (mode, op1);
7834 return simplify_gen_binary (PLUS, mode, op0, op1);
7835 }
7836
7837 goto binop2;
7838
7839 case MULT_EXPR:
7840 /* If the first operand is constant, swap them.
7841 Thus the following special case checks need only
7842 check the second operand. */
7843 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7844 {
7845 tree t1 = TREE_OPERAND (exp, 0);
7846 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7847 TREE_OPERAND (exp, 1) = t1;
7848 }
7849
7850 /* Attempt to return something suitable for generating an
7851 indexed address, for machines that support that. */
7852
7853 if (modifier == EXPAND_SUM && mode == ptr_mode
7854 && host_integerp (TREE_OPERAND (exp, 1), 0))
7855 {
7856 tree exp1 = TREE_OPERAND (exp, 1);
7857
7858 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7859 EXPAND_SUM);
7860
7861 if (!REG_P (op0))
7862 op0 = force_operand (op0, NULL_RTX);
7863 if (!REG_P (op0))
7864 op0 = copy_to_mode_reg (mode, op0);
7865
7866 return gen_rtx_MULT (mode, op0,
7867 gen_int_mode (tree_low_cst (exp1, 0),
7868 TYPE_MODE (TREE_TYPE (exp1))));
7869 }
7870
7871 if (modifier == EXPAND_STACK_PARM)
7872 target = 0;
7873
7874 /* Check for multiplying things that have been extended
7875 from a narrower type. If this machine supports multiplying
7876 in that narrower type with a result in the desired type,
7877 do it that way, and avoid the explicit type-conversion. */
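/* As a rough illustration: with "short a, b;" on a typical target
   where short is 16 bits and int is 32, the product (int) a * (int) b
   can be emitted as a single widening 16x16->32 multiply (a
   mulhisi3-style pattern) when the target provides one, rather than
   sign-extending both operands and doing a full word multiply.  */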
7878 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7879 && TREE_CODE (type) == INTEGER_TYPE
7880 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7881 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7882 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7883 && int_fits_type_p (TREE_OPERAND (exp, 1),
7884 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7885 /* Don't use a widening multiply if a shift will do. */
7886 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7887 > HOST_BITS_PER_WIDE_INT)
7888 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7889 ||
7890 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7891 && (TYPE_PRECISION (TREE_TYPE
7892 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7893 == TYPE_PRECISION (TREE_TYPE
7894 (TREE_OPERAND
7895 (TREE_OPERAND (exp, 0), 0))))
7896 /* If both operands are extended, they must either both
7897 be zero-extended or both be sign-extended. */
7898 && (TYPE_UNSIGNED (TREE_TYPE
7899 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7900 == TYPE_UNSIGNED (TREE_TYPE
7901 (TREE_OPERAND
7902 (TREE_OPERAND (exp, 0), 0)))))))
7903 {
7904 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
7905 enum machine_mode innermode = TYPE_MODE (op0type);
7906 bool zextend_p = TYPE_UNSIGNED (op0type);
7907 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7908 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7909
7910 if (mode == GET_MODE_WIDER_MODE (innermode))
7911 {
7912 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7913 {
7914 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7915 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7916 TREE_OPERAND (exp, 1),
7917 NULL_RTX, &op0, &op1, 0);
7918 else
7919 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7920 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7921 NULL_RTX, &op0, &op1, 0);
7922 goto binop2;
7923 }
7924 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7925 && innermode == word_mode)
7926 {
7927 rtx htem, hipart;
7928 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7929 NULL_RTX, VOIDmode, 0);
7930 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7931 op1 = convert_modes (innermode, mode,
7932 expand_expr (TREE_OPERAND (exp, 1),
7933 NULL_RTX, VOIDmode, 0),
7934 unsignedp);
7935 else
7936 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7937 NULL_RTX, VOIDmode, 0);
7938 temp = expand_binop (mode, other_optab, op0, op1, target,
7939 unsignedp, OPTAB_LIB_WIDEN);
7940 hipart = gen_highpart (innermode, temp);
7941 htem = expand_mult_highpart_adjust (innermode, hipart,
7942 op0, op1, hipart,
7943 zextend_p);
7944 if (htem != hipart)
7945 emit_move_insn (hipart, htem);
7946 return temp;
7947 }
7948 }
7949 }
7950 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7951 subtarget, &op0, &op1, 0);
7952 return expand_mult (mode, op0, op1, target, unsignedp);
7953
7954 case TRUNC_DIV_EXPR:
7955 case FLOOR_DIV_EXPR:
7956 case CEIL_DIV_EXPR:
7957 case ROUND_DIV_EXPR:
7958 case EXACT_DIV_EXPR:
7959 if (modifier == EXPAND_STACK_PARM)
7960 target = 0;
7961 /* Possible optimization: compute the dividend with EXPAND_SUM
7962 then, if the divisor is constant, we can optimize the case
7963 where some terms of the dividend have coefficients divisible by it. */
7964 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7965 subtarget, &op0, &op1, 0);
7966 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7967
7968 case RDIV_EXPR:
7969 /* Emit a/b as a*(1/b). Later we may manage to CSE the reciprocal,
7970 saving an expensive divide. If not, combine will rebuild the original
7971 computation. */
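/* E.g. under -funsafe-math-optimizations, "x / y" is rewritten here
   as "x * (1.0 / y)"; in a loop that divides many values by the same
   y, CSE can then compute the reciprocal just once.  */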
7972 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7973 && TREE_CODE (type) == REAL_TYPE
7974 && !real_onep (TREE_OPERAND (exp, 0)))
7975 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7976 build (RDIV_EXPR, type,
7977 build_real (type, dconst1),
7978 TREE_OPERAND (exp, 1))),
7979 target, tmode, modifier);
7980 this_optab = sdiv_optab;
7981 goto binop;
7982
7983 case TRUNC_MOD_EXPR:
7984 case FLOOR_MOD_EXPR:
7985 case CEIL_MOD_EXPR:
7986 case ROUND_MOD_EXPR:
7987 if (modifier == EXPAND_STACK_PARM)
7988 target = 0;
7989 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7990 subtarget, &op0, &op1, 0);
7991 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7992
7993 case FIX_ROUND_EXPR:
7994 case FIX_FLOOR_EXPR:
7995 case FIX_CEIL_EXPR:
7996 abort (); /* Not used for C. */
7997
7998 case FIX_TRUNC_EXPR:
7999 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8000 if (target == 0 || modifier == EXPAND_STACK_PARM)
8001 target = gen_reg_rtx (mode);
8002 expand_fix (target, op0, unsignedp);
8003 return target;
8004
8005 case FLOAT_EXPR:
8006 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8007 if (target == 0 || modifier == EXPAND_STACK_PARM)
8008 target = gen_reg_rtx (mode);
8009 /* expand_float can't figure out what to do if FROM has VOIDmode.
8010 So give it the correct mode. With -O, cse will optimize this. */
8011 if (GET_MODE (op0) == VOIDmode)
8012 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8013 op0);
8014 expand_float (target, op0,
8015 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8016 return target;
8017
8018 case NEGATE_EXPR:
8019 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8020 if (modifier == EXPAND_STACK_PARM)
8021 target = 0;
8022 temp = expand_unop (mode,
8023 ! unsignedp && flag_trapv
8024 && (GET_MODE_CLASS(mode) == MODE_INT)
8025 ? negv_optab : neg_optab, op0, target, 0);
8026 if (temp == 0)
8027 abort ();
8028 return temp;
8029
8030 case ABS_EXPR:
8031 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8032 if (modifier == EXPAND_STACK_PARM)
8033 target = 0;
8034
8035 /* ABS_EXPR is not valid for complex arguments. */
8036 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8037 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8038 abort ();
8039
8040 /* Unsigned abs is simply the operand. Testing here means we don't
8041 risk generating incorrect code below. */
8042 if (TYPE_UNSIGNED (type))
8043 return op0;
8044
8045 return expand_abs (mode, op0, target, unsignedp,
8046 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8047
8048 case MAX_EXPR:
8049 case MIN_EXPR:
8050 target = original_target;
8051 if (target == 0
8052 || modifier == EXPAND_STACK_PARM
8053 || (MEM_P (target) && MEM_VOLATILE_P (target))
8054 || GET_MODE (target) != mode
8055 || (REG_P (target)
8056 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8057 target = gen_reg_rtx (mode);
8058 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8059 target, &op0, &op1, 0);
8060
8061 /* First try to do it with a special MIN or MAX instruction.
8062 If that does not win, use a conditional jump to select the proper
8063 value. */
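/* Sketch of the fallback path for MAX_EXPR: when no min/max pattern
   exists, the expansion behaves roughly like
       target = a;
       if (a >= b) goto done;
       target = b;
     done:;
   with the comparison's signedness taken from the operands.  */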
8064 this_optab = (unsignedp
8065 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8066 : (code == MIN_EXPR ? smin_optab : smax_optab));
8067
8068 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8069 OPTAB_WIDEN);
8070 if (temp != 0)
8071 return temp;
8072
8073 /* At this point, a MEM target is no longer useful; we will get better
8074 code without it. */
8075
8076 if (MEM_P (target))
8077 target = gen_reg_rtx (mode);
8078
8079 /* If op1 was placed in target, swap op0 and op1. */
8080 if (target != op0 && target == op1)
8081 {
8082 rtx tem = op0;
8083 op0 = op1;
8084 op1 = tem;
8085 }
8086
8087 if (target != op0)
8088 emit_move_insn (target, op0);
8089
8090 op0 = gen_label_rtx ();
8091
8092 /* If this mode is an integer too wide to compare properly,
8093 compare word by word. Rely on cse to optimize constant cases. */
8094 if (GET_MODE_CLASS (mode) == MODE_INT
8095 && ! can_compare_p (GE, mode, ccp_jump))
8096 {
8097 if (code == MAX_EXPR)
8098 do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
8099 NULL_RTX, op0);
8100 else
8101 do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
8102 NULL_RTX, op0);
8103 }
8104 else
8105 {
8106 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8107 unsignedp, mode, NULL_RTX, NULL_RTX, op0);
8108 }
8109 emit_move_insn (target, op1);
8110 emit_label (op0);
8111 return target;
8112
8113 case BIT_NOT_EXPR:
8114 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8115 if (modifier == EXPAND_STACK_PARM)
8116 target = 0;
8117 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8118 if (temp == 0)
8119 abort ();
8120 return temp;
8121
8122 /* ??? Can optimize bitwise operations with one arg constant.
8123 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8124 and (a bitwise1 b) bitwise2 b (etc)
8125 but that is probably not worthwhile. */
8126
8127 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8128 boolean values when we want in all cases to compute both of them. In
8129 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8130 as actual zero-or-1 values and then bitwise anding. In cases where
8131 there cannot be any side effects, better code would be made by
8132 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8133 how to recognize those cases. */
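/* E.g. for two side-effect-free comparisons, TRUTH_AND_EXPR of
   (a < b) and (c < d) is expanded roughly as
       t1 = (a < b); t2 = (c < d); result = t1 & t2;
   whereas TRUTH_ANDIF_EXPR would branch around the evaluation of the
   second comparison.  */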
8134
8135 case TRUTH_AND_EXPR:
8136 case BIT_AND_EXPR:
8137 this_optab = and_optab;
8138 goto binop;
8139
8140 case TRUTH_OR_EXPR:
8141 case BIT_IOR_EXPR:
8142 this_optab = ior_optab;
8143 goto binop;
8144
8145 case TRUTH_XOR_EXPR:
8146 case BIT_XOR_EXPR:
8147 this_optab = xor_optab;
8148 goto binop;
8149
8150 case LSHIFT_EXPR:
8151 case RSHIFT_EXPR:
8152 case LROTATE_EXPR:
8153 case RROTATE_EXPR:
8154 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8155 subtarget = 0;
8156 if (modifier == EXPAND_STACK_PARM)
8157 target = 0;
8158 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8159 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8160 unsignedp);
8161
8162 /* Could determine the answer when only additive constants differ. Also,
8163 the addition of one can be handled by changing the condition. */
8164 case LT_EXPR:
8165 case LE_EXPR:
8166 case GT_EXPR:
8167 case GE_EXPR:
8168 case EQ_EXPR:
8169 case NE_EXPR:
8170 case UNORDERED_EXPR:
8171 case ORDERED_EXPR:
8172 case UNLT_EXPR:
8173 case UNLE_EXPR:
8174 case UNGT_EXPR:
8175 case UNGE_EXPR:
8176 case UNEQ_EXPR:
8177 case LTGT_EXPR:
8178 temp = do_store_flag (exp,
8179 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8180 tmode != VOIDmode ? tmode : mode, 0);
8181 if (temp != 0)
8182 return temp;
8183
8184 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
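/* I.e. for "r = (x != 0);" with r already in a register of x's mode,
   this emits roughly
       r = x;
       if (r == 0) goto skip;
       r = 1;
     skip:;
   so the value is built in place in the destination register when
   possible.  */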
8185 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8186 && original_target
8187 && REG_P (original_target)
8188 && (GET_MODE (original_target)
8189 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8190 {
8191 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8192 VOIDmode, 0);
8193
8194 /* If temp is constant, we can just compute the result. */
8195 if (GET_CODE (temp) == CONST_INT)
8196 {
8197 if (INTVAL (temp) != 0)
8198 emit_move_insn (target, const1_rtx);
8199 else
8200 emit_move_insn (target, const0_rtx);
8201
8202 return target;
8203 }
8204
8205 if (temp != original_target)
8206 {
8207 enum machine_mode mode1 = GET_MODE (temp);
8208 if (mode1 == VOIDmode)
8209 mode1 = tmode != VOIDmode ? tmode : mode;
8210
8211 temp = copy_to_mode_reg (mode1, temp);
8212 }
8213
8214 op1 = gen_label_rtx ();
8215 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8216 GET_MODE (temp), unsignedp, op1);
8217 emit_move_insn (temp, const1_rtx);
8218 emit_label (op1);
8219 return temp;
8220 }
8221
8222 /* If no set-flag instruction, must generate a conditional
8223 store into a temporary variable. Drop through
8224 and handle this like && and ||. */
8225
8226 case TRUTH_ANDIF_EXPR:
8227 case TRUTH_ORIF_EXPR:
8228 if (! ignore
8229 && (target == 0
8230 || modifier == EXPAND_STACK_PARM
8231 || ! safe_from_p (target, exp, 1)
8232 /* Make sure we don't have a hard reg (such as the function's return
8233 value) live across basic blocks, if not optimizing. */
8234 || (!optimize && REG_P (target)
8235 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8236 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8237
8238 if (target)
8239 emit_clr_insn (target);
8240
8241 op1 = gen_label_rtx ();
8242 jumpifnot (exp, op1);
8243
8244 if (target)
8245 emit_0_to_1_insn (target);
8246
8247 emit_label (op1);
8248 return ignore ? const0_rtx : target;
8249
8250 case TRUTH_NOT_EXPR:
8251 if (modifier == EXPAND_STACK_PARM)
8252 target = 0;
8253 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8254 /* The parser is careful to generate TRUTH_NOT_EXPR
8255 only with operands that are always zero or one. */
8256 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8257 target, 1, OPTAB_LIB_WIDEN);
8258 if (temp == 0)
8259 abort ();
8260 return temp;
8261
8262 case COMPOUND_EXPR:
8263 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8264 emit_queue ();
8265 return expand_expr_real (TREE_OPERAND (exp, 1),
8266 (ignore ? const0_rtx : target),
8267 VOIDmode, modifier, alt_rtl);
8268
8269 case STATEMENT_LIST:
8270 {
8271 tree_stmt_iterator iter;
8272
8273 if (!ignore)
8274 abort ();
8275
8276 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
8277 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
8278 }
8279 return const0_rtx;
8280
8281 case COND_EXPR:
8282 /* If it's void, we don't need to worry about computing a value. */
8283 if (VOID_TYPE_P (TREE_TYPE (exp)))
8284 {
8285 tree pred = TREE_OPERAND (exp, 0);
8286 tree then_ = TREE_OPERAND (exp, 1);
8287 tree else_ = TREE_OPERAND (exp, 2);
8288
8289 /* If we do not have any pending cleanups or stack_levels
8290 to restore, and at least one arm of the COND_EXPR is a
8291 GOTO_EXPR to a local label, then we can emit more efficient
8292 code by using jumpif/jumpifnot instead of the 'if' machinery. */
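/* E.g. a statement like "if (p != 0) goto out;" has a GOTO_EXPR as
   its then-arm, so it can be expanded as a single conditional jump
   to the label "out" via jumpif; the else-arm is then expanded only
   for its side effects.  */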
8293 if (! optimize
8294 || containing_blocks_have_cleanups_or_stack_level ())
8295 ;
8296 else if (TREE_CODE (then_) == GOTO_EXPR
8297 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
8298 {
8299 jumpif (pred, label_rtx (GOTO_DESTINATION (then_)));
8300 return expand_expr (else_, const0_rtx, VOIDmode, 0);
8301 }
8302 else if (TREE_CODE (else_) == GOTO_EXPR
8303 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
8304 {
8305 jumpifnot (pred, label_rtx (GOTO_DESTINATION (else_)));
8306 return expand_expr (then_, const0_rtx, VOIDmode, 0);
8307 }
8308
8309 /* Just use the 'if' machinery. */
8310 expand_start_cond (pred, 0);
8311 start_cleanup_deferral ();
8312 expand_expr (then_, const0_rtx, VOIDmode, 0);
8313
8314 exp = else_;
8315
8316 /* Iterate over 'else if's instead of recursing. */
8317 for (; TREE_CODE (exp) == COND_EXPR; exp = TREE_OPERAND (exp, 2))
8318 {
8319 expand_start_else ();
8320 if (EXPR_HAS_LOCATION (exp))
8321 {
8322 emit_line_note (EXPR_LOCATION (exp));
8323 record_block_change (TREE_BLOCK (exp));
8324 }
8325 expand_elseif (TREE_OPERAND (exp, 0));
8326 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, 0);
8327 }
8328 /* Don't emit the jump and label if there's no 'else' clause. */
8329 if (TREE_SIDE_EFFECTS (exp))
8330 {
8331 expand_start_else ();
8332 expand_expr (exp, const0_rtx, VOIDmode, 0);
8333 }
8334 end_cleanup_deferral ();
8335 expand_end_cond ();
8336 return const0_rtx;
8337 }
8338
8339 /* If we would have a "singleton" (see below) were it not for a
8340 conversion in each arm, bring that conversion back out. */
8341 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8342 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8343 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8344 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8345 {
8346 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8347 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8348
8349 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8350 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8351 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8352 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8353 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8354 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8355 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8356 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8357 return expand_expr (build1 (NOP_EXPR, type,
8358 build (COND_EXPR, TREE_TYPE (iftrue),
8359 TREE_OPERAND (exp, 0),
8360 iftrue, iffalse)),
8361 target, tmode, modifier);
8362 }
8363
8364 {
8365 /* Note that COND_EXPRs whose type is a structure or union
8366 are required to be constructed to contain assignments of
8367 a temporary variable, so that we can evaluate them here
8368 for side effect only. If type is void, we must do likewise. */
8369
8370 /* If an arm of the branch requires a cleanup,
8371 only that cleanup is performed. */
8372
8373 tree singleton = 0;
8374 tree binary_op = 0, unary_op = 0;
8375
8376 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8377 convert it to our mode, if necessary. */
8378 if (integer_onep (TREE_OPERAND (exp, 1))
8379 && integer_zerop (TREE_OPERAND (exp, 2))
8380 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8381 {
8382 if (ignore)
8383 {
8384 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8385 modifier);
8386 return const0_rtx;
8387 }
8388
8389 if (modifier == EXPAND_STACK_PARM)
8390 target = 0;
8391 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8392 if (GET_MODE (op0) == mode)
8393 return op0;
8394
8395 if (target == 0)
8396 target = gen_reg_rtx (mode);
8397 convert_move (target, op0, unsignedp);
8398 return target;
8399 }
8400
8401 /* Check for X ? A + B : A. If we have this, we can copy A to the
8402 output and conditionally add B. Similarly for unary operations.
8403 Don't do this if X has side-effects because those side effects
8404 might affect A or B and the "?" operation is a sequence point in
8405 ANSI. (operand_equal_p tests for side effects.) */
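/* For instance, for a hypothetical "x ? n + 4 : n", the value n is
   copied to the result first and the addition of 4 is then performed
   only on the path where x is true, so the common operand n is
   evaluated just once.  */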
8406
8407 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8408 && operand_equal_p (TREE_OPERAND (exp, 2),
8409 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8410 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8411 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8412 && operand_equal_p (TREE_OPERAND (exp, 1),
8413 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8414 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8415 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8416 && operand_equal_p (TREE_OPERAND (exp, 2),
8417 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8418 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8419 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8420 && operand_equal_p (TREE_OPERAND (exp, 1),
8421 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8422 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8423
8424 /* If we are not to produce a result, we have no target. Otherwise,
8425 if a target was specified, use it; it will not be used as an
8426 intermediate target unless it is safe. If no target, use a
8427 temporary. */
8428
8429 if (ignore)
8430 temp = 0;
8431 else if (modifier == EXPAND_STACK_PARM)
8432 temp = assign_temp (type, 0, 0, 1);
8433 else if (original_target
8434 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8435 || (singleton && REG_P (original_target)
8436 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8437 && original_target == var_rtx (singleton)))
8438 && GET_MODE (original_target) == mode
8439 #ifdef HAVE_conditional_move
8440 && (! can_conditionally_move_p (mode)
8441 || REG_P (original_target)
8442 || TREE_ADDRESSABLE (type))
8443 #endif
8444 && (!MEM_P (original_target)
8445 || TREE_ADDRESSABLE (type)))
8446 temp = original_target;
8447 else if (TREE_ADDRESSABLE (type))
8448 abort ();
8449 else
8450 temp = assign_temp (type, 0, 0, 1);
8451
8452 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8453 do the test of X as a store-flag operation, do this as
8454 A + ((X != 0) << log C). Similarly for other simple binary
8455 operators. Only do for C == 1 if BRANCH_COST is low. */
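/* E.g. with BRANCH_COST >= 3, "x ? a + 4 : a" can be emitted
   branch-free as a + ((x != 0) << 2), with the (x != 0) part
   produced by do_store_flag.  */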
8456 if (temp && singleton && binary_op
8457 && (TREE_CODE (binary_op) == PLUS_EXPR
8458 || TREE_CODE (binary_op) == MINUS_EXPR
8459 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8460 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8461 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8462 : integer_onep (TREE_OPERAND (binary_op, 1)))
8463 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8464 {
8465 rtx result;
8466 tree cond;
8467 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8468 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8469 ? addv_optab : add_optab)
8470 : TREE_CODE (binary_op) == MINUS_EXPR
8471 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8472 ? subv_optab : sub_optab)
8473 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8474 : xor_optab);
8475
8476 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8477 if (singleton == TREE_OPERAND (exp, 1))
8478 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8479 else
8480 cond = TREE_OPERAND (exp, 0);
8481
8482 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8483 ? temp : NULL_RTX),
8484 mode, BRANCH_COST <= 1);
8485
8486 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8487 result = expand_shift (LSHIFT_EXPR, mode, result,
8488 build_int_2 (tree_log2
8489 (TREE_OPERAND
8490 (binary_op, 1)),
8491 0),
8492 (safe_from_p (temp, singleton, 1)
8493 ? temp : NULL_RTX), 0);
8494
8495 if (result)
8496 {
8497 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8498 return expand_binop (mode, boptab, op1, result, temp,
8499 unsignedp, OPTAB_LIB_WIDEN);
8500 }
8501 }
8502
8503 do_pending_stack_adjust ();
8504 NO_DEFER_POP;
8505 op0 = gen_label_rtx ();
8506
8507 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8508 {
8509 if (temp != 0)
8510 {
8511 /* If the target conflicts with the other operand of the
8512 binary op, we can't use it. Also, we can't use the target
8513 if it is a hard register, because evaluating the condition
8514 might clobber it. */
8515 if ((binary_op
8516 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8517 || (REG_P (temp)
8518 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8519 temp = gen_reg_rtx (mode);
8520 store_expr (singleton, temp,
8521 modifier == EXPAND_STACK_PARM ? 2 : 0);
8522 }
8523 else
8524 expand_expr (singleton,
8525 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8526 if (singleton == TREE_OPERAND (exp, 1))
8527 jumpif (TREE_OPERAND (exp, 0), op0);
8528 else
8529 jumpifnot (TREE_OPERAND (exp, 0), op0);
8530
8531 start_cleanup_deferral ();
8532 if (binary_op && temp == 0)
8533 /* Just touch the other operand. */
8534 expand_expr (TREE_OPERAND (binary_op, 1),
8535 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8536 else if (binary_op)
8537 store_expr (build (TREE_CODE (binary_op), type,
8538 make_tree (type, temp),
8539 TREE_OPERAND (binary_op, 1)),
8540 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8541 else
8542 store_expr (build1 (TREE_CODE (unary_op), type,
8543 make_tree (type, temp)),
8544 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8545 op1 = op0;
8546 }
8547 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8548 comparison operator. If we have one of these cases, set the
8549 output to A, branch on A (cse will merge these two references),
8550 then set the output to FOO. */
8551 else if (temp
8552 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8553 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8554 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8555 TREE_OPERAND (exp, 1), 0)
8556 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8557 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8558 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8559 {
8560 if (REG_P (temp)
8561 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8562 temp = gen_reg_rtx (mode);
8563 store_expr (TREE_OPERAND (exp, 1), temp,
8564 modifier == EXPAND_STACK_PARM ? 2 : 0);
8565 jumpif (TREE_OPERAND (exp, 0), op0);
8566
8567 start_cleanup_deferral ();
8568 if (TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8569 store_expr (TREE_OPERAND (exp, 2), temp,
8570 modifier == EXPAND_STACK_PARM ? 2 : 0);
8571 else
8572 expand_expr (TREE_OPERAND (exp, 2),
8573 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8574 op1 = op0;
8575 }
8576 else if (temp
8577 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8578 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8579 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8580 TREE_OPERAND (exp, 2), 0)
8581 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8582 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8583 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8584 {
8585 if (REG_P (temp)
8586 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8587 temp = gen_reg_rtx (mode);
8588 store_expr (TREE_OPERAND (exp, 2), temp,
8589 modifier == EXPAND_STACK_PARM ? 2 : 0);
8590 jumpifnot (TREE_OPERAND (exp, 0), op0);
8591
8592 start_cleanup_deferral ();
8593 if (TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8594 store_expr (TREE_OPERAND (exp, 1), temp,
8595 modifier == EXPAND_STACK_PARM ? 2 : 0);
8596 else
8597 expand_expr (TREE_OPERAND (exp, 1),
8598 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8599 op1 = op0;
8600 }
8601 else
8602 {
8603 op1 = gen_label_rtx ();
8604 jumpifnot (TREE_OPERAND (exp, 0), op0);
8605
8606 start_cleanup_deferral ();
8607
8608 /* One branch of the cond can be void, if it never returns. For
8609              example A ? throw : E.  */
8610 if (temp != 0
8611 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8612 store_expr (TREE_OPERAND (exp, 1), temp,
8613 modifier == EXPAND_STACK_PARM ? 2 : 0);
8614 else
8615 expand_expr (TREE_OPERAND (exp, 1),
8616 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8617 end_cleanup_deferral ();
8618 emit_queue ();
8619 emit_jump_insn (gen_jump (op1));
8620 emit_barrier ();
8621 emit_label (op0);
8622 start_cleanup_deferral ();
8623 if (temp != 0
8624 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8625 store_expr (TREE_OPERAND (exp, 2), temp,
8626 modifier == EXPAND_STACK_PARM ? 2 : 0);
8627 else
8628 expand_expr (TREE_OPERAND (exp, 2),
8629 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8630 }
8631
8632 end_cleanup_deferral ();
8633
8634 emit_queue ();
8635 emit_label (op1);
8636 OK_DEFER_POP;
8637
8638 return temp;
8639 }
8640
8641 case TARGET_EXPR:
8642 {
8643 /* Something needs to be initialized, but we didn't know
8644 where that thing was when building the tree. For example,
8645 it could be the return value of a function, or a parameter
8646          to a function which is laid out on the stack, or a temporary
8647 variable which must be passed by reference.
8648
8649 We guarantee that the expression will either be constructed
8650 or copied into our original target. */
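	    /* For example, in C++ a call that returns a class object by
	       value is typically wrapped in a TARGET_EXPR; the call then
	       constructs its result directly in the TARGET_EXPR's slot
	       (or in our original target).  */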
8651
8652 tree slot = TREE_OPERAND (exp, 0);
8653 tree cleanups = NULL_TREE;
8654 tree exp1;
8655
8656 if (TREE_CODE (slot) != VAR_DECL)
8657 abort ();
8658
8659 if (! ignore)
8660 target = original_target;
8661
8662 /* Set this here so that if we get a target that refers to a
8663 register variable that's already been used, put_reg_into_stack
8664 knows that it should fix up those uses. */
8665 TREE_USED (slot) = 1;
8666
8667 if (target == 0)
8668 {
8669 if (DECL_RTL_SET_P (slot))
8670 {
8671 target = DECL_RTL (slot);
8672                /* We have already expanded the slot, so don't do
8673 it again. (mrs) */
8674 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8675 return target;
8676 }
8677 else
8678 {
8679 target = assign_temp (type, 2, 0, 1);
8680 SET_DECL_RTL (slot, target);
8681
8682 /* Since SLOT is not known to the called function
8683 to belong to its stack frame, we must build an explicit
8684 cleanup. This case occurs when we must build up a reference
8685 to pass the reference as an argument. In this case,
8686 it is very likely that such a reference need not be
8687 built here. */
8688
8689 if (TREE_OPERAND (exp, 2) == 0)
8690 TREE_OPERAND (exp, 2)
8691 = lang_hooks.maybe_build_cleanup (slot);
8692 cleanups = TREE_OPERAND (exp, 2);
8693 }
8694 }
8695 else
8696 {
8697 /* This case does occur, when expanding a parameter which
8698 needs to be constructed on the stack. The target
8699 is the actual stack address that we want to initialize.
8700 The function we call will perform the cleanup in this case. */
8701
8702 /* If we have already assigned it space, use that space,
8703 not target that we were passed in, as our target
8704 parameter is only a hint. */
8705 if (DECL_RTL_SET_P (slot))
8706 {
8707 target = DECL_RTL (slot);
8708                /* We have already expanded the slot, so don't do
8709 it again. (mrs) */
8710 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8711 return target;
8712 }
8713 else
8714 SET_DECL_RTL (slot, target);
8715 }
8716
8717 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8718 /* Mark it as expanded. */
8719 TREE_OPERAND (exp, 1) = NULL_TREE;
8720
8721 if (VOID_TYPE_P (TREE_TYPE (exp1)))
8722 /* If the initializer is void, just expand it; it will initialize
8723 the object directly. */
8724 expand_expr (exp1, const0_rtx, VOIDmode, 0);
8725 else
8726 store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
8727
8728 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
8729
8730 return target;
8731 }
8732
8733 case INIT_EXPR:
8734 {
8735 tree lhs = TREE_OPERAND (exp, 0);
8736 tree rhs = TREE_OPERAND (exp, 1);
8737
8738 temp = expand_assignment (lhs, rhs, ! ignore);
8739 return temp;
8740 }
8741
8742 case MODIFY_EXPR:
8743 {
8744 /* If lhs is complex, expand calls in rhs before computing it.
8745 That's so we don't compute a pointer and save it over a
8746 call. If lhs is simple, compute it first so we can give it
8747 as a target if the rhs is just a call. This avoids an
8748 extra temp and copy and that prevents a partial-subsumption
8749 which makes bad code. Actually we could treat
8750 component_ref's of vars like vars. */
8751
8752 tree lhs = TREE_OPERAND (exp, 0);
8753 tree rhs = TREE_OPERAND (exp, 1);
8754
8755 temp = 0;
8756
8757 /* Check for |= or &= of a bitfield of size one into another bitfield
8758 of size 1. In this case, (unless we need the result of the
8759 assignment) we can do this more efficiently with a
8760 test followed by an assignment, if necessary.
8761
8762 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8763 things change so we do, this code should be enhanced to
8764 support it. */
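	 /* For example, with 1-bit bitfields A and B, a statement like
	    "X.A |= Y.B;" whose value is not used can be expanded roughly
	    as "if (Y.B) X.A = 1;", and "X.A &= Y.B;" as
	    "if (! Y.B) X.A = 0;".  */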
8765 if (ignore
8766 && TREE_CODE (lhs) == COMPONENT_REF
8767 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8768 || TREE_CODE (rhs) == BIT_AND_EXPR)
8769 && TREE_OPERAND (rhs, 0) == lhs
8770 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8771 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8772 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8773 {
8774 rtx label = gen_label_rtx ();
8775
8776 do_jump (TREE_OPERAND (rhs, 1),
8777 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8778 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8779 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8780 (TREE_CODE (rhs) == BIT_IOR_EXPR
8781 ? integer_one_node
8782 : integer_zero_node)),
8783 0);
8784 do_pending_stack_adjust ();
8785 emit_label (label);
8786 return const0_rtx;
8787 }
8788
8789 temp = expand_assignment (lhs, rhs, ! ignore);
8790
8791 return temp;
8792 }
8793
8794 case RETURN_EXPR:
8795 if (!TREE_OPERAND (exp, 0))
8796 expand_null_return ();
8797 else
8798 expand_return (TREE_OPERAND (exp, 0));
8799 return const0_rtx;
8800
8801 case PREINCREMENT_EXPR:
8802 case PREDECREMENT_EXPR:
8803 return expand_increment (exp, 0, ignore);
8804
8805 case POSTINCREMENT_EXPR:
8806 case POSTDECREMENT_EXPR:
8807 /* Faster to treat as pre-increment if result is not used. */
8808 return expand_increment (exp, ! ignore, ignore);
8809
8810 case ADDR_EXPR:
8811 if (modifier == EXPAND_STACK_PARM)
8812 target = 0;
8813 /* If we are taking the address of something erroneous, just
8814 return a zero. */
8815 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8816 return const0_rtx;
8817 /* If we are taking the address of a constant and are at the
8818 top level, we have to use output_constant_def since we can't
8819 call force_const_mem at top level. */
8820 else if (cfun == 0
8821 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8822 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8823 == 'c')))
8824 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8825 else
8826 {
8827 /* We make sure to pass const0_rtx down if we came in with
8828             ignore set, to avoid running the cleanups twice.  */
8829 op0 = expand_expr (TREE_OPERAND (exp, 0),
8830 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8831 (modifier == EXPAND_INITIALIZER
8832 ? modifier : EXPAND_CONST_ADDRESS));
8833
8834 /* If we are going to ignore the result, OP0 will have been set
8835 to const0_rtx, so just return it. Don't get confused and
8836 think we are taking the address of the constant. */
8837 if (ignore)
8838 return op0;
8839
8840 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8841 clever and returns a REG when given a MEM. */
8842 op0 = protect_from_queue (op0, 1);
8843
8844 /* We would like the object in memory. If it is a constant, we can
8845 have it be statically allocated into memory. For a non-constant,
8846 we need to allocate some memory and store the value into it. */
8847
8848 if (CONSTANT_P (op0))
8849 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8850 op0);
8851 else if (REG_P (op0) || GET_CODE (op0) == SUBREG
8852 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == PARALLEL
8853 || GET_CODE (op0) == LO_SUM)
8854 {
8855 /* If this object is in a register, it can't be BLKmode. */
8856 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8857 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8858
8859 if (GET_CODE (op0) == PARALLEL)
8860 /* Handle calls that pass values in multiple
8861 non-contiguous locations. The Irix 6 ABI has examples
8862 of this. */
8863 emit_group_store (memloc, op0, inner_type,
8864 int_size_in_bytes (inner_type));
8865 else
8866 emit_move_insn (memloc, op0);
8867
8868 op0 = memloc;
8869 }
8870
8871 if (!MEM_P (op0))
8872 abort ();
8873
8874 mark_temp_addr_taken (op0);
8875 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8876 {
8877 op0 = XEXP (op0, 0);
8878 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
8879 op0 = convert_memory_address (ptr_mode, op0);
8880 return op0;
8881 }
8882
8883          /* If OP0 is not aligned at least as much as the type requires, we
8884 need to make a temporary, copy OP0 to it, and take the address of
8885 the temporary. We want to use the alignment of the type, not of
8886 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8887 the test for BLKmode means that can't happen. The test for
8888 BLKmode is because we never make mis-aligned MEMs with
8889 non-BLKmode.
8890
8891 We don't need to do this at all if the machine doesn't have
8892 strict alignment. */
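	    /* This can arise, e.g., when taking the address of a component
	       of a packed aggregate on a strict-alignment target: the
	       object is copied into a suitably aligned temporary and the
	       temporary's address is used instead.  */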
8893 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8894 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8895 > MEM_ALIGN (op0))
8896 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8897 {
8898 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8899 rtx new;
8900
8901 if (TYPE_ALIGN_OK (inner_type))
8902 abort ();
8903
8904 if (TREE_ADDRESSABLE (inner_type))
8905 {
8906 /* We can't make a bitwise copy of this object, so fail. */
8907 error ("cannot take the address of an unaligned member");
8908 return const0_rtx;
8909 }
8910
8911 new = assign_stack_temp_for_type
8912 (TYPE_MODE (inner_type),
8913 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8914 : int_size_in_bytes (inner_type),
8915 1, build_qualified_type (inner_type,
8916 (TYPE_QUALS (inner_type)
8917 | TYPE_QUAL_CONST)));
8918
8919 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
8920 (modifier == EXPAND_STACK_PARM
8921 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
8922
8923 op0 = new;
8924 }
8925
8926 op0 = force_operand (XEXP (op0, 0), target);
8927 }
8928
8929 if (flag_force_addr
8930 && !REG_P (op0)
8931 && modifier != EXPAND_CONST_ADDRESS
8932 && modifier != EXPAND_INITIALIZER
8933 && modifier != EXPAND_SUM)
8934 op0 = force_reg (Pmode, op0);
8935
8936 if (REG_P (op0)
8937 && ! REG_USERVAR_P (op0))
8938 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8939
8940 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
8941 op0 = convert_memory_address (ptr_mode, op0);
8942
8943 return op0;
8944
8945 case ENTRY_VALUE_EXPR:
8946 abort ();
8947
8948 /* COMPLEX type for Extended Pascal & Fortran */
8949 case COMPLEX_EXPR:
8950 {
8951 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8952 rtx insns;
8953
8954 /* Get the rtx code of the operands. */
8955 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8956 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8957
8958 if (! target)
8959 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8960
8961 start_sequence ();
8962
8963 /* Move the real (op0) and imaginary (op1) parts to their location. */
8964 emit_move_insn (gen_realpart (mode, target), op0);
8965 emit_move_insn (gen_imagpart (mode, target), op1);
8966
8967 insns = get_insns ();
8968 end_sequence ();
8969
8970 /* Complex construction should appear as a single unit. */
8971 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8972 each with a separate pseudo as destination.
8973 It's not correct for flow to treat them as a unit. */
8974 if (GET_CODE (target) != CONCAT)
8975 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8976 else
8977 emit_insn (insns);
8978
8979 return target;
8980 }
8981
8982 case REALPART_EXPR:
8983 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8984 return gen_realpart (mode, op0);
8985
8986 case IMAGPART_EXPR:
8987 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8988 return gen_imagpart (mode, op0);
8989
8990 case CONJ_EXPR:
8991 {
8992 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8993 rtx imag_t;
8994 rtx insns;
8995
8996 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8997
8998 if (! target)
8999 target = gen_reg_rtx (mode);
9000
9001 start_sequence ();
9002
9003 /* Store the realpart and the negated imagpart to target. */
9004 emit_move_insn (gen_realpart (partmode, target),
9005 gen_realpart (partmode, op0));
9006
9007 imag_t = gen_imagpart (partmode, target);
9008 temp = expand_unop (partmode,
9009 ! unsignedp && flag_trapv
9010 && (GET_MODE_CLASS(partmode) == MODE_INT)
9011 ? negv_optab : neg_optab,
9012 gen_imagpart (partmode, op0), imag_t, 0);
9013 if (temp != imag_t)
9014 emit_move_insn (imag_t, temp);
9015
9016 insns = get_insns ();
9017 end_sequence ();
9018
9019         /* Conjugate should appear as a single unit.
9020 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
9021 each with a separate pseudo as destination.
9022 It's not correct for flow to treat them as a unit. */
9023 if (GET_CODE (target) != CONCAT)
9024 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
9025 else
9026 emit_insn (insns);
9027
9028 return target;
9029 }
9030
9031 case RESX_EXPR:
9032 expand_resx_expr (exp);
9033 return const0_rtx;
9034
9035 case TRY_CATCH_EXPR:
9036 {
9037 tree handler = TREE_OPERAND (exp, 1);
9038
9039 expand_eh_region_start ();
9040 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9041 expand_eh_handler (handler);
9042
9043 return op0;
9044 }
9045
9046 case CATCH_EXPR:
9047 expand_start_catch (CATCH_TYPES (exp));
9048 expand_expr (CATCH_BODY (exp), const0_rtx, VOIDmode, 0);
9049 expand_end_catch ();
9050 return const0_rtx;
9051
9052 case EH_FILTER_EXPR:
9053 /* Should have been handled in expand_eh_handler. */
9054 abort ();
9055
9056 case TRY_FINALLY_EXPR:
9057 {
9058 tree try_block = TREE_OPERAND (exp, 0);
9059 tree finally_block = TREE_OPERAND (exp, 1);
9060
9061 if ((!optimize && lang_protect_cleanup_actions == NULL)
9062 || unsafe_for_reeval (finally_block) > 1)
9063 {
9064 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
9065 is not sufficient, so we cannot expand the block twice.
9066               Instead we play games with GOTO_SUBROUTINE_EXPR to let us
9067               expand the finally block only once.  */
9068 /* When not optimizing, we go ahead with this form since
9069 (1) user breakpoints operate more predictably without
9070 code duplication, and
9071 (2) we're not running any of the global optimizers
9072 that would explode in time/space with the highly
9073 connected CFG created by the indirect branching. */
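	    /* The expansion below lays the code out roughly as:
	         <try body; its cleanup jumps to FINALLY_LABEL with
	          RETURN_LINK set to the resume point>
	         goto DONE_LABEL;
	       FINALLY_LABEL:
	         <finally body>
	         indirect jump through RETURN_LINK;
	       DONE_LABEL:  */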
9074
9075 rtx finally_label = gen_label_rtx ();
9076 rtx done_label = gen_label_rtx ();
9077 rtx return_link = gen_reg_rtx (Pmode);
9078 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
9079 (tree) finally_label, (tree) return_link);
9080 TREE_SIDE_EFFECTS (cleanup) = 1;
9081
9082 /* Start a new binding layer that will keep track of all cleanup
9083 actions to be performed. */
9084 expand_start_bindings (2);
9085 target_temp_slot_level = temp_slot_level;
9086
9087 expand_decl_cleanup (NULL_TREE, cleanup);
9088 op0 = expand_expr (try_block, target, tmode, modifier);
9089
9090 preserve_temp_slots (op0);
9091 expand_end_bindings (NULL_TREE, 0, 0);
9092 emit_jump (done_label);
9093 emit_label (finally_label);
9094 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
9095 emit_indirect_jump (return_link);
9096 emit_label (done_label);
9097 }
9098 else
9099 {
9100 expand_start_bindings (2);
9101 target_temp_slot_level = temp_slot_level;
9102
9103 expand_decl_cleanup (NULL_TREE, finally_block);
9104 op0 = expand_expr (try_block, target, tmode, modifier);
9105
9106 preserve_temp_slots (op0);
9107 expand_end_bindings (NULL_TREE, 0, 0);
9108 }
9109
9110 return op0;
9111 }
9112
9113 case GOTO_SUBROUTINE_EXPR:
9114 {
9115 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9116 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9117 rtx return_address = gen_label_rtx ();
9118 emit_move_insn (return_link,
9119 gen_rtx_LABEL_REF (Pmode, return_address));
9120 emit_jump (subr);
9121 emit_label (return_address);
9122 return const0_rtx;
9123 }
9124
9125 case VA_ARG_EXPR:
9126 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9127
9128 case EXC_PTR_EXPR:
9129 return get_exception_pointer (cfun);
9130
9131 case FILTER_EXPR:
9132 return get_exception_filter (cfun);
9133
9134 case FDESC_EXPR:
9135 /* Function descriptors are not valid except for as
9136 initialization constants, and should not be expanded. */
9137 abort ();
9138
9139 case SWITCH_EXPR:
9140 expand_start_case (0, SWITCH_COND (exp), integer_type_node,
9141 "switch");
9142 if (SWITCH_BODY (exp))
9143 expand_expr_stmt (SWITCH_BODY (exp));
9144 if (SWITCH_LABELS (exp))
9145 {
9146 tree duplicate = 0;
9147 tree vec = SWITCH_LABELS (exp);
9148 size_t i, n = TREE_VEC_LENGTH (vec);
9149
9150 for (i = 0; i < n; ++i)
9151 {
9152 tree elt = TREE_VEC_ELT (vec, i);
9153 tree controlling_expr_type = TREE_TYPE (SWITCH_COND (exp));
9154 tree min_value = TYPE_MIN_VALUE (controlling_expr_type);
9155 tree max_value = TYPE_MAX_VALUE (controlling_expr_type);
9156
9157 tree case_low = CASE_LOW (elt);
9158 tree case_high = CASE_HIGH (elt) ? CASE_HIGH (elt) : case_low;
9159 if (case_low && case_high)
9160 {
9161 /* Case label is less than minimum for type. */
9162 if (TREE_CODE (min_value) == INTEGER_CST
9163 && tree_int_cst_compare (case_low, min_value) < 0
9164 && tree_int_cst_compare (case_high, min_value) < 0)
9165 {
9166 warning ("case label value %d is less than minimum value for type",
9167                                (int) TREE_INT_CST_LOW (case_low));
9168 continue;
9169 }
9170
9171 /* Case value is greater than maximum for type. */
9172 if (TREE_CODE (max_value) == INTEGER_CST
9173 && tree_int_cst_compare (case_low, max_value) > 0
9174 && tree_int_cst_compare (case_high, max_value) > 0)
9175 {
9176 warning ("case label value %d exceeds maximum value for type",
9177                                (int) TREE_INT_CST_LOW (case_high));
9178 continue;
9179 }
9180
9181 /* Saturate lower case label value to minimum. */
9182 if (TREE_CODE (min_value) == INTEGER_CST
9183 && tree_int_cst_compare (case_high, min_value) >= 0
9184 && tree_int_cst_compare (case_low, min_value) < 0)
9185 {
9186 warning ("lower value %d in case label range less than minimum value for type",
9187                                (int) TREE_INT_CST_LOW (case_low));
9188 case_low = min_value;
9189 }
9190
9191 /* Saturate upper case label value to maximum. */
9192 if (TREE_CODE (max_value) == INTEGER_CST
9193 && tree_int_cst_compare (case_low, max_value) <= 0
9194 && tree_int_cst_compare (case_high, max_value) > 0)
9195 {
9196 warning ("upper value %d in case label range exceeds maximum value for type",
9197                                (int) TREE_INT_CST_LOW (case_high));
9198 case_high = max_value;
9199 }
9200 }
9201
9202 add_case_node (case_low, case_high, CASE_LABEL (elt), &duplicate, true);
9203 if (duplicate)
9204 abort ();
9205 }
9206 }
9207 expand_end_case_type (SWITCH_COND (exp), TREE_TYPE (exp));
9208 return const0_rtx;
9209
9210 case LABEL_EXPR:
9211 expand_label (TREE_OPERAND (exp, 0));
9212 return const0_rtx;
9213
9214 case CASE_LABEL_EXPR:
9215 {
9216 tree duplicate = 0;
9217 add_case_node (CASE_LOW (exp), CASE_HIGH (exp), CASE_LABEL (exp),
9218 &duplicate, false);
9219 if (duplicate)
9220 abort ();
9221 return const0_rtx;
9222 }
9223
9224 case ASM_EXPR:
9225 expand_asm_expr (exp);
9226 return const0_rtx;
9227
9228 default:
9229 return lang_hooks.expand_expr (exp, original_target, tmode,
9230 modifier, alt_rtl);
9231 }
9232
9233 /* Here to do an ordinary binary operator, generating an instruction
9234 from the optab already placed in `this_optab'. */
9235 binop:
9236 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9237 subtarget, &op0, &op1, 0);
9238 binop2:
9239 if (modifier == EXPAND_STACK_PARM)
9240 target = 0;
9241 temp = expand_binop (mode, this_optab, op0, op1, target,
9242 unsignedp, OPTAB_LIB_WIDEN);
9243 if (temp == 0)
9244 abort ();
9245 return temp;
9246 }
9247 \f
9248 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9249 when applied to the address of EXP produces an address known to be
9250 aligned more than BIGGEST_ALIGNMENT. */
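/* Such an offset has the form (- (T) &EXP) & (ALIGN - 1), where T is an
   integer type and ALIGN is a power of 2; adding it to the address of EXP
   rounds that address up to a multiple of ALIGN.  */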
9251
9252 static int
9253 is_aligning_offset (tree offset, tree exp)
9254 {
9255 /* Strip off any conversions. */
9256 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9257 || TREE_CODE (offset) == NOP_EXPR
9258 || TREE_CODE (offset) == CONVERT_EXPR)
9259 offset = TREE_OPERAND (offset, 0);
9260
9261 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9262      a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
9263 if (TREE_CODE (offset) != BIT_AND_EXPR
9264 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9265 || compare_tree_int (TREE_OPERAND (offset, 1),
9266 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
9267        || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9268 return 0;
9269
9270 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9271 It must be NEGATE_EXPR. Then strip any more conversions. */
9272 offset = TREE_OPERAND (offset, 0);
9273 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9274 || TREE_CODE (offset) == NOP_EXPR
9275 || TREE_CODE (offset) == CONVERT_EXPR)
9276 offset = TREE_OPERAND (offset, 0);
9277
9278 if (TREE_CODE (offset) != NEGATE_EXPR)
9279 return 0;
9280
9281 offset = TREE_OPERAND (offset, 0);
9282 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9283 || TREE_CODE (offset) == NOP_EXPR
9284 || TREE_CODE (offset) == CONVERT_EXPR)
9285 offset = TREE_OPERAND (offset, 0);
9286
9287 /* This must now be the address of EXP. */
9288 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
9289 }
9290 \f
9291 /* Return the tree node if an ARG corresponds to a string constant or zero
9292 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9293 in bytes within the string that ARG is accessing. The type of the
9294 offset will be `sizetype'. */
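/* For example, given ARG == &"hello"[2] or &"hello" + 2, the STRING_CST
   for "hello" is returned and *PTR_OFFSET is set to 2.  */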
9295
9296 tree
9297 string_constant (tree arg, tree *ptr_offset)
9298 {
9299 STRIP_NOPS (arg);
9300
9301 if (TREE_CODE (arg) == ADDR_EXPR
9302 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9303 {
9304 *ptr_offset = size_zero_node;
9305 return TREE_OPERAND (arg, 0);
9306 }
9307 if (TREE_CODE (arg) == ADDR_EXPR
9308 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
9309 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg, 0), 0)) == STRING_CST)
9310 {
9311 *ptr_offset = convert (sizetype, TREE_OPERAND (TREE_OPERAND (arg, 0), 1));
9312 return TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
9313 }
9314 else if (TREE_CODE (arg) == PLUS_EXPR)
9315 {
9316 tree arg0 = TREE_OPERAND (arg, 0);
9317 tree arg1 = TREE_OPERAND (arg, 1);
9318
9319 STRIP_NOPS (arg0);
9320 STRIP_NOPS (arg1);
9321
9322 if (TREE_CODE (arg0) == ADDR_EXPR
9323 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9324 {
9325 *ptr_offset = convert (sizetype, arg1);
9326 return TREE_OPERAND (arg0, 0);
9327 }
9328 else if (TREE_CODE (arg1) == ADDR_EXPR
9329 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9330 {
9331 *ptr_offset = convert (sizetype, arg0);
9332 return TREE_OPERAND (arg1, 0);
9333 }
9334 }
9335
9336 return 0;
9337 }
9338 \f
9339 /* Expand code for a post- or pre- increment or decrement
9340 and return the RTX for the result.
9341 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
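/* For a post-increment whose value is used, e.g. "a[i++]", the add insn
   is placed on the increment queue so the old value can be used first;
   a later emit_queue flushes the queued increment.  */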
9342
9343 static rtx
9344 expand_increment (tree exp, int post, int ignore)
9345 {
9346 rtx op0, op1;
9347 rtx temp, value;
9348 tree incremented = TREE_OPERAND (exp, 0);
9349 optab this_optab = add_optab;
9350 int icode;
9351 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9352 int op0_is_copy = 0;
9353 int single_insn = 0;
9354 /* 1 means we can't store into OP0 directly,
9355 because it is a subreg narrower than a word,
9356 and we don't dare clobber the rest of the word. */
9357 int bad_subreg = 0;
9358
9359 /* Stabilize any component ref that might need to be
9360 evaluated more than once below. */
9361 if (!post
9362 || TREE_CODE (incremented) == BIT_FIELD_REF
9363 || (TREE_CODE (incremented) == COMPONENT_REF
9364 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9365 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9366 incremented = stabilize_reference (incremented);
9367 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9368 ones into save exprs so that they don't accidentally get evaluated
9369 more than once by the code below. */
9370 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9371 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9372 incremented = save_expr (incremented);
9373
9374 /* Compute the operands as RTX.
9375 Note whether OP0 is the actual lvalue or a copy of it:
9376 I believe it is a copy iff it is a register or subreg
9377 and insns were generated in computing it. */
9378
9379 temp = get_last_insn ();
9380 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9381
9382 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9383 in place but instead must do sign- or zero-extension during assignment,
9384 so we copy it into a new register and let the code below use it as
9385 a copy.
9386
9387      Note that we can safely modify this SUBREG since it is known not to be
9388 shared (it was made by the expand_expr call above). */
9389
9390 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9391 {
9392 if (post)
9393 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9394 else
9395 bad_subreg = 1;
9396 }
9397 else if (GET_CODE (op0) == SUBREG
9398 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9399 {
9400 /* We cannot increment this SUBREG in place. If we are
9401 post-incrementing, get a copy of the old value. Otherwise,
9402 just mark that we cannot increment in place. */
9403 if (post)
9404 op0 = copy_to_reg (op0);
9405 else
9406 bad_subreg = 1;
9407 }
9408
9409 op0_is_copy = ((GET_CODE (op0) == SUBREG || REG_P (op0))
9410 && temp != get_last_insn ());
9411 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9412
9413 /* Decide whether incrementing or decrementing. */
9414 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9415 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9416 this_optab = sub_optab;
9417
9418 /* Convert decrement by a constant into a negative increment. */
9419 if (this_optab == sub_optab
9420 && GET_CODE (op1) == CONST_INT)
9421 {
9422 op1 = GEN_INT (-INTVAL (op1));
9423 this_optab = add_optab;
9424 }
9425
9426 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9427 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9428
9429 /* For a preincrement, see if we can do this with a single instruction. */
9430 if (!post)
9431 {
9432 icode = (int) this_optab->handlers[(int) mode].insn_code;
9433 if (icode != (int) CODE_FOR_nothing
9434 /* Make sure that OP0 is valid for operands 0 and 1
9435 of the insn we want to queue. */
9436 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9437 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9438 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9439 single_insn = 1;
9440 }
9441
9442 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9443 then we cannot just increment OP0. We must therefore contrive to
9444 increment the original value. Then, for postincrement, we can return
9445 OP0 since it is a copy of the old value. For preincrement, expand here
9446 unless we can do it with a single insn.
9447
9448 Likewise if storing directly into OP0 would clobber high bits
9449 we need to preserve (bad_subreg). */
9450 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9451 {
9452 /* This is the easiest way to increment the value wherever it is.
9453 Problems with multiple evaluation of INCREMENTED are prevented
9454 because either (1) it is a component_ref or preincrement,
9455 in which case it was stabilized above, or (2) it is an array_ref
9456 with constant index in an array in a register, which is
9457 safe to reevaluate. */
9458 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9459 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9460 ? MINUS_EXPR : PLUS_EXPR),
9461 TREE_TYPE (exp),
9462 incremented,
9463 TREE_OPERAND (exp, 1));
9464
9465 while (TREE_CODE (incremented) == NOP_EXPR
9466 || TREE_CODE (incremented) == CONVERT_EXPR)
9467 {
9468 newexp = convert (TREE_TYPE (incremented), newexp);
9469 incremented = TREE_OPERAND (incremented, 0);
9470 }
9471
9472 temp = expand_assignment (incremented, newexp, ! post && ! ignore);
9473 return post ? op0 : temp;
9474 }
9475
9476 if (post)
9477 {
9478 /* We have a true reference to the value in OP0.
9479 If there is an insn to add or subtract in this mode, queue it.
9480 Queuing the increment insn avoids the register shuffling
9481 that often results if we must increment now and first save
9482 the old value for subsequent use. */
9483
9484 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9485 op0 = stabilize (op0);
9486 #endif
9487
9488 icode = (int) this_optab->handlers[(int) mode].insn_code;
9489 if (icode != (int) CODE_FOR_nothing
9490 /* Make sure that OP0 is valid for operands 0 and 1
9491 of the insn we want to queue. */
9492 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9493 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9494 {
9495 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9496 op1 = force_reg (mode, op1);
9497
9498 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9499 }
9500 if (icode != (int) CODE_FOR_nothing && MEM_P (op0))
9501 {
9502 rtx addr = (general_operand (XEXP (op0, 0), mode)
9503 ? force_reg (Pmode, XEXP (op0, 0))
9504 : copy_to_reg (XEXP (op0, 0)));
9505 rtx temp, result;
9506
9507 op0 = replace_equiv_address (op0, addr);
9508 temp = force_reg (GET_MODE (op0), op0);
9509 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9510 op1 = force_reg (mode, op1);
9511
9512 /* The increment queue is LIFO, thus we have to `queue'
9513 the instructions in reverse order. */
9514 enqueue_insn (op0, gen_move_insn (op0, temp));
9515 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9516 return result;
9517 }
9518 }
9519
9520 /* Preincrement, or we can't increment with one simple insn. */
9521 if (post)
9522 /* Save a copy of the value before inc or dec, to return it later. */
9523 temp = value = copy_to_reg (op0);
9524 else
9525 /* Arrange to return the incremented value. */
9526 /* Copy the rtx because expand_binop will protect from the queue,
9527 and the results of that would be invalid for us to return
9528 if our caller does emit_queue before using our result. */
9529 temp = copy_rtx (value = op0);
9530
9531 /* Increment however we can. */
9532 op1 = expand_binop (mode, this_optab, value, op1, op0,
9533 TYPE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9534
9535 /* Make sure the value is stored into OP0. */
9536 if (op1 != op0)
9537 emit_move_insn (op0, op1);
9538
9539 return temp;
9540 }
9541 \f
9542 /* Generate code to calculate EXP using a store-flag instruction
9543 and return an rtx for the result. EXP is either a comparison
9544 or a TRUTH_NOT_EXPR whose operand is a comparison.
9545
9546 If TARGET is nonzero, store the result there if convenient.
9547
9548 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9549 cheap.
9550
9551 Return zero if there is no suitable set-flag instruction
9552 available on this machine.
9553
9554 Once expand_expr has been called on the arguments of the comparison,
9555 we are committed to doing the store flag, since it is not safe to
9556 re-evaluate the expression. We emit the store-flag insn by calling
9557 emit_store_flag, but only expand the arguments if we have a reason
9558 to believe that emit_store_flag will be successful. If we think that
9559 it will, but it isn't, we have to simulate the store-flag with a
9560 set/jump/set sequence. */
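/* For example, "X == 0" may expand to a single store-flag (scc) insn;
   when that is not possible, the fallback emitted below is roughly
   "TARGET = 1; if (X == 0) goto L; TARGET = 0; L:".  */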
9561
9562 static rtx
9563 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
9564 {
9565 enum rtx_code code;
9566 tree arg0, arg1, type;
9567 tree tem;
9568 enum machine_mode operand_mode;
9569 int invert = 0;
9570 int unsignedp;
9571 rtx op0, op1;
9572 enum insn_code icode;
9573 rtx subtarget = target;
9574 rtx result, label;
9575
9576 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9577 result at the end. We can't simply invert the test since it would
9578 have already been inverted if it were valid. This case occurs for
9579 some floating-point comparisons. */
9580
9581 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9582 invert = 1, exp = TREE_OPERAND (exp, 0);
9583
9584 arg0 = TREE_OPERAND (exp, 0);
9585 arg1 = TREE_OPERAND (exp, 1);
9586
9587 /* Don't crash if the comparison was erroneous. */
9588 if (arg0 == error_mark_node || arg1 == error_mark_node)
9589 return const0_rtx;
9590
9591 type = TREE_TYPE (arg0);
9592 operand_mode = TYPE_MODE (type);
9593 unsignedp = TYPE_UNSIGNED (type);
9594
9595 /* We won't bother with BLKmode store-flag operations because it would mean
9596 passing a lot of information to emit_store_flag. */
9597 if (operand_mode == BLKmode)
9598 return 0;
9599
9600 /* We won't bother with store-flag operations involving function pointers
9601 when function pointers must be canonicalized before comparisons. */
9602 #ifdef HAVE_canonicalize_funcptr_for_compare
9603 if (HAVE_canonicalize_funcptr_for_compare
9604 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9605 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9606 == FUNCTION_TYPE))
9607 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9608 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9609 == FUNCTION_TYPE))))
9610 return 0;
9611 #endif
9612
9613 STRIP_NOPS (arg0);
9614 STRIP_NOPS (arg1);
9615
9616 /* Get the rtx comparison code to use. We know that EXP is a comparison
9617 operation of some type. Some comparisons against 1 and -1 can be
9618 converted to comparisons with zero. Do so here so that the tests
9619 below will be aware that we have a comparison with zero. These
9620 tests will not catch constants in the first operand, but constants
9621 are rarely passed as the first operand. */
9622
9623 switch (TREE_CODE (exp))
9624 {
9625 case EQ_EXPR:
9626 code = EQ;
9627 break;
9628 case NE_EXPR:
9629 code = NE;
9630 break;
9631 case LT_EXPR:
9632 if (integer_onep (arg1))
9633 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9634 else
9635 code = unsignedp ? LTU : LT;
9636 break;
9637 case LE_EXPR:
9638 if (! unsignedp && integer_all_onesp (arg1))
9639 arg1 = integer_zero_node, code = LT;
9640 else
9641 code = unsignedp ? LEU : LE;
9642 break;
9643 case GT_EXPR:
9644 if (! unsignedp && integer_all_onesp (arg1))
9645 arg1 = integer_zero_node, code = GE;
9646 else
9647 code = unsignedp ? GTU : GT;
9648 break;
9649 case GE_EXPR:
9650 if (integer_onep (arg1))
9651 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9652 else
9653 code = unsignedp ? GEU : GE;
9654 break;
9655
9656 case UNORDERED_EXPR:
9657 code = UNORDERED;
9658 break;
9659 case ORDERED_EXPR:
9660 code = ORDERED;
9661 break;
9662 case UNLT_EXPR:
9663 code = UNLT;
9664 break;
9665 case UNLE_EXPR:
9666 code = UNLE;
9667 break;
9668 case UNGT_EXPR:
9669 code = UNGT;
9670 break;
9671 case UNGE_EXPR:
9672 code = UNGE;
9673 break;
9674 case UNEQ_EXPR:
9675 code = UNEQ;
9676 break;
9677 case LTGT_EXPR:
9678 code = LTGT;
9679 break;
9680
9681 default:
9682 abort ();
9683 }
9684
9685 /* Put a constant second. */
9686 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9687 {
9688 tem = arg0; arg0 = arg1; arg1 = tem;
9689 code = swap_condition (code);
9690 }
9691
9692 /* If this is an equality or inequality test of a single bit, we can
9693 do this by shifting the bit being tested to the low-order bit and
9694 masking the result with the constant 1. If the condition was EQ,
9695 we xor it with 1. This does not require an scc insn and is faster
9696 than an scc insn even if we have it.
9697
9698 The code to make this transformation was moved into fold_single_bit_test,
9699 so we just call into the folder and expand its result. */
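  /* For example, "(X & 8) != 0" becomes "(X >> 3) & 1", and
     "(X & 8) == 0" becomes "((X >> 3) & 1) ^ 1".  */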
9700
9701 if ((code == NE || code == EQ)
9702 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9703 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9704 {
9705 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
9706 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9707 arg0, arg1, type),
9708 target, VOIDmode, EXPAND_NORMAL);
9709 }
9710
9711 /* Now see if we are likely to be able to do this. Return if not. */
9712 if (! can_compare_p (code, operand_mode, ccp_store_flag))
9713 return 0;
9714
9715 icode = setcc_gen_code[(int) code];
9716 if (icode == CODE_FOR_nothing
9717 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9718 {
9719 /* We can only do this if it is one of the special cases that
9720 can be handled without an scc insn. */
9721 if ((code == LT && integer_zerop (arg1))
9722 || (! only_cheap && code == GE && integer_zerop (arg1)))
9723 ;
9724 else if (BRANCH_COST >= 0
9725 && ! only_cheap && (code == NE || code == EQ)
9726 && TREE_CODE (type) != REAL_TYPE
9727 && ((abs_optab->handlers[(int) operand_mode].insn_code
9728 != CODE_FOR_nothing)
9729 || (ffs_optab->handlers[(int) operand_mode].insn_code
9730 != CODE_FOR_nothing)))
9731 ;
9732 else
9733 return 0;
9734 }
9735
9736 if (! get_subtarget (target)
9737 || GET_MODE (subtarget) != operand_mode)
9738 subtarget = 0;
9739
9740 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
9741
9742 if (target == 0)
9743 target = gen_reg_rtx (mode);
9744
9745 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9746      because, if emit_store_flag does anything, it will succeed and
9747 OP0 and OP1 will not be used subsequently. */
9748
9749 result = emit_store_flag (target, code,
9750 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9751 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9752 operand_mode, unsignedp, 1);
9753
9754 if (result)
9755 {
9756 if (invert)
9757 result = expand_binop (mode, xor_optab, result, const1_rtx,
9758 result, 0, OPTAB_LIB_WIDEN);
9759 return result;
9760 }
9761
9762 /* If this failed, we have to do this with set/compare/jump/set code. */
9763 if (!REG_P (target)
9764 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9765 target = gen_reg_rtx (GET_MODE (target));
9766
9767 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9768 result = compare_from_rtx (op0, op1, code, unsignedp,
9769 operand_mode, NULL_RTX);
9770 if (GET_CODE (result) == CONST_INT)
9771 return (((result == const0_rtx && ! invert)
9772 || (result != const0_rtx && invert))
9773 ? const0_rtx : const1_rtx);
9774
9775 /* The code of RESULT may not match CODE if compare_from_rtx
9776 decided to swap its operands and reverse the original code.
9777
9778 We know that compare_from_rtx returns either a CONST_INT or
9779 a new comparison code, so it is safe to just extract the
9780 code from RESULT. */
9781 code = GET_CODE (result);
9782
9783 label = gen_label_rtx ();
9784 if (bcc_gen_fctn[(int) code] == 0)
9785 abort ();
9786
9787 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9788 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9789 emit_label (label);
9790
9791 return target;
9792 }
9793 \f
9794
9795 /* Stubs in case we haven't got a casesi insn. */
9796 #ifndef HAVE_casesi
9797 # define HAVE_casesi 0
9798 # define gen_casesi(a, b, c, d, e) (0)
9799 # define CODE_FOR_casesi CODE_FOR_nothing
9800 #endif
9801
9802 /* If the machine does not have a case insn that compares the bounds,
9803 this means extra overhead for dispatch tables, which raises the
9804 threshold for using them. */
9805 #ifndef CASE_VALUES_THRESHOLD
9806 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9807 #endif /* CASE_VALUES_THRESHOLD */
9808
9809 unsigned int
9810 case_values_threshold (void)
9811 {
9812 return CASE_VALUES_THRESHOLD;
9813 }
9814
9815 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9816 0 otherwise (i.e. if there is no casesi instruction). */
9817 int
9818 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9819 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
9820 {
9821 enum machine_mode index_mode = SImode;
9822 int index_bits = GET_MODE_BITSIZE (index_mode);
9823 rtx op1, op2, index;
9824 enum machine_mode op_mode;
9825
9826 if (! HAVE_casesi)
9827 return 0;
9828
9829 /* Convert the index to SImode. */
9830 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9831 {
9832 enum machine_mode omode = TYPE_MODE (index_type);
9833 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
9834
9835 /* We must handle the endpoints in the original mode. */
9836 index_expr = build (MINUS_EXPR, index_type,
9837 index_expr, minval);
9838 minval = integer_zero_node;
9839 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9840 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9841 omode, 1, default_label);
9842 /* Now we can safely truncate. */
9843 index = convert_to_mode (index_mode, index, 0);
9844 }
9845 else
9846 {
9847 if (TYPE_MODE (index_type) != index_mode)
9848 {
9849 index_expr = convert (lang_hooks.types.type_for_size
9850 (index_bits, 0), index_expr);
9851 index_type = TREE_TYPE (index_expr);
9852 }
9853
9854 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9855 }
9856 emit_queue ();
9857 index = protect_from_queue (index, 0);
9858 do_pending_stack_adjust ();
9859
9860 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9861 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9862 (index, op_mode))
9863 index = copy_to_mode_reg (op_mode, index);
9864
9865 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
9866
9867 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9868 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9869 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
9870 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9871 (op1, op_mode))
9872 op1 = copy_to_mode_reg (op_mode, op1);
9873
9874 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
9875
9876 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9877 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9878 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
9879 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9880 (op2, op_mode))
9881 op2 = copy_to_mode_reg (op_mode, op2);
9882
9883 emit_jump_insn (gen_casesi (index, op1, op2,
9884 table_label, default_label));
9885 return 1;
9886 }
9887
9888 /* Attempt to generate a tablejump instruction; same concept. */
9889 #ifndef HAVE_tablejump
9890 #define HAVE_tablejump 0
9891 #define gen_tablejump(x, y) (0)
9892 #endif
9893
9894 /* Subroutine of the next function.
9895
9896 INDEX is the value being switched on, with the lowest value
9897 in the table already subtracted.
9898 MODE is its expected mode (needed if INDEX is constant).
9899 RANGE is the length of the jump table.
9900 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9901
9902 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9903 index value is out of range. */
9904
9905 static void
9906 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9907 rtx default_label)
9908 {
9909 rtx temp, vector;
9910
9911 if (INTVAL (range) > cfun->max_jumptable_ents)
9912 cfun->max_jumptable_ents = INTVAL (range);
9913
9914 /* Do an unsigned comparison (in the proper mode) between the index
9915 expression and the value which represents the length of the range.
9916 Since we just finished subtracting the lower bound of the range
9917 from the index expression, this comparison allows us to simultaneously
9918 check that the original index expression value is both greater than
9919 or equal to the minimum value of the range and less than or equal to
9920 the maximum value of the range. */
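  /* For example, for case values 5 through 9, INDEX has already had 5
     subtracted and RANGE is 4; the single unsigned test INDEX > 4 then
     rejects both original values below 5 (which wrap to huge unsigned
     values) and values above 9.  */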
9921
9922 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9923 default_label);
9924
9925 /* If index is in range, it must fit in Pmode.
9926 Convert to Pmode so we can index with it. */
9927 if (mode != Pmode)
9928 index = convert_to_mode (Pmode, index, 1);
9929
9930 /* Don't let a MEM slip through, because then INDEX that comes
9931 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9932 and break_out_memory_refs will go to work on it and mess it up. */
9933 #ifdef PIC_CASE_VECTOR_ADDRESS
9934 if (flag_pic && !REG_P (index))
9935 index = copy_to_mode_reg (Pmode, index);
9936 #endif
9937
9938 /* If flag_force_addr were to affect this address
9939 it could interfere with the tricky assumptions made
9940 about addresses that contain label-refs,
9941 which may be valid only very near the tablejump itself. */
9942 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9943 GET_MODE_SIZE, because this indicates how large insns are. The other
9944 uses should all be Pmode, because they are addresses. This code
9945 could fail if addresses and insns are not the same size. */
9946 index = gen_rtx_PLUS (Pmode,
9947 gen_rtx_MULT (Pmode, index,
9948 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9949 gen_rtx_LABEL_REF (Pmode, table_label));
9950 #ifdef PIC_CASE_VECTOR_ADDRESS
9951 if (flag_pic)
9952 index = PIC_CASE_VECTOR_ADDRESS (index);
9953 else
9954 #endif
9955 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9956 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9957 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
9958 RTX_UNCHANGING_P (vector) = 1;
9959 MEM_NOTRAP_P (vector) = 1;
9960 convert_move (temp, vector, 0);
9961
9962 emit_jump_insn (gen_tablejump (temp, table_label));
9963
9964 /* If we are generating PIC code or if the table is PC-relative, the
9965 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9966 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9967 emit_barrier ();
9968 }
9969
9970 int
9971 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9972 rtx table_label, rtx default_label)
9973 {
9974 rtx index;
9975
9976 if (! HAVE_tablejump)
9977 return 0;
9978
9979 index_expr = fold (build (MINUS_EXPR, index_type,
9980 convert (index_type, index_expr),
9981 convert (index_type, minval)));
9982 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9983 emit_queue ();
9984 index = protect_from_queue (index, 0);
9985 do_pending_stack_adjust ();
9986
9987 do_tablejump (index, TYPE_MODE (index_type),
9988 convert_modes (TYPE_MODE (index_type),
9989 TYPE_MODE (TREE_TYPE (range)),
9990 expand_expr (range, NULL_RTX,
9991 VOIDmode, 0),
9992 TYPE_UNSIGNED (TREE_TYPE (range))),
9993 table_label, default_label);
9994 return 1;
9995 }
9996
9997 /* Nonzero if the mode is a valid vector mode for this architecture.
9998 This returns nonzero even if there is no hardware support for the
9999 vector mode, but we can emulate with narrower modes. */
10000
10001 int
10002 vector_mode_valid_p (enum machine_mode mode)
10003 {
10004 enum mode_class class = GET_MODE_CLASS (mode);
10005 enum machine_mode innermode;
10006
10007 /* Doh! What's going on? */
10008 if (class != MODE_VECTOR_INT
10009 && class != MODE_VECTOR_FLOAT)
10010 return 0;
10011
10012 /* Hardware support. Woo hoo! */
10013 if (VECTOR_MODE_SUPPORTED_P (mode))
10014 return 1;
10015
10016 innermode = GET_MODE_INNER (mode);
10017
10018   /* We should probably return 1 if requesting V4DI and we have no DI
10019      but do have V2DI; however, that case is probably very unlikely.  */
10020
10021 /* If we have support for the inner mode, we can safely emulate it.
10022      We may not have V2DI, but we can emulate it with a pair of DIs.  */
10023 return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
10024 }
10025
10026 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
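/* For example, a V4SI VECTOR_CST with elements {1, 2, 3, 4} yields
   roughly (const_vector:V4SI [1 2 3 4]); any missing trailing elements
   are filled with zero.  */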
10027 static rtx
10028 const_vector_from_tree (tree exp)
10029 {
10030 rtvec v;
10031 int units, i;
10032 tree link, elt;
10033 enum machine_mode inner, mode;
10034
10035 mode = TYPE_MODE (TREE_TYPE (exp));
10036
10037 if (initializer_zerop (exp))
10038 return CONST0_RTX (mode);
10039
10040 units = GET_MODE_NUNITS (mode);
10041 inner = GET_MODE_INNER (mode);
10042
10043 v = rtvec_alloc (units);
10044
10045 link = TREE_VECTOR_CST_ELTS (exp);
10046 for (i = 0; link; link = TREE_CHAIN (link), ++i)
10047 {
10048 elt = TREE_VALUE (link);
10049
10050 if (TREE_CODE (elt) == REAL_CST)
10051 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
10052 inner);
10053 else
10054 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
10055 TREE_INT_CST_HIGH (elt),
10056 inner);
10057 }
10058
10059 /* Initialize remaining elements to 0. */
10060 for (; i < units; ++i)
10061 RTVEC_ELT (v, i) = CONST0_RTX (inner);
10062
10063 return gen_rtx_raw_CONST_VECTOR (mode, v);
10064 }
10065 #include "gt-expr.h"