1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
21 02110-1301, USA. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "machmode.h"
28 #include "real.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "insn-attr.h"
38 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
39 #include "expr.h"
40 #include "optabs.h"
41 #include "libfuncs.h"
42 #include "recog.h"
43 #include "reload.h"
44 #include "output.h"
45 #include "typeclass.h"
46 #include "toplev.h"
47 #include "ggc.h"
48 #include "langhooks.h"
49 #include "intl.h"
50 #include "tm_p.h"
51 #include "tree-iterator.h"
52 #include "tree-pass.h"
53 #include "tree-flow.h"
54 #include "target.h"
55 #include "timevar.h"
56
57 /* Decide whether a function's arguments should be processed
58 from first to last or from last to first.
59
60    They should be processed from last to first if the stack and args
61    grow in opposite directions, but only if we have push insns.  */
62
63 #ifdef PUSH_ROUNDING
64
65 #ifndef PUSH_ARGS_REVERSED
66 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
67 #define PUSH_ARGS_REVERSED /* If it's last to first. */
68 #endif
69 #endif
70
71 #endif
72
73 #ifndef STACK_PUSH_CODE
74 #ifdef STACK_GROWS_DOWNWARD
75 #define STACK_PUSH_CODE PRE_DEC
76 #else
77 #define STACK_PUSH_CODE PRE_INC
78 #endif
79 #endif
80
81
82 /* If this is nonzero, we do not bother generating VOLATILE
83 around volatile memory references, and we are willing to
84 output indirect addresses. If cse is to follow, we reject
85 indirect addresses so a useful potential cse is generated;
86 if it is used only once, instruction combination will produce
87 the same indirect address eventually. */
88 int cse_not_expected;
89
90 /* This structure is used by move_by_pieces to describe the move to
91 be performed. */
92 struct move_by_pieces
93 {
94 rtx to;
95 rtx to_addr;
96 int autinc_to;
97 int explicit_inc_to;
98 rtx from;
99 rtx from_addr;
100 int autinc_from;
101 int explicit_inc_from;
102 unsigned HOST_WIDE_INT len;
103 HOST_WIDE_INT offset;
104 int reverse;
105 };
106
107 /* This structure is used by store_by_pieces to describe the clear to
108 be performed. */
109
110 struct store_by_pieces
111 {
112 rtx to;
113 rtx to_addr;
114 int autinc_to;
115 int explicit_inc_to;
116 unsigned HOST_WIDE_INT len;
117 HOST_WIDE_INT offset;
118 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
119 void *constfundata;
120 int reverse;
121 };
122
123 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
124 unsigned int,
125 unsigned int);
126 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
127 struct move_by_pieces *);
128 static bool block_move_libcall_safe_for_call_parm (void);
129 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT);
130 static tree emit_block_move_libcall_fn (int);
131 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
132 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
133 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
134 static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
135 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
136 struct store_by_pieces *);
137 static tree clear_storage_libcall_fn (int);
138 static rtx compress_float_constant (rtx, rtx);
139 static rtx get_subtarget (rtx);
140 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
141 HOST_WIDE_INT, enum machine_mode,
142 tree, tree, int, int);
143 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
144 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
145 tree, tree, int);
146
147 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
148
149 static int is_aligning_offset (tree, tree);
150 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
151 enum expand_modifier);
152 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
153 static rtx do_store_flag (tree, rtx, enum machine_mode, int);
154 #ifdef PUSH_ROUNDING
155 static void emit_single_push_insn (enum machine_mode, rtx, tree);
156 #endif
157 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
158 static rtx const_vector_from_tree (tree);
159 static void write_complex_part (rtx, rtx, bool);
160
161 /* Record for each mode whether we can move a register directly to or
162 from an object of that mode in memory. If we can't, we won't try
163 to use that mode directly when accessing a field of that mode. */
164
165 static char direct_load[NUM_MACHINE_MODES];
166 static char direct_store[NUM_MACHINE_MODES];
167
168 /* Record for each mode whether we can float-extend from memory. */
169
170 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
171
172 /* This macro is used to determine whether move_by_pieces should be called
173 to perform a structure copy. */
174 #ifndef MOVE_BY_PIECES_P
175 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
176 (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
177 < (unsigned int) MOVE_RATIO)
178 #endif
179
180 /* This macro is used to determine whether clear_by_pieces should be
181 called to clear storage. */
182 #ifndef CLEAR_BY_PIECES_P
183 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
184 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
185 < (unsigned int) CLEAR_RATIO)
186 #endif
187
188 /* This macro is used to determine whether store_by_pieces should be
189 called to "memset" storage with byte values other than zero, or
190 to "memcpy" storage when the source is a constant string. */
191 #ifndef STORE_BY_PIECES_P
192 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
193 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
194 < (unsigned int) MOVE_RATIO)
195 #endif
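
/* Illustrative example of how these heuristics play out (the exact
   ratios are target-defined, so the numbers here are hypothetical): on a
   target with MOVE_MAX_PIECES == 4, a 16-byte copy known to be 32-bit
   aligned costs four SImode moves, so MOVE_BY_PIECES_P (16, 32) is true
   exactly when 4 < MOVE_RATIO; otherwise the copy is left to a movmem
   pattern or a memcpy library call.  */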
196
197 /* This array records the insn_code of insns to perform block moves. */
198 enum insn_code movmem_optab[NUM_MACHINE_MODES];
199
200 /* This array records the insn_code of insns to perform block sets. */
201 enum insn_code setmem_optab[NUM_MACHINE_MODES];
202
203 /* These arrays record the insn_code of three different kinds of insns
204 to perform block compares. */
205 enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
206 enum insn_code cmpstrn_optab[NUM_MACHINE_MODES];
207 enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
208
209 /* Synchronization primitives. */
210 enum insn_code sync_add_optab[NUM_MACHINE_MODES];
211 enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
212 enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
213 enum insn_code sync_and_optab[NUM_MACHINE_MODES];
214 enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
215 enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
216 enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
217 enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
218 enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
219 enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
220 enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
221 enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
222 enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
223 enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
224 enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
225 enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
226 enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
227 enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
228 enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
229 enum insn_code sync_compare_and_swap_cc[NUM_MACHINE_MODES];
230 enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
231 enum insn_code sync_lock_release[NUM_MACHINE_MODES];
232
233 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
234
235 #ifndef SLOW_UNALIGNED_ACCESS
236 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
237 #endif
238 \f
239 /* This is run once per compilation to set up which modes can be used
240 directly in memory and to initialize the block move optab. */
241
242 void
243 init_expr_once (void)
244 {
245 rtx insn, pat;
246 enum machine_mode mode;
247 int num_clobbers;
248 rtx mem, mem1;
249 rtx reg;
250
251 /* Try indexing by frame ptr and try by stack ptr.
252 It is known that on the Convex the stack ptr isn't a valid index.
253 With luck, one or the other is valid on any machine. */
254 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
255 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
256
257 /* A scratch register we can modify in-place below to avoid
258 useless RTL allocations. */
259 reg = gen_rtx_REG (VOIDmode, -1);
260
261 insn = rtx_alloc (INSN);
262 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
263 PATTERN (insn) = pat;
264
265 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
266 mode = (enum machine_mode) ((int) mode + 1))
267 {
268 int regno;
269
270 direct_load[(int) mode] = direct_store[(int) mode] = 0;
271 PUT_MODE (mem, mode);
272 PUT_MODE (mem1, mode);
273 PUT_MODE (reg, mode);
274
275 /* See if there is some register that can be used in this mode and
276 directly loaded or stored from memory. */
277
278 if (mode != VOIDmode && mode != BLKmode)
279 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
280 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
281 regno++)
282 {
283 if (! HARD_REGNO_MODE_OK (regno, mode))
284 continue;
285
286 REGNO (reg) = regno;
287
288 SET_SRC (pat) = mem;
289 SET_DEST (pat) = reg;
290 if (recog (pat, insn, &num_clobbers) >= 0)
291 direct_load[(int) mode] = 1;
292
293 SET_SRC (pat) = mem1;
294 SET_DEST (pat) = reg;
295 if (recog (pat, insn, &num_clobbers) >= 0)
296 direct_load[(int) mode] = 1;
297
298 SET_SRC (pat) = reg;
299 SET_DEST (pat) = mem;
300 if (recog (pat, insn, &num_clobbers) >= 0)
301 direct_store[(int) mode] = 1;
302
303 SET_SRC (pat) = reg;
304 SET_DEST (pat) = mem1;
305 if (recog (pat, insn, &num_clobbers) >= 0)
306 direct_store[(int) mode] = 1;
307 }
308 }
309
310 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
311
312 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
313 mode = GET_MODE_WIDER_MODE (mode))
314 {
315 enum machine_mode srcmode;
316 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
317 srcmode = GET_MODE_WIDER_MODE (srcmode))
318 {
319 enum insn_code ic;
320
321 ic = can_extend_p (mode, srcmode, 0);
322 if (ic == CODE_FOR_nothing)
323 continue;
324
325 PUT_MODE (mem, srcmode);
326
327 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
328 float_extend_from_mem[mode][srcmode] = true;
329 }
330 }
331 }
332
333 /* This is run at the start of compiling a function. */
334
335 void
336 init_expr (void)
337 {
338 cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
339 }
340 \f
341 /* Copy data from FROM to TO, where the machine modes are not the same.
342 Both modes may be integer, or both may be floating.
343 UNSIGNEDP should be nonzero if FROM is an unsigned type.
344 This causes zero-extension instead of sign-extension. */
345
346 void
347 convert_move (rtx to, rtx from, int unsignedp)
348 {
349 enum machine_mode to_mode = GET_MODE (to);
350 enum machine_mode from_mode = GET_MODE (from);
351 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
352 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
353 enum insn_code code;
354 rtx libcall;
355
356 /* rtx code for making an equivalent value. */
357 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
358 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
359
360
361 gcc_assert (to_real == from_real);
362
363 /* If the source and destination are already the same, then there's
364 nothing to do. */
365 if (to == from)
366 return;
367
368 /* If FROM is a SUBREG that indicates that we have already done at least
369 the required extension, strip it. We don't handle such SUBREGs as
370 TO here. */
371
372 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
373 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
374 >= GET_MODE_SIZE (to_mode))
375 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
376 from = gen_lowpart (to_mode, from), from_mode = to_mode;
377
378 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
379
380 if (to_mode == from_mode
381 || (from_mode == VOIDmode && CONSTANT_P (from)))
382 {
383 emit_move_insn (to, from);
384 return;
385 }
386
387 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
388 {
389 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
390
391 if (VECTOR_MODE_P (to_mode))
392 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
393 else
394 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
395
396 emit_move_insn (to, from);
397 return;
398 }
399
400 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
401 {
402 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
403 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
404 return;
405 }
406
407 if (to_real)
408 {
409 rtx value, insns;
410 convert_optab tab;
411
412 gcc_assert ((GET_MODE_PRECISION (from_mode)
413 != GET_MODE_PRECISION (to_mode))
414 || (DECIMAL_FLOAT_MODE_P (from_mode)
415 != DECIMAL_FLOAT_MODE_P (to_mode)));
416
417 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
418 /* Conversion between decimal float and binary float, same size. */
419 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
420 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
421 tab = sext_optab;
422 else
423 tab = trunc_optab;
424
425 /* Try converting directly if the insn is supported. */
426
427 code = tab->handlers[to_mode][from_mode].insn_code;
428 if (code != CODE_FOR_nothing)
429 {
430 emit_unop_insn (code, to, from,
431 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
432 return;
433 }
434
435 /* Otherwise use a libcall. */
436 libcall = tab->handlers[to_mode][from_mode].libfunc;
437
438 /* Is this conversion implemented yet? */
439 gcc_assert (libcall);
440
441 start_sequence ();
442 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
443 1, from, from_mode);
444 insns = get_insns ();
445 end_sequence ();
446 emit_libcall_block (insns, to, value,
447 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
448 from)
449 : gen_rtx_FLOAT_EXTEND (to_mode, from));
450 return;
451 }
452
453 /* Handle pointer conversion. */ /* SPEE 900220. */
454 /* Targets are expected to provide conversion insns between PxImode and
455 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
456 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
457 {
458 enum machine_mode full_mode
459 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
460
461 gcc_assert (trunc_optab->handlers[to_mode][full_mode].insn_code
462 != CODE_FOR_nothing);
463
464 if (full_mode != from_mode)
465 from = convert_to_mode (full_mode, from, unsignedp);
466 emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
467 to, from, UNKNOWN);
468 return;
469 }
470 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
471 {
472 rtx new_from;
473 enum machine_mode full_mode
474 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
475
476 gcc_assert (sext_optab->handlers[full_mode][from_mode].insn_code
477 != CODE_FOR_nothing);
478
479 if (to_mode == full_mode)
480 {
481 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
482 to, from, UNKNOWN);
483 return;
484 }
485
486 new_from = gen_reg_rtx (full_mode);
487 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
488 new_from, from, UNKNOWN);
489
490 /* else proceed to integer conversions below. */
491 from_mode = full_mode;
492 from = new_from;
493 }
494
495 /* Now both modes are integers. */
496
497 /* Handle expanding beyond a word. */
498 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
499 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
500 {
501 rtx insns;
502 rtx lowpart;
503 rtx fill_value;
504 rtx lowfrom;
505 int i;
506 enum machine_mode lowpart_mode;
507 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
508
509 /* Try converting directly if the insn is supported. */
510 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
511 != CODE_FOR_nothing)
512 {
513 /* If FROM is a SUBREG, put it into a register. Do this
514 so that we always generate the same set of insns for
515 better cse'ing; if an intermediate assignment occurred,
516 we won't be doing the operation directly on the SUBREG. */
517 if (optimize > 0 && GET_CODE (from) == SUBREG)
518 from = force_reg (from_mode, from);
519 emit_unop_insn (code, to, from, equiv_code);
520 return;
521 }
522 /* Next, try converting via full word. */
523 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
524 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
525 != CODE_FOR_nothing))
526 {
527 if (REG_P (to))
528 {
529 if (reg_overlap_mentioned_p (to, from))
530 from = force_reg (from_mode, from);
531 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
532 }
533 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
534 emit_unop_insn (code, to,
535 gen_lowpart (word_mode, to), equiv_code);
536 return;
537 }
538
539 /* No special multiword conversion insn; do it by hand. */
540 start_sequence ();
541
542 /* Since we will turn this into a no conflict block, we must ensure
543 that the source does not overlap the target. */
544
545 if (reg_overlap_mentioned_p (to, from))
546 from = force_reg (from_mode, from);
547
548 /* Get a copy of FROM widened to a word, if necessary. */
549 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
550 lowpart_mode = word_mode;
551 else
552 lowpart_mode = from_mode;
553
554 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
555
556 lowpart = gen_lowpart (lowpart_mode, to);
557 emit_move_insn (lowpart, lowfrom);
558
559 /* Compute the value to put in each remaining word. */
560 if (unsignedp)
561 fill_value = const0_rtx;
562 else
563 {
564 #ifdef HAVE_slt
565 if (HAVE_slt
566 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
567 && STORE_FLAG_VALUE == -1)
568 {
569 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
570 lowpart_mode, 0);
571 fill_value = gen_reg_rtx (word_mode);
572 emit_insn (gen_slt (fill_value));
573 }
574 else
575 #endif
576 {
577 fill_value
578 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
579 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
580 NULL_RTX, 0);
581 fill_value = convert_to_mode (word_mode, fill_value, 1);
582 }
583 }
584
585 /* Fill the remaining words. */
586 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
587 {
588 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
589 rtx subword = operand_subword (to, index, 1, to_mode);
590
591 gcc_assert (subword);
592
593 if (fill_value != subword)
594 emit_move_insn (subword, fill_value);
595 }
596
597 insns = get_insns ();
598 end_sequence ();
599
600 emit_no_conflict_block (insns, to, from, NULL_RTX,
601 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
602 return;
603 }
604
605 /* Truncating multi-word to a word or less. */
606 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
607 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
608 {
609 if (!((MEM_P (from)
610 && ! MEM_VOLATILE_P (from)
611 && direct_load[(int) to_mode]
612 && ! mode_dependent_address_p (XEXP (from, 0)))
613 || REG_P (from)
614 || GET_CODE (from) == SUBREG))
615 from = force_reg (from_mode, from);
616 convert_move (to, gen_lowpart (word_mode, from), 0);
617 return;
618 }
619
620 /* Now follow all the conversions between integers
621 no more than a word long. */
622
623 /* For truncation, usually we can just refer to FROM in a narrower mode. */
624 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
625 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
626 GET_MODE_BITSIZE (from_mode)))
627 {
628 if (!((MEM_P (from)
629 && ! MEM_VOLATILE_P (from)
630 && direct_load[(int) to_mode]
631 && ! mode_dependent_address_p (XEXP (from, 0)))
632 || REG_P (from)
633 || GET_CODE (from) == SUBREG))
634 from = force_reg (from_mode, from);
635 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
636 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
637 from = copy_to_reg (from);
638 emit_move_insn (to, gen_lowpart (to_mode, from));
639 return;
640 }
641
642 /* Handle extension. */
643 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
644 {
645 /* Convert directly if that works. */
646 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
647 != CODE_FOR_nothing)
648 {
649 emit_unop_insn (code, to, from, equiv_code);
650 return;
651 }
652 else
653 {
654 enum machine_mode intermediate;
655 rtx tmp;
656 tree shift_amount;
657
658 /* Search for a mode to convert via. */
659 for (intermediate = from_mode; intermediate != VOIDmode;
660 intermediate = GET_MODE_WIDER_MODE (intermediate))
661 if (((can_extend_p (to_mode, intermediate, unsignedp)
662 != CODE_FOR_nothing)
663 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
664 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
665 GET_MODE_BITSIZE (intermediate))))
666 && (can_extend_p (intermediate, from_mode, unsignedp)
667 != CODE_FOR_nothing))
668 {
669 convert_move (to, convert_to_mode (intermediate, from,
670 unsignedp), unsignedp);
671 return;
672 }
673
674 /* No suitable intermediate mode.
675 Generate what we need with shifts. */
676 shift_amount = build_int_cst (NULL_TREE,
677 GET_MODE_BITSIZE (to_mode)
678 - GET_MODE_BITSIZE (from_mode));
679 from = gen_lowpart (to_mode, force_reg (from_mode, from));
680 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
681 to, unsignedp);
682 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
683 to, unsignedp);
684 if (tmp != to)
685 emit_move_insn (to, tmp);
686 return;
687 }
688 }
689
690 /* Support special truncate insns for certain modes. */
691 if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
692 {
693 emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
694 to, from, UNKNOWN);
695 return;
696 }
697
698 /* Handle truncation of volatile memrefs, and so on;
699 the things that couldn't be truncated directly,
700 and for which there was no special instruction.
701
702 ??? Code above formerly short-circuited this, for most integer
703 mode pairs, with a force_reg in from_mode followed by a recursive
704 call to this routine. Appears always to have been wrong. */
705 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
706 {
707 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
708 emit_move_insn (to, temp);
709 return;
710 }
711
712 /* Mode combination is not recognized. */
713 gcc_unreachable ();
714 }
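
/* Usage sketch: a hypothetical caller that widens a SImode value with
   convert_move.  The helper and variable names below are illustrative
   only and do not appear elsewhere in GCC.  */
#if 0
static rtx
example_widen_si_to_di (rtx src_si)
{
  /* SRC_SI is assumed to hold a SImode value.  convert_move picks a
     direct extension insn if the target has one, otherwise it goes
     through an intermediate mode, a multiword sequence, or shifts.  */
  rtx dst = gen_reg_rtx (DImode);
  convert_move (dst, src_si, /*unsignedp=*/1);	/* zero-extend */
  return dst;
}
#endif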
715
716 /* Return an rtx for a value that would result
717 from converting X to mode MODE.
718 Both X and MODE may be floating, or both integer.
719 UNSIGNEDP is nonzero if X is an unsigned value.
720 This can be done by referring to a part of X in place
721 or by copying to a new temporary with conversion. */
722
723 rtx
724 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
725 {
726 return convert_modes (mode, VOIDmode, x, unsignedp);
727 }
728
729 /* Return an rtx for a value that would result
730 from converting X from mode OLDMODE to mode MODE.
731 Both modes may be floating, or both integer.
732 UNSIGNEDP is nonzero if X is an unsigned value.
733
734 This can be done by referring to a part of X in place
735 or by copying to a new temporary with conversion.
736
737 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
738
739 rtx
740 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
741 {
742 rtx temp;
743
744 /* If FROM is a SUBREG that indicates that we have already done at least
745 the required extension, strip it. */
746
747 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
748 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
749 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
750 x = gen_lowpart (mode, x);
751
752 if (GET_MODE (x) != VOIDmode)
753 oldmode = GET_MODE (x);
754
755 if (mode == oldmode)
756 return x;
757
758 /* There is one case that we must handle specially: If we are converting
759 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
760 we are to interpret the constant as unsigned, gen_lowpart will do
761    the wrong thing if the constant appears negative.  What we want to do is
762 make the high-order word of the constant zero, not all ones. */
763
764 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
765 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
766 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
767 {
768 HOST_WIDE_INT val = INTVAL (x);
769
770 if (oldmode != VOIDmode
771 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
772 {
773 int width = GET_MODE_BITSIZE (oldmode);
774
775 /* We need to zero extend VAL. */
776 val &= ((HOST_WIDE_INT) 1 << width) - 1;
777 }
778
779 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
780 }
781
782 /* We can do this with a gen_lowpart if both desired and current modes
783 are integer, and this is either a constant integer, a register, or a
784 non-volatile MEM. Except for the constant case where MODE is no
785 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
786
787 if ((GET_CODE (x) == CONST_INT
788 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
789 || (GET_MODE_CLASS (mode) == MODE_INT
790 && GET_MODE_CLASS (oldmode) == MODE_INT
791 && (GET_CODE (x) == CONST_DOUBLE
792 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
793 && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
794 && direct_load[(int) mode])
795 || (REG_P (x)
796 && (! HARD_REGISTER_P (x)
797 || HARD_REGNO_MODE_OK (REGNO (x), mode))
798 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
799 GET_MODE_BITSIZE (GET_MODE (x)))))))))
800 {
801       /* ??? If we don't know OLDMODE, we have to assume here that
802 X does not need sign- or zero-extension. This may not be
803 the case, but it's the best we can do. */
804 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
805 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
806 {
807 HOST_WIDE_INT val = INTVAL (x);
808 int width = GET_MODE_BITSIZE (oldmode);
809
810 /* We must sign or zero-extend in this case. Start by
811 zero-extending, then sign extend if we need to. */
812 val &= ((HOST_WIDE_INT) 1 << width) - 1;
813 if (! unsignedp
814 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
815 val |= (HOST_WIDE_INT) (-1) << width;
816
817 return gen_int_mode (val, mode);
818 }
819
820 return gen_lowpart (mode, x);
821 }
822
823   /* Converting an integer constant into a vector mode is always
824      equivalent to a subreg operation.  */
825 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
826 {
827 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
828 return simplify_gen_subreg (mode, x, oldmode, 0);
829 }
830
831 temp = gen_reg_rtx (mode);
832 convert_move (temp, x, unsignedp);
833 return temp;
834 }
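
/* Worked example of the constant-handling path above (illustrative):
   convert_modes (SImode, QImode, GEN_INT (-1), 1) treats the constant as
   the unsigned QImode value 0xff and returns (const_int 255), while the
   same call with UNSIGNEDP == 0 sign-extends and simply returns
   (const_int -1).  */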
835 \f
836 /* STORE_MAX_PIECES is the number of bytes at a time that we can
837 store efficiently. Due to internal GCC limitations, this is
838 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
839 for an immediate constant. */
840
841 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
842
843 /* Determine whether the LEN bytes can be moved by using several move
844 instructions. Return nonzero if a call to move_by_pieces should
845 succeed. */
846
847 int
848 can_move_by_pieces (unsigned HOST_WIDE_INT len,
849 unsigned int align ATTRIBUTE_UNUSED)
850 {
851 return MOVE_BY_PIECES_P (len, align);
852 }
853
854 /* Generate several move instructions to copy LEN bytes from block FROM to
855 block TO. (These are MEM rtx's with BLKmode).
856
857 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
858 used to push FROM to the stack.
859
860 ALIGN is maximum stack alignment we can assume.
861
862    If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
863    mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
864    stpcpy.  */
865
866 rtx
867 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
868 unsigned int align, int endp)
869 {
870 struct move_by_pieces data;
871 rtx to_addr, from_addr = XEXP (from, 0);
872 unsigned int max_size = MOVE_MAX_PIECES + 1;
873 enum machine_mode mode = VOIDmode, tmode;
874 enum insn_code icode;
875
876 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
877
878 data.offset = 0;
879 data.from_addr = from_addr;
880 if (to)
881 {
882 to_addr = XEXP (to, 0);
883 data.to = to;
884 data.autinc_to
885 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
886 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
887 data.reverse
888 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
889 }
890 else
891 {
892 to_addr = NULL_RTX;
893 data.to = NULL_RTX;
894 data.autinc_to = 1;
895 #ifdef STACK_GROWS_DOWNWARD
896 data.reverse = 1;
897 #else
898 data.reverse = 0;
899 #endif
900 }
901 data.to_addr = to_addr;
902 data.from = from;
903 data.autinc_from
904 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
905 || GET_CODE (from_addr) == POST_INC
906 || GET_CODE (from_addr) == POST_DEC);
907
908 data.explicit_inc_from = 0;
909 data.explicit_inc_to = 0;
910 if (data.reverse) data.offset = len;
911 data.len = len;
912
913 /* If copying requires more than two move insns,
914 copy addresses to registers (to make displacements shorter)
915 and use post-increment if available. */
916 if (!(data.autinc_from && data.autinc_to)
917 && move_by_pieces_ninsns (len, align, max_size) > 2)
918 {
919 /* Find the mode of the largest move... */
920 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
921 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
922 if (GET_MODE_SIZE (tmode) < max_size)
923 mode = tmode;
924
925 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
926 {
927 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
928 data.autinc_from = 1;
929 data.explicit_inc_from = -1;
930 }
931 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
932 {
933 data.from_addr = copy_addr_to_reg (from_addr);
934 data.autinc_from = 1;
935 data.explicit_inc_from = 1;
936 }
937 if (!data.autinc_from && CONSTANT_P (from_addr))
938 data.from_addr = copy_addr_to_reg (from_addr);
939 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
940 {
941 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
942 data.autinc_to = 1;
943 data.explicit_inc_to = -1;
944 }
945 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
946 {
947 data.to_addr = copy_addr_to_reg (to_addr);
948 data.autinc_to = 1;
949 data.explicit_inc_to = 1;
950 }
951 if (!data.autinc_to && CONSTANT_P (to_addr))
952 data.to_addr = copy_addr_to_reg (to_addr);
953 }
954
955 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
956 if (align >= GET_MODE_ALIGNMENT (tmode))
957 align = GET_MODE_ALIGNMENT (tmode);
958 else
959 {
960 enum machine_mode xmode;
961
962 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
963 tmode != VOIDmode;
964 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
965 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
966 || SLOW_UNALIGNED_ACCESS (tmode, align))
967 break;
968
969 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
970 }
971
972 /* First move what we can in the largest integer mode, then go to
973 successively smaller modes. */
974
975 while (max_size > 1)
976 {
977 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
978 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
979 if (GET_MODE_SIZE (tmode) < max_size)
980 mode = tmode;
981
982 if (mode == VOIDmode)
983 break;
984
985 icode = mov_optab->handlers[(int) mode].insn_code;
986 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
987 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
988
989 max_size = GET_MODE_SIZE (mode);
990 }
991
992 /* The code above should have handled everything. */
993 gcc_assert (!data.len);
994
995 if (endp)
996 {
997 rtx to1;
998
999 gcc_assert (!data.reverse);
1000 if (data.autinc_to)
1001 {
1002 if (endp == 2)
1003 {
1004 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1005 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1006 else
1007 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
1008 -1));
1009 }
1010 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1011 data.offset);
1012 }
1013 else
1014 {
1015 if (endp == 2)
1016 --data.offset;
1017 to1 = adjust_address (data.to, QImode, data.offset);
1018 }
1019 return to1;
1020 }
1021 else
1022 return data.to;
1023 }
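
/* Usage sketch: a hypothetical caller copying a small fixed-size block.
   DST and SRC are assumed to be BLKmode MEMs; the helper name is
   illustrative only.  */
#if 0
static void
example_copy_small_block (rtx dst, rtx src)
{
  unsigned int align = MIN (MEM_ALIGN (dst), MEM_ALIGN (src));

  /* Only expand inline when the cost heuristic says it is worthwhile.
     ENDP == 0 means the caller does not need the updated address.  */
  if (can_move_by_pieces (8, align))
    move_by_pieces (dst, src, 8, align, 0);
}
#endif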
1024
1025 /* Return number of insns required to move L bytes by pieces.
1026 ALIGN (in bits) is maximum alignment we can assume. */
1027
1028 static unsigned HOST_WIDE_INT
1029 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
1030 unsigned int max_size)
1031 {
1032 unsigned HOST_WIDE_INT n_insns = 0;
1033 enum machine_mode tmode;
1034
1035 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
1036 if (align >= GET_MODE_ALIGNMENT (tmode))
1037 align = GET_MODE_ALIGNMENT (tmode);
1038 else
1039 {
1040 enum machine_mode tmode, xmode;
1041
1042 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
1043 tmode != VOIDmode;
1044 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
1045 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
1046 || SLOW_UNALIGNED_ACCESS (tmode, align))
1047 break;
1048
1049 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
1050 }
1051
1052 while (max_size > 1)
1053 {
1054 enum machine_mode mode = VOIDmode;
1055 enum insn_code icode;
1056
1057 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1058 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1059 if (GET_MODE_SIZE (tmode) < max_size)
1060 mode = tmode;
1061
1062 if (mode == VOIDmode)
1063 break;
1064
1065 icode = mov_optab->handlers[(int) mode].insn_code;
1066 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1067 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1068
1069 max_size = GET_MODE_SIZE (mode);
1070 }
1071
1072 gcc_assert (!l);
1073 return n_insns;
1074 }
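
/* Worked example (assuming a 32-bit target with MOVE_MAX_PIECES == 4 and
   no slow unaligned accesses at 32-bit alignment): for L == 10 and
   ALIGN == 32 the loop above counts two SImode moves (leaving 2 bytes)
   plus one HImode move, so the function returns 3.  */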
1075
1076 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1077 with move instructions for mode MODE. GENFUN is the gen_... function
1078 to make a move insn for that mode. DATA has all the other info. */
1079
1080 static void
1081 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1082 struct move_by_pieces *data)
1083 {
1084 unsigned int size = GET_MODE_SIZE (mode);
1085 rtx to1 = NULL_RTX, from1;
1086
1087 while (data->len >= size)
1088 {
1089 if (data->reverse)
1090 data->offset -= size;
1091
1092 if (data->to)
1093 {
1094 if (data->autinc_to)
1095 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1096 data->offset);
1097 else
1098 to1 = adjust_address (data->to, mode, data->offset);
1099 }
1100
1101 if (data->autinc_from)
1102 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1103 data->offset);
1104 else
1105 from1 = adjust_address (data->from, mode, data->offset);
1106
1107 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1108 emit_insn (gen_add2_insn (data->to_addr,
1109 GEN_INT (-(HOST_WIDE_INT)size)));
1110 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1111 emit_insn (gen_add2_insn (data->from_addr,
1112 GEN_INT (-(HOST_WIDE_INT)size)));
1113
1114 if (data->to)
1115 emit_insn ((*genfun) (to1, from1));
1116 else
1117 {
1118 #ifdef PUSH_ROUNDING
1119 emit_single_push_insn (mode, from1, NULL);
1120 #else
1121 gcc_unreachable ();
1122 #endif
1123 }
1124
1125 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1126 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1127 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1128 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1129
1130 if (! data->reverse)
1131 data->offset += size;
1132
1133 data->len -= size;
1134 }
1135 }
1136 \f
1137 /* Emit code to move a block Y to a block X. This may be done with
1138 string-move instructions, with multiple scalar move instructions,
1139 or with a library call.
1140
1141 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1142 SIZE is an rtx that says how long they are.
1143 ALIGN is the maximum alignment we can assume they have.
1144 METHOD describes what kind of copy this is, and what mechanisms may be used.
1145
1146 Return the address of the new block, if memcpy is called and returns it,
1147 0 otherwise. */
1148
1149 rtx
1150 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1151 unsigned int expected_align, HOST_WIDE_INT expected_size)
1152 {
1153 bool may_use_call;
1154 rtx retval = 0;
1155 unsigned int align;
1156
1157 switch (method)
1158 {
1159 case BLOCK_OP_NORMAL:
1160 case BLOCK_OP_TAILCALL:
1161 may_use_call = true;
1162 break;
1163
1164 case BLOCK_OP_CALL_PARM:
1165 may_use_call = block_move_libcall_safe_for_call_parm ();
1166
1167 /* Make inhibit_defer_pop nonzero around the library call
1168 to force it to pop the arguments right away. */
1169 NO_DEFER_POP;
1170 break;
1171
1172 case BLOCK_OP_NO_LIBCALL:
1173 may_use_call = false;
1174 break;
1175
1176 default:
1177 gcc_unreachable ();
1178 }
1179
1180 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1181
1182 gcc_assert (MEM_P (x));
1183 gcc_assert (MEM_P (y));
1184 gcc_assert (size);
1185
1186 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1187 block copy is more efficient for other large modes, e.g. DCmode. */
1188 x = adjust_address (x, BLKmode, 0);
1189 y = adjust_address (y, BLKmode, 0);
1190
1191 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1192 can be incorrect is coming from __builtin_memcpy. */
1193 if (GET_CODE (size) == CONST_INT)
1194 {
1195 if (INTVAL (size) == 0)
1196 return 0;
1197
1198 x = shallow_copy_rtx (x);
1199 y = shallow_copy_rtx (y);
1200 set_mem_size (x, size);
1201 set_mem_size (y, size);
1202 }
1203
1204 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1205 move_by_pieces (x, y, INTVAL (size), align, 0);
1206 else if (emit_block_move_via_movmem (x, y, size, align,
1207 expected_align, expected_size))
1208 ;
1209 else if (may_use_call)
1210 retval = emit_block_move_via_libcall (x, y, size,
1211 method == BLOCK_OP_TAILCALL);
1212 else
1213 emit_block_move_via_loop (x, y, size, align);
1214
1215 if (method == BLOCK_OP_CALL_PARM)
1216 OK_DEFER_POP;
1217
1218 return retval;
1219 }
1220
1221 rtx
1222 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1223 {
1224 return emit_block_move_hints (x, y, size, method, 0, -1);
1225 }
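
/* Usage sketch: a hypothetical caller expanding a plain aggregate copy.
   X and Y are assumed to be BLKmode MEMs and SIZE an rtx byte count.  */
#if 0
static void
example_block_copy (rtx x, rtx y, rtx size)
{
  /* BLOCK_OP_NORMAL lets emit_block_move choose freely between
     move_by_pieces, a movmem pattern and a memcpy libcall.  */
  emit_block_move (x, y, size, BLOCK_OP_NORMAL);
}
#endif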
1226
1227 /* A subroutine of emit_block_move. Returns true if calling the
1228 block move libcall will not clobber any parameters which may have
1229 already been placed on the stack. */
1230
1231 static bool
1232 block_move_libcall_safe_for_call_parm (void)
1233 {
1234 /* If arguments are pushed on the stack, then they're safe. */
1235 if (PUSH_ARGS)
1236 return true;
1237
1238 /* If registers go on the stack anyway, any argument is sure to clobber
1239 an outgoing argument. */
1240 #if defined (REG_PARM_STACK_SPACE)
1241 if (OUTGOING_REG_PARM_STACK_SPACE)
1242 {
1243 tree fn;
1244 fn = emit_block_move_libcall_fn (false);
1245 if (REG_PARM_STACK_SPACE (fn) != 0)
1246 return false;
1247 }
1248 #endif
1249
1250 /* If any argument goes in memory, then it might clobber an outgoing
1251 argument. */
1252 {
1253 CUMULATIVE_ARGS args_so_far;
1254 tree fn, arg;
1255
1256 fn = emit_block_move_libcall_fn (false);
1257 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
1258
1259 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1260 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1261 {
1262 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1263 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1264 if (!tmp || !REG_P (tmp))
1265 return false;
1266 if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
1267 return false;
1268 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1269 }
1270 }
1271 return true;
1272 }
1273
1274 /* A subroutine of emit_block_move. Expand a movmem pattern;
1275 return true if successful. */
1276
1277 static bool
1278 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1279 unsigned int expected_align, HOST_WIDE_INT expected_size)
1280 {
1281 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1282 int save_volatile_ok = volatile_ok;
1283 enum machine_mode mode;
1284
1285 if (expected_align < align)
1286 expected_align = align;
1287
1288 /* Since this is a move insn, we don't care about volatility. */
1289 volatile_ok = 1;
1290
1291 /* Try the most limited insn first, because there's no point
1292 including more than one in the machine description unless
1293 the more limited one has some advantage. */
1294
1295 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1296 mode = GET_MODE_WIDER_MODE (mode))
1297 {
1298 enum insn_code code = movmem_optab[(int) mode];
1299 insn_operand_predicate_fn pred;
1300
1301 if (code != CODE_FOR_nothing
1302 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1303 here because if SIZE is less than the mode mask, as it is
1304 returned by the macro, it will definitely be less than the
1305 actual mode mask. */
1306 && ((GET_CODE (size) == CONST_INT
1307 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1308 <= (GET_MODE_MASK (mode) >> 1)))
1309 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1310 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1311 || (*pred) (x, BLKmode))
1312 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1313 || (*pred) (y, BLKmode))
1314 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1315 || (*pred) (opalign, VOIDmode)))
1316 {
1317 rtx op2;
1318 rtx last = get_last_insn ();
1319 rtx pat;
1320
1321 op2 = convert_to_mode (mode, size, 1);
1322 pred = insn_data[(int) code].operand[2].predicate;
1323 if (pred != 0 && ! (*pred) (op2, mode))
1324 op2 = copy_to_mode_reg (mode, op2);
1325
1326 /* ??? When called via emit_block_move_for_call, it'd be
1327 nice if there were some way to inform the backend, so
1328 that it doesn't fail the expansion because it thinks
1329 emitting the libcall would be more efficient. */
1330
1331 if (insn_data[(int) code].n_operands == 4)
1332 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1333 else
1334 pat = GEN_FCN ((int) code) (x, y, op2, opalign,
1335 GEN_INT (expected_align),
1336 GEN_INT (expected_size));
1337 if (pat)
1338 {
1339 emit_insn (pat);
1340 volatile_ok = save_volatile_ok;
1341 return true;
1342 }
1343 else
1344 delete_insns_since (last);
1345 }
1346 }
1347
1348 volatile_ok = save_volatile_ok;
1349 return false;
1350 }
1351
1352 /* A subroutine of emit_block_move. Expand a call to memcpy.
1353 Return the return value from memcpy, 0 otherwise. */
1354
1355 rtx
1356 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1357 {
1358 rtx dst_addr, src_addr;
1359 tree call_expr, fn, src_tree, dst_tree, size_tree;
1360 enum machine_mode size_mode;
1361 rtx retval;
1362
1363 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1364 pseudos. We can then place those new pseudos into a VAR_DECL and
1365 use them later. */
1366
1367 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1368 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1369
1370 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1371 src_addr = convert_memory_address (ptr_mode, src_addr);
1372
1373 dst_tree = make_tree (ptr_type_node, dst_addr);
1374 src_tree = make_tree (ptr_type_node, src_addr);
1375
1376 size_mode = TYPE_MODE (sizetype);
1377
1378 size = convert_to_mode (size_mode, size, 1);
1379 size = copy_to_mode_reg (size_mode, size);
1380
1381 /* It is incorrect to use the libcall calling conventions to call
1382 memcpy in this context. This could be a user call to memcpy and
1383 the user may wish to examine the return value from memcpy. For
1384 targets where libcalls and normal calls have different conventions
1385 for returning pointers, we could end up generating incorrect code. */
1386
1387 size_tree = make_tree (sizetype, size);
1388
1389 fn = emit_block_move_libcall_fn (true);
1390 call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1391 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1392
1393 retval = expand_normal (call_expr);
1394
1395 return retval;
1396 }
1397
1398 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1399 for the function we use for block copies. The first time FOR_CALL
1400 is true, we call assemble_external. */
1401
1402 static GTY(()) tree block_move_fn;
1403
1404 void
1405 init_block_move_fn (const char *asmspec)
1406 {
1407 if (!block_move_fn)
1408 {
1409 tree args, fn;
1410
1411 fn = get_identifier ("memcpy");
1412 args = build_function_type_list (ptr_type_node, ptr_type_node,
1413 const_ptr_type_node, sizetype,
1414 NULL_TREE);
1415
1416 fn = build_decl (FUNCTION_DECL, fn, args);
1417 DECL_EXTERNAL (fn) = 1;
1418 TREE_PUBLIC (fn) = 1;
1419 DECL_ARTIFICIAL (fn) = 1;
1420 TREE_NOTHROW (fn) = 1;
1421 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1422 DECL_VISIBILITY_SPECIFIED (fn) = 1;
1423
1424 block_move_fn = fn;
1425 }
1426
1427 if (asmspec)
1428 set_user_assembler_name (block_move_fn, asmspec);
1429 }
1430
1431 static tree
1432 emit_block_move_libcall_fn (int for_call)
1433 {
1434 static bool emitted_extern;
1435
1436 if (!block_move_fn)
1437 init_block_move_fn (NULL);
1438
1439 if (for_call && !emitted_extern)
1440 {
1441 emitted_extern = true;
1442 make_decl_rtl (block_move_fn);
1443 assemble_external (block_move_fn);
1444 }
1445
1446 return block_move_fn;
1447 }
1448
1449 /* A subroutine of emit_block_move. Copy the data via an explicit
1450 loop. This is used only when libcalls are forbidden. */
1451 /* ??? It'd be nice to copy in hunks larger than QImode. */
1452
1453 static void
1454 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1455 unsigned int align ATTRIBUTE_UNUSED)
1456 {
1457 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1458 enum machine_mode iter_mode;
1459
1460 iter_mode = GET_MODE (size);
1461 if (iter_mode == VOIDmode)
1462 iter_mode = word_mode;
1463
1464 top_label = gen_label_rtx ();
1465 cmp_label = gen_label_rtx ();
1466 iter = gen_reg_rtx (iter_mode);
1467
1468 emit_move_insn (iter, const0_rtx);
1469
1470 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1471 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1472 do_pending_stack_adjust ();
1473
1474 emit_jump (cmp_label);
1475 emit_label (top_label);
1476
1477 tmp = convert_modes (Pmode, iter_mode, iter, true);
1478 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1479 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1480 x = change_address (x, QImode, x_addr);
1481 y = change_address (y, QImode, y_addr);
1482
1483 emit_move_insn (x, y);
1484
1485 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1486 true, OPTAB_LIB_WIDEN);
1487 if (tmp != iter)
1488 emit_move_insn (iter, tmp);
1489
1490 emit_label (cmp_label);
1491
1492 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1493 true, top_label);
1494 }
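
/* Rough C equivalent of the RTL emitted above (illustrative only; SIZE
   need not be a compile-time constant and the comparison is unsigned):

	iter = 0;
	goto cmp;
      top:
	*((unsigned char *) x_addr + iter) = *((unsigned char *) y_addr + iter);
	iter = iter + 1;
      cmp:
	if (iter < size) goto top;
 */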
1495 \f
1496 /* Copy all or part of a value X into registers starting at REGNO.
1497 The number of registers to be filled is NREGS. */
1498
1499 void
1500 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1501 {
1502 int i;
1503 #ifdef HAVE_load_multiple
1504 rtx pat;
1505 rtx last;
1506 #endif
1507
1508 if (nregs == 0)
1509 return;
1510
1511 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1512 x = validize_mem (force_const_mem (mode, x));
1513
1514 /* See if the machine can do this with a load multiple insn. */
1515 #ifdef HAVE_load_multiple
1516 if (HAVE_load_multiple)
1517 {
1518 last = get_last_insn ();
1519 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1520 GEN_INT (nregs));
1521 if (pat)
1522 {
1523 emit_insn (pat);
1524 return;
1525 }
1526 else
1527 delete_insns_since (last);
1528 }
1529 #endif
1530
1531 for (i = 0; i < nregs; i++)
1532 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1533 operand_subword_force (x, i, mode));
1534 }
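
/* Usage sketch: a hypothetical caller spreading a two-word value over
   consecutive hard registers, e.g. for an argument passed in a register
   pair.  The register number and mode below are illustrative only.  */
#if 0
static void
example_load_arg_regs (rtx x)
{
  /* Copy the two words of the DImode value X into hard registers 4
     and 5 on a 32-bit target, using load_multiple if available.  */
  move_block_to_reg (4, x, 2, DImode);
}
#endif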
1535
1536 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1537 The number of registers to be filled is NREGS. */
1538
1539 void
1540 move_block_from_reg (int regno, rtx x, int nregs)
1541 {
1542 int i;
1543
1544 if (nregs == 0)
1545 return;
1546
1547 /* See if the machine can do this with a store multiple insn. */
1548 #ifdef HAVE_store_multiple
1549 if (HAVE_store_multiple)
1550 {
1551 rtx last = get_last_insn ();
1552 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1553 GEN_INT (nregs));
1554 if (pat)
1555 {
1556 emit_insn (pat);
1557 return;
1558 }
1559 else
1560 delete_insns_since (last);
1561 }
1562 #endif
1563
1564 for (i = 0; i < nregs; i++)
1565 {
1566 rtx tem = operand_subword (x, i, 1, BLKmode);
1567
1568 gcc_assert (tem);
1569
1570 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1571 }
1572 }
1573
1574 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1575 ORIG, where ORIG is a non-consecutive group of registers represented by
1576 a PARALLEL. The clone is identical to the original except in that the
1577 original set of registers is replaced by a new set of pseudo registers.
1578 The new set has the same modes as the original set. */
1579
1580 rtx
1581 gen_group_rtx (rtx orig)
1582 {
1583 int i, length;
1584 rtx *tmps;
1585
1586 gcc_assert (GET_CODE (orig) == PARALLEL);
1587
1588 length = XVECLEN (orig, 0);
1589 tmps = alloca (sizeof (rtx) * length);
1590
1591 /* Skip a NULL entry in first slot. */
1592 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1593
1594 if (i)
1595 tmps[0] = 0;
1596
1597 for (; i < length; i++)
1598 {
1599 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1600 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1601
1602 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1603 }
1604
1605 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1606 }
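
/* Illustrative example: a 16-byte aggregate returned in two DImode hard
   registers might be described by a PARALLEL roughly of the form

	(parallel [(expr_list (reg:DI 3) (const_int 0))
		   (expr_list (reg:DI 4) (const_int 8))])

   gen_group_rtx keeps the byte offsets 0 and 8 but replaces the hard
   registers with fresh DImode pseudos.  The register numbers here are
   hypothetical.  */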
1607
1608 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1609 except that values are placed in TMPS[i], and must later be moved
1610 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1611
1612 static void
1613 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1614 {
1615 rtx src;
1616 int start, i;
1617 enum machine_mode m = GET_MODE (orig_src);
1618
1619 gcc_assert (GET_CODE (dst) == PARALLEL);
1620
1621 if (m != VOIDmode
1622 && !SCALAR_INT_MODE_P (m)
1623 && !MEM_P (orig_src)
1624 && GET_CODE (orig_src) != CONCAT)
1625 {
1626 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1627 if (imode == BLKmode)
1628 src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
1629 else
1630 src = gen_reg_rtx (imode);
1631 if (imode != BLKmode)
1632 src = gen_lowpart (GET_MODE (orig_src), src);
1633 emit_move_insn (src, orig_src);
1634 /* ...and back again. */
1635 if (imode != BLKmode)
1636 src = gen_lowpart (imode, src);
1637 emit_group_load_1 (tmps, dst, src, type, ssize);
1638 return;
1639 }
1640
1641 /* Check for a NULL entry, used to indicate that the parameter goes
1642 both on the stack and in registers. */
1643 if (XEXP (XVECEXP (dst, 0, 0), 0))
1644 start = 0;
1645 else
1646 start = 1;
1647
1648 /* Process the pieces. */
1649 for (i = start; i < XVECLEN (dst, 0); i++)
1650 {
1651 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1652 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1653 unsigned int bytelen = GET_MODE_SIZE (mode);
1654 int shift = 0;
1655
1656 /* Handle trailing fragments that run over the size of the struct. */
1657 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1658 {
1659 /* Arrange to shift the fragment to where it belongs.
1660 extract_bit_field loads to the lsb of the reg. */
1661 if (
1662 #ifdef BLOCK_REG_PADDING
1663 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1664 == (BYTES_BIG_ENDIAN ? upward : downward)
1665 #else
1666 BYTES_BIG_ENDIAN
1667 #endif
1668 )
1669 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1670 bytelen = ssize - bytepos;
1671 gcc_assert (bytelen > 0);
1672 }
1673
1674 /* If we won't be loading directly from memory, protect the real source
1675 from strange tricks we might play; but make sure that the source can
1676 be loaded directly into the destination. */
1677 src = orig_src;
1678 if (!MEM_P (orig_src)
1679 && (!CONSTANT_P (orig_src)
1680 || (GET_MODE (orig_src) != mode
1681 && GET_MODE (orig_src) != VOIDmode)))
1682 {
1683 if (GET_MODE (orig_src) == VOIDmode)
1684 src = gen_reg_rtx (mode);
1685 else
1686 src = gen_reg_rtx (GET_MODE (orig_src));
1687
1688 emit_move_insn (src, orig_src);
1689 }
1690
1691 /* Optimize the access just a bit. */
1692 if (MEM_P (src)
1693 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1694 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1695 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1696 && bytelen == GET_MODE_SIZE (mode))
1697 {
1698 tmps[i] = gen_reg_rtx (mode);
1699 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1700 }
1701 else if (COMPLEX_MODE_P (mode)
1702 && GET_MODE (src) == mode
1703 && bytelen == GET_MODE_SIZE (mode))
1704 /* Let emit_move_complex do the bulk of the work. */
1705 tmps[i] = src;
1706 else if (GET_CODE (src) == CONCAT)
1707 {
1708 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1709 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1710
1711 if ((bytepos == 0 && bytelen == slen0)
1712 || (bytepos != 0 && bytepos + bytelen <= slen))
1713 {
1714 /* The following assumes that the concatenated objects all
1715 have the same size. In this case, a simple calculation
1716 can be used to determine the object and the bit field
1717 to be extracted. */
1718 tmps[i] = XEXP (src, bytepos / slen0);
1719 if (! CONSTANT_P (tmps[i])
1720 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1721 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1722 (bytepos % slen0) * BITS_PER_UNIT,
1723 1, NULL_RTX, mode, mode);
1724 }
1725 else
1726 {
1727 rtx mem;
1728
1729 gcc_assert (!bytepos);
1730 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1731 emit_move_insn (mem, src);
1732 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1733 0, 1, NULL_RTX, mode, mode);
1734 }
1735 }
1736 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1737 	 SIMD register, which is currently broken.  Until we get GCC
1738 	 to emit proper RTL for these cases, let's dump to memory.  */
1739 else if (VECTOR_MODE_P (GET_MODE (dst))
1740 && REG_P (src))
1741 {
1742 int slen = GET_MODE_SIZE (GET_MODE (src));
1743 rtx mem;
1744
1745 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1746 emit_move_insn (mem, src);
1747 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1748 }
1749 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1750 && XVECLEN (dst, 0) > 1)
1751 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
1752 else if (CONSTANT_P (src)
1753 || (REG_P (src) && GET_MODE (src) == mode))
1754 tmps[i] = src;
1755 else
1756 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1757 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1758 mode, mode);
1759
1760 if (shift)
1761 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1762 build_int_cst (NULL_TREE, shift), tmps[i], 0);
1763 }
1764 }
1765
1766 /* Emit code to move a block SRC of type TYPE to a block DST,
1767 where DST is non-consecutive registers represented by a PARALLEL.
1768 SSIZE represents the total size of block SRC in bytes, or -1
1769 if not known. */
1770
1771 void
1772 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1773 {
1774 rtx *tmps;
1775 int i;
1776
1777 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1778 emit_group_load_1 (tmps, dst, src, type, ssize);
1779
1780 /* Copy the extracted pieces into the proper (probable) hard regs. */
1781 for (i = 0; i < XVECLEN (dst, 0); i++)
1782 {
1783 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1784 if (d == NULL)
1785 continue;
1786 emit_move_insn (d, tmps[i]);
1787 }
1788 }
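
/* Usage sketch (illustrative only, not part of the original sources):
   DST is a PARALLEL whose elements are (expr_list (reg) (const_int
   byte-offset)) pairs.  Given such a PARALLEL describing, say, two
   registers at byte offsets 0 and 8, a 16-byte aggregate in memory can
   be scattered into them with

       emit_group_load (dst, src_mem, type, 16);

   passing -1 for the size when it is not known.  */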
1789
1790 /* Similar, but load SRC into new pseudos in a format that looks like
1791 PARALLEL. This can later be fed to emit_group_move to get things
1792 in the right place. */
1793
1794 rtx
1795 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1796 {
1797 rtvec vec;
1798 int i;
1799
1800 vec = rtvec_alloc (XVECLEN (parallel, 0));
1801 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1802
1803 /* Convert the vector to look just like the original PARALLEL, except
1804 with the computed values. */
1805 for (i = 0; i < XVECLEN (parallel, 0); i++)
1806 {
1807 rtx e = XVECEXP (parallel, 0, i);
1808 rtx d = XEXP (e, 0);
1809
1810 if (d)
1811 {
1812 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1813 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1814 }
1815 RTVEC_ELT (vec, i) = e;
1816 }
1817
1818 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1819 }
1820
1821 /* Emit code to move a block SRC to block DST, where SRC and DST are
1822 non-consecutive groups of registers, each represented by a PARALLEL. */
1823
1824 void
1825 emit_group_move (rtx dst, rtx src)
1826 {
1827 int i;
1828
1829 gcc_assert (GET_CODE (src) == PARALLEL
1830 && GET_CODE (dst) == PARALLEL
1831 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1832
1833 /* Skip first entry if NULL. */
1834 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1835 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1836 XEXP (XVECEXP (src, 0, i), 0));
1837 }
1838
1839 /* Move a group of registers represented by a PARALLEL into pseudos. */
1840
1841 rtx
1842 emit_group_move_into_temps (rtx src)
1843 {
1844 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1845 int i;
1846
1847 for (i = 0; i < XVECLEN (src, 0); i++)
1848 {
1849 rtx e = XVECEXP (src, 0, i);
1850 rtx d = XEXP (e, 0);
1851
1852 if (d)
1853 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1854 RTVEC_ELT (vec, i) = e;
1855 }
1856
1857 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1858 }
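
/* Usage sketch (illustrative only, not part of the original sources):
   the *_into_temps variants let the loading be separated from the
   final writes to the (probable) hard registers:

       rtx tmp = emit_group_load_into_temps (parallel, src_mem, type, ssize);
       ... other preparatory code may be emitted here ...
       emit_group_move (parallel, tmp);

   TMP has the same PARALLEL shape as PARALLEL but holds pseudos, so
   the hard registers are written only by the final emit_group_move.  */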
1859
1860 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1861 where SRC is non-consecutive registers represented by a PARALLEL.
1862 SSIZE represents the total size of block ORIG_DST, or -1 if not
1863 known. */
1864
1865 void
1866 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1867 {
1868 rtx *tmps, dst;
1869 int start, finish, i;
1870 enum machine_mode m = GET_MODE (orig_dst);
1871
1872 gcc_assert (GET_CODE (src) == PARALLEL);
1873
1874 if (!SCALAR_INT_MODE_P (m)
1875 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1876 {
1877 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1878 if (imode == BLKmode)
1879 dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
1880 else
1881 dst = gen_reg_rtx (imode);
1882 emit_group_store (dst, src, type, ssize);
1883 if (imode != BLKmode)
1884 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1885 emit_move_insn (orig_dst, dst);
1886 return;
1887 }
1888
1889 /* Check for a NULL entry, used to indicate that the parameter goes
1890 both on the stack and in registers. */
1891 if (XEXP (XVECEXP (src, 0, 0), 0))
1892 start = 0;
1893 else
1894 start = 1;
1895 finish = XVECLEN (src, 0);
1896
1897 tmps = alloca (sizeof (rtx) * finish);
1898
1899 /* Copy the (probable) hard regs into pseudos. */
1900 for (i = start; i < finish; i++)
1901 {
1902 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1903 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1904 {
1905 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1906 emit_move_insn (tmps[i], reg);
1907 }
1908 else
1909 tmps[i] = reg;
1910 }
1911
1912 /* If we won't be storing directly into memory, protect the real destination
1913 from strange tricks we might play. */
1914 dst = orig_dst;
1915 if (GET_CODE (dst) == PARALLEL)
1916 {
1917 rtx temp;
1918
1919 /* We can get a PARALLEL dst if there is a conditional expression in
1920 a return statement. In that case, the dst and src are the same,
1921 so no action is necessary. */
1922 if (rtx_equal_p (dst, src))
1923 return;
1924
1925 /* It is unclear if we can ever reach here, but we may as well handle
1926 it. Allocate a temporary, and split this into a store/load to/from
1927 the temporary. */
1928
1929 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1930 emit_group_store (temp, src, type, ssize);
1931 emit_group_load (dst, temp, type, ssize);
1932 return;
1933 }
1934 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1935 {
1936 enum machine_mode outer = GET_MODE (dst);
1937 enum machine_mode inner;
1938 HOST_WIDE_INT bytepos;
1939 bool done = false;
1940 rtx temp;
1941
1942 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1943 dst = gen_reg_rtx (outer);
1944
1945 /* Make life a bit easier for combine. */
1946 /* If the first element of the vector is the low part
1947 of the destination mode, use a paradoxical subreg to
1948 initialize the destination. */
1949 if (start < finish)
1950 {
1951 inner = GET_MODE (tmps[start]);
1952 bytepos = subreg_lowpart_offset (inner, outer);
1953 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1954 {
1955 temp = simplify_gen_subreg (outer, tmps[start],
1956 inner, 0);
1957 if (temp)
1958 {
1959 emit_move_insn (dst, temp);
1960 done = true;
1961 start++;
1962 }
1963 }
1964 }
1965
1966 /* If the first element wasn't the low part, try the last. */
1967 if (!done
1968 && start < finish - 1)
1969 {
1970 inner = GET_MODE (tmps[finish - 1]);
1971 bytepos = subreg_lowpart_offset (inner, outer);
1972 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
1973 {
1974 temp = simplify_gen_subreg (outer, tmps[finish - 1],
1975 inner, 0);
1976 if (temp)
1977 {
1978 emit_move_insn (dst, temp);
1979 done = true;
1980 finish--;
1981 }
1982 }
1983 }
1984
1985 /* Otherwise, simply initialize the result to zero. */
1986 if (!done)
1987 emit_move_insn (dst, CONST0_RTX (outer));
1988 }
1989
1990 /* Process the pieces. */
1991 for (i = start; i < finish; i++)
1992 {
1993 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
1994 enum machine_mode mode = GET_MODE (tmps[i]);
1995 unsigned int bytelen = GET_MODE_SIZE (mode);
1996 rtx dest = dst;
1997
1998 /* Handle trailing fragments that run over the size of the struct. */
1999 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2000 {
2001 /* store_bit_field always takes its value from the lsb.
2002 Move the fragment to the lsb if it's not already there. */
2003 if (
2004 #ifdef BLOCK_REG_PADDING
2005 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2006 == (BYTES_BIG_ENDIAN ? upward : downward)
2007 #else
2008 BYTES_BIG_ENDIAN
2009 #endif
2010 )
2011 {
2012 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2013 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2014 build_int_cst (NULL_TREE, shift),
2015 tmps[i], 0);
2016 }
2017 bytelen = ssize - bytepos;
2018 }
2019
2020 if (GET_CODE (dst) == CONCAT)
2021 {
2022 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2023 dest = XEXP (dst, 0);
2024 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2025 {
2026 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2027 dest = XEXP (dst, 1);
2028 }
2029 else
2030 {
2031 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2032 dest = assign_stack_temp (GET_MODE (dest),
2033 GET_MODE_SIZE (GET_MODE (dest)), 0);
2034 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2035 tmps[i]);
2036 dst = dest;
2037 break;
2038 }
2039 }
2040
2041 /* Optimize the access just a bit. */
2042 if (MEM_P (dest)
2043 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2044 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2045 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2046 && bytelen == GET_MODE_SIZE (mode))
2047 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2048 else
2049 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2050 mode, tmps[i]);
2051 }
2052
2053 /* Copy from the pseudo into the (probable) hard reg. */
2054 if (orig_dst != dst)
2055 emit_move_insn (orig_dst, dst);
2056 }
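
/* Usage sketch (illustrative only, not part of the original sources):
   this is the inverse of emit_group_load.  Given the same kind of
   PARALLEL SRC describing register pieces and their byte offsets, a
   caller can scatter the pieces back into an aggregate with

       emit_group_store (orig_dst, src_parallel, type, ssize);

   where ORIG_DST is typically a BLKmode MEM (or one of the CONCAT or
   non-integer-mode destinations handled by the special cases above).  */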
2057
2058 /* Generate code to copy a BLKmode object of TYPE out of a
2059 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2060 is null, a stack temporary is created. TGTBLK is returned.
2061
2062 The purpose of this routine is to handle functions that return
2063 BLKmode structures in registers. Some machines (the PA for example)
2064 want to return all small structures in registers regardless of the
2065 structure's alignment. */
2066
2067 rtx
2068 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2069 {
2070 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2071 rtx src = NULL, dst = NULL;
2072 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2073 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2074
2075 if (tgtblk == 0)
2076 {
2077 tgtblk = assign_temp (build_qualified_type (type,
2078 (TYPE_QUALS (type)
2079 | TYPE_QUAL_CONST)),
2080 0, 1, 1);
2081 preserve_temp_slots (tgtblk);
2082 }
2083
2084 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2085 into a new pseudo which is a full word. */
2086
2087 if (GET_MODE (srcreg) != BLKmode
2088 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2089 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2090
2091 /* If the structure doesn't take up a whole number of words, see whether
2092 SRCREG is padded on the left or on the right. If it's on the left,
2093 set PADDING_CORRECTION to the number of bits to skip.
2094
2095 In most ABIs, the structure will be returned at the least significant
2096 end of the register, which translates to right padding on little-endian
2097 targets and left padding on big-endian targets. The opposite
2098 holds if the structure is returned at the most significant
2099 end of the register. */
2100 if (bytes % UNITS_PER_WORD != 0
2101 && (targetm.calls.return_in_msb (type)
2102 ? !BYTES_BIG_ENDIAN
2103 : BYTES_BIG_ENDIAN))
2104 padding_correction
2105 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2106
2107 /* Copy the structure BITSIZE bits at a time.
2108
2109 We could probably emit more efficient code for machines which do not use
2110 strict alignment, but it doesn't seem worth the effort at the current
2111 time. */
2112 for (bitpos = 0, xbitpos = padding_correction;
2113 bitpos < bytes * BITS_PER_UNIT;
2114 bitpos += bitsize, xbitpos += bitsize)
2115 {
2116 /* We need a new source operand each time xbitpos is on a
2117 word boundary and when xbitpos == padding_correction
2118 (the first time through). */
2119 if (xbitpos % BITS_PER_WORD == 0
2120 || xbitpos == padding_correction)
2121 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2122 GET_MODE (srcreg));
2123
2124 /* We need a new destination operand each time bitpos is on
2125 a word boundary. */
2126 if (bitpos % BITS_PER_WORD == 0)
2127 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2128
2129 /* Use xbitpos for the source extraction (right justified) and
2130 bitpos for the destination store (left justified). */
2131 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2132 extract_bit_field (src, bitsize,
2133 xbitpos % BITS_PER_WORD, 1,
2134 NULL_RTX, word_mode, word_mode));
2135 }
2136
2137 return tgtblk;
2138 }
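
/* Usage sketch (illustrative only, not part of the original sources):
   to unpack a BLKmode value that was returned in registers, a caller
   can write

       rtx blk = copy_blkmode_from_reg (NULL_RTX, hard_return_reg, type);

   and receive a freshly assigned stack temporary holding the bytes,
   or pass a non-null TGTBLK to fill an existing block instead.  */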
2139
2140 /* Add a USE expression for REG to the (possibly empty) list pointed
2141 to by CALL_FUSAGE. REG must denote a hard register. */
2142
2143 void
2144 use_reg (rtx *call_fusage, rtx reg)
2145 {
2146 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2147
2148 *call_fusage
2149 = gen_rtx_EXPR_LIST (VOIDmode,
2150 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2151 }
2152
2153 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2154 starting at REGNO. All of these registers must be hard registers. */
2155
2156 void
2157 use_regs (rtx *call_fusage, int regno, int nregs)
2158 {
2159 int i;
2160
2161 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2162
2163 for (i = 0; i < nregs; i++)
2164 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2165 }
2166
2167 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2168 PARALLEL REGS. This is for calls that pass values in multiple
2169 non-contiguous locations. The Irix 6 ABI has examples of this. */
2170
2171 void
2172 use_group_regs (rtx *call_fusage, rtx regs)
2173 {
2174 int i;
2175
2176 for (i = 0; i < XVECLEN (regs, 0); i++)
2177 {
2178 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2179
2180 /* A NULL entry means the parameter goes both on the stack and in
2181 registers. This can also be a MEM for targets that pass values
2182 partially on the stack and partially in registers. */
2183 if (reg != 0 && REG_P (reg))
2184 use_reg (call_fusage, reg);
2185 }
2186 }
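
/* Usage sketch (illustrative only, not part of the original sources):
   when expanding a call, the caller accumulates these USEs and later
   attaches them to the CALL_INSN, e.g.

       rtx call_fusage = NULL_RTX;
       use_reg (&call_fusage, gen_rtx_REG (Pmode, SOME_HARD_REGNO));
       use_regs (&call_fusage, FIRST_ARG_REGNO, nregs);

   where SOME_HARD_REGNO and FIRST_ARG_REGNO are placeholders for
   whatever hard registers the target's calling convention uses.  */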
2187 \f
2188
2189 /* Determine whether the LEN bytes generated by CONSTFUN can be
2190 stored to memory using several move instructions. CONSTFUNDATA is
2191 a pointer which will be passed as argument in every CONSTFUN call.
2192 ALIGN is maximum alignment we can assume. Return nonzero if a
2193 call to store_by_pieces should succeed. */
2194
2195 int
2196 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2197 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2198 void *constfundata, unsigned int align)
2199 {
2200 unsigned HOST_WIDE_INT l;
2201 unsigned int max_size;
2202 HOST_WIDE_INT offset = 0;
2203 enum machine_mode mode, tmode;
2204 enum insn_code icode;
2205 int reverse;
2206 rtx cst;
2207
2208 if (len == 0)
2209 return 1;
2210
2211 if (! STORE_BY_PIECES_P (len, align))
2212 return 0;
2213
2214 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2215 if (align >= GET_MODE_ALIGNMENT (tmode))
2216 align = GET_MODE_ALIGNMENT (tmode);
2217 else
2218 {
2219 enum machine_mode xmode;
2220
2221 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2222 tmode != VOIDmode;
2223 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2224 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2225 || SLOW_UNALIGNED_ACCESS (tmode, align))
2226 break;
2227
2228 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2229 }
2230
2231 /* We would first store what we can in the largest integer mode, then go to
2232 successively smaller modes. */
2233
2234 for (reverse = 0;
2235 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2236 reverse++)
2237 {
2238 l = len;
2239 mode = VOIDmode;
2240 max_size = STORE_MAX_PIECES + 1;
2241 while (max_size > 1)
2242 {
2243 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2244 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2245 if (GET_MODE_SIZE (tmode) < max_size)
2246 mode = tmode;
2247
2248 if (mode == VOIDmode)
2249 break;
2250
2251 icode = mov_optab->handlers[(int) mode].insn_code;
2252 if (icode != CODE_FOR_nothing
2253 && align >= GET_MODE_ALIGNMENT (mode))
2254 {
2255 unsigned int size = GET_MODE_SIZE (mode);
2256
2257 while (l >= size)
2258 {
2259 if (reverse)
2260 offset -= size;
2261
2262 cst = (*constfun) (constfundata, offset, mode);
2263 if (!LEGITIMATE_CONSTANT_P (cst))
2264 return 0;
2265
2266 if (!reverse)
2267 offset += size;
2268
2269 l -= size;
2270 }
2271 }
2272
2273 max_size = GET_MODE_SIZE (mode);
2274 }
2275
2276 /* The code above should have handled everything. */
2277 gcc_assert (!l);
2278 }
2279
2280 return 1;
2281 }
2282
2283 /* Generate several move instructions to store LEN bytes generated by
2284 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2285 pointer which will be passed as argument in every CONSTFUN call.
2286 ALIGN is maximum alignment we can assume.
2287 If ENDP is 0 return TO; if ENDP is 1 return memory at the end, a la
2288 mempcpy; and if ENDP is 2 return memory at the end minus one byte, a la
2289 stpcpy. */
2290
2291 rtx
2292 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2293 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2294 void *constfundata, unsigned int align, int endp)
2295 {
2296 struct store_by_pieces data;
2297
2298 if (len == 0)
2299 {
2300 gcc_assert (endp != 2);
2301 return to;
2302 }
2303
2304 gcc_assert (STORE_BY_PIECES_P (len, align));
2305 data.constfun = constfun;
2306 data.constfundata = constfundata;
2307 data.len = len;
2308 data.to = to;
2309 store_by_pieces_1 (&data, align);
2310 if (endp)
2311 {
2312 rtx to1;
2313
2314 gcc_assert (!data.reverse);
2315 if (data.autinc_to)
2316 {
2317 if (endp == 2)
2318 {
2319 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2320 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2321 else
2322 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2323 -1));
2324 }
2325 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2326 data.offset);
2327 }
2328 else
2329 {
2330 if (endp == 2)
2331 --data.offset;
2332 to1 = adjust_address (data.to, QImode, data.offset);
2333 }
2334 return to1;
2335 }
2336 else
2337 return data.to;
2338 }
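
/* Usage sketch (illustrative only, not part of the original sources):
   a caller with a constant pattern normally checks can_store_by_pieces
   before committing to store_by_pieces, using a callback of the
   required shape.  A hypothetical callback yielding zeros, mirroring
   clear_by_pieces_1 below, could look like

       static rtx
       zero_piece (void *data ATTRIBUTE_UNUSED,
                   HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                   enum machine_mode mode ATTRIBUTE_UNUSED)
       {
         return const0_rtx;
       }

       if (can_store_by_pieces (len, zero_piece, NULL, align))
         store_by_pieces (to, len, zero_piece, NULL, align, 0);

   An ENDP of 0 asks for TO itself to be returned.  */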
2339
2340 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2341 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2342
2343 static void
2344 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2345 {
2346 struct store_by_pieces data;
2347
2348 if (len == 0)
2349 return;
2350
2351 data.constfun = clear_by_pieces_1;
2352 data.constfundata = NULL;
2353 data.len = len;
2354 data.to = to;
2355 store_by_pieces_1 (&data, align);
2356 }
2357
2358 /* Callback routine for clear_by_pieces.
2359 Return const0_rtx unconditionally. */
2360
2361 static rtx
2362 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2363 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2364 enum machine_mode mode ATTRIBUTE_UNUSED)
2365 {
2366 return const0_rtx;
2367 }
2368
2369 /* Subroutine of clear_by_pieces and store_by_pieces.
2370 Generate several move instructions to store LEN bytes of block TO. (A MEM
2371 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2372
2373 static void
2374 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2375 unsigned int align ATTRIBUTE_UNUSED)
2376 {
2377 rtx to_addr = XEXP (data->to, 0);
2378 unsigned int max_size = STORE_MAX_PIECES + 1;
2379 enum machine_mode mode = VOIDmode, tmode;
2380 enum insn_code icode;
2381
2382 data->offset = 0;
2383 data->to_addr = to_addr;
2384 data->autinc_to
2385 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2386 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2387
2388 data->explicit_inc_to = 0;
2389 data->reverse
2390 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2391 if (data->reverse)
2392 data->offset = data->len;
2393
2394 /* If storing requires more than two move insns,
2395 copy addresses to registers (to make displacements shorter)
2396 and use post-increment if available. */
2397 if (!data->autinc_to
2398 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2399 {
2400 /* Determine the main mode we'll be using. */
2401 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2402 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2403 if (GET_MODE_SIZE (tmode) < max_size)
2404 mode = tmode;
2405
2406 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2407 {
2408 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2409 data->autinc_to = 1;
2410 data->explicit_inc_to = -1;
2411 }
2412
2413 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2414 && ! data->autinc_to)
2415 {
2416 data->to_addr = copy_addr_to_reg (to_addr);
2417 data->autinc_to = 1;
2418 data->explicit_inc_to = 1;
2419 }
2420
2421 if ( !data->autinc_to && CONSTANT_P (to_addr))
2422 data->to_addr = copy_addr_to_reg (to_addr);
2423 }
2424
2425 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2426 if (align >= GET_MODE_ALIGNMENT (tmode))
2427 align = GET_MODE_ALIGNMENT (tmode);
2428 else
2429 {
2430 enum machine_mode xmode;
2431
2432 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2433 tmode != VOIDmode;
2434 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2435 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2436 || SLOW_UNALIGNED_ACCESS (tmode, align))
2437 break;
2438
2439 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2440 }
2441
2442 /* First store what we can in the largest integer mode, then go to
2443 successively smaller modes. */
2444
2445 while (max_size > 1)
2446 {
2447 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2448 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2449 if (GET_MODE_SIZE (tmode) < max_size)
2450 mode = tmode;
2451
2452 if (mode == VOIDmode)
2453 break;
2454
2455 icode = mov_optab->handlers[(int) mode].insn_code;
2456 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2457 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2458
2459 max_size = GET_MODE_SIZE (mode);
2460 }
2461
2462 /* The code above should have handled everything. */
2463 gcc_assert (!data->len);
2464 }
2465
2466 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2467 with move instructions for mode MODE. GENFUN is the gen_... function
2468 to make a move insn for that mode. DATA has all the other info. */
2469
2470 static void
2471 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2472 struct store_by_pieces *data)
2473 {
2474 unsigned int size = GET_MODE_SIZE (mode);
2475 rtx to1, cst;
2476
2477 while (data->len >= size)
2478 {
2479 if (data->reverse)
2480 data->offset -= size;
2481
2482 if (data->autinc_to)
2483 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2484 data->offset);
2485 else
2486 to1 = adjust_address (data->to, mode, data->offset);
2487
2488 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2489 emit_insn (gen_add2_insn (data->to_addr,
2490 GEN_INT (-(HOST_WIDE_INT) size)));
2491
2492 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2493 emit_insn ((*genfun) (to1, cst));
2494
2495 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2496 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2497
2498 if (! data->reverse)
2499 data->offset += size;
2500
2501 data->len -= size;
2502 }
2503 }
2504 \f
2505 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2506 its length in bytes. */
2507
2508 rtx
2509 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2510 unsigned int expected_align, HOST_WIDE_INT expected_size)
2511 {
2512 enum machine_mode mode = GET_MODE (object);
2513 unsigned int align;
2514
2515 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2516
2517 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2518 just move a zero. Otherwise, do this a piece at a time. */
2519 if (mode != BLKmode
2520 && GET_CODE (size) == CONST_INT
2521 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2522 {
2523 rtx zero = CONST0_RTX (mode);
2524 if (zero != NULL)
2525 {
2526 emit_move_insn (object, zero);
2527 return NULL;
2528 }
2529
2530 if (COMPLEX_MODE_P (mode))
2531 {
2532 zero = CONST0_RTX (GET_MODE_INNER (mode));
2533 if (zero != NULL)
2534 {
2535 write_complex_part (object, zero, 0);
2536 write_complex_part (object, zero, 1);
2537 return NULL;
2538 }
2539 }
2540 }
2541
2542 if (size == const0_rtx)
2543 return NULL;
2544
2545 align = MEM_ALIGN (object);
2546
2547 if (GET_CODE (size) == CONST_INT
2548 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2549 clear_by_pieces (object, INTVAL (size), align);
2550 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2551 expected_align, expected_size))
2552 ;
2553 else
2554 return set_storage_via_libcall (object, size, const0_rtx,
2555 method == BLOCK_OP_TAILCALL);
2556
2557 return NULL;
2558 }
2559
2560 rtx
2561 clear_storage (rtx object, rtx size, enum block_op_methods method)
2562 {
2563 return clear_storage_hints (object, size, method, 0, -1);
2564 }
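
/* Usage sketch (illustrative only, not part of the original sources):
   zeroing a BLKmode MEM of known size can be done with

       clear_storage (mem, GEN_INT (size_in_bytes), BLOCK_OP_NORMAL);

   which picks clear_by_pieces, a setmem pattern, or a memset libcall
   as appropriate.  */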
2565
2566
2567 /* A subroutine of clear_storage. Expand a call to memset.
2568 Return the return value of memset, 0 otherwise. */
2569
2570 rtx
2571 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2572 {
2573 tree call_expr, fn, object_tree, size_tree, val_tree;
2574 enum machine_mode size_mode;
2575 rtx retval;
2576
2577 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2578 place those new pseudos into a VAR_DECL and use them later. */
2579
2580 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2581
2582 size_mode = TYPE_MODE (sizetype);
2583 size = convert_to_mode (size_mode, size, 1);
2584 size = copy_to_mode_reg (size_mode, size);
2585
2586 /* It is incorrect to use the libcall calling conventions to call
2587 memset in this context. This could be a user call to memset and
2588 the user may wish to examine the return value from memset. For
2589 targets where libcalls and normal calls have different conventions
2590 for returning pointers, we could end up generating incorrect code. */
2591
2592 object_tree = make_tree (ptr_type_node, object);
2593 if (GET_CODE (val) != CONST_INT)
2594 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2595 size_tree = make_tree (sizetype, size);
2596 val_tree = make_tree (integer_type_node, val);
2597
2598 fn = clear_storage_libcall_fn (true);
2599 call_expr = build_call_expr (fn, 3,
2600 object_tree, val_tree, size_tree);
2601 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2602
2603 retval = expand_normal (call_expr);
2604
2605 return retval;
2606 }
2607
2608 /* A subroutine of set_storage_via_libcall. Create the tree node
2609 for the function we use for block clears. The first time FOR_CALL
2610 is true, we call assemble_external. */
2611
2612 static GTY(()) tree block_clear_fn;
2613
2614 void
2615 init_block_clear_fn (const char *asmspec)
2616 {
2617 if (!block_clear_fn)
2618 {
2619 tree fn, args;
2620
2621 fn = get_identifier ("memset");
2622 args = build_function_type_list (ptr_type_node, ptr_type_node,
2623 integer_type_node, sizetype,
2624 NULL_TREE);
2625
2626 fn = build_decl (FUNCTION_DECL, fn, args);
2627 DECL_EXTERNAL (fn) = 1;
2628 TREE_PUBLIC (fn) = 1;
2629 DECL_ARTIFICIAL (fn) = 1;
2630 TREE_NOTHROW (fn) = 1;
2631 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2632 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2633
2634 block_clear_fn = fn;
2635 }
2636
2637 if (asmspec)
2638 set_user_assembler_name (block_clear_fn, asmspec);
2639 }
2640
2641 static tree
2642 clear_storage_libcall_fn (int for_call)
2643 {
2644 static bool emitted_extern;
2645
2646 if (!block_clear_fn)
2647 init_block_clear_fn (NULL);
2648
2649 if (for_call && !emitted_extern)
2650 {
2651 emitted_extern = true;
2652 make_decl_rtl (block_clear_fn);
2653 assemble_external (block_clear_fn);
2654 }
2655
2656 return block_clear_fn;
2657 }
2658 \f
2659 /* Expand a setmem pattern; return true if successful. */
2660
2661 bool
2662 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2663 unsigned int expected_align, HOST_WIDE_INT expected_size)
2664 {
2665 /* Try the most limited insn first, because there's no point
2666 including more than one in the machine description unless
2667 the more limited one has some advantage. */
2668
2669 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2670 enum machine_mode mode;
2671
2672 if (expected_align < align)
2673 expected_align = align;
2674
2675 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2676 mode = GET_MODE_WIDER_MODE (mode))
2677 {
2678 enum insn_code code = setmem_optab[(int) mode];
2679 insn_operand_predicate_fn pred;
2680
2681 if (code != CODE_FOR_nothing
2682 /* We don't need MODE to be narrower than
2683 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2684 the mode mask, as it is returned by the macro, it will
2685 definitely be less than the actual mode mask. */
2686 && ((GET_CODE (size) == CONST_INT
2687 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2688 <= (GET_MODE_MASK (mode) >> 1)))
2689 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2690 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2691 || (*pred) (object, BLKmode))
2692 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
2693 || (*pred) (opalign, VOIDmode)))
2694 {
2695 rtx opsize, opchar;
2696 enum machine_mode char_mode;
2697 rtx last = get_last_insn ();
2698 rtx pat;
2699
2700 opsize = convert_to_mode (mode, size, 1);
2701 pred = insn_data[(int) code].operand[1].predicate;
2702 if (pred != 0 && ! (*pred) (opsize, mode))
2703 opsize = copy_to_mode_reg (mode, opsize);
2704
2705 opchar = val;
2706 char_mode = insn_data[(int) code].operand[2].mode;
2707 if (char_mode != VOIDmode)
2708 {
2709 opchar = convert_to_mode (char_mode, opchar, 1);
2710 pred = insn_data[(int) code].operand[2].predicate;
2711 if (pred != 0 && ! (*pred) (opchar, char_mode))
2712 opchar = copy_to_mode_reg (char_mode, opchar);
2713 }
2714
2715 if (insn_data[(int) code].n_operands == 4)
2716 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
2717 else
2718 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign,
2719 GEN_INT (expected_align),
2720 GEN_INT (expected_size));
2721 if (pat)
2722 {
2723 emit_insn (pat);
2724 return true;
2725 }
2726 else
2727 delete_insns_since (last);
2728 }
2729 }
2730
2731 return false;
2732 }
2733
2734 \f
2735 /* Write to one of the components of the complex value CPLX. Write VAL to
2736 the real part if IMAG_P is false, and the imaginary part if it's true. */
2737
2738 static void
2739 write_complex_part (rtx cplx, rtx val, bool imag_p)
2740 {
2741 enum machine_mode cmode;
2742 enum machine_mode imode;
2743 unsigned ibitsize;
2744
2745 if (GET_CODE (cplx) == CONCAT)
2746 {
2747 emit_move_insn (XEXP (cplx, imag_p), val);
2748 return;
2749 }
2750
2751 cmode = GET_MODE (cplx);
2752 imode = GET_MODE_INNER (cmode);
2753 ibitsize = GET_MODE_BITSIZE (imode);
2754
2755 /* For MEMs simplify_gen_subreg may generate an invalid new address
2756 because, e.g., the original address is considered mode-dependent
2757 by the target, which restricts simplify_subreg from invoking
2758 adjust_address_nv. Instead of preparing fallback support for an
2759 invalid address, we call adjust_address_nv directly. */
2760 if (MEM_P (cplx))
2761 {
2762 emit_move_insn (adjust_address_nv (cplx, imode,
2763 imag_p ? GET_MODE_SIZE (imode) : 0),
2764 val);
2765 return;
2766 }
2767
2768 /* If the sub-object is at least word sized, then we know that subregging
2769 will work. This special case is important, since store_bit_field
2770 wants to operate on integer modes, and there's rarely an OImode to
2771 correspond to TCmode. */
2772 if (ibitsize >= BITS_PER_WORD
2773 /* For hard regs we have exact predicates. Assume we can split
2774 the original object if it spans an even number of hard regs.
2775 This special case is important for SCmode on 64-bit platforms
2776 where the natural size of floating-point regs is 32-bit. */
2777 || (REG_P (cplx)
2778 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2779 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2780 {
2781 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2782 imag_p ? GET_MODE_SIZE (imode) : 0);
2783 if (part)
2784 {
2785 emit_move_insn (part, val);
2786 return;
2787 }
2788 else
2789 /* simplify_gen_subreg may fail for sub-word MEMs. */
2790 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2791 }
2792
2793 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
2794 }
2795
2796 /* Extract one of the components of the complex value CPLX. Extract the
2797 real part if IMAG_P is false, and the imaginary part if it's true. */
2798
2799 static rtx
2800 read_complex_part (rtx cplx, bool imag_p)
2801 {
2802 enum machine_mode cmode, imode;
2803 unsigned ibitsize;
2804
2805 if (GET_CODE (cplx) == CONCAT)
2806 return XEXP (cplx, imag_p);
2807
2808 cmode = GET_MODE (cplx);
2809 imode = GET_MODE_INNER (cmode);
2810 ibitsize = GET_MODE_BITSIZE (imode);
2811
2812 /* Special case reads from complex constants that got spilled to memory. */
2813 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2814 {
2815 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2816 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2817 {
2818 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2819 if (CONSTANT_CLASS_P (part))
2820 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2821 }
2822 }
2823
2824 /* For MEMs simplify_gen_subreg may generate an invalid new address
2825 because, e.g., the original address is considered mode-dependent
2826 by the target, which restricts simplify_subreg from invoking
2827 adjust_address_nv. Instead of preparing fallback support for an
2828 invalid address, we call adjust_address_nv directly. */
2829 if (MEM_P (cplx))
2830 return adjust_address_nv (cplx, imode,
2831 imag_p ? GET_MODE_SIZE (imode) : 0);
2832
2833 /* If the sub-object is at least word sized, then we know that subregging
2834 will work. This special case is important, since extract_bit_field
2835 wants to operate on integer modes, and there's rarely an OImode to
2836 correspond to TCmode. */
2837 if (ibitsize >= BITS_PER_WORD
2838 /* For hard regs we have exact predicates. Assume we can split
2839 the original object if it spans an even number of hard regs.
2840 This special case is important for SCmode on 64-bit platforms
2841 where the natural size of floating-point regs is 32-bit. */
2842 || (REG_P (cplx)
2843 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2844 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2845 {
2846 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2847 imag_p ? GET_MODE_SIZE (imode) : 0);
2848 if (ret)
2849 return ret;
2850 else
2851 /* simplify_gen_subreg may fail for sub-word MEMs. */
2852 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2853 }
2854
2855 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2856 true, NULL_RTX, imode, imode);
2857 }
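
/* Usage sketch (illustrative only, not part of the original sources):
   together these helpers let code in this file manipulate a complex
   value piecewise; for instance, negating the imaginary part in place
   (a complex conjugate) might look like

       enum machine_mode imode = GET_MODE_INNER (GET_MODE (cplx));
       rtx im = read_complex_part (cplx, true);
       im = expand_unop (imode, neg_optab, im, NULL_RTX, 0);
       write_complex_part (cplx, im, true);

   emit_move_complex_parts below is the canonical in-file user.  */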
2858 \f
2859 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
2860 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2861 represented in NEW_MODE. If FORCE is true, this will never happen, as
2862 we'll force-create a SUBREG if needed. */
2863
2864 static rtx
2865 emit_move_change_mode (enum machine_mode new_mode,
2866 enum machine_mode old_mode, rtx x, bool force)
2867 {
2868 rtx ret;
2869
2870 if (MEM_P (x))
2871 {
2872 /* We don't have to worry about changing the address since the
2873 size in bytes is supposed to be the same. */
2874 if (reload_in_progress)
2875 {
2876 /* Copy the MEM to change the mode and move any
2877 substitutions from the old MEM to the new one. */
2878 ret = adjust_address_nv (x, new_mode, 0);
2879 copy_replacements (x, ret);
2880 }
2881 else
2882 ret = adjust_address (x, new_mode, 0);
2883 }
2884 else
2885 {
2886 /* Note that we do want simplify_subreg's behavior of validating
2887 that the new mode is ok for a hard register. If we were to use
2888 simplify_gen_subreg, we would create the subreg, but would
2889 probably run into the target not being able to implement it. */
2890 /* Except, of course, when FORCE is true, when this is exactly what
2891 we want. Which is needed for CCmodes on some targets. */
2892 if (force)
2893 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
2894 else
2895 ret = simplify_subreg (new_mode, x, old_mode, 0);
2896 }
2897
2898 return ret;
2899 }
2900
2901 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2902 an integer mode of the same size as MODE. Returns the instruction
2903 emitted, or NULL if such a move could not be generated. */
2904
2905 static rtx
2906 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
2907 {
2908 enum machine_mode imode;
2909 enum insn_code code;
2910
2911 /* There must exist a mode of the exact size we require. */
2912 imode = int_mode_for_mode (mode);
2913 if (imode == BLKmode)
2914 return NULL_RTX;
2915
2916 /* The target must support moves in this mode. */
2917 code = mov_optab->handlers[imode].insn_code;
2918 if (code == CODE_FOR_nothing)
2919 return NULL_RTX;
2920
2921 x = emit_move_change_mode (imode, mode, x, force);
2922 if (x == NULL_RTX)
2923 return NULL_RTX;
2924 y = emit_move_change_mode (imode, mode, y, force);
2925 if (y == NULL_RTX)
2926 return NULL_RTX;
2927 return emit_insn (GEN_FCN (code) (x, y));
2928 }
2929
2930 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
2931 Return an equivalent MEM that does not use an auto-increment. */
2932
2933 static rtx
2934 emit_move_resolve_push (enum machine_mode mode, rtx x)
2935 {
2936 enum rtx_code code = GET_CODE (XEXP (x, 0));
2937 HOST_WIDE_INT adjust;
2938 rtx temp;
2939
2940 adjust = GET_MODE_SIZE (mode);
2941 #ifdef PUSH_ROUNDING
2942 adjust = PUSH_ROUNDING (adjust);
2943 #endif
2944 if (code == PRE_DEC || code == POST_DEC)
2945 adjust = -adjust;
2946 else if (code == PRE_MODIFY || code == POST_MODIFY)
2947 {
2948 rtx expr = XEXP (XEXP (x, 0), 1);
2949 HOST_WIDE_INT val;
2950
2951 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
2952 gcc_assert (GET_CODE (XEXP (expr, 1)) == CONST_INT);
2953 val = INTVAL (XEXP (expr, 1));
2954 if (GET_CODE (expr) == MINUS)
2955 val = -val;
2956 gcc_assert (adjust == val || adjust == -val);
2957 adjust = val;
2958 }
2959
2960 /* Do not use anti_adjust_stack, since we don't want to update
2961 stack_pointer_delta. */
2962 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
2963 GEN_INT (adjust), stack_pointer_rtx,
2964 0, OPTAB_LIB_WIDEN);
2965 if (temp != stack_pointer_rtx)
2966 emit_move_insn (stack_pointer_rtx, temp);
2967
2968 switch (code)
2969 {
2970 case PRE_INC:
2971 case PRE_DEC:
2972 case PRE_MODIFY:
2973 temp = stack_pointer_rtx;
2974 break;
2975 case POST_INC:
2976 case POST_DEC:
2977 case POST_MODIFY:
2978 temp = plus_constant (stack_pointer_rtx, -adjust);
2979 break;
2980 default:
2981 gcc_unreachable ();
2982 }
2983
2984 return replace_equiv_address (x, temp);
2985 }
2986
2987 /* A subroutine of emit_move_complex. Generate a move from Y into X.
2988 X is known to satisfy push_operand, and MODE is known to be complex.
2989 Returns the last instruction emitted. */
2990
2991 rtx
2992 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
2993 {
2994 enum machine_mode submode = GET_MODE_INNER (mode);
2995 bool imag_first;
2996
2997 #ifdef PUSH_ROUNDING
2998 unsigned int submodesize = GET_MODE_SIZE (submode);
2999
3000 /* If we push to the stack but the machine cannot push this size
3001 exactly (PUSH_ROUNDING changes it), we must use move instructions. */
3002 if (PUSH_ROUNDING (submodesize) != submodesize)
3003 {
3004 x = emit_move_resolve_push (mode, x);
3005 return emit_move_insn (x, y);
3006 }
3007 #endif
3008
3009 /* Note that the real part always precedes the imag part in memory
3010 regardless of the machine's endianness. */
3011 switch (GET_CODE (XEXP (x, 0)))
3012 {
3013 case PRE_DEC:
3014 case POST_DEC:
3015 imag_first = true;
3016 break;
3017 case PRE_INC:
3018 case POST_INC:
3019 imag_first = false;
3020 break;
3021 default:
3022 gcc_unreachable ();
3023 }
3024
3025 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3026 read_complex_part (y, imag_first));
3027 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3028 read_complex_part (y, !imag_first));
3029 }
3030
3031 /* A subroutine of emit_move_complex. Perform the move from Y to X
3032 via two moves of the parts. Returns the last instruction emitted. */
3033
3034 rtx
3035 emit_move_complex_parts (rtx x, rtx y)
3036 {
3037 /* Show the output dies here. This is necessary for SUBREGs
3038 of pseudos since we cannot track their lifetimes correctly;
3039 hard regs shouldn't appear here except as return values. */
3040 if (!reload_completed && !reload_in_progress
3041 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3042 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3043
3044 write_complex_part (x, read_complex_part (y, false), false);
3045 write_complex_part (x, read_complex_part (y, true), true);
3046
3047 return get_last_insn ();
3048 }
3049
3050 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3051 MODE is known to be complex. Returns the last instruction emitted. */
3052
3053 static rtx
3054 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3055 {
3056 bool try_int;
3057
3058 /* Need to take special care for pushes, to maintain proper ordering
3059 of the data, and possibly extra padding. */
3060 if (push_operand (x, mode))
3061 return emit_move_complex_push (mode, x, y);
3062
3063 /* See if we can coerce the target into moving both values at once. */
3064
3065 /* Move floating point as parts. */
3066 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3067 && mov_optab->handlers[GET_MODE_INNER (mode)].insn_code != CODE_FOR_nothing)
3068 try_int = false;
3069 /* Not possible if the values are inherently not adjacent. */
3070 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3071 try_int = false;
3072 /* Is possible if both are registers (or subregs of registers). */
3073 else if (register_operand (x, mode) && register_operand (y, mode))
3074 try_int = true;
3075 /* If one of the operands is a memory, and alignment constraints
3076 are friendly enough, we may be able to do combined memory operations.
3077 We do not attempt this if Y is a constant because that combination is
3078 usually better with the by-parts thing below. */
3079 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3080 && (!STRICT_ALIGNMENT
3081 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3082 try_int = true;
3083 else
3084 try_int = false;
3085
3086 if (try_int)
3087 {
3088 rtx ret;
3089
3090 /* For memory to memory moves, optimal behavior can be had with the
3091 existing block move logic. */
3092 if (MEM_P (x) && MEM_P (y))
3093 {
3094 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3095 BLOCK_OP_NO_LIBCALL);
3096 return get_last_insn ();
3097 }
3098
3099 ret = emit_move_via_integer (mode, x, y, true);
3100 if (ret)
3101 return ret;
3102 }
3103
3104 return emit_move_complex_parts (x, y);
3105 }
3106
3107 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3108 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3109
3110 static rtx
3111 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3112 {
3113 rtx ret;
3114
3115 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3116 if (mode != CCmode)
3117 {
3118 enum insn_code code = mov_optab->handlers[CCmode].insn_code;
3119 if (code != CODE_FOR_nothing)
3120 {
3121 x = emit_move_change_mode (CCmode, mode, x, true);
3122 y = emit_move_change_mode (CCmode, mode, y, true);
3123 return emit_insn (GEN_FCN (code) (x, y));
3124 }
3125 }
3126
3127 /* Otherwise, find the MODE_INT mode of the same width. */
3128 ret = emit_move_via_integer (mode, x, y, false);
3129 gcc_assert (ret != NULL);
3130 return ret;
3131 }
3132
3133 /* Return true if word I of OP lies entirely in the
3134 undefined bits of a paradoxical subreg. */
3135
3136 static bool
3137 undefined_operand_subword_p (rtx op, int i)
3138 {
3139 enum machine_mode innermode, innermostmode;
3140 int offset;
3141 if (GET_CODE (op) != SUBREG)
3142 return false;
3143 innermode = GET_MODE (op);
3144 innermostmode = GET_MODE (SUBREG_REG (op));
3145 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3146 /* The SUBREG_BYTE represents the offset, as if the value were stored in
3147 memory, except for a paradoxical subreg where we define
3148 SUBREG_BYTE to be 0; undo this exception as in
3149 simplify_subreg. */
3150 if (SUBREG_BYTE (op) == 0
3151 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3152 {
3153 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3154 if (WORDS_BIG_ENDIAN)
3155 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3156 if (BYTES_BIG_ENDIAN)
3157 offset += difference % UNITS_PER_WORD;
3158 }
3159 if (offset >= GET_MODE_SIZE (innermostmode)
3160 || offset <= -GET_MODE_SIZE (word_mode))
3161 return true;
3162 return false;
3163 }
3164
3165 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3166 MODE is any multi-word or full-word mode that lacks a move_insn
3167 pattern. Note that you will get better code if you define such
3168 patterns, even if they must turn into multiple assembler instructions. */
3169
3170 static rtx
3171 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3172 {
3173 rtx last_insn = 0;
3174 rtx seq, inner;
3175 bool need_clobber;
3176 int i;
3177
3178 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3179
3180 /* If X is a push on the stack, do the push now and replace
3181 X with a reference to the stack pointer. */
3182 if (push_operand (x, mode))
3183 x = emit_move_resolve_push (mode, x);
3184
3185 /* If we are in reload, see if either operand is a MEM whose address
3186 is scheduled for replacement. */
3187 if (reload_in_progress && MEM_P (x)
3188 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3189 x = replace_equiv_address_nv (x, inner);
3190 if (reload_in_progress && MEM_P (y)
3191 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3192 y = replace_equiv_address_nv (y, inner);
3193
3194 start_sequence ();
3195
3196 need_clobber = false;
3197 for (i = 0;
3198 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3199 i++)
3200 {
3201 rtx xpart = operand_subword (x, i, 1, mode);
3202 rtx ypart;
3203
3204 /* Do not generate code for a move if it would come entirely
3205 from the undefined bits of a paradoxical subreg. */
3206 if (undefined_operand_subword_p (y, i))
3207 continue;
3208
3209 ypart = operand_subword (y, i, 1, mode);
3210
3211 /* If we can't get a part of Y, put Y into memory if it is a
3212 constant. Otherwise, force it into a register. Then we must
3213 be able to get a part of Y. */
3214 if (ypart == 0 && CONSTANT_P (y))
3215 {
3216 y = use_anchored_address (force_const_mem (mode, y));
3217 ypart = operand_subword (y, i, 1, mode);
3218 }
3219 else if (ypart == 0)
3220 ypart = operand_subword_force (y, i, mode);
3221
3222 gcc_assert (xpart && ypart);
3223
3224 need_clobber |= (GET_CODE (xpart) == SUBREG);
3225
3226 last_insn = emit_move_insn (xpart, ypart);
3227 }
3228
3229 seq = get_insns ();
3230 end_sequence ();
3231
3232 /* Show the output dies here. This is necessary for SUBREGs
3233 of pseudos since we cannot track their lifetimes correctly;
3234 hard regs shouldn't appear here except as return values.
3235 We never want to emit such a clobber after reload. */
3236 if (x != y
3237 && ! (reload_in_progress || reload_completed)
3238 && need_clobber != 0)
3239 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3240
3241 emit_insn (seq);
3242
3243 return last_insn;
3244 }
3245
3246 /* Low level part of emit_move_insn.
3247 Called just like emit_move_insn, but assumes X and Y
3248 are basically valid. */
3249
3250 rtx
3251 emit_move_insn_1 (rtx x, rtx y)
3252 {
3253 enum machine_mode mode = GET_MODE (x);
3254 enum insn_code code;
3255
3256 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3257
3258 code = mov_optab->handlers[mode].insn_code;
3259 if (code != CODE_FOR_nothing)
3260 return emit_insn (GEN_FCN (code) (x, y));
3261
3262 /* Expand complex moves by moving real part and imag part. */
3263 if (COMPLEX_MODE_P (mode))
3264 return emit_move_complex (mode, x, y);
3265
3266 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT)
3267 {
3268 rtx result = emit_move_via_integer (mode, x, y, true);
3269
3270 /* If we can't find an integer mode, use multi words. */
3271 if (result)
3272 return result;
3273 else
3274 return emit_move_multi_word (mode, x, y);
3275 }
3276
3277 if (GET_MODE_CLASS (mode) == MODE_CC)
3278 return emit_move_ccmode (mode, x, y);
3279
3280 /* Try using a move pattern for the corresponding integer mode. This is
3281 only safe when simplify_subreg can convert MODE constants into integer
3282 constants. At present, it can only do this reliably if the value
3283 fits within a HOST_WIDE_INT. */
3284 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3285 {
3286 rtx ret = emit_move_via_integer (mode, x, y, false);
3287 if (ret)
3288 return ret;
3289 }
3290
3291 return emit_move_multi_word (mode, x, y);
3292 }
3293
3294 /* Generate code to copy Y into X.
3295 Both Y and X must have the same mode, except that
3296 Y can be a constant with VOIDmode.
3297 This mode cannot be BLKmode; use emit_block_move for that.
3298
3299 Return the last instruction emitted. */
3300
3301 rtx
3302 emit_move_insn (rtx x, rtx y)
3303 {
3304 enum machine_mode mode = GET_MODE (x);
3305 rtx y_cst = NULL_RTX;
3306 rtx last_insn, set;
3307
3308 gcc_assert (mode != BLKmode
3309 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3310
3311 if (CONSTANT_P (y))
3312 {
3313 if (optimize
3314 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3315 && (last_insn = compress_float_constant (x, y)))
3316 return last_insn;
3317
3318 y_cst = y;
3319
3320 if (!LEGITIMATE_CONSTANT_P (y))
3321 {
3322 y = force_const_mem (mode, y);
3323
3324 /* If the target's cannot_force_const_mem prevented the spill,
3325 assume that the target's move expanders will also take care
3326 of the non-legitimate constant. */
3327 if (!y)
3328 y = y_cst;
3329 else
3330 y = use_anchored_address (y);
3331 }
3332 }
3333
3334 /* If X or Y are memory references, verify that their addresses are valid
3335 for the machine. */
3336 if (MEM_P (x)
3337 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3338 && ! push_operand (x, GET_MODE (x)))
3339 || (flag_force_addr
3340 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3341 x = validize_mem (x);
3342
3343 if (MEM_P (y)
3344 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3345 || (flag_force_addr
3346 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3347 y = validize_mem (y);
3348
3349 gcc_assert (mode != BLKmode);
3350
3351 last_insn = emit_move_insn_1 (x, y);
3352
3353 if (y_cst && REG_P (x)
3354 && (set = single_set (last_insn)) != NULL_RTX
3355 && SET_DEST (set) == x
3356 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3357 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3358
3359 return last_insn;
3360 }
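
/* Usage sketch (illustrative only, not part of the original sources):
   emit_move_insn is the usual entry point for a single move, e.g.

       rtx reg = gen_reg_rtx (SImode);
       emit_move_insn (reg, GEN_INT (42));

   Constants that are not LEGITIMATE_CONSTANT_P are spilled to the
   constant pool above before emit_move_insn_1 does the real work.  */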
3361
3362 /* If Y is representable exactly in a narrower mode, and the target can
3363 perform the extension directly from constant or memory, then emit the
3364 move as an extension. */
3365
3366 static rtx
3367 compress_float_constant (rtx x, rtx y)
3368 {
3369 enum machine_mode dstmode = GET_MODE (x);
3370 enum machine_mode orig_srcmode = GET_MODE (y);
3371 enum machine_mode srcmode;
3372 REAL_VALUE_TYPE r;
3373 int oldcost, newcost;
3374
3375 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3376
3377 if (LEGITIMATE_CONSTANT_P (y))
3378 oldcost = rtx_cost (y, SET);
3379 else
3380 oldcost = rtx_cost (force_const_mem (dstmode, y), SET);
3381
3382 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3383 srcmode != orig_srcmode;
3384 srcmode = GET_MODE_WIDER_MODE (srcmode))
3385 {
3386 enum insn_code ic;
3387 rtx trunc_y, last_insn;
3388
3389 /* Skip if the target can't extend this way. */
3390 ic = can_extend_p (dstmode, srcmode, 0);
3391 if (ic == CODE_FOR_nothing)
3392 continue;
3393
3394 /* Skip if the narrowed value isn't exact. */
3395 if (! exact_real_truncate (srcmode, &r))
3396 continue;
3397
3398 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3399
3400 if (LEGITIMATE_CONSTANT_P (trunc_y))
3401 {
3402 /* Skip if the target needs extra instructions to perform
3403 the extension. */
3404 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3405 continue;
3406 /* This is valid, but may not be cheaper than the original. */
3407 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3408 if (oldcost < newcost)
3409 continue;
3410 }
3411 else if (float_extend_from_mem[dstmode][srcmode])
3412 {
3413 trunc_y = force_const_mem (srcmode, trunc_y);
3414 /* This is valid, but may not be cheaper than the original. */
3415 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3416 if (oldcost < newcost)
3417 continue;
3418 trunc_y = validize_mem (trunc_y);
3419 }
3420 else
3421 continue;
3422
3423 /* For CSE's benefit, force the compressed constant pool entry
3424 into a new pseudo. This constant may be used in different modes,
3425 and if not, combine will put things back together for us. */
3426 trunc_y = force_reg (srcmode, trunc_y);
3427 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3428 last_insn = get_last_insn ();
3429
3430 if (REG_P (x))
3431 set_unique_reg_note (last_insn, REG_EQUAL, y);
3432
3433 return last_insn;
3434 }
3435
3436 return NULL_RTX;
3437 }
3438 \f
3439 /* Pushing data onto the stack. */
3440
3441 /* Push a block of length SIZE (perhaps variable)
3442 and return an rtx to address the beginning of the block.
3443 The value may be virtual_outgoing_args_rtx.
3444
3445 EXTRA is the number of bytes of padding to push in addition to SIZE.
3446 BELOW nonzero means this padding comes at low addresses;
3447 otherwise, the padding comes at high addresses. */
3448
3449 rtx
3450 push_block (rtx size, int extra, int below)
3451 {
3452 rtx temp;
3453
3454 size = convert_modes (Pmode, ptr_mode, size, 1);
3455 if (CONSTANT_P (size))
3456 anti_adjust_stack (plus_constant (size, extra));
3457 else if (REG_P (size) && extra == 0)
3458 anti_adjust_stack (size);
3459 else
3460 {
3461 temp = copy_to_mode_reg (Pmode, size);
3462 if (extra != 0)
3463 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3464 temp, 0, OPTAB_LIB_WIDEN);
3465 anti_adjust_stack (temp);
3466 }
3467
3468 #ifndef STACK_GROWS_DOWNWARD
3469 if (0)
3470 #else
3471 if (1)
3472 #endif
3473 {
3474 temp = virtual_outgoing_args_rtx;
3475 if (extra != 0 && below)
3476 temp = plus_constant (temp, extra);
3477 }
3478 else
3479 {
3480 if (GET_CODE (size) == CONST_INT)
3481 temp = plus_constant (virtual_outgoing_args_rtx,
3482 -INTVAL (size) - (below ? 0 : extra));
3483 else if (extra != 0 && !below)
3484 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3485 negate_rtx (Pmode, plus_constant (size, extra)));
3486 else
3487 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3488 negate_rtx (Pmode, size));
3489 }
3490
3491 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3492 }
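
/* Usage sketch (illustrative only, not part of the original sources):
   to reserve a 32-byte block of outgoing-argument space and address
   it, one could write

       rtx addr = push_block (GEN_INT (32), 0, 0);
       rtx blk = gen_rtx_MEM (BLKmode, addr);

   EXTRA and BELOW are zero here, so no padding is requested.  */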
3493
3494 #ifdef PUSH_ROUNDING
3495
3496 /* Emit single push insn. */
3497
3498 static void
3499 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3500 {
3501 rtx dest_addr;
3502 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3503 rtx dest;
3504 enum insn_code icode;
3505 insn_operand_predicate_fn pred;
3506
3507 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3508 /* If there is a push pattern, use it. Otherwise fall back to the old
3509 way: hand a MEM representing the push operation to the move expander. */
3510 icode = push_optab->handlers[(int) mode].insn_code;
3511 if (icode != CODE_FOR_nothing)
3512 {
3513 if (((pred = insn_data[(int) icode].operand[0].predicate)
3514 && !((*pred) (x, mode))))
3515 x = force_reg (mode, x);
3516 emit_insn (GEN_FCN (icode) (x));
3517 return;
3518 }
3519 if (GET_MODE_SIZE (mode) == rounded_size)
3520 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3521 /* If we are to pad downward, adjust the stack pointer first and
3522 then store X into the stack location using an offset. This is
3523 because emit_move_insn does not know how to pad; it does not have
3524 access to type. */
3525 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3526 {
3527 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3528 HOST_WIDE_INT offset;
3529
3530 emit_move_insn (stack_pointer_rtx,
3531 expand_binop (Pmode,
3532 #ifdef STACK_GROWS_DOWNWARD
3533 sub_optab,
3534 #else
3535 add_optab,
3536 #endif
3537 stack_pointer_rtx,
3538 GEN_INT (rounded_size),
3539 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3540
3541 offset = (HOST_WIDE_INT) padding_size;
3542 #ifdef STACK_GROWS_DOWNWARD
3543 if (STACK_PUSH_CODE == POST_DEC)
3544 /* We have already decremented the stack pointer, so get the
3545 previous value. */
3546 offset += (HOST_WIDE_INT) rounded_size;
3547 #else
3548 if (STACK_PUSH_CODE == POST_INC)
3549 /* We have already incremented the stack pointer, so get the
3550 previous value. */
3551 offset -= (HOST_WIDE_INT) rounded_size;
3552 #endif
3553 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3554 }
3555 else
3556 {
3557 #ifdef STACK_GROWS_DOWNWARD
3558 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3559 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3560 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3561 #else
3562 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3563 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3564 GEN_INT (rounded_size));
3565 #endif
3566 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3567 }
3568
3569 dest = gen_rtx_MEM (mode, dest_addr);
3570
3571 if (type != 0)
3572 {
3573 set_mem_attributes (dest, type, 1);
3574
3575 if (flag_optimize_sibling_calls)
3576 /* Function incoming arguments may overlap with sibling call
3577 outgoing arguments and we cannot allow reordering of reads
3578 from function arguments with stores to outgoing arguments
3579 of sibling calls. */
3580 set_mem_alias_set (dest, 0);
3581 }
3582 emit_move_insn (dest, x);
3583 }
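/* Added worked example, assuming PUSH_ROUNDING rounds to a 4-byte slot
   and the usual PRE_DEC push code: pushing a 1-byte QImode value whose
   padding direction is downward gives rounded_size = 4 and
   padding_size = 3, so on a downward-growing stack the code above first
   decrements the stack pointer by 4 and then stores the byte at
   stack_pointer_rtx + 3.  */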
3584 #endif
3585
3586 /* Generate code to push X onto the stack, assuming it has mode MODE and
3587 type TYPE.
3588 MODE is redundant except when X is a CONST_INT (since they don't
3589 carry mode info).
3590 SIZE is an rtx for the size of data to be copied (in bytes),
3591 needed only if X is BLKmode.
3592
3593 ALIGN (in bits) is maximum alignment we can assume.
3594
3595 If PARTIAL and REG are both nonzero, then copy that many of the first
3596 bytes of X into registers starting with REG, and push the rest of X.
3597 The amount of space pushed is decreased by PARTIAL bytes.
3598 REG must be a hard register in this case.
3599 If REG is zero but PARTIAL is not, take all other actions for an
3600 argument partially in registers, but do not actually load any
3601 registers.
3602
3603 EXTRA is the amount in bytes of extra space to leave next to this arg.
3604 This is ignored if an argument block has already been allocated.
3605
3606 On a machine that lacks real push insns, ARGS_ADDR is the address of
3607 the bottom of the argument block for this call. We use indexing off there
3608 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3609 argument block has not been preallocated.
3610
3611 ARGS_SO_FAR is the size of args previously pushed for this call.
3612
3613 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3614 for arguments passed in registers. If nonzero, it will be the number
3615 of bytes required. */
3616
3617 void
3618 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3619 unsigned int align, int partial, rtx reg, int extra,
3620 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3621 rtx alignment_pad)
3622 {
3623 rtx xinner;
3624 enum direction stack_direction
3625 #ifdef STACK_GROWS_DOWNWARD
3626 = downward;
3627 #else
3628 = upward;
3629 #endif
3630
3631 /* Decide where to pad the argument: `downward' for below,
3632 `upward' for above, or `none' for don't pad it.
3633 Default is below for small data on big-endian machines; else above. */
3634 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3635
3636 /* Invert direction if stack is post-decrement.
3637 FIXME: why? */
3638 if (STACK_PUSH_CODE == POST_DEC)
3639 if (where_pad != none)
3640 where_pad = (where_pad == downward ? upward : downward);
3641
3642 xinner = x;
3643
3644 if (mode == BLKmode
3645 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
3646 {
3647 /* Copy a block into the stack, entirely or partially. */
3648
3649 rtx temp;
3650 int used;
3651 int offset;
3652 int skip;
3653
3654 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3655 used = partial - offset;
3656
3657 if (mode != BLKmode)
3658 {
3659 /* A value is to be stored in an insufficiently aligned
3660 stack slot; copy via a suitably aligned slot if
3661 necessary. */
3662 size = GEN_INT (GET_MODE_SIZE (mode));
3663 if (!MEM_P (xinner))
3664 {
3665 temp = assign_temp (type, 0, 1, 1);
3666 emit_move_insn (temp, xinner);
3667 xinner = temp;
3668 }
3669 }
3670
3671 gcc_assert (size);
3672
3673 /* USED is now the # of bytes we need not copy to the stack
3674 because registers will take care of them. */
3675
3676 if (partial != 0)
3677 xinner = adjust_address (xinner, BLKmode, used);
3678
3679 /* If the partial register-part of the arg counts in its stack size,
3680 skip the part of stack space corresponding to the registers.
3681 Otherwise, start copying to the beginning of the stack space,
3682 by setting SKIP to 0. */
3683 skip = (reg_parm_stack_space == 0) ? 0 : used;
3684
3685 #ifdef PUSH_ROUNDING
3686 /* Do it with several push insns if that doesn't take lots of insns
3687 and if there is no difficulty with push insns that skip bytes
3688 on the stack for alignment purposes. */
3689 if (args_addr == 0
3690 && PUSH_ARGS
3691 && GET_CODE (size) == CONST_INT
3692 && skip == 0
3693 && MEM_ALIGN (xinner) >= align
3694 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3695 /* Here we avoid the case of a structure whose weak alignment
3696 forces many pushes of a small amount of data,
3697 and such small pushes do rounding that causes trouble. */
3698 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3699 || align >= BIGGEST_ALIGNMENT
3700 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3701 == (align / BITS_PER_UNIT)))
3702 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3703 {
3704 /* Push padding now if padding above and stack grows down,
3705 or if padding below and stack grows up.
3706 But if space is already allocated, this has already been done. */
3707 if (extra && args_addr == 0
3708 && where_pad != none && where_pad != stack_direction)
3709 anti_adjust_stack (GEN_INT (extra));
3710
3711 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3712 }
3713 else
3714 #endif /* PUSH_ROUNDING */
3715 {
3716 rtx target;
3717
3718 /* Otherwise make space on the stack and copy the data
3719 to the address of that space. */
3720
3721 /* Deduct words put into registers from the size we must copy. */
3722 if (partial != 0)
3723 {
3724 if (GET_CODE (size) == CONST_INT)
3725 size = GEN_INT (INTVAL (size) - used);
3726 else
3727 size = expand_binop (GET_MODE (size), sub_optab, size,
3728 GEN_INT (used), NULL_RTX, 0,
3729 OPTAB_LIB_WIDEN);
3730 }
3731
3732 /* Get the address of the stack space.
3733 In this case, we do not deal with EXTRA separately.
3734 A single stack adjust will do. */
3735 if (! args_addr)
3736 {
3737 temp = push_block (size, extra, where_pad == downward);
3738 extra = 0;
3739 }
3740 else if (GET_CODE (args_so_far) == CONST_INT)
3741 temp = memory_address (BLKmode,
3742 plus_constant (args_addr,
3743 skip + INTVAL (args_so_far)));
3744 else
3745 temp = memory_address (BLKmode,
3746 plus_constant (gen_rtx_PLUS (Pmode,
3747 args_addr,
3748 args_so_far),
3749 skip));
3750
3751 if (!ACCUMULATE_OUTGOING_ARGS)
3752 {
3753 /* If the source is referenced relative to the stack pointer,
3754 copy it to another register to stabilize it. We do not need
3755 to do this if we know that we won't be changing sp. */
3756
3757 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3758 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3759 temp = copy_to_reg (temp);
3760 }
3761
3762 target = gen_rtx_MEM (BLKmode, temp);
3763
3764 /* We do *not* set_mem_attributes here, because incoming arguments
3765 may overlap with sibling call outgoing arguments and we cannot
3766 allow reordering of reads from function arguments with stores
3767 to outgoing arguments of sibling calls. We do, however, want
3768 to record the alignment of the stack slot. */
3769 /* ALIGN may well be better aligned than TYPE, e.g. due to
3770 PARM_BOUNDARY. Assume the caller isn't lying. */
3771 set_mem_align (target, align);
3772
3773 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3774 }
3775 }
3776 else if (partial > 0)
3777 {
3778 /* Scalar partly in registers. */
3779
3780 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3781 int i;
3782 int not_stack;
3783 /* # bytes of start of argument
3784 that we must make space for but need not store. */
3785 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3786 int args_offset = INTVAL (args_so_far);
3787 int skip;
3788
3789 /* Push padding now if padding above and stack grows down,
3790 or if padding below and stack grows up.
3791 But if space is already allocated, this has already been done. */
3792 if (extra && args_addr == 0
3793 && where_pad != none && where_pad != stack_direction)
3794 anti_adjust_stack (GEN_INT (extra));
3795
3796 /* If we make space by pushing it, we might as well push
3797 the real data. Otherwise, we can leave OFFSET nonzero
3798 and leave the space uninitialized. */
3799 if (args_addr == 0)
3800 offset = 0;
3801
3802 /* Now NOT_STACK gets the number of words that we don't need to
3803 allocate on the stack. Convert OFFSET to words too. */
3804 not_stack = (partial - offset) / UNITS_PER_WORD;
3805 offset /= UNITS_PER_WORD;
3806
3807 /* If the partial register-part of the arg counts in its stack size,
3808 skip the part of stack space corresponding to the registers.
3809 Otherwise, start copying to the beginning of the stack space,
3810 by setting SKIP to 0. */
3811 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3812
3813 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3814 x = validize_mem (force_const_mem (mode, x));
3815
3816 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3817 SUBREGs of such registers are not allowed. */
3818 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3819 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3820 x = copy_to_reg (x);
3821
3822 /* Loop over all the words allocated on the stack for this arg. */
3823 /* We can do it by words, because any scalar bigger than a word
3824 has a size that is a multiple of a word. */
3825 #ifndef PUSH_ARGS_REVERSED
3826 for (i = not_stack; i < size; i++)
3827 #else
3828 for (i = size - 1; i >= not_stack; i--)
3829 #endif
3830 if (i >= not_stack + offset)
3831 emit_push_insn (operand_subword_force (x, i, mode),
3832 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3833 0, args_addr,
3834 GEN_INT (args_offset + ((i - not_stack + skip)
3835 * UNITS_PER_WORD)),
3836 reg_parm_stack_space, alignment_pad);
3837 }
3838 else
3839 {
3840 rtx addr;
3841 rtx dest;
3842
3843 /* Push padding now if padding above and stack grows down,
3844 or if padding below and stack grows up.
3845 But if space is already allocated, this has already been done. */
3846 if (extra && args_addr == 0
3847 && where_pad != none && where_pad != stack_direction)
3848 anti_adjust_stack (GEN_INT (extra));
3849
3850 #ifdef PUSH_ROUNDING
3851 if (args_addr == 0 && PUSH_ARGS)
3852 emit_single_push_insn (mode, x, type);
3853 else
3854 #endif
3855 {
3856 if (GET_CODE (args_so_far) == CONST_INT)
3857 addr
3858 = memory_address (mode,
3859 plus_constant (args_addr,
3860 INTVAL (args_so_far)));
3861 else
3862 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3863 args_so_far));
3864 dest = gen_rtx_MEM (mode, addr);
3865
3866 /* We do *not* set_mem_attributes here, because incoming arguments
3867 may overlap with sibling call outgoing arguments and we cannot
3868 allow reordering of reads from function arguments with stores
3869 to outgoing arguments of sibling calls. We do, however, want
3870 to record the alignment of the stack slot. */
3871 /* ALIGN may well be better aligned than TYPE, e.g. due to
3872 PARM_BOUNDARY. Assume the caller isn't lying. */
3873 set_mem_align (dest, align);
3874
3875 emit_move_insn (dest, x);
3876 }
3877 }
3878
3879 /* If part should go in registers, copy that part
3880 into the appropriate registers. Do this now, at the end,
3881 since mem-to-mem copies above may do function calls. */
3882 if (partial > 0 && reg != 0)
3883 {
3884 /* Handle calls that pass values in multiple non-contiguous locations.
3885 The Irix 6 ABI has examples of this. */
3886 if (GET_CODE (reg) == PARALLEL)
3887 emit_group_load (reg, x, type, -1);
3888 else
3889 {
3890 gcc_assert (partial % UNITS_PER_WORD == 0);
3891 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
3892 }
3893 }
3894
3895 if (extra && args_addr == 0 && where_pad == stack_direction)
3896 anti_adjust_stack (GEN_INT (extra));
3897
3898 if (alignment_pad && args_addr == 0)
3899 anti_adjust_stack (alignment_pad);
3900 }
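/* Illustrative call sketch, added for exposition; the argument values
   are hypothetical.  It pushes a plain SImode value X with no
   partial-register part on a target with real push insns, so ARGS_ADDR
   is 0 and the emit_single_push_insn path above is taken.  */
#if 0
  emit_push_insn (x, SImode, integer_type_node, NULL_RTX,
                  GET_MODE_ALIGNMENT (SImode), 0, NULL_RTX, 0,
                  NULL_RTX, const0_rtx, 0, NULL_RTX);
#endif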
3901 \f
3902 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3903 operations. */
3904
3905 static rtx
3906 get_subtarget (rtx x)
3907 {
3908 return (optimize
3909 || x == 0
3910 /* Only registers can be subtargets. */
3911 || !REG_P (x)
3912 /* Don't use hard regs to avoid extending their life. */
3913 || REGNO (x) < FIRST_PSEUDO_REGISTER
3914 ? 0 : x);
3915 }
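/* Added note: in other words, X is returned only when we are not
   optimizing and X is an existing pseudo register; in every other case
   0 is returned, i.e. no subtarget is suggested.  */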
3916
3917 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
3918 FIELD is a bitfield. Returns true if the optimization was successful,
3919 and there's nothing else to do. */
3920
3921 static bool
3922 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3923 unsigned HOST_WIDE_INT bitpos,
3924 enum machine_mode mode1, rtx str_rtx,
3925 tree to, tree src)
3926 {
3927 enum machine_mode str_mode = GET_MODE (str_rtx);
3928 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
3929 tree op0, op1;
3930 rtx value, result;
3931 optab binop;
3932
3933 if (mode1 != VOIDmode
3934 || bitsize >= BITS_PER_WORD
3935 || str_bitsize > BITS_PER_WORD
3936 || TREE_SIDE_EFFECTS (to)
3937 || TREE_THIS_VOLATILE (to))
3938 return false;
3939
3940 STRIP_NOPS (src);
3941 if (!BINARY_CLASS_P (src)
3942 || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
3943 return false;
3944
3945 op0 = TREE_OPERAND (src, 0);
3946 op1 = TREE_OPERAND (src, 1);
3947 STRIP_NOPS (op0);
3948
3949 if (!operand_equal_p (to, op0, 0))
3950 return false;
3951
3952 if (MEM_P (str_rtx))
3953 {
3954 unsigned HOST_WIDE_INT offset1;
3955
3956 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
3957 str_mode = word_mode;
3958 str_mode = get_best_mode (bitsize, bitpos,
3959 MEM_ALIGN (str_rtx), str_mode, 0);
3960 if (str_mode == VOIDmode)
3961 return false;
3962 str_bitsize = GET_MODE_BITSIZE (str_mode);
3963
3964 offset1 = bitpos;
3965 bitpos %= str_bitsize;
3966 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
3967 str_rtx = adjust_address (str_rtx, str_mode, offset1);
3968 }
3969 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
3970 return false;
3971
3972 /* If the bit field covers the whole REG/MEM, store_field
3973 will likely generate better code. */
3974 if (bitsize >= str_bitsize)
3975 return false;
3976
3977 /* We can't handle fields split across multiple entities. */
3978 if (bitpos + bitsize > str_bitsize)
3979 return false;
3980
3981 if (BYTES_BIG_ENDIAN)
3982 bitpos = str_bitsize - bitpos - bitsize;
3983
3984 switch (TREE_CODE (src))
3985 {
3986 case PLUS_EXPR:
3987 case MINUS_EXPR:
3988 /* For now, just optimize the case of the topmost bitfield
3989 where we don't need to do any masking and also
3990 1 bit bitfields where xor can be used.
3991 We might win by one instruction for the other bitfields
3992 too if insv/extv instructions aren't used, so that
3993 can be added later. */
3994 if (bitpos + bitsize != str_bitsize
3995 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
3996 break;
3997
3998 value = expand_expr (op1, NULL_RTX, str_mode, 0);
3999 value = convert_modes (str_mode,
4000 TYPE_MODE (TREE_TYPE (op1)), value,
4001 TYPE_UNSIGNED (TREE_TYPE (op1)));
4002
4003 /* We may be accessing data outside the field, which means
4004 we can alias adjacent data. */
4005 if (MEM_P (str_rtx))
4006 {
4007 str_rtx = shallow_copy_rtx (str_rtx);
4008 set_mem_alias_set (str_rtx, 0);
4009 set_mem_expr (str_rtx, 0);
4010 }
4011
4012 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
4013 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4014 {
4015 value = expand_and (str_mode, value, const1_rtx, NULL);
4016 binop = xor_optab;
4017 }
4018 value = expand_shift (LSHIFT_EXPR, str_mode, value,
4019 build_int_cst (NULL_TREE, bitpos),
4020 NULL_RTX, 1);
4021 result = expand_binop (str_mode, binop, str_rtx,
4022 value, str_rtx, 1, OPTAB_WIDEN);
4023 if (result != str_rtx)
4024 emit_move_insn (str_rtx, result);
4025 return true;
4026
4027 case BIT_IOR_EXPR:
4028 case BIT_XOR_EXPR:
4029 if (TREE_CODE (op1) != INTEGER_CST)
4030 break;
4031 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), 0);
4032 value = convert_modes (GET_MODE (str_rtx),
4033 TYPE_MODE (TREE_TYPE (op1)), value,
4034 TYPE_UNSIGNED (TREE_TYPE (op1)));
4035
4036 /* We may be accessing data outside the field, which means
4037 we can alias adjacent data. */
4038 if (MEM_P (str_rtx))
4039 {
4040 str_rtx = shallow_copy_rtx (str_rtx);
4041 set_mem_alias_set (str_rtx, 0);
4042 set_mem_expr (str_rtx, 0);
4043 }
4044
4045 binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
4046 if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
4047 {
4048 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
4049 - 1);
4050 value = expand_and (GET_MODE (str_rtx), value, mask,
4051 NULL_RTX);
4052 }
4053 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
4054 build_int_cst (NULL_TREE, bitpos),
4055 NULL_RTX, 1);
4056 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
4057 value, str_rtx, 1, OPTAB_WIDEN);
4058 if (result != str_rtx)
4059 emit_move_insn (str_rtx, result);
4060 return true;
4061
4062 default:
4063 break;
4064 }
4065
4066 return false;
4067 }
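/* Added illustration of the transformation above; the struct is
   hypothetical and assumes the usual little-endian bitfield layout.
   For
     struct S { unsigned int lo : 28, hi : 4; } s;
     s.hi += 1;
   the store targets the topmost bitfield of the word, so the PLUS_EXPR
   case needs no masking and emits a single word-mode addition of
   (1 << 28) to the containing word.  */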
4068
4069
4070 /* Expand an assignment that stores the value of FROM into TO. */
4071
4072 void
4073 expand_assignment (tree to, tree from)
4074 {
4075 rtx to_rtx = 0;
4076 rtx result;
4077
4078 /* Don't crash if the lhs of the assignment was erroneous. */
4079 if (TREE_CODE (to) == ERROR_MARK)
4080 {
4081 result = expand_normal (from);
4082 return;
4083 }
4084
4085 /* Optimize away no-op moves without side-effects. */
4086 if (operand_equal_p (to, from, 0))
4087 return;
4088
4089 /* Assignment of a structure component needs special treatment
4090 if the structure component's rtx is not simply a MEM.
4091 Assignment of an array element at a constant index, and assignment of
4092 an array element in an unaligned packed structure field, has the same
4093 problem. */
4094 if (handled_component_p (to)
4095 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4096 {
4097 enum machine_mode mode1;
4098 HOST_WIDE_INT bitsize, bitpos;
4099 tree offset;
4100 int unsignedp;
4101 int volatilep = 0;
4102 tree tem;
4103
4104 push_temp_slots ();
4105 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4106 &unsignedp, &volatilep, true);
4107
4108 /* If we are going to use store_bit_field and extract_bit_field,
4109 make sure to_rtx will be safe for multiple use. */
4110
4111 to_rtx = expand_normal (tem);
4112
4113 if (offset != 0)
4114 {
4115 rtx offset_rtx;
4116
4117 if (!MEM_P (to_rtx))
4118 {
4119 /* We can get constant negative offsets into arrays with broken
4120 user code. Translate this to a trap instead of ICEing. */
4121 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4122 expand_builtin_trap ();
4123 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4124 }
4125
4126 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4127 #ifdef POINTERS_EXTEND_UNSIGNED
4128 if (GET_MODE (offset_rtx) != Pmode)
4129 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4130 #else
4131 if (GET_MODE (offset_rtx) != ptr_mode)
4132 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4133 #endif
4134
4135 /* A constant address in TO_RTX can have VOIDmode; we must not try
4136 to call force_reg in that case, so avoid it. */
4137 if (MEM_P (to_rtx)
4138 && GET_MODE (to_rtx) == BLKmode
4139 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4140 && bitsize > 0
4141 && (bitpos % bitsize) == 0
4142 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4143 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4144 {
4145 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4146 bitpos = 0;
4147 }
4148
4149 to_rtx = offset_address (to_rtx, offset_rtx,
4150 highest_pow2_factor_for_target (to,
4151 offset));
4152 }
4153
4154 /* Handle expand_expr of a complex value returning a CONCAT. */
4155 if (GET_CODE (to_rtx) == CONCAT)
4156 {
4157 if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE)
4158 {
4159 gcc_assert (bitpos == 0);
4160 result = store_expr (from, to_rtx, false);
4161 }
4162 else
4163 {
4164 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
4165 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false);
4166 }
4167 }
4168 else
4169 {
4170 if (MEM_P (to_rtx))
4171 {
4172 /* If the field is at offset zero, we could have been given the
4173 DECL_RTX of the parent struct. Don't munge it. */
4174 to_rtx = shallow_copy_rtx (to_rtx);
4175
4176 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4177
4178 /* Deal with volatile and readonly fields. The former is only
4179 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4180 if (volatilep)
4181 MEM_VOLATILE_P (to_rtx) = 1;
4182 if (component_uses_parent_alias_set (to))
4183 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4184 }
4185
4186 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
4187 to_rtx, to, from))
4188 result = NULL;
4189 else
4190 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4191 TREE_TYPE (tem), get_alias_set (to));
4192 }
4193
4194 if (result)
4195 preserve_temp_slots (result);
4196 free_temp_slots ();
4197 pop_temp_slots ();
4198 return;
4199 }
4200
4201 /* If the rhs is a function call and its value is not an aggregate,
4202 call the function before we start to compute the lhs.
4203 This is needed for correct code for cases such as
4204 val = setjmp (buf) on machines where reference to val
4205 requires loading up part of an address in a separate insn.
4206
4207 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4208 since it might be a promoted variable where the zero- or sign- extension
4209 needs to be done. Handling this in the normal way is safe because no
4210 computation is done before the call. */
4211 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4212 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4213 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4214 && REG_P (DECL_RTL (to))))
4215 {
4216 rtx value;
4217
4218 push_temp_slots ();
4219 value = expand_normal (from);
4220 if (to_rtx == 0)
4221 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4222
4223 /* Handle calls that return values in multiple non-contiguous locations.
4224 The Irix 6 ABI has examples of this. */
4225 if (GET_CODE (to_rtx) == PARALLEL)
4226 emit_group_load (to_rtx, value, TREE_TYPE (from),
4227 int_size_in_bytes (TREE_TYPE (from)));
4228 else if (GET_MODE (to_rtx) == BLKmode)
4229 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4230 else
4231 {
4232 if (POINTER_TYPE_P (TREE_TYPE (to)))
4233 value = convert_memory_address (GET_MODE (to_rtx), value);
4234 emit_move_insn (to_rtx, value);
4235 }
4236 preserve_temp_slots (to_rtx);
4237 free_temp_slots ();
4238 pop_temp_slots ();
4239 return;
4240 }
4241
4242 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4243 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4244
4245 if (to_rtx == 0)
4246 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4247
4248 /* Don't move directly into a return register. */
4249 if (TREE_CODE (to) == RESULT_DECL
4250 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4251 {
4252 rtx temp;
4253
4254 push_temp_slots ();
4255 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
4256
4257 if (GET_CODE (to_rtx) == PARALLEL)
4258 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4259 int_size_in_bytes (TREE_TYPE (from)));
4260 else
4261 emit_move_insn (to_rtx, temp);
4262
4263 preserve_temp_slots (to_rtx);
4264 free_temp_slots ();
4265 pop_temp_slots ();
4266 return;
4267 }
4268
4269 /* In case we are returning the contents of an object which overlaps
4270 the place the value is being stored, use a safe function when copying
4271 a value through a pointer into a structure value return block. */
4272 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4273 && current_function_returns_struct
4274 && !current_function_returns_pcc_struct)
4275 {
4276 rtx from_rtx, size;
4277
4278 push_temp_slots ();
4279 size = expr_size (from);
4280 from_rtx = expand_normal (from);
4281
4282 emit_library_call (memmove_libfunc, LCT_NORMAL,
4283 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4284 XEXP (from_rtx, 0), Pmode,
4285 convert_to_mode (TYPE_MODE (sizetype),
4286 size, TYPE_UNSIGNED (sizetype)),
4287 TYPE_MODE (sizetype));
4288
4289 preserve_temp_slots (to_rtx);
4290 free_temp_slots ();
4291 pop_temp_slots ();
4292 return;
4293 }
4294
4295 /* Compute FROM and store the value in the rtx we got. */
4296
4297 push_temp_slots ();
4298 result = store_expr (from, to_rtx, 0);
4299 preserve_temp_slots (result);
4300 free_temp_slots ();
4301 pop_temp_slots ();
4302 return;
4303 }
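/* Added summary of the paths above, using hypothetical C fragments:
   for "p->f = v" where F is a bitfield component, handled_component_p
   is true, get_inner_reference supplies BITSIZE and BITPOS,
   optimize_bitfield_assignment_op is tried first and store_field is
   the fallback; a plain "x = f ()" whose value is not an aggregate
   instead takes the CALL_EXPR branch, which expands the call before
   touching the lhs.  */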
4304
4305 /* Generate code for computing expression EXP,
4306 and storing the value into TARGET.
4307
4308 If the mode is BLKmode then we may return TARGET itself.
4309 It turns out that in BLKmode it doesn't cause a problem,
4310 because C has no operators that could combine two different
4311 assignments into the same BLKmode object with different values
4312 with no sequence point. Will other languages need this to
4313 be more thorough?
4314
4315 If CALL_PARAM_P is nonzero, this is a store into a call param on the
4316 stack, and block moves may need to be treated specially. */
4317
4318 rtx
4319 store_expr (tree exp, rtx target, int call_param_p)
4320 {
4321 rtx temp;
4322 rtx alt_rtl = NULL_RTX;
4323 int dont_return_target = 0;
4324
4325 if (VOID_TYPE_P (TREE_TYPE (exp)))
4326 {
4327 /* C++ can generate ?: expressions with a throw expression in one
4328 branch and an rvalue in the other. Here, we resolve attempts to
4329 store the throw expression's nonexistent result. */
4330 gcc_assert (!call_param_p);
4331 expand_expr (exp, const0_rtx, VOIDmode, 0);
4332 return NULL_RTX;
4333 }
4334 if (TREE_CODE (exp) == COMPOUND_EXPR)
4335 {
4336 /* Perform first part of compound expression, then assign from second
4337 part. */
4338 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4339 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4340 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4341 }
4342 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4343 {
4344 /* For conditional expression, get safe form of the target. Then
4345 test the condition, doing the appropriate assignment on either
4346 side. This avoids the creation of unnecessary temporaries.
4347 For non-BLKmode, it is more efficient not to do this. */
4348
4349 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4350
4351 do_pending_stack_adjust ();
4352 NO_DEFER_POP;
4353 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4354 store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4355 emit_jump_insn (gen_jump (lab2));
4356 emit_barrier ();
4357 emit_label (lab1);
4358 store_expr (TREE_OPERAND (exp, 2), target, call_param_p);
4359 emit_label (lab2);
4360 OK_DEFER_POP;
4361
4362 return NULL_RTX;
4363 }
4364 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4365 /* If this is a scalar in a register that is stored in a wider mode
4366 than the declared mode, compute the result into its declared mode
4367 and then convert to the wider mode. Our value is the computed
4368 expression. */
4369 {
4370 rtx inner_target = 0;
4371
4372 /* We can do the conversion inside EXP, which will often result
4373 in some optimizations. Do the conversion in two steps: first
4374 change the signedness, if needed, then the extend. But don't
4375 do this if the type of EXP is a subtype of something else
4376 since then the conversion might involve more than just
4377 converting modes. */
4378 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4379 && TREE_TYPE (TREE_TYPE (exp)) == 0
4380 && (!lang_hooks.reduce_bit_field_operations
4381 || (GET_MODE_PRECISION (GET_MODE (target))
4382 == TYPE_PRECISION (TREE_TYPE (exp)))))
4383 {
4384 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4385 != SUBREG_PROMOTED_UNSIGNED_P (target))
4386 {
4387 /* Some types, e.g. Fortran's logical*4, won't have a signed
4388 version, so use the mode instead. */
4389 tree ntype
4390 = (get_signed_or_unsigned_type
4391 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
4392 if (ntype == NULL)
4393 ntype = lang_hooks.types.type_for_mode
4394 (TYPE_MODE (TREE_TYPE (exp)),
4395 SUBREG_PROMOTED_UNSIGNED_P (target));
4396
4397 exp = fold_convert (ntype, exp);
4398 }
4399
4400 exp = fold_convert (lang_hooks.types.type_for_mode
4401 (GET_MODE (SUBREG_REG (target)),
4402 SUBREG_PROMOTED_UNSIGNED_P (target)),
4403 exp);
4404
4405 inner_target = SUBREG_REG (target);
4406 }
4407
4408 temp = expand_expr (exp, inner_target, VOIDmode,
4409 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4410
4411 /* If TEMP is a VOIDmode constant, use convert_modes to make
4412 sure that we properly convert it. */
4413 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4414 {
4415 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4416 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4417 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4418 GET_MODE (target), temp,
4419 SUBREG_PROMOTED_UNSIGNED_P (target));
4420 }
4421
4422 convert_move (SUBREG_REG (target), temp,
4423 SUBREG_PROMOTED_UNSIGNED_P (target));
4424
4425 return NULL_RTX;
4426 }
4427 else
4428 {
4429 temp = expand_expr_real (exp, target, GET_MODE (target),
4430 (call_param_p
4431 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4432 &alt_rtl);
4433 /* Return TARGET if it's a specified hardware register.
4434 If TARGET is a volatile mem ref, either return TARGET
4435 or return a reg copied *from* TARGET; ANSI requires this.
4436
4437 Otherwise, if TEMP is not TARGET, return TEMP
4438 if it is constant (for efficiency),
4439 or if we really want the correct value. */
4440 if (!(target && REG_P (target)
4441 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4442 && !(MEM_P (target) && MEM_VOLATILE_P (target))
4443 && ! rtx_equal_p (temp, target)
4444 && CONSTANT_P (temp))
4445 dont_return_target = 1;
4446 }
4447
4448 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4449 the same as that of TARGET, adjust the constant. This is needed, for
4450 example, in case it is a CONST_DOUBLE and we want only a word-sized
4451 value. */
4452 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4453 && TREE_CODE (exp) != ERROR_MARK
4454 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4455 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4456 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4457
4458 /* If value was not generated in the target, store it there.
4459 Convert the value to TARGET's type first if necessary and emit the
4460 pending increments that have been queued when expanding EXP.
4461 Note that we cannot emit the whole queue blindly because this will
4462 effectively disable the POST_INC optimization later.
4463
4464 If TEMP and TARGET compare equal according to rtx_equal_p, but
4465 one or both of them are volatile memory refs, we have to distinguish
4466 two cases:
4467 - expand_expr has used TARGET. In this case, we must not generate
4468 another copy. This can be detected by TARGET being equal according
4469 to == .
4470 - expand_expr has not used TARGET - that means that the source just
4471 happens to have the same RTX form. Since temp will have been created
4472 by expand_expr, it will compare unequal according to == .
4473 We must generate a copy in this case, to reach the correct number
4474 of volatile memory references. */
4475
4476 if ((! rtx_equal_p (temp, target)
4477 || (temp != target && (side_effects_p (temp)
4478 || side_effects_p (target))))
4479 && TREE_CODE (exp) != ERROR_MARK
4480 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4481 but TARGET is not a valid memory reference, TEMP will differ
4482 from TARGET although it is really the same location. */
4483 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4484 /* If there's nothing to copy, don't bother. Don't call
4485 expr_size unless necessary, because some front ends' (e.g. C++)
4486 expr_size hook must not be given objects that are not
4487 supposed to be bit-copied or bit-initialized. */
4488 && expr_size (exp) != const0_rtx)
4489 {
4490 if (GET_MODE (temp) != GET_MODE (target)
4491 && GET_MODE (temp) != VOIDmode)
4492 {
4493 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4494 if (dont_return_target)
4495 {
4496 /* In this case, we will return TEMP,
4497 so make sure it has the proper mode.
4498 But don't forget to store the value into TARGET. */
4499 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4500 emit_move_insn (target, temp);
4501 }
4502 else
4503 convert_move (target, temp, unsignedp);
4504 }
4505
4506 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4507 {
4508 /* Handle copying a string constant into an array. The string
4509 constant may be shorter than the array. So copy just the string's
4510 actual length, and clear the rest. First get the size of the data
4511 type of the string, which is actually the size of the target. */
4512 rtx size = expr_size (exp);
4513
4514 if (GET_CODE (size) == CONST_INT
4515 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4516 emit_block_move (target, temp, size,
4517 (call_param_p
4518 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4519 else
4520 {
4521 /* Compute the size of the data to copy from the string. */
4522 tree copy_size
4523 = size_binop (MIN_EXPR,
4524 make_tree (sizetype, size),
4525 size_int (TREE_STRING_LENGTH (exp)));
4526 rtx copy_size_rtx
4527 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4528 (call_param_p
4529 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4530 rtx label = 0;
4531
4532 /* Copy that much. */
4533 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4534 TYPE_UNSIGNED (sizetype));
4535 emit_block_move (target, temp, copy_size_rtx,
4536 (call_param_p
4537 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4538
4539 /* Figure out how much is left in TARGET that we have to clear.
4540 Do all calculations in ptr_mode. */
4541 if (GET_CODE (copy_size_rtx) == CONST_INT)
4542 {
4543 size = plus_constant (size, -INTVAL (copy_size_rtx));
4544 target = adjust_address (target, BLKmode,
4545 INTVAL (copy_size_rtx));
4546 }
4547 else
4548 {
4549 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4550 copy_size_rtx, NULL_RTX, 0,
4551 OPTAB_LIB_WIDEN);
4552
4553 #ifdef POINTERS_EXTEND_UNSIGNED
4554 if (GET_MODE (copy_size_rtx) != Pmode)
4555 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4556 TYPE_UNSIGNED (sizetype));
4557 #endif
4558
4559 target = offset_address (target, copy_size_rtx,
4560 highest_pow2_factor (copy_size));
4561 label = gen_label_rtx ();
4562 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4563 GET_MODE (size), 0, label);
4564 }
4565
4566 if (size != const0_rtx)
4567 clear_storage (target, size, BLOCK_OP_NORMAL);
4568
4569 if (label)
4570 emit_label (label);
4571 }
4572 }
4573 /* Handle calls that return values in multiple non-contiguous locations.
4574 The Irix 6 ABI has examples of this. */
4575 else if (GET_CODE (target) == PARALLEL)
4576 emit_group_load (target, temp, TREE_TYPE (exp),
4577 int_size_in_bytes (TREE_TYPE (exp)));
4578 else if (GET_MODE (temp) == BLKmode)
4579 emit_block_move (target, temp, expr_size (exp),
4580 (call_param_p
4581 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4582 else
4583 {
4584 temp = force_operand (temp, target);
4585 if (temp != target)
4586 emit_move_insn (target, temp);
4587 }
4588 }
4589
4590 return NULL_RTX;
4591 }
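/* Added worked illustration of the STRING_CST branch above; the
   declaration is hypothetical.  For
     char buf[16] = "hi";
   expr_size (exp) is the size of the target (16), which exceeds the
   string's TREE_STRING_LENGTH, so only the string's own bytes are
   block-copied and clear_storage then zeros the remaining tail of the
   array.  */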
4592 \f
4593 /* Helper for categorize_ctor_elements. Identical interface. */
4594
4595 static bool
4596 categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4597 HOST_WIDE_INT *p_elt_count,
4598 bool *p_must_clear)
4599 {
4600 unsigned HOST_WIDE_INT idx;
4601 HOST_WIDE_INT nz_elts, elt_count;
4602 tree value, purpose;
4603
4604 /* Whether CTOR is a valid constant initializer, in accordance with what
4605 initializer_constant_valid_p does. If inferred from the constructor
4606 elements, true until proven otherwise. */
4607 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
4608 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
4609
4610 nz_elts = 0;
4611 elt_count = 0;
4612
4613 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
4614 {
4615 HOST_WIDE_INT mult;
4616
4617 mult = 1;
4618 if (TREE_CODE (purpose) == RANGE_EXPR)
4619 {
4620 tree lo_index = TREE_OPERAND (purpose, 0);
4621 tree hi_index = TREE_OPERAND (purpose, 1);
4622
4623 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4624 mult = (tree_low_cst (hi_index, 1)
4625 - tree_low_cst (lo_index, 1) + 1);
4626 }
4627
4628 switch (TREE_CODE (value))
4629 {
4630 case CONSTRUCTOR:
4631 {
4632 HOST_WIDE_INT nz = 0, ic = 0;
4633
4634 bool const_elt_p
4635 = categorize_ctor_elements_1 (value, &nz, &ic, p_must_clear);
4636
4637 nz_elts += mult * nz;
4638 elt_count += mult * ic;
4639
4640 if (const_from_elts_p && const_p)
4641 const_p = const_elt_p;
4642 }
4643 break;
4644
4645 case INTEGER_CST:
4646 case REAL_CST:
4647 if (!initializer_zerop (value))
4648 nz_elts += mult;
4649 elt_count += mult;
4650 break;
4651
4652 case STRING_CST:
4653 nz_elts += mult * TREE_STRING_LENGTH (value);
4654 elt_count += mult * TREE_STRING_LENGTH (value);
4655 break;
4656
4657 case COMPLEX_CST:
4658 if (!initializer_zerop (TREE_REALPART (value)))
4659 nz_elts += mult;
4660 if (!initializer_zerop (TREE_IMAGPART (value)))
4661 nz_elts += mult;
4662 elt_count += mult;
4663 break;
4664
4665 case VECTOR_CST:
4666 {
4667 tree v;
4668 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4669 {
4670 if (!initializer_zerop (TREE_VALUE (v)))
4671 nz_elts += mult;
4672 elt_count += mult;
4673 }
4674 }
4675 break;
4676
4677 default:
4678 nz_elts += mult;
4679 elt_count += mult;
4680
4681 if (const_from_elts_p && const_p)
4682 const_p = initializer_constant_valid_p (value, TREE_TYPE (value))
4683 != NULL_TREE;
4684 break;
4685 }
4686 }
4687
4688 if (!*p_must_clear
4689 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4690 || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4691 {
4692 tree init_sub_type;
4693 bool clear_this = true;
4694
4695 if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
4696 {
4697 /* We don't expect more than one element of the union to be
4698 initialized. Not sure what we should do otherwise... */
4699 gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
4700 == 1);
4701
4702 init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
4703 CONSTRUCTOR_ELTS (ctor),
4704 0)->value);
4705
4706 /* ??? We could look at each element of the union, and find the
4707 largest element. Which would avoid comparing the size of the
4708 initialized element against any tail padding in the union.
4709 Doesn't seem worth the effort... */
4710 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4711 TYPE_SIZE (init_sub_type)) == 1)
4712 {
4713 /* And now we have to find out if the element itself is fully
4714 constructed. E.g. for union { struct { int a, b; } s; } u
4715 = { .s = { .a = 1 } }. */
4716 if (elt_count == count_type_elements (init_sub_type, false))
4717 clear_this = false;
4718 }
4719 }
4720
4721 *p_must_clear = clear_this;
4722 }
4723
4724 *p_nz_elts += nz_elts;
4725 *p_elt_count += elt_count;
4726
4727 return const_p;
4728 }
4729
4730 /* Examine CTOR to discover:
4731 * how many scalar fields are set to nonzero values,
4732 and place it in *P_NZ_ELTS;
4733 * how many scalar fields in total are in CTOR,
4734 and place it in *P_ELT_COUNT.
4735 * if a type is a union, and the initializer from the constructor
4736 is not the largest element in the union, then set *p_must_clear.
4737
4738 Return whether or not CTOR is a valid static constant initializer, the same
4739 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
4740
4741 bool
4742 categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4743 HOST_WIDE_INT *p_elt_count,
4744 bool *p_must_clear)
4745 {
4746 *p_nz_elts = 0;
4747 *p_elt_count = 0;
4748 *p_must_clear = false;
4749
4750 return
4751 categorize_ctor_elements_1 (ctor, p_nz_elts, p_elt_count, p_must_clear);
4752 }
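/* Added worked example; the initializer is hypothetical.  For
     struct { int a, b, c; } x = { 1, 0, 2 };
   this sets *P_NZ_ELTS to 2 and *P_ELT_COUNT to 3, and returns true
   because every element is a valid constant initializer.  */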
4753
4754 /* Count the number of scalars in TYPE. Return -1 on overflow or
4755 if TYPE is variable-sized. If ALLOW_FLEXARR is true, don't count a
4756 flexible array member at the end of the structure. */
4757
4758 HOST_WIDE_INT
4759 count_type_elements (tree type, bool allow_flexarr)
4760 {
4761 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4762 switch (TREE_CODE (type))
4763 {
4764 case ARRAY_TYPE:
4765 {
4766 tree telts = array_type_nelts (type);
4767 if (telts && host_integerp (telts, 1))
4768 {
4769 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4770 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
4771 if (n == 0)
4772 return 0;
4773 else if (max / n > m)
4774 return n * m;
4775 }
4776 return -1;
4777 }
4778
4779 case RECORD_TYPE:
4780 {
4781 HOST_WIDE_INT n = 0, t;
4782 tree f;
4783
4784 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4785 if (TREE_CODE (f) == FIELD_DECL)
4786 {
4787 t = count_type_elements (TREE_TYPE (f), false);
4788 if (t < 0)
4789 {
4790 /* Check for structures with flexible array member. */
4791 tree tf = TREE_TYPE (f);
4792 if (allow_flexarr
4793 && TREE_CHAIN (f) == NULL
4794 && TREE_CODE (tf) == ARRAY_TYPE
4795 && TYPE_DOMAIN (tf)
4796 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
4797 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
4798 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
4799 && int_size_in_bytes (type) >= 0)
4800 break;
4801
4802 return -1;
4803 }
4804 n += t;
4805 }
4806
4807 return n;
4808 }
4809
4810 case UNION_TYPE:
4811 case QUAL_UNION_TYPE:
4812 {
4813 /* Ho hum. How in the world do we guess here? Clearly it isn't
4814 right to count the fields. Guess based on the number of words. */
4815 HOST_WIDE_INT n = int_size_in_bytes (type);
4816 if (n < 0)
4817 return -1;
4818 return n / UNITS_PER_WORD;
4819 }
4820
4821 case COMPLEX_TYPE:
4822 return 2;
4823
4824 case VECTOR_TYPE:
4825 return TYPE_VECTOR_SUBPARTS (type);
4826
4827 case INTEGER_TYPE:
4828 case REAL_TYPE:
4829 case ENUMERAL_TYPE:
4830 case BOOLEAN_TYPE:
4831 case POINTER_TYPE:
4832 case OFFSET_TYPE:
4833 case REFERENCE_TYPE:
4834 return 1;
4835
4836 case VOID_TYPE:
4837 case METHOD_TYPE:
4838 case FUNCTION_TYPE:
4839 case LANG_TYPE:
4840 default:
4841 gcc_unreachable ();
4842 }
4843 }
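/* Added example return values, derived from the cases above: 8 for
   "int a[4][2]", 2 for "struct { int x; double y; }", 2 for
   "_Complex double", and -1 for a variable-length array type.  */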
4844
4845 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
4846
4847 static int
4848 mostly_zeros_p (tree exp)
4849 {
4850 if (TREE_CODE (exp) == CONSTRUCTOR)
4851
4852 {
4853 HOST_WIDE_INT nz_elts, count, elts;
4854 bool must_clear;
4855
4856 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
4857 if (must_clear)
4858 return 1;
4859
4860 elts = count_type_elements (TREE_TYPE (exp), false);
4861
4862 return nz_elts < elts / 4;
4863 }
4864
4865 return initializer_zerop (exp);
4866 }
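/* Added example; the array is hypothetical.  For
     int a[8] = { 1 };
   the constructor has one nonzero element while the type has eight
   scalars, so 1 < 8/4 holds and this returns 1.  */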
4867
4868 /* Return 1 if EXP contains all zeros. */
4869
4870 static int
4871 all_zeros_p (tree exp)
4872 {
4873 if (TREE_CODE (exp) == CONSTRUCTOR)
4874
4875 {
4876 HOST_WIDE_INT nz_elts, count;
4877 bool must_clear;
4878
4879 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
4880 return nz_elts == 0;
4881 }
4882
4883 return initializer_zerop (exp);
4884 }
4885 \f
4886 /* Helper function for store_constructor.
4887 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4888 TYPE is the type of the CONSTRUCTOR, not the element type.
4889 CLEARED is as for store_constructor.
4890 ALIAS_SET is the alias set to use for any stores.
4891
4892 This provides a recursive shortcut back to store_constructor when it isn't
4893 necessary to go through store_field. This is so that we can pass through
4894 the cleared field to let store_constructor know that we may not have to
4895 clear a substructure if the outer structure has already been cleared. */
4896
4897 static void
4898 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4899 HOST_WIDE_INT bitpos, enum machine_mode mode,
4900 tree exp, tree type, int cleared, int alias_set)
4901 {
4902 if (TREE_CODE (exp) == CONSTRUCTOR
4903 /* We can only call store_constructor recursively if the size and
4904 bit position are on a byte boundary. */
4905 && bitpos % BITS_PER_UNIT == 0
4906 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
4907 /* If we have a nonzero bitpos for a register target, then we just
4908 let store_field do the bitfield handling. This is unlikely to
4909 generate unnecessary clear instructions anyway. */
4910 && (bitpos == 0 || MEM_P (target)))
4911 {
4912 if (MEM_P (target))
4913 target
4914 = adjust_address (target,
4915 GET_MODE (target) == BLKmode
4916 || 0 != (bitpos
4917 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4918 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4919
4920
4921 /* Update the alias set, if required. */
4922 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
4923 && MEM_ALIAS_SET (target) != 0)
4924 {
4925 target = copy_rtx (target);
4926 set_mem_alias_set (target, alias_set);
4927 }
4928
4929 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4930 }
4931 else
4932 store_field (target, bitsize, bitpos, mode, exp, type, alias_set);
4933 }
4934
4935 /* Store the value of constructor EXP into the rtx TARGET.
4936 TARGET is either a REG or a MEM; we know it cannot conflict, since
4937 safe_from_p has been called.
4938 CLEARED is true if TARGET is known to have been zero'd.
4939 SIZE is the number of bytes of TARGET we are allowed to modify: this
4940 may not be the same as the size of EXP if we are assigning to a field
4941 which has been packed to exclude padding bits. */
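/* Added illustrative note: for a hypothetical "struct { int a, b; } s
   = { 1, 2 };" stored into a MEM target, the RECORD_TYPE case below
   walks the constructor and writes each field through
   store_constructor_field with that field's BITSIZE and BITPOS.  */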
4942
4943 static void
4944 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4945 {
4946 tree type = TREE_TYPE (exp);
4947 #ifdef WORD_REGISTER_OPERATIONS
4948 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4949 #endif
4950
4951 switch (TREE_CODE (type))
4952 {
4953 case RECORD_TYPE:
4954 case UNION_TYPE:
4955 case QUAL_UNION_TYPE:
4956 {
4957 unsigned HOST_WIDE_INT idx;
4958 tree field, value;
4959
4960 /* If size is zero or the target is already cleared, do nothing. */
4961 if (size == 0 || cleared)
4962 cleared = 1;
4963 /* We either clear the aggregate or indicate the value is dead. */
4964 else if ((TREE_CODE (type) == UNION_TYPE
4965 || TREE_CODE (type) == QUAL_UNION_TYPE)
4966 && ! CONSTRUCTOR_ELTS (exp))
4967 /* If the constructor is empty, clear the union. */
4968 {
4969 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
4970 cleared = 1;
4971 }
4972
4973 /* If we are building a static constructor into a register,
4974 set the initial value as zero so we can fold the value into
4975 a constant. But if more than one register is involved,
4976 this probably loses. */
4977 else if (REG_P (target) && TREE_STATIC (exp)
4978 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4979 {
4980 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4981 cleared = 1;
4982 }
4983
4984 /* If the constructor has fewer fields than the structure or
4985 if we are initializing the structure to mostly zeros, clear
4986 the whole structure first. Don't do this if TARGET is a
4987 register whose mode size isn't equal to SIZE since
4988 clear_storage can't handle this case. */
4989 else if (size > 0
4990 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
4991 != fields_length (type))
4992 || mostly_zeros_p (exp))
4993 && (!REG_P (target)
4994 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4995 == size)))
4996 {
4997 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
4998 cleared = 1;
4999 }
5000
5001 if (! cleared)
5002 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5003
5004 /* Store each element of the constructor into the
5005 corresponding field of TARGET. */
5006 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5007 {
5008 enum machine_mode mode;
5009 HOST_WIDE_INT bitsize;
5010 HOST_WIDE_INT bitpos = 0;
5011 tree offset;
5012 rtx to_rtx = target;
5013
5014 /* Just ignore missing fields. We cleared the whole
5015 structure, above, if any fields are missing. */
5016 if (field == 0)
5017 continue;
5018
5019 if (cleared && initializer_zerop (value))
5020 continue;
5021
5022 if (host_integerp (DECL_SIZE (field), 1))
5023 bitsize = tree_low_cst (DECL_SIZE (field), 1);
5024 else
5025 bitsize = -1;
5026
5027 mode = DECL_MODE (field);
5028 if (DECL_BIT_FIELD (field))
5029 mode = VOIDmode;
5030
5031 offset = DECL_FIELD_OFFSET (field);
5032 if (host_integerp (offset, 0)
5033 && host_integerp (bit_position (field), 0))
5034 {
5035 bitpos = int_bit_position (field);
5036 offset = 0;
5037 }
5038 else
5039 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5040
5041 if (offset)
5042 {
5043 rtx offset_rtx;
5044
5045 offset
5046 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5047 make_tree (TREE_TYPE (exp),
5048 target));
5049
5050 offset_rtx = expand_normal (offset);
5051 gcc_assert (MEM_P (to_rtx));
5052
5053 #ifdef POINTERS_EXTEND_UNSIGNED
5054 if (GET_MODE (offset_rtx) != Pmode)
5055 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
5056 #else
5057 if (GET_MODE (offset_rtx) != ptr_mode)
5058 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
5059 #endif
5060
5061 to_rtx = offset_address (to_rtx, offset_rtx,
5062 highest_pow2_factor (offset));
5063 }
5064
5065 #ifdef WORD_REGISTER_OPERATIONS
5066 /* If this initializes a field that is smaller than a
5067 word, at the start of a word, try to widen it to a full
5068 word. This special case allows us to output C++ member
5069 function initializations in a form that the optimizers
5070 can understand. */
5071 if (REG_P (target)
5072 && bitsize < BITS_PER_WORD
5073 && bitpos % BITS_PER_WORD == 0
5074 && GET_MODE_CLASS (mode) == MODE_INT
5075 && TREE_CODE (value) == INTEGER_CST
5076 && exp_size >= 0
5077 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5078 {
5079 tree type = TREE_TYPE (value);
5080
5081 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5082 {
5083 type = lang_hooks.types.type_for_size
5084 (BITS_PER_WORD, TYPE_UNSIGNED (type));
5085 value = fold_convert (type, value);
5086 }
5087
5088 if (BYTES_BIG_ENDIAN)
5089 value
5090 = fold_build2 (LSHIFT_EXPR, type, value,
5091 build_int_cst (type,
5092 BITS_PER_WORD - bitsize));
5093 bitsize = BITS_PER_WORD;
5094 mode = word_mode;
5095 }
5096 #endif
5097
5098 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5099 && DECL_NONADDRESSABLE_P (field))
5100 {
5101 to_rtx = copy_rtx (to_rtx);
5102 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5103 }
5104
5105 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5106 value, type, cleared,
5107 get_alias_set (TREE_TYPE (field)));
5108 }
5109 break;
5110 }
5111 case ARRAY_TYPE:
5112 {
5113 tree value, index;
5114 unsigned HOST_WIDE_INT i;
5115 int need_to_clear;
5116 tree domain;
5117 tree elttype = TREE_TYPE (type);
5118 int const_bounds_p;
5119 HOST_WIDE_INT minelt = 0;
5120 HOST_WIDE_INT maxelt = 0;
5121
5122 domain = TYPE_DOMAIN (type);
5123 const_bounds_p = (TYPE_MIN_VALUE (domain)
5124 && TYPE_MAX_VALUE (domain)
5125 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5126 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5127
5128 /* If we have constant bounds for the range of the type, get them. */
5129 if (const_bounds_p)
5130 {
5131 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5132 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5133 }
5134
5135 /* If the constructor has fewer elements than the array, clear
5136 the whole array first. Similarly if this is a static
5137 constructor of a non-BLKmode object. */
5138 if (cleared)
5139 need_to_clear = 0;
5140 else if (REG_P (target) && TREE_STATIC (exp))
5141 need_to_clear = 1;
5142 else
5143 {
5144 unsigned HOST_WIDE_INT idx;
5145 tree index, value;
5146 HOST_WIDE_INT count = 0, zero_count = 0;
5147 need_to_clear = ! const_bounds_p;
5148
5149 /* This loop is a more accurate version of the loop in
5150 mostly_zeros_p (it handles RANGE_EXPR in an index). It
5151 is also needed to check for missing elements. */
5152 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5153 {
5154 HOST_WIDE_INT this_node_count;
5155
5156 if (need_to_clear)
5157 break;
5158
5159 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5160 {
5161 tree lo_index = TREE_OPERAND (index, 0);
5162 tree hi_index = TREE_OPERAND (index, 1);
5163
5164 if (! host_integerp (lo_index, 1)
5165 || ! host_integerp (hi_index, 1))
5166 {
5167 need_to_clear = 1;
5168 break;
5169 }
5170
5171 this_node_count = (tree_low_cst (hi_index, 1)
5172 - tree_low_cst (lo_index, 1) + 1);
5173 }
5174 else
5175 this_node_count = 1;
5176
5177 count += this_node_count;
5178 if (mostly_zeros_p (value))
5179 zero_count += this_node_count;
5180 }
5181
5182 /* Clear the entire array first if there are any missing
5183 elements, or if the incidence of zero elements is >=
5184 75%. */
5185 if (! need_to_clear
5186 && (count < maxelt - minelt + 1
5187 || 4 * zero_count >= 3 * count))
5188 need_to_clear = 1;
5189 }
5190
5191 if (need_to_clear && size > 0)
5192 {
5193 if (REG_P (target))
5194 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5195 else
5196 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5197 cleared = 1;
5198 }
5199
5200 if (!cleared && REG_P (target))
5201 /* Inform later passes that the old value is dead. */
5202 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5203
5204 /* Store each element of the constructor into the
5205 corresponding element of TARGET, determined by counting the
5206 elements. */
5207 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5208 {
5209 enum machine_mode mode;
5210 HOST_WIDE_INT bitsize;
5211 HOST_WIDE_INT bitpos;
5212 int unsignedp;
5213 rtx xtarget = target;
5214
5215 if (cleared && initializer_zerop (value))
5216 continue;
5217
5218 unsignedp = TYPE_UNSIGNED (elttype);
5219 mode = TYPE_MODE (elttype);
5220 if (mode == BLKmode)
5221 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5222 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5223 : -1);
5224 else
5225 bitsize = GET_MODE_BITSIZE (mode);
5226
5227 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5228 {
5229 tree lo_index = TREE_OPERAND (index, 0);
5230 tree hi_index = TREE_OPERAND (index, 1);
5231 rtx index_r, pos_rtx;
5232 HOST_WIDE_INT lo, hi, count;
5233 tree position;
5234
5235 /* If the range is constant and "small", unroll the loop. */
5236 if (const_bounds_p
5237 && host_integerp (lo_index, 0)
5238 && host_integerp (hi_index, 0)
5239 && (lo = tree_low_cst (lo_index, 0),
5240 hi = tree_low_cst (hi_index, 0),
5241 count = hi - lo + 1,
5242 (!MEM_P (target)
5243 || count <= 2
5244 || (host_integerp (TYPE_SIZE (elttype), 1)
5245 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5246 <= 40 * 8)))))
5247 {
5248 lo -= minelt; hi -= minelt;
5249 for (; lo <= hi; lo++)
5250 {
5251 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5252
5253 if (MEM_P (target)
5254 && !MEM_KEEP_ALIAS_SET_P (target)
5255 && TREE_CODE (type) == ARRAY_TYPE
5256 && TYPE_NONALIASED_COMPONENT (type))
5257 {
5258 target = copy_rtx (target);
5259 MEM_KEEP_ALIAS_SET_P (target) = 1;
5260 }
5261
5262 store_constructor_field
5263 (target, bitsize, bitpos, mode, value, type, cleared,
5264 get_alias_set (elttype));
5265 }
5266 }
5267 else
5268 {
5269 rtx loop_start = gen_label_rtx ();
5270 rtx loop_end = gen_label_rtx ();
5271 tree exit_cond;
5272
5273 expand_normal (hi_index);
5274 unsignedp = TYPE_UNSIGNED (domain);
5275
5276 index = build_decl (VAR_DECL, NULL_TREE, domain);
5277
5278 index_r
5279 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5280 &unsignedp, 0));
5281 SET_DECL_RTL (index, index_r);
5282 store_expr (lo_index, index_r, 0);
5283
5284 /* Build the head of the loop. */
5285 do_pending_stack_adjust ();
5286 emit_label (loop_start);
5287
5288 /* Assign value to element index. */
5289 position =
5290 fold_convert (ssizetype,
5291 fold_build2 (MINUS_EXPR,
5292 TREE_TYPE (index),
5293 index,
5294 TYPE_MIN_VALUE (domain)));
5295
5296 position =
5297 size_binop (MULT_EXPR, position,
5298 fold_convert (ssizetype,
5299 TYPE_SIZE_UNIT (elttype)));
5300
5301 pos_rtx = expand_normal (position);
5302 xtarget = offset_address (target, pos_rtx,
5303 highest_pow2_factor (position));
5304 xtarget = adjust_address (xtarget, mode, 0);
5305 if (TREE_CODE (value) == CONSTRUCTOR)
5306 store_constructor (value, xtarget, cleared,
5307 bitsize / BITS_PER_UNIT);
5308 else
5309 store_expr (value, xtarget, 0);
5310
5311 /* Generate a conditional jump to exit the loop. */
5312 exit_cond = build2 (LT_EXPR, integer_type_node,
5313 index, hi_index);
5314 jumpif (exit_cond, loop_end);
5315
5316 /* Update the loop counter, and jump to the head of
5317 the loop. */
5318 expand_assignment (index,
5319 build2 (PLUS_EXPR, TREE_TYPE (index),
5320 index, integer_one_node));
5321
5322 emit_jump (loop_start);
5323
5324 /* Build the end of the loop. */
5325 emit_label (loop_end);
5326 }
5327 }
5328 else if ((index != 0 && ! host_integerp (index, 0))
5329 || ! host_integerp (TYPE_SIZE (elttype), 1))
5330 {
5331 tree position;
5332
5333 if (index == 0)
5334 index = ssize_int (1);
5335
5336 if (minelt)
5337 index = fold_convert (ssizetype,
5338 fold_build2 (MINUS_EXPR,
5339 TREE_TYPE (index),
5340 index,
5341 TYPE_MIN_VALUE (domain)));
5342
5343 position =
5344 size_binop (MULT_EXPR, index,
5345 fold_convert (ssizetype,
5346 TYPE_SIZE_UNIT (elttype)));
5347 xtarget = offset_address (target,
5348 expand_normal (position),
5349 highest_pow2_factor (position));
5350 xtarget = adjust_address (xtarget, mode, 0);
5351 store_expr (value, xtarget, 0);
5352 }
5353 else
5354 {
5355 if (index != 0)
5356 bitpos = ((tree_low_cst (index, 0) - minelt)
5357 * tree_low_cst (TYPE_SIZE (elttype), 1));
5358 else
5359 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5360
5361 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5362 && TREE_CODE (type) == ARRAY_TYPE
5363 && TYPE_NONALIASED_COMPONENT (type))
5364 {
5365 target = copy_rtx (target);
5366 MEM_KEEP_ALIAS_SET_P (target) = 1;
5367 }
5368 store_constructor_field (target, bitsize, bitpos, mode, value,
5369 type, cleared, get_alias_set (elttype));
5370 }
5371 }
5372 break;
5373 }
5374
5375 case VECTOR_TYPE:
5376 {
5377 unsigned HOST_WIDE_INT idx;
5378 constructor_elt *ce;
5379 int i;
5380 int need_to_clear;
5381 int icode = 0;
5382 tree elttype = TREE_TYPE (type);
5383 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5384 enum machine_mode eltmode = TYPE_MODE (elttype);
5385 HOST_WIDE_INT bitsize;
5386 HOST_WIDE_INT bitpos;
5387 rtvec vector = NULL;
5388 unsigned n_elts;
5389
5390 gcc_assert (eltmode != BLKmode);
5391
5392 n_elts = TYPE_VECTOR_SUBPARTS (type);
5393 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5394 {
5395 enum machine_mode mode = GET_MODE (target);
5396
5397 icode = (int) vec_init_optab->handlers[mode].insn_code;
5398 if (icode != CODE_FOR_nothing)
5399 {
5400 unsigned int i;
5401
5402 vector = rtvec_alloc (n_elts);
5403 for (i = 0; i < n_elts; i++)
5404 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5405 }
5406 }
5407
5408 /* If the constructor has fewer elements than the vector,
5409 clear the whole array first. Similarly if this is a static
5410 constructor of a non-BLKmode object. */
5411 if (cleared)
5412 need_to_clear = 0;
5413 else if (REG_P (target) && TREE_STATIC (exp))
5414 need_to_clear = 1;
5415 else
5416 {
5417 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5418 tree value;
5419
5420 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
5421 {
5422 int n_elts_here = tree_low_cst
5423 (int_const_binop (TRUNC_DIV_EXPR,
5424 TYPE_SIZE (TREE_TYPE (value)),
5425 TYPE_SIZE (elttype), 0), 1);
5426
5427 count += n_elts_here;
5428 if (mostly_zeros_p (value))
5429 zero_count += n_elts_here;
5430 }
5431
5432 /* Clear the entire vector first if there are any missing elements,
5433 or if the incidence of zero elements is >= 75%. */
5434 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5435 }
5436
5437 if (need_to_clear && size > 0 && !vector)
5438 {
5439 if (REG_P (target))
5440 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5441 else
5442 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5443 cleared = 1;
5444 }
5445
5446 /* Inform later passes that the old value is dead. */
5447 if (!cleared && !vector && REG_P (target))
5448 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5449
5450 /* Store each element of the constructor into the corresponding
5451 element of TARGET, determined by counting the elements. */
5452 for (idx = 0, i = 0;
5453 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5454 idx++, i += bitsize / elt_size)
5455 {
5456 HOST_WIDE_INT eltpos;
5457 tree value = ce->value;
5458
5459 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5460 if (cleared && initializer_zerop (value))
5461 continue;
5462
5463 if (ce->index)
5464 eltpos = tree_low_cst (ce->index, 1);
5465 else
5466 eltpos = i;
5467
5468 if (vector)
5469 {
5470 /* Vector CONSTRUCTORs should only be built from smaller
5471 vectors in the case of BLKmode vectors. */
5472 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5473 RTVEC_ELT (vector, eltpos)
5474 = expand_normal (value);
5475 }
5476 else
5477 {
5478 enum machine_mode value_mode =
5479 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5480 ? TYPE_MODE (TREE_TYPE (value))
5481 : eltmode;
5482 bitpos = eltpos * elt_size;
5483 store_constructor_field (target, bitsize, bitpos,
5484 value_mode, value, type,
5485 cleared, get_alias_set (elttype));
5486 }
5487 }
5488
5489 if (vector)
5490 emit_insn (GEN_FCN (icode)
5491 (target,
5492 gen_rtx_PARALLEL (GET_MODE (target), vector)));
5493 break;
5494 }
5495
5496 default:
5497 gcc_unreachable ();
5498 }
5499 }
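/* Editorial note (not part of the original source): the clearing
   heuristic used in both the array and vector cases above is
   "4 * zero_count >= 3 * count", i.e. clear the whole object up
   front whenever at least 75% of the explicitly given elements are
   zero.  For example, a constructor supplying count == 8 elements
   of which zero_count == 6 are zero gives 4*6 == 24 >= 3*8 == 24,
   so the target is cleared once and only the two nonzero elements
   are stored individually.  */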
5500
5501 /* Store the value of EXP (an expression tree)
5502 into a subfield of TARGET which has mode MODE and occupies
5503 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5504 If MODE is VOIDmode, it means that we are storing into a bit-field.
5505
5506 Always return const0_rtx unless we have something particular to
5507 return.
5508
5509 TYPE is the type of the underlying object,
5510
5511 ALIAS_SET is the alias set for the destination. This value will
5512 (in general) be different from that for TARGET, since TARGET is a
5513 reference to the containing structure. */
5514
5515 static rtx
5516 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5517 enum machine_mode mode, tree exp, tree type, int alias_set)
5518 {
5519 HOST_WIDE_INT width_mask = 0;
5520
5521 if (TREE_CODE (exp) == ERROR_MARK)
5522 return const0_rtx;
5523
5524 /* If we have nothing to store, do nothing unless the expression has
5525 side-effects. */
5526 if (bitsize == 0)
5527 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5528 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5529 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5530
5531 /* If we are storing into an unaligned field of an aligned union that is
5532 in a register, we may have the mode of TARGET being an integer mode but
5533 MODE == BLKmode. In that case, get an aligned object whose size and
5534 alignment are the same as TARGET and store TARGET into it (we can avoid
5535 the store if the field being stored is the entire width of TARGET). Then
5536 call ourselves recursively to store the field into a BLKmode version of
5537 that object. Finally, load from the object into TARGET. This is not
5538 very efficient in general, but should only be slightly more expensive
5539 than the otherwise-required unaligned accesses. Perhaps this can be
5540 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5541 twice, once with emit_move_insn and once via store_field. */
5542
5543 if (mode == BLKmode
5544 && (REG_P (target) || GET_CODE (target) == SUBREG))
5545 {
5546 rtx object = assign_temp (type, 0, 1, 1);
5547 rtx blk_object = adjust_address (object, BLKmode, 0);
5548
5549 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5550 emit_move_insn (object, target);
5551
5552 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set);
5553
5554 emit_move_insn (target, object);
5555
5556 /* We want to return the BLKmode version of the data. */
5557 return blk_object;
5558 }
5559
5560 if (GET_CODE (target) == CONCAT)
5561 {
5562 /* We're storing into a struct containing a single __complex. */
5563
5564 gcc_assert (!bitpos);
5565 return store_expr (exp, target, 0);
5566 }
5567
5568 /* If the structure is in a register or if the component
5569 is a bit field, we cannot use addressing to access it.
5570 Use bit-field techniques or SUBREG to store in it. */
5571
5572 if (mode == VOIDmode
5573 || (mode != BLKmode && ! direct_store[(int) mode]
5574 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5575 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5576 || REG_P (target)
5577 || GET_CODE (target) == SUBREG
5578 /* If the field isn't aligned enough to store as an ordinary memref,
5579 store it as a bit field. */
5580 || (mode != BLKmode
5581 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5582 || bitpos % GET_MODE_ALIGNMENT (mode))
5583 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5584 || (bitpos % BITS_PER_UNIT != 0)))
5585 /* If the RHS and field are a constant size and the size of the
5586 RHS isn't the same size as the bitfield, we must use bitfield
5587 operations. */
5588 || (bitsize >= 0
5589 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5590 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5591 {
5592 rtx temp;
5593
5594 /* If EXP is a NOP_EXPR of precision less than its mode, then that
5595 implies a mask operation. If the precision is the same size as
5596 the field we're storing into, that mask is redundant. This is
5597 particularly common with bit field assignments generated by the
5598 C front end. */
5599 if (TREE_CODE (exp) == NOP_EXPR)
5600 {
5601 tree type = TREE_TYPE (exp);
5602 if (INTEGRAL_TYPE_P (type)
5603 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5604 && bitsize == TYPE_PRECISION (type))
5605 {
5606 type = TREE_TYPE (TREE_OPERAND (exp, 0));
5607 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5608 exp = TREE_OPERAND (exp, 0);
5609 }
5610 }
5611
5612 temp = expand_normal (exp);
5613
5614 /* If BITSIZE is narrower than the size of the type of EXP
5615 we will be narrowing TEMP. Normally, what's wanted are the
5616 low-order bits. However, if EXP's type is a record and this is
5617 a big-endian machine, we want the upper BITSIZE bits.
5618 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5619 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5620 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5621 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5622 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5623 - bitsize),
5624 NULL_RTX, 1);
5625
5626 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5627 MODE. */
5628 if (mode != VOIDmode && mode != BLKmode
5629 && mode != TYPE_MODE (TREE_TYPE (exp)))
5630 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5631
5632 /* If the modes of TARGET and TEMP are both BLKmode, both
5633 must be in memory and BITPOS must be aligned on a byte
5634 boundary. If so, we simply do a block copy. */
5635 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5636 {
5637 gcc_assert (MEM_P (target) && MEM_P (temp)
5638 && !(bitpos % BITS_PER_UNIT));
5639
5640 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5641 emit_block_move (target, temp,
5642 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5643 / BITS_PER_UNIT),
5644 BLOCK_OP_NORMAL);
5645
5646 return const0_rtx;
5647 }
5648
5649 /* Store the value in the bitfield. */
5650 store_bit_field (target, bitsize, bitpos, mode, temp);
5651
5652 return const0_rtx;
5653 }
5654 else
5655 {
5656 /* Now build a reference to just the desired component. */
5657 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5658
5659 if (to_rtx == target)
5660 to_rtx = copy_rtx (to_rtx);
5661
5662 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5663 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5664 set_mem_alias_set (to_rtx, alias_set);
5665
5666 return store_expr (exp, to_rtx, 0);
5667 }
5668 }
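/* Illustrative sketch (editorial addition, never compiled): a typical
   bit-field store through store_field, roughly as a caller such as
   expand_assignment might issue it.  TO_RTX and FIELD_TYPE are
   hypothetical placeholders for the containing object's MEM and the
   bit-field's type.  */
#if 0
{
  rtx to_rtx = /* MEM for the containing object */ NULL_RTX;
  tree field_type = /* type of the bit-field */ NULL_TREE;
  tree rhs = build_int_cst (field_type, 4);

  /* Store the 3-bit value 4 starting at bit 5 of TO_RTX; MODE is
     VOIDmode, so bit-field techniques are used.  */
  store_field (to_rtx, /*bitsize=*/3, /*bitpos=*/5, VOIDmode,
               rhs, field_type, get_alias_set (field_type));
}
#endif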
5669 \f
5670 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5671 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5672 codes and find the ultimate containing object, which we return.
5673
5674 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5675 bit position, and *PUNSIGNEDP to the signedness of the field.
5676 If the position of the field is variable, we store a tree
5677 giving the variable offset (in units) in *POFFSET.
5678 This offset is in addition to the bit position.
5679 If the position is not variable, we store 0 in *POFFSET.
5680
5681 If any of the extraction expressions is volatile,
5682 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5683
5684 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5685 is a mode that can be used to access the field. In that case, *PBITSIZE
5686 is redundant.
5687
5688 If the field describes a variable-sized object, *PMODE is set to
5689 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5690 this case, but the address of the object can be found.
5691
5692 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5693 look through nodes that serve as markers of a greater alignment than
5694 the one that can be deduced from the expression. These nodes make it
5695 possible for front-ends to prevent temporaries from being created by
5696 the middle-end on alignment considerations. For that purpose, the
5697 normal operating mode at high-level is to always pass FALSE so that
5698 the ultimate containing object is really returned; moreover, the
5699 associated predicate handled_component_p will always return TRUE
5700 on these nodes, thus indicating that they are essentially handled
5701 by get_inner_reference. TRUE should only be passed when the caller
5702 is scanning the expression in order to build another representation
5703 and specifically knows how to handle these nodes; as such, this is
5704 the normal operating mode in the RTL expanders. */
5705
5706 tree
5707 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5708 HOST_WIDE_INT *pbitpos, tree *poffset,
5709 enum machine_mode *pmode, int *punsignedp,
5710 int *pvolatilep, bool keep_aligning)
5711 {
5712 tree size_tree = 0;
5713 enum machine_mode mode = VOIDmode;
5714 tree offset = size_zero_node;
5715 tree bit_offset = bitsize_zero_node;
5716 tree tem;
5717
5718 /* First get the mode, signedness, and size. We do this from just the
5719 outermost expression. */
5720 if (TREE_CODE (exp) == COMPONENT_REF)
5721 {
5722 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5723 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5724 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5725
5726 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5727 }
5728 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5729 {
5730 size_tree = TREE_OPERAND (exp, 1);
5731 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5732
5733 /* For vector types, with the correct size of access, use the mode of
5734 the inner type. */
5735 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
5736 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
5737 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
5738 mode = TYPE_MODE (TREE_TYPE (exp));
5739 }
5740 else
5741 {
5742 mode = TYPE_MODE (TREE_TYPE (exp));
5743 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5744
5745 if (mode == BLKmode)
5746 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5747 else
5748 *pbitsize = GET_MODE_BITSIZE (mode);
5749 }
5750
5751 if (size_tree != 0)
5752 {
5753 if (! host_integerp (size_tree, 1))
5754 mode = BLKmode, *pbitsize = -1;
5755 else
5756 *pbitsize = tree_low_cst (size_tree, 1);
5757 }
5758
5759 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5760 and find the ultimate containing object. */
5761 while (1)
5762 {
5763 switch (TREE_CODE (exp))
5764 {
5765 case BIT_FIELD_REF:
5766 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5767 TREE_OPERAND (exp, 2));
5768 break;
5769
5770 case COMPONENT_REF:
5771 {
5772 tree field = TREE_OPERAND (exp, 1);
5773 tree this_offset = component_ref_field_offset (exp);
5774
5775 /* If this field hasn't been filled in yet, don't go past it.
5776 This should only happen when folding expressions made during
5777 type construction. */
5778 if (this_offset == 0)
5779 break;
5780
5781 offset = size_binop (PLUS_EXPR, offset, this_offset);
5782 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5783 DECL_FIELD_BIT_OFFSET (field));
5784
5785 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5786 }
5787 break;
5788
5789 case ARRAY_REF:
5790 case ARRAY_RANGE_REF:
5791 {
5792 tree index = TREE_OPERAND (exp, 1);
5793 tree low_bound = array_ref_low_bound (exp);
5794 tree unit_size = array_ref_element_size (exp);
5795
5796 /* We assume all arrays have sizes that are a multiple of a byte.
5797 First subtract the lower bound, if any, in the type of the
5798 index, then convert to sizetype and multiply by the size of
5799 the array element. */
5800 if (! integer_zerop (low_bound))
5801 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
5802 index, low_bound);
5803
5804 offset = size_binop (PLUS_EXPR, offset,
5805 size_binop (MULT_EXPR,
5806 fold_convert (sizetype, index),
5807 unit_size));
5808 }
5809 break;
5810
5811 case REALPART_EXPR:
5812 break;
5813
5814 case IMAGPART_EXPR:
5815 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5816 bitsize_int (*pbitsize));
5817 break;
5818
5819 case VIEW_CONVERT_EXPR:
5820 if (keep_aligning && STRICT_ALIGNMENT
5821 && (TYPE_ALIGN (TREE_TYPE (exp))
5822 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5823 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5824 < BIGGEST_ALIGNMENT)
5825 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5826 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5827 goto done;
5828 break;
5829
5830 default:
5831 goto done;
5832 }
5833
5834 /* If any reference in the chain is volatile, the effect is volatile. */
5835 if (TREE_THIS_VOLATILE (exp))
5836 *pvolatilep = 1;
5837
5838 exp = TREE_OPERAND (exp, 0);
5839 }
5840 done:
5841
5842 /* If OFFSET is constant, see if we can return the whole thing as a
5843 constant bit position. Otherwise, split it up. */
5844 if (host_integerp (offset, 0)
5845 && 0 != (tem = size_binop (MULT_EXPR,
5846 fold_convert (bitsizetype, offset),
5847 bitsize_unit_node))
5848 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5849 && host_integerp (tem, 0))
5850 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5851 else
5852 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5853
5854 *pmode = mode;
5855 return exp;
5856 }
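/* Illustrative sketch (editorial addition, never compiled): the usual
   calling convention for get_inner_reference, modeled on its use in
   expand_expr_addr_expr_1 further down in this file.  EXP is assumed
   to be some reference tree accepted by handled_component_p.  */
#if 0
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset, inner;
  enum machine_mode mode1;
  int unsignedp, volatilep = 0;

  inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                               &mode1, &unsignedp, &volatilep, false);
  /* INNER is now the ultimate containing object; the accessed bits
     start OFFSET bytes plus BITPOS bits into it and span BITSIZE
     bits.  OFFSET is zero when the whole position is constant.  */
}
#endif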
5857
5858 /* Return a tree of sizetype representing the size, in bytes, of the element
5859 of EXP, an ARRAY_REF. */
5860
5861 tree
5862 array_ref_element_size (tree exp)
5863 {
5864 tree aligned_size = TREE_OPERAND (exp, 3);
5865 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5866
5867 /* If a size was specified in the ARRAY_REF, it's the size measured
5868 in alignment units of the element type. So multiply by that value. */
5869 if (aligned_size)
5870 {
5871 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5872 sizetype from another type of the same width and signedness. */
5873 if (TREE_TYPE (aligned_size) != sizetype)
5874 aligned_size = fold_convert (sizetype, aligned_size);
5875 return size_binop (MULT_EXPR, aligned_size,
5876 size_int (TYPE_ALIGN_UNIT (elmt_type)));
5877 }
5878
5879 /* Otherwise, take the size from that of the element type. Substitute
5880 any PLACEHOLDER_EXPR that we have. */
5881 else
5882 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
5883 }
5884
5885 /* Return a tree representing the lower bound of the array mentioned in
5886 EXP, an ARRAY_REF. */
5887
5888 tree
5889 array_ref_low_bound (tree exp)
5890 {
5891 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5892
5893 /* If a lower bound is specified in EXP, use it. */
5894 if (TREE_OPERAND (exp, 2))
5895 return TREE_OPERAND (exp, 2);
5896
5897 /* Otherwise, if there is a domain type and it has a lower bound, use it,
5898 substituting for a PLACEHOLDER_EXPR as needed. */
5899 if (domain_type && TYPE_MIN_VALUE (domain_type))
5900 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5901
5902 /* Otherwise, return a zero of the appropriate type. */
5903 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
5904 }
5905
5906 /* Return a tree representing the upper bound of the array mentioned in
5907 EXP, an ARRAY_REF. */
5908
5909 tree
5910 array_ref_up_bound (tree exp)
5911 {
5912 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5913
5914 /* If there is a domain type and it has an upper bound, use it, substituting
5915 for a PLACEHOLDER_EXPR as needed. */
5916 if (domain_type && TYPE_MAX_VALUE (domain_type))
5917 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
5918
5919 /* Otherwise fail. */
5920 return NULL_TREE;
5921 }
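/* Illustrative sketch (editorial addition, never compiled): combining
   the two bound accessors above to compute a constant element count
   when both bounds are known.  ARRAY_REF_EXP is a hypothetical
   ARRAY_REF tree.  */
#if 0
{
  tree lb = array_ref_low_bound (array_ref_exp);
  tree ub = array_ref_up_bound (array_ref_exp);
  HOST_WIDE_INT nelts = -1;

  if (ub != NULL_TREE
      && host_integerp (lb, 0)
      && host_integerp (ub, 0))
    nelts = tree_low_cst (ub, 0) - tree_low_cst (lb, 0) + 1;
}
#endif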
5922
5923 /* Return a tree representing the offset, in bytes, of the field referenced
5924 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
5925
5926 tree
5927 component_ref_field_offset (tree exp)
5928 {
5929 tree aligned_offset = TREE_OPERAND (exp, 2);
5930 tree field = TREE_OPERAND (exp, 1);
5931
5932 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5933 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
5934 value. */
5935 if (aligned_offset)
5936 {
5937 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5938 sizetype from another type of the same width and signedness. */
5939 if (TREE_TYPE (aligned_offset) != sizetype)
5940 aligned_offset = fold_convert (sizetype, aligned_offset);
5941 return size_binop (MULT_EXPR, aligned_offset,
5942 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5943 }
5944
5945 /* Otherwise, take the offset from that of the field. Substitute
5946 any PLACEHOLDER_EXPR that we have. */
5947 else
5948 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
5949 }
5950
5951 /* Return 1 if T is an expression that get_inner_reference handles. */
5952
5953 int
5954 handled_component_p (tree t)
5955 {
5956 switch (TREE_CODE (t))
5957 {
5958 case BIT_FIELD_REF:
5959 case COMPONENT_REF:
5960 case ARRAY_REF:
5961 case ARRAY_RANGE_REF:
5962 case VIEW_CONVERT_EXPR:
5963 case REALPART_EXPR:
5964 case IMAGPART_EXPR:
5965 return 1;
5966
5967 default:
5968 return 0;
5969 }
5970 }
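/* Illustrative sketch (editorial addition, never compiled): the
   predicate above is commonly used to peel reference nodes off an
   expression until the base object is reached.  T is a hypothetical
   reference tree.  */
#if 0
{
  tree base = t;

  while (handled_component_p (base))
    base = TREE_OPERAND (base, 0);
  /* BASE is now the ultimate containing object, e.g. a DECL or an
     INDIRECT_REF.  */
}
#endif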
5971 \f
5972 /* Given an rtx VALUE that may contain additions and multiplications, return
5973 an equivalent value that just refers to a register, memory, or constant.
5974 This is done by generating instructions to perform the arithmetic and
5975 returning a pseudo-register containing the value.
5976
5977 The returned value may be a REG, SUBREG, MEM or constant. */
5978
5979 rtx
5980 force_operand (rtx value, rtx target)
5981 {
5982 rtx op1, op2;
5983 /* Use subtarget as the target for operand 0 of a binary operation. */
5984 rtx subtarget = get_subtarget (target);
5985 enum rtx_code code = GET_CODE (value);
5986
5987 /* Check for subreg applied to an expression produced by loop optimizer. */
5988 if (code == SUBREG
5989 && !REG_P (SUBREG_REG (value))
5990 && !MEM_P (SUBREG_REG (value)))
5991 {
5992 value = simplify_gen_subreg (GET_MODE (value),
5993 force_reg (GET_MODE (SUBREG_REG (value)),
5994 force_operand (SUBREG_REG (value),
5995 NULL_RTX)),
5996 GET_MODE (SUBREG_REG (value)),
5997 SUBREG_BYTE (value));
5998 code = GET_CODE (value);
5999 }
6000
6001 /* Check for a PIC address load. */
6002 if ((code == PLUS || code == MINUS)
6003 && XEXP (value, 0) == pic_offset_table_rtx
6004 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
6005 || GET_CODE (XEXP (value, 1)) == LABEL_REF
6006 || GET_CODE (XEXP (value, 1)) == CONST))
6007 {
6008 if (!subtarget)
6009 subtarget = gen_reg_rtx (GET_MODE (value));
6010 emit_move_insn (subtarget, value);
6011 return subtarget;
6012 }
6013
6014 if (ARITHMETIC_P (value))
6015 {
6016 op2 = XEXP (value, 1);
6017 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
6018 subtarget = 0;
6019 if (code == MINUS && GET_CODE (op2) == CONST_INT)
6020 {
6021 code = PLUS;
6022 op2 = negate_rtx (GET_MODE (value), op2);
6023 }
6024
6025 /* Check for an addition with OP2 a constant integer and our first
6026 operand a PLUS of a virtual register and something else. In that
6027 case, we want to emit the sum of the virtual register and the
6028 constant first and then add the other value. This allows virtual
6029 register instantiation to simply modify the constant rather than
6030 creating another one around this addition. */
6031 if (code == PLUS && GET_CODE (op2) == CONST_INT
6032 && GET_CODE (XEXP (value, 0)) == PLUS
6033 && REG_P (XEXP (XEXP (value, 0), 0))
6034 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
6035 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
6036 {
6037 rtx temp = expand_simple_binop (GET_MODE (value), code,
6038 XEXP (XEXP (value, 0), 0), op2,
6039 subtarget, 0, OPTAB_LIB_WIDEN);
6040 return expand_simple_binop (GET_MODE (value), code, temp,
6041 force_operand (XEXP (XEXP (value,
6042 0), 1), 0),
6043 target, 0, OPTAB_LIB_WIDEN);
6044 }
6045
6046 op1 = force_operand (XEXP (value, 0), subtarget);
6047 op2 = force_operand (op2, NULL_RTX);
6048 switch (code)
6049 {
6050 case MULT:
6051 return expand_mult (GET_MODE (value), op1, op2, target, 1);
6052 case DIV:
6053 if (!INTEGRAL_MODE_P (GET_MODE (value)))
6054 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6055 target, 1, OPTAB_LIB_WIDEN);
6056 else
6057 return expand_divmod (0,
6058 FLOAT_MODE_P (GET_MODE (value))
6059 ? RDIV_EXPR : TRUNC_DIV_EXPR,
6060 GET_MODE (value), op1, op2, target, 0);
6061 break;
6062 case MOD:
6063 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6064 target, 0);
6065 break;
6066 case UDIV:
6067 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6068 target, 1);
6069 break;
6070 case UMOD:
6071 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6072 target, 1);
6073 break;
6074 case ASHIFTRT:
6075 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6076 target, 0, OPTAB_LIB_WIDEN);
6077 break;
6078 default:
6079 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6080 target, 1, OPTAB_LIB_WIDEN);
6081 }
6082 }
6083 if (UNARY_P (value))
6084 {
6085 if (!target)
6086 target = gen_reg_rtx (GET_MODE (value));
6087 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6088 switch (code)
6089 {
6090 case ZERO_EXTEND:
6091 case SIGN_EXTEND:
6092 case TRUNCATE:
6093 case FLOAT_EXTEND:
6094 case FLOAT_TRUNCATE:
6095 convert_move (target, op1, code == ZERO_EXTEND);
6096 return target;
6097
6098 case FIX:
6099 case UNSIGNED_FIX:
6100 expand_fix (target, op1, code == UNSIGNED_FIX);
6101 return target;
6102
6103 case FLOAT:
6104 case UNSIGNED_FLOAT:
6105 expand_float (target, op1, code == UNSIGNED_FLOAT);
6106 return target;
6107
6108 default:
6109 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6110 }
6111 }
6112
6113 #ifdef INSN_SCHEDULING
6114 /* On machines that have insn scheduling, we want all memory references to be
6115 explicit, so we need to deal with such paradoxical SUBREGs. */
6116 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
6117 && (GET_MODE_SIZE (GET_MODE (value))
6118 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6119 value
6120 = simplify_gen_subreg (GET_MODE (value),
6121 force_reg (GET_MODE (SUBREG_REG (value)),
6122 force_operand (SUBREG_REG (value),
6123 NULL_RTX)),
6124 GET_MODE (SUBREG_REG (value)),
6125 SUBREG_BYTE (value));
6126 #endif
6127
6128 return value;
6129 }
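/* Illustrative sketch (editorial addition, never compiled): forcing a
   compound address computation into a REG/MEM/constant form.
   BASE_REG and INDEX_REG are hypothetical pseudo registers.  */
#if 0
{
  rtx addr = gen_rtx_PLUS (Pmode, base_reg,
                           gen_rtx_MULT (Pmode, index_reg, GEN_INT (4)));

  /* Emit the multiply/add as insns; ADDR becomes a register (or
     simpler rtx) usable as a memory address.  */
  addr = force_operand (addr, NULL_RTX);
}
#endif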
6130 \f
6131 /* Subroutine of expand_expr: return nonzero iff there is no way that
6132 EXP can reference X, which is being modified. TOP_P is nonzero if this
6133 call is going to be used to determine whether we need a temporary
6134 for EXP, as opposed to a recursive call to this function.
6135
6136 It is always safe for this routine to return zero since it merely
6137 searches for optimization opportunities. */
6138
6139 int
6140 safe_from_p (rtx x, tree exp, int top_p)
6141 {
6142 rtx exp_rtl = 0;
6143 int i, nops;
6144
6145 if (x == 0
6146 /* If EXP has varying size, we MUST use a target since we currently
6147 have no way of allocating temporaries of variable size
6148 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6149 So we assume here that something at a higher level has prevented a
6150 clash. This is somewhat bogus, but the best we can do. Only
6151 do this when X is BLKmode and when we are at the top level. */
6152 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6153 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6154 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6155 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6156 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6157 != INTEGER_CST)
6158 && GET_MODE (x) == BLKmode)
6159 /* If X is in the outgoing argument area, it is always safe. */
6160 || (MEM_P (x)
6161 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6162 || (GET_CODE (XEXP (x, 0)) == PLUS
6163 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6164 return 1;
6165
6166 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6167 find the underlying pseudo. */
6168 if (GET_CODE (x) == SUBREG)
6169 {
6170 x = SUBREG_REG (x);
6171 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6172 return 0;
6173 }
6174
6175 /* Now look at our tree code and possibly recurse. */
6176 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6177 {
6178 case tcc_declaration:
6179 exp_rtl = DECL_RTL_IF_SET (exp);
6180 break;
6181
6182 case tcc_constant:
6183 return 1;
6184
6185 case tcc_exceptional:
6186 if (TREE_CODE (exp) == TREE_LIST)
6187 {
6188 while (1)
6189 {
6190 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6191 return 0;
6192 exp = TREE_CHAIN (exp);
6193 if (!exp)
6194 return 1;
6195 if (TREE_CODE (exp) != TREE_LIST)
6196 return safe_from_p (x, exp, 0);
6197 }
6198 }
6199 else if (TREE_CODE (exp) == CONSTRUCTOR)
6200 {
6201 constructor_elt *ce;
6202 unsigned HOST_WIDE_INT idx;
6203
6204 for (idx = 0;
6205 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
6206 idx++)
6207 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
6208 || !safe_from_p (x, ce->value, 0))
6209 return 0;
6210 return 1;
6211 }
6212 else if (TREE_CODE (exp) == ERROR_MARK)
6213 return 1; /* An already-visited SAVE_EXPR? */
6214 else
6215 return 0;
6216
6217 case tcc_statement:
6218 /* The only case we look at here is the DECL_INITIAL inside a
6219 DECL_EXPR. */
6220 return (TREE_CODE (exp) != DECL_EXPR
6221 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
6222 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
6223 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
6224
6225 case tcc_binary:
6226 case tcc_comparison:
6227 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6228 return 0;
6229 /* Fall through. */
6230
6231 case tcc_unary:
6232 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6233
6234 case tcc_expression:
6235 case tcc_reference:
6236 case tcc_vl_exp:
6237 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6238 the expression. If it is set, we conflict iff we are that rtx or
6239 both are in memory. Otherwise, we check all operands of the
6240 expression recursively. */
6241
6242 switch (TREE_CODE (exp))
6243 {
6244 case ADDR_EXPR:
6245 /* If the operand is static or we are static, we can't conflict.
6246 Likewise if we don't conflict with the operand at all. */
6247 if (staticp (TREE_OPERAND (exp, 0))
6248 || TREE_STATIC (exp)
6249 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6250 return 1;
6251
6252 /* Otherwise, the only way this can conflict is if we are taking
6253 the address of a DECL whose address is part of X, which is
6254 very rare. */
6255 exp = TREE_OPERAND (exp, 0);
6256 if (DECL_P (exp))
6257 {
6258 if (!DECL_RTL_SET_P (exp)
6259 || !MEM_P (DECL_RTL (exp)))
6260 return 0;
6261 else
6262 exp_rtl = XEXP (DECL_RTL (exp), 0);
6263 }
6264 break;
6265
6266 case MISALIGNED_INDIRECT_REF:
6267 case ALIGN_INDIRECT_REF:
6268 case INDIRECT_REF:
6269 if (MEM_P (x)
6270 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6271 get_alias_set (exp)))
6272 return 0;
6273 break;
6274
6275 case CALL_EXPR:
6276 /* Assume that the call will clobber all hard registers and
6277 all of memory. */
6278 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6279 || MEM_P (x))
6280 return 0;
6281 break;
6282
6283 case WITH_CLEANUP_EXPR:
6284 case CLEANUP_POINT_EXPR:
6285 /* Lowered by gimplify.c. */
6286 gcc_unreachable ();
6287
6288 case SAVE_EXPR:
6289 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6290
6291 default:
6292 break;
6293 }
6294
6295 /* If we have an rtx, we do not need to scan our operands. */
6296 if (exp_rtl)
6297 break;
6298
6299 nops = TREE_OPERAND_LENGTH (exp);
6300 for (i = 0; i < nops; i++)
6301 if (TREE_OPERAND (exp, i) != 0
6302 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6303 return 0;
6304
6305 break;
6306
6307 case tcc_type:
6308 /* Should never get a type here. */
6309 gcc_unreachable ();
6310
6311 case tcc_gimple_stmt:
6312 gcc_unreachable ();
6313 }
6314
6315 /* If we have an rtl, find any enclosed object. Then see if we conflict
6316 with it. */
6317 if (exp_rtl)
6318 {
6319 if (GET_CODE (exp_rtl) == SUBREG)
6320 {
6321 exp_rtl = SUBREG_REG (exp_rtl);
6322 if (REG_P (exp_rtl)
6323 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6324 return 0;
6325 }
6326
6327 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6328 are memory and they conflict. */
6329 return ! (rtx_equal_p (x, exp_rtl)
6330 || (MEM_P (x) && MEM_P (exp_rtl)
6331 && true_dependence (exp_rtl, VOIDmode, x,
6332 rtx_addr_varies_p)));
6333 }
6334
6335 /* If we reach here, it is safe. */
6336 return 1;
6337 }
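/* Illustrative sketch (editorial addition): the typical use of
   safe_from_p, as in expand_operands below -- TARGET may be reused
   for the first operand only if evaluating the second operand cannot
   clobber it.  */
#if 0
  if (! safe_from_p (target, exp1, 1))
    target = 0;
#endif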
6338
6339 \f
6340 /* Return the highest power of two that EXP is known to be a multiple of.
6341 This is used in updating alignment of MEMs in array references. */
6342
6343 unsigned HOST_WIDE_INT
6344 highest_pow2_factor (tree exp)
6345 {
6346 unsigned HOST_WIDE_INT c0, c1;
6347
6348 switch (TREE_CODE (exp))
6349 {
6350 case INTEGER_CST:
6351 /* We can find the lowest bit that's a one. If the low
6352 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6353 We need to handle this case since we can find it in a COND_EXPR,
6354 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6355 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6356 later ICE. */
6357 if (TREE_OVERFLOW (exp))
6358 return BIGGEST_ALIGNMENT;
6359 else
6360 {
6361 /* Note: tree_low_cst is intentionally not used here;
6362 we don't care about the upper bits. */
6363 c0 = TREE_INT_CST_LOW (exp);
6364 c0 &= -c0;
6365 return c0 ? c0 : BIGGEST_ALIGNMENT;
6366 }
6367 break;
6368
6369 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6370 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6371 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6372 return MIN (c0, c1);
6373
6374 case MULT_EXPR:
6375 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6376 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6377 return c0 * c1;
6378
6379 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6380 case CEIL_DIV_EXPR:
6381 if (integer_pow2p (TREE_OPERAND (exp, 1))
6382 && host_integerp (TREE_OPERAND (exp, 1), 1))
6383 {
6384 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6385 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6386 return MAX (1, c0 / c1);
6387 }
6388 break;
6389
6390 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6391 case SAVE_EXPR:
6392 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6393
6394 case COMPOUND_EXPR:
6395 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6396
6397 case COND_EXPR:
6398 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6399 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6400 return MIN (c0, c1);
6401
6402 default:
6403 break;
6404 }
6405
6406 return 1;
6407 }
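/* Illustrative sketch (editorial addition): the factor computed above
   feeds the alignment argument of offset_address, as in the RANGE_EXPR
   handling of store_constructor earlier in this file.  POSITION is a
   byte-offset tree and TARGET a MEM.  */
#if 0
{
  rtx pos_rtx = expand_normal (position);
  rtx xtarget = offset_address (target, pos_rtx,
                                highest_pow2_factor (position));
}
#endif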
6408
6409 /* Similar, except that the alignment requirements of TARGET are
6410 taken into account. Assume it is at least as aligned as its
6411 type, unless it is a COMPONENT_REF in which case the layout of
6412 the structure gives the alignment. */
6413
6414 static unsigned HOST_WIDE_INT
6415 highest_pow2_factor_for_target (tree target, tree exp)
6416 {
6417 unsigned HOST_WIDE_INT target_align, factor;
6418
6419 factor = highest_pow2_factor (exp);
6420 if (TREE_CODE (target) == COMPONENT_REF)
6421 target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
6422 else
6423 target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
6424 return MAX (factor, target_align);
6425 }
6426 \f
6427 /* Return &VAR expression for emulated thread local VAR. */
6428
6429 static tree
6430 emutls_var_address (tree var)
6431 {
6432 tree emuvar = emutls_decl (var);
6433 tree fn = built_in_decls [BUILT_IN_EMUTLS_GET_ADDRESS];
6434 tree arg = build_fold_addr_expr_with_type (emuvar, ptr_type_node);
6435 tree arglist = build_tree_list (NULL_TREE, arg);
6436 tree call = build_function_call_expr (fn, arglist);
6437 return fold_convert (build_pointer_type (TREE_TYPE (var)), call);
6438 }
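/* Illustrative sketch (editorial addition): how the helper above is
   used when the target has no native TLS support -- a use of VAR is
   rewritten as an indirection of __emutls_get_address, and &VAR as
   the call itself (see the VAR_DECL cases below).  */
#if 0
  if (! targetm.have_tls
      && TREE_CODE (exp) == VAR_DECL
      && DECL_THREAD_LOCAL_P (exp))
    exp = build_fold_indirect_ref (emutls_var_address (exp));
#endif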
6439 \f
6440 /* Expands variable VAR. */
6441
6442 void
6443 expand_var (tree var)
6444 {
6445 if (DECL_EXTERNAL (var))
6446 return;
6447
6448 if (TREE_STATIC (var))
6449 /* If this is an inlined copy of a static local variable,
6450 look up the original decl. */
6451 var = DECL_ORIGIN (var);
6452
6453 if (TREE_STATIC (var)
6454 ? !TREE_ASM_WRITTEN (var)
6455 : !DECL_RTL_SET_P (var))
6456 {
6457 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
6458 /* Should be ignored. */;
6459 else if (lang_hooks.expand_decl (var))
6460 /* OK. */;
6461 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6462 expand_decl (var);
6463 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
6464 rest_of_decl_compilation (var, 0, 0);
6465 else
6466 /* No expansion needed. */
6467 gcc_assert (TREE_CODE (var) == TYPE_DECL
6468 || TREE_CODE (var) == CONST_DECL
6469 || TREE_CODE (var) == FUNCTION_DECL
6470 || TREE_CODE (var) == LABEL_DECL);
6471 }
6472 }
6473
6474 /* Subroutine of expand_expr. Expand the two operands of a binary
6475 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6476 The value may be stored in TARGET if TARGET is nonzero. The
6477 MODIFIER argument is as documented by expand_expr. */
6478
6479 static void
6480 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6481 enum expand_modifier modifier)
6482 {
6483 if (! safe_from_p (target, exp1, 1))
6484 target = 0;
6485 if (operand_equal_p (exp0, exp1, 0))
6486 {
6487 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6488 *op1 = copy_rtx (*op0);
6489 }
6490 else
6491 {
6492 /* If we need to preserve evaluation order, copy exp0 into its own
6493 temporary variable so that it can't be clobbered by exp1. */
6494 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6495 exp0 = save_expr (exp0);
6496 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6497 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6498 }
6499 }
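/* Illustrative sketch (editorial addition, never compiled): a binary
   operator case of expand_expr_real_1 typically expands both operands
   with the helper above and hands them to an optab expander.  This
   PLUS example is a simplification of the real PLUS_EXPR handling,
   not a copy of it.  */
#if 0
{
  rtx op0, op1;

  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                   subtarget, &op0, &op1, EXPAND_NORMAL);
  return expand_simple_binop (mode, PLUS, op0, op1, target,
                              unsignedp, OPTAB_LIB_WIDEN);
}
#endif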
6500
6501 \f
6502 /* Return a MEM that contains constant EXP. DEFER is as for
6503 output_constant_def and MODIFIER is as for expand_expr. */
6504
6505 static rtx
6506 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
6507 {
6508 rtx mem;
6509
6510 mem = output_constant_def (exp, defer);
6511 if (modifier != EXPAND_INITIALIZER)
6512 mem = use_anchored_address (mem);
6513 return mem;
6514 }
6515
6516 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6517 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6518
6519 static rtx
6520 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6521 enum expand_modifier modifier)
6522 {
6523 rtx result, subtarget;
6524 tree inner, offset;
6525 HOST_WIDE_INT bitsize, bitpos;
6526 int volatilep, unsignedp;
6527 enum machine_mode mode1;
6528
6529 /* If we are taking the address of a constant and are at the top level,
6530 we have to use output_constant_def since we can't call force_const_mem
6531 at top level. */
6532 /* ??? This should be considered a front-end bug. We should not be
6533 generating ADDR_EXPR of something that isn't an LVALUE. The only
6534 exception here is STRING_CST. */
6535 if (TREE_CODE (exp) == CONSTRUCTOR
6536 || CONSTANT_CLASS_P (exp))
6537 return XEXP (expand_expr_constant (exp, 0, modifier), 0);
6538
6539 /* Everything must be something allowed by is_gimple_addressable. */
6540 switch (TREE_CODE (exp))
6541 {
6542 case INDIRECT_REF:
6543 /* This case will happen via recursion for &a->b. */
6544 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6545
6546 case CONST_DECL:
6547 /* Recurse and make the output_constant_def clause above handle this. */
6548 return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
6549 tmode, modifier);
6550
6551 case REALPART_EXPR:
6552 /* The real part of the complex number is always first, therefore
6553 the address is the same as the address of the parent object. */
6554 offset = 0;
6555 bitpos = 0;
6556 inner = TREE_OPERAND (exp, 0);
6557 break;
6558
6559 case IMAGPART_EXPR:
6560 /* The imaginary part of the complex number is always second.
6561 The expression is therefore always offset by the size of the
6562 scalar type. */
6563 offset = 0;
6564 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6565 inner = TREE_OPERAND (exp, 0);
6566 break;
6567
6568 case VAR_DECL:
6569 /* TLS emulation hook - replace __thread VAR's &VAR with
6570 __emutls_get_address (&_emutls.VAR). */
6571 if (! targetm.have_tls
6572 && TREE_CODE (exp) == VAR_DECL
6573 && DECL_THREAD_LOCAL_P (exp))
6574 {
6575 exp = emutls_var_address (exp);
6576 return expand_expr (exp, target, tmode, modifier);
6577 }
6578 /* Fall through. */
6579
6580 default:
6581 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6582 expand_expr, as that can have various side effects; LABEL_DECLs for
6583 example, may not have their DECL_RTL set yet. Assume language
6584 specific tree nodes can be expanded in some interesting way. */
6585 if (DECL_P (exp)
6586 || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
6587 {
6588 result = expand_expr (exp, target, tmode,
6589 modifier == EXPAND_INITIALIZER
6590 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6591
6592 /* If the DECL isn't in memory, then the DECL wasn't properly
6593 marked TREE_ADDRESSABLE, which will be either a front-end
6594 or a tree optimizer bug. */
6595 gcc_assert (MEM_P (result));
6596 result = XEXP (result, 0);
6597
6598 /* ??? Is this needed anymore? */
6599 if (DECL_P (exp) && !TREE_USED (exp))
6600 {
6601 assemble_external (exp);
6602 TREE_USED (exp) = 1;
6603 }
6604
6605 if (modifier != EXPAND_INITIALIZER
6606 && modifier != EXPAND_CONST_ADDRESS)
6607 result = force_operand (result, target);
6608 return result;
6609 }
6610
6611 /* Pass FALSE as the last argument to get_inner_reference although
6612 we are expanding to RTL. The rationale is that we know how to
6613 handle "aligning nodes" here: we can just bypass them because
6614 they won't change the final object whose address will be returned
6615 (they actually exist only for that purpose). */
6616 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6617 &mode1, &unsignedp, &volatilep, false);
6618 break;
6619 }
6620
6621 /* We must have made progress. */
6622 gcc_assert (inner != exp);
6623
6624 subtarget = offset || bitpos ? NULL_RTX : target;
6625 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6626
6627 if (offset)
6628 {
6629 rtx tmp;
6630
6631 if (modifier != EXPAND_NORMAL)
6632 result = force_operand (result, NULL);
6633 tmp = expand_expr (offset, NULL, tmode, EXPAND_NORMAL);
6634
6635 result = convert_memory_address (tmode, result);
6636 tmp = convert_memory_address (tmode, tmp);
6637
6638 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6639 result = gen_rtx_PLUS (tmode, result, tmp);
6640 else
6641 {
6642 subtarget = bitpos ? NULL_RTX : target;
6643 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6644 1, OPTAB_LIB_WIDEN);
6645 }
6646 }
6647
6648 if (bitpos)
6649 {
6650 /* Someone beforehand should have rejected taking the address
6651 of such an object. */
6652 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6653
6654 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6655 if (modifier < EXPAND_SUM)
6656 result = force_operand (result, target);
6657 }
6658
6659 return result;
6660 }
6661
6662 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6663 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6664
6665 static rtx
6666 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6667 enum expand_modifier modifier)
6668 {
6669 enum machine_mode rmode;
6670 rtx result;
6671
6672 /* Target mode of VOIDmode says "whatever's natural". */
6673 if (tmode == VOIDmode)
6674 tmode = TYPE_MODE (TREE_TYPE (exp));
6675
6676 /* We can get called with some Weird Things if the user does silliness
6677 like "(short) &a". In that case, convert_memory_address won't do
6678 the right thing, so ignore the given target mode. */
6679 if (tmode != Pmode && tmode != ptr_mode)
6680 tmode = Pmode;
6681
6682 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
6683 tmode, modifier);
6684
6685 /* Despite expand_expr's claims concerning ignoring TMODE when not
6686 strictly convenient, stuff breaks if we don't honor it. Note
6687 that combined with the above, we only do this for pointer modes. */
6688 rmode = GET_MODE (result);
6689 if (rmode == VOIDmode)
6690 rmode = tmode;
6691 if (rmode != tmode)
6692 result = convert_memory_address (tmode, result);
6693
6694 return result;
6695 }
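/* Editorial note (assumption about code beyond this excerpt): the
   ADDR_EXPR case of expand_expr_real_1 is expected to dispatch to the
   helper above roughly as follows.  */
#if 0
    case ADDR_EXPR:
      return expand_expr_addr_expr (exp, target, tmode, modifier);
#endif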
6696
6697
6698 /* expand_expr: generate code for computing expression EXP.
6699 An rtx for the computed value is returned. The value is never null.
6700 In the case of a void EXP, const0_rtx is returned.
6701
6702 The value may be stored in TARGET if TARGET is nonzero.
6703 TARGET is just a suggestion; callers must assume that
6704 the rtx returned may not be the same as TARGET.
6705
6706 If TARGET is CONST0_RTX, it means that the value will be ignored.
6707
6708 If TMODE is not VOIDmode, it suggests generating the
6709 result in mode TMODE. But this is done only when convenient.
6710 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6711 TMODE is just a suggestion; callers must assume that
6712 the rtx returned may not have mode TMODE.
6713
6714 Note that TARGET may have neither TMODE nor MODE. In that case, it
6715 probably will not be used.
6716
6717 If MODIFIER is EXPAND_SUM then when EXP is an addition
6718 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6719 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6720 products as above, or REG or MEM, or constant.
6721 Ordinarily in such cases we would output mul or add instructions
6722 and then return a pseudo reg containing the sum.
6723
6724 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6725 it also marks a label as absolutely required (it can't be dead).
6726 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6727 This is used for outputting expressions used in initializers.
6728
6729 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6730 with a constant address even if that address is not normally legitimate.
6731 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6732
6733 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6734 a call parameter. Such targets require special care as we haven't yet
6735 marked TARGET so that it's safe from being trashed by libcalls. We
6736 don't want to use TARGET for anything but the final result;
6737 intermediate values must go elsewhere. Additionally, calls to
6738 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6739
6740 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6741 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6742 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6743 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6744 recursively. */
6745
6746 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6747 enum expand_modifier, rtx *);
6748
6749 rtx
6750 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6751 enum expand_modifier modifier, rtx *alt_rtl)
6752 {
6753 int rn = -1;
6754 rtx ret, last = NULL;
6755
6756 /* Handle ERROR_MARK before anybody tries to access its type. */
6757 if (TREE_CODE (exp) == ERROR_MARK
6758 || (!GIMPLE_TUPLE_P (exp) && TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
6759 {
6760 ret = CONST0_RTX (tmode);
6761 return ret ? ret : const0_rtx;
6762 }
6763
6764 if (flag_non_call_exceptions)
6765 {
6766 rn = lookup_stmt_eh_region (exp);
6767 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
6768 if (rn >= 0)
6769 last = get_last_insn ();
6770 }
6771
6772 /* If this is an expression of some kind and it has an associated line
6773 number, then emit the line number before expanding the expression.
6774
6775 We need to save and restore the file and line information so that
6776 errors discovered during expansion are emitted with the right
6777 information. It would be better if the diagnostic routines
6778 used the file/line information embedded in the tree nodes rather
6779 than globals. */
6780 if (cfun && cfun->ib_boundaries_block && EXPR_HAS_LOCATION (exp))
6781 {
6782 location_t saved_location = input_location;
6783 input_location = EXPR_LOCATION (exp);
6784 emit_line_note (input_location);
6785
6786 /* Record where the insns produced belong. */
6787 record_block_change (TREE_BLOCK (exp));
6788
6789 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6790
6791 input_location = saved_location;
6792 }
6793 else
6794 {
6795 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6796 }
6797
6798 /* If using non-call exceptions, mark all insns that may trap.
6799 expand_call() will mark CALL_INSNs before we get to this code,
6800 but it doesn't handle libcalls, and these may trap. */
6801 if (rn >= 0)
6802 {
6803 rtx insn;
6804 for (insn = next_real_insn (last); insn;
6805 insn = next_real_insn (insn))
6806 {
6807 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6808 /* If we want exceptions for non-call insns, any
6809 may_trap_p instruction may throw. */
6810 && GET_CODE (PATTERN (insn)) != CLOBBER
6811 && GET_CODE (PATTERN (insn)) != USE
6812 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
6813 {
6814 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6815 REG_NOTES (insn));
6816 }
6817 }
6818 }
6819
6820 return ret;
6821 }
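/* Editorial note: expand_expr and expand_normal, used throughout this
   file, are thin wrappers around expand_expr_real (declared in
   expr.h).  A caller that just wants the value of a tree in its
   natural mode writes something like the sketch below (assumption
   about the wrapper, not a copy of it).  */
#if 0
  rtx val = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
#endif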
6822
6823 static rtx
6824 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6825 enum expand_modifier modifier, rtx *alt_rtl)
6826 {
6827 rtx op0, op1, temp, decl_rtl;
6828 tree type;
6829 int unsignedp;
6830 enum machine_mode mode;
6831 enum tree_code code = TREE_CODE (exp);
6832 optab this_optab;
6833 rtx subtarget, original_target;
6834 int ignore;
6835 tree context, subexp0, subexp1;
6836 bool reduce_bit_field = false;
6837 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
6838 ? reduce_to_bit_field_precision ((expr), \
6839 target, \
6840 type) \
6841 : (expr))
6842
6843 if (GIMPLE_STMT_P (exp))
6844 {
6845 type = void_type_node;
6846 mode = VOIDmode;
6847 unsignedp = 0;
6848 }
6849 else
6850 {
6851 type = TREE_TYPE (exp);
6852 mode = TYPE_MODE (type);
6853 unsignedp = TYPE_UNSIGNED (type);
6854 }
6855 if (lang_hooks.reduce_bit_field_operations
6856 && TREE_CODE (type) == INTEGER_TYPE
6857 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
6858 {
6859 /* An operation in what may be a bit-field type needs the
6860 result to be reduced to the precision of the bit-field type,
6861 which is narrower than that of the type's mode. */
6862 reduce_bit_field = true;
6863 if (modifier == EXPAND_STACK_PARM)
6864 target = 0;
6865 }
6866
6867 /* Use subtarget as the target for operand 0 of a binary operation. */
6868 subtarget = get_subtarget (target);
6869 original_target = target;
6870 ignore = (target == const0_rtx
6871 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6872 || code == CONVERT_EXPR || code == COND_EXPR
6873 || code == VIEW_CONVERT_EXPR)
6874 && TREE_CODE (type) == VOID_TYPE));
6875
6876 /* If we are going to ignore this result, we need only do something
6877 if there is a side-effect somewhere in the expression. If there
6878 is, short-circuit the most common cases here. Note that we must
6879 not call expand_expr with anything but const0_rtx in case this
6880 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6881
6882 if (ignore)
6883 {
6884 if (! TREE_SIDE_EFFECTS (exp))
6885 return const0_rtx;
6886
6887 /* Ensure we reference a volatile object even if value is ignored, but
6888 don't do this if all we are doing is taking its address. */
6889 if (TREE_THIS_VOLATILE (exp)
6890 && TREE_CODE (exp) != FUNCTION_DECL
6891 && mode != VOIDmode && mode != BLKmode
6892 && modifier != EXPAND_CONST_ADDRESS)
6893 {
6894 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6895 if (MEM_P (temp))
6896 temp = copy_to_reg (temp);
6897 return const0_rtx;
6898 }
6899
6900 if (TREE_CODE_CLASS (code) == tcc_unary
6901 || code == COMPONENT_REF || code == INDIRECT_REF)
6902 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6903 modifier);
6904
6905 else if (TREE_CODE_CLASS (code) == tcc_binary
6906 || TREE_CODE_CLASS (code) == tcc_comparison
6907 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6908 {
6909 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6910 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6911 return const0_rtx;
6912 }
6913 else if (code == BIT_FIELD_REF)
6914 {
6915 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6916 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6917 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6918 return const0_rtx;
6919 }
6920
6921 target = 0;
6922 }
6923
6924
6925 switch (code)
6926 {
6927 case LABEL_DECL:
6928 {
6929 tree function = decl_function_context (exp);
6930
6931 temp = label_rtx (exp);
6932 temp = gen_rtx_LABEL_REF (Pmode, temp);
6933
6934 if (function != current_function_decl
6935 && function != 0)
6936 LABEL_REF_NONLOCAL_P (temp) = 1;
6937
6938 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
6939 return temp;
6940 }
6941
6942 case SSA_NAME:
6943 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
6944 NULL);
6945
6946 case PARM_DECL:
6947 case VAR_DECL:
6948 /* If a static var's type was incomplete when the decl was written,
6949 but the type is complete now, lay out the decl now. */
6950 if (DECL_SIZE (exp) == 0
6951 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6952 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6953 layout_decl (exp, 0);
6954
6955 /* TLS emulation hook - replace __thread vars with
6956 *__emutls_get_address (&_emutls.var). */
6957 if (! targetm.have_tls
6958 && TREE_CODE (exp) == VAR_DECL
6959 && DECL_THREAD_LOCAL_P (exp))
6960 {
6961 exp = build_fold_indirect_ref (emutls_var_address (exp));
6962 return expand_expr_real_1 (exp, target, tmode, modifier, NULL);
6963 }
6964
6965 /* ... fall through ... */
6966
6967 case FUNCTION_DECL:
6968 case RESULT_DECL:
6969 decl_rtl = DECL_RTL (exp);
6970 gcc_assert (decl_rtl);
6971
6972 /* Ensure variable marked as used even if it doesn't go through
6973 a parser. If it hasn't been used yet, write out an external
6974 definition. */
6975 if (! TREE_USED (exp))
6976 {
6977 assemble_external (exp);
6978 TREE_USED (exp) = 1;
6979 }
6980
6981 /* Show we haven't gotten RTL for this yet. */
6982 temp = 0;
6983
6984 /* Variables inherited from containing functions should have
6985 been lowered by this point. */
6986 context = decl_function_context (exp);
6987 gcc_assert (!context
6988 || context == current_function_decl
6989 || TREE_STATIC (exp)
6990 /* ??? C++ creates functions that are not TREE_STATIC. */
6991 || TREE_CODE (exp) == FUNCTION_DECL);
6992
6993 /* This is the case of an array whose size is to be determined
6994 from its initializer, while the initializer is still being parsed.
6995 See expand_decl. */
6996
6997 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
6998 temp = validize_mem (decl_rtl);
6999
7000 /* If DECL_RTL is memory, we are in the normal case; if either
7001 the address is not valid, or it is not a register and -fforce-addr
7002 is specified, get the address into a register. */
7003
7004 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
7005 {
7006 if (alt_rtl)
7007 *alt_rtl = decl_rtl;
7008 decl_rtl = use_anchored_address (decl_rtl);
7009 if (modifier != EXPAND_CONST_ADDRESS
7010 && modifier != EXPAND_SUM
7011 && (!memory_address_p (DECL_MODE (exp), XEXP (decl_rtl, 0))
7012 || (flag_force_addr && !REG_P (XEXP (decl_rtl, 0)))))
7013 temp = replace_equiv_address (decl_rtl,
7014 copy_rtx (XEXP (decl_rtl, 0)));
7015 }
7016
7017 /* If we got something, return it. But first, set the alignment
7018 if the address is a register. */
7019 if (temp != 0)
7020 {
7021 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
7022 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
7023
7024 return temp;
7025 }
7026
7027 /* If the mode of DECL_RTL does not match that of the decl, it
7028 must be a promoted value. We return a SUBREG of the wanted mode,
7029 but mark it so that we know that it was already extended. */
7030
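/* Illustrative note (added): on a 64-bit target that promotes subword
   arguments, a "short" PARM_DECL may have its DECL_RTL in a word-mode
   register, say (reg:DI 58). The code below would then return
   (subreg:HI (reg:DI 58) 0) with SUBREG_PROMOTED_VAR_P set, so users
   of the value know it has already been extended. */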
7031 if (REG_P (decl_rtl)
7032 && GET_MODE (decl_rtl) != DECL_MODE (exp))
7033 {
7034 enum machine_mode pmode;
7035
7036 /* Get the signedness used for this variable. Ensure we get the
7037 same mode we got when the variable was declared. */
7038 pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
7039 (TREE_CODE (exp) == RESULT_DECL
7040 || TREE_CODE (exp) == PARM_DECL) ? 1 : 0);
7041 gcc_assert (GET_MODE (decl_rtl) == pmode);
7042
7043 temp = gen_lowpart_SUBREG (mode, decl_rtl);
7044 SUBREG_PROMOTED_VAR_P (temp) = 1;
7045 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
7046 return temp;
7047 }
7048
7049 return decl_rtl;
7050
7051 case INTEGER_CST:
7052 temp = immed_double_const (TREE_INT_CST_LOW (exp),
7053 TREE_INT_CST_HIGH (exp), mode);
7054
7055 /* ??? If overflow is set, fold will have done an incomplete job,
7056 which can result in (plus xx (const_int 0)), which can get
7057 simplified by validate_replace_rtx during virtual register
7058 instantiation, which can result in unrecognizable insns.
7059 Avoid this by forcing all overflows into registers. */
7060 if (TREE_OVERFLOW (exp)
7061 && modifier != EXPAND_INITIALIZER)
7062 temp = force_reg (mode, temp);
7063
7064 return temp;
7065
7066 case VECTOR_CST:
7067 {
7068 tree tmp = NULL_TREE;
7069 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
7070 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT)
7071 return const_vector_from_tree (exp);
7072 if (GET_MODE_CLASS (mode) == MODE_INT)
7073 {
7074 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
7075 if (type_for_mode)
7076 tmp = fold_unary (VIEW_CONVERT_EXPR, type_for_mode, exp);
7077 }
7078 if (!tmp)
7079 tmp = build_constructor_from_list (type,
7080 TREE_VECTOR_CST_ELTS (exp));
7081 return expand_expr (tmp, ignore ? const0_rtx : target,
7082 tmode, modifier);
7083 }
7084
7085 case CONST_DECL:
7086 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
7087
7088 case REAL_CST:
7089 /* If optimized, generate immediate CONST_DOUBLE
7090 which will be turned into memory by reload if necessary.
7091
7092 We used to force a register so that loop.c could see it. But
7093 this does not allow gen_* patterns to perform optimizations with
7094 the constants. It also produces two insns in cases like "x = 1.0;".
7095 On most machines, floating-point constants are not permitted in
7096 many insns, so we'd end up copying it to a register in any case.
7097
7098 Now, we do the copying in expand_binop, if appropriate. */
7099 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
7100 TYPE_MODE (TREE_TYPE (exp)));
7101
7102 case COMPLEX_CST:
7103 /* Handle evaluating a complex constant in a CONCAT target. */
7104 if (original_target && GET_CODE (original_target) == CONCAT)
7105 {
7106 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7107 rtx rtarg, itarg;
7108
7109 rtarg = XEXP (original_target, 0);
7110 itarg = XEXP (original_target, 1);
7111
7112 /* Move the real and imaginary parts separately. */
7113 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
7114 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
7115
7116 if (op0 != rtarg)
7117 emit_move_insn (rtarg, op0);
7118 if (op1 != itarg)
7119 emit_move_insn (itarg, op1);
7120
7121 return original_target;
7122 }
7123
7124 /* ... fall through ... */
7125
7126 case STRING_CST:
7127 temp = expand_expr_constant (exp, 1, modifier);
7128
7129 /* temp contains a constant address.
7130 On RISC machines where a constant address isn't valid,
7131 make some insns to get that address into a register. */
7132 if (modifier != EXPAND_CONST_ADDRESS
7133 && modifier != EXPAND_INITIALIZER
7134 && modifier != EXPAND_SUM
7135 && (! memory_address_p (mode, XEXP (temp, 0))
7136 || flag_force_addr))
7137 return replace_equiv_address (temp,
7138 copy_rtx (XEXP (temp, 0)));
7139 return temp;
7140
7141 case SAVE_EXPR:
7142 {
7143 tree val = TREE_OPERAND (exp, 0);
7144 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
7145
7146 if (!SAVE_EXPR_RESOLVED_P (exp))
7147 {
7148 /* We can indeed still hit this case, typically via builtin
7149 expanders calling save_expr immediately before expanding
7150 something. Assume this means that we only have to deal
7151 with non-BLKmode values. */
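/* Illustrative note (added): "resolving" the SAVE_EXPR below means
   copying the value into a pseudo, wrapping that pseudo in an
   artificial VAR_DECL, and storing the decl back as operand 0, so a
   later expansion of this SAVE_EXPR just rereads the pseudo instead
   of re-evaluating VAL. */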
7152 gcc_assert (GET_MODE (ret) != BLKmode);
7153
7154 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
7155 DECL_ARTIFICIAL (val) = 1;
7156 DECL_IGNORED_P (val) = 1;
7157 TREE_OPERAND (exp, 0) = val;
7158 SAVE_EXPR_RESOLVED_P (exp) = 1;
7159
7160 if (!CONSTANT_P (ret))
7161 ret = copy_to_reg (ret);
7162 SET_DECL_RTL (val, ret);
7163 }
7164
7165 return ret;
7166 }
7167
7168 case GOTO_EXPR:
7169 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
7170 expand_goto (TREE_OPERAND (exp, 0));
7171 else
7172 expand_computed_goto (TREE_OPERAND (exp, 0));
7173 return const0_rtx;
7174
7175 case CONSTRUCTOR:
7176 /* If we don't need the result, just ensure we evaluate any
7177 subexpressions. */
7178 if (ignore)
7179 {
7180 unsigned HOST_WIDE_INT idx;
7181 tree value;
7182
7183 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
7184 expand_expr (value, const0_rtx, VOIDmode, 0);
7185
7186 return const0_rtx;
7187 }
7188
7189 /* Try to avoid creating a temporary at all. This is possible
7190 if all of the initializer is zero.
7191 FIXME: try to handle all [0..255] initializers that memset
7192 can handle. */
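/* Illustrative example (added): a local aggregate initialized to all
   zeros, e.g. "struct S s = { 0 };" with a BLKmode type, can take the
   branch below and become a single clear_storage (block clear) of
   TARGET instead of a field-by-field store. */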
7193 else if (TREE_STATIC (exp)
7194 && !TREE_ADDRESSABLE (exp)
7195 && target != 0 && mode == BLKmode
7196 && all_zeros_p (exp))
7197 {
7198 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7199 return target;
7200 }
7201
7202 /* All elts simple constants => refer to a constant in memory. But
7203 if this is a non-BLKmode mode, let it store a field at a time
7204 since that should make a CONST_INT or CONST_DOUBLE when we
7205 fold. Likewise, if we have a target we can use, it is best to
7206 store directly into the target unless the type is large enough
7207 that memcpy will be used. If we are making an initializer and
7208 all operands are constant, put it in memory as well.
7209
7210 FIXME: Avoid trying to fill vector constructors piecemeal.
7211 Output them with output_constant_def below unless we're sure
7212 they're zeros. This should go away when vector initializers
7213 are treated like VECTOR_CST instead of arrays.
7214 */
7215 else if ((TREE_STATIC (exp)
7216 && ((mode == BLKmode
7217 && ! (target != 0 && safe_from_p (target, exp, 1)))
7218 || TREE_ADDRESSABLE (exp)
7219 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7220 && (! MOVE_BY_PIECES_P
7221 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7222 TYPE_ALIGN (type)))
7223 && ! mostly_zeros_p (exp))))
7224 || ((modifier == EXPAND_INITIALIZER
7225 || modifier == EXPAND_CONST_ADDRESS)
7226 && TREE_CONSTANT (exp)))
7227 {
7228 rtx constructor = expand_expr_constant (exp, 1, modifier);
7229
7230 if (modifier != EXPAND_CONST_ADDRESS
7231 && modifier != EXPAND_INITIALIZER
7232 && modifier != EXPAND_SUM)
7233 constructor = validize_mem (constructor);
7234
7235 return constructor;
7236 }
7237 else
7238 {
7239 /* Handle calls that pass values in multiple non-contiguous
7240 locations. The Irix 6 ABI has examples of this. */
7241 if (target == 0 || ! safe_from_p (target, exp, 1)
7242 || GET_CODE (target) == PARALLEL
7243 || modifier == EXPAND_STACK_PARM)
7244 target
7245 = assign_temp (build_qualified_type (type,
7246 (TYPE_QUALS (type)
7247 | (TREE_READONLY (exp)
7248 * TYPE_QUAL_CONST))),
7249 0, TREE_ADDRESSABLE (exp), 1);
7250
7251 store_constructor (exp, target, 0, int_expr_size (exp));
7252 return target;
7253 }
7254
7255 case MISALIGNED_INDIRECT_REF:
7256 case ALIGN_INDIRECT_REF:
7257 case INDIRECT_REF:
7258 {
7259 tree exp1 = TREE_OPERAND (exp, 0);
7260
7261 if (modifier != EXPAND_WRITE)
7262 {
7263 tree t;
7264
7265 t = fold_read_from_constant_string (exp);
7266 if (t)
7267 return expand_expr (t, target, tmode, modifier);
7268 }
7269
7270 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7271 op0 = memory_address (mode, op0);
7272
7273 if (code == ALIGN_INDIRECT_REF)
7274 {
7275 int align = TYPE_ALIGN_UNIT (type);
7276 op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
7277 op0 = memory_address (mode, op0);
7278 }
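/* Added note: for ALIGN_INDIRECT_REF the address is simply rounded
   down to the type's alignment; e.g. with a 16-byte aligned type the
   address is ANDed with -16 above. */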
7279
7280 temp = gen_rtx_MEM (mode, op0);
7281
7282 set_mem_attributes (temp, exp, 0);
7283
7284 /* Resolve the misalignment now, so that we don't have to remember
7285 to resolve it later. Of course, this only works for reads. */
7286 /* ??? When we get around to supporting writes, we'll have to handle
7287 this in store_expr directly. The vectorizer isn't generating
7288 those yet, however. */
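/* Added note: e.g. a misaligned vector load generated by the
   vectorizer is expanded here through the target's movmisalign<mode>
   pattern into a fresh pseudo rather than through an ordinary move. */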
7289 if (code == MISALIGNED_INDIRECT_REF)
7290 {
7291 int icode;
7292 rtx reg, insn;
7293
7294 gcc_assert (modifier == EXPAND_NORMAL
7295 || modifier == EXPAND_STACK_PARM);
7296
7297 /* The vectorizer should have already checked the mode. */
7298 icode = movmisalign_optab->handlers[mode].insn_code;
7299 gcc_assert (icode != CODE_FOR_nothing);
7300
7301 /* We've already validated the memory, and we're creating a
7302 new pseudo destination. The predicates really can't fail. */
7303 reg = gen_reg_rtx (mode);
7304
7305 /* Nor can the insn generator. */
7306 insn = GEN_FCN (icode) (reg, temp);
7307 emit_insn (insn);
7308
7309 return reg;
7310 }
7311
7312 return temp;
7313 }
7314
7315 case TARGET_MEM_REF:
7316 {
7317 struct mem_address addr;
7318
7319 get_address_description (exp, &addr);
7320 op0 = addr_for_mem_ref (&addr, true);
7321 op0 = memory_address (mode, op0);
7322 temp = gen_rtx_MEM (mode, op0);
7323 set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
7324 }
7325 return temp;
7326
7327 case ARRAY_REF:
7328
7329 {
7330 tree array = TREE_OPERAND (exp, 0);
7331 tree index = TREE_OPERAND (exp, 1);
7332
7333 /* Fold an expression like: "foo"[2].
7334 This is not done in fold so it won't happen inside &.
7335 Don't fold if this is for wide characters since it's too
7336 difficult to do correctly and this is a very rare case. */
7337
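/* Illustrative example (added): fold_read_from_constant_string turns
   "foo"[2] into the character constant 'o', so no reference to the
   string's memory is ever emitted for such a read. */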
7338 if (modifier != EXPAND_CONST_ADDRESS
7339 && modifier != EXPAND_INITIALIZER
7340 && modifier != EXPAND_MEMORY)
7341 {
7342 tree t = fold_read_from_constant_string (exp);
7343
7344 if (t)
7345 return expand_expr (t, target, tmode, modifier);
7346 }
7347
7348 /* If this is a constant index into a constant array,
7349 just get the value from the array. Handle both cases: when
7350 we have an explicit constructor and when our operand is a variable
7351 that was declared const. */
7352
7353 if (modifier != EXPAND_CONST_ADDRESS
7354 && modifier != EXPAND_INITIALIZER
7355 && modifier != EXPAND_MEMORY
7356 && TREE_CODE (array) == CONSTRUCTOR
7357 && ! TREE_SIDE_EFFECTS (array)
7358 && TREE_CODE (index) == INTEGER_CST)
7359 {
7360 unsigned HOST_WIDE_INT ix;
7361 tree field, value;
7362
7363 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
7364 field, value)
7365 if (tree_int_cst_equal (field, index))
7366 {
7367 if (!TREE_SIDE_EFFECTS (value))
7368 return expand_expr (fold (value), target, tmode, modifier);
7369 break;
7370 }
7371 }
7372
7373 else if (optimize >= 1
7374 && modifier != EXPAND_CONST_ADDRESS
7375 && modifier != EXPAND_INITIALIZER
7376 && modifier != EXPAND_MEMORY
7377 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7378 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7379 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
7380 && targetm.binds_local_p (array))
7381 {
7382 if (TREE_CODE (index) == INTEGER_CST)
7383 {
7384 tree init = DECL_INITIAL (array);
7385
7386 if (TREE_CODE (init) == CONSTRUCTOR)
7387 {
7388 unsigned HOST_WIDE_INT ix;
7389 tree field, value;
7390
7391 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
7392 field, value)
7393 if (tree_int_cst_equal (field, index))
7394 {
7395 if (!TREE_SIDE_EFFECTS (value))
7396 return expand_expr (fold (value), target, tmode,
7397 modifier);
7398 break;
7399 }
7400 }
7401 else if (TREE_CODE (init) == STRING_CST)
7402 {
7403 tree index1 = index;
7404 tree low_bound = array_ref_low_bound (exp);
7405 index1 = fold_convert (sizetype, TREE_OPERAND (exp, 1));
7406
7407 /* Optimize the special case of a zero lower bound.
7408
7409 We convert the low_bound to sizetype to avoid some problems
7410 with constant folding. (E.g. suppose the lower bound is 1,
7411 and its mode is QI. Without the conversion, (ARRAY
7412 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7413 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7414
7415 if (! integer_zerop (low_bound))
7416 index1 = size_diffop (index1, fold_convert (sizetype,
7417 low_bound));
7418
7419 if (0 > compare_tree_int (index1,
7420 TREE_STRING_LENGTH (init)))
7421 {
7422 tree type = TREE_TYPE (TREE_TYPE (init));
7423 enum machine_mode mode = TYPE_MODE (type);
7424
7425 if (GET_MODE_CLASS (mode) == MODE_INT
7426 && GET_MODE_SIZE (mode) == 1)
7427 return gen_int_mode (TREE_STRING_POINTER (init)
7428 [TREE_INT_CST_LOW (index1)],
7429 mode);
7430 }
7431 }
7432 }
7433 }
7434 }
7435 goto normal_inner_ref;
7436
7437 case COMPONENT_REF:
7438 /* If the operand is a CONSTRUCTOR, we can just extract the
7439 appropriate field if it is present. */
7440 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7441 {
7442 unsigned HOST_WIDE_INT idx;
7443 tree field, value;
7444
7445 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7446 idx, field, value)
7447 if (field == TREE_OPERAND (exp, 1)
7448 /* We can normally use the value of the field in the
7449 CONSTRUCTOR. However, if this is a bitfield in
7450 an integral mode that we can fit in a HOST_WIDE_INT,
7451 we must mask only the number of bits in the bitfield,
7452 since this is done implicitly by the constructor. If
7453 the bitfield does not meet either of those conditions,
7454 we can't do this optimization. */
7455 && (! DECL_BIT_FIELD (field)
7456 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
7457 && (GET_MODE_BITSIZE (DECL_MODE (field))
7458 <= HOST_BITS_PER_WIDE_INT))))
7459 {
7460 if (DECL_BIT_FIELD (field)
7461 && modifier == EXPAND_STACK_PARM)
7462 target = 0;
7463 op0 = expand_expr (value, target, tmode, modifier);
7464 if (DECL_BIT_FIELD (field))
7465 {
7466 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
7467 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
7468
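/* Added sketch: for an unsigned bitfield of BITSIZE bits the value is
   masked with (1 << BITSIZE) - 1 (e.g. a 3-bit field is ANDed with 7);
   for a signed bitfield it is instead shifted left and then
   arithmetically right by GET_MODE_BITSIZE (imode) - BITSIZE to
   sign-extend it. */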
7469 if (TYPE_UNSIGNED (TREE_TYPE (field)))
7470 {
7471 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7472 op0 = expand_and (imode, op0, op1, target);
7473 }
7474 else
7475 {
7476 tree count
7477 = build_int_cst (NULL_TREE,
7478 GET_MODE_BITSIZE (imode) - bitsize);
7479
7480 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7481 target, 0);
7482 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7483 target, 0);
7484 }
7485 }
7486
7487 return op0;
7488 }
7489 }
7490 goto normal_inner_ref;
7491
7492 case BIT_FIELD_REF:
7493 case ARRAY_RANGE_REF:
7494 normal_inner_ref:
7495 {
7496 enum machine_mode mode1;
7497 HOST_WIDE_INT bitsize, bitpos;
7498 tree offset;
7499 int volatilep = 0;
7500 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7501 &mode1, &unsignedp, &volatilep, true);
7502 rtx orig_op0;
7503
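/* Illustrative example (added): for a COMPONENT_REF such as s.f,
   where F is a 5-bit field starting at bit 3 of S, the call to
   get_inner_reference above gives TEM = s, BITSIZE = 5, BITPOS = 3
   and OFFSET = NULL_TREE. */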
7504 /* If we got back the original object, something is wrong. Perhaps
7505 we are evaluating an expression too early. In any event, don't
7506 infinitely recurse. */
7507 gcc_assert (tem != exp);
7508
7509 /* If TEM's type is a union of variable size, pass TARGET to the inner
7510 computation, since it will need a temporary and TARGET is known
7511 to be usable as one. This occurs in unchecked conversion in Ada. */
7512
7513 orig_op0 = op0
7514 = expand_expr (tem,
7515 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7516 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7517 != INTEGER_CST)
7518 && modifier != EXPAND_STACK_PARM
7519 ? target : NULL_RTX),
7520 VOIDmode,
7521 (modifier == EXPAND_INITIALIZER
7522 || modifier == EXPAND_CONST_ADDRESS
7523 || modifier == EXPAND_STACK_PARM)
7524 ? modifier : EXPAND_NORMAL);
7525
7526 /* If this is a constant, put it into a register if it is a legitimate
7527 constant, OFFSET is 0, and we won't try to extract outside the
7528 register (in case we were passed a partially uninitialized object
7529 or a view_conversion to a larger size). Force the constant to
7530 memory otherwise. */
7531 if (CONSTANT_P (op0))
7532 {
7533 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7534 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7535 && offset == 0
7536 && bitpos + bitsize <= GET_MODE_BITSIZE (mode))
7537 op0 = force_reg (mode, op0);
7538 else
7539 op0 = validize_mem (force_const_mem (mode, op0));
7540 }
7541
7542 /* Otherwise, if this object is not in memory and we either have an
7543 offset, a BLKmode result, or a reference outside the object, put it
7544 there. Such cases can occur in Ada if we have unchecked conversion
7545 of an expression from a scalar type to an array or record type or
7546 for an ARRAY_RANGE_REF whose type is BLKmode. */
7547 else if (!MEM_P (op0)
7548 && (offset != 0
7549 || (bitpos + bitsize > GET_MODE_BITSIZE (GET_MODE (op0)))
7550 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7551 {
7552 tree nt = build_qualified_type (TREE_TYPE (tem),
7553 (TYPE_QUALS (TREE_TYPE (tem))
7554 | TYPE_QUAL_CONST));
7555 rtx memloc = assign_temp (nt, 1, 1, 1);
7556
7557 emit_move_insn (memloc, op0);
7558 op0 = memloc;
7559 }
7560
7561 if (offset != 0)
7562 {
7563 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7564 EXPAND_SUM);
7565
7566 gcc_assert (MEM_P (op0));
7567
7568 #ifdef POINTERS_EXTEND_UNSIGNED
7569 if (GET_MODE (offset_rtx) != Pmode)
7570 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7571 #else
7572 if (GET_MODE (offset_rtx) != ptr_mode)
7573 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7574 #endif
7575
7576 if (GET_MODE (op0) == BLKmode
7577 /* A constant address in OP0 can have VOIDmode; we must
7578 not try to call force_reg in that case. */
7579 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7580 && bitsize != 0
7581 && (bitpos % bitsize) == 0
7582 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7583 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7584 {
7585 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7586 bitpos = 0;
7587 }
7588
7589 op0 = offset_address (op0, offset_rtx,
7590 highest_pow2_factor (offset));
7591 }
7592
7593 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7594 record its alignment as BIGGEST_ALIGNMENT. */
7595 if (MEM_P (op0) && bitpos == 0 && offset != 0
7596 && is_aligning_offset (offset, tem))
7597 set_mem_align (op0, BIGGEST_ALIGNMENT);
7598
7599 /* Don't forget about volatility even if this is a bitfield. */
7600 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
7601 {
7602 if (op0 == orig_op0)
7603 op0 = copy_rtx (op0);
7604
7605 MEM_VOLATILE_P (op0) = 1;
7606 }
7607
7608 /* The following code doesn't handle CONCAT.
7609 Assume only bitpos == 0 can be used for CONCAT, due to
7610 one-element arrays having the same mode as their element. */
7611 if (GET_CODE (op0) == CONCAT)
7612 {
7613 gcc_assert (bitpos == 0
7614 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
7615 return op0;
7616 }
7617
7618 /* In cases where an aligned union has an unaligned object
7619 as a field, we might be extracting a BLKmode value from
7620 an integer-mode (e.g., SImode) object. Handle this case
7621 by doing the extract into an object as wide as the field
7622 (which we know to be the width of a basic mode), then
7623 storing into memory, and changing the mode to BLKmode. */
7624 if (mode1 == VOIDmode
7625 || REG_P (op0) || GET_CODE (op0) == SUBREG
7626 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7627 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7628 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7629 && modifier != EXPAND_CONST_ADDRESS
7630 && modifier != EXPAND_INITIALIZER)
7631 /* If the field isn't aligned enough to fetch as a memref,
7632 fetch it as a bit field. */
7633 || (mode1 != BLKmode
7634 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7635 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7636 || (MEM_P (op0)
7637 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7638 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7639 && ((modifier == EXPAND_CONST_ADDRESS
7640 || modifier == EXPAND_INITIALIZER)
7641 ? STRICT_ALIGNMENT
7642 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7643 || (bitpos % BITS_PER_UNIT != 0)))
7644 /* If the type and the field are a constant size and the
7645 size of the type isn't the same size as the bitfield,
7646 we must use bitfield operations. */
7647 || (bitsize >= 0
7648 && TYPE_SIZE (TREE_TYPE (exp))
7649 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
7650 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7651 bitsize)))
7652 {
7653 enum machine_mode ext_mode = mode;
7654
7655 if (ext_mode == BLKmode
7656 && ! (target != 0 && MEM_P (op0)
7657 && MEM_P (target)
7658 && bitpos % BITS_PER_UNIT == 0))
7659 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7660
7661 if (ext_mode == BLKmode)
7662 {
7663 if (target == 0)
7664 target = assign_temp (type, 0, 1, 1);
7665
7666 if (bitsize == 0)
7667 return target;
7668
7669 /* In this case, BITPOS must start at a byte boundary and
7670 TARGET, if specified, must be a MEM. */
7671 gcc_assert (MEM_P (op0)
7672 && (!target || MEM_P (target))
7673 && !(bitpos % BITS_PER_UNIT));
7674
7675 emit_block_move (target,
7676 adjust_address (op0, VOIDmode,
7677 bitpos / BITS_PER_UNIT),
7678 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7679 / BITS_PER_UNIT),
7680 (modifier == EXPAND_STACK_PARM
7681 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7682
7683 return target;
7684 }
7685
7686 op0 = validize_mem (op0);
7687
7688 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7689 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7690
7691 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7692 (modifier == EXPAND_STACK_PARM
7693 ? NULL_RTX : target),
7694 ext_mode, ext_mode);
7695
7696 /* If the result is a record type and BITSIZE is narrower than
7697 the mode of OP0, an integral mode, and this is a big endian
7698 machine, we must put the field into the high-order bits. */
7699 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7700 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7701 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7702 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7703 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7704 - bitsize),
7705 op0, 1);
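/* Illustrative example (added): extracting a 10-bit record value into
   an SImode OP0 on a big-endian target shifts it left by 32 - 10 = 22
   here, so the field ends up in the high-order bits of the word. */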
7706
7707 /* If the result type is BLKmode, store the data into a temporary
7708 of the appropriate type, but with the mode corresponding to the
7709 mode for the data we have (op0's mode). It's tempting to make
7710 this a constant type, since we know it's only being stored once,
7711 but that can cause problems if we are taking the address of this
7712 COMPONENT_REF because the MEM of any reference via that address
7713 will have flags corresponding to the type, which will not
7714 necessarily be constant. */
7715 if (mode == BLKmode)
7716 {
7717 HOST_WIDE_INT size = GET_MODE_BITSIZE (ext_mode);
7718 rtx new;
7719
7720 /* If the reference doesn't use the alias set of its type,
7721 we cannot create the temporary using that type. */
7722 if (component_uses_parent_alias_set (exp))
7723 {
7724 new = assign_stack_local (ext_mode, size, 0);
7725 set_mem_alias_set (new, get_alias_set (exp));
7726 }
7727 else
7728 new = assign_stack_temp_for_type (ext_mode, size, 0, type);
7729
7730 emit_move_insn (new, op0);
7731 op0 = copy_rtx (new);
7732 PUT_MODE (op0, BLKmode);
7733 set_mem_attributes (op0, exp, 1);
7734 }
7735
7736 return op0;
7737 }
7738
7739 /* If the result is BLKmode, use that to access the object
7740 now as well. */
7741 if (mode == BLKmode)
7742 mode1 = BLKmode;
7743
7744 /* Get a reference to just this component. */
7745 if (modifier == EXPAND_CONST_ADDRESS
7746 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7747 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7748 else
7749 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7750
7751 if (op0 == orig_op0)
7752 op0 = copy_rtx (op0);
7753
7754 set_mem_attributes (op0, exp, 0);
7755 if (REG_P (XEXP (op0, 0)))
7756 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7757
7758 MEM_VOLATILE_P (op0) |= volatilep;
7759 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7760 || modifier == EXPAND_CONST_ADDRESS
7761 || modifier == EXPAND_INITIALIZER)
7762 return op0;
7763 else if (target == 0)
7764 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7765
7766 convert_move (target, op0, unsignedp);
7767 return target;
7768 }
7769
7770 case OBJ_TYPE_REF:
7771 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
7772
7773 case CALL_EXPR:
7774 /* Check for a built-in function. */
7775 if (TREE_CODE (CALL_EXPR_FN (exp)) == ADDR_EXPR
7776 && (TREE_CODE (TREE_OPERAND (CALL_EXPR_FN (exp), 0))
7777 == FUNCTION_DECL)
7778 && DECL_BUILT_IN (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
7779 {
7780 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (CALL_EXPR_FN (exp), 0))
7781 == BUILT_IN_FRONTEND)
7782 return lang_hooks.expand_expr (exp, original_target,
7783 tmode, modifier,
7784 alt_rtl);
7785 else
7786 return expand_builtin (exp, target, subtarget, tmode, ignore);
7787 }
7788
7789 return expand_call (exp, target, ignore);
7790
7791 case NON_LVALUE_EXPR:
7792 case NOP_EXPR:
7793 case CONVERT_EXPR:
7794 if (TREE_OPERAND (exp, 0) == error_mark_node)
7795 return const0_rtx;
7796
7797 if (TREE_CODE (type) == UNION_TYPE)
7798 {
7799 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7800
7801 /* If both input and output are BLKmode, this conversion isn't doing
7802 anything except possibly changing memory attributes. */
7803 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7804 {
7805 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7806 modifier);
7807
7808 result = copy_rtx (result);
7809 set_mem_attributes (result, exp, 0);
7810 return result;
7811 }
7812
7813 if (target == 0)
7814 {
7815 if (TYPE_MODE (type) != BLKmode)
7816 target = gen_reg_rtx (TYPE_MODE (type));
7817 else
7818 target = assign_temp (type, 0, 1, 1);
7819 }
7820
7821 if (MEM_P (target))
7822 /* Store data into beginning of memory target. */
7823 store_expr (TREE_OPERAND (exp, 0),
7824 adjust_address (target, TYPE_MODE (valtype), 0),
7825 modifier == EXPAND_STACK_PARM);
7826
7827 else
7828 {
7829 gcc_assert (REG_P (target));
7830
7831 /* Store this field into a union of the proper type. */
7832 store_field (target,
7833 MIN ((int_size_in_bytes (TREE_TYPE
7834 (TREE_OPERAND (exp, 0)))
7835 * BITS_PER_UNIT),
7836 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7837 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7838 type, 0);
7839 }
7840
7841 /* Return the entire union. */
7842 return target;
7843 }
7844
7845 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7846 {
7847 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7848 modifier);
7849
7850 /* If the signedness of the conversion differs and OP0 is
7851 a promoted SUBREG, clear that indication since we now
7852 have to do the proper extension. */
7853 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7854 && GET_CODE (op0) == SUBREG)
7855 SUBREG_PROMOTED_VAR_P (op0) = 0;
7856
7857 return REDUCE_BIT_FIELD (op0);
7858 }
7859
7860 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode,
7861 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
7862 if (GET_MODE (op0) == mode)
7863 ;
7864
7865 /* If OP0 is a constant, just convert it into the proper mode. */
7866 else if (CONSTANT_P (op0))
7867 {
7868 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7869 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7870
7871 if (modifier == EXPAND_INITIALIZER)
7872 op0 = simplify_gen_subreg (mode, op0, inner_mode,
7873 subreg_lowpart_offset (mode,
7874 inner_mode));
7875 else
7876 op0 = convert_modes (mode, inner_mode, op0,
7877 TYPE_UNSIGNED (inner_type));
7878 }
7879
7880 else if (modifier == EXPAND_INITIALIZER)
7881 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7882
7883 else if (target == 0)
7884 op0 = convert_to_mode (mode, op0,
7885 TYPE_UNSIGNED (TREE_TYPE
7886 (TREE_OPERAND (exp, 0))));
7887 else
7888 {
7889 convert_move (target, op0,
7890 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7891 op0 = target;
7892 }
7893
7894 return REDUCE_BIT_FIELD (op0);
7895
7896 case VIEW_CONVERT_EXPR:
7897 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7898
7899 /* If the input and output modes are both the same, we are done. */
7900 if (TYPE_MODE (type) == GET_MODE (op0))
7901 ;
7902 /* If neither mode is BLKmode, and both modes are the same size
7903 then we can use gen_lowpart. */
7904 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7905 && GET_MODE_SIZE (TYPE_MODE (type))
7906 == GET_MODE_SIZE (GET_MODE (op0)))
7907 {
7908 if (GET_CODE (op0) == SUBREG)
7909 op0 = force_reg (GET_MODE (op0), op0);
7910 op0 = gen_lowpart (TYPE_MODE (type), op0);
7911 }
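/* Illustrative example (added): a VIEW_CONVERT_EXPR reinterpreting a
   32-bit float as a 32-bit integer takes the branch above; if OP0 is
   (reg:SF 60), the result is typically just (subreg:SI (reg:SF 60) 0)
   with no conversion insns. */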
7912 /* If both modes are integral, then we can convert from one to the
7913 other. */
7914 else if (SCALAR_INT_MODE_P (GET_MODE (op0))
7915 && SCALAR_INT_MODE_P (TYPE_MODE (type)))
7916 op0 = convert_modes (TYPE_MODE (type), GET_MODE (op0), op0,
7917 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7918 /* As a last resort, spill op0 to memory, and reload it in a
7919 different mode. */
7920 else if (!MEM_P (op0))
7921 {
7922 /* If the operand is not a MEM, force it into memory. Since we
7923 are going to be changing the mode of the MEM, don't call
7924 force_const_mem for constants because we don't allow pool
7925 constants to change mode. */
7926 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7927
7928 gcc_assert (!TREE_ADDRESSABLE (exp));
7929
7930 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7931 target
7932 = assign_stack_temp_for_type
7933 (TYPE_MODE (inner_type),
7934 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7935
7936 emit_move_insn (target, op0);
7937 op0 = target;
7938 }
7939
7940 /* At this point, OP0 is in the correct mode. If the output type is such
7941 that the operand is known to be aligned, indicate that it is.
7942 Otherwise, we need only be concerned about alignment for non-BLKmode
7943 results. */
7944 if (MEM_P (op0))
7945 {
7946 op0 = copy_rtx (op0);
7947
7948 if (TYPE_ALIGN_OK (type))
7949 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7950 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7951 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7952 {
7953 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7954 HOST_WIDE_INT temp_size
7955 = MAX (int_size_in_bytes (inner_type),
7956 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7957 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7958 temp_size, 0, type);
7959 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7960
7961 gcc_assert (!TREE_ADDRESSABLE (exp));
7962
7963 if (GET_MODE (op0) == BLKmode)
7964 emit_block_move (new_with_op0_mode, op0,
7965 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7966 (modifier == EXPAND_STACK_PARM
7967 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7968 else
7969 emit_move_insn (new_with_op0_mode, op0);
7970
7971 op0 = new;
7972 }
7973
7974 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7975 }
7976
7977 return op0;
7978
7979 case PLUS_EXPR:
7980 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7981 something else, make sure we add the register to the constant and
7982 then to the other thing. This case can occur during strength
7983 reduction and doing it this way will produce better code if the
7984 frame pointer or argument pointer is eliminated.
7985
7986 fold-const.c will ensure that the constant is always in the inner
7987 PLUS_EXPR, so the only case we need to do anything about is if
7988 sp, ap, or fp is our second argument, in which case we must swap
7989 the innermost first argument and our second argument. */
7990
7991 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7992 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7993 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
7994 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7995 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7996 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7997 {
7998 tree t = TREE_OPERAND (exp, 1);
7999
8000 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8001 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
8002 }
8003
8004 /* If the result is to be ptr_mode and we are adding an integer to
8005 something, we might be forming a constant. So try to use
8006 plus_constant. If it produces a sum and we can't accept it,
8007 use force_operand. This allows P = &ARR[const] to generate
8008 efficient code on machines where a SYMBOL_REF is not a valid
8009 address.
8010
8011 If this is an EXPAND_SUM call, always return the sum. */
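/* Illustrative example (added, assuming 4-byte elements): expanding
   P = &ARR[10] can reach here as (symbol_ref arr) plus a constant;
   plus_constant folds that into a single
   (const (plus (symbol_ref arr) (const_int 40))) instead of emitting
   an addition at run time. */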
8012 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8013 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8014 {
8015 if (modifier == EXPAND_STACK_PARM)
8016 target = 0;
8017 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
8018 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8019 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
8020 {
8021 rtx constant_part;
8022
8023 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
8024 EXPAND_SUM);
8025 /* Use immed_double_const to ensure that the constant is
8026 truncated according to the mode of OP1, then sign extended
8027 to a HOST_WIDE_INT. Using the constant directly can result
8028 in non-canonical RTL in a 64x32 cross compile. */
8029 constant_part
8030 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
8031 (HOST_WIDE_INT) 0,
8032 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
8033 op1 = plus_constant (op1, INTVAL (constant_part));
8034 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8035 op1 = force_operand (op1, target);
8036 return REDUCE_BIT_FIELD (op1);
8037 }
8038
8039 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8040 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8041 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
8042 {
8043 rtx constant_part;
8044
8045 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8046 (modifier == EXPAND_INITIALIZER
8047 ? EXPAND_INITIALIZER : EXPAND_SUM));
8048 if (! CONSTANT_P (op0))
8049 {
8050 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8051 VOIDmode, modifier);
8052 /* Return a PLUS if modifier says it's OK. */
8053 if (modifier == EXPAND_SUM
8054 || modifier == EXPAND_INITIALIZER)
8055 return simplify_gen_binary (PLUS, mode, op0, op1);
8056 goto binop2;
8057 }
8058 /* Use immed_double_const to ensure that the constant is
8059 truncated according to the mode of OP0, then sign extended
8060 to a HOST_WIDE_INT. Using the constant directly can result
8061 in non-canonical RTL in a 64x32 cross compile. */
8062 constant_part
8063 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
8064 (HOST_WIDE_INT) 0,
8065 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
8066 op0 = plus_constant (op0, INTVAL (constant_part));
8067 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8068 op0 = force_operand (op0, target);
8069 return REDUCE_BIT_FIELD (op0);
8070 }
8071 }
8072
8073 /* No sense saving up arithmetic to be done
8074 if it's all in the wrong mode to form part of an address.
8075 And force_operand won't know whether to sign-extend or
8076 zero-extend. */
8077 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8078 || mode != ptr_mode)
8079 {
8080 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8081 subtarget, &op0, &op1, 0);
8082 if (op0 == const0_rtx)
8083 return op1;
8084 if (op1 == const0_rtx)
8085 return op0;
8086 goto binop2;
8087 }
8088
8089 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8090 subtarget, &op0, &op1, modifier);
8091 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8092
8093 case MINUS_EXPR:
8094 /* For initializers, we are allowed to return a MINUS of two
8095 symbolic constants. Here we handle all cases when both operands
8096 are constant. */
8097 /* Handle difference of two symbolic constants,
8098 for the sake of an initializer. */
8099 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8100 && really_constant_p (TREE_OPERAND (exp, 0))
8101 && really_constant_p (TREE_OPERAND (exp, 1)))
8102 {
8103 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8104 NULL_RTX, &op0, &op1, modifier);
8105
8106 /* If the last operand is a CONST_INT, use plus_constant of
8107 the negated constant. Else make the MINUS. */
8108 if (GET_CODE (op1) == CONST_INT)
8109 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
8110 else
8111 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8112 }
8113
8114 /* No sense saving up arithmetic to be done
8115 if it's all in the wrong mode to form part of an address.
8116 And force_operand won't know whether to sign-extend or
8117 zero-extend. */
8118 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8119 || mode != ptr_mode)
8120 goto binop;
8121
8122 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8123 subtarget, &op0, &op1, modifier);
8124
8125 /* Convert A - const to A + (-const). */
8126 if (GET_CODE (op1) == CONST_INT)
8127 {
8128 op1 = negate_rtx (mode, op1);
8129 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8130 }
8131
8132 goto binop2;
8133
8134 case MULT_EXPR:
8135 /* If first operand is constant, swap them.
8136 Thus the following special case checks need only
8137 check the second operand. */
8138 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8139 {
8140 tree t1 = TREE_OPERAND (exp, 0);
8141 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8142 TREE_OPERAND (exp, 1) = t1;
8143 }
8144
8145 /* Attempt to return something suitable for generating an
8146 indexed address, for machines that support that. */
8147
8148 if (modifier == EXPAND_SUM && mode == ptr_mode
8149 && host_integerp (TREE_OPERAND (exp, 1), 0))
8150 {
8151 tree exp1 = TREE_OPERAND (exp, 1);
8152
8153 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8154 EXPAND_SUM);
8155
8156 if (!REG_P (op0))
8157 op0 = force_operand (op0, NULL_RTX);
8158 if (!REG_P (op0))
8159 op0 = copy_to_mode_reg (mode, op0);
8160
8161 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8162 gen_int_mode (tree_low_cst (exp1, 0),
8163 TYPE_MODE (TREE_TYPE (exp1)))));
8164 }
8165
8166 if (modifier == EXPAND_STACK_PARM)
8167 target = 0;
8168
8169 /* Check for multiplying things that have been extended
8170 from a narrower type. If this machine supports multiplying
8171 in that narrower type with a result in the desired type,
8172 do it that way, and avoid the explicit type-conversion. */
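/* Illustrative example (added): with 16-bit shorts and 32-bit ints,
   (int) a * (int) b where A and B are shorts can be done as a single
   HImode x HImode -> SImode widening multiply (smul_widen_optab or
   umul_widen_optab) instead of extending both operands first; the
   mixed-signedness form is handled via usmul_widen_optab just below. */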
8173
8174 subexp0 = TREE_OPERAND (exp, 0);
8175 subexp1 = TREE_OPERAND (exp, 1);
8176 /* First, check if we have a multiplication of one signed and one
8177 unsigned operand. */
8178 if (TREE_CODE (subexp0) == NOP_EXPR
8179 && TREE_CODE (subexp1) == NOP_EXPR
8180 && TREE_CODE (type) == INTEGER_TYPE
8181 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8182 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8183 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8184 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp1, 0))))
8185 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8186 != TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp1, 0)))))
8187 {
8188 enum machine_mode innermode
8189 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (subexp0, 0)));
8190 this_optab = usmul_widen_optab;
8191 if (mode == GET_MODE_WIDER_MODE (innermode))
8192 {
8193 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8194 {
8195 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0))))
8196 expand_operands (TREE_OPERAND (subexp0, 0),
8197 TREE_OPERAND (subexp1, 0),
8198 NULL_RTX, &op0, &op1, 0);
8199 else
8200 expand_operands (TREE_OPERAND (subexp0, 0),
8201 TREE_OPERAND (subexp1, 0),
8202 NULL_RTX, &op1, &op0, 0);
8203
8204 goto binop3;
8205 }
8206 }
8207 }
8208 /* Check for a multiplication with matching signedness. */
8209 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8210 && TREE_CODE (type) == INTEGER_TYPE
8211 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8212 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8213 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8214 && int_fits_type_p (TREE_OPERAND (exp, 1),
8215 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8216 /* Don't use a widening multiply if a shift will do. */
8217 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8218 > HOST_BITS_PER_WIDE_INT)
8219 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8220 ||
8221 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8222 && (TYPE_PRECISION (TREE_TYPE
8223 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8224 == TYPE_PRECISION (TREE_TYPE
8225 (TREE_OPERAND
8226 (TREE_OPERAND (exp, 0), 0))))
8227 /* If both operands are extended, they must either both
8228 be zero-extended or both be sign-extended. */
8229 && (TYPE_UNSIGNED (TREE_TYPE
8230 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8231 == TYPE_UNSIGNED (TREE_TYPE
8232 (TREE_OPERAND
8233 (TREE_OPERAND (exp, 0), 0)))))))
8234 {
8235 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8236 enum machine_mode innermode = TYPE_MODE (op0type);
8237 bool zextend_p = TYPE_UNSIGNED (op0type);
8238 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8239 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8240
8241 if (mode == GET_MODE_2XWIDER_MODE (innermode))
8242 {
8243 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8244 {
8245 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8246 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8247 TREE_OPERAND (exp, 1),
8248 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8249 else
8250 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8251 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8252 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8253 goto binop3;
8254 }
8255 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
8256 && innermode == word_mode)
8257 {
8258 rtx htem, hipart;
8259 op0 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8260 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8261 op1 = convert_modes (innermode, mode,
8262 expand_normal (TREE_OPERAND (exp, 1)),
8263 unsignedp);
8264 else
8265 op1 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 1), 0));
8266 temp = expand_binop (mode, other_optab, op0, op1, target,
8267 unsignedp, OPTAB_LIB_WIDEN);
8268 hipart = gen_highpart (innermode, temp);
8269 htem = expand_mult_highpart_adjust (innermode, hipart,
8270 op0, op1, hipart,
8271 zextend_p);
8272 if (htem != hipart)
8273 emit_move_insn (hipart, htem);
8274 return REDUCE_BIT_FIELD (temp);
8275 }
8276 }
8277 }
8278 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8279 subtarget, &op0, &op1, 0);
8280 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8281
8282 case TRUNC_DIV_EXPR:
8283 case FLOOR_DIV_EXPR:
8284 case CEIL_DIV_EXPR:
8285 case ROUND_DIV_EXPR:
8286 case EXACT_DIV_EXPR:
8287 if (modifier == EXPAND_STACK_PARM)
8288 target = 0;
8289 /* Possible optimization: compute the dividend with EXPAND_SUM;
8290 then, if the divisor is constant, we can optimize the case
8291 where some terms of the dividend have coefficients divisible by it. */
8292 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8293 subtarget, &op0, &op1, 0);
8294 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8295
8296 case RDIV_EXPR:
8297 goto binop;
8298
8299 case TRUNC_MOD_EXPR:
8300 case FLOOR_MOD_EXPR:
8301 case CEIL_MOD_EXPR:
8302 case ROUND_MOD_EXPR:
8303 if (modifier == EXPAND_STACK_PARM)
8304 target = 0;
8305 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8306 subtarget, &op0, &op1, 0);
8307 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8308
8309 case FIX_TRUNC_EXPR:
8310 op0 = expand_normal (TREE_OPERAND (exp, 0));
8311 if (target == 0 || modifier == EXPAND_STACK_PARM)
8312 target = gen_reg_rtx (mode);
8313 expand_fix (target, op0, unsignedp);
8314 return target;
8315
8316 case FLOAT_EXPR:
8317 op0 = expand_normal (TREE_OPERAND (exp, 0));
8318 if (target == 0 || modifier == EXPAND_STACK_PARM)
8319 target = gen_reg_rtx (mode);
8320 /* expand_float can't figure out what to do if FROM has VOIDmode.
8321 So give it the correct mode. With -O, cse will optimize this. */
8322 if (GET_MODE (op0) == VOIDmode)
8323 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8324 op0);
8325 expand_float (target, op0,
8326 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8327 return target;
8328
8329 case NEGATE_EXPR:
8330 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8331 if (modifier == EXPAND_STACK_PARM)
8332 target = 0;
8333 temp = expand_unop (mode,
8334 optab_for_tree_code (NEGATE_EXPR, type),
8335 op0, target, 0);
8336 gcc_assert (temp);
8337 return REDUCE_BIT_FIELD (temp);
8338
8339 case ABS_EXPR:
8340 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8341 if (modifier == EXPAND_STACK_PARM)
8342 target = 0;
8343
8344 /* ABS_EXPR is not valid for complex arguments. */
8345 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8346 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8347
8348 /* Unsigned abs is simply the operand. Testing here means we don't
8349 risk generating incorrect code below. */
8350 if (TYPE_UNSIGNED (type))
8351 return op0;
8352
8353 return expand_abs (mode, op0, target, unsignedp,
8354 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8355
8356 case MAX_EXPR:
8357 case MIN_EXPR:
8358 target = original_target;
8359 if (target == 0
8360 || modifier == EXPAND_STACK_PARM
8361 || (MEM_P (target) && MEM_VOLATILE_P (target))
8362 || GET_MODE (target) != mode
8363 || (REG_P (target)
8364 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8365 target = gen_reg_rtx (mode);
8366 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8367 target, &op0, &op1, 0);
8368
8369 /* First try to do it with a special MIN or MAX instruction.
8370 If that does not win, use a conditional jump to select the proper
8371 value. */
8372 this_optab = optab_for_tree_code (code, type);
8373 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8374 OPTAB_WIDEN);
8375 if (temp != 0)
8376 return temp;
8377
8378 /* At this point, a MEM target is no longer useful; we will get better
8379 code without it. */
8380
8381 if (! REG_P (target))
8382 target = gen_reg_rtx (mode);
8383
8384 /* If op1 was placed in target, swap op0 and op1. */
8385 if (target != op0 && target == op1)
8386 {
8387 temp = op0;
8388 op0 = op1;
8389 op1 = temp;
8390 }
8391
8392 /* We generate better code and avoid problems with op1 mentioning
8393 target by forcing op1 into a pseudo if it isn't a constant. */
8394 if (! CONSTANT_P (op1))
8395 op1 = force_reg (mode, op1);
8396
8397 {
8398 enum rtx_code comparison_code;
8399 rtx cmpop1 = op1;
8400
8401 if (code == MAX_EXPR)
8402 comparison_code = unsignedp ? GEU : GE;
8403 else
8404 comparison_code = unsignedp ? LEU : LE;
8405
8406 /* Canonicalize to comparisons against 0. */
8407 if (op1 == const1_rtx)
8408 {
8409 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8410 or (a != 0 ? a : 1) for unsigned.
8411 For MIN we are safe converting (a <= 1 ? a : 1)
8412 into (a <= 0 ? a : 1) */
8413 cmpop1 = const0_rtx;
8414 if (code == MAX_EXPR)
8415 comparison_code = unsignedp ? NE : GT;
8416 }
8417 if (op1 == constm1_rtx && !unsignedp)
8418 {
8419 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8420 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8421 cmpop1 = const0_rtx;
8422 if (code == MIN_EXPR)
8423 comparison_code = LT;
8424 }
8425 #ifdef HAVE_conditional_move
8426 /* Use a conditional move if possible. */
8427 if (can_conditionally_move_p (mode))
8428 {
8429 rtx insn;
8430
8431 /* ??? Same problem as in expmed.c: emit_conditional_move
8432 forces a stack adjustment via compare_from_rtx, and we
8433 lose the stack adjustment if the sequence we are about
8434 to create is discarded. */
8435 do_pending_stack_adjust ();
8436
8437 start_sequence ();
8438
8439 /* Try to emit the conditional move. */
8440 insn = emit_conditional_move (target, comparison_code,
8441 op0, cmpop1, mode,
8442 op0, op1, mode,
8443 unsignedp);
8444
8445 /* If we could do the conditional move, emit the sequence,
8446 and return. */
8447 if (insn)
8448 {
8449 rtx seq = get_insns ();
8450 end_sequence ();
8451 emit_insn (seq);
8452 return target;
8453 }
8454
8455 /* Otherwise discard the sequence and fall back to code with
8456 branches. */
8457 end_sequence ();
8458 }
8459 #endif
8460 if (target != op0)
8461 emit_move_insn (target, op0);
8462
8463 temp = gen_label_rtx ();
8464 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8465 unsignedp, mode, NULL_RTX, NULL_RTX, temp);
8466 }
8467 emit_move_insn (target, op1);
8468 emit_label (temp);
8469 return target;
8470
8471 case BIT_NOT_EXPR:
8472 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8473 if (modifier == EXPAND_STACK_PARM)
8474 target = 0;
8475 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8476 gcc_assert (temp);
8477 return temp;
8478
8479 /* ??? Can optimize bitwise operations with one arg constant.
8480 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8481 and (a bitwise1 b) bitwise2 b (etc)
8482 but that is probably not worthwhile. */
8483
8484 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8485 boolean values when we want in all cases to compute both of them. In
8486 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8487 as actual zero-or-1 values and then bitwise anding. In cases where
8488 there cannot be any side effects, better code would be made by
8489 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8490 how to recognize those cases. */
8491
8492 case TRUTH_AND_EXPR:
8493 code = BIT_AND_EXPR;
8494 case BIT_AND_EXPR:
8495 goto binop;
8496
8497 case TRUTH_OR_EXPR:
8498 code = BIT_IOR_EXPR;
8499 case BIT_IOR_EXPR:
8500 goto binop;
8501
8502 case TRUTH_XOR_EXPR:
8503 code = BIT_XOR_EXPR;
8504 case BIT_XOR_EXPR:
8505 goto binop;
8506
8507 case LSHIFT_EXPR:
8508 case RSHIFT_EXPR:
8509 case LROTATE_EXPR:
8510 case RROTATE_EXPR:
8511 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8512 subtarget = 0;
8513 if (modifier == EXPAND_STACK_PARM)
8514 target = 0;
8515 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8516 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8517 unsignedp);
8518
8519 /* Could determine the answer when only additive constants differ. Also,
8520 the addition of one can be handled by changing the condition. */
8521 case LT_EXPR:
8522 case LE_EXPR:
8523 case GT_EXPR:
8524 case GE_EXPR:
8525 case EQ_EXPR:
8526 case NE_EXPR:
8527 case UNORDERED_EXPR:
8528 case ORDERED_EXPR:
8529 case UNLT_EXPR:
8530 case UNLE_EXPR:
8531 case UNGT_EXPR:
8532 case UNGE_EXPR:
8533 case UNEQ_EXPR:
8534 case LTGT_EXPR:
8535 temp = do_store_flag (exp,
8536 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8537 tmode != VOIDmode ? tmode : mode, 0);
8538 if (temp != 0)
8539 return temp;
8540
8541 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
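/* Added sketch of the fallback built below: TEMP is loaded, then
   "if (TEMP != 0) TEMP = 1" is emitted as a compare-and-jump around a
   move of const1_rtx, and TEMP is returned. */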
8542 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8543 && original_target
8544 && REG_P (original_target)
8545 && (GET_MODE (original_target)
8546 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8547 {
8548 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8549 VOIDmode, 0);
8550
8551 /* If temp is constant, we can just compute the result. */
8552 if (GET_CODE (temp) == CONST_INT)
8553 {
8554 if (INTVAL (temp) != 0)
8555 emit_move_insn (target, const1_rtx);
8556 else
8557 emit_move_insn (target, const0_rtx);
8558
8559 return target;
8560 }
8561
8562 if (temp != original_target)
8563 {
8564 enum machine_mode mode1 = GET_MODE (temp);
8565 if (mode1 == VOIDmode)
8566 mode1 = tmode != VOIDmode ? tmode : mode;
8567
8568 temp = copy_to_mode_reg (mode1, temp);
8569 }
8570
8571 op1 = gen_label_rtx ();
8572 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8573 GET_MODE (temp), unsignedp, op1);
8574 emit_move_insn (temp, const1_rtx);
8575 emit_label (op1);
8576 return temp;
8577 }
8578
8579 /* If no set-flag instruction, must generate a conditional store
8580 into a temporary variable. Drop through and handle this
8581 like && and ||. */
8582
8583 if (! ignore
8584 && (target == 0
8585 || modifier == EXPAND_STACK_PARM
8586 || ! safe_from_p (target, exp, 1)
8587 /* Make sure we don't have a hard reg (such as function's return
8588 value) live across basic blocks, if not optimizing. */
8589 || (!optimize && REG_P (target)
8590 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8591 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8592
8593 if (target)
8594 emit_move_insn (target, const0_rtx);
8595
8596 op1 = gen_label_rtx ();
8597 jumpifnot (exp, op1);
8598
8599 if (target)
8600 emit_move_insn (target, const1_rtx);
8601
8602 emit_label (op1);
8603 return ignore ? const0_rtx : target;
8604
8605 case TRUTH_NOT_EXPR:
8606 if (modifier == EXPAND_STACK_PARM)
8607 target = 0;
8608 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8609 /* The parser is careful to generate TRUTH_NOT_EXPR
8610 only with operands that are always zero or one. */
8611 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8612 target, 1, OPTAB_LIB_WIDEN);
8613 gcc_assert (temp);
8614 return temp;
8615
8616 case STATEMENT_LIST:
8617 {
8618 tree_stmt_iterator iter;
8619
8620 gcc_assert (ignore);
8621
8622 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
8623 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
8624 }
8625 return const0_rtx;
8626
8627 case COND_EXPR:
8628 /* A COND_EXPR with its type being VOID_TYPE represents a
8629 conditional jump and is handled in
8630 expand_gimple_cond_expr. */
8631 gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp)));
8632
8633 /* Note that COND_EXPRs whose type is a structure or union
8634 are required to be constructed to contain assignments of
8635 a temporary variable, so that we can evaluate them here
8636 for side effect only. If type is void, we must do likewise. */
8637
8638 gcc_assert (!TREE_ADDRESSABLE (type)
8639 && !ignore
8640 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
8641 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
8642
8643 /* If we are not to produce a result, we have no target. Otherwise,
8644 if a target was specified use it; it will not be used as an
8645 intermediate target unless it is safe. If no target, use a
8646 temporary. */
8647
8648 if (modifier != EXPAND_STACK_PARM
8649 && original_target
8650 && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8651 && GET_MODE (original_target) == mode
8652 #ifdef HAVE_conditional_move
8653 && (! can_conditionally_move_p (mode)
8654 || REG_P (original_target))
8655 #endif
8656 && !MEM_P (original_target))
8657 temp = original_target;
8658 else
8659 temp = assign_temp (type, 0, 0, 1);
8660
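/* Added sketch (illustrative): the code below emits roughly

       jumpifnot <cond> -> op0
       TEMP = <then-value>
       goto op1
     op0:
       TEMP = <else-value>
     op1:

   and returns TEMP. */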
8661 do_pending_stack_adjust ();
8662 NO_DEFER_POP;
8663 op0 = gen_label_rtx ();
8664 op1 = gen_label_rtx ();
8665 jumpifnot (TREE_OPERAND (exp, 0), op0);
8666 store_expr (TREE_OPERAND (exp, 1), temp,
8667 modifier == EXPAND_STACK_PARM);
8668
8669 emit_jump_insn (gen_jump (op1));
8670 emit_barrier ();
8671 emit_label (op0);
8672 store_expr (TREE_OPERAND (exp, 2), temp,
8673 modifier == EXPAND_STACK_PARM);
8674
8675 emit_label (op1);
8676 OK_DEFER_POP;
8677 return temp;
8678
8679 case VEC_COND_EXPR:
8680 target = expand_vec_cond_expr (exp, target);
8681 return target;
8682
8683 case MODIFY_EXPR:
8684 {
8685 tree lhs = TREE_OPERAND (exp, 0);
8686 tree rhs = TREE_OPERAND (exp, 1);
8687 gcc_assert (ignore);
8688 expand_assignment (lhs, rhs);
8689 return const0_rtx;
8690 }
8691
8692 case GIMPLE_MODIFY_STMT:
8693 {
8694 tree lhs = GIMPLE_STMT_OPERAND (exp, 0);
8695 tree rhs = GIMPLE_STMT_OPERAND (exp, 1);
8696
8697 gcc_assert (ignore);
8698
8699 /* Check for |= or &= of a bitfield of size 1 into another bitfield
8700 of size 1. In this case (unless we need the result of the
8701 assignment), we can do this more efficiently with a test
8702 followed by an assignment, if necessary.
8703
8704 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8705 things change so we do, this code should be enhanced to
8706 support it. */
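/* For illustration, a source-level sketch of the transformation done
   below (the struct and field names are hypothetical):

     s.a |= s.b;   becomes   if (s.b)  s.a = 1;
     s.a &= s.b;   becomes   if (!s.b) s.a = 0;

   so only a conditional jump and a constant store are emitted.  */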
8707 if (TREE_CODE (lhs) == COMPONENT_REF
8708 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8709 || TREE_CODE (rhs) == BIT_AND_EXPR)
8710 && TREE_OPERAND (rhs, 0) == lhs
8711 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8712 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8713 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8714 {
8715 rtx label = gen_label_rtx ();
8716 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
8717 do_jump (TREE_OPERAND (rhs, 1),
8718 value ? label : 0,
8719 value ? 0 : label);
8720 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value));
8721 do_pending_stack_adjust ();
8722 emit_label (label);
8723 return const0_rtx;
8724 }
8725
8726 expand_assignment (lhs, rhs);
8727 return const0_rtx;
8728 }
8729
8730 case RETURN_EXPR:
8731 if (!TREE_OPERAND (exp, 0))
8732 expand_null_return ();
8733 else
8734 expand_return (TREE_OPERAND (exp, 0));
8735 return const0_rtx;
8736
8737 case ADDR_EXPR:
8738 return expand_expr_addr_expr (exp, target, tmode, modifier);
8739
8740 case COMPLEX_EXPR:
8741 /* Get the rtx for the operands. */
8742 op0 = expand_normal (TREE_OPERAND (exp, 0));
8743 op1 = expand_normal (TREE_OPERAND (exp, 1));
8744
8745 if (!target)
8746 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8747
8748 /* Move the real (op0) and imaginary (op1) parts to their location. */
8749 write_complex_part (target, op0, false);
8750 write_complex_part (target, op1, true);
8751
8752 return target;
8753
8754 case REALPART_EXPR:
8755 op0 = expand_normal (TREE_OPERAND (exp, 0));
8756 return read_complex_part (op0, false);
8757
8758 case IMAGPART_EXPR:
8759 op0 = expand_normal (TREE_OPERAND (exp, 0));
8760 return read_complex_part (op0, true);
8761
8762 case RESX_EXPR:
8763 expand_resx_expr (exp);
8764 return const0_rtx;
8765
8766 case TRY_CATCH_EXPR:
8767 case CATCH_EXPR:
8768 case EH_FILTER_EXPR:
8769 case TRY_FINALLY_EXPR:
8770 /* Lowered by tree-eh.c. */
8771 gcc_unreachable ();
8772
8773 case WITH_CLEANUP_EXPR:
8774 case CLEANUP_POINT_EXPR:
8775 case TARGET_EXPR:
8776 case CASE_LABEL_EXPR:
8777 case VA_ARG_EXPR:
8778 case BIND_EXPR:
8779 case INIT_EXPR:
8780 case CONJ_EXPR:
8781 case COMPOUND_EXPR:
8782 case PREINCREMENT_EXPR:
8783 case PREDECREMENT_EXPR:
8784 case POSTINCREMENT_EXPR:
8785 case POSTDECREMENT_EXPR:
8786 case LOOP_EXPR:
8787 case EXIT_EXPR:
8788 case TRUTH_ANDIF_EXPR:
8789 case TRUTH_ORIF_EXPR:
8790 /* Lowered by gimplify.c. */
8791 gcc_unreachable ();
8792
8793 case EXC_PTR_EXPR:
8794 return get_exception_pointer (cfun);
8795
8796 case FILTER_EXPR:
8797 return get_exception_filter (cfun);
8798
8799 case FDESC_EXPR:
8800 /* Function descriptors are not valid except as
8801 initialization constants, and should not be expanded. */
8802 gcc_unreachable ();
8803
8804 case SWITCH_EXPR:
8805 expand_case (exp);
8806 return const0_rtx;
8807
8808 case LABEL_EXPR:
8809 expand_label (TREE_OPERAND (exp, 0));
8810 return const0_rtx;
8811
8812 case ASM_EXPR:
8813 expand_asm_expr (exp);
8814 return const0_rtx;
8815
8816 case WITH_SIZE_EXPR:
8817 /* WITH_SIZE_EXPR expands to its first argument. The caller should
8818 have pulled out the size to use in whatever context it needed. */
8819 return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
8820 modifier, alt_rtl);
8821
8822 case REALIGN_LOAD_EXPR:
8823 {
8824 tree oprnd0 = TREE_OPERAND (exp, 0);
8825 tree oprnd1 = TREE_OPERAND (exp, 1);
8826 tree oprnd2 = TREE_OPERAND (exp, 2);
8827 rtx op2;
8828
8829 this_optab = optab_for_tree_code (code, type);
8830 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8831 op2 = expand_normal (oprnd2);
8832 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8833 target, unsignedp);
8834 gcc_assert (temp);
8835 return temp;
8836 }
8837
8838 case DOT_PROD_EXPR:
8839 {
8840 tree oprnd0 = TREE_OPERAND (exp, 0);
8841 tree oprnd1 = TREE_OPERAND (exp, 1);
8842 tree oprnd2 = TREE_OPERAND (exp, 2);
8843 rtx op2;
8844
8845 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8846 op2 = expand_normal (oprnd2);
8847 target = expand_widen_pattern_expr (exp, op0, op1, op2,
8848 target, unsignedp);
8849 return target;
8850 }
8851
8852 case WIDEN_SUM_EXPR:
8853 {
8854 tree oprnd0 = TREE_OPERAND (exp, 0);
8855 tree oprnd1 = TREE_OPERAND (exp, 1);
8856
8857 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
8858 target = expand_widen_pattern_expr (exp, op0, NULL_RTX, op1,
8859 target, unsignedp);
8860 return target;
8861 }
8862
8863 case REDUC_MAX_EXPR:
8864 case REDUC_MIN_EXPR:
8865 case REDUC_PLUS_EXPR:
8866 {
8867 op0 = expand_normal (TREE_OPERAND (exp, 0));
8868 this_optab = optab_for_tree_code (code, type);
8869 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
8870 gcc_assert (temp);
8871 return temp;
8872 }
8873
8874 case VEC_EXTRACT_EVEN_EXPR:
8875 case VEC_EXTRACT_ODD_EXPR:
8876 {
8877 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8878 NULL_RTX, &op0, &op1, 0);
8879 this_optab = optab_for_tree_code (code, type);
8880 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8881 OPTAB_WIDEN);
8882 gcc_assert (temp);
8883 return temp;
8884 }
8885
8886 case VEC_INTERLEAVE_HIGH_EXPR:
8887 case VEC_INTERLEAVE_LOW_EXPR:
8888 {
8889 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8890 NULL_RTX, &op0, &op1, 0);
8891 this_optab = optab_for_tree_code (code, type);
8892 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8893 OPTAB_WIDEN);
8894 gcc_assert (temp);
8895 return temp;
8896 }
8897
8898 case VEC_LSHIFT_EXPR:
8899 case VEC_RSHIFT_EXPR:
8900 {
8901 target = expand_vec_shift_expr (exp, target);
8902 return target;
8903 }
8904
8905 case VEC_UNPACK_HI_EXPR:
8906 case VEC_UNPACK_LO_EXPR:
8907 {
8908 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8909 this_optab = optab_for_tree_code (code, type);
8910 temp = expand_widen_pattern_expr (exp, op0, NULL_RTX, NULL_RTX,
8911 target, unsignedp);
8912 gcc_assert (temp);
8913 return temp;
8914 }
8915
8916 case VEC_WIDEN_MULT_HI_EXPR:
8917 case VEC_WIDEN_MULT_LO_EXPR:
8918 {
8919 tree oprnd0 = TREE_OPERAND (exp, 0);
8920 tree oprnd1 = TREE_OPERAND (exp, 1);
8921
8922 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
8923 target = expand_widen_pattern_expr (exp, op0, op1, NULL_RTX,
8924 target, unsignedp);
8925 gcc_assert (target);
8926 return target;
8927 }
8928
8929 case VEC_PACK_TRUNC_EXPR:
8930 case VEC_PACK_SAT_EXPR:
8931 {
8932 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
8933 goto binop;
8934 }
8935
8936 default:
8937 return lang_hooks.expand_expr (exp, original_target, tmode,
8938 modifier, alt_rtl);
8939 }
8940
8941 /* Here to do an ordinary binary operator. */
8942 binop:
8943 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8944 subtarget, &op0, &op1, 0);
8945 binop2:
8946 this_optab = optab_for_tree_code (code, type);
8947 binop3:
8948 if (modifier == EXPAND_STACK_PARM)
8949 target = 0;
8950 temp = expand_binop (mode, this_optab, op0, op1, target,
8951 unsignedp, OPTAB_LIB_WIDEN);
8952 gcc_assert (temp);
8953 return REDUCE_BIT_FIELD (temp);
8954 }
8955 #undef REDUCE_BIT_FIELD
8956 \f
8957 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
8958 signedness of TYPE), possibly returning the result in TARGET. */
8959 static rtx
8960 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
8961 {
8962 HOST_WIDE_INT prec = TYPE_PRECISION (type);
8963 if (target && GET_MODE (target) != GET_MODE (exp))
8964 target = 0;
8965 /* For constant values, reduce using build_int_cst_type. */
8966 if (GET_CODE (exp) == CONST_INT)
8967 {
8968 HOST_WIDE_INT value = INTVAL (exp);
8969 tree t = build_int_cst_type (type, value);
8970 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
8971 }
8972 else if (TYPE_UNSIGNED (type))
8973 {
8974 rtx mask;
8975 if (prec < HOST_BITS_PER_WIDE_INT)
8976 mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
8977 GET_MODE (exp));
8978 else
8979 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
8980 ((unsigned HOST_WIDE_INT) 1
8981 << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
8982 GET_MODE (exp));
8983 return expand_and (GET_MODE (exp), exp, mask, target);
8984 }
8985 else
8986 {
8987 tree count = build_int_cst (NULL_TREE,
8988 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
8989 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8990 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8991 }
8992 }
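
/* A worked example of the reduction above, assuming a 32-bit mode and a
   bit-field type of precision 3: an unsigned value is masked with
   (1 << 3) - 1 = 7, while a signed value is shifted left by 32 - 3 = 29
   bits and then arithmetically shifted right by 29 bits so that the sign
   bit of the 3-bit field is propagated.  */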
8993 \f
8994 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
8995 when applied to the address of EXP produces an address known to be
8996 aligned more than BIGGEST_ALIGNMENT. */
8997
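/* As a rough sketch, the tree shape accepted below is

     (BIT_AND_EXPR (NEGATE_EXPR (ADDR_EXPR EXP)) C)

   with any NON_LVALUE/NOP/CONVERT wrappers stripped at each step, where
   C + 1 is a power of two and C is larger than
   BIGGEST_ALIGNMENT / BITS_PER_UNIT.  */
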
8998 static int
8999 is_aligning_offset (tree offset, tree exp)
9000 {
9001 /* Strip off any conversions. */
9002 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9003 || TREE_CODE (offset) == NOP_EXPR
9004 || TREE_CODE (offset) == CONVERT_EXPR)
9005 offset = TREE_OPERAND (offset, 0);
9006
9007 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9008 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9009 if (TREE_CODE (offset) != BIT_AND_EXPR
9010 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9011 || compare_tree_int (TREE_OPERAND (offset, 1),
9012 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
9013 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9014 return 0;
9015
9016 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9017 It must be NEGATE_EXPR. Then strip any more conversions. */
9018 offset = TREE_OPERAND (offset, 0);
9019 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9020 || TREE_CODE (offset) == NOP_EXPR
9021 || TREE_CODE (offset) == CONVERT_EXPR)
9022 offset = TREE_OPERAND (offset, 0);
9023
9024 if (TREE_CODE (offset) != NEGATE_EXPR)
9025 return 0;
9026
9027 offset = TREE_OPERAND (offset, 0);
9028 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9029 || TREE_CODE (offset) == NOP_EXPR
9030 || TREE_CODE (offset) == CONVERT_EXPR)
9031 offset = TREE_OPERAND (offset, 0);
9032
9033 /* This must now be the address of EXP. */
9034 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
9035 }
9036 \f
9037 /* Return the tree node if ARG corresponds to a string constant, or zero
9038 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9039 in bytes within the string that ARG is accessing. The type of the
9040 offset will be `sizetype'. */
9041
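/* For example (a sketch; STR is a hypothetical decl): given
   ARG = &"hello"[2], the STRING_CST "hello" is returned and *PTR_OFFSET
   is set to 2.  Given a PLUS_EXPR of (ADDR_EXPR STR) and a byte offset I,
   where STR is a read-only, locally-bound VAR_DECL initialized to a
   string literal, the literal from DECL_INITIAL (STR) is returned with
   *PTR_OFFSET = I, subject to the size checks below.  */
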
9042 tree
9043 string_constant (tree arg, tree *ptr_offset)
9044 {
9045 tree array, offset, lower_bound;
9046 STRIP_NOPS (arg);
9047
9048 if (TREE_CODE (arg) == ADDR_EXPR)
9049 {
9050 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9051 {
9052 *ptr_offset = size_zero_node;
9053 return TREE_OPERAND (arg, 0);
9054 }
9055 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
9056 {
9057 array = TREE_OPERAND (arg, 0);
9058 offset = size_zero_node;
9059 }
9060 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
9061 {
9062 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
9063 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
9064 if (TREE_CODE (array) != STRING_CST
9065 && TREE_CODE (array) != VAR_DECL)
9066 return 0;
9067
9068 /* Check if the array has a nonzero lower bound. */
9069 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
9070 if (!integer_zerop (lower_bound))
9071 {
9072 /* If the offset and base aren't both constants, return 0. */
9073 if (TREE_CODE (lower_bound) != INTEGER_CST)
9074 return 0;
9075 if (TREE_CODE (offset) != INTEGER_CST)
9076 return 0;
9077 /* Adjust offset by the lower bound. */
9078 offset = size_diffop (fold_convert (sizetype, offset),
9079 fold_convert (sizetype, lower_bound));
9080 }
9081 }
9082 else
9083 return 0;
9084 }
9085 else if (TREE_CODE (arg) == PLUS_EXPR)
9086 {
9087 tree arg0 = TREE_OPERAND (arg, 0);
9088 tree arg1 = TREE_OPERAND (arg, 1);
9089
9090 STRIP_NOPS (arg0);
9091 STRIP_NOPS (arg1);
9092
9093 if (TREE_CODE (arg0) == ADDR_EXPR
9094 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
9095 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
9096 {
9097 array = TREE_OPERAND (arg0, 0);
9098 offset = arg1;
9099 }
9100 else if (TREE_CODE (arg1) == ADDR_EXPR
9101 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
9102 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
9103 {
9104 array = TREE_OPERAND (arg1, 0);
9105 offset = arg0;
9106 }
9107 else
9108 return 0;
9109 }
9110 else
9111 return 0;
9112
9113 if (TREE_CODE (array) == STRING_CST)
9114 {
9115 *ptr_offset = fold_convert (sizetype, offset);
9116 return array;
9117 }
9118 else if (TREE_CODE (array) == VAR_DECL)
9119 {
9120 int length;
9121
9122 /* Variables initialized to string literals can be handled too. */
9123 if (DECL_INITIAL (array) == NULL_TREE
9124 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
9125 return 0;
9126
9127 /* The array must be read-only and non-volatile, and must bind locally. */
9128 if (! TREE_READONLY (array)
9129 || TREE_SIDE_EFFECTS (array)
9130 || ! targetm.binds_local_p (array))
9131 return 0;
9132
9133 /* Avoid const char foo[4] = "abcde"; */
9134 if (DECL_SIZE_UNIT (array) == NULL_TREE
9135 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
9136 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
9137 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
9138 return 0;
9139
9140 /* If the variable is bigger than the string literal, OFFSET must be
9141 constant and within the bounds of the string literal. */
9142 offset = fold_convert (sizetype, offset);
9143 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
9144 && (! host_integerp (offset, 1)
9145 || compare_tree_int (offset, length) >= 0))
9146 return 0;
9147
9148 *ptr_offset = offset;
9149 return DECL_INITIAL (array);
9150 }
9151
9152 return 0;
9153 }
9154 \f
9155 /* Generate code to calculate EXP using a store-flag instruction
9156 and return an rtx for the result. EXP is either a comparison
9157 or a TRUTH_NOT_EXPR whose operand is a comparison.
9158
9159 If TARGET is nonzero, store the result there if convenient.
9160
9161 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9162 cheap.
9163
9164 Return zero if there is no suitable set-flag instruction
9165 available on this machine.
9166
9167 Once expand_expr has been called on the arguments of the comparison,
9168 we are committed to doing the store flag, since it is not safe to
9169 re-evaluate the expression. We emit the store-flag insn by calling
9170 emit_store_flag, but only expand the arguments if we have a reason
9171 to believe that emit_store_flag will be successful. If we think that
9172 it will, but it isn't, we have to simulate the store-flag with a
9173 set/jump/set sequence. */
9174
9175 static rtx
9176 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
9177 {
9178 enum rtx_code code;
9179 tree arg0, arg1, type;
9180 tree tem;
9181 enum machine_mode operand_mode;
9182 int invert = 0;
9183 int unsignedp;
9184 rtx op0, op1;
9185 enum insn_code icode;
9186 rtx subtarget = target;
9187 rtx result, label;
9188
9189 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9190 result at the end. We can't simply invert the test since it would
9191 have already been inverted if it were valid. This case occurs for
9192 some floating-point comparisons. */
9193
9194 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9195 invert = 1, exp = TREE_OPERAND (exp, 0);
9196
9197 arg0 = TREE_OPERAND (exp, 0);
9198 arg1 = TREE_OPERAND (exp, 1);
9199
9200 /* Don't crash if the comparison was erroneous. */
9201 if (arg0 == error_mark_node || arg1 == error_mark_node)
9202 return const0_rtx;
9203
9204 type = TREE_TYPE (arg0);
9205 operand_mode = TYPE_MODE (type);
9206 unsignedp = TYPE_UNSIGNED (type);
9207
9208 /* We won't bother with BLKmode store-flag operations because it would mean
9209 passing a lot of information to emit_store_flag. */
9210 if (operand_mode == BLKmode)
9211 return 0;
9212
9213 /* We won't bother with store-flag operations involving function pointers
9214 when function pointers must be canonicalized before comparisons. */
9215 #ifdef HAVE_canonicalize_funcptr_for_compare
9216 if (HAVE_canonicalize_funcptr_for_compare
9217 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9218 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9219 == FUNCTION_TYPE))
9220 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9221 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9222 == FUNCTION_TYPE))))
9223 return 0;
9224 #endif
9225
9226 STRIP_NOPS (arg0);
9227 STRIP_NOPS (arg1);
9228
9229 /* Get the rtx comparison code to use. We know that EXP is a comparison
9230 operation of some type. Some comparisons against 1 and -1 can be
9231 converted to comparisons with zero. Do so here so that the tests
9232 below will be aware that we have a comparison with zero. These
9233 tests will not catch constants in the first operand, but constants
9234 are rarely passed as the first operand. */
9235
9236 switch (TREE_CODE (exp))
9237 {
9238 case EQ_EXPR:
9239 code = EQ;
9240 break;
9241 case NE_EXPR:
9242 code = NE;
9243 break;
9244 case LT_EXPR:
9245 if (integer_onep (arg1))
9246 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9247 else
9248 code = unsignedp ? LTU : LT;
9249 break;
9250 case LE_EXPR:
9251 if (! unsignedp && integer_all_onesp (arg1))
9252 arg1 = integer_zero_node, code = LT;
9253 else
9254 code = unsignedp ? LEU : LE;
9255 break;
9256 case GT_EXPR:
9257 if (! unsignedp && integer_all_onesp (arg1))
9258 arg1 = integer_zero_node, code = GE;
9259 else
9260 code = unsignedp ? GTU : GT;
9261 break;
9262 case GE_EXPR:
9263 if (integer_onep (arg1))
9264 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9265 else
9266 code = unsignedp ? GEU : GE;
9267 break;
9268
9269 case UNORDERED_EXPR:
9270 code = UNORDERED;
9271 break;
9272 case ORDERED_EXPR:
9273 code = ORDERED;
9274 break;
9275 case UNLT_EXPR:
9276 code = UNLT;
9277 break;
9278 case UNLE_EXPR:
9279 code = UNLE;
9280 break;
9281 case UNGT_EXPR:
9282 code = UNGT;
9283 break;
9284 case UNGE_EXPR:
9285 code = UNGE;
9286 break;
9287 case UNEQ_EXPR:
9288 code = UNEQ;
9289 break;
9290 case LTGT_EXPR:
9291 code = LTGT;
9292 break;
9293
9294 default:
9295 gcc_unreachable ();
9296 }
9297
9298 /* Put a constant second. */
9299 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9300 {
9301 tem = arg0; arg0 = arg1; arg1 = tem;
9302 code = swap_condition (code);
9303 }
9304
9305 /* If this is an equality or inequality test of a single bit, we can
9306 do this by shifting the bit being tested to the low-order bit and
9307 masking the result with the constant 1. If the condition was EQ,
9308 we xor it with 1. This does not require an scc insn and is faster
9309 than an scc insn even if we have it.
9310
9311 The code to make this transformation was moved into fold_single_bit_test,
9312 so we just call into the folder and expand its result. */
9313
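/* For instance, (x & 8) != 0 becomes (x >> 3) & 1, and (x & 8) == 0
   becomes ((x >> 3) & 1) ^ 1, so no scc instruction is needed.  */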
9314 if ((code == NE || code == EQ)
9315 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9316 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9317 {
9318 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
9319 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9320 arg0, arg1, type),
9321 target, VOIDmode, EXPAND_NORMAL);
9322 }
9323
9324 /* Now see if we are likely to be able to do this. Return if not. */
9325 if (! can_compare_p (code, operand_mode, ccp_store_flag))
9326 return 0;
9327
9328 icode = setcc_gen_code[(int) code];
9329
9330 if (icode == CODE_FOR_nothing)
9331 {
9332 enum machine_mode wmode;
9333
9334 for (wmode = operand_mode;
9335 icode == CODE_FOR_nothing && wmode != VOIDmode;
9336 wmode = GET_MODE_WIDER_MODE (wmode))
9337 icode = cstore_optab->handlers[(int) wmode].insn_code;
9338 }
9339
9340 if (icode == CODE_FOR_nothing
9341 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9342 {
9343 /* We can only do this if it is one of the special cases that
9344 can be handled without an scc insn. */
9345 if ((code == LT && integer_zerop (arg1))
9346 || (! only_cheap && code == GE && integer_zerop (arg1)))
9347 ;
9348 else if (! only_cheap && (code == NE || code == EQ)
9349 && TREE_CODE (type) != REAL_TYPE
9350 && ((abs_optab->handlers[(int) operand_mode].insn_code
9351 != CODE_FOR_nothing)
9352 || (ffs_optab->handlers[(int) operand_mode].insn_code
9353 != CODE_FOR_nothing)))
9354 ;
9355 else
9356 return 0;
9357 }
9358
9359 if (! get_subtarget (target)
9360 || GET_MODE (subtarget) != operand_mode)
9361 subtarget = 0;
9362
9363 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
9364
9365 if (target == 0)
9366 target = gen_reg_rtx (mode);
9367
9368 result = emit_store_flag (target, code, op0, op1,
9369 operand_mode, unsignedp, 1);
9370
9371 if (result)
9372 {
9373 if (invert)
9374 result = expand_binop (mode, xor_optab, result, const1_rtx,
9375 result, 0, OPTAB_LIB_WIDEN);
9376 return result;
9377 }
9378
9379 /* If this failed, we have to do this with set/compare/jump/set code. */
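/* Schematically (ignoring INVERT):

     TARGET = 1;  if (OP0 <code> OP1) goto LABEL;  TARGET = 0;  LABEL:

   so TARGET ends up holding the truth value of the comparison.  */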
9380 if (!REG_P (target)
9381 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9382 target = gen_reg_rtx (GET_MODE (target));
9383
9384 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9385 label = gen_label_rtx ();
9386 do_compare_rtx_and_jump (op0, op1, code, unsignedp, operand_mode, NULL_RTX,
9387 NULL_RTX, label);
9388
9389 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9390 emit_label (label);
9391
9392 return target;
9393 }
9394 \f
9395
9396 /* Stubs in case we haven't got a casesi insn. */
9397 #ifndef HAVE_casesi
9398 # define HAVE_casesi 0
9399 # define gen_casesi(a, b, c, d, e) (0)
9400 # define CODE_FOR_casesi CODE_FOR_nothing
9401 #endif
9402
9403 /* If the machine does not have a case insn that compares the bounds,
9404 this means extra overhead for dispatch tables, which raises the
9405 threshold for using them. */
9406 #ifndef CASE_VALUES_THRESHOLD
9407 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9408 #endif /* CASE_VALUES_THRESHOLD */
9409
9410 unsigned int
9411 case_values_threshold (void)
9412 {
9413 return CASE_VALUES_THRESHOLD;
9414 }
9415
9416 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9417 0 otherwise (i.e. if there is no casesi instruction). */
9418 int
9419 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9420 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
9421 {
9422 enum machine_mode index_mode = SImode;
9423 int index_bits = GET_MODE_BITSIZE (index_mode);
9424 rtx op1, op2, index;
9425 enum machine_mode op_mode;
9426
9427 if (! HAVE_casesi)
9428 return 0;
9429
9430 /* Convert the index to SImode. */
9431 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9432 {
9433 enum machine_mode omode = TYPE_MODE (index_type);
9434 rtx rangertx = expand_normal (range);
9435
9436 /* We must handle the endpoints in the original mode. */
9437 index_expr = build2 (MINUS_EXPR, index_type,
9438 index_expr, minval);
9439 minval = integer_zero_node;
9440 index = expand_normal (index_expr);
9441 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9442 omode, 1, default_label);
9443 /* Now we can safely truncate. */
9444 index = convert_to_mode (index_mode, index, 0);
9445 }
9446 else
9447 {
9448 if (TYPE_MODE (index_type) != index_mode)
9449 {
9450 index_type = lang_hooks.types.type_for_size (index_bits, 0);
9451 index_expr = fold_convert (index_type, index_expr);
9452 }
9453
9454 index = expand_normal (index_expr);
9455 }
9456
9457 do_pending_stack_adjust ();
9458
9459 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9460 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9461 (index, op_mode))
9462 index = copy_to_mode_reg (op_mode, index);
9463
9464 op1 = expand_normal (minval);
9465
9466 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9467 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9468 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
9469 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9470 (op1, op_mode))
9471 op1 = copy_to_mode_reg (op_mode, op1);
9472
9473 op2 = expand_normal (range);
9474
9475 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9476 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9477 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
9478 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9479 (op2, op_mode))
9480 op2 = copy_to_mode_reg (op_mode, op2);
9481
9482 emit_jump_insn (gen_casesi (index, op1, op2,
9483 table_label, default_label));
9484 return 1;
9485 }
9486
9487 /* Attempt to generate a tablejump instruction; same concept. */
9488 #ifndef HAVE_tablejump
9489 #define HAVE_tablejump 0
9490 #define gen_tablejump(x, y) (0)
9491 #endif
9492
9493 /* Subroutine of the next function.
9494
9495 INDEX is the value being switched on, with the lowest value
9496 in the table already subtracted.
9497 MODE is its expected mode (needed if INDEX is constant).
9498 RANGE is the length of the jump table.
9499 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9500
9501 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9502 index value is out of range. */
9503
9504 static void
9505 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9506 rtx default_label)
9507 {
9508 rtx temp, vector;
9509
9510 if (INTVAL (range) > cfun->max_jumptable_ents)
9511 cfun->max_jumptable_ents = INTVAL (range);
9512
9513 /* Do an unsigned comparison (in the proper mode) between the index
9514 expression and the value which represents the length of the range.
9515 Since we just finished subtracting the lower bound of the range
9516 from the index expression, this comparison allows us to simultaneously
9517 check that the original index expression value is both greater than
9518 or equal to the minimum value of the range and less than or equal to
9519 the maximum value of the range. */
9520
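/* For example, with case values 5 .. 12 the caller passes INDEX - 5 and
   RANGE = 7; the single unsigned test (unsigned) (INDEX - 5) > 7 rejects
   both INDEX < 5 (which wraps around to a huge value) and INDEX > 12.  */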
9521 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9522 default_label);
9523
9524 /* If index is in range, it must fit in Pmode.
9525 Convert to Pmode so we can index with it. */
9526 if (mode != Pmode)
9527 index = convert_to_mode (Pmode, index, 1);
9528
9529 /* Don't let a MEM slip through, because then INDEX that comes
9530 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9531 and break_out_memory_refs will go to work on it and mess it up. */
9532 #ifdef PIC_CASE_VECTOR_ADDRESS
9533 if (flag_pic && !REG_P (index))
9534 index = copy_to_mode_reg (Pmode, index);
9535 #endif
9536
9537 /* If flag_force_addr were to affect this address
9538 it could interfere with the tricky assumptions made
9539 about addresses that contain label-refs,
9540 which may be valid only very near the tablejump itself. */
9541 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9542 GET_MODE_SIZE, because this indicates how large insns are. The other
9543 uses should all be Pmode, because they are addresses. This code
9544 could fail if addresses and insns are not the same size. */
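/* The address formed below is (PLUS (MULT INDEX <entry-size>)
   (LABEL_REF TABLE_LABEL)), i.e. a scaled index into the dispatch table
   that starts at TABLE_LABEL.  */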
9545 index = gen_rtx_PLUS (Pmode,
9546 gen_rtx_MULT (Pmode, index,
9547 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9548 gen_rtx_LABEL_REF (Pmode, table_label));
9549 #ifdef PIC_CASE_VECTOR_ADDRESS
9550 if (flag_pic)
9551 index = PIC_CASE_VECTOR_ADDRESS (index);
9552 else
9553 #endif
9554 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9555 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9556 vector = gen_const_mem (CASE_VECTOR_MODE, index);
9557 convert_move (temp, vector, 0);
9558
9559 emit_jump_insn (gen_tablejump (temp, table_label));
9560
9561 /* If we are generating PIC code or if the table is PC-relative, the
9562 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9563 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9564 emit_barrier ();
9565 }
9566
9567 int
9568 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9569 rtx table_label, rtx default_label)
9570 {
9571 rtx index;
9572
9573 if (! HAVE_tablejump)
9574 return 0;
9575
9576 index_expr = fold_build2 (MINUS_EXPR, index_type,
9577 fold_convert (index_type, index_expr),
9578 fold_convert (index_type, minval));
9579 index = expand_normal (index_expr);
9580 do_pending_stack_adjust ();
9581
9582 do_tablejump (index, TYPE_MODE (index_type),
9583 convert_modes (TYPE_MODE (index_type),
9584 TYPE_MODE (TREE_TYPE (range)),
9585 expand_normal (range),
9586 TYPE_UNSIGNED (TREE_TYPE (range))),
9587 table_label, default_label);
9588 return 1;
9589 }
9590
9591 /* Nonzero if the mode is a valid vector mode for this architecture.
9592 This returns nonzero even if there is no hardware support for the
9593 vector mode, but we can emulate with narrower modes. */
9594
9595 int
9596 vector_mode_valid_p (enum machine_mode mode)
9597 {
9598 enum mode_class class = GET_MODE_CLASS (mode);
9599 enum machine_mode innermode;
9600
9601 /* Doh! What's going on? */
9602 if (class != MODE_VECTOR_INT
9603 && class != MODE_VECTOR_FLOAT)
9604 return 0;
9605
9606 /* Hardware support. Woo hoo! */
9607 if (targetm.vector_mode_supported_p (mode))
9608 return 1;
9609
9610 innermode = GET_MODE_INNER (mode);
9611
9612 /* We should probably return 1 if V4DI is requested and we have no DI
9613 but do have V2DI; however, that case is probably very unlikely. */
9614
9615 /* If we have support for the inner mode, we can safely emulate it.
9616 We may not have V2DI, but we can emulate with a pair of DIs. */
9617 return targetm.scalar_mode_supported_p (innermode);
9618 }
9619
9620 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
9621 static rtx
9622 const_vector_from_tree (tree exp)
9623 {
9624 rtvec v;
9625 int units, i;
9626 tree link, elt;
9627 enum machine_mode inner, mode;
9628
9629 mode = TYPE_MODE (TREE_TYPE (exp));
9630
9631 if (initializer_zerop (exp))
9632 return CONST0_RTX (mode);
9633
9634 units = GET_MODE_NUNITS (mode);
9635 inner = GET_MODE_INNER (mode);
9636
9637 v = rtvec_alloc (units);
9638
9639 link = TREE_VECTOR_CST_ELTS (exp);
9640 for (i = 0; link; link = TREE_CHAIN (link), ++i)
9641 {
9642 elt = TREE_VALUE (link);
9643
9644 if (TREE_CODE (elt) == REAL_CST)
9645 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
9646 inner);
9647 else
9648 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
9649 TREE_INT_CST_HIGH (elt),
9650 inner);
9651 }
9652
9653 /* Initialize remaining elements to 0. */
9654 for (; i < units; ++i)
9655 RTVEC_ELT (v, i) = CONST0_RTX (inner);
9656
9657 return gen_rtx_CONST_VECTOR (mode, v);
9658 }
9659 #include "gt-expr.h"