[gcc.git] / gcc / expr.c
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "optabs.h"
40 #include "libfuncs.h"
41 #include "recog.h"
42 #include "reload.h"
43 #include "output.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "langhooks.h"
48 #include "intl.h"
49 #include "tm_p.h"
50 #include "tree-iterator.h"
51 #include "tree-pass.h"
52 #include "tree-flow.h"
53 #include "target.h"
54 #include "timevar.h"
55 #include "df.h"
56
57 /* Decide whether a function's arguments should be processed
58 from first to last or from last to first.
59
  60 They should be processed from last to first if the stack and args grow
  61 in opposite directions, but only if we have push insns. */
62
63 #ifdef PUSH_ROUNDING
64
65 #ifndef PUSH_ARGS_REVERSED
66 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
67 #define PUSH_ARGS_REVERSED /* If it's last to first. */
68 #endif
69 #endif
70
71 #endif
72
73 #ifndef STACK_PUSH_CODE
74 #ifdef STACK_GROWS_DOWNWARD
75 #define STACK_PUSH_CODE PRE_DEC
76 #else
77 #define STACK_PUSH_CODE PRE_INC
78 #endif
79 #endif
80
81
82 /* If this is nonzero, we do not bother generating VOLATILE
83 around volatile memory references, and we are willing to
84 output indirect addresses. If cse is to follow, we reject
85 indirect addresses so a useful potential cse is generated;
86 if it is used only once, instruction combination will produce
87 the same indirect address eventually. */
88 int cse_not_expected;
89
90 /* This structure is used by move_by_pieces to describe the move to
91 be performed. */
92 struct move_by_pieces
93 {
94 rtx to;
95 rtx to_addr;
96 int autinc_to;
97 int explicit_inc_to;
98 rtx from;
99 rtx from_addr;
100 int autinc_from;
101 int explicit_inc_from;
102 unsigned HOST_WIDE_INT len;
103 HOST_WIDE_INT offset;
104 int reverse;
105 };
106
 107 /* This structure is used by store_by_pieces to describe the store or
 108 clear to be performed. */
109
110 struct store_by_pieces
111 {
112 rtx to;
113 rtx to_addr;
114 int autinc_to;
115 int explicit_inc_to;
116 unsigned HOST_WIDE_INT len;
117 HOST_WIDE_INT offset;
118 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
119 void *constfundata;
120 int reverse;
121 };
122
123 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
124 unsigned int,
125 unsigned int);
126 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
127 struct move_by_pieces *);
128 static bool block_move_libcall_safe_for_call_parm (void);
129 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT);
130 static tree emit_block_move_libcall_fn (int);
131 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
132 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
133 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
134 static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
135 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
136 struct store_by_pieces *);
137 static tree clear_storage_libcall_fn (int);
138 static rtx compress_float_constant (rtx, rtx);
139 static rtx get_subtarget (rtx);
140 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
141 HOST_WIDE_INT, enum machine_mode,
142 tree, tree, int, alias_set_type);
143 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
144 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
145 tree, tree, alias_set_type, bool);
146
147 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
148
149 static int is_aligning_offset (const_tree, const_tree);
150 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
151 enum expand_modifier);
152 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
153 static rtx do_store_flag (tree, rtx, enum machine_mode, int);
154 #ifdef PUSH_ROUNDING
155 static void emit_single_push_insn (enum machine_mode, rtx, tree);
156 #endif
157 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
158 static rtx const_vector_from_tree (tree);
159 static void write_complex_part (rtx, rtx, bool);
160
161 /* Record for each mode whether we can move a register directly to or
162 from an object of that mode in memory. If we can't, we won't try
163 to use that mode directly when accessing a field of that mode. */
164
165 static char direct_load[NUM_MACHINE_MODES];
166 static char direct_store[NUM_MACHINE_MODES];
167
168 /* Record for each mode whether we can float-extend from memory. */
169
170 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
171
172 /* This macro is used to determine whether move_by_pieces should be called
173 to perform a structure copy. */
174 #ifndef MOVE_BY_PIECES_P
175 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
176 (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
177 < (unsigned int) MOVE_RATIO)
178 #endif
179
180 /* This macro is used to determine whether clear_by_pieces should be
181 called to clear storage. */
182 #ifndef CLEAR_BY_PIECES_P
183 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
184 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
185 < (unsigned int) CLEAR_RATIO)
186 #endif
187
188 /* This macro is used to determine whether store_by_pieces should be
189 called to "memset" storage with byte values other than zero. */
190 #ifndef SET_BY_PIECES_P
191 #define SET_BY_PIECES_P(SIZE, ALIGN) \
192 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
193 < (unsigned int) SET_RATIO)
194 #endif
195
196 /* This macro is used to determine whether store_by_pieces should be
197 called to "memcpy" storage when the source is a constant string. */
198 #ifndef STORE_BY_PIECES_P
199 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
200 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
201 < (unsigned int) MOVE_RATIO)
202 #endif
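/* Illustrative sketch added for exposition; not part of the original
   expr.c.  It shows how the *_BY_PIECES_P heuristics above are typically
   consulted: the insn count for a given length and alignment is compared
   against a target ratio.  The 16-byte length and 32-bit alignment are
   hypothetical values chosen only for the example.  */
#if 0
static bool
example_should_move_by_pieces (void)
{
  unsigned HOST_WIDE_INT len = 16;   /* bytes to copy (assumed)      */
  unsigned int align = 32;           /* alignment in bits (assumed)  */

  /* Expands to a move_by_pieces_ninsns (len, align, ...) comparison
     against the target's MOVE_RATIO.  */
  return MOVE_BY_PIECES_P (len, align);
}
#endif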
203
204 /* This array records the insn_code of insns to perform block moves. */
205 enum insn_code movmem_optab[NUM_MACHINE_MODES];
206
207 /* This array records the insn_code of insns to perform block sets. */
208 enum insn_code setmem_optab[NUM_MACHINE_MODES];
209
210 /* These arrays record the insn_code of three different kinds of insns
211 to perform block compares. */
212 enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
213 enum insn_code cmpstrn_optab[NUM_MACHINE_MODES];
214 enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
215
216 /* Synchronization primitives. */
217 enum insn_code sync_add_optab[NUM_MACHINE_MODES];
218 enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
219 enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
220 enum insn_code sync_and_optab[NUM_MACHINE_MODES];
221 enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
222 enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
223 enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
224 enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
225 enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
226 enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
227 enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
228 enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
229 enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
230 enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
231 enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
232 enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
233 enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
234 enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
235 enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
236 enum insn_code sync_compare_and_swap_cc[NUM_MACHINE_MODES];
237 enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
238 enum insn_code sync_lock_release[NUM_MACHINE_MODES];
239
240 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
241
242 #ifndef SLOW_UNALIGNED_ACCESS
243 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
244 #endif
245 \f
246 /* This is run to set up which modes can be used
247 directly in memory and to initialize the block move optab. It is run
248 at the beginning of compilation and when the target is reinitialized. */
249
250 void
251 init_expr_target (void)
252 {
253 rtx insn, pat;
254 enum machine_mode mode;
255 int num_clobbers;
256 rtx mem, mem1;
257 rtx reg;
258
259 /* Try indexing by frame ptr and try by stack ptr.
260 It is known that on the Convex the stack ptr isn't a valid index.
261 With luck, one or the other is valid on any machine. */
262 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
263 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
264
265 /* A scratch register we can modify in-place below to avoid
266 useless RTL allocations. */
267 reg = gen_rtx_REG (VOIDmode, -1);
268
269 insn = rtx_alloc (INSN);
270 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
271 PATTERN (insn) = pat;
272
273 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
274 mode = (enum machine_mode) ((int) mode + 1))
275 {
276 int regno;
277
278 direct_load[(int) mode] = direct_store[(int) mode] = 0;
279 PUT_MODE (mem, mode);
280 PUT_MODE (mem1, mode);
281 PUT_MODE (reg, mode);
282
283 /* See if there is some register that can be used in this mode and
284 directly loaded or stored from memory. */
285
286 if (mode != VOIDmode && mode != BLKmode)
287 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
288 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
289 regno++)
290 {
291 if (! HARD_REGNO_MODE_OK (regno, mode))
292 continue;
293
294 SET_REGNO (reg, regno);
295
296 SET_SRC (pat) = mem;
297 SET_DEST (pat) = reg;
298 if (recog (pat, insn, &num_clobbers) >= 0)
299 direct_load[(int) mode] = 1;
300
301 SET_SRC (pat) = mem1;
302 SET_DEST (pat) = reg;
303 if (recog (pat, insn, &num_clobbers) >= 0)
304 direct_load[(int) mode] = 1;
305
306 SET_SRC (pat) = reg;
307 SET_DEST (pat) = mem;
308 if (recog (pat, insn, &num_clobbers) >= 0)
309 direct_store[(int) mode] = 1;
310
311 SET_SRC (pat) = reg;
312 SET_DEST (pat) = mem1;
313 if (recog (pat, insn, &num_clobbers) >= 0)
314 direct_store[(int) mode] = 1;
315 }
316 }
317
318 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
319
320 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
321 mode = GET_MODE_WIDER_MODE (mode))
322 {
323 enum machine_mode srcmode;
324 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
325 srcmode = GET_MODE_WIDER_MODE (srcmode))
326 {
327 enum insn_code ic;
328
329 ic = can_extend_p (mode, srcmode, 0);
330 if (ic == CODE_FOR_nothing)
331 continue;
332
333 PUT_MODE (mem, srcmode);
334
335 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
336 float_extend_from_mem[mode][srcmode] = true;
337 }
338 }
339 }
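/* Illustrative sketch added for exposition; not part of the original
   expr.c.  The direct_load/direct_store tables filled in above are what
   convert_move and convert_modes later consult before using gen_lowpart
   on a MEM; the mode chosen here is hypothetical.  */
#if 0
static bool
example_hi_mode_loads_directly (void)
{
  return direct_load[(int) HImode] != 0;
}
#endif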
340
341 /* This is run at the start of compiling a function. */
342
343 void
344 init_expr (void)
345 {
346 cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
347 }
348 \f
349 /* Copy data from FROM to TO, where the machine modes are not the same.
350 Both modes may be integer, or both may be floating, or both may be
351 fixed-point.
352 UNSIGNEDP should be nonzero if FROM is an unsigned type.
353 This causes zero-extension instead of sign-extension. */
354
355 void
356 convert_move (rtx to, rtx from, int unsignedp)
357 {
358 enum machine_mode to_mode = GET_MODE (to);
359 enum machine_mode from_mode = GET_MODE (from);
360 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
361 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
362 enum insn_code code;
363 rtx libcall;
364
365 /* rtx code for making an equivalent value. */
366 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
367 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
368
369
370 gcc_assert (to_real == from_real);
371 gcc_assert (to_mode != BLKmode);
372 gcc_assert (from_mode != BLKmode);
373
374 /* If the source and destination are already the same, then there's
375 nothing to do. */
376 if (to == from)
377 return;
378
379 /* If FROM is a SUBREG that indicates that we have already done at least
380 the required extension, strip it. We don't handle such SUBREGs as
381 TO here. */
382
383 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
384 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
385 >= GET_MODE_SIZE (to_mode))
386 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
387 from = gen_lowpart (to_mode, from), from_mode = to_mode;
388
389 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
390
391 if (to_mode == from_mode
392 || (from_mode == VOIDmode && CONSTANT_P (from)))
393 {
394 emit_move_insn (to, from);
395 return;
396 }
397
398 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
399 {
400 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
401
402 if (VECTOR_MODE_P (to_mode))
403 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
404 else
405 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
406
407 emit_move_insn (to, from);
408 return;
409 }
410
411 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
412 {
413 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
414 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
415 return;
416 }
417
418 if (to_real)
419 {
420 rtx value, insns;
421 convert_optab tab;
422
423 gcc_assert ((GET_MODE_PRECISION (from_mode)
424 != GET_MODE_PRECISION (to_mode))
425 || (DECIMAL_FLOAT_MODE_P (from_mode)
426 != DECIMAL_FLOAT_MODE_P (to_mode)));
427
428 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
429 /* Conversion between decimal float and binary float, same size. */
430 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
431 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
432 tab = sext_optab;
433 else
434 tab = trunc_optab;
435
436 /* Try converting directly if the insn is supported. */
437
438 code = convert_optab_handler (tab, to_mode, from_mode)->insn_code;
439 if (code != CODE_FOR_nothing)
440 {
441 emit_unop_insn (code, to, from,
442 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
443 return;
444 }
445
446 /* Otherwise use a libcall. */
447 libcall = convert_optab_libfunc (tab, to_mode, from_mode);
448
449 /* Is this conversion implemented yet? */
450 gcc_assert (libcall);
451
452 start_sequence ();
453 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
454 1, from, from_mode);
455 insns = get_insns ();
456 end_sequence ();
457 emit_libcall_block (insns, to, value,
458 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
459 from)
460 : gen_rtx_FLOAT_EXTEND (to_mode, from));
461 return;
462 }
463
464 /* Handle pointer conversion. */ /* SPEE 900220. */
465 /* Targets are expected to provide conversion insns between PxImode and
466 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
467 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
468 {
469 enum machine_mode full_mode
470 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
471
472 gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)->insn_code
473 != CODE_FOR_nothing);
474
475 if (full_mode != from_mode)
476 from = convert_to_mode (full_mode, from, unsignedp);
477 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode)->insn_code,
478 to, from, UNKNOWN);
479 return;
480 }
481 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
482 {
483 rtx new_from;
484 enum machine_mode full_mode
485 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
486
487 gcc_assert (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code
488 != CODE_FOR_nothing);
489
490 if (to_mode == full_mode)
491 {
492 emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code,
493 to, from, UNKNOWN);
494 return;
495 }
496
497 new_from = gen_reg_rtx (full_mode);
498 emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code,
499 new_from, from, UNKNOWN);
500
501 /* else proceed to integer conversions below. */
502 from_mode = full_mode;
503 from = new_from;
504 }
505
506 /* Make sure both are fixed-point modes or both are not. */
507 gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
508 ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
509 if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
510 {
511 /* If we widen from_mode to to_mode and they are in the same class,
512 we won't saturate the result.
 513 Otherwise, always saturate the result to play it safe. */
514 if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
515 && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
516 expand_fixed_convert (to, from, 0, 0);
517 else
518 expand_fixed_convert (to, from, 0, 1);
519 return;
520 }
521
522 /* Now both modes are integers. */
523
524 /* Handle expanding beyond a word. */
525 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
526 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
527 {
528 rtx insns;
529 rtx lowpart;
530 rtx fill_value;
531 rtx lowfrom;
532 int i;
533 enum machine_mode lowpart_mode;
534 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
535
536 /* Try converting directly if the insn is supported. */
537 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
538 != CODE_FOR_nothing)
539 {
540 /* If FROM is a SUBREG, put it into a register. Do this
541 so that we always generate the same set of insns for
542 better cse'ing; if an intermediate assignment occurred,
543 we won't be doing the operation directly on the SUBREG. */
544 if (optimize > 0 && GET_CODE (from) == SUBREG)
545 from = force_reg (from_mode, from);
546 emit_unop_insn (code, to, from, equiv_code);
547 return;
548 }
549 /* Next, try converting via full word. */
550 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
551 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
552 != CODE_FOR_nothing))
553 {
554 if (REG_P (to))
555 {
556 if (reg_overlap_mentioned_p (to, from))
557 from = force_reg (from_mode, from);
558 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
559 }
560 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
561 emit_unop_insn (code, to,
562 gen_lowpart (word_mode, to), equiv_code);
563 return;
564 }
565
566 /* No special multiword conversion insn; do it by hand. */
567 start_sequence ();
568
569 /* Since we will turn this into a no conflict block, we must ensure
570 that the source does not overlap the target. */
571
572 if (reg_overlap_mentioned_p (to, from))
573 from = force_reg (from_mode, from);
574
575 /* Get a copy of FROM widened to a word, if necessary. */
576 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
577 lowpart_mode = word_mode;
578 else
579 lowpart_mode = from_mode;
580
581 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
582
583 lowpart = gen_lowpart (lowpart_mode, to);
584 emit_move_insn (lowpart, lowfrom);
585
586 /* Compute the value to put in each remaining word. */
587 if (unsignedp)
588 fill_value = const0_rtx;
589 else
590 {
591 #ifdef HAVE_slt
592 if (HAVE_slt
593 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
594 && STORE_FLAG_VALUE == -1)
595 {
596 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
597 lowpart_mode, 0);
598 fill_value = gen_reg_rtx (word_mode);
599 emit_insn (gen_slt (fill_value));
600 }
601 else
602 #endif
603 {
604 fill_value
605 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
606 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
607 NULL_RTX, 0);
608 fill_value = convert_to_mode (word_mode, fill_value, 1);
609 }
610 }
611
612 /* Fill the remaining words. */
613 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
614 {
615 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
616 rtx subword = operand_subword (to, index, 1, to_mode);
617
618 gcc_assert (subword);
619
620 if (fill_value != subword)
621 emit_move_insn (subword, fill_value);
622 }
623
624 insns = get_insns ();
625 end_sequence ();
626
627 emit_no_conflict_block (insns, to, from, NULL_RTX,
628 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
629 return;
630 }
631
632 /* Truncating multi-word to a word or less. */
633 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
634 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
635 {
636 if (!((MEM_P (from)
637 && ! MEM_VOLATILE_P (from)
638 && direct_load[(int) to_mode]
639 && ! mode_dependent_address_p (XEXP (from, 0)))
640 || REG_P (from)
641 || GET_CODE (from) == SUBREG))
642 from = force_reg (from_mode, from);
643 convert_move (to, gen_lowpart (word_mode, from), 0);
644 return;
645 }
646
647 /* Now follow all the conversions between integers
648 no more than a word long. */
649
650 /* For truncation, usually we can just refer to FROM in a narrower mode. */
651 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
652 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
653 GET_MODE_BITSIZE (from_mode)))
654 {
655 if (!((MEM_P (from)
656 && ! MEM_VOLATILE_P (from)
657 && direct_load[(int) to_mode]
658 && ! mode_dependent_address_p (XEXP (from, 0)))
659 || REG_P (from)
660 || GET_CODE (from) == SUBREG))
661 from = force_reg (from_mode, from);
662 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
663 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
664 from = copy_to_reg (from);
665 emit_move_insn (to, gen_lowpart (to_mode, from));
666 return;
667 }
668
669 /* Handle extension. */
670 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
671 {
672 /* Convert directly if that works. */
673 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
674 != CODE_FOR_nothing)
675 {
676 emit_unop_insn (code, to, from, equiv_code);
677 return;
678 }
679 else
680 {
681 enum machine_mode intermediate;
682 rtx tmp;
683 tree shift_amount;
684
685 /* Search for a mode to convert via. */
686 for (intermediate = from_mode; intermediate != VOIDmode;
687 intermediate = GET_MODE_WIDER_MODE (intermediate))
688 if (((can_extend_p (to_mode, intermediate, unsignedp)
689 != CODE_FOR_nothing)
690 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
691 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
692 GET_MODE_BITSIZE (intermediate))))
693 && (can_extend_p (intermediate, from_mode, unsignedp)
694 != CODE_FOR_nothing))
695 {
696 convert_move (to, convert_to_mode (intermediate, from,
697 unsignedp), unsignedp);
698 return;
699 }
700
701 /* No suitable intermediate mode.
702 Generate what we need with shifts. */
703 shift_amount = build_int_cst (NULL_TREE,
704 GET_MODE_BITSIZE (to_mode)
705 - GET_MODE_BITSIZE (from_mode));
706 from = gen_lowpart (to_mode, force_reg (from_mode, from));
707 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
708 to, unsignedp);
709 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
710 to, unsignedp);
711 if (tmp != to)
712 emit_move_insn (to, tmp);
713 return;
714 }
715 }
716
717 /* Support special truncate insns for certain modes. */
718 if (convert_optab_handler (trunc_optab, to_mode, from_mode)->insn_code != CODE_FOR_nothing)
719 {
720 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode)->insn_code,
721 to, from, UNKNOWN);
722 return;
723 }
724
725 /* Handle truncation of volatile memrefs, and so on;
726 the things that couldn't be truncated directly,
727 and for which there was no special instruction.
728
729 ??? Code above formerly short-circuited this, for most integer
730 mode pairs, with a force_reg in from_mode followed by a recursive
731 call to this routine. Appears always to have been wrong. */
732 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
733 {
734 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
735 emit_move_insn (to, temp);
736 return;
737 }
738
739 /* Mode combination is not recognized. */
740 gcc_unreachable ();
741 }
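/* Illustrative sketch added for exposition; not part of the original
   expr.c.  A minimal use of convert_move: widen a QImode pseudo into a
   fresh SImode pseudo.  The helper name is hypothetical.  */
#if 0
static rtx
example_zero_extend_qi_to_si (rtx qi_reg)
{
  rtx si_reg = gen_reg_rtx (SImode);

  /* UNSIGNEDP == 1 requests zero extension; 0 would sign-extend.  */
  convert_move (si_reg, qi_reg, 1);
  return si_reg;
}
#endif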
742
743 /* Return an rtx for a value that would result
744 from converting X to mode MODE.
745 Both X and MODE may be floating, or both integer.
746 UNSIGNEDP is nonzero if X is an unsigned value.
747 This can be done by referring to a part of X in place
748 or by copying to a new temporary with conversion. */
749
750 rtx
751 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
752 {
753 return convert_modes (mode, VOIDmode, x, unsignedp);
754 }
755
756 /* Return an rtx for a value that would result
757 from converting X from mode OLDMODE to mode MODE.
758 Both modes may be floating, or both integer.
759 UNSIGNEDP is nonzero if X is an unsigned value.
760
761 This can be done by referring to a part of X in place
762 or by copying to a new temporary with conversion.
763
764 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
765
766 rtx
767 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
768 {
769 rtx temp;
770
771 /* If FROM is a SUBREG that indicates that we have already done at least
772 the required extension, strip it. */
773
774 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
775 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
776 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
777 x = gen_lowpart (mode, x);
778
779 if (GET_MODE (x) != VOIDmode)
780 oldmode = GET_MODE (x);
781
782 if (mode == oldmode)
783 return x;
784
785 /* There is one case that we must handle specially: If we are converting
786 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
787 we are to interpret the constant as unsigned, gen_lowpart will do
 788 the wrong thing if the constant appears negative. What we want to do is
789 make the high-order word of the constant zero, not all ones. */
790
791 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
792 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
793 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
794 {
795 HOST_WIDE_INT val = INTVAL (x);
796
797 if (oldmode != VOIDmode
798 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
799 {
800 int width = GET_MODE_BITSIZE (oldmode);
801
802 /* We need to zero extend VAL. */
803 val &= ((HOST_WIDE_INT) 1 << width) - 1;
804 }
805
806 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
807 }
808
809 /* We can do this with a gen_lowpart if both desired and current modes
810 are integer, and this is either a constant integer, a register, or a
811 non-volatile MEM. Except for the constant case where MODE is no
812 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
813
814 if ((GET_CODE (x) == CONST_INT
815 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
816 || (GET_MODE_CLASS (mode) == MODE_INT
817 && GET_MODE_CLASS (oldmode) == MODE_INT
818 && (GET_CODE (x) == CONST_DOUBLE
819 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
820 && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
821 && direct_load[(int) mode])
822 || (REG_P (x)
823 && (! HARD_REGISTER_P (x)
824 || HARD_REGNO_MODE_OK (REGNO (x), mode))
825 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
826 GET_MODE_BITSIZE (GET_MODE (x)))))))))
827 {
828 /* ?? If we don't know OLDMODE, we have to assume here that
829 X does not need sign- or zero-extension. This may not be
830 the case, but it's the best we can do. */
831 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
832 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
833 {
834 HOST_WIDE_INT val = INTVAL (x);
835 int width = GET_MODE_BITSIZE (oldmode);
836
837 /* We must sign or zero-extend in this case. Start by
838 zero-extending, then sign extend if we need to. */
839 val &= ((HOST_WIDE_INT) 1 << width) - 1;
840 if (! unsignedp
841 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
842 val |= (HOST_WIDE_INT) (-1) << width;
843
844 return gen_int_mode (val, mode);
845 }
846
847 return gen_lowpart (mode, x);
848 }
849
 850 /* Converting an integer constant into a vector mode is always
 851 equivalent to a subreg operation. */
852 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
853 {
854 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
855 return simplify_gen_subreg (mode, x, oldmode, 0);
856 }
857
858 temp = gen_reg_rtx (mode);
859 convert_move (temp, x, unsignedp);
860 return temp;
861 }
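/* Illustrative sketch added for exposition; not part of the original
   expr.c.  convert_modes applied to a VOIDmode CONST_INT: OLDMODE tells
   the routine which mode the constant notionally came from, so it can be
   reinterpreted without emitting insns where possible.  The modes and
   value are hypothetical.  */
#if 0
static rtx
example_narrow_constant_to_hi (void)
{
  return convert_modes (HImode, SImode, GEN_INT (0x1234), 1);
}
#endif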
862 \f
863 /* STORE_MAX_PIECES is the number of bytes at a time that we can
864 store efficiently. Due to internal GCC limitations, this is
865 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
866 for an immediate constant. */
867
868 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
869
870 /* Determine whether the LEN bytes can be moved by using several move
871 instructions. Return nonzero if a call to move_by_pieces should
872 succeed. */
873
874 int
875 can_move_by_pieces (unsigned HOST_WIDE_INT len,
876 unsigned int align ATTRIBUTE_UNUSED)
877 {
878 return MOVE_BY_PIECES_P (len, align);
879 }
880
881 /* Generate several move instructions to copy LEN bytes from block FROM to
882 block TO. (These are MEM rtx's with BLKmode).
883
884 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
885 used to push FROM to the stack.
886
 887 ALIGN is the maximum stack alignment we can assume.
888
 889 If ENDP is 0 return TO, if ENDP is 1 return memory at the end a la
 890 mempcpy, and if ENDP is 2 return memory at the end minus one byte a la
 891 stpcpy. */
892
893 rtx
894 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
895 unsigned int align, int endp)
896 {
897 struct move_by_pieces data;
898 rtx to_addr, from_addr = XEXP (from, 0);
899 unsigned int max_size = MOVE_MAX_PIECES + 1;
900 enum machine_mode mode = VOIDmode, tmode;
901 enum insn_code icode;
902
903 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
904
905 data.offset = 0;
906 data.from_addr = from_addr;
907 if (to)
908 {
909 to_addr = XEXP (to, 0);
910 data.to = to;
911 data.autinc_to
912 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
913 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
914 data.reverse
915 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
916 }
917 else
918 {
919 to_addr = NULL_RTX;
920 data.to = NULL_RTX;
921 data.autinc_to = 1;
922 #ifdef STACK_GROWS_DOWNWARD
923 data.reverse = 1;
924 #else
925 data.reverse = 0;
926 #endif
927 }
928 data.to_addr = to_addr;
929 data.from = from;
930 data.autinc_from
931 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
932 || GET_CODE (from_addr) == POST_INC
933 || GET_CODE (from_addr) == POST_DEC);
934
935 data.explicit_inc_from = 0;
936 data.explicit_inc_to = 0;
937 if (data.reverse) data.offset = len;
938 data.len = len;
939
940 /* If copying requires more than two move insns,
941 copy addresses to registers (to make displacements shorter)
942 and use post-increment if available. */
943 if (!(data.autinc_from && data.autinc_to)
944 && move_by_pieces_ninsns (len, align, max_size) > 2)
945 {
946 /* Find the mode of the largest move... */
947 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
948 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
949 if (GET_MODE_SIZE (tmode) < max_size)
950 mode = tmode;
951
952 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
953 {
954 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
955 data.autinc_from = 1;
956 data.explicit_inc_from = -1;
957 }
958 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
959 {
960 data.from_addr = copy_addr_to_reg (from_addr);
961 data.autinc_from = 1;
962 data.explicit_inc_from = 1;
963 }
964 if (!data.autinc_from && CONSTANT_P (from_addr))
965 data.from_addr = copy_addr_to_reg (from_addr);
966 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
967 {
968 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
969 data.autinc_to = 1;
970 data.explicit_inc_to = -1;
971 }
972 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
973 {
974 data.to_addr = copy_addr_to_reg (to_addr);
975 data.autinc_to = 1;
976 data.explicit_inc_to = 1;
977 }
978 if (!data.autinc_to && CONSTANT_P (to_addr))
979 data.to_addr = copy_addr_to_reg (to_addr);
980 }
981
982 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
983 if (align >= GET_MODE_ALIGNMENT (tmode))
984 align = GET_MODE_ALIGNMENT (tmode);
985 else
986 {
987 enum machine_mode xmode;
988
989 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
990 tmode != VOIDmode;
991 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
992 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
993 || SLOW_UNALIGNED_ACCESS (tmode, align))
994 break;
995
996 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
997 }
998
999 /* First move what we can in the largest integer mode, then go to
1000 successively smaller modes. */
1001
1002 while (max_size > 1)
1003 {
1004 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1005 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1006 if (GET_MODE_SIZE (tmode) < max_size)
1007 mode = tmode;
1008
1009 if (mode == VOIDmode)
1010 break;
1011
1012 icode = optab_handler (mov_optab, mode)->insn_code;
1013 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1014 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1015
1016 max_size = GET_MODE_SIZE (mode);
1017 }
1018
1019 /* The code above should have handled everything. */
1020 gcc_assert (!data.len);
1021
1022 if (endp)
1023 {
1024 rtx to1;
1025
1026 gcc_assert (!data.reverse);
1027 if (data.autinc_to)
1028 {
1029 if (endp == 2)
1030 {
1031 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1032 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1033 else
1034 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
1035 -1));
1036 }
1037 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1038 data.offset);
1039 }
1040 else
1041 {
1042 if (endp == 2)
1043 --data.offset;
1044 to1 = adjust_address (data.to, QImode, data.offset);
1045 }
1046 return to1;
1047 }
1048 else
1049 return data.to;
1050 }
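/* Illustrative sketch added for exposition; not part of the original
   expr.c.  A move_by_pieces call guarded by can_move_by_pieces, in the
   same way emit_block_move_hints does below.  The 32-byte length is a
   hypothetical constant.  */
#if 0
static void
example_copy_small_block (rtx dst_mem, rtx src_mem)
{
  unsigned HOST_WIDE_INT len = 32;
  unsigned int align = MIN (MEM_ALIGN (dst_mem), MEM_ALIGN (src_mem));

  if (can_move_by_pieces (len, align))
    /* ENDP == 0: the end address is not needed by the caller.  */
    move_by_pieces (dst_mem, src_mem, len, align, 0);
}
#endif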
1051
1052 /* Return number of insns required to move L bytes by pieces.
 1053 ALIGN (in bits) is the maximum alignment we can assume. */
1054
1055 static unsigned HOST_WIDE_INT
1056 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
1057 unsigned int max_size)
1058 {
1059 unsigned HOST_WIDE_INT n_insns = 0;
1060 enum machine_mode tmode;
1061
1062 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
1063 if (align >= GET_MODE_ALIGNMENT (tmode))
1064 align = GET_MODE_ALIGNMENT (tmode);
1065 else
1066 {
1067 enum machine_mode tmode, xmode;
1068
1069 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
1070 tmode != VOIDmode;
1071 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
1072 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
1073 || SLOW_UNALIGNED_ACCESS (tmode, align))
1074 break;
1075
1076 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
1077 }
1078
1079 while (max_size > 1)
1080 {
1081 enum machine_mode mode = VOIDmode;
1082 enum insn_code icode;
1083
1084 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1085 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1086 if (GET_MODE_SIZE (tmode) < max_size)
1087 mode = tmode;
1088
1089 if (mode == VOIDmode)
1090 break;
1091
1092 icode = optab_handler (mov_optab, mode)->insn_code;
1093 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1094 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1095
1096 max_size = GET_MODE_SIZE (mode);
1097 }
1098
1099 gcc_assert (!l);
1100 return n_insns;
1101 }
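/* Worked example added for exposition; not part of the original expr.c.
   On a hypothetical target offering 1-, 2-, 4- and 8-byte integer modes,
   all fast at 64-bit alignment, a 13-byte copy is carved into one 8-byte,
   one 4-byte and one 1-byte move, so move_by_pieces_ninsns (13, 64, 16)
   would return 3.  */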
1102
1103 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1104 with move instructions for mode MODE. GENFUN is the gen_... function
1105 to make a move insn for that mode. DATA has all the other info. */
1106
1107 static void
1108 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1109 struct move_by_pieces *data)
1110 {
1111 unsigned int size = GET_MODE_SIZE (mode);
1112 rtx to1 = NULL_RTX, from1;
1113
1114 while (data->len >= size)
1115 {
1116 if (data->reverse)
1117 data->offset -= size;
1118
1119 if (data->to)
1120 {
1121 if (data->autinc_to)
1122 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1123 data->offset);
1124 else
1125 to1 = adjust_address (data->to, mode, data->offset);
1126 }
1127
1128 if (data->autinc_from)
1129 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1130 data->offset);
1131 else
1132 from1 = adjust_address (data->from, mode, data->offset);
1133
1134 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1135 emit_insn (gen_add2_insn (data->to_addr,
1136 GEN_INT (-(HOST_WIDE_INT)size)));
1137 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1138 emit_insn (gen_add2_insn (data->from_addr,
1139 GEN_INT (-(HOST_WIDE_INT)size)));
1140
1141 if (data->to)
1142 emit_insn ((*genfun) (to1, from1));
1143 else
1144 {
1145 #ifdef PUSH_ROUNDING
1146 emit_single_push_insn (mode, from1, NULL);
1147 #else
1148 gcc_unreachable ();
1149 #endif
1150 }
1151
1152 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1153 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1154 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1155 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1156
1157 if (! data->reverse)
1158 data->offset += size;
1159
1160 data->len -= size;
1161 }
1162 }
1163 \f
1164 /* Emit code to move a block Y to a block X. This may be done with
1165 string-move instructions, with multiple scalar move instructions,
1166 or with a library call.
1167
1168 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1169 SIZE is an rtx that says how long they are.
1170 ALIGN is the maximum alignment we can assume they have.
1171 METHOD describes what kind of copy this is, and what mechanisms may be used.
1172
1173 Return the address of the new block, if memcpy is called and returns it,
1174 0 otherwise. */
1175
1176 rtx
1177 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1178 unsigned int expected_align, HOST_WIDE_INT expected_size)
1179 {
1180 bool may_use_call;
1181 rtx retval = 0;
1182 unsigned int align;
1183
1184 switch (method)
1185 {
1186 case BLOCK_OP_NORMAL:
1187 case BLOCK_OP_TAILCALL:
1188 may_use_call = true;
1189 break;
1190
1191 case BLOCK_OP_CALL_PARM:
1192 may_use_call = block_move_libcall_safe_for_call_parm ();
1193
1194 /* Make inhibit_defer_pop nonzero around the library call
1195 to force it to pop the arguments right away. */
1196 NO_DEFER_POP;
1197 break;
1198
1199 case BLOCK_OP_NO_LIBCALL:
1200 may_use_call = false;
1201 break;
1202
1203 default:
1204 gcc_unreachable ();
1205 }
1206
1207 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1208
1209 gcc_assert (MEM_P (x));
1210 gcc_assert (MEM_P (y));
1211 gcc_assert (size);
1212
1213 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1214 block copy is more efficient for other large modes, e.g. DCmode. */
1215 x = adjust_address (x, BLKmode, 0);
1216 y = adjust_address (y, BLKmode, 0);
1217
1218 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1219 can be incorrect is coming from __builtin_memcpy. */
1220 if (GET_CODE (size) == CONST_INT)
1221 {
1222 if (INTVAL (size) == 0)
1223 return 0;
1224
1225 x = shallow_copy_rtx (x);
1226 y = shallow_copy_rtx (y);
1227 set_mem_size (x, size);
1228 set_mem_size (y, size);
1229 }
1230
1231 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1232 move_by_pieces (x, y, INTVAL (size), align, 0);
1233 else if (emit_block_move_via_movmem (x, y, size, align,
1234 expected_align, expected_size))
1235 ;
1236 else if (may_use_call)
1237 retval = emit_block_move_via_libcall (x, y, size,
1238 method == BLOCK_OP_TAILCALL);
1239 else
1240 emit_block_move_via_loop (x, y, size, align);
1241
1242 if (method == BLOCK_OP_CALL_PARM)
1243 OK_DEFER_POP;
1244
1245 return retval;
1246 }
1247
1248 rtx
1249 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1250 {
1251 return emit_block_move_hints (x, y, size, method, 0, -1);
1252 }
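/* Illustrative sketch added for exposition; not part of the original
   expr.c.  The usual entry point for a structure copy: DST_BLK and
   SRC_BLK are BLKmode MEMs, and the 64-byte size is a hypothetical
   constant.  */
#if 0
static void
example_block_copy (rtx dst_blk, rtx src_blk)
{
  emit_block_move (dst_blk, src_blk, GEN_INT (64), BLOCK_OP_NORMAL);
}
#endif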
1253
1254 /* A subroutine of emit_block_move. Returns true if calling the
1255 block move libcall will not clobber any parameters which may have
1256 already been placed on the stack. */
1257
1258 static bool
1259 block_move_libcall_safe_for_call_parm (void)
1260 {
1261 /* If arguments are pushed on the stack, then they're safe. */
1262 if (PUSH_ARGS)
1263 return true;
1264
1265 /* If registers go on the stack anyway, any argument is sure to clobber
1266 an outgoing argument. */
1267 #if defined (REG_PARM_STACK_SPACE)
1268 if (OUTGOING_REG_PARM_STACK_SPACE)
1269 {
1270 tree fn;
1271 fn = emit_block_move_libcall_fn (false);
1272 if (REG_PARM_STACK_SPACE (fn) != 0)
1273 return false;
1274 }
1275 #endif
1276
1277 /* If any argument goes in memory, then it might clobber an outgoing
1278 argument. */
1279 {
1280 CUMULATIVE_ARGS args_so_far;
1281 tree fn, arg;
1282
1283 fn = emit_block_move_libcall_fn (false);
1284 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
1285
1286 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1287 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1288 {
1289 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1290 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1291 if (!tmp || !REG_P (tmp))
1292 return false;
1293 if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
1294 return false;
1295 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1296 }
1297 }
1298 return true;
1299 }
1300
1301 /* A subroutine of emit_block_move. Expand a movmem pattern;
1302 return true if successful. */
1303
1304 static bool
1305 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1306 unsigned int expected_align, HOST_WIDE_INT expected_size)
1307 {
1308 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1309 int save_volatile_ok = volatile_ok;
1310 enum machine_mode mode;
1311
1312 if (expected_align < align)
1313 expected_align = align;
1314
1315 /* Since this is a move insn, we don't care about volatility. */
1316 volatile_ok = 1;
1317
1318 /* Try the most limited insn first, because there's no point
1319 including more than one in the machine description unless
1320 the more limited one has some advantage. */
1321
1322 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1323 mode = GET_MODE_WIDER_MODE (mode))
1324 {
1325 enum insn_code code = movmem_optab[(int) mode];
1326 insn_operand_predicate_fn pred;
1327
1328 if (code != CODE_FOR_nothing
1329 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1330 here because if SIZE is less than the mode mask, as it is
1331 returned by the macro, it will definitely be less than the
1332 actual mode mask. */
1333 && ((GET_CODE (size) == CONST_INT
1334 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1335 <= (GET_MODE_MASK (mode) >> 1)))
1336 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1337 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1338 || (*pred) (x, BLKmode))
1339 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1340 || (*pred) (y, BLKmode))
1341 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1342 || (*pred) (opalign, VOIDmode)))
1343 {
1344 rtx op2;
1345 rtx last = get_last_insn ();
1346 rtx pat;
1347
1348 op2 = convert_to_mode (mode, size, 1);
1349 pred = insn_data[(int) code].operand[2].predicate;
1350 if (pred != 0 && ! (*pred) (op2, mode))
1351 op2 = copy_to_mode_reg (mode, op2);
1352
1353 /* ??? When called via emit_block_move_for_call, it'd be
1354 nice if there were some way to inform the backend, so
1355 that it doesn't fail the expansion because it thinks
1356 emitting the libcall would be more efficient. */
1357
1358 if (insn_data[(int) code].n_operands == 4)
1359 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1360 else
1361 pat = GEN_FCN ((int) code) (x, y, op2, opalign,
1362 GEN_INT (expected_align),
1363 GEN_INT (expected_size));
1364 if (pat)
1365 {
1366 emit_insn (pat);
1367 volatile_ok = save_volatile_ok;
1368 return true;
1369 }
1370 else
1371 delete_insns_since (last);
1372 }
1373 }
1374
1375 volatile_ok = save_volatile_ok;
1376 return false;
1377 }
1378
1379 /* A subroutine of emit_block_move. Expand a call to memcpy.
1380 Return the return value from memcpy, 0 otherwise. */
1381
1382 rtx
1383 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1384 {
1385 rtx dst_addr, src_addr;
1386 tree call_expr, fn, src_tree, dst_tree, size_tree;
1387 enum machine_mode size_mode;
1388 rtx retval;
1389
1390 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1391 pseudos. We can then place those new pseudos into a VAR_DECL and
1392 use them later. */
1393
1394 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1395 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1396
1397 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1398 src_addr = convert_memory_address (ptr_mode, src_addr);
1399
1400 dst_tree = make_tree (ptr_type_node, dst_addr);
1401 src_tree = make_tree (ptr_type_node, src_addr);
1402
1403 size_mode = TYPE_MODE (sizetype);
1404
1405 size = convert_to_mode (size_mode, size, 1);
1406 size = copy_to_mode_reg (size_mode, size);
1407
1408 /* It is incorrect to use the libcall calling conventions to call
1409 memcpy in this context. This could be a user call to memcpy and
1410 the user may wish to examine the return value from memcpy. For
1411 targets where libcalls and normal calls have different conventions
1412 for returning pointers, we could end up generating incorrect code. */
1413
1414 size_tree = make_tree (sizetype, size);
1415
1416 fn = emit_block_move_libcall_fn (true);
1417 call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1418 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1419
1420 retval = expand_normal (call_expr);
1421
1422 return retval;
1423 }
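/* Illustrative sketch added for exposition; not part of the original
   expr.c.  The call emitted by emit_block_move_via_libcall behaves like
   an ordinary (non-libcall) call to memcpy, i.e. roughly:  */
#if 0
static void *
example_equivalent_call (void *dst, const void *src, __SIZE_TYPE__ n)
{
  extern void *memcpy (void *, const void *, __SIZE_TYPE__);
  return memcpy (dst, src, n);
}
#endif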
1424
1425 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1426 for the function we use for block copies. The first time FOR_CALL
1427 is true, we call assemble_external. */
1428
1429 static GTY(()) tree block_move_fn;
1430
1431 void
1432 init_block_move_fn (const char *asmspec)
1433 {
1434 if (!block_move_fn)
1435 {
1436 tree args, fn;
1437
1438 fn = get_identifier ("memcpy");
1439 args = build_function_type_list (ptr_type_node, ptr_type_node,
1440 const_ptr_type_node, sizetype,
1441 NULL_TREE);
1442
1443 fn = build_decl (FUNCTION_DECL, fn, args);
1444 DECL_EXTERNAL (fn) = 1;
1445 TREE_PUBLIC (fn) = 1;
1446 DECL_ARTIFICIAL (fn) = 1;
1447 TREE_NOTHROW (fn) = 1;
1448 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1449 DECL_VISIBILITY_SPECIFIED (fn) = 1;
1450
1451 block_move_fn = fn;
1452 }
1453
1454 if (asmspec)
1455 set_user_assembler_name (block_move_fn, asmspec);
1456 }
1457
1458 static tree
1459 emit_block_move_libcall_fn (int for_call)
1460 {
1461 static bool emitted_extern;
1462
1463 if (!block_move_fn)
1464 init_block_move_fn (NULL);
1465
1466 if (for_call && !emitted_extern)
1467 {
1468 emitted_extern = true;
1469 make_decl_rtl (block_move_fn);
1470 assemble_external (block_move_fn);
1471 }
1472
1473 return block_move_fn;
1474 }
1475
1476 /* A subroutine of emit_block_move. Copy the data via an explicit
1477 loop. This is used only when libcalls are forbidden. */
1478 /* ??? It'd be nice to copy in hunks larger than QImode. */
1479
1480 static void
1481 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1482 unsigned int align ATTRIBUTE_UNUSED)
1483 {
1484 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1485 enum machine_mode iter_mode;
1486
1487 iter_mode = GET_MODE (size);
1488 if (iter_mode == VOIDmode)
1489 iter_mode = word_mode;
1490
1491 top_label = gen_label_rtx ();
1492 cmp_label = gen_label_rtx ();
1493 iter = gen_reg_rtx (iter_mode);
1494
1495 emit_move_insn (iter, const0_rtx);
1496
1497 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1498 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1499 do_pending_stack_adjust ();
1500
1501 emit_jump (cmp_label);
1502 emit_label (top_label);
1503
1504 tmp = convert_modes (Pmode, iter_mode, iter, true);
1505 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1506 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1507 x = change_address (x, QImode, x_addr);
1508 y = change_address (y, QImode, y_addr);
1509
1510 emit_move_insn (x, y);
1511
1512 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1513 true, OPTAB_LIB_WIDEN);
1514 if (tmp != iter)
1515 emit_move_insn (iter, tmp);
1516
1517 emit_label (cmp_label);
1518
1519 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1520 true, top_label);
1521 }
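/* Illustrative sketch added for exposition; not part of the original
   expr.c.  The RTL produced by emit_block_move_via_loop corresponds
   roughly to this byte-at-a-time C loop (the comparison is emitted
   first, so a zero SIZE copies nothing).  */
#if 0
static void
example_equivalent_loop (unsigned char *x, const unsigned char *y,
                         unsigned long size)
{
  unsigned long iter;

  for (iter = 0; iter < size; iter++)
    x[iter] = y[iter];
}
#endif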
1522 \f
1523 /* Copy all or part of a value X into registers starting at REGNO.
1524 The number of registers to be filled is NREGS. */
1525
1526 void
1527 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1528 {
1529 int i;
1530 #ifdef HAVE_load_multiple
1531 rtx pat;
1532 rtx last;
1533 #endif
1534
1535 if (nregs == 0)
1536 return;
1537
1538 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1539 x = validize_mem (force_const_mem (mode, x));
1540
1541 /* See if the machine can do this with a load multiple insn. */
1542 #ifdef HAVE_load_multiple
1543 if (HAVE_load_multiple)
1544 {
1545 last = get_last_insn ();
1546 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1547 GEN_INT (nregs));
1548 if (pat)
1549 {
1550 emit_insn (pat);
1551 return;
1552 }
1553 else
1554 delete_insns_since (last);
1555 }
1556 #endif
1557
1558 for (i = 0; i < nregs; i++)
1559 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1560 operand_subword_force (x, i, mode));
1561 }
1562
1563 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1564 The number of registers to be filled is NREGS. */
1565
1566 void
1567 move_block_from_reg (int regno, rtx x, int nregs)
1568 {
1569 int i;
1570
1571 if (nregs == 0)
1572 return;
1573
1574 /* See if the machine can do this with a store multiple insn. */
1575 #ifdef HAVE_store_multiple
1576 if (HAVE_store_multiple)
1577 {
1578 rtx last = get_last_insn ();
1579 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1580 GEN_INT (nregs));
1581 if (pat)
1582 {
1583 emit_insn (pat);
1584 return;
1585 }
1586 else
1587 delete_insns_since (last);
1588 }
1589 #endif
1590
1591 for (i = 0; i < nregs; i++)
1592 {
1593 rtx tem = operand_subword (x, i, 1, BLKmode);
1594
1595 gcc_assert (tem);
1596
1597 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1598 }
1599 }
1600
1601 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1602 ORIG, where ORIG is a non-consecutive group of registers represented by
1603 a PARALLEL. The clone is identical to the original except in that the
1604 original set of registers is replaced by a new set of pseudo registers.
1605 The new set has the same modes as the original set. */
1606
1607 rtx
1608 gen_group_rtx (rtx orig)
1609 {
1610 int i, length;
1611 rtx *tmps;
1612
1613 gcc_assert (GET_CODE (orig) == PARALLEL);
1614
1615 length = XVECLEN (orig, 0);
1616 tmps = alloca (sizeof (rtx) * length);
1617
1618 /* Skip a NULL entry in first slot. */
1619 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1620
1621 if (i)
1622 tmps[0] = 0;
1623
1624 for (; i < length; i++)
1625 {
1626 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1627 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1628
1629 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1630 }
1631
1632 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1633 }
1634
1635 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1636 except that values are placed in TMPS[i], and must later be moved
 1637 into the corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1638
1639 static void
1640 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1641 {
1642 rtx src;
1643 int start, i;
1644 enum machine_mode m = GET_MODE (orig_src);
1645
1646 gcc_assert (GET_CODE (dst) == PARALLEL);
1647
1648 if (m != VOIDmode
1649 && !SCALAR_INT_MODE_P (m)
1650 && !MEM_P (orig_src)
1651 && GET_CODE (orig_src) != CONCAT)
1652 {
1653 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1654 if (imode == BLKmode)
1655 src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
1656 else
1657 src = gen_reg_rtx (imode);
1658 if (imode != BLKmode)
1659 src = gen_lowpart (GET_MODE (orig_src), src);
1660 emit_move_insn (src, orig_src);
1661 /* ...and back again. */
1662 if (imode != BLKmode)
1663 src = gen_lowpart (imode, src);
1664 emit_group_load_1 (tmps, dst, src, type, ssize);
1665 return;
1666 }
1667
1668 /* Check for a NULL entry, used to indicate that the parameter goes
1669 both on the stack and in registers. */
1670 if (XEXP (XVECEXP (dst, 0, 0), 0))
1671 start = 0;
1672 else
1673 start = 1;
1674
1675 /* Process the pieces. */
1676 for (i = start; i < XVECLEN (dst, 0); i++)
1677 {
1678 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1679 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1680 unsigned int bytelen = GET_MODE_SIZE (mode);
1681 int shift = 0;
1682
1683 /* Handle trailing fragments that run over the size of the struct. */
1684 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1685 {
1686 /* Arrange to shift the fragment to where it belongs.
1687 extract_bit_field loads to the lsb of the reg. */
1688 if (
1689 #ifdef BLOCK_REG_PADDING
1690 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1691 == (BYTES_BIG_ENDIAN ? upward : downward)
1692 #else
1693 BYTES_BIG_ENDIAN
1694 #endif
1695 )
1696 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1697 bytelen = ssize - bytepos;
1698 gcc_assert (bytelen > 0);
1699 }
1700
1701 /* If we won't be loading directly from memory, protect the real source
1702 from strange tricks we might play; but make sure that the source can
1703 be loaded directly into the destination. */
1704 src = orig_src;
1705 if (!MEM_P (orig_src)
1706 && (!CONSTANT_P (orig_src)
1707 || (GET_MODE (orig_src) != mode
1708 && GET_MODE (orig_src) != VOIDmode)))
1709 {
1710 if (GET_MODE (orig_src) == VOIDmode)
1711 src = gen_reg_rtx (mode);
1712 else
1713 src = gen_reg_rtx (GET_MODE (orig_src));
1714
1715 emit_move_insn (src, orig_src);
1716 }
1717
1718 /* Optimize the access just a bit. */
1719 if (MEM_P (src)
1720 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1721 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1722 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1723 && bytelen == GET_MODE_SIZE (mode))
1724 {
1725 tmps[i] = gen_reg_rtx (mode);
1726 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1727 }
1728 else if (COMPLEX_MODE_P (mode)
1729 && GET_MODE (src) == mode
1730 && bytelen == GET_MODE_SIZE (mode))
1731 /* Let emit_move_complex do the bulk of the work. */
1732 tmps[i] = src;
1733 else if (GET_CODE (src) == CONCAT)
1734 {
1735 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1736 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1737
1738 if ((bytepos == 0 && bytelen == slen0)
1739 || (bytepos != 0 && bytepos + bytelen <= slen))
1740 {
1741 /* The following assumes that the concatenated objects all
1742 have the same size. In this case, a simple calculation
1743 can be used to determine the object and the bit field
1744 to be extracted. */
1745 tmps[i] = XEXP (src, bytepos / slen0);
1746 if (! CONSTANT_P (tmps[i])
1747 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1748 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1749 (bytepos % slen0) * BITS_PER_UNIT,
1750 1, NULL_RTX, mode, mode);
1751 }
1752 else
1753 {
1754 rtx mem;
1755
1756 gcc_assert (!bytepos);
1757 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1758 emit_move_insn (mem, src);
1759 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1760 0, 1, NULL_RTX, mode, mode);
1761 }
1762 }
1763 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1764 SIMD register, which is currently broken. Until we get GCC
1765 to emit proper RTL for these cases, let's dump to memory. */
1766 else if (VECTOR_MODE_P (GET_MODE (dst))
1767 && REG_P (src))
1768 {
1769 int slen = GET_MODE_SIZE (GET_MODE (src));
1770 rtx mem;
1771
1772 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1773 emit_move_insn (mem, src);
1774 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1775 }
1776 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1777 && XVECLEN (dst, 0) > 1)
1778 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
1779 else if (CONSTANT_P (src)
1780 || (REG_P (src) && GET_MODE (src) == mode))
1781 tmps[i] = src;
1782 else
1783 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1784 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1785 mode, mode);
1786
1787 if (shift)
1788 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1789 build_int_cst (NULL_TREE, shift), tmps[i], 0);
1790 }
1791 }
1792
1793 /* Emit code to move a block SRC of type TYPE to a block DST,
1794 where DST is non-consecutive registers represented by a PARALLEL.
1795 SSIZE represents the total size of block SRC in bytes, or -1
1796 if not known. */
1797
1798 void
1799 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1800 {
1801 rtx *tmps;
1802 int i;
1803
1804 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1805 emit_group_load_1 (tmps, dst, src, type, ssize);
1806
1807 /* Copy the extracted pieces into the proper (probable) hard regs. */
1808 for (i = 0; i < XVECLEN (dst, 0); i++)
1809 {
1810 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1811 if (d == NULL)
1812 continue;
1813 emit_move_insn (d, tmps[i]);
1814 }
1815 }
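
/* Illustrative sketch only, not used by the compiler: how a caller such
   as expand_call might describe a 16-byte aggregate returned in two
   hypothetical DImode hard registers 0 and 1, and load it from memory
   with emit_group_load.  The register numbers, modes and total size are
   assumptions made for the example.  */
#if 0
static void
example_emit_group_load (rtx src_mem, tree type)
{
  /* Each EXPR_LIST pairs a destination register with its byte offset
     within the source block.  */
  rtx piece0 = gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_REG (DImode, 0),
				  GEN_INT (0));
  rtx piece1 = gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_REG (DImode, 1),
				  GEN_INT (8));
  rtx dst = gen_rtx_PARALLEL (BLKmode, gen_rtvec (2, piece0, piece1));

  emit_group_load (dst, src_mem, type, 16);
}
#endif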
1816
1817 /* Similar, but load SRC into new pseudos in a format that looks like
1818 PARALLEL. This can later be fed to emit_group_move to get things
1819 in the right place. */
1820
1821 rtx
1822 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1823 {
1824 rtvec vec;
1825 int i;
1826
1827 vec = rtvec_alloc (XVECLEN (parallel, 0));
1828 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1829
1830 /* Convert the vector to look just like the original PARALLEL, except
1831 with the computed values. */
1832 for (i = 0; i < XVECLEN (parallel, 0); i++)
1833 {
1834 rtx e = XVECEXP (parallel, 0, i);
1835 rtx d = XEXP (e, 0);
1836
1837 if (d)
1838 {
1839 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1840 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1841 }
1842 RTVEC_ELT (vec, i) = e;
1843 }
1844
1845 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1846 }
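
/* Illustrative sketch only: the two-step pattern this helper enables.
   The caller loads SRC into fresh pseudos shaped like PARALLEL, emits
   whatever other code it needs, and only then moves the pseudos into
   the real hard registers.  PARALLEL, SRC_MEM, TYPE and SIZE are
   assumed to come from the caller.  */
#if 0
static void
example_group_load_into_temps (rtx parallel, rtx src_mem, tree type, int size)
{
  rtx temps = emit_group_load_into_temps (parallel, src_mem, type, size);

  /* ... emit code that must not clobber the hard registers ...  */

  emit_group_move (parallel, temps);
}
#endif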
1847
1848 /* Emit code to move a block SRC to block DST, where SRC and DST are
1849 non-consecutive groups of registers, each represented by a PARALLEL. */
1850
1851 void
1852 emit_group_move (rtx dst, rtx src)
1853 {
1854 int i;
1855
1856 gcc_assert (GET_CODE (src) == PARALLEL
1857 && GET_CODE (dst) == PARALLEL
1858 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1859
1860 /* Skip first entry if NULL. */
1861 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1862 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1863 XEXP (XVECEXP (src, 0, i), 0));
1864 }
1865
1866 /* Move a group of registers represented by a PARALLEL into pseudos. */
1867
1868 rtx
1869 emit_group_move_into_temps (rtx src)
1870 {
1871 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1872 int i;
1873
1874 for (i = 0; i < XVECLEN (src, 0); i++)
1875 {
1876 rtx e = XVECEXP (src, 0, i);
1877 rtx d = XEXP (e, 0);
1878
1879 if (d)
1880 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1881 RTVEC_ELT (vec, i) = e;
1882 }
1883
1884 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1885 }
1886
1887 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1888 where SRC is non-consecutive registers represented by a PARALLEL.
1889 SSIZE represents the total size of block ORIG_DST, or -1 if not
1890 known. */
1891
1892 void
1893 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1894 {
1895 rtx *tmps, dst;
1896 int start, finish, i;
1897 enum machine_mode m = GET_MODE (orig_dst);
1898
1899 gcc_assert (GET_CODE (src) == PARALLEL);
1900
1901 if (!SCALAR_INT_MODE_P (m)
1902 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1903 {
1904 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1905 if (imode == BLKmode)
1906 dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
1907 else
1908 dst = gen_reg_rtx (imode);
1909 emit_group_store (dst, src, type, ssize);
1910 if (imode != BLKmode)
1911 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1912 emit_move_insn (orig_dst, dst);
1913 return;
1914 }
1915
1916 /* Check for a NULL entry, used to indicate that the parameter goes
1917 both on the stack and in registers. */
1918 if (XEXP (XVECEXP (src, 0, 0), 0))
1919 start = 0;
1920 else
1921 start = 1;
1922 finish = XVECLEN (src, 0);
1923
1924 tmps = alloca (sizeof (rtx) * finish);
1925
1926 /* Copy the (probable) hard regs into pseudos. */
1927 for (i = start; i < finish; i++)
1928 {
1929 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1930 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1931 {
1932 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1933 emit_move_insn (tmps[i], reg);
1934 }
1935 else
1936 tmps[i] = reg;
1937 }
1938
1939 /* If we won't be storing directly into memory, protect the real destination
1940 from strange tricks we might play. */
1941 dst = orig_dst;
1942 if (GET_CODE (dst) == PARALLEL)
1943 {
1944 rtx temp;
1945
1946 /* We can get a PARALLEL dst if there is a conditional expression in
1947 a return statement. In that case, the dst and src are the same,
1948 so no action is necessary. */
1949 if (rtx_equal_p (dst, src))
1950 return;
1951
1952 /* It is unclear if we can ever reach here, but we may as well handle
1953 it. Allocate a temporary, and split this into a store/load to/from
1954 the temporary. */
1955
1956 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1957 emit_group_store (temp, src, type, ssize);
1958 emit_group_load (dst, temp, type, ssize);
1959 return;
1960 }
1961 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1962 {
1963 enum machine_mode outer = GET_MODE (dst);
1964 enum machine_mode inner;
1965 HOST_WIDE_INT bytepos;
1966 bool done = false;
1967 rtx temp;
1968
1969 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1970 dst = gen_reg_rtx (outer);
1971
1972 /* Make life a bit easier for combine. */
1973 /* If the first element of the vector is the low part
1974 of the destination mode, use a paradoxical subreg to
1975 initialize the destination. */
1976 if (start < finish)
1977 {
1978 inner = GET_MODE (tmps[start]);
1979 bytepos = subreg_lowpart_offset (inner, outer);
1980 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1981 {
1982 temp = simplify_gen_subreg (outer, tmps[start],
1983 inner, 0);
1984 if (temp)
1985 {
1986 emit_move_insn (dst, temp);
1987 done = true;
1988 start++;
1989 }
1990 }
1991 }
1992
1993 /* If the first element wasn't the low part, try the last. */
1994 if (!done
1995 && start < finish - 1)
1996 {
1997 inner = GET_MODE (tmps[finish - 1]);
1998 bytepos = subreg_lowpart_offset (inner, outer);
1999 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
2000 {
2001 temp = simplify_gen_subreg (outer, tmps[finish - 1],
2002 inner, 0);
2003 if (temp)
2004 {
2005 emit_move_insn (dst, temp);
2006 done = true;
2007 finish--;
2008 }
2009 }
2010 }
2011
2012 /* Otherwise, simply initialize the result to zero. */
2013 if (!done)
2014 emit_move_insn (dst, CONST0_RTX (outer));
2015 }
2016
2017 /* Process the pieces. */
2018 for (i = start; i < finish; i++)
2019 {
2020 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2021 enum machine_mode mode = GET_MODE (tmps[i]);
2022 unsigned int bytelen = GET_MODE_SIZE (mode);
2023 rtx dest = dst;
2024
2025 /* Handle trailing fragments that run over the size of the struct. */
2026 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2027 {
2028 /* store_bit_field always takes its value from the lsb.
2029 Move the fragment to the lsb if it's not already there. */
2030 if (
2031 #ifdef BLOCK_REG_PADDING
2032 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2033 == (BYTES_BIG_ENDIAN ? upward : downward)
2034 #else
2035 BYTES_BIG_ENDIAN
2036 #endif
2037 )
2038 {
2039 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2040 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2041 build_int_cst (NULL_TREE, shift),
2042 tmps[i], 0);
2043 }
2044 bytelen = ssize - bytepos;
2045 }
2046
2047 if (GET_CODE (dst) == CONCAT)
2048 {
2049 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2050 dest = XEXP (dst, 0);
2051 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2052 {
2053 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2054 dest = XEXP (dst, 1);
2055 }
2056 else
2057 {
2058 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2059 dest = assign_stack_temp (GET_MODE (dest),
2060 GET_MODE_SIZE (GET_MODE (dest)), 0);
2061 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2062 tmps[i]);
2063 dst = dest;
2064 break;
2065 }
2066 }
2067
2068 /* Optimize the access just a bit. */
2069 if (MEM_P (dest)
2070 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2071 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2072 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2073 && bytelen == GET_MODE_SIZE (mode))
2074 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2075 else
2076 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2077 mode, tmps[i]);
2078 }
2079
2080 /* Copy from the pseudo into the (probable) hard reg. */
2081 if (orig_dst != dst)
2082 emit_move_insn (orig_dst, dst);
2083 }
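
/* Illustrative sketch only: spilling a value described by a PARALLEL
   (for instance a multi-register return value) into a 16-byte BLKmode
   stack temporary.  The size is an assumption for the example.  */
#if 0
static rtx
example_emit_group_store (rtx parallel_src, tree type)
{
  rtx dst_mem = assign_stack_temp (BLKmode, 16, 0);

  emit_group_store (dst_mem, parallel_src, type, 16);
  return dst_mem;
}
#endif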
2084
2085 /* Generate code to copy a BLKmode object of TYPE out of a
2086 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2087 is null, a stack temporary is created. TGTBLK is returned.
2088
2089 The purpose of this routine is to handle functions that return
2090 BLKmode structures in registers. Some machines (the PA for example)
2091 want to return all small structures in registers regardless of the
2092 structure's alignment. */
2093
2094 rtx
2095 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2096 {
2097 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2098 rtx src = NULL, dst = NULL;
2099 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2100 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2101
2102 if (tgtblk == 0)
2103 {
2104 tgtblk = assign_temp (build_qualified_type (type,
2105 (TYPE_QUALS (type)
2106 | TYPE_QUAL_CONST)),
2107 0, 1, 1);
2108 preserve_temp_slots (tgtblk);
2109 }
2110
2111 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2112 into a new pseudo which is a full word. */
2113
2114 if (GET_MODE (srcreg) != BLKmode
2115 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2116 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2117
2118 /* If the structure doesn't take up a whole number of words, see whether
2119 SRCREG is padded on the left or on the right. If it's on the left,
2120 set PADDING_CORRECTION to the number of bits to skip.
2121
2122 In most ABIs, the structure will be returned at the least significant end of
2123 the register, which translates to right padding on little-endian
2124 targets and left padding on big-endian targets. The opposite
2125 holds if the structure is returned at the most significant
2126 end of the register. */
2127 if (bytes % UNITS_PER_WORD != 0
2128 && (targetm.calls.return_in_msb (type)
2129 ? !BYTES_BIG_ENDIAN
2130 : BYTES_BIG_ENDIAN))
2131 padding_correction
2132 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2133
2134 /* Copy the structure BITSIZE bits at a time.
2135
2136 We could probably emit more efficient code for machines which do not use
2137 strict alignment, but it doesn't seem worth the effort at the current
2138 time. */
2139 for (bitpos = 0, xbitpos = padding_correction;
2140 bitpos < bytes * BITS_PER_UNIT;
2141 bitpos += bitsize, xbitpos += bitsize)
2142 {
2143 /* We need a new source operand each time xbitpos is on a
2144 word boundary and when xbitpos == padding_correction
2145 (the first time through). */
2146 if (xbitpos % BITS_PER_WORD == 0
2147 || xbitpos == padding_correction)
2148 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2149 GET_MODE (srcreg));
2150
2151 /* We need a new destination operand each time bitpos is on
2152 a word boundary. */
2153 if (bitpos % BITS_PER_WORD == 0)
2154 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2155
2156 /* Use xbitpos for the source extraction (right justified) and
2157 bitpos for the destination store (left justified). */
2158 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2159 extract_bit_field (src, bitsize,
2160 xbitpos % BITS_PER_WORD, 1,
2161 NULL_RTX, word_mode, word_mode));
2162 }
2163
2164 return tgtblk;
2165 }
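
/* Illustrative sketch only: unpacking a BLKmode value returned in
   SRCREG.  Passing NULL for TGTBLK lets copy_blkmode_from_reg allocate
   the stack temporary itself and return it.  */
#if 0
static rtx
example_copy_blkmode_from_reg (rtx srcreg, tree type)
{
  return copy_blkmode_from_reg (NULL_RTX, srcreg, type);
}
#endif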
2166
2167 /* Add a USE expression for REG to the (possibly empty) list pointed
2168 to by CALL_FUSAGE. REG must denote a hard register. */
2169
2170 void
2171 use_reg (rtx *call_fusage, rtx reg)
2172 {
2173 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2174
2175 *call_fusage
2176 = gen_rtx_EXPR_LIST (VOIDmode,
2177 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2178 }
2179
2180 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2181 starting at REGNO. All of these registers must be hard registers. */
2182
2183 void
2184 use_regs (rtx *call_fusage, int regno, int nregs)
2185 {
2186 int i;
2187
2188 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2189
2190 for (i = 0; i < nregs; i++)
2191 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2192 }
2193
2194 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2195 PARALLEL REGS. This is for calls that pass values in multiple
2196 non-contiguous locations. The Irix 6 ABI has examples of this. */
2197
2198 void
2199 use_group_regs (rtx *call_fusage, rtx regs)
2200 {
2201 int i;
2202
2203 for (i = 0; i < XVECLEN (regs, 0); i++)
2204 {
2205 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2206
2207 /* A NULL entry means the parameter goes both on the stack and in
2208 registers. This can also be a MEM for targets that pass values
2209 partially on the stack and partially in registers. */
2210 if (reg != 0 && REG_P (reg))
2211 use_reg (call_fusage, reg);
2212 }
2213 }
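
/* Illustrative sketch only: building a CALL_FUSAGE list the way
   expand_call does, for one argument in a hypothetical hard register 0
   and another passed in a group of registers described by a PARALLEL.
   The register number and mode are assumptions for the example.  */
#if 0
static rtx
example_call_fusage (rtx parallel_regs)
{
  rtx call_fusage = NULL_RTX;

  use_reg (&call_fusage, gen_rtx_REG (SImode, 0));
  use_group_regs (&call_fusage, parallel_regs);

  /* The resulting list is later attached to the CALL_INSN so that
     dataflow sees these registers as used by the call.  */
  return call_fusage;
}
#endif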
2214 \f
2215
2216 /* Determine whether the LEN bytes generated by CONSTFUN can be
2217 stored to memory using several move instructions. CONSTFUNDATA is
2218 a pointer which will be passed as argument in every CONSTFUN call.
2219 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2220 a memset operation and false if it's a copy of a constant string.
2221 Return nonzero if a call to store_by_pieces should succeed. */
2222
2223 int
2224 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2225 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2226 void *constfundata, unsigned int align, bool memsetp)
2227 {
2228 unsigned HOST_WIDE_INT l;
2229 unsigned int max_size;
2230 HOST_WIDE_INT offset = 0;
2231 enum machine_mode mode, tmode;
2232 enum insn_code icode;
2233 int reverse;
2234 rtx cst;
2235
2236 if (len == 0)
2237 return 1;
2238
2239 if (! (memsetp
2240 ? SET_BY_PIECES_P (len, align)
2241 : STORE_BY_PIECES_P (len, align)))
2242 return 0;
2243
2244 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2245 if (align >= GET_MODE_ALIGNMENT (tmode))
2246 align = GET_MODE_ALIGNMENT (tmode);
2247 else
2248 {
2249 enum machine_mode xmode;
2250
2251 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2252 tmode != VOIDmode;
2253 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2254 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2255 || SLOW_UNALIGNED_ACCESS (tmode, align))
2256 break;
2257
2258 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2259 }
2260
2261 /* We would first store what we can in the largest integer mode, then go to
2262 successively smaller modes. */
2263
2264 for (reverse = 0;
2265 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2266 reverse++)
2267 {
2268 l = len;
2269 mode = VOIDmode;
2270 max_size = STORE_MAX_PIECES + 1;
2271 while (max_size > 1)
2272 {
2273 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2274 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2275 if (GET_MODE_SIZE (tmode) < max_size)
2276 mode = tmode;
2277
2278 if (mode == VOIDmode)
2279 break;
2280
2281 icode = optab_handler (mov_optab, mode)->insn_code;
2282 if (icode != CODE_FOR_nothing
2283 && align >= GET_MODE_ALIGNMENT (mode))
2284 {
2285 unsigned int size = GET_MODE_SIZE (mode);
2286
2287 while (l >= size)
2288 {
2289 if (reverse)
2290 offset -= size;
2291
2292 cst = (*constfun) (constfundata, offset, mode);
2293 if (!LEGITIMATE_CONSTANT_P (cst))
2294 return 0;
2295
2296 if (!reverse)
2297 offset += size;
2298
2299 l -= size;
2300 }
2301 }
2302
2303 max_size = GET_MODE_SIZE (mode);
2304 }
2305
2306 /* The code above should have handled everything. */
2307 gcc_assert (!l);
2308 }
2309
2310 return 1;
2311 }
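
/* Illustrative sketch only: a CONSTFUN callback in the style of
   builtin_memcpy_read_str, returning successive bytes of a string held
   in CONSTFUNDATA.  For simplicity it assumes MODE is QImode; a real
   callback would build wider immediates (e.g. with c_readstr).  */
#if 0
static rtx
example_read_str (void *data, HOST_WIDE_INT offset,
		  enum machine_mode mode ATTRIBUTE_UNUSED)
{
  const char *str = (const char *) data;

  return GEN_INT (str[offset]);
}
#endif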
2312
2313 /* Generate several move instructions to store LEN bytes generated by
2314 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2315 pointer which will be passed as argument in every CONSTFUN call.
2316 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2317 a memset operation and false if it's a copy of a constant string.
2318 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2319 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2320 stpcpy. */
2321
2322 rtx
2323 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2324 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2325 void *constfundata, unsigned int align, bool memsetp, int endp)
2326 {
2327 struct store_by_pieces data;
2328
2329 if (len == 0)
2330 {
2331 gcc_assert (endp != 2);
2332 return to;
2333 }
2334
2335 gcc_assert (memsetp
2336 ? SET_BY_PIECES_P (len, align)
2337 : STORE_BY_PIECES_P (len, align));
2338 data.constfun = constfun;
2339 data.constfundata = constfundata;
2340 data.len = len;
2341 data.to = to;
2342 store_by_pieces_1 (&data, align);
2343 if (endp)
2344 {
2345 rtx to1;
2346
2347 gcc_assert (!data.reverse);
2348 if (data.autinc_to)
2349 {
2350 if (endp == 2)
2351 {
2352 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2353 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2354 else
2355 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2356 -1));
2357 }
2358 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2359 data.offset);
2360 }
2361 else
2362 {
2363 if (endp == 2)
2364 --data.offset;
2365 to1 = adjust_address (data.to, QImode, data.offset);
2366 }
2367 return to1;
2368 }
2369 else
2370 return data.to;
2371 }
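
/* Illustrative sketch only: the check-then-expand pattern the string
   builtins use.  EXAMPLE_READ_STR is the hypothetical callback sketched
   after can_store_by_pieces above; DEST is a BLKmode MEM and ALIGN its
   known alignment in bits.  */
#if 0
static bool
example_store_string (rtx dest, const char *str, unsigned int align)
{
  unsigned HOST_WIDE_INT len = strlen (str) + 1;

  if (!can_store_by_pieces (len, example_read_str, (void *) str,
			    align, false))
    return false;

  /* MEMSETP is false (this is a string copy) and ENDP is 0, so the
     original DEST is returned.  */
  store_by_pieces (dest, len, example_read_str, (void *) str,
		   align, false, 0);
  return true;
}
#endif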
2372
2373 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2374 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2375
2376 static void
2377 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2378 {
2379 struct store_by_pieces data;
2380
2381 if (len == 0)
2382 return;
2383
2384 data.constfun = clear_by_pieces_1;
2385 data.constfundata = NULL;
2386 data.len = len;
2387 data.to = to;
2388 store_by_pieces_1 (&data, align);
2389 }
2390
2391 /* Callback routine for clear_by_pieces.
2392 Return const0_rtx unconditionally. */
2393
2394 static rtx
2395 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2396 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2397 enum machine_mode mode ATTRIBUTE_UNUSED)
2398 {
2399 return const0_rtx;
2400 }
2401
2402 /* Subroutine of clear_by_pieces and store_by_pieces.
2403 Generate several move instructions to store LEN bytes of block TO. (A MEM
2404 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2405
2406 static void
2407 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2408 unsigned int align ATTRIBUTE_UNUSED)
2409 {
2410 rtx to_addr = XEXP (data->to, 0);
2411 unsigned int max_size = STORE_MAX_PIECES + 1;
2412 enum machine_mode mode = VOIDmode, tmode;
2413 enum insn_code icode;
2414
2415 data->offset = 0;
2416 data->to_addr = to_addr;
2417 data->autinc_to
2418 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2419 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2420
2421 data->explicit_inc_to = 0;
2422 data->reverse
2423 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2424 if (data->reverse)
2425 data->offset = data->len;
2426
2427 /* If storing requires more than two move insns,
2428 copy addresses to registers (to make displacements shorter)
2429 and use post-increment if available. */
2430 if (!data->autinc_to
2431 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2432 {
2433 /* Determine the main mode we'll be using. */
2434 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2435 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2436 if (GET_MODE_SIZE (tmode) < max_size)
2437 mode = tmode;
2438
2439 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2440 {
2441 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2442 data->autinc_to = 1;
2443 data->explicit_inc_to = -1;
2444 }
2445
2446 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2447 && ! data->autinc_to)
2448 {
2449 data->to_addr = copy_addr_to_reg (to_addr);
2450 data->autinc_to = 1;
2451 data->explicit_inc_to = 1;
2452 }
2453
2454 if (!data->autinc_to && CONSTANT_P (to_addr))
2455 data->to_addr = copy_addr_to_reg (to_addr);
2456 }
2457
2458 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2459 if (align >= GET_MODE_ALIGNMENT (tmode))
2460 align = GET_MODE_ALIGNMENT (tmode);
2461 else
2462 {
2463 enum machine_mode xmode;
2464
2465 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2466 tmode != VOIDmode;
2467 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2468 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2469 || SLOW_UNALIGNED_ACCESS (tmode, align))
2470 break;
2471
2472 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2473 }
2474
2475 /* First store what we can in the largest integer mode, then go to
2476 successively smaller modes. */
2477
2478 while (max_size > 1)
2479 {
2480 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2481 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2482 if (GET_MODE_SIZE (tmode) < max_size)
2483 mode = tmode;
2484
2485 if (mode == VOIDmode)
2486 break;
2487
2488 icode = optab_handler (mov_optab, mode)->insn_code;
2489 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2490 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2491
2492 max_size = GET_MODE_SIZE (mode);
2493 }
2494
2495 /* The code above should have handled everything. */
2496 gcc_assert (!data->len);
2497 }
2498
2499 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2500 with move instructions for mode MODE. GENFUN is the gen_... function
2501 to make a move insn for that mode. DATA has all the other info. */
2502
2503 static void
2504 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2505 struct store_by_pieces *data)
2506 {
2507 unsigned int size = GET_MODE_SIZE (mode);
2508 rtx to1, cst;
2509
2510 while (data->len >= size)
2511 {
2512 if (data->reverse)
2513 data->offset -= size;
2514
2515 if (data->autinc_to)
2516 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2517 data->offset);
2518 else
2519 to1 = adjust_address (data->to, mode, data->offset);
2520
2521 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2522 emit_insn (gen_add2_insn (data->to_addr,
2523 GEN_INT (-(HOST_WIDE_INT) size)));
2524
2525 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2526 emit_insn ((*genfun) (to1, cst));
2527
2528 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2529 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2530
2531 if (! data->reverse)
2532 data->offset += size;
2533
2534 data->len -= size;
2535 }
2536 }
2537 \f
2538 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2539 its length in bytes. */
2540
2541 rtx
2542 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2543 unsigned int expected_align, HOST_WIDE_INT expected_size)
2544 {
2545 enum machine_mode mode = GET_MODE (object);
2546 unsigned int align;
2547
2548 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2549
2550 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2551 just move a zero. Otherwise, do this a piece at a time. */
2552 if (mode != BLKmode
2553 && GET_CODE (size) == CONST_INT
2554 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2555 {
2556 rtx zero = CONST0_RTX (mode);
2557 if (zero != NULL)
2558 {
2559 emit_move_insn (object, zero);
2560 return NULL;
2561 }
2562
2563 if (COMPLEX_MODE_P (mode))
2564 {
2565 zero = CONST0_RTX (GET_MODE_INNER (mode));
2566 if (zero != NULL)
2567 {
2568 write_complex_part (object, zero, 0);
2569 write_complex_part (object, zero, 1);
2570 return NULL;
2571 }
2572 }
2573 }
2574
2575 if (size == const0_rtx)
2576 return NULL;
2577
2578 align = MEM_ALIGN (object);
2579
2580 if (GET_CODE (size) == CONST_INT
2581 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2582 clear_by_pieces (object, INTVAL (size), align);
2583 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2584 expected_align, expected_size))
2585 ;
2586 else
2587 return set_storage_via_libcall (object, size, const0_rtx,
2588 method == BLOCK_OP_TAILCALL);
2589
2590 return NULL;
2591 }
2592
2593 rtx
2594 clear_storage (rtx object, rtx size, enum block_op_methods method)
2595 {
2596 return clear_storage_hints (object, size, method, 0, -1);
2597 }
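
/* Illustrative sketch only: zeroing a 32-byte BLKmode stack temporary
   through clear_storage.  The size is an assumption for the example.  */
#if 0
static rtx
example_clear_storage (void)
{
  rtx mem = assign_stack_temp (BLKmode, 32, 0);

  clear_storage (mem, GEN_INT (32), BLOCK_OP_NORMAL);
  return mem;
}
#endif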
2598
2599
2600 /* A subroutine of clear_storage. Expand a call to memset.
2601 Return the return value of memset, 0 otherwise. */
2602
2603 rtx
2604 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2605 {
2606 tree call_expr, fn, object_tree, size_tree, val_tree;
2607 enum machine_mode size_mode;
2608 rtx retval;
2609
2610 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2611 place those new pseudos into a VAR_DECL and use them later. */
2612
2613 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2614
2615 size_mode = TYPE_MODE (sizetype);
2616 size = convert_to_mode (size_mode, size, 1);
2617 size = copy_to_mode_reg (size_mode, size);
2618
2619 /* It is incorrect to use the libcall calling conventions to call
2620 memset in this context. This could be a user call to memset and
2621 the user may wish to examine the return value from memset. For
2622 targets where libcalls and normal calls have different conventions
2623 for returning pointers, we could end up generating incorrect code. */
2624
2625 object_tree = make_tree (ptr_type_node, object);
2626 if (GET_CODE (val) != CONST_INT)
2627 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2628 size_tree = make_tree (sizetype, size);
2629 val_tree = make_tree (integer_type_node, val);
2630
2631 fn = clear_storage_libcall_fn (true);
2632 call_expr = build_call_expr (fn, 3,
2633 object_tree, val_tree, size_tree);
2634 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2635
2636 retval = expand_normal (call_expr);
2637
2638 return retval;
2639 }
2640
2641 /* A subroutine of set_storage_via_libcall. Create the tree node
2642 for the function we use for block clears. The first time FOR_CALL
2643 is true, we call assemble_external. */
2644
2645 static GTY(()) tree block_clear_fn;
2646
2647 void
2648 init_block_clear_fn (const char *asmspec)
2649 {
2650 if (!block_clear_fn)
2651 {
2652 tree fn, args;
2653
2654 fn = get_identifier ("memset");
2655 args = build_function_type_list (ptr_type_node, ptr_type_node,
2656 integer_type_node, sizetype,
2657 NULL_TREE);
2658
2659 fn = build_decl (FUNCTION_DECL, fn, args);
2660 DECL_EXTERNAL (fn) = 1;
2661 TREE_PUBLIC (fn) = 1;
2662 DECL_ARTIFICIAL (fn) = 1;
2663 TREE_NOTHROW (fn) = 1;
2664 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2665 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2666
2667 block_clear_fn = fn;
2668 }
2669
2670 if (asmspec)
2671 set_user_assembler_name (block_clear_fn, asmspec);
2672 }
2673
2674 static tree
2675 clear_storage_libcall_fn (int for_call)
2676 {
2677 static bool emitted_extern;
2678
2679 if (!block_clear_fn)
2680 init_block_clear_fn (NULL);
2681
2682 if (for_call && !emitted_extern)
2683 {
2684 emitted_extern = true;
2685 make_decl_rtl (block_clear_fn);
2686 assemble_external (block_clear_fn);
2687 }
2688
2689 return block_clear_fn;
2690 }
2691 \f
2692 /* Expand a setmem pattern; return true if successful. */
2693
2694 bool
2695 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2696 unsigned int expected_align, HOST_WIDE_INT expected_size)
2697 {
2698 /* Try the most limited insn first, because there's no point
2699 including more than one in the machine description unless
2700 the more limited one has some advantage. */
2701
2702 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2703 enum machine_mode mode;
2704
2705 if (expected_align < align)
2706 expected_align = align;
2707
2708 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2709 mode = GET_MODE_WIDER_MODE (mode))
2710 {
2711 enum insn_code code = setmem_optab[(int) mode];
2712 insn_operand_predicate_fn pred;
2713
2714 if (code != CODE_FOR_nothing
2715 /* We don't need MODE to be narrower than
2716 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2717 the mode mask, as it is returned by the macro, it will
2718 definitely be less than the actual mode mask. */
2719 && ((GET_CODE (size) == CONST_INT
2720 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2721 <= (GET_MODE_MASK (mode) >> 1)))
2722 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2723 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2724 || (*pred) (object, BLKmode))
2725 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
2726 || (*pred) (opalign, VOIDmode)))
2727 {
2728 rtx opsize, opchar;
2729 enum machine_mode char_mode;
2730 rtx last = get_last_insn ();
2731 rtx pat;
2732
2733 opsize = convert_to_mode (mode, size, 1);
2734 pred = insn_data[(int) code].operand[1].predicate;
2735 if (pred != 0 && ! (*pred) (opsize, mode))
2736 opsize = copy_to_mode_reg (mode, opsize);
2737
2738 opchar = val;
2739 char_mode = insn_data[(int) code].operand[2].mode;
2740 if (char_mode != VOIDmode)
2741 {
2742 opchar = convert_to_mode (char_mode, opchar, 1);
2743 pred = insn_data[(int) code].operand[2].predicate;
2744 if (pred != 0 && ! (*pred) (opchar, char_mode))
2745 opchar = copy_to_mode_reg (char_mode, opchar);
2746 }
2747
2748 if (insn_data[(int) code].n_operands == 4)
2749 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
2750 else
2751 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign,
2752 GEN_INT (expected_align),
2753 GEN_INT (expected_size));
2754 if (pat)
2755 {
2756 emit_insn (pat);
2757 return true;
2758 }
2759 else
2760 delete_insns_since (last);
2761 }
2762 }
2763
2764 return false;
2765 }
2766
2767 \f
2768 /* Write to one of the components of the complex value CPLX. Write VAL to
2769 the real part if IMAG_P is false, and the imaginary part if it's true. */
2770
2771 static void
2772 write_complex_part (rtx cplx, rtx val, bool imag_p)
2773 {
2774 enum machine_mode cmode;
2775 enum machine_mode imode;
2776 unsigned ibitsize;
2777
2778 if (GET_CODE (cplx) == CONCAT)
2779 {
2780 emit_move_insn (XEXP (cplx, imag_p), val);
2781 return;
2782 }
2783
2784 cmode = GET_MODE (cplx);
2785 imode = GET_MODE_INNER (cmode);
2786 ibitsize = GET_MODE_BITSIZE (imode);
2787
2788 /* For MEMs simplify_gen_subreg may generate an invalid new address
2789 because, e.g., the original address is considered mode-dependent
2790 by the target, which restricts simplify_subreg from invoking
2791 adjust_address_nv. Instead of preparing fallback support for an
2792 invalid address, we call adjust_address_nv directly. */
2793 if (MEM_P (cplx))
2794 {
2795 emit_move_insn (adjust_address_nv (cplx, imode,
2796 imag_p ? GET_MODE_SIZE (imode) : 0),
2797 val);
2798 return;
2799 }
2800
2801 /* If the sub-object is at least word sized, then we know that subregging
2802 will work. This special case is important, since store_bit_field
2803 wants to operate on integer modes, and there's rarely an OImode to
2804 correspond to TCmode. */
2805 if (ibitsize >= BITS_PER_WORD
2806 /* For hard regs we have exact predicates. Assume we can split
2807 the original object if it spans an even number of hard regs.
2808 This special case is important for SCmode on 64-bit platforms
2809 where the natural size of floating-point regs is 32-bit. */
2810 || (REG_P (cplx)
2811 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2812 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2813 {
2814 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2815 imag_p ? GET_MODE_SIZE (imode) : 0);
2816 if (part)
2817 {
2818 emit_move_insn (part, val);
2819 return;
2820 }
2821 else
2822 /* simplify_gen_subreg may fail for sub-word MEMs. */
2823 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2824 }
2825
2826 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
2827 }
2828
2829 /* Extract one of the components of the complex value CPLX. Extract the
2830 real part if IMAG_P is false, and the imaginary part if it's true. */
2831
2832 static rtx
2833 read_complex_part (rtx cplx, bool imag_p)
2834 {
2835 enum machine_mode cmode, imode;
2836 unsigned ibitsize;
2837
2838 if (GET_CODE (cplx) == CONCAT)
2839 return XEXP (cplx, imag_p);
2840
2841 cmode = GET_MODE (cplx);
2842 imode = GET_MODE_INNER (cmode);
2843 ibitsize = GET_MODE_BITSIZE (imode);
2844
2845 /* Special case reads from complex constants that got spilled to memory. */
2846 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2847 {
2848 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2849 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2850 {
2851 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2852 if (CONSTANT_CLASS_P (part))
2853 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2854 }
2855 }
2856
2857 /* For MEMs simplify_gen_subreg may generate an invalid new address
2858 because, e.g., the original address is considered mode-dependent
2859 by the target, which restricts simplify_subreg from invoking
2860 adjust_address_nv. Instead of preparing fallback support for an
2861 invalid address, we call adjust_address_nv directly. */
2862 if (MEM_P (cplx))
2863 return adjust_address_nv (cplx, imode,
2864 imag_p ? GET_MODE_SIZE (imode) : 0);
2865
2866 /* If the sub-object is at least word sized, then we know that subregging
2867 will work. This special case is important, since extract_bit_field
2868 wants to operate on integer modes, and there's rarely an OImode to
2869 correspond to TCmode. */
2870 if (ibitsize >= BITS_PER_WORD
2871 /* For hard regs we have exact predicates. Assume we can split
2872 the original object if it spans an even number of hard regs.
2873 This special case is important for SCmode on 64-bit platforms
2874 where the natural size of floating-point regs is 32-bit. */
2875 || (REG_P (cplx)
2876 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2877 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2878 {
2879 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2880 imag_p ? GET_MODE_SIZE (imode) : 0);
2881 if (ret)
2882 return ret;
2883 else
2884 /* simplify_gen_subreg may fail for sub-word MEMs. */
2885 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2886 }
2887
2888 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2889 true, NULL_RTX, imode, imode);
2890 }
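
/* Illustrative sketch only: swapping the real and imaginary parts of a
   complex value in place using the two helpers above.  Copying each
   part into a pseudo first keeps the writes from clobbering a part that
   has not yet been consumed (e.g. when CPLX is a CONCAT of registers).  */
#if 0
static void
example_swap_complex_parts (rtx cplx)
{
  rtx re = copy_to_reg (read_complex_part (cplx, false));
  rtx im = copy_to_reg (read_complex_part (cplx, true));

  write_complex_part (cplx, im, false);
  write_complex_part (cplx, re, true);
}
#endif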
2891 \f
2892 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
2893 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2894 represented in NEW_MODE. If FORCE is true, this will never happen, as
2895 we'll force-create a SUBREG if needed. */
2896
2897 static rtx
2898 emit_move_change_mode (enum machine_mode new_mode,
2899 enum machine_mode old_mode, rtx x, bool force)
2900 {
2901 rtx ret;
2902
2903 if (push_operand (x, GET_MODE (x)))
2904 {
2905 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
2906 MEM_COPY_ATTRIBUTES (ret, x);
2907 }
2908 else if (MEM_P (x))
2909 {
2910 /* We don't have to worry about changing the address since the
2911 size in bytes is supposed to be the same. */
2912 if (reload_in_progress)
2913 {
2914 /* Copy the MEM to change the mode and move any
2915 substitutions from the old MEM to the new one. */
2916 ret = adjust_address_nv (x, new_mode, 0);
2917 copy_replacements (x, ret);
2918 }
2919 else
2920 ret = adjust_address (x, new_mode, 0);
2921 }
2922 else
2923 {
2924 /* Note that we do want simplify_subreg's behavior of validating
2925 that the new mode is ok for a hard register. If we were to use
2926 simplify_gen_subreg, we would create the subreg, but would
2927 probably run into the target not being able to implement it. */
2928 /* Except, of course, when FORCE is true, when this is exactly what
2929 we want. Which is needed for CCmodes on some targets. */
2930 if (force)
2931 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
2932 else
2933 ret = simplify_subreg (new_mode, x, old_mode, 0);
2934 }
2935
2936 return ret;
2937 }
2938
2939 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2940 an integer mode of the same size as MODE. Returns the instruction
2941 emitted, or NULL if such a move could not be generated. */
2942
2943 static rtx
2944 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
2945 {
2946 enum machine_mode imode;
2947 enum insn_code code;
2948
2949 /* There must exist a mode of the exact size we require. */
2950 imode = int_mode_for_mode (mode);
2951 if (imode == BLKmode)
2952 return NULL_RTX;
2953
2954 /* The target must support moves in this mode. */
2955 code = optab_handler (mov_optab, imode)->insn_code;
2956 if (code == CODE_FOR_nothing)
2957 return NULL_RTX;
2958
2959 x = emit_move_change_mode (imode, mode, x, force);
2960 if (x == NULL_RTX)
2961 return NULL_RTX;
2962 y = emit_move_change_mode (imode, mode, y, force);
2963 if (y == NULL_RTX)
2964 return NULL_RTX;
2965 return emit_insn (GEN_FCN (code) (x, y));
2966 }
2967
2968 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
2969 Return an equivalent MEM that does not use an auto-increment. */
2970
2971 static rtx
2972 emit_move_resolve_push (enum machine_mode mode, rtx x)
2973 {
2974 enum rtx_code code = GET_CODE (XEXP (x, 0));
2975 HOST_WIDE_INT adjust;
2976 rtx temp;
2977
2978 adjust = GET_MODE_SIZE (mode);
2979 #ifdef PUSH_ROUNDING
2980 adjust = PUSH_ROUNDING (adjust);
2981 #endif
2982 if (code == PRE_DEC || code == POST_DEC)
2983 adjust = -adjust;
2984 else if (code == PRE_MODIFY || code == POST_MODIFY)
2985 {
2986 rtx expr = XEXP (XEXP (x, 0), 1);
2987 HOST_WIDE_INT val;
2988
2989 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
2990 gcc_assert (GET_CODE (XEXP (expr, 1)) == CONST_INT);
2991 val = INTVAL (XEXP (expr, 1));
2992 if (GET_CODE (expr) == MINUS)
2993 val = -val;
2994 gcc_assert (adjust == val || adjust == -val);
2995 adjust = val;
2996 }
2997
2998 /* Do not use anti_adjust_stack, since we don't want to update
2999 stack_pointer_delta. */
3000 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3001 GEN_INT (adjust), stack_pointer_rtx,
3002 0, OPTAB_LIB_WIDEN);
3003 if (temp != stack_pointer_rtx)
3004 emit_move_insn (stack_pointer_rtx, temp);
3005
3006 switch (code)
3007 {
3008 case PRE_INC:
3009 case PRE_DEC:
3010 case PRE_MODIFY:
3011 temp = stack_pointer_rtx;
3012 break;
3013 case POST_INC:
3014 case POST_DEC:
3015 case POST_MODIFY:
3016 temp = plus_constant (stack_pointer_rtx, -adjust);
3017 break;
3018 default:
3019 gcc_unreachable ();
3020 }
3021
3022 return replace_equiv_address (x, temp);
3023 }
3024
3025 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3026 X is known to satisfy push_operand, and MODE is known to be complex.
3027 Returns the last instruction emitted. */
3028
3029 rtx
3030 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
3031 {
3032 enum machine_mode submode = GET_MODE_INNER (mode);
3033 bool imag_first;
3034
3035 #ifdef PUSH_ROUNDING
3036 unsigned int submodesize = GET_MODE_SIZE (submode);
3037
3038 /* In case we output to the stack, but the size is smaller than the
3039 machine can push exactly, we need to use move instructions. */
3040 if (PUSH_ROUNDING (submodesize) != submodesize)
3041 {
3042 x = emit_move_resolve_push (mode, x);
3043 return emit_move_insn (x, y);
3044 }
3045 #endif
3046
3047 /* Note that the real part always precedes the imag part in memory
3048 regardless of machine's endianness. */
3049 switch (GET_CODE (XEXP (x, 0)))
3050 {
3051 case PRE_DEC:
3052 case POST_DEC:
3053 imag_first = true;
3054 break;
3055 case PRE_INC:
3056 case POST_INC:
3057 imag_first = false;
3058 break;
3059 default:
3060 gcc_unreachable ();
3061 }
3062
3063 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3064 read_complex_part (y, imag_first));
3065 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3066 read_complex_part (y, !imag_first));
3067 }
3068
3069 /* A subroutine of emit_move_complex. Perform the move from Y to X
3070 via two moves of the parts. Returns the last instruction emitted. */
3071
3072 rtx
3073 emit_move_complex_parts (rtx x, rtx y)
3074 {
3075 /* Show the output dies here. This is necessary for SUBREGs
3076 of pseudos since we cannot track their lifetimes correctly;
3077 hard regs shouldn't appear here except as return values. */
3078 if (!reload_completed && !reload_in_progress
3079 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3080 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3081
3082 write_complex_part (x, read_complex_part (y, false), false);
3083 write_complex_part (x, read_complex_part (y, true), true);
3084
3085 return get_last_insn ();
3086 }
3087
3088 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3089 MODE is known to be complex. Returns the last instruction emitted. */
3090
3091 static rtx
3092 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3093 {
3094 bool try_int;
3095
3096 /* Need to take special care for pushes, to maintain proper ordering
3097 of the data, and possibly extra padding. */
3098 if (push_operand (x, mode))
3099 return emit_move_complex_push (mode, x, y);
3100
3101 /* See if we can coerce the target into moving both values at once. */
3102
3103 /* Move floating point as parts. */
3104 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3105 && optab_handler (mov_optab, GET_MODE_INNER (mode))->insn_code != CODE_FOR_nothing)
3106 try_int = false;
3107 /* Not possible if the values are inherently not adjacent. */
3108 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3109 try_int = false;
3110 /* Is possible if both are registers (or subregs of registers). */
3111 else if (register_operand (x, mode) && register_operand (y, mode))
3112 try_int = true;
3113 /* If one of the operands is a memory, and alignment constraints
3114 are friendly enough, we may be able to do combined memory operations.
3115 We do not attempt this if Y is a constant because that combination is
3116 usually better with the by-parts thing below. */
3117 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3118 && (!STRICT_ALIGNMENT
3119 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3120 try_int = true;
3121 else
3122 try_int = false;
3123
3124 if (try_int)
3125 {
3126 rtx ret;
3127
3128 /* For memory to memory moves, optimal behavior can be had with the
3129 existing block move logic. */
3130 if (MEM_P (x) && MEM_P (y))
3131 {
3132 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3133 BLOCK_OP_NO_LIBCALL);
3134 return get_last_insn ();
3135 }
3136
3137 ret = emit_move_via_integer (mode, x, y, true);
3138 if (ret)
3139 return ret;
3140 }
3141
3142 return emit_move_complex_parts (x, y);
3143 }
3144
3145 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3146 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3147
3148 static rtx
3149 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3150 {
3151 rtx ret;
3152
3153 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3154 if (mode != CCmode)
3155 {
3156 enum insn_code code = optab_handler (mov_optab, CCmode)->insn_code;
3157 if (code != CODE_FOR_nothing)
3158 {
3159 x = emit_move_change_mode (CCmode, mode, x, true);
3160 y = emit_move_change_mode (CCmode, mode, y, true);
3161 return emit_insn (GEN_FCN (code) (x, y));
3162 }
3163 }
3164
3165 /* Otherwise, find the MODE_INT mode of the same width. */
3166 ret = emit_move_via_integer (mode, x, y, false);
3167 gcc_assert (ret != NULL);
3168 return ret;
3169 }
3170
3171 /* Return true if word I of OP lies entirely in the
3172 undefined bits of a paradoxical subreg. */
3173
3174 static bool
3175 undefined_operand_subword_p (const_rtx op, int i)
3176 {
3177 enum machine_mode innermode, innermostmode;
3178 int offset;
3179 if (GET_CODE (op) != SUBREG)
3180 return false;
3181 innermode = GET_MODE (op);
3182 innermostmode = GET_MODE (SUBREG_REG (op));
3183 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3184 /* The SUBREG_BYTE represents offset, as if the value were stored in
3185 memory, except for a paradoxical subreg where we define
3186 SUBREG_BYTE to be 0; undo this exception as in
3187 simplify_subreg. */
3188 if (SUBREG_BYTE (op) == 0
3189 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3190 {
3191 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3192 if (WORDS_BIG_ENDIAN)
3193 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3194 if (BYTES_BIG_ENDIAN)
3195 offset += difference % UNITS_PER_WORD;
3196 }
3197 if (offset >= GET_MODE_SIZE (innermostmode)
3198 || offset <= -GET_MODE_SIZE (word_mode))
3199 return true;
3200 return false;
3201 }
3202
3203 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3204 MODE is any multi-word or full-word mode that lacks a move_insn
3205 pattern. Note that you will get better code if you define such
3206 patterns, even if they must turn into multiple assembler instructions. */
3207
3208 static rtx
3209 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3210 {
3211 rtx last_insn = 0;
3212 rtx seq, inner;
3213 bool need_clobber;
3214 int i;
3215
3216 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3217
3218 /* If X is a push on the stack, do the push now and replace
3219 X with a reference to the stack pointer. */
3220 if (push_operand (x, mode))
3221 x = emit_move_resolve_push (mode, x);
3222
3223 /* If we are in reload, see if either operand is a MEM whose address
3224 is scheduled for replacement. */
3225 if (reload_in_progress && MEM_P (x)
3226 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3227 x = replace_equiv_address_nv (x, inner);
3228 if (reload_in_progress && MEM_P (y)
3229 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3230 y = replace_equiv_address_nv (y, inner);
3231
3232 start_sequence ();
3233
3234 need_clobber = false;
3235 for (i = 0;
3236 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3237 i++)
3238 {
3239 rtx xpart = operand_subword (x, i, 1, mode);
3240 rtx ypart;
3241
3242 /* Do not generate code for a move if it would come entirely
3243 from the undefined bits of a paradoxical subreg. */
3244 if (undefined_operand_subword_p (y, i))
3245 continue;
3246
3247 ypart = operand_subword (y, i, 1, mode);
3248
3249 /* If we can't get a part of Y, put Y into memory if it is a
3250 constant. Otherwise, force it into a register. Then we must
3251 be able to get a part of Y. */
3252 if (ypart == 0 && CONSTANT_P (y))
3253 {
3254 y = use_anchored_address (force_const_mem (mode, y));
3255 ypart = operand_subword (y, i, 1, mode);
3256 }
3257 else if (ypart == 0)
3258 ypart = operand_subword_force (y, i, mode);
3259
3260 gcc_assert (xpart && ypart);
3261
3262 need_clobber |= (GET_CODE (xpart) == SUBREG);
3263
3264 last_insn = emit_move_insn (xpart, ypart);
3265 }
3266
3267 seq = get_insns ();
3268 end_sequence ();
3269
3270 /* Show the output dies here. This is necessary for SUBREGs
3271 of pseudos since we cannot track their lifetimes correctly;
3272 hard regs shouldn't appear here except as return values.
3273 We never want to emit such a clobber after reload. */
3274 if (x != y
3275 && ! (reload_in_progress || reload_completed)
3276 && need_clobber != 0)
3277 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3278
3279 emit_insn (seq);
3280
3281 return last_insn;
3282 }
3283
3284 /* Low level part of emit_move_insn.
3285 Called just like emit_move_insn, but assumes X and Y
3286 are basically valid. */
3287
3288 rtx
3289 emit_move_insn_1 (rtx x, rtx y)
3290 {
3291 enum machine_mode mode = GET_MODE (x);
3292 enum insn_code code;
3293
3294 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3295
3296 code = optab_handler (mov_optab, mode)->insn_code;
3297 if (code != CODE_FOR_nothing)
3298 return emit_insn (GEN_FCN (code) (x, y));
3299
3300 /* Expand complex moves by moving real part and imag part. */
3301 if (COMPLEX_MODE_P (mode))
3302 return emit_move_complex (mode, x, y);
3303
3304 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3305 || ALL_FIXED_POINT_MODE_P (mode))
3306 {
3307 rtx result = emit_move_via_integer (mode, x, y, true);
3308
3309 /* If we can't find an integer mode, use multi words. */
3310 if (result)
3311 return result;
3312 else
3313 return emit_move_multi_word (mode, x, y);
3314 }
3315
3316 if (GET_MODE_CLASS (mode) == MODE_CC)
3317 return emit_move_ccmode (mode, x, y);
3318
3319 /* Try using a move pattern for the corresponding integer mode. This is
3320 only safe when simplify_subreg can convert MODE constants into integer
3321 constants. At present, it can only do this reliably if the value
3322 fits within a HOST_WIDE_INT. */
3323 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3324 {
3325 rtx ret = emit_move_via_integer (mode, x, y, false);
3326 if (ret)
3327 return ret;
3328 }
3329
3330 return emit_move_multi_word (mode, x, y);
3331 }
3332
3333 /* Generate code to copy Y into X.
3334 Both Y and X must have the same mode, except that
3335 Y can be a constant with VOIDmode.
3336 This mode cannot be BLKmode; use emit_block_move for that.
3337
3338 Return the last instruction emitted. */
3339
3340 rtx
3341 emit_move_insn (rtx x, rtx y)
3342 {
3343 enum machine_mode mode = GET_MODE (x);
3344 rtx y_cst = NULL_RTX;
3345 rtx last_insn, set;
3346
3347 gcc_assert (mode != BLKmode
3348 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3349
3350 if (CONSTANT_P (y))
3351 {
3352 if (optimize
3353 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3354 && (last_insn = compress_float_constant (x, y)))
3355 return last_insn;
3356
3357 y_cst = y;
3358
3359 if (!LEGITIMATE_CONSTANT_P (y))
3360 {
3361 y = force_const_mem (mode, y);
3362
3363 /* If the target's cannot_force_const_mem prevented the spill,
3364 assume that the target's move expanders will also take care
3365 of the non-legitimate constant. */
3366 if (!y)
3367 y = y_cst;
3368 else
3369 y = use_anchored_address (y);
3370 }
3371 }
3372
3373 /* If X or Y are memory references, verify that their addresses are valid
3374 for the machine. */
3375 if (MEM_P (x)
3376 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3377 && ! push_operand (x, GET_MODE (x)))
3378 || (flag_force_addr
3379 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3380 x = validize_mem (x);
3381
3382 if (MEM_P (y)
3383 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3384 || (flag_force_addr
3385 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3386 y = validize_mem (y);
3387
3388 gcc_assert (mode != BLKmode);
3389
3390 last_insn = emit_move_insn_1 (x, y);
3391
3392 if (y_cst && REG_P (x)
3393 && (set = single_set (last_insn)) != NULL_RTX
3394 && SET_DEST (set) == x
3395 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3396 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3397
3398 return last_insn;
3399 }
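
/* Illustrative sketch only: the typical use of emit_move_insn, copying
   an arbitrary value into a fresh pseudo.  SRC is assumed to be SImode
   (or a VOIDmode constant such as a CONST_INT).  */
#if 0
static rtx
example_move_to_pseudo (rtx src)
{
  rtx dst = gen_reg_rtx (SImode);

  emit_move_insn (dst, src);
  return dst;
}
#endif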
3400
3401 /* If Y is representable exactly in a narrower mode, and the target can
3402 perform the extension directly from constant or memory, then emit the
3403 move as an extension. */
3404
3405 static rtx
3406 compress_float_constant (rtx x, rtx y)
3407 {
3408 enum machine_mode dstmode = GET_MODE (x);
3409 enum machine_mode orig_srcmode = GET_MODE (y);
3410 enum machine_mode srcmode;
3411 REAL_VALUE_TYPE r;
3412 int oldcost, newcost;
3413
3414 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3415
3416 if (LEGITIMATE_CONSTANT_P (y))
3417 oldcost = rtx_cost (y, SET);
3418 else
3419 oldcost = rtx_cost (force_const_mem (dstmode, y), SET);
3420
3421 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3422 srcmode != orig_srcmode;
3423 srcmode = GET_MODE_WIDER_MODE (srcmode))
3424 {
3425 enum insn_code ic;
3426 rtx trunc_y, last_insn;
3427
3428 /* Skip if the target can't extend this way. */
3429 ic = can_extend_p (dstmode, srcmode, 0);
3430 if (ic == CODE_FOR_nothing)
3431 continue;
3432
3433 /* Skip if the narrowed value isn't exact. */
3434 if (! exact_real_truncate (srcmode, &r))
3435 continue;
3436
3437 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3438
3439 if (LEGITIMATE_CONSTANT_P (trunc_y))
3440 {
3441 /* Skip if the target needs extra instructions to perform
3442 the extension. */
3443 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3444 continue;
3445 /* This is valid, but may not be cheaper than the original. */
3446 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3447 if (oldcost < newcost)
3448 continue;
3449 }
3450 else if (float_extend_from_mem[dstmode][srcmode])
3451 {
3452 trunc_y = force_const_mem (srcmode, trunc_y);
3453 /* This is valid, but may not be cheaper than the original. */
3454 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3455 if (oldcost < newcost)
3456 continue;
3457 trunc_y = validize_mem (trunc_y);
3458 }
3459 else
3460 continue;
3461
3462 /* For CSE's benefit, force the compressed constant pool entry
3463 into a new pseudo. This constant may be used in different modes,
3464 and if not, combine will put things back together for us. */
3465 trunc_y = force_reg (srcmode, trunc_y);
3466 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3467 last_insn = get_last_insn ();
3468
3469 if (REG_P (x))
3470 set_unique_reg_note (last_insn, REG_EQUAL, y);
3471
3472 return last_insn;
3473 }
3474
3475 return NULL_RTX;
3476 }
3477 \f
3478 /* Pushing data onto the stack. */
3479
3480 /* Push a block of length SIZE (perhaps variable)
3481 and return an rtx to address the beginning of the block.
3482 The value may be virtual_outgoing_args_rtx.
3483
3484 EXTRA is the number of bytes of padding to push in addition to SIZE.
3485 BELOW nonzero means this padding comes at low addresses;
3486 otherwise, the padding comes at high addresses. */
3487
3488 rtx
3489 push_block (rtx size, int extra, int below)
3490 {
3491 rtx temp;
3492
3493 size = convert_modes (Pmode, ptr_mode, size, 1);
3494 if (CONSTANT_P (size))
3495 anti_adjust_stack (plus_constant (size, extra));
3496 else if (REG_P (size) && extra == 0)
3497 anti_adjust_stack (size);
3498 else
3499 {
3500 temp = copy_to_mode_reg (Pmode, size);
3501 if (extra != 0)
3502 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3503 temp, 0, OPTAB_LIB_WIDEN);
3504 anti_adjust_stack (temp);
3505 }
3506
3507 #ifndef STACK_GROWS_DOWNWARD
3508 if (0)
3509 #else
3510 if (1)
3511 #endif
3512 {
3513 temp = virtual_outgoing_args_rtx;
3514 if (extra != 0 && below)
3515 temp = plus_constant (temp, extra);
3516 }
3517 else
3518 {
3519 if (GET_CODE (size) == CONST_INT)
3520 temp = plus_constant (virtual_outgoing_args_rtx,
3521 -INTVAL (size) - (below ? 0 : extra));
3522 else if (extra != 0 && !below)
3523 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3524 negate_rtx (Pmode, plus_constant (size, extra)));
3525 else
3526 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3527 negate_rtx (Pmode, size));
3528 }
3529
3530 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3531 }
3532
3533 #ifdef PUSH_ROUNDING
3534
3535 /* Emit single push insn. */
3536
3537 static void
3538 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3539 {
3540 rtx dest_addr;
3541 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3542 rtx dest;
3543 enum insn_code icode;
3544 insn_operand_predicate_fn pred;
3545
3546 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3547 /* If there is a push pattern, use it. Otherwise try the old way of throwing
3548 a MEM representing the push operation to the move expander. */
3549 icode = optab_handler (push_optab, mode)->insn_code;
3550 if (icode != CODE_FOR_nothing)
3551 {
3552 if (((pred = insn_data[(int) icode].operand[0].predicate)
3553 && !((*pred) (x, mode))))
3554 x = force_reg (mode, x);
3555 emit_insn (GEN_FCN (icode) (x));
3556 return;
3557 }
3558 if (GET_MODE_SIZE (mode) == rounded_size)
3559 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3560 /* If we are to pad downward, adjust the stack pointer first and
3561 then store X into the stack location using an offset. This is
3562 because emit_move_insn does not know how to pad; it does not have
3563 access to type. */
3564 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3565 {
3566 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3567 HOST_WIDE_INT offset;
3568
3569 emit_move_insn (stack_pointer_rtx,
3570 expand_binop (Pmode,
3571 #ifdef STACK_GROWS_DOWNWARD
3572 sub_optab,
3573 #else
3574 add_optab,
3575 #endif
3576 stack_pointer_rtx,
3577 GEN_INT (rounded_size),
3578 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3579
3580 offset = (HOST_WIDE_INT) padding_size;
3581 #ifdef STACK_GROWS_DOWNWARD
3582 if (STACK_PUSH_CODE == POST_DEC)
3583 /* We have already decremented the stack pointer, so get the
3584 previous value. */
3585 offset += (HOST_WIDE_INT) rounded_size;
3586 #else
3587 if (STACK_PUSH_CODE == POST_INC)
3588 /* We have already incremented the stack pointer, so get the
3589 previous value. */
3590 offset -= (HOST_WIDE_INT) rounded_size;
3591 #endif
3592 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3593 }
3594 else
3595 {
3596 #ifdef STACK_GROWS_DOWNWARD
3597 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3598 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3599 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3600 #else
3601 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3602 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3603 GEN_INT (rounded_size));
3604 #endif
3605 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3606 }
3607
3608 dest = gen_rtx_MEM (mode, dest_addr);
3609
3610 if (type != 0)
3611 {
3612 set_mem_attributes (dest, type, 1);
3613
3614 if (flag_optimize_sibling_calls)
3615 /* Function incoming arguments may overlap with sibling call
3616 outgoing arguments and we cannot allow reordering of reads
3617 from function arguments with stores to outgoing arguments
3618 of sibling calls. */
3619 set_mem_alias_set (dest, 0);
3620 }
3621 emit_move_insn (dest, x);
3622 }
3623 #endif
3624
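Aside (not part of expr.c): a minimal sketch of the pad-downward path of emit_single_push_insn on a downward-growing stack: round the size up, adjust the stack pointer by the rounded amount, then store at an offset equal to the padding. The 4-byte push granularity is a hypothetical stand-in for PUSH_ROUNDING.

#include <stdio.h>

#define PUSH_GRANULE 4                         /* hypothetical granularity */
#define PUSH_ROUND(n) (((n) + PUSH_GRANULE - 1) & ~(long) (PUSH_GRANULE - 1))

int
main (void)
{
  long sp = 0x1000;
  long size = 1;                      /* e.g. a one-byte argument */
  long rounded = PUSH_ROUND (size);   /* 4 */
  long padding = rounded - size;      /* 3 */

  sp -= rounded;                      /* adjust the stack pointer first... */
  long dest = sp + padding;           /* ...then store at SP + padding so the
                                         value sits at the high end of the
                                         rounded slot.  */
  printf ("sp=%#lx dest=%#lx\n", sp, dest);
  return 0;
}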
3625 /* Generate code to push X onto the stack, assuming it has mode MODE and
3626 type TYPE.
3627 MODE is redundant except when X is a CONST_INT (since they don't
3628 carry mode info).
3629 SIZE is an rtx for the size of data to be copied (in bytes),
3630 needed only if X is BLKmode.
3631
3632 ALIGN (in bits) is maximum alignment we can assume.
3633
3634 If PARTIAL and REG are both nonzero, then copy that many of the first
3635 bytes of X into registers starting with REG, and push the rest of X.
3636 The amount of space pushed is decreased by PARTIAL bytes.
3637 REG must be a hard register in this case.
3638 If REG is zero but PARTIAL is not, take all other actions for an
3639 argument partially in registers, but do not actually load any
3640 registers.
3641
3642 EXTRA is the amount in bytes of extra space to leave next to this arg.
3643 This is ignored if an argument block has already been allocated.
3644
3645 On a machine that lacks real push insns, ARGS_ADDR is the address of
3646 the bottom of the argument block for this call. We use indexing off there
3647 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3648 argument block has not been preallocated.
3649
3650 ARGS_SO_FAR is the size of args previously pushed for this call.
3651
3652 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3653 for arguments passed in registers. If nonzero, it will be the number
3654 of bytes required. */
3655
3656 void
3657 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3658 unsigned int align, int partial, rtx reg, int extra,
3659 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3660 rtx alignment_pad)
3661 {
3662 rtx xinner;
3663 enum direction stack_direction
3664 #ifdef STACK_GROWS_DOWNWARD
3665 = downward;
3666 #else
3667 = upward;
3668 #endif
3669
3670 /* Decide where to pad the argument: `downward' for below,
3671 `upward' for above, or `none' for don't pad it.
3672 Default is below for small data on big-endian machines; else above. */
3673 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3674
3675 /* Invert direction if stack is post-decrement.
3676 FIXME: why? */
3677 if (STACK_PUSH_CODE == POST_DEC)
3678 if (where_pad != none)
3679 where_pad = (where_pad == downward ? upward : downward);
3680
3681 xinner = x;
3682
3683 if (mode == BLKmode
3684 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
3685 {
3686 /* Copy a block into the stack, entirely or partially. */
3687
3688 rtx temp;
3689 int used;
3690 int offset;
3691 int skip;
3692
3693 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3694 used = partial - offset;
3695
3696 if (mode != BLKmode)
3697 {
3698 /* A value is to be stored in an insufficiently aligned
3699 stack slot; copy via a suitably aligned slot if
3700 necessary. */
3701 size = GEN_INT (GET_MODE_SIZE (mode));
3702 if (!MEM_P (xinner))
3703 {
3704 temp = assign_temp (type, 0, 1, 1);
3705 emit_move_insn (temp, xinner);
3706 xinner = temp;
3707 }
3708 }
3709
3710 gcc_assert (size);
3711
3712 /* USED is now the # of bytes we need not copy to the stack
3713 because registers will take care of them. */
3714
3715 if (partial != 0)
3716 xinner = adjust_address (xinner, BLKmode, used);
3717
3718 /* If the partial register-part of the arg counts in its stack size,
3719 skip the part of stack space corresponding to the registers.
3720 Otherwise, start copying to the beginning of the stack space,
3721 by setting SKIP to 0. */
3722 skip = (reg_parm_stack_space == 0) ? 0 : used;
3723
3724 #ifdef PUSH_ROUNDING
3725 /* Do it with several push insns if that doesn't take lots of insns
3726 and if there is no difficulty with push insns that skip bytes
3727 on the stack for alignment purposes. */
3728 if (args_addr == 0
3729 && PUSH_ARGS
3730 && GET_CODE (size) == CONST_INT
3731 && skip == 0
3732 && MEM_ALIGN (xinner) >= align
3733 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3734 /* Here we avoid the case of a structure whose weak alignment
3735 forces many pushes of a small amount of data,
3736 and such small pushes do rounding that causes trouble. */
3737 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3738 || align >= BIGGEST_ALIGNMENT
3739 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3740 == (align / BITS_PER_UNIT)))
3741 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3742 {
3743 /* Push padding now if padding above and stack grows down,
3744 or if padding below and stack grows up.
3745 But if space already allocated, this has already been done. */
3746 if (extra && args_addr == 0
3747 && where_pad != none && where_pad != stack_direction)
3748 anti_adjust_stack (GEN_INT (extra));
3749
3750 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3751 }
3752 else
3753 #endif /* PUSH_ROUNDING */
3754 {
3755 rtx target;
3756
3757 /* Otherwise make space on the stack and copy the data
3758 to the address of that space. */
3759
3760 /* Deduct words put into registers from the size we must copy. */
3761 if (partial != 0)
3762 {
3763 if (GET_CODE (size) == CONST_INT)
3764 size = GEN_INT (INTVAL (size) - used);
3765 else
3766 size = expand_binop (GET_MODE (size), sub_optab, size,
3767 GEN_INT (used), NULL_RTX, 0,
3768 OPTAB_LIB_WIDEN);
3769 }
3770
3771 /* Get the address of the stack space.
3772 In this case, we do not deal with EXTRA separately.
3773 A single stack adjust will do. */
3774 if (! args_addr)
3775 {
3776 temp = push_block (size, extra, where_pad == downward);
3777 extra = 0;
3778 }
3779 else if (GET_CODE (args_so_far) == CONST_INT)
3780 temp = memory_address (BLKmode,
3781 plus_constant (args_addr,
3782 skip + INTVAL (args_so_far)));
3783 else
3784 temp = memory_address (BLKmode,
3785 plus_constant (gen_rtx_PLUS (Pmode,
3786 args_addr,
3787 args_so_far),
3788 skip));
3789
3790 if (!ACCUMULATE_OUTGOING_ARGS)
3791 {
3792 /* If the source is referenced relative to the stack pointer,
3793 copy it to another register to stabilize it. We do not need
3794 to do this if we know that we won't be changing sp. */
3795
3796 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3797 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3798 temp = copy_to_reg (temp);
3799 }
3800
3801 target = gen_rtx_MEM (BLKmode, temp);
3802
3803 /* We do *not* set_mem_attributes here, because incoming arguments
3804 may overlap with sibling call outgoing arguments and we cannot
3805 allow reordering of reads from function arguments with stores
3806 to outgoing arguments of sibling calls. We do, however, want
3807 to record the alignment of the stack slot. */
3808 /* ALIGN may well be better aligned than TYPE, e.g. due to
3809 PARM_BOUNDARY. Assume the caller isn't lying. */
3810 set_mem_align (target, align);
3811
3812 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3813 }
3814 }
3815 else if (partial > 0)
3816 {
3817 /* Scalar partly in registers. */
3818
3819 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3820 int i;
3821 int not_stack;
3822 /* # bytes of start of argument
3823 that we must make space for but need not store. */
3824 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3825 int args_offset = INTVAL (args_so_far);
3826 int skip;
3827
3828 /* Push padding now if padding above and stack grows down,
3829 or if padding below and stack grows up.
3830 But if space already allocated, this has already been done. */
3831 if (extra && args_addr == 0
3832 && where_pad != none && where_pad != stack_direction)
3833 anti_adjust_stack (GEN_INT (extra));
3834
3835 /* If we make space by pushing it, we might as well push
3836 the real data. Otherwise, we can leave OFFSET nonzero
3837 and leave the space uninitialized. */
3838 if (args_addr == 0)
3839 offset = 0;
3840
3841 /* Now NOT_STACK gets the number of words that we don't need to
3842 allocate on the stack. Convert OFFSET to words too. */
3843 not_stack = (partial - offset) / UNITS_PER_WORD;
3844 offset /= UNITS_PER_WORD;
3845
3846 /* If the partial register-part of the arg counts in its stack size,
3847 skip the part of stack space corresponding to the registers.
3848 Otherwise, start copying to the beginning of the stack space,
3849 by setting SKIP to 0. */
3850 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3851
3852 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3853 x = validize_mem (force_const_mem (mode, x));
3854
3855 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3856 SUBREGs of such registers are not allowed. */
3857 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3858 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3859 x = copy_to_reg (x);
3860
3861 /* Loop over all the words allocated on the stack for this arg. */
3862 /* We can do it by words, because any scalar bigger than a word
3863 has a size that is a multiple of a word. */
3864 #ifndef PUSH_ARGS_REVERSED
3865 for (i = not_stack; i < size; i++)
3866 #else
3867 for (i = size - 1; i >= not_stack; i--)
3868 #endif
3869 if (i >= not_stack + offset)
3870 emit_push_insn (operand_subword_force (x, i, mode),
3871 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3872 0, args_addr,
3873 GEN_INT (args_offset + ((i - not_stack + skip)
3874 * UNITS_PER_WORD)),
3875 reg_parm_stack_space, alignment_pad);
3876 }
3877 else
3878 {
3879 rtx addr;
3880 rtx dest;
3881
3882 /* Push padding now if padding above and stack grows down,
3883 or if padding below and stack grows up.
3884 But if space already allocated, this has already been done. */
3885 if (extra && args_addr == 0
3886 && where_pad != none && where_pad != stack_direction)
3887 anti_adjust_stack (GEN_INT (extra));
3888
3889 #ifdef PUSH_ROUNDING
3890 if (args_addr == 0 && PUSH_ARGS)
3891 emit_single_push_insn (mode, x, type);
3892 else
3893 #endif
3894 {
3895 if (GET_CODE (args_so_far) == CONST_INT)
3896 addr
3897 = memory_address (mode,
3898 plus_constant (args_addr,
3899 INTVAL (args_so_far)));
3900 else
3901 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3902 args_so_far));
3903 dest = gen_rtx_MEM (mode, addr);
3904
3905 /* We do *not* set_mem_attributes here, because incoming arguments
3906 may overlap with sibling call outgoing arguments and we cannot
3907 allow reordering of reads from function arguments with stores
3908 to outgoing arguments of sibling calls. We do, however, want
3909 to record the alignment of the stack slot. */
3910 /* ALIGN may well be better aligned than TYPE, e.g. due to
3911 PARM_BOUNDARY. Assume the caller isn't lying. */
3912 set_mem_align (dest, align);
3913
3914 emit_move_insn (dest, x);
3915 }
3916 }
3917
3918 /* If part should go in registers, copy that part
3919 into the appropriate registers. Do this now, at the end,
3920 since mem-to-mem copies above may do function calls. */
3921 if (partial > 0 && reg != 0)
3922 {
3923 /* Handle calls that pass values in multiple non-contiguous locations.
3924 The Irix 6 ABI has examples of this. */
3925 if (GET_CODE (reg) == PARALLEL)
3926 emit_group_load (reg, x, type, -1);
3927 else
3928 {
3929 gcc_assert (partial % UNITS_PER_WORD == 0);
3930 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
3931 }
3932 }
3933
3934 if (extra && args_addr == 0 && where_pad == stack_direction)
3935 anti_adjust_stack (GEN_INT (extra));
3936
3937 if (alignment_pad && args_addr == 0)
3938 anti_adjust_stack (alignment_pad);
3939 }
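Aside (not part of expr.c): a minimal sketch of the word-by-word loop used above for a scalar passed partly in registers. The concrete numbers (an 8-word value, 3 words in registers, an 8-byte word) are hypothetical; they only show how NOT_STACK, SKIP and the per-word stack offsets relate.

#include <stdio.h>

int
main (void)
{
  int size_words = 8;             /* whole argument, in words */
  int not_stack = 3;              /* words already passed in registers */
  int reg_parm_stack_space = 1;   /* register part counts in the stack size */
  int args_offset = 64;           /* bytes of args already pushed */
  int units_per_word = 8;

  int skip = reg_parm_stack_space ? not_stack : 0;

  for (int i = not_stack; i < size_words; i++)
    printf ("word %d -> args_addr + %d\n",
            i, args_offset + (i - not_stack + skip) * units_per_word);
  return 0;
}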
3940 \f
3941 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3942 operations. */
3943
3944 static rtx
3945 get_subtarget (rtx x)
3946 {
3947 return (optimize
3948 || x == 0
3949 /* Only registers can be subtargets. */
3950 || !REG_P (x)
3951 /* Don't use hard regs to avoid extending their life. */
3952 || REGNO (x) < FIRST_PSEUDO_REGISTER
3953 ? 0 : x);
3954 }
3955
3956 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
3957 FIELD is a bitfield. Returns true if the optimization was successful,
3958 and there's nothing else to do. */
3959
3960 static bool
3961 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3962 unsigned HOST_WIDE_INT bitpos,
3963 enum machine_mode mode1, rtx str_rtx,
3964 tree to, tree src)
3965 {
3966 enum machine_mode str_mode = GET_MODE (str_rtx);
3967 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
3968 tree op0, op1;
3969 rtx value, result;
3970 optab binop;
3971
3972 if (mode1 != VOIDmode
3973 || bitsize >= BITS_PER_WORD
3974 || str_bitsize > BITS_PER_WORD
3975 || TREE_SIDE_EFFECTS (to)
3976 || TREE_THIS_VOLATILE (to))
3977 return false;
3978
3979 STRIP_NOPS (src);
3980 if (!BINARY_CLASS_P (src)
3981 || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
3982 return false;
3983
3984 op0 = TREE_OPERAND (src, 0);
3985 op1 = TREE_OPERAND (src, 1);
3986 STRIP_NOPS (op0);
3987
3988 if (!operand_equal_p (to, op0, 0))
3989 return false;
3990
3991 if (MEM_P (str_rtx))
3992 {
3993 unsigned HOST_WIDE_INT offset1;
3994
3995 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
3996 str_mode = word_mode;
3997 str_mode = get_best_mode (bitsize, bitpos,
3998 MEM_ALIGN (str_rtx), str_mode, 0);
3999 if (str_mode == VOIDmode)
4000 return false;
4001 str_bitsize = GET_MODE_BITSIZE (str_mode);
4002
4003 offset1 = bitpos;
4004 bitpos %= str_bitsize;
4005 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4006 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4007 }
4008 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4009 return false;
4010
4011 /* If the bit field covers the whole REG/MEM, store_field
4012 will likely generate better code. */
4013 if (bitsize >= str_bitsize)
4014 return false;
4015
4016 /* We can't handle fields split across multiple entities. */
4017 if (bitpos + bitsize > str_bitsize)
4018 return false;
4019
4020 if (BYTES_BIG_ENDIAN)
4021 bitpos = str_bitsize - bitpos - bitsize;
4022
4023 switch (TREE_CODE (src))
4024 {
4025 case PLUS_EXPR:
4026 case MINUS_EXPR:
4027 /* For now, just optimize the case of the topmost bitfield,
4028 where we don't need to do any masking, and also
4029 1-bit bitfields, where xor can be used.
4030 We might win by one instruction for the other bitfields
4031 too if insv/extv instructions aren't used, so that
4032 can be added later. */
4033 if (bitpos + bitsize != str_bitsize
4034 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4035 break;
4036
4037 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4038 value = convert_modes (str_mode,
4039 TYPE_MODE (TREE_TYPE (op1)), value,
4040 TYPE_UNSIGNED (TREE_TYPE (op1)));
4041
4042 /* We may be accessing data outside the field, which means
4043 we can alias adjacent data. */
4044 if (MEM_P (str_rtx))
4045 {
4046 str_rtx = shallow_copy_rtx (str_rtx);
4047 set_mem_alias_set (str_rtx, 0);
4048 set_mem_expr (str_rtx, 0);
4049 }
4050
4051 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
4052 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4053 {
4054 value = expand_and (str_mode, value, const1_rtx, NULL);
4055 binop = xor_optab;
4056 }
4057 value = expand_shift (LSHIFT_EXPR, str_mode, value,
4058 build_int_cst (NULL_TREE, bitpos),
4059 NULL_RTX, 1);
4060 result = expand_binop (str_mode, binop, str_rtx,
4061 value, str_rtx, 1, OPTAB_WIDEN);
4062 if (result != str_rtx)
4063 emit_move_insn (str_rtx, result);
4064 return true;
4065
4066 case BIT_IOR_EXPR:
4067 case BIT_XOR_EXPR:
4068 if (TREE_CODE (op1) != INTEGER_CST)
4069 break;
4070 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), EXPAND_NORMAL);
4071 value = convert_modes (GET_MODE (str_rtx),
4072 TYPE_MODE (TREE_TYPE (op1)), value,
4073 TYPE_UNSIGNED (TREE_TYPE (op1)));
4074
4075 /* We may be accessing data outside the field, which means
4076 we can alias adjacent data. */
4077 if (MEM_P (str_rtx))
4078 {
4079 str_rtx = shallow_copy_rtx (str_rtx);
4080 set_mem_alias_set (str_rtx, 0);
4081 set_mem_expr (str_rtx, 0);
4082 }
4083
4084 binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
4085 if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
4086 {
4087 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
4088 - 1);
4089 value = expand_and (GET_MODE (str_rtx), value, mask,
4090 NULL_RTX);
4091 }
4092 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
4093 build_int_cst (NULL_TREE, bitpos),
4094 NULL_RTX, 1);
4095 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
4096 value, str_rtx, 1, OPTAB_WIDEN);
4097 if (result != str_rtx)
4098 emit_move_insn (str_rtx, result);
4099 return true;
4100
4101 default:
4102 break;
4103 }
4104
4105 return false;
4106 }
4107
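Aside (not part of expr.c): a source-level illustration of why the topmost-bitfield case above needs no masking. The struct layout (low bits first) and the field widths are assumptions for the sake of the demo; the point is only that a carry out of the top field falls off the end of the containing word.

#include <assert.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>

struct s { uint32_t low : 20; uint32_t top : 12; };   /* assumed layout */

int
main (void)
{
  struct s a = { .low = 0x12345, .top = 7 };
  uint32_t word;
  memcpy (&word, &a, sizeof word);

  /* The rewrite at the RTL level: one plain add on the containing word,
     with the operand pre-shifted to the field's bit position.  */
  word += (uint32_t) 5 << 20;

  struct s b;
  memcpy (&b, &word, sizeof b);
  assert (b.top == 12 && b.low == a.low);   /* same effect as a.top += 5 */
  printf ("top=%u low=%#x\n", (unsigned) b.top, (unsigned) b.low);
  return 0;
}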
4108
4109 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4110 is true, try generating a nontemporal store. */
4111
4112 void
4113 expand_assignment (tree to, tree from, bool nontemporal)
4114 {
4115 rtx to_rtx = 0;
4116 rtx result;
4117
4118 /* Don't crash if the lhs of the assignment was erroneous. */
4119 if (TREE_CODE (to) == ERROR_MARK)
4120 {
4121 result = expand_normal (from);
4122 return;
4123 }
4124
4125 /* Optimize away no-op moves without side-effects. */
4126 if (operand_equal_p (to, from, 0))
4127 return;
4128
4129 /* Assignment of a structure component needs special treatment
4130 if the structure component's rtx is not simply a MEM.
4131 Assignment of an array element at a constant index, and assignment of
4132 an array element in an unaligned packed structure field, has the same
4133 problem. */
4134 if (handled_component_p (to)
4135 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4136 {
4137 enum machine_mode mode1;
4138 HOST_WIDE_INT bitsize, bitpos;
4139 tree offset;
4140 int unsignedp;
4141 int volatilep = 0;
4142 tree tem;
4143
4144 push_temp_slots ();
4145 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4146 &unsignedp, &volatilep, true);
4147
4148 /* If we are going to use store_bit_field and extract_bit_field,
4149 make sure to_rtx will be safe for multiple use. */
4150
4151 to_rtx = expand_normal (tem);
4152
4153 if (offset != 0)
4154 {
4155 rtx offset_rtx;
4156
4157 if (!MEM_P (to_rtx))
4158 {
4159 /* We can get constant negative offsets into arrays with broken
4160 user code. Translate this to a trap instead of ICEing. */
4161 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4162 expand_builtin_trap ();
4163 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4164 }
4165
4166 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4167 #ifdef POINTERS_EXTEND_UNSIGNED
4168 if (GET_MODE (offset_rtx) != Pmode)
4169 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4170 #else
4171 if (GET_MODE (offset_rtx) != ptr_mode)
4172 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4173 #endif
4174
4175 /* A constant address in TO_RTX can have VOIDmode; we must not try
4176 to call force_reg for that case, so avoid it here. */
4177 if (MEM_P (to_rtx)
4178 && GET_MODE (to_rtx) == BLKmode
4179 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4180 && bitsize > 0
4181 && (bitpos % bitsize) == 0
4182 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4183 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4184 {
4185 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4186 bitpos = 0;
4187 }
4188
4189 to_rtx = offset_address (to_rtx, offset_rtx,
4190 highest_pow2_factor_for_target (to,
4191 offset));
4192 }
4193
4194 /* Handle expand_expr of a complex value returning a CONCAT. */
4195 if (GET_CODE (to_rtx) == CONCAT)
4196 {
4197 if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE)
4198 {
4199 gcc_assert (bitpos == 0);
4200 result = store_expr (from, to_rtx, false, nontemporal);
4201 }
4202 else
4203 {
4204 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
4205 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4206 nontemporal);
4207 }
4208 }
4209 else
4210 {
4211 if (MEM_P (to_rtx))
4212 {
4213 /* If the field is at offset zero, we could have been given the
4214 DECL_RTX of the parent struct. Don't munge it. */
4215 to_rtx = shallow_copy_rtx (to_rtx);
4216
4217 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4218
4219 /* Deal with volatile and readonly fields. The former is only
4220 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4221 if (volatilep)
4222 MEM_VOLATILE_P (to_rtx) = 1;
4223 if (component_uses_parent_alias_set (to))
4224 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4225 }
4226
4227 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
4228 to_rtx, to, from))
4229 result = NULL;
4230 else
4231 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4232 TREE_TYPE (tem), get_alias_set (to),
4233 nontemporal);
4234 }
4235
4236 if (result)
4237 preserve_temp_slots (result);
4238 free_temp_slots ();
4239 pop_temp_slots ();
4240 return;
4241 }
4242
4243 /* If the rhs is a function call and its value is not an aggregate,
4244 call the function before we start to compute the lhs.
4245 This is needed for correct code for cases such as
4246 val = setjmp (buf) on machines where reference to val
4247 requires loading up part of an address in a separate insn.
4248
4249 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4250 since it might be a promoted variable where the zero- or sign- extension
4251 needs to be done. Handling this in the normal way is safe because no
4252 computation is done before the call. */
4253 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4254 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4255 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4256 && REG_P (DECL_RTL (to))))
4257 {
4258 rtx value;
4259
4260 push_temp_slots ();
4261 value = expand_normal (from);
4262 if (to_rtx == 0)
4263 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4264
4265 /* Handle calls that return values in multiple non-contiguous locations.
4266 The Irix 6 ABI has examples of this. */
4267 if (GET_CODE (to_rtx) == PARALLEL)
4268 emit_group_load (to_rtx, value, TREE_TYPE (from),
4269 int_size_in_bytes (TREE_TYPE (from)));
4270 else if (GET_MODE (to_rtx) == BLKmode)
4271 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4272 else
4273 {
4274 if (POINTER_TYPE_P (TREE_TYPE (to)))
4275 value = convert_memory_address (GET_MODE (to_rtx), value);
4276 emit_move_insn (to_rtx, value);
4277 }
4278 preserve_temp_slots (to_rtx);
4279 free_temp_slots ();
4280 pop_temp_slots ();
4281 return;
4282 }
4283
4284 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4285 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4286
4287 if (to_rtx == 0)
4288 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4289
4290 /* Don't move directly into a return register. */
4291 if (TREE_CODE (to) == RESULT_DECL
4292 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4293 {
4294 rtx temp;
4295
4296 push_temp_slots ();
4297 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
4298
4299 if (GET_CODE (to_rtx) == PARALLEL)
4300 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4301 int_size_in_bytes (TREE_TYPE (from)));
4302 else
4303 emit_move_insn (to_rtx, temp);
4304
4305 preserve_temp_slots (to_rtx);
4306 free_temp_slots ();
4307 pop_temp_slots ();
4308 return;
4309 }
4310
4311 /* In case we are returning the contents of an object which overlaps
4312 the place the value is being stored, use a safe function when copying
4313 a value through a pointer into a structure value return block. */
4314 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4315 && current_function_returns_struct
4316 && !current_function_returns_pcc_struct)
4317 {
4318 rtx from_rtx, size;
4319
4320 push_temp_slots ();
4321 size = expr_size (from);
4322 from_rtx = expand_normal (from);
4323
4324 emit_library_call (memmove_libfunc, LCT_NORMAL,
4325 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4326 XEXP (from_rtx, 0), Pmode,
4327 convert_to_mode (TYPE_MODE (sizetype),
4328 size, TYPE_UNSIGNED (sizetype)),
4329 TYPE_MODE (sizetype));
4330
4331 preserve_temp_slots (to_rtx);
4332 free_temp_slots ();
4333 pop_temp_slots ();
4334 return;
4335 }
4336
4337 /* Compute FROM and store the value in the rtx we got. */
4338
4339 push_temp_slots ();
4340 result = store_expr (from, to_rtx, 0, nontemporal);
4341 preserve_temp_slots (result);
4342 free_temp_slots ();
4343 pop_temp_slots ();
4344 return;
4345 }
4346
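Aside (not part of expr.c): the memmove libcall emitted above (rather than memcpy) matters precisely because the source referenced by the RHS may lie inside the structure-return block being written. A tiny illustration of that overlap hazard, with made-up data:

#include <stdio.h>
#include <string.h>

int
main (void)
{
  char block[16] = "abcdefghijklmno";

  /* Source and destination overlap; only memmove guarantees the copy
     behaves as if it went through a temporary.  */
  memmove (block, block + 4, 8);
  printf ("%s\n", block);   /* prints "efghijklijklmno" */
  return 0;
}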
4347 /* Emits nontemporal store insn that moves FROM to TO. Returns true if this
4348 succeeded, false otherwise. */
4349
4350 static bool
4351 emit_storent_insn (rtx to, rtx from)
4352 {
4353 enum machine_mode mode = GET_MODE (to), imode;
4354 enum insn_code code = optab_handler (storent_optab, mode)->insn_code;
4355 rtx pattern;
4356
4357 if (code == CODE_FOR_nothing)
4358 return false;
4359
4360 imode = insn_data[code].operand[0].mode;
4361 if (!insn_data[code].operand[0].predicate (to, imode))
4362 return false;
4363
4364 imode = insn_data[code].operand[1].mode;
4365 if (!insn_data[code].operand[1].predicate (from, imode))
4366 {
4367 from = copy_to_mode_reg (imode, from);
4368 if (!insn_data[code].operand[1].predicate (from, imode))
4369 return false;
4370 }
4371
4372 pattern = GEN_FCN (code) (to, from);
4373 if (pattern == NULL_RTX)
4374 return false;
4375
4376 emit_insn (pattern);
4377 return true;
4378 }
4379
4380 /* Generate code for computing expression EXP,
4381 and storing the value into TARGET.
4382
4383 If the mode is BLKmode then we may return TARGET itself.
4384 It turns out that in BLKmode it doesn't cause a problem,
4385 because C has no operators that could combine two different
4386 assignments into the same BLKmode object with different values
4387 with no sequence point. Will other languages need this to
4388 be more thorough?
4389
4390 If CALL_PARAM_P is nonzero, this is a store into a call param on the
4391 stack, and block moves may need to be treated specially.
4392
4393 If NONTEMPORAL is true, try using a nontemporal store instruction. */
4394
4395 rtx
4396 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
4397 {
4398 rtx temp;
4399 rtx alt_rtl = NULL_RTX;
4400 int dont_return_target = 0;
4401
4402 if (VOID_TYPE_P (TREE_TYPE (exp)))
4403 {
4404 /* C++ can generate ?: expressions with a throw expression in one
4405 branch and an rvalue in the other. Here, we resolve attempts to
4406 store the throw expression's nonexistent result. */
4407 gcc_assert (!call_param_p);
4408 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
4409 return NULL_RTX;
4410 }
4411 if (TREE_CODE (exp) == COMPOUND_EXPR)
4412 {
4413 /* Perform first part of compound expression, then assign from second
4414 part. */
4415 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4416 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4417 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4418 nontemporal);
4419 }
4420 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4421 {
4422 /* For a conditional expression, get a safe form of the target. Then
4423 test the condition, doing the appropriate assignment on either
4424 side. This avoids the creation of unnecessary temporaries.
4425 For non-BLKmode, it is more efficient not to do this. */
4426
4427 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4428
4429 do_pending_stack_adjust ();
4430 NO_DEFER_POP;
4431 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4432 store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4433 nontemporal);
4434 emit_jump_insn (gen_jump (lab2));
4435 emit_barrier ();
4436 emit_label (lab1);
4437 store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
4438 nontemporal);
4439 emit_label (lab2);
4440 OK_DEFER_POP;
4441
4442 return NULL_RTX;
4443 }
4444 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4445 /* If this is a scalar in a register that is stored in a wider mode
4446 than the declared mode, compute the result into its declared mode
4447 and then convert to the wider mode. Our value is the computed
4448 expression. */
4449 {
4450 rtx inner_target = 0;
4451
4452 /* We can do the conversion inside EXP, which will often result
4453 in some optimizations. Do the conversion in two steps: first
4454 change the signedness, if needed, then the extend. But don't
4455 do this if the type of EXP is a subtype of something else
4456 since then the conversion might involve more than just
4457 converting modes. */
4458 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4459 && TREE_TYPE (TREE_TYPE (exp)) == 0
4460 && (!lang_hooks.reduce_bit_field_operations
4461 || (GET_MODE_PRECISION (GET_MODE (target))
4462 == TYPE_PRECISION (TREE_TYPE (exp)))))
4463 {
4464 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4465 != SUBREG_PROMOTED_UNSIGNED_P (target))
4466 {
4467 /* Some types, e.g. Fortran's logical*4, won't have a signed
4468 version, so use the mode instead. */
4469 tree ntype
4470 = (signed_or_unsigned_type_for
4471 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
4472 if (ntype == NULL)
4473 ntype = lang_hooks.types.type_for_mode
4474 (TYPE_MODE (TREE_TYPE (exp)),
4475 SUBREG_PROMOTED_UNSIGNED_P (target));
4476
4477 exp = fold_convert (ntype, exp);
4478 }
4479
4480 exp = fold_convert (lang_hooks.types.type_for_mode
4481 (GET_MODE (SUBREG_REG (target)),
4482 SUBREG_PROMOTED_UNSIGNED_P (target)),
4483 exp);
4484
4485 inner_target = SUBREG_REG (target);
4486 }
4487
4488 temp = expand_expr (exp, inner_target, VOIDmode,
4489 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4490
4491 /* If TEMP is a VOIDmode constant, use convert_modes to make
4492 sure that we properly convert it. */
4493 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4494 {
4495 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4496 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4497 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4498 GET_MODE (target), temp,
4499 SUBREG_PROMOTED_UNSIGNED_P (target));
4500 }
4501
4502 convert_move (SUBREG_REG (target), temp,
4503 SUBREG_PROMOTED_UNSIGNED_P (target));
4504
4505 return NULL_RTX;
4506 }
4507 else if (TREE_CODE (exp) == STRING_CST
4508 && !nontemporal && !call_param_p
4509 && TREE_STRING_LENGTH (exp) > 0
4510 && TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
4511 {
4512 /* Optimize initialization of an array with a STRING_CST. */
4513 HOST_WIDE_INT exp_len, str_copy_len;
4514 rtx dest_mem;
4515
4516 exp_len = int_expr_size (exp);
4517 if (exp_len <= 0)
4518 goto normal_expr;
4519
4520 str_copy_len = strlen (TREE_STRING_POINTER (exp));
4521 if (str_copy_len < TREE_STRING_LENGTH (exp) - 1)
4522 goto normal_expr;
4523
4524 str_copy_len = TREE_STRING_LENGTH (exp);
4525 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0)
4526 {
4527 str_copy_len += STORE_MAX_PIECES - 1;
4528 str_copy_len &= ~(STORE_MAX_PIECES - 1);
4529 }
4530 str_copy_len = MIN (str_copy_len, exp_len);
4531 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
4532 (void *) TREE_STRING_POINTER (exp),
4533 MEM_ALIGN (target), false))
4534 goto normal_expr;
4535
4536 dest_mem = target;
4537
4538 dest_mem = store_by_pieces (dest_mem,
4539 str_copy_len, builtin_strncpy_read_str,
4540 (void *) TREE_STRING_POINTER (exp),
4541 MEM_ALIGN (target), false,
4542 exp_len > str_copy_len ? 1 : 0);
4543 if (exp_len > str_copy_len)
4544 clear_storage (dest_mem, GEN_INT (exp_len - str_copy_len),
4545 BLOCK_OP_NORMAL);
4546 return NULL_RTX;
4547 }
4548 else
4549 {
4550 rtx tmp_target;
4551
4552 normal_expr:
4553 /* If we want to use a nontemporal store, force the value to
4554 register first. */
4555 tmp_target = nontemporal ? NULL_RTX : target;
4556 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
4557 (call_param_p
4558 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4559 &alt_rtl);
4560 /* Return TARGET if it's a specified hardware register.
4561 If TARGET is a volatile mem ref, either return TARGET
4562 or return a reg copied *from* TARGET; ANSI requires this.
4563
4564 Otherwise, if TEMP is not TARGET, return TEMP
4565 if it is constant (for efficiency),
4566 or if we really want the correct value. */
4567 if (!(target && REG_P (target)
4568 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4569 && !(MEM_P (target) && MEM_VOLATILE_P (target))
4570 && ! rtx_equal_p (temp, target)
4571 && CONSTANT_P (temp))
4572 dont_return_target = 1;
4573 }
4574
4575 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4576 the same as that of TARGET, adjust the constant. This is needed, for
4577 example, in case it is a CONST_DOUBLE and we want only a word-sized
4578 value. */
4579 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4580 && TREE_CODE (exp) != ERROR_MARK
4581 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4582 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4583 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4584
4585 /* If value was not generated in the target, store it there.
4586 Convert the value to TARGET's type first if necessary and emit the
4587 pending incrementations that have been queued when expanding EXP.
4588 Note that we cannot emit the whole queue blindly because this will
4589 effectively disable the POST_INC optimization later.
4590
4591 If TEMP and TARGET compare equal according to rtx_equal_p, but
4592 one or both of them are volatile memory refs, we have to distinguish
4593 two cases:
4594 - expand_expr has used TARGET. In this case, we must not generate
4595 another copy. This can be detected by TARGET being equal according
4596 to == .
4597 - expand_expr has not used TARGET - that means that the source just
4598 happens to have the same RTX form. Since temp will have been created
4599 by expand_expr, it will compare unequal according to == .
4600 We must generate a copy in this case, to reach the correct number
4601 of volatile memory references. */
4602
4603 if ((! rtx_equal_p (temp, target)
4604 || (temp != target && (side_effects_p (temp)
4605 || side_effects_p (target))))
4606 && TREE_CODE (exp) != ERROR_MARK
4607 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4608 but TARGET is not valid memory reference, TEMP will differ
4609 from TARGET although it is really the same location. */
4610 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4611 /* If there's nothing to copy, don't bother. Don't call
4612 expr_size unless necessary, because some front ends' (C++)
4613 expr_size hook must not be given objects that are not
4614 supposed to be bit-copied or bit-initialized. */
4615 && expr_size (exp) != const0_rtx)
4616 {
4617 if (GET_MODE (temp) != GET_MODE (target)
4618 && GET_MODE (temp) != VOIDmode)
4619 {
4620 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4621 if (dont_return_target)
4622 {
4623 /* In this case, we will return TEMP,
4624 so make sure it has the proper mode.
4625 But don't forget to store the value into TARGET. */
4626 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4627 emit_move_insn (target, temp);
4628 }
4629 else if (GET_MODE (target) == BLKmode)
4630 emit_block_move (target, temp, expr_size (exp),
4631 (call_param_p
4632 ? BLOCK_OP_CALL_PARM
4633 : BLOCK_OP_NORMAL));
4634 else
4635 convert_move (target, temp, unsignedp);
4636 }
4637
4638 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4639 {
4640 /* Handle copying a string constant into an array. The string
4641 constant may be shorter than the array. So copy just the string's
4642 actual length, and clear the rest. First get the size of the data
4643 type of the string, which is actually the size of the target. */
4644 rtx size = expr_size (exp);
4645
4646 if (GET_CODE (size) == CONST_INT
4647 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4648 emit_block_move (target, temp, size,
4649 (call_param_p
4650 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4651 else
4652 {
4653 /* Compute the size of the data to copy from the string. */
4654 tree copy_size
4655 = size_binop (MIN_EXPR,
4656 make_tree (sizetype, size),
4657 size_int (TREE_STRING_LENGTH (exp)));
4658 rtx copy_size_rtx
4659 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4660 (call_param_p
4661 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4662 rtx label = 0;
4663
4664 /* Copy that much. */
4665 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4666 TYPE_UNSIGNED (sizetype));
4667 emit_block_move (target, temp, copy_size_rtx,
4668 (call_param_p
4669 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4670
4671 /* Figure out how much is left in TARGET that we have to clear.
4672 Do all calculations in ptr_mode. */
4673 if (GET_CODE (copy_size_rtx) == CONST_INT)
4674 {
4675 size = plus_constant (size, -INTVAL (copy_size_rtx));
4676 target = adjust_address (target, BLKmode,
4677 INTVAL (copy_size_rtx));
4678 }
4679 else
4680 {
4681 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4682 copy_size_rtx, NULL_RTX, 0,
4683 OPTAB_LIB_WIDEN);
4684
4685 #ifdef POINTERS_EXTEND_UNSIGNED
4686 if (GET_MODE (copy_size_rtx) != Pmode)
4687 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4688 TYPE_UNSIGNED (sizetype));
4689 #endif
4690
4691 target = offset_address (target, copy_size_rtx,
4692 highest_pow2_factor (copy_size));
4693 label = gen_label_rtx ();
4694 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4695 GET_MODE (size), 0, label);
4696 }
4697
4698 if (size != const0_rtx)
4699 clear_storage (target, size, BLOCK_OP_NORMAL);
4700
4701 if (label)
4702 emit_label (label);
4703 }
4704 }
4705 /* Handle calls that return values in multiple non-contiguous locations.
4706 The Irix 6 ABI has examples of this. */
4707 else if (GET_CODE (target) == PARALLEL)
4708 emit_group_load (target, temp, TREE_TYPE (exp),
4709 int_size_in_bytes (TREE_TYPE (exp)));
4710 else if (GET_MODE (temp) == BLKmode)
4711 emit_block_move (target, temp, expr_size (exp),
4712 (call_param_p
4713 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4714 else if (nontemporal
4715 && emit_storent_insn (target, temp))
4716 /* If we managed to emit a nontemporal store, there is nothing else to
4717 do. */
4718 ;
4719 else
4720 {
4721 temp = force_operand (temp, target);
4722 if (temp != target)
4723 emit_move_insn (target, temp);
4724 }
4725 }
4726
4727 return NULL_RTX;
4728 }
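Aside (not part of expr.c): the STRING_CST branch of store_expr boils down to "copy the string's bytes, then clear the tail of the array". A standalone model with hypothetical sizes:

#include <stdio.h>
#include <string.h>

int
main (void)
{
  char target[16];                       /* the array being initialized */
  const char *str = "hi";
  size_t exp_len = sizeof target;        /* 16 */
  size_t copy_len = strlen (str) + 1;    /*  3, counting the NUL */

  memcpy (target, str, copy_len);        /* store the string by pieces */
  if (exp_len > copy_len)
    memset (target + copy_len, 0, exp_len - copy_len);   /* clear the rest */

  printf ("\"%s\" plus %zu cleared bytes\n", target, exp_len - copy_len);
  return 0;
}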
4729 \f
4730 /* Helper for categorize_ctor_elements. Identical interface. */
4731
4732 static bool
4733 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
4734 HOST_WIDE_INT *p_elt_count,
4735 bool *p_must_clear)
4736 {
4737 unsigned HOST_WIDE_INT idx;
4738 HOST_WIDE_INT nz_elts, elt_count;
4739 tree value, purpose;
4740
4741 /* Whether CTOR is a valid constant initializer, in accordance with what
4742 initializer_constant_valid_p does. If inferred from the constructor
4743 elements, true until proven otherwise. */
4744 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
4745 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
4746
4747 nz_elts = 0;
4748 elt_count = 0;
4749
4750 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
4751 {
4752 HOST_WIDE_INT mult;
4753
4754 mult = 1;
4755 if (TREE_CODE (purpose) == RANGE_EXPR)
4756 {
4757 tree lo_index = TREE_OPERAND (purpose, 0);
4758 tree hi_index = TREE_OPERAND (purpose, 1);
4759
4760 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4761 mult = (tree_low_cst (hi_index, 1)
4762 - tree_low_cst (lo_index, 1) + 1);
4763 }
4764
4765 switch (TREE_CODE (value))
4766 {
4767 case CONSTRUCTOR:
4768 {
4769 HOST_WIDE_INT nz = 0, ic = 0;
4770
4771 bool const_elt_p
4772 = categorize_ctor_elements_1 (value, &nz, &ic, p_must_clear);
4773
4774 nz_elts += mult * nz;
4775 elt_count += mult * ic;
4776
4777 if (const_from_elts_p && const_p)
4778 const_p = const_elt_p;
4779 }
4780 break;
4781
4782 case INTEGER_CST:
4783 case REAL_CST:
4784 case FIXED_CST:
4785 if (!initializer_zerop (value))
4786 nz_elts += mult;
4787 elt_count += mult;
4788 break;
4789
4790 case STRING_CST:
4791 nz_elts += mult * TREE_STRING_LENGTH (value);
4792 elt_count += mult * TREE_STRING_LENGTH (value);
4793 break;
4794
4795 case COMPLEX_CST:
4796 if (!initializer_zerop (TREE_REALPART (value)))
4797 nz_elts += mult;
4798 if (!initializer_zerop (TREE_IMAGPART (value)))
4799 nz_elts += mult;
4800 elt_count += mult;
4801 break;
4802
4803 case VECTOR_CST:
4804 {
4805 tree v;
4806 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4807 {
4808 if (!initializer_zerop (TREE_VALUE (v)))
4809 nz_elts += mult;
4810 elt_count += mult;
4811 }
4812 }
4813 break;
4814
4815 default:
4816 nz_elts += mult;
4817 elt_count += mult;
4818
4819 if (const_from_elts_p && const_p)
4820 const_p = initializer_constant_valid_p (value, TREE_TYPE (value))
4821 != NULL_TREE;
4822 break;
4823 }
4824 }
4825
4826 if (!*p_must_clear
4827 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4828 || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4829 {
4830 tree init_sub_type;
4831 bool clear_this = true;
4832
4833 if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
4834 {
4835 /* We don't expect more than one element of the union to be
4836 initialized. Not sure what we should do otherwise... */
4837 gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
4838 == 1);
4839
4840 init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
4841 CONSTRUCTOR_ELTS (ctor),
4842 0)->value);
4843
4844 /* ??? We could look at each element of the union, and find the
4845 largest element, which would avoid comparing the size of the
4846 initialized element against any tail padding in the union.
4847 Doesn't seem worth the effort... */
4848 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4849 TYPE_SIZE (init_sub_type)) == 1)
4850 {
4851 /* And now we have to find out if the element itself is fully
4852 constructed. E.g. for union { struct { int a, b; } s; } u
4853 = { .s = { .a = 1 } }. */
4854 if (elt_count == count_type_elements (init_sub_type, false))
4855 clear_this = false;
4856 }
4857 }
4858
4859 *p_must_clear = clear_this;
4860 }
4861
4862 *p_nz_elts += nz_elts;
4863 *p_elt_count += elt_count;
4864
4865 return const_p;
4866 }
4867
4868 /* Examine CTOR to discover:
4869 * how many scalar fields are set to nonzero values,
4870 and place it in *P_NZ_ELTS;
4871 * how many scalar fields in total are in CTOR,
4872 and place it in *P_ELT_COUNT;
4873 * if a type is a union, and the initializer from the constructor
4874 is not the largest element in the union, then set *P_MUST_CLEAR.
4875
4876 Return whether or not CTOR is a valid static constant initializer, the same
4877 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
4878
4879 bool
4880 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
4881 HOST_WIDE_INT *p_elt_count,
4882 bool *p_must_clear)
4883 {
4884 *p_nz_elts = 0;
4885 *p_elt_count = 0;
4886 *p_must_clear = false;
4887
4888 return
4889 categorize_ctor_elements_1 (ctor, p_nz_elts, p_elt_count, p_must_clear);
4890 }
4891
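Aside (not part of expr.c): a deliberately simplified model of what categorize_ctor_elements reports for a flat list of integer initializers; real constructors may nest, and may use ranges, strings, complex and vector constants as handled above.

#include <stdio.h>

/* Count the initializers and how many of them are nonzero -- the two
   numbers returned through *P_ELT_COUNT and *P_NZ_ELTS.  */
static void
categorize_flat (const int *init, int n, int *nz_elts, int *elt_count)
{
  *nz_elts = 0;
  *elt_count = n;
  for (int i = 0; i < n; i++)
    if (init[i] != 0)
      ++*nz_elts;
}

int
main (void)
{
  /* Models   int a[8] = { 1, 0, 2, 0, 0, 0, 0, 0 };   */
  int init[8] = { 1, 0, 2, 0, 0, 0, 0, 0 };
  int nz, count;

  categorize_flat (init, 8, &nz, &count);
  printf ("nz_elts=%d elt_count=%d\n", nz, count);   /* 2 and 8 */
  return 0;
}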
4892 /* Count the number of scalars in TYPE. Return -1 on overflow or
4893 variable-sized. If ALLOW_FLEXARR is true, don't count flexible
4894 array member at the end of the structure. */
4895
4896 HOST_WIDE_INT
4897 count_type_elements (const_tree type, bool allow_flexarr)
4898 {
4899 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4900 switch (TREE_CODE (type))
4901 {
4902 case ARRAY_TYPE:
4903 {
4904 tree telts = array_type_nelts (type);
4905 if (telts && host_integerp (telts, 1))
4906 {
4907 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4908 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
4909 if (n == 0)
4910 return 0;
4911 else if (max / n > m)
4912 return n * m;
4913 }
4914 return -1;
4915 }
4916
4917 case RECORD_TYPE:
4918 {
4919 HOST_WIDE_INT n = 0, t;
4920 tree f;
4921
4922 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4923 if (TREE_CODE (f) == FIELD_DECL)
4924 {
4925 t = count_type_elements (TREE_TYPE (f), false);
4926 if (t < 0)
4927 {
4928 /* Check for structures with flexible array member. */
4929 tree tf = TREE_TYPE (f);
4930 if (allow_flexarr
4931 && TREE_CHAIN (f) == NULL
4932 && TREE_CODE (tf) == ARRAY_TYPE
4933 && TYPE_DOMAIN (tf)
4934 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
4935 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
4936 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
4937 && int_size_in_bytes (type) >= 0)
4938 break;
4939
4940 return -1;
4941 }
4942 n += t;
4943 }
4944
4945 return n;
4946 }
4947
4948 case UNION_TYPE:
4949 case QUAL_UNION_TYPE:
4950 {
4951 /* Ho hum. How in the world do we guess here? Clearly it isn't
4952 right to count the fields. Guess based on the number of words. */
4953 HOST_WIDE_INT n = int_size_in_bytes (type);
4954 if (n < 0)
4955 return -1;
4956 return n / UNITS_PER_WORD;
4957 }
4958
4959 case COMPLEX_TYPE:
4960 return 2;
4961
4962 case VECTOR_TYPE:
4963 return TYPE_VECTOR_SUBPARTS (type);
4964
4965 case INTEGER_TYPE:
4966 case REAL_TYPE:
4967 case FIXED_POINT_TYPE:
4968 case ENUMERAL_TYPE:
4969 case BOOLEAN_TYPE:
4970 case POINTER_TYPE:
4971 case OFFSET_TYPE:
4972 case REFERENCE_TYPE:
4973 return 1;
4974
4975 case VOID_TYPE:
4976 case METHOD_TYPE:
4977 case FUNCTION_TYPE:
4978 case LANG_TYPE:
4979 default:
4980 gcc_unreachable ();
4981 }
4982 }
4983
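Aside (not part of expr.c): the scalar counts computed by count_type_elements are simple arithmetic over the type shape; a few worked cases with hypothetical sizes:

#include <stdio.h>

int
main (void)
{
  /* ARRAY_TYPE: element count times scalars per element.
     Models   struct { int a, b; } v[4];   */
  printf ("array of 4 two-int structs: %d scalars\n", 4 * 2);

  /* COMPLEX_TYPE is always two scalars.  */
  printf ("complex: %d scalars\n", 2);

  /* UNION_TYPE is only guessed at: size in bytes divided by the word
     size (here a hypothetical 16-byte union, 8-byte words).  */
  printf ("16-byte union: %d scalars\n", 16 / 8);
  return 0;
}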
4984 /* Return 1 if EXP contains mostly (3/4) zeros. */
4985
4986 static int
4987 mostly_zeros_p (const_tree exp)
4988 {
4989 if (TREE_CODE (exp) == CONSTRUCTOR)
4990
4991 {
4992 HOST_WIDE_INT nz_elts, count, elts;
4993 bool must_clear;
4994
4995 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
4996 if (must_clear)
4997 return 1;
4998
4999 elts = count_type_elements (TREE_TYPE (exp), false);
5000
5001 return nz_elts < elts / 4;
5002 }
5003
5004 return initializer_zerop (exp);
5005 }
5006
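Aside (not part of expr.c): the "mostly zeros" test is a strict quarter threshold, nz_elts < elts / 4. For a type with 8 scalars that means at most one nonzero element qualifies:

#include <stdio.h>

int
main (void)
{
  int elts = 8;   /* scalars in the type, as count_type_elements reports */

  for (int nz = 0; nz <= 3; nz++)
    printf ("nz_elts=%d -> mostly zeros: %s\n",
            nz, nz < elts / 4 ? "yes" : "no");
  /* yes, yes, no, no */
  return 0;
}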
5007 /* Return 1 if EXP contains all zeros. */
5008
5009 static int
5010 all_zeros_p (const_tree exp)
5011 {
5012 if (TREE_CODE (exp) == CONSTRUCTOR)
5013
5014 {
5015 HOST_WIDE_INT nz_elts, count;
5016 bool must_clear;
5017
5018 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
5019 return nz_elts == 0;
5020 }
5021
5022 return initializer_zerop (exp);
5023 }
5024 \f
5025 /* Helper function for store_constructor.
5026 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5027 TYPE is the type of the CONSTRUCTOR, not the element type.
5028 CLEARED is as for store_constructor.
5029 ALIAS_SET is the alias set to use for any stores.
5030
5031 This provides a recursive shortcut back to store_constructor when it isn't
5032 necessary to go through store_field. This is so that we can pass through
5033 the cleared field to let store_constructor know that we may not have to
5034 clear a substructure if the outer structure has already been cleared. */
5035
5036 static void
5037 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5038 HOST_WIDE_INT bitpos, enum machine_mode mode,
5039 tree exp, tree type, int cleared,
5040 alias_set_type alias_set)
5041 {
5042 if (TREE_CODE (exp) == CONSTRUCTOR
5043 /* We can only call store_constructor recursively if the size and
5044 bit position are on a byte boundary. */
5045 && bitpos % BITS_PER_UNIT == 0
5046 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5047 /* If we have a nonzero bitpos for a register target, then we just
5048 let store_field do the bitfield handling. This is unlikely to
5049 generate unnecessary clear instructions anyway. */
5050 && (bitpos == 0 || MEM_P (target)))
5051 {
5052 if (MEM_P (target))
5053 target
5054 = adjust_address (target,
5055 GET_MODE (target) == BLKmode
5056 || 0 != (bitpos
5057 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5058 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5059
5060
5061 /* Update the alias set, if required. */
5062 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5063 && MEM_ALIAS_SET (target) != 0)
5064 {
5065 target = copy_rtx (target);
5066 set_mem_alias_set (target, alias_set);
5067 }
5068
5069 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5070 }
5071 else
5072 store_field (target, bitsize, bitpos, mode, exp, type, alias_set, false);
5073 }
5074
5075 /* Store the value of constructor EXP into the rtx TARGET.
5076 TARGET is either a REG or a MEM; we know it cannot conflict, since
5077 safe_from_p has been called.
5078 CLEARED is true if TARGET is known to have been zero'd.
5079 SIZE is the number of bytes of TARGET we are allowed to modify: this
5080 may not be the same as the size of EXP if we are assigning to a field
5081 which has been packed to exclude padding bits. */
5082
5083 static void
5084 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5085 {
5086 tree type = TREE_TYPE (exp);
5087 #ifdef WORD_REGISTER_OPERATIONS
5088 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5089 #endif
5090
5091 switch (TREE_CODE (type))
5092 {
5093 case RECORD_TYPE:
5094 case UNION_TYPE:
5095 case QUAL_UNION_TYPE:
5096 {
5097 unsigned HOST_WIDE_INT idx;
5098 tree field, value;
5099
5100 /* If size is zero or the target is already cleared, do nothing. */
5101 if (size == 0 || cleared)
5102 cleared = 1;
5103 /* We either clear the aggregate or indicate the value is dead. */
5104 else if ((TREE_CODE (type) == UNION_TYPE
5105 || TREE_CODE (type) == QUAL_UNION_TYPE)
5106 && ! CONSTRUCTOR_ELTS (exp))
5107 /* If the constructor is empty, clear the union. */
5108 {
5109 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5110 cleared = 1;
5111 }
5112
5113 /* If we are building a static constructor into a register,
5114 set the initial value as zero so we can fold the value into
5115 a constant. But if more than one register is involved,
5116 this probably loses. */
5117 else if (REG_P (target) && TREE_STATIC (exp)
5118 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5119 {
5120 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5121 cleared = 1;
5122 }
5123
5124 /* If the constructor has fewer fields than the structure or
5125 if we are initializing the structure to mostly zeros, clear
5126 the whole structure first. Don't do this if TARGET is a
5127 register whose mode size isn't equal to SIZE since
5128 clear_storage can't handle this case. */
5129 else if (size > 0
5130 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
5131 != fields_length (type))
5132 || mostly_zeros_p (exp))
5133 && (!REG_P (target)
5134 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5135 == size)))
5136 {
5137 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5138 cleared = 1;
5139 }
5140
5141 if (REG_P (target) && !cleared)
5142 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5143
5144 /* Store each element of the constructor into the
5145 corresponding field of TARGET. */
5146 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5147 {
5148 enum machine_mode mode;
5149 HOST_WIDE_INT bitsize;
5150 HOST_WIDE_INT bitpos = 0;
5151 tree offset;
5152 rtx to_rtx = target;
5153
5154 /* Just ignore missing fields. We cleared the whole
5155 structure, above, if any fields are missing. */
5156 if (field == 0)
5157 continue;
5158
5159 if (cleared && initializer_zerop (value))
5160 continue;
5161
5162 if (host_integerp (DECL_SIZE (field), 1))
5163 bitsize = tree_low_cst (DECL_SIZE (field), 1);
5164 else
5165 bitsize = -1;
5166
5167 mode = DECL_MODE (field);
5168 if (DECL_BIT_FIELD (field))
5169 mode = VOIDmode;
5170
5171 offset = DECL_FIELD_OFFSET (field);
5172 if (host_integerp (offset, 0)
5173 && host_integerp (bit_position (field), 0))
5174 {
5175 bitpos = int_bit_position (field);
5176 offset = 0;
5177 }
5178 else
5179 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5180
5181 if (offset)
5182 {
5183 rtx offset_rtx;
5184
5185 offset
5186 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5187 make_tree (TREE_TYPE (exp),
5188 target));
5189
5190 offset_rtx = expand_normal (offset);
5191 gcc_assert (MEM_P (to_rtx));
5192
5193 #ifdef POINTERS_EXTEND_UNSIGNED
5194 if (GET_MODE (offset_rtx) != Pmode)
5195 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
5196 #else
5197 if (GET_MODE (offset_rtx) != ptr_mode)
5198 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
5199 #endif
5200
5201 to_rtx = offset_address (to_rtx, offset_rtx,
5202 highest_pow2_factor (offset));
5203 }
5204
5205 #ifdef WORD_REGISTER_OPERATIONS
5206 /* If this initializes a field that is smaller than a
5207 word, at the start of a word, try to widen it to a full
5208 word. This special case allows us to output C++ member
5209 function initializations in a form that the optimizers
5210 can understand. */
5211 if (REG_P (target)
5212 && bitsize < BITS_PER_WORD
5213 && bitpos % BITS_PER_WORD == 0
5214 && GET_MODE_CLASS (mode) == MODE_INT
5215 && TREE_CODE (value) == INTEGER_CST
5216 && exp_size >= 0
5217 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5218 {
5219 tree type = TREE_TYPE (value);
5220
5221 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5222 {
5223 type = lang_hooks.types.type_for_size
5224 (BITS_PER_WORD, TYPE_UNSIGNED (type));
5225 value = fold_convert (type, value);
5226 }
5227
5228 if (BYTES_BIG_ENDIAN)
5229 value
5230 = fold_build2 (LSHIFT_EXPR, type, value,
5231 build_int_cst (type,
5232 BITS_PER_WORD - bitsize));
5233 bitsize = BITS_PER_WORD;
5234 mode = word_mode;
5235 }
5236 #endif
5237
5238 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5239 && DECL_NONADDRESSABLE_P (field))
5240 {
5241 to_rtx = copy_rtx (to_rtx);
5242 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5243 }
5244
5245 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5246 value, type, cleared,
5247 get_alias_set (TREE_TYPE (field)));
5248 }
5249 break;
5250 }
5251 case ARRAY_TYPE:
5252 {
5253 tree value, index;
5254 unsigned HOST_WIDE_INT i;
5255 int need_to_clear;
5256 tree domain;
5257 tree elttype = TREE_TYPE (type);
5258 int const_bounds_p;
5259 HOST_WIDE_INT minelt = 0;
5260 HOST_WIDE_INT maxelt = 0;
5261
5262 domain = TYPE_DOMAIN (type);
5263 const_bounds_p = (TYPE_MIN_VALUE (domain)
5264 && TYPE_MAX_VALUE (domain)
5265 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5266 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5267
5268 /* If we have constant bounds for the range of the type, get them. */
5269 if (const_bounds_p)
5270 {
5271 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5272 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5273 }
5274
5275 /* If the constructor has fewer elements than the array, clear
5276 the whole array first. Similarly if this is a static
5277 constructor of a non-BLKmode object. */
5278 if (cleared)
5279 need_to_clear = 0;
5280 else if (REG_P (target) && TREE_STATIC (exp))
5281 need_to_clear = 1;
5282 else
5283 {
5284 unsigned HOST_WIDE_INT idx;
5285 tree index, value;
5286 HOST_WIDE_INT count = 0, zero_count = 0;
5287 need_to_clear = ! const_bounds_p;
5288
5289 /* This loop is a more accurate version of the loop in
5290 mostly_zeros_p (it handles RANGE_EXPR in an index). It
5291 is also needed to check for missing elements. */
5292 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5293 {
5294 HOST_WIDE_INT this_node_count;
5295
5296 if (need_to_clear)
5297 break;
5298
5299 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5300 {
5301 tree lo_index = TREE_OPERAND (index, 0);
5302 tree hi_index = TREE_OPERAND (index, 1);
5303
5304 if (! host_integerp (lo_index, 1)
5305 || ! host_integerp (hi_index, 1))
5306 {
5307 need_to_clear = 1;
5308 break;
5309 }
5310
5311 this_node_count = (tree_low_cst (hi_index, 1)
5312 - tree_low_cst (lo_index, 1) + 1);
5313 }
5314 else
5315 this_node_count = 1;
5316
5317 count += this_node_count;
5318 if (mostly_zeros_p (value))
5319 zero_count += this_node_count;
5320 }
5321
5322 /* Clear the entire array first if there are any missing
5323 elements, or if the incidence of zero elements is >=
5324 75%. */
5325 if (! need_to_clear
5326 && (count < maxelt - minelt + 1
5327 || 4 * zero_count >= 3 * count))
5328 need_to_clear = 1;
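/* Illustrative arithmetic for the 75% test above (not part of the
   compiler; the counts are made up): with count == 8 initializer
   elements of which zero_count == 6 are zero, 4 * 6 >= 3 * 8 holds
   (24 >= 24), so the whole array is cleared first and only the two
   nonzero elements are stored afterwards.  The cross-multiplied form
   avoids any division when testing zero_count / count >= 3/4.  */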
5329 }
5330
5331 if (need_to_clear && size > 0)
5332 {
5333 if (REG_P (target))
5334 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5335 else
5336 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5337 cleared = 1;
5338 }
5339
5340 if (!cleared && REG_P (target))
5341 /* Inform later passes that the old value is dead. */
5342 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5343
5344 /* Store each element of the constructor into the
5345 corresponding element of TARGET, determined by counting the
5346 elements. */
5347 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5348 {
5349 enum machine_mode mode;
5350 HOST_WIDE_INT bitsize;
5351 HOST_WIDE_INT bitpos;
5352 int unsignedp;
5353 rtx xtarget = target;
5354
5355 if (cleared && initializer_zerop (value))
5356 continue;
5357
5358 unsignedp = TYPE_UNSIGNED (elttype);
5359 mode = TYPE_MODE (elttype);
5360 if (mode == BLKmode)
5361 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5362 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5363 : -1);
5364 else
5365 bitsize = GET_MODE_BITSIZE (mode);
5366
5367 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5368 {
5369 tree lo_index = TREE_OPERAND (index, 0);
5370 tree hi_index = TREE_OPERAND (index, 1);
5371 rtx index_r, pos_rtx;
5372 HOST_WIDE_INT lo, hi, count;
5373 tree position;
5374
5375 /* If the range is constant and "small", unroll the loop. */
5376 if (const_bounds_p
5377 && host_integerp (lo_index, 0)
5378 && host_integerp (hi_index, 0)
5379 && (lo = tree_low_cst (lo_index, 0),
5380 hi = tree_low_cst (hi_index, 0),
5381 count = hi - lo + 1,
5382 (!MEM_P (target)
5383 || count <= 2
5384 || (host_integerp (TYPE_SIZE (elttype), 1)
5385 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5386 <= 40 * 8)))))
5387 {
5388 lo -= minelt; hi -= minelt;
5389 for (; lo <= hi; lo++)
5390 {
5391 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5392
5393 if (MEM_P (target)
5394 && !MEM_KEEP_ALIAS_SET_P (target)
5395 && TREE_CODE (type) == ARRAY_TYPE
5396 && TYPE_NONALIASED_COMPONENT (type))
5397 {
5398 target = copy_rtx (target);
5399 MEM_KEEP_ALIAS_SET_P (target) = 1;
5400 }
5401
5402 store_constructor_field
5403 (target, bitsize, bitpos, mode, value, type, cleared,
5404 get_alias_set (elttype));
5405 }
5406 }
5407 else
5408 {
5409 rtx loop_start = gen_label_rtx ();
5410 rtx loop_end = gen_label_rtx ();
5411 tree exit_cond;
5412
5413 expand_normal (hi_index);
5414 unsignedp = TYPE_UNSIGNED (domain);
5415
5416 index = build_decl (VAR_DECL, NULL_TREE, domain);
5417
5418 index_r
5419 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5420 &unsignedp, 0));
5421 SET_DECL_RTL (index, index_r);
5422 store_expr (lo_index, index_r, 0, false);
5423
5424 /* Build the head of the loop. */
5425 do_pending_stack_adjust ();
5426 emit_label (loop_start);
5427
5428 /* Assign value to element index. */
5429 position =
5430 fold_convert (ssizetype,
5431 fold_build2 (MINUS_EXPR,
5432 TREE_TYPE (index),
5433 index,
5434 TYPE_MIN_VALUE (domain)));
5435
5436 position =
5437 size_binop (MULT_EXPR, position,
5438 fold_convert (ssizetype,
5439 TYPE_SIZE_UNIT (elttype)));
5440
5441 pos_rtx = expand_normal (position);
5442 xtarget = offset_address (target, pos_rtx,
5443 highest_pow2_factor (position));
5444 xtarget = adjust_address (xtarget, mode, 0);
5445 if (TREE_CODE (value) == CONSTRUCTOR)
5446 store_constructor (value, xtarget, cleared,
5447 bitsize / BITS_PER_UNIT);
5448 else
5449 store_expr (value, xtarget, 0, false);
5450
5451 /* Generate a conditional jump to exit the loop. */
5452 exit_cond = build2 (LT_EXPR, integer_type_node,
5453 index, hi_index);
5454 jumpif (exit_cond, loop_end);
5455
5456 /* Update the loop counter, and jump to the head of
5457 the loop. */
5458 expand_assignment (index,
5459 build2 (PLUS_EXPR, TREE_TYPE (index),
5460 index, integer_one_node),
5461 false);
5462
5463 emit_jump (loop_start);
5464
5465 /* Build the end of the loop. */
5466 emit_label (loop_end);
5467 }
5468 }
5469 else if ((index != 0 && ! host_integerp (index, 0))
5470 || ! host_integerp (TYPE_SIZE (elttype), 1))
5471 {
5472 tree position;
5473
5474 if (index == 0)
5475 index = ssize_int (1);
5476
5477 if (minelt)
5478 index = fold_convert (ssizetype,
5479 fold_build2 (MINUS_EXPR,
5480 TREE_TYPE (index),
5481 index,
5482 TYPE_MIN_VALUE (domain)));
5483
5484 position =
5485 size_binop (MULT_EXPR, index,
5486 fold_convert (ssizetype,
5487 TYPE_SIZE_UNIT (elttype)));
5488 xtarget = offset_address (target,
5489 expand_normal (position),
5490 highest_pow2_factor (position));
5491 xtarget = adjust_address (xtarget, mode, 0);
5492 store_expr (value, xtarget, 0, false);
5493 }
5494 else
5495 {
5496 if (index != 0)
5497 bitpos = ((tree_low_cst (index, 0) - minelt)
5498 * tree_low_cst (TYPE_SIZE (elttype), 1));
5499 else
5500 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5501
5502 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5503 && TREE_CODE (type) == ARRAY_TYPE
5504 && TYPE_NONALIASED_COMPONENT (type))
5505 {
5506 target = copy_rtx (target);
5507 MEM_KEEP_ALIAS_SET_P (target) = 1;
5508 }
5509 store_constructor_field (target, bitsize, bitpos, mode, value,
5510 type, cleared, get_alias_set (elttype));
5511 }
5512 }
5513 break;
5514 }
5515
5516 case VECTOR_TYPE:
5517 {
5518 unsigned HOST_WIDE_INT idx;
5519 constructor_elt *ce;
5520 int i;
5521 int need_to_clear;
5522 int icode = 0;
5523 tree elttype = TREE_TYPE (type);
5524 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5525 enum machine_mode eltmode = TYPE_MODE (elttype);
5526 HOST_WIDE_INT bitsize;
5527 HOST_WIDE_INT bitpos;
5528 rtvec vector = NULL;
5529 unsigned n_elts;
5530
5531 gcc_assert (eltmode != BLKmode);
5532
5533 n_elts = TYPE_VECTOR_SUBPARTS (type);
5534 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5535 {
5536 enum machine_mode mode = GET_MODE (target);
5537
5538 icode = (int) optab_handler (vec_init_optab, mode)->insn_code;
5539 if (icode != CODE_FOR_nothing)
5540 {
5541 unsigned int i;
5542
5543 vector = rtvec_alloc (n_elts);
5544 for (i = 0; i < n_elts; i++)
5545 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5546 }
5547 }
5548
5549 /* If the constructor has fewer elements than the vector,
5550 clear the whole vector first. Similarly if this is a static
5551 constructor of a non-BLKmode object. */
5552 if (cleared)
5553 need_to_clear = 0;
5554 else if (REG_P (target) && TREE_STATIC (exp))
5555 need_to_clear = 1;
5556 else
5557 {
5558 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5559 tree value;
5560
5561 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
5562 {
5563 int n_elts_here = tree_low_cst
5564 (int_const_binop (TRUNC_DIV_EXPR,
5565 TYPE_SIZE (TREE_TYPE (value)),
5566 TYPE_SIZE (elttype), 0), 1);
5567
5568 count += n_elts_here;
5569 if (mostly_zeros_p (value))
5570 zero_count += n_elts_here;
5571 }
5572
5573 /* Clear the entire vector first if there are any missing elements,
5574 or if the incidence of zero elements is >= 75%. */
5575 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5576 }
5577
5578 if (need_to_clear && size > 0 && !vector)
5579 {
5580 if (REG_P (target))
5581 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5582 else
5583 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5584 cleared = 1;
5585 }
5586
5587 /* Inform later passes that the old value is dead. */
5588 if (!cleared && !vector && REG_P (target))
5589 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5590
5591 /* Store each element of the constructor into the corresponding
5592 element of TARGET, determined by counting the elements. */
5593 for (idx = 0, i = 0;
5594 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5595 idx++, i += bitsize / elt_size)
5596 {
5597 HOST_WIDE_INT eltpos;
5598 tree value = ce->value;
5599
5600 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5601 if (cleared && initializer_zerop (value))
5602 continue;
5603
5604 if (ce->index)
5605 eltpos = tree_low_cst (ce->index, 1);
5606 else
5607 eltpos = i;
5608
5609 if (vector)
5610 {
5611 /* Vector CONSTRUCTORs should only be built from smaller
5612 vectors in the case of BLKmode vectors. */
5613 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5614 RTVEC_ELT (vector, eltpos)
5615 = expand_normal (value);
5616 }
5617 else
5618 {
5619 enum machine_mode value_mode =
5620 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5621 ? TYPE_MODE (TREE_TYPE (value))
5622 : eltmode;
5623 bitpos = eltpos * elt_size;
5624 store_constructor_field (target, bitsize, bitpos,
5625 value_mode, value, type,
5626 cleared, get_alias_set (elttype));
5627 }
5628 }
5629
5630 if (vector)
5631 emit_insn (GEN_FCN (icode)
5632 (target,
5633 gen_rtx_PARALLEL (GET_MODE (target), vector)));
5634 break;
5635 }
5636
5637 default:
5638 gcc_unreachable ();
5639 }
5640 }
5641
5642 /* Store the value of EXP (an expression tree)
5643 into a subfield of TARGET which has mode MODE and occupies
5644 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5645 If MODE is VOIDmode, it means that we are storing into a bit-field.
5646
5647 Always return const0_rtx unless we have something particular to
5648 return.
5649
5650 TYPE is the type of the underlying object,
5651
5652 ALIAS_SET is the alias set for the destination. This value will
5653 (in general) be different from that for TARGET, since TARGET is a
5654 reference to the containing structure.
5655
5656 If NONTEMPORAL is true, try generating a nontemporal store. */
5657
5658 static rtx
5659 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5660 enum machine_mode mode, tree exp, tree type,
5661 alias_set_type alias_set, bool nontemporal)
5662 {
5663 HOST_WIDE_INT width_mask = 0;
5664
5665 if (TREE_CODE (exp) == ERROR_MARK)
5666 return const0_rtx;
5667
5668 /* If we have nothing to store, do nothing unless the expression has
5669 side-effects. */
5670 if (bitsize == 0)
5671 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5672 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5673 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
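  /* For example (illustrative only): with bitsize == 5 the computation
     above yields width_mask == ((HOST_WIDE_INT) 1 << 5) - 1 == 0x1f,
     i.e. a mask covering the low five bits of the field.  */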
5674
5675 /* If we are storing into an unaligned field of an aligned union that is
5676 in a register, we may have the mode of TARGET being an integer mode but
5677 MODE == BLKmode. In that case, get an aligned object whose size and
5678 alignment are the same as TARGET and store TARGET into it (we can avoid
5679 the store if the field being stored is the entire width of TARGET). Then
5680 call ourselves recursively to store the field into a BLKmode version of
5681 that object. Finally, load from the object into TARGET. This is not
5682 very efficient in general, but should only be slightly more expensive
5683 than the otherwise-required unaligned accesses. Perhaps this can be
5684 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5685 twice, once with emit_move_insn and once via store_field. */
5686
5687 if (mode == BLKmode
5688 && (REG_P (target) || GET_CODE (target) == SUBREG))
5689 {
5690 rtx object = assign_temp (type, 0, 1, 1);
5691 rtx blk_object = adjust_address (object, BLKmode, 0);
5692
5693 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5694 emit_move_insn (object, target);
5695
5696 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set,
5697 nontemporal);
5698
5699 emit_move_insn (target, object);
5700
5701 /* We want to return the BLKmode version of the data. */
5702 return blk_object;
5703 }
5704
5705 if (GET_CODE (target) == CONCAT)
5706 {
5707 /* We're storing into a struct containing a single __complex. */
5708
5709 gcc_assert (!bitpos);
5710 return store_expr (exp, target, 0, nontemporal);
5711 }
5712
5713 /* If the structure is in a register or if the component
5714 is a bit field, we cannot use addressing to access it.
5715 Use bit-field techniques or SUBREG to store in it. */
5716
5717 if (mode == VOIDmode
5718 || (mode != BLKmode && ! direct_store[(int) mode]
5719 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5720 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5721 || REG_P (target)
5722 || GET_CODE (target) == SUBREG
5723 /* If the field isn't aligned enough to store as an ordinary memref,
5724 store it as a bit field. */
5725 || (mode != BLKmode
5726 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5727 || bitpos % GET_MODE_ALIGNMENT (mode))
5728 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5729 || (bitpos % BITS_PER_UNIT != 0)))
5730 /* If the RHS and field are a constant size and the size of the
5731 RHS isn't the same size as the bitfield, we must use bitfield
5732 operations. */
5733 || (bitsize >= 0
5734 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5735 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5736 {
5737 rtx temp;
5738
5739 /* If EXP is a NOP_EXPR of precision less than its mode, then that
5740 implies a mask operation. If the precision is the same size as
5741 the field we're storing into, that mask is redundant. This is
5742 particularly common with bit field assignments generated by the
5743 C front end. */
5744 if (TREE_CODE (exp) == NOP_EXPR)
5745 {
5746 tree type = TREE_TYPE (exp);
5747 if (INTEGRAL_TYPE_P (type)
5748 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5749 && bitsize == TYPE_PRECISION (type))
5750 {
5751 type = TREE_TYPE (TREE_OPERAND (exp, 0));
5752 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5753 exp = TREE_OPERAND (exp, 0);
5754 }
5755 }
5756
5757 temp = expand_normal (exp);
5758
5759 /* If BITSIZE is narrower than the size of the type of EXP
5760 we will be narrowing TEMP. Normally, what's wanted are the
5761 low-order bits. However, if EXP's type is a record and this is a
5762 big-endian machine, we want the upper BITSIZE bits. */
5763 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5764 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5765 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5766 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5767 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5768 - bitsize),
5769 NULL_RTX, 1);
5770
5771 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5772 MODE. */
5773 if (mode != VOIDmode && mode != BLKmode
5774 && mode != TYPE_MODE (TREE_TYPE (exp)))
5775 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5776
5777 /* If the modes of TARGET and TEMP are both BLKmode, both
5778 must be in memory and BITPOS must be aligned on a byte
5779 boundary. If so, we simply do a block copy. */
5780 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5781 {
5782 gcc_assert (MEM_P (target) && MEM_P (temp)
5783 && !(bitpos % BITS_PER_UNIT));
5784
5785 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5786 emit_block_move (target, temp,
5787 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5788 / BITS_PER_UNIT),
5789 BLOCK_OP_NORMAL);
5790
5791 return const0_rtx;
5792 }
5793
5794 /* Store the value in the bitfield. */
5795 store_bit_field (target, bitsize, bitpos, mode, temp);
5796
5797 return const0_rtx;
5798 }
5799 else
5800 {
5801 /* Now build a reference to just the desired component. */
5802 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5803
5804 if (to_rtx == target)
5805 to_rtx = copy_rtx (to_rtx);
5806
5807 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5808 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5809 set_mem_alias_set (to_rtx, alias_set);
5810
5811 return store_expr (exp, to_rtx, 0, nontemporal);
5812 }
5813 }
5814 \f
5815 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5816 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5817 codes and find the ultimate containing object, which we return.
5818
5819 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5820 bit position, and *PUNSIGNEDP to the signedness of the field.
5821 If the position of the field is variable, we store a tree
5822 giving the variable offset (in units) in *POFFSET.
5823 This offset is in addition to the bit position.
5824 If the position is not variable, we store 0 in *POFFSET.
5825
5826 If any of the extraction expressions is volatile,
5827 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5828
5829 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5830 is a mode that can be used to access the field. In that case, *PBITSIZE
5831 is redundant.
5832
5833 If the field describes a variable-sized object, *PMODE is set to
5834 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5835 this case, but the address of the object can be found.
5836
5837 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5838 look through nodes that serve as markers of a greater alignment than
5839 the one that can be deduced from the expression. These nodes make it
5840 possible for front-ends to prevent temporaries from being created by
5841 the middle-end on alignment considerations. For that purpose, the
5842 normal operating mode at high-level is to always pass FALSE so that
5843 the ultimate containing object is really returned; moreover, the
5844 associated predicate handled_component_p will always return TRUE
5845 on these nodes, thus indicating that they are essentially handled
5846 by get_inner_reference. TRUE should only be passed when the caller
5847 is scanning the expression in order to build another representation
5848 and specifically knows how to handle these nodes; as such, this is
5849 the normal operating mode in the RTL expanders. */
5850
5851 tree
5852 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5853 HOST_WIDE_INT *pbitpos, tree *poffset,
5854 enum machine_mode *pmode, int *punsignedp,
5855 int *pvolatilep, bool keep_aligning)
5856 {
5857 tree size_tree = 0;
5858 enum machine_mode mode = VOIDmode;
5859 tree offset = size_zero_node;
5860 tree bit_offset = bitsize_zero_node;
5861
5862 /* First get the mode, signedness, and size. We do this from just the
5863 outermost expression. */
5864 if (TREE_CODE (exp) == COMPONENT_REF)
5865 {
5866 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5867 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5868 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5869
5870 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5871 }
5872 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5873 {
5874 size_tree = TREE_OPERAND (exp, 1);
5875 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5876
5877 /* For vector types, with the correct size of access, use the mode of
5878 inner type. */
5879 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
5880 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
5881 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
5882 mode = TYPE_MODE (TREE_TYPE (exp));
5883 }
5884 else
5885 {
5886 mode = TYPE_MODE (TREE_TYPE (exp));
5887 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5888
5889 if (mode == BLKmode)
5890 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5891 else
5892 *pbitsize = GET_MODE_BITSIZE (mode);
5893 }
5894
5895 if (size_tree != 0)
5896 {
5897 if (! host_integerp (size_tree, 1))
5898 mode = BLKmode, *pbitsize = -1;
5899 else
5900 *pbitsize = tree_low_cst (size_tree, 1);
5901 }
5902
5903 *pmode = mode;
5904
5905 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5906 and find the ultimate containing object. */
5907 while (1)
5908 {
5909 switch (TREE_CODE (exp))
5910 {
5911 case BIT_FIELD_REF:
5912 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5913 TREE_OPERAND (exp, 2));
5914 break;
5915
5916 case COMPONENT_REF:
5917 {
5918 tree field = TREE_OPERAND (exp, 1);
5919 tree this_offset = component_ref_field_offset (exp);
5920
5921 /* If this field hasn't been filled in yet, don't go past it.
5922 This should only happen when folding expressions made during
5923 type construction. */
5924 if (this_offset == 0)
5925 break;
5926
5927 offset = size_binop (PLUS_EXPR, offset, this_offset);
5928 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5929 DECL_FIELD_BIT_OFFSET (field));
5930
5931 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5932 }
5933 break;
5934
5935 case ARRAY_REF:
5936 case ARRAY_RANGE_REF:
5937 {
5938 tree index = TREE_OPERAND (exp, 1);
5939 tree low_bound = array_ref_low_bound (exp);
5940 tree unit_size = array_ref_element_size (exp);
5941
5942 /* We assume all arrays have sizes that are a multiple of a byte.
5943 First subtract the lower bound, if any, in the type of the
5944 index, then convert to sizetype and multiply by the size of
5945 the array element. */
5946 if (! integer_zerop (low_bound))
5947 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
5948 index, low_bound);
5949
5950 offset = size_binop (PLUS_EXPR, offset,
5951 size_binop (MULT_EXPR,
5952 fold_convert (sizetype, index),
5953 unit_size));
5954 }
5955 break;
5956
5957 case REALPART_EXPR:
5958 break;
5959
5960 case IMAGPART_EXPR:
5961 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5962 bitsize_int (*pbitsize));
5963 break;
5964
5965 case VIEW_CONVERT_EXPR:
5966 if (keep_aligning && STRICT_ALIGNMENT
5967 && (TYPE_ALIGN (TREE_TYPE (exp))
5968 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5969 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5970 < BIGGEST_ALIGNMENT)
5971 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5972 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5973 goto done;
5974 break;
5975
5976 default:
5977 goto done;
5978 }
5979
5980 /* If any reference in the chain is volatile, the effect is volatile. */
5981 if (TREE_THIS_VOLATILE (exp))
5982 *pvolatilep = 1;
5983
5984 exp = TREE_OPERAND (exp, 0);
5985 }
5986 done:
5987
5988 /* If OFFSET is constant, see if we can return the whole thing as a
5989 constant bit position. Make sure to handle overflow during
5990 this conversion. */
5991 if (host_integerp (offset, 0))
5992 {
5993 double_int tem = double_int_mul (tree_to_double_int (offset),
5994 uhwi_to_double_int (BITS_PER_UNIT));
5995 tem = double_int_add (tem, tree_to_double_int (bit_offset));
5996 if (double_int_fits_in_shwi_p (tem))
5997 {
5998 *pbitpos = double_int_to_shwi (tem);
5999 *poffset = NULL_TREE;
6000 return exp;
6001 }
6002 }
6003
6004 /* Otherwise, split it up. */
6005 *pbitpos = tree_low_cst (bit_offset, 0);
6006 *poffset = offset;
6007
6008 return exp;
6009 }
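/* Illustrative sketch of a call to get_inner_reference (not part of
   the compiler; the struct, the variable REF holding the COMPONENT_REF
   built for x.b, and the concrete layout numbers are assumptions):

     struct s { int a; int b : 3; } x;

     HOST_WIDE_INT bitsize, bitpos;
     tree offset;
     enum machine_mode mode;
     int unsignedp, volatilep = 0;
     tree base = get_inner_reference (ref, &bitsize, &bitpos, &offset,
                                      &mode, &unsignedp, &volatilep,
                                      false);

   With a conventional layout where int is 32 bits, BASE is the
   VAR_DECL for x, BITSIZE is 3, BITPOS is 32, OFFSET is NULL_TREE
   because the position is constant, and MODE is VOIDmode because b is
   a bit-field.  */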
6010
6011 /* Given an expression EXP that may be a COMPONENT_REF or an ARRAY_REF,
6012 look for whether EXP or any nested component-ref within EXP is marked
6013 as PACKED. */
6014
6015 bool
6016 contains_packed_reference (const_tree exp)
6017 {
6018 bool packed_p = false;
6019
6020 while (1)
6021 {
6022 switch (TREE_CODE (exp))
6023 {
6024 case COMPONENT_REF:
6025 {
6026 tree field = TREE_OPERAND (exp, 1);
6027 packed_p = DECL_PACKED (field)
6028 || TYPE_PACKED (TREE_TYPE (field))
6029 || TYPE_PACKED (TREE_TYPE (exp));
6030 if (packed_p)
6031 goto done;
6032 }
6033 break;
6034
6035 case BIT_FIELD_REF:
6036 case ARRAY_REF:
6037 case ARRAY_RANGE_REF:
6038 case REALPART_EXPR:
6039 case IMAGPART_EXPR:
6040 case VIEW_CONVERT_EXPR:
6041 break;
6042
6043 default:
6044 goto done;
6045 }
6046 exp = TREE_OPERAND (exp, 0);
6047 }
6048 done:
6049 return packed_p;
6050 }
6051
6052 /* Return a tree of sizetype representing the size, in bytes, of the element
6053 of EXP, an ARRAY_REF. */
6054
6055 tree
6056 array_ref_element_size (tree exp)
6057 {
6058 tree aligned_size = TREE_OPERAND (exp, 3);
6059 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6060
6061 /* If a size was specified in the ARRAY_REF, it's the size measured
6062 in alignment units of the element type. So multiply by that value. */
6063 if (aligned_size)
6064 {
6065 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6066 sizetype from another type of the same width and signedness. */
6067 if (TREE_TYPE (aligned_size) != sizetype)
6068 aligned_size = fold_convert (sizetype, aligned_size);
6069 return size_binop (MULT_EXPR, aligned_size,
6070 size_int (TYPE_ALIGN_UNIT (elmt_type)));
6071 }
6072
6073 /* Otherwise, take the size from that of the element type. Substitute
6074 any PLACEHOLDER_EXPR that we have. */
6075 else
6076 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
6077 }
6078
6079 /* Return a tree representing the lower bound of the array mentioned in
6080 EXP, an ARRAY_REF. */
6081
6082 tree
6083 array_ref_low_bound (tree exp)
6084 {
6085 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6086
6087 /* If a lower bound is specified in EXP, use it. */
6088 if (TREE_OPERAND (exp, 2))
6089 return TREE_OPERAND (exp, 2);
6090
6091 /* Otherwise, if there is a domain type and it has a lower bound, use it,
6092 substituting for a PLACEHOLDER_EXPR as needed. */
6093 if (domain_type && TYPE_MIN_VALUE (domain_type))
6094 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
6095
6096 /* Otherwise, return a zero of the appropriate type. */
6097 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
6098 }
6099
6100 /* Return a tree representing the upper bound of the array mentioned in
6101 EXP, an ARRAY_REF. */
6102
6103 tree
6104 array_ref_up_bound (tree exp)
6105 {
6106 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6107
6108 /* If there is a domain type and it has an upper bound, use it, substituting
6109 for a PLACEHOLDER_EXPR as needed. */
6110 if (domain_type && TYPE_MAX_VALUE (domain_type))
6111 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
6112
6113 /* Otherwise fail. */
6114 return NULL_TREE;
6115 }
6116
6117 /* Return a tree representing the offset, in bytes, of the field referenced
6118 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
6119
6120 tree
6121 component_ref_field_offset (tree exp)
6122 {
6123 tree aligned_offset = TREE_OPERAND (exp, 2);
6124 tree field = TREE_OPERAND (exp, 1);
6125
6126 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
6127 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
6128 value. */
6129 if (aligned_offset)
6130 {
6131 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6132 sizetype from another type of the same width and signedness. */
6133 if (TREE_TYPE (aligned_offset) != sizetype)
6134 aligned_offset = fold_convert (sizetype, aligned_offset);
6135 return size_binop (MULT_EXPR, aligned_offset,
6136 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
6137 }
6138
6139 /* Otherwise, take the offset from that of the field. Substitute
6140 any PLACEHOLDER_EXPR that we have. */
6141 else
6142 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
6143 }
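/* Worked example for the scaling above (illustrative numbers): if the
   COMPONENT_REF carries an aligned offset of 3 and DECL_OFFSET_ALIGN
   (field) is 32, the byte offset returned is 3 * (32 / BITS_PER_UNIT),
   i.e. 3 * 4 == 12 on the usual 8-bit-unit targets.  */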
6144
6145 /* Return 1 if T is an expression that get_inner_reference handles. */
6146
6147 int
6148 handled_component_p (const_tree t)
6149 {
6150 switch (TREE_CODE (t))
6151 {
6152 case BIT_FIELD_REF:
6153 case COMPONENT_REF:
6154 case ARRAY_REF:
6155 case ARRAY_RANGE_REF:
6156 case VIEW_CONVERT_EXPR:
6157 case REALPART_EXPR:
6158 case IMAGPART_EXPR:
6159 return 1;
6160
6161 default:
6162 return 0;
6163 }
6164 }
6165 \f
6166 /* Given an rtx VALUE that may contain additions and multiplications, return
6167 an equivalent value that just refers to a register, memory, or constant.
6168 This is done by generating instructions to perform the arithmetic and
6169 returning a pseudo-register containing the value.
6170
6171 The returned value may be a REG, SUBREG, MEM or constant. */
6172
6173 rtx
6174 force_operand (rtx value, rtx target)
6175 {
6176 rtx op1, op2;
6177 /* Use subtarget as the target for operand 0 of a binary operation. */
6178 rtx subtarget = get_subtarget (target);
6179 enum rtx_code code = GET_CODE (value);
6180
6181 /* Check for subreg applied to an expression produced by loop optimizer. */
6182 if (code == SUBREG
6183 && !REG_P (SUBREG_REG (value))
6184 && !MEM_P (SUBREG_REG (value)))
6185 {
6186 value
6187 = simplify_gen_subreg (GET_MODE (value),
6188 force_reg (GET_MODE (SUBREG_REG (value)),
6189 force_operand (SUBREG_REG (value),
6190 NULL_RTX)),
6191 GET_MODE (SUBREG_REG (value)),
6192 SUBREG_BYTE (value));
6193 code = GET_CODE (value);
6194 }
6195
6196 /* Check for a PIC address load. */
6197 if ((code == PLUS || code == MINUS)
6198 && XEXP (value, 0) == pic_offset_table_rtx
6199 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
6200 || GET_CODE (XEXP (value, 1)) == LABEL_REF
6201 || GET_CODE (XEXP (value, 1)) == CONST))
6202 {
6203 if (!subtarget)
6204 subtarget = gen_reg_rtx (GET_MODE (value));
6205 emit_move_insn (subtarget, value);
6206 return subtarget;
6207 }
6208
6209 if (ARITHMETIC_P (value))
6210 {
6211 op2 = XEXP (value, 1);
6212 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
6213 subtarget = 0;
6214 if (code == MINUS && GET_CODE (op2) == CONST_INT)
6215 {
6216 code = PLUS;
6217 op2 = negate_rtx (GET_MODE (value), op2);
6218 }
6219
6220 /* Check for an addition with OP2 a constant integer and our first
6221 operand a PLUS of a virtual register and something else. In that
6222 case, we want to emit the sum of the virtual register and the
6223 constant first and then add the other value. This allows virtual
6224 register instantiation to simply modify the constant rather than
6225 creating another one around this addition. */
6226 if (code == PLUS && GET_CODE (op2) == CONST_INT
6227 && GET_CODE (XEXP (value, 0)) == PLUS
6228 && REG_P (XEXP (XEXP (value, 0), 0))
6229 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
6230 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
6231 {
6232 rtx temp = expand_simple_binop (GET_MODE (value), code,
6233 XEXP (XEXP (value, 0), 0), op2,
6234 subtarget, 0, OPTAB_LIB_WIDEN);
6235 return expand_simple_binop (GET_MODE (value), code, temp,
6236 force_operand (XEXP (XEXP (value,
6237 0), 1), 0),
6238 target, 0, OPTAB_LIB_WIDEN);
6239 }
6240
6241 op1 = force_operand (XEXP (value, 0), subtarget);
6242 op2 = force_operand (op2, NULL_RTX);
6243 switch (code)
6244 {
6245 case MULT:
6246 return expand_mult (GET_MODE (value), op1, op2, target, 1);
6247 case DIV:
6248 if (!INTEGRAL_MODE_P (GET_MODE (value)))
6249 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6250 target, 1, OPTAB_LIB_WIDEN);
6251 else
6252 return expand_divmod (0,
6253 FLOAT_MODE_P (GET_MODE (value))
6254 ? RDIV_EXPR : TRUNC_DIV_EXPR,
6255 GET_MODE (value), op1, op2, target, 0);
6256 case MOD:
6257 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6258 target, 0);
6259 case UDIV:
6260 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6261 target, 1);
6262 case UMOD:
6263 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6264 target, 1);
6265 case ASHIFTRT:
6266 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6267 target, 0, OPTAB_LIB_WIDEN);
6268 default:
6269 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6270 target, 1, OPTAB_LIB_WIDEN);
6271 }
6272 }
6273 if (UNARY_P (value))
6274 {
6275 if (!target)
6276 target = gen_reg_rtx (GET_MODE (value));
6277 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6278 switch (code)
6279 {
6280 case ZERO_EXTEND:
6281 case SIGN_EXTEND:
6282 case TRUNCATE:
6283 case FLOAT_EXTEND:
6284 case FLOAT_TRUNCATE:
6285 convert_move (target, op1, code == ZERO_EXTEND);
6286 return target;
6287
6288 case FIX:
6289 case UNSIGNED_FIX:
6290 expand_fix (target, op1, code == UNSIGNED_FIX);
6291 return target;
6292
6293 case FLOAT:
6294 case UNSIGNED_FLOAT:
6295 expand_float (target, op1, code == UNSIGNED_FLOAT);
6296 return target;
6297
6298 default:
6299 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6300 }
6301 }
6302
6303 #ifdef INSN_SCHEDULING
6304 /* On machines that have insn scheduling, we want all memory references to be
6305 explicit, so we need to deal with such paradoxical SUBREGs. */
6306 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
6307 && (GET_MODE_SIZE (GET_MODE (value))
6308 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6309 value
6310 = simplify_gen_subreg (GET_MODE (value),
6311 force_reg (GET_MODE (SUBREG_REG (value)),
6312 force_operand (SUBREG_REG (value),
6313 NULL_RTX)),
6314 GET_MODE (SUBREG_REG (value)),
6315 SUBREG_BYTE (value));
6316 #endif
6317
6318 return value;
6319 }
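/* Minimal usage sketch for force_operand (illustrative only; BASE is
   assumed to be a pseudo-register already valid in Pmode):

     rtx base = gen_reg_rtx (Pmode);
     rtx sum = force_operand (gen_rtx_PLUS (Pmode, base, GEN_INT (12)),
                              NULL_RTX);

   Since PLUS is an arithmetic code, the call emits the addition via
   expand_simple_binop and returns an rtx, typically a pseudo, holding
   base + 12, which can then be used directly as an insn operand.  */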
6320 \f
6321 /* Subroutine of expand_expr: return nonzero iff there is no way that
6322 EXP can reference X, which is being modified. TOP_P is nonzero if this
6323 call is going to be used to determine whether we need a temporary
6324 for EXP, as opposed to a recursive call to this function.
6325
6326 It is always safe for this routine to return zero since it merely
6327 searches for optimization opportunities. */
6328
6329 int
6330 safe_from_p (const_rtx x, tree exp, int top_p)
6331 {
6332 rtx exp_rtl = 0;
6333 int i, nops;
6334
6335 if (x == 0
6336 /* If EXP has varying size, we MUST use a target since we currently
6337 have no way of allocating temporaries of variable size
6338 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6339 So we assume here that something at a higher level has prevented a
6340 clash. This is somewhat bogus, but the best we can do. Only
6341 do this when X is BLKmode and when we are at the top level. */
6342 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6343 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6344 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6345 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6346 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6347 != INTEGER_CST)
6348 && GET_MODE (x) == BLKmode)
6349 /* If X is in the outgoing argument area, it is always safe. */
6350 || (MEM_P (x)
6351 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6352 || (GET_CODE (XEXP (x, 0)) == PLUS
6353 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6354 return 1;
6355
6356 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6357 find the underlying pseudo. */
6358 if (GET_CODE (x) == SUBREG)
6359 {
6360 x = SUBREG_REG (x);
6361 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6362 return 0;
6363 }
6364
6365 /* Now look at our tree code and possibly recurse. */
6366 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6367 {
6368 case tcc_declaration:
6369 exp_rtl = DECL_RTL_IF_SET (exp);
6370 break;
6371
6372 case tcc_constant:
6373 return 1;
6374
6375 case tcc_exceptional:
6376 if (TREE_CODE (exp) == TREE_LIST)
6377 {
6378 while (1)
6379 {
6380 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6381 return 0;
6382 exp = TREE_CHAIN (exp);
6383 if (!exp)
6384 return 1;
6385 if (TREE_CODE (exp) != TREE_LIST)
6386 return safe_from_p (x, exp, 0);
6387 }
6388 }
6389 else if (TREE_CODE (exp) == CONSTRUCTOR)
6390 {
6391 constructor_elt *ce;
6392 unsigned HOST_WIDE_INT idx;
6393
6394 for (idx = 0;
6395 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
6396 idx++)
6397 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
6398 || !safe_from_p (x, ce->value, 0))
6399 return 0;
6400 return 1;
6401 }
6402 else if (TREE_CODE (exp) == ERROR_MARK)
6403 return 1; /* An already-visited SAVE_EXPR? */
6404 else
6405 return 0;
6406
6407 case tcc_statement:
6408 /* The only case we look at here is the DECL_INITIAL inside a
6409 DECL_EXPR. */
6410 return (TREE_CODE (exp) != DECL_EXPR
6411 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
6412 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
6413 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
6414
6415 case tcc_binary:
6416 case tcc_comparison:
6417 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6418 return 0;
6419 /* Fall through. */
6420
6421 case tcc_unary:
6422 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6423
6424 case tcc_expression:
6425 case tcc_reference:
6426 case tcc_vl_exp:
6427 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6428 the expression. If it is set, we conflict iff we are that rtx or
6429 both are in memory. Otherwise, we check all operands of the
6430 expression recursively. */
6431
6432 switch (TREE_CODE (exp))
6433 {
6434 case ADDR_EXPR:
6435 /* If the operand is static or we are static, we can't conflict.
6436 Likewise if we don't conflict with the operand at all. */
6437 if (staticp (TREE_OPERAND (exp, 0))
6438 || TREE_STATIC (exp)
6439 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6440 return 1;
6441
6442 /* Otherwise, the only way this can conflict is if we are taking
6443 the address of a DECL whose address is part of X, which is
6444 very rare. */
6445 exp = TREE_OPERAND (exp, 0);
6446 if (DECL_P (exp))
6447 {
6448 if (!DECL_RTL_SET_P (exp)
6449 || !MEM_P (DECL_RTL (exp)))
6450 return 0;
6451 else
6452 exp_rtl = XEXP (DECL_RTL (exp), 0);
6453 }
6454 break;
6455
6456 case MISALIGNED_INDIRECT_REF:
6457 case ALIGN_INDIRECT_REF:
6458 case INDIRECT_REF:
6459 if (MEM_P (x)
6460 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6461 get_alias_set (exp)))
6462 return 0;
6463 break;
6464
6465 case CALL_EXPR:
6466 /* Assume that the call will clobber all hard registers and
6467 all of memory. */
6468 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6469 || MEM_P (x))
6470 return 0;
6471 break;
6472
6473 case WITH_CLEANUP_EXPR:
6474 case CLEANUP_POINT_EXPR:
6475 /* Lowered by gimplify.c. */
6476 gcc_unreachable ();
6477
6478 case SAVE_EXPR:
6479 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6480
6481 default:
6482 break;
6483 }
6484
6485 /* If we have an rtx, we do not need to scan our operands. */
6486 if (exp_rtl)
6487 break;
6488
6489 nops = TREE_OPERAND_LENGTH (exp);
6490 for (i = 0; i < nops; i++)
6491 if (TREE_OPERAND (exp, i) != 0
6492 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6493 return 0;
6494
6495 break;
6496
6497 case tcc_type:
6498 /* Should never get a type here. */
6499 gcc_unreachable ();
6500
6501 case tcc_gimple_stmt:
6502 gcc_unreachable ();
6503 }
6504
6505 /* If we have an rtl, find any enclosed object. Then see if we conflict
6506 with it. */
6507 if (exp_rtl)
6508 {
6509 if (GET_CODE (exp_rtl) == SUBREG)
6510 {
6511 exp_rtl = SUBREG_REG (exp_rtl);
6512 if (REG_P (exp_rtl)
6513 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6514 return 0;
6515 }
6516
6517 /* If the rtl is X, then it is not safe. Otherwise, it is safe unless both
6518 are in memory and they conflict. */
6519 return ! (rtx_equal_p (x, exp_rtl)
6520 || (MEM_P (x) && MEM_P (exp_rtl)
6521 && true_dependence (exp_rtl, VOIDmode, x,
6522 rtx_addr_varies_p)));
6523 }
6524
6525 /* If we reach here, it is safe. */
6526 return 1;
6527 }
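/* Typical usage sketch (illustrative): a caller that wants to evaluate
   EXP1 into TARGET first checks that the expansion cannot clobber
   TARGET and otherwise falls back to a fresh temporary:

     if (! safe_from_p (target, exp1, 1))
       target = 0;

   This is exactly the pattern used by expand_operands further down in
   this file.  */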
6528
6529 \f
6530 /* Return the highest power of two that EXP is known to be a multiple of.
6531 This is used in updating alignment of MEMs in array references. */
6532
6533 unsigned HOST_WIDE_INT
6534 highest_pow2_factor (const_tree exp)
6535 {
6536 unsigned HOST_WIDE_INT c0, c1;
6537
6538 switch (TREE_CODE (exp))
6539 {
6540 case INTEGER_CST:
6541 /* We can find the lowest bit that's a one. If the low
6542 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6543 We need to handle this case since we can find it in a COND_EXPR,
6544 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6545 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6546 later ICE. */
6547 if (TREE_OVERFLOW (exp))
6548 return BIGGEST_ALIGNMENT;
6549 else
6550 {
6551 /* Note: tree_low_cst is intentionally not used here,
6552 we don't care about the upper bits. */
6553 c0 = TREE_INT_CST_LOW (exp);
6554 c0 &= -c0;
6555 return c0 ? c0 : BIGGEST_ALIGNMENT;
6556 }
6557 break;
6558
6559 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6560 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6561 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6562 return MIN (c0, c1);
6563
6564 case MULT_EXPR:
6565 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6566 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6567 return c0 * c1;
6568
6569 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6570 case CEIL_DIV_EXPR:
6571 if (integer_pow2p (TREE_OPERAND (exp, 1))
6572 && host_integerp (TREE_OPERAND (exp, 1), 1))
6573 {
6574 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6575 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6576 return MAX (1, c0 / c1);
6577 }
6578 break;
6579
6580 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6581 case SAVE_EXPR:
6582 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6583
6584 case COMPOUND_EXPR:
6585 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6586
6587 case COND_EXPR:
6588 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6589 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6590 return MIN (c0, c1);
6591
6592 default:
6593 break;
6594 }
6595
6596 return 1;
6597 }
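/* Worked examples for the cases above (illustrative; the trees are
   described informally): for the INTEGER_CST 24 the lowest-set-bit
   trick gives 24 & -24 == 8, so 8 is returned.  For an offset of the
   form i * 4 + 16, the MULT_EXPR contributes 1 * 4 == 4 (nothing is
   known about i), and the PLUS_EXPR takes MIN (4, 16) == 4, so the
   whole expression is known to be a multiple of 4.  */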
6598
6599 /* Similar, except that the alignment requirements of TARGET are
6600 taken into account. Assume it is at least as aligned as its
6601 type, unless it is a COMPONENT_REF in which case the layout of
6602 the structure gives the alignment. */
6603
6604 static unsigned HOST_WIDE_INT
6605 highest_pow2_factor_for_target (const_tree target, const_tree exp)
6606 {
6607 unsigned HOST_WIDE_INT target_align, factor;
6608
6609 factor = highest_pow2_factor (exp);
6610 if (TREE_CODE (target) == COMPONENT_REF)
6611 target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
6612 else
6613 target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
6614 return MAX (factor, target_align);
6615 }
6616 \f
6617 /* Return &VAR expression for emulated thread local VAR. */
6618
6619 static tree
6620 emutls_var_address (tree var)
6621 {
6622 tree emuvar = emutls_decl (var);
6623 tree fn = built_in_decls [BUILT_IN_EMUTLS_GET_ADDRESS];
6624 tree arg = build_fold_addr_expr_with_type (emuvar, ptr_type_node);
6625 tree arglist = build_tree_list (NULL_TREE, arg);
6626 tree call = build_function_call_expr (fn, arglist);
6627 return fold_convert (build_pointer_type (TREE_TYPE (var)), call);
6628 }
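/* Illustrative effect of the rewrite above (the mangled name of the
   control variable is an assumption about the emutls scheme, not
   something defined in this file): for

     __thread int t;

   taking &t under emulated TLS becomes, roughly,

     (int *) __emutls_get_address (&__emutls_v.t)

   where __emutls_v.t is the control object returned by emutls_decl
   and __emutls_get_address is BUILT_IN_EMUTLS_GET_ADDRESS.  */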
6629 \f
6630 /* Expands variable VAR. */
6631
6632 void
6633 expand_var (tree var)
6634 {
6635 if (DECL_EXTERNAL (var))
6636 return;
6637
6638 if (TREE_STATIC (var))
6639 /* If this is an inlined copy of a static local variable,
6640 look up the original decl. */
6641 var = DECL_ORIGIN (var);
6642
6643 if (TREE_STATIC (var)
6644 ? !TREE_ASM_WRITTEN (var)
6645 : !DECL_RTL_SET_P (var))
6646 {
6647 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
6648 /* Should be ignored. */;
6649 else if (lang_hooks.expand_decl (var))
6650 /* OK. */;
6651 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6652 expand_decl (var);
6653 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
6654 rest_of_decl_compilation (var, 0, 0);
6655 else
6656 /* No expansion needed. */
6657 gcc_assert (TREE_CODE (var) == TYPE_DECL
6658 || TREE_CODE (var) == CONST_DECL
6659 || TREE_CODE (var) == FUNCTION_DECL
6660 || TREE_CODE (var) == LABEL_DECL);
6661 }
6662 }
6663
6664 /* Subroutine of expand_expr. Expand the two operands of a binary
6665 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6666 The value may be stored in TARGET if TARGET is nonzero. The
6667 MODIFIER argument is as documented by expand_expr. */
6668
6669 static void
6670 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6671 enum expand_modifier modifier)
6672 {
6673 if (! safe_from_p (target, exp1, 1))
6674 target = 0;
6675 if (operand_equal_p (exp0, exp1, 0))
6676 {
6677 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6678 *op1 = copy_rtx (*op0);
6679 }
6680 else
6681 {
6682 /* If we need to preserve evaluation order, copy exp0 into its own
6683 temporary variable so that it can't be clobbered by exp1. */
6684 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6685 exp0 = save_expr (exp0);
6686 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6687 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6688 }
6689 }
6690
6691 \f
6692 /* Return a MEM that contains constant EXP. DEFER is as for
6693 output_constant_def and MODIFIER is as for expand_expr. */
6694
6695 static rtx
6696 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
6697 {
6698 rtx mem;
6699
6700 mem = output_constant_def (exp, defer);
6701 if (modifier != EXPAND_INITIALIZER)
6702 mem = use_anchored_address (mem);
6703 return mem;
6704 }
6705
6706 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6707 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6708
6709 static rtx
6710 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6711 enum expand_modifier modifier)
6712 {
6713 rtx result, subtarget;
6714 tree inner, offset;
6715 HOST_WIDE_INT bitsize, bitpos;
6716 int volatilep, unsignedp;
6717 enum machine_mode mode1;
6718
6719 /* If we are taking the address of a constant and are at the top level,
6720 we have to use output_constant_def since we can't call force_const_mem
6721 at top level. */
6722 /* ??? This should be considered a front-end bug. We should not be
6723 generating ADDR_EXPR of something that isn't an LVALUE. The only
6724 exception here is STRING_CST. */
6725 if (TREE_CODE (exp) == CONSTRUCTOR
6726 || CONSTANT_CLASS_P (exp))
6727 return XEXP (expand_expr_constant (exp, 0, modifier), 0);
6728
6729 /* Everything must be something allowed by is_gimple_addressable. */
6730 switch (TREE_CODE (exp))
6731 {
6732 case INDIRECT_REF:
6733 /* This case will happen via recursion for &a->b. */
6734 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6735
6736 case CONST_DECL:
6737 /* Recurse and make the output_constant_def clause above handle this. */
6738 return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
6739 tmode, modifier);
6740
6741 case REALPART_EXPR:
6742 /* The real part of the complex number is always first, therefore
6743 the address is the same as the address of the parent object. */
6744 offset = 0;
6745 bitpos = 0;
6746 inner = TREE_OPERAND (exp, 0);
6747 break;
6748
6749 case IMAGPART_EXPR:
6750 /* The imaginary part of the complex number is always second.
6751 The expression is therefore always offset by the size of the
6752 scalar type. */
6753 offset = 0;
6754 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6755 inner = TREE_OPERAND (exp, 0);
6756 break;
6757
6758 case VAR_DECL:
6759 /* TLS emulation hook - replace __thread VAR's &VAR with
6760 __emutls_get_address (&_emutls.VAR). */
6761 if (! targetm.have_tls
6762 && TREE_CODE (exp) == VAR_DECL
6763 && DECL_THREAD_LOCAL_P (exp))
6764 {
6765 exp = emutls_var_address (exp);
6766 return expand_expr (exp, target, tmode, modifier);
6767 }
6768 /* Fall through. */
6769
6770 default:
6771 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6772 expand_expr, as that can have various side effects; LABEL_DECLs for
6773 example, may not have their DECL_RTL set yet. Assume language
6774 specific tree nodes can be expanded in some interesting way. */
6775 if (DECL_P (exp)
6776 || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
6777 {
6778 result = expand_expr (exp, target, tmode,
6779 modifier == EXPAND_INITIALIZER
6780 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6781
6782 /* If the DECL isn't in memory, then the DECL wasn't properly
6783 marked TREE_ADDRESSABLE, which will be either a front-end
6784 or a tree optimizer bug. */
6785 gcc_assert (MEM_P (result));
6786 result = XEXP (result, 0);
6787
6788 /* ??? Is this needed anymore? */
6789 if (DECL_P (exp) && !TREE_USED (exp) == 0)
6790 {
6791 assemble_external (exp);
6792 TREE_USED (exp) = 1;
6793 }
6794
6795 if (modifier != EXPAND_INITIALIZER
6796 && modifier != EXPAND_CONST_ADDRESS)
6797 result = force_operand (result, target);
6798 return result;
6799 }
6800
6801 /* Pass FALSE as the last argument to get_inner_reference although
6802 we are expanding to RTL. The rationale is that we know how to
6803 handle "aligning nodes" here: we can just bypass them because
6804 they won't change the final object whose address will be returned
6805 (they actually exist only for that purpose). */
6806 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6807 &mode1, &unsignedp, &volatilep, false);
6808 break;
6809 }
6810
6811 /* We must have made progress. */
6812 gcc_assert (inner != exp);
6813
6814 subtarget = offset || bitpos ? NULL_RTX : target;
6815 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6816
6817 if (offset)
6818 {
6819 rtx tmp;
6820
6821 if (modifier != EXPAND_NORMAL)
6822 result = force_operand (result, NULL);
6823 tmp = expand_expr (offset, NULL_RTX, tmode,
6824 modifier == EXPAND_INITIALIZER
6825 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
6826
6827 result = convert_memory_address (tmode, result);
6828 tmp = convert_memory_address (tmode, tmp);
6829
6830 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6831 result = gen_rtx_PLUS (tmode, result, tmp);
6832 else
6833 {
6834 subtarget = bitpos ? NULL_RTX : target;
6835 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6836 1, OPTAB_LIB_WIDEN);
6837 }
6838 }
6839
6840 if (bitpos)
6841 {
6842 /* Someone beforehand should have rejected taking the address
6843 of such an object. */
6844 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6845
6846 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6847 if (modifier < EXPAND_SUM)
6848 result = force_operand (result, target);
6849 }
6850
6851 return result;
6852 }
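/* Worked example for the final bit-position adjustment above
   (illustrative numbers): if get_inner_reference places the field at
   BITPOS == 64 with no variable OFFSET, the code above produces
   plus_constant (result, 64 / BITS_PER_UNIT), i.e. the address of the
   containing object plus 8 bytes on 8-bit-unit targets; a BITPOS that
   is not a multiple of BITS_PER_UNIT is ruled out by the gcc_assert.  */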
6853
6854 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6855 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6856
6857 static rtx
6858 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6859 enum expand_modifier modifier)
6860 {
6861 enum machine_mode rmode;
6862 rtx result;
6863
6864 /* Target mode of VOIDmode says "whatever's natural". */
6865 if (tmode == VOIDmode)
6866 tmode = TYPE_MODE (TREE_TYPE (exp));
6867
6868 /* We can get called with some Weird Things if the user does silliness
6869 like "(short) &a". In that case, convert_memory_address won't do
6870 the right thing, so ignore the given target mode. */
6871 if (tmode != Pmode && tmode != ptr_mode)
6872 tmode = Pmode;
6873
6874 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
6875 tmode, modifier);
6876
6877 /* Despite expand_expr's claims concerning ignoring TMODE when not
6878 strictly convenient, stuff breaks if we don't honor it. Note
6879 that combined with the above, we only do this for pointer modes. */
6880 rmode = GET_MODE (result);
6881 if (rmode == VOIDmode)
6882 rmode = tmode;
6883 if (rmode != tmode)
6884 result = convert_memory_address (tmode, result);
6885
6886 return result;
6887 }
6888
6889
6890 /* expand_expr: generate code for computing expression EXP.
6891 An rtx for the computed value is returned. The value is never null.
6892 In the case of a void EXP, const0_rtx is returned.
6893
6894 The value may be stored in TARGET if TARGET is nonzero.
6895 TARGET is just a suggestion; callers must assume that
6896 the rtx returned may not be the same as TARGET.
6897
6898 If TARGET is CONST0_RTX, it means that the value will be ignored.
6899
6900 If TMODE is not VOIDmode, it suggests generating the
6901 result in mode TMODE. But this is done only when convenient.
6902 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6903 TMODE is just a suggestion; callers must assume that
6904 the rtx returned may not have mode TMODE.
6905
6906 Note that TARGET may have neither TMODE nor MODE. In that case, it
6907 probably will not be used.
6908
6909 If MODIFIER is EXPAND_SUM then when EXP is an addition
6910 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6911 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6912 products as above, or REG or MEM, or constant.
6913 Ordinarily in such cases we would output mul or add instructions
6914 and then return a pseudo reg containing the sum.
6915
6916 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6917 it also marks a label as absolutely required (it can't be dead).
6918 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6919 This is used for outputting expressions used in initializers.
6920
6921 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6922 with a constant address even if that address is not normally legitimate.
6923 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6924
6925 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6926 a call parameter. Such targets require special care as we haven't yet
6927 marked TARGET so that it's safe from being trashed by libcalls. We
6928 don't want to use TARGET for anything but the final result;
6929 Intermediate values must go elsewhere. Additionally, calls to
6930 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6931
6932 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6933 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6934 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6935 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6936 recursively. */
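/* Illustrative instance of the EXPAND_SUM contract described above
   (the register numbers are placeholders): expanding p + i * 4 with
   modifier == EXPAND_SUM may legitimately return the un-forced form

     (plus (reg 58) (mult (reg 59) (const_int 4)))

   instead of emitting add/mul insns and returning a single pseudo,
   so the caller can fold the whole value into an addressing mode.  */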
6937
6938 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6939 enum expand_modifier, rtx *);
6940
6941 rtx
6942 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6943 enum expand_modifier modifier, rtx *alt_rtl)
6944 {
6945 int rn = -1;
6946 rtx ret, last = NULL;
6947
6948 /* Handle ERROR_MARK before anybody tries to access its type. */
6949 if (TREE_CODE (exp) == ERROR_MARK
6950 || (!GIMPLE_TUPLE_P (exp) && TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
6951 {
6952 ret = CONST0_RTX (tmode);
6953 return ret ? ret : const0_rtx;
6954 }
6955
6956 if (flag_non_call_exceptions)
6957 {
6958 rn = lookup_stmt_eh_region (exp);
6959 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
6960 if (rn >= 0)
6961 last = get_last_insn ();
6962 }
6963
6964 /* If this is an expression of some kind and it has an associated line
6965 number, then emit the line number before expanding the expression.
6966
6967 We need to save and restore the file and line information so that
6968 errors discovered during expansion are emitted with the right
6969 information. It would be better if the diagnostic routines
6970 used the file/line information embedded in the tree nodes rather
6971 than globals. */
6972 if (cfun && EXPR_HAS_LOCATION (exp))
6973 {
6974 location_t saved_location = input_location;
6975 input_location = EXPR_LOCATION (exp);
6976 set_curr_insn_source_location (input_location);
6977
6978 /* Record where the insns produced belong. */
6979 set_curr_insn_block (TREE_BLOCK (exp));
6980
6981 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6982
6983 input_location = saved_location;
6984 }
6985 else
6986 {
6987 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6988 }
6989
6990 /* If using non-call exceptions, mark all insns that may trap.
6991 expand_call() will mark CALL_INSNs before we get to this code,
6992 but it doesn't handle libcalls, and these may trap. */
6993 if (rn >= 0)
6994 {
6995 rtx insn;
6996 for (insn = next_real_insn (last); insn;
6997 insn = next_real_insn (insn))
6998 {
6999 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
7000 /* If we want exceptions for non-call insns, any
7001 may_trap_p instruction may throw. */
7002 && GET_CODE (PATTERN (insn)) != CLOBBER
7003 && GET_CODE (PATTERN (insn)) != USE
7004 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
7005 {
7006 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
7007 REG_NOTES (insn));
7008 }
7009 }
7010 }
7011
7012 return ret;
7013 }
7014
7015 static rtx
7016 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
7017 enum expand_modifier modifier, rtx *alt_rtl)
7018 {
7019 rtx op0, op1, op2, temp, decl_rtl;
7020 tree type;
7021 int unsignedp;
7022 enum machine_mode mode;
7023 enum tree_code code = TREE_CODE (exp);
7024 optab this_optab;
7025 rtx subtarget, original_target;
7026 int ignore;
7027 tree context, subexp0, subexp1;
7028 bool reduce_bit_field = false;
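/* Reduce EXPR to the precision of TYPE when that precision is narrower than
   TYPE's mode; a no-op unless reduce_bit_field is set below and the result
   is actually used.  */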
7029 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
7030 ? reduce_to_bit_field_precision ((expr), \
7031 target, \
7032 type) \
7033 : (expr))
7034
7035 if (GIMPLE_STMT_P (exp))
7036 {
7037 type = void_type_node;
7038 mode = VOIDmode;
7039 unsignedp = 0;
7040 }
7041 else
7042 {
7043 type = TREE_TYPE (exp);
7044 mode = TYPE_MODE (type);
7045 unsignedp = TYPE_UNSIGNED (type);
7046 }
7047 if (lang_hooks.reduce_bit_field_operations
7048 && TREE_CODE (type) == INTEGER_TYPE
7049 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
7050 {
7051 /* An operation in what may be a bit-field type needs the
7052 result to be reduced to the precision of the bit-field type,
7053 which is narrower than that of the type's mode. */
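/* For instance (an illustrative sketch): an operation on a bit-field
   type of precision 3 is carried out in the full mode, and the result is
   then truncated back to 3 bits by reduce_to_bit_field_precision via
   REDUCE_BIT_FIELD.  */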
7054 reduce_bit_field = true;
7055 if (modifier == EXPAND_STACK_PARM)
7056 target = 0;
7057 }
7058
7059 /* Use subtarget as the target for operand 0 of a binary operation. */
7060 subtarget = get_subtarget (target);
7061 original_target = target;
7062 ignore = (target == const0_rtx
7063 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
7064 || code == CONVERT_EXPR || code == COND_EXPR
7065 || code == VIEW_CONVERT_EXPR)
7066 && TREE_CODE (type) == VOID_TYPE));
7067
7068 /* If we are going to ignore this result, we need only do something
7069 if there is a side-effect somewhere in the expression. If there
7070 is, short-circuit the most common cases here. Note that we must
7071 not call expand_expr with anything but const0_rtx in case this
7072 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
7073
7074 if (ignore)
7075 {
7076 if (! TREE_SIDE_EFFECTS (exp))
7077 return const0_rtx;
7078
7079 /* Ensure we reference a volatile object even if the value is ignored, but
7080 don't do this if all we are doing is taking its address. */
7081 if (TREE_THIS_VOLATILE (exp)
7082 && TREE_CODE (exp) != FUNCTION_DECL
7083 && mode != VOIDmode && mode != BLKmode
7084 && modifier != EXPAND_CONST_ADDRESS)
7085 {
7086 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
7087 if (MEM_P (temp))
7088 temp = copy_to_reg (temp);
7089 return const0_rtx;
7090 }
7091
7092 if (TREE_CODE_CLASS (code) == tcc_unary
7093 || code == COMPONENT_REF || code == INDIRECT_REF)
7094 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7095 modifier);
7096
7097 else if (TREE_CODE_CLASS (code) == tcc_binary
7098 || TREE_CODE_CLASS (code) == tcc_comparison
7099 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
7100 {
7101 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
7102 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
7103 return const0_rtx;
7104 }
7105 else if (code == BIT_FIELD_REF)
7106 {
7107 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
7108 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
7109 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
7110 return const0_rtx;
7111 }
7112
7113 target = 0;
7114 }
7115
7116
7117 switch (code)
7118 {
7119 case LABEL_DECL:
7120 {
7121 tree function = decl_function_context (exp);
7122
7123 temp = label_rtx (exp);
7124 temp = gen_rtx_LABEL_REF (Pmode, temp);
7125
7126 if (function != current_function_decl
7127 && function != 0)
7128 LABEL_REF_NONLOCAL_P (temp) = 1;
7129
7130 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
7131 return temp;
7132 }
7133
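/* An SSA name is expanded as its underlying variable.  */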
7134 case SSA_NAME:
7135 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
7136 NULL);
7137
7138 case PARM_DECL:
7139 case VAR_DECL:
7140 /* If a static var's type was incomplete when the decl was written,
7141 but the type is complete now, lay out the decl now. */
7142 if (DECL_SIZE (exp) == 0
7143 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
7144 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
7145 layout_decl (exp, 0);
7146
7147 /* TLS emulation hook - replace __thread vars with
7148 *__emutls_get_address (&_emutls.var). */
7149 if (! targetm.have_tls
7150 && TREE_CODE (exp) == VAR_DECL
7151 && DECL_THREAD_LOCAL_P (exp))
7152 {
7153 exp = build_fold_indirect_ref (emutls_var_address (exp));
7154 return expand_expr_real_1 (exp, target, tmode, modifier, NULL);
7155 }
7156
7157 /* ... fall through ... */
7158
7159 case FUNCTION_DECL:
7160 case RESULT_DECL:
7161 decl_rtl = DECL_RTL (exp);
7162 gcc_assert (decl_rtl);
7163 decl_rtl = copy_rtx (decl_rtl);
7164
7165 /* Ensure the variable is marked as used even if it doesn't go through
7166 a parser. If it hasn't been used yet, write out an external
7167 definition. */
7168 if (! TREE_USED (exp))
7169 {
7170 assemble_external (exp);
7171 TREE_USED (exp) = 1;
7172 }
7173
7174 /* Show we haven't gotten RTL for this yet. */
7175 temp = 0;
7176
7177 /* Variables inherited from containing functions should have
7178 been lowered by this point. */
7179 context = decl_function_context (exp);
7180 gcc_assert (!context
7181 || context == current_function_decl
7182 || TREE_STATIC (exp)
7183 /* ??? C++ creates functions that are not TREE_STATIC. */
7184 || TREE_CODE (exp) == FUNCTION_DECL);
7185
7186 /* This is the case of an array whose size is to be determined
7187 from its initializer, while the initializer is still being parsed.
7188 See expand_decl. */
7189
7190 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
7191 temp = validize_mem (decl_rtl);
7192
7193 /* If DECL_RTL is memory, we are in the normal case; if either
7194 the address is not valid, or it is not a register and -fforce-addr
7195 is specified, get the address into a register. */
7196
7197 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
7198 {
7199 if (alt_rtl)
7200 *alt_rtl = decl_rtl;
7201 decl_rtl = use_anchored_address (decl_rtl);
7202 if (modifier != EXPAND_CONST_ADDRESS
7203 && modifier != EXPAND_SUM
7204 && (!memory_address_p (DECL_MODE (exp), XEXP (decl_rtl, 0))
7205 || (flag_force_addr && !REG_P (XEXP (decl_rtl, 0)))))
7206 temp = replace_equiv_address (decl_rtl,
7207 copy_rtx (XEXP (decl_rtl, 0)));
7208 }
7209
7210 /* If we got something, return it. But first, set the alignment
7211 if the address is a register. */
7212 if (temp != 0)
7213 {
7214 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
7215 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
7216
7217 return temp;
7218 }
7219
7220 /* If the mode of DECL_RTL does not match that of the decl, it
7221 must be a promoted value. We return a SUBREG of the wanted mode,
7222 but mark it so that we know that it was already extended. */
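/* For instance (an illustrative sketch): a narrow PARM_DECL promoted to a
   wider register mode by promote_mode has its DECL_RTL in the wider mode;
   the code below hands back a lowpart SUBREG in the declared mode with
   SUBREG_PROMOTED_VAR_P set, so later users know the value has already
   been sign- or zero-extended.  */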
7223
7224 if (REG_P (decl_rtl)
7225 && GET_MODE (decl_rtl) != DECL_MODE (exp))
7226 {
7227 enum machine_mode pmode;
7228
7229 /* Get the signedness used for this variable. Ensure we get the
7230 same mode we got when the variable was declared. */
7231 pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
7232 (TREE_CODE (exp) == RESULT_DECL
7233 || TREE_CODE (exp) == PARM_DECL) ? 1 : 0);
7234 gcc_assert (GET_MODE (decl_rtl) == pmode);
7235
7236 temp = gen_lowpart_SUBREG (mode, decl_rtl);
7237 SUBREG_PROMOTED_VAR_P (temp) = 1;
7238 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
7239 return temp;
7240 }
7241
7242 return decl_rtl;
7243
7244 case INTEGER_CST:
7245 temp = immed_double_const (TREE_INT_CST_LOW (exp),
7246 TREE_INT_CST_HIGH (exp), mode);
7247
7248 return temp;
7249
7250 case VECTOR_CST:
7251 {
7252 tree tmp = NULL_TREE;
7253 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
7254 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
7255 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
7256 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
7257 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
7258 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
7259 return const_vector_from_tree (exp);
7260 if (GET_MODE_CLASS (mode) == MODE_INT)
7261 {
7262 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
7263 if (type_for_mode)
7264 tmp = fold_unary (VIEW_CONVERT_EXPR, type_for_mode, exp);
7265 }
7266 if (!tmp)
7267 tmp = build_constructor_from_list (type,
7268 TREE_VECTOR_CST_ELTS (exp));
7269 return expand_expr (tmp, ignore ? const0_rtx : target,
7270 tmode, modifier);
7271 }
7272
7273 case CONST_DECL:
7274 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
7275
7276 case REAL_CST:
7277 /* If optimized, generate immediate CONST_DOUBLE
7278 which will be turned into memory by reload if necessary.
7279
7280 We used to force a register so that loop.c could see it. But
7281 this does not allow gen_* patterns to perform optimizations with
7282 the constants. It also produces two insns in cases like "x = 1.0;".
7283 On most machines, floating-point constants are not permitted in
7284 many insns, so we'd end up copying it to a register in any case.
7285
7286 Now, we do the copying in expand_binop, if appropriate. */
7287 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
7288 TYPE_MODE (TREE_TYPE (exp)));
7289
7290 case FIXED_CST:
7291 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
7292 TYPE_MODE (TREE_TYPE (exp)));
7293
7294 case COMPLEX_CST:
7295 /* Handle evaluating a complex constant in a CONCAT target. */
7296 if (original_target && GET_CODE (original_target) == CONCAT)
7297 {
7298 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7299 rtx rtarg, itarg;
7300
7301 rtarg = XEXP (original_target, 0);
7302 itarg = XEXP (original_target, 1);
7303
7304 /* Move the real and imaginary parts separately. */
7305 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
7306 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
7307
7308 if (op0 != rtarg)
7309 emit_move_insn (rtarg, op0);
7310 if (op1 != itarg)
7311 emit_move_insn (itarg, op1);
7312
7313 return original_target;
7314 }
7315
7316 /* ... fall through ... */
7317
7318 case STRING_CST:
7319 temp = expand_expr_constant (exp, 1, modifier);
7320
7321 /* temp contains a constant address.
7322 On RISC machines where a constant address isn't valid,
7323 make some insns to get that address into a register. */
7324 if (modifier != EXPAND_CONST_ADDRESS
7325 && modifier != EXPAND_INITIALIZER
7326 && modifier != EXPAND_SUM
7327 && (! memory_address_p (mode, XEXP (temp, 0))
7328 || flag_force_addr))
7329 return replace_equiv_address (temp,
7330 copy_rtx (XEXP (temp, 0)));
7331 return temp;
7332
7333 case SAVE_EXPR:
7334 {
7335 tree val = TREE_OPERAND (exp, 0);
7336 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
7337
7338 if (!SAVE_EXPR_RESOLVED_P (exp))
7339 {
7340 /* We can indeed still hit this case, typically via builtin
7341 expanders calling save_expr immediately before expanding
7342 something. Assume this means that we only have to deal
7343 with non-BLKmode values. */
7344 gcc_assert (GET_MODE (ret) != BLKmode);
7345
7346 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
7347 DECL_ARTIFICIAL (val) = 1;
7348 DECL_IGNORED_P (val) = 1;
7349 TREE_OPERAND (exp, 0) = val;
7350 SAVE_EXPR_RESOLVED_P (exp) = 1;
7351
7352 if (!CONSTANT_P (ret))
7353 ret = copy_to_reg (ret);
7354 SET_DECL_RTL (val, ret);
7355 }
7356
7357 return ret;
7358 }
7359
7360 case GOTO_EXPR:
7361 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
7362 expand_goto (TREE_OPERAND (exp, 0));
7363 else
7364 expand_computed_goto (TREE_OPERAND (exp, 0));
7365 return const0_rtx;
7366
7367 case CONSTRUCTOR:
7368 /* If we don't need the result, just ensure we evaluate any
7369 subexpressions. */
7370 if (ignore)
7371 {
7372 unsigned HOST_WIDE_INT idx;
7373 tree value;
7374
7375 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
7376 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
7377
7378 return const0_rtx;
7379 }
7380
7381 /* Try to avoid creating a temporary at all. This is possible
7382 if all of the initializer is zero.
7383 FIXME: try to handle all [0..255] initializers that memset
7384 can handle. */
7385 else if (TREE_STATIC (exp)
7386 && !TREE_ADDRESSABLE (exp)
7387 && target != 0 && mode == BLKmode
7388 && all_zeros_p (exp))
7389 {
7390 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7391 return target;
7392 }
7393
7394 /* All elts simple constants => refer to a constant in memory. But
7395 if this is a non-BLKmode mode, let it store a field at a time
7396 since that should make a CONST_INT or CONST_DOUBLE when we
7397 fold. Likewise, if we have a target we can use, it is best to
7398 store directly into the target unless the type is large enough
7399 that memcpy will be used. If we are making an initializer and
7400 all operands are constant, put it in memory as well.
7401
7402 FIXME: Avoid trying to fill vector constructors piecemeal.
7403 Output them with output_constant_def below unless we're sure
7404 they're zeros. This should go away when vector initializers
7405 are treated like VECTOR_CST instead of arrays.
7406 */
7407 else if ((TREE_STATIC (exp)
7408 && ((mode == BLKmode
7409 && ! (target != 0 && safe_from_p (target, exp, 1)))
7410 || TREE_ADDRESSABLE (exp)
7411 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7412 && (! MOVE_BY_PIECES_P
7413 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7414 TYPE_ALIGN (type)))
7415 && ! mostly_zeros_p (exp))))
7416 || ((modifier == EXPAND_INITIALIZER
7417 || modifier == EXPAND_CONST_ADDRESS)
7418 && TREE_CONSTANT (exp)))
7419 {
7420 rtx constructor = expand_expr_constant (exp, 1, modifier);
7421
7422 if (modifier != EXPAND_CONST_ADDRESS
7423 && modifier != EXPAND_INITIALIZER
7424 && modifier != EXPAND_SUM)
7425 constructor = validize_mem (constructor);
7426
7427 return constructor;
7428 }
7429 else
7430 {
7431 /* Handle calls that pass values in multiple non-contiguous
7432 locations. The Irix 6 ABI has examples of this. */
7433 if (target == 0 || ! safe_from_p (target, exp, 1)
7434 || GET_CODE (target) == PARALLEL
7435 || modifier == EXPAND_STACK_PARM)
7436 target
7437 = assign_temp (build_qualified_type (type,
7438 (TYPE_QUALS (type)
7439 | (TREE_READONLY (exp)
7440 * TYPE_QUAL_CONST))),
7441 0, TREE_ADDRESSABLE (exp), 1);
7442
7443 store_constructor (exp, target, 0, int_expr_size (exp));
7444 return target;
7445 }
7446
7447 case MISALIGNED_INDIRECT_REF:
7448 case ALIGN_INDIRECT_REF:
7449 case INDIRECT_REF:
7450 {
7451 tree exp1 = TREE_OPERAND (exp, 0);
7452
7453 if (modifier != EXPAND_WRITE)
7454 {
7455 tree t;
7456
7457 t = fold_read_from_constant_string (exp);
7458 if (t)
7459 return expand_expr (t, target, tmode, modifier);
7460 }
7461
7462 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7463 op0 = memory_address (mode, op0);
7464
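/* For ALIGN_INDIRECT_REF, round the address down to the type's alignment
   by masking off the low-order bits (e.g. AND with -16 for a
   16-byte-aligned type) before forming the MEM.  */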
7465 if (code == ALIGN_INDIRECT_REF)
7466 {
7467 int align = TYPE_ALIGN_UNIT (type);
7468 op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
7469 op0 = memory_address (mode, op0);
7470 }
7471
7472 temp = gen_rtx_MEM (mode, op0);
7473
7474 set_mem_attributes (temp, exp, 0);
7475
7476 /* Resolve the misalignment now, so that we don't have to remember
7477 to resolve it later. Of course, this only works for reads. */
7478 /* ??? When we get around to supporting writes, we'll have to handle
7479 this in store_expr directly. The vectorizer isn't generating
7480 those yet, however. */
7481 if (code == MISALIGNED_INDIRECT_REF)
7482 {
7483 int icode;
7484 rtx reg, insn;
7485
7486 gcc_assert (modifier == EXPAND_NORMAL
7487 || modifier == EXPAND_STACK_PARM);
7488
7489 /* The vectorizer should have already checked the mode. */
7490 icode = optab_handler (movmisalign_optab, mode)->insn_code;
7491 gcc_assert (icode != CODE_FOR_nothing);
7492
7493 /* We've already validated the memory, and we're creating a
7494 new pseudo destination. The predicates really can't fail. */
7495 reg = gen_reg_rtx (mode);
7496
7497 /* Nor can the insn generator. */
7498 insn = GEN_FCN (icode) (reg, temp);
7499 emit_insn (insn);
7500
7501 return reg;
7502 }
7503
7504 return temp;
7505 }
7506
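/* A TARGET_MEM_REF describes a memory address in a form the target can use
   directly (roughly SYMBOL + BASE + INDEX * STEP + OFFSET); rebuild that
   address and wrap it in a MEM carrying the original reference's
   attributes.  */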
7507 case TARGET_MEM_REF:
7508 {
7509 struct mem_address addr;
7510
7511 get_address_description (exp, &addr);
7512 op0 = addr_for_mem_ref (&addr, true);
7513 op0 = memory_address (mode, op0);
7514 temp = gen_rtx_MEM (mode, op0);
7515 set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
7516 }
7517 return temp;
7518
7519 case ARRAY_REF:
7520
7521 {
7522 tree array = TREE_OPERAND (exp, 0);
7523 tree index = TREE_OPERAND (exp, 1);
7524
7525 /* Fold an expression like: "foo"[2].
7526 This is not done in fold so it won't happen inside &.
7527 Don't fold if this is for wide characters since it's too
7528 difficult to do correctly and this is a very rare case. */
7529
7530 if (modifier != EXPAND_CONST_ADDRESS
7531 && modifier != EXPAND_INITIALIZER
7532 && modifier != EXPAND_MEMORY)
7533 {
7534 tree t = fold_read_from_constant_string (exp);
7535
7536 if (t)
7537 return expand_expr (t, target, tmode, modifier);
7538 }
7539
7540 /* If this is a constant index into a constant array,
7541 just get the value from the array. Handle both the cases when
7542 we have an explicit constructor and when our operand is a variable
7543 that was declared const. */
7544
7545 if (modifier != EXPAND_CONST_ADDRESS
7546 && modifier != EXPAND_INITIALIZER
7547 && modifier != EXPAND_MEMORY
7548 && TREE_CODE (array) == CONSTRUCTOR
7549 && ! TREE_SIDE_EFFECTS (array)
7550 && TREE_CODE (index) == INTEGER_CST)
7551 {
7552 unsigned HOST_WIDE_INT ix;
7553 tree field, value;
7554
7555 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
7556 field, value)
7557 if (tree_int_cst_equal (field, index))
7558 {
7559 if (!TREE_SIDE_EFFECTS (value))
7560 return expand_expr (fold (value), target, tmode, modifier);
7561 break;
7562 }
7563 }
7564
7565 else if (optimize >= 1
7566 && modifier != EXPAND_CONST_ADDRESS
7567 && modifier != EXPAND_INITIALIZER
7568 && modifier != EXPAND_MEMORY
7569 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7570 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7571 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
7572 && targetm.binds_local_p (array))
7573 {
7574 if (TREE_CODE (index) == INTEGER_CST)
7575 {
7576 tree init = DECL_INITIAL (array);
7577
7578 if (TREE_CODE (init) == CONSTRUCTOR)
7579 {
7580 unsigned HOST_WIDE_INT ix;
7581 tree field, value;
7582
7583 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
7584 field, value)
7585 if (tree_int_cst_equal (field, index))
7586 {
7587 if (!TREE_SIDE_EFFECTS (value))
7588 return expand_expr (fold (value), target, tmode,
7589 modifier);
7590 break;
7591 }
7592 }
7593 else if (TREE_CODE (init) == STRING_CST)
7594 {
7595 tree index1 = index;
7596 tree low_bound = array_ref_low_bound (exp);
7597 index1 = fold_convert (sizetype, TREE_OPERAND (exp, 1));
7598
7599 /* Optimize the special case of a zero lower bound.
7600
7601 We convert the low_bound to sizetype to avoid some problems
7602 with constant folding. (E.g. suppose the lower bound is 1,
7603 and its mode is QI. Without the conversion, (ARRAY
7604 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7605 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7606
7607 if (! integer_zerop (low_bound))
7608 index1 = size_diffop (index1, fold_convert (sizetype,
7609 low_bound));
7610
7611 if (0 > compare_tree_int (index1,
7612 TREE_STRING_LENGTH (init)))
7613 {
7614 tree type = TREE_TYPE (TREE_TYPE (init));
7615 enum machine_mode mode = TYPE_MODE (type);
7616
7617 if (GET_MODE_CLASS (mode) == MODE_INT
7618 && GET_MODE_SIZE (mode) == 1)
7619 return gen_int_mode (TREE_STRING_POINTER (init)
7620 [TREE_INT_CST_LOW (index1)],
7621 mode);
7622 }
7623 }
7624 }
7625 }
7626 }
7627 goto normal_inner_ref;
7628
7629 case COMPONENT_REF:
7630 /* If the operand is a CONSTRUCTOR, we can just extract the
7631 appropriate field if it is present. */
7632 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7633 {
7634 unsigned HOST_WIDE_INT idx;
7635 tree field, value;
7636
7637 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7638 idx, field, value)
7639 if (field == TREE_OPERAND (exp, 1)
7640 /* We can normally use the value of the field in the
7641 CONSTRUCTOR. However, if this is a bitfield in
7642 an integral mode that we can fit in a HOST_WIDE_INT,
7643 we must mask only the number of bits in the bitfield,
7644 since this is done implicitly by the constructor. If
7645 the bitfield does not meet either of those conditions,
7646 we can't do this optimization. */
7647 && (! DECL_BIT_FIELD (field)
7648 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
7649 && (GET_MODE_BITSIZE (DECL_MODE (field))
7650 <= HOST_BITS_PER_WIDE_INT))))
7651 {
7652 if (DECL_BIT_FIELD (field)
7653 && modifier == EXPAND_STACK_PARM)
7654 target = 0;
7655 op0 = expand_expr (value, target, tmode, modifier);
7656 if (DECL_BIT_FIELD (field))
7657 {
7658 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
7659 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
7660
7661 if (TYPE_UNSIGNED (TREE_TYPE (field)))
7662 {
7663 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7664 op0 = expand_and (imode, op0, op1, target);
7665 }
7666 else
7667 {
7668 tree count
7669 = build_int_cst (NULL_TREE,
7670 GET_MODE_BITSIZE (imode) - bitsize);
7671
7672 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7673 target, 0);
7674 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7675 target, 0);
7676 }
7677 }
7678
7679 return op0;
7680 }
7681 }
7682 goto normal_inner_ref;
7683
7684 case BIT_FIELD_REF:
7685 case ARRAY_RANGE_REF:
7686 normal_inner_ref:
7687 {
7688 enum machine_mode mode1;
7689 HOST_WIDE_INT bitsize, bitpos;
7690 tree offset;
7691 int volatilep = 0;
7692 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7693 &mode1, &unsignedp, &volatilep, true);
7694 rtx orig_op0;
7695
7696 /* If we got back the original object, something is wrong. Perhaps
7697 we are evaluating an expression too early. In any event, don't
7698 infinitely recurse. */
7699 gcc_assert (tem != exp);
7700
7701 /* If TEM's type is a union of variable size, pass TARGET to the inner
7702 computation, since it will need a temporary and TARGET is known
7703 to suffice. This occurs in unchecked conversion in Ada. */
7704
7705 orig_op0 = op0
7706 = expand_expr (tem,
7707 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7708 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7709 != INTEGER_CST)
7710 && modifier != EXPAND_STACK_PARM
7711 ? target : NULL_RTX),
7712 VOIDmode,
7713 (modifier == EXPAND_INITIALIZER
7714 || modifier == EXPAND_CONST_ADDRESS
7715 || modifier == EXPAND_STACK_PARM)
7716 ? modifier : EXPAND_NORMAL);
7717
7718 /* If this is a constant, put it into a register if it is a legitimate
7719 constant, OFFSET is 0, and we won't try to extract outside the
7720 register (in case we were passed a partially uninitialized object
7721 or a view_conversion to a larger size). Force the constant to
7722 memory otherwise. */
7723 if (CONSTANT_P (op0))
7724 {
7725 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7726 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7727 && offset == 0
7728 && bitpos + bitsize <= GET_MODE_BITSIZE (mode))
7729 op0 = force_reg (mode, op0);
7730 else
7731 op0 = validize_mem (force_const_mem (mode, op0));
7732 }
7733
7734 /* Otherwise, if this object is not in memory and we either have an
7735 offset, a BLKmode result, or a reference outside the object, put it
7736 there. Such cases can occur in Ada if we have unchecked conversion
7737 of an expression from a scalar type to an array or record type or
7738 for an ARRAY_RANGE_REF whose type is BLKmode. */
7739 else if (!MEM_P (op0)
7740 && (offset != 0
7741 || (bitpos + bitsize > GET_MODE_BITSIZE (GET_MODE (op0)))
7742 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7743 {
7744 tree nt = build_qualified_type (TREE_TYPE (tem),
7745 (TYPE_QUALS (TREE_TYPE (tem))
7746 | TYPE_QUAL_CONST));
7747 rtx memloc = assign_temp (nt, 1, 1, 1);
7748
7749 emit_move_insn (memloc, op0);
7750 op0 = memloc;
7751 }
7752
7753 if (offset != 0)
7754 {
7755 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7756 EXPAND_SUM);
7757
7758 gcc_assert (MEM_P (op0));
7759
7760 #ifdef POINTERS_EXTEND_UNSIGNED
7761 if (GET_MODE (offset_rtx) != Pmode)
7762 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7763 #else
7764 if (GET_MODE (offset_rtx) != ptr_mode)
7765 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7766 #endif
7767
7768 if (GET_MODE (op0) == BLKmode
7769 /* A constant address in OP0 can have VOIDmode; we must
7770 not try to call force_reg in that case. */
7771 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7772 && bitsize != 0
7773 && (bitpos % bitsize) == 0
7774 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7775 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7776 {
7777 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7778 bitpos = 0;
7779 }
7780
7781 op0 = offset_address (op0, offset_rtx,
7782 highest_pow2_factor (offset));
7783 }
7784
7785 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7786 record its alignment as BIGGEST_ALIGNMENT. */
7787 if (MEM_P (op0) && bitpos == 0 && offset != 0
7788 && is_aligning_offset (offset, tem))
7789 set_mem_align (op0, BIGGEST_ALIGNMENT);
7790
7791 /* Don't forget about volatility even if this is a bitfield. */
7792 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
7793 {
7794 if (op0 == orig_op0)
7795 op0 = copy_rtx (op0);
7796
7797 MEM_VOLATILE_P (op0) = 1;
7798 }
7799
7800 /* The following code doesn't handle CONCAT.
7801 Assume only bitpos == 0 can be used for CONCAT, due to
7802 one-element arrays having the same mode as their element. */
7803 if (GET_CODE (op0) == CONCAT)
7804 {
7805 gcc_assert (bitpos == 0
7806 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
7807 return op0;
7808 }
7809
7810 /* In cases where an aligned union has an unaligned object
7811 as a field, we might be extracting a BLKmode value from
7812 an integer-mode (e.g., SImode) object. Handle this case
7813 by doing the extract into an object as wide as the field
7814 (which we know to be the width of a basic mode), then
7815 storing into memory, and changing the mode to BLKmode. */
7816 if (mode1 == VOIDmode
7817 || REG_P (op0) || GET_CODE (op0) == SUBREG
7818 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7819 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7820 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7821 && modifier != EXPAND_CONST_ADDRESS
7822 && modifier != EXPAND_INITIALIZER)
7823 /* If the field isn't aligned enough to fetch as a memref,
7824 fetch it as a bit field. */
7825 || (mode1 != BLKmode
7826 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7827 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7828 || (MEM_P (op0)
7829 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7830 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7831 && ((modifier == EXPAND_CONST_ADDRESS
7832 || modifier == EXPAND_INITIALIZER)
7833 ? STRICT_ALIGNMENT
7834 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7835 || (bitpos % BITS_PER_UNIT != 0)))
7836 /* If the type and the field are a constant size and the
7837 size of the type isn't the same size as the bitfield,
7838 we must use bitfield operations. */
7839 || (bitsize >= 0
7840 && TYPE_SIZE (TREE_TYPE (exp))
7841 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
7842 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7843 bitsize)))
7844 {
7845 enum machine_mode ext_mode = mode;
7846
7847 if (ext_mode == BLKmode
7848 && ! (target != 0 && MEM_P (op0)
7849 && MEM_P (target)
7850 && bitpos % BITS_PER_UNIT == 0))
7851 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7852
7853 if (ext_mode == BLKmode)
7854 {
7855 if (target == 0)
7856 target = assign_temp (type, 0, 1, 1);
7857
7858 if (bitsize == 0)
7859 return target;
7860
7861 /* In this case, BITPOS must start at a byte boundary and
7862 TARGET, if specified, must be a MEM. */
7863 gcc_assert (MEM_P (op0)
7864 && (!target || MEM_P (target))
7865 && !(bitpos % BITS_PER_UNIT));
7866
7867 emit_block_move (target,
7868 adjust_address (op0, VOIDmode,
7869 bitpos / BITS_PER_UNIT),
7870 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7871 / BITS_PER_UNIT),
7872 (modifier == EXPAND_STACK_PARM
7873 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7874
7875 return target;
7876 }
7877
7878 op0 = validize_mem (op0);
7879
7880 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7881 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7882
7883 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7884 (modifier == EXPAND_STACK_PARM
7885 ? NULL_RTX : target),
7886 ext_mode, ext_mode);
7887
7888 /* If the result is a record type and BITSIZE is narrower than
7889 the mode of OP0, an integral mode, and this is a big endian
7890 machine, we must put the field into the high-order bits. */
7891 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7892 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7893 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7894 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7895 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7896 - bitsize),
7897 op0, 1);
7898
7899 /* If the result type is BLKmode, store the data into a temporary
7900 of the appropriate type, but with the mode corresponding to the
7901 mode for the data we have (op0's mode). It's tempting to make
7902 this a constant type, since we know it's only being stored once,
7903 but that can cause problems if we are taking the address of this
7904 COMPONENT_REF because the MEM of any reference via that address
7905 will have flags corresponding to the type, which will not
7906 necessarily be constant. */
7907 if (mode == BLKmode)
7908 {
7909 HOST_WIDE_INT size = GET_MODE_BITSIZE (ext_mode);
7910 rtx new;
7911
7912 /* If the reference doesn't use the alias set of its type,
7913 we cannot create the temporary using that type. */
7914 if (component_uses_parent_alias_set (exp))
7915 {
7916 new = assign_stack_local (ext_mode, size, 0);
7917 set_mem_alias_set (new, get_alias_set (exp));
7918 }
7919 else
7920 new = assign_stack_temp_for_type (ext_mode, size, 0, type);
7921
7922 emit_move_insn (new, op0);
7923 op0 = copy_rtx (new);
7924 PUT_MODE (op0, BLKmode);
7925 set_mem_attributes (op0, exp, 1);
7926 }
7927
7928 return op0;
7929 }
7930
7931 /* If the result is BLKmode, use that to access the object
7932 now as well. */
7933 if (mode == BLKmode)
7934 mode1 = BLKmode;
7935
7936 /* Get a reference to just this component. */
7937 if (modifier == EXPAND_CONST_ADDRESS
7938 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7939 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7940 else
7941 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7942
7943 if (op0 == orig_op0)
7944 op0 = copy_rtx (op0);
7945
7946 set_mem_attributes (op0, exp, 0);
7947 if (REG_P (XEXP (op0, 0)))
7948 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7949
7950 MEM_VOLATILE_P (op0) |= volatilep;
7951 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7952 || modifier == EXPAND_CONST_ADDRESS
7953 || modifier == EXPAND_INITIALIZER)
7954 return op0;
7955 else if (target == 0)
7956 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7957
7958 convert_move (target, op0, unsignedp);
7959 return target;
7960 }
7961
7962 case OBJ_TYPE_REF:
7963 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
7964
7965 case CALL_EXPR:
7966 /* All valid uses of __builtin_va_arg_pack () are removed during
7967 inlining. */
7968 if (CALL_EXPR_VA_ARG_PACK (exp))
7969 error ("invalid use of %<__builtin_va_arg_pack ()%>");
7970 /* Check for a built-in function. */
7971 if (TREE_CODE (CALL_EXPR_FN (exp)) == ADDR_EXPR
7972 && (TREE_CODE (TREE_OPERAND (CALL_EXPR_FN (exp), 0))
7973 == FUNCTION_DECL)
7974 && DECL_BUILT_IN (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
7975 {
7976 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (CALL_EXPR_FN (exp), 0))
7977 == BUILT_IN_FRONTEND)
7978 return lang_hooks.expand_expr (exp, original_target,
7979 tmode, modifier,
7980 alt_rtl);
7981 else
7982 return expand_builtin (exp, target, subtarget, tmode, ignore);
7983 }
7984
7985 return expand_call (exp, target, ignore);
7986
7987 case NON_LVALUE_EXPR:
7988 case NOP_EXPR:
7989 case CONVERT_EXPR:
7990 if (TREE_OPERAND (exp, 0) == error_mark_node)
7991 return const0_rtx;
7992
7993 if (TREE_CODE (type) == UNION_TYPE)
7994 {
7995 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7996
7997 /* If both input and output are BLKmode, this conversion isn't doing
7998 anything except possibly changing memory attributes. */
7999 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
8000 {
8001 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
8002 modifier);
8003
8004 result = copy_rtx (result);
8005 set_mem_attributes (result, exp, 0);
8006 return result;
8007 }
8008
8009 if (target == 0)
8010 {
8011 if (TYPE_MODE (type) != BLKmode)
8012 target = gen_reg_rtx (TYPE_MODE (type));
8013 else
8014 target = assign_temp (type, 0, 1, 1);
8015 }
8016
8017 if (MEM_P (target))
8018 /* Store data into beginning of memory target. */
8019 store_expr (TREE_OPERAND (exp, 0),
8020 adjust_address (target, TYPE_MODE (valtype), 0),
8021 modifier == EXPAND_STACK_PARM,
8022 false);
8023
8024 else
8025 {
8026 gcc_assert (REG_P (target));
8027
8028 /* Store this field into a union of the proper type. */
8029 store_field (target,
8030 MIN ((int_size_in_bytes (TREE_TYPE
8031 (TREE_OPERAND (exp, 0)))
8032 * BITS_PER_UNIT),
8033 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
8034 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
8035 type, 0, false);
8036 }
8037
8038 /* Return the entire union. */
8039 return target;
8040 }
8041
8042 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8043 {
8044 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
8045 modifier);
8046
8047 /* If the signedness of the conversion differs and OP0 is
8048 a promoted SUBREG, clear that indication since we now
8049 have to do the proper extension. */
8050 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
8051 && GET_CODE (op0) == SUBREG)
8052 SUBREG_PROMOTED_VAR_P (op0) = 0;
8053
8054 return REDUCE_BIT_FIELD (op0);
8055 }
8056
8057 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode,
8058 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8059 if (GET_MODE (op0) == mode)
8060 ;
8061
8062 /* If OP0 is a constant, just convert it into the proper mode. */
8063 else if (CONSTANT_P (op0))
8064 {
8065 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8066 enum machine_mode inner_mode = TYPE_MODE (inner_type);
8067
8068 if (modifier == EXPAND_INITIALIZER)
8069 op0 = simplify_gen_subreg (mode, op0, inner_mode,
8070 subreg_lowpart_offset (mode,
8071 inner_mode));
8072 else
8073 op0 = convert_modes (mode, inner_mode, op0,
8074 TYPE_UNSIGNED (inner_type));
8075 }
8076
8077 else if (modifier == EXPAND_INITIALIZER)
8078 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8079
8080 else if (target == 0)
8081 op0 = convert_to_mode (mode, op0,
8082 TYPE_UNSIGNED (TREE_TYPE
8083 (TREE_OPERAND (exp, 0))));
8084 else
8085 {
8086 convert_move (target, op0,
8087 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8088 op0 = target;
8089 }
8090
8091 return REDUCE_BIT_FIELD (op0);
8092
8093 case VIEW_CONVERT_EXPR:
8094 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
8095
8096 /* If the input and output modes are both the same, we are done. */
8097 if (TYPE_MODE (type) == GET_MODE (op0))
8098 ;
8099 /* If neither mode is BLKmode, and both modes are the same size
8100 then we can use gen_lowpart. */
8101 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
8102 && GET_MODE_SIZE (TYPE_MODE (type))
8103 == GET_MODE_SIZE (GET_MODE (op0)))
8104 {
8105 if (GET_CODE (op0) == SUBREG)
8106 op0 = force_reg (GET_MODE (op0), op0);
8107 op0 = gen_lowpart (TYPE_MODE (type), op0);
8108 }
8109 /* If both modes are integral, then we can convert from one to the
8110 other. */
8111 else if (SCALAR_INT_MODE_P (GET_MODE (op0))
8112 && SCALAR_INT_MODE_P (TYPE_MODE (type)))
8113 op0 = convert_modes (TYPE_MODE (type), GET_MODE (op0), op0,
8114 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8115 /* As a last resort, spill op0 to memory, and reload it in a
8116 different mode. */
8117 else if (!MEM_P (op0))
8118 {
8119 /* If the operand is not a MEM, force it into memory. Since we
8120 are going to be changing the mode of the MEM, don't call
8121 force_const_mem for constants because we don't allow pool
8122 constants to change mode. */
8123 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8124
8125 gcc_assert (!TREE_ADDRESSABLE (exp));
8126
8127 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
8128 target
8129 = assign_stack_temp_for_type
8130 (TYPE_MODE (inner_type),
8131 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
8132
8133 emit_move_insn (target, op0);
8134 op0 = target;
8135 }
8136
8137 /* At this point, OP0 is in the correct mode. If the output type is such
8138 that the operand is known to be aligned, indicate that it is.
8139 Otherwise, we need only be concerned about alignment for non-BLKmode
8140 results. */
8141 if (MEM_P (op0))
8142 {
8143 op0 = copy_rtx (op0);
8144
8145 if (TYPE_ALIGN_OK (type))
8146 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
8147 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
8148 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
8149 {
8150 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8151 HOST_WIDE_INT temp_size
8152 = MAX (int_size_in_bytes (inner_type),
8153 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
8154 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
8155 temp_size, 0, type);
8156 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
8157
8158 gcc_assert (!TREE_ADDRESSABLE (exp));
8159
8160 if (GET_MODE (op0) == BLKmode)
8161 emit_block_move (new_with_op0_mode, op0,
8162 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
8163 (modifier == EXPAND_STACK_PARM
8164 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
8165 else
8166 emit_move_insn (new_with_op0_mode, op0);
8167
8168 op0 = new;
8169 }
8170
8171 op0 = adjust_address (op0, TYPE_MODE (type), 0);
8172 }
8173
8174 return op0;
8175
8176 case POINTER_PLUS_EXPR:
8177 /* Even though the sizetype mode and the pointer's mode can be different,
8178 expand is able to handle this correctly and get the correct result out
8179 of the PLUS_EXPR code. */
8180 case PLUS_EXPR:
8181
8182 /* Check if this is a case for multiplication and addition. */
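/* An illustrative sketch, assuming the target provides a widening
   multiply-and-add pattern: for WIDE = (WIDE) a * (WIDE) b + c, where a
   and b have a type whose mode is exactly half as wide as WIDE's, the
   code below emits a single multiply-accumulate insn through
   [u]madd_widen_optab (or the saturating variants) instead of a widening
   multiply followed by an add.  */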
8183 if ((TREE_CODE (type) == INTEGER_TYPE
8184 || TREE_CODE (type) == FIXED_POINT_TYPE)
8185 && TREE_CODE (TREE_OPERAND (exp, 0)) == MULT_EXPR)
8186 {
8187 tree subsubexp0, subsubexp1;
8188 enum tree_code code0, code1, this_code;
8189
8190 subexp0 = TREE_OPERAND (exp, 0);
8191 subsubexp0 = TREE_OPERAND (subexp0, 0);
8192 subsubexp1 = TREE_OPERAND (subexp0, 1);
8193 code0 = TREE_CODE (subsubexp0);
8194 code1 = TREE_CODE (subsubexp1);
8195 this_code = TREE_CODE (type) == INTEGER_TYPE ? NOP_EXPR
8196 : FIXED_CONVERT_EXPR;
8197 if (code0 == this_code && code1 == this_code
8198 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8199 < TYPE_PRECISION (TREE_TYPE (subsubexp0)))
8200 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8201 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp1, 0))))
8202 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8203 == TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp1, 0)))))
8204 {
8205 tree op0type = TREE_TYPE (TREE_OPERAND (subsubexp0, 0));
8206 enum machine_mode innermode = TYPE_MODE (op0type);
8207 bool zextend_p = TYPE_UNSIGNED (op0type);
8208 bool sat_p = TYPE_SATURATING (TREE_TYPE (subsubexp0));
8209 if (sat_p == 0)
8210 this_optab = zextend_p ? umadd_widen_optab : smadd_widen_optab;
8211 else
8212 this_optab = zextend_p ? usmadd_widen_optab
8213 : ssmadd_widen_optab;
8214 if (mode == GET_MODE_2XWIDER_MODE (innermode)
8215 && (optab_handler (this_optab, mode)->insn_code
8216 != CODE_FOR_nothing))
8217 {
8218 expand_operands (TREE_OPERAND (subsubexp0, 0),
8219 TREE_OPERAND (subsubexp1, 0),
8220 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8221 op2 = expand_expr (TREE_OPERAND (exp, 1), subtarget,
8222 VOIDmode, EXPAND_NORMAL);
8223 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8224 target, unsignedp);
8225 gcc_assert (temp);
8226 return REDUCE_BIT_FIELD (temp);
8227 }
8228 }
8229 }
8230
8231 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8232 something else, make sure we add the register to the constant and
8233 then to the other thing. This case can occur during strength
8234 reduction and doing it this way will produce better code if the
8235 frame pointer or argument pointer is eliminated.
8236
8237 fold-const.c will ensure that the constant is always in the inner
8238 PLUS_EXPR, so the only case we need to do anything about is if
8239 sp, ap, or fp is our second argument, in which case we must swap
8240 the innermost first argument and our second argument. */
8241
8242 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
8243 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
8244 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
8245 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
8246 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
8247 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
8248 {
8249 tree t = TREE_OPERAND (exp, 1);
8250
8251 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8252 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
8253 }
8254
8255 /* If the result is to be ptr_mode and we are adding an integer to
8256 something, we might be forming a constant. So try to use
8257 plus_constant. If it produces a sum and we can't accept it,
8258 use force_operand. This allows P = &ARR[const] to generate
8259 efficient code on machines where a SYMBOL_REF is not a valid
8260 address.
8261
8262 If this is an EXPAND_SUM call, always return the sum. */
8263 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8264 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8265 {
8266 if (modifier == EXPAND_STACK_PARM)
8267 target = 0;
8268 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
8269 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8270 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
8271 {
8272 rtx constant_part;
8273
8274 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
8275 EXPAND_SUM);
8276 /* Use immed_double_const to ensure that the constant is
8277 truncated according to the mode of OP1, then sign extended
8278 to a HOST_WIDE_INT. Using the constant directly can result
8279 in non-canonical RTL in a 64x32 cross compile. */
8280 constant_part
8281 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
8282 (HOST_WIDE_INT) 0,
8283 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
8284 op1 = plus_constant (op1, INTVAL (constant_part));
8285 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8286 op1 = force_operand (op1, target);
8287 return REDUCE_BIT_FIELD (op1);
8288 }
8289
8290 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8291 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8292 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
8293 {
8294 rtx constant_part;
8295
8296 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8297 (modifier == EXPAND_INITIALIZER
8298 ? EXPAND_INITIALIZER : EXPAND_SUM));
8299 if (! CONSTANT_P (op0))
8300 {
8301 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8302 VOIDmode, modifier);
8303 /* Return a PLUS if modifier says it's OK. */
8304 if (modifier == EXPAND_SUM
8305 || modifier == EXPAND_INITIALIZER)
8306 return simplify_gen_binary (PLUS, mode, op0, op1);
8307 goto binop2;
8308 }
8309 /* Use immed_double_const to ensure that the constant is
8310 truncated according to the mode of OP1, then sign extended
8311 to a HOST_WIDE_INT. Using the constant directly can result
8312 in non-canonical RTL in a 64x32 cross compile. */
8313 constant_part
8314 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
8315 (HOST_WIDE_INT) 0,
8316 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
8317 op0 = plus_constant (op0, INTVAL (constant_part));
8318 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8319 op0 = force_operand (op0, target);
8320 return REDUCE_BIT_FIELD (op0);
8321 }
8322 }
8323
8324 /* No sense saving up arithmetic to be done
8325 if it's all in the wrong mode to form part of an address.
8326 And force_operand won't know whether to sign-extend or
8327 zero-extend. */
8328 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8329 || mode != ptr_mode)
8330 {
8331 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8332 subtarget, &op0, &op1, 0);
8333 if (op0 == const0_rtx)
8334 return op1;
8335 if (op1 == const0_rtx)
8336 return op0;
8337 goto binop2;
8338 }
8339
8340 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8341 subtarget, &op0, &op1, modifier);
8342 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8343
8344 case MINUS_EXPR:
8345 /* Check if this is a case for multiplication and subtraction. */
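/* As above but for the subtraction form: an illustrative sketch, assuming
   the target provides a widening multiply-and-subtract pattern, is
   c - (WIDE) a * (WIDE) b expanded through [u]msub_widen_optab (or the
   saturating variants).  */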
8346 if ((TREE_CODE (type) == INTEGER_TYPE
8347 || TREE_CODE (type) == FIXED_POINT_TYPE)
8348 && TREE_CODE (TREE_OPERAND (exp, 1)) == MULT_EXPR)
8349 {
8350 tree subsubexp0, subsubexp1;
8351 enum tree_code code0, code1, this_code;
8352
8353 subexp1 = TREE_OPERAND (exp, 1);
8354 subsubexp0 = TREE_OPERAND (subexp1, 0);
8355 subsubexp1 = TREE_OPERAND (subexp1, 1);
8356 code0 = TREE_CODE (subsubexp0);
8357 code1 = TREE_CODE (subsubexp1);
8358 this_code = TREE_CODE (type) == INTEGER_TYPE ? NOP_EXPR
8359 : FIXED_CONVERT_EXPR;
8360 if (code0 == this_code && code1 == this_code
8361 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8362 < TYPE_PRECISION (TREE_TYPE (subsubexp0)))
8363 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8364 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp1, 0))))
8365 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8366 == TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp1, 0)))))
8367 {
8368 tree op0type = TREE_TYPE (TREE_OPERAND (subsubexp0, 0));
8369 enum machine_mode innermode = TYPE_MODE (op0type);
8370 bool zextend_p = TYPE_UNSIGNED (op0type);
8371 bool sat_p = TYPE_SATURATING (TREE_TYPE (subsubexp0));
8372 if (sat_p == 0)
8373 this_optab = zextend_p ? umsub_widen_optab : smsub_widen_optab;
8374 else
8375 this_optab = zextend_p ? usmsub_widen_optab
8376 : ssmsub_widen_optab;
8377 if (mode == GET_MODE_2XWIDER_MODE (innermode)
8378 && (optab_handler (this_optab, mode)->insn_code
8379 != CODE_FOR_nothing))
8380 {
8381 expand_operands (TREE_OPERAND (subsubexp0, 0),
8382 TREE_OPERAND (subsubexp1, 0),
8383 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8384 op2 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8385 VOIDmode, EXPAND_NORMAL);
8386 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8387 target, unsignedp);
8388 gcc_assert (temp);
8389 return REDUCE_BIT_FIELD (temp);
8390 }
8391 }
8392 }
8393
8394 /* For initializers, we are allowed to return a MINUS of two
8395 symbolic constants. Here we handle all cases when both operands
8396 are constant. */
8397 /* Handle difference of two symbolic constants,
8398 for the sake of an initializer. */
8399 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8400 && really_constant_p (TREE_OPERAND (exp, 0))
8401 && really_constant_p (TREE_OPERAND (exp, 1)))
8402 {
8403 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8404 NULL_RTX, &op0, &op1, modifier);
8405
8406 /* If the last operand is a CONST_INT, use plus_constant of
8407 the negated constant. Else make the MINUS. */
8408 if (GET_CODE (op1) == CONST_INT)
8409 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
8410 else
8411 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8412 }
8413
8414 /* No sense saving up arithmetic to be done
8415 if it's all in the wrong mode to form part of an address.
8416 And force_operand won't know whether to sign-extend or
8417 zero-extend. */
8418 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8419 || mode != ptr_mode)
8420 goto binop;
8421
8422 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8423 subtarget, &op0, &op1, modifier);
8424
8425 /* Convert A - const to A + (-const). */
8426 if (GET_CODE (op1) == CONST_INT)
8427 {
8428 op1 = negate_rtx (mode, op1);
8429 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8430 }
8431
8432 goto binop2;
8433
8434 case MULT_EXPR:
8435 /* If this is a fixed-point operation, then we cannot use the code
8436 below because "expand_mult" doesn't support sat/no-sat fixed-point
8437 multiplications. */
8438 if (ALL_FIXED_POINT_MODE_P (mode))
8439 goto binop;
8440
8441 /* If the first operand is constant, swap them.
8442 Thus the following special case checks need only
8443 check the second operand. */
8444 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8445 {
8446 tree t1 = TREE_OPERAND (exp, 0);
8447 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8448 TREE_OPERAND (exp, 1) = t1;
8449 }
8450
8451 /* Attempt to return something suitable for generating an
8452 indexed address, for machines that support that. */
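/* An illustrative sketch: under EXPAND_SUM this may return an rtx such as
   (mult (reg) (const_int 8)) so that an enclosing PLUS_EXPR can fold it
   into an indexed address like (plus (reg) (mult (reg) (const_int 8))).  */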
8453
8454 if (modifier == EXPAND_SUM && mode == ptr_mode
8455 && host_integerp (TREE_OPERAND (exp, 1), 0))
8456 {
8457 tree exp1 = TREE_OPERAND (exp, 1);
8458
8459 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8460 EXPAND_SUM);
8461
8462 if (!REG_P (op0))
8463 op0 = force_operand (op0, NULL_RTX);
8464 if (!REG_P (op0))
8465 op0 = copy_to_mode_reg (mode, op0);
8466
8467 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8468 gen_int_mode (tree_low_cst (exp1, 0),
8469 TYPE_MODE (TREE_TYPE (exp1)))));
8470 }
8471
8472 if (modifier == EXPAND_STACK_PARM)
8473 target = 0;
8474
8475 /* Check for multiplying things that have been extended
8476 from a narrower type. If this machine supports multiplying
8477 in that narrower type with a result in the desired type,
8478 do it that way, and avoid the explicit type-conversion. */
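/* An illustrative sketch: for WIDE = (WIDE) a * (WIDE) b, with a and b of
   a suitably narrower type, the checks below pick a widening multiply
   (smul_widen_optab/umul_widen_optab, or usmul_widen_optab when the
   operands' signedness differs) so the operands need not be extended
   explicitly first.  */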
8479
8480 subexp0 = TREE_OPERAND (exp, 0);
8481 subexp1 = TREE_OPERAND (exp, 1);
8482 /* First, check if we have a multiplication of one signed and one
8483 unsigned operand. */
8484 if (TREE_CODE (subexp0) == NOP_EXPR
8485 && TREE_CODE (subexp1) == NOP_EXPR
8486 && TREE_CODE (type) == INTEGER_TYPE
8487 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8488 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8489 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8490 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp1, 0))))
8491 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8492 != TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp1, 0)))))
8493 {
8494 enum machine_mode innermode
8495 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (subexp0, 0)));
8496 this_optab = usmul_widen_optab;
8497 if (mode == GET_MODE_WIDER_MODE (innermode))
8498 {
8499 if (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing)
8500 {
8501 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0))))
8502 expand_operands (TREE_OPERAND (subexp0, 0),
8503 TREE_OPERAND (subexp1, 0),
8504 NULL_RTX, &op0, &op1, 0);
8505 else
8506 expand_operands (TREE_OPERAND (subexp0, 0),
8507 TREE_OPERAND (subexp1, 0),
8508 NULL_RTX, &op1, &op0, 0);
8509
8510 goto binop3;
8511 }
8512 }
8513 }
8514 /* Check for a multiplication with matching signedness. */
8515 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8516 && TREE_CODE (type) == INTEGER_TYPE
8517 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8518 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8519 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8520 && int_fits_type_p (TREE_OPERAND (exp, 1),
8521 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8522 /* Don't use a widening multiply if a shift will do. */
8523 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8524 > HOST_BITS_PER_WIDE_INT)
8525 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8526 ||
8527 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8528 && (TYPE_PRECISION (TREE_TYPE
8529 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8530 == TYPE_PRECISION (TREE_TYPE
8531 (TREE_OPERAND
8532 (TREE_OPERAND (exp, 0), 0))))
8533 /* If both operands are extended, they must either both
8534 be zero-extended or both be sign-extended. */
8535 && (TYPE_UNSIGNED (TREE_TYPE
8536 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8537 == TYPE_UNSIGNED (TREE_TYPE
8538 (TREE_OPERAND
8539 (TREE_OPERAND (exp, 0), 0)))))))
8540 {
8541 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8542 enum machine_mode innermode = TYPE_MODE (op0type);
8543 bool zextend_p = TYPE_UNSIGNED (op0type);
8544 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8545 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8546
8547 if (mode == GET_MODE_2XWIDER_MODE (innermode))
8548 {
8549 if (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing)
8550 {
8551 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8552 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8553 TREE_OPERAND (exp, 1),
8554 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8555 else
8556 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8557 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8558 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8559 goto binop3;
8560 }
8561 else if (optab_handler (other_optab, mode)->insn_code != CODE_FOR_nothing
8562 && innermode == word_mode)
8563 {
8564 rtx htem, hipart;
8565 op0 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8566 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8567 op1 = convert_modes (innermode, mode,
8568 expand_normal (TREE_OPERAND (exp, 1)),
8569 unsignedp);
8570 else
8571 op1 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 1), 0));
8572 temp = expand_binop (mode, other_optab, op0, op1, target,
8573 unsignedp, OPTAB_LIB_WIDEN);
8574 hipart = gen_highpart (innermode, temp);
8575 htem = expand_mult_highpart_adjust (innermode, hipart,
8576 op0, op1, hipart,
8577 zextend_p);
8578 if (htem != hipart)
8579 emit_move_insn (hipart, htem);
8580 return REDUCE_BIT_FIELD (temp);
8581 }
8582 }
8583 }
8584 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8585 subtarget, &op0, &op1, 0);
8586 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8587
8588 case TRUNC_DIV_EXPR:
8589 case FLOOR_DIV_EXPR:
8590 case CEIL_DIV_EXPR:
8591 case ROUND_DIV_EXPR:
8592 case EXACT_DIV_EXPR:
8593 /* If this is a fixed-point operation, then we cannot use the code
8594 below because "expand_divmod" doesn't support sat/no-sat fixed-point
8595 divisions. */
8596 if (ALL_FIXED_POINT_MODE_P (mode))
8597 goto binop;
8598
8599 if (modifier == EXPAND_STACK_PARM)
8600 target = 0;
8601 /* Possible optimization: compute the dividend with EXPAND_SUM;
8602 then, if the divisor is constant, optimize the case
8603 where some terms of the dividend have coefficients divisible by it. */
8604 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8605 subtarget, &op0, &op1, 0);
8606 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8607
8608 case RDIV_EXPR:
8609 goto binop;
8610
8611 case TRUNC_MOD_EXPR:
8612 case FLOOR_MOD_EXPR:
8613 case CEIL_MOD_EXPR:
8614 case ROUND_MOD_EXPR:
8615 if (modifier == EXPAND_STACK_PARM)
8616 target = 0;
8617 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8618 subtarget, &op0, &op1, 0);
8619 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8620
8621 case FIXED_CONVERT_EXPR:
8622 op0 = expand_normal (TREE_OPERAND (exp, 0));
8623 if (target == 0 || modifier == EXPAND_STACK_PARM)
8624 target = gen_reg_rtx (mode);
8625
8626 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == INTEGER_TYPE
8627 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
8628 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
8629 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
8630 else
8631 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
8632 return target;
8633
8634 case FIX_TRUNC_EXPR:
8635 op0 = expand_normal (TREE_OPERAND (exp, 0));
8636 if (target == 0 || modifier == EXPAND_STACK_PARM)
8637 target = gen_reg_rtx (mode);
8638 expand_fix (target, op0, unsignedp);
8639 return target;
8640
8641 case FLOAT_EXPR:
8642 op0 = expand_normal (TREE_OPERAND (exp, 0));
8643 if (target == 0 || modifier == EXPAND_STACK_PARM)
8644 target = gen_reg_rtx (mode);
8645 /* expand_float can't figure out what to do if FROM has VOIDmode.
8646 So give it the correct mode. With -O, cse will optimize this. */
8647 if (GET_MODE (op0) == VOIDmode)
8648 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8649 op0);
8650 expand_float (target, op0,
8651 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8652 return target;
8653
8654 case NEGATE_EXPR:
8655 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8656 VOIDmode, EXPAND_NORMAL);
8657 if (modifier == EXPAND_STACK_PARM)
8658 target = 0;
8659 temp = expand_unop (mode,
8660 optab_for_tree_code (NEGATE_EXPR, type),
8661 op0, target, 0);
8662 gcc_assert (temp);
8663 return REDUCE_BIT_FIELD (temp);
8664
8665 case ABS_EXPR:
8666 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8667 VOIDmode, EXPAND_NORMAL);
8668 if (modifier == EXPAND_STACK_PARM)
8669 target = 0;
8670
8671 /* ABS_EXPR is not valid for complex arguments. */
8672 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8673 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8674
8675 /* Unsigned abs is simply the operand. Testing here means we don't
8676 risk generating incorrect code below. */
8677 if (TYPE_UNSIGNED (type))
8678 return op0;
8679
8680 return expand_abs (mode, op0, target, unsignedp,
8681 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8682
8683 case MAX_EXPR:
8684 case MIN_EXPR:
8685 target = original_target;
8686 if (target == 0
8687 || modifier == EXPAND_STACK_PARM
8688 || (MEM_P (target) && MEM_VOLATILE_P (target))
8689 || GET_MODE (target) != mode
8690 || (REG_P (target)
8691 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8692 target = gen_reg_rtx (mode);
8693 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8694 target, &op0, &op1, 0);
8695
8696 /* First try to do it with a special MIN or MAX instruction.
8697 If that does not win, use a conditional jump to select the proper
8698 value. */
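/* If neither approach works out, the fallback below boils down to
     target = op0;
     if (target satisfies the MIN/MAX comparison against op1) goto done;
     target = op1;
   done:
   with the comparison possibly canonicalized against zero first and
   op1 forced into a register so that storing into TARGET cannot
   clobber it.  */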
8699 this_optab = optab_for_tree_code (code, type);
8700 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8701 OPTAB_WIDEN);
8702 if (temp != 0)
8703 return temp;
8704
8705 /* At this point, a MEM target is no longer useful; we will get better
8706 code without it. */
8707
8708 if (! REG_P (target))
8709 target = gen_reg_rtx (mode);
8710
8711 /* If op1 was placed in target, swap op0 and op1. */
8712 if (target != op0 && target == op1)
8713 {
8714 temp = op0;
8715 op0 = op1;
8716 op1 = temp;
8717 }
8718
8719 /* We generate better code and avoid problems with op1 mentioning
8720 target by forcing op1 into a pseudo if it isn't a constant. */
8721 if (! CONSTANT_P (op1))
8722 op1 = force_reg (mode, op1);
8723
8724 {
8725 enum rtx_code comparison_code;
8726 rtx cmpop1 = op1;
8727
8728 if (code == MAX_EXPR)
8729 comparison_code = unsignedp ? GEU : GE;
8730 else
8731 comparison_code = unsignedp ? LEU : LE;
8732
8733 /* Canonicalize to comparisons against 0. */
8734 if (op1 == const1_rtx)
8735 {
8736 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8737 or (a != 0 ? a : 1) for unsigned.
8738 For MIN we are safe converting (a <= 1 ? a : 1)
8739 into (a <= 0 ? a : 1) */
8740 cmpop1 = const0_rtx;
8741 if (code == MAX_EXPR)
8742 comparison_code = unsignedp ? NE : GT;
8743 }
8744 if (op1 == constm1_rtx && !unsignedp)
8745 {
8746 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8747 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8748 cmpop1 = const0_rtx;
8749 if (code == MIN_EXPR)
8750 comparison_code = LT;
8751 }
8752 #ifdef HAVE_conditional_move
8753 /* Use a conditional move if possible. */
8754 if (can_conditionally_move_p (mode))
8755 {
8756 rtx insn;
8757
8758 /* ??? Same problem as in expmed.c: emit_conditional_move
8759 forces a stack adjustment via compare_from_rtx, and we
8760 lose the stack adjustment if the sequence we are about
8761 to create is discarded. */
8762 do_pending_stack_adjust ();
8763
8764 start_sequence ();
8765
8766 /* Try to emit the conditional move. */
8767 insn = emit_conditional_move (target, comparison_code,
8768 op0, cmpop1, mode,
8769 op0, op1, mode,
8770 unsignedp);
8771
8772 /* If we could do the conditional move, emit the sequence,
8773 and return. */
8774 if (insn)
8775 {
8776 rtx seq = get_insns ();
8777 end_sequence ();
8778 emit_insn (seq);
8779 return target;
8780 }
8781
8782 /* Otherwise discard the sequence and fall back to code with
8783 branches. */
8784 end_sequence ();
8785 }
8786 #endif
8787 if (target != op0)
8788 emit_move_insn (target, op0);
8789
8790 temp = gen_label_rtx ();
8791 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8792 unsignedp, mode, NULL_RTX, NULL_RTX, temp);
8793 }
8794 emit_move_insn (target, op1);
8795 emit_label (temp);
8796 return target;
8797
8798 case BIT_NOT_EXPR:
8799 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8800 VOIDmode, EXPAND_NORMAL);
8801 if (modifier == EXPAND_STACK_PARM)
8802 target = 0;
8803 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8804 gcc_assert (temp);
8805 return temp;
8806
8807 /* ??? Can optimize bitwise operations with one arg constant.
8808 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8809 and (a bitwise1 b) bitwise2 b (etc)
8810 but that is probably not worth while. */
8811
8812 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8813 boolean values when we want in all cases to compute both of them. In
8814 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8815 as actual zero-or-1 values and then bitwise anding. In cases where
8816 there cannot be any side effects, better code would be made by
8817 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8818 how to recognize those cases. */
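/* E.g. for TRUTH_AND_EXPR on (a != 0) and (b != 0), both operands are
   expanded to 0-or-1 values and combined with a bitwise AND, whereas
   TRUTH_ANDIF_EXPR would skip evaluating the second operand when the
   first is zero.  */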
8819
8820 case TRUTH_AND_EXPR:
8821 code = BIT_AND_EXPR;
8822 case BIT_AND_EXPR:
8823 goto binop;
8824
8825 case TRUTH_OR_EXPR:
8826 code = BIT_IOR_EXPR;
8827 case BIT_IOR_EXPR:
8828 goto binop;
8829
8830 case TRUTH_XOR_EXPR:
8831 code = BIT_XOR_EXPR;
8832 case BIT_XOR_EXPR:
8833 goto binop;
8834
8835 case LSHIFT_EXPR:
8836 case RSHIFT_EXPR:
8837 case LROTATE_EXPR:
8838 case RROTATE_EXPR:
8839 /* If this is a fixed-point operation, then we cannot use the code
8840 below because "expand_shift" doesn't support sat/no-sat fixed-point
8841 shifts. */
8842 if (ALL_FIXED_POINT_MODE_P (mode))
8843 goto binop;
8844
8845 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8846 subtarget = 0;
8847 if (modifier == EXPAND_STACK_PARM)
8848 target = 0;
8849 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8850 VOIDmode, EXPAND_NORMAL);
8851 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8852 unsignedp);
8853
8854 /* Could determine the answer when only additive constants differ. Also,
8855 the addition of one can be handled by changing the condition. */
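/* E.g. a + 3 > b + 3 is equivalent to a > b, and a + 1 > b could be
   handled as a >= b (ignoring overflow).  */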
8856 case LT_EXPR:
8857 case LE_EXPR:
8858 case GT_EXPR:
8859 case GE_EXPR:
8860 case EQ_EXPR:
8861 case NE_EXPR:
8862 case UNORDERED_EXPR:
8863 case ORDERED_EXPR:
8864 case UNLT_EXPR:
8865 case UNLE_EXPR:
8866 case UNGT_EXPR:
8867 case UNGE_EXPR:
8868 case UNEQ_EXPR:
8869 case LTGT_EXPR:
8870 temp = do_store_flag (exp,
8871 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8872 tmode != VOIDmode ? tmode : mode, 0);
8873 if (temp != 0)
8874 return temp;
8875
8876 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8877 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8878 && original_target
8879 && REG_P (original_target)
8880 && (GET_MODE (original_target)
8881 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8882 {
8883 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8884 VOIDmode, EXPAND_NORMAL);
8885
8886 /* If temp is constant, we can just compute the result. */
8887 if (GET_CODE (temp) == CONST_INT)
8888 {
8889 if (INTVAL (temp) != 0)
8890 emit_move_insn (target, const1_rtx);
8891 else
8892 emit_move_insn (target, const0_rtx);
8893
8894 return target;
8895 }
8896
8897 if (temp != original_target)
8898 {
8899 enum machine_mode mode1 = GET_MODE (temp);
8900 if (mode1 == VOIDmode)
8901 mode1 = tmode != VOIDmode ? tmode : mode;
8902
8903 temp = copy_to_mode_reg (mode1, temp);
8904 }
8905
8906 op1 = gen_label_rtx ();
8907 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8908 GET_MODE (temp), unsignedp, op1);
8909 emit_move_insn (temp, const1_rtx);
8910 emit_label (op1);
8911 return temp;
8912 }
8913
8914 /* If no set-flag instruction, must generate a conditional store
8915 into a temporary variable. Drop through and handle this
8916 like && and ||. */
8917
8918 if (! ignore
8919 && (target == 0
8920 || modifier == EXPAND_STACK_PARM
8921 || ! safe_from_p (target, exp, 1)
8922 /* Make sure we don't have a hard reg (such as function's return
8923 value) live across basic blocks, if not optimizing. */
8924 || (!optimize && REG_P (target)
8925 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8926 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8927
8928 if (target)
8929 emit_move_insn (target, const0_rtx);
8930
8931 op1 = gen_label_rtx ();
8932 jumpifnot (exp, op1);
8933
8934 if (target)
8935 emit_move_insn (target, const1_rtx);
8936
8937 emit_label (op1);
8938 return ignore ? const0_rtx : target;
8939
8940 case TRUTH_NOT_EXPR:
8941 if (modifier == EXPAND_STACK_PARM)
8942 target = 0;
8943 op0 = expand_expr (TREE_OPERAND (exp, 0), target,
8944 VOIDmode, EXPAND_NORMAL);
8945 /* The parser is careful to generate TRUTH_NOT_EXPR
8946 only with operands that are always zero or one. */
8947 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8948 target, 1, OPTAB_LIB_WIDEN);
8949 gcc_assert (temp);
8950 return temp;
8951
8952 case STATEMENT_LIST:
8953 {
8954 tree_stmt_iterator iter;
8955
8956 gcc_assert (ignore);
8957
8958 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
8959 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
8960 }
8961 return const0_rtx;
8962
8963 case COND_EXPR:
8964 /* A COND_EXPR with its type being VOID_TYPE represents a
8965 conditional jump and is handled in
8966 expand_gimple_cond_expr. */
8967 gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp)));
8968
8969 /* Note that COND_EXPRs whose type is a structure or union
8970 are required to be constructed to contain assignments of
8971 a temporary variable, so that we can evaluate them here
8972 for side effect only. If type is void, we must do likewise. */
8973
8974 gcc_assert (!TREE_ADDRESSABLE (type)
8975 && !ignore
8976 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
8977 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
8978
8979 /* If we are not to produce a result, we have no target. Otherwise,
8980 if a target was specified use it; it will not be used as an
8981 intermediate target unless it is safe. If no target, use a
8982 temporary. */
8983
8984 if (modifier != EXPAND_STACK_PARM
8985 && original_target
8986 && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8987 && GET_MODE (original_target) == mode
8988 #ifdef HAVE_conditional_move
8989 && (! can_conditionally_move_p (mode)
8990 || REG_P (original_target))
8991 #endif
8992 && !MEM_P (original_target))
8993 temp = original_target;
8994 else
8995 temp = assign_temp (type, 0, 0, 1);
8996
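/* The expansion below emits the usual diamond:
     jumpifnot (cond) -> else_label
     temp = <then-value>
     goto done_label
   else_label:
     temp = <else-value>
   done_label:  */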
8997 do_pending_stack_adjust ();
8998 NO_DEFER_POP;
8999 op0 = gen_label_rtx ();
9000 op1 = gen_label_rtx ();
9001 jumpifnot (TREE_OPERAND (exp, 0), op0);
9002 store_expr (TREE_OPERAND (exp, 1), temp,
9003 modifier == EXPAND_STACK_PARM,
9004 false);
9005
9006 emit_jump_insn (gen_jump (op1));
9007 emit_barrier ();
9008 emit_label (op0);
9009 store_expr (TREE_OPERAND (exp, 2), temp,
9010 modifier == EXPAND_STACK_PARM,
9011 false);
9012
9013 emit_label (op1);
9014 OK_DEFER_POP;
9015 return temp;
9016
9017 case VEC_COND_EXPR:
9018 target = expand_vec_cond_expr (exp, target);
9019 return target;
9020
9021 case MODIFY_EXPR:
9022 {
9023 tree lhs = TREE_OPERAND (exp, 0);
9024 tree rhs = TREE_OPERAND (exp, 1);
9025 gcc_assert (ignore);
9026 expand_assignment (lhs, rhs, false);
9027 return const0_rtx;
9028 }
9029
9030 case GIMPLE_MODIFY_STMT:
9031 {
9032 tree lhs = GIMPLE_STMT_OPERAND (exp, 0);
9033 tree rhs = GIMPLE_STMT_OPERAND (exp, 1);
9034
9035 gcc_assert (ignore);
9036
9037 /* Check for |= or &= of a bitfield of size one into another bitfield
9038 of size 1. In this case, (unless we need the result of the
9039 assignment) we can do this more efficiently with a
9040 test followed by an assignment, if necessary.
9041
9042 ??? At this point, we can't get a BIT_FIELD_REF here. But if
9043 things change so we do, this code should be enhanced to
9044 support it. */
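/* E.g. for s.a |= s.b with one-bit fields this emits roughly
     if (s.b) s.a = 1;
   and for s.a &= s.b
     if (!s.b) s.a = 0;
   instead of a full read-modify-write of both bit-fields.  */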
9045 if (TREE_CODE (lhs) == COMPONENT_REF
9046 && (TREE_CODE (rhs) == BIT_IOR_EXPR
9047 || TREE_CODE (rhs) == BIT_AND_EXPR)
9048 && TREE_OPERAND (rhs, 0) == lhs
9049 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
9050 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
9051 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
9052 {
9053 rtx label = gen_label_rtx ();
9054 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
9055 do_jump (TREE_OPERAND (rhs, 1),
9056 value ? label : 0,
9057 value ? 0 : label);
9058 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
9059 MOVE_NONTEMPORAL (exp));
9060 do_pending_stack_adjust ();
9061 emit_label (label);
9062 return const0_rtx;
9063 }
9064
9065 expand_assignment (lhs, rhs, MOVE_NONTEMPORAL (exp));
9066 return const0_rtx;
9067 }
9068
9069 case RETURN_EXPR:
9070 if (!TREE_OPERAND (exp, 0))
9071 expand_null_return ();
9072 else
9073 expand_return (TREE_OPERAND (exp, 0));
9074 return const0_rtx;
9075
9076 case ADDR_EXPR:
9077 return expand_expr_addr_expr (exp, target, tmode, modifier);
9078
9079 case COMPLEX_EXPR:
9080 /* Get the rtx for each of the operands. */
9081 op0 = expand_normal (TREE_OPERAND (exp, 0));
9082 op1 = expand_normal (TREE_OPERAND (exp, 1));
9083
9084 if (!target)
9085 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
9086
9087 /* Move the real (op0) and imaginary (op1) parts to their location. */
9088 write_complex_part (target, op0, false);
9089 write_complex_part (target, op1, true);
9090
9091 return target;
9092
9093 case REALPART_EXPR:
9094 op0 = expand_normal (TREE_OPERAND (exp, 0));
9095 return read_complex_part (op0, false);
9096
9097 case IMAGPART_EXPR:
9098 op0 = expand_normal (TREE_OPERAND (exp, 0));
9099 return read_complex_part (op0, true);
9100
9101 case RESX_EXPR:
9102 expand_resx_expr (exp);
9103 return const0_rtx;
9104
9105 case TRY_CATCH_EXPR:
9106 case CATCH_EXPR:
9107 case EH_FILTER_EXPR:
9108 case TRY_FINALLY_EXPR:
9109 /* Lowered by tree-eh.c. */
9110 gcc_unreachable ();
9111
9112 case WITH_CLEANUP_EXPR:
9113 case CLEANUP_POINT_EXPR:
9114 case TARGET_EXPR:
9115 case CASE_LABEL_EXPR:
9116 case VA_ARG_EXPR:
9117 case BIND_EXPR:
9118 case INIT_EXPR:
9119 case CONJ_EXPR:
9120 case COMPOUND_EXPR:
9121 case PREINCREMENT_EXPR:
9122 case PREDECREMENT_EXPR:
9123 case POSTINCREMENT_EXPR:
9124 case POSTDECREMENT_EXPR:
9125 case LOOP_EXPR:
9126 case EXIT_EXPR:
9127 case TRUTH_ANDIF_EXPR:
9128 case TRUTH_ORIF_EXPR:
9129 /* Lowered by gimplify.c. */
9130 gcc_unreachable ();
9131
9132 case CHANGE_DYNAMIC_TYPE_EXPR:
9133 /* This is ignored at the RTL level. The tree level has already set
9134 DECL_POINTER_ALIAS_SET of any affected variable to 0, which is
9135 overkill for the RTL layer but is all that we can
9136 represent. */
9137 return const0_rtx;
9138
9139 case EXC_PTR_EXPR:
9140 return get_exception_pointer (cfun);
9141
9142 case FILTER_EXPR:
9143 return get_exception_filter (cfun);
9144
9145 case FDESC_EXPR:
9146 /* Function descriptors are not valid except as
9147 initialization constants, and should not be expanded. */
9148 gcc_unreachable ();
9149
9150 case SWITCH_EXPR:
9151 expand_case (exp);
9152 return const0_rtx;
9153
9154 case LABEL_EXPR:
9155 expand_label (TREE_OPERAND (exp, 0));
9156 return const0_rtx;
9157
9158 case ASM_EXPR:
9159 expand_asm_expr (exp);
9160 return const0_rtx;
9161
9162 case WITH_SIZE_EXPR:
9163 /* WITH_SIZE_EXPR expands to its first argument. The caller should
9164 have pulled out the size to use in whatever context it needed. */
9165 return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
9166 modifier, alt_rtl);
9167
9168 case REALIGN_LOAD_EXPR:
9169 {
9170 tree oprnd0 = TREE_OPERAND (exp, 0);
9171 tree oprnd1 = TREE_OPERAND (exp, 1);
9172 tree oprnd2 = TREE_OPERAND (exp, 2);
9173 rtx op2;
9174
9175 this_optab = optab_for_tree_code (code, type);
9176 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9177 op2 = expand_normal (oprnd2);
9178 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9179 target, unsignedp);
9180 gcc_assert (temp);
9181 return temp;
9182 }
9183
9184 case DOT_PROD_EXPR:
9185 {
9186 tree oprnd0 = TREE_OPERAND (exp, 0);
9187 tree oprnd1 = TREE_OPERAND (exp, 1);
9188 tree oprnd2 = TREE_OPERAND (exp, 2);
9189 rtx op2;
9190
9191 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9192 op2 = expand_normal (oprnd2);
9193 target = expand_widen_pattern_expr (exp, op0, op1, op2,
9194 target, unsignedp);
9195 return target;
9196 }
9197
9198 case WIDEN_SUM_EXPR:
9199 {
9200 tree oprnd0 = TREE_OPERAND (exp, 0);
9201 tree oprnd1 = TREE_OPERAND (exp, 1);
9202
9203 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
9204 target = expand_widen_pattern_expr (exp, op0, NULL_RTX, op1,
9205 target, unsignedp);
9206 return target;
9207 }
9208
9209 case REDUC_MAX_EXPR:
9210 case REDUC_MIN_EXPR:
9211 case REDUC_PLUS_EXPR:
9212 {
9213 op0 = expand_normal (TREE_OPERAND (exp, 0));
9214 this_optab = optab_for_tree_code (code, type);
9215 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
9216 gcc_assert (temp);
9217 return temp;
9218 }
9219
9220 case VEC_EXTRACT_EVEN_EXPR:
9221 case VEC_EXTRACT_ODD_EXPR:
9222 {
9223 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9224 NULL_RTX, &op0, &op1, 0);
9225 this_optab = optab_for_tree_code (code, type);
9226 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
9227 OPTAB_WIDEN);
9228 gcc_assert (temp);
9229 return temp;
9230 }
9231
9232 case VEC_INTERLEAVE_HIGH_EXPR:
9233 case VEC_INTERLEAVE_LOW_EXPR:
9234 {
9235 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9236 NULL_RTX, &op0, &op1, 0);
9237 this_optab = optab_for_tree_code (code, type);
9238 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
9239 OPTAB_WIDEN);
9240 gcc_assert (temp);
9241 return temp;
9242 }
9243
9244 case VEC_LSHIFT_EXPR:
9245 case VEC_RSHIFT_EXPR:
9246 {
9247 target = expand_vec_shift_expr (exp, target);
9248 return target;
9249 }
9250
9251 case VEC_UNPACK_HI_EXPR:
9252 case VEC_UNPACK_LO_EXPR:
9253 {
9254 op0 = expand_normal (TREE_OPERAND (exp, 0));
9255 this_optab = optab_for_tree_code (code, type);
9256 temp = expand_widen_pattern_expr (exp, op0, NULL_RTX, NULL_RTX,
9257 target, unsignedp);
9258 gcc_assert (temp);
9259 return temp;
9260 }
9261
9262 case VEC_UNPACK_FLOAT_HI_EXPR:
9263 case VEC_UNPACK_FLOAT_LO_EXPR:
9264 {
9265 op0 = expand_normal (TREE_OPERAND (exp, 0));
9266 /* The signedness is determined from the input operand. */
9267 this_optab = optab_for_tree_code (code,
9268 TREE_TYPE (TREE_OPERAND (exp, 0)));
9269 temp = expand_widen_pattern_expr
9270 (exp, op0, NULL_RTX, NULL_RTX,
9271 target, TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
9272
9273 gcc_assert (temp);
9274 return temp;
9275 }
9276
9277 case VEC_WIDEN_MULT_HI_EXPR:
9278 case VEC_WIDEN_MULT_LO_EXPR:
9279 {
9280 tree oprnd0 = TREE_OPERAND (exp, 0);
9281 tree oprnd1 = TREE_OPERAND (exp, 1);
9282
9283 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
9284 target = expand_widen_pattern_expr (exp, op0, op1, NULL_RTX,
9285 target, unsignedp);
9286 gcc_assert (target);
9287 return target;
9288 }
9289
9290 case VEC_PACK_TRUNC_EXPR:
9291 case VEC_PACK_SAT_EXPR:
9292 case VEC_PACK_FIX_TRUNC_EXPR:
9293 {
9294 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9295 goto binop;
9296 }
9297
9298 default:
9299 return lang_hooks.expand_expr (exp, original_target, tmode,
9300 modifier, alt_rtl);
9301 }
9302
9303 /* Here to do an ordinary binary operator. */
9304 binop:
9305 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9306 subtarget, &op0, &op1, 0);
9307 binop2:
9308 this_optab = optab_for_tree_code (code, type);
9309 binop3:
9310 if (modifier == EXPAND_STACK_PARM)
9311 target = 0;
9312 temp = expand_binop (mode, this_optab, op0, op1, target,
9313 unsignedp, OPTAB_LIB_WIDEN);
9314 gcc_assert (temp);
9315 return REDUCE_BIT_FIELD (temp);
9316 }
9317 #undef REDUCE_BIT_FIELD
9318 \f
9319 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
9320 signedness of TYPE), possibly returning the result in TARGET. */
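/* For an unsigned 3-bit type this masks with 7; for a signed 3-bit
   type held in a 32-bit mode it amounts to (x << 29) >> 29 with an
   arithmetic right shift, i.e. sign-extension from bit 2.  */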
9321 static rtx
9322 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
9323 {
9324 HOST_WIDE_INT prec = TYPE_PRECISION (type);
9325 if (target && GET_MODE (target) != GET_MODE (exp))
9326 target = 0;
9327 /* For constant values, reduce using build_int_cst_type. */
9328 if (GET_CODE (exp) == CONST_INT)
9329 {
9330 HOST_WIDE_INT value = INTVAL (exp);
9331 tree t = build_int_cst_type (type, value);
9332 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
9333 }
9334 else if (TYPE_UNSIGNED (type))
9335 {
9336 rtx mask;
9337 if (prec < HOST_BITS_PER_WIDE_INT)
9338 mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
9339 GET_MODE (exp));
9340 else
9341 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
9342 ((unsigned HOST_WIDE_INT) 1
9343 << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
9344 GET_MODE (exp));
9345 return expand_and (GET_MODE (exp), exp, mask, target);
9346 }
9347 else
9348 {
9349 tree count = build_int_cst (NULL_TREE,
9350 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
9351 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
9352 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
9353 }
9354 }
9355 \f
9356 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9357 when applied to the address of EXP produces an address known to be
9358 aligned more than BIGGEST_ALIGNMENT. */
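/* Roughly, the pattern recognized is the one produced when rounding an
   address up to an alignment boundary, i.e. an OFFSET of the shape
     (- (address of EXP)) & (ALIGN - 1)
   with ALIGN a power of 2 larger than BIGGEST_ALIGNMENT.  */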
9359
9360 static int
9361 is_aligning_offset (const_tree offset, const_tree exp)
9362 {
9363 /* Strip off any conversions. */
9364 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9365 || TREE_CODE (offset) == NOP_EXPR
9366 || TREE_CODE (offset) == CONVERT_EXPR)
9367 offset = TREE_OPERAND (offset, 0);
9368
9369 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9370 power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9371 if (TREE_CODE (offset) != BIT_AND_EXPR
9372 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9373 || compare_tree_int (TREE_OPERAND (offset, 1),
9374 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
9375 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9376 return 0;
9377
9378 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9379 It must be NEGATE_EXPR. Then strip any more conversions. */
9380 offset = TREE_OPERAND (offset, 0);
9381 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9382 || TREE_CODE (offset) == NOP_EXPR
9383 || TREE_CODE (offset) == CONVERT_EXPR)
9384 offset = TREE_OPERAND (offset, 0);
9385
9386 if (TREE_CODE (offset) != NEGATE_EXPR)
9387 return 0;
9388
9389 offset = TREE_OPERAND (offset, 0);
9390 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9391 || TREE_CODE (offset) == NOP_EXPR
9392 || TREE_CODE (offset) == CONVERT_EXPR)
9393 offset = TREE_OPERAND (offset, 0);
9394
9395 /* This must now be the address of EXP. */
9396 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
9397 }
9398 \f
9399 /* Return the tree node if ARG corresponds to a string constant, or zero
9400 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9401 in bytes within the string that ARG is accessing. The type of the
9402 offset will be `sizetype'. */
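/* E.g. for &"hello"[2], or for &buf[2] where buf is a read-only
   variable initialized with a string literal, the STRING_CST is
   returned and *PTR_OFFSET is set to 2.  */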
9403
9404 tree
9405 string_constant (tree arg, tree *ptr_offset)
9406 {
9407 tree array, offset, lower_bound;
9408 STRIP_NOPS (arg);
9409
9410 if (TREE_CODE (arg) == ADDR_EXPR)
9411 {
9412 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9413 {
9414 *ptr_offset = size_zero_node;
9415 return TREE_OPERAND (arg, 0);
9416 }
9417 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
9418 {
9419 array = TREE_OPERAND (arg, 0);
9420 offset = size_zero_node;
9421 }
9422 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
9423 {
9424 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
9425 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
9426 if (TREE_CODE (array) != STRING_CST
9427 && TREE_CODE (array) != VAR_DECL)
9428 return 0;
9429
9430 /* Check if the array has a nonzero lower bound. */
9431 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
9432 if (!integer_zerop (lower_bound))
9433 {
9434 /* If the offset and base aren't both constants, return 0. */
9435 if (TREE_CODE (lower_bound) != INTEGER_CST)
9436 return 0;
9437 if (TREE_CODE (offset) != INTEGER_CST)
9438 return 0;
9439 /* Adjust offset by the lower bound. */
9440 offset = size_diffop (fold_convert (sizetype, offset),
9441 fold_convert (sizetype, lower_bound));
9442 }
9443 }
9444 else
9445 return 0;
9446 }
9447 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
9448 {
9449 tree arg0 = TREE_OPERAND (arg, 0);
9450 tree arg1 = TREE_OPERAND (arg, 1);
9451
9452 STRIP_NOPS (arg0);
9453 STRIP_NOPS (arg1);
9454
9455 if (TREE_CODE (arg0) == ADDR_EXPR
9456 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
9457 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
9458 {
9459 array = TREE_OPERAND (arg0, 0);
9460 offset = arg1;
9461 }
9462 else if (TREE_CODE (arg1) == ADDR_EXPR
9463 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
9464 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
9465 {
9466 array = TREE_OPERAND (arg1, 0);
9467 offset = arg0;
9468 }
9469 else
9470 return 0;
9471 }
9472 else
9473 return 0;
9474
9475 if (TREE_CODE (array) == STRING_CST)
9476 {
9477 *ptr_offset = fold_convert (sizetype, offset);
9478 return array;
9479 }
9480 else if (TREE_CODE (array) == VAR_DECL)
9481 {
9482 int length;
9483
9484 /* Variables initialized to string literals can be handled too. */
9485 if (DECL_INITIAL (array) == NULL_TREE
9486 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
9487 return 0;
9488
9489 /* Give up unless the array is read-only, free of side effects and binds locally. */
9490 if (! TREE_READONLY (array)
9491 || TREE_SIDE_EFFECTS (array)
9492 || ! targetm.binds_local_p (array))
9493 return 0;
9494
9495 /* Avoid const char foo[4] = "abcde"; */
9496 if (DECL_SIZE_UNIT (array) == NULL_TREE
9497 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
9498 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
9499 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
9500 return 0;
9501
9502 /* If variable is bigger than the string literal, OFFSET must be constant
9503 and inside of the bounds of the string literal. */
9504 offset = fold_convert (sizetype, offset);
9505 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
9506 && (! host_integerp (offset, 1)
9507 || compare_tree_int (offset, length) >= 0))
9508 return 0;
9509
9510 *ptr_offset = offset;
9511 return DECL_INITIAL (array);
9512 }
9513
9514 return 0;
9515 }
9516 \f
9517 /* Generate code to calculate EXP using a store-flag instruction
9518 and return an rtx for the result. EXP is either a comparison
9519 or a TRUTH_NOT_EXPR whose operand is a comparison.
9520
9521 If TARGET is nonzero, store the result there if convenient.
9522
9523 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9524 cheap.
9525
9526 Return zero if there is no suitable set-flag instruction
9527 available on this machine.
9528
9529 Once expand_expr has been called on the arguments of the comparison,
9530 we are committed to doing the store flag, since it is not safe to
9531 re-evaluate the expression. We emit the store-flag insn by calling
9532 emit_store_flag, but only expand the arguments if we have a reason
9533 to believe that emit_store_flag will be successful. If we think that
9534 it will, but it isn't, we have to simulate the store-flag with a
9535 set/jump/set sequence. */
9536
9537 static rtx
9538 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
9539 {
9540 enum rtx_code code;
9541 tree arg0, arg1, type;
9542 tree tem;
9543 enum machine_mode operand_mode;
9544 int invert = 0;
9545 int unsignedp;
9546 rtx op0, op1;
9547 enum insn_code icode;
9548 rtx subtarget = target;
9549 rtx result, label;
9550
9551 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9552 result at the end. We can't simply invert the test since it would
9553 have already been inverted if it were valid. This case occurs for
9554 some floating-point comparisons. */
9555
9556 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9557 invert = 1, exp = TREE_OPERAND (exp, 0);
9558
9559 arg0 = TREE_OPERAND (exp, 0);
9560 arg1 = TREE_OPERAND (exp, 1);
9561
9562 /* Don't crash if the comparison was erroneous. */
9563 if (arg0 == error_mark_node || arg1 == error_mark_node)
9564 return const0_rtx;
9565
9566 type = TREE_TYPE (arg0);
9567 operand_mode = TYPE_MODE (type);
9568 unsignedp = TYPE_UNSIGNED (type);
9569
9570 /* We won't bother with BLKmode store-flag operations because it would mean
9571 passing a lot of information to emit_store_flag. */
9572 if (operand_mode == BLKmode)
9573 return 0;
9574
9575 /* We won't bother with store-flag operations involving function pointers
9576 when function pointers must be canonicalized before comparisons. */
9577 #ifdef HAVE_canonicalize_funcptr_for_compare
9578 if (HAVE_canonicalize_funcptr_for_compare
9579 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9580 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9581 == FUNCTION_TYPE))
9582 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9583 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9584 == FUNCTION_TYPE))))
9585 return 0;
9586 #endif
9587
9588 STRIP_NOPS (arg0);
9589 STRIP_NOPS (arg1);
9590
9591 /* Get the rtx comparison code to use. We know that EXP is a comparison
9592 operation of some type. Some comparisons against 1 and -1 can be
9593 converted to comparisons with zero. Do so here so that the tests
9594 below will be aware that we have a comparison with zero. These
9595 tests will not catch constants in the first operand, but constants
9596 are rarely passed as the first operand. */
9597
9598 switch (TREE_CODE (exp))
9599 {
9600 case EQ_EXPR:
9601 code = EQ;
9602 break;
9603 case NE_EXPR:
9604 code = NE;
9605 break;
9606 case LT_EXPR:
9607 if (integer_onep (arg1))
9608 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9609 else
9610 code = unsignedp ? LTU : LT;
9611 break;
9612 case LE_EXPR:
9613 if (! unsignedp && integer_all_onesp (arg1))
9614 arg1 = integer_zero_node, code = LT;
9615 else
9616 code = unsignedp ? LEU : LE;
9617 break;
9618 case GT_EXPR:
9619 if (! unsignedp && integer_all_onesp (arg1))
9620 arg1 = integer_zero_node, code = GE;
9621 else
9622 code = unsignedp ? GTU : GT;
9623 break;
9624 case GE_EXPR:
9625 if (integer_onep (arg1))
9626 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9627 else
9628 code = unsignedp ? GEU : GE;
9629 break;
9630
9631 case UNORDERED_EXPR:
9632 code = UNORDERED;
9633 break;
9634 case ORDERED_EXPR:
9635 code = ORDERED;
9636 break;
9637 case UNLT_EXPR:
9638 code = UNLT;
9639 break;
9640 case UNLE_EXPR:
9641 code = UNLE;
9642 break;
9643 case UNGT_EXPR:
9644 code = UNGT;
9645 break;
9646 case UNGE_EXPR:
9647 code = UNGE;
9648 break;
9649 case UNEQ_EXPR:
9650 code = UNEQ;
9651 break;
9652 case LTGT_EXPR:
9653 code = LTGT;
9654 break;
9655
9656 default:
9657 gcc_unreachable ();
9658 }
9659
9660 /* Put a constant second. */
9661 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
9662 || TREE_CODE (arg0) == FIXED_CST)
9663 {
9664 tem = arg0; arg0 = arg1; arg1 = tem;
9665 code = swap_condition (code);
9666 }
9667
9668 /* If this is an equality or inequality test of a single bit, we can
9669 do this by shifting the bit being tested to the low-order bit and
9670 masking the result with the constant 1. If the condition was EQ,
9671 we xor it with 1. This does not require an scc insn and is faster
9672 than an scc insn even if we have it.
9673
9674 The code to make this transformation was moved into fold_single_bit_test,
9675 so we just call into the folder and expand its result. */
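/* E.g. (x & 8) != 0 becomes (x >> 3) & 1, and (x & 8) == 0 becomes
   ((x >> 3) & 1) ^ 1.  */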
9676
9677 if ((code == NE || code == EQ)
9678 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9679 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9680 {
9681 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
9682 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9683 arg0, arg1, type),
9684 target, VOIDmode, EXPAND_NORMAL);
9685 }
9686
9687 /* Now see if we are likely to be able to do this. Return if not. */
9688 if (! can_compare_p (code, operand_mode, ccp_store_flag))
9689 return 0;
9690
9691 icode = setcc_gen_code[(int) code];
9692
9693 if (icode == CODE_FOR_nothing)
9694 {
9695 enum machine_mode wmode;
9696
9697 for (wmode = operand_mode;
9698 icode == CODE_FOR_nothing && wmode != VOIDmode;
9699 wmode = GET_MODE_WIDER_MODE (wmode))
9700 icode = optab_handler (cstore_optab, wmode)->insn_code;
9701 }
9702
9703 if (icode == CODE_FOR_nothing
9704 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9705 {
9706 /* We can only do this if it is one of the special cases that
9707 can be handled without an scc insn. */
9708 if ((code == LT && integer_zerop (arg1))
9709 || (! only_cheap && code == GE && integer_zerop (arg1)))
9710 ;
9711 else if (! only_cheap && (code == NE || code == EQ)
9712 && TREE_CODE (type) != REAL_TYPE
9713 && ((optab_handler (abs_optab, operand_mode)->insn_code
9714 != CODE_FOR_nothing)
9715 || (optab_handler (ffs_optab, operand_mode)->insn_code
9716 != CODE_FOR_nothing)))
9717 ;
9718 else
9719 return 0;
9720 }
9721
9722 if (! get_subtarget (target)
9723 || GET_MODE (subtarget) != operand_mode)
9724 subtarget = 0;
9725
9726 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
9727
9728 if (target == 0)
9729 target = gen_reg_rtx (mode);
9730
9731 result = emit_store_flag (target, code, op0, op1,
9732 operand_mode, unsignedp, 1);
9733
9734 if (result)
9735 {
9736 if (invert)
9737 result = expand_binop (mode, xor_optab, result, const1_rtx,
9738 result, 0, OPTAB_LIB_WIDEN);
9739 return result;
9740 }
9741
9742 /* If this failed, we have to do this with set/compare/jump/set code. */
9743 if (!REG_P (target)
9744 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9745 target = gen_reg_rtx (GET_MODE (target));
9746
9747 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9748 label = gen_label_rtx ();
9749 do_compare_rtx_and_jump (op0, op1, code, unsignedp, operand_mode, NULL_RTX,
9750 NULL_RTX, label);
9751
9752 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9753 emit_label (label);
9754
9755 return target;
9756 }
9757 \f
9758
9759 /* Stubs in case we haven't got a casesi insn. */
9760 #ifndef HAVE_casesi
9761 # define HAVE_casesi 0
9762 # define gen_casesi(a, b, c, d, e) (0)
9763 # define CODE_FOR_casesi CODE_FOR_nothing
9764 #endif
9765
9766 /* If the machine does not have a case insn that compares the bounds,
9767 this means extra overhead for dispatch tables, which raises the
9768 threshold for using them. */
9769 #ifndef CASE_VALUES_THRESHOLD
9770 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9771 #endif /* CASE_VALUES_THRESHOLD */
9772
9773 unsigned int
9774 case_values_threshold (void)
9775 {
9776 return CASE_VALUES_THRESHOLD;
9777 }
9778
9779 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9780 0 otherwise (i.e. if there is no casesi instruction). */
9781 int
9782 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9783 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
9784 {
9785 enum machine_mode index_mode = SImode;
9786 int index_bits = GET_MODE_BITSIZE (index_mode);
9787 rtx op1, op2, index;
9788 enum machine_mode op_mode;
9789
9790 if (! HAVE_casesi)
9791 return 0;
9792
9793 /* Convert the index to SImode. */
9794 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9795 {
9796 enum machine_mode omode = TYPE_MODE (index_type);
9797 rtx rangertx = expand_normal (range);
9798
9799 /* We must handle the endpoints in the original mode. */
9800 index_expr = build2 (MINUS_EXPR, index_type,
9801 index_expr, minval);
9802 minval = integer_zero_node;
9803 index = expand_normal (index_expr);
9804 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9805 omode, 1, default_label);
9806 /* Now we can safely truncate. */
9807 index = convert_to_mode (index_mode, index, 0);
9808 }
9809 else
9810 {
9811 if (TYPE_MODE (index_type) != index_mode)
9812 {
9813 index_type = lang_hooks.types.type_for_size (index_bits, 0);
9814 index_expr = fold_convert (index_type, index_expr);
9815 }
9816
9817 index = expand_normal (index_expr);
9818 }
9819
9820 do_pending_stack_adjust ();
9821
9822 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9823 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9824 (index, op_mode))
9825 index = copy_to_mode_reg (op_mode, index);
9826
9827 op1 = expand_normal (minval);
9828
9829 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9830 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9831 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
9832 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9833 (op1, op_mode))
9834 op1 = copy_to_mode_reg (op_mode, op1);
9835
9836 op2 = expand_normal (range);
9837
9838 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9839 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9840 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
9841 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9842 (op2, op_mode))
9843 op2 = copy_to_mode_reg (op_mode, op2);
9844
9845 emit_jump_insn (gen_casesi (index, op1, op2,
9846 table_label, default_label));
9847 return 1;
9848 }
9849
9850 /* Attempt to generate a tablejump instruction; same concept. */
9851 #ifndef HAVE_tablejump
9852 #define HAVE_tablejump 0
9853 #define gen_tablejump(x, y) (0)
9854 #endif
9855
9856 /* Subroutine of the next function.
9857
9858 INDEX is the value being switched on, with the lowest value
9859 in the table already subtracted.
9860 MODE is its expected mode (needed if INDEX is constant).
9861 RANGE is the length of the jump table.
9862 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9863
9864 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9865 index value is out of range. */
9866
9867 static void
9868 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9869 rtx default_label)
9870 {
9871 rtx temp, vector;
9872
9873 if (INTVAL (range) > cfun->max_jumptable_ents)
9874 cfun->max_jumptable_ents = INTVAL (range);
9875
9876 /* Do an unsigned comparison (in the proper mode) between the index
9877 expression and the value which represents the length of the range.
9878 Since we just finished subtracting the lower bound of the range
9879 from the index expression, this comparison allows us to simultaneously
9880 check that the original index expression value is both greater than
9881 or equal to the minimum value of the range and less than or equal to
9882 the maximum value of the range. */
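/* E.g. for case values 5 .. 9, INDEX already holds i - 5 and RANGE is
   4; the unsigned test (i - 5) > 4 catches both i < 5 (which wraps to
   a huge unsigned value) and i > 9 in a single comparison.  */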
9883
9884 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9885 default_label);
9886
9887 /* If index is in range, it must fit in Pmode.
9888 Convert to Pmode so we can index with it. */
9889 if (mode != Pmode)
9890 index = convert_to_mode (Pmode, index, 1);
9891
9892 /* Don't let a MEM slip through, because then INDEX that comes
9893 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9894 and break_out_memory_refs will go to work on it and mess it up. */
9895 #ifdef PIC_CASE_VECTOR_ADDRESS
9896 if (flag_pic && !REG_P (index))
9897 index = copy_to_mode_reg (Pmode, index);
9898 #endif
9899
9900 /* If flag_force_addr were to affect this address
9901 it could interfere with the tricky assumptions made
9902 about addresses that contain label-refs,
9903 which may be valid only very near the tablejump itself. */
9904 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9905 GET_MODE_SIZE, because this indicates how large insns are. The other
9906 uses should all be Pmode, because they are addresses. This code
9907 could fail if addresses and insns are not the same size. */
9908 index = gen_rtx_PLUS (Pmode,
9909 gen_rtx_MULT (Pmode, index,
9910 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9911 gen_rtx_LABEL_REF (Pmode, table_label));
9912 #ifdef PIC_CASE_VECTOR_ADDRESS
9913 if (flag_pic)
9914 index = PIC_CASE_VECTOR_ADDRESS (index);
9915 else
9916 #endif
9917 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9918 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9919 vector = gen_const_mem (CASE_VECTOR_MODE, index);
9920 convert_move (temp, vector, 0);
9921
9922 emit_jump_insn (gen_tablejump (temp, table_label));
9923
9924 /* If we are generating PIC code or if the table is PC-relative, the
9925 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9926 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9927 emit_barrier ();
9928 }
9929
9930 int
9931 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9932 rtx table_label, rtx default_label)
9933 {
9934 rtx index;
9935
9936 if (! HAVE_tablejump)
9937 return 0;
9938
9939 index_expr = fold_build2 (MINUS_EXPR, index_type,
9940 fold_convert (index_type, index_expr),
9941 fold_convert (index_type, minval));
9942 index = expand_normal (index_expr);
9943 do_pending_stack_adjust ();
9944
9945 do_tablejump (index, TYPE_MODE (index_type),
9946 convert_modes (TYPE_MODE (index_type),
9947 TYPE_MODE (TREE_TYPE (range)),
9948 expand_normal (range),
9949 TYPE_UNSIGNED (TREE_TYPE (range))),
9950 table_label, default_label);
9951 return 1;
9952 }
9953
9954 /* Nonzero if the mode is a valid vector mode for this architecture.
9955 This returns nonzero even if there is no hardware support for the
9956 vector mode, but we can emulate with narrower modes. */
9957
9958 int
9959 vector_mode_valid_p (enum machine_mode mode)
9960 {
9961 enum mode_class class = GET_MODE_CLASS (mode);
9962 enum machine_mode innermode;
9963
9964 /* Doh! What's going on? */
9965 if (class != MODE_VECTOR_INT
9966 && class != MODE_VECTOR_FLOAT
9967 && class != MODE_VECTOR_FRACT
9968 && class != MODE_VECTOR_UFRACT
9969 && class != MODE_VECTOR_ACCUM
9970 && class != MODE_VECTOR_UACCUM)
9971 return 0;
9972
9973 /* Hardware support. Woo hoo! */
9974 if (targetm.vector_mode_supported_p (mode))
9975 return 1;
9976
9977 innermode = GET_MODE_INNER (mode);
9978
9979 /* We should probably return 1 if requesting V4DI and we have no DI
9980 but do have V2DI, though that situation is probably very unlikely. */
9981
9982 /* If we have support for the inner mode, we can safely emulate it.
9983 We may not have V2DI, but we can emulate with a pair of DIs. */
9984 return targetm.scalar_mode_supported_p (innermode);
9985 }
9986
9987 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
9988 static rtx
9989 const_vector_from_tree (tree exp)
9990 {
9991 rtvec v;
9992 int units, i;
9993 tree link, elt;
9994 enum machine_mode inner, mode;
9995
9996 mode = TYPE_MODE (TREE_TYPE (exp));
9997
9998 if (initializer_zerop (exp))
9999 return CONST0_RTX (mode);
10000
10001 units = GET_MODE_NUNITS (mode);
10002 inner = GET_MODE_INNER (mode);
10003
10004 v = rtvec_alloc (units);
10005
10006 link = TREE_VECTOR_CST_ELTS (exp);
10007 for (i = 0; link; link = TREE_CHAIN (link), ++i)
10008 {
10009 elt = TREE_VALUE (link);
10010
10011 if (TREE_CODE (elt) == REAL_CST)
10012 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
10013 inner);
10014 else if (TREE_CODE (elt) == FIXED_CST)
10015 RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
10016 inner);
10017 else
10018 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
10019 TREE_INT_CST_HIGH (elt),
10020 inner);
10021 }
10022
10023 /* Initialize remaining elements to 0. */
10024 for (; i < units; ++i)
10025 RTVEC_ELT (v, i) = CONST0_RTX (inner);
10026
10027 return gen_rtx_CONST_VECTOR (mode, v);
10028 }
10029 #include "gt-expr.h"