1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "optabs.h"
40 #include "libfuncs.h"
41 #include "recog.h"
42 #include "reload.h"
43 #include "output.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "langhooks.h"
48 #include "intl.h"
49 #include "tm_p.h"
50 #include "tree-iterator.h"
51 #include "tree-pass.h"
52 #include "tree-flow.h"
53 #include "target.h"
54 #include "timevar.h"
55 #include "df.h"
56 #include "diagnostic.h"
57 #include "ssaexpand.h"
58
59 /* Decide whether a function's arguments should be processed
60 from first to last or from last to first.
61
62 They should if the stack and args grow in opposite directions, but
63 only if we have push insns. */
64
65 #ifdef PUSH_ROUNDING
66
67 #ifndef PUSH_ARGS_REVERSED
68 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
69 #define PUSH_ARGS_REVERSED /* If it's last to first. */
70 #endif
71 #endif
72
73 #endif
74
75 #ifndef STACK_PUSH_CODE
76 #ifdef STACK_GROWS_DOWNWARD
77 #define STACK_PUSH_CODE PRE_DEC
78 #else
79 #define STACK_PUSH_CODE PRE_INC
80 #endif
81 #endif
82
83
84 /* If this is nonzero, we do not bother generating VOLATILE
85 around volatile memory references, and we are willing to
86 output indirect addresses. If cse is to follow, we reject
87 indirect addresses so a useful potential cse is generated;
88 if it is used only once, instruction combination will produce
89 the same indirect address eventually. */
90 int cse_not_expected;
91
92 /* This structure is used by move_by_pieces to describe the move to
93 be performed. */
94 struct move_by_pieces_d
95 {
96 rtx to;
97 rtx to_addr;
98 int autinc_to;
99 int explicit_inc_to;
100 rtx from;
101 rtx from_addr;
102 int autinc_from;
103 int explicit_inc_from;
104 unsigned HOST_WIDE_INT len;
105 HOST_WIDE_INT offset;
106 int reverse;
107 };
108
109 /* This structure is used by store_by_pieces to describe the clear to
110 be performed. */
111
112 struct store_by_pieces_d
113 {
114 rtx to;
115 rtx to_addr;
116 int autinc_to;
117 int explicit_inc_to;
118 unsigned HOST_WIDE_INT len;
119 HOST_WIDE_INT offset;
120 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
121 void *constfundata;
122 int reverse;
123 };
124
125 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
126 unsigned int,
127 unsigned int);
128 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
129 struct move_by_pieces_d *);
130 static bool block_move_libcall_safe_for_call_parm (void);
131 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT);
132 static tree emit_block_move_libcall_fn (int);
133 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
134 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
135 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
136 static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
137 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
138 struct store_by_pieces_d *);
139 static tree clear_storage_libcall_fn (int);
140 static rtx compress_float_constant (rtx, rtx);
141 static rtx get_subtarget (rtx);
142 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
143 HOST_WIDE_INT, enum machine_mode,
144 tree, tree, int, alias_set_type);
145 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
146 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
147 tree, tree, alias_set_type, bool);
148
149 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
150
151 static int is_aligning_offset (const_tree, const_tree);
152 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
153 enum expand_modifier);
154 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
155 static rtx do_store_flag (sepops, rtx, enum machine_mode);
156 #ifdef PUSH_ROUNDING
157 static void emit_single_push_insn (enum machine_mode, rtx, tree);
158 #endif
159 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
160 static rtx const_vector_from_tree (tree);
161 static void write_complex_part (rtx, rtx, bool);
162
163 /* Record for each mode whether we can move a register directly to or
164 from an object of that mode in memory. If we can't, we won't try
165 to use that mode directly when accessing a field of that mode. */
166
167 static char direct_load[NUM_MACHINE_MODES];
168 static char direct_store[NUM_MACHINE_MODES];
169
170 /* Record for each mode whether we can float-extend from memory. */
171
172 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
173
174 /* This macro is used to determine whether move_by_pieces should be called
175 to perform a structure copy. */
176 #ifndef MOVE_BY_PIECES_P
177 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
178 (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
179 < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
180 #endif
181
182 /* This macro is used to determine whether clear_by_pieces should be
183 called to clear storage. */
184 #ifndef CLEAR_BY_PIECES_P
185 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
186 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
187 < (unsigned int) CLEAR_RATIO (optimize_insn_for_speed_p ()))
188 #endif
189
190 /* This macro is used to determine whether store_by_pieces should be
191 called to "memset" storage with byte values other than zero. */
192 #ifndef SET_BY_PIECES_P
193 #define SET_BY_PIECES_P(SIZE, ALIGN) \
194 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
195 < (unsigned int) SET_RATIO (optimize_insn_for_speed_p ()))
196 #endif
197
198 /* This macro is used to determine whether store_by_pieces should be
199 called to "memcpy" storage when the source is a constant string. */
200 #ifndef STORE_BY_PIECES_P
201 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
202 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
203 < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
204 #endif
205
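/* Worked illustration (not in the original file): on a hypothetical
   32-bit target with MOVE_MAX_PIECES == 4 and MOVE_RATIO == 3, a
   7-byte word-aligned copy decomposes into one SImode, one HImode and
   one QImode move, i.e. 3 insns; since 3 < 3 is false,
   MOVE_BY_PIECES_P is false and the copy falls through to a movmem
   pattern or a libcall instead.  */
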
206 /* This array records the insn_code of insns to perform block moves. */
207 enum insn_code movmem_optab[NUM_MACHINE_MODES];
208
209 /* This array records the insn_code of insns to perform block sets. */
210 enum insn_code setmem_optab[NUM_MACHINE_MODES];
211
212 /* These arrays record the insn_code of three different kinds of insns
213 to perform block compares. */
214 enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
215 enum insn_code cmpstrn_optab[NUM_MACHINE_MODES];
216 enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
217
218 /* Synchronization primitives. */
219 enum insn_code sync_add_optab[NUM_MACHINE_MODES];
220 enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
221 enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
222 enum insn_code sync_and_optab[NUM_MACHINE_MODES];
223 enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
224 enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
225 enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
226 enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
227 enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
228 enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
229 enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
230 enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
231 enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
232 enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
233 enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
234 enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
235 enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
236 enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
237 enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
238 enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
239 enum insn_code sync_lock_release[NUM_MACHINE_MODES];
240
241 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
242
243 #ifndef SLOW_UNALIGNED_ACCESS
244 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
245 #endif
246 \f
247 /* This is run to set up which modes can be used
248 directly in memory and to initialize the block move optab. It is run
249 at the beginning of compilation and when the target is reinitialized. */
250
251 void
252 init_expr_target (void)
253 {
254 rtx insn, pat;
255 enum machine_mode mode;
256 int num_clobbers;
257 rtx mem, mem1;
258 rtx reg;
259
260 /* Try indexing by frame ptr and try by stack ptr.
261 It is known that on the Convex the stack ptr isn't a valid index.
262 With luck, one or the other is valid on any machine. */
263 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
264 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
265
266 /* A scratch register we can modify in-place below to avoid
267 useless RTL allocations. */
268 reg = gen_rtx_REG (VOIDmode, -1);
269
270 insn = rtx_alloc (INSN);
271 pat = gen_rtx_SET (VOIDmode, NULL_RTX, NULL_RTX);
272 PATTERN (insn) = pat;
273
274 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
275 mode = (enum machine_mode) ((int) mode + 1))
276 {
277 int regno;
278
279 direct_load[(int) mode] = direct_store[(int) mode] = 0;
280 PUT_MODE (mem, mode);
281 PUT_MODE (mem1, mode);
282 PUT_MODE (reg, mode);
283
284 /* See if there is some register that can be used in this mode and
285 directly loaded or stored from memory. */
286
287 if (mode != VOIDmode && mode != BLKmode)
288 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
289 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
290 regno++)
291 {
292 if (! HARD_REGNO_MODE_OK (regno, mode))
293 continue;
294
295 SET_REGNO (reg, regno);
296
297 SET_SRC (pat) = mem;
298 SET_DEST (pat) = reg;
299 if (recog (pat, insn, &num_clobbers) >= 0)
300 direct_load[(int) mode] = 1;
301
302 SET_SRC (pat) = mem1;
303 SET_DEST (pat) = reg;
304 if (recog (pat, insn, &num_clobbers) >= 0)
305 direct_load[(int) mode] = 1;
306
307 SET_SRC (pat) = reg;
308 SET_DEST (pat) = mem;
309 if (recog (pat, insn, &num_clobbers) >= 0)
310 direct_store[(int) mode] = 1;
311
312 SET_SRC (pat) = reg;
313 SET_DEST (pat) = mem1;
314 if (recog (pat, insn, &num_clobbers) >= 0)
315 direct_store[(int) mode] = 1;
316 }
317 }
318
319 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
320
321 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
322 mode = GET_MODE_WIDER_MODE (mode))
323 {
324 enum machine_mode srcmode;
325 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
326 srcmode = GET_MODE_WIDER_MODE (srcmode))
327 {
328 enum insn_code ic;
329
330 ic = can_extend_p (mode, srcmode, 0);
331 if (ic == CODE_FOR_nothing)
332 continue;
333
334 PUT_MODE (mem, srcmode);
335
336 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
337 float_extend_from_mem[mode][srcmode] = true;
338 }
339 }
340 }
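
/* Illustrative query (a sketch, not original code): after the probing
   loop above, the tables answer "can a hard reg be moved directly
   to/from memory in MODE?".  SImode here is just an example mode.  */
#if 0
  if (direct_load[(int) SImode])
    ;	/* Some hard reg can be loaded directly from memory in SImode;
	   convert_move consults this table when narrowing from memory.  */
#endif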
341
342 /* This is run at the start of compiling a function. */
343
344 void
345 init_expr (void)
346 {
347 memset (&crtl->expr, 0, sizeof (crtl->expr));
348 }
349 \f
350 /* Copy data from FROM to TO, where the machine modes are not the same.
351 Both modes may be integer, or both may be floating, or both may be
352 fixed-point.
353 UNSIGNEDP should be nonzero if FROM is an unsigned type.
354 This causes zero-extension instead of sign-extension. */
355
356 void
357 convert_move (rtx to, rtx from, int unsignedp)
358 {
359 enum machine_mode to_mode = GET_MODE (to);
360 enum machine_mode from_mode = GET_MODE (from);
361 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
362 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
363 enum insn_code code;
364 rtx libcall;
365
366 /* rtx code for making an equivalent value. */
367 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
368 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
369
370
371 gcc_assert (to_real == from_real);
372 gcc_assert (to_mode != BLKmode);
373 gcc_assert (from_mode != BLKmode);
374
375 /* If the source and destination are already the same, then there's
376 nothing to do. */
377 if (to == from)
378 return;
379
380 /* If FROM is a SUBREG that indicates that we have already done at least
381 the required extension, strip it. We don't handle such SUBREGs as
382 TO here. */
383
384 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
385 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
386 >= GET_MODE_SIZE (to_mode))
387 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
388 from = gen_lowpart (to_mode, from), from_mode = to_mode;
389
390 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
391
392 if (to_mode == from_mode
393 || (from_mode == VOIDmode && CONSTANT_P (from)))
394 {
395 emit_move_insn (to, from);
396 return;
397 }
398
399 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
400 {
401 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
402
403 if (VECTOR_MODE_P (to_mode))
404 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
405 else
406 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
407
408 emit_move_insn (to, from);
409 return;
410 }
411
412 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
413 {
414 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
415 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
416 return;
417 }
418
419 if (to_real)
420 {
421 rtx value, insns;
422 convert_optab tab;
423
424 gcc_assert ((GET_MODE_PRECISION (from_mode)
425 != GET_MODE_PRECISION (to_mode))
426 || (DECIMAL_FLOAT_MODE_P (from_mode)
427 != DECIMAL_FLOAT_MODE_P (to_mode)));
428
429 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
430 /* Conversion between decimal float and binary float, same size. */
431 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
432 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
433 tab = sext_optab;
434 else
435 tab = trunc_optab;
436
437 /* Try converting directly if the insn is supported. */
438
439 code = convert_optab_handler (tab, to_mode, from_mode)->insn_code;
440 if (code != CODE_FOR_nothing)
441 {
442 emit_unop_insn (code, to, from,
443 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
444 return;
445 }
446
447 /* Otherwise use a libcall. */
448 libcall = convert_optab_libfunc (tab, to_mode, from_mode);
449
450 /* Is this conversion implemented yet? */
451 gcc_assert (libcall);
452
453 start_sequence ();
454 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
455 1, from, from_mode);
456 insns = get_insns ();
457 end_sequence ();
458 emit_libcall_block (insns, to, value,
459 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
460 from)
461 : gen_rtx_FLOAT_EXTEND (to_mode, from));
462 return;
463 }
464
465 /* Handle pointer conversion. */ /* SPEE 900220. */
466 /* Targets are expected to provide conversion insns between PxImode and
467 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
468 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
469 {
470 enum machine_mode full_mode
471 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
472
473 gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)->insn_code
474 != CODE_FOR_nothing);
475
476 if (full_mode != from_mode)
477 from = convert_to_mode (full_mode, from, unsignedp);
478 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode)->insn_code,
479 to, from, UNKNOWN);
480 return;
481 }
482 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
483 {
484 rtx new_from;
485 enum machine_mode full_mode
486 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
487
488 gcc_assert (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code
489 != CODE_FOR_nothing);
490
491 if (to_mode == full_mode)
492 {
493 emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code,
494 to, from, UNKNOWN);
495 return;
496 }
497
498 new_from = gen_reg_rtx (full_mode);
499 emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code,
500 new_from, from, UNKNOWN);
501
502 /* else proceed to integer conversions below. */
503 from_mode = full_mode;
504 from = new_from;
505 }
506
507 /* Make sure both are fixed-point modes or both are not. */
508 gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
509 ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
510 if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
511 {
512 /* If we widen from_mode to to_mode and they are in the same class,
513 we won't saturate the result.
514 Otherwise, always saturate the result to play safe. */
515 if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
516 && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
517 expand_fixed_convert (to, from, 0, 0);
518 else
519 expand_fixed_convert (to, from, 0, 1);
520 return;
521 }
522
523 /* Now both modes are integers. */
524
525 /* Handle expanding beyond a word. */
526 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
527 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
528 {
529 rtx insns;
530 rtx lowpart;
531 rtx fill_value;
532 rtx lowfrom;
533 int i;
534 enum machine_mode lowpart_mode;
535 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
536
537 /* Try converting directly if the insn is supported. */
538 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
539 != CODE_FOR_nothing)
540 {
541 /* If FROM is a SUBREG, put it into a register. Do this
542 so that we always generate the same set of insns for
543 better cse'ing; if an intermediate assignment occurred,
544 we won't be doing the operation directly on the SUBREG. */
545 if (optimize > 0 && GET_CODE (from) == SUBREG)
546 from = force_reg (from_mode, from);
547 emit_unop_insn (code, to, from, equiv_code);
548 return;
549 }
550 /* Next, try converting via full word. */
551 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
552 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
553 != CODE_FOR_nothing))
554 {
555 rtx word_to = gen_reg_rtx (word_mode);
556 if (REG_P (to))
557 {
558 if (reg_overlap_mentioned_p (to, from))
559 from = force_reg (from_mode, from);
560 emit_clobber (to);
561 }
562 convert_move (word_to, from, unsignedp);
563 emit_unop_insn (code, to, word_to, equiv_code);
564 return;
565 }
566
567 /* No special multiword conversion insn; do it by hand. */
568 start_sequence ();
569
570 /* Since we will turn this into a no conflict block, we must ensure
571 that the source does not overlap the target. */
572
573 if (reg_overlap_mentioned_p (to, from))
574 from = force_reg (from_mode, from);
575
576 /* Get a copy of FROM widened to a word, if necessary. */
577 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
578 lowpart_mode = word_mode;
579 else
580 lowpart_mode = from_mode;
581
582 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
583
584 lowpart = gen_lowpart (lowpart_mode, to);
585 emit_move_insn (lowpart, lowfrom);
586
587 /* Compute the value to put in each remaining word. */
588 if (unsignedp)
589 fill_value = const0_rtx;
590 else
591 fill_value = emit_store_flag (gen_reg_rtx (word_mode),
592 LT, lowfrom, const0_rtx,
593 VOIDmode, 0, -1);
594
595 /* Fill the remaining words. */
596 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
597 {
598 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
599 rtx subword = operand_subword (to, index, 1, to_mode);
600
601 gcc_assert (subword);
602
603 if (fill_value != subword)
604 emit_move_insn (subword, fill_value);
605 }
606
607 insns = get_insns ();
608 end_sequence ();
609
610 emit_insn (insns);
611 return;
612 }
613
614 /* Truncating multi-word to a word or less. */
615 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
616 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
617 {
618 if (!((MEM_P (from)
619 && ! MEM_VOLATILE_P (from)
620 && direct_load[(int) to_mode]
621 && ! mode_dependent_address_p (XEXP (from, 0)))
622 || REG_P (from)
623 || GET_CODE (from) == SUBREG))
624 from = force_reg (from_mode, from);
625 convert_move (to, gen_lowpart (word_mode, from), 0);
626 return;
627 }
628
629 /* Now follow all the conversions between integers
630 no more than a word long. */
631
632 /* For truncation, usually we can just refer to FROM in a narrower mode. */
633 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
634 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
635 GET_MODE_BITSIZE (from_mode)))
636 {
637 if (!((MEM_P (from)
638 && ! MEM_VOLATILE_P (from)
639 && direct_load[(int) to_mode]
640 && ! mode_dependent_address_p (XEXP (from, 0)))
641 || REG_P (from)
642 || GET_CODE (from) == SUBREG))
643 from = force_reg (from_mode, from);
644 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
645 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
646 from = copy_to_reg (from);
647 emit_move_insn (to, gen_lowpart (to_mode, from));
648 return;
649 }
650
651 /* Handle extension. */
652 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
653 {
654 /* Convert directly if that works. */
655 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
656 != CODE_FOR_nothing)
657 {
658 emit_unop_insn (code, to, from, equiv_code);
659 return;
660 }
661 else
662 {
663 enum machine_mode intermediate;
664 rtx tmp;
665 tree shift_amount;
666
667 /* Search for a mode to convert via. */
668 for (intermediate = from_mode; intermediate != VOIDmode;
669 intermediate = GET_MODE_WIDER_MODE (intermediate))
670 if (((can_extend_p (to_mode, intermediate, unsignedp)
671 != CODE_FOR_nothing)
672 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
673 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
674 GET_MODE_BITSIZE (intermediate))))
675 && (can_extend_p (intermediate, from_mode, unsignedp)
676 != CODE_FOR_nothing))
677 {
678 convert_move (to, convert_to_mode (intermediate, from,
679 unsignedp), unsignedp);
680 return;
681 }
682
683 /* No suitable intermediate mode.
684 Generate what we need with shifts. */
685 shift_amount = build_int_cst (NULL_TREE,
686 GET_MODE_BITSIZE (to_mode)
687 - GET_MODE_BITSIZE (from_mode));
688 from = gen_lowpart (to_mode, force_reg (from_mode, from));
689 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
690 to, unsignedp);
691 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
692 to, unsignedp);
693 if (tmp != to)
694 emit_move_insn (to, tmp);
695 return;
696 }
697 }
698
699 /* Support special truncate insns for certain modes. */
700 if (convert_optab_handler (trunc_optab, to_mode, from_mode)->insn_code != CODE_FOR_nothing)
701 {
702 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode)->insn_code,
703 to, from, UNKNOWN);
704 return;
705 }
706
707 /* Handle truncation of volatile memrefs, and so on;
708 the things that couldn't be truncated directly,
709 and for which there was no special instruction.
710
711 ??? Code above formerly short-circuited this, for most integer
712 mode pairs, with a force_reg in from_mode followed by a recursive
713 call to this routine. Appears always to have been wrong. */
714 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
715 {
716 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
717 emit_move_insn (to, temp);
718 return;
719 }
720
721 /* Mode combination is not recognized. */
722 gcc_unreachable ();
723 }
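
/* Usage sketch (illustrative, with hypothetical pseudos): widen a
   SImode value into an existing DImode register.  UNSIGNEDP selects
   zero- versus sign-extension.  */
#if 0
  rtx src = gen_reg_rtx (SImode);
  rtx dst = gen_reg_rtx (DImode);
  convert_move (dst, src, 0);	/* 0 => sign-extend SRC into DST.  */
#endif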
724
725 /* Return an rtx for a value that would result
726 from converting X to mode MODE.
727 Both X and MODE may be floating, or both integer.
728 UNSIGNEDP is nonzero if X is an unsigned value.
729 This can be done by referring to a part of X in place
730 or by copying to a new temporary with conversion. */
731
732 rtx
733 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
734 {
735 return convert_modes (mode, VOIDmode, x, unsignedp);
736 }
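
/* Usage sketch (illustrative): unlike convert_move, which stores into
   an existing target, convert_to_mode returns an rtx for the
   converted value.  */
#if 0
  rtx narrow = gen_reg_rtx (QImode);
  rtx wide = convert_to_mode (SImode, narrow, 1);	/* 1 => zero-extend.  */
#endif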
737
738 /* Return an rtx for a value that would result
739 from converting X from mode OLDMODE to mode MODE.
740 Both modes may be floating, or both integer.
741 UNSIGNEDP is nonzero if X is an unsigned value.
742
743 This can be done by referring to a part of X in place
744 or by copying to a new temporary with conversion.
745
746 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
747
748 rtx
749 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
750 {
751 rtx temp;
752
753 /* If FROM is a SUBREG that indicates that we have already done at least
754 the required extension, strip it. */
755
756 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
757 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
758 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
759 x = gen_lowpart (mode, x);
760
761 if (GET_MODE (x) != VOIDmode)
762 oldmode = GET_MODE (x);
763
764 if (mode == oldmode)
765 return x;
766
767 /* There is one case that we must handle specially: If we are converting
768 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
769 we are to interpret the constant as unsigned, gen_lowpart will do
770 the wrong thing if the constant appears negative. What we want to do is
771 make the high-order word of the constant zero, not all ones. */
772
773 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
774 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
775 && CONST_INT_P (x) && INTVAL (x) < 0)
776 {
777 HOST_WIDE_INT val = INTVAL (x);
778
779 if (oldmode != VOIDmode
780 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
781 {
782 int width = GET_MODE_BITSIZE (oldmode);
783
784 /* We need to zero extend VAL. */
785 val &= ((HOST_WIDE_INT) 1 << width) - 1;
786 }
787
788 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
789 }
790
791 /* We can do this with a gen_lowpart if both desired and current modes
792 are integer, and this is either a constant integer, a register, or a
793 non-volatile MEM. Except for the constant case where MODE is no
794 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
795
796 if ((CONST_INT_P (x)
797 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
798 || (GET_MODE_CLASS (mode) == MODE_INT
799 && GET_MODE_CLASS (oldmode) == MODE_INT
800 && (GET_CODE (x) == CONST_DOUBLE
801 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
802 && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
803 && direct_load[(int) mode])
804 || (REG_P (x)
805 && (! HARD_REGISTER_P (x)
806 || HARD_REGNO_MODE_OK (REGNO (x), mode))
807 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
808 GET_MODE_BITSIZE (GET_MODE (x)))))))))
809 {
810 /* ??? If we don't know OLDMODE, we have to assume here that
811 X does not need sign- or zero-extension. This may not be
812 the case, but it's the best we can do. */
813 if (CONST_INT_P (x) && oldmode != VOIDmode
814 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
815 {
816 HOST_WIDE_INT val = INTVAL (x);
817 int width = GET_MODE_BITSIZE (oldmode);
818
819 /* We must sign or zero-extend in this case. Start by
820 zero-extending, then sign extend if we need to. */
821 val &= ((HOST_WIDE_INT) 1 << width) - 1;
822 if (! unsignedp
823 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
824 val |= (HOST_WIDE_INT) (-1) << width;
825
826 return gen_int_mode (val, mode);
827 }
828
829 return gen_lowpart (mode, x);
830 }
831
832 /* Converting from an integer constant into a vector mode is always
833 equivalent to a subreg operation. */
834 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
835 {
836 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
837 return simplify_gen_subreg (mode, x, oldmode, 0);
838 }
839
840 temp = gen_reg_rtx (mode);
841 convert_move (temp, x, unsignedp);
842 return temp;
843 }
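
/* Worked example (illustrative) of the CONST_INT handling above:
   interpreting (const_int -1) as an unsigned QImode value and
   converting it to SImode must yield 0xff, not 0xffffffff; the
   masking in the CONST_INT branch guarantees this.  */
#if 0
  rtx x = convert_modes (SImode, QImode, constm1_rtx, 1);
  /* x is (const_int 255).  */
#endif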
844 \f
845 /* STORE_MAX_PIECES is the number of bytes at a time that we can
846 store efficiently. Due to internal GCC limitations, this is
847 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
848 for an immediate constant. */
849
850 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
851
852 /* Determine whether the LEN bytes can be moved by using several move
853 instructions. Return nonzero if a call to move_by_pieces should
854 succeed. */
855
856 int
857 can_move_by_pieces (unsigned HOST_WIDE_INT len,
858 unsigned int align ATTRIBUTE_UNUSED)
859 {
860 return MOVE_BY_PIECES_P (len, align);
861 }
862
863 /* Generate several move instructions to copy LEN bytes from block FROM to
864 block TO. (These are MEM rtx's with BLKmode).
865
866 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
867 used to push FROM to the stack.
868
869 ALIGN is maximum stack alignment we can assume.
870
871 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
872 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
873 stpcpy. */
874
875 rtx
876 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
877 unsigned int align, int endp)
878 {
879 struct move_by_pieces_d data;
880 rtx to_addr, from_addr = XEXP (from, 0);
881 unsigned int max_size = MOVE_MAX_PIECES + 1;
882 enum machine_mode mode = VOIDmode, tmode;
883 enum insn_code icode;
884
885 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
886
887 data.offset = 0;
888 data.from_addr = from_addr;
889 if (to)
890 {
891 to_addr = XEXP (to, 0);
892 data.to = to;
893 data.autinc_to
894 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
895 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
896 data.reverse
897 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
898 }
899 else
900 {
901 to_addr = NULL_RTX;
902 data.to = NULL_RTX;
903 data.autinc_to = 1;
904 #ifdef STACK_GROWS_DOWNWARD
905 data.reverse = 1;
906 #else
907 data.reverse = 0;
908 #endif
909 }
910 data.to_addr = to_addr;
911 data.from = from;
912 data.autinc_from
913 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
914 || GET_CODE (from_addr) == POST_INC
915 || GET_CODE (from_addr) == POST_DEC);
916
917 data.explicit_inc_from = 0;
918 data.explicit_inc_to = 0;
919 if (data.reverse) data.offset = len;
920 data.len = len;
921
922 /* If copying requires more than two move insns,
923 copy addresses to registers (to make displacements shorter)
924 and use post-increment if available. */
925 if (!(data.autinc_from && data.autinc_to)
926 && move_by_pieces_ninsns (len, align, max_size) > 2)
927 {
928 /* Find the mode of the largest move... */
929 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
930 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
931 if (GET_MODE_SIZE (tmode) < max_size)
932 mode = tmode;
933
934 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
935 {
936 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
937 data.autinc_from = 1;
938 data.explicit_inc_from = -1;
939 }
940 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
941 {
942 data.from_addr = copy_addr_to_reg (from_addr);
943 data.autinc_from = 1;
944 data.explicit_inc_from = 1;
945 }
946 if (!data.autinc_from && CONSTANT_P (from_addr))
947 data.from_addr = copy_addr_to_reg (from_addr);
948 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
949 {
950 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
951 data.autinc_to = 1;
952 data.explicit_inc_to = -1;
953 }
954 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
955 {
956 data.to_addr = copy_addr_to_reg (to_addr);
957 data.autinc_to = 1;
958 data.explicit_inc_to = 1;
959 }
960 if (!data.autinc_to && CONSTANT_P (to_addr))
961 data.to_addr = copy_addr_to_reg (to_addr);
962 }
963
964 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
965 if (align >= GET_MODE_ALIGNMENT (tmode))
966 align = GET_MODE_ALIGNMENT (tmode);
967 else
968 {
969 enum machine_mode xmode;
970
971 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
972 tmode != VOIDmode;
973 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
974 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
975 || SLOW_UNALIGNED_ACCESS (tmode, align))
976 break;
977
978 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
979 }
980
981 /* First move what we can in the largest integer mode, then go to
982 successively smaller modes. */
983
984 while (max_size > 1)
985 {
986 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
987 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
988 if (GET_MODE_SIZE (tmode) < max_size)
989 mode = tmode;
990
991 if (mode == VOIDmode)
992 break;
993
994 icode = optab_handler (mov_optab, mode)->insn_code;
995 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
996 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
997
998 max_size = GET_MODE_SIZE (mode);
999 }
1000
1001 /* The code above should have handled everything. */
1002 gcc_assert (!data.len);
1003
1004 if (endp)
1005 {
1006 rtx to1;
1007
1008 gcc_assert (!data.reverse);
1009 if (data.autinc_to)
1010 {
1011 if (endp == 2)
1012 {
1013 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1014 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1015 else
1016 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
1017 -1));
1018 }
1019 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1020 data.offset);
1021 }
1022 else
1023 {
1024 if (endp == 2)
1025 --data.offset;
1026 to1 = adjust_address (data.to, QImode, data.offset);
1027 }
1028 return to1;
1029 }
1030 else
1031 return data.to;
1032 }
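
/* Usage sketch (illustrative): TO_MEM and FROM_MEM stand for two
   BLKmode MEM rtx's established elsewhere; with ENDP == 0 the call
   simply returns TO_MEM.  */
#if 0
  rtx r = move_by_pieces (to_mem, from_mem, 16, MEM_ALIGN (from_mem), 0);
#endif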
1033
1034 /* Return number of insns required to move L bytes by pieces.
1035 ALIGN (in bits) is maximum alignment we can assume. */
1036
1037 static unsigned HOST_WIDE_INT
1038 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
1039 unsigned int max_size)
1040 {
1041 unsigned HOST_WIDE_INT n_insns = 0;
1042 enum machine_mode tmode;
1043
1044 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
1045 if (align >= GET_MODE_ALIGNMENT (tmode))
1046 align = GET_MODE_ALIGNMENT (tmode);
1047 else
1048 {
1049 enum machine_mode tmode, xmode;
1050
1051 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
1052 tmode != VOIDmode;
1053 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
1054 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
1055 || SLOW_UNALIGNED_ACCESS (tmode, align))
1056 break;
1057
1058 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
1059 }
1060
1061 while (max_size > 1)
1062 {
1063 enum machine_mode mode = VOIDmode;
1064 enum insn_code icode;
1065
1066 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1067 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1068 if (GET_MODE_SIZE (tmode) < max_size)
1069 mode = tmode;
1070
1071 if (mode == VOIDmode)
1072 break;
1073
1074 icode = optab_handler (mov_optab, mode)->insn_code;
1075 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1076 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1077
1078 max_size = GET_MODE_SIZE (mode);
1079 }
1080
1081 gcc_assert (!l);
1082 return n_insns;
1083 }
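
/* Worked example (not in the original file): with MOVE_MAX_PIECES == 4
   and a 4-byte-aligned block, L == 7 decomposes greedily as
   4 (SImode) + 2 (HImode) + 1 (QImode), so the function returns 3.  */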
1084
1085 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1086 with move instructions for mode MODE. GENFUN is the gen_... function
1087 to make a move insn for that mode. DATA has all the other info. */
1088
1089 static void
1090 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1091 struct move_by_pieces_d *data)
1092 {
1093 unsigned int size = GET_MODE_SIZE (mode);
1094 rtx to1 = NULL_RTX, from1;
1095
1096 while (data->len >= size)
1097 {
1098 if (data->reverse)
1099 data->offset -= size;
1100
1101 if (data->to)
1102 {
1103 if (data->autinc_to)
1104 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1105 data->offset);
1106 else
1107 to1 = adjust_address (data->to, mode, data->offset);
1108 }
1109
1110 if (data->autinc_from)
1111 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1112 data->offset);
1113 else
1114 from1 = adjust_address (data->from, mode, data->offset);
1115
1116 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1117 emit_insn (gen_add2_insn (data->to_addr,
1118 GEN_INT (-(HOST_WIDE_INT)size)));
1119 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1120 emit_insn (gen_add2_insn (data->from_addr,
1121 GEN_INT (-(HOST_WIDE_INT)size)));
1122
1123 if (data->to)
1124 emit_insn ((*genfun) (to1, from1));
1125 else
1126 {
1127 #ifdef PUSH_ROUNDING
1128 emit_single_push_insn (mode, from1, NULL);
1129 #else
1130 gcc_unreachable ();
1131 #endif
1132 }
1133
1134 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1135 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1136 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1137 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1138
1139 if (! data->reverse)
1140 data->offset += size;
1141
1142 data->len -= size;
1143 }
1144 }
1145 \f
1146 /* Emit code to move a block Y to a block X. This may be done with
1147 string-move instructions, with multiple scalar move instructions,
1148 or with a library call.
1149
1150 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1151 SIZE is an rtx that says how long they are.
1152 ALIGN is the maximum alignment we can assume they have.
1153 METHOD describes what kind of copy this is, and what mechanisms may be used.
1154
1155 Return the address of the new block, if memcpy is called and returns it,
1156 0 otherwise. */
1157
1158 rtx
1159 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1160 unsigned int expected_align, HOST_WIDE_INT expected_size)
1161 {
1162 bool may_use_call;
1163 rtx retval = 0;
1164 unsigned int align;
1165
1166 switch (method)
1167 {
1168 case BLOCK_OP_NORMAL:
1169 case BLOCK_OP_TAILCALL:
1170 may_use_call = true;
1171 break;
1172
1173 case BLOCK_OP_CALL_PARM:
1174 may_use_call = block_move_libcall_safe_for_call_parm ();
1175
1176 /* Make inhibit_defer_pop nonzero around the library call
1177 to force it to pop the arguments right away. */
1178 NO_DEFER_POP;
1179 break;
1180
1181 case BLOCK_OP_NO_LIBCALL:
1182 may_use_call = false;
1183 break;
1184
1185 default:
1186 gcc_unreachable ();
1187 }
1188
1189 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1190
1191 gcc_assert (MEM_P (x));
1192 gcc_assert (MEM_P (y));
1193 gcc_assert (size);
1194
1195 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1196 block copy is more efficient for other large modes, e.g. DCmode. */
1197 x = adjust_address (x, BLKmode, 0);
1198 y = adjust_address (y, BLKmode, 0);
1199
1200 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1201 can be incorrect is coming from __builtin_memcpy. */
1202 if (CONST_INT_P (size))
1203 {
1204 if (INTVAL (size) == 0)
1205 return 0;
1206
1207 x = shallow_copy_rtx (x);
1208 y = shallow_copy_rtx (y);
1209 set_mem_size (x, size);
1210 set_mem_size (y, size);
1211 }
1212
1213 if (CONST_INT_P (size) && MOVE_BY_PIECES_P (INTVAL (size), align))
1214 move_by_pieces (x, y, INTVAL (size), align, 0);
1215 else if (emit_block_move_via_movmem (x, y, size, align,
1216 expected_align, expected_size))
1217 ;
1218 else if (may_use_call)
1219 retval = emit_block_move_via_libcall (x, y, size,
1220 method == BLOCK_OP_TAILCALL);
1221 else
1222 emit_block_move_via_loop (x, y, size, align);
1223
1224 if (method == BLOCK_OP_CALL_PARM)
1225 OK_DEFER_POP;
1226
1227 return retval;
1228 }
1229
1230 rtx
1231 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1232 {
1233 return emit_block_move_hints (x, y, size, method, 0, -1);
1234 }
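
/* Usage sketch (illustrative): copy 32 bytes between two BLKmode MEMs
   (hypothetical DST_MEM and SRC_MEM), letting the function choose
   between move_by_pieces, a movmem pattern, a memcpy libcall, or an
   explicit loop.  */
#if 0
  emit_block_move (dst_mem, src_mem, GEN_INT (32), BLOCK_OP_NORMAL);
#endif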
1235
1236 /* A subroutine of emit_block_move. Returns true if calling the
1237 block move libcall will not clobber any parameters which may have
1238 already been placed on the stack. */
1239
1240 static bool
1241 block_move_libcall_safe_for_call_parm (void)
1242 {
1243 #if defined (REG_PARM_STACK_SPACE)
1244 tree fn;
1245 #endif
1246
1247 /* If arguments are pushed on the stack, then they're safe. */
1248 if (PUSH_ARGS)
1249 return true;
1250
1251 /* If registers go on the stack anyway, any argument is sure to clobber
1252 an outgoing argument. */
1253 #if defined (REG_PARM_STACK_SPACE)
1254 fn = emit_block_move_libcall_fn (false);
1255 if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
1256 && REG_PARM_STACK_SPACE (fn) != 0)
1257 return false;
1258 #endif
1259
1260 /* If any argument goes in memory, then it might clobber an outgoing
1261 argument. */
1262 {
1263 CUMULATIVE_ARGS args_so_far;
1264 tree fn, arg;
1265
1266 fn = emit_block_move_libcall_fn (false);
1267 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
1268
1269 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1270 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1271 {
1272 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1273 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1274 if (!tmp || !REG_P (tmp))
1275 return false;
1276 if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
1277 return false;
1278 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1279 }
1280 }
1281 return true;
1282 }
1283
1284 /* A subroutine of emit_block_move. Expand a movmem pattern;
1285 return true if successful. */
1286
1287 static bool
1288 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1289 unsigned int expected_align, HOST_WIDE_INT expected_size)
1290 {
1291 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1292 int save_volatile_ok = volatile_ok;
1293 enum machine_mode mode;
1294
1295 if (expected_align < align)
1296 expected_align = align;
1297
1298 /* Since this is a move insn, we don't care about volatility. */
1299 volatile_ok = 1;
1300
1301 /* Try the most limited insn first, because there's no point
1302 including more than one in the machine description unless
1303 the more limited one has some advantage. */
1304
1305 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1306 mode = GET_MODE_WIDER_MODE (mode))
1307 {
1308 enum insn_code code = movmem_optab[(int) mode];
1309 insn_operand_predicate_fn pred;
1310
1311 if (code != CODE_FOR_nothing
1312 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1313 here because if SIZE is less than the mode mask, as it is
1314 returned by the macro, it will definitely be less than the
1315 actual mode mask. */
1316 && ((CONST_INT_P (size)
1317 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1318 <= (GET_MODE_MASK (mode) >> 1)))
1319 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1320 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1321 || (*pred) (x, BLKmode))
1322 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1323 || (*pred) (y, BLKmode))
1324 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1325 || (*pred) (opalign, VOIDmode)))
1326 {
1327 rtx op2;
1328 rtx last = get_last_insn ();
1329 rtx pat;
1330
1331 op2 = convert_to_mode (mode, size, 1);
1332 pred = insn_data[(int) code].operand[2].predicate;
1333 if (pred != 0 && ! (*pred) (op2, mode))
1334 op2 = copy_to_mode_reg (mode, op2);
1335
1336 /* ??? When called via emit_block_move_for_call, it'd be
1337 nice if there were some way to inform the backend, so
1338 that it doesn't fail the expansion because it thinks
1339 emitting the libcall would be more efficient. */
1340
1341 if (insn_data[(int) code].n_operands == 4)
1342 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1343 else
1344 pat = GEN_FCN ((int) code) (x, y, op2, opalign,
1345 GEN_INT (expected_align
1346 / BITS_PER_UNIT),
1347 GEN_INT (expected_size));
1348 if (pat)
1349 {
1350 emit_insn (pat);
1351 volatile_ok = save_volatile_ok;
1352 return true;
1353 }
1354 else
1355 delete_insns_since (last);
1356 }
1357 }
1358
1359 volatile_ok = save_volatile_ok;
1360 return false;
1361 }
1362
1363 /* A subroutine of emit_block_move. Expand a call to memcpy.
1364 Return the return value from memcpy, 0 otherwise. */
1365
1366 rtx
1367 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1368 {
1369 rtx dst_addr, src_addr;
1370 tree call_expr, fn, src_tree, dst_tree, size_tree;
1371 enum machine_mode size_mode;
1372 rtx retval;
1373
1374 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1375 pseudos. We can then place those new pseudos into a VAR_DECL and
1376 use them later. */
1377
1378 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1379 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1380
1381 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1382 src_addr = convert_memory_address (ptr_mode, src_addr);
1383
1384 dst_tree = make_tree (ptr_type_node, dst_addr);
1385 src_tree = make_tree (ptr_type_node, src_addr);
1386
1387 size_mode = TYPE_MODE (sizetype);
1388
1389 size = convert_to_mode (size_mode, size, 1);
1390 size = copy_to_mode_reg (size_mode, size);
1391
1392 /* It is incorrect to use the libcall calling conventions to call
1393 memcpy in this context. This could be a user call to memcpy and
1394 the user may wish to examine the return value from memcpy. For
1395 targets where libcalls and normal calls have different conventions
1396 for returning pointers, we could end up generating incorrect code. */
1397
1398 size_tree = make_tree (sizetype, size);
1399
1400 fn = emit_block_move_libcall_fn (true);
1401 call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1402 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1403
1404 retval = expand_normal (call_expr);
1405
1406 return retval;
1407 }
1408
1409 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1410 for the function we use for block copies. The first time FOR_CALL
1411 is true, we call assemble_external. */
1412
1413 static GTY(()) tree block_move_fn;
1414
1415 void
1416 init_block_move_fn (const char *asmspec)
1417 {
1418 if (!block_move_fn)
1419 {
1420 tree args, fn;
1421
1422 fn = get_identifier ("memcpy");
1423 args = build_function_type_list (ptr_type_node, ptr_type_node,
1424 const_ptr_type_node, sizetype,
1425 NULL_TREE);
1426
1427 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
1428 DECL_EXTERNAL (fn) = 1;
1429 TREE_PUBLIC (fn) = 1;
1430 DECL_ARTIFICIAL (fn) = 1;
1431 TREE_NOTHROW (fn) = 1;
1432 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1433 DECL_VISIBILITY_SPECIFIED (fn) = 1;
1434
1435 block_move_fn = fn;
1436 }
1437
1438 if (asmspec)
1439 set_user_assembler_name (block_move_fn, asmspec);
1440 }
1441
1442 static tree
1443 emit_block_move_libcall_fn (int for_call)
1444 {
1445 static bool emitted_extern;
1446
1447 if (!block_move_fn)
1448 init_block_move_fn (NULL);
1449
1450 if (for_call && !emitted_extern)
1451 {
1452 emitted_extern = true;
1453 make_decl_rtl (block_move_fn);
1454 assemble_external (block_move_fn);
1455 }
1456
1457 return block_move_fn;
1458 }
1459
1460 /* A subroutine of emit_block_move. Copy the data via an explicit
1461 loop. This is used only when libcalls are forbidden. */
1462 /* ??? It'd be nice to copy in hunks larger than QImode. */
1463
1464 static void
1465 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1466 unsigned int align ATTRIBUTE_UNUSED)
1467 {
1468 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1469 enum machine_mode iter_mode;
1470
1471 iter_mode = GET_MODE (size);
1472 if (iter_mode == VOIDmode)
1473 iter_mode = word_mode;
1474
1475 top_label = gen_label_rtx ();
1476 cmp_label = gen_label_rtx ();
1477 iter = gen_reg_rtx (iter_mode);
1478
1479 emit_move_insn (iter, const0_rtx);
1480
1481 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1482 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1483 do_pending_stack_adjust ();
1484
1485 emit_jump (cmp_label);
1486 emit_label (top_label);
1487
1488 tmp = convert_modes (Pmode, iter_mode, iter, true);
1489 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1490 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1491 x = change_address (x, QImode, x_addr);
1492 y = change_address (y, QImode, y_addr);
1493
1494 emit_move_insn (x, y);
1495
1496 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1497 true, OPTAB_LIB_WIDEN);
1498 if (tmp != iter)
1499 emit_move_insn (iter, tmp);
1500
1501 emit_label (cmp_label);
1502
1503 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1504 true, top_label);
1505 }
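
/* Shape of the loop emitted above, expressed in C (illustrative only):

     iter = 0;
     goto cmp;
   top:
     x[iter] = y[iter];    (one QImode move per iteration)
     iter = iter + 1;
   cmp:
     if (iter < size) goto top;
*/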
1506 \f
1507 /* Copy all or part of a value X into registers starting at REGNO.
1508 The number of registers to be filled is NREGS. */
1509
1510 void
1511 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1512 {
1513 int i;
1514 #ifdef HAVE_load_multiple
1515 rtx pat;
1516 rtx last;
1517 #endif
1518
1519 if (nregs == 0)
1520 return;
1521
1522 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1523 x = validize_mem (force_const_mem (mode, x));
1524
1525 /* See if the machine can do this with a load multiple insn. */
1526 #ifdef HAVE_load_multiple
1527 if (HAVE_load_multiple)
1528 {
1529 last = get_last_insn ();
1530 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1531 GEN_INT (nregs));
1532 if (pat)
1533 {
1534 emit_insn (pat);
1535 return;
1536 }
1537 else
1538 delete_insns_since (last);
1539 }
1540 #endif
1541
1542 for (i = 0; i < nregs; i++)
1543 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1544 operand_subword_force (x, i, mode));
1545 }
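
/* Usage sketch (illustrative, hypothetical register number): load the
   first two words of X into consecutive hard regs starting at regno 4,
   either via a load-multiple insn or word by word.  */
#if 0
  move_block_to_reg (4, x, 2, BLKmode);
#endif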
1546
1547 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1548 The number of registers to be filled is NREGS. */
1549
1550 void
1551 move_block_from_reg (int regno, rtx x, int nregs)
1552 {
1553 int i;
1554
1555 if (nregs == 0)
1556 return;
1557
1558 /* See if the machine can do this with a store multiple insn. */
1559 #ifdef HAVE_store_multiple
1560 if (HAVE_store_multiple)
1561 {
1562 rtx last = get_last_insn ();
1563 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1564 GEN_INT (nregs));
1565 if (pat)
1566 {
1567 emit_insn (pat);
1568 return;
1569 }
1570 else
1571 delete_insns_since (last);
1572 }
1573 #endif
1574
1575 for (i = 0; i < nregs; i++)
1576 {
1577 rtx tem = operand_subword (x, i, 1, BLKmode);
1578
1579 gcc_assert (tem);
1580
1581 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1582 }
1583 }
1584
1585 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1586 ORIG, where ORIG is a non-consecutive group of registers represented by
1587 a PARALLEL. The clone is identical to the original except in that the
1588 original set of registers is replaced by a new set of pseudo registers.
1589 The new set has the same modes as the original set. */
1590
1591 rtx
1592 gen_group_rtx (rtx orig)
1593 {
1594 int i, length;
1595 rtx *tmps;
1596
1597 gcc_assert (GET_CODE (orig) == PARALLEL);
1598
1599 length = XVECLEN (orig, 0);
1600 tmps = XALLOCAVEC (rtx, length);
1601
1602 /* Skip a NULL entry in first slot. */
1603 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1604
1605 if (i)
1606 tmps[0] = 0;
1607
1608 for (; i < length; i++)
1609 {
1610 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1611 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1612
1613 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1614 }
1615
1616 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1617 }
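
/* Illustrative shape of such a group (hypothetical modes and offsets):

     (parallel [(expr_list (reg:DI 100) (const_int 0))
                (expr_list (reg:DI 101) (const_int 8))])

   gen_group_rtx returns the same shape with regs 100 and 101 replaced
   by fresh pseudos of the same modes.  */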
1618
1619 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1620 except that values are placed in TMPS[i], and must later be moved
1621 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1622
1623 static void
1624 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1625 {
1626 rtx src;
1627 int start, i;
1628 enum machine_mode m = GET_MODE (orig_src);
1629
1630 gcc_assert (GET_CODE (dst) == PARALLEL);
1631
1632 if (m != VOIDmode
1633 && !SCALAR_INT_MODE_P (m)
1634 && !MEM_P (orig_src)
1635 && GET_CODE (orig_src) != CONCAT)
1636 {
1637 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1638 if (imode == BLKmode)
1639 src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
1640 else
1641 src = gen_reg_rtx (imode);
1642 if (imode != BLKmode)
1643 src = gen_lowpart (GET_MODE (orig_src), src);
1644 emit_move_insn (src, orig_src);
1645 /* ...and back again. */
1646 if (imode != BLKmode)
1647 src = gen_lowpart (imode, src);
1648 emit_group_load_1 (tmps, dst, src, type, ssize);
1649 return;
1650 }
1651
1652 /* Check for a NULL entry, used to indicate that the parameter goes
1653 both on the stack and in registers. */
1654 if (XEXP (XVECEXP (dst, 0, 0), 0))
1655 start = 0;
1656 else
1657 start = 1;
1658
1659 /* Process the pieces. */
1660 for (i = start; i < XVECLEN (dst, 0); i++)
1661 {
1662 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1663 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1664 unsigned int bytelen = GET_MODE_SIZE (mode);
1665 int shift = 0;
1666
1667 /* Handle trailing fragments that run over the size of the struct. */
1668 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1669 {
1670 /* Arrange to shift the fragment to where it belongs.
1671 extract_bit_field loads to the lsb of the reg. */
1672 if (
1673 #ifdef BLOCK_REG_PADDING
1674 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1675 == (BYTES_BIG_ENDIAN ? upward : downward)
1676 #else
1677 BYTES_BIG_ENDIAN
1678 #endif
1679 )
1680 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1681 bytelen = ssize - bytepos;
1682 gcc_assert (bytelen > 0);
1683 }
1684
1685 /* If we won't be loading directly from memory, protect the real source
1686 from strange tricks we might play; but make sure that the source can
1687 be loaded directly into the destination. */
1688 src = orig_src;
1689 if (!MEM_P (orig_src)
1690 && (!CONSTANT_P (orig_src)
1691 || (GET_MODE (orig_src) != mode
1692 && GET_MODE (orig_src) != VOIDmode)))
1693 {
1694 if (GET_MODE (orig_src) == VOIDmode)
1695 src = gen_reg_rtx (mode);
1696 else
1697 src = gen_reg_rtx (GET_MODE (orig_src));
1698
1699 emit_move_insn (src, orig_src);
1700 }
1701
1702 /* Optimize the access just a bit. */
1703 if (MEM_P (src)
1704 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1705 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1706 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1707 && bytelen == GET_MODE_SIZE (mode))
1708 {
1709 tmps[i] = gen_reg_rtx (mode);
1710 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1711 }
1712 else if (COMPLEX_MODE_P (mode)
1713 && GET_MODE (src) == mode
1714 && bytelen == GET_MODE_SIZE (mode))
1715 /* Let emit_move_complex do the bulk of the work. */
1716 tmps[i] = src;
1717 else if (GET_CODE (src) == CONCAT)
1718 {
1719 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1720 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1721
1722 if ((bytepos == 0 && bytelen == slen0)
1723 || (bytepos != 0 && bytepos + bytelen <= slen))
1724 {
1725 /* The following assumes that the concatenated objects all
1726 have the same size. In this case, a simple calculation
1727 can be used to determine the object and the bit field
1728 to be extracted. */
1729 tmps[i] = XEXP (src, bytepos / slen0);
1730 if (! CONSTANT_P (tmps[i])
1731 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1732 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1733 (bytepos % slen0) * BITS_PER_UNIT,
1734 1, NULL_RTX, mode, mode);
1735 }
1736 else
1737 {
1738 rtx mem;
1739
1740 gcc_assert (!bytepos);
1741 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1742 emit_move_insn (mem, src);
1743 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1744 0, 1, NULL_RTX, mode, mode);
1745 }
1746 }
1747 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1748 SIMD register, which is currently broken. While we get GCC
1749 to emit proper RTL for these cases, let's dump to memory. */
1750 else if (VECTOR_MODE_P (GET_MODE (dst))
1751 && REG_P (src))
1752 {
1753 int slen = GET_MODE_SIZE (GET_MODE (src));
1754 rtx mem;
1755
1756 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1757 emit_move_insn (mem, src);
1758 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1759 }
1760 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1761 && XVECLEN (dst, 0) > 1)
1762 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
1763 else if (CONSTANT_P (src))
1764 {
1765 HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;
1766
1767 if (len == ssize)
1768 tmps[i] = src;
1769 else
1770 {
1771 rtx first, second;
1772
1773 gcc_assert (2 * len == ssize);
1774 split_double (src, &first, &second);
1775 if (i)
1776 tmps[i] = second;
1777 else
1778 tmps[i] = first;
1779 }
1780 }
1781 else if (REG_P (src) && GET_MODE (src) == mode)
1782 tmps[i] = src;
1783 else
1784 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1785 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1786 mode, mode);
1787
1788 if (shift)
1789 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1790 build_int_cst (NULL_TREE, shift), tmps[i], 0);
1791 }
1792 }
1793
1794 /* Emit code to move a block SRC of type TYPE to a block DST,
1795 where DST is non-consecutive registers represented by a PARALLEL.
1796 SSIZE represents the total size of block SRC in bytes, or -1
1797 if not known. */
1798
1799 void
1800 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1801 {
1802 rtx *tmps;
1803 int i;
1804
1805 tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
1806 emit_group_load_1 (tmps, dst, src, type, ssize);
1807
1808 /* Copy the extracted pieces into the proper (probable) hard regs. */
1809 for (i = 0; i < XVECLEN (dst, 0); i++)
1810 {
1811 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1812 if (d == NULL)
1813 continue;
1814 emit_move_insn (d, tmps[i]);
1815 }
1816 }
1817
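/* An illustrative sketch (hypothetical ABI and register numbers): a
   16-byte struct returned in two 8-byte registers might be described by

     (parallel [(expr_list (reg:DI 3) (const_int 0))
                (expr_list (reg:DI 4) (const_int 8))])

   so that

     emit_group_load (dst, src_mem, struct_type, 16);

   emits one load per (register, byte-offset) pair, with the extracted
   pieces staged in temporaries before reaching the hard registers. */
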
1818 /* Similar, but load SRC into new pseudos in a format that looks like
1819 PARALLEL. This can later be fed to emit_group_move to get things
1820 in the right place. */
1821
1822 rtx
1823 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1824 {
1825 rtvec vec;
1826 int i;
1827
1828 vec = rtvec_alloc (XVECLEN (parallel, 0));
1829 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1830
1831 /* Convert the vector to look just like the original PARALLEL, except
1832 with the computed values. */
1833 for (i = 0; i < XVECLEN (parallel, 0); i++)
1834 {
1835 rtx e = XVECEXP (parallel, 0, i);
1836 rtx d = XEXP (e, 0);
1837
1838 if (d)
1839 {
1840 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1841 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1842 }
1843 RTVEC_ELT (vec, i) = e;
1844 }
1845
1846 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1847 }
1848
1849 /* Emit code to move a block SRC to block DST, where SRC and DST are
1850 non-consecutive groups of registers, each represented by a PARALLEL. */
1851
1852 void
1853 emit_group_move (rtx dst, rtx src)
1854 {
1855 int i;
1856
1857 gcc_assert (GET_CODE (src) == PARALLEL
1858 && GET_CODE (dst) == PARALLEL
1859 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1860
1861 /* Skip first entry if NULL. */
1862 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1863 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1864 XEXP (XVECEXP (src, 0, i), 0));
1865 }
1866
1867 /* Move a group of registers represented by a PARALLEL into pseudos. */
1868
1869 rtx
1870 emit_group_move_into_temps (rtx src)
1871 {
1872 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1873 int i;
1874
1875 for (i = 0; i < XVECLEN (src, 0); i++)
1876 {
1877 rtx e = XVECEXP (src, 0, i);
1878 rtx d = XEXP (e, 0);
1879
1880 if (d)
1881 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1882 RTVEC_ELT (vec, i) = e;
1883 }
1884
1885 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1886 }
1887
1888 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1889 where SRC is non-consecutive registers represented by a PARALLEL.
1890 SSIZE represents the total size of block ORIG_DST, or -1 if not
1891 known. */
1892
1893 void
1894 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1895 {
1896 rtx *tmps, dst;
1897 int start, finish, i;
1898 enum machine_mode m = GET_MODE (orig_dst);
1899
1900 gcc_assert (GET_CODE (src) == PARALLEL);
1901
1902 if (!SCALAR_INT_MODE_P (m)
1903 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1904 {
1905 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1906 if (imode == BLKmode)
1907 dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
1908 else
1909 dst = gen_reg_rtx (imode);
1910 emit_group_store (dst, src, type, ssize);
1911 if (imode != BLKmode)
1912 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1913 emit_move_insn (orig_dst, dst);
1914 return;
1915 }
1916
1917 /* Check for a NULL entry, used to indicate that the parameter goes
1918 both on the stack and in registers. */
1919 if (XEXP (XVECEXP (src, 0, 0), 0))
1920 start = 0;
1921 else
1922 start = 1;
1923 finish = XVECLEN (src, 0);
1924
1925 tmps = XALLOCAVEC (rtx, finish);
1926
1927 /* Copy the (probable) hard regs into pseudos. */
1928 for (i = start; i < finish; i++)
1929 {
1930 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1931 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1932 {
1933 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1934 emit_move_insn (tmps[i], reg);
1935 }
1936 else
1937 tmps[i] = reg;
1938 }
1939
1940 /* If we won't be storing directly into memory, protect the real destination
1941 from strange tricks we might play. */
1942 dst = orig_dst;
1943 if (GET_CODE (dst) == PARALLEL)
1944 {
1945 rtx temp;
1946
1947 /* We can get a PARALLEL dst if there is a conditional expression in
1948 a return statement. In that case, the dst and src are the same,
1949 so no action is necessary. */
1950 if (rtx_equal_p (dst, src))
1951 return;
1952
1953 /* It is unclear if we can ever reach here, but we may as well handle
1954 it. Allocate a temporary, and split this into a store/load to/from
1955 the temporary. */
1956
1957 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1958 emit_group_store (temp, src, type, ssize);
1959 emit_group_load (dst, temp, type, ssize);
1960 return;
1961 }
1962 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1963 {
1964 enum machine_mode outer = GET_MODE (dst);
1965 enum machine_mode inner;
1966 HOST_WIDE_INT bytepos;
1967 bool done = false;
1968 rtx temp;
1969
1970 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1971 dst = gen_reg_rtx (outer);
1972
1973 /* Make life a bit easier for combine. */
1974 /* If the first element of the vector is the low part
1975 of the destination mode, use a paradoxical subreg to
1976 initialize the destination. */
1977 if (start < finish)
1978 {
1979 inner = GET_MODE (tmps[start]);
1980 bytepos = subreg_lowpart_offset (inner, outer);
1981 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1982 {
1983 temp = simplify_gen_subreg (outer, tmps[start],
1984 inner, 0);
1985 if (temp)
1986 {
1987 emit_move_insn (dst, temp);
1988 done = true;
1989 start++;
1990 }
1991 }
1992 }
1993
1994 /* If the first element wasn't the low part, try the last. */
1995 if (!done
1996 && start < finish - 1)
1997 {
1998 inner = GET_MODE (tmps[finish - 1]);
1999 bytepos = subreg_lowpart_offset (inner, outer);
2000 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
2001 {
2002 temp = simplify_gen_subreg (outer, tmps[finish - 1],
2003 inner, 0);
2004 if (temp)
2005 {
2006 emit_move_insn (dst, temp);
2007 done = true;
2008 finish--;
2009 }
2010 }
2011 }
2012
2013 /* Otherwise, simply initialize the result to zero. */
2014 if (!done)
2015 emit_move_insn (dst, CONST0_RTX (outer));
2016 }
2017
2018 /* Process the pieces. */
2019 for (i = start; i < finish; i++)
2020 {
2021 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2022 enum machine_mode mode = GET_MODE (tmps[i]);
2023 unsigned int bytelen = GET_MODE_SIZE (mode);
2024 unsigned int adj_bytelen = bytelen;
2025 rtx dest = dst;
2026
2027 /* Handle trailing fragments that run over the size of the struct. */
2028 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2029 adj_bytelen = ssize - bytepos;
2030
2031 if (GET_CODE (dst) == CONCAT)
2032 {
2033 if (bytepos + adj_bytelen
2034 <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2035 dest = XEXP (dst, 0);
2036 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2037 {
2038 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2039 dest = XEXP (dst, 1);
2040 }
2041 else
2042 {
2043 enum machine_mode dest_mode = GET_MODE (dest);
2044 enum machine_mode tmp_mode = GET_MODE (tmps[i]);
2045
2046 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2047
2048 if (GET_MODE_ALIGNMENT (dest_mode)
2049 >= GET_MODE_ALIGNMENT (tmp_mode))
2050 {
2051 dest = assign_stack_temp (dest_mode,
2052 GET_MODE_SIZE (dest_mode),
2053 0);
2054 emit_move_insn (adjust_address (dest,
2055 tmp_mode,
2056 bytepos),
2057 tmps[i]);
2058 dst = dest;
2059 }
2060 else
2061 {
2062 dest = assign_stack_temp (tmp_mode,
2063 GET_MODE_SIZE (tmp_mode),
2064 0);
2065 emit_move_insn (dest, tmps[i]);
2066 dst = adjust_address (dest, dest_mode, bytepos);
2067 }
2068 break;
2069 }
2070 }
2071
2072 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2073 {
2074 /* store_bit_field always takes its value from the lsb.
2075 Move the fragment to the lsb if it's not already there. */
2076 if (
2077 #ifdef BLOCK_REG_PADDING
2078 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2079 == (BYTES_BIG_ENDIAN ? upward : downward)
2080 #else
2081 BYTES_BIG_ENDIAN
2082 #endif
2083 )
2084 {
2085 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2086 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2087 build_int_cst (NULL_TREE, shift),
2088 tmps[i], 0);
2089 }
2090 bytelen = adj_bytelen;
2091 }
2092
2093 /* Optimize the access just a bit. */
2094 if (MEM_P (dest)
2095 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2096 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2097 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2098 && bytelen == GET_MODE_SIZE (mode))
2099 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2100 else
2101 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2102 mode, tmps[i]);
2103 }
2104
2105 /* Copy from the pseudo into the (probable) hard reg. */
2106 if (orig_dst != dst)
2107 emit_move_insn (orig_dst, dst);
2108 }
2109
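/* Continuing the sketch given after emit_group_load above: the reverse
   direction, scattering the same hypothetical two-register PARALLEL
   back into 16 bytes of memory, is simply

     emit_group_store (dst_mem, parallel, struct_type, 16);

   where dst_mem is the destination MEM and struct_type its tree type. */
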
2110 /* Generate code to copy a BLKmode object of TYPE out of a
2111 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2112 is null, a stack temporary is created. TGTBLK is returned.
2113
2114 The purpose of this routine is to handle functions that return
2115 BLKmode structures in registers. Some machines (the PA for example)
2116 want to return all small structures in registers regardless of the
2117 structure's alignment. */
2118
2119 rtx
2120 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2121 {
2122 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2123 rtx src = NULL, dst = NULL;
2124 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2125 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2126 enum machine_mode copy_mode;
2127
2128 if (tgtblk == 0)
2129 {
2130 tgtblk = assign_temp (build_qualified_type (type,
2131 (TYPE_QUALS (type)
2132 | TYPE_QUAL_CONST)),
2133 0, 1, 1);
2134 preserve_temp_slots (tgtblk);
2135 }
2136
2137 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2138 into a new pseudo which is a full word. */
2139
2140 if (GET_MODE (srcreg) != BLKmode
2141 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2142 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2143
2144 /* If the structure doesn't take up a whole number of words, see whether
2145 SRCREG is padded on the left or on the right. If it's on the left,
2146 set PADDING_CORRECTION to the number of bits to skip.
2147
2148 In most ABIs, the structure will be returned at the least significant
2149 end of the register, which translates to right padding on little-endian
2150 targets and left padding on big-endian targets. The opposite
2151 holds if the structure is returned at the most significant
2152 end of the register. */
2153 if (bytes % UNITS_PER_WORD != 0
2154 && (targetm.calls.return_in_msb (type)
2155 ? !BYTES_BIG_ENDIAN
2156 : BYTES_BIG_ENDIAN))
2157 padding_correction
2158 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2159
2160 /* Copy the structure BITSIZE bits at a time. If the target lives in
2161 memory, take care of not reading/writing past its end by selecting
2162 a copy mode suited to BITSIZE. This should always be possible given
2163 how it is computed.
2164
2165 We could probably emit more efficient code for machines which do not use
2166 strict alignment, but it doesn't seem worth the effort at the current
2167 time. */
2168
2169 copy_mode = word_mode;
2170 if (MEM_P (tgtblk))
2171 {
2172 enum machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2173 if (mem_mode != BLKmode)
2174 copy_mode = mem_mode;
2175 }
2176
2177 for (bitpos = 0, xbitpos = padding_correction;
2178 bitpos < bytes * BITS_PER_UNIT;
2179 bitpos += bitsize, xbitpos += bitsize)
2180 {
2181 /* We need a new source operand each time xbitpos is on a
2182 word boundary and when xbitpos == padding_correction
2183 (the first time through). */
2184 if (xbitpos % BITS_PER_WORD == 0
2185 || xbitpos == padding_correction)
2186 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2187 GET_MODE (srcreg));
2188
2189 /* We need a new destination operand each time bitpos is on
2190 a word boundary. */
2191 if (bitpos % BITS_PER_WORD == 0)
2192 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2193
2194 /* Use xbitpos for the source extraction (right justified) and
2195 bitpos for the destination store (left justified). */
2196 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, copy_mode,
2197 extract_bit_field (src, bitsize,
2198 xbitpos % BITS_PER_WORD, 1,
2199 NULL_RTX, copy_mode, copy_mode));
2200 }
2201
2202 return tgtblk;
2203 }
2204
2205 /* Add a USE expression for REG to the (possibly empty) list pointed
2206 to by CALL_FUSAGE. REG must denote a hard register. */
2207
2208 void
2209 use_reg (rtx *call_fusage, rtx reg)
2210 {
2211 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2212
2213 *call_fusage
2214 = gen_rtx_EXPR_LIST (VOIDmode,
2215 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2216 }
2217
2218 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2219 starting at REGNO. All of these registers must be hard registers. */
2220
2221 void
2222 use_regs (rtx *call_fusage, int regno, int nregs)
2223 {
2224 int i;
2225
2226 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2227
2228 for (i = 0; i < nregs; i++)
2229 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2230 }
2231
2232 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2233 PARALLEL REGS. This is for calls that pass values in multiple
2234 non-contiguous locations. The Irix 6 ABI has examples of this. */
2235
2236 void
2237 use_group_regs (rtx *call_fusage, rtx regs)
2238 {
2239 int i;
2240
2241 for (i = 0; i < XVECLEN (regs, 0); i++)
2242 {
2243 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2244
2245 /* A NULL entry means the parameter goes both on the stack and in
2246 registers. This can also be a MEM for targets that pass values
2247 partially on the stack and partially in registers. */
2248 if (reg != 0 && REG_P (reg))
2249 use_reg (call_fusage, reg);
2250 }
2251 }
2252
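/* A small usage sketch for the use_* routines above, with made-up hard
   register numbers: a caller expanding a call whose argument occupies
   hard regs 4 and 5 can write

     rtx call_fusage = NULL_RTX;
     use_regs (&call_fusage, 4, 2);

   leaving CALL_FUSAGE as an EXPR_LIST chain of (use (reg ...)) rtxes
   that is later attached to the CALL_INSN. */
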
2253 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2254 assignment and the code of the expression on the RHS is CODE. Return
2255 NULL otherwise. */
2256
2257 static gimple
2258 get_def_for_expr (tree name, enum tree_code code)
2259 {
2260 gimple def_stmt;
2261
2262 if (TREE_CODE (name) != SSA_NAME)
2263 return NULL;
2264
2265 def_stmt = get_gimple_for_ssa_name (name);
2266 if (!def_stmt
2267 || gimple_assign_rhs_code (def_stmt) != code)
2268 return NULL;
2269
2270 return def_stmt;
2271 }
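
/* As a hedged example of how this helper is typically used: a caller
   expanding a binary operation can test whether one operand is defined
   by a conversion,

     gimple def = get_def_for_expr (treeop0, NOP_EXPR);
     if (def)
       op = gimple_assign_rhs1 (def);

   where treeop0 and op stand in for the caller's locals. */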
2272 \f
2273
2274 /* Determine whether the LEN bytes generated by CONSTFUN can be
2275 stored to memory using several move instructions. CONSTFUNDATA is
2276 a pointer which will be passed as argument in every CONSTFUN call.
2277 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2278 a memset operation and false if it's a copy of a constant string.
2279 Return nonzero if a call to store_by_pieces should succeed. */
2280
2281 int
2282 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2283 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2284 void *constfundata, unsigned int align, bool memsetp)
2285 {
2286 unsigned HOST_WIDE_INT l;
2287 unsigned int max_size;
2288 HOST_WIDE_INT offset = 0;
2289 enum machine_mode mode, tmode;
2290 enum insn_code icode;
2291 int reverse;
2292 rtx cst;
2293
2294 if (len == 0)
2295 return 1;
2296
2297 if (! (memsetp
2298 ? SET_BY_PIECES_P (len, align)
2299 : STORE_BY_PIECES_P (len, align)))
2300 return 0;
2301
2302 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2303 if (align >= GET_MODE_ALIGNMENT (tmode))
2304 align = GET_MODE_ALIGNMENT (tmode);
2305 else
2306 {
2307 enum machine_mode xmode;
2308
2309 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2310 tmode != VOIDmode;
2311 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2312 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2313 || SLOW_UNALIGNED_ACCESS (tmode, align))
2314 break;
2315
2316 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2317 }
2318
2319 /* We would first store what we can in the largest integer mode, then go to
2320 successively smaller modes. */
2321
2322 for (reverse = 0;
2323 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2324 reverse++)
2325 {
2326 l = len;
2327 mode = VOIDmode;
2328 max_size = STORE_MAX_PIECES + 1;
2329 while (max_size > 1)
2330 {
2331 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2332 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2333 if (GET_MODE_SIZE (tmode) < max_size)
2334 mode = tmode;
2335
2336 if (mode == VOIDmode)
2337 break;
2338
2339 icode = optab_handler (mov_optab, mode)->insn_code;
2340 if (icode != CODE_FOR_nothing
2341 && align >= GET_MODE_ALIGNMENT (mode))
2342 {
2343 unsigned int size = GET_MODE_SIZE (mode);
2344
2345 while (l >= size)
2346 {
2347 if (reverse)
2348 offset -= size;
2349
2350 cst = (*constfun) (constfundata, offset, mode);
2351 if (!LEGITIMATE_CONSTANT_P (cst))
2352 return 0;
2353
2354 if (!reverse)
2355 offset += size;
2356
2357 l -= size;
2358 }
2359 }
2360
2361 max_size = GET_MODE_SIZE (mode);
2362 }
2363
2364 /* The code above should have handled everything. */
2365 gcc_assert (!l);
2366 }
2367
2368 return 1;
2369 }
2370
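/* A minimal usage sketch, modeled on the builtins expander: to test
   whether a 16-byte string constant can be stored inline, a callback
   reads the constant piecewise,

     static rtx
     read_str (void *data, HOST_WIDE_INT offset, enum machine_mode mode)
     {
       return c_readstr ((const char *) data + offset, mode);
     }

   and the caller checks before committing:

     if (can_store_by_pieces (16, read_str, (void *) str, align, false))
       store_by_pieces (dest, 16, read_str, (void *) str, align,
                        false, 0);

   read_str, str and dest are hypothetical names here. */
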
2371 /* Generate several move instructions to store LEN bytes generated by
2372 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2373 pointer which will be passed as argument in every CONSTFUN call.
2374 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2375 a memset operation and false if it's a copy of a constant string.
2376 If ENDP is 0, return TO; if ENDP is 1, return memory at the end, a la
2377 mempcpy; and if ENDP is 2, return memory at the end minus one byte,
2378 a la stpcpy. */
2379
2380 rtx
2381 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2382 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2383 void *constfundata, unsigned int align, bool memsetp, int endp)
2384 {
2385 struct store_by_pieces_d data;
2386
2387 if (len == 0)
2388 {
2389 gcc_assert (endp != 2);
2390 return to;
2391 }
2392
2393 gcc_assert (memsetp
2394 ? SET_BY_PIECES_P (len, align)
2395 : STORE_BY_PIECES_P (len, align));
2396 data.constfun = constfun;
2397 data.constfundata = constfundata;
2398 data.len = len;
2399 data.to = to;
2400 store_by_pieces_1 (&data, align);
2401 if (endp)
2402 {
2403 rtx to1;
2404
2405 gcc_assert (!data.reverse);
2406 if (data.autinc_to)
2407 {
2408 if (endp == 2)
2409 {
2410 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2411 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2412 else
2413 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2414 -1));
2415 }
2416 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2417 data.offset);
2418 }
2419 else
2420 {
2421 if (endp == 2)
2422 --data.offset;
2423 to1 = adjust_address (data.to, QImode, data.offset);
2424 }
2425 return to1;
2426 }
2427 else
2428 return data.to;
2429 }
2430
2431 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2432 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2433
2434 static void
2435 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2436 {
2437 struct store_by_pieces_d data;
2438
2439 if (len == 0)
2440 return;
2441
2442 data.constfun = clear_by_pieces_1;
2443 data.constfundata = NULL;
2444 data.len = len;
2445 data.to = to;
2446 store_by_pieces_1 (&data, align);
2447 }
2448
2449 /* Callback routine for clear_by_pieces.
2450 Return const0_rtx unconditionally. */
2451
2452 static rtx
2453 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2454 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2455 enum machine_mode mode ATTRIBUTE_UNUSED)
2456 {
2457 return const0_rtx;
2458 }
2459
2460 /* Subroutine of clear_by_pieces and store_by_pieces.
2461 Generate several move instructions to store LEN bytes of block TO. (A MEM
2462 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2463
2464 static void
2465 store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
2466 unsigned int align ATTRIBUTE_UNUSED)
2467 {
2468 rtx to_addr = XEXP (data->to, 0);
2469 unsigned int max_size = STORE_MAX_PIECES + 1;
2470 enum machine_mode mode = VOIDmode, tmode;
2471 enum insn_code icode;
2472
2473 data->offset = 0;
2474 data->to_addr = to_addr;
2475 data->autinc_to
2476 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2477 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2478
2479 data->explicit_inc_to = 0;
2480 data->reverse
2481 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2482 if (data->reverse)
2483 data->offset = data->len;
2484
2485 /* If storing requires more than two move insns,
2486 copy addresses to registers (to make displacements shorter)
2487 and use post-increment if available. */
2488 if (!data->autinc_to
2489 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2490 {
2491 /* Determine the main mode we'll be using. */
2492 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2493 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2494 if (GET_MODE_SIZE (tmode) < max_size)
2495 mode = tmode;
2496
2497 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2498 {
2499 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2500 data->autinc_to = 1;
2501 data->explicit_inc_to = -1;
2502 }
2503
2504 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2505 && ! data->autinc_to)
2506 {
2507 data->to_addr = copy_addr_to_reg (to_addr);
2508 data->autinc_to = 1;
2509 data->explicit_inc_to = 1;
2510 }
2511
2512 if (!data->autinc_to && CONSTANT_P (to_addr))
2513 data->to_addr = copy_addr_to_reg (to_addr);
2514 }
2515
2516 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2517 if (align >= GET_MODE_ALIGNMENT (tmode))
2518 align = GET_MODE_ALIGNMENT (tmode);
2519 else
2520 {
2521 enum machine_mode xmode;
2522
2523 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2524 tmode != VOIDmode;
2525 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2526 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2527 || SLOW_UNALIGNED_ACCESS (tmode, align))
2528 break;
2529
2530 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2531 }
2532
2533 /* First store what we can in the largest integer mode, then go to
2534 successively smaller modes. */
2535
2536 while (max_size > 1)
2537 {
2538 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2539 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2540 if (GET_MODE_SIZE (tmode) < max_size)
2541 mode = tmode;
2542
2543 if (mode == VOIDmode)
2544 break;
2545
2546 icode = optab_handler (mov_optab, mode)->insn_code;
2547 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2548 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2549
2550 max_size = GET_MODE_SIZE (mode);
2551 }
2552
2553 /* The code above should have handled everything. */
2554 gcc_assert (!data->len);
2555 }
2556
2557 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2558 with move instructions for mode MODE. GENFUN is the gen_... function
2559 to make a move insn for that mode. DATA has all the other info. */
2560
2561 static void
2562 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2563 struct store_by_pieces_d *data)
2564 {
2565 unsigned int size = GET_MODE_SIZE (mode);
2566 rtx to1, cst;
2567
2568 while (data->len >= size)
2569 {
2570 if (data->reverse)
2571 data->offset -= size;
2572
2573 if (data->autinc_to)
2574 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2575 data->offset);
2576 else
2577 to1 = adjust_address (data->to, mode, data->offset);
2578
2579 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2580 emit_insn (gen_add2_insn (data->to_addr,
2581 GEN_INT (-(HOST_WIDE_INT) size)));
2582
2583 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2584 emit_insn ((*genfun) (to1, cst));
2585
2586 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2587 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2588
2589 if (! data->reverse)
2590 data->offset += size;
2591
2592 data->len -= size;
2593 }
2594 }
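
/* For example, storing 7 constant bytes to a word-aligned destination
   on a 32-bit target typically decomposes, via the mode walk above,
   into

     offset 0: one SImode store
     offset 4: one HImode store
     offset 6: one QImode store

   assuming the target has mov patterns for all three modes. */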
2595 \f
2596 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2597 its length in bytes. */
2598
2599 rtx
2600 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2601 unsigned int expected_align, HOST_WIDE_INT expected_size)
2602 {
2603 enum machine_mode mode = GET_MODE (object);
2604 unsigned int align;
2605
2606 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2607
2608 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2609 just move a zero. Otherwise, do this a piece at a time. */
2610 if (mode != BLKmode
2611 && CONST_INT_P (size)
2612 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2613 {
2614 rtx zero = CONST0_RTX (mode);
2615 if (zero != NULL)
2616 {
2617 emit_move_insn (object, zero);
2618 return NULL;
2619 }
2620
2621 if (COMPLEX_MODE_P (mode))
2622 {
2623 zero = CONST0_RTX (GET_MODE_INNER (mode));
2624 if (zero != NULL)
2625 {
2626 write_complex_part (object, zero, 0);
2627 write_complex_part (object, zero, 1);
2628 return NULL;
2629 }
2630 }
2631 }
2632
2633 if (size == const0_rtx)
2634 return NULL;
2635
2636 align = MEM_ALIGN (object);
2637
2638 if (CONST_INT_P (size)
2639 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2640 clear_by_pieces (object, INTVAL (size), align);
2641 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2642 expected_align, expected_size))
2643 ;
2644 else
2645 return set_storage_via_libcall (object, size, const0_rtx,
2646 method == BLOCK_OP_TAILCALL);
2647
2648 return NULL;
2649 }
2650
2651 rtx
2652 clear_storage (rtx object, rtx size, enum block_op_methods method)
2653 {
2654 return clear_storage_hints (object, size, method, 0, -1);
2655 }
2656
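/* For instance, zeroing a 32-byte BLKmode object OBJ:

     clear_storage (obj, GEN_INT (32), BLOCK_OP_NORMAL);

   Depending on size, alignment and target support this ends up as
   inline stores (clear_by_pieces), a setmem insn, or a memset call. */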
2657
2658 /* A subroutine of clear_storage. Expand a call to memset.
2659 Return the return value of memset, 0 otherwise. */
2660
2661 rtx
2662 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2663 {
2664 tree call_expr, fn, object_tree, size_tree, val_tree;
2665 enum machine_mode size_mode;
2666 rtx retval;
2667
2668 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2669 place those new pseudos into a VAR_DECL and use them later. */
2670
2671 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2672
2673 size_mode = TYPE_MODE (sizetype);
2674 size = convert_to_mode (size_mode, size, 1);
2675 size = copy_to_mode_reg (size_mode, size);
2676
2677 /* It is incorrect to use the libcall calling conventions to call
2678 memset in this context. This could be a user call to memset and
2679 the user may wish to examine the return value from memset. For
2680 targets where libcalls and normal calls have different conventions
2681 for returning pointers, we could end up generating incorrect code. */
2682
2683 object_tree = make_tree (ptr_type_node, object);
2684 if (!CONST_INT_P (val))
2685 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2686 size_tree = make_tree (sizetype, size);
2687 val_tree = make_tree (integer_type_node, val);
2688
2689 fn = clear_storage_libcall_fn (true);
2690 call_expr = build_call_expr (fn, 3,
2691 object_tree, val_tree, size_tree);
2692 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2693
2694 retval = expand_normal (call_expr);
2695
2696 return retval;
2697 }
2698
2699 /* A subroutine of set_storage_via_libcall. Create the tree node
2700 for the function we use for block clears. The first time FOR_CALL
2701 is true, we call assemble_external. */
2702
2703 tree block_clear_fn;
2704
2705 void
2706 init_block_clear_fn (const char *asmspec)
2707 {
2708 if (!block_clear_fn)
2709 {
2710 tree fn, args;
2711
2712 fn = get_identifier ("memset");
2713 args = build_function_type_list (ptr_type_node, ptr_type_node,
2714 integer_type_node, sizetype,
2715 NULL_TREE);
2716
2717 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
2718 DECL_EXTERNAL (fn) = 1;
2719 TREE_PUBLIC (fn) = 1;
2720 DECL_ARTIFICIAL (fn) = 1;
2721 TREE_NOTHROW (fn) = 1;
2722 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2723 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2724
2725 block_clear_fn = fn;
2726 }
2727
2728 if (asmspec)
2729 set_user_assembler_name (block_clear_fn, asmspec);
2730 }
2731
2732 static tree
2733 clear_storage_libcall_fn (int for_call)
2734 {
2735 static bool emitted_extern;
2736
2737 if (!block_clear_fn)
2738 init_block_clear_fn (NULL);
2739
2740 if (for_call && !emitted_extern)
2741 {
2742 emitted_extern = true;
2743 make_decl_rtl (block_clear_fn);
2744 assemble_external (block_clear_fn);
2745 }
2746
2747 return block_clear_fn;
2748 }
2749 \f
2750 /* Expand a setmem pattern; return true if successful. */
2751
2752 bool
2753 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2754 unsigned int expected_align, HOST_WIDE_INT expected_size)
2755 {
2756 /* Try the most limited insn first, because there's no point
2757 including more than one in the machine description unless
2758 the more limited one has some advantage. */
2759
2760 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2761 enum machine_mode mode;
2762
2763 if (expected_align < align)
2764 expected_align = align;
2765
2766 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2767 mode = GET_MODE_WIDER_MODE (mode))
2768 {
2769 enum insn_code code = setmem_optab[(int) mode];
2770 insn_operand_predicate_fn pred;
2771
2772 if (code != CODE_FOR_nothing
2773 /* We don't need MODE to be narrower than
2774 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2775 the mode mask, as it is returned by the macro, it will
2776 definitely be less than the actual mode mask. */
2777 && ((CONST_INT_P (size)
2778 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2779 <= (GET_MODE_MASK (mode) >> 1)))
2780 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2781 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2782 || (*pred) (object, BLKmode))
2783 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
2784 || (*pred) (opalign, VOIDmode)))
2785 {
2786 rtx opsize, opchar;
2787 enum machine_mode char_mode;
2788 rtx last = get_last_insn ();
2789 rtx pat;
2790
2791 opsize = convert_to_mode (mode, size, 1);
2792 pred = insn_data[(int) code].operand[1].predicate;
2793 if (pred != 0 && ! (*pred) (opsize, mode))
2794 opsize = copy_to_mode_reg (mode, opsize);
2795
2796 opchar = val;
2797 char_mode = insn_data[(int) code].operand[2].mode;
2798 if (char_mode != VOIDmode)
2799 {
2800 opchar = convert_to_mode (char_mode, opchar, 1);
2801 pred = insn_data[(int) code].operand[2].predicate;
2802 if (pred != 0 && ! (*pred) (opchar, char_mode))
2803 opchar = copy_to_mode_reg (char_mode, opchar);
2804 }
2805
2806 if (insn_data[(int) code].n_operands == 4)
2807 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
2808 else
2809 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign,
2810 GEN_INT (expected_align
2811 / BITS_PER_UNIT),
2812 GEN_INT (expected_size));
2813 if (pat)
2814 {
2815 emit_insn (pat);
2816 return true;
2817 }
2818 else
2819 delete_insns_since (last);
2820 }
2821 }
2822
2823 return false;
2824 }
2825
2826 \f
2827 /* Write to one of the components of the complex value CPLX. Write VAL to
2828 the real part if IMAG_P is false, and the imaginary part if it's true. */
2829
2830 static void
2831 write_complex_part (rtx cplx, rtx val, bool imag_p)
2832 {
2833 enum machine_mode cmode;
2834 enum machine_mode imode;
2835 unsigned ibitsize;
2836
2837 if (GET_CODE (cplx) == CONCAT)
2838 {
2839 emit_move_insn (XEXP (cplx, imag_p), val);
2840 return;
2841 }
2842
2843 cmode = GET_MODE (cplx);
2844 imode = GET_MODE_INNER (cmode);
2845 ibitsize = GET_MODE_BITSIZE (imode);
2846
2847 /* For MEMs simplify_gen_subreg may generate an invalid new address
2848 because, e.g., the original address is considered mode-dependent
2849 by the target, which restricts simplify_subreg from invoking
2850 adjust_address_nv. Instead of preparing fallback support for an
2851 invalid address, we call adjust_address_nv directly. */
2852 if (MEM_P (cplx))
2853 {
2854 emit_move_insn (adjust_address_nv (cplx, imode,
2855 imag_p ? GET_MODE_SIZE (imode) : 0),
2856 val);
2857 return;
2858 }
2859
2860 /* If the sub-object is at least word sized, then we know that subregging
2861 will work. This special case is important, since store_bit_field
2862 wants to operate on integer modes, and there's rarely an OImode to
2863 correspond to TCmode. */
2864 if (ibitsize >= BITS_PER_WORD
2865 /* For hard regs we have exact predicates. Assume we can split
2866 the original object if it spans an even number of hard regs.
2867 This special case is important for SCmode on 64-bit platforms
2868 where the natural size of floating-point regs is 32-bit. */
2869 || (REG_P (cplx)
2870 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2871 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2872 {
2873 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2874 imag_p ? GET_MODE_SIZE (imode) : 0);
2875 if (part)
2876 {
2877 emit_move_insn (part, val);
2878 return;
2879 }
2880 else
2881 /* simplify_gen_subreg may fail for sub-word MEMs. */
2882 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2883 }
2884
2885 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
2886 }
2887
2888 /* Extract one of the components of the complex value CPLX. Extract the
2889 real part if IMAG_P is false, and the imaginary part if it's true. */
2890
2891 static rtx
2892 read_complex_part (rtx cplx, bool imag_p)
2893 {
2894 enum machine_mode cmode, imode;
2895 unsigned ibitsize;
2896
2897 if (GET_CODE (cplx) == CONCAT)
2898 return XEXP (cplx, imag_p);
2899
2900 cmode = GET_MODE (cplx);
2901 imode = GET_MODE_INNER (cmode);
2902 ibitsize = GET_MODE_BITSIZE (imode);
2903
2904 /* Special case reads from complex constants that got spilled to memory. */
2905 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2906 {
2907 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2908 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2909 {
2910 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2911 if (CONSTANT_CLASS_P (part))
2912 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2913 }
2914 }
2915
2916 /* For MEMs simplify_gen_subreg may generate an invalid new address
2917 because, e.g., the original address is considered mode-dependent
2918 by the target, which restricts simplify_subreg from invoking
2919 adjust_address_nv. Instead of preparing fallback support for an
2920 invalid address, we call adjust_address_nv directly. */
2921 if (MEM_P (cplx))
2922 return adjust_address_nv (cplx, imode,
2923 imag_p ? GET_MODE_SIZE (imode) : 0);
2924
2925 /* If the sub-object is at least word sized, then we know that subregging
2926 will work. This special case is important, since extract_bit_field
2927 wants to operate on integer modes, and there's rarely an OImode to
2928 correspond to TCmode. */
2929 if (ibitsize >= BITS_PER_WORD
2930 /* For hard regs we have exact predicates. Assume we can split
2931 the original object if it spans an even number of hard regs.
2932 This special case is important for SCmode on 64-bit platforms
2933 where the natural size of floating-point regs is 32-bit. */
2934 || (REG_P (cplx)
2935 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2936 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2937 {
2938 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2939 imag_p ? GET_MODE_SIZE (imode) : 0);
2940 if (ret)
2941 return ret;
2942 else
2943 /* simplify_gen_subreg may fail for sub-word MEMs. */
2944 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2945 }
2946
2947 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2948 true, NULL_RTX, imode, imode);
2949 }
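
/* A concrete illustration with hypothetical pseudo numbers: an SCmode
   value held as a CONCAT,

     (concat:SC (reg:SF 60) (reg:SF 61))

   makes read_complex_part a simple XEXP access and write_complex_part
   a plain register move; for a MEM:SC the imaginary part instead lives
   at byte offset GET_MODE_SIZE (SFmode). */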
2950 \f
2951 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
2952 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2953 represented in NEW_MODE. If FORCE is true, this will never happen, as
2954 we'll force-create a SUBREG if needed. */
2955
2956 static rtx
2957 emit_move_change_mode (enum machine_mode new_mode,
2958 enum machine_mode old_mode, rtx x, bool force)
2959 {
2960 rtx ret;
2961
2962 if (push_operand (x, GET_MODE (x)))
2963 {
2964 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
2965 MEM_COPY_ATTRIBUTES (ret, x);
2966 }
2967 else if (MEM_P (x))
2968 {
2969 /* We don't have to worry about changing the address since the
2970 size in bytes is supposed to be the same. */
2971 if (reload_in_progress)
2972 {
2973 /* Copy the MEM to change the mode and move any
2974 substitutions from the old MEM to the new one. */
2975 ret = adjust_address_nv (x, new_mode, 0);
2976 copy_replacements (x, ret);
2977 }
2978 else
2979 ret = adjust_address (x, new_mode, 0);
2980 }
2981 else
2982 {
2983 /* Note that we do want simplify_subreg's behavior of validating
2984 that the new mode is ok for a hard register. If we were to use
2985 simplify_gen_subreg, we would create the subreg, but would
2986 probably run into the target not being able to implement it. */
2987 /* Except, of course, when FORCE is true, when this is exactly what
2988 we want. Which is needed for CCmodes on some targets. */
2989 if (force)
2990 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
2991 else
2992 ret = simplify_subreg (new_mode, x, old_mode, 0);
2993 }
2994
2995 return ret;
2996 }
2997
2998 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2999 an integer mode of the same size as MODE. Returns the instruction
3000 emitted, or NULL if such a move could not be generated. */
3001
3002 static rtx
3003 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
3004 {
3005 enum machine_mode imode;
3006 enum insn_code code;
3007
3008 /* There must exist a mode of the exact size we require. */
3009 imode = int_mode_for_mode (mode);
3010 if (imode == BLKmode)
3011 return NULL_RTX;
3012
3013 /* The target must support moves in this mode. */
3014 code = optab_handler (mov_optab, imode)->insn_code;
3015 if (code == CODE_FOR_nothing)
3016 return NULL_RTX;
3017
3018 x = emit_move_change_mode (imode, mode, x, force);
3019 if (x == NULL_RTX)
3020 return NULL_RTX;
3021 y = emit_move_change_mode (imode, mode, y, force);
3022 if (y == NULL_RTX)
3023 return NULL_RTX;
3024 return emit_insn (GEN_FCN (code) (x, y));
3025 }
3026
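/* Sketch: on a hypothetical target with a movsi but no movsf pattern,
   a register-to-register SFmode move is rewritten here as an integer
   move,

     (set (subreg:SI (reg:SF 61) 0) (subreg:SI (reg:SF 60) 0))

   leaving the mode change entirely to subregs. */
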
3027 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3028 Return an equivalent MEM that does not use an auto-increment. */
3029
3030 static rtx
3031 emit_move_resolve_push (enum machine_mode mode, rtx x)
3032 {
3033 enum rtx_code code = GET_CODE (XEXP (x, 0));
3034 HOST_WIDE_INT adjust;
3035 rtx temp;
3036
3037 adjust = GET_MODE_SIZE (mode);
3038 #ifdef PUSH_ROUNDING
3039 adjust = PUSH_ROUNDING (adjust);
3040 #endif
3041 if (code == PRE_DEC || code == POST_DEC)
3042 adjust = -adjust;
3043 else if (code == PRE_MODIFY || code == POST_MODIFY)
3044 {
3045 rtx expr = XEXP (XEXP (x, 0), 1);
3046 HOST_WIDE_INT val;
3047
3048 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3049 gcc_assert (CONST_INT_P (XEXP (expr, 1)));
3050 val = INTVAL (XEXP (expr, 1));
3051 if (GET_CODE (expr) == MINUS)
3052 val = -val;
3053 gcc_assert (adjust == val || adjust == -val);
3054 adjust = val;
3055 }
3056
3057 /* Do not use anti_adjust_stack, since we don't want to update
3058 stack_pointer_delta. */
3059 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3060 GEN_INT (adjust), stack_pointer_rtx,
3061 0, OPTAB_LIB_WIDEN);
3062 if (temp != stack_pointer_rtx)
3063 emit_move_insn (stack_pointer_rtx, temp);
3064
3065 switch (code)
3066 {
3067 case PRE_INC:
3068 case PRE_DEC:
3069 case PRE_MODIFY:
3070 temp = stack_pointer_rtx;
3071 break;
3072 case POST_INC:
3073 case POST_DEC:
3074 case POST_MODIFY:
3075 temp = plus_constant (stack_pointer_rtx, -adjust);
3076 break;
3077 default:
3078 gcc_unreachable ();
3079 }
3080
3081 return replace_equiv_address (x, temp);
3082 }
3083
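/* Sketch, for a STACK_GROWS_DOWNWARD target with 8-byte DImode (P
   standing for Pmode): the push

     (set (mem:DI (pre_dec:P (reg:P sp))) ...)

   is resolved into an explicit sp = sp - 8 followed by a plain store
   through (mem:DI (reg:P sp)), which any move pattern can handle. */
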
3084 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3085 X is known to satisfy push_operand, and MODE is known to be complex.
3086 Returns the last instruction emitted. */
3087
3088 rtx
3089 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
3090 {
3091 enum machine_mode submode = GET_MODE_INNER (mode);
3092 bool imag_first;
3093
3094 #ifdef PUSH_ROUNDING
3095 unsigned int submodesize = GET_MODE_SIZE (submode);
3096
3097 /* In case we push to the stack, but the size is smaller than what the
3098 machine can push exactly, we need to use move instructions. */
3099 if (PUSH_ROUNDING (submodesize) != submodesize)
3100 {
3101 x = emit_move_resolve_push (mode, x);
3102 return emit_move_insn (x, y);
3103 }
3104 #endif
3105
3106 /* Note that the real part always precedes the imag part in memory
3107 regardless of machine's endianness. */
3108 switch (GET_CODE (XEXP (x, 0)))
3109 {
3110 case PRE_DEC:
3111 case POST_DEC:
3112 imag_first = true;
3113 break;
3114 case PRE_INC:
3115 case POST_INC:
3116 imag_first = false;
3117 break;
3118 default:
3119 gcc_unreachable ();
3120 }
3121
3122 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3123 read_complex_part (y, imag_first));
3124 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3125 read_complex_part (y, !imag_first));
3126 }
3127
3128 /* A subroutine of emit_move_complex. Perform the move from Y to X
3129 via two moves of the parts. Returns the last instruction emitted. */
3130
3131 rtx
3132 emit_move_complex_parts (rtx x, rtx y)
3133 {
3134 /* Show the output dies here. This is necessary for SUBREGs
3135 of pseudos since we cannot track their lifetimes correctly;
3136 hard regs shouldn't appear here except as return values. */
3137 if (!reload_completed && !reload_in_progress
3138 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3139 emit_clobber (x);
3140
3141 write_complex_part (x, read_complex_part (y, false), false);
3142 write_complex_part (x, read_complex_part (y, true), true);
3143
3144 return get_last_insn ();
3145 }
3146
3147 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3148 MODE is known to be complex. Returns the last instruction emitted. */
3149
3150 static rtx
3151 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3152 {
3153 bool try_int;
3154
3155 /* Need to take special care for pushes, to maintain proper ordering
3156 of the data, and possibly extra padding. */
3157 if (push_operand (x, mode))
3158 return emit_move_complex_push (mode, x, y);
3159
3160 /* See if we can coerce the target into moving both values at once. */
3161
3162 /* Move floating point as parts. */
3163 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3164 && optab_handler (mov_optab, GET_MODE_INNER (mode))->insn_code != CODE_FOR_nothing)
3165 try_int = false;
3166 /* Not possible if the values are inherently not adjacent. */
3167 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3168 try_int = false;
3169 /* It is possible if both are registers (or subregs of registers). */
3170 else if (register_operand (x, mode) && register_operand (y, mode))
3171 try_int = true;
3172 /* If one of the operands is a memory, and alignment constraints
3173 are friendly enough, we may be able to do combined memory operations.
3174 We do not attempt this if Y is a constant because that combination is
3175 usually better with the by-parts thing below. */
3176 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3177 && (!STRICT_ALIGNMENT
3178 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3179 try_int = true;
3180 else
3181 try_int = false;
3182
3183 if (try_int)
3184 {
3185 rtx ret;
3186
3187 /* For memory to memory moves, optimal behavior can be had with the
3188 existing block move logic. */
3189 if (MEM_P (x) && MEM_P (y))
3190 {
3191 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3192 BLOCK_OP_NO_LIBCALL);
3193 return get_last_insn ();
3194 }
3195
3196 ret = emit_move_via_integer (mode, x, y, true);
3197 if (ret)
3198 return ret;
3199 }
3200
3201 return emit_move_complex_parts (x, y);
3202 }
3203
3204 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3205 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3206
3207 static rtx
3208 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3209 {
3210 rtx ret;
3211
3212 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3213 if (mode != CCmode)
3214 {
3215 enum insn_code code = optab_handler (mov_optab, CCmode)->insn_code;
3216 if (code != CODE_FOR_nothing)
3217 {
3218 x = emit_move_change_mode (CCmode, mode, x, true);
3219 y = emit_move_change_mode (CCmode, mode, y, true);
3220 return emit_insn (GEN_FCN (code) (x, y));
3221 }
3222 }
3223
3224 /* Otherwise, find the MODE_INT mode of the same width. */
3225 ret = emit_move_via_integer (mode, x, y, false);
3226 gcc_assert (ret != NULL);
3227 return ret;
3228 }
3229
3230 /* Return true if word I of OP lies entirely in the
3231 undefined bits of a paradoxical subreg. */
3232
3233 static bool
3234 undefined_operand_subword_p (const_rtx op, int i)
3235 {
3236 enum machine_mode innermode, innermostmode;
3237 int offset;
3238 if (GET_CODE (op) != SUBREG)
3239 return false;
3240 innermode = GET_MODE (op);
3241 innermostmode = GET_MODE (SUBREG_REG (op));
3242 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3243 /* The SUBREG_BYTE represents offset, as if the value were stored in
3244 memory, except for a paradoxical subreg where we define
3245 SUBREG_BYTE to be 0; undo this exception as in
3246 simplify_subreg. */
3247 if (SUBREG_BYTE (op) == 0
3248 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3249 {
3250 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3251 if (WORDS_BIG_ENDIAN)
3252 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3253 if (BYTES_BIG_ENDIAN)
3254 offset += difference % UNITS_PER_WORD;
3255 }
3256 if (offset >= GET_MODE_SIZE (innermostmode)
3257 || offset <= -GET_MODE_SIZE (word_mode))
3258 return true;
3259 return false;
3260 }
3261
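/* Example: on a 32-bit little-endian target, word 1 of the paradoxical
   subreg

     (subreg:DI (reg:SI 60) 0)

   lies entirely in undefined bits, so emit_move_multi_word below can
   skip emitting a move for that word. (Register number is made up.) */
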
3262 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3263 MODE is any multi-word or full-word mode that lacks a move_insn
3264 pattern. Note that you will get better code if you define such
3265 patterns, even if they must turn into multiple assembler instructions. */
3266
3267 static rtx
3268 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3269 {
3270 rtx last_insn = 0;
3271 rtx seq, inner;
3272 bool need_clobber;
3273 int i;
3274
3275 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3276
3277 /* If X is a push on the stack, do the push now and replace
3278 X with a reference to the stack pointer. */
3279 if (push_operand (x, mode))
3280 x = emit_move_resolve_push (mode, x);
3281
3282 /* If we are in reload, see if either operand is a MEM whose address
3283 is scheduled for replacement. */
3284 if (reload_in_progress && MEM_P (x)
3285 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3286 x = replace_equiv_address_nv (x, inner);
3287 if (reload_in_progress && MEM_P (y)
3288 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3289 y = replace_equiv_address_nv (y, inner);
3290
3291 start_sequence ();
3292
3293 need_clobber = false;
3294 for (i = 0;
3295 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3296 i++)
3297 {
3298 rtx xpart = operand_subword (x, i, 1, mode);
3299 rtx ypart;
3300
3301 /* Do not generate code for a move if it would come entirely
3302 from the undefined bits of a paradoxical subreg. */
3303 if (undefined_operand_subword_p (y, i))
3304 continue;
3305
3306 ypart = operand_subword (y, i, 1, mode);
3307
3308 /* If we can't get a part of Y, put Y into memory if it is a
3309 constant. Otherwise, force it into a register. Then we must
3310 be able to get a part of Y. */
3311 if (ypart == 0 && CONSTANT_P (y))
3312 {
3313 y = use_anchored_address (force_const_mem (mode, y));
3314 ypart = operand_subword (y, i, 1, mode);
3315 }
3316 else if (ypart == 0)
3317 ypart = operand_subword_force (y, i, mode);
3318
3319 gcc_assert (xpart && ypart);
3320
3321 need_clobber |= (GET_CODE (xpart) == SUBREG);
3322
3323 last_insn = emit_move_insn (xpart, ypart);
3324 }
3325
3326 seq = get_insns ();
3327 end_sequence ();
3328
3329 /* Show the output dies here. This is necessary for SUBREGs
3330 of pseudos since we cannot track their lifetimes correctly;
3331 hard regs shouldn't appear here except as return values.
3332 We never want to emit such a clobber after reload. */
3333 if (x != y
3334 && ! (reload_in_progress || reload_completed)
3335 && need_clobber != 0)
3336 emit_clobber (x);
3337
3338 emit_insn (seq);
3339
3340 return last_insn;
3341 }
3342
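/* Illustration: with 8-byte words and no movti pattern, a TImode
   register-to-register move decomposes into two word moves,

     (set (subreg:DI (reg:TI x) 0) (subreg:DI (reg:TI y) 0))
     (set (subreg:DI (reg:TI x) 8) (subreg:DI (reg:TI y) 8))

   preceded by a clobber of X when a word of X is itself a SUBREG, so
   that liveness of the destination stays trackable; x and y stand for
   pseudo register numbers. */
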
3343 /* Low level part of emit_move_insn.
3344 Called just like emit_move_insn, but assumes X and Y
3345 are basically valid. */
3346
3347 rtx
3348 emit_move_insn_1 (rtx x, rtx y)
3349 {
3350 enum machine_mode mode = GET_MODE (x);
3351 enum insn_code code;
3352
3353 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3354
3355 code = optab_handler (mov_optab, mode)->insn_code;
3356 if (code != CODE_FOR_nothing)
3357 return emit_insn (GEN_FCN (code) (x, y));
3358
3359 /* Expand complex moves by moving real part and imag part. */
3360 if (COMPLEX_MODE_P (mode))
3361 return emit_move_complex (mode, x, y);
3362
3363 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3364 || ALL_FIXED_POINT_MODE_P (mode))
3365 {
3366 rtx result = emit_move_via_integer (mode, x, y, true);
3367
3368 /* If we can't find an integer mode, use multi words. */
3369 if (result)
3370 return result;
3371 else
3372 return emit_move_multi_word (mode, x, y);
3373 }
3374
3375 if (GET_MODE_CLASS (mode) == MODE_CC)
3376 return emit_move_ccmode (mode, x, y);
3377
3378 /* Try using a move pattern for the corresponding integer mode. This is
3379 only safe when simplify_subreg can convert MODE constants into integer
3380 constants. At present, it can only do this reliably if the value
3381 fits within a HOST_WIDE_INT. */
3382 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3383 {
3384 rtx ret = emit_move_via_integer (mode, x, y, false);
3385 if (ret)
3386 return ret;
3387 }
3388
3389 return emit_move_multi_word (mode, x, y);
3390 }
3391
3392 /* Generate code to copy Y into X.
3393 Both Y and X must have the same mode, except that
3394 Y can be a constant with VOIDmode.
3395 This mode cannot be BLKmode; use emit_block_move for that.
3396
3397 Return the last instruction emitted. */
3398
3399 rtx
3400 emit_move_insn (rtx x, rtx y)
3401 {
3402 enum machine_mode mode = GET_MODE (x);
3403 rtx y_cst = NULL_RTX;
3404 rtx last_insn, set;
3405
3406 gcc_assert (mode != BLKmode
3407 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3408
3409 if (CONSTANT_P (y))
3410 {
3411 if (optimize
3412 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3413 && (last_insn = compress_float_constant (x, y)))
3414 return last_insn;
3415
3416 y_cst = y;
3417
3418 if (!LEGITIMATE_CONSTANT_P (y))
3419 {
3420 y = force_const_mem (mode, y);
3421
3422 /* If the target's cannot_force_const_mem prevented the spill,
3423 assume that the target's move expanders will also take care
3424 of the non-legitimate constant. */
3425 if (!y)
3426 y = y_cst;
3427 else
3428 y = use_anchored_address (y);
3429 }
3430 }
3431
3432 /* If X or Y are memory references, verify that their addresses are valid
3433 for the machine. */
3434 if (MEM_P (x)
3435 && (! memory_address_p (GET_MODE (x), XEXP (x, 0))
3436 && ! push_operand (x, GET_MODE (x))))
3437 x = validize_mem (x);
3438
3439 if (MEM_P (y)
3440 && ! memory_address_p (GET_MODE (y), XEXP (y, 0)))
3441 y = validize_mem (y);
3442
3443 gcc_assert (mode != BLKmode);
3444
3445 last_insn = emit_move_insn_1 (x, y);
3446
3447 if (y_cst && REG_P (x)
3448 && (set = single_set (last_insn)) != NULL_RTX
3449 && SET_DEST (set) == x
3450 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3451 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3452
3453 return last_insn;
3454 }
3455
3456 /* If Y is representable exactly in a narrower mode, and the target can
3457 perform the extension directly from constant or memory, then emit the
3458 move as an extension. */
3459
3460 static rtx
3461 compress_float_constant (rtx x, rtx y)
3462 {
3463 enum machine_mode dstmode = GET_MODE (x);
3464 enum machine_mode orig_srcmode = GET_MODE (y);
3465 enum machine_mode srcmode;
3466 REAL_VALUE_TYPE r;
3467 int oldcost, newcost;
3468 bool speed = optimize_insn_for_speed_p ();
3469
3470 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3471
3472 if (LEGITIMATE_CONSTANT_P (y))
3473 oldcost = rtx_cost (y, SET, speed);
3474 else
3475 oldcost = rtx_cost (force_const_mem (dstmode, y), SET, speed);
3476
3477 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3478 srcmode != orig_srcmode;
3479 srcmode = GET_MODE_WIDER_MODE (srcmode))
3480 {
3481 enum insn_code ic;
3482 rtx trunc_y, last_insn;
3483
3484 /* Skip if the target can't extend this way. */
3485 ic = can_extend_p (dstmode, srcmode, 0);
3486 if (ic == CODE_FOR_nothing)
3487 continue;
3488
3489 /* Skip if the narrowed value isn't exact. */
3490 if (! exact_real_truncate (srcmode, &r))
3491 continue;
3492
3493 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3494
3495 if (LEGITIMATE_CONSTANT_P (trunc_y))
3496 {
3497 /* Skip if the target needs extra instructions to perform
3498 the extension. */
3499 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3500 continue;
3501 /* This is valid, but may not be cheaper than the original. */
3502 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET, speed);
3503 if (oldcost < newcost)
3504 continue;
3505 }
3506 else if (float_extend_from_mem[dstmode][srcmode])
3507 {
3508 trunc_y = force_const_mem (srcmode, trunc_y);
3509 /* This is valid, but may not be cheaper than the original. */
3510 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET, speed);
3511 if (oldcost < newcost)
3512 continue;
3513 trunc_y = validize_mem (trunc_y);
3514 }
3515 else
3516 continue;
3517
3518 /* For CSE's benefit, force the compressed constant pool entry
3519 into a new pseudo. This constant may be used in different modes,
3520 and if not, combine will put things back together for us. */
3521 trunc_y = force_reg (srcmode, trunc_y);
3522 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3523 last_insn = get_last_insn ();
3524
3525 if (REG_P (x))
3526 set_unique_reg_note (last_insn, REG_EQUAL, y);
3527
3528 return last_insn;
3529 }
3530
3531 return NULL_RTX;
3532 }
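
/* Worked example: moving the DFmode constant 1.0 into a pseudo. 1.0 is
   exactly representable in SFmode, so on a target with a cheap
   extendsfdf2 this becomes

     (set (reg:SF t) (mem:SF <pool entry for 1.0f>))
     (set (reg:DF x) (float_extend:DF (reg:SF t)))

   which is usually cheaper than loading a full DFmode pool entry;
   t and x stand for fresh pseudos. */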
3533 \f
3534 /* Pushing data onto the stack. */
3535
3536 /* Push a block of length SIZE (perhaps variable)
3537 and return an rtx to address the beginning of the block.
3538 The value may be virtual_outgoing_args_rtx.
3539
3540 EXTRA is the number of bytes of padding to push in addition to SIZE.
3541 BELOW nonzero means this padding comes at low addresses;
3542 otherwise, the padding comes at high addresses. */
3543
3544 rtx
3545 push_block (rtx size, int extra, int below)
3546 {
3547 rtx temp;
3548
3549 size = convert_modes (Pmode, ptr_mode, size, 1);
3550 if (CONSTANT_P (size))
3551 anti_adjust_stack (plus_constant (size, extra));
3552 else if (REG_P (size) && extra == 0)
3553 anti_adjust_stack (size);
3554 else
3555 {
3556 temp = copy_to_mode_reg (Pmode, size);
3557 if (extra != 0)
3558 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3559 temp, 0, OPTAB_LIB_WIDEN);
3560 anti_adjust_stack (temp);
3561 }
3562
3563 #ifndef STACK_GROWS_DOWNWARD
3564 if (0)
3565 #else
3566 if (1)
3567 #endif
3568 {
3569 temp = virtual_outgoing_args_rtx;
3570 if (extra != 0 && below)
3571 temp = plus_constant (temp, extra);
3572 }
3573 else
3574 {
3575 if (CONST_INT_P (size))
3576 temp = plus_constant (virtual_outgoing_args_rtx,
3577 -INTVAL (size) - (below ? 0 : extra));
3578 else if (extra != 0 && !below)
3579 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3580 negate_rtx (Pmode, plus_constant (size, extra)));
3581 else
3582 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3583 negate_rtx (Pmode, size));
3584 }
3585
3586 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3587 }
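
/* Illustrative sketch, not part of GCC: reserving a 16-byte block of
   outgoing-argument space with 4 bytes of padding at low addresses.
   The helper name is hypothetical.  */
#if 0
static rtx
example_push_padded_block (void)
{
  /* SIZE = 16, EXTRA = 4, BELOW = 1: the stack is adjusted by 20
     bytes and, on a downward-growing stack, the returned address
     points at the 16-byte block itself, just above the padding.  */
  return push_block (GEN_INT (16), 4, 1);
}
#endif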
3588
3589 #ifdef PUSH_ROUNDING
3590
3591 /* Emit a single push insn. */
3592
3593 static void
3594 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3595 {
3596 rtx dest_addr;
3597 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3598 rtx dest;
3599 enum insn_code icode;
3600 insn_operand_predicate_fn pred;
3601
3602 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3603 /* If there is a push pattern, use it. Otherwise fall back to the old way
3604 of handing a MEM representing the push operation to the move expander. */
3605 icode = optab_handler (push_optab, mode)->insn_code;
3606 if (icode != CODE_FOR_nothing)
3607 {
3608 if (((pred = insn_data[(int) icode].operand[0].predicate)
3609 && !((*pred) (x, mode))))
3610 x = force_reg (mode, x);
3611 emit_insn (GEN_FCN (icode) (x));
3612 return;
3613 }
3614 if (GET_MODE_SIZE (mode) == rounded_size)
3615 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3616 /* If we are to pad downward, adjust the stack pointer first and
3617 then store X into the stack location using an offset. This is
3618 because emit_move_insn does not know how to pad; it does not have
3619 access to the type. */
3620 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3621 {
3622 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3623 HOST_WIDE_INT offset;
3624
3625 emit_move_insn (stack_pointer_rtx,
3626 expand_binop (Pmode,
3627 #ifdef STACK_GROWS_DOWNWARD
3628 sub_optab,
3629 #else
3630 add_optab,
3631 #endif
3632 stack_pointer_rtx,
3633 GEN_INT (rounded_size),
3634 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3635
3636 offset = (HOST_WIDE_INT) padding_size;
3637 #ifdef STACK_GROWS_DOWNWARD
3638 if (STACK_PUSH_CODE == POST_DEC)
3639 /* We have already decremented the stack pointer, so get the
3640 previous value. */
3641 offset += (HOST_WIDE_INT) rounded_size;
3642 #else
3643 if (STACK_PUSH_CODE == POST_INC)
3644 /* We have already incremented the stack pointer, so get the
3645 previous value. */
3646 offset -= (HOST_WIDE_INT) rounded_size;
3647 #endif
3648 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3649 }
3650 else
3651 {
3652 #ifdef STACK_GROWS_DOWNWARD
3653 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3654 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3655 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3656 #else
3657 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3658 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3659 GEN_INT (rounded_size));
3660 #endif
3661 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3662 }
3663
3664 dest = gen_rtx_MEM (mode, dest_addr);
3665
3666 if (type != 0)
3667 {
3668 set_mem_attributes (dest, type, 1);
3669
3670 if (flag_optimize_sibling_calls)
3671 /* Function incoming arguments may overlap with sibling call
3672 outgoing arguments and we cannot allow reordering of reads
3673 from function arguments with stores to outgoing arguments
3674 of sibling calls. */
3675 set_mem_alias_set (dest, 0);
3676 }
3677 emit_move_insn (dest, x);
3678 }
3679 #endif
3680
3681 /* Generate code to push X onto the stack, assuming it has mode MODE and
3682 type TYPE.
3683 MODE is redundant except when X is a CONST_INT (since they don't
3684 carry mode info).
3685 SIZE is an rtx for the size of data to be copied (in bytes),
3686 needed only if X is BLKmode.
3687
3688 ALIGN (in bits) is maximum alignment we can assume.
3689
3690 If PARTIAL and REG are both nonzero, then copy that many of the first
3691 bytes of X into registers starting with REG, and push the rest of X.
3692 The amount of space pushed is decreased by PARTIAL bytes.
3693 REG must be a hard register in this case.
3694 If REG is zero but PARTIAL is not, take all other actions for an
3695 argument partially in registers, but do not actually load any
3696 registers.
3697
3698 EXTRA is the amount in bytes of extra space to leave next to this arg.
3699 This is ignored if an argument block has already been allocated.
3700
3701 On a machine that lacks real push insns, ARGS_ADDR is the address of
3702 the bottom of the argument block for this call. We use indexing off there
3703 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3704 argument block has not been preallocated.
3705
3706 ARGS_SO_FAR is the size of args previously pushed for this call.
3707
3708 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3709 for arguments passed in registers. If nonzero, it will be the number
3710 of bytes required. */
3711
3712 void
3713 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3714 unsigned int align, int partial, rtx reg, int extra,
3715 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3716 rtx alignment_pad)
3717 {
3718 rtx xinner;
3719 enum direction stack_direction
3720 #ifdef STACK_GROWS_DOWNWARD
3721 = downward;
3722 #else
3723 = upward;
3724 #endif
3725
3726 /* Decide where to pad the argument: `downward' for below,
3727 `upward' for above, or `none' for don't pad it.
3728 Default is below for small data on big-endian machines; else above. */
3729 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3730
3731 /* Invert direction if stack is post-decrement.
3732 FIXME: why? */
3733 if (STACK_PUSH_CODE == POST_DEC)
3734 if (where_pad != none)
3735 where_pad = (where_pad == downward ? upward : downward);
3736
3737 xinner = x;
3738
3739 if (mode == BLKmode
3740 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
3741 {
3742 /* Copy a block into the stack, entirely or partially. */
3743
3744 rtx temp;
3745 int used;
3746 int offset;
3747 int skip;
3748
3749 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3750 used = partial - offset;
3751
3752 if (mode != BLKmode)
3753 {
3754 /* A value is to be stored in an insufficiently aligned
3755 stack slot; copy via a suitably aligned slot if
3756 necessary. */
3757 size = GEN_INT (GET_MODE_SIZE (mode));
3758 if (!MEM_P (xinner))
3759 {
3760 temp = assign_temp (type, 0, 1, 1);
3761 emit_move_insn (temp, xinner);
3762 xinner = temp;
3763 }
3764 }
3765
3766 gcc_assert (size);
3767
3768 /* USED is now the # of bytes we need not copy to the stack
3769 because registers will take care of them. */
3770
3771 if (partial != 0)
3772 xinner = adjust_address (xinner, BLKmode, used);
3773
3774 /* If the partial register-part of the arg counts in its stack size,
3775 skip the part of stack space corresponding to the registers.
3776 Otherwise, start copying to the beginning of the stack space,
3777 by setting SKIP to 0. */
3778 skip = (reg_parm_stack_space == 0) ? 0 : used;
3779
3780 #ifdef PUSH_ROUNDING
3781 /* Do it with several push insns if that doesn't take lots of insns
3782 and if there is no difficulty with push insns that skip bytes
3783 on the stack for alignment purposes. */
3784 if (args_addr == 0
3785 && PUSH_ARGS
3786 && CONST_INT_P (size)
3787 && skip == 0
3788 && MEM_ALIGN (xinner) >= align
3789 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3790 /* Here we avoid the case of a structure whose weak alignment
3791 forces many pushes of a small amount of data,
3792 and such small pushes do rounding that causes trouble. */
3793 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3794 || align >= BIGGEST_ALIGNMENT
3795 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3796 == (align / BITS_PER_UNIT)))
3797 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3798 {
3799 /* Push padding now if padding above and stack grows down,
3800 or if padding below and stack grows up.
3801 But if space already allocated, this has already been done. */
3802 if (extra && args_addr == 0
3803 && where_pad != none && where_pad != stack_direction)
3804 anti_adjust_stack (GEN_INT (extra));
3805
3806 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3807 }
3808 else
3809 #endif /* PUSH_ROUNDING */
3810 {
3811 rtx target;
3812
3813 /* Otherwise make space on the stack and copy the data
3814 to the address of that space. */
3815
3816 /* Deduct words put into registers from the size we must copy. */
3817 if (partial != 0)
3818 {
3819 if (CONST_INT_P (size))
3820 size = GEN_INT (INTVAL (size) - used);
3821 else
3822 size = expand_binop (GET_MODE (size), sub_optab, size,
3823 GEN_INT (used), NULL_RTX, 0,
3824 OPTAB_LIB_WIDEN);
3825 }
3826
3827 /* Get the address of the stack space.
3828 In this case, we do not deal with EXTRA separately.
3829 A single stack adjust will do. */
3830 if (! args_addr)
3831 {
3832 temp = push_block (size, extra, where_pad == downward);
3833 extra = 0;
3834 }
3835 else if (CONST_INT_P (args_so_far))
3836 temp = memory_address (BLKmode,
3837 plus_constant (args_addr,
3838 skip + INTVAL (args_so_far)));
3839 else
3840 temp = memory_address (BLKmode,
3841 plus_constant (gen_rtx_PLUS (Pmode,
3842 args_addr,
3843 args_so_far),
3844 skip));
3845
3846 if (!ACCUMULATE_OUTGOING_ARGS)
3847 {
3848 /* If the source is referenced relative to the stack pointer,
3849 copy it to another register to stabilize it. We do not need
3850 to do this if we know that we won't be changing sp. */
3851
3852 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3853 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3854 temp = copy_to_reg (temp);
3855 }
3856
3857 target = gen_rtx_MEM (BLKmode, temp);
3858
3859 /* We do *not* set_mem_attributes here, because incoming arguments
3860 may overlap with sibling call outgoing arguments and we cannot
3861 allow reordering of reads from function arguments with stores
3862 to outgoing arguments of sibling calls. We do, however, want
3863 to record the alignment of the stack slot. */
3864 /* ALIGN may well be better aligned than TYPE, e.g. due to
3865 PARM_BOUNDARY. Assume the caller isn't lying. */
3866 set_mem_align (target, align);
3867
3868 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3869 }
3870 }
3871 else if (partial > 0)
3872 {
3873 /* Scalar partly in registers. */
3874
3875 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3876 int i;
3877 int not_stack;
3878 /* # bytes of start of argument
3879 that we must make space for but need not store. */
3880 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3881 int args_offset = INTVAL (args_so_far);
3882 int skip;
3883
3884 /* Push padding now if padding above and stack grows down,
3885 or if padding below and stack grows up.
3886 But if space already allocated, this has already been done. */
3887 if (extra && args_addr == 0
3888 && where_pad != none && where_pad != stack_direction)
3889 anti_adjust_stack (GEN_INT (extra));
3890
3891 /* If we make space by pushing it, we might as well push
3892 the real data. Otherwise, we can leave OFFSET nonzero
3893 and leave the space uninitialized. */
3894 if (args_addr == 0)
3895 offset = 0;
3896
3897 /* Now NOT_STACK gets the number of words that we don't need to
3898 allocate on the stack. Convert OFFSET to words too. */
3899 not_stack = (partial - offset) / UNITS_PER_WORD;
3900 offset /= UNITS_PER_WORD;
3901
3902 /* If the partial register-part of the arg counts in its stack size,
3903 skip the part of stack space corresponding to the registers.
3904 Otherwise, start copying to the beginning of the stack space,
3905 by setting SKIP to 0. */
3906 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3907
3908 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3909 x = validize_mem (force_const_mem (mode, x));
3910
3911 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3912 SUBREGs of such registers are not allowed. */
3913 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3914 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3915 x = copy_to_reg (x);
3916
3917 /* Loop over all the words allocated on the stack for this arg. */
3918 /* We can do it by words, because any scalar bigger than a word
3919 has a size that is a multiple of a word. */
3920 #ifndef PUSH_ARGS_REVERSED
3921 for (i = not_stack; i < size; i++)
3922 #else
3923 for (i = size - 1; i >= not_stack; i--)
3924 #endif
3925 if (i >= not_stack + offset)
3926 emit_push_insn (operand_subword_force (x, i, mode),
3927 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3928 0, args_addr,
3929 GEN_INT (args_offset + ((i - not_stack + skip)
3930 * UNITS_PER_WORD)),
3931 reg_parm_stack_space, alignment_pad);
3932 }
3933 else
3934 {
3935 rtx addr;
3936 rtx dest;
3937
3938 /* Push padding now if padding above and stack grows down,
3939 or if padding below and stack grows up.
3940 But if space already allocated, this has already been done. */
3941 if (extra && args_addr == 0
3942 && where_pad != none && where_pad != stack_direction)
3943 anti_adjust_stack (GEN_INT (extra));
3944
3945 #ifdef PUSH_ROUNDING
3946 if (args_addr == 0 && PUSH_ARGS)
3947 emit_single_push_insn (mode, x, type);
3948 else
3949 #endif
3950 {
3951 if (CONST_INT_P (args_so_far))
3952 addr
3953 = memory_address (mode,
3954 plus_constant (args_addr,
3955 INTVAL (args_so_far)));
3956 else
3957 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3958 args_so_far));
3959 dest = gen_rtx_MEM (mode, addr);
3960
3961 /* We do *not* set_mem_attributes here, because incoming arguments
3962 may overlap with sibling call outgoing arguments and we cannot
3963 allow reordering of reads from function arguments with stores
3964 to outgoing arguments of sibling calls. We do, however, want
3965 to record the alignment of the stack slot. */
3966 /* ALIGN may well be better aligned than TYPE, e.g. due to
3967 PARM_BOUNDARY. Assume the caller isn't lying. */
3968 set_mem_align (dest, align);
3969
3970 emit_move_insn (dest, x);
3971 }
3972 }
3973
3974 /* If part should go in registers, copy that part
3975 into the appropriate registers. Do this now, at the end,
3976 since mem-to-mem copies above may do function calls. */
3977 if (partial > 0 && reg != 0)
3978 {
3979 /* Handle calls that pass values in multiple non-contiguous locations.
3980 The Irix 6 ABI has examples of this. */
3981 if (GET_CODE (reg) == PARALLEL)
3982 emit_group_load (reg, x, type, -1);
3983 else
3984 {
3985 gcc_assert (partial % UNITS_PER_WORD == 0);
3986 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
3987 }
3988 }
3989
3990 if (extra && args_addr == 0 && where_pad == stack_direction)
3991 anti_adjust_stack (GEN_INT (extra));
3992
3993 if (alignment_pad && args_addr == 0)
3994 anti_adjust_stack (alignment_pad);
3995 }
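
/* Illustrative sketch, not part of GCC: pushing one SImode argument
   entirely on the stack, with no partial register part and no
   preallocated argument block, assuming the target has push
   instructions.  The helper name is hypothetical.  */
#if 0
static void
example_push_simode_arg (rtx val)
{
  emit_push_insn (val, SImode, integer_type_node,
                  NULL_RTX,       /* size: only needed for BLKmode */
                  PARM_BOUNDARY,  /* align, in bits */
                  0,              /* partial: no bytes in registers */
                  NULL_RTX,       /* reg */
                  0,              /* extra padding bytes */
                  NULL_RTX,       /* args_addr: 0 => use push insns */
                  const0_rtx,     /* args_so_far */
                  0,              /* reg_parm_stack_space */
                  NULL_RTX);      /* alignment_pad */
}
#endif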
3996 \f
3997 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3998 operations. */
3999
4000 static rtx
4001 get_subtarget (rtx x)
4002 {
4003 return (optimize
4004 || x == 0
4005 /* Only registers can be subtargets. */
4006 || !REG_P (x)
4007 /* Don't use hard regs to avoid extending their life. */
4008 || REGNO (x) < FIRST_PSEUDO_REGISTER
4009 ? 0 : x);
4010 }
4011
4012 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
4013 FIELD is a bitfield. Returns true if the optimization was successful,
4014 and there's nothing else to do. */
4015
4016 static bool
4017 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
4018 unsigned HOST_WIDE_INT bitpos,
4019 enum machine_mode mode1, rtx str_rtx,
4020 tree to, tree src)
4021 {
4022 enum machine_mode str_mode = GET_MODE (str_rtx);
4023 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4024 tree op0, op1;
4025 rtx value, result;
4026 optab binop;
4027
4028 if (mode1 != VOIDmode
4029 || bitsize >= BITS_PER_WORD
4030 || str_bitsize > BITS_PER_WORD
4031 || TREE_SIDE_EFFECTS (to)
4032 || TREE_THIS_VOLATILE (to))
4033 return false;
4034
4035 STRIP_NOPS (src);
4036 if (!BINARY_CLASS_P (src)
4037 || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4038 return false;
4039
4040 op0 = TREE_OPERAND (src, 0);
4041 op1 = TREE_OPERAND (src, 1);
4042 STRIP_NOPS (op0);
4043
4044 if (!operand_equal_p (to, op0, 0))
4045 return false;
4046
4047 if (MEM_P (str_rtx))
4048 {
4049 unsigned HOST_WIDE_INT offset1;
4050
4051 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4052 str_mode = word_mode;
4053 str_mode = get_best_mode (bitsize, bitpos,
4054 MEM_ALIGN (str_rtx), str_mode, 0);
4055 if (str_mode == VOIDmode)
4056 return false;
4057 str_bitsize = GET_MODE_BITSIZE (str_mode);
4058
4059 offset1 = bitpos;
4060 bitpos %= str_bitsize;
4061 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4062 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4063 }
4064 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4065 return false;
4066
4067 /* If the bit field covers the whole REG/MEM, store_field
4068 will likely generate better code. */
4069 if (bitsize >= str_bitsize)
4070 return false;
4071
4072 /* We can't handle fields split across multiple entities. */
4073 if (bitpos + bitsize > str_bitsize)
4074 return false;
4075
4076 if (BYTES_BIG_ENDIAN)
4077 bitpos = str_bitsize - bitpos - bitsize;
4078
4079 switch (TREE_CODE (src))
4080 {
4081 case PLUS_EXPR:
4082 case MINUS_EXPR:
4083 /* For now, just optimize the case of the topmost bitfield
4084 where we don't need to do any masking and also
4085 1 bit bitfields where xor can be used.
4086 We might win by one instruction for the other bitfields
4087 too if insv/extv instructions aren't used, so that
4088 can be added later. */
4089 if (bitpos + bitsize != str_bitsize
4090 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4091 break;
4092
4093 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4094 value = convert_modes (str_mode,
4095 TYPE_MODE (TREE_TYPE (op1)), value,
4096 TYPE_UNSIGNED (TREE_TYPE (op1)));
4097
4098 /* We may be accessing data outside the field, which means
4099 we can alias adjacent data. */
4100 if (MEM_P (str_rtx))
4101 {
4102 str_rtx = shallow_copy_rtx (str_rtx);
4103 set_mem_alias_set (str_rtx, 0);
4104 set_mem_expr (str_rtx, 0);
4105 }
4106
4107 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
4108 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4109 {
4110 value = expand_and (str_mode, value, const1_rtx, NULL);
4111 binop = xor_optab;
4112 }
4113 value = expand_shift (LSHIFT_EXPR, str_mode, value,
4114 build_int_cst (NULL_TREE, bitpos),
4115 NULL_RTX, 1);
4116 result = expand_binop (str_mode, binop, str_rtx,
4117 value, str_rtx, 1, OPTAB_WIDEN);
4118 if (result != str_rtx)
4119 emit_move_insn (str_rtx, result);
4120 return true;
4121
4122 case BIT_IOR_EXPR:
4123 case BIT_XOR_EXPR:
4124 if (TREE_CODE (op1) != INTEGER_CST)
4125 break;
4126 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), EXPAND_NORMAL);
4127 value = convert_modes (GET_MODE (str_rtx),
4128 TYPE_MODE (TREE_TYPE (op1)), value,
4129 TYPE_UNSIGNED (TREE_TYPE (op1)));
4130
4131 /* We may be accessing data outside the field, which means
4132 we can alias adjacent data. */
4133 if (MEM_P (str_rtx))
4134 {
4135 str_rtx = shallow_copy_rtx (str_rtx);
4136 set_mem_alias_set (str_rtx, 0);
4137 set_mem_expr (str_rtx, 0);
4138 }
4139
4140 binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
4141 if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
4142 {
4143 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
4144 - 1);
4145 value = expand_and (GET_MODE (str_rtx), value, mask,
4146 NULL_RTX);
4147 }
4148 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
4149 build_int_cst (NULL_TREE, bitpos),
4150 NULL_RTX, 1);
4151 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
4152 value, str_rtx, 1, OPTAB_WIDEN);
4153 if (result != str_rtx)
4154 emit_move_insn (str_rtx, result);
4155 return true;
4156
4157 default:
4158 break;
4159 }
4160
4161 return false;
4162 }
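
/* Illustrative example, not part of GCC: the source-level patterns the
   function above targets.  Type, field and variable names are
   hypothetical.  */
#if 0
struct example_flags { unsigned low : 7; unsigned top : 1; };

void
example_bitfield_rmw (struct example_flags *p)
{
  p->low |= 3;  /* ior of a shifted, masked constant applied to the
                   containing word; no extract/insert sequence */
  p->low ^= 1;  /* likewise with xor */
  p->top += 1;  /* topmost field: plain add on the containing word,
                   the carry falls off the end so no masking needed */
}
#endif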
4163
4164
4165 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4166 is true, try generating a nontemporal store. */
4167
4168 void
4169 expand_assignment (tree to, tree from, bool nontemporal)
4170 {
4171 rtx to_rtx = 0;
4172 rtx result;
4173
4174 /* Don't crash if the lhs of the assignment was erroneous. */
4175 if (TREE_CODE (to) == ERROR_MARK)
4176 {
4177 result = expand_normal (from);
4178 return;
4179 }
4180
4181 /* Optimize away no-op moves without side-effects. */
4182 if (operand_equal_p (to, from, 0))
4183 return;
4184
4185 /* Assignment of a structure component needs special treatment
4186 if the structure component's rtx is not simply a MEM.
4187 Assignment of an array element at a constant index, and assignment of
4188 an array element in an unaligned packed structure field, have the same
4189 problem. */
4190 if (handled_component_p (to)
4191 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4192 {
4193 enum machine_mode mode1;
4194 HOST_WIDE_INT bitsize, bitpos;
4195 tree offset;
4196 int unsignedp;
4197 int volatilep = 0;
4198 tree tem;
4199
4200 push_temp_slots ();
4201 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4202 &unsignedp, &volatilep, true);
4203
4204 /* If we are going to use store_bit_field and extract_bit_field,
4205 make sure to_rtx will be safe for multiple use. */
4206
4207 to_rtx = expand_normal (tem);
4208
4209 if (offset != 0)
4210 {
4211 rtx offset_rtx;
4212
4213 if (!MEM_P (to_rtx))
4214 {
4215 /* We can get constant negative offsets into arrays with broken
4216 user code. Translate this to a trap instead of ICEing. */
4217 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4218 expand_builtin_trap ();
4219 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4220 }
4221
4222 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4223 #ifdef POINTERS_EXTEND_UNSIGNED
4224 if (GET_MODE (offset_rtx) != Pmode)
4225 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4226 #else
4227 if (GET_MODE (offset_rtx) != ptr_mode)
4228 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4229 #endif
4230
4231 /* A constant address in TO_RTX can have VOIDmode; we must not try
4232 to call force_reg in that case, so avoid it. */
4233 if (MEM_P (to_rtx)
4234 && GET_MODE (to_rtx) == BLKmode
4235 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4236 && bitsize > 0
4237 && (bitpos % bitsize) == 0
4238 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4239 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4240 {
4241 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4242 bitpos = 0;
4243 }
4244
4245 to_rtx = offset_address (to_rtx, offset_rtx,
4246 highest_pow2_factor_for_target (to,
4247 offset));
4248 }
4249
4250 /* Handle expand_expr of a complex value returning a CONCAT. */
4251 if (GET_CODE (to_rtx) == CONCAT)
4252 {
4253 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from))))
4254 {
4255 gcc_assert (bitpos == 0);
4256 result = store_expr (from, to_rtx, false, nontemporal);
4257 }
4258 else
4259 {
4260 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
4261 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4262 nontemporal);
4263 }
4264 }
4265 else
4266 {
4267 if (MEM_P (to_rtx))
4268 {
4269 /* If the field is at offset zero, we could have been given the
4270 DECL_RTX of the parent struct. Don't munge it. */
4271 to_rtx = shallow_copy_rtx (to_rtx);
4272
4273 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4274
4275 /* Deal with volatile and readonly fields. The former is only
4276 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4277 if (volatilep)
4278 MEM_VOLATILE_P (to_rtx) = 1;
4279 if (component_uses_parent_alias_set (to))
4280 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4281 }
4282
4283 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
4284 to_rtx, to, from))
4285 result = NULL;
4286 else
4287 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4288 TREE_TYPE (tem), get_alias_set (to),
4289 nontemporal);
4290 }
4291
4292 if (result)
4293 preserve_temp_slots (result);
4294 free_temp_slots ();
4295 pop_temp_slots ();
4296 return;
4297 }
4298
4299 else if (TREE_CODE (to) == MISALIGNED_INDIRECT_REF)
4300 {
4301 enum machine_mode mode, op_mode1;
4302 enum insn_code icode;
4303 rtx reg, addr, mem, insn;
4304
4305 reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4306 reg = force_not_mem (reg);
4307
4308 mode = TYPE_MODE (TREE_TYPE (to));
4309 addr = expand_expr (TREE_OPERAND (to, 0), NULL_RTX, VOIDmode,
4310 EXPAND_SUM);
4311 addr = memory_address (mode, addr);
4312 mem = gen_rtx_MEM (mode, addr);
4313
4314 set_mem_attributes (mem, to, 0);
4315
4316 icode = movmisalign_optab->handlers[mode].insn_code;
4317 gcc_assert (icode != CODE_FOR_nothing);
4318
4319 op_mode1 = insn_data[icode].operand[1].mode;
4320 if (! (*insn_data[icode].operand[1].predicate) (reg, op_mode1)
4321 && op_mode1 != VOIDmode)
4322 reg = copy_to_mode_reg (op_mode1, reg);
4323
4324 insn = GEN_FCN (icode) (mem, reg);
4325 emit_insn (insn);
4326 return;
4327 }
4328
4329 /* If the rhs is a function call and its value is not an aggregate,
4330 call the function before we start to compute the lhs.
4331 This is needed for correct code for cases such as
4332 val = setjmp (buf) on machines where reference to val
4333 requires loading up part of an address in a separate insn.
4334
4335 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4336 since it might be a promoted variable where the zero- or sign-extension
4337 needs to be done. Handling this in the normal way is safe because no
4338 computation is done before the call. The same is true for SSA names. */
4339 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4340 && COMPLETE_TYPE_P (TREE_TYPE (from))
4341 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4342 && ! (((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4343 && REG_P (DECL_RTL (to)))
4344 || TREE_CODE (to) == SSA_NAME))
4345 {
4346 rtx value;
4347
4348 push_temp_slots ();
4349 value = expand_normal (from);
4350 if (to_rtx == 0)
4351 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4352
4353 /* Handle calls that return values in multiple non-contiguous locations.
4354 The Irix 6 ABI has examples of this. */
4355 if (GET_CODE (to_rtx) == PARALLEL)
4356 emit_group_load (to_rtx, value, TREE_TYPE (from),
4357 int_size_in_bytes (TREE_TYPE (from)));
4358 else if (GET_MODE (to_rtx) == BLKmode)
4359 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4360 else
4361 {
4362 if (POINTER_TYPE_P (TREE_TYPE (to)))
4363 value = convert_memory_address (GET_MODE (to_rtx), value);
4364 emit_move_insn (to_rtx, value);
4365 }
4366 preserve_temp_slots (to_rtx);
4367 free_temp_slots ();
4368 pop_temp_slots ();
4369 return;
4370 }
4371
4372 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4373 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4374
4375 if (to_rtx == 0)
4376 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4377
4378 /* Don't move directly into a return register. */
4379 if (TREE_CODE (to) == RESULT_DECL
4380 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4381 {
4382 rtx temp;
4383
4384 push_temp_slots ();
4385 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
4386
4387 if (GET_CODE (to_rtx) == PARALLEL)
4388 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4389 int_size_in_bytes (TREE_TYPE (from)));
4390 else
4391 emit_move_insn (to_rtx, temp);
4392
4393 preserve_temp_slots (to_rtx);
4394 free_temp_slots ();
4395 pop_temp_slots ();
4396 return;
4397 }
4398
4399 /* In case we are returning the contents of an object which overlaps
4400 the place the value is being stored, use a safe function when copying
4401 a value through a pointer into a structure value return block. */
4402 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4403 && cfun->returns_struct
4404 && !cfun->returns_pcc_struct)
4405 {
4406 rtx from_rtx, size;
4407
4408 push_temp_slots ();
4409 size = expr_size (from);
4410 from_rtx = expand_normal (from);
4411
4412 emit_library_call (memmove_libfunc, LCT_NORMAL,
4413 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4414 XEXP (from_rtx, 0), Pmode,
4415 convert_to_mode (TYPE_MODE (sizetype),
4416 size, TYPE_UNSIGNED (sizetype)),
4417 TYPE_MODE (sizetype));
4418
4419 preserve_temp_slots (to_rtx);
4420 free_temp_slots ();
4421 pop_temp_slots ();
4422 return;
4423 }
4424
4425 /* Compute FROM and store the value in the rtx we got. */
4426
4427 push_temp_slots ();
4428 result = store_expr (from, to_rtx, 0, nontemporal);
4429 preserve_temp_slots (result);
4430 free_temp_slots ();
4431 pop_temp_slots ();
4432 return;
4433 }
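
/* Illustrative examples, not part of GCC, of the paths through
   expand_assignment for C sources:

       s.bits = v;    handled_component_p path; may be handled by
                      optimize_bitfield_assignment_op or store_field
       x = f ();      the call is expanded before the lhs, then the
                      value is moved (or group/block-loaded) into X
       x = y;         ordinary path: store_expr.  */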
4434
4435 /* Emit a nontemporal store insn that moves FROM to TO. Return true if this
4436 succeeded, false otherwise. */
4437
4438 bool
4439 emit_storent_insn (rtx to, rtx from)
4440 {
4441 enum machine_mode mode = GET_MODE (to), imode;
4442 enum insn_code code = optab_handler (storent_optab, mode)->insn_code;
4443 rtx pattern;
4444
4445 if (code == CODE_FOR_nothing)
4446 return false;
4447
4448 imode = insn_data[code].operand[0].mode;
4449 if (!insn_data[code].operand[0].predicate (to, imode))
4450 return false;
4451
4452 imode = insn_data[code].operand[1].mode;
4453 if (!insn_data[code].operand[1].predicate (from, imode))
4454 {
4455 from = copy_to_mode_reg (imode, from);
4456 if (!insn_data[code].operand[1].predicate (from, imode))
4457 return false;
4458 }
4459
4460 pattern = GEN_FCN (code) (to, from);
4461 if (pattern == NULL_RTX)
4462 return false;
4463
4464 emit_insn (pattern);
4465 return true;
4466 }
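
/* Illustrative sketch, not part of GCC: the fallback pattern used by
   store_expr below when asked for a nontemporal store.  The helper
   name is hypothetical.  */
#if 0
static void
example_store_maybe_nontemporal (rtx target, rtx temp, bool nontemporal)
{
  if (nontemporal && emit_storent_insn (target, temp))
    return;                          /* nontemporal store emitted */
  emit_move_insn (target, temp);     /* ordinary store */
}
#endif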
4467
4468 /* Generate code for computing expression EXP,
4469 and storing the value into TARGET.
4470
4471 If the mode is BLKmode then we may return TARGET itself.
4472 It turns out that in BLKmode this doesn't cause a problem,
4473 because C has no operators that could combine two different
4474 assignments into the same BLKmode object with different values
4475 with no sequence point. Will other languages need this to
4476 be more thorough?
4477
4478 If CALL_PARAM_P is nonzero, this is a store into a call param on the
4479 stack, and block moves may need to be treated specially.
4480
4481 If NONTEMPORAL is true, try using a nontemporal store instruction. */
4482
4483 rtx
4484 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
4485 {
4486 rtx temp;
4487 rtx alt_rtl = NULL_RTX;
4488 location_t loc = EXPR_LOCATION (exp);
4489
4490 if (VOID_TYPE_P (TREE_TYPE (exp)))
4491 {
4492 /* C++ can generate ?: expressions with a throw expression in one
4493 branch and an rvalue in the other. Here, we resolve attempts to
4494 store the throw expression's nonexistent result. */
4495 gcc_assert (!call_param_p);
4496 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
4497 return NULL_RTX;
4498 }
4499 if (TREE_CODE (exp) == COMPOUND_EXPR)
4500 {
4501 /* Perform first part of compound expression, then assign from second
4502 part. */
4503 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4504 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4505 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4506 nontemporal);
4507 }
4508 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4509 {
4510 /* For conditional expression, get safe form of the target. Then
4511 test the condition, doing the appropriate assignment on either
4512 side. This avoids the creation of unnecessary temporaries.
4513 For non-BLKmode, it is more efficient not to do this. */
4514
4515 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4516
4517 do_pending_stack_adjust ();
4518 NO_DEFER_POP;
4519 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4520 store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4521 nontemporal);
4522 emit_jump_insn (gen_jump (lab2));
4523 emit_barrier ();
4524 emit_label (lab1);
4525 store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
4526 nontemporal);
4527 emit_label (lab2);
4528 OK_DEFER_POP;
4529
4530 return NULL_RTX;
4531 }
4532 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4533 /* If this is a scalar in a register that is stored in a wider mode
4534 than the declared mode, compute the result into its declared mode
4535 and then convert to the wider mode. Our value is the computed
4536 expression. */
4537 {
4538 rtx inner_target = 0;
4539
4540 /* We can do the conversion inside EXP, which will often result
4541 in some optimizations. Do the conversion in two steps: first
4542 change the signedness, if needed, then the extend. But don't
4543 do this if the type of EXP is a subtype of something else
4544 since then the conversion might involve more than just
4545 converting modes. */
4546 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4547 && TREE_TYPE (TREE_TYPE (exp)) == 0
4548 && GET_MODE_PRECISION (GET_MODE (target))
4549 == TYPE_PRECISION (TREE_TYPE (exp)))
4550 {
4551 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4552 != SUBREG_PROMOTED_UNSIGNED_P (target))
4553 {
4554 /* Some types, e.g. Fortran's logical*4, won't have a signed
4555 version, so use the mode instead. */
4556 tree ntype
4557 = (signed_or_unsigned_type_for
4558 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
4559 if (ntype == NULL)
4560 ntype = lang_hooks.types.type_for_mode
4561 (TYPE_MODE (TREE_TYPE (exp)),
4562 SUBREG_PROMOTED_UNSIGNED_P (target));
4563
4564 exp = fold_convert_loc (loc, ntype, exp);
4565 }
4566
4567 exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
4568 (GET_MODE (SUBREG_REG (target)),
4569 SUBREG_PROMOTED_UNSIGNED_P (target)),
4570 exp);
4571
4572 inner_target = SUBREG_REG (target);
4573 }
4574
4575 temp = expand_expr (exp, inner_target, VOIDmode,
4576 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4577
4578 /* If TEMP is a VOIDmode constant, use convert_modes to make
4579 sure that we properly convert it. */
4580 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4581 {
4582 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4583 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4584 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4585 GET_MODE (target), temp,
4586 SUBREG_PROMOTED_UNSIGNED_P (target));
4587 }
4588
4589 convert_move (SUBREG_REG (target), temp,
4590 SUBREG_PROMOTED_UNSIGNED_P (target));
4591
4592 return NULL_RTX;
4593 }
4594 else if (TREE_CODE (exp) == STRING_CST
4595 && !nontemporal && !call_param_p
4596 && TREE_STRING_LENGTH (exp) > 0
4597 && TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
4598 {
4599 /* Optimize initialization of an array with a STRING_CST. */
4600 HOST_WIDE_INT exp_len, str_copy_len;
4601 rtx dest_mem;
4602
4603 exp_len = int_expr_size (exp);
4604 if (exp_len <= 0)
4605 goto normal_expr;
4606
4607 str_copy_len = strlen (TREE_STRING_POINTER (exp));
4608 if (str_copy_len < TREE_STRING_LENGTH (exp) - 1)
4609 goto normal_expr;
4610
4611 str_copy_len = TREE_STRING_LENGTH (exp);
4612 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0)
4613 {
4614 str_copy_len += STORE_MAX_PIECES - 1;
4615 str_copy_len &= ~(STORE_MAX_PIECES - 1);
4616 }
4617 str_copy_len = MIN (str_copy_len, exp_len);
4618 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
4619 CONST_CAST(char *, TREE_STRING_POINTER (exp)),
4620 MEM_ALIGN (target), false))
4621 goto normal_expr;
4622
4623 dest_mem = target;
4624
4625 dest_mem = store_by_pieces (dest_mem,
4626 str_copy_len, builtin_strncpy_read_str,
4627 CONST_CAST(char *, TREE_STRING_POINTER (exp)),
4628 MEM_ALIGN (target), false,
4629 exp_len > str_copy_len ? 1 : 0);
4630 if (exp_len > str_copy_len)
4631 clear_storage (adjust_address (dest_mem, BLKmode, 0),
4632 GEN_INT (exp_len - str_copy_len),
4633 BLOCK_OP_NORMAL);
4634 return NULL_RTX;
4635 }
4636 else
4637 {
4638 rtx tmp_target;
4639
4640 normal_expr:
4641 /* If we want to use a nontemporal store, force the value to
4642 register first. */
4643 tmp_target = nontemporal ? NULL_RTX : target;
4644 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
4645 (call_param_p
4646 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4647 &alt_rtl);
4648 }
4649
4650 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4651 the same as that of TARGET, adjust the constant. This is needed, for
4652 example, in case it is a CONST_DOUBLE and we want only a word-sized
4653 value. */
4654 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4655 && TREE_CODE (exp) != ERROR_MARK
4656 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4657 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4658 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4659
4660 /* If value was not generated in the target, store it there.
4661 Convert the value to TARGET's type first if necessary and emit the
4662 pending incrementations that have been queued when expanding EXP.
4663 Note that we cannot emit the whole queue blindly because this will
4664 effectively disable the POST_INC optimization later.
4665
4666 If TEMP and TARGET compare equal according to rtx_equal_p, but
4667 one or both of them are volatile memory refs, we have to distinguish
4668 two cases:
4669 - expand_expr has used TARGET. In this case, we must not generate
4670 another copy. This can be detected by TARGET being equal according
4671 to == .
4672 - expand_expr has not used TARGET - that means that the source just
4673 happens to have the same RTX form. Since temp will have been created
4674 by expand_expr, it will compare unequal according to == .
4675 We must generate a copy in this case, to reach the correct number
4676 of volatile memory references. */
4677
4678 if ((! rtx_equal_p (temp, target)
4679 || (temp != target && (side_effects_p (temp)
4680 || side_effects_p (target))))
4681 && TREE_CODE (exp) != ERROR_MARK
4682 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4683 but TARGET is not a valid memory reference, TEMP will differ
4684 from TARGET although it is really the same location. */
4685 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4686 /* If there's nothing to copy, don't bother. Don't call
4687 expr_size unless necessary, because some front-ends' (C++)
4688 expr_size hook must not be given objects that are not
4689 supposed to be bit-copied or bit-initialized. */
4690 && expr_size (exp) != const0_rtx)
4691 {
4692 if (GET_MODE (temp) != GET_MODE (target)
4693 && GET_MODE (temp) != VOIDmode)
4694 {
4695 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4696 if (GET_MODE (target) == BLKmode
4697 || GET_MODE (temp) == BLKmode)
4698 emit_block_move (target, temp, expr_size (exp),
4699 (call_param_p
4700 ? BLOCK_OP_CALL_PARM
4701 : BLOCK_OP_NORMAL));
4702 else
4703 convert_move (target, temp, unsignedp);
4704 }
4705
4706 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4707 {
4708 /* Handle copying a string constant into an array. The string
4709 constant may be shorter than the array. So copy just the string's
4710 actual length, and clear the rest. First get the size of the data
4711 type of the string, which is actually the size of the target. */
4712 rtx size = expr_size (exp);
4713
4714 if (CONST_INT_P (size)
4715 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4716 emit_block_move (target, temp, size,
4717 (call_param_p
4718 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4719 else
4720 {
4721 /* Compute the size of the data to copy from the string. */
4722 tree copy_size
4723 = size_binop_loc (loc, MIN_EXPR,
4724 make_tree (sizetype, size),
4725 size_int (TREE_STRING_LENGTH (exp)));
4726 rtx copy_size_rtx
4727 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4728 (call_param_p
4729 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4730 rtx label = 0;
4731
4732 /* Copy that much. */
4733 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4734 TYPE_UNSIGNED (sizetype));
4735 emit_block_move (target, temp, copy_size_rtx,
4736 (call_param_p
4737 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4738
4739 /* Figure out how much is left in TARGET that we have to clear.
4740 Do all calculations in ptr_mode. */
4741 if (CONST_INT_P (copy_size_rtx))
4742 {
4743 size = plus_constant (size, -INTVAL (copy_size_rtx));
4744 target = adjust_address (target, BLKmode,
4745 INTVAL (copy_size_rtx));
4746 }
4747 else
4748 {
4749 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4750 copy_size_rtx, NULL_RTX, 0,
4751 OPTAB_LIB_WIDEN);
4752
4753 #ifdef POINTERS_EXTEND_UNSIGNED
4754 if (GET_MODE (copy_size_rtx) != Pmode)
4755 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4756 TYPE_UNSIGNED (sizetype));
4757 #endif
4758
4759 target = offset_address (target, copy_size_rtx,
4760 highest_pow2_factor (copy_size));
4761 label = gen_label_rtx ();
4762 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4763 GET_MODE (size), 0, label);
4764 }
4765
4766 if (size != const0_rtx)
4767 clear_storage (target, size, BLOCK_OP_NORMAL);
4768
4769 if (label)
4770 emit_label (label);
4771 }
4772 }
4773 /* Handle calls that return values in multiple non-contiguous locations.
4774 The Irix 6 ABI has examples of this. */
4775 else if (GET_CODE (target) == PARALLEL)
4776 emit_group_load (target, temp, TREE_TYPE (exp),
4777 int_size_in_bytes (TREE_TYPE (exp)));
4778 else if (GET_MODE (temp) == BLKmode)
4779 emit_block_move (target, temp, expr_size (exp),
4780 (call_param_p
4781 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4782 else if (nontemporal
4783 && emit_storent_insn (target, temp))
4784 /* If we managed to emit a nontemporal store, there is nothing else to
4785 do. */
4786 ;
4787 else
4788 {
4789 temp = force_operand (temp, target);
4790 if (temp != target)
4791 emit_move_insn (target, temp);
4792 }
4793 }
4794
4795 return NULL_RTX;
4796 }
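
/* Illustrative example, not part of GCC: for

       char buf[8] = "hi";

   the STRING_CST path above copies the string data (including the
   terminating NUL, rounded up toward STORE_MAX_PIECES) into BUF with
   store_by_pieces, and if that copy does not already cover the whole
   array, clear_storage zeroes the remaining bytes, matching C's
   zero-initialization of the rest of the array.  */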
4797 \f
4798 /* Helper for categorize_ctor_elements. Identical interface. */
4799
4800 static bool
4801 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
4802 HOST_WIDE_INT *p_elt_count,
4803 bool *p_must_clear)
4804 {
4805 unsigned HOST_WIDE_INT idx;
4806 HOST_WIDE_INT nz_elts, elt_count;
4807 tree value, purpose;
4808
4809 /* Whether CTOR is a valid constant initializer, in accordance with what
4810 initializer_constant_valid_p does. If inferred from the constructor
4811 elements, true until proven otherwise. */
4812 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
4813 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
4814
4815 nz_elts = 0;
4816 elt_count = 0;
4817
4818 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
4819 {
4820 HOST_WIDE_INT mult;
4821
4822 mult = 1;
4823 if (TREE_CODE (purpose) == RANGE_EXPR)
4824 {
4825 tree lo_index = TREE_OPERAND (purpose, 0);
4826 tree hi_index = TREE_OPERAND (purpose, 1);
4827
4828 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4829 mult = (tree_low_cst (hi_index, 1)
4830 - tree_low_cst (lo_index, 1) + 1);
4831 }
4832
4833 switch (TREE_CODE (value))
4834 {
4835 case CONSTRUCTOR:
4836 {
4837 HOST_WIDE_INT nz = 0, ic = 0;
4838
4839 bool const_elt_p
4840 = categorize_ctor_elements_1 (value, &nz, &ic, p_must_clear);
4841
4842 nz_elts += mult * nz;
4843 elt_count += mult * ic;
4844
4845 if (const_from_elts_p && const_p)
4846 const_p = const_elt_p;
4847 }
4848 break;
4849
4850 case INTEGER_CST:
4851 case REAL_CST:
4852 case FIXED_CST:
4853 if (!initializer_zerop (value))
4854 nz_elts += mult;
4855 elt_count += mult;
4856 break;
4857
4858 case STRING_CST:
4859 nz_elts += mult * TREE_STRING_LENGTH (value);
4860 elt_count += mult * TREE_STRING_LENGTH (value);
4861 break;
4862
4863 case COMPLEX_CST:
4864 if (!initializer_zerop (TREE_REALPART (value)))
4865 nz_elts += mult;
4866 if (!initializer_zerop (TREE_IMAGPART (value)))
4867 nz_elts += mult;
4868 elt_count += mult;
4869 break;
4870
4871 case VECTOR_CST:
4872 {
4873 tree v;
4874 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4875 {
4876 if (!initializer_zerop (TREE_VALUE (v)))
4877 nz_elts += mult;
4878 elt_count += mult;
4879 }
4880 }
4881 break;
4882
4883 default:
4884 nz_elts += mult;
4885 elt_count += mult;
4886
4887 if (const_from_elts_p && const_p)
4888 const_p = initializer_constant_valid_p (value, TREE_TYPE (value))
4889 != NULL_TREE;
4890 break;
4891 }
4892 }
4893
4894 if (!*p_must_clear
4895 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4896 || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4897 {
4898 tree init_sub_type;
4899 bool clear_this = true;
4900
4901 if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
4902 {
4903 /* We don't expect more than one element of the union to be
4904 initialized. Not sure what we should do otherwise... */
4905 gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
4906 == 1);
4907
4908 init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
4909 CONSTRUCTOR_ELTS (ctor),
4910 0)->value);
4911
4912 /* ??? We could look at each element of the union, and find the
4913 largest element. Which would avoid comparing the size of the
4914 initialized element against any tail padding in the union.
4915 Doesn't seem worth the effort... */
4916 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4917 TYPE_SIZE (init_sub_type)) == 1)
4918 {
4919 /* And now we have to find out if the element itself is fully
4920 constructed. E.g. for union { struct { int a, b; } s; } u
4921 = { .s = { .a = 1 } }. */
4922 if (elt_count == count_type_elements (init_sub_type, false))
4923 clear_this = false;
4924 }
4925 }
4926
4927 *p_must_clear = clear_this;
4928 }
4929
4930 *p_nz_elts += nz_elts;
4931 *p_elt_count += elt_count;
4932
4933 return const_p;
4934 }
4935
4936 /* Examine CTOR to discover:
4937 * how many scalar fields are set to nonzero values,
4938 and place it in *P_NZ_ELTS;
4939 * how many scalar fields in total are in CTOR,
4940 and place it in *P_ELT_COUNT.
4941 * if a type is a union, and the initializer from the constructor
4942 is not the largest element in the union, then set *p_must_clear.
4943
4944 Return whether or not CTOR is a valid static constant initializer, the same
4945 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
4946
4947 bool
4948 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
4949 HOST_WIDE_INT *p_elt_count,
4950 bool *p_must_clear)
4951 {
4952 *p_nz_elts = 0;
4953 *p_elt_count = 0;
4954 *p_must_clear = false;
4955
4956 return
4957 categorize_ctor_elements_1 (ctor, p_nz_elts, p_elt_count, p_must_clear);
4958 }
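
/* Illustrative sketch, not part of GCC: classifying an initializer the
   way store_constructor does below.  The helper name is hypothetical.  */
#if 0
static bool
example_classify_ctor (tree ctor)
{
  HOST_WIDE_INT nz_elts, elt_count;
  bool must_clear;
  bool const_p = categorize_ctor_elements (ctor, &nz_elts, &elt_count,
                                           &must_clear);
  /* For `int a[4] = { 0, 1, 0, 2 };' this yields nz_elts == 2,
     elt_count == 4, must_clear == false and const_p == true.  */
  return const_p && !must_clear;
}
#endif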
4959
4960 /* Count the number of scalars in TYPE. Return -1 on overflow or if
4961 TYPE is variable-sized. If ALLOW_FLEXARR is true, don't count a
4962 flexible array member at the end of the structure. */
4963
4964 HOST_WIDE_INT
4965 count_type_elements (const_tree type, bool allow_flexarr)
4966 {
4967 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4968 switch (TREE_CODE (type))
4969 {
4970 case ARRAY_TYPE:
4971 {
4972 tree telts = array_type_nelts (type);
4973 if (telts && host_integerp (telts, 1))
4974 {
4975 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4976 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
4977 if (n == 0)
4978 return 0;
4979 else if (max / n > m)
4980 return n * m;
4981 }
4982 return -1;
4983 }
4984
4985 case RECORD_TYPE:
4986 {
4987 HOST_WIDE_INT n = 0, t;
4988 tree f;
4989
4990 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4991 if (TREE_CODE (f) == FIELD_DECL)
4992 {
4993 t = count_type_elements (TREE_TYPE (f), false);
4994 if (t < 0)
4995 {
4996 /* Check for structures with flexible array member. */
4997 tree tf = TREE_TYPE (f);
4998 if (allow_flexarr
4999 && TREE_CHAIN (f) == NULL
5000 && TREE_CODE (tf) == ARRAY_TYPE
5001 && TYPE_DOMAIN (tf)
5002 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5003 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5004 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5005 && int_size_in_bytes (type) >= 0)
5006 break;
5007
5008 return -1;
5009 }
5010 n += t;
5011 }
5012
5013 return n;
5014 }
5015
5016 case UNION_TYPE:
5017 case QUAL_UNION_TYPE:
5018 return -1;
5019
5020 case COMPLEX_TYPE:
5021 return 2;
5022
5023 case VECTOR_TYPE:
5024 return TYPE_VECTOR_SUBPARTS (type);
5025
5026 case INTEGER_TYPE:
5027 case REAL_TYPE:
5028 case FIXED_POINT_TYPE:
5029 case ENUMERAL_TYPE:
5030 case BOOLEAN_TYPE:
5031 case POINTER_TYPE:
5032 case OFFSET_TYPE:
5033 case REFERENCE_TYPE:
5034 return 1;
5035
5036 case ERROR_MARK:
5037 return 0;
5038
5039 case VOID_TYPE:
5040 case METHOD_TYPE:
5041 case FUNCTION_TYPE:
5042 case LANG_TYPE:
5043 default:
5044 gcc_unreachable ();
5045 }
5046 }
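
/* Illustrative examples, not part of GCC, of count_type_elements:
   `int[10]' yields 10; `struct { int a, b; }' yields 2; `_Complex
   double' yields 2; any union, and any variable-sized type, yields
   -1.  */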
5047
5048 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
5049
5050 static int
5051 mostly_zeros_p (const_tree exp)
5052 {
5053 if (TREE_CODE (exp) == CONSTRUCTOR)
5054 {
5056 HOST_WIDE_INT nz_elts, count, elts;
5057 bool must_clear;
5058
5059 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
5060 if (must_clear)
5061 return 1;
5062
5063 elts = count_type_elements (TREE_TYPE (exp), false);
5064
5065 return nz_elts < elts / 4;
5066 }
5067
5068 return initializer_zerop (exp);
5069 }
5070
5071 /* Return 1 if EXP contains all zeros. */
5072
5073 static int
5074 all_zeros_p (const_tree exp)
5075 {
5076 if (TREE_CODE (exp) == CONSTRUCTOR)
5077 {
5079 HOST_WIDE_INT nz_elts, count;
5080 bool must_clear;
5081
5082 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
5083 return nz_elts == 0;
5084 }
5085
5086 return initializer_zerop (exp);
5087 }
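
/* Illustrative examples, not part of GCC: for `int a[8] = { 1 };'
   mostly_zeros_p returns 1 (nz_elts == 1 is less than elts / 4 == 2)
   and all_zeros_p returns 0; for `int b[8] = { 0 };' both return 1.  */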
5088 \f
5089 /* Helper function for store_constructor.
5090 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5091 TYPE is the type of the CONSTRUCTOR, not the element type.
5092 CLEARED is as for store_constructor.
5093 ALIAS_SET is the alias set to use for any stores.
5094
5095 This provides a recursive shortcut back to store_constructor when it isn't
5096 necessary to go through store_field. This is so that we can pass through
5097 the cleared field to let store_constructor know that we may not have to
5098 clear a substructure if the outer structure has already been cleared. */
5099
5100 static void
5101 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5102 HOST_WIDE_INT bitpos, enum machine_mode mode,
5103 tree exp, tree type, int cleared,
5104 alias_set_type alias_set)
5105 {
5106 if (TREE_CODE (exp) == CONSTRUCTOR
5107 /* We can only call store_constructor recursively if the size and
5108 bit position are on a byte boundary. */
5109 && bitpos % BITS_PER_UNIT == 0
5110 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5111 /* If we have a nonzero bitpos for a register target, then we just
5112 let store_field do the bitfield handling. This is unlikely to
5113 generate unnecessary clear instructions anyways. */
5114 && (bitpos == 0 || MEM_P (target)))
5115 {
5116 if (MEM_P (target))
5117 target
5118 = adjust_address (target,
5119 GET_MODE (target) == BLKmode
5120 || 0 != (bitpos
5121 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5122 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5123
5125 /* Update the alias set, if required. */
5126 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5127 && MEM_ALIAS_SET (target) != 0)
5128 {
5129 target = copy_rtx (target);
5130 set_mem_alias_set (target, alias_set);
5131 }
5132
5133 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5134 }
5135 else
5136 store_field (target, bitsize, bitpos, mode, exp, type, alias_set, false);
5137 }
5138
5139 /* Store the value of constructor EXP into the rtx TARGET.
5140 TARGET is either a REG or a MEM; we know it cannot conflict, since
5141 safe_from_p has been called.
5142 CLEARED is true if TARGET is known to have been zero'd.
5143 SIZE is the number of bytes of TARGET we are allowed to modify: this
5144 may not be the same as the size of EXP if we are assigning to a field
5145 which has been packed to exclude padding bits. */
5146
5147 static void
5148 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5149 {
5150 tree type = TREE_TYPE (exp);
5151 #ifdef WORD_REGISTER_OPERATIONS
5152 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5153 #endif
5154
5155 switch (TREE_CODE (type))
5156 {
5157 case RECORD_TYPE:
5158 case UNION_TYPE:
5159 case QUAL_UNION_TYPE:
5160 {
5161 unsigned HOST_WIDE_INT idx;
5162 tree field, value;
5163
5164 /* If size is zero or the target is already cleared, do nothing. */
5165 if (size == 0 || cleared)
5166 cleared = 1;
5167 /* We either clear the aggregate or indicate the value is dead. */
5168 else if ((TREE_CODE (type) == UNION_TYPE
5169 || TREE_CODE (type) == QUAL_UNION_TYPE)
5170 && ! CONSTRUCTOR_ELTS (exp))
5171 /* If the constructor is empty, clear the union. */
5172 {
5173 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5174 cleared = 1;
5175 }
5176
5177 /* If we are building a static constructor into a register,
5178 set the initial value as zero so we can fold the value into
5179 a constant. But if more than one register is involved,
5180 this probably loses. */
5181 else if (REG_P (target) && TREE_STATIC (exp)
5182 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5183 {
5184 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5185 cleared = 1;
5186 }
5187
5188 /* If the constructor has fewer fields than the structure or
5189 if we are initializing the structure to mostly zeros, clear
5190 the whole structure first. Don't do this if TARGET is a
5191 register whose mode size isn't equal to SIZE since
5192 clear_storage can't handle this case. */
5193 else if (size > 0
5194 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
5195 != fields_length (type))
5196 || mostly_zeros_p (exp))
5197 && (!REG_P (target)
5198 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5199 == size)))
5200 {
5201 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5202 cleared = 1;
5203 }
5204
5205 if (REG_P (target) && !cleared)
5206 emit_clobber (target);
5207
5208 /* Store each element of the constructor into the
5209 corresponding field of TARGET. */
5210 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5211 {
5212 enum machine_mode mode;
5213 HOST_WIDE_INT bitsize;
5214 HOST_WIDE_INT bitpos = 0;
5215 tree offset;
5216 rtx to_rtx = target;
5217
5218 /* Just ignore missing fields. We cleared the whole
5219 structure, above, if any fields are missing. */
5220 if (field == 0)
5221 continue;
5222
5223 if (cleared && initializer_zerop (value))
5224 continue;
5225
5226 if (host_integerp (DECL_SIZE (field), 1))
5227 bitsize = tree_low_cst (DECL_SIZE (field), 1);
5228 else
5229 bitsize = -1;
5230
5231 mode = DECL_MODE (field);
5232 if (DECL_BIT_FIELD (field))
5233 mode = VOIDmode;
5234
5235 offset = DECL_FIELD_OFFSET (field);
5236 if (host_integerp (offset, 0)
5237 && host_integerp (bit_position (field), 0))
5238 {
5239 bitpos = int_bit_position (field);
5240 offset = 0;
5241 }
5242 else
5243 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5244
5245 if (offset)
5246 {
5247 rtx offset_rtx;
5248
5249 offset
5250 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5251 make_tree (TREE_TYPE (exp),
5252 target));
5253
5254 offset_rtx = expand_normal (offset);
5255 gcc_assert (MEM_P (to_rtx));
5256
5257 #ifdef POINTERS_EXTEND_UNSIGNED
5258 if (GET_MODE (offset_rtx) != Pmode)
5259 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
5260 #else
5261 if (GET_MODE (offset_rtx) != ptr_mode)
5262 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
5263 #endif
5264
5265 to_rtx = offset_address (to_rtx, offset_rtx,
5266 highest_pow2_factor (offset));
5267 }
5268
5269 #ifdef WORD_REGISTER_OPERATIONS
5270 /* If this initializes a field that is smaller than a
5271 word, at the start of a word, try to widen it to a full
5272 word. This special case allows us to output C++ member
5273 function initializations in a form that the optimizers
5274 can understand. */
5275 if (REG_P (target)
5276 && bitsize < BITS_PER_WORD
5277 && bitpos % BITS_PER_WORD == 0
5278 && GET_MODE_CLASS (mode) == MODE_INT
5279 && TREE_CODE (value) == INTEGER_CST
5280 && exp_size >= 0
5281 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5282 {
5283 tree type = TREE_TYPE (value);
5284
5285 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5286 {
5287 type = lang_hooks.types.type_for_size
5288 (BITS_PER_WORD, TYPE_UNSIGNED (type));
5289 value = fold_convert (type, value);
5290 }
5291
5292 if (BYTES_BIG_ENDIAN)
5293 value
5294 = fold_build2 (LSHIFT_EXPR, type, value,
5295 build_int_cst (type,
5296 BITS_PER_WORD - bitsize));
5297 bitsize = BITS_PER_WORD;
5298 mode = word_mode;
5299 }
5300 #endif
5301
5302 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5303 && DECL_NONADDRESSABLE_P (field))
5304 {
5305 to_rtx = copy_rtx (to_rtx);
5306 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5307 }
5308
5309 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5310 value, type, cleared,
5311 get_alias_set (TREE_TYPE (field)));
5312 }
5313 break;
5314 }
5315 case ARRAY_TYPE:
5316 {
5317 tree value, index;
5318 unsigned HOST_WIDE_INT i;
5319 int need_to_clear;
5320 tree domain;
5321 tree elttype = TREE_TYPE (type);
5322 int const_bounds_p;
5323 HOST_WIDE_INT minelt = 0;
5324 HOST_WIDE_INT maxelt = 0;
5325
5326 domain = TYPE_DOMAIN (type);
5327 const_bounds_p = (TYPE_MIN_VALUE (domain)
5328 && TYPE_MAX_VALUE (domain)
5329 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5330 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5331
5332 /* If we have constant bounds for the range of the type, get them. */
5333 if (const_bounds_p)
5334 {
5335 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5336 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5337 }
5338
5339 /* If the constructor has fewer elements than the array, clear
5340 the whole array first. Similarly if this is a static
5341 constructor of a non-BLKmode object. */
5342 if (cleared)
5343 need_to_clear = 0;
5344 else if (REG_P (target) && TREE_STATIC (exp))
5345 need_to_clear = 1;
5346 else
5347 {
5348 unsigned HOST_WIDE_INT idx;
5349 tree index, value;
5350 HOST_WIDE_INT count = 0, zero_count = 0;
5351 need_to_clear = ! const_bounds_p;
5352
5353 /* This loop is a more accurate version of the loop in
5354 mostly_zeros_p (it handles RANGE_EXPR in an index). It
5355 is also needed to check for missing elements. */
5356 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5357 {
5358 HOST_WIDE_INT this_node_count;
5359
5360 if (need_to_clear)
5361 break;
5362
5363 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5364 {
5365 tree lo_index = TREE_OPERAND (index, 0);
5366 tree hi_index = TREE_OPERAND (index, 1);
5367
5368 if (! host_integerp (lo_index, 1)
5369 || ! host_integerp (hi_index, 1))
5370 {
5371 need_to_clear = 1;
5372 break;
5373 }
5374
5375 this_node_count = (tree_low_cst (hi_index, 1)
5376 - tree_low_cst (lo_index, 1) + 1);
5377 }
5378 else
5379 this_node_count = 1;
5380
5381 count += this_node_count;
5382 if (mostly_zeros_p (value))
5383 zero_count += this_node_count;
5384 }
5385
5386 /* Clear the entire array first if there are any missing
5387 elements, or if the incidence of zero elements is >=
5388 75%. */
5389 if (! need_to_clear
5390 && (count < maxelt - minelt + 1
5391 || 4 * zero_count >= 3 * count))
5392 need_to_clear = 1;
5393 }
5394
5395 if (need_to_clear && size > 0)
5396 {
5397 if (REG_P (target))
5398 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5399 else
5400 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5401 cleared = 1;
5402 }
5403
5404 if (!cleared && REG_P (target))
5405 /* Inform later passes that the old value is dead. */
5406 emit_clobber (target);
5407
5408 /* Store each element of the constructor into the
5409 corresponding element of TARGET, determined by counting the
5410 elements. */
5411 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5412 {
5413 enum machine_mode mode;
5414 HOST_WIDE_INT bitsize;
5415 HOST_WIDE_INT bitpos;
5416 rtx xtarget = target;
5417
5418 if (cleared && initializer_zerop (value))
5419 continue;
5420
5421 mode = TYPE_MODE (elttype);
5422 if (mode == BLKmode)
5423 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5424 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5425 : -1);
5426 else
5427 bitsize = GET_MODE_BITSIZE (mode);
5428
5429 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5430 {
5431 tree lo_index = TREE_OPERAND (index, 0);
5432 tree hi_index = TREE_OPERAND (index, 1);
5433 rtx index_r, pos_rtx;
5434 HOST_WIDE_INT lo, hi, count;
5435 tree position;
5436
5437 /* If the range is constant and "small", unroll the loop. */
5438 if (const_bounds_p
5439 && host_integerp (lo_index, 0)
5440 && host_integerp (hi_index, 0)
5441 && (lo = tree_low_cst (lo_index, 0),
5442 hi = tree_low_cst (hi_index, 0),
5443 count = hi - lo + 1,
5444 (!MEM_P (target)
5445 || count <= 2
5446 || (host_integerp (TYPE_SIZE (elttype), 1)
5447 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5448 <= 40 * 8)))))
5449 {
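/* (Explanatory note: the 40 * 8 bound above means we unroll only
   when the whole initialized range occupies at most 40 bytes,
   i.e. 40 * 8 bits.) */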
5450 lo -= minelt; hi -= minelt;
5451 for (; lo <= hi; lo++)
5452 {
5453 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5454
5455 if (MEM_P (target)
5456 && !MEM_KEEP_ALIAS_SET_P (target)
5457 && TREE_CODE (type) == ARRAY_TYPE
5458 && TYPE_NONALIASED_COMPONENT (type))
5459 {
5460 target = copy_rtx (target);
5461 MEM_KEEP_ALIAS_SET_P (target) = 1;
5462 }
5463
5464 store_constructor_field
5465 (target, bitsize, bitpos, mode, value, type, cleared,
5466 get_alias_set (elttype));
5467 }
5468 }
5469 else
5470 {
5471 rtx loop_start = gen_label_rtx ();
5472 rtx loop_end = gen_label_rtx ();
5473 tree exit_cond;
5474
5475 expand_normal (hi_index);
5476
5477 index = build_decl (EXPR_LOCATION (exp),
5478 VAR_DECL, NULL_TREE, domain);
5479 index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
5480 SET_DECL_RTL (index, index_r);
5481 store_expr (lo_index, index_r, 0, false);
5482
5483 /* Build the head of the loop. */
5484 do_pending_stack_adjust ();
5485 emit_label (loop_start);
5486
5487 /* Assign value to element index. */
5488 position =
5489 fold_convert (ssizetype,
5490 fold_build2 (MINUS_EXPR,
5491 TREE_TYPE (index),
5492 index,
5493 TYPE_MIN_VALUE (domain)));
5494
5495 position =
5496 size_binop (MULT_EXPR, position,
5497 fold_convert (ssizetype,
5498 TYPE_SIZE_UNIT (elttype)));
5499
5500 pos_rtx = expand_normal (position);
5501 xtarget = offset_address (target, pos_rtx,
5502 highest_pow2_factor (position));
5503 xtarget = adjust_address (xtarget, mode, 0);
5504 if (TREE_CODE (value) == CONSTRUCTOR)
5505 store_constructor (value, xtarget, cleared,
5506 bitsize / BITS_PER_UNIT);
5507 else
5508 store_expr (value, xtarget, 0, false);
5509
5510 /* Generate a conditional jump to exit the loop. */
5511 exit_cond = build2 (GE_EXPR, integer_type_node,
5512 index, hi_index);
5513 jumpif (exit_cond, loop_end);
5514
5515 /* Update the loop counter, and jump to the head of
5516 the loop. */
5517 expand_assignment (index,
5518 build2 (PLUS_EXPR, TREE_TYPE (index),
5519 index, integer_one_node),
5520 false);
5521
5522 emit_jump (loop_start);
5523
5524 /* Build the end of the loop. */
5525 emit_label (loop_end);
5526 }
5527 }
5528 else if ((index != 0 && ! host_integerp (index, 0))
5529 || ! host_integerp (TYPE_SIZE (elttype), 1))
5530 {
5531 tree position;
5532
5533 if (index == 0)
5534 index = ssize_int (1);
5535
5536 if (minelt)
5537 index = fold_convert (ssizetype,
5538 fold_build2 (MINUS_EXPR,
5539 TREE_TYPE (index),
5540 index,
5541 TYPE_MIN_VALUE (domain)));
5542
5543 position =
5544 size_binop (MULT_EXPR, index,
5545 fold_convert (ssizetype,
5546 TYPE_SIZE_UNIT (elttype)));
5547 xtarget = offset_address (target,
5548 expand_normal (position),
5549 highest_pow2_factor (position));
5550 xtarget = adjust_address (xtarget, mode, 0);
5551 store_expr (value, xtarget, 0, false);
5552 }
5553 else
5554 {
5555 if (index != 0)
5556 bitpos = ((tree_low_cst (index, 0) - minelt)
5557 * tree_low_cst (TYPE_SIZE (elttype), 1));
5558 else
5559 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5560
5561 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5562 && TREE_CODE (type) == ARRAY_TYPE
5563 && TYPE_NONALIASED_COMPONENT (type))
5564 {
5565 target = copy_rtx (target);
5566 MEM_KEEP_ALIAS_SET_P (target) = 1;
5567 }
5568 store_constructor_field (target, bitsize, bitpos, mode, value,
5569 type, cleared, get_alias_set (elttype));
5570 }
5571 }
5572 break;
5573 }
5574
5575 case VECTOR_TYPE:
5576 {
5577 unsigned HOST_WIDE_INT idx;
5578 constructor_elt *ce;
5579 int i;
5580 int need_to_clear;
5581 int icode = 0;
5582 tree elttype = TREE_TYPE (type);
5583 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5584 enum machine_mode eltmode = TYPE_MODE (elttype);
5585 HOST_WIDE_INT bitsize;
5586 HOST_WIDE_INT bitpos;
5587 rtvec vector = NULL;
5588 unsigned n_elts;
5589 alias_set_type alias;
5590
5591 gcc_assert (eltmode != BLKmode);
5592
5593 n_elts = TYPE_VECTOR_SUBPARTS (type);
5594 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5595 {
5596 enum machine_mode mode = GET_MODE (target);
5597
5598 icode = (int) optab_handler (vec_init_optab, mode)->insn_code;
5599 if (icode != CODE_FOR_nothing)
5600 {
5601 unsigned int i;
5602
5603 vector = rtvec_alloc (n_elts);
5604 for (i = 0; i < n_elts; i++)
5605 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5606 }
5607 }
5608
5609 /* If the constructor has fewer elements than the vector,
5610 clear the whole vector first. Similarly if this is a static
5611 constructor of a non-BLKmode object. */
5612 if (cleared)
5613 need_to_clear = 0;
5614 else if (REG_P (target) && TREE_STATIC (exp))
5615 need_to_clear = 1;
5616 else
5617 {
5618 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5619 tree value;
5620
5621 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
5622 {
5623 int n_elts_here = tree_low_cst
5624 (int_const_binop (TRUNC_DIV_EXPR,
5625 TYPE_SIZE (TREE_TYPE (value)),
5626 TYPE_SIZE (elttype), 0), 1);
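/* (Each constructor element contributes as many scalar elements
   as its type spans; e.g. a V2SF value inside a V4SF constructor
   counts as 2. Illustrative note.) */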
5627
5628 count += n_elts_here;
5629 if (mostly_zeros_p (value))
5630 zero_count += n_elts_here;
5631 }
5632
5633 /* Clear the entire vector first if there are any missing elements,
5634 or if the incidence of zero elements is >= 75%. */
5635 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5636 }
5637
5638 if (need_to_clear && size > 0 && !vector)
5639 {
5640 if (REG_P (target))
5641 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5642 else
5643 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5644 cleared = 1;
5645 }
5646
5647 /* Inform later passes that the old value is dead. */
5648 if (!cleared && !vector && REG_P (target))
5649 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5650
5651 if (MEM_P (target))
5652 alias = MEM_ALIAS_SET (target);
5653 else
5654 alias = get_alias_set (elttype);
5655
5656 /* Store each element of the constructor into the corresponding
5657 element of TARGET, determined by counting the elements. */
5658 for (idx = 0, i = 0;
5659 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5660 idx++, i += bitsize / elt_size)
5661 {
5662 HOST_WIDE_INT eltpos;
5663 tree value = ce->value;
5664
5665 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5666 if (cleared && initializer_zerop (value))
5667 continue;
5668
5669 if (ce->index)
5670 eltpos = tree_low_cst (ce->index, 1);
5671 else
5672 eltpos = i;
5673
5674 if (vector)
5675 {
5676 /* Vector CONSTRUCTORs should only be built from smaller
5677 vectors in the case of BLKmode vectors. */
5678 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5679 RTVEC_ELT (vector, eltpos)
5680 = expand_normal (value);
5681 }
5682 else
5683 {
5684 enum machine_mode value_mode =
5685 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5686 ? TYPE_MODE (TREE_TYPE (value))
5687 : eltmode;
5688 bitpos = eltpos * elt_size;
5689 store_constructor_field (target, bitsize, bitpos,
5690 value_mode, value, type,
5691 cleared, alias);
5692 }
5693 }
5694
5695 if (vector)
5696 emit_insn (GEN_FCN (icode)
5697 (target,
5698 gen_rtx_PARALLEL (GET_MODE (target), vector)));
5699 break;
5700 }
5701
5702 default:
5703 gcc_unreachable ();
5704 }
5705 }
5706
5707 /* Store the value of EXP (an expression tree)
5708 into a subfield of TARGET which has mode MODE and occupies
5709 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5710 If MODE is VOIDmode, it means that we are storing into a bit-field.
5711
5712 Always return const0_rtx unless we have something particular to
5713 return.
5714
5715 TYPE is the type of the underlying object,
5716
5717 ALIAS_SET is the alias set for the destination. This value will
5718 (in general) be different from that for TARGET, since TARGET is a
5719 reference to the containing structure.
5720
5721 If NONTEMPORAL is true, try generating a nontemporal store. */
5722
5723 static rtx
5724 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5725 enum machine_mode mode, tree exp, tree type,
5726 alias_set_type alias_set, bool nontemporal)
5727 {
5728 HOST_WIDE_INT width_mask = 0;
5729
5730 if (TREE_CODE (exp) == ERROR_MARK)
5731 return const0_rtx;
5732
5733 /* If we have nothing to store, do nothing unless the expression has
5734 side-effects. */
5735 if (bitsize == 0)
5736 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5737 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5738 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5739
5740 /* If we are storing into an unaligned field of an aligned union that is
5741 in a register, we may have the mode of TARGET being an integer mode but
5742 MODE == BLKmode. In that case, get an aligned object whose size and
5743 alignment are the same as TARGET and store TARGET into it (we can avoid
5744 the store if the field being stored is the entire width of TARGET). Then
5745 call ourselves recursively to store the field into a BLKmode version of
5746 that object. Finally, load from the object into TARGET. This is not
5747 very efficient in general, but should only be slightly more expensive
5748 than the otherwise-required unaligned accesses. Perhaps this can be
5749 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5750 twice, once with emit_move_insn and once via store_field. */
5751
5752 if (mode == BLKmode
5753 && (REG_P (target) || GET_CODE (target) == SUBREG))
5754 {
5755 rtx object = assign_temp (type, 0, 1, 1);
5756 rtx blk_object = adjust_address (object, BLKmode, 0);
5757
5758 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5759 emit_move_insn (object, target);
5760
5761 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set,
5762 nontemporal);
5763
5764 emit_move_insn (target, object);
5765
5766 /* We want to return the BLKmode version of the data. */
5767 return blk_object;
5768 }
5769
5770 if (GET_CODE (target) == CONCAT)
5771 {
5772 /* We're storing into a struct containing a single __complex. */
5773
5774 gcc_assert (!bitpos);
5775 return store_expr (exp, target, 0, nontemporal);
5776 }
5777
5778 /* If the structure is in a register or if the component
5779 is a bit field, we cannot use addressing to access it.
5780 Use bit-field techniques or SUBREG to store in it. */
5781
5782 if (mode == VOIDmode
5783 || (mode != BLKmode && ! direct_store[(int) mode]
5784 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5785 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5786 || REG_P (target)
5787 || GET_CODE (target) == SUBREG
5788 /* If the field isn't aligned enough to store as an ordinary memref,
5789 store it as a bit field. */
5790 || (mode != BLKmode
5791 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5792 || bitpos % GET_MODE_ALIGNMENT (mode))
5793 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5794 || (bitpos % BITS_PER_UNIT != 0)))
5795 /* If the RHS and field are a constant size and the size of the
5796 RHS isn't the same size as the bitfield, we must use bitfield
5797 operations. */
5798 || (bitsize >= 0
5799 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5800 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5801 {
5802 rtx temp;
5803 gimple nop_def;
5804
5805 /* If EXP is a NOP_EXPR of precision less than its mode, then that
5806 implies a mask operation. If the precision is the same size as
5807 the field we're storing into, that mask is redundant. This is
5808 particularly common with bit field assignments generated by the
5809 C front end. */
5810 nop_def = get_def_for_expr (exp, NOP_EXPR);
5811 if (nop_def)
5812 {
5813 tree type = TREE_TYPE (exp);
5814 if (INTEGRAL_TYPE_P (type)
5815 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5816 && bitsize == TYPE_PRECISION (type))
5817 {
5818 tree op = gimple_assign_rhs1 (nop_def);
5819 type = TREE_TYPE (op);
5820 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5821 exp = op;
5822 }
5823 }
5824
5825 temp = expand_normal (exp);
5826
5827 /* If BITSIZE is narrower than the size of the type of EXP
5828 we will be narrowing TEMP. Normally, what's wanted are the
5829 low-order bits. However, if EXP's type is a record and this is
5830 a big-endian machine, we want the upper BITSIZE bits. */
5831 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5832 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5833 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5834 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5835 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5836 - bitsize),
5837 NULL_RTX, 1);
5838
5839 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5840 MODE. */
5841 if (mode != VOIDmode && mode != BLKmode
5842 && mode != TYPE_MODE (TREE_TYPE (exp)))
5843 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5844
5845 /* If the modes of TEMP and TARGET are both BLKmode, both
5846 must be in memory and BITPOS must be aligned on a byte
5847 boundary. If so, we simply do a block copy. Likewise
5848 for a BLKmode-like TARGET. */
5849 if (GET_MODE (temp) == BLKmode
5850 && (GET_MODE (target) == BLKmode
5851 || (MEM_P (target)
5852 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
5853 && (bitpos % BITS_PER_UNIT) == 0
5854 && (bitsize % BITS_PER_UNIT) == 0)))
5855 {
5856 gcc_assert (MEM_P (target) && MEM_P (temp)
5857 && (bitpos % BITS_PER_UNIT) == 0);
5858
5859 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5860 emit_block_move (target, temp,
5861 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5862 / BITS_PER_UNIT),
5863 BLOCK_OP_NORMAL);
5864
5865 return const0_rtx;
5866 }
5867
5868 /* Store the value in the bitfield. */
5869 store_bit_field (target, bitsize, bitpos, mode, temp);
5870
5871 return const0_rtx;
5872 }
5873 else
5874 {
5875 /* Now build a reference to just the desired component. */
5876 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5877
5878 if (to_rtx == target)
5879 to_rtx = copy_rtx (to_rtx);
5880
5881 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5882 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5883 set_mem_alias_set (to_rtx, alias_set);
5884
5885 return store_expr (exp, to_rtx, 0, nontemporal);
5886 }
5887 }
5888 \f
5889 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5890 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5891 codes and find the ultimate containing object, which we return.
5892
5893 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5894 bit position, and *PUNSIGNEDP to the signedness of the field.
5895 If the position of the field is variable, we store a tree
5896 giving the variable offset (in units) in *POFFSET.
5897 This offset is in addition to the bit position.
5898 If the position is not variable, we store 0 in *POFFSET.
5899
5900 If any of the extraction expressions is volatile,
5901 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5902
5903 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
5904 Otherwise, it is a mode that can be used to access the field.
5905
5906 If the field describes a variable-sized object, *PMODE is set to
5907 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
5908 this case, but the address of the object can be found.
5909
5910 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5911 look through nodes that serve as markers of a greater alignment than
5912 the one that can be deduced from the expression. These nodes make it
5913 possible for front-ends to prevent temporaries from being created by
5914 the middle-end on alignment considerations. For that purpose, the
5915 normal operating mode at high-level is to always pass FALSE so that
5916 the ultimate containing object is really returned; moreover, the
5917 associated predicate handled_component_p will always return TRUE
5918 on these nodes, thus indicating that they are essentially handled
5919 by get_inner_reference. TRUE should only be passed when the caller
5920 is scanning the expression in order to build another representation
5921 and specifically knows how to handle these nodes; as such, this is
5922 the normal operating mode in the RTL expanders. */
5923
5924 tree
5925 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5926 HOST_WIDE_INT *pbitpos, tree *poffset,
5927 enum machine_mode *pmode, int *punsignedp,
5928 int *pvolatilep, bool keep_aligning)
5929 {
5930 tree size_tree = 0;
5931 enum machine_mode mode = VOIDmode;
5932 bool blkmode_bitfield = false;
5933 tree offset = size_zero_node;
5934 tree bit_offset = bitsize_zero_node;
5935
5936 /* First get the mode, signedness, and size. We do this from just the
5937 outermost expression. */
5938 if (TREE_CODE (exp) == COMPONENT_REF)
5939 {
5940 tree field = TREE_OPERAND (exp, 1);
5941 size_tree = DECL_SIZE (field);
5942 if (!DECL_BIT_FIELD (field))
5943 mode = DECL_MODE (field);
5944 else if (DECL_MODE (field) == BLKmode)
5945 blkmode_bitfield = true;
5946
5947 *punsignedp = DECL_UNSIGNED (field);
5948 }
5949 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5950 {
5951 size_tree = TREE_OPERAND (exp, 1);
5952 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
5953 || TYPE_UNSIGNED (TREE_TYPE (exp)));
5954
5955 /* For vector types, with the correct size of access, use the mode of
5956 inner type. */
5957 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
5958 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
5959 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
5960 mode = TYPE_MODE (TREE_TYPE (exp));
5961 }
5962 else
5963 {
5964 mode = TYPE_MODE (TREE_TYPE (exp));
5965 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5966
5967 if (mode == BLKmode)
5968 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5969 else
5970 *pbitsize = GET_MODE_BITSIZE (mode);
5971 }
5972
5973 if (size_tree != 0)
5974 {
5975 if (! host_integerp (size_tree, 1))
5976 mode = BLKmode, *pbitsize = -1;
5977 else
5978 *pbitsize = tree_low_cst (size_tree, 1);
5979 }
5980
5981 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5982 and find the ultimate containing object. */
5983 while (1)
5984 {
5985 switch (TREE_CODE (exp))
5986 {
5987 case BIT_FIELD_REF:
5988 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5989 TREE_OPERAND (exp, 2));
5990 break;
5991
5992 case COMPONENT_REF:
5993 {
5994 tree field = TREE_OPERAND (exp, 1);
5995 tree this_offset = component_ref_field_offset (exp);
5996
5997 /* If this field hasn't been filled in yet, don't go past it.
5998 This should only happen when folding expressions made during
5999 type construction. */
6000 if (this_offset == 0)
6001 break;
6002
6003 offset = size_binop (PLUS_EXPR, offset, this_offset);
6004 bit_offset = size_binop (PLUS_EXPR, bit_offset,
6005 DECL_FIELD_BIT_OFFSET (field));
6006
6007 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
6008 }
6009 break;
6010
6011 case ARRAY_REF:
6012 case ARRAY_RANGE_REF:
6013 {
6014 tree index = TREE_OPERAND (exp, 1);
6015 tree low_bound = array_ref_low_bound (exp);
6016 tree unit_size = array_ref_element_size (exp);
6017
6018 /* We assume all arrays have sizes that are a multiple of a byte.
6019 First subtract the lower bound, if any, in the type of the
6020 index, then convert to sizetype and multiply by the size of
6021 the array element. */
6022 if (! integer_zerop (low_bound))
6023 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6024 index, low_bound);
6025
6026 offset = size_binop (PLUS_EXPR, offset,
6027 size_binop (MULT_EXPR,
6028 fold_convert (sizetype, index),
6029 unit_size));
6030 }
6031 break;
6032
6033 case REALPART_EXPR:
6034 break;
6035
6036 case IMAGPART_EXPR:
6037 bit_offset = size_binop (PLUS_EXPR, bit_offset,
6038 bitsize_int (*pbitsize));
6039 break;
6040
6041 case VIEW_CONVERT_EXPR:
6042 if (keep_aligning && STRICT_ALIGNMENT
6043 && (TYPE_ALIGN (TREE_TYPE (exp))
6044 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6045 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6046 < BIGGEST_ALIGNMENT)
6047 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6048 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6049 goto done;
6050 break;
6051
6052 default:
6053 goto done;
6054 }
6055
6056 /* If any reference in the chain is volatile, the effect is volatile. */
6057 if (TREE_THIS_VOLATILE (exp))
6058 *pvolatilep = 1;
6059
6060 exp = TREE_OPERAND (exp, 0);
6061 }
6062 done:
6063
6064 /* If OFFSET is constant, see if we can return the whole thing as a
6065 constant bit position. Make sure to handle overflow during
6066 this conversion. */
6067 if (host_integerp (offset, 0))
6068 {
6069 double_int tem = double_int_mul (tree_to_double_int (offset),
6070 uhwi_to_double_int (BITS_PER_UNIT));
6071 tem = double_int_add (tem, tree_to_double_int (bit_offset));
6072 if (double_int_fits_in_shwi_p (tem))
6073 {
6074 *pbitpos = double_int_to_shwi (tem);
6075 *poffset = offset = NULL_TREE;
6076 }
6077 }
6078
6079 /* Otherwise, split it up. */
6080 if (offset)
6081 {
6082 *pbitpos = tree_low_cst (bit_offset, 0);
6083 *poffset = offset;
6084 }
6085
6086 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
6087 if (mode == VOIDmode
6088 && blkmode_bitfield
6089 && (*pbitpos % BITS_PER_UNIT) == 0
6090 && (*pbitsize % BITS_PER_UNIT) == 0)
6091 *pmode = BLKmode;
6092 else
6093 *pmode = mode;
6094
6095 return exp;
6096 }
6097
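/* A worked example for get_inner_reference (illustrative only):
   given

     struct S { int i; char c[8]; } *p;

   the reference p->c[k] with a variable K peels an ARRAY_REF and a
   COMPONENT_REF and returns the base *p with *PBITSIZE == 8,
   *PBITPOS == 0 and, assuming a 32-bit int and no padding,
   *POFFSET == (sizetype) k * 1 + 4 bytes; the constant field offset
   is folded into the variable offset tree because the total offset
   is not constant.  Were K the constant 2 instead, the result would
   be *POFFSET == 0 and *PBITPOS == 48.  */
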
6098 /* Given an expression EXP that may be a COMPONENT_REF, an ARRAY_REF or an
6099 ARRAY_RANGE_REF, look for whether EXP or any nested component-ref within
6100 EXP is marked as PACKED. */
6101
6102 bool
6103 contains_packed_reference (const_tree exp)
6104 {
6105 bool packed_p = false;
6106
6107 while (1)
6108 {
6109 switch (TREE_CODE (exp))
6110 {
6111 case COMPONENT_REF:
6112 {
6113 tree field = TREE_OPERAND (exp, 1);
6114 packed_p = DECL_PACKED (field)
6115 || TYPE_PACKED (TREE_TYPE (field))
6116 || TYPE_PACKED (TREE_TYPE (exp));
6117 if (packed_p)
6118 goto done;
6119 }
6120 break;
6121
6122 case BIT_FIELD_REF:
6123 case ARRAY_REF:
6124 case ARRAY_RANGE_REF:
6125 case REALPART_EXPR:
6126 case IMAGPART_EXPR:
6127 case VIEW_CONVERT_EXPR:
6128 break;
6129
6130 default:
6131 goto done;
6132 }
6133 exp = TREE_OPERAND (exp, 0);
6134 }
6135 done:
6136 return packed_p;
6137 }
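
/* For instance (illustrative only): with

     struct __attribute__ ((packed)) P { char c; int i; } *p;

   contains_packed_reference returns true for a COMPONENT_REF such as
   p->i, since DECL_PACKED is set on the field I; callers use this to
   anticipate possibly misaligned accesses.  */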
6138
6139 /* Return a tree of sizetype representing the size, in bytes, of the element
6140 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6141
6142 tree
6143 array_ref_element_size (tree exp)
6144 {
6145 tree aligned_size = TREE_OPERAND (exp, 3);
6146 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6147 location_t loc = EXPR_LOCATION (exp);
6148
6149 /* If a size was specified in the ARRAY_REF, it's the size measured
6150 in alignment units of the element type. So multiply by that value. */
6151 if (aligned_size)
6152 {
6153 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6154 sizetype from another type of the same width and signedness. */
6155 if (TREE_TYPE (aligned_size) != sizetype)
6156 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
6157 return size_binop_loc (loc, MULT_EXPR, aligned_size,
6158 size_int (TYPE_ALIGN_UNIT (elmt_type)));
6159 }
6160
6161 /* Otherwise, take the size from that of the element type. Substitute
6162 any PLACEHOLDER_EXPR that we have. */
6163 else
6164 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
6165 }
6166
6167 /* Return a tree representing the lower bound of the array mentioned in
6168 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6169
6170 tree
6171 array_ref_low_bound (tree exp)
6172 {
6173 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6174
6175 /* If a lower bound is specified in EXP, use it. */
6176 if (TREE_OPERAND (exp, 2))
6177 return TREE_OPERAND (exp, 2);
6178
6179 /* Otherwise, if there is a domain type and it has a lower bound, use it,
6180 substituting for a PLACEHOLDER_EXPR as needed. */
6181 if (domain_type && TYPE_MIN_VALUE (domain_type))
6182 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
6183
6184 /* Otherwise, return a zero of the appropriate type. */
6185 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
6186 }
6187
6188 /* Return a tree representing the upper bound of the array mentioned in
6189 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6190
6191 tree
6192 array_ref_up_bound (tree exp)
6193 {
6194 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6195
6196 /* If there is a domain type and it has an upper bound, use it, substituting
6197 for a PLACEHOLDER_EXPR as needed. */
6198 if (domain_type && TYPE_MAX_VALUE (domain_type))
6199 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
6200
6201 /* Otherwise fail. */
6202 return NULL_TREE;
6203 }
6204
6205 /* Return a tree representing the offset, in bytes, of the field referenced
6206 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
6207
6208 tree
6209 component_ref_field_offset (tree exp)
6210 {
6211 tree aligned_offset = TREE_OPERAND (exp, 2);
6212 tree field = TREE_OPERAND (exp, 1);
6213 location_t loc = EXPR_LOCATION (exp);
6214
6215 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
6216 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
6217 value. */
6218 if (aligned_offset)
6219 {
6220 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6221 sizetype from another type of the same width and signedness. */
6222 if (TREE_TYPE (aligned_offset) != sizetype)
6223 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
6224 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
6225 size_int (DECL_OFFSET_ALIGN (field)
6226 / BITS_PER_UNIT));
6227 }
6228
6229 /* Otherwise, take the offset from that of the field. Substitute
6230 any PLACEHOLDER_EXPR that we have. */
6231 else
6232 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
6233 }
6234
6235 /* Alignment in bits the TARGET of an assignment may be assumed to have. */
6236
6237 static unsigned HOST_WIDE_INT
6238 target_align (const_tree target)
6239 {
6240 /* We might have a chain of nested references with intermediate misaligning
6241 bit-field components, so we need to recurse to find out. */
6242
6243 unsigned HOST_WIDE_INT this_align, outer_align;
6244
6245 switch (TREE_CODE (target))
6246 {
6247 case BIT_FIELD_REF:
6248 return 1;
6249
6250 case COMPONENT_REF:
6251 this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
6252 outer_align = target_align (TREE_OPERAND (target, 0));
6253 return MIN (this_align, outer_align);
6254
6255 case ARRAY_REF:
6256 case ARRAY_RANGE_REF:
6257 this_align = TYPE_ALIGN (TREE_TYPE (target));
6258 outer_align = target_align (TREE_OPERAND (target, 0));
6259 return MIN (this_align, outer_align);
6260
6261 CASE_CONVERT:
6262 case NON_LVALUE_EXPR:
6263 case VIEW_CONVERT_EXPR:
6264 this_align = TYPE_ALIGN (TREE_TYPE (target));
6265 outer_align = target_align (TREE_OPERAND (target, 0));
6266 return MAX (this_align, outer_align);
6267
6268 default:
6269 return TYPE_ALIGN (TREE_TYPE (target));
6270 }
6271 }
6272
6273 \f
6274 /* Given an rtx VALUE that may contain additions and multiplications, return
6275 an equivalent value that just refers to a register, memory, or constant.
6276 This is done by generating instructions to perform the arithmetic and
6277 returning a pseudo-register containing the value.
6278
6279 The returned value may be a REG, SUBREG, MEM or constant. */
6280
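/* A minimal usage sketch (hypothetical caller): to turn the sum of a
   pseudo X and 8 into something an insn can use directly, one could
   write

     rtx t = force_operand (gen_rtx_PLUS (Pmode, x, GEN_INT (8)),
                            NULL_RTX);

   which emits the addition and hands back a pseudo holding it.  */
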
6281 rtx
6282 force_operand (rtx value, rtx target)
6283 {
6284 rtx op1, op2;
6285 /* Use subtarget as the target for operand 0 of a binary operation. */
6286 rtx subtarget = get_subtarget (target);
6287 enum rtx_code code = GET_CODE (value);
6288
6289 /* Check for a subreg applied to an expression produced by the loop optimizer. */
6290 if (code == SUBREG
6291 && !REG_P (SUBREG_REG (value))
6292 && !MEM_P (SUBREG_REG (value)))
6293 {
6294 value
6295 = simplify_gen_subreg (GET_MODE (value),
6296 force_reg (GET_MODE (SUBREG_REG (value)),
6297 force_operand (SUBREG_REG (value),
6298 NULL_RTX)),
6299 GET_MODE (SUBREG_REG (value)),
6300 SUBREG_BYTE (value));
6301 code = GET_CODE (value);
6302 }
6303
6304 /* Check for a PIC address load. */
6305 if ((code == PLUS || code == MINUS)
6306 && XEXP (value, 0) == pic_offset_table_rtx
6307 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
6308 || GET_CODE (XEXP (value, 1)) == LABEL_REF
6309 || GET_CODE (XEXP (value, 1)) == CONST))
6310 {
6311 if (!subtarget)
6312 subtarget = gen_reg_rtx (GET_MODE (value));
6313 emit_move_insn (subtarget, value);
6314 return subtarget;
6315 }
6316
6317 if (ARITHMETIC_P (value))
6318 {
6319 op2 = XEXP (value, 1);
6320 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
6321 subtarget = 0;
6322 if (code == MINUS && CONST_INT_P (op2))
6323 {
6324 code = PLUS;
6325 op2 = negate_rtx (GET_MODE (value), op2);
6326 }
6327
6328 /* Check for an addition with OP2 a constant integer and our first
6329 operand a PLUS of a virtual register and something else. In that
6330 case, we want to emit the sum of the virtual register and the
6331 constant first and then add the other value. This allows virtual
6332 register instantiation to simply modify the constant rather than
6333 creating another one around this addition. */
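      /* E.g. (plus (plus (reg virtual-stack-vars) (reg A)) (const_int 4))
         is computed as T = virtual-stack-vars + 4 and then T + A, so
         instantiation can later fold the 4 into the frame offset
         (illustrative note).  */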
6334 if (code == PLUS && CONST_INT_P (op2)
6335 && GET_CODE (XEXP (value, 0)) == PLUS
6336 && REG_P (XEXP (XEXP (value, 0), 0))
6337 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
6338 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
6339 {
6340 rtx temp = expand_simple_binop (GET_MODE (value), code,
6341 XEXP (XEXP (value, 0), 0), op2,
6342 subtarget, 0, OPTAB_LIB_WIDEN);
6343 return expand_simple_binop (GET_MODE (value), code, temp,
6344 force_operand (XEXP (XEXP (value,
6345 0), 1), 0),
6346 target, 0, OPTAB_LIB_WIDEN);
6347 }
6348
6349 op1 = force_operand (XEXP (value, 0), subtarget);
6350 op2 = force_operand (op2, NULL_RTX);
6351 switch (code)
6352 {
6353 case MULT:
6354 return expand_mult (GET_MODE (value), op1, op2, target, 1);
6355 case DIV:
6356 if (!INTEGRAL_MODE_P (GET_MODE (value)))
6357 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6358 target, 1, OPTAB_LIB_WIDEN);
6359 else
6360 return expand_divmod (0,
6361 FLOAT_MODE_P (GET_MODE (value))
6362 ? RDIV_EXPR : TRUNC_DIV_EXPR,
6363 GET_MODE (value), op1, op2, target, 0);
6364 case MOD:
6365 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6366 target, 0);
6367 case UDIV:
6368 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6369 target, 1);
6370 case UMOD:
6371 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6372 target, 1);
6373 case ASHIFTRT:
6374 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6375 target, 0, OPTAB_LIB_WIDEN);
6376 default:
6377 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6378 target, 1, OPTAB_LIB_WIDEN);
6379 }
6380 }
6381 if (UNARY_P (value))
6382 {
6383 if (!target)
6384 target = gen_reg_rtx (GET_MODE (value));
6385 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6386 switch (code)
6387 {
6388 case ZERO_EXTEND:
6389 case SIGN_EXTEND:
6390 case TRUNCATE:
6391 case FLOAT_EXTEND:
6392 case FLOAT_TRUNCATE:
6393 convert_move (target, op1, code == ZERO_EXTEND);
6394 return target;
6395
6396 case FIX:
6397 case UNSIGNED_FIX:
6398 expand_fix (target, op1, code == UNSIGNED_FIX);
6399 return target;
6400
6401 case FLOAT:
6402 case UNSIGNED_FLOAT:
6403 expand_float (target, op1, code == UNSIGNED_FLOAT);
6404 return target;
6405
6406 default:
6407 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6408 }
6409 }
6410
6411 #ifdef INSN_SCHEDULING
6412 /* On machines that have insn scheduling, we want all memory references to be
6413 explicit, so we need to deal with such paradoxical SUBREGs. */
6414 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
6415 && (GET_MODE_SIZE (GET_MODE (value))
6416 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6417 value
6418 = simplify_gen_subreg (GET_MODE (value),
6419 force_reg (GET_MODE (SUBREG_REG (value)),
6420 force_operand (SUBREG_REG (value),
6421 NULL_RTX)),
6422 GET_MODE (SUBREG_REG (value)),
6423 SUBREG_BYTE (value));
6424 #endif
6425
6426 return value;
6427 }
6428 \f
6429 /* Subroutine of expand_expr: return nonzero iff there is no way that
6430 EXP can reference X, which is being modified. TOP_P is nonzero if this
6431 call is going to be used to determine whether we need a temporary
6432 for EXP, as opposed to a recursive call to this function.
6433
6434 It is always safe for this routine to return zero since it merely
6435 searches for optimization opportunities. */
6436
6437 int
6438 safe_from_p (const_rtx x, tree exp, int top_p)
6439 {
6440 rtx exp_rtl = 0;
6441 int i, nops;
6442
6443 if (x == 0
6444 /* If EXP has varying size, we MUST use a target since we currently
6445 have no way of allocating temporaries of variable size
6446 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6447 So we assume here that something at a higher level has prevented a
6448 clash. This is somewhat bogus, but the best we can do. Only
6449 do this when X is BLKmode and when we are at the top level. */
6450 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6451 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6452 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6453 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6454 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6455 != INTEGER_CST)
6456 && GET_MODE (x) == BLKmode)
6457 /* If X is in the outgoing argument area, it is always safe. */
6458 || (MEM_P (x)
6459 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6460 || (GET_CODE (XEXP (x, 0)) == PLUS
6461 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6462 return 1;
6463
6464 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6465 find the underlying pseudo. */
6466 if (GET_CODE (x) == SUBREG)
6467 {
6468 x = SUBREG_REG (x);
6469 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6470 return 0;
6471 }
6472
6473 /* Now look at our tree code and possibly recurse. */
6474 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6475 {
6476 case tcc_declaration:
6477 exp_rtl = DECL_RTL_IF_SET (exp);
6478 break;
6479
6480 case tcc_constant:
6481 return 1;
6482
6483 case tcc_exceptional:
6484 if (TREE_CODE (exp) == TREE_LIST)
6485 {
6486 while (1)
6487 {
6488 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6489 return 0;
6490 exp = TREE_CHAIN (exp);
6491 if (!exp)
6492 return 1;
6493 if (TREE_CODE (exp) != TREE_LIST)
6494 return safe_from_p (x, exp, 0);
6495 }
6496 }
6497 else if (TREE_CODE (exp) == CONSTRUCTOR)
6498 {
6499 constructor_elt *ce;
6500 unsigned HOST_WIDE_INT idx;
6501
6502 for (idx = 0;
6503 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
6504 idx++)
6505 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
6506 || !safe_from_p (x, ce->value, 0))
6507 return 0;
6508 return 1;
6509 }
6510 else if (TREE_CODE (exp) == ERROR_MARK)
6511 return 1; /* An already-visited SAVE_EXPR? */
6512 else
6513 return 0;
6514
6515 case tcc_statement:
6516 /* The only case we look at here is the DECL_INITIAL inside a
6517 DECL_EXPR. */
6518 return (TREE_CODE (exp) != DECL_EXPR
6519 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
6520 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
6521 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
6522
6523 case tcc_binary:
6524 case tcc_comparison:
6525 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6526 return 0;
6527 /* Fall through. */
6528
6529 case tcc_unary:
6530 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6531
6532 case tcc_expression:
6533 case tcc_reference:
6534 case tcc_vl_exp:
6535 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6536 the expression. If it is set, we conflict iff we are that rtx or
6537 both are in memory. Otherwise, we check all operands of the
6538 expression recursively. */
6539
6540 switch (TREE_CODE (exp))
6541 {
6542 case ADDR_EXPR:
6543 /* If the operand is static or we are static, we can't conflict.
6544 Likewise if we don't conflict with the operand at all. */
6545 if (staticp (TREE_OPERAND (exp, 0))
6546 || TREE_STATIC (exp)
6547 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6548 return 1;
6549
6550 /* Otherwise, the only way this can conflict is if we are taking
6551 the address of a DECL whose address is part of X, which is
6552 very rare. */
6553 exp = TREE_OPERAND (exp, 0);
6554 if (DECL_P (exp))
6555 {
6556 if (!DECL_RTL_SET_P (exp)
6557 || !MEM_P (DECL_RTL (exp)))
6558 return 0;
6559 else
6560 exp_rtl = XEXP (DECL_RTL (exp), 0);
6561 }
6562 break;
6563
6564 case MISALIGNED_INDIRECT_REF:
6565 case ALIGN_INDIRECT_REF:
6566 case INDIRECT_REF:
6567 if (MEM_P (x)
6568 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6569 get_alias_set (exp)))
6570 return 0;
6571 break;
6572
6573 case CALL_EXPR:
6574 /* Assume that the call will clobber all hard registers and
6575 all of memory. */
6576 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6577 || MEM_P (x))
6578 return 0;
6579 break;
6580
6581 case WITH_CLEANUP_EXPR:
6582 case CLEANUP_POINT_EXPR:
6583 /* Lowered by gimplify.c. */
6584 gcc_unreachable ();
6585
6586 case SAVE_EXPR:
6587 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6588
6589 default:
6590 break;
6591 }
6592
6593 /* If we have an rtx, we do not need to scan our operands. */
6594 if (exp_rtl)
6595 break;
6596
6597 nops = TREE_OPERAND_LENGTH (exp);
6598 for (i = 0; i < nops; i++)
6599 if (TREE_OPERAND (exp, i) != 0
6600 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6601 return 0;
6602
6603 break;
6604
6605 case tcc_type:
6606 /* Should never get a type here. */
6607 gcc_unreachable ();
6608 }
6609
6610 /* If we have an rtl, find any enclosed object. Then see if we conflict
6611 with it. */
6612 if (exp_rtl)
6613 {
6614 if (GET_CODE (exp_rtl) == SUBREG)
6615 {
6616 exp_rtl = SUBREG_REG (exp_rtl);
6617 if (REG_P (exp_rtl)
6618 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6619 return 0;
6620 }
6621
6622 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6623 are memory and they conflict. */
6624 return ! (rtx_equal_p (x, exp_rtl)
6625 || (MEM_P (x) && MEM_P (exp_rtl)
6626 && true_dependence (exp_rtl, VOIDmode, x,
6627 rtx_addr_varies_p)));
6628 }
6629
6630 /* If we reach here, it is safe. */
6631 return 1;
6632 }
6633
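/* Usage sketch (illustrative): before expanding B directly into the
   rtx already chosen for A in an assignment A = B, the expander can
   ask safe_from_p (DECL_RTL (a), b, 1); a zero answer makes it
   evaluate B into a fresh temporary instead.  */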
6634 \f
6635 /* Return the highest power of two that EXP is known to be a multiple of.
6636 This is used in updating alignment of MEMs in array references. */
6637
6638 unsigned HOST_WIDE_INT
6639 highest_pow2_factor (const_tree exp)
6640 {
6641 unsigned HOST_WIDE_INT c0, c1;
6642
6643 switch (TREE_CODE (exp))
6644 {
6645 case INTEGER_CST:
6646 /* We can find the lowest bit that's a one. If the low
6647 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6648 We need to handle this case since we can find it in a COND_EXPR,
6649 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6650 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6651 later ICE. */
6652 if (TREE_OVERFLOW (exp))
6653 return BIGGEST_ALIGNMENT;
6654 else
6655 {
6656 /* Note: tree_low_cst is intentionally not used here,
6657 we don't care about the upper bits. */
6658 c0 = TREE_INT_CST_LOW (exp);
6659 c0 &= -c0;
6660 return c0 ? c0 : BIGGEST_ALIGNMENT;
6661 }
6662 break;
6663
6664 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6665 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6666 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6667 return MIN (c0, c1);
6668
6669 case MULT_EXPR:
6670 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6671 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6672 return c0 * c1;
6673
6674 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6675 case CEIL_DIV_EXPR:
6676 if (integer_pow2p (TREE_OPERAND (exp, 1))
6677 && host_integerp (TREE_OPERAND (exp, 1), 1))
6678 {
6679 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6680 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6681 return MAX (1, c0 / c1);
6682 }
6683 break;
6684
6685 case BIT_AND_EXPR:
6686 /* The highest power of two of a bit-and expression is the maximum of
6687 that of its operands. We typically get here for a complex LHS and
6688 a constant negative power of two on the RHS to force an explicit
6689 alignment, so don't bother looking at the LHS. */
6690 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6691
6692 CASE_CONVERT:
6693 case SAVE_EXPR:
6694 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6695
6696 case COMPOUND_EXPR:
6697 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6698
6699 case COND_EXPR:
6700 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6701 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6702 return MIN (c0, c1);
6703
6704 default:
6705 break;
6706 }
6707
6708 return 1;
6709 }
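
/* Worked examples (illustrative): for the INTEGER_CST 24 the result
   is 24 & -24 == 8; for an expression such as I * 4 the MULT_EXPR
   case returns 1 * 4 == 4, an unknown operand contributing only a
   factor of 1.  */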
6710
6711 /* Similar, except that the alignment requirements of TARGET are
6712 taken into account. Assume it is at least as aligned as its
6713 type, unless it is a COMPONENT_REF in which case the layout of
6714 the structure gives the alignment. */
6715
6716 static unsigned HOST_WIDE_INT
6717 highest_pow2_factor_for_target (const_tree target, const_tree exp)
6718 {
6719 unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
6720 unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
6721
6722 return MAX (factor, talign);
6723 }
6724 \f
6725 /* Return &VAR expression for emulated thread local VAR. */
6726
6727 static tree
6728 emutls_var_address (tree var)
6729 {
6730 tree emuvar = emutls_decl (var);
6731 tree fn = built_in_decls [BUILT_IN_EMUTLS_GET_ADDRESS];
6732 tree arg = build_fold_addr_expr_with_type (emuvar, ptr_type_node);
6733 tree arglist = build_tree_list (NULL_TREE, arg);
6734 tree call = build_function_call_expr (UNKNOWN_LOCATION, fn, arglist);
6735 return fold_convert (build_pointer_type (TREE_TYPE (var)), call);
6736 }
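
/* The tree built above amounts to roughly

     (T *) __emutls_get_address (&__emutls_v.var)

   where T is VAR's type and "__emutls_v.var" stands for the emitted
   control variable; the exact decoration of that name is
   target-dependent (sketch only).  */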
6737 \f
6738
6739 /* Subroutine of expand_expr. Expand the two operands of a binary
6740 expression, EXP0 and EXP1, placing the results in OP0 and OP1.
6741 The value may be stored in TARGET if TARGET is nonzero. The
6742 MODIFIER argument is as documented by expand_expr. */
6743
6744 static void
6745 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6746 enum expand_modifier modifier)
6747 {
6748 if (! safe_from_p (target, exp1, 1))
6749 target = 0;
6750 if (operand_equal_p (exp0, exp1, 0))
6751 {
6752 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6753 *op1 = copy_rtx (*op0);
6754 }
6755 else
6756 {
6757 /* If we need to preserve evaluation order, copy exp0 into its own
6758 temporary variable so that it can't be clobbered by exp1. */
6759 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6760 exp0 = save_expr (exp0);
6761 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6762 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6763 }
6764 }
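
/* E.g. a binary expander calls
     expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                      subtarget, &op0, &op1, EXPAND_NORMAL);
   and when both operands are the same tree only one of them is
   expanded, the other rtx being shared via copy_rtx (illustrative
   note).  */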
6765
6766 \f
6767 /* Return a MEM that contains constant EXP. DEFER is as for
6768 output_constant_def and MODIFIER is as for expand_expr. */
6769
6770 static rtx
6771 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
6772 {
6773 rtx mem;
6774
6775 mem = output_constant_def (exp, defer);
6776 if (modifier != EXPAND_INITIALIZER)
6777 mem = use_anchored_address (mem);
6778 return mem;
6779 }
6780
6781 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6782 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6783
6784 static rtx
6785 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6786 enum expand_modifier modifier)
6787 {
6788 rtx result, subtarget;
6789 tree inner, offset;
6790 HOST_WIDE_INT bitsize, bitpos;
6791 int volatilep, unsignedp;
6792 enum machine_mode mode1;
6793
6794 /* If we are taking the address of a constant and are at the top level,
6795 we have to use output_constant_def since we can't call force_const_mem
6796 at top level. */
6797 /* ??? This should be considered a front-end bug. We should not be
6798 generating ADDR_EXPR of something that isn't an LVALUE. The only
6799 exception here is STRING_CST. */
6800 if (CONSTANT_CLASS_P (exp))
6801 return XEXP (expand_expr_constant (exp, 0, modifier), 0);
6802
6803 /* Everything must be something allowed by is_gimple_addressable. */
6804 switch (TREE_CODE (exp))
6805 {
6806 case INDIRECT_REF:
6807 /* This case will happen via recursion for &a->b. */
6808 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6809
6810 case CONST_DECL:
6811 /* Recurse and make the output_constant_def clause above handle this. */
6812 return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
6813 tmode, modifier);
6814
6815 case REALPART_EXPR:
6816 /* The real part of the complex number is always first, therefore
6817 the address is the same as the address of the parent object. */
6818 offset = 0;
6819 bitpos = 0;
6820 inner = TREE_OPERAND (exp, 0);
6821 break;
6822
6823 case IMAGPART_EXPR:
6824 /* The imaginary part of the complex number is always second.
6825 The expression is therefore always offset by the size of the
6826 scalar type. */
6827 offset = 0;
6828 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6829 inner = TREE_OPERAND (exp, 0);
6830 break;
6831
6832 case VAR_DECL:
6833 /* TLS emulation hook - replace __thread VAR's &VAR with
6834 __emutls_get_address (&_emutls.VAR). */
6835 if (! targetm.have_tls
6836 && TREE_CODE (exp) == VAR_DECL
6837 && DECL_THREAD_LOCAL_P (exp))
6838 {
6839 exp = emutls_var_address (exp);
6840 return expand_expr (exp, target, tmode, modifier);
6841 }
6842 /* Fall through. */
6843
6844 default:
6845 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6846 expand_expr, as that can have various side effects; LABEL_DECLs for
6847 example, may not have their DECL_RTL set yet. Expand the rtl of
6848 CONSTRUCTORs too, which should yield a memory reference for the
6849 constructor's contents. Assume language specific tree nodes can
6850 be expanded in some interesting way. */
6851 gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
6852 if (DECL_P (exp)
6853 || TREE_CODE (exp) == CONSTRUCTOR
6854 || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
6855 {
6856 result = expand_expr (exp, target, tmode,
6857 modifier == EXPAND_INITIALIZER
6858 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6859
6860 /* If the DECL isn't in memory, then the DECL wasn't properly
6861 marked TREE_ADDRESSABLE, which will be either a front-end
6862 or a tree optimizer bug. */
6863 gcc_assert (MEM_P (result));
6864 result = XEXP (result, 0);
6865
6866 /* ??? Is this needed anymore? */
6867 if (DECL_P (exp) && TREE_USED (exp))
6868 {
6869 assemble_external (exp);
6870 TREE_USED (exp) = 1;
6871 }
6872
6873 if (modifier != EXPAND_INITIALIZER
6874 && modifier != EXPAND_CONST_ADDRESS)
6875 result = force_operand (result, target);
6876 return result;
6877 }
6878
6879 /* Pass FALSE as the last argument to get_inner_reference although
6880 we are expanding to RTL. The rationale is that we know how to
6881 handle "aligning nodes" here: we can just bypass them because
6882 they won't change the final object whose address will be returned
6883 (they actually exist only for that purpose). */
6884 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6885 &mode1, &unsignedp, &volatilep, false);
6886 break;
6887 }
6888
6889 /* We must have made progress. */
6890 gcc_assert (inner != exp);
6891
6892 subtarget = offset || bitpos ? NULL_RTX : target;
6893 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
6894 the inner alignment, force the inner to be sufficiently aligned. */
6895 if (CONSTANT_CLASS_P (inner)
6896 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
6897 {
6898 inner = copy_node (inner);
6899 TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
6900 TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
6901 TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
6902 }
6903 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6904
6905 if (offset)
6906 {
6907 rtx tmp;
6908
6909 if (modifier != EXPAND_NORMAL)
6910 result = force_operand (result, NULL);
6911 tmp = expand_expr (offset, NULL_RTX, tmode,
6912 modifier == EXPAND_INITIALIZER
6913 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
6914
6915 result = convert_memory_address (tmode, result);
6916 tmp = convert_memory_address (tmode, tmp);
6917
6918 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6919 result = gen_rtx_PLUS (tmode, result, tmp);
6920 else
6921 {
6922 subtarget = bitpos ? NULL_RTX : target;
6923 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6924 1, OPTAB_LIB_WIDEN);
6925 }
6926 }
6927
6928 if (bitpos)
6929 {
6930 /* Someone beforehand should have rejected taking the address
6931 of such an object. */
6932 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6933
6934 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6935 if (modifier < EXPAND_SUM)
6936 result = force_operand (result, target);
6937 }
6938
6939 return result;
6940 }
6941
6942 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6943 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6944
6945 static rtx
6946 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6947 enum expand_modifier modifier)
6948 {
6949 enum machine_mode rmode;
6950 rtx result;
6951
6952 /* Target mode of VOIDmode says "whatever's natural". */
6953 if (tmode == VOIDmode)
6954 tmode = TYPE_MODE (TREE_TYPE (exp));
6955
6956 /* We can get called with some Weird Things if the user does silliness
6957 like "(short) &a". In that case, convert_memory_address won't do
6958 the right thing, so ignore the given target mode. */
6959 if (tmode != Pmode && tmode != ptr_mode)
6960 tmode = Pmode;
6961
6962 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
6963 tmode, modifier);
6964
6965 /* Despite expand_expr's claims about ignoring TMODE when not
6966 strictly convenient, things break if we don't honor it. Note
6967 that combined with the above, we only do this for pointer modes. */
6968 rmode = GET_MODE (result);
6969 if (rmode == VOIDmode)
6970 rmode = tmode;
6971 if (rmode != tmode)
6972 result = convert_memory_address (tmode, result);
6973
6974 return result;
6975 }
6976
6977 /* Generate code for computing CONSTRUCTOR EXP.
6978 An rtx for the computed value is returned. If AVOID_TEMP_MEM
6979 is TRUE, instead of creating a temporary variable in memory
6980 NULL is returned and the caller needs to handle it differently. */
6981
6982 static rtx
6983 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
6984 bool avoid_temp_mem)
6985 {
6986 tree type = TREE_TYPE (exp);
6987 enum machine_mode mode = TYPE_MODE (type);
6988
6989 /* Try to avoid creating a temporary at all. This is possible
6990 if all of the initializer is zero.
6991 FIXME: try to handle all [0..255] initializers we can handle
6992 with memset. */
6993 if (TREE_STATIC (exp)
6994 && !TREE_ADDRESSABLE (exp)
6995 && target != 0 && mode == BLKmode
6996 && all_zeros_p (exp))
6997 {
6998 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
6999 return target;
7000 }
7001
7002 /* All elts simple constants => refer to a constant in memory. But
7003 if this is a non-BLKmode mode, let it store a field at a time
7004 since that should make a CONST_INT or CONST_DOUBLE when we
7005 fold. Likewise, if we have a target we can use, it is best to
7006 store directly into the target unless the type is large enough
7007 that memcpy will be used. If we are making an initializer and
7008 all operands are constant, put it in memory as well.
7009
7010 FIXME: Avoid trying to fill vector constructors piecemeal.
7011 Output them with output_constant_def below unless we're sure
7012 they're zeros. This should go away when vector initializers
7013 are treated like VECTOR_CST instead of arrays. */
7014 if ((TREE_STATIC (exp)
7015 && ((mode == BLKmode
7016 && ! (target != 0 && safe_from_p (target, exp, 1)))
7017 || TREE_ADDRESSABLE (exp)
7018 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7019 && (! MOVE_BY_PIECES_P
7020 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7021 TYPE_ALIGN (type)))
7022 && ! mostly_zeros_p (exp))))
7023 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7024 && TREE_CONSTANT (exp)))
7025 {
7026 rtx constructor;
7027
7028 if (avoid_temp_mem)
7029 return NULL_RTX;
7030
7031 constructor = expand_expr_constant (exp, 1, modifier);
7032
7033 if (modifier != EXPAND_CONST_ADDRESS
7034 && modifier != EXPAND_INITIALIZER
7035 && modifier != EXPAND_SUM)
7036 constructor = validize_mem (constructor);
7037
7038 return constructor;
7039 }
7040
7041 /* Handle calls that pass values in multiple non-contiguous
7042 locations. The Irix 6 ABI has examples of this. */
7043 if (target == 0 || ! safe_from_p (target, exp, 1)
7044 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
7045 {
7046 if (avoid_temp_mem)
7047 return NULL_RTX;
7048
7049 target
7050 = assign_temp (build_qualified_type (type, (TYPE_QUALS (type)
7051 | (TREE_READONLY (exp)
7052 * TYPE_QUAL_CONST))),
7053 0, TREE_ADDRESSABLE (exp), 1);
7054 }
7055
7056 store_constructor (exp, target, 0, int_expr_size (exp));
7057 return target;
7058 }
7059
7060
7061 /* expand_expr: generate code for computing expression EXP.
7062 An rtx for the computed value is returned. The value is never null.
7063 In the case of a void EXP, const0_rtx is returned.
7064
7065 The value may be stored in TARGET if TARGET is nonzero.
7066 TARGET is just a suggestion; callers must assume that
7067 the rtx returned may not be the same as TARGET.
7068
7069 If TARGET is CONST0_RTX, it means that the value will be ignored.
7070
7071 If TMODE is not VOIDmode, it suggests generating the
7072 result in mode TMODE. But this is done only when convenient.
7073 Otherwise, TMODE is ignored and the value is generated in its natural mode.
7074 TMODE is just a suggestion; callers must assume that
7075 the rtx returned may not have mode TMODE.
7076
7077 Note that TARGET may have neither TMODE nor MODE. In that case, it
7078 probably will not be used.
7079
7080 If MODIFIER is EXPAND_SUM then when EXP is an addition
7081 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7082 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7083 products as above, or REG or MEM, or constant.
7084 Ordinarily in such cases we would output mul or add instructions
7085 and then return a pseudo reg containing the sum.
7086
7087 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7088 it also marks a label as absolutely required (it can't be dead).
7089 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7090 This is used for outputting expressions used in initializers.
7091
7092 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7093 with a constant address even if that address is not normally legitimate.
7094 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7095
7096 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7097 a call parameter. Such targets require special care as we haven't yet
7098 marked TARGET so that it's safe from being trashed by libcalls. We
7099 don't want to use TARGET for anything but the final result;
7100 intermediate values must go elsewhere. Additionally, calls to
7101 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7102
7103 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7104 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7105 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7106 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7107 recursively. */
7108
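/* Added illustrative note (a sketch, not original source): under
   EXPAND_SUM, expanding an address such as &arr[i] with 4-byte
   elements may legitimately return an un-forced sum, roughly

       (plus (mult (reg i) (const_int 4)) (symbol_ref "arr"))

   instead of emitting mul/add insns into a fresh pseudo; callers are
   expected to cope with such forms (e.g. via memory_address).  The
   exact RTL depends on the target's pointer mode.  */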
7109 rtx
7110 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
7111 enum expand_modifier modifier, rtx *alt_rtl)
7112 {
7113 int lp_nr = 0;
7114 rtx ret, last = NULL;
7115
7116 /* Handle ERROR_MARK before anybody tries to access its type. */
7117 if (TREE_CODE (exp) == ERROR_MARK
7118 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7119 {
7120 ret = CONST0_RTX (tmode);
7121 return ret ? ret : const0_rtx;
7122 }
7123
7124 if (flag_non_call_exceptions)
7125 {
7126 lp_nr = lookup_expr_eh_lp (exp);
7127 if (lp_nr)
7128 last = get_last_insn ();
7129 }
7130
7131 /* If this is an expression of some kind and it has an associated line
7132 number, then emit the line number before expanding the expression.
7133
7134 We need to save and restore the file and line information so that
7135 errors discovered during expansion are emitted with the right
7136 information. It would be better if the diagnostic routines
7137 used the file/line information embedded in the tree nodes rather
7138 than globals. */
7139 if (cfun && EXPR_HAS_LOCATION (exp))
7140 {
7141 location_t saved_location = input_location;
7142 input_location = EXPR_LOCATION (exp);
7143 set_curr_insn_source_location (input_location);
7144
7145 /* Record where the insns produced belong. */
7146 set_curr_insn_block (TREE_BLOCK (exp));
7147
7148 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7149
7150 input_location = saved_location;
7151 }
7152 else
7153 {
7154 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7155 }
7156
7157 /* If using non-call exceptions, mark all insns that may trap.
7158 expand_call() will mark CALL_INSNs before we get to this code,
7159 but it doesn't handle libcalls, and these may trap. */
7160 if (lp_nr)
7161 {
7162 rtx insn;
7163 for (insn = next_real_insn (last); insn;
7164 insn = next_real_insn (insn))
7165 {
7166 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
7167 /* If we want exceptions for non-call insns, any
7168 may_trap_p instruction may throw. */
7169 && GET_CODE (PATTERN (insn)) != CLOBBER
7170 && GET_CODE (PATTERN (insn)) != USE
7171 && insn_could_throw_p (insn))
7172 make_reg_eh_region_note (insn, 0, lp_nr);
7173 }
7174 }
7175
7176 return ret;
7177 }
7178
7179 rtx
7180 expand_expr_real_2 (sepops ops, rtx target, enum machine_mode tmode,
7181 enum expand_modifier modifier)
7182 {
7183 rtx op0, op1, op2, temp;
7184 tree type;
7185 int unsignedp;
7186 enum machine_mode mode;
7187 enum tree_code code = ops->code;
7188 optab this_optab;
7189 rtx subtarget, original_target;
7190 int ignore;
7191 tree subexp0, subexp1;
7192 bool reduce_bit_field;
7193 gimple subexp0_def, subexp1_def;
7194 tree top0, top1;
7195 location_t loc = ops->location;
7196 tree treeop0, treeop1, treeop2;
7197 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
7198 ? reduce_to_bit_field_precision ((expr), \
7199 target, \
7200 type) \
7201 : (expr))
7202
7203 type = ops->type;
7204 mode = TYPE_MODE (type);
7205 unsignedp = TYPE_UNSIGNED (type);
7206
7207 treeop0 = ops->op0;
7208 treeop1 = ops->op1;
7209 treeop2 = ops->op2;
7210
7211 /* We should be called only on simple (binary or unary) expressions,
7212 exactly those that are valid in gimple expressions that aren't
7213 GIMPLE_SINGLE_RHS (or invalid). */
7214 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
7215 || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS);
7216
7217 ignore = (target == const0_rtx
7218 || ((CONVERT_EXPR_CODE_P (code)
7219 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
7220 && TREE_CODE (type) == VOID_TYPE));
7221
7222 /* We should be called only if we need the result. */
7223 gcc_assert (!ignore);
7224
7225 /* An operation in what may be a bit-field type needs the
7226 result to be reduced to the precision of the bit-field type,
7227 which is narrower than that of the type's mode. */
7228 reduce_bit_field = (TREE_CODE (type) == INTEGER_TYPE
7229 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
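/* Added illustrative note (a sketch, not original source): for an
   unsigned 3-bit bit-field type carried in an 8-bit mode, 7 + 1 must
   read back as 0, not 8; REDUCE_BIT_FIELD truncates the result back
   to the type's precision to enforce this.  */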
7230
7231 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
7232 target = 0;
7233
7234 /* Use subtarget as the target for operand 0 of a binary operation. */
7235 subtarget = get_subtarget (target);
7236 original_target = target;
7237
7238 switch (code)
7239 {
7240 case NON_LVALUE_EXPR:
7241 case PAREN_EXPR:
7242 CASE_CONVERT:
7243 if (treeop0 == error_mark_node)
7244 return const0_rtx;
7245
7246 if (TREE_CODE (type) == UNION_TYPE)
7247 {
7248 tree valtype = TREE_TYPE (treeop0);
7249
7250 /* If both input and output are BLKmode, this conversion isn't doing
7251 anything except possibly changing memory attribute. */
7252 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7253 {
7254 rtx result = expand_expr (treeop0, target, tmode,
7255 modifier);
7256
7257 result = copy_rtx (result);
7258 set_mem_attributes (result, type, 0);
7259 return result;
7260 }
7261
7262 if (target == 0)
7263 {
7264 if (TYPE_MODE (type) != BLKmode)
7265 target = gen_reg_rtx (TYPE_MODE (type));
7266 else
7267 target = assign_temp (type, 0, 1, 1);
7268 }
7269
7270 if (MEM_P (target))
7271 /* Store data into beginning of memory target. */
7272 store_expr (treeop0,
7273 adjust_address (target, TYPE_MODE (valtype), 0),
7274 modifier == EXPAND_STACK_PARM,
7275 false);
7276
7277 else
7278 {
7279 gcc_assert (REG_P (target));
7280
7281 /* Store this field into a union of the proper type. */
7282 store_field (target,
7283 MIN ((int_size_in_bytes (TREE_TYPE
7284 (treeop0))
7285 * BITS_PER_UNIT),
7286 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7287 0, TYPE_MODE (valtype), treeop0,
7288 type, 0, false);
7289 }
7290
7291 /* Return the entire union. */
7292 return target;
7293 }
7294
7295 if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
7296 {
7297 op0 = expand_expr (treeop0, target, VOIDmode,
7298 modifier);
7299
7300 /* If the signedness of the conversion differs and OP0 is
7301 a promoted SUBREG, clear that indication since we now
7302 have to do the proper extension. */
7303 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
7304 && GET_CODE (op0) == SUBREG)
7305 SUBREG_PROMOTED_VAR_P (op0) = 0;
7306
7307 return REDUCE_BIT_FIELD (op0);
7308 }
7309
7310 op0 = expand_expr (treeop0, NULL_RTX, mode,
7311 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
7312 if (GET_MODE (op0) == mode)
7313 ;
7314
7315 /* If OP0 is a constant, just convert it into the proper mode. */
7316 else if (CONSTANT_P (op0))
7317 {
7318 tree inner_type = TREE_TYPE (treeop0);
7319 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7320
7321 if (modifier == EXPAND_INITIALIZER)
7322 op0 = simplify_gen_subreg (mode, op0, inner_mode,
7323 subreg_lowpart_offset (mode,
7324 inner_mode));
7325 else
7326 op0 = convert_modes (mode, inner_mode, op0,
7327 TYPE_UNSIGNED (inner_type));
7328 }
7329
7330 else if (modifier == EXPAND_INITIALIZER)
7331 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7332
7333 else if (target == 0)
7334 op0 = convert_to_mode (mode, op0,
7335 TYPE_UNSIGNED (TREE_TYPE
7336 (treeop0)));
7337 else
7338 {
7339 convert_move (target, op0,
7340 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
7341 op0 = target;
7342 }
7343
7344 return REDUCE_BIT_FIELD (op0);
7345
7346 case POINTER_PLUS_EXPR:
7347 /* Even though the sizetype mode and the pointer's mode can be different,
7348 expand is able to handle this correctly and get the correct result out
7349 of the PLUS_EXPR code. */
7350 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
7351 if sizetype precision is smaller than pointer precision. */
7352 if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
7353 treeop1 = fold_convert_loc (loc, type,
7354 fold_convert_loc (loc, ssizetype,
7355 treeop1));
7356 case PLUS_EXPR:
7357
7358 /* Check if this is a case for multiplication and addition. */
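/* Added illustrative note (a sketch, not original source): the shape
   matched below is, in GIMPLE terms, roughly

       t1 = (wide) a;  t2 = (wide) b;    <- the two conversion defs
       m  = t1 * t2;                     <- the MULT_EXPR def
       r  = m + c;                       <- this PLUS_EXPR

   which, when the target provides a widening multiply-accumulate
   optab (e.g. smadd_widen_optab), becomes a single madd-style insn
   instead of a widening multiply followed by an add.  */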
7359 if ((TREE_CODE (type) == INTEGER_TYPE
7360 || TREE_CODE (type) == FIXED_POINT_TYPE)
7361 && (subexp0_def = get_def_for_expr (treeop0,
7362 MULT_EXPR)))
7363 {
7364 tree subsubexp0, subsubexp1;
7365 gimple subsubexp0_def, subsubexp1_def;
7366 enum tree_code this_code;
7367
7368 this_code = TREE_CODE (type) == INTEGER_TYPE ? NOP_EXPR
7369 : FIXED_CONVERT_EXPR;
7370 subsubexp0 = gimple_assign_rhs1 (subexp0_def);
7371 subsubexp0_def = get_def_for_expr (subsubexp0, this_code);
7372 subsubexp1 = gimple_assign_rhs2 (subexp0_def);
7373 subsubexp1_def = get_def_for_expr (subsubexp1, this_code);
7374 if (subsubexp0_def && subsubexp1_def
7375 && (top0 = gimple_assign_rhs1 (subsubexp0_def))
7376 && (top1 = gimple_assign_rhs1 (subsubexp1_def))
7377 && (TYPE_PRECISION (TREE_TYPE (top0))
7378 < TYPE_PRECISION (TREE_TYPE (subsubexp0)))
7379 && (TYPE_PRECISION (TREE_TYPE (top0))
7380 == TYPE_PRECISION (TREE_TYPE (top1)))
7381 && (TYPE_UNSIGNED (TREE_TYPE (top0))
7382 == TYPE_UNSIGNED (TREE_TYPE (top1))))
7383 {
7384 tree op0type = TREE_TYPE (top0);
7385 enum machine_mode innermode = TYPE_MODE (op0type);
7386 bool zextend_p = TYPE_UNSIGNED (op0type);
7387 bool sat_p = TYPE_SATURATING (TREE_TYPE (subsubexp0));
7388 if (sat_p == 0)
7389 this_optab = zextend_p ? umadd_widen_optab : smadd_widen_optab;
7390 else
7391 this_optab = zextend_p ? usmadd_widen_optab
7392 : ssmadd_widen_optab;
7393 if (mode == GET_MODE_2XWIDER_MODE (innermode)
7394 && (optab_handler (this_optab, mode)->insn_code
7395 != CODE_FOR_nothing))
7396 {
7397 expand_operands (top0, top1, NULL_RTX, &op0, &op1,
7398 EXPAND_NORMAL);
7399 op2 = expand_expr (treeop1, subtarget,
7400 VOIDmode, EXPAND_NORMAL);
7401 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
7402 target, unsignedp);
7403 gcc_assert (temp);
7404 return REDUCE_BIT_FIELD (temp);
7405 }
7406 }
7407 }
7408
7409 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7410 something else, make sure we add the register to the constant and
7411 then to the other thing. This case can occur during strength
7412 reduction and doing it this way will produce better code if the
7413 frame pointer or argument pointer is eliminated.
7414
7415 fold-const.c will ensure that the constant is always in the inner
7416 PLUS_EXPR, so the only case we need to do anything about is if
7417 sp, ap, or fp is our second argument, in which case we must swap
7418 the innermost first argument and our second argument. */
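/* Added illustrative note (a sketch, not original source): e.g.
   (x + 4) + fp is rewritten here to (fp + 4) + x, so that when the
   frame pointer is later eliminated to sp + offset the constants can
   be combined.  */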
7419
7420 if (TREE_CODE (treeop0) == PLUS_EXPR
7421 && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
7422 && TREE_CODE (treeop1) == VAR_DECL
7423 && (DECL_RTL (treeop1) == frame_pointer_rtx
7424 || DECL_RTL (treeop1) == stack_pointer_rtx
7425 || DECL_RTL (treeop1) == arg_pointer_rtx))
7426 {
7427 tree t = treeop1;
7428
7429 treeop1 = TREE_OPERAND (treeop0, 0);
7430 TREE_OPERAND (treeop0, 0) = t;
7431 }
7432
7433 /* If the result is to be ptr_mode and we are adding an integer to
7434 something, we might be forming a constant. So try to use
7435 plus_constant. If it produces a sum and we can't accept it,
7436 use force_operand. This allows P = &ARR[const] to generate
7437 efficient code on machines where a SYMBOL_REF is not a valid
7438 address.
7439
7440 If this is an EXPAND_SUM call, always return the sum. */
7441 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7442 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7443 {
7444 if (modifier == EXPAND_STACK_PARM)
7445 target = 0;
7446 if (TREE_CODE (treeop0) == INTEGER_CST
7447 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7448 && TREE_CONSTANT (treeop1))
7449 {
7450 rtx constant_part;
7451
7452 op1 = expand_expr (treeop1, subtarget, VOIDmode,
7453 EXPAND_SUM);
7454 /* Use immed_double_const to ensure that the constant is
7455 truncated according to the mode of OP1, then sign extended
7456 to a HOST_WIDE_INT. Using the constant directly can result
7457 in non-canonical RTL in a 64x32 cross compile. */
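/* Added illustrative note (a sketch, not original source): on a
   64-bit host targeting 32 bits, the SImode constant 0xffffffff must
   become the canonical CONST_INT -1; using TREE_INT_CST_LOW directly
   would leave the non-canonical value 0xffffffff.  */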
7458 constant_part
7459 = immed_double_const (TREE_INT_CST_LOW (treeop0),
7460 (HOST_WIDE_INT) 0,
7461 TYPE_MODE (TREE_TYPE (treeop1)));
7462 op1 = plus_constant (op1, INTVAL (constant_part));
7463 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7464 op1 = force_operand (op1, target);
7465 return REDUCE_BIT_FIELD (op1);
7466 }
7467
7468 else if (TREE_CODE (treeop1) == INTEGER_CST
7469 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7470 && TREE_CONSTANT (treeop0))
7471 {
7472 rtx constant_part;
7473
7474 op0 = expand_expr (treeop0, subtarget, VOIDmode,
7475 (modifier == EXPAND_INITIALIZER
7476 ? EXPAND_INITIALIZER : EXPAND_SUM));
7477 if (! CONSTANT_P (op0))
7478 {
7479 op1 = expand_expr (treeop1, NULL_RTX,
7480 VOIDmode, modifier);
7481 /* Return a PLUS if modifier says it's OK. */
7482 if (modifier == EXPAND_SUM
7483 || modifier == EXPAND_INITIALIZER)
7484 return simplify_gen_binary (PLUS, mode, op0, op1);
7485 goto binop2;
7486 }
7487 /* Use immed_double_const to ensure that the constant is
7488 truncated according to the mode of OP0, then sign extended
7489 to a HOST_WIDE_INT. Using the constant directly can result
7490 in non-canonical RTL in a 64x32 cross compile. */
7491 constant_part
7492 = immed_double_const (TREE_INT_CST_LOW (treeop1),
7493 (HOST_WIDE_INT) 0,
7494 TYPE_MODE (TREE_TYPE (treeop0)));
7495 op0 = plus_constant (op0, INTVAL (constant_part));
7496 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7497 op0 = force_operand (op0, target);
7498 return REDUCE_BIT_FIELD (op0);
7499 }
7500 }
7501
7502 /* No sense saving up arithmetic to be done
7503 if it's all in the wrong mode to form part of an address.
7504 And force_operand won't know whether to sign-extend or
7505 zero-extend. */
7506 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7507 || mode != ptr_mode)
7508 {
7509 expand_operands (treeop0, treeop1,
7510 subtarget, &op0, &op1, EXPAND_NORMAL);
7511 if (op0 == const0_rtx)
7512 return op1;
7513 if (op1 == const0_rtx)
7514 return op0;
7515 goto binop2;
7516 }
7517
7518 expand_operands (treeop0, treeop1,
7519 subtarget, &op0, &op1, modifier);
7520 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7521
7522 case MINUS_EXPR:
7523 /* Check if this is a case for multiplication and subtraction. */
7524 if ((TREE_CODE (type) == INTEGER_TYPE
7525 || TREE_CODE (type) == FIXED_POINT_TYPE)
7526 && (subexp1_def = get_def_for_expr (treeop1,
7527 MULT_EXPR)))
7528 {
7529 tree subsubexp0, subsubexp1;
7530 gimple subsubexp0_def, subsubexp1_def;
7531 enum tree_code this_code;
7532
7533 this_code = TREE_CODE (type) == INTEGER_TYPE ? NOP_EXPR
7534 : FIXED_CONVERT_EXPR;
7535 subsubexp0 = gimple_assign_rhs1 (subexp1_def);
7536 subsubexp0_def = get_def_for_expr (subsubexp0, this_code);
7537 subsubexp1 = gimple_assign_rhs2 (subexp1_def);
7538 subsubexp1_def = get_def_for_expr (subsubexp1, this_code);
7539 if (subsubexp0_def && subsubexp1_def
7540 && (top0 = gimple_assign_rhs1 (subsubexp0_def))
7541 && (top1 = gimple_assign_rhs1 (subsubexp1_def))
7542 && (TYPE_PRECISION (TREE_TYPE (top0))
7543 < TYPE_PRECISION (TREE_TYPE (subsubexp0)))
7544 && (TYPE_PRECISION (TREE_TYPE (top0))
7545 == TYPE_PRECISION (TREE_TYPE (top1)))
7546 && (TYPE_UNSIGNED (TREE_TYPE (top0))
7547 == TYPE_UNSIGNED (TREE_TYPE (top1))))
7548 {
7549 tree op0type = TREE_TYPE (top0);
7550 enum machine_mode innermode = TYPE_MODE (op0type);
7551 bool zextend_p = TYPE_UNSIGNED (op0type);
7552 bool sat_p = TYPE_SATURATING (TREE_TYPE (subsubexp0));
7553 if (sat_p == 0)
7554 this_optab = zextend_p ? umsub_widen_optab : smsub_widen_optab;
7555 else
7556 this_optab = zextend_p ? usmsub_widen_optab
7557 : ssmsub_widen_optab;
7558 if (mode == GET_MODE_2XWIDER_MODE (innermode)
7559 && (optab_handler (this_optab, mode)->insn_code
7560 != CODE_FOR_nothing))
7561 {
7562 expand_operands (top0, top1, NULL_RTX, &op0, &op1,
7563 EXPAND_NORMAL);
7564 op2 = expand_expr (treeop0, subtarget,
7565 VOIDmode, EXPAND_NORMAL);
7566 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
7567 target, unsignedp);
7568 gcc_assert (temp);
7569 return REDUCE_BIT_FIELD (temp);
7570 }
7571 }
7572 }
7573
7574 /* For initializers, we are allowed to return a MINUS of two
7575 symbolic constants. Here we handle all cases when both operands
7576 are constant. */
7577 /* Handle difference of two symbolic constants,
7578 for the sake of an initializer. */
7579 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7580 && really_constant_p (treeop0)
7581 && really_constant_p (treeop1))
7582 {
7583 expand_operands (treeop0, treeop1,
7584 NULL_RTX, &op0, &op1, modifier);
7585
7586 /* If the last operand is a CONST_INT, use plus_constant of
7587 the negated constant. Else make the MINUS. */
7588 if (CONST_INT_P (op1))
7589 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
7590 else
7591 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
7592 }
7593
7594 /* No sense saving up arithmetic to be done
7595 if it's all in the wrong mode to form part of an address.
7596 And force_operand won't know whether to sign-extend or
7597 zero-extend. */
7598 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7599 || mode != ptr_mode)
7600 goto binop;
7601
7602 expand_operands (treeop0, treeop1,
7603 subtarget, &op0, &op1, modifier);
7604
7605 /* Convert A - const to A + (-const). */
7606 if (CONST_INT_P (op1))
7607 {
7608 op1 = negate_rtx (mode, op1);
7609 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7610 }
7611
7612 goto binop2;
7613
7614 case MULT_EXPR:
7615 /* If this is a fixed-point operation, then we cannot use the code
7616 below because "expand_mult" doesn't support sat/no-sat fixed-point
7617 multiplications. */
7618 if (ALL_FIXED_POINT_MODE_P (mode))
7619 goto binop;
7620
7621 /* If first operand is constant, swap them.
7622 Thus the following special case checks need only
7623 check the second operand. */
7624 if (TREE_CODE (treeop0) == INTEGER_CST)
7625 {
7626 tree t1 = treeop0;
7627 treeop0 = treeop1;
7628 treeop1 = t1;
7629 }
7630
7631 /* Attempt to return something suitable for generating an
7632 indexed address, for machines that support that. */
7633
7634 if (modifier == EXPAND_SUM && mode == ptr_mode
7635 && host_integerp (treeop1, 0))
7636 {
7637 tree exp1 = treeop1;
7638
7639 op0 = expand_expr (treeop0, subtarget, VOIDmode,
7640 EXPAND_SUM);
7641
7642 if (!REG_P (op0))
7643 op0 = force_operand (op0, NULL_RTX);
7644 if (!REG_P (op0))
7645 op0 = copy_to_mode_reg (mode, op0);
7646
7647 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
7648 gen_int_mode (tree_low_cst (exp1, 0),
7649 TYPE_MODE (TREE_TYPE (exp1)))));
7650 }
7651
7652 if (modifier == EXPAND_STACK_PARM)
7653 target = 0;
7654
7655 /* Check for multiplying things that have been extended
7656 from a narrower type. If this machine supports multiplying
7657 in that narrower type with a result in the desired type,
7658 do it that way, and avoid the explicit type-conversion. */
7659
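/* Added illustrative note (a sketch, not original source): the
   canonical case is

       short a, b;
       int   r = (int) a * (int) b;

   where both operands were widened from a narrower mode; if the
   target has a widening-multiply optab for that mode pair (a
   mulhisi-style pattern), the multiply runs in the narrow mode with
   a wide result and the explicit extensions are skipped.  */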
7660 subexp0 = treeop0;
7661 subexp1 = treeop1;
7662 subexp0_def = get_def_for_expr (subexp0, NOP_EXPR);
7663 subexp1_def = get_def_for_expr (subexp1, NOP_EXPR);
7664 top0 = top1 = NULL_TREE;
7665
7666 /* First, check if we have a multiplication of one signed and one
7667 unsigned operand. */
7668 if (subexp0_def
7669 && (top0 = gimple_assign_rhs1 (subexp0_def))
7670 && subexp1_def
7671 && (top1 = gimple_assign_rhs1 (subexp1_def))
7672 && TREE_CODE (type) == INTEGER_TYPE
7673 && (TYPE_PRECISION (TREE_TYPE (top0))
7674 < TYPE_PRECISION (TREE_TYPE (subexp0)))
7675 && (TYPE_PRECISION (TREE_TYPE (top0))
7676 == TYPE_PRECISION (TREE_TYPE (top1)))
7677 && (TYPE_UNSIGNED (TREE_TYPE (top0))
7678 != TYPE_UNSIGNED (TREE_TYPE (top1))))
7679 {
7680 enum machine_mode innermode
7681 = TYPE_MODE (TREE_TYPE (top0));
7682 this_optab = usmul_widen_optab;
7683 if (mode == GET_MODE_WIDER_MODE (innermode))
7684 {
7685 if (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing)
7686 {
7687 if (TYPE_UNSIGNED (TREE_TYPE (top0)))
7688 expand_operands (top0, top1, NULL_RTX, &op0, &op1,
7689 EXPAND_NORMAL);
7690 else
7691 expand_operands (top0, top1, NULL_RTX, &op1, &op0,
7692 EXPAND_NORMAL);
7693
7694 goto binop3;
7695 }
7696 }
7697 }
7698 /* Check for a multiplication with matching signedness. If
7699 valid, TOP0 and TOP1 were set in the previous if
7700 condition. */
7701 else if (top0
7702 && TREE_CODE (type) == INTEGER_TYPE
7703 && (TYPE_PRECISION (TREE_TYPE (top0))
7704 < TYPE_PRECISION (TREE_TYPE (subexp0)))
7705 && ((TREE_CODE (subexp1) == INTEGER_CST
7706 && int_fits_type_p (subexp1, TREE_TYPE (top0))
7707 /* Don't use a widening multiply if a shift will do. */
7708 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (subexp1)))
7709 > HOST_BITS_PER_WIDE_INT)
7710 || exact_log2 (TREE_INT_CST_LOW (subexp1)) < 0))
7711 ||
7712 (top1
7713 && (TYPE_PRECISION (TREE_TYPE (top1))
7714 == TYPE_PRECISION (TREE_TYPE (top0))
7715 /* If both operands are extended, they must either both
7716 be zero-extended or both be sign-extended. */
7717 && (TYPE_UNSIGNED (TREE_TYPE (top1))
7718 == TYPE_UNSIGNED (TREE_TYPE (top0)))))))
7719 {
7720 tree op0type = TREE_TYPE (top0);
7721 enum machine_mode innermode = TYPE_MODE (op0type);
7722 bool zextend_p = TYPE_UNSIGNED (op0type);
7723 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7724 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7725
7726 if (mode == GET_MODE_2XWIDER_MODE (innermode))
7727 {
7728 if (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing)
7729 {
7730 if (TREE_CODE (subexp1) == INTEGER_CST)
7731 expand_operands (top0, subexp1, NULL_RTX, &op0, &op1,
7732 EXPAND_NORMAL);
7733 else
7734 expand_operands (top0, top1, NULL_RTX, &op0, &op1,
7735 EXPAND_NORMAL);
7736 goto binop3;
7737 }
7738 else if (optab_handler (other_optab, mode)->insn_code != CODE_FOR_nothing
7739 && innermode == word_mode)
7740 {
7741 rtx htem, hipart;
7742 op0 = expand_normal (top0);
7743 if (TREE_CODE (subexp1) == INTEGER_CST)
7744 op1 = convert_modes (innermode, mode,
7745 expand_normal (subexp1), unsignedp);
7746 else
7747 op1 = expand_normal (top1);
7748 temp = expand_binop (mode, other_optab, op0, op1, target,
7749 unsignedp, OPTAB_LIB_WIDEN);
7750 hipart = gen_highpart (innermode, temp);
7751 htem = expand_mult_highpart_adjust (innermode, hipart,
7752 op0, op1, hipart,
7753 zextend_p);
7754 if (htem != hipart)
7755 emit_move_insn (hipart, htem);
7756 return REDUCE_BIT_FIELD (temp);
7757 }
7758 }
7759 }
7760 expand_operands (subexp0, subexp1, subtarget, &op0, &op1, EXPAND_NORMAL);
7761 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
7762
7763 case TRUNC_DIV_EXPR:
7764 case FLOOR_DIV_EXPR:
7765 case CEIL_DIV_EXPR:
7766 case ROUND_DIV_EXPR:
7767 case EXACT_DIV_EXPR:
7768 /* If this is a fixed-point operation, then we cannot use the code
7769 below because "expand_divmod" doesn't support sat/no-sat fixed-point
7770 divisions. */
7771 if (ALL_FIXED_POINT_MODE_P (mode))
7772 goto binop;
7773
7774 if (modifier == EXPAND_STACK_PARM)
7775 target = 0;
7776 /* Possible optimization: compute the dividend with EXPAND_SUM;
7777 then, if the divisor is constant, we can optimize the case
7778 where some terms of the dividend have coefficients divisible by it. */
7779 expand_operands (treeop0, treeop1,
7780 subtarget, &op0, &op1, EXPAND_NORMAL);
7781 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7782
7783 case RDIV_EXPR:
7784 goto binop;
7785
7786 case TRUNC_MOD_EXPR:
7787 case FLOOR_MOD_EXPR:
7788 case CEIL_MOD_EXPR:
7789 case ROUND_MOD_EXPR:
7790 if (modifier == EXPAND_STACK_PARM)
7791 target = 0;
7792 expand_operands (treeop0, treeop1,
7793 subtarget, &op0, &op1, EXPAND_NORMAL);
7794 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7795
7796 case FIXED_CONVERT_EXPR:
7797 op0 = expand_normal (treeop0);
7798 if (target == 0 || modifier == EXPAND_STACK_PARM)
7799 target = gen_reg_rtx (mode);
7800
7801 if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
7802 && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
7803 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
7804 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
7805 else
7806 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
7807 return target;
7808
7809 case FIX_TRUNC_EXPR:
7810 op0 = expand_normal (treeop0);
7811 if (target == 0 || modifier == EXPAND_STACK_PARM)
7812 target = gen_reg_rtx (mode);
7813 expand_fix (target, op0, unsignedp);
7814 return target;
7815
7816 case FLOAT_EXPR:
7817 op0 = expand_normal (treeop0);
7818 if (target == 0 || modifier == EXPAND_STACK_PARM)
7819 target = gen_reg_rtx (mode);
7820 /* expand_float can't figure out what to do if FROM has VOIDmode.
7821 So give it the correct mode. With -O, cse will optimize this. */
7822 if (GET_MODE (op0) == VOIDmode)
7823 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
7824 op0);
7825 expand_float (target, op0,
7826 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
7827 return target;
7828
7829 case NEGATE_EXPR:
7830 op0 = expand_expr (treeop0, subtarget,
7831 VOIDmode, EXPAND_NORMAL);
7832 if (modifier == EXPAND_STACK_PARM)
7833 target = 0;
7834 temp = expand_unop (mode,
7835 optab_for_tree_code (NEGATE_EXPR, type,
7836 optab_default),
7837 op0, target, 0);
7838 gcc_assert (temp);
7839 return REDUCE_BIT_FIELD (temp);
7840
7841 case ABS_EXPR:
7842 op0 = expand_expr (treeop0, subtarget,
7843 VOIDmode, EXPAND_NORMAL);
7844 if (modifier == EXPAND_STACK_PARM)
7845 target = 0;
7846
7847 /* ABS_EXPR is not valid for complex arguments. */
7848 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7849 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
7850
7851 /* Unsigned abs is simply the operand. Testing here means we don't
7852 risk generating incorrect code below. */
7853 if (TYPE_UNSIGNED (type))
7854 return op0;
7855
7856 return expand_abs (mode, op0, target, unsignedp,
7857 safe_from_p (target, treeop0, 1));
7858
7859 case MAX_EXPR:
7860 case MIN_EXPR:
7861 target = original_target;
7862 if (target == 0
7863 || modifier == EXPAND_STACK_PARM
7864 || (MEM_P (target) && MEM_VOLATILE_P (target))
7865 || GET_MODE (target) != mode
7866 || (REG_P (target)
7867 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7868 target = gen_reg_rtx (mode);
7869 expand_operands (treeop0, treeop1,
7870 target, &op0, &op1, EXPAND_NORMAL);
7871
7872 /* First try to do it with a special MIN or MAX instruction.
7873 If that does not win, use a conditional jump to select the proper
7874 value. */
7875 this_optab = optab_for_tree_code (code, type, optab_default);
7876 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7877 OPTAB_WIDEN);
7878 if (temp != 0)
7879 return temp;
7880
7881 /* At this point, a MEM target is no longer useful; we will get better
7882 code without it. */
7883
7884 if (! REG_P (target))
7885 target = gen_reg_rtx (mode);
7886
7887 /* If op1 was placed in target, swap op0 and op1. */
7888 if (target != op0 && target == op1)
7889 {
7890 temp = op0;
7891 op0 = op1;
7892 op1 = temp;
7893 }
7894
7895 /* We generate better code and avoid problems with op1 mentioning
7896 target by forcing op1 into a pseudo if it isn't a constant. */
7897 if (! CONSTANT_P (op1))
7898 op1 = force_reg (mode, op1);
7899
7900 {
7901 enum rtx_code comparison_code;
7902 rtx cmpop1 = op1;
7903
7904 if (code == MAX_EXPR)
7905 comparison_code = unsignedp ? GEU : GE;
7906 else
7907 comparison_code = unsignedp ? LEU : LE;
7908
7909 /* Canonicalize to comparisons against 0. */
7910 if (op1 == const1_rtx)
7911 {
7912 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
7913 or (a != 0 ? a : 1) for unsigned.
7914 For MIN we are safe converting (a <= 1 ? a : 1)
7915 into (a <= 0 ? a : 1) */
7916 cmpop1 = const0_rtx;
7917 if (code == MAX_EXPR)
7918 comparison_code = unsignedp ? NE : GT;
7919 }
7920 if (op1 == constm1_rtx && !unsignedp)
7921 {
7922 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
7923 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
7924 cmpop1 = const0_rtx;
7925 if (code == MIN_EXPR)
7926 comparison_code = LT;
7927 }
7928 #ifdef HAVE_conditional_move
7929 /* Use a conditional move if possible. */
7930 if (can_conditionally_move_p (mode))
7931 {
7932 rtx insn;
7933
7934 /* ??? Same problem as in expmed.c: emit_conditional_move
7935 forces a stack adjustment via compare_from_rtx, and we
7936 lose the stack adjustment if the sequence we are about
7937 to create is discarded. */
7938 do_pending_stack_adjust ();
7939
7940 start_sequence ();
7941
7942 /* Try to emit the conditional move. */
7943 insn = emit_conditional_move (target, comparison_code,
7944 op0, cmpop1, mode,
7945 op0, op1, mode,
7946 unsignedp);
7947
7948 /* If we could do the conditional move, emit the sequence,
7949 and return. */
7950 if (insn)
7951 {
7952 rtx seq = get_insns ();
7953 end_sequence ();
7954 emit_insn (seq);
7955 return target;
7956 }
7957
7958 /* Otherwise discard the sequence and fall back to code with
7959 branches. */
7960 end_sequence ();
7961 }
7962 #endif
7963 if (target != op0)
7964 emit_move_insn (target, op0);
7965
7966 temp = gen_label_rtx ();
7967 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
7968 unsignedp, mode, NULL_RTX, NULL_RTX, temp);
7969 }
7970 emit_move_insn (target, op1);
7971 emit_label (temp);
7972 return target;
7973
7974 case BIT_NOT_EXPR:
7975 op0 = expand_expr (treeop0, subtarget,
7976 VOIDmode, EXPAND_NORMAL);
7977 if (modifier == EXPAND_STACK_PARM)
7978 target = 0;
7979 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7980 gcc_assert (temp);
7981 return temp;
7982
7983 /* ??? Can optimize bitwise operations with one arg constant.
7984 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7985 and (a bitwise1 b) bitwise2 b (etc)
7986 but that is probably not worthwhile. */
7987
7988 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7989 boolean values when we want in all cases to compute both of them. In
7990 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7991 as actual zero-or-1 values and then bitwise anding. In cases where
7992 there cannot be any side effects, better code would be made by
7993 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7994 how to recognize those cases. */
7995
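/* Added illustrative note (a sketch, not original source): gimple
   operands of the TRUTH_* codes are already zero-or-one values, so

       r = (a != 0) & (b != 0);

   expands exactly like the bitwise BIT_AND_EXPR below; each TRUTH_*
   case therefore just rewrites CODE and falls through to its bitwise
   twin.  */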
7996 case TRUTH_AND_EXPR:
7997 code = BIT_AND_EXPR;
7998 case BIT_AND_EXPR:
7999 goto binop;
8000
8001 case TRUTH_OR_EXPR:
8002 code = BIT_IOR_EXPR;
8003 case BIT_IOR_EXPR:
8004 goto binop;
8005
8006 case TRUTH_XOR_EXPR:
8007 code = BIT_XOR_EXPR;
8008 case BIT_XOR_EXPR:
8009 goto binop;
8010
8011 case LROTATE_EXPR:
8012 case RROTATE_EXPR:
8013 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8014 || (GET_MODE_PRECISION (TYPE_MODE (type))
8015 == TYPE_PRECISION (type)));
8016 /* fall through */
8017
8018 case LSHIFT_EXPR:
8019 case RSHIFT_EXPR:
8020 /* If this is a fixed-point operation, then we cannot use the code
8021 below because "expand_shift" doesn't support sat/no-sat fixed-point
8022 shifts. */
8023 if (ALL_FIXED_POINT_MODE_P (mode))
8024 goto binop;
8025
8026 if (! safe_from_p (subtarget, treeop1, 1))
8027 subtarget = 0;
8028 if (modifier == EXPAND_STACK_PARM)
8029 target = 0;
8030 op0 = expand_expr (treeop0, subtarget,
8031 VOIDmode, EXPAND_NORMAL);
8032 temp = expand_shift (code, mode, op0, treeop1, target,
8033 unsignedp);
8034 if (code == LSHIFT_EXPR)
8035 temp = REDUCE_BIT_FIELD (temp);
8036 return temp;
8037
8038 /* Could determine the answer when only additive constants differ. Also,
8039 the addition of one can be handled by changing the condition. */
8040 case LT_EXPR:
8041 case LE_EXPR:
8042 case GT_EXPR:
8043 case GE_EXPR:
8044 case EQ_EXPR:
8045 case NE_EXPR:
8046 case UNORDERED_EXPR:
8047 case ORDERED_EXPR:
8048 case UNLT_EXPR:
8049 case UNLE_EXPR:
8050 case UNGT_EXPR:
8051 case UNGE_EXPR:
8052 case UNEQ_EXPR:
8053 case LTGT_EXPR:
8054 temp = do_store_flag (ops,
8055 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8056 tmode != VOIDmode ? tmode : mode);
8057 if (temp)
8058 return temp;
8059
8060 /* Use a compare and a jump for BLKmode comparisons, or for function
8061 type comparisons if HAVE_canonicalize_funcptr_for_compare. */
8062
8063 if ((target == 0
8064 || modifier == EXPAND_STACK_PARM
8065 || ! safe_from_p (target, treeop0, 1)
8066 || ! safe_from_p (target, treeop1, 1)
8067 /* Make sure we don't have a hard reg (such as function's return
8068 value) live across basic blocks, if not optimizing. */
8069 || (!optimize && REG_P (target)
8070 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8071 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8072
8073 emit_move_insn (target, const0_rtx);
8074
8075 op1 = gen_label_rtx ();
8076 jumpifnot_1 (code, treeop0, treeop1, op1);
8077
8078 emit_move_insn (target, const1_rtx);
8079
8080 emit_label (op1);
8081 return target;
8082
8083 case TRUTH_NOT_EXPR:
8084 if (modifier == EXPAND_STACK_PARM)
8085 target = 0;
8086 op0 = expand_expr (treeop0, target,
8087 VOIDmode, EXPAND_NORMAL);
8088 /* The parser is careful to generate TRUTH_NOT_EXPR
8089 only with operands that are always zero or one. */
8090 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8091 target, 1, OPTAB_LIB_WIDEN);
8092 gcc_assert (temp);
8093 return temp;
8094
8095 case COMPLEX_EXPR:
8096 /* Get the rtx code of the operands. */
8097 op0 = expand_normal (treeop0);
8098 op1 = expand_normal (treeop1);
8099
8100 if (!target)
8101 target = gen_reg_rtx (TYPE_MODE (type));
8102
8103 /* Move the real (op0) and imaginary (op1) parts to their location. */
8104 write_complex_part (target, op0, false);
8105 write_complex_part (target, op1, true);
8106
8107 return target;
8108
8109 case WIDEN_SUM_EXPR:
8110 {
8111 tree oprnd0 = treeop0;
8112 tree oprnd1 = treeop1;
8113
8114 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8115 target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
8116 target, unsignedp);
8117 return target;
8118 }
8119
8120 case REDUC_MAX_EXPR:
8121 case REDUC_MIN_EXPR:
8122 case REDUC_PLUS_EXPR:
8123 {
8124 op0 = expand_normal (treeop0);
8125 this_optab = optab_for_tree_code (code, type, optab_default);
8126 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
8127 gcc_assert (temp);
8128 return temp;
8129 }
8130
8131 case VEC_EXTRACT_EVEN_EXPR:
8132 case VEC_EXTRACT_ODD_EXPR:
8133 {
8134 expand_operands (treeop0, treeop1,
8135 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8136 this_optab = optab_for_tree_code (code, type, optab_default);
8137 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8138 OPTAB_WIDEN);
8139 gcc_assert (temp);
8140 return temp;
8141 }
8142
8143 case VEC_INTERLEAVE_HIGH_EXPR:
8144 case VEC_INTERLEAVE_LOW_EXPR:
8145 {
8146 expand_operands (treeop0, treeop1,
8147 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8148 this_optab = optab_for_tree_code (code, type, optab_default);
8149 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8150 OPTAB_WIDEN);
8151 gcc_assert (temp);
8152 return temp;
8153 }
8154
8155 case VEC_LSHIFT_EXPR:
8156 case VEC_RSHIFT_EXPR:
8157 {
8158 target = expand_vec_shift_expr (ops, target);
8159 return target;
8160 }
8161
8162 case VEC_UNPACK_HI_EXPR:
8163 case VEC_UNPACK_LO_EXPR:
8164 {
8165 op0 = expand_normal (treeop0);
8166 this_optab = optab_for_tree_code (code, type, optab_default);
8167 temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
8168 target, unsignedp);
8169 gcc_assert (temp);
8170 return temp;
8171 }
8172
8173 case VEC_UNPACK_FLOAT_HI_EXPR:
8174 case VEC_UNPACK_FLOAT_LO_EXPR:
8175 {
8176 op0 = expand_normal (treeop0);
8177 /* The signedness is determined from the input operand. */
8178 this_optab = optab_for_tree_code (code,
8179 TREE_TYPE (treeop0),
8180 optab_default);
8181 temp = expand_widen_pattern_expr
8182 (ops, op0, NULL_RTX, NULL_RTX,
8183 target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8184
8185 gcc_assert (temp);
8186 return temp;
8187 }
8188
8189 case VEC_WIDEN_MULT_HI_EXPR:
8190 case VEC_WIDEN_MULT_LO_EXPR:
8191 {
8192 tree oprnd0 = treeop0;
8193 tree oprnd1 = treeop1;
8194
8195 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8196 target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
8197 target, unsignedp);
8198 gcc_assert (target);
8199 return target;
8200 }
8201
8202 case VEC_PACK_TRUNC_EXPR:
8203 case VEC_PACK_SAT_EXPR:
8204 case VEC_PACK_FIX_TRUNC_EXPR:
8205 mode = TYPE_MODE (TREE_TYPE (treeop0));
8206 goto binop;
8207
8208 default:
8209 gcc_unreachable ();
8210 }
8211
8212 /* Here to do an ordinary binary operator. */
8213 binop:
8214 expand_operands (treeop0, treeop1,
8215 subtarget, &op0, &op1, EXPAND_NORMAL);
8216 binop2:
8217 this_optab = optab_for_tree_code (code, type, optab_default);
8218 binop3:
8219 if (modifier == EXPAND_STACK_PARM)
8220 target = 0;
8221 temp = expand_binop (mode, this_optab, op0, op1, target,
8222 unsignedp, OPTAB_LIB_WIDEN);
8223 gcc_assert (temp);
8224 return REDUCE_BIT_FIELD (temp);
8225 }
8226 #undef REDUCE_BIT_FIELD
8227
8228 rtx
8229 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
8230 enum expand_modifier modifier, rtx *alt_rtl)
8231 {
8232 rtx op0, op1, temp, decl_rtl;
8233 tree type;
8234 int unsignedp;
8235 enum machine_mode mode;
8236 enum tree_code code = TREE_CODE (exp);
8237 optab this_optab;
8238 rtx subtarget, original_target;
8239 int ignore;
8240 tree context;
8241 bool reduce_bit_field;
8242 location_t loc = EXPR_LOCATION (exp);
8243 struct separate_ops ops;
8244 tree treeop0, treeop1, treeop2;
8245
8246 type = TREE_TYPE (exp);
8247 mode = TYPE_MODE (type);
8248 unsignedp = TYPE_UNSIGNED (type);
8249
8250 treeop0 = treeop1 = treeop2 = NULL_TREE;
8251 if (!VL_EXP_CLASS_P (exp))
8252 switch (TREE_CODE_LENGTH (code))
8253 {
8254 default:
8255 case 3: treeop2 = TREE_OPERAND (exp, 2);
8256 case 2: treeop1 = TREE_OPERAND (exp, 1);
8257 case 1: treeop0 = TREE_OPERAND (exp, 0);
8258 case 0: break;
8259 }
8260 ops.code = code;
8261 ops.type = type;
8262 ops.op0 = treeop0;
8263 ops.op1 = treeop1;
8264 ops.op2 = treeop2;
8265 ops.location = loc;
8266
8267 ignore = (target == const0_rtx
8268 || ((CONVERT_EXPR_CODE_P (code)
8269 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
8270 && TREE_CODE (type) == VOID_TYPE));
8271
8272 /* An operation in what may be a bit-field type needs the
8273 result to be reduced to the precision of the bit-field type,
8274 which is narrower than that of the type's mode. */
8275 reduce_bit_field = (!ignore
8276 && TREE_CODE (type) == INTEGER_TYPE
8277 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
8278
8279 /* If we are going to ignore this result, we need only do something
8280 if there is a side-effect somewhere in the expression. If there
8281 is, short-circuit the most common cases here. Note that we must
8282 not call expand_expr with anything but const0_rtx in case this
8283 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
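/* Added illustrative note (a sketch, not original source): for a
   statement like (void) (a + b) with side-effect-free operands,
   nothing is emitted and const0_rtx is returned directly, whereas
   (void) (*p + f ()) must still evaluate its operands for the side
   effects of the call.  */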
8284
8285 if (ignore)
8286 {
8287 if (! TREE_SIDE_EFFECTS (exp))
8288 return const0_rtx;
8289
8290 /* Ensure we reference a volatile object even if value is ignored, but
8291 don't do this if all we are doing is taking its address. */
8292 if (TREE_THIS_VOLATILE (exp)
8293 && TREE_CODE (exp) != FUNCTION_DECL
8294 && mode != VOIDmode && mode != BLKmode
8295 && modifier != EXPAND_CONST_ADDRESS)
8296 {
8297 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
8298 if (MEM_P (temp))
8299 temp = copy_to_reg (temp);
8300 return const0_rtx;
8301 }
8302
8303 if (TREE_CODE_CLASS (code) == tcc_unary
8304 || code == COMPONENT_REF || code == INDIRECT_REF)
8305 return expand_expr (treeop0, const0_rtx, VOIDmode,
8306 modifier);
8307
8308 else if (TREE_CODE_CLASS (code) == tcc_binary
8309 || TREE_CODE_CLASS (code) == tcc_comparison
8310 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
8311 {
8312 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
8313 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
8314 return const0_rtx;
8315 }
8316 else if (code == BIT_FIELD_REF)
8317 {
8318 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
8319 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
8320 expand_expr (treeop2, const0_rtx, VOIDmode, modifier);
8321 return const0_rtx;
8322 }
8323
8324 target = 0;
8325 }
8326
8327 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
8328 target = 0;
8329
8330 /* Use subtarget as the target for operand 0 of a binary operation. */
8331 subtarget = get_subtarget (target);
8332 original_target = target;
8333
8334 switch (code)
8335 {
8336 case LABEL_DECL:
8337 {
8338 tree function = decl_function_context (exp);
8339
8340 temp = label_rtx (exp);
8341 temp = gen_rtx_LABEL_REF (Pmode, temp);
8342
8343 if (function != current_function_decl
8344 && function != 0)
8345 LABEL_REF_NONLOCAL_P (temp) = 1;
8346
8347 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
8348 return temp;
8349 }
8350
8351 case SSA_NAME:
8352 /* ??? ivopts calls expander, without any preparation from
8353 out-of-ssa. So fake instructions as if this were an access to the
8354 base variable. This unnecessarily allocates a pseudo; see whether we
8355 can reuse it if partition base vars have it set already. */
8356 if (!currently_expanding_to_rtl)
8357 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier, NULL);
8358 {
8359 gimple g = get_gimple_for_ssa_name (exp);
8360 if (g)
8361 return expand_expr_real_1 (gimple_assign_rhs_to_tree (g), target,
8362 tmode, modifier, NULL);
8363 }
8364 decl_rtl = get_rtx_for_ssa_name (exp);
8365 exp = SSA_NAME_VAR (exp);
8366 goto expand_decl_rtl;
8367
8368 case PARM_DECL:
8369 case VAR_DECL:
8370 /* If a static var's type was incomplete when the decl was written,
8371 but the type is complete now, lay out the decl now. */
8372 if (DECL_SIZE (exp) == 0
8373 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
8374 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
8375 layout_decl (exp, 0);
8376
8377 /* TLS emulation hook - replace __thread vars with
8378 *__emutls_get_address (&_emutls.var). */
8379 if (! targetm.have_tls
8380 && TREE_CODE (exp) == VAR_DECL
8381 && DECL_THREAD_LOCAL_P (exp))
8382 {
8383 exp = build_fold_indirect_ref_loc (loc, emutls_var_address (exp));
8384 return expand_expr_real_1 (exp, target, tmode, modifier, NULL);
8385 }
8386
8387 /* ... fall through ... */
8388
8389 case FUNCTION_DECL:
8390 case RESULT_DECL:
8391 decl_rtl = DECL_RTL (exp);
8392 expand_decl_rtl:
8393 gcc_assert (decl_rtl);
8394 decl_rtl = copy_rtx (decl_rtl);
8395
8396 /* Ensure the variable is marked as used even if it doesn't go through
8397 a parser. If it hasn't been used yet, write out an external
8398 definition. */
8399 if (! TREE_USED (exp))
8400 {
8401 assemble_external (exp);
8402 TREE_USED (exp) = 1;
8403 }
8404
8405 /* Show we haven't gotten RTL for this yet. */
8406 temp = 0;
8407
8408 /* Variables inherited from containing functions should have
8409 been lowered by this point. */
8410 context = decl_function_context (exp);
8411 gcc_assert (!context
8412 || context == current_function_decl
8413 || TREE_STATIC (exp)
8414 /* ??? C++ creates functions that are not TREE_STATIC. */
8415 || TREE_CODE (exp) == FUNCTION_DECL);
8416
8417 /* This is the case of an array whose size is to be determined
8418 from its initializer, while the initializer is still being parsed.
8419 See expand_decl. */
8420
8421 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
8422 temp = validize_mem (decl_rtl);
8423
8424 /* If DECL_RTL is memory, we are in the normal case and the
8425 address is not valid, get the address into a register. */
8426
8427 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
8428 {
8429 if (alt_rtl)
8430 *alt_rtl = decl_rtl;
8431 decl_rtl = use_anchored_address (decl_rtl);
8432 if (modifier != EXPAND_CONST_ADDRESS
8433 && modifier != EXPAND_SUM
8434 && !memory_address_p (DECL_MODE (exp), XEXP (decl_rtl, 0)))
8435 temp = replace_equiv_address (decl_rtl,
8436 copy_rtx (XEXP (decl_rtl, 0)));
8437 }
8438
8439 /* If we got something, return it. But first, set the alignment
8440 if the address is a register. */
8441 if (temp != 0)
8442 {
8443 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
8444 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
8445
8446 return temp;
8447 }
8448
8449 /* If the mode of DECL_RTL does not match that of the decl, it
8450 must be a promoted value. We return a SUBREG of the wanted mode,
8451 but mark it so that we know that it was already extended. */
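/* Added illustrative note (a sketch, not original source): on a
   target that promotes subword variables, a short decl may live in
   an SImode pseudo; what is returned is then roughly

       (subreg:HI (reg:SI N) 0)

   with SUBREG_PROMOTED_VAR_P set, so later code knows the value has
   already been sign- or zero-extended.  */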
8452
8453 if (REG_P (decl_rtl)
8454 && GET_MODE (decl_rtl) != DECL_MODE (exp))
8455 {
8456 enum machine_mode pmode;
8457
8458 /* Get the signedness used for this variable. Ensure we get the
8459 same mode we got when the variable was declared. */
8460 pmode = promote_decl_mode (exp, &unsignedp);
8461 gcc_assert (GET_MODE (decl_rtl) == pmode);
8462
8463 temp = gen_lowpart_SUBREG (mode, decl_rtl);
8464 SUBREG_PROMOTED_VAR_P (temp) = 1;
8465 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
8466 return temp;
8467 }
8468
8469 return decl_rtl;
8470
8471 case INTEGER_CST:
8472 temp = immed_double_const (TREE_INT_CST_LOW (exp),
8473 TREE_INT_CST_HIGH (exp), mode);
8474
8475 return temp;
8476
8477 case VECTOR_CST:
8478 {
8479 tree tmp = NULL_TREE;
8480 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
8481 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
8482 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
8483 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
8484 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
8485 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
8486 return const_vector_from_tree (exp);
8487 if (GET_MODE_CLASS (mode) == MODE_INT)
8488 {
8489 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
8490 if (type_for_mode)
8491 tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
8492 }
8493 if (!tmp)
8494 tmp = build_constructor_from_list (type,
8495 TREE_VECTOR_CST_ELTS (exp));
8496 return expand_expr (tmp, ignore ? const0_rtx : target,
8497 tmode, modifier);
8498 }
8499
8500 case CONST_DECL:
8501 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
8502
8503 case REAL_CST:
8504 /* If optimized, generate immediate CONST_DOUBLE
8505 which will be turned into memory by reload if necessary.
8506
8507 We used to force a register so that loop.c could see it. But
8508 this does not allow gen_* patterns to perform optimizations with
8509 the constants. It also produces two insns in cases like "x = 1.0;".
8510 On most machines, floating-point constants are not permitted in
8511 many insns, so we'd end up copying it to a register in any case.
8512
8513 Now, we do the copying in expand_binop, if appropriate. */
8514 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
8515 TYPE_MODE (TREE_TYPE (exp)));
8516
8517 case FIXED_CST:
8518 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
8519 TYPE_MODE (TREE_TYPE (exp)));
8520
8521 case COMPLEX_CST:
8522 /* Handle evaluating a complex constant in a CONCAT target. */
8523 if (original_target && GET_CODE (original_target) == CONCAT)
8524 {
8525 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8526 rtx rtarg, itarg;
8527
8528 rtarg = XEXP (original_target, 0);
8529 itarg = XEXP (original_target, 1);
8530
8531 /* Move the real and imaginary parts separately. */
8532 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
8533 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
8534
8535 if (op0 != rtarg)
8536 emit_move_insn (rtarg, op0);
8537 if (op1 != itarg)
8538 emit_move_insn (itarg, op1);
8539
8540 return original_target;
8541 }
8542
8543 /* ... fall through ... */
8544
8545 case STRING_CST:
8546 temp = expand_expr_constant (exp, 1, modifier);
8547
8548 /* temp contains a constant address.
8549 On RISC machines where a constant address isn't valid,
8550 make some insns to get that address into a register. */
8551 if (modifier != EXPAND_CONST_ADDRESS
8552 && modifier != EXPAND_INITIALIZER
8553 && modifier != EXPAND_SUM
8554 && ! memory_address_p (mode, XEXP (temp, 0)))
8555 return replace_equiv_address (temp,
8556 copy_rtx (XEXP (temp, 0)));
8557 return temp;
8558
8559 case SAVE_EXPR:
8560 {
8561 tree val = treeop0;
8562 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
8563
8564 if (!SAVE_EXPR_RESOLVED_P (exp))
8565 {
8566 /* We can indeed still hit this case, typically via builtin
8567 expanders calling save_expr immediately before expanding
8568 something. Assume this means that we only have to deal
8569 with non-BLKmode values. */
8570 gcc_assert (GET_MODE (ret) != BLKmode);
8571
8572 val = build_decl (EXPR_LOCATION (exp),
8573 VAR_DECL, NULL, TREE_TYPE (exp));
8574 DECL_ARTIFICIAL (val) = 1;
8575 DECL_IGNORED_P (val) = 1;
8576 treeop0 = val;
8577 TREE_OPERAND (exp, 0) = treeop0;
8578 SAVE_EXPR_RESOLVED_P (exp) = 1;
8579
8580 if (!CONSTANT_P (ret))
8581 ret = copy_to_reg (ret);
8582 SET_DECL_RTL (val, ret);
8583 }
8584
8585 return ret;
8586 }
8587
8588
8589 case CONSTRUCTOR:
8590 /* If we don't need the result, just ensure we evaluate any
8591 subexpressions. */
8592 if (ignore)
8593 {
8594 unsigned HOST_WIDE_INT idx;
8595 tree value;
8596
8597 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
8598 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
8599
8600 return const0_rtx;
8601 }
8602
8603 return expand_constructor (exp, target, modifier, false);
8604
8605 case MISALIGNED_INDIRECT_REF:
8606 case ALIGN_INDIRECT_REF:
8607 case INDIRECT_REF:
8608 {
8609 tree exp1 = treeop0;
8610
8611 if (modifier != EXPAND_WRITE)
8612 {
8613 tree t;
8614
8615 t = fold_read_from_constant_string (exp);
8616 if (t)
8617 return expand_expr (t, target, tmode, modifier);
8618 }
8619
8620 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
8621 op0 = memory_address (mode, op0);
8622
8623 if (code == ALIGN_INDIRECT_REF)
8624 {
8625 int align = TYPE_ALIGN_UNIT (type);
8626 op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
8627 op0 = memory_address (mode, op0);
8628 }
8629
8630 temp = gen_rtx_MEM (mode, op0);
8631
8632 set_mem_attributes (temp, exp, 0);
8633
8634 /* Resolve the misalignment now, so that we don't have to remember
8635 to resolve it later. Of course, this only works for reads. */
8636 if (code == MISALIGNED_INDIRECT_REF)
8637 {
8638 int icode;
8639 rtx reg, insn;
8640
8641 gcc_assert (modifier == EXPAND_NORMAL
8642 || modifier == EXPAND_STACK_PARM);
8643
8644 /* The vectorizer should have already checked the mode. */
8645 icode = optab_handler (movmisalign_optab, mode)->insn_code;
8646 gcc_assert (icode != CODE_FOR_nothing);
8647
8648 /* We've already validated the memory, and we're creating a
8649 new pseudo destination. The predicates really can't fail. */
8650 reg = gen_reg_rtx (mode);
8651
8652 /* Nor can the insn generator. */
8653 insn = GEN_FCN (icode) (reg, temp);
8654 emit_insn (insn);
8655
8656 return reg;
8657 }
8658
8659 return temp;
8660 }
8661
8662 case TARGET_MEM_REF:
8663 {
8664 struct mem_address addr;
8665
8666 get_address_description (exp, &addr);
8667 op0 = addr_for_mem_ref (&addr, true);
8668 op0 = memory_address (mode, op0);
8669 temp = gen_rtx_MEM (mode, op0);
8670 set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
8671 }
8672 return temp;
8673
8674 case ARRAY_REF:
8675
8676 {
8677 tree array = treeop0;
8678 tree index = treeop1;
8679
8680 /* Fold an expression like: "foo"[2].
8681 This is not done in fold so it won't happen inside &.
8682 Don't fold if this is for wide characters since it's too
8683 difficult to do correctly and this is a very rare case. */
8684
8685 if (modifier != EXPAND_CONST_ADDRESS
8686 && modifier != EXPAND_INITIALIZER
8687 && modifier != EXPAND_MEMORY)
8688 {
8689 tree t = fold_read_from_constant_string (exp);
8690
8691 if (t)
8692 return expand_expr (t, target, tmode, modifier);
8693 }
8694
8695 /* If this is a constant index into a constant array,
8696 just get the value from the array. Handle both the case where
8697 we have an explicit constructor and the case where our operand is
8698 a variable that was declared const. */
8699
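/* Added illustrative note (a sketch, not original source): given

       static const int tbl[3] = { 10, 20, 30 };

   a reference tbl[1] can fold right here to the constant 20 via the
   DECL_INITIAL walk below, so no memory load need be emitted.  */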
8700 if (modifier != EXPAND_CONST_ADDRESS
8701 && modifier != EXPAND_INITIALIZER
8702 && modifier != EXPAND_MEMORY
8703 && TREE_CODE (array) == CONSTRUCTOR
8704 && ! TREE_SIDE_EFFECTS (array)
8705 && TREE_CODE (index) == INTEGER_CST)
8706 {
8707 unsigned HOST_WIDE_INT ix;
8708 tree field, value;
8709
8710 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
8711 field, value)
8712 if (tree_int_cst_equal (field, index))
8713 {
8714 if (!TREE_SIDE_EFFECTS (value))
8715 return expand_expr (fold (value), target, tmode, modifier);
8716 break;
8717 }
8718 }
8719
8720 else if (optimize >= 1
8721 && modifier != EXPAND_CONST_ADDRESS
8722 && modifier != EXPAND_INITIALIZER
8723 && modifier != EXPAND_MEMORY
8724 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8725 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8726 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
8727 && targetm.binds_local_p (array))
8728 {
8729 if (TREE_CODE (index) == INTEGER_CST)
8730 {
8731 tree init = DECL_INITIAL (array);
8732
8733 if (TREE_CODE (init) == CONSTRUCTOR)
8734 {
8735 unsigned HOST_WIDE_INT ix;
8736 tree field, value;
8737
8738 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
8739 field, value)
8740 if (tree_int_cst_equal (field, index))
8741 {
8742 if (TREE_SIDE_EFFECTS (value))
8743 break;
8744
8745 if (TREE_CODE (value) == CONSTRUCTOR)
8746 {
8747 /* If VALUE is a CONSTRUCTOR, this
8748 optimization is only useful if
8749 this doesn't store the CONSTRUCTOR
8750 into memory. If it does, it is more
8751 efficient to just load the data from
8752 the array directly. */
8753 rtx ret = expand_constructor (value, target,
8754 modifier, true);
8755 if (ret == NULL_RTX)
8756 break;
8757 }
8758
8759 return expand_expr (fold (value), target, tmode,
8760 modifier);
8761 }
8762 }
8763 else if (TREE_CODE (init) == STRING_CST)
8764 {
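/* This branch handles reads like the following sketch, where the
declaration is hypothetical:

static const char greet[3] = "hi";
char c = greet[1];

INIT is the STRING_CST "hi"; once INDEX1 is rebased by the lower
bound and checked against the string length, the access expands
to the QImode constant 'i'. */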
8765 tree index1 = index;
8766 tree low_bound = array_ref_low_bound (exp);
8767 index1 = fold_convert_loc (loc, sizetype,
8768 treeop1);
8769
8770 /* Optimize the special-case of a zero lower bound.
8771
8772 We convert the low_bound to sizetype to avoid some problems
8773 with constant folding. (E.g. suppose the lower bound is 1,
8774 and its mode is QI. Without the conversion, (ARRAY
8775 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8776 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8777
8778 if (! integer_zerop (low_bound))
8779 index1 = size_diffop_loc (loc, index1,
8780 fold_convert_loc (loc, sizetype,
8781 low_bound));
8782
8783 if (0 > compare_tree_int (index1,
8784 TREE_STRING_LENGTH (init)))
8785 {
8786 tree type = TREE_TYPE (TREE_TYPE (init));
8787 enum machine_mode mode = TYPE_MODE (type);
8788
8789 if (GET_MODE_CLASS (mode) == MODE_INT
8790 && GET_MODE_SIZE (mode) == 1)
8791 return gen_int_mode (TREE_STRING_POINTER (init)
8792 [TREE_INT_CST_LOW (index1)],
8793 mode);
8794 }
8795 }
8796 }
8797 }
8798 }
8799 goto normal_inner_ref;
8800
8801 case COMPONENT_REF:
8802 /* If the operand is a CONSTRUCTOR, we can just extract the
8803 appropriate field if it is present. */
8804 if (TREE_CODE (treeop0) == CONSTRUCTOR)
8805 {
8806 unsigned HOST_WIDE_INT idx;
8807 tree field, value;
8808
8809 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
8810 idx, field, value)
8811 if (field == treeop1
8812 /* We can normally use the value of the field in the
8813 CONSTRUCTOR. However, if this is a bitfield in
8814 an integral mode that we can fit in a HOST_WIDE_INT,
8815 we must mask only the number of bits in the bitfield,
8816 since this is done implicitly by the constructor. If
8817 the bitfield does not meet either of those conditions,
8818 we can't do this optimization. */
8819 && (! DECL_BIT_FIELD (field)
8820 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
8821 && (GET_MODE_BITSIZE (DECL_MODE (field))
8822 <= HOST_BITS_PER_WIDE_INT))))
8823 {
8824 if (DECL_BIT_FIELD (field)
8825 && modifier == EXPAND_STACK_PARM)
8826 target = 0;
8827 op0 = expand_expr (value, target, tmode, modifier);
8828 if (DECL_BIT_FIELD (field))
8829 {
8830 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
8831 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
8832
8833 if (TYPE_UNSIGNED (TREE_TYPE (field)))
8834 {
8835 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
8836 op0 = expand_and (imode, op0, op1, target);
8837 }
8838 else
8839 {
8840 tree count
8841 = build_int_cst (NULL_TREE,
8842 GET_MODE_BITSIZE (imode) - bitsize);
8843
8844 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
8845 target, 0);
8846 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
8847 target, 0);
8848 }
8849 }
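/* A worked instance of the signed case above, assuming a
hypothetical 3-bit signed field extracted in 32-bit SImode:
COUNT is 32 - 3 = 29, so a field value of 7 (bit pattern 111)
becomes 0xe0000000 after the left shift and -1 after the
arithmetic right shift, i.e. it is correctly sign-extended. */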
8850
8851 return op0;
8852 }
8853 }
8854 goto normal_inner_ref;
8855
8856 case BIT_FIELD_REF:
8857 case ARRAY_RANGE_REF:
8858 normal_inner_ref:
8859 {
8860 enum machine_mode mode1, mode2;
8861 HOST_WIDE_INT bitsize, bitpos;
8862 tree offset;
8863 int volatilep = 0, must_force_mem;
8864 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8865 &mode1, &unsignedp, &volatilep, true);
8866 rtx orig_op0, memloc;
8867
8868 /* If we got back the original object, something is wrong. Perhaps
8869 we are evaluating an expression too early. In any event, don't
8870 infinitely recurse. */
8871 gcc_assert (tem != exp);
8872
8873 /* If TEM's type is a union of variable size, pass TARGET to the inner
8874 computation, since it will need a temporary and TARGET is known
8875 to be safe to use. This occurs in unchecked conversion in Ada. */
8876 orig_op0 = op0
8877 = expand_expr (tem,
8878 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
8879 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
8880 != INTEGER_CST)
8881 && modifier != EXPAND_STACK_PARM
8882 ? target : NULL_RTX),
8883 VOIDmode,
8884 (modifier == EXPAND_INITIALIZER
8885 || modifier == EXPAND_CONST_ADDRESS
8886 || modifier == EXPAND_STACK_PARM)
8887 ? modifier : EXPAND_NORMAL);
8888
8889 mode2
8890 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
8891
8892 /* If we have either an offset, a BLKmode result, or a reference
8893 outside the underlying object, we must force it to memory.
8894 Such a case can occur in Ada if we have unchecked conversion
8895 of an expression from a scalar type to an aggregate type or
8896 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
8897 passed a partially uninitialized object or a view-conversion
8898 to a larger size. */
8899 must_force_mem = (offset
8900 || mode1 == BLKmode
8901 || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
8902
8903 /* Handle CONCAT first. */
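/* E.g. on a target with 64-bit DFmode, a CONCAT holding a complex
double has two DFmode halves: a real-part reference (bitpos 0,
bitsize 64) selects XEXP (op0, 0) below, an imaginary-part
reference (bitpos 64) selects XEXP (op0, 1), and an access
straddling the two halves is forced into memory instead. */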
8904 if (GET_CODE (op0) == CONCAT && !must_force_mem)
8905 {
8906 if (bitpos == 0
8907 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
8908 return op0;
8909 if (bitpos == 0
8910 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
8911 && bitsize)
8912 {
8913 op0 = XEXP (op0, 0);
8914 mode2 = GET_MODE (op0);
8915 }
8916 else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
8917 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
8918 && bitpos
8919 && bitsize)
8920 {
8921 op0 = XEXP (op0, 1);
8922 bitpos = 0;
8923 mode2 = GET_MODE (op0);
8924 }
8925 else
8926 /* Otherwise force into memory. */
8927 must_force_mem = 1;
8928 }
8929
8930 /* If this is a constant, put it in a register if it is a legitimate
8931 constant and we don't need a memory reference. */
8932 if (CONSTANT_P (op0)
8933 && mode2 != BLKmode
8934 && LEGITIMATE_CONSTANT_P (op0)
8935 && !must_force_mem)
8936 op0 = force_reg (mode2, op0);
8937
8938 /* Otherwise, if this is a constant, try to force it to the constant
8939 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
8940 is a legitimate constant. */
8941 else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
8942 op0 = validize_mem (memloc);
8943
8944 /* Otherwise, if this is a constant or the object is not in memory
8945 and needs to be, put it there. */
8946 else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
8947 {
8948 tree nt = build_qualified_type (TREE_TYPE (tem),
8949 (TYPE_QUALS (TREE_TYPE (tem))
8950 | TYPE_QUAL_CONST));
8951 memloc = assign_temp (nt, 1, 1, 1);
8952 emit_move_insn (memloc, op0);
8953 op0 = memloc;
8954 }
8955
8956 if (offset)
8957 {
8958 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
8959 EXPAND_SUM);
8960
8961 gcc_assert (MEM_P (op0));
8962
8963 #ifdef POINTERS_EXTEND_UNSIGNED
8964 if (GET_MODE (offset_rtx) != Pmode)
8965 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
8966 #else
8967 if (GET_MODE (offset_rtx) != ptr_mode)
8968 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
8969 #endif
8970
8971 if (GET_MODE (op0) == BLKmode
8972 /* A constant address in OP0 can have VOIDmode; we must
8973 not try to call force_reg in that case. */
8974 && GET_MODE (XEXP (op0, 0)) != VOIDmode
8975 && bitsize != 0
8976 && (bitpos % bitsize) == 0
8977 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
8978 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
8979 {
8980 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
8981 bitpos = 0;
8982 }
8983
8984 op0 = offset_address (op0, offset_rtx,
8985 highest_pow2_factor (offset));
8986 }
8987
8988 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
8989 record its alignment as BIGGEST_ALIGNMENT. */
8990 if (MEM_P (op0) && bitpos == 0 && offset != 0
8991 && is_aligning_offset (offset, tem))
8992 set_mem_align (op0, BIGGEST_ALIGNMENT);
8993
8994 /* Don't forget about volatility even if this is a bitfield. */
8995 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
8996 {
8997 if (op0 == orig_op0)
8998 op0 = copy_rtx (op0);
8999
9000 MEM_VOLATILE_P (op0) = 1;
9001 }
9002
9003 /* In cases where an aligned union has an unaligned object
9004 as a field, we might be extracting a BLKmode value from
9005 an integer-mode (e.g., SImode) object. Handle this case
9006 by doing the extract into an object as wide as the field
9007 (which we know to be the width of a basic mode), then
9008 storing into memory, and changing the mode to BLKmode. */
9009 if (mode1 == VOIDmode
9010 || REG_P (op0) || GET_CODE (op0) == SUBREG
9011 || (mode1 != BLKmode && ! direct_load[(int) mode1]
9012 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
9013 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
9014 && modifier != EXPAND_CONST_ADDRESS
9015 && modifier != EXPAND_INITIALIZER)
9016 /* If the field isn't aligned enough to fetch as a memref,
9017 fetch it as a bit field. */
9018 || (mode1 != BLKmode
9019 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
9020 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
9021 || (MEM_P (op0)
9022 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
9023 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
9024 && ((modifier == EXPAND_CONST_ADDRESS
9025 || modifier == EXPAND_INITIALIZER)
9026 ? STRICT_ALIGNMENT
9027 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
9028 || (bitpos % BITS_PER_UNIT != 0)))
9029 /* If the type and the field are a constant size and the
9030 size of the type isn't the same size as the bitfield,
9031 we must use bitfield operations. */
9032 || (bitsize >= 0
9033 && TYPE_SIZE (TREE_TYPE (exp))
9034 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
9035 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
9036 bitsize)))
9037 {
9038 enum machine_mode ext_mode = mode;
9039
9040 if (ext_mode == BLKmode
9041 && ! (target != 0 && MEM_P (op0)
9042 && MEM_P (target)
9043 && bitpos % BITS_PER_UNIT == 0))
9044 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
9045
9046 if (ext_mode == BLKmode)
9047 {
9048 if (target == 0)
9049 target = assign_temp (type, 0, 1, 1);
9050
9051 if (bitsize == 0)
9052 return target;
9053
9054 /* In this case, BITPOS must start at a byte boundary and
9055 TARGET, if specified, must be a MEM. */
9056 gcc_assert (MEM_P (op0)
9057 && (!target || MEM_P (target))
9058 && !(bitpos % BITS_PER_UNIT));
9059
9060 emit_block_move (target,
9061 adjust_address (op0, VOIDmode,
9062 bitpos / BITS_PER_UNIT),
9063 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
9064 / BITS_PER_UNIT),
9065 (modifier == EXPAND_STACK_PARM
9066 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9067
9068 return target;
9069 }
9070
9071 op0 = validize_mem (op0);
9072
9073 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
9074 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
9075
9076 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
9077 (modifier == EXPAND_STACK_PARM
9078 ? NULL_RTX : target),
9079 ext_mode, ext_mode);
9080
9081 /* If the result is a record type and BITSIZE is narrower than
9082 the mode of OP0, an integral mode, and this is a big endian
9083 machine, we must put the field into the high-order bits. */
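/* Sketch: on a hypothetical big-endian target, a 24-bit field
extracted into 32-bit SImode sits in the low-order bits, so the
shift below by 32 - 24 = 8 moves it to the high-order end, where
a BLKmode record value is expected to start. */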
9084 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
9085 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9086 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
9087 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
9088 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
9089 - bitsize),
9090 op0, 1);
9091
9092 /* If the result type is BLKmode, store the data into a temporary
9093 of the appropriate type, but with the mode corresponding to the
9094 mode for the data we have (op0's mode). It's tempting to make
9095 this a constant type, since we know it's only being stored once,
9096 but that can cause problems if we are taking the address of this
9097 COMPONENT_REF because the MEM of any reference via that address
9098 will have flags corresponding to the type, which will not
9099 necessarily be constant. */
9100 if (mode == BLKmode)
9101 {
9102 HOST_WIDE_INT size = GET_MODE_BITSIZE (ext_mode);
9103 rtx new_rtx;
9104
9105 /* If the reference doesn't use the alias set of its type,
9106 we cannot create the temporary using that type. */
9107 if (component_uses_parent_alias_set (exp))
9108 {
9109 new_rtx = assign_stack_local (ext_mode, size, 0);
9110 set_mem_alias_set (new_rtx, get_alias_set (exp));
9111 }
9112 else
9113 new_rtx = assign_stack_temp_for_type (ext_mode, size, 0, type);
9114
9115 emit_move_insn (new_rtx, op0);
9116 op0 = copy_rtx (new_rtx);
9117 PUT_MODE (op0, BLKmode);
9118 set_mem_attributes (op0, exp, 1);
9119 }
9120
9121 return op0;
9122 }
9123
9124 /* If the result is BLKmode, use that to access the object
9125 now as well. */
9126 if (mode == BLKmode)
9127 mode1 = BLKmode;
9128
9129 /* Get a reference to just this component. */
9130 if (modifier == EXPAND_CONST_ADDRESS
9131 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
9132 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
9133 else
9134 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9135
9136 if (op0 == orig_op0)
9137 op0 = copy_rtx (op0);
9138
9139 set_mem_attributes (op0, exp, 0);
9140 if (REG_P (XEXP (op0, 0)))
9141 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
9142
9143 MEM_VOLATILE_P (op0) |= volatilep;
9144 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
9145 || modifier == EXPAND_CONST_ADDRESS
9146 || modifier == EXPAND_INITIALIZER)
9147 return op0;
9148 else if (target == 0)
9149 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
9150
9151 convert_move (target, op0, unsignedp);
9152 return target;
9153 }
9154
9155 case OBJ_TYPE_REF:
9156 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
9157
9158 case CALL_EXPR:
9159 /* All valid uses of __builtin_va_arg_pack () are removed during
9160 inlining. */
9161 if (CALL_EXPR_VA_ARG_PACK (exp))
9162 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
9163 {
9164 tree fndecl = get_callee_fndecl (exp), attr;
9165
9166 if (fndecl
9167 && (attr = lookup_attribute ("error",
9168 DECL_ATTRIBUTES (fndecl))) != NULL)
9169 error ("%Kcall to %qs declared with attribute error: %s",
9170 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
9171 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
9172 if (fndecl
9173 && (attr = lookup_attribute ("warning",
9174 DECL_ATTRIBUTES (fndecl))) != NULL)
9175 warning_at (tree_nonartificial_location (exp),
9176 0, "%Kcall to %qs declared with attribute warning: %s",
9177 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
9178 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
9179
9180 /* Check for a built-in function. */
9181 if (fndecl && DECL_BUILT_IN (fndecl))
9182 {
9183 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
9184 return expand_builtin (exp, target, subtarget, tmode, ignore);
9185 }
9186 }
9187 return expand_call (exp, target, ignore);
9188
9189 case VIEW_CONVERT_EXPR:
9190 op0 = NULL_RTX;
9191
9192 /* If we are converting to BLKmode, try to avoid an intermediate
9193 temporary by fetching an inner memory reference. */
9194 if (mode == BLKmode
9195 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
9196 && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
9197 && handled_component_p (treeop0))
9198 {
9199 enum machine_mode mode1;
9200 HOST_WIDE_INT bitsize, bitpos;
9201 tree offset;
9202 int unsignedp;
9203 int volatilep = 0;
9204 tree tem
9205 = get_inner_reference (treeop0, &bitsize, &bitpos,
9206 &offset, &mode1, &unsignedp, &volatilep,
9207 true);
9208 rtx orig_op0;
9209
9210 /* ??? We should work harder and deal with non-zero offsets. */
9211 if (!offset
9212 && (bitpos % BITS_PER_UNIT) == 0
9213 && bitsize >= 0
9214 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) == 0)
9215 {
9216 /* See the normal_inner_ref case for the rationale. */
9217 orig_op0
9218 = expand_expr (tem,
9219 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
9220 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
9221 != INTEGER_CST)
9222 && modifier != EXPAND_STACK_PARM
9223 ? target : NULL_RTX),
9224 VOIDmode,
9225 (modifier == EXPAND_INITIALIZER
9226 || modifier == EXPAND_CONST_ADDRESS
9227 || modifier == EXPAND_STACK_PARM)
9228 ? modifier : EXPAND_NORMAL);
9229
9230 if (MEM_P (orig_op0))
9231 {
9232 op0 = orig_op0;
9233
9234 /* Get a reference to just this component. */
9235 if (modifier == EXPAND_CONST_ADDRESS
9236 || modifier == EXPAND_SUM
9237 || modifier == EXPAND_INITIALIZER)
9238 op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
9239 else
9240 op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
9241
9242 if (op0 == orig_op0)
9243 op0 = copy_rtx (op0);
9244
9245 set_mem_attributes (op0, treeop0, 0);
9246 if (REG_P (XEXP (op0, 0)))
9247 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
9248
9249 MEM_VOLATILE_P (op0) |= volatilep;
9250 }
9251 }
9252 }
9253
9254 if (!op0)
9255 op0 = expand_expr (treeop0,
9256 NULL_RTX, VOIDmode, modifier);
9257
9258 /* If the input and output modes are both the same, we are done. */
9259 if (mode == GET_MODE (op0))
9260 ;
9261 /* If neither mode is BLKmode, and both modes are the same size
9262 then we can use gen_lowpart. */
9263 else if (mode != BLKmode && GET_MODE (op0) != BLKmode
9264 && GET_MODE_SIZE (mode) == GET_MODE_SIZE (GET_MODE (op0))
9265 && !COMPLEX_MODE_P (GET_MODE (op0)))
9266 {
9267 if (GET_CODE (op0) == SUBREG)
9268 op0 = force_reg (GET_MODE (op0), op0);
9269 op0 = gen_lowpart (mode, op0);
9270 }
9271 /* If both modes are integral, then we can convert from one to the
9272 other. */
9273 else if (SCALAR_INT_MODE_P (GET_MODE (op0)) && SCALAR_INT_MODE_P (mode))
9274 op0 = convert_modes (mode, GET_MODE (op0), op0,
9275 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
9276 /* As a last resort, spill op0 to memory, and reload it in a
9277 different mode. */
9278 else if (!MEM_P (op0))
9279 {
9280 /* If the operand is not a MEM, force it into memory. Since we
9281 are going to be changing the mode of the MEM, don't call
9282 force_const_mem for constants because we don't allow pool
9283 constants to change mode. */
9284 tree inner_type = TREE_TYPE (treeop0);
9285
9286 gcc_assert (!TREE_ADDRESSABLE (exp));
9287
9288 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
9289 target
9290 = assign_stack_temp_for_type
9291 (TYPE_MODE (inner_type),
9292 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
9293
9294 emit_move_insn (target, op0);
9295 op0 = target;
9296 }
9297
9298 /* At this point, OP0 is in the correct mode. If the output type is
9299 such that the operand is known to be aligned, indicate that it is.
9300 Otherwise, we need only be concerned about alignment for non-BLKmode
9301 results. */
9302 if (MEM_P (op0))
9303 {
9304 op0 = copy_rtx (op0);
9305
9306 if (TYPE_ALIGN_OK (type))
9307 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
9308 else if (STRICT_ALIGNMENT
9309 && mode != BLKmode
9310 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
9311 {
9312 tree inner_type = TREE_TYPE (treeop0);
9313 HOST_WIDE_INT temp_size
9314 = MAX (int_size_in_bytes (inner_type),
9315 (HOST_WIDE_INT) GET_MODE_SIZE (mode));
9316 rtx new_rtx
9317 = assign_stack_temp_for_type (mode, temp_size, 0, type);
9318 rtx new_with_op0_mode
9319 = adjust_address (new_rtx, GET_MODE (op0), 0);
9320
9321 gcc_assert (!TREE_ADDRESSABLE (exp));
9322
9323 if (GET_MODE (op0) == BLKmode)
9324 emit_block_move (new_with_op0_mode, op0,
9325 GEN_INT (GET_MODE_SIZE (mode)),
9326 (modifier == EXPAND_STACK_PARM
9327 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9328 else
9329 emit_move_insn (new_with_op0_mode, op0);
9330
9331 op0 = new_rtx;
9332 }
9333
9334 op0 = adjust_address (op0, mode, 0);
9335 }
9336
9337 return op0;
9338
9339 /* Use a compare and a jump for BLKmode comparisons, or for function
9340 type comparisons if HAVE_canonicalize_funcptr_for_compare. */
9341
9342 /* Although TRUTH_{AND,OR}IF_EXPR aren't present in GIMPLE, they
9343 are occasionally created by folding during expansion. */
9344 case TRUTH_ANDIF_EXPR:
9345 case TRUTH_ORIF_EXPR:
9346 if (! ignore
9347 && (target == 0
9348 || modifier == EXPAND_STACK_PARM
9349 || ! safe_from_p (target, treeop0, 1)
9350 || ! safe_from_p (target, treeop1, 1)
9351 /* Make sure we don't have a hard reg (such as function's return
9352 value) live across basic blocks, if not optimizing. */
9353 || (!optimize && REG_P (target)
9354 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
9355 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
9356
9357 if (target)
9358 emit_move_insn (target, const0_rtx);
9359
9360 op1 = gen_label_rtx ();
9361 jumpifnot_1 (code, treeop0, treeop1, op1);
9362
9363 if (target)
9364 emit_move_insn (target, const1_rtx);
9365
9366 emit_label (op1);
9367 return ignore ? const0_rtx : target;
9368
9369 case STATEMENT_LIST:
9370 {
9371 tree_stmt_iterator iter;
9372
9373 gcc_assert (ignore);
9374
9375 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
9376 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
9377 }
9378 return const0_rtx;
9379
9380 case COND_EXPR:
9381 /* A COND_EXPR with its type being VOID_TYPE represents a
9382 conditional jump and is handled in
9383 expand_gimple_cond_expr. */
9384 gcc_assert (!VOID_TYPE_P (type));
9385
9386 /* Note that COND_EXPRs whose type is a structure or union
9387 are required to be constructed to contain assignments to
9388 a temporary variable, so that we can evaluate them here
9389 for side effect only. If type is void, we must do likewise. */
9390
9391 gcc_assert (!TREE_ADDRESSABLE (type)
9392 && !ignore
9393 && TREE_TYPE (treeop1) != void_type_node
9394 && TREE_TYPE (treeop2) != void_type_node);
9395
9396 /* If we are not to produce a result, we have no target. Otherwise,
9397 if a target was specified use it; it will not be used as an
9398 intermediate target unless it is safe. If no target, use a
9399 temporary. */
9400
9401 if (modifier != EXPAND_STACK_PARM
9402 && original_target
9403 && safe_from_p (original_target, treeop0, 1)
9404 && GET_MODE (original_target) == mode
9405 #ifdef HAVE_conditional_move
9406 && (! can_conditionally_move_p (mode)
9407 || REG_P (original_target))
9408 #endif
9409 && !MEM_P (original_target))
9410 temp = original_target;
9411 else
9412 temp = assign_temp (type, 0, 0, 1);
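/* The code below emits, in effect, this sketch for a hypothetical
source expression t = a ? b : c:

if (!a) goto L0;
temp = b;
goto L1;
L0:
temp = c;
L1:
(use temp)
*/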
9413
9414 do_pending_stack_adjust ();
9415 NO_DEFER_POP;
9416 op0 = gen_label_rtx ();
9417 op1 = gen_label_rtx ();
9418 jumpifnot (treeop0, op0);
9419 store_expr (treeop1, temp,
9420 modifier == EXPAND_STACK_PARM,
9421 false);
9422
9423 emit_jump_insn (gen_jump (op1));
9424 emit_barrier ();
9425 emit_label (op0);
9426 store_expr (treeop2, temp,
9427 modifier == EXPAND_STACK_PARM,
9428 false);
9429
9430 emit_label (op1);
9431 OK_DEFER_POP;
9432 return temp;
9433
9434 case VEC_COND_EXPR:
9435 target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9436 return target;
9437
9438 case MODIFY_EXPR:
9439 {
9440 tree lhs = treeop0;
9441 tree rhs = treeop1;
9442 gcc_assert (ignore);
9443
9444 /* Check for |= or &= of a bitfield of size one into another bitfield
9445 of size 1. In this case, (unless we need the result of the
9446 assignment) we can do this more efficiently with a
9447 test followed by an assignment, if necessary.
9448
9449 ??? At this point, we can't get a BIT_FIELD_REF here. But if
9450 things change so we do, this code should be enhanced to
9451 support it. */
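/* As a sketch, with hypothetical one-bit bitfields a and b in s:
"s.a |= s.b" becomes, in effect,

if (s.b) s.a = 1;

and "s.a &= s.b" becomes "if (!s.b) s.a = 0;", replacing the
read-modify-write of the destination bitfield with a test and a
conditional store. */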
9452 if (TREE_CODE (lhs) == COMPONENT_REF
9453 && (TREE_CODE (rhs) == BIT_IOR_EXPR
9454 || TREE_CODE (rhs) == BIT_AND_EXPR)
9455 && TREE_OPERAND (rhs, 0) == lhs
9456 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
9457 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
9458 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
9459 {
9460 rtx label = gen_label_rtx ();
9461 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
9462 do_jump (TREE_OPERAND (rhs, 1),
9463 value ? label : 0,
9464 value ? 0 : label);
9465 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
9466 MOVE_NONTEMPORAL (exp));
9467 do_pending_stack_adjust ();
9468 emit_label (label);
9469 return const0_rtx;
9470 }
9471
9472 expand_assignment (lhs, rhs, MOVE_NONTEMPORAL (exp));
9473 return const0_rtx;
9474 }
9475
9476 case ADDR_EXPR:
9477 return expand_expr_addr_expr (exp, target, tmode, modifier);
9478
9479 case REALPART_EXPR:
9480 op0 = expand_normal (treeop0);
9481 return read_complex_part (op0, false);
9482
9483 case IMAGPART_EXPR:
9484 op0 = expand_normal (treeop0);
9485 return read_complex_part (op0, true);
9486
9487 case RETURN_EXPR:
9488 case LABEL_EXPR:
9489 case GOTO_EXPR:
9490 case SWITCH_EXPR:
9491 case ASM_EXPR:
9492 /* Expanded in cfgexpand.c. */
9493 gcc_unreachable ();
9494
9495 case TRY_CATCH_EXPR:
9496 case CATCH_EXPR:
9497 case EH_FILTER_EXPR:
9498 case TRY_FINALLY_EXPR:
9499 /* Lowered by tree-eh.c. */
9500 gcc_unreachable ();
9501
9502 case WITH_CLEANUP_EXPR:
9503 case CLEANUP_POINT_EXPR:
9504 case TARGET_EXPR:
9505 case CASE_LABEL_EXPR:
9506 case VA_ARG_EXPR:
9507 case BIND_EXPR:
9508 case INIT_EXPR:
9509 case CONJ_EXPR:
9510 case COMPOUND_EXPR:
9511 case PREINCREMENT_EXPR:
9512 case PREDECREMENT_EXPR:
9513 case POSTINCREMENT_EXPR:
9514 case POSTDECREMENT_EXPR:
9515 case LOOP_EXPR:
9516 case EXIT_EXPR:
9517 /* Lowered by gimplify.c. */
9518 gcc_unreachable ();
9519
9520 case FDESC_EXPR:
9521 /* Function descriptors are not valid except as
9522 initialization constants, and should not be expanded. */
9523 gcc_unreachable ();
9524
9525 case WITH_SIZE_EXPR:
9526 /* WITH_SIZE_EXPR expands to its first argument. The caller should
9527 have pulled out the size to use in whatever context it needed. */
9528 return expand_expr_real (treeop0, original_target, tmode,
9529 modifier, alt_rtl);
9530
9531 case REALIGN_LOAD_EXPR:
9532 {
9533 tree oprnd0 = treeop0;
9534 tree oprnd1 = treeop1;
9535 tree oprnd2 = treeop2;
9536 rtx op2;
9537
9538 this_optab = optab_for_tree_code (code, type, optab_default);
9539 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9540 op2 = expand_normal (oprnd2);
9541 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9542 target, unsignedp);
9543 gcc_assert (temp);
9544 return temp;
9545 }
9546
9547 case DOT_PROD_EXPR:
9548 {
9549 tree oprnd0 = treeop0;
9550 tree oprnd1 = treeop1;
9551 tree oprnd2 = treeop2;
9552 rtx op2;
9553
9554 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9555 op2 = expand_normal (oprnd2);
9556 target = expand_widen_pattern_expr (&ops, op0, op1, op2,
9557 target, unsignedp);
9558 return target;
9559 }
9560
9561 case COMPOUND_LITERAL_EXPR:
9562 {
9563 /* Initialize the anonymous variable declared in the compound
9564 literal, then return the variable. */
9565 tree decl = COMPOUND_LITERAL_EXPR_DECL (exp);
9566
9567 /* Create RTL for this variable. */
9568 if (!DECL_RTL_SET_P (decl))
9569 {
9570 if (DECL_HARD_REGISTER (decl))
9571 /* The user specified an assembler name for this variable.
9572 Set that up now. */
9573 rest_of_decl_compilation (decl, 0, 0);
9574 else
9575 expand_decl (decl);
9576 }
9577
9578 return expand_expr_real (decl, original_target, tmode,
9579 modifier, alt_rtl);
9580 }
9581
9582 default:
9583 return expand_expr_real_2 (&ops, target, tmode, modifier);
9584 }
9585 }
9586 \f
9587 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
9588 signedness of TYPE), possibly returning the result in TARGET. */
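/* A plain-C sketch of the two non-constant cases handled below,
assuming a hypothetical 32-bit value X and PREC == 5:

unsigned u = x & ((1u << 5) - 1); (unsigned TYPE)
int s = (int) (x << (32 - 5)) >> (32 - 5); (signed TYPE)

The unsigned case masks away everything above bit PREC - 1; the
signed case uses a left shift and an arithmetic right shift so
that bit PREC - 1 is propagated through the upper bits. */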
9589 static rtx
9590 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
9591 {
9592 HOST_WIDE_INT prec = TYPE_PRECISION (type);
9593 if (target && GET_MODE (target) != GET_MODE (exp))
9594 target = 0;
9595 /* For constant values, reduce using build_int_cst_type. */
9596 if (CONST_INT_P (exp))
9597 {
9598 HOST_WIDE_INT value = INTVAL (exp);
9599 tree t = build_int_cst_type (type, value);
9600 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
9601 }
9602 else if (TYPE_UNSIGNED (type))
9603 {
9604 rtx mask;
9605 if (prec < HOST_BITS_PER_WIDE_INT)
9606 mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
9607 GET_MODE (exp));
9608 else
9609 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
9610 ((unsigned HOST_WIDE_INT) 1
9611 << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
9612 GET_MODE (exp));
9613 return expand_and (GET_MODE (exp), exp, mask, target);
9614 }
9615 else
9616 {
9617 tree count = build_int_cst (NULL_TREE,
9618 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
9619 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
9620 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
9621 }
9622 }
9623 \f
9624 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9625 when applied to the address of EXP produces an address known to be
9626 aligned more than BIGGEST_ALIGNMENT. */
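/* This is the classic pointer-alignment idiom; as a sketch, with T
a hypothetical object and ALIGN a power of 2 larger than
BIGGEST_ALIGNMENT:

(char *) &t + (-(uintptr_t) &t & (ALIGN - 1))

ALIGN - 1 is the BIT_AND_EXPR constant checked below, the
NEGATE_EXPR wraps the address of T, and the sum is guaranteed
to be ALIGN-aligned. */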
9627
9628 static int
9629 is_aligning_offset (const_tree offset, const_tree exp)
9630 {
9631 /* Strip off any conversions. */
9632 while (CONVERT_EXPR_P (offset))
9633 offset = TREE_OPERAND (offset, 0);
9634
9635 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9636 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9637 if (TREE_CODE (offset) != BIT_AND_EXPR
9638 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9639 || compare_tree_int (TREE_OPERAND (offset, 1),
9640 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
9641 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9642 return 0;
9643
9644 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9645 It must be NEGATE_EXPR. Then strip any more conversions. */
9646 offset = TREE_OPERAND (offset, 0);
9647 while (CONVERT_EXPR_P (offset))
9648 offset = TREE_OPERAND (offset, 0);
9649
9650 if (TREE_CODE (offset) != NEGATE_EXPR)
9651 return 0;
9652
9653 offset = TREE_OPERAND (offset, 0);
9654 while (CONVERT_EXPR_P (offset))
9655 offset = TREE_OPERAND (offset, 0);
9656
9657 /* This must now be the address of EXP. */
9658 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
9659 }
9660 \f
9661 /* Return the tree node if ARG corresponds to a string constant, or zero
9662 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9663 in bytes within the string that ARG is accessing. The type of the
9664 offset will be `sizetype'. */
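/* Two sketches of the cases handled below (declarations are
hypothetical): for the argument &"hello"[2], the STRING_CST
"hello" is returned with *PTR_OFFSET set to 2; for &buf[3] with

static const char buf[4] = "abc";

the initializer "abc" is returned with *PTR_OFFSET set to 3. */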
9665
9666 tree
9667 string_constant (tree arg, tree *ptr_offset)
9668 {
9669 tree array, offset, lower_bound;
9670 STRIP_NOPS (arg);
9671
9672 if (TREE_CODE (arg) == ADDR_EXPR)
9673 {
9674 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9675 {
9676 *ptr_offset = size_zero_node;
9677 return TREE_OPERAND (arg, 0);
9678 }
9679 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
9680 {
9681 array = TREE_OPERAND (arg, 0);
9682 offset = size_zero_node;
9683 }
9684 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
9685 {
9686 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
9687 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
9688 if (TREE_CODE (array) != STRING_CST
9689 && TREE_CODE (array) != VAR_DECL)
9690 return 0;
9691
9692 /* Check if the array has a nonzero lower bound. */
9693 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
9694 if (!integer_zerop (lower_bound))
9695 {
9696 /* If the offset and base aren't both constants, return 0. */
9697 if (TREE_CODE (lower_bound) != INTEGER_CST)
9698 return 0;
9699 if (TREE_CODE (offset) != INTEGER_CST)
9700 return 0;
9701 /* Adjust offset by the lower bound. */
9702 offset = size_diffop (fold_convert (sizetype, offset),
9703 fold_convert (sizetype, lower_bound));
9704 }
9705 }
9706 else
9707 return 0;
9708 }
9709 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
9710 {
9711 tree arg0 = TREE_OPERAND (arg, 0);
9712 tree arg1 = TREE_OPERAND (arg, 1);
9713
9714 STRIP_NOPS (arg0);
9715 STRIP_NOPS (arg1);
9716
9717 if (TREE_CODE (arg0) == ADDR_EXPR
9718 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
9719 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
9720 {
9721 array = TREE_OPERAND (arg0, 0);
9722 offset = arg1;
9723 }
9724 else if (TREE_CODE (arg1) == ADDR_EXPR
9725 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
9726 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
9727 {
9728 array = TREE_OPERAND (arg1, 0);
9729 offset = arg0;
9730 }
9731 else
9732 return 0;
9733 }
9734 else
9735 return 0;
9736
9737 if (TREE_CODE (array) == STRING_CST)
9738 {
9739 *ptr_offset = fold_convert (sizetype, offset);
9740 return array;
9741 }
9742 else if (TREE_CODE (array) == VAR_DECL)
9743 {
9744 int length;
9745
9746 /* Variables initialized to string literals can be handled too. */
9747 if (DECL_INITIAL (array) == NULL_TREE
9748 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
9749 return 0;
9750
9751 /* The array must be read-only, non-volatile, and bind locally. */
9752 if (! TREE_READONLY (array)
9753 || TREE_SIDE_EFFECTS (array)
9754 || ! targetm.binds_local_p (array))
9755 return 0;
9756
9757 /* Avoid const char foo[4] = "abcde"; */
9758 if (DECL_SIZE_UNIT (array) == NULL_TREE
9759 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
9760 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
9761 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
9762 return 0;
9763
9764 /* If the variable is bigger than the string literal, OFFSET must be
9765 constant and within the bounds of the string literal. */
9766 offset = fold_convert (sizetype, offset);
9767 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
9768 && (! host_integerp (offset, 1)
9769 || compare_tree_int (offset, length) >= 0))
9770 return 0;
9771
9772 *ptr_offset = offset;
9773 return DECL_INITIAL (array);
9774 }
9775
9776 return 0;
9777 }
9778 \f
9779 /* Generate code to calculate OPS, an exploded expression,
9780 using a store-flag instruction, and return an rtx for the result.
9781 OPS reflects a comparison.
9782
9783 If TARGET is nonzero, store the result there if convenient.
9784
9785 Return zero if there is no suitable set-flag instruction
9786 available on this machine.
9787
9788 Once expand_expr has been called on the arguments of the comparison,
9789 we are committed to doing the store flag, since it is not safe to
9790 re-evaluate the expression. We emit the store-flag insn by calling
9791 emit_store_flag, but only expand the arguments if we have a reason
9792 to believe that emit_store_flag will be successful. If we think that
9793 it will, but it isn't, we have to simulate the store-flag with a
9794 set/jump/set sequence. */
9795
9796 static rtx
9797 do_store_flag (sepops ops, rtx target, enum machine_mode mode)
9798 {
9799 enum rtx_code code;
9800 tree arg0, arg1, type;
9801 tree tem;
9802 enum machine_mode operand_mode;
9803 int unsignedp;
9804 rtx op0, op1;
9805 rtx subtarget = target;
9806 location_t loc = ops->location;
9807
9808 arg0 = ops->op0;
9809 arg1 = ops->op1;
9810
9811 /* Don't crash if the comparison was erroneous. */
9812 if (arg0 == error_mark_node || arg1 == error_mark_node)
9813 return const0_rtx;
9814
9815 type = TREE_TYPE (arg0);
9816 operand_mode = TYPE_MODE (type);
9817 unsignedp = TYPE_UNSIGNED (type);
9818
9819 /* We won't bother with BLKmode store-flag operations because it would mean
9820 passing a lot of information to emit_store_flag. */
9821 if (operand_mode == BLKmode)
9822 return 0;
9823
9824 /* We won't bother with store-flag operations involving function pointers
9825 when function pointers must be canonicalized before comparisons. */
9826 #ifdef HAVE_canonicalize_funcptr_for_compare
9827 if (HAVE_canonicalize_funcptr_for_compare
9828 && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
9829 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
9830 == FUNCTION_TYPE))
9831 || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
9832 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
9833 == FUNCTION_TYPE))))
9834 return 0;
9835 #endif
9836
9837 STRIP_NOPS (arg0);
9838 STRIP_NOPS (arg1);
9839
9840 /* Get the rtx comparison code to use. We know that EXP is a comparison
9841 operation of some type. Some comparisons against 1 and -1 can be
9842 converted to comparisons with zero. Do so here so that the tests
9843 below will be aware that we have a comparison with zero. These
9844 tests will not catch constants in the first operand, but constants
9845 are rarely passed as the first operand. */
9846
9847 switch (ops->code)
9848 {
9849 case EQ_EXPR:
9850 code = EQ;
9851 break;
9852 case NE_EXPR:
9853 code = NE;
9854 break;
9855 case LT_EXPR:
9856 if (integer_onep (arg1))
9857 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9858 else
9859 code = unsignedp ? LTU : LT;
9860 break;
9861 case LE_EXPR:
9862 if (! unsignedp && integer_all_onesp (arg1))
9863 arg1 = integer_zero_node, code = LT;
9864 else
9865 code = unsignedp ? LEU : LE;
9866 break;
9867 case GT_EXPR:
9868 if (! unsignedp && integer_all_onesp (arg1))
9869 arg1 = integer_zero_node, code = GE;
9870 else
9871 code = unsignedp ? GTU : GT;
9872 break;
9873 case GE_EXPR:
9874 if (integer_onep (arg1))
9875 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9876 else
9877 code = unsignedp ? GEU : GE;
9878 break;
9879
9880 case UNORDERED_EXPR:
9881 code = UNORDERED;
9882 break;
9883 case ORDERED_EXPR:
9884 code = ORDERED;
9885 break;
9886 case UNLT_EXPR:
9887 code = UNLT;
9888 break;
9889 case UNLE_EXPR:
9890 code = UNLE;
9891 break;
9892 case UNGT_EXPR:
9893 code = UNGT;
9894 break;
9895 case UNGE_EXPR:
9896 code = UNGE;
9897 break;
9898 case UNEQ_EXPR:
9899 code = UNEQ;
9900 break;
9901 case LTGT_EXPR:
9902 code = LTGT;
9903 break;
9904
9905 default:
9906 gcc_unreachable ();
9907 }
9908
9909 /* Put a constant second. */
9910 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
9911 || TREE_CODE (arg0) == FIXED_CST)
9912 {
9913 tem = arg0; arg0 = arg1; arg1 = tem;
9914 code = swap_condition (code);
9915 }
9916
9917 /* If this is an equality or inequality test of a single bit, we can
9918 do this by shifting the bit being tested to the low-order bit and
9919 masking the result with the constant 1. If the condition was EQ,
9920 we xor it with 1. This does not require an scc insn and is faster
9921 than an scc insn even if we have it.
9922
9923 The code to make this transformation was moved into fold_single_bit_test,
9924 so we just call into the folder and expand its result. */
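/* E.g. a test like (x & 8) != 0 is rewritten by
fold_single_bit_test into the equivalent of (x >> 3) & 1, which
expands to a shift and a mask with no store-flag insn at all. */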
9925
9926 if ((code == NE || code == EQ)
9927 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9928 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9929 {
9930 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
9931 return expand_expr (fold_single_bit_test (loc,
9932 code == NE ? NE_EXPR : EQ_EXPR,
9933 arg0, arg1, type),
9934 target, VOIDmode, EXPAND_NORMAL);
9935 }
9936
9937 if (! get_subtarget (target)
9938 || GET_MODE (subtarget) != operand_mode)
9939 subtarget = 0;
9940
9941 expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
9942
9943 if (target == 0)
9944 target = gen_reg_rtx (mode);
9945
9946 /* Try a cstore if possible. */
9947 return emit_store_flag_force (target, code, op0, op1,
9948 operand_mode, unsignedp, 1);
9949 }
9950 \f
9951
9952 /* Stubs in case we haven't got a casesi insn. */
9953 #ifndef HAVE_casesi
9954 # define HAVE_casesi 0
9955 # define gen_casesi(a, b, c, d, e) (0)
9956 # define CODE_FOR_casesi CODE_FOR_nothing
9957 #endif
9958
9959 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9960 0 otherwise (i.e. if there is no casesi instruction). */
9961 int
9962 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9963 rtx table_label ATTRIBUTE_UNUSED, rtx default_label,
9964 rtx fallback_label ATTRIBUTE_UNUSED)
9965 {
9966 enum machine_mode index_mode = SImode;
9967 int index_bits = GET_MODE_BITSIZE (index_mode);
9968 rtx op1, op2, index;
9969 enum machine_mode op_mode;
9970
9971 if (! HAVE_casesi)
9972 return 0;
9973
9974 /* Convert the index to SImode. */
9975 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9976 {
9977 enum machine_mode omode = TYPE_MODE (index_type);
9978 rtx rangertx = expand_normal (range);
9979
9980 /* We must handle the endpoints in the original mode. */
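/* E.g. with a hypothetical DImode index and an SImode casesi
pattern: INDEX - MINVAL and the LTU bounds check below are done
in DImode, so only an in-range index, which by construction fits
in SImode, survives to the truncation. */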
9981 index_expr = build2 (MINUS_EXPR, index_type,
9982 index_expr, minval);
9983 minval = integer_zero_node;
9984 index = expand_normal (index_expr);
9985 if (default_label)
9986 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9987 omode, 1, default_label);
9988 /* Now we can safely truncate. */
9989 index = convert_to_mode (index_mode, index, 0);
9990 }
9991 else
9992 {
9993 if (TYPE_MODE (index_type) != index_mode)
9994 {
9995 index_type = lang_hooks.types.type_for_size (index_bits, 0);
9996 index_expr = fold_convert (index_type, index_expr);
9997 }
9998
9999 index = expand_normal (index_expr);
10000 }
10001
10002 do_pending_stack_adjust ();
10003
10004 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
10005 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
10006 (index, op_mode))
10007 index = copy_to_mode_reg (op_mode, index);
10008
10009 op1 = expand_normal (minval);
10010
10011 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
10012 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
10013 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
10014 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
10015 (op1, op_mode))
10016 op1 = copy_to_mode_reg (op_mode, op1);
10017
10018 op2 = expand_normal (range);
10019
10020 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
10021 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
10022 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
10023 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
10024 (op2, op_mode))
10025 op2 = copy_to_mode_reg (op_mode, op2);
10026
10027 emit_jump_insn (gen_casesi (index, op1, op2,
10028 table_label, !default_label
10029 ? fallback_label : default_label));
10030 return 1;
10031 }
10032
10033 /* Attempt to generate a tablejump instruction; same concept. */
10034 #ifndef HAVE_tablejump
10035 #define HAVE_tablejump 0
10036 #define gen_tablejump(x, y) (0)
10037 #endif
10038
10039 /* Subroutine of the next function.
10040
10041 INDEX is the value being switched on, with the lowest value
10042 in the table already subtracted.
10043 MODE is its expected mode (needed if INDEX is constant).
10044 RANGE is the length of the jump table.
10045 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10046
10047 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10048 index value is out of range. */
10049
10050 static void
10051 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
10052 rtx default_label)
10053 {
10054 rtx temp, vector;
10055
10056 if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
10057 cfun->cfg->max_jumptable_ents = INTVAL (range);
10058
10059 /* Do an unsigned comparison (in the proper mode) between the index
10060 expression and the value which represents the length of the range.
10061 Since we just finished subtracting the lower bound of the range
10062 from the index expression, this comparison allows us to simultaneously
10063 check that the original index expression value is both greater than
10064 or equal to the minimum value of the range and less than or equal to
10065 the maximum value of the range. */
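/* Concretely: for case values 5 .. 10, the caller passes INDEX - 5
and RANGE = 5. An original index of 3 becomes (unsigned) -2,
which compares above 5 exactly as an index of 12 does, so the
single GTU test below catches both out-of-range directions. */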
10066
10067 if (default_label)
10068 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10069 default_label);
10070
10071 /* If index is in range, it must fit in Pmode.
10072 Convert to Pmode so we can index with it. */
10073 if (mode != Pmode)
10074 index = convert_to_mode (Pmode, index, 1);
10075
10076 /* Don't let a MEM slip through, because then INDEX that comes
10077 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10078 and break_out_memory_refs will go to work on it and mess it up. */
10079 #ifdef PIC_CASE_VECTOR_ADDRESS
10080 if (flag_pic && !REG_P (index))
10081 index = copy_to_mode_reg (Pmode, index);
10082 #endif
10083
10084 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10085 GET_MODE_SIZE, because this indicates how large insns are. The other
10086 uses should all be Pmode, because they are addresses. This code
10087 could fail if addresses and insns are not the same size. */
10088 index = gen_rtx_PLUS (Pmode,
10089 gen_rtx_MULT (Pmode, index,
10090 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10091 gen_rtx_LABEL_REF (Pmode, table_label));
10092 #ifdef PIC_CASE_VECTOR_ADDRESS
10093 if (flag_pic)
10094 index = PIC_CASE_VECTOR_ADDRESS (index);
10095 else
10096 #endif
10097 index = memory_address (CASE_VECTOR_MODE, index);
10098 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10099 vector = gen_const_mem (CASE_VECTOR_MODE, index);
10100 convert_move (temp, vector, 0);
10101
10102 emit_jump_insn (gen_tablejump (temp, table_label));
10103
10104 /* If we are generating PIC code or if the table is PC-relative, the
10105 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10106 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10107 emit_barrier ();
10108 }
10109
10110 int
10111 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
10112 rtx table_label, rtx default_label)
10113 {
10114 rtx index;
10115
10116 if (! HAVE_tablejump)
10117 return 0;
10118
10119 index_expr = fold_build2 (MINUS_EXPR, index_type,
10120 fold_convert (index_type, index_expr),
10121 fold_convert (index_type, minval));
10122 index = expand_normal (index_expr);
10123 do_pending_stack_adjust ();
10124
10125 do_tablejump (index, TYPE_MODE (index_type),
10126 convert_modes (TYPE_MODE (index_type),
10127 TYPE_MODE (TREE_TYPE (range)),
10128 expand_normal (range),
10129 TYPE_UNSIGNED (TREE_TYPE (range))),
10130 table_label, default_label);
10131 return 1;
10132 }
10133
10134 /* Nonzero if the mode is a valid vector mode for this architecture.
10135 This returns nonzero even if there is no hardware support for the
10136 vector mode, but we can emulate with narrower modes. */
10137
10138 int
10139 vector_mode_valid_p (enum machine_mode mode)
10140 {
10141 enum mode_class mclass = GET_MODE_CLASS (mode);
10142 enum machine_mode innermode;
10143
10144 /* Doh! What's going on? */
10145 if (mclass != MODE_VECTOR_INT
10146 && mclass != MODE_VECTOR_FLOAT
10147 && mclass != MODE_VECTOR_FRACT
10148 && mclass != MODE_VECTOR_UFRACT
10149 && mclass != MODE_VECTOR_ACCUM
10150 && mclass != MODE_VECTOR_UACCUM)
10151 return 0;
10152
10153 /* Hardware support. Woo hoo! */
10154 if (targetm.vector_mode_supported_p (mode))
10155 return 1;
10156
10157 innermode = GET_MODE_INNER (mode);
10158
10159 /* We should probably return 1 if requesting V4DI and we have no DI,
10160 but do have V2DI; that case, however, is probably very unlikely. */
10161
10162 /* If we have support for the inner mode, we can safely emulate it.
10163 We may not have V2DI, but we can emulate it with a pair of DIs. */
10164 return targetm.scalar_mode_supported_p (innermode);
10165 }
10166
10167 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
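/* E.g. a V4SImode VECTOR_CST listing only the elements 1 and 2
yields the CONST_VECTOR [1, 2, 0, 0]: listed elements are
converted in order and the remaining lanes are zero-filled. */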
10168 static rtx
10169 const_vector_from_tree (tree exp)
10170 {
10171 rtvec v;
10172 int units, i;
10173 tree link, elt;
10174 enum machine_mode inner, mode;
10175
10176 mode = TYPE_MODE (TREE_TYPE (exp));
10177
10178 if (initializer_zerop (exp))
10179 return CONST0_RTX (mode);
10180
10181 units = GET_MODE_NUNITS (mode);
10182 inner = GET_MODE_INNER (mode);
10183
10184 v = rtvec_alloc (units);
10185
10186 link = TREE_VECTOR_CST_ELTS (exp);
10187 for (i = 0; link; link = TREE_CHAIN (link), ++i)
10188 {
10189 elt = TREE_VALUE (link);
10190
10191 if (TREE_CODE (elt) == REAL_CST)
10192 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
10193 inner);
10194 else if (TREE_CODE (elt) == FIXED_CST)
10195 RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
10196 inner);
10197 else
10198 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
10199 TREE_INT_CST_HIGH (elt),
10200 inner);
10201 }
10202
10203 /* Initialize remaining elements to 0. */
10204 for (; i < units; ++i)
10205 RTVEC_ELT (v, i) = CONST0_RTX (inner);
10206
10207 return gen_rtx_CONST_VECTOR (mode, v);
10208 }
10209
10210
10211 /* Build a decl for an EH personality function named NAME. */
10212
10213 tree
10214 build_personality_function (const char *name)
10215 {
10216 tree decl, type;
10217
10218 type = build_function_type_list (integer_type_node, integer_type_node,
10219 long_long_unsigned_type_node,
10220 ptr_type_node, ptr_type_node, NULL_TREE);
10221 decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
10222 get_identifier (name), type);
10223 DECL_ARTIFICIAL (decl) = 1;
10224 DECL_EXTERNAL (decl) = 1;
10225 TREE_PUBLIC (decl) = 1;
10226
10227 /* Zap the nonsensical SYMBOL_REF_DECL for this. What we're left with
10228 are the flags assigned by targetm.encode_section_info. */
10229 SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);
10230
10231 return decl;
10232 }
10233
10234 /* Extracts the personality function of DECL and returns the corresponding
10235 libfunc. */
10236
10237 rtx
10238 get_personality_function (tree decl)
10239 {
10240 tree personality = DECL_FUNCTION_PERSONALITY (decl);
10241 enum eh_personality_kind pk;
10242
10243 pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
10244 if (pk == eh_personality_none)
10245 return NULL;
10246
10247 if (!personality
10248 && pk == eh_personality_any)
10249 personality = lang_hooks.eh_personality ();
10250
10251 if (pk == eh_personality_lang)
10252 gcc_assert (personality != NULL_TREE);
10253
10254 return XEXP (DECL_RTL (personality), 0);
10255 }
10256
10257 #include "gt-expr.h"