1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "optabs.h"
40 #include "libfuncs.h"
41 #include "recog.h"
42 #include "reload.h"
43 #include "output.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "langhooks.h"
48 #include "intl.h"
49 #include "tm_p.h"
50 #include "target.h"
51
52 /* Decide whether a function's arguments should be processed
53 from first to last or from last to first.
54
55 They should if the stack and args grow in opposite directions, but
56 only if we have push insns. */
57
58 #ifdef PUSH_ROUNDING
59
60 #ifndef PUSH_ARGS_REVERSED
61 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
62 #define PUSH_ARGS_REVERSED /* If it's last to first. */
63 #endif
64 #endif
65
66 #endif
67
68 #ifndef STACK_PUSH_CODE
69 #ifdef STACK_GROWS_DOWNWARD
70 #define STACK_PUSH_CODE PRE_DEC
71 #else
72 #define STACK_PUSH_CODE PRE_INC
73 #endif
74 #endif
75
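/* Illustration (a hypothetical sketch, not from the GCC sources): on a
   target where STACK_GROWS_DOWNWARD is defined, a push of X in mode M
   expands to the RTL equivalent of

       (set (mem:M (pre_dec:P (reg:P sp))) x)

   where P stands for Pmode, i.e. the stack pointer is decremented
   before the store.  On an upward-growing stack, PRE_INC is used and
   the store lands above the old stack pointer.  */
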
76 /* Assume that case vectors are not pc-relative. */
77 #ifndef CASE_VECTOR_PC_RELATIVE
78 #define CASE_VECTOR_PC_RELATIVE 0
79 #endif
80
81 /* Convert defined/undefined to boolean. */
82 #ifdef TARGET_MEM_FUNCTIONS
83 #undef TARGET_MEM_FUNCTIONS
84 #define TARGET_MEM_FUNCTIONS 1
85 #else
86 #define TARGET_MEM_FUNCTIONS 0
87 #endif
88
89
90 /* If this is nonzero, we do not bother generating VOLATILE
91 around volatile memory references, and we are willing to
92 output indirect addresses. If cse is to follow, we reject
93 indirect addresses so a useful potential cse is generated;
94 if it is used only once, instruction combination will produce
95 the same indirect address eventually. */
96 int cse_not_expected;
97
98 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
99 tree placeholder_list = 0;
100
101 /* This structure is used by move_by_pieces to describe the move to
102 be performed. */
103 struct move_by_pieces
104 {
105 rtx to;
106 rtx to_addr;
107 int autinc_to;
108 int explicit_inc_to;
109 rtx from;
110 rtx from_addr;
111 int autinc_from;
112 int explicit_inc_from;
113 unsigned HOST_WIDE_INT len;
114 HOST_WIDE_INT offset;
115 int reverse;
116 };
117
118 /* This structure is used by store_by_pieces to describe the clear to
119 be performed. */
120
121 struct store_by_pieces
122 {
123 rtx to;
124 rtx to_addr;
125 int autinc_to;
126 int explicit_inc_to;
127 unsigned HOST_WIDE_INT len;
128 HOST_WIDE_INT offset;
129 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
130 void *constfundata;
131 int reverse;
132 };
133
134 static rtx enqueue_insn (rtx, rtx);
135 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
136 unsigned int);
137 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
138 struct move_by_pieces *);
139 static bool block_move_libcall_safe_for_call_parm (void);
140 static bool emit_block_move_via_movstr (rtx, rtx, rtx, unsigned);
141 static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
142 static tree emit_block_move_libcall_fn (int);
143 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
144 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
145 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
146 static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
147 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
148 struct store_by_pieces *);
149 static bool clear_storage_via_clrstr (rtx, rtx, unsigned);
150 static rtx clear_storage_via_libcall (rtx, rtx);
151 static tree clear_storage_libcall_fn (int);
152 static rtx compress_float_constant (rtx, rtx);
153 static rtx get_subtarget (rtx);
154 static int is_zeros_p (tree);
155 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
156 HOST_WIDE_INT, enum machine_mode,
157 tree, tree, int, int);
158 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
159 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
160 tree, enum machine_mode, int, tree, int);
161 static rtx var_rtx (tree);
162
163 static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
164 static unsigned HOST_WIDE_INT highest_pow2_factor_for_type (tree, tree);
165
166 static int is_aligning_offset (tree, tree);
167 static rtx expand_increment (tree, int, int);
168 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
169 enum expand_modifier);
170 static rtx do_store_flag (tree, rtx, enum machine_mode, int);
171 #ifdef PUSH_ROUNDING
172 static void emit_single_push_insn (enum machine_mode, rtx, tree);
173 #endif
174 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
175 static rtx const_vector_from_tree (tree);
176
177 /* Record for each mode whether we can move a register directly to or
178 from an object of that mode in memory. If we can't, we won't try
179 to use that mode directly when accessing a field of that mode. */
180
181 static char direct_load[NUM_MACHINE_MODES];
182 static char direct_store[NUM_MACHINE_MODES];
183
184 /* Record for each mode whether we can float-extend from memory. */
185
186 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
187
188 /* If a memory-to-memory move would take MOVE_RATIO or more simple
189 move-instruction sequences, we will do a movstr or libcall instead. */
190
191 #ifndef MOVE_RATIO
192 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
193 #define MOVE_RATIO 2
194 #else
195 /* If we are optimizing for space (-Os), cut down the default move ratio. */
196 #define MOVE_RATIO (optimize_size ? 3 : 15)
197 #endif
198 #endif
199
200 /* This macro is used to determine whether move_by_pieces should be called
201 to perform a structure copy. */
202 #ifndef MOVE_BY_PIECES_P
203 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
204 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
205 #endif
206
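/* Worked example (illustrative figures, not a statement about any
   particular target): with the default MOVE_RATIO of 15 and 4-byte
   word moves, a word-aligned 16-byte copy costs
   move_by_pieces_ninsns == 4 insns, and 4 < 15, so MOVE_BY_PIECES_P
   is true and the copy is emitted inline.  A 256-byte copy would cost
   64 insns, so a movstr pattern or a library call is used instead.  */
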
207 /* If a clear memory operation would take CLEAR_RATIO or more simple
208 move-instruction sequences, we will do a clrstr or libcall instead. */
209
210 #ifndef CLEAR_RATIO
211 #if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
212 #define CLEAR_RATIO 2
213 #else
214 /* If we are optimizing for space, cut down the default clear ratio. */
215 #define CLEAR_RATIO (optimize_size ? 3 : 15)
216 #endif
217 #endif
218
219 /* This macro is used to determine whether clear_by_pieces should be
220 called to clear storage. */
221 #ifndef CLEAR_BY_PIECES_P
222 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
223 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
224 #endif
225
226 /* This macro is used to determine whether store_by_pieces should be
227 called to "memset" storage with byte values other than zero, or
228 to "memcpy" storage when the source is a constant string. */
229 #ifndef STORE_BY_PIECES_P
230 #define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
231 #endif
232
233 /* This array records the insn_code of insns to perform block moves. */
234 enum insn_code movstr_optab[NUM_MACHINE_MODES];
235
236 /* This array records the insn_code of insns to perform block clears. */
237 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
238
239 /* These arrays record the insn_code of two different kinds of insns
240 to perform block compares. */
241 enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
242 enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
243
244 /* Stack of EXPR_WITH_FILE_LOCATION nested expressions. */
245 struct file_stack *expr_wfl_stack;
246
247 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
248
249 #ifndef SLOW_UNALIGNED_ACCESS
250 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
251 #endif
252 \f
253 /* This is run once per compilation to set up which modes can be used
254 directly in memory and to initialize the block move optab. */
255
256 void
257 init_expr_once (void)
258 {
259 rtx insn, pat;
260 enum machine_mode mode;
261 int num_clobbers;
262 rtx mem, mem1;
263 rtx reg;
264
265 /* Try indexing by frame ptr and try by stack ptr.
266 It is known that on the Convex the stack ptr isn't a valid index.
267 With luck, one or the other is valid on any machine. */
268 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
269 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
270
271 /* A scratch register we can modify in-place below to avoid
272 useless RTL allocations. */
273 reg = gen_rtx_REG (VOIDmode, -1);
274
275 insn = rtx_alloc (INSN);
276 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
277 PATTERN (insn) = pat;
278
279 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
280 mode = (enum machine_mode) ((int) mode + 1))
281 {
282 int regno;
283
284 direct_load[(int) mode] = direct_store[(int) mode] = 0;
285 PUT_MODE (mem, mode);
286 PUT_MODE (mem1, mode);
287 PUT_MODE (reg, mode);
288
289 /* See if there is some register that can be used in this mode and
290 directly loaded or stored from memory. */
291
292 if (mode != VOIDmode && mode != BLKmode)
293 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
294 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
295 regno++)
296 {
297 if (! HARD_REGNO_MODE_OK (regno, mode))
298 continue;
299
300 REGNO (reg) = regno;
301
302 SET_SRC (pat) = mem;
303 SET_DEST (pat) = reg;
304 if (recog (pat, insn, &num_clobbers) >= 0)
305 direct_load[(int) mode] = 1;
306
307 SET_SRC (pat) = mem1;
308 SET_DEST (pat) = reg;
309 if (recog (pat, insn, &num_clobbers) >= 0)
310 direct_load[(int) mode] = 1;
311
312 SET_SRC (pat) = reg;
313 SET_DEST (pat) = mem;
314 if (recog (pat, insn, &num_clobbers) >= 0)
315 direct_store[(int) mode] = 1;
316
317 SET_SRC (pat) = reg;
318 SET_DEST (pat) = mem1;
319 if (recog (pat, insn, &num_clobbers) >= 0)
320 direct_store[(int) mode] = 1;
321 }
322 }
323
324 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
325
326 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
327 mode = GET_MODE_WIDER_MODE (mode))
328 {
329 enum machine_mode srcmode;
330 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
331 srcmode = GET_MODE_WIDER_MODE (srcmode))
332 {
333 enum insn_code ic;
334
335 ic = can_extend_p (mode, srcmode, 0);
336 if (ic == CODE_FOR_nothing)
337 continue;
338
339 PUT_MODE (mem, srcmode);
340
341 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
342 float_extend_from_mem[mode][srcmode] = true;
343 }
344 }
345 }
346
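/* For illustration (a hypothetical target, not GCC code): if the
   target's movsi pattern accepts a (mem) operand on either side for
   some hard register, the loop above finishes with
   direct_load[(int) SImode] == 1 and direct_store[(int) SImode] == 1,
   and later code such as convert_move may then reference an SImode
   value directly in memory instead of first forcing it into a
   register.  */
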
347 /* This is run at the start of compiling a function. */
348
349 void
350 init_expr (void)
351 {
352 cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
353 }
354
355 /* Small sanity check that the queue is empty at the end of a function. */
356
357 void
358 finish_expr_for_function (void)
359 {
360 if (pending_chain)
361 abort ();
362 }
363 \f
364 /* Manage the queue of increment instructions to be output
365 for POSTINCREMENT_EXPR expressions, etc. */
366
367 /* Queue up to increment (or change) VAR later. BODY says how:
368 BODY should be the same thing you would pass to emit_insn
369 to increment right away. It will go to emit_insn later on.
370
371 The value is a QUEUED expression to be used in place of VAR
372 where you want to guarantee the pre-incrementation value of VAR. */
373
374 static rtx
375 enqueue_insn (rtx var, rtx body)
376 {
377 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
378 body, pending_chain);
379 return pending_chain;
380 }
381
382 /* Use protect_from_queue to convert a QUEUED expression
383 into something that you can put immediately into an instruction.
384 If the queued incrementation has not happened yet,
385 protect_from_queue returns the variable itself.
386 If the incrementation has happened, protect_from_queue returns a temp
387 that contains a copy of the old value of the variable.
388
389 Any time an rtx which might possibly be a QUEUED is to be put
390 into an instruction, it must be passed through protect_from_queue first.
391 QUEUED expressions are not meaningful in instructions.
392
393 Do not pass a value through protect_from_queue and then hold
394 on to it for a while before putting it in an instruction!
395 If the queue is flushed in between, incorrect code will result. */
396
397 rtx
398 protect_from_queue (rtx x, int modify)
399 {
400 RTX_CODE code = GET_CODE (x);
401
402 #if 0 /* A QUEUED can hang around after the queue is forced out. */
403 /* Shortcut for most common case. */
404 if (pending_chain == 0)
405 return x;
406 #endif
407
408 if (code != QUEUED)
409 {
410 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
411 use of autoincrement. Make a copy of the contents of the memory
412 location rather than a copy of the address, but not if the value is
413 of mode BLKmode. Don't modify X in place since it might be
414 shared. */
415 if (code == MEM && GET_MODE (x) != BLKmode
416 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
417 {
418 rtx y = XEXP (x, 0);
419 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
420
421 if (QUEUED_INSN (y))
422 {
423 rtx temp = gen_reg_rtx (GET_MODE (x));
424
425 emit_insn_before (gen_move_insn (temp, new),
426 QUEUED_INSN (y));
427 return temp;
428 }
429
430 /* Copy the address into a pseudo, so that the returned value
431 remains correct across calls to emit_queue. */
432 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
433 }
434
435 /* Otherwise, recursively protect the subexpressions of all
436 the kinds of rtx's that can contain a QUEUED. */
437 if (code == MEM)
438 {
439 rtx tem = protect_from_queue (XEXP (x, 0), 0);
440 if (tem != XEXP (x, 0))
441 {
442 x = copy_rtx (x);
443 XEXP (x, 0) = tem;
444 }
445 }
446 else if (code == PLUS || code == MULT)
447 {
448 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
449 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
450 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
451 {
452 x = copy_rtx (x);
453 XEXP (x, 0) = new0;
454 XEXP (x, 1) = new1;
455 }
456 }
457 return x;
458 }
459 /* If the increment has not happened, use the variable itself. Copy it
460 into a new pseudo so that the value remains correct across calls to
461 emit_queue. */
462 if (QUEUED_INSN (x) == 0)
463 return copy_to_reg (QUEUED_VAR (x));
464 /* If the increment has happened and a pre-increment copy exists,
465 use that copy. */
466 if (QUEUED_COPY (x) != 0)
467 return QUEUED_COPY (x);
468 /* The increment has happened but we haven't set up a pre-increment copy.
469 Set one up now, and use it. */
470 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
471 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
472 QUEUED_INSN (x));
473 return QUEUED_COPY (x);
474 }
475
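/* A minimal usage sketch of the queue API described above.  This
   helper is hypothetical (nothing like it exists in GCC); it assumes
   Y is a register-like rtx whose mode has an add2 pattern.  */
#if 0
static void
example_post_increment (rtx y, rtx target)
{
  /* Queue "y = y + 1"; Q stands for the pre-increment value of Y.  */
  rtx q = enqueue_insn (y, gen_add2_insn (y, const1_rtx));

  /* Before Q can appear in an insn it must go through
     protect_from_queue, which here yields a copy of the old value.  */
  emit_move_insn (target, protect_from_queue (q, 0));

  /* Flush the queued increment itself.  */
  emit_queue ();
}
#endif
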
476 /* Return nonzero if X contains a QUEUED expression:
477 if it contains anything that will be altered by a queued increment.
478 We handle only combinations of MEM, PLUS, MINUS and MULT operators
479 since memory addresses generally contain only those. */
480
481 int
482 queued_subexp_p (rtx x)
483 {
484 enum rtx_code code = GET_CODE (x);
485 switch (code)
486 {
487 case QUEUED:
488 return 1;
489 case MEM:
490 return queued_subexp_p (XEXP (x, 0));
491 case MULT:
492 case PLUS:
493 case MINUS:
494 return (queued_subexp_p (XEXP (x, 0))
495 || queued_subexp_p (XEXP (x, 1)));
496 default:
497 return 0;
498 }
499 }
500
501 /* Perform all the pending incrementations. */
502
503 void
504 emit_queue (void)
505 {
506 rtx p;
507 while ((p = pending_chain))
508 {
509 rtx body = QUEUED_BODY (p);
510
511 switch (GET_CODE (body))
512 {
513 case INSN:
514 case JUMP_INSN:
515 case CALL_INSN:
516 case CODE_LABEL:
517 case BARRIER:
518 case NOTE:
519 QUEUED_INSN (p) = body;
520 emit_insn (body);
521 break;
522
523 #ifdef ENABLE_CHECKING
524 case SEQUENCE:
525 abort ();
526 break;
527 #endif
528
529 default:
530 QUEUED_INSN (p) = emit_insn (body);
531 break;
532 }
533
534 pending_chain = QUEUED_NEXT (p);
535 }
536 }
537 \f
538 /* Copy data from FROM to TO, where the machine modes are not the same.
539 Both modes may be integer, or both may be floating.
540 UNSIGNEDP should be nonzero if FROM is an unsigned type.
541 This causes zero-extension instead of sign-extension. */
542
543 void
544 convert_move (rtx to, rtx from, int unsignedp)
545 {
546 enum machine_mode to_mode = GET_MODE (to);
547 enum machine_mode from_mode = GET_MODE (from);
548 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
549 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
550 enum insn_code code;
551 rtx libcall;
552
553 /* rtx code for making an equivalent value. */
554 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
555 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
556
557 to = protect_from_queue (to, 1);
558 from = protect_from_queue (from, 0);
559
560 if (to_real != from_real)
561 abort ();
562
563 /* If FROM is a SUBREG that indicates that we have already done at least
564 the required extension, strip it. We don't handle such SUBREGs as
565 TO here. */
566
567 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
568 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
569 >= GET_MODE_SIZE (to_mode))
570 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
571 from = gen_lowpart (to_mode, from), from_mode = to_mode;
572
573 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
574 abort ();
575
576 if (to_mode == from_mode
577 || (from_mode == VOIDmode && CONSTANT_P (from)))
578 {
579 emit_move_insn (to, from);
580 return;
581 }
582
583 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
584 {
585 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
586 abort ();
587
588 if (VECTOR_MODE_P (to_mode))
589 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
590 else
591 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
592
593 emit_move_insn (to, from);
594 return;
595 }
596
597 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
598 {
599 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
600 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
601 return;
602 }
603
604 if (to_real)
605 {
606 rtx value, insns;
607 convert_optab tab;
608
609 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
610 tab = sext_optab;
611 else if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
612 tab = trunc_optab;
613 else
614 abort ();
615
616 /* Try converting directly if the insn is supported. */
617
618 code = tab->handlers[to_mode][from_mode].insn_code;
619 if (code != CODE_FOR_nothing)
620 {
621 emit_unop_insn (code, to, from,
622 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
623 return;
624 }
625
626 /* Otherwise use a libcall. */
627 libcall = tab->handlers[to_mode][from_mode].libfunc;
628
629 if (!libcall)
630 /* This conversion is not implemented yet. */
631 abort ();
632
633 start_sequence ();
634 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
635 1, from, from_mode);
636 insns = get_insns ();
637 end_sequence ();
638 emit_libcall_block (insns, to, value,
639 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
640 from)
641 : gen_rtx_FLOAT_EXTEND (to_mode, from));
642 return;
643 }
644
645 /* Handle pointer conversion. */ /* SPEE 900220. */
646 /* Targets are expected to provide conversion insns between PxImode and
647 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
648 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
649 {
650 enum machine_mode full_mode
651 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
652
653 if (trunc_optab->handlers[to_mode][full_mode].insn_code
654 == CODE_FOR_nothing)
655 abort ();
656
657 if (full_mode != from_mode)
658 from = convert_to_mode (full_mode, from, unsignedp);
659 emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
660 to, from, UNKNOWN);
661 return;
662 }
663 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
664 {
665 enum machine_mode full_mode
666 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
667
668 if (sext_optab->handlers[full_mode][from_mode].insn_code
669 == CODE_FOR_nothing)
670 abort ();
671
672 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
673 to, from, UNKNOWN);
674 if (to_mode == full_mode)
675 return;
676
677 /* else proceed to integer conversions below */
678 from_mode = full_mode;
679 }
680
681 /* Now both modes are integers. */
682
683 /* Handle expanding beyond a word. */
684 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
685 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
686 {
687 rtx insns;
688 rtx lowpart;
689 rtx fill_value;
690 rtx lowfrom;
691 int i;
692 enum machine_mode lowpart_mode;
693 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
694
695 /* Try converting directly if the insn is supported. */
696 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
697 != CODE_FOR_nothing)
698 {
699 /* If FROM is a SUBREG, put it into a register. Do this
700 so that we always generate the same set of insns for
701 better cse'ing; if an intermediate assignment occurred,
702 we won't be doing the operation directly on the SUBREG. */
703 if (optimize > 0 && GET_CODE (from) == SUBREG)
704 from = force_reg (from_mode, from);
705 emit_unop_insn (code, to, from, equiv_code);
706 return;
707 }
708 /* Next, try converting via full word. */
709 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
710 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
711 != CODE_FOR_nothing))
712 {
713 if (GET_CODE (to) == REG)
714 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
715 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
716 emit_unop_insn (code, to,
717 gen_lowpart (word_mode, to), equiv_code);
718 return;
719 }
720
721 /* No special multiword conversion insn; do it by hand. */
722 start_sequence ();
723
724 /* Since we will turn this into a no conflict block, we must ensure
725 that the source does not overlap the target. */
726
727 if (reg_overlap_mentioned_p (to, from))
728 from = force_reg (from_mode, from);
729
730 /* Get a copy of FROM widened to a word, if necessary. */
731 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
732 lowpart_mode = word_mode;
733 else
734 lowpart_mode = from_mode;
735
736 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
737
738 lowpart = gen_lowpart (lowpart_mode, to);
739 emit_move_insn (lowpart, lowfrom);
740
741 /* Compute the value to put in each remaining word. */
742 if (unsignedp)
743 fill_value = const0_rtx;
744 else
745 {
746 #ifdef HAVE_slt
747 if (HAVE_slt
748 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
749 && STORE_FLAG_VALUE == -1)
750 {
751 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
752 lowpart_mode, 0);
753 fill_value = gen_reg_rtx (word_mode);
754 emit_insn (gen_slt (fill_value));
755 }
756 else
757 #endif
758 {
759 fill_value
760 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
761 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
762 NULL_RTX, 0);
763 fill_value = convert_to_mode (word_mode, fill_value, 1);
764 }
765 }
766
767 /* Fill the remaining words. */
768 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
769 {
770 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
771 rtx subword = operand_subword (to, index, 1, to_mode);
772
773 if (subword == 0)
774 abort ();
775
776 if (fill_value != subword)
777 emit_move_insn (subword, fill_value);
778 }
779
780 insns = get_insns ();
781 end_sequence ();
782
783 emit_no_conflict_block (insns, to, from, NULL_RTX,
784 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
785 return;
786 }
787
788 /* Truncating multi-word to a word or less. */
789 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
790 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
791 {
792 if (!((GET_CODE (from) == MEM
793 && ! MEM_VOLATILE_P (from)
794 && direct_load[(int) to_mode]
795 && ! mode_dependent_address_p (XEXP (from, 0)))
796 || GET_CODE (from) == REG
797 || GET_CODE (from) == SUBREG))
798 from = force_reg (from_mode, from);
799 convert_move (to, gen_lowpart (word_mode, from), 0);
800 return;
801 }
802
803 /* Now follow all the conversions between integers
804 no more than a word long. */
805
806 /* For truncation, usually we can just refer to FROM in a narrower mode. */
807 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
808 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
809 GET_MODE_BITSIZE (from_mode)))
810 {
811 if (!((GET_CODE (from) == MEM
812 && ! MEM_VOLATILE_P (from)
813 && direct_load[(int) to_mode]
814 && ! mode_dependent_address_p (XEXP (from, 0)))
815 || GET_CODE (from) == REG
816 || GET_CODE (from) == SUBREG))
817 from = force_reg (from_mode, from);
818 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
819 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
820 from = copy_to_reg (from);
821 emit_move_insn (to, gen_lowpart (to_mode, from));
822 return;
823 }
824
825 /* Handle extension. */
826 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
827 {
828 /* Convert directly if that works. */
829 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
830 != CODE_FOR_nothing)
831 {
832 if (flag_force_mem)
833 from = force_not_mem (from);
834
835 emit_unop_insn (code, to, from, equiv_code);
836 return;
837 }
838 else
839 {
840 enum machine_mode intermediate;
841 rtx tmp;
842 tree shift_amount;
843
844 /* Search for a mode to convert via. */
845 for (intermediate = from_mode; intermediate != VOIDmode;
846 intermediate = GET_MODE_WIDER_MODE (intermediate))
847 if (((can_extend_p (to_mode, intermediate, unsignedp)
848 != CODE_FOR_nothing)
849 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
850 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
851 GET_MODE_BITSIZE (intermediate))))
852 && (can_extend_p (intermediate, from_mode, unsignedp)
853 != CODE_FOR_nothing))
854 {
855 convert_move (to, convert_to_mode (intermediate, from,
856 unsignedp), unsignedp);
857 return;
858 }
859
860 /* No suitable intermediate mode.
861 Generate what we need with shifts. */
862 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
863 - GET_MODE_BITSIZE (from_mode), 0);
864 from = gen_lowpart (to_mode, force_reg (from_mode, from));
865 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
866 to, unsignedp);
867 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
868 to, unsignedp);
869 if (tmp != to)
870 emit_move_insn (to, tmp);
871 return;
872 }
873 }
874
875 /* Support special truncate insns for certain modes. */
876 if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
877 {
878 emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
879 to, from, UNKNOWN);
880 return;
881 }
882
883 /* Handle truncation of volatile memrefs, and so on;
884 the things that couldn't be truncated directly,
885 and for which there was no special instruction.
886
887 ??? Code above formerly short-circuited this, for most integer
888 mode pairs, with a force_reg in from_mode followed by a recursive
889 call to this routine. Appears always to have been wrong. */
890 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
891 {
892 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
893 emit_move_insn (to, temp);
894 return;
895 }
896
897 /* Mode combination is not recognized. */
898 abort ();
899 }
900
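/* Usage sketch (hypothetical, not part of GCC): widen an SImode value
   into a fresh DImode pseudo with zero-extension.  */
#if 0
static rtx
example_zero_extend_si_to_di (rtx si_val)
{
  rtx di_reg = gen_reg_rtx (DImode);
  /* UNSIGNEDP is 1, so the high word is filled with zeros rather
     than with copies of the sign bit.  */
  convert_move (di_reg, si_val, 1);
  return di_reg;
}
#endif
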
901 /* Return an rtx for a value that would result
902 from converting X to mode MODE.
903 Both X and MODE may be floating, or both integer.
904 UNSIGNEDP is nonzero if X is an unsigned value.
905 This can be done by referring to a part of X in place
906 or by copying to a new temporary with conversion.
907
908 This function *must not* call protect_from_queue
909 except when putting X into an insn (in which case convert_move does it). */
910
911 rtx
912 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
913 {
914 return convert_modes (mode, VOIDmode, x, unsignedp);
915 }
916
917 /* Return an rtx for a value that would result
918 from converting X from mode OLDMODE to mode MODE.
919 Both modes may be floating, or both integer.
920 UNSIGNEDP is nonzero if X is an unsigned value.
921
922 This can be done by referring to a part of X in place
923 or by copying to a new temporary with conversion.
924
925 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
926
927 This function *must not* call protect_from_queue
928 except when putting X into an insn (in which case convert_move does it). */
929
930 rtx
931 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
932 {
933 rtx temp;
934
935 /* If FROM is a SUBREG that indicates that we have already done at least
936 the required extension, strip it. */
937
938 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
939 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
940 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
941 x = gen_lowpart (mode, x);
942
943 if (GET_MODE (x) != VOIDmode)
944 oldmode = GET_MODE (x);
945
946 if (mode == oldmode)
947 return x;
948
949 /* There is one case that we must handle specially: If we are converting
950 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
951 we are to interpret the constant as unsigned, gen_lowpart will do
952 the wrong thing if the constant appears negative. What we want to do is
953 make the high-order word of the constant zero, not all ones. */
954
955 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
956 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
957 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
958 {
959 HOST_WIDE_INT val = INTVAL (x);
960
961 if (oldmode != VOIDmode
962 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
963 {
964 int width = GET_MODE_BITSIZE (oldmode);
965
966 /* We need to zero extend VAL. */
967 val &= ((HOST_WIDE_INT) 1 << width) - 1;
968 }
969
970 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
971 }
972
973 /* We can do this with a gen_lowpart if both desired and current modes
974 are integer, and this is either a constant integer, a register, or a
975 non-volatile MEM. Except for the constant case where MODE is no
976 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
977
978 if ((GET_CODE (x) == CONST_INT
979 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
980 || (GET_MODE_CLASS (mode) == MODE_INT
981 && GET_MODE_CLASS (oldmode) == MODE_INT
982 && (GET_CODE (x) == CONST_DOUBLE
983 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
984 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
985 && direct_load[(int) mode])
986 || (GET_CODE (x) == REG
987 && (! HARD_REGISTER_P (x)
988 || HARD_REGNO_MODE_OK (REGNO (x), mode))
989 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
990 GET_MODE_BITSIZE (GET_MODE (x)))))))))
991 {
992 /* ?? If we don't know OLDMODE, we have to assume here that
993 X does not need sign- or zero-extension. This may not be
994 the case, but it's the best we can do. */
995 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
996 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
997 {
998 HOST_WIDE_INT val = INTVAL (x);
999 int width = GET_MODE_BITSIZE (oldmode);
1000
1001 /* We must sign or zero-extend in this case. Start by
1002 zero-extending, then sign extend if we need to. */
1003 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1004 if (! unsignedp
1005 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1006 val |= (HOST_WIDE_INT) (-1) << width;
1007
1008 return gen_int_mode (val, mode);
1009 }
1010
1011 return gen_lowpart (mode, x);
1012 }
1013
1014 /* Converting an integer constant into MODE is always equivalent to a
1015 subreg operation. */
1016 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
1017 {
1018 if (GET_MODE_BITSIZE (mode) != GET_MODE_BITSIZE (oldmode))
1019 abort ();
1020 return simplify_gen_subreg (mode, x, oldmode, 0);
1021 }
1022
1023 temp = gen_reg_rtx (mode);
1024 convert_move (temp, x, unsignedp);
1025 return temp;
1026 }
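
/* Worked example for the CONST_INT case above (illustrative): take
   X = (const_int 255), OLDMODE = QImode (width 8), MODE = SImode.
   val &= (1 << 8) - 1 leaves val == 255.  If UNSIGNEDP, the result is
   gen_int_mode (255, SImode).  If !UNSIGNEDP, bit 7 of val is set, so
   val |= -1 << 8 sign-extends val to -1 and the result represents
   (const_int -1) in SImode.  */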
1027 \f
1028 /* STORE_MAX_PIECES is the number of bytes at a time that we can
1029 store efficiently. Due to internal GCC limitations, this is
1030 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
1031 for an immediate constant. */
1032
1033 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
1034
1035 /* Determine whether the LEN bytes can be moved by using several move
1036 instructions. Return nonzero if a call to move_by_pieces should
1037 succeed. */
1038
1039 int
1040 can_move_by_pieces (unsigned HOST_WIDE_INT len,
1041 unsigned int align ATTRIBUTE_UNUSED)
1042 {
1043 return MOVE_BY_PIECES_P (len, align);
1044 }
1045
1046 /* Generate several move instructions to copy LEN bytes from block FROM to
1047 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1048 and TO through protect_from_queue before calling.
1049
1050 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1051 used to push FROM to the stack.
1052
1053 ALIGN is maximum stack alignment we can assume.
1054
1055 If ENDP is 0, return TO; if ENDP is 1, return the memory at the end, a la
1056 mempcpy; and if ENDP is 2, return the memory at the end minus one byte, a la
1057 stpcpy. */
1058
1059 rtx
1060 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
1061 unsigned int align, int endp)
1062 {
1063 struct move_by_pieces data;
1064 rtx to_addr, from_addr = XEXP (from, 0);
1065 unsigned int max_size = MOVE_MAX_PIECES + 1;
1066 enum machine_mode mode = VOIDmode, tmode;
1067 enum insn_code icode;
1068
1069 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
1070
1071 data.offset = 0;
1072 data.from_addr = from_addr;
1073 if (to)
1074 {
1075 to_addr = XEXP (to, 0);
1076 data.to = to;
1077 data.autinc_to
1078 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1079 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1080 data.reverse
1081 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1082 }
1083 else
1084 {
1085 to_addr = NULL_RTX;
1086 data.to = NULL_RTX;
1087 data.autinc_to = 1;
1088 #ifdef STACK_GROWS_DOWNWARD
1089 data.reverse = 1;
1090 #else
1091 data.reverse = 0;
1092 #endif
1093 }
1094 data.to_addr = to_addr;
1095 data.from = from;
1096 data.autinc_from
1097 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1098 || GET_CODE (from_addr) == POST_INC
1099 || GET_CODE (from_addr) == POST_DEC);
1100
1101 data.explicit_inc_from = 0;
1102 data.explicit_inc_to = 0;
1103 if (data.reverse) data.offset = len;
1104 data.len = len;
1105
1106 /* If copying requires more than two move insns,
1107 copy addresses to registers (to make displacements shorter)
1108 and use post-increment if available. */
1109 if (!(data.autinc_from && data.autinc_to)
1110 && move_by_pieces_ninsns (len, align) > 2)
1111 {
1112 /* Find the mode of the largest move... */
1113 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1114 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1115 if (GET_MODE_SIZE (tmode) < max_size)
1116 mode = tmode;
1117
1118 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1119 {
1120 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1121 data.autinc_from = 1;
1122 data.explicit_inc_from = -1;
1123 }
1124 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1125 {
1126 data.from_addr = copy_addr_to_reg (from_addr);
1127 data.autinc_from = 1;
1128 data.explicit_inc_from = 1;
1129 }
1130 if (!data.autinc_from && CONSTANT_P (from_addr))
1131 data.from_addr = copy_addr_to_reg (from_addr);
1132 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1133 {
1134 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1135 data.autinc_to = 1;
1136 data.explicit_inc_to = -1;
1137 }
1138 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1139 {
1140 data.to_addr = copy_addr_to_reg (to_addr);
1141 data.autinc_to = 1;
1142 data.explicit_inc_to = 1;
1143 }
1144 if (!data.autinc_to && CONSTANT_P (to_addr))
1145 data.to_addr = copy_addr_to_reg (to_addr);
1146 }
1147
1148 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1149 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1150 align = MOVE_MAX * BITS_PER_UNIT;
1151
1152 /* First move what we can in the largest integer mode, then go to
1153 successively smaller modes. */
1154
1155 while (max_size > 1)
1156 {
1157 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1158 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1159 if (GET_MODE_SIZE (tmode) < max_size)
1160 mode = tmode;
1161
1162 if (mode == VOIDmode)
1163 break;
1164
1165 icode = mov_optab->handlers[(int) mode].insn_code;
1166 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1167 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1168
1169 max_size = GET_MODE_SIZE (mode);
1170 }
1171
1172 /* The code above should have handled everything. */
1173 if (data.len > 0)
1174 abort ();
1175
1176 if (endp)
1177 {
1178 rtx to1;
1179
1180 if (data.reverse)
1181 abort ();
1182 if (data.autinc_to)
1183 {
1184 if (endp == 2)
1185 {
1186 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1187 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1188 else
1189 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
1190 -1));
1191 }
1192 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1193 data.offset);
1194 }
1195 else
1196 {
1197 if (endp == 2)
1198 --data.offset;
1199 to1 = adjust_address (data.to, QImode, data.offset);
1200 }
1201 return to1;
1202 }
1203 else
1204 return data.to;
1205 }
1206
1207 /* Return number of insns required to move L bytes by pieces.
1208 ALIGN (in bits) is maximum alignment we can assume. */
1209
1210 static unsigned HOST_WIDE_INT
1211 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align)
1212 {
1213 unsigned HOST_WIDE_INT n_insns = 0;
1214 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1215
1216 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1217 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1218 align = MOVE_MAX * BITS_PER_UNIT;
1219
1220 while (max_size > 1)
1221 {
1222 enum machine_mode mode = VOIDmode, tmode;
1223 enum insn_code icode;
1224
1225 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1226 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1227 if (GET_MODE_SIZE (tmode) < max_size)
1228 mode = tmode;
1229
1230 if (mode == VOIDmode)
1231 break;
1232
1233 icode = mov_optab->handlers[(int) mode].insn_code;
1234 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1235 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1236
1237 max_size = GET_MODE_SIZE (mode);
1238 }
1239
1240 if (l)
1241 abort ();
1242 return n_insns;
1243 }
1244
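/* Worked example (illustrative): for L = 11 bytes with sufficient
   alignment and integer modes of sizes 1, 2 and 4, the loop first
   picks SImode: 11 / 4 = 2 insns, leaving l = 3; then HImode:
   3 / 2 = 1 insn, leaving l = 1; then QImode: 1 insn.  The function
   returns 2 + 1 + 1 = 4.  */
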
1245 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1246 with move instructions for mode MODE. GENFUN is the gen_... function
1247 to make a move insn for that mode. DATA has all the other info. */
1248
1249 static void
1250 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1251 struct move_by_pieces *data)
1252 {
1253 unsigned int size = GET_MODE_SIZE (mode);
1254 rtx to1 = NULL_RTX, from1;
1255
1256 while (data->len >= size)
1257 {
1258 if (data->reverse)
1259 data->offset -= size;
1260
1261 if (data->to)
1262 {
1263 if (data->autinc_to)
1264 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1265 data->offset);
1266 else
1267 to1 = adjust_address (data->to, mode, data->offset);
1268 }
1269
1270 if (data->autinc_from)
1271 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1272 data->offset);
1273 else
1274 from1 = adjust_address (data->from, mode, data->offset);
1275
1276 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1277 emit_insn (gen_add2_insn (data->to_addr,
1278 GEN_INT (-(HOST_WIDE_INT)size)));
1279 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1280 emit_insn (gen_add2_insn (data->from_addr,
1281 GEN_INT (-(HOST_WIDE_INT)size)));
1282
1283 if (data->to)
1284 emit_insn ((*genfun) (to1, from1));
1285 else
1286 {
1287 #ifdef PUSH_ROUNDING
1288 emit_single_push_insn (mode, from1, NULL);
1289 #else
1290 abort ();
1291 #endif
1292 }
1293
1294 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1295 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1296 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1297 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1298
1299 if (! data->reverse)
1300 data->offset += size;
1301
1302 data->len -= size;
1303 }
1304 }
1305 \f
1306 /* Emit code to move a block Y to a block X. This may be done with
1307 string-move instructions, with multiple scalar move instructions,
1308 or with a library call.
1309
1310 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1311 SIZE is an rtx that says how long they are.
1312 ALIGN is the maximum alignment we can assume they have.
1313 METHOD describes what kind of copy this is, and what mechanisms may be used.
1314
1315 Return the address of the new block, if memcpy is called and returns it,
1316 0 otherwise. */
1317
1318 rtx
1319 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1320 {
1321 bool may_use_call;
1322 rtx retval = 0;
1323 unsigned int align;
1324
1325 switch (method)
1326 {
1327 case BLOCK_OP_NORMAL:
1328 may_use_call = true;
1329 break;
1330
1331 case BLOCK_OP_CALL_PARM:
1332 may_use_call = block_move_libcall_safe_for_call_parm ();
1333
1334 /* Make inhibit_defer_pop nonzero around the library call
1335 to force it to pop the arguments right away. */
1336 NO_DEFER_POP;
1337 break;
1338
1339 case BLOCK_OP_NO_LIBCALL:
1340 may_use_call = false;
1341 break;
1342
1343 default:
1344 abort ();
1345 }
1346
1347 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1348
1349 if (GET_MODE (x) != BLKmode)
1350 abort ();
1351 if (GET_MODE (y) != BLKmode)
1352 abort ();
1353
1354 x = protect_from_queue (x, 1);
1355 y = protect_from_queue (y, 0);
1356 size = protect_from_queue (size, 0);
1357
1358 if (GET_CODE (x) != MEM)
1359 abort ();
1360 if (GET_CODE (y) != MEM)
1361 abort ();
1362 if (size == 0)
1363 abort ();
1364
1365 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1366 can be incorrect is coming from __builtin_memcpy. */
1367 if (GET_CODE (size) == CONST_INT)
1368 {
1369 if (INTVAL (size) == 0)
1370 return 0;
1371
1372 x = shallow_copy_rtx (x);
1373 y = shallow_copy_rtx (y);
1374 set_mem_size (x, size);
1375 set_mem_size (y, size);
1376 }
1377
1378 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1379 move_by_pieces (x, y, INTVAL (size), align, 0);
1380 else if (emit_block_move_via_movstr (x, y, size, align))
1381 ;
1382 else if (may_use_call)
1383 retval = emit_block_move_via_libcall (x, y, size);
1384 else
1385 emit_block_move_via_loop (x, y, size, align);
1386
1387 if (method == BLOCK_OP_CALL_PARM)
1388 OK_DEFER_POP;
1389
1390 return retval;
1391 }
1392
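/* A hypothetical caller (not from GCC), copying a 32-byte aggregate
   between two BLKmode MEMs and permitting the memcpy fallback.  */
#if 0
static void
example_copy_aggregate (rtx dst, rtx src)
{
  emit_block_move (dst, src, GEN_INT (32), BLOCK_OP_NORMAL);
}
#endif
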
1393 /* A subroutine of emit_block_move. Returns true if calling the
1394 block move libcall will not clobber any parameters which may have
1395 already been placed on the stack. */
1396
1397 static bool
1398 block_move_libcall_safe_for_call_parm (void)
1399 {
1400 /* If arguments are pushed on the stack, then they're safe. */
1401 if (PUSH_ARGS)
1402 return true;
1403
1404 /* If registers go on the stack anyway, any argument is sure to clobber
1405 an outgoing argument. */
1406 #if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
1407 {
1408 tree fn = emit_block_move_libcall_fn (false);
1409 (void) fn;
1410 if (REG_PARM_STACK_SPACE (fn) != 0)
1411 return false;
1412 }
1413 #endif
1414
1415 /* If any argument goes in memory, then it might clobber an outgoing
1416 argument. */
1417 {
1418 CUMULATIVE_ARGS args_so_far;
1419 tree fn, arg;
1420
1421 fn = emit_block_move_libcall_fn (false);
1422 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0);
1423
1424 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1425 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1426 {
1427 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1428 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1429 if (!tmp || !REG_P (tmp))
1430 return false;
1431 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1432 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
1433 NULL_TREE, 1))
1434 return false;
1435 #endif
1436 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1437 }
1438 }
1439 return true;
1440 }
1441
1442 /* A subroutine of emit_block_move. Expand a movstr pattern;
1443 return true if successful. */
1444
1445 static bool
1446 emit_block_move_via_movstr (rtx x, rtx y, rtx size, unsigned int align)
1447 {
1448 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1449 enum machine_mode mode;
1450
1451 /* Since this is a move insn, we don't care about volatility. */
1452 volatile_ok = 1;
1453
1454 /* Try the most limited insn first, because there's no point
1455 including more than one in the machine description unless
1456 the more limited one has some advantage. */
1457
1458 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1459 mode = GET_MODE_WIDER_MODE (mode))
1460 {
1461 enum insn_code code = movstr_optab[(int) mode];
1462 insn_operand_predicate_fn pred;
1463
1464 if (code != CODE_FOR_nothing
1465 /* We don't need MODE to be narrower than HOST_BITS_PER_WIDE_INT
1466 here because if SIZE is less than the mode mask, as it is
1467 returned by the macro, it will definitely be less than the
1468 actual mode mask. */
1469 && ((GET_CODE (size) == CONST_INT
1470 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1471 <= (GET_MODE_MASK (mode) >> 1)))
1472 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1473 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1474 || (*pred) (x, BLKmode))
1475 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1476 || (*pred) (y, BLKmode))
1477 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1478 || (*pred) (opalign, VOIDmode)))
1479 {
1480 rtx op2;
1481 rtx last = get_last_insn ();
1482 rtx pat;
1483
1484 op2 = convert_to_mode (mode, size, 1);
1485 pred = insn_data[(int) code].operand[2].predicate;
1486 if (pred != 0 && ! (*pred) (op2, mode))
1487 op2 = copy_to_mode_reg (mode, op2);
1488
1489 /* ??? When called via emit_block_move_for_call, it'd be
1490 nice if there were some way to inform the backend, so
1491 that it doesn't fail the expansion because it thinks
1492 emitting the libcall would be more efficient. */
1493
1494 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1495 if (pat)
1496 {
1497 emit_insn (pat);
1498 volatile_ok = 0;
1499 return true;
1500 }
1501 else
1502 delete_insns_since (last);
1503 }
1504 }
1505
1506 volatile_ok = 0;
1507 return false;
1508 }
1509
1510 /* A subroutine of emit_block_move. Expand a call to memcpy or bcopy.
1511 Return the return value from memcpy, 0 otherwise. */
1512
1513 static rtx
1514 emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
1515 {
1516 rtx dst_addr, src_addr;
1517 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1518 enum machine_mode size_mode;
1519 rtx retval;
1520
1521 /* DST, SRC, or SIZE may have been passed through protect_from_queue.
1522
1523 It is unsafe to save the value generated by protect_from_queue and reuse
1524 it later. Consider what happens if emit_queue is called before the
1525 return value from protect_from_queue is used.
1526
1527 Expansion of the CALL_EXPR below will call emit_queue before we are
1528 finished emitting RTL for argument setup. So if we are not careful we
1529 could get the wrong value for an argument.
1530
1531 To avoid this problem we go ahead and emit code to copy the addresses of
1532 DST and SRC and SIZE into new pseudos. We can then place those new
1533 pseudos into an RTL_EXPR and use them later, even after a call to
1534 emit_queue.
1535
1536 Note this is not strictly needed for library calls since they do not call
1537 emit_queue before loading their arguments. However, we may need to have
1538 library calls call emit_queue in the future since failing to do so could
1539 cause problems for targets which define SMALL_REGISTER_CLASSES and pass
1540 arguments in registers. */
1541
1542 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1543 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1544
1545 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1546 src_addr = convert_memory_address (ptr_mode, src_addr);
1547
1548 dst_tree = make_tree (ptr_type_node, dst_addr);
1549 src_tree = make_tree (ptr_type_node, src_addr);
1550
1551 if (TARGET_MEM_FUNCTIONS)
1552 size_mode = TYPE_MODE (sizetype);
1553 else
1554 size_mode = TYPE_MODE (unsigned_type_node);
1555
1556 size = convert_to_mode (size_mode, size, 1);
1557 size = copy_to_mode_reg (size_mode, size);
1558
1559 /* It is incorrect to use the libcall calling conventions to call
1560 memcpy in this context. This could be a user call to memcpy and
1561 the user may wish to examine the return value from memcpy. For
1562 targets where libcalls and normal calls have different conventions
1563 for returning pointers, we could end up generating incorrect code.
1564
1565 For convenience, we generate the call to bcopy this way as well. */
1566
1567 if (TARGET_MEM_FUNCTIONS)
1568 size_tree = make_tree (sizetype, size);
1569 else
1570 size_tree = make_tree (unsigned_type_node, size);
1571
1572 fn = emit_block_move_libcall_fn (true);
1573 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1574 if (TARGET_MEM_FUNCTIONS)
1575 {
1576 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1577 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1578 }
1579 else
1580 {
1581 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1582 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1583 }
1584
1585 /* Now we have to build up the CALL_EXPR itself. */
1586 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1587 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1588 call_expr, arg_list, NULL_TREE);
1589
1590 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1591
1592 /* If we are initializing a readonly value, show the above call clobbered
1593 it. Otherwise, a load from it may erroneously be hoisted from a loop, or
1594 the delay slot scheduler might overlook conflicts and make bad
1595 decisions. */
1596 if (RTX_UNCHANGING_P (dst))
1597 add_function_usage_to
1598 (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
1599 gen_rtx_CLOBBER (VOIDmode, dst),
1600 NULL_RTX));
1601
1602 return TARGET_MEM_FUNCTIONS ? retval : NULL_RTX;
1603 }
1604
1605 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1606 for the function we use for block copies. The first time FOR_CALL
1607 is true, we call assemble_external. */
1608
1609 static GTY(()) tree block_move_fn;
1610
1611 void
1612 init_block_move_fn (const char *asmspec)
1613 {
1614 if (!block_move_fn)
1615 {
1616 tree args, fn;
1617
1618 if (TARGET_MEM_FUNCTIONS)
1619 {
1620 fn = get_identifier ("memcpy");
1621 args = build_function_type_list (ptr_type_node, ptr_type_node,
1622 const_ptr_type_node, sizetype,
1623 NULL_TREE);
1624 }
1625 else
1626 {
1627 fn = get_identifier ("bcopy");
1628 args = build_function_type_list (void_type_node, const_ptr_type_node,
1629 ptr_type_node, unsigned_type_node,
1630 NULL_TREE);
1631 }
1632
1633 fn = build_decl (FUNCTION_DECL, fn, args);
1634 DECL_EXTERNAL (fn) = 1;
1635 TREE_PUBLIC (fn) = 1;
1636 DECL_ARTIFICIAL (fn) = 1;
1637 TREE_NOTHROW (fn) = 1;
1638
1639 block_move_fn = fn;
1640 }
1641
1642 if (asmspec)
1643 {
1644 SET_DECL_RTL (block_move_fn, NULL_RTX);
1645 SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));
1646 }
1647 }
1648
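/* Hypothetical use (no actual GCC port is implied): a target that
   renames its block-move helper could call, from its option-handling
   code,

       init_block_move_fn ("__target_memcpy");

   which clears the decl's RTL and re-sets its assembler name, as the
   code above shows.  */
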
1649 static tree
1650 emit_block_move_libcall_fn (int for_call)
1651 {
1652 static bool emitted_extern;
1653
1654 if (!block_move_fn)
1655 init_block_move_fn (NULL);
1656
1657 if (for_call && !emitted_extern)
1658 {
1659 emitted_extern = true;
1660 make_decl_rtl (block_move_fn, NULL);
1661 assemble_external (block_move_fn);
1662 }
1663
1664 return block_move_fn;
1665 }
1666
1667 /* A subroutine of emit_block_move. Copy the data via an explicit
1668 loop. This is used only when libcalls are forbidden. */
1669 /* ??? It'd be nice to copy in hunks larger than QImode. */
1670
1671 static void
1672 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1673 unsigned int align ATTRIBUTE_UNUSED)
1674 {
1675 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1676 enum machine_mode iter_mode;
1677
1678 iter_mode = GET_MODE (size);
1679 if (iter_mode == VOIDmode)
1680 iter_mode = word_mode;
1681
1682 top_label = gen_label_rtx ();
1683 cmp_label = gen_label_rtx ();
1684 iter = gen_reg_rtx (iter_mode);
1685
1686 emit_move_insn (iter, const0_rtx);
1687
1688 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1689 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1690 do_pending_stack_adjust ();
1691
1692 emit_note (NOTE_INSN_LOOP_BEG);
1693
1694 emit_jump (cmp_label);
1695 emit_label (top_label);
1696
1697 tmp = convert_modes (Pmode, iter_mode, iter, true);
1698 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1699 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1700 x = change_address (x, QImode, x_addr);
1701 y = change_address (y, QImode, y_addr);
1702
1703 emit_move_insn (x, y);
1704
1705 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1706 true, OPTAB_LIB_WIDEN);
1707 if (tmp != iter)
1708 emit_move_insn (iter, tmp);
1709
1710 emit_note (NOTE_INSN_LOOP_CONT);
1711 emit_label (cmp_label);
1712
1713 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1714 true, top_label);
1715
1716 emit_note (NOTE_INSN_LOOP_END);
1717 }
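
/* The RTL emitted above corresponds to this C-level sketch
   (illustration only; one QImode move per iteration):

       iter = 0;
       goto cmp;
     top:
       x[iter] = y[iter];
       iter = iter + 1;
     cmp:
       if (iter < size) goto top;
*/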
1718 \f
1719 /* Copy all or part of a value X into registers starting at REGNO.
1720 The number of registers to be filled is NREGS. */
1721
1722 void
1723 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1724 {
1725 int i;
1726 #ifdef HAVE_load_multiple
1727 rtx pat;
1728 rtx last;
1729 #endif
1730
1731 if (nregs == 0)
1732 return;
1733
1734 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1735 x = validize_mem (force_const_mem (mode, x));
1736
1737 /* See if the machine can do this with a load multiple insn. */
1738 #ifdef HAVE_load_multiple
1739 if (HAVE_load_multiple)
1740 {
1741 last = get_last_insn ();
1742 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1743 GEN_INT (nregs));
1744 if (pat)
1745 {
1746 emit_insn (pat);
1747 return;
1748 }
1749 else
1750 delete_insns_since (last);
1751 }
1752 #endif
1753
1754 for (i = 0; i < nregs; i++)
1755 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1756 operand_subword_force (x, i, mode));
1757 }
1758
1759 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1760 The number of registers to be filled is NREGS. */
1761
1762 void
1763 move_block_from_reg (int regno, rtx x, int nregs)
1764 {
1765 int i;
1766
1767 if (nregs == 0)
1768 return;
1769
1770 /* See if the machine can do this with a store multiple insn. */
1771 #ifdef HAVE_store_multiple
1772 if (HAVE_store_multiple)
1773 {
1774 rtx last = get_last_insn ();
1775 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1776 GEN_INT (nregs));
1777 if (pat)
1778 {
1779 emit_insn (pat);
1780 return;
1781 }
1782 else
1783 delete_insns_since (last);
1784 }
1785 #endif
1786
1787 for (i = 0; i < nregs; i++)
1788 {
1789 rtx tem = operand_subword (x, i, 1, BLKmode);
1790
1791 if (tem == 0)
1792 abort ();
1793
1794 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1795 }
1796 }
1797
1798 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1799 ORIG, where ORIG is a non-consecutive group of registers represented by
1800 a PARALLEL. The clone is identical to the original except in that the
1801 original set of registers is replaced by a new set of pseudo registers.
1802 The new set has the same modes as the original set. */
1803
1804 rtx
1805 gen_group_rtx (rtx orig)
1806 {
1807 int i, length;
1808 rtx *tmps;
1809
1810 if (GET_CODE (orig) != PARALLEL)
1811 abort ();
1812
1813 length = XVECLEN (orig, 0);
1814 tmps = alloca (sizeof (rtx) * length);
1815
1816 /* Skip a NULL entry in the first slot. */
1817 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1818
1819 if (i)
1820 tmps[0] = 0;
1821
1822 for (; i < length; i++)
1823 {
1824 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1825 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1826
1827 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1828 }
1829
1830 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1831 }
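
/* For illustration, given ORIG describing a value split across two
   hard registers, e.g.

       (parallel [(expr_list (reg:SI 3) (const_int 0))
                  (expr_list (reg:SI 4) (const_int 4))])

   gen_group_rtx returns a copy in which each hard register is
   replaced by a fresh pseudo of the same mode, with the byte
   offsets left untouched:

       (parallel [(expr_list (reg:SI 101) (const_int 0))
                  (expr_list (reg:SI 102) (const_int 4))])  */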
1832
1833 /* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
1834 where DST is non-consecutive registers represented by a PARALLEL.
1835 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1836 if not known. */
1837
1838 void
1839 emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
1840 {
1841 rtx *tmps, src;
1842 int start, i;
1843
1844 if (GET_CODE (dst) != PARALLEL)
1845 abort ();
1846
1847 /* Check for a NULL entry, used to indicate that the parameter goes
1848 both on the stack and in registers. */
1849 if (XEXP (XVECEXP (dst, 0, 0), 0))
1850 start = 0;
1851 else
1852 start = 1;
1853
1854 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1855
1856 /* Process the pieces. */
1857 for (i = start; i < XVECLEN (dst, 0); i++)
1858 {
1859 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1860 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1861 unsigned int bytelen = GET_MODE_SIZE (mode);
1862 int shift = 0;
1863
1864 /* Handle trailing fragments that run over the size of the struct. */
1865 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1866 {
1867 /* Arrange to shift the fragment to where it belongs.
1868 extract_bit_field loads to the lsb of the reg. */
1869 if (
1870 #ifdef BLOCK_REG_PADDING
1871 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1872 == (BYTES_BIG_ENDIAN ? upward : downward)
1873 #else
1874 BYTES_BIG_ENDIAN
1875 #endif
1876 )
1877 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1878 bytelen = ssize - bytepos;
1879 if (bytelen <= 0)
1880 abort ();
1881 }
1882
1883 /* If we won't be loading directly from memory, protect the real source
1884 from strange tricks we might play; but make sure that the source can
1885 be loaded directly into the destination. */
1886 src = orig_src;
1887 if (GET_CODE (orig_src) != MEM
1888 && (!CONSTANT_P (orig_src)
1889 || (GET_MODE (orig_src) != mode
1890 && GET_MODE (orig_src) != VOIDmode)))
1891 {
1892 if (GET_MODE (orig_src) == VOIDmode)
1893 src = gen_reg_rtx (mode);
1894 else
1895 src = gen_reg_rtx (GET_MODE (orig_src));
1896
1897 emit_move_insn (src, orig_src);
1898 }
1899
1900 /* Optimize the access just a bit. */
1901 if (GET_CODE (src) == MEM
1902 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1903 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1904 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1905 && bytelen == GET_MODE_SIZE (mode))
1906 {
1907 tmps[i] = gen_reg_rtx (mode);
1908 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1909 }
1910 else if (GET_CODE (src) == CONCAT)
1911 {
1912 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1913 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1914
1915 if ((bytepos == 0 && bytelen == slen0)
1916 || (bytepos != 0 && bytepos + bytelen <= slen))
1917 {
1918 /* The following assumes that the concatenated objects all
1919 have the same size. In this case, a simple calculation
1920 can be used to determine the object and the bit field
1921 to be extracted. */
1922 tmps[i] = XEXP (src, bytepos / slen0);
1923 if (! CONSTANT_P (tmps[i])
1924 && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
1925 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1926 (bytepos % slen0) * BITS_PER_UNIT,
1927 1, NULL_RTX, mode, mode, ssize);
1928 }
1929 else if (bytepos == 0)
1930 {
1931 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
1932 emit_move_insn (mem, src);
1933 tmps[i] = adjust_address (mem, mode, 0);
1934 }
1935 else
1936 abort ();
1937 }
1938 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1939 SIMD register, which is currently broken. While we get GCC
1940 to emit proper RTL for these cases, let's dump to memory. */
1941 else if (VECTOR_MODE_P (GET_MODE (dst))
1942 && GET_CODE (src) == REG)
1943 {
1944 int slen = GET_MODE_SIZE (GET_MODE (src));
1945 rtx mem;
1946
1947 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1948 emit_move_insn (mem, src);
1949 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1950 }
1951 else if (CONSTANT_P (src)
1952 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
1953 tmps[i] = src;
1954 else
1955 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1956 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1957 mode, mode, ssize);
1958
1959 if (shift)
1960 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1961 tmps[i], 0, OPTAB_WIDEN);
1962 }
1963
1964 emit_queue ();
1965
1966 /* Copy the extracted pieces into the proper (probable) hard regs. */
1967 for (i = start; i < XVECLEN (dst, 0); i++)
1968 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
1969 }
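
/* A usage sketch (DST, SRC_MEM and ARG are hypothetical): to load a
   12-byte BLKmode argument into the register group described by DST,

       emit_group_load (dst, src_mem, TREE_TYPE (arg), 12);

   after which each register named in DST holds its piece of the
   block, with any trailing fragment shifted as BLOCK_REG_PADDING
   requires.  */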
1970
1971 /* Emit code to move a block SRC to block DST, where SRC and DST are
1972 non-consecutive groups of registers, each represented by a PARALLEL. */
1973
1974 void
1975 emit_group_move (rtx dst, rtx src)
1976 {
1977 int i;
1978
1979 if (GET_CODE (src) != PARALLEL
1980 || GET_CODE (dst) != PARALLEL
1981 || XVECLEN (src, 0) != XVECLEN (dst, 0))
1982 abort ();
1983
1984 /* Skip first entry if NULL. */
1985 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1986 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1987 XEXP (XVECEXP (src, 0, i), 0));
1988 }
1989
1990 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1991 where SRC is non-consecutive registers represented by a PARALLEL.
1992 SSIZE represents the total size of block ORIG_DST, or -1 if not
1993 known. */
1994
1995 void
1996 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1997 {
1998 rtx *tmps, dst;
1999 int start, i;
2000
2001 if (GET_CODE (src) != PARALLEL)
2002 abort ();
2003
2004 /* Check for a NULL entry, used to indicate that the parameter goes
2005 both on the stack and in registers. */
2006 if (XEXP (XVECEXP (src, 0, 0), 0))
2007 start = 0;
2008 else
2009 start = 1;
2010
2011 tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));
2012
2013 /* Copy the (probable) hard regs into pseudos. */
2014 for (i = start; i < XVECLEN (src, 0); i++)
2015 {
2016 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2017 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2018 emit_move_insn (tmps[i], reg);
2019 }
2020 emit_queue ();
2021
2022 /* If we won't be storing directly into memory, protect the real destination
2023 from strange tricks we might play. */
2024 dst = orig_dst;
2025 if (GET_CODE (dst) == PARALLEL)
2026 {
2027 rtx temp;
2028
2029 /* We can get a PARALLEL dst if there is a conditional expression in
2030 a return statement. In that case, the dst and src are the same,
2031 so no action is necessary. */
2032 if (rtx_equal_p (dst, src))
2033 return;
2034
2035 /* It is unclear if we can ever reach here, but we may as well handle
2036 it. Allocate a temporary, and split this into a store/load to/from
2037 the temporary. */
2038
2039 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2040 emit_group_store (temp, src, type, ssize);
2041 emit_group_load (dst, temp, type, ssize);
2042 return;
2043 }
2044 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2045 {
2046 dst = gen_reg_rtx (GET_MODE (orig_dst));
2047 /* Make life a bit easier for combine. */
2048 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
2049 }
2050
2051 /* Process the pieces. */
2052 for (i = start; i < XVECLEN (src, 0); i++)
2053 {
2054 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2055 enum machine_mode mode = GET_MODE (tmps[i]);
2056 unsigned int bytelen = GET_MODE_SIZE (mode);
2057 rtx dest = dst;
2058
2059 /* Handle trailing fragments that run over the size of the struct. */
2060 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2061 {
2062 /* store_bit_field always takes its value from the lsb.
2063 Move the fragment to the lsb if it's not already there. */
2064 if (
2065 #ifdef BLOCK_REG_PADDING
2066 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2067 == (BYTES_BIG_ENDIAN ? upward : downward)
2068 #else
2069 BYTES_BIG_ENDIAN
2070 #endif
2071 )
2072 {
2073 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2074 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2075 tmps[i], 0, OPTAB_WIDEN);
2076 }
2077 bytelen = ssize - bytepos;
2078 }
2079
2080 if (GET_CODE (dst) == CONCAT)
2081 {
2082 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2083 dest = XEXP (dst, 0);
2084 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2085 {
2086 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2087 dest = XEXP (dst, 1);
2088 }
2089 else if (bytepos == 0 && XVECLEN (src, 0))
2090 {
2091 dest = assign_stack_temp (GET_MODE (dest),
2092 GET_MODE_SIZE (GET_MODE (dest)), 0);
2093 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2094 tmps[i]);
2095 dst = dest;
2096 break;
2097 }
2098 else
2099 abort ();
2100 }
2101
2102 /* Optimize the access just a bit. */
2103 if (GET_CODE (dest) == MEM
2104 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2105 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2106 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2107 && bytelen == GET_MODE_SIZE (mode))
2108 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2109 else
2110 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2111 mode, tmps[i], ssize);
2112 }
2113
2114 emit_queue ();
2115
2116 /* Copy from the pseudo into the (probable) hard reg. */
2117 if (orig_dst != dst)
2118 emit_move_insn (orig_dst, dst);
2119 }
2120
2121 /* Generate code to copy a BLKmode object of TYPE out of a
2122 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2123 is null, a stack temporary is created. TGTBLK is returned.
2124
2125 The purpose of this routine is to handle functions that return
2126 BLKmode structures in registers. Some machines (the PA for example)
2127 want to return all small structures in registers regardless of the
2128 structure's alignment. */
2129
2130 rtx
2131 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2132 {
2133 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2134 rtx src = NULL, dst = NULL;
2135 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2136 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2137
2138 if (tgtblk == 0)
2139 {
2140 tgtblk = assign_temp (build_qualified_type (type,
2141 (TYPE_QUALS (type)
2142 | TYPE_QUAL_CONST)),
2143 0, 1, 1);
2144 preserve_temp_slots (tgtblk);
2145 }
2146
2147 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2148 into a new pseudo which is a full word. */
2149
2150 if (GET_MODE (srcreg) != BLKmode
2151 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2152 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2153
2154 /* If the structure doesn't take up a whole number of words, see whether
2155 SRCREG is padded on the left or on the right. If it's on the left,
2156 set PADDING_CORRECTION to the number of bits to skip.
2157
2158 In most ABIs, the structure will be returned at the least significant
2159 end of the register, which translates to right padding on little-endian
2160 targets and left padding on big-endian targets. The opposite
2161 holds if the structure is returned at the most significant
2162 end of the register. */
2163 if (bytes % UNITS_PER_WORD != 0
2164 && (targetm.calls.return_in_msb (type)
2165 ? !BYTES_BIG_ENDIAN
2166 : BYTES_BIG_ENDIAN))
2167 padding_correction
2168 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
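
  /* For example, with 32-bit words on a big-endian target that returns
     structures at the least significant end of the register pair, a
     6-byte structure leaves bytes % UNITS_PER_WORD == 2, so
     PADDING_CORRECTION is 32 - 16 == 16, and extraction starts 16 bits
     into SRCREG.  */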
2169
2170 /* Copy the structure BITSIZE bits at a time.
2171
2172 We could probably emit more efficient code for machines which do not use
2173 strict alignment, but it doesn't seem worth the effort at the current
2174 time. */
2175 for (bitpos = 0, xbitpos = padding_correction;
2176 bitpos < bytes * BITS_PER_UNIT;
2177 bitpos += bitsize, xbitpos += bitsize)
2178 {
2179 /* We need a new source operand each time xbitpos is on a
2180 word boundary or when xbitpos == padding_correction
2181 (the first time through). */
2182 if (xbitpos % BITS_PER_WORD == 0
2183 || xbitpos == padding_correction)
2184 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2185 GET_MODE (srcreg));
2186
2187 /* We need a new destination operand each time bitpos is on
2188 a word boundary. */
2189 if (bitpos % BITS_PER_WORD == 0)
2190 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2191
2192 /* Use xbitpos for the source extraction (right justified) and
2193 bitpos for the destination store (left justified). */
2194 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2195 extract_bit_field (src, bitsize,
2196 xbitpos % BITS_PER_WORD, 1,
2197 NULL_RTX, word_mode, word_mode,
2198 BITS_PER_WORD),
2199 BITS_PER_WORD);
2200 }
2201
2202 return tgtblk;
2203 }
2204
2205 /* Add a USE expression for REG to the (possibly empty) list pointed
2206 to by CALL_FUSAGE. REG must denote a hard register. */
2207
2208 void
2209 use_reg (rtx *call_fusage, rtx reg)
2210 {
2211 if (GET_CODE (reg) != REG
2212 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2213 abort ();
2214
2215 *call_fusage
2216 = gen_rtx_EXPR_LIST (VOIDmode,
2217 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2218 }
2219
2220 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2221 starting at REGNO. All of these registers must be hard registers. */
2222
2223 void
2224 use_regs (rtx *call_fusage, int regno, int nregs)
2225 {
2226 int i;
2227
2228 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2229 abort ();
2230
2231 for (i = 0; i < nregs; i++)
2232 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2233 }
2234
2235 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2236 PARALLEL REGS. This is for calls that pass values in multiple
2237 non-contiguous locations. The Irix 6 ABI has examples of this. */
2238
2239 void
2240 use_group_regs (rtx *call_fusage, rtx regs)
2241 {
2242 int i;
2243
2244 for (i = 0; i < XVECLEN (regs, 0); i++)
2245 {
2246 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2247
2248 /* A NULL entry means the parameter goes both on the stack and in
2249 registers. This can also be a MEM for targets that pass values
2250 partially on the stack and partially in registers. */
2251 if (reg != 0 && GET_CODE (reg) == REG)
2252 use_reg (call_fusage, reg);
2253 }
2254 }
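
/* A usage sketch (the register number is hypothetical): a caller
   expanding a call that passes a value in hard register 4 records
   the use so that flow does not consider the register dead:

       rtx call_fusage = NULL_RTX;
       use_reg (&call_fusage, gen_rtx_REG (SImode, 4));

   CALL_FUSAGE is then attached to the CALL_INSN being generated.  */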
2255 \f
2256
2257 /* Determine whether the LEN bytes generated by CONSTFUN can be
2258 stored to memory using several move instructions. CONSTFUNDATA is
2259 a pointer which will be passed as an argument in every CONSTFUN call.
2260 ALIGN is the maximum alignment we can assume. Return nonzero if a
2261 call to store_by_pieces should succeed. */
2262
2263 int
2264 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2265 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2266 void *constfundata, unsigned int align)
2267 {
2268 unsigned HOST_WIDE_INT max_size, l;
2269 HOST_WIDE_INT offset = 0;
2270 enum machine_mode mode, tmode;
2271 enum insn_code icode;
2272 int reverse;
2273 rtx cst;
2274
2275 if (len == 0)
2276 return 1;
2277
2278 if (! STORE_BY_PIECES_P (len, align))
2279 return 0;
2280
2281 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2282 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2283 align = MOVE_MAX * BITS_PER_UNIT;
2284
2285 /* We would first store what we can in the largest integer mode, then go to
2286 successively smaller modes. */
2287
2288 for (reverse = 0;
2289 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2290 reverse++)
2291 {
2292 l = len;
2293 mode = VOIDmode;
2294 max_size = STORE_MAX_PIECES + 1;
2295 while (max_size > 1)
2296 {
2297 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2298 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2299 if (GET_MODE_SIZE (tmode) < max_size)
2300 mode = tmode;
2301
2302 if (mode == VOIDmode)
2303 break;
2304
2305 icode = mov_optab->handlers[(int) mode].insn_code;
2306 if (icode != CODE_FOR_nothing
2307 && align >= GET_MODE_ALIGNMENT (mode))
2308 {
2309 unsigned int size = GET_MODE_SIZE (mode);
2310
2311 while (l >= size)
2312 {
2313 if (reverse)
2314 offset -= size;
2315
2316 cst = (*constfun) (constfundata, offset, mode);
2317 if (!LEGITIMATE_CONSTANT_P (cst))
2318 return 0;
2319
2320 if (!reverse)
2321 offset += size;
2322
2323 l -= size;
2324 }
2325 }
2326
2327 max_size = GET_MODE_SIZE (mode);
2328 }
2329
2330 /* The code above should have handled everything. */
2331 if (l != 0)
2332 abort ();
2333 }
2334
2335 return 1;
2336 }
2337
2338 /* Generate several move instructions to store LEN bytes generated by
2339 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2340 pointer which will be passed as an argument in every CONSTFUN call.
2341 ALIGN is the maximum alignment we can assume.
2342 If ENDP is 0, return TO; if ENDP is 1, return memory at the end, a la
2343 mempcpy; and if ENDP is 2, return memory at the end minus one byte, a
2344 la stpcpy. */
2345
2346 rtx
2347 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2348 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2349 void *constfundata, unsigned int align, int endp)
2350 {
2351 struct store_by_pieces data;
2352
2353 if (len == 0)
2354 {
2355 if (endp == 2)
2356 abort ();
2357 return to;
2358 }
2359
2360 if (! STORE_BY_PIECES_P (len, align))
2361 abort ();
2362 to = protect_from_queue (to, 1);
2363 data.constfun = constfun;
2364 data.constfundata = constfundata;
2365 data.len = len;
2366 data.to = to;
2367 store_by_pieces_1 (&data, align);
2368 if (endp)
2369 {
2370 rtx to1;
2371
2372 if (data.reverse)
2373 abort ();
2374 if (data.autinc_to)
2375 {
2376 if (endp == 2)
2377 {
2378 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2379 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2380 else
2381 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2382 -1));
2383 }
2384 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2385 data.offset);
2386 }
2387 else
2388 {
2389 if (endp == 2)
2390 --data.offset;
2391 to1 = adjust_address (data.to, QImode, data.offset);
2392 }
2393 return to1;
2394 }
2395 else
2396 return data.to;
2397 }
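
/* A usage sketch (CONST_STR_FN and DATA are hypothetical): callers
   normally probe with can_store_by_pieces before committing to this
   strategy,

       if (can_store_by_pieces (len, const_str_fn, &data, align))
         store_by_pieces (dest_mem, len, const_str_fn, &data, align, 0);

   where CONST_STR_FN returns, for each (DATA, OFFSET, MODE) triple,
   the constant rtx to store at that offset.  */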
2398
2399 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2400 rtx with BLKmode). The caller must pass TO through protect_from_queue
2401 before calling. ALIGN is the maximum alignment we can assume. */
2402
2403 static void
2404 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2405 {
2406 struct store_by_pieces data;
2407
2408 if (len == 0)
2409 return;
2410
2411 data.constfun = clear_by_pieces_1;
2412 data.constfundata = NULL;
2413 data.len = len;
2414 data.to = to;
2415 store_by_pieces_1 (&data, align);
2416 }
2417
2418 /* Callback routine for clear_by_pieces.
2419 Return const0_rtx unconditionally. */
2420
2421 static rtx
2422 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2423 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2424 enum machine_mode mode ATTRIBUTE_UNUSED)
2425 {
2426 return const0_rtx;
2427 }
2428
2429 /* Subroutine of clear_by_pieces and store_by_pieces.
2430 Generate several move instructions to store LEN bytes of block TO. (A MEM
2431 rtx with BLKmode). The caller must pass TO through protect_from_queue
2432 before calling. ALIGN is the maximum alignment we can assume. */
2433
2434 static void
2435 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2436 unsigned int align ATTRIBUTE_UNUSED)
2437 {
2438 rtx to_addr = XEXP (data->to, 0);
2439 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2440 enum machine_mode mode = VOIDmode, tmode;
2441 enum insn_code icode;
2442
2443 data->offset = 0;
2444 data->to_addr = to_addr;
2445 data->autinc_to
2446 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2447 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2448
2449 data->explicit_inc_to = 0;
2450 data->reverse
2451 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2452 if (data->reverse)
2453 data->offset = data->len;
2454
2455 /* If storing requires more than two move insns,
2456 copy addresses to registers (to make displacements shorter)
2457 and use post-increment if available. */
2458 if (!data->autinc_to
2459 && move_by_pieces_ninsns (data->len, align) > 2)
2460 {
2461 /* Determine the main mode we'll be using. */
2462 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2463 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2464 if (GET_MODE_SIZE (tmode) < max_size)
2465 mode = tmode;
2466
2467 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2468 {
2469 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2470 data->autinc_to = 1;
2471 data->explicit_inc_to = -1;
2472 }
2473
2474 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2475 && ! data->autinc_to)
2476 {
2477 data->to_addr = copy_addr_to_reg (to_addr);
2478 data->autinc_to = 1;
2479 data->explicit_inc_to = 1;
2480 }
2481
2482 if (!data->autinc_to && CONSTANT_P (to_addr))
2483 data->to_addr = copy_addr_to_reg (to_addr);
2484 }
2485
2486 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2487 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2488 align = MOVE_MAX * BITS_PER_UNIT;
2489
2490 /* First store what we can in the largest integer mode, then go to
2491 successively smaller modes. */
2492
2493 while (max_size > 1)
2494 {
2495 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2496 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2497 if (GET_MODE_SIZE (tmode) < max_size)
2498 mode = tmode;
2499
2500 if (mode == VOIDmode)
2501 break;
2502
2503 icode = mov_optab->handlers[(int) mode].insn_code;
2504 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2505 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2506
2507 max_size = GET_MODE_SIZE (mode);
2508 }
2509
2510 /* The code above should have handled everything. */
2511 if (data->len != 0)
2512 abort ();
2513 }
2514
2515 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2516 with move instructions for mode MODE. GENFUN is the gen_... function
2517 to make a move insn for that mode. DATA has all the other info. */
2518
2519 static void
2520 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2521 struct store_by_pieces *data)
2522 {
2523 unsigned int size = GET_MODE_SIZE (mode);
2524 rtx to1, cst;
2525
2526 while (data->len >= size)
2527 {
2528 if (data->reverse)
2529 data->offset -= size;
2530
2531 if (data->autinc_to)
2532 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2533 data->offset);
2534 else
2535 to1 = adjust_address (data->to, mode, data->offset);
2536
2537 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2538 emit_insn (gen_add2_insn (data->to_addr,
2539 GEN_INT (-(HOST_WIDE_INT) size)));
2540
2541 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2542 emit_insn ((*genfun) (to1, cst));
2543
2544 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2545 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2546
2547 if (! data->reverse)
2548 data->offset += size;
2549
2550 data->len -= size;
2551 }
2552 }
2553 \f
2554 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2555 its length in bytes. */
2556
2557 rtx
2558 clear_storage (rtx object, rtx size)
2559 {
2560 rtx retval = 0;
2561 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2562 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2563
2564 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2565 just move a zero. Otherwise, do this a piece at a time. */
2566 if (GET_MODE (object) != BLKmode
2567 && GET_CODE (size) == CONST_INT
2568 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2569 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2570 else
2571 {
2572 object = protect_from_queue (object, 1);
2573 size = protect_from_queue (size, 0);
2574
2575 if (size == const0_rtx)
2576 ;
2577 else if (GET_CODE (size) == CONST_INT
2578 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2579 clear_by_pieces (object, INTVAL (size), align);
2580 else if (clear_storage_via_clrstr (object, size, align))
2581 ;
2582 else
2583 retval = clear_storage_via_libcall (object, size);
2584 }
2585
2586 return retval;
2587 }
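
/* A usage sketch (TEMP is hypothetical): zeroing a 64-byte BLKmode
   object is simply

       clear_storage (temp, GEN_INT (64));

   with the three strategies above tried in order: clear_by_pieces
   for small constant sizes, a clrstr pattern if the target provides
   one, and finally a memset/bzero libcall.  */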
2588
2589 /* A subroutine of clear_storage. Expand a clrstr pattern;
2590 return true if successful. */
2591
2592 static bool
2593 clear_storage_via_clrstr (rtx object, rtx size, unsigned int align)
2594 {
2595 /* Try the most limited insn first, because there's no point
2596 including more than one in the machine description unless
2597 the more limited one has some advantage. */
2598
2599 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2600 enum machine_mode mode;
2601
2602 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2603 mode = GET_MODE_WIDER_MODE (mode))
2604 {
2605 enum insn_code code = clrstr_optab[(int) mode];
2606 insn_operand_predicate_fn pred;
2607
2608 if (code != CODE_FOR_nothing
2609 /* We don't need MODE to be narrower than
2610 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2611 the mode mask, as it is returned by the macro, it will
2612 definitely be less than the actual mode mask. */
2613 && ((GET_CODE (size) == CONST_INT
2614 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2615 <= (GET_MODE_MASK (mode) >> 1)))
2616 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2617 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2618 || (*pred) (object, BLKmode))
2619 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2620 || (*pred) (opalign, VOIDmode)))
2621 {
2622 rtx op1;
2623 rtx last = get_last_insn ();
2624 rtx pat;
2625
2626 op1 = convert_to_mode (mode, size, 1);
2627 pred = insn_data[(int) code].operand[1].predicate;
2628 if (pred != 0 && ! (*pred) (op1, mode))
2629 op1 = copy_to_mode_reg (mode, op1);
2630
2631 pat = GEN_FCN ((int) code) (object, op1, opalign);
2632 if (pat)
2633 {
2634 emit_insn (pat);
2635 return true;
2636 }
2637 else
2638 delete_insns_since (last);
2639 }
2640 }
2641
2642 return false;
2643 }
2644
2645 /* A subroutine of clear_storage. Expand a call to memset or bzero.
2646 Return the return value of memset, 0 otherwise. */
2647
2648 static rtx
2649 clear_storage_via_libcall (rtx object, rtx size)
2650 {
2651 tree call_expr, arg_list, fn, object_tree, size_tree;
2652 enum machine_mode size_mode;
2653 rtx retval;
2654
2655 /* OBJECT or SIZE may have been passed through protect_from_queue.
2656
2657 It is unsafe to save the value generated by protect_from_queue
2658 and reuse it later. Consider what happens if emit_queue is
2659 called before the return value from protect_from_queue is used.
2660
2661 Expansion of the CALL_EXPR below will call emit_queue before
2662 we are finished emitting RTL for argument setup. So if we are
2663 not careful we could get the wrong value for an argument.
2664
2665 To avoid this problem we go ahead and emit code to copy OBJECT
2666 and SIZE into new pseudos. We can then place those new pseudos
2667 into an RTL_EXPR and use them later, even after a call to
2668 emit_queue.
2669
2670 Note this is not strictly needed for library calls since they
2671 do not call emit_queue before loading their arguments. However,
2672 we may need to have library calls call emit_queue in the future
2673 since failing to do so could cause problems for targets which
2674 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2675
2676 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2677
2678 if (TARGET_MEM_FUNCTIONS)
2679 size_mode = TYPE_MODE (sizetype);
2680 else
2681 size_mode = TYPE_MODE (unsigned_type_node);
2682 size = convert_to_mode (size_mode, size, 1);
2683 size = copy_to_mode_reg (size_mode, size);
2684
2685 /* It is incorrect to use the libcall calling conventions to call
2686 memset in this context. This could be a user call to memset and
2687 the user may wish to examine the return value from memset. For
2688 targets where libcalls and normal calls have different conventions
2689 for returning pointers, we could end up generating incorrect code.
2690
2691 For convenience, we generate the call to bzero this way as well. */
2692
2693 object_tree = make_tree (ptr_type_node, object);
2694 if (TARGET_MEM_FUNCTIONS)
2695 size_tree = make_tree (sizetype, size);
2696 else
2697 size_tree = make_tree (unsigned_type_node, size);
2698
2699 fn = clear_storage_libcall_fn (true);
2700 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2701 if (TARGET_MEM_FUNCTIONS)
2702 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2703 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2704
2705 /* Now we have to build up the CALL_EXPR itself. */
2706 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2707 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2708 call_expr, arg_list, NULL_TREE);
2709
2710 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2711
2712 /* If we are initializing a readonly value, show the above call
2713 clobbered it. Otherwise, a load from it may erroneously be
2714 hoisted from a loop. */
2715 if (RTX_UNCHANGING_P (object))
2716 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2717
2718 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
2719 }
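
/* The CALL_EXPR expanded above corresponds to the C source

       memset (object, 0, size);

   or, when !TARGET_MEM_FUNCTIONS,

       bzero (object, size);

   which is why the normal calling conventions, rather than the
   libcall conventions, must be used.  */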
2720
2721 /* A subroutine of clear_storage_via_libcall. Create the tree node
2722 for the function we use for block clears. The first time FOR_CALL
2723 is true, we call assemble_external. */
2724
2725 static GTY(()) tree block_clear_fn;
2726
2727 void
2728 init_block_clear_fn (const char *asmspec)
2729 {
2730 if (!block_clear_fn)
2731 {
2732 tree fn, args;
2733
2734 if (TARGET_MEM_FUNCTIONS)
2735 {
2736 fn = get_identifier ("memset");
2737 args = build_function_type_list (ptr_type_node, ptr_type_node,
2738 integer_type_node, sizetype,
2739 NULL_TREE);
2740 }
2741 else
2742 {
2743 fn = get_identifier ("bzero");
2744 args = build_function_type_list (void_type_node, ptr_type_node,
2745 unsigned_type_node, NULL_TREE);
2746 }
2747
2748 fn = build_decl (FUNCTION_DECL, fn, args);
2749 DECL_EXTERNAL (fn) = 1;
2750 TREE_PUBLIC (fn) = 1;
2751 DECL_ARTIFICIAL (fn) = 1;
2752 TREE_NOTHROW (fn) = 1;
2753
2754 block_clear_fn = fn;
2755 }
2756
2757 if (asmspec)
2758 {
2759 SET_DECL_RTL (block_clear_fn, NULL_RTX);
2760 SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
2761 }
2762 }
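
/* The decl built above corresponds to the C prototype

       void *memset (void *s, int c, size_t n);

   or, when !TARGET_MEM_FUNCTIONS,

       void bzero (void *s, unsigned n);  */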
2763
2764 static tree
2765 clear_storage_libcall_fn (int for_call)
2766 {
2767 static bool emitted_extern;
2768
2769 if (!block_clear_fn)
2770 init_block_clear_fn (NULL);
2771
2772 if (for_call && !emitted_extern)
2773 {
2774 emitted_extern = true;
2775 make_decl_rtl (block_clear_fn, NULL);
2776 assemble_external (block_clear_fn);
2777 }
2778
2779 return block_clear_fn;
2780 }
2781 \f
2782 /* Generate code to copy Y into X.
2783 Both Y and X must have the same mode, except that
2784 Y can be a constant with VOIDmode.
2785 This mode cannot be BLKmode; use emit_block_move for that.
2786
2787 Return the last instruction emitted. */
2788
2789 rtx
2790 emit_move_insn (rtx x, rtx y)
2791 {
2792 enum machine_mode mode = GET_MODE (x);
2793 rtx y_cst = NULL_RTX;
2794 rtx last_insn, set;
2795
2796 x = protect_from_queue (x, 1);
2797 y = protect_from_queue (y, 0);
2798
2799 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2800 abort ();
2801
2802 /* Never force constant_p_rtx to memory. */
2803 if (GET_CODE (y) == CONSTANT_P_RTX)
2804 ;
2805 else if (CONSTANT_P (y))
2806 {
2807 if (optimize
2808 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
2809 && (last_insn = compress_float_constant (x, y)))
2810 return last_insn;
2811
2812 y_cst = y;
2813
2814 if (!LEGITIMATE_CONSTANT_P (y))
2815 {
2816 y = force_const_mem (mode, y);
2817
2818 /* If the target's cannot_force_const_mem prevented the spill,
2819 assume that the target's move expanders will also take care
2820 of the non-legitimate constant. */
2821 if (!y)
2822 y = y_cst;
2823 }
2824 }
2825
2826 /* If X or Y are memory references, verify that their addresses are valid
2827 for the machine. */
2828 if (GET_CODE (x) == MEM
2829 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2830 && ! push_operand (x, GET_MODE (x)))
2831 || (flag_force_addr
2832 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2833 x = validize_mem (x);
2834
2835 if (GET_CODE (y) == MEM
2836 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2837 || (flag_force_addr
2838 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2839 y = validize_mem (y);
2840
2841 if (mode == BLKmode)
2842 abort ();
2843
2844 last_insn = emit_move_insn_1 (x, y);
2845
2846 if (y_cst && GET_CODE (x) == REG
2847 && (set = single_set (last_insn)) != NULL_RTX
2848 && SET_DEST (set) == x
2849 && ! rtx_equal_p (y_cst, SET_SRC (set)))
2850 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2851
2852 return last_insn;
2853 }
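
/* A usage sketch: loading the constant 42 into a fresh SImode pseudo
   (names local to the example):

       rtx r = gen_reg_rtx (SImode);
       emit_move_insn (r, GEN_INT (42));

   If 42 were not a legitimate constant on the target, the code above
   would spill it to the constant pool and attach a REG_EQUAL note
   recording the original constant.  */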
2854
2855 /* Low level part of emit_move_insn.
2856 Called just like emit_move_insn, but assumes X and Y
2857 are basically valid. */
2858
2859 rtx
2860 emit_move_insn_1 (rtx x, rtx y)
2861 {
2862 enum machine_mode mode = GET_MODE (x);
2863 enum machine_mode submode;
2864 enum mode_class class = GET_MODE_CLASS (mode);
2865
2866 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2867 abort ();
2868
2869 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2870 return
2871 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2872
2873 /* Expand complex moves by moving real part and imag part, if possible. */
2874 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2875 && BLKmode != (submode = GET_MODE_INNER (mode))
2876 && (mov_optab->handlers[(int) submode].insn_code
2877 != CODE_FOR_nothing))
2878 {
2879 /* Don't split destination if it is a stack push. */
2880 int stack = push_operand (x, GET_MODE (x));
2881
2882 #ifdef PUSH_ROUNDING
2883 /* In case we output to the stack, but the size is smaller than the
2884 machine can push exactly, we need to use move instructions. */
2885 if (stack
2886 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2887 != GET_MODE_SIZE (submode)))
2888 {
2889 rtx temp;
2890 HOST_WIDE_INT offset1, offset2;
2891
2892 /* Do not use anti_adjust_stack, since we don't want to update
2893 stack_pointer_delta. */
2894 temp = expand_binop (Pmode,
2895 #ifdef STACK_GROWS_DOWNWARD
2896 sub_optab,
2897 #else
2898 add_optab,
2899 #endif
2900 stack_pointer_rtx,
2901 GEN_INT
2902 (PUSH_ROUNDING
2903 (GET_MODE_SIZE (GET_MODE (x)))),
2904 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2905
2906 if (temp != stack_pointer_rtx)
2907 emit_move_insn (stack_pointer_rtx, temp);
2908
2909 #ifdef STACK_GROWS_DOWNWARD
2910 offset1 = 0;
2911 offset2 = GET_MODE_SIZE (submode);
2912 #else
2913 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2914 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2915 + GET_MODE_SIZE (submode));
2916 #endif
2917
2918 emit_move_insn (change_address (x, submode,
2919 gen_rtx_PLUS (Pmode,
2920 stack_pointer_rtx,
2921 GEN_INT (offset1))),
2922 gen_realpart (submode, y));
2923 emit_move_insn (change_address (x, submode,
2924 gen_rtx_PLUS (Pmode,
2925 stack_pointer_rtx,
2926 GEN_INT (offset2))),
2927 gen_imagpart (submode, y));
2928 }
2929 else
2930 #endif
2931 /* If this is a stack push, emit the highpart first, so it
2932 will be in the argument order.
2933
2934 In that case, change_address is used only to convert
2935 the mode, not to change the address. */
2936 if (stack)
2937 {
2938 /* Note that the real part always precedes the imag part in memory
2939 regardless of machine's endianness. */
2940 #ifdef STACK_GROWS_DOWNWARD
2941 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2942 gen_imagpart (submode, y));
2943 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2944 gen_realpart (submode, y));
2945 #else
2946 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2947 gen_realpart (submode, y));
2948 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2949 gen_imagpart (submode, y));
2950 #endif
2951 }
2952 else
2953 {
2954 rtx realpart_x, realpart_y;
2955 rtx imagpart_x, imagpart_y;
2956
2957 /* If this is a complex value with each part being smaller than a
2958 word, the usual calling sequence will likely pack the pieces into
2959 a single register. Unfortunately, SUBREG of hard registers only
2960 deals in terms of words, so we have a problem converting input
2961 arguments to the CONCAT of two registers that is used elsewhere
2962 for complex values. If this is before reload, we can copy it into
2963 memory and reload. FIXME, we should see about using extract and
2964 insert on integer registers, but complex short and complex char
2965 variables should be rarely used. */
2966 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2967 && (reload_in_progress | reload_completed) == 0)
2968 {
2969 int packed_dest_p
2970 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2971 int packed_src_p
2972 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2973
2974 if (packed_dest_p || packed_src_p)
2975 {
2976 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2977 ? MODE_FLOAT : MODE_INT);
2978
2979 enum machine_mode reg_mode
2980 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2981
2982 if (reg_mode != BLKmode)
2983 {
2984 rtx mem = assign_stack_temp (reg_mode,
2985 GET_MODE_SIZE (mode), 0);
2986 rtx cmem = adjust_address (mem, mode, 0);
2987
2988 cfun->cannot_inline
2989 = N_("function using short complex types cannot be inline");
2990
2991 if (packed_dest_p)
2992 {
2993 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2994
2995 emit_move_insn_1 (cmem, y);
2996 return emit_move_insn_1 (sreg, mem);
2997 }
2998 else
2999 {
3000 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
3001
3002 emit_move_insn_1 (mem, sreg);
3003 return emit_move_insn_1 (x, cmem);
3004 }
3005 }
3006 }
3007 }
3008
3009 realpart_x = gen_realpart (submode, x);
3010 realpart_y = gen_realpart (submode, y);
3011 imagpart_x = gen_imagpart (submode, x);
3012 imagpart_y = gen_imagpart (submode, y);
3013
3014 /* Show the output dies here. This is necessary for SUBREGs
3015 of pseudos since we cannot track their lifetimes correctly;
3016 hard regs shouldn't appear here except as return values.
3017 We never want to emit such a clobber after reload. */
3018 if (x != y
3019 && ! (reload_in_progress || reload_completed)
3020 && (GET_CODE (realpart_x) == SUBREG
3021 || GET_CODE (imagpart_x) == SUBREG))
3022 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3023
3024 emit_move_insn (realpart_x, realpart_y);
3025 emit_move_insn (imagpart_x, imagpart_y);
3026 }
3027
3028 return get_last_insn ();
3029 }
3030
3031 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
3032 find a mode to do it in. If we have a movcc, use it. Otherwise,
3033 find the MODE_INT mode of the same width. */
3034 else if (GET_MODE_CLASS (mode) == MODE_CC
3035 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
3036 {
3037 enum insn_code insn_code;
3038 enum machine_mode tmode = VOIDmode;
3039 rtx x1 = x, y1 = y;
3040
3041 if (mode != CCmode
3042 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
3043 tmode = CCmode;
3044 else
3045 for (tmode = QImode; tmode != VOIDmode;
3046 tmode = GET_MODE_WIDER_MODE (tmode))
3047 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
3048 break;
3049
3050 if (tmode == VOIDmode)
3051 abort ();
3052
3053 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
3054 may call change_address which is not appropriate if we were
3055 called when a reload was in progress. We don't have to worry
3056 about changing the address since the size in bytes is supposed to
3057 be the same. Copy the MEM to change the mode and move any
3058 substitutions from the old MEM to the new one. */
3059
3060 if (reload_in_progress)
3061 {
3062 x = gen_lowpart_common (tmode, x1);
3063 if (x == 0 && GET_CODE (x1) == MEM)
3064 {
3065 x = adjust_address_nv (x1, tmode, 0);
3066 copy_replacements (x1, x);
3067 }
3068
3069 y = gen_lowpart_common (tmode, y1);
3070 if (y == 0 && GET_CODE (y1) == MEM)
3071 {
3072 y = adjust_address_nv (y1, tmode, 0);
3073 copy_replacements (y1, y);
3074 }
3075 }
3076 else
3077 {
3078 x = gen_lowpart (tmode, x);
3079 y = gen_lowpart (tmode, y);
3080 }
3081
3082 insn_code = mov_optab->handlers[(int) tmode].insn_code;
3083 return emit_insn (GEN_FCN (insn_code) (x, y));
3084 }
3085
3086 /* Try using a move pattern for the corresponding integer mode. This is
3087 only safe when simplify_subreg can convert MODE constants into integer
3088 constants. At present, it can only do this reliably if the value
3089 fits within a HOST_WIDE_INT. */
3090 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3091 && (submode = int_mode_for_mode (mode)) != BLKmode
3092 && mov_optab->handlers[submode].insn_code != CODE_FOR_nothing)
3093 return emit_insn (GEN_FCN (mov_optab->handlers[submode].insn_code)
3094 (simplify_gen_subreg (submode, x, mode, 0),
3095 simplify_gen_subreg (submode, y, mode, 0)));
3096
3097 /* This will handle any multi-word or full-word mode that lacks a move_insn
3098 pattern. However, you will get better code if you define such patterns,
3099 even if they must turn into multiple assembler instructions. */
3100 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3101 {
3102 rtx last_insn = 0;
3103 rtx seq, inner;
3104 int need_clobber;
3105 int i;
3106
3107 #ifdef PUSH_ROUNDING
3108
3109 /* If X is a push on the stack, do the push now and replace
3110 X with a reference to the stack pointer. */
3111 if (push_operand (x, GET_MODE (x)))
3112 {
3113 rtx temp;
3114 enum rtx_code code;
3115
3116 /* Do not use anti_adjust_stack, since we don't want to update
3117 stack_pointer_delta. */
3118 temp = expand_binop (Pmode,
3119 #ifdef STACK_GROWS_DOWNWARD
3120 sub_optab,
3121 #else
3122 add_optab,
3123 #endif
3124 stack_pointer_rtx,
3125 GEN_INT
3126 (PUSH_ROUNDING
3127 (GET_MODE_SIZE (GET_MODE (x)))),
3128 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3129
3130 if (temp != stack_pointer_rtx)
3131 emit_move_insn (stack_pointer_rtx, temp);
3132
3133 code = GET_CODE (XEXP (x, 0));
3134
3135 /* Just hope that small offsets off SP are OK. */
3136 if (code == POST_INC)
3137 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3138 GEN_INT (-((HOST_WIDE_INT)
3139 GET_MODE_SIZE (GET_MODE (x)))));
3140 else if (code == POST_DEC)
3141 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3142 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3143 else
3144 temp = stack_pointer_rtx;
3145
3146 x = change_address (x, VOIDmode, temp);
3147 }
3148 #endif
3149
3150 /* If we are in reload, see if either operand is a MEM whose address
3151 is scheduled for replacement. */
3152 if (reload_in_progress && GET_CODE (x) == MEM
3153 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3154 x = replace_equiv_address_nv (x, inner);
3155 if (reload_in_progress && GET_CODE (y) == MEM
3156 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3157 y = replace_equiv_address_nv (y, inner);
3158
3159 start_sequence ();
3160
3161 need_clobber = 0;
3162 for (i = 0;
3163 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3164 i++)
3165 {
3166 rtx xpart = operand_subword (x, i, 1, mode);
3167 rtx ypart = operand_subword (y, i, 1, mode);
3168
3169 /* If we can't get a part of Y, put Y into memory if it is a
3170 constant. Otherwise, force it into a register. If we still
3171 can't get a part of Y, abort. */
3172 if (ypart == 0 && CONSTANT_P (y))
3173 {
3174 y = force_const_mem (mode, y);
3175 ypart = operand_subword (y, i, 1, mode);
3176 }
3177 else if (ypart == 0)
3178 ypart = operand_subword_force (y, i, mode);
3179
3180 if (xpart == 0 || ypart == 0)
3181 abort ();
3182
3183 need_clobber |= (GET_CODE (xpart) == SUBREG);
3184
3185 last_insn = emit_move_insn (xpart, ypart);
3186 }
3187
3188 seq = get_insns ();
3189 end_sequence ();
3190
3191 /* Show the output dies here. This is necessary for SUBREGs
3192 of pseudos since we cannot track their lifetimes correctly;
3193 hard regs shouldn't appear here except as return values.
3194 We never want to emit such a clobber after reload. */
3195 if (x != y
3196 && ! (reload_in_progress || reload_completed)
3197 && need_clobber != 0)
3198 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3199
3200 emit_insn (seq);
3201
3202 return last_insn;
3203 }
3204 else
3205 abort ();
3206 }
3207
3208 /* If Y is representable exactly in a narrower mode, and the target can
3209 perform the extension directly from constant or memory, then emit the
3210 move as an extension. */
3211
3212 static rtx
3213 compress_float_constant (rtx x, rtx y)
3214 {
3215 enum machine_mode dstmode = GET_MODE (x);
3216 enum machine_mode orig_srcmode = GET_MODE (y);
3217 enum machine_mode srcmode;
3218 REAL_VALUE_TYPE r;
3219
3220 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3221
3222 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3223 srcmode != orig_srcmode;
3224 srcmode = GET_MODE_WIDER_MODE (srcmode))
3225 {
3226 enum insn_code ic;
3227 rtx trunc_y, last_insn;
3228
3229 /* Skip if the target can't extend this way. */
3230 ic = can_extend_p (dstmode, srcmode, 0);
3231 if (ic == CODE_FOR_nothing)
3232 continue;
3233
3234 /* Skip if the narrowed value isn't exact. */
3235 if (! exact_real_truncate (srcmode, &r))
3236 continue;
3237
3238 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3239
3240 if (LEGITIMATE_CONSTANT_P (trunc_y))
3241 {
3242 /* Skip if the target needs extra instructions to perform
3243 the extension. */
3244 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3245 continue;
3246 }
3247 else if (float_extend_from_mem[dstmode][srcmode])
3248 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3249 else
3250 continue;
3251
3252 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3253 last_insn = get_last_insn ();
3254
3255 if (GET_CODE (x) == REG)
3256 set_unique_reg_note (last_insn, REG_EQUAL, y);
3257
3258 return last_insn;
3259 }
3260
3261 return NULL_RTX;
3262 }
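
/* For example, moving the DFmode constant 1.5 into a register can be
   emitted as a float_extend from an SFmode constant-pool entry,
   because 1.5 truncates to SFmode exactly; a value such as 1e300,
   which has no SFmode representation at all, is left in DFmode.  */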
3263 \f
3264 /* Pushing data onto the stack. */
3265
3266 /* Push a block of length SIZE (perhaps variable)
3267 and return an rtx to address the beginning of the block.
3268 Note that it is not possible for the value returned to be a QUEUED.
3269 The value may be virtual_outgoing_args_rtx.
3270
3271 EXTRA is the number of bytes of padding to push in addition to SIZE.
3272 BELOW nonzero means this padding comes at low addresses;
3273 otherwise, the padding comes at high addresses. */
3274
3275 rtx
3276 push_block (rtx size, int extra, int below)
3277 {
3278 rtx temp;
3279
3280 size = convert_modes (Pmode, ptr_mode, size, 1);
3281 if (CONSTANT_P (size))
3282 anti_adjust_stack (plus_constant (size, extra));
3283 else if (GET_CODE (size) == REG && extra == 0)
3284 anti_adjust_stack (size);
3285 else
3286 {
3287 temp = copy_to_mode_reg (Pmode, size);
3288 if (extra != 0)
3289 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3290 temp, 0, OPTAB_LIB_WIDEN);
3291 anti_adjust_stack (temp);
3292 }
3293
3294 #ifndef STACK_GROWS_DOWNWARD
3295 if (0)
3296 #else
3297 if (1)
3298 #endif
3299 {
3300 temp = virtual_outgoing_args_rtx;
3301 if (extra != 0 && below)
3302 temp = plus_constant (temp, extra);
3303 }
3304 else
3305 {
3306 if (GET_CODE (size) == CONST_INT)
3307 temp = plus_constant (virtual_outgoing_args_rtx,
3308 -INTVAL (size) - (below ? 0 : extra));
3309 else if (extra != 0 && !below)
3310 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3311 negate_rtx (Pmode, plus_constant (size, extra)));
3312 else
3313 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3314 negate_rtx (Pmode, size));
3315 }
3316
3317 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3318 }
3319
3320 #ifdef PUSH_ROUNDING
3321
3322 /* Emit single push insn. */
3323
3324 static void
3325 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3326 {
3327 rtx dest_addr;
3328 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3329 rtx dest;
3330 enum insn_code icode;
3331 insn_operand_predicate_fn pred;
3332
3333 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3334 /* If there is a push pattern, use it. Otherwise try the old way of
3335 throwing a MEM representing the push operation to the move expander. */
3336 icode = push_optab->handlers[(int) mode].insn_code;
3337 if (icode != CODE_FOR_nothing)
3338 {
3339 if (((pred = insn_data[(int) icode].operand[0].predicate)
3340 && !((*pred) (x, mode))))
3341 x = force_reg (mode, x);
3342 emit_insn (GEN_FCN (icode) (x));
3343 return;
3344 }
3345 if (GET_MODE_SIZE (mode) == rounded_size)
3346 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3347 /* If we are to pad downward, adjust the stack pointer first and
3348 then store X into the stack location using an offset. This is
3349 because emit_move_insn does not know how to pad; it does not have
3350 access to type. */
3351 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3352 {
3353 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3354 HOST_WIDE_INT offset;
3355
3356 emit_move_insn (stack_pointer_rtx,
3357 expand_binop (Pmode,
3358 #ifdef STACK_GROWS_DOWNWARD
3359 sub_optab,
3360 #else
3361 add_optab,
3362 #endif
3363 stack_pointer_rtx,
3364 GEN_INT (rounded_size),
3365 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3366
3367 offset = (HOST_WIDE_INT) padding_size;
3368 #ifdef STACK_GROWS_DOWNWARD
3369 if (STACK_PUSH_CODE == POST_DEC)
3370 /* We have already decremented the stack pointer, so get the
3371 previous value. */
3372 offset += (HOST_WIDE_INT) rounded_size;
3373 #else
3374 if (STACK_PUSH_CODE == POST_INC)
3375 /* We have already incremented the stack pointer, so get the
3376 previous value. */
3377 offset -= (HOST_WIDE_INT) rounded_size;
3378 #endif
3379 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3380 }
3381 else
3382 {
3383 #ifdef STACK_GROWS_DOWNWARD
3384 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3385 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3386 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3387 #else
3388 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3389 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3390 GEN_INT (rounded_size));
3391 #endif
3392 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3393 }
3394
3395 dest = gen_rtx_MEM (mode, dest_addr);
3396
3397 if (type != 0)
3398 {
3399 set_mem_attributes (dest, type, 1);
3400
3401 if (flag_optimize_sibling_calls)
3402 /* Function incoming arguments may overlap with sibling call
3403 outgoing arguments and we cannot allow reordering of reads
3404 from function arguments with stores to outgoing arguments
3405 of sibling calls. */
3406 set_mem_alias_set (dest, 0);
3407 }
3408 emit_move_insn (dest, x);
3409 }
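
/* Worked example: if PUSH_ROUNDING rounds to 4-byte units, pushing
   an HImode value gives ROUNDED_SIZE == 4 and PADDING_SIZE == 2;
   when the argument pads downward, the stack pointer is first moved
   by 4 bytes and the value is then stored 2 bytes into the new
   slot.  */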
3410 #endif
3411
3412 /* Generate code to push X onto the stack, assuming it has mode MODE and
3413 type TYPE.
3414 MODE is redundant except when X is a CONST_INT (since they don't
3415 carry mode info).
3416 SIZE is an rtx for the size of data to be copied (in bytes),
3417 needed only if X is BLKmode.
3418
3419 ALIGN (in bits) is the maximum alignment we can assume.
3420
3421 If PARTIAL and REG are both nonzero, then copy that many of the first
3422 words of X into registers starting with REG, and push the rest of X.
3423 The amount of space pushed is decreased by PARTIAL words,
3424 rounded *down* to a multiple of PARM_BOUNDARY.
3425 REG must be a hard register in this case.
3426 If REG is zero but PARTIAL is not, take all other actions for an
3427 argument partially in registers, but do not actually load any
3428 registers.
3429
3430 EXTRA is the amount in bytes of extra space to leave next to this arg.
3431 This is ignored if an argument block has already been allocated.
3432
3433 On a machine that lacks real push insns, ARGS_ADDR is the address of
3434 the bottom of the argument block for this call. We use indexing off there
3435 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3436 argument block has not been preallocated.
3437
3438 ARGS_SO_FAR is the size of args previously pushed for this call.
3439
3440 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3441 for arguments passed in registers. If nonzero, it will be the number
3442 of bytes required. */
3443
3444 void
3445 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3446 unsigned int align, int partial, rtx reg, int extra,
3447 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3448 rtx alignment_pad)
3449 {
3450 rtx xinner;
3451 enum direction stack_direction
3452 #ifdef STACK_GROWS_DOWNWARD
3453 = downward;
3454 #else
3455 = upward;
3456 #endif
3457
3458 /* Decide where to pad the argument: `downward' for below,
3459 `upward' for above, or `none' for don't pad it.
3460 Default is below for small data on big-endian machines; else above. */
3461 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3462
3463 /* Invert direction if stack is post-decrement.
3464 FIXME: why? */
3465 if (STACK_PUSH_CODE == POST_DEC)
3466 if (where_pad != none)
3467 where_pad = (where_pad == downward ? upward : downward);
3468
3469 xinner = x = protect_from_queue (x, 0);
3470
3471 if (mode == BLKmode)
3472 {
3473 /* Copy a block into the stack, entirely or partially. */
3474
3475 rtx temp;
3476 int used = partial * UNITS_PER_WORD;
3477 int offset;
3478 int skip;
3479
3480 if (reg && GET_CODE (reg) == PARALLEL)
3481 {
3482 /* Use the size of the elt to compute offset. */
3483 rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
3484 used = partial * GET_MODE_SIZE (GET_MODE (elt));
3485 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3486 }
3487 else
3488 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3489
3490 if (size == 0)
3491 abort ();
3492
3493 used -= offset;
3494
3495 /* USED is now the # of bytes we need not copy to the stack
3496 because registers will take care of them. */
3497
3498 if (partial != 0)
3499 xinner = adjust_address (xinner, BLKmode, used);
3500
3501 /* If the partial register-part of the arg counts in its stack size,
3502 skip the part of stack space corresponding to the registers.
3503 Otherwise, start copying to the beginning of the stack space,
3504 by setting SKIP to 0. */
3505 skip = (reg_parm_stack_space == 0) ? 0 : used;
3506
3507 #ifdef PUSH_ROUNDING
3508 /* Do it with several push insns if that doesn't take lots of insns
3509 and if there is no difficulty with push insns that skip bytes
3510 on the stack for alignment purposes. */
3511 if (args_addr == 0
3512 && PUSH_ARGS
3513 && GET_CODE (size) == CONST_INT
3514 && skip == 0
3515 && MEM_ALIGN (xinner) >= align
3516 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3517 /* Here we avoid the case of a structure whose weak alignment
3518 would force many pushes of small amounts of data,
3519 where the rounding done by such small pushes causes trouble. */
3520 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3521 || align >= BIGGEST_ALIGNMENT
3522 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3523 == (align / BITS_PER_UNIT)))
3524 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3525 {
3526 /* Push padding now if padding above and stack grows down,
3527 or if padding below and stack grows up.
3528 But if space already allocated, this has already been done. */
3529 if (extra && args_addr == 0
3530 && where_pad != none && where_pad != stack_direction)
3531 anti_adjust_stack (GEN_INT (extra));
3532
3533 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3534 }
3535 else
3536 #endif /* PUSH_ROUNDING */
3537 {
3538 rtx target;
3539
3540 /* Otherwise make space on the stack and copy the data
3541 to the address of that space. */
3542
3543 /* Deduct words put into registers from the size we must copy. */
3544 if (partial != 0)
3545 {
3546 if (GET_CODE (size) == CONST_INT)
3547 size = GEN_INT (INTVAL (size) - used);
3548 else
3549 size = expand_binop (GET_MODE (size), sub_optab, size,
3550 GEN_INT (used), NULL_RTX, 0,
3551 OPTAB_LIB_WIDEN);
3552 }
3553
3554 /* Get the address of the stack space.
3555 In this case, we do not deal with EXTRA separately.
3556 A single stack adjust will do. */
3557 if (! args_addr)
3558 {
3559 temp = push_block (size, extra, where_pad == downward);
3560 extra = 0;
3561 }
3562 else if (GET_CODE (args_so_far) == CONST_INT)
3563 temp = memory_address (BLKmode,
3564 plus_constant (args_addr,
3565 skip + INTVAL (args_so_far)));
3566 else
3567 temp = memory_address (BLKmode,
3568 plus_constant (gen_rtx_PLUS (Pmode,
3569 args_addr,
3570 args_so_far),
3571 skip));
3572
3573 if (!ACCUMULATE_OUTGOING_ARGS)
3574 {
3575 /* If the source is referenced relative to the stack pointer,
3576 copy it to another register to stabilize it. We do not need
3577 to do this if we know that we won't be changing sp. */
3578
3579 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3580 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3581 temp = copy_to_reg (temp);
3582 }
3583
3584 target = gen_rtx_MEM (BLKmode, temp);
3585
3586 if (type != 0)
3587 {
3588 set_mem_attributes (target, type, 1);
3589 /* Function incoming arguments may overlap with sibling call
3590 outgoing arguments and we cannot allow reordering of reads
3591 from function arguments with stores to outgoing arguments
3592 of sibling calls. */
3593 set_mem_alias_set (target, 0);
3594 }
3595
3596 /* ALIGN may well be better aligned than TYPE, e.g. due to
3597 PARM_BOUNDARY. Assume the caller isn't lying. */
3598 set_mem_align (target, align);
3599
3600 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3601 }
3602 }
3603 else if (partial > 0)
3604 {
3605 /* Scalar partly in registers. */
3606
3607 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3608 int i;
3609 int not_stack;
3610 /* # words of start of argument
3611 that we must make space for but need not store. */
3612 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3613 int args_offset = INTVAL (args_so_far);
3614 int skip;
3615
3616 /* Push padding now if padding above and stack grows down,
3617 or if padding below and stack grows up.
3618 But if space already allocated, this has already been done. */
3619 if (extra && args_addr == 0
3620 && where_pad != none && where_pad != stack_direction)
3621 anti_adjust_stack (GEN_INT (extra));
3622
3623 /* If we make space by pushing it, we might as well push
3624 the real data. Otherwise, we can leave OFFSET nonzero
3625 and leave the space uninitialized. */
3626 if (args_addr == 0)
3627 offset = 0;
3628
3629 /* Now NOT_STACK gets the number of words that we don't need to
3630 allocate on the stack. */
3631 not_stack = partial - offset;
3632
3633 /* If the partial register-part of the arg counts in its stack size,
3634 skip the part of stack space corresponding to the registers.
3635 Otherwise, start copying to the beginning of the stack space,
3636 by setting SKIP to 0. */
3637 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3638
3639 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3640 x = validize_mem (force_const_mem (mode, x));
3641
3642 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3643 SUBREGs of such registers are not allowed. */
3644 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3645 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3646 x = copy_to_reg (x);
3647
3648 /* Loop over all the words allocated on the stack for this arg. */
3649 /* We can do it by words, because any scalar bigger than a word
3650 has a size a multiple of a word. */
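/* Sketch with assumed numbers: a 16-byte scalar with
   UNITS_PER_WORD == 4 gives SIZE == 4 words; with NOT_STACK == 2 (and
   OFFSET and SKIP both 0) only words 2 and 3 are pushed, in whichever
   order PUSH_ARGS_REVERSED dictates, landing UNITS_PER_WORD apart
   starting at ARGS_OFFSET.  */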
3651 #ifndef PUSH_ARGS_REVERSED
3652 for (i = not_stack; i < size; i++)
3653 #else
3654 for (i = size - 1; i >= not_stack; i--)
3655 #endif
3656 if (i >= not_stack + offset)
3657 emit_push_insn (operand_subword_force (x, i, mode),
3658 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3659 0, args_addr,
3660 GEN_INT (args_offset + ((i - not_stack + skip)
3661 * UNITS_PER_WORD)),
3662 reg_parm_stack_space, alignment_pad);
3663 }
3664 else
3665 {
3666 rtx addr;
3667 rtx dest;
3668
3669 /* Push padding now if padding above and stack grows down,
3670 or if padding below and stack grows up.
3671 But if space already allocated, this has already been done. */
3672 if (extra && args_addr == 0
3673 && where_pad != none && where_pad != stack_direction)
3674 anti_adjust_stack (GEN_INT (extra));
3675
3676 #ifdef PUSH_ROUNDING
3677 if (args_addr == 0 && PUSH_ARGS)
3678 emit_single_push_insn (mode, x, type);
3679 else
3680 #endif
3681 {
3682 if (GET_CODE (args_so_far) == CONST_INT)
3683 addr
3684 = memory_address (mode,
3685 plus_constant (args_addr,
3686 INTVAL (args_so_far)));
3687 else
3688 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3689 args_so_far));
3690 dest = gen_rtx_MEM (mode, addr);
3691 if (type != 0)
3692 {
3693 set_mem_attributes (dest, type, 1);
3694 /* Function incoming arguments may overlap with sibling call
3695 outgoing arguments and we cannot allow reordering of reads
3696 from function arguments with stores to outgoing arguments
3697 of sibling calls. */
3698 set_mem_alias_set (dest, 0);
3699 }
3700
3701 emit_move_insn (dest, x);
3702 }
3703 }
3704
3705 /* If part should go in registers, copy that part
3706 into the appropriate registers. Do this now, at the end,
3707 since mem-to-mem copies above may do function calls. */
3708 if (partial > 0 && reg != 0)
3709 {
3710 /* Handle calls that pass values in multiple non-contiguous locations.
3711 The Irix 6 ABI has examples of this. */
3712 if (GET_CODE (reg) == PARALLEL)
3713 emit_group_load (reg, x, type, -1);
3714 else
3715 move_block_to_reg (REGNO (reg), x, partial, mode);
3716 }
3717
3718 if (extra && args_addr == 0 && where_pad == stack_direction)
3719 anti_adjust_stack (GEN_INT (extra));
3720
3721 if (alignment_pad && args_addr == 0)
3722 anti_adjust_stack (alignment_pad);
3723 }
3724 \f
3725 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3726 operations. */
3727
3728 static rtx
3729 get_subtarget (rtx x)
3730 {
3731 return ((x == 0
3732 /* Only registers can be subtargets. */
3733 || GET_CODE (x) != REG
3734 /* If the register is readonly, it can't be set more than once. */
3735 || RTX_UNCHANGING_P (x)
3736 /* Don't use hard regs to avoid extending their life. */
3737 || REGNO (x) < FIRST_PSEUDO_REGISTER
3738 /* Avoid subtargets inside loops,
3739 since they hide some invariant expressions. */
3740 || preserve_subexpressions_p ())
3741 ? 0 : x);
3742 }
3743
3744 /* Expand an assignment that stores the value of FROM into TO.
3745 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3746 (This may contain a QUEUED rtx;
3747 if the value is constant, this rtx is a constant.)
3748 Otherwise, the returned value is NULL_RTX. */
3749
3750 rtx
3751 expand_assignment (tree to, tree from, int want_value)
3752 {
3753 rtx to_rtx = 0;
3754 rtx result;
3755
3756 /* Don't crash if the lhs of the assignment was erroneous. */
3757
3758 if (TREE_CODE (to) == ERROR_MARK)
3759 {
3760 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3761 return want_value ? result : NULL_RTX;
3762 }
3763
3764 /* Assignment of a structure component needs special treatment
3765 if the structure component's rtx is not simply a MEM.
3766 Assignment of an array element at a constant index, and assignment of
3767 an array element in an unaligned packed structure field, has the same
3768 problem. */
3769
3770 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3771 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
3772 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
3773 {
3774 enum machine_mode mode1;
3775 HOST_WIDE_INT bitsize, bitpos;
3776 rtx orig_to_rtx;
3777 tree offset;
3778 int unsignedp;
3779 int volatilep = 0;
3780 tree tem;
3781
3782 push_temp_slots ();
3783 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3784 &unsignedp, &volatilep);
3785
3786 /* If we are going to use store_bit_field and extract_bit_field,
3787 make sure to_rtx will be safe for multiple use. */
3788
3789 if (mode1 == VOIDmode && want_value)
3790 tem = stabilize_reference (tem);
3791
3792 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3793
3794 if (offset != 0)
3795 {
3796 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3797
3798 if (GET_CODE (to_rtx) != MEM)
3799 abort ();
3800
3801 #ifdef POINTERS_EXTEND_UNSIGNED
3802 if (GET_MODE (offset_rtx) != Pmode)
3803 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
3804 #else
3805 if (GET_MODE (offset_rtx) != ptr_mode)
3806 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3807 #endif
3808
3809 /* A constant address in TO_RTX can have VOIDmode, we must not try
3810 to call force_reg for that case. Avoid that case. */
3811 if (GET_CODE (to_rtx) == MEM
3812 && GET_MODE (to_rtx) == BLKmode
3813 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3814 && bitsize > 0
3815 && (bitpos % bitsize) == 0
3816 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3817 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3818 {
3819 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3820 bitpos = 0;
3821 }
3822
3823 to_rtx = offset_address (to_rtx, offset_rtx,
3824 highest_pow2_factor_for_type (TREE_TYPE (to),
3825 offset));
3826 }
3827
3828 if (GET_CODE (to_rtx) == MEM)
3829 {
3830 /* If the field is at offset zero, we could have been given the
3831 DECL_RTX of the parent struct. Don't munge it. */
3832 to_rtx = shallow_copy_rtx (to_rtx);
3833
3834 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
3835 }
3836
3837 /* Deal with volatile and readonly fields. The former is only done
3838 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3839 if (volatilep && GET_CODE (to_rtx) == MEM)
3840 {
3841 if (to_rtx == orig_to_rtx)
3842 to_rtx = copy_rtx (to_rtx);
3843 MEM_VOLATILE_P (to_rtx) = 1;
3844 }
3845
3846 if (TREE_CODE (to) == COMPONENT_REF
3847 && TREE_READONLY (TREE_OPERAND (to, 1))
3848 /* We can't assert that a MEM won't be set more than once
3849 if the component is not addressable because another
3850 non-addressable component may be referenced by the same MEM. */
3851 && ! (GET_CODE (to_rtx) == MEM && ! can_address_p (to)))
3852 {
3853 if (to_rtx == orig_to_rtx)
3854 to_rtx = copy_rtx (to_rtx);
3855 RTX_UNCHANGING_P (to_rtx) = 1;
3856 }
3857
3858 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
3859 {
3860 if (to_rtx == orig_to_rtx)
3861 to_rtx = copy_rtx (to_rtx);
3862 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3863 }
3864
3865 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3866 (want_value
3867 /* Spurious cast for HPUX compiler. */
3868 ? ((enum machine_mode)
3869 TYPE_MODE (TREE_TYPE (to)))
3870 : VOIDmode),
3871 unsignedp, TREE_TYPE (tem), get_alias_set (to));
3872
3873 preserve_temp_slots (result);
3874 free_temp_slots ();
3875 pop_temp_slots ();
3876
3877 /* If the value is meaningful, convert RESULT to the proper mode.
3878 Otherwise, return nothing. */
3879 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3880 TYPE_MODE (TREE_TYPE (from)),
3881 result,
3882 TREE_UNSIGNED (TREE_TYPE (to)))
3883 : NULL_RTX);
3884 }
3885
3886 /* If the rhs is a function call and its value is not an aggregate,
3887 call the function before we start to compute the lhs.
3888 This is needed for correct code for cases such as
3889 val = setjmp (buf) on machines where reference to val
3890 requires loading up part of an address in a separate insn.
3891
3892 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3893 since it might be a promoted variable where the zero- or sign- extension
3894 needs to be done. Handling this in the normal way is safe because no
3895 computation is done before the call. */
3896 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
3897 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3898 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3899 && GET_CODE (DECL_RTL (to)) == REG))
3900 {
3901 rtx value;
3902
3903 push_temp_slots ();
3904 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3905 if (to_rtx == 0)
3906 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3907
3908 /* Handle calls that return values in multiple non-contiguous locations.
3909 The Irix 6 ABI has examples of this. */
3910 if (GET_CODE (to_rtx) == PARALLEL)
3911 emit_group_load (to_rtx, value, TREE_TYPE (from),
3912 int_size_in_bytes (TREE_TYPE (from)));
3913 else if (GET_MODE (to_rtx) == BLKmode)
3914 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
3915 else
3916 {
3917 if (POINTER_TYPE_P (TREE_TYPE (to)))
3918 value = convert_memory_address (GET_MODE (to_rtx), value);
3919 emit_move_insn (to_rtx, value);
3920 }
3921 preserve_temp_slots (to_rtx);
3922 free_temp_slots ();
3923 pop_temp_slots ();
3924 return want_value ? to_rtx : NULL_RTX;
3925 }
3926
3927 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3928 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3929
3930 if (to_rtx == 0)
3931 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3932
3933 /* Don't move directly into a return register. */
3934 if (TREE_CODE (to) == RESULT_DECL
3935 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3936 {
3937 rtx temp;
3938
3939 push_temp_slots ();
3940 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3941
3942 if (GET_CODE (to_rtx) == PARALLEL)
3943 emit_group_load (to_rtx, temp, TREE_TYPE (from),
3944 int_size_in_bytes (TREE_TYPE (from)));
3945 else
3946 emit_move_insn (to_rtx, temp);
3947
3948 preserve_temp_slots (to_rtx);
3949 free_temp_slots ();
3950 pop_temp_slots ();
3951 return want_value ? to_rtx : NULL_RTX;
3952 }
3953
3954 /* In case we are returning the contents of an object which overlaps
3955 the place the value is being stored, use a safe function when copying
3956 a value through a pointer into a structure value return block. */
3957 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3958 && current_function_returns_struct
3959 && !current_function_returns_pcc_struct)
3960 {
3961 rtx from_rtx, size;
3962
3963 push_temp_slots ();
3964 size = expr_size (from);
3965 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3966
3967 if (TARGET_MEM_FUNCTIONS)
3968 emit_library_call (memmove_libfunc, LCT_NORMAL,
3969 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3970 XEXP (from_rtx, 0), Pmode,
3971 convert_to_mode (TYPE_MODE (sizetype),
3972 size, TREE_UNSIGNED (sizetype)),
3973 TYPE_MODE (sizetype));
3974 else
3975 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3976 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3977 XEXP (to_rtx, 0), Pmode,
3978 convert_to_mode (TYPE_MODE (integer_type_node),
3979 size,
3980 TREE_UNSIGNED (integer_type_node)),
3981 TYPE_MODE (integer_type_node));
3982
3983 preserve_temp_slots (to_rtx);
3984 free_temp_slots ();
3985 pop_temp_slots ();
3986 return want_value ? to_rtx : NULL_RTX;
3987 }
3988
3989 /* Compute FROM and store the value in the rtx we got. */
3990
3991 push_temp_slots ();
3992 result = store_expr (from, to_rtx, want_value);
3993 preserve_temp_slots (result);
3994 free_temp_slots ();
3995 pop_temp_slots ();
3996 return want_value ? result : NULL_RTX;
3997 }
3998
3999 /* Generate code for computing expression EXP,
4000 and storing the value into TARGET.
4001 TARGET may contain a QUEUED rtx.
4002
4003 If WANT_VALUE & 1 is nonzero, return a copy of the value
4004 not in TARGET, so that we can be sure to use the proper
4005 value in a containing expression even if TARGET has something
4006 else stored in it. If possible, we copy the value through a pseudo
4007 and return that pseudo. Or, if the value is constant, we try to
4008 return the constant. In some cases, we return a pseudo
4009 copied *from* TARGET.
4010
4011 If the mode is BLKmode then we may return TARGET itself.
4012 It turns out that in BLKmode it doesn't cause a problem,
4013 because C has no operators that could combine two different
4014 assignments into the same BLKmode object with different values
4015 with no sequence point. Will other languages need this to
4016 be more thorough?
4017
4018 If WANT_VALUE & 1 is 0, we return NULL, to make sure
4019 to catch quickly any cases where the caller uses the value
4020 and fails to set WANT_VALUE.
4021
4022 If WANT_VALUE & 2 is set, this is a store into a call param on the
4023 stack, and block moves may need to be treated specially. */
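/* Illustrative (hypothetical) uses of the WANT_VALUE bits documented
   above:

   store_expr (exp, target, 0);          assign only, returns NULL_RTX
   temp = store_expr (exp, target, 1);   also yield the stored value
   store_expr (exp, target, 2);          store into a call param slot

   and WANT_VALUE == 3 combines the last two behaviors.  */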
4024
4025 rtx
4026 store_expr (tree exp, rtx target, int want_value)
4027 {
4028 rtx temp;
4029 int dont_return_target = 0;
4030 int dont_store_target = 0;
4031
4032 if (VOID_TYPE_P (TREE_TYPE (exp)))
4033 {
4034 /* C++ can generate ?: expressions with a throw expression in one
4035 branch and an rvalue in the other. Here, we resolve attempts to
4036 store the throw expression's nonexistent result. */
4037 if (want_value)
4038 abort ();
4039 expand_expr (exp, const0_rtx, VOIDmode, 0);
4040 return NULL_RTX;
4041 }
4042 if (TREE_CODE (exp) == COMPOUND_EXPR)
4043 {
4044 /* Perform first part of compound expression, then assign from second
4045 part. */
4046 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4047 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4048 emit_queue ();
4049 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4050 }
4051 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4052 {
4053 /* For conditional expression, get safe form of the target. Then
4054 test the condition, doing the appropriate assignment on either
4055 side. This avoids the creation of unnecessary temporaries.
4056 For non-BLKmode, it is more efficient not to do this. */
4057
4058 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4059
4060 emit_queue ();
4061 target = protect_from_queue (target, 1);
4062
4063 do_pending_stack_adjust ();
4064 NO_DEFER_POP;
4065 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4066 start_cleanup_deferral ();
4067 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
4068 end_cleanup_deferral ();
4069 emit_queue ();
4070 emit_jump_insn (gen_jump (lab2));
4071 emit_barrier ();
4072 emit_label (lab1);
4073 start_cleanup_deferral ();
4074 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
4075 end_cleanup_deferral ();
4076 emit_queue ();
4077 emit_label (lab2);
4078 OK_DEFER_POP;
4079
4080 return want_value & 1 ? target : NULL_RTX;
4081 }
4082 else if (queued_subexp_p (target))
4083 /* If target contains a postincrement, let's not risk
4084 using it as the place to generate the rhs. */
4085 {
4086 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4087 {
4088 /* Expand EXP into a new pseudo. */
4089 temp = gen_reg_rtx (GET_MODE (target));
4090 temp = expand_expr (exp, temp, GET_MODE (target),
4091 (want_value & 2
4092 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4093 }
4094 else
4095 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4096 (want_value & 2
4097 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4098
4099 /* If target is volatile, ANSI requires accessing the value
4100 *from* the target, if it is accessed. So make that happen.
4101 In no case return the target itself. */
4102 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
4103 dont_return_target = 1;
4104 }
4105 else if ((want_value & 1) != 0
4106 && GET_CODE (target) == MEM
4107 && ! MEM_VOLATILE_P (target)
4108 && GET_MODE (target) != BLKmode)
4109 /* If target is in memory and caller wants value in a register instead,
4110 arrange that. Pass TARGET as target for expand_expr so that,
4111 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4112 We know expand_expr will not use the target in that case.
4113 Don't do this if TARGET is volatile because we are supposed
4114 to write it and then read it. */
4115 {
4116 temp = expand_expr (exp, target, GET_MODE (target),
4117 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4118 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4119 {
4120 /* If TEMP is already in the desired TARGET, only copy it from
4121 memory and don't store it there again. */
4122 if (temp == target
4123 || (rtx_equal_p (temp, target)
4124 && ! side_effects_p (temp) && ! side_effects_p (target)))
4125 dont_store_target = 1;
4126 temp = copy_to_reg (temp);
4127 }
4128 dont_return_target = 1;
4129 }
4130 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4131 /* If this is a scalar in a register that is stored in a wider mode
4132 than the declared mode, compute the result into its declared mode
4133 and then convert to the wider mode. Our value is the computed
4134 expression. */
4135 {
4136 rtx inner_target = 0;
4137
4138 /* If we don't want a value, we can do the conversion inside EXP,
4139 which will often result in some optimizations. Do the conversion
4140 in two steps: first change the signedness, if needed, then
4141 the extend. But don't do this if the type of EXP is a subtype
4142 of something else since then the conversion might involve
4143 more than just converting modes. */
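/* Illustration (assumed promotion): for a `short' variable promoted
   into an SImode register with SUBREG_PROMOTED_UNSIGNED_P clear, an
   `unsigned short' EXP is first converted to `short' to fix the
   signedness, then to the SImode integer type, so the extension is
   folded into EXP itself.  */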
4144 if ((want_value & 1) == 0
4145 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4146 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4147 {
4148 if (TREE_UNSIGNED (TREE_TYPE (exp))
4149 != SUBREG_PROMOTED_UNSIGNED_P (target))
4150 exp = convert
4151 ((*lang_hooks.types.signed_or_unsigned_type)
4152 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4153
4154 exp = convert ((*lang_hooks.types.type_for_mode)
4155 (GET_MODE (SUBREG_REG (target)),
4156 SUBREG_PROMOTED_UNSIGNED_P (target)),
4157 exp);
4158
4159 inner_target = SUBREG_REG (target);
4160 }
4161
4162 temp = expand_expr (exp, inner_target, VOIDmode,
4163 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4164
4165 /* If TEMP is a MEM and we want a result value, make the access
4166 now so it gets done only once. Strictly speaking, this is
4167 only necessary if the MEM is volatile, or if the address
4168 overlaps TARGET. But not performing the load twice also
4169 reduces the amount of rtl we generate and then have to CSE. */
4170 if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
4171 temp = copy_to_reg (temp);
4172
4173 /* If TEMP is a VOIDmode constant, use convert_modes to make
4174 sure that we properly convert it. */
4175 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4176 {
4177 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4178 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4179 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4180 GET_MODE (target), temp,
4181 SUBREG_PROMOTED_UNSIGNED_P (target));
4182 }
4183
4184 convert_move (SUBREG_REG (target), temp,
4185 SUBREG_PROMOTED_UNSIGNED_P (target));
4186
4187 /* If we promoted a constant, change the mode back down to match
4188 target. Otherwise, the caller might get confused by a result whose
4189 mode is larger than expected. */
4190
4191 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
4192 {
4193 if (GET_MODE (temp) != VOIDmode)
4194 {
4195 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4196 SUBREG_PROMOTED_VAR_P (temp) = 1;
4197 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4198 SUBREG_PROMOTED_UNSIGNED_P (target));
4199 }
4200 else
4201 temp = convert_modes (GET_MODE (target),
4202 GET_MODE (SUBREG_REG (target)),
4203 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4204 }
4205
4206 return want_value & 1 ? temp : NULL_RTX;
4207 }
4208 else
4209 {
4210 temp = expand_expr (exp, target, GET_MODE (target),
4211 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4212 /* Return TARGET if it's a specified hardware register.
4213 If TARGET is a volatile mem ref, either return TARGET
4214 or return a reg copied *from* TARGET; ANSI requires this.
4215
4216 Otherwise, if TEMP is not TARGET, return TEMP
4217 if it is constant (for efficiency),
4218 or if we really want the correct value. */
4219 if (!(target && GET_CODE (target) == REG
4220 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4221 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4222 && ! rtx_equal_p (temp, target)
4223 && (CONSTANT_P (temp) || (want_value & 1) != 0))
4224 dont_return_target = 1;
4225 }
4226
4227 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4228 the same as that of TARGET, adjust the constant. This is needed, for
4229 example, in case it is a CONST_DOUBLE and we want only a word-sized
4230 value. */
4231 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4232 && TREE_CODE (exp) != ERROR_MARK
4233 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4234 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4235 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4236
4237 /* If value was not generated in the target, store it there.
4238 Convert the value to TARGET's type first if necessary.
4239 If TEMP and TARGET compare equal according to rtx_equal_p, but
4240 one or both of them are volatile memory refs, we have to distinguish
4241 two cases:
4242 - expand_expr has used TARGET. In this case, we must not generate
4243 another copy. This can be detected by TEMP and TARGET comparing
4244 equal according to ==.
4245 - expand_expr has not used TARGET - that means that the source just
4246 happens to have the same RTX form. Since temp will have been created
4247 by expand_expr, it will compare unequal according to == .
4248 We must generate a copy in this case, to reach the correct number
4249 of volatile memory references. */
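/* Concrete illustration of the two cases: if expand_expr returned
   TARGET itself, TEMP == TARGET and no copy is emitted; if the source
   merely expanded to a structurally identical volatile MEM,
   rtx_equal_p holds but TEMP != TARGET, and the copy below is kept so
   both volatile accesses still happen.  */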
4250
4251 if ((! rtx_equal_p (temp, target)
4252 || (temp != target && (side_effects_p (temp)
4253 || side_effects_p (target))))
4254 && TREE_CODE (exp) != ERROR_MARK
4255 && ! dont_store_target
4256 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4257 but TARGET is not valid memory reference, TEMP will differ
4258 from TARGET although it is really the same location. */
4259 && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
4260 || target != DECL_RTL_IF_SET (exp))
4261 /* If there's nothing to copy, don't bother. Don't call expr_size
4262 unless necessary, because some front-ends (C++) expr_size-hook
4263 aborts on objects that are not supposed to be bit-copied or
4264 bit-initialized. */
4265 && expr_size (exp) != const0_rtx)
4266 {
4267 target = protect_from_queue (target, 1);
4268 if (GET_MODE (temp) != GET_MODE (target)
4269 && GET_MODE (temp) != VOIDmode)
4270 {
4271 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4272 if (dont_return_target)
4273 {
4274 /* In this case, we will return TEMP,
4275 so make sure it has the proper mode.
4276 But don't forget to store the value into TARGET. */
4277 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4278 emit_move_insn (target, temp);
4279 }
4280 else
4281 convert_move (target, temp, unsignedp);
4282 }
4283
4284 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4285 {
4286 /* Handle copying a string constant into an array. The string
4287 constant may be shorter than the array. So copy just the string's
4288 actual length, and clear the rest. First get the size of the data
4289 type of the string, which is actually the size of the target. */
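/* Conceptually (hypothetical C equivalent), for `char buf[8] = "abc";'
   this arm expands to the equivalent of

   memcpy (buf, "abc", 4);   the string including its NUL
   memset (buf + 4, 0, 4);   clear the tail

   with the MIN_EXPR below guarding against a string longer than the
   target.  */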
4290 rtx size = expr_size (exp);
4291
4292 if (GET_CODE (size) == CONST_INT
4293 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4294 emit_block_move (target, temp, size,
4295 (want_value & 2
4296 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4297 else
4298 {
4299 /* Compute the size of the data to copy from the string. */
4300 tree copy_size
4301 = size_binop (MIN_EXPR,
4302 make_tree (sizetype, size),
4303 size_int (TREE_STRING_LENGTH (exp)));
4304 rtx copy_size_rtx
4305 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4306 (want_value & 2
4307 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4308 rtx label = 0;
4309
4310 /* Copy that much. */
4311 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4312 TREE_UNSIGNED (sizetype));
4313 emit_block_move (target, temp, copy_size_rtx,
4314 (want_value & 2
4315 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4316
4317 /* Figure out how much is left in TARGET that we have to clear.
4318 Do all calculations in ptr_mode. */
4319 if (GET_CODE (copy_size_rtx) == CONST_INT)
4320 {
4321 size = plus_constant (size, -INTVAL (copy_size_rtx));
4322 target = adjust_address (target, BLKmode,
4323 INTVAL (copy_size_rtx));
4324 }
4325 else
4326 {
4327 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4328 copy_size_rtx, NULL_RTX, 0,
4329 OPTAB_LIB_WIDEN);
4330
4331 #ifdef POINTERS_EXTEND_UNSIGNED
4332 if (GET_MODE (copy_size_rtx) != Pmode)
4333 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4334 TREE_UNSIGNED (sizetype));
4335 #endif
4336
4337 target = offset_address (target, copy_size_rtx,
4338 highest_pow2_factor (copy_size));
4339 label = gen_label_rtx ();
4340 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4341 GET_MODE (size), 0, label);
4342 }
4343
4344 if (size != const0_rtx)
4345 clear_storage (target, size);
4346
4347 if (label)
4348 emit_label (label);
4349 }
4350 }
4351 /* Handle calls that return values in multiple non-contiguous locations.
4352 The Irix 6 ABI has examples of this. */
4353 else if (GET_CODE (target) == PARALLEL)
4354 emit_group_load (target, temp, TREE_TYPE (exp),
4355 int_size_in_bytes (TREE_TYPE (exp)));
4356 else if (GET_MODE (temp) == BLKmode)
4357 emit_block_move (target, temp, expr_size (exp),
4358 (want_value & 2
4359 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4360 else
4361 emit_move_insn (target, temp);
4362 }
4363
4364 /* If we don't want a value, return NULL_RTX. */
4365 if ((want_value & 1) == 0)
4366 return NULL_RTX;
4367
4368 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4369 ??? The latter test doesn't seem to make sense. */
4370 else if (dont_return_target && GET_CODE (temp) != MEM)
4371 return temp;
4372
4373 /* Return TARGET itself if it is a hard register. */
4374 else if ((want_value & 1) != 0
4375 && GET_MODE (target) != BLKmode
4376 && ! (GET_CODE (target) == REG
4377 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4378 return copy_to_reg (target);
4379
4380 else
4381 return target;
4382 }
4383 \f
4384 /* Return 1 if EXP just contains zeros. FIXME merge with initializer_zerop. */
4385
4386 static int
4387 is_zeros_p (tree exp)
4388 {
4389 tree elt;
4390
4391 switch (TREE_CODE (exp))
4392 {
4393 case CONVERT_EXPR:
4394 case NOP_EXPR:
4395 case NON_LVALUE_EXPR:
4396 case VIEW_CONVERT_EXPR:
4397 return is_zeros_p (TREE_OPERAND (exp, 0));
4398
4399 case INTEGER_CST:
4400 return integer_zerop (exp);
4401
4402 case COMPLEX_CST:
4403 return
4404 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4405
4406 case REAL_CST:
4407 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4408
4409 case VECTOR_CST:
4410 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4411 elt = TREE_CHAIN (elt))
4412 if (!is_zeros_p (TREE_VALUE (elt)))
4413 return 0;
4414
4415 return 1;
4416
4417 case CONSTRUCTOR:
4418 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4419 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4420 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4421 if (! is_zeros_p (TREE_VALUE (elt)))
4422 return 0;
4423
4424 return 1;
4425
4426 default:
4427 return 0;
4428 }
4429 }
4430
4431 /* Return 1 if EXP contains mostly (3/4) zeros. */
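/* E.g. a CONSTRUCTOR with 7 zero elements out of 9 qualifies
   (4 * 7 = 28 >= 3 * 9 = 27) while 2 out of 3 does not (8 < 9);
   exactly three quarters is the boundary and still counts.  */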
4432
4433 int
4434 mostly_zeros_p (tree exp)
4435 {
4436 if (TREE_CODE (exp) == CONSTRUCTOR)
4437 {
4438 int elts = 0, zeros = 0;
4439 tree elt = CONSTRUCTOR_ELTS (exp);
4440 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4441 {
4442 /* If there are no ranges of true bits, it is all zero. */
4443 return elt == NULL_TREE;
4444 }
4445 for (; elt; elt = TREE_CHAIN (elt))
4446 {
4447 /* We do not handle the case where the index is a RANGE_EXPR,
4448 so the statistic will be somewhat inaccurate.
4449 We do make a more accurate count in store_constructor itself,
4450 so since this function is only used for nested array elements,
4451 this should be close enough. */
4452 if (mostly_zeros_p (TREE_VALUE (elt)))
4453 zeros++;
4454 elts++;
4455 }
4456
4457 return 4 * zeros >= 3 * elts;
4458 }
4459
4460 return is_zeros_p (exp);
4461 }
4462 \f
4463 /* Helper function for store_constructor.
4464 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4465 TYPE is the type of the CONSTRUCTOR, not the element type.
4466 CLEARED is as for store_constructor.
4467 ALIAS_SET is the alias set to use for any stores.
4468
4469 This provides a recursive shortcut back to store_constructor when it isn't
4470 necessary to go through store_field. This is so that we can pass through
4471 the cleared field to let store_constructor know that we may not have to
4472 clear a substructure if the outer structure has already been cleared. */
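/* For example, given `struct { struct { int x, y; } in; int b; }'
   initialized with { { 0, 0 }, 1 }, once the outer aggregate has been
   cleared the recursive store_constructor call for IN sees CLEARED set
   and skips both the redundant clearing and the zero stores.  */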
4473
4474 static void
4475 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4476 HOST_WIDE_INT bitpos, enum machine_mode mode,
4477 tree exp, tree type, int cleared, int alias_set)
4478 {
4479 if (TREE_CODE (exp) == CONSTRUCTOR
4480 && bitpos % BITS_PER_UNIT == 0
4481 /* If we have a nonzero bitpos for a register target, then we just
4482 let store_field do the bitfield handling. This is unlikely to
4483 generate unnecessary clear instructions anyways. */
4484 && (bitpos == 0 || GET_CODE (target) == MEM))
4485 {
4486 if (GET_CODE (target) == MEM)
4487 target
4488 = adjust_address (target,
4489 GET_MODE (target) == BLKmode
4490 || 0 != (bitpos
4491 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4492 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4493
4494
4495 /* Update the alias set, if required. */
4496 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4497 && MEM_ALIAS_SET (target) != 0)
4498 {
4499 target = copy_rtx (target);
4500 set_mem_alias_set (target, alias_set);
4501 }
4502
4503 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4504 }
4505 else
4506 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4507 alias_set);
4508 }
4509
4510 /* Store the value of constructor EXP into the rtx TARGET.
4511 TARGET is either a REG or a MEM; we know it cannot conflict, since
4512 safe_from_p has been called.
4513 CLEARED is true if TARGET is known to have been zero'd.
4514 SIZE is the number of bytes of TARGET we are allowed to modify: this
4515 may not be the same as the size of EXP if we are assigning to a field
4516 which has been packed to exclude padding bits. */
4517
4518 static void
4519 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4520 {
4521 tree type = TREE_TYPE (exp);
4522 #ifdef WORD_REGISTER_OPERATIONS
4523 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4524 #endif
4525
4526 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4527 || TREE_CODE (type) == QUAL_UNION_TYPE)
4528 {
4529 tree elt;
4530
4531 /* If size is zero or the target is already cleared, do nothing. */
4532 if (size == 0 || cleared)
4533 cleared = 1;
4534 /* We either clear the aggregate or indicate the value is dead. */
4535 else if ((TREE_CODE (type) == UNION_TYPE
4536 || TREE_CODE (type) == QUAL_UNION_TYPE)
4537 && ! CONSTRUCTOR_ELTS (exp))
4538 /* If the constructor is empty, clear the union. */
4539 {
4540 clear_storage (target, expr_size (exp));
4541 cleared = 1;
4542 }
4543
4544 /* If we are building a static constructor into a register,
4545 set the initial value as zero so we can fold the value into
4546 a constant. But if more than one register is involved,
4547 this probably loses. */
4548 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4549 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4550 {
4551 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4552 cleared = 1;
4553 }
4554
4555 /* If the constructor has fewer fields than the structure
4556 or if we are initializing the structure to mostly zeros,
4557 clear the whole structure first. Don't do this if TARGET is a
4558 register whose mode size isn't equal to SIZE since clear_storage
4559 can't handle this case. */
4560 else if (((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
4561 || mostly_zeros_p (exp))
4562 && (GET_CODE (target) != REG
4563 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4564 == size)))
4565 {
4566 rtx xtarget = target;
4567
4568 if (readonly_fields_p (type))
4569 {
4570 xtarget = copy_rtx (xtarget);
4571 RTX_UNCHANGING_P (xtarget) = 1;
4572 }
4573
4574 clear_storage (xtarget, GEN_INT (size));
4575 cleared = 1;
4576 }
4577
4578 if (! cleared)
4579 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4580
4581 /* Store each element of the constructor into
4582 the corresponding field of TARGET. */
4583
4584 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4585 {
4586 tree field = TREE_PURPOSE (elt);
4587 tree value = TREE_VALUE (elt);
4588 enum machine_mode mode;
4589 HOST_WIDE_INT bitsize;
4590 HOST_WIDE_INT bitpos = 0;
4591 tree offset;
4592 rtx to_rtx = target;
4593
4594 /* Just ignore missing fields.
4595 We cleared the whole structure, above,
4596 if any fields are missing. */
4597 if (field == 0)
4598 continue;
4599
4600 if (cleared && is_zeros_p (value))
4601 continue;
4602
4603 if (host_integerp (DECL_SIZE (field), 1))
4604 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4605 else
4606 bitsize = -1;
4607
4608 mode = DECL_MODE (field);
4609 if (DECL_BIT_FIELD (field))
4610 mode = VOIDmode;
4611
4612 offset = DECL_FIELD_OFFSET (field);
4613 if (host_integerp (offset, 0)
4614 && host_integerp (bit_position (field), 0))
4615 {
4616 bitpos = int_bit_position (field);
4617 offset = 0;
4618 }
4619 else
4620 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4621
4622 if (offset)
4623 {
4624 rtx offset_rtx;
4625
4626 if (CONTAINS_PLACEHOLDER_P (offset))
4627 offset = build (WITH_RECORD_EXPR, sizetype,
4628 offset, make_tree (TREE_TYPE (exp), target));
4629
4630 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4631 if (GET_CODE (to_rtx) != MEM)
4632 abort ();
4633
4634 #ifdef POINTERS_EXTEND_UNSIGNED
4635 if (GET_MODE (offset_rtx) != Pmode)
4636 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4637 #else
4638 if (GET_MODE (offset_rtx) != ptr_mode)
4639 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4640 #endif
4641
4642 to_rtx = offset_address (to_rtx, offset_rtx,
4643 highest_pow2_factor (offset));
4644 }
4645
4646 if (TREE_READONLY (field))
4647 {
4648 if (GET_CODE (to_rtx) == MEM)
4649 to_rtx = copy_rtx (to_rtx);
4650
4651 RTX_UNCHANGING_P (to_rtx) = 1;
4652 }
4653
4654 #ifdef WORD_REGISTER_OPERATIONS
4655 /* If this initializes a field that is smaller than a word, at the
4656 start of a word, try to widen it to a full word.
4657 This special case allows us to output C++ member function
4658 initializations in a form that the optimizers can understand. */
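/* Worked example (assuming 32-bit words): initializing an 8-bit field
   at bit position 0 with the constant 0x12 becomes a full word_mode
   store; on a big-endian target the value is first shifted left by
   32 - 8 = 24 bits, giving 0x12000000.  */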
4659 if (GET_CODE (target) == REG
4660 && bitsize < BITS_PER_WORD
4661 && bitpos % BITS_PER_WORD == 0
4662 && GET_MODE_CLASS (mode) == MODE_INT
4663 && TREE_CODE (value) == INTEGER_CST
4664 && exp_size >= 0
4665 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4666 {
4667 tree type = TREE_TYPE (value);
4668
4669 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4670 {
4671 type = (*lang_hooks.types.type_for_size)
4672 (BITS_PER_WORD, TREE_UNSIGNED (type));
4673 value = convert (type, value);
4674 }
4675
4676 if (BYTES_BIG_ENDIAN)
4677 value
4678 = fold (build (LSHIFT_EXPR, type, value,
4679 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4680 bitsize = BITS_PER_WORD;
4681 mode = word_mode;
4682 }
4683 #endif
4684
4685 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4686 && DECL_NONADDRESSABLE_P (field))
4687 {
4688 to_rtx = copy_rtx (to_rtx);
4689 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4690 }
4691
4692 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4693 value, type, cleared,
4694 get_alias_set (TREE_TYPE (field)));
4695 }
4696 }
4697 else if (TREE_CODE (type) == ARRAY_TYPE
4698 || TREE_CODE (type) == VECTOR_TYPE)
4699 {
4700 tree elt;
4701 int i;
4702 int need_to_clear;
4703 tree domain = TYPE_DOMAIN (type);
4704 tree elttype = TREE_TYPE (type);
4705 int const_bounds_p;
4706 HOST_WIDE_INT minelt = 0;
4707 HOST_WIDE_INT maxelt = 0;
4708
4709 /* Vectors are like arrays, but the domain is stored via an array
4710 type indirectly. */
4711 if (TREE_CODE (type) == VECTOR_TYPE)
4712 {
4713 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
4714 the same field as TYPE_DOMAIN, we are not guaranteed that
4715 it always will. */
4716 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4717 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4718 }
4719
4720 const_bounds_p = (TYPE_MIN_VALUE (domain)
4721 && TYPE_MAX_VALUE (domain)
4722 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4723 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4724
4725 /* If we have constant bounds for the range of the type, get them. */
4726 if (const_bounds_p)
4727 {
4728 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4729 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4730 }
4731
4732 /* If the constructor has fewer elements than the array,
4733 clear the whole array first. Similarly if this is a
4734 static constructor of a non-BLKmode object. */
4735 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4736 need_to_clear = 1;
4737 else
4738 {
4739 HOST_WIDE_INT count = 0, zero_count = 0;
4740 need_to_clear = ! const_bounds_p;
4741
4742 /* This loop is a more accurate version of the loop in
4743 mostly_zeros_p (it handles RANGE_EXPR in an index).
4744 It is also needed to check for missing elements. */
4745 for (elt = CONSTRUCTOR_ELTS (exp);
4746 elt != NULL_TREE && ! need_to_clear;
4747 elt = TREE_CHAIN (elt))
4748 {
4749 tree index = TREE_PURPOSE (elt);
4750 HOST_WIDE_INT this_node_count;
4751
4752 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4753 {
4754 tree lo_index = TREE_OPERAND (index, 0);
4755 tree hi_index = TREE_OPERAND (index, 1);
4756
4757 if (! host_integerp (lo_index, 1)
4758 || ! host_integerp (hi_index, 1))
4759 {
4760 need_to_clear = 1;
4761 break;
4762 }
4763
4764 this_node_count = (tree_low_cst (hi_index, 1)
4765 - tree_low_cst (lo_index, 1) + 1);
4766 }
4767 else
4768 this_node_count = 1;
4769
4770 count += this_node_count;
4771 if (mostly_zeros_p (TREE_VALUE (elt)))
4772 zero_count += this_node_count;
4773 }
4774
4775 /* Clear the entire array first if there are any missing elements,
4776 or if the incidence of zero elements is >= 75%. */
4777 if (! need_to_clear
4778 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4779 need_to_clear = 1;
4780 }
4781
4782 if (need_to_clear && size > 0)
4783 {
4784 if (! cleared)
4785 {
4786 if (REG_P (target))
4787 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4788 else
4789 clear_storage (target, GEN_INT (size));
4790 }
4791 cleared = 1;
4792 }
4793 else if (REG_P (target))
4794 /* Inform later passes that the old value is dead. */
4795 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4796
4797 /* Store each element of the constructor into
4798 the corresponding element of TARGET, determined
4799 by counting the elements. */
4800 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4801 elt;
4802 elt = TREE_CHAIN (elt), i++)
4803 {
4804 enum machine_mode mode;
4805 HOST_WIDE_INT bitsize;
4806 HOST_WIDE_INT bitpos;
4807 int unsignedp;
4808 tree value = TREE_VALUE (elt);
4809 tree index = TREE_PURPOSE (elt);
4810 rtx xtarget = target;
4811
4812 if (cleared && is_zeros_p (value))
4813 continue;
4814
4815 unsignedp = TREE_UNSIGNED (elttype);
4816 mode = TYPE_MODE (elttype);
4817 if (mode == BLKmode)
4818 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4819 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4820 : -1);
4821 else
4822 bitsize = GET_MODE_BITSIZE (mode);
4823
4824 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4825 {
4826 tree lo_index = TREE_OPERAND (index, 0);
4827 tree hi_index = TREE_OPERAND (index, 1);
4828 rtx index_r, pos_rtx, loop_end;
4829 struct nesting *loop;
4830 HOST_WIDE_INT lo, hi, count;
4831 tree position;
4832
4833 /* If the range is constant and "small", unroll the loop. */
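/* "Small" means: not a MEM target, or at most two elements, or at most
   40 * 8 = 320 bits of data in total -- e.g. ten 4-byte elements
   (320 bits) are still unrolled, eleven are not.  */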
4834 if (const_bounds_p
4835 && host_integerp (lo_index, 0)
4836 && host_integerp (hi_index, 0)
4837 && (lo = tree_low_cst (lo_index, 0),
4838 hi = tree_low_cst (hi_index, 0),
4839 count = hi - lo + 1,
4840 (GET_CODE (target) != MEM
4841 || count <= 2
4842 || (host_integerp (TYPE_SIZE (elttype), 1)
4843 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4844 <= 40 * 8)))))
4845 {
4846 lo -= minelt; hi -= minelt;
4847 for (; lo <= hi; lo++)
4848 {
4849 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4850
4851 if (GET_CODE (target) == MEM
4852 && !MEM_KEEP_ALIAS_SET_P (target)
4853 && TREE_CODE (type) == ARRAY_TYPE
4854 && TYPE_NONALIASED_COMPONENT (type))
4855 {
4856 target = copy_rtx (target);
4857 MEM_KEEP_ALIAS_SET_P (target) = 1;
4858 }
4859
4860 store_constructor_field
4861 (target, bitsize, bitpos, mode, value, type, cleared,
4862 get_alias_set (elttype));
4863 }
4864 }
4865 else
4866 {
4867 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4868 loop_end = gen_label_rtx ();
4869
4870 unsignedp = TREE_UNSIGNED (domain);
4871
4872 index = build_decl (VAR_DECL, NULL_TREE, domain);
4873
4874 index_r
4875 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4876 &unsignedp, 0));
4877 SET_DECL_RTL (index, index_r);
4878 if (TREE_CODE (value) == SAVE_EXPR
4879 && SAVE_EXPR_RTL (value) == 0)
4880 {
4881 /* Make sure value gets expanded once before the
4882 loop. */
4883 expand_expr (value, const0_rtx, VOIDmode, 0);
4884 emit_queue ();
4885 }
4886 store_expr (lo_index, index_r, 0);
4887 loop = expand_start_loop (0);
4888
4889 /* Assign value to element index. */
4890 position
4891 = convert (ssizetype,
4892 fold (build (MINUS_EXPR, TREE_TYPE (index),
4893 index, TYPE_MIN_VALUE (domain))));
4894 position = size_binop (MULT_EXPR, position,
4895 convert (ssizetype,
4896 TYPE_SIZE_UNIT (elttype)));
4897
4898 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4899 xtarget = offset_address (target, pos_rtx,
4900 highest_pow2_factor (position));
4901 xtarget = adjust_address (xtarget, mode, 0);
4902 if (TREE_CODE (value) == CONSTRUCTOR)
4903 store_constructor (value, xtarget, cleared,
4904 bitsize / BITS_PER_UNIT);
4905 else
4906 store_expr (value, xtarget, 0);
4907
4908 expand_exit_loop_if_false (loop,
4909 build (LT_EXPR, integer_type_node,
4910 index, hi_index));
4911
4912 expand_increment (build (PREINCREMENT_EXPR,
4913 TREE_TYPE (index),
4914 index, integer_one_node), 0, 0);
4915 expand_end_loop ();
4916 emit_label (loop_end);
4917 }
4918 }
4919 else if ((index != 0 && ! host_integerp (index, 0))
4920 || ! host_integerp (TYPE_SIZE (elttype), 1))
4921 {
4922 tree position;
4923
4924 if (index == 0)
4925 index = ssize_int (1);
4926
4927 if (minelt)
4928 index = convert (ssizetype,
4929 fold (build (MINUS_EXPR, index,
4930 TYPE_MIN_VALUE (domain))));
4931
4932 position = size_binop (MULT_EXPR, index,
4933 convert (ssizetype,
4934 TYPE_SIZE_UNIT (elttype)));
4935 xtarget = offset_address (target,
4936 expand_expr (position, 0, VOIDmode, 0),
4937 highest_pow2_factor (position));
4938 xtarget = adjust_address (xtarget, mode, 0);
4939 store_expr (value, xtarget, 0);
4940 }
4941 else
4942 {
4943 if (index != 0)
4944 bitpos = ((tree_low_cst (index, 0) - minelt)
4945 * tree_low_cst (TYPE_SIZE (elttype), 1));
4946 else
4947 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4948
4949 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
4950 && TREE_CODE (type) == ARRAY_TYPE
4951 && TYPE_NONALIASED_COMPONENT (type))
4952 {
4953 target = copy_rtx (target);
4954 MEM_KEEP_ALIAS_SET_P (target) = 1;
4955 }
4956
4957 store_constructor_field (target, bitsize, bitpos, mode, value,
4958 type, cleared, get_alias_set (elttype));
4959
4960 }
4961 }
4962 }
4963
4964 /* Set constructor assignments. */
4965 else if (TREE_CODE (type) == SET_TYPE)
4966 {
4967 tree elt = CONSTRUCTOR_ELTS (exp);
4968 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4969 tree domain = TYPE_DOMAIN (type);
4970 tree domain_min, domain_max, bitlength;
4971
4972 /* The default implementation strategy is to extract the constant
4973 parts of the constructor, use that to initialize the target,
4974 and then "or" in whatever non-constant ranges we need in addition.
4975
4976 If a large set is all zero or all ones, it is
4977 probably better to set it using memset (if available) or bzero.
4978 Also, if a large set has just a single range, it may be
4979 better to first clear the whole set (using
4980 bzero/memset), and then set the bits we want. */
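/* Example of the constant part (little-endian bit numbering, set
   membership hypothetical): members { 2, 5, 6, 7 } of an 8-bit set
   yield the word 1<<2 | 1<<5 | 1<<6 | 1<<7 == 0xE4, stored with a
   single move below.  */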
4981
4982 /* Check for all zeros. */
4983 if (elt == NULL_TREE && size > 0)
4984 {
4985 if (!cleared)
4986 clear_storage (target, GEN_INT (size));
4987 return;
4988 }
4989
4990 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4991 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4992 bitlength = size_binop (PLUS_EXPR,
4993 size_diffop (domain_max, domain_min),
4994 ssize_int (1));
4995
4996 nbits = tree_low_cst (bitlength, 1);
4997
4998 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4999 are "complicated" (more than one range), initialize (the
5000 constant parts) by copying from a constant. */
5001 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5002 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5003 {
5004 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5005 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5006 char *bit_buffer = alloca (nbits);
5007 HOST_WIDE_INT word = 0;
5008 unsigned int bit_pos = 0;
5009 unsigned int ibit = 0;
5010 unsigned int offset = 0; /* In bytes from beginning of set. */
5011
5012 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
5013 for (;;)
5014 {
5015 if (bit_buffer[ibit])
5016 {
5017 if (BYTES_BIG_ENDIAN)
5018 word |= (1 << (set_word_size - 1 - bit_pos));
5019 else
5020 word |= 1 << bit_pos;
5021 }
5022
5023 bit_pos++; ibit++;
5024 if (bit_pos >= set_word_size || ibit == nbits)
5025 {
5026 if (word != 0 || ! cleared)
5027 {
5028 rtx datum = GEN_INT (word);
5029 rtx to_rtx;
5030
5031 /* The assumption here is that it is safe to use
5032 XEXP if the set is multi-word, but not if
5033 it's single-word. */
5034 if (GET_CODE (target) == MEM)
5035 to_rtx = adjust_address (target, mode, offset);
5036 else if (offset == 0)
5037 to_rtx = target;
5038 else
5039 abort ();
5040 emit_move_insn (to_rtx, datum);
5041 }
5042
5043 if (ibit == nbits)
5044 break;
5045 word = 0;
5046 bit_pos = 0;
5047 offset += set_word_size / BITS_PER_UNIT;
5048 }
5049 }
5050 }
5051 else if (!cleared)
5052 /* Don't bother clearing storage if the set is all ones. */
5053 if (TREE_CHAIN (elt) != NULL_TREE
5054 || (TREE_PURPOSE (elt) == NULL_TREE
5055 ? nbits != 1
5056 : ( ! host_integerp (TREE_VALUE (elt), 0)
5057 || ! host_integerp (TREE_PURPOSE (elt), 0)
5058 || (tree_low_cst (TREE_VALUE (elt), 0)
5059 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5060 != (HOST_WIDE_INT) nbits))))
5061 clear_storage (target, expr_size (exp));
5062
5063 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5064 {
5065 /* Start of range of element or NULL. */
5066 tree startbit = TREE_PURPOSE (elt);
5067 /* End of range of element, or element value. */
5068 tree endbit = TREE_VALUE (elt);
5069 HOST_WIDE_INT startb, endb;
5070 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5071
5072 bitlength_rtx = expand_expr (bitlength,
5073 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5074
5075 /* Handle non-range tuple element like [ expr ]. */
5076 if (startbit == NULL_TREE)
5077 {
5078 startbit = save_expr (endbit);
5079 endbit = startbit;
5080 }
5081
5082 startbit = convert (sizetype, startbit);
5083 endbit = convert (sizetype, endbit);
5084 if (! integer_zerop (domain_min))
5085 {
5086 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5087 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5088 }
5089 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5090 EXPAND_CONST_ADDRESS);
5091 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5092 EXPAND_CONST_ADDRESS);
5093
5094 if (REG_P (target))
5095 {
5096 targetx
5097 = assign_temp
5098 ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5099 (GET_MODE (target), 0),
5100 TYPE_QUAL_CONST)),
5101 0, 1, 1);
5102 emit_move_insn (targetx, target);
5103 }
5104
5105 else if (GET_CODE (target) == MEM)
5106 targetx = target;
5107 else
5108 abort ();
5109
5110 /* Optimization: If startbit and endbit are constants divisible
5111 by BITS_PER_UNIT, call memset instead. */
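/* E.g. for the constant range [8, 31]: STARTB == 8 and ENDB == 32 are
   both multiples of BITS_PER_UNIT, so this becomes the equivalent of
   memset (addr + 1, -1, 3) -- three whole bytes of ones instead of a
   setbits library call.  */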
5112 if (TARGET_MEM_FUNCTIONS
5113 && TREE_CODE (startbit) == INTEGER_CST
5114 && TREE_CODE (endbit) == INTEGER_CST
5115 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5116 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5117 {
5118 emit_library_call (memset_libfunc, LCT_NORMAL,
5119 VOIDmode, 3,
5120 plus_constant (XEXP (targetx, 0),
5121 startb / BITS_PER_UNIT),
5122 Pmode,
5123 constm1_rtx, TYPE_MODE (integer_type_node),
5124 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5125 TYPE_MODE (sizetype));
5126 }
5127 else
5128 emit_library_call (setbits_libfunc, LCT_NORMAL,
5129 VOIDmode, 4, XEXP (targetx, 0),
5130 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5131 startbit_rtx, TYPE_MODE (sizetype),
5132 endbit_rtx, TYPE_MODE (sizetype));
5133
5134 if (REG_P (target))
5135 emit_move_insn (target, targetx);
5136 }
5137 }
5138
5139 else
5140 abort ();
5141 }
5142
5143 /* Store the value of EXP (an expression tree)
5144 into a subfield of TARGET which has mode MODE and occupies
5145 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5146 If MODE is VOIDmode, it means that we are storing into a bit-field.
5147
5148 If VALUE_MODE is VOIDmode, return nothing in particular.
5149 UNSIGNEDP is not used in this case.
5150
5151 Otherwise, return an rtx for the value stored. This rtx
5152 has mode VALUE_MODE if that is convenient to do.
5153 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5154
5155 TYPE is the type of the underlying object,
5156
5157 ALIAS_SET is the alias set for the destination. This value will
5158 (in general) be different from that for TARGET, since TARGET is a
5159 reference to the containing structure. */
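/* For instance (layout hypothetical, it is target-dependent): storing
   into B of `struct { int a : 3; int b : 5; } s;' arrives here with
   BITSIZE == 5, BITPOS == 3 and MODE == VOIDmode, while a whole `int'
   member at byte offset 4 would use BITSIZE == 32, BITPOS == 32 and
   MODE == SImode.  */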
5160
5161 static rtx
5162 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5163 enum machine_mode mode, tree exp, enum machine_mode value_mode,
5164 int unsignedp, tree type, int alias_set)
5165 {
5166 HOST_WIDE_INT width_mask = 0;
5167
5168 if (TREE_CODE (exp) == ERROR_MARK)
5169 return const0_rtx;
5170
5171 /* If we have nothing to store, do nothing unless the expression has
5172 side-effects. */
5173 if (bitsize == 0)
5174 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5175 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5176 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5177
5178 /* If we are storing into an unaligned field of an aligned union that is
5179 in a register, we may have the mode of TARGET being an integer mode but
5180 MODE == BLKmode. In that case, get an aligned object whose size and
5181 alignment are the same as TARGET and store TARGET into it (we can avoid
5182 the store if the field being stored is the entire width of TARGET). Then
5183 call ourselves recursively to store the field into a BLKmode version of
5184 that object. Finally, load from the object into TARGET. This is not
5185 very efficient in general, but should only be slightly more expensive
5186 than the otherwise-required unaligned accesses. Perhaps this can be
5187 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5188 twice, once with emit_move_insn and once via store_field. */
5189
5190 if (mode == BLKmode
5191 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5192 {
5193 rtx object = assign_temp (type, 0, 1, 1);
5194 rtx blk_object = adjust_address (object, BLKmode, 0);
5195
5196 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5197 emit_move_insn (object, target);
5198
5199 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5200 alias_set);
5201
5202 emit_move_insn (target, object);
5203
5204 /* We want to return the BLKmode version of the data. */
5205 return blk_object;
5206 }
5207
5208 if (GET_CODE (target) == CONCAT)
5209 {
5210 /* We're storing into a struct containing a single __complex. */
5211
5212 if (bitpos != 0)
5213 abort ();
5214 return store_expr (exp, target, 0);
5215 }
5216
5217 /* If the structure is in a register or if the component
5218 is a bit field, we cannot use addressing to access it.
5219 Use bit-field techniques or SUBREG to store in it. */
5220
5221 if (mode == VOIDmode
5222 || (mode != BLKmode && ! direct_store[(int) mode]
5223 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5224 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5225 || GET_CODE (target) == REG
5226 || GET_CODE (target) == SUBREG
5227 /* If the field isn't aligned enough to store as an ordinary memref,
5228 store it as a bit field. */
5229 || (mode != BLKmode
5230 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5231 || bitpos % GET_MODE_ALIGNMENT (mode))
5232 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5233 || (bitpos % BITS_PER_UNIT != 0)))
5234 /* If the RHS and field are a constant size and the size of the
5235 RHS isn't the same size as the bitfield, we must use bitfield
5236 operations. */
5237 || (bitsize >= 0
5238 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5239 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5240 {
5241 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5242
5243 /* If BITSIZE is narrower than the size of the type of EXP
5244 we will be narrowing TEMP. Normally, what's wanted are the
5245 low-order bits. However, if EXP's type is a record and this is
5246          a big-endian machine, we want the upper BITSIZE bits.  */
5247 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5248 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5249 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5250 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5251 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5252 - bitsize),
5253 NULL_RTX, 1);
5254
5255 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5256 MODE. */
5257 if (mode != VOIDmode && mode != BLKmode
5258 && mode != TYPE_MODE (TREE_TYPE (exp)))
5259 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5260
5261 /* If the modes of TARGET and TEMP are both BLKmode, both
5262 must be in memory and BITPOS must be aligned on a byte
5263 boundary. If so, we simply do a block copy. */
5264 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5265 {
5266 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5267 || bitpos % BITS_PER_UNIT != 0)
5268 abort ();
5269
5270 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5271 emit_block_move (target, temp,
5272 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5273 / BITS_PER_UNIT),
5274 BLOCK_OP_NORMAL);
5275
5276 return value_mode == VOIDmode ? const0_rtx : target;
5277 }
5278
5279 /* Store the value in the bitfield. */
5280 store_bit_field (target, bitsize, bitpos, mode, temp,
5281 int_size_in_bytes (type));
5282
5283 if (value_mode != VOIDmode)
5284 {
5285 /* The caller wants an rtx for the value.
5286 If possible, avoid refetching from the bitfield itself. */
5287 if (width_mask != 0
5288 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5289 {
5290 tree count;
5291 enum machine_mode tmode;
5292
5293 tmode = GET_MODE (temp);
5294 if (tmode == VOIDmode)
5295 tmode = value_mode;
5296
5297 if (unsignedp)
5298 return expand_and (tmode, temp,
5299 gen_int_mode (width_mask, tmode),
5300 NULL_RTX);
5301
5302 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5303 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5304 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5305 }
5306
5307 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5308 NULL_RTX, value_mode, VOIDmode,
5309 int_size_in_bytes (type));
5310 }
5311 return const0_rtx;
5312 }
5313 else
5314 {
5315 rtx addr = XEXP (target, 0);
5316 rtx to_rtx = target;
5317
5318 /* If a value is wanted, it must be the lhs;
5319 so make the address stable for multiple use. */
5320
5321 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5322 && ! CONSTANT_ADDRESS_P (addr)
5323 /* A frame-pointer reference is already stable. */
5324 && ! (GET_CODE (addr) == PLUS
5325 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5326 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5327 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5328 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5329
5330 /* Now build a reference to just the desired component. */
5331
5332 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5333
5334 if (to_rtx == target)
5335 to_rtx = copy_rtx (to_rtx);
5336
5337 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5338 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5339 set_mem_alias_set (to_rtx, alias_set);
5340
5341 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5342 }
5343 }
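
/* Editorial illustration, not part of the original source: for a C
   fragment such as

       struct S { unsigned a : 3; unsigned b : 5; } s;
       s.b = 7;

   the assignment typically reaches store_field with MODE == VOIDmode
   (a bit-field), BITSIZE == 5 and BITPOS == 3 (the exact layout is
   target-dependent), so the store_bit_field path above performs the
   masking and shifting.  */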
5344 \f
5345 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5346 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5347 codes and find the ultimate containing object, which we return.
5348
5349 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5350 bit position, and *PUNSIGNEDP to the signedness of the field.
5351 If the position of the field is variable, we store a tree
5352 giving the variable offset (in units) in *POFFSET.
5353 This offset is in addition to the bit position.
5354 If the position is not variable, we store 0 in *POFFSET.
5355
5356 If any of the extraction expressions is volatile,
5357 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5358
5359 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5360 is a mode that can be used to access the field. In that case, *PBITSIZE
5361 is redundant.
5362
5363 If the field describes a variable-sized object, *PMODE is set to
5364 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5365 this case, but the address of the object can be found. */
5366
5367 tree
5368 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5369 HOST_WIDE_INT *pbitpos, tree *poffset,
5370 enum machine_mode *pmode, int *punsignedp,
5371 int *pvolatilep)
5372 {
5373 tree size_tree = 0;
5374 enum machine_mode mode = VOIDmode;
5375 tree offset = size_zero_node;
5376 tree bit_offset = bitsize_zero_node;
5377 tree placeholder_ptr = 0;
5378 tree tem;
5379
5380 /* First get the mode, signedness, and size. We do this from just the
5381 outermost expression. */
5382 if (TREE_CODE (exp) == COMPONENT_REF)
5383 {
5384 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5385 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5386 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5387
5388 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5389 }
5390 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5391 {
5392 size_tree = TREE_OPERAND (exp, 1);
5393 *punsignedp = TREE_UNSIGNED (exp);
5394 }
5395 else
5396 {
5397 mode = TYPE_MODE (TREE_TYPE (exp));
5398 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5399
5400 if (mode == BLKmode)
5401 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5402 else
5403 *pbitsize = GET_MODE_BITSIZE (mode);
5404 }
5405
5406 if (size_tree != 0)
5407 {
5408 if (! host_integerp (size_tree, 1))
5409 mode = BLKmode, *pbitsize = -1;
5410 else
5411 *pbitsize = tree_low_cst (size_tree, 1);
5412 }
5413
5414 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5415 and find the ultimate containing object. */
5416 while (1)
5417 {
5418 if (TREE_CODE (exp) == BIT_FIELD_REF)
5419 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5420 else if (TREE_CODE (exp) == COMPONENT_REF)
5421 {
5422 tree field = TREE_OPERAND (exp, 1);
5423 tree this_offset = DECL_FIELD_OFFSET (field);
5424
5425 /* If this field hasn't been filled in yet, don't go
5426 past it. This should only happen when folding expressions
5427 made during type construction. */
5428 if (this_offset == 0)
5429 break;
5430 else if (CONTAINS_PLACEHOLDER_P (this_offset))
5431 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5432
5433 offset = size_binop (PLUS_EXPR, offset, this_offset);
5434 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5435 DECL_FIELD_BIT_OFFSET (field));
5436
5437 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5438 }
5439
5440 else if (TREE_CODE (exp) == ARRAY_REF
5441 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5442 {
5443 tree index = TREE_OPERAND (exp, 1);
5444 tree array = TREE_OPERAND (exp, 0);
5445 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5446 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5447 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5448
5449 /* We assume all arrays have sizes that are a multiple of a byte.
5450 First subtract the lower bound, if any, in the type of the
5451 index, then convert to sizetype and multiply by the size of the
5452 array element. */
5453 if (low_bound != 0 && ! integer_zerop (low_bound))
5454 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5455 index, low_bound));
5456
5457 /* If the index has a self-referential type, pass it to a
5458                WITH_RECORD_EXPR; if the component size does, pass our
5459 component to one. */
5460 if (CONTAINS_PLACEHOLDER_P (index))
5461 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5462 if (CONTAINS_PLACEHOLDER_P (unit_size))
5463 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5464
5465 offset = size_binop (PLUS_EXPR, offset,
5466 size_binop (MULT_EXPR,
5467 convert (sizetype, index),
5468 unit_size));
5469 }
5470
5471 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5472 {
5473 tree new = find_placeholder (exp, &placeholder_ptr);
5474
5475 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5476 We might have been called from tree optimization where we
5477 haven't set up an object yet. */
5478 if (new == 0)
5479 break;
5480 else
5481 exp = new;
5482
5483 continue;
5484 }
5485
5486       /* We can go inside most conversions: all NON_LVALUE_EXPRs, all normal
5487 conversions that don't change the mode, and all view conversions
5488 except those that need to "step up" the alignment. */
5489 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5490 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5491 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5492 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5493 && STRICT_ALIGNMENT
5494 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5495 < BIGGEST_ALIGNMENT)
5496 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5497 || TYPE_ALIGN_OK (TREE_TYPE
5498 (TREE_OPERAND (exp, 0))))))
5499 && ! ((TREE_CODE (exp) == NOP_EXPR
5500 || TREE_CODE (exp) == CONVERT_EXPR)
5501 && (TYPE_MODE (TREE_TYPE (exp))
5502 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5503 break;
5504
5505 /* If any reference in the chain is volatile, the effect is volatile. */
5506 if (TREE_THIS_VOLATILE (exp))
5507 *pvolatilep = 1;
5508
5509 exp = TREE_OPERAND (exp, 0);
5510 }
5511
5512 /* If OFFSET is constant, see if we can return the whole thing as a
5513 constant bit position. Otherwise, split it up. */
5514 if (host_integerp (offset, 0)
5515 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5516 bitsize_unit_node))
5517 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5518 && host_integerp (tem, 0))
5519 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5520 else
5521 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5522
5523 *pmode = mode;
5524 return exp;
5525 }
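
/* Editorial illustration, not part of the original source: for a
   bit-field reference `s.b' naming a 5-bit field at bit offset 3,
   get_inner_reference returns the VAR_DECL for `s' and sets
   *PBITSIZE == 5, *PBITPOS == 3, *POFFSET == 0 and *PMODE == VOIDmode.
   For a variable access such as `a[i].f', the variable part of the
   position comes back in *POFFSET as a tree of the form
   `i * sizeof (element)' instead.  */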
5526
5527 /* Return 1 if T is an expression that get_inner_reference handles. */
5528
5529 int
5530 handled_component_p (tree t)
5531 {
5532 switch (TREE_CODE (t))
5533 {
5534 case BIT_FIELD_REF:
5535 case COMPONENT_REF:
5536 case ARRAY_REF:
5537 case ARRAY_RANGE_REF:
5538 case NON_LVALUE_EXPR:
5539 case VIEW_CONVERT_EXPR:
5540 return 1;
5541
5542 /* ??? Sure they are handled, but get_inner_reference may return
5543 a different PBITSIZE, depending upon whether the expression is
5544 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
5545 case NOP_EXPR:
5546 case CONVERT_EXPR:
5547 return (TYPE_MODE (TREE_TYPE (t))
5548 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5549
5550 default:
5551 return 0;
5552 }
5553 }
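
/* Editorial note, not part of the original source: a typical use of
   handled_component_p is to strip a chain of references down to the
   base object, e.g.

       while (handled_component_p (t))
         t = TREE_OPERAND (t, 0);

   which mirrors the walk get_inner_reference performs while it
   accumulates the byte and bit offsets.  */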
5554 \f
5555 /* Given an rtx VALUE that may contain additions and multiplications, return
5556 an equivalent value that just refers to a register, memory, or constant.
5557 This is done by generating instructions to perform the arithmetic and
5558 returning a pseudo-register containing the value.
5559
5560 The returned value may be a REG, SUBREG, MEM or constant. */
5561
5562 rtx
5563 force_operand (rtx value, rtx target)
5564 {
5565 rtx op1, op2;
5566 /* Use subtarget as the target for operand 0 of a binary operation. */
5567 rtx subtarget = get_subtarget (target);
5568 enum rtx_code code = GET_CODE (value);
5569
5570 /* Check for a PIC address load. */
5571 if ((code == PLUS || code == MINUS)
5572 && XEXP (value, 0) == pic_offset_table_rtx
5573 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5574 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5575 || GET_CODE (XEXP (value, 1)) == CONST))
5576 {
5577 if (!subtarget)
5578 subtarget = gen_reg_rtx (GET_MODE (value));
5579 emit_move_insn (subtarget, value);
5580 return subtarget;
5581 }
5582
5583 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5584 {
5585 if (!target)
5586 target = gen_reg_rtx (GET_MODE (value));
5587 convert_move (target, force_operand (XEXP (value, 0), NULL),
5588 code == ZERO_EXTEND);
5589 return target;
5590 }
5591
5592 if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
5593 {
5594 op2 = XEXP (value, 1);
5595 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
5596 subtarget = 0;
5597 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5598 {
5599 code = PLUS;
5600 op2 = negate_rtx (GET_MODE (value), op2);
5601 }
5602
5603 /* Check for an addition with OP2 a constant integer and our first
5604 operand a PLUS of a virtual register and something else. In that
5605 case, we want to emit the sum of the virtual register and the
5606 constant first and then add the other value. This allows virtual
5607 register instantiation to simply modify the constant rather than
5608 creating another one around this addition. */
5609 if (code == PLUS && GET_CODE (op2) == CONST_INT
5610 && GET_CODE (XEXP (value, 0)) == PLUS
5611 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5612 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5613 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5614 {
5615 rtx temp = expand_simple_binop (GET_MODE (value), code,
5616 XEXP (XEXP (value, 0), 0), op2,
5617 subtarget, 0, OPTAB_LIB_WIDEN);
5618 return expand_simple_binop (GET_MODE (value), code, temp,
5619 force_operand (XEXP (XEXP (value,
5620 0), 1), 0),
5621 target, 0, OPTAB_LIB_WIDEN);
5622 }
5623
5624 op1 = force_operand (XEXP (value, 0), subtarget);
5625 op2 = force_operand (op2, NULL_RTX);
5626 switch (code)
5627 {
5628 case MULT:
5629 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5630 case DIV:
5631 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5632 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5633 target, 1, OPTAB_LIB_WIDEN);
5634 else
5635 return expand_divmod (0,
5636 FLOAT_MODE_P (GET_MODE (value))
5637 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5638 GET_MODE (value), op1, op2, target, 0);
5639 break;
5640 case MOD:
5641 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5642 target, 0);
5643 break;
5644 case UDIV:
5645 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5646 target, 1);
5647 break;
5648 case UMOD:
5649 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5650 target, 1);
5651 break;
5652 case ASHIFTRT:
5653 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5654 target, 0, OPTAB_LIB_WIDEN);
5655 break;
5656 default:
5657 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5658 target, 1, OPTAB_LIB_WIDEN);
5659 }
5660 }
5661 if (GET_RTX_CLASS (code) == '1')
5662 {
5663 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5664 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5665 }
5666
5667 #ifdef INSN_SCHEDULING
5668   /* On machines that have insn scheduling, we want all memory references to be
5669 explicit, so we need to deal with such paradoxical SUBREGs. */
5670 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5671 && (GET_MODE_SIZE (GET_MODE (value))
5672 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5673 value
5674 = simplify_gen_subreg (GET_MODE (value),
5675 force_reg (GET_MODE (SUBREG_REG (value)),
5676 force_operand (SUBREG_REG (value),
5677 NULL_RTX)),
5678 GET_MODE (SUBREG_REG (value)),
5679 SUBREG_BYTE (value));
5680 #endif
5681
5682 return value;
5683 }
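
/* Editorial illustration, not part of the original source: given an
   address computation such as

       (plus (mult (reg 100) (const_int 4)) (reg 101))

   force_operand emits a multiply and an add and typically returns a
   pseudo register holding the sum, so the caller ends up with a plain
   REG instead of arithmetic RTL.  */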
5684 \f
5685 /* Subroutine of expand_expr: return nonzero iff there is no way that
5686 EXP can reference X, which is being modified. TOP_P is nonzero if this
5687 call is going to be used to determine whether we need a temporary
5688 for EXP, as opposed to a recursive call to this function.
5689
5690 It is always safe for this routine to return zero since it merely
5691 searches for optimization opportunities. */
5692
5693 int
5694 safe_from_p (rtx x, tree exp, int top_p)
5695 {
5696 rtx exp_rtl = 0;
5697 int i, nops;
5698 static tree save_expr_list;
5699
5700 if (x == 0
5701 /* If EXP has varying size, we MUST use a target since we currently
5702 have no way of allocating temporaries of variable size
5703 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5704 So we assume here that something at a higher level has prevented a
5705 clash. This is somewhat bogus, but the best we can do. Only
5706 do this when X is BLKmode and when we are at the top level. */
5707 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5708 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5709 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5710 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5711 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5712 != INTEGER_CST)
5713 && GET_MODE (x) == BLKmode)
5714 /* If X is in the outgoing argument area, it is always safe. */
5715 || (GET_CODE (x) == MEM
5716 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5717 || (GET_CODE (XEXP (x, 0)) == PLUS
5718 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5719 return 1;
5720
5721 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5722 find the underlying pseudo. */
5723 if (GET_CODE (x) == SUBREG)
5724 {
5725 x = SUBREG_REG (x);
5726 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5727 return 0;
5728 }
5729
5730 /* A SAVE_EXPR might appear many times in the expression passed to the
5731 top-level safe_from_p call, and if it has a complex subexpression,
5732 examining it multiple times could result in a combinatorial explosion.
5733 E.g. on an Alpha running at least 200MHz, a Fortran testcase compiled
5734 with optimization took about 28 minutes to compile -- even though it was
5735 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5736 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5737 we have processed. Note that the only test of top_p was above. */
5738
5739 if (top_p)
5740 {
5741 int rtn;
5742 tree t;
5743
5744 save_expr_list = 0;
5745
5746 rtn = safe_from_p (x, exp, 0);
5747
5748 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5749 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5750
5751 return rtn;
5752 }
5753
5754 /* Now look at our tree code and possibly recurse. */
5755 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5756 {
5757 case 'd':
5758 exp_rtl = DECL_RTL_IF_SET (exp);
5759 break;
5760
5761 case 'c':
5762 return 1;
5763
5764 case 'x':
5765 if (TREE_CODE (exp) == TREE_LIST)
5766 {
5767 while (1)
5768 {
5769 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5770 return 0;
5771 exp = TREE_CHAIN (exp);
5772 if (!exp)
5773 return 1;
5774 if (TREE_CODE (exp) != TREE_LIST)
5775 return safe_from_p (x, exp, 0);
5776 }
5777 }
5778 else if (TREE_CODE (exp) == ERROR_MARK)
5779 return 1; /* An already-visited SAVE_EXPR? */
5780 else
5781 return 0;
5782
5783 case '2':
5784 case '<':
5785 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
5786 return 0;
5787 /* FALLTHRU */
5788
5789 case '1':
5790 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5791
5792 case 'e':
5793 case 'r':
5794 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5795 the expression. If it is set, we conflict iff we are that rtx or
5796 both are in memory. Otherwise, we check all operands of the
5797 expression recursively. */
5798
5799 switch (TREE_CODE (exp))
5800 {
5801 case ADDR_EXPR:
5802 /* If the operand is static or we are static, we can't conflict.
5803 Likewise if we don't conflict with the operand at all. */
5804 if (staticp (TREE_OPERAND (exp, 0))
5805 || TREE_STATIC (exp)
5806 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5807 return 1;
5808
5809 /* Otherwise, the only way this can conflict is if we are taking
5810            the address of a DECL whose address is part of X, which is
5811 very rare. */
5812 exp = TREE_OPERAND (exp, 0);
5813 if (DECL_P (exp))
5814 {
5815 if (!DECL_RTL_SET_P (exp)
5816 || GET_CODE (DECL_RTL (exp)) != MEM)
5817 return 0;
5818 else
5819 exp_rtl = XEXP (DECL_RTL (exp), 0);
5820 }
5821 break;
5822
5823 case INDIRECT_REF:
5824 if (GET_CODE (x) == MEM
5825 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5826 get_alias_set (exp)))
5827 return 0;
5828 break;
5829
5830 case CALL_EXPR:
5831 /* Assume that the call will clobber all hard registers and
5832 all of memory. */
5833 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5834 || GET_CODE (x) == MEM)
5835 return 0;
5836 break;
5837
5838 case RTL_EXPR:
5839 /* If a sequence exists, we would have to scan every instruction
5840 in the sequence to see if it was safe. This is probably not
5841 worthwhile. */
5842 if (RTL_EXPR_SEQUENCE (exp))
5843 return 0;
5844
5845 exp_rtl = RTL_EXPR_RTL (exp);
5846 break;
5847
5848 case WITH_CLEANUP_EXPR:
5849 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5850 break;
5851
5852 case CLEANUP_POINT_EXPR:
5853 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5854
5855 case SAVE_EXPR:
5856 exp_rtl = SAVE_EXPR_RTL (exp);
5857 if (exp_rtl)
5858 break;
5859
5860 /* If we've already scanned this, don't do it again. Otherwise,
5861 show we've scanned it and record for clearing the flag if we're
5862 going on. */
5863 if (TREE_PRIVATE (exp))
5864 return 1;
5865
5866 TREE_PRIVATE (exp) = 1;
5867 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5868 {
5869 TREE_PRIVATE (exp) = 0;
5870 return 0;
5871 }
5872
5873 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5874 return 1;
5875
5876 case BIND_EXPR:
5877 /* The only operand we look at is operand 1. The rest aren't
5878 part of the expression. */
5879 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5880
5881 default:
5882 break;
5883 }
5884
5885 /* If we have an rtx, we do not need to scan our operands. */
5886 if (exp_rtl)
5887 break;
5888
5889 nops = first_rtl_op (TREE_CODE (exp));
5890 for (i = 0; i < nops; i++)
5891 if (TREE_OPERAND (exp, i) != 0
5892 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5893 return 0;
5894
5895 /* If this is a language-specific tree code, it may require
5896 special handling. */
5897 if ((unsigned int) TREE_CODE (exp)
5898 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5899 && !(*lang_hooks.safe_from_p) (x, exp))
5900 return 0;
5901 }
5902
5903 /* If we have an rtl, find any enclosed object. Then see if we conflict
5904 with it. */
5905 if (exp_rtl)
5906 {
5907 if (GET_CODE (exp_rtl) == SUBREG)
5908 {
5909 exp_rtl = SUBREG_REG (exp_rtl);
5910 if (GET_CODE (exp_rtl) == REG
5911 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5912 return 0;
5913 }
5914
5915 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5916 are memory and they conflict. */
5917 return ! (rtx_equal_p (x, exp_rtl)
5918 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5919 && true_dependence (exp_rtl, VOIDmode, x,
5920 rtx_addr_varies_p)));
5921 }
5922
5923 /* If we reach here, it is safe. */
5924 return 1;
5925 }
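
/* Editorial illustration, not part of the original source: when
   expanding an assignment like `x = y + x', the expander can ask
   safe_from_p (DECL_RTL (x), <the tree for y + x>, 1).  The answer
   is 0 because the right-hand side references X itself, so the sum
   must be computed into a temporary rather than directly into X's
   location.  */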
5926
5927 /* Subroutine of expand_expr: return rtx if EXP is a
5928 variable or parameter; else return 0. */
5929
5930 static rtx
5931 var_rtx (tree exp)
5932 {
5933 STRIP_NOPS (exp);
5934 switch (TREE_CODE (exp))
5935 {
5936 case PARM_DECL:
5937 case VAR_DECL:
5938 return DECL_RTL (exp);
5939 default:
5940 return 0;
5941 }
5942 }
5943
5944 #ifdef MAX_INTEGER_COMPUTATION_MODE
5945
5946 void
5947 check_max_integer_computation_mode (tree exp)
5948 {
5949 enum tree_code code;
5950 enum machine_mode mode;
5951
5952 /* Strip any NOPs that don't change the mode. */
5953 STRIP_NOPS (exp);
5954 code = TREE_CODE (exp);
5955
5956 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5957 if (code == NOP_EXPR
5958 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5959 return;
5960
5961 /* First check the type of the overall operation. We need only look at
5962 unary, binary and relational operations. */
5963 if (TREE_CODE_CLASS (code) == '1'
5964 || TREE_CODE_CLASS (code) == '2'
5965 || TREE_CODE_CLASS (code) == '<')
5966 {
5967 mode = TYPE_MODE (TREE_TYPE (exp));
5968 if (GET_MODE_CLASS (mode) == MODE_INT
5969 && mode > MAX_INTEGER_COMPUTATION_MODE)
5970 internal_error ("unsupported wide integer operation");
5971 }
5972
5973 /* Check operand of a unary op. */
5974 if (TREE_CODE_CLASS (code) == '1')
5975 {
5976 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5977 if (GET_MODE_CLASS (mode) == MODE_INT
5978 && mode > MAX_INTEGER_COMPUTATION_MODE)
5979 internal_error ("unsupported wide integer operation");
5980 }
5981
5982 /* Check operands of a binary/comparison op. */
5983 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5984 {
5985 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5986 if (GET_MODE_CLASS (mode) == MODE_INT
5987 && mode > MAX_INTEGER_COMPUTATION_MODE)
5988 internal_error ("unsupported wide integer operation");
5989
5990 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5991 if (GET_MODE_CLASS (mode) == MODE_INT
5992 && mode > MAX_INTEGER_COMPUTATION_MODE)
5993 internal_error ("unsupported wide integer operation");
5994 }
5995 }
5996 #endif
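
/* Editorial note, not part of the original source: a target might
   define MAX_INTEGER_COMPUTATION_MODE (say, to DImode) so that an
   attempt to expand wider integer arithmetic, e.g. a TImode add, is
   reported via internal_error above instead of being expanded into
   insns the machine description cannot handle.  */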
5997 \f
5998 /* Return the highest power of two that EXP is known to be a multiple of.
5999 This is used in updating alignment of MEMs in array references. */
6000
6001 static unsigned HOST_WIDE_INT
6002 highest_pow2_factor (tree exp)
6003 {
6004 unsigned HOST_WIDE_INT c0, c1;
6005
6006 switch (TREE_CODE (exp))
6007 {
6008 case INTEGER_CST:
6009 /* We can find the lowest bit that's a one. If the low
6010 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6011 We need to handle this case since we can find it in a COND_EXPR,
6012 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6013 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6014 later ICE. */
6015 if (TREE_CONSTANT_OVERFLOW (exp))
6016 return BIGGEST_ALIGNMENT;
6017 else
6018 {
6019 /* Note: tree_low_cst is intentionally not used here,
6020 we don't care about the upper bits. */
6021 c0 = TREE_INT_CST_LOW (exp);
6022 c0 &= -c0;
6023 return c0 ? c0 : BIGGEST_ALIGNMENT;
6024 }
6025 break;
6026
6027 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6028 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6029 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6030 return MIN (c0, c1);
6031
6032 case MULT_EXPR:
6033 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6034 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6035 return c0 * c1;
6036
6037 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6038 case CEIL_DIV_EXPR:
6039 if (integer_pow2p (TREE_OPERAND (exp, 1))
6040 && host_integerp (TREE_OPERAND (exp, 1), 1))
6041 {
6042 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6043 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6044 return MAX (1, c0 / c1);
6045 }
6046 break;
6047
6048 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6049 case SAVE_EXPR: case WITH_RECORD_EXPR:
6050 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6051
6052 case COMPOUND_EXPR:
6053 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6054
6055 case COND_EXPR:
6056 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6057 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6058 return MIN (c0, c1);
6059
6060 default:
6061 break;
6062 }
6063
6064 return 1;
6065 }
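
/* Editorial illustration, not part of the original source: for an
   offset expression such as `i * 12 + 8', highest_pow2_factor finds
   1 * 4 == 4 for the MULT (12 == 4 * 3, and nothing is known about
   `i'), then MIN (4, 8) == 4 for the PLUS, so a MEM at that offset
   may be assumed to be 4-byte aligned.  */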
6066
6067 /* Similar, except that it is known that the expression must be a multiple
6068 of the alignment of TYPE. */
6069
6070 static unsigned HOST_WIDE_INT
6071 highest_pow2_factor_for_type (tree type, tree exp)
6072 {
6073 unsigned HOST_WIDE_INT type_align, factor;
6074
6075 factor = highest_pow2_factor (exp);
6076 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6077 return MAX (factor, type_align);
6078 }
6079 \f
6080 /* Return an object on the placeholder list that matches EXP, a
6081 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6082 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6083 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6084 is a location which initially points to a starting location in the
6085 placeholder list (zero means start of the list) and where a pointer into
6086 the placeholder list at which the object is found is placed. */
6087
6088 tree
6089 find_placeholder (tree exp, tree *plist)
6090 {
6091 tree type = TREE_TYPE (exp);
6092 tree placeholder_expr;
6093
6094 for (placeholder_expr
6095 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6096 placeholder_expr != 0;
6097 placeholder_expr = TREE_CHAIN (placeholder_expr))
6098 {
6099 tree need_type = TYPE_MAIN_VARIANT (type);
6100 tree elt;
6101
6102 /* Find the outermost reference that is of the type we want. If none,
6103 see if any object has a type that is a pointer to the type we
6104 want. */
6105 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6106 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6107 || TREE_CODE (elt) == COND_EXPR)
6108 ? TREE_OPERAND (elt, 1)
6109 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6110 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6111 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6112 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6113 ? TREE_OPERAND (elt, 0) : 0))
6114 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6115 {
6116 if (plist)
6117 *plist = placeholder_expr;
6118 return elt;
6119 }
6120
6121 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6122 elt
6123 = ((TREE_CODE (elt) == COMPOUND_EXPR
6124 || TREE_CODE (elt) == COND_EXPR)
6125 ? TREE_OPERAND (elt, 1)
6126 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6127 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6128 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6129 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6130 ? TREE_OPERAND (elt, 0) : 0))
6131 if (POINTER_TYPE_P (TREE_TYPE (elt))
6132 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6133 == need_type))
6134 {
6135 if (plist)
6136 *plist = placeholder_expr;
6137 return build1 (INDIRECT_REF, need_type, elt);
6138 }
6139 }
6140
6141 return 0;
6142 }
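
/* Editorial note, not part of the original source: PLACEHOLDER_EXPRs
   arise for self-referential types, e.g. an Ada-style record in which
   an array component's bound is given by another field of the same
   record.  The size expression refers to "the object at hand" via a
   PLACEHOLDER_EXPR, and find_placeholder substitutes whatever object
   an enclosing WITH_RECORD_EXPR pushed onto placeholder_list.  */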
6143
6144 /* Subroutine of expand_expr. Expand the two operands of a binary
6145    expression, EXP0 and EXP1, placing the results in OP0 and OP1.
6146 The value may be stored in TARGET if TARGET is nonzero. The
6147 MODIFIER argument is as documented by expand_expr. */
6148
6149 static void
6150 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6151 enum expand_modifier modifier)
6152 {
6153 if (! safe_from_p (target, exp1, 1))
6154 target = 0;
6155 if (operand_equal_p (exp0, exp1, 0))
6156 {
6157 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6158 *op1 = copy_rtx (*op0);
6159 }
6160 else
6161 {
6162 /* If we need to preserve evaluation order, copy exp0 into its own
6163 temporary variable so that it can't be clobbered by exp1. */
6164 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6165 exp0 = save_expr (exp0);
6166 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6167 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6168 }
6169 }
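
/* Editorial note, not part of the original source: the
   operand_equal_p shortcut above matters for expressions such as
   `a * a'; the subtree is expanded once and the second operand is a
   copy_rtx of the first, instead of the same load or address
   computation being emitted twice.  */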
6170
6171 \f
6172 /* expand_expr: generate code for computing expression EXP.
6173 An rtx for the computed value is returned. The value is never null.
6174 In the case of a void EXP, const0_rtx is returned.
6175
6176 The value may be stored in TARGET if TARGET is nonzero.
6177 TARGET is just a suggestion; callers must assume that
6178 the rtx returned may not be the same as TARGET.
6179
6180 If TARGET is CONST0_RTX, it means that the value will be ignored.
6181
6182 If TMODE is not VOIDmode, it suggests generating the
6183 result in mode TMODE. But this is done only when convenient.
6184    Otherwise, TMODE is ignored and the value is generated in its natural mode.
6185 TMODE is just a suggestion; callers must assume that
6186 the rtx returned may not have mode TMODE.
6187
6188 Note that TARGET may have neither TMODE nor MODE. In that case, it
6189 probably will not be used.
6190
6191 If MODIFIER is EXPAND_SUM then when EXP is an addition
6192 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6193 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6194 products as above, or REG or MEM, or constant.
6195 Ordinarily in such cases we would output mul or add instructions
6196 and then return a pseudo reg containing the sum.
6197
6198 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6199 it also marks a label as absolutely required (it can't be dead).
6200 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6201 This is used for outputting expressions used in initializers.
6202
6203 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6204 with a constant address even if that address is not normally legitimate.
6205 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6206
6207 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6208 a call parameter. Such targets require special care as we haven't yet
6209 marked TARGET so that it's safe from being trashed by libcalls. We
6210 don't want to use TARGET for anything but the final result;
6211    intermediate values must go elsewhere.  Additionally, calls to
6212 emit_block_move will be flagged with BLOCK_OP_CALL_PARM. */
6213
6214 rtx
6215 expand_expr (tree exp, rtx target, enum machine_mode tmode,
6216 enum expand_modifier modifier)
6217 {
6218 rtx op0, op1, temp;
6219 tree type = TREE_TYPE (exp);
6220 int unsignedp = TREE_UNSIGNED (type);
6221 enum machine_mode mode;
6222 enum tree_code code = TREE_CODE (exp);
6223 optab this_optab;
6224 rtx subtarget, original_target;
6225 int ignore;
6226 tree context;
6227
6228 /* Handle ERROR_MARK before anybody tries to access its type. */
6229 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6230 {
6231 op0 = CONST0_RTX (tmode);
6232 if (op0 != 0)
6233 return op0;
6234 return const0_rtx;
6235 }
6236
6237 mode = TYPE_MODE (type);
6238 /* Use subtarget as the target for operand 0 of a binary operation. */
6239 subtarget = get_subtarget (target);
6240 original_target = target;
6241 ignore = (target == const0_rtx
6242 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6243 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6244 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6245 && TREE_CODE (type) == VOID_TYPE));
6246
6247 /* If we are going to ignore this result, we need only do something
6248 if there is a side-effect somewhere in the expression. If there
6249 is, short-circuit the most common cases here. Note that we must
6250 not call expand_expr with anything but const0_rtx in case this
6251 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6252
6253 if (ignore)
6254 {
6255 if (! TREE_SIDE_EFFECTS (exp))
6256 return const0_rtx;
6257
6258 /* Ensure we reference a volatile object even if value is ignored, but
6259 don't do this if all we are doing is taking its address. */
6260 if (TREE_THIS_VOLATILE (exp)
6261 && TREE_CODE (exp) != FUNCTION_DECL
6262 && mode != VOIDmode && mode != BLKmode
6263 && modifier != EXPAND_CONST_ADDRESS)
6264 {
6265 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6266 if (GET_CODE (temp) == MEM)
6267 temp = copy_to_reg (temp);
6268 return const0_rtx;
6269 }
6270
6271 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6272 || code == INDIRECT_REF || code == BUFFER_REF)
6273 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6274 modifier);
6275
6276 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6277 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6278 {
6279 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6280 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6281 return const0_rtx;
6282 }
6283 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6284 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6285 /* If the second operand has no side effects, just evaluate
6286 the first. */
6287 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6288 modifier);
6289 else if (code == BIT_FIELD_REF)
6290 {
6291 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6292 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6293 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6294 return const0_rtx;
6295 }
6296
6297 target = 0;
6298 }
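
  /* Editorial note, not part of the original source: the `ignore'
     handling above means that a discarded expression with side
     effects, e.g. `(void) (f () + x);', has only its operands
     expanded (each with const0_rtx as the target) and yields
     const0_rtx itself; a discarded expression with no side effects
     is dropped without expanding anything.  */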
6299
6300 #ifdef MAX_INTEGER_COMPUTATION_MODE
6301 /* Only check stuff here if the mode we want is different from the mode
6302 of the expression; if it's the same, check_max_integer_computation_mode
6303 will handle it. Do we really need to check this stuff at all? */
6304
6305 if (target
6306 && GET_MODE (target) != mode
6307 && TREE_CODE (exp) != INTEGER_CST
6308 && TREE_CODE (exp) != PARM_DECL
6309 && TREE_CODE (exp) != ARRAY_REF
6310 && TREE_CODE (exp) != ARRAY_RANGE_REF
6311 && TREE_CODE (exp) != COMPONENT_REF
6312 && TREE_CODE (exp) != BIT_FIELD_REF
6313 && TREE_CODE (exp) != INDIRECT_REF
6314 && TREE_CODE (exp) != CALL_EXPR
6315 && TREE_CODE (exp) != VAR_DECL
6316 && TREE_CODE (exp) != RTL_EXPR)
6317 {
6318 enum machine_mode mode = GET_MODE (target);
6319
6320 if (GET_MODE_CLASS (mode) == MODE_INT
6321 && mode > MAX_INTEGER_COMPUTATION_MODE)
6322 internal_error ("unsupported wide integer operation");
6323 }
6324
6325 if (tmode != mode
6326 && TREE_CODE (exp) != INTEGER_CST
6327 && TREE_CODE (exp) != PARM_DECL
6328 && TREE_CODE (exp) != ARRAY_REF
6329 && TREE_CODE (exp) != ARRAY_RANGE_REF
6330 && TREE_CODE (exp) != COMPONENT_REF
6331 && TREE_CODE (exp) != BIT_FIELD_REF
6332 && TREE_CODE (exp) != INDIRECT_REF
6333 && TREE_CODE (exp) != VAR_DECL
6334 && TREE_CODE (exp) != CALL_EXPR
6335 && TREE_CODE (exp) != RTL_EXPR
6336 && GET_MODE_CLASS (tmode) == MODE_INT
6337 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6338 internal_error ("unsupported wide integer operation");
6339
6340 check_max_integer_computation_mode (exp);
6341 #endif
6342
6343 /* If will do cse, generate all results into pseudo registers
6344 since 1) that allows cse to find more things
6345 and 2) otherwise cse could produce an insn the machine
6346 cannot support. An exception is a CONSTRUCTOR into a multi-word
6347 MEM: that's much more likely to be most efficient into the MEM.
6348 Another is a CALL_EXPR which must return in memory. */
6349
6350 if (! cse_not_expected && mode != BLKmode && target
6351 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6352 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6353 && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
6354 target = 0;
6355
6356 switch (code)
6357 {
6358 case LABEL_DECL:
6359 {
6360 tree function = decl_function_context (exp);
6361 /* Labels in containing functions, or labels used from initializers,
6362 must be forced. */
6363 if (modifier == EXPAND_INITIALIZER
6364 || (function != current_function_decl
6365 && function != inline_function_decl
6366 && function != 0))
6367 temp = force_label_rtx (exp);
6368 else
6369 temp = label_rtx (exp);
6370
6371 temp = gen_rtx_MEM (FUNCTION_MODE, gen_rtx_LABEL_REF (Pmode, temp));
6372 if (function != current_function_decl
6373 && function != inline_function_decl && function != 0)
6374 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6375 return temp;
6376 }
6377
6378 case PARM_DECL:
6379 if (!DECL_RTL_SET_P (exp))
6380 {
6381 error ("%Jprior parameter's size depends on '%D'", exp, exp);
6382 return CONST0_RTX (mode);
6383 }
6384
6385 /* ... fall through ... */
6386
6387 case VAR_DECL:
6388 /* If a static var's type was incomplete when the decl was written,
6389 but the type is complete now, lay out the decl now. */
6390 if (DECL_SIZE (exp) == 0
6391 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6392 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6393 layout_decl (exp, 0);
6394
6395 /* ... fall through ... */
6396
6397 case FUNCTION_DECL:
6398 case RESULT_DECL:
6399 if (DECL_RTL (exp) == 0)
6400 abort ();
6401
6402       /* Ensure the variable is marked as used even if it doesn't go through
6403          a parser.  If it hasn't been used yet, write out an external
6404 definition. */
6405 if (! TREE_USED (exp))
6406 {
6407 assemble_external (exp);
6408 TREE_USED (exp) = 1;
6409 }
6410
6411 /* Show we haven't gotten RTL for this yet. */
6412 temp = 0;
6413
6414 /* Handle variables inherited from containing functions. */
6415 context = decl_function_context (exp);
6416
6417 /* We treat inline_function_decl as an alias for the current function
6418 because that is the inline function whose vars, types, etc.
6419 are being merged into the current function.
6420 See expand_inline_function. */
6421
6422 if (context != 0 && context != current_function_decl
6423 && context != inline_function_decl
6424 /* If var is static, we don't need a static chain to access it. */
6425 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6426 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6427 {
6428 rtx addr;
6429
6430 /* Mark as non-local and addressable. */
6431 DECL_NONLOCAL (exp) = 1;
6432 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6433 abort ();
6434 (*lang_hooks.mark_addressable) (exp);
6435 if (GET_CODE (DECL_RTL (exp)) != MEM)
6436 abort ();
6437 addr = XEXP (DECL_RTL (exp), 0);
6438 if (GET_CODE (addr) == MEM)
6439 addr
6440 = replace_equiv_address (addr,
6441 fix_lexical_addr (XEXP (addr, 0), exp));
6442 else
6443 addr = fix_lexical_addr (addr, exp);
6444
6445 temp = replace_equiv_address (DECL_RTL (exp), addr);
6446 }
6447
6448 /* This is the case of an array whose size is to be determined
6449 from its initializer, while the initializer is still being parsed.
6450 See expand_decl. */
6451
6452 else if (GET_CODE (DECL_RTL (exp)) == MEM
6453 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6454 temp = validize_mem (DECL_RTL (exp));
6455
6456 /* If DECL_RTL is memory, we are in the normal case and either
6457 the address is not valid or it is not a register and -fforce-addr
6458 is specified, get the address into a register. */
6459
6460 else if (GET_CODE (DECL_RTL (exp)) == MEM
6461 && modifier != EXPAND_CONST_ADDRESS
6462 && modifier != EXPAND_SUM
6463 && modifier != EXPAND_INITIALIZER
6464 && (! memory_address_p (DECL_MODE (exp),
6465 XEXP (DECL_RTL (exp), 0))
6466 || (flag_force_addr
6467 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6468 temp = replace_equiv_address (DECL_RTL (exp),
6469 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6470
6471 /* If we got something, return it. But first, set the alignment
6472 if the address is a register. */
6473 if (temp != 0)
6474 {
6475 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6476 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6477
6478 return temp;
6479 }
6480
6481 /* If the mode of DECL_RTL does not match that of the decl, it
6482 must be a promoted value. We return a SUBREG of the wanted mode,
6483 but mark it so that we know that it was already extended. */
6484
6485 if (GET_CODE (DECL_RTL (exp)) == REG
6486 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6487 {
6488 /* Get the signedness used for this variable. Ensure we get the
6489 same mode we got when the variable was declared. */
6490 if (GET_MODE (DECL_RTL (exp))
6491 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6492 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6493 abort ();
6494
6495 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6496 SUBREG_PROMOTED_VAR_P (temp) = 1;
6497 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6498 return temp;
6499 }
6500
6501 return DECL_RTL (exp);
6502
6503 case INTEGER_CST:
6504 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6505 TREE_INT_CST_HIGH (exp), mode);
6506
6507 /* ??? If overflow is set, fold will have done an incomplete job,
6508 which can result in (plus xx (const_int 0)), which can get
6509 simplified by validate_replace_rtx during virtual register
6510 instantiation, which can result in unrecognizable insns.
6511 Avoid this by forcing all overflows into registers. */
6512 if (TREE_CONSTANT_OVERFLOW (exp)
6513 && modifier != EXPAND_INITIALIZER)
6514 temp = force_reg (mode, temp);
6515
6516 return temp;
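
      /* Editorial note, not part of the original source:
         immed_double_const above yields a CONST_INT when the value
         fits in a single HOST_WIDE_INT and a CONST_DOUBLE otherwise,
         so e.g. the C constant 5 simply becomes (const_int 5).  */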
6517
6518 case VECTOR_CST:
6519 return const_vector_from_tree (exp);
6520
6521 case CONST_DECL:
6522 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6523
6524 case REAL_CST:
6525 /* If optimized, generate immediate CONST_DOUBLE
6526 which will be turned into memory by reload if necessary.
6527
6528 We used to force a register so that loop.c could see it. But
6529 this does not allow gen_* patterns to perform optimizations with
6530 the constants. It also produces two insns in cases like "x = 1.0;".
6531 On most machines, floating-point constants are not permitted in
6532 many insns, so we'd end up copying it to a register in any case.
6533
6534 Now, we do the copying in expand_binop, if appropriate. */
6535 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6536 TYPE_MODE (TREE_TYPE (exp)));
6537
6538 case COMPLEX_CST:
6539 /* Handle evaluating a complex constant in a CONCAT target. */
6540 if (original_target && GET_CODE (original_target) == CONCAT)
6541 {
6542 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6543 rtx rtarg, itarg;
6544
6545 rtarg = XEXP (original_target, 0);
6546 itarg = XEXP (original_target, 1);
6547
6548 /* Move the real and imaginary parts separately. */
6549 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6550 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6551
6552 if (op0 != rtarg)
6553 emit_move_insn (rtarg, op0);
6554 if (op1 != itarg)
6555 emit_move_insn (itarg, op1);
6556
6557 return original_target;
6558 }
6559
6560 /* ... fall through ... */
6561
6562 case STRING_CST:
6563 temp = output_constant_def (exp, 1);
6564
6565 /* temp contains a constant address.
6566 On RISC machines where a constant address isn't valid,
6567 make some insns to get that address into a register. */
6568 if (modifier != EXPAND_CONST_ADDRESS
6569 && modifier != EXPAND_INITIALIZER
6570 && modifier != EXPAND_SUM
6571 && (! memory_address_p (mode, XEXP (temp, 0))
6572 || flag_force_addr))
6573 return replace_equiv_address (temp,
6574 copy_rtx (XEXP (temp, 0)));
6575 return temp;
6576
6577 case EXPR_WITH_FILE_LOCATION:
6578 {
6579 rtx to_return;
6580 struct file_stack fs;
6581
6582 fs.location = input_location;
6583 fs.next = expr_wfl_stack;
6584 input_filename = EXPR_WFL_FILENAME (exp);
6585 input_line = EXPR_WFL_LINENO (exp);
6586 expr_wfl_stack = &fs;
6587 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6588 emit_line_note (input_location);
6589 /* Possibly avoid switching back and forth here. */
6590 to_return = expand_expr (EXPR_WFL_NODE (exp),
6591 (ignore ? const0_rtx : target),
6592 tmode, modifier);
6593 if (expr_wfl_stack != &fs)
6594 abort ();
6595 input_location = fs.location;
6596 expr_wfl_stack = fs.next;
6597 return to_return;
6598 }
6599
6600 case SAVE_EXPR:
6601 context = decl_function_context (exp);
6602
6603 /* If this SAVE_EXPR was at global context, assume we are an
6604 initialization function and move it into our context. */
6605 if (context == 0)
6606 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6607
6608 /* We treat inline_function_decl as an alias for the current function
6609 because that is the inline function whose vars, types, etc.
6610 are being merged into the current function.
6611 See expand_inline_function. */
6612 if (context == current_function_decl || context == inline_function_decl)
6613 context = 0;
6614
6615 /* If this is non-local, handle it. */
6616 if (context)
6617 {
6618 /* The following call just exists to abort if the context is
6619 not of a containing function. */
6620 find_function_data (context);
6621
6622 temp = SAVE_EXPR_RTL (exp);
6623 if (temp && GET_CODE (temp) == REG)
6624 {
6625 put_var_into_stack (exp, /*rescan=*/true);
6626 temp = SAVE_EXPR_RTL (exp);
6627 }
6628 if (temp == 0 || GET_CODE (temp) != MEM)
6629 abort ();
6630 return
6631 replace_equiv_address (temp,
6632 fix_lexical_addr (XEXP (temp, 0), exp));
6633 }
6634 if (SAVE_EXPR_RTL (exp) == 0)
6635 {
6636 if (mode == VOIDmode)
6637 temp = const0_rtx;
6638 else
6639 temp = assign_temp (build_qualified_type (type,
6640 (TYPE_QUALS (type)
6641 | TYPE_QUAL_CONST)),
6642 3, 0, 0);
6643
6644 SAVE_EXPR_RTL (exp) = temp;
6645 if (!optimize && GET_CODE (temp) == REG)
6646 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6647 save_expr_regs);
6648
6649 /* If the mode of TEMP does not match that of the expression, it
6650 must be a promoted value. We pass store_expr a SUBREG of the
6651 wanted mode but mark it so that we know that it was already
6652 extended. */
6653
6654 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6655 {
6656 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6657 promote_mode (type, mode, &unsignedp, 0);
6658 SUBREG_PROMOTED_VAR_P (temp) = 1;
6659 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6660 }
6661
6662 if (temp == const0_rtx)
6663 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6664 else
6665 store_expr (TREE_OPERAND (exp, 0), temp,
6666 modifier == EXPAND_STACK_PARM ? 2 : 0);
6667
6668 TREE_USED (exp) = 1;
6669 }
6670
6671 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6672 must be a promoted value. We return a SUBREG of the wanted mode,
6673 but mark it so that we know that it was already extended. */
6674
6675 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6676 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6677 {
6678 /* Compute the signedness and make the proper SUBREG. */
6679 promote_mode (type, mode, &unsignedp, 0);
6680 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6681 SUBREG_PROMOTED_VAR_P (temp) = 1;
6682 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6683 return temp;
6684 }
6685
6686 return SAVE_EXPR_RTL (exp);
6687
6688 case UNSAVE_EXPR:
6689 {
6690 rtx temp;
6691 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6692 TREE_OPERAND (exp, 0)
6693 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
6694 return temp;
6695 }
6696
6697 case PLACEHOLDER_EXPR:
6698 {
6699 tree old_list = placeholder_list;
6700 tree placeholder_expr = 0;
6701
6702 exp = find_placeholder (exp, &placeholder_expr);
6703 if (exp == 0)
6704 abort ();
6705
6706 placeholder_list = TREE_CHAIN (placeholder_expr);
6707 temp = expand_expr (exp, original_target, tmode, modifier);
6708 placeholder_list = old_list;
6709 return temp;
6710 }
6711
6712 case WITH_RECORD_EXPR:
6713 /* Put the object on the placeholder list, expand our first operand,
6714 and pop the list. */
6715 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6716 placeholder_list);
6717 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
6718 modifier);
6719 placeholder_list = TREE_CHAIN (placeholder_list);
6720 return target;
6721
6722 case GOTO_EXPR:
6723 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6724 expand_goto (TREE_OPERAND (exp, 0));
6725 else
6726 expand_computed_goto (TREE_OPERAND (exp, 0));
6727 return const0_rtx;
6728
6729 case EXIT_EXPR:
6730 expand_exit_loop_if_false (NULL,
6731 invert_truthvalue (TREE_OPERAND (exp, 0)));
6732 return const0_rtx;
6733
6734 case LABELED_BLOCK_EXPR:
6735 if (LABELED_BLOCK_BODY (exp))
6736 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
6737 /* Should perhaps use expand_label, but this is simpler and safer. */
6738 do_pending_stack_adjust ();
6739 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6740 return const0_rtx;
6741
6742 case EXIT_BLOCK_EXPR:
6743 if (EXIT_BLOCK_RETURN (exp))
6744 sorry ("returned value in block_exit_expr");
6745 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6746 return const0_rtx;
6747
6748 case LOOP_EXPR:
6749 push_temp_slots ();
6750 expand_start_loop (1);
6751 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
6752 expand_end_loop ();
6753 pop_temp_slots ();
6754
6755 return const0_rtx;
6756
6757 case BIND_EXPR:
6758 {
6759 tree vars = TREE_OPERAND (exp, 0);
6760
6761 /* Need to open a binding contour here because
6762 if there are any cleanups they must be contained here. */
6763 expand_start_bindings (2);
6764
6765 /* Mark the corresponding BLOCK for output in its proper place. */
6766 if (TREE_OPERAND (exp, 2) != 0
6767 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6768 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
6769
6770 /* If VARS have not yet been expanded, expand them now. */
6771 while (vars)
6772 {
6773 if (!DECL_RTL_SET_P (vars))
6774 expand_decl (vars);
6775 expand_decl_init (vars);
6776 vars = TREE_CHAIN (vars);
6777 }
6778
6779 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
6780
6781 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6782
6783 return temp;
6784 }
6785
6786 case RTL_EXPR:
6787 if (RTL_EXPR_SEQUENCE (exp))
6788 {
6789 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6790 abort ();
6791 emit_insn (RTL_EXPR_SEQUENCE (exp));
6792 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6793 }
6794 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6795 free_temps_for_rtl_expr (exp);
6796 return RTL_EXPR_RTL (exp);
6797
6798 case CONSTRUCTOR:
6799 /* If we don't need the result, just ensure we evaluate any
6800 subexpressions. */
6801 if (ignore)
6802 {
6803 tree elt;
6804
6805 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6806 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6807
6808 return const0_rtx;
6809 }
6810
6811 /* All elts simple constants => refer to a constant in memory. But
6812 if this is a non-BLKmode mode, let it store a field at a time
6813 since that should make a CONST_INT or CONST_DOUBLE when we
6814 fold. Likewise, if we have a target we can use, it is best to
6815 store directly into the target unless the type is large enough
6816 that memcpy will be used. If we are making an initializer and
6817 all operands are constant, put it in memory as well.
6818
6819          FIXME: Avoid trying to fill vector constructors piecemeal.
6820 Output them with output_constant_def below unless we're sure
6821 they're zeros. This should go away when vector initializers
6822 are treated like VECTOR_CST instead of arrays.
6823 */
6824 else if ((TREE_STATIC (exp)
6825 && ((mode == BLKmode
6826 && ! (target != 0 && safe_from_p (target, exp, 1)))
6827 || TREE_ADDRESSABLE (exp)
6828 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6829 && (! MOVE_BY_PIECES_P
6830 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6831 TYPE_ALIGN (type)))
6832 && ((TREE_CODE (type) == VECTOR_TYPE
6833 && !is_zeros_p (exp))
6834 || ! mostly_zeros_p (exp)))))
6835 || ((modifier == EXPAND_INITIALIZER
6836 || modifier == EXPAND_CONST_ADDRESS)
6837 && TREE_CONSTANT (exp)))
6838 {
6839 rtx constructor = output_constant_def (exp, 1);
6840
6841 if (modifier != EXPAND_CONST_ADDRESS
6842 && modifier != EXPAND_INITIALIZER
6843 && modifier != EXPAND_SUM)
6844 constructor = validize_mem (constructor);
6845
6846 return constructor;
6847 }
6848 else
6849 {
6850 /* Handle calls that pass values in multiple non-contiguous
6851 locations. The Irix 6 ABI has examples of this. */
6852 if (target == 0 || ! safe_from_p (target, exp, 1)
6853 || GET_CODE (target) == PARALLEL
6854 || modifier == EXPAND_STACK_PARM)
6855 target
6856 = assign_temp (build_qualified_type (type,
6857 (TYPE_QUALS (type)
6858 | (TREE_READONLY (exp)
6859 * TYPE_QUAL_CONST))),
6860 0, TREE_ADDRESSABLE (exp), 1);
6861
6862 store_constructor (exp, target, 0, int_expr_size (exp));
6863 return target;
6864 }
6865
6866 case INDIRECT_REF:
6867 {
6868 tree exp1 = TREE_OPERAND (exp, 0);
6869 tree index;
6870 tree string = string_constant (exp1, &index);
6871
6872 /* Try to optimize reads from const strings. */
6873 if (string
6874 && TREE_CODE (string) == STRING_CST
6875 && TREE_CODE (index) == INTEGER_CST
6876 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6877 && GET_MODE_CLASS (mode) == MODE_INT
6878 && GET_MODE_SIZE (mode) == 1
6879 && modifier != EXPAND_WRITE)
6880 return gen_int_mode (TREE_STRING_POINTER (string)
6881 [TREE_INT_CST_LOW (index)], mode);
6882
6883 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6884 op0 = memory_address (mode, op0);
6885 temp = gen_rtx_MEM (mode, op0);
6886 set_mem_attributes (temp, exp, 0);
6887
6888 /* If we are writing to this object and its type is a record with
6889 readonly fields, we must mark it as readonly so it will
6890 conflict with readonly references to those fields. */
6891 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
6892 RTX_UNCHANGING_P (temp) = 1;
6893
6894 return temp;
6895 }
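 /* For instance (an illustrative example, not from the original
 sources): the const-string optimization above turns

 char c = *"hi";

 directly into the constant 'h', with no load from the string's
 memory image, provided the mode is a one-byte integer mode and
 the access is not a write. */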
6896
6897 case ARRAY_REF:
6898 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6899 abort ();
6900
6901 {
6902 tree array = TREE_OPERAND (exp, 0);
6903 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6904 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6905 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6906 HOST_WIDE_INT i;
6907
6908 /* Optimize the special-case of a zero lower bound.
6909
6910 We convert the low_bound to sizetype to avoid some problems
6911 with constant folding. (E.g. suppose the lower bound is 1,
6912 and its mode is QI. Without the conversion, (ARRAY
6913 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6914 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6915
6916 if (! integer_zerop (low_bound))
6917 index = size_diffop (index, convert (sizetype, low_bound));
6918
6919 /* Fold an expression like: "foo"[2].
6920 This is not done in fold so it won't happen inside &.
6921 Don't fold if this is for wide characters since it's too
6922 difficult to do correctly and this is a very rare case. */
6923
6924 if (modifier != EXPAND_CONST_ADDRESS
6925 && modifier != EXPAND_INITIALIZER
6926 && modifier != EXPAND_MEMORY
6927 && TREE_CODE (array) == STRING_CST
6928 && TREE_CODE (index) == INTEGER_CST
6929 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6930 && GET_MODE_CLASS (mode) == MODE_INT
6931 && GET_MODE_SIZE (mode) == 1)
6932 return gen_int_mode (TREE_STRING_POINTER (array)
6933 [TREE_INT_CST_LOW (index)], mode);
6934
6935 /* If this is a constant index into a constant array,
6936 just get the value from the array. Handle both the cases when
6937 we have an explicit constructor and when our operand is a variable
6938 that was declared const. */
6939
6940 if (modifier != EXPAND_CONST_ADDRESS
6941 && modifier != EXPAND_INITIALIZER
6942 && modifier != EXPAND_MEMORY
6943 && TREE_CODE (array) == CONSTRUCTOR
6944 && ! TREE_SIDE_EFFECTS (array)
6945 && TREE_CODE (index) == INTEGER_CST
6946 && 0 > compare_tree_int (index,
6947 list_length (CONSTRUCTOR_ELTS
6948 (TREE_OPERAND (exp, 0)))))
6949 {
6950 tree elem;
6951
6952 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6953 i = TREE_INT_CST_LOW (index);
6954 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6955 ;
6956
6957 if (elem)
6958 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6959 modifier);
6960 }
6961
6962 else if (optimize >= 1
6963 && modifier != EXPAND_CONST_ADDRESS
6964 && modifier != EXPAND_INITIALIZER
6965 && modifier != EXPAND_MEMORY
6966 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6967 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6968 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
6969 && targetm.binds_local_p (array))
6970 {
6971 if (TREE_CODE (index) == INTEGER_CST)
6972 {
6973 tree init = DECL_INITIAL (array);
6974
6975 if (TREE_CODE (init) == CONSTRUCTOR)
6976 {
6977 tree elem;
6978
6979 for (elem = CONSTRUCTOR_ELTS (init);
6980 (elem
6981 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6982 elem = TREE_CHAIN (elem))
6983 ;
6984
6985 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6986 return expand_expr (fold (TREE_VALUE (elem)), target,
6987 tmode, modifier);
6988 }
6989 else if (TREE_CODE (init) == STRING_CST
6990 && 0 > compare_tree_int (index,
6991 TREE_STRING_LENGTH (init)))
6992 {
6993 tree type = TREE_TYPE (TREE_TYPE (init));
6994 enum machine_mode mode = TYPE_MODE (type);
6995
6996 if (GET_MODE_CLASS (mode) == MODE_INT
6997 && GET_MODE_SIZE (mode) == 1)
6998 return gen_int_mode (TREE_STRING_POINTER (init)
6999 [TREE_INT_CST_LOW (index)], mode);
7000 }
7001 }
7002 }
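 /* A concrete (hypothetical) instance of the folding just above:

 static const int map[3] = { 10, 20, 30 };
 int f (void) { return map[1]; }

 expands to the constant 20 when optimizing, since the index is
 an INTEGER_CST, the initializer is a side-effect-free
 CONSTRUCTOR, and the decl binds locally. */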
7003 }
7004 goto normal_inner_ref;
7005
7006 case COMPONENT_REF:
7007 /* If the operand is a CONSTRUCTOR, we can just extract the
7008 appropriate field if it is present. */
7009 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7010 {
7011 tree elt;
7012
7013 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
7014 elt = TREE_CHAIN (elt))
7015 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
7016 /* We can normally use the value of the field in the
7017 CONSTRUCTOR. However, if this is a bitfield in
7018 an integral mode that we can fit in a HOST_WIDE_INT,
7019 we must mask only the number of bits in the bitfield,
7020 since this is done implicitly by the constructor. If
7021 the bitfield does not meet either of those conditions,
7022 we can't do this optimization. */
7023 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
7024 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
7025 == MODE_INT)
7026 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
7027 <= HOST_BITS_PER_WIDE_INT))))
7028 {
7029 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
7030 && modifier == EXPAND_STACK_PARM)
7031 target = 0;
7032 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
7033 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
7034 {
7035 HOST_WIDE_INT bitsize
7036 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
7037 enum machine_mode imode
7038 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
7039
7040 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
7041 {
7042 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7043 op0 = expand_and (imode, op0, op1, target);
7044 }
7045 else
7046 {
7047 tree count
7048 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7049 0);
7050
7051 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7052 target, 0);
7053 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7054 target, 0);
7055 }
7056 }
7057
7058 return op0;
7059 }
7060 }
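 /* Sketch of the bitfield adjustment above (illustrative numbers):
 for a signed 3-bit field holding -2, the CONSTRUCTOR records
 the low three bits, i.e. 6. With imode == SImode the two
 shifts compute

 (6 << 29) >> 29 == -2

 where the right shift is arithmetic, so the field value is
 sign-extended; the unsigned case instead masks with
 (1 << bitsize) - 1. */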
7061 goto normal_inner_ref;
7062
7063 case BIT_FIELD_REF:
7064 case ARRAY_RANGE_REF:
7065 normal_inner_ref:
7066 {
7067 enum machine_mode mode1;
7068 HOST_WIDE_INT bitsize, bitpos;
7069 tree offset;
7070 int volatilep = 0;
7071 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7072 &mode1, &unsignedp, &volatilep);
7073 rtx orig_op0;
7074
7075 /* If we got back the original object, something is wrong. Perhaps
7076 we are evaluating an expression too early. In any event, don't
7077 infinitely recurse. */
7078 if (tem == exp)
7079 abort ();
7080
7081 /* If TEM's type is a union of variable size, pass TARGET to the inner
7082 computation, since it will need a temporary and TARGET is known
7083 to suffice. This occurs in unchecked conversion in Ada. */
7084
7085 orig_op0 = op0
7086 = expand_expr (tem,
7087 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7088 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7089 != INTEGER_CST)
7090 && modifier != EXPAND_STACK_PARM
7091 ? target : NULL_RTX),
7092 VOIDmode,
7093 (modifier == EXPAND_INITIALIZER
7094 || modifier == EXPAND_CONST_ADDRESS
7095 || modifier == EXPAND_STACK_PARM)
7096 ? modifier : EXPAND_NORMAL);
7097
7098 /* If this is a constant, put it into a register if it is a
7099 legitimate constant and OFFSET is 0 and memory if it isn't. */
7100 if (CONSTANT_P (op0))
7101 {
7102 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7103 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7104 && offset == 0)
7105 op0 = force_reg (mode, op0);
7106 else
7107 op0 = validize_mem (force_const_mem (mode, op0));
7108 }
7109
7110 /* Otherwise, if this object is not in memory and we either have an
7111 offset or a BLKmode result, put it there. This case can't occur in
7112 C, but can in Ada if we have unchecked conversion of an expression
7113 from a scalar type to an array or record type or for an
7114 ARRAY_RANGE_REF whose type is BLKmode. */
7115 else if (GET_CODE (op0) != MEM
7116 && (offset != 0
7117 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7118 {
7119 /* If the operand is a SAVE_EXPR, we can deal with this by
7120 forcing the SAVE_EXPR into memory. */
7121 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7122 {
7123 put_var_into_stack (TREE_OPERAND (exp, 0),
7124 /*rescan=*/true);
7125 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7126 }
7127 else
7128 {
7129 tree nt
7130 = build_qualified_type (TREE_TYPE (tem),
7131 (TYPE_QUALS (TREE_TYPE (tem))
7132 | TYPE_QUAL_CONST));
7133 rtx memloc = assign_temp (nt, 1, 1, 1);
7134
7135 emit_move_insn (memloc, op0);
7136 op0 = memloc;
7137 }
7138 }
7139
7140 if (offset != 0)
7141 {
7142 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7143 EXPAND_SUM);
7144
7145 if (GET_CODE (op0) != MEM)
7146 abort ();
7147
7148 #ifdef POINTERS_EXTEND_UNSIGNED
7149 if (GET_MODE (offset_rtx) != Pmode)
7150 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7151 #else
7152 if (GET_MODE (offset_rtx) != ptr_mode)
7153 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7154 #endif
7155
7156 if (GET_MODE (op0) == BLKmode
7157 /* A constant address in OP0 can have VOIDmode; we must
7158 not try to call force_reg in that case. */
7159 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7160 && bitsize != 0
7161 && (bitpos % bitsize) == 0
7162 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7163 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7164 {
7165 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7166 bitpos = 0;
7167 }
7168
7169 op0 = offset_address (op0, offset_rtx,
7170 highest_pow2_factor (offset));
7171 }
7172
7173 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7174 record its alignment as BIGGEST_ALIGNMENT. */
7175 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7176 && is_aligning_offset (offset, tem))
7177 set_mem_align (op0, BIGGEST_ALIGNMENT);
7178
7179 /* Don't forget about volatility even if this is a bitfield. */
7180 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7181 {
7182 if (op0 == orig_op0)
7183 op0 = copy_rtx (op0);
7184
7185 MEM_VOLATILE_P (op0) = 1;
7186 }
7187
7188 /* The following code doesn't handle CONCAT.
7189 Assume only bitpos == 0 can be used for CONCAT, due to
7190 one-element arrays having the same mode as their element. */
7191 if (GET_CODE (op0) == CONCAT)
7192 {
7193 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7194 abort ();
7195 return op0;
7196 }
7197
7198 /* In cases where an aligned union has an unaligned object
7199 as a field, we might be extracting a BLKmode value from
7200 an integer-mode (e.g., SImode) object. Handle this case
7201 by doing the extract into an object as wide as the field
7202 (which we know to be the width of a basic mode), then
7203 storing into memory, and changing the mode to BLKmode. */
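 /* For example (hypothetical, assuming a 32-bit target): given

 union u { int i; struct { char c[3]; } s; } x;

 the union itself may have SImode while x.s has BLKmode, so a
 read of x.s extracts a BLKmode value from an SImode object via
 the path described above. */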
7204 if (mode1 == VOIDmode
7205 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7206 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7207 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7208 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7209 && modifier != EXPAND_CONST_ADDRESS
7210 && modifier != EXPAND_INITIALIZER)
7211 /* If the field isn't aligned enough to fetch as a memref,
7212 fetch it as a bit field. */
7213 || (mode1 != BLKmode
7214 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7215 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7216 || (GET_CODE (op0) == MEM
7217 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7218 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7219 && ((modifier == EXPAND_CONST_ADDRESS
7220 || modifier == EXPAND_INITIALIZER)
7221 ? STRICT_ALIGNMENT
7222 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7223 || (bitpos % BITS_PER_UNIT != 0)))
7224 /* If the type and the field are a constant size and the
7225 size of the type isn't the same size as the bitfield,
7226 we must use bitfield operations. */
7227 || (bitsize >= 0
7228 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7229 == INTEGER_CST)
7230 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7231 bitsize)))
7232 {
7233 enum machine_mode ext_mode = mode;
7234
7235 if (ext_mode == BLKmode
7236 && ! (target != 0 && GET_CODE (op0) == MEM
7237 && GET_CODE (target) == MEM
7238 && bitpos % BITS_PER_UNIT == 0))
7239 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7240
7241 if (ext_mode == BLKmode)
7242 {
7243 if (target == 0)
7244 target = assign_temp (type, 0, 1, 1);
7245
7246 if (bitsize == 0)
7247 return target;
7248
7249 /* In this case, BITPOS must start at a byte boundary and
7250 TARGET, if specified, must be a MEM. */
7251 if (GET_CODE (op0) != MEM
7252 || (target != 0 && GET_CODE (target) != MEM)
7253 || bitpos % BITS_PER_UNIT != 0)
7254 abort ();
7255
7256 emit_block_move (target,
7257 adjust_address (op0, VOIDmode,
7258 bitpos / BITS_PER_UNIT),
7259 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7260 / BITS_PER_UNIT),
7261 (modifier == EXPAND_STACK_PARM
7262 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7263
7264 return target;
7265 }
7266
7267 op0 = validize_mem (op0);
7268
7269 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7270 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7271
7272 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7273 (modifier == EXPAND_STACK_PARM
7274 ? NULL_RTX : target),
7275 ext_mode, ext_mode,
7276 int_size_in_bytes (TREE_TYPE (tem)));
7277
7278 /* If the result is a record type and BITSIZE is narrower than
7279 the mode of OP0, an integral mode, and this is a big endian
7280 machine, we must put the field into the high-order bits. */
7281 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7282 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7283 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7284 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7285 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7286 - bitsize),
7287 op0, 1);
7288
7289 if (mode == BLKmode)
7290 {
7291 rtx new = assign_temp (build_qualified_type
7292 ((*lang_hooks.types.type_for_mode)
7293 (ext_mode, 0),
7294 TYPE_QUAL_CONST), 0, 1, 1);
7295
7296 emit_move_insn (new, op0);
7297 op0 = copy_rtx (new);
7298 PUT_MODE (op0, BLKmode);
7299 set_mem_attributes (op0, exp, 1);
7300 }
7301
7302 return op0;
7303 }
7304
7305 /* If the result is BLKmode, use that to access the object
7306 now as well. */
7307 if (mode == BLKmode)
7308 mode1 = BLKmode;
7309
7310 /* Get a reference to just this component. */
7311 if (modifier == EXPAND_CONST_ADDRESS
7312 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7313 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7314 else
7315 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7316
7317 if (op0 == orig_op0)
7318 op0 = copy_rtx (op0);
7319
7320 set_mem_attributes (op0, exp, 0);
7321 if (GET_CODE (XEXP (op0, 0)) == REG)
7322 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7323
7324 MEM_VOLATILE_P (op0) |= volatilep;
7325 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7326 || modifier == EXPAND_CONST_ADDRESS
7327 || modifier == EXPAND_INITIALIZER)
7328 return op0;
7329 else if (target == 0)
7330 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7331
7332 convert_move (target, op0, unsignedp);
7333 return target;
7334 }
7335
7336 case VTABLE_REF:
7337 {
7338 rtx insn, before = get_last_insn (), vtbl_ref;
7339
7340 /* Evaluate the interior expression. */
7341 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7342 tmode, modifier);
7343
7344 /* Get or create an instruction off which to hang a note. */
7345 if (REG_P (subtarget))
7346 {
7347 target = subtarget;
7348 insn = get_last_insn ();
7349 if (insn == before)
7350 abort ();
7351 if (! INSN_P (insn))
7352 insn = prev_nonnote_insn (insn);
7353 }
7354 else
7355 {
7356 target = gen_reg_rtx (GET_MODE (subtarget));
7357 insn = emit_move_insn (target, subtarget);
7358 }
7359
7360 /* Collect the data for the note. */
7361 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7362 vtbl_ref = plus_constant (vtbl_ref,
7363 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7364 /* Discard the initial CONST that was added. */
7365 vtbl_ref = XEXP (vtbl_ref, 0);
7366
7367 REG_NOTES (insn)
7368 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7369
7370 return target;
7371 }
7372
7373 /* Intended for a reference to a buffer of a file-object in Pascal.
7374 But it's not certain that a special tree code will really be
7375 necessary for these. INDIRECT_REF might work for them. */
7376 case BUFFER_REF:
7377 abort ();
7378
7379 case IN_EXPR:
7380 {
7381 /* Pascal set IN expression.
7382
7383 Algorithm:
7384 rlo = set_low - (set_low%bits_per_word);
7385 the_word = set [ (index - rlo)/bits_per_word ];
7386 bit_index = index % bits_per_word;
7387 bitmask = 1 << bit_index;
7388 return !!(the_word & bitmask); */
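 /* A standalone C rendering of that algorithm, with bits_per_word
 taken as 8 for concreteness (illustrative only; the expansion
 below works in BITS_PER_UNIT-sized chunks and adds the range
 checks emitted further down):

 static int set_in (unsigned char *set, int set_low, int index)
 {
 int rlo = set_low - (set_low % 8);
 unsigned char the_word = set[(index - rlo) / 8];
 int bit_index = index % 8;
 unsigned char bitmask = 1 << bit_index;
 return (the_word & bitmask) != 0;
 } */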
7389
7390 tree set = TREE_OPERAND (exp, 0);
7391 tree index = TREE_OPERAND (exp, 1);
7392 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7393 tree set_type = TREE_TYPE (set);
7394 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7395 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7396 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7397 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7398 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7399 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7400 rtx setaddr = XEXP (setval, 0);
7401 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7402 rtx rlow;
7403 rtx diff, quo, rem, addr, bit, result;
7404
7405 /* If domain is empty, answer is no. Likewise if index is constant
7406 and out of bounds. */
7407 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7408 && TREE_CODE (set_low_bound) == INTEGER_CST
7409 && tree_int_cst_lt (set_high_bound, set_low_bound))
7410 || (TREE_CODE (index) == INTEGER_CST
7411 && TREE_CODE (set_low_bound) == INTEGER_CST
7412 && tree_int_cst_lt (index, set_low_bound))
7413 || (TREE_CODE (set_high_bound) == INTEGER_CST
7414 && TREE_CODE (index) == INTEGER_CST
7415 && tree_int_cst_lt (set_high_bound, index))))
7416 return const0_rtx;
7417
7418 if (target == 0)
7419 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7420
7421 /* If we get here, we have to generate the code for both cases
7422 (in range and out of range). */
7423
7424 op0 = gen_label_rtx ();
7425 op1 = gen_label_rtx ();
7426
7427 if (! (GET_CODE (index_val) == CONST_INT
7428 && GET_CODE (lo_r) == CONST_INT))
7429 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7430 GET_MODE (index_val), iunsignedp, op1);
7431
7432 if (! (GET_CODE (index_val) == CONST_INT
7433 && GET_CODE (hi_r) == CONST_INT))
7434 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7435 GET_MODE (index_val), iunsignedp, op1);
7436
7437 /* Calculate the element number of bit zero in the first word
7438 of the set. */
7439 if (GET_CODE (lo_r) == CONST_INT)
7440 rlow = GEN_INT (INTVAL (lo_r)
7441 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7442 else
7443 rlow = expand_binop (index_mode, and_optab, lo_r,
7444 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7445 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7446
7447 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7448 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7449
7450 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7451 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7452 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7453 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7454
7455 addr = memory_address (byte_mode,
7456 expand_binop (index_mode, add_optab, diff,
7457 setaddr, NULL_RTX, iunsignedp,
7458 OPTAB_LIB_WIDEN));
7459
7460 /* Extract the bit we want to examine. */
7461 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7462 gen_rtx_MEM (byte_mode, addr),
7463 make_tree (TREE_TYPE (index), rem),
7464 NULL_RTX, 1);
7465 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7466 GET_MODE (target) == byte_mode ? target : 0,
7467 1, OPTAB_LIB_WIDEN);
7468
7469 if (result != target)
7470 convert_move (target, result, 1);
7471
7472 /* Output the code to handle the out-of-range case. */
7473 emit_jump (op0);
7474 emit_label (op1);
7475 emit_move_insn (target, const0_rtx);
7476 emit_label (op0);
7477 return target;
7478 }
7479
7480 case WITH_CLEANUP_EXPR:
7481 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7482 {
7483 WITH_CLEANUP_EXPR_RTL (exp)
7484 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7485 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7486 CLEANUP_EH_ONLY (exp));
7487
7488 /* That's it for this cleanup. */
7489 TREE_OPERAND (exp, 1) = 0;
7490 }
7491 return WITH_CLEANUP_EXPR_RTL (exp);
7492
7493 case CLEANUP_POINT_EXPR:
7494 {
7495 /* Start a new binding layer that will keep track of all cleanup
7496 actions to be performed. */
7497 expand_start_bindings (2);
7498
7499 target_temp_slot_level = temp_slot_level;
7500
7501 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7502 /* If we're going to use this value, load it up now. */
7503 if (! ignore)
7504 op0 = force_not_mem (op0);
7505 preserve_temp_slots (op0);
7506 expand_end_bindings (NULL_TREE, 0, 0);
7507 }
7508 return op0;
7509
7510 case CALL_EXPR:
7511 /* Check for a built-in function. */
7512 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7513 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7514 == FUNCTION_DECL)
7515 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7516 {
7517 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7518 == BUILT_IN_FRONTEND)
7519 return (*lang_hooks.expand_expr) (exp, original_target,
7520 tmode, modifier);
7521 else
7522 return expand_builtin (exp, target, subtarget, tmode, ignore);
7523 }
7524
7525 return expand_call (exp, target, ignore);
7526
7527 case NON_LVALUE_EXPR:
7528 case NOP_EXPR:
7529 case CONVERT_EXPR:
7530 case REFERENCE_EXPR:
7531 if (TREE_OPERAND (exp, 0) == error_mark_node)
7532 return const0_rtx;
7533
7534 if (TREE_CODE (type) == UNION_TYPE)
7535 {
7536 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7537
7538 /* If both input and output are BLKmode, this conversion isn't doing
7539 anything except possibly changing memory attributes. */
7540 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7541 {
7542 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7543 modifier);
7544
7545 result = copy_rtx (result);
7546 set_mem_attributes (result, exp, 0);
7547 return result;
7548 }
7549
7550 if (target == 0)
7551 target = assign_temp (type, 0, 1, 1);
7552
7553 if (GET_CODE (target) == MEM)
7554 /* Store data into beginning of memory target. */
7555 store_expr (TREE_OPERAND (exp, 0),
7556 adjust_address (target, TYPE_MODE (valtype), 0),
7557 modifier == EXPAND_STACK_PARM ? 2 : 0);
7558
7559 else if (GET_CODE (target) == REG)
7560 /* Store this field into a union of the proper type. */
7561 store_field (target,
7562 MIN ((int_size_in_bytes (TREE_TYPE
7563 (TREE_OPERAND (exp, 0)))
7564 * BITS_PER_UNIT),
7565 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7566 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7567 VOIDmode, 0, type, 0);
7568 else
7569 abort ();
7570
7571 /* Return the entire union. */
7572 return target;
7573 }
7574
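 /* The union arm above is, for instance, what a GNU cast to a
 union type can produce (a hypothetical example):

 union u { int i; double d; };
 union u v = (union u) 3;

 stores 3 at the beginning of V's storage in int's mode and
 then returns the entire union. */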
7575 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7576 {
7577 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7578 modifier);
7579
7580 /* If the signedness of the conversion differs and OP0 is
7581 a promoted SUBREG, clear that indication since we now
7582 have to do the proper extension. */
7583 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7584 && GET_CODE (op0) == SUBREG)
7585 SUBREG_PROMOTED_VAR_P (op0) = 0;
7586
7587 return op0;
7588 }
7589
7590 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7591 if (GET_MODE (op0) == mode)
7592 return op0;
7593
7594 /* If OP0 is a constant, just convert it into the proper mode. */
7595 if (CONSTANT_P (op0))
7596 {
7597 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7598 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7599
7600 if (modifier == EXPAND_INITIALIZER)
7601 return simplify_gen_subreg (mode, op0, inner_mode,
7602 subreg_lowpart_offset (mode,
7603 inner_mode));
7604 else
7605 return convert_modes (mode, inner_mode, op0,
7606 TREE_UNSIGNED (inner_type));
7607 }
7608
7609 if (modifier == EXPAND_INITIALIZER)
7610 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7611
7612 if (target == 0)
7613 return
7614 convert_to_mode (mode, op0,
7615 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7616 else
7617 convert_move (target, op0,
7618 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7619 return target;
7620
7621 case VIEW_CONVERT_EXPR:
7622 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7623
7624 /* If the input and output modes are both the same, we are done.
7625 Otherwise, if neither mode is BLKmode and both are integral and within
7626 a word, we can use gen_lowpart. If neither is true, make sure the
7627 operand is in memory and convert the MEM to the new mode. */
7628 if (TYPE_MODE (type) == GET_MODE (op0))
7629 ;
7630 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7631 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7632 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7633 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7634 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7635 op0 = gen_lowpart (TYPE_MODE (type), op0);
7636 else if (GET_CODE (op0) != MEM)
7637 {
7638 /* If the operand is not a MEM, force it into memory. Since we
7639 are going to be changing the mode of the MEM, don't call
7640 force_const_mem for constants because we don't allow pool
7641 constants to change mode. */
7642 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7643
7644 if (TREE_ADDRESSABLE (exp))
7645 abort ();
7646
7647 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7648 target
7649 = assign_stack_temp_for_type
7650 (TYPE_MODE (inner_type),
7651 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7652
7653 emit_move_insn (target, op0);
7654 op0 = target;
7655 }
7656
7657 /* At this point, OP0 is in the correct mode. If the output type is such
7658 that the operand is known to be aligned, indicate that it is.
7659 Otherwise, we need only be concerned about alignment for non-BLKmode
7660 results. */
7661 if (GET_CODE (op0) == MEM)
7662 {
7663 op0 = copy_rtx (op0);
7664
7665 if (TYPE_ALIGN_OK (type))
7666 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7667 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7668 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7669 {
7670 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7671 HOST_WIDE_INT temp_size
7672 = MAX (int_size_in_bytes (inner_type),
7673 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7674 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7675 temp_size, 0, type);
7676 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7677
7678 if (TREE_ADDRESSABLE (exp))
7679 abort ();
7680
7681 if (GET_MODE (op0) == BLKmode)
7682 emit_block_move (new_with_op0_mode, op0,
7683 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7684 (modifier == EXPAND_STACK_PARM
7685 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7686 else
7687 emit_move_insn (new_with_op0_mode, op0);
7688
7689 op0 = new;
7690 }
7691
7692 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7693 }
7694
7695 return op0;
7696
7697 case PLUS_EXPR:
7698 this_optab = ! unsignedp && flag_trapv
7699 && (GET_MODE_CLASS (mode) == MODE_INT)
7700 ? addv_optab : add_optab;
7701
7702 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7703 something else, make sure we add the register to the constant and
7704 then to the other thing. This case can occur during strength
7705 reduction and doing it this way will produce better code if the
7706 frame pointer or argument pointer is eliminated.
7707
7708 fold-const.c will ensure that the constant is always in the inner
7709 PLUS_EXPR, so the only case we need to do anything about is if
7710 sp, ap, or fp is our second argument, in which case we must swap
7711 the innermost first argument and our second argument. */
7712
7713 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7714 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7715 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7716 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7717 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7718 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7719 {
7720 tree t = TREE_OPERAND (exp, 1);
7721
7722 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7723 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7724 }
7725
7726 /* If the result is to be ptr_mode and we are adding an integer to
7727 something, we might be forming a constant. So try to use
7728 plus_constant. If it produces a sum and we can't accept it,
7729 use force_operand. This allows P = &ARR[const] to generate
7730 efficient code on machines where a SYMBOL_REF is not a valid
7731 address.
7732
7733 If this is an EXPAND_SUM call, always return the sum. */
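 /* E.g. for int *p = &arr[10]; (an illustrative case) the
 address can be formed with plus_constant on arr's SYMBOL_REF,
 yielding (const (plus (symbol_ref arr) (const_int 40))) rather
 than an explicit add insn, which matters on machines where a
 bare SYMBOL_REF is not a legitimate address. */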
7734 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7735 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7736 {
7737 if (modifier == EXPAND_STACK_PARM)
7738 target = 0;
7739 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7740 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7741 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7742 {
7743 rtx constant_part;
7744
7745 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7746 EXPAND_SUM);
7747 /* Use immed_double_const to ensure that the constant is
7748 truncated according to the mode of OP1, then sign extended
7749 to a HOST_WIDE_INT. Using the constant directly can result
7750 in non-canonical RTL in a 64x32 cross compile. */
7751 constant_part
7752 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7753 (HOST_WIDE_INT) 0,
7754 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7755 op1 = plus_constant (op1, INTVAL (constant_part));
7756 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7757 op1 = force_operand (op1, target);
7758 return op1;
7759 }
7760
7761 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7762 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7763 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7764 {
7765 rtx constant_part;
7766
7767 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7768 (modifier == EXPAND_INITIALIZER
7769 ? EXPAND_INITIALIZER : EXPAND_SUM));
7770 if (! CONSTANT_P (op0))
7771 {
7772 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7773 VOIDmode, modifier);
7774 /* Return a PLUS if modifier says it's OK. */
7775 if (modifier == EXPAND_SUM
7776 || modifier == EXPAND_INITIALIZER)
7777 return simplify_gen_binary (PLUS, mode, op0, op1);
7778 goto binop2;
7779 }
7780 /* Use immed_double_const to ensure that the constant is
7781 truncated according to the mode of OP1, then sign extended
7782 to a HOST_WIDE_INT. Using the constant directly can result
7783 in non-canonical RTL in a 64x32 cross compile. */
7784 constant_part
7785 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7786 (HOST_WIDE_INT) 0,
7787 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7788 op0 = plus_constant (op0, INTVAL (constant_part));
7789 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7790 op0 = force_operand (op0, target);
7791 return op0;
7792 }
7793 }
7794
7795 /* No sense saving up arithmetic to be done
7796 if it's all in the wrong mode to form part of an address.
7797 And force_operand won't know whether to sign-extend or
7798 zero-extend. */
7799 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7800 || mode != ptr_mode)
7801 {
7802 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7803 subtarget, &op0, &op1, 0);
7804 if (op0 == const0_rtx)
7805 return op1;
7806 if (op1 == const0_rtx)
7807 return op0;
7808 goto binop2;
7809 }
7810
7811 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7812 subtarget, &op0, &op1, modifier);
7813 return simplify_gen_binary (PLUS, mode, op0, op1);
7814
7815 case MINUS_EXPR:
7816 /* For initializers, we are allowed to return a MINUS of two
7817 symbolic constants; handle the difference here whenever both
7818 operands are constant. */
7821 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7822 && really_constant_p (TREE_OPERAND (exp, 0))
7823 && really_constant_p (TREE_OPERAND (exp, 1)))
7824 {
7825 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7826 NULL_RTX, &op0, &op1, modifier);
7827
7828 /* If the last operand is a CONST_INT, use plus_constant of
7829 the negated constant. Else make the MINUS. */
7830 if (GET_CODE (op1) == CONST_INT)
7831 return plus_constant (op0, - INTVAL (op1));
7832 else
7833 return gen_rtx_MINUS (mode, op0, op1);
7834 }
7835
7836 this_optab = ! unsignedp && flag_trapv
7837 && (GET_MODE_CLASS(mode) == MODE_INT)
7838 ? subv_optab : sub_optab;
7839
7840 /* No sense saving up arithmetic to be done
7841 if it's all in the wrong mode to form part of an address.
7842 And force_operand won't know whether to sign-extend or
7843 zero-extend. */
7844 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7845 || mode != ptr_mode)
7846 goto binop;
7847
7848 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7849 subtarget, &op0, &op1, modifier);
7850
7851 /* Convert A - const to A + (-const). */
7852 if (GET_CODE (op1) == CONST_INT)
7853 {
7854 op1 = negate_rtx (mode, op1);
7855 return simplify_gen_binary (PLUS, mode, op0, op1);
7856 }
7857
7858 goto binop2;
7859
7860 case MULT_EXPR:
7861 /* If first operand is constant, swap them.
7862 Thus the following special case checks need only
7863 check the second operand. */
7864 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7865 {
7866 tree t1 = TREE_OPERAND (exp, 0);
7867 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7868 TREE_OPERAND (exp, 1) = t1;
7869 }
7870
7871 /* Attempt to return something suitable for generating an
7872 indexed address, for machines that support that. */
7873
7874 if (modifier == EXPAND_SUM && mode == ptr_mode
7875 && host_integerp (TREE_OPERAND (exp, 1), 0))
7876 {
7877 tree exp1 = TREE_OPERAND (exp, 1);
7878
7879 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7880 EXPAND_SUM);
7881
7882 if (GET_CODE (op0) != REG)
7883 op0 = force_operand (op0, NULL_RTX);
7884 if (GET_CODE (op0) != REG)
7885 op0 = copy_to_mode_reg (mode, op0);
7886
7887 return gen_rtx_MULT (mode, op0,
7888 gen_int_mode (tree_low_cst (exp1, 0),
7889 TYPE_MODE (TREE_TYPE (exp1))));
7890 }
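 /* Under EXPAND_SUM, an address computation such as &p[i] with
 4-byte elements can thus come back as

 (mult (reg i) (const_int 4))

 for the caller to fold into a (plus ...) address, letting
 targets with indexed addressing use it directly. (An
 illustrative RTL shape; the exact form is target-dependent.) */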
7891
7892 if (modifier == EXPAND_STACK_PARM)
7893 target = 0;
7894
7895 /* Check for multiplying things that have been extended
7896 from a narrower type. If this machine supports multiplying
7897 in that narrower type with a result in the desired type,
7898 do it that way, and avoid the explicit type-conversion. */
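 /* E.g. (a hypothetical source-level case) on a 32-bit target
 that provides mulsidi3,

 long long f (int a, int b) { return (long long) a * b; }

 can use the widening multiply directly instead of extending
 both operands to DImode and performing a full 64x64 multiply. */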
7899 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7900 && TREE_CODE (type) == INTEGER_TYPE
7901 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7902 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7903 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7904 && int_fits_type_p (TREE_OPERAND (exp, 1),
7905 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7906 /* Don't use a widening multiply if a shift will do. */
7907 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7908 > HOST_BITS_PER_WIDE_INT)
7909 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7910 ||
7911 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7912 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7913 ==
7914 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7915 /* If both operands are extended, they must either both
7916 be zero-extended or both be sign-extended. */
7917 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7918 ==
7919 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7920 {
7921 enum machine_mode innermode
7922 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7923 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7924 ? smul_widen_optab : umul_widen_optab);
7925 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7926 ? umul_widen_optab : smul_widen_optab);
7927 if (mode == GET_MODE_WIDER_MODE (innermode))
7928 {
7929 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7930 {
7931 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7932 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7933 TREE_OPERAND (exp, 1),
7934 NULL_RTX, &op0, &op1, 0);
7935 else
7936 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7937 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7938 NULL_RTX, &op0, &op1, 0);
7939 goto binop2;
7940 }
7941 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7942 && innermode == word_mode)
7943 {
7944 rtx htem;
7945 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7946 NULL_RTX, VOIDmode, 0);
7947 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7948 op1 = convert_modes (innermode, mode,
7949 expand_expr (TREE_OPERAND (exp, 1),
7950 NULL_RTX, VOIDmode, 0),
7951 unsignedp);
7952 else
7953 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7954 NULL_RTX, VOIDmode, 0);
7955 temp = expand_binop (mode, other_optab, op0, op1, target,
7956 unsignedp, OPTAB_LIB_WIDEN);
7957 htem = expand_mult_highpart_adjust (innermode,
7958 gen_highpart (innermode, temp),
7959 op0, op1,
7960 gen_highpart (innermode, temp),
7961 unsignedp);
7962 emit_move_insn (gen_highpart (innermode, temp), htem);
7963 return temp;
7964 }
7965 }
7966 }
7967 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7968 subtarget, &op0, &op1, 0);
7969 return expand_mult (mode, op0, op1, target, unsignedp);
7970
7971 case TRUNC_DIV_EXPR:
7972 case FLOOR_DIV_EXPR:
7973 case CEIL_DIV_EXPR:
7974 case ROUND_DIV_EXPR:
7975 case EXACT_DIV_EXPR:
7976 if (modifier == EXPAND_STACK_PARM)
7977 target = 0;
7978 /* Possible optimization: compute the dividend with EXPAND_SUM
7979 then if the divisor is constant can optimize the case
7980 where some terms of the dividend have coeffs divisible by it. */
7981 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7982 subtarget, &op0, &op1, 0);
7983 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7984
7985 case RDIV_EXPR:
7986 /* Emit a/b as a*(1/b). Later we may manage to CSE the reciprocal,
7987 saving an expensive divide. If not, combine will rebuild the
7988 original computation. */
7989 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7990 && TREE_CODE (type) == REAL_TYPE
7991 && !real_onep (TREE_OPERAND (exp, 0)))
7992 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7993 build (RDIV_EXPR, type,
7994 build_real (type, dconst1),
7995 TREE_OPERAND (exp, 1))),
7996 target, tmode, modifier);
7997 this_optab = sdiv_optab;
7998 goto binop;
7999
8000 case TRUNC_MOD_EXPR:
8001 case FLOOR_MOD_EXPR:
8002 case CEIL_MOD_EXPR:
8003 case ROUND_MOD_EXPR:
8004 if (modifier == EXPAND_STACK_PARM)
8005 target = 0;
8006 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8007 subtarget, &op0, &op1, 0);
8008 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8009
8010 case FIX_ROUND_EXPR:
8011 case FIX_FLOOR_EXPR:
8012 case FIX_CEIL_EXPR:
8013 abort (); /* Not used for C. */
8014
8015 case FIX_TRUNC_EXPR:
8016 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8017 if (target == 0 || modifier == EXPAND_STACK_PARM)
8018 target = gen_reg_rtx (mode);
8019 expand_fix (target, op0, unsignedp);
8020 return target;
8021
8022 case FLOAT_EXPR:
8023 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8024 if (target == 0 || modifier == EXPAND_STACK_PARM)
8025 target = gen_reg_rtx (mode);
8026 /* expand_float can't figure out what to do if FROM has VOIDmode.
8027 So give it the correct mode. With -O, cse will optimize this. */
8028 if (GET_MODE (op0) == VOIDmode)
8029 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8030 op0);
8031 expand_float (target, op0,
8032 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8033 return target;
8034
8035 case NEGATE_EXPR:
8036 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8037 if (modifier == EXPAND_STACK_PARM)
8038 target = 0;
8039 temp = expand_unop (mode,
8040 ! unsignedp && flag_trapv
8041 && (GET_MODE_CLASS(mode) == MODE_INT)
8042 ? negv_optab : neg_optab, op0, target, 0);
8043 if (temp == 0)
8044 abort ();
8045 return temp;
8046
8047 case ABS_EXPR:
8048 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8049 if (modifier == EXPAND_STACK_PARM)
8050 target = 0;
8051
8052 /* ABS_EXPR is not valid for complex arguments. */
8053 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8054 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8055 abort ();
8056
8057 /* Unsigned abs is simply the operand. Testing here means we don't
8058 risk generating incorrect code below. */
8059 if (TREE_UNSIGNED (type))
8060 return op0;
8061
8062 return expand_abs (mode, op0, target, unsignedp,
8063 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8064
8065 case MAX_EXPR:
8066 case MIN_EXPR:
8067 target = original_target;
8068 if (target == 0
8069 || modifier == EXPAND_STACK_PARM
8070 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8071 || GET_MODE (target) != mode
8072 || (GET_CODE (target) == REG
8073 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8074 target = gen_reg_rtx (mode);
8075 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8076 target, &op0, &op1, 0);
8077
8078 /* First try to do it with a special MIN or MAX instruction.
8079 If that does not win, use a conditional jump to select the proper
8080 value. */
8081 this_optab = (TREE_UNSIGNED (type)
8082 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8083 : (code == MIN_EXPR ? smin_optab : smax_optab));
8084
8085 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8086 OPTAB_WIDEN);
8087 if (temp != 0)
8088 return temp;
8089
8090 /* At this point, a MEM target is no longer useful; we will get better
8091 code without it. */
8092
8093 if (GET_CODE (target) == MEM)
8094 target = gen_reg_rtx (mode);
8095
8096 /* If op1 was placed in target, swap op0 and op1. */
8097 if (target != op0 && target == op1)
8098 {
8099 rtx tem = op0;
8100 op0 = op1;
8101 op1 = tem;
8102 }
8103
8104 if (target != op0)
8105 emit_move_insn (target, op0);
8106
8107 op0 = gen_label_rtx ();
8108
8109 /* If this mode is an integer too wide to compare properly,
8110 compare word by word. Rely on cse to optimize constant cases. */
8111 if (GET_MODE_CLASS (mode) == MODE_INT
8112 && ! can_compare_p (GE, mode, ccp_jump))
8113 {
8114 if (code == MAX_EXPR)
8115 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8116 target, op1, NULL_RTX, op0);
8117 else
8118 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8119 op1, target, NULL_RTX, op0);
8120 }
8121 else
8122 {
8123 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8124 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8125 unsignedp, mode, NULL_RTX, NULL_RTX,
8126 op0);
8127 }
8128 emit_move_insn (target, op1);
8129 emit_label (op0);
8130 return target;
8131
8132 case BIT_NOT_EXPR:
8133 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8134 if (modifier == EXPAND_STACK_PARM)
8135 target = 0;
8136 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8137 if (temp == 0)
8138 abort ();
8139 return temp;
8140
8141 /* ??? Can optimize bitwise operations with one arg constant.
8142 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8143 and (a bitwise1 b) bitwise2 b (etc)
8144 but that is probably not worthwhile. */
8145
8146 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8147 boolean values when we want in all cases to compute both of them. In
8148 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8149 as actual zero-or-1 values and then bitwise anding. In cases where
8150 there cannot be any side effects, better code would be made by
8151 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8152 how to recognize those cases. */
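 /* For instance (illustrative): r = (a < b) & (c < d); computes
 both comparisons as 0-or-1 values and ANDs them, whereas the
 && form (TRUTH_ANDIF_EXPR) would branch around the second
 comparison. */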
8153
8154 case TRUTH_AND_EXPR:
8155 case BIT_AND_EXPR:
8156 this_optab = and_optab;
8157 goto binop;
8158
8159 case TRUTH_OR_EXPR:
8160 case BIT_IOR_EXPR:
8161 this_optab = ior_optab;
8162 goto binop;
8163
8164 case TRUTH_XOR_EXPR:
8165 case BIT_XOR_EXPR:
8166 this_optab = xor_optab;
8167 goto binop;
8168
8169 case LSHIFT_EXPR:
8170 case RSHIFT_EXPR:
8171 case LROTATE_EXPR:
8172 case RROTATE_EXPR:
8173 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8174 subtarget = 0;
8175 if (modifier == EXPAND_STACK_PARM)
8176 target = 0;
8177 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8178 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8179 unsignedp);
8180
8181 /* Could determine the answer when only additive constants differ. Also,
8182 the addition of one can be handled by changing the condition. */
8183 case LT_EXPR:
8184 case LE_EXPR:
8185 case GT_EXPR:
8186 case GE_EXPR:
8187 case EQ_EXPR:
8188 case NE_EXPR:
8189 case UNORDERED_EXPR:
8190 case ORDERED_EXPR:
8191 case UNLT_EXPR:
8192 case UNLE_EXPR:
8193 case UNGT_EXPR:
8194 case UNGE_EXPR:
8195 case UNEQ_EXPR:
8196 temp = do_store_flag (exp,
8197 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8198 tmode != VOIDmode ? tmode : mode, 0);
8199 if (temp != 0)
8200 return temp;
8201
8202 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8203 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8204 && original_target
8205 && GET_CODE (original_target) == REG
8206 && (GET_MODE (original_target)
8207 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8208 {
8209 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8210 VOIDmode, 0);
8211
8212 /* If temp is constant, we can just compute the result. */
8213 if (GET_CODE (temp) == CONST_INT)
8214 {
8215 if (INTVAL (temp) != 0)
8216 emit_move_insn (target, const1_rtx);
8217 else
8218 emit_move_insn (target, const0_rtx);
8219
8220 return target;
8221 }
8222
8223 if (temp != original_target)
8224 {
8225 enum machine_mode mode1 = GET_MODE (temp);
8226 if (mode1 == VOIDmode)
8227 mode1 = tmode != VOIDmode ? tmode : mode;
8228
8229 temp = copy_to_mode_reg (mode1, temp);
8230 }
8231
8232 op1 = gen_label_rtx ();
8233 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8234 GET_MODE (temp), unsignedp, op1);
8235 emit_move_insn (temp, const1_rtx);
8236 emit_label (op1);
8237 return temp;
8238 }
8239
8240 /* If no set-flag instruction, must generate a conditional
8241 store into a temporary variable. Drop through
8242 and handle this like && and ||. */
8243
8244 case TRUTH_ANDIF_EXPR:
8245 case TRUTH_ORIF_EXPR:
8246 if (! ignore
8247 && (target == 0
8248 || modifier == EXPAND_STACK_PARM
8249 || ! safe_from_p (target, exp, 1)
8250 /* Make sure we don't have a hard reg (such as the function's
8251 return value) live across basic blocks, if not optimizing. */
8252 || (!optimize && GET_CODE (target) == REG
8253 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8254 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8255
8256 if (target)
8257 emit_clr_insn (target);
8258
8259 op1 = gen_label_rtx ();
8260 jumpifnot (exp, op1);
8261
8262 if (target)
8263 emit_0_to_1_insn (target);
8264
8265 emit_label (op1);
8266 return ignore ? const0_rtx : target;
8267
8268 case TRUTH_NOT_EXPR:
8269 if (modifier == EXPAND_STACK_PARM)
8270 target = 0;
8271 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8272 /* The parser is careful to generate TRUTH_NOT_EXPR
8273 only with operands that are always zero or one. */
8274 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8275 target, 1, OPTAB_LIB_WIDEN);
8276 if (temp == 0)
8277 abort ();
8278 return temp;
8279
8280 case COMPOUND_EXPR:
8281 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8282 emit_queue ();
8283 return expand_expr (TREE_OPERAND (exp, 1),
8284 (ignore ? const0_rtx : target),
8285 VOIDmode, modifier);
8286
8287 case COND_EXPR:
8288 /* If we would have a "singleton" (see below) were it not for a
8289 conversion in each arm, bring that conversion back out. */
8290 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8291 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8292 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8293 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8294 {
8295 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8296 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8297
8298 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8299 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8300 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8301 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8302 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8303 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8304 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8305 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8306 return expand_expr (build1 (NOP_EXPR, type,
8307 build (COND_EXPR, TREE_TYPE (iftrue),
8308 TREE_OPERAND (exp, 0),
8309 iftrue, iffalse)),
8310 target, tmode, modifier);
8311 }
8312
8313 {
8314 /* Note that COND_EXPRs whose type is a structure or union
8315 are required to be constructed to contain assignments of
8316 a temporary variable, so that we can evaluate them here
8317 for side effect only. If type is void, we must do likewise. */
8318
8319 /* If an arm of the branch requires a cleanup,
8320 only that cleanup is performed. */
8321
8322 tree singleton = 0;
8323 tree binary_op = 0, unary_op = 0;
8324
8325 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8326 convert it to our mode, if necessary. */
8327 if (integer_onep (TREE_OPERAND (exp, 1))
8328 && integer_zerop (TREE_OPERAND (exp, 2))
8329 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8330 {
8331 if (ignore)
8332 {
8333 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8334 modifier);
8335 return const0_rtx;
8336 }
8337
8338 if (modifier == EXPAND_STACK_PARM)
8339 target = 0;
8340 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8341 if (GET_MODE (op0) == mode)
8342 return op0;
8343
8344 if (target == 0)
8345 target = gen_reg_rtx (mode);
8346 convert_move (target, op0, unsignedp);
8347 return target;
8348 }
8349
8350 /* Check for X ? A + B : A. If we have this, we can copy A to the
8351 output and conditionally add B. Similarly for unary operations.
8352 Don't do this if X has side-effects because those side effects
8353 might affect A or B and the "?" operation is a sequence point in
8354 ANSI. (operand_equal_p tests for side effects.) */
8355
8356 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8357 && operand_equal_p (TREE_OPERAND (exp, 2),
8358 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8359 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8360 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8361 && operand_equal_p (TREE_OPERAND (exp, 1),
8362 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8363 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8364 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8365 && operand_equal_p (TREE_OPERAND (exp, 2),
8366 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8367 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8368 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8369 && operand_equal_p (TREE_OPERAND (exp, 1),
8370 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8371 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8372
8373 /* If we are not to produce a result, we have no target. Otherwise,
8374 if a target was specified use it; it will not be used as an
8375 intermediate target unless it is safe. If no target, use a
8376 temporary. */
8377
8378 if (ignore)
8379 temp = 0;
8380 else if (modifier == EXPAND_STACK_PARM)
8381 temp = assign_temp (type, 0, 0, 1);
8382 else if (original_target
8383 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8384 || (singleton && GET_CODE (original_target) == REG
8385 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8386 && original_target == var_rtx (singleton)))
8387 && GET_MODE (original_target) == mode
8388 #ifdef HAVE_conditional_move
8389 && (! can_conditionally_move_p (mode)
8390 || GET_CODE (original_target) == REG
8391 || TREE_ADDRESSABLE (type))
8392 #endif
8393 && (GET_CODE (original_target) != MEM
8394 || TREE_ADDRESSABLE (type)))
8395 temp = original_target;
8396 else if (TREE_ADDRESSABLE (type))
8397 abort ();
8398 else
8399 temp = assign_temp (type, 0, 0, 1);
8400
8401 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8402 do the test of X as a store-flag operation, do this as
8403 A + ((X != 0) << log C). Similarly for other simple binary
8404 operators. Only do this for C == 1 if BRANCH_COST is low. */
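 /* Illustrative transformation: with C a power of 2,

 y = x ? a + 4 : a;

 can become y = a + ((x != 0) << 2), trading the branch for a
 store-flag and a shift. */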
8405 if (temp && singleton && binary_op
8406 && (TREE_CODE (binary_op) == PLUS_EXPR
8407 || TREE_CODE (binary_op) == MINUS_EXPR
8408 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8409 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8410 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8411 : integer_onep (TREE_OPERAND (binary_op, 1)))
8412 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8413 {
8414 rtx result;
8415 tree cond;
8416 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8417 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8418 ? addv_optab : add_optab)
8419 : TREE_CODE (binary_op) == MINUS_EXPR
8420 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8421 ? subv_optab : sub_optab)
8422 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8423 : xor_optab);
8424
8425 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8426 if (singleton == TREE_OPERAND (exp, 1))
8427 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8428 else
8429 cond = TREE_OPERAND (exp, 0);
8430
8431 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8432 ? temp : NULL_RTX),
8433 mode, BRANCH_COST <= 1);
8434
8435 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8436 result = expand_shift (LSHIFT_EXPR, mode, result,
8437 build_int_2 (tree_log2
8438 (TREE_OPERAND
8439 (binary_op, 1)),
8440 0),
8441 (safe_from_p (temp, singleton, 1)
8442 ? temp : NULL_RTX), 0);
8443
8444 if (result)
8445 {
8446 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8447 return expand_binop (mode, boptab, op1, result, temp,
8448 unsignedp, OPTAB_LIB_WIDEN);
8449 }
8450 }
8451
8452 do_pending_stack_adjust ();
8453 NO_DEFER_POP;
8454 op0 = gen_label_rtx ();
8455
8456 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8457 {
8458 if (temp != 0)
8459 {
8460 /* If the target conflicts with the other operand of the
8461 binary op, we can't use it. Also, we can't use the target
8462 if it is a hard register, because evaluating the condition
8463 might clobber it. */
8464 if ((binary_op
8465 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8466 || (GET_CODE (temp) == REG
8467 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8468 temp = gen_reg_rtx (mode);
8469 store_expr (singleton, temp,
8470 modifier == EXPAND_STACK_PARM ? 2 : 0);
8471 }
8472 else
8473 expand_expr (singleton,
8474 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8475 if (singleton == TREE_OPERAND (exp, 1))
8476 jumpif (TREE_OPERAND (exp, 0), op0);
8477 else
8478 jumpifnot (TREE_OPERAND (exp, 0), op0);
8479
8480 start_cleanup_deferral ();
8481 if (binary_op && temp == 0)
8482 /* Just touch the other operand. */
8483 expand_expr (TREE_OPERAND (binary_op, 1),
8484 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8485 else if (binary_op)
8486 store_expr (build (TREE_CODE (binary_op), type,
8487 make_tree (type, temp),
8488 TREE_OPERAND (binary_op, 1)),
8489 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8490 else
8491 store_expr (build1 (TREE_CODE (unary_op), type,
8492 make_tree (type, temp)),
8493 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8494 op1 = op0;
8495 }
8496 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8497 comparison operator. If we have one of these cases, set the
8498 output to A, branch on A (cse will merge these two references),
8499 then set the output to FOO. */
8500 else if (temp
8501 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8502 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8503 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8504 TREE_OPERAND (exp, 1), 0)
8505 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8506 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8507 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8508 {
8509 if (GET_CODE (temp) == REG
8510 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8511 temp = gen_reg_rtx (mode);
8512 store_expr (TREE_OPERAND (exp, 1), temp,
8513 modifier == EXPAND_STACK_PARM ? 2 : 0);
8514 jumpif (TREE_OPERAND (exp, 0), op0);
8515
8516 start_cleanup_deferral ();
8517 if (TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8518 store_expr (TREE_OPERAND (exp, 2), temp,
8519 modifier == EXPAND_STACK_PARM ? 2 : 0);
8520 else
8521 expand_expr (TREE_OPERAND (exp, 2),
8522 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8523 op1 = op0;
8524 }
8525 else if (temp
8526 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8527 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8528 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8529 TREE_OPERAND (exp, 2), 0)
8530 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8531 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8532 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8533 {
8534 if (GET_CODE (temp) == REG
8535 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8536 temp = gen_reg_rtx (mode);
8537 store_expr (TREE_OPERAND (exp, 2), temp,
8538 modifier == EXPAND_STACK_PARM ? 2 : 0);
8539 jumpifnot (TREE_OPERAND (exp, 0), op0);
8540
8541 start_cleanup_deferral ();
8542 if (TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8543 store_expr (TREE_OPERAND (exp, 1), temp,
8544 modifier == EXPAND_STACK_PARM ? 2 : 0);
8545 else
8546 expand_expr (TREE_OPERAND (exp, 1),
8547 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8548 op1 = op0;
8549 }
8550 else
8551 {
8552 op1 = gen_label_rtx ();
8553 jumpifnot (TREE_OPERAND (exp, 0), op0);
8554
8555 start_cleanup_deferral ();
8556
8557 /* One branch of the cond can be void, if it never returns. For
8558 example, A ? throw : E. */
8559 if (temp != 0
8560 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8561 store_expr (TREE_OPERAND (exp, 1), temp,
8562 modifier == EXPAND_STACK_PARM ? 2 : 0);
8563 else
8564 expand_expr (TREE_OPERAND (exp, 1),
8565 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8566 end_cleanup_deferral ();
8567 emit_queue ();
8568 emit_jump_insn (gen_jump (op1));
8569 emit_barrier ();
8570 emit_label (op0);
8571 start_cleanup_deferral ();
8572 if (temp != 0
8573 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8574 store_expr (TREE_OPERAND (exp, 2), temp,
8575 modifier == EXPAND_STACK_PARM ? 2 : 0);
8576 else
8577 expand_expr (TREE_OPERAND (exp, 2),
8578 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8579 }
8580
8581 end_cleanup_deferral ();
8582
8583 emit_queue ();
8584 emit_label (op1);
8585 OK_DEFER_POP;
8586
8587 return temp;
8588 }
8589
8590 case TARGET_EXPR:
8591 {
8592 /* Something needs to be initialized, but we didn't know
8593 where that thing was when building the tree. For example,
8594 it could be the return value of a function, or a parameter
8595 to a function which is laid out on the stack, or a temporary
8596 variable which must be passed by reference.
8597
8598 We guarantee that the expression will either be constructed
8599 or copied into our original target. */
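/* For example, in C++ (an illustrative fragment; the exact trees
   depend on the front end):

     struct S f ();
     struct S s = f ();

   may be represented as a TARGET_EXPR whose slot is S, so that F
   constructs its return value directly into S's storage.  */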
8600
8601 tree slot = TREE_OPERAND (exp, 0);
8602 tree cleanups = NULL_TREE;
8603 tree exp1;
8604
8605 if (TREE_CODE (slot) != VAR_DECL)
8606 abort ();
8607
8608 if (! ignore)
8609 target = original_target;
8610
8611 /* Set this here so that if we get a target that refers to a
8612 register variable that's already been used, put_reg_into_stack
8613 knows that it should fix up those uses. */
8614 TREE_USED (slot) = 1;
8615
8616 if (target == 0)
8617 {
8618 if (DECL_RTL_SET_P (slot))
8619 {
8620 target = DECL_RTL (slot);
8621 /* If we have already expanded the slot, don't do
8622 it again. (mrs) */
8623 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8624 return target;
8625 }
8626 else
8627 {
8628 target = assign_temp (type, 2, 0, 1);
8629 /* All temp slots at this level must not conflict. */
8630 preserve_temp_slots (target);
8631 SET_DECL_RTL (slot, target);
8632 if (TREE_ADDRESSABLE (slot))
8633 put_var_into_stack (slot, /*rescan=*/false);
8634
8635 /* Since SLOT is not known to the called function
8636 to belong to its stack frame, we must build an explicit
8637 cleanup. This case occurs when we must build up a reference
8638 to pass as an argument. In this case,
8639 it is very likely that such a reference need not be
8640 built here. */
8641
8642 if (TREE_OPERAND (exp, 2) == 0)
8643 TREE_OPERAND (exp, 2)
8644 = (*lang_hooks.maybe_build_cleanup) (slot);
8645 cleanups = TREE_OPERAND (exp, 2);
8646 }
8647 }
8648 else
8649 {
8650 /* This case does occur, when expanding a parameter which
8651 needs to be constructed on the stack. The target
8652 is the actual stack address that we want to initialize.
8653 The function we call will perform the cleanup in this case. */
8654
8655 /* If we have already assigned it space, use that space,
8656 not target that we were passed in, as our target
8657 parameter is only a hint. */
8658 if (DECL_RTL_SET_P (slot))
8659 {
8660 target = DECL_RTL (slot);
8661 /* If we have already expanded the slot, don't do
8662 it again. (mrs) */
8663 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8664 return target;
8665 }
8666 else
8667 {
8668 SET_DECL_RTL (slot, target);
8669 /* If we must have an addressable slot, then make sure that
8670 the RTL that we just stored in slot is OK. */
8671 if (TREE_ADDRESSABLE (slot))
8672 put_var_into_stack (slot, /*rescan=*/true);
8673 }
8674 }
8675
8676 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8677 /* Mark it as expanded. */
8678 TREE_OPERAND (exp, 1) = NULL_TREE;
8679
8680 store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
8681
8682 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
8683
8684 return target;
8685 }
8686
8687 case INIT_EXPR:
8688 {
8689 tree lhs = TREE_OPERAND (exp, 0);
8690 tree rhs = TREE_OPERAND (exp, 1);
8691
8692 temp = expand_assignment (lhs, rhs, ! ignore);
8693 return temp;
8694 }
8695
8696 case MODIFY_EXPR:
8697 {
8698 /* If lhs is complex, expand calls in rhs before computing it.
8699 That's so we don't compute a pointer and save it over a
8700 call. If lhs is simple, compute it first so we can give it
8701 as a target if the rhs is just a call. This avoids an
8702 extra temp and copy and that prevents a partial-subsumption
8703 which makes bad code. Actually we could treat
8704 component_ref's of vars like vars. */
8705
8706 tree lhs = TREE_OPERAND (exp, 0);
8707 tree rhs = TREE_OPERAND (exp, 1);
8708
8709 temp = 0;
8710
8711 /* Check for |= or &= of a bitfield of size one into another bitfield
8712 of size 1. In this case, (unless we need the result of the
8713 assignment) we can do this more efficiently with a
8714 test followed by an assignment, if necessary.
8715
8716 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8717 things change so we do, this code should be enhanced to
8718 support it. */
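/* Concretely (a hypothetical source fragment):

     struct { unsigned a : 1; } x;
     struct { unsigned b : 1; } y;

     x.a |= y.b;

   can be emitted as "if (y.b) x.a = 1;" when the result of the
   assignment itself is unused, avoiding a read-modify-write of
   the destination bitfield.  */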
8719 if (ignore
8720 && TREE_CODE (lhs) == COMPONENT_REF
8721 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8722 || TREE_CODE (rhs) == BIT_AND_EXPR)
8723 && TREE_OPERAND (rhs, 0) == lhs
8724 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8725 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8726 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8727 {
8728 rtx label = gen_label_rtx ();
8729
8730 do_jump (TREE_OPERAND (rhs, 1),
8731 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8732 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8733 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8734 (TREE_CODE (rhs) == BIT_IOR_EXPR
8735 ? integer_one_node
8736 : integer_zero_node)),
8737 0);
8738 do_pending_stack_adjust ();
8739 emit_label (label);
8740 return const0_rtx;
8741 }
8742
8743 temp = expand_assignment (lhs, rhs, ! ignore);
8744
8745 return temp;
8746 }
8747
8748 case RETURN_EXPR:
8749 if (!TREE_OPERAND (exp, 0))
8750 expand_null_return ();
8751 else
8752 expand_return (TREE_OPERAND (exp, 0));
8753 return const0_rtx;
8754
8755 case PREINCREMENT_EXPR:
8756 case PREDECREMENT_EXPR:
8757 return expand_increment (exp, 0, ignore);
8758
8759 case POSTINCREMENT_EXPR:
8760 case POSTDECREMENT_EXPR:
8761 /* Faster to treat as pre-increment if result is not used. */
8762 return expand_increment (exp, ! ignore, ignore);
8763
8764 case ADDR_EXPR:
8765 if (modifier == EXPAND_STACK_PARM)
8766 target = 0;
8767 /* Are we taking the address of a nested function? */
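/* An illustrative GNU C fragment:

     int f (int x)
     {
       int g (int y) { return x + y; }
       int (*p) (int) = g;
       return p (1);
     }

   G needs a static chain to reach X, so taking its address yields
   the address of a trampoline built at run time that loads the
   chain and then jumps to G.  */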
8768 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8769 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8770 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8771 && ! TREE_STATIC (exp))
8772 {
8773 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8774 op0 = force_operand (op0, target);
8775 }
8776 /* If we are taking the address of something erroneous, just
8777 return a zero. */
8778 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8779 return const0_rtx;
8780 /* If we are taking the address of a constant and are at the
8781 top level, we have to use output_constant_def since we can't
8782 call force_const_mem at top level. */
8783 else if (cfun == 0
8784 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8785 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8786 == 'c')))
8787 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8788 else
8789 {
8790 /* We make sure to pass const0_rtx down if we came in with
8791 ignore set, to avoid doing the cleanups twice for the same expression. */
8792 op0 = expand_expr (TREE_OPERAND (exp, 0),
8793 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8794 (modifier == EXPAND_INITIALIZER
8795 ? modifier : EXPAND_CONST_ADDRESS));
8796
8797 /* If we are going to ignore the result, OP0 will have been set
8798 to const0_rtx, so just return it. Don't get confused and
8799 think we are taking the address of the constant. */
8800 if (ignore)
8801 return op0;
8802
8803 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8804 clever and return a REG when given a MEM.
8805 op0 = protect_from_queue (op0, 1);
8806
8807 /* We would like the object in memory. If it is a constant, we can
8808 have it be statically allocated into memory. For a non-constant,
8809 we need to allocate some memory and store the value into it. */
8810
8811 if (CONSTANT_P (op0))
8812 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8813 op0);
8814 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8815 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8816 || GET_CODE (op0) == PARALLEL || GET_CODE (op0) == LO_SUM)
8817 {
8818 /* If the operand is a SAVE_EXPR, we can deal with this by
8819 forcing the SAVE_EXPR into memory. */
8820 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
8821 {
8822 put_var_into_stack (TREE_OPERAND (exp, 0),
8823 /*rescan=*/true);
8824 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
8825 }
8826 else
8827 {
8828 /* If this object is in a register, it can't be BLKmode. */
8829 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8830 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8831
8832 if (GET_CODE (op0) == PARALLEL)
8833 /* Handle calls that pass values in multiple
8834 non-contiguous locations. The Irix 6 ABI has examples
8835 of this. */
8836 emit_group_store (memloc, op0, inner_type,
8837 int_size_in_bytes (inner_type));
8838 else
8839 emit_move_insn (memloc, op0);
8840
8841 op0 = memloc;
8842 }
8843 }
8844
8845 if (GET_CODE (op0) != MEM)
8846 abort ();
8847
8848 mark_temp_addr_taken (op0);
8849 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8850 {
8851 op0 = XEXP (op0, 0);
8852 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
8853 op0 = convert_memory_address (ptr_mode, op0);
8854 return op0;
8855 }
8856
8857 /* If OP0 is not aligned at least as much as the type requires, we
8858 need to make a temporary, copy OP0 to it, and take the address of
8859 the temporary. We want to use the alignment of the type, not of
8860 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8861 the test for BLKmode means that can't happen. The test for
8862 BLKmode is because we never make mis-aligned MEMs with
8863 non-BLKmode.
8864
8865 We don't need to do this at all if the machine doesn't have
8866 strict alignment. */
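/* E.g. on a strict-alignment target (an illustrative fragment,
   in the spirit of PR middle-end/7847):

     struct s { char c; int i; } __attribute__ ((packed)) p;
     const int *q = &p.i;

   P.I may be under-aligned for int, so we copy it to an aligned
   temporary and take the temporary's address; if its type is
   TREE_ADDRESSABLE, a bitwise copy is invalid and we error out
   below instead.  */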
8867 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8868 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8869 > MEM_ALIGN (op0))
8870 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8871 {
8872 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8873 rtx new;
8874
8875 if (TYPE_ALIGN_OK (inner_type))
8876 abort ();
8877
8878 if (TREE_ADDRESSABLE (inner_type))
8879 {
8880 /* We can't make a bitwise copy of this object, so fail. */
8881 error ("cannot take the address of an unaligned member");
8882 return const0_rtx;
8883 }
8884
8885 new = assign_stack_temp_for_type
8886 (TYPE_MODE (inner_type),
8887 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8888 : int_size_in_bytes (inner_type),
8889 1, build_qualified_type (inner_type,
8890 (TYPE_QUALS (inner_type)
8891 | TYPE_QUAL_CONST)));
8892
8893 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
8894 (modifier == EXPAND_STACK_PARM
8895 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
8896
8897 op0 = new;
8898 }
8899
8900 op0 = force_operand (XEXP (op0, 0), target);
8901 }
8902
8903 if (flag_force_addr
8904 && GET_CODE (op0) != REG
8905 && modifier != EXPAND_CONST_ADDRESS
8906 && modifier != EXPAND_INITIALIZER
8907 && modifier != EXPAND_SUM)
8908 op0 = force_reg (Pmode, op0);
8909
8910 if (GET_CODE (op0) == REG
8911 && ! REG_USERVAR_P (op0))
8912 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8913
8914 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
8915 op0 = convert_memory_address (ptr_mode, op0);
8916
8917 return op0;
8918
8919 case ENTRY_VALUE_EXPR:
8920 abort ();
8921
8922 /* COMPLEX type for Extended Pascal & Fortran */
8923 case COMPLEX_EXPR:
8924 {
8925 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8926 rtx insns;
8927
8928 /* Get the rtx code of the operands. */
8929 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8930 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8931
8932 if (! target)
8933 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8934
8935 start_sequence ();
8936
8937 /* Move the real (op0) and imaginary (op1) parts to their location. */
8938 emit_move_insn (gen_realpart (mode, target), op0);
8939 emit_move_insn (gen_imagpart (mode, target), op1);
8940
8941 insns = get_insns ();
8942 end_sequence ();
8943
8944 /* Complex construction should appear as a single unit. */
8945 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8946 each with a separate pseudo as destination.
8947 It's not correct for flow to treat them as a unit. */
8948 if (GET_CODE (target) != CONCAT)
8949 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8950 else
8951 emit_insn (insns);
8952
8953 return target;
8954 }
8955
8956 case REALPART_EXPR:
8957 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8958 return gen_realpart (mode, op0);
8959
8960 case IMAGPART_EXPR:
8961 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8962 return gen_imagpart (mode, op0);
8963
8964 case CONJ_EXPR:
8965 {
8966 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8967 rtx imag_t;
8968 rtx insns;
8969
8970 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8971
8972 if (! target)
8973 target = gen_reg_rtx (mode);
8974
8975 start_sequence ();
8976
8977 /* Store the realpart and the negated imagpart to target. */
8978 emit_move_insn (gen_realpart (partmode, target),
8979 gen_realpart (partmode, op0));
8980
8981 imag_t = gen_imagpart (partmode, target);
8982 temp = expand_unop (partmode,
8983 ! unsignedp && flag_trapv
8984 && (GET_MODE_CLASS(partmode) == MODE_INT)
8985 ? negv_optab : neg_optab,
8986 gen_imagpart (partmode, op0), imag_t, 0);
8987 if (temp != imag_t)
8988 emit_move_insn (imag_t, temp);
8989
8990 insns = get_insns ();
8991 end_sequence ();
8992
8993 /* Conjugate should appear as a single unit.
8994 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8995 each with a separate pseudo as destination.
8996 It's not correct for flow to treat them as a unit. */
8997 if (GET_CODE (target) != CONCAT)
8998 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8999 else
9000 emit_insn (insns);
9001
9002 return target;
9003 }
9004
9005 case TRY_CATCH_EXPR:
9006 {
9007 tree handler = TREE_OPERAND (exp, 1);
9008
9009 expand_eh_region_start ();
9010
9011 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9012
9013 expand_eh_region_end_cleanup (handler);
9014
9015 return op0;
9016 }
9017
9018 case TRY_FINALLY_EXPR:
9019 {
9020 tree try_block = TREE_OPERAND (exp, 0);
9021 tree finally_block = TREE_OPERAND (exp, 1);
9022
9023 if (!optimize || unsafe_for_reeval (finally_block) > 1)
9024 {
9025 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
9026 is not sufficient, so we cannot expand the block twice.
9027 So we play games with GOTO_SUBROUTINE_EXPR to let us
9028 expand the thing only once. */
9029 /* When not optimizing, we go ahead with this form since
9030 (1) user breakpoints operate more predictably without
9031 code duplication, and
9032 (2) we're not running any of the global optimizers
9033 that would explode in time/space with the highly
9034 connected CFG created by the indirect branching. */
9035
9036 rtx finally_label = gen_label_rtx ();
9037 rtx done_label = gen_label_rtx ();
9038 rtx return_link = gen_reg_rtx (Pmode);
9039 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
9040 (tree) finally_label, (tree) return_link);
9041 TREE_SIDE_EFFECTS (cleanup) = 1;
9042
9043 /* Start a new binding layer that will keep track of all cleanup
9044 actions to be performed. */
9045 expand_start_bindings (2);
9046 target_temp_slot_level = temp_slot_level;
9047
9048 expand_decl_cleanup (NULL_TREE, cleanup);
9049 op0 = expand_expr (try_block, target, tmode, modifier);
9050
9051 preserve_temp_slots (op0);
9052 expand_end_bindings (NULL_TREE, 0, 0);
9053 emit_jump (done_label);
9054 emit_label (finally_label);
9055 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
9056 emit_indirect_jump (return_link);
9057 emit_label (done_label);
9058 }
9059 else
9060 {
9061 expand_start_bindings (2);
9062 target_temp_slot_level = temp_slot_level;
9063
9064 expand_decl_cleanup (NULL_TREE, finally_block);
9065 op0 = expand_expr (try_block, target, tmode, modifier);
9066
9067 preserve_temp_slots (op0);
9068 expand_end_bindings (NULL_TREE, 0, 0);
9069 }
9070
9071 return op0;
9072 }
9073
9074 case GOTO_SUBROUTINE_EXPR:
9075 {
9076 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9077 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9078 rtx return_address = gen_label_rtx ();
9079 emit_move_insn (return_link,
9080 gen_rtx_LABEL_REF (Pmode, return_address));
9081 emit_jump (subr);
9082 emit_label (return_address);
9083 return const0_rtx;
9084 }
9085
9086 case VA_ARG_EXPR:
9087 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9088
9089 case EXC_PTR_EXPR:
9090 return get_exception_pointer (cfun);
9091
9092 case FDESC_EXPR:
9093 /* Function descriptors are not valid except for as
9094 initialization constants, and should not be expanded. */
9095 abort ();
9096
9097 default:
9098 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
9099 }
9100
9101 /* Here to do an ordinary binary operator, generating an instruction
9102 from the optab already placed in `this_optab'. */
9103 binop:
9104 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9105 subtarget, &op0, &op1, 0);
9106 binop2:
9107 if (modifier == EXPAND_STACK_PARM)
9108 target = 0;
9109 temp = expand_binop (mode, this_optab, op0, op1, target,
9110 unsignedp, OPTAB_LIB_WIDEN);
9111 if (temp == 0)
9112 abort ();
9113 return temp;
9114 }
9115 \f
9116 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that,
9117 when applied to the address of EXP, produces an address known to be
9118 aligned to more than BIGGEST_ALIGNMENT. */
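/* In source terms the pattern recognized is roughly

     offset = (-(uintptr_t) &exp) & (ALIGN - 1)

   where ALIGN is a power of 2 greater than BIGGEST_ALIGNMENT:
   adding such an offset to EXP's address rounds it up to a
   multiple of ALIGN.  (Sketch only; ALIGN is illustrative.)  */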
9119
9120 static int
9121 is_aligning_offset (tree offset, tree exp)
9122 {
9123 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9124 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9125 || TREE_CODE (offset) == NOP_EXPR
9126 || TREE_CODE (offset) == CONVERT_EXPR
9127 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9128 offset = TREE_OPERAND (offset, 0);
9129
9130 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9131 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9132 if (TREE_CODE (offset) != BIT_AND_EXPR
9133 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9134 || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
9135 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9136 return 0;
9137
9138 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9139 It must be NEGATE_EXPR. Then strip any more conversions. */
9140 offset = TREE_OPERAND (offset, 0);
9141 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9142 || TREE_CODE (offset) == NOP_EXPR
9143 || TREE_CODE (offset) == CONVERT_EXPR)
9144 offset = TREE_OPERAND (offset, 0);
9145
9146 if (TREE_CODE (offset) != NEGATE_EXPR)
9147 return 0;
9148
9149 offset = TREE_OPERAND (offset, 0);
9150 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9151 || TREE_CODE (offset) == NOP_EXPR
9152 || TREE_CODE (offset) == CONVERT_EXPR)
9153 offset = TREE_OPERAND (offset, 0);
9154
9155 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9156 whose type is the same as EXP. */
9157 return (TREE_CODE (offset) == ADDR_EXPR
9158 && (TREE_OPERAND (offset, 0) == exp
9159 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9160 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9161 == TREE_TYPE (exp)))));
9162 }
9163 \f
9164 /* Return the tree node if ARG corresponds to a string constant, or zero
9165 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9166 in bytes within the string that ARG is accessing. The type of the
9167 offset will be `sizetype'. */
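/* For example, for the argument of strlen ("hello" + 2) we return
   the STRING_CST "hello" and set *PTR_OFFSET to (sizetype) 2.  */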
9168
9169 tree
9170 string_constant (tree arg, tree *ptr_offset)
9171 {
9172 STRIP_NOPS (arg);
9173
9174 if (TREE_CODE (arg) == ADDR_EXPR
9175 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9176 {
9177 *ptr_offset = size_zero_node;
9178 return TREE_OPERAND (arg, 0);
9179 }
9180 else if (TREE_CODE (arg) == PLUS_EXPR)
9181 {
9182 tree arg0 = TREE_OPERAND (arg, 0);
9183 tree arg1 = TREE_OPERAND (arg, 1);
9184
9185 STRIP_NOPS (arg0);
9186 STRIP_NOPS (arg1);
9187
9188 if (TREE_CODE (arg0) == ADDR_EXPR
9189 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9190 {
9191 *ptr_offset = convert (sizetype, arg1);
9192 return TREE_OPERAND (arg0, 0);
9193 }
9194 else if (TREE_CODE (arg1) == ADDR_EXPR
9195 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9196 {
9197 *ptr_offset = convert (sizetype, arg0);
9198 return TREE_OPERAND (arg1, 0);
9199 }
9200 }
9201
9202 return 0;
9203 }
9204 \f
9205 /* Expand code for a post- or pre- increment or decrement
9206 and return the RTX for the result.
9207 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9208
9209 static rtx
9210 expand_increment (tree exp, int post, int ignore)
9211 {
9212 rtx op0, op1;
9213 rtx temp, value;
9214 tree incremented = TREE_OPERAND (exp, 0);
9215 optab this_optab = add_optab;
9216 int icode;
9217 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9218 int op0_is_copy = 0;
9219 int single_insn = 0;
9220 /* 1 means we can't store into OP0 directly,
9221 because it is a subreg narrower than a word,
9222 and we don't dare clobber the rest of the word. */
9223 int bad_subreg = 0;
9224
9225 /* Stabilize any component ref that might need to be
9226 evaluated more than once below. */
9227 if (!post
9228 || TREE_CODE (incremented) == BIT_FIELD_REF
9229 || (TREE_CODE (incremented) == COMPONENT_REF
9230 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9231 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9232 incremented = stabilize_reference (incremented);
9233 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9234 ones into save exprs so that they don't accidentally get evaluated
9235 more than once by the code below. */
9236 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9237 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9238 incremented = save_expr (incremented);
9239
9240 /* Compute the operands as RTX.
9241 Note whether OP0 is the actual lvalue or a copy of it:
9242 I believe it is a copy iff it is a register or subreg
9243 and insns were generated in computing it. */
9244
9245 temp = get_last_insn ();
9246 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9247
9248 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9249 in place but instead must do sign- or zero-extension during assignment,
9250 so we copy it into a new register and let the code below use it as
9251 a copy.
9252
9253 Note that we can safely modify this SUBREG since it is known not to be
9254 shared (it was made by the expand_expr call above). */
9255
9256 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9257 {
9258 if (post)
9259 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9260 else
9261 bad_subreg = 1;
9262 }
9263 else if (GET_CODE (op0) == SUBREG
9264 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9265 {
9266 /* We cannot increment this SUBREG in place. If we are
9267 post-incrementing, get a copy of the old value. Otherwise,
9268 just mark that we cannot increment in place. */
9269 if (post)
9270 op0 = copy_to_reg (op0);
9271 else
9272 bad_subreg = 1;
9273 }
9274
9275 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9276 && temp != get_last_insn ());
9277 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9278
9279 /* Decide whether incrementing or decrementing. */
9280 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9281 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9282 this_optab = sub_optab;
9283
9284 /* Convert decrement by a constant into a negative increment. */
9285 if (this_optab == sub_optab
9286 && GET_CODE (op1) == CONST_INT)
9287 {
9288 op1 = GEN_INT (-INTVAL (op1));
9289 this_optab = add_optab;
9290 }
9291
9292 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9293 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9294
9295 /* For a preincrement, see if we can do this with a single instruction. */
9296 if (!post)
9297 {
9298 icode = (int) this_optab->handlers[(int) mode].insn_code;
9299 if (icode != (int) CODE_FOR_nothing
9300 /* Make sure that OP0 is valid for operands 0 and 1
9301 of the insn we want to queue. */
9302 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9303 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9304 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9305 single_insn = 1;
9306 }
9307
9308 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9309 then we cannot just increment OP0. We must therefore contrive to
9310 increment the original value. Then, for postincrement, we can return
9311 OP0 since it is a copy of the old value. For preincrement, expand here
9312 unless we can do it with a single insn.
9313
9314 Likewise if storing directly into OP0 would clobber high bits
9315 we need to preserve (bad_subreg). */
9316 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9317 {
9318 /* This is the easiest way to increment the value wherever it is.
9319 Problems with multiple evaluation of INCREMENTED are prevented
9320 because either (1) it is a component_ref or preincrement,
9321 in which case it was stabilized above, or (2) it is an array_ref
9322 with constant index in an array in a register, which is
9323 safe to reevaluate. */
9324 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9325 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9326 ? MINUS_EXPR : PLUS_EXPR),
9327 TREE_TYPE (exp),
9328 incremented,
9329 TREE_OPERAND (exp, 1));
9330
9331 while (TREE_CODE (incremented) == NOP_EXPR
9332 || TREE_CODE (incremented) == CONVERT_EXPR)
9333 {
9334 newexp = convert (TREE_TYPE (incremented), newexp);
9335 incremented = TREE_OPERAND (incremented, 0);
9336 }
9337
9338 temp = expand_assignment (incremented, newexp, ! post && ! ignore);
9339 return post ? op0 : temp;
9340 }
9341
9342 if (post)
9343 {
9344 /* We have a true reference to the value in OP0.
9345 If there is an insn to add or subtract in this mode, queue it.
9346 Queuing the increment insn avoids the register shuffling
9347 that often results if we must increment now and first save
9348 the old value for subsequent use. */
9349
9350 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9351 op0 = stabilize (op0);
9352 #endif
9353
9354 icode = (int) this_optab->handlers[(int) mode].insn_code;
9355 if (icode != (int) CODE_FOR_nothing
9356 /* Make sure that OP0 is valid for operands 0 and 1
9357 of the insn we want to queue. */
9358 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9359 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9360 {
9361 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9362 op1 = force_reg (mode, op1);
9363
9364 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9365 }
9366 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9367 {
9368 rtx addr = (general_operand (XEXP (op0, 0), mode)
9369 ? force_reg (Pmode, XEXP (op0, 0))
9370 : copy_to_reg (XEXP (op0, 0)));
9371 rtx temp, result;
9372
9373 op0 = replace_equiv_address (op0, addr);
9374 temp = force_reg (GET_MODE (op0), op0);
9375 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9376 op1 = force_reg (mode, op1);
9377
9378 /* The increment queue is LIFO, thus we have to `queue'
9379 the instructions in reverse order. */
9380 enqueue_insn (op0, gen_move_insn (op0, temp));
9381 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9382 return result;
9383 }
9384 }
9385
9386 /* Preincrement, or we can't increment with one simple insn. */
9387 if (post)
9388 /* Save a copy of the value before inc or dec, to return it later. */
9389 temp = value = copy_to_reg (op0);
9390 else
9391 /* Arrange to return the incremented value. */
9392 /* Copy the rtx because expand_binop will protect from the queue,
9393 and the results of that would be invalid for us to return
9394 if our caller does emit_queue before using our result. */
9395 temp = copy_rtx (value = op0);
9396
9397 /* Increment however we can. */
9398 op1 = expand_binop (mode, this_optab, value, op1, op0,
9399 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9400
9401 /* Make sure the value is stored into OP0. */
9402 if (op1 != op0)
9403 emit_move_insn (op0, op1);
9404
9405 return temp;
9406 }
9407 \f
9408 /* Generate code to calculate EXP using a store-flag instruction
9409 and return an rtx for the result. EXP is either a comparison
9410 or a TRUTH_NOT_EXPR whose operand is a comparison.
9411
9412 If TARGET is nonzero, store the result there if convenient.
9413
9414 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9415 cheap.
9416
9417 Return zero if there is no suitable set-flag instruction
9418 available on this machine.
9419
9420 Once expand_expr has been called on the arguments of the comparison,
9421 we are committed to doing the store flag, since it is not safe to
9422 re-evaluate the expression. We emit the store-flag insn by calling
9423 emit_store_flag, but only expand the arguments if we have a reason
9424 to believe that emit_store_flag will be successful. If we think that
9425 it will, but it isn't, we have to simulate the store-flag with a
9426 set/jump/set sequence. */
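/* For example, on a machine with an SImode store-flag ("seq") insn,

     int f (int a) { return a == 0; }

   can be compiled to a single insn materializing 0 or 1, with no
   branches.  (Illustrative; availability is per-target.)  */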
9427
9428 static rtx
9429 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
9430 {
9431 enum rtx_code code;
9432 tree arg0, arg1, type;
9433 tree tem;
9434 enum machine_mode operand_mode;
9435 int invert = 0;
9436 int unsignedp;
9437 rtx op0, op1;
9438 enum insn_code icode;
9439 rtx subtarget = target;
9440 rtx result, label;
9441
9442 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9443 result at the end. We can't simply invert the test since it would
9444 have already been inverted if it were valid. This case occurs for
9445 some floating-point comparisons. */
9446
9447 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9448 invert = 1, exp = TREE_OPERAND (exp, 0);
9449
9450 arg0 = TREE_OPERAND (exp, 0);
9451 arg1 = TREE_OPERAND (exp, 1);
9452
9453 /* Don't crash if the comparison was erroneous. */
9454 if (arg0 == error_mark_node || arg1 == error_mark_node)
9455 return const0_rtx;
9456
9457 type = TREE_TYPE (arg0);
9458 operand_mode = TYPE_MODE (type);
9459 unsignedp = TREE_UNSIGNED (type);
9460
9461 /* We won't bother with BLKmode store-flag operations because it would mean
9462 passing a lot of information to emit_store_flag. */
9463 if (operand_mode == BLKmode)
9464 return 0;
9465
9466 /* We won't bother with store-flag operations involving function pointers
9467 when function pointers must be canonicalized before comparisons. */
9468 #ifdef HAVE_canonicalize_funcptr_for_compare
9469 if (HAVE_canonicalize_funcptr_for_compare
9470 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9471 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9472 == FUNCTION_TYPE))
9473 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9474 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9475 == FUNCTION_TYPE))))
9476 return 0;
9477 #endif
9478
9479 STRIP_NOPS (arg0);
9480 STRIP_NOPS (arg1);
9481
9482 /* Get the rtx comparison code to use. We know that EXP is a comparison
9483 operation of some type. Some comparisons against 1 and -1 can be
9484 converted to comparisons with zero. Do so here so that the tests
9485 below will be aware that we have a comparison with zero. These
9486 tests will not catch constants in the first operand, but constants
9487 are rarely passed as the first operand. */
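/* E.g. "x < 1" is rewritten as "x <= 0", and for signed X
   "x > -1" becomes "x >= 0", so only comparisons against zero
   remain for the special cases below.  */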
9488
9489 switch (TREE_CODE (exp))
9490 {
9491 case EQ_EXPR:
9492 code = EQ;
9493 break;
9494 case NE_EXPR:
9495 code = NE;
9496 break;
9497 case LT_EXPR:
9498 if (integer_onep (arg1))
9499 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9500 else
9501 code = unsignedp ? LTU : LT;
9502 break;
9503 case LE_EXPR:
9504 if (! unsignedp && integer_all_onesp (arg1))
9505 arg1 = integer_zero_node, code = LT;
9506 else
9507 code = unsignedp ? LEU : LE;
9508 break;
9509 case GT_EXPR:
9510 if (! unsignedp && integer_all_onesp (arg1))
9511 arg1 = integer_zero_node, code = GE;
9512 else
9513 code = unsignedp ? GTU : GT;
9514 break;
9515 case GE_EXPR:
9516 if (integer_onep (arg1))
9517 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9518 else
9519 code = unsignedp ? GEU : GE;
9520 break;
9521
9522 case UNORDERED_EXPR:
9523 code = UNORDERED;
9524 break;
9525 case ORDERED_EXPR:
9526 code = ORDERED;
9527 break;
9528 case UNLT_EXPR:
9529 code = UNLT;
9530 break;
9531 case UNLE_EXPR:
9532 code = UNLE;
9533 break;
9534 case UNGT_EXPR:
9535 code = UNGT;
9536 break;
9537 case UNGE_EXPR:
9538 code = UNGE;
9539 break;
9540 case UNEQ_EXPR:
9541 code = UNEQ;
9542 break;
9543
9544 default:
9545 abort ();
9546 }
9547
9548 /* Put a constant second. */
9549 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9550 {
9551 tem = arg0; arg0 = arg1; arg1 = tem;
9552 code = swap_condition (code);
9553 }
9554
9555 /* If this is an equality or inequality test of a single bit, we can
9556 do this by shifting the bit being tested to the low-order bit and
9557 masking the result with the constant 1. If the condition was EQ,
9558 we xor it with 1. This does not require an scc insn and is faster
9559 than an scc insn even if we have it.
9560
9561 The code to make this transformation was moved into fold_single_bit_test,
9562 so we just call into the folder and expand its result. */
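/* For instance, "(x & 8) != 0" is expanded as "(x >> 3) & 1",
   and "(x & 8) == 0" additionally XORs that result with 1.  */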
9563
9564 if ((code == NE || code == EQ)
9565 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9566 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9567 {
9568 tree type = (*lang_hooks.types.type_for_mode) (mode, unsignedp);
9569 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9570 arg0, arg1, type),
9571 target, VOIDmode, EXPAND_NORMAL);
9572 }
9573
9574 /* Now see if we are likely to be able to do this. Return if not. */
9575 if (! can_compare_p (code, operand_mode, ccp_store_flag))
9576 return 0;
9577
9578 icode = setcc_gen_code[(int) code];
9579 if (icode == CODE_FOR_nothing
9580 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9581 {
9582 /* We can only do this if it is one of the special cases that
9583 can be handled without an scc insn. */
9584 if ((code == LT && integer_zerop (arg1))
9585 || (! only_cheap && code == GE && integer_zerop (arg1)))
9586 ;
9587 else if (BRANCH_COST >= 0
9588 && ! only_cheap && (code == NE || code == EQ)
9589 && TREE_CODE (type) != REAL_TYPE
9590 && ((abs_optab->handlers[(int) operand_mode].insn_code
9591 != CODE_FOR_nothing)
9592 || (ffs_optab->handlers[(int) operand_mode].insn_code
9593 != CODE_FOR_nothing)))
9594 ;
9595 else
9596 return 0;
9597 }
9598
9599 if (! get_subtarget (target)
9600 || GET_MODE (subtarget) != operand_mode)
9601 subtarget = 0;
9602
9603 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
9604
9605 if (target == 0)
9606 target = gen_reg_rtx (mode);
9607
9608 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9609 because, if the emit_store_flag does anything it will succeed and
9610 OP0 and OP1 will not be used subsequently. */
9611
9612 result = emit_store_flag (target, code,
9613 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9614 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9615 operand_mode, unsignedp, 1);
9616
9617 if (result)
9618 {
9619 if (invert)
9620 result = expand_binop (mode, xor_optab, result, const1_rtx,
9621 result, 0, OPTAB_LIB_WIDEN);
9622 return result;
9623 }
9624
9625 /* If this failed, we have to do this with set/compare/jump/set code. */
9626 if (GET_CODE (target) != REG
9627 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9628 target = gen_reg_rtx (GET_MODE (target));
9629
9630 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9631 result = compare_from_rtx (op0, op1, code, unsignedp,
9632 operand_mode, NULL_RTX);
9633 if (GET_CODE (result) == CONST_INT)
9634 return (((result == const0_rtx && ! invert)
9635 || (result != const0_rtx && invert))
9636 ? const0_rtx : const1_rtx);
9637
9638 /* The code of RESULT may not match CODE if compare_from_rtx
9639 decided to swap its operands and reverse the original code.
9640
9641 We know that compare_from_rtx returns either a CONST_INT or
9642 a new comparison code, so it is safe to just extract the
9643 code from RESULT. */
9644 code = GET_CODE (result);
9645
9646 label = gen_label_rtx ();
9647 if (bcc_gen_fctn[(int) code] == 0)
9648 abort ();
9649
9650 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9651 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9652 emit_label (label);
9653
9654 return target;
9655 }
9656 \f
9657
9658 /* Stubs in case we haven't got a casesi insn. */
9659 #ifndef HAVE_casesi
9660 # define HAVE_casesi 0
9661 # define gen_casesi(a, b, c, d, e) (0)
9662 # define CODE_FOR_casesi CODE_FOR_nothing
9663 #endif
9664
9665 /* If the machine does not have a case insn that compares the bounds,
9666 this means extra overhead for dispatch tables, which raises the
9667 threshold for using them. */
9668 #ifndef CASE_VALUES_THRESHOLD
9669 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9670 #endif /* CASE_VALUES_THRESHOLD */
9671
9672 unsigned int
9673 case_values_threshold (void)
9674 {
9675 return CASE_VALUES_THRESHOLD;
9676 }
9677
9678 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9679 0 otherwise (i.e. if there is no casesi instruction). */
9680 int
9681 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9682 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
9683 {
9684 enum machine_mode index_mode = SImode;
9685 int index_bits = GET_MODE_BITSIZE (index_mode);
9686 rtx op1, op2, index;
9687 enum machine_mode op_mode;
9688
9689 if (! HAVE_casesi)
9690 return 0;
9691
9692 /* Convert the index to SImode. */
9693 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9694 {
9695 enum machine_mode omode = TYPE_MODE (index_type);
9696 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
9697
9698 /* We must handle the endpoints in the original mode. */
9699 index_expr = build (MINUS_EXPR, index_type,
9700 index_expr, minval);
9701 minval = integer_zero_node;
9702 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9703 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9704 omode, 1, default_label);
9705 /* Now we can safely truncate. */
9706 index = convert_to_mode (index_mode, index, 0);
9707 }
9708 else
9709 {
9710 if (TYPE_MODE (index_type) != index_mode)
9711 {
9712 index_expr = convert ((*lang_hooks.types.type_for_size)
9713 (index_bits, 0), index_expr);
9714 index_type = TREE_TYPE (index_expr);
9715 }
9716
9717 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9718 }
9719 emit_queue ();
9720 index = protect_from_queue (index, 0);
9721 do_pending_stack_adjust ();
9722
9723 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9724 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9725 (index, op_mode))
9726 index = copy_to_mode_reg (op_mode, index);
9727
9728 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
9729
9730 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9731 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9732 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
9733 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9734 (op1, op_mode))
9735 op1 = copy_to_mode_reg (op_mode, op1);
9736
9737 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
9738
9739 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9740 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9741 op2, TREE_UNSIGNED (TREE_TYPE (range)));
9742 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9743 (op2, op_mode))
9744 op2 = copy_to_mode_reg (op_mode, op2);
9745
9746 emit_jump_insn (gen_casesi (index, op1, op2,
9747 table_label, default_label));
9748 return 1;
9749 }
9750
9751 /* Attempt to generate a tablejump instruction; same concept. */
9752 #ifndef HAVE_tablejump
9753 #define HAVE_tablejump 0
9754 #define gen_tablejump(x, y) (0)
9755 #endif
9756
9757 /* Subroutine of the next function.
9758
9759 INDEX is the value being switched on, with the lowest value
9760 in the table already subtracted.
9761 MODE is its expected mode (needed if INDEX is constant).
9762 RANGE is the length of the jump table.
9763 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9764
9765 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9766 index value is out of range. */
9767
9768 static void
9769 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9770 rtx default_label)
9771 {
9772 rtx temp, vector;
9773
9774 if (INTVAL (range) > cfun->max_jumptable_ents)
9775 cfun->max_jumptable_ents = INTVAL (range);
9776
9777 /* Do an unsigned comparison (in the proper mode) between the index
9778 expression and the value which represents the length of the range.
9779 Since we just finished subtracting the lower bound of the range
9780 from the index expression, this comparison allows us to simultaneously
9781 check that the original index expression value is both greater than
9782 or equal to the minimum value of the range and less than or equal to
9783 the maximum value of the range. */
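/* This is the classic idiom: with I already biased by LO, the single
   unsigned test

     (unsigned) (i - lo) > (unsigned) (hi - lo)

   jumps to DEFAULT_LABEL exactly when the original I is outside
   [LO, HI].  */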
9784
9785 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9786 default_label);
9787
9788 /* If index is in range, it must fit in Pmode.
9789 Convert to Pmode so we can index with it. */
9790 if (mode != Pmode)
9791 index = convert_to_mode (Pmode, index, 1);
9792
9793 /* Don't let a MEM slip through, because then the INDEX that comes
9794 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9795 and break_out_memory_refs will go to work on it and mess it up. */
9796 #ifdef PIC_CASE_VECTOR_ADDRESS
9797 if (flag_pic && GET_CODE (index) != REG)
9798 index = copy_to_mode_reg (Pmode, index);
9799 #endif
9800
9801 /* If flag_force_addr were to affect this address
9802 it could interfere with the tricky assumptions made
9803 about addresses that contain label-refs,
9804 which may be valid only very near the tablejump itself. */
9805 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9806 GET_MODE_SIZE, because this indicates how large insns are. The other
9807 uses should all be Pmode, because they are addresses. This code
9808 could fail if addresses and insns are not the same size. */
9809 index = gen_rtx_PLUS (Pmode,
9810 gen_rtx_MULT (Pmode, index,
9811 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9812 gen_rtx_LABEL_REF (Pmode, table_label));
9813 #ifdef PIC_CASE_VECTOR_ADDRESS
9814 if (flag_pic)
9815 index = PIC_CASE_VECTOR_ADDRESS (index);
9816 else
9817 #endif
9818 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9819 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9820 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
9821 RTX_UNCHANGING_P (vector) = 1;
9822 MEM_NOTRAP_P (vector) = 1;
9823 convert_move (temp, vector, 0);
9824
9825 emit_jump_insn (gen_tablejump (temp, table_label));
9826
9827 /* If we are generating PIC code or if the table is PC-relative, the
9828 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9829 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9830 emit_barrier ();
9831 }
9832
9833 int
9834 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9835 rtx table_label, rtx default_label)
9836 {
9837 rtx index;
9838
9839 if (! HAVE_tablejump)
9840 return 0;
9841
9842 index_expr = fold (build (MINUS_EXPR, index_type,
9843 convert (index_type, index_expr),
9844 convert (index_type, minval)));
9845 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9846 emit_queue ();
9847 index = protect_from_queue (index, 0);
9848 do_pending_stack_adjust ();
9849
9850 do_tablejump (index, TYPE_MODE (index_type),
9851 convert_modes (TYPE_MODE (index_type),
9852 TYPE_MODE (TREE_TYPE (range)),
9853 expand_expr (range, NULL_RTX,
9854 VOIDmode, 0),
9855 TREE_UNSIGNED (TREE_TYPE (range))),
9856 table_label, default_label);
9857 return 1;
9858 }
9859
9860 /* Nonzero if the mode is a valid vector mode for this architecture.
9861 This returns nonzero even if there is no hardware support for the
9862 vector mode, but we can emulate with narrower modes. */
9863
9864 int
9865 vector_mode_valid_p (enum machine_mode mode)
9866 {
9867 enum mode_class class = GET_MODE_CLASS (mode);
9868 enum machine_mode innermode;
9869
9870 /* Doh! What's going on? */
9871 if (class != MODE_VECTOR_INT
9872 && class != MODE_VECTOR_FLOAT)
9873 return 0;
9874
9875 /* Hardware support. Woo hoo! */
9876 if (VECTOR_MODE_SUPPORTED_P (mode))
9877 return 1;
9878
9879 innermode = GET_MODE_INNER (mode);
9880
9881 /* We should probably return 1 if requesting V4DI and we have no DI,
9882 but do have V2DI, though that is probably very unlikely. */
9883
9884 /* If we have support for the inner mode, we can safely emulate it.
9885 We may not have V2DI, but we can emulate it with a pair of DIs. */
9886 return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
9887 }
9888
9889 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
9890 static rtx
9891 const_vector_from_tree (tree exp)
9892 {
9893 rtvec v;
9894 int units, i;
9895 tree link, elt;
9896 enum machine_mode inner, mode;
9897
9898 mode = TYPE_MODE (TREE_TYPE (exp));
9899
9900 if (is_zeros_p (exp))
9901 return CONST0_RTX (mode);
9902
9903 units = GET_MODE_NUNITS (mode);
9904 inner = GET_MODE_INNER (mode);
9905
9906 v = rtvec_alloc (units);
9907
9908 link = TREE_VECTOR_CST_ELTS (exp);
9909 for (i = 0; link; link = TREE_CHAIN (link), ++i)
9910 {
9911 elt = TREE_VALUE (link);
9912
9913 if (TREE_CODE (elt) == REAL_CST)
9914 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
9915 inner);
9916 else
9917 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
9918 TREE_INT_CST_HIGH (elt),
9919 inner);
9920 }
9921
9922 /* Initialize remaining elements to 0. */
9923 for (; i < units; ++i)
9924 RTVEC_ELT (v, i) = CONST0_RTX (inner);
9925
9926 return gen_rtx_raw_CONST_VECTOR (mode, v);
9927 }
9928
9929 #include "gt-expr.h"