1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "optabs.h"
40 #include "libfuncs.h"
41 #include "recog.h"
42 #include "reload.h"
43 #include "output.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "langhooks.h"
48 #include "intl.h"
49 #include "tm_p.h"
50
51 /* Decide whether a function's arguments should be processed
52 from first to last or from last to first.
53
54 They should if the stack and args grow in opposite directions, but
55 only if we have push insns. */
56
57 #ifdef PUSH_ROUNDING
58
59 #ifndef PUSH_ARGS_REVERSED
60 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
61 #define PUSH_ARGS_REVERSED /* If it's last to first. */
62 #endif
63 #endif
64
65 #endif
66
67 #ifndef STACK_PUSH_CODE
68 #ifdef STACK_GROWS_DOWNWARD
69 #define STACK_PUSH_CODE PRE_DEC
70 #else
71 #define STACK_PUSH_CODE PRE_INC
72 #endif
73 #endif
74
75 /* Assume that case vectors are not pc-relative. */
76 #ifndef CASE_VECTOR_PC_RELATIVE
77 #define CASE_VECTOR_PC_RELATIVE 0
78 #endif
79
80 /* Convert defined/undefined to boolean. */
81 #ifdef TARGET_MEM_FUNCTIONS
82 #undef TARGET_MEM_FUNCTIONS
83 #define TARGET_MEM_FUNCTIONS 1
84 #else
85 #define TARGET_MEM_FUNCTIONS 0
86 #endif
87
88
89 /* If this is nonzero, we do not bother generating VOLATILE
90 around volatile memory references, and we are willing to
91 output indirect addresses. If cse is to follow, we reject
92 indirect addresses so a useful potential cse is generated;
93 if it is used only once, instruction combination will produce
94 the same indirect address eventually. */
95 int cse_not_expected;
96
97 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
98 tree placeholder_list = 0;
99
100 /* This structure is used by move_by_pieces to describe the move to
101 be performed. */
102 struct move_by_pieces
103 {
104 rtx to;
105 rtx to_addr;
106 int autinc_to;
107 int explicit_inc_to;
108 rtx from;
109 rtx from_addr;
110 int autinc_from;
111 int explicit_inc_from;
112 unsigned HOST_WIDE_INT len;
113 HOST_WIDE_INT offset;
114 int reverse;
115 };
116
117 /* This structure is used by store_by_pieces to describe the clear to
118 be performed. */
119
120 struct store_by_pieces
121 {
122 rtx to;
123 rtx to_addr;
124 int autinc_to;
125 int explicit_inc_to;
126 unsigned HOST_WIDE_INT len;
127 HOST_WIDE_INT offset;
128 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
129 void *constfundata;
130 int reverse;
131 };
132
133 static rtx enqueue_insn (rtx, rtx);
134 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
135 unsigned int);
136 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
137 struct move_by_pieces *);
138 static bool block_move_libcall_safe_for_call_parm (void);
139 static bool emit_block_move_via_movstr (rtx, rtx, rtx, unsigned);
140 static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
141 static tree emit_block_move_libcall_fn (int);
142 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
143 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
144 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
145 static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
146 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
147 struct store_by_pieces *);
148 static bool clear_storage_via_clrstr (rtx, rtx, unsigned);
149 static rtx clear_storage_via_libcall (rtx, rtx);
150 static tree clear_storage_libcall_fn (int);
151 static rtx compress_float_constant (rtx, rtx);
152 static rtx get_subtarget (rtx);
153 static int is_zeros_p (tree);
154 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
155 HOST_WIDE_INT, enum machine_mode,
156 tree, tree, int, int);
157 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
158 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
159 tree, enum machine_mode, int, tree, int);
160 static rtx var_rtx (tree);
161
162 static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
163 static unsigned HOST_WIDE_INT highest_pow2_factor_for_type (tree, tree);
164
165 static int is_aligning_offset (tree, tree);
166 static rtx expand_increment (tree, int, int);
167 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
168 enum expand_modifier);
169 static rtx do_store_flag (tree, rtx, enum machine_mode, int);
170 #ifdef PUSH_ROUNDING
171 static void emit_single_push_insn (enum machine_mode, rtx, tree);
172 #endif
173 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
174 static rtx const_vector_from_tree (tree);
175
176 /* Record for each mode whether we can move a register directly to or
177 from an object of that mode in memory. If we can't, we won't try
178 to use that mode directly when accessing a field of that mode. */
179
180 static char direct_load[NUM_MACHINE_MODES];
181 static char direct_store[NUM_MACHINE_MODES];
182
183 /* Record for each mode whether we can float-extend from memory. */
184
185 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
186
187 /* If a memory-to-memory move would take MOVE_RATIO or more simple
188 move-instruction sequences, we will do a movstr or libcall instead. */
189
190 #ifndef MOVE_RATIO
191 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
192 #define MOVE_RATIO 2
193 #else
194 /* If we are optimizing for space (-Os), cut down the default move ratio. */
195 #define MOVE_RATIO (optimize_size ? 3 : 15)
196 #endif
197 #endif
198
199 /* This macro is used to determine whether move_by_pieces should be called
200 to perform a structure copy. */
201 #ifndef MOVE_BY_PIECES_P
202 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
203 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
204 #endif
205
206 /* If a clear memory operation would take CLEAR_RATIO or more simple
207 move-instruction sequences, we will do a clrstr or libcall instead. */
208
209 #ifndef CLEAR_RATIO
210 #if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
211 #define CLEAR_RATIO 2
212 #else
213 /* If we are optimizing for space, cut down the default clear ratio. */
214 #define CLEAR_RATIO (optimize_size ? 3 : 15)
215 #endif
216 #endif
217
218 /* This macro is used to determine whether clear_by_pieces should be
219 called to clear storage. */
220 #ifndef CLEAR_BY_PIECES_P
221 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
222 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
223 #endif
224
225 /* This macro is used to determine whether store_by_pieces should be
226 called to "memset" storage with byte values other than zero, or
227 to "memcpy" storage when the source is a constant string. */
228 #ifndef STORE_BY_PIECES_P
229 #define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
230 #endif
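
/* For illustration, assuming a hypothetical 32-bit target with
   MOVE_MAX == 4, fast unaligned accesses and the default MOVE_RATIO of
   15: a 16-byte word-aligned copy costs only 16 / 4 == 4 SImode moves,
   so MOVE_BY_PIECES_P (16, 32) is nonzero (4 < 15) and emit_block_move
   expands the copy inline via move_by_pieces, whereas a 256-byte copy
   (64 moves) instead falls back to a movstr pattern or a memcpy
   libcall.  */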
231
232 /* This array records the insn_code of insns to perform block moves. */
233 enum insn_code movstr_optab[NUM_MACHINE_MODES];
234
235 /* This array records the insn_code of insns to perform block clears. */
236 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
237
238 /* These arrays record the insn_code of two different kinds of insns
239 to perform block compares. */
240 enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
241 enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
242
243 /* Stack of EXPR_WITH_FILE_LOCATION nested expressions. */
244 struct file_stack *expr_wfl_stack;
245
246 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
247
248 #ifndef SLOW_UNALIGNED_ACCESS
249 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
250 #endif
251 \f
252 /* This is run once per compilation to set up which modes can be used
253 directly in memory and to initialize the block move optab. */
254
255 void
256 init_expr_once (void)
257 {
258 rtx insn, pat;
259 enum machine_mode mode;
260 int num_clobbers;
261 rtx mem, mem1;
262 rtx reg;
263
264 /* Try indexing by frame ptr and try by stack ptr.
265 It is known that on the Convex the stack ptr isn't a valid index.
266 With luck, one or the other is valid on any machine. */
267 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
268 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
269
270 /* A scratch register we can modify in-place below to avoid
271 useless RTL allocations. */
272 reg = gen_rtx_REG (VOIDmode, -1);
273
274 insn = rtx_alloc (INSN);
275 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
276 PATTERN (insn) = pat;
277
278 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
279 mode = (enum machine_mode) ((int) mode + 1))
280 {
281 int regno;
282
283 direct_load[(int) mode] = direct_store[(int) mode] = 0;
284 PUT_MODE (mem, mode);
285 PUT_MODE (mem1, mode);
286 PUT_MODE (reg, mode);
287
288 /* See if there is some register that can be used in this mode and
289 directly loaded or stored from memory. */
290
291 if (mode != VOIDmode && mode != BLKmode)
292 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
293 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
294 regno++)
295 {
296 if (! HARD_REGNO_MODE_OK (regno, mode))
297 continue;
298
299 REGNO (reg) = regno;
300
301 SET_SRC (pat) = mem;
302 SET_DEST (pat) = reg;
303 if (recog (pat, insn, &num_clobbers) >= 0)
304 direct_load[(int) mode] = 1;
305
306 SET_SRC (pat) = mem1;
307 SET_DEST (pat) = reg;
308 if (recog (pat, insn, &num_clobbers) >= 0)
309 direct_load[(int) mode] = 1;
310
311 SET_SRC (pat) = reg;
312 SET_DEST (pat) = mem;
313 if (recog (pat, insn, &num_clobbers) >= 0)
314 direct_store[(int) mode] = 1;
315
316 SET_SRC (pat) = reg;
317 SET_DEST (pat) = mem1;
318 if (recog (pat, insn, &num_clobbers) >= 0)
319 direct_store[(int) mode] = 1;
320 }
321 }
322
323 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
324
325 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
326 mode = GET_MODE_WIDER_MODE (mode))
327 {
328 enum machine_mode srcmode;
329 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
330 srcmode = GET_MODE_WIDER_MODE (srcmode))
331 {
332 enum insn_code ic;
333
334 ic = can_extend_p (mode, srcmode, 0);
335 if (ic == CODE_FOR_nothing)
336 continue;
337
338 PUT_MODE (mem, srcmode);
339
340 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
341 float_extend_from_mem[mode][srcmode] = true;
342 }
343 }
344 }
345
346 /* This is run at the start of compiling a function. */
347
348 void
349 init_expr (void)
350 {
351 cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
352 }
353
354 /* Small sanity check that the queue is empty at the end of a function. */
355
356 void
357 finish_expr_for_function (void)
358 {
359 if (pending_chain)
360 abort ();
361 }
362 \f
363 /* Manage the queue of increment instructions to be output
364 for POSTINCREMENT_EXPR expressions, etc. */
365
366 /* Queue up to increment (or change) VAR later. BODY says how:
367 BODY should be the same thing you would pass to emit_insn
368 to increment right away. It will go to emit_insn later on.
369
370 The value is a QUEUED expression to be used in place of VAR
371 where you want to guarantee the pre-incrementation value of VAR. */
372
373 static rtx
374 enqueue_insn (rtx var, rtx body)
375 {
376 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
377 body, pending_chain);
378 return pending_chain;
379 }
380
381 /* Use protect_from_queue to convert a QUEUED expression
382 into something that you can put immediately into an instruction.
383 If the queued incrementation has not happened yet,
384 protect_from_queue returns the variable itself.
385 If the incrementation has happened, protect_from_queue returns a temp
386 that contains a copy of the old value of the variable.
387
388 Any time an rtx which might possibly be a QUEUED is to be put
389 into an instruction, it must be passed through protect_from_queue first.
390 QUEUED expressions are not meaningful in instructions.
391
392 Do not pass a value through protect_from_queue and then hold
393 on to it for a while before putting it in an instruction!
394 If the queue is flushed in between, incorrect code will result. */
395
396 rtx
397 protect_from_queue (rtx x, int modify)
398 {
399 RTX_CODE code = GET_CODE (x);
400
401 #if 0 /* A QUEUED can hang around after the queue is forced out. */
402 /* Shortcut for most common case. */
403 if (pending_chain == 0)
404 return x;
405 #endif
406
407 if (code != QUEUED)
408 {
409 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
410 use of autoincrement. Make a copy of the contents of the memory
411 location rather than a copy of the address, but not if the value is
412 of mode BLKmode. Don't modify X in place since it might be
413 shared. */
414 if (code == MEM && GET_MODE (x) != BLKmode
415 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
416 {
417 rtx y = XEXP (x, 0);
418 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
419
420 if (QUEUED_INSN (y))
421 {
422 rtx temp = gen_reg_rtx (GET_MODE (x));
423
424 emit_insn_before (gen_move_insn (temp, new),
425 QUEUED_INSN (y));
426 return temp;
427 }
428
429 /* Copy the address into a pseudo, so that the returned value
430 remains correct across calls to emit_queue. */
431 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
432 }
433
434 /* Otherwise, recursively protect the subexpressions of all
435 the kinds of rtx's that can contain a QUEUED. */
436 if (code == MEM)
437 {
438 rtx tem = protect_from_queue (XEXP (x, 0), 0);
439 if (tem != XEXP (x, 0))
440 {
441 x = copy_rtx (x);
442 XEXP (x, 0) = tem;
443 }
444 }
445 else if (code == PLUS || code == MULT)
446 {
447 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
448 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
449 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
450 {
451 x = copy_rtx (x);
452 XEXP (x, 0) = new0;
453 XEXP (x, 1) = new1;
454 }
455 }
456 return x;
457 }
458 /* If the increment has not happened, use the variable itself. Copy it
459 into a new pseudo so that the value remains correct across calls to
460 emit_queue. */
461 if (QUEUED_INSN (x) == 0)
462 return copy_to_reg (QUEUED_VAR (x));
463 /* If the increment has happened and a pre-increment copy exists,
464 use that copy. */
465 if (QUEUED_COPY (x) != 0)
466 return QUEUED_COPY (x);
467 /* The increment has happened but we haven't set up a pre-increment copy.
468 Set one up now, and use it. */
469 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
470 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
471 QUEUED_INSN (x));
472 return QUEUED_COPY (x);
473 }
474
475 /* Return nonzero if X contains a QUEUED expression:
476 if it contains anything that will be altered by a queued increment.
477 We handle only combinations of MEM, PLUS, MINUS and MULT operators
478 since memory addresses generally contain only those. */
479
480 int
481 queued_subexp_p (rtx x)
482 {
483 enum rtx_code code = GET_CODE (x);
484 switch (code)
485 {
486 case QUEUED:
487 return 1;
488 case MEM:
489 return queued_subexp_p (XEXP (x, 0));
490 case MULT:
491 case PLUS:
492 case MINUS:
493 return (queued_subexp_p (XEXP (x, 0))
494 || queued_subexp_p (XEXP (x, 1)));
495 default:
496 return 0;
497 }
498 }
499
500 /* Perform all the pending incrementations. */
501
502 void
503 emit_queue (void)
504 {
505 rtx p;
506 while ((p = pending_chain))
507 {
508 rtx body = QUEUED_BODY (p);
509
510 switch (GET_CODE (body))
511 {
512 case INSN:
513 case JUMP_INSN:
514 case CALL_INSN:
515 case CODE_LABEL:
516 case BARRIER:
517 case NOTE:
518 QUEUED_INSN (p) = body;
519 emit_insn (body);
520 break;
521
522 #ifdef ENABLE_CHECKING
523 case SEQUENCE:
524 abort ();
525 break;
526 #endif
527
528 default:
529 QUEUED_INSN (p) = emit_insn (body);
530 break;
531 }
532
533 pending_chain = QUEUED_NEXT (p);
534 }
535 }
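
/* For illustration: the queue above is how the expander defers the side
   effect of a POSTINCREMENT_EXPR and friends.  A typical (hypothetical)
   calling pattern, with X standing for an operand rtx that may contain
   a QUEUED:

       x = protect_from_queue (x, 0);
       ... put X into the insns being emitted ...
       emit_queue ();

   protect_from_queue is called immediately before X goes into an insn;
   holding its result across a call to emit_queue would reintroduce the
   problem warned about in its comment above.  */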
536 \f
537 /* Copy data from FROM to TO, where the machine modes are not the same.
538 Both modes may be integer, or both may be floating.
539 UNSIGNEDP should be nonzero if FROM is an unsigned type.
540 This causes zero-extension instead of sign-extension. */
541
542 void
543 convert_move (rtx to, rtx from, int unsignedp)
544 {
545 enum machine_mode to_mode = GET_MODE (to);
546 enum machine_mode from_mode = GET_MODE (from);
547 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
548 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
549 enum insn_code code;
550 rtx libcall;
551
552 /* rtx code for making an equivalent value. */
553 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
554 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
555
556 to = protect_from_queue (to, 1);
557 from = protect_from_queue (from, 0);
558
559 if (to_real != from_real)
560 abort ();
561
562 /* If FROM is a SUBREG that indicates that we have already done at least
563 the required extension, strip it. We don't handle such SUBREGs as
564 TO here. */
565
566 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
567 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
568 >= GET_MODE_SIZE (to_mode))
569 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
570 from = gen_lowpart (to_mode, from), from_mode = to_mode;
571
572 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
573 abort ();
574
575 if (to_mode == from_mode
576 || (from_mode == VOIDmode && CONSTANT_P (from)))
577 {
578 emit_move_insn (to, from);
579 return;
580 }
581
582 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
583 {
584 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
585 abort ();
586
587 if (VECTOR_MODE_P (to_mode))
588 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
589 else
590 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
591
592 emit_move_insn (to, from);
593 return;
594 }
595
596 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
597 {
598 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
599 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
600 return;
601 }
602
603 if (to_real)
604 {
605 rtx value, insns;
606 convert_optab tab;
607
608 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
609 tab = sext_optab;
610 else if (GET_MODE_BITSIZE (from_mode) > GET_MODE_BITSIZE (to_mode))
611 tab = trunc_optab;
612 else
613 abort ();
614
615 /* Try converting directly if the insn is supported. */
616
617 code = tab->handlers[to_mode][from_mode].insn_code;
618 if (code != CODE_FOR_nothing)
619 {
620 emit_unop_insn (code, to, from,
621 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
622 return;
623 }
624
625 /* Otherwise use a libcall. */
626 libcall = tab->handlers[to_mode][from_mode].libfunc;
627
628 if (!libcall)
629 /* This conversion is not implemented yet. */
630 abort ();
631
632 start_sequence ();
633 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
634 1, from, from_mode);
635 insns = get_insns ();
636 end_sequence ();
637 emit_libcall_block (insns, to, value,
638 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
639 from)
640 : gen_rtx_FLOAT_EXTEND (to_mode, from));
641 return;
642 }
643
644 /* Handle pointer conversion. */ /* SPEE 900220. */
645 /* Targets are expected to provide conversion insns between PxImode and
646 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
647 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
648 {
649 enum machine_mode full_mode
650 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
651
652 if (trunc_optab->handlers[to_mode][full_mode].insn_code
653 == CODE_FOR_nothing)
654 abort ();
655
656 if (full_mode != from_mode)
657 from = convert_to_mode (full_mode, from, unsignedp);
658 emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
659 to, from, UNKNOWN);
660 return;
661 }
662 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
663 {
664 enum machine_mode full_mode
665 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
666
667 if (sext_optab->handlers[full_mode][from_mode].insn_code
668 == CODE_FOR_nothing)
669 abort ();
670
671 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
672 to, from, UNKNOWN);
673 if (to_mode == full_mode)
674 return;
675
676 /* else proceed to integer conversions below */
677 from_mode = full_mode;
678 }
679
680 /* Now both modes are integers. */
681
682 /* Handle expanding beyond a word. */
683 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
684 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
685 {
686 rtx insns;
687 rtx lowpart;
688 rtx fill_value;
689 rtx lowfrom;
690 int i;
691 enum machine_mode lowpart_mode;
692 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
693
694 /* Try converting directly if the insn is supported. */
695 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
696 != CODE_FOR_nothing)
697 {
698 /* If FROM is a SUBREG, put it into a register. Do this
699 so that we always generate the same set of insns for
700 better cse'ing; if an intermediate assignment occurred,
701 we won't be doing the operation directly on the SUBREG. */
702 if (optimize > 0 && GET_CODE (from) == SUBREG)
703 from = force_reg (from_mode, from);
704 emit_unop_insn (code, to, from, equiv_code);
705 return;
706 }
707 /* Next, try converting via full word. */
708 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
709 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
710 != CODE_FOR_nothing))
711 {
712 if (GET_CODE (to) == REG)
713 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
714 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
715 emit_unop_insn (code, to,
716 gen_lowpart (word_mode, to), equiv_code);
717 return;
718 }
719
720 /* No special multiword conversion insn; do it by hand. */
721 start_sequence ();
722
723 /* Since we will turn this into a no conflict block, we must ensure
724 that the source does not overlap the target. */
725
726 if (reg_overlap_mentioned_p (to, from))
727 from = force_reg (from_mode, from);
728
729 /* Get a copy of FROM widened to a word, if necessary. */
730 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
731 lowpart_mode = word_mode;
732 else
733 lowpart_mode = from_mode;
734
735 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
736
737 lowpart = gen_lowpart (lowpart_mode, to);
738 emit_move_insn (lowpart, lowfrom);
739
740 /* Compute the value to put in each remaining word. */
741 if (unsignedp)
742 fill_value = const0_rtx;
743 else
744 {
745 #ifdef HAVE_slt
746 if (HAVE_slt
747 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
748 && STORE_FLAG_VALUE == -1)
749 {
750 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
751 lowpart_mode, 0);
752 fill_value = gen_reg_rtx (word_mode);
753 emit_insn (gen_slt (fill_value));
754 }
755 else
756 #endif
757 {
758 fill_value
759 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
760 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
761 NULL_RTX, 0);
762 fill_value = convert_to_mode (word_mode, fill_value, 1);
763 }
764 }
765
766 /* Fill the remaining words. */
767 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
768 {
769 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
770 rtx subword = operand_subword (to, index, 1, to_mode);
771
772 if (subword == 0)
773 abort ();
774
775 if (fill_value != subword)
776 emit_move_insn (subword, fill_value);
777 }
778
779 insns = get_insns ();
780 end_sequence ();
781
782 emit_no_conflict_block (insns, to, from, NULL_RTX,
783 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
784 return;
785 }
786
787 /* Truncating multi-word to a word or less. */
788 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
789 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
790 {
791 if (!((GET_CODE (from) == MEM
792 && ! MEM_VOLATILE_P (from)
793 && direct_load[(int) to_mode]
794 && ! mode_dependent_address_p (XEXP (from, 0)))
795 || GET_CODE (from) == REG
796 || GET_CODE (from) == SUBREG))
797 from = force_reg (from_mode, from);
798 convert_move (to, gen_lowpart (word_mode, from), 0);
799 return;
800 }
801
802 /* Now follow all the conversions between integers
803 no more than a word long. */
804
805 /* For truncation, usually we can just refer to FROM in a narrower mode. */
806 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
807 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
808 GET_MODE_BITSIZE (from_mode)))
809 {
810 if (!((GET_CODE (from) == MEM
811 && ! MEM_VOLATILE_P (from)
812 && direct_load[(int) to_mode]
813 && ! mode_dependent_address_p (XEXP (from, 0)))
814 || GET_CODE (from) == REG
815 || GET_CODE (from) == SUBREG))
816 from = force_reg (from_mode, from);
817 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
818 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
819 from = copy_to_reg (from);
820 emit_move_insn (to, gen_lowpart (to_mode, from));
821 return;
822 }
823
824 /* Handle extension. */
825 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
826 {
827 /* Convert directly if that works. */
828 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
829 != CODE_FOR_nothing)
830 {
831 if (flag_force_mem)
832 from = force_not_mem (from);
833
834 emit_unop_insn (code, to, from, equiv_code);
835 return;
836 }
837 else
838 {
839 enum machine_mode intermediate;
840 rtx tmp;
841 tree shift_amount;
842
843 /* Search for a mode to convert via. */
844 for (intermediate = from_mode; intermediate != VOIDmode;
845 intermediate = GET_MODE_WIDER_MODE (intermediate))
846 if (((can_extend_p (to_mode, intermediate, unsignedp)
847 != CODE_FOR_nothing)
848 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
849 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
850 GET_MODE_BITSIZE (intermediate))))
851 && (can_extend_p (intermediate, from_mode, unsignedp)
852 != CODE_FOR_nothing))
853 {
854 convert_move (to, convert_to_mode (intermediate, from,
855 unsignedp), unsignedp);
856 return;
857 }
858
859 /* No suitable intermediate mode.
860 Generate what we need with shifts. */
861 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
862 - GET_MODE_BITSIZE (from_mode), 0);
863 from = gen_lowpart (to_mode, force_reg (from_mode, from));
864 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
865 to, unsignedp);
866 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
867 to, unsignedp);
868 if (tmp != to)
869 emit_move_insn (to, tmp);
870 return;
871 }
872 }
873
874 /* Support special truncate insns for certain modes. */
875 if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
876 {
877 emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
878 to, from, UNKNOWN);
879 return;
880 }
881
882 /* Handle truncation of volatile memrefs, and so on;
883 the things that couldn't be truncated directly,
884 and for which there was no special instruction.
885
886 ??? Code above formerly short-circuited this, for most integer
887 mode pairs, with a force_reg in from_mode followed by a recursive
888 call to this routine. Appears always to have been wrong. */
889 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
890 {
891 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
892 emit_move_insn (to, temp);
893 return;
894 }
895
896 /* Mode combination is not recognized. */
897 abort ();
898 }
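
/* A usage sketch, for illustration only: widening a SImode value into a
   fresh DImode register with zero extension, where SRC is a
   hypothetical SImode rtx:

       rtx dst = gen_reg_rtx (DImode);
       convert_move (dst, src, 1);

   If the target provides a zero_extendsidi2 pattern this emits a single
   extension insn via can_extend_p; otherwise one of the multiword or
   shift-based expansions above is used.  */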
899
900 /* Return an rtx for a value that would result
901 from converting X to mode MODE.
902 Both X and MODE may be floating, or both integer.
903 UNSIGNEDP is nonzero if X is an unsigned value.
904 This can be done by referring to a part of X in place
905 or by copying to a new temporary with conversion.
906
907 This function *must not* call protect_from_queue
908 except when putting X into an insn (in which case convert_move does it). */
909
910 rtx
911 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
912 {
913 return convert_modes (mode, VOIDmode, x, unsignedp);
914 }
915
916 /* Return an rtx for a value that would result
917 from converting X from mode OLDMODE to mode MODE.
918 Both modes may be floating, or both integer.
919 UNSIGNEDP is nonzero if X is an unsigned value.
920
921 This can be done by referring to a part of X in place
922 or by copying to a new temporary with conversion.
923
924 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
925
926 This function *must not* call protect_from_queue
927 except when putting X into an insn (in which case convert_move does it). */
928
929 rtx
930 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
931 {
932 rtx temp;
933
934 /* If FROM is a SUBREG that indicates that we have already done at least
935 the required extension, strip it. */
936
937 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
938 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
939 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
940 x = gen_lowpart (mode, x);
941
942 if (GET_MODE (x) != VOIDmode)
943 oldmode = GET_MODE (x);
944
945 if (mode == oldmode)
946 return x;
947
948 /* There is one case that we must handle specially: If we are converting
949 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
950 we are to interpret the constant as unsigned, gen_lowpart will do
951 the wrong thing if the constant appears negative.  What we want to do is
952 make the high-order word of the constant zero, not all ones. */
953
954 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
955 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
956 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
957 {
958 HOST_WIDE_INT val = INTVAL (x);
959
960 if (oldmode != VOIDmode
961 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
962 {
963 int width = GET_MODE_BITSIZE (oldmode);
964
965 /* We need to zero extend VAL. */
966 val &= ((HOST_WIDE_INT) 1 << width) - 1;
967 }
968
969 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
970 }
971
972 /* We can do this with a gen_lowpart if both desired and current modes
973 are integer, and this is either a constant integer, a register, or a
974 non-volatile MEM. Except for the constant case where MODE is no
975 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
976
977 if ((GET_CODE (x) == CONST_INT
978 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
979 || (GET_MODE_CLASS (mode) == MODE_INT
980 && GET_MODE_CLASS (oldmode) == MODE_INT
981 && (GET_CODE (x) == CONST_DOUBLE
982 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
983 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
984 && direct_load[(int) mode])
985 || (GET_CODE (x) == REG
986 && (! HARD_REGISTER_P (x)
987 || HARD_REGNO_MODE_OK (REGNO (x), mode))
988 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
989 GET_MODE_BITSIZE (GET_MODE (x)))))))))
990 {
991 /* ?? If we don't know OLDMODE, we have to assume here that
992 X does not need sign- or zero-extension. This may not be
993 the case, but it's the best we can do. */
994 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
995 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
996 {
997 HOST_WIDE_INT val = INTVAL (x);
998 int width = GET_MODE_BITSIZE (oldmode);
999
1000 /* We must sign or zero-extend in this case. Start by
1001 zero-extending, then sign extend if we need to. */
1002 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1003 if (! unsignedp
1004 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1005 val |= (HOST_WIDE_INT) (-1) << width;
1006
1007 return gen_int_mode (val, mode);
1008 }
1009
1010 return gen_lowpart (mode, x);
1011 }
1012
1013 /* Converting an integer constant into a vector mode is always equivalent
1014 to a subreg operation. */
1015 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
1016 {
1017 if (GET_MODE_BITSIZE (mode) != GET_MODE_BITSIZE (oldmode))
1018 abort ();
1019 return simplify_gen_subreg (mode, x, oldmode, 0);
1020 }
1021
1022 temp = gen_reg_rtx (mode);
1023 convert_move (temp, x, unsignedp);
1024 return temp;
1025 }
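
/* For illustration only: convert_modes is the constant-folding front end
   to convert_move.  Narrowing a CONST_INT, for instance, needs no insns
   at all; under the rules above,

       convert_modes (QImode, SImode, GEN_INT (0x1234), 1)

   should simply return the truncated constant via gen_lowpart, while a
   conversion that really needs an extension allocates a temporary
   register and calls convert_move as in the final clause above.  */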
1026 \f
1027 /* STORE_MAX_PIECES is the number of bytes at a time that we can
1028 store efficiently. Due to internal GCC limitations, this is
1029 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
1030 for an immediate constant. */
1031
1032 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
1033
1034 /* Determine whether the LEN bytes can be moved by using several move
1035 instructions. Return nonzero if a call to move_by_pieces should
1036 succeed. */
1037
1038 int
1039 can_move_by_pieces (unsigned HOST_WIDE_INT len,
1040 unsigned int align ATTRIBUTE_UNUSED)
1041 {
1042 return MOVE_BY_PIECES_P (len, align);
1043 }
1044
1045 /* Generate several move instructions to copy LEN bytes from block FROM to
1046 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1047 and TO through protect_from_queue before calling.
1048
1049 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1050 used to push FROM to the stack.
1051
1052 ALIGN is maximum stack alignment we can assume.
1053
1054 If ENDP is 0 return TO; if ENDP is 1 return memory at the end ala
1055 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
1056 stpcpy. */
1057
1058 rtx
1059 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
1060 unsigned int align, int endp)
1061 {
1062 struct move_by_pieces data;
1063 rtx to_addr, from_addr = XEXP (from, 0);
1064 unsigned int max_size = MOVE_MAX_PIECES + 1;
1065 enum machine_mode mode = VOIDmode, tmode;
1066 enum insn_code icode;
1067
1068 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
1069
1070 data.offset = 0;
1071 data.from_addr = from_addr;
1072 if (to)
1073 {
1074 to_addr = XEXP (to, 0);
1075 data.to = to;
1076 data.autinc_to
1077 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1078 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1079 data.reverse
1080 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1081 }
1082 else
1083 {
1084 to_addr = NULL_RTX;
1085 data.to = NULL_RTX;
1086 data.autinc_to = 1;
1087 #ifdef STACK_GROWS_DOWNWARD
1088 data.reverse = 1;
1089 #else
1090 data.reverse = 0;
1091 #endif
1092 }
1093 data.to_addr = to_addr;
1094 data.from = from;
1095 data.autinc_from
1096 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1097 || GET_CODE (from_addr) == POST_INC
1098 || GET_CODE (from_addr) == POST_DEC);
1099
1100 data.explicit_inc_from = 0;
1101 data.explicit_inc_to = 0;
1102 if (data.reverse) data.offset = len;
1103 data.len = len;
1104
1105 /* If copying requires more than two move insns,
1106 copy addresses to registers (to make displacements shorter)
1107 and use post-increment if available. */
1108 if (!(data.autinc_from && data.autinc_to)
1109 && move_by_pieces_ninsns (len, align) > 2)
1110 {
1111 /* Find the mode of the largest move... */
1112 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1113 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1114 if (GET_MODE_SIZE (tmode) < max_size)
1115 mode = tmode;
1116
1117 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1118 {
1119 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1120 data.autinc_from = 1;
1121 data.explicit_inc_from = -1;
1122 }
1123 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1124 {
1125 data.from_addr = copy_addr_to_reg (from_addr);
1126 data.autinc_from = 1;
1127 data.explicit_inc_from = 1;
1128 }
1129 if (!data.autinc_from && CONSTANT_P (from_addr))
1130 data.from_addr = copy_addr_to_reg (from_addr);
1131 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1132 {
1133 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1134 data.autinc_to = 1;
1135 data.explicit_inc_to = -1;
1136 }
1137 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1138 {
1139 data.to_addr = copy_addr_to_reg (to_addr);
1140 data.autinc_to = 1;
1141 data.explicit_inc_to = 1;
1142 }
1143 if (!data.autinc_to && CONSTANT_P (to_addr))
1144 data.to_addr = copy_addr_to_reg (to_addr);
1145 }
1146
1147 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1148 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1149 align = MOVE_MAX * BITS_PER_UNIT;
1150
1151 /* First move what we can in the largest integer mode, then go to
1152 successively smaller modes. */
1153
1154 while (max_size > 1)
1155 {
1156 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1157 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1158 if (GET_MODE_SIZE (tmode) < max_size)
1159 mode = tmode;
1160
1161 if (mode == VOIDmode)
1162 break;
1163
1164 icode = mov_optab->handlers[(int) mode].insn_code;
1165 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1166 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1167
1168 max_size = GET_MODE_SIZE (mode);
1169 }
1170
1171 /* The code above should have handled everything. */
1172 if (data.len > 0)
1173 abort ();
1174
1175 if (endp)
1176 {
1177 rtx to1;
1178
1179 if (data.reverse)
1180 abort ();
1181 if (data.autinc_to)
1182 {
1183 if (endp == 2)
1184 {
1185 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1186 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1187 else
1188 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
1189 -1));
1190 }
1191 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1192 data.offset);
1193 }
1194 else
1195 {
1196 if (endp == 2)
1197 --data.offset;
1198 to1 = adjust_address (data.to, QImode, data.offset);
1199 }
1200 return to1;
1201 }
1202 else
1203 return data.to;
1204 }
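
/* A hedged interface sketch: copying a small constant-sized block, with
   DST and SRC standing for hypothetical BLKmode MEM rtx's that have
   already been passed through protect_from_queue:

       if (MOVE_BY_PIECES_P (8, align))
         result = move_by_pieces (dst, src, 8, align, 0);

   With ENDP == 0 the result is simply DST; ENDP == 1 yields the QImode
   MEM just past the copied bytes (a la mempcpy) and ENDP == 2 the MEM
   at the last copied byte (a la stpcpy), as documented above.  */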
1205
1206 /* Return number of insns required to move L bytes by pieces.
1207 ALIGN (in bits) is maximum alignment we can assume. */
1208
1209 static unsigned HOST_WIDE_INT
1210 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align)
1211 {
1212 unsigned HOST_WIDE_INT n_insns = 0;
1213 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1214
1215 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1216 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1217 align = MOVE_MAX * BITS_PER_UNIT;
1218
1219 while (max_size > 1)
1220 {
1221 enum machine_mode mode = VOIDmode, tmode;
1222 enum insn_code icode;
1223
1224 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1225 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1226 if (GET_MODE_SIZE (tmode) < max_size)
1227 mode = tmode;
1228
1229 if (mode == VOIDmode)
1230 break;
1231
1232 icode = mov_optab->handlers[(int) mode].insn_code;
1233 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1234 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1235
1236 max_size = GET_MODE_SIZE (mode);
1237 }
1238
1239 if (l)
1240 abort ();
1241 return n_insns;
1242 }
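
/* Worked example, assuming a hypothetical 32-bit target with
   MOVE_MAX == 4 and word-aligned operands: for L == 15 each iteration
   above picks the widest usable integer mode, giving

       15 bytes = 3 SImode + 1 HImode + 1 QImode moves

   so move_by_pieces_ninsns (15, 32) returns 5, the count that
   MOVE_BY_PIECES_P compares against MOVE_RATIO.  */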
1243
1244 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1245 with move instructions for mode MODE. GENFUN is the gen_... function
1246 to make a move insn for that mode. DATA has all the other info. */
1247
1248 static void
1249 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1250 struct move_by_pieces *data)
1251 {
1252 unsigned int size = GET_MODE_SIZE (mode);
1253 rtx to1 = NULL_RTX, from1;
1254
1255 while (data->len >= size)
1256 {
1257 if (data->reverse)
1258 data->offset -= size;
1259
1260 if (data->to)
1261 {
1262 if (data->autinc_to)
1263 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1264 data->offset);
1265 else
1266 to1 = adjust_address (data->to, mode, data->offset);
1267 }
1268
1269 if (data->autinc_from)
1270 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1271 data->offset);
1272 else
1273 from1 = adjust_address (data->from, mode, data->offset);
1274
1275 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1276 emit_insn (gen_add2_insn (data->to_addr,
1277 GEN_INT (-(HOST_WIDE_INT)size)));
1278 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1279 emit_insn (gen_add2_insn (data->from_addr,
1280 GEN_INT (-(HOST_WIDE_INT)size)));
1281
1282 if (data->to)
1283 emit_insn ((*genfun) (to1, from1));
1284 else
1285 {
1286 #ifdef PUSH_ROUNDING
1287 emit_single_push_insn (mode, from1, NULL);
1288 #else
1289 abort ();
1290 #endif
1291 }
1292
1293 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1294 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1295 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1296 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1297
1298 if (! data->reverse)
1299 data->offset += size;
1300
1301 data->len -= size;
1302 }
1303 }
1304 \f
1305 /* Emit code to move a block Y to a block X. This may be done with
1306 string-move instructions, with multiple scalar move instructions,
1307 or with a library call.
1308
1309 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1310 SIZE is an rtx that says how long they are.
1311 ALIGN is the maximum alignment we can assume they have.
1312 METHOD describes what kind of copy this is, and what mechanisms may be used.
1313
1314 Return the address of the new block, if memcpy is called and returns it,
1315 0 otherwise. */
1316
1317 rtx
1318 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1319 {
1320 bool may_use_call;
1321 rtx retval = 0;
1322 unsigned int align;
1323
1324 switch (method)
1325 {
1326 case BLOCK_OP_NORMAL:
1327 may_use_call = true;
1328 break;
1329
1330 case BLOCK_OP_CALL_PARM:
1331 may_use_call = block_move_libcall_safe_for_call_parm ();
1332
1333 /* Make inhibit_defer_pop nonzero around the library call
1334 to force it to pop the arguments right away. */
1335 NO_DEFER_POP;
1336 break;
1337
1338 case BLOCK_OP_NO_LIBCALL:
1339 may_use_call = false;
1340 break;
1341
1342 default:
1343 abort ();
1344 }
1345
1346 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1347
1348 if (GET_MODE (x) != BLKmode)
1349 abort ();
1350 if (GET_MODE (y) != BLKmode)
1351 abort ();
1352
1353 x = protect_from_queue (x, 1);
1354 y = protect_from_queue (y, 0);
1355 size = protect_from_queue (size, 0);
1356
1357 if (GET_CODE (x) != MEM)
1358 abort ();
1359 if (GET_CODE (y) != MEM)
1360 abort ();
1361 if (size == 0)
1362 abort ();
1363
1364 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1365 can be incorrect is coming from __builtin_memcpy. */
1366 if (GET_CODE (size) == CONST_INT)
1367 {
1368 if (INTVAL (size) == 0)
1369 return 0;
1370
1371 x = shallow_copy_rtx (x);
1372 y = shallow_copy_rtx (y);
1373 set_mem_size (x, size);
1374 set_mem_size (y, size);
1375 }
1376
1377 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1378 move_by_pieces (x, y, INTVAL (size), align, 0);
1379 else if (emit_block_move_via_movstr (x, y, size, align))
1380 ;
1381 else if (may_use_call)
1382 retval = emit_block_move_via_libcall (x, y, size);
1383 else
1384 emit_block_move_via_loop (x, y, size, align);
1385
1386 if (method == BLOCK_OP_CALL_PARM)
1387 OK_DEFER_POP;
1388
1389 return retval;
1390 }
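
/* A hedged usage sketch: expanding a plain structure copy, where X and Y
   are hypothetical BLKmode MEMs of a 64-byte type:

       emit_block_move (x, y, GEN_INT (64), BLOCK_OP_NORMAL);

   Depending on the size, the alignment and the target, this becomes an
   inline move_by_pieces sequence, a movstr pattern, a memcpy (or bcopy)
   libcall, or, for BLOCK_OP_NO_LIBCALL, the byte-at-a-time loop, tried
   in exactly that order above.  */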
1391
1392 /* A subroutine of emit_block_move. Returns true if calling the
1393 block move libcall will not clobber any parameters which may have
1394 already been placed on the stack. */
1395
1396 static bool
1397 block_move_libcall_safe_for_call_parm (void)
1398 {
1399 /* If arguments are pushed on the stack, then they're safe. */
1400 if (PUSH_ARGS)
1401 return true;
1402
1403 /* If registers go on the stack anyway, any argument is sure to clobber
1404 an outgoing argument. */
1405 #if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
1406 {
1407 tree fn = emit_block_move_libcall_fn (false);
1408 (void) fn;
1409 if (REG_PARM_STACK_SPACE (fn) != 0)
1410 return false;
1411 }
1412 #endif
1413
1414 /* If any argument goes in memory, then it might clobber an outgoing
1415 argument. */
1416 {
1417 CUMULATIVE_ARGS args_so_far;
1418 tree fn, arg;
1419
1420 fn = emit_block_move_libcall_fn (false);
1421 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0);
1422
1423 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1424 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1425 {
1426 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1427 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1428 if (!tmp || !REG_P (tmp))
1429 return false;
1430 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1431 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
1432 NULL_TREE, 1))
1433 return false;
1434 #endif
1435 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1436 }
1437 }
1438 return true;
1439 }
1440
1441 /* A subroutine of emit_block_move. Expand a movstr pattern;
1442 return true if successful. */
1443
1444 static bool
1445 emit_block_move_via_movstr (rtx x, rtx y, rtx size, unsigned int align)
1446 {
1447 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1448 enum machine_mode mode;
1449
1450 /* Since this is a move insn, we don't care about volatility. */
1451 volatile_ok = 1;
1452
1453 /* Try the most limited insn first, because there's no point
1454 including more than one in the machine description unless
1455 the more limited one has some advantage. */
1456
1457 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1458 mode = GET_MODE_WIDER_MODE (mode))
1459 {
1460 enum insn_code code = movstr_optab[(int) mode];
1461 insn_operand_predicate_fn pred;
1462
1463 if (code != CODE_FOR_nothing
1464 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1465 here because if SIZE is less than the mode mask, as it is
1466 returned by the macro, it will definitely be less than the
1467 actual mode mask. */
1468 && ((GET_CODE (size) == CONST_INT
1469 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1470 <= (GET_MODE_MASK (mode) >> 1)))
1471 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1472 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1473 || (*pred) (x, BLKmode))
1474 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1475 || (*pred) (y, BLKmode))
1476 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1477 || (*pred) (opalign, VOIDmode)))
1478 {
1479 rtx op2;
1480 rtx last = get_last_insn ();
1481 rtx pat;
1482
1483 op2 = convert_to_mode (mode, size, 1);
1484 pred = insn_data[(int) code].operand[2].predicate;
1485 if (pred != 0 && ! (*pred) (op2, mode))
1486 op2 = copy_to_mode_reg (mode, op2);
1487
1488 /* ??? When called via emit_block_move_for_call, it'd be
1489 nice if there were some way to inform the backend, so
1490 that it doesn't fail the expansion because it thinks
1491 emitting the libcall would be more efficient. */
1492
1493 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1494 if (pat)
1495 {
1496 emit_insn (pat);
1497 volatile_ok = 0;
1498 return true;
1499 }
1500 else
1501 delete_insns_since (last);
1502 }
1503 }
1504
1505 volatile_ok = 0;
1506 return false;
1507 }
1508
1509 /* A subroutine of emit_block_move. Expand a call to memcpy or bcopy.
1510 Return the return value from memcpy, 0 otherwise. */
1511
1512 static rtx
1513 emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
1514 {
1515 rtx dst_addr, src_addr;
1516 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1517 enum machine_mode size_mode;
1518 rtx retval;
1519
1520 /* DST, SRC, or SIZE may have been passed through protect_from_queue.
1521
1522 It is unsafe to save the value generated by protect_from_queue and reuse
1523 it later. Consider what happens if emit_queue is called before the
1524 return value from protect_from_queue is used.
1525
1526 Expansion of the CALL_EXPR below will call emit_queue before we are
1527 finished emitting RTL for argument setup. So if we are not careful we
1528 could get the wrong value for an argument.
1529
1530 To avoid this problem we go ahead and emit code to copy the addresses of
1531 DST and SRC and SIZE into new pseudos. We can then place those new
1532 pseudos into an RTL_EXPR and use them later, even after a call to
1533 emit_queue.
1534
1535 Note this is not strictly needed for library calls since they do not call
1536 emit_queue before loading their arguments. However, we may need to have
1537 library calls call emit_queue in the future since failing to do so could
1538 cause problems for targets which define SMALL_REGISTER_CLASSES and pass
1539 arguments in registers. */
1540
1541 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1542 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1543
1544 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1545 src_addr = convert_memory_address (ptr_mode, src_addr);
1546
1547 dst_tree = make_tree (ptr_type_node, dst_addr);
1548 src_tree = make_tree (ptr_type_node, src_addr);
1549
1550 if (TARGET_MEM_FUNCTIONS)
1551 size_mode = TYPE_MODE (sizetype);
1552 else
1553 size_mode = TYPE_MODE (unsigned_type_node);
1554
1555 size = convert_to_mode (size_mode, size, 1);
1556 size = copy_to_mode_reg (size_mode, size);
1557
1558 /* It is incorrect to use the libcall calling conventions to call
1559 memcpy in this context. This could be a user call to memcpy and
1560 the user may wish to examine the return value from memcpy. For
1561 targets where libcalls and normal calls have different conventions
1562 for returning pointers, we could end up generating incorrect code.
1563
1564 For convenience, we generate the call to bcopy this way as well. */
1565
1566 if (TARGET_MEM_FUNCTIONS)
1567 size_tree = make_tree (sizetype, size);
1568 else
1569 size_tree = make_tree (unsigned_type_node, size);
1570
1571 fn = emit_block_move_libcall_fn (true);
1572 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1573 if (TARGET_MEM_FUNCTIONS)
1574 {
1575 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1576 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1577 }
1578 else
1579 {
1580 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1581 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1582 }
1583
1584 /* Now we have to build up the CALL_EXPR itself. */
1585 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1586 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1587 call_expr, arg_list, NULL_TREE);
1588
1589 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1590
1591 /* If we are initializing a readonly value, show the above call clobbered
1592 it. Otherwise, a load from it may erroneously be hoisted from a loop, or
1593 the delay slot scheduler might overlook conflicts and take nasty
1594 decisions. */
1595 if (RTX_UNCHANGING_P (dst))
1596 add_function_usage_to
1597 (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
1598 gen_rtx_CLOBBER (VOIDmode, dst),
1599 NULL_RTX));
1600
1601 return TARGET_MEM_FUNCTIONS ? retval : NULL_RTX;
1602 }
1603
1604 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1605 for the function we use for block copies. The first time FOR_CALL
1606 is true, we call assemble_external. */
1607
1608 static GTY(()) tree block_move_fn;
1609
1610 void
1611 init_block_move_fn (const char *asmspec)
1612 {
1613 if (!block_move_fn)
1614 {
1615 tree args, fn;
1616
1617 if (TARGET_MEM_FUNCTIONS)
1618 {
1619 fn = get_identifier ("memcpy");
1620 args = build_function_type_list (ptr_type_node, ptr_type_node,
1621 const_ptr_type_node, sizetype,
1622 NULL_TREE);
1623 }
1624 else
1625 {
1626 fn = get_identifier ("bcopy");
1627 args = build_function_type_list (void_type_node, const_ptr_type_node,
1628 ptr_type_node, unsigned_type_node,
1629 NULL_TREE);
1630 }
1631
1632 fn = build_decl (FUNCTION_DECL, fn, args);
1633 DECL_EXTERNAL (fn) = 1;
1634 TREE_PUBLIC (fn) = 1;
1635 DECL_ARTIFICIAL (fn) = 1;
1636 TREE_NOTHROW (fn) = 1;
1637
1638 block_move_fn = fn;
1639 }
1640
1641 if (asmspec)
1642 {
1643 SET_DECL_RTL (block_move_fn, NULL_RTX);
1644 SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));
1645 }
1646 }
1647
1648 static tree
1649 emit_block_move_libcall_fn (int for_call)
1650 {
1651 static bool emitted_extern;
1652
1653 if (!block_move_fn)
1654 init_block_move_fn (NULL);
1655
1656 if (for_call && !emitted_extern)
1657 {
1658 emitted_extern = true;
1659 make_decl_rtl (block_move_fn, NULL);
1660 assemble_external (block_move_fn);
1661 }
1662
1663 return block_move_fn;
1664 }
1665
1666 /* A subroutine of emit_block_move. Copy the data via an explicit
1667 loop. This is used only when libcalls are forbidden. */
1668 /* ??? It'd be nice to copy in hunks larger than QImode. */
1669
1670 static void
1671 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1672 unsigned int align ATTRIBUTE_UNUSED)
1673 {
1674 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1675 enum machine_mode iter_mode;
1676
1677 iter_mode = GET_MODE (size);
1678 if (iter_mode == VOIDmode)
1679 iter_mode = word_mode;
1680
1681 top_label = gen_label_rtx ();
1682 cmp_label = gen_label_rtx ();
1683 iter = gen_reg_rtx (iter_mode);
1684
1685 emit_move_insn (iter, const0_rtx);
1686
1687 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1688 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1689 do_pending_stack_adjust ();
1690
1691 emit_note (NOTE_INSN_LOOP_BEG);
1692
1693 emit_jump (cmp_label);
1694 emit_label (top_label);
1695
1696 tmp = convert_modes (Pmode, iter_mode, iter, true);
1697 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1698 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1699 x = change_address (x, QImode, x_addr);
1700 y = change_address (y, QImode, y_addr);
1701
1702 emit_move_insn (x, y);
1703
1704 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1705 true, OPTAB_LIB_WIDEN);
1706 if (tmp != iter)
1707 emit_move_insn (iter, tmp);
1708
1709 emit_note (NOTE_INSN_LOOP_CONT);
1710 emit_label (cmp_label);
1711
1712 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1713 true, top_label);
1714
1715 emit_note (NOTE_INSN_LOOP_END);
1716 }
1717 \f
1718 /* Copy all or part of a value X into registers starting at REGNO.
1719 The number of registers to be filled is NREGS. */
1720
1721 void
1722 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1723 {
1724 int i;
1725 #ifdef HAVE_load_multiple
1726 rtx pat;
1727 rtx last;
1728 #endif
1729
1730 if (nregs == 0)
1731 return;
1732
1733 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1734 x = validize_mem (force_const_mem (mode, x));
1735
1736 /* See if the machine can do this with a load multiple insn. */
1737 #ifdef HAVE_load_multiple
1738 if (HAVE_load_multiple)
1739 {
1740 last = get_last_insn ();
1741 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1742 GEN_INT (nregs));
1743 if (pat)
1744 {
1745 emit_insn (pat);
1746 return;
1747 }
1748 else
1749 delete_insns_since (last);
1750 }
1751 #endif
1752
1753 for (i = 0; i < nregs; i++)
1754 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1755 operand_subword_force (x, i, mode));
1756 }
1757
1758 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1759 The number of registers to be filled is NREGS. */
1760
1761 void
1762 move_block_from_reg (int regno, rtx x, int nregs)
1763 {
1764 int i;
1765
1766 if (nregs == 0)
1767 return;
1768
1769 /* See if the machine can do this with a store multiple insn. */
1770 #ifdef HAVE_store_multiple
1771 if (HAVE_store_multiple)
1772 {
1773 rtx last = get_last_insn ();
1774 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1775 GEN_INT (nregs));
1776 if (pat)
1777 {
1778 emit_insn (pat);
1779 return;
1780 }
1781 else
1782 delete_insns_since (last);
1783 }
1784 #endif
1785
1786 for (i = 0; i < nregs; i++)
1787 {
1788 rtx tem = operand_subword (x, i, 1, BLKmode);
1789
1790 if (tem == 0)
1791 abort ();
1792
1793 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1794 }
1795 }
1796
1797 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1798 ORIG, where ORIG is a non-consecutive group of registers represented by
1799 a PARALLEL. The clone is identical to the original except that the
1800 original set of registers is replaced by a new set of pseudo registers.
1801 The new set has the same modes as the original set. */
1802
1803 rtx
1804 gen_group_rtx (rtx orig)
1805 {
1806 int i, length;
1807 rtx *tmps;
1808
1809 if (GET_CODE (orig) != PARALLEL)
1810 abort ();
1811
1812 length = XVECLEN (orig, 0);
1813 tmps = alloca (sizeof (rtx) * length);
1814
1815 /* Skip a NULL entry in first slot. */
1816 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1817
1818 if (i)
1819 tmps[0] = 0;
1820
1821 for (; i < length; i++)
1822 {
1823 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1824 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1825
1826 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1827 }
1828
1829 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1830 }
1831
1832 /* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
1833 where DST is non-consecutive registers represented by a PARALLEL.
1834 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1835 if not known. */
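/* Schematically (register numbers invented for illustration), a 16-byte
   value returned in two DImode registers might arrive as

     (parallel [(expr_list (reg:DI 3) (const_int 0))
                (expr_list (reg:DI 4) (const_int 8))])

   where each element pairs a register with the byte offset of the piece
   it carries; slot 0 holds a null rtx when the parameter also lives
   partly on the stack.  */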
1836
1837 void
1838 emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
1839 {
1840 rtx *tmps, src;
1841 int start, i;
1842
1843 if (GET_CODE (dst) != PARALLEL)
1844 abort ();
1845
1846 /* Check for a NULL entry, used to indicate that the parameter goes
1847 both on the stack and in registers. */
1848 if (XEXP (XVECEXP (dst, 0, 0), 0))
1849 start = 0;
1850 else
1851 start = 1;
1852
1853 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1854
1855 /* Process the pieces. */
1856 for (i = start; i < XVECLEN (dst, 0); i++)
1857 {
1858 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1859 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1860 unsigned int bytelen = GET_MODE_SIZE (mode);
1861 int shift = 0;
1862
1863 /* Handle trailing fragments that run over the size of the struct. */
1864 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1865 {
1866 /* Arrange to shift the fragment to where it belongs.
1867 extract_bit_field loads to the lsb of the reg. */
1868 if (
1869 #ifdef BLOCK_REG_PADDING
1870 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1871 == (BYTES_BIG_ENDIAN ? upward : downward)
1872 #else
1873 BYTES_BIG_ENDIAN
1874 #endif
1875 )
1876 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1877 bytelen = ssize - bytepos;
1878 if (bytelen <= 0)
1879 abort ();
1880 }
1881
1882 /* If we won't be loading directly from memory, protect the real source
1883 from strange tricks we might play; but make sure that the source can
1884 be loaded directly into the destination. */
1885 src = orig_src;
1886 if (GET_CODE (orig_src) != MEM
1887 && (!CONSTANT_P (orig_src)
1888 || (GET_MODE (orig_src) != mode
1889 && GET_MODE (orig_src) != VOIDmode)))
1890 {
1891 if (GET_MODE (orig_src) == VOIDmode)
1892 src = gen_reg_rtx (mode);
1893 else
1894 src = gen_reg_rtx (GET_MODE (orig_src));
1895
1896 emit_move_insn (src, orig_src);
1897 }
1898
1899 /* Optimize the access just a bit. */
1900 if (GET_CODE (src) == MEM
1901 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1902 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1903 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1904 && bytelen == GET_MODE_SIZE (mode))
1905 {
1906 tmps[i] = gen_reg_rtx (mode);
1907 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1908 }
1909 else if (GET_CODE (src) == CONCAT)
1910 {
1911 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1912 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1913
1914 if ((bytepos == 0 && bytelen == slen0)
1915 || (bytepos != 0 && bytepos + bytelen <= slen))
1916 {
1917 /* The following assumes that the concatenated objects all
1918 have the same size. In this case, a simple calculation
1919 can be used to determine the object and the bit field
1920 to be extracted. */
1921 tmps[i] = XEXP (src, bytepos / slen0);
1922 if (! CONSTANT_P (tmps[i])
1923 && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
1924 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1925 (bytepos % slen0) * BITS_PER_UNIT,
1926 1, NULL_RTX, mode, mode, ssize);
1927 }
1928 else if (bytepos == 0)
1929 {
1930 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
1931 emit_move_insn (mem, src);
1932 tmps[i] = adjust_address (mem, mode, 0);
1933 }
1934 else
1935 abort ();
1936 }
1937 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1938 SIMD register, which is currently broken. While we get GCC
1939 to emit proper RTL for these cases, let's dump to memory. */
1940 else if (VECTOR_MODE_P (GET_MODE (dst))
1941 && GET_CODE (src) == REG)
1942 {
1943 int slen = GET_MODE_SIZE (GET_MODE (src));
1944 rtx mem;
1945
1946 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1947 emit_move_insn (mem, src);
1948 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1949 }
1950 else if (CONSTANT_P (src)
1951 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
1952 tmps[i] = src;
1953 else
1954 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1955 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1956 mode, mode, ssize);
1957
1958 if (shift)
1959 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1960 tmps[i], 0, OPTAB_WIDEN);
1961 }
1962
1963 emit_queue ();
1964
1965 /* Copy the extracted pieces into the proper (probable) hard regs. */
1966 for (i = start; i < XVECLEN (dst, 0); i++)
1967 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
1968 }
1969
1970 /* Emit code to move a block SRC to block DST, where SRC and DST are
1971 non-consecutive groups of registers, each represented by a PARALLEL. */
1972
1973 void
1974 emit_group_move (rtx dst, rtx src)
1975 {
1976 int i;
1977
1978 if (GET_CODE (src) != PARALLEL
1979 || GET_CODE (dst) != PARALLEL
1980 || XVECLEN (src, 0) != XVECLEN (dst, 0))
1981 abort ();
1982
1983 /* Skip first entry if NULL. */
1984 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1985 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1986 XEXP (XVECEXP (src, 0, i), 0));
1987 }
1988
1989 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1990 where SRC is non-consecutive registers represented by a PARALLEL.
1991 SSIZE represents the total size of block ORIG_DST, or -1 if not
1992 known. */
1993
1994 void
1995 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1996 {
1997 rtx *tmps, dst;
1998 int start, i;
1999
2000 if (GET_CODE (src) != PARALLEL)
2001 abort ();
2002
2003 /* Check for a NULL entry, used to indicate that the parameter goes
2004 both on the stack and in registers. */
2005 if (XEXP (XVECEXP (src, 0, 0), 0))
2006 start = 0;
2007 else
2008 start = 1;
2009
2010 tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));
2011
2012 /* Copy the (probable) hard regs into pseudos. */
2013 for (i = start; i < XVECLEN (src, 0); i++)
2014 {
2015 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2016 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2017 emit_move_insn (tmps[i], reg);
2018 }
2019 emit_queue ();
2020
2021 /* If we won't be storing directly into memory, protect the real destination
2022 from strange tricks we might play. */
2023 dst = orig_dst;
2024 if (GET_CODE (dst) == PARALLEL)
2025 {
2026 rtx temp;
2027
2028 /* We can get a PARALLEL dst if there is a conditional expression in
2029 a return statement. In that case, the dst and src are the same,
2030 so no action is necessary. */
2031 if (rtx_equal_p (dst, src))
2032 return;
2033
2034 /* It is unclear if we can ever reach here, but we may as well handle
2035 it. Allocate a temporary, and split this into a store/load to/from
2036 the temporary. */
2037
2038 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2039 emit_group_store (temp, src, type, ssize);
2040 emit_group_load (dst, temp, type, ssize);
2041 return;
2042 }
2043 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2044 {
2045 dst = gen_reg_rtx (GET_MODE (orig_dst));
2046 /* Make life a bit easier for combine. */
2047 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
2048 }
2049
2050 /* Process the pieces. */
2051 for (i = start; i < XVECLEN (src, 0); i++)
2052 {
2053 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2054 enum machine_mode mode = GET_MODE (tmps[i]);
2055 unsigned int bytelen = GET_MODE_SIZE (mode);
2056 rtx dest = dst;
2057
2058 /* Handle trailing fragments that run over the size of the struct. */
2059 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2060 {
2061 /* store_bit_field always takes its value from the lsb.
2062 Move the fragment to the lsb if it's not already there. */
2063 if (
2064 #ifdef BLOCK_REG_PADDING
2065 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2066 == (BYTES_BIG_ENDIAN ? upward : downward)
2067 #else
2068 BYTES_BIG_ENDIAN
2069 #endif
2070 )
2071 {
2072 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2073 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2074 tmps[i], 0, OPTAB_WIDEN);
2075 }
2076 bytelen = ssize - bytepos;
2077 }
2078
2079 if (GET_CODE (dst) == CONCAT)
2080 {
2081 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2082 dest = XEXP (dst, 0);
2083 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2084 {
2085 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2086 dest = XEXP (dst, 1);
2087 }
2088 else if (bytepos == 0 && XVECLEN (src, 0))
2089 {
2090 dest = assign_stack_temp (GET_MODE (dest),
2091 GET_MODE_SIZE (GET_MODE (dest)), 0);
2092 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2093 tmps[i]);
2094 dst = dest;
2095 break;
2096 }
2097 else
2098 abort ();
2099 }
2100
2101 /* Optimize the access just a bit. */
2102 if (GET_CODE (dest) == MEM
2103 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2104 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2105 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2106 && bytelen == GET_MODE_SIZE (mode))
2107 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2108 else
2109 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2110 mode, tmps[i], ssize);
2111 }
2112
2113 emit_queue ();
2114
2115 /* Copy from the pseudo into the (probable) hard reg. */
2116 if (orig_dst != dst)
2117 emit_move_insn (orig_dst, dst);
2118 }
2119
2120 /* Generate code to copy a BLKmode object of TYPE out of a
2121 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2122 is null, a stack temporary is created. TGTBLK is returned.
2123
2124 The primary purpose of this routine is to handle functions
2125 that return BLKmode structures in registers. Some machines
2126 (the PA for example) want to return all small structures
2127 in registers regardless of the structure's alignment. */
2128
2129 rtx
2130 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2131 {
2132 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2133 rtx src = NULL, dst = NULL;
2134 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2135 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2136
2137 if (tgtblk == 0)
2138 {
2139 tgtblk = assign_temp (build_qualified_type (type,
2140 (TYPE_QUALS (type)
2141 | TYPE_QUAL_CONST)),
2142 0, 1, 1);
2143 preserve_temp_slots (tgtblk);
2144 }
2145
2146 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2147 into a new pseudo which is a full word. */
2148
2149 if (GET_MODE (srcreg) != BLKmode
2150 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2151 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2152
2153 /* Structures whose size is not a multiple of a word are aligned
2154 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2155 machine, this means we must skip the empty high order bytes when
2156 calculating the bit offset. */
2157 if (BYTES_BIG_ENDIAN
2158 && bytes % UNITS_PER_WORD)
2159 big_endian_correction
2160 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
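  /* For example, with 4-byte words (BITS_PER_WORD == 32) and a 6-byte
     structure, bytes % UNITS_PER_WORD is 2, so the correction is
     32 - 2 * 8 = 16 bits of padding to skip.  */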
2161
2162 /* Copy the structure BITSIZE bits at a time.
2163
2164 We could probably emit more efficient code for machines which do not use
2165 strict alignment, but it doesn't seem worth the effort at the current
2166 time. */
2167 for (bitpos = 0, xbitpos = big_endian_correction;
2168 bitpos < bytes * BITS_PER_UNIT;
2169 bitpos += bitsize, xbitpos += bitsize)
2170 {
2171 /* We need a new source operand each time xbitpos is on a
2172 word boundary and when xbitpos == big_endian_correction
2173 (the first time through). */
2174 if (xbitpos % BITS_PER_WORD == 0
2175 || xbitpos == big_endian_correction)
2176 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2177 GET_MODE (srcreg));
2178
2179 /* We need a new destination operand each time bitpos is on
2180 a word boundary. */
2181 if (bitpos % BITS_PER_WORD == 0)
2182 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2183
2184 /* Use xbitpos for the source extraction (right justified) and
2185 bitpos for the destination store (left justified). */
2186 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2187 extract_bit_field (src, bitsize,
2188 xbitpos % BITS_PER_WORD, 1,
2189 NULL_RTX, word_mode, word_mode,
2190 BITS_PER_WORD),
2191 BITS_PER_WORD);
2192 }
2193
2194 return tgtblk;
2195 }
2196
2197 /* Add a USE expression for REG to the (possibly empty) list pointed
2198 to by CALL_FUSAGE. REG must denote a hard register. */
2199
2200 void
2201 use_reg (rtx *call_fusage, rtx reg)
2202 {
2203 if (GET_CODE (reg) != REG
2204 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2205 abort ();
2206
2207 *call_fusage
2208 = gen_rtx_EXPR_LIST (VOIDmode,
2209 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2210 }
2211
2212 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2213 starting at REGNO. All of these registers must be hard registers. */
2214
2215 void
2216 use_regs (rtx *call_fusage, int regno, int nregs)
2217 {
2218 int i;
2219
2220 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2221 abort ();
2222
2223 for (i = 0; i < nregs; i++)
2224 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2225 }
2226
2227 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2228 PARALLEL REGS. This is for calls that pass values in multiple
2229 non-contiguous locations. The Irix 6 ABI has examples of this. */
2230
2231 void
2232 use_group_regs (rtx *call_fusage, rtx regs)
2233 {
2234 int i;
2235
2236 for (i = 0; i < XVECLEN (regs, 0); i++)
2237 {
2238 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2239
2240 /* A NULL entry means the parameter goes both on the stack and in
2241 registers. This can also be a MEM for targets that pass values
2242 partially on the stack and partially in registers. */
2243 if (reg != 0 && GET_CODE (reg) == REG)
2244 use_reg (call_fusage, reg);
2245 }
2246 }
2247 \f
2248
2249 /* Determine whether the LEN bytes generated by CONSTFUN can be
2250 stored to memory using several move instructions. CONSTFUNDATA is
2251 a pointer which will be passed as argument in every CONSTFUN call.
2252 ALIGN is maximum alignment we can assume. Return nonzero if a
2253 call to store_by_pieces should succeed. */
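/* For illustration only, a hypothetical CONSTFUN (not part of this file),
   modelled on what the string built-ins use, might be

     static rtx
     read_str_example (void *data, HOST_WIDE_INT offset, enum machine_mode mode)
     {
       return c_readstr ((const char *) data + offset, mode);
     }

   can_store_by_pieces checks that every constant such a callback would
   produce is LEGITIMATE_CONSTANT_P for the modes the store would use.  */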
2254
2255 int
2256 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2257 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2258 void *constfundata, unsigned int align)
2259 {
2260 unsigned HOST_WIDE_INT max_size, l;
2261 HOST_WIDE_INT offset = 0;
2262 enum machine_mode mode, tmode;
2263 enum insn_code icode;
2264 int reverse;
2265 rtx cst;
2266
2267 if (len == 0)
2268 return 1;
2269
2270 if (! STORE_BY_PIECES_P (len, align))
2271 return 0;
2272
2273 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2274 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2275 align = MOVE_MAX * BITS_PER_UNIT;
2276
2277 /* We would first store what we can in the largest integer mode, then go to
2278 successively smaller modes. */
2279
2280 for (reverse = 0;
2281 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2282 reverse++)
2283 {
2284 l = len;
2285 mode = VOIDmode;
2286 max_size = STORE_MAX_PIECES + 1;
2287 while (max_size > 1)
2288 {
2289 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2290 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2291 if (GET_MODE_SIZE (tmode) < max_size)
2292 mode = tmode;
2293
2294 if (mode == VOIDmode)
2295 break;
2296
2297 icode = mov_optab->handlers[(int) mode].insn_code;
2298 if (icode != CODE_FOR_nothing
2299 && align >= GET_MODE_ALIGNMENT (mode))
2300 {
2301 unsigned int size = GET_MODE_SIZE (mode);
2302
2303 while (l >= size)
2304 {
2305 if (reverse)
2306 offset -= size;
2307
2308 cst = (*constfun) (constfundata, offset, mode);
2309 if (!LEGITIMATE_CONSTANT_P (cst))
2310 return 0;
2311
2312 if (!reverse)
2313 offset += size;
2314
2315 l -= size;
2316 }
2317 }
2318
2319 max_size = GET_MODE_SIZE (mode);
2320 }
2321
2322 /* The code above should have handled everything. */
2323 if (l != 0)
2324 abort ();
2325 }
2326
2327 return 1;
2328 }
2329
2330 /* Generate several move instructions to store LEN bytes generated by
2331 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2332 pointer which will be passed as argument in every CONSTFUN call.
2333 ALIGN is maximum alignment we can assume.
2334 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
2335 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
2336 stpcpy. */
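/* For example, after storing LEN == 5 bytes at TO (simple forward case,
   no autoincrement addressing): ENDP == 0 returns TO itself, ENDP == 1
   returns a QImode MEM addressing TO + 5, and ENDP == 2 returns a QImode
   MEM addressing TO + 4.  */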
2337
2338 rtx
2339 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2340 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2341 void *constfundata, unsigned int align, int endp)
2342 {
2343 struct store_by_pieces data;
2344
2345 if (len == 0)
2346 {
2347 if (endp == 2)
2348 abort ();
2349 return to;
2350 }
2351
2352 if (! STORE_BY_PIECES_P (len, align))
2353 abort ();
2354 to = protect_from_queue (to, 1);
2355 data.constfun = constfun;
2356 data.constfundata = constfundata;
2357 data.len = len;
2358 data.to = to;
2359 store_by_pieces_1 (&data, align);
2360 if (endp)
2361 {
2362 rtx to1;
2363
2364 if (data.reverse)
2365 abort ();
2366 if (data.autinc_to)
2367 {
2368 if (endp == 2)
2369 {
2370 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2371 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2372 else
2373 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2374 -1));
2375 }
2376 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2377 data.offset);
2378 }
2379 else
2380 {
2381 if (endp == 2)
2382 --data.offset;
2383 to1 = adjust_address (data.to, QImode, data.offset);
2384 }
2385 return to1;
2386 }
2387 else
2388 return data.to;
2389 }
2390
2391 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2392 rtx with BLKmode). The caller must pass TO through protect_from_queue
2393 before calling. ALIGN is maximum alignment we can assume. */
2394
2395 static void
2396 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2397 {
2398 struct store_by_pieces data;
2399
2400 if (len == 0)
2401 return;
2402
2403 data.constfun = clear_by_pieces_1;
2404 data.constfundata = NULL;
2405 data.len = len;
2406 data.to = to;
2407 store_by_pieces_1 (&data, align);
2408 }
2409
2410 /* Callback routine for clear_by_pieces.
2411 Return const0_rtx unconditionally. */
2412
2413 static rtx
2414 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2415 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2416 enum machine_mode mode ATTRIBUTE_UNUSED)
2417 {
2418 return const0_rtx;
2419 }
2420
2421 /* Subroutine of clear_by_pieces and store_by_pieces.
2422 Generate several move instructions to store LEN bytes of block TO. (A MEM
2423 rtx with BLKmode). The caller must pass TO through protect_from_queue
2424 before calling. ALIGN is maximum alignment we can assume. */
2425
2426 static void
2427 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2428 unsigned int align ATTRIBUTE_UNUSED)
2429 {
2430 rtx to_addr = XEXP (data->to, 0);
2431 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2432 enum machine_mode mode = VOIDmode, tmode;
2433 enum insn_code icode;
2434
2435 data->offset = 0;
2436 data->to_addr = to_addr;
2437 data->autinc_to
2438 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2439 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2440
2441 data->explicit_inc_to = 0;
2442 data->reverse
2443 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2444 if (data->reverse)
2445 data->offset = data->len;
2446
2447 /* If storing requires more than two move insns,
2448 copy addresses to registers (to make displacements shorter)
2449 and use post-increment if available. */
2450 if (!data->autinc_to
2451 && move_by_pieces_ninsns (data->len, align) > 2)
2452 {
2453 /* Determine the main mode we'll be using. */
2454 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2455 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2456 if (GET_MODE_SIZE (tmode) < max_size)
2457 mode = tmode;
2458
2459 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2460 {
2461 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2462 data->autinc_to = 1;
2463 data->explicit_inc_to = -1;
2464 }
2465
2466 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2467 && ! data->autinc_to)
2468 {
2469 data->to_addr = copy_addr_to_reg (to_addr);
2470 data->autinc_to = 1;
2471 data->explicit_inc_to = 1;
2472 }
2473
2474 if ( !data->autinc_to && CONSTANT_P (to_addr))
2475 data->to_addr = copy_addr_to_reg (to_addr);
2476 }
2477
2478 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2479 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2480 align = MOVE_MAX * BITS_PER_UNIT;
2481
2482 /* First store what we can in the largest integer mode, then go to
2483 successively smaller modes. */
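  /* For example, assuming STORE_MAX_PIECES == 4, sufficient alignment and
     move patterns for SImode, HImode and QImode: a length of 7 is handled
     as one SImode store, then one HImode store, then one QImode store.  */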
2484
2485 while (max_size > 1)
2486 {
2487 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2488 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2489 if (GET_MODE_SIZE (tmode) < max_size)
2490 mode = tmode;
2491
2492 if (mode == VOIDmode)
2493 break;
2494
2495 icode = mov_optab->handlers[(int) mode].insn_code;
2496 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2497 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2498
2499 max_size = GET_MODE_SIZE (mode);
2500 }
2501
2502 /* The code above should have handled everything. */
2503 if (data->len != 0)
2504 abort ();
2505 }
2506
2507 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2508 with move instructions for mode MODE. GENFUN is the gen_... function
2509 to make a move insn for that mode. DATA has all the other info. */
2510
2511 static void
2512 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2513 struct store_by_pieces *data)
2514 {
2515 unsigned int size = GET_MODE_SIZE (mode);
2516 rtx to1, cst;
2517
2518 while (data->len >= size)
2519 {
2520 if (data->reverse)
2521 data->offset -= size;
2522
2523 if (data->autinc_to)
2524 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2525 data->offset);
2526 else
2527 to1 = adjust_address (data->to, mode, data->offset);
2528
2529 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2530 emit_insn (gen_add2_insn (data->to_addr,
2531 GEN_INT (-(HOST_WIDE_INT) size)));
2532
2533 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2534 emit_insn ((*genfun) (to1, cst));
2535
2536 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2537 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2538
2539 if (! data->reverse)
2540 data->offset += size;
2541
2542 data->len -= size;
2543 }
2544 }
2545 \f
2546 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2547 its length in bytes. */
2548
2549 rtx
2550 clear_storage (rtx object, rtx size)
2551 {
2552 rtx retval = 0;
2553 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2554 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2555
2556 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2557 just move a zero. Otherwise, do this a piece at a time. */
2558 if (GET_MODE (object) != BLKmode
2559 && GET_CODE (size) == CONST_INT
2560 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2561 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2562 else
2563 {
2564 object = protect_from_queue (object, 1);
2565 size = protect_from_queue (size, 0);
2566
2567 if (size == const0_rtx)
2568 ;
2569 else if (GET_CODE (size) == CONST_INT
2570 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2571 clear_by_pieces (object, INTVAL (size), align);
2572 else if (clear_storage_via_clrstr (object, size, align))
2573 ;
2574 else
2575 retval = clear_storage_via_libcall (object, size);
2576 }
2577
2578 return retval;
2579 }
2580
2581 /* A subroutine of clear_storage. Expand a clrstr pattern;
2582 return true if successful. */
2583
2584 static bool
2585 clear_storage_via_clrstr (rtx object, rtx size, unsigned int align)
2586 {
2587 /* Try the most limited insn first, because there's no point
2588 including more than one in the machine description unless
2589 the more limited one has some advantage. */
2590
2591 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2592 enum machine_mode mode;
2593
2594 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2595 mode = GET_MODE_WIDER_MODE (mode))
2596 {
2597 enum insn_code code = clrstr_optab[(int) mode];
2598 insn_operand_predicate_fn pred;
2599
2600 if (code != CODE_FOR_nothing
2601 /* We don't need MODE to be narrower than
2602 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2603 the mode mask, as it is returned by the macro, it will
2604 definitely be less than the actual mode mask. */
2605 && ((GET_CODE (size) == CONST_INT
2606 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2607 <= (GET_MODE_MASK (mode) >> 1)))
2608 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2609 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2610 || (*pred) (object, BLKmode))
2611 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2612 || (*pred) (opalign, VOIDmode)))
2613 {
2614 rtx op1;
2615 rtx last = get_last_insn ();
2616 rtx pat;
2617
2618 op1 = convert_to_mode (mode, size, 1);
2619 pred = insn_data[(int) code].operand[1].predicate;
2620 if (pred != 0 && ! (*pred) (op1, mode))
2621 op1 = copy_to_mode_reg (mode, op1);
2622
2623 pat = GEN_FCN ((int) code) (object, op1, opalign);
2624 if (pat)
2625 {
2626 emit_insn (pat);
2627 return true;
2628 }
2629 else
2630 delete_insns_since (last);
2631 }
2632 }
2633
2634 return false;
2635 }
2636
2637 /* A subroutine of clear_storage. Expand a call to memset or bzero.
2638 Return the return value of memset, 0 otherwise. */
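/* In effect this emits the equivalent of memset (OBJECT, 0, SIZE) when
   TARGET_MEM_FUNCTIONS, and bzero (OBJECT, SIZE) otherwise.  */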
2639
2640 static rtx
2641 clear_storage_via_libcall (rtx object, rtx size)
2642 {
2643 tree call_expr, arg_list, fn, object_tree, size_tree;
2644 enum machine_mode size_mode;
2645 rtx retval;
2646
2647 /* OBJECT or SIZE may have been passed through protect_from_queue.
2648
2649 It is unsafe to save the value generated by protect_from_queue
2650 and reuse it later. Consider what happens if emit_queue is
2651 called before the return value from protect_from_queue is used.
2652
2653 Expansion of the CALL_EXPR below will call emit_queue before
2654 we are finished emitting RTL for argument setup. So if we are
2655 not careful we could get the wrong value for an argument.
2656
2657 To avoid this problem we go ahead and emit code to copy OBJECT
2658 and SIZE into new pseudos. We can then place those new pseudos
2659 into an RTL_EXPR and use them later, even after a call to
2660 emit_queue.
2661
2662 Note this is not strictly needed for library calls since they
2663 do not call emit_queue before loading their arguments. However,
2664 we may need to have library calls call emit_queue in the future
2665 since failing to do so could cause problems for targets which
2666 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2667
2668 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2669
2670 if (TARGET_MEM_FUNCTIONS)
2671 size_mode = TYPE_MODE (sizetype);
2672 else
2673 size_mode = TYPE_MODE (unsigned_type_node);
2674 size = convert_to_mode (size_mode, size, 1);
2675 size = copy_to_mode_reg (size_mode, size);
2676
2677 /* It is incorrect to use the libcall calling conventions to call
2678 memset in this context. This could be a user call to memset and
2679 the user may wish to examine the return value from memset. For
2680 targets where libcalls and normal calls have different conventions
2681 for returning pointers, we could end up generating incorrect code.
2682
2683 For convenience, we generate the call to bzero this way as well. */
2684
2685 object_tree = make_tree (ptr_type_node, object);
2686 if (TARGET_MEM_FUNCTIONS)
2687 size_tree = make_tree (sizetype, size);
2688 else
2689 size_tree = make_tree (unsigned_type_node, size);
2690
2691 fn = clear_storage_libcall_fn (true);
2692 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2693 if (TARGET_MEM_FUNCTIONS)
2694 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2695 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2696
2697 /* Now we have to build up the CALL_EXPR itself. */
2698 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2699 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2700 call_expr, arg_list, NULL_TREE);
2701
2702 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2703
2704 /* If we are initializing a readonly value, show the above call
2705 clobbered it. Otherwise, a load from it may erroneously be
2706 hoisted from a loop. */
2707 if (RTX_UNCHANGING_P (object))
2708 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2709
2710 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
2711 }
2712
2713 /* A subroutine of clear_storage_via_libcall. Create the tree node
2714 for the function we use for block clears. The first time FOR_CALL
2715 is true, we call assemble_external. */
2716
2717 static GTY(()) tree block_clear_fn;
2718
2719 void
2720 init_block_clear_fn (const char *asmspec)
2721 {
2722 if (!block_clear_fn)
2723 {
2724 tree fn, args;
2725
2726 if (TARGET_MEM_FUNCTIONS)
2727 {
2728 fn = get_identifier ("memset");
2729 args = build_function_type_list (ptr_type_node, ptr_type_node,
2730 integer_type_node, sizetype,
2731 NULL_TREE);
2732 }
2733 else
2734 {
2735 fn = get_identifier ("bzero");
2736 args = build_function_type_list (void_type_node, ptr_type_node,
2737 unsigned_type_node, NULL_TREE);
2738 }
2739
2740 fn = build_decl (FUNCTION_DECL, fn, args);
2741 DECL_EXTERNAL (fn) = 1;
2742 TREE_PUBLIC (fn) = 1;
2743 DECL_ARTIFICIAL (fn) = 1;
2744 TREE_NOTHROW (fn) = 1;
2745
2746 block_clear_fn = fn;
2747 }
2748
2749 if (asmspec)
2750 {
2751 SET_DECL_RTL (block_clear_fn, NULL_RTX);
2752 SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
2753 }
2754 }
2755
2756 static tree
2757 clear_storage_libcall_fn (int for_call)
2758 {
2759 static bool emitted_extern;
2760
2761 if (!block_clear_fn)
2762 init_block_clear_fn (NULL);
2763
2764 if (for_call && !emitted_extern)
2765 {
2766 emitted_extern = true;
2767 make_decl_rtl (block_clear_fn, NULL);
2768 assemble_external (block_clear_fn);
2769 }
2770
2771 return block_clear_fn;
2772 }
2773 \f
2774 /* Generate code to copy Y into X.
2775 Both Y and X must have the same mode, except that
2776 Y can be a constant with VOIDmode.
2777 This mode cannot be BLKmode; use emit_block_move for that.
2778
2779 Return the last instruction emitted. */
2780
2781 rtx
2782 emit_move_insn (rtx x, rtx y)
2783 {
2784 enum machine_mode mode = GET_MODE (x);
2785 rtx y_cst = NULL_RTX;
2786 rtx last_insn, set;
2787
2788 x = protect_from_queue (x, 1);
2789 y = protect_from_queue (y, 0);
2790
2791 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2792 abort ();
2793
2794 /* Never force constant_p_rtx to memory. */
2795 if (GET_CODE (y) == CONSTANT_P_RTX)
2796 ;
2797 else if (CONSTANT_P (y))
2798 {
2799 if (optimize
2800 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
2801 && (last_insn = compress_float_constant (x, y)))
2802 return last_insn;
2803
2804 y_cst = y;
2805
2806 if (!LEGITIMATE_CONSTANT_P (y))
2807 {
2808 y = force_const_mem (mode, y);
2809
2810 /* If the target's cannot_force_const_mem prevented the spill,
2811 assume that the target's move expanders will also take care
2812 of the non-legitimate constant. */
2813 if (!y)
2814 y = y_cst;
2815 }
2816 }
2817
2818 /* If X or Y are memory references, verify that their addresses are valid
2819 for the machine. */
2820 if (GET_CODE (x) == MEM
2821 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2822 && ! push_operand (x, GET_MODE (x)))
2823 || (flag_force_addr
2824 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2825 x = validize_mem (x);
2826
2827 if (GET_CODE (y) == MEM
2828 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2829 || (flag_force_addr
2830 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2831 y = validize_mem (y);
2832
2833 if (mode == BLKmode)
2834 abort ();
2835
2836 last_insn = emit_move_insn_1 (x, y);
2837
2838 if (y_cst && GET_CODE (x) == REG
2839 && (set = single_set (last_insn)) != NULL_RTX
2840 && SET_DEST (set) == x
2841 && ! rtx_equal_p (y_cst, SET_SRC (set)))
2842 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2843
2844 return last_insn;
2845 }
2846
2847 /* Low level part of emit_move_insn.
2848 Called just like emit_move_insn, but assumes X and Y
2849 are basically valid. */
2850
2851 rtx
2852 emit_move_insn_1 (rtx x, rtx y)
2853 {
2854 enum machine_mode mode = GET_MODE (x);
2855 enum machine_mode submode;
2856 enum mode_class class = GET_MODE_CLASS (mode);
2857
2858 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2859 abort ();
2860
2861 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2862 return
2863 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2864
2865 /* Expand complex moves by moving real part and imag part, if possible. */
2866 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2867 && BLKmode != (submode = GET_MODE_INNER (mode))
2868 && (mov_optab->handlers[(int) submode].insn_code
2869 != CODE_FOR_nothing))
2870 {
2871 /* Don't split destination if it is a stack push. */
2872 int stack = push_operand (x, GET_MODE (x));
2873
2874 #ifdef PUSH_ROUNDING
2875 /* In case we output to the stack, but the size is smaller than the
2876 machine can push exactly, we need to use move instructions. */
2877 if (stack
2878 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2879 != GET_MODE_SIZE (submode)))
2880 {
2881 rtx temp;
2882 HOST_WIDE_INT offset1, offset2;
2883
2884 /* Do not use anti_adjust_stack, since we don't want to update
2885 stack_pointer_delta. */
2886 temp = expand_binop (Pmode,
2887 #ifdef STACK_GROWS_DOWNWARD
2888 sub_optab,
2889 #else
2890 add_optab,
2891 #endif
2892 stack_pointer_rtx,
2893 GEN_INT
2894 (PUSH_ROUNDING
2895 (GET_MODE_SIZE (GET_MODE (x)))),
2896 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2897
2898 if (temp != stack_pointer_rtx)
2899 emit_move_insn (stack_pointer_rtx, temp);
2900
2901 #ifdef STACK_GROWS_DOWNWARD
2902 offset1 = 0;
2903 offset2 = GET_MODE_SIZE (submode);
2904 #else
2905 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2906 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2907 + GET_MODE_SIZE (submode));
2908 #endif
2909
2910 emit_move_insn (change_address (x, submode,
2911 gen_rtx_PLUS (Pmode,
2912 stack_pointer_rtx,
2913 GEN_INT (offset1))),
2914 gen_realpart (submode, y));
2915 emit_move_insn (change_address (x, submode,
2916 gen_rtx_PLUS (Pmode,
2917 stack_pointer_rtx,
2918 GEN_INT (offset2))),
2919 gen_imagpart (submode, y));
2920 }
2921 else
2922 #endif
2923 /* If this is a stack, push the highpart first, so it
2924 will be in the argument order.
2925
2926 In that case, change_address is used only to convert
2927 the mode, not to change the address. */
2928 if (stack)
2929 {
2930 /* Note that the real part always precedes the imag part in memory
2931 regardless of machine's endianness. */
2932 #ifdef STACK_GROWS_DOWNWARD
2933 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2934 gen_imagpart (submode, y));
2935 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2936 gen_realpart (submode, y));
2937 #else
2938 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2939 gen_realpart (submode, y));
2940 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2941 gen_imagpart (submode, y));
2942 #endif
2943 }
2944 else
2945 {
2946 rtx realpart_x, realpart_y;
2947 rtx imagpart_x, imagpart_y;
2948
2949 /* If this is a complex value with each part being smaller than a
2950 word, the usual calling sequence will likely pack the pieces into
2951 a single register. Unfortunately, SUBREG of hard registers only
2952 deals in terms of words, so we have a problem converting input
2953 arguments to the CONCAT of two registers that is used elsewhere
2954 for complex values. If this is before reload, we can copy it into
2955 memory and reload. FIXME, we should see about using extract and
2956 insert on integer registers, but complex short and complex char
2957 variables should be rarely used. */
2958 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2959 && (reload_in_progress | reload_completed) == 0)
2960 {
2961 int packed_dest_p
2962 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2963 int packed_src_p
2964 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2965
2966 if (packed_dest_p || packed_src_p)
2967 {
2968 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2969 ? MODE_FLOAT : MODE_INT);
2970
2971 enum machine_mode reg_mode
2972 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2973
2974 if (reg_mode != BLKmode)
2975 {
2976 rtx mem = assign_stack_temp (reg_mode,
2977 GET_MODE_SIZE (mode), 0);
2978 rtx cmem = adjust_address (mem, mode, 0);
2979
2980 cfun->cannot_inline
2981 = N_("function using short complex types cannot be inline");
2982
2983 if (packed_dest_p)
2984 {
2985 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2986
2987 emit_move_insn_1 (cmem, y);
2988 return emit_move_insn_1 (sreg, mem);
2989 }
2990 else
2991 {
2992 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2993
2994 emit_move_insn_1 (mem, sreg);
2995 return emit_move_insn_1 (x, cmem);
2996 }
2997 }
2998 }
2999 }
3000
3001 realpart_x = gen_realpart (submode, x);
3002 realpart_y = gen_realpart (submode, y);
3003 imagpart_x = gen_imagpart (submode, x);
3004 imagpart_y = gen_imagpart (submode, y);
3005
3006 /* Show the output dies here. This is necessary for SUBREGs
3007 of pseudos since we cannot track their lifetimes correctly;
3008 hard regs shouldn't appear here except as return values.
3009 We never want to emit such a clobber after reload. */
3010 if (x != y
3011 && ! (reload_in_progress || reload_completed)
3012 && (GET_CODE (realpart_x) == SUBREG
3013 || GET_CODE (imagpart_x) == SUBREG))
3014 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3015
3016 emit_move_insn (realpart_x, realpart_y);
3017 emit_move_insn (imagpart_x, imagpart_y);
3018 }
3019
3020 return get_last_insn ();
3021 }
3022
3023 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
3024 find a mode to do it in. If we have a movcc, use it. Otherwise,
3025 find the MODE_INT mode of the same width. */
3026 else if (GET_MODE_CLASS (mode) == MODE_CC
3027 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
3028 {
3029 enum insn_code insn_code;
3030 enum machine_mode tmode = VOIDmode;
3031 rtx x1 = x, y1 = y;
3032
3033 if (mode != CCmode
3034 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
3035 tmode = CCmode;
3036 else
3037 for (tmode = QImode; tmode != VOIDmode;
3038 tmode = GET_MODE_WIDER_MODE (tmode))
3039 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
3040 break;
3041
3042 if (tmode == VOIDmode)
3043 abort ();
3044
3045 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
3046 may call change_address which is not appropriate if we were
3047 called when a reload was in progress. We don't have to worry
3048 about changing the address since the size in bytes is supposed to
3049 be the same. Copy the MEM to change the mode and move any
3050 substitutions from the old MEM to the new one. */
3051
3052 if (reload_in_progress)
3053 {
3054 x = gen_lowpart_common (tmode, x1);
3055 if (x == 0 && GET_CODE (x1) == MEM)
3056 {
3057 x = adjust_address_nv (x1, tmode, 0);
3058 copy_replacements (x1, x);
3059 }
3060
3061 y = gen_lowpart_common (tmode, y1);
3062 if (y == 0 && GET_CODE (y1) == MEM)
3063 {
3064 y = adjust_address_nv (y1, tmode, 0);
3065 copy_replacements (y1, y);
3066 }
3067 }
3068 else
3069 {
3070 x = gen_lowpart (tmode, x);
3071 y = gen_lowpart (tmode, y);
3072 }
3073
3074 insn_code = mov_optab->handlers[(int) tmode].insn_code;
3075 return emit_insn (GEN_FCN (insn_code) (x, y));
3076 }
3077
3078 /* Try using a move pattern for the corresponding integer mode. This is
3079 only safe when simplify_subreg can convert MODE constants into integer
3080 constants. At present, it can only do this reliably if the value
3081 fits within a HOST_WIDE_INT. */
3082 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3083 && (submode = int_mode_for_mode (mode)) != BLKmode
3084 && mov_optab->handlers[submode].insn_code != CODE_FOR_nothing)
3085 return emit_insn (GEN_FCN (mov_optab->handlers[submode].insn_code)
3086 (simplify_gen_subreg (submode, x, mode, 0),
3087 simplify_gen_subreg (submode, y, mode, 0)));
3088
3089 /* This will handle any multi-word or full-word mode that lacks a move_insn
3090 pattern. However, you will get better code if you define such patterns,
3091 even if they must turn into multiple assembler instructions. */
3092 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3093 {
3094 rtx last_insn = 0;
3095 rtx seq, inner;
3096 int need_clobber;
3097 int i;
3098
3099 #ifdef PUSH_ROUNDING
3100
3101 /* If X is a push on the stack, do the push now and replace
3102 X with a reference to the stack pointer. */
3103 if (push_operand (x, GET_MODE (x)))
3104 {
3105 rtx temp;
3106 enum rtx_code code;
3107
3108 /* Do not use anti_adjust_stack, since we don't want to update
3109 stack_pointer_delta. */
3110 temp = expand_binop (Pmode,
3111 #ifdef STACK_GROWS_DOWNWARD
3112 sub_optab,
3113 #else
3114 add_optab,
3115 #endif
3116 stack_pointer_rtx,
3117 GEN_INT
3118 (PUSH_ROUNDING
3119 (GET_MODE_SIZE (GET_MODE (x)))),
3120 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3121
3122 if (temp != stack_pointer_rtx)
3123 emit_move_insn (stack_pointer_rtx, temp);
3124
3125 code = GET_CODE (XEXP (x, 0));
3126
3127 /* Just hope that small offsets off SP are OK. */
3128 if (code == POST_INC)
3129 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3130 GEN_INT (-((HOST_WIDE_INT)
3131 GET_MODE_SIZE (GET_MODE (x)))));
3132 else if (code == POST_DEC)
3133 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3134 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3135 else
3136 temp = stack_pointer_rtx;
3137
3138 x = change_address (x, VOIDmode, temp);
3139 }
3140 #endif
3141
3142 /* If we are in reload, see if either operand is a MEM whose address
3143 is scheduled for replacement. */
3144 if (reload_in_progress && GET_CODE (x) == MEM
3145 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3146 x = replace_equiv_address_nv (x, inner);
3147 if (reload_in_progress && GET_CODE (y) == MEM
3148 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3149 y = replace_equiv_address_nv (y, inner);
3150
3151 start_sequence ();
3152
3153 need_clobber = 0;
3154 for (i = 0;
3155 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3156 i++)
3157 {
3158 rtx xpart = operand_subword (x, i, 1, mode);
3159 rtx ypart = operand_subword (y, i, 1, mode);
3160
3161 /* If we can't get a part of Y, put Y into memory if it is a
3162 constant. Otherwise, force it into a register. If we still
3163 can't get a part of Y, abort. */
3164 if (ypart == 0 && CONSTANT_P (y))
3165 {
3166 y = force_const_mem (mode, y);
3167 ypart = operand_subword (y, i, 1, mode);
3168 }
3169 else if (ypart == 0)
3170 ypart = operand_subword_force (y, i, mode);
3171
3172 if (xpart == 0 || ypart == 0)
3173 abort ();
3174
3175 need_clobber |= (GET_CODE (xpart) == SUBREG);
3176
3177 last_insn = emit_move_insn (xpart, ypart);
3178 }
3179
3180 seq = get_insns ();
3181 end_sequence ();
3182
3183 /* Show the output dies here. This is necessary for SUBREGs
3184 of pseudos since we cannot track their lifetimes correctly;
3185 hard regs shouldn't appear here except as return values.
3186 We never want to emit such a clobber after reload. */
3187 if (x != y
3188 && ! (reload_in_progress || reload_completed)
3189 && need_clobber != 0)
3190 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3191
3192 emit_insn (seq);
3193
3194 return last_insn;
3195 }
3196 else
3197 abort ();
3198 }
3199
3200 /* If Y is representable exactly in a narrower mode, and the target can
3201 perform the extension directly from constant or memory, then emit the
3202 move as an extension. */
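/* For example (assuming the target provides an extendsfdf2 pattern whose
   operand predicate accepts the constant): moving the DFmode constant 1.5
   can be emitted as a float_extend from the SFmode constant 1.5, since
   that value is exactly representable in SFmode.  */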
3203
3204 static rtx
3205 compress_float_constant (rtx x, rtx y)
3206 {
3207 enum machine_mode dstmode = GET_MODE (x);
3208 enum machine_mode orig_srcmode = GET_MODE (y);
3209 enum machine_mode srcmode;
3210 REAL_VALUE_TYPE r;
3211
3212 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3213
3214 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3215 srcmode != orig_srcmode;
3216 srcmode = GET_MODE_WIDER_MODE (srcmode))
3217 {
3218 enum insn_code ic;
3219 rtx trunc_y, last_insn;
3220
3221 /* Skip if the target can't extend this way. */
3222 ic = can_extend_p (dstmode, srcmode, 0);
3223 if (ic == CODE_FOR_nothing)
3224 continue;
3225
3226 /* Skip if the narrowed value isn't exact. */
3227 if (! exact_real_truncate (srcmode, &r))
3228 continue;
3229
3230 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3231
3232 if (LEGITIMATE_CONSTANT_P (trunc_y))
3233 {
3234 /* Skip if the target needs extra instructions to perform
3235 the extension. */
3236 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3237 continue;
3238 }
3239 else if (float_extend_from_mem[dstmode][srcmode])
3240 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3241 else
3242 continue;
3243
3244 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3245 last_insn = get_last_insn ();
3246
3247 if (GET_CODE (x) == REG)
3248 set_unique_reg_note (last_insn, REG_EQUAL, y);
3249
3250 return last_insn;
3251 }
3252
3253 return NULL_RTX;
3254 }
3255 \f
3256 /* Pushing data onto the stack. */
3257
3258 /* Push a block of length SIZE (perhaps variable)
3259 and return an rtx to address the beginning of the block.
3260 Note that it is not possible for the value returned to be a QUEUED.
3261 The value may be virtual_outgoing_args_rtx.
3262
3263 EXTRA is the number of bytes of padding to push in addition to SIZE.
3264 BELOW nonzero means this padding comes at low addresses;
3265 otherwise, the padding comes at high addresses. */
3266
3267 rtx
3268 push_block (rtx size, int extra, int below)
3269 {
3270 rtx temp;
3271
3272 size = convert_modes (Pmode, ptr_mode, size, 1);
3273 if (CONSTANT_P (size))
3274 anti_adjust_stack (plus_constant (size, extra));
3275 else if (GET_CODE (size) == REG && extra == 0)
3276 anti_adjust_stack (size);
3277 else
3278 {
3279 temp = copy_to_mode_reg (Pmode, size);
3280 if (extra != 0)
3281 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3282 temp, 0, OPTAB_LIB_WIDEN);
3283 anti_adjust_stack (temp);
3284 }
3285
3286 #ifndef STACK_GROWS_DOWNWARD
3287 if (0)
3288 #else
3289 if (1)
3290 #endif
3291 {
3292 temp = virtual_outgoing_args_rtx;
3293 if (extra != 0 && below)
3294 temp = plus_constant (temp, extra);
3295 }
3296 else
3297 {
3298 if (GET_CODE (size) == CONST_INT)
3299 temp = plus_constant (virtual_outgoing_args_rtx,
3300 -INTVAL (size) - (below ? 0 : extra));
3301 else if (extra != 0 && !below)
3302 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3303 negate_rtx (Pmode, plus_constant (size, extra)));
3304 else
3305 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3306 negate_rtx (Pmode, size));
3307 }
3308
3309 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3310 }
3311
3312 #ifdef PUSH_ROUNDING
3313
3314 /* Emit single push insn. */
3315
3316 static void
3317 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3318 {
3319 rtx dest_addr;
3320 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3321 rtx dest;
3322 enum insn_code icode;
3323 insn_operand_predicate_fn pred;
3324
3325 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3326 /* If there is push pattern, use it. Otherwise try old way of throwing
3327 MEM representing push operation to move expander. */
3328 icode = push_optab->handlers[(int) mode].insn_code;
3329 if (icode != CODE_FOR_nothing)
3330 {
3331 if (((pred = insn_data[(int) icode].operand[0].predicate)
3332 && !((*pred) (x, mode))))
3333 x = force_reg (mode, x);
3334 emit_insn (GEN_FCN (icode) (x));
3335 return;
3336 }
3337 if (GET_MODE_SIZE (mode) == rounded_size)
3338 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3339 /* If we are to pad downward, adjust the stack pointer first and
3340 then store X into the stack location using an offset. This is
3341 because emit_move_insn does not know how to pad; it does not have
3342 access to type. */
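  /* Worked example, assuming STACK_GROWS_DOWNWARD and a pre-decrement
     STACK_PUSH_CODE: pushing a 2-byte value when PUSH_ROUNDING rounds it
     up to 4 gives padding_size == 2, so the stack pointer is dropped by 4
     and the value is stored at the new stack pointer plus 2.  */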
3343 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3344 {
3345 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3346 HOST_WIDE_INT offset;
3347
3348 emit_move_insn (stack_pointer_rtx,
3349 expand_binop (Pmode,
3350 #ifdef STACK_GROWS_DOWNWARD
3351 sub_optab,
3352 #else
3353 add_optab,
3354 #endif
3355 stack_pointer_rtx,
3356 GEN_INT (rounded_size),
3357 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3358
3359 offset = (HOST_WIDE_INT) padding_size;
3360 #ifdef STACK_GROWS_DOWNWARD
3361 if (STACK_PUSH_CODE == POST_DEC)
3362 /* We have already decremented the stack pointer, so get the
3363 previous value. */
3364 offset += (HOST_WIDE_INT) rounded_size;
3365 #else
3366 if (STACK_PUSH_CODE == POST_INC)
3367 /* We have already incremented the stack pointer, so get the
3368 previous value. */
3369 offset -= (HOST_WIDE_INT) rounded_size;
3370 #endif
3371 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3372 }
3373 else
3374 {
3375 #ifdef STACK_GROWS_DOWNWARD
3376 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3377 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3378 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3379 #else
3380 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3381 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3382 GEN_INT (rounded_size));
3383 #endif
3384 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3385 }
3386
3387 dest = gen_rtx_MEM (mode, dest_addr);
3388
3389 if (type != 0)
3390 {
3391 set_mem_attributes (dest, type, 1);
3392
3393 if (flag_optimize_sibling_calls)
3394 /* Function incoming arguments may overlap with sibling call
3395 outgoing arguments and we cannot allow reordering of reads
3396 from function arguments with stores to outgoing arguments
3397 of sibling calls. */
3398 set_mem_alias_set (dest, 0);
3399 }
3400 emit_move_insn (dest, x);
3401 }
3402 #endif
3403
3404 /* Generate code to push X onto the stack, assuming it has mode MODE and
3405 type TYPE.
3406 MODE is redundant except when X is a CONST_INT (since they don't
3407 carry mode info).
3408 SIZE is an rtx for the size of data to be copied (in bytes),
3409 needed only if X is BLKmode.
3410
3411 ALIGN (in bits) is maximum alignment we can assume.
3412
3413 If PARTIAL and REG are both nonzero, then copy that many of the first
3414 words of X into registers starting with REG, and push the rest of X.
3415 The amount of space pushed is decreased by PARTIAL words,
3416 rounded *down* to a multiple of PARM_BOUNDARY.
3417 REG must be a hard register in this case.
3418 If REG is zero but PARTIAL is not, take all other actions for an
3419 argument partially in registers, but do not actually load any
3420 registers.
3421
3422 EXTRA is the amount in bytes of extra space to leave next to this arg.
3423 This is ignored if an argument block has already been allocated.
3424
3425 On a machine that lacks real push insns, ARGS_ADDR is the address of
3426 the bottom of the argument block for this call. We use indexing off there
3427 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3428 argument block has not been preallocated.
3429
3430 ARGS_SO_FAR is the size of args previously pushed for this call.
3431
3432 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3433 for arguments passed in registers. If nonzero, it will be the number
3434 of bytes required. */
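/* As a concrete instance of PARTIAL (numbers invented for illustration):
   with UNITS_PER_WORD == 4, a 24-byte BLKmode argument and PARTIAL == 2,
   the first 8 bytes travel in REG and the following hard register, and
   only the remaining 16 bytes are copied to the stack.  */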
3435
3436 void
3437 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3438 unsigned int align, int partial, rtx reg, int extra,
3439 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3440 rtx alignment_pad)
3441 {
3442 rtx xinner;
3443 enum direction stack_direction
3444 #ifdef STACK_GROWS_DOWNWARD
3445 = downward;
3446 #else
3447 = upward;
3448 #endif
3449
3450 /* Decide where to pad the argument: `downward' for below,
3451 `upward' for above, or `none' for don't pad it.
3452 Default is below for small data on big-endian machines; else above. */
3453 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3454
3455 /* Invert direction if stack is post-decrement.
3456 FIXME: why? */
3457 if (STACK_PUSH_CODE == POST_DEC)
3458 if (where_pad != none)
3459 where_pad = (where_pad == downward ? upward : downward);
3460
3461 xinner = x = protect_from_queue (x, 0);
3462
3463 if (mode == BLKmode)
3464 {
3465 /* Copy a block into the stack, entirely or partially. */
3466
3467 rtx temp;
3468 int used = partial * UNITS_PER_WORD;
3469 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3470 int skip;
3471
3472 if (size == 0)
3473 abort ();
3474
3475 used -= offset;
3476
3477 /* USED is now the # of bytes we need not copy to the stack
3478 because registers will take care of them. */
3479
3480 if (partial != 0)
3481 xinner = adjust_address (xinner, BLKmode, used);
3482
3483 /* If the partial register-part of the arg counts in its stack size,
3484 skip the part of stack space corresponding to the registers.
3485 Otherwise, start copying to the beginning of the stack space,
3486 by setting SKIP to 0. */
3487 skip = (reg_parm_stack_space == 0) ? 0 : used;
3488
3489 #ifdef PUSH_ROUNDING
3490 /* Do it with several push insns if that doesn't take lots of insns
3491 and if there is no difficulty with push insns that skip bytes
3492 on the stack for alignment purposes. */
3493 if (args_addr == 0
3494 && PUSH_ARGS
3495 && GET_CODE (size) == CONST_INT
3496 && skip == 0
3497 && MEM_ALIGN (xinner) >= align
3498 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3499 /* Here we avoid the case of a structure whose weak alignment
3500 forces many pushes of a small amount of data,
3501 and such small pushes do rounding that causes trouble. */
3502 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3503 || align >= BIGGEST_ALIGNMENT
3504 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3505 == (align / BITS_PER_UNIT)))
3506 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3507 {
3508 /* Push padding now if padding above and stack grows down,
3509 or if padding below and stack grows up.
3510 But if space already allocated, this has already been done. */
3511 if (extra && args_addr == 0
3512 && where_pad != none && where_pad != stack_direction)
3513 anti_adjust_stack (GEN_INT (extra));
3514
3515 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3516 }
3517 else
3518 #endif /* PUSH_ROUNDING */
3519 {
3520 rtx target;
3521
3522 /* Otherwise make space on the stack and copy the data
3523 to the address of that space. */
3524
3525 /* Deduct words put into registers from the size we must copy. */
3526 if (partial != 0)
3527 {
3528 if (GET_CODE (size) == CONST_INT)
3529 size = GEN_INT (INTVAL (size) - used);
3530 else
3531 size = expand_binop (GET_MODE (size), sub_optab, size,
3532 GEN_INT (used), NULL_RTX, 0,
3533 OPTAB_LIB_WIDEN);
3534 }
3535
3536 /* Get the address of the stack space.
3537 In this case, we do not deal with EXTRA separately.
3538 A single stack adjust will do. */
3539 if (! args_addr)
3540 {
3541 temp = push_block (size, extra, where_pad == downward);
3542 extra = 0;
3543 }
3544 else if (GET_CODE (args_so_far) == CONST_INT)
3545 temp = memory_address (BLKmode,
3546 plus_constant (args_addr,
3547 skip + INTVAL (args_so_far)));
3548 else
3549 temp = memory_address (BLKmode,
3550 plus_constant (gen_rtx_PLUS (Pmode,
3551 args_addr,
3552 args_so_far),
3553 skip));
3554
3555 if (!ACCUMULATE_OUTGOING_ARGS)
3556 {
3557 /* If the source is referenced relative to the stack pointer,
3558 copy it to another register to stabilize it. We do not need
3559 to do this if we know that we won't be changing sp. */
3560
3561 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3562 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3563 temp = copy_to_reg (temp);
3564 }
3565
3566 target = gen_rtx_MEM (BLKmode, temp);
3567
3568 if (type != 0)
3569 {
3570 set_mem_attributes (target, type, 1);
3571 /* Function incoming arguments may overlap with sibling call
3572 outgoing arguments and we cannot allow reordering of reads
3573 from function arguments with stores to outgoing arguments
3574 of sibling calls. */
3575 set_mem_alias_set (target, 0);
3576 }
3577
3578 /* ALIGN may well be stricter than TYPE's alignment, e.g. due to
3579 PARM_BOUNDARY. Assume the caller isn't lying. */
3580 set_mem_align (target, align);
3581
3582 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3583 }
3584 }
3585 else if (partial > 0)
3586 {
3587 /* Scalar partly in registers. */
3588
3589 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3590 int i;
3591 int not_stack;
3592 /* # words of start of argument
3593 that we must make space for but need not store. */
3594 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3595 int args_offset = INTVAL (args_so_far);
3596 int skip;
3597
3598 /* Push padding now if padding above and stack grows down,
3599 or if padding below and stack grows up.
3600 But if space already allocated, this has already been done. */
3601 if (extra && args_addr == 0
3602 && where_pad != none && where_pad != stack_direction)
3603 anti_adjust_stack (GEN_INT (extra));
3604
3605 /* If we make space by pushing it, we might as well push
3606 the real data. Otherwise, we can leave OFFSET nonzero
3607 and leave the space uninitialized. */
3608 if (args_addr == 0)
3609 offset = 0;
3610
3611 /* Now NOT_STACK gets the number of words that we don't need to
3612 allocate on the stack. */
3613 not_stack = partial - offset;
3614
3615 /* If the partial register-part of the arg counts in its stack size,
3616 skip the part of stack space corresponding to the registers.
3617 Otherwise, start copying to the beginning of the stack space,
3618 by setting SKIP to 0. */
3619 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3620
3621 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3622 x = validize_mem (force_const_mem (mode, x));
3623
3624 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3625 SUBREGs of such registers are not allowed. */
3626 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3627 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3628 x = copy_to_reg (x);
3629
3630 /* Loop over all the words allocated on the stack for this arg. */
3631 /* We can do it by words, because any scalar bigger than a word
3632 has a size that is a multiple of a word. */
3633 #ifndef PUSH_ARGS_REVERSED
3634 for (i = not_stack; i < size; i++)
3635 #else
3636 for (i = size - 1; i >= not_stack; i--)
3637 #endif
3638 if (i >= not_stack + offset)
3639 emit_push_insn (operand_subword_force (x, i, mode),
3640 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3641 0, args_addr,
3642 GEN_INT (args_offset + ((i - not_stack + skip)
3643 * UNITS_PER_WORD)),
3644 reg_parm_stack_space, alignment_pad);
3645 }
3646 else
3647 {
3648 rtx addr;
3649 rtx dest;
3650
3651 /* Push padding now if padding above and stack grows down,
3652 or if padding below and stack grows up.
3653 But if space already allocated, this has already been done. */
3654 if (extra && args_addr == 0
3655 && where_pad != none && where_pad != stack_direction)
3656 anti_adjust_stack (GEN_INT (extra));
3657
3658 #ifdef PUSH_ROUNDING
3659 if (args_addr == 0 && PUSH_ARGS)
3660 emit_single_push_insn (mode, x, type);
3661 else
3662 #endif
3663 {
3664 if (GET_CODE (args_so_far) == CONST_INT)
3665 addr
3666 = memory_address (mode,
3667 plus_constant (args_addr,
3668 INTVAL (args_so_far)));
3669 else
3670 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3671 args_so_far));
3672 dest = gen_rtx_MEM (mode, addr);
3673 if (type != 0)
3674 {
3675 set_mem_attributes (dest, type, 1);
3676 /* Function incoming arguments may overlap with sibling call
3677 outgoing arguments and we cannot allow reordering of reads
3678 from function arguments with stores to outgoing arguments
3679 of sibling calls. */
3680 set_mem_alias_set (dest, 0);
3681 }
3682
3683 emit_move_insn (dest, x);
3684 }
3685 }
3686
3687 /* If part should go in registers, copy that part
3688 into the appropriate registers. Do this now, at the end,
3689 since mem-to-mem copies above may do function calls. */
3690 if (partial > 0 && reg != 0)
3691 {
3692 /* Handle calls that pass values in multiple non-contiguous locations.
3693 The Irix 6 ABI has examples of this. */
3694 if (GET_CODE (reg) == PARALLEL)
3695 emit_group_load (reg, x, type, -1);
3696 else
3697 move_block_to_reg (REGNO (reg), x, partial, mode);
3698 }
3699
3700 if (extra && args_addr == 0 && where_pad == stack_direction)
3701 anti_adjust_stack (GEN_INT (extra));
3702
3703 if (alignment_pad && args_addr == 0)
3704 anti_adjust_stack (alignment_pad);
3705 }
3706 \f
3707 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3708 operations. */
3709
3710 static rtx
3711 get_subtarget (rtx x)
3712 {
3713 return ((x == 0
3714 /* Only registers can be subtargets. */
3715 || GET_CODE (x) != REG
3716 /* If the register is readonly, it can't be set more than once. */
3717 || RTX_UNCHANGING_P (x)
3718 /* Don't use hard regs to avoid extending their life. */
3719 || REGNO (x) < FIRST_PSEUDO_REGISTER
3720 /* Avoid subtargets inside loops,
3721 since they hide some invariant expressions. */
3722 || preserve_subexpressions_p ())
3723 ? 0 : x);
3724 }
3725
3726 /* Expand an assignment that stores the value of FROM into TO.
3727 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3728 (This may contain a QUEUED rtx;
3729 if the value is constant, this rtx is a constant.)
3730 Otherwise, the returned value is NULL_RTX. */
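/* (Illustrative note, not part of the original comment: this is the routine
   expand_expr typically reaches for a MODIFY_EXPR such as `a = b + 1';
   WANT_VALUE is nonzero only when the assignment is itself used as a value,
   e.g. `c = (a = b + 1)'.)  */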
3731
3732 rtx
3733 expand_assignment (tree to, tree from, int want_value)
3734 {
3735 rtx to_rtx = 0;
3736 rtx result;
3737
3738 /* Don't crash if the lhs of the assignment was erroneous. */
3739
3740 if (TREE_CODE (to) == ERROR_MARK)
3741 {
3742 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3743 return want_value ? result : NULL_RTX;
3744 }
3745
3746 /* Assignment of a structure component needs special treatment
3747 if the structure component's rtx is not simply a MEM.
3748 Assignment of an array element at a constant index, and assignment of
3749 an array element in an unaligned packed structure field, has the same
3750 problem. */
3751
3752 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3753 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
3754 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
3755 {
3756 enum machine_mode mode1;
3757 HOST_WIDE_INT bitsize, bitpos;
3758 rtx orig_to_rtx;
3759 tree offset;
3760 int unsignedp;
3761 int volatilep = 0;
3762 tree tem;
3763
3764 push_temp_slots ();
3765 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3766 &unsignedp, &volatilep);
3767
3768 /* If we are going to use store_bit_field and extract_bit_field,
3769 make sure to_rtx will be safe for multiple use. */
3770
3771 if (mode1 == VOIDmode && want_value)
3772 tem = stabilize_reference (tem);
3773
3774 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3775
3776 if (offset != 0)
3777 {
3778 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3779
3780 if (GET_CODE (to_rtx) != MEM)
3781 abort ();
3782
3783 #ifdef POINTERS_EXTEND_UNSIGNED
3784 if (GET_MODE (offset_rtx) != Pmode)
3785 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
3786 #else
3787 if (GET_MODE (offset_rtx) != ptr_mode)
3788 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3789 #endif
3790
3791 /* A constant address in TO_RTX can have VOIDmode, we must not try
3792 to call force_reg for that case. Avoid that case. */
3793 if (GET_CODE (to_rtx) == MEM
3794 && GET_MODE (to_rtx) == BLKmode
3795 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3796 && bitsize > 0
3797 && (bitpos % bitsize) == 0
3798 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3799 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3800 {
3801 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3802 bitpos = 0;
3803 }
3804
3805 to_rtx = offset_address (to_rtx, offset_rtx,
3806 highest_pow2_factor_for_type (TREE_TYPE (to),
3807 offset));
3808 }
3809
3810 if (GET_CODE (to_rtx) == MEM)
3811 {
3812 /* If the field is at offset zero, we could have been given the
3813 DECL_RTX of the parent struct. Don't munge it. */
3814 to_rtx = shallow_copy_rtx (to_rtx);
3815
3816 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
3817 }
3818
3819 /* Deal with volatile and readonly fields. The former is only done
3820 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3821 if (volatilep && GET_CODE (to_rtx) == MEM)
3822 {
3823 if (to_rtx == orig_to_rtx)
3824 to_rtx = copy_rtx (to_rtx);
3825 MEM_VOLATILE_P (to_rtx) = 1;
3826 }
3827
3828 if (TREE_CODE (to) == COMPONENT_REF
3829 && TREE_READONLY (TREE_OPERAND (to, 1)))
3830 {
3831 if (to_rtx == orig_to_rtx)
3832 to_rtx = copy_rtx (to_rtx);
3833 RTX_UNCHANGING_P (to_rtx) = 1;
3834 }
3835
3836 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
3837 {
3838 if (to_rtx == orig_to_rtx)
3839 to_rtx = copy_rtx (to_rtx);
3840 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3841 }
3842
3843 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3844 (want_value
3845 /* Spurious cast for HPUX compiler. */
3846 ? ((enum machine_mode)
3847 TYPE_MODE (TREE_TYPE (to)))
3848 : VOIDmode),
3849 unsignedp, TREE_TYPE (tem), get_alias_set (to));
3850
3851 preserve_temp_slots (result);
3852 free_temp_slots ();
3853 pop_temp_slots ();
3854
3855 /* If the value is meaningful, convert RESULT to the proper mode.
3856 Otherwise, return nothing. */
3857 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3858 TYPE_MODE (TREE_TYPE (from)),
3859 result,
3860 TREE_UNSIGNED (TREE_TYPE (to)))
3861 : NULL_RTX);
3862 }
3863
3864 /* If the rhs is a function call and its value is not an aggregate,
3865 call the function before we start to compute the lhs.
3866 This is needed for correct code for cases such as
3867 val = setjmp (buf) on machines where reference to val
3868 requires loading up part of an address in a separate insn.
3869
3870 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3871 since it might be a promoted variable where the zero- or sign-extension
3872 needs to be done. Handling this in the normal way is safe because no
3873 computation is done before the call. */
3874 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
3875 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3876 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3877 && GET_CODE (DECL_RTL (to)) == REG))
3878 {
3879 rtx value;
3880
3881 push_temp_slots ();
3882 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3883 if (to_rtx == 0)
3884 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3885
3886 /* Handle calls that return values in multiple non-contiguous locations.
3887 The Irix 6 ABI has examples of this. */
3888 if (GET_CODE (to_rtx) == PARALLEL)
3889 emit_group_load (to_rtx, value, TREE_TYPE (from),
3890 int_size_in_bytes (TREE_TYPE (from)));
3891 else if (GET_MODE (to_rtx) == BLKmode)
3892 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
3893 else
3894 {
3895 if (POINTER_TYPE_P (TREE_TYPE (to)))
3896 value = convert_memory_address (GET_MODE (to_rtx), value);
3897 emit_move_insn (to_rtx, value);
3898 }
3899 preserve_temp_slots (to_rtx);
3900 free_temp_slots ();
3901 pop_temp_slots ();
3902 return want_value ? to_rtx : NULL_RTX;
3903 }
3904
3905 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3906 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3907
3908 if (to_rtx == 0)
3909 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3910
3911 /* Don't move directly into a return register. */
3912 if (TREE_CODE (to) == RESULT_DECL
3913 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3914 {
3915 rtx temp;
3916
3917 push_temp_slots ();
3918 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3919
3920 if (GET_CODE (to_rtx) == PARALLEL)
3921 emit_group_load (to_rtx, temp, TREE_TYPE (from),
3922 int_size_in_bytes (TREE_TYPE (from)));
3923 else
3924 emit_move_insn (to_rtx, temp);
3925
3926 preserve_temp_slots (to_rtx);
3927 free_temp_slots ();
3928 pop_temp_slots ();
3929 return want_value ? to_rtx : NULL_RTX;
3930 }
3931
3932 /* In case we are returning the contents of an object which overlaps
3933 the place the value is being stored, use a safe function when copying
3934 a value through a pointer into a structure value return block. */
3935 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3936 && current_function_returns_struct
3937 && !current_function_returns_pcc_struct)
3938 {
3939 rtx from_rtx, size;
3940
3941 push_temp_slots ();
3942 size = expr_size (from);
3943 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3944
3945 if (TARGET_MEM_FUNCTIONS)
3946 emit_library_call (memmove_libfunc, LCT_NORMAL,
3947 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3948 XEXP (from_rtx, 0), Pmode,
3949 convert_to_mode (TYPE_MODE (sizetype),
3950 size, TREE_UNSIGNED (sizetype)),
3951 TYPE_MODE (sizetype));
3952 else
3953 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3954 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3955 XEXP (to_rtx, 0), Pmode,
3956 convert_to_mode (TYPE_MODE (integer_type_node),
3957 size,
3958 TREE_UNSIGNED (integer_type_node)),
3959 TYPE_MODE (integer_type_node));
3960
3961 preserve_temp_slots (to_rtx);
3962 free_temp_slots ();
3963 pop_temp_slots ();
3964 return want_value ? to_rtx : NULL_RTX;
3965 }
3966
3967 /* Compute FROM and store the value in the rtx we got. */
3968
3969 push_temp_slots ();
3970 result = store_expr (from, to_rtx, want_value);
3971 preserve_temp_slots (result);
3972 free_temp_slots ();
3973 pop_temp_slots ();
3974 return want_value ? result : NULL_RTX;
3975 }
3976
3977 /* Generate code for computing expression EXP,
3978 and storing the value into TARGET.
3979 TARGET may contain a QUEUED rtx.
3980
3981 If WANT_VALUE & 1 is nonzero, return a copy of the value
3982 not in TARGET, so that we can be sure to use the proper
3983 value in a containing expression even if TARGET has something
3984 else stored in it. If possible, we copy the value through a pseudo
3985 and return that pseudo. Or, if the value is constant, we try to
3986 return the constant. In some cases, we return a pseudo
3987 copied *from* TARGET.
3988
3989 If the mode is BLKmode then we may return TARGET itself.
3990 It turns out that in BLKmode it doesn't cause a problem,
3991 because C has no operators that could combine two different
3992 assignments into the same BLKmode object with different values
3993 with no sequence point. Will other languages need this to
3994 be more thorough?
3995
3996 If WANT_VALUE & 1 is 0, we return NULL, to make sure
3997 to catch quickly any cases where the caller uses the value
3998 and fails to set WANT_VALUE.
3999
4000 If WANT_VALUE & 2 is set, this is a store into a call param on the
4001 stack, and block moves may need to be treated specially. */
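/* Summary of the flag bits above: WANT_VALUE & 1 asks for the stored value
   back (otherwise NULL_RTX is returned); WANT_VALUE & 2 marks a store into
   an outgoing call argument, which switches the code below to
   EXPAND_STACK_PARM and BLOCK_OP_CALL_PARM instead of the normal variants.  */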
4002
4003 rtx
4004 store_expr (tree exp, rtx target, int want_value)
4005 {
4006 rtx temp;
4007 int dont_return_target = 0;
4008 int dont_store_target = 0;
4009
4010 if (VOID_TYPE_P (TREE_TYPE (exp)))
4011 {
4012 /* C++ can generate ?: expressions with a throw expression in one
4013 branch and an rvalue in the other. Here, we resolve attempts to
4014 store the throw expression's nonexistent result. */
4015 if (want_value)
4016 abort ();
4017 expand_expr (exp, const0_rtx, VOIDmode, 0);
4018 return NULL_RTX;
4019 }
4020 if (TREE_CODE (exp) == COMPOUND_EXPR)
4021 {
4022 /* Perform first part of compound expression, then assign from second
4023 part. */
4024 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4025 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4026 emit_queue ();
4027 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4028 }
4029 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4030 {
4031 /* For a conditional expression, get a safe form of the target. Then
4032 test the condition, doing the appropriate assignment on either
4033 side. This avoids the creation of unnecessary temporaries.
4034 For non-BLKmode, it is more efficient not to do this. */
4035
4036 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4037
4038 emit_queue ();
4039 target = protect_from_queue (target, 1);
4040
4041 do_pending_stack_adjust ();
4042 NO_DEFER_POP;
4043 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4044 start_cleanup_deferral ();
4045 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
4046 end_cleanup_deferral ();
4047 emit_queue ();
4048 emit_jump_insn (gen_jump (lab2));
4049 emit_barrier ();
4050 emit_label (lab1);
4051 start_cleanup_deferral ();
4052 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
4053 end_cleanup_deferral ();
4054 emit_queue ();
4055 emit_label (lab2);
4056 OK_DEFER_POP;
4057
4058 return want_value & 1 ? target : NULL_RTX;
4059 }
4060 else if (queued_subexp_p (target))
4061 /* If target contains a postincrement, let's not risk
4062 using it as the place to generate the rhs. */
4063 {
4064 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4065 {
4066 /* Expand EXP into a new pseudo. */
4067 temp = gen_reg_rtx (GET_MODE (target));
4068 temp = expand_expr (exp, temp, GET_MODE (target),
4069 (want_value & 2
4070 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4071 }
4072 else
4073 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4074 (want_value & 2
4075 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4076
4077 /* If target is volatile, ANSI requires accessing the value
4078 *from* the target, if it is accessed. So make that happen.
4079 In no case return the target itself. */
4080 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
4081 dont_return_target = 1;
4082 }
4083 else if ((want_value & 1) != 0
4084 && GET_CODE (target) == MEM
4085 && ! MEM_VOLATILE_P (target)
4086 && GET_MODE (target) != BLKmode)
4087 /* If target is in memory and caller wants value in a register instead,
4088 arrange that. Pass TARGET as target for expand_expr so that,
4089 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4090 We know expand_expr will not use the target in that case.
4091 Don't do this if TARGET is volatile because we are supposed
4092 to write it and then read it. */
4093 {
4094 temp = expand_expr (exp, target, GET_MODE (target),
4095 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4096 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4097 {
4098 /* If TEMP is already in the desired TARGET, only copy it from
4099 memory and don't store it there again. */
4100 if (temp == target
4101 || (rtx_equal_p (temp, target)
4102 && ! side_effects_p (temp) && ! side_effects_p (target)))
4103 dont_store_target = 1;
4104 temp = copy_to_reg (temp);
4105 }
4106 dont_return_target = 1;
4107 }
4108 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4109 /* If this is a scalar in a register that is stored in a wider mode
4110 than the declared mode, compute the result into its declared mode
4111 and then convert to the wider mode. Our value is the computed
4112 expression. */
4113 {
4114 rtx inner_target = 0;
4115
4116 /* If we don't want a value, we can do the conversion inside EXP,
4117 which will often result in some optimizations. Do the conversion
4118 in two steps: first change the signedness, if needed, then
4119 the extend. But don't do this if the type of EXP is a subtype
4120 of something else since then the conversion might involve
4121 more than just converting modes. */
4122 if ((want_value & 1) == 0
4123 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4124 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4125 {
4126 if (TREE_UNSIGNED (TREE_TYPE (exp))
4127 != SUBREG_PROMOTED_UNSIGNED_P (target))
4128 exp = convert
4129 ((*lang_hooks.types.signed_or_unsigned_type)
4130 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4131
4132 exp = convert ((*lang_hooks.types.type_for_mode)
4133 (GET_MODE (SUBREG_REG (target)),
4134 SUBREG_PROMOTED_UNSIGNED_P (target)),
4135 exp);
4136
4137 inner_target = SUBREG_REG (target);
4138 }
4139
4140 temp = expand_expr (exp, inner_target, VOIDmode,
4141 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4142
4143 /* If TEMP is a MEM and we want a result value, make the access
4144 now so it gets done only once. Strictly speaking, this is
4145 only necessary if the MEM is volatile, or if the address
4146 overlaps TARGET. But not performing the load twice also
4147 reduces the amount of rtl we generate and then have to CSE. */
4148 if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
4149 temp = copy_to_reg (temp);
4150
4151 /* If TEMP is a VOIDmode constant, use convert_modes to make
4152 sure that we properly convert it. */
4153 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4154 {
4155 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4156 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4157 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4158 GET_MODE (target), temp,
4159 SUBREG_PROMOTED_UNSIGNED_P (target));
4160 }
4161
4162 convert_move (SUBREG_REG (target), temp,
4163 SUBREG_PROMOTED_UNSIGNED_P (target));
4164
4165 /* If we promoted a constant, change the mode back down to match
4166 target. Otherwise, the caller might get confused by a result whose
4167 mode is larger than expected. */
4168
4169 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
4170 {
4171 if (GET_MODE (temp) != VOIDmode)
4172 {
4173 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4174 SUBREG_PROMOTED_VAR_P (temp) = 1;
4175 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4176 SUBREG_PROMOTED_UNSIGNED_P (target));
4177 }
4178 else
4179 temp = convert_modes (GET_MODE (target),
4180 GET_MODE (SUBREG_REG (target)),
4181 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4182 }
4183
4184 return want_value & 1 ? temp : NULL_RTX;
4185 }
4186 else
4187 {
4188 temp = expand_expr (exp, target, GET_MODE (target),
4189 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4190 /* Return TARGET if it's a specified hardware register.
4191 If TARGET is a volatile mem ref, either return TARGET
4192 or return a reg copied *from* TARGET; ANSI requires this.
4193
4194 Otherwise, if TEMP is not TARGET, return TEMP
4195 if it is constant (for efficiency),
4196 or if we really want the correct value. */
4197 if (!(target && GET_CODE (target) == REG
4198 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4199 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4200 && ! rtx_equal_p (temp, target)
4201 && (CONSTANT_P (temp) || (want_value & 1) != 0))
4202 dont_return_target = 1;
4203 }
4204
4205 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4206 the same as that of TARGET, adjust the constant. This is needed, for
4207 example, in case it is a CONST_DOUBLE and we want only a word-sized
4208 value. */
4209 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4210 && TREE_CODE (exp) != ERROR_MARK
4211 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4212 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4213 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4214
4215 /* If value was not generated in the target, store it there.
4216 Convert the value to TARGET's type first if necessary.
4217 If TEMP and TARGET compare equal according to rtx_equal_p, but
4218 one or both of them are volatile memory refs, we have to distinguish
4219 two cases:
4220 - expand_expr has used TARGET. In this case, we must not generate
4221 another copy. This can be detected by TARGET being equal according
4222 to == .
4223 - expand_expr has not used TARGET - that means that the source just
4224 happens to have the same RTX form. Since temp will have been created
4225 by expand_expr, it will compare unequal according to == .
4226 We must generate a copy in this case, to reach the correct number
4227 of volatile memory references. */
4228
4229 if ((! rtx_equal_p (temp, target)
4230 || (temp != target && (side_effects_p (temp)
4231 || side_effects_p (target))))
4232 && TREE_CODE (exp) != ERROR_MARK
4233 && ! dont_store_target
4234 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4235 but TARGET is not a valid memory reference, TEMP will differ
4236 from TARGET although it is really the same location. */
4237 && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
4238 || target != DECL_RTL_IF_SET (exp))
4239 /* If there's nothing to copy, don't bother. Don't call expr_size
4240 unless necessary, because the expr_size hook of some front ends (C++)
4241 aborts on objects that are not supposed to be bit-copied or
4242 bit-initialized. */
4243 && expr_size (exp) != const0_rtx)
4244 {
4245 target = protect_from_queue (target, 1);
4246 if (GET_MODE (temp) != GET_MODE (target)
4247 && GET_MODE (temp) != VOIDmode)
4248 {
4249 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4250 if (dont_return_target)
4251 {
4252 /* In this case, we will return TEMP,
4253 so make sure it has the proper mode.
4254 But don't forget to store the value into TARGET. */
4255 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4256 emit_move_insn (target, temp);
4257 }
4258 else
4259 convert_move (target, temp, unsignedp);
4260 }
4261
4262 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4263 {
4264 /* Handle copying a string constant into an array. The string
4265 constant may be shorter than the array. So copy just the string's
4266 actual length, and clear the rest. First get the size of the data
4267 type of the string, which is actually the size of the target. */
4268 rtx size = expr_size (exp);
4269
4270 if (GET_CODE (size) == CONST_INT
4271 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4272 emit_block_move (target, temp, size,
4273 (want_value & 2
4274 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4275 else
4276 {
4277 /* Compute the size of the data to copy from the string. */
4278 tree copy_size
4279 = size_binop (MIN_EXPR,
4280 make_tree (sizetype, size),
4281 size_int (TREE_STRING_LENGTH (exp)));
4282 rtx copy_size_rtx
4283 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4284 (want_value & 2
4285 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4286 rtx label = 0;
4287
4288 /* Copy that much. */
4289 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4290 TREE_UNSIGNED (sizetype));
4291 emit_block_move (target, temp, copy_size_rtx,
4292 (want_value & 2
4293 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4294
4295 /* Figure out how much is left in TARGET that we have to clear.
4296 Do all calculations in ptr_mode. */
4297 if (GET_CODE (copy_size_rtx) == CONST_INT)
4298 {
4299 size = plus_constant (size, -INTVAL (copy_size_rtx));
4300 target = adjust_address (target, BLKmode,
4301 INTVAL (copy_size_rtx));
4302 }
4303 else
4304 {
4305 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4306 copy_size_rtx, NULL_RTX, 0,
4307 OPTAB_LIB_WIDEN);
4308
4309 #ifdef POINTERS_EXTEND_UNSIGNED
4310 if (GET_MODE (copy_size_rtx) != Pmode)
4311 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4312 TREE_UNSIGNED (sizetype));
4313 #endif
4314
4315 target = offset_address (target, copy_size_rtx,
4316 highest_pow2_factor (copy_size));
4317 label = gen_label_rtx ();
4318 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4319 GET_MODE (size), 0, label);
4320 }
4321
4322 if (size != const0_rtx)
4323 clear_storage (target, size);
4324
4325 if (label)
4326 emit_label (label);
4327 }
4328 }
4329 /* Handle calls that return values in multiple non-contiguous locations.
4330 The Irix 6 ABI has examples of this. */
4331 else if (GET_CODE (target) == PARALLEL)
4332 emit_group_load (target, temp, TREE_TYPE (exp),
4333 int_size_in_bytes (TREE_TYPE (exp)));
4334 else if (GET_MODE (temp) == BLKmode)
4335 emit_block_move (target, temp, expr_size (exp),
4336 (want_value & 2
4337 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4338 else
4339 emit_move_insn (target, temp);
4340 }
4341
4342 /* If we don't want a value, return NULL_RTX. */
4343 if ((want_value & 1) == 0)
4344 return NULL_RTX;
4345
4346 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4347 ??? The latter test doesn't seem to make sense. */
4348 else if (dont_return_target && GET_CODE (temp) != MEM)
4349 return temp;
4350
4351 /* Return TARGET itself if it is a hard register. */
4352 else if ((want_value & 1) != 0
4353 && GET_MODE (target) != BLKmode
4354 && ! (GET_CODE (target) == REG
4355 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4356 return copy_to_reg (target);
4357
4358 else
4359 return target;
4360 }
4361 \f
4362 /* Return 1 if EXP just contains zeros. FIXME merge with initializer_zerop. */
4363
4364 static int
4365 is_zeros_p (tree exp)
4366 {
4367 tree elt;
4368
4369 switch (TREE_CODE (exp))
4370 {
4371 case CONVERT_EXPR:
4372 case NOP_EXPR:
4373 case NON_LVALUE_EXPR:
4374 case VIEW_CONVERT_EXPR:
4375 return is_zeros_p (TREE_OPERAND (exp, 0));
4376
4377 case INTEGER_CST:
4378 return integer_zerop (exp);
4379
4380 case COMPLEX_CST:
4381 return
4382 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4383
4384 case REAL_CST:
4385 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4386
4387 case VECTOR_CST:
4388 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4389 elt = TREE_CHAIN (elt))
4390 if (!is_zeros_p (TREE_VALUE (elt)))
4391 return 0;
4392
4393 return 1;
4394
4395 case CONSTRUCTOR:
4396 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4397 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4398 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4399 if (! is_zeros_p (TREE_VALUE (elt)))
4400 return 0;
4401
4402 return 1;
4403
4404 default:
4405 return 0;
4406 }
4407 }
4408
4409 /* Return 1 if EXP contains mostly (3/4) zeros. */
4410
4411 int
4412 mostly_zeros_p (tree exp)
4413 {
4414 if (TREE_CODE (exp) == CONSTRUCTOR)
4415 {
4416 int elts = 0, zeros = 0;
4417 tree elt = CONSTRUCTOR_ELTS (exp);
4418 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4419 {
4420 /* If there are no ranges of true bits, it is all zero. */
4421 return elt == NULL_TREE;
4422 }
4423 for (; elt; elt = TREE_CHAIN (elt))
4424 {
4425 /* We do not handle the case where the index is a RANGE_EXPR,
4426 so the statistic will be somewhat inaccurate.
4427 We do make a more accurate count in store_constructor itself,
4428 and since this function is only used for nested array elements,
4429 this should be close enough. */
4430 if (mostly_zeros_p (TREE_VALUE (elt)))
4431 zeros++;
4432 elts++;
4433 }
4434
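/* At least three quarters of the elements must be (mostly) zero;
   e.g. 6 zeros out of 8 elements gives 4*6 >= 3*8.  */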
4435 return 4 * zeros >= 3 * elts;
4436 }
4437
4438 return is_zeros_p (exp);
4439 }
4440 \f
4441 /* Helper function for store_constructor.
4442 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4443 TYPE is the type of the CONSTRUCTOR, not the element type.
4444 CLEARED is as for store_constructor.
4445 ALIAS_SET is the alias set to use for any stores.
4446
4447 This provides a recursive shortcut back to store_constructor when it isn't
4448 necessary to go through store_field. This is so that we can pass through
4449 the cleared field to let store_constructor know that we may not have to
4450 clear a substructure if the outer structure has already been cleared. */
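/* Illustration: for a nested initializer such as { { 0, 0 }, 1 }, the outer
   store_constructor may already have cleared the whole object; passing
   CLEARED through here lets the inner constructor avoid clearing its
   substructure a second time.  */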
4451
4452 static void
4453 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4454 HOST_WIDE_INT bitpos, enum machine_mode mode,
4455 tree exp, tree type, int cleared, int alias_set)
4456 {
4457 if (TREE_CODE (exp) == CONSTRUCTOR
4458 && bitpos % BITS_PER_UNIT == 0
4459 /* If we have a nonzero bitpos for a register target, then we just
4460 let store_field do the bitfield handling. This is unlikely to
4461 generate unnecessary clear instructions anyway. */
4462 && (bitpos == 0 || GET_CODE (target) == MEM))
4463 {
4464 if (GET_CODE (target) == MEM)
4465 target
4466 = adjust_address (target,
4467 GET_MODE (target) == BLKmode
4468 || 0 != (bitpos
4469 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4470 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4471
4472
4473 /* Update the alias set, if required. */
4474 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4475 && MEM_ALIAS_SET (target) != 0)
4476 {
4477 target = copy_rtx (target);
4478 set_mem_alias_set (target, alias_set);
4479 }
4480
4481 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4482 }
4483 else
4484 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4485 alias_set);
4486 }
4487
4488 /* Store the value of constructor EXP into the rtx TARGET.
4489 TARGET is either a REG or a MEM; we know it cannot conflict, since
4490 safe_from_p has been called.
4491 CLEARED is true if TARGET is known to have been zero'd.
4492 SIZE is the number of bytes of TARGET we are allowed to modify: this
4493 may not be the same as the size of EXP if we are assigning to a field
4494 which has been packed to exclude padding bits. */
4495
4496 static void
4497 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4498 {
4499 tree type = TREE_TYPE (exp);
4500 #ifdef WORD_REGISTER_OPERATIONS
4501 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4502 #endif
4503
4504 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4505 || TREE_CODE (type) == QUAL_UNION_TYPE)
4506 {
4507 tree elt;
4508
4509 /* If size is zero or the target is already cleared, do nothing. */
4510 if (size == 0 || cleared)
4511 cleared = 1;
4512 /* We either clear the aggregate or indicate the value is dead. */
4513 else if ((TREE_CODE (type) == UNION_TYPE
4514 || TREE_CODE (type) == QUAL_UNION_TYPE)
4515 && ! CONSTRUCTOR_ELTS (exp))
4516 /* If the constructor is empty, clear the union. */
4517 {
4518 clear_storage (target, expr_size (exp));
4519 cleared = 1;
4520 }
4521
4522 /* If we are building a static constructor into a register,
4523 set the initial value as zero so we can fold the value into
4524 a constant. But if more than one register is involved,
4525 this probably loses. */
4526 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4527 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4528 {
4529 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4530 cleared = 1;
4531 }
4532
4533 /* If the constructor has fewer fields than the structure
4534 or if we are initializing the structure to mostly zeros,
4535 clear the whole structure first. Don't do this if TARGET is a
4536 register whose mode size isn't equal to SIZE since clear_storage
4537 can't handle this case. */
4538 else if (((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
4539 || mostly_zeros_p (exp))
4540 && (GET_CODE (target) != REG
4541 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4542 == size)))
4543 {
4544 rtx xtarget = target;
4545
4546 if (readonly_fields_p (type))
4547 {
4548 xtarget = copy_rtx (xtarget);
4549 RTX_UNCHANGING_P (xtarget) = 1;
4550 }
4551
4552 clear_storage (xtarget, GEN_INT (size));
4553 cleared = 1;
4554 }
4555
4556 if (! cleared)
4557 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4558
4559 /* Store each element of the constructor into
4560 the corresponding field of TARGET. */
4561
4562 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4563 {
4564 tree field = TREE_PURPOSE (elt);
4565 tree value = TREE_VALUE (elt);
4566 enum machine_mode mode;
4567 HOST_WIDE_INT bitsize;
4568 HOST_WIDE_INT bitpos = 0;
4569 tree offset;
4570 rtx to_rtx = target;
4571
4572 /* Just ignore missing fields.
4573 We cleared the whole structure, above,
4574 if any fields are missing. */
4575 if (field == 0)
4576 continue;
4577
4578 if (cleared && is_zeros_p (value))
4579 continue;
4580
4581 if (host_integerp (DECL_SIZE (field), 1))
4582 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4583 else
4584 bitsize = -1;
4585
4586 mode = DECL_MODE (field);
4587 if (DECL_BIT_FIELD (field))
4588 mode = VOIDmode;
4589
4590 offset = DECL_FIELD_OFFSET (field);
4591 if (host_integerp (offset, 0)
4592 && host_integerp (bit_position (field), 0))
4593 {
4594 bitpos = int_bit_position (field);
4595 offset = 0;
4596 }
4597 else
4598 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4599
4600 if (offset)
4601 {
4602 rtx offset_rtx;
4603
4604 if (CONTAINS_PLACEHOLDER_P (offset))
4605 offset = build (WITH_RECORD_EXPR, sizetype,
4606 offset, make_tree (TREE_TYPE (exp), target));
4607
4608 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4609 if (GET_CODE (to_rtx) != MEM)
4610 abort ();
4611
4612 #ifdef POINTERS_EXTEND_UNSIGNED
4613 if (GET_MODE (offset_rtx) != Pmode)
4614 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4615 #else
4616 if (GET_MODE (offset_rtx) != ptr_mode)
4617 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4618 #endif
4619
4620 to_rtx = offset_address (to_rtx, offset_rtx,
4621 highest_pow2_factor (offset));
4622 }
4623
4624 if (TREE_READONLY (field))
4625 {
4626 if (GET_CODE (to_rtx) == MEM)
4627 to_rtx = copy_rtx (to_rtx);
4628
4629 RTX_UNCHANGING_P (to_rtx) = 1;
4630 }
4631
4632 #ifdef WORD_REGISTER_OPERATIONS
4633 /* If this initializes a field that is smaller than a word, at the
4634 start of a word, try to widen it to a full word.
4635 This special case allows us to output C++ member function
4636 initializations in a form that the optimizers can understand. */
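/* Illustrative example: on a 32-bit big-endian word-register target,
   initializing a 16-bit integer field at the start of a word with the
   constant 5 becomes a full-word store of 5 << 16, i.e. 0x00050000.  */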
4637 if (GET_CODE (target) == REG
4638 && bitsize < BITS_PER_WORD
4639 && bitpos % BITS_PER_WORD == 0
4640 && GET_MODE_CLASS (mode) == MODE_INT
4641 && TREE_CODE (value) == INTEGER_CST
4642 && exp_size >= 0
4643 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4644 {
4645 tree type = TREE_TYPE (value);
4646
4647 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4648 {
4649 type = (*lang_hooks.types.type_for_size)
4650 (BITS_PER_WORD, TREE_UNSIGNED (type));
4651 value = convert (type, value);
4652 }
4653
4654 if (BYTES_BIG_ENDIAN)
4655 value
4656 = fold (build (LSHIFT_EXPR, type, value,
4657 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4658 bitsize = BITS_PER_WORD;
4659 mode = word_mode;
4660 }
4661 #endif
4662
4663 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4664 && DECL_NONADDRESSABLE_P (field))
4665 {
4666 to_rtx = copy_rtx (to_rtx);
4667 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4668 }
4669
4670 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4671 value, type, cleared,
4672 get_alias_set (TREE_TYPE (field)));
4673 }
4674 }
4675 else if (TREE_CODE (type) == ARRAY_TYPE
4676 || TREE_CODE (type) == VECTOR_TYPE)
4677 {
4678 tree elt;
4679 int i;
4680 int need_to_clear;
4681 tree domain = TYPE_DOMAIN (type);
4682 tree elttype = TREE_TYPE (type);
4683 int const_bounds_p;
4684 HOST_WIDE_INT minelt = 0;
4685 HOST_WIDE_INT maxelt = 0;
4686
4687 /* Vectors are like arrays, but the domain is stored via an array
4688 type indirectly. */
4689 if (TREE_CODE (type) == VECTOR_TYPE)
4690 {
4691 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
4692 the same field as TYPE_DOMAIN, we are not guaranteed that
4693 it always will. */
4694 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4695 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4696 }
4697
4698 const_bounds_p = (TYPE_MIN_VALUE (domain)
4699 && TYPE_MAX_VALUE (domain)
4700 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4701 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4702
4703 /* If we have constant bounds for the range of the type, get them. */
4704 if (const_bounds_p)
4705 {
4706 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4707 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4708 }
4709
4710 /* If the constructor has fewer elements than the array,
4711 clear the whole array first. Similarly if this is
4712 a static constructor of a non-BLKmode object. */
4713 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4714 need_to_clear = 1;
4715 else
4716 {
4717 HOST_WIDE_INT count = 0, zero_count = 0;
4718 need_to_clear = ! const_bounds_p;
4719
4720 /* This loop is a more accurate version of the loop in
4721 mostly_zeros_p (it handles RANGE_EXPR in an index).
4722 It is also needed to check for missing elements. */
4723 for (elt = CONSTRUCTOR_ELTS (exp);
4724 elt != NULL_TREE && ! need_to_clear;
4725 elt = TREE_CHAIN (elt))
4726 {
4727 tree index = TREE_PURPOSE (elt);
4728 HOST_WIDE_INT this_node_count;
4729
4730 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4731 {
4732 tree lo_index = TREE_OPERAND (index, 0);
4733 tree hi_index = TREE_OPERAND (index, 1);
4734
4735 if (! host_integerp (lo_index, 1)
4736 || ! host_integerp (hi_index, 1))
4737 {
4738 need_to_clear = 1;
4739 break;
4740 }
4741
4742 this_node_count = (tree_low_cst (hi_index, 1)
4743 - tree_low_cst (lo_index, 1) + 1);
4744 }
4745 else
4746 this_node_count = 1;
4747
4748 count += this_node_count;
4749 if (mostly_zeros_p (TREE_VALUE (elt)))
4750 zero_count += this_node_count;
4751 }
4752
4753 /* Clear the entire array first if there are any missing elements,
4754 or if the incidence of zero elements is >= 75%. */
4755 if (! need_to_clear
4756 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4757 need_to_clear = 1;
4758 }
4759
4760 if (need_to_clear && size > 0)
4761 {
4762 if (! cleared)
4763 {
4764 if (REG_P (target))
4765 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4766 else
4767 clear_storage (target, GEN_INT (size));
4768 }
4769 cleared = 1;
4770 }
4771 else if (REG_P (target))
4772 /* Inform later passes that the old value is dead. */
4773 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4774
4775 /* Store each element of the constructor into
4776 the corresponding element of TARGET, determined
4777 by counting the elements. */
4778 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4779 elt;
4780 elt = TREE_CHAIN (elt), i++)
4781 {
4782 enum machine_mode mode;
4783 HOST_WIDE_INT bitsize;
4784 HOST_WIDE_INT bitpos;
4785 int unsignedp;
4786 tree value = TREE_VALUE (elt);
4787 tree index = TREE_PURPOSE (elt);
4788 rtx xtarget = target;
4789
4790 if (cleared && is_zeros_p (value))
4791 continue;
4792
4793 unsignedp = TREE_UNSIGNED (elttype);
4794 mode = TYPE_MODE (elttype);
4795 if (mode == BLKmode)
4796 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4797 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4798 : -1);
4799 else
4800 bitsize = GET_MODE_BITSIZE (mode);
4801
4802 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4803 {
4804 tree lo_index = TREE_OPERAND (index, 0);
4805 tree hi_index = TREE_OPERAND (index, 1);
4806 rtx index_r, pos_rtx, loop_end;
4807 struct nesting *loop;
4808 HOST_WIDE_INT lo, hi, count;
4809 tree position;
4810
4811 /* If the range is constant and "small", unroll the loop. */
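/* Reading the test below, "small" means: the target is not a MEM, or the
   range covers at most two elements, or the unrolled elements total at
   most 40 bytes (40 * 8 bits).  */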
4812 if (const_bounds_p
4813 && host_integerp (lo_index, 0)
4814 && host_integerp (hi_index, 0)
4815 && (lo = tree_low_cst (lo_index, 0),
4816 hi = tree_low_cst (hi_index, 0),
4817 count = hi - lo + 1,
4818 (GET_CODE (target) != MEM
4819 || count <= 2
4820 || (host_integerp (TYPE_SIZE (elttype), 1)
4821 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4822 <= 40 * 8)))))
4823 {
4824 lo -= minelt; hi -= minelt;
4825 for (; lo <= hi; lo++)
4826 {
4827 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4828
4829 if (GET_CODE (target) == MEM
4830 && !MEM_KEEP_ALIAS_SET_P (target)
4831 && TREE_CODE (type) == ARRAY_TYPE
4832 && TYPE_NONALIASED_COMPONENT (type))
4833 {
4834 target = copy_rtx (target);
4835 MEM_KEEP_ALIAS_SET_P (target) = 1;
4836 }
4837
4838 store_constructor_field
4839 (target, bitsize, bitpos, mode, value, type, cleared,
4840 get_alias_set (elttype));
4841 }
4842 }
4843 else
4844 {
4845 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4846 loop_end = gen_label_rtx ();
4847
4848 unsignedp = TREE_UNSIGNED (domain);
4849
4850 index = build_decl (VAR_DECL, NULL_TREE, domain);
4851
4852 index_r
4853 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4854 &unsignedp, 0));
4855 SET_DECL_RTL (index, index_r);
4856 if (TREE_CODE (value) == SAVE_EXPR
4857 && SAVE_EXPR_RTL (value) == 0)
4858 {
4859 /* Make sure value gets expanded once before the
4860 loop. */
4861 expand_expr (value, const0_rtx, VOIDmode, 0);
4862 emit_queue ();
4863 }
4864 store_expr (lo_index, index_r, 0);
4865 loop = expand_start_loop (0);
4866
4867 /* Assign value to element index. */
4868 position
4869 = convert (ssizetype,
4870 fold (build (MINUS_EXPR, TREE_TYPE (index),
4871 index, TYPE_MIN_VALUE (domain))));
4872 position = size_binop (MULT_EXPR, position,
4873 convert (ssizetype,
4874 TYPE_SIZE_UNIT (elttype)));
4875
4876 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4877 xtarget = offset_address (target, pos_rtx,
4878 highest_pow2_factor (position));
4879 xtarget = adjust_address (xtarget, mode, 0);
4880 if (TREE_CODE (value) == CONSTRUCTOR)
4881 store_constructor (value, xtarget, cleared,
4882 bitsize / BITS_PER_UNIT);
4883 else
4884 store_expr (value, xtarget, 0);
4885
4886 expand_exit_loop_if_false (loop,
4887 build (LT_EXPR, integer_type_node,
4888 index, hi_index));
4889
4890 expand_increment (build (PREINCREMENT_EXPR,
4891 TREE_TYPE (index),
4892 index, integer_one_node), 0, 0);
4893 expand_end_loop ();
4894 emit_label (loop_end);
4895 }
4896 }
4897 else if ((index != 0 && ! host_integerp (index, 0))
4898 || ! host_integerp (TYPE_SIZE (elttype), 1))
4899 {
4900 tree position;
4901
4902 if (index == 0)
4903 index = ssize_int (1);
4904
4905 if (minelt)
4906 index = convert (ssizetype,
4907 fold (build (MINUS_EXPR, index,
4908 TYPE_MIN_VALUE (domain))));
4909
4910 position = size_binop (MULT_EXPR, index,
4911 convert (ssizetype,
4912 TYPE_SIZE_UNIT (elttype)));
4913 xtarget = offset_address (target,
4914 expand_expr (position, 0, VOIDmode, 0),
4915 highest_pow2_factor (position));
4916 xtarget = adjust_address (xtarget, mode, 0);
4917 store_expr (value, xtarget, 0);
4918 }
4919 else
4920 {
4921 if (index != 0)
4922 bitpos = ((tree_low_cst (index, 0) - minelt)
4923 * tree_low_cst (TYPE_SIZE (elttype), 1));
4924 else
4925 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4926
4927 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
4928 && TREE_CODE (type) == ARRAY_TYPE
4929 && TYPE_NONALIASED_COMPONENT (type))
4930 {
4931 target = copy_rtx (target);
4932 MEM_KEEP_ALIAS_SET_P (target) = 1;
4933 }
4934
4935 store_constructor_field (target, bitsize, bitpos, mode, value,
4936 type, cleared, get_alias_set (elttype));
4937
4938 }
4939 }
4940 }
4941
4942 /* Set constructor assignments. */
4943 else if (TREE_CODE (type) == SET_TYPE)
4944 {
4945 tree elt = CONSTRUCTOR_ELTS (exp);
4946 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4947 tree domain = TYPE_DOMAIN (type);
4948 tree domain_min, domain_max, bitlength;
4949
4950 /* The default implementation strategy is to extract the constant
4951 parts of the constructor, use that to initialize the target,
4952 and then "or" in whatever non-constant ranges we need in addition.
4953
4954 If a large set is all zero or all ones, it is
4955 probably better to set it using memset (if available) or bzero.
4956 Also, if a large set has just a single range, it may also be
4957 better to first clear all the first clear the set (using
4958 bzero/memset), and set the bits we want. */
4959
4960 /* Check for all zeros. */
4961 if (elt == NULL_TREE && size > 0)
4962 {
4963 if (!cleared)
4964 clear_storage (target, GEN_INT (size));
4965 return;
4966 }
4967
4968 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4969 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4970 bitlength = size_binop (PLUS_EXPR,
4971 size_diffop (domain_max, domain_min),
4972 ssize_int (1));
4973
4974 nbits = tree_low_cst (bitlength, 1);
4975
4976 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4977 are "complicated" (more than one range), initialize (the
4978 constant parts) by copying from a constant. */
4979 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4980 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4981 {
4982 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4983 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4984 char *bit_buffer = alloca (nbits);
4985 HOST_WIDE_INT word = 0;
4986 unsigned int bit_pos = 0;
4987 unsigned int ibit = 0;
4988 unsigned int offset = 0; /* In bytes from beginning of set. */
4989
4990 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
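/* Walk the bit buffer, packing SET_WORD_SIZE bits at a time into WORD and
   storing each word into the target as it fills up (or when the last bit
   has been processed).  */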
4991 for (;;)
4992 {
4993 if (bit_buffer[ibit])
4994 {
4995 if (BYTES_BIG_ENDIAN)
4996 word |= (1 << (set_word_size - 1 - bit_pos));
4997 else
4998 word |= 1 << bit_pos;
4999 }
5000
5001 bit_pos++; ibit++;
5002 if (bit_pos >= set_word_size || ibit == nbits)
5003 {
5004 if (word != 0 || ! cleared)
5005 {
5006 rtx datum = GEN_INT (word);
5007 rtx to_rtx;
5008
5009 /* The assumption here is that it is safe to use
5010 XEXP if the set is multi-word, but not if
5011 it's single-word. */
5012 if (GET_CODE (target) == MEM)
5013 to_rtx = adjust_address (target, mode, offset);
5014 else if (offset == 0)
5015 to_rtx = target;
5016 else
5017 abort ();
5018 emit_move_insn (to_rtx, datum);
5019 }
5020
5021 if (ibit == nbits)
5022 break;
5023 word = 0;
5024 bit_pos = 0;
5025 offset += set_word_size / BITS_PER_UNIT;
5026 }
5027 }
5028 }
5029 else if (!cleared)
5030 /* Don't bother clearing storage if the set is all ones. */
5031 if (TREE_CHAIN (elt) != NULL_TREE
5032 || (TREE_PURPOSE (elt) == NULL_TREE
5033 ? nbits != 1
5034 : ( ! host_integerp (TREE_VALUE (elt), 0)
5035 || ! host_integerp (TREE_PURPOSE (elt), 0)
5036 || (tree_low_cst (TREE_VALUE (elt), 0)
5037 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5038 != (HOST_WIDE_INT) nbits))))
5039 clear_storage (target, expr_size (exp));
5040
5041 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5042 {
5043 /* Start of range of element or NULL. */
5044 tree startbit = TREE_PURPOSE (elt);
5045 /* End of range of element, or element value. */
5046 tree endbit = TREE_VALUE (elt);
5047 HOST_WIDE_INT startb, endb;
5048 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5049
5050 bitlength_rtx = expand_expr (bitlength,
5051 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5052
5053 /* Handle a non-range tuple element like [ expr ]. */
5054 if (startbit == NULL_TREE)
5055 {
5056 startbit = save_expr (endbit);
5057 endbit = startbit;
5058 }
5059
5060 startbit = convert (sizetype, startbit);
5061 endbit = convert (sizetype, endbit);
5062 if (! integer_zerop (domain_min))
5063 {
5064 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5065 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5066 }
5067 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5068 EXPAND_CONST_ADDRESS);
5069 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5070 EXPAND_CONST_ADDRESS);
5071
5072 if (REG_P (target))
5073 {
5074 targetx
5075 = assign_temp
5076 ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5077 (GET_MODE (target), 0),
5078 TYPE_QUAL_CONST)),
5079 0, 1, 1);
5080 emit_move_insn (targetx, target);
5081 }
5082
5083 else if (GET_CODE (target) == MEM)
5084 targetx = target;
5085 else
5086 abort ();
5087
5088 /* Optimization: If startbit and endbit are constants divisible
5089 by BITS_PER_UNIT, call memset instead. */
5090 if (TARGET_MEM_FUNCTIONS
5091 && TREE_CODE (startbit) == INTEGER_CST
5092 && TREE_CODE (endbit) == INTEGER_CST
5093 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5094 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5095 {
5096 emit_library_call (memset_libfunc, LCT_NORMAL,
5097 VOIDmode, 3,
5098 plus_constant (XEXP (targetx, 0),
5099 startb / BITS_PER_UNIT),
5100 Pmode,
5101 constm1_rtx, TYPE_MODE (integer_type_node),
5102 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5103 TYPE_MODE (sizetype));
5104 }
5105 else
5106 emit_library_call (setbits_libfunc, LCT_NORMAL,
5107 VOIDmode, 4, XEXP (targetx, 0),
5108 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5109 startbit_rtx, TYPE_MODE (sizetype),
5110 endbit_rtx, TYPE_MODE (sizetype));
5111
5112 if (REG_P (target))
5113 emit_move_insn (target, targetx);
5114 }
5115 }
5116
5117 else
5118 abort ();
5119 }
5120
5121 /* Store the value of EXP (an expression tree)
5122 into a subfield of TARGET which has mode MODE and occupies
5123 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5124 If MODE is VOIDmode, it means that we are storing into a bit-field.
5125
5126 If VALUE_MODE is VOIDmode, return nothing in particular.
5127 UNSIGNEDP is not used in this case.
5128
5129 Otherwise, return an rtx for the value stored. This rtx
5130 has mode VALUE_MODE if that is convenient to do.
5131 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5132
5133 TYPE is the type of the underlying object,
5134
5135 ALIAS_SET is the alias set for the destination. This value will
5136 (in general) be different from that for TARGET, since TARGET is a
5137 reference to the containing structure. */
5138
5139 static rtx
5140 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5141 enum machine_mode mode, tree exp, enum machine_mode value_mode,
5142 int unsignedp, tree type, int alias_set)
5143 {
5144 HOST_WIDE_INT width_mask = 0;
5145
5146 if (TREE_CODE (exp) == ERROR_MARK)
5147 return const0_rtx;
5148
5149 /* If we have nothing to store, do nothing unless the expression has
5150 side-effects. */
5151 if (bitsize == 0)
5152 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5153 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5154 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
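/* For example, a 5-bit field gives WIDTH_MASK == 0x1f; it is used later in
   this function to recover the stored value without refetching the
   bit-field from memory.  */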
5155
5156 /* If we are storing into an unaligned field of an aligned union that is
5157 in a register, we may have the mode of TARGET being an integer mode but
5158 MODE == BLKmode. In that case, get an aligned object whose size and
5159 alignment are the same as TARGET and store TARGET into it (we can avoid
5160 the store if the field being stored is the entire width of TARGET). Then
5161 call ourselves recursively to store the field into a BLKmode version of
5162 that object. Finally, load from the object into TARGET. This is not
5163 very efficient in general, but should only be slightly more expensive
5164 than the otherwise-required unaligned accesses. Perhaps this can be
5165 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5166 twice, once with emit_move_insn and once via store_field. */
5167
5168 if (mode == BLKmode
5169 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5170 {
5171 rtx object = assign_temp (type, 0, 1, 1);
5172 rtx blk_object = adjust_address (object, BLKmode, 0);
5173
5174 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5175 emit_move_insn (object, target);
5176
5177 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5178 alias_set);
5179
5180 emit_move_insn (target, object);
5181
5182 /* We want to return the BLKmode version of the data. */
5183 return blk_object;
5184 }
5185
5186 if (GET_CODE (target) == CONCAT)
5187 {
5188 /* We're storing into a struct containing a single __complex. */
5189
5190 if (bitpos != 0)
5191 abort ();
5192 return store_expr (exp, target, 0);
5193 }
5194
5195 /* If the structure is in a register or if the component
5196 is a bit field, we cannot use addressing to access it.
5197 Use bit-field techniques or SUBREG to store in it. */
5198
5199 if (mode == VOIDmode
5200 || (mode != BLKmode && ! direct_store[(int) mode]
5201 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5202 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5203 || GET_CODE (target) == REG
5204 || GET_CODE (target) == SUBREG
5205 /* If the field isn't aligned enough to store as an ordinary memref,
5206 store it as a bit field. */
5207 || (mode != BLKmode
5208 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5209 || bitpos % GET_MODE_ALIGNMENT (mode))
5210 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5211 || (bitpos % BITS_PER_UNIT != 0)))
5212 /* If the RHS and field are a constant size and the size of the
5213 RHS isn't the same size as the bitfield, we must use bitfield
5214 operations. */
5215 || (bitsize >= 0
5216 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5217 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5218 {
5219 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5220
5221 /* If BITSIZE is narrower than the size of the type of EXP,
5222 we will be narrowing TEMP. Normally, what's wanted are the
5223 low-order bits. However, if EXP's type is a record and this is
5224 a big-endian machine, we want the upper BITSIZE bits. */
5225 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5226 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5227 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5228 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5229 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5230 - bitsize),
5231 NULL_RTX, 1);
5232
5233 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5234 MODE. */
5235 if (mode != VOIDmode && mode != BLKmode
5236 && mode != TYPE_MODE (TREE_TYPE (exp)))
5237 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5238
5239 /* If the modes of TARGET and TEMP are both BLKmode, both
5240 must be in memory and BITPOS must be aligned on a byte
5241 boundary. If so, we simply do a block copy. */
5242 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5243 {
5244 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5245 || bitpos % BITS_PER_UNIT != 0)
5246 abort ();
5247
5248 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5249 emit_block_move (target, temp,
5250 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5251 / BITS_PER_UNIT),
5252 BLOCK_OP_NORMAL);
5253
5254 return value_mode == VOIDmode ? const0_rtx : target;
5255 }
5256
5257 /* Store the value in the bitfield. */
5258 store_bit_field (target, bitsize, bitpos, mode, temp,
5259 int_size_in_bytes (type));
5260
5261 if (value_mode != VOIDmode)
5262 {
5263 /* The caller wants an rtx for the value.
5264 If possible, avoid refetching from the bitfield itself. */
5265 if (width_mask != 0
5266 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5267 {
5268 tree count;
5269 enum machine_mode tmode;
5270
5271 tmode = GET_MODE (temp);
5272 if (tmode == VOIDmode)
5273 tmode = value_mode;
5274
5275 if (unsignedp)
5276 return expand_and (tmode, temp,
5277 gen_int_mode (width_mask, tmode),
5278 NULL_RTX);
5279
5280 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5281 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5282 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5283 }
5284
5285 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5286 NULL_RTX, value_mode, VOIDmode,
5287 int_size_in_bytes (type));
5288 }
5289 return const0_rtx;
5290 }
5291 else
5292 {
5293 rtx addr = XEXP (target, 0);
5294 rtx to_rtx = target;
5295
5296 /* If a value is wanted, it must be the lhs;
5297 so make the address stable for multiple use. */
5298
5299 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5300 && ! CONSTANT_ADDRESS_P (addr)
5301 /* A frame-pointer reference is already stable. */
5302 && ! (GET_CODE (addr) == PLUS
5303 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5304 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5305 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5306 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5307
5308 /* Now build a reference to just the desired component. */
5309
5310 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5311
5312 if (to_rtx == target)
5313 to_rtx = copy_rtx (to_rtx);
5314
5315 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5316 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5317 set_mem_alias_set (to_rtx, alias_set);
5318
5319 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5320 }
5321 }
5322 \f
5323 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5324 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5325 codes and find the ultimate containing object, which we return.
5326
5327 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5328 bit position, and *PUNSIGNEDP to the signedness of the field.
5329 If the position of the field is variable, we store a tree
5330 giving the variable offset (in units) in *POFFSET.
5331 This offset is in addition to the bit position.
5332 If the position is not variable, we store 0 in *POFFSET.
5333
5334 If any of the extraction expressions is volatile,
5335 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5336
5337 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5338 is a mode that can be used to access the field. In that case, *PBITSIZE
5339 is redundant.
5340
5341 If the field describes a variable-sized object, *PMODE is set to
5342 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5343 this case, but the address of the object can be found. */
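
/* Illustrative sketch only (exact values depend on the target's layout
   rules): for a C fragment such as

       struct s { int pad; unsigned f : 3; } x;
       ... x.f ...

   a call like

       tree inner = get_inner_reference (ref, &bitsize, &bitpos, &offset,
                                         &mode, &unsignedp, &volatilep);

   would typically return the VAR_DECL for `x' with *PBITSIZE == 3,
   *PBITPOS == 32 on a target with 32-bit int, *POFFSET == 0,
   *PMODE == VOIDmode (a bit-field) and *PUNSIGNEDP nonzero.  */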
5344
5345 tree
5346 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5347 HOST_WIDE_INT *pbitpos, tree *poffset,
5348 enum machine_mode *pmode, int *punsignedp,
5349 int *pvolatilep)
5350 {
5351 tree size_tree = 0;
5352 enum machine_mode mode = VOIDmode;
5353 tree offset = size_zero_node;
5354 tree bit_offset = bitsize_zero_node;
5355 tree placeholder_ptr = 0;
5356 tree tem;
5357
5358 /* First get the mode, signedness, and size. We do this from just the
5359 outermost expression. */
5360 if (TREE_CODE (exp) == COMPONENT_REF)
5361 {
5362 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5363 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5364 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5365
5366 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5367 }
5368 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5369 {
5370 size_tree = TREE_OPERAND (exp, 1);
5371 *punsignedp = TREE_UNSIGNED (exp);
5372 }
5373 else
5374 {
5375 mode = TYPE_MODE (TREE_TYPE (exp));
5376 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5377
5378 if (mode == BLKmode)
5379 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5380 else
5381 *pbitsize = GET_MODE_BITSIZE (mode);
5382 }
5383
5384 if (size_tree != 0)
5385 {
5386 if (! host_integerp (size_tree, 1))
5387 mode = BLKmode, *pbitsize = -1;
5388 else
5389 *pbitsize = tree_low_cst (size_tree, 1);
5390 }
5391
5392 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5393 and find the ultimate containing object. */
5394 while (1)
5395 {
5396 if (TREE_CODE (exp) == BIT_FIELD_REF)
5397 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5398 else if (TREE_CODE (exp) == COMPONENT_REF)
5399 {
5400 tree field = TREE_OPERAND (exp, 1);
5401 tree this_offset = DECL_FIELD_OFFSET (field);
5402
5403 /* If this field hasn't been filled in yet, don't go
5404 past it. This should only happen when folding expressions
5405 made during type construction. */
5406 if (this_offset == 0)
5407 break;
5408 else if (CONTAINS_PLACEHOLDER_P (this_offset))
5409 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5410
5411 offset = size_binop (PLUS_EXPR, offset, this_offset);
5412 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5413 DECL_FIELD_BIT_OFFSET (field));
5414
5415 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5416 }
5417
5418 else if (TREE_CODE (exp) == ARRAY_REF
5419 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5420 {
5421 tree index = TREE_OPERAND (exp, 1);
5422 tree array = TREE_OPERAND (exp, 0);
5423 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5424 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5425 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5426
5427 /* We assume all arrays have sizes that are a multiple of a byte.
5428 First subtract the lower bound, if any, in the type of the
5429 index, then convert to sizetype and multiply by the size of the
5430 array element. */
5431 if (low_bound != 0 && ! integer_zerop (low_bound))
5432 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5433 index, low_bound));
5434
5435 /* If the index has a self-referential type, wrap it in a
5436 WITH_RECORD_EXPR referring to our expression; if the component
5437 size is self-referential, wrap it in one referring to the array. */
5438 if (CONTAINS_PLACEHOLDER_P (index))
5439 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5440 if (CONTAINS_PLACEHOLDER_P (unit_size))
5441 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5442
5443 offset = size_binop (PLUS_EXPR, offset,
5444 size_binop (MULT_EXPR,
5445 convert (sizetype, index),
5446 unit_size));
5447 }
5448
5449 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5450 {
5451 tree new = find_placeholder (exp, &placeholder_ptr);
5452
5453 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5454 We might have been called from tree optimization where we
5455 haven't set up an object yet. */
5456 if (new == 0)
5457 break;
5458 else
5459 exp = new;
5460
5461 continue;
5462 }
5463
5464 /* We can go inside most conversions: all NON_LVALUE_EXPRs, all normal
5465 conversions that don't change the mode, and all view conversions
5466 except those that need to "step up" the alignment. */
5467 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5468 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5469 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5470 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5471 && STRICT_ALIGNMENT
5472 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5473 < BIGGEST_ALIGNMENT)
5474 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5475 || TYPE_ALIGN_OK (TREE_TYPE
5476 (TREE_OPERAND (exp, 0))))))
5477 && ! ((TREE_CODE (exp) == NOP_EXPR
5478 || TREE_CODE (exp) == CONVERT_EXPR)
5479 && (TYPE_MODE (TREE_TYPE (exp))
5480 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5481 break;
5482
5483 /* If any reference in the chain is volatile, the effect is volatile. */
5484 if (TREE_THIS_VOLATILE (exp))
5485 *pvolatilep = 1;
5486
5487 exp = TREE_OPERAND (exp, 0);
5488 }
5489
5490 /* If OFFSET is constant, see if we can return the whole thing as a
5491 constant bit position. Otherwise, split it up. */
5492 if (host_integerp (offset, 0)
5493 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5494 bitsize_unit_node))
5495 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5496 && host_integerp (tem, 0))
5497 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5498 else
5499 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5500
5501 *pmode = mode;
5502 return exp;
5503 }
5504
5505 /* Return 1 if T is an expression that get_inner_reference handles. */
5506
5507 int
5508 handled_component_p (tree t)
5509 {
5510 switch (TREE_CODE (t))
5511 {
5512 case BIT_FIELD_REF:
5513 case COMPONENT_REF:
5514 case ARRAY_REF:
5515 case ARRAY_RANGE_REF:
5516 case NON_LVALUE_EXPR:
5517 case VIEW_CONVERT_EXPR:
5518 return 1;
5519
5520 /* ??? Sure they are handled, but get_inner_reference may return
5521 a different PBITSIZE, depending upon whether the expression is
5522 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
5523 case NOP_EXPR:
5524 case CONVERT_EXPR:
5525 return (TYPE_MODE (TREE_TYPE (t))
5526 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5527
5528 default:
5529 return 0;
5530 }
5531 }
5532 \f
5533 /* Given an rtx VALUE that may contain additions and multiplications, return
5534 an equivalent value that just refers to a register, memory, or constant.
5535 This is done by generating instructions to perform the arithmetic and
5536 returning a pseudo-register containing the value.
5537
5538 The returned value may be a REG, SUBREG, MEM or constant. */
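
/* A sketch of typical use (illustrative only, not taken from a real
   target): given

       VALUE = (plus:SI (mult:SI (reg:SI 70) (const_int 4)) (const_int 8))

   force_operand emits the multiply and the add as separate insns and
   returns a pseudo register holding the sum, which the caller can then
   use directly as a memory address or as an instruction operand.  */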
5539
5540 rtx
5541 force_operand (rtx value, rtx target)
5542 {
5543 rtx op1, op2;
5544 /* Use subtarget as the target for operand 0 of a binary operation. */
5545 rtx subtarget = get_subtarget (target);
5546 enum rtx_code code = GET_CODE (value);
5547
5548 /* Check for a PIC address load. */
5549 if ((code == PLUS || code == MINUS)
5550 && XEXP (value, 0) == pic_offset_table_rtx
5551 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5552 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5553 || GET_CODE (XEXP (value, 1)) == CONST))
5554 {
5555 if (!subtarget)
5556 subtarget = gen_reg_rtx (GET_MODE (value));
5557 emit_move_insn (subtarget, value);
5558 return subtarget;
5559 }
5560
5561 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5562 {
5563 if (!target)
5564 target = gen_reg_rtx (GET_MODE (value));
5565 convert_move (target, force_operand (XEXP (value, 0), NULL),
5566 code == ZERO_EXTEND);
5567 return target;
5568 }
5569
5570 if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
5571 {
5572 op2 = XEXP (value, 1);
5573 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
5574 subtarget = 0;
5575 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5576 {
5577 code = PLUS;
5578 op2 = negate_rtx (GET_MODE (value), op2);
5579 }
5580
5581 /* Check for an addition with OP2 a constant integer and our first
5582 operand a PLUS of a virtual register and something else. In that
5583 case, we want to emit the sum of the virtual register and the
5584 constant first and then add the other value. This allows virtual
5585 register instantiation to simply modify the constant rather than
5586 creating another one around this addition. */
5587 if (code == PLUS && GET_CODE (op2) == CONST_INT
5588 && GET_CODE (XEXP (value, 0)) == PLUS
5589 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5590 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5591 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5592 {
5593 rtx temp = expand_simple_binop (GET_MODE (value), code,
5594 XEXP (XEXP (value, 0), 0), op2,
5595 subtarget, 0, OPTAB_LIB_WIDEN);
5596 return expand_simple_binop (GET_MODE (value), code, temp,
5597 force_operand (XEXP (XEXP (value,
5598 0), 1), 0),
5599 target, 0, OPTAB_LIB_WIDEN);
5600 }
5601
5602 op1 = force_operand (XEXP (value, 0), subtarget);
5603 op2 = force_operand (op2, NULL_RTX);
5604 switch (code)
5605 {
5606 case MULT:
5607 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5608 case DIV:
5609 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5610 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5611 target, 1, OPTAB_LIB_WIDEN);
5612 else
5613 return expand_divmod (0,
5614 FLOAT_MODE_P (GET_MODE (value))
5615 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5616 GET_MODE (value), op1, op2, target, 0);
5617 break;
5618 case MOD:
5619 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5620 target, 0);
5621 break;
5622 case UDIV:
5623 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5624 target, 1);
5625 break;
5626 case UMOD:
5627 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5628 target, 1);
5629 break;
5630 case ASHIFTRT:
5631 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5632 target, 0, OPTAB_LIB_WIDEN);
5633 break;
5634 default:
5635 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5636 target, 1, OPTAB_LIB_WIDEN);
5637 }
5638 }
5639 if (GET_RTX_CLASS (code) == '1')
5640 {
5641 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5642 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5643 }
5644
5645 #ifdef INSN_SCHEDULING
5646 /* On machines that have insn scheduling, we want all memory references to be
5647 explicit, so we need to deal with paradoxical SUBREGs of MEMs here. */
5648 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5649 && (GET_MODE_SIZE (GET_MODE (value))
5650 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5651 value
5652 = simplify_gen_subreg (GET_MODE (value),
5653 force_reg (GET_MODE (SUBREG_REG (value)),
5654 force_operand (SUBREG_REG (value),
5655 NULL_RTX)),
5656 GET_MODE (SUBREG_REG (value)),
5657 SUBREG_BYTE (value));
5658 #endif
5659
5660 return value;
5661 }
5662 \f
5663 /* Subroutine of expand_expr: return nonzero iff there is no way that
5664 EXP can reference X, which is being modified. TOP_P is nonzero if this
5665 call is going to be used to determine whether we need a temporary
5666 for EXP, as opposed to a recursive call to this function.
5667
5668 It is always safe for this routine to return zero since it merely
5669 searches for optimization opportunities. */
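
/* A typical use is the one in expand_operands below:

       if (! safe_from_p (target, exp1, 1))
         target = 0;

   i.e. discard a suggested target rather than risk having EXP1 read X
   after X has already been partially overwritten.  */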
5670
5671 int
5672 safe_from_p (rtx x, tree exp, int top_p)
5673 {
5674 rtx exp_rtl = 0;
5675 int i, nops;
5676 static tree save_expr_list;
5677
5678 if (x == 0
5679 /* If EXP has varying size, we MUST use a target since we currently
5680 have no way of allocating temporaries of variable size
5681 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5682 So we assume here that something at a higher level has prevented a
5683 clash. This is somewhat bogus, but the best we can do. Only
5684 do this when X is BLKmode and when we are at the top level. */
5685 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5686 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5687 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5688 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5689 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5690 != INTEGER_CST)
5691 && GET_MODE (x) == BLKmode)
5692 /* If X is in the outgoing argument area, it is always safe. */
5693 || (GET_CODE (x) == MEM
5694 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5695 || (GET_CODE (XEXP (x, 0)) == PLUS
5696 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5697 return 1;
5698
5699 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5700 find the underlying pseudo. */
5701 if (GET_CODE (x) == SUBREG)
5702 {
5703 x = SUBREG_REG (x);
5704 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5705 return 0;
5706 }
5707
5708 /* A SAVE_EXPR might appear many times in the expression passed to the
5709 top-level safe_from_p call, and if it has a complex subexpression,
5710 examining it multiple times could result in a combinatorial explosion.
5711 E.g. on an Alpha running at least 200MHz, a Fortran testcase compiled
5712 with optimization took about 28 minutes to compile -- even though it was
5713 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5714 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5715 we have processed. Note that the only test of top_p was above. */
5716
5717 if (top_p)
5718 {
5719 int rtn;
5720 tree t;
5721
5722 save_expr_list = 0;
5723
5724 rtn = safe_from_p (x, exp, 0);
5725
5726 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5727 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5728
5729 return rtn;
5730 }
5731
5732 /* Now look at our tree code and possibly recurse. */
5733 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5734 {
5735 case 'd':
5736 exp_rtl = DECL_RTL_IF_SET (exp);
5737 break;
5738
5739 case 'c':
5740 return 1;
5741
5742 case 'x':
5743 if (TREE_CODE (exp) == TREE_LIST)
5744 {
5745 while (1)
5746 {
5747 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5748 return 0;
5749 exp = TREE_CHAIN (exp);
5750 if (!exp)
5751 return 1;
5752 if (TREE_CODE (exp) != TREE_LIST)
5753 return safe_from_p (x, exp, 0);
5754 }
5755 }
5756 else if (TREE_CODE (exp) == ERROR_MARK)
5757 return 1; /* An already-visited SAVE_EXPR? */
5758 else
5759 return 0;
5760
5761 case '2':
5762 case '<':
5763 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
5764 return 0;
5765 /* FALLTHRU */
5766
5767 case '1':
5768 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5769
5770 case 'e':
5771 case 'r':
5772 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5773 the expression. If it is set, we conflict iff we are that rtx or
5774 both are in memory. Otherwise, we check all operands of the
5775 expression recursively. */
5776
5777 switch (TREE_CODE (exp))
5778 {
5779 case ADDR_EXPR:
5780 /* If the operand is static or we are static, we can't conflict.
5781 Likewise if we don't conflict with the operand at all. */
5782 if (staticp (TREE_OPERAND (exp, 0))
5783 || TREE_STATIC (exp)
5784 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5785 return 1;
5786
5787 /* Otherwise, the only way this can conflict is if we are taking
5788 the address of a DECL whose address is part of X, which is
5789 very rare. */
5790 exp = TREE_OPERAND (exp, 0);
5791 if (DECL_P (exp))
5792 {
5793 if (!DECL_RTL_SET_P (exp)
5794 || GET_CODE (DECL_RTL (exp)) != MEM)
5795 return 0;
5796 else
5797 exp_rtl = XEXP (DECL_RTL (exp), 0);
5798 }
5799 break;
5800
5801 case INDIRECT_REF:
5802 if (GET_CODE (x) == MEM
5803 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5804 get_alias_set (exp)))
5805 return 0;
5806 break;
5807
5808 case CALL_EXPR:
5809 /* Assume that the call will clobber all hard registers and
5810 all of memory. */
5811 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5812 || GET_CODE (x) == MEM)
5813 return 0;
5814 break;
5815
5816 case RTL_EXPR:
5817 /* If a sequence exists, we would have to scan every instruction
5818 in the sequence to see if it was safe. This is probably not
5819 worthwhile. */
5820 if (RTL_EXPR_SEQUENCE (exp))
5821 return 0;
5822
5823 exp_rtl = RTL_EXPR_RTL (exp);
5824 break;
5825
5826 case WITH_CLEANUP_EXPR:
5827 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5828 break;
5829
5830 case CLEANUP_POINT_EXPR:
5831 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5832
5833 case SAVE_EXPR:
5834 exp_rtl = SAVE_EXPR_RTL (exp);
5835 if (exp_rtl)
5836 break;
5837
5838 /* If we've already scanned this, don't do it again. Otherwise,
5839 show we've scanned it and record for clearing the flag if we're
5840 going on. */
5841 if (TREE_PRIVATE (exp))
5842 return 1;
5843
5844 TREE_PRIVATE (exp) = 1;
5845 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5846 {
5847 TREE_PRIVATE (exp) = 0;
5848 return 0;
5849 }
5850
5851 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5852 return 1;
5853
5854 case BIND_EXPR:
5855 /* The only operand we look at is operand 1. The rest aren't
5856 part of the expression. */
5857 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5858
5859 default:
5860 break;
5861 }
5862
5863 /* If we have an rtx, we do not need to scan our operands. */
5864 if (exp_rtl)
5865 break;
5866
5867 nops = first_rtl_op (TREE_CODE (exp));
5868 for (i = 0; i < nops; i++)
5869 if (TREE_OPERAND (exp, i) != 0
5870 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5871 return 0;
5872
5873 /* If this is a language-specific tree code, it may require
5874 special handling. */
5875 if ((unsigned int) TREE_CODE (exp)
5876 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5877 && !(*lang_hooks.safe_from_p) (x, exp))
5878 return 0;
5879 }
5880
5881 /* If we have an rtl, find any enclosed object. Then see if we conflict
5882 with it. */
5883 if (exp_rtl)
5884 {
5885 if (GET_CODE (exp_rtl) == SUBREG)
5886 {
5887 exp_rtl = SUBREG_REG (exp_rtl);
5888 if (GET_CODE (exp_rtl) == REG
5889 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5890 return 0;
5891 }
5892
5893 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5894 are memory and they conflict. */
5895 return ! (rtx_equal_p (x, exp_rtl)
5896 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5897 && true_dependence (exp_rtl, VOIDmode, x,
5898 rtx_addr_varies_p)));
5899 }
5900
5901 /* If we reach here, it is safe. */
5902 return 1;
5903 }
5904
5905 /* Subroutine of expand_expr: return rtx if EXP is a
5906 variable or parameter; else return 0. */
5907
5908 static rtx
5909 var_rtx (tree exp)
5910 {
5911 STRIP_NOPS (exp);
5912 switch (TREE_CODE (exp))
5913 {
5914 case PARM_DECL:
5915 case VAR_DECL:
5916 return DECL_RTL (exp);
5917 default:
5918 return 0;
5919 }
5920 }
5921
5922 #ifdef MAX_INTEGER_COMPUTATION_MODE
5923
5924 void
5925 check_max_integer_computation_mode (tree exp)
5926 {
5927 enum tree_code code;
5928 enum machine_mode mode;
5929
5930 /* Strip any NOPs that don't change the mode. */
5931 STRIP_NOPS (exp);
5932 code = TREE_CODE (exp);
5933
5934 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5935 if (code == NOP_EXPR
5936 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5937 return;
5938
5939 /* First check the type of the overall operation. We need only look at
5940 unary, binary and relational operations. */
5941 if (TREE_CODE_CLASS (code) == '1'
5942 || TREE_CODE_CLASS (code) == '2'
5943 || TREE_CODE_CLASS (code) == '<')
5944 {
5945 mode = TYPE_MODE (TREE_TYPE (exp));
5946 if (GET_MODE_CLASS (mode) == MODE_INT
5947 && mode > MAX_INTEGER_COMPUTATION_MODE)
5948 internal_error ("unsupported wide integer operation");
5949 }
5950
5951 /* Check operand of a unary op. */
5952 if (TREE_CODE_CLASS (code) == '1')
5953 {
5954 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5955 if (GET_MODE_CLASS (mode) == MODE_INT
5956 && mode > MAX_INTEGER_COMPUTATION_MODE)
5957 internal_error ("unsupported wide integer operation");
5958 }
5959
5960 /* Check operands of a binary/comparison op. */
5961 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5962 {
5963 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5964 if (GET_MODE_CLASS (mode) == MODE_INT
5965 && mode > MAX_INTEGER_COMPUTATION_MODE)
5966 internal_error ("unsupported wide integer operation");
5967
5968 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5969 if (GET_MODE_CLASS (mode) == MODE_INT
5970 && mode > MAX_INTEGER_COMPUTATION_MODE)
5971 internal_error ("unsupported wide integer operation");
5972 }
5973 }
5974 #endif
5975 \f
5976 /* Return the highest power of two that EXP is known to be a multiple of.
5977 This is used in updating alignment of MEMs in array references. */
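
/* Illustrative only: for EXP representing `i * 12 + 4' this returns
   MIN (highest_pow2_factor (i * 12), highest_pow2_factor (4))
   == MIN (4, 4) == 4, and for a plain INTEGER_CST such as 48 it
   returns the lowest set bit, 16.  */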
5978
5979 static unsigned HOST_WIDE_INT
5980 highest_pow2_factor (tree exp)
5981 {
5982 unsigned HOST_WIDE_INT c0, c1;
5983
5984 switch (TREE_CODE (exp))
5985 {
5986 case INTEGER_CST:
5987 /* We can find the lowest bit that's a one. If the low
5988 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
5989 We need to handle this case since we can find it in a COND_EXPR,
5990 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
5991 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
5992 later ICE. */
5993 if (TREE_CONSTANT_OVERFLOW (exp))
5994 return BIGGEST_ALIGNMENT;
5995 else
5996 {
5997 /* Note: tree_low_cst is intentionally not used here;
5998 we don't care about the upper bits. */
5999 c0 = TREE_INT_CST_LOW (exp);
6000 c0 &= -c0;
6001 return c0 ? c0 : BIGGEST_ALIGNMENT;
6002 }
6003 break;
6004
6005 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6006 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6007 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6008 return MIN (c0, c1);
6009
6010 case MULT_EXPR:
6011 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6012 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6013 return c0 * c1;
6014
6015 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6016 case CEIL_DIV_EXPR:
6017 if (integer_pow2p (TREE_OPERAND (exp, 1))
6018 && host_integerp (TREE_OPERAND (exp, 1), 1))
6019 {
6020 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6021 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6022 return MAX (1, c0 / c1);
6023 }
6024 break;
6025
6026 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6027 case SAVE_EXPR: case WITH_RECORD_EXPR:
6028 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6029
6030 case COMPOUND_EXPR:
6031 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6032
6033 case COND_EXPR:
6034 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6035 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6036 return MIN (c0, c1);
6037
6038 default:
6039 break;
6040 }
6041
6042 return 1;
6043 }
6044
6045 /* Similar, except that it is known that the expression must be a multiple
6046 of the alignment of TYPE. */
6047
6048 static unsigned HOST_WIDE_INT
6049 highest_pow2_factor_for_type (tree type, tree exp)
6050 {
6051 unsigned HOST_WIDE_INT type_align, factor;
6052
6053 factor = highest_pow2_factor (exp);
6054 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6055 return MAX (factor, type_align);
6056 }
6057 \f
6058 /* Return an object on the placeholder list that matches EXP, a
6059 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6060 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6061 tree.def. If no such object is found, return 0. If PLIST is nonzero,
6062 it is the address of a variable initially holding a starting point in
6063 the placeholder list (zero meaning the start of the list), into which a
6064 pointer to the list node where the object was found is stored. */
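
/* Such placeholders arise chiefly from self-referential types (for
   example an Ada record whose field sizes depend on a discriminant of
   the record itself); expanding a WITH_RECORD_EXPR pushes the concrete
   object onto PLACEHOLDER_LIST so that this lookup can substitute it
   for the PLACEHOLDER_EXPR.  */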
6065
6066 tree
6067 find_placeholder (tree exp, tree *plist)
6068 {
6069 tree type = TREE_TYPE (exp);
6070 tree placeholder_expr;
6071
6072 for (placeholder_expr
6073 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6074 placeholder_expr != 0;
6075 placeholder_expr = TREE_CHAIN (placeholder_expr))
6076 {
6077 tree need_type = TYPE_MAIN_VARIANT (type);
6078 tree elt;
6079
6080 /* Find the outermost reference that is of the type we want. If none,
6081 see if any object has a type that is a pointer to the type we
6082 want. */
6083 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6084 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6085 || TREE_CODE (elt) == COND_EXPR)
6086 ? TREE_OPERAND (elt, 1)
6087 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6088 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6089 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6090 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6091 ? TREE_OPERAND (elt, 0) : 0))
6092 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6093 {
6094 if (plist)
6095 *plist = placeholder_expr;
6096 return elt;
6097 }
6098
6099 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6100 elt
6101 = ((TREE_CODE (elt) == COMPOUND_EXPR
6102 || TREE_CODE (elt) == COND_EXPR)
6103 ? TREE_OPERAND (elt, 1)
6104 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6105 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6106 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6107 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6108 ? TREE_OPERAND (elt, 0) : 0))
6109 if (POINTER_TYPE_P (TREE_TYPE (elt))
6110 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6111 == need_type))
6112 {
6113 if (plist)
6114 *plist = placeholder_expr;
6115 return build1 (INDIRECT_REF, need_type, elt);
6116 }
6117 }
6118
6119 return 0;
6120 }
6121
6122 /* Subroutine of expand_expr. Expand the two operands of a binary
6123 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6124 The value may be stored in TARGET if TARGET is nonzero. The
6125 MODIFIER argument is as documented by expand_expr. */
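
/* Illustrative only: the binary-operator cases of expand_expr call this
   roughly as

       expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                        subtarget, &op0, &op1, 0);

   after which OP0 and OP1 hold the expanded operands.  */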
6126
6127 static void
6128 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6129 enum expand_modifier modifier)
6130 {
6131 if (! safe_from_p (target, exp1, 1))
6132 target = 0;
6133 if (operand_equal_p (exp0, exp1, 0))
6134 {
6135 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6136 *op1 = copy_rtx (*op0);
6137 }
6138 else
6139 {
6140 /* If we need to preserve evaluation order, copy exp0 into its own
6141 temporary variable so that it can't be clobbered by exp1. */
6142 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6143 exp0 = save_expr (exp0);
6144 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6145 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6146 }
6147 }
6148
6149 \f
6150 /* expand_expr: generate code for computing expression EXP.
6151 An rtx for the computed value is returned. The value is never null.
6152 In the case of a void EXP, const0_rtx is returned.
6153
6154 The value may be stored in TARGET if TARGET is nonzero.
6155 TARGET is just a suggestion; callers must assume that
6156 the rtx returned may not be the same as TARGET.
6157
6158 If TARGET is CONST0_RTX, it means that the value will be ignored.
6159
6160 If TMODE is not VOIDmode, it suggests generating the
6161 result in mode TMODE. But this is done only when convenient.
6162 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6163 TMODE is just a suggestion; callers must assume that
6164 the rtx returned may not have mode TMODE.
6165
6166 Note that TARGET may have neither TMODE nor MODE. In that case, it
6167 probably will not be used.
6168
6169 If MODIFIER is EXPAND_SUM then when EXP is an addition
6170 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6171 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6172 products as above, or REG or MEM, or constant.
6173 Ordinarily in such cases we would output mul or add instructions
6174 and then return a pseudo reg containing the sum.
6175
6176 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6177 it also marks a label as absolutely required (it can't be dead).
6178 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6179 This is used for outputting expressions used in initializers.
6180
6181 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6182 with a constant address even if that address is not normally legitimate.
6183 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6184
6185 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6186 a call parameter. Such targets require special care as we haven't yet
6187 marked TARGET so that it's safe from being trashed by libcalls. We
6188 don't want to use TARGET for anything but the final result;
6189 intermediate values must go elsewhere. Additionally, calls to
6190 emit_block_move will be flagged with BLOCK_OP_CALL_PARM. */
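
/* Illustrative only: a caller that merely wants EXP's value in whatever
   place is convenient can write

       rtx val = expand_expr (exp, NULL_RTX, VOIDmode, 0);

   while a caller that wants only the side effects passes const0_rtx as
   TARGET, as several places in this file do.  */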
6191
6192 rtx
6193 expand_expr (tree exp, rtx target, enum machine_mode tmode,
6194 enum expand_modifier modifier)
6195 {
6196 rtx op0, op1, temp;
6197 tree type = TREE_TYPE (exp);
6198 int unsignedp = TREE_UNSIGNED (type);
6199 enum machine_mode mode;
6200 enum tree_code code = TREE_CODE (exp);
6201 optab this_optab;
6202 rtx subtarget, original_target;
6203 int ignore;
6204 tree context;
6205
6206 /* Handle ERROR_MARK before anybody tries to access its type. */
6207 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6208 {
6209 op0 = CONST0_RTX (tmode);
6210 if (op0 != 0)
6211 return op0;
6212 return const0_rtx;
6213 }
6214
6215 mode = TYPE_MODE (type);
6216 /* Use subtarget as the target for operand 0 of a binary operation. */
6217 subtarget = get_subtarget (target);
6218 original_target = target;
6219 ignore = (target == const0_rtx
6220 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6221 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6222 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6223 && TREE_CODE (type) == VOID_TYPE));
6224
6225 /* If we are going to ignore this result, we need only do something
6226 if there is a side-effect somewhere in the expression. If there
6227 is, short-circuit the most common cases here. Note that we must
6228 not call expand_expr with anything but const0_rtx in case this
6229 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6230
6231 if (ignore)
6232 {
6233 if (! TREE_SIDE_EFFECTS (exp))
6234 return const0_rtx;
6235
6236 /* Ensure we reference a volatile object even if value is ignored, but
6237 don't do this if all we are doing is taking its address. */
6238 if (TREE_THIS_VOLATILE (exp)
6239 && TREE_CODE (exp) != FUNCTION_DECL
6240 && mode != VOIDmode && mode != BLKmode
6241 && modifier != EXPAND_CONST_ADDRESS)
6242 {
6243 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6244 if (GET_CODE (temp) == MEM)
6245 temp = copy_to_reg (temp);
6246 return const0_rtx;
6247 }
6248
6249 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6250 || code == INDIRECT_REF || code == BUFFER_REF)
6251 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6252 modifier);
6253
6254 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6255 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6256 {
6257 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6258 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6259 return const0_rtx;
6260 }
6261 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6262 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6263 /* If the second operand has no side effects, just evaluate
6264 the first. */
6265 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6266 modifier);
6267 else if (code == BIT_FIELD_REF)
6268 {
6269 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6270 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6271 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6272 return const0_rtx;
6273 }
6274
6275 target = 0;
6276 }
6277
6278 #ifdef MAX_INTEGER_COMPUTATION_MODE
6279 /* Only check stuff here if the mode we want is different from the mode
6280 of the expression; if it's the same, check_max_integer_computation_mode
6281 will handle it. Do we really need to check this stuff at all? */
6282
6283 if (target
6284 && GET_MODE (target) != mode
6285 && TREE_CODE (exp) != INTEGER_CST
6286 && TREE_CODE (exp) != PARM_DECL
6287 && TREE_CODE (exp) != ARRAY_REF
6288 && TREE_CODE (exp) != ARRAY_RANGE_REF
6289 && TREE_CODE (exp) != COMPONENT_REF
6290 && TREE_CODE (exp) != BIT_FIELD_REF
6291 && TREE_CODE (exp) != INDIRECT_REF
6292 && TREE_CODE (exp) != CALL_EXPR
6293 && TREE_CODE (exp) != VAR_DECL
6294 && TREE_CODE (exp) != RTL_EXPR)
6295 {
6296 enum machine_mode mode = GET_MODE (target);
6297
6298 if (GET_MODE_CLASS (mode) == MODE_INT
6299 && mode > MAX_INTEGER_COMPUTATION_MODE)
6300 internal_error ("unsupported wide integer operation");
6301 }
6302
6303 if (tmode != mode
6304 && TREE_CODE (exp) != INTEGER_CST
6305 && TREE_CODE (exp) != PARM_DECL
6306 && TREE_CODE (exp) != ARRAY_REF
6307 && TREE_CODE (exp) != ARRAY_RANGE_REF
6308 && TREE_CODE (exp) != COMPONENT_REF
6309 && TREE_CODE (exp) != BIT_FIELD_REF
6310 && TREE_CODE (exp) != INDIRECT_REF
6311 && TREE_CODE (exp) != VAR_DECL
6312 && TREE_CODE (exp) != CALL_EXPR
6313 && TREE_CODE (exp) != RTL_EXPR
6314 && GET_MODE_CLASS (tmode) == MODE_INT
6315 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6316 internal_error ("unsupported wide integer operation");
6317
6318 check_max_integer_computation_mode (exp);
6319 #endif
6320
6321 /* If we will do cse, generate all results into pseudo registers
6322 since 1) that allows cse to find more things
6323 and 2) otherwise cse could produce an insn the machine
6324 cannot support. An exception is a CONSTRUCTOR into a multi-word
6325 MEM: that's much more likely to be most efficient into the MEM.
6326 Another is a CALL_EXPR which must return in memory. */
6327
6328 if (! cse_not_expected && mode != BLKmode && target
6329 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6330 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6331 && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
6332 target = 0;
6333
6334 switch (code)
6335 {
6336 case LABEL_DECL:
6337 {
6338 tree function = decl_function_context (exp);
6339 /* Labels in containing functions, or labels used from initializers,
6340 must be forced. */
6341 if (modifier == EXPAND_INITIALIZER
6342 || (function != current_function_decl
6343 && function != inline_function_decl
6344 && function != 0))
6345 temp = force_label_rtx (exp);
6346 else
6347 temp = label_rtx (exp);
6348
6349 temp = gen_rtx_MEM (FUNCTION_MODE, gen_rtx_LABEL_REF (Pmode, temp));
6350 if (function != current_function_decl
6351 && function != inline_function_decl && function != 0)
6352 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6353 return temp;
6354 }
6355
6356 case PARM_DECL:
6357 if (!DECL_RTL_SET_P (exp))
6358 {
6359 error ("%Jprior parameter's size depends on '%D'", exp, exp);
6360 return CONST0_RTX (mode);
6361 }
6362
6363 /* ... fall through ... */
6364
6365 case VAR_DECL:
6366 /* If a static var's type was incomplete when the decl was written,
6367 but the type is complete now, lay out the decl now. */
6368 if (DECL_SIZE (exp) == 0
6369 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6370 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6371 layout_decl (exp, 0);
6372
6373 /* ... fall through ... */
6374
6375 case FUNCTION_DECL:
6376 case RESULT_DECL:
6377 if (DECL_RTL (exp) == 0)
6378 abort ();
6379
6380 /* Ensure the variable is marked as used even if it doesn't go through
6381 a parser. If it hasn't been used yet, write out an external
6382 definition. */
6383 if (! TREE_USED (exp))
6384 {
6385 assemble_external (exp);
6386 TREE_USED (exp) = 1;
6387 }
6388
6389 /* Show we haven't gotten RTL for this yet. */
6390 temp = 0;
6391
6392 /* Handle variables inherited from containing functions. */
6393 context = decl_function_context (exp);
6394
6395 /* We treat inline_function_decl as an alias for the current function
6396 because that is the inline function whose vars, types, etc.
6397 are being merged into the current function.
6398 See expand_inline_function. */
6399
6400 if (context != 0 && context != current_function_decl
6401 && context != inline_function_decl
6402 /* If var is static, we don't need a static chain to access it. */
6403 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6404 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6405 {
6406 rtx addr;
6407
6408 /* Mark as non-local and addressable. */
6409 DECL_NONLOCAL (exp) = 1;
6410 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6411 abort ();
6412 (*lang_hooks.mark_addressable) (exp);
6413 if (GET_CODE (DECL_RTL (exp)) != MEM)
6414 abort ();
6415 addr = XEXP (DECL_RTL (exp), 0);
6416 if (GET_CODE (addr) == MEM)
6417 addr
6418 = replace_equiv_address (addr,
6419 fix_lexical_addr (XEXP (addr, 0), exp));
6420 else
6421 addr = fix_lexical_addr (addr, exp);
6422
6423 temp = replace_equiv_address (DECL_RTL (exp), addr);
6424 }
6425
6426 /* This is the case of an array whose size is to be determined
6427 from its initializer, while the initializer is still being parsed.
6428 See expand_decl. */
6429
6430 else if (GET_CODE (DECL_RTL (exp)) == MEM
6431 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6432 temp = validize_mem (DECL_RTL (exp));
6433
6434 /* If DECL_RTL is memory, we are in the normal case; if either the
6435 address is not valid, or it is not a register and -fforce-addr is
6436 specified, get the address into a register. */
6437
6438 else if (GET_CODE (DECL_RTL (exp)) == MEM
6439 && modifier != EXPAND_CONST_ADDRESS
6440 && modifier != EXPAND_SUM
6441 && modifier != EXPAND_INITIALIZER
6442 && (! memory_address_p (DECL_MODE (exp),
6443 XEXP (DECL_RTL (exp), 0))
6444 || (flag_force_addr
6445 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6446 temp = replace_equiv_address (DECL_RTL (exp),
6447 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6448
6449 /* If we got something, return it. But first, set the alignment
6450 if the address is a register. */
6451 if (temp != 0)
6452 {
6453 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6454 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6455
6456 return temp;
6457 }
6458
6459 /* If the mode of DECL_RTL does not match that of the decl, it
6460 must be a promoted value. We return a SUBREG of the wanted mode,
6461 but mark it so that we know that it was already extended. */
6462
6463 if (GET_CODE (DECL_RTL (exp)) == REG
6464 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6465 {
6466 /* Get the signedness used for this variable. Ensure we get the
6467 same mode we got when the variable was declared. */
6468 if (GET_MODE (DECL_RTL (exp))
6469 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6470 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6471 abort ();
6472
6473 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6474 SUBREG_PROMOTED_VAR_P (temp) = 1;
6475 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6476 return temp;
6477 }
6478
6479 return DECL_RTL (exp);
6480
6481 case INTEGER_CST:
6482 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6483 TREE_INT_CST_HIGH (exp), mode);
6484
6485 /* ??? If overflow is set, fold will have done an incomplete job,
6486 which can result in (plus xx (const_int 0)), which can get
6487 simplified by validate_replace_rtx during virtual register
6488 instantiation, which can result in unrecognizable insns.
6489 Avoid this by forcing all overflows into registers. */
6490 if (TREE_CONSTANT_OVERFLOW (exp)
6491 && modifier != EXPAND_INITIALIZER)
6492 temp = force_reg (mode, temp);
6493
6494 return temp;
6495
6496 case VECTOR_CST:
6497 return const_vector_from_tree (exp);
6498
6499 case CONST_DECL:
6500 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6501
6502 case REAL_CST:
6503 /* If optimized, generate immediate CONST_DOUBLE
6504 which will be turned into memory by reload if necessary.
6505
6506 We used to force a register so that loop.c could see it. But
6507 this does not allow gen_* patterns to perform optimizations with
6508 the constants. It also produces two insns in cases like "x = 1.0;".
6509 On most machines, floating-point constants are not permitted in
6510 many insns, so we'd end up copying it to a register in any case.
6511
6512 Now, we do the copying in expand_binop, if appropriate. */
6513 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6514 TYPE_MODE (TREE_TYPE (exp)));
6515
6516 case COMPLEX_CST:
6517 /* Handle evaluating a complex constant in a CONCAT target. */
6518 if (original_target && GET_CODE (original_target) == CONCAT)
6519 {
6520 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6521 rtx rtarg, itarg;
6522
6523 rtarg = XEXP (original_target, 0);
6524 itarg = XEXP (original_target, 1);
6525
6526 /* Move the real and imaginary parts separately. */
6527 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6528 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6529
6530 if (op0 != rtarg)
6531 emit_move_insn (rtarg, op0);
6532 if (op1 != itarg)
6533 emit_move_insn (itarg, op1);
6534
6535 return original_target;
6536 }
6537
6538 /* ... fall through ... */
6539
6540 case STRING_CST:
6541 temp = output_constant_def (exp, 1);
6542
6543 /* temp contains a constant address.
6544 On RISC machines where a constant address isn't valid,
6545 make some insns to get that address into a register. */
6546 if (modifier != EXPAND_CONST_ADDRESS
6547 && modifier != EXPAND_INITIALIZER
6548 && modifier != EXPAND_SUM
6549 && (! memory_address_p (mode, XEXP (temp, 0))
6550 || flag_force_addr))
6551 return replace_equiv_address (temp,
6552 copy_rtx (XEXP (temp, 0)));
6553 return temp;
6554
6555 case EXPR_WITH_FILE_LOCATION:
6556 {
6557 rtx to_return;
6558 struct file_stack fs;
6559
6560 fs.location = input_location;
6561 fs.next = expr_wfl_stack;
6562 input_filename = EXPR_WFL_FILENAME (exp);
6563 input_line = EXPR_WFL_LINENO (exp);
6564 expr_wfl_stack = &fs;
6565 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6566 emit_line_note (input_location);
6567 /* Possibly avoid switching back and forth here. */
6568 to_return = expand_expr (EXPR_WFL_NODE (exp),
6569 (ignore ? const0_rtx : target),
6570 tmode, modifier);
6571 if (expr_wfl_stack != &fs)
6572 abort ();
6573 input_location = fs.location;
6574 expr_wfl_stack = fs.next;
6575 return to_return;
6576 }
6577
6578 case SAVE_EXPR:
6579 context = decl_function_context (exp);
6580
6581 /* If this SAVE_EXPR was at global context, assume we are an
6582 initialization function and move it into our context. */
6583 if (context == 0)
6584 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6585
6586 /* We treat inline_function_decl as an alias for the current function
6587 because that is the inline function whose vars, types, etc.
6588 are being merged into the current function.
6589 See expand_inline_function. */
6590 if (context == current_function_decl || context == inline_function_decl)
6591 context = 0;
6592
6593 /* If this is non-local, handle it. */
6594 if (context)
6595 {
6596 /* The following call just exists to abort if the context is
6597 not of a containing function. */
6598 find_function_data (context);
6599
6600 temp = SAVE_EXPR_RTL (exp);
6601 if (temp && GET_CODE (temp) == REG)
6602 {
6603 put_var_into_stack (exp, /*rescan=*/true);
6604 temp = SAVE_EXPR_RTL (exp);
6605 }
6606 if (temp == 0 || GET_CODE (temp) != MEM)
6607 abort ();
6608 return
6609 replace_equiv_address (temp,
6610 fix_lexical_addr (XEXP (temp, 0), exp));
6611 }
6612 if (SAVE_EXPR_RTL (exp) == 0)
6613 {
6614 if (mode == VOIDmode)
6615 temp = const0_rtx;
6616 else
6617 temp = assign_temp (build_qualified_type (type,
6618 (TYPE_QUALS (type)
6619 | TYPE_QUAL_CONST)),
6620 3, 0, 0);
6621
6622 SAVE_EXPR_RTL (exp) = temp;
6623 if (!optimize && GET_CODE (temp) == REG)
6624 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6625 save_expr_regs);
6626
6627 /* If the mode of TEMP does not match that of the expression, it
6628 must be a promoted value. We pass store_expr a SUBREG of the
6629 wanted mode but mark it so that we know that it was already
6630 extended. */
6631
6632 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6633 {
6634 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6635 promote_mode (type, mode, &unsignedp, 0);
6636 SUBREG_PROMOTED_VAR_P (temp) = 1;
6637 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6638 }
6639
6640 if (temp == const0_rtx)
6641 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6642 else
6643 store_expr (TREE_OPERAND (exp, 0), temp,
6644 modifier == EXPAND_STACK_PARM ? 2 : 0);
6645
6646 TREE_USED (exp) = 1;
6647 }
6648
6649 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6650 must be a promoted value. We return a SUBREG of the wanted mode,
6651 but mark it so that we know that it was already extended. */
6652
6653 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6654 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6655 {
6656 /* Compute the signedness and make the proper SUBREG. */
6657 promote_mode (type, mode, &unsignedp, 0);
6658 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6659 SUBREG_PROMOTED_VAR_P (temp) = 1;
6660 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6661 return temp;
6662 }
6663
6664 return SAVE_EXPR_RTL (exp);
6665
6666 case UNSAVE_EXPR:
6667 {
6668 rtx temp;
6669 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6670 TREE_OPERAND (exp, 0)
6671 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
6672 return temp;
6673 }
6674
6675 case PLACEHOLDER_EXPR:
6676 {
6677 tree old_list = placeholder_list;
6678 tree placeholder_expr = 0;
6679
6680 exp = find_placeholder (exp, &placeholder_expr);
6681 if (exp == 0)
6682 abort ();
6683
6684 placeholder_list = TREE_CHAIN (placeholder_expr);
6685 temp = expand_expr (exp, original_target, tmode, modifier);
6686 placeholder_list = old_list;
6687 return temp;
6688 }
6689
6690 case WITH_RECORD_EXPR:
6691 /* Put the object on the placeholder list, expand our first operand,
6692 and pop the list. */
6693 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6694 placeholder_list);
6695 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
6696 modifier);
6697 placeholder_list = TREE_CHAIN (placeholder_list);
6698 return target;
6699
6700 case GOTO_EXPR:
6701 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6702 expand_goto (TREE_OPERAND (exp, 0));
6703 else
6704 expand_computed_goto (TREE_OPERAND (exp, 0));
6705 return const0_rtx;
6706
6707 case EXIT_EXPR:
6708 expand_exit_loop_if_false (NULL,
6709 invert_truthvalue (TREE_OPERAND (exp, 0)));
6710 return const0_rtx;
6711
6712 case LABELED_BLOCK_EXPR:
6713 if (LABELED_BLOCK_BODY (exp))
6714 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
6715 /* Should perhaps use expand_label, but this is simpler and safer. */
6716 do_pending_stack_adjust ();
6717 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6718 return const0_rtx;
6719
6720 case EXIT_BLOCK_EXPR:
6721 if (EXIT_BLOCK_RETURN (exp))
6722 sorry ("returned value in block_exit_expr");
6723 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6724 return const0_rtx;
6725
6726 case LOOP_EXPR:
6727 push_temp_slots ();
6728 expand_start_loop (1);
6729 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
6730 expand_end_loop ();
6731 pop_temp_slots ();
6732
6733 return const0_rtx;
6734
6735 case BIND_EXPR:
6736 {
6737 tree vars = TREE_OPERAND (exp, 0);
6738
6739 /* Need to open a binding contour here because
6740 if there are any cleanups they must be contained here. */
6741 expand_start_bindings (2);
6742
6743 /* Mark the corresponding BLOCK for output in its proper place. */
6744 if (TREE_OPERAND (exp, 2) != 0
6745 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6746 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
6747
6748 /* If VARS have not yet been expanded, expand them now. */
6749 while (vars)
6750 {
6751 if (!DECL_RTL_SET_P (vars))
6752 expand_decl (vars);
6753 expand_decl_init (vars);
6754 vars = TREE_CHAIN (vars);
6755 }
6756
6757 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
6758
6759 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6760
6761 return temp;
6762 }
6763
6764 case RTL_EXPR:
6765 if (RTL_EXPR_SEQUENCE (exp))
6766 {
6767 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6768 abort ();
6769 emit_insn (RTL_EXPR_SEQUENCE (exp));
6770 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6771 }
6772 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6773 free_temps_for_rtl_expr (exp);
6774 return RTL_EXPR_RTL (exp);
6775
6776 case CONSTRUCTOR:
6777 /* If we don't need the result, just ensure we evaluate any
6778 subexpressions. */
6779 if (ignore)
6780 {
6781 tree elt;
6782
6783 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6784 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6785
6786 return const0_rtx;
6787 }
6788
6789 /* All elts simple constants => refer to a constant in memory. But
6790 if this is a non-BLKmode mode, let it store a field at a time
6791 since that should make a CONST_INT or CONST_DOUBLE when we
6792 fold. Likewise, if we have a target we can use, it is best to
6793 store directly into the target unless the type is large enough
6794 that memcpy will be used. If we are making an initializer and
6795 all operands are constant, put it in memory as well.
6796
6797 FIXME: Avoid trying to fill vector constructors piece-meal.
6798 Output them with output_constant_def below unless we're sure
6799 they're zeros. This should go away when vector initializers
6800 are treated like VECTOR_CST instead of arrays.
6801 */
6802 else if ((TREE_STATIC (exp)
6803 && ((mode == BLKmode
6804 && ! (target != 0 && safe_from_p (target, exp, 1)))
6805 || TREE_ADDRESSABLE (exp)
6806 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6807 && (! MOVE_BY_PIECES_P
6808 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6809 TYPE_ALIGN (type)))
6810 && ((TREE_CODE (type) == VECTOR_TYPE
6811 && !is_zeros_p (exp))
6812 || ! mostly_zeros_p (exp)))))
6813 || ((modifier == EXPAND_INITIALIZER
6814 || modifier == EXPAND_CONST_ADDRESS)
6815 && TREE_CONSTANT (exp)))
6816 {
6817 rtx constructor = output_constant_def (exp, 1);
6818
6819 if (modifier != EXPAND_CONST_ADDRESS
6820 && modifier != EXPAND_INITIALIZER
6821 && modifier != EXPAND_SUM)
6822 constructor = validize_mem (constructor);
6823
6824 return constructor;
6825 }
6826 else
6827 {
6828 /* Handle calls that pass values in multiple non-contiguous
6829 locations. The Irix 6 ABI has examples of this. */
6830 if (target == 0 || ! safe_from_p (target, exp, 1)
6831 || GET_CODE (target) == PARALLEL
6832 || modifier == EXPAND_STACK_PARM)
6833 target
6834 = assign_temp (build_qualified_type (type,
6835 (TYPE_QUALS (type)
6836 | (TREE_READONLY (exp)
6837 * TYPE_QUAL_CONST))),
6838 0, TREE_ADDRESSABLE (exp), 1);
6839
6840 store_constructor (exp, target, 0, int_expr_size (exp));
6841 return target;
6842 }
6843
6844 case INDIRECT_REF:
6845 {
6846 tree exp1 = TREE_OPERAND (exp, 0);
6847 tree index;
6848 tree string = string_constant (exp1, &index);
6849
6850 /* Try to optimize reads from const strings. */
6851 if (string
6852 && TREE_CODE (string) == STRING_CST
6853 && TREE_CODE (index) == INTEGER_CST
6854 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6855 && GET_MODE_CLASS (mode) == MODE_INT
6856 && GET_MODE_SIZE (mode) == 1
6857 && modifier != EXPAND_WRITE)
6858 return gen_int_mode (TREE_STRING_POINTER (string)
6859 [TREE_INT_CST_LOW (index)], mode);
6860
6861 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6862 op0 = memory_address (mode, op0);
6863 temp = gen_rtx_MEM (mode, op0);
6864 set_mem_attributes (temp, exp, 0);
6865
6866 /* If we are writing to this object and its type is a record with
6867 readonly fields, we must mark it as readonly so it will
6868 conflict with readonly references to those fields. */
6869 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
6870 RTX_UNCHANGING_P (temp) = 1;
6871
6872 return temp;
6873 }
6874
6875 case ARRAY_REF:
6876 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6877 abort ();
6878
6879 {
6880 tree array = TREE_OPERAND (exp, 0);
6881 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6882 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6883 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6884 HOST_WIDE_INT i;
6885
6886 /* Optimize the special-case of a zero lower bound.
6887
6888 We convert the low_bound to sizetype to avoid some problems
6889 with constant folding. (E.g. suppose the lower bound is 1,
6890 and its mode is QI. Without the conversion, (ARRAY
6891 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6892 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6893
6894 if (! integer_zerop (low_bound))
6895 index = size_diffop (index, convert (sizetype, low_bound));
6896
6897 /* Fold an expression like: "foo"[2].
6898 This is not done in fold so it won't happen inside &.
6899 Don't fold if this is for wide characters since it's too
6900 difficult to do correctly and this is a very rare case. */
6901
6902 if (modifier != EXPAND_CONST_ADDRESS
6903 && modifier != EXPAND_INITIALIZER
6904 && modifier != EXPAND_MEMORY
6905 && TREE_CODE (array) == STRING_CST
6906 && TREE_CODE (index) == INTEGER_CST
6907 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6908 && GET_MODE_CLASS (mode) == MODE_INT
6909 && GET_MODE_SIZE (mode) == 1)
6910 return gen_int_mode (TREE_STRING_POINTER (array)
6911 [TREE_INT_CST_LOW (index)], mode);
6912
6913 /* If this is a constant index into a constant array,
6914 just get the value from the array. Handle both the cases when
6915 we have an explicit constructor and when our operand is a variable
6916 that was declared const. */
6917
6918 if (modifier != EXPAND_CONST_ADDRESS
6919 && modifier != EXPAND_INITIALIZER
6920 && modifier != EXPAND_MEMORY
6921 && TREE_CODE (array) == CONSTRUCTOR
6922 && ! TREE_SIDE_EFFECTS (array)
6923 && TREE_CODE (index) == INTEGER_CST
6924 && 0 > compare_tree_int (index,
6925 list_length (CONSTRUCTOR_ELTS
6926 (TREE_OPERAND (exp, 0)))))
6927 {
6928 tree elem;
6929
6930 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6931 i = TREE_INT_CST_LOW (index);
6932 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6933 ;
6934
6935 if (elem)
6936 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6937 modifier);
6938 }
6939
6940 else if (optimize >= 1
6941 && modifier != EXPAND_CONST_ADDRESS
6942 && modifier != EXPAND_INITIALIZER
6943 && modifier != EXPAND_MEMORY
6944 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6945 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6946 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6947 {
6948 if (TREE_CODE (index) == INTEGER_CST)
6949 {
6950 tree init = DECL_INITIAL (array);
6951
6952 if (TREE_CODE (init) == CONSTRUCTOR)
6953 {
6954 tree elem;
6955
6956 for (elem = CONSTRUCTOR_ELTS (init);
6957 (elem
6958 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6959 elem = TREE_CHAIN (elem))
6960 ;
6961
6962 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6963 return expand_expr (fold (TREE_VALUE (elem)), target,
6964 tmode, modifier);
6965 }
6966 else if (TREE_CODE (init) == STRING_CST
6967 && 0 > compare_tree_int (index,
6968 TREE_STRING_LENGTH (init)))
6969 {
6970 tree type = TREE_TYPE (TREE_TYPE (init));
6971 enum machine_mode mode = TYPE_MODE (type);
6972
6973 if (GET_MODE_CLASS (mode) == MODE_INT
6974 && GET_MODE_SIZE (mode) == 1)
6975 return gen_int_mode (TREE_STRING_POINTER (init)
6976 [TREE_INT_CST_LOW (index)], mode);
6977 }
6978 }
6979 }
6980 }
6981 goto normal_inner_ref;
6982
6983 case COMPONENT_REF:
6984 /* If the operand is a CONSTRUCTOR, we can just extract the
6985 appropriate field if it is present. */
6986 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
6987 {
6988 tree elt;
6989
6990 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6991 elt = TREE_CHAIN (elt))
6992 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6993 /* We can normally use the value of the field in the
6994 CONSTRUCTOR. However, if this is a bitfield in
6995 an integral mode that we can fit in a HOST_WIDE_INT,
6996 we must mask only the number of bits in the bitfield,
6997 since this is done implicitly by the constructor. If
6998 the bitfield does not meet either of those conditions,
6999 we can't do this optimization. */
7000 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
7001 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
7002 == MODE_INT)
7003 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
7004 <= HOST_BITS_PER_WIDE_INT))))
7005 {
7006 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
7007 && modifier == EXPAND_STACK_PARM)
7008 target = 0;
7009 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
7010 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
7011 {
7012 HOST_WIDE_INT bitsize
7013 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
7014 enum machine_mode imode
7015 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
7016
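/* An unsigned bitfield of BITSIZE bits is masked with the constant
   (1 << BITSIZE) - 1 (e.g. 7 for a 3-bit field); a signed bitfield
   is instead sign-extended by shifting left and then arithmetically
   right by GET_MODE_BITSIZE (imode) - BITSIZE bits.  */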
7017 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
7018 {
7019 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7020 op0 = expand_and (imode, op0, op1, target);
7021 }
7022 else
7023 {
7024 tree count
7025 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7026 0);
7027
7028 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7029 target, 0);
7030 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7031 target, 0);
7032 }
7033 }
7034
7035 return op0;
7036 }
7037 }
7038 goto normal_inner_ref;
7039
7040 case BIT_FIELD_REF:
7041 case ARRAY_RANGE_REF:
7042 normal_inner_ref:
7043 {
7044 enum machine_mode mode1;
7045 HOST_WIDE_INT bitsize, bitpos;
7046 tree offset;
7047 int volatilep = 0;
7048 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7049 &mode1, &unsignedp, &volatilep);
7050 rtx orig_op0;
7051
7052 /* If we got back the original object, something is wrong. Perhaps
7053 we are evaluating an expression too early. In any event, don't
7054 infinitely recurse. */
7055 if (tem == exp)
7056 abort ();
7057
7058 /* If TEM's type is a union of variable size, pass TARGET to the inner
7059 computation, since it will need a temporary and TARGET is known
7060 to suffice. This occurs in unchecked conversion in Ada. */
7061
7062 orig_op0 = op0
7063 = expand_expr (tem,
7064 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7065 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7066 != INTEGER_CST)
7067 && modifier != EXPAND_STACK_PARM
7068 ? target : NULL_RTX),
7069 VOIDmode,
7070 (modifier == EXPAND_INITIALIZER
7071 || modifier == EXPAND_CONST_ADDRESS
7072 || modifier == EXPAND_STACK_PARM)
7073 ? modifier : EXPAND_NORMAL);
7074
7075 /* If this is a constant, put it into a register if it is a
7076 legitimate constant and OFFSET is 0, and into memory if it isn't. */
7077 if (CONSTANT_P (op0))
7078 {
7079 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7080 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7081 && offset == 0)
7082 op0 = force_reg (mode, op0);
7083 else
7084 op0 = validize_mem (force_const_mem (mode, op0));
7085 }
7086
7087 /* Otherwise, if this object is not in memory and we either have an
7088 offset or a BLKmode result, put it there. This case can't occur in
7089 C, but can in Ada if we have unchecked conversion of an expression
7090 from a scalar type to an array or record type or for an
7091 ARRAY_RANGE_REF whose type is BLKmode. */
7092 else if (GET_CODE (op0) != MEM
7093 && (offset != 0
7094 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7095 {
7096 /* If the operand is a SAVE_EXPR, we can deal with this by
7097 forcing the SAVE_EXPR into memory. */
7098 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7099 {
7100 put_var_into_stack (TREE_OPERAND (exp, 0),
7101 /*rescan=*/true);
7102 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7103 }
7104 else
7105 {
7106 tree nt
7107 = build_qualified_type (TREE_TYPE (tem),
7108 (TYPE_QUALS (TREE_TYPE (tem))
7109 | TYPE_QUAL_CONST));
7110 rtx memloc = assign_temp (nt, 1, 1, 1);
7111
7112 emit_move_insn (memloc, op0);
7113 op0 = memloc;
7114 }
7115 }
7116
7117 if (offset != 0)
7118 {
7119 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7120 EXPAND_SUM);
7121
7122 if (GET_CODE (op0) != MEM)
7123 abort ();
7124
7125 #ifdef POINTERS_EXTEND_UNSIGNED
7126 if (GET_MODE (offset_rtx) != Pmode)
7127 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7128 #else
7129 if (GET_MODE (offset_rtx) != ptr_mode)
7130 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7131 #endif
7132
7133 /* A constant address in OP0 can have VOIDmode; we must not try
7134 to call force_reg in that case, so avoid it. */
7135 if (GET_CODE (op0) == MEM
7136 && GET_MODE (op0) == BLKmode
7137 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7138 && bitsize != 0
7139 && (bitpos % bitsize) == 0
7140 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7141 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7142 {
7143 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7144 bitpos = 0;
7145 }
7146
7147 op0 = offset_address (op0, offset_rtx,
7148 highest_pow2_factor (offset));
7149 }
7150
7151 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7152 record its alignment as BIGGEST_ALIGNMENT. */
7153 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7154 && is_aligning_offset (offset, tem))
7155 set_mem_align (op0, BIGGEST_ALIGNMENT);
7156
7157 /* Don't forget about volatility even if this is a bitfield. */
7158 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7159 {
7160 if (op0 == orig_op0)
7161 op0 = copy_rtx (op0);
7162
7163 MEM_VOLATILE_P (op0) = 1;
7164 }
7165
7166 /* The following code doesn't handle CONCAT.
7167 Assume only bitpos == 0 can be used for CONCAT, due to
7168 one-element arrays having the same mode as their element. */
7169 if (GET_CODE (op0) == CONCAT)
7170 {
7171 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7172 abort ();
7173 return op0;
7174 }
7175
7176 /* In cases where an aligned union has an unaligned object
7177 as a field, we might be extracting a BLKmode value from
7178 an integer-mode (e.g., SImode) object. Handle this case
7179 by doing the extract into an object as wide as the field
7180 (which we know to be the width of a basic mode), then
7181 storing into memory, and changing the mode to BLKmode. */
7182 if (mode1 == VOIDmode
7183 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7184 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7185 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7186 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7187 && modifier != EXPAND_CONST_ADDRESS
7188 && modifier != EXPAND_INITIALIZER)
7189 /* If the field isn't aligned enough to fetch as a memref,
7190 fetch it as a bit field. */
7191 || (mode1 != BLKmode
7192 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7193 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))
7194 && ((modifier == EXPAND_CONST_ADDRESS
7195 || modifier == EXPAND_INITIALIZER)
7196 ? STRICT_ALIGNMENT
7197 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7198 || (bitpos % BITS_PER_UNIT != 0)))
7199 /* If the type and the field are a constant size and the
7200 size of the type isn't the same size as the bitfield,
7201 we must use bitfield operations. */
7202 || (bitsize >= 0
7203 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7204 == INTEGER_CST)
7205 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7206 bitsize)))
7207 {
7208 enum machine_mode ext_mode = mode;
7209
7210 if (ext_mode == BLKmode
7211 && ! (target != 0 && GET_CODE (op0) == MEM
7212 && GET_CODE (target) == MEM
7213 && bitpos % BITS_PER_UNIT == 0))
7214 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7215
7216 if (ext_mode == BLKmode)
7217 {
7218 if (target == 0)
7219 target = assign_temp (type, 0, 1, 1);
7220
7221 if (bitsize == 0)
7222 return target;
7223
7224 /* In this case, BITPOS must start at a byte boundary and
7225 TARGET, if specified, must be a MEM. */
7226 if (GET_CODE (op0) != MEM
7227 || (target != 0 && GET_CODE (target) != MEM)
7228 || bitpos % BITS_PER_UNIT != 0)
7229 abort ();
7230
7231 emit_block_move (target,
7232 adjust_address (op0, VOIDmode,
7233 bitpos / BITS_PER_UNIT),
7234 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7235 / BITS_PER_UNIT),
7236 (modifier == EXPAND_STACK_PARM
7237 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7238
7239 return target;
7240 }
7241
7242 op0 = validize_mem (op0);
7243
7244 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7245 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7246
7247 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7248 (modifier == EXPAND_STACK_PARM
7249 ? NULL_RTX : target),
7250 ext_mode, ext_mode,
7251 int_size_in_bytes (TREE_TYPE (tem)));
7252
7253 /* If the result is a record type and BITSIZE is narrower than
7254 the mode of OP0, an integral mode, and this is a big endian
7255 machine, we must put the field into the high-order bits. */
7256 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7257 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7258 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7259 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7260 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7261 - bitsize),
7262 op0, 1);
7263
7264 if (mode == BLKmode)
7265 {
7266 rtx new = assign_temp (build_qualified_type
7267 ((*lang_hooks.types.type_for_mode)
7268 (ext_mode, 0),
7269 TYPE_QUAL_CONST), 0, 1, 1);
7270
7271 emit_move_insn (new, op0);
7272 op0 = copy_rtx (new);
7273 PUT_MODE (op0, BLKmode);
7274 set_mem_attributes (op0, exp, 1);
7275 }
7276
7277 return op0;
7278 }
7279
7280 /* If the result is BLKmode, use that to access the object
7281 now as well. */
7282 if (mode == BLKmode)
7283 mode1 = BLKmode;
7284
7285 /* Get a reference to just this component. */
7286 if (modifier == EXPAND_CONST_ADDRESS
7287 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7288 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7289 else
7290 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7291
7292 if (op0 == orig_op0)
7293 op0 = copy_rtx (op0);
7294
7295 set_mem_attributes (op0, exp, 0);
7296 if (GET_CODE (XEXP (op0, 0)) == REG)
7297 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7298
7299 MEM_VOLATILE_P (op0) |= volatilep;
7300 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7301 || modifier == EXPAND_CONST_ADDRESS
7302 || modifier == EXPAND_INITIALIZER)
7303 return op0;
7304 else if (target == 0)
7305 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7306
7307 convert_move (target, op0, unsignedp);
7308 return target;
7309 }
7310
7311 case VTABLE_REF:
7312 {
7313 rtx insn, before = get_last_insn (), vtbl_ref;
7314
7315 /* Evaluate the interior expression. */
7316 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7317 tmode, modifier);
7318
7319 /* Get or create an instruction off which to hang a note. */
7320 if (REG_P (subtarget))
7321 {
7322 target = subtarget;
7323 insn = get_last_insn ();
7324 if (insn == before)
7325 abort ();
7326 if (! INSN_P (insn))
7327 insn = prev_nonnote_insn (insn);
7328 }
7329 else
7330 {
7331 target = gen_reg_rtx (GET_MODE (subtarget));
7332 insn = emit_move_insn (target, subtarget);
7333 }
7334
7335 /* Collect the data for the note. */
7336 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7337 vtbl_ref = plus_constant (vtbl_ref,
7338 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7339 /* Discard the initial CONST that was added. */
7340 vtbl_ref = XEXP (vtbl_ref, 0);
7341
7342 REG_NOTES (insn)
7343 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7344
7345 return target;
7346 }
7347
7348 /* Intended for a reference to a buffer of a file-object in Pascal.
7349 But it's not certain that a special tree code will really be
7350 necessary for these. INDIRECT_REF might work for them. */
7351 case BUFFER_REF:
7352 abort ();
7353
7354 case IN_EXPR:
7355 {
7356 /* Pascal set IN expression.
7357
7358 Algorithm:
7359 rlo = set_low - (set_low%bits_per_word);
7360 the_word = set [ (index - rlo)/bits_per_word ];
7361 bit_index = index % bits_per_word;
7362 bitmask = 1 << bit_index;
7363 return !!(the_word & bitmask); */
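/* For instance, assuming bits_per_word == 32, set_low == 3 and
   index == 10: rlo = 3 - (3 % 32) = 0, the_word = set[(10 - 0) / 32]
   = set[0], bit_index = 10 % 32 = 10, and the result is bit 10 of
   set[0].  (The code below actually works in units of BITS_PER_UNIT
   rather than whole words.)  */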
7364
7365 tree set = TREE_OPERAND (exp, 0);
7366 tree index = TREE_OPERAND (exp, 1);
7367 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7368 tree set_type = TREE_TYPE (set);
7369 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7370 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7371 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7372 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7373 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7374 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7375 rtx setaddr = XEXP (setval, 0);
7376 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7377 rtx rlow;
7378 rtx diff, quo, rem, addr, bit, result;
7379
7380 /* If domain is empty, answer is no. Likewise if index is constant
7381 and out of bounds. */
7382 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7383 && TREE_CODE (set_low_bound) == INTEGER_CST
7384 && tree_int_cst_lt (set_high_bound, set_low_bound))
7385 || (TREE_CODE (index) == INTEGER_CST
7386 && TREE_CODE (set_low_bound) == INTEGER_CST
7387 && tree_int_cst_lt (index, set_low_bound))
7388 || (TREE_CODE (set_high_bound) == INTEGER_CST
7389 && TREE_CODE (index) == INTEGER_CST
7390 && tree_int_cst_lt (set_high_bound, index))))
7391 return const0_rtx;
7392
7393 if (target == 0)
7394 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7395
7396 /* If we get here, we have to generate the code for both cases
7397 (in range and out of range). */
7398
7399 op0 = gen_label_rtx ();
7400 op1 = gen_label_rtx ();
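/* OP1 is the label for the out-of-range case, where TARGET is simply
   cleared; OP0 is the label the in-range path jumps to in order to
   skip that.  */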
7401
7402 if (! (GET_CODE (index_val) == CONST_INT
7403 && GET_CODE (lo_r) == CONST_INT))
7404 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7405 GET_MODE (index_val), iunsignedp, op1);
7406
7407 if (! (GET_CODE (index_val) == CONST_INT
7408 && GET_CODE (hi_r) == CONST_INT))
7409 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7410 GET_MODE (index_val), iunsignedp, op1);
7411
7412 /* Calculate the element number of bit zero in the first word
7413 of the set. */
7414 if (GET_CODE (lo_r) == CONST_INT)
7415 rlow = GEN_INT (INTVAL (lo_r)
7416 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7417 else
7418 rlow = expand_binop (index_mode, and_optab, lo_r,
7419 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7420 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7421
7422 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7423 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7424
7425 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7426 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7427 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7428 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7429
7430 addr = memory_address (byte_mode,
7431 expand_binop (index_mode, add_optab, diff,
7432 setaddr, NULL_RTX, iunsignedp,
7433 OPTAB_LIB_WIDEN));
7434
7435 /* Extract the bit we want to examine. */
7436 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7437 gen_rtx_MEM (byte_mode, addr),
7438 make_tree (TREE_TYPE (index), rem),
7439 NULL_RTX, 1);
7440 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7441 GET_MODE (target) == byte_mode ? target : 0,
7442 1, OPTAB_LIB_WIDEN);
7443
7444 if (result != target)
7445 convert_move (target, result, 1);
7446
7447 /* Output the code to handle the out-of-range case. */
7448 emit_jump (op0);
7449 emit_label (op1);
7450 emit_move_insn (target, const0_rtx);
7451 emit_label (op0);
7452 return target;
7453 }
7454
7455 case WITH_CLEANUP_EXPR:
7456 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7457 {
7458 WITH_CLEANUP_EXPR_RTL (exp)
7459 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7460 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7461 CLEANUP_EH_ONLY (exp));
7462
7463 /* That's it for this cleanup. */
7464 TREE_OPERAND (exp, 1) = 0;
7465 }
7466 return WITH_CLEANUP_EXPR_RTL (exp);
7467
7468 case CLEANUP_POINT_EXPR:
7469 {
7470 /* Start a new binding layer that will keep track of all cleanup
7471 actions to be performed. */
7472 expand_start_bindings (2);
7473
7474 target_temp_slot_level = temp_slot_level;
7475
7476 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7477 /* If we're going to use this value, load it up now. */
7478 if (! ignore)
7479 op0 = force_not_mem (op0);
7480 preserve_temp_slots (op0);
7481 expand_end_bindings (NULL_TREE, 0, 0);
7482 }
7483 return op0;
7484
7485 case CALL_EXPR:
7486 /* Check for a built-in function. */
7487 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7488 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7489 == FUNCTION_DECL)
7490 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7491 {
7492 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7493 == BUILT_IN_FRONTEND)
7494 return (*lang_hooks.expand_expr) (exp, original_target,
7495 tmode, modifier);
7496 else
7497 return expand_builtin (exp, target, subtarget, tmode, ignore);
7498 }
7499
7500 return expand_call (exp, target, ignore);
7501
7502 case NON_LVALUE_EXPR:
7503 case NOP_EXPR:
7504 case CONVERT_EXPR:
7505 case REFERENCE_EXPR:
7506 if (TREE_OPERAND (exp, 0) == error_mark_node)
7507 return const0_rtx;
7508
7509 if (TREE_CODE (type) == UNION_TYPE)
7510 {
7511 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7512
7513 /* If both input and output are BLKmode, this conversion isn't doing
7514 anything except possibly changing memory attributes. */
7515 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7516 {
7517 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7518 modifier);
7519
7520 result = copy_rtx (result);
7521 set_mem_attributes (result, exp, 0);
7522 return result;
7523 }
7524
7525 if (target == 0)
7526 target = assign_temp (type, 0, 1, 1);
7527
7528 if (GET_CODE (target) == MEM)
7529 /* Store data into beginning of memory target. */
7530 store_expr (TREE_OPERAND (exp, 0),
7531 adjust_address (target, TYPE_MODE (valtype), 0),
7532 modifier == EXPAND_STACK_PARM ? 2 : 0);
7533
7534 else if (GET_CODE (target) == REG)
7535 /* Store this field into a union of the proper type. */
7536 store_field (target,
7537 MIN ((int_size_in_bytes (TREE_TYPE
7538 (TREE_OPERAND (exp, 0)))
7539 * BITS_PER_UNIT),
7540 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7541 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7542 VOIDmode, 0, type, 0);
7543 else
7544 abort ();
7545
7546 /* Return the entire union. */
7547 return target;
7548 }
7549
7550 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7551 {
7552 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7553 modifier);
7554
7555 /* If the signedness of the conversion differs and OP0 is
7556 a promoted SUBREG, clear that indication since we now
7557 have to do the proper extension. */
7558 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7559 && GET_CODE (op0) == SUBREG)
7560 SUBREG_PROMOTED_VAR_P (op0) = 0;
7561
7562 return op0;
7563 }
7564
7565 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7566 if (GET_MODE (op0) == mode)
7567 return op0;
7568
7569 /* If OP0 is a constant, just convert it into the proper mode. */
7570 if (CONSTANT_P (op0))
7571 {
7572 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7573 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7574
7575 if (modifier == EXPAND_INITIALIZER)
7576 return simplify_gen_subreg (mode, op0, inner_mode,
7577 subreg_lowpart_offset (mode,
7578 inner_mode));
7579 else
7580 return convert_modes (mode, inner_mode, op0,
7581 TREE_UNSIGNED (inner_type));
7582 }
7583
7584 if (modifier == EXPAND_INITIALIZER)
7585 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7586
7587 if (target == 0)
7588 return
7589 convert_to_mode (mode, op0,
7590 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7591 else
7592 convert_move (target, op0,
7593 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7594 return target;
7595
7596 case VIEW_CONVERT_EXPR:
7597 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7598
7599 /* If the input and output modes are both the same, we are done.
7600 Otherwise, if neither mode is BLKmode and both are integral and within
7601 a word, we can use gen_lowpart. If neither is true, make sure the
7602 operand is in memory and convert the MEM to the new mode. */
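/* For example, a VIEW_CONVERT_EXPR between two sub-word integral
   types can simply reinterpret the low part of OP0 via gen_lowpart,
   with no memory round trip.  */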
7603 if (TYPE_MODE (type) == GET_MODE (op0))
7604 ;
7605 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7606 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7607 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7608 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7609 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7610 op0 = gen_lowpart (TYPE_MODE (type), op0);
7611 else if (GET_CODE (op0) != MEM)
7612 {
7613 /* If the operand is not a MEM, force it into memory. Since we
7614 are going to be changing the mode of the MEM, don't call
7615 force_const_mem for constants because we don't allow pool
7616 constants to change mode. */
7617 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7618
7619 if (TREE_ADDRESSABLE (exp))
7620 abort ();
7621
7622 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7623 target
7624 = assign_stack_temp_for_type
7625 (TYPE_MODE (inner_type),
7626 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7627
7628 emit_move_insn (target, op0);
7629 op0 = target;
7630 }
7631
7632 /* At this point, OP0 is in the correct mode. If the output type is such
7633 that the operand is known to be aligned, indicate that it is.
7634 Otherwise, we need only be concerned about alignment for non-BLKmode
7635 results. */
7636 if (GET_CODE (op0) == MEM)
7637 {
7638 op0 = copy_rtx (op0);
7639
7640 if (TYPE_ALIGN_OK (type))
7641 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7642 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7643 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7644 {
7645 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7646 HOST_WIDE_INT temp_size
7647 = MAX (int_size_in_bytes (inner_type),
7648 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7649 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7650 temp_size, 0, type);
7651 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7652
7653 if (TREE_ADDRESSABLE (exp))
7654 abort ();
7655
7656 if (GET_MODE (op0) == BLKmode)
7657 emit_block_move (new_with_op0_mode, op0,
7658 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7659 (modifier == EXPAND_STACK_PARM
7660 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7661 else
7662 emit_move_insn (new_with_op0_mode, op0);
7663
7664 op0 = new;
7665 }
7666
7667 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7668 }
7669
7670 return op0;
7671
7672 case PLUS_EXPR:
7673 this_optab = ! unsignedp && flag_trapv
7674 && (GET_MODE_CLASS (mode) == MODE_INT)
7675 ? addv_optab : add_optab;
7676
7677 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7678 something else, make sure we add the register to the constant and
7679 then to the other thing. This case can occur during strength
7680 reduction and doing it this way will produce better code if the
7681 frame pointer or argument pointer is eliminated.
7682
7683 fold-const.c will ensure that the constant is always in the inner
7684 PLUS_EXPR, so the only case we need to do anything about is if
7685 sp, ap, or fp is our second argument, in which case we must swap
7686 the innermost first argument and our second argument. */
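/* For example, (A + C) + fp is rearranged here into (fp + C) + A, so
   that the register and the constant are combined first.  */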
7687
7688 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7689 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7690 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7691 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7692 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7693 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7694 {
7695 tree t = TREE_OPERAND (exp, 1);
7696
7697 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7698 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7699 }
7700
7701 /* If the result is to be ptr_mode and we are adding an integer to
7702 something, we might be forming a constant. So try to use
7703 plus_constant. If it produces a sum and we can't accept it,
7704 use force_operand. This allows P = &ARR[const] to generate
7705 efficient code on machines where a SYMBOL_REF is not a valid
7706 address.
7707
7708 If this is an EXPAND_SUM call, always return the sum. */
7709 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7710 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7711 {
7712 if (modifier == EXPAND_STACK_PARM)
7713 target = 0;
7714 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7715 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7716 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7717 {
7718 rtx constant_part;
7719
7720 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7721 EXPAND_SUM);
7722 /* Use immed_double_const to ensure that the constant is
7723 truncated according to the mode of OP1, then sign extended
7724 to a HOST_WIDE_INT. Using the constant directly can result
7725 in non-canonical RTL in a 64x32 cross compile. */
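/* (A CONST_INT has no mode of its own; canonical RTL requires its
   value to be the sign-extension of the mode-sized value, which is
   what can go wrong when the host word is wider than the target
   word.)  */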
7726 constant_part
7727 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7728 (HOST_WIDE_INT) 0,
7729 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7730 op1 = plus_constant (op1, INTVAL (constant_part));
7731 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7732 op1 = force_operand (op1, target);
7733 return op1;
7734 }
7735
7736 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7737 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7738 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7739 {
7740 rtx constant_part;
7741
7742 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7743 (modifier == EXPAND_INITIALIZER
7744 ? EXPAND_INITIALIZER : EXPAND_SUM));
7745 if (! CONSTANT_P (op0))
7746 {
7747 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7748 VOIDmode, modifier);
7749 /* Return a PLUS if modifier says it's OK. */
7750 if (modifier == EXPAND_SUM
7751 || modifier == EXPAND_INITIALIZER)
7752 return simplify_gen_binary (PLUS, mode, op0, op1);
7753 goto binop2;
7754 }
7755 /* Use immed_double_const to ensure that the constant is
7756 truncated according to the mode of OP1, then sign extended
7757 to a HOST_WIDE_INT. Using the constant directly can result
7758 in non-canonical RTL in a 64x32 cross compile. */
7759 constant_part
7760 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7761 (HOST_WIDE_INT) 0,
7762 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7763 op0 = plus_constant (op0, INTVAL (constant_part));
7764 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7765 op0 = force_operand (op0, target);
7766 return op0;
7767 }
7768 }
7769
7770 /* No sense saving up arithmetic to be done
7771 if it's all in the wrong mode to form part of an address.
7772 And force_operand won't know whether to sign-extend or
7773 zero-extend. */
7774 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7775 || mode != ptr_mode)
7776 {
7777 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7778 subtarget, &op0, &op1, 0);
7779 if (op0 == const0_rtx)
7780 return op1;
7781 if (op1 == const0_rtx)
7782 return op0;
7783 goto binop2;
7784 }
7785
7786 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7787 subtarget, &op0, &op1, modifier);
7788 return simplify_gen_binary (PLUS, mode, op0, op1);
7789
7790 case MINUS_EXPR:
7791 /* For initializers, we are allowed to return a MINUS of two
7792 symbolic constants. Here we handle all cases when both operands
7793 are constant. */
7794 /* Handle difference of two symbolic constants,
7795 for the sake of an initializer. */
7796 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7797 && really_constant_p (TREE_OPERAND (exp, 0))
7798 && really_constant_p (TREE_OPERAND (exp, 1)))
7799 {
7800 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7801 NULL_RTX, &op0, &op1, modifier);
7802
7803 /* If the last operand is a CONST_INT, use plus_constant of
7804 the negated constant. Else make the MINUS. */
7805 if (GET_CODE (op1) == CONST_INT)
7806 return plus_constant (op0, - INTVAL (op1));
7807 else
7808 return gen_rtx_MINUS (mode, op0, op1);
7809 }
7810
7811 this_optab = ! unsignedp && flag_trapv
7812 && (GET_MODE_CLASS(mode) == MODE_INT)
7813 ? subv_optab : sub_optab;
7814
7815 /* No sense saving up arithmetic to be done
7816 if it's all in the wrong mode to form part of an address.
7817 And force_operand won't know whether to sign-extend or
7818 zero-extend. */
7819 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7820 || mode != ptr_mode)
7821 goto binop;
7822
7823 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7824 subtarget, &op0, &op1, modifier);
7825
7826 /* Convert A - const to A + (-const). */
7827 if (GET_CODE (op1) == CONST_INT)
7828 {
7829 op1 = negate_rtx (mode, op1);
7830 return simplify_gen_binary (PLUS, mode, op0, op1);
7831 }
7832
7833 goto binop2;
7834
7835 case MULT_EXPR:
7836 /* If first operand is constant, swap them.
7837 Thus the following special case checks need only
7838 check the second operand. */
7839 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7840 {
7841 tree t1 = TREE_OPERAND (exp, 0);
7842 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7843 TREE_OPERAND (exp, 1) = t1;
7844 }
7845
7846 /* Attempt to return something suitable for generating an
7847 indexed address, for machines that support that. */
7848
7849 if (modifier == EXPAND_SUM && mode == ptr_mode
7850 && host_integerp (TREE_OPERAND (exp, 1), 0))
7851 {
7852 tree exp1 = TREE_OPERAND (exp, 1);
7853
7854 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7855 EXPAND_SUM);
7856
7857 if (GET_CODE (op0) != REG)
7858 op0 = force_operand (op0, NULL_RTX);
7859 if (GET_CODE (op0) != REG)
7860 op0 = copy_to_mode_reg (mode, op0);
7861
7862 return gen_rtx_MULT (mode, op0,
7863 gen_int_mode (tree_low_cst (exp1, 0),
7864 TYPE_MODE (TREE_TYPE (exp1))));
7865 }
7866
7867 if (modifier == EXPAND_STACK_PARM)
7868 target = 0;
7869
7870 /* Check for multiplying things that have been extended
7871 from a narrower type. If this machine supports multiplying
7872 in that narrower type with a result in the desired type,
7873 do it that way, and avoid the explicit type-conversion. */
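/* A typical case is (int) a * (int) b with a and b of a narrower
   integral type, on a target that provides a widening multiply
   producing the int product directly from the narrow operands.  */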
7874 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7875 && TREE_CODE (type) == INTEGER_TYPE
7876 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7877 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7878 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7879 && int_fits_type_p (TREE_OPERAND (exp, 1),
7880 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7881 /* Don't use a widening multiply if a shift will do. */
7882 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7883 > HOST_BITS_PER_WIDE_INT)
7884 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7885 ||
7886 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7887 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7888 ==
7889 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7890 /* If both operands are extended, they must either both
7891 be zero-extended or both be sign-extended. */
7892 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7893 ==
7894 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7895 {
7896 enum machine_mode innermode
7897 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7898 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7899 ? smul_widen_optab : umul_widen_optab);
7900 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7901 ? umul_widen_optab : smul_widen_optab);
7902 if (mode == GET_MODE_WIDER_MODE (innermode))
7903 {
7904 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7905 {
7906 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7907 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7908 TREE_OPERAND (exp, 1),
7909 NULL_RTX, &op0, &op1, 0);
7910 else
7911 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7912 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7913 NULL_RTX, &op0, &op1, 0);
7914 goto binop2;
7915 }
7916 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7917 && innermode == word_mode)
7918 {
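/* The widening multiply matching the operands' signedness is not
   available here, but the opposite-signedness one is: use it to form
   the product and then correct the high half of the result with
   expand_mult_highpart_adjust.  */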
7919 rtx htem;
7920 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7921 NULL_RTX, VOIDmode, 0);
7922 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7923 op1 = convert_modes (innermode, mode,
7924 expand_expr (TREE_OPERAND (exp, 1),
7925 NULL_RTX, VOIDmode, 0),
7926 unsignedp);
7927 else
7928 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7929 NULL_RTX, VOIDmode, 0);
7930 temp = expand_binop (mode, other_optab, op0, op1, target,
7931 unsignedp, OPTAB_LIB_WIDEN);
7932 htem = expand_mult_highpart_adjust (innermode,
7933 gen_highpart (innermode, temp),
7934 op0, op1,
7935 gen_highpart (innermode, temp),
7936 unsignedp);
7937 emit_move_insn (gen_highpart (innermode, temp), htem);
7938 return temp;
7939 }
7940 }
7941 }
7942 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7943 subtarget, &op0, &op1, 0);
7944 return expand_mult (mode, op0, op1, target, unsignedp);
7945
7946 case TRUNC_DIV_EXPR:
7947 case FLOOR_DIV_EXPR:
7948 case CEIL_DIV_EXPR:
7949 case ROUND_DIV_EXPR:
7950 case EXACT_DIV_EXPR:
7951 if (modifier == EXPAND_STACK_PARM)
7952 target = 0;
7953 /* Possible optimization: compute the dividend with EXPAND_SUM
7954 then, if the divisor is constant, we can optimize the case
7955 where some terms of the dividend have coefficients divisible by it. */
7956 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7957 subtarget, &op0, &op1, 0);
7958 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7959
7960 case RDIV_EXPR:
7961 /* Emit a/b as a*(1/b). Later CSE may manage to share the reciprocal,
7962 saving an expensive divide. If not, combine will rebuild the original
7963 computation. */
7964 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7965 && TREE_CODE (type) == REAL_TYPE
7966 && !real_onep (TREE_OPERAND (exp, 0)))
7967 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7968 build (RDIV_EXPR, type,
7969 build_real (type, dconst1),
7970 TREE_OPERAND (exp, 1))),
7971 target, tmode, modifier);
7972 this_optab = sdiv_optab;
7973 goto binop;
7974
7975 case TRUNC_MOD_EXPR:
7976 case FLOOR_MOD_EXPR:
7977 case CEIL_MOD_EXPR:
7978 case ROUND_MOD_EXPR:
7979 if (modifier == EXPAND_STACK_PARM)
7980 target = 0;
7981 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7982 subtarget, &op0, &op1, 0);
7983 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7984
7985 case FIX_ROUND_EXPR:
7986 case FIX_FLOOR_EXPR:
7987 case FIX_CEIL_EXPR:
7988 abort (); /* Not used for C. */
7989
7990 case FIX_TRUNC_EXPR:
7991 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7992 if (target == 0 || modifier == EXPAND_STACK_PARM)
7993 target = gen_reg_rtx (mode);
7994 expand_fix (target, op0, unsignedp);
7995 return target;
7996
7997 case FLOAT_EXPR:
7998 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7999 if (target == 0 || modifier == EXPAND_STACK_PARM)
8000 target = gen_reg_rtx (mode);
8001 /* expand_float can't figure out what to do if FROM has VOIDmode.
8002 So give it the correct mode. With -O, cse will optimize this. */
8003 if (GET_MODE (op0) == VOIDmode)
8004 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8005 op0);
8006 expand_float (target, op0,
8007 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8008 return target;
8009
8010 case NEGATE_EXPR:
8011 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8012 if (modifier == EXPAND_STACK_PARM)
8013 target = 0;
8014 temp = expand_unop (mode,
8015 ! unsignedp && flag_trapv
8016 && (GET_MODE_CLASS(mode) == MODE_INT)
8017 ? negv_optab : neg_optab, op0, target, 0);
8018 if (temp == 0)
8019 abort ();
8020 return temp;
8021
8022 case ABS_EXPR:
8023 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8024 if (modifier == EXPAND_STACK_PARM)
8025 target = 0;
8026
8027 /* ABS_EXPR is not valid for complex arguments. */
8028 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8029 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8030 abort ();
8031
8032 /* Unsigned abs is simply the operand. Testing here means we don't
8033 risk generating incorrect code below. */
8034 if (TREE_UNSIGNED (type))
8035 return op0;
8036
8037 return expand_abs (mode, op0, target, unsignedp,
8038 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8039
8040 case MAX_EXPR:
8041 case MIN_EXPR:
8042 target = original_target;
8043 if (target == 0
8044 || modifier == EXPAND_STACK_PARM
8045 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8046 || GET_MODE (target) != mode
8047 || (GET_CODE (target) == REG
8048 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8049 target = gen_reg_rtx (mode);
8050 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8051 target, &op0, &op1, 0);
8052
8053 /* First try to do it with a special MIN or MAX instruction.
8054 If that does not win, use a conditional jump to select the proper
8055 value. */
8056 this_optab = (TREE_UNSIGNED (type)
8057 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8058 : (code == MIN_EXPR ? smin_optab : smax_optab));
8059
8060 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8061 OPTAB_WIDEN);
8062 if (temp != 0)
8063 return temp;
8064
8065 /* At this point, a MEM target is no longer useful; we will get better
8066 code without it. */
8067
8068 if (GET_CODE (target) == MEM)
8069 target = gen_reg_rtx (mode);
8070
8071 /* If op1 was placed in target, swap op0 and op1. */
8072 if (target != op0 && target == op1)
8073 {
8074 rtx tem = op0;
8075 op0 = op1;
8076 op1 = tem;
8077 }
8078
8079 if (target != op0)
8080 emit_move_insn (target, op0);
8081
8082 op0 = gen_label_rtx ();
8083
8084 /* If this mode is an integer too wide to compare properly,
8085 compare word by word. Rely on cse to optimize constant cases. */
8086 if (GET_MODE_CLASS (mode) == MODE_INT
8087 && ! can_compare_p (GE, mode, ccp_jump))
8088 {
8089 if (code == MAX_EXPR)
8090 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8091 target, op1, NULL_RTX, op0);
8092 else
8093 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8094 op1, target, NULL_RTX, op0);
8095 }
8096 else
8097 {
8098 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8099 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8100 unsignedp, mode, NULL_RTX, NULL_RTX,
8101 op0);
8102 }
8103 emit_move_insn (target, op1);
8104 emit_label (op0);
8105 return target;
8106
8107 case BIT_NOT_EXPR:
8108 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8109 if (modifier == EXPAND_STACK_PARM)
8110 target = 0;
8111 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8112 if (temp == 0)
8113 abort ();
8114 return temp;
8115
8116 /* ??? Can optimize bitwise operations with one arg constant.
8117 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8118 and (a bitwise1 b) bitwise2 b (etc)
8119 but that is probably not worth while. */
8120
8121 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8122 boolean values when we want in all cases to compute both of them. In
8123 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8124 as actual zero-or-1 values and then bitwise anding. In cases where
8125 there cannot be any side effects, better code would be made by
8126 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8127 how to recognize those cases. */
8128
8129 case TRUTH_AND_EXPR:
8130 case BIT_AND_EXPR:
8131 this_optab = and_optab;
8132 goto binop;
8133
8134 case TRUTH_OR_EXPR:
8135 case BIT_IOR_EXPR:
8136 this_optab = ior_optab;
8137 goto binop;
8138
8139 case TRUTH_XOR_EXPR:
8140 case BIT_XOR_EXPR:
8141 this_optab = xor_optab;
8142 goto binop;
8143
8144 case LSHIFT_EXPR:
8145 case RSHIFT_EXPR:
8146 case LROTATE_EXPR:
8147 case RROTATE_EXPR:
8148 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8149 subtarget = 0;
8150 if (modifier == EXPAND_STACK_PARM)
8151 target = 0;
8152 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8153 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8154 unsignedp);
8155
8156 /* Could determine the answer when only additive constants differ. Also,
8157 the addition of one can be handled by changing the condition. */
8158 case LT_EXPR:
8159 case LE_EXPR:
8160 case GT_EXPR:
8161 case GE_EXPR:
8162 case EQ_EXPR:
8163 case NE_EXPR:
8164 case UNORDERED_EXPR:
8165 case ORDERED_EXPR:
8166 case UNLT_EXPR:
8167 case UNLE_EXPR:
8168 case UNGT_EXPR:
8169 case UNGE_EXPR:
8170 case UNEQ_EXPR:
8171 temp = do_store_flag (exp,
8172 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8173 tmode != VOIDmode ? tmode : mode, 0);
8174 if (temp != 0)
8175 return temp;
8176
8177 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8178 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8179 && original_target
8180 && GET_CODE (original_target) == REG
8181 && (GET_MODE (original_target)
8182 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8183 {
8184 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8185 VOIDmode, 0);
8186
8187 /* If temp is constant, we can just compute the result. */
8188 if (GET_CODE (temp) == CONST_INT)
8189 {
8190 if (INTVAL (temp) != 0)
8191 emit_move_insn (target, const1_rtx);
8192 else
8193 emit_move_insn (target, const0_rtx);
8194
8195 return target;
8196 }
8197
8198 if (temp != original_target)
8199 {
8200 enum machine_mode mode1 = GET_MODE (temp);
8201 if (mode1 == VOIDmode)
8202 mode1 = tmode != VOIDmode ? tmode : mode;
8203
8204 temp = copy_to_mode_reg (mode1, temp);
8205 }
8206
8207 op1 = gen_label_rtx ();
8208 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8209 GET_MODE (temp), unsignedp, op1);
8210 emit_move_insn (temp, const1_rtx);
8211 emit_label (op1);
8212 return temp;
8213 }
8214
8215 /* If no set-flag instruction, must generate a conditional
8216 store into a temporary variable. Drop through
8217 and handle this like && and ||. */
8218
8219 case TRUTH_ANDIF_EXPR:
8220 case TRUTH_ORIF_EXPR:
8221 if (! ignore
8222 && (target == 0
8223 || modifier == EXPAND_STACK_PARM
8224 || ! safe_from_p (target, exp, 1)
8225 /* Make sure we don't have a hard reg (such as the function's return
8226 value) live across basic blocks, if not optimizing. */
8227 || (!optimize && GET_CODE (target) == REG
8228 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8229 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8230
8231 if (target)
8232 emit_clr_insn (target);
8233
8234 op1 = gen_label_rtx ();
8235 jumpifnot (exp, op1);
8236
8237 if (target)
8238 emit_0_to_1_insn (target);
8239
8240 emit_label (op1);
8241 return ignore ? const0_rtx : target;
8242
8243 case TRUTH_NOT_EXPR:
8244 if (modifier == EXPAND_STACK_PARM)
8245 target = 0;
8246 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8247 /* The parser is careful to generate TRUTH_NOT_EXPR
8248 only with operands that are always zero or one. */
8249 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8250 target, 1, OPTAB_LIB_WIDEN);
8251 if (temp == 0)
8252 abort ();
8253 return temp;
8254
8255 case COMPOUND_EXPR:
8256 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8257 emit_queue ();
8258 return expand_expr (TREE_OPERAND (exp, 1),
8259 (ignore ? const0_rtx : target),
8260 VOIDmode, modifier);
8261
8262 case COND_EXPR:
8263 /* If we would have a "singleton" (see below) were it not for a
8264 conversion in each arm, bring that conversion back out. */
8265 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8266 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8267 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8268 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8269 {
8270 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8271 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8272
8273 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8274 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8275 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8276 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8277 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8278 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8279 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8280 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8281 return expand_expr (build1 (NOP_EXPR, type,
8282 build (COND_EXPR, TREE_TYPE (iftrue),
8283 TREE_OPERAND (exp, 0),
8284 iftrue, iffalse)),
8285 target, tmode, modifier);
8286 }
8287
8288 {
8289 /* Note that COND_EXPRs whose type is a structure or union
8290 are required to be constructed to contain assignments of
8291 a temporary variable, so that we can evaluate them here
8292 for side effect only. If type is void, we must do likewise. */
8293
8294 /* If an arm of the branch requires a cleanup,
8295 only that cleanup is performed. */
8296
8297 tree singleton = 0;
8298 tree binary_op = 0, unary_op = 0;
8299
8300 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8301 convert it to our mode, if necessary. */
8302 if (integer_onep (TREE_OPERAND (exp, 1))
8303 && integer_zerop (TREE_OPERAND (exp, 2))
8304 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8305 {
8306 if (ignore)
8307 {
8308 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8309 modifier);
8310 return const0_rtx;
8311 }
8312
8313 if (modifier == EXPAND_STACK_PARM)
8314 target = 0;
8315 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8316 if (GET_MODE (op0) == mode)
8317 return op0;
8318
8319 if (target == 0)
8320 target = gen_reg_rtx (mode);
8321 convert_move (target, op0, unsignedp);
8322 return target;
8323 }
8324
8325 /* Check for X ? A + B : A. If we have this, we can copy A to the
8326 output and conditionally add B. Similarly for unary operations.
8327 Don't do this if X has side-effects because those side effects
8328 might affect A or B and the "?" operation is a sequence point in
8329 ANSI. (operand_equal_p tests for side effects.) */
8330
8331 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8332 && operand_equal_p (TREE_OPERAND (exp, 2),
8333 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8334 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8335 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8336 && operand_equal_p (TREE_OPERAND (exp, 1),
8337 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8338 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8339 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8340 && operand_equal_p (TREE_OPERAND (exp, 2),
8341 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8342 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8343 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8344 && operand_equal_p (TREE_OPERAND (exp, 1),
8345 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8346 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8347
8348 /* If we are not to produce a result, we have no target. Otherwise,
8349 if a target was specified use it; it will not be used as an
8350 intermediate target unless it is safe. If no target, use a
8351 temporary. */
8352
8353 if (ignore)
8354 temp = 0;
8355 else if (modifier == EXPAND_STACK_PARM)
8356 temp = assign_temp (type, 0, 0, 1);
8357 else if (original_target
8358 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8359 || (singleton && GET_CODE (original_target) == REG
8360 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8361 && original_target == var_rtx (singleton)))
8362 && GET_MODE (original_target) == mode
8363 #ifdef HAVE_conditional_move
8364 && (! can_conditionally_move_p (mode)
8365 || GET_CODE (original_target) == REG
8366 || TREE_ADDRESSABLE (type))
8367 #endif
8368 && (GET_CODE (original_target) != MEM
8369 || TREE_ADDRESSABLE (type)))
8370 temp = original_target;
8371 else if (TREE_ADDRESSABLE (type))
8372 abort ();
8373 else
8374 temp = assign_temp (type, 0, 0, 1);
8375
8376 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8377 do the test of X as a store-flag operation, do this as
8378 A + ((X != 0) << log C). Similarly for other simple binary
8379 operators. Only do this for C == 1 if BRANCH_COST is low. */
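/* For instance, with BRANCH_COST >= 3, "x > y ? a + 4 : a" can be
   emitted as "a + ((x > y) << 2)".  */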
8380 if (temp && singleton && binary_op
8381 && (TREE_CODE (binary_op) == PLUS_EXPR
8382 || TREE_CODE (binary_op) == MINUS_EXPR
8383 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8384 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8385 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8386 : integer_onep (TREE_OPERAND (binary_op, 1)))
8387 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8388 {
8389 rtx result;
8390 tree cond;
8391 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8392 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8393 ? addv_optab : add_optab)
8394 : TREE_CODE (binary_op) == MINUS_EXPR
8395 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8396 ? subv_optab : sub_optab)
8397 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8398 : xor_optab);
8399
8400 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8401 if (singleton == TREE_OPERAND (exp, 1))
8402 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8403 else
8404 cond = TREE_OPERAND (exp, 0);
8405
8406 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8407 ? temp : NULL_RTX),
8408 mode, BRANCH_COST <= 1);
8409
8410 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8411 result = expand_shift (LSHIFT_EXPR, mode, result,
8412 build_int_2 (tree_log2
8413 (TREE_OPERAND
8414 (binary_op, 1)),
8415 0),
8416 (safe_from_p (temp, singleton, 1)
8417 ? temp : NULL_RTX), 0);
8418
8419 if (result)
8420 {
8421 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8422 return expand_binop (mode, boptab, op1, result, temp,
8423 unsignedp, OPTAB_LIB_WIDEN);
8424 }
8425 }
8426
8427 do_pending_stack_adjust ();
8428 NO_DEFER_POP;
8429 op0 = gen_label_rtx ();
8430
8431 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8432 {
8433 if (temp != 0)
8434 {
8435 /* If the target conflicts with the other operand of the
8436 binary op, we can't use it. Also, we can't use the target
8437 if it is a hard register, because evaluating the condition
8438 might clobber it. */
8439 if ((binary_op
8440 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8441 || (GET_CODE (temp) == REG
8442 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8443 temp = gen_reg_rtx (mode);
8444 store_expr (singleton, temp,
8445 modifier == EXPAND_STACK_PARM ? 2 : 0);
8446 }
8447 else
8448 expand_expr (singleton,
8449 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8450 if (singleton == TREE_OPERAND (exp, 1))
8451 jumpif (TREE_OPERAND (exp, 0), op0);
8452 else
8453 jumpifnot (TREE_OPERAND (exp, 0), op0);
8454
8455 start_cleanup_deferral ();
8456 if (binary_op && temp == 0)
8457 /* Just touch the other operand. */
8458 expand_expr (TREE_OPERAND (binary_op, 1),
8459 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8460 else if (binary_op)
8461 store_expr (build (TREE_CODE (binary_op), type,
8462 make_tree (type, temp),
8463 TREE_OPERAND (binary_op, 1)),
8464 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8465 else
8466 store_expr (build1 (TREE_CODE (unary_op), type,
8467 make_tree (type, temp)),
8468 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8469 op1 = op0;
8470 }
8471 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8472 comparison operator. If we have one of these cases, set the
8473 output to A, branch on A (cse will merge these two references),
8474 then set the output to FOO. */
8475 else if (temp
8476 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8477 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8478 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8479 TREE_OPERAND (exp, 1), 0)
8480 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8481 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8482 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8483 {
8484 if (GET_CODE (temp) == REG
8485 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8486 temp = gen_reg_rtx (mode);
8487 store_expr (TREE_OPERAND (exp, 1), temp,
8488 modifier == EXPAND_STACK_PARM ? 2 : 0);
8489 jumpif (TREE_OPERAND (exp, 0), op0);
8490
8491 start_cleanup_deferral ();
8492 if (TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8493 store_expr (TREE_OPERAND (exp, 2), temp,
8494 modifier == EXPAND_STACK_PARM ? 2 : 0);
8495 else
8496 expand_expr (TREE_OPERAND (exp, 2),
8497 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8498 op1 = op0;
8499 }
8500 else if (temp
8501 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8502 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8503 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8504 TREE_OPERAND (exp, 2), 0)
8505 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8506 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8507 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8508 {
8509 if (GET_CODE (temp) == REG
8510 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8511 temp = gen_reg_rtx (mode);
8512 store_expr (TREE_OPERAND (exp, 2), temp,
8513 modifier == EXPAND_STACK_PARM ? 2 : 0);
8514 jumpifnot (TREE_OPERAND (exp, 0), op0);
8515
8516 start_cleanup_deferral ();
8517 if (TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8518 store_expr (TREE_OPERAND (exp, 1), temp,
8519 modifier == EXPAND_STACK_PARM ? 2 : 0);
8520 else
8521 expand_expr (TREE_OPERAND (exp, 1),
8522 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8523 op1 = op0;
8524 }
8525 else
8526 {
8527 op1 = gen_label_rtx ();
8528 jumpifnot (TREE_OPERAND (exp, 0), op0);
8529
8530 start_cleanup_deferral ();
8531
8532 /* One branch of the cond can be void, if it never returns. For
8533 example A ? throw : E */
8534 if (temp != 0
8535 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8536 store_expr (TREE_OPERAND (exp, 1), temp,
8537 modifier == EXPAND_STACK_PARM ? 2 : 0);
8538 else
8539 expand_expr (TREE_OPERAND (exp, 1),
8540 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8541 end_cleanup_deferral ();
8542 emit_queue ();
8543 emit_jump_insn (gen_jump (op1));
8544 emit_barrier ();
8545 emit_label (op0);
8546 start_cleanup_deferral ();
8547 if (temp != 0
8548 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8549 store_expr (TREE_OPERAND (exp, 2), temp,
8550 modifier == EXPAND_STACK_PARM ? 2 : 0);
8551 else
8552 expand_expr (TREE_OPERAND (exp, 2),
8553 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8554 }
8555
8556 end_cleanup_deferral ();
8557
8558 emit_queue ();
8559 emit_label (op1);
8560 OK_DEFER_POP;
8561
8562 return temp;
8563 }
8564
8565 case TARGET_EXPR:
8566 {
8567 /* Something needs to be initialized, but we didn't know
8568 where that thing was when building the tree. For example,
8569 it could be the return value of a function, or a parameter
8570 to a function which is laid down on the stack, or a temporary
8571 variable which must be passed by reference.
8572
8573 We guarantee that the expression will either be constructed
8574 or copied into our original target. */
8575
8576 tree slot = TREE_OPERAND (exp, 0);
8577 tree cleanups = NULL_TREE;
8578 tree exp1;
8579
8580 if (TREE_CODE (slot) != VAR_DECL)
8581 abort ();
8582
8583 if (! ignore)
8584 target = original_target;
8585
8586 /* Set this here so that if we get a target that refers to a
8587 register variable that's already been used, put_reg_into_stack
8588 knows that it should fix up those uses. */
8589 TREE_USED (slot) = 1;
8590
8591 if (target == 0)
8592 {
8593 if (DECL_RTL_SET_P (slot))
8594 {
8595 target = DECL_RTL (slot);
8596 /* If we have already expanded the slot, don't do
8597 it again. (mrs) */
8598 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8599 return target;
8600 }
8601 else
8602 {
8603 target = assign_temp (type, 2, 0, 1);
8604 /* All temp slots at this level must not conflict. */
8605 preserve_temp_slots (target);
8606 SET_DECL_RTL (slot, target);
8607 if (TREE_ADDRESSABLE (slot))
8608 put_var_into_stack (slot, /*rescan=*/false);
8609
8610 /* Since SLOT is not known to the called function
8611 to belong to its stack frame, we must build an explicit
8612 cleanup. This case occurs when we must build up a reference
8613 to pass the reference as an argument. In this case,
8614 it is very likely that such a reference need not be
8615 built here. */
8616
8617 if (TREE_OPERAND (exp, 2) == 0)
8618 TREE_OPERAND (exp, 2)
8619 = (*lang_hooks.maybe_build_cleanup) (slot);
8620 cleanups = TREE_OPERAND (exp, 2);
8621 }
8622 }
8623 else
8624 {
8625 /* This case does occur when expanding a parameter which
8626 needs to be constructed on the stack. The target
8627 is the actual stack address that we want to initialize.
8628 The function we call will perform the cleanup in this case. */
8629
8630 /* If we have already assigned it space, use that space,
8631 not the target that we were passed in, as our target
8632 parameter is only a hint. */
8633 if (DECL_RTL_SET_P (slot))
8634 {
8635 target = DECL_RTL (slot);
8636 /* If we have already expanded the slot, don't do
8637 it again. (mrs) */
8638 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8639 return target;
8640 }
8641 else
8642 {
8643 SET_DECL_RTL (slot, target);
8644 /* If we must have an addressable slot, then make sure that
8645 the RTL that we just stored in slot is OK. */
8646 if (TREE_ADDRESSABLE (slot))
8647 put_var_into_stack (slot, /*rescan=*/true);
8648 }
8649 }
8650
8651 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8652 /* Mark it as expanded. */
8653 TREE_OPERAND (exp, 1) = NULL_TREE;
8654
8655 store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
8656
8657 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
8658
8659 return target;
8660 }
8661
8662 case INIT_EXPR:
8663 {
8664 tree lhs = TREE_OPERAND (exp, 0);
8665 tree rhs = TREE_OPERAND (exp, 1);
8666
8667 temp = expand_assignment (lhs, rhs, ! ignore);
8668 return temp;
8669 }
8670
8671 case MODIFY_EXPR:
8672 {
8673 /* If lhs is complex, expand calls in rhs before computing it.
8674 That's so we don't compute a pointer and save it over a
8675 call. If lhs is simple, compute it first so we can give it
8676 as a target if the rhs is just a call. This avoids an
8677 extra temp and copy and that prevents a partial-subsumption
8678 which makes bad code. Actually we could treat
8679 component_ref's of vars like vars. */
8680
8681 tree lhs = TREE_OPERAND (exp, 0);
8682 tree rhs = TREE_OPERAND (exp, 1);
8683
8684 temp = 0;
8685
8686 /* Check for |= or &= of a bitfield of size one into another bitfield
8687 of size 1. In this case, (unless we need the result of the
8688 assignment) we can do this more efficiently with a
8689 test followed by an assignment, if necessary.
8690
8691 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8692 things change so we do, this code should be enhanced to
8693 support it. */
8694 if (ignore
8695 && TREE_CODE (lhs) == COMPONENT_REF
8696 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8697 || TREE_CODE (rhs) == BIT_AND_EXPR)
8698 && TREE_OPERAND (rhs, 0) == lhs
8699 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8700 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8701 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8702 {
8703 rtx label = gen_label_rtx ();
8704
8705 do_jump (TREE_OPERAND (rhs, 1),
8706 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8707 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8708 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8709 (TREE_CODE (rhs) == BIT_IOR_EXPR
8710 ? integer_one_node
8711 : integer_zero_node)),
8712 0);
8713 do_pending_stack_adjust ();
8714 emit_label (label);
8715 return const0_rtx;
8716 }
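/* Editor's illustrative sketch (not part of the original source): for a
   statement like `s.a |= s.b;' where both fields are one bit wide and the
   value of the assignment is unused, the special case above emits roughly
       if (!s.b) goto L;  s.a = 1;  L:;
   (and the dual `if (s.b) goto L;  s.a = 0;  L:;' for `&='), avoiding a
   read-modify-write of the destination bitfield.  */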
8717
8718 temp = expand_assignment (lhs, rhs, ! ignore);
8719
8720 return temp;
8721 }
8722
8723 case RETURN_EXPR:
8724 if (!TREE_OPERAND (exp, 0))
8725 expand_null_return ();
8726 else
8727 expand_return (TREE_OPERAND (exp, 0));
8728 return const0_rtx;
8729
8730 case PREINCREMENT_EXPR:
8731 case PREDECREMENT_EXPR:
8732 return expand_increment (exp, 0, ignore);
8733
8734 case POSTINCREMENT_EXPR:
8735 case POSTDECREMENT_EXPR:
8736 /* Faster to treat as pre-increment if result is not used. */
8737 return expand_increment (exp, ! ignore, ignore);
8738
8739 case ADDR_EXPR:
8740 if (modifier == EXPAND_STACK_PARM)
8741 target = 0;
8742 /* Are we taking the address of a nested function? */
8743 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8744 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8745 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8746 && ! TREE_STATIC (exp))
8747 {
8748 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8749 op0 = force_operand (op0, target);
8750 }
8751 /* If we are taking the address of something erroneous, just
8752 return a zero. */
8753 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8754 return const0_rtx;
8755 /* If we are taking the address of a constant and are at the
8756 top level, we have to use output_constant_def since we can't
8757 call force_const_mem at top level. */
8758 else if (cfun == 0
8759 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8760 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8761 == 'c')))
8762 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8763 else
8764 {
8765 /* We make sure to pass const0_rtx down if we came in with
8766 ignore set, to avoid doing the cleanups twice for something. */
8767 op0 = expand_expr (TREE_OPERAND (exp, 0),
8768 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8769 (modifier == EXPAND_INITIALIZER
8770 ? modifier : EXPAND_CONST_ADDRESS));
8771
8772 /* If we are going to ignore the result, OP0 will have been set
8773 to const0_rtx, so just return it. Don't get confused and
8774 think we are taking the address of the constant. */
8775 if (ignore)
8776 return op0;
8777
8778 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8779 clever and returns a REG when given a MEM. */
8780 op0 = protect_from_queue (op0, 1);
8781
8782 /* We would like the object in memory. If it is a constant, we can
8783 have it be statically allocated into memory. For a non-constant,
8784 we need to allocate some memory and store the value into it. */
8785
8786 if (CONSTANT_P (op0))
8787 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8788 op0);
8789 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8790 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8791 || GET_CODE (op0) == PARALLEL || GET_CODE (op0) == LO_SUM)
8792 {
8793 /* If the operand is a SAVE_EXPR, we can deal with this by
8794 forcing the SAVE_EXPR into memory. */
8795 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
8796 {
8797 put_var_into_stack (TREE_OPERAND (exp, 0),
8798 /*rescan=*/true);
8799 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
8800 }
8801 else
8802 {
8803 /* If this object is in a register, it can't be BLKmode. */
8804 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8805 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8806
8807 if (GET_CODE (op0) == PARALLEL)
8808 /* Handle calls that pass values in multiple
8809 non-contiguous locations. The Irix 6 ABI has examples
8810 of this. */
8811 emit_group_store (memloc, op0, inner_type,
8812 int_size_in_bytes (inner_type));
8813 else
8814 emit_move_insn (memloc, op0);
8815
8816 op0 = memloc;
8817 }
8818 }
8819
8820 if (GET_CODE (op0) != MEM)
8821 abort ();
8822
8823 mark_temp_addr_taken (op0);
8824 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8825 {
8826 op0 = XEXP (op0, 0);
8827 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
8828 op0 = convert_memory_address (ptr_mode, op0);
8829 return op0;
8830 }
8831
8832 /* If OP0 is not aligned at least as much as the type requires, we
8833 need to make a temporary, copy OP0 to it, and take the address of
8834 the temporary. We want to use the alignment of the type, not of
8835 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8836 the test for BLKmode means that can't happen. The test for
8837 BLKmode is because we never make mis-aligned MEMs with
8838 non-BLKmode.
8839
8840 We don't need to do this at all if the machine doesn't have
8841 strict alignment. */
8842 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8843 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8844 > MEM_ALIGN (op0))
8845 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8846 {
8847 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8848 rtx new;
8849
8850 if (TYPE_ALIGN_OK (inner_type))
8851 abort ();
8852
8853 if (TREE_ADDRESSABLE (inner_type))
8854 {
8855 /* We can't make a bitwise copy of this object, so fail. */
8856 error ("cannot take the address of an unaligned member");
8857 return const0_rtx;
8858 }
8859
8860 new = assign_stack_temp_for_type
8861 (TYPE_MODE (inner_type),
8862 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8863 : int_size_in_bytes (inner_type),
8864 1, build_qualified_type (inner_type,
8865 (TYPE_QUALS (inner_type)
8866 | TYPE_QUAL_CONST)));
8867
8868 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
8869 (modifier == EXPAND_STACK_PARM
8870 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
8871
8872 op0 = new;
8873 }
8874
8875 op0 = force_operand (XEXP (op0, 0), target);
8876 }
8877
8878 if (flag_force_addr
8879 && GET_CODE (op0) != REG
8880 && modifier != EXPAND_CONST_ADDRESS
8881 && modifier != EXPAND_INITIALIZER
8882 && modifier != EXPAND_SUM)
8883 op0 = force_reg (Pmode, op0);
8884
8885 if (GET_CODE (op0) == REG
8886 && ! REG_USERVAR_P (op0))
8887 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8888
8889 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
8890 op0 = convert_memory_address (ptr_mode, op0);
8891
8892 return op0;
8893
8894 case ENTRY_VALUE_EXPR:
8895 abort ();
8896
8897 /* COMPLEX type for Extended Pascal & Fortran */
8898 case COMPLEX_EXPR:
8899 {
8900 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8901 rtx insns;
8902
8903 /* Get the rtx code of the operands. */
8904 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8905 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8906
8907 if (! target)
8908 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8909
8910 start_sequence ();
8911
8912 /* Move the real (op0) and imaginary (op1) parts to their location. */
8913 emit_move_insn (gen_realpart (mode, target), op0);
8914 emit_move_insn (gen_imagpart (mode, target), op1);
8915
8916 insns = get_insns ();
8917 end_sequence ();
8918
8919 /* Complex construction should appear as a single unit. */
8920 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8921 each with a separate pseudo as destination.
8922 It's not correct for flow to treat them as a unit. */
8923 if (GET_CODE (target) != CONCAT)
8924 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8925 else
8926 emit_insn (insns);
8927
8928 return target;
8929 }
8930
8931 case REALPART_EXPR:
8932 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8933 return gen_realpart (mode, op0);
8934
8935 case IMAGPART_EXPR:
8936 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8937 return gen_imagpart (mode, op0);
8938
8939 case CONJ_EXPR:
8940 {
8941 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8942 rtx imag_t;
8943 rtx insns;
8944
8945 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8946
8947 if (! target)
8948 target = gen_reg_rtx (mode);
8949
8950 start_sequence ();
8951
8952 /* Store the realpart and the negated imagpart to target. */
8953 emit_move_insn (gen_realpart (partmode, target),
8954 gen_realpart (partmode, op0));
8955
8956 imag_t = gen_imagpart (partmode, target);
8957 temp = expand_unop (partmode,
8958 ! unsignedp && flag_trapv
8959 && (GET_MODE_CLASS(partmode) == MODE_INT)
8960 ? negv_optab : neg_optab,
8961 gen_imagpart (partmode, op0), imag_t, 0);
8962 if (temp != imag_t)
8963 emit_move_insn (imag_t, temp);
8964
8965 insns = get_insns ();
8966 end_sequence ();
8967
8968 /* Conjugate should appear as a single unit.
8969 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8970 each with a separate pseudo as destination.
8971 It's not correct for flow to treat them as a unit. */
8972 if (GET_CODE (target) != CONCAT)
8973 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8974 else
8975 emit_insn (insns);
8976
8977 return target;
8978 }
8979
8980 case TRY_CATCH_EXPR:
8981 {
8982 tree handler = TREE_OPERAND (exp, 1);
8983
8984 expand_eh_region_start ();
8985
8986 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8987
8988 expand_eh_region_end_cleanup (handler);
8989
8990 return op0;
8991 }
8992
8993 case TRY_FINALLY_EXPR:
8994 {
8995 tree try_block = TREE_OPERAND (exp, 0);
8996 tree finally_block = TREE_OPERAND (exp, 1);
8997
8998 if (!optimize || unsafe_for_reeval (finally_block) > 1)
8999 {
9000 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
9001 is not sufficient, so we cannot expand the block twice.
9002 So we play games with GOTO_SUBROUTINE_EXPR to let us
9003 expand the thing only once. */
9004 /* When not optimizing, we go ahead with this form since
9005 (1) user breakpoints operate more predictably without
9006 code duplication, and
9007 (2) we're not running any of the global optimizers
9008 that would explode in time/space with the highly
9009 connected CFG created by the indirect branching. */
9010
9011 rtx finally_label = gen_label_rtx ();
9012 rtx done_label = gen_label_rtx ();
9013 rtx return_link = gen_reg_rtx (Pmode);
9014 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
9015 (tree) finally_label, (tree) return_link);
9016 TREE_SIDE_EFFECTS (cleanup) = 1;
9017
9018 /* Start a new binding layer that will keep track of all cleanup
9019 actions to be performed. */
9020 expand_start_bindings (2);
9021 target_temp_slot_level = temp_slot_level;
9022
9023 expand_decl_cleanup (NULL_TREE, cleanup);
9024 op0 = expand_expr (try_block, target, tmode, modifier);
9025
9026 preserve_temp_slots (op0);
9027 expand_end_bindings (NULL_TREE, 0, 0);
9028 emit_jump (done_label);
9029 emit_label (finally_label);
9030 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
9031 emit_indirect_jump (return_link);
9032 emit_label (done_label);
9033 }
9034 else
9035 {
9036 expand_start_bindings (2);
9037 target_temp_slot_level = temp_slot_level;
9038
9039 expand_decl_cleanup (NULL_TREE, finally_block);
9040 op0 = expand_expr (try_block, target, tmode, modifier);
9041
9042 preserve_temp_slots (op0);
9043 expand_end_bindings (NULL_TREE, 0, 0);
9044 }
9045
9046 return op0;
9047 }
9048
9049 case GOTO_SUBROUTINE_EXPR:
9050 {
9051 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9052 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9053 rtx return_address = gen_label_rtx ();
9054 emit_move_insn (return_link,
9055 gen_rtx_LABEL_REF (Pmode, return_address));
9056 emit_jump (subr);
9057 emit_label (return_address);
9058 return const0_rtx;
9059 }
9060
9061 case VA_ARG_EXPR:
9062 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9063
9064 case EXC_PTR_EXPR:
9065 return get_exception_pointer (cfun);
9066
9067 case FDESC_EXPR:
9068 /* Function descriptors are not valid except for as
9069 initialization constants, and should not be expanded. */
9070 abort ();
9071
9072 default:
9073 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
9074 }
9075
9076 /* Here to do an ordinary binary operator, generating an instruction
9077 from the optab already placed in `this_optab'. */
9078 binop:
9079 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9080 subtarget, &op0, &op1, 0);
9081 binop2:
9082 if (modifier == EXPAND_STACK_PARM)
9083 target = 0;
9084 temp = expand_binop (mode, this_optab, op0, op1, target,
9085 unsignedp, OPTAB_LIB_WIDEN);
9086 if (temp == 0)
9087 abort ();
9088 return temp;
9089 }
9090 \f
9091 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9092 when applied to the address of EXP produces an address known to be
9093 aligned to more than BIGGEST_ALIGNMENT. */
9094
9095 static int
9096 is_aligning_offset (tree offset, tree exp)
9097 {
9098 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9099 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9100 || TREE_CODE (offset) == NOP_EXPR
9101 || TREE_CODE (offset) == CONVERT_EXPR
9102 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9103 offset = TREE_OPERAND (offset, 0);
9104
9105 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9106 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9107 if (TREE_CODE (offset) != BIT_AND_EXPR
9108 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9109 || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
9110 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9111 return 0;
9112
9113 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9114 It must be NEGATE_EXPR. Then strip any more conversions. */
9115 offset = TREE_OPERAND (offset, 0);
9116 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9117 || TREE_CODE (offset) == NOP_EXPR
9118 || TREE_CODE (offset) == CONVERT_EXPR)
9119 offset = TREE_OPERAND (offset, 0);
9120
9121 if (TREE_CODE (offset) != NEGATE_EXPR)
9122 return 0;
9123
9124 offset = TREE_OPERAND (offset, 0);
9125 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9126 || TREE_CODE (offset) == NOP_EXPR
9127 || TREE_CODE (offset) == CONVERT_EXPR)
9128 offset = TREE_OPERAND (offset, 0);
9129
9130 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9131 whose type is the same as EXP. */
9132 return (TREE_CODE (offset) == ADDR_EXPR
9133 && (TREE_OPERAND (offset, 0) == exp
9134 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9135 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9136 == TREE_TYPE (exp)))));
9137 }
9138 \f
9139 /* Return the tree node if an ARG corresponds to a string constant or zero
9140 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9141 in bytes within the string that ARG is accessing. The type of the
9142 offset will be `sizetype'. */
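/* Editor's illustrative example (not part of the original source): given the
   tree for `&"hello"[3]', typically a PLUS_EXPR of an ADDR_EXPR of the
   STRING_CST "hello" and the constant 3, this returns the STRING_CST and
   sets *PTR_OFFSET to 3; for a plain `&"hello"' the offset is zero.  */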
9143
9144 tree
9145 string_constant (tree arg, tree *ptr_offset)
9146 {
9147 STRIP_NOPS (arg);
9148
9149 if (TREE_CODE (arg) == ADDR_EXPR
9150 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9151 {
9152 *ptr_offset = size_zero_node;
9153 return TREE_OPERAND (arg, 0);
9154 }
9155 else if (TREE_CODE (arg) == PLUS_EXPR)
9156 {
9157 tree arg0 = TREE_OPERAND (arg, 0);
9158 tree arg1 = TREE_OPERAND (arg, 1);
9159
9160 STRIP_NOPS (arg0);
9161 STRIP_NOPS (arg1);
9162
9163 if (TREE_CODE (arg0) == ADDR_EXPR
9164 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9165 {
9166 *ptr_offset = convert (sizetype, arg1);
9167 return TREE_OPERAND (arg0, 0);
9168 }
9169 else if (TREE_CODE (arg1) == ADDR_EXPR
9170 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9171 {
9172 *ptr_offset = convert (sizetype, arg0);
9173 return TREE_OPERAND (arg1, 0);
9174 }
9175 }
9176
9177 return 0;
9178 }
9179 \f
9180 /* Expand code for a post- or pre- increment or decrement
9181 and return the RTX for the result.
9182 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9183
9184 static rtx
9185 expand_increment (tree exp, int post, int ignore)
9186 {
9187 rtx op0, op1;
9188 rtx temp, value;
9189 tree incremented = TREE_OPERAND (exp, 0);
9190 optab this_optab = add_optab;
9191 int icode;
9192 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9193 int op0_is_copy = 0;
9194 int single_insn = 0;
9195 /* 1 means we can't store into OP0 directly,
9196 because it is a subreg narrower than a word,
9197 and we don't dare clobber the rest of the word. */
9198 int bad_subreg = 0;
9199
9200 /* Stabilize any component ref that might need to be
9201 evaluated more than once below. */
9202 if (!post
9203 || TREE_CODE (incremented) == BIT_FIELD_REF
9204 || (TREE_CODE (incremented) == COMPONENT_REF
9205 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9206 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9207 incremented = stabilize_reference (incremented);
9208 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9209 ones into save exprs so that they don't accidentally get evaluated
9210 more than once by the code below. */
9211 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9212 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9213 incremented = save_expr (incremented);
9214
9215 /* Compute the operands as RTX.
9216 Note whether OP0 is the actual lvalue or a copy of it:
9217 I believe it is a copy iff it is a register or subreg
9218 and insns were generated in computing it. */
9219
9220 temp = get_last_insn ();
9221 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9222
9223 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9224 in place but instead must do sign- or zero-extension during assignment,
9225 so we copy it into a new register and let the code below use it as
9226 a copy.
9227
9228 Note that we can safely modify this SUBREG since it is known not to be
9229 shared (it was made by the expand_expr call above). */
9230
9231 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9232 {
9233 if (post)
9234 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9235 else
9236 bad_subreg = 1;
9237 }
9238 else if (GET_CODE (op0) == SUBREG
9239 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9240 {
9241 /* We cannot increment this SUBREG in place. If we are
9242 post-incrementing, get a copy of the old value. Otherwise,
9243 just mark that we cannot increment in place. */
9244 if (post)
9245 op0 = copy_to_reg (op0);
9246 else
9247 bad_subreg = 1;
9248 }
9249
9250 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9251 && temp != get_last_insn ());
9252 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9253
9254 /* Decide whether incrementing or decrementing. */
9255 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9256 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9257 this_optab = sub_optab;
9258
9259 /* Convert decrement by a constant into a negative increment. */
9260 if (this_optab == sub_optab
9261 && GET_CODE (op1) == CONST_INT)
9262 {
9263 op1 = GEN_INT (-INTVAL (op1));
9264 this_optab = add_optab;
9265 }
9266
9267 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9268 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9269
9270 /* For a preincrement, see if we can do this with a single instruction. */
9271 if (!post)
9272 {
9273 icode = (int) this_optab->handlers[(int) mode].insn_code;
9274 if (icode != (int) CODE_FOR_nothing
9275 /* Make sure that OP0 is valid for operands 0 and 1
9276 of the insn we want to queue. */
9277 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9278 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9279 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9280 single_insn = 1;
9281 }
9282
9283 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9284 then we cannot just increment OP0. We must therefore contrive to
9285 increment the original value. Then, for postincrement, we can return
9286 OP0 since it is a copy of the old value. For preincrement, expand here
9287 unless we can do it with a single insn.
9288
9289 Likewise if storing directly into OP0 would clobber high bits
9290 we need to preserve (bad_subreg). */
9291 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9292 {
9293 /* This is the easiest way to increment the value wherever it is.
9294 Problems with multiple evaluation of INCREMENTED are prevented
9295 because either (1) it is a component_ref or preincrement,
9296 in which case it was stabilized above, or (2) it is an array_ref
9297 with constant index in an array in a register, which is
9298 safe to reevaluate. */
9299 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9300 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9301 ? MINUS_EXPR : PLUS_EXPR),
9302 TREE_TYPE (exp),
9303 incremented,
9304 TREE_OPERAND (exp, 1));
9305
9306 while (TREE_CODE (incremented) == NOP_EXPR
9307 || TREE_CODE (incremented) == CONVERT_EXPR)
9308 {
9309 newexp = convert (TREE_TYPE (incremented), newexp);
9310 incremented = TREE_OPERAND (incremented, 0);
9311 }
9312
9313 temp = expand_assignment (incremented, newexp, ! post && ! ignore);
9314 return post ? op0 : temp;
9315 }
9316
9317 if (post)
9318 {
9319 /* We have a true reference to the value in OP0.
9320 If there is an insn to add or subtract in this mode, queue it.
9321 Queueing the increment insn avoids the register shuffling
9322 that often results if we must increment now and first save
9323 the old value for subsequent use. */
9324
9325 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9326 op0 = stabilize (op0);
9327 #endif
9328
9329 icode = (int) this_optab->handlers[(int) mode].insn_code;
9330 if (icode != (int) CODE_FOR_nothing
9331 /* Make sure that OP0 is valid for operands 0 and 1
9332 of the insn we want to queue. */
9333 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9334 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9335 {
9336 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9337 op1 = force_reg (mode, op1);
9338
9339 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9340 }
9341 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9342 {
9343 rtx addr = (general_operand (XEXP (op0, 0), mode)
9344 ? force_reg (Pmode, XEXP (op0, 0))
9345 : copy_to_reg (XEXP (op0, 0)));
9346 rtx temp, result;
9347
9348 op0 = replace_equiv_address (op0, addr);
9349 temp = force_reg (GET_MODE (op0), op0);
9350 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9351 op1 = force_reg (mode, op1);
9352
9353 /* The increment queue is LIFO, thus we have to `queue'
9354 the instructions in reverse order. */
9355 enqueue_insn (op0, gen_move_insn (op0, temp));
9356 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9357 return result;
9358 }
9359 }
9360
9361 /* Preincrement, or we can't increment with one simple insn. */
9362 if (post)
9363 /* Save a copy of the value before inc or dec, to return it later. */
9364 temp = value = copy_to_reg (op0);
9365 else
9366 /* Arrange to return the incremented value. */
9367 /* Copy the rtx because expand_binop will protect from the queue,
9368 and the results of that would be invalid for us to return
9369 if our caller does emit_queue before using our result. */
9370 temp = copy_rtx (value = op0);
9371
9372 /* Increment however we can. */
9373 op1 = expand_binop (mode, this_optab, value, op1, op0,
9374 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9375
9376 /* Make sure the value is stored into OP0. */
9377 if (op1 != op0)
9378 emit_move_insn (op0, op1);
9379
9380 return temp;
9381 }
9382 \f
9383 /* Generate code to calculate EXP using a store-flag instruction
9384 and return an rtx for the result. EXP is either a comparison
9385 or a TRUTH_NOT_EXPR whose operand is a comparison.
9386
9387 If TARGET is nonzero, store the result there if convenient.
9388
9389 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9390 cheap.
9391
9392 Return zero if there is no suitable set-flag instruction
9393 available on this machine.
9394
9395 Once expand_expr has been called on the arguments of the comparison,
9396 we are committed to doing the store flag, since it is not safe to
9397 re-evaluate the expression. We emit the store-flag insn by calling
9398 emit_store_flag, but only expand the arguments if we have a reason
9399 to believe that emit_store_flag will be successful. If we think that
9400 it will, but it isn't, we have to simulate the store-flag with a
9401 set/jump/set sequence. */
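/* Editor's illustrative sketch (not part of the original source): the
   simulated set/jump/set sequence at the end of this function is, for the
   non-inverted case, roughly
       target = 1;  if (op0 <cond> op1) goto L;  target = 0;  L:;
   with the two constants swapped when INVERT is set.  */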
9402
9403 static rtx
9404 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
9405 {
9406 enum rtx_code code;
9407 tree arg0, arg1, type;
9408 tree tem;
9409 enum machine_mode operand_mode;
9410 int invert = 0;
9411 int unsignedp;
9412 rtx op0, op1;
9413 enum insn_code icode;
9414 rtx subtarget = target;
9415 rtx result, label;
9416
9417 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9418 result at the end. We can't simply invert the test since it would
9419 have already been inverted if it were valid. This case occurs for
9420 some floating-point comparisons. */
9421
9422 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9423 invert = 1, exp = TREE_OPERAND (exp, 0);
9424
9425 arg0 = TREE_OPERAND (exp, 0);
9426 arg1 = TREE_OPERAND (exp, 1);
9427
9428 /* Don't crash if the comparison was erroneous. */
9429 if (arg0 == error_mark_node || arg1 == error_mark_node)
9430 return const0_rtx;
9431
9432 type = TREE_TYPE (arg0);
9433 operand_mode = TYPE_MODE (type);
9434 unsignedp = TREE_UNSIGNED (type);
9435
9436 /* We won't bother with BLKmode store-flag operations because it would mean
9437 passing a lot of information to emit_store_flag. */
9438 if (operand_mode == BLKmode)
9439 return 0;
9440
9441 /* We won't bother with store-flag operations involving function pointers
9442 when function pointers must be canonicalized before comparisons. */
9443 #ifdef HAVE_canonicalize_funcptr_for_compare
9444 if (HAVE_canonicalize_funcptr_for_compare
9445 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9446 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9447 == FUNCTION_TYPE))
9448 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9449 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9450 == FUNCTION_TYPE))))
9451 return 0;
9452 #endif
9453
9454 STRIP_NOPS (arg0);
9455 STRIP_NOPS (arg1);
9456
9457 /* Get the rtx comparison code to use. We know that EXP is a comparison
9458 operation of some type. Some comparisons against 1 and -1 can be
9459 converted to comparisons with zero. Do so here so that the tests
9460 below will be aware that we have a comparison with zero. These
9461 tests will not catch constants in the first operand, but constants
9462 are rarely passed as the first operand. */
9463
9464 switch (TREE_CODE (exp))
9465 {
9466 case EQ_EXPR:
9467 code = EQ;
9468 break;
9469 case NE_EXPR:
9470 code = NE;
9471 break;
9472 case LT_EXPR:
9473 if (integer_onep (arg1))
9474 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9475 else
9476 code = unsignedp ? LTU : LT;
9477 break;
9478 case LE_EXPR:
9479 if (! unsignedp && integer_all_onesp (arg1))
9480 arg1 = integer_zero_node, code = LT;
9481 else
9482 code = unsignedp ? LEU : LE;
9483 break;
9484 case GT_EXPR:
9485 if (! unsignedp && integer_all_onesp (arg1))
9486 arg1 = integer_zero_node, code = GE;
9487 else
9488 code = unsignedp ? GTU : GT;
9489 break;
9490 case GE_EXPR:
9491 if (integer_onep (arg1))
9492 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9493 else
9494 code = unsignedp ? GEU : GE;
9495 break;
9496
9497 case UNORDERED_EXPR:
9498 code = UNORDERED;
9499 break;
9500 case ORDERED_EXPR:
9501 code = ORDERED;
9502 break;
9503 case UNLT_EXPR:
9504 code = UNLT;
9505 break;
9506 case UNLE_EXPR:
9507 code = UNLE;
9508 break;
9509 case UNGT_EXPR:
9510 code = UNGT;
9511 break;
9512 case UNGE_EXPR:
9513 code = UNGE;
9514 break;
9515 case UNEQ_EXPR:
9516 code = UNEQ;
9517 break;
9518
9519 default:
9520 abort ();
9521 }
9522
9523 /* Put a constant second. */
9524 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9525 {
9526 tem = arg0; arg0 = arg1; arg1 = tem;
9527 code = swap_condition (code);
9528 }
9529
9530 /* If this is an equality or inequality test of a single bit, we can
9531 do this by shifting the bit being tested to the low-order bit and
9532 masking the result with the constant 1. If the condition was EQ,
9533 we xor it with 1. This does not require an scc insn and is faster
9534 than an scc insn even if we have it.
9535
9536 The code to make this transformation was moved into fold_single_bit_test,
9537 so we just call into the folder and expand its result. */
9538
9539 if ((code == NE || code == EQ)
9540 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9541 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9542 {
9543 tree type = (*lang_hooks.types.type_for_mode) (mode, unsignedp);
9544 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9545 arg0, arg1, type),
9546 target, VOIDmode, EXPAND_NORMAL);
9547 }
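/* Editor's illustrative example (not part of the original source): a test
   such as `(x & 4) != 0' is folded into `(x >> 2) & 1', and the EQ form
   `(x & 4) == 0' into `((x >> 2) & 1) ^ 1', so no scc instruction is
   needed.  */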
9548
9549 /* Now see if we are likely to be able to do this. Return if not. */
9550 if (! can_compare_p (code, operand_mode, ccp_store_flag))
9551 return 0;
9552
9553 icode = setcc_gen_code[(int) code];
9554 if (icode == CODE_FOR_nothing
9555 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9556 {
9557 /* We can only do this if it is one of the special cases that
9558 can be handled without an scc insn. */
9559 if ((code == LT && integer_zerop (arg1))
9560 || (! only_cheap && code == GE && integer_zerop (arg1)))
9561 ;
9562 else if (BRANCH_COST >= 0
9563 && ! only_cheap && (code == NE || code == EQ)
9564 && TREE_CODE (type) != REAL_TYPE
9565 && ((abs_optab->handlers[(int) operand_mode].insn_code
9566 != CODE_FOR_nothing)
9567 || (ffs_optab->handlers[(int) operand_mode].insn_code
9568 != CODE_FOR_nothing)))
9569 ;
9570 else
9571 return 0;
9572 }
9573
9574 if (! get_subtarget (target)
9575 || GET_MODE (subtarget) != operand_mode)
9576 subtarget = 0;
9577
9578 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
9579
9580 if (target == 0)
9581 target = gen_reg_rtx (mode);
9582
9583 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9584 because, if emit_store_flag does anything, it will succeed and
9585 OP0 and OP1 will not be used subsequently. */
9586
9587 result = emit_store_flag (target, code,
9588 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9589 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9590 operand_mode, unsignedp, 1);
9591
9592 if (result)
9593 {
9594 if (invert)
9595 result = expand_binop (mode, xor_optab, result, const1_rtx,
9596 result, 0, OPTAB_LIB_WIDEN);
9597 return result;
9598 }
9599
9600 /* If this failed, we have to do this with set/compare/jump/set code. */
9601 if (GET_CODE (target) != REG
9602 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9603 target = gen_reg_rtx (GET_MODE (target));
9604
9605 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9606 result = compare_from_rtx (op0, op1, code, unsignedp,
9607 operand_mode, NULL_RTX);
9608 if (GET_CODE (result) == CONST_INT)
9609 return (((result == const0_rtx && ! invert)
9610 || (result != const0_rtx && invert))
9611 ? const0_rtx : const1_rtx);
9612
9613 /* The code of RESULT may not match CODE if compare_from_rtx
9614 decided to swap its operands and reverse the original code.
9615
9616 We know that compare_from_rtx returns either a CONST_INT or
9617 a new comparison code, so it is safe to just extract the
9618 code from RESULT. */
9619 code = GET_CODE (result);
9620
9621 label = gen_label_rtx ();
9622 if (bcc_gen_fctn[(int) code] == 0)
9623 abort ();
9624
9625 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9626 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9627 emit_label (label);
9628
9629 return target;
9630 }
9631 \f
9632
9633 /* Stubs in case we haven't got a casesi insn. */
9634 #ifndef HAVE_casesi
9635 # define HAVE_casesi 0
9636 # define gen_casesi(a, b, c, d, e) (0)
9637 # define CODE_FOR_casesi CODE_FOR_nothing
9638 #endif
9639
9640 /* If the machine does not have a case insn that compares the bounds,
9641 this means extra overhead for dispatch tables, which raises the
9642 threshold for using them. */
9643 #ifndef CASE_VALUES_THRESHOLD
9644 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9645 #endif /* CASE_VALUES_THRESHOLD */
9646
9647 unsigned int
9648 case_values_threshold (void)
9649 {
9650 return CASE_VALUES_THRESHOLD;
9651 }
9652
9653 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9654 0 otherwise (i.e. if there is no casesi instruction). */
9655 int
9656 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9657 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
9658 {
9659 enum machine_mode index_mode = SImode;
9660 int index_bits = GET_MODE_BITSIZE (index_mode);
9661 rtx op1, op2, index;
9662 enum machine_mode op_mode;
9663
9664 if (! HAVE_casesi)
9665 return 0;
9666
9667 /* Convert the index to SImode. */
9668 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9669 {
9670 enum machine_mode omode = TYPE_MODE (index_type);
9671 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
9672
9673 /* We must handle the endpoints in the original mode. */
9674 index_expr = build (MINUS_EXPR, index_type,
9675 index_expr, minval);
9676 minval = integer_zero_node;
9677 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9678 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9679 omode, 1, default_label);
9680 /* Now we can safely truncate. */
9681 index = convert_to_mode (index_mode, index, 0);
9682 }
9683 else
9684 {
9685 if (TYPE_MODE (index_type) != index_mode)
9686 {
9687 index_expr = convert ((*lang_hooks.types.type_for_size)
9688 (index_bits, 0), index_expr);
9689 index_type = TREE_TYPE (index_expr);
9690 }
9691
9692 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9693 }
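/* Editor's note (not part of the original source): in the wide-index branch
   above, the range check is done in the original (wider) mode before
   truncating to SImode, so an index whose low-order bits happen to fall
   inside the table is still sent to the default label rather than aliasing
   a valid case value.  */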
9694 emit_queue ();
9695 index = protect_from_queue (index, 0);
9696 do_pending_stack_adjust ();
9697
9698 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9699 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9700 (index, op_mode))
9701 index = copy_to_mode_reg (op_mode, index);
9702
9703 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
9704
9705 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9706 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9707 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
9708 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9709 (op1, op_mode))
9710 op1 = copy_to_mode_reg (op_mode, op1);
9711
9712 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
9713
9714 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9715 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9716 op2, TREE_UNSIGNED (TREE_TYPE (range)));
9717 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9718 (op2, op_mode))
9719 op2 = copy_to_mode_reg (op_mode, op2);
9720
9721 emit_jump_insn (gen_casesi (index, op1, op2,
9722 table_label, default_label));
9723 return 1;
9724 }
9725
9726 /* Attempt to generate a tablejump instruction; same concept. */
9727 #ifndef HAVE_tablejump
9728 #define HAVE_tablejump 0
9729 #define gen_tablejump(x, y) (0)
9730 #endif
9731
9732 /* Subroutine of the next function.
9733
9734 INDEX is the value being switched on, with the lowest value
9735 in the table already subtracted.
9736 MODE is its expected mode (needed if INDEX is constant).
9737 RANGE is the length of the jump table.
9738 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9739
9740 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9741 index value is out of range. */
9742
9743 static void
9744 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9745 rtx default_label)
9746 {
9747 rtx temp, vector;
9748
9749 if (INTVAL (range) > cfun->max_jumptable_ents)
9750 cfun->max_jumptable_ents = INTVAL (range);
9751
9752 /* Do an unsigned comparison (in the proper mode) between the index
9753 expression and the value which represents the length of the range.
9754 Since we just finished subtracting the lower bound of the range
9755 from the index expression, this comparison allows us to simultaneously
9756 check that the original index expression value is both greater than
9757 or equal to the minimum value of the range and less than or equal to
9758 the maximum value of the range. */
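/* Editor's worked example (not part of the original source): with case
   values in [MIN, MAX], the caller has already computed INDEX - MIN.  If the
   original index was below MIN, the unsigned subtraction wraps around to a
   very large value, so the single unsigned comparison against the table
   length below rejects both out-of-range directions at once.  */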
9759
9760 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9761 default_label);
9762
9763 /* If index is in range, it must fit in Pmode.
9764 Convert to Pmode so we can index with it. */
9765 if (mode != Pmode)
9766 index = convert_to_mode (Pmode, index, 1);
9767
9768 /* Don't let a MEM slip through, because then INDEX that comes
9769 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9770 and break_out_memory_refs will go to work on it and mess it up. */
9771 #ifdef PIC_CASE_VECTOR_ADDRESS
9772 if (flag_pic && GET_CODE (index) != REG)
9773 index = copy_to_mode_reg (Pmode, index);
9774 #endif
9775
9776 /* If flag_force_addr were to affect this address
9777 it could interfere with the tricky assumptions made
9778 about addresses that contain label-refs,
9779 which may be valid only very near the tablejump itself. */
9780 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9781 GET_MODE_SIZE, because this indicates how large insns are. The other
9782 uses should all be Pmode, because they are addresses. This code
9783 could fail if addresses and insns are not the same size. */
9784 index = gen_rtx_PLUS (Pmode,
9785 gen_rtx_MULT (Pmode, index,
9786 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9787 gen_rtx_LABEL_REF (Pmode, table_label));
9788 #ifdef PIC_CASE_VECTOR_ADDRESS
9789 if (flag_pic)
9790 index = PIC_CASE_VECTOR_ADDRESS (index);
9791 else
9792 #endif
9793 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9794 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9795 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
9796 RTX_UNCHANGING_P (vector) = 1;
9797 MEM_NOTRAP_P (vector) = 1;
9798 convert_move (temp, vector, 0);
9799
9800 emit_jump_insn (gen_tablejump (temp, table_label));
9801
9802 /* If we are generating PIC code or if the table is PC-relative, the
9803 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9804 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9805 emit_barrier ();
9806 }
9807
9808 int
9809 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9810 rtx table_label, rtx default_label)
9811 {
9812 rtx index;
9813
9814 if (! HAVE_tablejump)
9815 return 0;
9816
9817 index_expr = fold (build (MINUS_EXPR, index_type,
9818 convert (index_type, index_expr),
9819 convert (index_type, minval)));
9820 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9821 emit_queue ();
9822 index = protect_from_queue (index, 0);
9823 do_pending_stack_adjust ();
9824
9825 do_tablejump (index, TYPE_MODE (index_type),
9826 convert_modes (TYPE_MODE (index_type),
9827 TYPE_MODE (TREE_TYPE (range)),
9828 expand_expr (range, NULL_RTX,
9829 VOIDmode, 0),
9830 TREE_UNSIGNED (TREE_TYPE (range))),
9831 table_label, default_label);
9832 return 1;
9833 }
9834
9835 /* Nonzero if the mode is a valid vector mode for this architecture.
9836 This returns nonzero even if there is no hardware support for the
9837 vector mode, but we can emulate with narrower modes. */
9838
9839 int
9840 vector_mode_valid_p (enum machine_mode mode)
9841 {
9842 enum mode_class class = GET_MODE_CLASS (mode);
9843 enum machine_mode innermode;
9844
9845 /* Doh! What's going on? */
9846 if (class != MODE_VECTOR_INT
9847 && class != MODE_VECTOR_FLOAT)
9848 return 0;
9849
9850 /* Hardware support. Woo hoo! */
9851 if (VECTOR_MODE_SUPPORTED_P (mode))
9852 return 1;
9853
9854 innermode = GET_MODE_INNER (mode);
9855
9856 /* We should probably return 1 if requesting V4DI and we have no DI
9857 but do have V2DI, though that case is probably very unlikely. */
9858
9859 /* If we have support for the inner mode, we can safely emulate it.
9860 We may not have V2DI, but we can emulate with a pair of DIs. */
9861 return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
9862 }
9863
9864 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
9865 static rtx
9866 const_vector_from_tree (tree exp)
9867 {
9868 rtvec v;
9869 int units, i;
9870 tree link, elt;
9871 enum machine_mode inner, mode;
9872
9873 mode = TYPE_MODE (TREE_TYPE (exp));
9874
9875 if (is_zeros_p (exp))
9876 return CONST0_RTX (mode);
9877
9878 units = GET_MODE_NUNITS (mode);
9879 inner = GET_MODE_INNER (mode);
9880
9881 v = rtvec_alloc (units);
9882
9883 link = TREE_VECTOR_CST_ELTS (exp);
9884 for (i = 0; link; link = TREE_CHAIN (link), ++i)
9885 {
9886 elt = TREE_VALUE (link);
9887
9888 if (TREE_CODE (elt) == REAL_CST)
9889 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
9890 inner);
9891 else
9892 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
9893 TREE_INT_CST_HIGH (elt),
9894 inner);
9895 }
9896
9897 /* Initialize remaining elements to 0. */
9898 for (; i < units; ++i)
9899 RTVEC_ELT (v, i) = CONST0_RTX (inner);
9900
9901 return gen_rtx_raw_CONST_VECTOR (mode, v);
9902 }
9903
9904 #include "gt-expr.h"