gcc/expr.c
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "optabs.h"
40 #include "libfuncs.h"
41 #include "recog.h"
42 #include "reload.h"
43 #include "output.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "langhooks.h"
48 #include "intl.h"
49 #include "tm_p.h"
50
51 /* Decide whether a function's arguments should be processed
52 from first to last or from last to first.
53
54 They should if the stack and args grow in opposite directions, but
55 only if we have push insns. */
56
57 #ifdef PUSH_ROUNDING
58
59 #ifndef PUSH_ARGS_REVERSED
60 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
61 #define PUSH_ARGS_REVERSED /* If it's last to first. */
62 #endif
63 #endif
64
65 #endif
66
67 #ifndef STACK_PUSH_CODE
68 #ifdef STACK_GROWS_DOWNWARD
69 #define STACK_PUSH_CODE PRE_DEC
70 #else
71 #define STACK_PUSH_CODE PRE_INC
72 #endif
73 #endif
74
75 /* Assume that case vectors are not pc-relative. */
76 #ifndef CASE_VECTOR_PC_RELATIVE
77 #define CASE_VECTOR_PC_RELATIVE 0
78 #endif
79
80 /* Convert defined/undefined to boolean. */
81 #ifdef TARGET_MEM_FUNCTIONS
82 #undef TARGET_MEM_FUNCTIONS
83 #define TARGET_MEM_FUNCTIONS 1
84 #else
85 #define TARGET_MEM_FUNCTIONS 0
86 #endif
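/* Normalizing the macro to 0/1 lets later code test the target property
   with an ordinary C conditional, e.g.

     size_mode = TYPE_MODE (TARGET_MEM_FUNCTIONS
                            ? sizetype : unsigned_type_node);

   (an illustrative rewrite of the if/else used further below in
   emit_block_move_via_libcall), instead of repeating #ifdef blocks.  */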
87
88
89 /* If this is nonzero, we do not bother generating VOLATILE
90 around volatile memory references, and we are willing to
91 output indirect addresses. If cse is to follow, we reject
92 indirect addresses so a useful potential cse is generated;
93 if it is used only once, instruction combination will produce
94 the same indirect address eventually. */
95 int cse_not_expected;
96
97 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
98 tree placeholder_list = 0;
99
100 /* This structure is used by move_by_pieces to describe the move to
101 be performed. */
102 struct move_by_pieces
103 {
104 rtx to;
105 rtx to_addr;
106 int autinc_to;
107 int explicit_inc_to;
108 rtx from;
109 rtx from_addr;
110 int autinc_from;
111 int explicit_inc_from;
112 unsigned HOST_WIDE_INT len;
113 HOST_WIDE_INT offset;
114 int reverse;
115 };
116
117 /* This structure is used by store_by_pieces to describe the clear to
118 be performed. */
119
120 struct store_by_pieces
121 {
122 rtx to;
123 rtx to_addr;
124 int autinc_to;
125 int explicit_inc_to;
126 unsigned HOST_WIDE_INT len;
127 HOST_WIDE_INT offset;
128 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
129 void *constfundata;
130 int reverse;
131 };
132
133 static rtx enqueue_insn (rtx, rtx);
134 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
135 unsigned int);
136 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
137 struct move_by_pieces *);
138 static bool block_move_libcall_safe_for_call_parm (void);
139 static bool emit_block_move_via_movstr (rtx, rtx, rtx, unsigned);
140 static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
141 static tree emit_block_move_libcall_fn (int);
142 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
143 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
144 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
145 static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
146 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
147 struct store_by_pieces *);
148 static bool clear_storage_via_clrstr (rtx, rtx, unsigned);
149 static rtx clear_storage_via_libcall (rtx, rtx);
150 static tree clear_storage_libcall_fn (int);
151 static rtx compress_float_constant (rtx, rtx);
152 static rtx get_subtarget (rtx);
153 static int is_zeros_p (tree);
154 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
155 HOST_WIDE_INT, enum machine_mode,
156 tree, tree, int, int);
157 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
158 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
159 tree, enum machine_mode, int, tree, int);
160 static rtx var_rtx (tree);
161
162 static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
163 static unsigned HOST_WIDE_INT highest_pow2_factor_for_type (tree, tree);
164
165 static int is_aligning_offset (tree, tree);
166 static rtx expand_increment (tree, int, int);
167 static rtx do_store_flag (tree, rtx, enum machine_mode, int);
168 #ifdef PUSH_ROUNDING
169 static void emit_single_push_insn (enum machine_mode, rtx, tree);
170 #endif
171 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
172 static rtx const_vector_from_tree (tree);
173
174 /* Record for each mode whether we can move a register directly to or
175 from an object of that mode in memory. If we can't, we won't try
176 to use that mode directly when accessing a field of that mode. */
177
178 static char direct_load[NUM_MACHINE_MODES];
179 static char direct_store[NUM_MACHINE_MODES];
180
181 /* Record for each mode whether we can float-extend from memory. */
182
183 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
184
185 /* If a memory-to-memory move would take MOVE_RATIO or more simple
186 move-instruction sequences, we will do a movstr or libcall instead. */
187
188 #ifndef MOVE_RATIO
189 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
190 #define MOVE_RATIO 2
191 #else
192 /* If we are optimizing for space (-Os), cut down the default move ratio. */
193 #define MOVE_RATIO (optimize_size ? 3 : 15)
194 #endif
195 #endif
196
197 /* This macro is used to determine whether move_by_pieces should be called
198 to perform a structure copy. */
199 #ifndef MOVE_BY_PIECES_P
200 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
201 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
202 #endif
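/* Illustrative sketch (not compiled): how a caller typically consults this
   heuristic before choosing a copy strategy, mirroring the logic in
   emit_block_move further below.  The example_copy_choice function and its
   arguments are hypothetical.  */
#if 0
static void
example_copy_choice (rtx dst, rtx src, unsigned HOST_WIDE_INT nbytes,
                     unsigned int align)
{
  if (MOVE_BY_PIECES_P (nbytes, align))
    /* Cheap enough: expand an inline sequence of scalar moves.  */
    move_by_pieces (dst, src, nbytes, align, 0);
  else
    /* Otherwise let emit_block_move pick a movstr pattern, a memcpy
       libcall, or an explicit loop.  */
    emit_block_move (dst, src, GEN_INT (nbytes), BLOCK_OP_NORMAL);
}
#endif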
203
204 /* If a clear memory operation would take CLEAR_RATIO or more simple
205 move-instruction sequences, we will do a clrstr or libcall instead. */
206
207 #ifndef CLEAR_RATIO
208 #if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
209 #define CLEAR_RATIO 2
210 #else
211 /* If we are optimizing for space, cut down the default clear ratio. */
212 #define CLEAR_RATIO (optimize_size ? 3 : 15)
213 #endif
214 #endif
215
216 /* This macro is used to determine whether clear_by_pieces should be
217 called to clear storage. */
218 #ifndef CLEAR_BY_PIECES_P
219 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
220 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
221 #endif
222
223 /* This macro is used to determine whether store_by_pieces should be
224 called to "memset" storage with byte values other than zero, or
225 to "memcpy" storage when the source is a constant string. */
226 #ifndef STORE_BY_PIECES_P
227 #define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
228 #endif
229
230 /* This array records the insn_code of insns to perform block moves. */
231 enum insn_code movstr_optab[NUM_MACHINE_MODES];
232
233 /* This array records the insn_code of insns to perform block clears. */
234 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
235
236 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
237
238 #ifndef SLOW_UNALIGNED_ACCESS
239 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
240 #endif
241 \f
242 /* This is run once per compilation to set up which modes can be used
243 directly in memory and to initialize the block move optab. */
244
245 void
246 init_expr_once (void)
247 {
248 rtx insn, pat;
249 enum machine_mode mode;
250 int num_clobbers;
251 rtx mem, mem1;
252 rtx reg;
253
254 /* Try indexing by frame ptr and try by stack ptr.
255 It is known that on the Convex the stack ptr isn't a valid index.
256 With luck, one or the other is valid on any machine. */
257 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
258 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
259
260 /* A scratch register we can modify in-place below to avoid
261 useless RTL allocations. */
262 reg = gen_rtx_REG (VOIDmode, -1);
263
264 insn = rtx_alloc (INSN);
265 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
266 PATTERN (insn) = pat;
267
268 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
269 mode = (enum machine_mode) ((int) mode + 1))
270 {
271 int regno;
272
273 direct_load[(int) mode] = direct_store[(int) mode] = 0;
274 PUT_MODE (mem, mode);
275 PUT_MODE (mem1, mode);
276 PUT_MODE (reg, mode);
277
278 /* See if there is some register that can be used in this mode and
279 directly loaded or stored from memory. */
280
281 if (mode != VOIDmode && mode != BLKmode)
282 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
283 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
284 regno++)
285 {
286 if (! HARD_REGNO_MODE_OK (regno, mode))
287 continue;
288
289 REGNO (reg) = regno;
290
291 SET_SRC (pat) = mem;
292 SET_DEST (pat) = reg;
293 if (recog (pat, insn, &num_clobbers) >= 0)
294 direct_load[(int) mode] = 1;
295
296 SET_SRC (pat) = mem1;
297 SET_DEST (pat) = reg;
298 if (recog (pat, insn, &num_clobbers) >= 0)
299 direct_load[(int) mode] = 1;
300
301 SET_SRC (pat) = reg;
302 SET_DEST (pat) = mem;
303 if (recog (pat, insn, &num_clobbers) >= 0)
304 direct_store[(int) mode] = 1;
305
306 SET_SRC (pat) = reg;
307 SET_DEST (pat) = mem1;
308 if (recog (pat, insn, &num_clobbers) >= 0)
309 direct_store[(int) mode] = 1;
310 }
311 }
312
313 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
314
315 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
316 mode = GET_MODE_WIDER_MODE (mode))
317 {
318 enum machine_mode srcmode;
319 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
320 srcmode = GET_MODE_WIDER_MODE (srcmode))
321 {
322 enum insn_code ic;
323
324 ic = can_extend_p (mode, srcmode, 0);
325 if (ic == CODE_FOR_nothing)
326 continue;
327
328 PUT_MODE (mem, srcmode);
329
330 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
331 float_extend_from_mem[mode][srcmode] = true;
332 }
333 }
334 }
335
336 /* This is run at the start of compiling a function. */
337
338 void
339 init_expr (void)
340 {
341 cfun->expr = ggc_alloc (sizeof (struct expr_status));
342
343 pending_chain = 0;
344 pending_stack_adjust = 0;
345 stack_pointer_delta = 0;
346 inhibit_defer_pop = 0;
347 saveregs_value = 0;
348 apply_args_value = 0;
349 forced_labels = 0;
350 }
351
352 /* Small sanity check that the queue is empty at the end of a function. */
353
354 void
355 finish_expr_for_function (void)
356 {
357 if (pending_chain)
358 abort ();
359 }
360 \f
361 /* Manage the queue of increment instructions to be output
362 for POSTINCREMENT_EXPR expressions, etc. */
363
364 /* Queue up to increment (or change) VAR later. BODY says how:
365 BODY should be the same thing you would pass to emit_insn
366 to increment right away. It will go to emit_insn later on.
367
368 The value is a QUEUED expression to be used in place of VAR
369 where you want to guarantee the pre-incrementation value of VAR. */
370
371 static rtx
372 enqueue_insn (rtx var, rtx body)
373 {
374 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
375 body, pending_chain);
376 return pending_chain;
377 }
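/* Illustrative sketch (not compiled): queueing the side effect of a
   POSTINCREMENT_EXPR.  The increment insn is emitted only when emit_queue
   runs; the QUEUED rtx returned here stands for the pre-increment value in
   the meantime.  The example_* function is hypothetical and assumes VAR is
   a REG and AMOUNT satisfies the add optab's operand predicate.  */
#if 0
static rtx
example_queue_post_increment (rtx var, rtx amount)
{
  /* BODY is exactly what would otherwise be passed to emit_insn.  */
  rtx body = gen_add2_insn (var, amount);
  return enqueue_insn (var, body);
}
#endif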
378
379 /* Use protect_from_queue to convert a QUEUED expression
380 into something that you can put immediately into an instruction.
381 If the queued incrementation has not happened yet,
382 protect_from_queue returns the variable itself.
383 If the incrementation has happened, protect_from_queue returns a temp
384 that contains a copy of the old value of the variable.
385
386 Any time an rtx which might possibly be a QUEUED is to be put
387 into an instruction, it must be passed through protect_from_queue first.
388 QUEUED expressions are not meaningful in instructions.
389
390 Do not pass a value through protect_from_queue and then hold
391 on to it for a while before putting it in an instruction!
392 If the queue is flushed in between, incorrect code will result. */
393
394 rtx
395 protect_from_queue (rtx x, int modify)
396 {
397 RTX_CODE code = GET_CODE (x);
398
399 #if 0 /* A QUEUED can hang around after the queue is forced out. */
400 /* Shortcut for most common case. */
401 if (pending_chain == 0)
402 return x;
403 #endif
404
405 if (code != QUEUED)
406 {
407 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
408 use of autoincrement. Make a copy of the contents of the memory
409 location rather than a copy of the address, but not if the value is
410 of mode BLKmode. Don't modify X in place since it might be
411 shared. */
412 if (code == MEM && GET_MODE (x) != BLKmode
413 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
414 {
415 rtx y = XEXP (x, 0);
416 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
417
418 if (QUEUED_INSN (y))
419 {
420 rtx temp = gen_reg_rtx (GET_MODE (x));
421
422 emit_insn_before (gen_move_insn (temp, new),
423 QUEUED_INSN (y));
424 return temp;
425 }
426
427 /* Copy the address into a pseudo, so that the returned value
428 remains correct across calls to emit_queue. */
429 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
430 }
431
432 /* Otherwise, recursively protect the subexpressions of all
433 the kinds of rtx's that can contain a QUEUED. */
434 if (code == MEM)
435 {
436 rtx tem = protect_from_queue (XEXP (x, 0), 0);
437 if (tem != XEXP (x, 0))
438 {
439 x = copy_rtx (x);
440 XEXP (x, 0) = tem;
441 }
442 }
443 else if (code == PLUS || code == MULT)
444 {
445 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
446 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
447 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
448 {
449 x = copy_rtx (x);
450 XEXP (x, 0) = new0;
451 XEXP (x, 1) = new1;
452 }
453 }
454 return x;
455 }
456 /* If the increment has not happened, use the variable itself. Copy it
457 into a new pseudo so that the value remains correct across calls to
458 emit_queue. */
459 if (QUEUED_INSN (x) == 0)
460 return copy_to_reg (QUEUED_VAR (x));
461 /* If the increment has happened and a pre-increment copy exists,
462 use that copy. */
463 if (QUEUED_COPY (x) != 0)
464 return QUEUED_COPY (x);
465 /* The increment has happened but we haven't set up a pre-increment copy.
466 Set one up now, and use it. */
467 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
468 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
469 QUEUED_INSN (x));
470 return QUEUED_COPY (x);
471 }
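/* Illustrative sketch (not compiled): operands are passed through
   protect_from_queue immediately before being placed in an insn, never
   cached across a point where emit_queue might run.  The example_emit_copy
   function is hypothetical; the same pattern appears in convert_move and
   emit_block_move below.  */
#if 0
static void
example_emit_copy (rtx dest, rtx src)
{
  dest = protect_from_queue (dest, 1);  /* DEST will be modified.  */
  src = protect_from_queue (src, 0);    /* SRC is only read.  */
  emit_move_insn (dest, src);
}
#endif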
472
473 /* Return nonzero if X contains a QUEUED expression:
474 if it contains anything that will be altered by a queued increment.
475 We handle only combinations of MEM, PLUS, MINUS and MULT operators
476 since memory addresses generally contain only those. */
477
478 int
479 queued_subexp_p (rtx x)
480 {
481 enum rtx_code code = GET_CODE (x);
482 switch (code)
483 {
484 case QUEUED:
485 return 1;
486 case MEM:
487 return queued_subexp_p (XEXP (x, 0));
488 case MULT:
489 case PLUS:
490 case MINUS:
491 return (queued_subexp_p (XEXP (x, 0))
492 || queued_subexp_p (XEXP (x, 1)));
493 default:
494 return 0;
495 }
496 }
497
498 /* Perform all the pending incrementations. */
499
500 void
501 emit_queue (void)
502 {
503 rtx p;
504 while ((p = pending_chain))
505 {
506 rtx body = QUEUED_BODY (p);
507
508 switch (GET_CODE (body))
509 {
510 case INSN:
511 case JUMP_INSN:
512 case CALL_INSN:
513 case CODE_LABEL:
514 case BARRIER:
515 case NOTE:
516 QUEUED_INSN (p) = body;
517 emit_insn (body);
518 break;
519
520 #ifdef ENABLE_CHECKING
521 case SEQUENCE:
522 abort ();
523 break;
524 #endif
525
526 default:
527 QUEUED_INSN (p) = emit_insn (body);
528 break;
529 }
530
531 pending_chain = QUEUED_NEXT (p);
532 }
533 }
534 \f
535 /* Copy data from FROM to TO, where the machine modes are not the same.
536 Both modes may be integer, or both may be floating.
537 UNSIGNEDP should be nonzero if FROM is an unsigned type.
538 This causes zero-extension instead of sign-extension. */
539
540 void
541 convert_move (rtx to, rtx from, int unsignedp)
542 {
543 enum machine_mode to_mode = GET_MODE (to);
544 enum machine_mode from_mode = GET_MODE (from);
545 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
546 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
547 enum insn_code code;
548 rtx libcall;
549
550 /* rtx code for making an equivalent value. */
551 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
552 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
553
554 to = protect_from_queue (to, 1);
555 from = protect_from_queue (from, 0);
556
557 if (to_real != from_real)
558 abort ();
559
560 /* If FROM is a SUBREG that indicates that we have already done at least
561 the required extension, strip it. We don't handle such SUBREGs as
562 TO here. */
563
564 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
565 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
566 >= GET_MODE_SIZE (to_mode))
567 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
568 from = gen_lowpart (to_mode, from), from_mode = to_mode;
569
570 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
571 abort ();
572
573 if (to_mode == from_mode
574 || (from_mode == VOIDmode && CONSTANT_P (from)))
575 {
576 emit_move_insn (to, from);
577 return;
578 }
579
580 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
581 {
582 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
583 abort ();
584
585 if (VECTOR_MODE_P (to_mode))
586 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
587 else
588 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
589
590 emit_move_insn (to, from);
591 return;
592 }
593
594 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
595 {
596 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
597 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
598 return;
599 }
600
601 if (to_real != from_real)
602 abort ();
603
604 if (to_real)
605 {
606 rtx value, insns;
607
608 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
609 {
610 /* Try converting directly if the insn is supported. */
611 if ((code = can_extend_p (to_mode, from_mode, 0))
612 != CODE_FOR_nothing)
613 {
614 emit_unop_insn (code, to, from, UNKNOWN);
615 return;
616 }
617 }
618
619 #ifdef HAVE_trunchfqf2
620 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
621 {
622 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
623 return;
624 }
625 #endif
626 #ifdef HAVE_trunctqfqf2
627 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
628 {
629 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
630 return;
631 }
632 #endif
633 #ifdef HAVE_truncsfqf2
634 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
635 {
636 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
637 return;
638 }
639 #endif
640 #ifdef HAVE_truncdfqf2
641 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
642 {
643 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
644 return;
645 }
646 #endif
647 #ifdef HAVE_truncxfqf2
648 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
649 {
650 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
651 return;
652 }
653 #endif
654 #ifdef HAVE_trunctfqf2
655 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
656 {
657 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
658 return;
659 }
660 #endif
661
662 #ifdef HAVE_trunctqfhf2
663 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
664 {
665 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
666 return;
667 }
668 #endif
669 #ifdef HAVE_truncsfhf2
670 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
671 {
672 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
673 return;
674 }
675 #endif
676 #ifdef HAVE_truncdfhf2
677 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
678 {
679 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
680 return;
681 }
682 #endif
683 #ifdef HAVE_truncxfhf2
684 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
685 {
686 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
687 return;
688 }
689 #endif
690 #ifdef HAVE_trunctfhf2
691 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
692 {
693 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
694 return;
695 }
696 #endif
697
698 #ifdef HAVE_truncsftqf2
699 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
700 {
701 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
702 return;
703 }
704 #endif
705 #ifdef HAVE_truncdftqf2
706 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
707 {
708 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
709 return;
710 }
711 #endif
712 #ifdef HAVE_truncxftqf2
713 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
714 {
715 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
716 return;
717 }
718 #endif
719 #ifdef HAVE_trunctftqf2
720 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
721 {
722 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
723 return;
724 }
725 #endif
726
727 #ifdef HAVE_truncdfsf2
728 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
729 {
730 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
731 return;
732 }
733 #endif
734 #ifdef HAVE_truncxfsf2
735 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
736 {
737 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
738 return;
739 }
740 #endif
741 #ifdef HAVE_trunctfsf2
742 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
743 {
744 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
745 return;
746 }
747 #endif
748 #ifdef HAVE_truncxfdf2
749 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
750 {
751 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
752 return;
753 }
754 #endif
755 #ifdef HAVE_trunctfdf2
756 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
757 {
758 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
759 return;
760 }
761 #endif
762
763 libcall = (rtx) 0;
764 switch (from_mode)
765 {
766 case SFmode:
767 switch (to_mode)
768 {
769 case DFmode:
770 libcall = extendsfdf2_libfunc;
771 break;
772
773 case XFmode:
774 libcall = extendsfxf2_libfunc;
775 break;
776
777 case TFmode:
778 libcall = extendsftf2_libfunc;
779 break;
780
781 default:
782 break;
783 }
784 break;
785
786 case DFmode:
787 switch (to_mode)
788 {
789 case SFmode:
790 libcall = truncdfsf2_libfunc;
791 break;
792
793 case XFmode:
794 libcall = extenddfxf2_libfunc;
795 break;
796
797 case TFmode:
798 libcall = extenddftf2_libfunc;
799 break;
800
801 default:
802 break;
803 }
804 break;
805
806 case XFmode:
807 switch (to_mode)
808 {
809 case SFmode:
810 libcall = truncxfsf2_libfunc;
811 break;
812
813 case DFmode:
814 libcall = truncxfdf2_libfunc;
815 break;
816
817 default:
818 break;
819 }
820 break;
821
822 case TFmode:
823 switch (to_mode)
824 {
825 case SFmode:
826 libcall = trunctfsf2_libfunc;
827 break;
828
829 case DFmode:
830 libcall = trunctfdf2_libfunc;
831 break;
832
833 default:
834 break;
835 }
836 break;
837
838 default:
839 break;
840 }
841
842 if (libcall == (rtx) 0)
843 /* This conversion is not implemented yet. */
844 abort ();
845
846 start_sequence ();
847 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
848 1, from, from_mode);
849 insns = get_insns ();
850 end_sequence ();
851 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
852 from));
853 return;
854 }
855
856 /* Now both modes are integers. */
857
858 /* Handle expanding beyond a word. */
859 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
860 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
861 {
862 rtx insns;
863 rtx lowpart;
864 rtx fill_value;
865 rtx lowfrom;
866 int i;
867 enum machine_mode lowpart_mode;
868 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
869
870 /* Try converting directly if the insn is supported. */
871 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
872 != CODE_FOR_nothing)
873 {
874 /* If FROM is a SUBREG, put it into a register. Do this
875 so that we always generate the same set of insns for
876 better cse'ing; if an intermediate assignment occurred,
877 we won't be doing the operation directly on the SUBREG. */
878 if (optimize > 0 && GET_CODE (from) == SUBREG)
879 from = force_reg (from_mode, from);
880 emit_unop_insn (code, to, from, equiv_code);
881 return;
882 }
883 /* Next, try converting via full word. */
884 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
885 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
886 != CODE_FOR_nothing))
887 {
888 if (GET_CODE (to) == REG)
889 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
890 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
891 emit_unop_insn (code, to,
892 gen_lowpart (word_mode, to), equiv_code);
893 return;
894 }
895
896 /* No special multiword conversion insn; do it by hand. */
897 start_sequence ();
898
899 /* Since we will turn this into a no conflict block, we must ensure
900 that the source does not overlap the target. */
901
902 if (reg_overlap_mentioned_p (to, from))
903 from = force_reg (from_mode, from);
904
905 /* Get a copy of FROM widened to a word, if necessary. */
906 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
907 lowpart_mode = word_mode;
908 else
909 lowpart_mode = from_mode;
910
911 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
912
913 lowpart = gen_lowpart (lowpart_mode, to);
914 emit_move_insn (lowpart, lowfrom);
915
916 /* Compute the value to put in each remaining word. */
917 if (unsignedp)
918 fill_value = const0_rtx;
919 else
920 {
921 #ifdef HAVE_slt
922 if (HAVE_slt
923 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
924 && STORE_FLAG_VALUE == -1)
925 {
926 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
927 lowpart_mode, 0);
928 fill_value = gen_reg_rtx (word_mode);
929 emit_insn (gen_slt (fill_value));
930 }
931 else
932 #endif
933 {
934 fill_value
935 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
936 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
937 NULL_RTX, 0);
938 fill_value = convert_to_mode (word_mode, fill_value, 1);
939 }
940 }
941
942 /* Fill the remaining words. */
943 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
944 {
945 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
946 rtx subword = operand_subword (to, index, 1, to_mode);
947
948 if (subword == 0)
949 abort ();
950
951 if (fill_value != subword)
952 emit_move_insn (subword, fill_value);
953 }
954
955 insns = get_insns ();
956 end_sequence ();
957
958 emit_no_conflict_block (insns, to, from, NULL_RTX,
959 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
960 return;
961 }
962
963 /* Truncating multi-word to a word or less. */
964 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
965 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
966 {
967 if (!((GET_CODE (from) == MEM
968 && ! MEM_VOLATILE_P (from)
969 && direct_load[(int) to_mode]
970 && ! mode_dependent_address_p (XEXP (from, 0)))
971 || GET_CODE (from) == REG
972 || GET_CODE (from) == SUBREG))
973 from = force_reg (from_mode, from);
974 convert_move (to, gen_lowpart (word_mode, from), 0);
975 return;
976 }
977
978 /* Handle pointer conversion. */ /* SPEE 900220. */
979 if (to_mode == PQImode)
980 {
981 if (from_mode != QImode)
982 from = convert_to_mode (QImode, from, unsignedp);
983
984 #ifdef HAVE_truncqipqi2
985 if (HAVE_truncqipqi2)
986 {
987 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
988 return;
989 }
990 #endif /* HAVE_truncqipqi2 */
991 abort ();
992 }
993
994 if (from_mode == PQImode)
995 {
996 if (to_mode != QImode)
997 {
998 from = convert_to_mode (QImode, from, unsignedp);
999 from_mode = QImode;
1000 }
1001 else
1002 {
1003 #ifdef HAVE_extendpqiqi2
1004 if (HAVE_extendpqiqi2)
1005 {
1006 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
1007 return;
1008 }
1009 #endif /* HAVE_extendpqiqi2 */
1010 abort ();
1011 }
1012 }
1013
1014 if (to_mode == PSImode)
1015 {
1016 if (from_mode != SImode)
1017 from = convert_to_mode (SImode, from, unsignedp);
1018
1019 #ifdef HAVE_truncsipsi2
1020 if (HAVE_truncsipsi2)
1021 {
1022 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
1023 return;
1024 }
1025 #endif /* HAVE_truncsipsi2 */
1026 abort ();
1027 }
1028
1029 if (from_mode == PSImode)
1030 {
1031 if (to_mode != SImode)
1032 {
1033 from = convert_to_mode (SImode, from, unsignedp);
1034 from_mode = SImode;
1035 }
1036 else
1037 {
1038 #ifdef HAVE_extendpsisi2
1039 if (! unsignedp && HAVE_extendpsisi2)
1040 {
1041 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
1042 return;
1043 }
1044 #endif /* HAVE_extendpsisi2 */
1045 #ifdef HAVE_zero_extendpsisi2
1046 if (unsignedp && HAVE_zero_extendpsisi2)
1047 {
1048 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
1049 return;
1050 }
1051 #endif /* HAVE_zero_extendpsisi2 */
1052 abort ();
1053 }
1054 }
1055
1056 if (to_mode == PDImode)
1057 {
1058 if (from_mode != DImode)
1059 from = convert_to_mode (DImode, from, unsignedp);
1060
1061 #ifdef HAVE_truncdipdi2
1062 if (HAVE_truncdipdi2)
1063 {
1064 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1065 return;
1066 }
1067 #endif /* HAVE_truncdipdi2 */
1068 abort ();
1069 }
1070
1071 if (from_mode == PDImode)
1072 {
1073 if (to_mode != DImode)
1074 {
1075 from = convert_to_mode (DImode, from, unsignedp);
1076 from_mode = DImode;
1077 }
1078 else
1079 {
1080 #ifdef HAVE_extendpdidi2
1081 if (HAVE_extendpdidi2)
1082 {
1083 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1084 return;
1085 }
1086 #endif /* HAVE_extendpdidi2 */
1087 abort ();
1088 }
1089 }
1090
1091 /* Now follow all the conversions between integers
1092 no more than a word long. */
1093
1094 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1095 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1096 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1097 GET_MODE_BITSIZE (from_mode)))
1098 {
1099 if (!((GET_CODE (from) == MEM
1100 && ! MEM_VOLATILE_P (from)
1101 && direct_load[(int) to_mode]
1102 && ! mode_dependent_address_p (XEXP (from, 0)))
1103 || GET_CODE (from) == REG
1104 || GET_CODE (from) == SUBREG))
1105 from = force_reg (from_mode, from);
1106 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1107 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1108 from = copy_to_reg (from);
1109 emit_move_insn (to, gen_lowpart (to_mode, from));
1110 return;
1111 }
1112
1113 /* Handle extension. */
1114 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1115 {
1116 /* Convert directly if that works. */
1117 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1118 != CODE_FOR_nothing)
1119 {
1120 if (flag_force_mem)
1121 from = force_not_mem (from);
1122
1123 emit_unop_insn (code, to, from, equiv_code);
1124 return;
1125 }
1126 else
1127 {
1128 enum machine_mode intermediate;
1129 rtx tmp;
1130 tree shift_amount;
1131
1132 /* Search for a mode to convert via. */
1133 for (intermediate = from_mode; intermediate != VOIDmode;
1134 intermediate = GET_MODE_WIDER_MODE (intermediate))
1135 if (((can_extend_p (to_mode, intermediate, unsignedp)
1136 != CODE_FOR_nothing)
1137 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1138 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1139 GET_MODE_BITSIZE (intermediate))))
1140 && (can_extend_p (intermediate, from_mode, unsignedp)
1141 != CODE_FOR_nothing))
1142 {
1143 convert_move (to, convert_to_mode (intermediate, from,
1144 unsignedp), unsignedp);
1145 return;
1146 }
1147
1148 /* No suitable intermediate mode.
1149 Generate what we need with shifts. */
1150 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1151 - GET_MODE_BITSIZE (from_mode), 0);
1152 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1153 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1154 to, unsignedp);
1155 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1156 to, unsignedp);
1157 if (tmp != to)
1158 emit_move_insn (to, tmp);
1159 return;
1160 }
1161 }
1162
1163 /* Support special truncate insns for certain modes. */
1164
1165 if (from_mode == DImode && to_mode == SImode)
1166 {
1167 #ifdef HAVE_truncdisi2
1168 if (HAVE_truncdisi2)
1169 {
1170 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1171 return;
1172 }
1173 #endif
1174 convert_move (to, force_reg (from_mode, from), unsignedp);
1175 return;
1176 }
1177
1178 if (from_mode == DImode && to_mode == HImode)
1179 {
1180 #ifdef HAVE_truncdihi2
1181 if (HAVE_truncdihi2)
1182 {
1183 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1184 return;
1185 }
1186 #endif
1187 convert_move (to, force_reg (from_mode, from), unsignedp);
1188 return;
1189 }
1190
1191 if (from_mode == DImode && to_mode == QImode)
1192 {
1193 #ifdef HAVE_truncdiqi2
1194 if (HAVE_truncdiqi2)
1195 {
1196 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1197 return;
1198 }
1199 #endif
1200 convert_move (to, force_reg (from_mode, from), unsignedp);
1201 return;
1202 }
1203
1204 if (from_mode == SImode && to_mode == HImode)
1205 {
1206 #ifdef HAVE_truncsihi2
1207 if (HAVE_truncsihi2)
1208 {
1209 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1210 return;
1211 }
1212 #endif
1213 convert_move (to, force_reg (from_mode, from), unsignedp);
1214 return;
1215 }
1216
1217 if (from_mode == SImode && to_mode == QImode)
1218 {
1219 #ifdef HAVE_truncsiqi2
1220 if (HAVE_truncsiqi2)
1221 {
1222 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1223 return;
1224 }
1225 #endif
1226 convert_move (to, force_reg (from_mode, from), unsignedp);
1227 return;
1228 }
1229
1230 if (from_mode == HImode && to_mode == QImode)
1231 {
1232 #ifdef HAVE_trunchiqi2
1233 if (HAVE_trunchiqi2)
1234 {
1235 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1236 return;
1237 }
1238 #endif
1239 convert_move (to, force_reg (from_mode, from), unsignedp);
1240 return;
1241 }
1242
1243 if (from_mode == TImode && to_mode == DImode)
1244 {
1245 #ifdef HAVE_trunctidi2
1246 if (HAVE_trunctidi2)
1247 {
1248 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1249 return;
1250 }
1251 #endif
1252 convert_move (to, force_reg (from_mode, from), unsignedp);
1253 return;
1254 }
1255
1256 if (from_mode == TImode && to_mode == SImode)
1257 {
1258 #ifdef HAVE_trunctisi2
1259 if (HAVE_trunctisi2)
1260 {
1261 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1262 return;
1263 }
1264 #endif
1265 convert_move (to, force_reg (from_mode, from), unsignedp);
1266 return;
1267 }
1268
1269 if (from_mode == TImode && to_mode == HImode)
1270 {
1271 #ifdef HAVE_trunctihi2
1272 if (HAVE_trunctihi2)
1273 {
1274 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1275 return;
1276 }
1277 #endif
1278 convert_move (to, force_reg (from_mode, from), unsignedp);
1279 return;
1280 }
1281
1282 if (from_mode == TImode && to_mode == QImode)
1283 {
1284 #ifdef HAVE_trunctiqi2
1285 if (HAVE_trunctiqi2)
1286 {
1287 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1288 return;
1289 }
1290 #endif
1291 convert_move (to, force_reg (from_mode, from), unsignedp);
1292 return;
1293 }
1294
1295 /* Handle truncation of volatile memrefs, and so on;
1296 the things that couldn't be truncated directly,
1297 and for which there was no special instruction. */
1298 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1299 {
1300 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1301 emit_move_insn (to, temp);
1302 return;
1303 }
1304
1305 /* Mode combination is not recognized. */
1306 abort ();
1307 }
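/* Illustrative sketch (not compiled): widening an unsigned QImode value
   into a fresh SImode register with convert_move, which uses a zero_extend
   insn, a conversion via word_mode, or a shift sequence, whichever the
   target supports.  The example_* function is hypothetical and assumes
   QI_VAL already has QImode.  */
#if 0
static rtx
example_zero_extend_to_si (rtx qi_val)
{
  rtx si_reg = gen_reg_rtx (SImode);
  convert_move (si_reg, qi_val, /*unsignedp=*/1);
  return si_reg;
}
#endif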
1308
1309 /* Return an rtx for a value that would result
1310 from converting X to mode MODE.
1311 Both X and MODE may be floating, or both integer.
1312 UNSIGNEDP is nonzero if X is an unsigned value.
1313 This can be done by referring to a part of X in place
1314 or by copying to a new temporary with conversion.
1315
1316 This function *must not* call protect_from_queue
1317 except when putting X into an insn (in which case convert_move does it). */
1318
1319 rtx
1320 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
1321 {
1322 return convert_modes (mode, VOIDmode, x, unsignedp);
1323 }
1324
1325 /* Return an rtx for a value that would result
1326 from converting X from mode OLDMODE to mode MODE.
1327 Both modes may be floating, or both integer.
1328 UNSIGNEDP is nonzero if X is an unsigned value.
1329
1330 This can be done by referring to a part of X in place
1331 or by copying to a new temporary with conversion.
1332
1333 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1334
1335 This function *must not* call protect_from_queue
1336 except when putting X into an insn (in which case convert_move does it). */
1337
1338 rtx
1339 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
1340 {
1341 rtx temp;
1342
1343 /* If FROM is a SUBREG that indicates that we have already done at least
1344 the required extension, strip it. */
1345
1346 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1347 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1348 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1349 x = gen_lowpart (mode, x);
1350
1351 if (GET_MODE (x) != VOIDmode)
1352 oldmode = GET_MODE (x);
1353
1354 if (mode == oldmode)
1355 return x;
1356
1357 /* There is one case that we must handle specially: If we are converting
1358 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1359 we are to interpret the constant as unsigned, gen_lowpart will do
1360 the wrong thing if the constant appears negative. What we want to do is
1361 make the high-order word of the constant zero, not all ones. */
1362
1363 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1364 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1365 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1366 {
1367 HOST_WIDE_INT val = INTVAL (x);
1368
1369 if (oldmode != VOIDmode
1370 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1371 {
1372 int width = GET_MODE_BITSIZE (oldmode);
1373
1374 /* We need to zero extend VAL. */
1375 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1376 }
1377
1378 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1379 }
1380
1381 /* We can do this with a gen_lowpart if both desired and current modes
1382 are integer, and this is either a constant integer, a register, or a
1383 non-volatile MEM. Except for the constant case where MODE is no
1384 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1385
1386 if ((GET_CODE (x) == CONST_INT
1387 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1388 || (GET_MODE_CLASS (mode) == MODE_INT
1389 && GET_MODE_CLASS (oldmode) == MODE_INT
1390 && (GET_CODE (x) == CONST_DOUBLE
1391 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1392 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1393 && direct_load[(int) mode])
1394 || (GET_CODE (x) == REG
1395 && (! HARD_REGISTER_P (x)
1396 || HARD_REGNO_MODE_OK (REGNO (x), mode))
1397 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1398 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1399 {
1400 /* ?? If we don't know OLDMODE, we have to assume here that
1401 X does not need sign- or zero-extension. This may not be
1402 the case, but it's the best we can do. */
1403 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1404 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1405 {
1406 HOST_WIDE_INT val = INTVAL (x);
1407 int width = GET_MODE_BITSIZE (oldmode);
1408
1409 /* We must sign or zero-extend in this case. Start by
1410 zero-extending, then sign extend if we need to. */
1411 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1412 if (! unsignedp
1413 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1414 val |= (HOST_WIDE_INT) (-1) << width;
1415
1416 return gen_int_mode (val, mode);
1417 }
1418
1419 return gen_lowpart (mode, x);
1420 }
1421
1422 /* Converting from an integer constant into MODE is always equivalent to a
1423 subreg operation. */
1424 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
1425 {
1426 if (GET_MODE_BITSIZE (mode) != GET_MODE_BITSIZE (oldmode))
1427 abort ();
1428 return simplify_gen_subreg (mode, x, oldmode, 0);
1429 }
1430
1431 temp = gen_reg_rtx (mode);
1432 convert_move (temp, x, unsignedp);
1433 return temp;
1434 }
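/* Illustrative sketch (not compiled): unlike convert_move, convert_modes
   need not emit any insns; for constants it can simply return a constant
   in the requested mode.  The example_* function is hypothetical.  */
#if 0
static rtx
example_narrow_constant (void)
{
  rtx x = GEN_INT (0x1234);
  /* Yields (const_int 0x34) directly, without emitting code.  */
  return convert_modes (QImode, SImode, x, /*unsignedp=*/1);
}
#endif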
1435 \f
1436 /* STORE_MAX_PIECES is the number of bytes at a time that we can
1437 store efficiently. Due to internal GCC limitations, this is
1438 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
1439 for an immediate constant. */
1440
1441 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
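/* For instance, on a host with a 64-bit HOST_WIDE_INT and a target whose
   MOVE_MAX_PIECES is 8, STORE_MAX_PIECES is MIN (8, 16) = 8 bytes (an
   illustrative combination, not a statement about any particular
   target).  */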
1442
1443 /* Determine whether the LEN bytes can be moved by using several move
1444 instructions. Return nonzero if a call to move_by_pieces should
1445 succeed. */
1446
1447 int
1448 can_move_by_pieces (unsigned HOST_WIDE_INT len,
1449 unsigned int align ATTRIBUTE_UNUSED)
1450 {
1451 return MOVE_BY_PIECES_P (len, align);
1452 }
1453
1454 /* Generate several move instructions to copy LEN bytes from block FROM to
1455 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1456 and TO through protect_from_queue before calling.
1457
1458 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1459 used to push FROM to the stack.
1460
1461 ALIGN is maximum stack alignment we can assume.
1462
1463 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
1464 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
1465 stpcpy. */
1466
1467 rtx
1468 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
1469 unsigned int align, int endp)
1470 {
1471 struct move_by_pieces data;
1472 rtx to_addr, from_addr = XEXP (from, 0);
1473 unsigned int max_size = MOVE_MAX_PIECES + 1;
1474 enum machine_mode mode = VOIDmode, tmode;
1475 enum insn_code icode;
1476
1477 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
1478
1479 data.offset = 0;
1480 data.from_addr = from_addr;
1481 if (to)
1482 {
1483 to_addr = XEXP (to, 0);
1484 data.to = to;
1485 data.autinc_to
1486 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1487 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1488 data.reverse
1489 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1490 }
1491 else
1492 {
1493 to_addr = NULL_RTX;
1494 data.to = NULL_RTX;
1495 data.autinc_to = 1;
1496 #ifdef STACK_GROWS_DOWNWARD
1497 data.reverse = 1;
1498 #else
1499 data.reverse = 0;
1500 #endif
1501 }
1502 data.to_addr = to_addr;
1503 data.from = from;
1504 data.autinc_from
1505 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1506 || GET_CODE (from_addr) == POST_INC
1507 || GET_CODE (from_addr) == POST_DEC);
1508
1509 data.explicit_inc_from = 0;
1510 data.explicit_inc_to = 0;
1511 if (data.reverse) data.offset = len;
1512 data.len = len;
1513
1514 /* If copying requires more than two move insns,
1515 copy addresses to registers (to make displacements shorter)
1516 and use post-increment if available. */
1517 if (!(data.autinc_from && data.autinc_to)
1518 && move_by_pieces_ninsns (len, align) > 2)
1519 {
1520 /* Find the mode of the largest move... */
1521 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1522 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1523 if (GET_MODE_SIZE (tmode) < max_size)
1524 mode = tmode;
1525
1526 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1527 {
1528 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1529 data.autinc_from = 1;
1530 data.explicit_inc_from = -1;
1531 }
1532 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1533 {
1534 data.from_addr = copy_addr_to_reg (from_addr);
1535 data.autinc_from = 1;
1536 data.explicit_inc_from = 1;
1537 }
1538 if (!data.autinc_from && CONSTANT_P (from_addr))
1539 data.from_addr = copy_addr_to_reg (from_addr);
1540 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1541 {
1542 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1543 data.autinc_to = 1;
1544 data.explicit_inc_to = -1;
1545 }
1546 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1547 {
1548 data.to_addr = copy_addr_to_reg (to_addr);
1549 data.autinc_to = 1;
1550 data.explicit_inc_to = 1;
1551 }
1552 if (!data.autinc_to && CONSTANT_P (to_addr))
1553 data.to_addr = copy_addr_to_reg (to_addr);
1554 }
1555
1556 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1557 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1558 align = MOVE_MAX * BITS_PER_UNIT;
1559
1560 /* First move what we can in the largest integer mode, then go to
1561 successively smaller modes. */
1562
1563 while (max_size > 1)
1564 {
1565 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1566 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1567 if (GET_MODE_SIZE (tmode) < max_size)
1568 mode = tmode;
1569
1570 if (mode == VOIDmode)
1571 break;
1572
1573 icode = mov_optab->handlers[(int) mode].insn_code;
1574 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1575 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1576
1577 max_size = GET_MODE_SIZE (mode);
1578 }
1579
1580 /* The code above should have handled everything. */
1581 if (data.len > 0)
1582 abort ();
1583
1584 if (endp)
1585 {
1586 rtx to1;
1587
1588 if (data.reverse)
1589 abort ();
1590 if (data.autinc_to)
1591 {
1592 if (endp == 2)
1593 {
1594 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1595 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1596 else
1597 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
1598 -1));
1599 }
1600 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1601 data.offset);
1602 }
1603 else
1604 {
1605 if (endp == 2)
1606 --data.offset;
1607 to1 = adjust_address (data.to, QImode, data.offset);
1608 }
1609 return to1;
1610 }
1611 else
1612 return data.to;
1613 }
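/* Illustrative sketch (not compiled): expanding a mempcpy-style copy of a
   constant number of bytes, asking move_by_pieces for the address one past
   the last byte written (ENDP == 1).  The example_* function is
   hypothetical; operands are assumed to have been protected from the queue
   already.  */
#if 0
static rtx
example_inline_mempcpy (rtx dst, rtx src, unsigned HOST_WIDE_INT len,
                        unsigned int align)
{
  if (!MOVE_BY_PIECES_P (len, align))
    return NULL_RTX;            /* Caller should fall back to a libcall.  */
  return move_by_pieces (dst, src, len, align, /*endp=*/1);
}
#endif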
1614
1615 /* Return number of insns required to move L bytes by pieces.
1616 ALIGN (in bits) is maximum alignment we can assume. */
1617
1618 static unsigned HOST_WIDE_INT
1619 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align)
1620 {
1621 unsigned HOST_WIDE_INT n_insns = 0;
1622 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1623
1624 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1625 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1626 align = MOVE_MAX * BITS_PER_UNIT;
1627
1628 while (max_size > 1)
1629 {
1630 enum machine_mode mode = VOIDmode, tmode;
1631 enum insn_code icode;
1632
1633 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1634 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1635 if (GET_MODE_SIZE (tmode) < max_size)
1636 mode = tmode;
1637
1638 if (mode == VOIDmode)
1639 break;
1640
1641 icode = mov_optab->handlers[(int) mode].insn_code;
1642 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1643 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1644
1645 max_size = GET_MODE_SIZE (mode);
1646 }
1647
1648 if (l)
1649 abort ();
1650 return n_insns;
1651 }
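/* Worked example (illustrative, assuming a 32-bit target with word-aligned
   operands and no slow unaligned accesses): L = 15 bytes is counted as
   3 SImode moves (12 bytes) + 1 HImode move (2 bytes) + 1 QImode move
   (1 byte) = 5 insns, which callers then compare against MOVE_RATIO.  */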
1652
1653 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1654 with move instructions for mode MODE. GENFUN is the gen_... function
1655 to make a move insn for that mode. DATA has all the other info. */
1656
1657 static void
1658 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1659 struct move_by_pieces *data)
1660 {
1661 unsigned int size = GET_MODE_SIZE (mode);
1662 rtx to1 = NULL_RTX, from1;
1663
1664 while (data->len >= size)
1665 {
1666 if (data->reverse)
1667 data->offset -= size;
1668
1669 if (data->to)
1670 {
1671 if (data->autinc_to)
1672 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1673 data->offset);
1674 else
1675 to1 = adjust_address (data->to, mode, data->offset);
1676 }
1677
1678 if (data->autinc_from)
1679 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1680 data->offset);
1681 else
1682 from1 = adjust_address (data->from, mode, data->offset);
1683
1684 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1685 emit_insn (gen_add2_insn (data->to_addr,
1686 GEN_INT (-(HOST_WIDE_INT)size)));
1687 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1688 emit_insn (gen_add2_insn (data->from_addr,
1689 GEN_INT (-(HOST_WIDE_INT)size)));
1690
1691 if (data->to)
1692 emit_insn ((*genfun) (to1, from1));
1693 else
1694 {
1695 #ifdef PUSH_ROUNDING
1696 emit_single_push_insn (mode, from1, NULL);
1697 #else
1698 abort ();
1699 #endif
1700 }
1701
1702 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1703 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1704 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1705 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1706
1707 if (! data->reverse)
1708 data->offset += size;
1709
1710 data->len -= size;
1711 }
1712 }
1713 \f
1714 /* Emit code to move a block Y to a block X. This may be done with
1715 string-move instructions, with multiple scalar move instructions,
1716 or with a library call.
1717
1718 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1719 SIZE is an rtx that says how long they are.
1720 ALIGN is the maximum alignment we can assume they have.
1721 METHOD describes what kind of copy this is, and what mechanisms may be used.
1722
1723 Return the address of the new block, if memcpy is called and returns it,
1724 0 otherwise. */
1725
1726 rtx
1727 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1728 {
1729 bool may_use_call;
1730 rtx retval = 0;
1731 unsigned int align;
1732
1733 switch (method)
1734 {
1735 case BLOCK_OP_NORMAL:
1736 may_use_call = true;
1737 break;
1738
1739 case BLOCK_OP_CALL_PARM:
1740 may_use_call = block_move_libcall_safe_for_call_parm ();
1741
1742 /* Make inhibit_defer_pop nonzero around the library call
1743 to force it to pop the arguments right away. */
1744 NO_DEFER_POP;
1745 break;
1746
1747 case BLOCK_OP_NO_LIBCALL:
1748 may_use_call = false;
1749 break;
1750
1751 default:
1752 abort ();
1753 }
1754
1755 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1756
1757 if (GET_MODE (x) != BLKmode)
1758 abort ();
1759 if (GET_MODE (y) != BLKmode)
1760 abort ();
1761
1762 x = protect_from_queue (x, 1);
1763 y = protect_from_queue (y, 0);
1764 size = protect_from_queue (size, 0);
1765
1766 if (GET_CODE (x) != MEM)
1767 abort ();
1768 if (GET_CODE (y) != MEM)
1769 abort ();
1770 if (size == 0)
1771 abort ();
1772
1773 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1774 can be incorrect is coming from __builtin_memcpy. */
1775 if (GET_CODE (size) == CONST_INT)
1776 {
1777 if (INTVAL (size) == 0)
1778 return 0;
1779
1780 x = shallow_copy_rtx (x);
1781 y = shallow_copy_rtx (y);
1782 set_mem_size (x, size);
1783 set_mem_size (y, size);
1784 }
1785
1786 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1787 move_by_pieces (x, y, INTVAL (size), align, 0);
1788 else if (emit_block_move_via_movstr (x, y, size, align))
1789 ;
1790 else if (may_use_call)
1791 retval = emit_block_move_via_libcall (x, y, size);
1792 else
1793 emit_block_move_via_loop (x, y, size, align);
1794
1795 if (method == BLOCK_OP_CALL_PARM)
1796 OK_DEFER_POP;
1797
1798 return retval;
1799 }
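/* Illustrative sketch (not compiled): when the block being copied is itself
   an outgoing argument, BLOCK_OP_CALL_PARM must be used so that a memcpy
   libcall is emitted only if it cannot clobber argument registers or
   already-pushed stack arguments.  The example_* function is
   hypothetical.  */
#if 0
static void
example_copy_outgoing_arg (rtx arg_slot, rtx value_mem, rtx size)
{
  emit_block_move (arg_slot, value_mem, size, BLOCK_OP_CALL_PARM);
}
#endif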
1800
1801 /* A subroutine of emit_block_move. Returns true if calling the
1802 block move libcall will not clobber any parameters which may have
1803 already been placed on the stack. */
1804
1805 static bool
1806 block_move_libcall_safe_for_call_parm (void)
1807 {
1808 if (PUSH_ARGS)
1809 return true;
1810 else
1811 {
1812 /* Check to see whether memcpy takes all register arguments. */
1813 static enum {
1814 takes_regs_uninit, takes_regs_no, takes_regs_yes
1815 } takes_regs = takes_regs_uninit;
1816
1817 switch (takes_regs)
1818 {
1819 case takes_regs_uninit:
1820 {
1821 CUMULATIVE_ARGS args_so_far;
1822 tree fn, arg;
1823
1824 fn = emit_block_move_libcall_fn (false);
1825 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0);
1826
1827 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1828 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1829 {
1830 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1831 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1832 if (!tmp || !REG_P (tmp))
1833 goto fail_takes_regs;
1834 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1835 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
1836 NULL_TREE, 1))
1837 goto fail_takes_regs;
1838 #endif
1839 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1840 }
1841 }
1842 takes_regs = takes_regs_yes;
1843 /* FALLTHRU */
1844
1845 case takes_regs_yes:
1846 return true;
1847
1848 fail_takes_regs:
1849 takes_regs = takes_regs_no;
1850 /* FALLTHRU */
1851 case takes_regs_no:
1852 return false;
1853
1854 default:
1855 abort ();
1856 }
1857 }
1858 }
1859
1860 /* A subroutine of emit_block_move. Expand a movstr pattern;
1861 return true if successful. */
1862
1863 static bool
1864 emit_block_move_via_movstr (rtx x, rtx y, rtx size, unsigned int align)
1865 {
1866 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1867 enum machine_mode mode;
1868
1869 /* Since this is a move insn, we don't care about volatility. */
1870 volatile_ok = 1;
1871
1872 /* Try the most limited insn first, because there's no point
1873 including more than one in the machine description unless
1874 the more limited one has some advantage. */
1875
1876 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1877 mode = GET_MODE_WIDER_MODE (mode))
1878 {
1879 enum insn_code code = movstr_optab[(int) mode];
1880 insn_operand_predicate_fn pred;
1881
1882 if (code != CODE_FOR_nothing
1883 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1884 here because if SIZE is less than the mode mask, as it is
1885 returned by the macro, it will definitely be less than the
1886 actual mode mask. */
1887 && ((GET_CODE (size) == CONST_INT
1888 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1889 <= (GET_MODE_MASK (mode) >> 1)))
1890 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1891 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1892 || (*pred) (x, BLKmode))
1893 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1894 || (*pred) (y, BLKmode))
1895 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1896 || (*pred) (opalign, VOIDmode)))
1897 {
1898 rtx op2;
1899 rtx last = get_last_insn ();
1900 rtx pat;
1901
1902 op2 = convert_to_mode (mode, size, 1);
1903 pred = insn_data[(int) code].operand[2].predicate;
1904 if (pred != 0 && ! (*pred) (op2, mode))
1905 op2 = copy_to_mode_reg (mode, op2);
1906
1907 /* ??? When called via emit_block_move_for_call, it'd be
1908 nice if there were some way to inform the backend, so
1909 that it doesn't fail the expansion because it thinks
1910 emitting the libcall would be more efficient. */
1911
1912 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1913 if (pat)
1914 {
1915 emit_insn (pat);
1916 volatile_ok = 0;
1917 return true;
1918 }
1919 else
1920 delete_insns_since (last);
1921 }
1922 }
1923
1924 volatile_ok = 0;
1925 return false;
1926 }
1927
1928 /* A subroutine of emit_block_move. Expand a call to memcpy or bcopy.
1929 Return the return value from memcpy, 0 otherwise. */
1930
1931 static rtx
1932 emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
1933 {
1934 rtx dst_addr, src_addr;
1935 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1936 enum machine_mode size_mode;
1937 rtx retval;
1938
1939 /* DST, SRC, or SIZE may have been passed through protect_from_queue.
1940
1941 It is unsafe to save the value generated by protect_from_queue and reuse
1942 it later. Consider what happens if emit_queue is called before the
1943 return value from protect_from_queue is used.
1944
1945 Expansion of the CALL_EXPR below will call emit_queue before we are
1946 finished emitting RTL for argument setup. So if we are not careful we
1947 could get the wrong value for an argument.
1948
1949 To avoid this problem we go ahead and emit code to copy the addresses of
1950 DST and SRC and SIZE into new pseudos. We can then place those new
1951 pseudos into an RTL_EXPR and use them later, even after a call to
1952 emit_queue.
1953
1954 Note this is not strictly needed for library calls since they do not call
1955 emit_queue before loading their arguments. However, we may need to have
1956 library calls call emit_queue in the future since failing to do so could
1957 cause problems for targets which define SMALL_REGISTER_CLASSES and pass
1958 arguments in registers. */
1959
1960 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1961 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1962
1963 #ifdef POINTERS_EXTEND_UNSIGNED
1964 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1965 src_addr = convert_memory_address (ptr_mode, src_addr);
1966 #endif
1967
1968 dst_tree = make_tree (ptr_type_node, dst_addr);
1969 src_tree = make_tree (ptr_type_node, src_addr);
1970
1971 if (TARGET_MEM_FUNCTIONS)
1972 size_mode = TYPE_MODE (sizetype);
1973 else
1974 size_mode = TYPE_MODE (unsigned_type_node);
1975
1976 size = convert_to_mode (size_mode, size, 1);
1977 size = copy_to_mode_reg (size_mode, size);
1978
1979 /* It is incorrect to use the libcall calling conventions to call
1980 memcpy in this context. This could be a user call to memcpy and
1981 the user may wish to examine the return value from memcpy. For
1982 targets where libcalls and normal calls have different conventions
1983 for returning pointers, we could end up generating incorrect code.
1984
1985 For convenience, we generate the call to bcopy this way as well. */
1986
1987 if (TARGET_MEM_FUNCTIONS)
1988 size_tree = make_tree (sizetype, size);
1989 else
1990 size_tree = make_tree (unsigned_type_node, size);
1991
1992 fn = emit_block_move_libcall_fn (true);
1993 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1994 if (TARGET_MEM_FUNCTIONS)
1995 {
1996 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1997 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1998 }
1999 else
2000 {
2001 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
2002 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
2003 }
2004
2005 /* Now we have to build up the CALL_EXPR itself. */
2006 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2007 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2008 call_expr, arg_list, NULL_TREE);
2009
2010 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2011
2012 /* If we are initializing a readonly value, show the above call clobbered
2013 it. Otherwise, a load from it may erroneously be hoisted from a loop, or
2014 the delay slot scheduler might overlook conflicts and take nasty
2015 decisions. */
2016 if (RTX_UNCHANGING_P (dst))
2017 add_function_usage_to
2018 (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
2019 gen_rtx_CLOBBER (VOIDmode, dst),
2020 NULL_RTX));
2021
2022 return TARGET_MEM_FUNCTIONS ? retval : NULL_RTX;
2023 }
2024
2025 /* A subroutine of emit_block_move_via_libcall. Create the tree node
2026 for the function we use for block copies. The first time FOR_CALL
2027 is true, we call assemble_external. */
2028
2029 static GTY(()) tree block_move_fn;
2030
2031 void
2032 init_block_move_fn (const char *asmspec)
2033 {
2034 if (!block_move_fn)
2035 {
2036 tree args, fn;
2037
2038 if (TARGET_MEM_FUNCTIONS)
2039 {
2040 fn = get_identifier ("memcpy");
2041 args = build_function_type_list (ptr_type_node, ptr_type_node,
2042 const_ptr_type_node, sizetype,
2043 NULL_TREE);
2044 }
2045 else
2046 {
2047 fn = get_identifier ("bcopy");
2048 args = build_function_type_list (void_type_node, const_ptr_type_node,
2049 ptr_type_node, unsigned_type_node,
2050 NULL_TREE);
2051 }
2052
2053 fn = build_decl (FUNCTION_DECL, fn, args);
2054 DECL_EXTERNAL (fn) = 1;
2055 TREE_PUBLIC (fn) = 1;
2056 DECL_ARTIFICIAL (fn) = 1;
2057 TREE_NOTHROW (fn) = 1;
2058
2059 block_move_fn = fn;
2060 }
2061
2062 if (asmspec)
2063 {
2064 SET_DECL_RTL (block_move_fn, NULL_RTX);
2065 SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));
2066 }
2067 }
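/* For illustration only: the FUNCTION_DECL built above corresponds to
   one of the following C prototypes, depending on TARGET_MEM_FUNCTIONS
   (the argument types mirror the build_function_type_list calls above):

       void *memcpy (void *, const void *, size_t);
       void bcopy (const void *, void *, unsigned);  */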
2068
2069 static tree
2070 emit_block_move_libcall_fn (int for_call)
2071 {
2072 static bool emitted_extern;
2073
2074 if (!block_move_fn)
2075 init_block_move_fn (NULL);
2076
2077 if (for_call && !emitted_extern)
2078 {
2079 emitted_extern = true;
2080 make_decl_rtl (block_move_fn, NULL);
2081 assemble_external (block_move_fn);
2082 }
2083
2084 return block_move_fn;
2085 }
2086
2087 /* A subroutine of emit_block_move. Copy the data via an explicit
2088 loop. This is used only when libcalls are forbidden. */
2089 /* ??? It'd be nice to copy in hunks larger than QImode. */
2090
2091 static void
2092 emit_block_move_via_loop (rtx x, rtx y, rtx size,
2093 unsigned int align ATTRIBUTE_UNUSED)
2094 {
2095 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
2096 enum machine_mode iter_mode;
2097
2098 iter_mode = GET_MODE (size);
2099 if (iter_mode == VOIDmode)
2100 iter_mode = word_mode;
2101
2102 top_label = gen_label_rtx ();
2103 cmp_label = gen_label_rtx ();
2104 iter = gen_reg_rtx (iter_mode);
2105
2106 emit_move_insn (iter, const0_rtx);
2107
2108 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
2109 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
2110 do_pending_stack_adjust ();
2111
2112 emit_note (NOTE_INSN_LOOP_BEG);
2113
2114 emit_jump (cmp_label);
2115 emit_label (top_label);
2116
2117 tmp = convert_modes (Pmode, iter_mode, iter, true);
2118 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
2119 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
2120 x = change_address (x, QImode, x_addr);
2121 y = change_address (y, QImode, y_addr);
2122
2123 emit_move_insn (x, y);
2124
2125 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
2126 true, OPTAB_LIB_WIDEN);
2127 if (tmp != iter)
2128 emit_move_insn (iter, tmp);
2129
2130 emit_note (NOTE_INSN_LOOP_CONT);
2131 emit_label (cmp_label);
2132
2133 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
2134 true, top_label);
2135
2136 emit_note (NOTE_INSN_LOOP_END);
2137 }
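/* For illustration only: the insns emitted above behave roughly like
   the following C fragment, copying one byte per iteration.  The names
   dst, src, n and i are hypothetical stand-ins for the addresses of X
   and Y, the value of SIZE and the ITER counter; addressing details
   (Pmode conversions, change_address) are simplified away.

       unsigned char *dst, *src;
       unsigned long i, n;
       for (i = 0; i < n; i++)
         dst[i] = src[i];  */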
2138 \f
2139 /* Copy all or part of a value X into registers starting at REGNO.
2140 The number of registers to be filled is NREGS. */
2141
2142 void
2143 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
2144 {
2145 int i;
2146 #ifdef HAVE_load_multiple
2147 rtx pat;
2148 rtx last;
2149 #endif
2150
2151 if (nregs == 0)
2152 return;
2153
2154 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2155 x = validize_mem (force_const_mem (mode, x));
2156
2157 /* See if the machine can do this with a load multiple insn. */
2158 #ifdef HAVE_load_multiple
2159 if (HAVE_load_multiple)
2160 {
2161 last = get_last_insn ();
2162 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
2163 GEN_INT (nregs));
2164 if (pat)
2165 {
2166 emit_insn (pat);
2167 return;
2168 }
2169 else
2170 delete_insns_since (last);
2171 }
2172 #endif
2173
2174 for (i = 0; i < nregs; i++)
2175 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
2176 operand_subword_force (x, i, mode));
2177 }
2178
2179 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
2180 The number of registers to be filled is NREGS. */
2181
2182 void
2183 move_block_from_reg (int regno, rtx x, int nregs)
2184 {
2185 int i;
2186
2187 if (nregs == 0)
2188 return;
2189
2190 /* See if the machine can do this with a store multiple insn. */
2191 #ifdef HAVE_store_multiple
2192 if (HAVE_store_multiple)
2193 {
2194 rtx last = get_last_insn ();
2195 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
2196 GEN_INT (nregs));
2197 if (pat)
2198 {
2199 emit_insn (pat);
2200 return;
2201 }
2202 else
2203 delete_insns_since (last);
2204 }
2205 #endif
2206
2207 for (i = 0; i < nregs; i++)
2208 {
2209 rtx tem = operand_subword (x, i, 1, BLKmode);
2210
2211 if (tem == 0)
2212 abort ();
2213
2214 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
2215 }
2216 }
2217
2218 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
2219 ORIG, where ORIG is a non-consecutive group of registers represented by
2220 a PARALLEL. The clone is identical to the original except in that the
2221 original set of registers is replaced by a new set of pseudo registers.
2222 The new set has the same modes as the original set. */
2223
2224 rtx
2225 gen_group_rtx (rtx orig)
2226 {
2227 int i, length;
2228 rtx *tmps;
2229
2230 if (GET_CODE (orig) != PARALLEL)
2231 abort ();
2232
2233 length = XVECLEN (orig, 0);
2234 tmps = alloca (sizeof (rtx) * length);
2235
2236 /* Skip a NULL entry in first slot. */
2237 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
2238
2239 if (i)
2240 tmps[0] = 0;
2241
2242 for (; i < length; i++)
2243 {
2244 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
2245 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
2246
2247 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
2248 }
2249
2250 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
2251 }
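/* For illustration only: a register "group" PARALLEL of the kind built
   above and consumed by emit_group_load/emit_group_store looks
   schematically like this (register numbers, modes and offsets are
   made up):

       (parallel [(expr_list (reg:SI 100) (const_int 0))
                  (expr_list (reg:SI 101) (const_int 4))])

   Each EXPR_LIST pairs a register with the byte offset of the piece it
   carries; the first entry may have a null register when the value
   lives partly on the stack, which is the NULL entry skipped above.  */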
2252
2253 /* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
2254 where DST is non-consecutive registers represented by a PARALLEL.
2255 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
2256 if not known. */
2257
2258 void
2259 emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
2260 {
2261 rtx *tmps, src;
2262 int start, i;
2263
2264 if (GET_CODE (dst) != PARALLEL)
2265 abort ();
2266
2267 /* Check for a NULL entry, used to indicate that the parameter goes
2268 both on the stack and in registers. */
2269 if (XEXP (XVECEXP (dst, 0, 0), 0))
2270 start = 0;
2271 else
2272 start = 1;
2273
2274 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
2275
2276 /* Process the pieces. */
2277 for (i = start; i < XVECLEN (dst, 0); i++)
2278 {
2279 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
2280 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
2281 unsigned int bytelen = GET_MODE_SIZE (mode);
2282 int shift = 0;
2283
2284 /* Handle trailing fragments that run over the size of the struct. */
2285 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2286 {
2287 /* Arrange to shift the fragment to where it belongs.
2288 extract_bit_field loads to the lsb of the reg. */
2289 if (
2290 #ifdef BLOCK_REG_PADDING
2291 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
2292 == (BYTES_BIG_ENDIAN ? upward : downward)
2293 #else
2294 BYTES_BIG_ENDIAN
2295 #endif
2296 )
2297 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2298 bytelen = ssize - bytepos;
2299 if (bytelen <= 0)
2300 abort ();
2301 }
2302
2303 /* If we won't be loading directly from memory, protect the real source
2304 from strange tricks we might play; but make sure that the source can
2305 be loaded directly into the destination. */
2306 src = orig_src;
2307 if (GET_CODE (orig_src) != MEM
2308 && (!CONSTANT_P (orig_src)
2309 || (GET_MODE (orig_src) != mode
2310 && GET_MODE (orig_src) != VOIDmode)))
2311 {
2312 if (GET_MODE (orig_src) == VOIDmode)
2313 src = gen_reg_rtx (mode);
2314 else
2315 src = gen_reg_rtx (GET_MODE (orig_src));
2316
2317 emit_move_insn (src, orig_src);
2318 }
2319
2320 /* Optimize the access just a bit. */
2321 if (GET_CODE (src) == MEM
2322 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
2323 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
2324 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2325 && bytelen == GET_MODE_SIZE (mode))
2326 {
2327 tmps[i] = gen_reg_rtx (mode);
2328 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2329 }
2330 else if (GET_CODE (src) == CONCAT)
2331 {
2332 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
2333 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
2334
2335 if ((bytepos == 0 && bytelen == slen0)
2336 || (bytepos != 0 && bytepos + bytelen <= slen))
2337 {
2338 /* The following assumes that the concatenated objects all
2339 have the same size. In this case, a simple calculation
2340 can be used to determine the object and the bit field
2341 to be extracted. */
2342 tmps[i] = XEXP (src, bytepos / slen0);
2343 if (! CONSTANT_P (tmps[i])
2344 && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
2345 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
2346 (bytepos % slen0) * BITS_PER_UNIT,
2347 1, NULL_RTX, mode, mode, ssize);
2348 }
2349 else if (bytepos == 0)
2350 {
2351 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
2352 emit_move_insn (mem, src);
2353 tmps[i] = adjust_address (mem, mode, 0);
2354 }
2355 else
2356 abort ();
2357 }
2358 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
2359 SIMD register, which is currently broken. Until we get GCC
2360 to emit proper RTL for these cases, let's dump to memory. */
2361 else if (VECTOR_MODE_P (GET_MODE (dst))
2362 && GET_CODE (src) == REG)
2363 {
2364 int slen = GET_MODE_SIZE (GET_MODE (src));
2365 rtx mem;
2366
2367 mem = assign_stack_temp (GET_MODE (src), slen, 0);
2368 emit_move_insn (mem, src);
2369 tmps[i] = adjust_address (mem, mode, (int) bytepos);
2370 }
2371 else if (CONSTANT_P (src)
2372 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2373 tmps[i] = src;
2374 else
2375 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2376 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2377 mode, mode, ssize);
2378
2379 if (shift)
2380 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2381 tmps[i], 0, OPTAB_WIDEN);
2382 }
2383
2384 emit_queue ();
2385
2386 /* Copy the extracted pieces into the proper (probable) hard regs. */
2387 for (i = start; i < XVECLEN (dst, 0); i++)
2388 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2389 }
2390
2391 /* Emit code to move a block SRC to block DST, where SRC and DST are
2392 non-consecutive groups of registers, each represented by a PARALLEL. */
2393
2394 void
2395 emit_group_move (rtx dst, rtx src)
2396 {
2397 int i;
2398
2399 if (GET_CODE (src) != PARALLEL
2400 || GET_CODE (dst) != PARALLEL
2401 || XVECLEN (src, 0) != XVECLEN (dst, 0))
2402 abort ();
2403
2404 /* Skip first entry if NULL. */
2405 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
2406 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
2407 XEXP (XVECEXP (src, 0, i), 0));
2408 }
2409
2410 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
2411 where SRC is non-consecutive registers represented by a PARALLEL.
2412 SSIZE represents the total size of block ORIG_DST, or -1 if not
2413 known. */
2414
2415 void
2416 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
2417 {
2418 rtx *tmps, dst;
2419 int start, i;
2420
2421 if (GET_CODE (src) != PARALLEL)
2422 abort ();
2423
2424 /* Check for a NULL entry, used to indicate that the parameter goes
2425 both on the stack and in registers. */
2426 if (XEXP (XVECEXP (src, 0, 0), 0))
2427 start = 0;
2428 else
2429 start = 1;
2430
2431 tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));
2432
2433 /* Copy the (probable) hard regs into pseudos. */
2434 for (i = start; i < XVECLEN (src, 0); i++)
2435 {
2436 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2437 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2438 emit_move_insn (tmps[i], reg);
2439 }
2440 emit_queue ();
2441
2442 /* If we won't be storing directly into memory, protect the real destination
2443 from strange tricks we might play. */
2444 dst = orig_dst;
2445 if (GET_CODE (dst) == PARALLEL)
2446 {
2447 rtx temp;
2448
2449 /* We can get a PARALLEL dst if there is a conditional expression in
2450 a return statement. In that case, the dst and src are the same,
2451 so no action is necessary. */
2452 if (rtx_equal_p (dst, src))
2453 return;
2454
2455 /* It is unclear if we can ever reach here, but we may as well handle
2456 it. Allocate a temporary, and split this into a store/load to/from
2457 the temporary. */
2458
2459 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2460 emit_group_store (temp, src, type, ssize);
2461 emit_group_load (dst, temp, type, ssize);
2462 return;
2463 }
2464 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2465 {
2466 dst = gen_reg_rtx (GET_MODE (orig_dst));
2467 /* Make life a bit easier for combine. */
2468 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
2469 }
2470
2471 /* Process the pieces. */
2472 for (i = start; i < XVECLEN (src, 0); i++)
2473 {
2474 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2475 enum machine_mode mode = GET_MODE (tmps[i]);
2476 unsigned int bytelen = GET_MODE_SIZE (mode);
2477 rtx dest = dst;
2478
2479 /* Handle trailing fragments that run over the size of the struct. */
2480 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2481 {
2482 /* store_bit_field always takes its value from the lsb.
2483 Move the fragment to the lsb if it's not already there. */
2484 if (
2485 #ifdef BLOCK_REG_PADDING
2486 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2487 == (BYTES_BIG_ENDIAN ? upward : downward)
2488 #else
2489 BYTES_BIG_ENDIAN
2490 #endif
2491 )
2492 {
2493 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2494 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2495 tmps[i], 0, OPTAB_WIDEN);
2496 }
2497 bytelen = ssize - bytepos;
2498 }
2499
2500 if (GET_CODE (dst) == CONCAT)
2501 {
2502 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2503 dest = XEXP (dst, 0);
2504 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2505 {
2506 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2507 dest = XEXP (dst, 1);
2508 }
2509 else if (bytepos == 0 && XVECLEN (src, 0))
2510 {
2511 dest = assign_stack_temp (GET_MODE (dest),
2512 GET_MODE_SIZE (GET_MODE (dest)), 0);
2513 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2514 tmps[i]);
2515 dst = dest;
2516 break;
2517 }
2518 else
2519 abort ();
2520 }
2521
2522 /* Optimize the access just a bit. */
2523 if (GET_CODE (dest) == MEM
2524 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2525 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2526 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2527 && bytelen == GET_MODE_SIZE (mode))
2528 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2529 else
2530 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2531 mode, tmps[i], ssize);
2532 }
2533
2534 emit_queue ();
2535
2536 /* Copy from the pseudo into the (probable) hard reg. */
2537 if (orig_dst != dst)
2538 emit_move_insn (orig_dst, dst);
2539 }
2540
2541 /* Generate code to copy a BLKmode object of TYPE out of a
2542 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2543 is null, a stack temporary is created. TGTBLK is returned.
2544
2545 The primary purpose of this routine is to handle functions
2546 that return BLKmode structures in registers. Some machines
2547 (the PA for example) want to return all small structures
2548 in registers regardless of the structure's alignment. */
2549
2550 rtx
2551 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2552 {
2553 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2554 rtx src = NULL, dst = NULL;
2555 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2556 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2557
2558 if (tgtblk == 0)
2559 {
2560 tgtblk = assign_temp (build_qualified_type (type,
2561 (TYPE_QUALS (type)
2562 | TYPE_QUAL_CONST)),
2563 0, 1, 1);
2564 preserve_temp_slots (tgtblk);
2565 }
2566
2567 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2568 into a new pseudo which is a full word. */
2569
2570 if (GET_MODE (srcreg) != BLKmode
2571 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2572 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2573
2574 /* Structures whose size is not a multiple of a word are aligned
2575 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2576 machine, this means we must skip the empty high order bytes when
2577 calculating the bit offset. */
2578 if (BYTES_BIG_ENDIAN
2579 && bytes % UNITS_PER_WORD)
2580 big_endian_correction
2581 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2582
2583 /* Copy the structure BITSIZE bits at a time.
2584
2585 We could probably emit more efficient code for machines which do not use
2586 strict alignment, but it doesn't seem worth the effort at the current
2587 time. */
2588 for (bitpos = 0, xbitpos = big_endian_correction;
2589 bitpos < bytes * BITS_PER_UNIT;
2590 bitpos += bitsize, xbitpos += bitsize)
2591 {
2592 /* We need a new source operand each time xbitpos is on a
2593 word boundary and when xbitpos == big_endian_correction
2594 (the first time through). */
2595 if (xbitpos % BITS_PER_WORD == 0
2596 || xbitpos == big_endian_correction)
2597 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2598 GET_MODE (srcreg));
2599
2600 /* We need a new destination operand each time bitpos is on
2601 a word boundary. */
2602 if (bitpos % BITS_PER_WORD == 0)
2603 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2604
2605 /* Use xbitpos for the source extraction (right justified) and
2606 bitpos for the destination store (left justified). */
2607 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2608 extract_bit_field (src, bitsize,
2609 xbitpos % BITS_PER_WORD, 1,
2610 NULL_RTX, word_mode, word_mode,
2611 BITS_PER_WORD),
2612 BITS_PER_WORD);
2613 }
2614
2615 return tgtblk;
2616 }
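/* A worked example of the BIG_ENDIAN_CORRECTION above, for a
   hypothetical big-endian target with UNITS_PER_WORD == 4 and
   BITS_PER_WORD == 32: copying a 6-byte structure gives
   bytes % UNITS_PER_WORD == 2, so the correction is 32 - 2 * 8 == 16,
   and the first extraction starts 16 bits into the first register
   word, skipping its unused high-order bytes.  */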
2617
2618 /* Add a USE expression for REG to the (possibly empty) list pointed
2619 to by CALL_FUSAGE. REG must denote a hard register. */
2620
2621 void
2622 use_reg (rtx *call_fusage, rtx reg)
2623 {
2624 if (GET_CODE (reg) != REG
2625 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2626 abort ();
2627
2628 *call_fusage
2629 = gen_rtx_EXPR_LIST (VOIDmode,
2630 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2631 }
2632
2633 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2634 starting at REGNO. All of these registers must be hard registers. */
2635
2636 void
2637 use_regs (rtx *call_fusage, int regno, int nregs)
2638 {
2639 int i;
2640
2641 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2642 abort ();
2643
2644 for (i = 0; i < nregs; i++)
2645 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2646 }
2647
2648 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2649 PARALLEL REGS. This is for calls that pass values in multiple
2650 non-contiguous locations. The Irix 6 ABI has examples of this. */
2651
2652 void
2653 use_group_regs (rtx *call_fusage, rtx regs)
2654 {
2655 int i;
2656
2657 for (i = 0; i < XVECLEN (regs, 0); i++)
2658 {
2659 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2660
2661 /* A NULL entry means the parameter goes both on the stack and in
2662 registers. This can also be a MEM for targets that pass values
2663 partially on the stack and partially in registers. */
2664 if (reg != 0 && GET_CODE (reg) == REG)
2665 use_reg (call_fusage, reg);
2666 }
2667 }
2668 \f
2669
2670 /* Determine whether the LEN bytes generated by CONSTFUN can be
2671 stored to memory using several move instructions. CONSTFUNDATA is
2672 a pointer which will be passed as argument in every CONSTFUN call.
2673 ALIGN is maximum alignment we can assume. Return nonzero if a
2674 call to store_by_pieces should succeed. */
2675
2676 int
2677 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2678 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2679 void *constfundata, unsigned int align)
2680 {
2681 unsigned HOST_WIDE_INT max_size, l;
2682 HOST_WIDE_INT offset = 0;
2683 enum machine_mode mode, tmode;
2684 enum insn_code icode;
2685 int reverse;
2686 rtx cst;
2687
2688 if (len == 0)
2689 return 1;
2690
2691 if (! STORE_BY_PIECES_P (len, align))
2692 return 0;
2693
2694 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2695 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2696 align = MOVE_MAX * BITS_PER_UNIT;
2697
2698 /* We would first store what we can in the largest integer mode, then go to
2699 successively smaller modes. */
2700
2701 for (reverse = 0;
2702 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2703 reverse++)
2704 {
2705 l = len;
2706 mode = VOIDmode;
2707 max_size = STORE_MAX_PIECES + 1;
2708 while (max_size > 1)
2709 {
2710 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2711 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2712 if (GET_MODE_SIZE (tmode) < max_size)
2713 mode = tmode;
2714
2715 if (mode == VOIDmode)
2716 break;
2717
2718 icode = mov_optab->handlers[(int) mode].insn_code;
2719 if (icode != CODE_FOR_nothing
2720 && align >= GET_MODE_ALIGNMENT (mode))
2721 {
2722 unsigned int size = GET_MODE_SIZE (mode);
2723
2724 while (l >= size)
2725 {
2726 if (reverse)
2727 offset -= size;
2728
2729 cst = (*constfun) (constfundata, offset, mode);
2730 if (!LEGITIMATE_CONSTANT_P (cst))
2731 return 0;
2732
2733 if (!reverse)
2734 offset += size;
2735
2736 l -= size;
2737 }
2738 }
2739
2740 max_size = GET_MODE_SIZE (mode);
2741 }
2742
2743 /* The code above should have handled everything. */
2744 if (l != 0)
2745 abort ();
2746 }
2747
2748 return 1;
2749 }
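/* For illustration only: a hypothetical CONSTFUN callback in the style
   expected by can_store_by_pieces and store_by_pieces.  It treats
   CONSTFUNDATA as a string and returns MODE-sized chunks of it via
   c_readstr, much like the callbacks used for the string/memory
   builtins; clear_by_pieces_1 below is the trivial all-zeros variant.

       static rtx
       example_read_str (void *data, HOST_WIDE_INT offset,
                         enum machine_mode mode)
       {
         const char *str = (const char *) data;
         return c_readstr (str + offset, mode);
       }

   The name example_read_str is made up; it is not part of this file.  */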
2750
2751 /* Generate several move instructions to store LEN bytes generated by
2752 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2753 pointer which will be passed as argument in every CONSTFUN call.
2754 ALIGN is maximum alignment we can assume.
2755 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2756 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2757 stpcpy. */
2758
2759 rtx
2760 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2761 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2762 void *constfundata, unsigned int align, int endp)
2763 {
2764 struct store_by_pieces data;
2765
2766 if (len == 0)
2767 {
2768 if (endp == 2)
2769 abort ();
2770 return to;
2771 }
2772
2773 if (! STORE_BY_PIECES_P (len, align))
2774 abort ();
2775 to = protect_from_queue (to, 1);
2776 data.constfun = constfun;
2777 data.constfundata = constfundata;
2778 data.len = len;
2779 data.to = to;
2780 store_by_pieces_1 (&data, align);
2781 if (endp)
2782 {
2783 rtx to1;
2784
2785 if (data.reverse)
2786 abort ();
2787 if (data.autinc_to)
2788 {
2789 if (endp == 2)
2790 {
2791 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2792 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2793 else
2794 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2795 -1));
2796 }
2797 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2798 data.offset);
2799 }
2800 else
2801 {
2802 if (endp == 2)
2803 --data.offset;
2804 to1 = adjust_address (data.to, QImode, data.offset);
2805 }
2806 return to1;
2807 }
2808 else
2809 return data.to;
2810 }
2811
2812 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2813 rtx with BLKmode). The caller must pass TO through protect_from_queue
2814 before calling. ALIGN is maximum alignment we can assume. */
2815
2816 static void
2817 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2818 {
2819 struct store_by_pieces data;
2820
2821 if (len == 0)
2822 return;
2823
2824 data.constfun = clear_by_pieces_1;
2825 data.constfundata = NULL;
2826 data.len = len;
2827 data.to = to;
2828 store_by_pieces_1 (&data, align);
2829 }
2830
2831 /* Callback routine for clear_by_pieces.
2832 Return const0_rtx unconditionally. */
2833
2834 static rtx
2835 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2836 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2837 enum machine_mode mode ATTRIBUTE_UNUSED)
2838 {
2839 return const0_rtx;
2840 }
2841
2842 /* Subroutine of clear_by_pieces and store_by_pieces.
2843 Generate several move instructions to store LEN bytes of block TO. (A MEM
2844 rtx with BLKmode). The caller must pass TO through protect_from_queue
2845 before calling. ALIGN is maximum alignment we can assume. */
2846
2847 static void
2848 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2849 unsigned int align ATTRIBUTE_UNUSED)
2850 {
2851 rtx to_addr = XEXP (data->to, 0);
2852 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2853 enum machine_mode mode = VOIDmode, tmode;
2854 enum insn_code icode;
2855
2856 data->offset = 0;
2857 data->to_addr = to_addr;
2858 data->autinc_to
2859 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2860 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2861
2862 data->explicit_inc_to = 0;
2863 data->reverse
2864 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2865 if (data->reverse)
2866 data->offset = data->len;
2867
2868 /* If storing requires more than two move insns,
2869 copy addresses to registers (to make displacements shorter)
2870 and use post-increment if available. */
2871 if (!data->autinc_to
2872 && move_by_pieces_ninsns (data->len, align) > 2)
2873 {
2874 /* Determine the main mode we'll be using. */
2875 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2876 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2877 if (GET_MODE_SIZE (tmode) < max_size)
2878 mode = tmode;
2879
2880 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2881 {
2882 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2883 data->autinc_to = 1;
2884 data->explicit_inc_to = -1;
2885 }
2886
2887 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2888 && ! data->autinc_to)
2889 {
2890 data->to_addr = copy_addr_to_reg (to_addr);
2891 data->autinc_to = 1;
2892 data->explicit_inc_to = 1;
2893 }
2894
2895 if ( !data->autinc_to && CONSTANT_P (to_addr))
2896 data->to_addr = copy_addr_to_reg (to_addr);
2897 }
2898
2899 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2900 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2901 align = MOVE_MAX * BITS_PER_UNIT;
2902
2903 /* First store what we can in the largest integer mode, then go to
2904 successively smaller modes. */
2905
2906 while (max_size > 1)
2907 {
2908 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2909 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2910 if (GET_MODE_SIZE (tmode) < max_size)
2911 mode = tmode;
2912
2913 if (mode == VOIDmode)
2914 break;
2915
2916 icode = mov_optab->handlers[(int) mode].insn_code;
2917 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2918 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2919
2920 max_size = GET_MODE_SIZE (mode);
2921 }
2922
2923 /* The code above should have handled everything. */
2924 if (data->len != 0)
2925 abort ();
2926 }
2927
2928 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2929 with move instructions for mode MODE. GENFUN is the gen_... function
2930 to make a move insn for that mode. DATA has all the other info. */
2931
2932 static void
2933 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2934 struct store_by_pieces *data)
2935 {
2936 unsigned int size = GET_MODE_SIZE (mode);
2937 rtx to1, cst;
2938
2939 while (data->len >= size)
2940 {
2941 if (data->reverse)
2942 data->offset -= size;
2943
2944 if (data->autinc_to)
2945 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2946 data->offset);
2947 else
2948 to1 = adjust_address (data->to, mode, data->offset);
2949
2950 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2951 emit_insn (gen_add2_insn (data->to_addr,
2952 GEN_INT (-(HOST_WIDE_INT) size)));
2953
2954 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2955 emit_insn ((*genfun) (to1, cst));
2956
2957 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2958 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2959
2960 if (! data->reverse)
2961 data->offset += size;
2962
2963 data->len -= size;
2964 }
2965 }
2966 \f
2967 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2968 its length in bytes. */
2969
2970 rtx
2971 clear_storage (rtx object, rtx size)
2972 {
2973 rtx retval = 0;
2974 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2975 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2976
2977 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2978 just move a zero. Otherwise, do this a piece at a time. */
2979 if (GET_MODE (object) != BLKmode
2980 && GET_CODE (size) == CONST_INT
2981 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2982 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2983 else
2984 {
2985 object = protect_from_queue (object, 1);
2986 size = protect_from_queue (size, 0);
2987
2988 if (size == const0_rtx)
2989 ;
2990 else if (GET_CODE (size) == CONST_INT
2991 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2992 clear_by_pieces (object, INTVAL (size), align);
2993 else if (clear_storage_via_clrstr (object, size, align))
2994 ;
2995 else
2996 retval = clear_storage_via_libcall (object, size);
2997 }
2998
2999 return retval;
3000 }
3001
3002 /* A subroutine of clear_storage. Expand a clrstr pattern;
3003 return true if successful. */
3004
3005 static bool
3006 clear_storage_via_clrstr (rtx object, rtx size, unsigned int align)
3007 {
3008 /* Try the most limited insn first, because there's no point
3009 including more than one in the machine description unless
3010 the more limited one has some advantage. */
3011
3012 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3013 enum machine_mode mode;
3014
3015 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
3016 mode = GET_MODE_WIDER_MODE (mode))
3017 {
3018 enum insn_code code = clrstr_optab[(int) mode];
3019 insn_operand_predicate_fn pred;
3020
3021 if (code != CODE_FOR_nothing
3022 /* We don't need MODE to be narrower than
3023 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
3024 the mode mask, as it is returned by the macro, it will
3025 definitely be less than the actual mode mask. */
3026 && ((GET_CODE (size) == CONST_INT
3027 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3028 <= (GET_MODE_MASK (mode) >> 1)))
3029 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3030 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
3031 || (*pred) (object, BLKmode))
3032 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
3033 || (*pred) (opalign, VOIDmode)))
3034 {
3035 rtx op1;
3036 rtx last = get_last_insn ();
3037 rtx pat;
3038
3039 op1 = convert_to_mode (mode, size, 1);
3040 pred = insn_data[(int) code].operand[1].predicate;
3041 if (pred != 0 && ! (*pred) (op1, mode))
3042 op1 = copy_to_mode_reg (mode, op1);
3043
3044 pat = GEN_FCN ((int) code) (object, op1, opalign);
3045 if (pat)
3046 {
3047 emit_insn (pat);
3048 return true;
3049 }
3050 else
3051 delete_insns_since (last);
3052 }
3053 }
3054
3055 return false;
3056 }
3057
3058 /* A subroutine of clear_storage. Expand a call to memset or bzero.
3059 Return the return value of memset, 0 otherwise. */
3060
3061 static rtx
3062 clear_storage_via_libcall (rtx object, rtx size)
3063 {
3064 tree call_expr, arg_list, fn, object_tree, size_tree;
3065 enum machine_mode size_mode;
3066 rtx retval;
3067
3068 /* OBJECT or SIZE may have been passed through protect_from_queue.
3069
3070 It is unsafe to save the value generated by protect_from_queue
3071 and reuse it later. Consider what happens if emit_queue is
3072 called before the return value from protect_from_queue is used.
3073
3074 Expansion of the CALL_EXPR below will call emit_queue before
3075 we are finished emitting RTL for argument setup. So if we are
3076 not careful we could get the wrong value for an argument.
3077
3078 To avoid this problem we go ahead and emit code to copy OBJECT
3079 and SIZE into new pseudos. We can then place those new pseudos
3080 into an RTL_EXPR and use them later, even after a call to
3081 emit_queue.
3082
3083 Note this is not strictly needed for library calls since they
3084 do not call emit_queue before loading their arguments. However,
3085 we may need to have library calls call emit_queue in the future
3086 since failing to do so could cause problems for targets which
3087 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
3088
3089 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
3090
3091 if (TARGET_MEM_FUNCTIONS)
3092 size_mode = TYPE_MODE (sizetype);
3093 else
3094 size_mode = TYPE_MODE (unsigned_type_node);
3095 size = convert_to_mode (size_mode, size, 1);
3096 size = copy_to_mode_reg (size_mode, size);
3097
3098 /* It is incorrect to use the libcall calling conventions to call
3099 memset in this context. This could be a user call to memset and
3100 the user may wish to examine the return value from memset. For
3101 targets where libcalls and normal calls have different conventions
3102 for returning pointers, we could end up generating incorrect code.
3103
3104 For convenience, we generate the call to bzero this way as well. */
3105
3106 object_tree = make_tree (ptr_type_node, object);
3107 if (TARGET_MEM_FUNCTIONS)
3108 size_tree = make_tree (sizetype, size);
3109 else
3110 size_tree = make_tree (unsigned_type_node, size);
3111
3112 fn = clear_storage_libcall_fn (true);
3113 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
3114 if (TARGET_MEM_FUNCTIONS)
3115 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
3116 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
3117
3118 /* Now we have to build up the CALL_EXPR itself. */
3119 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
3120 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
3121 call_expr, arg_list, NULL_TREE);
3122
3123 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
3124
3125 /* If we are initializing a readonly value, show the above call
3126 clobbered it. Otherwise, a load from it may erroneously be
3127 hoisted from a loop. */
3128 if (RTX_UNCHANGING_P (object))
3129 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
3130
3131 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
3132 }
3133
3134 /* A subroutine of clear_storage_via_libcall. Create the tree node
3135 for the function we use for block clears. The first time FOR_CALL
3136 is true, we call assemble_external. */
3137
3138 static GTY(()) tree block_clear_fn;
3139
3140 void
3141 init_block_clear_fn (const char *asmspec)
3142 {
3143 if (!block_clear_fn)
3144 {
3145 tree fn, args;
3146
3147 if (TARGET_MEM_FUNCTIONS)
3148 {
3149 fn = get_identifier ("memset");
3150 args = build_function_type_list (ptr_type_node, ptr_type_node,
3151 integer_type_node, sizetype,
3152 NULL_TREE);
3153 }
3154 else
3155 {
3156 fn = get_identifier ("bzero");
3157 args = build_function_type_list (void_type_node, ptr_type_node,
3158 unsigned_type_node, NULL_TREE);
3159 }
3160
3161 fn = build_decl (FUNCTION_DECL, fn, args);
3162 DECL_EXTERNAL (fn) = 1;
3163 TREE_PUBLIC (fn) = 1;
3164 DECL_ARTIFICIAL (fn) = 1;
3165 TREE_NOTHROW (fn) = 1;
3166
3167 block_clear_fn = fn;
3168 }
3169
3170 if (asmspec)
3171 {
3172 SET_DECL_RTL (block_clear_fn, NULL_RTX);
3173 SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
3174 }
3175 }
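/* For illustration only: as with init_block_move_fn above, the decl
   built here corresponds to one of these C prototypes, depending on
   TARGET_MEM_FUNCTIONS:

       void *memset (void *, int, size_t);
       void bzero (void *, unsigned);  */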
3176
3177 static tree
3178 clear_storage_libcall_fn (int for_call)
3179 {
3180 static bool emitted_extern;
3181
3182 if (!block_clear_fn)
3183 init_block_clear_fn (NULL);
3184
3185 if (for_call && !emitted_extern)
3186 {
3187 emitted_extern = true;
3188 make_decl_rtl (block_clear_fn, NULL);
3189 assemble_external (block_clear_fn);
3190 }
3191
3192 return block_clear_fn;
3193 }
3194 \f
3195 /* Generate code to copy Y into X.
3196 Both Y and X must have the same mode, except that
3197 Y can be a constant with VOIDmode.
3198 This mode cannot be BLKmode; use emit_block_move for that.
3199
3200 Return the last instruction emitted. */
3201
3202 rtx
3203 emit_move_insn (rtx x, rtx y)
3204 {
3205 enum machine_mode mode = GET_MODE (x);
3206 rtx y_cst = NULL_RTX;
3207 rtx last_insn, set;
3208
3209 x = protect_from_queue (x, 1);
3210 y = protect_from_queue (y, 0);
3211
3212 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
3213 abort ();
3214
3215 /* Never force constant_p_rtx to memory. */
3216 if (GET_CODE (y) == CONSTANT_P_RTX)
3217 ;
3218 else if (CONSTANT_P (y))
3219 {
3220 if (optimize
3221 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3222 && (last_insn = compress_float_constant (x, y)))
3223 return last_insn;
3224
3225 y_cst = y;
3226
3227 if (!LEGITIMATE_CONSTANT_P (y))
3228 {
3229 y = force_const_mem (mode, y);
3230
3231 /* If the target's cannot_force_const_mem prevented the spill,
3232 assume that the target's move expanders will also take care
3233 of the non-legitimate constant. */
3234 if (!y)
3235 y = y_cst;
3236 }
3237 }
3238
3239 /* If X or Y are memory references, verify that their addresses are valid
3240 for the machine. */
3241 if (GET_CODE (x) == MEM
3242 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3243 && ! push_operand (x, GET_MODE (x)))
3244 || (flag_force_addr
3245 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3246 x = validize_mem (x);
3247
3248 if (GET_CODE (y) == MEM
3249 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3250 || (flag_force_addr
3251 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3252 y = validize_mem (y);
3253
3254 if (mode == BLKmode)
3255 abort ();
3256
3257 last_insn = emit_move_insn_1 (x, y);
3258
3259 if (y_cst && GET_CODE (x) == REG
3260 && (set = single_set (last_insn)) != NULL_RTX
3261 && SET_DEST (set) == x
3262 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3263 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3264
3265 return last_insn;
3266 }
3267
3268 /* Low level part of emit_move_insn.
3269 Called just like emit_move_insn, but assumes X and Y
3270 are basically valid. */
3271
3272 rtx
3273 emit_move_insn_1 (rtx x, rtx y)
3274 {
3275 enum machine_mode mode = GET_MODE (x);
3276 enum machine_mode submode;
3277 enum mode_class class = GET_MODE_CLASS (mode);
3278
3279 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
3280 abort ();
3281
3282 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
3283 return
3284 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
3285
3286 /* Expand complex moves by moving real part and imag part, if possible. */
3287 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
3288 && BLKmode != (submode = GET_MODE_INNER (mode))
3289 && (mov_optab->handlers[(int) submode].insn_code
3290 != CODE_FOR_nothing))
3291 {
3292 /* Don't split destination if it is a stack push. */
3293 int stack = push_operand (x, GET_MODE (x));
3294
3295 #ifdef PUSH_ROUNDING
3296 /* In case we output to the stack, but the size is smaller than the
3297 machine can push exactly, we need to use move instructions. */
3298 if (stack
3299 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
3300 != GET_MODE_SIZE (submode)))
3301 {
3302 rtx temp;
3303 HOST_WIDE_INT offset1, offset2;
3304
3305 /* Do not use anti_adjust_stack, since we don't want to update
3306 stack_pointer_delta. */
3307 temp = expand_binop (Pmode,
3308 #ifdef STACK_GROWS_DOWNWARD
3309 sub_optab,
3310 #else
3311 add_optab,
3312 #endif
3313 stack_pointer_rtx,
3314 GEN_INT
3315 (PUSH_ROUNDING
3316 (GET_MODE_SIZE (GET_MODE (x)))),
3317 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3318
3319 if (temp != stack_pointer_rtx)
3320 emit_move_insn (stack_pointer_rtx, temp);
3321
3322 #ifdef STACK_GROWS_DOWNWARD
3323 offset1 = 0;
3324 offset2 = GET_MODE_SIZE (submode);
3325 #else
3326 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
3327 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
3328 + GET_MODE_SIZE (submode));
3329 #endif
3330
3331 emit_move_insn (change_address (x, submode,
3332 gen_rtx_PLUS (Pmode,
3333 stack_pointer_rtx,
3334 GEN_INT (offset1))),
3335 gen_realpart (submode, y));
3336 emit_move_insn (change_address (x, submode,
3337 gen_rtx_PLUS (Pmode,
3338 stack_pointer_rtx,
3339 GEN_INT (offset2))),
3340 gen_imagpart (submode, y));
3341 }
3342 else
3343 #endif
3344 /* If this is a stack push, push the highpart first, so it
3345 will be in the argument order.
3346
3347 In that case, change_address is used only to convert
3348 the mode, not to change the address. */
3349 if (stack)
3350 {
3351 /* Note that the real part always precedes the imag part in memory
3352 regardless of machine's endianness. */
3353 #ifdef STACK_GROWS_DOWNWARD
3354 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3355 gen_imagpart (submode, y));
3356 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3357 gen_realpart (submode, y));
3358 #else
3359 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3360 gen_realpart (submode, y));
3361 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3362 gen_imagpart (submode, y));
3363 #endif
3364 }
3365 else
3366 {
3367 rtx realpart_x, realpart_y;
3368 rtx imagpart_x, imagpart_y;
3369
3370 /* If this is a complex value with each part being smaller than a
3371 word, the usual calling sequence will likely pack the pieces into
3372 a single register. Unfortunately, SUBREG of hard registers only
3373 deals in terms of words, so we have a problem converting input
3374 arguments to the CONCAT of two registers that is used elsewhere
3375 for complex values. If this is before reload, we can copy it into
3376 memory and reload. FIXME, we should see about using extract and
3377 insert on integer registers, but complex short and complex char
3378 variables should be rarely used. */
3379 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
3380 && (reload_in_progress | reload_completed) == 0)
3381 {
3382 int packed_dest_p
3383 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
3384 int packed_src_p
3385 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
3386
3387 if (packed_dest_p || packed_src_p)
3388 {
3389 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
3390 ? MODE_FLOAT : MODE_INT);
3391
3392 enum machine_mode reg_mode
3393 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
3394
3395 if (reg_mode != BLKmode)
3396 {
3397 rtx mem = assign_stack_temp (reg_mode,
3398 GET_MODE_SIZE (mode), 0);
3399 rtx cmem = adjust_address (mem, mode, 0);
3400
3401 cfun->cannot_inline
3402 = N_("function using short complex types cannot be inline");
3403
3404 if (packed_dest_p)
3405 {
3406 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
3407
3408 emit_move_insn_1 (cmem, y);
3409 return emit_move_insn_1 (sreg, mem);
3410 }
3411 else
3412 {
3413 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
3414
3415 emit_move_insn_1 (mem, sreg);
3416 return emit_move_insn_1 (x, cmem);
3417 }
3418 }
3419 }
3420 }
3421
3422 realpart_x = gen_realpart (submode, x);
3423 realpart_y = gen_realpart (submode, y);
3424 imagpart_x = gen_imagpart (submode, x);
3425 imagpart_y = gen_imagpart (submode, y);
3426
3427 /* Show the output dies here. This is necessary for SUBREGs
3428 of pseudos since we cannot track their lifetimes correctly;
3429 hard regs shouldn't appear here except as return values.
3430 We never want to emit such a clobber after reload. */
3431 if (x != y
3432 && ! (reload_in_progress || reload_completed)
3433 && (GET_CODE (realpart_x) == SUBREG
3434 || GET_CODE (imagpart_x) == SUBREG))
3435 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3436
3437 emit_move_insn (realpart_x, realpart_y);
3438 emit_move_insn (imagpart_x, imagpart_y);
3439 }
3440
3441 return get_last_insn ();
3442 }
3443
3444 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
3445 find a mode to do it in. If we have a movcc, use it. Otherwise,
3446 find the MODE_INT mode of the same width. */
3447 else if (GET_MODE_CLASS (mode) == MODE_CC
3448 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
3449 {
3450 enum insn_code insn_code;
3451 enum machine_mode tmode = VOIDmode;
3452 rtx x1 = x, y1 = y;
3453
3454 if (mode != CCmode
3455 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
3456 tmode = CCmode;
3457 else
3458 for (tmode = QImode; tmode != VOIDmode;
3459 tmode = GET_MODE_WIDER_MODE (tmode))
3460 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
3461 break;
3462
3463 if (tmode == VOIDmode)
3464 abort ();
3465
3466 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
3467 may call change_address which is not appropriate if we were
3468 called when a reload was in progress. We don't have to worry
3469 about changing the address since the size in bytes is supposed to
3470 be the same. Copy the MEM to change the mode and move any
3471 substitutions from the old MEM to the new one. */
3472
3473 if (reload_in_progress)
3474 {
3475 x = gen_lowpart_common (tmode, x1);
3476 if (x == 0 && GET_CODE (x1) == MEM)
3477 {
3478 x = adjust_address_nv (x1, tmode, 0);
3479 copy_replacements (x1, x);
3480 }
3481
3482 y = gen_lowpart_common (tmode, y1);
3483 if (y == 0 && GET_CODE (y1) == MEM)
3484 {
3485 y = adjust_address_nv (y1, tmode, 0);
3486 copy_replacements (y1, y);
3487 }
3488 }
3489 else
3490 {
3491 x = gen_lowpart (tmode, x);
3492 y = gen_lowpart (tmode, y);
3493 }
3494
3495 insn_code = mov_optab->handlers[(int) tmode].insn_code;
3496 return emit_insn (GEN_FCN (insn_code) (x, y));
3497 }
3498
3499 /* This will handle any multi-word or full-word mode that lacks a move_insn
3500 pattern. However, you will get better code if you define such patterns,
3501 even if they must turn into multiple assembler instructions. */
3502 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3503 {
3504 rtx last_insn = 0;
3505 rtx seq, inner;
3506 int need_clobber;
3507 int i;
3508
3509 #ifdef PUSH_ROUNDING
3510
3511 /* If X is a push on the stack, do the push now and replace
3512 X with a reference to the stack pointer. */
3513 if (push_operand (x, GET_MODE (x)))
3514 {
3515 rtx temp;
3516 enum rtx_code code;
3517
3518 /* Do not use anti_adjust_stack, since we don't want to update
3519 stack_pointer_delta. */
3520 temp = expand_binop (Pmode,
3521 #ifdef STACK_GROWS_DOWNWARD
3522 sub_optab,
3523 #else
3524 add_optab,
3525 #endif
3526 stack_pointer_rtx,
3527 GEN_INT
3528 (PUSH_ROUNDING
3529 (GET_MODE_SIZE (GET_MODE (x)))),
3530 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3531
3532 if (temp != stack_pointer_rtx)
3533 emit_move_insn (stack_pointer_rtx, temp);
3534
3535 code = GET_CODE (XEXP (x, 0));
3536
3537 /* Just hope that small offsets off SP are OK. */
3538 if (code == POST_INC)
3539 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3540 GEN_INT (-((HOST_WIDE_INT)
3541 GET_MODE_SIZE (GET_MODE (x)))));
3542 else if (code == POST_DEC)
3543 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3544 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3545 else
3546 temp = stack_pointer_rtx;
3547
3548 x = change_address (x, VOIDmode, temp);
3549 }
3550 #endif
3551
3552 /* If we are in reload, see if either operand is a MEM whose address
3553 is scheduled for replacement. */
3554 if (reload_in_progress && GET_CODE (x) == MEM
3555 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3556 x = replace_equiv_address_nv (x, inner);
3557 if (reload_in_progress && GET_CODE (y) == MEM
3558 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3559 y = replace_equiv_address_nv (y, inner);
3560
3561 start_sequence ();
3562
3563 need_clobber = 0;
3564 for (i = 0;
3565 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3566 i++)
3567 {
3568 rtx xpart = operand_subword (x, i, 1, mode);
3569 rtx ypart = operand_subword (y, i, 1, mode);
3570
3571 /* If we can't get a part of Y, put Y into memory if it is a
3572 constant. Otherwise, force it into a register. If we still
3573 can't get a part of Y, abort. */
3574 if (ypart == 0 && CONSTANT_P (y))
3575 {
3576 y = force_const_mem (mode, y);
3577 ypart = operand_subword (y, i, 1, mode);
3578 }
3579 else if (ypart == 0)
3580 ypart = operand_subword_force (y, i, mode);
3581
3582 if (xpart == 0 || ypart == 0)
3583 abort ();
3584
3585 need_clobber |= (GET_CODE (xpart) == SUBREG);
3586
3587 last_insn = emit_move_insn (xpart, ypart);
3588 }
3589
3590 seq = get_insns ();
3591 end_sequence ();
3592
3593 /* Show the output dies here. This is necessary for SUBREGs
3594 of pseudos since we cannot track their lifetimes correctly;
3595 hard regs shouldn't appear here except as return values.
3596 We never want to emit such a clobber after reload. */
3597 if (x != y
3598 && ! (reload_in_progress || reload_completed)
3599 && need_clobber != 0)
3600 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3601
3602 emit_insn (seq);
3603
3604 return last_insn;
3605 }
3606 else
3607 abort ();
3608 }
3609
3610 /* If Y is representable exactly in a narrower mode, and the target can
3611 perform the extension directly from constant or memory, then emit the
3612 move as an extension. */
3613
3614 static rtx
3615 compress_float_constant (rtx x, rtx y)
3616 {
3617 enum machine_mode dstmode = GET_MODE (x);
3618 enum machine_mode orig_srcmode = GET_MODE (y);
3619 enum machine_mode srcmode;
3620 REAL_VALUE_TYPE r;
3621
3622 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3623
3624 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3625 srcmode != orig_srcmode;
3626 srcmode = GET_MODE_WIDER_MODE (srcmode))
3627 {
3628 enum insn_code ic;
3629 rtx trunc_y, last_insn;
3630
3631 /* Skip if the target can't extend this way. */
3632 ic = can_extend_p (dstmode, srcmode, 0);
3633 if (ic == CODE_FOR_nothing)
3634 continue;
3635
3636 /* Skip if the narrowed value isn't exact. */
3637 if (! exact_real_truncate (srcmode, &r))
3638 continue;
3639
3640 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3641
3642 if (LEGITIMATE_CONSTANT_P (trunc_y))
3643 {
3644 /* Skip if the target needs extra instructions to perform
3645 the extension. */
3646 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3647 continue;
3648 }
3649 else if (float_extend_from_mem[dstmode][srcmode])
3650 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3651 else
3652 continue;
3653
3654 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3655 last_insn = get_last_insn ();
3656
3657 if (GET_CODE (x) == REG)
3658 set_unique_reg_note (last_insn, REG_EQUAL, y);
3659
3660 return last_insn;
3661 }
3662
3663 return NULL_RTX;
3664 }
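/* A worked example of the transformation above: when moving the DFmode
   constant 1.5 into a register on a hypothetical target that has an
   extendsfdf2 pattern which can extend directly from memory, 1.5 is
   exactly representable in SFmode, so the constant is forced into
   memory as an SFmode value and loaded with a float extension instead
   of a full DFmode constant load.  */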
3665 \f
3666 /* Pushing data onto the stack. */
3667
3668 /* Push a block of length SIZE (perhaps variable)
3669 and return an rtx to address the beginning of the block.
3670 Note that it is not possible for the value returned to be a QUEUED.
3671 The value may be virtual_outgoing_args_rtx.
3672
3673 EXTRA is the number of bytes of padding to push in addition to SIZE.
3674 BELOW nonzero means this padding comes at low addresses;
3675 otherwise, the padding comes at high addresses. */
3676
3677 rtx
3678 push_block (rtx size, int extra, int below)
3679 {
3680 rtx temp;
3681
3682 size = convert_modes (Pmode, ptr_mode, size, 1);
3683 if (CONSTANT_P (size))
3684 anti_adjust_stack (plus_constant (size, extra));
3685 else if (GET_CODE (size) == REG && extra == 0)
3686 anti_adjust_stack (size);
3687 else
3688 {
3689 temp = copy_to_mode_reg (Pmode, size);
3690 if (extra != 0)
3691 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3692 temp, 0, OPTAB_LIB_WIDEN);
3693 anti_adjust_stack (temp);
3694 }
3695
3696 #ifndef STACK_GROWS_DOWNWARD
3697 if (0)
3698 #else
3699 if (1)
3700 #endif
3701 {
3702 temp = virtual_outgoing_args_rtx;
3703 if (extra != 0 && below)
3704 temp = plus_constant (temp, extra);
3705 }
3706 else
3707 {
3708 if (GET_CODE (size) == CONST_INT)
3709 temp = plus_constant (virtual_outgoing_args_rtx,
3710 -INTVAL (size) - (below ? 0 : extra));
3711 else if (extra != 0 && !below)
3712 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3713 negate_rtx (Pmode, plus_constant (size, extra)));
3714 else
3715 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3716 negate_rtx (Pmode, size));
3717 }
3718
3719 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3720 }
3721
3722 #ifdef PUSH_ROUNDING
3723
3724 /* Emit single push insn. */
3725
3726 static void
3727 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3728 {
3729 rtx dest_addr;
3730 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3731 rtx dest;
3732 enum insn_code icode;
3733 insn_operand_predicate_fn pred;
3734
3735 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3736 /* If there is a push pattern, use it. Otherwise fall back to the old way
3737 of handing a MEM that represents the push operation to the move expander. */
3738 icode = push_optab->handlers[(int) mode].insn_code;
3739 if (icode != CODE_FOR_nothing)
3740 {
3741 if (((pred = insn_data[(int) icode].operand[0].predicate)
3742 && !((*pred) (x, mode))))
3743 x = force_reg (mode, x);
3744 emit_insn (GEN_FCN (icode) (x));
3745 return;
3746 }
3747 if (GET_MODE_SIZE (mode) == rounded_size)
3748 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3749 /* If we are to pad downward, adjust the stack pointer first and
3750 then store X into the stack location using an offset. This is
3751 because emit_move_insn does not know how to pad; it does not have
3752 access to type. */
3753 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3754 {
3755 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3756 HOST_WIDE_INT offset;
3757
3758 emit_move_insn (stack_pointer_rtx,
3759 expand_binop (Pmode,
3760 #ifdef STACK_GROWS_DOWNWARD
3761 sub_optab,
3762 #else
3763 add_optab,
3764 #endif
3765 stack_pointer_rtx,
3766 GEN_INT (rounded_size),
3767 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3768
3769 offset = (HOST_WIDE_INT) padding_size;
3770 #ifdef STACK_GROWS_DOWNWARD
3771 if (STACK_PUSH_CODE == POST_DEC)
3772 /* We have already decremented the stack pointer, so get the
3773 previous value. */
3774 offset += (HOST_WIDE_INT) rounded_size;
3775 #else
3776 if (STACK_PUSH_CODE == POST_INC)
3777 /* We have already incremented the stack pointer, so get the
3778 previous value. */
3779 offset -= (HOST_WIDE_INT) rounded_size;
3780 #endif
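/* Worked example (illustrative): on a downward-growing stack with PRE_DEC
   pushes, a 6-byte value in a mode whose PUSH_ROUNDING is 8 gives
   padding_size == 2; the stack pointer has just been decremented by 8,
   so the value is stored at sp + 2 and the two padding bytes end up at
   the lower addresses, as downward padding requires.  */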
3781 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3782 }
3783 else
3784 {
3785 #ifdef STACK_GROWS_DOWNWARD
3786 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3787 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3788 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3789 #else
3790 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3791 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3792 GEN_INT (rounded_size));
3793 #endif
3794 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3795 }
3796
3797 dest = gen_rtx_MEM (mode, dest_addr);
3798
3799 if (type != 0)
3800 {
3801 set_mem_attributes (dest, type, 1);
3802
3803 if (flag_optimize_sibling_calls)
3804 /* Function incoming arguments may overlap with sibling call
3805 outgoing arguments and we cannot allow reordering of reads
3806 from function arguments with stores to outgoing arguments
3807 of sibling calls. */
3808 set_mem_alias_set (dest, 0);
3809 }
3810 emit_move_insn (dest, x);
3811 }
3812 #endif
3813
3814 /* Generate code to push X onto the stack, assuming it has mode MODE and
3815 type TYPE.
3816 MODE is redundant except when X is a CONST_INT (since they don't
3817 carry mode info).
3818 SIZE is an rtx for the size of data to be copied (in bytes),
3819 needed only if X is BLKmode.
3820
3821 ALIGN (in bits) is maximum alignment we can assume.
3822
3823 If PARTIAL and REG are both nonzero, then copy that many of the first
3824 words of X into registers starting with REG, and push the rest of X.
3825 The amount of space pushed is decreased by PARTIAL words,
3826 rounded *down* to a multiple of PARM_BOUNDARY.
3827 REG must be a hard register in this case.
3828 If REG is zero but PARTIAL is not, take all other actions for an
3829 argument partially in registers, but do not actually load any
3830 registers.
3831
3832 EXTRA is the amount in bytes of extra space to leave next to this arg.
3833 This is ignored if an argument block has already been allocated.
3834
3835 On a machine that lacks real push insns, ARGS_ADDR is the address of
3836 the bottom of the argument block for this call. We use indexing off there
3837 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3838 argument block has not been preallocated.
3839
3840 ARGS_SO_FAR is the size of args previously pushed for this call.
3841
3842 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3843 for arguments passed in registers. If nonzero, it will be the number
3844 of bytes required. */
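/* Example of the PARTIAL/REG interaction (illustrative): with
   UNITS_PER_WORD == 4, PARTIAL == 2 and REG a hard register, the first 8
   bytes of X are loaded into REG and REG+1 by move_block_to_reg at the end
   of this function, and only the remaining bytes are pushed; the stack
   space reserved for the argument shrinks accordingly.  */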
3845
3846 void
3847 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3848 unsigned int align, int partial, rtx reg, int extra,
3849 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3850 rtx alignment_pad)
3851 {
3852 rtx xinner;
3853 enum direction stack_direction
3854 #ifdef STACK_GROWS_DOWNWARD
3855 = downward;
3856 #else
3857 = upward;
3858 #endif
3859
3860 /* Decide where to pad the argument: `downward' for below,
3861 `upward' for above, or `none' for don't pad it.
3862 Default is below for small data on big-endian machines; else above. */
3863 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3864
3865 /* Invert direction if stack is post-decrement.
3866 FIXME: why? */
3867 if (STACK_PUSH_CODE == POST_DEC)
3868 if (where_pad != none)
3869 where_pad = (where_pad == downward ? upward : downward);
3870
3871 xinner = x = protect_from_queue (x, 0);
3872
3873 if (mode == BLKmode)
3874 {
3875 /* Copy a block into the stack, entirely or partially. */
3876
3877 rtx temp;
3878 int used = partial * UNITS_PER_WORD;
3879 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3880 int skip;
3881
3882 if (size == 0)
3883 abort ();
3884
3885 used -= offset;
3886
3887 /* USED is now the # of bytes we need not copy to the stack
3888 because registers will take care of them. */
3889
3890 if (partial != 0)
3891 xinner = adjust_address (xinner, BLKmode, used);
3892
3893 /* If the partial register-part of the arg counts in its stack size,
3894 skip the part of stack space corresponding to the registers.
3895 Otherwise, start copying to the beginning of the stack space,
3896 by setting SKIP to 0. */
3897 skip = (reg_parm_stack_space == 0) ? 0 : used;
3898
3899 #ifdef PUSH_ROUNDING
3900 /* Do it with several push insns if that doesn't take lots of insns
3901 and if there is no difficulty with push insns that skip bytes
3902 on the stack for alignment purposes. */
3903 if (args_addr == 0
3904 && PUSH_ARGS
3905 && GET_CODE (size) == CONST_INT
3906 && skip == 0
3907 && MEM_ALIGN (xinner) >= align
3908 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3909 /* Here we avoid the case of a structure whose weak alignment
3910 forces many pushes of a small amount of data,
3911 and such small pushes do rounding that causes trouble. */
3912 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3913 || align >= BIGGEST_ALIGNMENT
3914 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3915 == (align / BITS_PER_UNIT)))
3916 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3917 {
3918 /* Push padding now if padding above and stack grows down,
3919 or if padding below and stack grows up.
3920 But if space already allocated, this has already been done. */
3921 if (extra && args_addr == 0
3922 && where_pad != none && where_pad != stack_direction)
3923 anti_adjust_stack (GEN_INT (extra));
3924
3925 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3926 }
3927 else
3928 #endif /* PUSH_ROUNDING */
3929 {
3930 rtx target;
3931
3932 /* Otherwise make space on the stack and copy the data
3933 to the address of that space. */
3934
3935 /* Deduct words put into registers from the size we must copy. */
3936 if (partial != 0)
3937 {
3938 if (GET_CODE (size) == CONST_INT)
3939 size = GEN_INT (INTVAL (size) - used);
3940 else
3941 size = expand_binop (GET_MODE (size), sub_optab, size,
3942 GEN_INT (used), NULL_RTX, 0,
3943 OPTAB_LIB_WIDEN);
3944 }
3945
3946 /* Get the address of the stack space.
3947 In this case, we do not deal with EXTRA separately.
3948 A single stack adjust will do. */
3949 if (! args_addr)
3950 {
3951 temp = push_block (size, extra, where_pad == downward);
3952 extra = 0;
3953 }
3954 else if (GET_CODE (args_so_far) == CONST_INT)
3955 temp = memory_address (BLKmode,
3956 plus_constant (args_addr,
3957 skip + INTVAL (args_so_far)));
3958 else
3959 temp = memory_address (BLKmode,
3960 plus_constant (gen_rtx_PLUS (Pmode,
3961 args_addr,
3962 args_so_far),
3963 skip));
3964
3965 if (!ACCUMULATE_OUTGOING_ARGS)
3966 {
3967 /* If the source is referenced relative to the stack pointer,
3968 copy it to another register to stabilize it. We do not need
3969 to do this if we know that we won't be changing sp. */
3970
3971 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3972 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3973 temp = copy_to_reg (temp);
3974 }
3975
3976 target = gen_rtx_MEM (BLKmode, temp);
3977
3978 if (type != 0)
3979 {
3980 set_mem_attributes (target, type, 1);
3981 /* Function incoming arguments may overlap with sibling call
3982 outgoing arguments and we cannot allow reordering of reads
3983 from function arguments with stores to outgoing arguments
3984 of sibling calls. */
3985 set_mem_alias_set (target, 0);
3986 }
3987
3988 /* ALIGN may well be better aligned than TYPE, e.g. due to
3989 PARM_BOUNDARY. Assume the caller isn't lying. */
3990 set_mem_align (target, align);
3991
3992 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3993 }
3994 }
3995 else if (partial > 0)
3996 {
3997 /* Scalar partly in registers. */
3998
3999 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
4000 int i;
4001 int not_stack;
4002 /* # words of start of argument
4003 that we must make space for but need not store. */
4004 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
4005 int args_offset = INTVAL (args_so_far);
4006 int skip;
4007
4008 /* Push padding now if padding above and stack grows down,
4009 or if padding below and stack grows up.
4010 But if space already allocated, this has already been done. */
4011 if (extra && args_addr == 0
4012 && where_pad != none && where_pad != stack_direction)
4013 anti_adjust_stack (GEN_INT (extra));
4014
4015 /* If we make space by pushing it, we might as well push
4016 the real data. Otherwise, we can leave OFFSET nonzero
4017 and leave the space uninitialized. */
4018 if (args_addr == 0)
4019 offset = 0;
4020
4021 /* Now NOT_STACK gets the number of words that we don't need to
4022 allocate on the stack. */
4023 not_stack = partial - offset;
4024
4025 /* If the partial register-part of the arg counts in its stack size,
4026 skip the part of stack space corresponding to the registers.
4027 Otherwise, start copying to the beginning of the stack space,
4028 by setting SKIP to 0. */
4029 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4030
4031 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
4032 x = validize_mem (force_const_mem (mode, x));
4033
4034 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4035 SUBREGs of such registers are not allowed. */
4036 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
4037 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4038 x = copy_to_reg (x);
4039
4040 /* Loop over all the words allocated on the stack for this arg. */
4041 /* We can do it by words, because any scalar bigger than a word
4042 has a size that is a multiple of a word. */
4043 #ifndef PUSH_ARGS_REVERSED
4044 for (i = not_stack; i < size; i++)
4045 #else
4046 for (i = size - 1; i >= not_stack; i--)
4047 #endif
4048 if (i >= not_stack + offset)
4049 emit_push_insn (operand_subword_force (x, i, mode),
4050 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4051 0, args_addr,
4052 GEN_INT (args_offset + ((i - not_stack + skip)
4053 * UNITS_PER_WORD)),
4054 reg_parm_stack_space, alignment_pad);
4055 }
4056 else
4057 {
4058 rtx addr;
4059 rtx dest;
4060
4061 /* Push padding now if padding above and stack grows down,
4062 or if padding below and stack grows up.
4063 But if space already allocated, this has already been done. */
4064 if (extra && args_addr == 0
4065 && where_pad != none && where_pad != stack_direction)
4066 anti_adjust_stack (GEN_INT (extra));
4067
4068 #ifdef PUSH_ROUNDING
4069 if (args_addr == 0 && PUSH_ARGS)
4070 emit_single_push_insn (mode, x, type);
4071 else
4072 #endif
4073 {
4074 if (GET_CODE (args_so_far) == CONST_INT)
4075 addr
4076 = memory_address (mode,
4077 plus_constant (args_addr,
4078 INTVAL (args_so_far)));
4079 else
4080 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4081 args_so_far));
4082 dest = gen_rtx_MEM (mode, addr);
4083 if (type != 0)
4084 {
4085 set_mem_attributes (dest, type, 1);
4086 /* Function incoming arguments may overlap with sibling call
4087 outgoing arguments and we cannot allow reordering of reads
4088 from function arguments with stores to outgoing arguments
4089 of sibling calls. */
4090 set_mem_alias_set (dest, 0);
4091 }
4092
4093 emit_move_insn (dest, x);
4094 }
4095 }
4096
4097 /* If part should go in registers, copy that part
4098 into the appropriate registers. Do this now, at the end,
4099 since mem-to-mem copies above may do function calls. */
4100 if (partial > 0 && reg != 0)
4101 {
4102 /* Handle calls that pass values in multiple non-contiguous locations.
4103 The Irix 6 ABI has examples of this. */
4104 if (GET_CODE (reg) == PARALLEL)
4105 emit_group_load (reg, x, type, -1);
4106 else
4107 move_block_to_reg (REGNO (reg), x, partial, mode);
4108 }
4109
4110 if (extra && args_addr == 0 && where_pad == stack_direction)
4111 anti_adjust_stack (GEN_INT (extra));
4112
4113 if (alignment_pad && args_addr == 0)
4114 anti_adjust_stack (alignment_pad);
4115 }
4116 \f
4117 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4118 operations. */
4119
4120 static rtx
4121 get_subtarget (rtx x)
4122 {
4123 return ((x == 0
4124 /* Only registers can be subtargets. */
4125 || GET_CODE (x) != REG
4126 /* If the register is readonly, it can't be set more than once. */
4127 || RTX_UNCHANGING_P (x)
4128 /* Don't use hard regs to avoid extending their life. */
4129 || REGNO (x) < FIRST_PSEUDO_REGISTER
4130 /* Avoid subtargets inside loops,
4131 since they hide some invariant expressions. */
4132 || preserve_subexpressions_p ())
4133 ? 0 : x);
4134 }
4135
4136 /* Expand an assignment that stores the value of FROM into TO.
4137 If WANT_VALUE is nonzero, return an rtx for the value of TO.
4138 (This may contain a QUEUED rtx;
4139 if the value is constant, this rtx is a constant.)
4140 Otherwise, the returned value is NULL_RTX. */
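/* Illustrative note: a plain assignment such as `x = y' is handled by the
   final store_expr call at the bottom of this function, while an
   assignment to a component, e.g. `s.f = y' or `a[i] = y', takes the first
   branch below: get_inner_reference decomposes the reference into a base
   object plus bit position and size, and store_field performs the store.  */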
4141
4142 rtx
4143 expand_assignment (tree to, tree from, int want_value)
4144 {
4145 rtx to_rtx = 0;
4146 rtx result;
4147
4148 /* Don't crash if the lhs of the assignment was erroneous. */
4149
4150 if (TREE_CODE (to) == ERROR_MARK)
4151 {
4152 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
4153 return want_value ? result : NULL_RTX;
4154 }
4155
4156 /* Assignment of a structure component needs special treatment
4157 if the structure component's rtx is not simply a MEM.
4158 Assignment of an array element at a constant index, and assignment of
4159 an array element in an unaligned packed structure field, has the same
4160 problem. */
4161
4162 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
4163 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
4164 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4165 {
4166 enum machine_mode mode1;
4167 HOST_WIDE_INT bitsize, bitpos;
4168 rtx orig_to_rtx;
4169 tree offset;
4170 int unsignedp;
4171 int volatilep = 0;
4172 tree tem;
4173
4174 push_temp_slots ();
4175 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4176 &unsignedp, &volatilep);
4177
4178 /* If we are going to use store_bit_field and extract_bit_field,
4179 make sure to_rtx will be safe for multiple use. */
4180
4181 if (mode1 == VOIDmode && want_value)
4182 tem = stabilize_reference (tem);
4183
4184 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
4185
4186 if (offset != 0)
4187 {
4188 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4189
4190 if (GET_CODE (to_rtx) != MEM)
4191 abort ();
4192
4193 #ifdef POINTERS_EXTEND_UNSIGNED
4194 if (GET_MODE (offset_rtx) != Pmode)
4195 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4196 #else
4197 if (GET_MODE (offset_rtx) != ptr_mode)
4198 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4199 #endif
4200
4201 /* A constant address in TO_RTX can have VOIDmode; we must not try
4202 to call force_reg in that case, so avoid it. */
4203 if (GET_CODE (to_rtx) == MEM
4204 && GET_MODE (to_rtx) == BLKmode
4205 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4206 && bitsize > 0
4207 && (bitpos % bitsize) == 0
4208 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4209 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4210 {
4211 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4212 bitpos = 0;
4213 }
4214
4215 to_rtx = offset_address (to_rtx, offset_rtx,
4216 highest_pow2_factor_for_type (TREE_TYPE (to),
4217 offset));
4218 }
4219
4220 if (GET_CODE (to_rtx) == MEM)
4221 {
4222 /* If the field is at offset zero, we could have been given the
4223 DECL_RTX of the parent struct. Don't munge it. */
4224 to_rtx = shallow_copy_rtx (to_rtx);
4225
4226 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4227 }
4228
4229 /* Deal with volatile and readonly fields. The former is only done
4230 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4231 if (volatilep && GET_CODE (to_rtx) == MEM)
4232 {
4233 if (to_rtx == orig_to_rtx)
4234 to_rtx = copy_rtx (to_rtx);
4235 MEM_VOLATILE_P (to_rtx) = 1;
4236 }
4237
4238 if (TREE_CODE (to) == COMPONENT_REF
4239 && TREE_READONLY (TREE_OPERAND (to, 1)))
4240 {
4241 if (to_rtx == orig_to_rtx)
4242 to_rtx = copy_rtx (to_rtx);
4243 RTX_UNCHANGING_P (to_rtx) = 1;
4244 }
4245
4246 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
4247 {
4248 if (to_rtx == orig_to_rtx)
4249 to_rtx = copy_rtx (to_rtx);
4250 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4251 }
4252
4253 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4254 (want_value
4255 /* Spurious cast for HPUX compiler. */
4256 ? ((enum machine_mode)
4257 TYPE_MODE (TREE_TYPE (to)))
4258 : VOIDmode),
4259 unsignedp, TREE_TYPE (tem), get_alias_set (to));
4260
4261 preserve_temp_slots (result);
4262 free_temp_slots ();
4263 pop_temp_slots ();
4264
4265 /* If the value is meaningful, convert RESULT to the proper mode.
4266 Otherwise, return nothing. */
4267 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
4268 TYPE_MODE (TREE_TYPE (from)),
4269 result,
4270 TREE_UNSIGNED (TREE_TYPE (to)))
4271 : NULL_RTX);
4272 }
4273
4274 /* If the rhs is a function call and its value is not an aggregate,
4275 call the function before we start to compute the lhs.
4276 This is needed for correct code for cases such as
4277 val = setjmp (buf) on machines where reference to val
4278 requires loading up part of an address in a separate insn.
4279
4280 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4281 since it might be a promoted variable where the zero- or sign- extension
4282 needs to be done. Handling this in the normal way is safe because no
4283 computation is done before the call. */
4284 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
4285 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4286 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4287 && GET_CODE (DECL_RTL (to)) == REG))
4288 {
4289 rtx value;
4290
4291 push_temp_slots ();
4292 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
4293 if (to_rtx == 0)
4294 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4295
4296 /* Handle calls that return values in multiple non-contiguous locations.
4297 The Irix 6 ABI has examples of this. */
4298 if (GET_CODE (to_rtx) == PARALLEL)
4299 emit_group_load (to_rtx, value, TREE_TYPE (from),
4300 int_size_in_bytes (TREE_TYPE (from)));
4301 else if (GET_MODE (to_rtx) == BLKmode)
4302 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4303 else
4304 {
4305 #ifdef POINTERS_EXTEND_UNSIGNED
4306 if (POINTER_TYPE_P (TREE_TYPE (to))
4307 && GET_MODE (to_rtx) != GET_MODE (value))
4308 value = convert_memory_address (GET_MODE (to_rtx), value);
4309 #endif
4310 emit_move_insn (to_rtx, value);
4311 }
4312 preserve_temp_slots (to_rtx);
4313 free_temp_slots ();
4314 pop_temp_slots ();
4315 return want_value ? to_rtx : NULL_RTX;
4316 }
4317
4318 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4319 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4320
4321 if (to_rtx == 0)
4322 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4323
4324 /* Don't move directly into a return register. */
4325 if (TREE_CODE (to) == RESULT_DECL
4326 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
4327 {
4328 rtx temp;
4329
4330 push_temp_slots ();
4331 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
4332
4333 if (GET_CODE (to_rtx) == PARALLEL)
4334 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4335 int_size_in_bytes (TREE_TYPE (from)));
4336 else
4337 emit_move_insn (to_rtx, temp);
4338
4339 preserve_temp_slots (to_rtx);
4340 free_temp_slots ();
4341 pop_temp_slots ();
4342 return want_value ? to_rtx : NULL_RTX;
4343 }
4344
4345 /* In case we are returning the contents of an object which overlaps
4346 the place the value is being stored, use a safe function when copying
4347 a value through a pointer into a structure value return block. */
4348 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4349 && current_function_returns_struct
4350 && !current_function_returns_pcc_struct)
4351 {
4352 rtx from_rtx, size;
4353
4354 push_temp_slots ();
4355 size = expr_size (from);
4356 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
4357
4358 if (TARGET_MEM_FUNCTIONS)
4359 emit_library_call (memmove_libfunc, LCT_NORMAL,
4360 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4361 XEXP (from_rtx, 0), Pmode,
4362 convert_to_mode (TYPE_MODE (sizetype),
4363 size, TREE_UNSIGNED (sizetype)),
4364 TYPE_MODE (sizetype));
4365 else
4366 emit_library_call (bcopy_libfunc, LCT_NORMAL,
4367 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
4368 XEXP (to_rtx, 0), Pmode,
4369 convert_to_mode (TYPE_MODE (integer_type_node),
4370 size,
4371 TREE_UNSIGNED (integer_type_node)),
4372 TYPE_MODE (integer_type_node));
4373
4374 preserve_temp_slots (to_rtx);
4375 free_temp_slots ();
4376 pop_temp_slots ();
4377 return want_value ? to_rtx : NULL_RTX;
4378 }
4379
4380 /* Compute FROM and store the value in the rtx we got. */
4381
4382 push_temp_slots ();
4383 result = store_expr (from, to_rtx, want_value);
4384 preserve_temp_slots (result);
4385 free_temp_slots ();
4386 pop_temp_slots ();
4387 return want_value ? result : NULL_RTX;
4388 }
4389
4390 /* Generate code for computing expression EXP,
4391 and storing the value into TARGET.
4392 TARGET may contain a QUEUED rtx.
4393
4394 If WANT_VALUE & 1 is nonzero, return a copy of the value
4395 not in TARGET, so that we can be sure to use the proper
4396 value in a containing expression even if TARGET has something
4397 else stored in it. If possible, we copy the value through a pseudo
4398 and return that pseudo. Or, if the value is constant, we try to
4399 return the constant. In some cases, we return a pseudo
4400 copied *from* TARGET.
4401
4402 If the mode is BLKmode then we may return TARGET itself.
4403 It turns out that in BLKmode it doesn't cause a problem,
4404 because C has no operators that could combine two different
4405 assignments into the same BLKmode object with different values
4406 with no sequence point. Will other languages need this to
4407 be more thorough?
4408
4409 If WANT_VALUE & 1 is 0, we return NULL, to make sure
4410 to catch quickly any cases where the caller uses the value
4411 and fails to set WANT_VALUE.
4412
4413 If WANT_VALUE & 2 is set, this is a store into a call param on the
4414 stack, and block moves may need to be treated specially. */
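/* Put differently (illustrative summary): WANT_VALUE is a pair of bit
   flags.  Bit 0 asks for the value of the store to be returned; bit 1
   marks a store into an outgoing call argument on the stack, in which
   case the block moves below use BLOCK_OP_CALL_PARM and subexpressions
   are expanded with EXPAND_STACK_PARM.  */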
4415
4416 rtx
4417 store_expr (tree exp, rtx target, int want_value)
4418 {
4419 rtx temp;
4420 int dont_return_target = 0;
4421 int dont_store_target = 0;
4422
4423 if (VOID_TYPE_P (TREE_TYPE (exp)))
4424 {
4425 /* C++ can generate ?: expressions with a throw expression in one
4426 branch and an rvalue in the other. Here, we resolve attempts to
4427 store the throw expression's nonexistent result. */
4428 if (want_value)
4429 abort ();
4430 expand_expr (exp, const0_rtx, VOIDmode, 0);
4431 return NULL_RTX;
4432 }
4433 if (TREE_CODE (exp) == COMPOUND_EXPR)
4434 {
4435 /* Perform first part of compound expression, then assign from second
4436 part. */
4437 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4438 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4439 emit_queue ();
4440 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4441 }
4442 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4443 {
4444 /* For conditional expression, get safe form of the target. Then
4445 test the condition, doing the appropriate assignment on either
4446 side. This avoids the creation of unnecessary temporaries.
4447 For non-BLKmode, it is more efficient not to do this. */
4448
4449 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4450
4451 emit_queue ();
4452 target = protect_from_queue (target, 1);
4453
4454 do_pending_stack_adjust ();
4455 NO_DEFER_POP;
4456 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4457 start_cleanup_deferral ();
4458 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
4459 end_cleanup_deferral ();
4460 emit_queue ();
4461 emit_jump_insn (gen_jump (lab2));
4462 emit_barrier ();
4463 emit_label (lab1);
4464 start_cleanup_deferral ();
4465 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
4466 end_cleanup_deferral ();
4467 emit_queue ();
4468 emit_label (lab2);
4469 OK_DEFER_POP;
4470
4471 return want_value & 1 ? target : NULL_RTX;
4472 }
4473 else if (queued_subexp_p (target))
4474 /* If target contains a postincrement, let's not risk
4475 using it as the place to generate the rhs. */
4476 {
4477 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4478 {
4479 /* Expand EXP into a new pseudo. */
4480 temp = gen_reg_rtx (GET_MODE (target));
4481 temp = expand_expr (exp, temp, GET_MODE (target),
4482 (want_value & 2
4483 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4484 }
4485 else
4486 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4487 (want_value & 2
4488 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4489
4490 /* If target is volatile, ANSI requires accessing the value
4491 *from* the target, if it is accessed. So make that happen.
4492 In no case return the target itself. */
4493 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
4494 dont_return_target = 1;
4495 }
4496 else if ((want_value & 1) != 0
4497 && GET_CODE (target) == MEM
4498 && ! MEM_VOLATILE_P (target)
4499 && GET_MODE (target) != BLKmode)
4500 /* If target is in memory and caller wants value in a register instead,
4501 arrange that. Pass TARGET as target for expand_expr so that,
4502 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4503 We know expand_expr will not use the target in that case.
4504 Don't do this if TARGET is volatile because we are supposed
4505 to write it and then read it. */
4506 {
4507 temp = expand_expr (exp, target, GET_MODE (target),
4508 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4509 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4510 {
4511 /* If TEMP is already in the desired TARGET, only copy it from
4512 memory and don't store it there again. */
4513 if (temp == target
4514 || (rtx_equal_p (temp, target)
4515 && ! side_effects_p (temp) && ! side_effects_p (target)))
4516 dont_store_target = 1;
4517 temp = copy_to_reg (temp);
4518 }
4519 dont_return_target = 1;
4520 }
4521 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4522 /* If this is a scalar in a register that is stored in a wider mode
4523 than the declared mode, compute the result into its declared mode
4524 and then convert to the wider mode. Our value is the computed
4525 expression. */
4526 {
4527 rtx inner_target = 0;
4528
4529 /* If we don't want a value, we can do the conversion inside EXP,
4530 which will often result in some optimizations. Do the conversion
4531 in two steps: first change the signedness, if needed, then
4532 the extend. But don't do this if the type of EXP is a subtype
4533 of something else since then the conversion might involve
4534 more than just converting modes. */
4535 if ((want_value & 1) == 0
4536 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4537 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4538 {
4539 if (TREE_UNSIGNED (TREE_TYPE (exp))
4540 != SUBREG_PROMOTED_UNSIGNED_P (target))
4541 exp = convert
4542 ((*lang_hooks.types.signed_or_unsigned_type)
4543 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4544
4545 exp = convert ((*lang_hooks.types.type_for_mode)
4546 (GET_MODE (SUBREG_REG (target)),
4547 SUBREG_PROMOTED_UNSIGNED_P (target)),
4548 exp);
4549
4550 inner_target = SUBREG_REG (target);
4551 }
4552
4553 temp = expand_expr (exp, inner_target, VOIDmode,
4554 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4555
4556 /* If TEMP is a MEM and we want a result value, make the access
4557 now so it gets done only once. Strictly speaking, this is
4558 only necessary if the MEM is volatile, or if the address
4559 overlaps TARGET. But not performing the load twice also
4560 reduces the amount of rtl we generate and then have to CSE. */
4561 if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
4562 temp = copy_to_reg (temp);
4563
4564 /* If TEMP is a VOIDmode constant, use convert_modes to make
4565 sure that we properly convert it. */
4566 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4567 {
4568 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4569 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4570 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4571 GET_MODE (target), temp,
4572 SUBREG_PROMOTED_UNSIGNED_P (target));
4573 }
4574
4575 convert_move (SUBREG_REG (target), temp,
4576 SUBREG_PROMOTED_UNSIGNED_P (target));
4577
4578 /* If we promoted a constant, change the mode back down to match
4579 target. Otherwise, the caller might get confused by a result whose
4580 mode is larger than expected. */
4581
4582 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
4583 {
4584 if (GET_MODE (temp) != VOIDmode)
4585 {
4586 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4587 SUBREG_PROMOTED_VAR_P (temp) = 1;
4588 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4589 SUBREG_PROMOTED_UNSIGNED_P (target));
4590 }
4591 else
4592 temp = convert_modes (GET_MODE (target),
4593 GET_MODE (SUBREG_REG (target)),
4594 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4595 }
4596
4597 return want_value & 1 ? temp : NULL_RTX;
4598 }
4599 else
4600 {
4601 temp = expand_expr (exp, target, GET_MODE (target),
4602 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4603 /* Return TARGET if it's a specified hardware register.
4604 If TARGET is a volatile mem ref, either return TARGET
4605 or return a reg copied *from* TARGET; ANSI requires this.
4606
4607 Otherwise, if TEMP is not TARGET, return TEMP
4608 if it is constant (for efficiency),
4609 or if we really want the correct value. */
4610 if (!(target && GET_CODE (target) == REG
4611 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4612 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4613 && ! rtx_equal_p (temp, target)
4614 && (CONSTANT_P (temp) || (want_value & 1) != 0))
4615 dont_return_target = 1;
4616 }
4617
4618 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4619 the same as that of TARGET, adjust the constant. This is needed, for
4620 example, in case it is a CONST_DOUBLE and we want only a word-sized
4621 value. */
4622 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4623 && TREE_CODE (exp) != ERROR_MARK
4624 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4625 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4626 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4627
4628 /* If value was not generated in the target, store it there.
4629 Convert the value to TARGET's type first if necessary.
4630 If TEMP and TARGET compare equal according to rtx_equal_p, but
4631 one or both of them are volatile memory refs, we have to distinguish
4632 two cases:
4633 - expand_expr has used TARGET. In this case, we must not generate
4634 another copy. This can be detected by TARGET being equal according
4635 to == .
4636 - expand_expr has not used TARGET - that means that the source just
4637 happens to have the same RTX form. Since temp will have been created
4638 by expand_expr, it will compare unequal according to == .
4639 We must generate a copy in this case, to reach the correct number
4640 of volatile memory references. */
4641
4642 if ((! rtx_equal_p (temp, target)
4643 || (temp != target && (side_effects_p (temp)
4644 || side_effects_p (target))))
4645 && TREE_CODE (exp) != ERROR_MARK
4646 && ! dont_store_target
4647 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4648 but TARGET is not a valid memory reference, TEMP will differ
4649 from TARGET although it is really the same location. */
4650 && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
4651 || target != DECL_RTL_IF_SET (exp))
4652 /* If there's nothing to copy, don't bother. Don't call expr_size
4653 unless necessary, because some front ends' (e.g. C++) expr_size hook
4654 aborts on objects that are not supposed to be bit-copied or
4655 bit-initialized. */
4656 && expr_size (exp) != const0_rtx)
4657 {
4658 target = protect_from_queue (target, 1);
4659 if (GET_MODE (temp) != GET_MODE (target)
4660 && GET_MODE (temp) != VOIDmode)
4661 {
4662 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4663 if (dont_return_target)
4664 {
4665 /* In this case, we will return TEMP,
4666 so make sure it has the proper mode.
4667 But don't forget to store the value into TARGET. */
4668 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4669 emit_move_insn (target, temp);
4670 }
4671 else
4672 convert_move (target, temp, unsignedp);
4673 }
4674
4675 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4676 {
4677 /* Handle copying a string constant into an array. The string
4678 constant may be shorter than the array. So copy just the string's
4679 actual length, and clear the rest. First get the size of the data
4680 type of the string, which is actually the size of the target. */
4681 rtx size = expr_size (exp);
4682
4683 if (GET_CODE (size) == CONST_INT
4684 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4685 emit_block_move (target, temp, size,
4686 (want_value & 2
4687 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4688 else
4689 {
4690 /* Compute the size of the data to copy from the string. */
4691 tree copy_size
4692 = size_binop (MIN_EXPR,
4693 make_tree (sizetype, size),
4694 size_int (TREE_STRING_LENGTH (exp)));
4695 rtx copy_size_rtx
4696 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4697 (want_value & 2
4698 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4699 rtx label = 0;
4700
4701 /* Copy that much. */
4702 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4703 TREE_UNSIGNED (sizetype));
4704 emit_block_move (target, temp, copy_size_rtx,
4705 (want_value & 2
4706 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4707
4708 /* Figure out how much is left in TARGET that we have to clear.
4709 Do all calculations in ptr_mode. */
4710 if (GET_CODE (copy_size_rtx) == CONST_INT)
4711 {
4712 size = plus_constant (size, -INTVAL (copy_size_rtx));
4713 target = adjust_address (target, BLKmode,
4714 INTVAL (copy_size_rtx));
4715 }
4716 else
4717 {
4718 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4719 copy_size_rtx, NULL_RTX, 0,
4720 OPTAB_LIB_WIDEN);
4721
4722 #ifdef POINTERS_EXTEND_UNSIGNED
4723 if (GET_MODE (copy_size_rtx) != Pmode)
4724 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4725 TREE_UNSIGNED (sizetype));
4726 #endif
4727
4728 target = offset_address (target, copy_size_rtx,
4729 highest_pow2_factor (copy_size));
4730 label = gen_label_rtx ();
4731 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4732 GET_MODE (size), 0, label);
4733 }
4734
4735 if (size != const0_rtx)
4736 clear_storage (target, size);
4737
4738 if (label)
4739 emit_label (label);
4740 }
4741 }
4742 /* Handle calls that return values in multiple non-contiguous locations.
4743 The Irix 6 ABI has examples of this. */
4744 else if (GET_CODE (target) == PARALLEL)
4745 emit_group_load (target, temp, TREE_TYPE (exp),
4746 int_size_in_bytes (TREE_TYPE (exp)));
4747 else if (GET_MODE (temp) == BLKmode)
4748 emit_block_move (target, temp, expr_size (exp),
4749 (want_value & 2
4750 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4751 else
4752 emit_move_insn (target, temp);
4753 }
4754
4755 /* If we don't want a value, return NULL_RTX. */
4756 if ((want_value & 1) == 0)
4757 return NULL_RTX;
4758
4759 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4760 ??? The latter test doesn't seem to make sense. */
4761 else if (dont_return_target && GET_CODE (temp) != MEM)
4762 return temp;
4763
4764 /* Return TARGET itself if it is a hard register. */
4765 else if ((want_value & 1) != 0
4766 && GET_MODE (target) != BLKmode
4767 && ! (GET_CODE (target) == REG
4768 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4769 return copy_to_reg (target);
4770
4771 else
4772 return target;
4773 }
4774 \f
4775 /* Return 1 if EXP just contains zeros. FIXME merge with initializer_zerop. */
4776
4777 static int
4778 is_zeros_p (tree exp)
4779 {
4780 tree elt;
4781
4782 switch (TREE_CODE (exp))
4783 {
4784 case CONVERT_EXPR:
4785 case NOP_EXPR:
4786 case NON_LVALUE_EXPR:
4787 case VIEW_CONVERT_EXPR:
4788 return is_zeros_p (TREE_OPERAND (exp, 0));
4789
4790 case INTEGER_CST:
4791 return integer_zerop (exp);
4792
4793 case COMPLEX_CST:
4794 return
4795 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4796
4797 case REAL_CST:
4798 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4799
4800 case VECTOR_CST:
4801 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4802 elt = TREE_CHAIN (elt))
4803 if (!is_zeros_p (TREE_VALUE (elt)))
4804 return 0;
4805
4806 return 1;
4807
4808 case CONSTRUCTOR:
4809 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4810 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4811 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4812 if (! is_zeros_p (TREE_VALUE (elt)))
4813 return 0;
4814
4815 return 1;
4816
4817 default:
4818 return 0;
4819 }
4820 }
4821
4822 /* Return 1 if EXP contains mostly (3/4) zeros. */
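/* For example (illustrative): a CONSTRUCTOR with 8 elements of which 6 are
   zero satisfies 4 * 6 >= 3 * 8 and counts as mostly zero, whereas one
   with only 5 zero elements does not, since 4 * 5 < 3 * 8.  */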
4823
4824 int
4825 mostly_zeros_p (tree exp)
4826 {
4827 if (TREE_CODE (exp) == CONSTRUCTOR)
4828 {
4829 int elts = 0, zeros = 0;
4830 tree elt = CONSTRUCTOR_ELTS (exp);
4831 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4832 {
4833 /* If there are no ranges of true bits, it is all zero. */
4834 return elt == NULL_TREE;
4835 }
4836 for (; elt; elt = TREE_CHAIN (elt))
4837 {
4838 /* We do not handle the case where the index is a RANGE_EXPR,
4839 so the statistic will be somewhat inaccurate.
4840 We do make a more accurate count in store_constructor itself,
4841 so since this function is only used for nested array elements,
4842 this should be close enough. */
4843 if (mostly_zeros_p (TREE_VALUE (elt)))
4844 zeros++;
4845 elts++;
4846 }
4847
4848 return 4 * zeros >= 3 * elts;
4849 }
4850
4851 return is_zeros_p (exp);
4852 }
4853 \f
4854 /* Helper function for store_constructor.
4855 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4856 TYPE is the type of the CONSTRUCTOR, not the element type.
4857 CLEARED is as for store_constructor.
4858 ALIAS_SET is the alias set to use for any stores.
4859
4860 This provides a recursive shortcut back to store_constructor when it isn't
4861 necessary to go through store_field. This is so that we can pass through
4862 the cleared field to let store_constructor know that we may not have to
4863 clear a substructure if the outer structure has already been cleared. */
4864
4865 static void
4866 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4867 HOST_WIDE_INT bitpos, enum machine_mode mode,
4868 tree exp, tree type, int cleared, int alias_set)
4869 {
4870 if (TREE_CODE (exp) == CONSTRUCTOR
4871 && bitpos % BITS_PER_UNIT == 0
4872 /* If we have a nonzero bitpos for a register target, then we just
4873 let store_field do the bitfield handling. This is unlikely to
4874 generate unnecessary clear instructions anyway. */
4875 && (bitpos == 0 || GET_CODE (target) == MEM))
4876 {
4877 if (GET_CODE (target) == MEM)
4878 target
4879 = adjust_address (target,
4880 GET_MODE (target) == BLKmode
4881 || 0 != (bitpos
4882 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4883 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4884
4885
4886 /* Update the alias set, if required. */
4887 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4888 && MEM_ALIAS_SET (target) != 0)
4889 {
4890 target = copy_rtx (target);
4891 set_mem_alias_set (target, alias_set);
4892 }
4893
4894 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4895 }
4896 else
4897 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4898 alias_set);
4899 }
4900
4901 /* Store the value of constructor EXP into the rtx TARGET.
4902 TARGET is either a REG or a MEM; we know it cannot conflict, since
4903 safe_from_p has been called.
4904 CLEARED is true if TARGET is known to have been zero'd.
4905 SIZE is the number of bytes of TARGET we are allowed to modify: this
4906 may not be the same as the size of EXP if we are assigning to a field
4907 which has been packed to exclude padding bits. */
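/* Illustrative note: CLEARED threads through store_constructor_field into
   nested constructors, so when the outer aggregate has already been zeroed
   (for instance because it was mostly zeros and clear_storage was used),
   inner elements whose value satisfies is_zeros_p are simply skipped.  */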
4908
4909 static void
4910 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4911 {
4912 tree type = TREE_TYPE (exp);
4913 #ifdef WORD_REGISTER_OPERATIONS
4914 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4915 #endif
4916
4917 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4918 || TREE_CODE (type) == QUAL_UNION_TYPE)
4919 {
4920 tree elt;
4921
4922 /* If size is zero or the target is already cleared, do nothing. */
4923 if (size == 0 || cleared)
4924 cleared = 1;
4925 /* We either clear the aggregate or indicate the value is dead. */
4926 else if ((TREE_CODE (type) == UNION_TYPE
4927 || TREE_CODE (type) == QUAL_UNION_TYPE)
4928 && ! CONSTRUCTOR_ELTS (exp))
4929 /* If the constructor is empty, clear the union. */
4930 {
4931 clear_storage (target, expr_size (exp));
4932 cleared = 1;
4933 }
4934
4935 /* If we are building a static constructor into a register,
4936 set the initial value as zero so we can fold the value into
4937 a constant. But if more than one register is involved,
4938 this probably loses. */
4939 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4940 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4941 {
4942 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4943 cleared = 1;
4944 }
4945
4946 /* If the constructor has fewer fields than the structure
4947 or if we are initializing the structure to mostly zeros,
4948 clear the whole structure first. Don't do this if TARGET is a
4949 register whose mode size isn't equal to SIZE since clear_storage
4950 can't handle this case. */
4951 else if (((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
4952 || mostly_zeros_p (exp))
4953 && (GET_CODE (target) != REG
4954 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4955 == size)))
4956 {
4957 rtx xtarget = target;
4958
4959 if (readonly_fields_p (type))
4960 {
4961 xtarget = copy_rtx (xtarget);
4962 RTX_UNCHANGING_P (xtarget) = 1;
4963 }
4964
4965 clear_storage (xtarget, GEN_INT (size));
4966 cleared = 1;
4967 }
4968
4969 if (! cleared)
4970 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4971
4972 /* Store each element of the constructor into
4973 the corresponding field of TARGET. */
4974
4975 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4976 {
4977 tree field = TREE_PURPOSE (elt);
4978 tree value = TREE_VALUE (elt);
4979 enum machine_mode mode;
4980 HOST_WIDE_INT bitsize;
4981 HOST_WIDE_INT bitpos = 0;
4982 tree offset;
4983 rtx to_rtx = target;
4984
4985 /* Just ignore missing fields.
4986 We cleared the whole structure, above,
4987 if any fields are missing. */
4988 if (field == 0)
4989 continue;
4990
4991 if (cleared && is_zeros_p (value))
4992 continue;
4993
4994 if (host_integerp (DECL_SIZE (field), 1))
4995 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4996 else
4997 bitsize = -1;
4998
4999 mode = DECL_MODE (field);
5000 if (DECL_BIT_FIELD (field))
5001 mode = VOIDmode;
5002
5003 offset = DECL_FIELD_OFFSET (field);
5004 if (host_integerp (offset, 0)
5005 && host_integerp (bit_position (field), 0))
5006 {
5007 bitpos = int_bit_position (field);
5008 offset = 0;
5009 }
5010 else
5011 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5012
5013 if (offset)
5014 {
5015 rtx offset_rtx;
5016
5017 if (CONTAINS_PLACEHOLDER_P (offset))
5018 offset = build (WITH_RECORD_EXPR, sizetype,
5019 offset, make_tree (TREE_TYPE (exp), target));
5020
5021 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
5022 if (GET_CODE (to_rtx) != MEM)
5023 abort ();
5024
5025 #ifdef POINTERS_EXTEND_UNSIGNED
5026 if (GET_MODE (offset_rtx) != Pmode)
5027 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
5028 #else
5029 if (GET_MODE (offset_rtx) != ptr_mode)
5030 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
5031 #endif
5032
5033 to_rtx = offset_address (to_rtx, offset_rtx,
5034 highest_pow2_factor (offset));
5035 }
5036
5037 if (TREE_READONLY (field))
5038 {
5039 if (GET_CODE (to_rtx) == MEM)
5040 to_rtx = copy_rtx (to_rtx);
5041
5042 RTX_UNCHANGING_P (to_rtx) = 1;
5043 }
5044
5045 #ifdef WORD_REGISTER_OPERATIONS
5046 /* If this initializes a field that is smaller than a word, at the
5047 start of a word, try to widen it to a full word.
5048 This special case allows us to output C++ member function
5049 initializations in a form that the optimizers can understand. */
5050 if (GET_CODE (target) == REG
5051 && bitsize < BITS_PER_WORD
5052 && bitpos % BITS_PER_WORD == 0
5053 && GET_MODE_CLASS (mode) == MODE_INT
5054 && TREE_CODE (value) == INTEGER_CST
5055 && exp_size >= 0
5056 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5057 {
5058 tree type = TREE_TYPE (value);
5059
5060 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5061 {
5062 type = (*lang_hooks.types.type_for_size)
5063 (BITS_PER_WORD, TREE_UNSIGNED (type));
5064 value = convert (type, value);
5065 }
5066
5067 if (BYTES_BIG_ENDIAN)
5068 value
5069 = fold (build (LSHIFT_EXPR, type, value,
5070 build_int_2 (BITS_PER_WORD - bitsize, 0)));
5071 bitsize = BITS_PER_WORD;
5072 mode = word_mode;
5073 }
5074 #endif
5075
5076 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5077 && DECL_NONADDRESSABLE_P (field))
5078 {
5079 to_rtx = copy_rtx (to_rtx);
5080 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5081 }
5082
5083 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5084 value, type, cleared,
5085 get_alias_set (TREE_TYPE (field)));
5086 }
5087 }
5088 else if (TREE_CODE (type) == ARRAY_TYPE
5089 || TREE_CODE (type) == VECTOR_TYPE)
5090 {
5091 tree elt;
5092 int i;
5093 int need_to_clear;
5094 tree domain = TYPE_DOMAIN (type);
5095 tree elttype = TREE_TYPE (type);
5096 int const_bounds_p;
5097 HOST_WIDE_INT minelt = 0;
5098 HOST_WIDE_INT maxelt = 0;
5099
5100 /* Vectors are like arrays, but the domain is stored via an array
5101 type indirectly. */
5102 if (TREE_CODE (type) == VECTOR_TYPE)
5103 {
5104 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
5105 the same field as TYPE_DOMAIN, we are not guaranteed that
5106 it always will. */
5107 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
5108 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
5109 }
5110
5111 const_bounds_p = (TYPE_MIN_VALUE (domain)
5112 && TYPE_MAX_VALUE (domain)
5113 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5114 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5115
5116 /* If we have constant bounds for the range of the type, get them. */
5117 if (const_bounds_p)
5118 {
5119 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5120 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5121 }
5122
5123 /* If the constructor has fewer elements than the array,
5124 clear the whole array first. Similarly if this is a
5125 static constructor of a non-BLKmode object. */
5126 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
5127 need_to_clear = 1;
5128 else
5129 {
5130 HOST_WIDE_INT count = 0, zero_count = 0;
5131 need_to_clear = ! const_bounds_p;
5132
5133 /* This loop is a more accurate version of the loop in
5134 mostly_zeros_p (it handles RANGE_EXPR in an index).
5135 It is also needed to check for missing elements. */
5136 for (elt = CONSTRUCTOR_ELTS (exp);
5137 elt != NULL_TREE && ! need_to_clear;
5138 elt = TREE_CHAIN (elt))
5139 {
5140 tree index = TREE_PURPOSE (elt);
5141 HOST_WIDE_INT this_node_count;
5142
5143 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5144 {
5145 tree lo_index = TREE_OPERAND (index, 0);
5146 tree hi_index = TREE_OPERAND (index, 1);
5147
5148 if (! host_integerp (lo_index, 1)
5149 || ! host_integerp (hi_index, 1))
5150 {
5151 need_to_clear = 1;
5152 break;
5153 }
5154
5155 this_node_count = (tree_low_cst (hi_index, 1)
5156 - tree_low_cst (lo_index, 1) + 1);
5157 }
5158 else
5159 this_node_count = 1;
5160
5161 count += this_node_count;
5162 if (mostly_zeros_p (TREE_VALUE (elt)))
5163 zero_count += this_node_count;
5164 }
5165
5166 /* Clear the entire array first if there are any missing elements,
5167 or if the incidence of zero elements is >= 75%. */
5168 if (! need_to_clear
5169 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
5170 need_to_clear = 1;
5171 }
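/* Example (illustrative): an array with domain [0..9] initialized by only
   seven explicit elements has count == 7 < 10, so the whole array is
   cleared first and the missing elements are left zero; likewise a fully
   populated initializer in which at least three quarters of the elements
   are mostly zero is cleared first and only the nonzero elements stored.  */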
5172
5173 if (need_to_clear && size > 0)
5174 {
5175 if (! cleared)
5176 {
5177 if (REG_P (target))
5178 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5179 else
5180 clear_storage (target, GEN_INT (size));
5181 }
5182 cleared = 1;
5183 }
5184 else if (REG_P (target))
5185 /* Inform later passes that the old value is dead. */
5186 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5187
5188 /* Store each element of the constructor into
5189 the corresponding element of TARGET, determined
5190 by counting the elements. */
5191 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
5192 elt;
5193 elt = TREE_CHAIN (elt), i++)
5194 {
5195 enum machine_mode mode;
5196 HOST_WIDE_INT bitsize;
5197 HOST_WIDE_INT bitpos;
5198 int unsignedp;
5199 tree value = TREE_VALUE (elt);
5200 tree index = TREE_PURPOSE (elt);
5201 rtx xtarget = target;
5202
5203 if (cleared && is_zeros_p (value))
5204 continue;
5205
5206 unsignedp = TREE_UNSIGNED (elttype);
5207 mode = TYPE_MODE (elttype);
5208 if (mode == BLKmode)
5209 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5210 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5211 : -1);
5212 else
5213 bitsize = GET_MODE_BITSIZE (mode);
5214
5215 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5216 {
5217 tree lo_index = TREE_OPERAND (index, 0);
5218 tree hi_index = TREE_OPERAND (index, 1);
5219 rtx index_r, pos_rtx, loop_end;
5220 struct nesting *loop;
5221 HOST_WIDE_INT lo, hi, count;
5222 tree position;
5223
5224 /* If the range is constant and "small", unroll the loop. */
5225 if (const_bounds_p
5226 && host_integerp (lo_index, 0)
5227 && host_integerp (hi_index, 0)
5228 && (lo = tree_low_cst (lo_index, 0),
5229 hi = tree_low_cst (hi_index, 0),
5230 count = hi - lo + 1,
5231 (GET_CODE (target) != MEM
5232 || count <= 2
5233 || (host_integerp (TYPE_SIZE (elttype), 1)
5234 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5235 <= 40 * 8)))))
5236 {
5237 lo -= minelt; hi -= minelt;
5238 for (; lo <= hi; lo++)
5239 {
5240 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5241
5242 if (GET_CODE (target) == MEM
5243 && !MEM_KEEP_ALIAS_SET_P (target)
5244 && TREE_CODE (type) == ARRAY_TYPE
5245 && TYPE_NONALIASED_COMPONENT (type))
5246 {
5247 target = copy_rtx (target);
5248 MEM_KEEP_ALIAS_SET_P (target) = 1;
5249 }
5250
5251 store_constructor_field
5252 (target, bitsize, bitpos, mode, value, type, cleared,
5253 get_alias_set (elttype));
5254 }
5255 }
5256 else
5257 {
5258 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
5259 loop_end = gen_label_rtx ();
5260
5261 unsignedp = TREE_UNSIGNED (domain);
5262
5263 index = build_decl (VAR_DECL, NULL_TREE, domain);
5264
5265 index_r
5266 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5267 &unsignedp, 0));
5268 SET_DECL_RTL (index, index_r);
5269 if (TREE_CODE (value) == SAVE_EXPR
5270 && SAVE_EXPR_RTL (value) == 0)
5271 {
5272 /* Make sure value gets expanded once before the
5273 loop. */
5274 expand_expr (value, const0_rtx, VOIDmode, 0);
5275 emit_queue ();
5276 }
5277 store_expr (lo_index, index_r, 0);
5278 loop = expand_start_loop (0);
5279
5280 /* Assign value to element index. */
5281 position
5282 = convert (ssizetype,
5283 fold (build (MINUS_EXPR, TREE_TYPE (index),
5284 index, TYPE_MIN_VALUE (domain))));
5285 position = size_binop (MULT_EXPR, position,
5286 convert (ssizetype,
5287 TYPE_SIZE_UNIT (elttype)));
5288
5289 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
5290 xtarget = offset_address (target, pos_rtx,
5291 highest_pow2_factor (position));
5292 xtarget = adjust_address (xtarget, mode, 0);
5293 if (TREE_CODE (value) == CONSTRUCTOR)
5294 store_constructor (value, xtarget, cleared,
5295 bitsize / BITS_PER_UNIT);
5296 else
5297 store_expr (value, xtarget, 0);
5298
5299 expand_exit_loop_if_false (loop,
5300 build (LT_EXPR, integer_type_node,
5301 index, hi_index));
5302
5303 expand_increment (build (PREINCREMENT_EXPR,
5304 TREE_TYPE (index),
5305 index, integer_one_node), 0, 0);
5306 expand_end_loop ();
5307 emit_label (loop_end);
5308 }
5309 }
5310 else if ((index != 0 && ! host_integerp (index, 0))
5311 || ! host_integerp (TYPE_SIZE (elttype), 1))
5312 {
5313 tree position;
5314
5315 if (index == 0)
5316 index = ssize_int (1);
5317
5318 if (minelt)
5319 index = convert (ssizetype,
5320 fold (build (MINUS_EXPR, index,
5321 TYPE_MIN_VALUE (domain))));
5322
5323 position = size_binop (MULT_EXPR, index,
5324 convert (ssizetype,
5325 TYPE_SIZE_UNIT (elttype)));
5326 xtarget = offset_address (target,
5327 expand_expr (position, 0, VOIDmode, 0),
5328 highest_pow2_factor (position));
5329 xtarget = adjust_address (xtarget, mode, 0);
5330 store_expr (value, xtarget, 0);
5331 }
5332 else
5333 {
5334 if (index != 0)
5335 bitpos = ((tree_low_cst (index, 0) - minelt)
5336 * tree_low_cst (TYPE_SIZE (elttype), 1));
5337 else
5338 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5339
5340 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
5341 && TREE_CODE (type) == ARRAY_TYPE
5342 && TYPE_NONALIASED_COMPONENT (type))
5343 {
5344 target = copy_rtx (target);
5345 MEM_KEEP_ALIAS_SET_P (target) = 1;
5346 }
5347
5348 store_constructor_field (target, bitsize, bitpos, mode, value,
5349 type, cleared, get_alias_set (elttype));
5350
5351 }
5352 }
5353 }
5354
5355 /* Set constructor assignments. */
5356 else if (TREE_CODE (type) == SET_TYPE)
5357 {
5358 tree elt = CONSTRUCTOR_ELTS (exp);
5359 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
5360 tree domain = TYPE_DOMAIN (type);
5361 tree domain_min, domain_max, bitlength;
5362
5363 /* The default implementation strategy is to extract the constant
5364 parts of the constructor, use that to initialize the target,
5365 and then "or" in whatever non-constant ranges we need in addition.
5366
5367 If a large set is all zero or all ones, it is
5368 probably better to set it using memset (if available) or bzero.
5369 Also, if a large set has just a single range, it may also be
5370 better to first clear the whole set (using bzero/memset), and
5371 then set the bits we want. */
5372
5373 /* Check for all zeros. */
5374 if (elt == NULL_TREE && size > 0)
5375 {
5376 if (!cleared)
5377 clear_storage (target, GEN_INT (size));
5378 return;
5379 }
5380
5381 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5382 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5383 bitlength = size_binop (PLUS_EXPR,
5384 size_diffop (domain_max, domain_min),
5385 ssize_int (1));
5386
5387 nbits = tree_low_cst (bitlength, 1);
5388
5389 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5390 are "complicated" (more than one range), initialize (the
5391 constant parts) by copying from a constant. */
5392 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5393 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5394 {
5395 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5396 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5397 char *bit_buffer = alloca (nbits);
5398 HOST_WIDE_INT word = 0;
5399 unsigned int bit_pos = 0;
5400 unsigned int ibit = 0;
5401 unsigned int offset = 0; /* In bytes from beginning of set. */
5402
5403 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
5404 for (;;)
5405 {
5406 if (bit_buffer[ibit])
5407 {
5408 if (BYTES_BIG_ENDIAN)
5409 word |= (1 << (set_word_size - 1 - bit_pos));
5410 else
5411 word |= 1 << bit_pos;
5412 }
5413
5414 bit_pos++; ibit++;
5415 if (bit_pos >= set_word_size || ibit == nbits)
5416 {
5417 if (word != 0 || ! cleared)
5418 {
5419 rtx datum = GEN_INT (word);
5420 rtx to_rtx;
5421
5422 /* The assumption here is that it is safe to use
5423 XEXP if the set is multi-word, but not if
5424 it's single-word. */
5425 if (GET_CODE (target) == MEM)
5426 to_rtx = adjust_address (target, mode, offset);
5427 else if (offset == 0)
5428 to_rtx = target;
5429 else
5430 abort ();
5431 emit_move_insn (to_rtx, datum);
5432 }
5433
5434 if (ibit == nbits)
5435 break;
5436 word = 0;
5437 bit_pos = 0;
5438 offset += set_word_size / BITS_PER_UNIT;
5439 }
5440 }
5441 }
5442 else if (!cleared)
5443 /* Don't bother clearing storage if the set is all ones. */
5444 if (TREE_CHAIN (elt) != NULL_TREE
5445 || (TREE_PURPOSE (elt) == NULL_TREE
5446 ? nbits != 1
5447 : ( ! host_integerp (TREE_VALUE (elt), 0)
5448 || ! host_integerp (TREE_PURPOSE (elt), 0)
5449 || (tree_low_cst (TREE_VALUE (elt), 0)
5450 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5451 != (HOST_WIDE_INT) nbits))))
5452 clear_storage (target, expr_size (exp));
5453
5454 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5455 {
5456 /* Start of range of element or NULL. */
5457 tree startbit = TREE_PURPOSE (elt);
5458 /* End of range of element, or element value. */
5459 tree endbit = TREE_VALUE (elt);
5460 HOST_WIDE_INT startb, endb;
5461 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5462
5463 bitlength_rtx = expand_expr (bitlength,
5464 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5465
5466 /* Handle non-range tuple element like [ expr ]. */
5467 if (startbit == NULL_TREE)
5468 {
5469 startbit = save_expr (endbit);
5470 endbit = startbit;
5471 }
5472
5473 startbit = convert (sizetype, startbit);
5474 endbit = convert (sizetype, endbit);
5475 if (! integer_zerop (domain_min))
5476 {
5477 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5478 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5479 }
5480 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5481 EXPAND_CONST_ADDRESS);
5482 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5483 EXPAND_CONST_ADDRESS);
5484
5485 if (REG_P (target))
5486 {
5487 targetx
5488 = assign_temp
5489 ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5490 (GET_MODE (target), 0),
5491 TYPE_QUAL_CONST)),
5492 0, 1, 1);
5493 emit_move_insn (targetx, target);
5494 }
5495
5496 else if (GET_CODE (target) == MEM)
5497 targetx = target;
5498 else
5499 abort ();
5500
5501 /* Optimization: If startbit and endbit are constants divisible
5502 by BITS_PER_UNIT, call memset instead. */
5503 if (TARGET_MEM_FUNCTIONS
5504 && TREE_CODE (startbit) == INTEGER_CST
5505 && TREE_CODE (endbit) == INTEGER_CST
5506 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5507 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5508 {
5509 emit_library_call (memset_libfunc, LCT_NORMAL,
5510 VOIDmode, 3,
5511 plus_constant (XEXP (targetx, 0),
5512 startb / BITS_PER_UNIT),
5513 Pmode,
5514 constm1_rtx, TYPE_MODE (integer_type_node),
5515 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5516 TYPE_MODE (sizetype));
5517 }
5518 else
5519 emit_library_call (setbits_libfunc, LCT_NORMAL,
5520 VOIDmode, 4, XEXP (targetx, 0),
5521 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5522 startbit_rtx, TYPE_MODE (sizetype),
5523 endbit_rtx, TYPE_MODE (sizetype));
5524
5525 if (REG_P (target))
5526 emit_move_insn (target, targetx);
5527 }
5528 }
5529
5530 else
5531 abort ();
5532 }
5533
5534 /* Store the value of EXP (an expression tree)
5535 into a subfield of TARGET which has mode MODE and occupies
5536 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5537 If MODE is VOIDmode, it means that we are storing into a bit-field.
5538
5539 If VALUE_MODE is VOIDmode, return nothing in particular.
5540 UNSIGNEDP is not used in this case.
5541
5542 Otherwise, return an rtx for the value stored. This rtx
5543 has mode VALUE_MODE if that is convenient to do.
5544 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5545
5546 TYPE is the type of the underlying object,
5547
5548 ALIAS_SET is the alias set for the destination. This value will
5549 (in general) be different from that for TARGET, since TARGET is a
5550 reference to the containing structure. */
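/* Illustrative example (editorial sketch, not part of the original
   comment): for a C bit-field assignment such as

       struct s { unsigned int f : 3; unsigned int g : 5; } x;
       x.g = v;

   expand_assignment ends up calling store_field with TARGET roughly the
   MEM for `x', BITSIZE == 5, BITPOS == 3 (on a typical little-endian
   layout; the exact position is target-dependent), MODE == VOIDmode
   because `g' is a bit-field, and TYPE the type of `x'.  */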
5551
5552 static rtx
5553 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5554 enum machine_mode mode, tree exp, enum machine_mode value_mode,
5555 int unsignedp, tree type, int alias_set)
5556 {
5557 HOST_WIDE_INT width_mask = 0;
5558
5559 if (TREE_CODE (exp) == ERROR_MARK)
5560 return const0_rtx;
5561
5562 /* If we have nothing to store, do nothing unless the expression has
5563 side-effects. */
5564 if (bitsize == 0)
5565 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5566 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5567 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5568
5569 /* If we are storing into an unaligned field of an aligned union that is
5570 in a register, we may have the mode of TARGET being an integer mode but
5571 MODE == BLKmode. In that case, get an aligned object whose size and
5572 alignment are the same as TARGET and store TARGET into it (we can avoid
5573 the store if the field being stored is the entire width of TARGET). Then
5574 call ourselves recursively to store the field into a BLKmode version of
5575 that object. Finally, load from the object into TARGET. This is not
5576 very efficient in general, but should only be slightly more expensive
5577 than the otherwise-required unaligned accesses. Perhaps this can be
5578 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5579 twice, once with emit_move_insn and once via store_field. */
5580
5581 if (mode == BLKmode
5582 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5583 {
5584 rtx object = assign_temp (type, 0, 1, 1);
5585 rtx blk_object = adjust_address (object, BLKmode, 0);
5586
5587 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5588 emit_move_insn (object, target);
5589
5590 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5591 alias_set);
5592
5593 emit_move_insn (target, object);
5594
5595 /* We want to return the BLKmode version of the data. */
5596 return blk_object;
5597 }
5598
5599 if (GET_CODE (target) == CONCAT)
5600 {
5601 /* We're storing into a struct containing a single __complex. */
5602
5603 if (bitpos != 0)
5604 abort ();
5605 return store_expr (exp, target, 0);
5606 }
5607
5608 /* If the structure is in a register or if the component
5609 is a bit field, we cannot use addressing to access it.
5610 Use bit-field techniques or SUBREG to store in it. */
5611
5612 if (mode == VOIDmode
5613 || (mode != BLKmode && ! direct_store[(int) mode]
5614 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5615 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5616 || GET_CODE (target) == REG
5617 || GET_CODE (target) == SUBREG
5618 /* If the field isn't aligned enough to store as an ordinary memref,
5619 store it as a bit field. */
5620 || (mode != BLKmode
5621 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5622 || bitpos % GET_MODE_ALIGNMENT (mode))
5623 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5624 || (bitpos % BITS_PER_UNIT != 0)))
5625 /* If the RHS and field are a constant size and the size of the
5626 RHS isn't the same size as the bitfield, we must use bitfield
5627 operations. */
5628 || (bitsize >= 0
5629 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5630 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5631 {
5632 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5633
5634 /* If BITSIZE is narrower than the size of the type of EXP
5635 we will be narrowing TEMP. Normally, what's wanted are the
5636 low-order bits. However, if EXP's type is a record and this is
5637 a big-endian machine, we want the upper BITSIZE bits. */
5638 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5639 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5640 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5641 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5642 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5643 - bitsize),
5644 NULL_RTX, 1);
5645
5646 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5647 MODE. */
5648 if (mode != VOIDmode && mode != BLKmode
5649 && mode != TYPE_MODE (TREE_TYPE (exp)))
5650 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5651
5652 /* If the modes of TARGET and TEMP are both BLKmode, both
5653 must be in memory and BITPOS must be aligned on a byte
5654 boundary. If so, we simply do a block copy. */
5655 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5656 {
5657 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5658 || bitpos % BITS_PER_UNIT != 0)
5659 abort ();
5660
5661 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5662 emit_block_move (target, temp,
5663 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5664 / BITS_PER_UNIT),
5665 BLOCK_OP_NORMAL);
5666
5667 return value_mode == VOIDmode ? const0_rtx : target;
5668 }
5669
5670 /* Store the value in the bitfield. */
5671 store_bit_field (target, bitsize, bitpos, mode, temp,
5672 int_size_in_bytes (type));
5673
5674 if (value_mode != VOIDmode)
5675 {
5676 /* The caller wants an rtx for the value.
5677 If possible, avoid refetching from the bitfield itself. */
5678 if (width_mask != 0
5679 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5680 {
5681 tree count;
5682 enum machine_mode tmode;
5683
5684 tmode = GET_MODE (temp);
5685 if (tmode == VOIDmode)
5686 tmode = value_mode;
5687
5688 if (unsignedp)
5689 return expand_and (tmode, temp,
5690 gen_int_mode (width_mask, tmode),
5691 NULL_RTX);
5692
5693 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5694 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5695 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5696 }
5697
5698 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5699 NULL_RTX, value_mode, VOIDmode,
5700 int_size_in_bytes (type));
5701 }
5702 return const0_rtx;
5703 }
5704 else
5705 {
5706 rtx addr = XEXP (target, 0);
5707 rtx to_rtx = target;
5708
5709 /* If a value is wanted, it must be the lhs;
5710 so make the address stable for multiple use. */
5711
5712 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5713 && ! CONSTANT_ADDRESS_P (addr)
5714 /* A frame-pointer reference is already stable. */
5715 && ! (GET_CODE (addr) == PLUS
5716 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5717 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5718 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5719 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5720
5721 /* Now build a reference to just the desired component. */
5722
5723 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5724
5725 if (to_rtx == target)
5726 to_rtx = copy_rtx (to_rtx);
5727
5728 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5729 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5730 set_mem_alias_set (to_rtx, alias_set);
5731
5732 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5733 }
5734 }
5735 \f
5736 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5737 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5738 codes and find the ultimate containing object, which we return.
5739
5740 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5741 bit position, and *PUNSIGNEDP to the signedness of the field.
5742 If the position of the field is variable, we store a tree
5743 giving the variable offset (in units) in *POFFSET.
5744 This offset is in addition to the bit position.
5745 If the position is not variable, we store 0 in *POFFSET.
5746
5747 If any of the extraction expressions is volatile,
5748 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5749
5750 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5751 is a mode that can be used to access the field. In that case, *PBITSIZE
5752 is redundant.
5753
5754 If the field describes a variable-sized object, *PMODE is set to
5755 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5756 this case, but the address of the object can be found. */
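/* Illustrative example (editorial addition): for a reference such as
   `x.a[i].b', successive iterations of the loop below peel off the
   COMPONENT_REF for `b', the ARRAY_REF for `[i]' and the COMPONENT_REF
   for `a', accumulating `b's position into *PBITPOS and the variable
   part `i * sizeof (element)' into *POFFSET, and the function returns
   the innermost object `x'.  How the result splits between *PBITPOS and
   *POFFSET depends on which pieces are compile-time constants.  */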
5757
5758 tree
5759 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5760 HOST_WIDE_INT *pbitpos, tree *poffset,
5761 enum machine_mode *pmode, int *punsignedp,
5762 int *pvolatilep)
5763 {
5764 tree size_tree = 0;
5765 enum machine_mode mode = VOIDmode;
5766 tree offset = size_zero_node;
5767 tree bit_offset = bitsize_zero_node;
5768 tree placeholder_ptr = 0;
5769 tree tem;
5770
5771 /* First get the mode, signedness, and size. We do this from just the
5772 outermost expression. */
5773 if (TREE_CODE (exp) == COMPONENT_REF)
5774 {
5775 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5776 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5777 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5778
5779 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5780 }
5781 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5782 {
5783 size_tree = TREE_OPERAND (exp, 1);
5784 *punsignedp = TREE_UNSIGNED (exp);
5785 }
5786 else
5787 {
5788 mode = TYPE_MODE (TREE_TYPE (exp));
5789 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5790
5791 if (mode == BLKmode)
5792 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5793 else
5794 *pbitsize = GET_MODE_BITSIZE (mode);
5795 }
5796
5797 if (size_tree != 0)
5798 {
5799 if (! host_integerp (size_tree, 1))
5800 mode = BLKmode, *pbitsize = -1;
5801 else
5802 *pbitsize = tree_low_cst (size_tree, 1);
5803 }
5804
5805 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5806 and find the ultimate containing object. */
5807 while (1)
5808 {
5809 if (TREE_CODE (exp) == BIT_FIELD_REF)
5810 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5811 else if (TREE_CODE (exp) == COMPONENT_REF)
5812 {
5813 tree field = TREE_OPERAND (exp, 1);
5814 tree this_offset = DECL_FIELD_OFFSET (field);
5815
5816 /* If this field hasn't been filled in yet, don't go
5817 past it. This should only happen when folding expressions
5818 made during type construction. */
5819 if (this_offset == 0)
5820 break;
5821 else if (CONTAINS_PLACEHOLDER_P (this_offset))
5822 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5823
5824 offset = size_binop (PLUS_EXPR, offset, this_offset);
5825 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5826 DECL_FIELD_BIT_OFFSET (field));
5827
5828 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5829 }
5830
5831 else if (TREE_CODE (exp) == ARRAY_REF
5832 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5833 {
5834 tree index = TREE_OPERAND (exp, 1);
5835 tree array = TREE_OPERAND (exp, 0);
5836 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5837 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5838 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5839
5840 /* We assume all arrays have sizes that are a multiple of a byte.
5841 First subtract the lower bound, if any, in the type of the
5842 index, then convert to sizetype and multiply by the size of the
5843 array element. */
5844 if (low_bound != 0 && ! integer_zerop (low_bound))
5845 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5846 index, low_bound));
5847
5848 /* If the index has a self-referential type, pass it to a
5849 WITH_RECORD_EXPR; if the component size is self-referential,
5850 pass our component to one. */
5851 if (CONTAINS_PLACEHOLDER_P (index))
5852 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5853 if (CONTAINS_PLACEHOLDER_P (unit_size))
5854 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5855
5856 offset = size_binop (PLUS_EXPR, offset,
5857 size_binop (MULT_EXPR,
5858 convert (sizetype, index),
5859 unit_size));
5860 }
5861
5862 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5863 {
5864 tree new = find_placeholder (exp, &placeholder_ptr);
5865
5866 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5867 We might have been called from tree optimization where we
5868 haven't set up an object yet. */
5869 if (new == 0)
5870 break;
5871 else
5872 exp = new;
5873
5874 continue;
5875 }
5876
5877 /* We can go inside most conversions: all NON_LVALUE_EXPRs, all normal
5878 conversions that don't change the mode, and all view conversions
5879 except those that need to "step up" the alignment. */
5880 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5881 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5882 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5883 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5884 && STRICT_ALIGNMENT
5885 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5886 < BIGGEST_ALIGNMENT)
5887 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5888 || TYPE_ALIGN_OK (TREE_TYPE
5889 (TREE_OPERAND (exp, 0))))))
5890 && ! ((TREE_CODE (exp) == NOP_EXPR
5891 || TREE_CODE (exp) == CONVERT_EXPR)
5892 && (TYPE_MODE (TREE_TYPE (exp))
5893 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5894 break;
5895
5896 /* If any reference in the chain is volatile, the effect is volatile. */
5897 if (TREE_THIS_VOLATILE (exp))
5898 *pvolatilep = 1;
5899
5900 exp = TREE_OPERAND (exp, 0);
5901 }
5902
5903 /* If OFFSET is constant, see if we can return the whole thing as a
5904 constant bit position. Otherwise, split it up. */
5905 if (host_integerp (offset, 0)
5906 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5907 bitsize_unit_node))
5908 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5909 && host_integerp (tem, 0))
5910 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5911 else
5912 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5913
5914 *pmode = mode;
5915 return exp;
5916 }
5917
5918 /* Return 1 if T is an expression that get_inner_reference handles. */
5919
5920 int
5921 handled_component_p (tree t)
5922 {
5923 switch (TREE_CODE (t))
5924 {
5925 case BIT_FIELD_REF:
5926 case COMPONENT_REF:
5927 case ARRAY_REF:
5928 case ARRAY_RANGE_REF:
5929 case NON_LVALUE_EXPR:
5930 case VIEW_CONVERT_EXPR:
5931 return 1;
5932
5933 /* ??? Sure they are handled, but get_inner_reference may return
5934 a different PBITSIZE, depending upon whether the expression is
5935 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
5936 case NOP_EXPR:
5937 case CONVERT_EXPR:
5938 return (TYPE_MODE (TREE_TYPE (t))
5939 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5940
5941 default:
5942 return 0;
5943 }
5944 }
5945 \f
5946 /* Given an rtx VALUE that may contain additions and multiplications, return
5947 an equivalent value that just refers to a register, memory, or constant.
5948 This is done by generating instructions to perform the arithmetic and
5949 returning a pseudo-register containing the value.
5950
5951 The returned value may be a REG, SUBREG, MEM or constant. */
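/* Illustrative example (editorial addition; register numbers are made
   up): given
       VALUE == (plus:SI (mult:SI (reg:SI 100) (const_int 4)) (reg:SI 101)),
   force_operand recursively forces the MULT into a pseudo (emitting a
   multiply or shift) and then emits an add, returning a single REG that
   holds the sum; the caller can then use that REG directly as an address
   or instruction operand.  */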
5952
5953 rtx
5954 force_operand (rtx value, rtx target)
5955 {
5956 rtx op1, op2;
5957 /* Use subtarget as the target for operand 0 of a binary operation. */
5958 rtx subtarget = get_subtarget (target);
5959 enum rtx_code code = GET_CODE (value);
5960
5961 /* Check for a PIC address load. */
5962 if ((code == PLUS || code == MINUS)
5963 && XEXP (value, 0) == pic_offset_table_rtx
5964 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5965 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5966 || GET_CODE (XEXP (value, 1)) == CONST))
5967 {
5968 if (!subtarget)
5969 subtarget = gen_reg_rtx (GET_MODE (value));
5970 emit_move_insn (subtarget, value);
5971 return subtarget;
5972 }
5973
5974 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5975 {
5976 if (!target)
5977 target = gen_reg_rtx (GET_MODE (value));
5978 convert_move (target, force_operand (XEXP (value, 0), NULL),
5979 code == ZERO_EXTEND);
5980 return target;
5981 }
5982
5983 if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
5984 {
5985 op2 = XEXP (value, 1);
5986 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
5987 subtarget = 0;
5988 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5989 {
5990 code = PLUS;
5991 op2 = negate_rtx (GET_MODE (value), op2);
5992 }
5993
5994 /* Check for an addition with OP2 a constant integer and our first
5995 operand a PLUS of a virtual register and something else. In that
5996 case, we want to emit the sum of the virtual register and the
5997 constant first and then add the other value. This allows virtual
5998 register instantiation to simply modify the constant rather than
5999 creating another one around this addition. */
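/* Illustrative example (editorial addition): with
   VALUE == (plus:SI (plus:SI (reg:SI virtual-stack-vars) (reg:SI 100))
                     (const_int 8)),
   we first compute virtual-stack-vars + 8, which virtual register
   instantiation can later fold into a single frame-pointer offset, and
   only then add reg 100.  */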
6000 if (code == PLUS && GET_CODE (op2) == CONST_INT
6001 && GET_CODE (XEXP (value, 0)) == PLUS
6002 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
6003 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
6004 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
6005 {
6006 rtx temp = expand_simple_binop (GET_MODE (value), code,
6007 XEXP (XEXP (value, 0), 0), op2,
6008 subtarget, 0, OPTAB_LIB_WIDEN);
6009 return expand_simple_binop (GET_MODE (value), code, temp,
6010 force_operand (XEXP (XEXP (value,
6011 0), 1), 0),
6012 target, 0, OPTAB_LIB_WIDEN);
6013 }
6014
6015 op1 = force_operand (XEXP (value, 0), subtarget);
6016 op2 = force_operand (op2, NULL_RTX);
6017 switch (code)
6018 {
6019 case MULT:
6020 return expand_mult (GET_MODE (value), op1, op2, target, 1);
6021 case DIV:
6022 if (!INTEGRAL_MODE_P (GET_MODE (value)))
6023 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6024 target, 1, OPTAB_LIB_WIDEN);
6025 else
6026 return expand_divmod (0,
6027 FLOAT_MODE_P (GET_MODE (value))
6028 ? RDIV_EXPR : TRUNC_DIV_EXPR,
6029 GET_MODE (value), op1, op2, target, 0);
6030 break;
6031 case MOD:
6032 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6033 target, 0);
6034 break;
6035 case UDIV:
6036 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6037 target, 1);
6038 break;
6039 case UMOD:
6040 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6041 target, 1);
6042 break;
6043 case ASHIFTRT:
6044 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6045 target, 0, OPTAB_LIB_WIDEN);
6046 break;
6047 default:
6048 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6049 target, 1, OPTAB_LIB_WIDEN);
6050 }
6051 }
6052 if (GET_RTX_CLASS (code) == '1')
6053 {
6054 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6055 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6056 }
6057
6058 #ifdef INSN_SCHEDULING
6059 /* On machines that have insn scheduling, we want all memory references to be
6060 explicit, so we need to deal with such paradoxical SUBREGs. */
6061 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
6062 && (GET_MODE_SIZE (GET_MODE (value))
6063 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6064 value
6065 = simplify_gen_subreg (GET_MODE (value),
6066 force_reg (GET_MODE (SUBREG_REG (value)),
6067 force_operand (SUBREG_REG (value),
6068 NULL_RTX)),
6069 GET_MODE (SUBREG_REG (value)),
6070 SUBREG_BYTE (value));
6071 #endif
6072
6073 return value;
6074 }
6075 \f
6076 /* Subroutine of expand_expr: return nonzero iff there is no way that
6077 EXP can reference X, which is being modified. TOP_P is nonzero if this
6078 call is going to be used to determine whether we need a temporary
6079 for EXP, as opposed to a recursive call to this function.
6080
6081 It is always safe for this routine to return zero since it merely
6082 searches for optimization opportunities. */
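/* Illustrative example (editorial addition): when expanding an
   assignment such as `x = y + f (x)' where `x' lives in memory, code in
   this file asks, roughly, safe_from_p (DECL_RTL (x), <the RHS tree>, 1)
   to decide whether the right-hand side can be computed directly into
   `x's rtx.  Because the CALL_EXPR is assumed to clobber all of memory
   (see the CALL_EXPR case below), the answer is 0 and a temporary is
   used instead.  */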
6083
6084 int
6085 safe_from_p (rtx x, tree exp, int top_p)
6086 {
6087 rtx exp_rtl = 0;
6088 int i, nops;
6089 static tree save_expr_list;
6090
6091 if (x == 0
6092 /* If EXP has varying size, we MUST use a target since we currently
6093 have no way of allocating temporaries of variable size
6094 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6095 So we assume here that something at a higher level has prevented a
6096 clash. This is somewhat bogus, but the best we can do. Only
6097 do this when X is BLKmode and when we are at the top level. */
6098 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6099 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6100 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6101 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6102 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6103 != INTEGER_CST)
6104 && GET_MODE (x) == BLKmode)
6105 /* If X is in the outgoing argument area, it is always safe. */
6106 || (GET_CODE (x) == MEM
6107 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6108 || (GET_CODE (XEXP (x, 0)) == PLUS
6109 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6110 return 1;
6111
6112 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6113 find the underlying pseudo. */
6114 if (GET_CODE (x) == SUBREG)
6115 {
6116 x = SUBREG_REG (x);
6117 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6118 return 0;
6119 }
6120
6121 /* A SAVE_EXPR might appear many times in the expression passed to the
6122 top-level safe_from_p call, and if it has a complex subexpression,
6123 examining it multiple times could result in a combinatorial explosion.
6124 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
6125 with optimization took about 28 minutes to compile -- even though it was
6126 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
6127 and turn that off when we are done. We keep a list of the SAVE_EXPRs
6128 we have processed. Note that the only test of top_p was above. */
6129
6130 if (top_p)
6131 {
6132 int rtn;
6133 tree t;
6134
6135 save_expr_list = 0;
6136
6137 rtn = safe_from_p (x, exp, 0);
6138
6139 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
6140 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
6141
6142 return rtn;
6143 }
6144
6145 /* Now look at our tree code and possibly recurse. */
6146 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6147 {
6148 case 'd':
6149 exp_rtl = DECL_RTL_IF_SET (exp);
6150 break;
6151
6152 case 'c':
6153 return 1;
6154
6155 case 'x':
6156 if (TREE_CODE (exp) == TREE_LIST)
6157 {
6158 while (1)
6159 {
6160 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6161 return 0;
6162 exp = TREE_CHAIN (exp);
6163 if (!exp)
6164 return 1;
6165 if (TREE_CODE (exp) != TREE_LIST)
6166 return safe_from_p (x, exp, 0);
6167 }
6168 }
6169 else if (TREE_CODE (exp) == ERROR_MARK)
6170 return 1; /* An already-visited SAVE_EXPR? */
6171 else
6172 return 0;
6173
6174 case '2':
6175 case '<':
6176 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6177 return 0;
6178 /* FALLTHRU */
6179
6180 case '1':
6181 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6182
6183 case 'e':
6184 case 'r':
6185 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6186 the expression. If it is set, we conflict iff we are that rtx or
6187 both are in memory. Otherwise, we check all operands of the
6188 expression recursively. */
6189
6190 switch (TREE_CODE (exp))
6191 {
6192 case ADDR_EXPR:
6193 /* If the operand is static or we are static, we can't conflict.
6194 Likewise if we don't conflict with the operand at all. */
6195 if (staticp (TREE_OPERAND (exp, 0))
6196 || TREE_STATIC (exp)
6197 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6198 return 1;
6199
6200 /* Otherwise, the only way this can conflict is if we are taking
6201 the address of a DECL whose address is part of X, which is
6202 very rare. */
6203 exp = TREE_OPERAND (exp, 0);
6204 if (DECL_P (exp))
6205 {
6206 if (!DECL_RTL_SET_P (exp)
6207 || GET_CODE (DECL_RTL (exp)) != MEM)
6208 return 0;
6209 else
6210 exp_rtl = XEXP (DECL_RTL (exp), 0);
6211 }
6212 break;
6213
6214 case INDIRECT_REF:
6215 if (GET_CODE (x) == MEM
6216 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6217 get_alias_set (exp)))
6218 return 0;
6219 break;
6220
6221 case CALL_EXPR:
6222 /* Assume that the call will clobber all hard registers and
6223 all of memory. */
6224 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6225 || GET_CODE (x) == MEM)
6226 return 0;
6227 break;
6228
6229 case RTL_EXPR:
6230 /* If a sequence exists, we would have to scan every instruction
6231 in the sequence to see if it was safe. This is probably not
6232 worthwhile. */
6233 if (RTL_EXPR_SEQUENCE (exp))
6234 return 0;
6235
6236 exp_rtl = RTL_EXPR_RTL (exp);
6237 break;
6238
6239 case WITH_CLEANUP_EXPR:
6240 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
6241 break;
6242
6243 case CLEANUP_POINT_EXPR:
6244 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6245
6246 case SAVE_EXPR:
6247 exp_rtl = SAVE_EXPR_RTL (exp);
6248 if (exp_rtl)
6249 break;
6250
6251 /* If we've already scanned this, don't do it again. Otherwise,
6252 show we've scanned it and record for clearing the flag if we're
6253 going on. */
6254 if (TREE_PRIVATE (exp))
6255 return 1;
6256
6257 TREE_PRIVATE (exp) = 1;
6258 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6259 {
6260 TREE_PRIVATE (exp) = 0;
6261 return 0;
6262 }
6263
6264 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
6265 return 1;
6266
6267 case BIND_EXPR:
6268 /* The only operand we look at is operand 1. The rest aren't
6269 part of the expression. */
6270 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
6271
6272 default:
6273 break;
6274 }
6275
6276 /* If we have an rtx, we do not need to scan our operands. */
6277 if (exp_rtl)
6278 break;
6279
6280 nops = first_rtl_op (TREE_CODE (exp));
6281 for (i = 0; i < nops; i++)
6282 if (TREE_OPERAND (exp, i) != 0
6283 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6284 return 0;
6285
6286 /* If this is a language-specific tree code, it may require
6287 special handling. */
6288 if ((unsigned int) TREE_CODE (exp)
6289 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6290 && !(*lang_hooks.safe_from_p) (x, exp))
6291 return 0;
6292 }
6293
6294 /* If we have an rtl, find any enclosed object. Then see if we conflict
6295 with it. */
6296 if (exp_rtl)
6297 {
6298 if (GET_CODE (exp_rtl) == SUBREG)
6299 {
6300 exp_rtl = SUBREG_REG (exp_rtl);
6301 if (GET_CODE (exp_rtl) == REG
6302 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6303 return 0;
6304 }
6305
6306 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6307 are memory and they conflict. */
6308 return ! (rtx_equal_p (x, exp_rtl)
6309 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
6310 && true_dependence (exp_rtl, VOIDmode, x,
6311 rtx_addr_varies_p)));
6312 }
6313
6314 /* If we reach here, it is safe. */
6315 return 1;
6316 }
6317
6318 /* Subroutine of expand_expr: return rtx if EXP is a
6319 variable or parameter; else return 0. */
6320
6321 static rtx
6322 var_rtx (tree exp)
6323 {
6324 STRIP_NOPS (exp);
6325 switch (TREE_CODE (exp))
6326 {
6327 case PARM_DECL:
6328 case VAR_DECL:
6329 return DECL_RTL (exp);
6330 default:
6331 return 0;
6332 }
6333 }
6334
6335 #ifdef MAX_INTEGER_COMPUTATION_MODE
6336
6337 void
6338 check_max_integer_computation_mode (tree exp)
6339 {
6340 enum tree_code code;
6341 enum machine_mode mode;
6342
6343 /* Strip any NOPs that don't change the mode. */
6344 STRIP_NOPS (exp);
6345 code = TREE_CODE (exp);
6346
6347 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
6348 if (code == NOP_EXPR
6349 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6350 return;
6351
6352 /* First check the type of the overall operation. We need only look at
6353 unary, binary and relational operations. */
6354 if (TREE_CODE_CLASS (code) == '1'
6355 || TREE_CODE_CLASS (code) == '2'
6356 || TREE_CODE_CLASS (code) == '<')
6357 {
6358 mode = TYPE_MODE (TREE_TYPE (exp));
6359 if (GET_MODE_CLASS (mode) == MODE_INT
6360 && mode > MAX_INTEGER_COMPUTATION_MODE)
6361 internal_error ("unsupported wide integer operation");
6362 }
6363
6364 /* Check operand of a unary op. */
6365 if (TREE_CODE_CLASS (code) == '1')
6366 {
6367 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6368 if (GET_MODE_CLASS (mode) == MODE_INT
6369 && mode > MAX_INTEGER_COMPUTATION_MODE)
6370 internal_error ("unsupported wide integer operation");
6371 }
6372
6373 /* Check operands of a binary/comparison op. */
6374 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
6375 {
6376 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6377 if (GET_MODE_CLASS (mode) == MODE_INT
6378 && mode > MAX_INTEGER_COMPUTATION_MODE)
6379 internal_error ("unsupported wide integer operation");
6380
6381 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
6382 if (GET_MODE_CLASS (mode) == MODE_INT
6383 && mode > MAX_INTEGER_COMPUTATION_MODE)
6384 internal_error ("unsupported wide integer operation");
6385 }
6386 }
6387 #endif
6388 \f
6389 /* Return the highest power of two that EXP is known to be a multiple of.
6390 This is used in updating alignment of MEMs in array references. */
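/* Worked example (editorial addition): for EXP == `i * 24' the MULT_EXPR
   case below multiplies the factors of the operands: `i' contributes 1
   and the INTEGER_CST 24 contributes its lowest set bit, 8, so the
   result is 8.  A caller such as the store_constructor code above can
   then treat a MEM whose address includes this expression as 8-byte
   aligned (see the offset_address call with highest_pow2_factor).  */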
6391
6392 static unsigned HOST_WIDE_INT
6393 highest_pow2_factor (tree exp)
6394 {
6395 unsigned HOST_WIDE_INT c0, c1;
6396
6397 switch (TREE_CODE (exp))
6398 {
6399 case INTEGER_CST:
6400 /* We can find the lowest bit that's a one. If the low
6401 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6402 We need to handle this case since we can find it in a COND_EXPR,
6403 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6404 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6405 later ICE. */
6406 if (TREE_CONSTANT_OVERFLOW (exp))
6407 return BIGGEST_ALIGNMENT;
6408 else
6409 {
6410 /* Note: tree_low_cst is intentionally not used here,
6411 we don't care about the upper bits. */
6412 c0 = TREE_INT_CST_LOW (exp);
6413 c0 &= -c0;
6414 return c0 ? c0 : BIGGEST_ALIGNMENT;
6415 }
6416 break;
6417
6418 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6419 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6420 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6421 return MIN (c0, c1);
6422
6423 case MULT_EXPR:
6424 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6425 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6426 return c0 * c1;
6427
6428 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6429 case CEIL_DIV_EXPR:
6430 if (integer_pow2p (TREE_OPERAND (exp, 1))
6431 && host_integerp (TREE_OPERAND (exp, 1), 1))
6432 {
6433 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6434 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6435 return MAX (1, c0 / c1);
6436 }
6437 break;
6438
6439 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6440 case SAVE_EXPR: case WITH_RECORD_EXPR:
6441 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6442
6443 case COMPOUND_EXPR:
6444 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6445
6446 case COND_EXPR:
6447 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6448 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6449 return MIN (c0, c1);
6450
6451 default:
6452 break;
6453 }
6454
6455 return 1;
6456 }
6457
6458 /* Similar, except that it is known that the expression must be a multiple
6459 of the alignment of TYPE. */
6460
6461 static unsigned HOST_WIDE_INT
6462 highest_pow2_factor_for_type (tree type, tree exp)
6463 {
6464 unsigned HOST_WIDE_INT type_align, factor;
6465
6466 factor = highest_pow2_factor (exp);
6467 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6468 return MAX (factor, type_align);
6469 }
6470 \f
6471 /* Return an object on the placeholder list that matches EXP, a
6472 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6473 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6474 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6475 is a location which initially points to a starting location in the
6476 placeholder list (zero means start of the list) and where a pointer into
6477 the placeholder list at which the object is found is placed. */
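/* Illustrative note (editorial addition): PLACEHOLDER_EXPRs arise from
   self-referential types, e.g. an Ada record whose array component's
   bound is another field of the same record.  When a size expression
   containing such a placeholder is expanded, an enclosing
   WITH_RECORD_EXPR has pushed the actual record object onto
   placeholder_list, and this routine digs the matching object (or a
   pointer to it) back out so the size can be computed for that
   particular object.  */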
6478
6479 tree
6480 find_placeholder (tree exp, tree *plist)
6481 {
6482 tree type = TREE_TYPE (exp);
6483 tree placeholder_expr;
6484
6485 for (placeholder_expr
6486 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6487 placeholder_expr != 0;
6488 placeholder_expr = TREE_CHAIN (placeholder_expr))
6489 {
6490 tree need_type = TYPE_MAIN_VARIANT (type);
6491 tree elt;
6492
6493 /* Find the outermost reference that is of the type we want. If none,
6494 see if any object has a type that is a pointer to the type we
6495 want. */
6496 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6497 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6498 || TREE_CODE (elt) == COND_EXPR)
6499 ? TREE_OPERAND (elt, 1)
6500 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6501 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6502 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6503 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6504 ? TREE_OPERAND (elt, 0) : 0))
6505 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6506 {
6507 if (plist)
6508 *plist = placeholder_expr;
6509 return elt;
6510 }
6511
6512 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6513 elt
6514 = ((TREE_CODE (elt) == COMPOUND_EXPR
6515 || TREE_CODE (elt) == COND_EXPR)
6516 ? TREE_OPERAND (elt, 1)
6517 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6518 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6519 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6520 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6521 ? TREE_OPERAND (elt, 0) : 0))
6522 if (POINTER_TYPE_P (TREE_TYPE (elt))
6523 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6524 == need_type))
6525 {
6526 if (plist)
6527 *plist = placeholder_expr;
6528 return build1 (INDIRECT_REF, need_type, elt);
6529 }
6530 }
6531
6532 return 0;
6533 }
6534 \f
6535 /* expand_expr: generate code for computing expression EXP.
6536 An rtx for the computed value is returned. The value is never null.
6537 In the case of a void EXP, const0_rtx is returned.
6538
6539 The value may be stored in TARGET if TARGET is nonzero.
6540 TARGET is just a suggestion; callers must assume that
6541 the rtx returned may not be the same as TARGET.
6542
6543 If TARGET is CONST0_RTX, it means that the value will be ignored.
6544
6545 If TMODE is not VOIDmode, it suggests generating the
6546 result in mode TMODE. But this is done only when convenient.
6547 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6548 TMODE is just a suggestion; callers must assume that
6549 the rtx returned may not have mode TMODE.
6550
6551 Note that TARGET may have neither TMODE nor MODE. In that case, it
6552 probably will not be used.
6553
6554 If MODIFIER is EXPAND_SUM then when EXP is an addition
6555 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6556 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6557 products as above, or REG or MEM, or constant.
6558 Ordinarily in such cases we would output mul or add instructions
6559 and then return a pseudo reg containing the sum.
6560
6561 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6562 it also marks a label as absolutely required (it can't be dead).
6563 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6564 This is used for outputting expressions used in initializers.
6565
6566 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6567 with a constant address even if that address is not normally legitimate.
6568 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6569
6570 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6571 a call parameter. Such targets require special care as we haven't yet
6572 marked TARGET so that it's safe from being trashed by libcalls. We
6573 don't want to use TARGET for anything but the final result;
6574 intermediate values must go elsewhere. Additionally, calls to
6575 emit_block_move will be flagged with BLOCK_OP_CALL_PARM. */
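/* Illustrative usage (editorial sketch): the most common pattern in the
   rest of the compiler is simply

       rtx val = expand_expr (exp, NULL_RTX, VOIDmode, 0);

   i.e. no suggested target, no suggested mode, and the default modifier
   (EXPAND_NORMAL), after which VAL may be a REG, MEM, or constant in
   EXP's natural mode.  The EXPAND_SUM, EXPAND_INITIALIZER,
   EXPAND_CONST_ADDRESS and EXPAND_STACK_PARM modifiers described above
   are used only by callers prepared for the looser forms they permit.  */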
6576
6577 rtx
6578 expand_expr (tree exp, rtx target, enum machine_mode tmode, enum expand_modifier modifier)
6579 {
6580 rtx op0, op1, temp;
6581 tree type = TREE_TYPE (exp);
6582 int unsignedp = TREE_UNSIGNED (type);
6583 enum machine_mode mode;
6584 enum tree_code code = TREE_CODE (exp);
6585 optab this_optab;
6586 rtx subtarget, original_target;
6587 int ignore;
6588 tree context;
6589
6590 /* Handle ERROR_MARK before anybody tries to access its type. */
6591 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6592 {
6593 op0 = CONST0_RTX (tmode);
6594 if (op0 != 0)
6595 return op0;
6596 return const0_rtx;
6597 }
6598
6599 mode = TYPE_MODE (type);
6600 /* Use subtarget as the target for operand 0 of a binary operation. */
6601 subtarget = get_subtarget (target);
6602 original_target = target;
6603 ignore = (target == const0_rtx
6604 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6605 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6606 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6607 && TREE_CODE (type) == VOID_TYPE));
6608
6609 /* If we are going to ignore this result, we need only do something
6610 if there is a side-effect somewhere in the expression. If there
6611 is, short-circuit the most common cases here. Note that we must
6612 not call expand_expr with anything but const0_rtx in case this
6613 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6614
6615 if (ignore)
6616 {
6617 if (! TREE_SIDE_EFFECTS (exp))
6618 return const0_rtx;
6619
6620 /* Ensure we reference a volatile object even if value is ignored, but
6621 don't do this if all we are doing is taking its address. */
6622 if (TREE_THIS_VOLATILE (exp)
6623 && TREE_CODE (exp) != FUNCTION_DECL
6624 && mode != VOIDmode && mode != BLKmode
6625 && modifier != EXPAND_CONST_ADDRESS)
6626 {
6627 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6628 if (GET_CODE (temp) == MEM)
6629 temp = copy_to_reg (temp);
6630 return const0_rtx;
6631 }
6632
6633 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6634 || code == INDIRECT_REF || code == BUFFER_REF)
6635 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6636 modifier);
6637
6638 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6639 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6640 {
6641 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6642 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6643 return const0_rtx;
6644 }
6645 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6646 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6647 /* If the second operand has no side effects, just evaluate
6648 the first. */
6649 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6650 modifier);
6651 else if (code == BIT_FIELD_REF)
6652 {
6653 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6654 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6655 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6656 return const0_rtx;
6657 }
6658
6659 target = 0;
6660 }
6661
6662 #ifdef MAX_INTEGER_COMPUTATION_MODE
6663 /* Only check stuff here if the mode we want is different from the mode
6664 of the expression; if it's the same, check_max_integer_computation_mode
6665 will handle it. Do we really need to check this stuff at all? */
6666
6667 if (target
6668 && GET_MODE (target) != mode
6669 && TREE_CODE (exp) != INTEGER_CST
6670 && TREE_CODE (exp) != PARM_DECL
6671 && TREE_CODE (exp) != ARRAY_REF
6672 && TREE_CODE (exp) != ARRAY_RANGE_REF
6673 && TREE_CODE (exp) != COMPONENT_REF
6674 && TREE_CODE (exp) != BIT_FIELD_REF
6675 && TREE_CODE (exp) != INDIRECT_REF
6676 && TREE_CODE (exp) != CALL_EXPR
6677 && TREE_CODE (exp) != VAR_DECL
6678 && TREE_CODE (exp) != RTL_EXPR)
6679 {
6680 enum machine_mode mode = GET_MODE (target);
6681
6682 if (GET_MODE_CLASS (mode) == MODE_INT
6683 && mode > MAX_INTEGER_COMPUTATION_MODE)
6684 internal_error ("unsupported wide integer operation");
6685 }
6686
6687 if (tmode != mode
6688 && TREE_CODE (exp) != INTEGER_CST
6689 && TREE_CODE (exp) != PARM_DECL
6690 && TREE_CODE (exp) != ARRAY_REF
6691 && TREE_CODE (exp) != ARRAY_RANGE_REF
6692 && TREE_CODE (exp) != COMPONENT_REF
6693 && TREE_CODE (exp) != BIT_FIELD_REF
6694 && TREE_CODE (exp) != INDIRECT_REF
6695 && TREE_CODE (exp) != VAR_DECL
6696 && TREE_CODE (exp) != CALL_EXPR
6697 && TREE_CODE (exp) != RTL_EXPR
6698 && GET_MODE_CLASS (tmode) == MODE_INT
6699 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6700 internal_error ("unsupported wide integer operation");
6701
6702 check_max_integer_computation_mode (exp);
6703 #endif
6704
6705 /* If we will do cse, generate all results into pseudo registers
6706 since 1) that allows cse to find more things
6707 and 2) otherwise cse could produce an insn the machine
6708 cannot support. An exception is a CONSTRUCTOR into a multi-word
6709 MEM: that's much more likely to be most efficient into the MEM.
6710 Another is a CALL_EXPR which must return in memory. */
6711
6712 if (! cse_not_expected && mode != BLKmode && target
6713 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6714 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6715 && ! (code == CALL_EXPR && aggregate_value_p (exp)))
6716 target = 0;
6717
6718 switch (code)
6719 {
6720 case LABEL_DECL:
6721 {
6722 tree function = decl_function_context (exp);
6723 /* Labels in containing functions, or labels used from initializers,
6724 must be forced. */
6725 if (modifier == EXPAND_INITIALIZER
6726 || (function != current_function_decl
6727 && function != inline_function_decl
6728 && function != 0))
6729 temp = force_label_rtx (exp);
6730 else
6731 temp = label_rtx (exp);
6732
6733 temp = gen_rtx_MEM (FUNCTION_MODE, gen_rtx_LABEL_REF (Pmode, temp));
6734 if (function != current_function_decl
6735 && function != inline_function_decl && function != 0)
6736 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6737 return temp;
6738 }
6739
6740 case PARM_DECL:
6741 if (!DECL_RTL_SET_P (exp))
6742 {
6743 error ("%Hprior parameter's size depends on '%D'",
6744 &DECL_SOURCE_LOCATION (exp), exp);
6745 return CONST0_RTX (mode);
6746 }
6747
6748 /* ... fall through ... */
6749
6750 case VAR_DECL:
6751 /* If a static var's type was incomplete when the decl was written,
6752 but the type is complete now, lay out the decl now. */
6753 if (DECL_SIZE (exp) == 0
6754 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6755 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6756 layout_decl (exp, 0);
6757
6758 /* ... fall through ... */
6759
6760 case FUNCTION_DECL:
6761 case RESULT_DECL:
6762 if (DECL_RTL (exp) == 0)
6763 abort ();
6764
6765 /* Ensure the variable is marked as used even if it doesn't go through
6766 a parser. If it hasn't been used yet, write out an external
6767 definition. */
6768 if (! TREE_USED (exp))
6769 {
6770 assemble_external (exp);
6771 TREE_USED (exp) = 1;
6772 }
6773
6774 /* Show we haven't gotten RTL for this yet. */
6775 temp = 0;
6776
6777 /* Handle variables inherited from containing functions. */
6778 context = decl_function_context (exp);
6779
6780 /* We treat inline_function_decl as an alias for the current function
6781 because that is the inline function whose vars, types, etc.
6782 are being merged into the current function.
6783 See expand_inline_function. */
6784
6785 if (context != 0 && context != current_function_decl
6786 && context != inline_function_decl
6787 /* If var is static, we don't need a static chain to access it. */
6788 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6789 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6790 {
6791 rtx addr;
6792
6793 /* Mark as non-local and addressable. */
6794 DECL_NONLOCAL (exp) = 1;
6795 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6796 abort ();
6797 (*lang_hooks.mark_addressable) (exp);
6798 if (GET_CODE (DECL_RTL (exp)) != MEM)
6799 abort ();
6800 addr = XEXP (DECL_RTL (exp), 0);
6801 if (GET_CODE (addr) == MEM)
6802 addr
6803 = replace_equiv_address (addr,
6804 fix_lexical_addr (XEXP (addr, 0), exp));
6805 else
6806 addr = fix_lexical_addr (addr, exp);
6807
6808 temp = replace_equiv_address (DECL_RTL (exp), addr);
6809 }
6810
6811 /* This is the case of an array whose size is to be determined
6812 from its initializer, while the initializer is still being parsed.
6813 See expand_decl. */
6814
6815 else if (GET_CODE (DECL_RTL (exp)) == MEM
6816 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6817 temp = validize_mem (DECL_RTL (exp));
6818
6819 /* If DECL_RTL is memory, we are in the normal case and either
6820 the address is not valid or it is not a register and -fforce-addr
6821 is specified, get the address into a register. */
6822
6823 else if (GET_CODE (DECL_RTL (exp)) == MEM
6824 && modifier != EXPAND_CONST_ADDRESS
6825 && modifier != EXPAND_SUM
6826 && modifier != EXPAND_INITIALIZER
6827 && (! memory_address_p (DECL_MODE (exp),
6828 XEXP (DECL_RTL (exp), 0))
6829 || (flag_force_addr
6830 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6831 temp = replace_equiv_address (DECL_RTL (exp),
6832 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6833
6834 /* If we got something, return it. But first, set the alignment
6835 if the address is a register. */
6836 if (temp != 0)
6837 {
6838 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6839 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6840
6841 return temp;
6842 }
6843
6844 /* If the mode of DECL_RTL does not match that of the decl, it
6845 must be a promoted value. We return a SUBREG of the wanted mode,
6846 but mark it so that we know that it was already extended. */
6847
6848 if (GET_CODE (DECL_RTL (exp)) == REG
6849 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6850 {
6851 /* Get the signedness used for this variable. Ensure we get the
6852 same mode we got when the variable was declared. */
6853 if (GET_MODE (DECL_RTL (exp))
6854 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6855 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6856 abort ();
6857
6858 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6859 SUBREG_PROMOTED_VAR_P (temp) = 1;
6860 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6861 return temp;
6862 }
6863
6864 return DECL_RTL (exp);
6865
6866 case INTEGER_CST:
6867 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6868 TREE_INT_CST_HIGH (exp), mode);
6869
6870 /* ??? If overflow is set, fold will have done an incomplete job,
6871 which can result in (plus xx (const_int 0)), which can get
6872 simplified by validate_replace_rtx during virtual register
6873 instantiation, which can result in unrecognizable insns.
6874 Avoid this by forcing all overflows into registers. */
6875 if (TREE_CONSTANT_OVERFLOW (exp)
6876 && modifier != EXPAND_INITIALIZER)
6877 temp = force_reg (mode, temp);
6878
6879 return temp;
6880
6881 case VECTOR_CST:
6882 return const_vector_from_tree (exp);
6883
6884 case CONST_DECL:
6885 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6886
6887 case REAL_CST:
6888 /* If optimized, generate immediate CONST_DOUBLE
6889 which will be turned into memory by reload if necessary.
6890
6891 We used to force a register so that loop.c could see it. But
6892 this does not allow gen_* patterns to perform optimizations with
6893 the constants. It also produces two insns in cases like "x = 1.0;".
6894 On most machines, floating-point constants are not permitted in
6895 many insns, so we'd end up copying it to a register in any case.
6896
6897 Now, we do the copying in expand_binop, if appropriate. */
6898 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6899 TYPE_MODE (TREE_TYPE (exp)));
6900
6901 case COMPLEX_CST:
6902 /* Handle evaluating a complex constant in a CONCAT target. */
6903 if (original_target && GET_CODE (original_target) == CONCAT)
6904 {
6905 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6906 rtx rtarg, itarg;
6907
6908 rtarg = XEXP (original_target, 0);
6909 itarg = XEXP (original_target, 1);
6910
6911 /* Move the real and imaginary parts separately. */
6912 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6913 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6914
6915 if (op0 != rtarg)
6916 emit_move_insn (rtarg, op0);
6917 if (op1 != itarg)
6918 emit_move_insn (itarg, op1);
6919
6920 return original_target;
6921 }
6922
6923 /* ... fall through ... */
6924
6925 case STRING_CST:
6926 temp = output_constant_def (exp, 1);
6927
6928 /* temp contains a constant address.
6929 On RISC machines where a constant address isn't valid,
6930 make some insns to get that address into a register. */
6931 if (modifier != EXPAND_CONST_ADDRESS
6932 && modifier != EXPAND_INITIALIZER
6933 && modifier != EXPAND_SUM
6934 && (! memory_address_p (mode, XEXP (temp, 0))
6935 || flag_force_addr))
6936 return replace_equiv_address (temp,
6937 copy_rtx (XEXP (temp, 0)));
6938 return temp;
6939
6940 case EXPR_WITH_FILE_LOCATION:
6941 {
6942 rtx to_return;
6943 location_t saved_loc = input_location;
6944 input_filename = EXPR_WFL_FILENAME (exp);
6945 input_line = EXPR_WFL_LINENO (exp);
6946 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6947 emit_line_note (input_location);
6948 /* Possibly avoid switching back and forth here. */
6949 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6950 input_location = saved_loc;
6951 return to_return;
6952 }
6953
6954 case SAVE_EXPR:
6955 context = decl_function_context (exp);
6956
6957 /* If this SAVE_EXPR was at global context, assume we are an
6958 initialization function and move it into our context. */
6959 if (context == 0)
6960 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6961
6962 /* We treat inline_function_decl as an alias for the current function
6963 because that is the inline function whose vars, types, etc.
6964 are being merged into the current function.
6965 See expand_inline_function. */
6966 if (context == current_function_decl || context == inline_function_decl)
6967 context = 0;
6968
6969 /* If this is non-local, handle it. */
6970 if (context)
6971 {
6972 /* The following call just exists to abort if the context is
6973 not of a containing function. */
6974 find_function_data (context);
6975
6976 temp = SAVE_EXPR_RTL (exp);
6977 if (temp && GET_CODE (temp) == REG)
6978 {
6979 put_var_into_stack (exp, /*rescan=*/true);
6980 temp = SAVE_EXPR_RTL (exp);
6981 }
6982 if (temp == 0 || GET_CODE (temp) != MEM)
6983 abort ();
6984 return
6985 replace_equiv_address (temp,
6986 fix_lexical_addr (XEXP (temp, 0), exp));
6987 }
6988 if (SAVE_EXPR_RTL (exp) == 0)
6989 {
6990 if (mode == VOIDmode)
6991 temp = const0_rtx;
6992 else
6993 temp = assign_temp (build_qualified_type (type,
6994 (TYPE_QUALS (type)
6995 | TYPE_QUAL_CONST)),
6996 3, 0, 0);
6997
6998 SAVE_EXPR_RTL (exp) = temp;
6999 if (!optimize && GET_CODE (temp) == REG)
7000 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
7001 save_expr_regs);
7002
7003 /* If the mode of TEMP does not match that of the expression, it
7004 must be a promoted value. We pass store_expr a SUBREG of the
7005 wanted mode but mark it so that we know that it was already
7006 extended. */
7007
7008 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
7009 {
7010 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
7011 promote_mode (type, mode, &unsignedp, 0);
7012 SUBREG_PROMOTED_VAR_P (temp) = 1;
7013 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
7014 }
7015
7016 if (temp == const0_rtx)
7017 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7018 else
7019 store_expr (TREE_OPERAND (exp, 0), temp,
7020 modifier == EXPAND_STACK_PARM ? 2 : 0);
7021
7022 TREE_USED (exp) = 1;
7023 }
7024
7025 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
7026 must be a promoted value. We return a SUBREG of the wanted mode,
7027 but mark it so that we know that it was already extended. */
7028
7029 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
7030 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
7031 {
7032 /* Compute the signedness and make the proper SUBREG. */
7033 promote_mode (type, mode, &unsignedp, 0);
7034 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
7035 SUBREG_PROMOTED_VAR_P (temp) = 1;
7036 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
7037 return temp;
7038 }
7039
7040 return SAVE_EXPR_RTL (exp);
7041
7042 case UNSAVE_EXPR:
7043 {
7044 rtx temp;
7045 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7046 TREE_OPERAND (exp, 0)
7047 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
7048 return temp;
7049 }
7050
7051 case PLACEHOLDER_EXPR:
7052 {
7053 tree old_list = placeholder_list;
7054 tree placeholder_expr = 0;
7055
7056 exp = find_placeholder (exp, &placeholder_expr);
7057 if (exp == 0)
7058 abort ();
7059
7060 placeholder_list = TREE_CHAIN (placeholder_expr);
7061 temp = expand_expr (exp, original_target, tmode, modifier);
7062 placeholder_list = old_list;
7063 return temp;
7064 }
7065
7066 case WITH_RECORD_EXPR:
7067 /* Put the object on the placeholder list, expand our first operand,
7068 and pop the list. */
7069 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
7070 placeholder_list);
7071 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
7072 modifier);
7073 placeholder_list = TREE_CHAIN (placeholder_list);
7074 return target;
7075
7076 case GOTO_EXPR:
7077 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
7078 expand_goto (TREE_OPERAND (exp, 0));
7079 else
7080 expand_computed_goto (TREE_OPERAND (exp, 0));
7081 return const0_rtx;
7082
7083 case EXIT_EXPR:
7084 expand_exit_loop_if_false (NULL,
7085 invert_truthvalue (TREE_OPERAND (exp, 0)));
7086 return const0_rtx;
7087
7088 case LABELED_BLOCK_EXPR:
7089 if (LABELED_BLOCK_BODY (exp))
7090 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
7091 /* Should perhaps use expand_label, but this is simpler and safer. */
7092 do_pending_stack_adjust ();
7093 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
7094 return const0_rtx;
7095
7096 case EXIT_BLOCK_EXPR:
7097 if (EXIT_BLOCK_RETURN (exp))
7098 sorry ("returned value in block_exit_expr");
7099 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
7100 return const0_rtx;
7101
7102 case LOOP_EXPR:
7103 push_temp_slots ();
7104 expand_start_loop (1);
7105 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
7106 expand_end_loop ();
7107 pop_temp_slots ();
7108
7109 return const0_rtx;
7110
7111 case BIND_EXPR:
7112 {
7113 tree vars = TREE_OPERAND (exp, 0);
7114
7115 /* Need to open a binding contour here because
7116 if there are any cleanups they must be contained here. */
7117 expand_start_bindings (2);
7118
7119 /* Mark the corresponding BLOCK for output in its proper place. */
7120 if (TREE_OPERAND (exp, 2) != 0
7121 && ! TREE_USED (TREE_OPERAND (exp, 2)))
7122 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
7123
7124 /* If VARS have not yet been expanded, expand them now. */
7125 while (vars)
7126 {
7127 if (!DECL_RTL_SET_P (vars))
7128 expand_decl (vars);
7129 expand_decl_init (vars);
7130 vars = TREE_CHAIN (vars);
7131 }
7132
7133 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
7134
7135 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
7136
7137 return temp;
7138 }
7139
7140 case RTL_EXPR:
7141 if (RTL_EXPR_SEQUENCE (exp))
7142 {
7143 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
7144 abort ();
7145 emit_insn (RTL_EXPR_SEQUENCE (exp));
7146 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
7147 }
7148 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
7149 free_temps_for_rtl_expr (exp);
7150 return RTL_EXPR_RTL (exp);
7151
7152 case CONSTRUCTOR:
7153 /* If we don't need the result, just ensure we evaluate any
7154 subexpressions. */
7155 if (ignore)
7156 {
7157 tree elt;
7158
7159 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
7160 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
7161
7162 return const0_rtx;
7163 }
7164
7165 /* All elts simple constants => refer to a constant in memory. But
7166 if this is a non-BLKmode mode, let it store a field at a time
7167 since that should make a CONST_INT or CONST_DOUBLE when we
7168 fold. Likewise, if we have a target we can use, it is best to
7169 store directly into the target unless the type is large enough
7170 that memcpy will be used. If we are making an initializer and
7171 all operands are constant, put it in memory as well.
7172
7173 FIXME: Avoid trying to fill vector constructors piece-meal.
7174 Output them with output_constant_def below unless we're sure
7175 they're zeros. This should go away when vector initializers
7176 are treated like VECTOR_CST instead of arrays.
7177 */
7178 else if ((TREE_STATIC (exp)
7179 && ((mode == BLKmode
7180 && ! (target != 0 && safe_from_p (target, exp, 1)))
7181 || TREE_ADDRESSABLE (exp)
7182 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7183 && (! MOVE_BY_PIECES_P
7184 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7185 TYPE_ALIGN (type)))
7186 && ((TREE_CODE (type) == VECTOR_TYPE
7187 && !is_zeros_p (exp))
7188 || ! mostly_zeros_p (exp)))))
7189 || ((modifier == EXPAND_INITIALIZER
7190 || modifier == EXPAND_CONST_ADDRESS)
7191 && TREE_CONSTANT (exp)))
7192 {
7193 rtx constructor = output_constant_def (exp, 1);
7194
7195 if (modifier != EXPAND_CONST_ADDRESS
7196 && modifier != EXPAND_INITIALIZER
7197 && modifier != EXPAND_SUM)
7198 constructor = validize_mem (constructor);
7199
7200 return constructor;
7201 }
7202 else
7203 {
7204 /* Handle calls that pass values in multiple non-contiguous
7205 locations. The Irix 6 ABI has examples of this. */
7206 if (target == 0 || ! safe_from_p (target, exp, 1)
7207 || GET_CODE (target) == PARALLEL
7208 || modifier == EXPAND_STACK_PARM)
7209 target
7210 = assign_temp (build_qualified_type (type,
7211 (TYPE_QUALS (type)
7212 | (TREE_READONLY (exp)
7213 * TYPE_QUAL_CONST))),
7214 0, TREE_ADDRESSABLE (exp), 1);
7215
7216 store_constructor (exp, target, 0, int_expr_size (exp));
7217 return target;
7218 }
7219
7220 case INDIRECT_REF:
7221 {
7222 tree exp1 = TREE_OPERAND (exp, 0);
7223 tree index;
7224 tree string = string_constant (exp1, &index);
7225
7226 /* Try to optimize reads from const strings. */
7227 if (string
7228 && TREE_CODE (string) == STRING_CST
7229 && TREE_CODE (index) == INTEGER_CST
7230 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
7231 && GET_MODE_CLASS (mode) == MODE_INT
7232 && GET_MODE_SIZE (mode) == 1
7233 && modifier != EXPAND_WRITE)
7234 return gen_int_mode (TREE_STRING_POINTER (string)
7235 [TREE_INT_CST_LOW (index)], mode);
7236
7237 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7238 op0 = memory_address (mode, op0);
7239 temp = gen_rtx_MEM (mode, op0);
7240 set_mem_attributes (temp, exp, 0);
7241
7242 /* If we are writing to this object and its type is a record with
7243 readonly fields, we must mark it as readonly so it will
7244 conflict with readonly references to those fields. */
7245 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
7246 RTX_UNCHANGING_P (temp) = 1;
7247
7248 return temp;
7249 }
7250
7251 case ARRAY_REF:
7252 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
7253 abort ();
7254
7255 {
7256 tree array = TREE_OPERAND (exp, 0);
7257 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
7258 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
7259 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
7260 HOST_WIDE_INT i;
7261
7262 /* Optimize the special-case of a zero lower bound.
7263
7264 We convert the low_bound to sizetype to avoid some problems
7265 with constant folding. (E.g. suppose the lower bound is 1,
7266 and its mode is QI. Without the conversion, (ARRAY
7267 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7268 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7269
7270 if (! integer_zerop (low_bound))
7271 index = size_diffop (index, convert (sizetype, low_bound));
7272
7273 /* Fold an expression like: "foo"[2].
7274 This is not done in fold so it won't happen inside &.
7275 Don't fold if this is for wide characters since it's too
7276 difficult to do correctly and this is a very rare case. */
7277
7278 if (modifier != EXPAND_CONST_ADDRESS
7279 && modifier != EXPAND_INITIALIZER
7280 && modifier != EXPAND_MEMORY
7281 && TREE_CODE (array) == STRING_CST
7282 && TREE_CODE (index) == INTEGER_CST
7283 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
7284 && GET_MODE_CLASS (mode) == MODE_INT
7285 && GET_MODE_SIZE (mode) == 1)
7286 return gen_int_mode (TREE_STRING_POINTER (array)
7287 [TREE_INT_CST_LOW (index)], mode);
7288
7289 /* If this is a constant index into a constant array,
7290 just get the value from the array. Handle both the cases when
7291 we have an explicit constructor and when our operand is a variable
7292 that was declared const. */
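/* For illustration (hypothetical declaration): given
static const int tbl[3] = { 1, 2, 3 };
a use of tbl[1] can be expanded directly to the constant 2 when the
conditions checked below hold. */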
7293
7294 if (modifier != EXPAND_CONST_ADDRESS
7295 && modifier != EXPAND_INITIALIZER
7296 && modifier != EXPAND_MEMORY
7297 && TREE_CODE (array) == CONSTRUCTOR
7298 && ! TREE_SIDE_EFFECTS (array)
7299 && TREE_CODE (index) == INTEGER_CST
7300 && 0 > compare_tree_int (index,
7301 list_length (CONSTRUCTOR_ELTS
7302 (TREE_OPERAND (exp, 0)))))
7303 {
7304 tree elem;
7305
7306 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7307 i = TREE_INT_CST_LOW (index);
7308 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
7309 ;
7310
7311 if (elem)
7312 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
7313 modifier);
7314 }
7315
7316 else if (optimize >= 1
7317 && modifier != EXPAND_CONST_ADDRESS
7318 && modifier != EXPAND_INITIALIZER
7319 && modifier != EXPAND_MEMORY
7320 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7321 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7322 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
7323 {
7324 if (TREE_CODE (index) == INTEGER_CST)
7325 {
7326 tree init = DECL_INITIAL (array);
7327
7328 if (TREE_CODE (init) == CONSTRUCTOR)
7329 {
7330 tree elem;
7331
7332 for (elem = CONSTRUCTOR_ELTS (init);
7333 (elem
7334 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
7335 elem = TREE_CHAIN (elem))
7336 ;
7337
7338 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
7339 return expand_expr (fold (TREE_VALUE (elem)), target,
7340 tmode, modifier);
7341 }
7342 else if (TREE_CODE (init) == STRING_CST
7343 && 0 > compare_tree_int (index,
7344 TREE_STRING_LENGTH (init)))
7345 {
7346 tree type = TREE_TYPE (TREE_TYPE (init));
7347 enum machine_mode mode = TYPE_MODE (type);
7348
7349 if (GET_MODE_CLASS (mode) == MODE_INT
7350 && GET_MODE_SIZE (mode) == 1)
7351 return gen_int_mode (TREE_STRING_POINTER (init)
7352 [TREE_INT_CST_LOW (index)], mode);
7353 }
7354 }
7355 }
7356 }
7357 goto normal_inner_ref;
7358
7359 case COMPONENT_REF:
7360 /* If the operand is a CONSTRUCTOR, we can just extract the
7361 appropriate field if it is present. */
7362 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7363 {
7364 tree elt;
7365
7366 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
7367 elt = TREE_CHAIN (elt))
7368 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
7369 /* We can normally use the value of the field in the
7370 CONSTRUCTOR. However, if this is a bitfield in
7371 an integral mode that we can fit in a HOST_WIDE_INT,
7372 we must mask only the number of bits in the bitfield,
7373 since this is done implicitly by the constructor. If
7374 the bitfield does not meet either of those conditions,
7375 we can't do this optimization. */
7376 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
7377 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
7378 == MODE_INT)
7379 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
7380 <= HOST_BITS_PER_WIDE_INT))))
7381 {
7382 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
7383 && modifier == EXPAND_STACK_PARM)
7384 target = 0;
7385 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
7386 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
7387 {
7388 HOST_WIDE_INT bitsize
7389 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
7390 enum machine_mode imode
7391 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
7392
7393 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
7394 {
7395 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7396 op0 = expand_and (imode, op0, op1, target);
7397 }
7398 else
7399 {
7400 tree count
7401 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7402 0);
7403
7404 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7405 target, 0);
7406 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7407 target, 0);
7408 }
7409 }
7410
7411 return op0;
7412 }
7413 }
7414 goto normal_inner_ref;
7415
7416 case BIT_FIELD_REF:
7417 case ARRAY_RANGE_REF:
7418 normal_inner_ref:
7419 {
7420 enum machine_mode mode1;
7421 HOST_WIDE_INT bitsize, bitpos;
7422 tree offset;
7423 int volatilep = 0;
7424 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7425 &mode1, &unsignedp, &volatilep);
7426 rtx orig_op0;
7427
7428 /* If we got back the original object, something is wrong. Perhaps
7429 we are evaluating an expression too early. In any event, don't
7430 infinitely recurse. */
7431 if (tem == exp)
7432 abort ();
7433
7434 /* If TEM's type is a union of variable size, pass TARGET to the inner
7435 computation, since it will need a temporary and TARGET is known
7436 to suffice. This occurs in unchecked conversion in Ada. */
7437
7438 orig_op0 = op0
7439 = expand_expr (tem,
7440 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7441 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7442 != INTEGER_CST)
7443 && modifier != EXPAND_STACK_PARM
7444 ? target : NULL_RTX),
7445 VOIDmode,
7446 (modifier == EXPAND_INITIALIZER
7447 || modifier == EXPAND_CONST_ADDRESS
7448 || modifier == EXPAND_STACK_PARM)
7449 ? modifier : EXPAND_NORMAL);
7450
7451 /* If this is a constant, put it into a register if it is a
7452 legitimate constant and OFFSET is 0; otherwise put it in memory. */
7453 if (CONSTANT_P (op0))
7454 {
7455 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7456 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7457 && offset == 0)
7458 op0 = force_reg (mode, op0);
7459 else
7460 op0 = validize_mem (force_const_mem (mode, op0));
7461 }
7462
7463 /* Otherwise, if this object is not in memory and we either have an
7464 offset or a BLKmode result, put it there. This case can't occur in
7465 C, but can in Ada if we have unchecked conversion of an expression
7466 from a scalar type to an array or record type or for an
7467 ARRAY_RANGE_REF whose type is BLKmode. */
7468 else if (GET_CODE (op0) != MEM
7469 && (offset != 0
7470 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7471 {
7472 /* If the operand is a SAVE_EXPR, we can deal with this by
7473 forcing the SAVE_EXPR into memory. */
7474 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7475 {
7476 put_var_into_stack (TREE_OPERAND (exp, 0),
7477 /*rescan=*/true);
7478 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7479 }
7480 else
7481 {
7482 tree nt
7483 = build_qualified_type (TREE_TYPE (tem),
7484 (TYPE_QUALS (TREE_TYPE (tem))
7485 | TYPE_QUAL_CONST));
7486 rtx memloc = assign_temp (nt, 1, 1, 1);
7487
7488 emit_move_insn (memloc, op0);
7489 op0 = memloc;
7490 }
7491 }
7492
7493 if (offset != 0)
7494 {
7495 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7496 EXPAND_SUM);
7497
7498 if (GET_CODE (op0) != MEM)
7499 abort ();
7500
7501 #ifdef POINTERS_EXTEND_UNSIGNED
7502 if (GET_MODE (offset_rtx) != Pmode)
7503 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7504 #else
7505 if (GET_MODE (offset_rtx) != ptr_mode)
7506 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7507 #endif
7508
7509 /* A constant address in OP0 can have VOIDmode; we must not try
7510 to call force_reg in that case, so avoid it. */
7511 if (GET_CODE (op0) == MEM
7512 && GET_MODE (op0) == BLKmode
7513 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7514 && bitsize != 0
7515 && (bitpos % bitsize) == 0
7516 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7517 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7518 {
7519 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7520 bitpos = 0;
7521 }
7522
7523 op0 = offset_address (op0, offset_rtx,
7524 highest_pow2_factor (offset));
7525 }
7526
7527 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7528 record its alignment as BIGGEST_ALIGNMENT. */
7529 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7530 && is_aligning_offset (offset, tem))
7531 set_mem_align (op0, BIGGEST_ALIGNMENT);
7532
7533 /* Don't forget about volatility even if this is a bitfield. */
7534 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7535 {
7536 if (op0 == orig_op0)
7537 op0 = copy_rtx (op0);
7538
7539 MEM_VOLATILE_P (op0) = 1;
7540 }
7541
7542 /* The following code doesn't handle CONCAT.
7543 Assume only bitpos == 0 can be used for CONCAT, due to
7544 one-element arrays having the same mode as their element. */
7545 if (GET_CODE (op0) == CONCAT)
7546 {
7547 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7548 abort ();
7549 return op0;
7550 }
7551
7552 /* In cases where an aligned union has an unaligned object
7553 as a field, we might be extracting a BLKmode value from
7554 an integer-mode (e.g., SImode) object. Handle this case
7555 by doing the extract into an object as wide as the field
7556 (which we know to be the width of a basic mode), then
7557 storing into memory, and changing the mode to BLKmode. */
7558 if (mode1 == VOIDmode
7559 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7560 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7561 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7562 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7563 && modifier != EXPAND_CONST_ADDRESS
7564 && modifier != EXPAND_INITIALIZER)
7565 /* If the field isn't aligned enough to fetch as a memref,
7566 fetch it as a bit field. */
7567 || (mode1 != BLKmode
7568 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7569 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))
7570 && ((modifier == EXPAND_CONST_ADDRESS
7571 || modifier == EXPAND_INITIALIZER)
7572 ? STRICT_ALIGNMENT
7573 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7574 || (bitpos % BITS_PER_UNIT != 0)))
7575 /* If the type and the field are a constant size and the
7576 size of the type isn't the same size as the bitfield,
7577 we must use bitfield operations. */
7578 || (bitsize >= 0
7579 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7580 == INTEGER_CST)
7581 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7582 bitsize)))
7583 {
7584 enum machine_mode ext_mode = mode;
7585
7586 if (ext_mode == BLKmode
7587 && ! (target != 0 && GET_CODE (op0) == MEM
7588 && GET_CODE (target) == MEM
7589 && bitpos % BITS_PER_UNIT == 0))
7590 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7591
7592 if (ext_mode == BLKmode)
7593 {
7594 /* In this case, BITPOS must start at a byte boundary and
7595 TARGET, if specified, must be a MEM. */
7596 if (GET_CODE (op0) != MEM
7597 || (target != 0 && GET_CODE (target) != MEM)
7598 || bitpos % BITS_PER_UNIT != 0)
7599 abort ();
7600
7601 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7602 if (target == 0)
7603 target = assign_temp (type, 0, 1, 1);
7604
7605 emit_block_move (target, op0,
7606 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7607 / BITS_PER_UNIT),
7608 (modifier == EXPAND_STACK_PARM
7609 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7610
7611 return target;
7612 }
7613
7614 op0 = validize_mem (op0);
7615
7616 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7617 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7618
7619 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7620 (modifier == EXPAND_STACK_PARM
7621 ? NULL_RTX : target),
7622 ext_mode, ext_mode,
7623 int_size_in_bytes (TREE_TYPE (tem)));
7624
7625 /* If the result is a record type and BITSIZE is narrower than
7626 the mode of OP0, an integral mode, and this is a big endian
7627 machine, we must put the field into the high-order bits. */
7628 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7629 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7630 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7631 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7632 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7633 - bitsize),
7634 op0, 1);
7635
7636 if (mode == BLKmode)
7637 {
7638 rtx new = assign_temp (build_qualified_type
7639 ((*lang_hooks.types.type_for_mode)
7640 (ext_mode, 0),
7641 TYPE_QUAL_CONST), 0, 1, 1);
7642
7643 emit_move_insn (new, op0);
7644 op0 = copy_rtx (new);
7645 PUT_MODE (op0, BLKmode);
7646 set_mem_attributes (op0, exp, 1);
7647 }
7648
7649 return op0;
7650 }
7651
7652 /* If the result is BLKmode, use that to access the object
7653 now as well. */
7654 if (mode == BLKmode)
7655 mode1 = BLKmode;
7656
7657 /* Get a reference to just this component. */
7658 if (modifier == EXPAND_CONST_ADDRESS
7659 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7660 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7661 else
7662 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7663
7664 if (op0 == orig_op0)
7665 op0 = copy_rtx (op0);
7666
7667 set_mem_attributes (op0, exp, 0);
7668 if (GET_CODE (XEXP (op0, 0)) == REG)
7669 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7670
7671 MEM_VOLATILE_P (op0) |= volatilep;
7672 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7673 || modifier == EXPAND_CONST_ADDRESS
7674 || modifier == EXPAND_INITIALIZER)
7675 return op0;
7676 else if (target == 0)
7677 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7678
7679 convert_move (target, op0, unsignedp);
7680 return target;
7681 }
7682
7683 case VTABLE_REF:
7684 {
7685 rtx insn, before = get_last_insn (), vtbl_ref;
7686
7687 /* Evaluate the interior expression. */
7688 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7689 tmode, modifier);
7690
7691 /* Get or create an instruction off which to hang a note. */
7692 if (REG_P (subtarget))
7693 {
7694 target = subtarget;
7695 insn = get_last_insn ();
7696 if (insn == before)
7697 abort ();
7698 if (! INSN_P (insn))
7699 insn = prev_nonnote_insn (insn);
7700 }
7701 else
7702 {
7703 target = gen_reg_rtx (GET_MODE (subtarget));
7704 insn = emit_move_insn (target, subtarget);
7705 }
7706
7707 /* Collect the data for the note. */
7708 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7709 vtbl_ref = plus_constant (vtbl_ref,
7710 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7711 /* Discard the initial CONST that was added. */
7712 vtbl_ref = XEXP (vtbl_ref, 0);
7713
7714 REG_NOTES (insn)
7715 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7716
7717 return target;
7718 }
7719
7720 /* Intended for a reference to a buffer of a file-object in Pascal.
7721 But it's not certain that a special tree code will really be
7722 necessary for these. INDIRECT_REF might work for them. */
7723 case BUFFER_REF:
7724 abort ();
7725
7726 case IN_EXPR:
7727 {
7728 /* Pascal set IN expression.
7729
7730 Algorithm:
7731 rlo = set_low - (set_low%bits_per_word);
7732 the_word = set [ (index - rlo)/bits_per_word ];
7733 bit_index = index % bits_per_word;
7734 bitmask = 1 << bit_index;
7735 return !!(the_word & bitmask); */
7736
7737 tree set = TREE_OPERAND (exp, 0);
7738 tree index = TREE_OPERAND (exp, 1);
7739 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7740 tree set_type = TREE_TYPE (set);
7741 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7742 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7743 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7744 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7745 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7746 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7747 rtx setaddr = XEXP (setval, 0);
7748 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7749 rtx rlow;
7750 rtx diff, quo, rem, addr, bit, result;
7751
7752 /* If domain is empty, answer is no. Likewise if index is constant
7753 and out of bounds. */
7754 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7755 && TREE_CODE (set_low_bound) == INTEGER_CST
7756 && tree_int_cst_lt (set_high_bound, set_low_bound))
7757 || (TREE_CODE (index) == INTEGER_CST
7758 && TREE_CODE (set_low_bound) == INTEGER_CST
7759 && tree_int_cst_lt (index, set_low_bound))
7760 || (TREE_CODE (set_high_bound) == INTEGER_CST
7761 && TREE_CODE (index) == INTEGER_CST
7762 && tree_int_cst_lt (set_high_bound, index))))
7763 return const0_rtx;
7764
7765 if (target == 0)
7766 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7767
7768 /* If we get here, we have to generate the code for both cases
7769 (in range and out of range). */
7770
7771 op0 = gen_label_rtx ();
7772 op1 = gen_label_rtx ();
7773
7774 if (! (GET_CODE (index_val) == CONST_INT
7775 && GET_CODE (lo_r) == CONST_INT))
7776 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7777 GET_MODE (index_val), iunsignedp, op1);
7778
7779 if (! (GET_CODE (index_val) == CONST_INT
7780 && GET_CODE (hi_r) == CONST_INT))
7781 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7782 GET_MODE (index_val), iunsignedp, op1);
7783
7784 /* Calculate the element number of bit zero in the first word
7785 of the set. */
7786 if (GET_CODE (lo_r) == CONST_INT)
7787 rlow = GEN_INT (INTVAL (lo_r)
7788 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7789 else
7790 rlow = expand_binop (index_mode, and_optab, lo_r,
7791 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7792 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7793
7794 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7795 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7796
7797 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7798 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7799 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7800 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7801
7802 addr = memory_address (byte_mode,
7803 expand_binop (index_mode, add_optab, diff,
7804 setaddr, NULL_RTX, iunsignedp,
7805 OPTAB_LIB_WIDEN));
7806
7807 /* Extract the bit we want to examine. */
7808 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7809 gen_rtx_MEM (byte_mode, addr),
7810 make_tree (TREE_TYPE (index), rem),
7811 NULL_RTX, 1);
7812 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7813 GET_MODE (target) == byte_mode ? target : 0,
7814 1, OPTAB_LIB_WIDEN);
7815
7816 if (result != target)
7817 convert_move (target, result, 1);
7818
7819 /* Output the code to handle the out-of-range case. */
7820 emit_jump (op0);
7821 emit_label (op1);
7822 emit_move_insn (target, const0_rtx);
7823 emit_label (op0);
7824 return target;
7825 }
7826
7827 case WITH_CLEANUP_EXPR:
7828 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7829 {
7830 WITH_CLEANUP_EXPR_RTL (exp)
7831 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7832 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7833 CLEANUP_EH_ONLY (exp));
7834
7835 /* That's it for this cleanup. */
7836 TREE_OPERAND (exp, 1) = 0;
7837 }
7838 return WITH_CLEANUP_EXPR_RTL (exp);
7839
7840 case CLEANUP_POINT_EXPR:
7841 {
7842 /* Start a new binding layer that will keep track of all cleanup
7843 actions to be performed. */
7844 expand_start_bindings (2);
7845
7846 target_temp_slot_level = temp_slot_level;
7847
7848 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7849 /* If we're going to use this value, load it up now. */
7850 if (! ignore)
7851 op0 = force_not_mem (op0);
7852 preserve_temp_slots (op0);
7853 expand_end_bindings (NULL_TREE, 0, 0);
7854 }
7855 return op0;
7856
7857 case CALL_EXPR:
7858 /* Check for a built-in function. */
7859 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7860 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7861 == FUNCTION_DECL)
7862 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7863 {
7864 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7865 == BUILT_IN_FRONTEND)
7866 return (*lang_hooks.expand_expr) (exp, original_target,
7867 tmode, modifier);
7868 else
7869 return expand_builtin (exp, target, subtarget, tmode, ignore);
7870 }
7871
7872 return expand_call (exp, target, ignore);
7873
7874 case NON_LVALUE_EXPR:
7875 case NOP_EXPR:
7876 case CONVERT_EXPR:
7877 case REFERENCE_EXPR:
7878 if (TREE_OPERAND (exp, 0) == error_mark_node)
7879 return const0_rtx;
7880
7881 if (TREE_CODE (type) == UNION_TYPE)
7882 {
7883 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7884
7885 /* If both input and output are BLKmode, this conversion isn't doing
7886 anything except possibly changing memory attributes. */
7887 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7888 {
7889 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7890 modifier);
7891
7892 result = copy_rtx (result);
7893 set_mem_attributes (result, exp, 0);
7894 return result;
7895 }
7896
7897 if (target == 0)
7898 target = assign_temp (type, 0, 1, 1);
7899
7900 if (GET_CODE (target) == MEM)
7901 /* Store data into beginning of memory target. */
7902 store_expr (TREE_OPERAND (exp, 0),
7903 adjust_address (target, TYPE_MODE (valtype), 0),
7904 modifier == EXPAND_STACK_PARM ? 2 : 0);
7905
7906 else if (GET_CODE (target) == REG)
7907 /* Store this field into a union of the proper type. */
7908 store_field (target,
7909 MIN ((int_size_in_bytes (TREE_TYPE
7910 (TREE_OPERAND (exp, 0)))
7911 * BITS_PER_UNIT),
7912 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7913 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7914 VOIDmode, 0, type, 0);
7915 else
7916 abort ();
7917
7918 /* Return the entire union. */
7919 return target;
7920 }
7921
7922 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7923 {
7924 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7925 modifier);
7926
7927 /* If the signedness of the conversion differs and OP0 is
7928 a promoted SUBREG, clear that indication since we now
7929 have to do the proper extension. */
7930 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7931 && GET_CODE (op0) == SUBREG)
7932 SUBREG_PROMOTED_VAR_P (op0) = 0;
7933
7934 return op0;
7935 }
7936
7937 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7938 if (GET_MODE (op0) == mode)
7939 return op0;
7940
7941 /* If OP0 is a constant, just convert it into the proper mode. */
7942 if (CONSTANT_P (op0))
7943 {
7944 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7945 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7946
7947 if (modifier == EXPAND_INITIALIZER)
7948 return simplify_gen_subreg (mode, op0, inner_mode,
7949 subreg_lowpart_offset (mode,
7950 inner_mode));
7951 else
7952 return convert_modes (mode, inner_mode, op0,
7953 TREE_UNSIGNED (inner_type));
7954 }
7955
7956 if (modifier == EXPAND_INITIALIZER)
7957 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7958
7959 if (target == 0)
7960 return
7961 convert_to_mode (mode, op0,
7962 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7963 else
7964 convert_move (target, op0,
7965 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7966 return target;
7967
7968 case VIEW_CONVERT_EXPR:
7969 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7970
7971 /* If the input and output modes are both the same, we are done.
7972 Otherwise, if neither mode is BLKmode and both are integral and within
7973 a word, we can use gen_lowpart. If neither is true, make sure the
7974 operand is in memory and convert the MEM to the new mode. */
7975 if (TYPE_MODE (type) == GET_MODE (op0))
7976 ;
7977 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7978 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7979 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7980 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7981 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7982 op0 = gen_lowpart (TYPE_MODE (type), op0);
7983 else if (GET_CODE (op0) != MEM)
7984 {
7985 /* If the operand is not a MEM, force it into memory. Since we
7986 are going to be changing the mode of the MEM, don't call
7987 force_const_mem for constants because we don't allow pool
7988 constants to change mode. */
7989 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7990
7991 if (TREE_ADDRESSABLE (exp))
7992 abort ();
7993
7994 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7995 target
7996 = assign_stack_temp_for_type
7997 (TYPE_MODE (inner_type),
7998 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7999
8000 emit_move_insn (target, op0);
8001 op0 = target;
8002 }
8003
8004 /* At this point, OP0 is in the correct mode. If the output type is such
8005 that the operand is known to be aligned, indicate that it is.
8006 Otherwise, we need only be concerned about alignment for non-BLKmode
8007 results. */
8008 if (GET_CODE (op0) == MEM)
8009 {
8010 op0 = copy_rtx (op0);
8011
8012 if (TYPE_ALIGN_OK (type))
8013 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
8014 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
8015 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
8016 {
8017 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8018 HOST_WIDE_INT temp_size
8019 = MAX (int_size_in_bytes (inner_type),
8020 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
8021 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
8022 temp_size, 0, type);
8023 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
8024
8025 if (TREE_ADDRESSABLE (exp))
8026 abort ();
8027
8028 if (GET_MODE (op0) == BLKmode)
8029 emit_block_move (new_with_op0_mode, op0,
8030 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
8031 (modifier == EXPAND_STACK_PARM
8032 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
8033 else
8034 emit_move_insn (new_with_op0_mode, op0);
8035
8036 op0 = new;
8037 }
8038
8039 op0 = adjust_address (op0, TYPE_MODE (type), 0);
8040 }
8041
8042 return op0;
8043
8044 case PLUS_EXPR:
8045 this_optab = ! unsignedp && flag_trapv
8046 && (GET_MODE_CLASS (mode) == MODE_INT)
8047 ? addv_optab : add_optab;
8048
8049 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
8050 something else, make sure we add the register to the constant and
8051 then to the other thing. This case can occur during strength
8052 reduction and doing it this way will produce better code if the
8053 frame pointer or argument pointer is eliminated.
8054
8055 fold-const.c will ensure that the constant is always in the inner
8056 PLUS_EXPR, so the only case we need to do anything about is if
8057 sp, ap, or fp is our second argument, in which case we must swap
8058 the innermost first argument and our second argument. */
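/* For instance, (X + 4) + FP is rewritten here as (FP + 4) + X so
that the register and the constant are combined first; X and FP are
only illustrative operands. */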
8059
8060 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
8061 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
8062 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
8063 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
8064 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
8065 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
8066 {
8067 tree t = TREE_OPERAND (exp, 1);
8068
8069 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8070 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
8071 }
8072
8073 /* If the result is to be ptr_mode and we are adding an integer to
8074 something, we might be forming a constant. So try to use
8075 plus_constant. If it produces a sum and we can't accept it,
8076 use force_operand. This allows P = &ARR[const] to generate
8077 efficient code on machines where a SYMBOL_REF is not a valid
8078 address.
8079
8080 If this is an EXPAND_SUM call, always return the sum. */
8081 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8082 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8083 {
8084 if (modifier == EXPAND_STACK_PARM)
8085 target = 0;
8086 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
8087 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8088 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
8089 {
8090 rtx constant_part;
8091
8092 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
8093 EXPAND_SUM);
8094 /* Use immed_double_const to ensure that the constant is
8095 truncated according to the mode of OP1, then sign extended
8096 to a HOST_WIDE_INT. Using the constant directly can result
8097 in non-canonical RTL in a 64x32 cross compile. */
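/* For instance, with a 64-bit HOST_WIDE_INT and a 32-bit SImode
operand, the value 0xffffffff must be represented as the canonical
sign-extended CONST_INT -1, not as 0xffffffff. */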
8098 constant_part
8099 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
8100 (HOST_WIDE_INT) 0,
8101 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
8102 op1 = plus_constant (op1, INTVAL (constant_part));
8103 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8104 op1 = force_operand (op1, target);
8105 return op1;
8106 }
8107
8108 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8109 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
8110 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
8111 {
8112 rtx constant_part;
8113
8114 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8115 (modifier == EXPAND_INITIALIZER
8116 ? EXPAND_INITIALIZER : EXPAND_SUM));
8117 if (! CONSTANT_P (op0))
8118 {
8119 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8120 VOIDmode, modifier);
8121 /* Don't go to both_summands if modifier
8122 says it's not right to return a PLUS. */
8123 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8124 goto binop2;
8125 goto both_summands;
8126 }
8127 /* Use immed_double_const to ensure that the constant is
8128 truncated according to the mode of OP0, then sign extended
8129 to a HOST_WIDE_INT. Using the constant directly can result
8130 in non-canonical RTL in a 64x32 cross compile. */
8131 constant_part
8132 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
8133 (HOST_WIDE_INT) 0,
8134 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
8135 op0 = plus_constant (op0, INTVAL (constant_part));
8136 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8137 op0 = force_operand (op0, target);
8138 return op0;
8139 }
8140 }
8141
8142 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8143 subtarget = 0;
8144
8145 /* No sense saving up arithmetic to be done
8146 if it's all in the wrong mode to form part of an address.
8147 And force_operand won't know whether to sign-extend or
8148 zero-extend. */
8149 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8150 || mode != ptr_mode)
8151 {
8152 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8153 if (! operand_equal_p (TREE_OPERAND (exp, 0),
8154 TREE_OPERAND (exp, 1), 0))
8155 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8156 else
8157 op1 = op0;
8158 if (op0 == const0_rtx)
8159 return op1;
8160 if (op1 == const0_rtx)
8161 return op0;
8162 goto binop2;
8163 }
8164
8165 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
8166 if (! operand_equal_p (TREE_OPERAND (exp, 0),
8167 TREE_OPERAND (exp, 1), 0))
8168 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8169 VOIDmode, modifier);
8170 else
8171 op1 = op0;
8172
8173 /* We come here from MINUS_EXPR when the second operand is a
8174 constant. */
8175 both_summands:
8176 /* Make sure any term that's a sum with a constant comes last. */
8177 if (GET_CODE (op0) == PLUS
8178 && CONSTANT_P (XEXP (op0, 1)))
8179 {
8180 temp = op0;
8181 op0 = op1;
8182 op1 = temp;
8183 }
8184 /* If adding to a sum including a constant,
8185 associate it to put the constant outside. */
8186 if (GET_CODE (op1) == PLUS
8187 && CONSTANT_P (XEXP (op1, 1)))
8188 {
8189 rtx constant_term = const0_rtx;
8190
8191 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
8192 if (temp != 0)
8193 op0 = temp;
8194 /* Ensure that MULT comes first if there is one. */
8195 else if (GET_CODE (op0) == MULT)
8196 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
8197 else
8198 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
8199
8200 /* Let's also eliminate constants from op0 if possible. */
8201 op0 = eliminate_constant_term (op0, &constant_term);
8202
8203 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
8204 their sum should be a constant. Form it into OP1, since the
8205 result we want will then be OP0 + OP1. */
8206
8207 temp = simplify_binary_operation (PLUS, mode, constant_term,
8208 XEXP (op1, 1));
8209 if (temp != 0)
8210 op1 = temp;
8211 else
8212 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
8213 }
8214
8215 /* Put a constant term last and put a multiplication first. */
8216 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
8217 temp = op1, op1 = op0, op0 = temp;
8218
8219 temp = simplify_binary_operation (PLUS, mode, op0, op1);
8220 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
8221
8222 case MINUS_EXPR:
8223 /* For initializers, we are allowed to return a MINUS of two
8224 symbolic constants. Here we handle the case where both
8225 operands are constant, for the sake of an initializer. */
8228 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8229 && really_constant_p (TREE_OPERAND (exp, 0))
8230 && really_constant_p (TREE_OPERAND (exp, 1)))
8231 {
8232 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
8233 modifier);
8234 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8235 modifier);
8236
8237 /* If the last operand is a CONST_INT, use plus_constant of
8238 the negated constant. Else make the MINUS. */
8239 if (GET_CODE (op1) == CONST_INT)
8240 return plus_constant (op0, - INTVAL (op1));
8241 else
8242 return gen_rtx_MINUS (mode, op0, op1);
8243 }
8244
8245 this_optab = ! unsignedp && flag_trapv
8246 && (GET_MODE_CLASS(mode) == MODE_INT)
8247 ? subv_optab : sub_optab;
8248
8249 /* No sense saving up arithmetic to be done
8250 if it's all in the wrong mode to form part of an address.
8251 And force_operand won't know whether to sign-extend or
8252 zero-extend. */
8253 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8254 || mode != ptr_mode)
8255 goto binop;
8256
8257 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8258 subtarget = 0;
8259
8260 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
8261 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
8262
8263 /* Convert A - const to A + (-const). */
8264 if (GET_CODE (op1) == CONST_INT)
8265 {
8266 op1 = negate_rtx (mode, op1);
8267 goto both_summands;
8268 }
8269
8270 goto binop2;
8271
8272 case MULT_EXPR:
8273 /* If first operand is constant, swap them.
8274 Thus the following special case checks need only
8275 check the second operand. */
8276 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8277 {
8278 tree t1 = TREE_OPERAND (exp, 0);
8279 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8280 TREE_OPERAND (exp, 1) = t1;
8281 }
8282
8283 /* Attempt to return something suitable for generating an
8284 indexed address, for machines that support that. */
8285
8286 if (modifier == EXPAND_SUM && mode == ptr_mode
8287 && host_integerp (TREE_OPERAND (exp, 1), 0))
8288 {
8289 tree exp1 = TREE_OPERAND (exp, 1);
8290
8291 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8292 EXPAND_SUM);
8293
8294 /* If we knew for certain that this is arithmetic for an array
8295 reference, and we knew the bounds of the array, then we could
8296 apply the distributive law across (PLUS X C) for constant C.
8297 Without such knowledge, we risk overflowing the computation
8298 when both X and C are large, but X+C isn't. */
8299 /* ??? Could perhaps special-case EXP being unsigned and C being
8300 positive. In that case we are certain that X+C is no smaller
8301 than X and so the transformed expression will overflow iff the
8302 original would have. */
8303
8304 if (GET_CODE (op0) != REG)
8305 op0 = force_operand (op0, NULL_RTX);
8306 if (GET_CODE (op0) != REG)
8307 op0 = copy_to_mode_reg (mode, op0);
8308
8309 return gen_rtx_MULT (mode, op0,
8310 gen_int_mode (tree_low_cst (exp1, 0),
8311 TYPE_MODE (TREE_TYPE (exp1))));
8312 }
8313
8314 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8315 subtarget = 0;
8316
8317 if (modifier == EXPAND_STACK_PARM)
8318 target = 0;
8319
8320 /* Check for multiplying things that have been extended
8321 from a narrower type. If this machine supports multiplying
8322 in that narrower type with a result in the desired type,
8323 do it that way, and avoid the explicit type-conversion. */
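/* For instance, (int) (short) a * (int) (short) b can be done with a
single widening 16-bit by 16-bit to 32-bit multiply when the target
provides one; a and b are illustrative operands and the sizes assume
16-bit short and 32-bit int. */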
8324 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8325 && TREE_CODE (type) == INTEGER_TYPE
8326 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8327 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8328 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8329 && int_fits_type_p (TREE_OPERAND (exp, 1),
8330 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8331 /* Don't use a widening multiply if a shift will do. */
8332 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8333 > HOST_BITS_PER_WIDE_INT)
8334 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8335 ||
8336 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8337 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8338 ==
8339 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
8340 /* If both operands are extended, they must either both
8341 be zero-extended or both be sign-extended. */
8342 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8343 ==
8344 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
8345 {
8346 enum machine_mode innermode
8347 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
8348 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8349 ? smul_widen_optab : umul_widen_optab);
8350 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8351 ? umul_widen_optab : smul_widen_optab);
8352 if (mode == GET_MODE_WIDER_MODE (innermode))
8353 {
8354 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8355 {
8356 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8357 NULL_RTX, VOIDmode, 0);
8358 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8359 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8360 VOIDmode, 0);
8361 else
8362 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8363 NULL_RTX, VOIDmode, 0);
8364 goto binop2;
8365 }
8366 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
8367 && innermode == word_mode)
8368 {
8369 rtx htem;
8370 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8371 NULL_RTX, VOIDmode, 0);
8372 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8373 op1 = convert_modes (innermode, mode,
8374 expand_expr (TREE_OPERAND (exp, 1),
8375 NULL_RTX, VOIDmode, 0),
8376 unsignedp);
8377 else
8378 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8379 NULL_RTX, VOIDmode, 0);
8380 temp = expand_binop (mode, other_optab, op0, op1, target,
8381 unsignedp, OPTAB_LIB_WIDEN);
8382 htem = expand_mult_highpart_adjust (innermode,
8383 gen_highpart (innermode, temp),
8384 op0, op1,
8385 gen_highpart (innermode, temp),
8386 unsignedp);
8387 emit_move_insn (gen_highpart (innermode, temp), htem);
8388 return temp;
8389 }
8390 }
8391 }
8392 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8393 if (! operand_equal_p (TREE_OPERAND (exp, 0),
8394 TREE_OPERAND (exp, 1), 0))
8395 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8396 else
8397 op1 = op0;
8398 return expand_mult (mode, op0, op1, target, unsignedp);
8399
8400 case TRUNC_DIV_EXPR:
8401 case FLOOR_DIV_EXPR:
8402 case CEIL_DIV_EXPR:
8403 case ROUND_DIV_EXPR:
8404 case EXACT_DIV_EXPR:
8405 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8406 subtarget = 0;
8407 if (modifier == EXPAND_STACK_PARM)
8408 target = 0;
8409 /* Possible optimization: compute the dividend with EXPAND_SUM;
8410 then, if the divisor is constant, we can optimize the case
8411 where some terms of the dividend have coefficients divisible by it. */
8412 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8413 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8414 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8415
8416 case RDIV_EXPR:
8417 /* Emit a/b as a*(1/b). Later we may be able to CSE the reciprocal,
8418 saving an expensive divide. If not, combine will rebuild the
8419 original computation. */
8420 if (flag_unsafe_math_optimizations && optimize && !optimize_size
8421 && TREE_CODE (type) == REAL_TYPE
8422 && !real_onep (TREE_OPERAND (exp, 0)))
8423 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
8424 build (RDIV_EXPR, type,
8425 build_real (type, dconst1),
8426 TREE_OPERAND (exp, 1))),
8427 target, tmode, modifier);
8428 this_optab = sdiv_optab;
8429 goto binop;
8430
8431 case TRUNC_MOD_EXPR:
8432 case FLOOR_MOD_EXPR:
8433 case CEIL_MOD_EXPR:
8434 case ROUND_MOD_EXPR:
8435 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8436 subtarget = 0;
8437 if (modifier == EXPAND_STACK_PARM)
8438 target = 0;
8439 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8440 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8441 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8442
8443 case FIX_ROUND_EXPR:
8444 case FIX_FLOOR_EXPR:
8445 case FIX_CEIL_EXPR:
8446 abort (); /* Not used for C. */
8447
8448 case FIX_TRUNC_EXPR:
8449 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8450 if (target == 0 || modifier == EXPAND_STACK_PARM)
8451 target = gen_reg_rtx (mode);
8452 expand_fix (target, op0, unsignedp);
8453 return target;
8454
8455 case FLOAT_EXPR:
8456 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8457 if (target == 0 || modifier == EXPAND_STACK_PARM)
8458 target = gen_reg_rtx (mode);
8459 /* expand_float can't figure out what to do if FROM has VOIDmode.
8460 So give it the correct mode. With -O, cse will optimize this. */
8461 if (GET_MODE (op0) == VOIDmode)
8462 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8463 op0);
8464 expand_float (target, op0,
8465 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8466 return target;
8467
8468 case NEGATE_EXPR:
8469 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8470 if (modifier == EXPAND_STACK_PARM)
8471 target = 0;
8472 temp = expand_unop (mode,
8473 ! unsignedp && flag_trapv
8474 && (GET_MODE_CLASS(mode) == MODE_INT)
8475 ? negv_optab : neg_optab, op0, target, 0);
8476 if (temp == 0)
8477 abort ();
8478 return temp;
8479
8480 case ABS_EXPR:
8481 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8482 if (modifier == EXPAND_STACK_PARM)
8483 target = 0;
8484
8485 /* ABS_EXPR is not valid for complex arguments. */
8486 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8487 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8488 abort ();
8489
8490 /* Unsigned abs is simply the operand. Testing here means we don't
8491 risk generating incorrect code below. */
8492 if (TREE_UNSIGNED (type))
8493 return op0;
8494
8495 return expand_abs (mode, op0, target, unsignedp,
8496 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8497
8498 case MAX_EXPR:
8499 case MIN_EXPR:
8500 target = original_target;
8501 if (target == 0
8502 || modifier == EXPAND_STACK_PARM
8503 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
8504 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8505 || GET_MODE (target) != mode
8506 || (GET_CODE (target) == REG
8507 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8508 target = gen_reg_rtx (mode);
8509 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8510 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8511
8512 /* First try to do it with a special MIN or MAX instruction.
8513 If that does not win, use a conditional jump to select the proper
8514 value. */
8515 this_optab = (TREE_UNSIGNED (type)
8516 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8517 : (code == MIN_EXPR ? smin_optab : smax_optab));
8518
8519 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8520 OPTAB_WIDEN);
8521 if (temp != 0)
8522 return temp;
8523
8524 /* At this point, a MEM target is no longer useful; we will get better
8525 code without it. */
8526
8527 if (GET_CODE (target) == MEM)
8528 target = gen_reg_rtx (mode);
8529
8530 if (target != op0)
8531 emit_move_insn (target, op0);
8532
8533 op0 = gen_label_rtx ();
8534
8535 /* If this mode is an integer too wide to compare properly,
8536 compare word by word. Rely on cse to optimize constant cases. */
8537 if (GET_MODE_CLASS (mode) == MODE_INT
8538 && ! can_compare_p (GE, mode, ccp_jump))
8539 {
8540 if (code == MAX_EXPR)
8541 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8542 target, op1, NULL_RTX, op0);
8543 else
8544 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8545 op1, target, NULL_RTX, op0);
8546 }
8547 else
8548 {
8549 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8550 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8551 unsignedp, mode, NULL_RTX, NULL_RTX,
8552 op0);
8553 }
8554 emit_move_insn (target, op1);
8555 emit_label (op0);
8556 return target;
8557
8558 case BIT_NOT_EXPR:
8559 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8560 if (modifier == EXPAND_STACK_PARM)
8561 target = 0;
8562 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8563 if (temp == 0)
8564 abort ();
8565 return temp;
8566
8567 case FFS_EXPR:
8568 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8569 if (modifier == EXPAND_STACK_PARM)
8570 target = 0;
8571 temp = expand_unop (mode, ffs_optab, op0, target, 1);
8572 if (temp == 0)
8573 abort ();
8574 return temp;
8575
8576 case CLZ_EXPR:
8577 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8578 temp = expand_unop (mode, clz_optab, op0, target, 1);
8579 if (temp == 0)
8580 abort ();
8581 return temp;
8582
8583 case CTZ_EXPR:
8584 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8585 temp = expand_unop (mode, ctz_optab, op0, target, 1);
8586 if (temp == 0)
8587 abort ();
8588 return temp;
8589
8590 case POPCOUNT_EXPR:
8591 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8592 temp = expand_unop (mode, popcount_optab, op0, target, 1);
8593 if (temp == 0)
8594 abort ();
8595 return temp;
8596
8597 case PARITY_EXPR:
8598 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8599 temp = expand_unop (mode, parity_optab, op0, target, 1);
8600 if (temp == 0)
8601 abort ();
8602 return temp;
8603
8604 /* ??? Can optimize bitwise operations with one arg constant.
8605 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8606 and (a bitwise1 b) bitwise2 b (etc)
8607 but that is probably not worthwhile. */
8608
8609 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8610 boolean values when we want in all cases to compute both of them. In
8611 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8612 as actual zero-or-1 values and then bitwise anding. In cases where
8613 there cannot be any side effects, better code would be made by
8614 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8615 how to recognize those cases. */
8616
8617 case TRUTH_AND_EXPR:
8618 case BIT_AND_EXPR:
8619 this_optab = and_optab;
8620 goto binop;
8621
8622 case TRUTH_OR_EXPR:
8623 case BIT_IOR_EXPR:
8624 this_optab = ior_optab;
8625 goto binop;
8626
8627 case TRUTH_XOR_EXPR:
8628 case BIT_XOR_EXPR:
8629 this_optab = xor_optab;
8630 goto binop;
8631
8632 case LSHIFT_EXPR:
8633 case RSHIFT_EXPR:
8634 case LROTATE_EXPR:
8635 case RROTATE_EXPR:
8636 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8637 subtarget = 0;
8638 if (modifier == EXPAND_STACK_PARM)
8639 target = 0;
8640 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8641 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8642 unsignedp);
8643
8644 /* Could determine the answer when only additive constants differ. Also,
8645 the addition of one can be handled by changing the condition. */
8646 case LT_EXPR:
8647 case LE_EXPR:
8648 case GT_EXPR:
8649 case GE_EXPR:
8650 case EQ_EXPR:
8651 case NE_EXPR:
8652 case UNORDERED_EXPR:
8653 case ORDERED_EXPR:
8654 case UNLT_EXPR:
8655 case UNLE_EXPR:
8656 case UNGT_EXPR:
8657 case UNGE_EXPR:
8658 case UNEQ_EXPR:
8659 temp = do_store_flag (exp,
8660 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8661 tmode != VOIDmode ? tmode : mode, 0);
8662 if (temp != 0)
8663 return temp;
8664
8665 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8666 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8667 && original_target
8668 && GET_CODE (original_target) == REG
8669 && (GET_MODE (original_target)
8670 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8671 {
8672 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8673 VOIDmode, 0);
8674
8675 /* If temp is constant, we can just compute the result. */
8676 if (GET_CODE (temp) == CONST_INT)
8677 {
8678 if (INTVAL (temp) != 0)
8679 emit_move_insn (target, const1_rtx);
8680 else
8681 emit_move_insn (target, const0_rtx);
8682
8683 return target;
8684 }
8685
8686 if (temp != original_target)
8687 {
8688 enum machine_mode mode1 = GET_MODE (temp);
8689 if (mode1 == VOIDmode)
8690 mode1 = tmode != VOIDmode ? tmode : mode;
8691
8692 temp = copy_to_mode_reg (mode1, temp);
8693 }
8694
8695 op1 = gen_label_rtx ();
8696 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8697 GET_MODE (temp), unsignedp, op1);
8698 emit_move_insn (temp, const1_rtx);
8699 emit_label (op1);
8700 return temp;
8701 }
8702
8703 /* If no set-flag instruction, must generate a conditional
8704 store into a temporary variable. Drop through
8705 and handle this like && and ||. */
8706
8707 case TRUTH_ANDIF_EXPR:
8708 case TRUTH_ORIF_EXPR:
8709 if (! ignore
8710 && (target == 0
8711 || modifier == EXPAND_STACK_PARM
8712 || ! safe_from_p (target, exp, 1)
8713 /* Make sure we don't have a hard reg (such as function's return
8714 value) live across basic blocks, if not optimizing. */
8715 || (!optimize && GET_CODE (target) == REG
8716 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8717 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8718
8719 if (target)
8720 emit_clr_insn (target);
8721
8722 op1 = gen_label_rtx ();
8723 jumpifnot (exp, op1);
8724
8725 if (target)
8726 emit_0_to_1_insn (target);
8727
8728 emit_label (op1);
8729 return ignore ? const0_rtx : target;
8730
8731 case TRUTH_NOT_EXPR:
8732 if (modifier == EXPAND_STACK_PARM)
8733 target = 0;
8734 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8735 /* The parser is careful to generate TRUTH_NOT_EXPR
8736 only with operands that are always zero or one. */
8737 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8738 target, 1, OPTAB_LIB_WIDEN);
8739 if (temp == 0)
8740 abort ();
8741 return temp;
8742
8743 case COMPOUND_EXPR:
8744 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8745 emit_queue ();
8746 return expand_expr (TREE_OPERAND (exp, 1),
8747 (ignore ? const0_rtx : target),
8748 VOIDmode, modifier);
8749
8750 case COND_EXPR:
8751 /* If we would have a "singleton" (see below) were it not for a
8752 conversion in each arm, bring that conversion back out. */
8753 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8754 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8755 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8756 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8757 {
8758 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8759 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8760
8761 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8762 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8763 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8764 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8765 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8766 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8767 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8768 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8769 return expand_expr (build1 (NOP_EXPR, type,
8770 build (COND_EXPR, TREE_TYPE (iftrue),
8771 TREE_OPERAND (exp, 0),
8772 iftrue, iffalse)),
8773 target, tmode, modifier);
8774 }
8775
8776 {
8777 /* Note that COND_EXPRs whose type is a structure or union
8778 are required to be constructed to contain assignments of
8779 a temporary variable, so that we can evaluate them here
8780 for side effect only. If type is void, we must do likewise. */
8781
8782 /* If an arm of the branch requires a cleanup,
8783 only that cleanup is performed. */
8784
8785 tree singleton = 0;
8786 tree binary_op = 0, unary_op = 0;
8787
8788 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8789 convert it to our mode, if necessary. */
8790 if (integer_onep (TREE_OPERAND (exp, 1))
8791 && integer_zerop (TREE_OPERAND (exp, 2))
8792 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8793 {
8794 if (ignore)
8795 {
8796 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8797 modifier);
8798 return const0_rtx;
8799 }
8800
8801 if (modifier == EXPAND_STACK_PARM)
8802 target = 0;
8803 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8804 if (GET_MODE (op0) == mode)
8805 return op0;
8806
8807 if (target == 0)
8808 target = gen_reg_rtx (mode);
8809 convert_move (target, op0, unsignedp);
8810 return target;
8811 }
8812
8813 /* Check for X ? A + B : A. If we have this, we can copy A to the
8814 output and conditionally add B. Similarly for unary operations.
8815 Don't do this if X has side-effects because those side effects
8816 might affect A or B and the "?" operation is a sequence point in
8817 ANSI. (operand_equal_p tests for side effects.) */
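/* Concretely, in  x ? a + b : a  the false arm A is the first operand
   of the true arm A + B, so SINGLETON is A and BINARY_OP is A + B:
   we can copy A to the output and add B only when X is true.
   Likewise  x ? -a : a  gives SINGLETON = A and UNARY_OP = -A.  */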
8818
8819 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8820 && operand_equal_p (TREE_OPERAND (exp, 2),
8821 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8822 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8823 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8824 && operand_equal_p (TREE_OPERAND (exp, 1),
8825 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8826 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8827 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8828 && operand_equal_p (TREE_OPERAND (exp, 2),
8829 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8830 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8831 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8832 && operand_equal_p (TREE_OPERAND (exp, 1),
8833 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8834 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8835
8836 /* If we are not to produce a result, we have no target. Otherwise,
8837 if a target was specified use it; it will not be used as an
8838 intermediate target unless it is safe. If no target, use a
8839 temporary. */
8840
8841 if (ignore)
8842 temp = 0;
8843 else if (modifier == EXPAND_STACK_PARM)
8844 temp = assign_temp (type, 0, 0, 1);
8845 else if (original_target
8846 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8847 || (singleton && GET_CODE (original_target) == REG
8848 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8849 && original_target == var_rtx (singleton)))
8850 && GET_MODE (original_target) == mode
8851 #ifdef HAVE_conditional_move
8852 && (! can_conditionally_move_p (mode)
8853 || GET_CODE (original_target) == REG
8854 || TREE_ADDRESSABLE (type))
8855 #endif
8856 && (GET_CODE (original_target) != MEM
8857 || TREE_ADDRESSABLE (type)))
8858 temp = original_target;
8859 else if (TREE_ADDRESSABLE (type))
8860 abort ();
8861 else
8862 temp = assign_temp (type, 0, 0, 1);
8863
8864 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8865 do the test of X as a store-flag operation, do this as
8866 A + ((X != 0) << log C). Similarly for other simple binary
8867 operators. Only do for C == 1 if BRANCH_COST is low. */
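/* For instance,  x ? a + 4 : a  can be emitted as
   a + ((x != 0) << 2)
   when a store-flag instruction is available, avoiding a branch.  */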
8868 if (temp && singleton && binary_op
8869 && (TREE_CODE (binary_op) == PLUS_EXPR
8870 || TREE_CODE (binary_op) == MINUS_EXPR
8871 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8872 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8873 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8874 : integer_onep (TREE_OPERAND (binary_op, 1)))
8875 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8876 {
8877 rtx result;
8878 tree cond;
8879 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8880 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8881 ? addv_optab : add_optab)
8882 : TREE_CODE (binary_op) == MINUS_EXPR
8883 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8884 ? subv_optab : sub_optab)
8885 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8886 : xor_optab);
8887
8888 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8889 if (singleton == TREE_OPERAND (exp, 1))
8890 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8891 else
8892 cond = TREE_OPERAND (exp, 0);
8893
8894 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8895 ? temp : NULL_RTX),
8896 mode, BRANCH_COST <= 1);
8897
8898 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8899 result = expand_shift (LSHIFT_EXPR, mode, result,
8900 build_int_2 (tree_log2
8901 (TREE_OPERAND
8902 (binary_op, 1)),
8903 0),
8904 (safe_from_p (temp, singleton, 1)
8905 ? temp : NULL_RTX), 0);
8906
8907 if (result)
8908 {
8909 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8910 return expand_binop (mode, boptab, op1, result, temp,
8911 unsignedp, OPTAB_LIB_WIDEN);
8912 }
8913 }
8914
8915 do_pending_stack_adjust ();
8916 NO_DEFER_POP;
8917 op0 = gen_label_rtx ();
8918
8919 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8920 {
8921 if (temp != 0)
8922 {
8923 /* If the target conflicts with the other operand of the
8924 binary op, we can't use it. Also, we can't use the target
8925 if it is a hard register, because evaluating the condition
8926 might clobber it. */
8927 if ((binary_op
8928 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8929 || (GET_CODE (temp) == REG
8930 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8931 temp = gen_reg_rtx (mode);
8932 store_expr (singleton, temp,
8933 modifier == EXPAND_STACK_PARM ? 2 : 0);
8934 }
8935 else
8936 expand_expr (singleton,
8937 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8938 if (singleton == TREE_OPERAND (exp, 1))
8939 jumpif (TREE_OPERAND (exp, 0), op0);
8940 else
8941 jumpifnot (TREE_OPERAND (exp, 0), op0);
8942
8943 start_cleanup_deferral ();
8944 if (binary_op && temp == 0)
8945 /* Just touch the other operand. */
8946 expand_expr (TREE_OPERAND (binary_op, 1),
8947 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8948 else if (binary_op)
8949 store_expr (build (TREE_CODE (binary_op), type,
8950 make_tree (type, temp),
8951 TREE_OPERAND (binary_op, 1)),
8952 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8953 else
8954 store_expr (build1 (TREE_CODE (unary_op), type,
8955 make_tree (type, temp)),
8956 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8957 op1 = op0;
8958 }
8959 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8960 comparison operator. If we have one of these cases, set the
8961 output to A, branch on A (cse will merge these two references),
8962 then set the output to FOO. */
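/* For example, for  a > 0 ? a : b  we store A into the output,
   branch on the comparison (cse merges the two uses of A), and only
   store B on the fall-through path.  */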
8963 else if (temp
8964 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8965 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8966 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8967 TREE_OPERAND (exp, 1), 0)
8968 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8969 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8970 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8971 {
8972 if (GET_CODE (temp) == REG
8973 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8974 temp = gen_reg_rtx (mode);
8975 store_expr (TREE_OPERAND (exp, 1), temp,
8976 modifier == EXPAND_STACK_PARM ? 2 : 0);
8977 jumpif (TREE_OPERAND (exp, 0), op0);
8978
8979 start_cleanup_deferral ();
8980 store_expr (TREE_OPERAND (exp, 2), temp,
8981 modifier == EXPAND_STACK_PARM ? 2 : 0);
8982 op1 = op0;
8983 }
8984 else if (temp
8985 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8986 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8987 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8988 TREE_OPERAND (exp, 2), 0)
8989 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8990 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8991 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8992 {
8993 if (GET_CODE (temp) == REG
8994 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8995 temp = gen_reg_rtx (mode);
8996 store_expr (TREE_OPERAND (exp, 2), temp,
8997 modifier == EXPAND_STACK_PARM ? 2 : 0);
8998 jumpifnot (TREE_OPERAND (exp, 0), op0);
8999
9000 start_cleanup_deferral ();
9001 store_expr (TREE_OPERAND (exp, 1), temp,
9002 modifier == EXPAND_STACK_PARM ? 2 : 0);
9003 op1 = op0;
9004 }
9005 else
9006 {
9007 op1 = gen_label_rtx ();
9008 jumpifnot (TREE_OPERAND (exp, 0), op0);
9009
9010 start_cleanup_deferral ();
9011
9012 /* One branch of the cond can be void, if it never returns. For
9013 example, A ? throw : E.  */
9014 if (temp != 0
9015 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
9016 store_expr (TREE_OPERAND (exp, 1), temp,
9017 modifier == EXPAND_STACK_PARM ? 2 : 0);
9018 else
9019 expand_expr (TREE_OPERAND (exp, 1),
9020 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
9021 end_cleanup_deferral ();
9022 emit_queue ();
9023 emit_jump_insn (gen_jump (op1));
9024 emit_barrier ();
9025 emit_label (op0);
9026 start_cleanup_deferral ();
9027 if (temp != 0
9028 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
9029 store_expr (TREE_OPERAND (exp, 2), temp,
9030 modifier == EXPAND_STACK_PARM ? 2 : 0);
9031 else
9032 expand_expr (TREE_OPERAND (exp, 2),
9033 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
9034 }
9035
9036 end_cleanup_deferral ();
9037
9038 emit_queue ();
9039 emit_label (op1);
9040 OK_DEFER_POP;
9041
9042 return temp;
9043 }
9044
9045 case TARGET_EXPR:
9046 {
9047 /* Something needs to be initialized, but we didn't know
9048 where that thing was when building the tree. For example,
9049 it could be the return value of a function, or a parameter
9050 to a function that is laid out on the stack, or a temporary
9051 variable which must be passed by reference.
9052
9053 We guarantee that the expression will either be constructed
9054 or copied into our original target. */
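/* A typical case (assuming a C++-style front end) is
   S s = make_s ();
   where S is returned in memory: the TARGET_EXPR's slot is the
   variable (or a temporary), and the call constructs its result
   directly into that slot.  */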
9055
9056 tree slot = TREE_OPERAND (exp, 0);
9057 tree cleanups = NULL_TREE;
9058 tree exp1;
9059
9060 if (TREE_CODE (slot) != VAR_DECL)
9061 abort ();
9062
9063 if (! ignore)
9064 target = original_target;
9065
9066 /* Set this here so that if we get a target that refers to a
9067 register variable that's already been used, put_reg_into_stack
9068 knows that it should fix up those uses. */
9069 TREE_USED (slot) = 1;
9070
9071 if (target == 0)
9072 {
9073 if (DECL_RTL_SET_P (slot))
9074 {
9075 target = DECL_RTL (slot);
9076 /* If we have already expanded the slot, don't do
9077 it again. (mrs) */
9078 if (TREE_OPERAND (exp, 1) == NULL_TREE)
9079 return target;
9080 }
9081 else
9082 {
9083 target = assign_temp (type, 2, 0, 1);
9084 /* All temp slots at this level must not conflict. */
9085 preserve_temp_slots (target);
9086 SET_DECL_RTL (slot, target);
9087 if (TREE_ADDRESSABLE (slot))
9088 put_var_into_stack (slot, /*rescan=*/false);
9089
9090 /* Since SLOT is not known to the called function
9091 to belong to its stack frame, we must build an explicit
9092 cleanup. This case occurs when we must build up a reference
9093 to pass the reference as an argument. In this case,
9094 it is very likely that such a reference need not be
9095 built here. */
9096
9097 if (TREE_OPERAND (exp, 2) == 0)
9098 TREE_OPERAND (exp, 2)
9099 = (*lang_hooks.maybe_build_cleanup) (slot);
9100 cleanups = TREE_OPERAND (exp, 2);
9101 }
9102 }
9103 else
9104 {
9105 /* This case does occur, when expanding a parameter which
9106 needs to be constructed on the stack. The target
9107 is the actual stack address that we want to initialize.
9108 The function we call will perform the cleanup in this case. */
9109
9110 /* If we have already assigned it space, use that space,
9111 not target that we were passed in, as our target
9112 parameter is only a hint. */
9113 if (DECL_RTL_SET_P (slot))
9114 {
9115 target = DECL_RTL (slot);
9116 /* If we have already expanded the slot, don't do
9117 it again. (mrs) */
9118 if (TREE_OPERAND (exp, 1) == NULL_TREE)
9119 return target;
9120 }
9121 else
9122 {
9123 SET_DECL_RTL (slot, target);
9124 /* If we must have an addressable slot, then make sure that
9125 the RTL that we just stored in slot is OK. */
9126 if (TREE_ADDRESSABLE (slot))
9127 put_var_into_stack (slot, /*rescan=*/true);
9128 }
9129 }
9130
9131 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
9132 /* Mark it as expanded. */
9133 TREE_OPERAND (exp, 1) = NULL_TREE;
9134
9135 store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
9136
9137 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
9138
9139 return target;
9140 }
9141
9142 case INIT_EXPR:
9143 {
9144 tree lhs = TREE_OPERAND (exp, 0);
9145 tree rhs = TREE_OPERAND (exp, 1);
9146
9147 temp = expand_assignment (lhs, rhs, ! ignore);
9148 return temp;
9149 }
9150
9151 case MODIFY_EXPR:
9152 {
9153 /* If lhs is complex, expand calls in rhs before computing it.
9154 That's so we don't compute a pointer and save it over a
9155 call. If lhs is simple, compute it first so we can give it
9156 as a target if the rhs is just a call. This avoids an
9157 extra temp and copy and that prevents a partial-subsumption
9158 which makes bad code. Actually we could treat
9159 component_ref's of vars like vars. */
9160
9161 tree lhs = TREE_OPERAND (exp, 0);
9162 tree rhs = TREE_OPERAND (exp, 1);
9163
9164 temp = 0;
9165
9166 /* Check for |= or &= of a bitfield of size one into another bitfield
9167 of size 1. In this case, (unless we need the result of the
9168 assignment) we can do this more efficiently with a
9169 test followed by an assignment, if necessary.
9170
9171 ??? At this point, we can't get a BIT_FIELD_REF here. But if
9172 things change so we do, this code should be enhanced to
9173 support it. */
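/* Concretely, given
   struct { unsigned a : 1, b : 1; } s;
   the statement  s.a |= s.b;  (value unused) is expanded as
   if (s.b) s.a = 1;
   i.e. a test and conditional store instead of load/or/store.  */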
9174 if (ignore
9175 && TREE_CODE (lhs) == COMPONENT_REF
9176 && (TREE_CODE (rhs) == BIT_IOR_EXPR
9177 || TREE_CODE (rhs) == BIT_AND_EXPR)
9178 && TREE_OPERAND (rhs, 0) == lhs
9179 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
9180 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
9181 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
9182 {
9183 rtx label = gen_label_rtx ();
9184
9185 do_jump (TREE_OPERAND (rhs, 1),
9186 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
9187 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
9188 expand_assignment (lhs, convert (TREE_TYPE (rhs),
9189 (TREE_CODE (rhs) == BIT_IOR_EXPR
9190 ? integer_one_node
9191 : integer_zero_node)),
9192 0);
9193 do_pending_stack_adjust ();
9194 emit_label (label);
9195 return const0_rtx;
9196 }
9197
9198 temp = expand_assignment (lhs, rhs, ! ignore);
9199
9200 return temp;
9201 }
9202
9203 case RETURN_EXPR:
9204 if (!TREE_OPERAND (exp, 0))
9205 expand_null_return ();
9206 else
9207 expand_return (TREE_OPERAND (exp, 0));
9208 return const0_rtx;
9209
9210 case PREINCREMENT_EXPR:
9211 case PREDECREMENT_EXPR:
9212 return expand_increment (exp, 0, ignore);
9213
9214 case POSTINCREMENT_EXPR:
9215 case POSTDECREMENT_EXPR:
9216 /* Faster to treat as pre-increment if result is not used. */
9217 return expand_increment (exp, ! ignore, ignore);
9218
9219 case ADDR_EXPR:
9220 if (modifier == EXPAND_STACK_PARM)
9221 target = 0;
9222 /* Are we taking the address of a nested function? */
9223 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
9224 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
9225 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
9226 && ! TREE_STATIC (exp))
9227 {
9228 op0 = trampoline_address (TREE_OPERAND (exp, 0));
9229 op0 = force_operand (op0, target);
9230 }
9231 /* If we are taking the address of something erroneous, just
9232 return a zero. */
9233 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9234 return const0_rtx;
9235 /* If we are taking the address of a constant and are at the
9236 top level, we have to use output_constant_def since we can't
9237 call force_const_mem at top level. */
9238 else if (cfun == 0
9239 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
9240 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
9241 == 'c')))
9242 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
9243 else
9244 {
9245 /* We make sure to pass const0_rtx down if we came in with
9246 ignore set, to avoid doing the cleanups twice for something. */
9247 op0 = expand_expr (TREE_OPERAND (exp, 0),
9248 ignore ? const0_rtx : NULL_RTX, VOIDmode,
9249 (modifier == EXPAND_INITIALIZER
9250 ? modifier : EXPAND_CONST_ADDRESS));
9251
9252 /* If we are going to ignore the result, OP0 will have been set
9253 to const0_rtx, so just return it. Don't get confused and
9254 think we are taking the address of the constant. */
9255 if (ignore)
9256 return op0;
9257
9258 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
9259 clever and returns a REG when given a MEM. */
9260 op0 = protect_from_queue (op0, 1);
9261
9262 /* We would like the object in memory. If it is a constant, we can
9263 have it be statically allocated into memory. For a non-constant,
9264 we need to allocate some memory and store the value into it. */
9265
9266 if (CONSTANT_P (op0))
9267 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
9268 op0);
9269 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9270 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
9271 || GET_CODE (op0) == PARALLEL || GET_CODE (op0) == LO_SUM)
9272 {
9273 /* If the operand is a SAVE_EXPR, we can deal with this by
9274 forcing the SAVE_EXPR into memory. */
9275 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
9276 {
9277 put_var_into_stack (TREE_OPERAND (exp, 0),
9278 /*rescan=*/true);
9279 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
9280 }
9281 else
9282 {
9283 /* If this object is in a register, it can't be BLKmode. */
9284 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9285 rtx memloc = assign_temp (inner_type, 1, 1, 1);
9286
9287 if (GET_CODE (op0) == PARALLEL)
9288 /* Handle calls that pass values in multiple
9289 non-contiguous locations. The Irix 6 ABI has examples
9290 of this. */
9291 emit_group_store (memloc, op0, inner_type,
9292 int_size_in_bytes (inner_type));
9293 else
9294 emit_move_insn (memloc, op0);
9295
9296 op0 = memloc;
9297 }
9298 }
9299
9300 if (GET_CODE (op0) != MEM)
9301 abort ();
9302
9303 mark_temp_addr_taken (op0);
9304 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
9305 {
9306 op0 = XEXP (op0, 0);
9307 #ifdef POINTERS_EXTEND_UNSIGNED
9308 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9309 && mode == ptr_mode)
9310 op0 = convert_memory_address (ptr_mode, op0);
9311 #endif
9312 return op0;
9313 }
9314
9315 /* If OP0 is not aligned at least as much as the type requires, we
9316 need to make a temporary, copy OP0 to it, and take the address of
9317 the temporary. We want to use the alignment of the type, not of
9318 the operand. Note that this is incorrect for FUNCTION_TYPE, but
9319 the test for BLKmode means that can't happen. The test for
9320 BLKmode is because we never make mis-aligned MEMs with
9321 non-BLKmode.
9322
9323 We don't need to do this at all if the machine doesn't have
9324 strict alignment. */
9325 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
9326 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
9327 > MEM_ALIGN (op0))
9328 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
9329 {
9330 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9331 rtx new;
9332
9333 if (TYPE_ALIGN_OK (inner_type))
9334 abort ();
9335
9336 if (TREE_ADDRESSABLE (inner_type))
9337 {
9338 /* We can't make a bitwise copy of this object, so fail. */
9339 error ("cannot take the address of an unaligned member");
9340 return const0_rtx;
9341 }
9342
9343 new = assign_stack_temp_for_type
9344 (TYPE_MODE (inner_type),
9345 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
9346 : int_size_in_bytes (inner_type),
9347 1, build_qualified_type (inner_type,
9348 (TYPE_QUALS (inner_type)
9349 | TYPE_QUAL_CONST)));
9350
9351 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
9352 (modifier == EXPAND_STACK_PARM
9353 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9354
9355 op0 = new;
9356 }
9357
9358 op0 = force_operand (XEXP (op0, 0), target);
9359 }
9360
9361 if (flag_force_addr
9362 && GET_CODE (op0) != REG
9363 && modifier != EXPAND_CONST_ADDRESS
9364 && modifier != EXPAND_INITIALIZER
9365 && modifier != EXPAND_SUM)
9366 op0 = force_reg (Pmode, op0);
9367
9368 if (GET_CODE (op0) == REG
9369 && ! REG_USERVAR_P (op0))
9370 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
9371
9372 #ifdef POINTERS_EXTEND_UNSIGNED
9373 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9374 && mode == ptr_mode)
9375 op0 = convert_memory_address (ptr_mode, op0);
9376 #endif
9377
9378 return op0;
9379
9380 case ENTRY_VALUE_EXPR:
9381 abort ();
9382
9383 /* COMPLEX type for Extended Pascal & Fortran */
9384 case COMPLEX_EXPR:
9385 {
9386 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9387 rtx insns;
9388
9389 /* Get the rtx code of the operands. */
9390 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9391 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
9392
9393 if (! target)
9394 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
9395
9396 start_sequence ();
9397
9398 /* Move the real (op0) and imaginary (op1) parts to their location. */
9399 emit_move_insn (gen_realpart (mode, target), op0);
9400 emit_move_insn (gen_imagpart (mode, target), op1);
9401
9402 insns = get_insns ();
9403 end_sequence ();
9404
9405 /* Complex construction should appear as a single unit. */
9406 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
9407 each with a separate pseudo as destination.
9408 It's not correct for flow to treat them as a unit. */
9409 if (GET_CODE (target) != CONCAT)
9410 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
9411 else
9412 emit_insn (insns);
9413
9414 return target;
9415 }
9416
9417 case REALPART_EXPR:
9418 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9419 return gen_realpart (mode, op0);
9420
9421 case IMAGPART_EXPR:
9422 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9423 return gen_imagpart (mode, op0);
9424
9425 case CONJ_EXPR:
9426 {
9427 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9428 rtx imag_t;
9429 rtx insns;
9430
9431 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9432
9433 if (! target)
9434 target = gen_reg_rtx (mode);
9435
9436 start_sequence ();
9437
9438 /* Store the realpart and the negated imagpart to target. */
9439 emit_move_insn (gen_realpart (partmode, target),
9440 gen_realpart (partmode, op0));
9441
9442 imag_t = gen_imagpart (partmode, target);
9443 temp = expand_unop (partmode,
9444 ! unsignedp && flag_trapv
9445 && (GET_MODE_CLASS(partmode) == MODE_INT)
9446 ? negv_optab : neg_optab,
9447 gen_imagpart (partmode, op0), imag_t, 0);
9448 if (temp != imag_t)
9449 emit_move_insn (imag_t, temp);
9450
9451 insns = get_insns ();
9452 end_sequence ();
9453
9454 /* Conjugate should appear as a single unit.
9455 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
9456 each with a separate pseudo as destination.
9457 It's not correct for flow to treat them as a unit. */
9458 if (GET_CODE (target) != CONCAT)
9459 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
9460 else
9461 emit_insn (insns);
9462
9463 return target;
9464 }
9465
9466 case TRY_CATCH_EXPR:
9467 {
9468 tree handler = TREE_OPERAND (exp, 1);
9469
9470 expand_eh_region_start ();
9471
9472 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9473
9474 expand_eh_region_end_cleanup (handler);
9475
9476 return op0;
9477 }
9478
9479 case TRY_FINALLY_EXPR:
9480 {
9481 tree try_block = TREE_OPERAND (exp, 0);
9482 tree finally_block = TREE_OPERAND (exp, 1);
9483
9484 if (!optimize || unsafe_for_reeval (finally_block) > 1)
9485 {
9486 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
9487 is not sufficient, so we cannot expand the block twice.
9488 So we play games with GOTO_SUBROUTINE_EXPR to let us
9489 expand the thing only once. */
9490 /* When not optimizing, we go ahead with this form since
9491 (1) user breakpoints operate more predictably without
9492 code duplication, and
9493 (2) we're not running any of the global optimizers
9494 that would explode in time/space with the highly
9495 connected CFG created by the indirect branching. */
9496
9497 rtx finally_label = gen_label_rtx ();
9498 rtx done_label = gen_label_rtx ();
9499 rtx return_link = gen_reg_rtx (Pmode);
9500 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
9501 (tree) finally_label, (tree) return_link);
9502 TREE_SIDE_EFFECTS (cleanup) = 1;
9503
9504 /* Start a new binding layer that will keep track of all cleanup
9505 actions to be performed. */
9506 expand_start_bindings (2);
9507 target_temp_slot_level = temp_slot_level;
9508
9509 expand_decl_cleanup (NULL_TREE, cleanup);
9510 op0 = expand_expr (try_block, target, tmode, modifier);
9511
9512 preserve_temp_slots (op0);
9513 expand_end_bindings (NULL_TREE, 0, 0);
9514 emit_jump (done_label);
9515 emit_label (finally_label);
9516 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
9517 emit_indirect_jump (return_link);
9518 emit_label (done_label);
9519 }
9520 else
9521 {
9522 expand_start_bindings (2);
9523 target_temp_slot_level = temp_slot_level;
9524
9525 expand_decl_cleanup (NULL_TREE, finally_block);
9526 op0 = expand_expr (try_block, target, tmode, modifier);
9527
9528 preserve_temp_slots (op0);
9529 expand_end_bindings (NULL_TREE, 0, 0);
9530 }
9531
9532 return op0;
9533 }
9534
9535 case GOTO_SUBROUTINE_EXPR:
9536 {
9537 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9538 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9539 rtx return_address = gen_label_rtx ();
9540 emit_move_insn (return_link,
9541 gen_rtx_LABEL_REF (Pmode, return_address));
9542 emit_jump (subr);
9543 emit_label (return_address);
9544 return const0_rtx;
9545 }
9546
9547 case VA_ARG_EXPR:
9548 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9549
9550 case EXC_PTR_EXPR:
9551 return get_exception_pointer (cfun);
9552
9553 case FDESC_EXPR:
9554 /* Function descriptors are not valid except for as
9555 initialization constants, and should not be expanded. */
9556 abort ();
9557
9558 default:
9559 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
9560 }
9561
9562 /* Here to do an ordinary binary operator, generating an instruction
9563 from the optab already placed in `this_optab'. */
9564 binop:
9565 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
9566 subtarget = 0;
9567 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
9568 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9569 binop2:
9570 if (modifier == EXPAND_STACK_PARM)
9571 target = 0;
9572 temp = expand_binop (mode, this_optab, op0, op1, target,
9573 unsignedp, OPTAB_LIB_WIDEN);
9574 if (temp == 0)
9575 abort ();
9576 return temp;
9577 }
9578 \f
9579 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9580 when applied to the address of EXP produces an address known to be
9581 aligned more than BIGGEST_ALIGNMENT. */
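/* The pattern recognized below is, presumably, offsets of the form
   (- (sizetype) &EXP) & (ALIGN - 1)
   with ALIGN a power of 2 larger than BIGGEST_ALIGNMENT; adding such
   an offset to the address of EXP rounds it up to an ALIGN boundary,
   so the resulting address is known to be ALIGN-aligned.  */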
9582
9583 static int
9584 is_aligning_offset (tree offset, tree exp)
9585 {
9586 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9587 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9588 || TREE_CODE (offset) == NOP_EXPR
9589 || TREE_CODE (offset) == CONVERT_EXPR
9590 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9591 offset = TREE_OPERAND (offset, 0);
9592
9593 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9594 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9595 if (TREE_CODE (offset) != BIT_AND_EXPR
9596 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9597 || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
9598 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9599 return 0;
9600
9601 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9602 It must be NEGATE_EXPR. Then strip any more conversions. */
9603 offset = TREE_OPERAND (offset, 0);
9604 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9605 || TREE_CODE (offset) == NOP_EXPR
9606 || TREE_CODE (offset) == CONVERT_EXPR)
9607 offset = TREE_OPERAND (offset, 0);
9608
9609 if (TREE_CODE (offset) != NEGATE_EXPR)
9610 return 0;
9611
9612 offset = TREE_OPERAND (offset, 0);
9613 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9614 || TREE_CODE (offset) == NOP_EXPR
9615 || TREE_CODE (offset) == CONVERT_EXPR)
9616 offset = TREE_OPERAND (offset, 0);
9617
9618 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9619 whose type is the same as EXP. */
9620 return (TREE_CODE (offset) == ADDR_EXPR
9621 && (TREE_OPERAND (offset, 0) == exp
9622 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9623 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9624 == TREE_TYPE (exp)))));
9625 }
9626 \f
9627 /* Return the tree node if an ARG corresponds to a string constant or zero
9628 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9629 in bytes within the string that ARG is accessing. The type of the
9630 offset will be `sizetype'. */
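/* For example, for the argument  "hello" + 2  (an ADDR_EXPR of a
   STRING_CST plus a constant) the STRING_CST for "hello" is returned
   and *PTR_OFFSET is set to (sizetype) 2; for a plain ADDR_EXPR of a
   STRING_CST the offset is zero.  */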
9631
9632 tree
9633 string_constant (tree arg, tree *ptr_offset)
9634 {
9635 STRIP_NOPS (arg);
9636
9637 if (TREE_CODE (arg) == ADDR_EXPR
9638 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9639 {
9640 *ptr_offset = size_zero_node;
9641 return TREE_OPERAND (arg, 0);
9642 }
9643 else if (TREE_CODE (arg) == PLUS_EXPR)
9644 {
9645 tree arg0 = TREE_OPERAND (arg, 0);
9646 tree arg1 = TREE_OPERAND (arg, 1);
9647
9648 STRIP_NOPS (arg0);
9649 STRIP_NOPS (arg1);
9650
9651 if (TREE_CODE (arg0) == ADDR_EXPR
9652 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9653 {
9654 *ptr_offset = convert (sizetype, arg1);
9655 return TREE_OPERAND (arg0, 0);
9656 }
9657 else if (TREE_CODE (arg1) == ADDR_EXPR
9658 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9659 {
9660 *ptr_offset = convert (sizetype, arg0);
9661 return TREE_OPERAND (arg1, 0);
9662 }
9663 }
9664
9665 return 0;
9666 }
9667 \f
9668 /* Expand code for a post- or pre- increment or decrement
9669 and return the RTX for the result.
9670 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9671
9672 static rtx
9673 expand_increment (tree exp, int post, int ignore)
9674 {
9675 rtx op0, op1;
9676 rtx temp, value;
9677 tree incremented = TREE_OPERAND (exp, 0);
9678 optab this_optab = add_optab;
9679 int icode;
9680 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9681 int op0_is_copy = 0;
9682 int single_insn = 0;
9683 /* 1 means we can't store into OP0 directly,
9684 because it is a subreg narrower than a word,
9685 and we don't dare clobber the rest of the word. */
9686 int bad_subreg = 0;
9687
9688 /* Stabilize any component ref that might need to be
9689 evaluated more than once below. */
9690 if (!post
9691 || TREE_CODE (incremented) == BIT_FIELD_REF
9692 || (TREE_CODE (incremented) == COMPONENT_REF
9693 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9694 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9695 incremented = stabilize_reference (incremented);
9696 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9697 ones into save exprs so that they don't accidentally get evaluated
9698 more than once by the code below. */
9699 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9700 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9701 incremented = save_expr (incremented);
9702
9703 /* Compute the operands as RTX.
9704 Note whether OP0 is the actual lvalue or a copy of it:
9705 I believe it is a copy iff it is a register or subreg
9706 and insns were generated in computing it. */
9707
9708 temp = get_last_insn ();
9709 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9710
9711 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9712 in place but instead must do sign- or zero-extension during assignment,
9713 so we copy it into a new register and let the code below use it as
9714 a copy.
9715
9716 Note that we can safely modify this SUBREG since it is know not to be
9717 shared (it was made by the expand_expr call above). */
9718
9719 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9720 {
9721 if (post)
9722 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9723 else
9724 bad_subreg = 1;
9725 }
9726 else if (GET_CODE (op0) == SUBREG
9727 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9728 {
9729 /* We cannot increment this SUBREG in place. If we are
9730 post-incrementing, get a copy of the old value. Otherwise,
9731 just mark that we cannot increment in place. */
9732 if (post)
9733 op0 = copy_to_reg (op0);
9734 else
9735 bad_subreg = 1;
9736 }
9737
9738 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9739 && temp != get_last_insn ());
9740 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9741
9742 /* Decide whether incrementing or decrementing. */
9743 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9744 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9745 this_optab = sub_optab;
9746
9747 /* Convert decrement by a constant into a negative increment. */
9748 if (this_optab == sub_optab
9749 && GET_CODE (op1) == CONST_INT)
9750 {
9751 op1 = GEN_INT (-INTVAL (op1));
9752 this_optab = add_optab;
9753 }
9754
9755 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9756 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9757
9758 /* For a preincrement, see if we can do this with a single instruction. */
9759 if (!post)
9760 {
9761 icode = (int) this_optab->handlers[(int) mode].insn_code;
9762 if (icode != (int) CODE_FOR_nothing
9763 /* Make sure that OP0 is valid for operands 0 and 1
9764 of the insn we want to queue. */
9765 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9766 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9767 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9768 single_insn = 1;
9769 }
9770
9771 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9772 then we cannot just increment OP0. We must therefore contrive to
9773 increment the original value. Then, for postincrement, we can return
9774 OP0 since it is a copy of the old value. For preincrement, expand here
9775 unless we can do it with a single insn.
9776
9777 Likewise if storing directly into OP0 would clobber high bits
9778 we need to preserve (bad_subreg). */
9779 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9780 {
9781 /* This is the easiest way to increment the value wherever it is.
9782 Problems with multiple evaluation of INCREMENTED are prevented
9783 because either (1) it is a component_ref or preincrement,
9784 in which case it was stabilized above, or (2) it is an array_ref
9785 with constant index in an array in a register, which is
9786 safe to reevaluate. */
9787 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9788 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9789 ? MINUS_EXPR : PLUS_EXPR),
9790 TREE_TYPE (exp),
9791 incremented,
9792 TREE_OPERAND (exp, 1));
9793
9794 while (TREE_CODE (incremented) == NOP_EXPR
9795 || TREE_CODE (incremented) == CONVERT_EXPR)
9796 {
9797 newexp = convert (TREE_TYPE (incremented), newexp);
9798 incremented = TREE_OPERAND (incremented, 0);
9799 }
9800
9801 temp = expand_assignment (incremented, newexp, ! post && ! ignore);
9802 return post ? op0 : temp;
9803 }
9804
9805 if (post)
9806 {
9807 /* We have a true reference to the value in OP0.
9808 If there is an insn to add or subtract in this mode, queue it.
9809 Queueing the increment insn avoids the register shuffling
9810 that often results if we must increment now and first save
9811 the old value for subsequent use. */
9812
9813 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9814 op0 = stabilize (op0);
9815 #endif
9816
9817 icode = (int) this_optab->handlers[(int) mode].insn_code;
9818 if (icode != (int) CODE_FOR_nothing
9819 /* Make sure that OP0 is valid for operands 0 and 1
9820 of the insn we want to queue. */
9821 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9822 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9823 {
9824 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9825 op1 = force_reg (mode, op1);
9826
9827 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9828 }
9829 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9830 {
9831 rtx addr = (general_operand (XEXP (op0, 0), mode)
9832 ? force_reg (Pmode, XEXP (op0, 0))
9833 : copy_to_reg (XEXP (op0, 0)));
9834 rtx temp, result;
9835
9836 op0 = replace_equiv_address (op0, addr);
9837 temp = force_reg (GET_MODE (op0), op0);
9838 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9839 op1 = force_reg (mode, op1);
9840
9841 /* The increment queue is LIFO, thus we have to `queue'
9842 the instructions in reverse order. */
9843 enqueue_insn (op0, gen_move_insn (op0, temp));
9844 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9845 return result;
9846 }
9847 }
9848
9849 /* Preincrement, or we can't increment with one simple insn. */
9850 if (post)
9851 /* Save a copy of the value before inc or dec, to return it later. */
9852 temp = value = copy_to_reg (op0);
9853 else
9854 /* Arrange to return the incremented value. */
9855 /* Copy the rtx because expand_binop will protect from the queue,
9856 and the results of that would be invalid for us to return
9857 if our caller does emit_queue before using our result. */
9858 temp = copy_rtx (value = op0);
9859
9860 /* Increment however we can. */
9861 op1 = expand_binop (mode, this_optab, value, op1, op0,
9862 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9863
9864 /* Make sure the value is stored into OP0. */
9865 if (op1 != op0)
9866 emit_move_insn (op0, op1);
9867
9868 return temp;
9869 }
9870 \f
9871 /* Generate code to calculate EXP using a store-flag instruction
9872 and return an rtx for the result. EXP is either a comparison
9873 or a TRUTH_NOT_EXPR whose operand is a comparison.
9874
9875 If TARGET is nonzero, store the result there if convenient.
9876
9877 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9878 cheap.
9879
9880 Return zero if there is no suitable set-flag instruction
9881 available on this machine.
9882
9883 Once expand_expr has been called on the arguments of the comparison,
9884 we are committed to doing the store flag, since it is not safe to
9885 re-evaluate the expression. We emit the store-flag insn by calling
9886 emit_store_flag, but only expand the arguments if we have a reason
9887 to believe that emit_store_flag will be successful. If we think that
9888 it will, but it isn't, we have to simulate the store-flag with a
9889 set/jump/set sequence. */
9890
9891 static rtx
9892 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
9893 {
9894 enum rtx_code code;
9895 tree arg0, arg1, type;
9896 tree tem;
9897 enum machine_mode operand_mode;
9898 int invert = 0;
9899 int unsignedp;
9900 rtx op0, op1;
9901 enum insn_code icode;
9902 rtx subtarget = target;
9903 rtx result, label;
9904
9905 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9906 result at the end. We can't simply invert the test since it would
9907 have already been inverted if it were valid. This case occurs for
9908 some floating-point comparisons. */
9909
9910 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9911 invert = 1, exp = TREE_OPERAND (exp, 0);
9912
9913 arg0 = TREE_OPERAND (exp, 0);
9914 arg1 = TREE_OPERAND (exp, 1);
9915
9916 /* Don't crash if the comparison was erroneous. */
9917 if (arg0 == error_mark_node || arg1 == error_mark_node)
9918 return const0_rtx;
9919
9920 type = TREE_TYPE (arg0);
9921 operand_mode = TYPE_MODE (type);
9922 unsignedp = TREE_UNSIGNED (type);
9923
9924 /* We won't bother with BLKmode store-flag operations because it would mean
9925 passing a lot of information to emit_store_flag. */
9926 if (operand_mode == BLKmode)
9927 return 0;
9928
9929 /* We won't bother with store-flag operations involving function pointers
9930 when function pointers must be canonicalized before comparisons. */
9931 #ifdef HAVE_canonicalize_funcptr_for_compare
9932 if (HAVE_canonicalize_funcptr_for_compare
9933 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9934 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9935 == FUNCTION_TYPE))
9936 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9937 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9938 == FUNCTION_TYPE))))
9939 return 0;
9940 #endif
9941
9942 STRIP_NOPS (arg0);
9943 STRIP_NOPS (arg1);
9944
9945 /* Get the rtx comparison code to use. We know that EXP is a comparison
9946 operation of some type. Some comparisons against 1 and -1 can be
9947 converted to comparisons with zero. Do so here so that the tests
9948 below will be aware that we have a comparison with zero. These
9949 tests will not catch constants in the first operand, but constants
9950 are rarely passed as the first operand. */
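/* For instance,  x < 1  becomes  x <= 0  and  x >= 1  becomes  x > 0;
   for signed operands  x <= -1  becomes  x < 0  and  x > -1  becomes
   x >= 0.  The tests below then only need to recognize comparisons
   against zero.  */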
9951
9952 switch (TREE_CODE (exp))
9953 {
9954 case EQ_EXPR:
9955 code = EQ;
9956 break;
9957 case NE_EXPR:
9958 code = NE;
9959 break;
9960 case LT_EXPR:
9961 if (integer_onep (arg1))
9962 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9963 else
9964 code = unsignedp ? LTU : LT;
9965 break;
9966 case LE_EXPR:
9967 if (! unsignedp && integer_all_onesp (arg1))
9968 arg1 = integer_zero_node, code = LT;
9969 else
9970 code = unsignedp ? LEU : LE;
9971 break;
9972 case GT_EXPR:
9973 if (! unsignedp && integer_all_onesp (arg1))
9974 arg1 = integer_zero_node, code = GE;
9975 else
9976 code = unsignedp ? GTU : GT;
9977 break;
9978 case GE_EXPR:
9979 if (integer_onep (arg1))
9980 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9981 else
9982 code = unsignedp ? GEU : GE;
9983 break;
9984
9985 case UNORDERED_EXPR:
9986 code = UNORDERED;
9987 break;
9988 case ORDERED_EXPR:
9989 code = ORDERED;
9990 break;
9991 case UNLT_EXPR:
9992 code = UNLT;
9993 break;
9994 case UNLE_EXPR:
9995 code = UNLE;
9996 break;
9997 case UNGT_EXPR:
9998 code = UNGT;
9999 break;
10000 case UNGE_EXPR:
10001 code = UNGE;
10002 break;
10003 case UNEQ_EXPR:
10004 code = UNEQ;
10005 break;
10006
10007 default:
10008 abort ();
10009 }
10010
10011 /* Put a constant second. */
10012 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10013 {
10014 tem = arg0; arg0 = arg1; arg1 = tem;
10015 code = swap_condition (code);
10016 }
10017
10018 /* If this is an equality or inequality test of a single bit, we can
10019 do this by shifting the bit being tested to the low-order bit and
10020 masking the result with the constant 1. If the condition was EQ,
10021 we xor it with 1. This does not require an scc insn and is faster
10022 than an scc insn even if we have it.
10023
10024 The code to make this transformation was moved into fold_single_bit_test,
10025 so we just call into the folder and expand its result. */
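/* For example,  (x & 8) != 0  expands as  (x >> 3) & 1,  and
   (x & 8) == 0  as  ((x >> 3) & 1) ^ 1,  with no scc or branch
   instruction required.  */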
10026
10027 if ((code == NE || code == EQ)
10028 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10029 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10030 {
10031 tree type = (*lang_hooks.types.type_for_mode) (mode, unsignedp);
10032 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
10033 arg0, arg1, type),
10034 target, VOIDmode, EXPAND_NORMAL);
10035 }
10036
10037 /* Now see if we are likely to be able to do this. Return if not. */
10038 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10039 return 0;
10040
10041 icode = setcc_gen_code[(int) code];
10042 if (icode == CODE_FOR_nothing
10043 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10044 {
10045 /* We can only do this if it is one of the special cases that
10046 can be handled without an scc insn. */
10047 if ((code == LT && integer_zerop (arg1))
10048 || (! only_cheap && code == GE && integer_zerop (arg1)))
10049 ;
10050 else if (BRANCH_COST >= 0
10051 && ! only_cheap && (code == NE || code == EQ)
10052 && TREE_CODE (type) != REAL_TYPE
10053 && ((abs_optab->handlers[(int) operand_mode].insn_code
10054 != CODE_FOR_nothing)
10055 || (ffs_optab->handlers[(int) operand_mode].insn_code
10056 != CODE_FOR_nothing)))
10057 ;
10058 else
10059 return 0;
10060 }
10061
10062 if (! get_subtarget (target)
10063 || GET_MODE (subtarget) != operand_mode
10064 || ! safe_from_p (subtarget, arg1, 1))
10065 subtarget = 0;
10066
10067 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10068 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10069
10070 if (target == 0)
10071 target = gen_reg_rtx (mode);
10072
10073 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10074 because, if the emit_store_flag does anything it will succeed and
10075 OP0 and OP1 will not be used subsequently. */
10076
10077 result = emit_store_flag (target, code,
10078 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10079 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10080 operand_mode, unsignedp, 1);
10081
10082 if (result)
10083 {
10084 if (invert)
10085 result = expand_binop (mode, xor_optab, result, const1_rtx,
10086 result, 0, OPTAB_LIB_WIDEN);
10087 return result;
10088 }
10089
10090 /* If this failed, we have to do this with set/compare/jump/set code. */
10091 if (GET_CODE (target) != REG
10092 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10093 target = gen_reg_rtx (GET_MODE (target));
10094
10095 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10096 result = compare_from_rtx (op0, op1, code, unsignedp,
10097 operand_mode, NULL_RTX);
10098 if (GET_CODE (result) == CONST_INT)
10099 return (((result == const0_rtx && ! invert)
10100 || (result != const0_rtx && invert))
10101 ? const0_rtx : const1_rtx);
10102
10103 /* The code of RESULT may not match CODE if compare_from_rtx
10104 decided to swap its operands and reverse the original code.
10105
10106 We know that compare_from_rtx returns either a CONST_INT or
10107 a new comparison code, so it is safe to just extract the
10108 code from RESULT. */
10109 code = GET_CODE (result);
10110
10111 label = gen_label_rtx ();
10112 if (bcc_gen_fctn[(int) code] == 0)
10113 abort ();
10114
10115 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10116 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10117 emit_label (label);
10118
10119 return target;
10120 }
10121 \f
10122
10123 /* Stubs in case we haven't got a casesi insn. */
10124 #ifndef HAVE_casesi
10125 # define HAVE_casesi 0
10126 # define gen_casesi(a, b, c, d, e) (0)
10127 # define CODE_FOR_casesi CODE_FOR_nothing
10128 #endif
10129
10130 /* If the machine does not have a case insn that compares the bounds,
10131 this means extra overhead for dispatch tables, which raises the
10132 threshold for using them. */
10133 #ifndef CASE_VALUES_THRESHOLD
10134 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
10135 #endif /* CASE_VALUES_THRESHOLD */
10136
10137 unsigned int
10138 case_values_threshold (void)
10139 {
10140 return CASE_VALUES_THRESHOLD;
10141 }
10142
10143 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10144 0 otherwise (i.e. if there is no casesi instruction). */
10145 int
10146 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
10147 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
10148 {
10149 enum machine_mode index_mode = SImode;
10150 int index_bits = GET_MODE_BITSIZE (index_mode);
10151 rtx op1, op2, index;
10152 enum machine_mode op_mode;
10153
10154 if (! HAVE_casesi)
10155 return 0;
10156
10157 /* Convert the index to SImode. */
10158 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10159 {
10160 enum machine_mode omode = TYPE_MODE (index_type);
10161 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
10162
10163 /* We must handle the endpoints in the original mode. */
10164 index_expr = build (MINUS_EXPR, index_type,
10165 index_expr, minval);
10166 minval = integer_zero_node;
10167 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10168 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10169 omode, 1, default_label);
10170 /* Now we can safely truncate. */
10171 index = convert_to_mode (index_mode, index, 0);
10172 }
10173 else
10174 {
10175 if (TYPE_MODE (index_type) != index_mode)
10176 {
10177 index_expr = convert ((*lang_hooks.types.type_for_size)
10178 (index_bits, 0), index_expr);
10179 index_type = TREE_TYPE (index_expr);
10180 }
10181
10182 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10183 }
10184 emit_queue ();
10185 index = protect_from_queue (index, 0);
10186 do_pending_stack_adjust ();
10187
10188 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
10189 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
10190 (index, op_mode))
10191 index = copy_to_mode_reg (op_mode, index);
10192
10193 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
10194
10195 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
10196 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
10197 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
10198 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
10199 (op1, op_mode))
10200 op1 = copy_to_mode_reg (op_mode, op1);
10201
10202 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
10203
10204 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
10205 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
10206 op2, TREE_UNSIGNED (TREE_TYPE (range)));
10207 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
10208 (op2, op_mode))
10209 op2 = copy_to_mode_reg (op_mode, op2);
10210
10211 emit_jump_insn (gen_casesi (index, op1, op2,
10212 table_label, default_label));
10213 return 1;
10214 }
10215
10216 /* Attempt to generate a tablejump instruction; same concept. */
10217 #ifndef HAVE_tablejump
10218 #define HAVE_tablejump 0
10219 #define gen_tablejump(x, y) (0)
10220 #endif
10221
10222 /* Subroutine of the next function.
10223
10224 INDEX is the value being switched on, with the lowest value
10225 in the table already subtracted.
10226 MODE is its expected mode (needed if INDEX is constant).
10227 RANGE is the length of the jump table.
10228 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10229
10230 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10231 index value is out of range. */
10232
10233 static void
10234 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
10235 rtx default_label)
10236 {
10237 rtx temp, vector;
10238
10239 if (INTVAL (range) > cfun->max_jumptable_ents)
10240 cfun->max_jumptable_ents = INTVAL (range);
10241
10242 /* Do an unsigned comparison (in the proper mode) between the index
10243 expression and the value which represents the length of the range.
10244 Since we just finished subtracting the lower bound of the range
10245 from the index expression, this comparison allows us to simultaneously
10246 check that the original index expression value is both greater than
10247 or equal to the minimum value of the range and less than or equal to
10248 the maximum value of the range. */
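/* For a switch whose case values span 3..10, the caller has already
   computed  index = x - 3,  and the single unsigned comparison
   if ((unsigned) index > 7) goto default_label;
   rejects both x < 3 and x > 10 at once.  */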
10249
10250 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10251 default_label);
10252
10253 /* If index is in range, it must fit in Pmode.
10254 Convert to Pmode so we can index with it. */
10255 if (mode != Pmode)
10256 index = convert_to_mode (Pmode, index, 1);
10257
10258 /* Don't let a MEM slip thru, because then INDEX that comes
10259 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10260 and break_out_memory_refs will go to work on it and mess it up. */
10261 #ifdef PIC_CASE_VECTOR_ADDRESS
10262 if (flag_pic && GET_CODE (index) != REG)
10263 index = copy_to_mode_reg (Pmode, index);
10264 #endif
10265
10266 /* If flag_force_addr were to affect this address
10267 it could interfere with the tricky assumptions made
10268 about addresses that contain label-refs,
10269 which may be valid only very near the tablejump itself. */
10270 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10271 GET_MODE_SIZE, because this indicates how large insns are. The other
10272 uses should all be Pmode, because they are addresses. This code
10273 could fail if addresses and insns are not the same size. */
10274 index = gen_rtx_PLUS (Pmode,
10275 gen_rtx_MULT (Pmode, index,
10276 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10277 gen_rtx_LABEL_REF (Pmode, table_label));
10278 #ifdef PIC_CASE_VECTOR_ADDRESS
10279 if (flag_pic)
10280 index = PIC_CASE_VECTOR_ADDRESS (index);
10281 else
10282 #endif
10283 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10284 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10285 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10286 RTX_UNCHANGING_P (vector) = 1;
10287 MEM_NOTRAP_P (vector) = 1;
10288 convert_move (temp, vector, 0);
10289
10290 emit_jump_insn (gen_tablejump (temp, table_label));
10291
10292 /* If we are generating PIC code or if the table is PC-relative, the
10293 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10294 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10295 emit_barrier ();
10296 }
10297
10298 int
10299 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
10300 rtx table_label, rtx default_label)
10301 {
10302 rtx index;
10303
10304 if (! HAVE_tablejump)
10305 return 0;
10306
10307 index_expr = fold (build (MINUS_EXPR, index_type,
10308 convert (index_type, index_expr),
10309 convert (index_type, minval)));
10310 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10311 emit_queue ();
10312 index = protect_from_queue (index, 0);
10313 do_pending_stack_adjust ();
10314
10315 do_tablejump (index, TYPE_MODE (index_type),
10316 convert_modes (TYPE_MODE (index_type),
10317 TYPE_MODE (TREE_TYPE (range)),
10318 expand_expr (range, NULL_RTX,
10319 VOIDmode, 0),
10320 TREE_UNSIGNED (TREE_TYPE (range))),
10321 table_label, default_label);
10322 return 1;
10323 }
10324
10325 /* Nonzero if the mode is a valid vector mode for this architecture.
10326 This returns nonzero even if there is no hardware support for the
10327 vector mode, but we can emulate with narrower modes. */
10328
10329 int
10330 vector_mode_valid_p (enum machine_mode mode)
10331 {
10332 enum mode_class class = GET_MODE_CLASS (mode);
10333 enum machine_mode innermode;
10334
10335 /* Doh! What's going on? */
10336 if (class != MODE_VECTOR_INT
10337 && class != MODE_VECTOR_FLOAT)
10338 return 0;
10339
10340 /* Hardware support. Woo hoo! */
10341 if (VECTOR_MODE_SUPPORTED_P (mode))
10342 return 1;
10343
10344 innermode = GET_MODE_INNER (mode);
10345
10346 /* We should probably return 1 if requesting V4DI and we have no DI,
10347 but do have V2DI, though that case is probably very unlikely. */
10348
10349 /* If we have support for the inner mode, we can safely emulate it.
10350 We may not have V2DI, but we can emulate with a pair of DIs. */
10351 return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
10352 }
10353
10354 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
10355 static rtx
10356 const_vector_from_tree (tree exp)
10357 {
10358 rtvec v;
10359 int units, i;
10360 tree link, elt;
10361 enum machine_mode inner, mode;
10362
10363 mode = TYPE_MODE (TREE_TYPE (exp));
10364
10365 if (is_zeros_p (exp))
10366 return CONST0_RTX (mode);
10367
10368 units = GET_MODE_NUNITS (mode);
10369 inner = GET_MODE_INNER (mode);
10370
10371 v = rtvec_alloc (units);
10372
10373 link = TREE_VECTOR_CST_ELTS (exp);
10374 for (i = 0; link; link = TREE_CHAIN (link), ++i)
10375 {
10376 elt = TREE_VALUE (link);
10377
10378 if (TREE_CODE (elt) == REAL_CST)
10379 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
10380 inner);
10381 else
10382 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
10383 TREE_INT_CST_HIGH (elt),
10384 inner);
10385 }
10386
10387 /* Initialize remaining elements to 0. */
10388 for (; i < units; ++i)
10389 RTVEC_ELT (v, i) = CONST0_RTX (inner);
10390
10391 return gen_rtx_raw_CONST_VECTOR (mode, v);
10392 }
10393
10394 #include "gt-expr.h"