1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "optabs.h"
40 #include "libfuncs.h"
41 #include "recog.h"
42 #include "reload.h"
43 #include "output.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "langhooks.h"
48 #include "intl.h"
49 #include "tm_p.h"
50
51 /* Decide whether a function's arguments should be processed
52 from first to last or from last to first.
53
54 They should if the stack and args grow in opposite directions, but
55 only if we have push insns. */
56
57 #ifdef PUSH_ROUNDING
58
59 #ifndef PUSH_ARGS_REVERSED
60 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
61 #define PUSH_ARGS_REVERSED /* If it's last to first. */
62 #endif
63 #endif
64
65 #endif
66
67 #ifndef STACK_PUSH_CODE
68 #ifdef STACK_GROWS_DOWNWARD
69 #define STACK_PUSH_CODE PRE_DEC
70 #else
71 #define STACK_PUSH_CODE PRE_INC
72 #endif
73 #endif
74
75 /* Assume that case vectors are not pc-relative. */
76 #ifndef CASE_VECTOR_PC_RELATIVE
77 #define CASE_VECTOR_PC_RELATIVE 0
78 #endif
79
80 /* Convert defined/undefined to boolean. */
81 #ifdef TARGET_MEM_FUNCTIONS
82 #undef TARGET_MEM_FUNCTIONS
83 #define TARGET_MEM_FUNCTIONS 1
84 #else
85 #define TARGET_MEM_FUNCTIONS 0
86 #endif
87
88
89 /* If this is nonzero, we do not bother generating VOLATILE
90 around volatile memory references, and we are willing to
91 output indirect addresses. If cse is to follow, we reject
92 indirect addresses so a useful potential cse is generated;
93 if it is used only once, instruction combination will produce
94 the same indirect address eventually. */
95 int cse_not_expected;
96
97 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
98 tree placeholder_list = 0;
99
100 /* This structure is used by move_by_pieces to describe the move to
101 be performed. */
102 struct move_by_pieces
103 {
104 rtx to;
105 rtx to_addr;
106 int autinc_to;
107 int explicit_inc_to;
108 rtx from;
109 rtx from_addr;
110 int autinc_from;
111 int explicit_inc_from;
112 unsigned HOST_WIDE_INT len;
113 HOST_WIDE_INT offset;
114 int reverse;
115 };
116
117 /* This structure is used by store_by_pieces to describe the clear to
118 be performed. */
119
120 struct store_by_pieces
121 {
122 rtx to;
123 rtx to_addr;
124 int autinc_to;
125 int explicit_inc_to;
126 unsigned HOST_WIDE_INT len;
127 HOST_WIDE_INT offset;
128 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
129 PTR constfundata;
130 int reverse;
131 };
132
133 static rtx enqueue_insn PARAMS ((rtx, rtx));
134 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
135 PARAMS ((unsigned HOST_WIDE_INT,
136 unsigned int));
137 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
138 struct move_by_pieces *));
139 static bool block_move_libcall_safe_for_call_parm PARAMS ((void));
140 static bool emit_block_move_via_movstr PARAMS ((rtx, rtx, rtx, unsigned));
141 static rtx emit_block_move_via_libcall PARAMS ((rtx, rtx, rtx));
142 static tree emit_block_move_libcall_fn PARAMS ((int));
143 static void emit_block_move_via_loop PARAMS ((rtx, rtx, rtx, unsigned));
144 static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
145 enum machine_mode));
146 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
147 unsigned int));
148 static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
149 unsigned int));
150 static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
151 enum machine_mode,
152 struct store_by_pieces *));
153 static bool clear_storage_via_clrstr PARAMS ((rtx, rtx, unsigned));
154 static rtx clear_storage_via_libcall PARAMS ((rtx, rtx));
155 static tree clear_storage_libcall_fn PARAMS ((int));
156 static rtx compress_float_constant PARAMS ((rtx, rtx));
157 static rtx get_subtarget PARAMS ((rtx));
158 static int is_zeros_p PARAMS ((tree));
159 static int mostly_zeros_p PARAMS ((tree));
160 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
161 HOST_WIDE_INT, enum machine_mode,
162 tree, tree, int, int));
163 static void store_constructor PARAMS ((tree, rtx, int, HOST_WIDE_INT));
164 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
165 HOST_WIDE_INT, enum machine_mode,
166 tree, enum machine_mode, int, tree,
167 int));
168 static rtx var_rtx PARAMS ((tree));
169
170 static unsigned HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
171 static unsigned HOST_WIDE_INT highest_pow2_factor_for_type PARAMS ((tree,
172 tree));
173
174 static int is_aligning_offset PARAMS ((tree, tree));
175 static rtx expand_increment PARAMS ((tree, int, int));
176 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
177 #ifdef PUSH_ROUNDING
178 static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
179 #endif
180 static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
181 static rtx const_vector_from_tree PARAMS ((tree));
182
183 /* Record for each mode whether we can move a register directly to or
184 from an object of that mode in memory. If we can't, we won't try
185 to use that mode directly when accessing a field of that mode. */
186
187 static char direct_load[NUM_MACHINE_MODES];
188 static char direct_store[NUM_MACHINE_MODES];
189
190 /* Record for each mode whether we can float-extend from memory. */
191
192 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
193
194 /* If a memory-to-memory move would take MOVE_RATIO or more simple
195 move-instruction sequences, we will do a movstr or libcall instead. */
196
197 #ifndef MOVE_RATIO
198 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
199 #define MOVE_RATIO 2
200 #else
201 /* If we are optimizing for space (-Os), cut down the default move ratio. */
202 #define MOVE_RATIO (optimize_size ? 3 : 15)
203 #endif
204 #endif
205
206 /* This macro is used to determine whether move_by_pieces should be called
207 to perform a structure copy. */
208 #ifndef MOVE_BY_PIECES_P
209 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
210 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
211 #endif
212
213 /* If a clear memory operation would take CLEAR_RATIO or more simple
214 move-instruction sequences, we will do a clrstr or libcall instead. */
215
216 #ifndef CLEAR_RATIO
217 #if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
218 #define CLEAR_RATIO 2
219 #else
220 /* If we are optimizing for space, cut down the default clear ratio. */
221 #define CLEAR_RATIO (optimize_size ? 3 : 15)
222 #endif
223 #endif
224
225 /* This macro is used to determine whether clear_by_pieces should be
226 called to clear storage. */
227 #ifndef CLEAR_BY_PIECES_P
228 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
229 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
230 #endif
231
232 /* This macro is used to determine whether store_by_pieces should be
233 called to "memset" storage with byte values other than zero, or
234 to "memcpy" storage when the source is a constant string. */
235 #ifndef STORE_BY_PIECES_P
236 #define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
237 #endif
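/* As a rough illustration of how these heuristics interact (the figures
   below assume a hypothetical 32-bit target with MOVE_MAX == 4, SImode
   and HImode move patterns available, and sufficiently aligned operands;
   the real values depend entirely on the target):

     copying 11 bytes by pieces takes
         2 SImode moves + 1 HImode move + 1 QImode move = 4 insns,
     so move_by_pieces_ninsns (11, align) == 4.

   With the generic default MOVE_RATIO of 15 (targets providing a movstr
   pattern default to 2 instead), MOVE_BY_PIECES_P (11, align) is true and
   the copy is expanded inline.  Under -Os the ratio drops to 3, the test
   fails, and a movstr pattern or a library call is used instead.
   CLEAR_BY_PIECES_P makes the same calculation against CLEAR_RATIO, and
   STORE_BY_PIECES_P simply reuses the MOVE_BY_PIECES_P test.  */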
238
239 /* This array records the insn_code of insns to perform block moves. */
240 enum insn_code movstr_optab[NUM_MACHINE_MODES];
241
242 /* This array records the insn_code of insns to perform block clears. */
243 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
244
245 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
246
247 #ifndef SLOW_UNALIGNED_ACCESS
248 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
249 #endif
250 \f
251 /* This is run once per compilation to set up which modes can be used
252 directly in memory and to initialize the block move optab. */
253
254 void
255 init_expr_once ()
256 {
257 rtx insn, pat;
258 enum machine_mode mode;
259 int num_clobbers;
260 rtx mem, mem1;
261 rtx reg;
262
263 /* Try indexing by frame ptr and try by stack ptr.
264 It is known that on the Convex the stack ptr isn't a valid index.
265 With luck, one or the other is valid on any machine. */
266 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
267 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
268
269 /* A scratch register we can modify in-place below to avoid
270 useless RTL allocations. */
271 reg = gen_rtx_REG (VOIDmode, -1);
272
273 insn = rtx_alloc (INSN);
274 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
275 PATTERN (insn) = pat;
276
277 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
278 mode = (enum machine_mode) ((int) mode + 1))
279 {
280 int regno;
281
282 direct_load[(int) mode] = direct_store[(int) mode] = 0;
283 PUT_MODE (mem, mode);
284 PUT_MODE (mem1, mode);
285 PUT_MODE (reg, mode);
286
287 /* See if there is some register that can be used in this mode and
288 directly loaded or stored from memory. */
289
290 if (mode != VOIDmode && mode != BLKmode)
291 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
292 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
293 regno++)
294 {
295 if (! HARD_REGNO_MODE_OK (regno, mode))
296 continue;
297
298 REGNO (reg) = regno;
299
300 SET_SRC (pat) = mem;
301 SET_DEST (pat) = reg;
302 if (recog (pat, insn, &num_clobbers) >= 0)
303 direct_load[(int) mode] = 1;
304
305 SET_SRC (pat) = mem1;
306 SET_DEST (pat) = reg;
307 if (recog (pat, insn, &num_clobbers) >= 0)
308 direct_load[(int) mode] = 1;
309
310 SET_SRC (pat) = reg;
311 SET_DEST (pat) = mem;
312 if (recog (pat, insn, &num_clobbers) >= 0)
313 direct_store[(int) mode] = 1;
314
315 SET_SRC (pat) = reg;
316 SET_DEST (pat) = mem1;
317 if (recog (pat, insn, &num_clobbers) >= 0)
318 direct_store[(int) mode] = 1;
319 }
320 }
321
322 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
323
324 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
325 mode = GET_MODE_WIDER_MODE (mode))
326 {
327 enum machine_mode srcmode;
328 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
329 srcmode = GET_MODE_WIDER_MODE (srcmode))
330 {
331 enum insn_code ic;
332
333 ic = can_extend_p (mode, srcmode, 0);
334 if (ic == CODE_FOR_nothing)
335 continue;
336
337 PUT_MODE (mem, srcmode);
338
339 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
340 float_extend_from_mem[mode][srcmode] = true;
341 }
342 }
343 }
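/* The probing above is purely mechanical: for each mode we build a
   (set (reg) (mem)) and a (set (mem) (reg)) and ask recog whether some
   hard register makes the pattern match.  On a typical register-memory
   target this leaves, for example, direct_load[(int) SImode] and
   direct_store[(int) SImode] set to 1 (illustrative only; the real
   answers come from recog on the target's move patterns).  convert_move
   and convert_modes consult these arrays before referring to a MEM in a
   narrower mode, and float_extend_from_mem records which floating-point
   extensions can be performed directly from a memory operand.  */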
344
345 /* This is run at the start of compiling a function. */
346
347 void
348 init_expr ()
349 {
350 cfun->expr = (struct expr_status *) ggc_alloc (sizeof (struct expr_status));
351
352 pending_chain = 0;
353 pending_stack_adjust = 0;
354 stack_pointer_delta = 0;
355 inhibit_defer_pop = 0;
356 saveregs_value = 0;
357 apply_args_value = 0;
358 forced_labels = 0;
359 }
360
361 /* Small sanity check that the queue is empty at the end of a function. */
362
363 void
364 finish_expr_for_function ()
365 {
366 if (pending_chain)
367 abort ();
368 }
369 \f
370 /* Manage the queue of increment instructions to be output
371 for POSTINCREMENT_EXPR expressions, etc. */
372
373 /* Queue up to increment (or change) VAR later. BODY says how:
374 BODY should be the same thing you would pass to emit_insn
375 to increment right away. It will go to emit_insn later on.
376
377 The value is a QUEUED expression to be used in place of VAR
378 where you want to guarantee the pre-incrementation value of VAR. */
379
380 static rtx
381 enqueue_insn (var, body)
382 rtx var, body;
383 {
384 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
385 body, pending_chain);
386 return pending_chain;
387 }
388
389 /* Use protect_from_queue to convert a QUEUED expression
390 into something that you can put immediately into an instruction.
391 If the queued incrementation has not happened yet,
392 protect_from_queue returns the variable itself.
393 If the incrementation has happened, protect_from_queue returns a temp
394 that contains a copy of the old value of the variable.
395
396 Any time an rtx which might possibly be a QUEUED is to be put
397 into an instruction, it must be passed through protect_from_queue first.
398 QUEUED expressions are not meaningful in instructions.
399
400 Do not pass a value through protect_from_queue and then hold
401 on to it for a while before putting it in an instruction!
402 If the queue is flushed in between, incorrect code will result. */
403
404 rtx
405 protect_from_queue (x, modify)
406 rtx x;
407 int modify;
408 {
409 RTX_CODE code = GET_CODE (x);
410
411 #if 0 /* A QUEUED can hang around after the queue is forced out. */
412 /* Shortcut for most common case. */
413 if (pending_chain == 0)
414 return x;
415 #endif
416
417 if (code != QUEUED)
418 {
419 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
420 use of autoincrement. Make a copy of the contents of the memory
421 location rather than a copy of the address, but not if the value is
422 of mode BLKmode. Don't modify X in place since it might be
423 shared. */
424 if (code == MEM && GET_MODE (x) != BLKmode
425 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
426 {
427 rtx y = XEXP (x, 0);
428 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
429
430 if (QUEUED_INSN (y))
431 {
432 rtx temp = gen_reg_rtx (GET_MODE (x));
433
434 emit_insn_before (gen_move_insn (temp, new),
435 QUEUED_INSN (y));
436 return temp;
437 }
438
439 /* Copy the address into a pseudo, so that the returned value
440 remains correct across calls to emit_queue. */
441 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
442 }
443
444 /* Otherwise, recursively protect the subexpressions of all
445 the kinds of rtx's that can contain a QUEUED. */
446 if (code == MEM)
447 {
448 rtx tem = protect_from_queue (XEXP (x, 0), 0);
449 if (tem != XEXP (x, 0))
450 {
451 x = copy_rtx (x);
452 XEXP (x, 0) = tem;
453 }
454 }
455 else if (code == PLUS || code == MULT)
456 {
457 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
458 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
459 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
460 {
461 x = copy_rtx (x);
462 XEXP (x, 0) = new0;
463 XEXP (x, 1) = new1;
464 }
465 }
466 return x;
467 }
468 /* If the increment has not happened, use the variable itself. Copy it
469 into a new pseudo so that the value remains correct across calls to
470 emit_queue. */
471 if (QUEUED_INSN (x) == 0)
472 return copy_to_reg (QUEUED_VAR (x));
473 /* If the increment has happened and a pre-increment copy exists,
474 use that copy. */
475 if (QUEUED_COPY (x) != 0)
476 return QUEUED_COPY (x);
477 /* The increment has happened but we haven't set up a pre-increment copy.
478 Set one up now, and use it. */
479 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
480 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
481 QUEUED_INSN (x));
482 return QUEUED_COPY (x);
483 }
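/* A sketch of the intended calling pattern, for illustration only:

     op0 = protect_from_queue (op0, 0);        read operand
     target = protect_from_queue (target, 1);  operand to be written
     emit_move_insn (target, op0);

   The protected values are used immediately.  What the warning above
   forbids is protecting an operand, then expanding another tree or
   calling emit_queue, and only afterwards emitting the insn; if the
   queue is flushed in between, the returned rtx may no longer describe
   the pre-increment value and incorrect code results.  */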
484
485 /* Return nonzero if X contains a QUEUED expression:
486 if it contains anything that will be altered by a queued increment.
487 We handle only combinations of MEM, PLUS, MINUS and MULT operators
488 since memory addresses generally contain only those. */
489
490 int
491 queued_subexp_p (x)
492 rtx x;
493 {
494 enum rtx_code code = GET_CODE (x);
495 switch (code)
496 {
497 case QUEUED:
498 return 1;
499 case MEM:
500 return queued_subexp_p (XEXP (x, 0));
501 case MULT:
502 case PLUS:
503 case MINUS:
504 return (queued_subexp_p (XEXP (x, 0))
505 || queued_subexp_p (XEXP (x, 1)));
506 default:
507 return 0;
508 }
509 }
510
511 /* Perform all the pending incrementations. */
512
513 void
514 emit_queue ()
515 {
516 rtx p;
517 while ((p = pending_chain))
518 {
519 rtx body = QUEUED_BODY (p);
520
521 switch (GET_CODE (body))
522 {
523 case INSN:
524 case JUMP_INSN:
525 case CALL_INSN:
526 case CODE_LABEL:
527 case BARRIER:
528 case NOTE:
529 QUEUED_INSN (p) = body;
530 emit_insn (body);
531 break;
532
533 #ifdef ENABLE_CHECKING
534 case SEQUENCE:
535 abort ();
536 break;
537 #endif
538
539 default:
540 QUEUED_INSN (p) = emit_insn (body);
541 break;
542 }
543
544 pending_chain = QUEUED_NEXT (p);
545 }
546 }
547 \f
548 /* Copy data from FROM to TO, where the machine modes are not the same.
549 Both modes may be integer, or both may be floating.
550 UNSIGNEDP should be nonzero if FROM is an unsigned type.
551 This causes zero-extension instead of sign-extension. */
552
553 void
554 convert_move (to, from, unsignedp)
555 rtx to, from;
556 int unsignedp;
557 {
558 enum machine_mode to_mode = GET_MODE (to);
559 enum machine_mode from_mode = GET_MODE (from);
560 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
561 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
562 enum insn_code code;
563 rtx libcall;
564
565 /* rtx code for making an equivalent value. */
566 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
567 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
568
569 to = protect_from_queue (to, 1);
570 from = protect_from_queue (from, 0);
571
572 if (to_real != from_real)
573 abort ();
574
575 /* If FROM is a SUBREG that indicates that we have already done at least
576 the required extension, strip it. We don't handle such SUBREGs as
577 TO here. */
578
579 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
580 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
581 >= GET_MODE_SIZE (to_mode))
582 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
583 from = gen_lowpart (to_mode, from), from_mode = to_mode;
584
585 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
586 abort ();
587
588 if (to_mode == from_mode
589 || (from_mode == VOIDmode && CONSTANT_P (from)))
590 {
591 emit_move_insn (to, from);
592 return;
593 }
594
595 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
596 {
597 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
598 abort ();
599
600 if (VECTOR_MODE_P (to_mode))
601 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
602 else
603 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
604
605 emit_move_insn (to, from);
606 return;
607 }
608
609 if (to_real != from_real)
610 abort ();
611
612 if (to_real)
613 {
614 rtx value, insns;
615
616 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
617 {
618 /* Try converting directly if the insn is supported. */
619 if ((code = can_extend_p (to_mode, from_mode, 0))
620 != CODE_FOR_nothing)
621 {
622 emit_unop_insn (code, to, from, UNKNOWN);
623 return;
624 }
625 }
626
627 #ifdef HAVE_trunchfqf2
628 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
629 {
630 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
631 return;
632 }
633 #endif
634 #ifdef HAVE_trunctqfqf2
635 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
636 {
637 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
638 return;
639 }
640 #endif
641 #ifdef HAVE_truncsfqf2
642 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
643 {
644 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
645 return;
646 }
647 #endif
648 #ifdef HAVE_truncdfqf2
649 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
650 {
651 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
652 return;
653 }
654 #endif
655 #ifdef HAVE_truncxfqf2
656 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
657 {
658 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
659 return;
660 }
661 #endif
662 #ifdef HAVE_trunctfqf2
663 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
664 {
665 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
666 return;
667 }
668 #endif
669
670 #ifdef HAVE_trunctqfhf2
671 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
672 {
673 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
674 return;
675 }
676 #endif
677 #ifdef HAVE_truncsfhf2
678 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
679 {
680 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
681 return;
682 }
683 #endif
684 #ifdef HAVE_truncdfhf2
685 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
686 {
687 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
688 return;
689 }
690 #endif
691 #ifdef HAVE_truncxfhf2
692 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
693 {
694 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
695 return;
696 }
697 #endif
698 #ifdef HAVE_trunctfhf2
699 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
700 {
701 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
702 return;
703 }
704 #endif
705
706 #ifdef HAVE_truncsftqf2
707 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
708 {
709 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
710 return;
711 }
712 #endif
713 #ifdef HAVE_truncdftqf2
714 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
715 {
716 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
717 return;
718 }
719 #endif
720 #ifdef HAVE_truncxftqf2
721 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
722 {
723 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
724 return;
725 }
726 #endif
727 #ifdef HAVE_trunctftqf2
728 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
729 {
730 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
731 return;
732 }
733 #endif
734
735 #ifdef HAVE_truncdfsf2
736 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
737 {
738 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
739 return;
740 }
741 #endif
742 #ifdef HAVE_truncxfsf2
743 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
744 {
745 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
746 return;
747 }
748 #endif
749 #ifdef HAVE_trunctfsf2
750 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
751 {
752 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
753 return;
754 }
755 #endif
756 #ifdef HAVE_truncxfdf2
757 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
758 {
759 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
760 return;
761 }
762 #endif
763 #ifdef HAVE_trunctfdf2
764 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
765 {
766 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
767 return;
768 }
769 #endif
770
771 libcall = (rtx) 0;
772 switch (from_mode)
773 {
774 case SFmode:
775 switch (to_mode)
776 {
777 case DFmode:
778 libcall = extendsfdf2_libfunc;
779 break;
780
781 case XFmode:
782 libcall = extendsfxf2_libfunc;
783 break;
784
785 case TFmode:
786 libcall = extendsftf2_libfunc;
787 break;
788
789 default:
790 break;
791 }
792 break;
793
794 case DFmode:
795 switch (to_mode)
796 {
797 case SFmode:
798 libcall = truncdfsf2_libfunc;
799 break;
800
801 case XFmode:
802 libcall = extenddfxf2_libfunc;
803 break;
804
805 case TFmode:
806 libcall = extenddftf2_libfunc;
807 break;
808
809 default:
810 break;
811 }
812 break;
813
814 case XFmode:
815 switch (to_mode)
816 {
817 case SFmode:
818 libcall = truncxfsf2_libfunc;
819 break;
820
821 case DFmode:
822 libcall = truncxfdf2_libfunc;
823 break;
824
825 default:
826 break;
827 }
828 break;
829
830 case TFmode:
831 switch (to_mode)
832 {
833 case SFmode:
834 libcall = trunctfsf2_libfunc;
835 break;
836
837 case DFmode:
838 libcall = trunctfdf2_libfunc;
839 break;
840
841 default:
842 break;
843 }
844 break;
845
846 default:
847 break;
848 }
849
850 if (libcall == (rtx) 0)
851 /* This conversion is not implemented yet. */
852 abort ();
853
854 start_sequence ();
855 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
856 1, from, from_mode);
857 insns = get_insns ();
858 end_sequence ();
859 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
860 from));
861 return;
862 }
863
864 /* Now both modes are integers. */
865
866 /* Handle expanding beyond a word. */
867 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
868 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
869 {
870 rtx insns;
871 rtx lowpart;
872 rtx fill_value;
873 rtx lowfrom;
874 int i;
875 enum machine_mode lowpart_mode;
876 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
877
878 /* Try converting directly if the insn is supported. */
879 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
880 != CODE_FOR_nothing)
881 {
882 /* If FROM is a SUBREG, put it into a register. Do this
883 so that we always generate the same set of insns for
884 better cse'ing; if an intermediate assignment occurred,
885 we won't be doing the operation directly on the SUBREG. */
886 if (optimize > 0 && GET_CODE (from) == SUBREG)
887 from = force_reg (from_mode, from);
888 emit_unop_insn (code, to, from, equiv_code);
889 return;
890 }
891 /* Next, try converting via full word. */
892 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
893 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
894 != CODE_FOR_nothing))
895 {
896 if (GET_CODE (to) == REG)
897 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
898 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
899 emit_unop_insn (code, to,
900 gen_lowpart (word_mode, to), equiv_code);
901 return;
902 }
903
904 /* No special multiword conversion insn; do it by hand. */
905 start_sequence ();
906
907 /* Since we will turn this into a no conflict block, we must ensure
908 that the source does not overlap the target. */
909
910 if (reg_overlap_mentioned_p (to, from))
911 from = force_reg (from_mode, from);
912
913 /* Get a copy of FROM widened to a word, if necessary. */
914 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
915 lowpart_mode = word_mode;
916 else
917 lowpart_mode = from_mode;
918
919 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
920
921 lowpart = gen_lowpart (lowpart_mode, to);
922 emit_move_insn (lowpart, lowfrom);
923
924 /* Compute the value to put in each remaining word. */
925 if (unsignedp)
926 fill_value = const0_rtx;
927 else
928 {
929 #ifdef HAVE_slt
930 if (HAVE_slt
931 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
932 && STORE_FLAG_VALUE == -1)
933 {
934 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
935 lowpart_mode, 0);
936 fill_value = gen_reg_rtx (word_mode);
937 emit_insn (gen_slt (fill_value));
938 }
939 else
940 #endif
941 {
942 fill_value
943 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
944 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
945 NULL_RTX, 0);
946 fill_value = convert_to_mode (word_mode, fill_value, 1);
947 }
948 }
949
950 /* Fill the remaining words. */
951 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
952 {
953 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
954 rtx subword = operand_subword (to, index, 1, to_mode);
955
956 if (subword == 0)
957 abort ();
958
959 if (fill_value != subword)
960 emit_move_insn (subword, fill_value);
961 }
962
963 insns = get_insns ();
964 end_sequence ();
965
966 emit_no_conflict_block (insns, to, from, NULL_RTX,
967 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
968 return;
969 }
970
971 /* Truncating multi-word to a word or less. */
972 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
973 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
974 {
975 if (!((GET_CODE (from) == MEM
976 && ! MEM_VOLATILE_P (from)
977 && direct_load[(int) to_mode]
978 && ! mode_dependent_address_p (XEXP (from, 0)))
979 || GET_CODE (from) == REG
980 || GET_CODE (from) == SUBREG))
981 from = force_reg (from_mode, from);
982 convert_move (to, gen_lowpart (word_mode, from), 0);
983 return;
984 }
985
986 /* Handle pointer conversion. */ /* SPEE 900220. */
987 if (to_mode == PQImode)
988 {
989 if (from_mode != QImode)
990 from = convert_to_mode (QImode, from, unsignedp);
991
992 #ifdef HAVE_truncqipqi2
993 if (HAVE_truncqipqi2)
994 {
995 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
996 return;
997 }
998 #endif /* HAVE_truncqipqi2 */
999 abort ();
1000 }
1001
1002 if (from_mode == PQImode)
1003 {
1004 if (to_mode != QImode)
1005 {
1006 from = convert_to_mode (QImode, from, unsignedp);
1007 from_mode = QImode;
1008 }
1009 else
1010 {
1011 #ifdef HAVE_extendpqiqi2
1012 if (HAVE_extendpqiqi2)
1013 {
1014 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
1015 return;
1016 }
1017 #endif /* HAVE_extendpqiqi2 */
1018 abort ();
1019 }
1020 }
1021
1022 if (to_mode == PSImode)
1023 {
1024 if (from_mode != SImode)
1025 from = convert_to_mode (SImode, from, unsignedp);
1026
1027 #ifdef HAVE_truncsipsi2
1028 if (HAVE_truncsipsi2)
1029 {
1030 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
1031 return;
1032 }
1033 #endif /* HAVE_truncsipsi2 */
1034 abort ();
1035 }
1036
1037 if (from_mode == PSImode)
1038 {
1039 if (to_mode != SImode)
1040 {
1041 from = convert_to_mode (SImode, from, unsignedp);
1042 from_mode = SImode;
1043 }
1044 else
1045 {
1046 #ifdef HAVE_extendpsisi2
1047 if (! unsignedp && HAVE_extendpsisi2)
1048 {
1049 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
1050 return;
1051 }
1052 #endif /* HAVE_extendpsisi2 */
1053 #ifdef HAVE_zero_extendpsisi2
1054 if (unsignedp && HAVE_zero_extendpsisi2)
1055 {
1056 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
1057 return;
1058 }
1059 #endif /* HAVE_zero_extendpsisi2 */
1060 abort ();
1061 }
1062 }
1063
1064 if (to_mode == PDImode)
1065 {
1066 if (from_mode != DImode)
1067 from = convert_to_mode (DImode, from, unsignedp);
1068
1069 #ifdef HAVE_truncdipdi2
1070 if (HAVE_truncdipdi2)
1071 {
1072 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1073 return;
1074 }
1075 #endif /* HAVE_truncdipdi2 */
1076 abort ();
1077 }
1078
1079 if (from_mode == PDImode)
1080 {
1081 if (to_mode != DImode)
1082 {
1083 from = convert_to_mode (DImode, from, unsignedp);
1084 from_mode = DImode;
1085 }
1086 else
1087 {
1088 #ifdef HAVE_extendpdidi2
1089 if (HAVE_extendpdidi2)
1090 {
1091 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1092 return;
1093 }
1094 #endif /* HAVE_extendpdidi2 */
1095 abort ();
1096 }
1097 }
1098
1099 /* Now follow all the conversions between integers
1100 no more than a word long. */
1101
1102 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1103 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1104 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1105 GET_MODE_BITSIZE (from_mode)))
1106 {
1107 if (!((GET_CODE (from) == MEM
1108 && ! MEM_VOLATILE_P (from)
1109 && direct_load[(int) to_mode]
1110 && ! mode_dependent_address_p (XEXP (from, 0)))
1111 || GET_CODE (from) == REG
1112 || GET_CODE (from) == SUBREG))
1113 from = force_reg (from_mode, from);
1114 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1115 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1116 from = copy_to_reg (from);
1117 emit_move_insn (to, gen_lowpart (to_mode, from));
1118 return;
1119 }
1120
1121 /* Handle extension. */
1122 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1123 {
1124 /* Convert directly if that works. */
1125 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1126 != CODE_FOR_nothing)
1127 {
1128 if (flag_force_mem)
1129 from = force_not_mem (from);
1130
1131 emit_unop_insn (code, to, from, equiv_code);
1132 return;
1133 }
1134 else
1135 {
1136 enum machine_mode intermediate;
1137 rtx tmp;
1138 tree shift_amount;
1139
1140 /* Search for a mode to convert via. */
1141 for (intermediate = from_mode; intermediate != VOIDmode;
1142 intermediate = GET_MODE_WIDER_MODE (intermediate))
1143 if (((can_extend_p (to_mode, intermediate, unsignedp)
1144 != CODE_FOR_nothing)
1145 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1146 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1147 GET_MODE_BITSIZE (intermediate))))
1148 && (can_extend_p (intermediate, from_mode, unsignedp)
1149 != CODE_FOR_nothing))
1150 {
1151 convert_move (to, convert_to_mode (intermediate, from,
1152 unsignedp), unsignedp);
1153 return;
1154 }
1155
1156 /* No suitable intermediate mode.
1157 Generate what we need with shifts. */
1158 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1159 - GET_MODE_BITSIZE (from_mode), 0);
1160 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1161 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1162 to, unsignedp);
1163 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1164 to, unsignedp);
1165 if (tmp != to)
1166 emit_move_insn (to, tmp);
1167 return;
1168 }
1169 }
1170
1171 /* Support special truncate insns for certain modes. */
1172
1173 if (from_mode == DImode && to_mode == SImode)
1174 {
1175 #ifdef HAVE_truncdisi2
1176 if (HAVE_truncdisi2)
1177 {
1178 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1179 return;
1180 }
1181 #endif
1182 convert_move (to, force_reg (from_mode, from), unsignedp);
1183 return;
1184 }
1185
1186 if (from_mode == DImode && to_mode == HImode)
1187 {
1188 #ifdef HAVE_truncdihi2
1189 if (HAVE_truncdihi2)
1190 {
1191 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1192 return;
1193 }
1194 #endif
1195 convert_move (to, force_reg (from_mode, from), unsignedp);
1196 return;
1197 }
1198
1199 if (from_mode == DImode && to_mode == QImode)
1200 {
1201 #ifdef HAVE_truncdiqi2
1202 if (HAVE_truncdiqi2)
1203 {
1204 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1205 return;
1206 }
1207 #endif
1208 convert_move (to, force_reg (from_mode, from), unsignedp);
1209 return;
1210 }
1211
1212 if (from_mode == SImode && to_mode == HImode)
1213 {
1214 #ifdef HAVE_truncsihi2
1215 if (HAVE_truncsihi2)
1216 {
1217 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1218 return;
1219 }
1220 #endif
1221 convert_move (to, force_reg (from_mode, from), unsignedp);
1222 return;
1223 }
1224
1225 if (from_mode == SImode && to_mode == QImode)
1226 {
1227 #ifdef HAVE_truncsiqi2
1228 if (HAVE_truncsiqi2)
1229 {
1230 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1231 return;
1232 }
1233 #endif
1234 convert_move (to, force_reg (from_mode, from), unsignedp);
1235 return;
1236 }
1237
1238 if (from_mode == HImode && to_mode == QImode)
1239 {
1240 #ifdef HAVE_trunchiqi2
1241 if (HAVE_trunchiqi2)
1242 {
1243 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1244 return;
1245 }
1246 #endif
1247 convert_move (to, force_reg (from_mode, from), unsignedp);
1248 return;
1249 }
1250
1251 if (from_mode == TImode && to_mode == DImode)
1252 {
1253 #ifdef HAVE_trunctidi2
1254 if (HAVE_trunctidi2)
1255 {
1256 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1257 return;
1258 }
1259 #endif
1260 convert_move (to, force_reg (from_mode, from), unsignedp);
1261 return;
1262 }
1263
1264 if (from_mode == TImode && to_mode == SImode)
1265 {
1266 #ifdef HAVE_trunctisi2
1267 if (HAVE_trunctisi2)
1268 {
1269 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1270 return;
1271 }
1272 #endif
1273 convert_move (to, force_reg (from_mode, from), unsignedp);
1274 return;
1275 }
1276
1277 if (from_mode == TImode && to_mode == HImode)
1278 {
1279 #ifdef HAVE_trunctihi2
1280 if (HAVE_trunctihi2)
1281 {
1282 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1283 return;
1284 }
1285 #endif
1286 convert_move (to, force_reg (from_mode, from), unsignedp);
1287 return;
1288 }
1289
1290 if (from_mode == TImode && to_mode == QImode)
1291 {
1292 #ifdef HAVE_trunctiqi2
1293 if (HAVE_trunctiqi2)
1294 {
1295 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1296 return;
1297 }
1298 #endif
1299 convert_move (to, force_reg (from_mode, from), unsignedp);
1300 return;
1301 }
1302
1303 /* Handle truncation of volatile memrefs, and so on;
1304 the things that couldn't be truncated directly,
1305 and for which there was no special instruction. */
1306 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1307 {
1308 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1309 emit_move_insn (to, temp);
1310 return;
1311 }
1312
1313 /* Mode combination is not recognized. */
1314 abort ();
1315 }
1316
1317 /* Return an rtx for a value that would result
1318 from converting X to mode MODE.
1319 Both X and MODE may be floating, or both integer.
1320 UNSIGNEDP is nonzero if X is an unsigned value.
1321 This can be done by referring to a part of X in place
1322 or by copying to a new temporary with conversion.
1323
1324 This function *must not* call protect_from_queue
1325 except when putting X into an insn (in which case convert_move does it). */
1326
1327 rtx
1328 convert_to_mode (mode, x, unsignedp)
1329 enum machine_mode mode;
1330 rtx x;
1331 int unsignedp;
1332 {
1333 return convert_modes (mode, VOIDmode, x, unsignedp);
1334 }
1335
1336 /* Return an rtx for a value that would result
1337 from converting X from mode OLDMODE to mode MODE.
1338 Both modes may be floating, or both integer.
1339 UNSIGNEDP is nonzero if X is an unsigned value.
1340
1341 This can be done by referring to a part of X in place
1342 or by copying to a new temporary with conversion.
1343
1344 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1345
1346 This function *must not* call protect_from_queue
1347 except when putting X into an insn (in which case convert_move does it). */
1348
1349 rtx
1350 convert_modes (mode, oldmode, x, unsignedp)
1351 enum machine_mode mode, oldmode;
1352 rtx x;
1353 int unsignedp;
1354 {
1355 rtx temp;
1356
1357 /* If FROM is a SUBREG that indicates that we have already done at least
1358 the required extension, strip it. */
1359
1360 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1361 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1362 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1363 x = gen_lowpart (mode, x);
1364
1365 if (GET_MODE (x) != VOIDmode)
1366 oldmode = GET_MODE (x);
1367
1368 if (mode == oldmode)
1369 return x;
1370
1371 /* There is one case that we must handle specially: If we are converting
1372 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1373 we are to interpret the constant as unsigned, gen_lowpart will do
1374 the wrong if the constant appears negative. What we want to do is
1375 make the high-order word of the constant zero, not all ones. */
1376
1377 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1378 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1379 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1380 {
1381 HOST_WIDE_INT val = INTVAL (x);
1382
1383 if (oldmode != VOIDmode
1384 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1385 {
1386 int width = GET_MODE_BITSIZE (oldmode);
1387
1388 /* We need to zero extend VAL. */
1389 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1390 }
1391
1392 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1393 }
1394
1395 /* We can do this with a gen_lowpart if both desired and current modes
1396 are integer, and this is either a constant integer, a register, or a
1397 non-volatile MEM. Except for the constant case where MODE is no
1398 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1399
1400 if ((GET_CODE (x) == CONST_INT
1401 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1402 || (GET_MODE_CLASS (mode) == MODE_INT
1403 && GET_MODE_CLASS (oldmode) == MODE_INT
1404 && (GET_CODE (x) == CONST_DOUBLE
1405 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1406 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1407 && direct_load[(int) mode])
1408 || (GET_CODE (x) == REG
1409 && (! HARD_REGISTER_P (x)
1410 || HARD_REGNO_MODE_OK (REGNO (x), mode))
1411 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1412 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1413 {
1414 /* ?? If we don't know OLDMODE, we have to assume here that
1415 X does not need sign- or zero-extension. This may not be
1416 the case, but it's the best we can do. */
1417 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1418 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1419 {
1420 HOST_WIDE_INT val = INTVAL (x);
1421 int width = GET_MODE_BITSIZE (oldmode);
1422
1423 /* We must sign or zero-extend in this case. Start by
1424 zero-extending, then sign extend if we need to. */
1425 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1426 if (! unsignedp
1427 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1428 val |= (HOST_WIDE_INT) (-1) << width;
1429
1430 return gen_int_mode (val, mode);
1431 }
1432
1433 return gen_lowpart (mode, x);
1434 }
1435
1436 temp = gen_reg_rtx (mode);
1437 convert_move (temp, x, unsignedp);
1438 return temp;
1439 }
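/* Typical uses, for illustration (not an exhaustive contract):

     x = convert_to_mode (SImode, x, 1);
         zero-extends X to SImode if it is narrower, truncates it if it
         is wider, and returns X unchanged when no work is needed;

     x = convert_modes (SImode, QImode, x, 1);
         like convert_to_mode, but naming OLDMODE explicitly, which is
         what lets a CONST_INT (whose mode is VOIDmode) be interpreted
         in the intended width.

   The result is either X itself (possibly a lowpart of it), an adjusted
   constant, or a fresh pseudo produced via convert_move.  */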
1440 \f
1441 /* This macro is used to determine what the largest unit size that
1442 move_by_pieces can use is. */
1443
1444 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1445 move efficiently, as opposed to MOVE_MAX which is the maximum
1446 number of bytes we can move with a single instruction. */
1447
1448 #ifndef MOVE_MAX_PIECES
1449 #define MOVE_MAX_PIECES MOVE_MAX
1450 #endif
1451
1452 /* STORE_MAX_PIECES is the number of bytes at a time that we can
1453 store efficiently. Due to internal GCC limitations, this is
1454 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
1455 for an immediate constant. */
1456
1457 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
1458
1459 /* Generate several move instructions to copy LEN bytes from block FROM to
1460 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1461 and TO through protect_from_queue before calling.
1462
1463 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1464 used to push FROM to the stack.
1465
1466 ALIGN is maximum alignment we can assume. */
1467
1468 void
1469 move_by_pieces (to, from, len, align)
1470 rtx to, from;
1471 unsigned HOST_WIDE_INT len;
1472 unsigned int align;
1473 {
1474 struct move_by_pieces data;
1475 rtx to_addr, from_addr = XEXP (from, 0);
1476 unsigned int max_size = MOVE_MAX_PIECES + 1;
1477 enum machine_mode mode = VOIDmode, tmode;
1478 enum insn_code icode;
1479
1480 data.offset = 0;
1481 data.from_addr = from_addr;
1482 if (to)
1483 {
1484 to_addr = XEXP (to, 0);
1485 data.to = to;
1486 data.autinc_to
1487 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1488 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1489 data.reverse
1490 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1491 }
1492 else
1493 {
1494 to_addr = NULL_RTX;
1495 data.to = NULL_RTX;
1496 data.autinc_to = 1;
1497 #ifdef STACK_GROWS_DOWNWARD
1498 data.reverse = 1;
1499 #else
1500 data.reverse = 0;
1501 #endif
1502 }
1503 data.to_addr = to_addr;
1504 data.from = from;
1505 data.autinc_from
1506 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1507 || GET_CODE (from_addr) == POST_INC
1508 || GET_CODE (from_addr) == POST_DEC);
1509
1510 data.explicit_inc_from = 0;
1511 data.explicit_inc_to = 0;
1512 if (data.reverse) data.offset = len;
1513 data.len = len;
1514
1515 /* If copying requires more than two move insns,
1516 copy addresses to registers (to make displacements shorter)
1517 and use post-increment if available. */
1518 if (!(data.autinc_from && data.autinc_to)
1519 && move_by_pieces_ninsns (len, align) > 2)
1520 {
1521 /* Find the mode of the largest move... */
1522 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1523 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1524 if (GET_MODE_SIZE (tmode) < max_size)
1525 mode = tmode;
1526
1527 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1528 {
1529 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1530 data.autinc_from = 1;
1531 data.explicit_inc_from = -1;
1532 }
1533 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1534 {
1535 data.from_addr = copy_addr_to_reg (from_addr);
1536 data.autinc_from = 1;
1537 data.explicit_inc_from = 1;
1538 }
1539 if (!data.autinc_from && CONSTANT_P (from_addr))
1540 data.from_addr = copy_addr_to_reg (from_addr);
1541 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1542 {
1543 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1544 data.autinc_to = 1;
1545 data.explicit_inc_to = -1;
1546 }
1547 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1548 {
1549 data.to_addr = copy_addr_to_reg (to_addr);
1550 data.autinc_to = 1;
1551 data.explicit_inc_to = 1;
1552 }
1553 if (!data.autinc_to && CONSTANT_P (to_addr))
1554 data.to_addr = copy_addr_to_reg (to_addr);
1555 }
1556
1557 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1558 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1559 align = MOVE_MAX * BITS_PER_UNIT;
1560
1561 /* First move what we can in the largest integer mode, then go to
1562 successively smaller modes. */
1563
1564 while (max_size > 1)
1565 {
1566 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1567 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1568 if (GET_MODE_SIZE (tmode) < max_size)
1569 mode = tmode;
1570
1571 if (mode == VOIDmode)
1572 break;
1573
1574 icode = mov_optab->handlers[(int) mode].insn_code;
1575 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1576 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1577
1578 max_size = GET_MODE_SIZE (mode);
1579 }
1580
1581 /* The code above should have handled everything. */
1582 if (data.len > 0)
1583 abort ();
1584 }
1585
1586 /* Return number of insns required to move L bytes by pieces.
1587 ALIGN (in bits) is maximum alignment we can assume. */
1588
1589 static unsigned HOST_WIDE_INT
1590 move_by_pieces_ninsns (l, align)
1591 unsigned HOST_WIDE_INT l;
1592 unsigned int align;
1593 {
1594 unsigned HOST_WIDE_INT n_insns = 0;
1595 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1596
1597 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1598 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1599 align = MOVE_MAX * BITS_PER_UNIT;
1600
1601 while (max_size > 1)
1602 {
1603 enum machine_mode mode = VOIDmode, tmode;
1604 enum insn_code icode;
1605
1606 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1607 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1608 if (GET_MODE_SIZE (tmode) < max_size)
1609 mode = tmode;
1610
1611 if (mode == VOIDmode)
1612 break;
1613
1614 icode = mov_optab->handlers[(int) mode].insn_code;
1615 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1616 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1617
1618 max_size = GET_MODE_SIZE (mode);
1619 }
1620
1621 if (l)
1622 abort ();
1623 return n_insns;
1624 }
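/* For example (illustrative figures for a hypothetical 32-bit target
   with MOVE_MAX == 4 and word-aligned operands): with L == 11 the loop
   charges 11/4 = 2 SImode moves leaving 3 bytes, then 3/2 = 1 HImode
   move leaving 1 byte, then 1 QImode move, for a total of 4 insns.
   When the alignment test fails for a mode (SLOW_UNALIGNED_ACCESS and
   ALIGN too small), that mode contributes nothing and its bytes are
   counted in the next narrower mode instead.  */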
1625
1626 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1627 with move instructions for mode MODE. GENFUN is the gen_... function
1628 to make a move insn for that mode. DATA has all the other info. */
1629
1630 static void
1631 move_by_pieces_1 (genfun, mode, data)
1632 rtx (*genfun) PARAMS ((rtx, ...));
1633 enum machine_mode mode;
1634 struct move_by_pieces *data;
1635 {
1636 unsigned int size = GET_MODE_SIZE (mode);
1637 rtx to1 = NULL_RTX, from1;
1638
1639 while (data->len >= size)
1640 {
1641 if (data->reverse)
1642 data->offset -= size;
1643
1644 if (data->to)
1645 {
1646 if (data->autinc_to)
1647 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1648 data->offset);
1649 else
1650 to1 = adjust_address (data->to, mode, data->offset);
1651 }
1652
1653 if (data->autinc_from)
1654 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1655 data->offset);
1656 else
1657 from1 = adjust_address (data->from, mode, data->offset);
1658
1659 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1660 emit_insn (gen_add2_insn (data->to_addr,
1661 GEN_INT (-(HOST_WIDE_INT)size)));
1662 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1663 emit_insn (gen_add2_insn (data->from_addr,
1664 GEN_INT (-(HOST_WIDE_INT)size)));
1665
1666 if (data->to)
1667 emit_insn ((*genfun) (to1, from1));
1668 else
1669 {
1670 #ifdef PUSH_ROUNDING
1671 emit_single_push_insn (mode, from1, NULL);
1672 #else
1673 abort ();
1674 #endif
1675 }
1676
1677 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1678 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1679 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1680 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1681
1682 if (! data->reverse)
1683 data->offset += size;
1684
1685 data->len -= size;
1686 }
1687 }
1688 \f
1689 /* Emit code to move a block Y to a block X. This may be done with
1690 string-move instructions, with multiple scalar move instructions,
1691 or with a library call.
1692
1693 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1694 SIZE is an rtx that says how long they are.
1695 ALIGN is the maximum alignment we can assume they have.
1696 METHOD describes what kind of copy this is, and what mechanisms may be used.
1697
1698 Return the address of the new block, if memcpy is called and returns it,
1699 0 otherwise. */
1700
1701 rtx
1702 emit_block_move (x, y, size, method)
1703 rtx x, y, size;
1704 enum block_op_methods method;
1705 {
1706 bool may_use_call;
1707 rtx retval = 0;
1708 unsigned int align;
1709
1710 switch (method)
1711 {
1712 case BLOCK_OP_NORMAL:
1713 may_use_call = true;
1714 break;
1715
1716 case BLOCK_OP_CALL_PARM:
1717 may_use_call = block_move_libcall_safe_for_call_parm ();
1718
1719 /* Make inhibit_defer_pop nonzero around the library call
1720 to force it to pop the arguments right away. */
1721 NO_DEFER_POP;
1722 break;
1723
1724 case BLOCK_OP_NO_LIBCALL:
1725 may_use_call = false;
1726 break;
1727
1728 default:
1729 abort ();
1730 }
1731
1732 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1733
1734 if (GET_MODE (x) != BLKmode)
1735 abort ();
1736 if (GET_MODE (y) != BLKmode)
1737 abort ();
1738
1739 x = protect_from_queue (x, 1);
1740 y = protect_from_queue (y, 0);
1741 size = protect_from_queue (size, 0);
1742
1743 if (GET_CODE (x) != MEM)
1744 abort ();
1745 if (GET_CODE (y) != MEM)
1746 abort ();
1747 if (size == 0)
1748 abort ();
1749
1750 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1751 can be incorrect is coming from __builtin_memcpy. */
1752 if (GET_CODE (size) == CONST_INT)
1753 {
1754 x = shallow_copy_rtx (x);
1755 y = shallow_copy_rtx (y);
1756 set_mem_size (x, size);
1757 set_mem_size (y, size);
1758 }
1759
1760 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1761 move_by_pieces (x, y, INTVAL (size), align);
1762 else if (emit_block_move_via_movstr (x, y, size, align))
1763 ;
1764 else if (may_use_call)
1765 retval = emit_block_move_via_libcall (x, y, size);
1766 else
1767 emit_block_move_via_loop (x, y, size, align);
1768
1769 if (method == BLOCK_OP_CALL_PARM)
1770 OK_DEFER_POP;
1771
1772 return retval;
1773 }
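/* For illustration, a typical caller looks something like:

     emit_block_move (dest_mem, src_mem, GEN_INT (bytes), BLOCK_OP_NORMAL);

   BLOCK_OP_CALL_PARM is used when the copy is setting up an argument for
   another call, so that a memcpy libcall cannot disturb arguments already
   placed on the stack; BLOCK_OP_NO_LIBCALL forbids the libcall entirely,
   for instance when the copy is itself part of expanding a memcpy-like
   builtin.  The return value is nonzero only when the memcpy libcall was
   emitted and its result is available.  */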
1774
1775 /* A subroutine of emit_block_move. Returns true if calling the
1776 block move libcall will not clobber any parameters which may have
1777 already been placed on the stack. */
1778
1779 static bool
1780 block_move_libcall_safe_for_call_parm ()
1781 {
1782 if (PUSH_ARGS)
1783 return true;
1784 else
1785 {
1786 /* Check to see whether memcpy takes all register arguments. */
1787 static enum {
1788 takes_regs_uninit, takes_regs_no, takes_regs_yes
1789 } takes_regs = takes_regs_uninit;
1790
1791 switch (takes_regs)
1792 {
1793 case takes_regs_uninit:
1794 {
1795 CUMULATIVE_ARGS args_so_far;
1796 tree fn, arg;
1797
1798 fn = emit_block_move_libcall_fn (false);
1799 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0);
1800
1801 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1802 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1803 {
1804 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1805 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1806 if (!tmp || !REG_P (tmp))
1807 goto fail_takes_regs;
1808 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1809 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
1810 NULL_TREE, 1))
1811 goto fail_takes_regs;
1812 #endif
1813 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1814 }
1815 }
1816 takes_regs = takes_regs_yes;
1817 /* FALLTHRU */
1818
1819 case takes_regs_yes:
1820 return true;
1821
1822 fail_takes_regs:
1823 takes_regs = takes_regs_no;
1824 /* FALLTHRU */
1825 case takes_regs_no:
1826 return false;
1827
1828 default:
1829 abort ();
1830 }
1831 }
1832 }
1833
1834 /* A subroutine of emit_block_move. Expand a movstr pattern;
1835 return true if successful. */
1836
1837 static bool
1838 emit_block_move_via_movstr (x, y, size, align)
1839 rtx x, y, size;
1840 unsigned int align;
1841 {
1842 /* Try the most limited insn first, because there's no point
1843 including more than one in the machine description unless
1844 the more limited one has some advantage. */
1845
1846 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1847 enum machine_mode mode;
1848
1849 /* Since this is a move insn, we don't care about volatility. */
1850 volatile_ok = 1;
1851
1852 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1853 mode = GET_MODE_WIDER_MODE (mode))
1854 {
1855 enum insn_code code = movstr_optab[(int) mode];
1856 insn_operand_predicate_fn pred;
1857
1858 if (code != CODE_FOR_nothing
1859 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1860 here because if SIZE is less than the mode mask, as it is
1861 returned by the macro, it will definitely be less than the
1862 actual mode mask. */
1863 && ((GET_CODE (size) == CONST_INT
1864 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1865 <= (GET_MODE_MASK (mode) >> 1)))
1866 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1867 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1868 || (*pred) (x, BLKmode))
1869 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1870 || (*pred) (y, BLKmode))
1871 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1872 || (*pred) (opalign, VOIDmode)))
1873 {
1874 rtx op2;
1875 rtx last = get_last_insn ();
1876 rtx pat;
1877
1878 op2 = convert_to_mode (mode, size, 1);
1879 pred = insn_data[(int) code].operand[2].predicate;
1880 if (pred != 0 && ! (*pred) (op2, mode))
1881 op2 = copy_to_mode_reg (mode, op2);
1882
1883 /* ??? When called via emit_block_move_for_call, it'd be
1884 nice if there were some way to inform the backend, so
1885 that it doesn't fail the expansion because it thinks
1886 emitting the libcall would be more efficient. */
1887
1888 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1889 if (pat)
1890 {
1891 emit_insn (pat);
1892 volatile_ok = 0;
1893 return true;
1894 }
1895 else
1896 delete_insns_since (last);
1897 }
1898 }
1899
1900 volatile_ok = 0;
1901 return false;
1902 }
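/* The operand layout assumed above is the conventional one for a movstrM
   expander: operand 0 is the destination BLKmode MEM, operand 1 the
   source BLKmode MEM, operand 2 the byte count (converted to mode M with
   convert_to_mode above), and operand 3 the shared alignment in bytes as
   a CONST_INT; the call made is effectively

     pat = GEN_FCN ((int) code) (x, y, op2, opalign);

   A target whose pattern cannot handle a particular case simply FAILs;
   the partially emitted insns are then deleted and emit_block_move falls
   back to the libcall or the explicit loop.  */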
1903
1904 /* A subroutine of emit_block_move. Expand a call to memcpy or bcopy.
1905 Return the return value from memcpy, 0 otherwise. */
1906
1907 static rtx
1908 emit_block_move_via_libcall (dst, src, size)
1909 rtx dst, src, size;
1910 {
1911 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1912 enum machine_mode size_mode;
1913 rtx retval;
1914
1915 /* DST, SRC, or SIZE may have been passed through protect_from_queue.
1916
1917 It is unsafe to save the value generated by protect_from_queue
1918 and reuse it later. Consider what happens if emit_queue is
1919 called before the return value from protect_from_queue is used.
1920
1921 Expansion of the CALL_EXPR below will call emit_queue before
1922 we are finished emitting RTL for argument setup. So if we are
1923 not careful we could get the wrong value for an argument.
1924
1925 To avoid this problem we go ahead and emit code to copy X, Y &
1926 SIZE into new pseudos. We can then place those new pseudos
1927 into an RTL_EXPR and use them later, even after a call to
1928 emit_queue.
1929
1930 Note this is not strictly needed for library calls since they
1931 do not call emit_queue before loading their arguments. However,
1932 we may need to have library calls call emit_queue in the future
1933 since failing to do so could cause problems for targets which
1934 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1935
1936 dst = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1937 src = copy_to_mode_reg (Pmode, XEXP (src, 0));
1938
1939 if (TARGET_MEM_FUNCTIONS)
1940 size_mode = TYPE_MODE (sizetype);
1941 else
1942 size_mode = TYPE_MODE (unsigned_type_node);
1943 size = convert_to_mode (size_mode, size, 1);
1944 size = copy_to_mode_reg (size_mode, size);
1945
1946 /* It is incorrect to use the libcall calling conventions to call
1947 memcpy in this context. This could be a user call to memcpy and
1948 the user may wish to examine the return value from memcpy. For
1949 targets where libcalls and normal calls have different conventions
1950 for returning pointers, we could end up generating incorrect code.
1951
1952 For convenience, we generate the call to bcopy this way as well. */
1953
1954 dst_tree = make_tree (ptr_type_node, dst);
1955 src_tree = make_tree (ptr_type_node, src);
1956 if (TARGET_MEM_FUNCTIONS)
1957 size_tree = make_tree (sizetype, size);
1958 else
1959 size_tree = make_tree (unsigned_type_node, size);
1960
1961 fn = emit_block_move_libcall_fn (true);
1962 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1963 if (TARGET_MEM_FUNCTIONS)
1964 {
1965 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1966 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1967 }
1968 else
1969 {
1970 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1971 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1972 }
1973
1974 /* Now we have to build up the CALL_EXPR itself. */
1975 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1976 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1977 call_expr, arg_list, NULL_TREE);
1978 TREE_SIDE_EFFECTS (call_expr) = 1;
1979
1980 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1981
1982 /* If we are initializing a readonly value, show the above call
1983 clobbered it. Otherwise, a load from it may erroneously be
1984 hoisted from a loop. */
1985 if (RTX_UNCHANGING_P (dst))
1986 emit_insn (gen_rtx_CLOBBER (VOIDmode, dst));
1987
1988 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
1989 }
1990
1991 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1992 for the function we use for block copies. The first time FOR_CALL
1993 is true, we call assemble_external. */
1994
1995 static GTY(()) tree block_move_fn;
1996
1997 static tree
1998 emit_block_move_libcall_fn (for_call)
1999 int for_call;
2000 {
2001 static bool emitted_extern;
2002 tree fn = block_move_fn, args;
2003
2004 if (!fn)
2005 {
2006 if (TARGET_MEM_FUNCTIONS)
2007 {
2008 fn = get_identifier ("memcpy");
2009 args = build_function_type_list (ptr_type_node, ptr_type_node,
2010 const_ptr_type_node, sizetype,
2011 NULL_TREE);
2012 }
2013 else
2014 {
2015 fn = get_identifier ("bcopy");
2016 args = build_function_type_list (void_type_node, const_ptr_type_node,
2017 ptr_type_node, unsigned_type_node,
2018 NULL_TREE);
2019 }
2020
2021 fn = build_decl (FUNCTION_DECL, fn, args);
2022 DECL_EXTERNAL (fn) = 1;
2023 TREE_PUBLIC (fn) = 1;
2024 DECL_ARTIFICIAL (fn) = 1;
2025 TREE_NOTHROW (fn) = 1;
2026
2027 block_move_fn = fn;
2028 }
2029
2030 if (for_call && !emitted_extern)
2031 {
2032 emitted_extern = true;
2033 make_decl_rtl (fn, NULL);
2034 assemble_external (fn);
2035 }
2036
2037 return fn;
2038 }
2039
2040 /* A subroutine of emit_block_move. Copy the data via an explicit
2041 loop. This is used only when libcalls are forbidden. */
2042 /* ??? It'd be nice to copy in hunks larger than QImode. */
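/* The RTL emitted below corresponds to the following C-like sketch,
   where ITER is a fresh pseudo and the names are purely illustrative
   (the final comparison is unsigned):

       iter = 0;
       goto cmp;
     top:
       ((char *) x)[iter] = ((char *) y)[iter];
       iter++;
     cmp:
       if (iter < size)
         goto top;
*/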
2043
2044 static void
2045 emit_block_move_via_loop (x, y, size, align)
2046 rtx x, y, size;
2047 unsigned int align ATTRIBUTE_UNUSED;
2048 {
2049 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
2050 enum machine_mode iter_mode;
2051
2052 iter_mode = GET_MODE (size);
2053 if (iter_mode == VOIDmode)
2054 iter_mode = word_mode;
2055
2056 top_label = gen_label_rtx ();
2057 cmp_label = gen_label_rtx ();
2058 iter = gen_reg_rtx (iter_mode);
2059
2060 emit_move_insn (iter, const0_rtx);
2061
2062 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
2063 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
2064 do_pending_stack_adjust ();
2065
2066 emit_note (NULL, NOTE_INSN_LOOP_BEG);
2067
2068 emit_jump (cmp_label);
2069 emit_label (top_label);
2070
2071 tmp = convert_modes (Pmode, iter_mode, iter, true);
2072 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
2073 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
2074 x = change_address (x, QImode, x_addr);
2075 y = change_address (y, QImode, y_addr);
2076
2077 emit_move_insn (x, y);
2078
2079 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
2080 true, OPTAB_LIB_WIDEN);
2081 if (tmp != iter)
2082 emit_move_insn (iter, tmp);
2083
2084 emit_note (NULL, NOTE_INSN_LOOP_CONT);
2085 emit_label (cmp_label);
2086
2087 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
2088 true, top_label);
2089
2090 emit_note (NULL, NOTE_INSN_LOOP_END);
2091 }
2092 \f
2093 /* Copy all or part of a value X into registers starting at REGNO.
2094 The number of registers to be filled is NREGS. */
2095
2096 void
2097 move_block_to_reg (regno, x, nregs, mode)
2098 int regno;
2099 rtx x;
2100 int nregs;
2101 enum machine_mode mode;
2102 {
2103 int i;
2104 #ifdef HAVE_load_multiple
2105 rtx pat;
2106 rtx last;
2107 #endif
2108
2109 if (nregs == 0)
2110 return;
2111
2112 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2113 x = validize_mem (force_const_mem (mode, x));
2114
2115 /* See if the machine can do this with a load multiple insn. */
2116 #ifdef HAVE_load_multiple
2117 if (HAVE_load_multiple)
2118 {
2119 last = get_last_insn ();
2120 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
2121 GEN_INT (nregs));
2122 if (pat)
2123 {
2124 emit_insn (pat);
2125 return;
2126 }
2127 else
2128 delete_insns_since (last);
2129 }
2130 #endif
2131
2132 for (i = 0; i < nregs; i++)
2133 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
2134 operand_subword_force (x, i, mode));
2135 }
2136
2137 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
2138 The number of registers to be filled is NREGS. SIZE indicates the number
2139 of bytes in the object X. */
2140
2141 void
2142 move_block_from_reg (regno, x, nregs, size)
2143 int regno;
2144 rtx x;
2145 int nregs;
2146 int size;
2147 {
2148 int i;
2149 #ifdef HAVE_store_multiple
2150 rtx pat;
2151 rtx last;
2152 #endif
2153 enum machine_mode mode;
2154
2155 if (nregs == 0)
2156 return;
2157
2158 /* If SIZE is that of a mode no bigger than a word, just use that
2159 mode's store operation. */
2160 if (size <= UNITS_PER_WORD
2161 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
2162 {
2163 emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
2164 return;
2165 }
2166
2167 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
2168 to the left before storing to memory. Note that the previous test
2169 doesn't handle all cases (e.g. SIZE == 3). */
2170 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
2171 {
2172 rtx tem = operand_subword (x, 0, 1, BLKmode);
2173 rtx shift;
2174
2175 if (tem == 0)
2176 abort ();
2177
2178 shift = expand_shift (LSHIFT_EXPR, word_mode,
2179 gen_rtx_REG (word_mode, regno),
2180 build_int_2 ((UNITS_PER_WORD - size)
2181 * BITS_PER_UNIT, 0), NULL_RTX, 0);
2182 emit_move_insn (tem, shift);
2183 return;
2184 }
2185
2186 /* See if the machine can do this with a store multiple insn. */
2187 #ifdef HAVE_store_multiple
2188 if (HAVE_store_multiple)
2189 {
2190 last = get_last_insn ();
2191 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
2192 GEN_INT (nregs));
2193 if (pat)
2194 {
2195 emit_insn (pat);
2196 return;
2197 }
2198 else
2199 delete_insns_since (last);
2200 }
2201 #endif
2202
2203 for (i = 0; i < nregs; i++)
2204 {
2205 rtx tem = operand_subword (x, i, 1, BLKmode);
2206
2207 if (tem == 0)
2208 abort ();
2209
2210 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
2211 }
2212 }
2213
2214 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
2215 ORIG, where ORIG is a non-consecutive group of registers represented by
2216 a PARALLEL. The clone is identical to the original except in that the
2217 original set of registers is replaced by a new set of pseudo registers.
2218 The new set has the same modes as the original set. */
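/* For illustration, a two-register group describing a 16-byte value
   might look like this (modes and register numbers are hypothetical):

     (parallel [(expr_list (reg:DI 101) (const_int 0))
                (expr_list (reg:DI 102) (const_int 8))])

   Each element pairs a register with its byte offset into the value;
   the code here and in emit_group_load/emit_group_store reads the
   register from XEXP (elt, 0) and the offset from XEXP (elt, 1).  The
   first element's register may be NULL when part of the value is also
   passed on the stack.  */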
2219
2220 rtx
2221 gen_group_rtx (orig)
2222 rtx orig;
2223 {
2224 int i, length;
2225 rtx *tmps;
2226
2227 if (GET_CODE (orig) != PARALLEL)
2228 abort ();
2229
2230 length = XVECLEN (orig, 0);
2231 tmps = (rtx *) alloca (sizeof (rtx) * length);
2232
2233 /* Skip a NULL entry in first slot. */
2234 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
2235
2236 if (i)
2237 tmps[0] = 0;
2238
2239 for (; i < length; i++)
2240 {
2241 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
2242 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
2243
2244 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
2245 }
2246
2247 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
2248 }
2249
2250 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
2251 registers represented by a PARALLEL. SSIZE represents the total size of
2252 block SRC in bytes, or -1 if not known. */
2253 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
2254 the balance will be in what would be the low-order memory addresses, i.e.
2255 left justified for big endian, right justified for little endian. This
2256 happens to be true for the targets currently using this support. If this
2257 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
2258 would be needed. */
2259
2260 void
2261 emit_group_load (dst, orig_src, ssize)
2262 rtx dst, orig_src;
2263 int ssize;
2264 {
2265 rtx *tmps, src;
2266 int start, i;
2267
2268 if (GET_CODE (dst) != PARALLEL)
2269 abort ();
2270
2271 /* Check for a NULL entry, used to indicate that the parameter goes
2272 both on the stack and in registers. */
2273 if (XEXP (XVECEXP (dst, 0, 0), 0))
2274 start = 0;
2275 else
2276 start = 1;
2277
2278 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
2279
2280 /* Process the pieces. */
2281 for (i = start; i < XVECLEN (dst, 0); i++)
2282 {
2283 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
2284 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
2285 unsigned int bytelen = GET_MODE_SIZE (mode);
2286 int shift = 0;
2287
2288 /* Handle trailing fragments that run over the size of the struct. */
2289 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2290 {
2291 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2292 bytelen = ssize - bytepos;
2293 if (bytelen <= 0)
2294 abort ();
2295 }
2296
2297 /* If we won't be loading directly from memory, protect the real source
2298 from strange tricks we might play; but make sure that the source can
2299 be loaded directly into the destination. */
2300 src = orig_src;
2301 if (GET_CODE (orig_src) != MEM
2302 && (!CONSTANT_P (orig_src)
2303 || (GET_MODE (orig_src) != mode
2304 && GET_MODE (orig_src) != VOIDmode)))
2305 {
2306 if (GET_MODE (orig_src) == VOIDmode)
2307 src = gen_reg_rtx (mode);
2308 else
2309 src = gen_reg_rtx (GET_MODE (orig_src));
2310
2311 emit_move_insn (src, orig_src);
2312 }
2313
2314 /* Optimize the access just a bit. */
2315 if (GET_CODE (src) == MEM
2316 && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
2317 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2318 && bytelen == GET_MODE_SIZE (mode))
2319 {
2320 tmps[i] = gen_reg_rtx (mode);
2321 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2322 }
2323 else if (GET_CODE (src) == CONCAT)
2324 {
2325 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
2326 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
2327
2328 if ((bytepos == 0 && bytelen == slen0)
2329 || (bytepos != 0 && bytepos + bytelen <= slen))
2330 {
2331 /* The following assumes that the concatenated objects all
2332 have the same size. In this case, a simple calculation
2333 can be used to determine the object and the bit field
2334 to be extracted. */
2335 tmps[i] = XEXP (src, bytepos / slen0);
2336 if (! CONSTANT_P (tmps[i])
2337 && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
2338 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
2339 (bytepos % slen0) * BITS_PER_UNIT,
2340 1, NULL_RTX, mode, mode, ssize);
2341 }
2342 else if (bytepos == 0)
2343 {
2344 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
2345 emit_move_insn (mem, src);
2346 tmps[i] = adjust_address (mem, mode, 0);
2347 }
2348 else
2349 abort ();
2350 }
2351 else if (CONSTANT_P (src)
2352 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2353 tmps[i] = src;
2354 else
2355 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2356 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2357 mode, mode, ssize);
2358
2359 if (BYTES_BIG_ENDIAN && shift)
2360 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2361 tmps[i], 0, OPTAB_WIDEN);
2362 }
2363
2364 emit_queue ();
2365
2366 /* Copy the extracted pieces into the proper (probable) hard regs. */
2367 for (i = start; i < XVECLEN (dst, 0); i++)
2368 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2369 }
2370
2371 /* Emit code to move a block SRC to block DST, where SRC and DST are
2372 non-consecutive groups of registers, each represented by a PARALLEL. */
2373
2374 void
2375 emit_group_move (dst, src)
2376 rtx dst, src;
2377 {
2378 int i;
2379
2380 if (GET_CODE (src) != PARALLEL
2381 || GET_CODE (dst) != PARALLEL
2382 || XVECLEN (src, 0) != XVECLEN (dst, 0))
2383 abort ();
2384
2385 /* Skip first entry if NULL. */
2386 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
2387 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
2388 XEXP (XVECEXP (src, 0, i), 0));
2389 }
2390
2391 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2392 registers represented by a PARALLEL. SSIZE represents the total size of
2393 block DST, or -1 if not known. */
2394
2395 void
2396 emit_group_store (orig_dst, src, ssize)
2397 rtx orig_dst, src;
2398 int ssize;
2399 {
2400 rtx *tmps, dst;
2401 int start, i;
2402
2403 if (GET_CODE (src) != PARALLEL)
2404 abort ();
2405
2406 /* Check for a NULL entry, used to indicate that the parameter goes
2407 both on the stack and in registers. */
2408 if (XEXP (XVECEXP (src, 0, 0), 0))
2409 start = 0;
2410 else
2411 start = 1;
2412
2413 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2414
2415 /* Copy the (probable) hard regs into pseudos. */
2416 for (i = start; i < XVECLEN (src, 0); i++)
2417 {
2418 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2419 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2420 emit_move_insn (tmps[i], reg);
2421 }
2422 emit_queue ();
2423
2424 /* If we won't be storing directly into memory, protect the real destination
2425 from strange tricks we might play. */
2426 dst = orig_dst;
2427 if (GET_CODE (dst) == PARALLEL)
2428 {
2429 rtx temp;
2430
2431 /* We can get a PARALLEL dst if there is a conditional expression in
2432 a return statement. In that case, the dst and src are the same,
2433 so no action is necessary. */
2434 if (rtx_equal_p (dst, src))
2435 return;
2436
2437 /* It is unclear if we can ever reach here, but we may as well handle
2438 it. Allocate a temporary, and split this into a store/load to/from
2439 the temporary. */
2440
2441 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2442 emit_group_store (temp, src, ssize);
2443 emit_group_load (dst, temp, ssize);
2444 return;
2445 }
2446 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2447 {
2448 dst = gen_reg_rtx (GET_MODE (orig_dst));
2449 /* Make life a bit easier for combine. */
2450 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
2451 }
2452
2453 /* Process the pieces. */
2454 for (i = start; i < XVECLEN (src, 0); i++)
2455 {
2456 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2457 enum machine_mode mode = GET_MODE (tmps[i]);
2458 unsigned int bytelen = GET_MODE_SIZE (mode);
2459 rtx dest = dst;
2460
2461 /* Handle trailing fragments that run over the size of the struct. */
2462 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2463 {
2464 if (BYTES_BIG_ENDIAN)
2465 {
2466 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2467 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2468 tmps[i], 0, OPTAB_WIDEN);
2469 }
2470 bytelen = ssize - bytepos;
2471 }
2472
2473 if (GET_CODE (dst) == CONCAT)
2474 {
2475 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2476 dest = XEXP (dst, 0);
2477 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2478 {
2479 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2480 dest = XEXP (dst, 1);
2481 }
2482 else if (bytepos == 0 && XVECLEN (src, 0))
2483 {
2484 dest = assign_stack_temp (GET_MODE (dest),
2485 GET_MODE_SIZE (GET_MODE (dest)), 0);
2486 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2487 tmps[i]);
2488 dst = dest;
2489 break;
2490 }
2491 else
2492 abort ();
2493 }
2494
2495 /* Optimize the access just a bit. */
2496 if (GET_CODE (dest) == MEM
2497 && MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)
2498 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2499 && bytelen == GET_MODE_SIZE (mode))
2500 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2501 else
2502 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2503 mode, tmps[i], ssize);
2504 }
2505
2506 emit_queue ();
2507
2508 /* Copy from the pseudo into the (probable) hard reg. */
2509 if (orig_dst != dst)
2510 emit_move_insn (orig_dst, dst);
2511 }
2512
2513 /* Generate code to copy a BLKmode object of TYPE out of a
2514 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2515 is null, a stack temporary is created. TGTBLK is returned.
2516
2517 The primary purpose of this routine is to handle functions
2518 that return BLKmode structures in registers. Some machines
2519 (the PA for example) want to return all small structures
2520 in registers regardless of the structure's alignment. */
2521
2522 rtx
2523 copy_blkmode_from_reg (tgtblk, srcreg, type)
2524 rtx tgtblk;
2525 rtx srcreg;
2526 tree type;
2527 {
2528 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2529 rtx src = NULL, dst = NULL;
2530 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2531 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2532
2533 if (tgtblk == 0)
2534 {
2535 tgtblk = assign_temp (build_qualified_type (type,
2536 (TYPE_QUALS (type)
2537 | TYPE_QUAL_CONST)),
2538 0, 1, 1);
2539 preserve_temp_slots (tgtblk);
2540 }
2541
2542 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2543 into a new pseudo which is a full word. */
2544
2545 if (GET_MODE (srcreg) != BLKmode
2546 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2547 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2548
2549 /* Structures whose size is not a multiple of a word are aligned
2550 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2551 machine, this means we must skip the empty high order bytes when
2552 calculating the bit offset. */
2553 if (BYTES_BIG_ENDIAN
2554 && bytes % UNITS_PER_WORD)
2555 big_endian_correction
2556 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
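  /* For instance, with BITS_PER_WORD == 32 and a 3-byte structure the
     correction is 32 - 3 * 8 == 8, i.e. the 8 empty high-order bits of
     the source word are skipped when extracting.  */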
2557
2558 /* Copy the structure BITSIZE bits at a time.
2559
2560 We could probably emit more efficient code for machines which do not use
2561 strict alignment, but it doesn't seem worth the effort at the current
2562 time. */
2563 for (bitpos = 0, xbitpos = big_endian_correction;
2564 bitpos < bytes * BITS_PER_UNIT;
2565 bitpos += bitsize, xbitpos += bitsize)
2566 {
2567 /* We need a new source operand each time xbitpos is on a
2568 word boundary and when xbitpos == big_endian_correction
2569 (the first time through). */
2570 if (xbitpos % BITS_PER_WORD == 0
2571 || xbitpos == big_endian_correction)
2572 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2573 GET_MODE (srcreg));
2574
2575 /* We need a new destination operand each time bitpos is on
2576 a word boundary. */
2577 if (bitpos % BITS_PER_WORD == 0)
2578 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2579
2580 /* Use xbitpos for the source extraction (right justified) and
2581 bitpos for the destination store (left justified). */
2582 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2583 extract_bit_field (src, bitsize,
2584 xbitpos % BITS_PER_WORD, 1,
2585 NULL_RTX, word_mode, word_mode,
2586 BITS_PER_WORD),
2587 BITS_PER_WORD);
2588 }
2589
2590 return tgtblk;
2591 }
2592
2593 /* Add a USE expression for REG to the (possibly empty) list pointed
2594 to by CALL_FUSAGE. REG must denote a hard register. */
2595
2596 void
2597 use_reg (call_fusage, reg)
2598 rtx *call_fusage, reg;
2599 {
2600 if (GET_CODE (reg) != REG
2601 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2602 abort ();
2603
2604 *call_fusage
2605 = gen_rtx_EXPR_LIST (VOIDmode,
2606 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2607 }
2608
2609 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2610 starting at REGNO. All of these registers must be hard registers. */
2611
2612 void
2613 use_regs (call_fusage, regno, nregs)
2614 rtx *call_fusage;
2615 int regno;
2616 int nregs;
2617 {
2618 int i;
2619
2620 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2621 abort ();
2622
2623 for (i = 0; i < nregs; i++)
2624 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2625 }
2626
2627 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2628 PARALLEL REGS. This is for calls that pass values in multiple
2629 non-contiguous locations. The Irix 6 ABI has examples of this. */
2630
2631 void
2632 use_group_regs (call_fusage, regs)
2633 rtx *call_fusage;
2634 rtx regs;
2635 {
2636 int i;
2637
2638 for (i = 0; i < XVECLEN (regs, 0); i++)
2639 {
2640 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2641
2642 /* A NULL entry means the parameter goes both on the stack and in
2643 registers. This can also be a MEM for targets that pass values
2644 partially on the stack and partially in registers. */
2645 if (reg != 0 && GET_CODE (reg) == REG)
2646 use_reg (call_fusage, reg);
2647 }
2648 }
2649 \f
2650
2651 /* Determine whether the LEN bytes generated by CONSTFUN can be
2652 stored to memory using several move instructions. CONSTFUNDATA is
2653 a pointer which will be passed as argument in every CONSTFUN call.
2654 ALIGN is maximum alignment we can assume. Return nonzero if a
2655 call to store_by_pieces should succeed. */
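/* A CONSTFUN callback simply maps (CONSTFUNDATA, OFFSET, MODE) to a
   constant rtx describing the piece at OFFSET.  A hypothetical callback
   that would fill memory with 0xff bytes might look like the sketch
   below; clear_by_pieces_1 later in this file is the in-tree instance,
   returning const0_rtx for every piece.

     static rtx
     fill_with_ones (data, offset, mode)
          PTR data ATTRIBUTE_UNUSED;
          HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
          enum machine_mode mode;
     {
       return GEN_INT (trunc_int_for_mode (-1, mode));
     }
*/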
2656
2657 int
2658 can_store_by_pieces (len, constfun, constfundata, align)
2659 unsigned HOST_WIDE_INT len;
2660 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2661 PTR constfundata;
2662 unsigned int align;
2663 {
2664 unsigned HOST_WIDE_INT max_size, l;
2665 HOST_WIDE_INT offset = 0;
2666 enum machine_mode mode, tmode;
2667 enum insn_code icode;
2668 int reverse;
2669 rtx cst;
2670
2671 if (! STORE_BY_PIECES_P (len, align))
2672 return 0;
2673
2674 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2675 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2676 align = MOVE_MAX * BITS_PER_UNIT;
2677
2678 /* We would first store what we can in the largest integer mode, then go to
2679 successively smaller modes. */
2680
2681 for (reverse = 0;
2682 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2683 reverse++)
2684 {
2685 l = len;
2686 mode = VOIDmode;
2687 max_size = STORE_MAX_PIECES + 1;
2688 while (max_size > 1)
2689 {
2690 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2691 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2692 if (GET_MODE_SIZE (tmode) < max_size)
2693 mode = tmode;
2694
2695 if (mode == VOIDmode)
2696 break;
2697
2698 icode = mov_optab->handlers[(int) mode].insn_code;
2699 if (icode != CODE_FOR_nothing
2700 && align >= GET_MODE_ALIGNMENT (mode))
2701 {
2702 unsigned int size = GET_MODE_SIZE (mode);
2703
2704 while (l >= size)
2705 {
2706 if (reverse)
2707 offset -= size;
2708
2709 cst = (*constfun) (constfundata, offset, mode);
2710 if (!LEGITIMATE_CONSTANT_P (cst))
2711 return 0;
2712
2713 if (!reverse)
2714 offset += size;
2715
2716 l -= size;
2717 }
2718 }
2719
2720 max_size = GET_MODE_SIZE (mode);
2721 }
2722
2723 /* The code above should have handled everything. */
2724 if (l != 0)
2725 abort ();
2726 }
2727
2728 return 1;
2729 }
2730
2731 /* Generate several move instructions to store LEN bytes generated by
2732 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2733 pointer which will be passed as argument in every CONSTFUN call.
2734 ALIGN is maximum alignment we can assume. */
2735
2736 void
2737 store_by_pieces (to, len, constfun, constfundata, align)
2738 rtx to;
2739 unsigned HOST_WIDE_INT len;
2740 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2741 PTR constfundata;
2742 unsigned int align;
2743 {
2744 struct store_by_pieces data;
2745
2746 if (! STORE_BY_PIECES_P (len, align))
2747 abort ();
2748 to = protect_from_queue (to, 1);
2749 data.constfun = constfun;
2750 data.constfundata = constfundata;
2751 data.len = len;
2752 data.to = to;
2753 store_by_pieces_1 (&data, align);
2754 }
2755
2756 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2757 rtx with BLKmode). The caller must pass TO through protect_from_queue
2758 before calling. ALIGN is maximum alignment we can assume. */
2759
2760 static void
2761 clear_by_pieces (to, len, align)
2762 rtx to;
2763 unsigned HOST_WIDE_INT len;
2764 unsigned int align;
2765 {
2766 struct store_by_pieces data;
2767
2768 data.constfun = clear_by_pieces_1;
2769 data.constfundata = NULL;
2770 data.len = len;
2771 data.to = to;
2772 store_by_pieces_1 (&data, align);
2773 }
2774
2775 /* Callback routine for clear_by_pieces.
2776 Return const0_rtx unconditionally. */
2777
2778 static rtx
2779 clear_by_pieces_1 (data, offset, mode)
2780 PTR data ATTRIBUTE_UNUSED;
2781 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2782 enum machine_mode mode ATTRIBUTE_UNUSED;
2783 {
2784 return const0_rtx;
2785 }
2786
2787 /* Subroutine of clear_by_pieces and store_by_pieces.
2788 Generate several move instructions to store LEN bytes of block TO. (A MEM
2789 rtx with BLKmode). The caller must pass TO through protect_from_queue
2790 before calling. ALIGN is maximum alignment we can assume. */
2791
2792 static void
2793 store_by_pieces_1 (data, align)
2794 struct store_by_pieces *data;
2795 unsigned int align;
2796 {
2797 rtx to_addr = XEXP (data->to, 0);
2798 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2799 enum machine_mode mode = VOIDmode, tmode;
2800 enum insn_code icode;
2801
2802 data->offset = 0;
2803 data->to_addr = to_addr;
2804 data->autinc_to
2805 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2806 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2807
2808 data->explicit_inc_to = 0;
2809 data->reverse
2810 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2811 if (data->reverse)
2812 data->offset = data->len;
2813
2814 /* If storing requires more than two move insns,
2815 copy addresses to registers (to make displacements shorter)
2816 and use post-increment if available. */
2817 if (!data->autinc_to
2818 && move_by_pieces_ninsns (data->len, align) > 2)
2819 {
2820 /* Determine the main mode we'll be using. */
2821 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2822 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2823 if (GET_MODE_SIZE (tmode) < max_size)
2824 mode = tmode;
2825
2826 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2827 {
2828 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2829 data->autinc_to = 1;
2830 data->explicit_inc_to = -1;
2831 }
2832
2833 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2834 && ! data->autinc_to)
2835 {
2836 data->to_addr = copy_addr_to_reg (to_addr);
2837 data->autinc_to = 1;
2838 data->explicit_inc_to = 1;
2839 }
2840
2841 if ( !data->autinc_to && CONSTANT_P (to_addr))
2842 data->to_addr = copy_addr_to_reg (to_addr);
2843 }
2844
2845 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2846 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2847 align = MOVE_MAX * BITS_PER_UNIT;
2848
2849 /* First store what we can in the largest integer mode, then go to
2850 successively smaller modes. */
2851
2852 while (max_size > 1)
2853 {
2854 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2855 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2856 if (GET_MODE_SIZE (tmode) < max_size)
2857 mode = tmode;
2858
2859 if (mode == VOIDmode)
2860 break;
2861
2862 icode = mov_optab->handlers[(int) mode].insn_code;
2863 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2864 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2865
2866 max_size = GET_MODE_SIZE (mode);
2867 }
2868
2869 /* The code above should have handled everything. */
2870 if (data->len != 0)
2871 abort ();
2872 }
2873
2874 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2875 with move instructions for mode MODE. GENFUN is the gen_... function
2876 to make a move insn for that mode. DATA has all the other info. */
2877
2878 static void
2879 store_by_pieces_2 (genfun, mode, data)
2880 rtx (*genfun) PARAMS ((rtx, ...));
2881 enum machine_mode mode;
2882 struct store_by_pieces *data;
2883 {
2884 unsigned int size = GET_MODE_SIZE (mode);
2885 rtx to1, cst;
2886
2887 while (data->len >= size)
2888 {
2889 if (data->reverse)
2890 data->offset -= size;
2891
2892 if (data->autinc_to)
2893 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2894 data->offset);
2895 else
2896 to1 = adjust_address (data->to, mode, data->offset);
2897
2898 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2899 emit_insn (gen_add2_insn (data->to_addr,
2900 GEN_INT (-(HOST_WIDE_INT) size)));
2901
2902 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2903 emit_insn ((*genfun) (to1, cst));
2904
2905 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2906 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2907
2908 if (! data->reverse)
2909 data->offset += size;
2910
2911 data->len -= size;
2912 }
2913 }
2914 \f
2915 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2916 its length in bytes. */
2917
2918 rtx
2919 clear_storage (object, size)
2920 rtx object;
2921 rtx size;
2922 {
2923 rtx retval = 0;
2924 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2925 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2926
2927 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2928 just move a zero. Otherwise, do this a piece at a time. */
2929 if (GET_MODE (object) != BLKmode
2930 && GET_CODE (size) == CONST_INT
2931 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2932 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2933 else
2934 {
2935 object = protect_from_queue (object, 1);
2936 size = protect_from_queue (size, 0);
2937
2938 if (GET_CODE (size) == CONST_INT
2939 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2940 clear_by_pieces (object, INTVAL (size), align);
2941 else if (clear_storage_via_clrstr (object, size, align))
2942 ;
2943 else
2944 retval = clear_storage_via_libcall (object, size);
2945 }
2946
2947 return retval;
2948 }
2949
2950 /* A subroutine of clear_storage. Expand a clrstr pattern;
2951 return true if successful. */
2952
2953 static bool
2954 clear_storage_via_clrstr (object, size, align)
2955 rtx object, size;
2956 unsigned int align;
2957 {
2958 /* Try the most limited insn first, because there's no point
2959 including more than one in the machine description unless
2960 the more limited one has some advantage. */
2961
2962 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2963 enum machine_mode mode;
2964
2965 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2966 mode = GET_MODE_WIDER_MODE (mode))
2967 {
2968 enum insn_code code = clrstr_optab[(int) mode];
2969 insn_operand_predicate_fn pred;
2970
2971 if (code != CODE_FOR_nothing
2972 /* We don't need MODE to be narrower than
2973 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2974 the mode mask, as it is returned by the macro, it will
2975 definitely be less than the actual mode mask. */
2976 && ((GET_CODE (size) == CONST_INT
2977 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2978 <= (GET_MODE_MASK (mode) >> 1)))
2979 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2980 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2981 || (*pred) (object, BLKmode))
2982 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2983 || (*pred) (opalign, VOIDmode)))
2984 {
2985 rtx op1;
2986 rtx last = get_last_insn ();
2987 rtx pat;
2988
2989 op1 = convert_to_mode (mode, size, 1);
2990 pred = insn_data[(int) code].operand[1].predicate;
2991 if (pred != 0 && ! (*pred) (op1, mode))
2992 op1 = copy_to_mode_reg (mode, op1);
2993
2994 pat = GEN_FCN ((int) code) (object, op1, opalign);
2995 if (pat)
2996 {
2997 emit_insn (pat);
2998 return true;
2999 }
3000 else
3001 delete_insns_since (last);
3002 }
3003 }
3004
3005 return false;
3006 }
3007
3008 /* A subroutine of clear_storage. Expand a call to memset or bzero.
3009 Return the return value of memset, 0 otherwise. */
3010
3011 static rtx
3012 clear_storage_via_libcall (object, size)
3013 rtx object, size;
3014 {
3015 tree call_expr, arg_list, fn, object_tree, size_tree;
3016 enum machine_mode size_mode;
3017 rtx retval;
3018
3019 /* OBJECT or SIZE may have been passed through protect_from_queue.
3020
3021 It is unsafe to save the value generated by protect_from_queue
3022 and reuse it later. Consider what happens if emit_queue is
3023 called before the return value from protect_from_queue is used.
3024
3025 Expansion of the CALL_EXPR below will call emit_queue before
3026 we are finished emitting RTL for argument setup. So if we are
3027 not careful we could get the wrong value for an argument.
3028
3029 To avoid this problem we go ahead and emit code to copy OBJECT
3030 and SIZE into new pseudos. We can then place those new pseudos
3031 into an RTL_EXPR and use them later, even after a call to
3032 emit_queue.
3033
3034 Note this is not strictly needed for library calls since they
3035 do not call emit_queue before loading their arguments. However,
3036 we may need to have library calls call emit_queue in the future
3037 since failing to do so could cause problems for targets which
3038 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
3039
3040 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
3041
3042 if (TARGET_MEM_FUNCTIONS)
3043 size_mode = TYPE_MODE (sizetype);
3044 else
3045 size_mode = TYPE_MODE (unsigned_type_node);
3046 size = convert_to_mode (size_mode, size, 1);
3047 size = copy_to_mode_reg (size_mode, size);
3048
3049 /* It is incorrect to use the libcall calling conventions to call
3050 memset in this context. This could be a user call to memset and
3051 the user may wish to examine the return value from memset. For
3052 targets where libcalls and normal calls have different conventions
3053 for returning pointers, we could end up generating incorrect code.
3054
3055 For convenience, we generate the call to bzero this way as well. */
3056
3057 object_tree = make_tree (ptr_type_node, object);
3058 if (TARGET_MEM_FUNCTIONS)
3059 size_tree = make_tree (sizetype, size);
3060 else
3061 size_tree = make_tree (unsigned_type_node, size);
3062
3063 fn = clear_storage_libcall_fn (true);
3064 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
3065 if (TARGET_MEM_FUNCTIONS)
3066 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
3067 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
3068
3069 /* Now we have to build up the CALL_EXPR itself. */
3070 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
3071 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
3072 call_expr, arg_list, NULL_TREE);
3073 TREE_SIDE_EFFECTS (call_expr) = 1;
3074
3075 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
3076
3077 /* If we are initializing a readonly value, show the above call
3078 clobbered it. Otherwise, a load from it may erroneously be
3079 hoisted from a loop. */
3080 if (RTX_UNCHANGING_P (object))
3081 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
3082
3083 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
3084 }
3085
3086 /* A subroutine of clear_storage_via_libcall. Create the tree node
3087 for the function we use for block clears. The first time FOR_CALL
3088 is true, we call assemble_external. */
3089
3090 static GTY(()) tree block_clear_fn;
3091
3092 static tree
3093 clear_storage_libcall_fn (for_call)
3094 int for_call;
3095 {
3096 static bool emitted_extern;
3097 tree fn = block_clear_fn, args;
3098
3099 if (!fn)
3100 {
3101 if (TARGET_MEM_FUNCTIONS)
3102 {
3103 fn = get_identifier ("memset");
3104 args = build_function_type_list (ptr_type_node, ptr_type_node,
3105 integer_type_node, sizetype,
3106 NULL_TREE);
3107 }
3108 else
3109 {
3110 fn = get_identifier ("bzero");
3111 args = build_function_type_list (void_type_node, ptr_type_node,
3112 unsigned_type_node, NULL_TREE);
3113 }
3114
3115 fn = build_decl (FUNCTION_DECL, fn, args);
3116 DECL_EXTERNAL (fn) = 1;
3117 TREE_PUBLIC (fn) = 1;
3118 DECL_ARTIFICIAL (fn) = 1;
3119 TREE_NOTHROW (fn) = 1;
3120
3121 block_clear_fn = fn;
3122 }
3123
3124 if (for_call && !emitted_extern)
3125 {
3126 emitted_extern = true;
3127 make_decl_rtl (fn, NULL);
3128 assemble_external (fn);
3129 }
3130
3131 return fn;
3132 }
3133 \f
3134 /* Generate code to copy Y into X.
3135 Both Y and X must have the same mode, except that
3136 Y can be a constant with VOIDmode.
3137 This mode cannot be BLKmode; use emit_block_move for that.
3138
3139 Return the last instruction emitted. */
3140
3141 rtx
3142 emit_move_insn (x, y)
3143 rtx x, y;
3144 {
3145 enum machine_mode mode = GET_MODE (x);
3146 rtx y_cst = NULL_RTX;
3147 rtx last_insn;
3148
3149 x = protect_from_queue (x, 1);
3150 y = protect_from_queue (y, 0);
3151
3152 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
3153 abort ();
3154
3155 /* Never force constant_p_rtx to memory. */
3156 if (GET_CODE (y) == CONSTANT_P_RTX)
3157 ;
3158 else if (CONSTANT_P (y))
3159 {
3160 if (optimize
3161 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3162 && (last_insn = compress_float_constant (x, y)))
3163 return last_insn;
3164
3165 if (!LEGITIMATE_CONSTANT_P (y))
3166 {
3167 y_cst = y;
3168 y = force_const_mem (mode, y);
3169
3170 /* If the target's cannot_force_const_mem prevented the spill,
3171 assume that the target's move expanders will also take care
3172 of the non-legitimate constant. */
3173 if (!y)
3174 y = y_cst;
3175 }
3176 }
3177
3178 /* If X or Y are memory references, verify that their addresses are valid
3179 for the machine. */
3180 if (GET_CODE (x) == MEM
3181 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3182 && ! push_operand (x, GET_MODE (x)))
3183 || (flag_force_addr
3184 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3185 x = validize_mem (x);
3186
3187 if (GET_CODE (y) == MEM
3188 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3189 || (flag_force_addr
3190 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3191 y = validize_mem (y);
3192
3193 if (mode == BLKmode)
3194 abort ();
3195
3196 last_insn = emit_move_insn_1 (x, y);
3197
3198 if (y_cst && GET_CODE (x) == REG)
3199 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3200
3201 return last_insn;
3202 }
3203
3204 /* Low level part of emit_move_insn.
3205 Called just like emit_move_insn, but assumes X and Y
3206 are basically valid. */
3207
3208 rtx
3209 emit_move_insn_1 (x, y)
3210 rtx x, y;
3211 {
3212 enum machine_mode mode = GET_MODE (x);
3213 enum machine_mode submode;
3214 enum mode_class class = GET_MODE_CLASS (mode);
3215
3216 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
3217 abort ();
3218
3219 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
3220 return
3221 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
3222
3223 /* Expand complex moves by moving real part and imag part, if possible. */
3224 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
3225 && BLKmode != (submode = GET_MODE_INNER (mode))
3226 && (mov_optab->handlers[(int) submode].insn_code
3227 != CODE_FOR_nothing))
3228 {
3229 /* Don't split destination if it is a stack push. */
3230 int stack = push_operand (x, GET_MODE (x));
3231
3232 #ifdef PUSH_ROUNDING
3233 /* In case we output to the stack, but the size is smaller than the
3234 machine can push exactly, we need to use move instructions. */
3235 if (stack
3236 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
3237 != GET_MODE_SIZE (submode)))
3238 {
3239 rtx temp;
3240 HOST_WIDE_INT offset1, offset2;
3241
3242 /* Do not use anti_adjust_stack, since we don't want to update
3243 stack_pointer_delta. */
3244 temp = expand_binop (Pmode,
3245 #ifdef STACK_GROWS_DOWNWARD
3246 sub_optab,
3247 #else
3248 add_optab,
3249 #endif
3250 stack_pointer_rtx,
3251 GEN_INT
3252 (PUSH_ROUNDING
3253 (GET_MODE_SIZE (GET_MODE (x)))),
3254 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3255
3256 if (temp != stack_pointer_rtx)
3257 emit_move_insn (stack_pointer_rtx, temp);
3258
3259 #ifdef STACK_GROWS_DOWNWARD
3260 offset1 = 0;
3261 offset2 = GET_MODE_SIZE (submode);
3262 #else
3263 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
3264 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
3265 + GET_MODE_SIZE (submode));
3266 #endif
3267
3268 emit_move_insn (change_address (x, submode,
3269 gen_rtx_PLUS (Pmode,
3270 stack_pointer_rtx,
3271 GEN_INT (offset1))),
3272 gen_realpart (submode, y));
3273 emit_move_insn (change_address (x, submode,
3274 gen_rtx_PLUS (Pmode,
3275 stack_pointer_rtx,
3276 GEN_INT (offset2))),
3277 gen_imagpart (submode, y));
3278 }
3279 else
3280 #endif
3281 /* If this is a stack push, push the highpart first, so it
3282 will be in the argument order.
3283
3284 In that case, change_address is used only to convert
3285 the mode, not to change the address. */
3286 if (stack)
3287 {
3288 /* Note that the real part always precedes the imag part in memory
3289 regardless of machine's endianness. */
3290 #ifdef STACK_GROWS_DOWNWARD
3291 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3292 (gen_rtx_MEM (submode, XEXP (x, 0)),
3293 gen_imagpart (submode, y)));
3294 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3295 (gen_rtx_MEM (submode, XEXP (x, 0)),
3296 gen_realpart (submode, y)));
3297 #else
3298 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3299 (gen_rtx_MEM (submode, XEXP (x, 0)),
3300 gen_realpart (submode, y)));
3301 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3302 (gen_rtx_MEM (submode, XEXP (x, 0)),
3303 gen_imagpart (submode, y)));
3304 #endif
3305 }
3306 else
3307 {
3308 rtx realpart_x, realpart_y;
3309 rtx imagpart_x, imagpart_y;
3310
3311 /* If this is a complex value with each part being smaller than a
3312 word, the usual calling sequence will likely pack the pieces into
3313 a single register. Unfortunately, SUBREG of hard registers only
3314 deals in terms of words, so we have a problem converting input
3315 arguments to the CONCAT of two registers that is used elsewhere
3316 for complex values. If this is before reload, we can copy it into
3317 memory and reload. FIXME, we should see about using extract and
3318 insert on integer registers, but complex short and complex char
3319 variables should be rarely used. */
3320 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
3321 && (reload_in_progress | reload_completed) == 0)
3322 {
3323 int packed_dest_p
3324 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
3325 int packed_src_p
3326 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
3327
3328 if (packed_dest_p || packed_src_p)
3329 {
3330 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
3331 ? MODE_FLOAT : MODE_INT);
3332
3333 enum machine_mode reg_mode
3334 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
3335
3336 if (reg_mode != BLKmode)
3337 {
3338 rtx mem = assign_stack_temp (reg_mode,
3339 GET_MODE_SIZE (mode), 0);
3340 rtx cmem = adjust_address (mem, mode, 0);
3341
3342 cfun->cannot_inline
3343 = N_("function using short complex types cannot be inline");
3344
3345 if (packed_dest_p)
3346 {
3347 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
3348
3349 emit_move_insn_1 (cmem, y);
3350 return emit_move_insn_1 (sreg, mem);
3351 }
3352 else
3353 {
3354 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
3355
3356 emit_move_insn_1 (mem, sreg);
3357 return emit_move_insn_1 (x, cmem);
3358 }
3359 }
3360 }
3361 }
3362
3363 realpart_x = gen_realpart (submode, x);
3364 realpart_y = gen_realpart (submode, y);
3365 imagpart_x = gen_imagpart (submode, x);
3366 imagpart_y = gen_imagpart (submode, y);
3367
3368 /* Show the output dies here. This is necessary for SUBREGs
3369 of pseudos since we cannot track their lifetimes correctly;
3370 hard regs shouldn't appear here except as return values.
3371 We never want to emit such a clobber after reload. */
3372 if (x != y
3373 && ! (reload_in_progress || reload_completed)
3374 && (GET_CODE (realpart_x) == SUBREG
3375 || GET_CODE (imagpart_x) == SUBREG))
3376 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3377
3378 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3379 (realpart_x, realpart_y));
3380 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3381 (imagpart_x, imagpart_y));
3382 }
3383
3384 return get_last_insn ();
3385 }
3386
3387 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
3388 find a mode to do it in. If we have a movcc, use it. Otherwise,
3389 find the MODE_INT mode of the same width. */
3390 else if (GET_MODE_CLASS (mode) == MODE_CC
3391 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
3392 {
3393 enum insn_code insn_code;
3394 enum machine_mode tmode = VOIDmode;
3395 rtx x1 = x, y1 = y;
3396
3397 if (mode != CCmode
3398 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
3399 tmode = CCmode;
3400 else
3401 for (tmode = QImode; tmode != VOIDmode;
3402 tmode = GET_MODE_WIDER_MODE (tmode))
3403 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
3404 break;
3405
3406 if (tmode == VOIDmode)
3407 abort ();
3408
3409 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
3410 may call change_address which is not appropriate if we were
3411 called when a reload was in progress. We don't have to worry
3412 about changing the address since the size in bytes is supposed to
3413 be the same. Copy the MEM to change the mode and move any
3414 substitutions from the old MEM to the new one. */
3415
3416 if (reload_in_progress)
3417 {
3418 x = gen_lowpart_common (tmode, x1);
3419 if (x == 0 && GET_CODE (x1) == MEM)
3420 {
3421 x = adjust_address_nv (x1, tmode, 0);
3422 copy_replacements (x1, x);
3423 }
3424
3425 y = gen_lowpart_common (tmode, y1);
3426 if (y == 0 && GET_CODE (y1) == MEM)
3427 {
3428 y = adjust_address_nv (y1, tmode, 0);
3429 copy_replacements (y1, y);
3430 }
3431 }
3432 else
3433 {
3434 x = gen_lowpart (tmode, x);
3435 y = gen_lowpart (tmode, y);
3436 }
3437
3438 insn_code = mov_optab->handlers[(int) tmode].insn_code;
3439 return emit_insn (GEN_FCN (insn_code) (x, y));
3440 }
3441
3442 /* This will handle any multi-word or full-word mode that lacks a move_insn
3443 pattern. However, you will get better code if you define such patterns,
3444 even if they must turn into multiple assembler instructions. */
3445 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3446 {
3447 rtx last_insn = 0;
3448 rtx seq, inner;
3449 int need_clobber;
3450 int i;
3451
3452 #ifdef PUSH_ROUNDING
3453
3454 /* If X is a push on the stack, do the push now and replace
3455 X with a reference to the stack pointer. */
3456 if (push_operand (x, GET_MODE (x)))
3457 {
3458 rtx temp;
3459 enum rtx_code code;
3460
3461 /* Do not use anti_adjust_stack, since we don't want to update
3462 stack_pointer_delta. */
3463 temp = expand_binop (Pmode,
3464 #ifdef STACK_GROWS_DOWNWARD
3465 sub_optab,
3466 #else
3467 add_optab,
3468 #endif
3469 stack_pointer_rtx,
3470 GEN_INT
3471 (PUSH_ROUNDING
3472 (GET_MODE_SIZE (GET_MODE (x)))),
3473 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3474
3475 if (temp != stack_pointer_rtx)
3476 emit_move_insn (stack_pointer_rtx, temp);
3477
3478 code = GET_CODE (XEXP (x, 0));
3479
3480 /* Just hope that small offsets off SP are OK. */
3481 if (code == POST_INC)
3482 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3483 GEN_INT (-((HOST_WIDE_INT)
3484 GET_MODE_SIZE (GET_MODE (x)))));
3485 else if (code == POST_DEC)
3486 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3487 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3488 else
3489 temp = stack_pointer_rtx;
3490
3491 x = change_address (x, VOIDmode, temp);
3492 }
3493 #endif
3494
3495 /* If we are in reload, see if either operand is a MEM whose address
3496 is scheduled for replacement. */
3497 if (reload_in_progress && GET_CODE (x) == MEM
3498 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3499 x = replace_equiv_address_nv (x, inner);
3500 if (reload_in_progress && GET_CODE (y) == MEM
3501 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3502 y = replace_equiv_address_nv (y, inner);
3503
3504 start_sequence ();
3505
3506 need_clobber = 0;
3507 for (i = 0;
3508 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3509 i++)
3510 {
3511 rtx xpart = operand_subword (x, i, 1, mode);
3512 rtx ypart = operand_subword (y, i, 1, mode);
3513
3514 /* If we can't get a part of Y, put Y into memory if it is a
3515 constant. Otherwise, force it into a register. If we still
3516 can't get a part of Y, abort. */
3517 if (ypart == 0 && CONSTANT_P (y))
3518 {
3519 y = force_const_mem (mode, y);
3520 ypart = operand_subword (y, i, 1, mode);
3521 }
3522 else if (ypart == 0)
3523 ypart = operand_subword_force (y, i, mode);
3524
3525 if (xpart == 0 || ypart == 0)
3526 abort ();
3527
3528 need_clobber |= (GET_CODE (xpart) == SUBREG);
3529
3530 last_insn = emit_move_insn (xpart, ypart);
3531 }
3532
3533 seq = get_insns ();
3534 end_sequence ();
3535
3536 /* Show the output dies here. This is necessary for SUBREGs
3537 of pseudos since we cannot track their lifetimes correctly;
3538 hard regs shouldn't appear here except as return values.
3539 We never want to emit such a clobber after reload. */
3540 if (x != y
3541 && ! (reload_in_progress || reload_completed)
3542 && need_clobber != 0)
3543 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3544
3545 emit_insn (seq);
3546
3547 return last_insn;
3548 }
3549 else
3550 abort ();
3551 }
3552
3553 /* If Y is representable exactly in a narrower mode, and the target can
3554 perform the extension directly from constant or memory, then emit the
3555 move as an extension. */
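/* For example, a DFmode constant such as 1.5 is exactly representable
   in SFmode, so on a target whose extendsfdf2 pattern accepts a memory
   or constant source the move can be emitted as a load of the SFmode
   constant followed by a float extension.  The mode names here are
   only an illustration.  */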
3556
3557 static rtx
3558 compress_float_constant (x, y)
3559 rtx x, y;
3560 {
3561 enum machine_mode dstmode = GET_MODE (x);
3562 enum machine_mode orig_srcmode = GET_MODE (y);
3563 enum machine_mode srcmode;
3564 REAL_VALUE_TYPE r;
3565
3566 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3567
3568 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3569 srcmode != orig_srcmode;
3570 srcmode = GET_MODE_WIDER_MODE (srcmode))
3571 {
3572 enum insn_code ic;
3573 rtx trunc_y, last_insn;
3574
3575 /* Skip if the target can't extend this way. */
3576 ic = can_extend_p (dstmode, srcmode, 0);
3577 if (ic == CODE_FOR_nothing)
3578 continue;
3579
3580 /* Skip if the narrowed value isn't exact. */
3581 if (! exact_real_truncate (srcmode, &r))
3582 continue;
3583
3584 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3585
3586 if (LEGITIMATE_CONSTANT_P (trunc_y))
3587 {
3588 /* Skip if the target needs extra instructions to perform
3589 the extension. */
3590 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3591 continue;
3592 }
3593 else if (float_extend_from_mem[dstmode][srcmode])
3594 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3595 else
3596 continue;
3597
3598 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3599 last_insn = get_last_insn ();
3600
3601 if (GET_CODE (x) == REG)
3602 REG_NOTES (last_insn)
3603 = gen_rtx_EXPR_LIST (REG_EQUAL, y, REG_NOTES (last_insn));
3604
3605 return last_insn;
3606 }
3607
3608 return NULL_RTX;
3609 }
3610 \f
3611 /* Pushing data onto the stack. */
3612
3613 /* Push a block of length SIZE (perhaps variable)
3614 and return an rtx to address the beginning of the block.
3615 Note that it is not possible for the value returned to be a QUEUED.
3616 The value may be virtual_outgoing_args_rtx.
3617
3618 EXTRA is the number of bytes of padding to push in addition to SIZE.
3619 BELOW nonzero means this padding comes at low addresses;
3620 otherwise, the padding comes at high addresses. */
3621
3622 rtx
3623 push_block (size, extra, below)
3624 rtx size;
3625 int extra, below;
3626 {
3627 rtx temp;
3628
3629 size = convert_modes (Pmode, ptr_mode, size, 1);
3630 if (CONSTANT_P (size))
3631 anti_adjust_stack (plus_constant (size, extra));
3632 else if (GET_CODE (size) == REG && extra == 0)
3633 anti_adjust_stack (size);
3634 else
3635 {
3636 temp = copy_to_mode_reg (Pmode, size);
3637 if (extra != 0)
3638 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3639 temp, 0, OPTAB_LIB_WIDEN);
3640 anti_adjust_stack (temp);
3641 }
3642
3643 #ifndef STACK_GROWS_DOWNWARD
3644 if (0)
3645 #else
3646 if (1)
3647 #endif
3648 {
3649 temp = virtual_outgoing_args_rtx;
3650 if (extra != 0 && below)
3651 temp = plus_constant (temp, extra);
3652 }
3653 else
3654 {
3655 if (GET_CODE (size) == CONST_INT)
3656 temp = plus_constant (virtual_outgoing_args_rtx,
3657 -INTVAL (size) - (below ? 0 : extra));
3658 else if (extra != 0 && !below)
3659 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3660 negate_rtx (Pmode, plus_constant (size, extra)));
3661 else
3662 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3663 negate_rtx (Pmode, size));
3664 }
3665
3666 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3667 }
3668
3669 #ifdef PUSH_ROUNDING
3670
3671 /* Emit single push insn. */
3672
3673 static void
3674 emit_single_push_insn (mode, x, type)
3675 rtx x;
3676 enum machine_mode mode;
3677 tree type;
3678 {
3679 rtx dest_addr;
3680 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3681 rtx dest;
3682 enum insn_code icode;
3683 insn_operand_predicate_fn pred;
3684
3685 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3686 /* If there is a push pattern, use it. Otherwise fall back to the old way
3687 of handing a MEM representing the push operation to the move expander. */
3688 icode = push_optab->handlers[(int) mode].insn_code;
3689 if (icode != CODE_FOR_nothing)
3690 {
3691 if (((pred = insn_data[(int) icode].operand[0].predicate)
3692 && !((*pred) (x, mode))))
3693 x = force_reg (mode, x);
3694 emit_insn (GEN_FCN (icode) (x));
3695 return;
3696 }
3697 if (GET_MODE_SIZE (mode) == rounded_size)
3698 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3699 else
3700 {
3701 #ifdef STACK_GROWS_DOWNWARD
3702 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3703 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3704 #else
3705 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3706 GEN_INT (rounded_size));
3707 #endif
3708 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3709 }
3710
3711 dest = gen_rtx_MEM (mode, dest_addr);
3712
3713 if (type != 0)
3714 {
3715 set_mem_attributes (dest, type, 1);
3716
3717 if (flag_optimize_sibling_calls)
3718 /* Function incoming arguments may overlap with sibling call
3719 outgoing arguments and we cannot allow reordering of reads
3720 from function arguments with stores to outgoing arguments
3721 of sibling calls. */
3722 set_mem_alias_set (dest, 0);
3723 }
3724 emit_move_insn (dest, x);
3725 }
3726 #endif
3727
3728 /* Generate code to push X onto the stack, assuming it has mode MODE and
3729 type TYPE.
3730 MODE is redundant except when X is a CONST_INT (since they don't
3731 carry mode info).
3732 SIZE is an rtx for the size of data to be copied (in bytes),
3733 needed only if X is BLKmode.
3734
3735 ALIGN (in bits) is maximum alignment we can assume.
3736
3737 If PARTIAL and REG are both nonzero, then copy that many of the first
3738 words of X into registers starting with REG, and push the rest of X.
3739 The amount of space pushed is decreased by PARTIAL words,
3740 rounded *down* to a multiple of PARM_BOUNDARY.
3741 REG must be a hard register in this case.
3742 If REG is zero but PARTIAL is not, take all other actions for an
3743 argument partially in registers, but do not actually load any
3744 registers.
3745
3746 EXTRA is the amount in bytes of extra space to leave next to this arg.
3747 This is ignored if an argument block has already been allocated.
3748
3749 On a machine that lacks real push insns, ARGS_ADDR is the address of
3750 the bottom of the argument block for this call. We use indexing off there
3751 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3752 argument block has not been preallocated.
3753
3754 ARGS_SO_FAR is the size of args previously pushed for this call.
3755
3756 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3757 for arguments passed in registers. If nonzero, it will be the number
3758 of bytes required. */
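
/* A hedged illustration (every argument value here is made up): pushing
   a single SImode value X when push instructions exist and no argument
   block has been preallocated might look like

       emit_push_insn (x, SImode, integer_type_node, NULL_RTX,
                       PARM_BOUNDARY, 0, NULL_RTX, 0, NULL_RTX,
                       const0_rtx, 0, NULL_RTX);

   Real callers compute ALIGN, PARTIAL, REG and the rest from the
   target's calling conventions rather than hard-coding them. */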
3759
3760 void
3761 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3762 args_addr, args_so_far, reg_parm_stack_space,
3763 alignment_pad)
3764 rtx x;
3765 enum machine_mode mode;
3766 tree type;
3767 rtx size;
3768 unsigned int align;
3769 int partial;
3770 rtx reg;
3771 int extra;
3772 rtx args_addr;
3773 rtx args_so_far;
3774 int reg_parm_stack_space;
3775 rtx alignment_pad;
3776 {
3777 rtx xinner;
3778 enum direction stack_direction
3779 #ifdef STACK_GROWS_DOWNWARD
3780 = downward;
3781 #else
3782 = upward;
3783 #endif
3784
3785 /* Decide where to pad the argument: `downward' for below,
3786 `upward' for above, or `none' for don't pad it.
3787 Default is below for small data on big-endian machines; else above. */
3788 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3789
3790 /* Invert direction if stack is post-decrement.
3791 FIXME: why? */
3792 if (STACK_PUSH_CODE == POST_DEC)
3793 if (where_pad != none)
3794 where_pad = (where_pad == downward ? upward : downward);
3795
3796 xinner = x = protect_from_queue (x, 0);
3797
3798 if (mode == BLKmode)
3799 {
3800 /* Copy a block into the stack, entirely or partially. */
3801
3802 rtx temp;
3803 int used = partial * UNITS_PER_WORD;
3804 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3805 int skip;
3806
3807 if (size == 0)
3808 abort ();
3809
3810 used -= offset;
3811
3812 /* USED is now the # of bytes we need not copy to the stack
3813 because registers will take care of them. */
3814
3815 if (partial != 0)
3816 xinner = adjust_address (xinner, BLKmode, used);
3817
3818 /* If the partial register-part of the arg counts in its stack size,
3819 skip the part of stack space corresponding to the registers.
3820 Otherwise, start copying to the beginning of the stack space,
3821 by setting SKIP to 0. */
3822 skip = (reg_parm_stack_space == 0) ? 0 : used;
3823
3824 #ifdef PUSH_ROUNDING
3825 /* Do it with several push insns if that doesn't take lots of insns
3826 and if there is no difficulty with push insns that skip bytes
3827 on the stack for alignment purposes. */
3828 if (args_addr == 0
3829 && PUSH_ARGS
3830 && GET_CODE (size) == CONST_INT
3831 && skip == 0
3832 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3833 /* Here we avoid the case of a structure whose weak alignment
3834 forces many pushes of a small amount of data,
3835 and such small pushes do rounding that causes trouble. */
3836 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3837 || align >= BIGGEST_ALIGNMENT
3838 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3839 == (align / BITS_PER_UNIT)))
3840 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3841 {
3842 /* Push padding now if padding above and stack grows down,
3843 or if padding below and stack grows up.
3844 But if space already allocated, this has already been done. */
3845 if (extra && args_addr == 0
3846 && where_pad != none && where_pad != stack_direction)
3847 anti_adjust_stack (GEN_INT (extra));
3848
3849 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3850 }
3851 else
3852 #endif /* PUSH_ROUNDING */
3853 {
3854 rtx target;
3855
3856 /* Otherwise make space on the stack and copy the data
3857 to the address of that space. */
3858
3859 /* Deduct words put into registers from the size we must copy. */
3860 if (partial != 0)
3861 {
3862 if (GET_CODE (size) == CONST_INT)
3863 size = GEN_INT (INTVAL (size) - used);
3864 else
3865 size = expand_binop (GET_MODE (size), sub_optab, size,
3866 GEN_INT (used), NULL_RTX, 0,
3867 OPTAB_LIB_WIDEN);
3868 }
3869
3870 /* Get the address of the stack space.
3871 In this case, we do not deal with EXTRA separately.
3872 A single stack adjust will do. */
3873 if (! args_addr)
3874 {
3875 temp = push_block (size, extra, where_pad == downward);
3876 extra = 0;
3877 }
3878 else if (GET_CODE (args_so_far) == CONST_INT)
3879 temp = memory_address (BLKmode,
3880 plus_constant (args_addr,
3881 skip + INTVAL (args_so_far)));
3882 else
3883 temp = memory_address (BLKmode,
3884 plus_constant (gen_rtx_PLUS (Pmode,
3885 args_addr,
3886 args_so_far),
3887 skip));
3888
3889 if (!ACCUMULATE_OUTGOING_ARGS)
3890 {
3891 /* If the source is referenced relative to the stack pointer,
3892 copy it to another register to stabilize it. We do not need
3893 to do this if we know that we won't be changing sp. */
3894
3895 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3896 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3897 temp = copy_to_reg (temp);
3898 }
3899
3900 target = gen_rtx_MEM (BLKmode, temp);
3901
3902 if (type != 0)
3903 {
3904 set_mem_attributes (target, type, 1);
3905 /* Function incoming arguments may overlap with sibling call
3906 outgoing arguments and we cannot allow reordering of reads
3907 from function arguments with stores to outgoing arguments
3908 of sibling calls. */
3909 set_mem_alias_set (target, 0);
3910 }
3911
3912 /* ALIGN may well be better aligned than TYPE, e.g. due to
3913 PARM_BOUNDARY. Assume the caller isn't lying. */
3914 set_mem_align (target, align);
3915
3916 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3917 }
3918 }
3919 else if (partial > 0)
3920 {
3921 /* Scalar partly in registers. */
3922
3923 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3924 int i;
3925 int not_stack;
3926 /* # words of start of argument
3927 that we must make space for but need not store. */
3928 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3929 int args_offset = INTVAL (args_so_far);
3930 int skip;
3931
3932 /* Push padding now if padding above and stack grows down,
3933 or if padding below and stack grows up.
3934 But if space already allocated, this has already been done. */
3935 if (extra && args_addr == 0
3936 && where_pad != none && where_pad != stack_direction)
3937 anti_adjust_stack (GEN_INT (extra));
3938
3939 /* If we make space by pushing it, we might as well push
3940 the real data. Otherwise, we can leave OFFSET nonzero
3941 and leave the space uninitialized. */
3942 if (args_addr == 0)
3943 offset = 0;
3944
3945 /* Now NOT_STACK gets the number of words that we don't need to
3946 allocate on the stack. */
3947 not_stack = partial - offset;
3948
3949 /* If the partial register-part of the arg counts in its stack size,
3950 skip the part of stack space corresponding to the registers.
3951 Otherwise, start copying to the beginning of the stack space,
3952 by setting SKIP to 0. */
3953 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3954
3955 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3956 x = validize_mem (force_const_mem (mode, x));
3957
3958 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3959 SUBREGs of such registers are not allowed. */
3960 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3961 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3962 x = copy_to_reg (x);
3963
3964 /* Loop over all the words allocated on the stack for this arg. */
3965 /* We can do it by words, because any scalar bigger than a word
3966 has a size that is a multiple of a word. */
3967 #ifndef PUSH_ARGS_REVERSED
3968 for (i = not_stack; i < size; i++)
3969 #else
3970 for (i = size - 1; i >= not_stack; i--)
3971 #endif
3972 if (i >= not_stack + offset)
3973 emit_push_insn (operand_subword_force (x, i, mode),
3974 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3975 0, args_addr,
3976 GEN_INT (args_offset + ((i - not_stack + skip)
3977 * UNITS_PER_WORD)),
3978 reg_parm_stack_space, alignment_pad);
3979 }
3980 else
3981 {
3982 rtx addr;
3983 rtx dest;
3984
3985 /* Push padding now if padding above and stack grows down,
3986 or if padding below and stack grows up.
3987 But if space already allocated, this has already been done. */
3988 if (extra && args_addr == 0
3989 && where_pad != none && where_pad != stack_direction)
3990 anti_adjust_stack (GEN_INT (extra));
3991
3992 #ifdef PUSH_ROUNDING
3993 if (args_addr == 0 && PUSH_ARGS)
3994 emit_single_push_insn (mode, x, type);
3995 else
3996 #endif
3997 {
3998 if (GET_CODE (args_so_far) == CONST_INT)
3999 addr
4000 = memory_address (mode,
4001 plus_constant (args_addr,
4002 INTVAL (args_so_far)));
4003 else
4004 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4005 args_so_far));
4006 dest = gen_rtx_MEM (mode, addr);
4007 if (type != 0)
4008 {
4009 set_mem_attributes (dest, type, 1);
4010 /* Function incoming arguments may overlap with sibling call
4011 outgoing arguments and we cannot allow reordering of reads
4012 from function arguments with stores to outgoing arguments
4013 of sibling calls. */
4014 set_mem_alias_set (dest, 0);
4015 }
4016
4017 emit_move_insn (dest, x);
4018 }
4019 }
4020
4021 /* If part should go in registers, copy that part
4022 into the appropriate registers. Do this now, at the end,
4023 since mem-to-mem copies above may do function calls. */
4024 if (partial > 0 && reg != 0)
4025 {
4026 /* Handle calls that pass values in multiple non-contiguous locations.
4027 The Irix 6 ABI has examples of this. */
4028 if (GET_CODE (reg) == PARALLEL)
4029 emit_group_load (reg, x, -1); /* ??? size? */
4030 else
4031 move_block_to_reg (REGNO (reg), x, partial, mode);
4032 }
4033
4034 if (extra && args_addr == 0 && where_pad == stack_direction)
4035 anti_adjust_stack (GEN_INT (extra));
4036
4037 if (alignment_pad && args_addr == 0)
4038 anti_adjust_stack (alignment_pad);
4039 }
4040 \f
4041 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4042 operations. */
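
/* For instance, get_subtarget returns 0 for a null rtx, for anything
   that is not a REG, for a hard register, and inside a loop; only an
   ordinary pseudo register not marked RTX_UNCHANGING_P comes back as a
   usable subtarget. */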
4043
4044 static rtx
4045 get_subtarget (x)
4046 rtx x;
4047 {
4048 return ((x == 0
4049 /* Only registers can be subtargets. */
4050 || GET_CODE (x) != REG
4051 /* If the register is readonly, it can't be set more than once. */
4052 || RTX_UNCHANGING_P (x)
4053 /* Don't use hard regs to avoid extending their life. */
4054 || REGNO (x) < FIRST_PSEUDO_REGISTER
4055 /* Avoid subtargets inside loops,
4056 since they hide some invariant expressions. */
4057 || preserve_subexpressions_p ())
4058 ? 0 : x);
4059 }
4060
4061 /* Expand an assignment that stores the value of FROM into TO.
4062 If WANT_VALUE is nonzero, return an rtx for the value of TO.
4063 (This may contain a QUEUED rtx;
4064 if the value is constant, this rtx is a constant.)
4065 Otherwise, the returned value is NULL_RTX.
4066
4067 SUGGEST_REG is no longer actually used.
4068 It used to mean, copy the value through a register
4069 and return that register, if that is possible.
4070 We now use WANT_VALUE to decide whether to do this. */
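
/* Illustrative sketch only: for a source-level assignment "dst = src"
   the expander ends up doing something equivalent to

       rtx r = expand_assignment (dst_tree, src_tree, 0, 0);

   where DST_TREE and SRC_TREE are the (hypothetical) trees for the two
   sides; with WANT_VALUE zero the returned R is NULL_RTX. */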
4071
4072 rtx
4073 expand_assignment (to, from, want_value, suggest_reg)
4074 tree to, from;
4075 int want_value;
4076 int suggest_reg ATTRIBUTE_UNUSED;
4077 {
4078 rtx to_rtx = 0;
4079 rtx result;
4080
4081 /* Don't crash if the lhs of the assignment was erroneous. */
4082
4083 if (TREE_CODE (to) == ERROR_MARK)
4084 {
4085 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
4086 return want_value ? result : NULL_RTX;
4087 }
4088
4089 /* Assignment of a structure component needs special treatment
4090 if the structure component's rtx is not simply a MEM.
4091 Assignment of an array element at a constant index, and assignment of
4092 an array element in an unaligned packed structure field, has the same
4093 problem. */
4094
4095 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
4096 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
4097 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4098 {
4099 enum machine_mode mode1;
4100 HOST_WIDE_INT bitsize, bitpos;
4101 rtx orig_to_rtx;
4102 tree offset;
4103 int unsignedp;
4104 int volatilep = 0;
4105 tree tem;
4106
4107 push_temp_slots ();
4108 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4109 &unsignedp, &volatilep);
4110
4111 /* If we are going to use store_bit_field and extract_bit_field,
4112 make sure to_rtx will be safe for multiple use. */
4113
4114 if (mode1 == VOIDmode && want_value)
4115 tem = stabilize_reference (tem);
4116
4117 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
4118
4119 if (offset != 0)
4120 {
4121 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4122
4123 if (GET_CODE (to_rtx) != MEM)
4124 abort ();
4125
4126 #ifdef POINTERS_EXTEND_UNSIGNED
4127 if (GET_MODE (offset_rtx) != Pmode)
4128 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4129 #else
4130 if (GET_MODE (offset_rtx) != ptr_mode)
4131 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4132 #endif
4133
4134 /* A constant address in TO_RTX can have VOIDmode; we must not
4135 try to call force_reg in that case, so avoid it. */
4136 if (GET_CODE (to_rtx) == MEM
4137 && GET_MODE (to_rtx) == BLKmode
4138 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4139 && bitsize > 0
4140 && (bitpos % bitsize) == 0
4141 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4142 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4143 {
4144 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4145 bitpos = 0;
4146 }
4147
4148 to_rtx = offset_address (to_rtx, offset_rtx,
4149 highest_pow2_factor_for_type (TREE_TYPE (to),
4150 offset));
4151 }
4152
4153 if (GET_CODE (to_rtx) == MEM)
4154 {
4155 /* If the field is at offset zero, we could have been given the
4156 DECL_RTX of the parent struct. Don't munge it. */
4157 to_rtx = shallow_copy_rtx (to_rtx);
4158
4159 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4160 }
4161
4162 /* Deal with volatile and readonly fields. The former is only done
4163 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4164 if (volatilep && GET_CODE (to_rtx) == MEM)
4165 {
4166 if (to_rtx == orig_to_rtx)
4167 to_rtx = copy_rtx (to_rtx);
4168 MEM_VOLATILE_P (to_rtx) = 1;
4169 }
4170
4171 if (TREE_CODE (to) == COMPONENT_REF
4172 && TREE_READONLY (TREE_OPERAND (to, 1)))
4173 {
4174 if (to_rtx == orig_to_rtx)
4175 to_rtx = copy_rtx (to_rtx);
4176 RTX_UNCHANGING_P (to_rtx) = 1;
4177 }
4178
4179 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
4180 {
4181 if (to_rtx == orig_to_rtx)
4182 to_rtx = copy_rtx (to_rtx);
4183 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4184 }
4185
4186 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4187 (want_value
4188 /* Spurious cast for HPUX compiler. */
4189 ? ((enum machine_mode)
4190 TYPE_MODE (TREE_TYPE (to)))
4191 : VOIDmode),
4192 unsignedp, TREE_TYPE (tem), get_alias_set (to));
4193
4194 preserve_temp_slots (result);
4195 free_temp_slots ();
4196 pop_temp_slots ();
4197
4198 /* If the value is meaningful, convert RESULT to the proper mode.
4199 Otherwise, return nothing. */
4200 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
4201 TYPE_MODE (TREE_TYPE (from)),
4202 result,
4203 TREE_UNSIGNED (TREE_TYPE (to)))
4204 : NULL_RTX);
4205 }
4206
4207 /* If the rhs is a function call and its value is not an aggregate,
4208 call the function before we start to compute the lhs.
4209 This is needed for correct code for cases such as
4210 val = setjmp (buf) on machines where reference to val
4211 requires loading up part of an address in a separate insn.
4212
4213 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4214 since it might be a promoted variable where the zero- or sign- extension
4215 needs to be done. Handling this in the normal way is safe because no
4216 computation is done before the call. */
4217 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
4218 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4219 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4220 && GET_CODE (DECL_RTL (to)) == REG))
4221 {
4222 rtx value;
4223
4224 push_temp_slots ();
4225 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
4226 if (to_rtx == 0)
4227 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4228
4229 /* Handle calls that return values in multiple non-contiguous locations.
4230 The Irix 6 ABI has examples of this. */
4231 if (GET_CODE (to_rtx) == PARALLEL)
4232 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
4233 else if (GET_MODE (to_rtx) == BLKmode)
4234 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4235 else
4236 {
4237 #ifdef POINTERS_EXTEND_UNSIGNED
4238 if (POINTER_TYPE_P (TREE_TYPE (to))
4239 && GET_MODE (to_rtx) != GET_MODE (value))
4240 value = convert_memory_address (GET_MODE (to_rtx), value);
4241 #endif
4242 emit_move_insn (to_rtx, value);
4243 }
4244 preserve_temp_slots (to_rtx);
4245 free_temp_slots ();
4246 pop_temp_slots ();
4247 return want_value ? to_rtx : NULL_RTX;
4248 }
4249
4250 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4251 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4252
4253 if (to_rtx == 0)
4254 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4255
4256 /* Don't move directly into a return register. */
4257 if (TREE_CODE (to) == RESULT_DECL
4258 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
4259 {
4260 rtx temp;
4261
4262 push_temp_slots ();
4263 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
4264
4265 if (GET_CODE (to_rtx) == PARALLEL)
4266 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
4267 else
4268 emit_move_insn (to_rtx, temp);
4269
4270 preserve_temp_slots (to_rtx);
4271 free_temp_slots ();
4272 pop_temp_slots ();
4273 return want_value ? to_rtx : NULL_RTX;
4274 }
4275
4276 /* In case we are returning the contents of an object which overlaps
4277 the place the value is being stored, use a safe function when copying
4278 a value through a pointer into a structure value return block. */
4279 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4280 && current_function_returns_struct
4281 && !current_function_returns_pcc_struct)
4282 {
4283 rtx from_rtx, size;
4284
4285 push_temp_slots ();
4286 size = expr_size (from);
4287 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
4288
4289 if (TARGET_MEM_FUNCTIONS)
4290 emit_library_call (memmove_libfunc, LCT_NORMAL,
4291 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4292 XEXP (from_rtx, 0), Pmode,
4293 convert_to_mode (TYPE_MODE (sizetype),
4294 size, TREE_UNSIGNED (sizetype)),
4295 TYPE_MODE (sizetype));
4296 else
4297 emit_library_call (bcopy_libfunc, LCT_NORMAL,
4298 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
4299 XEXP (to_rtx, 0), Pmode,
4300 convert_to_mode (TYPE_MODE (integer_type_node),
4301 size,
4302 TREE_UNSIGNED (integer_type_node)),
4303 TYPE_MODE (integer_type_node));
4304
4305 preserve_temp_slots (to_rtx);
4306 free_temp_slots ();
4307 pop_temp_slots ();
4308 return want_value ? to_rtx : NULL_RTX;
4309 }
4310
4311 /* Compute FROM and store the value in the rtx we got. */
4312
4313 push_temp_slots ();
4314 result = store_expr (from, to_rtx, want_value);
4315 preserve_temp_slots (result);
4316 free_temp_slots ();
4317 pop_temp_slots ();
4318 return want_value ? result : NULL_RTX;
4319 }
4320
4321 /* Generate code for computing expression EXP,
4322 and storing the value into TARGET.
4323 TARGET may contain a QUEUED rtx.
4324
4325 If WANT_VALUE & 1 is nonzero, return a copy of the value
4326 not in TARGET, so that we can be sure to use the proper
4327 value in a containing expression even if TARGET has something
4328 else stored in it. If possible, we copy the value through a pseudo
4329 and return that pseudo. Or, if the value is constant, we try to
4330 return the constant. In some cases, we return a pseudo
4331 copied *from* TARGET.
4332
4333 If the mode is BLKmode then we may return TARGET itself.
4334 It turns out that in BLKmode it doesn't cause a problem,
4335 because C has no operators that could combine two different
4336 assignments into the same BLKmode object with different values
4337 with no sequence point. Will other languages need this to
4338 be more thorough?
4339
4340 If WANT_VALUE & 1 is 0, we return NULL, to make sure
4341 to catch quickly any cases where the caller uses the value
4342 and fails to set WANT_VALUE.
4343
4344 If WANT_VALUE & 2 is set, this is a store into a call param on the
4345 stack, and block moves may need to be treated specially. */
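
/* A minimal sketch (names hypothetical): to expand EXP into an existing
   pseudo register and also get the value back, a caller could do

       rtx target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
       rtx value = store_expr (exp, target, 1);

   Passing 0 for WANT_VALUE instead makes store_expr return NULL_RTX. */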
4346
4347 rtx
4348 store_expr (exp, target, want_value)
4349 tree exp;
4350 rtx target;
4351 int want_value;
4352 {
4353 rtx temp;
4354 int dont_return_target = 0;
4355 int dont_store_target = 0;
4356
4357 if (VOID_TYPE_P (TREE_TYPE (exp)))
4358 {
4359 /* C++ can generate ?: expressions with a throw expression in one
4360 branch and an rvalue in the other. Here, we resolve attempts to
4361 store the throw expression's nonexistent result. */
4362 if (want_value)
4363 abort ();
4364 expand_expr (exp, const0_rtx, VOIDmode, 0);
4365 return NULL_RTX;
4366 }
4367 if (TREE_CODE (exp) == COMPOUND_EXPR)
4368 {
4369 /* Perform first part of compound expression, then assign from second
4370 part. */
4371 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4372 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4373 emit_queue ();
4374 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4375 }
4376 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4377 {
4378 /* For conditional expression, get safe form of the target. Then
4379 test the condition, doing the appropriate assignment on either
4380 side. This avoids the creation of unnecessary temporaries.
4381 For non-BLKmode, it is more efficient not to do this. */
4382
4383 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4384
4385 emit_queue ();
4386 target = protect_from_queue (target, 1);
4387
4388 do_pending_stack_adjust ();
4389 NO_DEFER_POP;
4390 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4391 start_cleanup_deferral ();
4392 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
4393 end_cleanup_deferral ();
4394 emit_queue ();
4395 emit_jump_insn (gen_jump (lab2));
4396 emit_barrier ();
4397 emit_label (lab1);
4398 start_cleanup_deferral ();
4399 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
4400 end_cleanup_deferral ();
4401 emit_queue ();
4402 emit_label (lab2);
4403 OK_DEFER_POP;
4404
4405 return want_value & 1 ? target : NULL_RTX;
4406 }
4407 else if (queued_subexp_p (target))
4408 /* If target contains a postincrement, let's not risk
4409 using it as the place to generate the rhs. */
4410 {
4411 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4412 {
4413 /* Expand EXP into a new pseudo. */
4414 temp = gen_reg_rtx (GET_MODE (target));
4415 temp = expand_expr (exp, temp, GET_MODE (target),
4416 (want_value & 2
4417 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4418 }
4419 else
4420 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4421 (want_value & 2
4422 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4423
4424 /* If target is volatile, ANSI requires accessing the value
4425 *from* the target, if it is accessed. So make that happen.
4426 In no case return the target itself. */
4427 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
4428 dont_return_target = 1;
4429 }
4430 else if ((want_value & 1) != 0
4431 && GET_CODE (target) == MEM
4432 && ! MEM_VOLATILE_P (target)
4433 && GET_MODE (target) != BLKmode)
4434 /* If target is in memory and caller wants value in a register instead,
4435 arrange that. Pass TARGET as target for expand_expr so that,
4436 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4437 We know expand_expr will not use the target in that case.
4438 Don't do this if TARGET is volatile because we are supposed
4439 to write it and then read it. */
4440 {
4441 temp = expand_expr (exp, target, GET_MODE (target),
4442 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4443 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4444 {
4445 /* If TEMP is already in the desired TARGET, only copy it from
4446 memory and don't store it there again. */
4447 if (temp == target
4448 || (rtx_equal_p (temp, target)
4449 && ! side_effects_p (temp) && ! side_effects_p (target)))
4450 dont_store_target = 1;
4451 temp = copy_to_reg (temp);
4452 }
4453 dont_return_target = 1;
4454 }
4455 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4456 /* If this is a scalar in a register that is stored in a wider mode
4457 than the declared mode, compute the result into its declared mode
4458 and then convert to the wider mode. Our value is the computed
4459 expression. */
4460 {
4461 rtx inner_target = 0;
4462
4463 /* If we don't want a value, we can do the conversion inside EXP,
4464 which will often result in some optimizations. Do the conversion
4465 in two steps: first change the signedness, if needed, then
4466 the extend. But don't do this if the type of EXP is a subtype
4467 of something else since then the conversion might involve
4468 more than just converting modes. */
4469 if ((want_value & 1) == 0
4470 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4471 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4472 {
4473 if (TREE_UNSIGNED (TREE_TYPE (exp))
4474 != SUBREG_PROMOTED_UNSIGNED_P (target))
4475 exp = convert
4476 ((*lang_hooks.types.signed_or_unsigned_type)
4477 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4478
4479 exp = convert ((*lang_hooks.types.type_for_mode)
4480 (GET_MODE (SUBREG_REG (target)),
4481 SUBREG_PROMOTED_UNSIGNED_P (target)),
4482 exp);
4483
4484 inner_target = SUBREG_REG (target);
4485 }
4486
4487 temp = expand_expr (exp, inner_target, VOIDmode,
4488 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4489
4490 /* If TEMP is a MEM and we want a result value, make the access
4491 now so it gets done only once. Strictly speaking, this is
4492 only necessary if the MEM is volatile, or if the address
4493 overlaps TARGET. But not performing the load twice also
4494 reduces the amount of rtl we generate and then have to CSE. */
4495 if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
4496 temp = copy_to_reg (temp);
4497
4498 /* If TEMP is a VOIDmode constant, use convert_modes to make
4499 sure that we properly convert it. */
4500 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4501 {
4502 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4503 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4504 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4505 GET_MODE (target), temp,
4506 SUBREG_PROMOTED_UNSIGNED_P (target));
4507 }
4508
4509 convert_move (SUBREG_REG (target), temp,
4510 SUBREG_PROMOTED_UNSIGNED_P (target));
4511
4512 /* If we promoted a constant, change the mode back down to match
4513 target. Otherwise, the caller might get confused by a result whose
4514 mode is larger than expected. */
4515
4516 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
4517 {
4518 if (GET_MODE (temp) != VOIDmode)
4519 {
4520 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4521 SUBREG_PROMOTED_VAR_P (temp) = 1;
4522 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4523 SUBREG_PROMOTED_UNSIGNED_P (target));
4524 }
4525 else
4526 temp = convert_modes (GET_MODE (target),
4527 GET_MODE (SUBREG_REG (target)),
4528 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4529 }
4530
4531 return want_value & 1 ? temp : NULL_RTX;
4532 }
4533 else
4534 {
4535 temp = expand_expr (exp, target, GET_MODE (target),
4536 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4537 /* Return TARGET if it's a specified hardware register.
4538 If TARGET is a volatile mem ref, either return TARGET
4539 or return a reg copied *from* TARGET; ANSI requires this.
4540
4541 Otherwise, if TEMP is not TARGET, return TEMP
4542 if it is constant (for efficiency),
4543 or if we really want the correct value. */
4544 if (!(target && GET_CODE (target) == REG
4545 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4546 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4547 && ! rtx_equal_p (temp, target)
4548 && (CONSTANT_P (temp) || (want_value & 1) != 0))
4549 dont_return_target = 1;
4550 }
4551
4552 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4553 the same as that of TARGET, adjust the constant. This is needed, for
4554 example, in case it is a CONST_DOUBLE and we want only a word-sized
4555 value. */
4556 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4557 && TREE_CODE (exp) != ERROR_MARK
4558 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4559 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4560 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4561
4562 /* If value was not generated in the target, store it there.
4563 Convert the value to TARGET's type first if necessary.
4564 If TEMP and TARGET compare equal according to rtx_equal_p, but
4565 one or both of them are volatile memory refs, we have to distinguish
4566 two cases:
4567 - expand_expr has used TARGET. In this case, we must not generate
4568 another copy. This can be detected by TEMP being equal to TARGET
4569 according to ==.
4570 - expand_expr has not used TARGET - that means that the source just
4571 happens to have the same RTX form. Since temp will have been created
4572 by expand_expr, it will compare unequal to TARGET according to ==.
4573 We must generate a copy in this case, to reach the correct number
4574 of volatile memory references. */
4575
4576 if ((! rtx_equal_p (temp, target)
4577 || (temp != target && (side_effects_p (temp)
4578 || side_effects_p (target))))
4579 && TREE_CODE (exp) != ERROR_MARK
4580 && ! dont_store_target
4581 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4582 but TARGET is not a valid memory reference, TEMP will differ
4583 from TARGET although it is really the same location. */
4584 && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
4585 || target != DECL_RTL_IF_SET (exp))
4586 /* If there's nothing to copy, don't bother. Don't call expr_size
4587 unless necessary, because some front ends' (e.g. C++) expr_size hook
4588 aborts on objects that are not supposed to be bit-copied or
4589 bit-initialized. */
4590 && expr_size (exp) != const0_rtx)
4591 {
4592 target = protect_from_queue (target, 1);
4593 if (GET_MODE (temp) != GET_MODE (target)
4594 && GET_MODE (temp) != VOIDmode)
4595 {
4596 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4597 if (dont_return_target)
4598 {
4599 /* In this case, we will return TEMP,
4600 so make sure it has the proper mode.
4601 But don't forget to store the value into TARGET. */
4602 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4603 emit_move_insn (target, temp);
4604 }
4605 else
4606 convert_move (target, temp, unsignedp);
4607 }
4608
4609 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4610 {
4611 /* Handle copying a string constant into an array. The string
4612 constant may be shorter than the array. So copy just the string's
4613 actual length, and clear the rest. First get the size of the data
4614 type of the string, which is actually the size of the target. */
4615 rtx size = expr_size (exp);
4616
4617 if (GET_CODE (size) == CONST_INT
4618 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4619 emit_block_move (target, temp, size,
4620 (want_value & 2
4621 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4622 else
4623 {
4624 /* Compute the size of the data to copy from the string. */
4625 tree copy_size
4626 = size_binop (MIN_EXPR,
4627 make_tree (sizetype, size),
4628 size_int (TREE_STRING_LENGTH (exp)));
4629 rtx copy_size_rtx
4630 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4631 (want_value & 2
4632 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4633 rtx label = 0;
4634
4635 /* Copy that much. */
4636 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4637 TREE_UNSIGNED (sizetype));
4638 emit_block_move (target, temp, copy_size_rtx,
4639 (want_value & 2
4640 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4641
4642 /* Figure out how much is left in TARGET that we have to clear.
4643 Do all calculations in ptr_mode. */
4644 if (GET_CODE (copy_size_rtx) == CONST_INT)
4645 {
4646 size = plus_constant (size, -INTVAL (copy_size_rtx));
4647 target = adjust_address (target, BLKmode,
4648 INTVAL (copy_size_rtx));
4649 }
4650 else
4651 {
4652 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4653 copy_size_rtx, NULL_RTX, 0,
4654 OPTAB_LIB_WIDEN);
4655
4656 #ifdef POINTERS_EXTEND_UNSIGNED
4657 if (GET_MODE (copy_size_rtx) != Pmode)
4658 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4659 TREE_UNSIGNED (sizetype));
4660 #endif
4661
4662 target = offset_address (target, copy_size_rtx,
4663 highest_pow2_factor (copy_size));
4664 label = gen_label_rtx ();
4665 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4666 GET_MODE (size), 0, label);
4667 }
4668
4669 if (size != const0_rtx)
4670 clear_storage (target, size);
4671
4672 if (label)
4673 emit_label (label);
4674 }
4675 }
4676 /* Handle calls that return values in multiple non-contiguous locations.
4677 The Irix 6 ABI has examples of this. */
4678 else if (GET_CODE (target) == PARALLEL)
4679 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
4680 else if (GET_MODE (temp) == BLKmode)
4681 emit_block_move (target, temp, expr_size (exp),
4682 (want_value & 2
4683 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4684 else
4685 emit_move_insn (target, temp);
4686 }
4687
4688 /* If we don't want a value, return NULL_RTX. */
4689 if ((want_value & 1) == 0)
4690 return NULL_RTX;
4691
4692 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4693 ??? The latter test doesn't seem to make sense. */
4694 else if (dont_return_target && GET_CODE (temp) != MEM)
4695 return temp;
4696
4697 /* Return TARGET itself if it is a hard register. */
4698 else if ((want_value & 1) != 0
4699 && GET_MODE (target) != BLKmode
4700 && ! (GET_CODE (target) == REG
4701 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4702 return copy_to_reg (target);
4703
4704 else
4705 return target;
4706 }
4707 \f
4708 /* Return 1 if EXP just contains zeros. */
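
/* For example, integer_zero_node, a REAL_CST of 0.0, and a CONSTRUCTOR
   whose elements are all themselves zero each count as "just zeros". */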
4709
4710 static int
4711 is_zeros_p (exp)
4712 tree exp;
4713 {
4714 tree elt;
4715
4716 switch (TREE_CODE (exp))
4717 {
4718 case CONVERT_EXPR:
4719 case NOP_EXPR:
4720 case NON_LVALUE_EXPR:
4721 case VIEW_CONVERT_EXPR:
4722 return is_zeros_p (TREE_OPERAND (exp, 0));
4723
4724 case INTEGER_CST:
4725 return integer_zerop (exp);
4726
4727 case COMPLEX_CST:
4728 return
4729 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4730
4731 case REAL_CST:
4732 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4733
4734 case VECTOR_CST:
4735 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4736 elt = TREE_CHAIN (elt))
4737 if (!is_zeros_p (TREE_VALUE (elt)))
4738 return 0;
4739
4740 return 1;
4741
4742 case CONSTRUCTOR:
4743 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4744 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4745 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4746 if (! is_zeros_p (TREE_VALUE (elt)))
4747 return 0;
4748
4749 return 1;
4750
4751 default:
4752 return 0;
4753 }
4754 }
4755
4756 /* Return 1 if EXP contains mostly (3/4) zeros. */
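
/* For example, a CONSTRUCTOR with 3 zero elements out of 4 qualifies
   (4 * 3 >= 3 * 4), while one with 2 zero elements out of 4 does not. */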
4757
4758 static int
4759 mostly_zeros_p (exp)
4760 tree exp;
4761 {
4762 if (TREE_CODE (exp) == CONSTRUCTOR)
4763 {
4764 int elts = 0, zeros = 0;
4765 tree elt = CONSTRUCTOR_ELTS (exp);
4766 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4767 {
4768 /* If there are no ranges of true bits, it is all zero. */
4769 return elt == NULL_TREE;
4770 }
4771 for (; elt; elt = TREE_CHAIN (elt))
4772 {
4773 /* We do not handle the case where the index is a RANGE_EXPR,
4774 so the statistic will be somewhat inaccurate.
4775 We do make a more accurate count in store_constructor itself,
4776 so since this function is only used for nested array elements,
4777 this should be close enough. */
4778 if (mostly_zeros_p (TREE_VALUE (elt)))
4779 zeros++;
4780 elts++;
4781 }
4782
4783 return 4 * zeros >= 3 * elts;
4784 }
4785
4786 return is_zeros_p (exp);
4787 }
4788 \f
4789 /* Helper function for store_constructor.
4790 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4791 TYPE is the type of the CONSTRUCTOR, not the element type.
4792 CLEARED is as for store_constructor.
4793 ALIAS_SET is the alias set to use for any stores.
4794
4795 This provides a recursive shortcut back to store_constructor when it isn't
4796 necessary to go through store_field. This is so that we can pass through
4797 the cleared field to let store_constructor know that we may not have to
4798 clear a substructure if the outer structure has already been cleared. */
4799
4800 static void
4801 store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
4802 alias_set)
4803 rtx target;
4804 unsigned HOST_WIDE_INT bitsize;
4805 HOST_WIDE_INT bitpos;
4806 enum machine_mode mode;
4807 tree exp, type;
4808 int cleared;
4809 int alias_set;
4810 {
4811 if (TREE_CODE (exp) == CONSTRUCTOR
4812 && bitpos % BITS_PER_UNIT == 0
4813 /* If we have a nonzero bitpos for a register target, then we just
4814 let store_field do the bitfield handling. This is unlikely to
4815 generate unnecessary clear instructions anyway. */
4816 && (bitpos == 0 || GET_CODE (target) == MEM))
4817 {
4818 if (GET_CODE (target) == MEM)
4819 target
4820 = adjust_address (target,
4821 GET_MODE (target) == BLKmode
4822 || 0 != (bitpos
4823 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4824 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4825
4826
4827 /* Update the alias set, if required. */
4828 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4829 && MEM_ALIAS_SET (target) != 0)
4830 {
4831 target = copy_rtx (target);
4832 set_mem_alias_set (target, alias_set);
4833 }
4834
4835 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4836 }
4837 else
4838 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4839 alias_set);
4840 }
4841
4842 /* Store the value of constructor EXP into the rtx TARGET.
4843 TARGET is either a REG or a MEM; we know it cannot conflict, since
4844 safe_from_p has been called.
4845 CLEARED is true if TARGET is known to have been zero'd.
4846 SIZE is the number of bytes of TARGET we are allowed to modify: this
4847 may not be the same as the size of EXP if we are assigning to a field
4848 which has been packed to exclude padding bits. */
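
/* A hedged sketch of typical use (trees and sizes hypothetical): when
   expanding an aggregate initializer the caller does something like

       store_constructor (ctor, target, 0,
                          int_size_in_bytes (TREE_TYPE (ctor)));

   with CLEARED zero because nothing is known to have been stored into
   TARGET yet. */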
4849
4850 static void
4851 store_constructor (exp, target, cleared, size)
4852 tree exp;
4853 rtx target;
4854 int cleared;
4855 HOST_WIDE_INT size;
4856 {
4857 tree type = TREE_TYPE (exp);
4858 #ifdef WORD_REGISTER_OPERATIONS
4859 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4860 #endif
4861
4862 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4863 || TREE_CODE (type) == QUAL_UNION_TYPE)
4864 {
4865 tree elt;
4866
4867 /* We either clear the aggregate or indicate the value is dead. */
4868 if ((TREE_CODE (type) == UNION_TYPE
4869 || TREE_CODE (type) == QUAL_UNION_TYPE)
4870 && ! cleared
4871 && ! CONSTRUCTOR_ELTS (exp))
4872 /* If the constructor is empty, clear the union. */
4873 {
4874 clear_storage (target, expr_size (exp));
4875 cleared = 1;
4876 }
4877
4878 /* If we are building a static constructor into a register,
4879 set the initial value as zero so we can fold the value into
4880 a constant. But if more than one register is involved,
4881 this probably loses. */
4882 else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
4883 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4884 {
4885 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4886 cleared = 1;
4887 }
4888
4889 /* If the constructor has fewer fields than the structure
4890 or if we are initializing the structure to mostly zeros,
4891 clear the whole structure first. Don't do this if TARGET is a
4892 register whose mode size isn't equal to SIZE since clear_storage
4893 can't handle this case. */
4894 else if (! cleared && size > 0
4895 && ((list_length (CONSTRUCTOR_ELTS (exp))
4896 != fields_length (type))
4897 || mostly_zeros_p (exp))
4898 && (GET_CODE (target) != REG
4899 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4900 == size)))
4901 {
4902 clear_storage (target, GEN_INT (size));
4903 cleared = 1;
4904 }
4905
4906 if (! cleared)
4907 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4908
4909 /* Store each element of the constructor into
4910 the corresponding field of TARGET. */
4911
4912 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4913 {
4914 tree field = TREE_PURPOSE (elt);
4915 tree value = TREE_VALUE (elt);
4916 enum machine_mode mode;
4917 HOST_WIDE_INT bitsize;
4918 HOST_WIDE_INT bitpos = 0;
4919 tree offset;
4920 rtx to_rtx = target;
4921
4922 /* Just ignore missing fields.
4923 We cleared the whole structure, above,
4924 if any fields are missing. */
4925 if (field == 0)
4926 continue;
4927
4928 if (cleared && is_zeros_p (value))
4929 continue;
4930
4931 if (host_integerp (DECL_SIZE (field), 1))
4932 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4933 else
4934 bitsize = -1;
4935
4936 mode = DECL_MODE (field);
4937 if (DECL_BIT_FIELD (field))
4938 mode = VOIDmode;
4939
4940 offset = DECL_FIELD_OFFSET (field);
4941 if (host_integerp (offset, 0)
4942 && host_integerp (bit_position (field), 0))
4943 {
4944 bitpos = int_bit_position (field);
4945 offset = 0;
4946 }
4947 else
4948 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4949
4950 if (offset)
4951 {
4952 rtx offset_rtx;
4953
4954 if (contains_placeholder_p (offset))
4955 offset = build (WITH_RECORD_EXPR, sizetype,
4956 offset, make_tree (TREE_TYPE (exp), target));
4957
4958 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4959 if (GET_CODE (to_rtx) != MEM)
4960 abort ();
4961
4962 #ifdef POINTERS_EXTEND_UNSIGNED
4963 if (GET_MODE (offset_rtx) != Pmode)
4964 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4965 #else
4966 if (GET_MODE (offset_rtx) != ptr_mode)
4967 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4968 #endif
4969
4970 to_rtx = offset_address (to_rtx, offset_rtx,
4971 highest_pow2_factor (offset));
4972 }
4973
4974 if (TREE_READONLY (field))
4975 {
4976 if (GET_CODE (to_rtx) == MEM)
4977 to_rtx = copy_rtx (to_rtx);
4978
4979 RTX_UNCHANGING_P (to_rtx) = 1;
4980 }
4981
4982 #ifdef WORD_REGISTER_OPERATIONS
4983 /* If this initializes a field that is smaller than a word, at the
4984 start of a word, try to widen it to a full word.
4985 This special case allows us to output C++ member function
4986 initializations in a form that the optimizers can understand. */
4987 if (GET_CODE (target) == REG
4988 && bitsize < BITS_PER_WORD
4989 && bitpos % BITS_PER_WORD == 0
4990 && GET_MODE_CLASS (mode) == MODE_INT
4991 && TREE_CODE (value) == INTEGER_CST
4992 && exp_size >= 0
4993 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4994 {
4995 tree type = TREE_TYPE (value);
4996
4997 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4998 {
4999 type = (*lang_hooks.types.type_for_size)
5000 (BITS_PER_WORD, TREE_UNSIGNED (type));
5001 value = convert (type, value);
5002 }
5003
5004 if (BYTES_BIG_ENDIAN)
5005 value
5006 = fold (build (LSHIFT_EXPR, type, value,
5007 build_int_2 (BITS_PER_WORD - bitsize, 0)));
5008 bitsize = BITS_PER_WORD;
5009 mode = word_mode;
5010 }
5011 #endif
5012
5013 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5014 && DECL_NONADDRESSABLE_P (field))
5015 {
5016 to_rtx = copy_rtx (to_rtx);
5017 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5018 }
5019
5020 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5021 value, type, cleared,
5022 get_alias_set (TREE_TYPE (field)));
5023 }
5024 }
5025 else if (TREE_CODE (type) == ARRAY_TYPE
5026 || TREE_CODE (type) == VECTOR_TYPE)
5027 {
5028 tree elt;
5029 int i;
5030 int need_to_clear;
5031 tree domain = TYPE_DOMAIN (type);
5032 tree elttype = TREE_TYPE (type);
5033 int const_bounds_p;
5034 HOST_WIDE_INT minelt = 0;
5035 HOST_WIDE_INT maxelt = 0;
5036
5037 /* Vectors are like arrays, but the domain is stored via an array
5038 type indirectly. */
5039 if (TREE_CODE (type) == VECTOR_TYPE)
5040 {
5041 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
5042 the same field as TYPE_DOMAIN, we are not guaranteed that
5043 it always will. */
5044 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
5045 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
5046 }
5047
5048 const_bounds_p = (TYPE_MIN_VALUE (domain)
5049 && TYPE_MAX_VALUE (domain)
5050 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5051 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5052
5053 /* If we have constant bounds for the range of the type, get them. */
5054 if (const_bounds_p)
5055 {
5056 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5057 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5058 }
5059
5060 /* If the constructor has fewer elements than the array,
5061 clear the whole array first. Similarly if this is a
5062 static constructor of a non-BLKmode object. */
5063 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
5064 need_to_clear = 1;
5065 else
5066 {
5067 HOST_WIDE_INT count = 0, zero_count = 0;
5068 need_to_clear = ! const_bounds_p;
5069
5070 /* This loop is a more accurate version of the loop in
5071 mostly_zeros_p (it handles RANGE_EXPR in an index).
5072 It is also needed to check for missing elements. */
5073 for (elt = CONSTRUCTOR_ELTS (exp);
5074 elt != NULL_TREE && ! need_to_clear;
5075 elt = TREE_CHAIN (elt))
5076 {
5077 tree index = TREE_PURPOSE (elt);
5078 HOST_WIDE_INT this_node_count;
5079
5080 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5081 {
5082 tree lo_index = TREE_OPERAND (index, 0);
5083 tree hi_index = TREE_OPERAND (index, 1);
5084
5085 if (! host_integerp (lo_index, 1)
5086 || ! host_integerp (hi_index, 1))
5087 {
5088 need_to_clear = 1;
5089 break;
5090 }
5091
5092 this_node_count = (tree_low_cst (hi_index, 1)
5093 - tree_low_cst (lo_index, 1) + 1);
5094 }
5095 else
5096 this_node_count = 1;
5097
5098 count += this_node_count;
5099 if (mostly_zeros_p (TREE_VALUE (elt)))
5100 zero_count += this_node_count;
5101 }
5102
5103 /* Clear the entire array first if there are any missing elements,
5104 or if the incidence of zero elements is >= 75%. */
5105 if (! need_to_clear
5106 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
5107 need_to_clear = 1;
5108 }
5109
5110 if (need_to_clear && size > 0)
5111 {
5112 if (! cleared)
5113 {
5114 if (REG_P (target))
5115 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5116 else
5117 clear_storage (target, GEN_INT (size));
5118 }
5119 cleared = 1;
5120 }
5121 else if (REG_P (target))
5122 /* Inform later passes that the old value is dead. */
5123 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5124
5125 /* Store each element of the constructor into
5126 the corresponding element of TARGET, determined
5127 by counting the elements. */
5128 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
5129 elt;
5130 elt = TREE_CHAIN (elt), i++)
5131 {
5132 enum machine_mode mode;
5133 HOST_WIDE_INT bitsize;
5134 HOST_WIDE_INT bitpos;
5135 int unsignedp;
5136 tree value = TREE_VALUE (elt);
5137 tree index = TREE_PURPOSE (elt);
5138 rtx xtarget = target;
5139
5140 if (cleared && is_zeros_p (value))
5141 continue;
5142
5143 unsignedp = TREE_UNSIGNED (elttype);
5144 mode = TYPE_MODE (elttype);
5145 if (mode == BLKmode)
5146 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5147 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5148 : -1);
5149 else
5150 bitsize = GET_MODE_BITSIZE (mode);
5151
5152 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5153 {
5154 tree lo_index = TREE_OPERAND (index, 0);
5155 tree hi_index = TREE_OPERAND (index, 1);
5156 rtx index_r, pos_rtx, loop_end;
5157 struct nesting *loop;
5158 HOST_WIDE_INT lo, hi, count;
5159 tree position;
5160
5161 /* If the range is constant and "small", unroll the loop. */
5162 if (const_bounds_p
5163 && host_integerp (lo_index, 0)
5164 && host_integerp (hi_index, 0)
5165 && (lo = tree_low_cst (lo_index, 0),
5166 hi = tree_low_cst (hi_index, 0),
5167 count = hi - lo + 1,
5168 (GET_CODE (target) != MEM
5169 || count <= 2
5170 || (host_integerp (TYPE_SIZE (elttype), 1)
5171 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5172 <= 40 * 8)))))
5173 {
5174 lo -= minelt; hi -= minelt;
5175 for (; lo <= hi; lo++)
5176 {
5177 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5178
5179 if (GET_CODE (target) == MEM
5180 && !MEM_KEEP_ALIAS_SET_P (target)
5181 && TREE_CODE (type) == ARRAY_TYPE
5182 && TYPE_NONALIASED_COMPONENT (type))
5183 {
5184 target = copy_rtx (target);
5185 MEM_KEEP_ALIAS_SET_P (target) = 1;
5186 }
5187
5188 store_constructor_field
5189 (target, bitsize, bitpos, mode, value, type, cleared,
5190 get_alias_set (elttype));
5191 }
5192 }
5193 else
5194 {
5195 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
5196 loop_end = gen_label_rtx ();
5197
5198 unsignedp = TREE_UNSIGNED (domain);
5199
5200 index = build_decl (VAR_DECL, NULL_TREE, domain);
5201
5202 index_r
5203 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5204 &unsignedp, 0));
5205 SET_DECL_RTL (index, index_r);
5206 if (TREE_CODE (value) == SAVE_EXPR
5207 && SAVE_EXPR_RTL (value) == 0)
5208 {
5209 /* Make sure value gets expanded once before the
5210 loop. */
5211 expand_expr (value, const0_rtx, VOIDmode, 0);
5212 emit_queue ();
5213 }
5214 store_expr (lo_index, index_r, 0);
5215 loop = expand_start_loop (0);
5216
5217 /* Assign value to element index. */
5218 position
5219 = convert (ssizetype,
5220 fold (build (MINUS_EXPR, TREE_TYPE (index),
5221 index, TYPE_MIN_VALUE (domain))));
5222 position = size_binop (MULT_EXPR, position,
5223 convert (ssizetype,
5224 TYPE_SIZE_UNIT (elttype)));
5225
5226 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
5227 xtarget = offset_address (target, pos_rtx,
5228 highest_pow2_factor (position));
5229 xtarget = adjust_address (xtarget, mode, 0);
5230 if (TREE_CODE (value) == CONSTRUCTOR)
5231 store_constructor (value, xtarget, cleared,
5232 bitsize / BITS_PER_UNIT);
5233 else
5234 store_expr (value, xtarget, 0);
5235
5236 expand_exit_loop_if_false (loop,
5237 build (LT_EXPR, integer_type_node,
5238 index, hi_index));
5239
5240 expand_increment (build (PREINCREMENT_EXPR,
5241 TREE_TYPE (index),
5242 index, integer_one_node), 0, 0);
5243 expand_end_loop ();
5244 emit_label (loop_end);
5245 }
5246 }
5247 else if ((index != 0 && ! host_integerp (index, 0))
5248 || ! host_integerp (TYPE_SIZE (elttype), 1))
5249 {
5250 tree position;
5251
5252 if (index == 0)
5253 index = ssize_int (1);
5254
5255 if (minelt)
5256 index = convert (ssizetype,
5257 fold (build (MINUS_EXPR, TREE_TYPE (index),
5258 index, TYPE_MIN_VALUE (domain))));
5259
5260 position = size_binop (MULT_EXPR, index,
5261 convert (ssizetype,
5262 TYPE_SIZE_UNIT (elttype)));
5263 xtarget = offset_address (target,
5264 expand_expr (position, 0, VOIDmode, 0),
5265 highest_pow2_factor (position));
5266 xtarget = adjust_address (xtarget, mode, 0);
5267 store_expr (value, xtarget, 0);
5268 }
5269 else
5270 {
5271 if (index != 0)
5272 bitpos = ((tree_low_cst (index, 0) - minelt)
5273 * tree_low_cst (TYPE_SIZE (elttype), 1));
5274 else
5275 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5276
5277 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
5278 && TREE_CODE (type) == ARRAY_TYPE
5279 && TYPE_NONALIASED_COMPONENT (type))
5280 {
5281 target = copy_rtx (target);
5282 MEM_KEEP_ALIAS_SET_P (target) = 1;
5283 }
5284
5285 store_constructor_field (target, bitsize, bitpos, mode, value,
5286 type, cleared, get_alias_set (elttype));
5287
5288 }
5289 }
5290 }
5291
5292 /* Set constructor assignments. */
5293 else if (TREE_CODE (type) == SET_TYPE)
5294 {
5295 tree elt = CONSTRUCTOR_ELTS (exp);
5296 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
5297 tree domain = TYPE_DOMAIN (type);
5298 tree domain_min, domain_max, bitlength;
5299
5300 /* The default implementation strategy is to extract the constant
5301 parts of the constructor, use that to initialize the target,
5302 and then "or" in whatever non-constant ranges we need in addition.
5303
5304 If a large set is all zero or all ones, it is
5305 probably better to set it using memset (if available) or bzero.
5306 Also, if a large set has just a single range, it may be
5307 better to first clear the whole set (using bzero/memset)
5308 and then set the bits we want. */
5309
5310 /* Check for all zeros. */
5311 if (elt == NULL_TREE && size > 0)
5312 {
5313 if (!cleared)
5314 clear_storage (target, GEN_INT (size));
5315 return;
5316 }
5317
5318 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5319 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5320 bitlength = size_binop (PLUS_EXPR,
5321 size_diffop (domain_max, domain_min),
5322 ssize_int (1));
5323
5324 nbits = tree_low_cst (bitlength, 1);
5325
5326 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5327 are "complicated" (more than one range), initialize (the
5328 constant parts) by copying from a constant. */
5329 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5330 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5331 {
5332 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5333 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5334 char *bit_buffer = (char *) alloca (nbits);
5335 HOST_WIDE_INT word = 0;
5336 unsigned int bit_pos = 0;
5337 unsigned int ibit = 0;
5338 unsigned int offset = 0; /* In bytes from beginning of set. */
5339
5340 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
5341 for (;;)
5342 {
5343 if (bit_buffer[ibit])
5344 {
5345 if (BYTES_BIG_ENDIAN)
5346 word |= (1 << (set_word_size - 1 - bit_pos));
5347 else
5348 word |= 1 << bit_pos;
5349 }
5350
5351 bit_pos++; ibit++;
5352 if (bit_pos >= set_word_size || ibit == nbits)
5353 {
5354 if (word != 0 || ! cleared)
5355 {
5356 rtx datum = GEN_INT (word);
5357 rtx to_rtx;
5358
5359 /* The assumption here is that it is safe to use
5360 XEXP if the set is multi-word, but not if
5361 it's single-word. */
5362 if (GET_CODE (target) == MEM)
5363 to_rtx = adjust_address (target, mode, offset);
5364 else if (offset == 0)
5365 to_rtx = target;
5366 else
5367 abort ();
5368 emit_move_insn (to_rtx, datum);
5369 }
5370
5371 if (ibit == nbits)
5372 break;
5373 word = 0;
5374 bit_pos = 0;
5375 offset += set_word_size / BITS_PER_UNIT;
5376 }
5377 }
5378 }
5379 else if (!cleared)
5380 /* Don't bother clearing storage if the set is all ones. */
5381 if (TREE_CHAIN (elt) != NULL_TREE
5382 || (TREE_PURPOSE (elt) == NULL_TREE
5383 ? nbits != 1
5384 : ( ! host_integerp (TREE_VALUE (elt), 0)
5385 || ! host_integerp (TREE_PURPOSE (elt), 0)
5386 || (tree_low_cst (TREE_VALUE (elt), 0)
5387 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5388 != (HOST_WIDE_INT) nbits))))
5389 clear_storage (target, expr_size (exp));
5390
5391 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5392 {
5393 /* Start of range of element or NULL. */
5394 tree startbit = TREE_PURPOSE (elt);
5395 /* End of range of element, or element value. */
5396 tree endbit = TREE_VALUE (elt);
5397 HOST_WIDE_INT startb, endb;
5398 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5399
5400 bitlength_rtx = expand_expr (bitlength,
5401 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5402
5403 /* Handle non-range tuple element like [ expr ]. */
5404 if (startbit == NULL_TREE)
5405 {
5406 startbit = save_expr (endbit);
5407 endbit = startbit;
5408 }
5409
5410 startbit = convert (sizetype, startbit);
5411 endbit = convert (sizetype, endbit);
5412 if (! integer_zerop (domain_min))
5413 {
5414 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5415 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5416 }
5417 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5418 EXPAND_CONST_ADDRESS);
5419 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5420 EXPAND_CONST_ADDRESS);
5421
5422 if (REG_P (target))
5423 {
5424 targetx
5425 = assign_temp
5426 ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5427 (GET_MODE (target), 0),
5428 TYPE_QUAL_CONST)),
5429 0, 1, 1);
5430 emit_move_insn (targetx, target);
5431 }
5432
5433 else if (GET_CODE (target) == MEM)
5434 targetx = target;
5435 else
5436 abort ();
5437
5438 /* Optimization: If startbit and endbit are constants divisible
5439 by BITS_PER_UNIT, call memset instead. */
5440 if (TARGET_MEM_FUNCTIONS
5441 && TREE_CODE (startbit) == INTEGER_CST
5442 && TREE_CODE (endbit) == INTEGER_CST
5443 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5444 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5445 {
5446 emit_library_call (memset_libfunc, LCT_NORMAL,
5447 VOIDmode, 3,
5448 plus_constant (XEXP (targetx, 0),
5449 startb / BITS_PER_UNIT),
5450 Pmode,
5451 constm1_rtx, TYPE_MODE (integer_type_node),
5452 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5453 TYPE_MODE (sizetype));
5454 }
5455 else
5456 emit_library_call (setbits_libfunc, LCT_NORMAL,
5457 VOIDmode, 4, XEXP (targetx, 0),
5458 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5459 startbit_rtx, TYPE_MODE (sizetype),
5460 endbit_rtx, TYPE_MODE (sizetype));
5461
5462 if (REG_P (target))
5463 emit_move_insn (target, targetx);
5464 }
5465 }
5466
5467 else
5468 abort ();
5469 }
5470
5471 /* Store the value of EXP (an expression tree)
5472 into a subfield of TARGET which has mode MODE and occupies
5473 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5474 If MODE is VOIDmode, it means that we are storing into a bit-field.
5475
5476 If VALUE_MODE is VOIDmode, return nothing in particular.
5477 UNSIGNEDP is not used in this case.
5478
5479 Otherwise, return an rtx for the value stored. This rtx
5480 has mode VALUE_MODE if that is convenient to do.
5481 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5482
5483 TYPE is the type of the underlying object.
5484
5485 ALIAS_SET is the alias set for the destination. This value will
5486 (in general) be different from that for TARGET, since TARGET is a
5487 reference to the containing structure. */
5488
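/* Illustrative call (a sketch with assumed arguments, not taken from an
   actual caller): storing a 16-bit bit-field that begins 32 bits into
   TO_RTX could look roughly like

     store_field (to_rtx, 16, 32, VOIDmode, rhs, VOIDmode, 0, type, alias_set);

   where RHS is the tree for the value being stored and TYPE/ALIAS_SET
   describe the containing object.  */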
5489 static rtx
5490 store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
5491 alias_set)
5492 rtx target;
5493 HOST_WIDE_INT bitsize;
5494 HOST_WIDE_INT bitpos;
5495 enum machine_mode mode;
5496 tree exp;
5497 enum machine_mode value_mode;
5498 int unsignedp;
5499 tree type;
5500 int alias_set;
5501 {
5502 HOST_WIDE_INT width_mask = 0;
5503
5504 if (TREE_CODE (exp) == ERROR_MARK)
5505 return const0_rtx;
5506
5507 /* If we have nothing to store, do nothing unless the expression has
5508 side-effects. */
5509 if (bitsize == 0)
5510 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5511 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5512 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5513
5514 /* If we are storing into an unaligned field of an aligned union that is
5515 in a register, we may have the mode of TARGET being an integer mode but
5516 MODE == BLKmode. In that case, get an aligned object whose size and
5517 alignment are the same as TARGET and store TARGET into it (we can avoid
5518 the store if the field being stored is the entire width of TARGET). Then
5519 call ourselves recursively to store the field into a BLKmode version of
5520 that object. Finally, load from the object into TARGET. This is not
5521 very efficient in general, but should only be slightly more expensive
5522 than the otherwise-required unaligned accesses. Perhaps this can be
5523 cleaned up later. */
5524
5525 if (mode == BLKmode
5526 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5527 {
5528 rtx object
5529 = assign_temp
5530 (build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST),
5531 0, 1, 1);
5532 rtx blk_object = adjust_address (object, BLKmode, 0);
5533
5534 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5535 emit_move_insn (object, target);
5536
5537 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5538 alias_set);
5539
5540 emit_move_insn (target, object);
5541
5542 /* We want to return the BLKmode version of the data. */
5543 return blk_object;
5544 }
5545
5546 if (GET_CODE (target) == CONCAT)
5547 {
5548 /* We're storing into a struct containing a single __complex. */
5549
5550 if (bitpos != 0)
5551 abort ();
5552 return store_expr (exp, target, 0);
5553 }
5554
5555 /* If the structure is in a register or if the component
5556 is a bit field, we cannot use addressing to access it.
5557 Use bit-field techniques or SUBREG to store in it. */
5558
5559 if (mode == VOIDmode
5560 || (mode != BLKmode && ! direct_store[(int) mode]
5561 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5562 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5563 || GET_CODE (target) == REG
5564 || GET_CODE (target) == SUBREG
5565 /* If the field isn't aligned enough to store as an ordinary memref,
5566 store it as a bit field. */
5567 || (mode != BLKmode
5568 && ((SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
5569 && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)))
5570 || bitpos % GET_MODE_ALIGNMENT (mode)))
5571 /* If the RHS and field are a constant size and the size of the
5572 RHS isn't the same size as the bitfield, we must use bitfield
5573 operations. */
5574 || (bitsize >= 0
5575 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5576 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5577 {
5578 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5579
5580 /* If BITSIZE is narrower than the size of the type of EXP
5581 we will be narrowing TEMP. Normally, what's wanted are the
5582 low-order bits. However, if EXP's type is a record and this is
5583 a big-endian machine, we want the upper BITSIZE bits. */
5584 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5585 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5586 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5587 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5588 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5589 - bitsize),
5590 temp, 1);
5591
5592 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5593 MODE. */
5594 if (mode != VOIDmode && mode != BLKmode
5595 && mode != TYPE_MODE (TREE_TYPE (exp)))
5596 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5597
5598 /* If the modes of TARGET and TEMP are both BLKmode, both
5599 must be in memory and BITPOS must be aligned on a byte
5600 boundary. If so, we simply do a block copy. */
5601 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5602 {
5603 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5604 || bitpos % BITS_PER_UNIT != 0)
5605 abort ();
5606
5607 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5608 emit_block_move (target, temp,
5609 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5610 / BITS_PER_UNIT),
5611 BLOCK_OP_NORMAL);
5612
5613 return value_mode == VOIDmode ? const0_rtx : target;
5614 }
5615
5616 /* Store the value in the bitfield. */
5617 store_bit_field (target, bitsize, bitpos, mode, temp,
5618 int_size_in_bytes (type));
5619
5620 if (value_mode != VOIDmode)
5621 {
5622 /* The caller wants an rtx for the value.
5623 If possible, avoid refetching from the bitfield itself. */
5624 if (width_mask != 0
5625 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5626 {
5627 tree count;
5628 enum machine_mode tmode;
5629
5630 tmode = GET_MODE (temp);
5631 if (tmode == VOIDmode)
5632 tmode = value_mode;
5633
5634 if (unsignedp)
5635 return expand_and (tmode, temp,
5636 gen_int_mode (width_mask, tmode),
5637 NULL_RTX);
5638
5639 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5640 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5641 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5642 }
5643
5644 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5645 NULL_RTX, value_mode, VOIDmode,
5646 int_size_in_bytes (type));
5647 }
5648 return const0_rtx;
5649 }
5650 else
5651 {
5652 rtx addr = XEXP (target, 0);
5653 rtx to_rtx = target;
5654
5655 /* If a value is wanted, it must be the lhs;
5656 so make the address stable for multiple use. */
5657
5658 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5659 && ! CONSTANT_ADDRESS_P (addr)
5660 /* A frame-pointer reference is already stable. */
5661 && ! (GET_CODE (addr) == PLUS
5662 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5663 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5664 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5665 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5666
5667 /* Now build a reference to just the desired component. */
5668
5669 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5670
5671 if (to_rtx == target)
5672 to_rtx = copy_rtx (to_rtx);
5673
5674 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5675 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5676 set_mem_alias_set (to_rtx, alias_set);
5677
5678 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5679 }
5680 }
5681 \f
5682 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5683 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5684 codes and find the ultimate containing object, which we return.
5685
5686 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5687 bit position, and *PUNSIGNEDP to the signedness of the field.
5688 If the position of the field is variable, we store a tree
5689 giving the variable offset (in units) in *POFFSET.
5690 This offset is in addition to the bit position.
5691 If the position is not variable, we store 0 in *POFFSET.
5692
5693 If any of the extraction expressions is volatile,
5694 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5695
5696 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5697 is a mode that can be used to access the field. In that case, *PBITSIZE
5698 is redundant.
5699
5700 If the field describes a variable-sized object, *PMODE is set to
5701 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5702 this case, but the address of the object can be found. */
5703
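/* Illustrative use (a sketch of a typical caller, with assumed names):

     HOST_WIDE_INT bitsize, bitpos;
     tree offset;
     enum machine_mode mode1;
     int unsignedp, volatilep = 0;
     tree base = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                                      &mode1, &unsignedp, &volatilep);

   BASE is then expanded to an address, OFFSET (if nonzero) is added to it,
   and the access reads or writes BITSIZE bits starting at BITPOS.  */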
5704 tree
5705 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5706 punsignedp, pvolatilep)
5707 tree exp;
5708 HOST_WIDE_INT *pbitsize;
5709 HOST_WIDE_INT *pbitpos;
5710 tree *poffset;
5711 enum machine_mode *pmode;
5712 int *punsignedp;
5713 int *pvolatilep;
5714 {
5715 tree size_tree = 0;
5716 enum machine_mode mode = VOIDmode;
5717 tree offset = size_zero_node;
5718 tree bit_offset = bitsize_zero_node;
5719 tree placeholder_ptr = 0;
5720 tree tem;
5721
5722 /* First get the mode, signedness, and size. We do this from just the
5723 outermost expression. */
5724 if (TREE_CODE (exp) == COMPONENT_REF)
5725 {
5726 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5727 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5728 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5729
5730 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5731 }
5732 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5733 {
5734 size_tree = TREE_OPERAND (exp, 1);
5735 *punsignedp = TREE_UNSIGNED (exp);
5736 }
5737 else
5738 {
5739 mode = TYPE_MODE (TREE_TYPE (exp));
5740 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5741
5742 if (mode == BLKmode)
5743 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5744 else
5745 *pbitsize = GET_MODE_BITSIZE (mode);
5746 }
5747
5748 if (size_tree != 0)
5749 {
5750 if (! host_integerp (size_tree, 1))
5751 mode = BLKmode, *pbitsize = -1;
5752 else
5753 *pbitsize = tree_low_cst (size_tree, 1);
5754 }
5755
5756 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5757 and find the ultimate containing object. */
5758 while (1)
5759 {
5760 if (TREE_CODE (exp) == BIT_FIELD_REF)
5761 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5762 else if (TREE_CODE (exp) == COMPONENT_REF)
5763 {
5764 tree field = TREE_OPERAND (exp, 1);
5765 tree this_offset = DECL_FIELD_OFFSET (field);
5766
5767 /* If this field hasn't been filled in yet, don't go
5768 past it. This should only happen when folding expressions
5769 made during type construction. */
5770 if (this_offset == 0)
5771 break;
5772 else if (! TREE_CONSTANT (this_offset)
5773 && contains_placeholder_p (this_offset))
5774 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5775
5776 offset = size_binop (PLUS_EXPR, offset, this_offset);
5777 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5778 DECL_FIELD_BIT_OFFSET (field));
5779
5780 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5781 }
5782
5783 else if (TREE_CODE (exp) == ARRAY_REF
5784 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5785 {
5786 tree index = TREE_OPERAND (exp, 1);
5787 tree array = TREE_OPERAND (exp, 0);
5788 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5789 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5790 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5791
5792 /* We assume all arrays have sizes that are a multiple of a byte.
5793 First subtract the lower bound, if any, in the type of the
5794 index, then convert to sizetype and multiply by the size of the
5795 array element. */
5796 if (low_bound != 0 && ! integer_zerop (low_bound))
5797 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5798 index, low_bound));
5799
5800 /* If the index has a self-referential type, pass it to a
5801 WITH_RECORD_EXPR; if the component size is self-referential,
5802 pass our component to one. */
5803 if (! TREE_CONSTANT (index)
5804 && contains_placeholder_p (index))
5805 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5806 if (! TREE_CONSTANT (unit_size)
5807 && contains_placeholder_p (unit_size))
5808 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5809
5810 offset = size_binop (PLUS_EXPR, offset,
5811 size_binop (MULT_EXPR,
5812 convert (sizetype, index),
5813 unit_size));
5814 }
5815
5816 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5817 {
5818 tree new = find_placeholder (exp, &placeholder_ptr);
5819
5820 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5821 We might have been called from tree optimization where we
5822 haven't set up an object yet. */
5823 if (new == 0)
5824 break;
5825 else
5826 exp = new;
5827
5828 continue;
5829 }
5830 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5831 && TREE_CODE (exp) != VIEW_CONVERT_EXPR
5832 && ! ((TREE_CODE (exp) == NOP_EXPR
5833 || TREE_CODE (exp) == CONVERT_EXPR)
5834 && (TYPE_MODE (TREE_TYPE (exp))
5835 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5836 break;
5837
5838 /* If any reference in the chain is volatile, the effect is volatile. */
5839 if (TREE_THIS_VOLATILE (exp))
5840 *pvolatilep = 1;
5841
5842 exp = TREE_OPERAND (exp, 0);
5843 }
5844
5845 /* If OFFSET is constant, see if we can return the whole thing as a
5846 constant bit position. Otherwise, split it up. */
5847 if (host_integerp (offset, 0)
5848 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5849 bitsize_unit_node))
5850 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5851 && host_integerp (tem, 0))
5852 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5853 else
5854 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5855
5856 *pmode = mode;
5857 return exp;
5858 }
5859
5860 /* Return 1 if T is an expression that get_inner_reference handles. */
5861
5862 int
5863 handled_component_p (t)
5864 tree t;
5865 {
5866 switch (TREE_CODE (t))
5867 {
5868 case BIT_FIELD_REF:
5869 case COMPONENT_REF:
5870 case ARRAY_REF:
5871 case ARRAY_RANGE_REF:
5872 case NON_LVALUE_EXPR:
5873 case VIEW_CONVERT_EXPR:
5874 return 1;
5875
5876 case NOP_EXPR:
5877 case CONVERT_EXPR:
5878 return (TYPE_MODE (TREE_TYPE (t))
5879 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5880
5881 default:
5882 return 0;
5883 }
5884 }
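
/* A typical use (shown only as an illustration) is stripping these wrappers
   to reach the base object of a reference:

     while (handled_component_p (t))
       t = TREE_OPERAND (t, 0);  */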
5885 \f
5886 /* Given an rtx VALUE that may contain additions and multiplications, return
5887 an equivalent value that just refers to a register, memory, or constant.
5888 This is done by generating instructions to perform the arithmetic and
5889 returning a pseudo-register containing the value.
5890
5891 The returned value may be a REG, SUBREG, MEM or constant. */
5892
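/* For example (hypothetical operands A and B), forcing the address
   computation (plus (mult A (const_int 4)) B) emits a multiply and an add
   and returns a pseudo register holding the result:

     rtx addr = force_operand (gen_rtx_PLUS (Pmode,
                                             gen_rtx_MULT (Pmode, a, GEN_INT (4)),
                                             b),
                               NULL_RTX);  */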
5893 rtx
5894 force_operand (value, target)
5895 rtx value, target;
5896 {
5897 rtx op1, op2;
5898 /* Use subtarget as the target for operand 0 of a binary operation. */
5899 rtx subtarget = get_subtarget (target);
5900 enum rtx_code code = GET_CODE (value);
5901
5902 /* Check for a PIC address load. */
5903 if ((code == PLUS || code == MINUS)
5904 && XEXP (value, 0) == pic_offset_table_rtx
5905 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5906 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5907 || GET_CODE (XEXP (value, 1)) == CONST))
5908 {
5909 if (!subtarget)
5910 subtarget = gen_reg_rtx (GET_MODE (value));
5911 emit_move_insn (subtarget, value);
5912 return subtarget;
5913 }
5914
5915 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5916 {
5917 if (!target)
5918 target = gen_reg_rtx (GET_MODE (value));
5919 convert_move (target, force_operand (XEXP (value, 0), NULL),
5920 code == ZERO_EXTEND);
5921 return target;
5922 }
5923
5924 if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
5925 {
5926 op2 = XEXP (value, 1);
5927 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
5928 subtarget = 0;
5929 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5930 {
5931 code = PLUS;
5932 op2 = negate_rtx (GET_MODE (value), op2);
5933 }
5934
5935 /* Check for an addition with OP2 a constant integer and our first
5936 operand a PLUS of a virtual register and something else. In that
5937 case, we want to emit the sum of the virtual register and the
5938 constant first and then add the other value. This allows virtual
5939 register instantiation to simply modify the constant rather than
5940 creating another one around this addition. */
5941 if (code == PLUS && GET_CODE (op2) == CONST_INT
5942 && GET_CODE (XEXP (value, 0)) == PLUS
5943 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5944 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5945 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5946 {
5947 rtx temp = expand_simple_binop (GET_MODE (value), code,
5948 XEXP (XEXP (value, 0), 0), op2,
5949 subtarget, 0, OPTAB_LIB_WIDEN);
5950 return expand_simple_binop (GET_MODE (value), code, temp,
5951 force_operand (XEXP (XEXP (value,
5952 0), 1), 0),
5953 target, 0, OPTAB_LIB_WIDEN);
5954 }
5955
5956 op1 = force_operand (XEXP (value, 0), subtarget);
5957 op2 = force_operand (op2, NULL_RTX);
5958 switch (code)
5959 {
5960 case MULT:
5961 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5962 case DIV:
5963 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5964 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5965 target, 1, OPTAB_LIB_WIDEN);
5966 else
5967 return expand_divmod (0,
5968 FLOAT_MODE_P (GET_MODE (value))
5969 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5970 GET_MODE (value), op1, op2, target, 0);
5971 break;
5972 case MOD:
5973 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5974 target, 0);
5975 break;
5976 case UDIV:
5977 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5978 target, 1);
5979 break;
5980 case UMOD:
5981 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5982 target, 1);
5983 break;
5984 case ASHIFTRT:
5985 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5986 target, 0, OPTAB_LIB_WIDEN);
5987 break;
5988 default:
5989 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5990 target, 1, OPTAB_LIB_WIDEN);
5991 }
5992 }
5993 if (GET_RTX_CLASS (code) == '1')
5994 {
5995 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5996 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5997 }
5998
5999 #ifdef INSN_SCHEDULING
6000 /* On machines that have insn scheduling, we want all memory references to be
6001 explicit, so we need to deal with such paradoxical SUBREGs. */
6002 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
6003 && (GET_MODE_SIZE (GET_MODE (value))
6004 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6005 value
6006 = simplify_gen_subreg (GET_MODE (value),
6007 force_reg (GET_MODE (SUBREG_REG (value)),
6008 force_operand (SUBREG_REG (value),
6009 NULL_RTX)),
6010 GET_MODE (SUBREG_REG (value)),
6011 SUBREG_BYTE (value));
6012 #endif
6013
6014 return value;
6015 }
6016 \f
6017 /* Subroutine of expand_expr: return nonzero iff there is no way that
6018 EXP can reference X, which is being modified. TOP_P is nonzero if this
6019 call is going to be used to determine whether we need a temporary
6020 for EXP, as opposed to a recursive call to this function.
6021
6022 It is always safe for this routine to return zero since it merely
6023 searches for optimization opportunities. */
6024
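/* Typical use (illustration only): before reusing a suggested TARGET a
   caller checks something like

     if (target == 0 || ! safe_from_p (target, exp, 1))
       target = gen_reg_rtx (mode);

   i.e. it falls back to a fresh pseudo when EXP might refer to TARGET.  */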
6025 int
6026 safe_from_p (x, exp, top_p)
6027 rtx x;
6028 tree exp;
6029 int top_p;
6030 {
6031 rtx exp_rtl = 0;
6032 int i, nops;
6033 static tree save_expr_list;
6034
6035 if (x == 0
6036 /* If EXP has varying size, we MUST use a target since we currently
6037 have no way of allocating temporaries of variable size
6038 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6039 So we assume here that something at a higher level has prevented a
6040 clash. This is somewhat bogus, but the best we can do. Only
6041 do this when X is BLKmode and when we are at the top level. */
6042 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6043 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6044 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6045 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6046 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6047 != INTEGER_CST)
6048 && GET_MODE (x) == BLKmode)
6049 /* If X is in the outgoing argument area, it is always safe. */
6050 || (GET_CODE (x) == MEM
6051 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6052 || (GET_CODE (XEXP (x, 0)) == PLUS
6053 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6054 return 1;
6055
6056 /* If this is a subreg of a hard register, declare it unsafe; otherwise,
6057 find the underlying pseudo. */
6058 if (GET_CODE (x) == SUBREG)
6059 {
6060 x = SUBREG_REG (x);
6061 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6062 return 0;
6063 }
6064
6065 /* A SAVE_EXPR might appear many times in the expression passed to the
6066 top-level safe_from_p call, and if it has a complex subexpression,
6067 examining it multiple times could result in a combinatorial explosion.
6068 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
6069 with optimization took about 28 minutes to compile -- even though it was
6070 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
6071 and turn that off when we are done. We keep a list of the SAVE_EXPRs
6072 we have processed. Note that the only test of top_p was above. */
6073
6074 if (top_p)
6075 {
6076 int rtn;
6077 tree t;
6078
6079 save_expr_list = 0;
6080
6081 rtn = safe_from_p (x, exp, 0);
6082
6083 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
6084 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
6085
6086 return rtn;
6087 }
6088
6089 /* Now look at our tree code and possibly recurse. */
6090 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6091 {
6092 case 'd':
6093 exp_rtl = DECL_RTL_IF_SET (exp);
6094 break;
6095
6096 case 'c':
6097 return 1;
6098
6099 case 'x':
6100 if (TREE_CODE (exp) == TREE_LIST)
6101 {
6102 while (1)
6103 {
6104 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6105 return 0;
6106 exp = TREE_CHAIN (exp);
6107 if (!exp)
6108 return 1;
6109 if (TREE_CODE (exp) != TREE_LIST)
6110 return safe_from_p (x, exp, 0);
6111 }
6112 }
6113 else if (TREE_CODE (exp) == ERROR_MARK)
6114 return 1; /* An already-visited SAVE_EXPR? */
6115 else
6116 return 0;
6117
6118 case '2':
6119 case '<':
6120 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6121 return 0;
6122 /* FALLTHRU */
6123
6124 case '1':
6125 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6126
6127 case 'e':
6128 case 'r':
6129 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6130 the expression. If it is set, we conflict iff we are that rtx or
6131 both are in memory. Otherwise, we check all operands of the
6132 expression recursively. */
6133
6134 switch (TREE_CODE (exp))
6135 {
6136 case ADDR_EXPR:
6137 /* If the operand is static or we are static, we can't conflict.
6138 Likewise if we don't conflict with the operand at all. */
6139 if (staticp (TREE_OPERAND (exp, 0))
6140 || TREE_STATIC (exp)
6141 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6142 return 1;
6143
6144 /* Otherwise, the only way this can conflict is if we are taking
6145 the address of a DECL whose address is part of X, which is
6146 very rare. */
6147 exp = TREE_OPERAND (exp, 0);
6148 if (DECL_P (exp))
6149 {
6150 if (!DECL_RTL_SET_P (exp)
6151 || GET_CODE (DECL_RTL (exp)) != MEM)
6152 return 0;
6153 else
6154 exp_rtl = XEXP (DECL_RTL (exp), 0);
6155 }
6156 break;
6157
6158 case INDIRECT_REF:
6159 if (GET_CODE (x) == MEM
6160 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6161 get_alias_set (exp)))
6162 return 0;
6163 break;
6164
6165 case CALL_EXPR:
6166 /* Assume that the call will clobber all hard registers and
6167 all of memory. */
6168 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6169 || GET_CODE (x) == MEM)
6170 return 0;
6171 break;
6172
6173 case RTL_EXPR:
6174 /* If a sequence exists, we would have to scan every instruction
6175 in the sequence to see if it was safe. This is probably not
6176 worthwhile. */
6177 if (RTL_EXPR_SEQUENCE (exp))
6178 return 0;
6179
6180 exp_rtl = RTL_EXPR_RTL (exp);
6181 break;
6182
6183 case WITH_CLEANUP_EXPR:
6184 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
6185 break;
6186
6187 case CLEANUP_POINT_EXPR:
6188 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6189
6190 case SAVE_EXPR:
6191 exp_rtl = SAVE_EXPR_RTL (exp);
6192 if (exp_rtl)
6193 break;
6194
6195 /* If we've already scanned this, don't do it again. Otherwise,
6196 show we've scanned it and record for clearing the flag if we're
6197 going on. */
6198 if (TREE_PRIVATE (exp))
6199 return 1;
6200
6201 TREE_PRIVATE (exp) = 1;
6202 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6203 {
6204 TREE_PRIVATE (exp) = 0;
6205 return 0;
6206 }
6207
6208 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
6209 return 1;
6210
6211 case BIND_EXPR:
6212 /* The only operand we look at is operand 1. The rest aren't
6213 part of the expression. */
6214 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
6215
6216 case METHOD_CALL_EXPR:
6217 /* This takes an rtx argument, but shouldn't appear here. */
6218 abort ();
6219
6220 default:
6221 break;
6222 }
6223
6224 /* If we have an rtx, we do not need to scan our operands. */
6225 if (exp_rtl)
6226 break;
6227
6228 nops = first_rtl_op (TREE_CODE (exp));
6229 for (i = 0; i < nops; i++)
6230 if (TREE_OPERAND (exp, i) != 0
6231 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6232 return 0;
6233
6234 /* If this is a language-specific tree code, it may require
6235 special handling. */
6236 if ((unsigned int) TREE_CODE (exp)
6237 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6238 && !(*lang_hooks.safe_from_p) (x, exp))
6239 return 0;
6240 }
6241
6242 /* If we have an rtl, find any enclosed object. Then see if we conflict
6243 with it. */
6244 if (exp_rtl)
6245 {
6246 if (GET_CODE (exp_rtl) == SUBREG)
6247 {
6248 exp_rtl = SUBREG_REG (exp_rtl);
6249 if (GET_CODE (exp_rtl) == REG
6250 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6251 return 0;
6252 }
6253
6254 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6255 are memory and they conflict. */
6256 return ! (rtx_equal_p (x, exp_rtl)
6257 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
6258 && true_dependence (exp_rtl, VOIDmode, x,
6259 rtx_addr_varies_p)));
6260 }
6261
6262 /* If we reach here, it is safe. */
6263 return 1;
6264 }
6265
6266 /* Subroutine of expand_expr: return rtx if EXP is a
6267 variable or parameter; else return 0. */
6268
6269 static rtx
6270 var_rtx (exp)
6271 tree exp;
6272 {
6273 STRIP_NOPS (exp);
6274 switch (TREE_CODE (exp))
6275 {
6276 case PARM_DECL:
6277 case VAR_DECL:
6278 return DECL_RTL (exp);
6279 default:
6280 return 0;
6281 }
6282 }
6283
6284 #ifdef MAX_INTEGER_COMPUTATION_MODE
6285
6286 void
6287 check_max_integer_computation_mode (exp)
6288 tree exp;
6289 {
6290 enum tree_code code;
6291 enum machine_mode mode;
6292
6293 /* Strip any NOPs that don't change the mode. */
6294 STRIP_NOPS (exp);
6295 code = TREE_CODE (exp);
6296
6297 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
6298 if (code == NOP_EXPR
6299 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6300 return;
6301
6302 /* First check the type of the overall operation. We need only look at
6303 unary, binary and relational operations. */
6304 if (TREE_CODE_CLASS (code) == '1'
6305 || TREE_CODE_CLASS (code) == '2'
6306 || TREE_CODE_CLASS (code) == '<')
6307 {
6308 mode = TYPE_MODE (TREE_TYPE (exp));
6309 if (GET_MODE_CLASS (mode) == MODE_INT
6310 && mode > MAX_INTEGER_COMPUTATION_MODE)
6311 internal_error ("unsupported wide integer operation");
6312 }
6313
6314 /* Check operand of a unary op. */
6315 if (TREE_CODE_CLASS (code) == '1')
6316 {
6317 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6318 if (GET_MODE_CLASS (mode) == MODE_INT
6319 && mode > MAX_INTEGER_COMPUTATION_MODE)
6320 internal_error ("unsupported wide integer operation");
6321 }
6322
6323 /* Check operands of a binary/comparison op. */
6324 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
6325 {
6326 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6327 if (GET_MODE_CLASS (mode) == MODE_INT
6328 && mode > MAX_INTEGER_COMPUTATION_MODE)
6329 internal_error ("unsupported wide integer operation");
6330
6331 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
6332 if (GET_MODE_CLASS (mode) == MODE_INT
6333 && mode > MAX_INTEGER_COMPUTATION_MODE)
6334 internal_error ("unsupported wide integer operation");
6335 }
6336 }
6337 #endif
6338 \f
6339 /* Return the highest power of two that EXP is known to be a multiple of.
6340 This is used in updating alignment of MEMs in array references. */
6341
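/* Worked example (not from the source): for EXP equal to i * 8 + 4 this
   returns MIN (8, 4) = 4; for the constant 24 it returns 8, the largest
   power of two dividing 24.  */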
6342 static unsigned HOST_WIDE_INT
6343 highest_pow2_factor (exp)
6344 tree exp;
6345 {
6346 unsigned HOST_WIDE_INT c0, c1;
6347
6348 switch (TREE_CODE (exp))
6349 {
6350 case INTEGER_CST:
6351 /* We can find the lowest bit that's a one. If the low
6352 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6353 We need to handle this case since we can find it in a COND_EXPR,
6354 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6355 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6356 later ICE. */
6357 if (TREE_CONSTANT_OVERFLOW (exp))
6358 return BIGGEST_ALIGNMENT;
6359 else
6360 {
6361 /* Note: tree_low_cst is intentionally not used here,
6362 we don't care about the upper bits. */
6363 c0 = TREE_INT_CST_LOW (exp);
6364 c0 &= -c0;
6365 return c0 ? c0 : BIGGEST_ALIGNMENT;
6366 }
6367 break;
6368
6369 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6370 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6371 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6372 return MIN (c0, c1);
6373
6374 case MULT_EXPR:
6375 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6376 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6377 return c0 * c1;
6378
6379 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6380 case CEIL_DIV_EXPR:
6381 if (integer_pow2p (TREE_OPERAND (exp, 1))
6382 && host_integerp (TREE_OPERAND (exp, 1), 1))
6383 {
6384 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6385 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6386 return MAX (1, c0 / c1);
6387 }
6388 break;
6389
6390 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6391 case SAVE_EXPR: case WITH_RECORD_EXPR:
6392 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6393
6394 case COMPOUND_EXPR:
6395 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6396
6397 case COND_EXPR:
6398 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6399 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6400 return MIN (c0, c1);
6401
6402 default:
6403 break;
6404 }
6405
6406 return 1;
6407 }
6408
6409 /* Similar, except that it is known that the expression must be a multiple
6410 of the alignment of TYPE. */
6411
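/* E.g. (illustrative numbers): with TYPE aligned to 64 bits, so that
   TYPE_ALIGN (type) / BITS_PER_UNIT is 8, and EXP equal to i * 4, the
   result is MAX (4, 8) = 8.  */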
6412 static unsigned HOST_WIDE_INT
6413 highest_pow2_factor_for_type (type, exp)
6414 tree type;
6415 tree exp;
6416 {
6417 unsigned HOST_WIDE_INT type_align, factor;
6418
6419 factor = highest_pow2_factor (exp);
6420 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6421 return MAX (factor, type_align);
6422 }
6423 \f
6424 /* Return an object on the placeholder list that matches EXP, a
6425 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6426 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6427 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6428 is a location which initially points to a starting location in the
6429 placeholder list (zero means start of the list) and where a pointer into
6430 the placeholder list at which the object is found is placed. */
6431
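/* The calling pattern used for PLACEHOLDER_EXPR below (repeated here only
   for orientation) is

     tree placeholder_expr = 0;
     tree object = find_placeholder (exp, &placeholder_expr);

   after which PLACEHOLDER_EXPR points at the list entry that supplied
   OBJECT, so the caller can temporarily pop it off placeholder_list.  */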
6432 tree
6433 find_placeholder (exp, plist)
6434 tree exp;
6435 tree *plist;
6436 {
6437 tree type = TREE_TYPE (exp);
6438 tree placeholder_expr;
6439
6440 for (placeholder_expr
6441 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6442 placeholder_expr != 0;
6443 placeholder_expr = TREE_CHAIN (placeholder_expr))
6444 {
6445 tree need_type = TYPE_MAIN_VARIANT (type);
6446 tree elt;
6447
6448 /* Find the outermost reference that is of the type we want. If none,
6449 see if any object has a type that is a pointer to the type we
6450 want. */
6451 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6452 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6453 || TREE_CODE (elt) == COND_EXPR)
6454 ? TREE_OPERAND (elt, 1)
6455 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6456 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6457 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6458 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6459 ? TREE_OPERAND (elt, 0) : 0))
6460 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6461 {
6462 if (plist)
6463 *plist = placeholder_expr;
6464 return elt;
6465 }
6466
6467 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6468 elt
6469 = ((TREE_CODE (elt) == COMPOUND_EXPR
6470 || TREE_CODE (elt) == COND_EXPR)
6471 ? TREE_OPERAND (elt, 1)
6472 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6473 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6474 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6475 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6476 ? TREE_OPERAND (elt, 0) : 0))
6477 if (POINTER_TYPE_P (TREE_TYPE (elt))
6478 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6479 == need_type))
6480 {
6481 if (plist)
6482 *plist = placeholder_expr;
6483 return build1 (INDIRECT_REF, need_type, elt);
6484 }
6485 }
6486
6487 return 0;
6488 }
6489 \f
6490 /* expand_expr: generate code for computing expression EXP.
6491 An rtx for the computed value is returned. The value is never null.
6492 In the case of a void EXP, const0_rtx is returned.
6493
6494 The value may be stored in TARGET if TARGET is nonzero.
6495 TARGET is just a suggestion; callers must assume that
6496 the rtx returned may not be the same as TARGET.
6497
6498 If TARGET is CONST0_RTX, it means that the value will be ignored.
6499
6500 If TMODE is not VOIDmode, it suggests generating the
6501 result in mode TMODE. But this is done only when convenient.
6502 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6503 TMODE is just a suggestion; callers must assume that
6504 the rtx returned may not have mode TMODE.
6505
6506 Note that TARGET may have neither TMODE nor MODE. In that case, it
6507 probably will not be used.
6508
6509 If MODIFIER is EXPAND_SUM then when EXP is an addition
6510 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6511 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6512 products as above, or REG or MEM, or constant.
6513 Ordinarily in such cases we would output mul or add instructions
6514 and then return a pseudo reg containing the sum.
6515
6516 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6517 it also marks a label as absolutely required (it can't be dead).
6518 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6519 This is used for outputting expressions used in initializers.
6520
6521 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6522 with a constant address even if that address is not normally legitimate.
6523 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6524
6525 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6526 a call parameter. Such targets require special care as we haven't yet
6527 marked TARGET so that it's safe from being trashed by libcalls. We
6528 don't want to use TARGET for anything but the final result;
6529 intermediate values must go elsewhere. Additionally, calls to
6530 emit_block_move will be flagged with BLOCK_OP_CALL_PARM. */
6531
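/* The simplest invocation (illustrative) just asks for the value wherever
   it is convenient:

     rtx val = expand_expr (exp, NULL_RTX, VOIDmode, 0);

   where the final 0 is the ordinary EXPAND_NORMAL modifier.  */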
6532 rtx
6533 expand_expr (exp, target, tmode, modifier)
6534 tree exp;
6535 rtx target;
6536 enum machine_mode tmode;
6537 enum expand_modifier modifier;
6538 {
6539 rtx op0, op1, temp;
6540 tree type = TREE_TYPE (exp);
6541 int unsignedp = TREE_UNSIGNED (type);
6542 enum machine_mode mode;
6543 enum tree_code code = TREE_CODE (exp);
6544 optab this_optab;
6545 rtx subtarget, original_target;
6546 int ignore;
6547 tree context;
6548
6549 /* Handle ERROR_MARK before anybody tries to access its type. */
6550 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6551 {
6552 op0 = CONST0_RTX (tmode);
6553 if (op0 != 0)
6554 return op0;
6555 return const0_rtx;
6556 }
6557
6558 mode = TYPE_MODE (type);
6559 /* Use subtarget as the target for operand 0 of a binary operation. */
6560 subtarget = get_subtarget (target);
6561 original_target = target;
6562 ignore = (target == const0_rtx
6563 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6564 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6565 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6566 && TREE_CODE (type) == VOID_TYPE));
6567
6568 /* If we are going to ignore this result, we need only do something
6569 if there is a side-effect somewhere in the expression. If there
6570 is, short-circuit the most common cases here. Note that we must
6571 not call expand_expr with anything but const0_rtx in case this
6572 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6573
6574 if (ignore)
6575 {
6576 if (! TREE_SIDE_EFFECTS (exp))
6577 return const0_rtx;
6578
6579 /* Ensure we reference a volatile object even if its value is ignored, but
6580 don't do this if all we are doing is taking its address. */
6581 if (TREE_THIS_VOLATILE (exp)
6582 && TREE_CODE (exp) != FUNCTION_DECL
6583 && mode != VOIDmode && mode != BLKmode
6584 && modifier != EXPAND_CONST_ADDRESS)
6585 {
6586 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6587 if (GET_CODE (temp) == MEM)
6588 temp = copy_to_reg (temp);
6589 return const0_rtx;
6590 }
6591
6592 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6593 || code == INDIRECT_REF || code == BUFFER_REF)
6594 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6595 modifier);
6596
6597 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6598 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6599 {
6600 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6601 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6602 return const0_rtx;
6603 }
6604 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6605 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6606 /* If the second operand has no side effects, just evaluate
6607 the first. */
6608 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6609 modifier);
6610 else if (code == BIT_FIELD_REF)
6611 {
6612 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6613 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6614 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6615 return const0_rtx;
6616 }
6617
6618 target = 0;
6619 }
6620
6621 #ifdef MAX_INTEGER_COMPUTATION_MODE
6622 /* Only check stuff here if the mode we want is different from the mode
6623 of the expression; if it's the same, check_max_integer_computation_mode
6624 will handle it. Do we really need to check this stuff at all? */
6625
6626 if (target
6627 && GET_MODE (target) != mode
6628 && TREE_CODE (exp) != INTEGER_CST
6629 && TREE_CODE (exp) != PARM_DECL
6630 && TREE_CODE (exp) != ARRAY_REF
6631 && TREE_CODE (exp) != ARRAY_RANGE_REF
6632 && TREE_CODE (exp) != COMPONENT_REF
6633 && TREE_CODE (exp) != BIT_FIELD_REF
6634 && TREE_CODE (exp) != INDIRECT_REF
6635 && TREE_CODE (exp) != CALL_EXPR
6636 && TREE_CODE (exp) != VAR_DECL
6637 && TREE_CODE (exp) != RTL_EXPR)
6638 {
6639 enum machine_mode mode = GET_MODE (target);
6640
6641 if (GET_MODE_CLASS (mode) == MODE_INT
6642 && mode > MAX_INTEGER_COMPUTATION_MODE)
6643 internal_error ("unsupported wide integer operation");
6644 }
6645
6646 if (tmode != mode
6647 && TREE_CODE (exp) != INTEGER_CST
6648 && TREE_CODE (exp) != PARM_DECL
6649 && TREE_CODE (exp) != ARRAY_REF
6650 && TREE_CODE (exp) != ARRAY_RANGE_REF
6651 && TREE_CODE (exp) != COMPONENT_REF
6652 && TREE_CODE (exp) != BIT_FIELD_REF
6653 && TREE_CODE (exp) != INDIRECT_REF
6654 && TREE_CODE (exp) != VAR_DECL
6655 && TREE_CODE (exp) != CALL_EXPR
6656 && TREE_CODE (exp) != RTL_EXPR
6657 && GET_MODE_CLASS (tmode) == MODE_INT
6658 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6659 internal_error ("unsupported wide integer operation");
6660
6661 check_max_integer_computation_mode (exp);
6662 #endif
6663
6664 /* If we will do cse, generate all results into pseudo registers
6665 since 1) that allows cse to find more things
6666 and 2) otherwise cse could produce an insn the machine
6667 cannot support. An exception is a CONSTRUCTOR into a multi-word
6668 MEM: that's much more likely to be most efficient into the MEM.
6669 Another is a CALL_EXPR which must return in memory. */
6670
6671 if (! cse_not_expected && mode != BLKmode && target
6672 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6673 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6674 && ! (code == CALL_EXPR && aggregate_value_p (exp)))
6675 target = 0;
6676
6677 switch (code)
6678 {
6679 case LABEL_DECL:
6680 {
6681 tree function = decl_function_context (exp);
6682 /* Handle using a label in a containing function. */
6683 if (function != current_function_decl
6684 && function != inline_function_decl && function != 0)
6685 {
6686 struct function *p = find_function_data (function);
6687 p->expr->x_forced_labels
6688 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6689 p->expr->x_forced_labels);
6690 }
6691 else
6692 {
6693 if (modifier == EXPAND_INITIALIZER)
6694 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6695 label_rtx (exp),
6696 forced_labels);
6697 }
6698
6699 temp = gen_rtx_MEM (FUNCTION_MODE,
6700 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6701 if (function != current_function_decl
6702 && function != inline_function_decl && function != 0)
6703 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6704 return temp;
6705 }
6706
6707 case PARM_DECL:
6708 if (!DECL_RTL_SET_P (exp))
6709 {
6710 error_with_decl (exp, "prior parameter's size depends on `%s'");
6711 return CONST0_RTX (mode);
6712 }
6713
6714 /* ... fall through ... */
6715
6716 case VAR_DECL:
6717 /* If a static var's type was incomplete when the decl was written,
6718 but the type is complete now, lay out the decl now. */
6719 if (DECL_SIZE (exp) == 0
6720 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6721 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6722 layout_decl (exp, 0);
6723
6724 /* ... fall through ... */
6725
6726 case FUNCTION_DECL:
6727 case RESULT_DECL:
6728 if (DECL_RTL (exp) == 0)
6729 abort ();
6730
6731 /* Ensure the variable is marked as used even if it doesn't go through
6732 a parser. If it hasn't been used yet, write out an external
6733 definition. */
6734 if (! TREE_USED (exp))
6735 {
6736 assemble_external (exp);
6737 TREE_USED (exp) = 1;
6738 }
6739
6740 /* Show we haven't gotten RTL for this yet. */
6741 temp = 0;
6742
6743 /* Handle variables inherited from containing functions. */
6744 context = decl_function_context (exp);
6745
6746 /* We treat inline_function_decl as an alias for the current function
6747 because that is the inline function whose vars, types, etc.
6748 are being merged into the current function.
6749 See expand_inline_function. */
6750
6751 if (context != 0 && context != current_function_decl
6752 && context != inline_function_decl
6753 /* If var is static, we don't need a static chain to access it. */
6754 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6755 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6756 {
6757 rtx addr;
6758
6759 /* Mark as non-local and addressable. */
6760 DECL_NONLOCAL (exp) = 1;
6761 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6762 abort ();
6763 (*lang_hooks.mark_addressable) (exp);
6764 if (GET_CODE (DECL_RTL (exp)) != MEM)
6765 abort ();
6766 addr = XEXP (DECL_RTL (exp), 0);
6767 if (GET_CODE (addr) == MEM)
6768 addr
6769 = replace_equiv_address (addr,
6770 fix_lexical_addr (XEXP (addr, 0), exp));
6771 else
6772 addr = fix_lexical_addr (addr, exp);
6773
6774 temp = replace_equiv_address (DECL_RTL (exp), addr);
6775 }
6776
6777 /* This is the case of an array whose size is to be determined
6778 from its initializer, while the initializer is still being parsed.
6779 See expand_decl. */
6780
6781 else if (GET_CODE (DECL_RTL (exp)) == MEM
6782 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6783 temp = validize_mem (DECL_RTL (exp));
6784
6785 /* If DECL_RTL is memory, we are in the normal case; if either
6786 the address is not valid, or it is not a register and -fforce-addr
6787 is specified, get the address into a register. */
6788
6789 else if (GET_CODE (DECL_RTL (exp)) == MEM
6790 && modifier != EXPAND_CONST_ADDRESS
6791 && modifier != EXPAND_SUM
6792 && modifier != EXPAND_INITIALIZER
6793 && (! memory_address_p (DECL_MODE (exp),
6794 XEXP (DECL_RTL (exp), 0))
6795 || (flag_force_addr
6796 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6797 temp = replace_equiv_address (DECL_RTL (exp),
6798 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6799
6800 /* If we got something, return it. But first, set the alignment
6801 if the address is a register. */
6802 if (temp != 0)
6803 {
6804 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6805 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6806
6807 return temp;
6808 }
6809
6810 /* If the mode of DECL_RTL does not match that of the decl, it
6811 must be a promoted value. We return a SUBREG of the wanted mode,
6812 but mark it so that we know that it was already extended. */
6813
6814 if (GET_CODE (DECL_RTL (exp)) == REG
6815 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6816 {
6817 /* Get the signedness used for this variable. Ensure we get the
6818 same mode we got when the variable was declared. */
6819 if (GET_MODE (DECL_RTL (exp))
6820 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6821 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6822 abort ();
6823
6824 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6825 SUBREG_PROMOTED_VAR_P (temp) = 1;
6826 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6827 return temp;
6828 }
6829
6830 return DECL_RTL (exp);
6831
6832 case INTEGER_CST:
6833 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6834 TREE_INT_CST_HIGH (exp), mode);
6835
6836 /* ??? If overflow is set, fold will have done an incomplete job,
6837 which can result in (plus xx (const_int 0)), which can get
6838 simplified by validate_replace_rtx during virtual register
6839 instantiation, which can result in unrecognizable insns.
6840 Avoid this by forcing all overflows into registers. */
6841 if (TREE_CONSTANT_OVERFLOW (exp)
6842 && modifier != EXPAND_INITIALIZER)
6843 temp = force_reg (mode, temp);
6844
6845 return temp;
6846
6847 case VECTOR_CST:
6848 return const_vector_from_tree (exp);
6849
6850 case CONST_DECL:
6851 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6852
6853 case REAL_CST:
6854 /* If optimized, generate immediate CONST_DOUBLE
6855 which will be turned into memory by reload if necessary.
6856
6857 We used to force a register so that loop.c could see it. But
6858 this does not allow gen_* patterns to perform optimizations with
6859 the constants. It also produces two insns in cases like "x = 1.0;".
6860 On most machines, floating-point constants are not permitted in
6861 many insns, so we'd end up copying it to a register in any case.
6862
6863 Now, we do the copying in expand_binop, if appropriate. */
6864 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6865 TYPE_MODE (TREE_TYPE (exp)));
6866
6867 case COMPLEX_CST:
6868 case STRING_CST:
6869 if (! TREE_CST_RTL (exp))
6870 output_constant_def (exp, 1);
6871
6872 /* TREE_CST_RTL probably contains a constant address.
6873 On RISC machines where a constant address isn't valid,
6874 make some insns to get that address into a register. */
6875 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6876 && modifier != EXPAND_CONST_ADDRESS
6877 && modifier != EXPAND_INITIALIZER
6878 && modifier != EXPAND_SUM
6879 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6880 || (flag_force_addr
6881 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6882 return replace_equiv_address (TREE_CST_RTL (exp),
6883 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6884 return TREE_CST_RTL (exp);
6885
6886 case EXPR_WITH_FILE_LOCATION:
6887 {
6888 rtx to_return;
6889 const char *saved_input_filename = input_filename;
6890 int saved_lineno = lineno;
6891 input_filename = EXPR_WFL_FILENAME (exp);
6892 lineno = EXPR_WFL_LINENO (exp);
6893 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6894 emit_line_note (input_filename, lineno);
6895 /* Possibly avoid switching back and forth here. */
6896 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6897 input_filename = saved_input_filename;
6898 lineno = saved_lineno;
6899 return to_return;
6900 }
6901
6902 case SAVE_EXPR:
6903 context = decl_function_context (exp);
6904
6905 /* If this SAVE_EXPR was at global context, assume we are an
6906 initialization function and move it into our context. */
6907 if (context == 0)
6908 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6909
6910 /* We treat inline_function_decl as an alias for the current function
6911 because that is the inline function whose vars, types, etc.
6912 are being merged into the current function.
6913 See expand_inline_function. */
6914 if (context == current_function_decl || context == inline_function_decl)
6915 context = 0;
6916
6917 /* If this is non-local, handle it. */
6918 if (context)
6919 {
6920 /* The following call just exists to abort if the context is
6921 not of a containing function. */
6922 find_function_data (context);
6923
6924 temp = SAVE_EXPR_RTL (exp);
6925 if (temp && GET_CODE (temp) == REG)
6926 {
6927 put_var_into_stack (exp, /*rescan=*/true);
6928 temp = SAVE_EXPR_RTL (exp);
6929 }
6930 if (temp == 0 || GET_CODE (temp) != MEM)
6931 abort ();
6932 return
6933 replace_equiv_address (temp,
6934 fix_lexical_addr (XEXP (temp, 0), exp));
6935 }
6936 if (SAVE_EXPR_RTL (exp) == 0)
6937 {
6938 if (mode == VOIDmode)
6939 temp = const0_rtx;
6940 else
6941 temp = assign_temp (build_qualified_type (type,
6942 (TYPE_QUALS (type)
6943 | TYPE_QUAL_CONST)),
6944 3, 0, 0);
6945
6946 SAVE_EXPR_RTL (exp) = temp;
6947 if (!optimize && GET_CODE (temp) == REG)
6948 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6949 save_expr_regs);
6950
6951 /* If the mode of TEMP does not match that of the expression, it
6952 must be a promoted value. We pass store_expr a SUBREG of the
6953 wanted mode but mark it so that we know that it was already
6954 extended. */
6955
6956 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6957 {
6958 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6959 promote_mode (type, mode, &unsignedp, 0);
6960 SUBREG_PROMOTED_VAR_P (temp) = 1;
6961 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6962 }
6963
6964 if (temp == const0_rtx)
6965 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6966 else
6967 store_expr (TREE_OPERAND (exp, 0), temp,
6968 modifier == EXPAND_STACK_PARM ? 2 : 0);
6969
6970 TREE_USED (exp) = 1;
6971 }
6972
6973 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6974 must be a promoted value. We return a SUBREG of the wanted mode,
6975 but mark it so that we know that it was already extended. */
6976
6977 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6978 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6979 {
6980 /* Compute the signedness and make the proper SUBREG. */
6981 promote_mode (type, mode, &unsignedp, 0);
6982 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6983 SUBREG_PROMOTED_VAR_P (temp) = 1;
6984 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6985 return temp;
6986 }
6987
6988 return SAVE_EXPR_RTL (exp);
6989
6990 case UNSAVE_EXPR:
6991 {
6992 rtx temp;
6993 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6994 TREE_OPERAND (exp, 0)
6995 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
6996 return temp;
6997 }
6998
6999 case PLACEHOLDER_EXPR:
7000 {
7001 tree old_list = placeholder_list;
7002 tree placeholder_expr = 0;
7003
7004 exp = find_placeholder (exp, &placeholder_expr);
7005 if (exp == 0)
7006 abort ();
7007
7008 placeholder_list = TREE_CHAIN (placeholder_expr);
7009 temp = expand_expr (exp, original_target, tmode, modifier);
7010 placeholder_list = old_list;
7011 return temp;
7012 }
7013
7014 case WITH_RECORD_EXPR:
7015 /* Put the object on the placeholder list, expand our first operand,
7016 and pop the list. */
7017 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
7018 placeholder_list);
7019 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
7020 modifier);
7021 placeholder_list = TREE_CHAIN (placeholder_list);
7022 return target;
7023
7024 case GOTO_EXPR:
7025 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
7026 expand_goto (TREE_OPERAND (exp, 0));
7027 else
7028 expand_computed_goto (TREE_OPERAND (exp, 0));
7029 return const0_rtx;
7030
7031 case EXIT_EXPR:
7032 expand_exit_loop_if_false (NULL,
7033 invert_truthvalue (TREE_OPERAND (exp, 0)));
7034 return const0_rtx;
7035
7036 case LABELED_BLOCK_EXPR:
7037 if (LABELED_BLOCK_BODY (exp))
7038 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
7039 /* Should perhaps use expand_label, but this is simpler and safer. */
7040 do_pending_stack_adjust ();
7041 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
7042 return const0_rtx;
7043
7044 case EXIT_BLOCK_EXPR:
7045 if (EXIT_BLOCK_RETURN (exp))
7046 sorry ("returned value in block_exit_expr");
7047 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
7048 return const0_rtx;
7049
7050 case LOOP_EXPR:
7051 push_temp_slots ();
7052 expand_start_loop (1);
7053 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
7054 expand_end_loop ();
7055 pop_temp_slots ();
7056
7057 return const0_rtx;
7058
7059 case BIND_EXPR:
7060 {
7061 tree vars = TREE_OPERAND (exp, 0);
7062
7063 /* Need to open a binding contour here because
7064 if there are any cleanups they must be contained here. */
7065 expand_start_bindings (2);
7066
7067 /* Mark the corresponding BLOCK for output in its proper place. */
7068 if (TREE_OPERAND (exp, 2) != 0
7069 && ! TREE_USED (TREE_OPERAND (exp, 2)))
7070 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
7071
7072 /* If VARS have not yet been expanded, expand them now. */
7073 while (vars)
7074 {
7075 if (!DECL_RTL_SET_P (vars))
7076 expand_decl (vars);
7077 expand_decl_init (vars);
7078 vars = TREE_CHAIN (vars);
7079 }
7080
7081 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
7082
7083 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
7084
7085 return temp;
7086 }
7087
7088 case RTL_EXPR:
7089 if (RTL_EXPR_SEQUENCE (exp))
7090 {
7091 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
7092 abort ();
7093 emit_insn (RTL_EXPR_SEQUENCE (exp));
7094 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
7095 }
7096 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
7097 free_temps_for_rtl_expr (exp);
7098 return RTL_EXPR_RTL (exp);
7099
7100 case CONSTRUCTOR:
7101 /* If we don't need the result, just ensure we evaluate any
7102 subexpressions. */
7103 if (ignore)
7104 {
7105 tree elt;
7106
7107 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
7108 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
7109
7110 return const0_rtx;
7111 }
7112
7113 /* All elts simple constants => refer to a constant in memory. But
7114 if this is a non-BLKmode mode, let it store a field at a time
7115 since that should make a CONST_INT or CONST_DOUBLE when we
7116 fold. Likewise, if we have a target we can use, it is best to
7117 store directly into the target unless the type is large enough
7118 that memcpy will be used. If we are making an initializer and
7119 all operands are constant, put it in memory as well.
7120
7121 FIXME: Avoid trying to fill vector constructors piece-meal.
7122 Output them with output_constant_def below unless we're sure
7123 they're zeros. This should go away when vector initializers
7124 are treated like VECTOR_CST instead of arrays.
7125 */
7126 else if ((TREE_STATIC (exp)
7127 && ((mode == BLKmode
7128 && ! (target != 0 && safe_from_p (target, exp, 1)))
7129 || TREE_ADDRESSABLE (exp)
7130 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7131 && (! MOVE_BY_PIECES_P
7132 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7133 TYPE_ALIGN (type)))
7134 && ((TREE_CODE (type) == VECTOR_TYPE
7135 && !is_zeros_p (exp))
7136 || ! mostly_zeros_p (exp)))))
7137 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
7138 {
7139 rtx constructor = output_constant_def (exp, 1);
7140
7141 if (modifier != EXPAND_CONST_ADDRESS
7142 && modifier != EXPAND_INITIALIZER
7143 && modifier != EXPAND_SUM)
7144 constructor = validize_mem (constructor);
7145
7146 return constructor;
7147 }
7148 else
7149 {
7150 /* Handle calls that pass values in multiple non-contiguous
7151 locations. The Irix 6 ABI has examples of this. */
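	  /* TREE_READONLY is 0 or 1, so multiplying it by TYPE_QUAL_CONST
	     in the assign_temp call below adds the const qualifier exactly
	     when EXP is readonly.  */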
7152 if (target == 0 || ! safe_from_p (target, exp, 1)
7153 || GET_CODE (target) == PARALLEL
7154 || modifier == EXPAND_STACK_PARM)
7155 target
7156 = assign_temp (build_qualified_type (type,
7157 (TYPE_QUALS (type)
7158 | (TREE_READONLY (exp)
7159 * TYPE_QUAL_CONST))),
7160 0, TREE_ADDRESSABLE (exp), 1);
7161
7162 store_constructor (exp, target, 0, int_expr_size (exp));
7163 return target;
7164 }
7165
7166 case INDIRECT_REF:
7167 {
7168 tree exp1 = TREE_OPERAND (exp, 0);
7169 tree index;
7170 tree string = string_constant (exp1, &index);
7171
7172 /* Try to optimize reads from const strings. */
7173 if (string
7174 && TREE_CODE (string) == STRING_CST
7175 && TREE_CODE (index) == INTEGER_CST
7176 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
7177 && GET_MODE_CLASS (mode) == MODE_INT
7178 && GET_MODE_SIZE (mode) == 1
7179 && modifier != EXPAND_WRITE)
7180 return gen_int_mode (TREE_STRING_POINTER (string)
7181 [TREE_INT_CST_LOW (index)], mode);
7182
7183 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7184 op0 = memory_address (mode, op0);
7185 temp = gen_rtx_MEM (mode, op0);
7186 set_mem_attributes (temp, exp, 0);
7187
7188 /* If we are writing to this object and its type is a record with
7189 readonly fields, we must mark it as readonly so it will
7190 conflict with readonly references to those fields. */
7191 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
7192 RTX_UNCHANGING_P (temp) = 1;
7193
7194 return temp;
7195 }
7196
7197 case ARRAY_REF:
7198 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
7199 abort ();
7200
7201 {
7202 tree array = TREE_OPERAND (exp, 0);
7203 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
7204 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
7205 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
7206 HOST_WIDE_INT i;
7207
7208 /* Optimize the special-case of a zero lower bound.
7209
7210 We convert the low_bound to sizetype to avoid some problems
7211 with constant folding. (E.g. suppose the lower bound is 1,
7212 and its mode is QI. Without the conversion, (ARRAY
7213 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7214 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7215
7216 if (! integer_zerop (low_bound))
7217 index = size_diffop (index, convert (sizetype, low_bound));
7218
7219 /* Fold an expression like: "foo"[2].
7220 This is not done in fold so it won't happen inside &.
7221 Don't fold if this is for wide characters since it's too
7222 difficult to do correctly and this is a very rare case. */
7223
7224 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
7225 && TREE_CODE (array) == STRING_CST
7226 && TREE_CODE (index) == INTEGER_CST
7227 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
7228 && GET_MODE_CLASS (mode) == MODE_INT
7229 && GET_MODE_SIZE (mode) == 1)
7230 return gen_int_mode (TREE_STRING_POINTER (array)
7231 [TREE_INT_CST_LOW (index)], mode);
7232
7233 /* If this is a constant index into a constant array,
7234 just get the value from the array. Handle both the cases when
7235 we have an explicit constructor and when our operand is a variable
7236 that was declared const. */
7237
7238 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
7239 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
7240 && TREE_CODE (index) == INTEGER_CST
7241 && 0 > compare_tree_int (index,
7242 list_length (CONSTRUCTOR_ELTS
7243 (TREE_OPERAND (exp, 0)))))
7244 {
7245 tree elem;
7246
7247 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7248 i = TREE_INT_CST_LOW (index);
7249 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
7250 ;
7251
7252 if (elem)
7253 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
7254 modifier);
7255 }
7256
7257 else if (optimize >= 1
7258 && modifier != EXPAND_CONST_ADDRESS
7259 && modifier != EXPAND_INITIALIZER
7260 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7261 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7262 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
7263 {
7264 if (TREE_CODE (index) == INTEGER_CST)
7265 {
7266 tree init = DECL_INITIAL (array);
7267
7268 if (TREE_CODE (init) == CONSTRUCTOR)
7269 {
7270 tree elem;
7271
7272 for (elem = CONSTRUCTOR_ELTS (init);
7273 (elem
7274 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
7275 elem = TREE_CHAIN (elem))
7276 ;
7277
7278 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
7279 return expand_expr (fold (TREE_VALUE (elem)), target,
7280 tmode, modifier);
7281 }
7282 else if (TREE_CODE (init) == STRING_CST
7283 && 0 > compare_tree_int (index,
7284 TREE_STRING_LENGTH (init)))
7285 {
7286 tree type = TREE_TYPE (TREE_TYPE (init));
7287 enum machine_mode mode = TYPE_MODE (type);
7288
7289 if (GET_MODE_CLASS (mode) == MODE_INT
7290 && GET_MODE_SIZE (mode) == 1)
7291 return gen_int_mode (TREE_STRING_POINTER (init)
7292 [TREE_INT_CST_LOW (index)], mode);
7293 }
7294 }
7295 }
7296 }
7297 /* Fall through. */
7298
7299 case COMPONENT_REF:
7300 case BIT_FIELD_REF:
7301 case ARRAY_RANGE_REF:
7302 /* If the operand is a CONSTRUCTOR, we can just extract the
7303 appropriate field if it is present. Don't do this if we have
7304 already written the data since we want to refer to that copy
7305 and varasm.c assumes that's what we'll do. */
7306 if (code == COMPONENT_REF
7307 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
7308 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
7309 {
7310 tree elt;
7311
7312 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
7313 elt = TREE_CHAIN (elt))
7314 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
7315 /* We can normally use the value of the field in the
7316 CONSTRUCTOR. However, if this is a bitfield in
7317 an integral mode that we can fit in a HOST_WIDE_INT,
7318 we must mask only the number of bits in the bitfield,
7319 since this is done implicitly by the constructor. If
7320 the bitfield does not meet either of those conditions,
7321 we can't do this optimization. */
7322 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
7323 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
7324 == MODE_INT)
7325 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
7326 <= HOST_BITS_PER_WIDE_INT))))
7327 {
7328 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
7329 && modifier == EXPAND_STACK_PARM)
7330 target = 0;
7331 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
7332 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
7333 {
7334 HOST_WIDE_INT bitsize
7335 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
7336 enum machine_mode imode
7337 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
7338
7339 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
7340 {
7341 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7342 op0 = expand_and (imode, op0, op1, target);
7343 }
7344 else
7345 {
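		  /* Signed bit-field: shift left so the field's top bit
		     becomes the sign bit of IMODE, then shift right
		     arithmetically to sign-extend.  */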
7346 tree count
7347 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7348 0);
7349
7350 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7351 target, 0);
7352 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7353 target, 0);
7354 }
7355 }
7356
7357 return op0;
7358 }
7359 }
7360
7361 {
7362 enum machine_mode mode1;
7363 HOST_WIDE_INT bitsize, bitpos;
7364 tree offset;
7365 int volatilep = 0;
7366 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7367 &mode1, &unsignedp, &volatilep);
7368 rtx orig_op0;
7369
7370 /* If we got back the original object, something is wrong. Perhaps
7371 we are evaluating an expression too early. In any event, don't
7372 infinitely recurse. */
7373 if (tem == exp)
7374 abort ();
7375
7376 /* If TEM's type is a union of variable size, pass TARGET to the inner
7377 	 computation, since it will need a temporary and TARGET is known
7378 	 to suffice.  This occurs in unchecked conversion in Ada.  */
7379
7380 orig_op0 = op0
7381 = expand_expr (tem,
7382 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7383 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7384 != INTEGER_CST)
7385 && modifier != EXPAND_STACK_PARM
7386 ? target : NULL_RTX),
7387 VOIDmode,
7388 (modifier == EXPAND_INITIALIZER
7389 || modifier == EXPAND_CONST_ADDRESS
7390 || modifier == EXPAND_STACK_PARM)
7391 ? modifier : EXPAND_NORMAL);
7392
7393 /* If this is a constant, put it into a register if it is a
7394 	 legitimate constant and OFFSET is 0, and into memory if it isn't.  */
7395 if (CONSTANT_P (op0))
7396 {
7397 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7398 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7399 && offset == 0)
7400 op0 = force_reg (mode, op0);
7401 else
7402 op0 = validize_mem (force_const_mem (mode, op0));
7403 }
7404
7405 if (offset != 0)
7406 {
7407 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7408 EXPAND_SUM);
7409
7410 /* If this object is in a register, put it into memory.
7411 This case can't occur in C, but can in Ada if we have
7412 unchecked conversion of an expression from a scalar type to
7413 an array or record type. */
7414 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7415 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7416 {
7417 /* If the operand is a SAVE_EXPR, we can deal with this by
7418 forcing the SAVE_EXPR into memory. */
7419 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7420 {
7421 put_var_into_stack (TREE_OPERAND (exp, 0),
7422 /*rescan=*/true);
7423 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7424 }
7425 else
7426 {
7427 tree nt
7428 = build_qualified_type (TREE_TYPE (tem),
7429 (TYPE_QUALS (TREE_TYPE (tem))
7430 | TYPE_QUAL_CONST));
7431 rtx memloc = assign_temp (nt, 1, 1, 1);
7432
7433 emit_move_insn (memloc, op0);
7434 op0 = memloc;
7435 }
7436 }
7437
7438 if (GET_CODE (op0) != MEM)
7439 abort ();
7440
7441 #ifdef POINTERS_EXTEND_UNSIGNED
7442 if (GET_MODE (offset_rtx) != Pmode)
7443 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7444 #else
7445 if (GET_MODE (offset_rtx) != ptr_mode)
7446 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7447 #endif
7448
7449 	  /* A constant address in OP0 can have VOIDmode; we must not try
7450 	     to call force_reg in that case, so avoid it.  */
7451 if (GET_CODE (op0) == MEM
7452 && GET_MODE (op0) == BLKmode
7453 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7454 && bitsize != 0
7455 && (bitpos % bitsize) == 0
7456 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7457 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7458 {
7459 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7460 bitpos = 0;
7461 }
7462
7463 op0 = offset_address (op0, offset_rtx,
7464 highest_pow2_factor (offset));
7465 }
7466
7467 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7468 record its alignment as BIGGEST_ALIGNMENT. */
7469 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7470 && is_aligning_offset (offset, tem))
7471 set_mem_align (op0, BIGGEST_ALIGNMENT);
7472
7473 /* Don't forget about volatility even if this is a bitfield. */
7474 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7475 {
7476 if (op0 == orig_op0)
7477 op0 = copy_rtx (op0);
7478
7479 MEM_VOLATILE_P (op0) = 1;
7480 }
7481
7482 /* The following code doesn't handle CONCAT.
7483 	 Assume only bitpos == 0 can be used for CONCAT, due to
7484 	 one-element arrays having the same mode as their element.  */
7485 if (GET_CODE (op0) == CONCAT)
7486 {
7487 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7488 abort ();
7489 return op0;
7490 }
7491
7492 /* In cases where an aligned union has an unaligned object
7493 as a field, we might be extracting a BLKmode value from
7494 an integer-mode (e.g., SImode) object. Handle this case
7495 by doing the extract into an object as wide as the field
7496 (which we know to be the width of a basic mode), then
7497 storing into memory, and changing the mode to BLKmode. */
7498 if (mode1 == VOIDmode
7499 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7500 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7501 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7502 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7503 && modifier != EXPAND_CONST_ADDRESS
7504 && modifier != EXPAND_INITIALIZER)
7505 /* If the field isn't aligned enough to fetch as a memref,
7506 fetch it as a bit field. */
7507 || (mode1 != BLKmode
7508 && ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7509 && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0)))
7510 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7511 /* If the type and the field are a constant size and the
7512 size of the type isn't the same size as the bitfield,
7513 we must use bitfield operations. */
7514 || (bitsize >= 0
7515 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7516 == INTEGER_CST)
7517 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7518 bitsize)))
7519 {
7520 enum machine_mode ext_mode = mode;
7521
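	    /* A BLKmode field can only be fetched with a block move, which
	       needs a MEM source, a MEM target and a byte-aligned position;
	       otherwise try to pick an integer mode wide enough for the
	       field.  */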
7522 if (ext_mode == BLKmode
7523 && ! (target != 0 && GET_CODE (op0) == MEM
7524 && GET_CODE (target) == MEM
7525 && bitpos % BITS_PER_UNIT == 0))
7526 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7527
7528 if (ext_mode == BLKmode)
7529 {
7530 /* In this case, BITPOS must start at a byte boundary and
7531 TARGET, if specified, must be a MEM. */
7532 if (GET_CODE (op0) != MEM
7533 || (target != 0 && GET_CODE (target) != MEM)
7534 || bitpos % BITS_PER_UNIT != 0)
7535 abort ();
7536
7537 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7538 if (target == 0)
7539 target = assign_temp (type, 0, 1, 1);
7540
7541 emit_block_move (target, op0,
7542 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7543 / BITS_PER_UNIT),
7544 (modifier == EXPAND_STACK_PARM
7545 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7546
7547 return target;
7548 }
7549
7550 op0 = validize_mem (op0);
7551
7552 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7553 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7554
7555 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7556 (modifier == EXPAND_STACK_PARM
7557 ? NULL_RTX : target),
7558 ext_mode, ext_mode,
7559 int_size_in_bytes (TREE_TYPE (tem)));
7560
7561 /* If the result is a record type and BITSIZE is narrower than
7562 the mode of OP0, an integral mode, and this is a big endian
7563 machine, we must put the field into the high-order bits. */
7564 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7565 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7566 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7567 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7568 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7569 - bitsize),
7570 op0, 1);
7571
7572 if (mode == BLKmode)
7573 {
7574 rtx new = assign_temp (build_qualified_type
7575 ((*lang_hooks.types.type_for_mode)
7576 (ext_mode, 0),
7577 TYPE_QUAL_CONST), 0, 1, 1);
7578
7579 emit_move_insn (new, op0);
7580 op0 = copy_rtx (new);
7581 PUT_MODE (op0, BLKmode);
7582 set_mem_attributes (op0, exp, 1);
7583 }
7584
7585 return op0;
7586 }
7587
7588 /* If the result is BLKmode, use that to access the object
7589 now as well. */
7590 if (mode == BLKmode)
7591 mode1 = BLKmode;
7592
7593 /* Get a reference to just this component. */
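	/* adjust_address_nv does not validate the new address; with these
	   modifiers the caller accepts addresses that are not (yet)
	   legitimate.  */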
7594 if (modifier == EXPAND_CONST_ADDRESS
7595 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7596 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7597 else
7598 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7599
7600 if (op0 == orig_op0)
7601 op0 = copy_rtx (op0);
7602
7603 set_mem_attributes (op0, exp, 0);
7604 if (GET_CODE (XEXP (op0, 0)) == REG)
7605 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7606
7607 MEM_VOLATILE_P (op0) |= volatilep;
7608 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7609 || modifier == EXPAND_CONST_ADDRESS
7610 || modifier == EXPAND_INITIALIZER)
7611 return op0;
7612 else if (target == 0)
7613 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7614
7615 convert_move (target, op0, unsignedp);
7616 return target;
7617 }
7618
7619 case VTABLE_REF:
7620 {
7621 rtx insn, before = get_last_insn (), vtbl_ref;
7622
7623 /* Evaluate the interior expression. */
7624 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7625 tmode, modifier);
7626
7627 /* Get or create an instruction off which to hang a note. */
7628 if (REG_P (subtarget))
7629 {
7630 target = subtarget;
7631 insn = get_last_insn ();
7632 if (insn == before)
7633 abort ();
7634 if (! INSN_P (insn))
7635 insn = prev_nonnote_insn (insn);
7636 }
7637 else
7638 {
7639 target = gen_reg_rtx (GET_MODE (subtarget));
7640 insn = emit_move_insn (target, subtarget);
7641 }
7642
7643 /* Collect the data for the note. */
7644 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7645 vtbl_ref = plus_constant (vtbl_ref,
7646 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7647 /* Discard the initial CONST that was added. */
7648 vtbl_ref = XEXP (vtbl_ref, 0);
7649
7650 REG_NOTES (insn)
7651 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7652
7653 return target;
7654 }
7655
7656 /* Intended for a reference to a buffer of a file-object in Pascal.
7657 But it's not certain that a special tree code will really be
7658 necessary for these. INDIRECT_REF might work for them. */
7659 case BUFFER_REF:
7660 abort ();
7661
7662 case IN_EXPR:
7663 {
7664 /* Pascal set IN expression.
7665
7666 Algorithm:
7667 rlo = set_low - (set_low%bits_per_word);
7668 the_word = set [ (index - rlo)/bits_per_word ];
7669 bit_index = index % bits_per_word;
7670 bitmask = 1 << bit_index;
7671 return !!(the_word & bitmask); */
7672
7673 tree set = TREE_OPERAND (exp, 0);
7674 tree index = TREE_OPERAND (exp, 1);
7675 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7676 tree set_type = TREE_TYPE (set);
7677 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7678 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7679 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7680 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7681 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7682 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7683 rtx setaddr = XEXP (setval, 0);
7684 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7685 rtx rlow;
7686 rtx diff, quo, rem, addr, bit, result;
7687
7688 /* If domain is empty, answer is no. Likewise if index is constant
7689 and out of bounds. */
7690 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7691 && TREE_CODE (set_low_bound) == INTEGER_CST
7692 && tree_int_cst_lt (set_high_bound, set_low_bound))
7693 || (TREE_CODE (index) == INTEGER_CST
7694 && TREE_CODE (set_low_bound) == INTEGER_CST
7695 && tree_int_cst_lt (index, set_low_bound))
7696 || (TREE_CODE (set_high_bound) == INTEGER_CST
7697 && TREE_CODE (index) == INTEGER_CST
7698 && tree_int_cst_lt (set_high_bound, index))))
7699 return const0_rtx;
7700
7701 if (target == 0)
7702 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7703
7704 /* If we get here, we have to generate the code for both cases
7705 (in range and out of range). */
7706
7707 op0 = gen_label_rtx ();
7708 op1 = gen_label_rtx ();
7709
7710 if (! (GET_CODE (index_val) == CONST_INT
7711 && GET_CODE (lo_r) == CONST_INT))
7712 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7713 GET_MODE (index_val), iunsignedp, op1);
7714
7715 if (! (GET_CODE (index_val) == CONST_INT
7716 && GET_CODE (hi_r) == CONST_INT))
7717 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7718 GET_MODE (index_val), iunsignedp, op1);
7719
7720 /* Calculate the element number of bit zero in the first word
7721 of the set. */
7722 if (GET_CODE (lo_r) == CONST_INT)
7723 rlow = GEN_INT (INTVAL (lo_r)
7724 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7725 else
7726 rlow = expand_binop (index_mode, and_optab, lo_r,
7727 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7728 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7729
7730 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7731 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7732
7733 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7734 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7735 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7736 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7737
7738 addr = memory_address (byte_mode,
7739 expand_binop (index_mode, add_optab, diff,
7740 setaddr, NULL_RTX, iunsignedp,
7741 OPTAB_LIB_WIDEN));
7742
7743 /* Extract the bit we want to examine. */
7744 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7745 gen_rtx_MEM (byte_mode, addr),
7746 make_tree (TREE_TYPE (index), rem),
7747 NULL_RTX, 1);
7748 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7749 GET_MODE (target) == byte_mode ? target : 0,
7750 1, OPTAB_LIB_WIDEN);
7751
7752 if (result != target)
7753 convert_move (target, result, 1);
7754
7755 /* Output the code to handle the out-of-range case. */
7756 emit_jump (op0);
7757 emit_label (op1);
7758 emit_move_insn (target, const0_rtx);
7759 emit_label (op0);
7760 return target;
7761 }
7762
7763 case WITH_CLEANUP_EXPR:
7764 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7765 {
7766 WITH_CLEANUP_EXPR_RTL (exp)
7767 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7768 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7769 CLEANUP_EH_ONLY (exp));
7770
7771 /* That's it for this cleanup. */
7772 TREE_OPERAND (exp, 1) = 0;
7773 }
7774 return WITH_CLEANUP_EXPR_RTL (exp);
7775
7776 case CLEANUP_POINT_EXPR:
7777 {
7778 /* Start a new binding layer that will keep track of all cleanup
7779 actions to be performed. */
7780 expand_start_bindings (2);
7781
7782 target_temp_slot_level = temp_slot_level;
7783
7784 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7785 /* If we're going to use this value, load it up now. */
7786 if (! ignore)
7787 op0 = force_not_mem (op0);
7788 preserve_temp_slots (op0);
7789 expand_end_bindings (NULL_TREE, 0, 0);
7790 }
7791 return op0;
7792
7793 case CALL_EXPR:
7794 /* Check for a built-in function. */
7795 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7796 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7797 == FUNCTION_DECL)
7798 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7799 {
7800 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7801 == BUILT_IN_FRONTEND)
7802 return (*lang_hooks.expand_expr) (exp, original_target,
7803 tmode, modifier);
7804 else
7805 return expand_builtin (exp, target, subtarget, tmode, ignore);
7806 }
7807
7808 return expand_call (exp, target, ignore);
7809
7810 case NON_LVALUE_EXPR:
7811 case NOP_EXPR:
7812 case CONVERT_EXPR:
7813 case REFERENCE_EXPR:
7814 if (TREE_OPERAND (exp, 0) == error_mark_node)
7815 return const0_rtx;
7816
7817 if (TREE_CODE (type) == UNION_TYPE)
7818 {
7819 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7820
7821 /* If both input and output are BLKmode, this conversion isn't doing
7822 anything except possibly changing memory attribute. */
7823 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7824 {
7825 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7826 modifier);
7827
7828 result = copy_rtx (result);
7829 set_mem_attributes (result, exp, 0);
7830 return result;
7831 }
7832
7833 if (target == 0)
7834 target = assign_temp (type, 0, 1, 1);
7835
7836 if (GET_CODE (target) == MEM)
7837 /* Store data into beginning of memory target. */
7838 store_expr (TREE_OPERAND (exp, 0),
7839 adjust_address (target, TYPE_MODE (valtype), 0),
7840 modifier == EXPAND_STACK_PARM ? 2 : 0);
7841
7842 else if (GET_CODE (target) == REG)
7843 /* Store this field into a union of the proper type. */
7844 store_field (target,
7845 MIN ((int_size_in_bytes (TREE_TYPE
7846 (TREE_OPERAND (exp, 0)))
7847 * BITS_PER_UNIT),
7848 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7849 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7850 VOIDmode, 0, type, 0);
7851 else
7852 abort ();
7853
7854 /* Return the entire union. */
7855 return target;
7856 }
7857
7858 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7859 {
7860 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7861 modifier);
7862
7863 /* If the signedness of the conversion differs and OP0 is
7864 a promoted SUBREG, clear that indication since we now
7865 have to do the proper extension. */
7866 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7867 && GET_CODE (op0) == SUBREG)
7868 SUBREG_PROMOTED_VAR_P (op0) = 0;
7869
7870 return op0;
7871 }
7872
7873 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7874 if (GET_MODE (op0) == mode)
7875 return op0;
7876
7877 /* If OP0 is a constant, just convert it into the proper mode. */
7878 if (CONSTANT_P (op0))
7879 {
7880 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7881 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7882
7883 if (modifier == EXPAND_INITIALIZER)
7884 return simplify_gen_subreg (mode, op0, inner_mode,
7885 subreg_lowpart_offset (mode,
7886 inner_mode));
7887 else
7888 return convert_modes (mode, inner_mode, op0,
7889 TREE_UNSIGNED (inner_type));
7890 }
7891
7892 if (modifier == EXPAND_INITIALIZER)
7893 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7894
7895 if (target == 0)
7896 return
7897 convert_to_mode (mode, op0,
7898 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7899 else
7900 convert_move (target, op0,
7901 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7902 return target;
7903
7904 case VIEW_CONVERT_EXPR:
7905 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7906
7907 /* If the input and output modes are both the same, we are done.
7908 Otherwise, if neither mode is BLKmode and both are within a word, we
7909 can use gen_lowpart. If neither is true, make sure the operand is
7910 in memory and convert the MEM to the new mode. */
7911 if (TYPE_MODE (type) == GET_MODE (op0))
7912 ;
7913 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7914 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7915 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7916 op0 = gen_lowpart (TYPE_MODE (type), op0);
7917 else if (GET_CODE (op0) != MEM)
7918 {
7919 /* If the operand is not a MEM, force it into memory. Since we
7920 	 are going to be changing the mode of the MEM, don't call
7921 force_const_mem for constants because we don't allow pool
7922 constants to change mode. */
7923 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7924
7925 if (TREE_ADDRESSABLE (exp))
7926 abort ();
7927
7928 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7929 target
7930 = assign_stack_temp_for_type
7931 (TYPE_MODE (inner_type),
7932 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7933
7934 emit_move_insn (target, op0);
7935 op0 = target;
7936 }
7937
7938 /* At this point, OP0 is in the correct mode. If the output type is such
7939 that the operand is known to be aligned, indicate that it is.
7940 Otherwise, we need only be concerned about alignment for non-BLKmode
7941 results. */
7942 if (GET_CODE (op0) == MEM)
7943 {
7944 op0 = copy_rtx (op0);
7945
7946 if (TYPE_ALIGN_OK (type))
7947 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7948 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7949 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7950 {
7951 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7952 HOST_WIDE_INT temp_size
7953 = MAX (int_size_in_bytes (inner_type),
7954 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7955 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7956 temp_size, 0, type);
7957 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7958
7959 if (TREE_ADDRESSABLE (exp))
7960 abort ();
7961
7962 if (GET_MODE (op0) == BLKmode)
7963 emit_block_move (new_with_op0_mode, op0,
7964 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7965 (modifier == EXPAND_STACK_PARM
7966 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7967 else
7968 emit_move_insn (new_with_op0_mode, op0);
7969
7970 op0 = new;
7971 }
7972
7973 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7974 }
7975
7976 return op0;
7977
7978 case PLUS_EXPR:
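      /* With -ftrapv (flag_trapv), signed integer addition must use the
	 overflow-trapping optab.  */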
7979 this_optab = ! unsignedp && flag_trapv
7980 && (GET_MODE_CLASS (mode) == MODE_INT)
7981 ? addv_optab : add_optab;
7982
7983 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7984 something else, make sure we add the register to the constant and
7985 then to the other thing. This case can occur during strength
7986 reduction and doing it this way will produce better code if the
7987 frame pointer or argument pointer is eliminated.
7988
7989 fold-const.c will ensure that the constant is always in the inner
7990 PLUS_EXPR, so the only case we need to do anything about is if
7991 sp, ap, or fp is our second argument, in which case we must swap
7992 the innermost first argument and our second argument. */
7993
7994 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7995 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7996 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7997 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7998 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7999 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
8000 {
8001 tree t = TREE_OPERAND (exp, 1);
8002
8003 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8004 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
8005 }
8006
8007 /* If the result is to be ptr_mode and we are adding an integer to
8008 something, we might be forming a constant. So try to use
8009 plus_constant. If it produces a sum and we can't accept it,
8010 use force_operand. This allows P = &ARR[const] to generate
8011 efficient code on machines where a SYMBOL_REF is not a valid
8012 address.
8013
8014 If this is an EXPAND_SUM call, always return the sum. */
8015 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8016 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8017 {
8018 if (modifier == EXPAND_STACK_PARM)
8019 target = 0;
8020 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
8021 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8022 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
8023 {
8024 rtx constant_part;
8025
8026 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
8027 EXPAND_SUM);
8028 /* Use immed_double_const to ensure that the constant is
8029 truncated according to the mode of OP1, then sign extended
8030 to a HOST_WIDE_INT. Using the constant directly can result
8031 in non-canonical RTL in a 64x32 cross compile. */
8032 constant_part
8033 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
8034 (HOST_WIDE_INT) 0,
8035 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
8036 op1 = plus_constant (op1, INTVAL (constant_part));
8037 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8038 op1 = force_operand (op1, target);
8039 return op1;
8040 }
8041
8042 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8043 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
8044 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
8045 {
8046 rtx constant_part;
8047
8048 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8049 (modifier == EXPAND_INITIALIZER
8050 ? EXPAND_INITIALIZER : EXPAND_SUM));
8051 if (! CONSTANT_P (op0))
8052 {
8053 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8054 VOIDmode, modifier);
8055 /* Don't go to both_summands if modifier
8056 says it's not right to return a PLUS. */
8057 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8058 goto binop2;
8059 goto both_summands;
8060 }
8061 /* Use immed_double_const to ensure that the constant is
8062 		 truncated according to the mode of OP0, then sign extended
8063 to a HOST_WIDE_INT. Using the constant directly can result
8064 in non-canonical RTL in a 64x32 cross compile. */
8065 constant_part
8066 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
8067 (HOST_WIDE_INT) 0,
8068 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
8069 op0 = plus_constant (op0, INTVAL (constant_part));
8070 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8071 op0 = force_operand (op0, target);
8072 return op0;
8073 }
8074 }
8075
8076 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8077 subtarget = 0;
8078
8079 /* No sense saving up arithmetic to be done
8080 if it's all in the wrong mode to form part of an address.
8081 And force_operand won't know whether to sign-extend or
8082 zero-extend. */
8083 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8084 || mode != ptr_mode)
8085 {
8086 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8087 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8088 if (op0 == const0_rtx)
8089 return op1;
8090 if (op1 == const0_rtx)
8091 return op0;
8092 goto binop2;
8093 }
8094
8095 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
8096 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
8097
8098 /* We come here from MINUS_EXPR when the second operand is a
8099 constant. */
8100 both_summands:
8101 /* Make sure any term that's a sum with a constant comes last. */
8102 if (GET_CODE (op0) == PLUS
8103 && CONSTANT_P (XEXP (op0, 1)))
8104 {
8105 temp = op0;
8106 op0 = op1;
8107 op1 = temp;
8108 }
8109 /* If adding to a sum including a constant,
8110 associate it to put the constant outside. */
8111 if (GET_CODE (op1) == PLUS
8112 && CONSTANT_P (XEXP (op1, 1)))
8113 {
8114 rtx constant_term = const0_rtx;
8115
8116 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
8117 if (temp != 0)
8118 op0 = temp;
8119 /* Ensure that MULT comes first if there is one. */
8120 else if (GET_CODE (op0) == MULT)
8121 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
8122 else
8123 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
8124
8125 /* Let's also eliminate constants from op0 if possible. */
8126 op0 = eliminate_constant_term (op0, &constant_term);
8127
8128 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
8129 their sum should be a constant. Form it into OP1, since the
8130 result we want will then be OP0 + OP1. */
8131
8132 temp = simplify_binary_operation (PLUS, mode, constant_term,
8133 XEXP (op1, 1));
8134 if (temp != 0)
8135 op1 = temp;
8136 else
8137 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
8138 }
8139
8140 /* Put a constant term last and put a multiplication first. */
8141 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
8142 temp = op1, op1 = op0, op0 = temp;
8143
8144 temp = simplify_binary_operation (PLUS, mode, op0, op1);
8145 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
8146
8147 case MINUS_EXPR:
8148 /* For initializers, we are allowed to return a MINUS of two
8149 symbolic constants. Here we handle all cases when both operands
8150 are constant. */
8151 /* Handle difference of two symbolic constants,
8152 for the sake of an initializer. */
8153 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8154 && really_constant_p (TREE_OPERAND (exp, 0))
8155 && really_constant_p (TREE_OPERAND (exp, 1)))
8156 {
8157 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
8158 modifier);
8159 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8160 modifier);
8161
8162 /* If the last operand is a CONST_INT, use plus_constant of
8163 the negated constant. Else make the MINUS. */
8164 if (GET_CODE (op1) == CONST_INT)
8165 return plus_constant (op0, - INTVAL (op1));
8166 else
8167 return gen_rtx_MINUS (mode, op0, op1);
8168 }
8169
8170 this_optab = ! unsignedp && flag_trapv
8171 && (GET_MODE_CLASS(mode) == MODE_INT)
8172 ? subv_optab : sub_optab;
8173
8174 /* No sense saving up arithmetic to be done
8175 if it's all in the wrong mode to form part of an address.
8176 And force_operand won't know whether to sign-extend or
8177 zero-extend. */
8178 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8179 || mode != ptr_mode)
8180 goto binop;
8181
8182 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8183 subtarget = 0;
8184
8185 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
8186 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
8187
8188 /* Convert A - const to A + (-const). */
8189 if (GET_CODE (op1) == CONST_INT)
8190 {
8191 op1 = negate_rtx (mode, op1);
8192 goto both_summands;
8193 }
8194
8195 goto binop2;
8196
8197 case MULT_EXPR:
8198 /* If first operand is constant, swap them.
8199 Thus the following special case checks need only
8200 check the second operand. */
8201 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8202 {
8203 tree t1 = TREE_OPERAND (exp, 0);
8204 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8205 TREE_OPERAND (exp, 1) = t1;
8206 }
8207
8208 /* Attempt to return something suitable for generating an
8209 indexed address, for machines that support that. */
8210
8211 if (modifier == EXPAND_SUM && mode == ptr_mode
8212 && host_integerp (TREE_OPERAND (exp, 1), 0))
8213 {
8214 tree exp1 = TREE_OPERAND (exp, 1);
8215
8216 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8217 EXPAND_SUM);
8218
8219 /* If we knew for certain that this is arithmetic for an array
8220 reference, and we knew the bounds of the array, then we could
8221 apply the distributive law across (PLUS X C) for constant C.
8222 Without such knowledge, we risk overflowing the computation
8223 when both X and C are large, but X+C isn't. */
8224 /* ??? Could perhaps special-case EXP being unsigned and C being
8225 positive. In that case we are certain that X+C is no smaller
8226 than X and so the transformed expression will overflow iff the
8227 original would have. */
8228
8229 if (GET_CODE (op0) != REG)
8230 op0 = force_operand (op0, NULL_RTX);
8231 if (GET_CODE (op0) != REG)
8232 op0 = copy_to_mode_reg (mode, op0);
8233
8234 return gen_rtx_MULT (mode, op0,
8235 gen_int_mode (tree_low_cst (exp1, 0),
8236 TYPE_MODE (TREE_TYPE (exp1))));
8237 }
8238
8239 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8240 subtarget = 0;
8241
8242 if (modifier == EXPAND_STACK_PARM)
8243 target = 0;
8244
8245 /* Check for multiplying things that have been extended
8246 from a narrower type. If this machine supports multiplying
8247 in that narrower type with a result in the desired type,
8248 do it that way, and avoid the explicit type-conversion. */
8249 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8250 && TREE_CODE (type) == INTEGER_TYPE
8251 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8252 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8253 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8254 && int_fits_type_p (TREE_OPERAND (exp, 1),
8255 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8256 /* Don't use a widening multiply if a shift will do. */
8257 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8258 > HOST_BITS_PER_WIDE_INT)
8259 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8260 ||
8261 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8262 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8263 ==
8264 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
8265 /* If both operands are extended, they must either both
8266 be zero-extended or both be sign-extended. */
8267 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8268 ==
8269 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
8270 {
8271 enum machine_mode innermode
8272 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
8273 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8274 ? smul_widen_optab : umul_widen_optab);
8275 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8276 ? umul_widen_optab : smul_widen_optab);
8277 if (mode == GET_MODE_WIDER_MODE (innermode))
8278 {
8279 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8280 {
8281 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8282 NULL_RTX, VOIDmode, 0);
8283 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8284 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8285 VOIDmode, 0);
8286 else
8287 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8288 NULL_RTX, VOIDmode, 0);
8289 goto binop2;
8290 }
8291 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
8292 && innermode == word_mode)
8293 {
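	      /* No widening multiply of the required signedness is
		 available, but the opposite-signedness one is; use it and
		 correct the high half afterwards.  */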
8294 rtx htem;
8295 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8296 NULL_RTX, VOIDmode, 0);
8297 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8298 op1 = convert_modes (innermode, mode,
8299 expand_expr (TREE_OPERAND (exp, 1),
8300 NULL_RTX, VOIDmode, 0),
8301 unsignedp);
8302 else
8303 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8304 NULL_RTX, VOIDmode, 0);
8305 temp = expand_binop (mode, other_optab, op0, op1, target,
8306 unsignedp, OPTAB_LIB_WIDEN);
8307 htem = expand_mult_highpart_adjust (innermode,
8308 gen_highpart (innermode, temp),
8309 op0, op1,
8310 gen_highpart (innermode, temp),
8311 unsignedp);
8312 emit_move_insn (gen_highpart (innermode, temp), htem);
8313 return temp;
8314 }
8315 }
8316 }
8317 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8318 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8319 return expand_mult (mode, op0, op1, target, unsignedp);
8320
8321 case TRUNC_DIV_EXPR:
8322 case FLOOR_DIV_EXPR:
8323 case CEIL_DIV_EXPR:
8324 case ROUND_DIV_EXPR:
8325 case EXACT_DIV_EXPR:
8326 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8327 subtarget = 0;
8328 if (modifier == EXPAND_STACK_PARM)
8329 target = 0;
8330 /* Possible optimization: compute the dividend with EXPAND_SUM
8331        then, if the divisor is constant, we can optimize the case
8332        where some terms of the dividend have coefficients divisible by it.  */
8333 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8334 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8335 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8336
8337 case RDIV_EXPR:
8338     /* Emit a/b as a*(1/b).  Later we may be able to CSE the reciprocal,
8339        saving an expensive divide.  If not, combine will rebuild the original
8340 computation. */
8341 if (flag_unsafe_math_optimizations && optimize && !optimize_size
8342 && TREE_CODE (type) == REAL_TYPE
8343 && !real_onep (TREE_OPERAND (exp, 0)))
8344 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
8345 build (RDIV_EXPR, type,
8346 build_real (type, dconst1),
8347 TREE_OPERAND (exp, 1))),
8348 target, tmode, modifier);
8349 this_optab = sdiv_optab;
8350 goto binop;
8351
8352 case TRUNC_MOD_EXPR:
8353 case FLOOR_MOD_EXPR:
8354 case CEIL_MOD_EXPR:
8355 case ROUND_MOD_EXPR:
8356 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8357 subtarget = 0;
8358 if (modifier == EXPAND_STACK_PARM)
8359 target = 0;
8360 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8361 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8362 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8363
8364 case FIX_ROUND_EXPR:
8365 case FIX_FLOOR_EXPR:
8366 case FIX_CEIL_EXPR:
8367 abort (); /* Not used for C. */
8368
8369 case FIX_TRUNC_EXPR:
8370 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8371 if (target == 0 || modifier == EXPAND_STACK_PARM)
8372 target = gen_reg_rtx (mode);
8373 expand_fix (target, op0, unsignedp);
8374 return target;
8375
8376 case FLOAT_EXPR:
8377 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8378 if (target == 0 || modifier == EXPAND_STACK_PARM)
8379 target = gen_reg_rtx (mode);
8380 /* expand_float can't figure out what to do if FROM has VOIDmode.
8381 So give it the correct mode. With -O, cse will optimize this. */
8382 if (GET_MODE (op0) == VOIDmode)
8383 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8384 op0);
8385 expand_float (target, op0,
8386 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8387 return target;
8388
8389 case NEGATE_EXPR:
8390 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8391 if (modifier == EXPAND_STACK_PARM)
8392 target = 0;
8393 temp = expand_unop (mode,
8394 ! unsignedp && flag_trapv
8395 && (GET_MODE_CLASS(mode) == MODE_INT)
8396 ? negv_optab : neg_optab, op0, target, 0);
8397 if (temp == 0)
8398 abort ();
8399 return temp;
8400
8401 case ABS_EXPR:
8402 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8403 if (modifier == EXPAND_STACK_PARM)
8404 target = 0;
8405
8406 /* Handle complex values specially. */
8407 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8408 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8409 return expand_complex_abs (mode, op0, target, unsignedp);
8410
8411 /* Unsigned abs is simply the operand. Testing here means we don't
8412 risk generating incorrect code below. */
8413 if (TREE_UNSIGNED (type))
8414 return op0;
8415
8416 return expand_abs (mode, op0, target, unsignedp,
8417 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8418
8419 case MAX_EXPR:
8420 case MIN_EXPR:
8421 target = original_target;
8422 if (target == 0
8423 || modifier == EXPAND_STACK_PARM
8424 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
8425 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8426 || GET_MODE (target) != mode
8427 || (GET_CODE (target) == REG
8428 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8429 target = gen_reg_rtx (mode);
8430 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8431 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8432
8433 /* First try to do it with a special MIN or MAX instruction.
8434 If that does not win, use a conditional jump to select the proper
8435 value. */
8436 this_optab = (TREE_UNSIGNED (type)
8437 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8438 : (code == MIN_EXPR ? smin_optab : smax_optab));
8439
8440 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8441 OPTAB_WIDEN);
8442 if (temp != 0)
8443 return temp;
8444
8445 /* At this point, a MEM target is no longer useful; we will get better
8446 code without it. */
8447
8448 if (GET_CODE (target) == MEM)
8449 target = gen_reg_rtx (mode);
8450
8451 if (target != op0)
8452 emit_move_insn (target, op0);
8453
8454 op0 = gen_label_rtx ();
8455
8456 /* If this mode is an integer too wide to compare properly,
8457 compare word by word. Rely on cse to optimize constant cases. */
8458 if (GET_MODE_CLASS (mode) == MODE_INT
8459 && ! can_compare_p (GE, mode, ccp_jump))
8460 {
8461 if (code == MAX_EXPR)
8462 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8463 target, op1, NULL_RTX, op0);
8464 else
8465 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8466 op1, target, NULL_RTX, op0);
8467 }
8468 else
8469 {
8470 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8471 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8472 unsignedp, mode, NULL_RTX, NULL_RTX,
8473 op0);
8474 }
8475 emit_move_insn (target, op1);
8476 emit_label (op0);
8477 return target;
8478
8479 case BIT_NOT_EXPR:
8480 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8481 if (modifier == EXPAND_STACK_PARM)
8482 target = 0;
8483 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8484 if (temp == 0)
8485 abort ();
8486 return temp;
8487
8488 case FFS_EXPR:
8489 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8490 if (modifier == EXPAND_STACK_PARM)
8491 target = 0;
8492 temp = expand_unop (mode, ffs_optab, op0, target, 1);
8493 if (temp == 0)
8494 abort ();
8495 return temp;
8496
8497 case CLZ_EXPR:
8498 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8499 temp = expand_unop (mode, clz_optab, op0, target, 1);
8500 if (temp == 0)
8501 abort ();
8502 return temp;
8503
8504 case CTZ_EXPR:
8505 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8506 temp = expand_unop (mode, ctz_optab, op0, target, 1);
8507 if (temp == 0)
8508 abort ();
8509 return temp;
8510
8511 case POPCOUNT_EXPR:
8512 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8513 temp = expand_unop (mode, popcount_optab, op0, target, 1);
8514 if (temp == 0)
8515 abort ();
8516 return temp;
8517
8518 case PARITY_EXPR:
8519 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8520 temp = expand_unop (mode, parity_optab, op0, target, 1);
8521 if (temp == 0)
8522 abort ();
8523 return temp;
8524
8525 /* ??? Can optimize bitwise operations with one arg constant.
8526 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8527 and (a bitwise1 b) bitwise2 b (etc)
8528      but that is probably not worthwhile.  */
8529
8530 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8531 boolean values when we want in all cases to compute both of them. In
8532 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8533 as actual zero-or-1 values and then bitwise anding. In cases where
8534 there cannot be any side effects, better code would be made by
8535 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8536 how to recognize those cases. */
8537
8538 case TRUTH_AND_EXPR:
8539 case BIT_AND_EXPR:
8540 this_optab = and_optab;
8541 goto binop;
8542
8543 case TRUTH_OR_EXPR:
8544 case BIT_IOR_EXPR:
8545 this_optab = ior_optab;
8546 goto binop;
8547
8548 case TRUTH_XOR_EXPR:
8549 case BIT_XOR_EXPR:
8550 this_optab = xor_optab;
8551 goto binop;
8552
8553 case LSHIFT_EXPR:
8554 case RSHIFT_EXPR:
8555 case LROTATE_EXPR:
8556 case RROTATE_EXPR:
8557 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8558 subtarget = 0;
8559 if (modifier == EXPAND_STACK_PARM)
8560 target = 0;
8561 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8562 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8563 unsignedp);
8564
8565 /* Could determine the answer when only additive constants differ. Also,
8566 the addition of one can be handled by changing the condition. */
8567 case LT_EXPR:
8568 case LE_EXPR:
8569 case GT_EXPR:
8570 case GE_EXPR:
8571 case EQ_EXPR:
8572 case NE_EXPR:
8573 case UNORDERED_EXPR:
8574 case ORDERED_EXPR:
8575 case UNLT_EXPR:
8576 case UNLE_EXPR:
8577 case UNGT_EXPR:
8578 case UNGE_EXPR:
8579 case UNEQ_EXPR:
8580 temp = do_store_flag (exp,
8581 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8582 tmode != VOIDmode ? tmode : mode, 0);
8583 if (temp != 0)
8584 return temp;
8585
8586 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8587 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8588 && original_target
8589 && GET_CODE (original_target) == REG
8590 && (GET_MODE (original_target)
8591 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8592 {
8593 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8594 VOIDmode, 0);
8595
8596 /* If temp is constant, we can just compute the result. */
8597 if (GET_CODE (temp) == CONST_INT)
8598 {
8599 if (INTVAL (temp) != 0)
8600 emit_move_insn (target, const1_rtx);
8601 else
8602 emit_move_insn (target, const0_rtx);
8603
8604 return target;
8605 }
8606
8607 if (temp != original_target)
8608 {
8609 enum machine_mode mode1 = GET_MODE (temp);
8610 if (mode1 == VOIDmode)
8611 mode1 = tmode != VOIDmode ? tmode : mode;
8612
8613 temp = copy_to_mode_reg (mode1, temp);
8614 }
8615
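	  /* TEMP now holds the operand's value; branch past the store of 1
	     when it is zero, leaving TEMP as 0 or 1.  */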
8616 op1 = gen_label_rtx ();
8617 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8618 GET_MODE (temp), unsignedp, op1);
8619 emit_move_insn (temp, const1_rtx);
8620 emit_label (op1);
8621 return temp;
8622 }
8623
8624 /* If no set-flag instruction, must generate a conditional
8625 store into a temporary variable. Drop through
8626 and handle this like && and ||. */
8627
8628 case TRUTH_ANDIF_EXPR:
8629 case TRUTH_ORIF_EXPR:
8630 if (! ignore
8631 && (target == 0
8632 || modifier == EXPAND_STACK_PARM
8633 || ! safe_from_p (target, exp, 1)
8634 /* Make sure we don't have a hard reg (such as function's return
8635 value) live across basic blocks, if not optimizing. */
8636 || (!optimize && GET_CODE (target) == REG
8637 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8638 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8639
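      /* Compute the value as 0 or 1: clear TARGET, jump past the store of
	 1 when EXP is false, otherwise store 1.  (TARGET may be 0 when the
	 result is ignored; then only the jumps are emitted, for their side
	 effects.)  */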
8640 if (target)
8641 emit_clr_insn (target);
8642
8643 op1 = gen_label_rtx ();
8644 jumpifnot (exp, op1);
8645
8646 if (target)
8647 emit_0_to_1_insn (target);
8648
8649 emit_label (op1);
8650 return ignore ? const0_rtx : target;
8651
8652 case TRUTH_NOT_EXPR:
8653 if (modifier == EXPAND_STACK_PARM)
8654 target = 0;
8655 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8656 /* The parser is careful to generate TRUTH_NOT_EXPR
8657 only with operands that are always zero or one. */
8658 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8659 target, 1, OPTAB_LIB_WIDEN);
8660 if (temp == 0)
8661 abort ();
8662 return temp;
8663
8664 case COMPOUND_EXPR:
8665 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8666 emit_queue ();
8667 return expand_expr (TREE_OPERAND (exp, 1),
8668 (ignore ? const0_rtx : target),
8669 VOIDmode, modifier);
8670
8671 case COND_EXPR:
8672 /* If we would have a "singleton" (see below) were it not for a
8673 conversion in each arm, bring that conversion back out. */
8674 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8675 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8676 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8677 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8678 {
8679 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8680 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8681
8682 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8683 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8684 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8685 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8686 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8687 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8688 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8689 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8690 return expand_expr (build1 (NOP_EXPR, type,
8691 build (COND_EXPR, TREE_TYPE (iftrue),
8692 TREE_OPERAND (exp, 0),
8693 iftrue, iffalse)),
8694 target, tmode, modifier);
8695 }
8696
8697 {
8698 /* Note that COND_EXPRs whose type is a structure or union
8699 are required to be constructed to contain assignments of
8700 a temporary variable, so that we can evaluate them here
8701 for side effect only. If type is void, we must do likewise. */
8702
8703 /* If an arm of the branch requires a cleanup,
8704 only that cleanup is performed. */
8705
8706 tree singleton = 0;
8707 tree binary_op = 0, unary_op = 0;
8708
8709 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8710 convert it to our mode, if necessary. */
8711 if (integer_onep (TREE_OPERAND (exp, 1))
8712 && integer_zerop (TREE_OPERAND (exp, 2))
8713 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8714 {
8715 if (ignore)
8716 {
8717 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8718 modifier);
8719 return const0_rtx;
8720 }
8721
8722 if (modifier == EXPAND_STACK_PARM)
8723 target = 0;
8724 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8725 if (GET_MODE (op0) == mode)
8726 return op0;
8727
8728 if (target == 0)
8729 target = gen_reg_rtx (mode);
8730 convert_move (target, op0, unsignedp);
8731 return target;
8732 }
8733
8734 /* Check for X ? A + B : A. If we have this, we can copy A to the
8735 output and conditionally add B. Similarly for unary operations.
8736 Don't do this if X has side-effects because those side effects
8737 might affect A or B and the "?" operation is a sequence point in
8738 ANSI. (operand_equal_p tests for side effects.) */
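/* Illustrative example (not in the original source): for
"y = cond ? x + 10 : x;" operand 2 (x) equals the first operand of the
PLUS_EXPR in operand 1, so SINGLETON is x and BINARY_OP is the PLUS_EXPR;
we can store x unconditionally and add 10 only when cond is true. */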
8739
8740 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8741 && operand_equal_p (TREE_OPERAND (exp, 2),
8742 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8743 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8744 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8745 && operand_equal_p (TREE_OPERAND (exp, 1),
8746 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8747 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8748 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8749 && operand_equal_p (TREE_OPERAND (exp, 2),
8750 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8751 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8752 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8753 && operand_equal_p (TREE_OPERAND (exp, 1),
8754 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8755 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8756
8757 /* If we are not to produce a result, we have no target. Otherwise,
8758 if a target was specified use it; it will not be used as an
8759 intermediate target unless it is safe. If no target, use a
8760 temporary. */
8761
8762 if (ignore)
8763 temp = 0;
8764 else if (modifier == EXPAND_STACK_PARM)
8765 temp = assign_temp (type, 0, 0, 1);
8766 else if (original_target
8767 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8768 || (singleton && GET_CODE (original_target) == REG
8769 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8770 && original_target == var_rtx (singleton)))
8771 && GET_MODE (original_target) == mode
8772 #ifdef HAVE_conditional_move
8773 && (! can_conditionally_move_p (mode)
8774 || GET_CODE (original_target) == REG
8775 || TREE_ADDRESSABLE (type))
8776 #endif
8777 && (GET_CODE (original_target) != MEM
8778 || TREE_ADDRESSABLE (type)))
8779 temp = original_target;
8780 else if (TREE_ADDRESSABLE (type))
8781 abort ();
8782 else
8783 temp = assign_temp (type, 0, 0, 1);
8784
8785 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8786 do the test of X as a store-flag operation, do this as
8787 A + ((X != 0) << log C). Similarly for other simple binary
8788 operators. Only do for C == 1 if BRANCH_COST is low. */
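/* Hypothetical sketch of the transformation described above: with
"y = cond ? x + 8 : x;" and a store-flag target we can emit
"y = x + ((cond != 0) << 3)" instead of a branch; with
"y = cond ? x : x + 1;" the condition is inverted first, giving
"y = x + (cond == 0)". */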
8789 if (temp && singleton && binary_op
8790 && (TREE_CODE (binary_op) == PLUS_EXPR
8791 || TREE_CODE (binary_op) == MINUS_EXPR
8792 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8793 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8794 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8795 : integer_onep (TREE_OPERAND (binary_op, 1)))
8796 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8797 {
8798 rtx result;
8799 tree cond;
8800 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8801 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8802 ? addv_optab : add_optab)
8803 : TREE_CODE (binary_op) == MINUS_EXPR
8804 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8805 ? subv_optab : sub_optab)
8806 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8807 : xor_optab);
8808
8809 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8810 if (singleton == TREE_OPERAND (exp, 1))
8811 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8812 else
8813 cond = TREE_OPERAND (exp, 0);
8814
8815 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8816 ? temp : NULL_RTX),
8817 mode, BRANCH_COST <= 1);
8818
8819 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8820 result = expand_shift (LSHIFT_EXPR, mode, result,
8821 build_int_2 (tree_log2
8822 (TREE_OPERAND
8823 (binary_op, 1)),
8824 0),
8825 (safe_from_p (temp, singleton, 1)
8826 ? temp : NULL_RTX), 0);
8827
8828 if (result)
8829 {
8830 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8831 return expand_binop (mode, boptab, op1, result, temp,
8832 unsignedp, OPTAB_LIB_WIDEN);
8833 }
8834 }
8835
8836 do_pending_stack_adjust ();
8837 NO_DEFER_POP;
8838 op0 = gen_label_rtx ();
8839
8840 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8841 {
8842 if (temp != 0)
8843 {
8844 /* If the target conflicts with the other operand of the
8845 binary op, we can't use it. Also, we can't use the target
8846 if it is a hard register, because evaluating the condition
8847 might clobber it. */
8848 if ((binary_op
8849 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8850 || (GET_CODE (temp) == REG
8851 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8852 temp = gen_reg_rtx (mode);
8853 store_expr (singleton, temp,
8854 modifier == EXPAND_STACK_PARM ? 2 : 0);
8855 }
8856 else
8857 expand_expr (singleton,
8858 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8859 if (singleton == TREE_OPERAND (exp, 1))
8860 jumpif (TREE_OPERAND (exp, 0), op0);
8861 else
8862 jumpifnot (TREE_OPERAND (exp, 0), op0);
8863
8864 start_cleanup_deferral ();
8865 if (binary_op && temp == 0)
8866 /* Just touch the other operand. */
8867 expand_expr (TREE_OPERAND (binary_op, 1),
8868 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8869 else if (binary_op)
8870 store_expr (build (TREE_CODE (binary_op), type,
8871 make_tree (type, temp),
8872 TREE_OPERAND (binary_op, 1)),
8873 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8874 else
8875 store_expr (build1 (TREE_CODE (unary_op), type,
8876 make_tree (type, temp)),
8877 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8878 op1 = op0;
8879 }
8880 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8881 comparison operator. If we have one of these cases, set the
8882 output to A, branch on A (cse will merge these two references),
8883 then set the output to FOO. */
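/* Illustrative example (not in the original source): for
"y = (x != 0) ? x : 17;" we store x into the output, branch on the same
x (so cse can share the two references), and only fall through to store
17 when the condition is false. */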
8884 else if (temp
8885 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8886 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8887 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8888 TREE_OPERAND (exp, 1), 0)
8889 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8890 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8891 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8892 {
8893 if (GET_CODE (temp) == REG
8894 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8895 temp = gen_reg_rtx (mode);
8896 store_expr (TREE_OPERAND (exp, 1), temp,
8897 modifier == EXPAND_STACK_PARM ? 2 : 0);
8898 jumpif (TREE_OPERAND (exp, 0), op0);
8899
8900 start_cleanup_deferral ();
8901 store_expr (TREE_OPERAND (exp, 2), temp,
8902 modifier == EXPAND_STACK_PARM ? 2 : 0);
8903 op1 = op0;
8904 }
8905 else if (temp
8906 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8907 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8908 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8909 TREE_OPERAND (exp, 2), 0)
8910 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8911 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8912 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8913 {
8914 if (GET_CODE (temp) == REG
8915 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8916 temp = gen_reg_rtx (mode);
8917 store_expr (TREE_OPERAND (exp, 2), temp,
8918 modifier == EXPAND_STACK_PARM ? 2 : 0);
8919 jumpifnot (TREE_OPERAND (exp, 0), op0);
8920
8921 start_cleanup_deferral ();
8922 store_expr (TREE_OPERAND (exp, 1), temp,
8923 modifier == EXPAND_STACK_PARM ? 2 : 0);
8924 op1 = op0;
8925 }
8926 else
8927 {
8928 op1 = gen_label_rtx ();
8929 jumpifnot (TREE_OPERAND (exp, 0), op0);
8930
8931 start_cleanup_deferral ();
8932
8933 /* One branch of the cond can be void, if it never returns. For
8934 example, A ? throw : E. */
8935 if (temp != 0
8936 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8937 store_expr (TREE_OPERAND (exp, 1), temp,
8938 modifier == EXPAND_STACK_PARM ? 2 : 0);
8939 else
8940 expand_expr (TREE_OPERAND (exp, 1),
8941 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8942 end_cleanup_deferral ();
8943 emit_queue ();
8944 emit_jump_insn (gen_jump (op1));
8945 emit_barrier ();
8946 emit_label (op0);
8947 start_cleanup_deferral ();
8948 if (temp != 0
8949 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8950 store_expr (TREE_OPERAND (exp, 2), temp,
8951 modifier == EXPAND_STACK_PARM ? 2 : 0);
8952 else
8953 expand_expr (TREE_OPERAND (exp, 2),
8954 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8955 }
8956
8957 end_cleanup_deferral ();
8958
8959 emit_queue ();
8960 emit_label (op1);
8961 OK_DEFER_POP;
8962
8963 return temp;
8964 }
8965
8966 case TARGET_EXPR:
8967 {
8968 /* Something needs to be initialized, but we didn't know
8969 where that thing was when building the tree. For example,
8970 it could be the return value of a function, or a parameter
8971 to a function which is laid out on the stack, or a temporary
8972 variable which must be passed by reference.
8973
8974 We guarantee that the expression will either be constructed
8975 or copied into our original target. */
8976
8977 tree slot = TREE_OPERAND (exp, 0);
8978 tree cleanups = NULL_TREE;
8979 tree exp1;
8980
8981 if (TREE_CODE (slot) != VAR_DECL)
8982 abort ();
8983
8984 if (! ignore)
8985 target = original_target;
8986
8987 /* Set this here so that if we get a target that refers to a
8988 register variable that's already been used, put_reg_into_stack
8989 knows that it should fix up those uses. */
8990 TREE_USED (slot) = 1;
8991
8992 if (target == 0)
8993 {
8994 if (DECL_RTL_SET_P (slot))
8995 {
8996 target = DECL_RTL (slot);
8997 /* If we have already expanded the slot, don't do
8998 it again. (mrs) */
8999 if (TREE_OPERAND (exp, 1) == NULL_TREE)
9000 return target;
9001 }
9002 else
9003 {
9004 target = assign_temp (type, 2, 0, 1);
9005 /* All temp slots at this level must not conflict. */
9006 preserve_temp_slots (target);
9007 SET_DECL_RTL (slot, target);
9008 if (TREE_ADDRESSABLE (slot))
9009 put_var_into_stack (slot, /*rescan=*/false);
9010
9011 /* Since SLOT is not known to the called function
9012 to belong to its stack frame, we must build an explicit
9013 cleanup. This case occurs when we must build up a reference
9014 to pass the reference as an argument. In this case,
9015 it is very likely that such a reference need not be
9016 built here. */
9017
9018 if (TREE_OPERAND (exp, 2) == 0)
9019 TREE_OPERAND (exp, 2)
9020 = (*lang_hooks.maybe_build_cleanup) (slot);
9021 cleanups = TREE_OPERAND (exp, 2);
9022 }
9023 }
9024 else
9025 {
9026 /* This case does occur, when expanding a parameter which
9027 needs to be constructed on the stack. The target
9028 is the actual stack address that we want to initialize.
9029 The function we call will perform the cleanup in this case. */
9030
9031 /* If we have already assigned it space, use that space,
9032 not target that we were passed in, as our target
9033 parameter is only a hint. */
9034 if (DECL_RTL_SET_P (slot))
9035 {
9036 target = DECL_RTL (slot);
9038 /* If we have already expanded the slot, don't do
9039 it again. (mrs) */
9039 if (TREE_OPERAND (exp, 1) == NULL_TREE)
9040 return target;
9041 }
9042 else
9043 {
9044 SET_DECL_RTL (slot, target);
9045 /* If we must have an addressable slot, then make sure that
9046 the RTL that we just stored in slot is OK. */
9047 if (TREE_ADDRESSABLE (slot))
9048 put_var_into_stack (slot, /*rescan=*/true);
9049 }
9050 }
9051
9052 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
9053 /* Mark it as expanded. */
9054 TREE_OPERAND (exp, 1) = NULL_TREE;
9055
9056 store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
9057
9058 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
9059
9060 return target;
9061 }
9062
9063 case INIT_EXPR:
9064 {
9065 tree lhs = TREE_OPERAND (exp, 0);
9066 tree rhs = TREE_OPERAND (exp, 1);
9067
9068 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
9069 return temp;
9070 }
9071
9072 case MODIFY_EXPR:
9073 {
9074 /* If lhs is complex, expand calls in rhs before computing it.
9075 That's so we don't compute a pointer and save it over a
9076 call. If lhs is simple, compute it first so we can give it
9077 as a target if the rhs is just a call. This avoids an
9078 extra temp and copy and that prevents a partial-subsumption
9079 which makes bad code. Actually we could treat
9080 component_ref's of vars like vars. */
9081
9082 tree lhs = TREE_OPERAND (exp, 0);
9083 tree rhs = TREE_OPERAND (exp, 1);
9084
9085 temp = 0;
9086
9087 /* Check for |= or &= of a bitfield of size one into another bitfield
9088 of size 1. In this case, (unless we need the result of the
9089 assignment) we can do this more efficiently with a
9090 test followed by an assignment, if necessary.
9091
9092 ??? At this point, we can't get a BIT_FIELD_REF here. But if
9093 things change so we do, this code should be enhanced to
9094 support it. */
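/* Illustrative example (not in the original source), assuming two 1-bit
bitfields a and b in struct s: when the value of the assignment is unused,
"s.a |= s.b;" becomes "if (s.b) s.a = 1;" and
"s.a &= s.b;" becomes "if (!s.b) s.a = 0;". */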
9095 if (ignore
9096 && TREE_CODE (lhs) == COMPONENT_REF
9097 && (TREE_CODE (rhs) == BIT_IOR_EXPR
9098 || TREE_CODE (rhs) == BIT_AND_EXPR)
9099 && TREE_OPERAND (rhs, 0) == lhs
9100 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
9101 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
9102 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
9103 {
9104 rtx label = gen_label_rtx ();
9105
9106 do_jump (TREE_OPERAND (rhs, 1),
9107 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
9108 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
9109 expand_assignment (lhs, convert (TREE_TYPE (rhs),
9110 (TREE_CODE (rhs) == BIT_IOR_EXPR
9111 ? integer_one_node
9112 : integer_zero_node)),
9113 0, 0);
9114 do_pending_stack_adjust ();
9115 emit_label (label);
9116 return const0_rtx;
9117 }
9118
9119 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
9120
9121 return temp;
9122 }
9123
9124 case RETURN_EXPR:
9125 if (!TREE_OPERAND (exp, 0))
9126 expand_null_return ();
9127 else
9128 expand_return (TREE_OPERAND (exp, 0));
9129 return const0_rtx;
9130
9131 case PREINCREMENT_EXPR:
9132 case PREDECREMENT_EXPR:
9133 return expand_increment (exp, 0, ignore);
9134
9135 case POSTINCREMENT_EXPR:
9136 case POSTDECREMENT_EXPR:
9137 /* Faster to treat as pre-increment if result is not used. */
9138 return expand_increment (exp, ! ignore, ignore);
9139
9140 case ADDR_EXPR:
9141 if (modifier == EXPAND_STACK_PARM)
9142 target = 0;
9143 /* Are we taking the address of a nested function? */
9144 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
9145 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
9146 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
9147 && ! TREE_STATIC (exp))
9148 {
9149 op0 = trampoline_address (TREE_OPERAND (exp, 0));
9150 op0 = force_operand (op0, target);
9151 }
9152 /* If we are taking the address of something erroneous, just
9153 return a zero. */
9154 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9155 return const0_rtx;
9156 /* If we are taking the address of a constant and are at the
9157 top level, we have to use output_constant_def since we can't
9158 call force_const_mem at top level. */
9159 else if (cfun == 0
9160 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
9161 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
9162 == 'c')))
9163 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
9164 else
9165 {
9166 /* We make sure to pass const0_rtx down if we came in with
9167 ignore set, to avoid doing the cleanups twice for something. */
9168 op0 = expand_expr (TREE_OPERAND (exp, 0),
9169 ignore ? const0_rtx : NULL_RTX, VOIDmode,
9170 (modifier == EXPAND_INITIALIZER
9171 ? modifier : EXPAND_CONST_ADDRESS));
9172
9173 /* If we are going to ignore the result, OP0 will have been set
9174 to const0_rtx, so just return it. Don't get confused and
9175 think we are taking the address of the constant. */
9176 if (ignore)
9177 return op0;
9178
9179 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
9180 clever and returns a REG when given a MEM. */
9181 op0 = protect_from_queue (op0, 1);
9182
9183 /* We would like the object in memory. If it is a constant, we can
9184 have it be statically allocated into memory. For a non-constant,
9185 we need to allocate some memory and store the value into it. */
9186
9187 if (CONSTANT_P (op0))
9188 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
9189 op0);
9190 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9191 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
9192 || GET_CODE (op0) == PARALLEL)
9193 {
9194 /* If the operand is a SAVE_EXPR, we can deal with this by
9195 forcing the SAVE_EXPR into memory. */
9196 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
9197 {
9198 put_var_into_stack (TREE_OPERAND (exp, 0),
9199 /*rescan=*/true);
9200 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
9201 }
9202 else
9203 {
9204 /* If this object is in a register, it can't be BLKmode. */
9205 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9206 rtx memloc = assign_temp (inner_type, 1, 1, 1);
9207
9208 if (GET_CODE (op0) == PARALLEL)
9209 /* Handle calls that pass values in multiple
9210 non-contiguous locations. The Irix 6 ABI has examples
9211 of this. */
9212 emit_group_store (memloc, op0,
9213 int_size_in_bytes (inner_type));
9214 else
9215 emit_move_insn (memloc, op0);
9216
9217 op0 = memloc;
9218 }
9219 }
9220
9221 if (GET_CODE (op0) != MEM)
9222 abort ();
9223
9224 mark_temp_addr_taken (op0);
9225 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
9226 {
9227 op0 = XEXP (op0, 0);
9228 #ifdef POINTERS_EXTEND_UNSIGNED
9229 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9230 && mode == ptr_mode)
9231 op0 = convert_memory_address (ptr_mode, op0);
9232 #endif
9233 return op0;
9234 }
9235
9236 /* If OP0 is not aligned at least as much as the type requires, we
9237 need to make a temporary, copy OP0 to it, and take the address of
9238 the temporary. We want to use the alignment of the type, not of
9239 the operand. Note that this is incorrect for FUNCTION_TYPE, but
9240 the test for BLKmode means that can't happen. The test for
9241 BLKmode is because we never make mis-aligned MEMs with
9242 non-BLKmode.
9243
9244 We don't need to do this at all if the machine doesn't have
9245 strict alignment. */
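/* Hypothetical example: on a strict-alignment target, taking the address
of a member of a struct declared with __attribute__((packed)) can yield
a MEM less aligned than its type; the copy below gives us an aligned
temporary whose address can safely be taken instead. */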
9246 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
9247 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
9248 > MEM_ALIGN (op0))
9249 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
9250 {
9251 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9252 rtx new;
9253
9254 if (TYPE_ALIGN_OK (inner_type))
9255 abort ();
9256
9257 if (TREE_ADDRESSABLE (inner_type))
9258 {
9259 /* We can't make a bitwise copy of this object, so fail. */
9260 error ("cannot take the address of an unaligned member");
9261 return const0_rtx;
9262 }
9263
9264 new = assign_stack_temp_for_type
9265 (TYPE_MODE (inner_type),
9266 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
9267 : int_size_in_bytes (inner_type),
9268 1, build_qualified_type (inner_type,
9269 (TYPE_QUALS (inner_type)
9270 | TYPE_QUAL_CONST)));
9271
9272 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
9273 (modifier == EXPAND_STACK_PARM
9274 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9275
9276 op0 = new;
9277 }
9278
9279 op0 = force_operand (XEXP (op0, 0), target);
9280 }
9281
9282 if (flag_force_addr
9283 && GET_CODE (op0) != REG
9284 && modifier != EXPAND_CONST_ADDRESS
9285 && modifier != EXPAND_INITIALIZER
9286 && modifier != EXPAND_SUM)
9287 op0 = force_reg (Pmode, op0);
9288
9289 if (GET_CODE (op0) == REG
9290 && ! REG_USERVAR_P (op0))
9291 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
9292
9293 #ifdef POINTERS_EXTEND_UNSIGNED
9294 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9295 && mode == ptr_mode)
9296 op0 = convert_memory_address (ptr_mode, op0);
9297 #endif
9298
9299 return op0;
9300
9301 case ENTRY_VALUE_EXPR:
9302 abort ();
9303
9304 /* COMPLEX type for Extended Pascal & Fortran */
9305 case COMPLEX_EXPR:
9306 {
9307 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9308 rtx insns;
9309
9310 /* Get the rtx code of the operands. */
9311 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9312 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
9313
9314 if (! target)
9315 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
9316
9317 start_sequence ();
9318
9319 /* Move the real (op0) and imaginary (op1) parts to their location. */
9320 emit_move_insn (gen_realpart (mode, target), op0);
9321 emit_move_insn (gen_imagpart (mode, target), op1);
9322
9323 insns = get_insns ();
9324 end_sequence ();
9325
9326 /* Complex construction should appear as a single unit. */
9327 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
9328 each with a separate pseudo as destination.
9329 It's not correct for flow to treat them as a unit. */
9330 if (GET_CODE (target) != CONCAT)
9331 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
9332 else
9333 emit_insn (insns);
9334
9335 return target;
9336 }
9337
9338 case REALPART_EXPR:
9339 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9340 return gen_realpart (mode, op0);
9341
9342 case IMAGPART_EXPR:
9343 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9344 return gen_imagpart (mode, op0);
9345
9346 case CONJ_EXPR:
9347 {
9348 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9349 rtx imag_t;
9350 rtx insns;
9351
9352 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9353
9354 if (! target)
9355 target = gen_reg_rtx (mode);
9356
9357 start_sequence ();
9358
9359 /* Store the realpart and the negated imagpart to target. */
9360 emit_move_insn (gen_realpart (partmode, target),
9361 gen_realpart (partmode, op0));
9362
9363 imag_t = gen_imagpart (partmode, target);
9364 temp = expand_unop (partmode,
9365 ! unsignedp && flag_trapv
9366 && (GET_MODE_CLASS(partmode) == MODE_INT)
9367 ? negv_optab : neg_optab,
9368 gen_imagpart (partmode, op0), imag_t, 0);
9369 if (temp != imag_t)
9370 emit_move_insn (imag_t, temp);
9371
9372 insns = get_insns ();
9373 end_sequence ();
9374
9375 /* Conjugate should appear as a single unit.
9376 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
9377 each with a separate pseudo as destination.
9378 It's not correct for flow to treat them as a unit. */
9379 if (GET_CODE (target) != CONCAT)
9380 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
9381 else
9382 emit_insn (insns);
9383
9384 return target;
9385 }
9386
9387 case TRY_CATCH_EXPR:
9388 {
9389 tree handler = TREE_OPERAND (exp, 1);
9390
9391 expand_eh_region_start ();
9392
9393 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9394
9395 expand_eh_region_end_cleanup (handler);
9396
9397 return op0;
9398 }
9399
9400 case TRY_FINALLY_EXPR:
9401 {
9402 tree try_block = TREE_OPERAND (exp, 0);
9403 tree finally_block = TREE_OPERAND (exp, 1);
9404
9405 if (!optimize || unsafe_for_reeval (finally_block) > 1)
9406 {
9407 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
9408 is not sufficient, so we cannot expand the block twice.
9409 So we play games with GOTO_SUBROUTINE_EXPR to let us
9410 expand the thing only once. */
9411 /* When not optimizing, we go ahead with this form since
9412 (1) user breakpoints operate more predictably without
9413 code duplication, and
9414 (2) we're not running any of the global optimizers
9415 that would explode in time/space with the highly
9416 connected CFG created by the indirect branching. */
9417
9418 rtx finally_label = gen_label_rtx ();
9419 rtx done_label = gen_label_rtx ();
9420 rtx return_link = gen_reg_rtx (Pmode);
9421 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
9422 (tree) finally_label, (tree) return_link);
9423 TREE_SIDE_EFFECTS (cleanup) = 1;
9424
9425 /* Start a new binding layer that will keep track of all cleanup
9426 actions to be performed. */
9427 expand_start_bindings (2);
9428 target_temp_slot_level = temp_slot_level;
9429
9430 expand_decl_cleanup (NULL_TREE, cleanup);
9431 op0 = expand_expr (try_block, target, tmode, modifier);
9432
9433 preserve_temp_slots (op0);
9434 expand_end_bindings (NULL_TREE, 0, 0);
9435 emit_jump (done_label);
9436 emit_label (finally_label);
9437 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
9438 emit_indirect_jump (return_link);
9439 emit_label (done_label);
9440 }
9441 else
9442 {
9443 expand_start_bindings (2);
9444 target_temp_slot_level = temp_slot_level;
9445
9446 expand_decl_cleanup (NULL_TREE, finally_block);
9447 op0 = expand_expr (try_block, target, tmode, modifier);
9448
9449 preserve_temp_slots (op0);
9450 expand_end_bindings (NULL_TREE, 0, 0);
9451 }
9452
9453 return op0;
9454 }
9455
9456 case GOTO_SUBROUTINE_EXPR:
9457 {
9458 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9459 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9460 rtx return_address = gen_label_rtx ();
9461 emit_move_insn (return_link,
9462 gen_rtx_LABEL_REF (Pmode, return_address));
9463 emit_jump (subr);
9464 emit_label (return_address);
9465 return const0_rtx;
9466 }
9467
9468 case VA_ARG_EXPR:
9469 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9470
9471 case EXC_PTR_EXPR:
9472 return get_exception_pointer (cfun);
9473
9474 case FDESC_EXPR:
9475 /* Function descriptors are not valid except as
9476 initialization constants, and should not be expanded. */
9477 abort ();
9478
9479 default:
9480 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
9481 }
9482
9483 /* Here to do an ordinary binary operator, generating an instruction
9484 from the optab already placed in `this_optab'. */
9485 binop:
9486 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
9487 subtarget = 0;
9488 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
9489 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9490 binop2:
9491 if (modifier == EXPAND_STACK_PARM)
9492 target = 0;
9493 temp = expand_binop (mode, this_optab, op0, op1, target,
9494 unsignedp, OPTAB_LIB_WIDEN);
9495 if (temp == 0)
9496 abort ();
9497 return temp;
9498 }
9499 \f
9500 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9501 when applied to the address of EXP produces an address known to be
9502 aligned more than BIGGEST_ALIGNMENT. */
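/* Illustrative sketch (not in the original source): such an offset
typically has the source form "(-(intptr_t) &exp) & (align - 1)",
i.e. the amount needed to round the address of EXP up to a multiple
of ALIGN, where ALIGN is a power of 2 above BIGGEST_ALIGNMENT. */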
9503
9504 static int
9505 is_aligning_offset (offset, exp)
9506 tree offset;
9507 tree exp;
9508 {
9509 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9510 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9511 || TREE_CODE (offset) == NOP_EXPR
9512 || TREE_CODE (offset) == CONVERT_EXPR
9513 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9514 offset = TREE_OPERAND (offset, 0);
9515
9516 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9517 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9518 if (TREE_CODE (offset) != BIT_AND_EXPR
9519 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9520 || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
9521 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9522 return 0;
9523
9524 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9525 It must be NEGATE_EXPR. Then strip any more conversions. */
9526 offset = TREE_OPERAND (offset, 0);
9527 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9528 || TREE_CODE (offset) == NOP_EXPR
9529 || TREE_CODE (offset) == CONVERT_EXPR)
9530 offset = TREE_OPERAND (offset, 0);
9531
9532 if (TREE_CODE (offset) != NEGATE_EXPR)
9533 return 0;
9534
9535 offset = TREE_OPERAND (offset, 0);
9536 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9537 || TREE_CODE (offset) == NOP_EXPR
9538 || TREE_CODE (offset) == CONVERT_EXPR)
9539 offset = TREE_OPERAND (offset, 0);
9540
9541 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9542 whose type is the same as EXP. */
9543 return (TREE_CODE (offset) == ADDR_EXPR
9544 && (TREE_OPERAND (offset, 0) == exp
9545 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9546 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9547 == TREE_TYPE (exp)))));
9548 }
9549 \f
9550 /* Return the tree node if an ARG corresponds to a string constant or zero
9551 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9552 in bytes within the string that ARG is accessing. The type of the
9553 offset will be `sizetype'. */
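/* Illustrative example (not in the original source): for an ARG of the form
"hello" + 2 (an ADDR_EXPR of the STRING_CST plus the constant 2), we return
the STRING_CST and set *PTR_OFFSET to 2. */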
9554
9555 tree
9556 string_constant (arg, ptr_offset)
9557 tree arg;
9558 tree *ptr_offset;
9559 {
9560 STRIP_NOPS (arg);
9561
9562 if (TREE_CODE (arg) == ADDR_EXPR
9563 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9564 {
9565 *ptr_offset = size_zero_node;
9566 return TREE_OPERAND (arg, 0);
9567 }
9568 else if (TREE_CODE (arg) == PLUS_EXPR)
9569 {
9570 tree arg0 = TREE_OPERAND (arg, 0);
9571 tree arg1 = TREE_OPERAND (arg, 1);
9572
9573 STRIP_NOPS (arg0);
9574 STRIP_NOPS (arg1);
9575
9576 if (TREE_CODE (arg0) == ADDR_EXPR
9577 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9578 {
9579 *ptr_offset = convert (sizetype, arg1);
9580 return TREE_OPERAND (arg0, 0);
9581 }
9582 else if (TREE_CODE (arg1) == ADDR_EXPR
9583 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9584 {
9585 *ptr_offset = convert (sizetype, arg0);
9586 return TREE_OPERAND (arg1, 0);
9587 }
9588 }
9589
9590 return 0;
9591 }
9592 \f
9593 /* Expand code for a post- or pre- increment or decrement
9594 and return the RTX for the result.
9595 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9596
9597 static rtx
9598 expand_increment (exp, post, ignore)
9599 tree exp;
9600 int post, ignore;
9601 {
9602 rtx op0, op1;
9603 rtx temp, value;
9604 tree incremented = TREE_OPERAND (exp, 0);
9605 optab this_optab = add_optab;
9606 int icode;
9607 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9608 int op0_is_copy = 0;
9609 int single_insn = 0;
9610 /* 1 means we can't store into OP0 directly,
9611 because it is a subreg narrower than a word,
9612 and we don't dare clobber the rest of the word. */
9613 int bad_subreg = 0;
9614
9615 /* Stabilize any component ref that might need to be
9616 evaluated more than once below. */
9617 if (!post
9618 || TREE_CODE (incremented) == BIT_FIELD_REF
9619 || (TREE_CODE (incremented) == COMPONENT_REF
9620 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9621 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9622 incremented = stabilize_reference (incremented);
9623 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9624 ones into save exprs so that they don't accidentally get evaluated
9625 more than once by the code below. */
9626 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9627 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9628 incremented = save_expr (incremented);
9629
9630 /* Compute the operands as RTX.
9631 Note whether OP0 is the actual lvalue or a copy of it:
9632 I believe it is a copy iff it is a register or subreg
9633 and insns were generated in computing it. */
9634
9635 temp = get_last_insn ();
9636 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9637
9638 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9639 in place but instead must do sign- or zero-extension during assignment,
9640 so we copy it into a new register and let the code below use it as
9641 a copy.
9642
9643 Note that we can safely modify this SUBREG since it is known not to be
9644 shared (it was made by the expand_expr call above). */
9645
9646 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9647 {
9648 if (post)
9649 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9650 else
9651 bad_subreg = 1;
9652 }
9653 else if (GET_CODE (op0) == SUBREG
9654 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9655 {
9656 /* We cannot increment this SUBREG in place. If we are
9657 post-incrementing, get a copy of the old value. Otherwise,
9658 just mark that we cannot increment in place. */
9659 if (post)
9660 op0 = copy_to_reg (op0);
9661 else
9662 bad_subreg = 1;
9663 }
9664
9665 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9666 && temp != get_last_insn ());
9667 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9668
9669 /* Decide whether incrementing or decrementing. */
9670 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9671 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9672 this_optab = sub_optab;
9673
9674 /* Convert decrement by a constant into a negative increment. */
9675 if (this_optab == sub_optab
9676 && GET_CODE (op1) == CONST_INT)
9677 {
9678 op1 = GEN_INT (-INTVAL (op1));
9679 this_optab = add_optab;
9680 }
9681
9682 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9683 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9684
9685 /* For a preincrement, see if we can do this with a single instruction. */
9686 if (!post)
9687 {
9688 icode = (int) this_optab->handlers[(int) mode].insn_code;
9689 if (icode != (int) CODE_FOR_nothing
9690 /* Make sure that OP0 is valid for operands 0 and 1
9691 of the insn we want to queue. */
9692 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9693 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9694 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9695 single_insn = 1;
9696 }
9697
9698 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9699 then we cannot just increment OP0. We must therefore contrive to
9700 increment the original value. Then, for postincrement, we can return
9701 OP0 since it is a copy of the old value. For preincrement, expand here
9702 unless we can do it with a single insn.
9703
9704 Likewise if storing directly into OP0 would clobber high bits
9705 we need to preserve (bad_subreg). */
9706 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9707 {
9708 /* This is the easiest way to increment the value wherever it is.
9709 Problems with multiple evaluation of INCREMENTED are prevented
9710 because either (1) it is a component_ref or preincrement,
9711 in which case it was stabilized above, or (2) it is an array_ref
9712 with constant index in an array in a register, which is
9713 safe to reevaluate. */
9714 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9715 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9716 ? MINUS_EXPR : PLUS_EXPR),
9717 TREE_TYPE (exp),
9718 incremented,
9719 TREE_OPERAND (exp, 1));
9720
9721 while (TREE_CODE (incremented) == NOP_EXPR
9722 || TREE_CODE (incremented) == CONVERT_EXPR)
9723 {
9724 newexp = convert (TREE_TYPE (incremented), newexp);
9725 incremented = TREE_OPERAND (incremented, 0);
9726 }
9727
9728 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9729 return post ? op0 : temp;
9730 }
9731
9732 if (post)
9733 {
9734 /* We have a true reference to the value in OP0.
9735 If there is an insn to add or subtract in this mode, queue it.
9736 Queueing the increment insn avoids the register shuffling
9737 that often results if we must increment now and first save
9738 the old value for subsequent use. */
9739
9740 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9741 op0 = stabilize (op0);
9742 #endif
9743
9744 icode = (int) this_optab->handlers[(int) mode].insn_code;
9745 if (icode != (int) CODE_FOR_nothing
9746 /* Make sure that OP0 is valid for operands 0 and 1
9747 of the insn we want to queue. */
9748 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9749 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9750 {
9751 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9752 op1 = force_reg (mode, op1);
9753
9754 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9755 }
9756 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9757 {
9758 rtx addr = (general_operand (XEXP (op0, 0), mode)
9759 ? force_reg (Pmode, XEXP (op0, 0))
9760 : copy_to_reg (XEXP (op0, 0)));
9761 rtx temp, result;
9762
9763 op0 = replace_equiv_address (op0, addr);
9764 temp = force_reg (GET_MODE (op0), op0);
9765 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9766 op1 = force_reg (mode, op1);
9767
9768 /* The increment queue is LIFO, thus we have to `queue'
9769 the instructions in reverse order. */
9770 enqueue_insn (op0, gen_move_insn (op0, temp));
9771 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9772 return result;
9773 }
9774 }
9775
9776 /* Preincrement, or we can't increment with one simple insn. */
9777 if (post)
9778 /* Save a copy of the value before inc or dec, to return it later. */
9779 temp = value = copy_to_reg (op0);
9780 else
9781 /* Arrange to return the incremented value. */
9782 /* Copy the rtx because expand_binop will protect from the queue,
9783 and the results of that would be invalid for us to return
9784 if our caller does emit_queue before using our result. */
9785 temp = copy_rtx (value = op0);
9786
9787 /* Increment however we can. */
9788 op1 = expand_binop (mode, this_optab, value, op1, op0,
9789 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9790
9791 /* Make sure the value is stored into OP0. */
9792 if (op1 != op0)
9793 emit_move_insn (op0, op1);
9794
9795 return temp;
9796 }
9797 \f
9798 /* Generate code to calculate EXP using a store-flag instruction
9799 and return an rtx for the result. EXP is either a comparison
9800 or a TRUTH_NOT_EXPR whose operand is a comparison.
9801
9802 If TARGET is nonzero, store the result there if convenient.
9803
9804 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9805 cheap.
9806
9807 Return zero if there is no suitable set-flag instruction
9808 available on this machine.
9809
9810 Once expand_expr has been called on the arguments of the comparison,
9811 we are committed to doing the store flag, since it is not safe to
9812 re-evaluate the expression. We emit the store-flag insn by calling
9813 emit_store_flag, but only expand the arguments if we have a reason
9814 to believe that emit_store_flag will be successful. If we think that
9815 it will, but it isn't, we have to simulate the store-flag with a
9816 set/jump/set sequence. */
9817
9818 static rtx
9819 do_store_flag (exp, target, mode, only_cheap)
9820 tree exp;
9821 rtx target;
9822 enum machine_mode mode;
9823 int only_cheap;
9824 {
9825 enum rtx_code code;
9826 tree arg0, arg1, type;
9827 tree tem;
9828 enum machine_mode operand_mode;
9829 int invert = 0;
9830 int unsignedp;
9831 rtx op0, op1;
9832 enum insn_code icode;
9833 rtx subtarget = target;
9834 rtx result, label;
9835
9836 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9837 result at the end. We can't simply invert the test since it would
9838 have already been inverted if it were valid. This case occurs for
9839 some floating-point comparisons. */
9840
9841 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9842 invert = 1, exp = TREE_OPERAND (exp, 0);
9843
9844 arg0 = TREE_OPERAND (exp, 0);
9845 arg1 = TREE_OPERAND (exp, 1);
9846
9847 /* Don't crash if the comparison was erroneous. */
9848 if (arg0 == error_mark_node || arg1 == error_mark_node)
9849 return const0_rtx;
9850
9851 type = TREE_TYPE (arg0);
9852 operand_mode = TYPE_MODE (type);
9853 unsignedp = TREE_UNSIGNED (type);
9854
9855 /* We won't bother with BLKmode store-flag operations because it would mean
9856 passing a lot of information to emit_store_flag. */
9857 if (operand_mode == BLKmode)
9858 return 0;
9859
9860 /* We won't bother with store-flag operations involving function pointers
9861 when function pointers must be canonicalized before comparisons. */
9862 #ifdef HAVE_canonicalize_funcptr_for_compare
9863 if (HAVE_canonicalize_funcptr_for_compare
9864 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9865 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9866 == FUNCTION_TYPE))
9867 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9868 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9869 == FUNCTION_TYPE))))
9870 return 0;
9871 #endif
9872
9873 STRIP_NOPS (arg0);
9874 STRIP_NOPS (arg1);
9875
9876 /* Get the rtx comparison code to use. We know that EXP is a comparison
9877 operation of some type. Some comparisons against 1 and -1 can be
9878 converted to comparisons with zero. Do so here so that the tests
9879 below will be aware that we have a comparison with zero. These
9880 tests will not catch constants in the first operand, but constants
9881 are rarely passed as the first operand. */
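/* Illustrative examples (not in the original source): a signed "x < 1"
becomes "x <= 0", "x >= 1" becomes "x > 0", and a signed "x <= -1"
becomes "x < 0", so the zero-comparison shortcuts below can apply. */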
9882
9883 switch (TREE_CODE (exp))
9884 {
9885 case EQ_EXPR:
9886 code = EQ;
9887 break;
9888 case NE_EXPR:
9889 code = NE;
9890 break;
9891 case LT_EXPR:
9892 if (integer_onep (arg1))
9893 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9894 else
9895 code = unsignedp ? LTU : LT;
9896 break;
9897 case LE_EXPR:
9898 if (! unsignedp && integer_all_onesp (arg1))
9899 arg1 = integer_zero_node, code = LT;
9900 else
9901 code = unsignedp ? LEU : LE;
9902 break;
9903 case GT_EXPR:
9904 if (! unsignedp && integer_all_onesp (arg1))
9905 arg1 = integer_zero_node, code = GE;
9906 else
9907 code = unsignedp ? GTU : GT;
9908 break;
9909 case GE_EXPR:
9910 if (integer_onep (arg1))
9911 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9912 else
9913 code = unsignedp ? GEU : GE;
9914 break;
9915
9916 case UNORDERED_EXPR:
9917 code = UNORDERED;
9918 break;
9919 case ORDERED_EXPR:
9920 code = ORDERED;
9921 break;
9922 case UNLT_EXPR:
9923 code = UNLT;
9924 break;
9925 case UNLE_EXPR:
9926 code = UNLE;
9927 break;
9928 case UNGT_EXPR:
9929 code = UNGT;
9930 break;
9931 case UNGE_EXPR:
9932 code = UNGE;
9933 break;
9934 case UNEQ_EXPR:
9935 code = UNEQ;
9936 break;
9937
9938 default:
9939 abort ();
9940 }
9941
9942 /* Put a constant second. */
9943 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9944 {
9945 tem = arg0; arg0 = arg1; arg1 = tem;
9946 code = swap_condition (code);
9947 }
9948
9949 /* If this is an equality or inequality test of a single bit, we can
9950 do this by shifting the bit being tested to the low-order bit and
9951 masking the result with the constant 1. If the condition was EQ,
9952 we xor it with 1. This does not require an scc insn and is faster
9953 than an scc insn even if we have it. */
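/* Illustrative example (not in the original source): "(x & 8) != 0" can be
computed as "(x >> 3) & 1", and "(x & 8) == 0" as "((x >> 3) & 1) ^ 1",
with the shift, AND, or XOR omitted when they are not needed. */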
9954
9955 if ((code == NE || code == EQ)
9956 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9957 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9958 {
9959 tree inner = TREE_OPERAND (arg0, 0);
9960 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
9961 int ops_unsignedp;
9962
9963 /* If INNER is a right shift of a constant and it plus BITNUM does
9964 not overflow, adjust BITNUM and INNER. */
9965
9966 if (TREE_CODE (inner) == RSHIFT_EXPR
9967 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
9968 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
9969 && bitnum < TYPE_PRECISION (type)
9970 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
9971 bitnum - TYPE_PRECISION (type)))
9972 {
9973 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
9974 inner = TREE_OPERAND (inner, 0);
9975 }
9976
9977 /* If we are going to be able to omit the AND below, we must do our
9978 operations as unsigned. If we must use the AND, we have a choice.
9979 Normally unsigned is faster, but for some machines signed is. */
9980 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
9981 #ifdef LOAD_EXTEND_OP
9982 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
9983 #else
9984 : 1
9985 #endif
9986 );
9987
9988 if (! get_subtarget (subtarget)
9989 || GET_MODE (subtarget) != operand_mode
9990 || ! safe_from_p (subtarget, inner, 1))
9991 subtarget = 0;
9992
9993 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
9994
9995 if (bitnum != 0)
9996 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
9997 size_int (bitnum), subtarget, ops_unsignedp);
9998
9999 if (GET_MODE (op0) != mode)
10000 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10001
10002 if ((code == EQ && ! invert) || (code == NE && invert))
10003 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10004 ops_unsignedp, OPTAB_LIB_WIDEN);
10005
10006 /* Put the AND last so it can combine with more things. */
10007 if (bitnum != TYPE_PRECISION (type) - 1)
10008 op0 = expand_and (mode, op0, const1_rtx, subtarget);
10009
10010 return op0;
10011 }
10012
10013 /* Now see if we are likely to be able to do this. Return if not. */
10014 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10015 return 0;
10016
10017 icode = setcc_gen_code[(int) code];
10018 if (icode == CODE_FOR_nothing
10019 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10020 {
10021 /* We can only do this if it is one of the special cases that
10022 can be handled without an scc insn. */
10023 if ((code == LT && integer_zerop (arg1))
10024 || (! only_cheap && code == GE && integer_zerop (arg1)))
10025 ;
10026 else if (BRANCH_COST >= 0
10027 && ! only_cheap && (code == NE || code == EQ)
10028 && TREE_CODE (type) != REAL_TYPE
10029 && ((abs_optab->handlers[(int) operand_mode].insn_code
10030 != CODE_FOR_nothing)
10031 || (ffs_optab->handlers[(int) operand_mode].insn_code
10032 != CODE_FOR_nothing)))
10033 ;
10034 else
10035 return 0;
10036 }
10037
10038 if (! get_subtarget (target)
10039 || GET_MODE (subtarget) != operand_mode
10040 || ! safe_from_p (subtarget, arg1, 1))
10041 subtarget = 0;
10042
10043 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10044 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10045
10046 if (target == 0)
10047 target = gen_reg_rtx (mode);
10048
10049 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10050 because, if emit_store_flag does anything, it will succeed and
10051 OP0 and OP1 will not be used subsequently. */
10052
10053 result = emit_store_flag (target, code,
10054 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10055 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10056 operand_mode, unsignedp, 1);
10057
10058 if (result)
10059 {
10060 if (invert)
10061 result = expand_binop (mode, xor_optab, result, const1_rtx,
10062 result, 0, OPTAB_LIB_WIDEN);
10063 return result;
10064 }
10065
10066 /* If this failed, we have to do this with set/compare/jump/set code. */
10067 if (GET_CODE (target) != REG
10068 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10069 target = gen_reg_rtx (GET_MODE (target));
10070
10071 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10072 result = compare_from_rtx (op0, op1, code, unsignedp,
10073 operand_mode, NULL_RTX);
10074 if (GET_CODE (result) == CONST_INT)
10075 return (((result == const0_rtx && ! invert)
10076 || (result != const0_rtx && invert))
10077 ? const0_rtx : const1_rtx);
10078
10079 /* The code of RESULT may not match CODE if compare_from_rtx
10080 decided to swap its operands and reverse the original code.
10081
10082 We know that compare_from_rtx returns either a CONST_INT or
10083 a new comparison code, so it is safe to just extract the
10084 code from RESULT. */
10085 code = GET_CODE (result);
10086
10087 label = gen_label_rtx ();
10088 if (bcc_gen_fctn[(int) code] == 0)
10089 abort ();
10090
10091 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10092 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10093 emit_label (label);
10094
10095 return target;
10096 }
10097 \f
10098
10099 /* Stubs in case we haven't got a casesi insn. */
10100 #ifndef HAVE_casesi
10101 # define HAVE_casesi 0
10102 # define gen_casesi(a, b, c, d, e) (0)
10103 # define CODE_FOR_casesi CODE_FOR_nothing
10104 #endif
10105
10106 /* If the machine does not have a case insn that compares the bounds,
10107 this means extra overhead for dispatch tables, which raises the
10108 threshold for using them. */
10109 #ifndef CASE_VALUES_THRESHOLD
10110 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
10111 #endif /* CASE_VALUES_THRESHOLD */
10112
10113 unsigned int
10114 case_values_threshold ()
10115 {
10116 return CASE_VALUES_THRESHOLD;
10117 }
10118
10119 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10120 0 otherwise (i.e. if there is no casesi instruction). */
10121 int
10122 try_casesi (index_type, index_expr, minval, range,
10123 table_label, default_label)
10124 tree index_type, index_expr, minval, range;
10125 rtx table_label ATTRIBUTE_UNUSED;
10126 rtx default_label;
10127 {
10128 enum machine_mode index_mode = SImode;
10129 int index_bits = GET_MODE_BITSIZE (index_mode);
10130 rtx op1, op2, index;
10131 enum machine_mode op_mode;
10132
10133 if (! HAVE_casesi)
10134 return 0;
10135
10136 /* Convert the index to SImode. */
10137 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10138 {
10139 enum machine_mode omode = TYPE_MODE (index_type);
10140 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
10141
10142 /* We must handle the endpoints in the original mode. */
10143 index_expr = build (MINUS_EXPR, index_type,
10144 index_expr, minval);
10145 minval = integer_zero_node;
10146 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10147 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10148 omode, 1, default_label);
10149 /* Now we can safely truncate. */
10150 index = convert_to_mode (index_mode, index, 0);
10151 }
10152 else
10153 {
10154 if (TYPE_MODE (index_type) != index_mode)
10155 {
10156 index_expr = convert ((*lang_hooks.types.type_for_size)
10157 (index_bits, 0), index_expr);
10158 index_type = TREE_TYPE (index_expr);
10159 }
10160
10161 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10162 }
10163 emit_queue ();
10164 index = protect_from_queue (index, 0);
10165 do_pending_stack_adjust ();
10166
10167 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
10168 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
10169 (index, op_mode))
10170 index = copy_to_mode_reg (op_mode, index);
10171
10172 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
10173
10174 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
10175 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
10176 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
10177 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
10178 (op1, op_mode))
10179 op1 = copy_to_mode_reg (op_mode, op1);
10180
10181 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
10182
10183 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
10184 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
10185 op2, TREE_UNSIGNED (TREE_TYPE (range)));
10186 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
10187 (op2, op_mode))
10188 op2 = copy_to_mode_reg (op_mode, op2);
10189
10190 emit_jump_insn (gen_casesi (index, op1, op2,
10191 table_label, default_label));
10192 return 1;
10193 }
10194
10195 /* Attempt to generate a tablejump instruction; same concept. */
10196 #ifndef HAVE_tablejump
10197 #define HAVE_tablejump 0
10198 #define gen_tablejump(x, y) (0)
10199 #endif
10200
10201 /* Subroutine of the next function.
10202
10203 INDEX is the value being switched on, with the lowest value
10204 in the table already subtracted.
10205 MODE is its expected mode (needed if INDEX is constant).
10206 RANGE is the length of the jump table.
10207 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10208
10209 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10210 index value is out of range. */
10211
10212 static void
10213 do_tablejump (index, mode, range, table_label, default_label)
10214 rtx index, range, table_label, default_label;
10215 enum machine_mode mode;
10216 {
10217 rtx temp, vector;
10218
10219 if (INTVAL (range) > cfun->max_jumptable_ents)
10220 cfun->max_jumptable_ents = INTVAL (range);
10221
10222 /* Do an unsigned comparison (in the proper mode) between the index
10223 expression and the value which represents the length of the range.
10224 Since we just finished subtracting the lower bound of the range
10225 from the index expression, this comparison allows us to simultaneously
10226 check that the original index expression value is both greater than
10227 or equal to the minimum value of the range and less than or equal to
10228 the maximum value of the range. */
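/* Illustrative example (not in the original source): for cases 5..12 the
index has already had 5 subtracted, so a single unsigned "index > 7"
rejects both original values below 5 (which wrap around to very large
unsigned numbers) and values above 12. */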
10229
10230 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10231 default_label);
10232
10233 /* If index is in range, it must fit in Pmode.
10234 Convert to Pmode so we can index with it. */
10235 if (mode != Pmode)
10236 index = convert_to_mode (Pmode, index, 1);
10237
10238 /* Don't let a MEM slip thru, because then INDEX that comes
10239 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10240 and break_out_memory_refs will go to work on it and mess it up. */
10241 #ifdef PIC_CASE_VECTOR_ADDRESS
10242 if (flag_pic && GET_CODE (index) != REG)
10243 index = copy_to_mode_reg (Pmode, index);
10244 #endif
10245
10246 /* If flag_force_addr were to affect this address
10247 it could interfere with the tricky assumptions made
10248 about addresses that contain label-refs,
10249 which may be valid only very near the tablejump itself. */
10250 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10251 GET_MODE_SIZE, because this indicates how large insns are. The other
10252 uses should all be Pmode, because they are addresses. This code
10253 could fail if addresses and insns are not the same size. */
10254 index = gen_rtx_PLUS (Pmode,
10255 gen_rtx_MULT (Pmode, index,
10256 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10257 gen_rtx_LABEL_REF (Pmode, table_label));
10258 #ifdef PIC_CASE_VECTOR_ADDRESS
10259 if (flag_pic)
10260 index = PIC_CASE_VECTOR_ADDRESS (index);
10261 else
10262 #endif
10263 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10264 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10265 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10266 RTX_UNCHANGING_P (vector) = 1;
10267 convert_move (temp, vector, 0);
10268
10269 emit_jump_insn (gen_tablejump (temp, table_label));
10270
10271 /* If we are generating PIC code or if the table is PC-relative, the
10272 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10273 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10274 emit_barrier ();
10275 }
10276
10277 int
10278 try_tablejump (index_type, index_expr, minval, range,
10279 table_label, default_label)
10280 tree index_type, index_expr, minval, range;
10281 rtx table_label, default_label;
10282 {
10283 rtx index;
10284
10285 if (! HAVE_tablejump)
10286 return 0;
10287
10288 index_expr = fold (build (MINUS_EXPR, index_type,
10289 convert (index_type, index_expr),
10290 convert (index_type, minval)));
10291 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10292 emit_queue ();
10293 index = protect_from_queue (index, 0);
10294 do_pending_stack_adjust ();
10295
10296 do_tablejump (index, TYPE_MODE (index_type),
10297 convert_modes (TYPE_MODE (index_type),
10298 TYPE_MODE (TREE_TYPE (range)),
10299 expand_expr (range, NULL_RTX,
10300 VOIDmode, 0),
10301 TREE_UNSIGNED (TREE_TYPE (range))),
10302 table_label, default_label);
10303 return 1;
10304 }
10305
10306 /* Nonzero if the mode is a valid vector mode for this architecture.
10307 This returns nonzero even if there is no hardware support for the
10308 vector mode, but we can emulate with narrower modes. */
10309
10310 int
10311 vector_mode_valid_p (mode)
10312 enum machine_mode mode;
10313 {
10314 enum mode_class class = GET_MODE_CLASS (mode);
10315 enum machine_mode innermode;
10316
10317 /* Doh! What's going on? */
10318 if (class != MODE_VECTOR_INT
10319 && class != MODE_VECTOR_FLOAT)
10320 return 0;
10321
10322 /* Hardware support. Woo hoo! */
10323 if (VECTOR_MODE_SUPPORTED_P (mode))
10324 return 1;
10325
10326 innermode = GET_MODE_INNER (mode);
10327
10328 /* We should probably return 1 if requesting V4DI and we have no DI,
10329 but do have V2DI; however, this case is probably very unlikely. */
10330
10331 /* If we have support for the inner mode, we can safely emulate it.
10332 We may not have V2DI, but we can emulate with a pair of DIs. */
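/* Hypothetical example: a generic V4SImode request on a target with no
vector unit is still "valid" here as long as plain SImode moves exist,
since the vector can be synthesized element by element. */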
10333 return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
10334 }
10335
10336 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
10337 static rtx
10338 const_vector_from_tree (exp)
10339 tree exp;
10340 {
10341 rtvec v;
10342 int units, i;
10343 tree link, elt;
10344 enum machine_mode inner, mode;
10345
10346 mode = TYPE_MODE (TREE_TYPE (exp));
10347
10348 if (is_zeros_p (exp))
10349 return CONST0_RTX (mode);
10350
10351 units = GET_MODE_NUNITS (mode);
10352 inner = GET_MODE_INNER (mode);
10353
10354 v = rtvec_alloc (units);
10355
10356 link = TREE_VECTOR_CST_ELTS (exp);
10357 for (i = 0; link; link = TREE_CHAIN (link), ++i)
10358 {
10359 elt = TREE_VALUE (link);
10360
10361 if (TREE_CODE (elt) == REAL_CST)
10362 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
10363 inner);
10364 else
10365 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
10366 TREE_INT_CST_HIGH (elt),
10367 inner);
10368 }
10369
10370 return gen_rtx_raw_CONST_VECTOR (mode, v);
10371 }
10372
10373 #include "gt-expr.h"